
    Logstash: pulling IIS log files into Elasticsearch

    If you need to analyze IIS logs, you can use Logstash to read them from the log files and ship them off for analysis.

    Input section:

    input {
        file {
            type => "iis_log_monitor"
            path => ["D:/k/iislog/monitor*/W3SVC4/*.log"]
            start_position => "beginning"
            sincedb_path => "../config-demo/log/iis_log_monitor.log"
            sincedb_write_interval => 5
            discover_interval => 2
        }
        file {
            type => "iis_log_weixin"
            path => ["D:/k/iislog/weixin*/W3SVC18/*.log"]
            start_position => "beginning"
            sincedb_path => "../config-demo/log/iis_log_weixin.log"
            sincedb_write_interval => 5
            discover_interval => 2
        }
        file {
            type => "iis_log_imagedas"
            path => ["D:/k/iislog/imagedas/*.log"]
            start_position => "beginning"
            sincedb_path => "../config-demo/log/iis_log_imagedas.log"
            sincedb_write_interval => 5
            discover_interval => 2
        }
    }

    The input block supports multiple data sources; here each file input tracks its own read offset through its sincedb_path.
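
    To sanity-check that these path globs actually match the IIS log directories before starting Logstash, a small helper like the following can be used. This is a hypothetical Python sketch, not part of the original post; the patterns are copied from the input block above.

    import glob

    # Path patterns copied from the Logstash file inputs above
    patterns = [
        "D:/k/iislog/monitor*/W3SVC4/*.log",
        "D:/k/iislog/weixin*/W3SVC18/*.log",
        "D:/k/iislog/imagedas/*.log",
    ]

    # Print how many log files each pattern currently matches on disk
    for pattern in patterns:
        matches = glob.glob(pattern)
        print(f"{pattern}: {len(matches)} file(s) matched")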

    Filter section:

    filter {
        if [message] =~ "^#" {
            drop {}
        }
        grok {
            match => ["message", "%{TIMESTAMP_ISO8601:log_timestamp} (%{IPORHOST:s-ip}|-) (%{WORD:cs-method}|-) %{NOTSPACE:cs-uri-stem} %{NOTSPACE:cs-uri-query} (%{NUMBER:s-port}|-) (%{WORD:cs-username}|-) (%{IPORHOST:c-ip}|-) %{NOTSPACE:cs-useragent} (%{NUMBER:sc-status}|-) (%{NUMBER:sc-substatus}|-) (%{NUMBER:sc-win32-status}|-) (%{NUMBER:time-taken}|-)"]
        }
        date {
            match => [ "log_timestamp", "YYYY-MM-dd HH:mm:ss" ]
            timezone => "Asia/Shanghai"
        }
        useragent {
            source => "cs-useragent"
        }
    }

    The filter flow is:

    1. Drop records that start with "#" (the IIS log header lines).
    2. Use grok to parse each log line into structured fields (see the sketch after this list).
    3. Use the log's own timestamp as Logstash's @timestamp.
    4. Parse the client's user-agent (UA) information.
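
    To make the grok step concrete, here is a rough Python sketch (not part of the original post) that parses a made-up IIS W3C log line with a regex approximating the grok pattern and prints the extracted fields. The hyphenated grok field names become underscores because Python group names cannot contain "-", and the sample line is invented for illustration only.

    import re

    # Regex roughly equivalent to the grok pattern above
    IIS_LINE = re.compile(
        r"(?P<log_timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) "
        r"(?P<s_ip>\S+) (?P<cs_method>\S+) (?P<cs_uri_stem>\S+) (?P<cs_uri_query>\S+) "
        r"(?P<s_port>\S+) (?P<cs_username>\S+) (?P<c_ip>\S+) (?P<cs_useragent>\S+) "
        r"(?P<sc_status>\S+) (?P<sc_substatus>\S+) (?P<sc_win32_status>\S+) (?P<time_taken>\S+)"
    )

    # Invented sample line in the same field order as the grok pattern
    sample = ("2017-05-25 09:21:33 10.0.0.12 GET /api/images/1.jpg id=5 80 - "
              "203.0.113.7 Mozilla/5.0+(Windows+NT+6.1) 200 0 0 187")

    m = IIS_LINE.match(sample)
    if m:
        for field, value in m.groupdict().items():
            print(f"{field}: {value}")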

    Output to Elasticsearch:

    output{
        # stdout{
        #     codec => rubydebug 
        # }
        elasticsearch { 
            hosts => ["xxx.xxx.xxx.xxx:9200"]
            index => "iislog"
            document_type => "iisloginfo"
            workers => 1
            template => "../config-demo/templates/iislog.json"
            template_name => "iislog"
            template_overwrite => true
        }
    }
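
    After the pipeline has been running for a while, a quick check against the cluster confirms that documents are landing in the iislog index. This is a hypothetical Python sketch, not part of the original post; it assumes the requests library and reuses the same placeholder host as the output block above.

    import requests

    ES_HOST = "http://xxx.xxx.xxx.xxx:9200"  # placeholder, same as the output config

    # Total number of documents indexed so far
    count = requests.get(f"{ES_HOST}/iislog/_count").json()
    print("documents indexed:", count.get("count"))

    # Fetch the most recent document to eyeball the parsed fields
    resp = requests.get(f"{ES_HOST}/iislog/_search",
                        params={"size": 1, "sort": "@timestamp:desc"}).json()
    for hit in resp["hits"]["hits"]:
        print(hit["_source"])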

     

    posted @ 2017-05-25 17:21 Mr. Hu