Logstash 6.3.1 configuration: Kafka input → grok/ERROR filter → Elasticsearch output

# Input: consume raw log lines from a 3-broker Kafka cluster.
input {
 kafka {
     bootstrap_servers => "10.3.0.248:9092,10.3.0.249:9092,10.3.0.250:9092"
     client_id => "elkconsumer"
     # All Logstash instances sharing this group_id split the topic partitions.
     group_id => "elkconsumergroup"
     # With no committed offset, start from the newest messages (skip backlog).
     auto_offset_reset => "latest"
     # Five consumer threads in this pipeline instance.
     consumer_threads => 5
     # Attach Kafka metadata (topic/partition/offset) to each event.
     # NOTE(review): in Logstash 6.x this lands under [@metadata][kafka] — confirm
     # downstream consumers expect that location.
     decorate_events => true
     topics => ["LEK-log"]
     # Tag used by the conditionals in the filter and output sections below.
     type => "kafka-source"
   }
}
# Filter: parse log lines from the Kafka input, keep only ERROR events,
# set the event timestamp from the log line, and add a drill-down URL.
filter {
    if [type] == "kafka-source" {
        grok {
            # Two layouts appear in the source logs: [ticket]-first and
            # [client_ip]-first; each pattern is tried in order until one matches.
            # Uses the supported hash form of `match` (field => [patterns]) instead
            # of the deprecated flat array with the field name repeated.
            match => {
                "message" => [
                    "^\s*(\[(?<ticket>ticket:[0-9]+)\])?\s*\[%{TIMESTAMP_ISO8601:logtime}\]\s*\[%{IP:client_ip}\]\s*\[%{NOTSPACE:pool}\]\s*(\[[\w-]+\])?\s*\[%{LOGLEVEL:level}\s*\]\s*(\[[\w-]*?\])?\s*(\[(?<exception>(\w+\.){3,}[^\s]+)\])?",
                    "^\s*\[%{IP:client_ip}\]\s*(\[(?<ticket>ticket:[0-9]+)\])?\s*\[%{TIMESTAMP_ISO8601:logtime}\]\s*\[%{NOTSPACE:pool}\]\s*(\[[\w-]+\])?\s*\[%{LOGLEVEL:level}\s*\]\s*(\[[\w-]*?\])?\s*(\[(?<exception>(\w+\.){3,}[^\s]+)\])?"
                ]
            }
        }
        # Keep ERROR events only. Note: events where grok failed to extract
        # [level] are also dropped here (a missing field is != "ERROR").
        if [level] != "ERROR" { drop {} }
        # Use the timestamp parsed out of the log line as the event time.
        date {
            match => ["logtime", "yyyy-MM-dd HH:mm:ss.SSS"]
            target => "@timestamp"
        }
        # Link back to the raw-log viewer for this pool/host/timestamp.
        # NOTE(review): %{logtime} contains a space and is not percent-encoded,
        # so the resulting URL has a literal space — confirm the viewer accepts it.
        mutate {
           add_field => {
                "logdetailpath" => "http://10.22.0.14:8080/logs/index/%{pool}/%{client_ip}/%{logtime}"
           }
        }
    }
}
# Output: index the surviving ERROR events into the Elasticsearch cluster.
output {
   if [type] == "kafka-source" {
             elasticsearch {
                hosts => ["10.22.0.10:9200", "10.22.0.11:9200", "10.22.0.12:9200"]
                # Monthly indices, e.g. logstash-pool-error-2018.07.
                index => "logstash-pool-error-%{+yyyy.MM}"
                # NOTE(review): no `template` option is set, so this re-installs
                # the plugin's default index template on startup, overwriting any
                # manual changes to it — confirm this is intended.
                template_overwrite => true
             }
    }
}

Reprinted from: blog.csdn.net/jason_xiaojie_liu/article/details/81034040