Logstash 配置 (Logstash pipeline configuration: Kafka → filter → Elasticsearch)

# Kafka input: consume raw log lines from the LEK-log topic.
# NOTE(review): zk_connect / topic_id / auto_offset_reset => "largest" are
# legacy logstash-input-kafka (Logstash 2.x era, old ZooKeeper-based consumer)
# options; newer plugin versions use bootstrap_servers / topics instead —
# verify against the deployed Logstash version before upgrading.
input {
 kafka {
     # ZooKeeper ensemble used by the old high-level consumer for
     # coordination and offset storage.
     zk_connect => ["10.3.0.248:2181,10.3.0.249:2181,10.3.0.250:2181"]
     consumer_id => "lekconsumer"
     group_id => "lekconsumergroup"
     # When no committed offset exists, start from the newest message
     # ("largest" is the legacy spelling of "latest").
     auto_offset_reset => "largest"
     consumer_threads => 5
     # Attach Kafka metadata (topic, partition, ...) to each event.
     decorate_events => true
     topic_id => ["LEK-log"]
     # Tag events so the filter and output sections can route on [type].
     type => "kafka-source"
   }
}
# Filter chain: keep only well-formed ERROR-level application log lines,
# parse their header fields, and use the log's own timestamp as @timestamp.
filter {
 if [type] == "kafka-source" {
         # Drop continuation lines (e.g. stack-trace lines that start with
         # whitespace) — only the first line of a log entry is kept.
          if ([message] =~ "^\s") { drop{} }  
         # if ([message] =~ "([a-zA-Z_0-9\-]+\.)+[a-zA-Z_0-9\-]+(Exception|Throwable)") { drop{} }
         # Drop any line that does not contain at least four bracketed
         # "[...]" fields — i.e. lines not matching the expected log header.
          if ([message] !~ "(\[[\s\S]+\]\s+){4,}") {drop{}}
 
 
        # Two header layouts exist: with and without a ticket field between
        # the client IP and the timestamp. Pick the grok pattern accordingly.
        if ([message] =~ "[\s\S]+ticket[\s\S]+") {
             # [ip] [ticket] [timestamp] [pool] [level] ...
             grok {
                match => {"message" => "\[%{IP:client_ip}\]\s+\[(?<ticket>\S+)\]\s+\[%{TIMESTAMP_ISO8601:logtime}\]\s+\[%{NOTSPACE:pool}\]\s+\[%{LOGLEVEL:level}\]" }
             }
         } else {
            # [ip] [timestamp] [pool] [level] ...
            grok {
                match => {"message" => "\[%{IP:client_ip}\]\s+\[%{TIMESTAMP_ISO8601:logtime}\]\s+\[%{NOTSPACE:pool}\]\s+\[%{LOGLEVEL:level}\]" }
            }
         }
         # Only ERROR-level events are indexed; everything else is discarded.
         # NOTE(review): if grok fails, [level] is unset and the event is
         # dropped here too — grok failures are silently discarded.
         if [level] != "ERROR" { drop{}   }  
         # Replace @timestamp (Kafka ingest time) with the timestamp parsed
         # out of the log line itself.
         date {
            match => ["logtime", "yyyy-MM-dd HH:mm:ss.SSS"]
            target => "@timestamp"
         } 
     } 
}
# Output: index the surviving ERROR events into Elasticsearch, one index
# per month (logstash-pool-error-YYYY.MM).
output {
   if [type] == "kafka-source" {
             elasticsearch {
                hosts => ["10.18.3.194:9200"]
                index => "logstash-pool-error-%{+yyyy.MM}"
                # Re-push the index template on startup, overwriting any
                # existing template of the same name.
                template_overwrite => true
             }
    }
}

Source / attribution: reposted from blog.csdn.net/jason_xiaojie_liu/article/details/80622102