Distinguishing between log events in a Logstash configuration file

1. Multiple log files as the input source

input {
    # distinguish this log's events by giving them a type
    file {
        path => [ "/var/log/nginx/access.log" ]
        type => "nginx_access"
        start_position => "beginning"
    }

    # likewise, give this log's events their own type
    file {
        path => [ "/var/log/nginx/error.log" ]
        type => "nginx_error"
        start_position => "beginning"
    }

    # distinguish this log's events by adding a custom field instead
    file {
        path => [ "/var/log/nginx/api.log" ]
        add_field => { "myid" => "api" }
        start_position => "beginning"
    }
}

filter {
    # check each event's type (or custom field) and process it accordingly
    if [type] == "nginx_access" {
        grok {
            match => { "message" => "" }    # access-log grok pattern goes here
        }
    }

    if [type] == "nginx_error" {
        grok {
            match => { "message" => "" }    # error-log grok pattern goes here
        }
    }

    if [myid] == "api" {
        grok {
            match => { "message" => "" }    # api-log grok pattern goes here
        }
    }
}

output {
    # store each event type in its own, separately named index
    if [type] == "nginx_access" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_access-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "nginx_error" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_error-%{+YYYY.MM.dd}"
        }
    }

    if [myid] == "api" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_api-%{+YYYY.MM.dd}"
        }
    }
}
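
The grok patterns above are left blank in the source. As a minimal sketch, assuming nginx writes its access log in the default "combined" format, the nginx_access branch could use the stock COMBINEDAPACHELOG pattern shipped with Logstash (the combined format is shared by Apache and nginx); the other branches would need patterns matching their own log formats:

filter {
    if [type] == "nginx_access" {
        grok {
            # parse the default nginx "combined" access log format;
            # adjust this if your log_format directive differs
            match => { "message" => "%{COMBINEDAPACHELOG}" }
        }
    }
}

Each config file can also be syntax-checked before deployment with bin/logstash -f <config file> --config.test_and_exit.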

 

2. Redis as the input source

input {
    redis {
        host => '10.105.199.10'
        type => 'web_error'
        port => '8000'
        data_type => 'list'
        key => 'web_error'
        password => "E1e7ed7eF437416165597b956fac004e"
        db => 0
    }

}

output {
    if [type] == "web_error" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_web_error-%{+YYYY.MM.dd}"
        }
    }

}
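
For this to work, something has to push events onto the web_error list in Redis. A minimal sketch of the shipper side, assuming another Logstash instance collects the logs and uses the Redis output plugin with the same host, key and credentials as the input above:

output {
    redis {
        host => ["10.105.199.10"]
        port => 8000
        data_type => "list"
        key => "web_error"
        password => "E1e7ed7eF437416165597b956fac004e"
        db => 0
    }
}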

 

3. Kafka as the input source

input {
    kafka {
        bootstrap_servers => "10.105.199.10:9092"
        topics => ["www.example.com"]
        codec => "json"
    }
}

filter {
    grok {
        match => {
            "message" => "正则表达式匹配nginx日志"
        }
    }
}

output {
    elasticsearch {
        hosts => ["127.0.0.1:9200"]
        index => "logstash-www.example.com_%{+YYYY.MM.dd}"
    }
}
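
On the producer side, whatever writes to this topic should emit JSON so that codec => "json" on the input can decode it into event fields. A minimal sketch, assuming a separate Logstash shipper publishes to the same broker and topic via the Kafka output plugin:

output {
    kafka {
        bootstrap_servers => "10.105.199.10:9092"
        topic_id => "www.example.com"
        codec => json
    }
}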

Origin www.cnblogs.com/t-road/p/11274751.html