Collecting multiple logs with Filebeat at the same time


1. Filebeat configuration file:
filebeat.inputs:
- type: log
  enabled: true
  backoff: "1s"
  tail_files: false
  paths:
    - /usr/local/nginx/logs/access-json.log
  fields:
    filetype: logjson          # this custom field is what distinguishes the two log files
  fields_under_root: true
- type: log
  enabled: true 
  backoff: "1s"
  tail_files: false 
  paths:
    - /var/log/messages
  fields:
    filetype: logsystem
  fields_under_root: true

output.logstash:
  enabled: true
  hosts: ["localhost:5044"]   #如果ip不通把localhost换成你的ip

fields: custom fields added to every event from that input.
fields_under_root: when true, the custom fields are placed as top-level fields of the output document instead of being nested under "fields".
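
With fields_under_root: true the custom field therefore sits at the top level of each event, which is exactly what the Logstash conditionals below test with [filetype]. A minimal sketch of an event shipped for the nginx input (most Filebeat metadata fields omitted, values purely illustrative):

{
  "@timestamp": "2019-05-08T08:00:00.000Z",
  "message": "{\"timestamp\":\"08/May/2019:16:00:00 +0800\",\"status\":200}",
  "filetype": "logjson"
}

With fields_under_root: false the same field would instead appear nested as "fields": {"filetype": "logjson"}, and the conditionals would have to test [fields][filetype].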


2. Logstash configuration
input {
  beats {
    host => "0.0.0.0"
    port => 5044
  }
}

filter {
  if [filetype] == "logjson" {
    json {
      source => "message"
      remove_field => ["beat","offset","tags","prospector"]
    }
    date {
      match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
      target => "@timestamp"
    }
  }
}

output {
  if [filetype] == "logjson" {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "nginx-%{+YYYY.MM.dd}"
    }
  } else if [filetype] == "logsystem" {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "msg-%{+YYYY.MM.dd}"
    }
  }
}
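
The json filter assumes that /usr/local/nginx/logs/access-json.log already contains one JSON object per line, and the date filter assumes that object carries a timestamp field in nginx's $time_local format (e.g. 08/May/2019:16:00:00 +0800). The original post does not show the nginx side; a hypothetical log_format that would produce such a file (escape=json needs nginx 1.11.8 or newer, and the field names here are only illustrative) could look like:

# hypothetical nginx.conf snippet, not taken from the original post
log_format access_json escape=json
  '{"timestamp":"$time_local",'
  '"remote_addr":"$remote_addr",'
  '"request":"$request",'
  '"status":"$status",'
  '"body_bytes_sent":"$body_bytes_sent"}';
access_log /usr/local/nginx/logs/access-json.log access_json;

Any field set works as long as the timestamp field keeps the dd/MMM/yyyy:HH:mm:ss Z pattern expected by the date filter above.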

Reposted from www.cnblogs.com/jw-yahui/p/10843658.html