Change: replace Redis with Kafka

Previously the pipeline was: Filebeat -> Redis -> Logstash -> Elasticsearch
Now the pipeline is: Filebeat -> Kafka (with ZooKeeper) -> Logstash -> Elasticsearch

The ZooKeeper cluster can be monitored with the ZK UI web interface, and the Kafka cluster with the Kafka Eagle web interface.
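
If the web interfaces are not set up, a quick sanity check from the command line also works. A minimal sketch using the standard Kafka CLI tools; the localhost addresses and default ports are assumptions carried over from the configuration below:

# List the broker IDs registered in ZooKeeper (assumes ZooKeeper on localhost:2181)
zookeeper-shell.sh localhost:2181 ls /brokers/ids

# List the topics known to the Kafka cluster
kafka-topics.sh --bootstrap-server localhost:9092 --list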

filebeat.yml

# Other settings remain unchanged

# Previously the output went to Redis
#output.redis:
#  hosts: ["172.17.107.187:6370"]
#  key: log_messages
#  password: foobar2000
#  db: 0


# Now the output goes to Kafka
output.kafka:
  hosts: ["localhost:9092", "localhost:9093", "localhost:9094"]
  topic: ktopic
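
Before restarting Filebeat, it can help to create the topic up front and tail it to confirm events are arriving. A minimal sketch with the standard Kafka CLI tools; the partition and replication counts are illustrative assumptions, not values from the original setup:

# Create the topic Filebeat will write to
kafka-topics.sh --create --bootstrap-server localhost:9092 --topic ktopic --partitions 3 --replication-factor 3

# Tail the topic to confirm events are flowing in
kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic ktopic --from-beginning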

logstash.conf

# Previously the input read data from Redis
#input {
#  redis {
#    host => "172.17.107.187"
#    port => 6370
#    password => "foobar2000"
#    data_type => "list"
#    key => "log_messages"
#    db => 0
#  }
#}


# Now the input reads data from Kafka
input {
  kafka {
    bootstrap_servers => "localhost:9092"
    auto_offset_reset => "latest"
    consumer_threads => 5
    # Adds Kafka metadata (topic, partition, offset) to each event under @metadata
    decorate_events => true
    # Subscribe to the topic Filebeat writes to; do not also set topics_pattern,
    # since it would take precedence over this explicit topic list
    topics => ["ktopic"]
    codec => json {
      charset => "UTF-8"
    }
  }
}

# Other settings remain unchanged
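
Before restarting Logstash, the edited pipeline can be syntax-checked without starting it; this assumes Logstash is run from its install directory with logstash.conf in the current path:

bin/logstash -f logstash.conf --config.test_and_exit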
