Flume 1.7 + Kafka 0.10.0.1 (kafka_2.10-0.10.0.1)

The agent below picks up log files dropped into a spooling directory, buffers them in a memory channel, and writes the events to a Kafka topic. flume-kafka-sink.properties:

producer.sources=s
producer.channels=c1
producer.sinks=r

producer.channels.c1.type = memory
producer.channels.c1.capacity = 1000
producer.channels.c1.transactionCapacity = 100

producer.sources.s.type = spooldir
producer.sources.s.spoolDir = D:/solrhome/solr0/tomcat7/logs
producer.sources.s.channels = c1
producer.sources.s.inputCharset = UTF-8
# includePattern is a regular expression matched against the whole file name
producer.sources.s.includePattern = ^.*\.log$

producer.sinks.r.channel = c1
# Flume 1.7 ships its own Kafka sink; the properties of the old third-party
# org.apache.flume.plugins.KafkaSink (metadata.broker.list, serializer.class,
# partition.key, producer.type, custom.encoding, custom.topic.name) are not
# read by it, so they have been replaced with the kafka.* properties below.
producer.sinks.r.type = org.apache.flume.sink.kafka.KafkaSink
producer.sinks.r.kafka.bootstrap.servers = 172.16.21.2:9092
producer.sinks.r.kafka.topic = topicA
producer.sinks.r.kafka.producer.acks = 0
# any other Kafka producer setting can be passed through with the kafka.producer. prefix
producer.sinks.r.kafka.producer.max.request.size = 1000000
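
Before starting the agent, the target topic can be created on the broker. A minimal sketch for Kafka 0.10.0.1; the ZooKeeper address is an assumption, adjust host, partitions and replication factor to your environment:

# assumes ZooKeeper for the 172.16.21.2 broker listens on port 2181
kafka-topics.sh --create --zookeeper 172.16.21.2:2181 --partitions 1 --replication-factor 1 --topic topicA

With the broker default auto.create.topics.enable=true the topic would also be created automatically on the first write, but creating it explicitly lets you control partitions and replication.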

Start the agent (run from Flume's bin directory, given the relative ../conf paths):

flume-ng agent -n producer -c ../conf -f ../conf/flume-kafka-sink.properties
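
To confirm that events from the spool directory actually reach Kafka, attach a console consumer to the topic (on 0.10 the --bootstrap-server option selects the new consumer):

kafka-console-consumer.sh --bootstrap-server 172.16.21.2:9092 --topic topicA --from-beginning

Any .log file dropped into D:/solrhome/solr0/tomcat7/logs (complete files only; the spooldir source expects files that are no longer being written to) should then show up line by line in the consumer, and Flume renames the file with the .COMPLETED suffix once it has been ingested.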
