Flume configuration and usage

This Flume agent collects the log data and transmits it to the Kafka cluster.
Create a new configuration file in the conf directory:
[root@localhost flume]# vim conf/file-monitor.conf
# declare agent
a1.sources = r1
a1.sinks = k1
a1.channels = c1

# define the data source
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /data/xx.log
a1.sources.r1.channels = c1
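# Note: the exec source offers no delivery guarantees; events produced
# while the agent is down or restarting are lost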


# Interceptor: regex filter
a1.sources.r1.interceptors=i1
a1.sources.r1.interceptors.i1.type=regex_filter
#a1.sources.r1.interceptors.i1.regex=(Parsing events)(.*)(END)
# Transfer events to the channel only if they match the regex
# (the comment must sit on its own line; an inline comment after the
# value would be parsed as part of the regex)
a1.sources.r1.interceptors.i1.regex=(aaaa)(.*)
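# Example: a line like "aaaa user login ok" is kept; non-matching lines are dropped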


# Define where events are buffered in transit; this can be memory, disk (file), a database, etc.
a1.channels.c1.type = file
a1.channels.c1.checkpointDir = /data/flume/chk
a1.channels.c1.dataDirs = /data/flume/data
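# The file channel persists events to disk, so buffered data survives
# an agent restart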

# Define the data flow to Kafka
#a1.sinks.k1.type = logger
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.brokerList = 192.168.41.47:9092,192.168.41.127:9092,192.168.41.86:9092
a1.sinks.k1.topic = mytopic
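# Note: Flume 1.7+ renamed these keys to kafka.bootstrap.servers and
# kafka.topic; the names above are the older (pre-1.7) style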
#a1.sinks.k1.requiredAcks = 1
#a1.sinks.k1.batchSize = 20
a1.sinks.k1.serializer.class=kafka.serializer.StringEncoder
a1.sinks.k1.channel = c1
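
Before starting the agent, make sure the topic exists on the Kafka side (or that automatic topic creation is enabled on the brokers). A minimal sketch, assuming ZooKeeper runs at 192.168.41.47:2181 (the address is not given above) and the command is run from the Kafka install directory; the partition and replication counts are illustrative:
[root@localhost kafka]# bin/kafka-topics.sh --create --zookeeper 192.168.41.47:2181 --topic mytopic --partitions 3 --replication-factor 2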

Start the agent:
[root@localhost flume]# nohup bin/flume-ng agent -n a1 -c conf/ -f conf/file-monitor.conf  -Dflume.root.logger=INFO,console > nohup.out 2>&1 &
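
To verify the pipeline end to end, check the agent log, then append a line that matches the interceptor regex and watch it arrive in Kafka. A sketch; the consumer command assumes the same ZooKeeper address as above and an older (pre-2.0) Kafka whose console consumer still accepts --zookeeper:
[root@localhost flume]# tail -f nohup.out
[root@localhost flume]# echo "aaaa test event" >> /data/xx.log
[root@localhost kafka]# bin/kafka-console-consumer.sh --zookeeper 192.168.41.47:2181 --topic mytopic --from-beginning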

