Flume source

1.netcat
2.exec
        实时日志收集:通过执行命令(如 tail -F)持续读取其输出,实现日志的实时收集。

# Agent a1: exec source -> memory channel -> logger sink.
a1.sources = r1
a1.sinks = k1
a1.channels = c1

# Exec source: runs the given shell command and turns each output line into an event.
# NOTE(review): the exec source gives no delivery guarantee if the agent restarts;
# for reliable file tailing the taildir source is recommended.
a1.sources.r1.type=exec
a1.sources.r1.command=tail -F /home/centos/test.txt

# Logger sink: writes events to the agent log at INFO level (useful for testing).
a1.sinks.k1.type=logger

# Memory channel: buffers events in RAM.
a1.channels.c1.type=memory

# Wire the source and the sink to the channel.
a1.sources.r1.channels=c1
a1.sinks.k1.channel=c1

3.批量收集
        监控一个文件夹,收集其中的静态文件。
        每个文件收集完之后,会将该文件重命名,追加 .COMPLETED 后缀。

        #a)配置文件
			[spooldir_r.conf]
			# Agent a1: spooling-directory source -> memory channel -> logger sink.
			a1.sources = r1
			a1.channels = c1
			a1.sinks = k1

			# Spooldir source: watches a directory for new, immutable files;
			# each file is renamed with a .COMPLETED suffix once fully ingested.
			a1.sources.r1.type=spooldir
			a1.sources.r1.spoolDir=/home/centos/spool
			# Add a header to each event carrying the absolute path of the source file.
			a1.sources.r1.fileHeader=true

			# Logger sink: writes events to the agent log (useful for testing).
			a1.sinks.k1.type=logger

			# Memory channel: buffers events in RAM.
			a1.channels.c1.type=memory

			# Wire the source and the sink to the channel.
			a1.sources.r1.channels=c1
			a1.sinks.k1.channel=c1

		#b)创建目录
			$>mkdir ~/spool

		#c)启动flume
			$>bin/flume-ng agent -f ../conf/spooldir_r.conf -n a1 -Dflume.root.logger=INFO,console

4.序列source
        [seq]

		# Agent a1: sequence-generator source -> memory channel -> logger sink.
		a1.sources = r1
		a1.channels = c1
		a1.sinks = k1

		# Seq source: generates events with an incrementing counter (for testing).
		a1.sources.r1.type=seq
		# Stop after emitting 1000 events.
		a1.sources.r1.totalEvents=1000

		# Logger sink: writes events to the agent log (useful for testing).
		a1.sinks.k1.type=logger

		# Memory channel: buffers events in RAM.
		a1.channels.c1.type=memory

		# Wire the source and the sink to the channel.
		a1.sources.r1.channels=c1
		a1.sinks.k1.channel=c1

5.StressSource

                # Agent a1: StressSource -> memory channel.
                # NOTE(review): this snippet configures no sink; it is a
                # load-testing fragment, not a complete runnable agent.
                a1.sources = stresssource-1
		a1.channels = memoryChannel-1
		# StressSource: generates a stream of events for internal load testing.
		a1.sources.stresssource-1.type = org.apache.flume.source.StressSource
		# Payload size of each generated event, in bytes.
		a1.sources.stresssource-1.size = 10240
		# Stop after this many events have been generated.
		a1.sources.stresssource-1.maxTotalEvents = 1000000
		a1.sources.stresssource-1.channels = memoryChannel-1

6.kafka

                # Agent a1: Kafka source -> memory channel -> logger sink.
                a1.sources = r1
		a1.sinks = k1
		a1.channels = c1

		# Kafka source: consumes messages from a Kafka topic as Flume events.
		a1.sources.r1.type = org.apache.flume.source.kafka.KafkaSource
		# Maximum number of events written to the channel in one batch.
		a1.sources.r1.batchSize = 5000
		# Maximum time (ms) to wait before flushing a partial batch.
		a1.sources.r1.batchDurationMillis = 2000
		# Kafka broker list (host:port).
		a1.sources.r1.kafka.bootstrap.servers = s202:9092
		# Topic(s) to consume from.
		a1.sources.r1.kafka.topics = test3
		# Consumer group id used by Kafka for offset tracking.
		a1.sources.r1.kafka.consumer.group.id = g4

		# Logger sink: writes events to the agent log (useful for testing).
		a1.sinks.k1.type = logger

		# Memory channel: buffers events in RAM.
		a1.channels.c1.type=memory

		# Wire the source and the sink to the channel.
		a1.sources.r1.channels = c1
		a1.sinks.k1.channel = c1

猜你喜欢

转载自blog.csdn.net/mao502010435/article/details/89472821