Flume HDFS sink keeps generating small files

To keep the sink from endlessly rolling new files under this path because of block under-replication, add:

 

a1.sinks.k1.hdfs.minBlockReplicas=1

With this set to 1, the sink no longer perceives the block it is currently writing as under-replicated while replicas are still being copied, so that check stops triggering file rolls.
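To see the symptom before and after the change, list what the sink has written so far; a time bucket full of tiny files (a few KB each) points to roll-on-under-replication. The paths below simply follow the config in this post, and the commands are standard HDFS CLI:

hdfs dfs -ls -h -R /weblog/          # human-readable sizes; look for many small files per bucket
hdfs fsck /weblog -files -blocks     # block counts and replication state of the output directory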

 

# Name the components on this agent
a1.sources = r1
a1.sinks = k1
a1.channels = c1

# Describe/configure the source
a1.sources.r1.type = TAILDIR
a1.sources.r1.positionFile = /weblog/flume/taildir_position.json
a1.sources.r1.filegroups = f1 f2
a1.sources.r1.filegroups.f1 = /weblog/test1/example.log
a1.sources.r1.filegroups.f2 = /weblog/test2/.*log.*

# Describe the sink
a1.sinks.k1.type = hdfs
a1.sinks.k1.hdfs.path = /weblog/%y-%m-%d/%H-%M/
a1.sinks.k1.hdfs.filePrefix = itcast-
a1.sinks.k1.hdfs.minBlockReplicas=1 
a1.sinks.k1.hdfs.round = true
a1.sinks.k1.hdfs.roundValue = 10
a1.sinks.k1.hdfs.roundUnit = minute    
a1.sinks.k1.hdfs.rollInterval = 0
a1.sinks.k1.hdfs.rollSize = 134217728
a1.sinks.k1.hdfs.rollCount = 0
a1.sinks.k1.hdfs.idleTimeout = 20
a1.sinks.k1.hdfs.batchSize = 1
a1.sinks.k1.hdfs.useLocalTimeStamp = true
# File format of the generated files; the default is SequenceFile, DataStream writes plain text
a1.sinks.k1.hdfs.fileType = DataStream 

# Use a channel which buffers events in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100

# Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
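With this configuration, files roll only when they reach roughly 128 MB (rollSize) or after 20 seconds without writes (idleTimeout), and the path escapes plus round/roundValue/roundUnit bucket output into 10-minute directories. As a purely illustrative example (the timestamp and the counter in the file name are made up), an event arriving at 10:43 on 2020-03-24 would land somewhere like:

/weblog/20-03-24/10-40/itcast-.1585039380000.tmp    # open file; the .tmp in-use suffix is dropped when the file is closed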

 

Configure the Java environment (JAVA_HOME) in conf/flume-env.sh, make the script executable, and start the agent:
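A minimal conf/flume-env.sh sketch; the JDK path is an assumption and should be changed to match the local installation:

# conf/flume-env.sh
export JAVA_HOME=/opt/jdk1.8.0_241                  # assumed JDK location; adjust as needed
export JAVA_OPTS="-Xms512m -Xmx1024m"               # optional heap settings for the agent JVM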

chmod a+x flume-env.sh
bin/flume-ng agent -c conf -f conf/taildirsource_hdfs.conf -n a1 -Dflume.root.logger=INFO,console

 

Origin www.cnblogs.com/chong-zuo3322/p/12554936.html