Sinking Flume-collected data into Kafka

Copyright notice: this is an original article by the blogger, released under the CC 4.0 BY-SA license. Please include the original source link and this notice when reposting.
Original link: https://blog.csdn.net/Romantic_sir/article/details/102553937

1. Configure Flume

a1.sources = s1
a1.sinks = k1
a1.channels = c1

a1.sources.s1.type = exec

# source: tail the log file being collected
a1.sources.s1.command = tail -F /root/log/access.log
# sink type: Kafka
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink

# target Kafka topic
a1.sinks.k1.topic = first_kafka
a1.sinks.k1.brokerList = hdp-1:9092,hdp-2:9092,hdp-3:9092
a1.sinks.k1.requiredAcks = 1
a1.sinks.k1.batchSize = 20

a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100


a1.sources.s1.channels = c1
a1.sinks.k1.channel = c1
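
The sink targets the first_kafka topic, which must exist before the agent starts unless the brokers auto-create topics. A sketch of creating it up front, assuming ZooKeeper listens on hdp-1:2181 and a standard Kafka distribution layout:

[root@hdp-1 kafka]# bin/kafka-topics.sh --create --zookeeper hdp-1:2181 --replication-factor 2 --partitions 3 --topic first_kafka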

Start the agent (-n names the agent defined in the config, -f points at the config file):

[root@hdp-1 flume-1.6.0]#  bin/flume-ng agent -c conf/ -f tail-kafka.conf -n a1 -Dflume.root.logger=INFO,console

2. Start ZooKeeper, the Kafka cluster, and the loop script that generates fake data (sketches below)
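
The post does not show these startup commands or the fake-data script; minimal sketches follow, assuming a standalone ZooKeeper install, the standard Kafka layout, and that the script simply appends lines to the file Flume tails:

# on each of hdp-1/2/3: start ZooKeeper, then the Kafka broker
bin/zkServer.sh start
bin/kafka-server-start.sh -daemon config/server.properties

# makelog.sh (hypothetical name): append a fake access-log line every half second
while true; do
  echo "$(date +%s) GET /index.html 200" >> /root/log/access.log
  sleep 0.5
done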

3. Run ConsumerDemo (using the Kafka consumer API)

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConsumerDemo {
    private static KafkaConsumer<String, String> consumer;
    private static Properties props;

    static {
        props = new Properties();
        // Kafka bootstrap broker for the consumer
        props.put("bootstrap.servers", "hdp-2:9092");
        // key and value deserializers
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // consumer group id
        props.put("group.id", "yangk");
    }

    /**
     * Pulls data from Kafka in a loop (Spring Boot also ships a Kafka integration).
     */
    private static void consumeMessages() {
        // enable automatic offset commits
        props.put("enable.auto.commit", true);
        consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Collections.singleton("first_kafka"));

        // Poll for data in a loop. Kafka removes messages only when the configured
        // retention expires, not on consumption, so already-consumed messages can be
        // read again by seeking to their offset.
        try {
            while (true) {
                // records holds the batch just fetched from Kafka
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> r : records) {
                    System.out.printf("topic = %s, offset = %d, key = %s, value = %s%n",
                            r.topic(), r.offset(), r.key(), r.value());
                }
            }
        } finally {
            consumer.close();
        }
    }

    public static void main(String[] args) {
        consumeMessages();
    }
}

The pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.zpark.kafkatest</groupId>
    <artifactId>kafkatest</artifactId>
    <version>1.0-SNAPSHOT</version>
    <dependencies>
        <dependency>
            <!-- server artifact; not strictly required for a consumer, kafka-clients below suffices -->
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.12</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.2.0</version>
        </dependency>
    </dependencies>

</project>

4. Result

The console prints the records that Flume sank into Kafka.
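
To double-check, independently of the Java consumer, that events are landing in the topic, Kafka's bundled console consumer works too (a sketch, assuming the standard Kafka distribution layout):

[root@hdp-1 kafka]# bin/kafka-console-consumer.sh --bootstrap-server hdp-1:9092 --topic first_kafka --from-beginning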
