Reading a file with Java, storing it in Kafka, and consuming it back

Add the required dependencies

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>


    <dependencies>
        <dependency><!-- Kafka client -->
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.6.0</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>2.9.5</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.25</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
        </dependency>
    </dependencies>
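A note on logging: slf4j-log4j12 routes the Kafka client's logs to log4j 1.2, which needs a configuration file on the classpath or it prints a "no appenders could be found" warning at startup. A minimal src/main/resources/log4j.properties could look like this (the INFO level and console pattern are just one reasonable choice, not something the original post specifies):

# Root logger: INFO level, write to the console appender named "stdout"
log4j.rootLogger=INFO, stdout

# Console appender with a timestamped pattern layout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c - %m%n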

The JSON file (saved as src/main/resources/jsonDemo.json)

{
  "people": [
    {
      "firstName": "GAO",
      "lastName": "GPSA",
      "email": "aaaa"
    },
    {
      "firstName": "PENG",
      "lastName": "GAOb",
      "email": "bbbb"
    },
    {
      "firstName": "SHUAI",
      "lastName": "GAOC",
      "email": "cccc"
    }
  ]
}
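One caveat before the code: the producer below reads this file line by line, so each Kafka message carries a single line of the pretty-printed JSON (the very first message is just an opening brace). If one message per person is wanted instead, the jackson-databind dependency declared above can parse the file first. A minimal sketch, assuming the same file path as the producer below (the class name JsonParseDemo is illustrative):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.io.IOException;

public class JsonParseDemo {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // parse the whole file into a JSON tree
        JsonNode root = mapper.readTree(new File("src/main/resources/jsonDemo.json"));
        // iterate over the "people" array; each element becomes one compact JSON string
        for (JsonNode person : root.get("people")) {
            String message = mapper.writeValueAsString(person);
            System.out.println(message); // e.g. {"firstName":"GAO","lastName":"GPSA","email":"aaaa"}
        }
    }
}

Each message string produced this way could replace line in the producer's ProducerRecord below.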

Core class (producer and consumer)

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * @author Modesty.P.Gao
 * @version 1.0
 * @description: Reads a local JSON file, writes it to Kafka, then consumes it back.
 * @date 2021/12/2 16:56
 */
public class Main {

    public static void main(String[] args) throws IOException {
        produceData();
        consumerData();
    }

    /**
     * Reads the local JSON file and writes each line to Kafka.
     */
    private static void produceData() throws IOException {
        // Kafka producer configuration
        Properties properties = new Properties();
        String topic = "wishlist_test";
        properties.put("bootstrap.servers", "localhost:xxxx"); // replace with your broker address
        // wait for acknowledgement from all in-sync replicas
        properties.put("acks", "all");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // define the Kafka producer
        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties);
        // read the file line by line with a buffered reader
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader("src/main/resources/jsonDemo.json"))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                // key/value pair sent to the Kafka broker
                ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, "Node", line);
                // send the record
                kafkaProducer.send(producerRecord);
                System.out.println(line);
            }
        }
        System.out.println("File sent.");
        kafkaProducer.close();
    }

    /**
     * Consumes the records just written to Kafka.
     */
    private static void consumerData() {
        // Kafka consumer configuration
        Properties properties = new Properties();
        String topic = "wishlist_test";
        properties.put("bootstrap.servers", "localhost:xxxx"); // replace with your broker address
        properties.put("group.id", "reason-group01");
        // auto-commit offsets every second (auto-commit is enabled by default)
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "1000");
        // start from the earliest offset when the group has no committed offset
        properties.put("auto.offset.reset", "earliest");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // define the Kafka consumer
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        // subscribe to the topic
        consumer.subscribe(Arrays.asList(topic));
        System.out.println("Records consumed:");
        // poll and print records until the process is stopped
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(5000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }


}
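Note that send() is asynchronous: the "File sent." message prints before the broker has necessarily acknowledged anything (close() does flush pending records). To log per-record delivery results, send() accepts a callback. A small sketch (the class and method names are illustrative, not part of the original post):

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class CallbackSendDemo {
    // send one record and log where it landed, or why it failed
    static void sendWithCallback(KafkaProducer<String, String> producer, String topic, String value) {
        producer.send(new ProducerRecord<>(topic, "Node", value), new Callback() {
            @Override
            public void onCompletion(RecordMetadata metadata, Exception exception) {
                if (exception != null) {
                    System.err.println("send failed: " + exception.getMessage());
                } else {
                    System.out.println("stored at " + metadata.topic() + "-"
                            + metadata.partition() + "@" + metadata.offset());
                }
            }
        });
    }
}

Calling sendWithCallback(kafkaProducer, topic, line) inside the read loop would replace the bare kafkaProducer.send(producerRecord) call above.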

Output

(The original post shows a screenshot of the console output here.)

Reprinted from blog.csdn.net/gps666666/article/details/121691856