Kafka Java Producer and Consumer


With the distributed Kafka cluster already set up, we now test calling it from Java.

1. Create the test project

Create the project with Maven; the relevant dependencies in pom.xml are as follows:

<dependencies>
    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>2.0.0</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
    <dependency>
      <groupId>com.google.code.gson</groupId>
      <artifactId>gson</artifactId>
      <version>2.8.5</version>
    </dependency>

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
</dependencies>
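The SLF4J "Defaulting to no-operation (NOP) logger" warning that appears in the console output later comes from kafka-clients finding no logging binding on the classpath. If you want to see the client's own logs, an optional extra dependency such as slf4j-simple (not part of the original setup) can be added:

    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-simple</artifactId>
      <version>1.7.25</version>
    </dependency>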

2. Create the producer test class

package com.steven.kafka;

import com.google.gson.Gson;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;

/**
 * com.steven.kafka
 * Date 2018/10/23
 */
public class KfProducer {

    private static KfProducer instance = new KfProducer();
    // Kafka producer
    private KafkaProducer<String, String> producer;

    private KfProducer() {
        init();
    }

    public static KfProducer getInstance() {
        return instance;
    }

    private void init() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "172.16.30.100:9092,172.16.30.101:9092,172.16.30.102:9092");
        properties.put("client.id", "TestProducer");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producer = new KafkaProducer<String, String>(properties);
    }

    /**
     * Send the given data to the specified topic.
     * @param topic the target topic
     * @param data  the message payload (sent as the record value)
     */
    public void sendData(String topic, String data) {
        if (producer == null) {
            init();
        }
        try {
            producer.send(new ProducerRecord<String, String>(topic, UUID.randomUUID().toString(), data)).get();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        Gson gson = new Gson();
        for (int i = 0; i < 1; i++) {
            Map<String, Object> map = new HashMap<>();
            map.put("content", "这个是内容"+i);
            map.put("businessId", "11"+i);
            map.put("url", "http://www.baidu.com");
            String json = gson.toJson(map);

            KfProducer.getInstance().sendData("msg_center", json);
        }
    }
}

The main method simulates sending a message. Don't forget the get() after send(): send() is asynchronous, and without blocking on get() (or flushing/closing the producer) the program can exit before the message actually leaves the client.
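If blocking on every send is not desirable, an asynchronous send with a callback is another option. The sketch below is an addition to the original post (the method name sendDataAsync is hypothetical); it assumes the same producer field and additionally imports org.apache.kafka.clients.producer.Callback and org.apache.kafka.clients.producer.RecordMetadata:

    /**
     * Hypothetical variant of sendData: asynchronous send with a callback.
     */
    public void sendDataAsync(String topic, String data) {
        producer.send(new ProducerRecord<String, String>(topic, UUID.randomUUID().toString(), data),
                new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata metadata, Exception exception) {
                        if (exception != null) {
                            exception.printStackTrace();
                        } else {
                            System.out.println("sent to partition " + metadata.partition()
                                    + ", offset " + metadata.offset());
                        }
                    }
                });
        // Still flush (or close) the producer before the JVM exits; otherwise
        // buffered records may never be transmitted.
        producer.flush();
    }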

3. Create the consumer test class

package com.steven.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.*;

/**
 * com.steven.kafka
 * Date 2018/10/23
 */
public class KfConsumer implements Runnable{
    private KafkaConsumer<String, String> consumer;
    private String groupId;
    private String topic;

    public KfConsumer(String groupId, String topic) {
        this.groupId = groupId;
        this.topic = topic;
        init();
    }

    private void init() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "172.16.30.100:9092,172.16.30.101:9092,172.16.30.102:9092");
        properties.put("group.id", groupId);
        // If true, the offsets of fetched messages are committed automatically
        // (with this client version they are stored in Kafka's __consumer_offsets topic, not ZooKeeper).
        properties.put("enable.auto.commit", "true");
        // How often offsets are auto-committed, in milliseconds
        properties.put("auto.commit.interval.ms", "1000");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("auto.offset.reset", "earliest");
        consumer = new KafkaConsumer<String, String>(properties);
    }

    @Override
    public void run() {
        List<String> list = new ArrayList<>();
        if (consumer == null) {
            init();
        }
        // Subscribe to the topic
        consumer.subscribe(Arrays.asList(topic));

        while (!Thread.currentThread().isInterrupted()) {
            // Poll with a 5-second timeout
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            for (ConsumerRecord<String, String> record : records) {
                list.add(record.value());
                System.out.println(record.value());
            }
        }
    }

    public static void main(String[] args) {
        KfConsumer kfConsumer = new KfConsumer("Steven", "msg_center");
        kfConsumer.run();
    }
}
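The consumer above relies on enable.auto.commit=true. If you would rather commit offsets only after a batch has actually been processed, a minimal sketch of the same poll loop with manual commits (assuming enable.auto.commit is set to "false" in init()) could look like this:

        // Assumes properties.put("enable.auto.commit", "false") in init().
        consumer.subscribe(Arrays.asList(topic));
        try {
            while (!Thread.currentThread().isInterrupted()) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
                if (!records.isEmpty()) {
                    // Commit the offsets of the records returned by this poll.
                    consumer.commitSync();
                }
            }
        } finally {
            consumer.close();
        }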

4. Adjust the broker configuration

On top of the earlier Kafka cluster setup, enable the following parameters in each broker's server.properties:

listeners=PLAINTEXT://172.16.30.102:9092
advertised.listeners=PLAINTEXT://172.16.30.102:9092

Make the same change on the other two nodes, using each node's own IP, as shown below.
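For example, on the 172.16.30.100 broker the corresponding two lines would be:

listeners=PLAINTEXT://172.16.30.100:9092
advertised.listeners=PLAINTEXT://172.16.30.100:9092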

5. Run the test

5.1 Start the consumer's main method

First start the consumer's main method so it begins polling. Then, on the server side, pick a node and start a console producer (note: use the broker's real IP here, not localhost):

./bin/kafka-console-producer.sh --broker-list 172.16.30.100:9092 --topic msg_center

Type a few test messages; the Java consumer picks them up:

objc[61909]: Class JavaLaunchHelper is implemented in both /Library/Java/JavaVirtualMachines/jdk1.8.0_72.jdk/Contents/Home/bin/java (0x109ede4c0) and /Library/Java/JavaVirtualMachines/jdk1.8.0_72.jdk/Contents/Home/jre/lib/libinstrument.dylib (0x109fba4e0). One of the two will be used. Which one is undefined.
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
aaaaa
bbbbb
ccc
aaadfdfdfdfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
5.2 Start the producer's main method

Pick another node and start a console consumer (note: again use the broker's real IP, not localhost):

./bin/kafka-console-consumer.sh --bootstrap-server 172.16.30.101:9092 --topic msg_center --from-beginning

Then start the producer's main method. The console consumer on the server receives the message, and so does the Java consumer's main:

[root@slave1 kafka_2.12-2.0.0]# ./bin/kafka-console-consumer.sh --bootstrap-server 172.16.30.101:9092 --topic msg_center --from-beginning
aaaaa
bbbbb
ccc
aaadfdfdfdfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
{"businessId":"110","content":"这个是内容0","url":"http://www.baidu.com"}
objc[61909]: Class JavaLaunchHelper is implemented in both /Library/Java/JavaVirtualMachines/jdk1.8.0_72.jdk/Contents/Home/bin/java (0x109ede4c0) and /Library/Java/JavaVirtualMachines/jdk1.8.0_72.jdk/Contents/Home/jre/lib/libinstrument.dylib (0x109fba4e0). One of the two will be used. Which one is undefined.
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
aaaaa
bbbbb
ccc
aaadfdfdfdfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
{"businessId":"110","content":"这个是内容0","url":"http://www.baidu.com"}

At this point, the basic environment has been set up and verified end to end.
