Build a Kafka cluster and use springboot to integrate

In the previous article, we have successfully installed Kafka. This article explains how to deploy a Kafka cluster and use springboot to integrate tests.

Set up a multi-broker cluster

Since there is only one virtual machine, we simulate multiple brokers through multiple configuration files.
First create a configuration file for each broker:

# Create one config file per simulated broker from the base config.
# (Fixed: the first target was misspelled "server-1.propertie"; the later
# steps start the broker from "server-1.properties".)
cp config/server.properties config/server-1.properties
cp config/server.properties config/server-2.properties

Now edit these newly created files and set the following properties
config/server.properties:

broker.id=0
# Fill in your server's IP address here
listeners=PLAINTEXT://192.168.130.128:9092
# Choose the directory where this broker stores its log segments
log.dirs=/usr/kafka2.12/kafka-logs
delete.topic.enable=true
# ZooKeeper cluster connection string
zookeeper.connect=192.168.130.128:2181,192.168.130.128:2182,192.168.130.128:2183

config/server-1.properties:

broker.id=1
# Fill in your server's IP address here
listeners=PLAINTEXT://192.168.130.128:9093
# Choose the directory where this broker stores its log segments
log.dirs=/usr/kafka2.12/kafka-logs-1
delete.topic.enable=true
# ZooKeeper cluster connection string
zookeeper.connect=192.168.130.128:2181,192.168.130.128:2182,192.168.130.128:2183

config/server-2.properties:

# NOTE: fixed broker.id from 0 to 2 — broker ids must be unique within the
# cluster (the article itself says so below); a duplicate of broker 0's id
# would prevent this broker from joining the cluster.
broker.id=2
# Fill in your server's IP address here
listeners=PLAINTEXT://192.168.130.128:9094
# Choose the directory where this broker stores its log segments
log.dirs=/usr/kafka2.12/kafka-logs-2
delete.topic.enable=true
# ZooKeeper cluster connection string
zookeeper.connect=192.168.130.128:2181,192.168.130.128:2182,192.168.130.128:2183

The broker.id property is the name of each node in the cluster. This name is unique and permanent.
Start three nodes

# Start the three brokers; "&" runs each one as a background job so all
# three can run from the same shell.
bin/kafka-server-start.sh config/server.properties &
bin/kafka-server-start.sh config/server-1.properties &
bin/kafka-server-start.sh config/server-2.properties & 

Now create a new topic with a replication factor of 3:

# Create "new_topic" with 1 partition replicated across all 3 brokers.
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 3 --partitions 1 --topic new_topic

If one of the brokers goes down, restart it and then run this command to re-elect the preferred replica as partition leader:

# Re-elect the preferred replica as partition leader after a failed broker
# has been restarted.
bin/kafka-preferred-replica-election.sh --zookeeper localhost:2181

springboot integrates kafka

Import dependency

<parent>
	<groupId>org.springframework.boot</groupId>
	<artifactId>spring-boot-starter-parent</artifactId>
	<version>2.1.3.RELEASE</version>
	<relativePath/> <!-- lookup parent from repository -->
</parent>
<dependencies>
	<!-- Web starter: embedded servlet container + Spring MVC -->
	<dependency>
		<groupId>org.springframework.boot</groupId>
		<artifactId>spring-boot-starter-web</artifactId>
	</dependency>
	<!-- Spring for Apache Kafka: provides KafkaTemplate and @KafkaListener -->
	<dependency>
		<groupId>org.springframework.kafka</groupId>
		<artifactId>spring-kafka</artifactId>
	</dependency>
	<!-- Test support used by the test class below -->
	<dependency>
		<groupId>org.springframework.boot</groupId>
		<artifactId>spring-boot-starter-test</artifactId>
		<scope>test</scope>
	</dependency>
</dependencies>

application.yml

spring:
  application:
    name: spring-boot-kafka

  # kafka configuration
  kafka:
    # Kafka cluster addresses (the three brokers started above)
    bootstrap-servers:
      - 192.168.130.128:9092
      - 192.168.130.128:9093
      - 192.168.130.128:9094
    # Number of threads in the listener container, to increase consumer concurrency
    listener:
      concurrency: 3

    # Producer configuration
    producer:
      # Number of messages sent per batch
      batch-size: 1000
      retries: 0
      buffer-memory: 33554432
      # key/value serialization classes
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    # Consumer configuration
    consumer:
      # Default consumer group id
      group-id: test-group2
      auto-offset-reset: latest
      # Whether offsets are committed automatically
      enable-auto-commit: true
      # Auto-commit interval (milliseconds)
      auto-commit-interval: 1000
      # key/value deserialization classes
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    # Default topic used by KafkaTemplate when none is given
    template:
      default-topic: new_topic

Boot class

package com.sunyuqi.springboot;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Application entry point. Enables Spring Boot auto-configuration and
 * component scanning for the {@code com.sunyuqi.springboot} package.
 */
@SpringBootApplication
public class KafkaDemoApplication {

	public static void main(String[] args) {
		SpringApplication.run(KafkaDemoApplication.class, args);
	}
}

Producer

package com.sunyuqi.springboot.producer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

/**
 * Kafka message producer. Sends string messages asynchronously via
 * {@link KafkaTemplate} and logs the outcome of each send in a callback.
 */
@Component
public class Producer {

    private static final Logger log = LoggerFactory.getLogger(Producer.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Asynchronously sends {@code data} to {@code topic}. The call returns
     * immediately; success or failure is reported by the registered callback.
     *
     * @param topic target Kafka topic
     * @param data  message payload
     */
    public void sendMessage(String topic, String data) {
        log.info("kafka sendMessage start");
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, data);
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {

            @Override
            public void onFailure(Throwable ex) {
                // Pass the Throwable as the trailing argument with no "{}" for it:
                // SLF4J then logs the full stack trace. The original formatted ex
                // through a placeholder, which only logged ex.toString().
                log.error("kafka sendMessage error, topic = {}, data = {}", topic, data, ex);
            }

            @Override
            public void onSuccess(SendResult<String, String> result) {
                log.info("kafka sendMessage success topic = {}, data = {}", topic, data);
            }
        });
        log.info("kafka sendMessage end");
    }
}

consumer

package com.sunyuqi.springboot.consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

/**
 * Kafka message consumer. Listens on the "new_topic" topic and prints each
 * received record's topic, offset, and value to standard output.
 */
@Component
public class Consumer {

    @KafkaListener(topics = {"new_topic"})
    public void processMessage(ConsumerRecord<?, ?> record) {
        System.out.printf("topic is %s, offset is %d, value is %s \n", record.topic(), record.offset(), record.value());
    }
}

Run the boot class so the consumer starts listening for messages, then run the
test class:

package com.sunyuqi.springboot;

import com.sunyuqi.springboot.producer.Producer;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

/**
 * Integration test: boots the full application context and publishes a
 * sample message to "new_topic" through the {@link Producer}.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class KafkaDemoApplicationTests {

	@Autowired
	private Producer producer;

	@Test
	public void contextLoads() {
		producer.sendMessage("new_topic", "hello world");
	}
}

Run test class, send message

Message received successfully
(Screenshot of the console output omitted.)

Guess you like

Origin blog.csdn.net/weixin_42494845/article/details/108874220