[Kafka practice] 02 Kafka producer and consumer examples

1. Dependency introduction

<dependency>
   <groupId>org.springframework.kafka</groupId>
   <artifactId>spring-kafka</artifactId>
</dependency>

2. Producer and consumer code examples

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.Test; // JUnit 4; with JUnit 5 use org.junit.jupiter.api.Test

public class KafkaSimpleTest {

    private static final String TOPIC_NAME = "hello.world";
    private static final String SERVERS = "192.168.56.201:9092";
    private static final String GROUP_ID = "group1";
    private static final String USER_NAME = "user";
    private static final String PASSWORD = "psd";

    @Test
    public void testConsume() {
        KafkaConsumer<String, String> consumer = kafkaConsumer();
        consumer.subscribe(Collections.singletonList(TOPIC_NAME));
        // The poll loop is normally wrapped in while (true)
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(5000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("Received message: " + record.value() +
                        ", partition: " + record.partition() + ", offset: " + record.offset() + ", key: " + record.key());
                // Manually commit offset + 1 for this record's partition,
                // i.e. the position of the next record to be consumed
                TopicPartition topicPartition = new TopicPartition(TOPIC_NAME, record.partition());
                OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(record.offset() + 1);
                consumer.commitSync(Collections.singletonMap(topicPartition, offsetAndMetadata));
            }
        }
    }
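
    /*
     * A minimal alternative sketch, not part of the original post: instead of a
     * commitSync() per record as in testConsume() above, commit once per poll()
     * batch. The no-argument commitSync() commits the offsets returned by the
     * last poll(), which reduces commit traffic at the cost of coarser-grained
     * redelivery after a failure.
     */
    @Test
    public void testConsumeBatchCommit() {
        KafkaConsumer<String, String> consumer = kafkaConsumer();
        consumer.subscribe(Collections.singletonList(TOPIC_NAME));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(5000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("Received message: " + record.value());
            }
            if (!records.isEmpty()) {
                // One commit for the whole batch instead of one per record
                consumer.commitSync();
            }
        }
    }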

    @Test
    public void testSend() {
        Properties properties = new Properties();
        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
        // Wait for the full ISR to acknowledge each record
        properties.put("acks", "all");
        properties.put("retries", 0);
        properties.put("batch.size", 16384);
        properties.put("linger.ms", 1);
        properties.put("buffer.memory", 33554432);
        // Key/value serializers
        properties.put("key.serializer", StringSerializer.class.getName());
        properties.put("value.serializer", StringSerializer.class.getName());
        Producer<String, String> producer = new KafkaProducer<>(properties);
        for (int i = 0; i < 100; i++) {
            producer.send(new ProducerRecord<>(TOPIC_NAME, Integer.toString(i),
                    System.currentTimeMillis() + "," + "this is message:" + i));
        }
        System.out.println("Message sending finished");
        producer.close();
    }
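
    /*
     * A minimal sketch, not part of the original post: sending with a callback so
     * that broker-side errors are surfaced, instead of the fire-and-forget loop in
     * testSend() above. send(record, callback) is the standard producer API; the
     * key and value used here are made up for the illustration.
     */
    @Test
    public void testSendWithCallback() {
        Properties properties = new Properties();
        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
        properties.put("key.serializer", StringSerializer.class.getName());
        properties.put("value.serializer", StringSerializer.class.getName());
        try (Producer<String, String> producer = new KafkaProducer<>(properties)) {
            producer.send(new ProducerRecord<>(TOPIC_NAME, "key-0", "hello with callback"),
                    (metadata, exception) -> {
                        if (exception != null) {
                            // Send failed; in real code this would be logged or retried
                            exception.printStackTrace();
                        } else {
                            System.out.println("Sent to partition " + metadata.partition()
                                    + ", offset " + metadata.offset());
                        }
                    });
            // close() flushes any buffered records before returning
        }
    }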

    private KafkaConsumer<String, String> kafkaConsumer() {
        Properties props = new Properties();
        // Kafka broker address(es)
        props.put("bootstrap.servers", SERVERS);
        // Consumer group
        props.put("group.id", GROUP_ID);
        // Deserializer for record keys
        props.put("key.deserializer", StringDeserializer.class.getName());
        // Deserializer for record values
        props.put("value.deserializer", StringDeserializer.class.getName());
        // Offsets are committed manually in the poll loop, so disable auto commit
        props.put("enable.auto.commit", "false");
        // Only takes effect when enable.auto.commit is true
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        /*
        // Security protocol and authentication mechanism
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        // Username and password
        props.setProperty("sasl.jaas.config",
                String.format("org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";",
                        USER_NAME, PASSWORD));*/
        return new KafkaConsumer<>(props);
    }
}
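
3. Optional: the same flow with Spring Kafka

Since the dependency introduced in section 1 is spring-kafka, the same produce/consume flow can also be written against the Spring abstractions instead of the raw kafka-clients API used above. The following is only an illustrative sketch, not part of the original example: it assumes a Spring Boot application with spring.kafka.bootstrap-servers (and, if needed, the SASL properties) pointing at the same broker, and the class name KafkaSpringExample is made up for this illustration.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class KafkaSpringExample {

    private static final String TOPIC_NAME = "hello.world";

    private final KafkaTemplate<String, String> kafkaTemplate;

    public KafkaSpringExample(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    // Send a message through the auto-configured KafkaTemplate
    public void send(String key, String value) {
        kafkaTemplate.send(TOPIC_NAME, key, value);
    }

    // Spring manages the consumer and the poll loop; this method is invoked per record
    @KafkaListener(topics = TOPIC_NAME, groupId = "group1")
    public void listen(ConsumerRecord<String, String> record) {
        System.out.println("Received message: " + record.value()
                + ", partition: " + record.partition() + ", offset: " + record.offset());
    }
}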

Origin blog.csdn.net/suyuaidan/article/details/133133662