1 Kafka 第一个 Java 程序
基于 Spring Boot 实现,对连接参数的设置进行了优化。
1.1 新建Spring Boot项目,添加依赖
<properties>
<java.version>1.8</java.version>
<kafka.version>2.0.0</kafka.version>
<scala.version>2.11</scala.version>
</properties>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_${scala.version}</artifactId>
<version>${kafka.version}</version>
</dependency>
1.2 生产者代码实现
对参数的配置进行了优化
package xb.study.kafka.producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
/**
 * Minimal Kafka producer demo: sends a single "Hello kafka!" record
 * to the configured topic and exits.
 */
public class ProducerStart {

    /** Kafka bootstrap server address (host:port). */
    private static final String brokerList = "10.251.80.151:9092";
    /** Topic the demo record is published to. */
    private static final String topic = "test";

    /**
     * Builds the producer configuration, sends one record, and closes the producer.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Properties properties = new Properties();
        // Use ProducerConfig constants and class literals instead of raw
        // strings so typos in config keys / class names fail at compile time.
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Number of retries on transient (retriable) send failures.
        properties.put(ProducerConfig.RETRIES_CONFIG, 2);
        // Cluster bootstrap address.
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);

        // try-with-resources guarantees close() runs (flushing any buffered
        // records) even if send() throws — the original could leak the
        // producer on unexpected errors.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
            ProducerRecord<String, String> record =
                    new ProducerRecord<>(topic, "kafka-demo", "Hello kafka!");
            producer.send(record);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
1.3 消费者代码实现
package xb.study.kafka.consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
/**
* 消费者
*/
/**
 * Minimal Kafka consumer demo: subscribes to the configured topic and
 * prints every received record value to stdout, polling forever.
 */
public class ConsumerStart {

    /** Kafka bootstrap server address (host:port). */
    private static final String brokerList = "10.251.80.151:9092";
    /** Topic to subscribe to. */
    private static final String topic = "test";
    /** Consumer group id used for offset tracking. */
    private static final String groupId = "group.demo";

    /**
     * Builds the consumer configuration and polls the topic in an endless loop.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        // ConsumerConfig constants and class literals avoid typo-prone
        // hand-written config keys / deserializer class names.
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // Fully parameterized consumer (original used a raw KafkaConsumer,
        // losing type safety); try-with-resources closes it on any error so
        // the consumer leaves its group cleanly instead of timing out.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList(topic));
            while (true) {
                // Block up to 3 s waiting for new records.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(3000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}