引入相关的依赖(Maven pom.xml 片段)
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.9.5</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
</dependencies>
JSON 文件(保存为 src/main/resources/jasonDemo.jason,需与代码中读取的路径一致)
{
"people": [
{
"firstName": "GAO",
"lastName": "GPSA",
"email": "aaaa"
},
{
"firstName": "PENG",
"lastName": "GAOb",
"email": "bbbb"
},
{
"firstName": "SHUAI",
"lastName": "GAOC",
"email": "cccc"
}
]
}
核心类(生产者、消费者)
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import sun.java2d.pipe.SpanIterator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class Main {

    /** Topic shared by the producer and the consumer. */
    private static final String TOPIC = "wishlist_test";
    /** Broker list; replace xxxx with the real broker port (default 9092). */
    private static final String BOOTSTRAP_SERVERS = "localhost:xxxx";

    /**
     * Demo entry point: first publishes the JSON file to Kafka, then starts an
     * endless consumer loop that prints everything it reads back.
     *
     * @throws IOException if the JSON file cannot be opened or read
     */
    public static void main(String[] args) throws IOException {
        produceData();
        consumerData();
    }

    /**
     * Reads the demo JSON file line by line and sends each line as one record
     * (key {@code "Node"}) to {@link #TOPIC}, echoing each line to stdout.
     *
     * @throws IOException if the JSON file cannot be opened or read
     */
    private static void produceData() throws IOException {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        // BUG FIX: "request.required.acks" is the legacy (pre-0.9 Scala producer)
        // key and is silently ignored by kafka-clients 2.x; the correct key is
        // "acks". "all" (equivalent to "-1") waits for the full ISR to acknowledge.
        properties.put("acks", "all");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees both the producer and the reader are
        // closed even if send()/readLine() throws (the original never closed the
        // reader, and leaked the producer on any exception).
        // NOTE(review): FileReader uses the platform default charset on Java 8;
        // the demo file is plain ASCII so that is safe here.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties);
             BufferedReader bufferedReader =
                     new BufferedReader(new FileReader("src\\main\\resources\\jasonDemo.jason"))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                kafkaProducer.send(new ProducerRecord<>(TOPIC, "Node", line));
                System.out.println(line);
            }
            // close() at the end of the try block flushes all buffered records.
            System.out.println("发送成功!");
        }
    }

    /**
     * Subscribes to {@link #TOPIC} and prints every record value, polling
     * forever. This demo loop never returns; the process must be killed to
     * stop it, so the consumer is intentionally not closed here.
     */
    private static void consumerData() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        properties.put("group.id", "reason-group01");
        // Offsets are auto-committed once per second (enable.auto.commit defaults to true).
        properties.put("auto.commit.interval.ms", "1000");
        // Read from the beginning of the topic when this group has no committed offset.
        properties.put("auto.offset.reset", "earliest");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Arrays.asList(TOPIC));
        System.out.println("消费者消费的内容是:");
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(5000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
}
运行结果