Kafka getting-started demo

Start a single-node Kafka on the VM: start ZooKeeper first, then start the Kafka broker. By default the broker advertises its hostname (here "bogon", the VM's host name), so the client on the physical machine must be able to resolve that name. Either add the line "192.168.229.128   bogon" to the hosts file on the physical machine (C:\Windows\System32\drivers\etc\hosts), or edit server.properties in Kafka's config directory and set listeners=PLAINTEXT://192.168.229.128:9092 so the broker listens on, and advertises, the IP address directly.
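For reference, a typical way to bring up the single-node broker on the VM is with the scripts shipped in the Kafka distribution (paths assume the standard 0.10.x layout; creating topic1 up front is optional because automatic topic creation is enabled by default):

bin/zookeeper-server-start.sh config/zookeeper.properties
bin/kafka-server-start.sh config/server.properties
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic topic1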

Next, create a Maven project.

pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<groupId>com.tansun</groupId>
	<artifactId>KafkaTest</artifactId>
	<version>0.0.1-SNAPSHOT</version>

	<dependencies>
		<dependency>
			<groupId>org.apache.kafka</groupId>
			<artifactId>kafka-clients</artifactId>
			<version>0.10.1.0</version>
		</dependency>
	</dependencies>

</project>

Producer

package com.tansun;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class ProducerTest {
	
	public static void main(String[] args) throws InterruptedException {
		Properties props = new Properties();
		// Broker address; a single-node Kafka in this demo
		props.put("bootstrap.servers", "192.168.229.128:9092");
		// Serializers that turn the ProducerRecord key and value into bytes
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		
		Producer<String, String> producer = new KafkaProducer<>(props);
		// Send a single string message to topic1 (fire-and-forget)
		producer.send(new ProducerRecord<String, String>("topic1", "hello kafka"));
		producer.close();
	}

}
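The send() call above is fire-and-forget: if the broker cannot be reached, the failure is never reported. As a minimal sketch of the same program, blocking on the Future returned by send() makes delivery errors visible and shows where the record landed; the class name ProducerSyncTest and the key "key1" are only illustrative, the rest is the same 0.10.1.0 client API.

package com.tansun;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class ProducerSyncTest {

	public static void main(String[] args) throws InterruptedException, ExecutionException {
		Properties props = new Properties();
		props.put("bootstrap.servers", "192.168.229.128:9092");
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

		Producer<String, String> producer = new KafkaProducer<>(props);
		// send() returns a Future; get() blocks until the broker acknowledges the record,
		// so a delivery failure surfaces here as an ExecutionException
		RecordMetadata metadata = producer.send(
				new ProducerRecord<String, String>("topic1", "key1", "hello kafka (sync)")).get();
		System.out.println("sent to partition " + metadata.partition() + ", offset " + metadata.offset());
		producer.close();
	}

}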

Consumer

package com.tansun;

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConsumerTest {

	@SuppressWarnings("resource")
	public static void main(String[] args) {
		Properties props = new Properties();
		props.put("bootstrap.servers", "192.168.229.128:9092");
		props.put("group.id", "test");
		// Deserializers that turn the received bytes back into strings
		props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
		// Subscribe to the topic; it must match the topic the producer writes to
		consumer.subscribe(Arrays.asList("topic1"));
		while (true) {
			// Poll for new messages, blocking for at most 100 milliseconds
			ConsumerRecords<String, String> records = consumer.poll(100);
			for (ConsumerRecord<String, String> record : records)
				System.out.println(record.value());
		}
	}

}
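With the defaults used above, the consumer commits offsets automatically and starts from the latest offset, so it only sees messages produced after it has joined the group. A minimal sketch of a variant that starts from the earliest available offset and commits offsets manually after each poll; the class name ConsumerManualCommitTest is illustrative, the configuration keys are standard 0.10.x consumer settings.

package com.tansun;

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConsumerManualCommitTest {

	public static void main(String[] args) {
		Properties props = new Properties();
		props.put("bootstrap.servers", "192.168.229.128:9092");
		props.put("group.id", "test");
		// Start from the earliest offset when this group has no committed offset yet
		props.put("auto.offset.reset", "earliest");
		// Commit offsets manually instead of relying on the automatic background commit
		props.put("enable.auto.commit", "false");
		props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

		KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
		consumer.subscribe(Arrays.asList("topic1"));
		try {
			while (true) {
				ConsumerRecords<String, String> records = consumer.poll(100);
				for (ConsumerRecord<String, String> record : records)
					System.out.println(record.offset() + ": " + record.value());
				// Commit the offsets of the records returned by the last poll()
				consumer.commitSync();
			}
		} finally {
			consumer.close();
		}
	}

}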

Reposted from blog.csdn.net/jia_costa/article/details/79013815