1. Kafka 的 Maven 项目配置（注：spark-streaming-kafka-0-10 会传递引入 kafka-clients 依赖）
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
2.生产者生产消息
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Properties;
/**
* @Author 黄仁议<6 1 3 0 2 4 7 1 0 @ qq.com>
* @Version V1.0
* @Since 1.0
* @Date 2019/6/6 0006 11:01
* @Description
* @ClassName MyProducer
*/
/**
 * Sends 100 String records ("kafka0" .. "kafka99", keyed "0".."99")
 * to topic "test1" on the broker cluster hry1/hry2/hry3, then closes
 * the producer so buffered records are flushed.
 */
public class MyProducer {
    public static void main(String[] args) {
        // Producer configuration
        Properties props = new Properties();
        // Kafka broker list (host:port) used for the initial cluster bootstrap
        props.put("bootstrap.servers", "hry1:9092,hry2:9092,hry3:9092");
        // "all": wait for the full in-sync replica set to acknowledge each record
        props.put("acks", "all");
        // Retry a failed send up to 3 times
        props.put("retries", 3);
        // linger.ms: how long the producer waits to batch records before
        // sending (NOT a request timeout — the original comment was wrong)
        props.put("linger.ms", 5000);
        // Serializers for record keys and values
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // try-with-resources guarantees close() — which flushes any buffered
        // records — even if a send throws (the original leaked the producer
        // on error, and also constructed it with a raw type)
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Send to topic "test1" with the index as key and "kafka<i>" as value
            for (int i = 0; i < 100; i++) {
                producer.send(new ProducerRecord<>("test1", Integer.toString(i),
                        "kafka" + i));
            }
        }
    }
}
3.消费者消费消息
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* @Author 黄仁议<6 1 3 0 2 4 7 1 0 @ qq.com>
* @Version V1.0
* @Since 1.0
* @Date 2019/6/6 0006 11:14
* @Description
* @ClassName MyConsumer
*/
/**
 * Consumes messages from topic "test1" using the legacy (Scala/ZooKeeper)
 * high-level consumer API: requests two streams for the topic and prints
 * every message payload, each stream on its own worker thread.
 */
public class MyConsumer {
    private static final String topic = "test1";
    private static final Integer threads = 2;

    public static void main(String[] args) {
        // Consumer configuration — the old API coordinates via ZooKeeper,
        // which is where it reads cluster metadata and commits offsets.
        Properties config = new Properties();
        config.put("zookeeper.connect", "hry1:2181,hry2:2181,hry3:2181");
        // Consumer group id
        config.put("group.id", "xy");
        // "smallest": when this group has no committed offset yet, start
        // from the earliest available message in the topic.
        config.put("auto.offset.reset", "smallest");

        // Build the consumer connector from the configuration.
        ConsumerConnector connector =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(config));

        // Ask for `threads` streams for our single topic.
        Map<String, Integer> topicStreamCounts = new HashMap<String, Integer>();
        topicStreamCounts.put(topic, threads);
        Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic =
                connector.createMessageStreams(topicStreamCounts);

        // One worker thread per stream; each blocks on its stream's iterator
        // and prints every message it receives.
        for (final KafkaStream<byte[], byte[]> stream : streamsByTopic.get(topic)) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    for (MessageAndMetadata<byte[], byte[]> record : stream) {
                        System.out.println(new String(record.message()));
                    }
                }
            }).start();
        }
    }
}