java代码操作kafka

jar 包依赖（Maven）

<dependencies>
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.11</artifactId>
        <version>1.1.0</version>
    </dependency>
</dependencies>

创建生产者

import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

/**
 * Demo producer using the legacy (pre-0.9) Scala producer API.
 * Sends 100 string messages ("xiaoniu-msg1001".."xiaoniu-msg1100")
 * to the "test" topic.
 */
public class ProducerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Broker list used for initial metadata discovery (legacy producer config key).
        props.put("metadata.broker.list", "hadoop1:9092,hadoop2:9092,hadoop3:9092");
        // Encode message payloads as strings.
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        ProducerConfig config = new ProducerConfig(props);
        Producer<String, String> producer = new Producer<String, String>(config);
        try {
            // Topic and payload are given per message; no key is set.
            for (int i = 1001; i <= 1100; i++) {
                producer.send(new KeyedMessage<String, String>("test", "xiaoniu-msg" + i));
            }
        } finally {
            // Fix: the original never closed the producer. close() flushes any
            // buffered messages and releases network resources.
            producer.close();
        }
    }
}

创建消费者

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
/**
 * Demo consumer using the legacy ZooKeeper-based high-level consumer API.
 * Starts {@code threads} streams on the "test" topic and prints every
 * message payload to stdout. Runs until the JVM is terminated.
 */
public class ConsumerDemo {
    // Topic this demo consumes.
    private static final String topic = "test";
    // Number of streams (and threads) consuming the topic concurrently.
    private static final Integer threads = 2;

    public static void main(String[] args) {

        Properties props = new Properties();
        // ZooKeeper ensemble: the legacy high-level consumer uses ZK for
        // group coordination and offset storage.
        props.put("zookeeper.connect", "hadoop1:2181,hadoop2:2181,hadoop3:2181");
        /*
         * Consumer group id. Consumers in the same group divide the topic's
         * partitions among themselves; consumers in different groups each
         * receive the full message stream independently.
         */
        props.put("group.id", "vvvvv");
        // "smallest" = consume from the earliest available offset (equivalent
        // to "--from-beginning"); "largest" = only messages produced after startup.
        props.put("auto.offset.reset", "smallest");
        ConsumerConfig config = new ConsumerConfig(props);
        // Create the Java high-level consumer connector.
        final ConsumerConnector consumer = Consumer.createJavaConsumerConnector(config);

        // Fix: the original never shut the connector down. Release the
        // ZooKeeper and broker connections cleanly when the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            public void run() {
                consumer.shutdown();
            }
        }));

        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, threads);
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                consumer.createMessageStreams(topicCountMap);
        List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);

        // One thread per stream; each thread blocks on its stream's iterator.
        for (final KafkaStream<byte[], byte[]> kafkaStream : streams) {
            new Thread(new Runnable() {
                public void run() {
                    for (MessageAndMetadata<byte[], byte[]> mm : kafkaStream) {
                        // Fix: decode explicitly as UTF-8 instead of relying
                        // on the platform default charset.
                        String msg = new String(mm.message(), StandardCharsets.UTF_8);
                        System.out.println(msg);
                    }
                }
            }).start();
        }
    }
}

猜你喜欢

转载自blog.csdn.net/weixin_38613375/article/details/89511022