Kafka Producer and Consumer

1. Producer Code

import org.apache.kafka.clients.producer.*;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * Created by huangjinchong on 2018/5/11.
 */
public class TestProducer {

    private String topic;

    public TestProducer(String topic) {
        super();
        this.topic = topic;
    }

    public void sendProducer(){
        KafkaProducer<String, String> producer = createProducer();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        System.out.println(producer.toString());
        for (int i = 0; i < 20; i++) {
//            // Synchronous alternative: get() blocks until the broker acknowledges the record.
//            try {
//                Object metadata = producer.send(new ProducerRecord<String, String>(topic, "time:" + sdf.format(new Date()) + " message:" + i)).get();
//                System.out.println(metadata);
//            } catch (InterruptedException e) {
//                e.printStackTrace();
//            } catch (ExecutionException e) {
//                e.printStackTrace();
//            }
            // Asynchronous send: the record is only buffered here and sent in the background.
            producer.send(new ProducerRecord<String, String>(topic, "time:" + sdf.format(new Date()) + " message:" + i));
        }
//        producer.flush();
    }

    private KafkaProducer<String, String> createProducer(){
        Properties prop = new Properties();
        prop.put("bootstrap.servers", "192.168.1.131:9092,192.168.1.132:9092");
        prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("acks", "all");            // wait for acknowledgement from all in-sync replicas
        prop.put("retries", 0);             // do not retry failed sends
        prop.put("batch.size", 10);         // tiny batch size, for demonstration only
        prop.put("linger.ms", 0);           // send as soon as possible, no artificial delay
        prop.put("buffer.memory", 60 * 1024);
        return new KafkaProducer<String, String>(prop);
    }

    public static void main(String[] args) {

        new TestProducer("test").sendProducer();
        try {
            // give the background sender thread time to push buffered records before the JVM exits
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        System.out.println("Program finished!");

    }

}
Testing shows that without the Thread.sleep(10000), producer.send() alone is not enough: you must call producer.flush() for the buffered records to actually be pushed to Kafka before the JVM exits.
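A cleaner alternative than sleeping is to close the producer explicitly. As a minimal sketch (a hypothetical sendAndClose() method reusing the topic field and createProducer() from the class above), close() first flushes every buffered record and then shuts down the sender thread:

    public void sendAndClose() {
        KafkaProducer<String, String> producer = createProducer();
        try {
            for (int i = 0; i < 20; i++) {
                producer.send(new ProducerRecord<String, String>(topic, "message:" + i));
            }
        } finally {
            // close() flushes all buffered records and releases sockets and threads,
            // so neither Thread.sleep() nor an explicit flush() is needed before exit.
            producer.close();
        }
    }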

2. Consumer Code


import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Created by huangjinchong on 2018/5/11.
 */
public class TestConsumer {

    private String topic;

    public TestConsumer(String topic) {
        super();
        this.topic = topic;
    }
    
    Runnable run1 = new Runnable() {
        @Override
        public void run() {
            KafkaConsumer<String, String> consumer = createConsumer();
            List<String> topics = new ArrayList<>();
            topics.add(topic);
            consumer.subscribe(topics);
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(500);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                    System.out.println(record.partition());
                    // manually commit the offset of consumed records here if auto-commit is disabled
                }
            }
        }
    };

    private KafkaConsumer<String, String> createConsumer(){
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.1.131:9092,192.168.1.132:9092");
        props.put("group.id", "hjc1");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("max.poll.records", 1);
//        props.put("auto.offset.reset", "latest"); // for a new consumer group, whether to start from the earliest offset or only from records newer than what other groups have consumed
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return new KafkaConsumer<>(props);
    }

    public static void main(String[] args) {
        TestConsumer tc = new TestConsumer("test");
        Thread th = new Thread(tc.run1);
        th.start();
    }

}
Consume in a thread so that data from all partitions is received; a single run only fetches data from one partition. To consume just one specific partition, replace subscribe() with assign():

       List<TopicPartition> partitions = new ArrayList<>();
       partitions.add(new TopicPartition(topic, 0));
       consumer.assign(partitions);
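The comment in run1 mentions committing offsets manually; as a sketch of what that could look like (assuming ENABLE_AUTO_COMMIT_CONFIG is set to "false" in createConsumer()), the poll loop would call commitSync() after processing each batch:

            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(500);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.partition() + ": " + record.value());
                }
                // commitSync() blocks until the broker has recorded the offsets of this
                // batch, so these records are not re-delivered after a restart.
                consumer.commitSync();
            }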

Reposted from blog.csdn.net/qq_25445087/article/details/80351254