Kafka consumer example


1. Configuration file: consumer.properties

# ZooKeeper connection string
zookeeper.connect=master:2181,slave1:2181,slave2:2181
# ZooKeeper connection timeout (ms)
zookeeper.connection.timeout.ms=1000000
# Kafka consumer group id
group.id=test-group
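If you prefer not to ship a separate properties file, the same configuration can be built directly in code before constructing the ConsumerConfig in step 2 below. A minimal sketch (the values simply mirror the file above):

Properties p = new Properties();
p.setProperty("zookeeper.connect", "master:2181,slave1:2181,slave2:2181");
p.setProperty("zookeeper.connection.timeout.ms", "1000000");
p.setProperty("group.id", "test-group");
// then pass p to new ConsumerConfig(p), exactly as in step 2 of the code below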



2. The consumer code
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class init {

    public static void main(String[] args) throws IOException {
        // 1. Load the configuration file from the classpath
        Properties p = new Properties();
        // p.load(new FileInputStream(new File("./consumer.properties")));
        p.load(init.class.getClassLoader().getResourceAsStream("consumer.properties"));

        // 2. Build the consumer configuration from the properties
        ConsumerConfig config = new ConsumerConfig(p);

        // 3. Create the consumer connector from the configuration
        ConsumerConnector consumer = Consumer.createJavaConsumerConnector(config);

        // 4. Create a map describing what to consume
        Map<String, Integer> topics = new HashMap<String, Integer>();
        // 5. Topic name -> number of streams (threads) to read it with; here a single stream
        topics.put("test-topic", 1);

        // 6. Create the message streams from that map
        Map<String, List<KafkaStream<byte[], byte[]>>> streams = consumer.createMessageStreams(topics);

        // 7. Get the list of streams for our topic
        List<KafkaStream<byte[], byte[]>> partitions = streams.get("test-topic");

        // 8. Create a thread pool; each stream is consumed on its own thread
        ExecutorService threadPool = Executors.newFixedThreadPool(1);
        for (KafkaStream<byte[], byte[]> partition : partitions) {
            // 9. Hand the stream to a worker thread for consumption
            threadPool.execute(new TestConsumer(partition));
        }
    }
}
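As written, the example never stops: the worker threads block in hasNext() forever. If you want the process to exit cleanly, one option (an addition, not part of the original code) is to register a shutdown hook at the end of main. This sketch assumes Java 8+ for the lambda and an extra import of java.util.concurrent.TimeUnit:

// Optional: stop consuming cleanly when the JVM is asked to shut down
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    consumer.shutdown();       // closes the ZooKeeper connection and the message streams,
                               // so hasNext() in the worker threads returns false
    threadPool.shutdown();     // stop accepting new tasks
    try {
        threadPool.awaitTermination(5, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}));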


3. The consumer thread that prints the messages

import java.io.UnsupportedEncodingException;

import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.message.MessageAndMetadata;

public class TestConsumer implements Runnable {

    private KafkaStream<byte[], byte[]> partition;

    /**
     * Constructor: takes the stream to consume.
     */
    public TestConsumer(KafkaStream<byte[], byte[]> partition) {
        this.partition = partition;
    }

    @Override
    public void run() {
        // 1. Get an iterator over the stream
        ConsumerIterator<byte[], byte[]> iterator = partition.iterator();
        // 2. hasNext() blocks until the next message arrives (or the connector is shut down)
        while (iterator.hasNext()) {
            // 3. Take out the next message together with its metadata
            MessageAndMetadata<byte[], byte[]> next = iterator.next();
            try {
                // 4. Print the message
                System.out.println("partition:" + next.partition());                    // partition
                System.out.println("offset:" + next.offset());                          // offset
                System.out.println("message:" + new String(next.message(), "utf-8"));   // message body
            } catch (UnsupportedEncodingException e) {
                e.printStackTrace();
            }
        }
    }
}
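Note that the kafka.consumer.* and kafka.javaapi.consumer.* classes used above belong to the old ZooKeeper-based high-level consumer, which was deprecated and has since been removed from Kafka (it is gone as of Kafka 2.0). With a current kafka-clients dependency, the equivalent example uses org.apache.kafka.clients.consumer.KafkaConsumer and talks to the brokers instead of ZooKeeper. A rough sketch, assuming a broker listening on master:9092:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class NewApiConsumer {
    public static void main(String[] args) {
        Properties p = new Properties();
        p.put("bootstrap.servers", "master:9092"); // assumption: broker address, not ZooKeeper
        p.put("group.id", "test-group");
        p.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        p.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(p)) {
            consumer.subscribe(Collections.singletonList("test-topic"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("partition:" + record.partition());
                    System.out.println("offset:" + record.offset());
                    System.out.println("message:" + record.value());
                }
            }
        }
    }
}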
