Java Kafka producer/consumer demo

1. Create a Java project and add the following dependency to the pom.xml file:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.10.2.0</version>
</dependency>

2. Create the Producer Code

the java.util.Properties Import; 

Import org.apache.kafka.clients.producer.KafkaProducer; 
Import org.apache.kafka.clients.producer.Producer; 
Import org.apache.kafka.clients.producer.ProducerRecord; 

public  class ProducerSend {
     public  static  void main (String args []) { 

        // 1. parameters: a port, a buffer memory, the maximum number of connections, key serialization, value sequence of the like (not necessarily every configuration) 
        the Properties The props = new new the Properties ( ); 
        props.put ( " bootstrap.servers " , " localhost: 9092 " ); 
        props.put ( " ACKs " , " All");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", " Org.apache.kafka.common.serialization.StringSerializer " ); 

        // 2. Create Object producers, and establishes a connection 
        Producer <String, String> Producer = new new KafkaProducer <String, String> (The props);
         the try {
             / / 3. in my-topic relating to, send a message 
            for ( int I = 0 ; I < 10000 ; I ++ ) { 
                . the System OUT .println (Integer.toString (I)); 
                producer.send ( new new ProducerRecord <String, String > ( " My-Topic ", Integer.toString(i), Integer.toString(i)));
                Thread.sleep(500);
            }
        }
        catch (Exception e)
        {
            System.out.println("ERROR");
        }

        //4.关闭
        producer.close();

    }
}

3. Create the Consumer Code

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConsumerReceive {
    public static void main(String args[]) {

        //1.参数配置:不是每一非得配置
        Properties props = new Properties();
        props.put("bootstrap.servers", "172.16.8.100:9092");
        props.put("auto.commit.interval.ms", " 1000 " );
         // because every consumer must belong to a group of consumers, it must also set group.id 
        props.put ( " group.id " , " test1 " ); 
        props.put ( " enable. auto.commit " , " to true " ); 
        props.put ( " session.timeout.ms " , " 30000 " ); 
        props.put ( " key.deserializer " , " org.apache.kafka.common.serialization.StringDeserializer");
        props.put ( " value.deserializer " , " org.apache.kafka.common.serialization.StringDeserializer " ); 

        // 2. Create the customer object and establish a connection 
        KafkaConsumer <String, String> Consumer = new new KafkaConsumer <String, String> (the props); 

        // 3. pick data provided from the "my-topic" topic 
        Consumer.subscribe (Arrays.asList ( " My-topic " )); 

        // 4. consumption data 
        the while ( to true ) {
             / / block time, the data taken from kafka 100 milliseconds, the possible removal of 0-n disposable article 
            ConsumerRecords <String, String> records = consumer.poll(100);
             // iterate 
            for (ConsumerRecord <String, String> Record: Records)
                 // print result
                 // System.out.printf ( "offset =% D, Key =% S, S% = value", record.offset ( ), record.key (), record.value ()); 
                . System OUT (.println " data consumer spending is: " + record.value ()); 
        } 
    } 
}

 

Related posts

Origin www.cnblogs.com/tong775131501/p/12327167.html