pom.xml
<dependency> <groupId>org.apache.kafka</groupId> <artifactId>kafka-clients</artifactId> <version>0.10.1.1</version> </dependency>
Sending a message
@Test public void TestProducer(){ Properties props = new Properties(); props.put("bootstrap.servers", "192.168.1.245:9393,192.168.1.246:9393"); // The "all" setting will cause a full commit of the record to block, the slowest, but most persistent setting. props.put("acks", "all"); //If the request fails, the producer will automatically retry, even if set to 0 the producer can automatically retry. props.put("retries", 0); //The producer maintains buffers of unsent records for each partition. props.put("batch.size", 16384); //Send immediately by default, here is the delay in milliseconds props.put("linger.ms", 1); // Producer buffer size, when the buffer is exhausted, additional send calls will be blocked. TimeoutException will be thrown if time exceeds max.block.ms props.put("buffer.memory", 33554432); //The key.serializer and value.serializer instruct how to turn the key and value objects the user provides with their ProducerRecord into bytes. props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); //Create a producer class for kafka // close();//Close this producer. // close(long timeout, TimeUnit timeUnit); //This method waits up to timeout for the producer to complete the sending of all incomplete requests. // flush() ; all cached records are sent immediately Producer<String, String> producer = new KafkaProducer<String, String>(props); for(int i = 0; i < 10; i++){ producer.send(new ProducerRecord<String, String>("d-topic", Integer.toString(i), Integer.toString(i))); } producer.flush(); producer.close(); }
Receiving messages
@Test public void TestConsumer() throws InterruptedException{ Properties props = new Properties(); props.put("bootstrap.servers", "192.168.1.245:9393,192.168.1.246:9393"); props.put("group.id", "GroupA"); props.put("enable.auto.commit", "true"); props.put("auto.commit.interval.ms", "1000"); //The duration of the call processing from poll (pull) props.put("session.timeout.ms", "30000"); // limit the number of polls //props.put("max.poll.records", "100"); props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props); //Subscribe to topic list topic //consumer.subscribe(Arrays.asList("d-topic", "bar")); consumer.subscribe(Arrays.asList("d-topic")); while (true) { ConsumerRecords<String, String> records = consumer.poll(100); for (ConsumerRecord<String, String> record : records){ System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value()); } Thread.sleep(1000); } }
Integrating with Spring
pom.xml with Spring Integration
<dependency> <groupId>org.springframework.integration</groupId> <artifactId>spring-integration-kafka</artifactId> <version>2.1.0.RELEASE</version> </dependency>
producer.xml
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:int="http://www.springframework.org/schema/integration"
       xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
       xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
                           http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
                           http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">

    <!-- Producer configuration: a KafkaTemplate backed by a DefaultKafkaProducerFactory
         whose map entries mirror the plain kafka-clients producer properties. -->
    <bean id="template" class="org.springframework.kafka.core.KafkaTemplate">
        <constructor-arg index="0">
            <bean class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
                <constructor-arg>
                    <map>
                        <entry key="bootstrap.servers" value="192.168.1.245:9393,192.168.1.246:9393"/>
                        <entry key="acks" value="all"/>
                        <entry key="retries" value="3"/>
                        <entry key="batch.size" value="16384"/>
                        <entry key="linger.ms" value="1"/>
                        <entry key="buffer.memory" value="33554432"/>
                        <entry key="key.serializer" value="org.apache.kafka.common.serialization.StringSerializer"></entry>
                        <entry key="value.serializer" value="org.apache.kafka.common.serialization.StringSerializer"></entry>
                    </map>
                </constructor-arg>
            </bean>
        </constructor-arg>
    </bean>

    <!-- Outbound flow: messages queued on inputToKafka are polled every second
         and published to topic "d-topic" via the template above. -->
    <int:channel id="inputToKafka">
        <int:queue/>
    </int:channel>

    <int-kafka:outbound-channel-adapter id="kafkaOutboundChannelAdapter"
                                        kafka-template="template"
                                        auto-startup="true"
                                        channel="inputToKafka"
                                        topic="d-topic">
        <int:poller fixed-delay="1000" time-unit="MILLISECONDS"/>
    </int-kafka:outbound-channel-adapter>
</beans>
consumer.xml
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:int="http://www.springframework.org/schema/integration"
       xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
       xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
                           http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
                           http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">

    <!-- Consumer configuration: the same properties the plain kafka-clients
         consumer uses, held in a HashMap for the consumer factory below. -->
    <bean id="consumerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="192.168.1.245:9393,192.168.1.246:9393"/>
                <entry key="group.id" value="GroupA"/>
                <entry key="enable.auto.commit" value="true"/>
                <entry key="auto.commit.interval.ms" value="1000"/>
                <entry key="session.timeout.ms" value="15000"/>
                <entry key="key.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
                <entry key="value.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
            </map>
        </constructor-arg>
    </bean>

    <!-- Factory that builds KafkaConsumer instances from the properties above. -->
    <bean id="consumerFactory" class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
        <constructor-arg>
            <ref bean="consumerProperties"/>
        </constructor-arg>
    </bean>

    <!-- Inbound flow: the message-driven adapter reads records from the
         listener container and deposits them on the inputFromKafka queue. -->
    <int:channel id="inputFromKafka">
        <int:queue/>
    </int:channel>

    <int-kafka:message-driven-channel-adapter auto-startup="true"
                                              channel="inputFromKafka"
                                              listener-container="container1"/>

    <bean id="container1" class="org.springframework.kafka.listener.KafkaMessageListenerContainer">
        <constructor-arg index="0" ref="consumerFactory"/>
        <constructor-arg index="1" ref="containerProperties"/>
    </bean>

    <!-- Container listens on topic "d-topic". -->
    <bean id="containerProperties" class="org.springframework.kafka.listener.config.ContainerProperties">
        <constructor-arg value="d-topic"/>
    </bean>
</beans>
Test code for sending and receiving messages
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:applicationContext.xml"})
public class TestKafka {

    // Outbound channel: messages sent here are published to Kafka by the
    // outbound channel adapter configured in producer.xml.
    @Autowired
    @Qualifier("inputToKafka")
    MessageChannel messageChannel;

    // Inbound channel: the message-driven adapter deposits consumed Kafka
    // records here for polling.
    @Autowired
    @Qualifier("inputFromKafka")
    PollableChannel pollableChannel;

    @Test
    public void TestSpringProducer() {
        for (int i = 0; i < 15; i++) {
            Message<String> payload = new GenericMessage<String>("test_" + i);
            boolean sent = messageChannel.send(payload);
            System.out.println(sent + "_" + i);
        }
    }

    @Test
    public void TestSpringConsumer() {
        // Drain the channel: keep polling with a 1s timeout until it runs dry.
        Message<?> received;
        while ((received = pollableChannel.receive(1000)) != null) {
            System.out.println("message########" + received);
        }
    }
}