Table of Contents
1. Kafka API coding approach
1、producer
- Set up Kafka cluster, acks policy configuration, K, V serialization
- Create producer object
- Create producer information record (topic, key, value), and then send
Reference article: Kafka's Producer
2、consumer
- Configure the cluster port number, consumer group, K, V deserialization, automatic submission mode, automatic submission time
- Create consumers
- Create the topic partitions the consumer wants to consume; if multiple partitions are consumed, store them in an ArrayList
- Assign the topic's partition information to the consumer
- Consumers pull data regularly
2. Dependencies
Open the Maven repository and select the matching versions.
The Kafka version used in this example is kafka_2.11-2.0.0, so choose the corresponding Scala 2.11 and Kafka 2.0.0 artifacts, and import the kafka and kafka-clients dependencies:
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>2.0.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.0.0</version>
</dependency>
3. Java API
1、kafka-producer
/**
 * Publishes one string message (null key) to the "mydemo3" topic on broker single:9092.
 * Note: a producer is created and closed per call — acceptable for a demo, wasteful in production.
 *
 * @param msg the message value to send
 */
public void writeMsg(String msg){
    Properties config = new Properties();
    // Broker address, strongest ack guarantee (all replicas), no retries.
    config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "single:9092");
    config.put(ProducerConfig.ACKS_CONFIG, "all");
    config.put(ProducerConfig.RETRIES_CONFIG, "0");
    // Both key and value are serialized as plain strings.
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getTypeName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getTypeName());
    KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(config);
    kafkaProducer.send(new ProducerRecord<>("mydemo3", msg));
    kafkaProducer.close();
}
// Driver snippet: send five numbered messages through the producer wrapper.
// NOTE(review): appears to live inside a main method not shown in this excerpt.
Kafka_demo2 producer = new Kafka_demo2();
for (int i = 0; i < 5; i++) {
// Message value is "龙门飞甲" followed by the loop index (keys are left null).
producer.writeMsg("龙门飞甲"+i);
}
2、kafka-consumer
/**
 * Continuously polls partition 0 of topic "mydemo3" and prints every record.
 * Offsets are auto-committed every second; this method loops forever and never returns.
 */
public void readMsg(){
    Properties config = new Properties();
    // Broker address, consumer group, string deserialization, auto-commit every 1000 ms.
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "single:9092");
    config.put(ConsumerConfig.GROUP_ID_CONFIG, "xym");
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getTypeName());
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getTypeName());
    config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    config.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
    KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(config);
    // Manual partition assignment (assign, not subscribe): no group rebalancing happens.
    List<TopicPartition> partitions = new ArrayList<>();
    partitions.add(new TopicPartition("mydemo3", 0));
    kafkaConsumer.assign(partitions);
    while (true){
        // Block up to 100 ms per poll, then print whatever arrived.
        kafkaConsumer.poll(Duration.ofMillis(100))
                     .forEach(record -> System.out.println(record.toString()));
    }
}
// Entry point: start the blocking consumer loop (readMsg never returns).
public static void main(String[] args) {
new Kafka_demo().readMsg();
}
4. Scala API
1、kafka-producer
/** Minimal Scala Kafka producer demo: sends numbered messages to topic "mydemo3". */
object Kafka_scala_Producer{
  /**
   * Sends one string message (null key) to "mydemo3" on broker single:9092.
   * A producer is created and closed per call — fine for a demo, wasteful in production.
   */
  def writeMsg(msg: String): Unit = {
    val props = new Properties()
    // Broker address, strongest ack guarantee, and string (de)serialization.
    props.put("bootstrap.servers", "single:9092")
    props.put("acks", "all")
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    val producer = new KafkaProducer[String, String](props)
    producer.send(new ProducerRecord[String, String]("mydemo3", msg))
    producer.close()
  }

  def main(args: Array[String]): Unit = {
    // Send messages numbered 5 through 100, one per second.
    (5 to 100).foreach { n =>
      writeMsg("龙门飞甲" + n)
      Thread.sleep(1000)
    }
  }
}
2、kafka-consumer
/** Minimal Scala Kafka consumer demo: prints every record from one partition forever. */
object Kafka_scala_Consumer{
  /**
   * Polls partition 0 of topic "mydemo3" in an endless loop, printing each record.
   * Offsets are auto-committed every second; this method never returns.
   */
  def readMsg(): Unit = {
    val props = new Properties()
    // Broker address, consumer group, auto-commit every 1000 ms, string deserialization.
    props.put("bootstrap.servers", "single:9092")
    props.put("group.id", "xym")
    props.put("enable.auto.commit", "true")
    props.put("auto.commit.interval.ms", "1000")
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    val consumer = new KafkaConsumer[String, String](props)
    // Manual partition assignment (assign, not subscribe): no group rebalancing happens.
    val partitions = new util.ArrayList[TopicPartition]()
    partitions.add(new TopicPartition("mydemo3", 0))
    consumer.assign(partitions)
    while (true) {
      // Block up to 100 ms per poll, then drain the batch via its Java iterator
      // (explicit iterator keeps this Scala-2.11 compatible — no SAM conversion needed).
      val batch = consumer.poll(Duration.ofMillis(100)).iterator()
      while (batch.hasNext) {
        println(batch.next().toString)
      }
    }
  }

  def main(args: Array[String]): Unit = readMsg()
}
5. The output is as follows:
ConsumerRecord(topic = mydemo3, partition = 0, offset = 58, CreateTime = 1616874277813,
serialized key size = -1, serialized value size = 13, headers = RecordHeaders(headers = [],
isReadOnly = false), key = null, value = 龙门飞甲0)
ConsumerRecord(topic = mydemo3, partition = 0, offset = 59, CreateTime = 1616874277859,
serialized key size = -1, serialized value size = 13, headers = RecordHeaders(headers = [],
isReadOnly = false), key = null, value = 龙门飞甲1)
ConsumerRecord(topic = mydemo3, partition = 0, offset = 60, CreateTime = 1616874277890,
serialized key size = -1, serialized value size = 13, headers = RecordHeaders(headers = [],
isReadOnly = false), key = null, value = 龙门飞甲2)
ConsumerRecord(topic = mydemo3, partition = 0, offset = 61, CreateTime = 1616874277922,
serialized key size = -1, serialized value size = 13, headers = RecordHeaders(headers = [],
isReadOnly = false), key = null, value = 龙门飞甲3)
ConsumerRecord(topic = mydemo3, partition = 0, offset = 62, CreateTime = 1616874277941,
serialized key size = -1, serialized value size = 13, headers = RecordHeaders(headers = [],
isReadOnly = false), key = null, value = 龙门飞甲4)