Kafka environment configuration and code examples [Windows & Linux]

JDK installation and configuration - Standard configuration
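After installing the JDK and setting JAVA_HOME, a quick sanity check on either platform (install paths vary per machine):

    java -version
    echo %JAVA_HOME%     (Windows)
    echo $JAVA_HOME      (Linux)
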

Zookeeper installation and configuration

Zookeeper download: https://www.apache.org/dyn/closer.cgi/zookeeper/
Configure the data directory: copy conf/zoo_sample.cfg to conf/zoo.cfg and set dataDir.
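A minimal conf/zoo.cfg sketch; the dataDir value is a placeholder to replace with a real path (the sample file ships with dataDir=/tmp/zookeeper, which does not survive reboots):

    tickTime=2000
    clientPort=2181
    dataDir=D:\zookeeper\data
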
Configure the environment variables.
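The variable name below is the usual convention rather than anything Zookeeper itself requires; adjust the paths to the actual install location:

    Windows:  set ZOOKEEPER_HOME=D:\zookeeper, then append %ZOOKEEPER_HOME%\bin to Path
    Linux:    export ZOOKEEPER_HOME=/opt/zookeeper
              export PATH=$PATH:$ZOOKEEPER_HOME/bin
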
Start Zookeeper on Windows
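A typical invocation, assuming the shell is already in the Zookeeper installation directory:

    bin\zkServer.cmd
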
If it starts successfully, the process stays in the foreground and logs that it is bound to the client port (2181 by default).
Start Zookeeper on Linux
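On Linux the bundled script runs the server in the background and can report its state (same assumption about the working directory):

    bin/zkServer.sh start
    bin/zkServer.sh status
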
Kafka installation and configuration

Kafka download: http://kafka.apache.org/downloads
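The broker reads config/server.properties; the entries most relevant to a single-node local setup are sketched below (the listener address is chosen to match the 127.0.0.1:9092 used by the sample code later, and zookeeper.connect must match the Zookeeper address configured above):

    broker.id=0
    listeners=PLAINTEXT://127.0.0.1:9092
    log.dirs=/tmp/kafka-logs
    zookeeper.connect=localhost:2181
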
Start Kafka on Windows
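Kafka ships Windows wrappers under bin\windows; from the Kafka installation directory:

    bin\windows\kafka-server-start.bat config\server.properties
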
If startup succeeds, the broker log ends with a line reporting that the Kafka server started.
Start the Kafka service on Linux
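From the Kafka installation directory on Linux (the -daemon flag detaches the process from the terminal):

    bin/kafka-server-start.sh config/server.properties
    # or, detached:
    bin/kafka-server-start.sh -daemon config/server.properties
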

Sample code
Python

# Producer
# -*- coding: utf-8 -*-
# @Time    : 12/12/2019 5:02 PM
# @File    : MsgProductor.py
# @Project: Interface_AUTOMATION
import pickle
import time

from kafka import KafkaProducer
from kafka.errors import KafkaError

# Pickle both keys and values so arbitrary Python objects can be sent.
producer = KafkaProducer(bootstrap_servers=['127.0.0.1:9092'],
                         key_serializer=lambda k: pickle.dumps(k),
                         value_serializer=lambda v: pickle.dumps(v))

start_time = time.time()
for i in range(10000000000):  # effectively an endless load loop; lower the bound for a quick test
    string = "Say Hello World %d at %d.\n" % (i, start_time)
    future = producer.send(topic="sending", key="num", value=string)
    with open('sending.txt', 'a') as file_object:
        file_object.write(string)
    # Synchronous blocking: get() waits for the broker acknowledgement,
    # which guarantees the messages are sent in order.
    try:
        record_metadata = future.get(timeout=10)
        print(record_metadata.topic)
        print(record_metadata.partition)
        print(record_metadata.offset)
    except KafkaError as e:
        print(str(e))

end_time = time.time()
time_counts = end_time - start_time
print(time_counts)

# Consumer
# -*- coding: utf-8 -*-
# @Time    : 12/12/2019 5:03 PM
# @File    : MsgConsumer.py
# @Project: Interface_AUTOMATION
import pickle

from kafka import KafkaConsumer

# Mirror the producer's pickle serializers so keys and values are unpickled on receipt.
consumer = KafkaConsumer('sending', bootstrap_servers=['127.0.0.1:9092'],
                         key_deserializer=lambda k: pickle.loads(k),
                         value_deserializer=lambda v: pickle.loads(v))

for msg in consumer:
    recv = "%s:%d:%d: key=%s value=%s" % (msg.topic, msg.partition, msg.offset, msg.key, msg.value)
    with open('recv.txt', 'a') as file_object:
        file_object.write(recv + "\n")  # one record per line
    print(recv)
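Brokers auto-create topics by default, so the producer above works as-is; the "sending" topic can also be created explicitly first. A sketch using the CLI that ships with Kafka 2.x (the --zookeeper form predates the newer --bootstrap-server option):

    bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic sending
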

Java

package util.paas;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class KafkaVerification {
    /**
     * Create a producer and send messages
     * @param addressandport broker address and port
     * @param topicname topic name
     * @throws NullPointerException
     */
    public static void producer(String addressandport, String topicname) throws NullPointerException {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", addressandport);
        properties.put("acks", "all");
        properties.put("retries", 0);
        properties.put("batch.size", 16384);
        properties.put("linger.ms", 1);
        properties.put("buffer.memory", 33554432);
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        try (Producer<String, String> producer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 100; i++) {
                String msg = String.format("Message %d", i);
                producer.send(new ProducerRecord<String, String>(topicname, msg));
                System.out.println("Sent:" + msg);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Create a consumer and receive messages
     * @param addressandport broker address and port
     * @param topicname topic name
     * @throws NullPointerException
     */
    public static void consumer(String addressandport, String topicname) throws NullPointerException {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", addressandport);
        properties.put("group.id", "group-1");
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "1000");
        properties.put("auto.offset.reset", "earliest");
        properties.put("session.timeout.ms", "30000");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Collections.singletonList(topicname));
        while (true) {
            // poll(Duration) replaces the deprecated poll(long) overload (kafka-clients 2.0+).
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, value = %s%n", record.offset(), record.value());
            }
        }
    }
}
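A minimal main method to add inside KafkaVerification for trying the two methods; the address and topic are assumptions matching the Python sample, and consumer() never returns because of its endless poll loop:

    public static void main(String[] args) {
        KafkaVerification.producer("127.0.0.1:9092", "sending");
        KafkaVerification.consumer("127.0.0.1:9092", "sending");
    }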