Kafka Study, Part 5: Writing Kafka Sample Programs in Java

1. Maven dependencies

Add the Kafka client libraries. For the producer and consumer code below, kafka-clients alone is sufficient; the kafka_2.12 artifact (the Scala server module) is only needed for broker-side or legacy APIs.

pom.xml

    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.12</artifactId>
        <version>2.0.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>2.0.0</version>
    </dependency>

2. Producer and consumer configuration

2.1 Producer configuration

send.properties

# broker addresses (host:port list)
bootstrap.servers=127.0.0.1:9092,127.0.0.1:9093,127.0.0.1:9094

# serializer class for the message key
key.serializer=org.apache.kafka.common.serialization.StringSerializer
# serializer class for the message value
value.serializer=org.apache.kafka.common.serialization.StringSerializer

# acks=0   fastest; the broker sends no acknowledgement (the returned offset is -1) and delivery is not guaranteed
# acks=1   acknowledged once the leader has the message; if the leader crashes before replication, the message may be lost
# acks=all acknowledged only after the message has landed on the other in-sync servers as well; highest reliability
acks=all

# how long to wait for a broker response before the request fails
request.timeout.ms=30000
# number of retries on transient send failures
retries=3
# back-off before attempting to reconnect to a broker
reconnect.backoff.ms=1000
# compress record batches with snappy
compression.type=snappy
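
The same settings can also be built in code using the constants in org.apache.kafka.clients.producer.ProducerConfig, which avoids typos in the string keys. A minimal fragment mirroring the file above (not from the original post):

import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

// inside any method:
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092,127.0.0.1:9093,127.0.0.1:9094");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
props.put(ProducerConfig.ACKS_CONFIG, "all");
props.put(ProducerConfig.RETRIES_CONFIG, 3);
props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");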

2.2 Consumer configuration

rec.properties

# broker address (host:port)
bootstrap.servers=127.0.0.1:9092

# consumer group ID; consumers in the same group share the topic's partitions,
# so each message is processed by only one member of the group
group.id=0

key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
# default value deserializer; the examples below override it in code
value.deserializer=org.apache.kafka.common.serialization.ByteArrayDeserializer

# where to start when the group has no committed offset:
# earliest - start from the earliest available offset, i.e. all not-yet-consumed messages
# latest   - start from the latest offset; historical messages are not received
auto.offset.reset=earliest

# true  - commit offsets automatically
# false - commit offsets manually
enable.auto.commit=true
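
The consumer side likewise has type-safe constants in org.apache.kafka.clients.consumer.ConsumerConfig; a minimal fragment mirroring rec.properties (not from the original post):

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
props.put(ConsumerConfig.GROUP_ID_CONFIG, "0");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");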

3. Utility classes

PropertiesUtils.java

package com.demo.kafka.study.util;

import java.io.File;
import java.io.FileInputStream;
import java.util.Properties;

public class PropertiesUtils {

    // Load a properties file from a direct path, falling back to the classpath
    public static Properties getProperties(String filePath) throws Exception {
        File configFile = new File(filePath);
        if (!configFile.exists()) {
            filePath = PropertiesUtils.class.getClassLoader().getResource(filePath).getPath();
            configFile = new File(filePath);
            if (!configFile.exists()) {
                throw new Exception("Configuration file not found: " + configFile.getAbsolutePath());
            }
        }
        return getProperties(configFile);
    }

    public static Properties getProperties(File configFile) throws Exception {
        if (!configFile.exists()) {
            throw new Exception("Configuration file not found: " + configFile.getAbsolutePath());
        }
        Properties props = new Properties();
        // try-with-resources closes the stream even if load() fails
        try (FileInputStream in = new FileInputStream(configFile)) {
            props.load(in);
        }
        return props;
    }

    public static long getLong(Properties properties, String key, long defaultValue) {
        String value = properties.getProperty(key);
        if (value != null) {
            try {
                return Long.parseLong(value.trim());
            } catch (NumberFormatException ex) {
                ex.printStackTrace();
            }
        }
        return defaultValue;
    }

    public static int getInt(Properties properties, String key, int defaultValue) {
        String value = properties.getProperty(key);
        if (value != null) {
            try {
                return Integer.parseInt(value.trim());
            } catch (NumberFormatException ex) {
                ex.printStackTrace();
            }
        }
        return defaultValue;
    }

    public static String getString(Properties properties, String key, String defaultValue) {
        String value = properties.getProperty(key);
        if (value != null && !value.trim().isEmpty()) {
            return value.trim();
        }
        return defaultValue;
    }

}

ObjectSerializer.java

package com.demo.kafka.study.util;

import org.apache.kafka.common.serialization.Serializer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.Map;

public class ObjectSerializer implements Serializer<Object> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // no configuration needed
    }

    @Override
    public byte[] serialize(String topic, Object data) {
        if (data == null) {
            return null;
        }
        // standard Java serialization; the value class must implement Serializable
        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
             ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(data);
            oos.flush();
            return baos.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public void close() {
        // nothing to release
    }
}

ObjectDeserializer.java

package com.demo.kafka.study.util;

import org.apache.kafka.common.serialization.Deserializer;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.Map;

public class ObjectDeserializer implements Deserializer<Object> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // no configuration needed
    }

    @Override
    public Object deserialize(String topic, byte[] data) {
        if (data == null) {
            return null;
        }
        // standard Java deserialization, the inverse of ObjectSerializer
        try (ByteArrayInputStream bais = new ByteArrayInputStream(data);
             ObjectInputStream ois = new ObjectInputStream(bais)) {
            return ois.readObject();
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public void close() {
        // nothing to release
    }
}
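
A quick standalone round-trip check of the two classes (a sketch; the RoundTrip class is not part of the original post, and any Serializable value works):

RoundTrip.java

package com.demo.kafka.study.util;

public class RoundTrip {
    public static void main(String[] args) {
        ObjectSerializer serializer = new ObjectSerializer();
        ObjectDeserializer deserializer = new ObjectDeserializer();

        // serialize and immediately deserialize a simple Serializable value
        byte[] bytes = serializer.serialize("anyTopic", "hello");
        Object restored = deserializer.deserialize("anyTopic", bytes);
        System.out.println("restored = " + restored); // prints: restored = hello

        serializer.close();
        deserializer.close();
    }
}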

4. Sending and receiving String messages

4.1 Producer

Send.java

package com.demo.kafka.study.sendrec.stringvalue;

import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class Send {

    public static void main(String[] args) throws Exception {

        Properties props = PropertiesUtils.getProperties("send.properties");

        // serializer for the key
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // serializer for the value
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        Producer<String, String> producer = new KafkaProducer<String, String>(props);


        for (int i = 0; i < 9; i++) {

            String topic = "testStringTopic";
            String key = "" + System.nanoTime();
            String value = "message " + i;

            // build the Kafka record
            ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>(topic, key, value);

            // fire-and-forget send
            producer.send(producerRecord);

            // block until the send succeeds
            // producer.send(producerRecord).get();

            // block, timing out after three seconds
            // producer.send(producerRecord).get(3, TimeUnit.SECONDS);

            System.out.println("Sent message to topic " + topic + ": key=" + key + " value=" + value);

        }
        producer.close();

    }
}
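
Besides the blocking get() variants shown in the comments above, send() also accepts an org.apache.kafka.clients.producer.Callback that fires once the broker acknowledges the record or the send fails. A minimal sketch using the same producer and record:

// asynchronous send with a completion callback
producer.send(producerRecord, (metadata, exception) -> {
    if (exception != null) {
        // the send failed after exhausting retries
        exception.printStackTrace();
    } else {
        System.out.println("Acked: partition=" + metadata.partition() + " offset=" + metadata.offset());
    }
});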

4.2 Consumer

Rec.java

package com.demo.kafka.study.sendrec.stringvalue;

import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;


import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class Rec {
    public static void main(String[] args) throws Exception {
        Properties props = PropertiesUtils.getProperties("rec.properties");

        // deserializer for the key
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // deserializer for the value
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        Consumer<String, String> consumer = new KafkaConsumer<String, String>(props);

        // topics to subscribe to; subscribing to several topics is also supported
        consumer.subscribe(Collections.singletonList("testStringTopic"));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5L));
            for (ConsumerRecord<String, String> record : records) {

                System.out.println("从队列" + record.topic() + "接收消息:offset=" + record.offset()
                        + " key=" + record.key()
                        + " value=" + record.value().toString()
                        + " partition=" + record.partition()
                        + " timestamp=" + record.timestamp());

            }
        }
    }
}
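
The while (true) loop above never exits. A common shutdown pattern (a sketch, not part of the original post) is to call consumer.wakeup() from a JVM shutdown hook; the blocked poll() then throws WakeupException, and the consumer can be closed cleanly:

final Thread mainThread = Thread.currentThread();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    consumer.wakeup(); // makes the pending poll() throw WakeupException
    try { mainThread.join(); } catch (InterruptedException ignored) { }
}));
try {
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5L));
        // ... handle records as above ...
    }
} catch (org.apache.kafka.common.errors.WakeupException e) {
    // expected during shutdown
} finally {
    consumer.close();
}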

5. Sending and receiving Object messages
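
These examples use com.demo.kafka.study.info.Person, which the original post does not show. A minimal reconstruction based on how it is used (the field names are assumptions); it must implement java.io.Serializable because ObjectSerializer relies on Java serialization:

Person.java

package com.demo.kafka.study.info;

import java.io.Serializable;

public class Person implements Serializable {

    private static final long serialVersionUID = 1L;

    private String name;
    private int age; // assumption: the int constructor argument is an age

    public Person(String name, int age) {
        this.name = name;
        this.age = age;
    }

    @Override
    public String toString() {
        return "Person{name='" + name + "', age=" + age + "}";
    }
}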

5.1 Producer

Send.java

package com.demo.kafka.study.sendrec.objectvalue;

import com.demo.kafka.study.info.Person;
import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class Send {

    public static void main(String[] args) throws Exception {


        Properties props = PropertiesUtils.getProperties("send.properties");

        // serializer for the key
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // serializer for the value: the custom ObjectSerializer (the fully qualified class name is required)
        props.put("value.serializer", "com.demo.kafka.study.util.ObjectSerializer");
        Producer<String, Object> producer = new KafkaProducer<String, Object>(props);

        for (int i = 0; i < 9; i++) {

            String topic = "testObjectTopic";
            String key = "" + System.nanoTime();
            Person value = new Person("person " + i, i);

            // build the Kafka record
            ProducerRecord<String, Object> producerRecord = new ProducerRecord<String, Object>(topic, key, value);

            // fire-and-forget send
            producer.send(producerRecord);

            // block until the send succeeds
            // producer.send(producerRecord).get();

            // block, timing out after three seconds
            // producer.send(producerRecord).get(3, TimeUnit.SECONDS);

            System.out.println("Sent message to topic " + topic + ": key=" + key + " value=" + value);

        }
        producer.close();

    }
}

5.2 Consumer

Rec.java

package com.demo.kafka.study.sendrec.objectvalue;

import com.demo.kafka.study.info.Person;
import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class Rec {
    public static void main(String[] args) throws Exception {
        Properties props = PropertiesUtils.getProperties("rec.properties");

        // deserializer for the key
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // deserializer for the value: the custom ObjectDeserializer (the fully qualified class name is required)
        props.put("value.deserializer", "com.demo.kafka.study.util.ObjectDeserializer");

        Consumer<String, Object> consumer = new KafkaConsumer<String, Object>(props);

        // topics to subscribe to; subscribing to several topics is also supported
        consumer.subscribe(Collections.singletonList("testObjectTopic"));

        while (true) {
            ConsumerRecords<String, Object> records = consumer.poll(Duration.ofSeconds(5L));
            for (ConsumerRecord<String, Object> record : records) {

                System.out.println("从队列" + record.topic() + "接收消息:offset=" + record.offset()
                        + " key=" + record.key()
                        + " value=" + ((Person)record.value()).toString()
                        + " partition=" + record.partition()
                        + " timestamp=" + record.timestamp());
            }
        }

    }
}

6. Manual (asynchronous) offset commit example

6.1 Producer

Send.java

package com.demo.kafka.study.sendrec.async;

import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class Send {

    public static void main(String[] args) throws Exception {


        Properties props = PropertiesUtils.getProperties("send.properties");

        // serializer for the key
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // serializer for the value
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        Producer<String, String> producer = new KafkaProducer<String, String>(props);


        for (int i = 0; i < 100; i++) {

            String topic = "testAsyncTopic";
            String key = "" + System.nanoTime();
            String value = "message " + i;

            // build the Kafka record
            ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>(topic, key, value);

            // fire-and-forget send
            producer.send(producerRecord);

            // block until the send succeeds
            // producer.send(producerRecord).get();

            // block, timing out after three seconds
            // producer.send(producerRecord).get(3, TimeUnit.SECONDS);

            System.out.println("Sent message to topic " + topic + ": key=" + key + " value=" + value);

        }
        producer.close();

    }
}

6.2 Consumer

Rec.java

package com.demo.kafka.study.sendrec.async;

import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;

import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;

public class Rec {
    public static void main(String[] args) throws Exception {
        Properties props = PropertiesUtils.getProperties("rec.properties");

        // deserializer for the key
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // deserializer for the value
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // switch to manual offset commits
        props.put("enable.auto.commit", "false");

        // maximum number of records returned by a single poll; with auto-commit
        // enabled, offsets are committed during poll()
        props.put("max.poll.records", 20);


        Consumer<String, String> consumer = new KafkaConsumer<String, String>(props);

        // topics to subscribe to; subscribing to several topics is also supported
        consumer.subscribe(Collections.singletonList("testAsyncTopic"));

        int i = 0;
        while (true) {
            i++;
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5L));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("从队列" + record.topic() + "接收消息:offset=" + record.offset()
                        + " key=" + record.key()
                        + " value=" + record.value().toString()
                        + " partition=" + record.partition()
                        + " timestamp=" + record.timestamp());
                sleep(1000);

                // on every 10th poll, also commit this record's offset synchronously
                if (i % 10 == 0) {
                    commitKafkaOffset(consumer, record);
                }

            }
            // commit the whole batch asynchronously after processing it
            commitKafka(consumer);
        }

    }


    public static void commitKafkaOffset(Consumer<String, String> consumer, ConsumerRecord<String, String> record) {
        long offset = record.offset();
        int partition = record.partition();
        String topic = record.topic();
        TopicPartition topicPartition = new TopicPartition(topic, partition);
        // the committed value is the offset of the next record to consume, hence +1
        Map<TopicPartition, OffsetAndMetadata> map = Collections.singletonMap(topicPartition, new OffsetAndMetadata(offset + 1L));

        // synchronous commit
        consumer.commitSync(map);

        // asynchronous variant (the callback may be null)
        // consumer.commitAsync(map, null);

    }


    /**
     * Manually commit the offsets of all partitions assigned to this consumer.
     */
    public static void commitKafka(Consumer<String, String> consumer) {
        if (null != consumer) {
            // asynchronous commit; failures are not reported without a callback
            consumer.commitAsync();

            // synchronous commit; blocks until it succeeds or throws
            // consumer.commitSync();
            System.out.println("Committed -----------------------");
        }
    }

    public static void sleep(long sleepTime) {

        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

    }
}
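
To observe the outcome of an asynchronous per-partition commit, pass an OffsetCommitCallback; a minimal sketch that could replace the commitSync call in commitKafkaOffset above:

// asynchronous commit that reports failures through a callback
consumer.commitAsync(map, (offsets, exception) -> {
    if (exception != null) {
        System.err.println("Offset commit failed for " + offsets + ": " + exception.getMessage());
    }
});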

7. Transactional commit example

7.1 Producer

Send.java

package com.demo.kafka.study.sendrec.transaction;

import java.util.UUID;

public class Send {

    public static void main(String[] args) throws Exception {

        // Both producers share the same transactional.id. When the second producer
        // calls initTransactions(), the broker fences the first, aborting the first
        // producer's still-open transaction, which demonstrates producer fencing.
        String uuid = UUID.randomUUID().toString();
        TransactionProducer transactionProducer1 = new TransactionProducer("1", uuid);

        TransactionProducer transactionProducer2 = new TransactionProducer("2", uuid);


        Thread t1 = new Thread(transactionProducer1);
        t1.setDaemon(false);
        t1.start();

        Thread.sleep(6000);

        Thread t2 = new Thread(transactionProducer2);
        t2.setDaemon(false);
        t2.start();
    }
}

TransactionProducer.java

package com.demo.kafka.study.sendrec.transaction;

import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class TransactionProducer implements Runnable {

    private String transactionId;

    private String producerKey;


    public TransactionProducer(String producerKey, String transactionId) {
        this.producerKey = producerKey;
        this.transactionId = transactionId;
    }

    public void sendMessage() throws Exception {
        Properties props = PropertiesUtils.getProperties("send.properties");

        // serializer for the key
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // serializer for the value
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        props.put("transactional.id", transactionId);

        Producer<String, String> producer = new KafkaProducer<String, String>(props);

        producer.initTransactions();
        producer.beginTransaction();
        try {
            for (int i = 0; i < 10; i++) {

                Thread.sleep(2000);

                String topic = "testTransctionTopic";
                String key = "" + System.nanoTime();
                String value = "消息" + i;

                // 形成kafka消息
                ProducerRecord producerRecord = new ProducerRecord<String, String>(topic, key, value);
                // 直接发送消息
                producer.send(producerRecord);
                // 等待到发送成功才返回
                // producer.send(producerRecord).get();
                // 等待三秒后超时
                // producer.send(producerRecord).get(3, TimeUnit.SECONDS);
                System.out.println("producer" + producerKey + "发送消息到队列" + topic + ":key=" + key + " value=" + value);
            }
            producer.commitTransaction();
        } catch (Exception ex) {
            // 回滚事物
            producer.abortTransaction();
            System.out.println("producer" + producerKey + "回滚事物");
        } finally {
            producer.close();
        }
    }


    @Override
    public void run() {
        try {
            sendMessage();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

7.2 Consumer

Rec.java

package com.demo.kafka.study.sendrec.transaction;

import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class Rec {
    public static void main(String[] args) throws Exception {
        Properties props = PropertiesUtils.getProperties("rec.properties");


        props.put("group.id","0");


        // deserializer for the key
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // deserializer for the value
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        Consumer<String, String> consumer = new KafkaConsumer<String, String>(props);

        // topics to subscribe to; subscribing to several topics is also supported
        consumer.subscribe(Collections.singletonList("testTransctionTopic"));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5L));
            for (ConsumerRecord<String, String> record : records) {

                System.out.println("从队列" + record.topic() + "接收消息:offset=" + record.offset()
                        + " key=" + record.key()
                        + " value=" + record.value().toString()
                        + " partition=" + record.partition()
                        + " timestamp=" + record.timestamp());

            }
        }

    }
}

RecCommit.java

package com.demo.kafka.study.sendrec.transaction;

import com.demo.kafka.study.util.PropertiesUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class RecCommit {
    public static void main(String[] args) throws Exception {
        Properties props = PropertiesUtils.getProperties("rec.properties");

        props.put("group.id","1");
        // key的反序列化方式
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // value的反序列化方式
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // 只读取已经提交的事物
        props.put("isolation.level", "read_committed");

        Consumer<String, String> consumer = new KafkaConsumer<String, String>(props);

        // 监听的队列,支持对多队列的监听
        consumer.subscribe(Collections.singletonList("testTransctionTopic"));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5L));
            for (ConsumerRecord<String, String> record : records) {

                System.out.println("从队列" + record.topic() + "接收消息:offset=" + record.offset()
                        + " key=" + record.key()
                        + " value=" + record.value().toString()
                        + " partition=" + record.partition()
                        + " timestamp=" + record.timestamp());

            }
        }

    }
}
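
In practice, transactions are most often used for consume-transform-produce pipelines, where the consumed offsets are committed inside the producer's transaction so that consuming and producing succeed or fail together. A sketch of the core loop (outputTopic and myGroupId are placeholders, not from the original post; the consumer must run with enable.auto.commit=false):

producer.initTransactions();
while (true) {
    ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5L));
    if (records.isEmpty()) {
        continue;
    }
    producer.beginTransaction();
    try {
        Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
        for (ConsumerRecord<String, String> record : records) {
            // transform and forward each record
            producer.send(new ProducerRecord<>("outputTopic", record.key(), record.value()));
            // remember the next offset to consume for this partition
            offsets.put(new TopicPartition(record.topic(), record.partition()),
                    new OffsetAndMetadata(record.offset() + 1));
        }
        // commit the consumed offsets within the same transaction
        producer.sendOffsetsToTransaction(offsets, "myGroupId");
        producer.commitTransaction();
    } catch (Exception ex) {
        producer.abortTransaction();
    }
}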

Reposted from blog.csdn.net/yym373872996/article/details/105674292