Kafka (Part 2): Integrating Kafka with Spring

Continuing from the previous post, with the environment already set up.

1. Add the Maven dependency

        <!-- Note: the groupId attribute used on @KafkaListener in step 4 may require a newer
             spring-kafka release than 1.1.1.RELEASE; if it is not available in your version,
             set the group id via spring.kafka.consumer.group-id instead, or align the
             spring-kafka version with your Spring Boot release. -->
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
            <version>1.1.1.RELEASE</version>
        </dependency>

2. The configuration file

spring:
  kafka:
    bootstrap-servers: localhost:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer

    consumer:
      group-id: pay
      auto-offset-reset: latest
      enable-auto-commit: true
      #      auto-commit-interval: 10
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
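
One note on the consumer settings: the manual-acknowledgment listener shown in step 4 only takes effect when auto-commit is disabled and the listener container runs in manual ack mode. With Spring Boot's spring-kafka auto-configuration, that would look roughly like the sketch below (a sketch only; the property names are assumed from Spring Boot's KafkaProperties, so verify them against your Boot version):

spring:
  kafka:
    consumer:
      enable-auto-commit: false   # required before offsets can be committed manually
    listener:
      ack-mode: manual            # hands offset commits to Acknowledgment.acknowledge()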

3. No other complex configuration is needed; next is the message producer code:

package com.zjrcinfo.zjguahao.message.api.kafka.publish;

import com.alibaba.fastjson.JSONObject;
import com.zjrcinfo.zjguahao.message.api.kafka.dto.KafkaMessage;
import com.zjrcinfo.zjguahao.message.api.kafka.dto.KafkaResult;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.FailureCallback;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.SuccessCallback;

import java.util.concurrent.ExecutionException;

/**
 * Description: generic Kafka publisher.
 * <p>
 * Supports both automatic and manual acknowledgement; automatic acks is the default.
 * For messages with very strict delivery requirements, use the manual-ack path.
 * </p>
 * User: zhouzhou
 * Date: 2019-05-07
 * Time: 5:25 PM
 */
@Component("commonKafkaPublish")
public class CommonKafkaPublish extends AbstractKafkaPublishWrapper {


    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    private KafkaSendResultHandler producerListener;


    @Override
    public <T> Boolean publish(KafkaMessage<T> kafkaMessage) {

        String applicationName = kafkaMessage.getApplicationName();
        String topic = kafkaMessage.getTopic();
        String messageId = kafkaMessage.getMessageId();
        Object content = kafkaMessage.getContent();

        String kafkaValue = JSONObject.toJSONString(content);
        try {
            // Note: send() is asynchronous; this only catches errors thrown while
            // handing the record to the producer, not broker-side failures.
            kafkaTemplate.send(topic, kafkaValue);
        } catch (Exception e) {
            logger.error(String.format("kafka publish threw an exception, messageId{%s}, source service{%s}, topic{%s}, payload{%s}", messageId, applicationName, topic, kafkaValue), e);
            return false;
        }
        logger.info(String.format("kafka message published, messageId{%s}, source service{%s}, topic{%s}, payload{%s}", messageId, applicationName, topic, kafkaValue));

        return true;
    }

    @Override
    public <T> Boolean manualPublish(KafkaMessage<T> kafkaMessage) {
        // Register the send-result listener (success/failure callbacks); setting it once is enough.
        kafkaTemplate.setProducerListener(producerListener);

        String applicationName = kafkaMessage.getApplicationName();
        String topic = kafkaMessage.getTopic();
        String messageId = kafkaMessage.getMessageId();
        Object content = kafkaMessage.getContent();

        String kafkaValue = JSONObject.toJSONString(content);

        // Synchronous send: block on the returned future until the send completes
        try {
            ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, kafkaValue);
            future.get();
        } catch (Exception e) {
            logger.error(String.format("kafka 调用出现异常,messageId{%s},发起服务为{%s},topic 为{%s},消息体为{%s}", messageId, applicationName, topic, kafkaValue));
            return false;
        }

        return true;

    }
}
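
A side note on the fire-and-forget publish() above: kafkaTemplate.send() is asynchronous and returns a ListenableFuture, so that method never learns whether the broker actually accepted the record. The unused FailureCallback/SuccessCallback imports in the class suggest callback-based handling was intended; here is a minimal sketch of that approach (a hypothetical helper, not part of the original repository):

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.util.concurrent.ListenableFuture;

// Hypothetical helper: sends asynchronously and reports the per-record result
// through callbacks instead of ignoring it.
public class AsyncSendExample {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public AsyncSendExample(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    public void sendWithCallback(String topic, String value) {
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, value);
        future.addCallback(
                result -> System.out.println("send ok, offset = " + result.getRecordMetadata().offset()),
                ex -> System.err.println("send failed for topic " + topic + ": " + ex.getMessage()));
    }
}
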
4. The message consumer code:

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.Random;

// imports of project-specific classes (TopicConstants, AbstractKafkaSubscribeWrapper) omitted

/**
 * Description: use this class as a template for your own subscribers; do not duplicate the business logic here.
 * User: zhouzhou
 * Date: 2019-05-09
 * Time: 11:22 AM
 */
@Component("commonKafkaSubscribe")
public class CommonKafkaSubscribe extends AbstractKafkaSubscribeWrapper {

    /**
     * Auto-commit consumer: the container commits offsets automatically.
     * @param record the consumed record
     * @throws Exception on processing failure
     */
    @KafkaListener(topics = TopicConstants.REG_PAY, groupId = "your-own-consumer-group-id")
    public void listenPay(ConsumerRecord<String, String> record) throws Exception {
        logger.info(String.format("kafka message consumed ---------------- listen1 topic = %s, offset = %d, value = %s ", record.topic(), record.offset(), record.value()));
        String msg = JSONObject.parseObject(record.value(), String.class);
        System.out.println(msg);
    }

    /**
     * Manual-commit consumer: the offset is committed only when ack.acknowledge() is called.
     * Requires the listener container to run in a manual ack mode (see the note in step 2).
     * @param record the consumed record
     * @param ack handle for committing the offset manually
     * @throws Exception on processing failure
     */
    @KafkaListener(topics = TopicConstants.COMMON_PAY, groupId = "your-own-consumer-group-id")
    public void listenXXXPay(ConsumerRecord<String, String> record, Acknowledgment ack) throws Exception {
        String msg = JSONObject.parseObject(record.value(), String.class);
        System.out.println(msg);
        // demo only: log roughly half of the consumed messages
        if (new Random().nextInt(100) < 50) {
            logger.info(String.format("kafka combined-payment message consumed ---------------- listen2 topic = %s, offset = %d, value = %s ", record.topic(), record.offset(), record.value()));
        }
        // commit the offset for this record
        ack.acknowledge();
    }

}

Finally, start the application and test it by calling the publish method (a usage sketch follows the link below). The GitHub repository is here:

https://github.com/zjhzzhouzhou/kafka-zhouzhou
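
For reference, calling the publisher might look roughly like this (a hypothetical sketch: it assumes KafkaMessage exposes setters matching the getters read in CommonKafkaPublish, and that the caller is itself a Spring bean):

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

// Hypothetical caller; the bean, field, and method names here are illustrative only.
// Imports of the project-specific KafkaMessage and CommonKafkaPublish classes are omitted.
@Component
public class PublishDemo {

    @Autowired
    private CommonKafkaPublish commonKafkaPublish;

    public void sendDemo() {
        KafkaMessage<String> message = new KafkaMessage<>();
        message.setApplicationName("demo-app");                        // assumed setter
        message.setTopic("reg_pay");                                   // replace with your real topic
        message.setMessageId(java.util.UUID.randomUUID().toString());  // assumed setter
        message.setContent("hello kafka");                             // assumed setter
        Boolean ok = commonKafkaPublish.publish(message);
        System.out.println("publish result: " + ok);
    }
}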

Reprinted from blog.csdn.net/weixin_38399962/article/details/90034380