Spring Boot + Kafka Integration Example

1. Set up the ZooKeeper cluster and the Kafka cluster
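
The article assumes both clusters are already running. The topic used later, topic_template, can be created with the Kafka command-line tools, or programmatically with the kafka-clients AdminClient as in the sketch below. This class is not part of the original project; the package name, partition count, and replication factor are assumptions (two partitions are chosen to match the partition 0/1 entries visible in the logs in section 4):

package org.jy.data.yh.bigdata.drools.hadoop.kafka.admin; // hypothetical package

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

/**
 * One-off helper that creates the topic used by this example.
 */
public class TopicInitializer {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Same broker list as spring.kafka.bootstrap-servers in application.properties
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
                "centosnode01:9092,centosnode02:9092,centosnode03:9092");
        try (AdminClient adminClient = AdminClient.create(props)) {
            // 2 partitions, replication factor 2: illustrative values only
            NewTopic topic = new NewTopic("topic_template", 2, (short) 2);
            adminClient.createTopics(Collections.singletonList(topic)).all().get();
        }
    }
}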

2. Build the Maven project (pom.xml)

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.1.1.RELEASE</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.jy.data.yh.bigdata.drools.hadoop.kafka</groupId>
    <artifactId>SSO-Hadoop-Kafka</artifactId>
    <version>1.0-SNAPSHOT</version>
    <name>SSO-Hadoop-Kafka</name>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
            <version>2.1.1.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>1.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.mybatis</groupId>
            <artifactId>mybatis</artifactId>
            <version>3.5.2</version>
        </dependency>
        <!-- Alibaba Druid connection pool -->
        <!-- Druid is a Java database connection pool that provides strong monitoring and extension capabilities. -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
            <version>1.1.10</version>
        </dependency>


        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.13</version>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <version>2.1.1.RELEASE</version>
            <scope>test</scope>
        </dependency>

        <!-- Common utility libraries -->
        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.6</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.9</version>
        </dependency>

        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>20.0</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.54</version>
        </dependency>

        <!--<dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>-->

        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.8.2</version>
        </dependency>



        <!-- Spring Boot Redis integration -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
            <version>2.1.1.RELEASE</version>
            <exclusions>
                <exclusion><!-- Exclude the Lettuce client and use Jedis instead -->
                    <groupId>io.lettuce</groupId>
                    <artifactId>lettuce-core</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>2.9.0</version>
        </dependency>
        <!-- Dependency required for Redis-backed session sharing -->
        <dependency>
            <groupId>org.springframework.session</groupId>
            <artifactId>spring-session-data-redis</artifactId>
            <version>2.1.2.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-streams</artifactId>
            <version>2.1.1</version>
        </dependency>
        <!-- Spring Boot integration for Kafka -->
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
            <version>2.2.4.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.0</version>
            <optional>true</optional>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.22.1</version>
                <configuration>
                    <skipTests>true</skipTests>
                    <testFailureIgnore>true</testFailureIgnore>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <version>2.1.1.RELEASE</version>
                <!-- Enable debugging and hot redeployment -->
                <configuration>
                    <fork>true</fork>
                </configuration>
                <!-- Enable debugging and hot redeployment: end -->
            </plugin>
        </plugins>
    </build>

</project>

The application.properties file:

server.port=8855
server.servlet.session.timeout=60
server.tomcat.max-threads=1000
server.tomcat.uri-encoding=UTF-8
# logging.file takes precedence over logging.path, so the target directory is included in the file name
logging.file=../logs/kafka.log
logging.level.root=info

spring.thymeleaf.mode=HTML
spring.thymeleaf.encoding=UTF-8
spring.thymeleaf.prefix=classpath:/templates/
spring.thymeleaf.suffix=.html
spring.thymeleaf.basename=messages


spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.driverClassName=com.mysql.cj.jdbc.Driver
spring.datasource.url=jdbc:mysql://localhost:3306/security?characterEncoding=utf-8&serverTimezone=GMT%2B8
spring.datasource.username=root
spring.datasource.password=123456
spring.datasource.initialSize=200
spring.datasource.minIdle=100
spring.datasource.maxActive=500
spring.datasource.maxWait=60000
spring.datasource.timeBetweenEvictionRunsMillis=60000
spring.datasource.minEvictableIdleTimeMillis=300000
spring.datasource.validationQuery=SELECT 1 FROM DUAL
spring.datasource.testWhileIdle=true
spring.datasource.testOnBorrow=false
spring.datasource.testOnReturn=false
spring.datasource.poolPreparedStatements=true
spring.datasource.maxPoolPreparedStatementPerConnectionSize=2000
spring.datasource.filters=stat,wall,logback
spring.datasource.connectionProperties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000

mybatis.mapperLocations=classpath:mapper/*.xml
mybatis.typeAliasesPackage=org.jy.data.elasticsearch.search.model
mybatis.configuration.log-impl=org.apache.ibatis.logging.slf4j.Slf4jImpl
mybatis.configuration.log-prefix=es_

# Redis cluster configuration
spring.redis.cluster.nodes=192.168.227.128:6379,192.168.227.128:6380,192.168.227.129:6381,192.168.227.129:6382,192.168.227.130:6383,192.168.227.130:6384
spring.redis.cluster.timeout=2000
spring.redis.cluster.max-redirects=100
spring.redis.cluster.maxIdle=200
spring.redis.cluster.maxTotal=1000
spring.redis.cluster.maxWaitMillis=2000
spring.session.store-type = redis

#============== kafka ===================
# Kafka broker addresses; multiple brokers can be listed
spring.kafka.bootstrap-servers=centosnode01:9092,centosnode02:9092,centosnode03:9092
#=============== producer =======================
spring.kafka.producer.retries=0
# Upper bound, in bytes, on the size of each batch of records
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
# Serializers for message keys and values
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
#=============== consumer  =======================
# Default consumer group id
spring.kafka.consumer.group-id=kafka-consumer-group
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval=100

# Deserializers for message keys and values
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
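
Spring Boot's Kafka auto-configuration binds the spring.kafka.* properties above automatically, so no extra Java configuration is required. For reference only, a roughly equivalent explicit consumer configuration is sketched below; this class is not part of the original project and merely makes the mapping from properties to ConsumerConfig keys explicit:

package org.jy.data.yh.bigdata.drools.hadoop.kafka.config; // hypothetical package

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaConsumerConfig {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        // Mirrors the spring.kafka.consumer.* properties above
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "centosnode01:9092,centosnode02:9092,centosnode03:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "kafka-consumer-group");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}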

3. Complete code example

(1) Message producer

package org.jy.data.yh.bigdata.drools.hadoop.kafka.producer;

import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.jy.data.yh.bigdata.drools.hadoop.kafka.model.KafkaMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.Date;
import java.util.UUID;

@Component
@Slf4j
public class KafkaMessageProducer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Sends a message; scheduled to run every 2 seconds
    @Scheduled(fixedDelay = 2000)
    public void send() {
        KafkaMessage message = new KafkaMessage();
        message.setId(System.currentTimeMillis());
        message.setMsg(UUID.randomUUID().toString());
        message.setSendTime(new Date());
        String jsonContent = JSON.toJSONString(message);
        kafkaTemplate.send("topic_template",jsonContent);
    }
}
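
The send result is not checked in the original method. Since KafkaTemplate.send() returns a ListenableFuture<SendResult<K, V>> in spring-kafka 2.2.x, a callback can optionally be attached to log delivery success or failure. A hypothetical variant of the send() method is sketched below; it is not part of the original class and additionally requires importing org.springframework.kafka.support.SendResult and org.springframework.util.concurrent.ListenableFuture:

    // Hypothetical variant of send() that also logs the delivery outcome
    public void sendWithCallback() {
        KafkaMessage message = new KafkaMessage();
        message.setId(System.currentTimeMillis());
        message.setMsg(UUID.randomUUID().toString());
        message.setSendTime(new Date());
        String jsonContent = JSON.toJSONString(message);
        ListenableFuture<SendResult<String, String>> future =
                kafkaTemplate.send("topic_template", jsonContent);
        future.addCallback(
                result -> log.info("Sent to partition {} at offset {}",
                        result.getRecordMetadata().partition(),
                        result.getRecordMetadata().offset()),
                ex -> log.error("Failed to send message", ex));
    }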

(2) Message consumer

package org.jy.data.yh.bigdata.drools.hadoop.kafka.consumer;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.Optional;

@Component
@Slf4j
public class KafkaMessageReceive {

    /**
     * The @TopicPartition annotation is worth exploring when a listener needs to be
     * bound to specific partitions (see the sketch after this class).
     * @param record the consumed record
     */
    @KafkaListener(topics = {"topic_template"})  // subscribed topics; several topics may be listed
    public void listen(ConsumerRecord<?, ?> record) {
        Optional<?> kafkaMessage = Optional.ofNullable(record.value());
        if (kafkaMessage.isPresent()) {
            Object message = kafkaMessage.get();
            System.out.println("Received message from subscribed topic topic_template: " + message);
            log.info("----------------- record =" + record);
            log.info("------------------ message =" + message);
        }

    }

}
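
The Javadoc above mentions @TopicPartition. As a purely illustrative sketch (not part of the original project; it needs an extra import of org.springframework.kafka.annotation.TopicPartition), a listener method can be pinned to specific partitions of a topic like this:

    // Hypothetical listener bound to partition 0 of topic_template via @TopicPartition
    @KafkaListener(id = "partition0Listener", topicPartitions =
            @TopicPartition(topic = "topic_template", partitions = {"0"}))
    public void listenPartition0(ConsumerRecord<?, ?> record) {
        log.info("partition 0 record = {}", record);
    }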

(3) Message class

package org.jy.data.yh.bigdata.drools.hadoop.kafka.model;

import java.io.Serializable;
import java.util.Date;
/**
 * Message entity class
 */
public class KafkaMessage implements Serializable {
    private Long id;        // message id
    private String msg;     // message body
    private Date sendTime;  // send time

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public Date getSendTime() {
        return sendTime;
    }

    public void setSendTime(Date sendTime) {
        this.sendTime = sendTime;
    }
}
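
The original article does not show the Spring Boot entry class. Because the producer relies on @Scheduled, scheduling must be enabled on the boot class; a minimal sketch (class name and package are assumptions) would look like this:

package org.jy.data.yh.bigdata.drools.hadoop.kafka; // assumed package

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;

@SpringBootApplication
@EnableScheduling // required so that @Scheduled on KafkaMessageProducer actually fires
public class KafkaIntegrationApplication {
    public static void main(String[] args) {
        SpringApplication.run(KafkaIntegrationApplication.class, args);
    }
}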

4. Running the project prints log output like the following:

2020-04-20 15:04:39.323  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ------------------ message ={"id":1587366279318,"msg":"f5da04ec-a4f7-4bc3-b909-8a044839fbaf","sendTime":1587366279318}
Received message from subscribed topic topic_template: {"id":1587366281319,"msg":"c3177a1b-9645-415e-b376-c98119159339","sendTime":1587366281319}
2020-04-20 15:04:41.323  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ----------------- record =ConsumerRecord(topic = topic_template, partition = 1, leaderEpoch = 0, offset = 820, CreateTime = 1587366281319, serialized key size = -1, serialized value size = 90, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = {"id":1587366281319,"msg":"c3177a1b-9645-415e-b376-c98119159339","sendTime":1587366281319})
2020-04-20 15:04:41.324  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ------------------ message ={"id":1587366281319,"msg":"c3177a1b-9645-415e-b376-c98119159339","sendTime":1587366281319}
Received message from subscribed topic topic_template: {"id":1587366283320,"msg":"8314d756-48a1-4f96-bf17-c6d2560145d0","sendTime":1587366283320}
2020-04-20 15:04:43.324  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ----------------- record =ConsumerRecord(topic = topic_template, partition = 0, leaderEpoch = 0, offset = 820, CreateTime = 1587366283320, serialized key size = -1, serialized value size = 90, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = {"id":1587366283320,"msg":"8314d756-48a1-4f96-bf17-c6d2560145d0","sendTime":1587366283320})
2020-04-20 15:04:43.324  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ------------------ message ={"id":1587366283320,"msg":"8314d756-48a1-4f96-bf17-c6d2560145d0","sendTime":1587366283320}
Received message from subscribed topic topic_template: {"id":1587366285320,"msg":"bff540ac-e925-4aad-959f-a0c8672e9dbe","sendTime":1587366285320}
2020-04-20 15:04:45.324  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ----------------- record =ConsumerRecord(topic = topic_template, partition = 1, leaderEpoch = 0, offset = 821, CreateTime = 1587366285320, serialized key size = -1, serialized value size = 90, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = {"id":1587366285320,"msg":"bff540ac-e925-4aad-959f-a0c8672e9dbe","sendTime":1587366285320})
2020-04-20 15:04:45.324  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ------------------ message ={"id":1587366285320,"msg":"bff540ac-e925-4aad-959f-a0c8672e9dbe","sendTime":1587366285320}
Received message from subscribed topic topic_template: {"id":1587366287321,"msg":"d5a9ec88-15ae-45d5-a17f-a2483b76f6dd","sendTime":1587366287321}
2020-04-20 15:04:47.325  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ----------------- record =ConsumerRecord(topic = topic_template, partition = 0, leaderEpoch = 0, offset = 821, CreateTime = 1587366287321, serialized key size = -1, serialized value size = 90, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = {"id":1587366287321,"msg":"d5a9ec88-15ae-45d5-a17f-a2483b76f6dd","sendTime":1587366287321})
2020-04-20 15:04:47.325  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ------------------ message ={"id":1587366287321,"msg":"d5a9ec88-15ae-45d5-a17f-a2483b76f6dd","sendTime":1587366287321}
Received message from subscribed topic topic_template: {"id":1587366289321,"msg":"5688b0f5-5862-4b84-b3e1-132f11f3a568","sendTime":1587366289321}
2020-04-20 15:04:49.325  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ----------------- record =ConsumerRecord(topic = topic_template, partition = 1, leaderEpoch = 0, offset = 822, CreateTime = 1587366289321, serialized key size = -1, serialized value size = 90, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = {"id":1587366289321,"msg":"5688b0f5-5862-4b84-b3e1-132f11f3a568","sendTime":1587366289321})
2020-04-20 15:04:49.325  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ------------------ message ={"id":1587366289321,"msg":"5688b0f5-5862-4b84-b3e1-132f11f3a568","sendTime":1587366289321}
Received message from subscribed topic topic_template: {"id":1587366291322,"msg":"0174a72b-a5df-4805-b23f-3faac8a32e77","sendTime":1587366291322}
2020-04-20 15:04:51.326  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ----------------- record =ConsumerRecord(topic = topic_template, partition = 0, leaderEpoch = 0, offset = 822, CreateTime = 1587366291322, serialized key size = -1, serialized value size = 90, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = {"id":1587366291322,"msg":"0174a72b-a5df-4805-b23f-3faac8a32e77","sendTime":1587366291322})
2020-04-20 15:04:51.326  INFO 23764 --- [ntainer#0-0-C-1] o.j.d.y.b.d.h.k.c.KafkaMessageReceive    : ------------------ message ={"id":1587366291322,"msg":"0174a72b-a5df-4805-b23f-3faac8a32e77","sendTime":1587366291322}

Reposted from blog.csdn.net/u014635374/article/details/105636005