Integrating Kafka with Spring Boot for Message Reporting

Copyright notice: This is an original post by the author. Please credit the source when reposting: 浅然的专栏, https://blog.csdn.net/w_linux/article/details/84927972

I. Technology versions used in this post

Spring Boot: 1.5.9.RELEASE

ZooKeeper: zookeeper-3.4.5

Kafka: kafka_2.10-0.10.2.1


II. Integrating Kafka with Spring Boot

1. First, edit Kafka's server.properties

root@VM-0-3-ubuntu:/usr/local/kafka_2.10-0.10.2.1/config# vi server.properties

Find advertised.listeners in the config file and set it to your server's IP:

advertised.listeners=PLAINTEXT://<your-server-ip>:9092

Restart the ZooKeeper and Kafka services, then open a Kafka console producer and consumer.
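For reference, on this ZooKeeper/Kafka layout the restart and console commands look roughly like the following sketch (the paths, the placeholder IP, and the wingcloud topic are assumptions taken from the rest of this post):

# from the zookeeper-3.4.5 directory
bin/zkServer.sh restart

# from the kafka_2.10-0.10.2.1 directory
bin/kafka-server-start.sh config/server.properties

# console producer and consumer for watching the topic
bin/kafka-console-producer.sh --broker-list <your-server-ip>:9092 --topic wingcloud
bin/kafka-console-consumer.sh --bootstrap-server <your-server-ip>:9092 --topic wingcloud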

2. Integration code

Create a Spring Boot project and add the spring-kafka dependency (org.springframework.kafka:spring-kafka), which provides the KafkaTemplate used below.

The project layout is shown in the original post's screenshot; the kafka package under the test directory exercises Kafka produce and consume on its own and is not covered in this post.

application.properties

server.port=6097
spring.application.name=linjieadmin

#kafka
kafka.consumer.zookeeper.connect=<zookeeper-server-ip>:2181
kafka.consumer.servers=<kafka-server-ip>:9092
# auto-commit offsets after consuming
kafka.consumer.enable.auto.commit=true
kafka.consumer.session.timeout=6000
kafka.consumer.auto.commit.interval=100
# offset reset policy
kafka.consumer.auto.offset.reset=latest
kafka.consumer.topic=wingcloud
kafka.consumer.group.id=wingcloud
kafka.consumer.concurrency=10

kafka.producer.servers=<kafka-server-ip>:9092
kafka.producer.retries=0
kafka.producer.batch.size=4096
kafka.producer.linger=1
kafka.producer.buffer.memory=40960
DsInfoSJservice.java
package com.example.flinkdemo.controller;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;

@Controller
@RequestMapping("DsInfoSJservice")
public class DsInfoSJservice {
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @RequestMapping(value = "webInfoSJService",method = RequestMethod.POST)
    public void webInfoSJService(@RequestBody String jsonstr, HttpServletRequest request, HttpServletResponse response){
        System.out.println("hello" + jsonstr);
        // business logic starts: forward the raw JSON body to the wingcloud topic
        kafkaTemplate.send("wingcloud","key",jsonstr);

        // business logic ends

        PrintWriter printWriter = getWriter(response);
        response.setStatus(HttpStatus.OK.value());
        printWriter.write("success");
        closeprintwriter(printWriter);
    }

    // wrap the response's output stream in a PrintWriter, with JSON headers set
    private PrintWriter getWriter(HttpServletResponse response){
        response.setCharacterEncoding("utf-8");
        response.setContentType("application/json");
        OutputStream out = null;
        PrintWriter printWriter = null;
        try {
            out = response.getOutputStream();
            printWriter = new PrintWriter(out);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return printWriter;
    }

    private void closeprintwriter(PrintWriter printWriter){
        printWriter.flush();
        printWriter.close();
    }
}
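Note that kafkaTemplate.send() is asynchronous: it returns immediately, so the "success" response above does not mean the broker has acknowledged the record. With the spring-kafka 1.x line that pairs with Spring Boot 1.5.9, you can attach a callback to the returned ListenableFuture for delivery feedback; a minimal sketch using the same topic and key as above:

// additional imports:
// import org.springframework.kafka.support.SendResult;
// import org.springframework.util.concurrent.ListenableFuture;
// import org.springframework.util.concurrent.ListenableFutureCallback;

ListenableFuture<SendResult<String, String>> future =
        kafkaTemplate.send("wingcloud", "key", jsonstr);

future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
    @Override
    public void onSuccess(SendResult<String, String> result) {
        // the broker has assigned a partition and offset to the record
        System.out.println("sent: " + result.getRecordMetadata());
    }

    @Override
    public void onFailure(Throwable ex) {
        System.err.println("send failed: " + ex.getMessage());
    }
});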
KafkaProducerConfig.java
package com.example.flinkdemo.controller;


import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
@EnableKafka
public class KafkaProducerConfig {
    @Value("${kafka.producer.servers}")
    private String servers;
    @Value("${kafka.producer.retries}")
    private int retries;
    @Value("${kafka.producer.batch.size}")
    private int batchSize;
    @Value("${kafka.producer.linger}")
    private int linger;
    @Value("${kafka.producer.buffer.memory}")
    private int bufferMemory;


    // assemble the native Kafka producer settings injected from application.properties
    public Map<String,Object> producerConfigs(){
        Map<String,Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,servers);
        props.put(ProducerConfig.RETRIES_CONFIG,retries);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG,batchSize);
        props.put(ProducerConfig.LINGER_MS_CONFIG,linger);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG,bufferMemory);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,StringSerializer.class);
        return props;
    }

    // factory that creates the underlying Kafka producer instances
    public ProducerFactory<String,String> producerFactory(){
        return new DefaultKafkaProducerFactory<String, String>(producerConfigs());
    }

    // the only @Bean exposed: the template the controller injects to send messages
    @Bean
    public KafkaTemplate<String,String> kafkaTemplate(){
        return new KafkaTemplate<String, String>(producerFactory());
    }
}
DsClienttest.java
package com.example.flinkdemo;

import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;


public class DsClienttest {
    public static void main(String[] args){
        String message = "kafkatest";
        String address = "http://localhost:6097/DsInfoSJservice/webInfoSJService";
        try {
            URL url = new URL(address);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST"); // use POST
            conn.setDoInput(true);
            conn.setDoOutput(true);
            conn.setInstanceFollowRedirects(true); // follow redirects
            conn.setUseCaches(false); // disable caching
            conn.setReadTimeout(6 * 1000); // 6-second read timeout
            // put your own browser's User-Agent here
            conn.setRequestProperty("User-Agent", "<your browser's User-Agent>");
            conn.setRequestProperty("Content-Type", "application/json");
            conn.connect();
            OutputStream outputStream = conn.getOutputStream();
            BufferedOutputStream out = new BufferedOutputStream(outputStream);
            out.write(message.getBytes());
            out.flush();

            String temp = "";
            InputStream in = conn.getInputStream();
            byte[] tempbytes = new byte[1024];
            int len;
            // append only the bytes actually read on each pass
            while ((len = in.read(tempbytes, 0, 1024)) != -1) {
                temp += new String(tempbytes, 0, len);
            }
            in.close();
            System.out.println(conn.getResponseCode());
            System.out.println(temp);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }
}

That's it for the coding; these classes are all you need.

Start FlinkdemoApplication.java.
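The post does not show FlinkdemoApplication.java; presumably it is the stock Spring Boot entry point, something like this sketch:

package com.example.flinkdemo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// assumed sketch: not shown in the original post, but a standard Boot main class suffices
@SpringBootApplication
public class FlinkdemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(FlinkdemoApplication.class, args);
    }
}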

Run the main method in the DsClienttest class.

You should see 200 and success in its console, and the producer configuration logged in the FlinkdemoApplication.java console, which indicates the integration succeeded.

Finally, the Kafka console consumer terminal opened earlier should print: kafkatest
