Sending and receiving messages with Kafka

Producer configuration class:

@Configuration
@EnableKafka
public class KafkaProducerConfig {

    @Value("${kafkaConfig.address}")
    private String address;

    @Value("${kafkaConfig.batchSize}")
    private String batchSize;

    @Value("${kafkaConfig.linger}")
    private String linger;


    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, address);
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
        props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, memory);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<String, String>(producerFactory());
    }
}
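
The KafkaTemplate bean above sends asynchronously. If delivery confirmation is needed, the future returned by send() can be waited on; a minimal sketch (the class and method names are illustrative, and send() returns a Future-compatible result in both spring-kafka 2.x and 3.x):

import java.util.concurrent.TimeUnit;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

public class SyncSendExample {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public SyncSendExample(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    // Block until the broker acknowledges the record (or the timeout expires).
    public void sendAndWait(String topic, String message) throws Exception {
        SendResult<String, String> result =
                kafkaTemplate.send(topic, message).get(10, TimeUnit.SECONDS);
        System.out.println("Sent to partition " + result.getRecordMetadata().partition()
                + " at offset " + result.getRecordMetadata().offset());
    }
}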

Consumer configuration class:

@Configuration
public class KafkaConsumerConfig {

    @Value("${kafkaConfig.address}")
    private String address;

    @Value("${kafkaConfig.batchSize}")
    private String batchSize;

    @Value("${kafkaConfig.linger}")
    private String linger;

    @Value("${kafkaConfig.concurrency}")
    private String concurrency;

    @Value("${kafkaConfig.autoCommit}")
    private String autoCommit;

    @Value("${kafkaConfig.timeout}")
    private String timeout;

    @Value("${kafkaConfig.groupId}")
    private String groupId;

    @Value("${kafkaConfig.offsetReset}")
    private String offsetReset;

    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(Integer.valueOf(concurrency));
        factory.getContainerProperties().setPollTimeout(1500);
        return factory;
    }

    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    public Map<String, Object> consumerConfigs() {
        Map<String, Object> propsMap = new HashMap<>();
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, address);
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
        propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, commitInterval);
        propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, timeout);
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, offsetReset);
        return propsMap;
    }

    // RawDataListener is picked up by component scanning (@Component), so no explicit @Bean is needed here.
}
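
The factory above relies on the consumer's auto-commit settings (enable.auto.commit / auto.commit.interval.ms). If kafkaConfig.autoCommit is set to false, offsets can instead be committed manually; a sketch under that assumption, using a recent spring-kafka where AckMode is nested in ContainerProperties:

import org.apache.kafka.clients.consumer.ConsumerRecord;

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.support.Acknowledgment;

public class ManualAckSketch {

    // Apply this to the factory built in kafkaListenerContainerFactory()
    // when enable.auto.commit is false.
    static void enableManualAck(ConcurrentKafkaListenerContainerFactory<String, String> factory) {
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
    }

    // Listener variant that commits each offset explicitly after processing.
    @KafkaListener(topics = {"${kafkaConfig.topic}"})
    public void listen(ConsumerRecord<String, String> record, Acknowledgment ack) {
        // ... process record.value() ...
        ack.acknowledge(); // commit the offset of this record
    }
}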

Sending messages:

@Service("kafkaConfig")
public class KafkaProducer {

    @Value("${kafkaConfig.topic}")
    private String topic;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Send a message to the configured topic
    public void send(String message) {
        kafkaTemplate.send(topic,message);
    }
}
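
A hypothetical caller for the service above, e.g. a CommandLineRunner that fires a test message on startup (the runner class name and message text are illustrative only):

import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class KafkaSendRunner implements CommandLineRunner {

    private final KafkaProducer kafkaProducer;

    public KafkaSendRunner(KafkaProducer kafkaProducer) {
        this.kafkaProducer = kafkaProducer;
    }

    @Override
    public void run(String... args) {
        // Sends a test message to kafkaConfig.topic through the producer service above.
        kafkaProducer.send("hello kafka");
    }
}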

Receiving messages:

/**
 * Kafka listener
 *
 * @author King-Mouse
 */
@Component
public class RawDataListener {

    @Value("${kafkaConfig.topic}")
    private String topic;

    // Service whose methods are invoked from this listener
    @Resource(name = "flowDataServiceImpl")
    private FlowDataServiceImpl flowDataService;

    private static final Logger logger = LoggerFactory.getLogger(RawDataListener.class);

    /**
     * Consume Kafka data in real time (each message produced to the topic is automatically consumed)
     *
     * @param record
     * @throws IOException
     */
    @KafkaListener(topics = {"${kafkaConfig.topic}"})
    public void listen(ConsumerRecord<?, ?> record) throws IOException {
        String value = (String) record.value();
        logger.info("Received message: {}", value);
    }
}
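
The injected flowDataService is not used in the sample above; in practice the listener body would hand the payload (and, where useful, the record metadata) to it. A sketch of such a body, where saveFlowData is a hypothetical method name:

    @KafkaListener(topics = {"${kafkaConfig.topic}"})
    public void listen(ConsumerRecord<?, ?> record) throws IOException {
        String value = (String) record.value();
        logger.info("Received message from {}-{}@{}: {}",
                record.topic(), record.partition(), record.offset(), value);
        // Hypothetical hand-off to the injected service:
        // flowDataService.saveFlowData(value);
    }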

Configuration file (application.yml):

###### Kafka configuration ######
kafkaConfig:
  address: 127.0.0.1:9002,127.0.0.2:9003
  # whether offsets are committed automatically
  autoCommit: true
  # session timeout (ms)
  timeout: 20000
  # auto-commit interval (ms)
  commitInterval: 100
  # consumer group
  groupId: ivrKafKa
  # latest: consume only newly produced messages, do not replay from the beginning
  offsetReset: latest
  # number of consumer threads
  concurrency: 15
  # topic to consume
  topic: ivrFlowUpdate
  batchSize: 4096
  linger: 1
  memory: 40960
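
Note that concurrency: 15 only pays off if the topic has at least 15 partitions; listener threads beyond the partition count stay idle. A sketch that creates the topic with a matching partition count using the kafka-clients AdminClient (replication factor 1 is assumed for a test setup):

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class TopicSetup {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9002,127.0.0.2:9003");
        try (AdminClient admin = AdminClient.create(props)) {
            // 15 partitions so every listener thread can own one; replication factor 1.
            admin.createTopics(Collections.singletonList(new NewTopic("ivrFlowUpdate", 15, (short) 1)))
                 .all().get();
        }
    }
}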

Reposted from blog.csdn.net/qq_22331931/article/details/91463333