// Kafka producer example (kafka生产者实例)

package com.amarsoft.kafka;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import com.amarsoft.are.ARE;


/**
 * 默认的kafka生成者,用于往kafka中发送消息
 */
public class KafkaProducerAdapter {
	
	/**
	 * kafka生产者,用于向kafka中发送消息
	 */
	private static Producer<String, String> kafkaProducer;
	
	/**
	 * 同步锁
	 */
	private static final Object LOCK = new Object();
	
	/**
	 * 每个topic发送的消息计数器
	 */
	private final Map<String, Long> topicCounterMap = new HashMap<>();
	
	/**
	 * kafka生产者适配器(单例),用来代理kafka生产者发送消息
	 */
	private static  KafkaProducerAdapter kafkaProducerAdapter;
	
	private KafkaProducerAdapter() {
		initKafkaProducer();
	}
	
	public static KafkaProducerAdapter getInstance() {
		ARE.getLog().info("=======================");
		if (kafkaProducerAdapter == null) {
			synchronized (LOCK) {
				if (kafkaProducerAdapter == null) {
					kafkaProducerAdapter = new KafkaProducerAdapter();
				}
			}
		}
		
		return kafkaProducerAdapter;
	}
		
	public void send(String topic,String message) {
		Long key = generateKey(topic);
		ProducerRecord<String, String> record = new ProducerRecord<String, String>(topic, key.toString(), message);
		kafkaProducer.send(record,new DefaultCallbackImpl(record));
	}
	
	public void flush() {
		if (kafkaProducer != null) {
			kafkaProducer.flush();
		}
	}
	
	public void close() {
		if (kafkaProducer != null) {
			kafkaProducer.close();
		}
	}
	
	/**
	 * 根据kafka生产者配置信息初始化kafka消息生产者,只初始化一次
	 */
	private void initKafkaProducer() {
		Properties props = new Properties();
		try {
			props.put("bootstrap.servers", "127.0.0.1:9092");
			props.put("acks", "all");
			props.put("retries", 3);
			props.put("batch.size", 16384);
			props.put("linger.ms", 1);
			props.put("buffer.memory", 33554432);
			props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
			props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
			kafkaProducer = new KafkaProducer<>(props);
		}catch (Exception e) {
			ARE.getLog().error("根据配置文件[kafka.properties]初始化kafka生产者出现异常:"+e.getMessage(),e);
		}
	} 
	
	/**
	 * 生成待发送的消息的key
	 * @return message key
	 */
	private Long generateKey(String topic) {
		synchronized(LOCK) {
			Long counter = topicCounterMap.get(topic);
			if (counter == null || Long.MAX_VALUE - counter <= 10) {
				counter = 0L;
			}
			++counter;
			topicCounterMap.put(topic, counter);
			return counter;
		}
	}
	
	/**
	 * 默认的消息回调实现
	 */
	private class DefaultCallbackImpl implements Callback {

		private ProducerRecord<String, String> record;
		
		public DefaultCallbackImpl(ProducerRecord<String, String> record) {
			this.record = record;
		}
		
		@Override
		public void onCompletion(RecordMetadata recordMetadata, Exception e) {
			if (e == null) {
				ARE.getLog().info("消息[key="+record.key()+",value="+record.value()+"]发送成功."
						+ "消息[partition="+recordMetadata.partition()+",offset="+recordMetadata.offset()+"]");
			} else {
				ARE.getLog().error("消息[key="+record.key()+",value="+record.value()+"]发送失败."+e.getMessage(),e);
			}
		}
		
	}

	public static void main(String[] args) {
		KafkaProducerAdapter kafkaProducerAdapter = KafkaProducerAdapter.getInstance();
		String topic = "test_ss";
		String message = "yoyowon";
		long startTime = System.currentTimeMillis();
		for (int i = 0; i < 1; i++) {
			kafkaProducerAdapter.send(topic, message);
		}
		kafkaProducerAdapter.flush();
		long endTime = System.currentTimeMillis();
		System.out.println("发送10条消息共耗时:"+(endTime - startTime)+"毫秒");	
		kafkaProducerAdapter.close();
    }	
}

// Reposted from: blog.csdn.net/John_Kry/article/details/81902436