kafka实战

----------------------------------------------------------------------------------------------------
applicationContext-resources.xml

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans
       http://www.springframework.org/schema/beans/spring-beans.xsd">

    <!-- Load the kafka configuration file and expose its entries through
         PropertyConfigurer.getContextProperty(). The version-less XSD above
         resolves to the Spring version actually on the classpath (4.0.2 per
         the pom) instead of pinning the outdated 3.0 schema. -->
    <bean id="propertyConfigurer" class="com.agu3.kafka.base.PropertyConfigurer">
        <property name="locations">
            <list>
                  <value>classpath:/common-kafka.properties</value>
            </list>
        </property>
    </bean>

</beans>


----------------------------------------------------------------------------------------------------
package com.agu3.kafka.base;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;

/**
 * Property placeholder configurer that additionally exposes every resolved
 * property through a static lookup, so non-Spring-managed code (e.g.
 * {@code KafkaPropertyFactory}) can read configuration values.
 */
public class PropertyConfigurer extends PropertyPlaceholderConfigurer {

    // Snapshot of all resolved properties; replaced atomically on (re)load.
    private static Map<String, String> ctxPropertiesMap = new HashMap<String, String>();

    @Override
    protected void processProperties(
            ConfigurableListableBeanFactory beanFactoryToProcess, Properties props) throws BeansException {

        super.processProperties(beanFactoryToProcess, props);

        // Build the complete map first, then publish it with one assignment so
        // concurrent readers never observe a half-filled map.
        Map<String, String> snapshot = new HashMap<String, String>();
        // stringPropertyNames() also walks the defaults chain of Properties,
        // unlike keySet() which only sees the top-level Hashtable entries.
        for (String key : props.stringPropertyNames()) {
            snapshot.put(key, props.getProperty(key));
        }
        ctxPropertiesMap = snapshot;
    }

    /**
     * Returns the configured value for {@code name}, or {@code null} when the
     * key is absent or properties have not been loaded yet.
     */
    public static String getContextProperty(String name) {
        return ctxPropertiesMap.get(name);
    }

}

----------------------------------------------------------------------------------------------------
package com.agu3.kafka.base;

import java.util.Properties;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;

/**
 * Builds {@link Properties} objects for kafka producers and consumers from
 * the application-wide configuration loaded by {@link PropertyConfigurer}.
 * Keys in the property file are namespaced by a logical prefix, e.g.
 * {@code kafka_consumer_hq.zookeeper.connect}.
 *
 * @author fenglei.ma
 */
public class KafkaPropertyFactory {
    private static final Logger logger = Logger.getLogger(KafkaPropertyFactory.class);

    // Producer property key suffixes
    public static final String REQUEST_REQUIRED_ACKS = "request.required.acks";
    public static final String METADATA_BROKER_LIST = "metadata.broker.list";
    public static final String SERIALIZER_CLASS = "serializer.class";
    public static final String KEY_SERIALIZER_CLASS = "key.serializer.class";
    public static final String VALUE_SERIALIZER_CLASS = "value.serializer.class";

    // Consumer property key suffixes
    public static final String ZOOKEEPER_CONNECT = "zookeeper.connect";
    public static final String GROUP_ID = "group.id";
    public static final String ZOOKEEPER_SESSION_TIMEOUT_MS = "zookeeper.session.timeout.ms";
    public static final String ZOOKEEPER_SYNC_TIME_MS = "zookeeper.sync.time.ms";
    public static final String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms";
    public static final String AUTO_OFFSET_RESET = "auto.offset.reset";

    /**
     * Creates the {@link Properties} for a kafka consumer.
     *
     * @param kafkaKey the configuration prefix, e.g. {@code "kafka_consumer_hq"}
     * @return consumer properties (missing keys are logged and omitted)
     */
    public static Properties getConsumerProperty(String kafkaKey) {
        String[] propertyKeys = new String[]{
                ZOOKEEPER_CONNECT,
                GROUP_ID,
                ZOOKEEPER_SESSION_TIMEOUT_MS,
                ZOOKEEPER_SYNC_TIME_MS,
                AUTO_COMMIT_INTERVAL_MS,
                AUTO_OFFSET_RESET
        };
        return buildProperties(kafkaKey, propertyKeys);
    }

    /**
     * Creates the {@link Properties} for a kafka producer.
     *
     * @param kafkaKey the configuration prefix, e.g. {@code "kafka_product_hq"}
     * @return producer properties (missing keys are logged and omitted)
     */
    public static Properties getProductProperty(String kafkaKey) {
        String[] propertyKeys = new String[]{
                REQUEST_REQUIRED_ACKS,
                METADATA_BROKER_LIST,
                SERIALIZER_CLASS,
                KEY_SERIALIZER_CLASS,
                VALUE_SERIALIZER_CLASS
        };
        return buildProperties(kafkaKey, propertyKeys);
    }

    /** Looks up each {@code kafkaKey + "." + suffix} and collects the values found. */
    private static Properties buildProperties(String kafkaKey, String[] propertyKeys) {
        Properties prop = new Properties();
        for (String propertyKey : propertyKeys) {
            String oneKey = kafkaKey + "." + propertyKey;
            String value = PropertyConfigurer.getContextProperty(oneKey);
            if (StringUtils.isBlank(value)) {
                String msg = "kafka property key not found : " + oneKey;
                logger.error(msg, new Exception(msg));
                // Skip missing keys: Properties extends Hashtable, so
                // put(key, null) would throw NullPointerException here.
                continue;
            }
            prop.put(propertyKey, value);
        }
        return prop;
    }

}
----------------------------------------------------------------------------------------------------
package com.agu3.kafka.consumer;


import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.Decoder;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

import com.agu3.kafka.base.KafkaPropertyFactory;

/**
 * High-level kafka consumer wrapper: registers one {@link ConsumerListener}
 * per topic, creates the message streams, and runs each listener on its own
 * thread.
 *
 * @author fenglei.ma
 */
public class ConsumerAdaptor {
    
    // Connector built from the "<key>.*" consumer properties; closed by close().
    private ConsumerConnector consumer = null;
    // Listeners registered before start(); one per topic.
    private List<ConsumerListener> listeners = new ArrayList<ConsumerListener>();
    
    /**
     * @param key configuration prefix used to look up consumer properties,
     *            e.g. "kafka_consumer_hq"
     */
    public ConsumerAdaptor(String key){
        Properties prop = KafkaPropertyFactory.getConsumerProperty(key);
        ConsumerConfig config = new ConsumerConfig(prop);
        consumer = Consumer.createJavaConsumerConnector(config);
        
    }
    
    /** Registers a listener; must be called before start(). */
    public void addListener(ConsumerListener listener) {
        listeners.add(listener);
    }
    
    /**
     * Creates the streams, starts one thread per listener, then joins them.
     * NOTE(review): because of the join loop at the end, this method blocks
     * the calling thread until every listener thread exits.
     */
    public void start() throws Exception{
        Map<String,Integer> topicCountMap = new HashMap<String,Integer>();
        
        for(int i=0; i<listeners.size(); i++) {
            String topic = listeners.get(i).getTopic();
            // NOTE(review): the stream count is i+1, so the i-th registered
            // listener requests i+1 streams — the count depends on
            // registration order, which looks accidental. A constant 1 (or a
            // count matching the topic's partition layout) is the usual
            // pattern; also, two listeners on the same topic would silently
            // overwrite each other here. Confirm intent.
            topicCountMap.put(topic, i+1);
        }
        
        Decoder<String> keyDecode = new StringDecoder(new VerifiableProperties());
        Decoder<String> valueDecode = new StringDecoder(new VerifiableProperties());
        
        Map<String, List<KafkaStream<String, String>>>  topicStreams = 
                consumer.createMessageStreams(topicCountMap, keyDecode, valueDecode);
        
        List<Thread> threads = new ArrayList<Thread>();
        for(ConsumerListener listener : listeners) {
            String topic = listener.getTopic();
            // Hand the topic's streams to the listener before its thread runs.
            listener.setStreams(topicStreams.get(topic));
            Thread thread = new Thread(listener);
            thread.start();
            threads.add(thread);
            System.out.println(">>>>>>>>>>> start thread : " + topic);
        }
        
        // Wait for all listener threads; with a blocking iterator this
        // effectively never returns until the connector is shut down.
        for(Thread t : threads) {
            t.join();
        }
        
    }
    
    /**
     * Shuts down the underlying consumer connector, releasing its threads
     * and zookeeper registration.
     */
    public void close(){
        if(consumer != null){
            consumer.shutdown();
        }
    }
    
}

---------------------------------------------------------------------------------

package com.agu3.kafka.consumer;

import java.util.List;

import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.message.MessageAndMetadata;

import org.apache.log4j.Logger;

/**
 * Runnable that drains one or more kafka streams for a single topic and
 * dispatches every message to {@link #onMessage(String, String)}.
 */
public class ConsumerListener implements Runnable {
    private static final Logger logger = Logger.getLogger(ConsumerListener.class);
    
    // Topic this listener subscribes to.
    private String topic;
    // Streams assigned by ConsumerAdaptor before the listener thread starts.
    // NOTE(review): run() throws NullPointerException if setStreams() was
    // never called — confirm callers always wire streams first.
    private List<KafkaStream<String, String>> streams;
    
    public ConsumerListener(String topic){
        this.topic = topic;
    }

    // Iterates the assigned streams sequentially. NOTE(review): the kafka
    // iterator typically blocks in hasNext() waiting for new messages
    // (depending on consumer.timeout.ms), so this method normally never
    // advances past the first stream nor returns.
    public void run() {
        for(KafkaStream<String, String> stream : streams){
            ConsumerIterator<String, String> it = stream.iterator();
            while(it.hasNext()){
                MessageAndMetadata<String,String> meta = it.next();
                onMessage(meta.topic(),meta.message());
            }
        }
    }
    
    /**
     * Handles a single message; subclasses override this to add behavior.
     * The default implementation only logs the message.
     *
     * @param topic   topic the message was consumed from
     * @param message decoded message payload
     */
    public void onMessage(String topic, String message) {
        logger.info("消费  topic : " + topic + " message: " + message);
    }

    public String getTopic() {
        return topic;
    }

    public void setTopic(String topic) {
        this.topic = topic;
    }

    public List<KafkaStream<String, String>> getStreams() {
        return streams;
    }

    public void setStreams(List<KafkaStream<String, String>> streams) {
        this.streams = streams;
    }
    
}

----------------------------------------------------------------------------------------------------
package com.agu3.kafka.producer;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import kafka.producer.ProducerConfig;

import com.agu3.kafka.base.KafkaPropertyFactory;

/**
 * Static factory and cache for kafka message senders, keyed by the logical
 * configuration prefix used in the property file (e.g. "kafka_product_hq").
 *
 * @author fenglei.ma
 */
public class ProducerAdaptor {
    // Per-key caches; guarded by the class lock via the synchronized methods.
    private static Map<String, Properties> propMap = new HashMap<String, Properties>();
    private static Map<String, ProducerSender> producerMap = new HashMap<String, ProducerSender>();

    /** Loads (and caches) the producer properties for the given key. */
    private static synchronized Properties getProperty(String key) {
        Properties properties = propMap.get(key);
        if (properties == null) {
            properties = KafkaPropertyFactory.getProductProperty(key);
            propMap.put(key, properties);
        }
        return properties;
    }

    /**
     * Returns the cached sender for {@code key}, creating it on first use.
     * Synchronized so concurrent first calls cannot create duplicate
     * producers for the same key.
     *
     * @param key configuration prefix, e.g. "kafka_product_hq"
     */
    public static synchronized ProducerSender getProducer(String key) {
        ProducerSender producer = producerMap.get(key);
        if (producer == null) {
            Properties property = getProperty(key);
            ProducerConfig config = new ProducerConfig(property);
            producer = new ProducerSender(config);
            producerMap.put(key, producer);
        }
        return producer;
    }

}
---------------------------------------------------------------------------------
package com.agu3.kafka.producer;

import org.apache.log4j.Logger;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

/**
 * Thin convenience wrapper over the kafka producer that sends
 * String-keyed/String-valued messages to a topic.
 */
public class ProducerSender extends Producer<String, String> {
    private static final Logger logger = Logger.getLogger(ProducerSender.class);

    /** @param config producer configuration built from the property file */
    public ProducerSender(ProducerConfig config) {
        super(config);
    }
    
    /**
     * Sends one message to the given topic. The log line deliberately omits
     * the message body (only "...") to avoid logging payloads.
     *
     * @param topic   destination topic
     * @param message payload to send
     */
    public void sendMessage(String topic, String message) {
        logger.info("发送   topic:" + topic + "  message:...");
        send(new KeyedMessage<String, String>(topic, message));
    }

}
----------------------------------------------------------------------------------------------------
package kafka;

import org.apache.log4j.Logger;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Base class for tests: boots the Spring context that loads the kafka
 * properties, and provides a per-class logger.
 *
 * @author fenglei.ma 2016-09-29
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"/spring/applicationContext-resources.xml"})
public class BaseTest {
    // getClass() resolves to the concrete subclass, so each test logs under
    // its own name.
    public Logger logger = Logger.getLogger(getClass());

}
----------------------------------------------------------------
package kafka;

import org.junit.Test;

import com.agu3.kafka.consumer.ConsumerAdaptor;
import com.agu3.kafka.consumer.ConsumerListener;
import com.agu3.kafka.producer.ProducerAdaptor;
import com.agu3.kafka.producer.ProducerSender;

/**
 * Manual smoke tests for the producer and consumer adaptors. These talk to a
 * live kafka/zookeeper cluster configured in common-kafka.properties.
 *
 * @author fenglei.ma
 */
public class SendTest extends BaseTest{

    /**
     * Sends a few messages to two topics. The loop is bounded so the test
     * terminates — the original {@code while (true)} never returned and hung
     * the test runner.
     */
    @Test
    public void producer() throws Exception {
        ProducerSender producer = ProducerAdaptor.getProducer("kafka_product_hq");
        for (int i = 0; i < 5; i++) {
            try {
                Thread.sleep(1000 * 2);
            } catch (Exception e) {
                e.printStackTrace();
            }
            producer.sendMessage("test_topic_1", "www.baidu.com");
            producer.sendMessage("test_topic_2", "www.google.com");
        }
    }
    
    /**
     * Starts a consumer on two topics. NOTE: ConsumerAdaptor.start() joins
     * its listener threads and therefore blocks; the statements after it are
     * only reached once the listener threads exit.
     */
    @Test
    public void consumer() throws Exception {
        try{
            ConsumerAdaptor consumer = new ConsumerAdaptor("kafka_consumer_hq");
            consumer.addListener(new ConsumerListener("test_topic_1"));
            consumer.addListener(new ConsumerListener("test_topic_2"));
            consumer.start();
            System.out.println("aaa");
            
            Thread.sleep(3000);
            
            consumer.close();
            
        }catch(Exception e){
            e.printStackTrace();
        }
        
    }
}
----------------------------------------------------------------------------------------------------

common-kafka.properties 配置信息

##生产者
kafka_product_hq.request.required.acks=1
kafka_product_hq.metadata.broker.list=192.168.100.41:9092
kafka_product_hq.serializer.class=kafka.serializer.StringEncoder
kafka_product_hq.key.serializer.class=kafka.serializer.StringEncoder
kafka_product_hq.value.serializer.class=kafka.serializer.StringEncoder

##消费者
kafka_consumer_hq.zookeeper.connect=192.168.100.41:2181,192.168.100.42:2181,192.168.100.43:2181
kafka_consumer_hq.group.id=mytest
kafka_consumer_hq.zookeeper.session.timeout.ms=3000
kafka_consumer_hq.zookeeper.sync.time.ms=200
kafka_consumer_hq.auto.commit.interval.ms=1000
kafka_consumer_hq.auto.offset.reset=smallest


----------------------------------------------------------------------------------------------------
pom.xml 配置文件

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">


    <modelVersion>4.0.0</modelVersion>
    <groupId>kafka</groupId>
    <artifactId>kafka</artifactId>
    <packaging>war</packaging>
    <version>1.0.0</version>

    <name>kafka-</name>
    <url>http://maven.apache.org</url>


    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <!-- kafka client + broker-side classes (old 0.8 high-level consumer
             API). Both artifacts are kept on the same 0.8.2.2 patch release;
             the original mixed 0.8.2.2 clients with 0.8.2.1 core. -->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>0.8.2.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.10</artifactId>
            <version>0.8.2.2</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <dependency>
            <groupId>javax.servlet</groupId>
            <artifactId>javax.servlet-api</artifactId>
            <version>3.0.1</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.0</version>
        </dependency>

        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-beans</artifactId>
            <version>4.0.2.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-core</artifactId>
            <version>4.0.2.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
            <version>4.0.2.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-test</artifactId>
            <version>4.0.2.RELEASE</version>
            <scope>test</scope>
        </dependency>

    </dependencies>

    <build>
        <finalName>kafka</finalName>
    </build>

</project>

猜你喜欢

转载自www.cnblogs.com/xiaolei2017/p/9019341.html
今日推荐