Storm Development in Practice, Part 2

1. Connecting to Kafka

POM configuration:
<!-- Kafka connection -->
		<dependency>
			<groupId>com.youku.data</groupId>
			<artifactId>data-mq-common</artifactId>
			<version>0.9.3</version><!-- no need to upgrade to the latest version unless you use its new features -->
		</dependency>



// topicName: the message channel name, created by an administrator
    private static String TOPIC = "log_xm_uploadfr";

    // token: the user's token for accessing the channel; one token can read or write more than one channel
    private static String TOKEN = "ZgSSYUxqvH";

    // groupId: the consumer group name (one or more consumers belong to the same group)
    private static String GROUP = "lf_ct_uploadfr";

// Open a read channel to consume data from Kafka
        try {
            readChannel = MqSystem.getMqSystem().open(MqSystem.CHANNEL_MODE_READ, TOPIC, TOKEN, GROUP);
        } catch (MqException e) {
            LOG.error("failed to open Kafka read channel", e);
        }
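
The consume loop itself goes through the internal data-mq-common wrapper and is not shown in the post. Purely for orientation, roughly the same read path using the stock Kafka consumer client (not the wrapper) would look like the sketch below; the broker address is a placeholder, and the topic and group are the constants defined above.

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class PlainKafkaReader {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "broker1:9092");   // placeholder broker list
        props.put("group.id", "lf_ct_uploadfr");          // GROUP from above
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props)) {
            consumer.subscribe(Collections.singletonList("log_xm_uploadfr")); // TOPIC from above
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value()); // hand each message to the topology
                }
            }
        }
    }
}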


2. Connecting to HBase
POM configuration:
		<dependency>
			<groupId>com.youku.data</groupId>
			<artifactId>data-storm-common</artifactId>
			<version>1.0.0</version>
			<exclusions>
				<exclusion>
					<artifactId>hbase</artifactId>
					<groupId>org.apache.hbase</groupId>
				</exclusion>
			</exclusions>
		</dependency>

<!-- HBase -->
		<dependency>
			<groupId>org.apache.phoenix</groupId>
			<artifactId>phoenix</artifactId>
			<version>4.0.0-incubating-client</version>
		</dependency>
		<dependency>
			<groupId>com.youku.data</groupId>
			<artifactId>data-hbase-phoenix-common</artifactId>
			<version>1.0.0</version>
		</dependency>


Hosts configuration (/etc/hosts):
#hbase
10.106.25.11 a001.master.hbase.qingdao.youku
10.106.25.12 a001.region.hbase.qingdao.youku
10.106.25.13 a002.region.hbase.qingdao.youku
10.106.25.14 a003.region.hbase.qingdao.youku
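
TokenPhoenixWriter (used in the bolt below) comes from the internal data-hbase-phoenix-common wrapper. If you ever need to reach Phoenix without it, the standard route is plain JDBC against the HBase cluster's ZooKeeper quorum; a minimal sketch follows, where the quorum host is hypothetical:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class PhoenixSmokeTest {

    public static void main(String[] args) throws Exception {
        // Phoenix JDBC URL: jdbc:phoenix:<zookeeper quorum>; the host below is a placeholder
        String url = "jdbc:phoenix:zk001.hbase.qingdao.youku";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select count(*) from T_test")) {
            if (rs.next()) {
                System.out.println("rows in T_test: " + rs.getLong(1));
            }
        }
        // note: Phoenix connections default to autocommit off, so upserts
        // issued this way need an explicit conn.commit()
    }
}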


package com.laifeng.control.bolts;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;

import com.youku.data.store.SQLWriter;
import com.youku.data.store.token.TokenPhoenixWriter;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import backtype.storm.utils.Time;

/**
 * @author wangqiao
 * @date 2014-9-24 7:06:38 PM
 */
public class UploadFlowrateHbaseBolt extends BaseRichBolt {

    private static final long serialVersionUID = 3218981547130543201L;

    private static final Logger LOG = Logger.getLogger(UploadFlowrateHbaseBolt.class);

    // instance field (not static): each bolt task gets its own writer in prepare()
    private SQLWriter writer = null;

    private int timestamp = 0;

    private List<String> sqlList = new ArrayList<String>();

    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {

        // Connect to HBase: obtain a Phoenix writer for the given access token
        try {
            this.writer = TokenPhoenixWriter.getWriter("ADcjaS");
            timestamp = Time.currentTimeSecs();
        } catch (Exception e) {
            LOG.error("failed to get Phoenix writer", e);
        }

    }

    public void execute(Tuple input) {

        try {
            int timenow = Time.currentTimeSecs();
            // build the upsert statement and queue it in the batch
            // (string formatting is injection-prone; a PreparedStatement would be safer in production)
            sqlList.add(String
                    .format("upsert into T_test(minutecol,roomid,streamid,provinceid,asn,cdnid,city,ip,userid,blockcount,appid,type,duration,receiverip,logversion,ext,visittime) values('%s','%s','%s','%s','%s','%s','%s','%s',%s,%s,%s,'%s','%s','%s','%s','%s','%s')",
                            input.getStringByField("minutecol"), 
                            input.getStringByField("roomid"),
                            input.getStringByField("streamid"), 
                            input.getStringByField("provinceid"),
                            input.getStringByField("asn"), 
                            input.getStringByField("cdnid"),
                            input.getStringByField("city"), 
                            input.getStringByField("ip"),
                            input.getIntegerByField("userid"),
                            input.getIntegerByField("blockcount"),
                            input.getIntegerByField("appid"),
                            input.getStringByField("type"), 
                            input.getStringByField("duration"),
                            input.getStringByField("receiverip"),
                            input.getStringByField("logversion"),
                            input.getStringByField("ext"),
                            input.getStringByField("visittime")));
            // flush when the batch reaches 500 statements, or when at least
            // one statement has been waiting for 60 seconds
            if (sqlList.size() >= 500
                    || (timenow - timestamp >= 60 && sqlList.size() > 0)) {
                writer.writeBatchSQL(sqlList);
                // if close() tears down the underlying connection, the writer
                // should be re-obtained via getWriter() before the next batch
                writer.close();
                sqlList.clear();
                timestamp = timenow;
            }
        } catch (Exception e) {
            LOG.error("failed to write batch to HBase", e);
        }
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // terminal bolt: nothing is emitted downstream, so no fields to declare

    }

}
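
The bolt's upsert targets a Phoenix table T_test that the post never defines. A plausible DDL sketch, with column types inferred from the quoting in the format string above (quoted placeholders as VARCHAR, unquoted as INTEGER); the primary-key choice is an assumption:

create table if not exists T_test (
    minutecol  varchar not null,
    roomid     varchar not null,
    streamid   varchar not null,
    provinceid varchar,
    asn        varchar,
    cdnid      varchar,
    city       varchar,
    ip         varchar,
    userid     integer,
    blockcount integer,
    appid      integer,
    type       varchar,
    duration   varchar,
    receiverip varchar,
    logversion varchar,
    ext        varchar,
    visittime  varchar,
    constraint pk primary key (minutecol, roomid, streamid)
);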



Run mvn install, then pick up the jar with dependencies (the *-jar-with-dependencies.jar under target/) and submit it on the Storm cluster.
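
The jar-with-dependencies artifact is produced by the maven-assembly-plugin; if the POM does not configure it yet, the standard setup looks like this:

		<build>
			<plugins>
				<plugin>
					<groupId>org.apache.maven.plugins</groupId>
					<artifactId>maven-assembly-plugin</artifactId>
					<configuration>
						<descriptorRefs>
							<descriptorRef>jar-with-dependencies</descriptorRef>
						</descriptorRefs>
					</configuration>
					<executions>
						<execution>
							<phase>package</phase>
							<goals>
								<goal>single</goal>
							</goals>
						</execution>
					</executions>
				</plugin>
			</plugins>
		</build>

Note that the Storm dependency itself should usually be scoped as provided so the cluster's own Storm classes are not bundled into the jar.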

The commands to submit and kill the topology are:
storm jar /home/xm_wangqiao/stormtest-0.0.1-SNAPSHOT-jar-with-dependencies.jar com.laifeng.control.toplogy.UploadFlowrateToplogy 'lf-upload-flowrate'

storm kill 'lf-upload-flowrate'
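
The UploadFlowrateToplogy main class named in the submit command is not included in the post. A minimal wiring sketch follows; the nested UploadFlowrateSpout is a hypothetical placeholder for a spout that wraps the MqSystem read channel from section 1 and emits the fields the bolt reads by name:

package com.laifeng.control.toplogy;

import java.util.Map;

import backtype.storm.Config;
import backtype.storm.StormSubmitter;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;

import com.laifeng.control.bolts.UploadFlowrateHbaseBolt;

public class UploadFlowrateToplogy {

    // placeholder spout: in the real topology this would consume from the
    // MqSystem read channel, parse each message, and emit the 17 fields
    // that UploadFlowrateHbaseBolt reads by name
    public static class UploadFlowrateSpout extends BaseRichSpout {
        public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) { }
        public void nextTuple() { }
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("minutecol", "roomid", "streamid",
                    "provinceid", "asn", "cdnid", "city", "ip", "userid",
                    "blockcount", "appid", "type", "duration", "receiverip",
                    "logversion", "ext", "visittime"));
        }
    }

    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("kafka-spout", new UploadFlowrateSpout(), 2);
        builder.setBolt("hbase-bolt", new UploadFlowrateHbaseBolt(), 4)
                .shuffleGrouping("kafka-spout");

        Config conf = new Config();
        conf.setNumWorkers(2);
        // args[0] is the topology name, e.g. 'lf-upload-flowrate'
        StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
    }
}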


Reposted from wangqiaowqo.iteye.com/blog/2119658