Integrating Storm with HBase

Store the results of a Storm streaming computation in HBase, using WordCount as an example.

Preparation

1. Start the clusters
  • Start the ZooKeeper cluster
$> zkServer.sh start
  • Start the Hadoop cluster (HBase depends on HDFS, so make sure the Hadoop cluster is up and the NameNode is in the active state before starting HBase)
$> start-dfs.sh
  • Start the HBase cluster
$> start-hbase.sh
  • Start the Storm cluster
// start the master (Nimbus)
$> storm nimbus

// start a slave (Supervisor)
$> storm supervisor
  • Create a table named wordcount in the ns1 namespace, with column family f1
$hbase shell> create 'ns1:wordcount','f1'
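If the create fails because the ns1 namespace does not exist yet, create the namespace first and retry (standard HBase shell command):
$hbase shell> create_namespace 'ns1'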
2. Import the Maven dependencies (see the note on storm-core after the listing)
<dependency>
    <groupId>org.apache.storm</groupId>
    <artifactId>storm-hbase</artifactId>
    <version>1.1.3</version>
</dependency>
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>1.2.6</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.7.6</version>
</dependency>
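The listing above pulls in the HBase integration and client libraries but omits Storm itself, which the spout, bolts, and driver below compile against. A minimal addition, assuming the same 1.1.3 release line as storm-hbase:
<dependency>
    <groupId>org.apache.storm</groupId>
    <artifactId>storm-core</artifactId>
    <version>1.1.3</version>
    <!-- mark as provided when submitting to a real cluster, since the
         Storm daemons already supply these classes at runtime -->
</dependency>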
3. Import the configuration files

Copy hbase-site.xml and hdfs-site.xml into the resources directory so that the HBase client can find the cluster from the classpath.

Implementation

The source Spout
public class WordCountSpout implements IRichSpout {
    private TopologyContext context;
    private SpoutOutputCollector collector;

    private List<String> states;    // sample sentences to emit

    private Random r = new Random();

    private int index = 0;          // number of tuples emitted so far

    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.context = context;
        this.collector = collector;
        states = new ArrayList<String>();
        states.add("hello world tom");
        states.add("hello world tomas");
        states.add("hello world tomasLee");
        states.add("hello world tomson");
    }

    public void close() {

    }

    public void activate() {

    }

    public void deactivate() {

    }

    public void nextTuple() {
        // Emit only three random sentences for this demo, then stay idle
        if (index < 3) {
            String line = states.get(r.nextInt(states.size()));
            collector.emit(new Values(line));
            index++;
        }
    }

    public void ack(Object msgId) {

    }

    public void fail(Object msgId) {

    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("line"));
    }

    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
The split Bolt
public class SplitBolt implements IRichBolt {

    private TopologyContext context ;
    private OutputCollector collector ;

    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.context = context ;
        this.collector = collector ;
    }

    public void execute(Tuple tuple) {
        // Split each line into words and emit (word, 1) pairs
        String line = tuple.getString(0);
        String[] arr = line.split(" ");
        for (String s : arr) {
            collector.emit(new Values(s, 1));
        }
        collector.ack(tuple);
    }

    public void cleanup() {

    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }

    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
HbaseBolt: writes the results to HBase
public class HbaseBolt implements IRichBolt {

    private Connection connection;
    private Table table;

    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        try {
            // Reads hbase-site.xml from the classpath (the resources directory)
            Configuration conf = HBaseConfiguration.create();
            connection = ConnectionFactory.createConnection(conf);
            TableName tableName = TableName.valueOf("ns1:wordcount");
            table = connection.getTable(tableName);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void execute(Tuple tuple) {
        // extract the word
        String word = tuple.getString(0);
        // extract its count
        Integer count = tuple.getInteger(1);

        // use an HBase counter to accumulate the per-word total
        byte[] rowKey = Bytes.toBytes(word);
        byte[] f = Bytes.toBytes("f1");
        byte[] c = Bytes.toBytes("count");
        try {
            table.incrementColumnValue(rowKey, f, c, count);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void cleanup() {
        // Release HBase resources when the bolt shuts down
        try {
            if (table != null) table.close();
            if (connection != null) connection.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {

    }

    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
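As an aside, the storm-hbase dependency declared earlier already provides a ready-made counter-based writer, org.apache.storm.hbase.bolt.HBaseBolt, which could stand in for the hand-written bolt above. A minimal sketch, assuming the same table, column family, and stream fields; the HBase client settings travel through the topology configuration under a key of your choosing:
// Map the "word" field to the row key and treat "count" as an HBase
// counter column in family f1, mirroring incrementColumnValue above.
SimpleHBaseMapper mapper = new SimpleHBaseMapper()
        .withRowKeyField("word")
        .withCounterFields(new Fields("count"))
        .withColumnFamily("f1");

HBaseBolt hbaseBolt = new HBaseBolt("ns1:wordcount", mapper)
        .withConfigKey("hbase.conf");

// The bolt looks up its HBase configuration map under that key.
Config conf = new Config();
conf.put("hbase.conf", new HashMap<String, Object>());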
WCApp
public class WCApp {
    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("wcspout", new WordCountSpout()).setNumTasks(1);
        builder.setBolt("split-bolt", new SplitBolt(), 2).shuffleGrouping("wcspout").setNumTasks(2);
        builder.setBolt("hbase-bolt", new HbaseBolt(), 2).fieldsGrouping("split-bolt", new Fields("word")).setNumTasks(2);

        Config conf = new Config();
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("wc", conf, builder.createTopology());
    }
}
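Note that LocalCluster executes the topology inside the current JVM; the Nimbus and Supervisor daemons started earlier are not involved. To deploy to that cluster instead, a minimal sketch using org.apache.storm.StormSubmitter (the topology jar must be packaged and launched with `storm jar`):
Config conf = new Config();
conf.setNumWorkers(2); // worker JVMs to allocate across the supervisors
// Submits the topology to the Nimbus configured in storm.yaml
StormSubmitter.submitTopology("wc", conf, builder.createTopology());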


Reprinted from blog.csdn.net/king123456man/article/details/82969724