Reading data from an HBase table and writing it to HDFS
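
For context: the mapper below expects a table named myuser with two column families, f1 (holding name and an int-encoded id) and f2 (holding address and sex). That schema and the sample row here are assumptions inferred from the mapper code, not part of the original post; this is a minimal sketch, using the HBase 2.x client API, to create matching test data:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.util.Bytes;

    // Hypothetical helper class, not from the original post.
    public class CreateMyUser {
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            conf.set("hbase.zookeeper.quorum", "hadoop01:2181,hadoop02:2181,hadoop03:2181");
            try (Connection conn = ConnectionFactory.createConnection(conf);
                 Admin admin = conn.getAdmin()) {
                // Create 'myuser' with the two column families the mapper reads.
                TableName name = TableName.valueOf("myuser");
                if (!admin.tableExists(name)) {
                    admin.createTable(TableDescriptorBuilder.newBuilder(name)
                            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
                            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f2"))
                            .build());
                }
                // One sample row; note that id is stored as a 4-byte int,
                // which is what Bytes.toInt() in the mapper expects.
                try (Table table = conn.getTable(name)) {
                    Put put = new Put(Bytes.toBytes("0001"));
                    put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"), Bytes.toBytes("zhangsan"));
                    put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("id"), Bytes.toBytes(1));
                    put.addColumn(Bytes.toBytes("f2"), Bytes.toBytes("address"), Bytes.toBytes("beijing"));
                    put.addColumn(Bytes.toBytes("f2"), Bytes.toBytes("sex"), Bytes.toBytes("male"));
                    table.put(put);
                }
            }
        }
    }
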
1. Mapper

    public static class mapHdfs extends TableMapper<Text, Text> {

        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            // The row key arrives as raw bytes; turn it into a readable string.
            String rowkey = Bytes.toString(key.get());
            String address = "";
            String sex = "";
            String name = "";
            int id = 0;

            // Walk every cell in the row and pick out the four columns of interest.
            for (Cell cell : value.rawCells()) {
                String family = Bytes.toString(CellUtil.cloneFamily(cell));
                String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
                if ("f2".equals(family)) {
                    if ("address".equals(qualifier)) {
                        address = Bytes.toString(CellUtil.cloneValue(cell));
                    }
                    if ("sex".equals(qualifier)) {
                        sex = Bytes.toString(CellUtil.cloneValue(cell));
                    }
                }
                if ("f1".equals(family)) {
                    if ("name".equals(qualifier)) {
                        name = Bytes.toString(CellUtil.cloneValue(cell));
                    }
                    if ("id".equals(qualifier)) {
                        // Assumes the id column was stored as a 4-byte int (Bytes.toBytes(int)).
                        id = Bytes.toInt(CellUtil.cloneValue(cell));
                    }
                }
            }
            context.write(new Text(rowkey),
                    new Text("address--->" + address + "\t" + "sex--->" + sex + "\t" + "id-->" + id + "\t" + "name--->" + name));
        }
    }
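
For a hypothetical row with rowkey 0001, name zhangsan, id 1, address beijing, and sex male (sample values, not from the original post), the mapper would emit a pair that TextOutputFormat later renders as one tab-separated line:

    0001	address--->beijing	sex--->male	id-->1	name--->zhangsan
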

2. Reducer

    public static class hdfsReduce extends Reducer<Text, Text, Text, Text> {

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // Identity reducer: every (rowkey, formatted-columns) pair is written out unchanged.
            for (Text value : values) {
                context.write(key, value);
            }
        }
    }
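
Since this reducer only passes pairs through, the shuffle phase can be skipped entirely by making the job map-only. A possible tweak to the driver shown in the next section (a sketch, not in the original post): drop setReducerClass and instead set

        // Zero reducers: each mapper's output goes straight to the output format on HDFS.
        job.setNumReduceTasks(0);

The mapper's output types (Text, Text) already match the job's output types, so the result files are the same, just produced without a sort-and-shuffle step.
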

3. Driver class

// Imports for the whole file (mapper, reducer, and driver above).
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class HbaseMrHdfs extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(super.getConf(), "job");
        job.setJarByClass(HbaseMrHdfs.class);

        // Full-table scan; initTableMapperJob wires the HBase table up as the job input.
        Scan scan = new Scan();
        TableMapReduceUtil.initTableMapperJob(TableName.valueOf("myuser"), scan, mapHdfs.class, Text.class, Text.class, job);

        job.setReducerClass(hdfsReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Write the results as plain text files on HDFS.
        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job, new Path("hdfs://hadoop01:8020/hbase/input_001"));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        // Point the HBase client at the ZooKeeper quorum serving the cluster.
        configuration.set("hbase.zookeeper.quorum", "hadoop01:2181,hadoop02:2181,hadoop03:2181");
        int run = ToolRunner.run(configuration, new HbaseMrHdfs(), args);
        System.exit(run);
    }
}
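
The driver scans every column of every row, but the mapper only reads four columns. If the table is wide, the Scan can be narrowed before initTableMapperJob is called, so less data crosses the wire. A sketch of what the two Scan lines in run() could become (column names as assumed earlier; Bytes is already imported above):

        Scan scan = new Scan();
        // Only fetch the columns the mapper actually uses.
        scan.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
        scan.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("id"));
        scan.addColumn(Bytes.toBytes("f2"), Bytes.toBytes("address"));
        scan.addColumn(Bytes.toBytes("f2"), Bytes.toBytes("sex"));
        // Common tuning for MapReduce scans: batch rows per RPC, skip the block cache.
        scan.setCaching(500);
        scan.setCacheBlocks(false);
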

Result:

(Screenshot of the HDFS output file in the original post.)

Source: blog.csdn.net/qq_45765882/article/details/103705897