Running an HBase Program in Eclipse on Linux

This article is a WordCount example that combines MapReduce with HBase. First, upload two files to HDFS (e.g. with hadoop fs -put): 01.txt, containing "hello world bye world", and 02.txt, containing "hello hadoop bye hadoop". Then create a conf folder under the HBase project, copy the HBase cluster's configuration file hbase-site.xml into it, and in the project properties go to Libraries -> Add Class Folder and select the newly created conf directory.

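Before wiring HBase into the MapReduce job, it is worth confirming that the client really picks up hbase-site.xml from that conf class folder. Here is a minimal connectivity check (not part of the original example; the class name CheckHBase is just illustrative), using the same old-style client API as the code below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CheckHBase {
    public static void main(String[] args) throws Exception {
        // Loads hbase-default.xml and hbase-site.xml from the classpath,
        // i.e. from the conf class folder added above.
        Configuration conf = HBaseConfiguration.create();
        // Throws an exception if the HBase master cannot be reached.
        HBaseAdmin.checkHBaseAvailable(conf);
        System.out.println("HBase cluster is reachable");
    }
}

If the ZooKeeper quorum configured in hbase-site.xml is wrong or unreachable, checkHBaseAvailable throws instead of printing, which catches classpath mistakes before any job is submitted.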

Below is the test code, WordCountHBase.java:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class WordCountHBase {
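    // Mapper: splits each input line into words and emits (word, 1) for every token.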
    public static class Map extends Mapper<LongWritable,Text,Text,IntWritable>{
        private IntWritable i = new IntWritable(1);
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split on whitespace; "\\s+" also tolerates runs of spaces between words.
            String[] words = value.toString().trim().split("\\s+");
            for (String w : words) {
                context.write(new Text(w), i);
            }
        }
    }
    
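    // Reducer: sums the 1s for each word and writes the total into HBase as a Put.
    // With TableReducer the output value is always a Mutation, so only the output
    // key type (NullWritable here) is declared.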
    public static class Reduce extends TableReducer<Text,IntWritable,NullWritable>{
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for(IntWritable i:values){
                sum += i.get();                
            }
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.add(Bytes.toBytes("content"),Bytes.toBytes("count"),Bytes.toBytes(String.valueOf(sum)));
            context.write(NullWritable.get(),put);
        }
    }
    
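    // Drops the table if it already exists, then creates it with a single
    // column family named "content".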
    public static void createHBaseTable(String tablename) throws IOException{
        HTableDescriptor htd = new HTableDescriptor(tablename);
        HColumnDescriptor col = new HColumnDescriptor("content");
        htd.addFamily(col);
        // HBaseConfiguration.create() picks up hbase-site.xml from the classpath;
        // it replaces the deprecated new HBaseConfiguration() constructor.
        Configuration config = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(config);
        if(admin.tableExists(tablename)){
            System.out.println("table exists, recreating it!");
            admin.disableTable(tablename);
            admin.deleteTable(tablename);
        }
        System.out.println("create new table: " + tablename);
        admin.createTable(htd);
    }
    
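    // Driver: reads text from HDFS with TextInputFormat and writes the reducer's
    // Puts into the "wordcount" table through TableOutputFormat.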
    public static void main(String args[]) throws Exception{
        if(args.length != 1){
            System.err.println("Usage: WordCountHBase <input path>");
            System.exit(2);
        }
        String tablename = "wordcount";
        // An HBase-aware Configuration carries the ZooKeeper quorum from hbase-site.xml.
        Configuration conf = HBaseConfiguration.create();
        conf.set(TableOutputFormat.OUTPUT_TABLE, tablename);
        createHBaseTable(tablename);
        String input = args[0];
        Job job = new Job(conf, "WordCount table with " + input);
        job.setJarByClass(WordCountHBase.class);
        job.setNumReduceTasks(3);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TableOutputFormat.class);
        
        FileInputFormat.addInputPath(job,new Path(input));
        System.exit(job.waitForCompletion(true)?0:1);
    }

}
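
After the job completes, every word becomes a row key in the wordcount table, with its count stored as a string in the content:count column. Here is a quick verification sketch (not from the original post; ScanWordCount is an illustrative name) using the same old HTable client API:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanWordCount {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "wordcount");
        ResultScanner scanner = table.getScanner(new Scan());
        for (Result r : scanner) {
            // The row key is the word; content:count holds the count as a string.
            String word = Bytes.toString(r.getRow());
            String count = Bytes.toString(
                    r.getValue(Bytes.toBytes("content"), Bytes.toBytes("count")));
            System.out.println(word + " = " + count);
        }
        scanner.close();
        table.close();
    }
}

With the two input files above, this should print bye = 2, hadoop = 2, hello = 2 and world = 2, since HBase returns rows in lexicographic row-key order. The same check works in the HBase shell with: scan 'wordcount'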


For more details, see: http://www.aboutyun.com/thread-7149-1-1.html

Reposted from blog.csdn.net/xiaoyu_2011/article/details/46741755