Loading data from HBase to HDFS

The following map-only MapReduce job scans the HBase table testtable with a TableMapper and writes every cell value out as a line of text on HDFS; the output directory is taken from the single command-line argument.

    package example2;  
      
    import java.io.IOException;  
      
    import org.apache.hadoop.conf.Configuration;  
    import org.apache.hadoop.fs.Path;  
    import org.apache.hadoop.hbase.HBaseConfiguration;  
    import org.apache.hadoop.hbase.KeyValue;  
    import org.apache.hadoop.hbase.client.Result;  
    import org.apache.hadoop.hbase.client.Scan;  
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;  
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;  
    import org.apache.hadoop.hbase.mapreduce.TableMapper;  
    import org.apache.hadoop.hbase.util.Bytes;  
    import org.apache.hadoop.io.IntWritable;  
    import org.apache.hadoop.io.Text;  
    import org.apache.hadoop.mapreduce.Job;  
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;  
    import org.apache.hadoop.util.GenericOptionsParser;  
      
    // Map-only job: export all cell values of an HBase table to text files on HDFS.
    public class FromHBaseToHDFSExample {  
          
        public static class HBaseMapper extends TableMapper<IntWritable, Text> {

            @Override
            protected void map(ImmutableBytesWritable key, Result value,
                    Context context) throws IOException, InterruptedException {
                // Iterate over every KeyValue (cell) in the row and emit its value as one line of text.
                for (KeyValue kv : value.raw()) {
                    // The key is unused: with zero reduce tasks the record goes straight to
                    // TextOutputFormat, which omits a null key and writes only the value.
                    context.write(null, new Text(Bytes.toString(kv.getValue())));
                }
            }

        }
      
        /**
         * @param args args[0] is the HDFS output directory
         */
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            String[] argArray = new GenericOptionsParser(conf, args).getRemainingArgs();
            if (argArray.length != 1) {
                System.err.println("Usage: FromHBaseToHDFSExample <output path>");
                System.exit(1);
            }
            Job job = new Job(conf, "import hbase to hdfs");
            job.setJarByClass(FromHBaseToHDFSExample.class);
            // Use a full-table Scan of "testtable" as the input and HBaseMapper as the mapper.
            TableMapReduceUtil.initTableMapperJob("testtable", new Scan(), HBaseMapper.class,
                    IntWritable.class, Text.class, job);
            FileOutputFormat.setOutputPath(job, new Path(argArray[0]));
            // Map-only job: no reduce phase, mapper output is written directly to HDFS.
            job.setNumReduceTasks(0);
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
      
    } 
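
The Scan passed to TableMapReduceUtil.initTableMapperJob can also be narrowed and tuned before the job is submitted. Below is a minimal sketch of that idea using the same era of HBase client API as the listing above; the helper class ScanTuningSketch, its method initTunedMapperJob, the column family name "cf1", and the caching value 500 are illustrative assumptions, not part of the original example.

    package example2;

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;

    // Sketch only: configure a tuned Scan and wire it into the table mapper job.
    public class ScanTuningSketch {

        public static void initTunedMapperJob(Job job) throws Exception {
            Scan scan = new Scan();
            scan.addFamily(Bytes.toBytes("cf1"));   // read a single column family ("cf1" is an assumed name)
            scan.setCaching(500);                   // rows fetched per RPC; 500 is only an example value
            scan.setCacheBlocks(false);             // avoid filling the block cache during a full table scan
            TableMapReduceUtil.initTableMapperJob("testtable", scan,
                    FromHBaseToHDFSExample.HBaseMapper.class,
                    IntWritable.class, Text.class, job);
        }
    }

Calling initTunedMapperJob(job) in place of the initTableMapperJob line in main() would keep the rest of the job setup unchanged while reducing the amount of data read per row and the pressure on the region servers' block cache.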


Reposted from liyonghui160com.iteye.com/blog/2176641