Transferring data between HBase and HDFS

https://www.cnblogs.com/dongdone/p/5687786.html
###### Exporting HBase table data to HDFS (map-only job, no reduce)
A TableMapper reads each row of the HBase table and emits it as a tab-separated text line; since no aggregation is needed, the number of reduce tasks is set to 0.
package com.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class HbaseToHdfs {
	public static void main(String[] args) {
		Configuration conf = HBaseConfiguration.create();
		try {
			Job job = Job.getInstance(conf, HbaseToHdfs.class.getSimpleName());
			job.setJarByClass(HbaseToHdfs.class);
			job.setMapperClass(HbaseMapper.class);
			job.setNumReduceTasks(0); // map-only job: skip the reduce phase entirely
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(Text.class);
			// Scan all of table "t1" and hand each row to HbaseMapper
			TableMapReduceUtil.initTableMapperJob(Bytes.toBytes("t1"), new Scan(), HbaseMapper.class, Text.class, Text.class, job);
			job.setOutputFormatClass(TextOutputFormat.class);
			FileOutputFormat.setOutputPath(job, new Path("hdfs://192.168.17.128:9000/t1"));
			job.waitForCompletion(true);
		} catch (IOException | ClassNotFoundException | InterruptedException e) {
			e.printStackTrace();
		}
	}
	public static class HbaseMapper extends TableMapper<Text, Text> {
		private Text outkey = new Text();
		private Text outvalue = new Text();

		// The ImmutableBytesWritable key is the rowkey; the Result holds the row's cells
		@Override
		protected void map(ImmutableBytesWritable key, Result value,
				Mapper<ImmutableBytesWritable, Result, Text, Text>.Context context)
				throws IOException, InterruptedException {
			byte[] name = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("name"));
			byte[] age = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("age"));
			byte[] gender = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("gender"));
			byte[] birthday = value.getValue(Bytes.toBytes("f1"), Bytes.toBytes("birthday"));
			outkey.set(key.get());
			// Write "NULL" for any missing column so every line keeps all four fields
			String temp = ((name == null || name.length == 0) ? "NULL" : new String(name)) + "\t"
					+ ((age == null || age.length == 0) ? "NULL" : new String(age)) + "\t"
					+ ((gender == null || gender.length == 0) ? "NULL" : new String(gender)) + "\t"
					+ ((birthday == null || birthday.length == 0) ? "NULL" : new String(birthday));
			outvalue.set(temp);
			context.write(outkey, outvalue);
		}
	}
}
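To run the export, the source table t1 with column family f1 must already exist, and the HDFS output directory must not. One way to submit the packaged jar (assuming a standard Hadoop/HBase installation) is hadoop jar with the HBase client jars and hbase-site.xml on the classpath, for example via HADOOP_CLASSPATH=$(hbase classpath); the initTableMapperJob call also tries to ship the required HBase dependency jars with the job.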
###### Importing data from HDFS into HBase
https://blog.csdn.net/qq_26091271/article/details/52586953
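The linked article covers the reverse direction. For completeness, here is a minimal sketch of that import (not the linked article's exact code): a map-only job that parses the tab-separated lines produced by the export above and writes them back as Puts through TableOutputFormat. The table name t1, family f1, input path, and column layout are carried over from the export job; Put.addColumn assumes HBase 1.x or later.

package com.test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class HdfsToHbase {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		Job job = Job.getInstance(conf, HdfsToHbase.class.getSimpleName());
		job.setJarByClass(HdfsToHbase.class);
		job.setMapperClass(HdfsMapper.class);
		job.setNumReduceTasks(0); // map-only, as in the export job above
		FileInputFormat.setInputPaths(job, new Path("hdfs://192.168.17.128:9000/t1"));
		// Passing null as the reducer wires up TableOutputFormat against table "t1"
		TableMapReduceUtil.initTableReducerJob("t1", null, job);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

	public static class HdfsMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Expect the export's layout: rowkey \t name \t age \t gender \t birthday
			String[] fields = value.toString().split("\t");
			if (fields.length < 5) {
				return; // skip malformed lines
			}
			byte[] rowkey = Bytes.toBytes(fields[0]);
			Put put = new Put(rowkey);
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"), Bytes.toBytes(fields[1]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age"), Bytes.toBytes(fields[2]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("gender"), Bytes.toBytes(fields[3]));
			put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("birthday"), Bytes.toBytes(fields[4]));
			context.write(new ImmutableBytesWritable(rowkey), put);
		}
	}
}

Note that the export writes the literal string NULL for missing columns; this sketch stores those back verbatim rather than skipping them, which may or may not be what you want.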

Reposted from blog.csdn.net/qq_41776004/article/details/83182840