Ready to work
- Create a file called fruit.tsv in the /input HDFS directory, as follows:
- Create a table named fruit in HBase as follows:
Creating Mapper
/**
 * Mapper that converts one tab-separated line of fruit.tsv into an HBase {@link Put}.
 *
 * <p>Expected line format: {@code rowKey \t name \t color}. Both the name and
 * color values are written to the {@code info} column family. The output key is
 * the row key wrapped in an {@link ImmutableBytesWritable}.
 */
public class FruitMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString();
        // Skip blank lines up front; the original code would have thrown an
        // ArrayIndexOutOfBoundsException on any line with fewer than 3 fields,
        // failing the entire task for one bad input line.
        if (line.isEmpty()) {
            return;
        }
        String[] fields = line.split("\t");
        if (fields.length < 3) {
            return; // malformed record: silently skip rather than kill the job
        }
        // Build the row-key bytes once and reuse them for both the output key
        // and the Put (the original converted the same string twice).
        byte[] rowKey = Bytes.toBytes(fields[0]);
        Put put = new Put(rowKey);
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(fields[1]));
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("color"), Bytes.toBytes(fields[2]));
        context.write(new ImmutableBytesWritable(rowKey), put);
    }
}
Creating Reducer
/**
 * Identity reducer: forwards every {@link Put} emitted by the mapper straight
 * through to the HBase table configured via {@code TableMapReduceUtil}.
 *
 * <p>The output key is irrelevant to {@code TableOutputFormat}, so
 * {@link NullWritable} is used.
 */
public class FruitReducer extends TableReducer<ImmutableBytesWritable, Put, NullWritable> {

    @Override
    protected void reduce(ImmutableBytesWritable key, Iterable<Put> values, Context context)
            throws IOException, InterruptedException {
        // Pass each mutation through unchanged; no aggregation is performed.
        for (Put mutation : values) {
            context.write(NullWritable.get(), mutation);
        }
    }
}
Creating Runner
/**
 * Driver for the fruit-import MapReduce job: reads a TSV file from HDFS and
 * writes its rows into the HBase {@code fruit} table via {@link FruitMapper}
 * and {@link FruitReducer}.
 *
 * <p>Usage: {@code FruitRunner [inputPath]}. When no argument is supplied, the
 * default path {@code hdfs://hcmaster:8020/input/fruit.tsv} is used.
 */
public class FruitRunner {

    /** Default HDFS input path, used when no command-line argument is given. */
    private static final String DEFAULT_INPUT = "hdfs://hcmaster:8020/input/fruit.tsv";

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();

        // Honor a caller-supplied input path. The original shadowed the real
        // command-line arguments with a hard-coded array, so the program could
        // never be pointed at a different file without recompiling.
        String input = (args.length > 0) ? args[0] : DEFAULT_INPUT;

        Job job = Job.getInstance(conf, FruitRunner.class.getSimpleName());
        job.setJarByClass(FruitRunner.class);
        FileInputFormat.addInputPath(job, new Path(input));

        job.setMapperClass(FruitMapper.class);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Put.class);

        // Wires FruitReducer to TableOutputFormat targeting the "fruit" table.
        TableMapReduceUtil.initTableReducerJob("fruit", FruitReducer.class, job);
        job.setNumReduceTasks(1);

        // Exit non-zero on failure. The original threw an IOException first,
        // which made its subsequent status computation and System.exit dead
        // code (status could only ever be 0).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
Run
- Package the project as a jar and upload it to the Linux server
- Run the jar with the hadoop command
3. View the results