Setting up the environment to run HBase MapReduce jobs on Hadoop

When running an HBase MapReduce program on Hadoop, many HBase jars are missing from Hadoop's default classpath, so they need to be added in hadoop-env.sh.

First, add the jars whose names start with hbase under $HBASE_HOME/lib.

 

In addition, copy metrics-core-2.2.0.jar into $HADOOP_HOME/extlib and add the jars there as well:

 

for f in $HBASE_HOME/lib/hbase*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done

 

 

for f in $HADOOP_HOME/extlib/*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done
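
As an alternative to looping over the jars by hand, recent HBase releases ship an hbase mapredcp command that prints the classpath MapReduce jobs need. A minimal sketch, assuming $HBASE_HOME and $HADOOP_HOME are set and metrics-core-2.2.0.jar was copied into extlib as above:

# build HADOOP_CLASSPATH from HBase's own dependency listing
export HADOOP_CLASSPATH="$($HBASE_HOME/bin/hbase mapredcp):$HADOOP_HOME/extlib/metrics-core-2.2.0.jar"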

 

 

With the classpath in place, submit the job (the jar contains the IdStatics class shown below):

./hadoop jar /tmp/idStatics.jar sea.hbase.IdStatics
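
Once the job completes, the output lands in /result on HDFS (TextOutputFormat's default part files), so the duplicate counts can be inspected with:

./hadoop fs -cat /result/part-r-*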

 

 

Source code:

import java.io.IOException;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

 

public class IdStatics extends Configured implements Tool {

    public static final String table = "id_records";
    // "idValue" is the name of the column family being scanned
    public static final byte[] column = Bytes.toBytes("idValue");

    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.250:2181,192.168.1.250:2182,192.168.1.250:2183");
        conf.set("zookeeper.znode.parent", "/hbase13");

        Job job = Job.getInstance(conf, "read_data_from_hbase");
        job.setJarByClass(IdStatics.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        // The reducer also emits Text/Text pairs
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setReducerClass(ReadReducer.class);
        // Disable speculative execution so rows are not scanned twice
        job.setSpeculativeExecution(false);

        // Scan the entire column family, keeping up to 5 versions per cell
        Scan scan = new Scan();
        scan.addFamily(column);
        scan.setMaxVersions(5);
        scan.setCaching(10); // rows fetched per RPC
        scan.setBatch(2);    // cells returned per Result chunk

        TableMapReduceUtil.initTableMapperJob(table,
                scan,
                ReadMapper.class,
                Text.class,
                Text.class,
                job);

        // Clear any previous output, then write results to /result on HDFS
        String output = "/result";
        FileSystem.get(job.getConfiguration()).delete(new Path(output), true);
        FileOutputFormat.setOutputPath(job, new Path(output));

        return job.waitForCompletion(true) ? 0 : 1;
    }

 

 

    static class ReadMapper extends TableMapper<Text, Text> {

        @Override
        protected void map(ImmutableBytesWritable key, Result result, Context context)
                throws IOException, InterruptedException {
            if (result == null || result.isEmpty()) return;

            List<Cell> cells = result.listCells();
            if (CollectionUtils.isNotEmpty(cells)) {
                for (Cell cell : cells) {
                    // String family = Bytes.toString(CellUtil.cloneFamily(cell));
                    String value = Bytes.toString(CellUtil.cloneValue(cell));
                    // Emit the reversed id value as the key with a count of 1
                    context.write(new Text(StringUtils.reverse(value)), new Text("1"));
                }
            }
        }
    }

 

 

    static class ReadReducer extends Reducer<Text, Text, Text, Text> {

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            int total = 0;
            for (Text each : values) {
                total += Integer.parseInt(each.toString());
            }
            // Only output ids that appear more than once
            if (total > 1) {
                context.write(key, new Text(String.valueOf(total)));
            }
        }
    }

 

 

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new IdStatics(), args));
    }
}
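
For reference, the job assumes an id_records table whose idValue column family keeps multiple versions (the scan asks for up to 5). A sketch of creating such a table from bash via the HBase shell; the schema here is inferred from the source above, not taken from the original post:

# create the table the job scans: family 'idValue', 5 versions retained
echo "create 'id_records', {NAME => 'idValue', VERSIONS => 5}" | $HBASE_HOME/bin/hbase shell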
