HBase学习笔记(5)——MapReduce操作Hbase

版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/u012292754/article/details/82760528

1 实现方法

Hbase对MapReduce提供支持,它提供了TableMapper类和TableReducer类,我们只需要继承这两个类即可

1.1 写个mapper继承TableMapper<Text, IntWritable>

参数:Text:mapper的输出key类型; IntWritable:mapper的输出value类型。
map(ImmutableBytesWritable key, Result value,Context context) 注:immutable不可变的
参数:key:rowKey;value: Result ,一行数据; context上下文

1.2 写个reduce继承TableReducer<Text, IntWritable, ImmutableBytesWritable>

参数:Text:reduce的输入key; IntWritable:reduce的输入value;
ImmutableBytesWritable:reduce输出到hbase中的rowKey类型。
其中的reduce方法如下:
reduce(Text key, Iterable<IntWritable> values,Context context)
参数: key:reduce的输入key;values:reduce的输入value;

2 测试代码

注意导入hbase,mapreduce 相关的 jar 包

2.1 测试

package hbase_mr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class HBaseMR {

    static Configuration config = null;

    static {
        config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", "node1,node2,node3");
        config.set("habse.zookeeper.property.clientPort", "2181");
    }

    public static final String tableName = "word";
    public static final String colf = "content";
    public static final String col = "info";
    public static final String tableName2 = "stat";

    public static void initTB() {
        HTable table = null;
        HBaseAdmin admin = null;

        try {
            admin = new HBaseAdmin(config);

            if (admin.tableExists(tableName) || admin.tableExists(tableName2)) {
                System.out.println("table has existed!!");
                admin.disableTable(tableName);
                admin.deleteTable(tableName);
                admin.disableTable(tableName2);
                admin.deleteTable(tableName2);
            }

            /*
             * 创建表
             * */
            HTableDescriptor desc = new HTableDescriptor(tableName);
            HColumnDescriptor family = new HColumnDescriptor(colf);
            desc.addFamily(family);
            admin.createTable(desc);

            HTableDescriptor desc2 = new HTableDescriptor(tableName2);
            HColumnDescriptor family2 = new HColumnDescriptor(colf);
            desc2.addFamily(family2);
            admin.createTable(desc2);

            /*
             * 插入数据
             * */
            table = new HTable(config, tableName);
            table.setAutoFlush(false);
            table.setWriteBufferSize(500);
            List<Put> lp = new ArrayList<>();

            Put p1 = new Put(Bytes.toBytes("1"));
            p1.add(colf.getBytes(), col.getBytes(),	("The Apache Hadoop software library is a framework").getBytes());
            lp.add(p1);
            Put p2 = new Put(Bytes.toBytes("2"));p2.add(colf.getBytes(),col.getBytes(),("The common utilities that support the other Hadoop modules").getBytes());
            lp.add(p2);
            Put p3 = new Put(Bytes.toBytes("3"));
            p3.add(colf.getBytes(), col.getBytes(),("Hadoop by reading the documentation").getBytes());
            lp.add(p3);
            Put p4 = new Put(Bytes.toBytes("4"));
            p4.add(colf.getBytes(), col.getBytes(),("Hadoop from the release page").getBytes());
            lp.add(p4);
            Put p5 = new Put(Bytes.toBytes("5"));
            p5.add(colf.getBytes(), col.getBytes(),("Hadoop on the mailing list").getBytes());
            lp.add(p5);
            table.put(lp);
            table.flushCommits();
            lp.clear();


        } catch (IOException e) {
            e.printStackTrace();
        }finally {
            if(table!=null){
                try {
                    table.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * MyMapper 继承 TableMapper
     * TableMapper<Text,IntWritable>
     * Text:输出的key类型,
     * IntWritable:输出的value类型
     */
    public static class MyMapper extends TableMapper<Text, IntWritable> {
        private static IntWritable one = new IntWritable(1);
        private static Text word = new Text();
        @Override
        //输入的类型为:key:rowKey; value:一行数据的结果集Result
        protected void map(ImmutableBytesWritable key, Result value,
                           Context context) throws IOException, InterruptedException {
            //获取一行数据中的colf:col
            String words = Bytes.toString(value.getValue(Bytes.toBytes(colf), Bytes.toBytes(col)));// 表里面只有一个列族,所以我就直接获取每一行的值
            //按空格分割
            String itr[] = words.toString().split(" ");
            //循环输出word和1
            for (int i = 0; i < itr.length; i++) {
                word.set(itr[i]);
                context.write(word, one);
            }
        }
    }
    /**
     * MyReducer 继承 TableReducer
     * TableReducer<Text,IntWritable>
     * Text:输入的key类型,
     * IntWritable:输入的value类型,
     * ImmutableBytesWritable:输出类型,表示rowkey的类型
     */
    public static class MyReducer extends
            TableReducer<Text, IntWritable, ImmutableBytesWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values,
                              Context context) throws IOException, InterruptedException {
            //对mapper的数据求和
            int sum = 0;
            for (IntWritable val : values) {//叠加
                sum += val.get();
            }
            // 创建put,设置rowkey为单词
            Put put = new Put(Bytes.toBytes(key.toString()));
            // 封装数据
            put.add(Bytes.toBytes(colf), Bytes.toBytes(col),Bytes.toBytes(String.valueOf(sum)));
            //写到hbase,需要指定rowkey、put
            context.write(new ImmutableBytesWritable(Bytes.toBytes(key.toString())),put);
        }
    }


    public static void main(String[] args) throws IOException,
            ClassNotFoundException, InterruptedException {
        config.set("fs.defaultFS", "hdfs://node1:9000");//设置hdfs的默认路径
        //初始化表
        initTB();//初始化表
        //创建job
        Job job = Job.getInstance(config,"HBaseMR");

        job.setJarByClass(HBaseMR.class);//主类
        //创建scan
        Scan scan = new Scan();
        //可以指定查询某一列
        scan.addColumn(Bytes.toBytes(colf), Bytes.toBytes(col));
        //创建查询hbase的mapper,设置表名、scan、mapper类、mapper的输出key、mapper的输出value
        TableMapReduceUtil.initTableMapperJob(tableName, scan, MyMapper.class,Text.class, IntWritable.class, job);
        //创建写入hbase的reducer,指定表名、reducer类、job
        TableMapReduceUtil.initTableReducerJob(tableName2, MyReducer.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }


}

运行发现报错

Exception in thread "main" java.lang.IllegalArgumentException: Pathname /D:/Data/JavaProject/hbase/lib/netty-all-4.0.23.Final.jar from hdfs://node1:9000/D:/Data/JavaProject/hbase/lib/netty-all-4.0.23.Final.jar is not a valid DFS filename.
	at org.apache.hadoop.hdfs.DistributedFileSystem.getPathName(DistributedFileSystem.java:187)
	at org.apache.hadoop.hdfs.DistributedFileSystem.access$000(DistributedFileSystem.java:101)
	at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1068)
	at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1064)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1064)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.getFileStatus(ClientDistributedCacheManager.java:288)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.getFileStatus(ClientDistributedCacheManager.java:224)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.determineTimestamps(ClientDistributedCacheManager.java:93)
	at org.apache.hadoop.mapreduce.filecache.ClientDistributedCacheManager.determineTimestampsAndCacheVisibilities(ClientDistributedCacheManager.java:57)
	at org.apache.hadoop.mapreduce.JobSubmitter.copyAndConfigureFiles(JobSubmitter.java:265)
	at org.apache.hadoop.mapreduce.JobSubmitter.copyAndConfigureFiles(JobSubmitter.java:301)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:389)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1285)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1282)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1282)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1303)
	at hbase_mr.HBaseMR.main(HBaseMR.java:172)

Process finished with exit code 1

2.2 错误原因

Hadoop文件系统在检查路径时没有区分是本地Windows系统还是Hadoop集群文件系统
参考链接

https://blog.csdn.net/qq_31246691/article/details/79585097
https://blog.csdn.net/lsr40/article/details/77648012

修正方案

TableMapReduceUtil.initTableMapperJob(tableName, scan, MyMapper.class,Text.class, IntWritable.class, job,false);
        //创建写入hbase的reducer,指定表名、reducer类、job
        TableMapReduceUtil.initTableReducerJob(tableName2, MyReducer.class, job,
                null,null,null,null,false);

2.3 运行结果

日志

D:\programs\Java\jdk1.8.0_181\bin\java "-javaagent:D:\programs\JetBrains\IntelliJ IDEA 2017.3.4\lib\idea_rt.jar=51278:D:\programs\JetBrains\IntelliJ IDEA 2017.3.4\bin" -Dfile.encoding=UTF-8 -classpath D:\programs\Java\jdk1.8.0_181\jre\lib\charsets.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\deploy.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\access-bridge-64.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\cldrdata.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\dnsns.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\jaccess.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\jfxrt.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\localedata.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\nashorn.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunec.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunjce_provider.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunmscapi.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunpkcs11.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\zipfs.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\javaws.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jce.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jfr.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jfxswt.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jsse.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\management-agent.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\plugin.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\resources.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\rt.jar;D:\Data\JavaProject\hbase\out\production\hbase;D:\Data\JavaProject\hbase\lib\hamcrest-core-1.3.jar;D:\Data\JavaProject\hbase\lib\xz-1.0.jar;D:\Data\JavaProject\hbase\lib\asm-3.1.jar;D:\Data\JavaProject\hbase\lib\guice-3.0.jar;D:\Data\JavaProject\hbase\lib\avro-1.7.4.jar;D:\Data\JavaProject\hbase\lib\joni-2.1.2.jar;D:\Data\JavaProject\hbase\lib\junit-4.12.jar;D:\Data\JavaProject\hbase\lib\jsch-0.1.42.jar;D:\Data\JavaProject\hbase\lib\xmlenc-0.52.jar;D:\Data\JavaProject\hbase\lib\guava-12.0.1.jar;D:\Data\JavaProject\hbase\lib\jets3t-0.9.0.jar;D:\Data\JavaProject\hbase\lib\jetty-6.1.26.ja
r;D:\Data\JavaProject\hbase\lib\log4j-1.2.17.jar;D:\Data\JavaProject\hbase\lib\paranamer-2.3.jar;D:\Data\JavaProject\hbase\lib\activation-1.1.jar;D:\Data\JavaProject\hbase\lib\commons-el-1.0.jar;D:\Data\JavaProject\hbase\lib\commons-io-2.4.jar;D:\Data\JavaProject\hbase\lib\hbase-it-1.2.6.jar;D:\Data\JavaProject\hbase\lib\httpcore-4.4.1.jar;D:\Data\JavaProject\hbase\lib\javax.inject-1.jar;D:\Data\JavaProject\hbase\lib\jaxb-api-2.2.2.jar;D:\Data\JavaProject\hbase\lib\jcodings-1.0.8.jar;D:\Data\JavaProject\hbase\lib\jettison-1.3.3.jar;D:\Data\JavaProject\hbase\lib\jsp-2.1-6.1.14.jar;D:\Data\JavaProject\hbase\lib\aopalliance-1.0.jar;D:\Data\JavaProject\hbase\lib\commons-cli-1.2.jar;D:\Data\JavaProject\hbase\lib\commons-net-3.1.jar;D:\Data\JavaProject\hbase\lib\disruptor-3.3.0.jar;D:\Data\JavaProject\hbase\lib\jersey-core-1.9.jar;D:\Data\JavaProject\hbase\lib\jersey-json-1.9.jar;D:\Data\JavaProject\hbase\lib\libthrift-0.9.3.jar;D:\Data\JavaProject\hbase\lib\servlet-api-2.5.jar;D:\Data\JavaProject\hbase\lib\slf4j-api-1.7.7.jar;D:\Data\JavaProject\hbase\lib\zookeeper-3.4.6.jar;D:\Data\JavaProject\hbase\lib\commons-lang-2.6.jar;D:\Data\JavaProject\hbase\lib\commons-math-2.2.jar;D:\Data\JavaProject\hbase\lib\hbase-rest-1.2.6.jar;D:\Data\JavaProject\hbase\lib\httpclient-4.2.5.jar;D:\Data\JavaProject\hbase\lib\jersey-guice-1.9.jar;D:\Data\JavaProject\hbase\lib\commons-codec-1.9.jar;D:\Data\JavaProject\hbase\lib\guice-servlet-3.0.jar;D:\Data\JavaProject\hbase\lib\hadoop-auth-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-hdfs-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-shell-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-xc-1.9.13.jar;D:\Data\JavaProject\hbase\lib\jaxb-impl-2.2.3-1.jar;D:\Data\JavaProject\hbase\lib\jersey-client-1.9.jar;D:\Data\JavaProject\hbase\lib\jersey-server-1.9.jar;D:\Data\JavaProject\hbase\lib\jetty-util-6.1.26.jar;D:\Data\JavaProject\hbase\lib\api-util-1.0.0-M20.jar;D:\Data\JavaProject\hbase\lib\hbase-client-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hb
ase-common-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-server-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-thrift-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jsp-api-2.1-6.1.14.jar;D:\Data\JavaProject\hbase\lib\leveldbjni-all-1.8.jar;D:\Data\JavaProject\hbase\lib\metrics-core-2.2.0.jar;D:\Data\JavaProject\hbase\lib\commons-logging-1.2.jar;D:\Data\JavaProject\hbase\lib\commons-math3-3.1.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-client-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\jamon-runtime-2.4.1.jar;D:\Data\JavaProject\hbase\lib\java-xmlbuilder-0.4.jar;D:\Data\JavaProject\hbase\lib\protobuf-java-2.5.0.jar;D:\Data\JavaProject\hbase\lib\slf4j-log4j12-1.7.5.jar;D:\Data\JavaProject\hbase\lib\snappy-java-1.0.4.1.jar;D:\Data\JavaProject\hbase\lib\spymemcached-2.11.6.jar;D:\Data\JavaProject\hbase\lib\commons-digester-1.8.jar;D:\Data\JavaProject\hbase\lib\hbase-examples-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-it-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\hbase-protocol-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-jaxrs-1.9.13.jar;D:\Data\JavaProject\hbase\lib\jruby-complete-1.6.8.jar;D:\Data\JavaProject\hbase\lib\commons-daemon-1.0.13.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-api-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-procedure-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jasper-runtime-5.5.23.jar;D:\Data\JavaProject\hbase\lib\api-asn1-api-1.0.0-M20.jar;D:\Data\JavaProject\hbase\lib\commons-compress-1.4.1.jar;D:\Data\JavaProject\hbase\lib\commons-httpclient-3.1.jar;D:\Data\JavaProject\hbase\lib\jasper-compiler-5.5.23.jar;D:\Data\JavaProject\hbase\lib\jetty-sslengine-6.1.26.jar;D:\Data\JavaProject\hbase\lib\netty-all-4.0.23.Final.jar;D:\Data\JavaProject\hbase\lib\servlet-api-2.5-6.1.14.jar;D:\Data\JavaProject\hbase\lib\apacheds-i18n-2.0.0-M15.jar;D:\Data\JavaProject\hbase\lib\commons-beanutils-1.7.0.jar;D:\Data\JavaProject\hbase\lib\hbase-annotations-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-prefix-tree
-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-core-asl-1.9.13.jar;D:\Data\JavaProject\hbase\lib\hadoop-annotations-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-client-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-common-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\hbase-server-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\commons-collections-3.2.2.jar;D:\Data\JavaProject\hbase\lib\commons-configuration-1.6.jar;D:\Data\JavaProject\hbase\lib\hbase-hadoop-compat-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-mapper-asl-1.9.13.jar;D:\Data\JavaProject\hbase\lib\hbase-hadoop2-compat-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-resource-bundle-1.2.6.jar;D:\Data\JavaProject\hbase\lib\commons-beanutils-core-1.8.0.jar;D:\Data\JavaProject\hbase\lib\findbugs-annotations-1.3.9-1.jar;D:\Data\JavaProject\hbase\lib\htrace-core-3.1.0-incubating.jar;D:\Data\JavaProject\hbase\lib\hbase-annotations-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-server-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-external-blockcache-1.2.6.jar;D:\Data\JavaProject\hbase\lib\apacheds-kerberos-codec-2.0.0-M15.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-app-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-core-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-shuffle-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-jobclient-2.5.1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\xz-1.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\asm-3.2.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\guice-3.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\avro-1.7.4.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\junit-4.11.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\log4j-1.2.17.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\paranamer-2.3.jar;
D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\commons-io-2.4.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\javax.inject-1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\aopalliance-1.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jersey-core-1.9.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jersey-guice-1.9.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\guice-servlet-3.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\hamcrest-core-1.3.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jersey-server-1.9.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\netty-3.6.2.Final.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\leveldbjni-all-1.8.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\protobuf-java-2.5.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\snappy-java-1.0.4.1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\commons-compress-1.4.1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jackson-core-asl-1.9.13.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\hadoop-annotations-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jackson-mapper-asl-1.9.13.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-app-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-common-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-core-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-plugins-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-jobclient-2.7.6-tests.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-jobclient-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-shuffle-2.7.6.jar hbase_mr.HBaseMR
2018-09-19 14:47:15,343 INFO  [main] zookeeper.RecoverableZooKeeper (RecoverableZooKeeper.java:<init>(120)) - Process identifier=hconnection-0x12028586 connecting to ZooKeeper ensemble=node1:2181,node2:2181,node3:2181
2018-09-19 14:47:15,352 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
2018-09-19 14:47:15,352 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:host.name=XPS-15-9570
2018-09-19 14:47:15,352 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:java.version=1.8.0_181
2018-09-19 14:47:15,352 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:java.vendor=Oracle Corporation
2018-09-19 14:47:15,352 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:java.home=D:\programs\Java\jdk1.8.0_181\jre
2018-09-19 14:47:15,352 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:java.class.path=D:\programs\Java\jdk1.8.0_181\jre\lib\charsets.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\deploy.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\access-bridge-64.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\cldrdata.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\dnsns.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\jaccess.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\jfxrt.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\localedata.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\nashorn.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunec.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunjce_provider.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunmscapi.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\sunpkcs11.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\ext\zipfs.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\javaws.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jce.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jfr.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jfxswt.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\jsse.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\management-agent.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\plugin.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\resources.jar;D:\programs\Java\jdk1.8.0_181\jre\lib\rt.jar;D:\Data\JavaProject\hbase\out\production\hbase;D:\Data\JavaProject\hbase\lib\hamcrest-core-1.3.jar;D:\Data\JavaProject\hbase\lib\xz-1.0.jar;D:\Data\JavaProject\hbase\lib\asm-3.1.jar;D:\Data\JavaProject\hbase\lib\guice-3.0.jar;D:\Data\JavaProject\hbase\lib\avro-1.7.4.jar;D:\Data\JavaProject\hbase\lib\joni-2.1.2.jar;D:\Data\JavaProject\hbase\lib\junit-4.12.jar;D:\Data\JavaProject\hbase\lib\jsch-0.1.42.jar;D:\Data\JavaProject\hbase\lib\xmlenc-0.52.jar;D:\Data\JavaProject\hbase\lib\guava-12.0.1.jar;D:\Data\JavaProject\hbase\lib\jets3t-0.9.0.jar;D:\Data\JavaProject\hbase\lib\jetty-6.1.26.jar;D:\Data\JavaProject\hbase\lib\log4j-1.2.17.jar;D:\Data\JavaProject\hbase\l
ib\paranamer-2.3.jar;D:\Data\JavaProject\hbase\lib\activation-1.1.jar;D:\Data\JavaProject\hbase\lib\commons-el-1.0.jar;D:\Data\JavaProject\hbase\lib\commons-io-2.4.jar;D:\Data\JavaProject\hbase\lib\hbase-it-1.2.6.jar;D:\Data\JavaProject\hbase\lib\httpcore-4.4.1.jar;D:\Data\JavaProject\hbase\lib\javax.inject-1.jar;D:\Data\JavaProject\hbase\lib\jaxb-api-2.2.2.jar;D:\Data\JavaProject\hbase\lib\jcodings-1.0.8.jar;D:\Data\JavaProject\hbase\lib\jettison-1.3.3.jar;D:\Data\JavaProject\hbase\lib\jsp-2.1-6.1.14.jar;D:\Data\JavaProject\hbase\lib\aopalliance-1.0.jar;D:\Data\JavaProject\hbase\lib\commons-cli-1.2.jar;D:\Data\JavaProject\hbase\lib\commons-net-3.1.jar;D:\Data\JavaProject\hbase\lib\disruptor-3.3.0.jar;D:\Data\JavaProject\hbase\lib\jersey-core-1.9.jar;D:\Data\JavaProject\hbase\lib\jersey-json-1.9.jar;D:\Data\JavaProject\hbase\lib\libthrift-0.9.3.jar;D:\Data\JavaProject\hbase\lib\servlet-api-2.5.jar;D:\Data\JavaProject\hbase\lib\slf4j-api-1.7.7.jar;D:\Data\JavaProject\hbase\lib\zookeeper-3.4.6.jar;D:\Data\JavaProject\hbase\lib\commons-lang-2.6.jar;D:\Data\JavaProject\hbase\lib\commons-math-2.2.jar;D:\Data\JavaProject\hbase\lib\hbase-rest-1.2.6.jar;D:\Data\JavaProject\hbase\lib\httpclient-4.2.5.jar;D:\Data\JavaProject\hbase\lib\jersey-guice-1.9.jar;D:\Data\JavaProject\hbase\lib\commons-codec-1.9.jar;D:\Data\JavaProject\hbase\lib\guice-servlet-3.0.jar;D:\Data\JavaProject\hbase\lib\hadoop-auth-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-hdfs-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-shell-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-xc-1.9.13.jar;D:\Data\JavaProject\hbase\lib\jaxb-impl-2.2.3-1.jar;D:\Data\JavaProject\hbase\lib\jersey-client-1.9.jar;D:\Data\JavaProject\hbase\lib\jersey-server-1.9.jar;D:\Data\JavaProject\hbase\lib\jetty-util-6.1.26.jar;D:\Data\JavaProject\hbase\lib\api-util-1.0.0-M20.jar;D:\Data\JavaProject\hbase\lib\hbase-client-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-common-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-server-1.2.6.jar;D:
\Data\JavaProject\hbase\lib\hbase-thrift-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jsp-api-2.1-6.1.14.jar;D:\Data\JavaProject\hbase\lib\leveldbjni-all-1.8.jar;D:\Data\JavaProject\hbase\lib\metrics-core-2.2.0.jar;D:\Data\JavaProject\hbase\lib\commons-logging-1.2.jar;D:\Data\JavaProject\hbase\lib\commons-math3-3.1.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-client-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\jamon-runtime-2.4.1.jar;D:\Data\JavaProject\hbase\lib\java-xmlbuilder-0.4.jar;D:\Data\JavaProject\hbase\lib\protobuf-java-2.5.0.jar;D:\Data\JavaProject\hbase\lib\slf4j-log4j12-1.7.5.jar;D:\Data\JavaProject\hbase\lib\snappy-java-1.0.4.1.jar;D:\Data\JavaProject\hbase\lib\spymemcached-2.11.6.jar;D:\Data\JavaProject\hbase\lib\commons-digester-1.8.jar;D:\Data\JavaProject\hbase\lib\hbase-examples-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-it-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\hbase-protocol-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-jaxrs-1.9.13.jar;D:\Data\JavaProject\hbase\lib\jruby-complete-1.6.8.jar;D:\Data\JavaProject\hbase\lib\commons-daemon-1.0.13.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-api-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-procedure-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jasper-runtime-5.5.23.jar;D:\Data\JavaProject\hbase\lib\api-asn1-api-1.0.0-M20.jar;D:\Data\JavaProject\hbase\lib\commons-compress-1.4.1.jar;D:\Data\JavaProject\hbase\lib\commons-httpclient-3.1.jar;D:\Data\JavaProject\hbase\lib\jasper-compiler-5.5.23.jar;D:\Data\JavaProject\hbase\lib\jetty-sslengine-6.1.26.jar;D:\Data\JavaProject\hbase\lib\netty-all-4.0.23.Final.jar;D:\Data\JavaProject\hbase\lib\servlet-api-2.5-6.1.14.jar;D:\Data\JavaProject\hbase\lib\apacheds-i18n-2.0.0-M15.jar;D:\Data\JavaProject\hbase\lib\commons-beanutils-1.7.0.jar;D:\Data\JavaProject\hbase\lib\hbase-annotations-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-prefix-tree-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-core-asl-1.9.13.jar;D:\Data
\JavaProject\hbase\lib\hadoop-annotations-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-client-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-common-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\hbase-server-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\commons-collections-3.2.2.jar;D:\Data\JavaProject\hbase\lib\commons-configuration-1.6.jar;D:\Data\JavaProject\hbase\lib\hbase-hadoop-compat-1.2.6.jar;D:\Data\JavaProject\hbase\lib\jackson-mapper-asl-1.9.13.jar;D:\Data\JavaProject\hbase\lib\hbase-hadoop2-compat-1.2.6.jar;D:\Data\JavaProject\hbase\lib\hbase-resource-bundle-1.2.6.jar;D:\Data\JavaProject\hbase\lib\commons-beanutils-core-1.8.0.jar;D:\Data\JavaProject\hbase\lib\findbugs-annotations-1.3.9-1.jar;D:\Data\JavaProject\hbase\lib\htrace-core-3.1.0-incubating.jar;D:\Data\JavaProject\hbase\lib\hbase-annotations-1.2.6-tests.jar;D:\Data\JavaProject\hbase\lib\hadoop-yarn-server-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hbase-external-blockcache-1.2.6.jar;D:\Data\JavaProject\hbase\lib\apacheds-kerberos-codec-2.0.0-M15.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-app-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-core-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-common-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-shuffle-2.5.1.jar;D:\Data\JavaProject\hbase\lib\hadoop-mapreduce-client-jobclient-2.5.1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\xz-1.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\asm-3.2.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\guice-3.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\avro-1.7.4.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\junit-4.11.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\log4j-1.2.17.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\paranamer-2.3.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\commons-io-2.4.jar;D:\pr
ograms\hadoop-2.7.6\share\hadoop\mapreduce\lib\javax.inject-1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\aopalliance-1.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jersey-core-1.9.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jersey-guice-1.9.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\guice-servlet-3.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\hamcrest-core-1.3.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jersey-server-1.9.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\netty-3.6.2.Final.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\leveldbjni-all-1.8.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\protobuf-java-2.5.0.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\snappy-java-1.0.4.1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\commons-compress-1.4.1.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jackson-core-asl-1.9.13.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\hadoop-annotations-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\lib\jackson-mapper-asl-1.9.13.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-app-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-common-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-core-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-hs-plugins-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-jobclient-2.7.6-tests.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-jobclient-2.7.6.jar;D:\programs\hadoop-2.7.6\share\hadoop\mapreduce\hadoop-mapreduce-client-shuffle-2.7.6.jar;D:\programs\JetBrains\IntelliJ IDEA 2017.3.4\lib\idea_rt.jar
2018-09-19 14:47:15,353 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:java.library.path=D:\programs\Java\jdk1.8.0_181\bin;C:\Windows\Sun\Java\bin;C:\Windows\system32;C:\Windows;D:\programs\Python37\Scripts\;D:\programs\Python37\;C:\Program Files (x86)\Common Files\Oracle\Java\javapath;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\iCLS\;C:\Program Files\Intel\Intel(R) Management Engine Components\iCLS\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Windows\System32\OpenSSH\;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;D:\programs\Java\jdk1.8.0_181\bin;D:\programs\Java\jdk1.8.0_181\jre\bin;D:\programs\apache-maven-3.5.4\bin;D:\programs\findbugs-1.3.9\bin;C:\Program Files (x86)\CMake\bin;C:\Program Files\Microsoft Windows Performance Toolkit\;D:\programs\Git\Git\cmd;D:\programs\Git\Git\bin;D:\programs\protobuf-2.5.0\src;D:\programs\MATLAB\R2015b\runtime\win64;D:\programs\MATLAB\R2015b\bin;D:\programs\MATLAB\R2015b\polyspace\bin;D:\programs\hadoop-2.7.6\bin;D:\programs\hadoop-2.7.6\sbin;C:\Users\tzb\AppData\Local\Microsoft\WindowsApps;.
2018-09-19 14:47:15,353 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:java.io.tmpdir=C:\Users\tzb\AppData\Local\Temp\
2018-09-19 14:47:15,353 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:java.compiler=<NA>
2018-09-19 14:47:15,353 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:os.name=Windows 10
2018-09-19 14:47:15,353 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:os.arch=amd64
2018-09-19 14:47:15,353 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:os.version=10.0
2018-09-19 14:47:15,354 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:user.name=tzb
2018-09-19 14:47:15,354 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:user.home=C:\Users\tzb
2018-09-19 14:47:15,354 INFO  [main] zookeeper.ZooKeeper (Environment.java:logEnv(100)) - Client environment:user.dir=D:\Data\JavaProject\hbase
2018-09-19 14:47:15,355 INFO  [main] zookeeper.ZooKeeper (ZooKeeper.java:<init>(438)) - Initiating client connection, connectString=node1:2181,node2:2181,node3:2181 sessionTimeout=90000 watcher=hconnection-0x120285860x0, quorum=node1:2181,node2:2181,node3:2181, baseZNode=/hbase
2018-09-19 14:47:15,625 INFO  [main-SendThread(node2:2181)] zookeeper.ClientCnxn (ClientCnxn.java:logStartConnect(975)) - Opening socket connection to server node2/192.168.30.132:2181. Will not attempt to authenticate using SASL (unknown error)
2018-09-19 14:47:15,626 INFO  [main-SendThread(node2:2181)] zookeeper.ClientCnxn (ClientCnxn.java:primeConnection(852)) - Socket connection established to node2/192.168.30.132:2181, initiating session
2018-09-19 14:47:15,659 INFO  [main-SendThread(node2:2181)] zookeeper.ClientCnxn (ClientCnxn.java:onConnected(1235)) - Session establishment complete on server node2/192.168.30.132:2181, sessionid = 0x265f06ddcc90003, negotiated timeout = 40000
table has existed!!
2018-09-19 14:47:15,964 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:call(1378)) - Started disable of word
2018-09-19 14:47:18,238 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:postOperationResult(1407)) - Disabled word
2018-09-19 14:47:19,500 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:postOperationResult(963)) - Deleted word
2018-09-19 14:47:19,500 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:call(1378)) - Started disable of stat
2018-09-19 14:47:21,715 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:postOperationResult(1407)) - Disabled stat
2018-09-19 14:47:22,957 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:postOperationResult(963)) - Deleted stat
2018-09-19 14:47:24,306 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:postOperationResult(790)) - Created word
2018-09-19 14:47:25,527 INFO  [main] client.HBaseAdmin (HBaseAdmin.java:postOperationResult(790)) - Created stat
2018-09-19 14:47:25,667 INFO  [main] Configuration.deprecation (Configuration.java:warnOnceIfDeprecated(1019)) - session.id is deprecated. Instead, use dfs.metrics.session-id
2018-09-19 14:47:25,668 INFO  [main] jvm.JvmMetrics (JvmMetrics.java:init(76)) - Initializing JVM Metrics with processName=JobTracker, sessionId=
2018-09-19 14:47:26,023 WARN  [main] mapreduce.JobSubmitter (JobSubmitter.java:copyAndConfigureFiles(150)) - Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
2018-09-19 14:47:26,029 WARN  [main] mapreduce.JobSubmitter (JobSubmitter.java:copyAndConfigureFiles(259)) - No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
2018-09-19 14:47:26,224 INFO  [main] zookeeper.RecoverableZooKeeper (RecoverableZooKeeper.java:<init>(120)) - Process identifier=hconnection-0x4d9ac0b4 connecting to ZooKeeper ensemble=node1:2181,node2:2181,node3:2181
2018-09-19 14:47:26,224 INFO  [main] zookeeper.ZooKeeper (ZooKeeper.java:<init>(438)) - Initiating client connection, connectString=node1:2181,node2:2181,node3:2181 sessionTimeout=90000 watcher=hconnection-0x4d9ac0b40x0, quorum=node1:2181,node2:2181,node3:2181, baseZNode=/hbase
2018-09-19 14:47:26,226 INFO  [main-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:logStartConnect(975)) - Opening socket connection to server node1/192.168.30.131:2181. Will not attempt to authenticate using SASL (unknown error)
2018-09-19 14:47:26,226 INFO  [main-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:primeConnection(852)) - Socket connection established to node1/192.168.30.131:2181, initiating session
2018-09-19 14:47:26,230 INFO  [main-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:onConnected(1235)) - Session establishment complete on server node1/192.168.30.131:2181, sessionid = 0x165f06de077000d, negotiated timeout = 40000
2018-09-19 14:47:26,233 INFO  [main] util.RegionSizeCalculator (RegionSizeCalculator.java:init(91)) - Calculating region sizes for table "word".
2018-09-19 14:47:26,297 INFO  [main] client.ConnectionManager$HConnectionImplementation (ConnectionManager.java:closeMasterService(2139)) - Closing master protocol: MasterService
2018-09-19 14:47:26,298 INFO  [main] client.ConnectionManager$HConnectionImplementation (ConnectionManager.java:closeZooKeeperWatcher(1710)) - Closing zookeeper sessionid=0x165f06de077000d
2018-09-19 14:47:26,305 INFO  [main] zookeeper.ZooKeeper (ZooKeeper.java:close(684)) - Session: 0x165f06de077000d closed
2018-09-19 14:47:26,305 INFO  [main-EventThread] zookeeper.ClientCnxn (ClientCnxn.java:run(512)) - EventThread shut down
2018-09-19 14:47:26,355 INFO  [main] mapreduce.JobSubmitter (JobSubmitter.java:submitJobInternal(396)) - number of splits:1
2018-09-19 14:47:26,417 INFO  [main] mapreduce.JobSubmitter (JobSubmitter.java:printTokens(479)) - Submitting tokens for job: job_local637214210_0001
2018-09-19 14:47:26,435 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-tzb/mapred/staging/tzb637214210/.staging/job_local637214210_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval;  Ignoring.
2018-09-19 14:47:26,435 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-tzb/mapred/staging/tzb637214210/.staging/job_local637214210_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts;  Ignoring.
2018-09-19 14:47:26,518 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-tzb/mapred/local/localRunner/tzb/job_local637214210_0001/job_local637214210_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval;  Ignoring.
2018-09-19 14:47:26,520 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-tzb/mapred/local/localRunner/tzb/job_local637214210_0001/job_local637214210_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts;  Ignoring.
2018-09-19 14:47:26,523 INFO  [main] mapreduce.Job (Job.java:submit(1289)) - The url to track the job: http://localhost:8080/
2018-09-19 14:47:26,523 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1334)) - Running job: job_local637214210_0001
2018-09-19 14:47:26,524 INFO  [Thread-8] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(471)) - OutputCommitter set in config null
2018-09-19 14:47:26,541 INFO  [Thread-8] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(489)) - OutputCommitter is org.apache.hadoop.hbase.mapreduce.TableOutputCommitter
2018-09-19 14:47:26,564 INFO  [Thread-8] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(448)) - Waiting for map tasks
2018-09-19 14:47:26,566 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(224)) - Starting task: attempt_local637214210_0001_m_000000_0
2018-09-19 14:47:26,595 INFO  [LocalJobRunner Map Task Executor #0] util.ProcfsBasedProcessTree (ProcfsBasedProcessTree.java:isAvailable(181)) - ProcfsBasedProcessTree currently is supported only on Linux.
2018-09-19 14:47:26,709 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:initialize(587)) -  Using ResourceCalculatorProcessTree : org.apache.hadoop.yarn.util.WindowsBasedProcessTree@4eeaf17f
2018-09-19 14:47:26,713 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:runNewMapper(733)) - Processing split: HBase table split(table name: word, scan: , start row: , end row: , region location: node2)
2018-09-19 14:47:26,717 INFO  [LocalJobRunner Map Task Executor #0] zookeeper.RecoverableZooKeeper (RecoverableZooKeeper.java:<init>(120)) - Process identifier=hconnection-0x5847de11 connecting to ZooKeeper ensemble=node1:2181,node2:2181,node3:2181
2018-09-19 14:47:26,717 INFO  [LocalJobRunner Map Task Executor #0] zookeeper.ZooKeeper (ZooKeeper.java:<init>(438)) - Initiating client connection, connectString=node1:2181,node2:2181,node3:2181 sessionTimeout=90000 watcher=hconnection-0x5847de110x0, quorum=node1:2181,node2:2181,node3:2181, baseZNode=/hbase
2018-09-19 14:47:26,719 INFO  [LocalJobRunner Map Task Executor #0-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:logStartConnect(975)) - Opening socket connection to server node1/192.168.30.131:2181. Will not attempt to authenticate using SASL (unknown error)
2018-09-19 14:47:26,721 INFO  [LocalJobRunner Map Task Executor #0-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:primeConnection(852)) - Socket connection established to node1/192.168.30.131:2181, initiating session
2018-09-19 14:47:26,724 INFO  [LocalJobRunner Map Task Executor #0-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:onConnected(1235)) - Session establishment complete on server node1/192.168.30.131:2181, sessionid = 0x165f06de077000e, negotiated timeout = 40000
2018-09-19 14:47:26,725 INFO  [LocalJobRunner Map Task Executor #0] mapreduce.TableInputFormatBase (TableInputFormatBase.java:createRecordReader(177)) - Input split length: 0 bytes.
2018-09-19 14:47:26,731 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:createSortingCollector(388)) - Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
2018-09-19 14:47:26,763 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:setEquator(1182)) - (EQUATOR) 0 kvi 26214396(104857584)
2018-09-19 14:47:26,763 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(975)) - mapreduce.task.io.sort.mb: 100
2018-09-19 14:47:26,763 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(976)) - soft limit at 83886080
2018-09-19 14:47:26,763 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(977)) - bufstart = 0; bufvoid = 104857600
2018-09-19 14:47:26,763 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(978)) - kvstart = 26214396; length = 6553600
2018-09-19 14:47:26,800 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 
2018-09-19 14:47:26,800 INFO  [LocalJobRunner Map Task Executor #0] client.ConnectionManager$HConnectionImplementation (ConnectionManager.java:closeZooKeeperWatcher(1710)) - Closing zookeeper sessionid=0x165f06de077000e
2018-09-19 14:47:26,803 INFO  [LocalJobRunner Map Task Executor #0] zookeeper.ZooKeeper (ZooKeeper.java:close(684)) - Session: 0x165f06de077000e closed
2018-09-19 14:47:26,803 INFO  [LocalJobRunner Map Task Executor #0-EventThread] zookeeper.ClientCnxn (ClientCnxn.java:run(512)) - EventThread shut down
2018-09-19 14:47:26,814 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1437)) - Starting flush of map output
2018-09-19 14:47:26,814 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1455)) - Spilling map output
2018-09-19 14:47:26,814 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1456)) - bufstart = 0; bufend = 329; bufvoid = 104857600
2018-09-19 14:47:26,814 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1458)) - kvstart = 26214396(104857584); kvend = 26214272(104857088); length = 125/6553600
2018-09-19 14:47:26,841 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:sortAndSpill(1641)) - Finished spill 0
2018-09-19 14:47:26,853 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:done(1001)) - Task:attempt_local637214210_0001_m_000000_0 is done. And is in the process of committing
2018-09-19 14:47:26,860 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - map
2018-09-19 14:47:26,860 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:sendDone(1121)) - Task 'attempt_local637214210_0001_m_000000_0' done.
2018-09-19 14:47:26,860 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(249)) - Finishing task: attempt_local637214210_0001_m_000000_0
2018-09-19 14:47:26,860 INFO  [Thread-8] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(456)) - map task executor complete.
2018-09-19 14:47:26,862 INFO  [Thread-8] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(448)) - Waiting for reduce tasks
2018-09-19 14:47:26,862 INFO  [pool-7-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:run(302)) - Starting task: attempt_local637214210_0001_r_000000_0
2018-09-19 14:47:26,877 INFO  [pool-7-thread-1] util.ProcfsBasedProcessTree (ProcfsBasedProcessTree.java:isAvailable(181)) - ProcfsBasedProcessTree currently is supported only on Linux.
2018-09-19 14:47:26,953 INFO  [pool-7-thread-1] mapred.Task (Task.java:initialize(587)) -  Using ResourceCalculatorProcessTree : org.apache.hadoop.yarn.util.WindowsBasedProcessTree@40fffb6a
2018-09-19 14:47:26,956 INFO  [pool-7-thread-1] mapred.ReduceTask (ReduceTask.java:run(362)) - Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1d34704a
2018-09-19 14:47:26,964 INFO  [pool-7-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:<init>(193)) - MergerManager: memoryLimit=1503238528, maxSingleShuffleLimit=375809632, mergeThreshold=992137472, ioSortFactor=10, memToMemMergeOutputsThreshold=10
2018-09-19 14:47:26,966 INFO  [EventFetcher for fetching Map Completion Events] reduce.EventFetcher (EventFetcher.java:run(61)) - attempt_local637214210_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
2018-09-19 14:47:26,985 INFO  [localfetcher#1] reduce.LocalFetcher (LocalFetcher.java:copyMapOutput(140)) - localfetcher#1 about to shuffle output of map attempt_local637214210_0001_m_000000_0 decomp: 395 len: 399 to MEMORY
2018-09-19 14:47:26,989 INFO  [localfetcher#1] reduce.InMemoryMapOutput (InMemoryMapOutput.java:shuffle(100)) - Read 395 bytes from map-output for attempt_local637214210_0001_m_000000_0
2018-09-19 14:47:26,990 INFO  [localfetcher#1] reduce.MergeManagerImpl (MergeManagerImpl.java:closeInMemoryFile(307)) - closeInMemoryFile -> map-output of size: 395, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->395
2018-09-19 14:47:26,991 INFO  [EventFetcher for fetching Map Completion Events] reduce.EventFetcher (EventFetcher.java:run(76)) - EventFetcher is interrupted.. Returning
2018-09-19 14:47:26,992 INFO  [pool-7-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 1 / 1 copied.
2018-09-19 14:47:26,992 INFO  [pool-7-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(667)) - finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
2018-09-19 14:47:27,008 INFO  [pool-7-thread-1] mapred.Merger (Merger.java:merge(591)) - Merging 1 sorted segments
2018-09-19 14:47:27,008 INFO  [pool-7-thread-1] mapred.Merger (Merger.java:merge(690)) - Down to the last merge-pass, with 1 segments left of total size: 386 bytes
2018-09-19 14:47:27,016 INFO  [pool-7-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(742)) - Merged 1 segments, 395 bytes to disk to satisfy reduce memory limit
2018-09-19 14:47:27,017 INFO  [pool-7-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(772)) - Merging 1 files, 399 bytes from disk
2018-09-19 14:47:27,018 INFO  [pool-7-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(787)) - Merging 0 segments, 0 bytes from memory into reduce
2018-09-19 14:47:27,018 INFO  [pool-7-thread-1] mapred.Merger (Merger.java:merge(591)) - Merging 1 sorted segments
2018-09-19 14:47:27,019 INFO  [pool-7-thread-1] mapred.Merger (Merger.java:merge(690)) - Down to the last merge-pass, with 1 segments left of total size: 386 bytes
2018-09-19 14:47:27,019 INFO  [pool-7-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 1 / 1 copied.
2018-09-19 14:47:27,020 INFO  [pool-7-thread-1] zookeeper.RecoverableZooKeeper (RecoverableZooKeeper.java:<init>(120)) - Process identifier=hconnection-0xb7fd20 connecting to ZooKeeper ensemble=node1:2181,node2:2181,node3:2181
2018-09-19 14:47:27,020 INFO  [pool-7-thread-1] zookeeper.ZooKeeper (ZooKeeper.java:<init>(438)) - Initiating client connection, connectString=node1:2181,node2:2181,node3:2181 sessionTimeout=90000 watcher=hconnection-0xb7fd200x0, quorum=node1:2181,node2:2181,node3:2181, baseZNode=/hbase
2018-09-19 14:47:27,027 INFO  [pool-7-thread-1-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:logStartConnect(975)) - Opening socket connection to server node1/192.168.30.131:2181. Will not attempt to authenticate using SASL (unknown error)
2018-09-19 14:47:27,028 INFO  [pool-7-thread-1-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:primeConnection(852)) - Socket connection established to node1/192.168.30.131:2181, initiating session
2018-09-19 14:47:27,031 INFO  [pool-7-thread-1-SendThread(node1:2181)] zookeeper.ClientCnxn (ClientCnxn.java:onConnected(1235)) - Session establishment complete on server node1/192.168.30.131:2181, sessionid = 0x165f06de077000f, negotiated timeout = 40000
2018-09-19 14:47:27,032 INFO  [pool-7-thread-1] mapreduce.TableOutputFormat (TableOutputFormat.java:<init>(108)) - Created table instance for stat
2018-09-19 14:47:27,033 INFO  [pool-7-thread-1] Configuration.deprecation (Configuration.java:warnOnceIfDeprecated(1019)) - mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
2018-09-19 14:47:27,070 INFO  [pool-7-thread-1] client.ConnectionManager$HConnectionImplementation (ConnectionManager.java:closeZooKeeperWatcher(1710)) - Closing zookeeper sessionid=0x165f06de077000f
2018-09-19 14:47:27,074 INFO  [pool-7-thread-1] zookeeper.ZooKeeper (ZooKeeper.java:close(684)) - Session: 0x165f06de077000f closed
2018-09-19 14:47:27,074 INFO  [pool-7-thread-1-EventThread] zookeeper.ClientCnxn (ClientCnxn.java:run(512)) - EventThread shut down
2018-09-19 14:47:27,085 INFO  [pool-7-thread-1] mapred.Task (Task.java:done(1001)) - Task:attempt_local637214210_0001_r_000000_0 is done. And is in the process of committing
2018-09-19 14:47:27,086 INFO  [pool-7-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - reduce > reduce
2018-09-19 14:47:27,086 INFO  [pool-7-thread-1] mapred.Task (Task.java:sendDone(1121)) - Task 'attempt_local637214210_0001_r_000000_0' done.
2018-09-19 14:47:27,086 INFO  [pool-7-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:run(325)) - Finishing task: attempt_local637214210_0001_r_000000_0
2018-09-19 14:47:27,086 INFO  [Thread-8] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(456)) - reduce task executor complete.
2018-09-19 14:47:27,526 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1355)) - Job job_local637214210_0001 running in uber mode : false
2018-09-19 14:47:27,530 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1362)) -  map 100% reduce 100%
2018-09-19 14:47:27,532 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1373)) - Job job_local637214210_0001 completed successfully
2018-09-19 14:47:27,548 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1380)) - Counters: 51
	File System Counters
		FILE: Number of bytes read=1052
		FILE: Number of bytes written=571487
		FILE: Number of read operations=0
		FILE: Number of large read operations=0
		FILE: Number of write operations=0
		HDFS: Number of bytes read=0
		HDFS: Number of bytes written=0
		HDFS: Number of read operations=0
		HDFS: Number of large read operations=0
		HDFS: Number of write operations=0
	Map-Reduce Framework
		Map input records=5
		Map output records=32
		Map output bytes=329
		Map output materialized bytes=399
		Input split bytes=61
		Combine input records=0
		Combine output records=0
		Reduce input groups=24
		Reduce shuffle bytes=399
		Reduce input records=32
		Reduce output records=24
		Spilled Records=64
		Shuffled Maps =1
		Failed Shuffles=0
		Merged Map outputs=1
		GC time elapsed (ms)=4
		CPU time spent (ms)=0
		Physical memory (bytes) snapshot=0
		Virtual memory (bytes) snapshot=0
		Total committed heap usage (bytes)=577241088
	HBase Counters
		BYTES_IN_REMOTE_RESULTS=376
		BYTES_IN_RESULTS=376
		MILLIS_BETWEEN_NEXTS=18
		NOT_SERVING_REGION_EXCEPTION=0
		NUM_SCANNER_RESTARTS=0
		NUM_SCAN_RESULTS_STALE=0
		REGIONS_SCANNED=1
		REMOTE_RPC_CALLS=3
		REMOTE_RPC_RETRIES=0
		ROWS_FILTERED=0
		ROWS_SCANNED=5
		RPC_CALLS=3
		RPC_RETRIES=0
	Shuffle Errors
		BAD_ID=0
		CONNECTION=0
		IO_ERROR=0
		WRONG_LENGTH=0
		WRONG_MAP=0
		WRONG_REDUCE=0
	File Input Format Counters 
		Bytes Read=0
	File Output Format Counters 
		Bytes Written=0

Process finished with exit code 0

（原文此处为一张运行结果截图，图片未能随文字一并提取，仅保留占位说明）

（原文此处为一张运行结果截图，图片未能随文字一并提取，仅保留占位说明）

猜你喜欢

转载自blog.csdn.net/u012292754/article/details/82760528
今日推荐