Submitting a Hadoop job from the IDE

package WordCount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.BasicConfigurator;

public class IDES {

    public static void main(String[] args) throws Exception {
        BasicConfigurator.configure(); // quick default log4j setup so job logs show in the IDE console
        System.setProperty("HADOOP_USER_NAME", "root"); // set the HDFS user via a JVM system property


        Configuration conf = new Configuration();
        // By default the job runs locally and picks up the Hadoop configuration on the Windows machine,
        // so these two settings can be omitted:
        //conf.set("fs.defaultFS", "file:///");
        //conf.set("mapreduce.framework.name", "local");

        // Run on the cluster (HDFS + YARN)
        conf.set("fs.defaultFS", "hdfs://192.168.56.1:9000");
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.hostname", "192.168.56.1");
        conf.set("mapreduce.app-submission.cross-platform", "true"); // allow submission from Windows

        Job job = Job.getInstance(conf);

        // The three pieces: the submitter (this class), the mapper and the reducer
//        job.setJarByClass(IDES.class); // works when the job jar is already on the classpath
        job.setJar("d:/wc.jar"); // point directly at the packaged jar to ship to the cluster
        job.setMapperClass(CountMapper.class);
        job.setReducerClass(CountReducer.class);
        // Key/value output types of the mapper and the reducer
        job.setMapOutputKeyClass(Text.class);   // mapper
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);      // reducer
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.setInputPaths(job, new Path("/wordcount/input"));
        FileOutputFormat.setOutputPath(job, new Path("/wordcount/output")); // must not already exist on HDFS

        job.setNumReduceTasks(3); // three reducers, so three output files
        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}

The CountMapper and CountReducer classes can be downloaded from the attached resources; a minimal sketch is shown below.
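The original post does not reproduce the mapper and reducer source, so the following is only a sketch of a standard WordCount pair that matches the Text/IntWritable output types configured in the driver above. It assumes whitespace tokenization and the default TextInputFormat (LongWritable offset, Text line) as input; the actual downloadable versions may differ. Each class goes in its own file under the WordCount package.

// CountMapper.java
package WordCount;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Emits (word, 1) for every whitespace-separated token in an input line.
public class CountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        StringTokenizer tokens = new StringTokenizer(value.toString());
        while (tokens.hasMoreTokens()) {
            word.set(tokens.nextToken());
            context.write(word, ONE);
        }
    }
}

// CountReducer.java
package WordCount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Sums the counts emitted for each word.
public class CountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    private final IntWritable total = new IntWritable();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable v : values) {
            sum += v.get();
        }
        total.set(sum);
        context.write(key, total);
    }
}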

