Hadoop word count in Java

WordMap.java

package MyMap;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class WordMap extends Mapper<LongWritable,Text,Text,IntWritable> {

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        System.out.println("bbbbbbbbbbbbbbbbbbbbbbbb====>"+value.toString());
        String line = value.toString();
        String[] words = line.split(" ");
        for(String word : words) {
            context.write(new Text(word), new IntWritable(1));
        }
    }
}
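
The mapper above allocates a new Text and a new IntWritable for every word it emits. A common refinement, not part of the original post (the class name WordMapReusing is hypothetical), is to reuse the output Writables and split on runs of whitespace so that consecutive spaces do not produce empty keys:

package MyMap;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

// Hypothetical variant, not from the original post: same logic as WordMap,
// but reuses the output Writables and tolerates runs of whitespace.
public class WordMapReusing extends Mapper<LongWritable, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);
    private final Text outKey = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        for (String word : value.toString().split("\\s+")) {
            if (word.isEmpty()) continue; // leading whitespace can yield an empty token
            outKey.set(word);
            context.write(outKey, ONE);
        }
    }
}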

WordReduce.java

package MyReduce;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class WordReduce extends Reducer<Text, IntWritable,Text,IntWritable> {

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        // Sum the 1s emitted by the mapper for this word.
        int count = 0;
        for(IntWritable value : values) {
            count += value.get();
        }

        context.write(key, new IntWritable(count));
    }
}
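
For intuition only (this sketch is not part of the original post), the whole pipeline boils down to grouping the (word, 1) pairs by word and summing them. A minimal plain-Java equivalent, with an assumed two-line input, shows the result the job should produce:

import java.util.HashMap;
import java.util.Map;

// Plain-Java sketch of what happens between WordMap and WordReduce:
// the framework groups the (word, 1) pairs by key, the reducer sums them.
public class LocalWordCountSketch {
    public static void main(String[] args) {
        String[] lines = { "hello hadoop", "hello world" }; // assumed sample input
        Map<String, Integer> counts = new HashMap<>();
        for (String line : lines) {                  // the "map" step: split each line into words
            for (String word : line.split("\\s+")) {
                counts.merge(word, 1, Integer::sum); // the "shuffle + reduce" step: group and sum
            }
        }
        counts.forEach((w, c) -> System.out.println(w + "\t" + c));
        // Prints (order may vary): hadoop 1, hello 2, world 1
    }
}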

WordMain.java

package MyMain;

import MyMap.WordMap;
import MyReduce.WordReduce;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class WordMain {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // create the configuration object
        Configuration configuration = new Configuration();

        // create the job object
        Job job = Job.getInstance(configuration,"mineWordCount");

        System.out.println("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); // debug marker: the driver has started

        // set the class whose jar should be used to run the job
        job.setJarByClass(WordMain.class);

        // set the mapper class
        job.setMapperClass(WordMap.class);

        // set the reducer class
        job.setReducerClass(WordReduce.class);

        // set the map output key/value types
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // set the final (reduce) output key/value types
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // set the input and output paths
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // submit the job and wait for it to finish
        boolean b = job.waitForCompletion(true);

        if(b) {
            System.out.println("----------->success");
        } else {
            System.out.println("----------->error");
        }
    }
}
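
Two small additions are commonly made to a driver like this; neither is in the original post. Because summing counts is commutative and associative, WordReduce can also serve as a combiner on the map side, and exiting with a status code lets shell scripts detect failure. The first line would sit before waitForCompletion, the second after the success/error message:

// Hypothetical additions, not in the original post.
// Before job.waitForCompletion(true): let WordReduce also pre-aggregate on the map side.
job.setCombinerClass(WordReduce.class);

// After the success/error message: surface the result as the process exit code.
System.exit(b ? 0 : 1);

Once the three classes are packaged into a jar, the job is typically submitted with the hadoop jar command, passing the input path as args[0] and the output path as args[1]; the output directory must not already exist, or the job will fail at startup.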


Reposted from my.oschina.net/u/2490316/blog/1819242