Notes on setting up an Eclipse-based Hadoop development environment

Common problems:

1) Error: Call to localhost/127.0.0.1:9001 failed on connection exception: java.net.ConnectException: Connection refused

Check whether the NameNode has started. If it has not, reformat HDFS with $ bin/hadoop namenode -format and then start the daemons again (note that formatting erases any existing HDFS data).
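
A typical recovery sequence looks like the following; a minimal sketch, assuming a Hadoop 1.x single-node setup run from the Hadoop installation directory:

$ bin/stop-all.sh               # stop any half-started daemons
$ bin/hadoop namenode -format   # reformat HDFS (erases existing HDFS data)
$ bin/start-all.sh              # start NameNode/DataNode and JobTracker/TaskTracker
$ jps                           # verify that the daemons are actually running

If the daemons are running but the error persists, check that the Map/Reduce location configured in the Eclipse plugin matches mapred.job.tracker in conf/mapred-site.xml (port 9001 in the message above is the JobTracker port).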

2) If the output directory already exists, the job will fail: Hadoop refuses to write into an existing output path, even an empty one, and throws FileAlreadyExistsException. The fix is to delete the directory before rerunning the job.
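
For an HDFS output directory, deleting it from the shell before resubmitting is the usual fix; a sketch, assuming Hadoop 1.x shell syntax and an output directory named out (a placeholder):

$ bin/hadoop fs -rmr out    # recursively delete the previous job's output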


WordCount.java

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class WordCount {

  // Mapper: splits each input line into tokens and emits a (word, 1) pair per token.
  public static class TokenizerMapper
       extends Mapper<Object, Text, Text, IntWritable> {

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(Object key, Text value, Context context
                    ) throws IOException, InterruptedException {
      StringTokenizer itr = new StringTokenizer(value.toString());
      while (itr.hasMoreTokens()) {
        word.set(itr.nextToken());
        context.write(word, one);
      }
    }
  }

  // Reducer (also used as the combiner): sums the counts for each word.
  public static class IntSumReducer
       extends Reducer<Text, IntWritable, Text, IntWritable> {
    private IntWritable result = new IntWritable();

    public void reduce(Text key, Iterable<IntWritable> values,
                       Context context
                       ) throws IOException, InterruptedException {
      int sum = 0;
      for (IntWritable val : values) {
        sum += val.get();
      }
      result.set(sum);
      context.write(key, result);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // GenericOptionsParser strips the standard Hadoop options (-fs, -jt, -D ...)
    // and leaves only the input and output paths.
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
      System.err.println("Usage: wordcount <in> <out>");
      System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
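
To run the job from the command line rather than the Eclipse plugin, the class can be compiled against the Hadoop jar, packaged, and submitted; a sketch, assuming Hadoop 1.x, with the core jar name and the in/out directories as placeholders:

$ mkdir wordcount_classes
$ javac -classpath hadoop-core-1.2.1.jar -d wordcount_classes WordCount.java
$ jar -cvf wordcount.jar -C wordcount_classes/ .
$ bin/hadoop jar wordcount.jar WordCount in out

Each distinct word of the input files, followed by its count, ends up in part-r-00000 under the out directory.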


Reposted from blog.csdn.net/xiaoyu_2011/article/details/45533631