MapReduce algorithms: reduce-side join
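
This job joins two files on HDFS: a company file (hdfs://master:9000/file/f) whose lines have the form company-name,addr-id, and a small address lookup file (hdfs://master:9000/file/a) whose lines have the form addr-id,addr-name. The mapper emits (company-name, addr-id); the reducer loads the address file from the distributed cache in its setup() method and replaces each addr-id with the matching address name, so the output pairs each company with its address. As an illustration (the concrete records below are assumed, based only on the comments in the code), a company file such as

Beijing Red Star,1
Guangzhou Honda,2

joined with an address file such as

1,Beijing
2,Guangzhou

would produce output like

Beijing Red Star    Beijing
Guangzhou Honda     Guangzhou

(key and value are separated by a tab, the TextOutputFormat default).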

package mapreduce_join;

import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.LineReader;

public class reduce_join {  // reduce-side join of a company file with an address lookup table

    static String INPUT_PATH = "hdfs://master:9000/file/f";   // company records: company-name,addr-id
    static String OUTPUT_PATH = "hdfs://master:9000/output";

    static class MyMapper extends Mapper<Object, Object, Text, Text> {

        Text output_key = new Text();     // company name
        Text output_value = new Text();   // address id

        protected void map(Object key, Object value, Context context)
                throws IOException, InterruptedException {
            // each line of the company file f has the form: company-name,addr-id
            String[] tokens = value.toString().split(",");

            if (tokens != null && tokens.length == 2) {
                output_key.set(tokens[0]);     // company name
                output_value.set(tokens[1]);   // addr-id
                // alternative, tagged output for a classic tagged reduce-side join:
                // output_value.set(1 + "," + tokens[0] + "," + tokens[1]);   // e.g. "1,Beijing Red Star,1"
                context.write(output_key, output_value);
            }
        }
    }

    static class MyReduce extends Reducer<Text, Text, Text, Text> {

        // setup() runs once per reduce task, reduce() runs once per key, cleanup() runs once at the end

        Text output_key = new Text();
        Text output_value = new Text();

        // lookup table built from the address file a: addr-id -> addr-name
        Map<String, String> addMap = new HashMap<String, String>();
        private LineReader lineReader;

        protected void setup(Context context) throws IOException, InterruptedException {
            // open the small address file that was registered in the distributed cache
            URI uri = context.getCacheFiles()[0];
            Path path = new Path(uri);
            System.out.println("path=" + uri.toString());
            FileSystem fs = path.getFileSystem(context.getConfiguration());

            lineReader = new LineReader(fs.open(path));

            // each line of file a has the form: addr-id,addr-name
            Text line = new Text();
            while (lineReader.readLine(line) > 0) {
                String[] tokens = line.toString().split(",");
                if (tokens != null && tokens.length == 2)
                    addMap.put(tokens[0], tokens[1]);
            }
            lineReader.close();

            System.out.println("addMap.size=" + addMap.size());
        }

        // key = company name, values = {addr-id}; replace the id with its address name
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            if (values == null) return;

            String addrName = addMap.get(values.iterator().next().toString());
            if (addrName == null) return;   // no matching address record

            output_value.set(addrName);
            context.write(key, output_value);
        }
    }


    public static void main(String[] args) throws Exception {

        Path outputpath = new Path(OUTPUT_PATH);
        Path cacheFile = new Path("hdfs://master:9000/file/a");   // small address file: addr-id,addr-name
        Configuration conf = new Configuration();

        // delete the output directory if it already exists, so the job can be rerun
        FileSystem fs = outputpath.getFileSystem(conf);
        if (fs.exists(outputpath)) {
            fs.delete(outputpath, true);
        }

        Job job = Job.getInstance(conf);
        job.setJarByClass(reduce_join.class);   // ship the containing jar to the cluster

        FileInputFormat.setInputPaths(job, INPUT_PATH);
        FileOutputFormat.setOutputPath(job, outputpath);

        // register the address file in the distributed cache; the reducer reads it in setup()
        URI uri = cacheFile.toUri();
        job.setCacheFiles(new URI[]{uri});

        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        job.waitForCompletion(true);
    }


}
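
To run the job, a minimal sketch (the jar name, the local file names, and these commands are assumptions, not part of the original post): upload the two input files to HDFS, submit the packaged jar, and read the joined result from the output directory.

hdfs dfs -mkdir -p /file
hdfs dfs -put company.txt /file/f
hdfs dfs -put address.txt /file/a
hadoop jar reduce_join.jar mapreduce_join.reduce_join
hdfs dfs -cat /output/part-r-00000

Because main() deletes /output before submitting, the job can be rerun without clearing the directory by hand.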

Reposted from blog.csdn.net/oceansidexue/article/details/79212881