Implementing the Top-K Algorithm with MapReduce: Principle and Code

1. Map phase

In the map method, each input number is inserted into a TreeMap that is kept no larger than K entries. After every map call the size of the TreeMap is compared with K, and whenever it exceeds K the smallest key is removed. When the map task finishes, the cleanup method runs and passes the mapper's remaining K values (its local top K) on to the reduce task.
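As a rough standalone sketch of that trick (plain Java, no Hadoop; the small K and the sample numbers are only for illustration, while the job below uses K = 100):

import java.util.TreeMap;

public class TopKSketch {
	public static void main(String[] args) {
		int k = 3; // illustrative only; the MapReduce job below uses K = 100
		TreeMap<Long, Long> tree = new TreeMap<Long, Long>();
		for (long v : new long[]{7, 42, 3, 99, 15, 8, 60}) {
			tree.put(v, v);                   // keys are kept in ascending order
			if (tree.size() > k)
				tree.remove(tree.firstKey()); // drop the current smallest
		}
		System.out.println(tree.keySet());    // [42, 60, 99] -- the 3 largest
	}
}

Note that because each number is used as its own key, duplicate values collapse into a single TreeMap entry; the full job below behaves the same way.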

2. Reduce phase

In the reduce method, the candidate values passed on by the mappers are placed into a TreeMap one by one, again keeping only the K largest. Because a TreeMap is backed by a red-black tree, its keys stay sorted: firstKey returns the smallest of the K values and lastKey the largest, so the result can be read out in ascending order (the normal key set) or in descending order (descendingKeySet, which the code below uses). This yields the top K numbers.
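A minimal sketch of those ordering options on an already-trimmed TreeMap (plain Java; the sample values are illustrative):

import java.util.TreeMap;

public class OrderSketch {
	public static void main(String[] args) {
		TreeMap<Long, Long> tree = new TreeMap<Long, Long>();
		for (long v : new long[]{42, 60, 99}) {
			tree.put(v, v);
		}
		System.out.println(tree.firstKey());         // 42 -- smallest key
		System.out.println(tree.lastKey());          // 99 -- largest key
		System.out.println(tree.keySet());           // [42, 60, 99]  ascending
		System.out.println(tree.descendingKeySet()); // [99, 60, 42]  descending, as the reducer below emits
	}
}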

3. Code: finding the 100 largest numbers among 10,000,000 values.

import java.io.IOException;
import java.net.URI;
import java.util.TreeMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;

public class TopKAapp {

	private static final String INPUT_PATH = "hdfs://xxx/topk_input";
	private static final String OUT_PATH = "hdfs://xxx/topk_out";

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		final FileSystem fileSystem = FileSystem.get(new URI(INPUT_PATH), conf);
		final Path outPath = new Path(OUT_PATH);
		// remove the output directory if it is left over from a previous run
		if (fileSystem.exists(outPath)) {
			fileSystem.delete(outPath, true);
		}

		final Job job = new Job(conf, TopKAapp.class.getSimpleName());
		FileInputFormat.setInputPaths(job, INPUT_PATH);
		job.setMapperClass(MyMapper.class);
		job.setPartitionerClass(HashPartitioner.class);
		job.setNumReduceTasks(1); // a single reducer sees every mapper's candidates
		job.setReducerClass(MyReducer.class);
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(LongWritable.class);
		FileOutputFormat.setOutputPath(job, new Path(OUT_PATH));
		job.setOutputFormatClass(TextOutputFormat.class);
		job.waitForCompletion(true);
	}

	static class MyMapper extends Mapper<LongWritable, Text, NullWritable, LongWritable> {

		public static final int K = 100;
		private TreeMap<Long, Long> tree = new TreeMap<Long, Long>();

		@Override
		protected void map(LongWritable key, Text text, Context context) throws IOException, InterruptedException {
			long temp = Long.parseLong(text.toString());
			tree.put(temp, temp);
			// keep only the K largest values seen by this mapper
			if (tree.size() > K)
				tree.remove(tree.firstKey());
		}

		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// after all input is mapped, emit this mapper's local top K
			for (Long text : tree.values()) {
				context.write(NullWritable.get(), new LongWritable(text));
			}
		}
	}

	static class MyReducer extends Reducer<NullWritable, LongWritable, NullWritable, LongWritable> {

		public static final int K = 100;
		private TreeMap<Long, Long> tree = new TreeMap<Long, Long>();

		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// write the global top K in descending order
			for (Long val : tree.descendingKeySet()) {
				context.write(NullWritable.get(), new LongWritable(val));
			}
		}

		@Override
		protected void reduce(NullWritable key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
			// merge the candidates from every mapper, again keeping only the K largest
			for (LongWritable value : values) {
				tree.put(value.get(), value.get());
				if (tree.size() > K)
					tree.remove(tree.firstKey());
			}
		}
	}
}
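To try the job end to end, one possible workflow (not from the original post; the helper class and file name are hypothetical) is to generate the ten million input numbers locally, upload them to the HDFS input directory, and then submit the jar:

import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;

// Hypothetical helper, not part of the original post: writes 10,000,000 random
// non-negative longs, one per line, which is the format the mapper parses.
public class GenerateTopKInput {
	public static void main(String[] args) throws IOException {
		Random random = new Random();
		try (FileWriter writer = new FileWriter("topk_input.txt")) {
			for (int i = 0; i < 10000000; i++) {
				writer.write((random.nextLong() & Long.MAX_VALUE) + "\n");
			}
		}
	}
}

The file can then be copied into hdfs://xxx/topk_input (for example with hadoop fs -put) and the job submitted with hadoop jar, naming TopKAapp as the main class.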


Reposted from blog.csdn.net/wilde123/article/details/118878923