使用mapreduce求解共同好友

1.题目要求:

A:B,C,D,F,E,O
B:A,C,E,K
C:F,A,D,I
D:A,E,F,L
E:B,C,D,M,L
F:A,B,C,D,E,O,M
G:A,C,D,E,F
H:A,C,D,E,O
I:A,O
J:B,O
K:A,C,D
L:D,E,F
M:E,F,G
O:A,H,I,J

求出哪些人两两之间有共同好友,及他俩的共同好友都是谁
比如:
A-B :  C E

2.解题思路:

分成两步来求解。第一步:求出形如 <好友,人> 的键值对,即哪些人把某人当作好友;把同一个好友对应的人员列表两两组合,即为所需解答中的一条

第二步:把这些好友列表中两两组合的统计出来即可

3.代码如下:

第一步:

package cn.lyx.bigdata.mr.commonfriends;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class CommonFriendsStepOne {

	/**
	 * Step one mapper: for each input line "person:f1,f2,..." emit
	 * {@code <friend, person>}, so the reducer can collect, per friend,
	 * all people who list that friend.
	 */
	static class CommonFriendsStepOneMapper extends Mapper<LongWritable, Text, Text, Text> {

		// Reused output holders to avoid allocating a new Text per record.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		@Override
		protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			// Input line format: A:B,C,D,F,E,O
			String line = value.toString().trim();
			String[] personFriends = line.split(":");
			// Skip blank/malformed lines instead of throwing ArrayIndexOutOfBoundsException.
			if (personFriends.length < 2) {
				return;
			}
			String person = personFriends[0];
			outValue.set(person);
			for (String friend : personFriends[1].split(",")) {
				if (friend.isEmpty()) {
					continue;
				}
				// Emit <friend, person>.
				outKey.set(friend);
				context.write(outKey, outValue);
			}
		}
	}

	/**
	 * Step one reducer: concatenate, for one friend, every person that
	 * listed him/her. Output line format: "A\tI,K,C,B,G,F,H,O,D"
	 * (comma-separated, no trailing separator).
	 */
	static class CommonFriendsStepOneReducer extends Reducer<Text, Text, Text, Text> {

		@Override
		protected void reduce(Text friend, Iterable<Text> persons, Context context) throws IOException, InterruptedException {
			// StringBuilder: no synchronization needed inside a single reduce call.
			StringBuilder sb = new StringBuilder();
			for (Text person : persons) {
				if (sb.length() > 0) {
					sb.append(',');
				}
				sb.append(person);
			}
			context.write(friend, new Text(sb.toString()));
		}

	}

	public static void main(String[] args) throws Exception {

		Configuration conf = new Configuration();

		Job job = Job.getInstance(conf);
		job.setJarByClass(CommonFriendsStepOne.class);

		// Map output and final output are both <Text, Text>,
		// so setting the job-level output classes covers both.
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		job.setMapperClass(CommonFriendsStepOneMapper.class);
		job.setReducerClass(CommonFriendsStepOneReducer.class);

		// Use command-line paths when given; fall back to the original defaults.
		String input = args.length > 0 ? args[0] : "F:/cfin/cf.txt";
		String output = args.length > 1 ? args[1] : "F:/cfout/step1";
		FileInputFormat.setInputPaths(job, new Path(input));
		FileOutputFormat.setOutputPath(job, new Path(output));

		// Propagate the job result as the process exit code.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}

第二步:

package cn.lyx.bigdata.mr.commonfriends;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.Arrays;

public class CommonFriendsStepTwo {

	/**
	 * Step two mapper: input is step one's output, one line per friend:
	 * "friend\tperson,person,person,". For every pair of persons sharing
	 * this friend, emit {@code <person1-person2, friend>}. The person list
	 * is sorted first so a given pair always produces the same key
	 * (A-B, never B-A) and therefore lands in the same reduce group.
	 */
	static class CommonFriendsStepTwoMapper extends Mapper<LongWritable, Text, Text, Text> {

		// Reused output holders to avoid allocating a new Text per emitted pair.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		@Override
		protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String line = value.toString();
			String[] friendPersons = line.split("\t");
			// Skip blank/malformed lines instead of throwing ArrayIndexOutOfBoundsException.
			if (friendPersons.length < 2) {
				return;
			}

			String friend = friendPersons[0];
			// String.split drops trailing empty strings, so a trailing comma is harmless.
			String[] persons = friendPersons[1].split(",");

			Arrays.sort(persons);
			outValue.set(friend);

			for (int i = 0; i < persons.length - 1; i++) {
				for (int j = i + 1; j < persons.length; j++) {
					// Emit <person-person, friend>: all friends shared by the
					// same pair converge on one reduce call.
					outKey.set(persons[i] + "-" + persons[j]);
					context.write(outKey, outValue);
				}
			}
		}
	}

	/**
	 * Step two reducer: concatenate all common friends of one pair.
	 * Output line format: "A-B\tC E" (space-separated, no trailing space).
	 */
	static class CommonFriendsStepTwoReducer extends Reducer<Text, Text, Text, Text> {

		@Override
		protected void reduce(Text personPerson, Iterable<Text> friends, Context context) throws IOException, InterruptedException {
			// StringBuilder: no synchronization needed inside a single reduce call.
			StringBuilder sb = new StringBuilder();
			for (Text friend : friends) {
				if (sb.length() > 0) {
					sb.append(' ');
				}
				sb.append(friend);
			}
			context.write(personPerson, new Text(sb.toString()));
		}

	}

	public static void main(String[] args) throws Exception {

		Configuration conf = new Configuration();

		Job job = Job.getInstance(conf);
		job.setJarByClass(CommonFriendsStepTwo.class);

		// Map output and final output are both <Text, Text>,
		// so setting the job-level output classes covers both.
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		job.setMapperClass(CommonFriendsStepTwoMapper.class);
		job.setReducerClass(CommonFriendsStepTwoReducer.class);

		// Use command-line paths when given; fall back to the original defaults.
		String input = args.length > 0 ? args[0] : "F:/cfout/step1/part-r-00000";
		String output = args.length > 1 ? args[1] : "F:/cfout/step2";
		FileInputFormat.setInputPaths(job, new Path(input));
		FileOutputFormat.setOutputPath(job, new Path(output));

		// Propagate the job result as the process exit code.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}

输入文本:

A:B,C,D,F,E,O
B:A,C,E,K
C:F,A,D,I
D:A,E,F,L
E:B,C,D,M,L
F:A,B,C,D,E,O,M
G:A,C,D,E,F
H:A,C,D,E,O
I:A,O
J:B,O
K:A,C,D
L:D,E,F
M:E,F,G
O:A,H,I,J

第一次输出结果:

A	I,K,C,B,G,F,H,O,D,
B	A,F,J,E,
C	A,E,B,H,F,G,K,
D	G,C,K,A,L,F,E,H,
E	G,M,L,H,A,F,B,D,
F	L,M,D,C,G,A,
G	M,
H	O,
I	O,C,
J	O,
K	B,
L	D,E,
M	E,F,
O	A,H,I,J,F,

第二步输出结果(最终结果):

A-B	E C 
A-C	D F 
A-D	E F 
A-E	D B C 
A-F	O B C D E 
A-G	F E C D 
A-H	E C D O 
A-I	O 
A-J	O B 
A-K	D C 
A-L	F E D 
A-M	E F 
B-C	A 
B-D	A E 
B-E	C 
B-F	E A C 
B-G	C E A 
B-H	A E C 
B-I	A 
B-K	C A 
B-L	E 
B-M	E 
B-O	A 
C-D	A F 
C-E	D 
C-F	D A 
C-G	D F A 
C-H	D A 
C-I	A 
C-K	A D 
C-L	D F 
C-M	F 
C-O	I A 
D-E	L 
D-F	A E 
D-G	E A F 
D-H	A E 
D-I	A 
D-K	A 
D-L	E F 
D-M	F E 
D-O	A 
E-F	D M C B 
E-G	C D 
E-H	C D 
E-J	B 
E-K	C D 
E-L	D 
F-G	D C A E 
F-H	A D O E C 
F-I	O A 
F-J	B O 
F-K	D C A 
F-L	E D 
F-M	E 
F-O	A 
G-H	D C E A 
G-I	A 
G-K	D A C 
G-L	D F E 
G-M	E F 
G-O	A 
H-I	O A 
H-J	O 
H-K	A C D 
H-L	D E 
H-M	E 
H-O	A 
I-J	O 
I-K	A 
I-O	A 
K-L	D 
K-O	A 
L-M	E F 

4.另外一种思路:分别遍历相互之间的好友,然后对比有无共同好友,并记录,这样时间复杂度高一些,但是可以一步求出结果,代码如下:

package cn.lyx.bigdata.mr.commonfriend;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

/**
 * Created by lyx on 2018/5/18.
 */
/**
 * One-step common-friends solution: every input record is routed to a single
 * reduce group under a constant key, so the lone reduce call sees every
 * person's friend list and can intersect them pairwise.
 *
 * Fixes over the original version:
 * 1. The mapper emitted a raw {@code HashMap} as its output value. HashMap is
 *    not a Hadoop Writable, and the job declared {@code Text} as the map
 *    output value class — the job could never run. The mapper now emits Text.
 * 2. The reducer iterated with {@code i < ownersStr.length()} (a character
 *    count) instead of the owner-array length — out-of-bounds / wrong bounds.
 * 3. The reduce key was the owner itself, so each group held exactly one
 *    person and no pair could ever be formed.
 * 4. {@code setOutputValueClass(Text.class)} contradicted the reducer's
 *    {@code NullWritable} output value.
 *
 * Output format per line: "A-B:C,E" (same as the original intended format).
 */
public class Commonfriend2 {

    static class CommonfriendMapper extends Mapper<LongWritable, Text, Text, Text> {

        // Constant key: funnels every record into one reduce group.
        private static final Text ALL = new Text("all");
        private final Text outValue = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Forward the whole line ("A:B,C,D,F,E,O") unchanged; the reducer parses it.
            String line = value.toString().trim();
            if (!line.isEmpty() && line.contains(":")) {
                outValue.set(line);
                context.write(ALL, outValue);
            }
        }
    }

    static class CommonfriendReducer extends Reducer<Text, Text, Text, NullWritable> {

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {

            // owner -> friend list; TreeMap gives a deterministic (sorted) pair order.
            Map<String, List<String>> friendsOf = new TreeMap<String, List<String>>();
            for (Text value : values) {
                String[] ownerFriends = value.toString().split(":");
                if (ownerFriends.length < 2) {
                    continue; // skip malformed records
                }
                friendsOf.put(ownerFriends[0], Arrays.asList(ownerFriends[1].split(",")));
            }

            List<String> owners = new ArrayList<String>(friendsOf.keySet());

            // Pairwise intersection of the two friend lists: O(p^2 * f^2) overall,
            // acceptable only because everything runs in a single reduce call.
            for (int i = 0; i < owners.size() - 1; i++) {
                for (int j = i + 1; j < owners.size(); j++) {
                    String aOwner = owners.get(i);
                    String bOwner = owners.get(j);
                    List<String> bFriends = friendsOf.get(bOwner);

                    StringBuilder common = new StringBuilder();
                    for (String friend : friendsOf.get(aOwner)) {
                        if (bFriends.contains(friend)) {
                            common.append(',').append(friend);
                        }
                    }

                    // Only emit pairs that actually share at least one friend.
                    if (common.length() > 0) {
                        context.write(new Text(aOwner + "-" + bOwner + ":" + common.substring(1)),
                                NullWritable.get());
                    }
                }
            }
        }

    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        // Locate the jar containing this driver class.
        job.setJarByClass(Commonfriend2.class);

        job.setMapperClass(CommonfriendMapper.class);
        job.setReducerClass(CommonfriendReducer.class);

        // Map output kv types — must match the mapper's declared generics.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        // Final output kv types — must match the reducer's declared generics.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        // Everything must reach one reduce call for the pairwise comparison to work.
        job.setNumReduceTasks(1);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}


猜你喜欢

转载自blog.csdn.net/l1212xiao/article/details/80400783
今日推荐