  • Hadoop_24_MapReduce: Finding Mutual QQ Friends

    1. Social friend data analysis:

    Below is a QQ friend-list dataset. Before each colon is a user; after the colon are all of that user's friends (the friendships in this data are one-directional):
    A:B,C,D,F,E,O
    B:A,C,E,K
    C:F,A,D,I
    D:A,E,F,L
    E:B,C,D,M,L
    F:A,B,C,D,E,O,M
    G:A,C,D,E,F
    H:A,C,D,E,O
    I:A,O
    J:B,O
    K:A,C,D
    L:D,E,F
    M:E,F,G
    O:A,H,I,J
    Find every pair of users that shares at least one mutual friend, and list who those mutual friends are.

     Solution approach: to get the mutual friends of every pair, reason in reverse. If C is a mutual friend of A and B, then C appears in both A's and B's friend lists. So first find, for each friend, all users whose lists contain that friend; then every two-person combination of those users shares that friend. The worked example below illustrates the two steps, and a plain-Java sanity check of the same inversion follows it.

    c --> a  b  e  f  g  h  k   (step 1: find everyone whose friend list contains c)
    a-b  c
    a-e  c   (step 2: emit the pair as the key; after grouping, pair a-e collects its mutual friends c and d)

    d --> a  c  e  f  g  h  k
    a-c  d
    a-e  d
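
     Before moving to MapReduce, this inversion is easy to sanity-check in plain Java. The sketch below is a minimal, single-machine illustration (the class and variable names are ours, not part of the Hadoop job): it builds the friend-to-followers index (step 1) and then emits every sorted pair with the shared friend (step 2).

    import java.util.*;

    public class MutualFriendsCheck {
        // The same dataset as above, one "user:friendList" record per line
        static final String[] RAW = {
            "A:B,C,D,F,E,O", "B:A,C,E,K", "C:F,A,D,I", "D:A,E,F,L",
            "E:B,C,D,M,L", "F:A,B,C,D,E,O,M", "G:A,C,D,E,F", "H:A,C,D,E,O",
            "I:A,O", "J:B,O", "K:A,C,D", "L:D,E,F", "M:E,F,G", "O:A,H,I,J"
        };

        public static void main(String[] args) {
            // Step 1: invert the lists -- each friend maps to everyone who listed them
            Map<String, List<String>> friendToPersons = new TreeMap<>();
            for (String line : RAW) {
                String[] parts = line.split(":");
                for (String friend : parts[1].split(",")) {
                    friendToPersons.computeIfAbsent(friend, k -> new ArrayList<>()).add(parts[0]);
                }
            }
            // Step 2: sort each friend's "owners" and emit every two-person combination
            Map<String, Set<String>> pairToFriends = new TreeMap<>();
            for (Map.Entry<String, List<String>> e : friendToPersons.entrySet()) {
                List<String> persons = e.getValue();
                Collections.sort(persons);   // so B-C and C-B collapse to one key
                for (int i = 0; i < persons.size() - 1; i++) {
                    for (int j = i + 1; j < persons.size(); j++) {
                        pairToFriends.computeIfAbsent(persons.get(i) + "-" + persons.get(j),
                                k -> new TreeSet<>()).add(e.getKey());
                    }
                }
            }
            pairToFriends.forEach((pair, friends) -> System.out.println(pair + "\t" + friends));
        }
    }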

     Step 1 implementation:

    package cn.bigdata.hdfs.fensi;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    public class SharedFriendsStepOne {
        
        static class SharedFriendsStepOneMapper extends Mapper<LongWritable, Text, Text, Text>{
            @Override
            protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Input line, e.g. A:B,C,D,F,E,O (user before the colon, friend list after it)
            String line = value.toString();
            String[] person_friends = line.split(":");
            String person = person_friends[0];
            String friends = person_friends[1];
            
            for(String friend : friends.split(",")){
                // Emit <friend, person>: invert the relationship
                context.write(new Text(friend), new Text(person));
            }
            }
        }
        
        static class SharedFriendsStepOneReducer extends Reducer<Text, Text, Text, Text>{
            @Override
            protected void reduce(Text friend, Iterable<Text> persons, Context context) throws IOException, InterruptedException {
            //persons: everyone who has this friend in their list, e.g. c --> a b e f g h k
            StringBuilder sb = new StringBuilder();
            for(Text person : persons){
                sb.append(person).append(",");   // leaves a trailing comma; step two's split(",") yields no empty token for it
            }
            context.write(friend, new Text(sb.toString()));
            }
        }
        
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
    
            Job job = Job.getInstance(conf);
            job.setJarByClass(SharedFriendsStepOne.class);
    
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
            
            job.setMapperClass(SharedFriendsStepOneMapper.class);
            job.setReducerClass(SharedFriendsStepOneReducer.class);
    
            FileInputFormat.setInputPaths(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
    
        System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
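
     For the first input record A:B,C,D,F,E,O, the mapper emits <B,A>, <C,A>, <D,A>, <F,A>, <E,A>, and <O,A>. The shuffle then groups these pairs by friend, so all users who listed a given person arrive at the same reduce call, which concatenates them into lines like the ones below.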

     Run result:

    A    I,K,C,B,G,F,H,O,D,
    B    A,F,J,E,
    C    A,E,B,H,F,G,K,
    D    G,C,K,A,L,F,E,H,
    E    G,M,L,H,A,F,B,D,
    F    L,M,D,C,G,A,
    G    M,
    H    O,
    I    O,C,
    J    O,
    K    B,
    L    D,E,
    M    E,F,
    O    A,H,I,J,F,

    Step 2 implementation: emit every two people who share a common friend as a pair key.

    package cn.bigdata.hdfs.fensi;
    import java.io.IOException;
    import java.util.Arrays;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    
    public class SharedFriendsStepTwo {
    
        static class SharedFriendsStepTwoMapper extends Mapper<LongWritable, Text, Text, Text> {
    
        // The input here is the output of step one, e.g.:
        // A    I,K,C,B,G,F,H,O,D,
        // i.e. friend TAB person,person,person
            @Override
            protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    
            String line = value.toString();
            String[] friend_persons = line.split("\t");

            String friend = friend_persons[0];
            String[] persons = friend_persons[1].split(",");
            // Sort so that B-C and C-B map to the same key
            Arrays.sort(persons);
    
                for (int i = 0; i < persons.length - 1; i++) {
                    for (int j = i + 1; j < persons.length; j++) {
                    // Emit <person-person, friend>; all friends of the same pair reach the same reduce call
                        context.write(new Text(persons[i] + "-" + persons[j]), new Text(friend));
                    }
                }
            }
        }
    
        static class SharedFriendsStepTwoReducer extends Reducer<Text, Text, Text, Text> {
    
            @Override
            protected void reduce(Text person_person, Iterable<Text> friends, Context context) throws IOException, InterruptedException {
    
            StringBuilder sb = new StringBuilder();

            for (Text friend : friends) {
                sb.append(friend).append(" ");
            }
                context.write(person_person, new Text(sb.toString()));
            }
        }
    
        public static void main(String[] args) throws Exception {
    
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf);
            job.setJarByClass(SharedFriendsStepTwo.class);
    
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
    
            job.setMapperClass(SharedFriendsStepTwoMapper.class);
            job.setReducerClass(SharedFriendsStepTwoReducer.class);
    
        // Unlike step one, the paths here are hard-coded: the input is step one's output file
        FileInputFormat.setInputPaths(job, new Path("F:/fensiOutput/part-r-00000"));
        FileOutputFormat.setOutputPath(job, new Path("F:/fensiOutput4"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
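
     To trace the pair generation on concrete data, take the step-one output line for friend A. The following standalone sketch (class name hypothetical) replays exactly what the mapper does with that single record:

    import java.util.Arrays;

    public class PairEmitDemo {
        public static void main(String[] args) {
            // One line of step-one output: friend TAB person,person,...
            String line = "A\tI,K,C,B,G,F,H,O,D,";
            String[] friend_persons = line.split("\t");
            String friend = friend_persons[0];
            String[] persons = friend_persons[1].split(",");   // trailing comma produces no empty token
            Arrays.sort(persons);
            // Emit <person-person, friend> for every unordered pair
            for (int i = 0; i < persons.length - 1; i++) {
                for (int j = i + 1; j < persons.length; j++) {
                    System.out.println(persons[i] + "-" + persons[j] + "\t" + friend);
                }
            }
        }
    }

     With nine persons in A's list this prints 36 pair lines, each carrying A as the shared friend; the reducer then concatenates, per pair key, the friends arriving from all such lines.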

     Run result:

    A-B    E C 
    A-C    D F 
    A-D    E F 
    A-E    D B C 
    A-F    O B C D E 
    A-G    F E C D 
    A-H    E C D O 
    A-I    O 
    A-J    O B 
    A-K    D C 
    A-L    F E D 
    A-M    E F 
    B-C    A 
    B-D    A E 
    B-E    C 
    B-F    E A C 
    B-G    C E A 
    B-H    A E C 
    B-I    A 
    B-K    C A 
    B-L    E 
    B-M    E 
    B-O    A 
    C-D    A F 
    C-E    D 
    C-F    D A 
    C-G    D F A 
    C-H    D A 
    C-I    A 
    C-K    A D 
    C-L    D F 
    C-M    F 
    C-O    I A 
    D-E    L 
    D-F    A E 
    D-G    E A F 
    D-H    A E 
    D-I    A 
    D-K    A 
    D-L    E F 
    D-M    F E 
    D-O    A 
    E-F    D M C B 
    E-G    C D 
    E-H    C D 
    E-J    B 
    E-K    C D 
    E-L    D 
    F-G    D C A E 
    F-H    A D O E C 
    F-I    O A 
    F-J    B O 
    F-K    D C A 
    F-L    E D 
    F-M    E 
    F-O    A 
    G-H    D C E A 
    G-I    A 
    G-K    D A C 
    G-L    D F E 
    G-M    E F 
    G-O    A 
    H-I    O A 
    H-J    O 
    H-K    A C D 
    H-L    D E 
    H-M    E 
    H-O    A 
    I-J    O 
    I-K    A 
    I-O    A 
    K-L    D 
    K-O    A 
    L-M    E F 
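
     A quick spot-check against the raw data confirms the result: A's friend list is B,C,D,F,E,O and B's is A,C,E,K, so their intersection is {C, E}, matching the A-B line above; likewise L (D,E,F) and M (E,F,G) share exactly {E, F}, matching the L-M line.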