In the figure, a direct line between two users represents a friend relationship.

Based on this graph, we now want to recommend friends to hadoop.

The idea behind the algorithm:

    1. Get all second-degree relationships.

          Pairing the friends in a user's friend list two by two gives the second-degree relationships:

          pairs of users who are not friends themselves but can be connected by exactly two edges.

          For example, hadoop and hello form a second-degree pair. (This step is illustrated in the sketch after this list.)

    2. Filter the direct friendships out of the pairs from step 1.

          What remains are the true second-degree relationships.

    3. Count the occurrences of each remaining pair to get its closeness.

          For example, the closeness of hadoop and hello is 3:

          starting from hadoop, there are three paths that reach hello in two steps.

    4. Sort by closeness in descending order to produce the recommendation list.
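Step 1's "two by two" pairing is the combinatorial core. A minimal plain-Java sketch of it (no Hadoop; the class name is illustrative), using hive's friend list from the sample data below:

import java.util.ArrayList;
import java.util.List;

public class FofPairsDemo {
    public static void main(String[] args) {
        // hive's friend list from the sample data
        String[] friends = {"cat", "hadoop", "world", "hello", "mr"};
        List<String> pairs = new ArrayList<>();
        for (int i = 0; i < friends.length; i++) {
            for (int j = i + 1; j < friends.length; j++) {
                // each pair of hive's friends is a second-degree candidate
                pairs.add(friends[i] + "-" + friends[j]);
            }
        }
        // prints 10 candidate pairs, including hadoop-hello;
        // direct friendships among them are removed in step 2
        System.out.println(pairs);
    }
}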


Implementation (core code only):

Input data:

tom hello hadoop cat

world hadoop hello hive

cat tom hive

mr hive hello

hive cat hadoop world hello mr

hadoop tom hive world

hello tom world hive mr

Each line is a user followed by that user's friend list; the first line means tom has friend relationships with hello, hadoop, and cat.
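Before writing the MapReduce jobs, the four steps can be sanity-checked in a single process. A minimal single-machine sketch over this sample data (plain Java; the class and variable names are illustrative) that should print hello with closeness 3 at the top of hadoop's list:

import java.util.*;

public class FofLocalCheck {
    public static void main(String[] args) {
        String[] lines = {
            "tom hello hadoop cat", "world hadoop hello hive",
            "cat tom hive", "mr hive hello",
            "hive cat hadoop world hello mr",
            "hadoop tom hive world", "hello tom world hive mr"
        };
        Set<String> direct = new HashSet<>();       // step 2: direct friendships
        Map<String, Integer> hot = new HashMap<>(); // steps 1+3: closeness per pair
        for (String line : lines) {
            String[] s = line.split(" ");
            for (int i = 1; i < s.length; i++) {
                direct.add(key(s[0], s[i]));
                for (int j = i + 1; j < s.length; j++) {
                    hot.merge(key(s[i], s[j]), 1, Integer::sum); // one two-step path
                }
            }
        }
        // step 4: hadoop's recommendations, closeness descending
        List<Map.Entry<String, Integer>> recs = new ArrayList<>();
        for (Map.Entry<String, Integer> e : hot.entrySet()) {
            if (!direct.contains(e.getKey()) && e.getKey().contains("hadoop")) {
                recs.add(e);
            }
        }
        recs.sort((a, b) -> b.getValue() - a.getValue());
        recs.forEach(e -> System.out.println(e.getKey() + ":" + e.getValue()));
        // expected top entry: hello-hadoop:3
    }

    // normalize a pair the same way FofFormat (below) does: larger name first
    static String key(String a, String b) {
        return a.compareTo(b) < 0 ? b + "-" + a : a + "-" + b;
    }
}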

/**
 * Normalizes a friend pair so that both orderings map to the same key,
 * e.g. hadoop-hello is turned into hello-hadoop (the lexicographically
 * larger name always comes first).
 */
public class FofFormat {

    /**
     * @param f1 friend 1
     * @param f2 friend 2
     * @return the normalized pair key
     */
    public String toFormat(String f1, String f2) {
        int c = f1.compareTo(f2);
        if (c < 0) {
            return f2 + "-" + f1;
        }
        return f1 + "-" + f2;
    }
}
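A quick check of the normalization (the values in the comments are what the calls return):

FofFormat fmt = new FofFormat();
String a = fmt.toFormat("hadoop", "hello"); // "hello-hadoop"
String b = fmt.toFormat("hello", "hadoop"); // "hello-hadoop" — same key either way

Because both orderings collapse to one key, every marker for a given pair reaches the same reducer in the first job.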

/**
 * The first mapper emits every candidate second-degree pair.
 * For each input line "user f1 f2 ... fn" it writes:
 *   (user-fi, 0) for every direct friend fi, marking a direct relationship;
 *   (fi-fj, 1)   for every two-by-two combination, marking one two-step path.
 * The 1-marked pairs still include direct friendships; those are filtered
 * out in the reducer.
 */
public class FofMapperOne extends Mapper<LongWritable, Text, Text, IntWritable> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // e.g. "tom hello hadoop cat"
        String[] strs = StringUtils.split(value.toString(), ' ');
        FofFormat fofFormat = new FofFormat();
        for (int i = 0; i < strs.length; i++) {
            // direct friend relationship, marked 0
            String fof1 = fofFormat.toFormat(strs[0], strs[i]);
            context.write(new Text(fof1), new IntWritable(0));
            for (int j = i + 1; j < strs.length; j++) {
                // candidate second-degree relationship, marked 1
                String fof2 = fofFormat.toFormat(strs[i], strs[j]);
                context.write(new Text(fof2), new IntWritable(1));
            }
        }
    }
}
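For the first sample line "tom hello hadoop cat", this mapper emits the following records (keys already normalized by FofFormat):

tom-tom       0   (self pair from i = 0; dropped by the 0-filter)
tom-hello     1
tom-hadoop    1
tom-cat       1
tom-hello     0
hello-hadoop  1
hello-cat     1
tom-hadoop    0
hadoop-cat    1
tom-cat       0

Every key that received a 0 is a direct friendship (or the self pair) and is discarded in the reducer; from this line only hello-hadoop, hello-cat, and hadoop-cat survive as second-degree candidates.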


/**
 * The first reducer sums the closeness of each pair.
 * If any value for a pair is 0, the pair is a direct friendship and is
 * dropped; otherwise the sum of the 1s is the number of two-step paths
 * between the two users.
 */
public class FofReducerOne extends Reducer<Text, IntWritable, Text, NullWritable> {

    @Override
    protected void reduce(Text text, Iterable<IntWritable> iterable, Context context)
            throws IOException, InterruptedException {
        int n = 0;           // closeness: number of two-step paths
        boolean flag = true; // false once a direct-friend marker (0) is seen
        for (IntWritable i : iterable) {
            if (i.get() == 0) {
                flag = false;
                break;
            }
            n += i.get();
        }
        if (flag) {
            // output of the first job, e.g. "hello-hadoop-3"
            String msg = text.toString() + "-" + n;
            context.write(new Text(msg), NullWritable.get());
        }
    }
}
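Tracing the sample data by hand, the first job should emit ten true second-degree pairs (shown key-sorted, as a single reducer would write them); note hello-hadoop-3 matching the walkthrough in the algorithm steps:

hadoop-cat-2
hello-cat-2
hello-hadoop-3
mr-cat-1
mr-hadoop-1
tom-hive-3
tom-mr-1
world-cat-1
world-mr-2
world-tom-2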
// Second job: group by user, then sort by closeness, then output

/**
 * The second mapper re-emits each pair in both directions so that every
 * user ends up with a full recommendation list.
 */
public class FofMapperTwo extends Mapper<LongWritable, Text, Fof, IntWritable> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // input line from the first job, e.g. "hadoop-cat-2"
        String[] strs = StringUtils.split(value.toString(), '-');

        Fof fof1 = new Fof();
        fof1.setF1(strs[0]);
        fof1.setF2(strs[1]);
        fof1.setHot(Integer.parseInt(strs[2]));
        // hadoop-cat-2
        context.write(fof1, new IntWritable(fof1.getHot()));

        Fof fof2 = new Fof();
        fof2.setF1(strs[1]);
        fof2.setF2(strs[0]);
        fof2.setHot(Integer.parseInt(strs[2]));
        // cat-hadoop-2
        context.write(fof2, new IntWritable(fof2.getHot()));
    }
}

/**
 * Grouping comparator: all keys with the same f1 (the same user) go to
 * one reduce() call.
 */
public class FofGroup extends WritableComparator {

    public FofGroup() {
        super(Fof.class, true);
    }

    @Override
    public int compare(WritableComparable a, WritableComparable b) {
        Fof fof1 = (Fof) a;
        Fof fof2 = (Fof) b;
        return fof1.getF1().compareTo(fof2.getF1());
    }
}

/**
 * Sort comparator: order by user first, then by closeness descending.
 */
public class FofSort extends WritableComparator {

    public FofSort() {
        super(Fof.class, true);
    }

    @Override
    public int compare(WritableComparable a, WritableComparable b) {
        Fof fof1 = (Fof) a;
        Fof fof2 = (Fof) b;
        int c = fof1.getF1().compareTo(fof2.getF1());
        if (c == 0) {
            // closeness, descending
            return -Integer.compare(fof1.getHot(), fof2.getHot());
        }
        return c;
    }
}

/**
 * The second reducer writes each user's recommendations in sorted order,
 * one line per recommendation, e.g. "hadoop-hello:3".
 */
public class FofReducerTwo extends Reducer<Fof, IntWritable, Text, NullWritable> {

    @Override
    protected void reduce(Fof fof, Iterable<IntWritable> iterable, Context context)
            throws IOException, InterruptedException {
        for (IntWritable i : iterable) {
            String msg = fof.getF1() + "-" + fof.getF2() + ":" + i.get();
            context.write(new Text(msg), NullWritable.get());
        }
    }
}
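The Fof key class used above is not shown in the listing. It must be a WritableComparable carrying f1, f2, and hot; a minimal sketch, assuming the accessor names the mapper calls:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.WritableComparable;

public class Fof implements WritableComparable<Fof> {

    private String f1 = "";
    private String f2 = "";
    private int hot;

    public String getF1() { return f1; }
    public void setF1(String f1) { this.f1 = f1; }
    public String getF2() { return f2; }
    public void setF2(String f2) { this.f2 = f2; }
    public int getHot() { return hot; }
    public void setHot(int hot) { this.hot = hot; }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(f1);
        out.writeUTF(f2);
        out.writeInt(hot);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        f1 = in.readUTF();
        f2 = in.readUTF();
        hot = in.readInt();
    }

    @Override
    public int compareTo(Fof o) {
        // default ordering; the job replaces it with FofSort and FofGroup
        int c = f1.compareTo(o.f1);
        return c == 0 ? f2.compareTo(o.f2) : c;
    }
}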

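The drivers wiring the jobs together are also omitted above. A sketch of the second job's driver, assuming hypothetical input/output paths and a standard Hadoop 2.x setup; the essential lines are the two comparator registrations, without which the grouping and secondary sort never take effect:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class FofJobTwo {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "fof-round-two");
        job.setJarByClass(FofJobTwo.class);

        job.setMapperClass(FofMapperTwo.class);
        job.setReducerClass(FofReducerTwo.class);

        job.setMapOutputKeyClass(Fof.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        // secondary sort: order by (user, closeness desc), group by user
        job.setSortComparatorClass(FofSort.class);
        job.setGroupingComparatorClass(FofGroup.class);

        // Fof defines no partitioner or hashCode, so use a single reducer
        // to keep each user's records together
        job.setNumReduceTasks(1);

        // hypothetical paths: the output of the first job feeds the second
        FileInputFormat.addInputPath(job, new Path("/fof/out1"));
        FileOutputFormat.setOutputPath(job, new Path("/fof/out2"));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

The first job's driver is analogous, minus the comparator and key-class settings: FofMapperOne with Text/IntWritable map output and FofReducerOne with Text/NullWritable output.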
