Program to illustrate FOF (common friends) using MapReduce:
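The job computes, for every pair of users, the friends they have in common. Each input line has the form "user -> friend1 friend2 ...". For every pair of friends on a line, the mapper emits the pair as the key and the line's user as the value: from "A -> B C D" it emits (B,C) -> A, (B,D) -> A and (C,D) -> A, since A is a common friend of each of those pairs. The shuffle then groups, for each pair, every user who listed both of its members; with the sample input below, the key (C,D) reaches the reducer with the values [A, B, E], which the reducer de-duplicates, sorts and writes out as "C,D A B E".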
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Finds the common friends of every pair of users.
public class FriendCommon {

    public static class FriendMapper extends Mapper<Object, Text, Text, Text> {
        private Text pair = new Text();
        private Text user = new Text();

        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            // Each input line has the form "user -> friend1 friend2 ...".
            String[] line = value.toString().split(" -> ");
            if (line.length == 2) {
                String[] friends = line[1].split(" ");
                // Emit every pair of this user's friends: the user is a
                // common friend of each such pair.
                for (int i = 0; i < friends.length; i++) {
                    for (int j = i + 1; j < friends.length; j++) {
                        String a = friends[i].trim();
                        String b = friends[j].trim();
                        // Order the pair canonically so "B,C" and "C,B" reach
                        // the same reducer even when a friend list is not
                        // alphabetically sorted.
                        String friendPair = a.compareTo(b) < 0 ? a + "," + b : b + "," + a;
                        pair.set(friendPair);
                        user.set(line[0].trim());
                        context.write(pair, user);
                    }
                }
            }
        }
    }

    public static class FriendReducer extends Reducer<Text, Text, Text, Text> {
        private Text commonFriends = new Text();

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // Collect the distinct users who listed both members of this pair.
            Set<String> userList = new HashSet<>();
            for (Text value : values) {
                userList.add(value.toString());
            }
            // Only emit pairs that share more than one common friend.
            if (userList.size() > 1) {
                List<String> sortedUsers = new ArrayList<>(userList);
                Collections.sort(sortedUsers);
                commonFriends.set(String.join(" ", sortedUsers));
                context.write(key, commonFriends);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "FriendCommon");
        job.setJarByClass(FriendCommon.class);
        job.setMapperClass(FriendMapper.class);
        job.setReducerClass(FriendReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
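Before packaging the job, the pair generation and grouping can be sanity-checked in plain Java without a cluster. The following is only an illustrative sketch, not part of the submitted program; the class name PairTrace and the hard-coded sample lines are assumptions for demonstration. It replays the mapper's nested loop and the reducer's grouping in memory and prints the same lines that end up in part-r-00000.

import java.util.*;

// Illustrative sketch: replays the mapper's pair generation and the
// reducer's grouping in memory on the sample input, no Hadoop needed.
public class PairTrace {
    public static void main(String[] args) {
        String[] input = {
            "A -> B C D", "B -> A C D E", "C -> A B D E",
            "D -> A B C E", "E -> B C D"
        };
        // pair -> set of users who listed both members of the pair
        Map<String, Set<String>> grouped = new TreeMap<>();
        for (String record : input) {
            String[] line = record.split(" -> ");
            String[] friends = line[1].split(" ");
            for (int i = 0; i < friends.length; i++) {
                for (int j = i + 1; j < friends.length; j++) {
                    String pair = friends[i] + "," + friends[j];
                    grouped.computeIfAbsent(pair, k -> new TreeSet<>()).add(line[0]);
                }
            }
        }
        // Mirror the reducer: keep pairs with more than one common friend.
        for (Map.Entry<String, Set<String>> e : grouped.entrySet()) {
            if (e.getValue().size() > 1) {
                System.out.println(e.getKey() + "\t" + String.join(" ", e.getValue()));
            }
        }
    }
}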
Steps to run the MapReduce program:
Input File:
A -> B C D
B -> A C D E
C -> A B D E
D -> A B C E
E -> B C D
hduser@ubuntu:~/fof$ ls
FriendCommon.java in.txt
hduser@ubuntu:~/fof$ export CLASSPATH=`hadoop classpath`
hduser@ubuntu:~/fof$ echo $CLASSPATH
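`hadoop classpath` prints the directories and jars needed to compile against the Hadoop API; exporting it as CLASSPATH lets javac resolve the org.apache.hadoop imports in the next step.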
hduser@ubuntu:~/fof$ javac FriendCommon.java
hduser@ubuntu:~/fof$ ls
'FriendCommon$FriendMapper.class' 'FriendCommon$FriendReducer.class' FriendCommon.class FriendCommon.java in.txt
hduser@ubuntu:~/fof$ jar -cvf fmr.jar -C /home/hduser/fof .
hduser@ubuntu:~/fof$ ls
fmr.jar 'FriendCommon$FriendMapper.class' 'FriendCommon$FriendReducer.class' FriendCommon.class FriendCommon.java in.txt
hduser@ubuntu:~/fof$ hadoop fs -mkdir /fofrk
hduser@ubuntu:~/fof$ hadoop fs -put /home/hduser/fof/in.txt /fofrk
hduser@ubuntu:~/fof$ hadoop fs -lsr /fofrk
lsr: DEPRECATED: Please use 'ls -R' instead.
-rw-r--r-- 1 hduser supergroup 62 2024-03-21 11:36 /fofrk/in.txt
hduser@ubuntu:~/fof$ hadoop fs -cat /fofrk/in.txt
A -> B C D
B -> A C D E
C -> A B D E
D -> A B C E
E -> B C D
hduser@ubuntu:~/fof$ hadoop jar fmr.jar FriendCommon /fofrk/in.txt /fofrk/out
hduser@ubuntu:~/fof$ hadoop fs -lsr /fofrk
lsr: DEPRECATED: Please use 'ls -R' instead.
-rw-r--r-- 1 hduser supergroup 62 2024-03-21 11:36 /fofrk/in.txt
drwxr-xr-x - hduser supergroup 0 2024-03-21 11:40 /fofrk/out
-rw-r--r-- 1 hduser supergroup 0 2024-03-21 11:40 /fofrk/out/_SUCCESS
-rw-r--r-- 1 hduser supergroup 88 2024-03-21 11:40 /fofrk/out/part-r-00000
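The empty _SUCCESS file is a marker written when the job finishes cleanly; part-r-00000 holds the output of the single reducer.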
hduser@ubuntu:~/fof$ hadoop fs -cat /fofrk/in.txt
A -> B C D
B -> A C D E
C -> A B D E
D -> A B C E
E -> B C D
hduser@ubuntu:~/fof$ hadoop fs -cat /fofrk/out/part-r-00000
A,B C D
A,C B D
A,D B C
A,E B C D
B,C A D E
B,D A C E
B,E C D
C,D A B E
C,E B D
D,E B C
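Each output line is a pair of users followed by their common friends; for example, "A,E B C D" says that A and E, who are not directly friends in the input, both list B, C and D. Because the sample friendship lists are symmetric, any line can be double-checked by intersecting the two users' friend sets. A minimal sketch of that check (the class name VerifyPair and the hard-coded sets are illustrative assumptions):

import java.util.*;

// Cross-check one output line: the common friends of a pair are the
// intersection of the two users' friend lists (assuming symmetric lists).
public class VerifyPair {
    public static void main(String[] args) {
        Set<String> friendsOfA = new TreeSet<>(Arrays.asList("B", "C", "D")); // A -> B C D
        Set<String> friendsOfE = new TreeSet<>(Arrays.asList("B", "C", "D")); // E -> B C D
        friendsOfA.retainAll(friendsOfE); // set intersection
        System.out.println("A,E\t" + String.join(" ", friendsOfA)); // prints: A,E  B C D
    }
}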