开发工具:eclipse

目标:对下面文档phone_numbers进行倒排索引:

13599999999 10086
13899999999 120
13944444444 13800138000
13722222222 13800138000
18800000000 120
13722222222 10086
18944444444 10086

代码:

 import java.io.IOException;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; public class Test_1 extends Configured implements Tool
{
enum Counter
{
LINESKIP, // error lines
} public static class Map extends Mapper<LongWritable, Text, Text, Text>
{
public void map(LongWritable key, Text value, Context context)throws IOException, InterruptedException
{
String line = value.toString(); // read original data try
{
// process data
String[] lineSplit = line.split(" ");
String anum = lineSplit[0];
String bnum = lineSplit[1]; context.write(new Text(bnum), new Text(anum)); // map output
}
catch(java.lang.ArrayIndexOutOfBoundsException e)
{
context.getCounter(Counter.LINESKIP).increment(1);
return;
} }
}
public static class Reduce extends Reducer<Text, Text, Text, Text>
{
public void reduce(Text key, Iterable<Text>values, Context context)throws IOException, InterruptedException
{
String valueString;
String out = ""; for (Text value : values)
{
valueString = value.toString();
out += valueString + "|";
} context.write(key, new Text(out)); // reduce output
}
}
public int run(String[] args)throws Exception
{
Configuration conf = getConf(); Job job = new Job(conf, "Test_1"); // task name
job.setJarByClass(Test_1.class); // specified task FileInputFormat.addInputPath(job, new Path(args[0])); // input path
FileOutputFormat.setOutputPath(job, new Path(args[1])); // output path job.setMapperClass(Map.class);
job.setReducerClass(Reduce.class);
job.setOutputFormatClass(TextOutputFormat.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class); job.waitForCompletion(true); return job.isSuccessful() ? 0 : 1;
} public static void main(String[] args)throws Exception
{
int res = ToolRunner.run(new Configuration(), new Test_1(), args);
System.exit(res);
}
}

运行结果:

(截图缺失:hadoop 学习笔记之倒排索引 —— 此处原为运行结果截图)

05-11 11:03