Can someone help me figure out why I am not getting the average salary when I run this MapReduce code?
Problem: compute the average salary of Permanent and Contract employees.

Sample input:
1 user1 Permanent 100
2 user2 Contract 500
3 user3 Permanent 200
4 user4 Contract 300

Expected output:
Permanent 285
Contract 187

Output I am getting:
Permanent 100
Permanent 200
Contract 500
Contract 300

Running the job:
$ hadoop jar partition.jar com.hadoop.PartitionExample input/partition_example.txt output

package com.hadoop;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;


public class PartitionExample {
        public static class MapClass extends Mapper<LongWritable, Text,
        Text, IntWritable>{

        Text outKey = new Text();
        IntWritable outValue = new IntWritable();

        public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException{
            String[] colmn = value.toString().split(" ");
            outKey.set(colmn[2]);
            outValue.set(Integer.parseInt(colmn[3]));
            context.write(outKey, outValue);
     }
}
// permanent      [100,300,200,400]
public static class ReduceClass extends Reducer<Text,IntWritable,Text,
IntWritable>{
        IntWritable outValue = new IntWritable();
        public void Reduce(Text key, Iterable<IntWritable> value, Context
        context) throws IOException, InterruptedException{
            int sum = 0; int count = 0; int avg ;
            //outKey.set(key);
            for (IntWritable sal:value){
                sum = sum + sal.get();
                count++;
            }
            avg = sum/count ;
            outValue.set(avg);
            context.write(key, outValue);
     }
 }

public static void main(String[] args) throws IOException,
    ClassNotFoundException, InterruptedException{
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if(otherArgs.length != 2){
        System.err.println("Number of argument passed is not 2");
        System.exit(1);
    }
    Job job = new Job(conf, "My regular MapReduce job");

    job.setJarByClass(PartitionExample.class);
    job.setMapperClass(MapClass.class);
//  job.setCombinerClass(ReduceClass.class);
    job.setReducerClass(ReduceClass.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);

}
}

Best answer

I found my mistake in the code, and I have to say it was a pretty silly one :(
It was in the name of the reduce function: I had written it as Reduce, so it never overrode Reducer.reduce, and Hadoop used the default identity reducer, which simply writes every (key, value) pair straight through. That is exactly the output shown above. I changed the method name from Reduce to reduce.
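For reference, a minimal sketch of the corrected reducer (same logic as the original, only the method name changed and an @Override annotation added; with @Override the compiler rejects any method that does not actually override Reducer.reduce, which would have caught this typo immediately):

public static class ReduceClass extends Reducer<Text, IntWritable, Text, IntWritable> {
        IntWritable outValue = new IntWritable();

        @Override  // compile error here if the signature does not match Reducer.reduce
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            int count = 0;
            for (IntWritable sal : values) {
                sum += sal.get();   // accumulate all salaries for this employee type
                count++;
            }
            outValue.set(sum / count);   // integer average per employee type
            context.write(key, outValue);
        }
}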
