MapReduce package errors

qoefvg9y · posted 2021-05-30 in Hadoop
Follow (0) | Answers (1) | Views (378)

What packages do these 3 programs need? How do I combine the 3 programs into a single program? And how do I run them as a MapReduce job from Eclipse?
Please help me get this program running.
OS: Linux
Errors I'm getting:
1. The method TryParseInt(String) is undefined for the type MaxPubYear.MaxPubYearReducer
2. The method setInputFormatClass(Class) in the type Job is not applicable for the arguments (Class)
Mapper code:

public static class MaxPubYearMapper extends Mapper<LongWritable , Text, IntWritable,Text>
    {
        public void map(LongWritable key, Text value , Context context)

                throws IOException, InterruptedException 
                {
            String delim = "\t";
            Text valtosend = new Text(); 
            String tokens[] = value.toString().split(delim);
            if (tokens.length == 2)
            {
                valtosend.set(tokens[0] + ";"+ tokens[1]);
                context.write(new IntWritable(1), valtosend);
            }

                }       
    }

Reducer code:

public static class MaxPubYearReducer extends Reducer<IntWritable ,Text, Text, IntWritable>
    {

        public void reduce(IntWritable key, Iterable<Text> values , Context context) throws IOException, InterruptedException
        {
            int maxiValue = Integer.MIN_VALUE;
            String maxiYear = "";
            for(Text value:values)          {
                String token[] = value.toString().split(";");
                if(token.length == 2 && TryParseInt(token[1]).intValue()> maxiValue)
                {
                    maxiValue = TryParseInt(token[1]);
                    maxiYear = token[0];
                }
            }
            context.write(new Text(maxiYear), new IntWritable(maxiValue));
        }
    }

Driver code:

public static void main(String[] args) throws Exception  {
        Configuration conf = new Configuration(); 
        Job job = new Job(conf , "Frequency");
        job.setJarByClass(MaxPubYear.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        job.setMapperClass(FrequencyMapper.class);
        job.setCombinerClass(FrequencyReducer.class);
        job.setReducerClass(FrequencyReducer.class);

        job.setOutputFormatClass(TextOutputFormat.class);
        job.setInputFormatClass(TextInputFormat.class);

        FileInputFormat.addInputPath(job,new Path(args[0]));
        FileOutputFormat.setOutputPath(job,new Path(args[1]+ "_temp"));
        int exitCode = job.waitForCompletion(true)?0:1; 

        if (exitCode == 0 )
        {
            Job SecondJob = new Job(conf, "Maximum Publication year");
            SecondJob.setJarByClass(MaxPubYear.class);

            SecondJob.setOutputKeyClass(Text.class);
            SecondJob.setOutputValueClass(IntWritable.class);

            SecondJob.setMapOutputKeyClass(IntWritable.class);
            SecondJob.setMapOutputValueClass(Text.class);

            SecondJob.setMapperClass(MaxPubYearMapper.class);               
            SecondJob.setReducerClass(MaxPubYearReducer.class);

            FileInputFormat.addInputPath(SecondJob,new Path(args[1]+ "_temp"));
            FileOutputFormat.setOutputPath(SecondJob,new Path(args[1]));
            System.exit(SecondJob.waitForCompletion(true)?0:1);                 

        }
    }
fafcakar1#

Write them all in one class.
The required imports are:

package org.myorg;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
There may be an extra import or two in there, since I copied them from my own code. The important part is that everything comes from the new org.apache.hadoop.mapreduce API: your second error is what you get when the old org.apache.hadoop.mapred.TextInputFormat is imported instead, because Job.setInputFormatClass only accepts the new-API input format classes.

package org.myorg;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class MaxPubYear { // the class (and file) must be named MaxPubYear, since the driver uses MaxPubYear.class
    // Emits every "year \t count" line from the first job's output under the
    // single key 1, so that one reduce call sees all records and can pick the maximum.
    public static class MaxPubYearMapper extends Mapper<LongWritable, Text, IntWritable, Text>
    {
        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException
        {
            String delim = "\t";
            Text valtosend = new Text();
            String[] tokens = value.toString().split(delim);
            if (tokens.length == 2)
            {
                valtosend.set(tokens[0] + ";" + tokens[1]);
                context.write(new IntWritable(1), valtosend);
            }
        }
    }

    // Receives all "year;count" pairs under key 1 and keeps the year with
    // the highest count.
    public static class MaxPubYearReducer extends Reducer<IntWritable, Text, Text, IntWritable>
    {
        // This helper was missing from your code, which is what caused
        // "The method TryParseInt(String) is undefined".
        private static Integer TryParseInt(String s) {
            try {
                return Integer.valueOf(s.trim());
            } catch (NumberFormatException e) {
                return Integer.MIN_VALUE; // unparsable counts never win
            }
        }

        @Override
        public void reduce(IntWritable key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException
        {
            int maxiValue = Integer.MIN_VALUE;
            String maxiYear = "";
            for (Text value : values) {
                String[] token = value.toString().split(";");
                if (token.length == 2 && TryParseInt(token[1]).intValue() > maxiValue)
                {
                    maxiValue = TryParseInt(token[1]);
                    maxiYear = token[0];
                }
            }
            context.write(new Text(maxiYear), new IntWritable(maxiValue));
        }
    }
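
    // The driver below references FrequencyMapper and FrequencyReducer, which
    // you did not post. The two classes here are only a hypothetical sketch of
    // that first (frequency-count) job, assuming the publication year is the
    // first tab-separated field of each input line; replace them with your
    // actual classes if they differ.
    public static class FrequencyMapper extends Mapper<LongWritable, Text, Text, IntWritable>
    {
        private static final IntWritable ONE = new IntWritable(1);

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException
        {
            String[] fields = value.toString().split("\t");
            if (fields.length > 0) {
                context.write(new Text(fields[0]), ONE); // assumed: field 0 is the year
            }
        }
    }

    public static class FrequencyReducer extends Reducer<Text, IntWritable, Text, IntWritable>
    {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException
        {
            int sum = 0;
            for (IntWritable v : values) {
                sum += v.get();
            }
            // Produces "year \t count", which MaxPubYearMapper then parses.
            context.write(key, new IntWritable(sum));
        }
    }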
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // First job: count publications per year.
        Job job = Job.getInstance(conf, "Frequency");
        job.setJarByClass(MaxPubYear.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        job.setMapperClass(FrequencyMapper.class);
        job.setCombinerClass(FrequencyReducer.class);
        job.setReducerClass(FrequencyReducer.class);

        job.setOutputFormatClass(TextOutputFormat.class);
        job.setInputFormatClass(TextInputFormat.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1] + "_temp"));
        int exitCode = job.waitForCompletion(true) ? 0 : 1;

        if (exitCode == 0)
        {
            // Second job: read the first job's output and pick the year
            // with the highest count.
            Job secondJob = Job.getInstance(conf, "Maximum Publication Year");
            secondJob.setJarByClass(MaxPubYear.class);

            secondJob.setOutputKeyClass(Text.class);
            secondJob.setOutputValueClass(IntWritable.class);

            secondJob.setMapOutputKeyClass(IntWritable.class);
            secondJob.setMapOutputValueClass(Text.class);

            secondJob.setMapperClass(MaxPubYearMapper.class);
            secondJob.setReducerClass(MaxPubYearReducer.class);

            FileInputFormat.addInputPath(secondJob, new Path(args[1] + "_temp"));
            FileOutputFormat.setOutputPath(secondJob, new Path(args[1]));
            System.exit(secondJob.waitForCompletion(true) ? 0 : 1);
        }
    }
}
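
To run this from Eclipse, add the Hadoop JARs to the project's build path, export the project as a JAR, and launch it with an input path and an output path, for example `hadoop jar maxpubyear.jar org.myorg.MaxPubYear <input> <output>`. The JAR name here is just a placeholder; the two arguments map to args[0] and args[1] in main.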
