EOFException

kupeojn6 · posted 2021-05-29 in Hadoop

This is my custom Writable class. An EOFException is thrown in the readFields method while reading name. Can someone explain the problem and how to correct it?

    package youtube1;

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    import org.apache.hadoop.io.WritableComparable;

    public class CustomWritable implements WritableComparable<CustomWritable> {

        public String name;
        public long viewcount;

        public CustomWritable() {
        }

        @Override
        public void readFields(DataInput arg0) throws IOException {
            name = arg0.readUTF();        // EOFException is thrown here
            viewcount = arg0.readLong();
        }

        @Override
        public void write(DataOutput arg0) throws IOException {
            arg0.writeUTF(name);
            arg0.writeLong(viewcount);
        }

        public CustomWritable(String n, long v) {
            name = n;
            viewcount = v;
        }

        @Override
        public int compareTo(CustomWritable o) {
            if (o.getViewcount() < this.getViewcount())
                return -1;
            else
                return o.getViewcount() == this.getViewcount() ? 0 : 1;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public long getViewcount() {
            return viewcount;
        }

        public void setViewcount(long viewcount) {
            this.viewcount = viewcount;
        }

        @Override
        public String toString() {
            return name + "=" + viewcount;
        }
    }
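For reference, the Writable can be exercised outside Hadoop with a plain round trip through a byte buffer. This is only a debugging sketch assuming the class above; the class name RoundTripCheck and the sample values are made up and not part of the original job:

    package youtube1;

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Hypothetical standalone check: serialize a CustomWritable with write()
    // and read it back with readFields() to confirm the two methods are symmetric.
    public class RoundTripCheck {
        public static void main(String[] args) throws IOException {
            CustomWritable written = new CustomWritable("someVideo", 42L);

            // write() emits a UTF-encoded string followed by a long
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            written.write(new DataOutputStream(buffer));

            // readFields() must consume the bytes in exactly the same order
            CustomWritable read = new CustomWritable();
            read.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

            System.out.println(read);   // expected output: someVideo=42
        }
    }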

Here is my Mapper class:

    package youtube1;

    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapreduce.*;

    public class Mapper2 extends Mapper<LongWritable, Text, Text, CustomWritable> {

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws java.io.IOException, InterruptedException {
            String[] s = value.toString().split("\t");
            if (s.length > 5) {   // s[5] is read below, so at least 6 fields are required
                CustomWritable c = new CustomWritable(s[0], Long.parseLong(s[5]));
                context.write(new Text("1"), c);
            }
        }
    }

Here is my grouping comparator class:

    package youtube1;

    import org.apache.hadoop.io.WritableComparator;

    public class CustomGrouper extends WritableComparator {

        public CustomGrouper() {
            super(CustomWritable.class, true);
        }

        public int compareTo(CustomWritable c1, CustomWritable c2) {
            return 0;
        }
    }
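The driver is not shown in the post. For context, here is a minimal sketch of how a job using this mapper, reducer and grouping comparator is typically wired; the class name Driver, the job name and the argument-based paths are assumptions, not taken from the question:

    package youtube1;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    // Hypothetical driver showing where a grouping comparator is registered.
    public class Driver {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "top viewcounts");
            job.setJarByClass(Driver.class);

            job.setMapperClass(Mapper2.class);
            job.setReducerClass(Reducer2.class);
            // the grouping comparator compares map output keys on the reduce side
            job.setGroupingComparatorClass(CustomGrouper.class);

            // map output types: Text key, CustomWritable value
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(CustomWritable.class);
            // final output written by Reducer2
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);

            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }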

Here is my Reducer class:

    package youtube1;

    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapreduce.*;

    public class Reducer2 extends Reducer<Text, CustomWritable, Text, Text> {

        @Override
        public void reduce(Text key, Iterable<CustomWritable> value, Context context)
                throws java.io.IOException, InterruptedException {
            int count = 0;
            for (CustomWritable c : value) {
                if (count++ < 10)
                    context.write(new Text(c.toString()), new Text(""));
                else
                    break;
            }
        }
    }

The exception is as follows:

    java.lang.Exception: java.lang.RuntimeException: java.io.EOFException
        at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
        at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:529)
    Caused by: java.lang.RuntimeException: java.io.EOFException
        at org.apache.hadoop.io.WritableComparator.compare(WritableComparator.java:135)
        at org.apache.hadoop.mapreduce.task.ReduceContextImpl.nextKeyValue(ReduceContextImpl.java:157)
        at org.apache.hadoop.mapreduce.task.ReduceContextImpl.nextKey(ReduceContextImpl.java:121)
        at org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer$Context.nextKey(WrappedReducer.java:302)
        at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:170)
        at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:627)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
        at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:319)
        at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
        at java.util.concurrent.FutureTask.run(Unknown Source)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
        at java.lang.Thread.run(Unknown Source)
    Caused by: java.io.EOFException
        at java.io.DataInputStream.readFully(Unknown Source)
        at java.io.DataInputStream.readUTF(Unknown Source)
        at java.io.DataInputStream.readUTF(Unknown Source)
        at youtube1.CustomWritable.readFields(CustomWritable.java:22)
        at org.apache.hadoop.io.WritableComparator.compare(WritableComparator.java:129)
        ... 12 more

No answers yet.
