为什么我不能运行hadoop jar文件,每次运行它都会显示class not found exception itemclick.itemclick.itemclick?

bakd9h0s  于 2021-05-31  发布在  Hadoop
关注(0)|答案(1)|浏览(342)

我使用下面的代码,并用如下命令运行 hadoop jar 文件:`hadoop jar '/home/manthanjain/hadoop click analysis/itemclick.jar' itemclick.itemclick.itemclick '/hadoop click analysis/input' '/hadoop click analysis/output'`。这里的 input 和 output 是已加载到 HDFS 中的文件路径。

  1. package itemclick.itemclick;
  2. import java.io.IOException;
  3. import java.util.Collections;
  4. import java.util.Comparator;
  5. import java.util.HashMap;
  6. import java.util.Iterator;
  7. import java.util.LinkedHashMap;
  8. import java.util.LinkedList;
  9. import java.util.List;
  10. import java.util.Map;
  11. import org.apache.hadoop.conf.Configuration;
  12. import org.apache.hadoop.fs.Path;
  13. import org.apache.hadoop.io.IntWritable;
  14. import org.apache.hadoop.io.Text;
  15. import org.apache.hadoop.mapreduce.Job;
  16. import org.apache.hadoop.mapreduce.Mapper;
  17. import org.apache.hadoop.mapreduce.Reducer;
  18. import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
  19. import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
  20. import org.apache.hadoop.util.GenericOptionsParser;
  21. public class ItemClick {
  22. public static class ItemClickMapper extends Mapper<Object, Text, Text, IntWritable>{
  23. public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
  24. Text itemID = new Text();
  25. IntWritable one = new IntWritable(1);
  26. String[] tokens = value.toString().split(",");
  27. String[] timestamp = tokens[1].split("-");
  28. if(timestamp[1].equals("04")) {
  29. itemID.set(tokens[2]);
  30. context.write(itemID, one);
  31. }
  32. }
  33. }
  34. public static class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable>{
  35. public void reduce(Text itemID, Iterable<IntWritable> ones, Context context) throws IOException, InterruptedException {
  36. int count = 0;
  37. Iterator<IntWritable> iterator = ones.iterator();
  38. while(iterator.hasNext()) {
  39. count++;
  40. iterator.next();
  41. }
  42. IntWritable clicks = new IntWritable(count);
  43. context.write(itemID, clicks);
  44. }
  45. }
  46. public static class ItemClickMapper2 extends Mapper<Object, Text, IntWritable, Text>{
  47. public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
  48. String[] tokens = value.toString().split("\t");
  49. Text itemId = new Text(tokens[0]);
  50. int clicks = Integer.parseInt(tokens[1]);
  51. IntWritable clicksFinal = new IntWritable(clicks);
  52. context.write(clicksFinal, itemId);
  53. }
  54. }
  55. public static class SumReducer2 extends Reducer<IntWritable, Text, Text, IntWritable>{
  56. private Map<String, Integer> countMap = new HashMap<String, Integer>();
  57. @Override
  58. public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
  59. for (Text value: values) {
  60. countMap.put(value.toString(), key.get());
  61. }
  62. }
  63. public void cleanup(Context context) throws IOException, InterruptedException {
  64. Map<String, Integer> sortedMap = sort(countMap);
  65. int counter = 0;
  66. for (String key: sortedMap.keySet()) {
  67. if (counter ++ == 10) {
  68. break;
  69. }
  70. context.write(new Text(key),new IntWritable(sortedMap.get(key)));
  71. }
  72. }
  73. public static Map<String, Integer> sort(Map<String, Integer> unsortMap) {
  74. List<Map.Entry<String, Integer>> list =
  75. new LinkedList<Map.Entry<String, Integer>>(unsortMap.entrySet());
  76. // for (Map.Entry<String, Integer> entry : list) {
  77. // System.out.println("-----IN COMPARATOR METHOD------"+entry.getKey()+" "+entry.getValue());
  78. // }
  79. Collections.sort(list, new Comparator<Map.Entry<String, Integer>>() {
  80. public int compare(Map.Entry<String, Integer> o1,
  81. Map.Entry<String, Integer> o2) {
  82. if(!o2.getValue().equals(o1.getValue()))
  83. return (o2.getValue()).compareTo(o1.getValue());
  84. else
  85. return (o1.getKey().compareTo(o2.getKey()));
  86. }
  87. });
  88. Map<String, Integer> sortedMap = new LinkedHashMap<String, Integer>();
  89. for (Map.Entry<String, Integer> entry : list) {
  90. sortedMap.put(entry.getKey(), entry.getValue());
  91. }
  92. return sortedMap;
  93. }
  94. //context.write(itemId, key);
  95. }
  96. public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
  97. Configuration conf1 = new Configuration();
  98. String[] otherArgs = new GenericOptionsParser(conf1, args).getRemainingArgs();
  99. if (otherArgs.length !=2) {
  100. System.err.println("Usage: input_file output_file");
  101. System.exit(2);
  102. }
  103. Job job1 = Job.getInstance(conf1, "Item Click");
  104. job1.setJarByClass(ItemClick.class);
  105. job1.setMapperClass(ItemClickMapper.class);
  106. job1.setReducerClass(SumReducer.class);
  107. job1.setNumReduceTasks(10);
  108. job1.setOutputKeyClass(Text.class);
  109. job1.setMapOutputValueClass(IntWritable.class);
  110. FileInputFormat.addInputPath(job1, new Path(otherArgs[0]));
  111. FileOutputFormat.setOutputPath(job1, new Path(otherArgs[1]+"/temp"));
  112. if (!job1.waitForCompletion(true)) {
  113. System.exit(1);
  114. }
  115. Job job2 = Job.getInstance(conf1);
  116. job2.setJarByClass(ItemClick.class);
  117. job2.setJobName("sort");
  118. job2.setNumReduceTasks(1);
  119. FileInputFormat.setInputPaths(job2, new Path(otherArgs[1] + "/temp"));
  120. FileOutputFormat.setOutputPath(job2, new Path(otherArgs[1] + "/final"));
  121. job2.setMapperClass(ItemClickMapper2.class);
  122. job2.setReducerClass(SumReducer2.class);
  123. job2.setMapOutputKeyClass(IntWritable.class);
  124. job2.setMapOutputValueClass(Text.class);
  125. job2.setOutputKeyClass(Text.class);
  126. job2.setOutputValueClass(IntWritable.class);
  127. if (!job2.waitForCompletion(true)) {
  128. System.exit(1);
  129. }
  130. }
  131. }```
ttygqcqt

ttygqcqt1#

出现 `ClassNotFoundException: itemclick.itemclick.itemclick` 的原因是 Java 类名区分大小写:你的主类是 `itemclick.itemclick.ItemClick`(类名首字母大写),而命令行里写成了全小写的 `itemclick.itemclick.itemclick`。把类名改为正确的大小写即可,例如:

  1. java -cp Itemclick.jar:./conf/:./lib:$HADOOP_CLASSPATH itemclick.itemclick.ItemClick /Hadoop-click-analysis/Input /Hadoop-click-analysis/Output

相关问题