作业提交后classnotfoundexception

4dbbbstv  于 2021-06-03  发布在  Hadoop
关注(0)|答案(4)|浏览(498)

我正在尝试使用SpringData-hadoop从本地机器的ide在远程集群上执行mr代码
//hadoop 1.1.2、spring 3.2.4、spring data hadoop 1.0.0
尝试了以下版本:
hadoop 1.2.1、spring 4.0.1、spring data hadoop 2.0.2
applicationcontext.xml:

  1. <?xml version="1.0" encoding="UTF-8"?>
  2. <beans xmlns="http://www.springframework.org/schema/beans"
  3. xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:hdp="http://www.springframework.org/schema/hadoop"
  4. xmlns:context="http://www.springframework.org/schema/context"
  5. xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
  6. http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
  7. http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.2.xsd">
  8. <context:property-placeholder location="resources/hadoop.properties" />
  9. <hdp:configuration file-system-uri="${hd.fs}" job-tracker-uri="${hd.jobtracker.uri}">
  10. </hdp:configuration>
  11. <hdp:job id="wc-job" mapper="com.hadoop.basics.WordCounter.WCMapper"
  12. reducer="com.hadoop.basics.WordCounter.WCReducer" input-path="${wordcount.input.path}"
  13. output-path="${wordcount.output.path}" user="bigdata">
  14. </hdp:job>
  15. <hdp:job-runner id="myjobs-runner" job-ref="wc-job"
  16. run-at-startup="true" />
  17. <hdp:resource-loader id="resourceLoader" uri="${hd.fs}"
  18. user="bigdata" />
  19. </beans>

wordcounter.java语言:

  1. package com.hadoop.basics;
  2. import java.io.IOException;
  3. import java.util.StringTokenizer;
  4. import org.apache.hadoop.conf.Configured;
  5. import org.apache.hadoop.io.IntWritable;
  6. import org.apache.hadoop.io.Text;
  7. import org.apache.hadoop.mapreduce.Mapper;
  8. import org.apache.hadoop.mapreduce.Reducer;
  9. import org.springframework.context.support.AbstractApplicationContext;
  10. import org.springframework.context.support.ClassPathXmlApplicationContext;
  11. public class WordCounter {
  12. private static IntWritable one = new IntWritable(1);
  13. public static class WCMapper extends Mapper<Text, Text, Text, IntWritable> {
  14. @Override
  15. protected void map(
  16. Text key,
  17. Text value,
  18. org.apache.hadoop.mapreduce.Mapper<Text, Text, Text, IntWritable>.Context context)
  19. throws IOException, InterruptedException {
  20. // TODO Auto-generated method stub
  21. StringTokenizer strTokenizer = new StringTokenizer(value.toString());
  22. Text token = new Text();
  23. while (strTokenizer.hasMoreTokens()) {
  24. token.set(strTokenizer.nextToken());
  25. context.write(token, one);
  26. }
  27. }
  28. }
  29. public static class WCReducer extends
  30. Reducer<Text, IntWritable, Text, IntWritable> {
  31. @Override
  32. protected void reduce(
  33. Text key,
  34. Iterable<IntWritable> values,
  35. org.apache.hadoop.mapreduce.Reducer<Text, IntWritable, Text, IntWritable>.Context context)
  36. throws IOException, InterruptedException {
  37. // TODO Auto-generated method stub
  38. int sum = 0;
  39. for (IntWritable value : values) {
  40. sum += value.get();
  41. }
  42. context.write(key, new IntWritable(sum));
  43. }
  44. }
  45. public static void main(String[] args) {
  46. AbstractApplicationContext context = new ClassPathXmlApplicationContext(
  47. "applicationContext.xml", WordCounter.class);
  48. System.out.println("Word Count Application Running");
  49. context.registerShutdownHook();
  50. }
  51. }

输出为:

  1. Aug 23, 2013 11:07:48 AM org.springframework.context.support.AbstractApplicationContext prepareRefresh
  2. INFO: Refreshing org.springframework.context.support.ClassPathXmlApplicationContext@1815338: startup date [Fri Aug 23 11:07:48 IST 2013]; root of context hierarchy
  3. Aug 23, 2013 11:07:48 AM org.springframework.beans.factory.xml.XmlBeanDefinitionReader loadBeanDefinitions
  4. INFO: Loading XML bean definitions from class path resource [com/hadoop/basics/applicationContext.xml]
  5. Aug 23, 2013 11:07:48 AM org.springframework.core.io.support.PropertiesLoaderSupport loadProperties
  6. INFO: Loading properties file from class path resource [resources/hadoop.properties]
  7. Aug 23, 2013 11:07:48 AM org.springframework.beans.factory.support.DefaultListableBeanFactory preInstantiateSingletons
  8. INFO: Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
  9. Aug 23, 2013 11:07:49 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
  10. INFO: Starting job [wc-job]
  11. Aug 23, 2013 11:07:49 AM org.apache.hadoop.mapred.JobClient copyAndConfigureFiles
  12. WARNING: No job jar file set. User classes may not be found. See JobConf(Class) or JobConf#setJar(String).
  13. Aug 23, 2013 11:07:49 AM org.apache.hadoop.mapreduce.lib.input.FileInputFormat listStatus
  14. INFO: Total input paths to process : 1
  15. Aug 23, 2013 11:07:50 AM org.apache.hadoop.util.NativeCodeLoader <clinit>
  16. WARNING: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
  17. Aug 23, 2013 11:07:50 AM org.apache.hadoop.io.compress.snappy.LoadSnappy <clinit>
  18. WARNING: Snappy native library not loaded
  19. Aug 23, 2013 11:07:52 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
  20. INFO: Running job: job_201308231532_0002
  21. Aug 23, 2013 11:07:53 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
  22. INFO: map 0% reduce 0%
  23. Aug 23, 2013 11:08:12 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
  24. INFO: Task Id : attempt_201308231532_0002_m_000000_0, Status : FAILED
  25. java.lang.RuntimeException: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
  26. at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:849)
  27. at org.apache.hadoop.mapreduce.JobContext.getMapperClass(JobContext.java:199)
  28. at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:719)
  29. at org.apache.hadoop.mapred.MapTask.run(MapTask.java:370)
  30. at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
  31. at java.security.AccessController.doPrivileged(Native Method)
  32. at javax.security.auth.Subject.doAs(Subject.java:415)
  33. at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
  34. at org.apache.hadoop.mapred.Child.main(Child.java:249)
  35. Caused by: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
  36. at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
  37. at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
  38. at java.security.AccessController.doPrivileged(Native Method)
  39. at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
  40. at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
  41. at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
  42. at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
  43. at java.lang.Class.forName0(Native Method)
  44. at java.lang.Class.forName(Class.java:264)
  45. at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:802)
  46. at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:847)
  47. ... 8 more
  48. Aug 23, 2013 11:08:33 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
  49. INFO: Task Id : attempt_201308231532_0002_m_000000_1, Status : FAILED
  50. java.lang.RuntimeException: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
  51. at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:849)
  52. at org.apache.hadoop.mapreduce.JobContext.getMapperClass(JobContext.java:199)
  53. at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:719)
  54. at org.apache.hadoop.mapred.MapTask.run(MapTask.java:370)
  55. at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
  56. at java.security.AccessController.doPrivileged(Native Method)
  57. at javax.security.auth.Subject.doAs(Subject.java:415)
  58. at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
  59. at org.apache.hadoop.mapred.Child.main(Child.java:249)
  60. Caused by: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
  61. at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
  62. at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
  63. at java.security.AccessController.doPrivileged(Native Method)
  64. at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
  65. at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
  66. at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
  67. at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
  68. at java.lang.Class.forName0(Native Method)
  69. at java.lang.Class.forName(Class.java:264)
  70. at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:802)
  71. at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:847)
  72. ... 8 more
  73. Aug 23, 2013 11:08:51 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
  74. INFO: Task Id : attempt_201308231532_0002_m_000000_2, Status : FAILED
  75. java.lang.RuntimeException: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
  76. at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:849)
  77. at org.apache.hadoop.mapreduce.JobContext.getMapperClass(JobContext.java:199)
  78. at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:719)
  79. at org.apache.hadoop.mapred.MapTask.run(MapTask.java:370)
  80. at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
  81. at java.security.AccessController.doPrivileged(Native Method)
  82. at javax.security.auth.Subject.doAs(Subject.java:415)
  83. at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
  84. at org.apache.hadoop.mapred.Child.main(Child.java:249)
  85. Caused by: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
  86. at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
  87. at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
  88. at java.security.AccessController.doPrivileged(Native Method)
  89. at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
  90. at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
  91. at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
  92. at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
  93. at java.lang.Class.forName0(Native Method)
  94. at java.lang.Class.forName(Class.java:264)
  95. at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:802)
  96. at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:847)
  97. ... 8 more
  98. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
  99. INFO: Job complete: job_201308231532_0002
  100. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  101. INFO: Counters: 7
  102. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  103. INFO: Job Counters
  104. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  105. INFO: SLOTS_MILLIS_MAPS=86688
  106. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  107. INFO: Total time spent by all reduces waiting after reserving slots (ms)=0
  108. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  109. INFO: Total time spent by all maps waiting after reserving slots (ms)=0
  110. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  111. INFO: Launched map tasks=4
  112. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  113. INFO: Data-local map tasks=4
  114. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  115. INFO: SLOTS_MILLIS_REDUCES=0
  116. Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
  117. INFO: Failed map tasks=1
  118. Aug 23, 2013 11:09:24 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
  119. INFO: Completed job [wc-job]
  120. Aug 23, 2013 11:09:24 AM org.springframework.beans.factory.support.DefaultSingletonBeanRegistry destroySingletons
  121. INFO: Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
  122. Exception in thread "main" org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myjobs-runner': Invocation of init method failed; nested exception is java.lang.IllegalStateException: Job wc-job] failed to start; status=FAILED
  123. at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1482)
  124. at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
  125. at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
  126. at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
  127. at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
  128. at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
  129. at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
  130. at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
  131. at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
  132. at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
  133. at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
  134. at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
  135. at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
  136. at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
  137. Caused by: java.lang.IllegalStateException: Job wc-job] failed to start; status=FAILED
  138. at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:219)
  139. at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
  140. at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
  141. at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
  142. at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
  143. at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
  144. at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
  145. at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
  146. ... 13 more

什么配置。我错过了吗?是否真的可以使用spring数据远程提交hadoop作业而不创建jar等?

fafcakar

fafcakar1#

右键单击项目,build path->configure build path。选择libraries选项卡,单击addexternaljars并复制hadoop目录中所有必需的jar文件。我想,这会解决你的问题。

xvw2m8pv

xvw2m8pv2#

我得到了同样的问题,然后分离了mapper和reducer类的工作,并在 applicationContext.xml :

  1. <?xml version="1.0" encoding="UTF-8"?>
  2. <beans xmlns="http://www.springframework.org/schema/beans"
  3. xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:util="http://www.springframework.org/schema/util"
  4. xmlns:context="http://www.springframework.org/schema/context"
  5. xmlns:hdp="http://www.springframework.org/schema/hadoop" xmlns:batch="http://www.springframework.org/schema/batch"
  6. xsi:schemaLocation="
  7. http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
  8. http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
  9. http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
  10. http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd
  11. http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-4.2.xsd">
  12. <context:property-placeholder location="classpath:application.properties" />
  13. <hdp:configuration namenode-principal="hdfs://xx.yy.com" rm-manager-uri="xx.yy.com"
  14. security-method="kerb" user-keytab="location" rm-manager-principal="username"
  15. user-principal="username">
  16. fs.default.name=${fs.default.name}
  17. mapred.job.tracker=${mapred.job.tracker}
  18. </hdp:configuration>
  19. <hdp:job id="wordCountJobId" input-path="${input.path}"
  20. output-path="${output.path}" jar-by-class="com.xx.poc.Application"
  21. mapper="com.xx.poc.Map" reducer="com.xx.poc.Reduce" />
  22. <hdp:job-runner id="wordCountJobRunner" job-ref="wordCountJobId"
  23. run-at-startup="true" />
  24. </beans>
展开查看全部
kh212irz

kh212irz3#

我在这里的第一个hadoop工作也遇到了同样的问题,解决方法是在eclipse中编译jar文件。
当您想在eclipse中将java项目导出为jar文件时,有两个选项可用:

  1. Extract required libraries into generated JAR
  2. Package required libraries into generated JAR

第一个选择解决了我的问题,也可能解决了你的问题。

csbfibhn

csbfibhn4#

你可以试试 jar-by-class 属性来定位jar并让hadoop将jar上传到tasktracker。

  1. <hdp:job id="wc-job" mapper="com.hadoop.basics.WordCounter.WCMapper"
  2. reducer="com.hadoop.basics.WordCounter.WCReducer"
  3. input-path="${wordcount.input.path}"
  4. jar-by-class="com.hadoop.basics.WordCounter"
  5. output-path="${wordcount.output.path}" user="bigdata">
  6. </hdp:job>

最后,wcmapper和wcreducer应该是静态的,否则它们不能由hadoop创建。

相关问题