Hadoop connection

dtcbnfnu · posted 2021-06-03 in Hadoop · 1 answer · 464 views

I am trying to execute MapReduce code on a remote cluster from my local machine's IDE, using Spring Data Hadoop.
Hadoop 1.1.2, Spring 3.2.4, Spring Data Hadoop 1.0.0.
My bean configuration file, applicationContext.xml, is as follows:

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:hdp="http://www.springframework.org/schema/hadoop"
    xmlns:context="http://www.springframework.org/schema/context"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
        http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.2.xsd">

    <context:property-placeholder location="resources/hadoop.properties" />

    <hdp:configuration file-system-uri="${hd.fs}" job-tracker-uri="${hd.jobtracker.uri}">
    </hdp:configuration>

    <hdp:job id="wc-job" mapper="com.hadoop.basics.WordCounter.WCMapper"
        reducer="com.hadoop.basics.WordCounter.WCReducer"
        input-path="${wordcount.input.path}"
        output-path="${wordcount.output.path}" user="bigdata">
    </hdp:job>

    <hdp:job-runner id="myjobs-runner" job-ref="wc-job"
        run-at-startup="true" />

    <hdp:resource-loader id="resourceLoader" uri="${hd.fs}"
        user="bigdata" />
</beans>

hadoop.properties:

hd.fs=hdfs://cloudx-843-770:9000
hd.jobtracker.uri=cloudx-843-770:9001
wordcount.input.path=/scratchpad/input/Childhood_days.txt
wordcount.output.path=/scratchpad/output

The Java class that I "Run As…" from the IDE:

package com.hadoop.basics;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

public class WordCounter {

    private static final IntWritable one = new IntWritable(1);

    // Nested Mapper/Reducer classes must be static so the framework can
    // instantiate them without an enclosing WordCounter instance.
    public static class WCMapper extends Mapper<Text, Text, Text, IntWritable> {

        @Override
        protected void map(Text key, Text value, Context context)
                throws IOException, InterruptedException {
            // Emit (token, 1) for every whitespace-separated token in the line.
            StringTokenizer strTokenizer = new StringTokenizer(value.toString());
            Text token = new Text();
            while (strTokenizer.hasMoreTokens()) {
                token.set(strTokenizer.nextToken());
                context.write(token, one);
            }
        }
    }

    public static class WCReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            // Sum the counts collected for each token.
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) {
        // Bootstrapping the context triggers the job-runner (run-at-startup="true").
        AbstractApplicationContext context = new ClassPathXmlApplicationContext(
                "applicationContext.xml", WordCounter.class);
        System.out.println("Word Count Application Running");
        context.registerShutdownHook();
    }
}

The output is:

Aug 22, 2013 9:59:02 AM org.springframework.context.support.AbstractApplicationContext prepareRefresh
INFO: Refreshing org.springframework.context.support.ClassPathXmlApplicationContext@1815338: startup date [Thu Aug 22 09:59:02 IST 2013]; root of context hierarchy
Aug 22, 2013 9:59:03 AM org.springframework.beans.factory.xml.XmlBeanDefinitionReader loadBeanDefinitions
INFO: Loading XML bean definitions from class path resource [com/hadoop/basics/applicationContext.xml]
Aug 22, 2013 9:59:03 AM org.springframework.core.io.support.PropertiesLoaderSupport loadProperties
INFO: Loading properties file from class path resource [resources/hadoop.properties]
Aug 22, 2013 9:59:03 AM org.springframework.beans.factory.support.DefaultListableBeanFactory preInstantiateSingletons
INFO: Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Aug 22, 2013 9:59:03 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
INFO: Starting job [wc-job]
Aug 22, 2013 9:59:03 AM org.apache.hadoop.security.UserGroupInformation doAs
SEVERE: PriviledgedActionException as:bigdata via 298790 cause:org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
Aug 22, 2013 9:59:03 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
WARNING: Cannot start job [wc-job]
org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.apache.hadoop.ipc.Client.call(Client.java:1107)
    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
    at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
    at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
    at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
    at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
    at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
    at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
    at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
    at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
    at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
    at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
    at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
    at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
    at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
    at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
    at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
    at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
    at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
    at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
    at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Aug 22, 2013 9:59:03 AM org.springframework.beans.factory.support.DefaultSingletonBeanRegistry destroySingletons
INFO: Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Exception in thread "main" org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myjobs-runner': Invocation of init method failed; nested exception is java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1482)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
    at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
    at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
    at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
    at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
    at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
    at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
    at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
    at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Caused by: java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:209)
    at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
    at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
    at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
    ... 13 more
Caused by: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.apache.hadoop.ipc.Client.call(Client.java:1107)
    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
    at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
    at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
    at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
    at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
    at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
    at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
    at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
    at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
    ... 20 more

Clearly, the user 298790 (my local Windows machine's user) is not recognized on the cluster, which is why I specified user="bigdata" in the job configuration, as mentioned in the documentation.
The documentation also mentions:
"SHDP obeys the HDFS permissions, using the identity of the current user (by default) for interacting with the file system. In particular, the HdfsResourceLoader considers, when doing pattern matching, only the files that it's supposed to see and does not perform any privileged action. It is possible, however, to specify a different user, meaning the ResourceLoader interacts with HDFS using that user's rights - but this obeys the user impersonation rules."
I decided to use HdfsResourceLoader, but couldn't find any example or even configuration for it in the documentation - can anyone provide any pointers?
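For reference, this is roughly how I would expect to use it - a sketch based on the Spring ResourcePatternResolver contract that HdfsResourceLoader implements; the class name is mine, the bean name and pattern come from my configuration above, and the whole thing is untested:

import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.core.io.Resource;
import org.springframework.data.hadoop.fs.HdfsResourceLoader;

public class ResourceLoaderSketch { // hypothetical class, for illustration only
    public static void main(String[] args) throws Exception {
        AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(
                "applicationContext.xml", WordCounter.class);
        // The <hdp:resource-loader> bean resolves path patterns against HDFS,
        // using the rights of user "bigdata" (per the bean definition).
        HdfsResourceLoader loader = ctx.getBean("resourceLoader", HdfsResourceLoader.class);
        Resource[] inputs = loader.getResources("/scratchpad/input/*.txt");
        for (Resource r : inputs) {
            System.out.println(r.getURI());
        }
        ctx.close();
    }
}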
As per Hadoop Secure Impersonation, I believe I would need to add my Windows user 298790 to the remote cluster machine's (Ubuntu) user group, and also add my Windows hostname - which I find infeasible with a large number of users and ever-changing Windows client machines. If my assumption is correct, what can be done to avoid adding and configuring all these users?
/Added the following changes to core-site.xml/

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
    <property>
        <name>fs.default.name</name>
        <value>hdfs://cloudx-843-770:9000</value>
    </property>
    <property>
        <name>hadoop.proxyuser.298790.groups</name>
        <value>bigdata</value>
        <description>Allow the superuser 298790 to impersonate any member of the group bigdata</description>
    </property>
    <property>
        <name>hadoop.proxyuser.298790.hosts</name>
        <value>*</value>
        <description>The superuser 298790 can connect from any host to impersonate a user</description>
    </property>
</configuration>

I restarted all the Hadoop processes, but the error persists.
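To rule out a mix-up on the client side, I also printed the identity the Hadoop client authenticates with - a small diagnostic (the class name is mine; it assumes only the Hadoop 1.x client API on the classpath):

import org.apache.hadoop.security.UserGroupInformation;

public class WhoAmI { // hypothetical diagnostic class, for illustration only
    public static void main(String[] args) throws Exception {
        // Prints the identity the client presents to the JobTracker/NameNode;
        // in my case this reports 298790, matching the "as:bigdata via 298790" in the log.
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        System.out.println("Client identity: " + ugi.getUserName());
    }
}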
Then, I decided to create a new user, viz. 298790, on the cluster machine and add it to the group bigdata for impersonation:

root@cloudx-843-770:/home/bigdata# useradd -G bigdata 298790
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata# usermod -G bigdata 298790
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata# su 298790
$ groups
298790 bigdata
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata# cat /etc/passwd
root:x:0:0:root:/root:/bin/bash
daemon:x:1:1:daemon:/usr/sbin:/bin/sh
bin:x:2:2:bin:/bin:/bin/sh
sys:x:3:3:sys:/dev:/bin/sh
sync:x:4:65534:sync:/bin:/bin/sync
games:x:5:60:games:/usr/games:/bin/sh
man:x:6:12:man:/var/cache/man:/bin/sh
lp:x:7:7:lp:/var/spool/lpd:/bin/sh
mail:x:8:8:mail:/var/mail:/bin/sh
news:x:9:9:news:/var/spool/news:/bin/sh
uucp:x:10:10:uucp:/var/spool/uucp:/bin/sh
proxy:x:13:13:proxy:/bin:/bin/sh
www-data:x:33:33:www-data:/var/www:/bin/sh
backup:x:34:34:backup:/var/backups:/bin/sh
list:x:38:38:Mailing List Manager:/var/list:/bin/sh
irc:x:39:39:ircd:/var/run/ircd:/bin/sh
gnats:x:41:41:Gnats Bug-Reporting System (admin):/var/lib/gnats:/bin/sh
nobody:x:65534:65534:nobody:/nonexistent:/bin/sh
libuuid:x:100:101::/var/lib/libuuid:/bin/sh
syslog:x:101:103::/home/syslog:/bin/false
mysql:x:102:105:MySQL Server,,,:/nonexistent:/bin/false
messagebus:x:103:106::/var/run/dbus:/bin/false
whoopsie:x:104:107::/nonexistent:/bin/false
landscape:x:105:110::/var/lib/landscape:/bin/false
sshd:x:106:65534::/var/run/sshd:/usr/sbin/nologin
tomcat6:x:107:113::/usr/share/tomcat6:/bin/false
coesystem:x:1000:1000:coesystem,,,:/home/coesystem:/bin/bash
hpcc:x:999:1001:hpcc Runtime User:/home/hpcc:/bin/sh
hduser:x:1001:1002:hduser,1,1,1,1:/home/hduser:/bin/bash
bigdata:x:1002:1003:Big Data,1,1,1,1:/home/bigdata:/bin/bash
298790:x:1003:1004::/home/298790:/bin/sh

But now, when I try to stop (and then start) the cluster, it asks for a password for every process:

bigdata@cloudx-843-770:~/hadoop_ecosystem/apache_hadoop/hadoop-1.1.2/bin$ stop-all.sh
Warning: $HADOOP_HOME is deprecated.
stopping jobtracker
bigdata@localhost's password:
localhost: stopping tasktracker
stopping namenode
bigdata@localhost's password:
localhost: stopping datanode
bigdata@localhost's password:
localhost: stopping secondarynamenode

Now the error is slightly different: it first fails to connect, and then fails to impersonate:

Aug 22, 2013 5:14:17 PM org.springframework.context.support.AbstractApplicationContext prepareRefresh
INFO: Refreshing org.springframework.context.support.ClassPathXmlApplicationContext@922804: startup date [Thu Aug 22 17:14:17 IST 2013]; root of context hierarchy
Aug 22, 2013 5:14:17 PM org.springframework.beans.factory.xml.XmlBeanDefinitionReader loadBeanDefinitions
INFO: Loading XML bean definitions from class path resource [com/hadoop/basics/applicationContext.xml]
Aug 22, 2013 5:14:17 PM org.springframework.core.io.support.PropertiesLoaderSupport loadProperties
INFO: Loading properties file from class path resource [resources/hadoop.properties]
Aug 22, 2013 5:14:17 PM org.springframework.beans.factory.support.DefaultListableBeanFactory preInstantiateSingletons
INFO: Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Aug 22, 2013 5:14:18 PM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
INFO: Starting job [wc-job]
Aug 22, 2013 5:14:20 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:22 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:24 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:26 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:36 PM org.apache.hadoop.security.UserGroupInformation doAs
SEVERE: PriviledgedActionException as:bigdata via 298790 cause:org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
Aug 22, 2013 5:14:36 PM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
WARNING: Cannot start job [wc-job]
org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.apache.hadoop.ipc.Client.call(Client.java:1107)
    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
    at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
    at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
    at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
    at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
    at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
    at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
    at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
    at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
    at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
    at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
    at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
    at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
    at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
    at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
    at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
    at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
    at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
    at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
    at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Aug 22, 2013 5:14:36 PM org.springframework.beans.factory.support.DefaultSingletonBeanRegistry destroySingletons
INFO: Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Exception in thread "main" org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myjobs-runner': Invocation of init method failed; nested exception is java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1482)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
    at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
    at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
    at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
    at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
    at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
    at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
    at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
    at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
    at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Caused by: java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:209)
    at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
    at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
    at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
    at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
    at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
    ... 13 more
Caused by: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
    at org.apache.hadoop.ipc.Client.call(Client.java:1107)
    at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
    at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
    at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
    at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
    at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
    at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
    at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
    at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
    at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
    ... 20 more
Answer (by rhfm7lfc):

Apache Hadoop requires that your server-side configuration allow impersonation if you want one user to impersonate another.
This means that if you run as "foo" and want to actually submit the job as "bar", then your NameNode/JobTracker requires that the core-site.xml configuration it loads allow "foo" to proxy other users; something like the following must usually be present in the NameNode/JobTracker's core-site.xml:

<property>
    <name>hadoop.proxyuser.foo.groups</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.foo.hosts</name>
    <value>*</value>
</property>

This allows the user foo to impersonate any other user (the * for groups), and to do so when submitting from any host (the * for hosts).
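On the client side, this is the impersonation path the stack traces above go through - roughly the following sketch of the Hadoop 1.x UserGroupInformation API (the class name is mine; this is not the exact Spring Data Hadoop code):

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxySubmitSketch { // hypothetical class, for illustration only
    public static void main(String[] args) throws Exception {
        // "bar" is the user to impersonate; the real caller is the logged-in user ("foo").
        UserGroupInformation proxy = UserGroupInformation.createProxyUser(
                "bar", UserGroupInformation.getLoginUser());
        proxy.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                // Job submission would happen here; the server checks the
                // hadoop.proxyuser.foo.* settings before accepting the call,
                // and rejects it with "foo is not allowed to impersonate bar"
                // if they are missing or not loaded.
                return null;
            }
        });
    }
}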
While it is not strictly required, it is definitely recommended that the defined users and groups also exist on the NameNode, so that permissions, group resolution, etc. work properly. More on this at: http://www.cloudera.com/blog/2012/03/authorization-and-authentication-in-hadoop/
The documentation at http://static.springsource.org/spring-hadoop/docs/1.0.x/reference/html/security.html#security:kerberos should clarify this further.
