Error copying a file from the local filesystem to HDFS in Hadoop

Asked by kpbpu008 on 2021-06-03 in Hadoop

I am working with Hadoop, and I am currently trying to copy a file from the local filesystem to HDFS with the following command:

  hadoop fs -put d:\hadoop\weblogs /so/data/weblogs

but I get the following error:

  c:\Hadoop\hadoop-1.1.0-SNAPSHOT>hadoop fs -put d:\hadoop\weblogs /so/data/weblogs
  12/12/03 19:05:16 WARN hdfs.DFSClient: DataStreamer Exception: org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /so/data/weblogs/weblogs/u_ex12110418.log could only be replicated to 0 nodes, instead of 1
          at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1557)
          at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:695)
          at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
          at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
          at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
          at java.lang.reflect.Method.invoke(Method.java:597)
          at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:563)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
          at java.security.AccessController.doPrivileged(Native Method)
          at javax.security.auth.Subject.doAs(Subject.java:396)
          at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1135)
          at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)
          at org.apache.hadoop.ipc.Client.call(Client.java:1070)
          at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:225)
          at $Proxy1.addBlock(Unknown Source)
          at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
          at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
          at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
          at java.lang.reflect.Method.invoke(Method.java:597)
          at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:82)
          at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:59)
          at $Proxy1.addBlock(Unknown Source)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3518)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:3381)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2500(DFSClient.java:2593)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2833)
  12/12/03 19:05:16 WARN hdfs.DFSClient: Error Recovery for block null bad datanode[0] nodes == null
  12/12/03 19:05:16 WARN hdfs.DFSClient: Could not get block locations. Source file "/so/data/weblogs/weblogs/u_ex12110418.log" - Aborting...
  put: java.io.IOException: File /so/data/weblogs/weblogs/u_ex12110418.log could only be replicated to 0 nodes, instead of 1
  12/12/03 19:05:16 ERROR hdfs.DFSClient: Exception closing file /so/data/weblogs/weblogs/u_ex12110418.log : org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /so/data/weblogs/weblogs/u_ex12110418.log could only be replicated to 0 nodes, instead of 1
          at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1557)
          at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:695)
          at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
          at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
          at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
          at java.lang.reflect.Method.invoke(Method.java:597)
          at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:563)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
          at java.security.AccessController.doPrivileged(Native Method)
          at javax.security.auth.Subject.doAs(Subject.java:396)
          at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1135)
          at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)
  org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /so/data/weblogs/weblogs/u_ex12110418.log could only be replicated to 0 nodes, instead of 1
          at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1557)
          at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:695)
          at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
          at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
          at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
          at java.lang.reflect.Method.invoke(Method.java:597)
          at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:563)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
          at java.security.AccessController.doPrivileged(Native Method)
          at javax.security.auth.Subject.doAs(Subject.java:396)
          at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1135)
          at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)
          at org.apache.hadoop.ipc.Client.call(Client.java:1070)
          at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:225)
          at $Proxy1.addBlock(Unknown Source)
          at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
          at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
          at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
          at java.lang.reflect.Method.invoke(Method.java:597)
          at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:82)
          at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:59)
          at $Proxy1.addBlock(Unknown Source)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3518)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:3381)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2500(DFSClient.java:2593)
          at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2833)
  c:\Hadoop\hadoop-1.1.0-SNAPSHOT>

Can anyone tell me what is wrong with the command above, and what I need to do to avoid this error?

bejyjqdl #1

The DataNodes of your Hadoop cluster are not running. Check them with the jps command.
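
For reference, a minimal check along these lines (a sketch, assuming a standard Hadoop 1.x installation; the exact set of daemons listed may vary with your setup):

  jps
  # a healthy single-node Hadoop 1.x setup should list, among others:
  #   NameNode
  #   DataNode
  #   SecondaryNameNode
  # if DataNode is missing, try starting it and check again:
  hadoop-daemon.sh start datanode
  # then confirm the NameNode actually reports at least one live DataNode:
  hadoop dfsadmin -report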

anauzrmj #2

One of the following:

  * The remote system running HDFS (the one specified in your configuration) has not been started.
  * There is a network problem and the client cannot connect to the remote system.
  * The HDFS filesystem has run out of disk space.
  * You have configured the wrong remote system.
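
A rough checklist for these causes (a sketch, assuming Hadoop 1.x; namenode-host and port 9000 are placeholders for whatever fs.default.name is set to in conf/core-site.xml):

  # causes 1 and 4: check which NameNode the client is configured to use
  #   (the fs.default.name property in conf/core-site.xml)
  # cause 2: verify the NameNode RPC port is reachable over the network
  telnet namenode-host 9000
  # cause 3: check live DataNodes and remaining HDFS capacity
  hadoop dfsadmin -report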
