hive classnotfoundexception,即使所有JAR都添加到maven存储库中

qnzebej0  于 2021-05-29  发布在  Hadoop
关注(0)|答案(2)|浏览(561)

我已经把项目所需的所有 JAR 都加入了依赖，但仍无法解决这个异常。有人能给些建议吗？另外，能否告诉我如何授予对 Hive 数据库的访问权限？提前感谢。

  1. java.lang.ClassNotFoundException: org.apache.hadoop.hive.jdbc.HiveDriver
  2. at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
  3. at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
  4. at java.security.AccessController.doPrivileged(Native Method)
  5. at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
  6. at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
  7. at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
  8. at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
  9. at java.lang.Class.forName0(Native Method)
  10. at java.lang.Class.forName(Class.java:190)
  11. at org.ezytruk.com.CreateHiveExternalTable.createHiveExternalTable(CreateHiveExternalTable.java:20)
  12. at org.ezytruk.com.CreateHiveExternalTable.main(CreateHiveExternalTable.java:53)
  13. Exception in thread "main" java.sql.SQLException: No suitable driver found for jdbc:hive://localhost/EZYTRUK
  14. at java.sql.DriverManager.getConnection(DriverManager.java:596)
  15. at java.sql.DriverManager.getConnection(DriverManager.java:215)
  16. at org.ezytruk.com.CreateHiveExternalTable.createHiveExternalTable(CreateHiveExternalTable.java:39)
  17. at org.ezytruk.com.CreateHiveExternalTable.main(CreateHiveExternalTable.java:53)

pom.xml文件

  1. <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  2. <modelVersion>4.0.0</modelVersion>
  3. <groupId>BigData</groupId>
  4. <artifactId>BigData</artifactId>
  5. <version>0.0.1-SNAPSHOT</version>
  6. <properties>
  7. <slf4j.version>1.6.1</slf4j.version>
  8. <hadoop-version>2.6.0</hadoop-version>
  9. <mysql-connector-version>5.1.40</mysql-connector-version>
  10. <sqoop-core-version>1.99.3</sqoop-core-version>
  11. <zookeeper-version>3.4.9</zookeeper-version>
  12. <hive-jdbc-version>1.2.1</hive-jdbc-version>
  13. <commons-io-version>2.2</commons-io-version>
  14. <commons-logging.version>1.2</commons-logging.version>
  15. </properties>
  16. <dependencies>
  17. <dependency>
  18. <groupId>commons-io</groupId>
  19. <artifactId>commons-io</artifactId>
  20. <version>${commons-io-version}</version>
  21. </dependency>
  22. <dependency>
  23. <groupId>commons-logging</groupId>
  24. <artifactId>commons-logging</artifactId>
  25. <version>${commons-logging.version}</version>
  26. </dependency>
  27. <dependency>
  28. <groupId>mysql</groupId>
  29. <artifactId>mysql-connector-java</artifactId>
  30. <version>${mysql-connector-version}</version>
  31. </dependency>
  32. <dependency>
  33. <groupId>org.apache.hadoop</groupId>
  34. <artifactId>hadoop-common</artifactId>
  35. <version>${hadoop-version}</version>
  36. </dependency>
  37. <dependency>
  38. <groupId>org.apache.hadoop</groupId>
  39. <artifactId>hadoop-client</artifactId>
  40. <version>${hadoop-version}</version>
  41. </dependency>
  42. <dependency>
  43. <groupId>org.apache.hadoop</groupId>
  44. <artifactId>hadoop-hdfs</artifactId>
  45. <version>${hadoop-version}</version>
  46. </dependency>
  47. <dependency>
  48. <groupId>org.apache.hadoop</groupId>
  49. <artifactId>hadoop-mapreduce-client-core</artifactId>
  50. <version>${hadoop-version}</version>
  51. </dependency>
  52. <dependency>
  53. <groupId>org.apache.hadoop</groupId>
  54. <artifactId>hadoop-yarn-common</artifactId>
  55. <version>${hadoop-version}</version>
  56. </dependency>
  57. <dependency>
  58. <groupId>org.apache.hadoop</groupId>
  59. <artifactId>hadoop-core</artifactId>
  60. <version>1.2.1</version>
  61. </dependency>
  62. <dependency>
  63. <groupId>org.apache.sqoop</groupId>
  64. <artifactId>sqoop-core</artifactId>
  65. <version>${sqoop-core-version}</version>
  66. </dependency>
  67. <dependency>
  68. <groupId>org.apache.sqoop</groupId>
  69. <artifactId>sqoop-client</artifactId>
  70. <version>${sqoop-core-version}</version>
  71. </dependency>
  72. <dependency>
  73. <groupId>org.apache.sqoop</groupId>
  74. <artifactId>sqoop-common</artifactId>
  75. <version>${sqoop-core-version}</version>
  76. </dependency>
  77. <dependency>
  78. <groupId>org.apache.sqoop.connector</groupId>
  79. <artifactId>sqoop-connector-generic-jdbc</artifactId>
  80. <version>${sqoop-core-version}</version>
  81. </dependency>
  82. <dependency>
  83. <groupId>org.apache.sqoop</groupId>
  84. <artifactId>sqoop</artifactId>
  85. <version>1.4.1-incubating</version>
  86. </dependency>
  87. <dependency>
  88. <groupId>org.apache.zookeeper</groupId>
  89. <artifactId>zookeeper</artifactId>
  90. <version>${zookeeper-version}</version>
  91. </dependency>
  92. <dependency>
  93. <groupId>org.apache.hive</groupId>
  94. <artifactId>hive-jdbc</artifactId>
  95. <version>${hive-jdbc-version}</version>
  96. </dependency>
  97. <dependency>
  98. <groupId>org.apache.hive</groupId>
  99. <artifactId>hive-exec</artifactId>
  100. <version>${hive-jdbc-version}</version>
  101. </dependency>
  102. <dependency>
  103. <groupId>org.apache.hive</groupId>
  104. <artifactId>hive-metastore</artifactId>
  105. <version>${hive-jdbc-version}</version>
  106. </dependency>
  107. <dependency>
  108. <groupId>org.apache.hive</groupId>
  109. <artifactId>hive-common</artifactId>
  110. <version>${hive-jdbc-version}</version>
  111. </dependency>
  112. <dependency>
  113. <groupId>org.apache.hive</groupId>
  114. <artifactId>hive-service</artifactId>
  115. <version>${hive-jdbc-version}</version>
  116. </dependency>
  117. <dependency>
  118. <groupId>org.apache.hive</groupId>
  119. <artifactId>hive-shims</artifactId>
  120. <version>${hive-jdbc-version}</version>
  121. </dependency>
  122. <dependency>
  123. <groupId>org.apache.hive</groupId>
  124. <artifactId>hive-serde</artifactId>
  125. <version>${hive-jdbc-version}</version>
  126. </dependency>
  127. </dependencies>
  128. <packaging>war</packaging>
  129. <build>
  130. <sourceDirectory>src</sourceDirectory>
  131. <plugins>
  132. <plugin>
  133. <artifactId>maven-compiler-plugin</artifactId>
  134. <version>3.3</version>
  135. <configuration>
  136. <source>1.7</source>
  137. <target>1.7</target>
  138. </configuration>
  139. </plugin>
  140. <plugin>
  141. <artifactId>maven-war-plugin</artifactId>
  142. <version>2.6</version>
  143. <configuration>
  144. <warSourceDirectory>WebContent</warSourceDirectory>
  145. </configuration>
  146. </plugin>
  147. </plugins>
  148. </build>
  149. </project>

课程:

  1. package org.hive.com;
  2. import java.io.FileNotFoundException;
  3. import java.io.IOException;
  4. import java.sql.Connection;
  5. import java.sql.DriverManager;
  6. import java.sql.SQLException;
  7. import org.apache.hadoop.conf.Configuration;
  8. import org.apache.hadoop.fs.Path;
  9. import com.mysql.jdbc.Statement;
  10. public class CreateHiveExternalTable {
  11. public static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
  12. public static void createHiveExternalTable() throws FileNotFoundException, IOException, SQLException {
  13. try {
  14. Class.forName(driverName);
  15. } catch (ClassNotFoundException e) {
  16. // TODO Auto-generated catch block
  17. e.printStackTrace();
  18. }
  19. Configuration config = new Configuration();
  20. config.addResource(new Path("/usr/local/hadoop/etc/hadoop/conf/core-site.xml"));
  21. config.addResource(new Path("/usr/local/hadoop/etc/hadoop/conf/hdfs-site.xml"));
  22. Connection connect = DriverManager.getConnection("jdbc:hive://localhost/hivedb","hive","");
  23. Statement stmt = (Statement) connect.createStatement();
  24. //String tableName = properties.getProperty("hive_table_name");
  25. stmt.executeQuery("CREATE EXTERNAL TABLE IF NOT EXISTS"
  26. +"SHIPPER(S_ID INT,S_NAME VARCHAR(100),S_ADDR VARCHAR(100),S_CITY VARCHAR(100)"
  27. +"ROW FORMAT DELIMITED FIELDS TERMINATED BY ','"
  28. +"LOCATION 'hdfs://localhost://hive'");
  29. System.out.println("Table created.");
  30. connect.close();
  31. }
  32. public static void main(String[] args) throws FileNotFoundException, IOException, SQLException{
  33. CreateHiveExternalTable hiveTable = new CreateHiveExternalTable();
  34. hiveTable.createHiveExternalTable();
  35. }
  36. }
qlfbtfca

qlfbtfca1#

参考这篇帖子：《使用 JDBC 从 Java 连接到 Hive》。
尝试

  1. private static String driverName = "org.apache.hive.jdbc.HiveDriver"

而不是

  1. private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

同时，请确保代码中保留了 Class.forName(driverName) 这条加载驱动的语句。
也:

  1. Connection connect = DriverManager.getConnection("jdbc:hive2://localhost:HIVEPORT/hivedb","hive","");

而不是

  1. Connection connect = DriverManager.getConnection("jdbc:hive://localhost/hivedb","hive","");

我不知道你运行的是哪个端口,但是记住要改变这条线路

  1. localhost:HIVEPORT
展开查看全部
bmp9r5qi

bmp9r5qi2#

hive.server2.thrift.port 是用于查看端口号的属性。
在 Hive shell 中执行命令 "set hive.server2.thrift.port;"，即可得到 Hive 使用的端口号。
默认情况下，Hive 的端口是 10000，但你可以在 Hive shell 中用上面的命令确认实际值。

相关问题