How do I connect to Hive from Eclipse?

kknvjkwl · posted 2021-05-29 in Hadoop

I'm new to big data/Hadoop. I'm trying to connect to Hive from Eclipse over JDBC, but I keep getting the following error:

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hive/lib/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/javier/.m2/repository/org/slf4j/slf4j-log4j12/1.7.25/slf4j-log4j12-1.7.25.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/javier/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.4.1/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
ERROR StatusLogger No log4j2 configuration file found. Using default configuration: logging only errors to the console.
19:39:11.391 [main] ERROR hive.ql.metadata.Hive - Cannot initialize metastore due to autoCreate error
javax.jdo.JDODataStoreException: La tabla no existe : VERSION en Catalog  Schema . El MetaData no es correcto, o necesita poner "datanucleus.schema.autoCreateTables" como "true"
    at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:553) ~[datanucleus-api-jdo-4.2.1.jar:?]
    at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:720) ~[datanucleus-api-jdo-4.2.1.jar:?]
    at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:740) ~[datanucleus-api-jdo-4.2.1.jar:?]
    at org.apache.hadoop.hive.metastore.ObjectStore.setMetaStoreSchemaVersion(ObjectStore.java:7763) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:7657) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:7632) ~[hive-metastore-2.1.0.jar:2.1.0]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_191]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_191]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_191]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_191]
    at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:101) ~[hive-metastore-2.1.0.jar:2.1.0]
    at com.sun.proxy.$Proxy9.verifySchema(Unknown Source) ~[?:?]
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:547) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:612) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:398) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:78) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:84) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6396) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:236) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70) ~[hive-exec-2.1.0.jar:2.1.0]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.8.0_191]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:1.8.0_191]
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.8.0_191]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[?:1.8.0_191]
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1625) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:80) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:130) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:101) ~[hive-metastore-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3317) ~[hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3356) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3336) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3590) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:236) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:221) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:366) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:310) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:290) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:266) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:545) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:513) [hive-exec-2.1.0.jar:2.1.0]
    at org.apache.hive.service.cli.CLIService.applyAuthorizationConfigPolicy(CLIService.java:125) [hive-service-2.1.0.jar:2.1.0]
    at org.apache.hive.service.cli.CLIService.init(CLIService.java:110) [hive-service-2.1.0.jar:2.1.0]
    at org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService.init(EmbeddedThriftBinaryCLIService.java:45) [hive-service-2.1.0.jar:2.1.0]
    at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:153) [hive-jdbc-2.1.0.jar:2.1.0]
    at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107) [hive-jdbc-2.1.0.jar:2.1.0]
    at java.sql.DriverManager.getConnection(DriverManager.java:664) [?:1.8.0_191]
    at java.sql.DriverManager.getConnection(DriverManager.java:247) [?:1.8.0_191]
    at com.conexion.ConexionBD.getConnection(ConexionBD.java:17) [classes/:?]
    at com.controladora.Main.inicia(Main.java:24) [classes/:?]
    at com.controladora.Main.main(Main.java:19) [classes/:?]
Caused by: org.datanucleus.store.rdbms.exceptions.MissingTableException: La tabla no existe : VERSION en Catalog  Schema . El MetaData no es correcto, o necesita poner "datanucleus.schema.autoCreateTables" como "true"
    at org.datanucleus.store.rdbms.table.AbstractTable.exists(AbstractTable.java:606) ~[datanucleus-rdbms-4.1.7.jar:?]
    at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:3365) ~[datanucleus-rdbms-4.1.7.jar:?]
    at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2877) ~[datanucleus-rdbms-4.1.7.jar:?]
    at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:119) ~[datanucleus-rdbms-4.1.7.jar:?]
    at org.datanucleus.store.rdbms.RDBMSStoreManager.manageClasses(RDBMSStoreManager.java:1608) ~[datanucleus-rdbms-4.1.7.jar:?]
    at org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:671) ~[datanucleus-rdbms-4.1.7.jar:?]
    at org.datanucleus.store.rdbms.RDBMSStoreManager.getPropertiesForGenerator(RDBMSStoreManager.java:2069) ~[datanucleus-rdbms-4.1.7.jar:?]
    at org.datanucleus.store.AbstractStoreManager.getStrategyValue(AbstractStoreManager.java:1271) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.ExecutionContextImpl.newObjectId(ExecutionContextImpl.java:3759) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.state.StateManagerImpl.setIdentity(StateManagerImpl.java:2267) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.state.StateManagerImpl.initialiseForPersistentNew(StateManagerImpl.java:484) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.state.StateManagerImpl.initialiseForPersistentNew(StateManagerImpl.java:120) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.state.ObjectProviderFactoryImpl.newForPersistentNew(ObjectProviderFactoryImpl.java:218) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2078) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.ExecutionContextImpl.persistObjectWork(ExecutionContextImpl.java:1922) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.ExecutionContextImpl.persistObject(ExecutionContextImpl.java:1777) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.ExecutionContextThreadedImpl.persistObject(ExecutionContextThreadedImpl.java:217) ~[datanucleus-core-4.1.6.jar:?]
    at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:715) ~[datanucleus-api-jdo-4.2.1.jar:?]
    ... 48 more
Excepcion solicitando conexion: java.lang.RuntimeException: Error applying authorization policy on hive configuration: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Hive metastore database is not initialized. Please use schematool (e.g. ./schematool -initSchema -dbType ...) to create the schema. If needed, don't forget to include the option to auto-create the underlying database in your JDBC connection string (e.g. ?createDatabaseIfNotExist=true for mysql))
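The final MetaException spells out the root cause: the metastore schema was never initialized. Because the URL "jdbc:hive2://" names no host, the driver starts an embedded HiveServer2 plus a local metastore inside this JVM, and that metastore finds no VERSION table. A minimal fix, following the error message's own hint about schematool; assuming the default embedded Derby metastore (the -dbType value is an assumption, check hive-site.xml):

cd /usr/local/hive
bin/schematool -dbType derby -initSchema

Here is my code: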
package com.conexion;

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

import com.configuracion.Configuracion;

public class ConexionBD {
    private Connection conexion = null;
    Configuracion configuracion;

    ConexionBD() {
        this.configuracion = new Configuracion();
    }

    public Connection getConnection() throws IOException {
        try {
            if (conexion == null) {
                // Register the Hive JDBC driver.
                Class.forName("org.apache.hive.jdbc.HiveDriver");
                // "jdbc:hive2://" with no host runs HiveServer2 *embedded* in
                // this JVM, which in turn needs a local metastore.
                conexion = DriverManager.getConnection("jdbc:hive2://", "", "");
                System.out.println("conectado a la base de datos");
            }
        } catch (Exception e) {
            System.out.println("Excepcion solicitando conexion: " + e);
        }
        return conexion;
    }

    public void desconexion() {
        try {
            conexion.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
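Since Hive already works from the Ubuntu command line, an alternative is to connect to a standalone HiveServer2 instead of embedding one. A minimal sketch, separate from the class above; the class name, localhost and the default port 10000 are assumptions for a single-node install where hiveserver2 has been started:

import java.sql.Connection;
import java.sql.DriverManager;

public class ConexionRemota {
    public static void main(String[] args) throws Exception {
        // Assumes hiveserver2 (started with `hiveserver2` or
        // `hive --service hiveserver2`) is listening on the default port 10000.
        try (Connection con = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default", "", "")) {
            System.out.println("conectado: " + !con.isClosed());
        }
    }
}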

package com.configuracion;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class Configuracion {
    protected Configuration conf;
    protected FileSystem fs;

    public Configuracion() {
        conf = new Configuration();
        conf.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization,"
                + "org.apache.hadoop.io.serializer.WritableSerialization");
        //conf.addResource(new Path("/usr/local/hadoop/etc/hadoop/core-site.xml"));
        // Note: fs.default.name is the deprecated alias of fs.defaultFS.
        conf.set("fs.default.name", "hdfs://localhost:9000");
        try {
            this.fs = FileSystem.get(conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
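The commented-out addResource line above points at another option: instead of hard-coding fs.default.name, the client can load the cluster's own XML files. A small sketch under the assumption that the path from that commented line is correct (the class name and the hdfs-site.xml path are also assumptions):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ConfiguracionDesdeXml {
    public static FileSystem abrir() throws IOException {
        // Sketch: pick up the cluster settings from the Hadoop config files
        // instead of setting fs.default.name by hand. Paths are assumptions.
        Configuration conf = new Configuration();
        conf.addResource(new Path("/usr/local/hadoop/etc/hadoop/core-site.xml"));
        conf.addResource(new Path("/usr/local/hadoop/etc/hadoop/hdfs-site.xml"));
        return FileSystem.get(conf);
    }
}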
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>Hive</groupId>
  <artifactId>Hive</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <build>
    <sourceDirectory>src</sourceDirectory>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.7.0</version>
        <configuration>
          <source>1.8</source>
          <target>1.8</target>
        </configuration>
      </plugin>
    </plugins>
  </build>
  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>2.1.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpcore</artifactId>
      <version>4.4</version>
    </dependency>
  </dependencies>
</project>
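Unrelated to the crash, but the SLF4J "multiple bindings" warning at the top happens because both log4j-slf4j-impl and slf4j-log4j12 end up on the classpath. One hedged cleanup is to exclude one binding from whichever dependency drags it in (mvn dependency:tree shows the actual carrier; placing the exclusion on hadoop-client below is an assumption):

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.9.2</version>
    <exclusions>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
    </exclusions>
</dependency>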

I can connect to Hive from the Ubuntu command line just fine. Can I pass my Hive settings through to my code? (See the sketch after the version list below.)

Java 1.8
Hadoop 2.9.2
Hive 2.1.0
Eclipse Oxygen
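On passing Hive settings through the code: the Hive JDBC URL format is jdbc:hive2://host:port/db;sess_var_list?hive_conf_list#hive_var_list, so configuration properties can ride along after the "?". A minimal sketch; the class name, host, port and the sample property are all assumptions:

import java.sql.Connection;
import java.sql.DriverManager;

public class ConexionConConf {
    public static void main(String[] args) throws Exception {
        // Hive conf properties go in the '?hive_conf_list' part of the URL.
        // localhost:10000 and the property below are assumptions.
        String url = "jdbc:hive2://localhost:10000/default"
                + "?hive.exec.scratchdir=/tmp/mydir";
        try (Connection con = DriverManager.getConnection(url, "", "")) {
            System.out.println("conectado");
        }
    }
}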

No answers yet.
