Usage of the org.apache.hadoop.hive.ql.metadata.Hive.getAllTables() method, with code examples


This article collects some Java code examples of the org.apache.hadoop.hive.ql.metadata.Hive.getAllTables() method and shows how Hive.getAllTables() is used in practice. The examples were extracted from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they should serve as useful references. Details of Hive.getAllTables() are as follows:
Package path: org.apache.hadoop.hive.ql.metadata.Hive
Class name: Hive
Method name: getAllTables

Hive.getAllTables overview

Get all table names for the current database.
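
Before the collected snippets, here is a minimal, self-contained sketch of a typical call site. It is only an illustration, not taken from the projects below: it assumes a HiveConf that points at a reachable metastore, and the database name demo_db and the class name ListTablesExample are made up for the example.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ListTablesExample {
 public static void main(String[] args) throws HiveException {
  // Hive.get(conf) returns the thread-local Hive client bound to this configuration;
  // the configuration must point at a reachable metastore.
  HiveConf conf = new HiveConf();
  Hive db = Hive.get(conf);

  // Overload that names the database explicitly; "demo_db" is illustrative only.
  List<String> tables = db.getAllTables("demo_db");
  for (String table : tables) {
   System.out.println(table);
  }
 }
}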

Code examples

Code example source: origin: apache/hive

public static Collection<String> getAllTables(Hive db, String dbName) throws HiveException {
 return Collections2.filter(db.getAllTables(dbName),
     tableName -> {
      assert tableName != null;
      return !tableName.toLowerCase().startsWith(
          SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX.toLowerCase());
     });
}

Code example source: origin: apache/hive

private boolean isDbEmpty(String dbName) throws HiveException {
 List<String> allTables = context.hiveDb.getAllTables(dbName);
 List<String> allFunctions = context.hiveDb.getFunctions(dbName, "*");
 return allTables.isEmpty() && allFunctions.isEmpty();
}

Code example source: origin: apache/drill

private Iterable<? extends String> matchesTbl(String dbName, String tblPattern)
  throws HiveException {
 if (tblPattern == null) {
  return db.getAllTables(dbName);
 } else {
  return db.getTablesByPattern(dbName, tblPattern);
 }
}

Code example source: origin: apache/hive

/**
 * Removes all databases and tables from the metastore
 */
public static void cleanupHMS(Hive hive, Warehouse wh, FsPermission defaultPerm)
 throws HiveException, MetaException, NoSuchObjectException {
 for (String dbName : hive.getAllDatabases()) {
  if (dbName.equals("default")) {
   continue;
  }
  try {
   Path path = getDbPath(hive, wh, dbName);
   FileSystem whFs = path.getFileSystem(hive.getConf());
   whFs.setPermission(path, defaultPerm);
  } catch (IOException ex) {
   //ignore
  }
  hive.dropDatabase(dbName, true, true, true);
 }
 //clean tables in default db
 for (String tablename : hive.getAllTables("default")) {
  hive.dropTable("default", tablename, true, true);
 }
}

Code example source: origin: apache/drill

try {
 tableNames = db.getAllTables(dbName);
} catch (HiveException e) {
 throw new SemanticException(e);
}

Code example source: origin: apache/hive

try {
 tableNames = db.getAllTables(dbName);
} catch (HiveException e) {
 throw new SemanticException(e);
}

Code example source: origin: apache/hive

for (String tableName : hm.getAllTables(dbName)) {
 Table table = hm.getTable(dbName, tableName);
 hm.dropTable(dbName, tableName);
}

Code example source: origin: apache/phoenix

for (String tblName : db.getAllTables()) {
 Table tblObj = null;
 try {
  // ...

for (String tblName : db.getAllTables()) {
 if (!DEFAULT_DATABASE_NAME.equals(dbName) || !srcTables.contains(tblName)) {
  Table tblObj = null;
  // ...

Code example source: origin: org.apache.hadoop.hive/hive-exec

/**
 * Get all table names for the current database.
 * @return List of table names
 * @throws HiveException
 */
public List<String> getAllTables() throws HiveException {
 return getAllTables(getCurrentDatabase());
}
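
As the snippet above shows, the no-argument overload resolves the session's current database and delegates to getAllTables(String). Below is a hedged sketch contrasting the two overloads; SessionState.start() and the class and variable names are assumptions made for the illustration, not part of the article's sources.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;

public class OverloadSketch {
 public static void main(String[] args) throws HiveException {
  HiveConf conf = new HiveConf();
  // Starting a session defines the "current database" (initially "default").
  SessionState.start(conf);
  Hive db = Hive.get(conf);

  List<String> fromCurrentDb = db.getAllTables();           // current database of the session
  List<String> fromDefaultDb = db.getAllTables("default");  // same database, named explicitly
  System.out.println(fromCurrentDb.equals(fromDefaultDb));  // true while the session stays on "default"
 }
}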

Code example source: origin: apache/incubator-atlas

/**
 * Imports all tables for the given db
 * @param databaseReferenceable
 * @param databaseName
 * @param failOnError
 * @throws Exception
 */
private int importTables(Referenceable databaseReferenceable, String databaseName, final boolean failOnError) throws Exception {
  int tablesImported = 0;
  List<String> hiveTables = hiveClient.getAllTables(databaseName);
  LOG.info("Importing tables {} for db {}", hiveTables.toString(), databaseName);
  for (String tableName : hiveTables) {
    int imported = importTable(databaseReferenceable, databaseName, tableName, failOnError);
    tablesImported += imported;
  }
  if (tablesImported == hiveTables.size()) {
    LOG.info("Successfully imported all {} tables from {} ", tablesImported, databaseName);
  } else {
    LOG.error("Able to import {} tables out of {} tables from {}. Please check logs for import errors", tablesImported, hiveTables.size(), databaseName);
  }
  return tablesImported;
}

Code example source: origin: org.apache.lens/lens-cube

private Collection<String> getAllHiveTableNames() throws HiveException, LensException {
 if (!allTablesPopulated) {
  List<String> allTables = getClient().getAllTables();
  for (String tblName : allTables) {
   // getTable call here would add the table to allHiveTables
   getTable(tblName);
  }
  allTablesPopulated = enableCaching;
  return allTables;
 } else {
  return allHiveTables.keySet();
 }
}

Code example source: origin: com.facebook.presto.hive/hive-apache

/**
 * Get all table names for the current database.
 * @return List of table names
 * @throws HiveException
 */
public List<String> getAllTables() throws HiveException {
 return getAllTables(SessionState.get().getCurrentDatabase());
}

Code example source: origin: org.apache.hadoop.hive/hive-exec

 LOG.info("results : " + tbls.size());
} else {
 tbls = db.getAllTables(dbName);
}

Code example source: origin: com.facebook.presto.hive/hive-apache

 LOG.info("results : " + tbls.size());
} else {
 tbls = db.getAllTables(dbName);
}

Code example source: origin: com.facebook.presto.hive/hive-apache

try {
 tableNames = db.getAllTables(dbName);
} catch (HiveException e) {
 throw new SemanticException(e);
}

Code example source: origin: io.snappydata/snappydata-core

ArrayList<ExternalTableMetaData> externalTables = new ArrayList<>();
for (String schema : schemas) {
 List<String> tables = hmc.getAllTables(schema);
 for (String tableName : tables) {
  try {
   // ...

ArrayList<PolicyTableData> policyData = new ArrayList<>();
for (String schema : schemas) {
 List<String> tables = hmc.getAllTables(schema);
 for (String tableName : tables) {
  try {
   // ...

HashMap<String, List<String>> dbTablesMap = new HashMap<>();
for (String db : dbList) {
 List<String> tables = hmc.getAllTables(db);
 // ...
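
The fragment above only hints at the multi-database walk, so here is a hedged, self-contained sketch of that pattern built from the two calls that appear throughout this article, getAllDatabases() and getAllTables(dbName); the class name and the printed summary are illustrative assumptions.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class DbTablesMapSketch {
 public static void main(String[] args) throws HiveException {
  Hive db = Hive.get(new HiveConf());

  // Build a database -> table-names map, one metastore round trip per database.
  Map<String, List<String>> dbTablesMap = new HashMap<>();
  for (String dbName : db.getAllDatabases()) {
   dbTablesMap.put(dbName, db.getAllTables(dbName));
  }

  dbTablesMap.forEach((name, tables) ->
    System.out.println(name + " -> " + tables.size() + " table(s)"));
 }
}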
