Usage and Code Examples of the org.apache.hadoop.hive.ql.metadata.Table.getStorageHandler() Method


This article collects Java code examples of the org.apache.hadoop.hive.ql.metadata.Table.getStorageHandler() method and shows how it is used in practice. The examples are drawn from selected open-source projects found on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as useful references. Details of Table.getStorageHandler() are as follows:
Package: org.apache.hadoop.hive.ql.metadata
Class: Table
Method: getStorageHandler

About Table.getStorageHandler

No official description is available on the source page. Judging from the examples below, getStorageHandler() returns the table's HiveStorageHandler, or null when the table is a native Hive table with no storage handler configured; callers typically use the null check to distinguish native tables from tables backed by handlers such as HBase, Druid, or JDBC.
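
As a quick orientation before the project examples, here is a minimal sketch (not taken from any of the projects below) of the typical null-check pattern around getStorageHandler(); the database/table names and the helper method isNonNative are illustrative assumptions.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class StorageHandlerCheck {
      // Hypothetical helper: returns true when the table is backed by a storage handler
      // (e.g. HBase, Druid, JDBC), i.e. it is a non-native table.
      public static boolean isNonNative(String dbName, String tableName) throws HiveException {
        Hive db = Hive.get(new HiveConf());          // obtain the Hive metastore client wrapper
        Table tbl = db.getTable(dbName, tableName);  // load the table metadata
        HiveStorageHandler handler = tbl.getStorageHandler();
        // Native tables have no storage handler, so getStorageHandler() returns null.
        return handler != null;
      }
    }

The same null-check pattern appears throughout the obtainTableType and doesTableNeedLocation examples that follow.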

Code Examples

Code example source: apache/hive

    private static TableType obtainTableType(Table tabMetaData) {
      if (tabMetaData.getStorageHandler() != null) {
        final String storageHandlerStr = tabMetaData.getStorageHandler().toString();
        if (storageHandlerStr.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
          return TableType.DRUID;
        }
        if (storageHandlerStr.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)) {
          return TableType.JDBC;
        }
      }
      return TableType.NATIVE;
    }

Code example source: apache/drill

    private TableType obtainTableType(Table tabMetaData) {
      if (tabMetaData.getStorageHandler() != null &&
          tabMetaData.getStorageHandler().toString().equals(
              Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
        return TableType.DRUID;
      }
      return TableType.NATIVE;
    }

Code example source: apache/drill

    private static TableType obtainTableType(Table tabMetaData) {
      if (tabMetaData.getStorageHandler() != null &&
          tabMetaData.getStorageHandler().toString().equals(
              Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
        return TableType.DRUID;
      }
      return TableType.NATIVE;
    }

Code example source: apache/hive

    private TableType obtainTableType(Table tabMetaData) {
      if (tabMetaData.getStorageHandler() != null) {
        final String storageHandlerStr = tabMetaData.getStorageHandler().toString();
        if (storageHandlerStr.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
          return TableType.DRUID;
        }
        if (storageHandlerStr.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)) {
          return TableType.JDBC;
        }
      }
      return TableType.NATIVE;
    }

Code example source: apache/drill

    public static boolean doesTableNeedLocation(Table tbl) {
      // TODO: If we are ok with breaking compatibility of existing 3rd party StorageHandlers,
      // this method could be moved to the HiveStorageHandler interface.
      boolean retval = true;
      if (tbl.getStorageHandler() != null) {
        String sh = tbl.getStorageHandler().toString();
        retval = !sh.equals("org.apache.hadoop.hive.hbase.HBaseStorageHandler")
            && !sh.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID);
      }
      return retval;
    }

Code example source: apache/hive

    public static boolean doesTableNeedLocation(Table tbl) {
      // TODO: If we are ok with breaking compatibility of existing 3rd party StorageHandlers,
      // this method could be moved to the HiveStorageHandler interface.
      boolean retval = true;
      if (tbl.getStorageHandler() != null) {
        // TODO: why doesn't this check class name rather than toString?
        String sh = tbl.getStorageHandler().toString();
        retval = !sh.equals("org.apache.hadoop.hive.hbase.HBaseStorageHandler")
            && !sh.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)
            && !sh.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)
            && !sh.equals("org.apache.hadoop.hive.accumulo.AccumuloStorageHandler");
      }
      return retval;
    }

Code example source: apache/drill

    final public Class<? extends InputFormat> getInputFormatClass() {
      if (inputFormatClass == null) {
        try {
          String className = tTable.getSd().getInputFormat();
          if (className == null) {
            if (getStorageHandler() == null) {
              return null;
            }
            inputFormatClass = getStorageHandler().getInputFormatClass();
          } else {
            inputFormatClass = (Class<? extends InputFormat>)
                Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
          }
        } catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }
      }
      return inputFormatClass;
    }

Code example source: apache/hive

    final public Class<? extends InputFormat> getInputFormatClass() {
      if (inputFormatClass == null) {
        try {
          String className = tTable.getSd().getInputFormat();
          if (className == null) {
            if (getStorageHandler() == null) {
              return null;
            }
            inputFormatClass = getStorageHandler().getInputFormatClass();
          } else {
            inputFormatClass = (Class<? extends InputFormat>)
                Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
          }
        } catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }
      }
      return inputFormatClass;
    }

Code example source: apache/drill

    final public Class<? extends OutputFormat> getOutputFormatClass() {
      if (outputFormatClass == null) {
        try {
          String className = tTable.getSd().getOutputFormat();
          Class<?> c;
          if (className == null) {
            if (getStorageHandler() == null) {
              return null;
            }
            c = getStorageHandler().getOutputFormatClass();
          } else {
            c = Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
          }
          // Replace FileOutputFormat for backward compatibility
          outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c);
        } catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }
      }
      return outputFormatClass;
    }

Code example source: apache/hive

    final public Class<? extends OutputFormat> getOutputFormatClass() {
      if (outputFormatClass == null) {
        try {
          String className = tTable.getSd().getOutputFormat();
          Class<?> c;
          if (className == null) {
            if (getStorageHandler() == null) {
              return null;
            }
            c = getStorageHandler().getOutputFormatClass();
          } else {
            c = Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
          }
          // Replace FileOutputFormat for backward compatibility
          outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c);
        } catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }
      }
      return outputFormatClass;
    }

Code example source: apache/hive

    private void processAlias(MapWork work, Set<TableScanOperator> tableScans) {
      Set<String> aliases = new HashSet<>();
      for (TableScanOperator tso : tableScans) {
        // use LinkedHashMap<String, Operator<? extends OperatorDesc>> getAliasToWork()
        // should not apply this for non-native table
        if (tso.getConf().getTableMetadata().getStorageHandler() != null) {
          continue;
        }
        String alias = getAliasForTableScanOperator(work, tso);
        aliases.add(alias);
        tso.getConf().setIsMetadataOnly(true);
      }
      // group path alias according to work
      Map<Path, ArrayList<String>> candidates = new HashMap<>();
      for (Path path : work.getPaths()) {
        ArrayList<String> aliasesAffected = work.getPathToAliases().get(path);
        if (CollectionUtils.isNotEmpty(aliasesAffected)) {
          candidates.put(path, aliasesAffected);
        }
      }
      for (Entry<Path, ArrayList<String>> entry : candidates.entrySet()) {
        processAlias(work, entry.getKey(), entry.getValue(), aliases);
      }
    }

Code example source: apache/drill

    private void processAlias(MapWork work, HashSet<TableScanOperator> tableScans) {
      ArrayList<String> aliases = new ArrayList<String>();
      for (TableScanOperator tso : tableScans) {
        // use LinkedHashMap<String, Operator<? extends OperatorDesc>> getAliasToWork()
        // should not apply this for non-native table
        if (tso.getConf().getTableMetadata().getStorageHandler() != null) {
          continue;
        }
        String alias = getAliasForTableScanOperator(work, tso);
        aliases.add(alias);
        tso.getConf().setIsMetadataOnly(true);
      }
      // group path alias according to work
      LinkedHashMap<Path, ArrayList<String>> candidates = new LinkedHashMap<>();
      for (Path path : work.getPaths()) {
        ArrayList<String> aliasesAffected = work.getPathToAliases().get(path);
        if (aliasesAffected != null && aliasesAffected.size() > 0) {
          candidates.put(path, aliasesAffected);
        }
      }
      for (Entry<Path, ArrayList<String>> entry : candidates.entrySet()) {
        processAlias(work, entry.getKey(), entry.getValue(), aliases);
      }
    }

Code example source: apache/hive

    Utilities.setColumnNameList(jobConf, scanOp, true);
    Utilities.setColumnTypeList(jobConf, scanOp, true);
    HiveStorageHandler handler = table.getStorageHandler();
    if (handler instanceof InputEstimator) {
      InputEstimator estimator = (InputEstimator) handler;

Code example source: apache/hive

    private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
      try {
        HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();
        if (hook == null || !(hook instanceof DefaultHiveMetaHook)) {
          return 0;
        }
        DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
        hiveMetaHook.preInsertTable(preInsertTableDesc.getTable().getTTable(), preInsertTableDesc.isOverwrite());
      } catch (MetaException e) {
        throw new HiveException(e);
      }
      return 0;
    }

Code example source: apache/hive

        && jobConf.get(ConfVars.HIVE_EXECUTION_ENGINE.varname).equals("mr")
        && (scanDesc.getTableMetadata() == null
            || !(scanDesc.getTableMetadata().getStorageHandler() instanceof HiveStoragePredicateHandler))) {
      return;

Code example source: apache/drill

    private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
      try {
        HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();
        if (hook == null || !(hook instanceof DefaultHiveMetaHook)) {
          return 0;
        }
        DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
        hiveMetaHook.preInsertTable(preInsertTableDesc.getTable().getTTable(), preInsertTableDesc.isOverwrite());
      } catch (MetaException e) {
        throw new HiveException(e);
      }
      return 0;
    }

Code example source: apache/hive

      return originalPredicate;
    HiveStorageHandler storageHandler = tbl.getStorageHandler();
    if (!(storageHandler instanceof HiveStoragePredicateHandler)) {

Code example source: apache/hive

    private int insertCommitWork(Hive db, InsertCommitHookDesc insertCommitHookDesc) throws MetaException {
      boolean failed = true;
      HiveMetaHook hook = insertCommitHookDesc.getTable().getStorageHandler().getMetaHook();
      if (hook == null || !(hook instanceof DefaultHiveMetaHook)) {
        return 0;
      }
      DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
      try {
        hiveMetaHook.commitInsertTable(insertCommitHookDesc.getTable().getTTable(),
            insertCommitHookDesc.isOverwrite());
        failed = false;
      } finally {
        if (failed) {
          hiveMetaHook.rollbackInsertTable(insertCommitHookDesc.getTable().getTTable(),
              insertCommitHookDesc.isOverwrite());
        }
      }
      return 0;
    }

Code example source: apache/drill

    private int insertCommitWork(Hive db, InsertTableDesc insertTableDesc) throws MetaException {
      boolean failed = true;
      HiveMetaHook hook = insertTableDesc.getTable().getStorageHandler().getMetaHook();
      if (hook == null || !(hook instanceof DefaultHiveMetaHook)) {
        return 0;
      }
      DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
      try {
        hiveMetaHook.commitInsertTable(insertTableDesc.getTable().getTTable(),
            insertTableDesc.isOverwrite());
        failed = false;
      } finally {
        if (failed) {
          hiveMetaHook.rollbackInsertTable(insertTableDesc.getTable().getTTable(),
              insertTableDesc.isOverwrite());
        }
      }
      return 0;
    }

Code example source: apache/hive

        getStorageHandler());
    HiveStorageHandler storageHandler = tbl.getStorageHandler();
