org.apache.hadoop.hive.ql.metadata.Table.getPartCols()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(5.6k)|赞(0)|评价(0)|浏览(162)

本文整理了Java中org.apache.hadoop.hive.ql.metadata.Table.getPartCols()方法的一些代码示例,展示了Table.getPartCols()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Table.getPartCols()方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Table
类名称:Table
方法名:getPartCols

Table.getPartCols介绍

暂无

代码示例

代码示例来源:origin: apache/hive

  1. public boolean isPartitioned() {
  2. if (getPartCols() == null) {
  3. return false;
  4. }
  5. return (getPartCols().size() != 0);
  6. }

代码示例来源:origin: apache/hive

  1. public List<String> getPartColNames() {
  2. List<String> partColNames = new ArrayList<String>();
  3. for (FieldSchema key : getPartCols()) {
  4. partColNames.add(key.getName());
  5. }
  6. return partColNames;
  7. }

代码示例来源:origin: apache/hive

  1. public static boolean isFullSpec(Table table, Map<String, String> partSpec) {
  2. for (FieldSchema partCol : table.getPartCols()) {
  3. if (partSpec.get(partCol.getName()) == null) {
  4. return false;
  5. }
  6. }
  7. return true;
  8. }

代码示例来源:origin: apache/hive

  1. /**
  2. * Returns a list of all the columns of the table (data columns + partition
  3. * columns in that order.
  4. *
  5. * @return List<FieldSchema>
  6. */
  7. public List<FieldSchema> getAllCols() {
  8. ArrayList<FieldSchema> f_list = new ArrayList<FieldSchema>();
  9. f_list.addAll(getCols());
  10. f_list.addAll(getPartCols());
  11. return f_list;
  12. }

代码示例来源:origin: apache/hive

  1. public FieldSchema getPartColByName(String colName) {
  2. for (FieldSchema key : getPartCols()) {
  3. if (key.getName().toLowerCase().equals(colName)) {
  4. return key;
  5. }
  6. }
  7. return null;
  8. }

代码示例来源:origin: apache/hive

  1. private static List<String> extractPartColNames(Table tab) {
  2. List<FieldSchema> pCols = tab.getPartCols();
  3. List<String> partCols = new ArrayList<String>(pCols.size());
  4. for (FieldSchema pCol : pCols) {
  5. partCols.add(pCol.getName());
  6. }
  7. return partCols;
  8. }

代码示例来源:origin: apache/drill

  1. /**
  2. * Returns a list of all the columns of the table (data columns + partition
  3. * columns in that order.
  4. *
  5. * @return List<FieldSchema>
  6. */
  7. public List<FieldSchema> getAllCols() {
  8. ArrayList<FieldSchema> f_list = new ArrayList<FieldSchema>();
  9. f_list.addAll(getCols());
  10. f_list.addAll(getPartCols());
  11. return f_list;
  12. }

代码示例来源:origin: apache/drill

  1. public FieldSchema getPartColByName(String colName) {
  2. for (FieldSchema key : getPartCols()) {
  3. if (key.getName().toLowerCase().equals(colName)) {
  4. return key;
  5. }
  6. }
  7. return null;
  8. }

代码示例来源:origin: apache/drill

  1. public static boolean isFullSpec(Table table, Map<String, String> partSpec) {
  2. for (FieldSchema partCol : table.getPartCols()) {
  3. if (partSpec.get(partCol.getName()) == null) {
  4. return false;
  5. }
  6. }
  7. return true;
  8. }

代码示例来源:origin: apache/hive

  1. public ColumnStatsAutoGatherContext(SemanticAnalyzer sa, HiveConf conf,
  2. Operator<? extends OperatorDesc> op, Table tbl, Map<String, String> partSpec,
  3. boolean isInsertInto, Context ctx) throws SemanticException {
  4. super();
  5. this.sa = sa;
  6. this.conf = conf;
  7. this.op = op;
  8. this.tbl = tbl;
  9. this.partSpec = partSpec;
  10. this.isInsertInto = isInsertInto;
  11. this.origCtx = ctx;
  12. columns = tbl.getCols();
  13. partitionColumns = tbl.getPartCols();
  14. }

代码示例来源:origin: apache/drill

  1. private static List<String> extractPartColNames(Table tab) {
  2. List<FieldSchema> pCols = tab.getPartCols();
  3. List<String> partCols = new ArrayList<String>(pCols.size());
  4. for (FieldSchema pCol : pCols) {
  5. partCols.add(pCol.getName());
  6. }
  7. return partCols;
  8. }

代码示例来源:origin: apache/hive

  1. private static List<PrimitiveTypeInfo> extractPartColTypes(Table tab) {
  2. List<FieldSchema> pCols = tab.getPartCols();
  3. List<PrimitiveTypeInfo> partColTypeInfos = new ArrayList<PrimitiveTypeInfo>(pCols.size());
  4. for (FieldSchema pCol : pCols) {
  5. partColTypeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(pCol.getType()));
  6. }
  7. return partColTypeInfos;
  8. }

代码示例来源:origin: apache/hive

  1. @Override
  2. public List<String> getValues() {
  3. List<String> values = new ArrayList<String>();
  4. for (FieldSchema fs : this.getTable().getPartCols()) {
  5. values.add(partSpec.get(fs.getName()));
  6. }
  7. return values;
  8. }

代码示例来源:origin: apache/hive

  1. /**
  2. * @param targetTableNameInSourceQuery alias or simple name
  3. */
  4. OnClauseAnalyzer(ASTNode onClause, Table targetTable, String targetTableNameInSourceQuery,
  5. HiveConf conf, String onClauseAsString) {
  6. this.onClause = onClause;
  7. allTargetTableColumns.addAll(targetTable.getCols());
  8. allTargetTableColumns.addAll(targetTable.getPartCols());
  9. this.targetTableNameInSourceQuery = unescapeIdentifier(targetTableNameInSourceQuery);
  10. this.conf = conf;
  11. this.onClauseAsString = onClauseAsString;
  12. }

代码示例来源:origin: apache/drill

  1. @Override
  2. public List<String> getValues() {
  3. List<String> values = new ArrayList<String>();
  4. for (FieldSchema fs : this.getTable().getPartCols()) {
  5. values.add(partSpec.get(fs.getName()));
  6. }
  7. return values;
  8. }

代码示例来源:origin: apache/hive

  1. public String getName() {
  2. try {
  3. return Warehouse.makePartName(table.getPartCols(), tPartition.getValues());
  4. } catch (MetaException e) {
  5. throw new RuntimeException(e);
  6. }
  7. }

代码示例来源:origin: apache/drill

  1. private static List<PrimitiveTypeInfo> extractPartColTypes(Table tab) {
  2. List<FieldSchema> pCols = tab.getPartCols();
  3. List<PrimitiveTypeInfo> partColTypeInfos = new ArrayList<PrimitiveTypeInfo>(pCols.size());
  4. for (FieldSchema pCol : pCols) {
  5. partColTypeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(pCol.getType()));
  6. }
  7. return partColTypeInfos;
  8. }

代码示例来源:origin: apache/drill

  1. public String getName() {
  2. try {
  3. return Warehouse.makePartName(table.getPartCols(), tPartition.getValues());
  4. } catch (MetaException e) {
  5. throw new RuntimeException(e);
  6. }
  7. }

代码示例来源:origin: apache/hive

  1. public ExtractPartPruningPredicate(RelOptCluster cluster,
  2. RelOptHiveTable hiveTable) {
  3. super(true);
  4. this.hiveTable = hiveTable;
  5. rType = hiveTable.getRowType();
  6. List<FieldSchema> pfs = hiveTable.getHiveTableMD().getPartCols();
  7. partCols = new HashSet<String>();
  8. for (FieldSchema pf : pfs) {
  9. partCols.add(pf.getName());
  10. }
  11. this.cluster = cluster;
  12. }

代码示例来源:origin: apache/hive

  1. public void checkValidity() throws HiveException {
  2. if (!tPartition.getSd().equals(table.getSd())) {
  3. Table.validateColumns(getCols(), table.getPartCols());
  4. }
  5. }
  6. }

相关文章

Table类方法