org.apache.hadoop.hive.ql.metadata.Table.getCols()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(7.6k)|赞(0)|评价(0)|浏览(199)

本文整理了Java中org.apache.hadoop.hive.ql.metadata.Table.getCols()方法的一些代码示例,展示了Table.getCols()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Table.getCols()方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Table
类名称:Table
方法名:getCols

Table.getCols介绍

暂无

代码示例

代码示例来源:origin: apache/hive

  1. private boolean isField(String col) {
  2. for (FieldSchema field : getCols()) {
  3. if (field.getName().equals(col)) {
  4. return true;
  5. }
  6. }
  7. return false;
  8. }

代码示例来源:origin: apache/hive

  1. /**
  2. * Returns a list of all the columns of the table (data columns + partition
  3. * columns in that order).
  4. *
  5. * @return List<FieldSchema>
  6. */
  7. public List<FieldSchema> getAllCols() {
  8. ArrayList<FieldSchema> f_list = new ArrayList<FieldSchema>();
  9. f_list.addAll(getCols());
  10. f_list.addAll(getPartCols());
  11. return f_list;
  12. }

代码示例来源:origin: apache/hive

  1. public ColumnStatsAutoGatherContext(SemanticAnalyzer sa, HiveConf conf,
  2. Operator<? extends OperatorDesc> op, Table tbl, Map<String, String> partSpec,
  3. boolean isInsertInto, Context ctx) throws SemanticException {
  4. super();
  5. this.sa = sa;
  6. this.conf = conf;
  7. this.op = op;
  8. this.tbl = tbl;
  9. this.partSpec = partSpec;
  10. this.isInsertInto = isInsertInto;
  11. this.origCtx = ctx;
  12. columns = tbl.getCols();
  13. partitionColumns = tbl.getPartCols();
  14. }

代码示例来源:origin: apache/drill

  1. private boolean isField(String col) {
  2. for (FieldSchema field : getCols()) {
  3. if (field.getName().equals(col)) {
  4. return true;
  5. }
  6. }
  7. return false;
  8. }

代码示例来源:origin: apache/drill

  1. /**
  2. * Returns a list of all the columns of the table (data columns + partition
  3. * columns in that order).
  4. *
  5. * @return List<FieldSchema>
  6. */
  7. public List<FieldSchema> getAllCols() {
  8. ArrayList<FieldSchema> f_list = new ArrayList<FieldSchema>();
  9. f_list.addAll(getCols());
  10. f_list.addAll(getPartCols());
  11. return f_list;
  12. }

代码示例来源:origin: apache/incubator-gobblin

  1. public List<FieldSchema> getCols() {
  2. return this.hivePartition.getTable().getCols();
  3. }

代码示例来源:origin: apache/drill

  1. public ColumnStatsAutoGatherContext(SemanticAnalyzer sa, HiveConf conf,
  2. Operator<? extends OperatorDesc> op, Table tbl, Map<String, String> partSpec,
  3. boolean isInsertInto, Context ctx) throws SemanticException {
  4. super();
  5. this.sa = sa;
  6. this.conf = conf;
  7. this.op = op;
  8. this.tbl = tbl;
  9. this.partSpec = partSpec;
  10. this.isInsertInto = isInsertInto;
  11. this.origCtx = ctx;
  12. columns = tbl.getCols();
  13. partitionColumns = tbl.getPartCols();
  14. }

代码示例来源:origin: apache/hive

  1. /**
  2. * @param targetTableNameInSourceQuery alias or simple name
  3. */
  4. OnClauseAnalyzer(ASTNode onClause, Table targetTable, String targetTableNameInSourceQuery,
  5. HiveConf conf, String onClauseAsString) {
  6. this.onClause = onClause;
  7. allTargetTableColumns.addAll(targetTable.getCols());
  8. allTargetTableColumns.addAll(targetTable.getPartCols());
  9. this.targetTableNameInSourceQuery = unescapeIdentifier(targetTableNameInSourceQuery);
  10. this.conf = conf;
  11. this.onClauseAsString = onClauseAsString;
  12. }

代码示例来源:origin: apache/hive

  1. private void validateSpecifiedColumnNames(List<String> specifiedCols)
  2. throws SemanticException {
  3. List<String> tableCols = Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  4. for (String sc : specifiedCols) {
  5. if (!tableCols.contains(sc.toLowerCase())) {
  6. String msg = "'" + sc + "' (possible columns are " + tableCols.toString() + ")";
  7. throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(msg));
  8. }
  9. }
  10. }

代码示例来源:origin: apache/drill

  1. /**
  2. * @param targetTableNameInSourceQuery alias or simple name
  3. */
  4. OnClauseAnalyzer(ASTNode onClause, Table targetTable, String targetTableNameInSourceQuery,
  5. HiveConf conf, String onClauseAsString) {
  6. this.onClause = onClause;
  7. allTargetTableColumns.addAll(targetTable.getCols());
  8. allTargetTableColumns.addAll(targetTable.getPartCols());
  9. this.targetTableNameInSourceQuery = unescapeIdentifier(targetTableNameInSourceQuery);
  10. this.conf = conf;
  11. this.onClauseAsString = onClauseAsString;
  12. }
  13. /**

代码示例来源:origin: apache/drill

  1. private void validateSpecifiedColumnNames(List<String> specifiedCols)
  2. throws SemanticException {
  3. List<String> tableCols = Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  4. for(String sc : specifiedCols) {
  5. if (!tableCols.contains(sc.toLowerCase())) {
  6. String msg = "'" + sc + "' (possible columns are " + tableCols.toString() + ")";
  7. throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(msg));
  8. }
  9. }
  10. }

代码示例来源:origin: apache/hive

  1. public static HCatSchema extractSchema(Table table) throws HCatException {
  2. return new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
  3. }

代码示例来源:origin: apache/hive

  1. private ArrayList<Integer> getSortOrders(String dest, QB qb, Table tab, Operator input)
  2. throws SemanticException {
  3. List<Order> tabSortCols = tab.getSortCols();
  4. List<FieldSchema> tabCols = tab.getCols();
  5. ArrayList<Integer> orders = new ArrayList<Integer>();
  6. for (Order sortCol : tabSortCols) {
  7. for (FieldSchema tabCol : tabCols) {
  8. if (sortCol.getCol().equals(tabCol.getName())) {
  9. orders.add(sortCol.getOrder());
  10. break;
  11. }
  12. }
  13. }
  14. return orders;
  15. }

代码示例来源:origin: apache/hive

  1. public void setBucketCols(List<String> bucketCols) throws HiveException {
  2. if (bucketCols == null) {
  3. return;
  4. }
  5. for (String col : bucketCols) {
  6. if (!isField(col)) {
  7. throw new HiveException("Bucket columns " + col
  8. + " is not part of the table columns (" + getCols() );
  9. }
  10. }
  11. tTable.getSd().setBucketCols(bucketCols);
  12. }

代码示例来源:origin: apache/hive

  1. /**
  2. * Validate alter table description.
  3. *
  4. * @throws SemanticException
  5. */
  6. public void validate() throws SemanticException {
  7. if (null != tableForSkewedColValidation) {
  8. /* Validate skewed information. */
  9. ValidationUtility.validateSkewedInformation(
  10. ParseUtils.validateColumnNameUniqueness(tableForSkewedColValidation.getCols()),
  11. this.getSkewedColNames(), this.getSkewedColValues());
  12. }
  13. }

代码示例来源:origin: apache/hive

  1. public static HCatSchema getTableSchemaWithPtnCols(Table table) throws IOException {
  2. HCatSchema tableSchema = new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
  3. if (table.getPartitionKeys().size() != 0) {
  4. // add partition keys to table schema
  5. // NOTE : this assumes that we do not ever have ptn keys as columns
  6. // inside the table schema as well!
  7. for (FieldSchema fs : table.getPartitionKeys()) {
  8. tableSchema.append(HCatSchemaUtils.getHCatFieldSchema(fs));
  9. }
  10. }
  11. return tableSchema;
  12. }

代码示例来源:origin: apache/drill

  1. public void setBucketCols(List<String> bucketCols) throws HiveException {
  2. if (bucketCols == null) {
  3. return;
  4. }
  5. for (String col : bucketCols) {
  6. if (!isField(col)) {
  7. throw new HiveException("Bucket columns " + col
  8. + " is not part of the table columns (" + getCols() );
  9. }
  10. }
  11. tTable.getSd().setBucketCols(bucketCols);
  12. }

代码示例来源:origin: apache/drill

  1. /**
  2. * Validate alter table description.
  3. *
  4. * @throws SemanticException
  5. */
  6. public void validate() throws SemanticException {
  7. if (null != table) {
  8. /* Validate skewed information. */
  9. ValidationUtility.validateSkewedInformation(
  10. ParseUtils.validateColumnNameUniqueness(table.getCols()), this.getSkewedColNames(),
  11. this.getSkewedColValues());
  12. }
  13. }

代码示例来源:origin: apache/hive

  1. private List<String> getColumnName(ASTNode tree) throws SemanticException {
  2. switch (tree.getChildCount()) {
  3. case 2:
  4. return Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  5. case 3:
  6. int numCols = tree.getChild(2).getChildCount();
  7. List<String> colName = new ArrayList<String>(numCols);
  8. for (int i = 0; i < numCols; i++) {
  9. colName.add(getUnescapedName((ASTNode) tree.getChild(2).getChild(i)));
  10. }
  11. return colName;
  12. default:
  13. throw new SemanticException("Internal error. Expected number of children of ASTNode to be"
  14. + " either 2 or 3. Found : " + tree.getChildCount());
  15. }
  16. }

代码示例来源:origin: apache/hive

  1. static AnalyzeRewriteContext genAnalyzeRewriteContext(HiveConf conf, Table tbl) {
  2. AnalyzeRewriteContext analyzeRewrite = new AnalyzeRewriteContext();
  3. analyzeRewrite.setTableName(tbl.getFullyQualifiedName());
  4. analyzeRewrite.setTblLvl(!(conf.getBoolVar(ConfVars.HIVE_STATS_COLLECT_PART_LEVEL_STATS) && tbl.isPartitioned()));
  5. List<String> colNames = Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  6. List<String> colTypes = getColumnTypes(tbl, colNames);
  7. analyzeRewrite.setColName(colNames);
  8. analyzeRewrite.setColType(colTypes);
  9. return analyzeRewrite;
  10. }

相关文章

Table类方法