本文整理了Java中org.apache.hadoop.hive.ql.metadata.Table.getCols()
方法的一些代码示例,展示了Table.getCols()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Table.getCols()
方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Table
类名称:Table
方法名:getCols
暂无
代码示例来源:origin: apache/hive
/**
 * Checks whether the given name is one of this table's data columns.
 *
 * @param col the column name to look for (compared with exact equality)
 * @return true if a data column with that exact name exists
 */
private boolean isField(String col) {
  // Exact, case-sensitive match against the data-column schema.
  return getCols().stream().anyMatch(field -> field.getName().equals(col));
}
代码示例来源:origin: apache/hive
/**
 * Returns a list of all the columns of the table (data columns + partition
 * columns, in that order).
 *
 * @return a new mutable list containing the data columns followed by the
 *         partition columns
 */
public List<FieldSchema> getAllCols() {
  List<FieldSchema> dataCols = getCols();
  List<FieldSchema> partCols = getPartCols();
  // Pre-size to avoid intermediate resizes when concatenating the two lists.
  List<FieldSchema> allCols = new ArrayList<>(dataCols.size() + partCols.size());
  allCols.addAll(dataCols);
  allCols.addAll(partCols);
  return allCols;
}
代码示例来源:origin: apache/hive
/**
 * Creates a context for automatic gathering of column statistics during an
 * insert, capturing the analyzer, config, operator, target table and
 * partition spec, and snapshotting the table's data and partition columns.
 *
 * @param sa           the semantic analyzer driving the compilation
 * @param conf         the active Hive configuration
 * @param op           the operator this context is attached to
 * @param tbl          the target table whose columns are captured
 * @param partSpec     the partition spec of the insert; semantics of keys/values
 *                     not visible here — presumably partition column name to value
 * @param isInsertInto true when the statement is INSERT INTO (vs. overwrite —
 *                     TODO confirm against callers)
 * @param ctx          the original compilation context
 * @throws SemanticException declared for callers; nothing here visibly throws it
 */
public ColumnStatsAutoGatherContext(SemanticAnalyzer sa, HiveConf conf,
Operator<? extends OperatorDesc> op, Table tbl, Map<String, String> partSpec,
boolean isInsertInto, Context ctx) throws SemanticException {
super();
this.sa = sa;
this.conf = conf;
this.op = op;
this.tbl = tbl;
this.partSpec = partSpec;
this.isInsertInto = isInsertInto;
this.origCtx = ctx;
// Snapshot the schema at construction time: data columns and partition
// columns are read once from the table here.
columns = tbl.getCols();
partitionColumns = tbl.getPartCols();
}
代码示例来源:origin: apache/drill
/**
 * Tests whether a data column with the given name exists in this table.
 *
 * @param col the name to match exactly against the data columns
 * @return true when a matching data column is found
 */
private boolean isField(String col) {
  List<FieldSchema> dataCols = getCols();
  for (int i = 0; i < dataCols.size(); i++) {
    // Exact, case-sensitive name comparison.
    if (dataCols.get(i).getName().equals(col)) {
      return true;
    }
  }
  return false;
}
代码示例来源:origin: apache/drill
/**
 * Returns a list of all the columns of the table (data columns + partition
 * columns, in that order).
 *
 * @return a new mutable list containing the data columns followed by the
 *         partition columns
 */
public List<FieldSchema> getAllCols() {
  List<FieldSchema> dataCols = getCols();
  List<FieldSchema> partCols = getPartCols();
  // Pre-size to avoid intermediate resizes when concatenating the two lists.
  List<FieldSchema> allCols = new ArrayList<>(dataCols.size() + partCols.size());
  allCols.addAll(dataCols);
  allCols.addAll(partCols);
  return allCols;
}
代码示例来源:origin: apache/incubator-gobblin
/**
 * Returns the data columns of the Hive table that owns this partition,
 * delegating to the underlying table's schema.
 *
 * @return the owning table's data columns
 */
public List<FieldSchema> getCols() {
  Table owningTable = this.hivePartition.getTable();
  return owningTable.getCols();
}
代码示例来源:origin: apache/drill
/**
 * Creates a context for automatic gathering of column statistics during an
 * insert, capturing the analyzer, config, operator, target table and
 * partition spec, and snapshotting the table's data and partition columns.
 *
 * @param sa           the semantic analyzer driving the compilation
 * @param conf         the active Hive configuration
 * @param op           the operator this context is attached to
 * @param tbl          the target table whose columns are captured
 * @param partSpec     the partition spec of the insert; semantics of keys/values
 *                     not visible here — presumably partition column name to value
 * @param isInsertInto true when the statement is INSERT INTO (vs. overwrite —
 *                     TODO confirm against callers)
 * @param ctx          the original compilation context
 * @throws SemanticException declared for callers; nothing here visibly throws it
 */
public ColumnStatsAutoGatherContext(SemanticAnalyzer sa, HiveConf conf,
Operator<? extends OperatorDesc> op, Table tbl, Map<String, String> partSpec,
boolean isInsertInto, Context ctx) throws SemanticException {
super();
this.sa = sa;
this.conf = conf;
this.op = op;
this.tbl = tbl;
this.partSpec = partSpec;
this.isInsertInto = isInsertInto;
this.origCtx = ctx;
// Snapshot the schema at construction time: data columns and partition
// columns are read once from the table here.
columns = tbl.getCols();
partitionColumns = tbl.getPartCols();
}
代码示例来源:origin: apache/hive
/**
 * Builds an analyzer for a MERGE statement's ON clause, collecting all target
 * table columns (data columns followed by partition columns) for later
 * reference resolution.
 *
 * @param onClause                      the ON-clause AST node to analyze
 * @param targetTable                   the MERGE target table; its data and
 *                                      partition columns are both recorded
 * @param targetTableNameInSourceQuery  alias or simple name; unescaped before
 *                                      being stored
 * @param conf                          the active Hive configuration
 * @param onClauseAsString              textual form of the ON clause, kept for
 *                                      diagnostics (exact use not visible here)
 */
OnClauseAnalyzer(ASTNode onClause, Table targetTable, String targetTableNameInSourceQuery,
HiveConf conf, String onClauseAsString) {
this.onClause = onClause;
// Data columns first, then partition columns — both resolvable in the ON clause.
allTargetTableColumns.addAll(targetTable.getCols());
allTargetTableColumns.addAll(targetTable.getPartCols());
this.targetTableNameInSourceQuery = unescapeIdentifier(targetTableNameInSourceQuery);
this.conf = conf;
this.onClauseAsString = onClauseAsString;
}
代码示例来源:origin: apache/hive
/**
 * Verifies that every user-specified column name exists in the table schema.
 *
 * NOTE(review): names are lower-cased before lookup, which implies the schema
 * column names are stored lower-cased — confirm against Utilities.
 *
 * @param specifiedCols column names supplied by the user
 * @throws SemanticException if a name does not match any table column
 */
private void validateSpecifiedColumnNames(List<String> specifiedCols)
    throws SemanticException {
  List<String> tableCols = Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  for (String specified : specifiedCols) {
    if (tableCols.contains(specified.toLowerCase())) {
      continue;
    }
    String msg = "'" + specified + "' (possible columns are " + tableCols.toString() + ")";
    throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(msg));
  }
}
代码示例来源:origin: apache/drill
/**
 * Builds an analyzer for a MERGE statement's ON clause, collecting all target
 * table columns (data columns followed by partition columns) for later
 * reference resolution.
 *
 * @param onClause                      the ON-clause AST node to analyze
 * @param targetTable                   the MERGE target table; its data and
 *                                      partition columns are both recorded
 * @param targetTableNameInSourceQuery  alias or simple name; unescaped before
 *                                      being stored
 * @param conf                          the active Hive configuration
 * @param onClauseAsString              textual form of the ON clause, kept for
 *                                      diagnostics (exact use not visible here)
 */
OnClauseAnalyzer(ASTNode onClause, Table targetTable, String targetTableNameInSourceQuery,
HiveConf conf, String onClauseAsString) {
this.onClause = onClause;
// Data columns first, then partition columns — both resolvable in the ON clause.
allTargetTableColumns.addAll(targetTable.getCols());
allTargetTableColumns.addAll(targetTable.getPartCols());
this.targetTableNameInSourceQuery = unescapeIdentifier(targetTableNameInSourceQuery);
this.conf = conf;
this.onClauseAsString = onClauseAsString;
}
/**
代码示例来源:origin: apache/drill
/**
 * Verifies that every user-specified column name exists in the table schema.
 *
 * NOTE(review): names are lower-cased before lookup, which implies the schema
 * column names are stored lower-cased — confirm against Utilities.
 *
 * @param specifiedCols column names supplied by the user
 * @throws SemanticException if a name does not match any table column
 */
private void validateSpecifiedColumnNames(List<String> specifiedCols)
    throws SemanticException {
  List<String> tableCols = Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  for (String specified : specifiedCols) {
    if (tableCols.contains(specified.toLowerCase())) {
      continue;
    }
    String msg = "'" + specified + "' (possible columns are " + tableCols.toString() + ")";
    throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(msg));
  }
}
代码示例来源:origin: apache/hive
/**
 * Builds an HCatSchema from the data (non-partition) columns of the given
 * Hive table.
 *
 * @param table the Hive table whose data columns are converted
 * @return the HCat schema covering only the table's data columns
 * @throws HCatException propagated from the field-schema conversion
 */
public static HCatSchema extractSchema(Table table) throws HCatException {
return new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
}
代码示例来源:origin: apache/hive
/**
 * Collects the sort order of each of the table's sort columns, in the order
 * the sort columns are declared. Sort columns that do not match any data
 * column are silently skipped.
 *
 * @param dest  the destination clause name (unused here; kept for the caller)
 * @param qb    the query block (unused here; kept for the caller)
 * @param tab   the table whose sort columns are inspected
 * @param input the input operator (unused here; kept for the caller)
 * @return the orders of the matched sort columns
 * @throws SemanticException declared for callers; nothing here visibly throws it
 */
private ArrayList<Integer> getSortOrders(String dest, QB qb, Table tab, Operator input)
    throws SemanticException {
  List<Order> sortColumns = tab.getSortCols();
  List<FieldSchema> dataColumns = tab.getCols();
  ArrayList<Integer> sortOrders = new ArrayList<Integer>();
  for (Order sortColumn : sortColumns) {
    for (FieldSchema dataColumn : dataColumns) {
      if (sortColumn.getCol().equals(dataColumn.getName())) {
        sortOrders.add(sortColumn.getOrder());
        break; // first name match wins; move on to the next sort column
      }
    }
  }
  return sortOrders;
}
代码示例来源:origin: apache/hive
/**
 * Sets the bucketing columns on this table's storage descriptor after
 * verifying that each name refers to an existing data column.
 *
 * @param bucketCols the column names to bucket by; null is treated as a no-op
 * @throws HiveException if any name is not one of the table's data columns
 */
public void setBucketCols(List<String> bucketCols) throws HiveException {
  if (bucketCols == null) {
    return;
  }
  for (String col : bucketCols) {
    if (!isField(col)) {
      // Fixed: singular "column" and the previously unclosed parenthesis
      // around the column list in the message.
      throw new HiveException("Bucket column " + col
          + " is not part of the table columns (" + getCols() + ")");
    }
  }
  tTable.getSd().setBucketCols(bucketCols);
}
代码示例来源:origin: apache/hive
/**
 * Validates this alter-table description.
 *
 * Currently only the skewed-column specification is checked: skewed column
 * names must be unique among the table's columns, and the skewed names/values
 * must be mutually consistent.
 *
 * @throws SemanticException if the skewed information is invalid
 */
public void validate() throws SemanticException {
  if (tableForSkewedColValidation == null) {
    // No table supplied for skew validation — nothing to check.
    return;
  }
  ValidationUtility.validateSkewedInformation(
      ParseUtils.validateColumnNameUniqueness(tableForSkewedColValidation.getCols()),
      this.getSkewedColNames(), this.getSkewedColValues());
}
代码示例来源:origin: apache/hive
/**
 * Builds the HCat schema for a table with its partition key columns appended
 * after the data columns.
 *
 * NOTE : this assumes that partition keys never also appear as columns inside
 * the table schema itself.
 *
 * @param table the Hive table to convert
 * @return the table's data-column schema with partition keys appended
 * @throws IOException propagated from the schema conversion
 */
public static HCatSchema getTableSchemaWithPtnCols(Table table) throws IOException {
  HCatSchema schema = new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
  if (!table.getPartitionKeys().isEmpty()) {
    // Append each partition key to the end of the schema.
    for (FieldSchema partitionKey : table.getPartitionKeys()) {
      schema.append(HCatSchemaUtils.getHCatFieldSchema(partitionKey));
    }
  }
  return schema;
}
代码示例来源:origin: apache/drill
/**
 * Sets the bucketing columns on this table's storage descriptor after
 * verifying that each name refers to an existing data column.
 *
 * @param bucketCols the column names to bucket by; null is treated as a no-op
 * @throws HiveException if any name is not one of the table's data columns
 */
public void setBucketCols(List<String> bucketCols) throws HiveException {
  if (bucketCols == null) {
    return;
  }
  for (String col : bucketCols) {
    if (!isField(col)) {
      // Fixed: singular "column" and the previously unclosed parenthesis
      // around the column list in the message.
      throw new HiveException("Bucket column " + col
          + " is not part of the table columns (" + getCols() + ")");
    }
  }
  tTable.getSd().setBucketCols(bucketCols);
}
代码示例来源:origin: apache/drill
/**
 * Validates this alter-table description.
 *
 * Currently only the skewed-column specification is checked: skewed column
 * names must be unique among the table's columns, and the skewed names/values
 * must be mutually consistent.
 *
 * @throws SemanticException if the skewed information is invalid
 */
public void validate() throws SemanticException {
  if (table == null) {
    // No table supplied for skew validation — nothing to check.
    return;
  }
  ValidationUtility.validateSkewedInformation(
      ParseUtils.validateColumnNameUniqueness(table.getCols()),
      this.getSkewedColNames(), this.getSkewedColValues());
}
代码示例来源:origin: apache/hive
/**
 * Resolves the column names referenced by the given AST node.
 *
 * With 2 children, all table columns are implied; with 3 children, the third
 * child lists the explicit column names. Any other child count is an internal
 * error.
 *
 * @param tree the AST node to inspect
 * @return the resolved column names
 * @throws SemanticException if the node has an unexpected number of children
 */
private List<String> getColumnName(ASTNode tree) throws SemanticException {
  int childCount = tree.getChildCount();
  if (childCount == 2) {
    // No explicit column list: use every data column of the table.
    return Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  }
  if (childCount == 3) {
    // Explicit column list: unescape each name from the third child.
    int numCols = tree.getChild(2).getChildCount();
    List<String> colName = new ArrayList<String>(numCols);
    for (int i = 0; i < numCols; i++) {
      colName.add(getUnescapedName((ASTNode) tree.getChild(2).getChild(i)));
    }
    return colName;
  }
  throw new SemanticException("Internal error. Expected number of children of ASTNode to be"
      + " either 2 or 3. Found : " + tree.getChildCount());
}
代码示例来源:origin: apache/hive
/**
 * Builds the rewrite context for column-statistics analysis of a table:
 * the fully-qualified table name, whether stats are table-level (true unless
 * partition-level collection is enabled AND the table is partitioned), and
 * the names and types of all data columns.
 *
 * @param conf the active Hive configuration
 * @param tbl  the table being analyzed
 * @return the populated rewrite context
 */
static AnalyzeRewriteContext genAnalyzeRewriteContext(HiveConf conf, Table tbl) {
  AnalyzeRewriteContext rewriteCtx = new AnalyzeRewriteContext();
  rewriteCtx.setTableName(tbl.getFullyQualifiedName());
  boolean partLevelStats =
      conf.getBoolVar(ConfVars.HIVE_STATS_COLLECT_PART_LEVEL_STATS) && tbl.isPartitioned();
  rewriteCtx.setTblLvl(!partLevelStats);
  List<String> columnNames = Utilities.getColumnNamesFromFieldSchema(tbl.getCols());
  List<String> columnTypes = getColumnTypes(tbl, columnNames);
  rewriteCtx.setColName(columnNames);
  rewriteCtx.setColType(columnTypes);
  return rewriteCtx;
}
内容来源于网络,如有侵权,请联系作者删除!