Usage of the org.apache.hadoop.hive.ql.metadata.Table.setFields() method, with code examples


This article collects code examples of the Java method org.apache.hadoop.hive.ql.metadata.Table.setFields() and shows how Table.setFields() is used in practice. The examples are taken from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they carry reasonable reference value. Details of the Table.setFields() method:
Package: org.apache.hadoop.hive.ql.metadata
Class: Table
Method: setFields

About Table.setFields

The upstream sources provide no Javadoc for this method. Judging from the examples below, Table.setFields(List<FieldSchema>) replaces the table's column definitions (its schema) with the given list of FieldSchema objects.
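
Before walking through the excerpts, here is a minimal sketch of the typical call sequence. The database, table, and column names are illustrative, and running it assumes a configured Hive session with a reachable metastore:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;

public class SetFieldsExample {
 public static void main(String[] args) throws Exception {
  // Build the column schema; each FieldSchema is (name, type, comment).
  List<FieldSchema> fields = new ArrayList<FieldSchema>();
  fields.add(new FieldSchema("id", "int", "row id"));
  fields.add(new FieldSchema("name", "string", null));

  // Create an in-memory table definition and attach the schema to it.
  Table tbl = new Table("default", "demo_table");
  tbl.setFields(fields);

  // Persist the table definition through the current Hive session.
  Hive.get().createTable(tbl);
 }
}

Note that Hive.get() returns a thread-local Hive session (see the ThreadLocalHive excerpt below), which is why the examples never construct a Hive object directly.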

Code examples

Code example source: apache/hive

// Register a single int column "val" as the schema of a temp table.
List<FieldSchema> fields = new ArrayList<FieldSchema>();
fields.add(new FieldSchema("val", "int", null));
table.setFields(fields);
table.setDataLocation(Warehouse.getDnsPath(new Path(SessionState.get().getTempTableSpace(),
  tableName), conf));

Code example source: apache/drill

List<FieldSchema> fields = new ArrayList<FieldSchema>();
fields.add(new FieldSchema("val", "int", null));
table.setFields(fields);
table.setDataLocation(Warehouse.getDnsPath(new Path(SessionState.get().getTempTableSpace(),
 tableName), conf));

Code example source: apache/hive

private static class ThreadLocalHive extends ThreadLocal<Hive> {
 @Override
 protected Hive initialValue() {
  return null;
 }
 @Override
 public synchronized void set(Hive hiveObj) {
  Hive currentHive = this.get();
  if (currentHive != hiveObj) {
   // Remove/close current thread-local Hive object before overwriting with new Hive object.
   remove();
   super.set(hiveObj);
  }
 }
 @Override
 public synchronized void remove() {
  Hive currentHive = this.get();
  if (currentHive != null) {
   // Close the metastore connections before removing it from thread local hiveDB.
   currentHive.close(false);
   super.remove();
  }
 }
}

Code example source: apache/hive

// Copy every column (regular and partition columns) from the source table.
tempTableObj.setFields(table.getAllCols());

Code example source: apache/drill

// Assemble a temporary table definition entirely in memory.
Table table = db.newTable(tableName);
table.setSerializationLib(format.getSerde());
table.setFields(fields);
table.setDataLocation(tablePath);
table.getTTable().setTemporary(true);

Code example source: apache/drill

try {
 // Recover the column schema from the table's previous SerDe.
 Deserializer oldSerde = MetaStoreUtils.getDeserializer(
   conf, tbl.getTTable(), false, oldSerdeName);
 tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), oldSerde));
} catch (MetaException ex) {
 throw new HiveException(ex);
}
Code example source: apache/hive

try {
 // Recover the column schema from the table's previous SerDe.
 Deserializer oldSerde = HiveMetaStoreUtils.getDeserializer(
   conf, tbl.getTTable(), false, oldSerdeName);
 tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), oldSerde));
} catch (MetaException ex) {
 throw new HiveException(ex);
}
Code example source: apache/hive

// Carry over both the column and partition-column schemas from the old table.
tbl.setFields(oldtbl.getCols());
tbl.setPartCols(oldtbl.getPartCols());

Code example source: apache/drill

tbl.setFields(oldtbl.getCols());
tbl.setPartCols(oldtbl.getPartCols());

Code example source: apache/drill

// Derive the column schema from the table's own deserializer.
tbl.setFields(MetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(),
  tbl.getDeserializer()));

Code example source: apache/hive

tbl.setFields(getSchema());
if (getComment() != null) {
 tbl.setProperty("comment", getComment());
}

Code example source: apache/drill

tbl.setFields(getCols());

Code example source: apache/hive

tbl.setFields(getCols());

Code example source: apache/hive

fields.add(new FieldSchema("col2", serdeConstants.STRING_TYPE_NAME, "string -- second column"));
fields.add(new FieldSchema("col3", serdeConstants.DOUBLE_TYPE_NAME, "double -- thrift column"));
tbl.setFields(fields);

Code example source: apache/hive

oldview.setFields(crtView.getSchema());
if (crtView.getComment() != null) {
 oldview.setProperty("comment", crtView.getComment());
}

Code example source: apache/drill

// Update the schema and comment on an existing view object.
oldview.setFields(crtView.getSchema());
if (crtView.getComment() != null) {
 oldview.setProperty("comment", crtView.getComment());
}

// Populate the schema and comment on a newly created view object.
tbl.setFields(crtView.getSchema());
if (crtView.getComment() != null) {
 tbl.setProperty("comment", crtView.getComment());
}

Code example source: qubole/streamx

@Override
public void alterSchema(String database, String tableName, Schema schema) {
 Table table = hiveMetaStore.getTable(database, tableName);
 List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
 table.setFields(columns);
 hiveMetaStore.alterTable(table);
}

Code example source: apache/lens

/**
 * Creates the hive table.
 *
 * @param db      the db
 * @param table   the table
 * @param columns the columns
 * @throws Exception the exception
 */
void createHiveTable(String db, String table, List<FieldSchema> columns) throws Exception {
 Table tbl1 = new Table(db, table);
 tbl1.setFields(columns);
 Hive.get().createTable(tbl1);
 System.out.println("Created table : " + table);
}

Code example source: qubole/streamx

private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
 Table table = new Table(database, tableName);
 table.setTableType(TableType.EXTERNAL_TABLE);
 table.getParameters().put("EXTERNAL", "TRUE");
 String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
 table.setDataLocation(new Path(tablePath));
 table.setSerializationLib(getHiveParquetSerde());
 try {
  table.setInputFormatClass(getHiveParquetInputFormat());
  table.setOutputFormatClass(getHiveParquetOutputFormat());
 } catch (HiveException e) {
  throw new HiveMetaStoreException("Cannot find input/output format:", e);
 }
 // convert the copycat schema to Hive columns
 List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
 table.setFields(columns);
 table.setPartCols(partitioner.partitionFields());
 return table;
}
