本文整理了Java中org.apache.hadoop.hive.ql.metadata.Hive.createTable()
方法的一些代码示例,展示了Hive.createTable()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Hive.createTable()
方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Hive
类名称:Hive
方法名:createTable
[英]Creates a table metadata and the directory for the table data
[中]创建表元数据和表数据的目录
代码示例来源:origin: apache/hive
/**
 * Creates the table described by the given table object.
 *
 * <p>Delegates to {@link #createTable(Table, boolean)} with
 * {@code ifNotExists = false}, so an already-existing table of the same name
 * presumably results in a failure rather than a silent no-op — confirm against
 * the delegated overload.
 *
 * @param tbl
 *          a fully populated table object
 * @throws HiveException
 *           if the table metadata or data directory cannot be created
 */
public void createTable(Table tbl) throws HiveException {
  createTable(tbl, false);
}
代码示例来源:origin: apache/hive
/**
 * Creates the table described by the given table object.
 *
 * <p>Delegates to the full 8-argument overload with all optional arguments
 * left {@code null} (defaults).
 *
 * @param tbl
 *          a fully populated table object
 * @param ifNotExists
 *          presumably suppresses the error when a table of the same name
 *          already exists — confirm against the delegated overload
 * @throws HiveException
 *           if the table metadata or data directory cannot be created
 */
public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
  createTable(tbl, ifNotExists, null, null, null, null,
      null, null);
}
代码示例来源:origin: apache/drill
/**
 * Creates the table described by the given table object, delegating to
 * {@link #createTable(Table, boolean)} with {@code ifNotExists = false}.
 *
 * @param tbl
 *          a fully populated table object
 * @throws HiveException
 *           if the table metadata or data directory cannot be created
 */
public void createTable(Table tbl) throws HiveException {
  this.createTable(tbl, false);
}
代码示例来源:origin: apache/drill
/**
 * Creates the table described by the given table object.
 *
 * <p>Delegates to the 4-argument overload with the optional arguments left
 * {@code null} (defaults).
 *
 * @param tbl
 *          a fully populated table object
 * @param ifNotExists
 *          presumably suppresses the error when a table of the same name
 *          already exists — confirm against the delegated overload
 * @throws HiveException
 *           if the table metadata or data directory cannot be created
 */
public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
  createTable(tbl, ifNotExists, null, null);
}
代码示例来源:origin: apache/hive
/**
 * Creates a table metadata entry and the directory for the table data,
 * without bucketing.
 *
 * @param tableName
 *          name of the table
 * @param columns
 *          list of fields of the table
 * @param partCols
 *          partition keys of the table
 * @param fileInputFormat
 *          class of the input format of the table data file
 * @param fileOutputFormat
 *          class of the output format of the table data file
 * @throws HiveException
 *           thrown if the args are invalid or if the metadata or the data
 *           directory couldn't be created
 */
public void createTable(String tableName, List<String> columns,
    List<String> partCols, Class<? extends InputFormat> fileInputFormat,
    Class<?> fileOutputFormat) throws HiveException {
  // Delegate to the bucketed variant; -1 buckets and null bucket columns
  // mean "no bucketing".
  createTable(tableName, columns, partCols, fileInputFormat,
      fileOutputFormat, -1, null);
}
代码示例来源:origin: apache/hive
/**
 * Creates a table metadata entry and the directory for the table data,
 * with optional bucketing.
 *
 * @param tableName
 *          name of the table
 * @param columns
 *          list of fields of the table
 * @param partCols
 *          partition keys of the table
 * @param fileInputFormat
 *          class of the input format of the table data file
 * @param fileOutputFormat
 *          class of the output format of the table data file
 * @param bucketCount
 *          number of buckets that each partition (or the table itself) should
 *          be divided into
 * @param bucketCols
 *          columns used to compute the bucket each row belongs to
 * @throws HiveException
 *           thrown if the args are invalid or if the metadata or the data
 *           directory couldn't be created
 */
public void createTable(String tableName, List<String> columns,
    List<String> partCols, Class<? extends InputFormat> fileInputFormat,
    Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols)
    throws HiveException {
  // Delegate to the widest overload with default (null) table parameters.
  this.createTable(tableName, columns, partCols, fileInputFormat,
      fileOutputFormat, bucketCount, bucketCols, null);
}
代码示例来源:origin: apache/drill
/**
 * Creates a table metadata and the directory for the table data
 *
 * @param tableName
 *          name of the table
 * @param columns
 *          list of fields of the table
 * @param partCols
 *          partition keys of the table
 * @param fileInputFormat
 *          Class of the input format of the table data file
 * @param fileOutputFormat
 *          Class of the output format of the table data file
 * @throws HiveException
 *          thrown if the args are invalid or if the metadata or the data
 *          directory couldn't be created
 */
public void createTable(String tableName, List<String> columns,
List<String> partCols, Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat) throws HiveException {
// -1 buckets and null bucket columns disable bucketing in the delegated
// overload.
this.createTable(tableName, columns, partCols, fileInputFormat,
fileOutputFormat, -1, null);
}
代码示例来源:origin: apache/drill
/**
 * Creates a table metadata and the directory for the table data
 *
 * @param tableName
 *          name of the table
 * @param columns
 *          list of fields of the table
 * @param partCols
 *          partition keys of the table
 * @param fileInputFormat
 *          Class of the input format of the table data file
 * @param fileOutputFormat
 *          Class of the output format of the table data file
 * @param bucketCount
 *          number of buckets that each partition (or the table itself) should
 *          be divided into
 * @param bucketCols
 *          columns used to compute the bucket each row belongs to
 * @throws HiveException
 *          thrown if the args are invalid or if the metadata or the data
 *          directory couldn't be created
 */
public void createTable(String tableName, List<String> columns,
List<String> partCols, Class<? extends InputFormat> fileInputFormat,
Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols)
throws HiveException {
// Delegate to the widest overload with default (null) table parameters.
createTable(tableName, columns, partCols, fileInputFormat, fileOutputFormat, bucketCount,
bucketCols, null);
}
代码示例来源:origin: apache/hive
table.setInputFormatClass(format.getInputFormat());
table.setOutputFormatClass(format.getOutputFormat());
db.createTable(table, true);
代码示例来源:origin: apache/hive
/**
 * Drops and re-creates a partitioned test table, then returns the freshly
 * fetched table object. Fails the test (and rethrows) on any error.
 */
private Table createPartitionedTable(String dbName, String tableName) throws Exception {
  try {
    // Start from a clean slate: remove any pre-existing table of this name.
    hm.dropTable(dbName, tableName);
    // Two data columns ("key", "value") and a two-level ds/hr partition scheme.
    hm.createTable(tableName, Arrays.asList("key", "value"),
        Arrays.asList("ds", "hr"), TextInputFormat.class,
        HiveIgnoreKeyTextOutputFormat.class);
    return hm.getTable(dbName, tableName);
  } catch (Exception e) {
    fail("Unable to drop and create table " + StatsUtils.getFullyQualifiedTableName(dbName, tableName)
        + " because " + StringUtils.stringifyException(e));
    throw e;
  }
}
代码示例来源:origin: apache/hive
tbl.setParameters(parameters);
createTable(tbl);
代码示例来源:origin: apache/drill
tbl.setParameters(parameters);
createTable(tbl);
代码示例来源:origin: apache/hive
@BeforeClass
public static void init() throws Exception {
  queryState = new QueryState.Builder().build();
  conf = queryState.getConf();
  // Use the SQL-standard authorizer for all tests in this class.
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(conf);

  // Create a table ("foo", one column "a", partitioned by "ds") to work against.
  Hive h = Hive.get(conf);
  List<String> columns = new ArrayList<>();
  columns.add("a");
  List<String> partitionColumns = new ArrayList<>();
  partitionColumns.add("ds");
  h.createTable("foo", columns, partitionColumns, OrcInputFormat.class, OrcOutputFormat.class);

  // Register a single partition, ds=today, on the new table.
  Table t = h.getTable("foo");
  Map<String, String> partitionSpec = new HashMap<>();
  partitionSpec.put("ds", "today");
  h.createPartition(t, partitionSpec);
}
代码示例来源:origin: apache/hive
part_cols.add("hr");
try {
hm.createTable(tableName, cols, part_cols, TextInputFormat.class,
HiveIgnoreKeyTextOutputFormat.class);
} catch (HiveException e) {
代码示例来源:origin: apache/hive
db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class,
OrcOutputFormat.class, 2, Arrays.asList("a"), params);
db.createTable("U", Arrays.asList("a", "b"), Arrays.asList("ds"), OrcInputFormat.class,
OrcOutputFormat.class, 2, Arrays.asList("a"), params);
Table u = db.getTable("U");
代码示例来源:origin: apache/hive
ts.add("table2");
Table tbl1 = createTestTable(dbName, ts.get(0));
hm.createTable(tbl1);
hm.createTable(tbl2);
代码示例来源:origin: apache/hive
hm.createTable(tbl);
} catch (HiveException e) {
System.err.println(StringUtils.stringifyException(e));
代码示例来源:origin: apache/hive
/**
 * Verifies that dropping a table (and then its database) also deletes stray
 * files and directories that live under the table/database locations but were
 * not created through the metastore.
 */
@Test
public void testDataDeletion() throws HiveException,
IOException, TException {
// Create a database and a partitioned table inside it.
Database db = new Database();
db.setName(dbName);
hive.createDatabase(db);
Table table = new Table(dbName, tableName);
table.setDbName(dbName);
table.setInputFormatClass(TextInputFormat.class);
table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
table.setPartCols(partCols);
hive.createTable(table);
table = hive.getTable(dbName, tableName);
// Plant a fake sibling directory next to the table, directly under the
// database location (bypassing the metastore).
Path fakeTable = table.getPath().getParent().suffix(
Path.SEPARATOR + "faketable");
fs = fakeTable.getFileSystem(hive.getConf());
fs.mkdirs(fakeTable);
fs.deleteOnExit(fakeTable);
// Plant a fake partition directory under the table location; "fakepartition"
// is not a registered partition column.
Path fakePart = new Path(table.getDataLocation().toString(),
"fakepartition=fakevalue");
fs.mkdirs(fakePart);
fs.deleteOnExit(fakePart);
// Dropping the table must remove the fake partition directory too.
hive.dropTable(dbName, tableName, true, true);
assertFalse(fs.exists(fakePart));
// Dropping the database must remove the fake table directory too.
hive.dropDatabase(dbName);
assertFalse(fs.exists(fakeTable));
}
代码示例来源:origin: apache/hive
hive.createTable(table);
result = new CheckResult();
checker.checkMetastore(catName, dbName, null, null, result);
代码示例来源:origin: apache/hive
/**
 * Creates the test database (if absent), a partitioned test table, and the
 * configured partitions, then returns the table as read back from the
 * metastore (with its id asserted and cleared).
 */
private Table createTestTable() throws HiveException, AlreadyExistsException {
  // Ensure the target database exists; ignore it if already present.
  Database database = new Database();
  database.setName(dbName);
  hive.createDatabase(database, true);

  Table table = new Table(dbName, tableName);
  table.setDbName(dbName);
  table.setInputFormatClass(TextInputFormat.class);
  table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  table.setPartCols(partCols);
  hive.createTable(table);

  // Re-read the table from the metastore: it should now carry an id, which we
  // clear again so later comparisons are id-agnostic.
  table = hive.getTable(dbName, tableName);
  Assert.assertTrue(table.getTTable().isSetId());
  table.getTTable().unsetId();

  for (Map<String, String> spec : parts) {
    hive.createPartition(table, spec);
  }
  return table;
}
内容来源于网络,如有侵权,请联系作者删除!