This article collects Java code examples for the org.apache.hadoop.hive.metastore.api.Table.<init>() method and shows how Table.<init>() is used in practice. The examples are taken from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they offer solid reference value. Details of the Table.<init>() method are as follows:
Package path: org.apache.hadoop.hive.metastore.api.Table
Class name: Table
Method name: <init>
Description: Performs a deep copy on other.
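The examples below exercise two forms of the constructor: the no-argument constructor, which creates an empty Thrift Table object that is then populated through setters, and the copy constructor Table(Table other), which performs the deep copy described above. The following is a minimal sketch of both forms; the database and table names are illustrative placeholders, not values taken from any of the quoted projects.

import org.apache.hadoop.hive.metastore.api.Table;

public class TableInitSketch {
  public static void main(String[] args) {
    // No-argument constructor: create an empty Table and fill it via setters.
    Table original = new Table();
    original.setDbName("example_db");        // placeholder database name
    original.setTableName("example_table");  // placeholder table name

    // Copy constructor: performs a deep copy of the other Table.
    Table copy = new Table(original);

    // The copy is a separate object with equal field values.
    System.out.println(copy.equals(original)); // true
    System.out.println(copy == original);      // false
  }
}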
Code example source: apache/hive
protected Table deepCopy(Table table) {
  Table copy = null;
  if (table != null) {
    copy = new Table(table);
  }
  return copy;
}
Code example source: apache/hive
public Table deepCopy() {
  return new Table(this);
}
Code example source: apache/hive
/**
 * Performs a deep copy on <i>other</i>.
 */
public GetTablesResult(GetTablesResult other) {
  if (other.isSetTables()) {
    List<Table> __this__tables = new ArrayList<Table>(other.tables.size());
    for (Table other_element : other.tables) {
      __this__tables.add(new Table(other_element));
    }
    this.tables = __this__tables;
  }
}
Code example source: apache/hive
/**
 * Performs a deep copy on <i>other</i>.
 */
public GetTableResult(GetTableResult other) {
  __isset_bitfield = other.__isset_bitfield;
  if (other.isSetTable()) {
    this.table = new Table(other.table);
  }
  this.isStatsCompliant = other.isStatsCompliant;
}
Code example source: apache/hive
private Table table() throws TException {
  return deserialize(new Table(), tableDesc);
}
Code example source: apache/hive
public static Table getTableObj(ObjectNode jsonTree) throws Exception {
  TDeserializer deSerializer = new TDeserializer(new TJSONProtocol.Factory());
  Table tableObj = new Table();
  String tableJson = jsonTree.get("tableObjJson").asText();
  deSerializer.deserialize(tableObj, tableJson, "UTF-8");
  return tableObj;
}
Code example source: apache/hive
private void addTable(String databaseName, String tableName, Set<Table> tables) {
  checkNotNullOrEmpty(databaseName);
  checkNotNullOrEmpty(tableName);
  Table table = new Table();
  table.setDbName(databaseName);
  table.setTableName(tableName);
  tables.add(table);
}
Code example source: apache/hive
@Override
public HCatTable deserializeTable(String hcatTableStringRep) throws HCatException {
  try {
    Table table = new Table();
    new TDeserializer(new TJSONProtocol.Factory()).deserialize(table, hcatTableStringRep, "UTF-8");
    return new HCatTable(table);
  }
  catch (TException exception) {
    if (LOG.isDebugEnabled())
      LOG.debug("Could not de-serialize from: " + hcatTableStringRep);
    throw new HCatException("Could not de-serialize HCatTable.", exception);
  }
}
Code example source: apache/hive
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, GetTableResult struct) throws org.apache.thrift.TException {
  TTupleProtocol iprot = (TTupleProtocol) prot;
  // Read the Table field unconditionally, then the optional isStatsCompliant flag guarded by the bit set.
  struct.table = new Table();
  struct.table.read(iprot);
  struct.setTableIsSet(true);
  BitSet incoming = iprot.readBitSet(1);
  if (incoming.get(0)) {
    struct.isStatsCompliant = iprot.readBool();
    struct.setIsStatsCompliantIsSet(true);
  }
}
Code example source: apache/hive
private static List<Table> createTable() {
  Table table = new Table();
  table.setDbName("DB");
  table.setTableName("TABLE");
  return Arrays.asList(table);
}
Code example source: apache/hive
private static Table createTable(String databaseName, String tableName) {
  Table table = new Table();
  table.setDbName(databaseName);
  table.setTableName(tableName);
  return table;
}
Code example source: prestodb/presto
@Override
public Table getTable(String dbName, String tableName)
        throws TException
{
    accessCount.incrementAndGet();
    if (throwException) {
        throw new RuntimeException();
    }
    if (!dbName.equals(TEST_DATABASE) || !tableName.equals(TEST_TABLE)) {
        throw new NoSuchObjectException();
    }
    return new Table(
            TEST_TABLE,
            TEST_DATABASE,
            "",
            0,
            0,
            0,
            DEFAULT_STORAGE_DESCRIPTOR,
            ImmutableList.of(new FieldSchema("key", "string", null)),
            null,
            "",
            "",
            TableType.MANAGED_TABLE.name());
}
Code example source: prestodb/presto
public static org.apache.hadoop.hive.metastore.api.Table toMetastoreApiTable(Table table, PrincipalPrivileges privileges)
{
    org.apache.hadoop.hive.metastore.api.Table result = new org.apache.hadoop.hive.metastore.api.Table();
    result.setDbName(table.getDatabaseName());
    result.setTableName(table.getTableName());
    result.setOwner(table.getOwner());
    result.setTableType(table.getTableType());
    result.setParameters(table.getParameters());
    result.setPartitionKeys(table.getPartitionColumns().stream().map(ThriftMetastoreUtil::toMetastoreApiFieldSchema).collect(toList()));
    result.setSd(makeStorageDescriptor(table.getTableName(), table.getDataColumns(), table.getStorage()));
    result.setPrivileges(toMetastoreApiPrincipalPrivilegeSet(table.getOwner(), privileges));
    result.setViewOriginalText(table.getViewOriginalText().orElse(null));
    result.setViewExpandedText(table.getViewExpandedText().orElse(null));
    return result;
}
Code example source: apache/incubator-gobblin
private CopyableDatasetRequestor getRequestor(String dbName, String tableName) {
  CopyableDatasetRequestor requestor = Mockito.mock(CopyableDatasetRequestor.class);
  HiveDataset dataset = Mockito.mock(HiveDataset.class);
  Table table = new Table(new org.apache.hadoop.hive.metastore.api.Table());
  table.setDbName(dbName);
  table.setTableName(tableName);
  Mockito.when(dataset.getTable()).thenReturn(table);
  Mockito.when(requestor.getDataset()).thenReturn(dataset);
  return requestor;
}
Code example source: apache/hive
private Table createTestTbl(String dbName, String tblName, String tblOwner,
    List<FieldSchema> cols, List<FieldSchema> ptnCols) {
  String serdeLocation = "file:/tmp";
  Map<String, String> serdeParams = new HashMap<>();
  Map<String, String> tblParams = new HashMap<>();
  SerDeInfo serdeInfo = new SerDeInfo("serde", "seriallib", new HashMap<>());
  StorageDescriptor sd = new StorageDescriptor(cols, serdeLocation, "input", "output", false, 0,
      serdeInfo, null, null, serdeParams);
  sd.setStoredAsSubDirectories(false);
  // Build the Table with the full-arguments Thrift constructor.
  Table tbl = new Table(tblName, dbName, tblOwner, 0, 0, 0, sd, ptnCols, tblParams, null, null,
      TableType.MANAGED_TABLE.toString());
  tbl.setCatName(DEFAULT_CATALOG_NAME);
  return tbl;
}
Code example source: apache/incubator-gobblin
private HiveTargetPathHelper createTestTargetPathHelper(Properties properties) {
  HiveDataset dataset = Mockito.mock(HiveDataset.class);
  Table table = new Table(new org.apache.hadoop.hive.metastore.api.Table());
  table.setDbName("dbName");
  table.setTableName("tableName");
  Mockito.when(dataset.getTable()).thenReturn(table);
  Mockito.when(dataset.getTableRootPath()).thenReturn(Optional.of(TABLE_ROOT));
  Mockito.when(dataset.getProperties()).thenReturn(properties);
  HiveTargetPathHelper helper = new HiveTargetPathHelper(dataset);
  return helper;
}
Code example source: apache/incubator-gobblin
public static Table getTestTable(String dbName, String tableName) {
  Table table = new Table();
  table.setDbName(dbName);
  table.setTableName(tableName);
  table.setTableType(TableType.EXTERNAL_TABLE.name());
  StorageDescriptor sd = new StorageDescriptor();
  sd.setLocation("/tmp/test");
  table.setSd(sd);
  return table;
}
Code example source: apache/hive
@Test
public void testCreateTable() throws IOException {
  Table t = new Table();
  t.setDbName("testdb");
  t.setTableName("testtable");
  NotificationEvent event = new NotificationEvent(getEventId(), getTime(),
      HCatConstants.HCAT_CREATE_TABLE_EVENT, msgFactory.buildCreateTableMessage(t).toString());
  event.setDbName(t.getDbName());
  event.setTableName(t.getTableName());
  HCatNotificationEvent hev = new HCatNotificationEvent(event);
  ReplicationTask rtask = ReplicationTask.create(client, hev);
  assertEquals(hev.toString(), rtask.getEvent().toString());
  verifyCreateTableReplicationTask(rtask);
}
Code example source: apache/hive
@Test
public void testDropTable() throws IOException {
  Table t = new Table();
  t.setDbName("testdb");
  t.setTableName("testtable");
  NotificationEvent event = new NotificationEvent(getEventId(), getTime(),
      HCatConstants.HCAT_DROP_TABLE_EVENT, msgFactory.buildDropTableMessage(t).toString());
  event.setDbName(t.getDbName());
  event.setTableName(t.getTableName());
  HCatNotificationEvent hev = new HCatNotificationEvent(event);
  ReplicationTask rtask = ReplicationTask.create(client, hev);
  assertEquals(hev.toString(), rtask.getEvent().toString());
  verifyDropTableReplicationTask(rtask);
}
The content is sourced from the internet; if it infringes any rights, please contact the author to have it removed.