This article collects code examples of the Java method org.apache.hadoop.hive.ql.metadata.Table.setSerializationLib(), showing how Table.setSerializationLib() is used in practice. The examples are extracted from a curated set of open-source projects on platforms such as GitHub, Stack Overflow, and Maven, so they should serve as useful references. Details of the Table.setSerializationLib() method:
Package path: org.apache.hadoop.hive.ql.metadata.Table
Class name: Table
Method name: setSerializationLib
Method description: not available
Code example source: apache/hive
private static Table createTestTable(String dbName, String tableName) throws HiveException {
  Table tbl = new Table(dbName, tableName);
  tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
  tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
  tbl.setSerializationLib(ThriftDeserializer.class.getName());
  tbl.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class.getName());
  tbl.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
  return tbl;
}
Code example source: apache/hive
format.processStorageFormat("TextFile");
Table table = db.newTable(tableName);
table.setSerializationLib(format.getSerde());
List<FieldSchema> fields = new ArrayList<FieldSchema>();
fields.add(new FieldSchema("val", "int", null));
Code example source: apache/hive
tbl.setSerializationLib(LazySimpleSerDe.class.getName());
tbl.setNumBuckets(bucketCount);
tbl.setBucketCols(bucketCols);
Code example source: apache/drill
format.processStorageFormat("TextFile");
Table table = db.newTable(tableName);
table.setSerializationLib(format.getSerde());
List<FieldSchema> fields = new ArrayList<FieldSchema>();
fields.add(new FieldSchema("val", "int", null));
Code example source: apache/drill
tbl.setSerializationLib(LazySimpleSerDe.class.getName());
tbl.setNumBuckets(bucketCount);
tbl.setBucketCols(bucketCols);
Code example source: apache/hive
try {
  tempTableObj.setInputFormatClass(inputFormatClassName);
  tempTableObj.setSerializationLib(serDeClassName);
} catch (HiveException e) {
  throw new SemanticException("Load Data: Failed to set inputFormat or SerDe");
}
Code example source: apache/hive
if (crtTbl.getDefaultSerName() == null) {
  LOG.info("Default to LazySimpleSerDe for like table {}", targetTableName);
  tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
} else {
  tbl.setSerializationLib(crtTbl.getDefaultSerName());
}
Code example source: apache/hive
tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
tbl.setSerializationLib(ThriftDeserializer.class.getName());
tbl.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class.getName());
tbl.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
Code example source: apache/drill
if (crtTbl.getDefaultSerName() == null) {
  LOG.info("Default to LazySimpleSerDe for like table " + crtTbl.getTableName());
  tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
} else {
  tbl.setSerializationLib(crtTbl.getDefaultSerName());
}
Code example source: apache/drill
format.processStorageFormat("TextFile");
Table table = db.newTable(tableName);
table.setSerializationLib(format.getSerde());
table.setFields(fields);
table.setDataLocation(tablePath);
Code example source: apache/hive
tbl.setTableType(TableType.VIRTUAL_VIEW);
tbl.setSerializationLib(null);
tbl.clearSerDeInfo();
tbl.setFields(getSchema());
// ... (the excerpt joins a second call site, where an explicitly supplied SerDe is validated and set)
DDLTask.validateSerDe(serDeClassName, conf);
tbl.setSerializationLib(serDeClassName);
Code example source: apache/drill
if (storageHandler == null) {
  LOG.info("Default to LazySimpleSerDe for table " + tableName);
  tbl.setSerializationLib(LazySimpleSerDe.class.getName());
} else {
  String serDeClassName = storageHandler.getSerDeClass().getName();
  LOG.info("Use StorageHandler-supplied " + serDeClassName
      + " for table " + tableName);
  tbl.setSerializationLib(serDeClassName);
}
// ... (the excerpt also includes a second call site, where an explicitly named SerDe is set)
tbl.setSerializationLib(getSerName());
Code example source: apache/hive
DDLTask.validateSerDe(serDeClassName, conf);
tbl.setSerializationLib(serDeClassName);
Code example source: apache/hive
tbl.setSerializationLib(LazySimpleSerDe.class.getName());
tbl.setStoredAsSubDirectories(false);
Code example source: apache/drill
tbl.setTableType(TableType.VIRTUAL_VIEW);
tbl.setSerializationLib(null);
tbl.clearSerDeInfo();
tbl.setFields(crtView.getSchema());
Code example source: apache/lens
@Override
public Table getHiveTable(HiveConf conf) {
  Table table = new Table(conf.get(LensConfConstants.STATISTICS_DATABASE_KEY,
      LensConfConstants.DEFAULT_STATISTICS_DATABASE), this.getClass().getSimpleName());
  LinkedList<FieldSchema> colList = new LinkedList<FieldSchema>();
  colList.add(new FieldSchema("handle", "string", "Query Handle"));
  colList.add(new FieldSchema("userQuery", "string", "User Query before rewrite"));
  colList.add(new FieldSchema("submitter", "string", "submitter"));
  colList.add(new FieldSchema("clusterUser", "string", "Cluster User which will do all operations on hdfs"));
  colList.add(new FieldSchema("sessionId", "string", "Lens Session which ran the query"));
  colList.add(new FieldSchema("submissionTime", "bigint", "Time which query was submitted"));
  colList.add(new FieldSchema("startTime", "bigint", "Timestamp which query was Started"));
  colList.add(new FieldSchema("endTime", "bigint", "Timestamp which query was finished"));
  colList.add(new FieldSchema("result", "string", "path to result of query"));
  colList.add(new FieldSchema("cause", "string", "failure/error cause if any"));
  colList.add(new FieldSchema("status", "map<string,string>", "status object of the query"));
  colList.add(new FieldSchema("driverStats", "map<string,string>", "driver statistics of the query"));
  table.setFields(colList);
  LinkedList<FieldSchema> partCols = new LinkedList<FieldSchema>();
  partCols.add(new FieldSchema("dt", "string", "partCol"));
  table.setPartCols(partCols);
  table.setSerializationLib(JSonSerde.class.getName());
  try {
    table.setInputFormatClass(TextInputFormat.class.getName());
  } catch (HiveException e) {
    log.error("Encountered hive exception.", e);
  }
  return table;
}
Code example source: qubole/streamx
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = new Table(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
Code example source: apache/lens
Table tbl1 = new Table(db, table);
if (setCustomSerde) {
  tbl1.setSerializationLib("DatabaseJarSerde");
}
Code example source: qubole/streamx
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = new Table(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // convert the copycat schema to Hive columns
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
Code example source: org.apache.hadoop.hive/hive-exec
Table tbl = db.newTable(crtView.getViewName());
tbl.setTableType(TableType.VIRTUAL_VIEW);
tbl.setSerializationLib(null);
tbl.clearSerDeInfo();
tbl.setViewOriginalText(crtView.getViewOriginalText());