本文整理了Java中org.apache.flink.types.Row.setField
方法的一些代码示例,展示了Row.setField
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Row.setField
方法的具体详情如下:
包路径:org.apache.flink.types.Row
类名称:Row
方法名:setField
[英]Sets the field at the specified position.
[中]将字段设置在指定位置。
代码示例来源:origin: apache/flink
@Override
protected Row fillRecord(Row reuse, Object[] parsedValues) {
    // Reuse the caller-provided row when available, otherwise allocate a fresh one.
    final Row target = (reuse != null) ? reuse : new Row(arity);
    int pos = 0;
    for (Object value : parsedValues) {
        target.setField(pos++, value);
    }
    return target;
}
代码示例来源:origin: apache/flink
/** Builds a five-field {@code Row} from the given values, in order. */
private static Row createRow(Object f0, Object f1, Object f2, Object f3, Object f4) {
    final Object[] values = {f0, f1, f2, f3, f4};
    final Row row = new Row(5);
    for (int pos = 0; pos < values.length; pos++) {
        row.setField(pos, values[pos]);
    }
    return row;
}
代码示例来源:origin: apache/flink
/** Assembles a {@code Row} of arity five whose fields are the five arguments. */
private static Row createRow(Object f0, Object f1, Object f2, Object f3, Object f4) {
    final Row result = new Row(5);
    int idx = 0;
    for (Object field : new Object[]{f0, f1, f2, f3, f4}) {
        result.setField(idx++, field);
    }
    return result;
}
代码示例来源:origin: apache/flink
@Override
public Row deserialize(DataInputView source) throws IOException {
    final int rowArity = fieldSerializers.length;
    final Row row = new Row(rowArity);
    // The null mask is read first; it tells us which fields carry serialized data.
    readIntoNullMask(rowArity, source, nullMask);
    for (int pos = 0; pos < rowArity; pos++) {
        row.setField(pos, nullMask[pos] ? null : fieldSerializers[pos].deserialize(source));
    }
    return row;
}
代码示例来源:origin: apache/flink
@Override
public Object apply(Object o) {
    // Null rows pass through unchanged.
    if (o == null) {
        return null;
    }
    final Row source = (Row) o;
    final Row target = new Row(copyFields.length);
    // Each field is duplicated through its dedicated per-field copier.
    for (int pos = 0; pos < copyFields.length; pos++) {
        target.setField(pos, copyFields[pos].apply(source.getField(pos)));
    }
    return target;
}
}
代码示例来源:origin: apache/flink
/** Converts an Avro {@code IndexedRecord} into a Flink {@code Row}, field by field. */
private Row convertAvroRecordToRow(Schema schema, RowTypeInfo typeInfo, IndexedRecord record) {
    final List<Schema.Field> schemaFields = schema.getFields();
    final TypeInformation<?>[] fieldTypes = typeInfo.getFieldTypes();
    final int arity = schemaFields.size();
    final Row row = new Row(arity);
    for (int pos = 0; pos < arity; pos++) {
        // Map each Avro value onto its Flink type representation.
        final Schema.Field schemaField = schemaFields.get(pos);
        row.setField(pos, convertAvroType(schemaField.schema(), fieldTypes[pos], record.get(pos)));
    }
    return row;
}
代码示例来源:origin: apache/flink
@Override
public Row copy(Row from) {
    final int expectedArity = fieldSerializers.length;
    // The input row must match the serializer layout exactly.
    if (from.getArity() != expectedArity) {
        throw new RuntimeException("Row arity of from does not match serializers.");
    }
    final Row duplicate = new Row(expectedArity);
    for (int pos = 0; pos < expectedArity; pos++) {
        final Object value = from.getField(pos);
        // Nulls are carried over directly; non-nulls go through the field serializer.
        duplicate.setField(pos, value == null ? null : fieldSerializers[pos].copy(value));
    }
    return duplicate;
}
代码示例来源:origin: apache/flink
@Test
public void testRowProject() {
    // A five-field source row with mixed content, including an explicit null.
    final Row source = new Row(5);
    source.setField(0, 1);
    source.setField(1, "hello");
    source.setField(2, null);
    source.setField(3, new Tuple2<>(2, "hi"));
    source.setField(4, "hello world");

    // Projecting fields 0, 2 and 4 must yield a three-field row in that order.
    final Row actual = Row.project(source, new int[]{0, 2, 4});

    final Row expected = new Row(3);
    expected.setField(0, 1);
    expected.setField(1, null);
    expected.setField(2, "hello world");
    assertEquals(expected, actual);
}
}
代码示例来源:origin: apache/flink
/** Converts a JSON object node into a {@code Row} following the given row type. */
private Row convertRow(JsonNode node, RowTypeInfo info) {
    final String[] fieldNames = info.getFieldNames();
    final TypeInformation<?>[] fieldTypes = info.getFieldTypes();
    final Row row = new Row(fieldNames.length);
    for (int pos = 0; pos < fieldNames.length; pos++) {
        final JsonNode child = node.get(fieldNames[pos]);
        if (child != null) {
            row.setField(pos, convert(child, fieldTypes[pos]));
        } else if (failOnMissingField) {
            // Configured to treat absent JSON fields as an error.
            throw new IllegalStateException(
                "Could not find field with name '" + fieldNames[pos] + "'.");
        } else {
            // Lenient mode: absent fields become null.
            row.setField(pos, null);
        }
    }
    return row;
}
代码示例来源:origin: apache/flink
@Test
public void testRowToString() {
    // Fields are rendered comma-separated, with nulls printed as the literal "null".
    final Row row = new Row(5);
    row.setField(0, 1);
    row.setField(1, "hello");
    row.setField(2, null);
    row.setField(3, new Tuple2<>(2, "hi"));
    row.setField(4, "hello world");

    assertEquals("1,hello,null,(2,hi),hello world", row.toString());
}
代码示例来源:origin: apache/flink
@Test
public void testRowCopy() {
    final Row original = new Row(5);
    original.setField(0, 1);
    original.setField(1, "hello");
    original.setField(2, null);
    original.setField(3, new Tuple2<>(2, "hi"));
    original.setField(4, "hello world");

    // The copy must be equal in content but a distinct instance.
    final Row duplicate = Row.copy(original);
    assertEquals(original, duplicate);
    assertTrue(original != duplicate);
}
代码示例来源:origin: apache/flink
@Test
public void testRowOf() {
    // Row.of(...) must produce the same row as setting each field manually.
    final Row viaFactory = Row.of(1, "hello", null, Tuple2.of(2L, "hi"), true);

    final Row viaSetters = new Row(5);
    viaSetters.setField(0, 1);
    viaSetters.setField(1, "hello");
    viaSetters.setField(2, null);
    viaSetters.setField(3, new Tuple2<>(2L, "hi"));
    viaSetters.setField(4, true);

    assertEquals(viaFactory, viaSetters);
}
代码示例来源:origin: apache/flink
@Override
public void configure(Configuration parameters) {
    LOG.info("Initializing HBase configuration.");
    connectToTable();
    if (table != null) {
        scan = getScanner();
    }
    // Prepare the output layout: one nested row per column family,
    // each slot of the outer row pointing at its family row.
    this.familyRows = new Row[families.length];
    this.resultRow = new Row(families.length);
    for (int familyIdx = 0; familyIdx < families.length; familyIdx++) {
        this.familyRows[familyIdx] = new Row(qualifiers[familyIdx].length);
        this.resultRow.setField(familyIdx, this.familyRows[familyIdx]);
    }
    this.stringCharset = Charset.forName(schema.getStringCharset());
}
代码示例来源:origin: apache/flink
/** Reads a non-null decimal column, either as plain objects or into a Row field. */
private static void readNonNullDecimalColumn(Object[] vals, int fieldIdx, DecimalColumnVector vector, int childCount) {
    // A repeating vector holds one value that applies to the entire column.
    if (vector.isRepeating) {
        fillColumnWithRepeatingValue(vals, fieldIdx, readBigDecimal(vector.vector[0]), childCount);
        return;
    }
    if (fieldIdx == -1) {
        // Emit values directly as objects.
        for (int pos = 0; pos < childCount; pos++) {
            vals[pos] = readBigDecimal(vector.vector[pos]);
        }
    } else {
        // Emit values into the given field of each Row.
        final Row[] rows = (Row[]) vals;
        for (int pos = 0; pos < childCount; pos++) {
            rows[pos].setField(fieldIdx, readBigDecimal(vector.vector[pos]));
        }
    }
}
代码示例来源:origin: apache/flink
/** Reads a non-null bytes column as strings, either as plain objects or into a Row field. */
private static void readNonNullBytesColumnAsString(Object[] vals, int fieldIdx, BytesColumnVector bytes, int childCount) {
    // A repeating vector holds one string that applies to the entire column.
    if (bytes.isRepeating) {
        final String single = readString(bytes.vector[0], bytes.start[0], bytes.length[0]);
        fillColumnWithRepeatingValue(vals, fieldIdx, single, childCount);
        return;
    }
    if (fieldIdx == -1) {
        // Emit strings directly as objects.
        for (int pos = 0; pos < childCount; pos++) {
            vals[pos] = readString(bytes.vector[pos], bytes.start[pos], bytes.length[pos]);
        }
    } else {
        // Emit strings into the given field of each Row.
        final Row[] rows = (Row[]) vals;
        for (int pos = 0; pos < childCount; pos++) {
            rows[pos].setField(fieldIdx, readString(bytes.vector[pos], bytes.start[pos], bytes.length[pos]));
        }
    }
}
代码示例来源:origin: apache/flink
/** Reads a non-null double column through {@code reader}, as objects or into a Row field. */
private static <T> void readNonNullDoubleColumn(Object[] vals, int fieldIdx, DoubleColumnVector vector,
        int childCount, DoubleFunction<T> reader) {
    // A repeating vector holds one value that applies to the entire column.
    if (vector.isRepeating) {
        fillColumnWithRepeatingValue(vals, fieldIdx, reader.apply(vector.vector[0]), childCount);
        return;
    }
    if (fieldIdx == -1) {
        // Emit converted values directly as objects.
        for (int pos = 0; pos < childCount; pos++) {
            vals[pos] = reader.apply(vector.vector[pos]);
        }
    } else {
        // Emit converted values into the given field of each Row.
        final Row[] rows = (Row[]) vals;
        for (int pos = 0; pos < childCount; pos++) {
            rows[pos].setField(fieldIdx, reader.apply(vector.vector[pos]));
        }
    }
}
代码示例来源:origin: apache/flink
/** Reads a non-null long column through {@code reader}, as objects or into a Row field. */
private static <T> void readNonNullLongColumn(Object[] vals, int fieldIdx, LongColumnVector vector,
        int childCount, LongFunction<T> reader) {
    // A repeating vector holds one value that applies to the entire column.
    if (vector.isRepeating) {
        fillColumnWithRepeatingValue(vals, fieldIdx, reader.apply(vector.vector[0]), childCount);
        return;
    }
    if (fieldIdx == -1) {
        // Emit converted values directly as objects.
        for (int pos = 0; pos < childCount; pos++) {
            vals[pos] = reader.apply(vector.vector[pos]);
        }
    } else {
        // Emit converted values into the given field of each Row.
        final Row[] rows = (Row[]) vals;
        for (int pos = 0; pos < childCount; pos++) {
            rows[pos].setField(fieldIdx, reader.apply(vector.vector[pos]));
        }
    }
}
代码示例来源:origin: apache/flink
@Test
public void testRowSerializer() {
    // Two-field rows (INT, STRING); the second row carries a null field.
    final TypeInformation<Row> typeInfo = new RowTypeInfo(
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    final Row first = new Row(2);
    first.setField(0, 1);
    first.setField(1, "a");

    final Row second = new Row(2);
    second.setField(0, 2);
    second.setField(1, null);

    final TypeSerializer<Row> serializer = typeInfo.createSerializer(new ExecutionConfig());
    new RowSerializerTestInstance(serializer, first, second).testAll();
}
代码示例来源:origin: apache/flink
/** Creates a {@code Row} of exactly {@code numberOfFields} fields from the given values. */
private static Row createRow(Object... values) {
    checkNotNull(values);
    checkArgument(values.length == numberOfFields);
    final Row row = new Row(numberOfFields);
    int pos = 0;
    for (Object value : values) {
        row.setField(pos++, value);
    }
    return row;
}
}
代码示例来源:origin: apache/flink
@Test
public void testRow() {
    // A fully-populated row should be extracted as a concrete RowTypeInfo.
    final Row populated = new Row(2);
    populated.setField(0, "string");
    populated.setField(1, 15);

    final TypeInformation<Row> extracted = TypeExtractor.getForObject(populated);
    Assert.assertEquals(extracted.getClass(), RowTypeInfo.class);
    Assert.assertEquals(2, extracted.getArity());
    Assert.assertEquals(
        new RowTypeInfo(
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO),
        extracted);

    // With all fields null, the extractor cannot infer field types and falls back to a generic type.
    final Row allNull = new Row(2);
    final TypeInformation<Row> generic = TypeExtractor.getForObject(allNull);
    Assert.assertEquals(generic, new GenericTypeInfo<>(Row.class));
}
内容来源于网络,如有侵权,请联系作者删除!