This article collects Java code examples for the org.apache.flink.types.Row.getField method and shows how Row.getField is used in practice. The examples come from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they have solid reference value. Details of the Row.getField method:
Package: org.apache.flink.types
Class: Row
Method: getField
Description: Gets the field at the specified position.
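Before the examples, a minimal usage sketch (hypothetical values, not taken from the projects below) showing how getField pairs with setField:

import org.apache.flink.types.Row;

// A Row holds a fixed number of positional, untyped fields; any field may be null.
Row row = new Row(3);
row.setField(0, "flink");
row.setField(1, 42);
row.setField(2, null);

// getField returns the value at the given position as Object, so callers cast.
String name = (String) row.getField(0); // "flink"
int count = (int) row.getField(1);      // 42
Object missing = row.getField(2);       // null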
Code example source: apache/flink
@Override
protected Object[] extract(Row record) {
    Object[] al = new Object[rowArity];
    for (int i = 0; i < rowArity; i++) {
        al[i] = record.getField(i);
    }
    return al;
}
Code example source: apache/flink
@Override
public String getBucketId(final Row element, final Context context) {
    return String.valueOf(element.getField(0));
}
Code example source: apache/flink
@Override
protected Object[] extractFields(Row record) {
    Object[] fields = new Object[record.getArity()];
    for (int i = 0; i < fields.length; i++) {
        fields[i] = record.getField(i);
    }
    return fields;
}
Code example source: apache/flink
private String createKey(Row row) {
    final StringBuilder builder = new StringBuilder();
    for (int i = 0; i < keyFieldIndices.length; i++) {
        final int keyFieldIndex = keyFieldIndices[i];
        if (i > 0) {
            builder.append(keyDelimiter);
        }
        final Object value = row.getField(keyFieldIndex);
        if (value == null) {
            builder.append(keyNullLiteral);
        } else {
            builder.append(value.toString());
        }
    }
    return builder.toString();
}
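For illustration (hypothetical values): with keyDelimiter "_", keyNullLiteral "null", and key fields holding "a", null, and 1, createKey returns "a_null_1", so rows with null key fields still produce a stable key.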
Code example source: apache/flink
@Override
public int extractKeys(Object record, Object[] target, int index) {
    int len = comparators.length;
    int localIndex = index;
    for (int i = 0; i < len; i++) {
        Object element = ((Row) record).getField(keyPositions[i]); // element can be null
        localIndex += comparators[i].extractKeys(element, target, localIndex);
    }
    return localIndex - index;
}
Code example source: apache/flink
@Override
public void putNormalizedKey(Row record, MemorySegment target, int offset, int numBytes) {
    int bytesLeft = numBytes;
    int currentOffset = offset;
    for (int i = 0; i < numLeadingNormalizableKeys && bytesLeft > 0; i++) {
        int len = normalizedKeyLengths[i];
        len = bytesLeft >= len ? len : bytesLeft;
        TypeComparator<Object> comparator = comparators[i];
        Object element = record.getField(keyPositions[i]); // element can be null
        // write key
        comparator.putNormalizedKey(element, target, currentOffset, len);
        bytesLeft -= len;
        currentOffset += len;
    }
}
Code example source: apache/flink
private GenericRecord convertRowToAvroRecord(Schema schema, Row row) {
    final List<Schema.Field> fields = schema.getFields();
    final int length = fields.size();
    final GenericRecord record = new GenericData.Record(schema);
    for (int i = 0; i < length; i++) {
        final Schema.Field field = fields.get(i);
        record.put(i, convertFlinkType(field.schema(), row.getField(i)));
    }
    return record;
}
Code example source: apache/flink
@Override
public int compare(Row first, Row second) {
    int i = 0;
    try {
        for (; i < keyPositions.length; i++) {
            int keyPos = keyPositions[i];
            TypeComparator<Object> comparator = comparators[i];
            Object firstElement = first.getField(keyPos); // element can be null
            Object secondElement = second.getField(keyPos); // element can be null
            int cmp = comparator.compare(firstElement, secondElement);
            if (cmp != 0) {
                return cmp;
            }
        }
    } catch (IndexOutOfBoundsException e) {
        throw new KeyFieldOutOfBoundsException(keyPositions[i]);
    }
    return 0;
}
Code example source: apache/flink
public static String[] rowToString(Row row) {
    final String[] fields = new String[row.getArity()];
    for (int i = 0; i < row.getArity(); i++) {
        final Object field = row.getField(i);
        if (field == null) {
            fields[i] = CliStrings.NULL_COLUMN;
        } else {
            fields[i] = field.toString();
        }
    }
    return fields;
}
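For example, a row holding 1, null, and "hello" is rendered as {"1", CliStrings.NULL_COLUMN, "hello"}: a null field becomes the CLI's null-column literal rather than the string "null".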
Code example source: apache/flink
@Override
public Object apply(Object o) {
    if (o == null) {
        return null;
    } else {
        Row r = (Row) o;
        Row copy = new Row(copyFields.length);
        for (int i = 0; i < copyFields.length; i++) {
            copy.setField(i, copyFields[i].apply(r.getField(i)));
        }
        return copy;
    }
}
Code example source: apache/flink
@Override
public void setReference(Row toCompare) {
    int i = 0;
    try {
        for (; i < keyPositions.length; i++) {
            TypeComparator<Object> comparator = comparators[i];
            Object element = toCompare.getField(keyPositions[i]);
            comparator.setReference(element); // element can be null
        }
    } catch (IndexOutOfBoundsException e) {
        throw new KeyFieldOutOfBoundsException(keyPositions[i]);
    }
}
Code example source: apache/flink
@Override
public int hash(Row record) {
    int code = 0;
    int i = 0;
    try {
        for (; i < keyPositions.length; i++) {
            code *= TupleComparatorBase.HASH_SALT[i & 0x1F];
            Object element = record.getField(keyPositions[i]); // element can be null
            code += comparators[i].hash(element);
        }
    } catch (IndexOutOfBoundsException e) {
        throw new KeyFieldOutOfBoundsException(keyPositions[i]);
    }
    return code;
}
Code example source: apache/flink
@Override
public boolean equalToReference(Row candidate) {
    int i = 0;
    try {
        for (; i < keyPositions.length; i++) {
            TypeComparator<Object> comparator = comparators[i];
            Object element = candidate.getField(keyPositions[i]); // element can be null
            // check if reference is not equal
            if (!comparator.equalToReference(element)) {
                return false;
            }
        }
    } catch (IndexOutOfBoundsException e) {
        throw new KeyFieldOutOfBoundsException(keyPositions[i]);
    }
    return true;
}
Code example source: apache/flink
@Override
public void serialize(Row record, DataOutputView target) throws IOException {
    int len = fieldSerializers.length;
    if (record.getArity() != len) {
        throw new RuntimeException("Row arity of from does not match serializers.");
    }
    // write a null mask
    writeNullMask(len, record, target);
    // serialize non-null fields
    for (int i = 0; i < len; i++) {
        Object o = record.getField(i);
        if (o != null) {
            fieldSerializers[i].serialize(o, target);
        }
    }
}
Code example source: apache/flink
@Override
public Row copy(Row from) {
    int len = fieldSerializers.length;
    if (from.getArity() != len) {
        throw new RuntimeException("Row arity of from does not match serializers.");
    }
    Row result = new Row(len);
    for (int i = 0; i < len; i++) {
        Object fromField = from.getField(i);
        if (fromField != null) {
            Object copy = fieldSerializers[i].copy(fromField);
            result.setField(i, copy);
        } else {
            result.setField(i, null);
        }
    }
    return result;
}
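As a hedged round-trip sketch of the two serializer methods above (not from the article; it assumes Flink's RowTypeInfo plus the in-memory DataOutputSerializer/DataInputDeserializer views from flink-core, and both calls throw IOException):

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;
import org.apache.flink.types.Row;

TypeSerializer<Row> serializer =
    new RowTypeInfo(Types.STRING, Types.INT).createSerializer(new ExecutionConfig());

Row row = new Row(2);
row.setField(0, null); // recorded in the null mask, no payload bytes written
row.setField(1, 42);

DataOutputSerializer out = new DataOutputSerializer(64);
serializer.serialize(row, out);

Row restored = serializer.deserialize(new DataInputDeserializer(out.getCopyOfBuffer()));
// restored.getField(0) == null; restored.getField(1) equals 42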
Code example source: apache/flink
@Override
protected void deepEquals(String message, Row should, Row is) {
    int arity = should.getArity();
    assertEquals(message, arity, is.getArity());
    for (int i = 0; i < arity; i++) {
        Object copiedValue = should.getField(i);
        Object element = is.getField(i);
        assertEquals(message, element, copiedValue);
    }
}
Code example source: apache/flink
private ObjectNode convertRow(ObjectNode reuse, RowTypeInfo info, Row row) {
    if (reuse == null) {
        reuse = mapper.createObjectNode();
    }
    final String[] fieldNames = info.getFieldNames();
    final TypeInformation<?>[] fieldTypes = info.getFieldTypes();
    // validate the row
    if (row.getArity() != fieldNames.length) {
        throw new IllegalStateException(String.format(
            "Number of elements in the row '%s' is different from number of field names: %d",
            row, fieldNames.length));
    }
    for (int i = 0; i < fieldNames.length; i++) {
        final String name = fieldNames[i];
        final JsonNode fieldConverted = convert(reuse, reuse.get(name), fieldTypes[i], row.getField(i));
        reuse.set(name, fieldConverted);
    }
    return reuse;
}
Code example source: apache/flink
private static void testRemovingTrailingCR(String lineBreakerInFile, String lineBreakerSetup) throws IOException {
    String fileContent = FIRST_PART + lineBreakerInFile + SECOND_PART + lineBreakerInFile;

    // create input file
    File tempFile = File.createTempFile("CsvInputFormatTest", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);

    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write(fileContent);
    wrt.close();

    TypeInformation[] fieldTypes = new TypeInformation[]{BasicTypeInfo.STRING_TYPE_INFO};

    RowCsvInputFormat inputFormat = new RowCsvInputFormat(new Path(tempFile.toURI().toString()), fieldTypes);
    inputFormat.configure(new Configuration());
    inputFormat.setDelimiter(lineBreakerSetup);

    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);

    Row result = inputFormat.nextRecord(new Row(1));
    assertNotNull("Expecting to not return null", result);
    assertEquals(FIRST_PART, result.getField(0));

    result = inputFormat.nextRecord(result);
    assertNotNull("Expecting to not return null", result);
    assertEquals(SECOND_PART, result.getField(0));
}