org.apache.nifi.serialization.RecordReader.getSchema()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(8.9k)|赞(0)|评价(0)|浏览(85)

本文整理了Java中org.apache.nifi.serialization.RecordReader.getSchema方法的一些代码示例,展示了RecordReader.getSchema的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助你。RecordReader.getSchema方法的具体详情如下:
包路径:org.apache.nifi.serialization.RecordReader
类名称:RecordReader
方法名:getSchema

RecordReader.getSchema介绍

暂无

代码示例

代码示例来源:origin: apache/nifi

@Override
public RecordSchema getSchema() throws IOException {
  // Delegate to the enclosing RecordReader. Its checked MalformedRecordException
  // is not declared by this interface, so re-surface it wrapped in an IOException.
  try {
    return RecordReader.this.getSchema();
  } catch (final MalformedRecordException e) {
    throw new IOException(e);
  }
}

代码示例来源:origin: apache/nifi

RecordSchema recordSchema = recordReader.getSchema();
for (RecordField field : recordSchema.getFields()) {
  String fieldName = field.getFieldName();

代码示例来源:origin: apache/nifi

throws IllegalArgumentException, MalformedRecordException, IOException, SQLException {
final RecordSchema recordSchema = recordParser.getSchema();

代码示例来源:origin: apache/nifi

@Override
public RelDataType getRowType(final RelDataTypeFactory typeFactory) {
  // Return the cached row type if it was already computed.
  if (relDataType != null) {
    return relDataType;
  }

  // Read just enough of the FlowFile content to resolve the record schema.
  RecordSchema schema;
  try (final InputStream in = session.read(flowFile)) {
    final RecordReader recordParser = recordParserFactory.createRecordReader(flowFile, in, logger);
    schema = recordParser.getSchema();
  } catch (final Exception e) {
    throw new ProcessException("Failed to determine schema of data records for " + flowFile, e);
  }

  // Translate every record field into a Calcite type, preserving nullability.
  final List<String> names = new ArrayList<>();
  final List<RelDataType> types = new ArrayList<>();
  final JavaTypeFactory javaTypeFactory = (JavaTypeFactory) typeFactory;
  for (final RecordField field : schema.getFields()) {
    names.add(field.getFieldName());
    // Renamed from 'relDataType' to avoid shadowing the field of the same name
    // that is checked and assigned by this method.
    final RelDataType fieldType = getRelDataType(field.getDataType(), javaTypeFactory);
    types.add(javaTypeFactory.createTypeWithNullability(fieldType, field.isNullable()));
  }

  logger.debug("Found Schema: {}", new Object[] {schema});

  // Remember the record schema the first time it is resolved.
  if (recordSchema == null) {
    recordSchema = schema;
  }

  // Cache the computed row type so subsequent calls return immediately.
  relDataType = typeFactory.createStructType(Pair.zip(names, types));
  return relDataType;
}

代码示例来源:origin: apache/nifi

throws IllegalArgumentException, MalformedRecordException, IOException, SQLException {
final RecordSchema recordSchema = recordParser.getSchema();
final ComponentLog log = getLogger();

代码示例来源:origin: apache/nifi

protected RecordSchema getValidationSchema(final ProcessContext context, final FlowFile flowFile, final RecordReader reader)
    throws MalformedRecordException, IOException, SchemaNotFoundException {
    // Resolve the schema used for validation according to the configured access strategy.
    final String strategy = context.getProperty(SCHEMA_ACCESS_STRATEGY).getValue();

    if (strategy.equals(READER_SCHEMA.getValue())) {
      // Use whatever schema the reader itself derived from the incoming data.
      return reader.getSchema();
    } else if (strategy.equals(SCHEMA_NAME_PROPERTY.getValue())) {
      // Look the schema up by name in the configured Schema Registry.
      final SchemaRegistry registry = context.getProperty(SCHEMA_REGISTRY).asControllerService(SchemaRegistry.class);
      final String name = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
      final SchemaIdentifier identifier = SchemaIdentifier.builder().name(name).build();
      return registry.retrieveSchema(identifier);
    } else if (strategy.equals(SCHEMA_TEXT_PROPERTY.getValue())) {
      // Parse the schema directly from the configured Avro schema text.
      final String text = context.getProperty(SCHEMA_TEXT).evaluateAttributeExpressions(flowFile).getValue();
      final Schema avroSchema = new Schema.Parser().parse(text);
      return AvroTypeUtil.createSchema(avroSchema);
    } else {
      throw new ProcessException("Invalid Schema Access Strategy: " + strategy);
    }
  }
}

代码示例来源:origin: apache/nifi

@Override
  public void process(final InputStream in, final OutputStream out) throws IOException {
    // Re-read the FlowFile content record-by-record, transform each record, and
    // write the results out using the configured Record Writer.
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
      final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());

      try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) {
        writer.beginRecordSet();

        // Push every record through the processor's transform before writing it.
        for (Record record = reader.nextRecord(); record != null; record = reader.nextRecord()) {
          final Record transformed = AbstractRecordProcessor.this.process(record, writeSchema, original, context);
          writer.write(transformed);
        }

        final WriteResult result = writer.finishRecordSet();

        // Surface the record count and MIME type on the outgoing FlowFile.
        attributes.put("record.count", String.valueOf(result.getRecordCount()));
        attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
        attributes.putAll(result.getAttributes());
        recordCount.set(result.getRecordCount());
      }
    } catch (final SchemaNotFoundException e) {
      throw new ProcessException(e.getLocalizedMessage(), e);
    } catch (final MalformedRecordException e) {
      throw new ProcessException("Could not parse incoming data", e);
    }
  }
});

代码示例来源:origin: apache/nifi

@Override
  public void process(final InputStream in) throws IOException {
    // Read each record, ask route() which relationships it belongs to, and lazily
    // create one child FlowFile plus writer per relationship on first use.
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
      final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());

      Record record;
      while ((record = reader.nextRecord()) != null) {
        final Set<Relationship> destinations = route(record, writeSchema, original, context, flowFileContext);
        numRecords.incrementAndGet();

        for (final Relationship destination : destinations) {
          Tuple<FlowFile, RecordSetWriter> entry = writers.get(destination);
          if (entry == null) {
            // First record routed to this relationship: create its child FlowFile
            // and start a record set on a fresh writer.
            final FlowFile childFlowFile = session.create(original);
            final OutputStream rawOut = session.write(childFlowFile);
            final RecordSetWriter newWriter = writerFactory.createWriter(getLogger(), writeSchema, rawOut);
            newWriter.beginRecordSet();
            entry = new Tuple<>(childFlowFile, newWriter);
            writers.put(destination, entry);
          }
          entry.getValue().write(record);
        }
      }
    } catch (final SchemaNotFoundException | MalformedRecordException e) {
      throw new ProcessException("Could not parse incoming data", e);
    }
  }
});

代码示例来源:origin: apache/nifi

try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
  final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema());

代码示例来源:origin: apache/nifi

try (final InputStream in = session.read(flowFile);
   final RecordReader recordReader = recordReaderFactory.createRecordReader(flowFile, in, getLogger())) {
  final List<String> fieldNames = recordReader.getSchema().getFieldNames();
  final RecordSet recordSet = recordReader.createRecordSet();

代码示例来源:origin: apache/nifi

private void binFlowFile(final ProcessContext context, final FlowFile flowFile, final ProcessSession session, final RecordBinManager binManager, final boolean block) {
  // Derive the bin's group id from the FlowFile's record schema, then hand the
  // FlowFile and its reader over to the bin manager.
  final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);

  try (final InputStream rawIn = session.read(flowFile);
    final RecordReader recordReader = readerFactory.createRecordReader(flowFile, rawIn, getLogger())) {

    final RecordSchema recordSchema = recordReader.getSchema();
    final String groupId = getGroupId(context, flowFile, recordSchema, session);
    getLogger().debug("Got Group ID {} for {}", new Object[] {groupId, flowFile});

    binManager.add(groupId, flowFile, recordReader, session, block);
  } catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
    throw new ProcessException(e);
  }
}

代码示例来源:origin: apache/nifi

final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger());
final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());

代码示例来源:origin: apache/nifi

recordWriter = createHDFSRecordWriter(context, flowFile, configuration, tempFile, recordReader.getSchema());
  writeResult.set(recordWriter.write(recordSet));
} catch (Exception e) {

代码示例来源:origin: apache/nifi

final RecordReader reader = recordParserFactory.createRecordReader(flowFile, inStream, getLogger())) {
final MongoCollection<Document> collection = getCollection(context, flowFile).withWriteConcern(writeConcern);
RecordSchema schema = reader.getSchema();
Record record;
while ((record = reader.nextRecord()) != null) {

代码示例来源:origin: apache/nifi

PutFlowFile putFlowFile = createPut(context, record, reader.getSchema(), recordPath, flowFile, rowFieldName, columnFamily,
    timestampFieldName, fieldEncodingStrategy, rowEncodingStrategy, complexFieldStrategy);
if (putFlowFile.getColumns().size() == 0) {

代码示例来源:origin: apache/nifi

try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
  final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
  final OutputStream out = session.write(outFlowFile);

代码示例来源:origin: apache/nifi

final Map<String, String> originalAttributes = original.getAttributes();
final RecordReader reader = recordReaderFactory.createRecordReader(originalAttributes, rawIn, getLogger());
final RecordSchema inputSchema = reader.getSchema();

代码示例来源:origin: apache/nifi

final RecordReader reader = recordParserFactory.createRecordReader(inputFlowFile, inputStream, getLogger())){
final RecordSchema schema = reader.getSchema();
Record record;

代码示例来源:origin: apache/nifi

try (final InputStream in = session.read(original);
   final RecordReader reader = readerFactory.createRecordReader(original, in, getLogger())) {
  schema = writerFactory.getSchema(original.getAttributes(), reader.getSchema());
  Record record;

代码示例来源:origin: apache/nifi

final RecordReader recordReader = avroReader.createRecordReader(validFlowFile.getAttributes(), resultContentStream, runner.getLogger());
) {
  final RecordSchema resultSchema = recordReader.getSchema();
  assertEquals(3, resultSchema.getFieldCount());

相关文章