org.apache.nifi.serialization.record.Record.getSchema()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(10.3k)|赞(0)|评价(0)|浏览(88)

本文整理了Java中org.apache.nifi.serialization.record.Record.getSchema方法的一些代码示例,展示了Record.getSchema的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Record.getSchema方法的具体详情如下:
包路径:org.apache.nifi.serialization.record.Record
类名称:Record
方法名:getSchema

Record.getSchema介绍

暂无

代码示例

代码示例来源:origin: apache/nifi

/**
 * Builds a new Record with the same schema as {@code record}, populated by
 * evaluating each configured mapping: the map key is a JsonPath expression
 * resolved against {@code source}, and the map value is the RecordPath that
 * identifies where the extracted value is written in the new record.
 *
 * @param record the record whose schema the result shares
 * @param source the raw source map the JsonPath expressions are read from
 * @return a freshly created record containing the mapped values
 * @throws RuntimeException wrapping any failure during path evaluation
 */
private Record applyMappings(Record record, Map<String, Object> source) {
  // Start from an empty record that shares the input record's schema.
  final Record result = new MapRecord(record.getSchema(), new HashMap<>());
  mappings.forEach((jsonPath, recordPath) -> {
    try {
      final Object extracted = JsonPath.read(source, jsonPath);
      // Only the first field selected by the RecordPath is updated.
      recordPath.evaluate(result)
          .getSelectedFields()
          .findFirst()
          .ifPresent(field -> field.updateValue(extracted));
    } catch (Exception ex) {
      // Preserve the original cause for the caller.
      throw new RuntimeException(ex);
    }
  });
  return result;
}

代码示例来源:origin: apache/nifi

/**
 * Writes a single Record as one comma-separated line, optionally quoting
 * values and emitting the configured header line before the first record.
 * Configured to fail deliberately for unit tests: once more than
 * {@code failAfterN} records have been written (and failAfterN >= 0), an
 * IOException is thrown.
 *
 * Fix: all text is now encoded explicitly as UTF-8. The original called
 * {@code String.getBytes()} with no charset, so the bytes written depended
 * on the JVM's platform-default encoding.
 *
 * @param record the record to serialize
 * @return a WriteResult reporting one record written and no attributes
 * @throws IOException if the underlying stream fails, or intentionally
 *         after {@code failAfterN} records
 */
@Override
public WriteResult write(Record record) throws IOException {
  if (++recordCount > failAfterN && failAfterN > -1) {
    throw new IOException("Unit Test intentionally throwing IOException after " + failAfterN + " records were written");
  }
  // Emit the header exactly once, before the first record.
  if (header != null && !headerWritten) {
    writeUtf8(header);
    writeUtf8("\n");
    headerWritten = true;
  }
  final int numCols = record.getSchema().getFieldCount();
  int i = 0;
  for (final String fieldName : record.getSchema().getFieldNames()) {
    final String val = record.getAsString(fieldName);
    if (val != null) {
      if (quoteValues) {
        writeUtf8("\"");
        writeUtf8(val);
        writeUtf8("\"");
      } else {
        writeUtf8(val);
      }
    }
    // Comma between columns, but not after the last one.
    if (i++ < numCols - 1) {
      writeUtf8(",");
    }
  }
  writeUtf8("\n");
  return WriteResult.of(1, Collections.emptyMap());
}

// Writes the given text to the output stream as UTF-8 bytes, so the output
// does not vary with the JVM's platform-default charset.
private void writeUtf8(final String text) throws IOException {
  out.write(text.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}

代码示例来源:origin: apache/nifi

final int numCols = record.getSchema().getFieldCount();
for (final String fieldName : record.getSchema().getFieldNames()) {
  final String val = record.getAsString(fieldName);
  if (val != null) {

代码示例来源:origin: apache/nifi

/**
 * Writes a single Record by delegating to the shared write(...) routine,
 * deriving the column names from the record's own schema.
 *
 * @param record the record to write
 * @return an empty attribute map (this writer contributes no FlowFile attributes)
 * @throws IOException if writing to the output stream fails
 */
@Override
public Map<String, String> writeRecord(final Record record) throws IOException {
  write(record, getOutputStream(), getColumnNames(record.getSchema()));
  return Collections.emptyMap();
}

代码示例来源:origin: apache/nifi

/**
 * Returns the immediate children of the given field value. Only non-null
 * Record values have children; anything else yields an empty stream.
 * Child fields whose value is null are omitted from the result.
 */
private Stream<FieldValue> getChildren(final FieldValue fieldValue) {
  // Guard: nothing to enumerate unless the value is a non-null Record.
  if (fieldValue == null || fieldValue.getValue() == null) {
    return Stream.empty();
  }
  if (!Filters.isRecord(fieldValue)) {
    return Stream.empty();
  }
  final Record record = (Record) fieldValue.getValue();
  return Filters.presentValues(record.getSchema().getFields().stream()
    .map(field -> {
      final Object childValue = record.getValue(field);
      // Null children are dropped by Filters.presentValues.
      return childValue == null
          ? Optional.empty()
          : Optional.of(new StandardFieldValue(childValue, field, fieldValue));
    }));
}

代码示例来源:origin: apache/nifi

final RecordSchema recordSchema = record.getSchema();
if (recordSchema == null) {
  throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type Record to Map for field " + fieldName

代码示例来源:origin: apache/nifi

/**
 * Converts a NiFi Record into an Avro GenericRecord conforming to the given
 * Avro schema. Record fields with no counterpart in the Avro schema are
 * dropped; Avro-only fields that declare a default value are populated with
 * that default.
 *
 * @param record     the NiFi record to convert
 * @param avroSchema the target Avro schema
 * @param charset    charset used when converting textual values
 * @return the populated Avro record
 * @throws IOException if value conversion fails
 */
public static GenericRecord createAvroRecord(final Record record, final Schema avroSchema, final Charset charset) throws IOException {
  final GenericRecord avroRecord = new GenericData.Record(avroSchema);
  final RecordSchema recordSchema = record.getSchema();

  // Copy each record field whose counterpart exists in the Avro schema.
  for (final RecordField recordField : recordSchema.getFields()) {
    final Object rawValue = record.getValue(recordField);
    final Pair<String, Field> mapping = lookupField(avroSchema, recordField);
    final Field avroField = mapping.getRight();
    if (avroField == null) {
      // No matching Avro field: the value is intentionally dropped.
      continue;
    }
    final String avroFieldName = mapping.getLeft();
    avroRecord.put(avroFieldName, convertToAvroObject(rawValue, avroField.schema(), avroFieldName, charset));
  }

  // Fields present only in the Avro schema: fill in declared defaults.
  for (final Field avroField : avroSchema.getFields()) {
    final Optional<RecordField> matching = recordSchema.getField(avroField.name());
    if (!matching.isPresent() && avroRecord.get(avroField.name()) == null && avroField.defaultVal() != null) {
      avroRecord.put(avroField.name(), avroField.defaultVal());
    }
  }
  return avroRecord;
}

代码示例来源:origin: apache/nifi

@Override
  public Stream<FieldValue> evaluate(final RecordPathEvaluationContext context) {
    final RecordField field = new RecordField("root", RecordFieldType.RECORD.getRecordDataType(context.getRecord().getSchema()));
    final FieldValue fieldValue = new StandardFieldValue(context.getRecord(), field, null);
    return Stream.of(fieldValue);
  }
}

代码示例来源:origin: apache/nifi

/**
 * Serializes the given record as a JSON object using its own schema.
 * Null-valued fields are emitted or suppressed according to the configured
 * null-suppression mode.
 */
private void writeRecord(final Record record, final RecordSchema writeSchema, final JsonGenerator generator)
    throws IOException {
  final RecordSchema schema = record.getSchema();

  generator.writeStartObject();
  for (int fieldIndex = 0; fieldIndex < schema.getFieldCount(); fieldIndex++) {
    final RecordField field = schema.getField(fieldIndex);
    final String fieldName = field.getFieldName();
    final Object value = record.getValue(field);

    if (value == null) {
      // Write an explicit JSON null only when suppression allows it:
      // either nulls are never suppressed, or only *missing* fields are
      // suppressed and this field was present in the raw data.
      final boolean writeNullField = nullSuppression.equals(NEVER_SUPPRESS.getValue())
          || (nullSuppression.equals(SUPPRESS_MISSING.getValue()) && record.getRawFieldNames().contains(fieldName));
      if (writeNullField) {
        generator.writeNullField(fieldName);
      }
      continue;
    }

    generator.writeFieldName(fieldName);
    writeValue(generator, value, fieldName, schema.getDataType(fieldName).get());
  }
  generator.writeEndObject();
}

代码示例来源:origin: apache/nifi

@Override
public Object deserialize(Writable writable) throws SerDeException {
  ObjectWritable t = (ObjectWritable) writable;
  Record record = (Record) t.get();
  List<Object> r = new ArrayList<>(Collections.nCopies(columnNames.size(), null));
  try {
    RecordSchema recordSchema = record.getSchema();
    for (RecordField field : recordSchema.getFields()) {
      String fieldName = field.getFieldName();
      String normalizedFieldName = fieldName.toLowerCase();
      // Get column position of field name, and set field value there
      Integer fpos = fieldPositionMap.get(normalizedFieldName);
      if(fpos == null || fpos == -1) {
        // This is either a partition column or not a column in the target table, ignore either way
        continue;
      }
      Object currField = extractCurrentField(record, field, schema.getStructFieldTypeInfo(normalizedFieldName));
      r.set(fpos, currField);
    }
    stats.setRowCount(stats.getRowCount() + 1);
  } catch (Exception e) {
    log.warn("Error [{}] parsing Record [{}].", new Object[]{e.toString(), t}, e);
    throw new SerDeException(e);
  }
  return r;
}

代码示例来源:origin: apache/nifi

/**
 * Collects every descendant (children, grandchildren, ...) of the given
 * field value, depth-first. Only non-null Record values contribute
 * descendants; null-valued child fields are skipped entirely.
 */
private List<FieldValue> findDescendants(final FieldValue fieldValue) {
    // Nulls and non-record values have no descendants.
    if (fieldValue == null || fieldValue.getValue() == null || !Filters.isRecord(fieldValue)) {
      return Collections.emptyList();
    }

    final Record record = (Record) fieldValue.getValue();
    final List<FieldValue> descendants = new ArrayList<>();

    for (final RecordField childField : record.getSchema().getFields()) {
      final Object childValue = record.getValue(childField);
      if (childValue == null) {
        continue;
      }

      descendants.add(new StandardFieldValue(childValue, childField, fieldValue));

      // Recurse into nested records so deeper descendants are included.
      if (Filters.isRecord(childField.getDataType(), childValue)) {
        descendants.addAll(findDescendants(new StandardFieldValue(childValue, childField, fieldValue)));
      }
    }

    return descendants;
  }
}

代码示例来源:origin: apache/nifi

/**
 * Copies every field of the given field's parent record — and, recursively,
 * of all ancestor records — into {@code recordToWrite}. Existing non-null
 * values in {@code recordToWrite} are never overwritten, so deeper fields
 * keep priority over ancestor fields. Recursion stops at the root, where
 * the field has no parent.
 *
 * Fix: the original called {@code Optional.get()} unchecked and relied on
 * catching NoSuchElementException to terminate the recursion, which also
 * silently swallowed any unrelated NoSuchElementException raised inside the
 * loop. Termination is now an explicit presence check.
 */
private void recursivelyAddParentFields(Record recordToWrite, FieldValue fieldValue) {
    final Optional<FieldValue> parentField = fieldValue.getParent();
    final Optional<Record> maybeParentRecord = fieldValue.getParentRecord();
    // Reached the root of the record tree: nothing more to inherit.
    if (!parentField.isPresent() || !maybeParentRecord.isPresent()) {
      return;
    }
    final Record parentRecord = maybeParentRecord.get();
    for (String field : parentRecord.getSchema().getFieldNames()) {
      // Only fill fields that are still empty — deeper values win.
      // setValue is a no-op if the field is absent from recordToWrite's schema.
      if (recordToWrite.getValue(field) == null) {
        recordToWrite.setValue(field, parentRecord.getValue(field));
      }
    }
    // Walk up one level and repeat.
    recursivelyAddParentFields(recordToWrite, parentField.get());
  }
});

代码示例来源:origin: apache/nifi

final RecordSchema recordSchema = record.getSchema();
return RecordFieldType.RECORD.getRecordDataType(recordSchema);

代码示例来源:origin: apache/nifi

final List<FieldValue> matchingValues = new ArrayList<>();
for (final RecordField childField : record.getSchema().getFields()) {
  if (childField.getFieldName().equals(descendantName) || childField.getAliases().contains(descendantName)) {
    final Object value = record.getValue(descendantName);

代码示例来源:origin: apache/nifi

/**
 * Writes each field of the given Record into the SolrInputDocument. When a
 * parent field name is supplied, field names are prefixed with it (joined
 * by '_') to flatten nested records. Null-valued fields are skipped.
 */
public static void writeRecord(final Record record, final SolrInputDocument inputDocument,final List<String> fieldsToIndex,String parentFieldName)
    throws IOException {
  final RecordSchema schema = record.getSchema();
  final int fieldCount = schema.getFieldCount();

  for (int index = 0; index < fieldCount; index++) {
    final RecordField field = schema.getField(index);
    final Object value = record.getValue(field);
    if (value == null) {
      // Nothing to index for this field.
      continue;
    }

    // Flatten nested records by prefixing with the parent field's name.
    final String fieldName = StringUtils.isBlank(parentFieldName)
        ? field.getFieldName()
        : parentFieldName + "_" + field.getFieldName();

    final DataType dataType = schema.getDataType(field.getFieldName()).get();
    writeValue(inputDocument, value, fieldName, dataType, fieldsToIndex);
  }
}

代码示例来源:origin: apache/nifi

/**
 * Resolves the child named {@code childName} within the given field value.
 * Returns a missing-child placeholder when the parent is not a non-null
 * Record, when the child's value is null, or when the child's field is not
 * declared in the record's schema.
 */
private FieldValue getChild(final FieldValue fieldValue) {
  // The parent must be a Record for a child to exist at all.
  if (!Filters.isRecord(fieldValue)) {
    return missingChild(fieldValue);
  }

  final Record record = (Record) fieldValue.getValue();
  if (record == null) {
    return missingChild(fieldValue);
  }

  final Object childValue = record.getValue(childName);
  if (childValue == null) {
    return missingChild(fieldValue);
  }

  // Both the value and its schema field must exist to yield a real child.
  return record.getSchema().getField(childName)
      .map(childField -> (FieldValue) new StandardFieldValue(childValue, childField, fieldValue))
      .orElseGet(() -> missingChild(fieldValue));
}

代码示例来源:origin: apache/nifi

RecordSchema recordSchema = record.getSchema();
if (recordSchema == null) {
  throw new IllegalTypeConversionException("Cannot convert value of type Record to Map because Record does not have an associated Schema");

代码示例来源:origin: apache/nifi

if (serializedForm.isPresent()) {
  final SerializedForm form = serializedForm.get();
  if (form.getMimeType().equals(getMimeType()) && record.getSchema().equals(writeSchema)) {
    final Object serialized = form.getSerialized();
    if (serialized instanceof String) {
  logger.error("Failed to write {} with schema {} as a JSON Object due to {}", new Object[] {record, record.getSchema(), e.toString(), e});
  throw e;

代码示例来源:origin: apache/nifi

writeRecord(record, record.getSchema(), generator, gen -> gen.writeStartObject(), gen -> gen.writeEndObject(), false);
return;

代码示例来源:origin: apache/nifi

while ((record = reader.nextRecord()) != null) {
  Map<String, Object> contentMap = (Map<String, Object>) DataTypeUtils.convertRecordFieldtoObject(record, RecordFieldType.RECORD.getRecordDataType(record.getSchema()));
  Document document = new Document();
  for (String name : schema.getFieldNames()) {

相关文章