本文整理了Java中org.apache.nifi.serialization.record.Record
类的一些代码示例,展示了Record
类的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Record
类的具体详情如下:
包路径:org.apache.nifi.serialization.record.Record
类名称:Record
暂无
代码示例来源:origin: apache/nifi
/**
 * Returns the immediate child fields of the given field value, or an empty
 * stream when the value is null or is not itself a Record.
 */
private Stream<FieldValue> getChildren(final FieldValue fieldValue) {
    // Only record-typed, non-null values can have children.
    if (fieldValue == null || fieldValue.getValue() == null || !Filters.isRecord(fieldValue)) {
        return Stream.empty();
    }

    final Record childRecord = (Record) fieldValue.getValue();
    final Stream<Optional<FieldValue>> candidates = childRecord.getSchema().getFields().stream()
        .map(schemaField -> {
            final Object childValue = childRecord.getValue(schemaField);
            // Null-valued fields produce no child FieldValue.
            return childValue == null
                ? Optional.<FieldValue>empty()
                : Optional.<FieldValue>of(new StandardFieldValue(childValue, schemaField, fieldValue));
        });

    return Filters.presentValues(candidates);
}
代码示例来源:origin: apache/nifi
Integer bIntValue = record.getAsInt(fieldName);
val = bIntValue == null ? null : bIntValue.byteValue();
break;
case SHORT:
Integer sIntValue = record.getAsInt(fieldName);
val = sIntValue == null ? null : sIntValue.shortValue();
break;
case INT:
val = record.getAsInt(fieldName);
break;
case LONG:
val = record.getAsLong(fieldName);
break;
case BOOLEAN:
val = record.getAsBoolean(fieldName);
break;
case FLOAT:
val = record.getAsFloat(fieldName);
break;
case DOUBLE:
val = record.getAsDouble(fieldName);
break;
case STRING:
case VARCHAR:
case CHAR:
val = record.getAsString(fieldName);
break;
case BINARY:
Object[] array = record.getAsArray(fieldName);
代码示例来源:origin: apache/nifi
/**
 * Serializes a single Record as a JSON object.
 *
 * Null-valued fields are emitted as explicit JSON nulls only when the writer is
 * configured to never suppress nulls, or when it suppresses only missing fields
 * and this field is actually present (raw) with a null value.
 */
private void writeRecord(final Record record, final RecordSchema writeSchema, final JsonGenerator generator)
    throws IOException {
    final RecordSchema schema = record.getSchema();
    generator.writeStartObject();

    final int fieldCount = schema.getFieldCount();
    for (int index = 0; index < fieldCount; index++) {
        final RecordField recordField = schema.getField(index);
        final String fieldName = recordField.getFieldName();
        final Object fieldValue = record.getValue(recordField);

        if (fieldValue == null) {
            // Note: && binds tighter than ||, so this is A || (B && C) — same as the
            // original, with the grouping made explicit.
            final boolean writeNull = nullSuppression.equals(NEVER_SUPPRESS.getValue())
                || (nullSuppression.equals(SUPPRESS_MISSING.getValue()) && record.getRawFieldNames().contains(fieldName));
            if (writeNull) {
                generator.writeNullField(fieldName);
            }
            continue;
        }

        generator.writeFieldName(fieldName);
        final DataType dataType = schema.getDataType(fieldName).get();
        writeValue(generator, fieldValue, fieldName, dataType);
    }

    generator.writeEndObject();
}
代码示例来源:origin: apache/nifi
/**
 * Walks up the chain of parent records, copying each ancestor's fields into
 * {@code recordToWrite}. Deeper (already-populated) fields keep priority and
 * are never overwritten.
 *
 * Fixed: the original called Optional.get() and used a caught
 * NoSuchElementException as the recursion terminator — exceptions as control
 * flow. We now check presence explicitly and stop cleanly at the root.
 */
private void recursivelyAddParentFields(Record recordToWrite, FieldValue fieldValue) {
    final Optional<FieldValue> parentFieldOption = fieldValue.getParent();
    final Optional<Record> parentRecordOption = fieldValue.getParentRecord();
    if (!parentFieldOption.isPresent() || !parentRecordOption.isPresent()) {
        // Reached the root of the record tree — nothing left to merge.
        return;
    }

    final Record parentRecord = parentRecordOption.get();
    // For each field of the parent…
    for (final String field : parentRecord.getSchema().getFieldNames()) {
        // …copy it only if no deeper field already supplied a value
        // (deeper existing fields take priority).
        if (recordToWrite.getValue(field) == null) {
            // setValue is a no-op when the field is absent from the schema.
            recordToWrite.setValue(field, parentRecord.getValue(field));
        }
    }

    // Recurse to the next ancestor.
    recursivelyAddParentFields(recordToWrite, parentFieldOption.get());
}
});
代码示例来源:origin: apache/nifi
/**
 * Writes one record as a comma-separated line, optionally quoting values and
 * writing a header before the first record.
 *
 * Fixed: the original used String.getBytes() with the platform-default
 * charset, making the output depend on the JVM's environment; bytes are now
 * always produced as UTF-8 (fully qualified to avoid touching imports).
 *
 * @throws IOException intentionally after {@code failAfterN} records
 *         (when {@code failAfterN > -1}) to simulate writer failures in tests
 */
@Override
public WriteResult write(Record record) throws IOException {
    final java.nio.charset.Charset charset = java.nio.charset.StandardCharsets.UTF_8;

    // Simulated failure hook: -1 disables it.
    if (++recordCount > failAfterN && failAfterN > -1) {
        throw new IOException("Unit Test intentionally throwing IOException after " + failAfterN + " records were written");
    }

    // Emit the header exactly once, before the first record.
    if (header != null && !headerWritten) {
        out.write(header.getBytes(charset));
        out.write("\n".getBytes(charset));
        headerWritten = true;
    }

    final int numCols = record.getSchema().getFieldCount();
    int columnIndex = 0;
    for (final String fieldName : record.getSchema().getFieldNames()) {
        final String value = record.getAsString(fieldName);
        if (value != null) {
            if (quoteValues) {
                out.write("\"".getBytes(charset));
                out.write(value.getBytes(charset));
                out.write("\"".getBytes(charset));
            } else {
                out.write(value.getBytes(charset));
            }
        }
        // Comma between columns, but not after the last one.
        if (columnIndex++ < numCols - 1) {
            out.write(",".getBytes(charset));
        }
    }
    out.write("\n".getBytes(charset));

    return WriteResult.of(1, Collections.emptyMap());
}
代码示例来源:origin: apache/nifi
/**
 * Builds a new record with the same schema as {@code record}, populating it by
 * reading each configured JsonPath from {@code source} and writing the result
 * to the field selected by the corresponding RecordPath.
 */
private Record applyMappings(Record record, Map<String, Object> source) {
    final Record mapped = new MapRecord(record.getSchema(), new HashMap<>());

    mappings.forEach((jsonPath, recordPath) -> {
        try {
            // Extract the value from the source map via JsonPath…
            final Object extracted = JsonPath.read(source, jsonPath);
            // …and store it into the first field the record path selects.
            final Optional<FieldValue> target = recordPath.evaluate(mapped).getSelectedFields().findFirst();
            target.ifPresent(field -> field.updateValue(extracted));
        } catch (Exception ex) {
            // Surface any path/lookup failure to the caller unchecked, as before.
            throw new RuntimeException(ex);
        }
    });

    return mapped;
}
代码示例来源:origin: apache/nifi
break;
default:
final String value = record.getAsString(field);
retVal = clientService.toBytes(value);
break;
break;
case BOOLEAN:
retVal = clientService.toBytes(record.getAsBoolean(field));
break;
case DOUBLE:
retVal = clientService.toBytes(record.getAsDouble(field));
break;
case FLOAT:
retVal = clientService.toBytes(record.getAsFloat(field));
break;
case INT:
retVal = clientService.toBytes(record.getAsInt(field));
break;
case LONG:
retVal = clientService.toBytes(record.getAsLong(field));
break;
default:
final String value = record.getAsString(field);
retVal = clientService.toBytes(value);
break;
代码示例来源:origin: apache/nifi
private void writeRecord(final Record record, final RecordSchema writeSchema, final JsonGenerator generator,
final GeneratorTask startTask, final GeneratorTask endTask, final boolean schemaAware) throws JsonGenerationException, IOException {
final Optional<SerializedForm> serializedForm = record.getSerializedForm();
if (serializedForm.isPresent()) {
final SerializedForm form = serializedForm.get();
if (form.getMimeType().equals(getMimeType()) && record.getSchema().equals(writeSchema)) {
final Object serialized = form.getSerialized();
if (serialized instanceof String) {
for (final RecordField field : writeSchema.getFields()) {
final String fieldName = field.getFieldName();
final Object value = record.getValue(field);
if (value == null) {
if (nullSuppression == NullSuppression.NEVER_SUPPRESS || (nullSuppression == NullSuppression.SUPPRESS_MISSING) && isFieldPresent(field, record)) {
for (final String fieldName : record.getRawFieldNames()) {
final Object value = record.getValue(fieldName);
if (value == null) {
if (nullSuppression == NullSuppression.NEVER_SUPPRESS || (nullSuppression == NullSuppression.SUPPRESS_MISSING) && record.getRawFieldNames().contains(fieldName)) {
generator.writeNullField(fieldName);
logger.error("Failed to write {} with schema {} as a JSON Object due to {}", new Object[] {record, record.getSchema(), e.toString(), e});
throw e;
代码示例来源:origin: apache/nifi
/**
 * Writes each raw field of the record as XML without consulting a schema.
 *
 * @return true if at least one field produced output
 */
private boolean iterateThroughRecordWithoutSchema(Deque<String> tagsToOpen, Record record) throws XMLStreamException {
    boolean anyFieldWritten = false;

    for (final String fieldName : record.getRawFieldNames()) {
        final Object value = record.getValue(fieldName);

        if (value == null) {
            // A null value still yields an (empty) element unless nulls are fully suppressed.
            if (nullSuppression.equals(NullSuppression.NEVER_SUPPRESS) || nullSuppression.equals(NullSuppression.SUPPRESS_MISSING)) {
                writeAllTags(tagsToOpen, fieldName);
                writer.writeEndElement();
                anyFieldWritten = true;
            }
        } else {
            anyFieldWritten |= writeUnknownField(tagsToOpen, value, fieldName);
        }
    }

    return anyFieldWritten;
}
代码示例来源:origin: apache/nifi
/**
 * Converts each record field to its ORC representation and appends the row to
 * the ORC writer. No-ops when {@code recordFields} has not been configured.
 *
 * Fixed: the original error path called {@code o.toString()}, which throws
 * NullPointerException when the offending value is null, masking the real
 * ArrayIndexOutOfBoundsException; null-safe concatenation is used instead.
 *
 * @throws IOException wrapping an index error from ORC conversion, with the
 *         column index, field name, and value included for diagnosis
 */
@Override
public void write(final Record record) throws IOException {
    if (recordFields != null) {
        for (int i = 0; i < numRecordFields; i++) {
            final RecordField field = recordFields.get(i);
            final DataType fieldType = field.getDataType();
            final String fieldName = field.getFieldName();
            final Object rawValue = record.getValue(field);
            try {
                workingRow[i] = NiFiOrcUtils.convertToORCObject(NiFiOrcUtils.getOrcField(fieldType, hiveFieldNames), rawValue, hiveFieldNames);
            } catch (ArrayIndexOutOfBoundsException aioobe) {
                // String concatenation renders null as "null" instead of throwing NPE.
                final String errorMsg = "Index out of bounds for column " + i + ", type " + fieldName + ", and object " + rawValue;
                throw new IOException(errorMsg, aioobe);
            }
        }
        orcWriter.addRow(NiFiOrcUtils.createOrcStruct(orcSchema, workingRow));
    }
}
代码示例来源:origin: apache/nifi
/**
 * Evaluates the configured expression against the record's column values and
 * writes the result, followed by a newline, to the output stream.
 */
private void write(final Record record, final OutputStream out, final List<String> columnNames) throws IOException {
    // Map each column name to its string value for expression-language evaluation.
    final Map<String, String> values = new HashMap<>(columnNames.size());
    for (final String columnName : columnNames) {
        values.put(columnName, record.getAsString(columnName));
    }

    final String evaluated = propertyValue.evaluateAttributeExpressions(values).getValue();
    out.write(evaluated.getBytes(charset));
    out.write(NEW_LINE);
}
代码示例来源:origin: apache/nifi
record.incorporateSchema(writeSchema);
final Record destinationRecord = (Record) destinationValue;
for (final String fieldName : lookupRecord.getRawFieldNames()) {
final Object value = lookupRecord.getValue(fieldName);
destinationRecord.setValue(fieldName, value);
parentOption.get().setValue(fieldVal.getField().getFieldName(), lookupRecord);
代码示例来源:origin: apache/nifi
if (!StringUtils.isBlank(timestampFieldName)) {
try {
timestamp = record.getAsLong(timestampFieldName);
} catch (IllegalTypeConversionException e) {
throw new PutCreationFailedInvokedException("Could not convert " + timestampFieldName + " to a long", e);
Object val = record.getValue(name);
final byte[] fieldValueBytes;
if (val == null && nullStrategy.equals(NULL_FIELD_SKIP.getValue())) {
String rowIdValue = record.getAsString(rowFieldName);
if (rowIdValue == null) {
throw new PutCreationFailedInvokedException(String.format("Row ID was null for flowfile with ID %s", flowFile.getAttribute("uuid")));
代码示例来源:origin: apache/nifi
/**
 * Returns the union of the record's raw field names and the schema's field
 * names, preserving first-seen order. Computed lazily from the first record
 * and cached in {@code fieldNames} for all subsequent calls.
 */
private String[] getFieldNames(final Record record) {
    if (fieldNames == null) {
        // LinkedHashSet keeps insertion order: raw fields first, then schema-only fields.
        final Set<String> combined = new LinkedHashSet<>(record.getRawFieldNames());
        combined.addAll(recordSchema.getFieldNames());
        fieldNames = combined.toArray(new String[0]);
    }
    return fieldNames;
}
代码示例来源:origin: apache/nifi
@Override
// Serializes one record by delegating to the shared write(...) path, using the
// output stream and the column names derived from the record's schema.
// Returns no additional attributes for the flow file.
public Map<String, String> writeRecord(final Record record) throws IOException {
write(record, getOutputStream(), getColumnNames(record.getSchema()));
return Collections.emptyMap();
}
代码示例来源:origin: apache/nifi
if (record.getValue(colName) == null) {
row.setNull(colName);
continue;
row.addBoolean(colIdx, record.getAsBoolean(colName));
break;
case FLOAT:
row.addFloat(colIdx, record.getAsFloat(colName));
break;
case DOUBLE:
row.addDouble(colIdx, record.getAsDouble(colName));
break;
case BINARY:
row.addBinary(colIdx, record.getAsString(colName).getBytes());
break;
case INT8:
row.addByte(colIdx, record.getAsInt(colName).byteValue());
break;
case INT16:
row.addShort(colIdx, record.getAsInt(colName).shortValue());
break;
case INT32:
row.addInt(colIdx, record.getAsInt(colName));
break;
case INT64:
case UNIXTIME_MICROS:
row.addLong(colIdx, record.getAsLong(colName));
break;
case STRING:
代码示例来源:origin: org.apache.nifi/nifi-standard-processors
/**
 * Walks up the chain of parent records, copying each ancestor's fields into
 * {@code recordToWrite}. Deeper (already-populated) fields keep priority and
 * are never overwritten.
 *
 * Fixed: the original called Optional.get() and used a caught
 * NoSuchElementException as the recursion terminator — exceptions as control
 * flow. We now check presence explicitly and stop cleanly at the root.
 */
private void recursivelyAddParentFields(Record recordToWrite, FieldValue fieldValue) {
    final Optional<FieldValue> parentFieldOption = fieldValue.getParent();
    final Optional<Record> parentRecordOption = fieldValue.getParentRecord();
    if (!parentFieldOption.isPresent() || !parentRecordOption.isPresent()) {
        // Reached the root of the record tree — nothing left to merge.
        return;
    }

    final Record parentRecord = parentRecordOption.get();
    // For each field of the parent…
    for (final String field : parentRecord.getSchema().getFieldNames()) {
        // …copy it only if no deeper field already supplied a value
        // (deeper existing fields take priority).
        if (recordToWrite.getValue(field) == null) {
            // setValue is a no-op when the field is absent from the schema.
            recordToWrite.setValue(field, parentRecord.getValue(field));
        }
    }

    // Recurse to the next ancestor.
    recursivelyAddParentFields(recordToWrite, parentFieldOption.get());
}
});
代码示例来源:origin: apache/nifi
final int numCols = record.getSchema().getFieldCount();
for (final String fieldName : record.getSchema().getFieldNames()) {
final String val = record.getAsString(fieldName);
if (val != null) {
if (quoteValues) {
代码示例来源:origin: apache/nifi
final DataType valueDataType = mapDataType.getValueType();
for (final String fieldName : record.getRawFieldNames()) {
final Object fieldValue = record.getValue(fieldName);
if (!isTypeCorrect(fieldValue, valueDataType)) {
return false;
代码示例来源:origin: apache/nifi
Object sql = currentRecord.getValue(sqlField);
if (sql == null || StringUtils.isEmpty((String) sql)) {
throw new MalformedRecordException(format("Record had no (or null) value for Field Containing SQL: %s, FlowFile %s", sqlField, flowFile));
内容来源于网络,如有侵权,请联系作者删除!