Usage of org.apache.nifi.components.PropertyValue.asDataSize(), with code examples

This article collects code examples of the Java method org.apache.nifi.components.PropertyValue.asDataSize() and shows how it is used in practice. The examples were extracted from selected open-source projects on GitHub/Stackoverflow/Maven, so they should serve as solid references. Details of PropertyValue.asDataSize() follow:
Package: org.apache.nifi.components
Class: PropertyValue
Method: asDataSize

About PropertyValue.asDataSize

Interprets the property's value as a data size (for example "10 MB"), converts it to the requested DataUnit, and returns it as a Double; returns null if the property is not set.
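
Before the real-world excerpts, here is a minimal sketch of the usual pattern, assuming a hypothetical MAX_BATCH_SIZE property invented for this illustration: a processor declares a data-size property validated by StandardValidators.DATA_SIZE_VALIDATOR and later reads it back in whatever unit it needs.

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.util.StandardValidators;

public class DataSizeExample {

  // Hypothetical property, used only for illustration; accepts values such as "512 KB" or "10 MB".
  static final PropertyDescriptor MAX_BATCH_SIZE = new PropertyDescriptor.Builder()
      .name("Max Batch Size")
      .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
      .defaultValue("1 MB")
      .required(true)
      .build();

  long maxBatchBytes(final ProcessContext context) {
    // DataUnit uses binary multiples, so the "1 MB" default converts to 1048576 bytes.
    return context.getProperty(MAX_BATCH_SIZE).asDataSize(DataUnit.B).longValue();
  }
}

When a property supports Expression Language, evaluateAttributeExpressions() must be called before asDataSize(), as several of the excerpts below do.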

Code examples

Example source: apache/nifi

@Override
public Double asDataSize(final DataUnit dataUnit) {
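  // Verify that any Expression Language has been evaluated, then delegate to the standard implementation.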
  ensureExpressionsEvaluated();
  return stdPropValue.asDataSize(dataUnit);
}

Example source: apache/nifi

protected void configurePolicy(final ConfigurationContext context, final WebSocketPolicy policy) {
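  // Convert each size property to bytes before applying it to the WebSocket policy.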
  final int inputBufferSize = context.getProperty(INPUT_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
  final int maxTextMessageSize = context.getProperty(MAX_TEXT_MESSAGE_SIZE).asDataSize(DataUnit.B).intValue();
  final int maxBinaryMessageSize = context.getProperty(MAX_BINARY_MESSAGE_SIZE).asDataSize(DataUnit.B).intValue();
  policy.setInputBufferSize(inputBufferSize);
  policy.setMaxTextMessageSize(maxTextMessageSize);
  policy.setMaxBinaryMessageSize(maxBinaryMessageSize);
}

Example source: apache/nifi

private byte[] generateData(final ProcessContext context) {
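  // FILE_SIZE is a data-size property (e.g. "1 KB"); convert it to a byte count.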
  final int byteCount = context.getProperty(FILE_SIZE).asDataSize(DataUnit.B).intValue();
  final Random random = new Random();
  final byte[] array = new byte[byteCount];
  if (context.getProperty(DATA_FORMAT).getValue().equals(DATA_FORMAT_BINARY)) {
    random.nextBytes(array);
  } else {
    for (int i = 0; i < array.length; i++) {
      final int index = random.nextInt(TEXT_CHARS.length);
      array[i] = (byte) TEXT_CHARS[index];
    }
  }
  return array;
}

Example source: apache/nifi

protected PublisherPool createPublisherPool(final ProcessContext context) {
  final int maxMessageSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();
  final long maxAckWaitMillis = context.getProperty(ACK_WAIT_TIME).asTimePeriod(TimeUnit.MILLISECONDS).longValue();
  final Map<String, Object> kafkaProperties = new HashMap<>();
  KafkaProcessorUtils.buildCommonKafkaProperties(context, ProducerConfig.class, kafkaProperties);
  kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
  kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
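  // Mirror the processor's configured size limit in the Kafka producer's max.request.size.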
  kafkaProperties.put("max.request.size", String.valueOf(maxMessageSize));
  return new PublisherPool(kafkaProperties, getLogger(), maxMessageSize, maxAckWaitMillis);
}

Example source: apache/nifi

@OnScheduled
public void onScheduled(final ProcessContext context) {
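  // Convert the configured maximum size to a byte count once, when the processor is scheduled.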
  maxDocumentsSize = context.getProperty(MAX_DOCUMENTS_SIZE).asDataSize(DataUnit.B).longValue();
  super.onScheduled(context);
}

Example source: apache/nifi

@OnScheduled
public void onScheduled(final ProcessContext context) {
  super.onScheduled(context);
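  // Evaluate any Expression Language first, then convert the result to bytes.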
  maxRecordsSize = context.getProperty(MAX_RECORDS_SIZE).evaluateAttributeExpressions().asDataSize(DataUnit.B).longValue();
}

Example source: apache/nifi

protected PublisherPool createPublisherPool(final ProcessContext context) {
  final int maxMessageSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();
  final long maxAckWaitMillis = context.getProperty(ACK_WAIT_TIME).asTimePeriod(TimeUnit.MILLISECONDS).longValue();
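  // Attributes whose names match this optional regex are sent along as message headers.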
  final String attributeNameRegex = context.getProperty(ATTRIBUTE_NAME_REGEX).getValue();
  final Pattern attributeNamePattern = attributeNameRegex == null ? null : Pattern.compile(attributeNameRegex);
  final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();
  final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
  final Charset charset = Charset.forName(charsetName);
  final Map<String, Object> kafkaProperties = new HashMap<>();
  KafkaProcessorUtils.buildCommonKafkaProperties(context, ProducerConfig.class, kafkaProperties);
  kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
  kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
  kafkaProperties.put("max.request.size", String.valueOf(maxMessageSize));
  return new PublisherPool(kafkaProperties, getLogger(), maxMessageSize, maxAckWaitMillis, useTransactions, attributeNamePattern, charset);
}

Example source: apache/nifi

@Override
protected ChannelDispatcher createDispatcher(final ProcessContext context, final BlockingQueue<StandardEvent> events)
    throws IOException {
  final String sendingHost = context.getProperty(SENDING_HOST).evaluateAttributeExpressions().getValue();
  final Integer sendingHostPort = context.getProperty(SENDING_HOST_PORT).evaluateAttributeExpressions().asInteger();
  final Integer bufferSize = context.getProperty(RECV_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
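  // Pre-allocate one receive buffer per concurrent task, each RECV_BUFFER_SIZE bytes.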
  final BlockingQueue<ByteBuffer> bufferPool = createBufferPool(context.getMaxConcurrentTasks(), bufferSize);
  final EventFactory<StandardEvent> eventFactory = new StandardEventFactory();
  return new DatagramChannelDispatcher<>(eventFactory, bufferPool, events, getLogger(), sendingHost, sendingHostPort);
}

Example source: apache/nifi

protected ChannelSender createSender(final ProcessContext context) throws IOException {
  final int port = context.getProperty(PORT).evaluateAttributeExpressions().asInteger();
  final String host = context.getProperty(HOSTNAME).evaluateAttributeExpressions().getValue();
  final String protocol = context.getProperty(PROTOCOL).getValue();
  final int maxSendBuffer = context.getProperty(MAX_SOCKET_SEND_BUFFER_SIZE).evaluateAttributeExpressions().asDataSize(DataUnit.B).intValue();
  final int timeout = context.getProperty(TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS).intValue();
  final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
  return createSender(sslContextService, protocol, host, port, maxSendBuffer, timeout);
}

Example source: apache/nifi

@Override
protected ChannelSender createSender(ProcessContext context) throws IOException {
  final int port = context.getProperty(PORT).evaluateAttributeExpressions().asInteger();
  final String host = context.getProperty(HOSTNAME).evaluateAttributeExpressions().getValue();
  final String protocol = context.getProperty(PROTOCOL).getValue();
  final int timeout = context.getProperty(TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue();
  final int maxSendBuffer = context.getProperty(MAX_SOCKET_SEND_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
  final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
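  // The SSL context service is optional; build an SSLContext only when one is configured.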
  SSLContext sslContext = null;
  if (sslContextService != null) {
    sslContext = sslContextService.createSSLContext(SSLContextService.ClientAuth.REQUIRED);
  }
  return createSender(protocol, host, port, timeout, maxSendBuffer, sslContext);
}

Example source: apache/nifi

@OnScheduled
public void onSchedule(ProcessContext context) {
  this.removeTrailingNewLines = context.getProperty(REMOVE_TRAILING_NEWLINES).isSet()
      ? context.getProperty(REMOVE_TRAILING_NEWLINES).asBoolean() : false;
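  // FRAGMENT_MAX_SIZE is optional; when unset, fall back to an effectively unlimited split size.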
  this.maxSplitSize = context.getProperty(FRAGMENT_MAX_SIZE).isSet()
      ? context.getProperty(FRAGMENT_MAX_SIZE).asDataSize(DataUnit.B).longValue() : Long.MAX_VALUE;
  this.lineCount = context.getProperty(LINE_SPLIT_COUNT).asInteger();
  this.headerLineCount = context.getProperty(HEADER_LINE_COUNT).asInteger();
  this.headerMarker = context.getProperty(HEADER_MARKER).getValue();
}
