org.apache.nifi.components.PropertyValue.asTimePeriod(): usage and code examples


This article collects a number of Java code examples for the org.apache.nifi.components.PropertyValue.asTimePeriod() method and shows how the method is used in practice. The examples are drawn mainly from GitHub, Stack Overflow, Maven, and similar platforms, and were extracted from selected projects, so they should serve as useful references. Details of the PropertyValue.asTimePeriod() method:
Package: org.apache.nifi.components
Class: PropertyValue
Method: asTimePeriod

PropertyValue.asTimePeriod overview

Interprets the property's configured value as a time period expression (for example, "30 sec" or "5 mins") and returns the duration converted to the given TimeUnit as a Long. If the property has no value set, null is returned.
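
A minimal usage sketch is shown below. The property name, default value, and the surrounding class and method are illustrative assumptions, not code taken from the projects referenced later; only the NiFi API calls themselves (PropertyDescriptor.Builder, StandardValidators.TIME_PERIOD_VALIDATOR, getProperty().asTimePeriod()) are real.

import java.util.concurrent.TimeUnit;

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.util.StandardValidators;

public class ConnectTimeoutExample {

  // Hypothetical property; TIME_PERIOD_VALIDATOR restricts values to expressions such as "30 sec" or "5 mins".
  static final PropertyDescriptor CONNECT_TIMEOUT = new PropertyDescriptor.Builder()
      .name("connect-timeout")
      .displayName("Connect Timeout")
      .description("How long to wait for a connection before giving up.")
      .required(true)
      .defaultValue("30 sec")
      .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
      .build();

  void configure(final ProcessContext context) {
    // Convert the configured time period to milliseconds.
    final Long timeoutMillis = context.getProperty(CONNECT_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS);
    // asTimePeriod() returns null when the property has no value, so guard before unboxing.
    final long effectiveTimeoutMillis = (timeoutMillis == null) ? 30_000L : timeoutMillis;
    System.out.println("Connect timeout: " + effectiveTimeoutMillis + " ms");
  }
}

Several of the examples below follow the same null-handling pattern, substituting a default such as 0 or Long.MAX_VALUE when an optional property is unset.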

Code examples

Code example source: apache/nifi

private void cleanupInactiveStates() {
  final Long inactiveTimeout = processContext.getProperty(INACTIVE_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS);
  final List<String> inactiveGroups = groupStates.keySet().stream()
      .filter(k -> k.endsWith(STATE_SUFFIX_UPDATED_AT) && (now - Long.parseLong(groupStates.get(k)) > inactiveTimeout))
      .map(k -> k.substring(0, k.length() - STATE_SUFFIX_UPDATED_AT.length()))
      .collect(Collectors.toList());
  inactiveGroups.forEach(groupId -> {
    groupStates.remove(STATE_TARGET_ORDER.apply(groupId));
    groupStates.remove(STATE_UPDATED_AT.apply(groupId));
    groupStates.remove(STATE_MAX_ORDER.apply(groupId));
  });
}

Code example source: apache/nifi

@Override
public Long asTimePeriod(final TimeUnit timeUnit) {
  ensureExpressionsEvaluated();
  return stdPropValue.asTimePeriod(timeUnit);
}

Code example source: apache/nifi

private Set<String> findOldFlowFileIds(final ProcessContext ctx) {
  final Set<String> old = new HashSet<>();
  final long expiryMillis = ctx.getProperty(MAX_UNCONFIRMED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);
  final long cutoffTime = System.currentTimeMillis() - expiryMillis;
  for (final Map.Entry<String, FlowFileEntryTimeWrapper> entry : flowFileMap.entrySet()) {
    final FlowFileEntryTimeWrapper wrapper = entry.getValue();
    if (wrapper != null && wrapper.getEntryTime() < cutoffTime) {
      old.add(entry.getKey());
    }
  }
  return old;
}

Code example source: apache/nifi

protected PublisherPool createPublisherPool(final ProcessContext context) {
  final int maxMessageSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();
  final long maxAckWaitMillis = context.getProperty(ACK_WAIT_TIME).asTimePeriod(TimeUnit.MILLISECONDS).longValue();
  final Map<String, Object> kafkaProperties = new HashMap<>();
  KafkaProcessorUtils.buildCommonKafkaProperties(context, ProducerConfig.class, kafkaProperties);
  kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
  kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
  kafkaProperties.put("max.request.size", String.valueOf(maxMessageSize));
  return new PublisherPool(kafkaProperties, getLogger(), maxMessageSize, maxAckWaitMillis);
}

Code example source: apache/nifi

@OnScheduled
public void schedule(ProcessContext context) {
  this.deadlockTimeout = context.getProperty(KAFKA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS) * 2;
}

Code example source: apache/nifi

@Override
@OnScheduled
public void onScheduled(ProcessContext context) throws IOException {
  super.onScheduled(context);
  this.pollTimeout = context.getProperty(POLL_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS);
}

Code example source: apache/nifi

@Override
protected Collection<ValidationResult> customValidate(ValidationContext context) {
  final List<ValidationResult> problems = new ArrayList<>(super.customValidate(context));
  final Long minAgeProp = context.getProperty(MIN_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
  final Long maxAgeProp = context.getProperty(MAX_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
  final long minimumAge = (minAgeProp == null) ? 0L : minAgeProp;
  final long maximumAge = (maxAgeProp == null) ? Long.MAX_VALUE : maxAgeProp;
  if (minimumAge > maximumAge) {
    problems.add(new ValidationResult.Builder().valid(false).subject("GetHDFS Configuration")
        .explanation(MIN_AGE.getName() + " cannot be greater than " + MAX_AGE.getName()).build());
  }
  return problems;
}

Code example source: apache/nifi

@OnEnabled
public void onEnabled(final ConfigurationContext context) {
  this.redisConnectionPool = context.getProperty(REDIS_CONNECTION_POOL).asControllerService(RedisConnectionPool.class);
  this.ttl = context.getProperty(TTL).asTimePeriod(TimeUnit.SECONDS);
  if (ttl == 0) {
    this.ttl = -1L;
  }
}

Code example source: apache/nifi

ProcessorConfiguration(final ProcessContext context) {
  ignoreDottedFiles = context.getProperty(IGNORE_DOTTED_FILES).asBoolean();
  final String fileFilterRegex = context.getProperty(FILE_FILTER_REGEX).getValue();
  fileFilterPattern = (fileFilterRegex == null) ? null : Pattern.compile(fileFilterRegex);
  filterMatchBasenameOnly = context.getProperty(FILTER_MATCH_NAME_ONLY).asBoolean();
  final Long minAgeProp = context.getProperty(MIN_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
  minimumAge = (minAgeProp == null) ? 0L : minAgeProp;
  final Long maxAgeProp = context.getProperty(MAX_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
  maximumAge = (maxAgeProp == null) ? Long.MAX_VALUE : maxAgeProp;
  recurseSubdirs = context.getProperty(RECURSE_SUBDIRS).asBoolean();
}

Code example source: apache/nifi

@Override
public synchronized void init(final StateProviderInitializationContext context) {
  connectionString = context.getProperty(CONNECTION_STRING).getValue();
  rootNode = context.getProperty(ROOT_NODE).getValue();
  timeoutMillis = context.getProperty(SESSION_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue();
  if (context.getProperty(ACCESS_CONTROL).getValue().equalsIgnoreCase(CREATOR_ONLY.getValue())) {
    acl = Ids.CREATOR_ALL_ACL;
  } else {
    acl = Ids.OPEN_ACL_UNSAFE;
  }
}

Code example source: apache/nifi

@OnScheduled
public void onScheduled(final ProcessContext context) {
  rateControlCriteria = context.getProperty(RATE_CONTROL_CRITERIA).getValue().toLowerCase();
  rateControlAttribute = context.getProperty(RATE_CONTROL_ATTRIBUTE_NAME).getValue();
  maximumRateStr = context.getProperty(MAX_RATE).getValue().toUpperCase();
  groupingAttributeName = context.getProperty(GROUPING_ATTRIBUTE_NAME).getValue();
  timePeriodSeconds = context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.SECONDS).intValue();
}

Code example source: apache/nifi

private OrderingContext(final ProcessContext processContext, final ProcessSession processSession) {
  this.processContext = processContext;
  this.processSession = processSession;
  orderAttribute = processContext.getProperty(ORDER_ATTRIBUTE).getValue();
  waitTimeoutMillis = processContext.getProperty(WAIT_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS);
  getOrder = flowFile -> Integer.parseInt(flowFile.getAttribute(orderAttribute));
  groupIdentifierProperty = processContext.getProperty(GROUP_IDENTIFIER);
  initOrderProperty = processContext.getProperty(INITIAL_ORDER);
  maxOrderProperty = processContext.getProperty(MAX_ORDER);
}

Code example source: apache/nifi

@Override
public Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
  final List<ValidationResult> problems = new ArrayList<>(super.customValidate(validationContext));
  final int visibilityTimeout = validationContext.getProperty(VISIBILITY_TIMEOUT).asTimePeriod(TimeUnit.SECONDS).intValue();
  if (visibilityTimeout <= 0) {
    problems.add(new ValidationResult.Builder()
                     .valid(false)
                     .subject(VISIBILITY_TIMEOUT.getDisplayName())
                     .explanation(VISIBILITY_TIMEOUT.getDisplayName() + " should be greater than 0 secs")
                     .build());
  }
  AzureStorageUtils.validateProxySpec(validationContext, problems);
  return problems;
}

Code example source: apache/nifi

@Override
public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFactory) throws ProcessException {
  final Boolean rollbackOnFailure = context.getProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE).asBoolean();
  final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.SECONDS).intValue();
  final FunctionContext functionContext = new FunctionContext(rollbackOnFailure, queryTimeout);
  RollbackOnFailure.onTrigger(context, sessionFactory, functionContext, getLogger(), session -> process.onTrigger(context, session, functionContext));
}

Code example source: apache/nifi

@OnUnscheduled
public void onUnscheduled(final ProcessContext context) throws InterruptedException {
  sleep(context.getProperty(ON_UNSCHEDULED_SLEEP_TIME).asTimePeriod(TimeUnit.MILLISECONDS),
    context.getProperty(IGNORE_INTERRUPTS).asBoolean());
  fail(context.getProperty(ON_UNSCHEDULED_FAIL).asBoolean(), OnUnscheduled.class);
}

Code example source: apache/nifi

/**
 * Will create an instance of {@link JMSConsumer}
 */
@Override
protected JMSConsumer finishBuildingJmsWorker(CachingConnectionFactory connectionFactory, JmsTemplate jmsTemplate, ProcessContext processContext) {
  int ackMode = processContext.getProperty(ACKNOWLEDGEMENT_MODE).asInteger();
  jmsTemplate.setSessionAcknowledgeMode(ackMode);
  long timeout = processContext.getProperty(TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS);
  jmsTemplate.setReceiveTimeout(timeout);
  return new JMSConsumer(connectionFactory, jmsTemplate, this.getLogger());
}

Code example source: apache/nifi

protected ChannelSender createSender(final ProcessContext context) throws IOException {
  final int port = context.getProperty(PORT).evaluateAttributeExpressions().asInteger();
  final String host = context.getProperty(HOSTNAME).evaluateAttributeExpressions().getValue();
  final String protocol = context.getProperty(PROTOCOL).getValue();
  final int maxSendBuffer = context.getProperty(MAX_SOCKET_SEND_BUFFER_SIZE).evaluateAttributeExpressions().asDataSize(DataUnit.B).intValue();
  final int timeout = context.getProperty(TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS).intValue();
  final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
  return createSender(sslContextService, protocol, host, port, maxSendBuffer, timeout);
}

Code example source: apache/nifi

@OnStopped
public void onStopped(final ProcessContext context) throws InterruptedException {
  sleep(context.getProperty(ON_STOPPED_SLEEP_TIME).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS),
    context.getProperty(IGNORE_INTERRUPTS).asBoolean());
  fail(context.getProperty(ON_STOPPED_FAIL).asBoolean(), OnStopped.class);
}
