Usage of the org.apache.kafka.streams.kstream.Window.end() method, with code examples

x33g5p2x · reposted on 2022-02-03 · category: Other

This article collects code examples of the Java method org.apache.kafka.streams.kstream.Window.end() and shows how Window.end() is used in practice. The examples are drawn from selected open-source projects hosted on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as useful references. Details of the Window.end() method:
Package path: org.apache.kafka.streams.kstream.Window
Class name: Window
Method name: end

Window.end overview

Return the end timestamp of this window.
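Before the collected examples, here is a minimal usage sketch (the topic name "input", the one-minute tumbling window, and the class name WindowEndExample are assumptions for illustration, not taken from any of the projects below): after a windowed aggregation, each record key is a Windowed<K>, and key.window().end() returns the window's end timestamp in epoch milliseconds.

import java.time.Duration;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.Windowed;

public class WindowEndExample {
  public static void main(String[] args) {
    StreamsBuilder builder = new StreamsBuilder();
    builder.<String, String>stream("input")                  // assumed input topic name
        .groupByKey()
        .windowedBy(TimeWindows.of(Duration.ofMinutes(1)))   // assumed 1-minute tumbling windows
        .count()
        .toStream()
        .foreach((Windowed<String> windowedKey, Long count) ->
            System.out.printf("key=%s start=%d end=%d count=%d%n",
                windowedKey.key(),
                windowedKey.window().start(),  // window start, epoch milliseconds
                windowedKey.window().end(),    // window end, epoch milliseconds
                count));
    // Building and running the KafkaStreams instance is omitted from this sketch.
  }
}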

Code examples

Code example source: confluentinc/kafka-streams-examples

.map((key, value) -> new KeyValue<>(key.key() + "@" + key.window().start() + "->" + key.window().end(), value))

Code example source: simplesteph/medium-blog-kafka-udemy

// Keep only a window that is still open now: its end timestamp lies
// within the next advanceMs milliseconds (i.e. the most recent hopping window).
private boolean keepCurrentWindow(Windowed<String> window, long advanceMs) {
  long now = System.currentTimeMillis();
  return window.window().end() > now &&
      window.window().end() < now + advanceMs;
}

Code example source: org.apache.kafka/kafka-streams

@Override
public long time(final ProcessorContext context, final K key) {
  return key.window().end();
}

Code example source: org.apache.kafka/kafka-streams

@Override
public String toString() {
  return "[" + key + "@" + window.start() + "/" + window.end() + "]";
}

Code example source: org.apache.kafka/kafka-streams

public static byte[] toBinary(final Windowed<Bytes> sessionKey) {
  // Session keys are serialized as: raw key bytes, then the window end
  // timestamp, then the window start timestamp (8 bytes each).
  final byte[] bytes = sessionKey.key().get();
  final ByteBuffer buf = ByteBuffer.allocate(bytes.length + 2 * TIMESTAMP_SIZE);
  buf.put(bytes);
  buf.putLong(sessionKey.window().end());
  buf.putLong(sessionKey.window().start());
  return buf.array();
}

Code example source: org.apache.kafka/kafka-streams

public static <K> byte[] toBinary(final Windowed<K> sessionKey,
                 final Serializer<K> serializer,
                 final String topic) {
  final byte[] bytes = serializer.serialize(topic, sessionKey.key());
  final ByteBuffer buf = ByteBuffer.allocate(bytes.length + 2 * TIMESTAMP_SIZE);
  buf.put(bytes);
  buf.putLong(sessionKey.window().end());
  buf.putLong(sessionKey.window().start());
  return buf.array();
}

Code example source: spring-cloud/spring-cloud-stream-samples

@StreamListener("input")
  @SendTo({"output1","output2","output3"})
  @SuppressWarnings("unchecked")
  public KStream<?, WordCount>[] process(KStream<Object, String> input) {
    Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
    Predicate<Object, WordCount> isFrench =  (k, v) -> v.word.equals("french");
    Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");
    return input
        .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
        .groupBy((key, value) -> value)
        .windowedBy(timeWindows)
        .count(Materialized.as("WordCounts-1"))
        .toStream()
        .map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))))
        .branch(isEnglish, isFrench, isSpanish);
  }
}

Code example source: spring-cloud/spring-cloud-stream-samples

@StreamListener("binding2")
@SendTo("singleOutput")
public KStream<?, WordCount> process(KStream<Object, String> input) {
  return input
      .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
      .map((key, value) -> new KeyValue<>(value, value))
      .groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
      .windowedBy(timeWindows)
      .count(Materialized.as("WordCounts-1"))
      .toStream()
      .map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
}

Code example source: spring-cloud/spring-cloud-stream-samples

@StreamListener("input")
  @SendTo("output")
  public KStream<?, WordCount> process(KStream<Object, String> input) {
    return input
        .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
        .map((key, value) -> new KeyValue<>(value, value))
        .groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.of(5000))
        .count(Materialized.as("WordCounts-1"))
        .toStream()
        .map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
  }
}

Code example source: spring-cloud/spring-cloud-stream-samples

@StreamListener("input")
  @SendTo("output")
  public KStream<?, WordCount> process(KStream<Object, String> input) {
    return input
        .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
        .map((key, value) -> new KeyValue<>(value, value))
        .groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.of(30000))
        .count(Materialized.as("WordCounts-1"))
        .toStream()
        .map((key, value) -> new KeyValue<>(null, new WordCount(key.key(), value, new Date(key.window().start()), new Date(key.window().end()))));
  }
}

Code example source: org.apache.kafka/kafka-streams

private AGG fetchPrevious(final Bytes rawKey, final Window window) {
  // Look up any existing session aggregate for this key within the window's
  // [start, end] range; return null if none is found.
  try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator = bytesStore.findSessions(rawKey, window.start(), window.end())) {
    if (!iterator.hasNext()) {
      return null;
    }
    return serdes.valueFrom(iterator.next().value);
  }
}

Code example source: spring-cloud/spring-cloud-stream-samples

@StreamListener("input")
@SendTo("output")
public KStream<Integer, ProductStatus> process(KStream<Object, Product> input) {
  return input
      .filter((key, product) -> productIds().contains(product.getId()))
      .map((key, value) -> new KeyValue<>(value, value))
      .groupByKey(Serialized.with(new JsonSerde<>(Product.class), new JsonSerde<>(Product.class)))
      .windowedBy(timeWindows)
      .count(Materialized.as("product-counts"))
      .toStream()
      .map((key, value) -> new KeyValue<>(key.key().id, new ProductStatus(key.key().id,
          value, Instant.ofEpochMilli(key.window().start()).atZone(ZoneId.systemDefault()).toLocalTime(),
          Instant.ofEpochMilli(key.window().end()).atZone(ZoneId.systemDefault()).toLocalTime())));
}

Code example source: habren/KafkaExample

.toStream()
.map((Windowed<String> window, Long value) -> {
  return new KeyValue<String, String>(window.key(),
      String.format("key=%s, value=%s, start=%d, end=%d\n",
          window.key(), value, window.window().start(), window.window().end()));
});
kStream.to(Serdes.String(), Serdes.String(), "count");

Code example source: org.apache.kafka/kafka-streams

@Override
  public boolean hasNext(final KeyValueIterator<Bytes, ?> iterator) {
    while (iterator.hasNext()) {
      final Bytes bytes = iterator.peekNextKey();
      final Windowed<Bytes> windowedKey = SessionKeySchema.from(bytes);
      // A candidate matches when its key falls inside the key range and its
      // session window overlaps [from, to]: end() >= from and start() <= to.
      if ((binaryKeyFrom == null || windowedKey.key().compareTo(binaryKeyFrom) >= 0)
        && (binaryKeyTo == null || windowedKey.key().compareTo(binaryKeyTo) <= 0)
        && windowedKey.window().end() >= from
        && windowedKey.window().start() <= to) {
        return true;
      }
      iterator.next();
    }
    return false;
  }
};
