Usage and code examples of the scala.collection.JavaConverters.seqAsJavaList() method


This article collects code examples of the scala.collection.JavaConverters.seqAsJavaList() method as called from Java, showing how JavaConverters.seqAsJavaList() is used in practice. The examples are taken from selected open-source projects on GitHub, Stack Overflow, and Maven, and should be a useful reference. Details of the JavaConverters.seqAsJavaList() method are as follows:
Package path: scala.collection.JavaConverters
Class name: JavaConverters
Method name: seqAsJavaList

About JavaConverters.seqAsJavaList

seqAsJavaList(seq) wraps a Scala Seq<A> as a java.util.List<A> view, so the elements of a Scala sequence can be consumed by Java APIs without copying them into a new collection.
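A minimal, self-contained sketch of calling it from Java (the demo class and variable names are illustrative; in real code the Seq normally comes back from a Scala API, as in the examples below):

import java.util.Arrays;
import java.util.List;
import scala.collection.JavaConverters;
import scala.collection.Seq;

public class SeqAsJavaListDemo {
  public static void main(String[] args) {
    // Build a Scala Seq from Java just for this demo; normally the Seq is
    // returned by a Scala API (e.g. zkUtils.getAllTopics() in the examples below).
    Seq<String> scalaSeq =
        JavaConverters.asScalaBuffer(Arrays.asList("a", "b", "c")).toSeq();

    // seqAsJavaList wraps the Seq as a java.util.List view of the same elements.
    List<String> javaList = JavaConverters.seqAsJavaList(scalaSeq);
    javaList.forEach(System.out::println); // prints a, b, c
  }
}

Note that these examples target the Scala 2.11/2.12 API; in Scala 2.13 the JavaConverters object is deprecated, and the Java-facing replacement is scala.jdk.javaapi.CollectionConverters.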

Code examples

Code example source: dstl/baleen

/**
  * Attempt to create {@link Event}s from the given {@link Mention}s
  *
  * @param mentions to extract events from
  */
 public void create(Seq<Mention> mentions) {
  JavaConverters.seqAsJavaList(mentions).forEach(this::create);
 }
}

Code example source: pinterest/doctorkafka

/**
 * Returns the list of brokers that do not have stats.
 */
public List<Broker> getNoStatsBrokers() {
 Seq<Broker> brokerSeq = zkUtils.getAllBrokersInCluster();
 List<Broker> brokers = scala.collection.JavaConverters.seqAsJavaList(brokerSeq);
 List<Broker> noStatsBrokers = new ArrayList<>();
 brokers.stream().forEach(broker -> {
  if (kafkaCluster.getBroker(broker.id()) == null) {
   noStatsBrokers.add(broker);
  }
 });
 return noStatsBrokers;
}

Code example source: open-korean-text/elasticsearch-analysis-openkoreantext

@Override
public final boolean incrementToken() throws IOException {
  clearAttributes();
  if (this.preparedTokens == null) {
    this.preparedTokens = JavaConverters.seqAsJavaList(prepareKoreanTokens());
  }
  if (this.preparedTokens == null || this.preparedTokens.isEmpty() || tokenIndex >= this.preparedTokens.size()) {
    return false;
  }
  setAttributes(this.preparedTokens.get(tokenIndex++));
  return true;
}

Code example source: open-korean-text/elasticsearch-analysis-openkoreantext

@Override
  protected Seq<KoreanToken> perform(Seq<KoreanToken> tokens) {
    List<KoreanToken> performed = new ArrayList<>();
    for(KoreanToken token : JavaConverters.seqAsJavaList(tokens)) {
      if(redundantTypes.contains(token.pos().toString())){
        continue;
      }
      if(redundantTerms.contains(token.text())){
        continue;
      }
      performed.add(token);
    }

    return JavaConverters.asScalaBuffer(performed).toSeq();
  }
}

Code example source: open-korean-text/elasticsearch-analysis-openkoreantext

@Override
public final boolean incrementToken() throws IOException {
  clearAttributes();
  if(input instanceof KoreanTokenPrepareable) {
    if(preparedTokens == null) {
      this.preparedTokens = JavaConverters.seqAsJavaList(prepareKoreanTokens());
    }
    if (this.preparedTokens == null || this.preparedTokens.isEmpty() || tokenIndex >= this.preparedTokens.size()) {
      return false;
    }
    setAttributes(this.preparedTokens.get(tokenIndex++));
    return true;
  } else {
    return input.incrementToken();
  }
}

Code example source: pinterest/doctorkafka

/**
 * Get the replica assignment for a given topic partition. This information should be retrieved
 * from zookeeper as topic metadata that we get from kafkaConsumer.listTopic() does not specify
 * the preferred leader for topic partitions.
 *
 * @param tp  topic partition
 * @return the list of brokers that host the replica
 */
private List<Integer> getReplicaAssignment(TopicPartition tp) {
 scala.collection.Map<Object, Seq<Object>> replicaAssignmentMap =
   getReplicaAssignmentForTopic(zkUtils, tp.topic());
 scala.Option<Seq<Object>> replicasOption = replicaAssignmentMap.get(tp.partition());
 Seq<Object> replicas = replicasOption.get();
 List<Object> replicasList = scala.collection.JavaConverters.seqAsJavaList(replicas);
 return replicasList.stream().map(obj -> (Integer) obj).collect(Collectors.toList());
}

Code example source: pinterest/doctorkafka

public static void main(String[] args) throws Exception {
  CommandLine commandLine = parseCommandLine(args);
  String zookeeper = commandLine.getOptionValue(ZOOKEEPER);

  ZkUtils zkUtils = KafkaUtils.getZkUtils(zookeeper);
  Seq<String> topicsSeq = zkUtils.getAllTopics();
  List<String> topics = scala.collection.JavaConverters.seqAsJavaList(topicsSeq);

  scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>>
    partitionAssignments = zkUtils.getPartitionAssignmentForTopics(topicsSeq);

  Map<String, Integer> replicationFactors = new HashMap<>();
  Map<String, Integer> partitionCounts = new HashMap<>();

  topics.stream().forEach(topic -> {
   int partitionCount = partitionAssignments.get(topic).get().size();
   int factor = partitionAssignments.get(topic).get().head()._2().size();
   partitionCounts.put(topic, partitionCount);
   replicationFactors.put(topic, factor);
  });

  List<PartitionInfo> urps = KafkaClusterManager.getUnderReplicatedPartitions(
    zookeeper, SecurityProtocol.PLAINTEXT, null, topics, partitionAssignments, replicationFactors, partitionCounts);

  for (PartitionInfo partitionInfo : urps) {
   LOG.info("under-replicated : {}", partitionInfo);
  }
 }
}

Code example source: kframework/k

private static Production resolve(Production prod) {
  if (prod.klabel().isDefined() && prod.klabel().get().equals(KLabels.GENERATED_TOP_CELL)) {
    List<Integer> cellPositions = new ArrayList<Integer>();
    int i = 1;
    for (ProductionItem p: JavaConverters.seqAsJavaList(prod.items())) {
      if (p instanceof NonTerminal) {
        NonTerminal nt = (NonTerminal) p;
        if (! nt.sort().equals(Sorts.GeneratedCounterCell())) {
          cellPositions.add(i);
        }
      }
      i++;
    }
    StringBuilder format = new StringBuilder();
    if (cellPositions.size() == 1) {
      format.append("%").append(cellPositions.get(0));
    } else {
      format.append("%1%i");
      int j;
      for (j = 0; j < cellPositions.size(); j++) {
        format.append("%n%").append(cellPositions.get(j));
      }
      format.append("%d%n%").append(cellPositions.get(j - 1) + 1);
    }
    return Production(prod.klabel(), prod.sort(), prod.items(), prod.att().add("format", format.toString()));
  }
  return prod;
}

Code example source: pinterest/doctorkafka

if (noLeaderFlags[partitionId]) {
 Seq<Object> seq = partitionAssignments.get(topic).get().get(partitionId).get();
 Node[] nodes = JavaConverters.seqAsJavaList(seq).stream()
   .map(val -> new Node((Integer) val, "", -1)).toArray(Node[]::new);
 PartitionInfo partitionInfo =

Code example source: dstl/baleen

e.setValue(eventMention.text());
List<String> labels = JavaConverters.seqAsJavaList(eventMention.labels());
e.setEventType(new StringArray(jCas, labels.size()));
for (int i = 0; i < labels.size(); i++) {
  .forEach(
    (k, v) ->
      JavaConverters.seqAsJavaList(v)
        .forEach(
          m ->

Code example source: pinterest/doctorkafka

List<String> topics = scala.collection.JavaConverters.seqAsJavaList(topicsSeq);
scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>>
  partitionAssignments = zkUtils.getPartitionAssignmentForTopics(topicsSeq);
