Usage of the scala.collection.JavaConverters class, with code examples


This article collects Java code examples that use the scala.collection.JavaConverters class and shows how the class is used in practice. The examples are drawn from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the JavaConverters class:
Full class path: scala.collection.JavaConverters
Class name: JavaConverters

About JavaConverters

JavaConverters is an object in the Scala standard library that provides decorator-based conversions between Scala and Java collections. Calling asScala on a Java collection, or asJava on a Scala collection, returns a lightweight wrapper over the same underlying data rather than a copy. (In Scala 2.13 the object is deprecated in favor of scala.jdk.CollectionConverters.)
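
Before the project examples below, here is a minimal sketch of the typical round trip as called from Java code. The converter methods are from the Scala 2.11/2.12 standard library; the demo class name and sample data are illustrative only:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import scala.collection.JavaConverters;

public class JavaConvertersDemo {

 public static void main(String[] args) {
  // Java List -> Scala Buffer (a wrapper over the same underlying list)
  List<String> javaList = Arrays.asList("a", "b", "c");
  scala.collection.mutable.Buffer<String> scalaBuffer =
    JavaConverters.asScalaBufferConverter(javaList).asScala();

  // Scala Buffer -> java.util.List view of the same elements
  List<String> backToJava = JavaConverters.bufferAsJavaListConverter(scalaBuffer).asJava();

  // Java Map -> Scala mutable Map, and back again
  Map<String, Integer> javaMap = new HashMap<>();
  javaMap.put("answer", 42);
  scala.collection.mutable.Map<String, Integer> scalaMap =
    JavaConverters.mapAsScalaMapConverter(javaMap).asScala();
  Map<String, Integer> javaMapAgain = JavaConverters.mapAsJavaMapConverter(scalaMap).asJava();

  System.out.println(backToJava + " " + javaMapAgain);
 }
}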

Code examples

Code example origin: apache/ignite

/**
 * Util method that checks that input schema contains only one double type.
 *
 * @param mdl Pipeline model.
 * @return Name of output field.
 */
private String checkAndGetOutputSchema(PipelineModel mdl) {
  Transformer lastTransformer = mdl.transformers().last();
  StructType outputSchema = lastTransformer.outputSchema();
  List<StructField> output = new ArrayList<>(JavaConverters.seqAsJavaListConverter(outputSchema.fields()).asJava());
  if (output.size() != 1)
    throw new IllegalArgumentException("Parser supports only scalar outputs");
  return output.get(0).name();
}

Code example origin: twosigma/beakerx

@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
 HashMap<String, Object> o = new HashMap<String, Object>();
 try {
  logger.debug("using custom map deserializer");
  Iterator<Map.Entry<String, JsonNode>> e = n.fields();
  while (e.hasNext()) {
   Map.Entry<String, JsonNode> ee = e.next();
   o.put(ee.getKey(), parent.deserialize(ee.getValue(), mapper));
  }
 } catch (Exception e) {
  logger.error("exception deserializing Map {}", e.getMessage());
  o = null;
 }
 if (o != null)
  return JavaConverters.mapAsScalaMapConverter(o).asScala().toMap(Predef.<Tuple2<String, Object>>conforms());
 return null;
}

Code example origin: com.typesafe.play/play

/**
 * Converts a Scala Map to Java.
 *
 * @param scalaMap the scala map.
 * @param <K>      key type
 * @param <V>      value type
 * @return the java map.
 */
public static <K, V> java.util.Map<K, V> asJava(scala.collection.Map<K, V> scalaMap) {
  return scala.collection.JavaConverters.mapAsJavaMapConverter(scalaMap).asJava();
}

Code example origin: twosigma/beakerx

private static Object asJava(Object scalaObject) {
 if (scalaObject instanceof scala.collection.Seq) {
  List objects = new ArrayList(Arrays.asList(
      JavaConverters.asJavaCollectionConverter((Seq<?>) scalaObject).asJavaCollection()));
  return objects.stream().map(Scala::asJava).collect(Collectors.toList());
 } else if (scalaObject instanceof scala.collection.immutable.Map) {
  @SuppressWarnings("unchecked")
  scala.collection.immutable.Map<Object, Object> map = (scala.collection.immutable.Map<Object, Object>) scalaObject;
  Map<Object, Object> objects = new HashMap<>(JavaConverters.mapAsJavaMapConverter(map).asJava());
  return objects.entrySet().stream()
      .collect(Collectors.toMap(incomingMap -> asJava(incomingMap.getKey()), incomingMap -> asJava(incomingMap.getValue())));
 }
 return scalaObject;
}

Code example origin: apache/samza

/**
 * Get the oldest offset for each changelog SSP based on the stream's metadata (obtained from streamMetadataCache).
 */
private void getOldestChangeLogOffsets() {
 Map<SystemStream, SystemStreamMetadata> changeLogMetadata = JavaConverters.mapAsJavaMapConverter(
   streamMetadataCache.getStreamMetadata(
     JavaConverters.asScalaSetConverter(new HashSet<>(changelogSystemStreams.values())).asScala().toSet(),
     false)).asJava();
 LOG.info("Got change log stream metadata: {}", changeLogMetadata);
 changeLogOldestOffsets =
   getChangeLogOldestOffsetsForPartition(taskModel.getChangelogPartition(), changeLogMetadata);
 LOG.info("Assigning oldest change log offsets for taskName {} : {}", taskModel.getTaskName(),
   changeLogOldestOffsets);
}

Code example origin: org.apache.spark/spark-core_2.11 (identical examples also appear in spark-core_2.10 and spark-core)

@Test
public void combineByKey() {
 JavaRDD<Integer> originalRDD = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6));
 Function<Integer, Integer> keyFunction = v1 -> v1 % 3;
 Function<Integer, Integer> createCombinerFunction = v1 -> v1;
 Function2<Integer, Integer, Integer> mergeValueFunction = (v1, v2) -> v1 + v2;
 JavaPairRDD<Integer, Integer> combinedRDD = originalRDD.keyBy(keyFunction)
  .combineByKey(createCombinerFunction, mergeValueFunction, mergeValueFunction);
 Map<Integer, Integer> results = combinedRDD.collectAsMap();
 ImmutableMap<Integer, Integer> expected = ImmutableMap.of(0, 9, 1, 5, 2, 7);
 assertEquals(expected, results);
 Partitioner defaultPartitioner = Partitioner.defaultPartitioner(
  combinedRDD.rdd(),
  JavaConverters.collectionAsScalaIterableConverter(
   Collections.<RDD<?>>emptyList()).asScala().toSeq());
 combinedRDD = originalRDD.keyBy(keyFunction)
  .combineByKey(
   createCombinerFunction,
   mergeValueFunction,
   mergeValueFunction,
   defaultPartitioner,
   false,
   new KryoSerializer(new SparkConf()));
 results = combinedRDD.collectAsMap();
 assertEquals(expected, results);
}

Code example origin: gnuhpc/Kafka-zk-restapi

private List<TopicAndPartition> getTopicPartitions(String t) {
 List<TopicAndPartition> tpList = new ArrayList<>();
 List<String> l = Arrays.asList(t);
 java.util.Map<String, Seq<Object>> tpMap =
   JavaConverters.mapAsJavaMapConverter(
       zkUtils.getPartitionsForTopics(
         JavaConverters.asScalaIteratorConverter(l.iterator()).asScala().toSeq()))
     .asJava();
 if (tpMap != null) {
  ArrayList<Object> partitionLists =
    new ArrayList<>(JavaConverters.seqAsJavaListConverter(tpMap.get(t)).asJava());
  tpList =
    partitionLists.stream().map(p -> new TopicAndPartition(t, (Integer) p)).collect(toList());
 }
 return tpList;
}

Code example origin: com.typesafe.play/play_2.12 (an identical example also appears in play_2.11)

/**
 * Converts a Java List to Scala Seq.
 *
 * @param list    the java list.
 * @return the converted Seq.
 * @param <T> the element type.
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
  return scala.collection.JavaConverters.asScalaBufferConverter(list).asScala();
}

Code example origin: com.typesafe.play/play_2.10

/**
 * Retrieves the set of keys available in this configuration.
 *
 * @return the set of keys available in this configuration
 */
public Set<String> keys() {
  return JavaConverters.setAsJavaSetConverter(conf.keys()).asJava();
}

Code example origin: uber/marmaray

public static <T> Set<T> toScalaSet(@NonNull final java.util.Set<T> javaSet) {
  return JavaConverters.asScalaSetConverter(javaSet).asScala().<T>toSet();
}

Code example origin: apache/samza

/**
 * Gets the metadata for all the specified system streams from the provided metadata cache.
 * Handles scala-java conversions.
 *
 * @param streamsToMonitor  the set of system streams for which the metadata is needed.
 * @param metadataCache     the metadata cache which will be used to fetch metadata.
 * @return                  a map from each system stream to its metadata.
 */
private static Map<SystemStream, SystemStreamMetadata> getMetadata(Set<SystemStream> streamsToMonitor,
  StreamMetadataCache metadataCache) {
 return JavaConverters
   .mapAsJavaMapConverter(
     metadataCache.getStreamMetadata(
       JavaConverters.asScalaSetConverter(streamsToMonitor).asScala().toSet(),
       true
     )
   ).asJava();
}

Code example origin: com.typesafe.play/play_2.10

/**
 * Retrieves the set of direct sub-keys available in this configuration.
 *
 * @return the set of direct sub-keys available in this configuration
 */
public Set<String> subKeys() {
  return JavaConverters.setAsJavaSetConverter(conf.subKeys()).asJava();
}

Code example origin: uber/hudi

public static Set<String> toScalaSet(HashSet<String> s) {
 return JavaConverters.asScalaSetConverter(s).asScala().toSet();
}

Code example origin: apache/flume

/**
 * Generates the Kafka bootstrap connection string from the metadata stored in Zookeeper.
 * Allows for backwards compatibility of the zookeeperConnect configuration.
 */
private String lookupBootstrap(String zookeeperConnect, SecurityProtocol securityProtocol) {
 try (KafkaZkClient zkClient = KafkaZkClient.apply(zookeeperConnect,
     JaasUtils.isZkSecurityEnabled(), ZK_SESSION_TIMEOUT, ZK_CONNECTION_TIMEOUT, 10,
     Time.SYSTEM, "kafka.server", "SessionExpireListener")) {
  List<Broker> brokerList =
      JavaConverters.seqAsJavaListConverter(zkClient.getAllBrokersInCluster()).asJava();
  List<BrokerEndPoint> endPoints = brokerList.stream()
      .map(broker -> broker.brokerEndPoint(
        ListenerName.forSecurityProtocol(securityProtocol))
      )
      .collect(Collectors.toList());
  List<String> connections = new ArrayList<>();
  for (BrokerEndPoint endPoint : endPoints) {
   connections.add(endPoint.connectionString());
  }
  return StringUtils.join(connections, ',');
 }
}

Code example origin: twosigma/beakerx

@SuppressWarnings("unchecked")
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
 org.apache.commons.lang3.tuple.Pair<String, Object> deserializeObject = TableDisplayDeSerializer.getDeserializeObject(parent, n, mapper);
 String subtype = deserializeObject.getLeft();
 if (subtype != null && subtype.equals(TableDisplay.DICTIONARY_SUBTYPE)) {
  return JavaConverters.mapAsScalaMapConverter((Map<String, Object>) deserializeObject.getRight()).asScala().toMap(Predef.<Tuple2<String, Object>>conforms());
 } else if (subtype != null && subtype.equals(TableDisplay.LIST_OF_MAPS_SUBTYPE)) {
  List<Map<String, Object>> rows = (List<Map<String, Object>>) deserializeObject.getRight();
  List<Object> oo = new ArrayList<Object>();
  for (Map<String, Object> row : rows) {
   oo.add(JavaConverters.mapAsScalaMapConverter(row).asScala().toMap(Predef.<Tuple2<String, Object>>conforms()));
  }
  return scala.collection.JavaConversions.collectionAsScalaIterable(oo);
 } else if (subtype != null && subtype.equals(TableDisplay.MATRIX_SUBTYPE)) {
  List<List<?>> matrix = (List<List<?>>) deserializeObject.getRight();
  ArrayList<Object> ll = new ArrayList<Object>();
  for (List<?> ob : matrix) {
   ll.add(scala.collection.JavaConversions.asScalaBuffer(ob).toList());
  }
  return scala.collection.JavaConversions.asScalaBuffer(ll).toList();
 }
 return deserializeObject.getRight();
}

Code example origin: apache/samza

Map<SystemStream, SystemStreamMetadata> metadata = JavaConverters.mapAsJavaMapConverter(
  streamMetadataCache.getStreamMetadata(
    JavaConverters.asScalaSetConverter(systemStreamToSsp.keySet()).asScala().toSet(), false)).asJava();

Code example origin: com.typesafe.play/play_2.10

/**
 * Converts a Scala Map to Java.
 */
public static <K,V> java.util.Map<K,V> asJava(scala.collection.Map<K,V> scalaMap) {
  return scala.collection.JavaConverters.mapAsJavaMapConverter(scalaMap).asJava();
}
