Usage of the scala.collection.mutable.Buffer.toList() method, with code examples


This article collects code examples of the scala.collection.mutable.Buffer.toList() method as called from Java. The examples are drawn from selected open-source projects hosted on GitHub, Stack Overflow, Maven, and similar platforms, and illustrate how the method is used in practice. Details of Buffer.toList() are as follows:
Package path: scala.collection.mutable.Buffer
Class name: Buffer
Method name: toList

About Buffer.toList

Buffer.toList() copies the buffer's elements into an immutable scala.collection.immutable.List. Because the result is a snapshot, later mutations of the buffer do not affect the returned list; this makes it the standard last step when handing a Java-built collection to a Scala API that expects an immutable List or Seq, as the examples below show.
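
A minimal, self-contained sketch of the method itself (assuming a Scala 2.11/2.12 standard library on the Java classpath; the class name is illustrative):

import java.util.Arrays;
import scala.collection.JavaConverters;
import scala.collection.mutable.Buffer;

public class BufferToListExample {
  public static void main(String[] args) {
    // Wrap a Java list as a Scala mutable Buffer (a view, not a copy).
    Buffer<String> buffer =
        JavaConverters.asScalaBufferConverter(Arrays.asList("a", "b", "c")).asScala();
    // toList() copies the elements into an immutable scala.collection.immutable.List.
    scala.collection.immutable.List<String> list = buffer.toList();
    System.out.println(list); // prints: List(a, b, c)
  }
}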

Code Examples

Code example source: origin: twitter/distributedlog

private static Seq<String> gaugeName(String name) {
  return scala.collection.JavaConversions.asScalaBuffer(Arrays.asList(name)).toList();
}
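
scala.collection.JavaConversions, used here and in several snippets below, has been deprecated since Scala 2.12. A hedged equivalent of this snippet using the explicit scala.collection.JavaConverters API (which later examples in this article also use):

private static Seq<String> gaugeName(String name) {
  // Same result as above, built with the explicit converter API instead of implicit conversions.
  return scala.collection.JavaConverters.asScalaBufferConverter(Arrays.asList(name)).asScala().toList();
}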

Code example source: origin: twosigma/beakerx

@Override
 public Object deserialize(JsonNode n, ObjectMapper mapper) {
  List<Object> o = new ArrayList<Object>();
  try {
   logger.debug("using custom array deserializer");
   for (int i = 0; i < n.size(); i++) {
    o.add(parent.deserialize(n.get(i), mapper));
   }
  } catch (Exception e) {
   logger.error("exception deserializing Collection {}", e.getMessage());
   o = null;
  }
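  // On success, convert the accumulated Java list to an immutable Scala List.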
  if (o != null)
   return scala.collection.JavaConversions.asScalaBuffer(o).toList();
  return null;
 }
}

Code example source: origin: twosigma/beakerx

@SuppressWarnings("unchecked")
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
 org.apache.commons.lang3.tuple.Pair<String, Object> deserializeObject = TableDisplayDeSerializer.getDeserializeObject(parent, n, mapper);
 String subtype = deserializeObject.getLeft();
 if (subtype != null && subtype.equals(TableDisplay.DICTIONARY_SUBTYPE)) {
  return JavaConverters.mapAsScalaMapConverter((Map<String, Object>) deserializeObject.getRight()).asScala().toMap(Predef.<Tuple2<String, Object>>conforms());
 } else if (subtype != null && subtype.equals(TableDisplay.LIST_OF_MAPS_SUBTYPE)) {
  List<Map<String, Object>> rows = (List<Map<String, Object>>) deserializeObject.getRight();
  List<Object> oo = new ArrayList<Object>();
  for (Map<String, Object> row : rows) {
   oo.add(JavaConverters.mapAsScalaMapConverter(row).asScala().toMap(Predef.<Tuple2<String, Object>>conforms()));
  }
  return scala.collection.JavaConversions.collectionAsScalaIterable(oo);
 } else if (subtype != null && subtype.equals(TableDisplay.MATRIX_SUBTYPE)) {
  List<List<?>> matrix = (List<List<?>>) deserializeObject.getRight();
  ArrayList<Object> ll = new ArrayList<Object>();
  for (List<?> ob : matrix) {
   ll.add(scala.collection.JavaConversions.asScalaBuffer(ob).toList());
  }
  return scala.collection.JavaConversions.asScalaBuffer(ll).toList();
 }
 return deserializeObject.getRight();
}

Code example source: origin: pinterest/secor

public void start() {
    Duration[] defaultLatchIntervals = {Duration.apply(1, TimeUnit.MINUTES)};
    @SuppressWarnings("deprecation")
    AdminServiceFactory adminServiceFactory = new AdminServiceFactory(
      this.mPort,
      20,
      List$.MODULE$.<StatsFactory>empty(),
      Option.<String>empty(),
      List$.MODULE$.<Regex>empty(),
      Map$.MODULE$.<String, CustomHttpHandler>empty(),
      JavaConversions
        .asScalaBuffer(Arrays.asList(defaultLatchIntervals)).toList()
    );
    RuntimeEnvironment runtimeEnvironment = new RuntimeEnvironment(this);
    adminServiceFactory.apply(runtimeEnvironment);
    try {
      Properties properties = new Properties();
      properties.load(this.getClass().getResource("build.properties").openStream());
      String buildRevision = properties.getProperty("build_revision", "unknown");
      LOG.info("build.properties build_revision: {}",
           properties.getProperty("build_revision", "unknown"));
      StatsUtil.setLabel("secor.build_revision", buildRevision);
    } catch (Throwable t) {
      LOG.error("Failed to load properties from build.properties", t);
    }
  }
}
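
The List$.MODULE$ and Map$.MODULE$ expressions above are the Java-side spelling of a Scala companion object. For instance, an empty immutable Scala list can be obtained directly (a minimal illustration, again assuming Scala 2.11/2.12):

scala.collection.immutable.List<String> empty = scala.collection.immutable.List$.MODULE$.empty();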

Code example source: origin: twitter/distributedlog

Seq<String> numCachedStreamsGaugeName =
    scala.collection.JavaConversions.asScalaBuffer(Arrays.asList("num_streams")).toList();
cacheStatReceiver.provideGauge(numCachedStreamsGaugeName, new Function0<Object>() {
  @Override
  public Object apply() { return null; } // gauge body elided in the original excerpt
});
Seq<String> numCachedHostsGaugeName =
    scala.collection.JavaConversions.asScalaBuffer(Arrays.asList("num_hosts")).toList();
cacheStatReceiver.provideGauge(numCachedHostsGaugeName, new Function0<Object>() {
  @Override
  public Object apply() { return null; } // gauge body elided in the original excerpt
});

Code example source: origin: apache/incubator-pinot

@Override
 public TopicMetadataResponse send(TopicMetadataRequest request) {
  java.util.List<String> topics = request.topics();
  TopicMetadata[] topicMetadataArray = new TopicMetadata[topics.size()];
  for (int i = 0; i < topicMetadataArray.length; i++) {
   String topic = topics.get(i);
   if (!topic.equals(topicName)) {
    topicMetadataArray[i] = new TopicMetadata(topic, null, Errors.UNKNOWN_TOPIC_OR_PARTITION.code());
   } else {
    PartitionMetadata[] partitionMetadataArray = new PartitionMetadata[partitionCount];
    for (int j = 0; j < partitionCount; j++) {
     java.util.List<BrokerEndPoint> emptyJavaList = Collections.emptyList();
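     // PartitionMetadata's replica and ISR parameters are Scala Seqs, so convert the empty Java list once and reuse it.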
     List<BrokerEndPoint> emptyScalaList = JavaConversions.asScalaBuffer(emptyJavaList).toList();
     partitionMetadataArray[j] =
       new PartitionMetadata(j, Some.apply(brokerArray[partitionLeaderIndices[j]]), emptyScalaList,
         emptyScalaList, Errors.NONE.code());
    }
    Seq<PartitionMetadata> partitionsMetadata = List.fromArray(partitionMetadataArray);
    topicMetadataArray[i] = new TopicMetadata(topic, partitionsMetadata, Errors.NONE.code());
   }
  }
  Seq<BrokerEndPoint> brokers = List.fromArray(brokerArray);
  Seq<TopicMetadata> topicsMetadata = List.fromArray(topicMetadataArray);
  return new TopicMetadataResponse(new kafka.api.TopicMetadataResponse(brokers, topicsMetadata, -1));
 }
}

Code example source: origin: com.typesafe.play/play_2.10

/**
 * Converts a Java List to Scala Seq.
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
  return scala.collection.JavaConverters.asScalaBufferConverter(list).asScala().toList();
}
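
A hypothetical call site (the argument list is purely illustrative):

scala.collection.Seq<String> names = toSeq(java.util.Arrays.asList("alice", "bob"));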

Code example source: origin: com.typesafe.play/play_2.12 (the same method appears verbatim in com.typesafe.play/play and com.typesafe.play/play_2.11)

/**
 * Converts the varargs to a scala buffer,
 * takes care of wrapping varargs into an intermediate list if necessary
 *
 * @param args the message arguments
 * @return scala type for message processing
 */
private static Seq<Object> convertArgsToScalaBuffer(final Object... args) {
  return scala.collection.JavaConverters.asScalaBufferConverter(wrapArgsToListIfNeeded(args)).asScala().toList();
}

Code example source: origin: Impetus/Kundera

@Override
public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient)
{
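  // Convert the Java List to an immutable Scala Seq; SparkContext.parallelize expects a Scala Seq plus a ClassTag.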
  Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
  ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
  JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
  DataFrame df = sparkClient.sqlContext.createDataFrame(personRDD, m.getEntityClazz());
  String outputFilePath = getOutputFilePath(sparkClient.properties);
  String ext = (String) sparkClient.properties.get("format");
  FileType fileType = FileFormatConstants.extension.get(ext);
  switch (fileType)
  {
  case CSV:
    return writeDataInCsvFile(df, outputFilePath);
  case JSON:
    return writeDataInJsonFile(df, outputFilePath);
  default:
    throw new UnsupportedOperationException("Files of type " + ext + " are not yet supported.");
  }
}

Code example source: origin: Impetus/Kundera

Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
JavaRDD javaRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();

Code example source: origin: Impetus/Kundera

@Override
public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient)
{
  try
  {
    Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
    ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
    JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
    DataFrame df = sparkClient.sqlContext.createDataFrame(personRDD, m.getEntityClazz());
    sparkClient.sqlContext.sql("use " + m.getSchema());
    if (logger.isDebugEnabled())
    {
      logger.info("Below are the registered table with hive context: ");
      sparkClient.sqlContext.sql("show tables").show();
    }
    df.write().insertInto(m.getTableName());
    return true;
  }
  catch (Exception e)
  {
    throw new KunderaException("Cannot persist object(s)", e);
  }
}

Code example source: origin: Impetus/Kundera

@Override
public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient)
{
  try
  {
    Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
    ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
    JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
    CassandraJavaUtil.javaFunctions(personRDD)
        .writerBuilder(m.getSchema(), m.getTableName(), CassandraJavaUtil.mapToRow(m.getEntityClazz()))
        .saveToCassandra();
    return true;
  }
  catch (Exception e)
  {
    throw new KunderaException("Cannot persist object(s)", e);
  }
}

Code example source: origin: org.apache.beam/beam-runners-spark

@Override
public scala.collection.immutable.List<DStream<?>> dependencies() {
 return scala.collection.JavaConversions.asScalaBuffer(
     Collections.<DStream<?>>singletonList(parent))
   .toList();
}

Code example source: origin: com.cerner.beadledom/beadledom-swagger

@Override
public scala.collection.immutable.List<Class<?>> classesFromContext(
  Application app, ServletConfig sc) {
 List<Class<?>> classes = Lists.newArrayList();
 addJaxrsClasses(injector, classes);
 return JavaConverters.asScalaBufferConverter(classes).asScala().toList();
}

Code example source: origin: fluxtream/fluxtream-app

private List<Class<?>> filterClasses(final List<Class<?>> classes) {
  final Iterator<Class<?>> eachClass = classes.iterator();
  final ArrayList<Class<?>> filteredClasses = new ArrayList<Class<?>>();
  while (eachClass.hasNext()) {
    Class clazz = eachClass.next();
    // if (clazz.getName().indexOf("fluxtream") != -1)
    //   continue;
    filteredClasses.add(clazz);
  }
  return JavaConversions.asScalaBuffer(filteredClasses).toList();
}

Code example source: origin: com.aol.microservices/micro-swagger

public SwaggerInitializer(ServerData serverData) {
  this.resourceClasses = JavaConversions.asScalaBuffer(
      serverData.getResources()
      .stream()
      .map(resource -> resource.getClass())
      .collect(Collectors.<Class<?>> toList())).toList();
  this.baseUrlPattern = serverData.getBaseUrlPattern();
}

Code example source: origin: te-con/ehour

private void withLock(Interval... lockedRange) {
  Buffer<Interval> scalaBuffer = JavaConversions.asScalaBuffer(Lists.newArrayList(lockedRange));
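  // Stub the lock lookup to return an immutable snapshot (toList) of the mutable Buffer.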
  when(timesheetLockService.findLockedDatesInRange(any(Date.class), any(Date.class), any(User.class))).thenReturn(scalaBuffer.toList());
}

Code example source: origin: info.hupel/libisabelle

public static final <A, B> Operation<java.util.List<String>, B> useThys(MarkupProcessor<A, B> processor) {
 Operation<scala.collection.immutable.List<String>, B> operation =
   Operation.UseThys(
     processor.init(),
     func(processor::markup),
     func(processor::finish));
 return operation.<java.util.List<String>, B> map(
   func(x -> scala.collection.JavaConversions.asScalaBuffer(x).toList()),
   func(x -> x));
}
