Using org.apache.spark.api.java.JavaRDD.fold(): Code Examples


This article collects Java code examples for the org.apache.spark.api.java.JavaRDD.fold() method and shows how JavaRDD.fold() is used in practice. The examples come from selected open-source projects published on GitHub and Maven and are meant as practical references. Details of the JavaRDD.fold() method:
Package: org.apache.spark.api.java
Class: JavaRDD
Method: fold

About JavaRDD.fold

fold(zeroValue, op) aggregates the elements of each partition, and then the per-partition results, using the given associative function and a neutral zero value. Because the zero value is applied once per partition and once more when the partition results are merged, it must be the identity element of the operation (for example, 0 for addition or 1 for multiplication); otherwise the result depends on the number of partitions.
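
A minimal, self-contained sketch of these semantics (the class name, the local master, and the sample data are illustrative and not taken from the projects cited below):

import java.util.Arrays;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class FoldSketch {
  public static void main(String[] args) {
    JavaSparkContext sc = new JavaSparkContext("local[2]", "fold-sketch");

    // Two partitions are requested explicitly so the zero-value behaviour is visible.
    JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4), 2);

    // Identity zero value: the result is the plain sum, 10.
    System.out.println(rdd.fold(0, (a, b) -> a + b));

    // Non-identity zero value: 5 is added once per partition and once for the
    // final merge, so the result is 10 + 5 * (2 + 1) = 25, not 15.
    System.out.println(rdd.fold(5, (a, b) -> a + b));

    sc.stop();
  }
}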

Code Examples

Example source: databricks/learning-spark

public static void main(String[] args) throws Exception {
  String master;
  if (args.length > 0) {
    master = args[0];
  } else {
    master = "local";
  }
  JavaSparkContext sc = new JavaSparkContext(
      master, "basicmap", System.getenv("SPARK_HOME"), System.getenv("JARS"));
  JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4));
  // Sum the RDD with fold(); 0 is the identity element for addition.
  Integer result = rdd.fold(0, new Function2<Integer, Integer, Integer>() {
    public Integer call(Integer x, Integer y) { return x + y; }
  });
  System.out.println(result);
}

Example source: org.apache.spark/spark-core_2.10 (the same test also appears in the spark-core and spark-core_2.11 artifacts)

@Test
public void fold() {
 JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4));
 // AddInts is a Function2<Integer, Integer, Integer> helper that adds its two arguments,
 // defined elsewhere in the test suite.
 int sum = rdd.fold(0, new AddInts());
 assertEquals(10, sum);
}

Example source: org.apache.spark/spark-core (the same test, in some copies with assertEquals statically imported, also appears in spark-core_2.10 and spark-core_2.11)

@Test
public void foldReduce() {
 JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 1, 2, 3, 5, 8, 13));
 Function2<Integer, Integer, Integer> add = (a, b) -> a + b;
 // With the identity element 0 as the zero value, fold() and reduce() return the same sum, 33.
 int sum = rdd.fold(0, add);
 Assert.assertEquals(33, sum);
 sum = rdd.reduce(add);
 Assert.assertEquals(33, sum);
}
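
The test above shows that, with the identity element as the zero value, fold() and reduce() agree on a non-empty RDD. One practical difference is worth noting; the sketch below (class name, master, and data are illustrative, not part of the test suite) shows that reduce() throws on an empty RDD while fold() simply returns the zero value:

import java.util.Collections;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class FoldEmptySketch {
  public static void main(String[] args) {
    JavaSparkContext sc = new JavaSparkContext("local[2]", "fold-empty-sketch");
    JavaRDD<Integer> empty = sc.parallelize(Collections.<Integer>emptyList(), 1);

    // fold() falls back to the zero value when there is nothing to combine.
    System.out.println(empty.fold(0, (a, b) -> a + b));   // prints 0

    try {
      // reduce() has no zero value to fall back on and throws instead.
      empty.reduce((a, b) -> a + b);
    } catch (UnsupportedOperationException e) {
      System.out.println("reduce on an empty RDD failed: " + e.getMessage());
    }

    sc.stop();
  }
}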

Example source: apache/tinkerpop (the same code is also published in the org.apache.tinkerpop/spark-gremlin artifact)

// Abridged excerpt from TinkerPop's Spark integration: the final reducing barrier step is
// evaluated by folding the RDD of traversers with a step-specific zero value and combiner.
// The leading line is the tail of the preceding branch, which counts by summing traverser bulks.
result = nextRDD.map(Traverser::bulk).fold(0L, (a, b) -> a + b);
else if (endStep instanceof SumGlobalStep) {
  result = nextRDD.isEmpty() ? null : nextRDD
      .map(traverser -> NumberHelper.mul(traverser.bulk(), (Number) traverser.get()))
      .fold(0, NumberHelper::add);
} else if (endStep instanceof MeanGlobalStep) {
  result = nextRDD.isEmpty() ? null : nextRDD
      .map(traverser -> new MeanGlobalStep.MeanNumber((Number) traverser.get(), traverser.bulk()))
      .fold(MeanNumberSupplier.instance().get(), MeanGlobalStep.MeanNumber::add)
      .getFinal();
} else if (endStep instanceof MinGlobalStep) {
  result = nextRDD.isEmpty() ? null : nextRDD
      .map(traverser -> (Comparable) traverser.get())
      .fold(Double.NaN, NumberHelper::min);
} else if (endStep instanceof MaxGlobalStep) {
  result = nextRDD.isEmpty() ? null : nextRDD
      .map(traverser -> (Comparable) traverser.get())
      .fold(Double.NaN, NumberHelper::max);
} else if (endStep instanceof FoldStep) {
  final BinaryOperator biOperator = endStep.getBiOperator();
  }).fold(endStep.getSeedSupplier().get(), biOperator::apply);
} else if (endStep instanceof GroupStep) {
  final GroupStep.GroupBiOperator<Object, Object> biOperator = (GroupStep.GroupBiOperator) endStep.getBiOperator();
        final GroupStep<Object, Object, Object> clone = (GroupStep) endStep.clone();
        return IteratorUtils.map(partitions, clone::projectTraverser);
      }).fold(((GroupStep<Object, Object, Object>) endStep).getSeedSupplier().get(), biOperator::apply));
} else if (endStep instanceof GroupCountStep) {
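
The min/max branches above seed fold() with Double.NaN and TinkerPop's NumberHelper; plain Math::min has no special handling for such a seed, so the simplified, self-contained sketch below (class name, master, and data are illustrative) uses POSITIVE_INFINITY, the identity of min, as the zero value instead:

import java.util.Arrays;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class FoldMinSketch {
  public static void main(String[] args) {
    JavaSparkContext sc = new JavaSparkContext("local[2]", "fold-min-sketch");

    // The zero value must be the identity of the combine function,
    // so POSITIVE_INFINITY is the right seed for a minimum.
    JavaRDD<Double> values = sc.parallelize(Arrays.asList(3.5, 1.25, 7.0));
    double min = values.fold(Double.POSITIVE_INFINITY, Math::min);
    System.out.println(min);  // prints 1.25

    sc.stop();
  }
}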

Example source: com.davidbracewell/mango

@Override
public T fold(T zeroValue, @NonNull SerializableBinaryOperator<T> operator) {
 // Apply the broadcast configuration before delegating to the user-supplied operator.
 return rdd.fold(zeroValue, (t, u) -> {
   Configurator.INSTANCE.configure(configBroadcast.value());
   return operator.apply(t, u);
 });
}
