本文整理了Java中org.apache.spark.api.java.JavaSparkContext.parallelizeDoubles()
方法的一些代码示例,展示了JavaSparkContext.parallelizeDoubles()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。JavaSparkContext.parallelizeDoubles()
方法的具体详情如下:
包路径:org.apache.spark.api.java.JavaSparkContext
类名称:JavaSparkContext
方法名:parallelizeDoubles
方法描述:将本地的 Double 列表(java.util.List&lt;Double&gt;)并行化为分布式的 JavaDoubleRDD。
代码示例来源:origin: databricks/learning-spark
public static void main(String[] args) {
  // Spark master comes from the first CLI argument; default to local mode.
  String master = args.length > 0 ? args[0] : "local";
  JavaSparkContext sc = new JavaSparkContext(
      master, "basicmap", System.getenv("SPARK_HOME"), System.getenv("JARS"));
  // Distribute a small sample set that contains one obvious outlier (1000.0).
  JavaDoubleRDD input =
      sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 1000.0));
  JavaDoubleRDD result = removeOutliers(input);
  // Print the surviving values as a comma-separated line.
  System.out.println(StringUtils.join(result.collect(), ","));
}
static JavaDoubleRDD removeOutliers(JavaDoubleRDD rdd) {
代码示例来源:origin: org.apache.spark/spark-core_2.10
@Test
public void reduceOnJavaDoubleRDD() {
  // reduce() should fold all elements of the double RDD into their sum.
  JavaDoubleRDD doubles = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  double total = doubles.reduce(Double::sum);
  assertEquals(10.0, total, 0.001);
}
代码示例来源:origin: org.apache.spark/spark-core_2.11
@Test
public void naturalMin() {
  // min() without a comparator should use the natural ordering of Double.
  JavaDoubleRDD rdd = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  // Fix: the local was misleadingly named "max" even though it holds the minimum.
  double min = rdd.min();
  assertEquals(1.0, min, 0.0);
}
代码示例来源:origin: org.apache.spark/spark-core_2.10
@Test
public void naturalMax() {
  // max() without a comparator should use the natural ordering of Double.
  double observed = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0)).max();
  assertEquals(4.0, observed, 0.0);
}
代码示例来源:origin: org.apache.spark/spark-core_2.10
@Test
public void cartesian() {
  // cartesian() pairs each string with every double; the first pair must
  // combine the first element of each RDD.
  JavaRDD<String> strings = sc.parallelize(Arrays.asList("Hello", "World"));
  JavaDoubleRDD doubles = sc.parallelizeDoubles(Arrays.asList(1.0, 1.0, 2.0, 3.0, 5.0, 8.0));
  JavaPairRDD<String, Double> pairs = strings.cartesian(doubles);
  assertEquals(new Tuple2<>("Hello", 1.0), pairs.first());
}
代码示例来源:origin: org.apache.spark/spark-core
@Test
public void naturalMin() {
  // min() without a comparator should use the natural ordering of Double.
  JavaDoubleRDD rdd = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  // Fix: renamed the misleading local "max" — it holds the minimum, not the maximum.
  double min = rdd.min();
  assertEquals(1.0, min, 0.0);
}
代码示例来源:origin: org.apache.spark/spark-core
@Test
public void reduceOnJavaDoubleRDD() {
  // Summing 1..4 via reduce() must yield 10.
  JavaDoubleRDD values = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  double observedSum = values.reduce(Double::sum);
  assertEquals(10.0, observedSum, 0.001);
}
代码示例来源:origin: org.apache.spark/spark-core_2.11
@Test
public void naturalMax() {
  // max() without a comparator relies on Double's natural ordering.
  JavaDoubleRDD values = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  double largest = values.max();
  assertEquals(4.0, largest, 0.0);
}
代码示例来源:origin: org.apache.spark/spark-core_2.11
@Test
public void reduceOnJavaDoubleRDD() {
  // Folding the RDD with addition should produce the total of its elements.
  double total =
      sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0)).reduce(Double::sum);
  assertEquals(10.0, total, 0.001);
}
代码示例来源:origin: org.apache.spark/spark-core_2.10
@Test
public void naturalMin() {
  // min() without a comparator should use the natural ordering of Double.
  JavaDoubleRDD rdd = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  // Fix: the local previously named "max" actually stores the minimum.
  double min = rdd.min();
  assertEquals(1.0, min, 0.0);
}
代码示例来源:origin: org.apache.spark/spark-core_2.11
@Test
public void cartesian() {
  // A string RDD crossed with a double RDD yields (String, Double) pairs;
  // first() must pair the heads of both RDDs.
  JavaRDD<String> words = sc.parallelize(Arrays.asList("Hello", "World"));
  JavaDoubleRDD fibs = sc.parallelizeDoubles(Arrays.asList(1.0, 1.0, 2.0, 3.0, 5.0, 8.0));
  JavaPairRDD<String, Double> product = words.cartesian(fibs);
  assertEquals(new Tuple2<>("Hello", 1.0), product.first());
}
代码示例来源:origin: org.apache.spark/spark-core
@Test
public void cartesian() {
  // Verify the first pair of the cartesian product combines the first
  // string with the first double.
  JavaRDD<String> left = sc.parallelize(Arrays.asList("Hello", "World"));
  JavaDoubleRDD right = sc.parallelizeDoubles(Arrays.asList(1.0, 1.0, 2.0, 3.0, 5.0, 8.0));
  assertEquals(new Tuple2<>("Hello", 1.0), left.cartesian(right).first());
}
代码示例来源:origin: org.apache.spark/spark-core_2.11
@Test
public void min() {
  // min() with an explicit comparator should return the smallest element.
  JavaDoubleRDD rdd = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  // Fix: renamed the misleading local "max" — it holds the minimum.
  double min = rdd.min(new DoubleComparator());
  assertEquals(1.0, min, 0.001);
}
代码示例来源:origin: org.apache.spark/spark-core
@Test
public void max() {
  // max() with an explicit comparator should return the largest element.
  JavaDoubleRDD values = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  double largest = values.max(new DoubleComparator());
  assertEquals(4.0, largest, 0.001);
}
代码示例来源:origin: org.apache.spark/spark-core_2.11
@Test
public void takeOrdered() {
  // takeOrdered(k) returns the k smallest elements, whether the ordering
  // comes from an explicit comparator or Double's natural order.
  JavaDoubleRDD doubles = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  assertEquals(Arrays.asList(1.0, 2.0), doubles.takeOrdered(2));
  assertEquals(Arrays.asList(1.0, 2.0), doubles.takeOrdered(2, new DoubleComparator()));
}
代码示例来源:origin: org.apache.spark/spark-core_2.10
@Test
public void min() {
  // min() with an explicit comparator should return the smallest element.
  JavaDoubleRDD rdd = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  // Fix: the local previously named "max" actually stores the minimum.
  double min = rdd.min(new DoubleComparator());
  assertEquals(1.0, min, 0.001);
}
代码示例来源:origin: org.apache.spark/spark-core_2.10
@Test
public void max() {
  // With a custom comparator, max() must still pick the largest value.
  double observed =
      sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0)).max(new DoubleComparator());
  assertEquals(4.0, observed, 0.001);
}
代码示例来源:origin: org.apache.spark/spark-core_2.10
@Test
public void takeOrdered() {
  // Both overloads of takeOrdered(2) must agree on the two smallest values.
  JavaDoubleRDD values = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  assertEquals(Arrays.asList(1.0, 2.0), values.takeOrdered(2));
  assertEquals(Arrays.asList(1.0, 2.0), values.takeOrdered(2, new DoubleComparator()));
}
代码示例来源:origin: org.apache.spark/spark-core
@Test
public void takeOrdered() {
  // takeOrdered should return the smallest two elements in ascending order,
  // with or without an explicit comparator.
  JavaDoubleRDD sample = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  assertEquals(Arrays.asList(1.0, 2.0), sample.takeOrdered(2));
  assertEquals(Arrays.asList(1.0, 2.0), sample.takeOrdered(2, new DoubleComparator()));
}
代码示例来源:origin: org.apache.spark/spark-core_2.11
@Test
public void max() {
  // max() with an explicit comparator should pick the largest element.
  JavaDoubleRDD sample = sc.parallelizeDoubles(Arrays.asList(1.0, 2.0, 3.0, 4.0));
  double biggest = sample.max(new DoubleComparator());
  assertEquals(4.0, biggest, 0.001);
}
内容来源于网络,如有侵权,请联系作者删除!