org.apache.spark.api.java.JavaRDD.intersection()方法的使用及代码示例

x33g5p2x  于2022-01-21 转载在 其他  
字(4.5k)|赞(0)|评价(0)|浏览(73)

本文整理了Java中org.apache.spark.api.java.JavaRDD.intersection()方法的一些代码示例,展示了JavaRDD.intersection()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。JavaRDD.intersection()方法的具体详情如下:
包路径:org.apache.spark.api.java.JavaRDD
类名称:JavaRDD
方法名:intersection

JavaRDD.intersection介绍

返回当前 RDD 与参数 RDD 的交集;输出结果不包含重复元素(即使两个输入 RDD 内部各自含有重复元素)。注意:该方法内部会执行 shuffle。

代码示例

代码示例来源:origin: org.apache.spark/spark-core_2.11

@SuppressWarnings("unchecked")
@Test
public void intersection() {
  // Plain integer RDDs: overlap is {1, 2, 3}.
  List<Integer> firstInts = Arrays.asList(1, 10, 2, 3, 4, 5);
  List<Integer> secondInts = Arrays.asList(1, 6, 2, 3, 7, 8);
  JavaRDD<Integer> firstRdd = sc.parallelize(firstInts);
  JavaRDD<Integer> secondRdd = sc.parallelize(secondInts);
  JavaRDD<Integer> common = firstRdd.intersection(secondRdd);
  assertEquals(3, common.count());

  // Intersecting an empty RDD with anything yields an empty RDD.
  JavaRDD<Integer> emptyRdd = sc.emptyRDD();
  JavaRDD<Integer> emptyResult = emptyRdd.intersection(secondRdd);
  assertEquals(0, emptyResult.count());

  // Double RDDs: an RDD intersected with an equal RDD keeps all elements.
  List<Double> doubleValues = Arrays.asList(1.0, 2.0);
  JavaDoubleRDD leftDoubles = sc.parallelizeDoubles(doubleValues);
  JavaDoubleRDD rightDoubles = sc.parallelizeDoubles(doubleValues);
  JavaDoubleRDD doubleResult = leftDoubles.intersection(rightDoubles);
  assertEquals(2, doubleResult.count());

  // Pair RDDs: identical pair lists intersect to the full set of pairs.
  List<Tuple2<Integer, Integer>> tuples = new ArrayList<>();
  tuples.add(new Tuple2<>(1, 2));
  tuples.add(new Tuple2<>(3, 4));
  JavaPairRDD<Integer, Integer> leftPairs = sc.parallelizePairs(tuples);
  JavaPairRDD<Integer, Integer> rightPairs = sc.parallelizePairs(tuples);
  JavaPairRDD<Integer, Integer> pairResult = leftPairs.intersection(rightPairs);
  assertEquals(2, pairResult.count());
}

代码示例来源:origin: org.apache.spark/spark-core_2.10

@SuppressWarnings("unchecked")
@Test
public void intersection() {
  // Integer case: the two lists share exactly three values (1, 2, 3).
  List<Integer> lhsValues = Arrays.asList(1, 10, 2, 3, 4, 5);
  List<Integer> rhsValues = Arrays.asList(1, 6, 2, 3, 7, 8);
  JavaRDD<Integer> lhs = sc.parallelize(lhsValues);
  JavaRDD<Integer> rhs = sc.parallelize(rhsValues);
  JavaRDD<Integer> overlap = lhs.intersection(rhs);
  assertEquals(3, overlap.count());

  // Empty case: intersection with an empty RDD is empty.
  JavaRDD<Integer> nothing = sc.emptyRDD();
  JavaRDD<Integer> noOverlap = nothing.intersection(rhs);
  assertEquals(0, noOverlap.count());

  // Double case: equal inputs intersect to the whole input.
  List<Double> sharedDoubles = Arrays.asList(1.0, 2.0);
  JavaDoubleRDD doublesA = sc.parallelizeDoubles(sharedDoubles);
  JavaDoubleRDD doublesB = sc.parallelizeDoubles(sharedDoubles);
  JavaDoubleRDD doubleOverlap = doublesA.intersection(doublesB);
  assertEquals(2, doubleOverlap.count());

  // Pair case: same two tuples on both sides, so both survive.
  List<Tuple2<Integer, Integer>> tuplePairs = new ArrayList<>();
  tuplePairs.add(new Tuple2<>(1, 2));
  tuplePairs.add(new Tuple2<>(3, 4));
  JavaPairRDD<Integer, Integer> pairsA = sc.parallelizePairs(tuplePairs);
  JavaPairRDD<Integer, Integer> pairsB = sc.parallelizePairs(tuplePairs);
  JavaPairRDD<Integer, Integer> pairOverlap = pairsA.intersection(pairsB);
  assertEquals(2, pairOverlap.count());
}

代码示例来源:origin: org.apache.spark/spark-core

@SuppressWarnings("unchecked")
@Test
public void intersection() {
  // Two integer RDDs whose common elements are 1, 2, and 3.
  List<Integer> sourceA = Arrays.asList(1, 10, 2, 3, 4, 5);
  List<Integer> sourceB = Arrays.asList(1, 6, 2, 3, 7, 8);
  JavaRDD<Integer> rddA = sc.parallelize(sourceA);
  JavaRDD<Integer> rddB = sc.parallelize(sourceB);
  JavaRDD<Integer> shared = rddA.intersection(rddB);
  assertEquals(3, shared.count());

  // An empty RDD intersected with a non-empty RDD stays empty.
  JavaRDD<Integer> emptySource = sc.emptyRDD();
  JavaRDD<Integer> sharedWithEmpty = emptySource.intersection(rddB);
  assertEquals(0, sharedWithEmpty.count());

  // Double-typed RDDs built from the same list: full overlap.
  List<Double> doubleSource = Arrays.asList(1.0, 2.0);
  JavaDoubleRDD doubleRddA = sc.parallelizeDoubles(doubleSource);
  JavaDoubleRDD doubleRddB = sc.parallelizeDoubles(doubleSource);
  JavaDoubleRDD doubleShared = doubleRddA.intersection(doubleRddB);
  assertEquals(2, doubleShared.count());

  // Pair RDDs built from the same tuples: both pairs are in the result.
  List<Tuple2<Integer, Integer>> pairSource = new ArrayList<>();
  pairSource.add(new Tuple2<>(1, 2));
  pairSource.add(new Tuple2<>(3, 4));
  JavaPairRDD<Integer, Integer> pairRddA = sc.parallelizePairs(pairSource);
  JavaPairRDD<Integer, Integer> pairRddB = sc.parallelizePairs(pairSource);
  JavaPairRDD<Integer, Integer> pairShared = pairRddA.intersection(pairRddB);
  assertEquals(2, pairShared.count());
}

代码示例来源:origin: org.qcri.rheem/rheem-spark

@Override
public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
    ChannelInstance[] inputs,
    ChannelInstance[] outputs,
    SparkExecutor sparkExecutor,
    OptimizationContext.OperatorContext operatorContext) {
  // Sanity-check the channel arity against this operator's declared slots.
  assert inputs.length == this.getNumInputs();
  assert outputs.length == this.getNumOutputs();

  final RddChannel.Instance leftChannel = (RddChannel.Instance) inputs[0];
  final RddChannel.Instance rightChannel = (RddChannel.Instance) inputs[1];
  final RddChannel.Instance resultChannel = (RddChannel.Instance) outputs[0];

  // Delegate the set intersection to Spark and tag the resulting RDD
  // with this operator's name before handing it to the output channel.
  final JavaRDD<Object> intersectionRdd =
      leftChannel.provideRdd().intersection(rightChannel.provideRdd());
  this.name(intersectionRdd);
  resultChannel.accept(intersectionRdd, sparkExecutor);

  return ExecutionOperator.modelLazyExecution(inputs, outputs, operatorContext);
}

相关文章

微信公众号

最新文章

更多