Usage of the org.apache.spark.SparkContext.getOrCreate() method, with code examples

This article collects code examples of the org.apache.spark.SparkContext.getOrCreate() method in Java, showing how SparkContext.getOrCreate() is used in practice. The examples come from selected open-source projects on GitHub, Stack Overflow, Maven and similar platforms, and should serve as a solid reference. Details of the method:

Package: org.apache.spark
Class: SparkContext
Method: getOrCreate

About SparkContext.getOrCreate

getOrCreate returns the currently active SparkContext if one exists; otherwise it creates a new one, either from the SparkConf passed in or, in the no-argument overload, from a default configuration. Since at most one SparkContext can be active per JVM, this is the safe way to obtain a context when several components may each try to initialize Spark.
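
Before the collected examples, here is a minimal, self-contained sketch of both overloads. It is illustrative only; the master URL and app name are placeholders:

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;

public class GetOrCreateDemo {
  public static void main(String[] args) {
    // Placeholder configuration; any master URL and app name work here.
    SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("GetOrCreateDemo");

    // No context is active yet, so this creates one from the given conf.
    SparkContext first = SparkContext.getOrCreate(conf);

    // A context is now active, so this returns the same instance; the conf is ignored.
    SparkContext second = SparkContext.getOrCreate(conf);

    System.out.println(first == second); // true: at most one active SparkContext per JVM

    first.stop();
  }
}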

Code examples

Code example source: OryxProject/oryx

protected final JavaStreamingContext buildStreamingContext() {
  log.info("Starting SparkContext with interval {} seconds", generationIntervalSec);
  SparkConf sparkConf = new SparkConf();
  // Only for tests, really
  if (sparkConf.getOption("spark.master").isEmpty()) {
    log.info("Overriding master to {} for tests", streamingMaster);
    sparkConf.setMaster(streamingMaster);
  }
  // Only for tests, really
  if (sparkConf.getOption("spark.app.name").isEmpty()) {
    String appName = "Oryx" + getLayerName();
    if (id != null) {
      appName = appName + "-" + id;
    }
    log.info("Overriding app name to {} for tests", appName);
    sparkConf.setAppName(appName);
  }
  extraSparkConfig.forEach((key, value) -> sparkConf.setIfMissing(key, value.toString()));
  // Turn this down to prevent long blocking at shutdown
  sparkConf.setIfMissing(
      "spark.streaming.gracefulStopTimeout",
      Long.toString(TimeUnit.MILLISECONDS.convert(generationIntervalSec, TimeUnit.SECONDS)));
  sparkConf.setIfMissing("spark.cleaner.ttl", Integer.toString(20 * generationIntervalSec));
  long generationIntervalMS =
      TimeUnit.MILLISECONDS.convert(generationIntervalSec, TimeUnit.SECONDS);
  JavaSparkContext jsc = JavaSparkContext.fromSparkContext(SparkContext.getOrCreate(sparkConf));
  return new JavaStreamingContext(jsc, new Duration(generationIntervalMS));
}
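
A typical caller then starts the returned streaming context. The lines below are a hypothetical usage sketch, not part of the Oryx source:

JavaStreamingContext jssc = buildStreamingContext();
jssc.start();            // begin scheduling micro-batches at the configured interval
jssc.awaitTermination(); // block until the context is stopped; may throw InterruptedException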

Code example source: OryxProject/oryx

@BeforeClass
public static void setUp() {
  SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkIT");
  javaSparkContext = JavaSparkContext.fromSparkContext(SparkContext.getOrCreate(sparkConf));
}
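
A matching teardown usually stops the shared context so later test classes can create a fresh one. This is an assumed counterpart, not shown in the snippet:

@AfterClass
public static void tearDown() {
  if (javaSparkContext != null) {
    javaSparkContext.stop();
    javaSparkContext = null;
  }
}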

Code example source: apache/tinkerpop

public static SparkContext recreateStopped() {
  if (null == CONTEXT)
    throw new IllegalStateException("The Spark context has not been created.");
  if (!CONTEXT.isStopped())
    throw new IllegalStateException("The Spark context is not stopped.");
  CONTEXT = SparkContext.getOrCreate(CONTEXT.getConf());
  return CONTEXT;
}
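
recreateStopped works because getOrCreate builds a brand-new context once the previous one has been stopped. A standalone illustration of that behavior (master URL and app name assumed):

SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("RecreateDemo");
SparkContext sc = SparkContext.getOrCreate(conf);
sc.stop();
// The stopped context is no longer active, so getOrCreate now creates a fresh instance.
SparkContext recreated = SparkContext.getOrCreate(conf);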

Code example source: apache/tinkerpop

public static SparkContext create(final SparkConf sparkConf) {
  if (isContextNullOrStopped()) {
    sparkConf.setAppName("Apache TinkerPop's Spark-Gremlin");
    CONTEXT = SparkContext.getOrCreate(sparkConf);
  }
  return CONTEXT;
}

Code example source: apache/incubator-nemo

/**
 * Constructor with configuration.
 *
 * @param sparkConf spark configuration to wrap.
 */
public SparkContext(final SparkConf sparkConf) {
  super(sparkConf);
  this.sparkContext = org.apache.spark.SparkContext.getOrCreate(sparkConf);
}

Code example source: apache/incubator-nemo

/**
 * Constructor.
 */
public SparkContext() {
  this.sparkContext = org.apache.spark.SparkContext.getOrCreate();
}

Code example source: jpmml/jpmml-sparkml

static
public void checkVersion(){
  SparkContext sparkContext = SparkContext.getOrCreate();
  int[] version = parseVersion(sparkContext.version());
  if(!Arrays.equals(ConverterFactory.VERSION, version)){
    throw new IllegalArgumentException("Expected Apache Spark ML version " + formatVersion(ConverterFactory.VERSION) + ", got version " + formatVersion(version) + " (" + sparkContext.version() + ")");
  }
}
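
parseVersion and formatVersion are jpmml-sparkml helpers that the snippet does not show. A purely illustrative sketch of what parseVersion might do, not the project's actual code: take the "major.minor" prefix of a Spark version string such as "2.3.0" and return it as an int array:

static int[] parseVersion(String version) {
  // Illustrative only: "2.3.0" -> {2, 3}; real code may handle suffixes such as "-SNAPSHOT".
  String[] parts = version.split("\\.");
  return new int[]{Integer.parseInt(parts[0]), Integer.parseInt(parts[1])};
}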

Code example source: apache/incubator-nemo

@Override
protected Iterator<String> initializeIterator() {
  // for setting up the same environment in the executors.
  final SparkContext sparkContext = SparkContext.getOrCreate(sparkConf);
  // Spark does lazy evaluation: it doesn't load the full data in rdd, but only the partition it is asked for.
  final RDD<String> rdd = sparkContext.textFile(inputPath, numPartitions);
  final Iterable<String> iterable = () -> JavaConverters.asJavaIteratorConverter(
      rdd.iterator(rdd.getPartitions()[partitionIndex], TaskContext$.MODULE$.empty())).asJava();
  return iterable.iterator();
}
