Usage of the org.apache.spark.SparkContext.isStopped() method, with code examples

This article collects Java code examples for the org.apache.spark.SparkContext.isStopped() method, showing how it is used in practice. The examples are taken from selected open-source projects published on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as a useful reference. Method details:

Package: org.apache.spark
Class: SparkContext
Method: isStopped

About SparkContext.isStopped

The upstream documentation provides no summary. As the examples below show, the method reports whether the context has been shut down: it returns false while the SparkContext is usable and true once it has been stopped, which makes it a natural guard before reusing a cached context.
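
To set the stage before the project examples, here is a minimal, self-contained sketch of that guard pattern. It is illustrative only: the class name SparkContextGuard, the helper getOrRecreateContext(), and the app-name/master values are invented for this sketch rather than taken from any of the projects below.

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;

public final class SparkContextGuard {

    private static SparkContext context;

    // Hypothetical helper: hand back a live SparkContext, rebuilding it
    // when the cached one has been stopped (isStopped() == true).
    public static synchronized SparkContext getOrRecreateContext() {
        if (context == null || context.isStopped()) {
            final SparkConf conf = new SparkConf()
                    .setAppName("isStopped-demo")  // illustrative values
                    .setMaster("local[*]");
            // getOrCreate() returns the active context or builds a new one
            // from the given configuration.
            context = SparkContext.getOrCreate(conf);
        }
        return context;
    }
}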

Code examples

Example source: apache/tinkerpop

public static boolean isContextNullOrStopped() {
  return null == CONTEXT || CONTEXT.isStopped();
}
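
A null context and a stopped context are treated alike here, which suits callers that only care whether a usable context currently exists.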

Example source: apache/hive

private void runSparkTestSession(HiveConf conf, int threadId) throws Exception {
  conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT, "10s");
  conf.setVar(HiveConf.ConfVars.SPARK_SESSION_TIMEOUT_PERIOD, "1s");
  Driver driver = null;
  try {
    driver = new Driver(new QueryState.Builder()
        .withGenerateNewQueryId(true)
        .withHiveConf(conf).build(), null, null);
    SparkSession sparkSession = SparkUtilities.getSparkSession(conf,
        SparkSessionManagerImpl.getInstance());
    Assert.assertEquals(0, driver.run("show tables").getResponseCode());
    barrier.await();
    SparkContext sparkContext = getSparkContext(sparkSession);
    Assert.assertFalse(sparkContext.isStopped());
    if (threadId == 1) {
      barrier.await();
      closeSparkSession(sparkSession);
      Assert.assertTrue(sparkContext.isStopped());
    } else {
      closeSparkSession(sparkSession);
      Assert.assertFalse(sparkContext.isStopped());
      barrier.await();
    }
  } finally {
    if (driver != null) {
      driver.destroy();
    }
  }
}
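
Two threads coordinate through a barrier here, and the assertions pin down when the underlying SparkContext actually stops: closing one session leaves the context running (isStopped() still false), and only after thread 1 closes the last session does isStopped() flip to true.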

Example source: apache/tinkerpop

public static SparkContext getContext() {
  if (null != CONTEXT && CONTEXT.isStopped())
    recreateStopped();
  return CONTEXT;
}
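
Here isStopped() drives transparent recovery: a stopped context is recreated before being returned, so callers never see a dead CONTEXT.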

Example source: apache/tinkerpop

public static SparkContext recreateStopped() {
  if (null == CONTEXT)
    throw new IllegalStateException("The Spark context has not been created.");
  if (!CONTEXT.isStopped())
    throw new IllegalStateException("The Spark context is not stopped.");
  CONTEXT = SparkContext.getOrCreate(CONTEXT.getConf());
  return CONTEXT;
}
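
recreateStopped() rebuilds the context from the stopped instance's own SparkConf via SparkContext.getOrCreate(), so the replacement inherits the original configuration; the two IllegalStateException guards make the method's precondition explicit.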

Example source: apache/tinkerpop

public static SparkContext create(final SparkContext sparkContext) {
    if (null != CONTEXT && !CONTEXT.isStopped() && sparkContext != CONTEXT /* exactly the same object => no-op */
      && !sparkContext.getConf().getBoolean("spark.driver.allowMultipleContexts", false)) {
    throw new IllegalStateException(
        "Active Spark context exists. Call Spark.close() to close it before creating a new one");
  }
  CONTEXT = sparkContext;
  return CONTEXT;
}
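
create() refuses to replace a context that is still running, unless the caller passes in the very same object or has opted in via spark.driver.allowMultipleContexts.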

Example source: apache/tinkerpop

public static void refresh() {
  if (null == CONTEXT)
    throw new IllegalStateException("The Spark context has not been created.");
  if (CONTEXT.isStopped())
    recreateStopped();
  final Set<String> keepNames = new HashSet<>();
  for (final RDD<?> rdd : JavaConversions.asJavaIterable(CONTEXT.persistentRdds().values())) {
    if (null != rdd.name()) {
      keepNames.add(rdd.name());
      NAME_TO_RDD.put(rdd.name(), rdd);
    }
  }
  // remove all stale names in the NAME_TO_RDD map
  NAME_TO_RDD.keySet().stream().filter(key -> !keepNames.contains(key)).collect(Collectors.toList()).forEach(NAME_TO_RDD::remove);
}
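
refresh() first ensures the context is alive (recreating it when isStopped() is true) and then reconciles the NAME_TO_RDD cache against the context's persistentRdds(), evicting names whose RDDs are gone.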

Example source: com.davidbracewell/mango

private static SparkStreamingContext contextOf(SparkContext sparkContext) {
  if (context == null || context.sc().isStopped()) {
    synchronized (SparkStreamingContext.class) {
      if (context == null || context.sc().isStopped()) {
        context = new JavaSparkContext(sparkContext);
      }
    }
  }
  return SparkStreamingContext.INSTANCE;
}
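
This is the double-checked locking idiom: isStopped() is re-tested inside the synchronized block so that at most one thread replaces a stopped context.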

Example source: org.apache.beam/beam-runners-spark

public static synchronized JavaSparkContext getSparkContext(SparkPipelineOptions options) {
  SparkContextOptions contextOptions = options.as(SparkContextOptions.class);
  usesProvidedSparkContext = contextOptions.getUsesProvidedSparkContext();
  // reuse should be ignored if the context is provided.
  if (Boolean.getBoolean(TEST_REUSE_SPARK_CONTEXT) && !usesProvidedSparkContext) {
    // if the context is null or stopped for some reason, re-create it.
    if (sparkContext == null || sparkContext.sc().isStopped()) {
      sparkContext = createSparkContext(contextOptions);
      sparkMaster = options.getSparkMaster();
    } else if (!options.getSparkMaster().equals(sparkMaster)) {
      throw new IllegalArgumentException(
          String.format(
              "Cannot reuse spark context "
                  + "with different spark master URL. Existing: %s, requested: %s.",
              sparkMaster, options.getSparkMaster()));
    }
    return sparkContext;
  } else {
    return createSparkContext(contextOptions);
  }
}
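
Beam's runner only reuses the cached context when the TEST_REUSE_SPARK_CONTEXT flag is set and no user-provided context is in play; isStopped() prevents handing back a context that an earlier test already shut down, and a mismatched Spark master URL is rejected rather than silently rebuilt.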

Example source: com.davidbracewell/mango

private static JavaSparkContext getSparkContext() {
  if (context == null || context.sc().isStopped()) {
    synchronized (SparkStreamingContext.class) {
      if (context == null || context.sc().isStopped()) {
        SparkConf conf = new SparkConf()
            .setAppName(Config.get(SPARK_APPNAME).asString(StringUtils.randomHexString(20)));
        if (Config.hasProperty(SPARK_MASTER)) {
          conf = conf.setMaster(Config.get(SPARK_MASTER).asString("local[*]"));
        }
        context = new JavaSparkContext(conf);
      }
    }
  }
  return context;
}
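
The same double-checked pattern as in the previous mango example, except that the context is built from configuration: the app name falls back to a random hex string, and a master URL is applied only when SPARK_MASTER is set.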

Example source: org.apache.beam/beam-runners-spark

private static JavaSparkContext createSparkContext(SparkContextOptions contextOptions) {
  if (usesProvidedSparkContext) {
    LOG.info("Using a provided Spark Context");
    JavaSparkContext jsc = contextOptions.getProvidedSparkContext();
    if (jsc == null || jsc.sc().isStopped()) {
      LOG.error("The provided Spark context " + jsc + " was not created or was stopped");
      throw new RuntimeException("The provided Spark context was not created or was stopped");
    }
    return jsc;
  } else {
    LOG.info("Creating a brand new Spark Context.");
    SparkConf conf = new SparkConf();
    if (!conf.contains("spark.master")) {
      // set master if not set.
      conf.setMaster(contextOptions.getSparkMaster());
    }

    if (contextOptions.getFilesToStage() != null && !contextOptions.getFilesToStage().isEmpty()) {
      conf.setJars(contextOptions.getFilesToStage().toArray(new String[0]));
    }

    conf.setAppName(contextOptions.getAppName());
    // register immutable collections serializers because the SDK uses them.
    conf.set("spark.kryo.registrator", BeamSparkRunnerRegistrator.class.getName());
    return new JavaSparkContext(conf);
  }
}
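
createSparkContext() is the counterpart of the previous Beam example: a provided context that is null or already stopped fails fast with a RuntimeException, while the self-created branch fills in the master, staged jars, app name, and Kryo registrator before constructing a fresh JavaSparkContext.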
