Usage of org.apache.spark.api.java.JavaSparkContext.stop(), with code examples

This article collects Java code examples for the org.apache.spark.api.java.JavaSparkContext.stop() method, showing how stop() is used in practice. The examples were extracted from selected open-source projects on GitHub, Stack Overflow, and Maven, so they reflect real-world usage and should be a useful reference. Details of the method:

Package: org.apache.spark.api.java
Class: JavaSparkContext
Method: stop

About JavaSparkContext.stop

stop() shuts down the underlying SparkContext, releasing the executors, cached blocks, and other cluster resources held by the application. Once stopped, the context can no longer be used to create RDDs or run jobs.
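
Because an application can fail between creating and stopping a context, stop() is usually called from a finally block or from a close()/tearDown() hook, as the examples below show. A minimal self-contained sketch (the class name and master URL are illustrative, not taken from the examples):

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class StopExample {
 public static void main(String[] args) {
  SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("stop-example");
  JavaSparkContext sc = new JavaSparkContext(conf);
  try {
   long count = sc.parallelize(Arrays.asList(1, 2, 3)).count();
   System.out.println("count = " + count);
  } finally {
   // Only one active SparkContext is allowed per JVM; stopping it releases
   // the executors, the web UI port, and local temp directories.
   sc.stop();
  }
 }
}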

Code examples

Code example source: apache/drill

@Override
 public void close() {
  synchronized (LocalHiveSparkClient.class) {
   client = null;
  }
  if (sc != null) {
   sc.stop();
  }
 }
}

Code example source: apache/hive

void stop() {
 monitoredJobs.clear();
 sc.stop();
}

Code example source: apache/hive

@Override
 public void close() {
  synchronized (LocalHiveSparkClient.class) {
   if (--activeSessions == 0) {
    client = null;
    if (sc != null) {
     LOG.debug("Shutting down the SparkContext");
     sc.stop();
    }
   }
  }
 }
}
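
The Hive client above stops the shared context only when the last active session closes. A hypothetical, self-contained sketch of that reference-counting pattern (the class and method names are illustrative, not Hive's actual code):

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public final class SharedSparkContext {
 private static JavaSparkContext sc;
 private static int activeSessions;

 // Create the shared context lazily and count the caller in.
 public static synchronized JavaSparkContext acquire() {
  if (sc == null) {
   sc = new JavaSparkContext(new SparkConf().setMaster("local").setAppName("shared"));
  }
  activeSessions++;
  return sc;
 }

 // Stop the context only when the last caller releases it.
 public static synchronized void release() {
  if (--activeSessions == 0 && sc != null) {
   sc.stop();
   sc = null;
  }
 }
}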

Code example source: org.apache.spark/spark-core_2.11 (the same tearDown also appears verbatim in the spark-core and spark-core_2.10 artifacts)

@After
public void tearDown() {
 sc.stop();
 sc = null;
}
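
Each of these tearDown methods assumes a matching @Before that created the context. A minimal JUnit 4 pairing might look like this (the suite name is illustrative):

import org.apache.spark.api.java.JavaSparkContext;
import org.junit.After;
import org.junit.Before;

public class JavaSparkContextLifecycleSuite {
 private transient JavaSparkContext sc;

 @Before
 public void setUp() {
  // A local master keeps the suite self-contained; the app name is arbitrary.
  sc = new JavaSparkContext("local", "test");
 }

 @After
 public void tearDown() {
  sc.stop();
  sc = null; // let the stopped context be garbage collected between tests
 }
}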

Code example source: org.apache.spark/spark-core (also in spark-core_2.11)

@After
public void tearDown() {
 if (sc != null) {
  sc.stop();
  sc = null;
 }
}

Code example source: org.apache.spark/spark-core_2.10

@After
public void tearDown() throws SQLException {
 try {
  DriverManager.getConnection("jdbc:derby:target/JavaJdbcRDDSuiteDb;shutdown=true");
 } catch(SQLException e) {
  // Throw if not normal single database shutdown
  // https://db.apache.org/derby/docs/10.2/ref/rrefexcept71493.html
  if (e.getSQLState().compareTo("08006") != 0) {
   throw e;
  }
 }
 sc.stop();
 sc = null;
}
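
Note: Derby reports a successful single-database shutdown by throwing an SQLException with SQLState 08006, so the catch block swallows exactly that state, rethrows anything else, and only then stops the context.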

Code example source: org.apache.spark/spark-core (also in spark-core_2.11)

@Test
public void javaSparkContext() {
 String[] jars = new String[] {};
 java.util.Map<String, String> environment = new java.util.HashMap<>();
 new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
 new JavaSparkContext("local", "name", new SparkConf()).stop();
 new JavaSparkContext("local", "name").stop();
 new JavaSparkContext("local", "name", "sparkHome", "jarFile").stop();
 new JavaSparkContext("local", "name", "sparkHome", jars).stop();
 new JavaSparkContext("local", "name", "sparkHome", jars, environment).stop();
}
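
All of the constructors exercised here create and immediately stop a context. In current Spark code the context is more commonly obtained through a SparkSession (from the spark-sql artifact); a minimal sketch of the equivalent setup and shutdown:

import java.util.Arrays;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

public class SparkSessionStopExample {
 public static void main(String[] args) {
  SparkSession spark = SparkSession.builder()
    .master("local")
    .appName("name")
    .getOrCreate();
  // Wrap the session's SparkContext to get the Java-friendly RDD API.
  JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
  long count = jsc.parallelize(Arrays.asList(1, 2, 3)).count();
  System.out.println("count = " + count);
  // Stopping the session also stops the underlying SparkContext,
  // so a separate jsc.stop() call is not needed.
  spark.stop();
 }
}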

Code example source: org.apache.spark/spark-core (also in spark-core_2.11)

@Test
public void testPluginShutdownWithException() {
 // Verify an exception in one plugin shutdown does not affect the others
 String pluginNames = testPluginName + "," + testBadPluginName + "," + testPluginName;
 SparkConf conf = initializeSparkConf(pluginNames);
 sc = new JavaSparkContext(conf);
 assertEquals(3, numSuccessfulPlugins);
 sc.stop();
 sc = null;
 assertEquals(2, numSuccessfulTerminations);
}

Code example source: org.apache.spark/spark-core_2.11 (also in spark-core)

@Test
public void testAddMultiplePlugins() throws InterruptedException {
 // Load two plugins and verify they both execute.
 SparkConf conf = initializeSparkConf(testPluginName + "," + testSecondPluginName);
 sc = new JavaSparkContext(conf);
 assertEquals(2, numSuccessfulPlugins);
 sc.stop();
 sc = null;
 assertEquals(2, numSuccessfulTerminations);
}

Code example source: org.apache.spark/spark-core_2.11 (also in spark-core)

@Test
public void testAddPlugin() throws InterruptedException {
 // Load the sample TestExecutorPlugin, which will change the value of numSuccessfulPlugins
 SparkConf conf = initializeSparkConf(testPluginName);
 sc = new JavaSparkContext(conf);
 assertEquals(1, numSuccessfulPlugins);
 sc.stop();
 sc = null;
 assertEquals(1, numSuccessfulTerminations);
}
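
The three tests above exercise the executor plugin mechanism added in Spark 2.4: classes listed in the spark.executor.plugins configuration are loaded on each executor, and their shutdown() hooks run when sc.stop() tears the executors down (which is what numSuccessfulTerminations counts). A minimal sketch of such a plugin, assuming Spark 2.4's org.apache.spark.ExecutorPlugin interface (Spark 3.x replaced it with org.apache.spark.api.plugin.SparkPlugin); the class name is illustrative:

import org.apache.spark.ExecutorPlugin;

public class CountingExecutorPlugin implements ExecutorPlugin {
 @Override
 public void init() {
  // Called once when the executor starts.
  System.out.println("plugin initialized");
 }

 @Override
 public void shutdown() {
  // Called when the executor shuts down, e.g. after JavaSparkContext.stop().
  System.out.println("plugin shut down");
 }
}

Such a plugin would be registered with conf.set("spark.executor.plugins", "CountingExecutorPlugin") before the context is created.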
