Usage of org.apache.spark.util.Utils.deleteRecursively() with code examples

This article collects Java code examples of the org.apache.spark.util.Utils.deleteRecursively() method and shows how it is used in practice. The examples are drawn from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, so they make useful references. Details of Utils.deleteRecursively() follow:
Package: org.apache.spark.util
Class: Utils
Method: deleteRecursively

About Utils.deleteRecursively

Deletes a file or directory together with all of its contents, recursively. According to the method's documentation in the Spark source, directories that are symlinks are not followed, and an exception is thrown if deletion is unsuccessful.
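
A minimal, self-contained sketch of the call pattern (the class name, directory names, and file contents below are illustrative, not taken from the examples in this article):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

import org.apache.spark.util.Utils;

public class DeleteRecursivelyExample {
 public static void main(String[] args) throws IOException {
  // Create a temporary directory containing a nested file.
  File tempDir = Files.createTempDirectory("delete-recursively-demo").toFile();
  File nested = new File(tempDir, "nested");
  if (!nested.mkdirs()) {
   throw new IOException("could not create " + nested);
  }
  Files.write(new File(nested, "data.txt").toPath(), "hello".getBytes());

  // One call removes the directory and everything underneath it.
  // deleteRecursively throws on failure, so there is no return value to check.
  Utils.deleteRecursively(tempDir);
  System.out.println("deleted: " + !tempDir.exists()); // prints "deleted: true"
 }
}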

Code examples

Code example source: org.apache.spark/spark-core_2.11 (identical code also appears in spark-core_2.10 and spark-core)

@After
public void tearDown() {
 try {
  // cleanUpAllAllocatedMemory() returns the number of leaked bytes; a clean test frees everything.
  assertEquals(0L, taskMemoryManager.cleanUpAllAllocatedMemory());
 } finally {
  // Delete the temporary directory even if the leak assertion above fails.
  Utils.deleteRecursively(tempDir);
  tempDir = null;
 }
}

Code example source: org.apache.spark/spark-core_2.11 (identical code also appears in spark-core_2.10 and spark-core)

@Test
public void sequenceFile() {
 File tempDir = Files.createTempDir();
 tempDir.deleteOnExit();
 String outputDir = new File(tempDir, "output").getAbsolutePath();
 List<Tuple2<Integer, String>> pairs = Arrays.asList(
  new Tuple2<>(1, "a"),
  new Tuple2<>(2, "aa"),
  new Tuple2<>(3, "aaa")
 );
 JavaPairRDD<Integer, String> rdd = sc.parallelizePairs(pairs);
 rdd.mapToPair(pair -> new Tuple2<>(new IntWritable(pair._1()), new Text(pair._2())))
  .saveAsHadoopFile(outputDir, IntWritable.class, Text.class, SequenceFileOutputFormat.class);
 // Read the output back as a sequence file and unwrap the Writable types
 JavaPairRDD<Integer, String> readRDD = sc.sequenceFile(outputDir, IntWritable.class, Text.class)
  .mapToPair(pair -> new Tuple2<>(pair._1().get(), pair._2().toString()));
 Assert.assertEquals(pairs, readRDD.collect());
 Utils.deleteRecursively(tempDir);
}

Code example source: org.apache.spark/spark-core_2.10 (identical code also appears in spark-core and spark-core_2.11)

@After
public void tearDown() {
 Utils.deleteRecursively(tempDir);
 final long leakedMemory = taskMemoryManager.cleanUpAllAllocatedMemory();
 if (leakedMemory != 0) {
  fail("Test leaked " + leakedMemory + " bytes of managed memory");
 }
}

Code example source: org.apache.spark/spark-core_2.10 (identical code also appears in spark-core_2.11 and spark-core)

@After
public void tearDown() {
 Utils.deleteRecursively(tempDir);
 tempDir = null;
 if (taskMemoryManager != null) {
  Assert.assertEquals(0L, taskMemoryManager.cleanUpAllAllocatedMemory());
  long leakedMemory = taskMemoryManager.getMemoryConsumptionForThisTask();
  taskMemoryManager = null;
  Assert.assertEquals(0L, leakedMemory);
 }
}

Code example source: org.apache.spark/spark-sql_2.11 (identical code also appears in spark-sql)

@After
public void tearDown() {
 try {
  Utils.deleteRecursively(new File(input));
 } finally {
  spark.stop();
  spark = null;
 }
}

Code example source: org.apache.spark/spark-mllib_2.11 (identical code also appears in spark-mllib_2.10 and spark-mllib)

@Override
public void tearDown() {
 super.tearDown();
 Utils.deleteRecursively(tempDir);
}
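
Every example above follows the same idiom: create a temporary directory during test setup, then in tearDown delete it with Utils.deleteRecursively and null out the field so later code cannot touch the deleted path. A condensed, hypothetical JUnit 4 skeleton of that pattern (the class name is invented here; Files.createTempDir() is the Guava helper already used in the sequenceFile example):

import java.io.File;

import com.google.common.io.Files;
import org.junit.After;
import org.junit.Before;

import org.apache.spark.util.Utils;

public class TempDirSuiteSkeleton {
 private File tempDir;

 @Before
 public void setUp() {
  // Guava helper, as in the sequenceFile example above.
  tempDir = Files.createTempDir();
 }

 @After
 public void tearDown() {
  // Remove the directory and all of its contents; throws if deletion fails.
  Utils.deleteRecursively(tempDir);
  // Clear the field so a failed or later test cannot reuse the deleted path.
  tempDir = null;
 }
}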
