org.apache.hadoop.fs.FileUtil.unTar()方法的使用及代码示例

x33g5p2x  于2022-01-19 转载在 其他  
字(8.6k)|赞(0)|评价(0)|浏览(341)

本文整理了Java中org.apache.hadoop.fs.FileUtil.unTar()方法的一些代码示例,展示了FileUtil.unTar()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。FileUtil.unTar()方法的具体详情如下:
包路径:org.apache.hadoop.fs.FileUtil
类名称:FileUtil
方法名:unTar

FileUtil.unTar介绍

[英]Given a Tar File as input it will untar the file in the untar directory passed as the second parameter. This utility will untar ".tar" files and ".tar.gz","tgz" files.
[中]给定一个Tar文件作为输入,它将在作为第二个参数传递的untar目录中解压该文件。此实用程序将解压“.Tar”文件和“.Tar.gz”、“tgz”文件。

代码示例

代码示例来源:origin: apache/ignite

archiveNameLC.endsWith(".tgz") ||
  archiveNameLC.endsWith(".tar"))
  FileUtil.unTar(archiveFile, dstPath);
else
  throw new IOException("Cannot unpack archive [path=" + srcPath + ", jobId=" + jobId + ']');

代码示例来源:origin: KylinOLAP/Kylin

FileUtil.unTar(exportFile, folder);
String[] child = folder.list();
Preconditions.checkState(child.length == 1);

代码示例来源:origin: jetoile/hadoop-unit

/**
 * Extracts the given Oozie tar archive into a freshly created temporary
 * directory located under {@code oozieTmpDir}.
 *
 * @param fullOozieTarFilePath the tar (or tar.gz/tgz) archive to extract
 * @return absolute path of the temporary directory holding the extracted content
 * @throws IOException if the temporary directory cannot be created or extraction fails
 */
public String extractOozieTarFileToTempDir(File fullOozieTarFilePath) throws IOException {
  File tempDir = File.createTempFile(OozieConfig.SHARE_LIB_LOCAL_TEMP_PREFIX, "", Paths.get(oozieTmpDir).toFile());
  // Turn the unique temp *file* into a directory. The original ignored the
  // boolean results of delete()/mkdir(); a silent failure would make unTar
  // extract into a non-directory path. Fail loudly instead.
  if (!tempDir.delete() || !tempDir.mkdir()) {
    throw new IOException("Could not create temp directory '" + tempDir + "'");
  }
  tempDir.deleteOnExit();
  FileUtil.unTar(fullOozieTarFilePath, tempDir);
  return tempDir.getAbsolutePath();
}

代码示例来源:origin: jetoile/hadoop-unit

/**
 * Extracts the Oozie share-lib tar archive into a new temporary directory and
 * prunes the framework directories that were not requested, to keep the
 * resulting classpath small.
 *
 * @param fullOozieShareLibTarFilePath the share-lib tar archive to extract
 * @return absolute path of the temporary directory with the (pruned) share lib
 * @throws IOException if the temporary directory cannot be created or extraction fails
 */
public String extractOozieShareLibTarFileToTempDir(File fullOozieShareLibTarFilePath) throws IOException {
  File tempDir = File.createTempFile(SHARE_LIB_LOCAL_TEMP_PREFIX, "");
  // Turn the unique temp *file* into a directory; check the results instead of
  // silently continuing when delete()/mkdir() fail.
  if (!tempDir.delete() || !tempDir.mkdir()) {
    throw new IOException("Could not create temp directory '" + tempDir + "'");
  }
  tempDir.deleteOnExit();
  FileUtil.unTar(fullOozieShareLibTarFilePath, tempDir);
  // Remove shared lib to try to get the CP down.
  // BUG FIX: the original condition used '||', which throws an NPE when
  // oozieShareLibFrameworks is null (the right operand is still evaluated).
  // Only prune when an explicit, non-empty framework selection exists.
  if (oozieShareLibFrameworks != null && !oozieShareLibFrameworks.isEmpty()) {
    Arrays.stream(Framework.values()).forEach(framework -> {
      if (!oozieShareLibFrameworks.contains(framework)) {
        LOGGER.info("OOZIE: Excluding framework " + framework.getValue() + " from shared lib.");
        File removeShareLibDir = new File(tempDir.getAbsolutePath() + "/share/lib/" + framework.getValue());
        if (removeShareLibDir.isDirectory()) {
          try {
            org.apache.commons.io.FileUtils.deleteDirectory(removeShareLibDir);
          } catch (IOException e) {
            // Best-effort pruning: log and continue with the remaining frameworks.
            LOGGER.error("unable to delete directory {}", removeShareLibDir);
          }
        }
      }
    });
  }
  return tempDir.getAbsolutePath();
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs-test

throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
FileUtil.unTar(new File(tarFile), new File(dataDir));

代码示例来源:origin: apache/attic-mrunit

/**
 * Extract an archive to the temp directory.
 * Code borrowed from Hadoop's TrackerDistributedCacheManager
 *
 * @param cacheArchive the cache archive to extract
 * @param tmpDir root location of temp directory
 * @return the path to the extracted archive
 * @throws IOException
 */
/**
 * Extract an archive (.jar, .zip, or a tar variant) to the temp directory.
 * Code borrowed from Hadoop's TrackerDistributedCacheManager.
 *
 * @param cacheArchive the cache archive to extract
 * @param tmpDir root location of temp directory
 * @return the path to the extracted archive, or the original path when the
 *         extension is not a recognized archive type
 * @throws IOException
 */
public static Path extractArchiveToTemp(Path cacheArchive, File tmpDir) throws IOException {
  final String lowerName = cacheArchive.getName().toLowerCase();
  final File srcFile = new File(cacheArchive.toString());
  final File destDir = new File(tmpDir, srcFile.getName());
  LOG.debug(String.format("Extracting %s to %s", srcFile.toString(), destDir.toString()));
  if (lowerName.endsWith(".jar")) {
    RunJar.unJar(srcFile, destDir);
  } else if (lowerName.endsWith(".zip")) {
    FileUtil.unZip(srcFile, destDir);
  } else if (isTarFile(lowerName)) {
    FileUtil.unTar(srcFile, destDir);
  } else {
    // Unknown extension: warn and hand the archive back untouched.
    LOG.warn(String.format(
      "Cache file %s specified as archive, but not valid extension.",
      srcFile.toString()));
    return cacheArchive;
  }
  return new Path(destDir.toString());
}

代码示例来源:origin: org.apache.hadoop/hadoop-mapred

FileUtil.unZip(srcFile, destDir);
} else if (isTarFile(tmpArchive)) {
 FileUtil.unTar(srcFile, destDir);
} else {
 LOG.warn(String.format(

代码示例来源:origin: com.facebook.hadoop/hadoop-core

FileUtil.unZip(srcFile, destDir);
} else if (isTarFile(tmpArchive)) {
 FileUtil.unTar(srcFile, destDir);

代码示例来源:origin: com.github.jiayuhan-it/hadoop-yarn-common

lowerDst.endsWith(".tgz") ||
      lowerDst.endsWith(".tar")) {
 FileUtil.unTar(localrsrc, dst);
} else {
 LOG.warn("Cannot unpack " + localrsrc);
 LOG.warn("Treating [" + localrsrc + "] as an archive even though it " +
 "was specified as PATTERN");
 FileUtil.unTar(localrsrc, dst);
} else {
 LOG.warn("Cannot unpack " + localrsrc);

代码示例来源:origin: org.apache.hadoop/hadoop-yarn-common

lowerDst.endsWith(".tgz") ||
  lowerDst.endsWith(".tar")) {
 FileUtil.unTar(inputStream, dst, lowerDst.endsWith("gz"));
} else {
 LOG.warn("Cannot unpack " + source);
 LOG.warn("Treating [" + source + "] as an archive even though it " +
   "was specified as PATTERN");
 FileUtil.unTar(inputStream, dst, lowerDst.endsWith("gz"));
} else {
 LOG.warn("Cannot unpack " + source);

代码示例来源:origin: ch.cern.hadoop/hadoop-yarn-common

lowerDst.endsWith(".tgz") ||
      lowerDst.endsWith(".tar")) {
 FileUtil.unTar(localrsrc, dst);
} else {
 LOG.warn("Cannot unpack " + localrsrc);
 LOG.warn("Treating [" + localrsrc + "] as an archive even though it " +
 "was specified as PATTERN");
 FileUtil.unTar(localrsrc, dst);
} else {
 LOG.warn("Cannot unpack " + localrsrc);

代码示例来源:origin: org.apache.oozie/oozie-tools

FileUtil.unTar(srcFile, temp);
srcFile = new File(temp.toString() + "/share/lib");

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

FileUtil.unTar(new File(tarFile), new File(dataDir));

代码示例来源:origin: ch.cern.hadoop/hadoop-common

// Untars tarFile into untarDir (after clearing any previous contents) and
// verifies that each expected file exists with exactly the expected length.
private void doUntarAndVerify(File tarFile, File untarDir)
                throws IOException {
  if (untarDir.exists() && !FileUtil.fullyDelete(untarDir)) {
    throw new IOException("Could not delete directory '" + untarDir + "'");
  }
  FileUtil.unTar(tarFile, untarDir);
  String parentDir = untarDir.getCanonicalPath() + Path.SEPARATOR + "name";
  // {directory, file name, expected length} for every file the tar must contain.
  String[][] expected = {
    { parentDir, "version", "0" },
    { parentDir + Path.SEPARATOR + "image", "fsimage", "157" },
    { parentDir + Path.SEPARATOR + "current", "fsimage", "4331" },
    { parentDir + Path.SEPARATOR + "current", "edits", "1033" },
    { parentDir + Path.SEPARATOR + "current", "fstime", "8" },
  };
  for (String[] entry : expected) {
    File testFile = new File(entry[0] + Path.SEPARATOR + entry[1]);
    Assert.assertTrue(testFile.exists());
    Assert.assertTrue(testFile.length() == Long.parseLong(entry[2]));
  }
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

// Extracts tarFile into a clean untarDir and checks the extracted layout:
// every expected file must be present with its exact byte length.
private void doUntarAndVerify(File tarFile, File untarDir)
                throws IOException {
  if (untarDir.exists() && !FileUtil.fullyDelete(untarDir)) {
    throw new IOException("Could not delete directory '" + untarDir + "'");
  }
  FileUtil.unTar(tarFile, untarDir);
  String nameDir = untarDir.getCanonicalPath() + Path.SEPARATOR + "name";
  String imageDir = nameDir + Path.SEPARATOR + "image";
  String currentDir = nameDir + Path.SEPARATOR + "current";
  // Parallel arrays: paths[i] must exist with length sizes[i].
  String[] paths = {
    nameDir + Path.SEPARATOR + "version",
    imageDir + Path.SEPARATOR + "fsimage",
    currentDir + Path.SEPARATOR + "fsimage",
    currentDir + Path.SEPARATOR + "edits",
    currentDir + Path.SEPARATOR + "fstime",
  };
  long[] sizes = { 0, 157, 4331, 1033, 8 };
  for (int i = 0; i < paths.length; i++) {
    File testFile = new File(paths[i]);
    Assert.assertTrue(testFile.exists());
    Assert.assertTrue(testFile.length() == sizes[i]);
  }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

FileUtil.unTar(simpleTar, tmp);
assertTrue(regularFile.exists());
try {
 FileUtil.unTar(simpleTar, regularFile);
 assertTrue("An IOException expected.", false);
} catch (IOException ioe) {

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

FileUtil.unTar(simpleTar, tmp);
assertTrue(regularFile.exists());
try {
 FileUtil.unTar(simpleTar, regularFile);
 assertTrue("An IOException expected.", false);
} catch (IOException ioe) {

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
FileUtil.unTar(new File(tarFile), new File(testDir));

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
FileUtil.unTar(new File(tarFile), new File(testDir));

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
FileUtil.unTar(new File(tarFile), new File(testDir));
File nameDir = new File(dfsDir, "name");
GenericTestUtils.assertExists(nameDir);

相关文章