org.apache.hadoop.hive.ql.exec.Utilities.getTaskIdFromFilename()方法的使用及代码示例

x33g5p2x  于2022-02-01 转载在 其他  
字(6.4k)|赞(0)|评价(0)|浏览(71)

本文整理了Java中org.apache.hadoop.hive.ql.exec.Utilities.getTaskIdFromFilename()方法的一些代码示例,展示了Utilities.getTaskIdFromFilename()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Utilities.getTaskIdFromFilename()方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.exec.Utilities
类名称:Utilities
方法名:getTaskIdFromFilename

Utilities.getTaskIdFromFilename介绍

[英]Get the task id from the filename. It is assumed that the filename is derived from the output of getTaskId
[中]从文件名中获取任务id。假定文件名是从getTaskId的输出中派生出来的

代码示例

代码示例来源:origin: apache/hive

/**
 * Rewrites the task-id portion embedded in {@code filename}, substituting the
 * id derived from {@code fileId} for the one currently present.
 *
 * @param filename a filename assumed to follow the getTaskId output format
 * @param fileId   the replacement file id used to build the new task id
 * @return the filename with its task id replaced
 */
public static String replaceTaskIdFromFilename(String filename, String fileId) {
 String currentTaskId = getTaskIdFromFilename(filename);
 return replaceTaskIdFromFilename(filename, currentTaskId, replaceTaskId(currentTaskId, fileId));
}

代码示例来源:origin: apache/drill

/**
 * Replaces the task id embedded in the given filename with a new task id
 * computed from {@code fileId}.
 *
 * @param filename a filename derived from the output of getTaskId
 * @param fileId   source for the replacement task id
 * @return the filename carrying the new task id
 */
public static String replaceTaskIdFromFilename(String filename, String fileId) {
 final String oldId = getTaskIdFromFilename(filename);
 final String newId = replaceTaskId(oldId, fileId);
 return replaceTaskIdFromFilename(filename, oldId, newId);
}

代码示例来源:origin: apache/hive

/**
 * Verifies that each raw task-id string in {@code taskIds} resolves to the
 * expected attempt id via {@link Utilities#getTaskId} and to the expected
 * task id via {@link Utilities#getTaskIdFromFilename}, including the ".gz"
 * suffixed and temp-path variants of the attempt id.
 *
 * @param taskIds           raw values to set as "mapred.task.id"
 * @param expectedAttemptId attempt id expected from Utilities.getTaskId
 * @param expectedTaskId    task id expected from Utilities.getTaskIdFromFilename
 */
private void testTaskIds(String [] taskIds, String expectedAttemptId, String expectedTaskId) {
 Configuration conf = new JobConf(TestOperators.class);
 for (String one: taskIds) {
  conf.set("mapred.task.id", one);
  String attemptId = Utilities.getTaskId(conf);
  assertEquals(expectedAttemptId, attemptId);
  // JUnit convention: expected value first, actual second — the original had
  // these swapped, which inverts the failure message.
  assertEquals(expectedTaskId, Utilities.getTaskIdFromFilename(attemptId));
  assertEquals(expectedTaskId, Utilities.getTaskIdFromFilename(attemptId + ".gz"));
  assertEquals(expectedTaskId, Utilities.getTaskIdFromFilename
         (Utilities.toTempPath(new Path(attemptId + ".gz")).toString()));
 }
}

代码示例来源:origin: apache/hive

int currReducer = Integer.parseInt(Utilities.getTaskIdFromFilename(Utilities
  .getTaskId(hconf)));

代码示例来源:origin: apache/drill

int currReducer = Integer.parseInt(Utilities.getTaskIdFromFilename(Utilities
  .getTaskId(hconf)));

代码示例来源:origin: apache/hive

String taskID = Utilities.getTaskIdFromFilename(fspKey);

代码示例来源:origin: apache/hive

/**
 * More stuff needs to be added here. Currently it only checks some basic
 * file naming libraries
 * The old test was deactivated as part of hive-405
 */
/**
 * More stuff needs to be added here. Currently it only checks some basic
 * file naming libraries.
 * The old test was deactivated as part of hive-405.
 *
 * @throws Throwable rethrown after printing the stack trace so the test
 *                   framework still records the failure
 */
public void testFileSinkOperator() throws Throwable {
 try {
  testTaskIds (new String [] {
    "attempt_200707121733_0003_m_000005_0",
    "attempt_local_0001_m_000005_0",
    "task_200709221812_0001_m_000005_0",
    "task_local_0001_m_000005_0"
   }, "000005_0", "000005");
  testTaskIds (new String [] {
    "job_local_0001_map_000005",
    "job_local_0001_reduce_000005",
   }, "000005", "000005");
  testTaskIds (new String [] {"1234567"},
         "1234567", "1234567");
  // JUnit convention: expected value first, actual second — the original had
  // these swapped, which inverts the failure message.
  assertEquals("000005", Utilities.getTaskIdFromFilename
         ("/mnt/dev005/task_local_0001_m_000005_0"));
  System.out.println("FileSink Operator ok");
 } catch (Throwable e) {
  e.printStackTrace();
  throw e;
 }
}

代码示例来源:origin: apache/hive

int acidBucketNum = Integer.parseInt(Utilities.getTaskIdFromFilename(taskId));
fsp.updaters[filesIdx] = HiveFileFormatUtils.getAcidRecordUpdater(jc, conf.getTableInfo(),
  acidBucketNum, conf, fsp.outPaths[filesIdx], inspector, reporter, -1);

代码示例来源:origin: apache/drill

String taskID = Utilities.getTaskIdFromFilename(fspKey);

代码示例来源:origin: apache/drill

int acidBucketNum = Integer.parseInt(Utilities.getTaskIdFromFilename(taskId));
fsp.updaters[filesIdx] = HiveFileFormatUtils.getAcidRecordUpdater(jc, conf.getTableInfo(),
  acidBucketNum, conf, fsp.outPaths[filesIdx], inspector, reporter, -1);

代码示例来源:origin: com.facebook.presto.hive/hive-apache

/**
 * Substitutes the task id found in {@code filename} with one derived from
 * {@code fileId}, delegating to the three-argument overload.
 *
 * @param filename filename produced in the getTaskId format
 * @param fileId   value from which the replacement task id is computed
 * @return the rewritten filename
 */
public static String replaceTaskIdFromFilename(String filename, String fileId) {
 String extracted = getTaskIdFromFilename(filename);
 String replacement = replaceTaskId(extracted, fileId);
 String rewritten = replaceTaskIdFromFilename(filename, extracted, replacement);
 return rewritten;
}

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

/**
 * Replace the task id from the filename. It is assumed that the filename is
 * derived from the output of getTaskId.
 *
 * @param filename  filename to replace taskid "0_0" or "0_0.gz" by 33 to
 *                  "33_0" or "33_0.gz"
 * @param bucketNum bucket number used to build the replacement task id
 * @return the filename with its task id replaced
 */
public static String replaceTaskIdFromFilename(String filename, int bucketNum) {
 final String current = getTaskIdFromFilename(filename);
 final String updated = replaceTaskId(current, bucketNum);
 return replaceTaskIdFromFilename(filename, current, updated);
}

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

String taskId = getTaskIdFromFilename(one.getPath().getName());
FileStatus otherFile = taskIdToFile.get(taskId);
if (otherFile == null) {

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

/**
 * Publishes the collected row-count statistic for this task to the configured
 * stats publisher. The storage key is the stats-aggregation prefix plus the
 * task id, with the partition spec inserted for partitioned tables. A failed
 * publisher connection is logged and ignored so that stats gathering never
 * blocks the main query.
 */
private void publishStats() {
  // Initializing a stats publisher
  StatsPublisher statsPublisher = Utilities.getStatsPublisher(jc);
  if (!statsPublisher.connect(jc)) {
   // just return, stats gathering should not block the main query.
   LOG.info("StatsPublishing error: cannot connect to database.");
   return;
  }

  String taskID = Utilities.getTaskIdFromFilename(Utilities.getTaskId(hconf));
  // Non-partitioned table: key is "tableName + taskID".
  // Partitioned table:     key is "tableName + partitionSpecs + taskID".
  String key = partitionSpecs.isEmpty()
    ? conf.getStatsAggPrefix() + taskID
    : conf.getStatsAggPrefix() + partitionSpecs + Path.SEPARATOR + taskID;

  statsPublisher.publishStat(key, StatsSetupConst.ROW_COUNT, Long.toString(stat.getNumRows()));
  statsPublisher.closeConnection();
 }
}

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

String taskID = Utilities.getTaskIdFromFilename(Utilities.getTaskId(hconf));
String spSpec = conf.getStaticSpec() != null ? conf.getStaticSpec() : "";

代码示例来源:origin: com.facebook.presto.hive/hive-apache

String taskID = Utilities.getTaskIdFromFilename(Utilities.getTaskId(hconf));
Map<String, String> statsToPublish = new HashMap<String, String>();

代码示例来源:origin: com.facebook.presto.hive/hive-apache

int currReducer = Integer.valueOf(Utilities.getTaskIdFromFilename(Utilities
  .getTaskId(hconf)));

代码示例来源:origin: com.facebook.presto.hive/hive-apache

String taskID = Utilities.getTaskIdFromFilename(Utilities.getTaskId(jc));
key = Utilities.join(key, taskID);

代码示例来源:origin: com.facebook.presto.hive/hive-apache

String taskID = Utilities.getTaskIdFromFilename(Utilities.getTaskId(hconf));
String spSpec = conf.getStaticSpec();
  taskID = Utilities.getTaskIdFromFilename(fspKey);

代码示例来源:origin: com.facebook.presto.hive/hive-apache

int acidBucketNum = Integer.valueOf(Utilities.getTaskIdFromFilename(taskId));
fsp.updaters[filesIdx] = HiveFileFormatUtils.getAcidRecordUpdater(jc, conf.getTableInfo(),
  acidBucketNum, conf, fsp.outPaths[filesIdx], inspector, reporter, -1);

相关文章

微信公众号

最新文章

更多

Utilities类方法