Usage and code examples of org.apache.hadoop.hive.ql.exec.Utilities.copyTableJobPropertiesToConf()


This article collects a number of Java code examples showing how org.apache.hadoop.hive.ql.exec.Utilities.copyTableJobPropertiesToConf() is used in practice. The examples were extracted from selected projects hosted on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as useful references. Details of the method follow:
Package: org.apache.hadoop.hive.ql.exec
Class: Utilities
Method: copyTableJobPropertiesToConf

About Utilities.copyTableJobPropertiesToConf

Copies the storage handler properties configured for a table descriptor to a runtime job configuration.
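Before the project excerpts, here is a minimal, self-contained sketch of the typical call pattern. This is an illustrative assumption rather than code from any of the projects quoted below: the class and method names are made up for the example, and note that recent Hive releases declare a checked HiveException on copyTableJobPropertiesToConf (older ones do not), which the catch block below converts to an IOException in the style of the apache/hive excerpt further down.

import java.io.IOException;

import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.mapred.JobConf;

public class TableJobConfExample {

 /**
  * Illustrative helper: merge the storage-handler properties declared on a
  * table descriptor into a private copy of the given job configuration.
  */
 public static JobConf configureJobForTable(TableDesc tableDesc, JobConf jobConf)
   throws IOException {
  // Clone first so the caller's configuration is not mutated.
  final JobConf cloneJobConf = new JobConf(jobConf);
  try {
   if (tableDesc != null) {
    Utilities.copyTableJobPropertiesToConf(tableDesc, cloneJobConf);
   }
  } catch (Exception e) {
   // Newer Hive versions throw a checked HiveException from the copy.
   throw new IOException(e);
  }
  return cloneJobConf;
 }
}

The clone-then-copy pattern recurs throughout the excerpts: copyTableJobPropertiesToConf mutates the configuration it is given, so callers that share one JobConf across several tables or partitions copy it per table first.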

Code examples

Code example source: apache/drill

public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path)
   throws IOException {
  updateMrWork(jobConf);  // TODO: refactor this in HIVE-6366
  final JobConf cloneJobConf = new JobConf(jobConf);
  final PartitionDesc part = pathToPartitionInfo.get(path);

  if ((part != null) && (part.getTableDesc() != null)) {
   Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
  }
  pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
  return cloneJobConf;
 }

Code example source: apache/hive

public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path)
   throws IOException {
  updateMrWork(jobConf);  // TODO: refactor this in HIVE-6366
  final JobConf cloneJobConf = new JobConf(jobConf);
  final PartitionDesc part = HiveFileFormatUtils.getFromPathRecursively(
    pathToPartitionInfo, path, null, false, true);

  try {
   if ((part != null) && (part.getTableDesc() != null)) {
    Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
   }
  } catch (Exception e) {
   throw new IOException(e);
  }

  pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
  return cloneJobConf;
 }
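Compared with the apache/drill variant above, this version looks up the PartitionDesc with HiveFileFormatUtils.getFromPathRecursively instead of a plain map lookup, and wraps the copy in a try/catch: newer Hive releases declare a checked exception on copyTableJobPropertiesToConf, which this method can only surface as an IOException.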

Code example source: apache/hive

// Truncated excerpt: the table descriptor's properties are copied into the job
// before the InputFormat is looked up and the input directory lists are built.
Utilities.copyTableJobPropertiesToConf(currDesc.getTableDesc(), job);
InputFormat inputFormat = getInputFormatFromCache(formatter, job);
List<Path> dirs = new ArrayList<>(), dirsWithOriginals = new ArrayList<>();

Code example source: apache/hive

private void createHiveOutputFormat(JobConf job) throws HiveException {
 if (hiveOutputFormat == null) {
  Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), job);
 }
 try {
  hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(job, getConf().getTableInfo());
 } catch (Throwable t) {
  throw (t instanceof HiveException) ? (HiveException)t : new HiveException(t);
 }
}

Code example source: apache/drill

public InputSplit[] doGetSplits(JobConf job, int numSplits) throws IOException {
 super.init(job);
 Path[] dirs = FileInputFormat.getInputPaths(job);
 if (dirs.length == 0) {
  throw new IOException("No input paths specified in job");
 }
 JobConf newjob = new JobConf(job);
 ArrayList<InputSplit> result = new ArrayList<InputSplit>();
 // for each dir, get the InputFormat, and do getSplits.
 PartitionDesc part;
 for (Path dir : dirs) {
  part = HiveFileFormatUtils
    .getPartitionDescFromPathRecursively(pathToPartitionInfo, dir,
      IOPrepareCache.get().allocatePartitionDescMap(), true);
  // create a new InputFormat instance if this is the first time to see this
  // class
  Class inputFormatClass = part.getInputFileFormatClass();
  InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
  Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), newjob);
  FileInputFormat.setInputPaths(newjob, dir);
  newjob.setInputFormat(inputFormat.getClass());
  InputSplit[] iss = inputFormat.getSplits(newjob, numSplits / dirs.length);
  for (InputSplit is : iss) {
   result.add(new HiveInputSplit(is, inputFormatClass.getName()));
  }
 }
 return result.toArray(new HiveInputSplit[result.size()]);
}

Code example source: apache/drill

private void createHiveOutputFormat(Configuration hconf) throws HiveException {
 if (hiveOutputFormat == null) {
  Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), hconf);
 }
 try {
  hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, getConf().getTableInfo());
 } catch (Throwable t) {
  throw (t instanceof HiveException) ? (HiveException)t : new HiveException(t);
 }
}

Code example source: apache/hive

// Truncated excerpt: configure the storage handler's input properties on the
// table descriptor, copy them into the job conf, then estimate the scan size.
TableDesc tableDesc = Utilities.getTableDesc(table);
PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc);
Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf);
long len = estimator.estimate(jobConf, scanOp, threshold).getTotalLength();
if (LOG.isDebugEnabled()) {

Code example source: apache/hive

// Truncated excerpt: set the column type list and storage-handler properties
// before running the estimator with no size threshold (-1).
Utilities.setColumnTypeList(jobConf, scanOp, true);
PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc);
Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf);
total += estimator.estimate(jobConf, scanOp, -1).getTotalLength();

Code example source: apache/drill

Utilities.setColumnTypeList(jobConf, scanOp, true);
PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc);
Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf);
total += estimator.estimate(jobConf, scanOp, -1).getTotalLength();

Code example source: apache/drill

TableDesc tableDesc = Utilities.getTableDesc(table);
PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc);
Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf);
long len = estimator.estimate(jobConf, scanOp, threshold).getTotalLength();
if (LOG.isDebugEnabled()) {

Code example source: apache/drill

protected FetchInputFormatSplit[] getNextSplits() throws Exception {
 while (getNextPath()) {
  // not using FileInputFormat.setInputPaths() here because it forces a connection to the
  // default file system - which may or may not be online during pure metadata operations
  job.set("mapred.input.dir", StringUtils.escapeString(currPath.toString()));
  // Fetch operator is not vectorized and as such turn vectorization flag off so that
  // non-vectorized record reader is created below.
  HiveConf.setBoolVar(job, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  Class<? extends InputFormat> formatter = currDesc.getInputFileFormatClass();
  Utilities.copyTableJobPropertiesToConf(currDesc.getTableDesc(), job);
  InputFormat inputFormat = getInputFormatFromCache(formatter, job);
  InputSplit[] splits = inputFormat.getSplits(job, 1);
  FetchInputFormatSplit[] inputSplits = new FetchInputFormatSplit[splits.length];
  for (int i = 0; i < splits.length; i++) {
   inputSplits[i] = new FetchInputFormatSplit(splits[i], inputFormat);
  }
  if (work.getSplitSample() != null) {
   inputSplits = splitSampling(work.getSplitSample(), inputSplits);
  }
  if (inputSplits.length > 0) {
   return inputSplits;
  }
 }
 return null;
}

Code example source: apache/hive

Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), jc);

Code example source: apache/hive

Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);
nonNative = part.getTableDesc().isNonNative();

Code example source: apache/hive

Utilities.copyTableJobPropertiesToConf(
 Utilities.getTableDesc(tbl),
 jobConf);

Code example source: apache/drill

Utilities.setColumnNameList(jobConf, tableScanOp);
Utilities.setColumnTypeList(jobConf, tableScanOp);
Utilities.copyTableJobPropertiesToConf(
 Utilities.getTableDesc(tbl),
 jobConf);

Code example source: apache/drill

Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), jc);

Code example source: apache/drill

Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);
nonNative = part.getTableDesc().isNonNative();

Code example source: com.facebook.presto.hive/hive-apache

public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path)
   throws IOException {
  updateMrWork(jobConf);  // TODO: refactor this in HIVE-6366
  final JobConf cloneJobConf = new JobConf(jobConf);
  final PartitionDesc part = pathToPartitionInfo.get(path.toString());

  if ((part != null) && (part.getTableDesc() != null)) {
   Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
  }
  pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
  return cloneJobConf;
 }

Code example source: com.twitter/parquet-hive-0.10-binding

/**
 * {@inheritDoc}
 */
@Override
public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path)
  throws IOException {
 init(jobConf);
 final JobConf cloneJobConf = new JobConf(jobConf);
 final PartitionDesc part = pathToPartitionInfo.get(path.toString());
 if ((part != null) && (part.getTableDesc() != null)) {
  Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
 }
 pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().toString());
 return cloneJobConf;
}

Code example source: com.facebook.presto.hive/hive-apache

private void createHiveOutputFormat(Configuration hconf) throws HiveException {
 if (hiveOutputFormat == null) {
  Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), hconf);
 }
 try {
  hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, getConf().getTableInfo());
 } catch (Throwable t) {
  throw (t instanceof HiveException) ? (HiveException)t : new HiveException(t);
 }
}
