org.apache.hadoop.hive.ql.metadata.Partition.getInputFormatClass()方法的使用及代码示例

x33g5p2x  于2022-01-26 转载在 其他  
字(5.5k)|赞(0)|评价(0)|浏览(152)

本文整理了Java中org.apache.hadoop.hive.ql.metadata.Partition.getInputFormatClass()方法的一些代码示例，展示了Partition.getInputFormatClass()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台，是从一些精选项目中提取出来的代码，具有较强的参考意义，能在一定程度上帮助到你。Partition.getInputFormatClass()方法的具体详情如下：
包路径:org.apache.hadoop.hive.ql.metadata.Partition
类名称:Partition
方法名:getInputFormatClass

Partition.getInputFormatClass介绍

暂无

代码示例

代码示例来源:origin: apache/hive

/**
 * Delegates to the wrapped partition to resolve its input format class.
 *
 * @return the partition's {@link InputFormat} implementation class
 * @throws HiveException if the partition cannot resolve its input format class
 */
@Override
public Class<? extends InputFormat> getInputFormatClass() throws HiveException {
  final Class<? extends InputFormat> resolved = partition.getInputFormatClass();
  return resolved;
}

代码示例来源:origin: apache/hive

/**
 * Measures the length of {@code path} (bounded by {@code threshold}) via
 * {@code getPathLength} and folds it into the {@code total} accumulator.
 *
 * @return the accumulated total after adding this path's length
 * @throws Exception if the path length cannot be determined
 */
@Override
public Long call() throws Exception {
  final long pathLength = getPathLength(jobConf, path, partition.getInputFormatClass(), threshold);
  LOG.trace(path + ", length=" + pathLength);
  return total.addAndGet(pathLength);
}
}));

代码示例来源:origin: apache/drill

/**
 * Computes the size of {@code path} using the partition's input format
 * (capped by {@code threshold}) and adds it to the running {@code total}.
 *
 * @return the running total after this addition
 * @throws Exception if the length computation fails
 */
@Override
public Long call() throws Exception {
  final long measured = getPathLength(jobConf, path, partition.getInputFormatClass(), threshold);
  LOG.trace(path + ", length=" + measured);
  return total.addAndGet(measured);
}
}));

代码示例来源:origin: apache/drill

/**
 * Shared constructor helper: initializes this PartitionDesc from the given
 * partition's spec and file-format classes.
 *
 * @param part source partition supplying the spec and format classes
 * @param tblDesc table descriptor to store on this PartitionDesc
 * @param setInputFileFormat when true, the partition's input format is stored
 *        as this descriptor's input file format; otherwise it is stored as the
 *        output file format (but see the review note below)
 * @throws HiveException if a format class cannot be resolved from the partition
 */
private void PartitionDescConstructorHelper(final Partition part,final TableDesc tblDesc, boolean setInputFileFormat)
 throws HiveException {
 this.tableDesc = tblDesc;
 setPartSpec(part.getSpec());
 if (setInputFileFormat) {
  setInputFileFormatClass(part.getInputFormatClass());
 } else {
  // NOTE(review): this call is immediately overwritten by the unconditional
  // setOutputFileFormatClass(part.getOutputFormatClass()) below — confirm
  // whether the setter has side effects or this branch is effectively dead.
  setOutputFileFormatClass(part.getInputFormatClass());
 }
 setOutputFileFormatClass(part.getOutputFormatClass());
}

代码示例来源:origin: apache/hive

/**
 * Shared constructor helper: configures storage-handler input job properties
 * on the table descriptor, then initializes this PartitionDesc from the given
 * partition's spec and file-format classes.
 *
 * @param part source partition supplying the spec and format classes
 * @param tblDesc table descriptor to configure and store on this PartitionDesc
 * @param setInputFileFormat when true, the partition's input format is stored
 *        as this descriptor's input file format; otherwise it is stored as the
 *        output file format (but see the review note below)
 * @throws HiveException if a format class cannot be resolved from the partition
 */
private void PartitionDescConstructorHelper(final Partition part,final TableDesc tblDesc, boolean setInputFileFormat)
 throws HiveException {
 PlanUtils.configureInputJobPropertiesForStorageHandler(tblDesc);
 this.tableDesc = tblDesc;
 setPartSpec(part.getSpec());
 if (setInputFileFormat) {
  setInputFileFormatClass(part.getInputFormatClass());
 } else {
  // NOTE(review): this call is immediately overwritten by the unconditional
  // setOutputFileFormatClass(part.getOutputFormatClass()) below — confirm
  // whether the setter has side effects or this branch is effectively dead.
  setOutputFileFormatClass(part.getInputFormatClass());
 }
 setOutputFileFormatClass(part.getOutputFormatClass());
}

代码示例来源:origin: apache/hive

tblLoc = par.getDataLocation().toString();
inputFormattCls = par.getInputFormatClass() == null ? null : par.getInputFormatClass().getName();
outputFormattCls = par.getOutputFormatClass() == null ? null : par.getOutputFormatClass().getName();

代码示例来源:origin: apache/drill

tblLoc = par.getDataLocation().toString();
inputFormattCls = par.getInputFormatClass().getName();
outputFormattCls = par.getOutputFormatClass().getName();

代码示例来源:origin: apache/hive

/**
 * Verifies that the files being loaded are compatible with the destination's
 * input format (the partition's format for a partitioned load, otherwise the
 * table's), and fails the load with a SemanticException when they are not.
 *
 * @param ts destination table/partition spec
 * @param fileStatuses the candidate files to validate
 * @param fromURI URI used to resolve the source FileSystem
 * @throws SemanticException if the format cannot be resolved, the files do not
 *         match the destination format, or the check itself fails
 */
private void ensureFileFormatsMatch(TableSpec ts, List<FileStatus> fileStatuses,
  final URI fromURI)
  throws SemanticException {
 final Class<? extends InputFormat> destInputFormat;
 try {
  // Partitioned loads validate against the target partition's format.
  if (ts.getPartSpec() == null || ts.getPartSpec().isEmpty()) {
   destInputFormat = ts.tableHandle.getInputFormatClass();
  } else {
   destInputFormat = ts.partHandle.getInputFormatClass();
  }
 } catch (HiveException e) {
  throw new SemanticException(e);
 }
 try {
  FileSystem fs = FileSystem.get(fromURI, conf);
  boolean validFormat = HiveFileFormatUtils.checkInputFormat(fs, conf, destInputFormat,
    fileStatuses);
  if (!validFormat) {
   throw new SemanticException(ErrorMsg.INVALID_FILE_FORMAT_IN_LOAD.getMsg());
  }
 } catch (SemanticException e) {
  // Propagate our own invalid-format error as-is instead of re-wrapping it
  // in the generic message below.
  throw e;
 } catch (Exception e) {
  // Preserve the underlying exception as the cause rather than flattening
  // it to getMessage(), so the full stack trace is not lost.
  throw new SemanticException("Unable to load data to destination table." +
    " Error: " + e.getMessage(), e);
 }
}

代码示例来源:origin: apache/drill

/**
 * Checks that the files to be loaded conform to the destination's input
 * format — the partition's format when a partition spec is present, the
 * table's format otherwise — and rejects the load when they do not.
 *
 * @param ts destination table/partition spec
 * @param fileStatuses files whose format is being validated
 * @param fromURI URI used to resolve the source FileSystem
 * @throws SemanticException if the format cannot be resolved, the files are
 *         in the wrong format, or the validation itself fails
 */
private void ensureFileFormatsMatch(TableSpec ts, List<FileStatus> fileStatuses,
   final URI fromURI)
   throws SemanticException {
  final Class<? extends InputFormat> destInputFormat;
  try {
   // A partitioned load validates against the partition's input format.
   if (ts.getPartSpec() == null || ts.getPartSpec().isEmpty()) {
    destInputFormat = ts.tableHandle.getInputFormatClass();
   } else {
    destInputFormat = ts.partHandle.getInputFormatClass();
   }
  } catch (HiveException e) {
   throw new SemanticException(e);
  }

  try {
   FileSystem fs = FileSystem.get(fromURI, conf);
   boolean validFormat = HiveFileFormatUtils.checkInputFormat(fs, conf, destInputFormat,
     fileStatuses);
   if (!validFormat) {
    throw new SemanticException(ErrorMsg.INVALID_FILE_FORMAT_IN_LOAD.getMsg());
   }
  } catch (SemanticException e) {
   // Rethrow our own invalid-format error unchanged instead of burying it
   // inside the generic wrapper below.
   throw e;
  } catch (Exception e) {
   // Keep the original exception as the cause so the stack trace survives,
   // instead of reducing it to getMessage().
   throw new SemanticException("Unable to load data to destination table." +
     " Error: " + e.getMessage(), e);
  }
 }
}

代码示例来源:origin: apache/hive

total.addAndGet(getPathLength(jobConf, path, partition.getInputFormatClass(), threshold));

代码示例来源:origin: apache/drill

if (!file.isDir()) {
 InputFormat<?, ?> inputFormat = ReflectionUtil.newInstance(
   partn.getInputFormatClass(), jc);
 InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0,
   new String[] { partn.getLocation() });

代码示例来源:origin: apache/hive

} else {
 flag = HiveFileFormatUtils.checkInputFormat(
   srcFs, conf, oldPart.getInputFormatClass(), files);

代码示例来源:origin: apache/drill

total.addAndGet(getPathLength(jobConf, path, partition.getInputFormatClass(), threshold));

代码示例来源:origin: apache/hive

tblLoc = par.getDataLocation().toString();
inputFormattCls = par.getInputFormatClass() == null ? null : par.getInputFormatClass().getName();
outputFormattCls = par.getOutputFormatClass() == null ? null : par.getOutputFormatClass().getName();

代码示例来源:origin: apache/drill

tblLoc = par.getDataLocation().toString();
inputFormattCls = par.getInputFormatClass().getName();
outputFormattCls = par.getOutputFormatClass().getName();

代码示例来源:origin: apache/drill

inputFormatClass = part.getInputFormatClass();
isArchived = ArchiveUtils.isArchived(part);

代码示例来源:origin: apache/hive

inputFormatClass = part.getInputFormatClass();
isArchived = ArchiveUtils.isArchived(part);

代码示例来源:origin: apache/drill

} else {
 flag = HiveFileFormatUtils.checkInputFormat(
   srcFs, conf, oldPart.getInputFormatClass(), files);

代码示例来源:origin: apache/hive

inputFormatClass = part.getInputFormatClass();
isArchived = ArchiveUtils.isArchived(part);
lbCtx = constructListBucketingCtx(part.getSkewedColNames(), part.getSkewedColValues(),

代码示例来源:origin: apache/drill

inputFormatClass = part.getInputFormatClass();
isArchived = ArchiveUtils.isArchived(part);
lbCtx = constructListBucketingCtx(part.getSkewedColNames(), part.getSkewedColValues(),

相关文章

微信公众号

最新文章

更多