本文整理了Java中org.apache.hadoop.hive.ql.metadata.Partition.getOutputFormatClass()
方法的一些代码示例,展示了Partition.getOutputFormatClass()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Partition.getOutputFormatClass()
方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Partition
类名称:Partition
方法名:getOutputFormatClass
暂无方法描述。
代码示例来源:origin: apache/hive
/**
 * Resolves the output format class by delegating to the wrapped partition.
 *
 * @return the {@link OutputFormat} implementation class of the underlying partition
 * @throws HiveException if the wrapped partition cannot resolve its output format class
 */
@Override
public Class<? extends OutputFormat> getOutputFormatClass() throws HiveException {
  final Class<? extends OutputFormat> formatClass = partition.getOutputFormatClass();
  return formatClass;
}
代码示例来源:origin: apache/hive
outputFormattCls = par.getOutputFormatClass() == null ? null : par.getOutputFormatClass().getName();
代码示例来源:origin: apache/drill
outputFormattCls = par.getOutputFormatClass().getName();
代码示例来源:origin: apache/drill
/**
 * Populates this PartitionDesc from a partition and a precomputed table descriptor.
 *
 * <p>Note: the original else branch called
 * {@code setOutputFileFormatClass(part.getInputFormatClass())} — an input format passed to the
 * output-format setter — and that value was unconditionally overwritten by the final call below.
 * The dead, mismatched call has been removed; observable state after the method is unchanged.
 *
 * @param part               source partition providing the spec and file format classes
 * @param tblDesc            table descriptor to attach to this partition descriptor
 * @param setInputFileFormat whether the partition's input file format should be recorded
 * @throws HiveException if a format class cannot be resolved from the partition
 */
private void PartitionDescConstructorHelper(final Partition part, final TableDesc tblDesc,
    boolean setInputFileFormat) throws HiveException {
  this.tableDesc = tblDesc;
  setPartSpec(part.getSpec());
  if (setInputFileFormat) {
    setInputFileFormatClass(part.getInputFormatClass());
  }
  // The output file format always comes from the partition itself.
  setOutputFileFormatClass(part.getOutputFormatClass());
}
代码示例来源:origin: apache/hive
/**
 * Populates this PartitionDesc from a partition and a precomputed table descriptor, first
 * letting any storage handler contribute its input-job properties to the table descriptor.
 *
 * <p>Note: the original else branch called
 * {@code setOutputFileFormatClass(part.getInputFormatClass())} — an input format passed to the
 * output-format setter — and that value was unconditionally overwritten by the final call below.
 * The dead, mismatched call has been removed; observable state after the method is unchanged.
 *
 * @param part               source partition providing the spec and file format classes
 * @param tblDesc            table descriptor to attach to this partition descriptor
 * @param setInputFileFormat whether the partition's input file format should be recorded
 * @throws HiveException if a format class cannot be resolved from the partition
 */
private void PartitionDescConstructorHelper(final Partition part, final TableDesc tblDesc,
    boolean setInputFileFormat) throws HiveException {
  PlanUtils.configureInputJobPropertiesForStorageHandler(tblDesc);
  this.tableDesc = tblDesc;
  setPartSpec(part.getSpec());
  if (setInputFileFormat) {
    setInputFileFormatClass(part.getInputFormatClass());
  }
  // The output file format always comes from the partition itself.
  setOutputFileFormatClass(part.getOutputFormatClass());
}
代码示例来源:origin: apache/hive
outputFormattCls = par.getOutputFormatClass() == null ? null : par.getOutputFormatClass().getName();
代码示例来源:origin: apache/drill
outputFormattCls = par.getOutputFormatClass().getName();
代码示例来源:origin: org.apache.hadoop.hive/hive-exec
/**
 * Builds a PartitionDesc entirely from a metastore-backed partition: the table descriptor,
 * partition spec, deserializer, file format classes, schema properties and serde class name
 * are all derived from the partition itself.
 *
 * @param part partition to describe
 * @throws HiveException if table, deserializer or format metadata cannot be resolved
 */
public PartitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part)
    throws HiveException {
  tableDesc = Utilities.getTableDesc(part.getTable());
  partSpec = part.getSpec();
  deserializerClass = part.getDeserializer().getClass();
  inputFileFormatClass = part.getInputFormatClass();
  outputFileFormatClass = part.getOutputFormatClass();
  properties = part.getSchema();
  // Cache the serde implementation name from the schema properties.
  // (A stray empty statement ';' after this assignment in the original has been removed.)
  serdeClassName = properties
      .getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
outputFormattCls = par.getOutputFormatClass().getName();
代码示例来源:origin: org.apache.lens/lens-cube
latestPart.setLocation(partition.getLocation());
latestPart.setInputFormatClass(partition.getInputFormatClass());
latestPart.setOutputFormatClass(partition.getOutputFormatClass().asSubclass(HiveOutputFormat.class));
latestPart.getTPartition().getSd().getSerdeInfo()
.setSerializationLib(partition.getTPartition().getSd().getSerdeInfo().getSerializationLib());
代码示例来源:origin: apache/lens
latestPart.setLocation(partition.getLocation());
latestPart.setInputFormatClass(partition.getInputFormatClass());
latestPart.setOutputFormatClass(partition.getOutputFormatClass().asSubclass(HiveOutputFormat.class));
latestPart.getTPartition().getSd().getSerdeInfo()
.setSerializationLib(partition.getTPartition().getSd().getSerdeInfo().getSerializationLib());
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Builds a PartitionDesc from a partition plus an externally supplied table descriptor,
 * deriving the per-partition properties from the shared table schema.
 *
 * @param part    partition to describe
 * @param tblDesc table descriptor shared across partitions of the same table
 * @throws HiveException if a format class cannot be resolved from the partition
 */
public PartitionDesc(final Partition part, final TableDesc tblDesc) throws HiveException {
  this.tableDesc = tblDesc;
  setProperties(part.getSchemaFromTableSchema(tblDesc.getProperties())); // each partition maintains a large properties
  partSpec = part.getSpec();
  // BUG FIX: the original passed the input format to setOutputFileFormatClass, leaving the
  // input file format unset (and the value was immediately overwritten by the next call).
  // The sibling PartitionDesc(Partition) constructor uses setInputFileFormatClass here.
  setInputFileFormatClass(part.getInputFormatClass());
  setOutputFileFormatClass(part.getOutputFormatClass());
}
代码示例来源:origin: org.apache.hadoop.hive/hive-exec
outputFormattCls = par.getOutputFormatClass().getName();
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Builds a PartitionDesc solely from a partition, deriving the table descriptor, schema-backed
 * properties, partition spec and both file format classes from the partition's own metadata.
 *
 * @param part partition to describe
 * @throws HiveException if table or format metadata cannot be resolved
 */
public PartitionDesc(final Partition part) throws HiveException {
  // Independent lookups; order of the first three calls does not matter.
  partSpec = part.getSpec();
  this.tableDesc = Utilities.getTableDesc(part.getTable());
  setProperties(part.getMetadataFromPartitionSchema());
  setInputFileFormatClass(part.getInputFormatClass());
  setOutputFileFormatClass(part.getOutputFormatClass());
}
代码示例来源:origin: org.apache.hadoop.hive/hive-exec
getOutputFormatClass();
代码示例来源:origin: com.facebook.presto.hive/hive-apache
outputFormattCls = par.getOutputFormatClass().getName();
代码示例来源:origin: apache/lens
xp.setLocation(p.getLocation());
xp.setInputFormat(p.getInputFormatClass().getCanonicalName());
xp.setOutputFormat(p.getOutputFormatClass().getCanonicalName());
xp.getPartitionParameters().getProperty().addAll(xPropertiesFromMap(p.getParameters()));
String upParam = p.getParameters().get(MetastoreConstants.PARTITION_UPDATE_PERIOD);
代码示例来源:origin: org.apache.lens/lens-cube
xp.setLocation(p.getLocation());
xp.setInputFormat(p.getInputFormatClass().getCanonicalName());
xp.setOutputFormat(p.getOutputFormatClass().getCanonicalName());
xp.getPartitionParameters().getProperty().addAll(xPropertiesFromMap(p.getParameters()));
String upParam = p.getParameters().get(MetastoreConstants.PARTITION_UPDATE_PERIOD);
代码示例来源:origin: org.apache.lens/lens-cube
latest.latestParts.get(latestPartCol).getPartParams(latest.part.getParameters()));
latestPart.getPartition(0).setInputFormat(latest.part.getInputFormatClass().getCanonicalName());
latestPart.getPartition(0).setOutputFormat(latest.part.getOutputFormatClass().getCanonicalName());
latestPart.getPartition(0).setNumBuckets(latest.part.getBucketCount());
latestPart.getPartition(0).setCols(latest.part.getCols());
代码示例来源:origin: apache/lens
latest.latestParts.get(latestPartCol).getPartParams(latest.part.getParameters()));
latestPart.getPartition(0).setInputFormat(latest.part.getInputFormatClass().getCanonicalName());
latestPart.getPartition(0).setOutputFormat(latest.part.getOutputFormatClass().getCanonicalName());
latestPart.getPartition(0).setNumBuckets(latest.part.getBucketCount());
latestPart.getPartition(0).setCols(latest.part.getCols());
内容来源于网络,如有侵权,请联系作者删除!