本文整理了Java中org.apache.hadoop.hive.ql.metadata.Table.setTableType()
方法的一些代码示例,展示了Table.setTableType()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Table.setTableType()
方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Table
类名称:Table
方法名:setTableType
暂无
代码示例来源:origin: apache/drill
/** Registers the target database and the to-be-created table as outputs of this DDL statement. */
private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type) throws SemanticException {
  // The containing database only needs a shared DDL lock.
  Database db = getDatabase(qualifiedTabName[0]);
  outputs.add(new WriteEntity(db, WriteEntity.WriteType.DDL_SHARED));
  // The table does not exist yet, so no lock is taken on it.
  Table tbl = new Table(qualifiedTabName[0], qualifiedTabName[1]);
  tbl.setTableType(type);
  outputs.add(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
}
代码示例来源:origin: apache/hive
/** Adds entities for create table/create view. */
private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type,
boolean isTemporary, Map<String, String> tblProps) throws SemanticException {
Database database = getDatabase(qualifiedTabName[0]);
outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
Table t = new Table(qualifiedTabName[0], qualifiedTabName[1]);
t.setParameters(tblProps);
t.setTableType(type);
t.setTemporary(isTemporary);
outputs.add(new WriteEntity(t, WriteEntity.WriteType.DDL_NO_LOCK));
}
代码示例来源:origin: apache/hive
/**
 * Handles ALTER TABLE ... UNSET TBLPROPERTIES for a table or a single partition.
 * Removes the requested keys from the metastore parameter map and returns no
 * follow-up tasks (always null, matching the caller's contract).
 */
private List<Task<?>> alterTableDropProps(AlterTableDesc alterTbl, Table tbl,
  Partition part, EnvironmentContext environmentContext) throws HiveException {
  // Dropping a user-generated stats parameter triggers an automatic stats
  // recompute, so the do-not-update marker must not survive.
  if (StatsSetupConst.USER.equals(environmentContext.getProperties()
      .get(StatsSetupConst.STATS_GENERATED))) {
    environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
  }
  if (part == null) {
    Set<String> removedSet = alterTbl.getProps().keySet();
    boolean isFromMmTable = AcidUtils.isInsertOnlyTable(tbl.getParameters());
    boolean isRemoved = AcidUtils.isRemovedInsertOnlyTable(removedSet);
    if (isFromMmTable && isRemoved) {
      throw new HiveException("Cannot convert an ACID table to non-ACID");
    }
    // Unsetting EXTERNAL demotes an external table back to managed.
    if (removedSet.contains("EXTERNAL") && tbl.getTableType() == TableType.EXTERNAL_TABLE) {
      tbl.setTableType(TableType.MANAGED_TABLE);
    }
  }
  // Strip each requested key from the partition or table parameter map.
  for (String key : alterTbl.getProps().keySet()) {
    if (part != null) {
      part.getTPartition().getParameters().remove(key);
    } else {
      tbl.getTTable().getParameters().remove(key);
    }
  }
  return null;
}
代码示例来源:origin: apache/hive
if (externalProp != null) {
if (Boolean.parseBoolean(externalProp) && tbl.getTableType() == TableType.MANAGED_TABLE) {
tbl.setTableType(TableType.EXTERNAL_TABLE);
} else if (!Boolean.parseBoolean(externalProp) && tbl.getTableType() == TableType.EXTERNAL_TABLE) {
tbl.setTableType(TableType.MANAGED_TABLE);
代码示例来源:origin: apache/hive
tbl.setTableType(TableType.MANAGED_TABLE);
tbl.setTableType(TableType.EXTERNAL_TABLE);
tbl.setTableType(TableType.EXTERNAL_TABLE);
代码示例来源:origin: apache/drill
tbl.setTableType(TableType.MANAGED_TABLE);
tbl.setTableType(TableType.EXTERNAL_TABLE);
tbl.setTableType(TableType.EXTERNAL_TABLE);
} else {
tbl.getParameters().remove("EXTERNAL");
代码示例来源:origin: apache/hive
if (isMaterialized()) {
tbl.setRewriteEnabled(isRewriteEnabled());
tbl.setTableType(TableType.MATERIALIZED_VIEW);
} else {
tbl.setTableType(TableType.VIRTUAL_VIEW);
代码示例来源:origin: apache/drill
tbl.setTableType(TableType.EXTERNAL_TABLE);
代码示例来源:origin: apache/hive
tbl.setTableType(TableType.EXTERNAL_TABLE);
代码示例来源:origin: apache/drill
if (crtView.isMaterialized()) {
tbl.setRewriteEnabled(crtView.isRewriteEnabled());
tbl.setTableType(TableType.MATERIALIZED_VIEW);
} else {
tbl.setViewExpandedText(crtView.getViewExpandedText());
tbl.setTableType(TableType.VIRTUAL_VIEW);
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/** Adds the owning database and the new table to this statement's write entities. */
private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type) throws SemanticException {
  // Shared lock on the database; the table itself is not created yet, so no lock.
  Database owningDb = getDatabase(qualifiedTabName[0]);
  outputs.add(new WriteEntity(owningDb, WriteEntity.WriteType.DDL_SHARED));
  Table target = new Table(qualifiedTabName[0], qualifiedTabName[1]);
  target.setTableType(type);
  outputs.add(new WriteEntity(target, WriteEntity.WriteType.DDL_NO_LOCK));
}
代码示例来源:origin: apache/lens
tbl.setTableType(TableType.MANAGED_TABLE);
tbl.getTTable().getSd().setCols(columns);
代码示例来源:origin: apache/lens
tbl.setTableType(TableType.MANAGED_TABLE);
tbl.setInputFormatClass(INPUT_FORMAT);
代码示例来源:origin: qubole/streamx
/**
 * Builds the Hive metadata for an Avro-backed external table.
 *
 * @param database    target Hive database name
 * @param tableName   target table name (also used to derive the HDFS directory)
 * @param schema      source schema, converted to Hive columns and embedded as
 *                    the Avro schema literal table parameter
 * @param partitioner supplies the partition columns
 * @throws HiveMetaStoreException if the Avro input/output format classes cannot be resolved
 */
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
  throws HiveMetaStoreException {
  Table table = new Table(database, tableName);
  // EXTERNAL: dropping the Hive table must leave the underlying data intact.
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  table.setDataLocation(new Path(FileUtils.hiveDirectoryName(url, topicsDir, tableName)));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> hiveColumns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(hiveColumns);
  table.setPartCols(partitioner.partitionFields());
  // Embed the literal Avro schema so readers can deserialize without external registry access.
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
}
代码示例来源:origin: apache/lens
/**
 * Materializes the given cube table definition as a managed Hive table.
 *
 * @param table the abstract cube table supplying name, columns and properties
 * @return the created Hive table
 * @throws LensException wrapping any failure during creation
 */
private Table createCubeHiveTable(AbstractCubeTable table) throws LensException {
  try {
    Table hiveTbl = getClient().newTable(table.getName().toLowerCase());
    hiveTbl.setTableType(TableType.MANAGED_TABLE);
    hiveTbl.getTTable().getSd().setCols(table.getColumns());
    hiveTbl.getTTable().getParameters().putAll(table.getProperties());
    getClient().createTable(hiveTbl);
    // Fetch the table back so the local cache picks up the new entry.
    getTable(hiveTbl.getTableName());
    return hiveTbl;
  } catch (Exception e) {
    throw new LensException("Exception creating table", e);
  }
}
代码示例来源:origin: org.apache.lens/lens-cube
/**
 * Creates a managed Hive table backing the supplied cube table definition,
 * then re-reads it so the client-side cache is refreshed.
 *
 * @param table source of the table name, column list and parameters
 * @return the newly created Hive table
 * @throws LensException if any step of the creation fails
 */
private Table createCubeHiveTable(AbstractCubeTable table) throws LensException {
  try {
    Table created = getClient().newTable(table.getName().toLowerCase());
    created.setTableType(TableType.MANAGED_TABLE);
    created.getTTable().getSd().setCols(table.getColumns());
    created.getTTable().getParameters().putAll(table.getProperties());
    getClient().createTable(created);
    // do get to update cache
    getTable(created.getTableName());
    return created;
  } catch (Exception e) {
    throw new LensException("Exception creating table", e);
  }
}
代码示例来源:origin: apache/lens
/**
 * Creates a managed Hive test table with one string column ({@code col1}) and
 * one string partition column ({@code pcol1}).
 *
 * @param tableName  the table name
 * @param parameters extra table parameters merged on top of the default
 *                   {@code test.hive.table.prop}; may be null or empty
 * @throws HiveException the hive exception
 */
public static void createHiveTable(String tableName, Map<String, String> parameters) throws HiveException {
  List<FieldSchema> cols = new ArrayList<FieldSchema>();
  cols.add(new FieldSchema("col1", "string", ""));
  List<FieldSchema> partitionCols = new ArrayList<FieldSchema>();
  partitionCols.add(new FieldSchema("pcol1", "string", ""));
  // Start from the default test property, then let caller-supplied values override/extend it.
  Map<String, String> tableParams = new HashMap<String, String>();
  tableParams.put("test.hive.table.prop", "tvalue");
  if (parameters != null && !parameters.isEmpty()) {
    tableParams.putAll(parameters);
  }
  Table tbl = Hive.get().newTable(tableName);
  tbl.setTableType(TableType.MANAGED_TABLE);
  tbl.getTTable().getSd().setCols(cols);
  tbl.setPartCols(partitionCols);
  tbl.getTTable().getParameters().putAll(tableParams);
  Hive.get().createTable(tbl);
}
代码示例来源:origin: qubole/streamx
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
Table table = new Table(database, tableName);
table.setTableType(TableType.EXTERNAL_TABLE);
table.getParameters().put("EXTERNAL", "TRUE");
String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
table.setDataLocation(new Path(tablePath));
table.setSerializationLib(getHiveParquetSerde());
try {
table.setInputFormatClass(getHiveParquetInputFormat());
table.setOutputFormatClass(getHiveParquetOutputFormat());
} catch (HiveException e) {
throw new HiveMetaStoreException("Cannot find input/output format:", e);
}
// convert copycat schema schema to Hive columns
List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
table.setFields(columns);
table.setPartCols(partitioner.partitionFields());
return table;
}
代码示例来源:origin: apache/lens
tbl.setTableType(TableType.MANAGED_TABLE);
tbl.getTTable().getSd().setCols(columns);
tbl.getTTable().getParameters().putAll(tableParams);
代码示例来源:origin: org.apache.hadoop.hive/hive-exec
tbl.setTableType(TableType.VIRTUAL_VIEW);
tbl.setSerializationLib(null);
tbl.clearSerDeInfo();
内容来源于网络,如有侵权,请联系作者删除!