org.apache.hadoop.hive.ql.metadata.Table.<init>()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(8.7k)|赞(0)|评价(0)|浏览(144)

本文整理了Java中org.apache.hadoop.hive.ql.metadata.Table.&lt;init&gt;()方法的一些代码示例,展示了Table.&lt;init&gt;()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Table.&lt;init&gt;()方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Table
类名称:Table
方法名:<init>

Table.<init>介绍

[英]Used only for serialization.
[中]仅用于序列化。

代码示例

代码示例来源:origin: apache/incubator-gobblin

/**
 * A Hive partition that additionally carries the Avro schema of its records.
 *
 * @param table     thrift-level table definition from the metastore
 * @param partition thrift-level partition definition belonging to {@code table}
 * @param schema    Avro schema of the data stored in this partition
 * @throws HiveException if wrapping the thrift objects in QL metadata objects fails
 */
public SchemaAwareHivePartition(org.apache.hadoop.hive.metastore.api.Table table, org.apache.hadoop.hive.metastore.api.Partition partition, Schema schema)
  throws HiveException {
 super(new Table(table), partition);
 this.avroSchema = schema;
}

代码示例来源:origin: apache/hive

/**
 * Builds an {@link ImportTableDesc} for a metastore-level table object.
 *
 * @param dbName destination database name for the import
 * @param tblObj thrift-level table definition from the metastore
 * @return an import descriptor wrapping the QL-level view of the table
 * @throws Exception if the QL table wrapper or the descriptor cannot be created
 */
private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName,
  org.apache.hadoop.hive.metastore.api.Table tblObj) throws Exception {
 // Wrap the thrift table in the QL metadata Table before building the descriptor.
 return new ImportTableDesc(dbName, new Table(tblObj));
}

代码示例来源:origin: apache/hive

/**
 * Fetches a table definition from the metastore and wraps it as a QL-level Table.
 *
 * @param client    metastore client used for the lookup
 * @param dbName    database containing the table
 * @param tableName name of the table to fetch
 * @return the QL metadata wrapper around the fetched thrift table
 * @throws NoSuchObjectException if no such table exists
 */
public static Table getTable(IMetaStoreClient client, String dbName, String tableName)
 throws NoSuchObjectException, TException, MetaException {
 org.apache.hadoop.hive.metastore.api.Table apiTable = client.getTable(dbName, tableName);
 return new Table(apiTable);
}

代码示例来源:origin: apache/hive

/**
 * Creates an in-memory Table object from a (possibly db-qualified) table name.
 *
 * @param tableName table name, optionally qualified as "db.table"
 * @return a new Table for the resolved database/table pair
 * @throws HiveException if the name cannot be resolved
 */
public Table newTable(String tableName) throws HiveException {
 // Utilities.getDbTableName splits "db.table" (filling in the default db if absent).
 String[] parts = Utilities.getDbTableName(tableName);
 String db = parts[0];
 String tbl = parts[1];
 return new Table(db, tbl);
}

代码示例来源:origin: apache/hive

/**
 * Points an existing partition at a new storage location, logging the
 * equivalent ALTER TABLE ... SET LOCATION statement before applying it.
 *
 * @param dbName      database of the table (kept for call-site symmetry; not used directly)
 * @param table       thrift-level table owning the partition
 * @param partName    partition spec string used for logging
 * @param part        thrift-level partition to relocate
 * @param newLocation new filesystem path for the partition data
 */
void updatePartitionLocation(String dbName, Table table, String partName,
  Partition part, Path newLocation) throws HiveException, TException {
 String location = newLocation.toString();
 LOG.info(String.format("ALTER TABLE %s PARTITION (%s) SET LOCATION '%s'",
   getQualifiedName(table), partName, location));
 org.apache.hadoop.hive.ql.metadata.Table qlTable =
   new org.apache.hadoop.hive.ql.metadata.Table(table);
 org.apache.hadoop.hive.ql.metadata.Partition updatedPart =
   new org.apache.hadoop.hive.ql.metadata.Partition(qlTable, part);
 updatedPart.setLocation(location);
 alterPartitionInternal(table, updatedPart);
}

代码示例来源:origin: apache/hive

/**
 * Returns a fresh Table wrapper around this operation's underlying thrift table.
 */
@Override
public Object getOutput() throws HiveException {
 org.apache.hadoop.hive.metastore.api.Table tTable = getTable().getTTable();
 return new Table(tTable);
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Wraps a thrift table/partition pair as a QL-level Partition, converting the
 * checked HiveException into an unchecked exception for caller convenience.
 *
 * @param table     thrift-level table owning the partition
 * @param partition thrift-level partition to wrap
 * @return the QL metadata Partition
 */
private static org.apache.hadoop.hive.ql.metadata.Partition getQlPartition(final Table table, final Partition partition) {
 try {
  org.apache.hadoop.hive.ql.metadata.Table qlTable =
    new org.apache.hadoop.hive.ql.metadata.Table(table);
  return new org.apache.hadoop.hive.ql.metadata.Partition(qlTable, partition);
 } catch (HiveException e) {
  // Preserve the cause so the original failure is not lost.
  throw new RuntimeException(e);
 }
}

代码示例来源:origin: apache/hive

/**
 * Points a table at a new storage location, logging the equivalent
 * ALTER TABLE ... SET LOCATION statement before applying the change.
 *
 * @param table       thrift-level table to relocate
 * @param newLocation new filesystem path for the table data
 */
void updateTableLocation(Table table, Path newLocation) throws HiveException {
 LOG.info(String.format("ALTER TABLE %s SET LOCATION '%s'",
   getQualifiedName(table), newLocation));
 // Build a QL-level copy of the table metadata and update its location on the copy.
 org.apache.hadoop.hive.ql.metadata.Table updatedTable =
   new org.apache.hadoop.hive.ql.metadata.Table(table);
 updatedTable.setDataLocation(newLocation);
 boolean transactional = TxnUtils.isTransactionalTable(table);
 alterTableInternal(transactional, table, updatedTable);
}

代码示例来源:origin: apache/kylin

/**
 * Looks up a table in the metastore and returns its row-count basic statistic.
 *
 * @param database  database containing the table
 * @param tableName name of the table
 * @return the ROW_COUNT stat recorded in the table's metadata
 */
@Override
public long getHiveTableRows(String database, String tableName) throws Exception {
  org.apache.hadoop.hive.ql.metadata.Table qlTable = new org.apache.hadoop.hive.ql.metadata.Table(
      getMetaStoreClient().getTable(database, tableName));
  return getBasicStatForTable(qlTable, StatsSetupConst.ROW_COUNT);
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Test hook: builds a HiveDataset for the table, or fails deliberately when the
 * table's name equals THROW_EXCEPTION to simulate a dataset-creation error.
 */
@Override
 protected HiveDataset createHiveDataset(Table table, Config config)
   throws IOException {
  if (table.getTableName().equals(THROW_EXCEPTION)) {
   throw new IOException("bad table");
  }
  org.apache.hadoop.hive.ql.metadata.Table qlTable =
    new org.apache.hadoop.hive.ql.metadata.Table(table);
  return new HiveDataset(super.fs, super.clientPool, qlTable, config);
 }
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Builds a ConvertibleHiveDataset over a test table ("db1"."tb1") backed by a
 * mocked filesystem whose URI is fixed to "test".
 *
 * @param config dataset configuration under test
 * @return the constructed test dataset
 * @throws URISyntaxException never in practice (the URI literal is valid)
 */
public static ConvertibleHiveDataset createTestConvertibleDataset(Config config)
  throws URISyntaxException {
 Table apiTable = getTestTable("db1", "tb1");
 FileSystem mockFs = Mockito.mock(FileSystem.class);
 when(mockFs.getUri()).thenReturn(new URI("test"));
 org.apache.hadoop.hive.ql.metadata.Table qlTable =
   new org.apache.hadoop.hive.ql.metadata.Table(apiTable);
 return new ConvertibleHiveDataset(mockFs, Mockito.mock(HiveMetastoreClientPool.class),
   qlTable, new Properties(), config);
}

代码示例来源:origin: apache/hive

/**
 * Adds write entities for a CREATE TABLE / CREATE VIEW statement: a shared DDL
 * lock on the database plus a no-lock entity for the (not yet created) table.
 *
 * @param qualifiedTabName [database, table] name pair
 * @param type             table type being created (managed, view, ...)
 * @param isTemporary      whether the table is session-temporary
 * @param tblProps         table properties to attach to the placeholder Table
 * @throws SemanticException if the database cannot be resolved
 */
private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type,
  boolean isTemporary, Map<String, String> tblProps) throws SemanticException {
 Database db = getDatabase(qualifiedTabName[0]);
 outputs.add(new WriteEntity(db, WriteEntity.WriteType.DDL_SHARED));
 Table placeholder = new Table(qualifiedTabName[0], qualifiedTabName[1]);
 placeholder.setParameters(tblProps);
 placeholder.setTableType(type);
 placeholder.setTemporary(isTemporary);
 outputs.add(new WriteEntity(placeholder, WriteEntity.WriteType.DDL_NO_LOCK));
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Creates a test Avro table in the local metastore, stored under a throwaway
 * temporary directory, optionally partitioned by a single "part" column.
 *
 * @param dbName      database to create the table in
 * @param name        table name
 * @param partitioned whether to add the "part" partition column
 * @return the created table wrapped as a Table
 */
private static Table localTestTable(String dbName, String name, boolean partitioned) throws Exception {
 // Temp dir serves as the table's storage location; cleaned up on JVM exit.
 File sdLocation = Files.createTempDir();
 sdLocation.deleteOnExit();
 Optional<String> partitionCol = partitioned ? Optional.of("part") : Optional.<String>absent();
 return new Table(LocalHiveMetastoreTestUtils.getInstance()
   .createTestAvroTable(dbName, name, sdLocation.getAbsolutePath(), partitionCol));
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Builds a HiveTargetPathHelper over a mocked HiveDataset whose table is a
 * minimal in-memory definition ("dbName"."tableName") rooted at TABLE_ROOT.
 *
 * @param properties dataset properties the helper should see
 * @return the helper under test
 */
private HiveTargetPathHelper createTestTargetPathHelper(Properties properties) {
 Table stubTable = new Table(new org.apache.hadoop.hive.metastore.api.Table());
 stubTable.setDbName("dbName");
 stubTable.setTableName("tableName");
 HiveDataset dataset = Mockito.mock(HiveDataset.class);
 Mockito.when(dataset.getTable()).thenReturn(stubTable);
 Mockito.when(dataset.getTableRootPath()).thenReturn(Optional.of(TABLE_ROOT));
 Mockito.when(dataset.getProperties()).thenReturn(properties);
 return new HiveTargetPathHelper(dataset);
}

代码示例来源:origin: apache/hive

/**
 * Builds an in-memory test table using SequenceFile I/O and a Thrift serde
 * configured to deserialize {@code Complex} records via TBinaryProtocol.
 *
 * @param dbName    database name for the table
 * @param tableName table name
 * @return the configured (not yet persisted) Table
 * @throws HiveException if the table or serde configuration fails
 */
private static Table createTestTable(String dbName, String tableName) throws HiveException {
 Table table = new Table(dbName, tableName);
 table.setInputFormatClass(SequenceFileInputFormat.class.getName());
 table.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
 // Thrift serde: rows are Complex structs encoded with TBinaryProtocol.
 table.setSerializationLib(ThriftDeserializer.class.getName());
 table.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class.getName());
 table.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT,
   TBinaryProtocol.class.getName());
 return table;
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Verifies that with an empty config the version finder parses a
 * "datepartition=yyyy-MM-dd-HH" partition into the matching dataset version.
 */
@Test
public void testDefaults() throws Exception {
 String tableName = "VfTb1";
 DatePartitionHiveVersionFinder finder = new DatePartitionHiveVersionFinder(this.fs, ConfigFactory.empty());
 Table table = this.hiveMetastoreTestUtils.createTestAvroTable(dbName, tableName, ImmutableList.of("datepartition"));
 org.apache.hadoop.hive.metastore.api.Partition apiPartition =
   this.hiveMetastoreTestUtils.addTestPartition(table, ImmutableList.of("2016-01-01-20"), (int) System.currentTimeMillis());
 Partition partition = new Partition(new org.apache.hadoop.hive.ql.metadata.Table(table), apiPartition);
 assertThat(partition.getName(), is("datepartition=2016-01-01-20"));
 TimestampedHiveDatasetVersion version = finder.getDatasetVersion(partition);
 Assert.assertEquals(version.getDateTime(), formatter.parseDateTime("2016/01/01/20"));
}

代码示例来源:origin: apache/hive

/**
 * Builds the planner's dummy table: a single placeholder table whose storage
 * points at an empty dummy file and whose serde/input format produce null rows.
 *
 * @return the configured dummy Table
 * @throws SemanticException if the dummy file cannot be created
 */
protected Table getDummyTable() throws SemanticException {
 Path location = createDummyFile();
 Table dummy = new Table(DUMMY_DATABASE, DUMMY_TABLE);
 dummy.getTTable().getSd().setLocation(location.toString());
 dummy.getTTable().getSd().getSerdeInfo().setSerializationLib(NullStructSerDe.class.getName());
 // Null-row input/output formats: the table yields rows without reading real data.
 dummy.setInputFormatClass(NullRowsInputFormat.class);
 dummy.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
 return dummy;
}

代码示例来源:origin: apache/hive

/**
 * Creates a uniquely named test table in the "default" database, optionally
 * partitioned by a single "version" column.
 *
 * @param isPartitioned whether to add the "version" partition column
 * @return the new in-memory Table
 */
private Table newTable(boolean isPartitioned) {
  // nextInput makes each generated table name unique across calls.
  Table table = new Table("default", "table" + Integer.toString(nextInput++));
  if (isPartitioned) {
   FieldSchema versionCol = new FieldSchema();
   versionCol.setName("version");
   // NOTE(review): Hive type names are conventionally lowercase ("string");
   // verify the capitalized "String" here is intentional for this test.
   versionCol.setType("String");
   List<FieldSchema> partCols = new ArrayList<FieldSchema>();
   partCols.add(versionCol);
   table.setPartCols(partCols);
  }
  return table;
 }
}

代码示例来源:origin: apache/hive

/**
 * Builds a TableScan operator over a fabricated "db"."table" with a filter
 * expression udf(i, aa.c1), where i distinguishes generated operators.
 *
 * @param i constant folded into the filter expression
 * @return the constructed TableScan operator
 */
private Operator<TableScanDesc> getTsOp(int i) {
 Table stubTable = new Table("db", "table");
 TableScanDesc scanDesc = new TableScanDesc("alias"/*+ cCtx.nextOperatorId()*/, stubTable);
 ExprNodeDesc constArg = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i));
 ExprNodeDesc colArg = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false);
 ExprNodeGenericFuncDesc filter = new ExprNodeGenericFuncDesc(
   TypeInfoFactory.intTypeInfo, udf, Lists.newArrayList(constArg, colArg));
 scanDesc.setFilterExpr(filter);
 return OperatorFactory.get(cCtx, scanDesc);
}

代码示例来源:origin: apache/hive

/**
 * Builds a TableScan operator over a fabricated "db"."table" with a filter
 * expression udf(i, aa.c1); the alias is made unique via the operator counter.
 *
 * @param i constant folded into the filter expression
 * @return the constructed TableScan operator
 */
private Operator<TableScanDesc> getTsOp(int i) {
 Table stubTable = new Table("db", "table");
 TableScanDesc scanDesc = new TableScanDesc("alias_" + cCtx.nextOperatorId(), stubTable);
 ExprNodeDesc constArg = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i));
 ExprNodeDesc colArg = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false);
 ExprNodeGenericFuncDesc filter = new ExprNodeGenericFuncDesc(
   TypeInfoFactory.intTypeInfo, udf, Lists.newArrayList(constArg, colArg));
 scanDesc.setFilterExpr(filter);
 return OperatorFactory.get(cCtx, scanDesc);
}

相关文章

微信公众号

最新文章

更多

Table类方法