Usage and Code Examples of the org.apache.hadoop.hive.ql.exec.Utilities.getColumnNames() Method

x33g5p2x · reposted 2022-02-01

This article collects a number of Java code examples for the org.apache.hadoop.hive.ql.exec.Utilities.getColumnNames() method and shows how it is used in practice. The snippets are taken from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references; note that several of them are partial excerpts that show only the lines surrounding the getColumnNames() call. The details of Utilities.getColumnNames() are as follows:
Package path: org.apache.hadoop.hive.ql.exec.Utilities
Class name: Utilities
Method name: getColumnNames

About Utilities.getColumnNames

No description is available. Judging from the examples below, getColumnNames(Properties) reads the comma-separated column list stored under serdeConstants.LIST_COLUMNS ("columns") in a table's SerDe Properties and returns it as a List<String>.
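
Before going through the project snippets, here is a minimal sketch of driving the method directly (assuming hive-exec and its dependencies are on the classpath; the class name and the column list below are made up for illustration, and in real code the Properties normally come from a TableDesc or PartitionDesc):

import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.serde.serdeConstants;

public class GetColumnNamesSketch {
 public static void main(String[] args) {
  // Hive keeps a table's schema in its SerDe properties: the column names are a
  // comma-separated string under "columns" (serdeConstants.LIST_COLUMNS) and the
  // types under "columns.types" (serdeConstants.LIST_COLUMN_TYPES).
  Properties props = new Properties();
  props.setProperty(serdeConstants.LIST_COLUMNS, "__time,page,user,added");
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "timestamp,string,string,int");

  // getColumnNames() parses the comma-separated names into a List<String>;
  // getColumnTypes() does the same for the type names.
  List<String> columnNames = Utilities.getColumnNames(props);
  List<String> columnTypes = Utilities.getColumnTypes(props);

  System.out.println(columnNames); // [__time, page, user, added]
  System.out.println(columnTypes); // [timestamp, string, string, int]
 }
}

The project examples below follow the same pattern: they pull the Properties off a TableDesc (for a table, a join key, or a spill table) and use the returned names to build ObjectInspectors or to compute column offsets.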

Code Examples

Code example source: apache/hive

private void initFromProperties(final Properties properties) throws SerDeException {
 final List<String> columnNames = new ArrayList<>(Utilities.getColumnNames(properties));
 if (!columnNames.contains(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)) {
  throw new SerDeException("Timestamp column (' "
    + DruidConstants.DEFAULT_TIMESTAMP_COLUMN
    + "') not specified in create table; list of columns is : "
    + properties.getProperty(serdeConstants.LIST_COLUMNS));
 }
 final List<PrimitiveTypeInfo>
   columnTypes =
   Utilities.getColumnTypes(properties)
     .stream()
     .map(TypeInfoFactory::getPrimitiveTypeInfo)
     .collect(Collectors.toList())
     .stream()
     .map(e -> e instanceof TimestampLocalTZTypeInfo ? tsTZTypeInfo : e)
     .collect(Collectors.toList());
 final List<ObjectInspector>
   inspectors =
   columnTypes.stream()
     .map(PrimitiveObjectInspectorFactory::getPrimitiveJavaObjectInspector)
     .collect(Collectors.toList());
 columns = columnNames.toArray(new String[0]);
 types = columnTypes.toArray(new PrimitiveTypeInfo[0]);
 inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
}

Code example source: apache/hive (an identical copy appears in apache/drill)

public static int getDPColOffset(FileSinkDesc conf) {
 if (conf.getWriteType() == AcidUtils.Operation.DELETE) {
  // For deletes, there is only ROW__ID in non-partitioning, non-bucketing columns.
  //See : UpdateDeleteSemanticAnalyzer::reparseAndSuperAnalyze() for details.
  return 1;
 } else if (conf.getWriteType() == AcidUtils.Operation.UPDATE) {
  // For updates, ROW__ID is an extra column at index 0.
  //See : UpdateDeleteSemanticAnalyzer::reparseAndSuperAnalyze() for details.
  return getColumnNames(conf.getTableInfo().getProperties()).size() + 1;
 } else {
  return getColumnNames(conf.getTableInfo().getProperties()).size();
 }
}

Code example source: apache/hive

throw new IOException("Druid data source cannot be empty or null");
druidQuery = DruidStorageHandlerUtils.createScanAllQuery(dataSource, Utilities.getColumnNames(conf));
druidQueryType = Query.SCAN;
conf.set(Constants.DRUID_QUERY_TYPE, druidQueryType);

Code example source: apache/drill

private boolean checkVectorizerSupportedTypes(boolean hasLlap) {
 for (Map.Entry<String, Operator<? extends OperatorDesc>> entry : aliasToWork.entrySet()) {
  final String alias = entry.getKey();
  Operator<? extends OperatorDesc> op = entry.getValue();
  PartitionDesc partitionDesc = aliasToPartnInfo.get(alias);
  if (op instanceof TableScanOperator && partitionDesc != null &&
    partitionDesc.getTableDesc() != null) {
   final TableScanOperator tsOp = (TableScanOperator) op;
   final List<String> readColumnNames = tsOp.getNeededColumns();
   final Properties props = partitionDesc.getTableDesc().getProperties();
   final List<TypeInfo> typeInfos = TypeInfoUtils.getTypeInfosFromTypeString(
     props.getProperty(serdeConstants.LIST_COLUMN_TYPES));
   final List<String> allColumnTypes = TypeInfoUtils.getTypeStringsFromTypeInfo(typeInfos);
   final List<String> allColumnNames = Utilities.getColumnNames(props);
   hasLlap = Utilities.checkVectorizerSupportedTypes(readColumnNames, allColumnNames,
     allColumnTypes);
  }
 }
 return hasLlap;
}

Code example source: apache/hive (identical copies appear in apache/drill, org.apache.hadoop.hive/hive-exec, and com.facebook.presto.hive/hive-apache)

List<String> keyColNames = Utilities.getColumnNames(joinKeyDesc
  .getProperties());
StructObjectInspector structTblKeyInpector = ObjectInspectorFactory
List<String> valColNames = new ArrayList<String>();
if (valTblDesc != null) {
 valColNames = Utilities.getColumnNames(valTblDesc.getProperties());

Code example source: apache/hive

private static void serializeObject(Properties properties,
  DruidSerDe serDe,
  Object[] rowObject,
  DruidWritable druidWritable) throws SerDeException {
 // Build OI with timestamp granularity column
 final List<String> columnNames = new ArrayList<>(Utilities.getColumnNames(properties));
 columnNames.add(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME);
 final List<PrimitiveTypeInfo>
   columnTypes =
   Utilities.getColumnTypes(properties)
     .stream()
     .map(TypeInfoFactory::getPrimitiveTypeInfo)
     .collect(Collectors.toList());
 columnTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("timestamp"));
 List<ObjectInspector>
   inspectors =
   columnTypes.stream()
     .map(PrimitiveObjectInspectorFactory::getPrimitiveWritableObjectInspector)
     .collect(Collectors.toList());
 ObjectInspector inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
 // Serialize
 DruidWritable writable = (DruidWritable) serDe.serialize(rowObject, inspector);
 // Check result
 assertEquals(druidWritable.getValue().size(), writable.getValue().size());
 for (Entry<String, Object> e : druidWritable.getValue().entrySet()) {
  assertEquals(e.getValue(), writable.getValue().get(e.getKey()));
 }
}

Code example source: apache/hive (an identical copy appears in apache/drill)

public static RowContainer<List<Object>> getRowContainer(Configuration hconf,
  List<ObjectInspector> structFieldObjectInspectors,
  Byte alias,int containerSize, TableDesc[] spillTableDesc,
  JoinDesc conf,boolean noFilter, Reporter reporter) throws HiveException {
 TableDesc tblDesc = JoinUtil.getSpillTableDesc(alias,spillTableDesc,conf, noFilter);
 AbstractSerDe serde = JoinUtil.getSpillSerDe(alias, spillTableDesc, conf, noFilter);
 if (serde == null) {
  containerSize = -1;
 }
 RowContainer<List<Object>> rc = new RowContainer<List<Object>>(containerSize, hconf, reporter);
 StructObjectInspector rcOI = null;
 if (tblDesc != null) {
  // arbitrary column names used internally for serializing to spill table
  List<String> colNames = Utilities.getColumnNames(tblDesc.getProperties());
  // object inspector for serializing input tuples
  rcOI = ObjectInspectorFactory.getStandardStructObjectInspector(colNames,
    structFieldObjectInspectors);
 }
 rc.setSerDe(serde, rcOI);
 rc.setTableDesc(tblDesc);
 return rc;
}

Code example source: apache/hive (the same fragment appears a second time in apache/hive, as well as in apache/drill, org.apache.hadoop.hive/hive-exec, and com.facebook.presto.hive/hive-apache)

.getColumnNames(keyTblDesc.getProperties());
List<String> joinKeyTypes = Utilities.getColumnTypes(keyTblDesc
  .getProperties());

Code example source: com.facebook.presto.hive/hive-apache

public static RowContainer<List<Object>> getRowContainer(Configuration hconf,
   List<ObjectInspector> structFieldObjectInspectors,
   Byte alias,int containerSize, TableDesc[] spillTableDesc,
   JoinDesc conf,boolean noFilter, Reporter reporter) throws HiveException {

  TableDesc tblDesc = JoinUtil.getSpillTableDesc(alias,spillTableDesc,conf, noFilter);
  SerDe serde = JoinUtil.getSpillSerDe(alias, spillTableDesc, conf, noFilter);

  if (serde == null) {
   containerSize = -1;
  }

  RowContainer<List<Object>> rc = new RowContainer<List<Object>>(containerSize, hconf, reporter);
  StructObjectInspector rcOI = null;
  if (tblDesc != null) {
   // arbitrary column names used internally for serializing to spill table
   List<String> colNames = Utilities.getColumnNames(tblDesc.getProperties());
   // object inspector for serializing input tuples
   rcOI = ObjectInspectorFactory.getStandardStructObjectInspector(colNames,
     structFieldObjectInspectors);
  }

  rc.setSerDe(serde, rcOI);
  rc.setTableDesc(tblDesc);
  return rc;
 }
}

Code example source: org.apache.hadoop.hive/hive-exec

public static RowContainer getRowContainer(Configuration hconf,
   List<ObjectInspector> structFieldObjectInspectors,
   Byte alias,int containerSize, Map<Byte, TableDesc> spillTableDesc,
   JoinDesc conf,boolean noOuterJoin) throws HiveException {

  TableDesc tblDesc = JoinUtil.getSpillTableDesc(alias,spillTableDesc,conf, noOuterJoin);
  SerDe serde = JoinUtil.getSpillSerDe(alias,spillTableDesc,conf,noOuterJoin);

  if (serde == null) {
   containerSize = 1;
  }

  RowContainer rc = new RowContainer(containerSize, hconf);
  StructObjectInspector rcOI = null;
  if (tblDesc != null) {
   // arbitrary column names used internally for serializing to spill table
   List<String> colNames = Utilities.getColumnNames(tblDesc.getProperties());
   // object inspector for serializing input tuples
   rcOI = ObjectInspectorFactory.getStandardStructObjectInspector(colNames,
     structFieldObjectInspectors);
  }

  rc.setSerDe(serde, rcOI);
  rc.setTableDesc(tblDesc);
  return rc;
 }
}
