Usage of the org.apache.hadoop.hive.ql.metadata.Table.getDeserializer() method, with code examples

This article collects a number of Java code examples for the org.apache.hadoop.hive.ql.metadata.Table.getDeserializer() method and shows how it is used in practice. The examples are taken from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of Table.getDeserializer() are as follows:
Package path: org.apache.hadoop.hive.ql.metadata.Table
Class name: Table
Method name: getDeserializer

Introduction to Table.getDeserializer

getDeserializer() returns the Deserializer (SerDe) instance configured for the table, instantiated from the table's metadata (in particular its serialization library). It is typically used to obtain a StructObjectInspector describing the table's rows or to derive the table's column schema. An overload, getDeserializer(boolean), also appears in some of the examples below; its flag controls whether SerDe configuration errors are skipped during instantiation.
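
The following minimal sketch (not taken from the projects above) shows a typical call sequence: obtain a Table through the Hive metadata client, call getDeserializer(), and walk the resulting StructObjectInspector. It assumes a Hive session (HiveConf/SessionState) has already been set up for the current thread; the database name "default" and table name "my_table" are placeholders.

import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class GetDeserializerExample {
  public static void main(String[] args) throws Exception {
    // Assumes a HiveConf/SessionState is already configured for this thread.
    Hive db = Hive.get();
    // "default" and "my_table" are placeholder names.
    Table tbl = db.getTable("default", "my_table");

    // getDeserializer() returns the SerDe configured for the table.
    Deserializer deserializer = tbl.getDeserializer();

    // The SerDe's ObjectInspector describes the table's row layout;
    // for ordinary tables it is a StructObjectInspector.
    StructObjectInspector oi =
        (StructObjectInspector) deserializer.getObjectInspector();
    for (StructField field : oi.getAllStructFieldRefs()) {
      System.out.println(field.getFieldName() + " : "
          + field.getFieldObjectInspector().getTypeName());
    }
  }
}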

Code examples

Code example source: apache/hive

public StructField getField(String fld) {
 try {
  StructObjectInspector structObjectInspector = (StructObjectInspector) getDeserializer()
    .getObjectInspector();
  return structObjectInspector.getStructFieldRef(fld);
 } catch (Exception e) {
  throw new RuntimeException(e);
 }
}

Code example source: apache/drill

public StructField getField(String fld) {
 try {
  StructObjectInspector structObjectInspector = (StructObjectInspector) getDeserializer()
    .getObjectInspector();
  return structObjectInspector.getStructFieldRef(fld);
 } catch (Exception e) {
  throw new RuntimeException(e);
 }
}

Code example source: apache/hive

public ArrayList<StructField> getFields() {
 ArrayList<StructField> fields = new ArrayList<StructField>();
 try {
  Deserializer decoder = getDeserializer();
  // Expand out all the columns of the table
  StructObjectInspector structObjectInspector = (StructObjectInspector) decoder
    .getObjectInspector();
  List<? extends StructField> fld_lst = structObjectInspector
    .getAllStructFieldRefs();
  for (StructField field : fld_lst) {
   fields.add(field);
  }
 } catch (SerDeException e) {
  throw new RuntimeException(e);
 }
 return fields;
}

Code example source: apache/drill

public ArrayList<StructField> getFields() {
 ArrayList<StructField> fields = new ArrayList<StructField>();
 try {
  Deserializer decoder = getDeserializer();
  // Expand out all the columns of the table
  StructObjectInspector structObjectInspector = (StructObjectInspector) decoder
    .getObjectInspector();
  List<? extends StructField> fld_lst = structObjectInspector
    .getAllStructFieldRefs();
  for (StructField field : fld_lst) {
   fields.add(field);
  }
 } catch (SerDeException e) {
  throw new RuntimeException(e);
 }
 return fields;
}

Code example source: apache/hive

public static TableDesc getTableDesc(Table tbl) {
 Properties props = tbl.getMetadata();
 props.put(serdeConstants.SERIALIZATION_LIB, tbl.getDeserializer().getClass().getName());
 return (new TableDesc(tbl.getInputFormatClass(), tbl
   .getOutputFormatClass(), props));
}

Code example source: apache/drill

public static TableDesc getTableDesc(Table tbl) {
 Properties props = tbl.getMetadata();
 props.put(serdeConstants.SERIALIZATION_LIB, tbl.getDeserializer().getClass().getName());
 return (new TableDesc(tbl.getInputFormatClass(), tbl
   .getOutputFormatClass(), props));
}

Code example source: apache/hive

private List<FieldSchema> getColsInternal(boolean forMs) {
 String serializationLib = getSerializationLib();
 try {
  // Do the lightweight check for general case.
  if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) {
   return tTable.getSd().getCols();
  } else if (forMs && !shouldStoreFieldsInMetastore(
    SessionState.getSessionConf(), serializationLib, tTable.getParameters())) {
   return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer());
  } else {
   return HiveMetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer());
  }
 } catch (Exception e) {
  LOG.error("Unable to get field from serde: " + serializationLib, e);
 }
 return new ArrayList<FieldSchema>();
}

Code example source: apache/drill

private List<FieldSchema> getColsInternal(boolean forMs) {
 String serializationLib = getSerializationLib();
 try {
  // Do the lightweight check for general case.
  if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) {
   return tTable.getSd().getCols();
  } else if (forMs && !shouldStoreFieldsInMetastore(
    SessionState.getSessionConf(), serializationLib, tTable.getParameters())) {
   return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer());
  } else {
   return MetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer());
  }
 } catch (Exception e) {
  LOG.error("Unable to get field from serde: " + serializationLib, e);
 }
 return new ArrayList<FieldSchema>();
}

Code example source: apache/hive

static Object evalExprWithPart(ExprNodeDesc expr, Partition p, List<VirtualColumn> vcs)
  throws SemanticException {
 StructObjectInspector rowObjectInspector;
 Table tbl = p.getTable();
 try {
  rowObjectInspector = (StructObjectInspector) tbl
    .getDeserializer().getObjectInspector();
 } catch (SerDeException e) {
  throw new SemanticException(e);
 }
 try {
  return PartExprEvalUtils.evalExprWithPart(expr, p, vcs, rowObjectInspector);
 } catch (HiveException e) {
  throw new SemanticException(e);
 }
}

Code example source: apache/drill

static Object evalExprWithPart(ExprNodeDesc expr, Partition p, List<VirtualColumn> vcs)
  throws SemanticException {
 StructObjectInspector rowObjectInspector;
 Table tbl = p.getTable();
 try {
  rowObjectInspector = (StructObjectInspector) tbl
    .getDeserializer().getObjectInspector();
 } catch (SerDeException e) {
  throw new SemanticException(e);
 }
 try {
  return PartExprEvalUtils.evalExprWithPart(expr, p, vcs, rowObjectInspector);
 } catch (HiveException e) {
  throw new SemanticException(e);
 }
}

Code example source: apache/hive

private int updateColumns(Table tbl, Partition part)
    throws HiveException {
 String serializationLib = tbl.getSd().getSerdeInfo().getSerializationLib();
 if (MetastoreConf.getStringCollection(conf,
     MetastoreConf.ConfVars.SERDES_USING_METASTORE_FOR_SCHEMA).contains(serializationLib)) {
  throw new HiveException(tbl.getTableName() + " has serde " + serializationLib + " for which schema " +
      "is already handled by HMS.");
 }
 Deserializer deserializer = tbl.getDeserializer(true);
 try {
  LOG.info("Updating metastore columns for table: {}", tbl.getTableName());
  final List<FieldSchema> fields = HiveMetaStoreUtils.getFieldsFromDeserializer(
      tbl.getTableName(), deserializer);
  StorageDescriptor sd = retrieveStorageDescriptor(tbl, part);
  sd.setCols(fields);
 } catch (org.apache.hadoop.hive.serde2.SerDeException | MetaException e) {
  LOG.error("alter table update columns: {}", e);
  throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
 }
 return 0;
}

Code example source: apache/hive

try {
  rowObjectInspector = (StructObjectInspector) viewTable.getDeserializer()
      .getObjectInspector();
} catch (SerDeException e) {
  // ... (exception handling elided in the original snippet)
}

Code example source: apache/hive

private static class ThreadLocalHive extends ThreadLocal<Hive> {
 @Override
 protected Hive initialValue() {
  return null;
 }
 @Override
 public synchronized void set(Hive hiveObj) {
  Hive currentHive = this.get();
  if (currentHive != hiveObj) {
   // Remove/close current thread-local Hive object before overwriting with new Hive object.
   remove();
   super.set(hiveObj);
  }
 }
 @Override
 public synchronized void remove() {
  Hive currentHive = this.get();
  if (currentHive != null) {
   // Close the metastore connections before removing it from thread local hiveDB.
   currentHive.close(false);
   super.remove();
  }
 }
}

Code example source: apache/hive

Deserializer deserializer = tbl.getDeserializer();
HiveStoragePredicateHandler.DecomposedPredicate decomposed =
    predicateHandler.decomposePredicate(
        // ... (remaining arguments elided in the original snippet)

Code example source: apache/drill

try {
  rowObjectInspector = (StructObjectInspector) viewTable.getDeserializer()
      .getObjectInspector();
} catch (SerDeException e) {
  // ... (exception handling elided in the original snippet)
}

Code example source: apache/drill

// Truncated snippet: the first two lines are the tail of a preceding call,
// i.e. "... Utilities.getTableDesc(tbl), jobConf);".
Deserializer deserializer = tbl.getDeserializer();
HiveStoragePredicateHandler.DecomposedPredicate decomposed =
    predicateHandler.decomposePredicate(
        // ... (remaining arguments elided in the original snippet)

Code example source: apache/drill

try {
  StructObjectInspector rowObjectInspector = (StructObjectInspector) indexTableHandle
      .getDeserializer().getObjectInspector();
  StructField field = rowObjectInspector.getStructFieldRef(rewriteQueryCtx.getIndexKey());
  sigRS.add(new ColumnInfo(field.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(
      // ... (the rest of the statement and the catch block are truncated in the original snippet)

Code example source: apache/hive

List<ColumnStatisticsObj> colStats = null;
Deserializer deserializer = tbl.getDeserializer(true);
if (deserializer instanceof AbstractSerDe) {
  String errorMsgs = ((AbstractSerDe) deserializer).getConfigurationErrors();
  // ... (handling of the configuration errors is elided in the original snippet)
}

Code example source: apache/drill

// Truncated snippet: getDeserializer() is passed as the final argument of an enclosing call.
tbl.getDeserializer()));

Code example source: apache/hive

try {
  // ... (earlier assertions elided in the original snippet)
  assertEquals("SerializationLib is not set correctly", tbl
      .getSerializationLib(), ft.getSerializationLib());
  assertEquals("Serde is not set correctly", tbl.getDeserializer()
      .getClass().getName(), ft.getDeserializer().getClass().getName());
} catch (HiveException e) {
  System.err.println(StringUtils.stringifyException(e));
  // ... (remainder elided in the original snippet)
}
