org.apache.hadoop.hive.ql.metadata.Table.getStorageHandler()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(9.8k)|赞(0)|评价(0)|浏览(74)

本文整理了Java中org.apache.hadoop.hive.ql.metadata.Table.getStorageHandler()方法的一些代码示例,展示了Table.getStorageHandler()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Table.getStorageHandler()方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Table
类名称:Table
方法名:getStorageHandler

Table.getStorageHandler介绍

暂无

代码示例

代码示例来源:origin: apache/hive

/**
 * Classifies a Hive table by its storage handler.
 *
 * @param tabMetaData table metadata to inspect
 * @return {@code DRUID} or {@code JDBC} when the table is backed by the
 *         corresponding storage handler, otherwise {@code NATIVE}
 */
private static TableType obtainTableType(Table tabMetaData) {
  // Fetch the handler once instead of calling getStorageHandler() twice.
  final HiveStorageHandler storageHandler = tabMetaData.getStorageHandler();
  if (storageHandler != null) {
    final String storageHandlerStr = storageHandler.toString();
    if (storageHandlerStr.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
      return TableType.DRUID;
    }
    if (storageHandlerStr.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)) {
      return TableType.JDBC;
    }
  }
  return TableType.NATIVE;
}

代码示例来源:origin: apache/drill

/**
 * Classifies a Hive table by its storage handler.
 *
 * @param tabMetaData table metadata to inspect
 * @return {@code DRUID} when the table is backed by the Druid storage
 *         handler, otherwise {@code NATIVE}
 */
private TableType obtainTableType(Table tabMetaData) {
  // Fetch the handler once instead of calling getStorageHandler() twice.
  final HiveStorageHandler storageHandler = tabMetaData.getStorageHandler();
  if (storageHandler != null
      && storageHandler.toString().equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
    return TableType.DRUID;
  }
  return TableType.NATIVE;
}

代码示例来源:origin: apache/drill

/**
 * Classifies a Hive table by its storage handler.
 *
 * @param tabMetaData table metadata to inspect
 * @return {@code DRUID} when the table is backed by the Druid storage
 *         handler, otherwise {@code NATIVE}
 */
private static TableType obtainTableType(Table tabMetaData) {
  // Fetch the handler once instead of calling getStorageHandler() twice.
  final HiveStorageHandler storageHandler = tabMetaData.getStorageHandler();
  if (storageHandler != null
      && storageHandler.toString().equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
    return TableType.DRUID;
  }
  return TableType.NATIVE;
}

代码示例来源:origin: apache/hive

/**
 * Classifies a Hive table by its storage handler.
 *
 * @param tabMetaData table metadata to inspect
 * @return {@code DRUID} or {@code JDBC} when the table is backed by the
 *         corresponding storage handler, otherwise {@code NATIVE}
 */
private TableType obtainTableType(Table tabMetaData) {
  // Fetch the handler once instead of calling getStorageHandler() twice.
  final HiveStorageHandler storageHandler = tabMetaData.getStorageHandler();
  if (storageHandler != null) {
    final String storageHandlerStr = storageHandler.toString();
    if (storageHandlerStr.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
      return TableType.DRUID;
    }
    if (storageHandlerStr.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)) {
      return TableType.JDBC;
    }
  }
  return TableType.NATIVE;
}

代码示例来源:origin: apache/drill

/**
 * Decides whether a table requires an explicit storage location.
 * Tables backed by the HBase or Druid storage handlers manage their own
 * storage and therefore do not need one.
 *
 * @param tbl the table to examine
 * @return {@code true} unless the table uses the HBase or Druid handler
 */
public static boolean doesTableNeedLocation(Table tbl) {
  // TODO: If we are ok with breaking compatibility of existing 3rd party StorageHandlers,
  // this method could be moved to the HiveStorageHandler interface.
  if (tbl.getStorageHandler() == null) {
    return true;
  }
  String handlerName = tbl.getStorageHandler().toString();
  boolean isHBase = handlerName.equals("org.apache.hadoop.hive.hbase.HBaseStorageHandler");
  boolean isDruid = handlerName.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID);
  return !(isHBase || isDruid);
}
}

代码示例来源:origin: apache/hive

/**
 * Decides whether a table requires an explicit storage location.
 * Tables backed by the HBase, Druid, JDBC or Accumulo storage handlers
 * manage their own storage and therefore do not need one.
 *
 * @param tbl the table to examine
 * @return {@code true} unless the table uses one of the handlers above
 */
public static boolean doesTableNeedLocation(Table tbl) {
  // TODO: If we are ok with breaking compatibility of existing 3rd party StorageHandlers,
  // this method could be moved to the HiveStorageHandler interface.
  if (tbl.getStorageHandler() == null) {
    return true;
  }
  // TODO: why doesn't this check class name rather than toString?
  String handlerName = tbl.getStorageHandler().toString();
  if (handlerName.equals("org.apache.hadoop.hive.hbase.HBaseStorageHandler")
      || handlerName.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)
      || handlerName.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)
      || handlerName.equals("org.apache.hadoop.hive.accumulo.AccumuloStorageHandler")) {
    return false;
  }
  return true;
}

代码示例来源:origin: apache/drill

/**
 * Lazily resolves and caches this table's {@link InputFormat} class.
 * When the storage descriptor names no input format, the class comes from
 * the storage handler (or {@code null} if there is none).
 *
 * @return the input format class, or {@code null} when it cannot be determined
 * @throws RuntimeException wrapping {@link ClassNotFoundException} when the
 *         named class cannot be loaded
 */
final public Class<? extends InputFormat> getInputFormatClass() {
  if (inputFormatClass != null) {
    return inputFormatClass;
  }
  String className = tTable.getSd().getInputFormat();
  if (className == null) {
    // Non-native table: delegate to the storage handler if present.
    if (getStorageHandler() == null) {
      return null;
    }
    inputFormatClass = getStorageHandler().getInputFormatClass();
  } else {
    try {
      inputFormatClass = (Class<? extends InputFormat>)
          Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
  return inputFormatClass;
}

代码示例来源:origin: apache/hive

/**
 * Lazily resolves and caches this table's {@link InputFormat} class.
 * When the storage descriptor names no input format, the class comes from
 * the storage handler (or {@code null} if there is none).
 *
 * @return the input format class, or {@code null} when it cannot be determined
 * @throws RuntimeException wrapping {@link ClassNotFoundException} when the
 *         named class cannot be loaded
 */
final public Class<? extends InputFormat> getInputFormatClass() {
  if (inputFormatClass != null) {
    return inputFormatClass;
  }
  String formatName = tTable.getSd().getInputFormat();
  if (formatName == null) {
    // Non-native table: delegate to the storage handler if present.
    if (getStorageHandler() == null) {
      return null;
    }
    inputFormatClass = getStorageHandler().getInputFormatClass();
  } else {
    try {
      inputFormatClass = (Class<? extends InputFormat>)
          Class.forName(formatName, true, Utilities.getSessionSpecifiedClassLoader());
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
  return inputFormatClass;
}

代码示例来源:origin: apache/drill

/**
 * Lazily resolves and caches this table's {@link OutputFormat} class.
 * When the storage descriptor names no output format, the class comes from
 * the storage handler (or {@code null} if there is none). The resolved class
 * is passed through {@code HiveFileFormatUtils.getOutputFormatSubstitute}
 * to replace legacy {@code FileOutputFormat}s for backward compatibility.
 *
 * @return the output format class, or {@code null} when it cannot be determined
 * @throws RuntimeException wrapping {@link ClassNotFoundException} when the
 *         named class cannot be loaded
 */
final public Class<? extends OutputFormat> getOutputFormatClass() {
  if (outputFormatClass != null) {
    return outputFormatClass;
  }
  String className = tTable.getSd().getOutputFormat();
  Class<?> resolved;
  if (className == null) {
    // Non-native table: delegate to the storage handler if present.
    if (getStorageHandler() == null) {
      return null;
    }
    resolved = getStorageHandler().getOutputFormatClass();
  } else {
    try {
      resolved = Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
  // Replace FileOutputFormat for backward compatibility
  outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(resolved);
  return outputFormatClass;
}

代码示例来源:origin: apache/hive

/**
 * Lazily resolves and caches this table's {@link OutputFormat} class.
 * When the storage descriptor names no output format, the class comes from
 * the storage handler (or {@code null} if there is none). The resolved class
 * is passed through {@code HiveFileFormatUtils.getOutputFormatSubstitute}
 * to replace legacy {@code FileOutputFormat}s for backward compatibility.
 *
 * @return the output format class, or {@code null} when it cannot be determined
 * @throws RuntimeException wrapping {@link ClassNotFoundException} when the
 *         named class cannot be loaded
 */
final public Class<? extends OutputFormat> getOutputFormatClass() {
  if (outputFormatClass != null) {
    return outputFormatClass;
  }
  String formatName = tTable.getSd().getOutputFormat();
  Class<?> resolved;
  if (formatName == null) {
    // Non-native table: delegate to the storage handler if present.
    if (getStorageHandler() == null) {
      return null;
    }
    resolved = getStorageHandler().getOutputFormatClass();
  } else {
    try {
      resolved = Class.forName(formatName, true, Utilities.getSessionSpecifiedClassLoader());
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
  // Replace FileOutputFormat for backward compatibility
  outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(resolved);
  return outputFormatClass;
}

代码示例来源:origin: apache/hive

/**
 * Marks native-table scans as metadata-only and applies the per-path
 * alias rewrite for every affected path in the map work.
 *
 * @param work       the map work being optimized
 * @param tableScans table scan operators eligible for the optimization
 */
private void processAlias(MapWork work, Set<TableScanOperator> tableScans) {
  Set<String> metadataOnlyAliases = new HashSet<>();
  for (TableScanOperator scan : tableScans) {
    // use LinkedHashMap<String, Operator<? extends OperatorDesc>>
    // getAliasToWork() should not apply this for non-native table
    if (scan.getConf().getTableMetadata().getStorageHandler() != null) {
      continue;
    }
    metadataOnlyAliases.add(getAliasForTableScanOperator(work, scan));
    scan.getConf().setIsMetadataOnly(true);
  }
  // group path alias according to work
  Map<Path, ArrayList<String>> pathsToRewrite = new HashMap<>();
  for (Path path : work.getPaths()) {
    ArrayList<String> aliasesAffected = work.getPathToAliases().get(path);
    if (CollectionUtils.isNotEmpty(aliasesAffected)) {
      pathsToRewrite.put(path, aliasesAffected);
    }
  }
  for (Entry<Path, ArrayList<String>> entry : pathsToRewrite.entrySet()) {
    processAlias(work, entry.getKey(), entry.getValue(), metadataOnlyAliases);
  }
}

代码示例来源:origin: apache/drill

/**
 * Marks native-table scans as metadata-only and applies the per-path
 * alias rewrite for every affected path in the map work.
 *
 * @param work       the map work being optimized
 * @param tableScans table scan operators eligible for the optimization
 */
private void processAlias(MapWork work, HashSet<TableScanOperator> tableScans) {
  ArrayList<String> metadataOnlyAliases = new ArrayList<String>();
  for (TableScanOperator scan : tableScans) {
    // use LinkedHashMap<String, Operator<? extends OperatorDesc>>
    // getAliasToWork()
    // should not apply this for non-native table
    if (scan.getConf().getTableMetadata().getStorageHandler() != null) {
      continue;
    }
    metadataOnlyAliases.add(getAliasForTableScanOperator(work, scan));
    scan.getConf().setIsMetadataOnly(true);
  }
  // group path alias according to work
  LinkedHashMap<Path, ArrayList<String>> pathsToRewrite = new LinkedHashMap<>();
  for (Path path : work.getPaths()) {
    ArrayList<String> aliasesAffected = work.getPathToAliases().get(path);
    if (aliasesAffected != null && !aliasesAffected.isEmpty()) {
      pathsToRewrite.put(path, aliasesAffected);
    }
  }
  for (Entry<Path, ArrayList<String>> entry : pathsToRewrite.entrySet()) {
    processAlias(work, entry.getKey(), entry.getValue(), metadataOnlyAliases);
  }
}

代码示例来源:origin: apache/hive

Utilities.setColumnNameList(jobConf, scanOp, true);
Utilities.setColumnTypeList(jobConf, scanOp, true);
HiveStorageHandler handler = table.getStorageHandler();
if (handler instanceof InputEstimator) {
 InputEstimator estimator = (InputEstimator) handler;

代码示例来源:origin: apache/hive

/**
 * Runs the storage handler's pre-insert hook for the target table, if the
 * hook is a {@link DefaultHiveMetaHook}.
 *
 * @param db                 Hive session handle (unused here, kept for the caller's contract)
 * @param preInsertTableDesc describes the table and overwrite flag of the insert
 * @return always 0
 * @throws HiveException wrapping any {@link MetaException} raised by the hook
 */
private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
  try {
    // NOTE(review): assumes the table has a storage handler — getStorageHandler()
    // returning null would NPE here; confirm callers guarantee this.
    HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();
    // instanceof is false for null, so the former explicit null check was redundant.
    if (!(hook instanceof DefaultHiveMetaHook)) {
      return 0;
    }
    DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
    hiveMetaHook.preInsertTable(preInsertTableDesc.getTable().getTTable(), preInsertTableDesc.isOverwrite());
  } catch (MetaException e) {
    throw new HiveException(e);
  }
  return 0;
}

代码示例来源:origin: apache/hive

&& jobConf.get(ConfVars.HIVE_EXECUTION_ENGINE.varname).equals("mr")
 && (scanDesc.getTableMetadata() == null
   || !(scanDesc.getTableMetadata().getStorageHandler() instanceof HiveStoragePredicateHandler))) {
return;

代码示例来源:origin: apache/drill

/**
 * Runs the storage handler's pre-insert hook for the target table, if the
 * hook is a {@link DefaultHiveMetaHook}.
 *
 * @param db                 Hive session handle (unused here, kept for the caller's contract)
 * @param preInsertTableDesc describes the table and overwrite flag of the insert
 * @return always 0
 * @throws HiveException wrapping any {@link MetaException} raised by the hook
 */
private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
  try {
    // NOTE(review): assumes the table has a storage handler — getStorageHandler()
    // returning null would NPE here; confirm callers guarantee this.
    HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();
    // instanceof is false for null, so the former explicit null check was redundant.
    if (!(hook instanceof DefaultHiveMetaHook)) {
      return 0;
    }
    DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
    hiveMetaHook.preInsertTable(preInsertTableDesc.getTable().getTTable(), preInsertTableDesc.isOverwrite());
  } catch (MetaException e) {
    throw new HiveException(e);
  }
  return 0;
}

代码示例来源:origin: apache/hive

return originalPredicate;
HiveStorageHandler storageHandler = tbl.getStorageHandler();
if (!(storageHandler instanceof HiveStoragePredicateHandler)) {

代码示例来源:origin: apache/hive

/**
 * Commits an insert through the storage handler's meta hook, rolling the
 * insert back if the commit throws.
 *
 * @param db                   Hive session handle (unused here, kept for the caller's contract)
 * @param insertCommitHookDesc describes the table and overwrite flag of the insert
 * @return always 0
 * @throws MetaException if the commit (or the subsequent rollback) fails
 */
private int insertCommitWork(Hive db, InsertCommitHookDesc insertCommitHookDesc) throws MetaException {
  // NOTE(review): assumes the table has a storage handler — getStorageHandler()
  // returning null would NPE here; confirm callers guarantee this.
  HiveMetaHook hook = insertCommitHookDesc.getTable().getStorageHandler().getMetaHook();
  // instanceof is false for null, so the former explicit null check was redundant.
  if (!(hook instanceof DefaultHiveMetaHook)) {
    return 0;
  }
  DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
  boolean failed = true;
  try {
    hiveMetaHook.commitInsertTable(insertCommitHookDesc.getTable().getTTable(),
        insertCommitHookDesc.isOverwrite()
    );
    failed = false;
  } finally {
    // Undo the insert if the commit did not complete normally.
    if (failed) {
      hiveMetaHook.rollbackInsertTable(insertCommitHookDesc.getTable().getTTable(),
          insertCommitHookDesc.isOverwrite()
      );
    }
  }
  return 0;
}

代码示例来源:origin: apache/drill

/**
 * Commits an insert through the storage handler's meta hook, rolling the
 * insert back if the commit throws.
 *
 * @param db              Hive session handle (unused here, kept for the caller's contract)
 * @param insertTableDesc describes the table and overwrite flag of the insert
 * @return always 0
 * @throws MetaException if the commit (or the subsequent rollback) fails
 */
private int insertCommitWork(Hive db, InsertTableDesc insertTableDesc) throws MetaException {
  // NOTE(review): assumes the table has a storage handler — getStorageHandler()
  // returning null would NPE here; confirm callers guarantee this.
  HiveMetaHook hook = insertTableDesc.getTable().getStorageHandler().getMetaHook();
  // instanceof is false for null, so the former explicit null check was redundant.
  if (!(hook instanceof DefaultHiveMetaHook)) {
    return 0;
  }
  DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
  boolean failed = true;
  try {
    hiveMetaHook.commitInsertTable(insertTableDesc.getTable().getTTable(),
        insertTableDesc.isOverwrite()
    );
    failed = false;
  } finally {
    // Undo the insert if the commit did not complete normally.
    if (failed) {
      hiveMetaHook.rollbackInsertTable(insertTableDesc.getTable().getTTable(),
          insertTableDesc.isOverwrite()
      );
    }
  }
  return 0;
}

代码示例来源:origin: apache/hive

getStorageHandler());
HiveStorageHandler storageHandler = tbl.getStorageHandler();

相关文章

微信公众号

最新文章

更多

Table类方法