Usage and code examples of the org.apache.hadoop.hive.ql.exec.Utilities.getResourceFiles() method


This article collects a number of Java code examples of org.apache.hadoop.hive.ql.exec.Utilities.getResourceFiles() and shows how the method is used in practice. The examples are extracted from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of Utilities.getResourceFiles() are as follows:
Package path: org.apache.hadoop.hive.ql.exec.Utilities
Class name: Utilities
Method name: getResourceFiles

About Utilities.getResourceFiles

No description is provided. Judging from the call sites collected below, getResourceFiles(Configuration conf, SessionState.ResourceType t) returns the resources of the given type (JAR, FILE, or ARCHIVE) that have been added to the current Hive session, as a comma-separated string of paths, or an empty string when there are none.
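
Before the extracted examples, here is a minimal self-contained sketch of the typical call pattern. It is an illustrative assumption rather than code from the projects below: the class name is made up, and the commons-lang3 StringUtils import may differ from the one used in the original projects.

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ResourceFilesSketch {
 public static void main(String[] args) {
  // getResourceFiles reads the resources registered in the current SessionState
  // (e.g. via ADD JAR / ADD FILE), so a session must be started first.
  HiveConf conf = new HiveConf();
  SessionState.start(conf);
  // Returns a comma-separated list of paths, or an empty string when no
  // resource of the requested type has been added to the session.
  String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
  if (StringUtils.isNotBlank(addedJars)) {
   // Record the jars in the configuration, as most call sites below do.
   HiveConf.setVar(conf, ConfVars.HIVEADDEDJARS, addedJars);
  }
  System.out.println("Session jars: " + addedJars);
 }
}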

Code examples

Code example source: apache/hive

private static String[] getTempArchivesFromConf(Configuration conf) {
 String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
 if (StringUtils.isNotBlank(addedArchives)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDARCHIVES, addedArchives);
  return addedArchives.split(",");
 }
 return new String[0];
}

Code example source: apache/drill

private static String[] getTempArchivesFromConf(Configuration conf) {
 String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
 if (StringUtils.isNotBlank(addedArchives)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDARCHIVES, addedArchives);
  return addedArchives.split(",");
 }
 return new String[0];
}

Code example source: apache/drill

private static String[] getTempFilesFromConf(Configuration conf) {
 String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
 if (StringUtils.isNotBlank(addedFiles)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDFILES, addedFiles);
 }
 String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
 if (StringUtils.isNotBlank(addedJars)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDJARS, addedJars);
 }
 String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
 // need to localize the additional jars and files
 // we need the directory on hdfs to which we shall put all these files
 String allFiles = auxJars + "," + addedJars + "," + addedFiles;
 return allFiles.split(",");
}

Code example source: apache/hive

public static String[] getTempFilesFromConf(Configuration conf) {
 if (conf == null) {
  return new String[0]; // In tests.
 }
 String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
 if (StringUtils.isNotBlank(addedFiles)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDFILES, addedFiles);
 }
 String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
 if (StringUtils.isNotBlank(addedJars)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDJARS, addedJars);
 }
 String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
 // need to localize the additional jars and files
 // we need the directory on hdfs to which we shall put all these files
 String allFiles = auxJars + "," + addedJars + "," + addedFiles;
 return allFiles.split(",");
}

Code example source: apache/hive

/**
 * Retrieve the resources from the current session and configuration for the given type.
 * @return Comma-separated list of resources
 */
protected static String getResource(HiveConf conf, SessionState.ResourceType resType) {
 switch(resType) {
 case JAR:
  String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
  String auxJars = conf.getAuxJars();
  String reloadableAuxJars = SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars();
  return HiveStringUtils.joinIgnoringEmpty(new String[]{addedJars, auxJars, reloadableAuxJars}, ',');
 case FILE:
  return Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
 case ARCHIVE:
  return Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
 }
 return null;
}

Code example source: apache/drill

/**
 * Retrieve the resources from the current session and configuration for the given type.
 * @return Comma-separated list of resources
 */
protected static String getResource(HiveConf conf, SessionState.ResourceType resType) {
 switch(resType) {
 case JAR:
  String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
  String auxJars = conf.getAuxJars();
  String reloadableAuxJars = SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars();
  return HiveStringUtils.joinIgnoringEmpty(new String[]{addedJars, auxJars, reloadableAuxJars}, ',');
 case FILE:
  return Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
 case ARCHIVE:
  return Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
 }
 return null;
}

Code example source: apache/hive

private synchronized void refreshLocalResources(SparkWork sparkWork, HiveConf conf) throws IOException {
 // add hive-exec jar
 addJars((new JobConf(this.getClass())).getJar());
 // add aux jars
 addJars(conf.getAuxJars());
 addJars(SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars());
 // add added jars
 String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
 HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDJARS, addedJars);
 addJars(addedJars);
 // add plugin module jars on demand
 // jobConf will hold all the configuration for hadoop, tez, and hive
 JobConf jobConf = new JobConf(conf);
 jobConf.set(MR_JAR_PROPERTY, "");
 for (BaseWork work : sparkWork.getAllWork()) {
  work.configureJobConf(jobConf);
 }
 addJars(jobConf.get(MR_JAR_PROPERTY));
 // remove the location of container tokens
 conf.unset(MR_CREDENTIALS_LOCATION_PROPERTY);
 // add added files
 String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
 HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDFILES, addedFiles);
 addResources(addedFiles);
 // add added archives
 String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
 HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDARCHIVES, addedArchives);
 addResources(addedArchives);
}

Code example source: apache/hive

String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDJARS, addedJars);
addJars(addedJars);
String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDFILES, addedFiles);
addResources(addedFiles);
String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDARCHIVES, addedArchives);
addResources(addedArchives);

Code example source: apache/hive

conf.setVar(ConfVars.HIVEADDEDJARS, Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR));
String files = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);

Code example source: apache/drill

private void refreshLocalResources(SparkWork sparkWork, HiveConf conf) throws IOException {
 // add hive-exec jar
 addJars((new JobConf(this.getClass())).getJar());
 // add aux jars
 addJars(conf.getAuxJars());
 addJars(SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars());
 // add added jars
 String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
 HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDJARS, addedJars);
 addJars(addedJars);
 // add plugin module jars on demand
 // jobConf will hold all the configuration for hadoop, tez, and hive
 JobConf jobConf = new JobConf(conf);
 jobConf.set(MR_JAR_PROPERTY, "");
 for (BaseWork work : sparkWork.getAllWork()) {
  work.configureJobConf(jobConf);
 }
 addJars(conf.get(MR_JAR_PROPERTY));
 // add added files
 String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
 HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDFILES, addedFiles);
 addResources(addedFiles);
 // add added archives
 String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
 HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDARCHIVES, addedArchives);
 addResources(addedArchives);
}

Code example source: apache/drill

conf.setVar(ConfVars.HIVEADDEDJARS, Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR));
String files = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);

Code example source: apache/drill

String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDJARS, addedJars);
addJars(addedJars);
String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDFILES, addedFiles);
addResources(addedFiles);
String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDARCHIVES, addedArchives);
addResources(addedArchives);

Code example source: apache/hive

String addedJars = Utilities.getResourceFiles(job, SessionState.ResourceType.JAR);
if (!addedJars.isEmpty()) {
 job.set("tmpjars", addedJars);
}

Code example source: apache/hive

@Override
public OperationHandle getColumns(String catalogName, String schemaName,
  String tableName, String columnName)  throws HiveSQLException {
 acquire(true, true);
 String addedJars = Utilities.getResourceFiles(sessionConf, SessionState.ResourceType.JAR);
 if (StringUtils.isNotBlank(addedJars)) {
   IMetaStoreClient metastoreClient = getSession().getMetaStoreClient();
   metastoreClient.setHiveAddedJars(addedJars);
 }
 OperationManager operationManager = getOperationManager();
 GetColumnsOperation operation = operationManager.newGetColumnsOperation(getSession(),
   catalogName, schemaName, tableName, columnName);
 OperationHandle opHandle = operation.getHandle();
 try {
  addOpHandle(opHandle);
  operation.run();
  return opHandle;
 } catch (HiveSQLException e) {
  removeOpHandle(opHandle);
  operationManager.closeOperation(opHandle);
  throw e;
 } finally {
  release(true, true);
 }
}

Code example source: apache/drill

String addedJars = Utilities.getResourceFiles(job, SessionState.ResourceType.JAR);
if (!addedJars.isEmpty()) {
 job.set("tmpjars", addedJars);
}

Code example source: apache/hive

String addedJars = Utilities.getResourceFiles(job,
  SessionState.ResourceType.JAR);
if (!addedJars.isEmpty()) {

Code example source: apache/drill

String addedJars = Utilities.getResourceFiles(job,
  SessionState.ResourceType.JAR);
if (!addedJars.isEmpty()) {

Code example source: apache/drill

String addedJars = Utilities.getResourceFiles(job, SessionState.ResourceType.JAR);
if (!addedJars.isEmpty()) {
 job.set("tmpjars", addedJars);
}

Code example source: com.facebook.presto.hive/hive-apache

private static String[] getTempArchivesFromConf(Configuration conf) {
 String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
 if (StringUtils.isNotBlank(addedArchives)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDARCHIVES, addedArchives);
  return addedArchives.split(",");
 }
 return new String[0];
}

Code example source: com.facebook.presto.hive/hive-apache

private static String[] getTempFilesFromConf(Configuration conf) {
 String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
 if (StringUtils.isNotBlank(addedFiles)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDFILES, addedFiles);
 }
 String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
 if (StringUtils.isNotBlank(addedJars)) {
  HiveConf.setVar(conf, ConfVars.HIVEADDEDJARS, addedJars);
 }
 String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
 // need to localize the additional jars and files
 // we need the directory on hdfs to which we shall put all these files
 String allFiles = auxJars + "," + addedJars + "," + addedFiles;
 return allFiles.split(",");
}
