Usage of the org.apache.hadoop.hive.shims.Utils Class, with Code Examples

x33g5p2x · reposted 2022-02-01 under "Other"

This article collects code examples for the Java class org.apache.hadoop.hive.shims.Utils and shows how the Utils class is used in practice. The examples were extracted from selected projects on GitHub, Stack Overflow, Maven, and similar platforms, and should make a useful reference. Details of the Utils class:
Package: org.apache.hadoop.hive.shims
Class name: Utils

About Utils

The source page gives no official description. Judging from the examples below, Utils is the Hive shims helper class whose most commonly used members are getUGI() (resolve the current UserGroupInformation), getTokenStrForm() and setTokenStr() (read a delegation token by its signature and attach one to a UGI), and setZookeeperClientKerberosJaasConfig() (install a JAAS configuration for kerberized ZooKeeper clients).
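
As a quick orientation before the examples, here is a minimal sketch of the two most common calls. It is illustrative only: it assumes the hive-shims and hadoop-common jars are on the classpath, and the token-signature string passed to getTokenStrForm() is a placeholder, not a real signature.

import java.io.IOException;
import javax.security.auth.login.LoginException;

import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;

public class UtilsQuickstart {
 public static void main(String[] args) throws IOException, LoginException {
  // Resolve the effective user, honoring any configured impersonation.
  UserGroupInformation ugi = Utils.getUGI();
  System.out.println("Short user name: " + ugi.getShortUserName());

  // Look up a delegation token by signature, as the metastore client
  // examples below do; null means no matching token is present.
  String tokenStrForm = Utils.getTokenStrForm("placeholder-token-signature");
  System.out.println("Delegation token present: " + (tokenStrForm != null));
 }
}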

Code Examples

Code example source: apache/hive

@Override
protected int execute(DriverContext driverContext) {
 String distCpDoAsUser = conf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
 if (conf.getBoolVar(HiveConf.ConfVars.REPL_ADD_RAW_RESERVED_NAMESPACE)) {
  sourcePath = reservedRawPath(work.fullyQualifiedSourcePath.toUri());
  targetPath = reservedRawPath(work.fullyQualifiedTargetPath.toUri());
 }
 while (currentRetry < MAX_COPY_RETRY) {
  try {
   // Compare the current UGI user with the configured distcp doAs user
   // to decide whether the copy must run as a privileged proxy user.
   UserGroupInformation ugi = Utils.getUGI();
   String currentUser = ugi.getShortUserName();
   boolean usePrivilegedUser =
     distCpDoAsUser != null && !currentUser.equals(distCpDoAsUser);
   // ... (rest of the retry loop truncated in the original snippet)

Code example source: apache/drill

if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
 // ... (no impersonation; branch truncated in the original snippet)
} else {
 ugiForRpc = ImpersonationUtil.createProxyUgi(userName);
 if (hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL)) {
  try {
   // Attach the metastore delegation token to the proxy UGI under a
   // well-known signature so the Thrift client can find it later.
   Utils.setTokenStr(ugiForRpc, delegationToken, HiveClientWithAuthzWithCaching.DRILL2HMS_TOKEN);
  } catch (IOException e) {
   throw new DrillRuntimeException("Couldn't setup delegation token in the UGI for Hive MetaStoreClient", e);
  }
  hiveConfForClient = new HiveConf(hiveConf);
  hiveConfForClient.set("hive.metastore.token.signature", HiveClientWithAuthzWithCaching.DRILL2HMS_TOKEN);
 }
}
return ugiForRpc.doAs(new PrivilegedExceptionAction<DrillHiveMetaStoreClient>() {
 @Override
 public DrillHiveMetaStoreClient run() throws Exception {
  // ... (client construction truncated in the original snippet)

Code example source: apache/hive

/**
 * For a kerberized cluster, dynamically set up the client's JAAS configuration.
 *
 * @param hiveConf the active Hive configuration
 * @throws Exception if the principal or keytab is missing, or JAAS setup fails
 */
private void setUpZooKeeperAuth(HiveConf hiveConf) throws Exception {
 if (UserGroupInformation.isSecurityEnabled()) {
  String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
  if (principal.isEmpty()) {
   throw new IOException("HiveServer2 Kerberos principal is empty");
  }
  String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
  if (keyTabFile.isEmpty()) {
   throw new IOException("HiveServer2 Kerberos keytab is empty");
  }
  // Install the JAAS Configuration for the runtime
  Utils.setZookeeperClientKerberosJaasConfig(principal, keyTabFile);
 }
}

Code example source: apache/hive

// ... (start of the method signature truncated in the original snippet)
  final FileStatus stat, final FsAction action, final String user, final List<FileStatus> children)
    throws IOException, AccessControlException, InterruptedException, Exception {
 // Run the check as a proxy for the given user, on top of the login user.
 UserGroupInformation ugi = Utils.getUGI();
 String currentUser = ugi.getShortUserName();
 UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(
   user, UserGroupInformation.getLoginUser());
 try {
  proxyUser.doAs(new PrivilegedExceptionAction<Object>() {
   // ... (privileged action body truncated in the original snippet)

Code example source: apache/hive

String hiveJar = conf.getJar();
String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
conf.setVar(ConfVars.HIVEADDEDJARS, Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR));
// Export the short user name of the current UGI to the child environment.
String endUserName = Utils.getUGI().getShortUserName();
LOG.debug("setting HADOOP_USER_NAME\t" + endUserName);
variables.put("HADOOP_USER_NAME", endUserName);
if (UserGroupInformation.isSecurityEnabled() &&
   UserGroupInformation.isLoginKeytabBased()) {
 // ... (keytab handling truncated in the original snippet)

Code example source: apache/hive

// ... (start of the method signature truncated in the original snippet)
  throws IOException, LoginException, URISyntaxException, TezException {
 String confQueueName = conf.get(TezConfiguration.TEZ_QUEUE_NAME);
 if (queueName != null && !queueName.equals(confQueueName)) {
  LOG.warn("Resetting a queue name that was already set: was ");
  // (rest of the warning message truncated in the original snippet)
 }
 this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
 final boolean llapMode = "llap".equalsIgnoreCase(HiveConf.getVar(
   conf, HiveConf.ConfVars.HIVE_EXECUTION_MODE));
 // The session owner is taken from the current UGI.
 UserGroupInformation ugi = Utils.getUGI();
 user = ugi.getShortUserName();
 LOG.info("User of session id " + sessionId + " is " + user);
 if (UserGroupInformation.isSecurityEnabled()) {
  llapCredentials = new Credentials();
  llapCredentials.addToken(LlapTokenIdentifier.KIND_NAME, getLlapToken(user, tezConfig));
  // ... (snippet truncated in the original source)

Code example source: apache/hive

public static void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
  throws IOException, AccessControlException, LoginException {
 // Get the user/groups for checking permissions based on the current UGI.
 UserGroupInformation currentUgi = Utils.getUGI();
 DefaultFileAccess.checkFileAccess(fs, stat, action,
   currentUgi.getShortUserName(),
   Arrays.asList(currentUgi.getGroupNames()));
}
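
The helper above accepts any FileSystem, FileStatus, and FsAction. Below is a hypothetical call site; it assumes the helper is Hive's org.apache.hadoop.hive.common.FileUtils.checkFileAccess (the snippet does not name its enclosing class) and that the checked path exists.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.common.FileUtils;

public class CheckAccessDemo {
 public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  // The path is illustrative; use any file visible to the current user.
  FileStatus stat = fs.getFileStatus(new Path("/tmp"));
  // Throws AccessControlException when the current UGI lacks READ access.
  FileUtils.checkFileAccess(fs, stat, FsAction.READ);
  System.out.println("Read access confirmed for " + stat.getPath());
 }
}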

Code example source: org.spark-project.hive/hive-metastore

private void open() throws MetaException {
 isConnected = false;
 TTransportException tte = null;
 boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
 boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT);
 boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL);
 int clientSocketTimeout = (int) conf.getTimeVar(
   ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
 // ... (transport setup elided in the original snippet; in the SASL path
 // the client looks up a delegation token by its configured signature)
 String tokenSig = conf.get("hive.metastore.token.signature");
 tokenStrForm = Utils.getTokenStrForm(tokenSig);
 if (tokenStrForm != null) {
  // ... (token-based SASL transport elided in the original snippet)
 }
 // In the unsecured path, report the caller's user and groups to the server.
 try {
  UserGroupInformation ugi = Utils.getUGI();
  client.set_ugi(ugi.getUserName(), Arrays.asList(ugi.getGroupNames()));
 } catch (LoginException e) {
  LOG.warn("Failed to do login. set_ugi() is not successful, " +
    // ... (rest of the statement truncated in the original snippet)

Code example source: apache/hive

private void configureImpersonation(Configuration conf) {
 String user;
 try {
  user = Utils.getUGI().getShortUserName();
 } catch (Exception e) {
  String msg = "Cannot obtain username: " + e;
  throw new IllegalStateException(msg, e);
 }
 conf.set("hadoop.proxyuser." + user + ".groups", "*");
 conf.set("hadoop.proxyuser." + user + ".hosts", "*");
}
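
A possible follow-up to configureImpersonation above: Hadoop caches proxy-user rules, so after setting them the ProxyUsers cache should be refreshed, exactly as the test setup in the next example does. A minimal sketch (the class and main method are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.authorize.ProxyUsers;

public class ImpersonationSetupDemo {
 public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String user = Utils.getUGI().getShortUserName();
  // Same settings as configureImpersonation above: let the current
  // user impersonate any group from any host.
  conf.set("hadoop.proxyuser." + user + ".groups", "*");
  conf.set("hadoop.proxyuser." + user + ".hosts", "*");
  // Apply the new rules to Hadoop's cached proxy-user configuration.
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 }
}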

Code example source: apache/hive

@BeforeClass
public static void startMetaStoreServer() throws Exception {
 hcatConf = new HiveConf(TestHCatClient.class);
 String metastoreUri = System.getProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname);
 if (metastoreUri != null) {
  hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri);
  useExternalMS = true;
  return;
 }
 // Let the current user impersonate others from any host, then refresh
 // Hadoop's cached proxy-user rules so the setting takes effect.
 conf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*");
 ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 Policy.setPolicy(new DerbyPolicy());
 // ... (embedded metastore startup elided in the original snippet)
 hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
 hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
 // ... (snippet truncated in the original source)

Code example source: apache/hive

String queueName = conf.get(TezConfiguration.TEZ_QUEUE_NAME);
boolean hasQueue = (queueName != null) && !queueName.isEmpty();
if (hasQueue) {
 // ... (queue validation elided in the original snippet)
 queueName = null;
 hasQueue = false;
 conf.unset(TezConfiguration.TEZ_QUEUE_NAME);
}
if (userName == null) { // guard reconstructed from the log message below
 // Fall back to the UGI user when no session user was supplied.
 userName = Utils.getUGI().getShortUserName();
 LOG.info("No session user set; using the UGI user " + userName);
}
conf.set(ConfVars.LLAP_CACHE_DEFAULT_FS_FILE_ID.varname,
  HiveConf.getVarWithoutType(initConf, ConfVars.LLAP_CACHE_DEFAULT_FS_FILE_ID));

Code example source: apache/hive

/**
 * @return the user name set in hadoop.job.ugi param or the current user from System
 * @throws IOException
 */
public String getUser() throws IOException {
 try {
  UserGroupInformation ugi = Utils.getUGI();
  return ugi.getUserName();
 } catch (LoginException le) {
  throw new IOException(le);
 }
}

Code example source: apache/hive

// ... (start of the condition truncated in the original snippet)
   conf.getBoolVar(HiveConf.ConfVars.LOCALMODEAUTO)) {
runningViaChild = conf.getBoolVar(HiveConf.ConfVars.SUBMITVIACHILD);
String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
String hiveJar = conf.getJar();
// If the UGI authenticated as a proxy, forward the proxy user to the child.
if (PROXY == Utils.getUGI().getAuthenticationMethod()) {
 variables.put(HADOOP_PROXY_USER, Utils.getUGI().getShortUserName());
}

Code example source: apache/hive

/**
 * createTezDir creates a temporary directory in the scratchDir folder to
 * be used with Tez. Assumes scratchDir exists.
 */
public Path createTezDir(Path scratchDir, Configuration conf)
  throws IOException {
 UserGroupInformation ugi;
 String userName = System.getProperty("user.name");
 try {
  ugi = Utils.getUGI();
  userName = ugi.getShortUserName();
 } catch (LoginException e) {
  throw new IOException(e);
 }
 scratchDir = new Path(scratchDir, userName);
 Path tezDir = getTezDir(scratchDir);
 if (!HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
  FileSystem fs = tezDir.getFileSystem(conf);
  LOG.debug("TezDir path set " + tezDir + " for user: " + userName);
  // since we are adding the user name to the scratch dir, we do not
  // need to give more permissions here
  // Since we are doing RPC creating a dir is not necessary
  fs.mkdirs(tezDir);
 }
 return tezDir;
}

Code example source: apache/hive

/**
 * @param conf the active configuration
 * @return path to the destination directory on HDFS
 * @throws LoginException if we are unable to determine user information
 * @throws IOException when any DFS operation fails
 */
@SuppressWarnings("deprecation")
public Path getDefaultDestDir(Configuration conf) throws LoginException, IOException {
 UserGroupInformation ugi = Utils.getUGI();
 String userName = ugi.getShortUserName();
 String userPathStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_USER_INSTALL_DIR);
 Path userPath = new Path(userPathStr);
 FileSystem fs = userPath.getFileSystem(conf);
 String jarPathStr = userPathStr + "/" + userName;
 String hdfsDirPathStr = jarPathStr;
 Path hdfsDirPath = new Path(hdfsDirPathStr);
 try {
  FileStatus fstatus = fs.getFileStatus(hdfsDirPath);
  if (!fstatus.isDir()) {
   throw new IOException(ErrorMsg.INVALID_DIR.format(hdfsDirPath.toString()));
  }
 } catch (FileNotFoundException e) {
  // directory does not exist, create it
  fs.mkdirs(hdfsDirPath);
 }
 Path retPath = new Path(hdfsDirPath.toString() + "/.hiveJars");
 fs.mkdirs(retPath);
 return retPath;
}

Code example source: org.apache.spark/spark-hive-thriftserver

addService(sessionManager);
if (UserGroupInformation.isSecurityEnabled()) {
 try {
  // Log in from the configured keytab and keep the resulting service UGI.
  HiveAuthFactory.loginFromKeytab(hiveConf);
  this.serviceUGI = Utils.getUGI();
 } catch (IOException e) {
  throw new ServiceException("Unable to login to kerberos with given principal/keytab", e);
 }
 // Optionally set up a separate SPNEGO identity for the HTTP transport.
 String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
 String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
 if (principal.isEmpty() || keyTabFile.isEmpty()) {
  LOG.info("SPNego httpUGI not created, spNegoPrincipal: " + principal +
    // ... (rest of the statement truncated in the original snippet)

Code example source: dremio/dremio-oss

/**
 * Helper method that gets the delegation token using <i>processHiveClient</i> for given <i>proxyUserName</i>
 * and sets it in proxy user UserGroupInformation and proxy user HiveConf.
 */
protected static void getAndSetDelegationToken(final HiveConf proxyUserHiveConf, final UserGroupInformation proxyUGI,
  final HiveClient processHiveClient) {
 checkNotNull(processHiveClient, "process user Hive client required");
 checkNotNull(proxyUserHiveConf, "Proxy user HiveConf required");
 checkNotNull(proxyUGI, "Proxy user UserGroupInformation required");
 try {
  final String delegationToken = processHiveClient.getDelegationToken(proxyUGI.getUserName());
  Utils.setTokenStr(proxyUGI, delegationToken, "DremioDelegationTokenForHiveMetaStoreServer");
  proxyUserHiveConf.set("hive.metastore.token.signature", "DremioDelegationTokenForHiveMetaStoreServer");
 } catch (Exception e) {
  final String processUsername = ImpersonationUtil.getProcessUserUGI().getShortUserName();
  throw UserException.permissionError(e)
    .message("Failed to generate Hive metastore delegation token for user %s. " +
      "Check Hadoop services (including metastore) have correct proxy user impersonation settings (%s, %s) " +
        "and services are restarted after applying those settings.",
      proxyUGI.getUserName(),
      String.format("hadoop.proxyuser.%s.hosts", processUsername),
      String.format("hadoop.proxyuser.%s.groups", processUsername)
    )
    .addContext("Proxy user", proxyUGI.getUserName())
    .build(logger);
 }
}

Code example source: apache/hive

setHiveConf(hiveConf);
sessionManager = new SessionManager(hiveServer2, allowSessionsInitial);
defaultFetchRows = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE);
addService(sessionManager);
if (UserGroupInformation.isSecurityEnabled()) {
 try {
  HiveAuthFactory.loginFromKeytab(hiveConf);
  this.serviceUGI = Utils.getUGI();
 } catch (IOException e) {
  throw new ServiceException("Unable to login to kerberos with given principal/keytab", e);
 }
 String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
 String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
 if (principal.isEmpty() || keyTabFile.isEmpty()) {
  LOG.info("SPNego httpUGI not created, spNegoPrincipal: " + principal +
    // ... (rest of the statement truncated in the original snippet)

Code example source: com.facebook.presto.hive/hive-apache

// ... (start of the constructor signature truncated in the original snippet)
  throws IOException, LoginException, IllegalArgumentException, URISyntaxException, TezException {
 this.conf = conf;
 this.queueName = conf.get("tez.queue.name");
 this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
 // The session owner is taken from the current UGI.
 UserGroupInformation ugi = Utils.getUGI();
 user = ugi.getShortUserName();
 LOG.info("User of session id " + sessionId + " is " + user);
 conf.stripHiddenConfigurations(tezConfig);
 if (HiveConf.getBoolVar(conf, ConfVars.HIVE_PREWARM_ENABLED)) {
  // ... (prewarm handling truncated in the original snippet)

Code example source: apache/hive

@Test
public void mrTaskSumbitViaChildWithImpersonation() throws IOException, LoginException {
 Utils.getUGI().setAuthenticationMethod(PROXY);
 Context ctx = Mockito.mock(Context.class);
 when(ctx.getLocalTmpPath()).thenReturn(new Path(System.getProperty("java.io.tmpdir")));
 DriverContext dctx = new DriverContext(ctx);
 QueryState queryState = new QueryState.Builder().build();
 HiveConf conf = queryState.getConf();
 conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
 MapredWork mrWork = new MapredWork();
 mrWork.setMapWork(Mockito.mock(MapWork.class));
 MapRedTask mrTask = Mockito.spy(new MapRedTask());
 mrTask.setWork(mrWork);
 mrTask.initialize(queryState, null, dctx, null);
 mrTask.jobExecHelper = Mockito.mock(HadoopJobExecHelper.class);
 when(mrTask.jobExecHelper.progressLocal(Mockito.any(Process.class), Mockito.anyString())).thenReturn(0);
 mrTask.execute(dctx);
 ArgumentCaptor<String[]> captor = ArgumentCaptor.forClass(String[].class);
 verify(mrTask).spawn(Mockito.anyString(), Mockito.anyString(), captor.capture());
 String expected = "HADOOP_PROXY_USER=" + Utils.getUGI().getUserName();
 Assert.assertTrue(Arrays.asList(captor.getValue()).contains(expected));
}
