Usage of the org.apache.hadoop.security.token.Token class, with code examples


This article collects code examples of the Java class org.apache.hadoop.security.token.Token and shows how it is used in practice. The examples are drawn from selected open-source projects on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as a useful reference. Details of the Token class:
Package: org.apache.hadoop.security.token
Class name: Token

About Token

The client-side form of the token.
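
Before the collected examples, here is a minimal sketch of the typical round trip: a token is encoded to a URL-safe string, shipped to a client, and decoded again. The service address used here is a made-up placeholder; in practice the token comes from a secure service such as a NameNode or HiveServer2.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenRoundTrip {
 public static void main(String[] args) throws Exception {
  // An empty token is used only to illustrate the API; real tokens are issued
  // by a secure service and carry an identifier, a password, a kind and a service.
  Token<TokenIdentifier> token = new Token<>();
  token.setService(new Text("127.0.0.1:8020")); // hypothetical service address
  String urlString = token.encodeToUrlString(); // URL-safe, suitable for passing around as text

  Token<TokenIdentifier> restored = new Token<>();
  restored.decodeFromUrlString(urlString);
  System.out.println("kind=" + restored.getKind() + ", service=" + restored.getService());
 }
}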

Code examples

Code example source: apache/hive

public String getUserFromToken(String tokenStr) throws IOException {
  Token<DelegationTokenIdentifier> delegationToken = new Token<>();
  delegationToken.decodeFromUrlString(tokenStr);

  ByteArrayInputStream buf = new ByteArrayInputStream(delegationToken.getIdentifier());
  DataInputStream in = new DataInputStream(buf);
  DelegationTokenIdentifier id = createIdentifier();
  id.readFields(in);
  return id.getUser().getShortUserName();
 }

Code example source: org.apache.hadoop/hadoop-common

/**
 * Get the token identifier object, or null if it could not be constructed
 * (because the class could not be loaded, for example).
 * @return the token identifier, or null
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public T decodeIdentifier() throws IOException {
 Class<? extends TokenIdentifier> cls = getClassForIdentifier(getKind());
 if (cls == null) {
  return null;
 }
 TokenIdentifier tokenIdentifier = ReflectionUtils.newInstance(cls, null);
 ByteArrayInputStream buf = new ByteArrayInputStream(identifier);
 DataInputStream in = new DataInputStream(buf);
 tokenIdentifier.readFields(in);
 in.close();
 return (T) tokenIdentifier;
}
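
decodeIdentifier() above is a convenient alternative to the manual ByteArrayInputStream/DataInputStream decoding that several of the examples below perform by hand. A small usage sketch; the helper class and method name are illustrative, not part of Hadoop:

import java.io.IOException;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

final class TokenOwnerPrinter {
 // Print who a token was issued to, letting decodeIdentifier() do the stream handling.
 static void printOwner(Token<? extends TokenIdentifier> token) throws IOException {
  TokenIdentifier ident = token.decodeIdentifier(); // null if the identifier class cannot be loaded
  if (ident != null) {
   System.out.println("Token issued to " + ident.getUser().getShortUserName());
  } else {
   System.out.println("Identifier class not available for kind " + token.getKind());
  }
 }
}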

Code example source: apache/incubator-gobblin

private static void getHdfsToken(Configuration conf, Credentials cred) throws IOException {
 FileSystem fs = FileSystem.get(conf);
 LOG.info("Getting DFS token from " + fs.getUri());
 Token<?> fsToken = fs.getDelegationToken(getMRTokenRenewerInternal(new JobConf()).toString());
 if (fsToken == null) {
  LOG.error("Failed to fetch DFS token for ");
  throw new IOException("Failed to fetch DFS token.");
 }
 LOG.info("Created DFS token: " + fsToken.toString());
 LOG.info("Token kind: " + fsToken.getKind());
 LOG.info("Token id: " + Arrays.toString(fsToken.getIdentifier()));
 LOG.info("Token service: " + fsToken.getService());
 cred.addToken(fsToken.getService(), fsToken);
}
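
A typical next step, once the HDFS token has been collected into a Credentials object as above, is to attach those credentials to a job so the token travels with it to the tasks. The sketch below assumes a plain MapReduce Job; it is not part of the Gobblin code above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.security.Credentials;

final class SubmitWithHdfsToken {
 static void attachCredentials(Configuration conf, Credentials cred) throws Exception {
  // Tokens added to the job's Credentials are shipped with the job and become
  // available to the map/reduce tasks through the task UGI.
  Job job = Job.getInstance(conf);
  job.getCredentials().addAll(cred);
  // job.submit(); // submission itself is out of scope for this sketch
 }
}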

Code example source: apache/hive

public static Token<? extends AbstractDelegationTokenIdentifier> extractThriftToken(
 String tokenStrForm, String tokenSignature) throws MetaException,
 TException, IOException {
 // LOG.info("extractThriftToken("+tokenStrForm+","+tokenSignature+")");
 Token<? extends AbstractDelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
 t.decodeFromUrlString(tokenStrForm);
 t.setService(new Text(tokenSignature));
 // LOG.info("returning "+t);
 return t;
}

Code example source: apache/hive

/** Verifies the token available as serialized bytes. */
 public void verifyToken(byte[] tokenBytes) throws IOException {
  if (!UserGroupInformation.isSecurityEnabled()) return;
  if (tokenBytes == null) throw new SecurityException("Token required for authentication");
  Token<LlapTokenIdentifier> token = new Token<>();
  token.readFields(new DataInputStream(new ByteArrayInputStream(tokenBytes)));
  verifyToken(token.decodeIdentifier(), token.getPassword());
 }

Code example source: apache/hive

public synchronized void cancelDelegationToken(String tokenStrForm) throws IOException {
 Token<DelegationTokenIdentifier> t= new Token<>();
 t.decodeFromUrlString(tokenStrForm);
 String user = UserGroupInformation.getCurrentUser().getUserName();
 cancelToken(t, user);
}

Code example source: apache/hive

protected DelegationTokenIdentifier getTokenIdentifier(Token<DelegationTokenIdentifier> token)
  throws IOException {
 // turn bytes back into identifier for cache lookup
 ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
 DataInputStream in = new DataInputStream(buf);
 DelegationTokenIdentifier id = createIdentifier();
 id.readFields(in);
 return id;
}

Code example source: apache/hive

@Override
public LlapTokenIdentifier decodeTokenIdentifier(
  Token<LlapTokenIdentifier> token) throws IOException {
 DataInputStream dis = new DataInputStream(new ByteArrayInputStream(token.getIdentifier()));
 LlapTokenIdentifier id = new LlapTokenIdentifier();
 id.readFields(dis);
 dis.close();
 return id;
}

Code example source: org.apache.hadoop/hadoop-common

// Excerpt from AbstractDelegationTokenSecretManager#cancelToken: decode the
// identifier and check that the caller is allowed to cancel the token.
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
if (id.getUser() == null) {
 throw new InvalidToken("Token with no owner " + formatTokenId(id));
}
String owner = id.getUser().getUserName();
Text renewer = id.getRenewer();
HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
String cancelerShortName = cancelerKrbName.getShortName();
if (!canceller.equals(owner)
  && (renewer == null || renewer.toString().isEmpty()
    || !cancelerShortName.equals(renewer.toString()))) {
 throw new AccessControlException(canceller
   + " is not authorized to cancel the token " + formatTokenId(id));
}

Code example source: org.apache.hadoop/hadoop-common

// Excerpt from AbstractDelegationTokenSecretManager#renewToken: decode the
// identifier, then check expiry, renewer and password before renewing.
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
long now = Time.now();
if (id.getMaxDate() < now) {
 throw new InvalidToken(renewer + " tried to renew an expired token "
   + formatTokenId(id) + " max expiration date: "
   + Time.formatTime(id.getMaxDate()) + " currentTime: " + Time.formatTime(now));
}
if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
 throw new AccessControlException(renewer +
   " tried to renew a token " + formatTokenId(id) + " without a renewer");
}
if (!id.getRenewer().toString().equals(renewer)) {
 throw new AccessControlException(renewer
   + " tries to renew a token " + formatTokenId(id) + " with non-matching renewer");
}
DelegationKey key = getDelegationKey(id.getMasterKeyId());
if (key == null) {
 throw new InvalidToken("Unable to find master key for keyId=" + id.getMasterKeyId()
   + "; failed to renew token " + formatTokenId(id)
   + " with sequenceNumber=" + id.getSequenceNumber());
}
byte[] password = createPassword(token.getIdentifier(), key.getKey());
if (!MessageDigest.isEqual(password, token.getPassword())) {
 throw new AccessControlException(renewer
   + " is trying to renew a token " + formatTokenId(id) + " with wrong password");
}

Code example source: apache/accumulo

/**
 * Unwraps the provided {@link AuthenticationToken} if it is an instance of DelegationTokenStub,
 * reconstituting it from the provided {@link JobConf}.
 *
 * @param job
 *          The job
 * @param token
 *          The authentication token
 */
public static AuthenticationToken unwrapAuthenticationToken(JobConf job,
  AuthenticationToken token) {
 requireNonNull(job);
 requireNonNull(token);
 if (token instanceof org.apache.accumulo.core.clientImpl.mapreduce.DelegationTokenStub) {
  org.apache.accumulo.core.clientImpl.mapreduce.DelegationTokenStub delTokenStub = (org.apache.accumulo.core.clientImpl.mapreduce.DelegationTokenStub) token;
  Token<? extends TokenIdentifier> hadoopToken = job.getCredentials()
    .getToken(new Text(delTokenStub.getServiceName()));
  AuthenticationTokenIdentifier identifier = new AuthenticationTokenIdentifier();
  try {
   identifier
     .readFields(new DataInputStream(new ByteArrayInputStream(hadoopToken.getIdentifier())));
   return new DelegationTokenImpl(hadoopToken.getPassword(), identifier);
  } catch (IOException e) {
   throw new RuntimeException("Could not construct DelegationToken from JobConf Credentials",
     e);
  }
 }
 return token;
}

Code example source: org.apache.hadoop/hadoop-hdfs

private UserGroupInformation tokenUGI(Token<DelegationTokenIdentifier> token)
  throws IOException {
 ByteArrayInputStream buf =
  new ByteArrayInputStream(token.getIdentifier());
 DataInputStream in = new DataInputStream(buf);
 DelegationTokenIdentifier id = new DelegationTokenIdentifier();
 id.readFields(in);
 UserGroupInformation ugi = id.getUser();
 ugi.addToken(token);
 return ugi;
}
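
A common follow-up to tokenUGI is to run an action as that user, so that the attached token authenticates the RPC connections opened inside the call. A minimal sketch using doAs; the helper class and the FileSystem action are illustrative:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

final class RunAsTokenUser {
 // Open a FileSystem as the token's user; the delegation token attached to the
 // UGI is used to authenticate the connections created inside doAs.
 static FileSystem openFileSystemAs(UserGroupInformation ugi, Configuration conf) throws Exception {
  return ugi.doAs((PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(conf));
 }
}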

Code example source: apache/storm

// Excerpt (closing braces restored): pick the UGI to use, preferring an HBase
// auth token found among the current user's delegation tokens.
if (provider != null) {
  ugi = provider.getCurrent().getUGI();
  LOG.debug("Current USER for provider: {}", ugi.getUserName());
} else {
  ugi = UserGroupInformation.getCurrentUser();
  LOG.debug("UGI for current USER : {}", ugi.getUserName());
  if (ugi.hasKerberosCredentials()) {
    LOG.debug("UGI has Kerberos credentials");
    boolean foundHBaseAuthToken = false;
    for (Token<? extends TokenIdentifier> token : ugi.getTokens()) {
      LOG.debug("Token in UGI (delegation token): {} / {}", token.toString(),
          token.decodeIdentifier().getUser());
      if (token.getKind().toString().equals(TOKEN_KIND_HBASE_AUTH_TOKEN)) {
        if (!foundHBaseAuthToken) {
          // use the UGI carried in the HBase auth token and attach the token to it
          ugi = token.decodeIdentifier().getUser();
          ugi.addToken(token);
          foundHBaseAuthToken = true;
        }
      }
    }
  }
}

Code example source: apache/hive

public synchronized String getDelegationToken(final String ownerStr, final String renewer) throws IOException {
 if (ownerStr == null) {
  throw new RuntimeException("Delegation token owner is null");
 }
 Text owner = new Text(ownerStr);
 Text realUser = null;
 UserGroupInformation currentUgi = UserGroupInformation.getCurrentUser();
 if (currentUgi.getUserName() != null) {
  realUser = new Text(currentUgi.getUserName());
 }
 DelegationTokenIdentifier ident =
  new DelegationTokenIdentifier(owner, new Text(renewer), realUser);
 Token<DelegationTokenIdentifier> t = new Token<>(
   ident, this);
 return t.encodeToUrlString();
}
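
On the receiving side, the URL-safe string returned by getDelegationToken can be decoded back into a Token and attached to the current user, so that token selectors (see getTokenStrForm further below) can find it by its service. A hedged sketch; the helper class is illustrative:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;

final class AttachDelegationToken {
 // Decode the string form produced above and add the token to the current user's UGI.
 static void attach(String tokenStrForm, String tokenSignature) throws Exception {
  Token<AbstractDelegationTokenIdentifier> t = new Token<>();
  t.decodeFromUrlString(tokenStrForm);
  t.setService(new Text(tokenSignature)); // the "service" used for later lookup
  UserGroupInformation.getCurrentUser().addToken(t);
 }
}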

Code example source: apache/hive

public Token<LlapTokenIdentifier> createLlapToken(
  String appId, String user, boolean isSignatureRequired) throws IOException {
 Text realUser = null, renewer = null;
 if (user == null) {
  UserGroupInformation ugi  = UserGroupInformation.getCurrentUser();
  user = ugi.getUserName();
  if (ugi.getRealUser() != null) {
   realUser = new Text(ugi.getRealUser().getUserName());
  }
  renewer = new Text(ugi.getShortUserName());
 } else {
  renewer = new Text(user);
 }
 LlapTokenIdentifier llapId = new LlapTokenIdentifier(
   new Text(user), renewer, realUser, clusterId, appId, isSignatureRequired);
 // TODO: note that the token is not renewable right now and will last for 2 weeks by default.
 Token<LlapTokenIdentifier> token = new Token<LlapTokenIdentifier>(llapId, this);
 if (LOG.isInfoEnabled()) {
  LOG.info("Created LLAP token {}", token);
 }
 return token;
}

Code example source: apache/flink

public static void setTokensFor(ContainerLaunchContext amContainer, List<Path> paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();
  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}

Code example source: apache/hive

/**
 * Get the string form of the token given a token signature. The signature is used as the value of
 * the "service" field in the token for lookup. Ref: AbstractDelegationTokenSelector in Hadoop. If
 * there exists such a token in the token cache (credential store) of the job, the lookup returns
 * that. This is relevant only when running against a "secure" hadoop release. The method gets hold
 * of the tokens if they are set up by hadoop - this should happen on the map/reduce tasks if the
 * client added the tokens into hadoop's credential store in the front end during job submission.
 * The method will select the hive delegation token among the set of tokens and return the string
 * form of it
 * 
 * @param tokenSignature the token signature, i.e. the value of the "service" field used for lookup
 * @return the string form of the token found
 * @throws IOException
 */
public static String getTokenStrForm(String tokenSignature) throws IOException {
 UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
 TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
 Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
   tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
 return token != null ? token.encodeToUrlString() : null;
}

Code example source: org.apache.hadoop/hadoop-yarn-server-resourcemanager

private String getRenewerForToken(Token<RMDelegationTokenIdentifier> token)
  throws IOException {
 UserGroupInformation user = UserGroupInformation.getCurrentUser();
 UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
 // we can always renew our own tokens
 return loginUser.getUserName().equals(user.getUserName())
   ? token.decodeIdentifier().getRenewer().toString()
   : user.getShortUserName();
}

Code example source: Qihoo360/XLearning

@Override
public RenewDelegationTokenResponse renewDelegationToken(
  RenewDelegationTokenRequest request) throws IOException {
 if (!isAllowedDelegationTokenOp()) {
  throw new IOException(
    "Delegation Token can be renewed only with kerberos authentication");
 }
 org.apache.hadoop.yarn.api.records.Token protoToken = request.getDelegationToken();
 Token<MRDelegationTokenIdentifier> token =
   new Token<MRDelegationTokenIdentifier>(
     protoToken.getIdentifier().array(), protoToken.getPassword()
     .array(), new Text(protoToken.getKind()), new Text(
     protoToken.getService()));
 String user = UserGroupInformation.getCurrentUser().getShortUserName();
 long nextExpTime = jhsDTSecretManager.renewToken(token, user);
 RenewDelegationTokenResponse renewResponse = Records
   .newRecord(RenewDelegationTokenResponse.class);
 renewResponse.setNextExpirationTime(nextExpTime);
 return renewResponse;
}

Code example source: apache/hive

private static Token<JobTokenIdentifier> createAmsToken(ApplicationId id) {
 if (!UserGroupInformation.isSecurityEnabled()) return null;
 JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(id.toString()));
 JobTokenSecretManager jobTokenManager = new JobTokenSecretManager();
 Token<JobTokenIdentifier> sessionToken = new Token<>(identifier, jobTokenManager);
 sessionToken.setService(identifier.getJobId());
 return sessionToken;
}
