This article collects a number of code examples for the Java method org.apache.hadoop.security.Credentials.addToken(), showing how Credentials.addToken() is used in practice. The examples are drawn mainly from platforms such as Github, Stackoverflow, and Maven, extracted from selected projects, so they should serve as useful references. The details of the Credentials.addToken() method are as follows:

Package path: org.apache.hadoop.security.Credentials
Class name: Credentials
Method name: addToken
Description: Add a token in the storage (in memory).
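Before the project examples, here is a minimal self-contained sketch of the basic call pattern. The alias "my.service" and the empty Token are illustrative placeholders, not taken from any of the projects below; in real code the token would be a delegation token obtained from a service.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class AddTokenExample {
  public static void main(String[] args) {
    Credentials credentials = new Credentials();
    // Alias under which the token is stored; "my.service" is an illustrative name.
    Text alias = new Text("my.service");
    // An empty Token stands in for a real delegation token fetched from a service.
    Token<?> token = new Token<>();
    // Store the token in memory, keyed by its alias.
    credentials.addToken(alias, token);
    // Tokens can be counted and looked up by alias afterwards.
    System.out.println("Stored tokens: " + credentials.numberOfTokens());
    System.out.println("Lookup result: " + credentials.getToken(alias));
  }
}

The project examples below show the same pattern with real delegation tokens.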
Code example source: org.apache.hadoop/hadoop-common

private void addAll(Credentials other, boolean overwrite) {
  for (Map.Entry<Text, byte[]> secret : other.secretKeysMap.entrySet()) {
    Text key = secret.getKey();
    if (!secretKeysMap.containsKey(key) || overwrite) {
      secretKeysMap.put(key, secret.getValue());
    }
  }
  for (Map.Entry<Text, Token<?>> token : other.tokenMap.entrySet()) {
    Text key = token.getKey();
    if (!tokenMap.containsKey(key) || overwrite) {
      addToken(key, token.getValue());
    }
  }
}
Code example source: org.apache.hadoop/hadoop-common

/**
 * Add a named token to this UGI
 *
 * @param alias Name of the token
 * @param token Token to be added
 * @return true on successful add of new token
 */
public boolean addToken(Text alias, Token<? extends TokenIdentifier> token) {
  synchronized (subject) {
    getCredentialsInternal().addToken(alias, token);
    return true;
  }
}
Code example source: apache/flink

  // (excerpt; the enclosing try block obtains an HBase delegation token)
  credentials.addToken(token.getService(), token);
  LOG.info("Added HBase Kerberos security token to credentials.");
} catch (ClassNotFoundException
  // ... (snippet truncated in the original)
Code example source: apache/hive

  public Object run() throws IOException {
    Credentials cred = new Credentials();
    for (Token<?> fsToken : fsTokens) {
      cred.addToken(fsToken.getService(), fsToken);
    }
    cred.addToken(msToken.getService(), msToken);
    cred.writeTokenStorageFile(tokenPath, conf);
    return null;
  }
});
Code example source: apache/incubator-gobblin

private static void getHdfsToken(Configuration conf, Credentials cred) throws IOException {
  FileSystem fs = FileSystem.get(conf);
  LOG.info("Getting DFS token from " + fs.getUri());
  Token<?> fsToken = fs.getDelegationToken(getMRTokenRenewerInternal(new JobConf()).toString());
  if (fsToken == null) {
    LOG.error("Failed to fetch DFS token for ");
    throw new IOException("Failed to fetch DFS token.");
  }
  LOG.info("Created DFS token: " + fsToken.toString());
  LOG.info("Token kind: " + fsToken.getKind());
  LOG.info("Token id: " + Arrays.toString(fsToken.getIdentifier()));
  LOG.info("Token service: " + fsToken.getService());
  cred.addToken(fsToken.getService(), fsToken);
}
Code example source: apache/incubator-gobblin

private static void getJtToken(Credentials cred) throws IOException {
  try {
    JobConf jobConf = new JobConf();
    JobClient jobClient = new JobClient(jobConf);
    LOG.info("Pre-fetching JT token from JobTracker");
    Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(getMRTokenRenewerInternal(jobConf));
    if (mrdt == null) {
      LOG.error("Failed to fetch JT token");
      throw new IOException("Failed to fetch JT token.");
    }
    LOG.info("Created JT token: " + mrdt.toString());
    LOG.info("Token kind: " + mrdt.getKind());
    LOG.info("Token id: " + Arrays.toString(mrdt.getIdentifier()));
    LOG.info("Token service: " + mrdt.getService());
    cred.addToken(mrdt.getService(), mrdt);
  } catch (InterruptedException ie) {
    throw new IOException(ie);
  }
}
Code example source: apache/hive

private String addHMSToken(Job job, String user) throws IOException, InterruptedException,
    TException {
  if (!secureMetastoreAccess) {
    return null;
  }
  Token<org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier> hiveToken =
      new Token<org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier>();
  String metastoreTokenStrForm = buildHcatDelegationToken(user);
  hiveToken.decodeFromUrlString(metastoreTokenStrForm);
  job.getCredentials().addToken(new Text(SecureProxySupport.HCAT_SERVICE), hiveToken);
  return metastoreTokenStrForm;
}

private String buildHcatDelegationToken(String user) throws IOException, InterruptedException,
// ... (snippet truncated in the original)
Code example source: apache/incubator-gobblin

private static void getJhToken(Configuration conf, Credentials cred) throws IOException {
  YarnRPC rpc = YarnRPC.create(conf);
  final String serviceAddr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS);
  LOG.debug("Connecting to HistoryServer at: " + serviceAddr);
  HSClientProtocol hsProxy =
      (HSClientProtocol) rpc.getProxy(HSClientProtocol.class, NetUtils.createSocketAddr(serviceAddr), conf);
  LOG.info("Pre-fetching JH token from job history server");
  Token<?> jhToken = null;
  try {
    jhToken = getDelegationTokenFromHS(hsProxy, conf);
  } catch (Exception exc) {
    throw new IOException("Failed to fetch JH token.", exc);
  }
  if (jhToken == null) {
    LOG.error("getDelegationTokenFromHS() returned null");
    throw new IOException("Unable to fetch JH token.");
  }
  LOG.info("Created JH token: " + jhToken.toString());
  LOG.info("Token kind: " + jhToken.getKind());
  LOG.info("Token id: " + Arrays.toString(jhToken.getIdentifier()));
  LOG.info("Token service: " + jhToken.getService());
  cred.addToken(jhToken.getService(), jhToken);
}
Code example source: apache/incubator-gobblin

/**
 * Write a {@link Token} to a given file.
 *
 * @param token the token to write
 * @param tokenFilePath the token file path
 * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
 * @throws IOException
 */
public static void writeTokenToFile(Token<? extends TokenIdentifier> token, Path tokenFilePath,
    Configuration configuration) throws IOException {
  Credentials credentials = new Credentials();
  credentials.addToken(token.getService(), token);
  credentials.writeTokenStorageFile(tokenFilePath, configuration);
}
Code example source: apache/flink

public static void setTokensFor(ContainerLaunchContext amContainer, List<Path> paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();
  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}
Code example source: org.apache.hadoop/hadoop-common

/**
 * Populates keys/values from proto buffer storage.
 * @param in - stream ready to read a serialized proto buffer message
 */
void readProto(DataInput in) throws IOException {
  CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream) in);
  for (CredentialsKVProto kv : storage.getTokensList()) {
    addToken(new Text(kv.getAliasBytes().toByteArray()),
        (Token<? extends TokenIdentifier>) new Token(kv.getToken()));
  }
  for (CredentialsKVProto kv : storage.getSecretsList()) {
    addSecretKey(new Text(kv.getAliasBytes().toByteArray()),
        kv.getSecret().toByteArray());
  }
}
Code example source: apache/hbase

  // (excerpt; a log statement above reports the token obtained for the user on the cluster)
      user.getName() + " on cluster " + clusterId.toString());
  job.getCredentials().addToken(clusterId, token);
} catch (IOException ioe) {
  throw ioe;
  // ... (snippet truncated in the original)
Code example source: org.apache.hadoop/hadoop-common

/** Alias a token from a file and save back to file in the local filesystem.
 * @param tokenFile a local File object to hold the input and output.
 * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 * @param alias overwrite service field of fetched token with this text.
 * @param service only apply alias to tokens matching this service text.
 * @param conf Configuration object passed along.
 * @throws IOException
 */
public static void aliasTokenFile(File tokenFile, String fileFormat,
    Text alias, Text service, Configuration conf) throws Exception {
  Credentials newCreds = new Credentials();
  Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
  for (Token<?> token : creds.getAllTokens()) {
    newCreds.addToken(token.getService(), token);
    if (token.getService().equals(service)) {
      Token<?> aliasedToken = token.copyToken();
      aliasedToken.setService(alias);
      newCreds.addToken(alias, aliasedToken);
    }
  }
  doFormattedWrite(tokenFile, fileFormat, newCreds, conf);
}
Code example source: org.apache.hadoop/hadoop-common

// (excerpt)
if (token != null) {
  tokens.add(token);
  credentials.addToken(service, token);
  // ... (snippet truncated in the original)
Code example source: apache/hbase

/**
 * Checks for an authentication token for the given user, obtaining a new token if necessary,
 * and adds it to the credentials for the given map reduce job.
 *
 * @param conn The HBase cluster connection
 * @param user The user for whom to obtain the token
 * @param job The job configuration in which the token should be stored
 * @throws IOException If making a remote call to the authentication service fails
 * @throws InterruptedException If executing as the given user is interrupted
 */
public static void addTokenForJob(final Connection conn, final JobConf job, User user)
    throws IOException, InterruptedException {
  Token<AuthenticationTokenIdentifier> token = getAuthToken(conn.getConfiguration(), user);
  if (token == null) {
    token = obtainToken(conn, user);
  }
  job.getCredentials().addToken(token.getService(), token);
}
Code example source: apache/hbase

/**
 * Checks for an authentication token for the given user, obtaining a new token if necessary,
 * and adds it to the credentials for the given map reduce job.
 *
 * @param conn The HBase cluster connection
 * @param user The user for whom to obtain the token
 * @param job The job instance in which the token should be stored
 * @throws IOException If making a remote call to the authentication service fails
 * @throws InterruptedException If executing as the given user is interrupted
 */
public static void addTokenForJob(final Connection conn, User user, Job job)
    throws IOException, InterruptedException {
  Token<AuthenticationTokenIdentifier> token = getAuthToken(conn.getConfiguration(), user);
  if (token == null) {
    token = obtainToken(conn, user);
  }
  job.getCredentials().addToken(token.getService(), token);
}
Code example source: org.apache.hadoop/hadoop-common

/** Append tokens from list of files in local filesystem, saving to last file.
 * @param tokenFiles list of local File objects. Last file holds the output.
 * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 * @param conf Configuration object passed along.
 * @throws IOException
 */
public static void appendTokenFiles(
    ArrayList<File> tokenFiles, String fileFormat, Configuration conf)
    throws IOException {
  Credentials newCreds = new Credentials();
  File lastTokenFile = null;
  for (File tokenFile : tokenFiles) {
    lastTokenFile = tokenFile;
    Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
    for (Token<?> token : creds.getAllTokens()) {
      newCreds.addToken(token.getService(), token);
    }
  }
  doFormattedWrite(lastTokenFile, fileFormat, newCreds, conf);
}
Code example source: org.apache.hadoop/hadoop-common

/** Remove a token from a file in the local filesystem, matching alias.
 * @param cancel cancel token as well as remove from file.
 * @param tokenFile a local File object.
 * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 * @param alias remove only tokens matching alias; null matches all.
 * @param conf Configuration object passed along.
 * @throws IOException
 * @throws InterruptedException
 */
public static void removeTokenFromFile(boolean cancel,
    File tokenFile, String fileFormat, Text alias, Configuration conf)
    throws IOException, InterruptedException {
  Credentials newCreds = new Credentials();
  Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
  for (Token<?> token : creds.getAllTokens()) {
    if (matchAlias(token, alias)) {
      if (token.isManaged() && cancel) {
        token.cancel(conf);
        LOG.info("Canceled " + token.getKind() + ":" + token.getService());
      }
    } else {
      newCreds.addToken(token.getService(), token);
    }
  }
  doFormattedWrite(tokenFile, fileFormat, newCreds, conf);
}
Code example source: apache/hive

@Test
public void testCredentialsNotOverwritten() throws Exception {
  final UserGroupInformation testUser = UserGroupInformation.createUserForTesting("test_user", new String[0]);
  final DagUtils dagUtils = DagUtils.getInstance();
  Credentials originalCredentials = new Credentials();
  final Text testTokenAlias = new Text("my_test_token");
  @SuppressWarnings("unchecked")
  Token<? extends TokenIdentifier> testToken = mock(Token.class);
  originalCredentials.addToken(testTokenAlias, testToken);
  Credentials testUserCredentials = new Credentials();
  testUser.addCredentials(testUserCredentials);
  final BaseWork work = mock(BaseWork.class);
  final DAG dag = DAG.create("test_credentials_dag");
  dag.setCredentials(originalCredentials);
  testUser.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
      dagUtils.addCredentials(work, dag);
      return null;
    }
  });
  Token<? extends TokenIdentifier> actualToken = dag.getCredentials().getToken(testTokenAlias);
  assertEquals(testToken, actualToken);
}