本文整理了Java中org.apache.hadoop.security.Credentials.readTokenStorageFile()
方法的一些代码示例,展示了Credentials.readTokenStorageFile()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Credentials.readTokenStorageFile()
方法的具体详情如下:
包路径:org.apache.hadoop.security.Credentials
类名称:Credentials
方法名:readTokenStorageFile
[英]Convenience method for reading a token storage file, and loading the Tokens therein in the passed UGI
[中]用于读取令牌存储文件并将其中的令牌加载到传递的UGI中的方便方法
代码示例来源:origin: apache/incubator-gobblin
/**
 * Load every {@link Token} stored in the given token file.
 *
 * @param tokenFilePath the token file path
 * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
 * @return all {@link Token}s found in the file
 * @throws IOException if the token file cannot be read
 */
public static Collection<Token<? extends TokenIdentifier>> readTokensFromFile(Path tokenFilePath,
    Configuration configuration) throws IOException {
  Credentials credentials = Credentials.readTokenStorageFile(tokenFilePath, configuration);
  return credentials.getAllTokens();
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/** Dump a Credentials file from the local filesystem to a stream.
 * @param tokenFile a local File object.
 * @param alias print only tokens matching alias (null matches all).
 * @param conf Configuration object passed along.
 * @param out print to this stream.
 * @throws IOException if the token file cannot be read.
 */
public static void printTokenFile(
    File tokenFile, Text alias, Configuration conf, PrintStream out)
    throws IOException {
  out.println("File: " + tokenFile.getPath());
  // Read the whole credentials file, then delegate formatting.
  printCredentials(Credentials.readTokenStorageFile(tokenFile, conf), alias, out);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Verify that tokenFile contains valid Credentials.
 *
 * If not, an exception is raised, or, if {@link #nofail} is set,
 * an error will be logged and the method returns false.
 */
private boolean verify(File tokenFile, Configuration conf, String category,
    String message) throws KerberosDiagsFailure {
  try {
    // Success of the read is the only validity check needed here.
    Credentials.readTokenStorageFile(tokenFile, conf);
    return true;
  } catch (Exception e) {
    // In lenient mode just log; otherwise fail() raises KerberosDiagsFailure.
    if (nofail) {
      error(category, message);
    } else {
      fail(category, message);
    }
    return false;
  }
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
File tokenFile = new File(tokenFileName);
if (tokenFile.exists() && tokenFile.isFile()) {
Credentials cred = Credentials.readTokenStorageFile(
tokenFile, conf);
loginUser.addCredentials(cred);
+ " not found");
Credentials cred = Credentials.readTokenStorageFile(
source, conf);
LOG.debug("Loaded {} tokens", cred.numberOfTokens());
代码示例来源:origin: org.apache.hadoop/hadoop-common
Credentials.readTokenStorageFile(p, conf));
conf.set("mapreduce.job.credentials.binary", p.toString(),
"from -tokenCacheFile command line option");
代码示例来源:origin: apache/hbase
if(StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) {
String fileLoc = conf.get(CREDENTIALS_LOCATION);
Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
job.getCredentials().addAll(cred);
代码示例来源:origin: org.apache.hadoop/hadoop-common
Token<?> token = null;
Credentials creds = tokenFile.exists() ?
Credentials.readTokenStorageFile(tokenFile, conf) : new Credentials();
ServiceLoader<DtFetcher> loader = ServiceLoader.load(DtFetcher.class);
for (DtFetcher fetcher : loader) {
代码示例来源:origin: org.apache.hadoop/hadoop-common
/** Merge the tokens from a list of local files and write the union back
 * to the last file in the list.
 * @param tokenFiles list of local File objects. Last file holds the output.
 * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 * @param conf Configuration object passed along.
 * @throws IOException if any input file cannot be read or the output written.
 */
public static void appendTokenFiles(
    ArrayList<File> tokenFiles, String fileFormat, Configuration conf)
    throws IOException {
  Credentials merged = new Credentials();
  File outputFile = null;
  for (File inputFile : tokenFiles) {
    // Track the last file seen; it becomes the destination of the merge.
    outputFile = inputFile;
    for (Token<?> token :
        Credentials.readTokenStorageFile(inputFile, conf).getAllTokens()) {
      merged.addToken(token.getService(), token);
    }
  }
  doFormattedWrite(outputFile, fileFormat, merged, conf);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/** Renew a token from a file in the local filesystem, matching alias.
 * @param tokenFile a local File object.
 * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 * @param alias renew only tokens matching alias; null matches all.
 * @param conf Configuration object passed along.
 * @throws IOException if the token file cannot be read or written.
 * @throws InterruptedException if token renewal is interrupted.
 */
public static void renewTokenFile(
    File tokenFile, String fileFormat, Text alias, Configuration conf)
    throws IOException, InterruptedException {
  Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
  for (Token<?> token : creds.getAllTokens()) {
    if (token.isManaged() && matchAlias(token, alias)) {
      long result = token.renew(conf);
      // Fixed missing space after "Renewed": previously produced fused output
      // like "RenewedHDFS_DELEGATION_TOKEN"; now matches "Canceled " elsewhere.
      LOG.info("Renewed " + token.getKind() + ":" + token.getService() +
          " until " + formatDate(result));
    }
  }
  doFormattedWrite(tokenFile, fileFormat, creds, conf);
}
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/** Alias a token from a file and save back to file in the local filesystem.
 * @param tokenFile a local File object to hold the input and output.
 * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 * @param alias overwrite service field of fetched token with this text.
 * @param service only apply alias to tokens matching this service text.
 * @param conf Configuration object passed along.
 * @throws IOException if the token file cannot be read or written.
 */
public static void aliasTokenFile(File tokenFile, String fileFormat,
    Text alias, Text service, Configuration conf) throws Exception {
  Credentials updated = new Credentials();
  for (Token<?> token :
      Credentials.readTokenStorageFile(tokenFile, conf).getAllTokens()) {
    // Every original token is kept; matching ones also get an aliased copy.
    updated.addToken(token.getService(), token);
    if (token.getService().equals(service)) {
      Token<?> aliased = token.copyToken();
      aliased.setService(alias);
      updated.addToken(alias, aliased);
    }
  }
  doFormattedWrite(tokenFile, fileFormat, updated, conf);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/** Remove a token from a file in the local filesystem, matching alias.
 * @param cancel cancel token as well as remove from file.
 * @param tokenFile a local File object.
 * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 * @param alias remove only tokens matching alias; null matches all.
 * @param conf Configuration object passed along.
 * @throws IOException if the token file cannot be read or written.
 * @throws InterruptedException if token cancellation is interrupted.
 */
public static void removeTokenFromFile(boolean cancel,
    File tokenFile, String fileFormat, Text alias, Configuration conf)
    throws IOException, InterruptedException {
  Credentials retained = new Credentials();
  Credentials existing = Credentials.readTokenStorageFile(tokenFile, conf);
  for (Token<?> token : existing.getAllTokens()) {
    if (!matchAlias(token, alias)) {
      // Non-matching tokens survive the rewrite.
      retained.addToken(token.getService(), token);
      continue;
    }
    // Matching tokens are dropped; optionally cancel managed ones first.
    if (token.isManaged() && cancel) {
      token.cancel(conf);
      LOG.info("Canceled " + token.getKind() + ":" + token.getService());
    }
  }
  doFormattedWrite(tokenFile, fileFormat, retained, conf);
}
代码示例来源:origin: org.apache.hadoop/hadoop-hdfs
/** Read all tokens stored in the given file.
 * @param file token storage file path
 * @param conf Hadoop configuration used for reading
 * @return the tokens found in the file
 * @throws IOException if the file cannot be read
 */
private static Collection<Token<?>> readTokens(Path file, Configuration conf)
    throws IOException {
  return Credentials.readTokenStorageFile(file, conf).getAllTokens();
}
}
代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs
/** Read all tokens stored in the given file.
 * @param file token storage file path
 * @param conf Hadoop configuration used for reading
 * @return the tokens found in the file
 * @throws IOException if the file cannot be read
 */
private static Collection<Token<?>> readTokens(Path file, Configuration conf)
    throws IOException {
  return Credentials.readTokenStorageFile(file, conf).getAllTokens();
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/** Read all tokens stored in the given file.
 * @param file token storage file path
 * @param conf Hadoop configuration used for reading
 * @return the tokens found in the file
 * @throws IOException if the file cannot be read
 */
private static Collection<Token<?>> readTokens(Path file, Configuration conf)
    throws IOException {
  return Credentials.readTokenStorageFile(file, conf).getAllTokens();
}
代码示例来源:origin: org.apache.gobblin/gobblin-yarn
/**
 * Load every {@link Token} stored in the given token file.
 *
 * @param tokenFilePath the token file path
 * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
 * @return all {@link Token}s found in the file
 * @throws IOException if the token file cannot be read
 */
public static Collection<Token<? extends TokenIdentifier>> readTokensFromFile(Path tokenFilePath,
    Configuration configuration) throws IOException {
  Credentials credentials = Credentials.readTokenStorageFile(tokenFilePath, configuration);
  return credentials.getAllTokens();
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Merge tokens from the binary credentials file named by
 * MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY (if configured) into creds.
 * A read failure is rethrown as an unchecked exception with the cause kept.
 */
private static void mergeBinaryTokens(Credentials creds, Configuration conf) {
  String binaryTokenFilename =
      conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
  if (binaryTokenFilename == null) {
    return;  // no binary token file configured; nothing to merge
  }
  Credentials fromFile;
  try {
    Path qualified =
        FileSystem.getLocal(conf).makeQualified(new Path(binaryTokenFilename));
    fromFile = Credentials.readTokenStorageFile(qualified, conf);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  // supplement existing tokens with the tokens in the binary file
  creds.mergeAll(fromFile);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
/**
 * Merge tokens from the binary credentials file named by
 * MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY (if configured) into creds.
 * A read failure is rethrown as an unchecked exception with the cause kept.
 */
private static void mergeBinaryTokens(Credentials creds, Configuration conf) {
  String binaryTokenFilename =
      conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
  if (binaryTokenFilename == null) {
    return;  // no binary token file configured; nothing to merge
  }
  Credentials fromFile;
  try {
    Path qualified =
        FileSystem.getLocal(conf).makeQualified(new Path(binaryTokenFilename));
    fromFile = Credentials.readTokenStorageFile(qualified, conf);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  // supplement existing tokens with the tokens in the binary file
  creds.mergeAll(fromFile);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
/**
 * Merge tokens from the binary credentials file named by
 * MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY (if configured) into creds.
 * A read failure is rethrown as an unchecked exception with the cause kept.
 */
private static void mergeBinaryTokens(Credentials creds, Configuration conf) {
  String binaryTokenFilename =
      conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
  if (binaryTokenFilename == null) {
    return;  // no binary token file configured; nothing to merge
  }
  Credentials fromFile;
  try {
    Path qualified =
        FileSystem.getLocal(conf).makeQualified(new Path(binaryTokenFilename));
    fromFile = Credentials.readTokenStorageFile(qualified, conf);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  // supplement existing tokens with the tokens in the binary file
  creds.mergeAll(fromFile);
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
/**
 * Merge tokens from the binary credentials file named by
 * MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY (if configured) into creds.
 * A read failure is rethrown as an unchecked exception with the cause kept.
 */
private static void mergeBinaryTokens(Credentials creds, Configuration conf) {
  String binaryTokenFilename =
      conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY);
  if (binaryTokenFilename == null) {
    return;  // no binary token file configured; nothing to merge
  }
  Credentials fromFile;
  try {
    Path qualified =
        FileSystem.getLocal(conf).makeQualified(new Path(binaryTokenFilename));
    fromFile = Credentials.readTokenStorageFile(qualified, conf);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  // supplement existing tokens with the tokens in the binary file
  creds.mergeAll(fromFile);
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
/**
 * Load the job token credentials from a file on the local filesystem.
 * @param jobTokenFile local path of the job token file
 * @param conf job configuration passed to the reader
 * @return the Credentials read from the file
 * @throws IOException if the file cannot be read
 */
@InterfaceAudience.Private
public static Credentials loadTokens(String jobTokenFile, JobConf conf)
    throws IOException {
  // Force a local-filesystem URI regardless of the default filesystem.
  Path tokenPath = new Path("file:///" + jobTokenFile);
  Credentials loaded = Credentials.readTokenStorageFile(tokenPath, conf);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Task: Loaded jobTokenFile from: "
        + tokenPath.toUri().getPath()
        + "; num of sec keys = " + loaded.numberOfSecretKeys()
        + " Number of tokens " + loaded.numberOfTokens());
  }
  return loaded;
}
/**
内容来源于网络,如有侵权,请联系作者删除!