This article collects Java code examples of the org.apache.hadoop.security.Credentials.addAll() method and shows how Credentials.addAll() is used in practice. The examples are extracted from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they should serve as a useful reference. Details of the Credentials.addAll() method:
Package path: org.apache.hadoop.security.Credentials
Class name: Credentials
Method name: addAll
Description: Copy all of the credentials from one credential object into another. Existing secrets and tokens are overwritten.
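To make the overwrite semantics concrete, here is a minimal self-contained sketch (the alias name and key bytes are invented for illustration):
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsAddAllDemo {
  public static void main(String[] args) {
    Credentials target = new Credentials();
    target.addSecretKey(new Text("db.password"), "old".getBytes());

    Credentials source = new Credentials();
    source.addSecretKey(new Text("db.password"), "new".getBytes());

    // addAll copies every token and secret key from source into target,
    // replacing any entry that already exists under the same alias.
    target.addAll(source);

    // Prints "new": the existing secret was overwritten.
    System.out.println(new String(target.getSecretKey(new Text("db.password"))));
  }
}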
Code example source: org.apache.hadoop/hadoop-common
/**
 * Create a copy of the given credentials.
 * @param credentials to copy
 */
public Credentials(Credentials credentials) {
  this.addAll(credentials);
}
Code example source: org.apache.hadoop/hadoop-common
/**
 * Copy all of the credentials from one credential object into another.
 * Existing secrets and tokens are not overwritten.
 * @param other the credentials to copy
 */
public void mergeAll(Credentials other) {
  addAll(other, false);
}
Code example source: org.apache.hadoop/hadoop-common
/**
 * Copy all of the credentials from one credential object into another.
 * Existing secrets and tokens are overwritten.
 * @param other the credentials to copy
 */
public void addAll(Credentials other) {
  addAll(other, true);
}
Code example source: org.apache.hadoop/hadoop-common
/**
 * Add the given Credentials to this user.
 * @param credentials of tokens and secrets
 */
public void addCredentials(Credentials credentials) {
  synchronized (subject) {
    getCredentialsInternal().addAll(credentials);
  }
}
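For reference, a sketch of a typical call site for the method above, assuming a login (Kerberos or a token file) has already populated the current user; the helper class name is invented:
import java.io.IOException;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public class AttachToCurrentUser {
  // Attaches extra delegation tokens and secrets to the logged-in user.
  // Internally this ends up in Credentials.addAll(), so entries with
  // the same alias are overwritten.
  public static void attach(Credentials extra) throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    ugi.addCredentials(extra);
  }
}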
Code example source: apache/hive
/**
 * Configure the job with the multiple output formats added. This method
 * should be called after all the output formats have been added and
 * configured and before the job submission.
 */
public void configure() {
  StringBuilder aliases = new StringBuilder();
  Configuration jobConf = job.getConfiguration();
  for (Entry<String, Job> entry : outputConfigs.entrySet()) {
    // Copy credentials
    job.getCredentials().addAll(entry.getValue().getCredentials());
    String alias = entry.getKey();
    aliases.append(alias).append(COMMA_DELIM);
    // Store the differing configuration for each alias in the job
    // as a setting.
    setAliasConf(alias, job, entry.getValue());
  }
  aliases.delete(aliases.length() - COMMA_DELIM.length(), aliases.length());
  jobConf.set(MO_ALIASES, aliases.toString());
}
Code example source: apache/flink
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
  super.read(in);
  String hadoopOutputFormatName = in.readUTF();
  if (jobConf == null) {
    jobConf = new JobConf();
  }
  jobConf.readFields(in);
  try {
    this.mapredOutputFormat = (org.apache.hadoop.mapred.OutputFormat<K, V>) Class
        .forName(hadoopOutputFormatName, true, Thread.currentThread().getContextClassLoader())
        .newInstance();
  } catch (Exception e) {
    throw new RuntimeException("Unable to instantiate the hadoop output format", e);
  }
  ReflectionUtils.setConf(mapredOutputFormat, jobConf);
  jobConf.getCredentials().addAll(this.credentials);
  Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
  if (currentUserCreds != null) {
    jobConf.getCredentials().addAll(currentUserCreds);
  }
}
Code example source: apache/flink
jobConf.getCredentials().addAll(this.credentials);
Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
if (currentUserCreds != null) {
  jobConf.getCredentials().addAll(currentUserCreds);
}
Code example source: apache/flink
@Override
public HadoopInputSplit[] createInputSplits(int minNumSplits) throws IOException {
  configuration.setInt("mapreduce.input.fileinputformat.split.minsize", minNumSplits);
  JobContext jobContext = new JobContextImpl(configuration, new JobID());
  jobContext.getCredentials().addAll(this.credentials);
  Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
  if (currentUserCreds != null) {
    jobContext.getCredentials().addAll(currentUserCreds);
  }
  List<org.apache.hadoop.mapreduce.InputSplit> splits;
  try {
    splits = this.mapreduceInputFormat.getSplits(jobContext);
  } catch (InterruptedException e) {
    throw new IOException("Could not get Splits.", e);
  }
  HadoopInputSplit[] hadoopInputSplits = new HadoopInputSplit[splits.size()];
  for (int i = 0; i < hadoopInputSplits.length; i++) {
    hadoopInputSplits[i] = new HadoopInputSplit(i, splits.get(i), jobContext);
  }
  return hadoopInputSplits;
}
Code example source: apache/flink
@Override
public void finalizeGlobal(int parallelism) throws IOException {
  JobContext jobContext;
  TaskAttemptContext taskContext;
  try {
    TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
        + String.format("%" + (6 - Integer.toString(1).length()) + "s", " ").replace(" ", "0")
        + Integer.toString(1)
        + "_0");
    jobContext = new JobContextImpl(this.configuration, new JobID());
    taskContext = new TaskAttemptContextImpl(this.configuration, taskAttemptID);
    this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(taskContext);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  jobContext.getCredentials().addAll(this.credentials);
  Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
  if (currentUserCreds != null) {
    jobContext.getCredentials().addAll(currentUserCreds);
  }
  // finalize HDFS output format
  if (this.outputCommitter != null) {
    this.outputCommitter.commitJob(jobContext);
  }
}
Code example source: apache/flink
this.context.getCredentials().addAll(this.credentials);
Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
if (currentUserCreds != null) {
  this.context.getCredentials().addAll(currentUserCreds);
}
Code example source: apache/hive
private ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
  Credentials containerCredentials = new Credentials();
  containerCredentials.addAll(credentials);
  DataOutputBuffer containerTokens_dob = new DataOutputBuffer();
  containerCredentials.writeTokenStorageToStream(containerTokens_dob);
  return ByteBuffer.wrap(containerTokens_dob.getData(), 0, containerTokens_dob.getLength());
}
Code example source: apache/hive
public static ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
  Credentials containerCredentials = new Credentials();
  containerCredentials.addAll(credentials);
  DataOutputBuffer containerTokensDob = new DataOutputBuffer();
  containerCredentials.writeTokenStorageToStream(containerTokensDob);
  return ByteBuffer.wrap(containerTokensDob.getData(), 0, containerTokensDob.getLength());
}
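Both Hive helpers above serialize with writeTokenStorageToStream(). For completeness, here is a minimal round-trip sketch showing the matching read path via Credentials.readTokenStorageStream() (the helper class and buffer handling are illustrative, not taken from Hive):
import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {
  public static Credentials roundTrip(Credentials credentials) throws IOException {
    // Serialize, as in the Hive examples above.
    DataOutputBuffer out = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(out);

    // Deserialize the same bytes into a fresh Credentials object.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    Credentials restored = new Credentials();
    restored.readTokenStorageStream(in);
    return restored;
  }
}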
Code example source: apache/incubator-gobblin
ugi.getCredentials().addAll(cred);
Code example source: apache/hive
@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
  for (String alias : getOutputFormatAliases(context)) {
    LOGGER.debug("Calling checkOutputSpecs for alias: " + alias);
    JobContext aliasContext = getJobContext(alias, context);
    OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext);
    outputFormat.checkOutputSpecs(aliasContext);
    // Copy credentials and any new config added back to JobContext
    context.getCredentials().addAll(aliasContext.getCredentials());
    setAliasConf(alias, context, aliasContext);
  }
}
Code example source: apache/hbase
String fileLoc = conf.get(CREDENTIALS_LOCATION);
Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
job.getCredentials().addAll(cred);
Code example source: apache/hive
job.getCredentials().addAll(crd);
crd.addAll(job.getCredentials());
jobCredentials.put(INNER_SIGNATURE_PREFIX + "_" + signature, crd);
Code example source: apache/hive
job.getCredentials().addAll(crd);
Code example source: org.apache.twill/twill-yarn
/**
 * Creates a {@link Credentials} by copying the {@link Credentials} of the current user.
 */
private Credentials createCredentials() {
  Credentials credentials = new Credentials();
  try {
    credentials.addAll(UserGroupInformation.getCurrentUser().getCredentials());
  } catch (IOException e) {
    LOG.warn("Failed to get current user UGI. Current user credentials not added.", e);
  }
  return credentials;
}