Usage of the org.apache.hadoop.security.Credentials.writeTokenStorageFile() method, with code examples


This article collects Java code examples of the org.apache.hadoop.security.Credentials.writeTokenStorageFile() method and shows how it is used in practice. The examples were extracted from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they are fairly representative and should be useful references. Details of Credentials.writeTokenStorageFile() are as follows:
Package path: org.apache.hadoop.security.Credentials
Class name: Credentials
Method name: writeTokenStorageFile

About Credentials.writeTokenStorageFile

Serializes the tokens and secret keys held in a Credentials object and writes them to the given Path as a Hadoop token storage file. A three-argument overload additionally takes a Credentials.SerializedFormat (WRITABLE or PROTOBUF); the two-argument form defaults to WRITABLE for backward compatibility, as the first example below shows.
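
A minimal, self-contained sketch of the round trip (the path file:///tmp/tokens.bin and the renewer name are placeholders chosen for illustration, not taken from any of the projects below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;

public class WriteTokenFileSketch {
 public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);

  // Collect whatever delegation tokens the filesystem can issue
  // (on an insecure cluster this may add none).
  Credentials creds = new Credentials();
  fs.addDelegationTokens("placeholder-renewer", creds);

  // Serialize all tokens and secret keys to a file.
  Path tokenFile = new Path("file:///tmp/tokens.bin");
  creds.writeTokenStorageFile(tokenFile, conf);

  // Read the file back to verify the round trip.
  Credentials restored = Credentials.readTokenStorageFile(tokenFile, conf);
  System.out.println("Tokens restored: " + restored.numberOfTokens());
 }
}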

Code examples

Code example source: org.apache.hadoop/hadoop-common

public void writeTokenStorageFile(Path filename,
                 Configuration conf) throws IOException {
 // by default store in the oldest supported format for compatibility
 writeTokenStorageFile(filename, conf, SerializedFormat.WRITABLE);
}
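
Callers that want the newer serialization can call the three-argument overload directly; a one-line sketch, assuming a Credentials instance creds and a Configuration conf (the path is illustrative):

creds.writeTokenStorageFile(new Path("file:///tmp/tokens.bin"), conf,
  Credentials.SerializedFormat.PROTOBUF);

Credentials.readTokenStorageFile() detects the format from the file header, so files written either way can be read back the same way.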

Code example source: org.apache.hadoop/hadoop-common

/** Write out a Credentials object as a local file.
 *  @param f a local File object.
 *  @param format a string equal to FORMAT_PB or FORMAT_JAVA.
 *  @param creds the Credentials object to be written out.
 *  @param conf a Configuration object passed along.
 *  @throws IOException
 */
public static void doFormattedWrite(
  File f, String format, Credentials creds, Configuration conf)
  throws IOException {
 // default to oldest supported format for compatibility
 Credentials.SerializedFormat credsFormat =
   Credentials.SerializedFormat.WRITABLE;
 if (format.equals(FORMAT_PB)) {
  credsFormat = Credentials.SerializedFormat.PROTOBUF;
 }
 creds.writeTokenStorageFile(fileToPath(f), conf, credsFormat);
}

Code example source: apache/hive (the same code also ships in apache/drill and com.facebook.presto.hive/hive-apache)

public SecureCmdDoAs(HiveConf conf) throws HiveException, IOException{
 // Get delegation token for user from filesystem and write the token along with
 // metastore tokens into a file
 String uname = UserGroupInformation.getLoginUser().getShortUserName();
 FileSystem fs = FileSystem.get(conf);
 Credentials cred = new Credentials();
 ShimLoader.getHadoopShims().addDelegationTokens(fs, cred, uname); // ask default fs first
 for (String uri : conf.getStringCollection("mapreduce.job.hdfs-servers")) {
  try {
   ShimLoader.getHadoopShims().addDelegationTokens(
     FileSystem.get(new URI(uri), conf),
     cred, uname);
  } catch (URISyntaxException e) {
   LOG.warn("Invalid URI in mapreduce.job.hdfs-servers:["+uri+"], ignoring.", e);
  }
 }
 tokenFile = File.createTempFile("hive_hadoop_delegation_token", null);
 tokenPath = new Path(tokenFile.toURI());
 //write credential with token to file
 cred.writeTokenStorageFile(tokenPath, conf);
}
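
The file written here is meant to be handed to a child process. The usual handoff is the environment variable named by UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION, which UserGroupInformation consults on login; a sketch, with a placeholder command line:

import org.apache.hadoop.security.UserGroupInformation;

// Hand the token file to a child process; "hadoop jar myjob.jar" is a
// placeholder command, and tokenFile is the File created above.
ProcessBuilder pb = new ProcessBuilder("hadoop", "jar", "myjob.jar");
pb.environment().put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION,
  tokenFile.getAbsolutePath());
pb.start();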

Code example source: apache/incubator-gobblin (the same helper also ships in org.apache.gobblin/gobblin-yarn)

/**
 * Write a {@link Token} to a given file.
 *
 * @param token the token to write
 * @param tokenFilePath the token file path
 * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
 * @throws IOException
 */
public static void writeTokenToFile(Token<? extends TokenIdentifier> token, Path tokenFilePath,
  Configuration configuration) throws IOException {
 Credentials credentials = new Credentials();
 credentials.addToken(token.getService(), token);
 credentials.writeTokenStorageFile(tokenFilePath, configuration);
}
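
To consume a file written by this helper, Credentials provides the matching reader; a short sketch (loadTokenFile is a made-up name for illustration):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public static void loadTokenFile(Path tokenFilePath, Configuration configuration)
  throws IOException {
 // Read the token file back and attach its tokens to the current user
 // so that subsequent RPCs can authenticate with them.
 Credentials restored = Credentials.readTokenStorageFile(tokenFilePath, configuration);
 UserGroupInformation.getCurrentUser().addCredentials(restored);
}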

Code example source: apache/hive (the same code also ships in org.apache.hive.hcatalog/hive-webhcat)

public Object run() throws IOException {
  Credentials cred = new Credentials();
  for(Token<?> fsToken : fsTokens) {
   cred.addToken(fsToken.getService(), fsToken);
  }
  cred.addToken(msToken.getService(), msToken);
  cred.writeTokenStorageFile(tokenPath, conf);
  return null;
 }
});

Code example source: org.apache.hadoop/hadoop-hdfs

@VisibleForTesting
static void saveDelegationToken(Configuration conf, FileSystem fs,
                final String renewer, final Path tokenFile)
    throws IOException {
 Token<?> token = fs.getDelegationToken(renewer);
 if (null != token) {
  Credentials cred = new Credentials();
  cred.addToken(token.getService(), token);
  // dtutil is replacing this tool; preserve legacy functionality
  cred.writeTokenStorageFile(tokenFile, conf,
    Credentials.SerializedFormat.WRITABLE);
  if (LOG.isDebugEnabled()) {
   LOG.debug("Fetched token " + fs.getUri() + " for " +
     token.getService() + " into " + tokenFile);
  }
 } else {
  System.err.println("ERROR: Failed to fetch token from " + fs.getUri());
 }
}
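
As the in-code comment says, this fetcher is legacy and hadoop dtutil is its replacement; the file is deliberately written in the older WRITABLE format so that existing consumers of this tool keep working.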

Code example source: org.apache.hadoop/hadoop-mapreduce-client-core

@SuppressWarnings("deprecation")
 @Test
 public void testGetTokensForNamenodes() throws IOException,
   URISyntaxException {
  Path TEST_ROOT_DIR =
    new Path(System.getProperty("test.build.data", "test/build/data"));
  // ick, but need fq path minus file:/
  String binaryTokenFile =
    FileSystem.getLocal(conf)
     .makeQualified(new Path(TEST_ROOT_DIR, "tokenFile")).toUri()
     .getPath();

  MockFileSystem fs1 = createFileSystemForServiceName("service1");
  Credentials creds = new Credentials();
  Token<?> token1 = fs1.getDelegationToken(renewer);
  creds.addToken(token1.getService(), token1);
  // wait to set, else the obtain tokens call above will fail with FNF
  conf.set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY, binaryTokenFile);
  creds.writeTokenStorageFile(new Path(binaryTokenFile), conf);
  TokenCache.obtainTokensForNamenodesInternal(fs1, creds, conf, renewer);
  String fs_addr = fs1.getCanonicalServiceName();
  Token<?> nnt = TokenCache.getDelegationToken(creds, fs_addr);
  assertNotNull("Token for nn is null", nnt);
 }

Code example source: org.apache.hadoop/hadoop-mapred-test

/**
 * Create a fake job tokens file.
 * @throws IOException
 */
protected void uploadJobTokensFile() throws IOException {
 File dir = new File(TEST_ROOT_DIR, jobId.toString());
 if (!dir.exists()) {
  assertTrue("failed to create dir=" + dir.getAbsolutePath(), dir.mkdirs());
 }
 // write an empty file; we don't need the keys for this test
 new Credentials().writeTokenStorageFile(new Path("file:///" + dir,
   TokenCache.JOB_TOKEN_HDFS_FILE), new Configuration());
}

Code example source: org.apache.hive.shims/hive-shims-common-secure (the same code also ships in org.spark-project.hive.shims/hive-shims-common-secure)

@Override
public Path createDelegationTokenFile(Configuration conf) throws IOException {
 //get delegation token for user
 String uname = UserGroupInformation.getLoginUser().getShortUserName();
 FileSystem fs = FileSystem.get(conf);
 Token<?> fsToken = fs.getDelegationToken(uname);
 File t = File.createTempFile("hive_hadoop_delegation_token", null);
 Path tokenPath = new Path(t.toURI());
 //write credential with token to file
 Credentials cred = new Credentials();
 cred.addToken(fsToken.getService(), fsToken);
 cred.writeTokenStorageFile(tokenPath, conf);
 return tokenPath;
}

Code example source: org.apache.hadoop/hadoop-mapred

/**
 * generate job token and save it into the file
 * @throws IOException
 */
private void generateAndStoreTokens() throws IOException{
 Path jobDir = jobtracker.getSystemDirectoryForJob(jobId);
 Path keysFile = new Path(jobDir, TokenCache.JOB_TOKEN_HDFS_FILE);
 if (tokenStorage == null) {
  tokenStorage = new Credentials();
 }
 
 //create JobToken file and write token to it
 JobTokenIdentifier identifier = new JobTokenIdentifier(new Text(jobId
   .toString()));
 Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>(identifier,
   jobtracker.getJobTokenSecretManager());
 token.setService(identifier.getJobId());
 
 TokenCache.setJobToken(token, tokenStorage);
 
 // write TokenStorage out
 tokenStorage.writeTokenStorageFile(keysFile, jobtracker.getConf());
 LOG.info("jobToken generated and stored with users keys in "
   + keysFile.toUri().getPath());
}
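
On the reading side, the same file can be loaded back with the generic reader (a sketch; tasks in this codebase may go through their own helper instead):

// Load the job token file back into a Credentials object.
Credentials ts = Credentials.readTokenStorageFile(keysFile, jobtracker.getConf());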

Code example source: ch.cern.hadoop/hadoop-hdfs (the same code also ships in io.prestosql.hadoop/hadoop-apache)

// Fetch tokens over HTTP(S) from webUrl and write them to a file.
Credentials creds = getDTfromRemote(connectionFactory, new URI(
  webUrl), renewer, null);
creds.writeTokenStorageFile(tokenFile, conf);
for (Token<?> token : creds.getAllTokens()) {
 System.out.println("Fetched token via " + webUrl + " for "
   + token.getService() + " into " + tokenFile);
}

// Fetch tokens directly from the FileSystem and write them to a file.
Credentials cred = new Credentials();
Token<?>[] tokens = fs.addDelegationTokens(renewer, cred);
cred.writeTokenStorageFile(tokenFile, conf);
for (Token<?> token : tokens) {
 System.out.println("Fetched token for " + token.getService()
   + " into " + tokenFile);
}

Code example source: ch.cern.hadoop/hadoop-common (the same snippet also ships in com.github.jiayuhan-it/hadoop-common)

Credentials creds = new Credentials();
creds.addToken(new Text("token-alias"), token);
creds.writeTokenStorageFile(tmpPath, conf);
