Usage of the org.apache.hadoop.security.Credentials.writeTokenStorageToStream() method, with code examples


This article collects Java code examples of the org.apache.hadoop.security.Credentials.writeTokenStorageToStream() method and shows how it is used in practice. The examples are drawn from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, so they are fairly representative references. Details of the method:
Package: org.apache.hadoop.security
Class: Credentials
Method: writeTokenStorageToStream

Credentials.writeTokenStorageToStream overview

The method serializes every token and secret key held by a Credentials object to the given DataOutputStream. As the first example below shows, the overload without a format argument defaults to the oldest supported serialization format (WRITABLE) for compatibility, while a second overload accepts an explicit SerializedFormat.
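
For orientation before the project examples, here is a minimal, self-contained round-trip sketch; the secret-key alias and value are illustrative, not taken from any of the projects below. It writes a Credentials object with writeTokenStorageToStream() and reads it back with the matching readTokenStorageStream():

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {
 public static void main(String[] args) throws IOException {
  Credentials creds = new Credentials();
  // "my.secret" is an illustrative alias, not a Hadoop-defined key
  creds.addSecretKey(new Text("my.secret"), "s3cr3t".getBytes(StandardCharsets.UTF_8));

  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (DataOutputStream dos = new DataOutputStream(baos)) {
   creds.writeTokenStorageToStream(dos); // writes the default (WRITABLE) format
  }

  Credentials restored = new Credentials();
  try (DataInputStream dis =
    new DataInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
   restored.readTokenStorageStream(dis);
  }
  System.out.println("Secret keys restored: " + restored.numberOfSecretKeys());
 }
}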

Code examples

Code example source: org.apache.hadoop/hadoop-common

public void writeTokenStorageToStream(DataOutputStream os)
  throws IOException {
 // by default store in the oldest supported format for compatibility
 writeTokenStorageToStream(os, SerializedFormat.WRITABLE);
}
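
A hedged aside, not part of the quoted example: in Hadoop releases whose Credentials.SerializedFormat enum also defines PROTOBUF, a caller that does not need the legacy layout can pick the format explicitly (creds is assumed to be an existing Credentials instance):

// assumes a Hadoop release where SerializedFormat includes PROTOBUF
DataOutputBuffer dob = new DataOutputBuffer();
creds.writeTokenStorageToStream(dob, Credentials.SerializedFormat.PROTOBUF);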

Code example source: org.apache.hadoop/hadoop-common

public void writeTokenStorageFile(Path filename, Configuration conf,
  SerializedFormat format) throws IOException {
 try (FSDataOutputStream os =
      filename.getFileSystem(conf).create(filename)) {
  writeTokenStorageToStream(os, format);
 }
}

Code example source: apache/incubator-gobblin

private static void persistTokens(Credentials cred, File tokenFile) throws IOException {
 try (FileOutputStream fos = new FileOutputStream(tokenFile); DataOutputStream dos = new DataOutputStream(fos)) {
  cred.writeTokenStorageToStream(dos);
 }
 LOG.info("Tokens loaded in " + tokenFile.getAbsolutePath());
}
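
As a complement to persistTokens(), the file written this way can be read back with the static Credentials.readTokenStorageFile() method; a minimal sketch, assuming tokenFile from above and a Hadoop Configuration are in scope:

// load the tokens persisted above back into a Credentials object
Configuration conf = new Configuration();
Credentials restored = Credentials.readTokenStorageFile(tokenFile, conf);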

Code example source: apache/ignite

/**
   * Creates a ByteBuffer with serialized {@link Credentials}.
   *
   * @param creds The credentials.
   * @return The ByteBuffer with the credentials.
   * @throws IOException
   */
  public static ByteBuffer createTokenBuffer(Credentials creds) throws IOException {
    DataOutputBuffer dob = new DataOutputBuffer();

    creds.writeTokenStorageToStream(dob);

    return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  }

Code example source: apache/incubator-gobblin

private ByteBuffer getSecurityTokens() throws IOException {
 Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
 // Remove the AM->RM token before serializing so that containers cannot access it
 Iterator<Token<?>> tokenIterator = credentials.getAllTokens().iterator();
 while (tokenIterator.hasNext()) {
  Token<?> token = tokenIterator.next();
  if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
   tokenIterator.remove();
  }
 }
 Closer closer = Closer.create();
 try {
  DataOutputBuffer dataOutputBuffer = closer.register(new DataOutputBuffer());
  credentials.writeTokenStorageToStream(dataOutputBuffer);
  return ByteBuffer.wrap(dataOutputBuffer.getData(), 0, dataOutputBuffer.getLength());
 } catch (Throwable t) {
  throw closer.rethrow(t);
 } finally {
  closer.close();
 }
}

Code example source: apache/incubator-gobblin

private void setupSecurityTokens(ContainerLaunchContext containerLaunchContext) throws IOException {
 Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
 String tokenRenewer = this.yarnConfiguration.get(YarnConfiguration.RM_PRINCIPAL);
 if (tokenRenewer == null || tokenRenewer.length() == 0) {
  throw new IOException("Failed to get master Kerberos principal for the RM to use as renewer");
 }
 // For now, only getting tokens for the default file-system.
 Token<?>[] tokens = this.fs.addDelegationTokens(tokenRenewer, credentials);
 if (tokens != null) {
  for (Token<?> token : tokens) {
   LOGGER.info("Got delegation token for " + this.fs.getUri() + "; " + token);
  }
 }
 Closer closer = Closer.create();
 try {
  DataOutputBuffer dataOutputBuffer = closer.register(new DataOutputBuffer());
  credentials.writeTokenStorageToStream(dataOutputBuffer);
  ByteBuffer fsTokens = ByteBuffer.wrap(dataOutputBuffer.getData(), 0, dataOutputBuffer.getLength());
  containerLaunchContext.setTokens(fsTokens);
 } catch (Throwable t) {
  throw closer.rethrow(t);
 } finally {
  closer.close();
 }
}

Code example source: apache/flink

public static void setTokensFor(ContainerLaunchContext amContainer, List<Path> paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();
  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}

Code example source: apache/hive

private ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
 Credentials containerCredentials = new Credentials();
 containerCredentials.addAll(credentials);
 DataOutputBuffer containerTokens_dob = new DataOutputBuffer();
 containerCredentials.writeTokenStorageToStream(containerTokens_dob);
 return ByteBuffer.wrap(containerTokens_dob.getData(), 0, containerTokens_dob.getLength());
}

Code example source: apache/hive

public static ByteBuffer serializeCredentials(Credentials credentials) throws
   IOException {
  Credentials containerCredentials = new Credentials();
  containerCredentials.addAll(credentials);
  DataOutputBuffer containerTokensDob = new DataOutputBuffer();
  containerCredentials.writeTokenStorageToStream(containerTokensDob);
  return ByteBuffer.wrap(containerTokensDob.getData(), 0, containerTokensDob.getLength());
 }

Code example source: apache/flink

// excerpt from a larger method; a preceding call ends with HadoopUtils.getHadoopConfiguration(flinkConfig)
cred.writeTokenStorageToStream(dob);
ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
ctx.setTokens(securityTokens);

Code example source: alibaba/jstorm

credentials.writeTokenStorageToStream(dob);
ByteBuffer fsTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
amContainer.setTokens(fsTokens);

Code example source: alibaba/jstorm

Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob);

Code example source: apache/metron

public ByteBuffer tokensFromCredentials(Credentials credentials) throws IOException {
 // Note: the Credentials, Token, UserGroupInformation and DataOutputBuffer
 // classes are marked as LimitedPrivate
 credentials = credentials == null ? UserGroupInformation.getCurrentUser().getCredentials() : credentials;
 // Remove the AM->RM token before serializing so that containers cannot access it
 Iterator<Token<?>> iter = credentials.getAllTokens().iterator();
 LOG.info("Executing with tokens:");
 while (iter.hasNext()) {
  Token<?> token = iter.next();
  LOG.info(token);
  if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
   iter.remove();
  }
 }
 DataOutputBuffer dob = new DataOutputBuffer();
 credentials.writeTokenStorageToStream(dob);
 return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
}

Code example source: apache/metron

credentials.writeTokenStorageToStream(dob);
ByteBuffer fsTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
amContainer.setTokens(fsTokens);

Code example source: io.prestosql.hadoop/hadoop-apache

public void writeTokenStorageFile(Path filename,
                 Configuration conf) throws IOException {
 // try-with-resources ensures the stream is closed even if the write fails
 try (FSDataOutputStream os = filename.getFileSystem(conf).create(filename)) {
  writeTokenStorageToStream(os);
 }
}

Code example source: io.hops/hadoop-common (ships the same writeTokenStorageFile code as the io.prestosql.hadoop example above; omitted here as a verbatim duplicate)

Code example source: org.apache.gobblin/gobblin-utility (identical to the apache/incubator-gobblin persistTokens example above)

Code example source: org.apache.hive/hive-llap-ext-client (identical to the apache/hive serializeCredentials example above)

Code example source: org.apache.hadoop/hadoop-yarn-server-resourcemanager

private Credentials getCreds() throws IOException {
 Credentials ts = new Credentials();
 // test helper: exercises serialization of an empty Credentials object;
 // the buffer contents are discarded
 DataOutputBuffer dob = new DataOutputBuffer();
 ts.writeTokenStorageToStream(dob);
 return ts;
}

Code example source: org.apache.hadoop/hadoop-yarn-server-resourcemanager

private ByteBuffer getTokens() throws IOException {
 Credentials ts = new Credentials();
 DataOutputBuffer dob = new DataOutputBuffer();
 ts.writeTokenStorageToStream(dob);
 ByteBuffer securityTokens =
   ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
 return securityTokens;
}
