Usage of the org.apache.hadoop.security.Credentials.<init>() method, with code examples


This article collects Java code examples for the org.apache.hadoop.security.Credentials.<init>() method and shows how Credentials.<init>() is used in practice. The examples are taken from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the Credentials.<init>() method are as follows:
Package path: org.apache.hadoop.security.Credentials
Class name: Credentials
Method name: <init>

About Credentials.<init>

Creates an empty credentials instance. The no-argument constructor builds a Credentials object holding no tokens or secret keys; the copy constructor Credentials(Credentials other) builds a copy of an existing instance that does not share its internal maps.
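
A minimal, self-contained sketch (the class name CredentialsInitDemo is illustrative, not taken from any of the projects below) showing both constructors:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsInitDemo {
  public static void main(String[] args) {
    // new Credentials(): an empty instance with no tokens or secret keys.
    Credentials creds = new Credentials();
    creds.addSecretKey(new Text("my-alias"), "secret".getBytes());

    // new Credentials(Credentials): a copy that does not share internal maps.
    Credentials copy = new Credentials(creds);
    System.out.println(copy.numberOfSecretKeys()); // prints 1
  }
}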

Code examples

Code example source: alibaba/jstorm

public JstormMasterContext(String user, ContainerId containerId,
              ApplicationAttemptId applicationAttemptId,
              long appSubmitTime, String nodeHostString,
              Configuration yarnConfig) {
  this.user = user;
  this.containerId = containerId;
  this.attemptId = applicationAttemptId;
  this.credentials = new Credentials();
  this.submitTime = appSubmitTime;
  this.address = nodeHostString;
  this.config = yarnConfig;
}

Code example source: apache/hive

private static Set<Object> cloneCredentials(Set<Object> old) {
 Set<Object> set = new HashSet<>();
 // Make sure Hadoop credentials objects do not reuse the maps.
 for (Object o : old) {
  set.add(o instanceof Credentials ? new Credentials((Credentials)o) : o);
 }
 return set;
}

Code example source: apache/flink

// Credentials implements org.apache.hadoop.io.Writable, and ObjectInputStream
// implements DataInput, so readFields() can consume the stream directly.
public void read(ObjectInputStream in) throws IOException {
  this.credentials = new Credentials();
  credentials.readFields(in);
}
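
The excerpt only shows the read side. A minimal sketch of the matching write side (assumed; not part of the Flink excerpt) relies on the same Writable symmetry:

// Hedged sketch: write counterpart, assuming the same `credentials` field.
// ObjectOutputStream implements DataOutput, so Writable.write() applies.
public void write(ObjectOutputStream out) throws IOException {
  credentials.write(out);
}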

Code example source: org.apache.hadoop/hadoop-common

// Returns the Credentials attached to the JAAS Subject, lazily creating and
// attaching an empty instance if none is present yet.
private synchronized Credentials getCredentialsInternal() {
 final Credentials credentials;
 final Set<Credentials> credentialsSet =
  subject.getPrivateCredentials(Credentials.class);
 if (!credentialsSet.isEmpty()){
  credentials = credentialsSet.iterator().next();
 } else {
  credentials = new Credentials();
  subject.getPrivateCredentials().add(credentials);
 }
 return credentials;
}

Code example source: apache/hive

// Excerpt: the run() body of a PrivilegedExceptionAction, presumably executed
// via UserGroupInformation.doAs() (the enclosing call is elided here).
public Object run() throws IOException, URISyntaxException {
  Credentials creds = new Credentials();
  // get tokens for the default FS. Not all FSs support delegation tokens, e.g. WASB
  collectTokens(FileSystem.get(conf), twrapper, creds, ugi.getShortUserName());
  // get tokens for all other known FSs, since Hive tables may resolve to different ones;
  // passing "creds" prevents duplicate tokens from being added
  Collection<String> URIs = conf.getStringCollection("mapreduce.job.hdfs-servers");
  for (String uri : URIs) {
    LOG.debug("Getting tokens for " + uri);
    collectTokens(FileSystem.get(new URI(uri), conf), twrapper, creds, ugi.getShortUserName());
  }
  return null;
}

Code example source: apache/storm

private static Credentials doGetCredentials(CredentialKeyProvider provider,
    Map<String, String> credentials, String configKey) {
  Credentials credential = null;
  String credentialKey = provider.getCredentialKey(configKey);
  if (credentials != null && credentials.containsKey(credentialKey)) {
    try {
      // Decode the stored value for this key; the original excerpt mistakenly
      // Base64-decoded the key itself instead of the map value.
      byte[] credBytes = DatatypeConverter.parseBase64Binary(credentials.get(credentialKey));
      ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(credBytes));
      credential = new Credentials();
      credential.readFields(in);
    } catch (Exception e) {
      LOG.error("Could not obtain credentials from credentials map.", e);
    }
  }
  return credential;
}
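
A hedged sketch of the matching encode step (assumed; not part of the Storm excerpt): serialize the Credentials and Base64-encode the bytes so they can be stored as a string value in the credentials map.

// Hedged sketch (helper name illustrative): Credentials -> Base64 string.
private static String encodeCredentials(Credentials credential) throws IOException {
  ByteArrayOutputStream bao = new ByteArrayOutputStream();
  try (ObjectOutputStream out = new ObjectOutputStream(bao)) {
    credential.write(out); // Writable.write(); ObjectOutputStream is a DataOutput
  }
  return DatatypeConverter.printBase64Binary(bao.toByteArray());
}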

Code example source: org.apache.hadoop/hadoop-common

/**
 * Obtain the tokens in credentials form associated with this user.
 * 
 * @return Credentials of tokens associated with this user
 */
public Credentials getCredentials() {
 synchronized (subject) {
  Credentials creds = new Credentials(getCredentialsInternal());
  Iterator<Token<?>> iter = creds.getAllTokens().iterator();
  while (iter.hasNext()) {
   if (iter.next().isPrivate()) {
    iter.remove();
   }
  }
  return creds;
 }
}

Code example source: apache/hive

public SecureCmdDoAs(HiveConf conf) throws HiveException, IOException{
 // Get delegation token for user from filesystem and write the token along with
 // metastore tokens into a file
 String uname = UserGroupInformation.getLoginUser().getShortUserName();
 FileSystem fs = FileSystem.get(conf);
 Credentials cred = new Credentials();
 ShimLoader.getHadoopShims().addDelegationTokens(fs, cred, uname); // ask default fs first
 for (String uri : conf.getStringCollection("mapreduce.job.hdfs-servers")) {
  try {
   ShimLoader.getHadoopShims().addDelegationTokens(
     FileSystem.get(new URI(uri), conf),
     cred, uname);
  } catch (URISyntaxException e) {
   LOG.warn("Invalid URI in mapreduce.job.hdfs-servers:["+uri+"], ignoring.", e);
  }
 }
 tokenFile = File.createTempFile("hive_hadoop_delegation_token", null);
 tokenPath = new Path(tokenFile.toURI());
 //write credential with token to file
 cred.writeTokenStorageFile(tokenPath, conf);
}
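
Note: UserGroupInformation picks up a token file pointed to by the HADOOP_TOKEN_FILE_LOCATION environment variable, so a file written this way is typically handed to a child process through that variable (an assumption about the surrounding Hive code, but standard Hadoop behavior).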

Code example source: apache/hive

/**
 * Transform a byte array of credentials into a Hadoop Credentials object.
 * @param binaryCredentials credentials in byte format, as they would
 *                          usually be when received from protocol buffers
 * @return a hadoop Credentials object
 */
public static Credentials credentialsFromByteArray(byte[] binaryCredentials)
    throws IOException {
  Credentials credentials = new Credentials();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(binaryCredentials, binaryCredentials.length);
  credentials.readTokenStorageStream(dib);
  return credentials;
}
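
The inverse direction uses the same token-storage format. A minimal sketch (the helper name is assumed; the pattern mirrors the serializeCredentials examples further below):

// Hedged sketch: Credentials -> byte[], the inverse of credentialsFromByteArray.
public static byte[] credentialsToByteArray(Credentials credentials) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  // getData() may be longer than the written content; copy only the valid prefix.
  return java.util.Arrays.copyOf(dob.getData(), dob.getLength());
}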

Code example source: apache/incubator-gobblin

/**
 * Write a {@link Token} to a given file.
 *
 * @param token the token to write
 * @param tokenFilePath the token file path
 * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
 * @throws IOException
 */
public static void writeTokenToFile(Token<? extends TokenIdentifier> token, Path tokenFilePath,
  Configuration configuration) throws IOException {
 Credentials credentials = new Credentials();
 credentials.addToken(token.getService(), token);
 credentials.writeTokenStorageFile(tokenFilePath, configuration);
}
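
A hedged sketch of the matching read side (assumed helper; Credentials.readTokenStorageFile is the standard API, also used in the hadoop-common example further below):

// Hedged sketch: load a token file written by writeTokenToFile and list its tokens.
public static Collection<Token<? extends TokenIdentifier>> readTokensFromFile(
    Path tokenFilePath, Configuration configuration) throws IOException {
  Credentials credentials = Credentials.readTokenStorageFile(tokenFilePath, configuration);
  return credentials.getAllTokens();
}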

Code example source: apache/hive

// Copy into a fresh Credentials object, write it in Hadoop's token-storage
// format, and wrap the serialized bytes in a ByteBuffer.
private ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
 Credentials containerCredentials = new Credentials();
 containerCredentials.addAll(credentials);
 DataOutputBuffer containerTokens_dob = new DataOutputBuffer();
 containerCredentials.writeTokenStorageToStream(containerTokens_dob);
 return ByteBuffer.wrap(containerTokens_dob.getData(), 0, containerTokens_dob.getLength());
}

Code example source: apache/flink

public static void setTokensFor(ContainerLaunchContext amContainer, List<Path> paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();
  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}
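
Follow-up note: the ByteBuffer set on the ContainerLaunchContext is what YARN hands to the launched container; the process started from this context reads the tokens back into its own UserGroupInformation at startup.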

Code example source: apache/hive

public static ByteBuffer serializeCredentials(Credentials credentials) throws
    IOException {
  Credentials containerCredentials = new Credentials();
  containerCredentials.addAll(credentials);
  DataOutputBuffer containerTokensDob = new DataOutputBuffer();
  containerCredentials.writeTokenStorageToStream(containerTokensDob);
  return ByteBuffer.wrap(containerTokensDob.getData(), 0, containerTokensDob.getLength());
}

Code example source: apache/hive

public MockRequest(SubmitWorkRequestProto requestProto, QueryFragmentInfo fragmentInfo,
          boolean canFinish, boolean canFinishQueue, long workTime,
          TezEvent initialEvent, boolean isGuaranteed) {
 super(requestProto, fragmentInfo, new Configuration(), new ExecutionContextImpl("localhost"),
   null, new Credentials(), 0, mock(AMReporter.class), null, mock(
   LlapDaemonExecutorMetrics.class), mock(KilledTaskHandler.class), mock(
   FragmentCompletionHandler.class), new DefaultHadoopShim(), null,
   requestProto.getWorkSpec().getVertex(), initialEvent, null, mock(
   SchedulerFragmentCompletingListener.class), mock(SocketFactory.class), isGuaranteed, null);
 this.workTime = workTime;
 this.canFinish = canFinish;
 this.canFinishQueue = canFinishQueue;
}

Code example source: apache/hive

// Excerpt: the run() body of a PrivilegedExceptionAction, presumably executed
// via UserGroupInformation.doAs() (the enclosing call is elided here).
public Object run() throws IOException {
  Credentials cred = new Credentials();
  for (Token<?> fsToken : fsTokens) {
    cred.addToken(fsToken.getService(), fsToken);
  }
  cred.addToken(msToken.getService(), msToken);
  cred.writeTokenStorageFile(tokenPath, conf);
  return null;
}

Code example source: apache/ignite

/**
 * Create UserGroupInformation for the specified user and credentials.
 *
 * @param user User.
 * @param credentialsBytes Credentials byte array.
 */
public static UserGroupInformation createUGI(String user, byte[] credentialsBytes) throws IOException {
  Credentials credentials = new Credentials();

  HadoopUtils.deserialize(credentials, credentialsBytes);

  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);

  ugi.addCredentials(credentials);

  if (credentials.numberOfTokens() > 0)
    ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.TOKEN);

  return ugi;
}
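
A hypothetical usage sketch (method and variable names assumed): run Hadoop calls as the restored user so they authenticate with the deserialized tokens.

import java.security.PrivilegedExceptionAction;

// Hedged sketch: execute Hadoop work under the restored identity.
static void runAsRestoredUser(String user, byte[] credentialsBytes) throws Exception {
  UserGroupInformation ugi = createUGI(user, credentialsBytes);
  ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
    // Calls made inside this block authenticate with the tokens added above,
    // e.g. FileSystem.get(new Configuration()).listStatus(...).
    return null;
  });
}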

Code example source: apache/hive

@Override
public RegisterDagResponseProto registerDag(RegisterDagRequestProto request)
  throws IOException {
 QueryIdentifierProto identifier = request.getQueryIdentifier();
 Credentials credentials;
 if (request.hasCredentialsBinary()) {
  credentials = LlapUtil.credentialsFromByteArray(
    request.getCredentialsBinary().toByteArray());
 } else {
  credentials = new Credentials();
 }
 queryTracker.registerDag(identifier.getApplicationIdString(),
   identifier.getDagIndex(), request.getUser(), credentials);
 if (LOG.isInfoEnabled()) {
  LOG.info("Application with  id={}, dagId={} registered",
    identifier.getApplicationIdString(), identifier.getDagIndex());
 }
 return RegisterDagResponseProto.newBuilder().build();
}

Code example source: org.apache.hadoop/hadoop-common

/** Append tokens from list of files in local filesystem, saving to last file.
 *  @param tokenFiles list of local File objects.  Last file holds the output.
 *  @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
 *  @param conf Configuration object passed along.
 *  @throws IOException
 */
public static void appendTokenFiles(
  ArrayList<File> tokenFiles, String fileFormat, Configuration conf)
  throws IOException {
 Credentials newCreds = new Credentials();
 File lastTokenFile = null;
 for (File tokenFile : tokenFiles) {
  lastTokenFile = tokenFile;
  Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
  for (Token<?> token : creds.getAllTokens()) {
   newCreds.addToken(token.getService(), token);
  }
 }
 doFormattedWrite(lastTokenFile, fileFormat, newCreds, conf);
}
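
This helper appears to back the token-file handling of the hadoop dtutil command line tool; FORMAT_PB and FORMAT_JAVA select the protobuf or legacy Java-serialization layout for the output file (an inference from the parameter names, not stated in the excerpt).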

Code example source: apache/hive

private SubmitWorkRequestProto createRequest(int fragmentNumber, int numSelfAndUpstreamTasks,
  int numSelfAndUpstreamComplete, int dagStartTime,
  int attemptStartTime, int withinDagPriority,
  String dagName) throws IOException {
 ApplicationId appId = ApplicationId.newInstance(9999, 72);
 TezDAGID dagId = TezDAGID.getInstance(appId, 1);
 TezVertexID vId = TezVertexID.getInstance(dagId, 35);
 return LlapDaemonTestUtils.buildSubmitProtoRequest(fragmentNumber, appId.toString(),
   dagId.getId(), vId.getId(), dagName, dagStartTime, attemptStartTime,
   numSelfAndUpstreamTasks, numSelfAndUpstreamComplete, withinDagPriority,
   new Credentials());
}
