Usage of org.apache.hadoop.security.Credentials.readFields() with code examples


This article collects Java code examples for the org.apache.hadoop.security.Credentials.readFields() method and shows how it is used in practice. The examples were extracted from selected open-source projects hosted on GitHub, Maven Central, and similar platforms, so they should make useful references. Details of Credentials.readFields() are as follows:

Package: org.apache.hadoop.security
Class: Credentials
Method: readFields

Overview of Credentials.readFields

Loads all the tokens and secret keys from the given DataInput into this Credentials instance. readFields() is the read half of the Writable contract that Credentials implements: it consumes data in the raw format produced by Credentials.write(DataOutput), without the magic header used by token storage files (see the readTokenStorageStream examples below).
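
To make the write/read pairing concrete, here is a minimal self-contained round trip (the alias name and key bytes are arbitrary placeholders):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {
  public static void main(String[] args) throws IOException {
    // Populate a Credentials object with one secret key.
    Credentials original = new Credentials();
    original.addSecretKey(new Text("my-alias"),
        "s3cr3t".getBytes(StandardCharsets.UTF_8));

    // Serialize it with write(), the Writable counterpart of readFields().
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(buffer)) {
      original.write(out);
    }

    // Restore it with readFields().
    Credentials restored = new Credentials();
    try (DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
      restored.readFields(in);
    }
    System.out.println(new String(
        restored.getSecretKey(new Text("my-alias")), StandardCharsets.UTF_8)); // s3cr3t
  }
}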

Code examples

Example source: apache/flink (the same snippet also ships as org.apache.flink/flink-hadoop-compatibility, org.apache.flink/flink-hadoop-compatibility_2.11, and com.alibaba.blink/flink-hadoop-compatibility)

public void read(ObjectInputStream in) throws IOException {
  this.credentials = new Credentials();
  credentials.readFields(in);
}
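
This compiles because ObjectInputStream implements ObjectInput, which extends DataInput, the parameter type that readFields() expects; a Hadoop Writable can therefore be read straight out of a Java object stream.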

Example source: apache/storm

private static Credentials doGetCredentials(CredentialKeyProvider provider,
    Map<String, String> credentials, String configKey) {
  Credentials credential = null;
  String credentialKey = provider.getCredentialKey(configKey);
  if (credentials != null && credentials.containsKey(credentialKey)) {
    try {
      // The map value (not the key itself) holds the base64-encoded serialized credentials.
      byte[] credBytes = DatatypeConverter.parseBase64Binary(credentials.get(credentialKey));
      ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(credBytes));
      credential = new Credentials();
      credential.readFields(in);
    } catch (Exception e) {
      LOG.error("Could not obtain credentials from credentials map.", e);
    }
  }
  return credential;
}
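
For reference, the write side of this pattern serializes the Credentials and base64-encodes the bytes so they fit in a String-valued map. The sketch below is an illustration only; the class and method names (CredentialsEncoder, encode) are hypothetical and not part of Storm:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;

import javax.xml.bind.DatatypeConverter;

import org.apache.hadoop.security.Credentials;

public final class CredentialsEncoder {
  // Hypothetical helper: the inverse of the readFields() call above.
  public static String encode(Credentials credential) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
      // ObjectOutputStream implements DataOutput, the type write() expects.
      credential.write(out);
    }
    return DatatypeConverter.printBase64Binary(bytes.toByteArray());
  }
}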

Example source: org.apache.hadoop/hadoop-common

/**
 * Convenience method for reading a token from a DataInputStream.
 */
public void readTokenStorageStream(DataInputStream in) throws IOException {
  byte[] magic = new byte[TOKEN_STORAGE_MAGIC.length];
  in.readFully(magic);
  if (!Arrays.equals(magic, TOKEN_STORAGE_MAGIC)) {
    throw new IOException("Bad header found in token storage.");
  }
  SerializedFormat format;
  try {
    format = SerializedFormat.valueOf(in.readByte());
  } catch (IllegalArgumentException e) {
    throw new IOException(e);
  }
  switch (format) {
  case WRITABLE:
    readFields(in);
    break;
  case PROTOBUF:
    readProto(in);
    break;
  default:
    throw new IOException("Unsupported format " + format);
  }
}
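
The magic header and format byte checked here are produced by the token-storage writers on Credentials. Below is a minimal sketch of writing such a file and reading it back, assuming a local filesystem and a placeholder path:

import java.io.DataInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class TokenStorageRoundTrip {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    Path file = new Path("/tmp/creds.token"); // placeholder path

    // writeTokenStorageFile() prepends the magic header and a format byte.
    Credentials creds = new Credentials();
    creds.addSecretKey(new Text("alias"), new byte[] {1, 2, 3});
    creds.writeTokenStorageFile(file, conf);

    // readTokenStorageStream() validates the header, then delegates to
    // readFields() (or readProto() for the protobuf format).
    Credentials restored = new Credentials();
    try (DataInputStream in = FileSystem.getLocal(conf).open(file)) {
      restored.readTokenStorageStream(in);
    }
    System.out.println(restored.numberOfSecretKeys()); // 1
  }
}

For the common file case, the static Credentials.readTokenStorageFile(Path, Configuration) wraps this same stream logic.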

Example source: ch.cern.hadoop/hadoop-common (the identical pre-protobuf variant also appears in com.github.jiayuhan-it/hadoop-common, io.hops/hadoop-common, and io.prestosql.hadoop/hadoop-apache)

/**
 * Convenience method for reading a token storage file directly from a
 * DataInputStream.
 */
public void readTokenStorageStream(DataInputStream in) throws IOException {
  byte[] magic = new byte[TOKEN_STORAGE_MAGIC.length];
  in.readFully(magic);
  if (!Arrays.equals(magic, TOKEN_STORAGE_MAGIC)) {
    throw new IOException("Bad header found in token storage.");
  }
  byte version = in.readByte();
  if (version != TOKEN_STORAGE_VERSION) {
    throw new IOException("Unknown version " + version +
        " in token storage.");
  }
  readFields(in);
}
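
Compared with the org.apache.hadoop/hadoop-common variant shown earlier, this older variant accepts only a single hard-coded TOKEN_STORAGE_VERSION, while the newer code interprets the same byte as a SerializedFormat and adds a protobuf option alongside the Writable one.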

Example source: org.talend.components/simplefileio-runtime (the same code also appears in Talend/components)

private final UserGroupInformation getUgi() {
  // If the UGI has not been created, create it from the credentials, and don't inherit from the current or
  // login user.
  if (ugi == null) {
    // If the UGI has not been initialized, then create a new one with the credentials.
    try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(credentials))) {
      Credentials cred = new Credentials();
      cred.readFields(in);
      ugi = UserGroupInformation.createRemoteUser(principal, SaslRpcServer.AuthMethod.KERBEROS);
      ugi.addCredentials(cred);
    } catch (IOException e) {
      throw TalendRuntimeException.createUnexpectedException(e);
    }
  }
  return ugi;
}
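
The design choice worth noting: UserGroupInformation.createRemoteUser() builds a UGI for the given principal without touching the process-wide login user, and addCredentials() attaches the deserialized tokens and secret keys to it, so later ugi.doAs(...) calls run with exactly the credentials that arrived in the serialized byte array.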

Example source: org.apache.tez/tez-runtime-internals

@Override
public void readFields(DataInput in) throws IOException {
  shouldDie = in.readBoolean();
  boolean taskComing = in.readBoolean();
  if (taskComing) {
    taskSpec = new TaskSpec();
    taskSpec.readFields(in);
  }
  int numAdditionalResources = in.readInt();
  additionalResources = Maps.newHashMap();
  if (numAdditionalResources != -1) {
    for (int i = 0; i < numAdditionalResources; i++) {
      String resourceName = in.readUTF();
      TezLocalResource localResource = new TezLocalResource();
      localResource.readFields(in);
      additionalResources.put(resourceName, localResource);
    }
  }
  credentialsChanged = in.readBoolean();
  if (credentialsChanged) {
    boolean hasCredentials = in.readBoolean();
    if (hasCredentials) {
      credentials = new Credentials();
      credentials.readFields(in);
    }
  }
}
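
readFields() here consumes fields in a fixed order, so the matching write() must emit them in the same order. The following is an inferred sketch that simply mirrors the reads above, not a quotation of the actual Tez source; note the -1 sentinel for an absent resource map:

@Override
public void write(DataOutput out) throws IOException {
  out.writeBoolean(shouldDie);
  out.writeBoolean(taskSpec != null);          // the "taskComing" flag
  if (taskSpec != null) {
    taskSpec.write(out);
  }
  if (additionalResources == null) {           // -1 marks an absent map
    out.writeInt(-1);
  } else {
    out.writeInt(additionalResources.size());
    for (Map.Entry<String, TezLocalResource> e : additionalResources.entrySet()) {
      out.writeUTF(e.getKey());
      e.getValue().write(out);
    }
  }
  out.writeBoolean(credentialsChanged);
  if (credentialsChanged) {
    out.writeBoolean(credentials != null);     // the "hasCredentials" flag
    if (credentials != null) {
      credentials.write(out);
    }
  }
}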

Example source: ch.cern.hadoop/hadoop-hdfs (the same fragment also appears in io.prestosql.hadoop/hadoop-apache)

Credentials ts = new Credentials();
DataInputStream dis = new DataInputStream(in);
ts.readFields(dis);
for (Token<?> token : ts.getAllTokens()) {
  token.setKind(isHttps ? HsftpFileSystem.TOKEN_KIND : HftpFileSystem.TOKEN_KIND);
}

Example source: ch.cern.hadoop/hadoop-common

DataInputStream dis = new DataInputStream(new FileInputStream(tmpFileName));
ts = new Credentials();
ts.readFields(dis);
dis.close();
