org.apache.hadoop.security.Credentials.addSecretKey()方法的使用及代码示例

x33g5p2x  于2022-01-18 转载在 其他  
字(8.6k)|赞(0)|评价(0)|浏览(163)

本文整理了Java中org.apache.hadoop.security.Credentials.addSecretKey()方法的一些代码示例,展示了Credentials.addSecretKey()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Credentials.addSecretKey()方法的具体详情如下:
包路径:org.apache.hadoop.security.Credentials
类名称:Credentials
方法名:addSecretKey

Credentials.addSecretKey介绍

[英]Set the key for an alias
[中]设置别名的键

代码示例

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Creates a new credential entry under the given alias.
 *
 * @param name alias to store the credential under
 * @param credential the credential material
 * @return the newly created {@code CredentialEntry}
 * @throws IOException if a credential with this alias already exists
 */
@Override
public synchronized CredentialEntry createCredentialEntry(String name, char[] credential) 
  throws IOException {
 Text nameT = new Text(name);
 if (credentials.getSecretKey(nameT) != null) {
  throw new IOException("Credential " + name + 
    " already exists in " + this);
 }
 // Reuse nameT rather than allocating a second Text for the same alias,
 // and use the charset constant instead of the "UTF-8" string name, which
 // needlessly routes through the checked UnsupportedEncodingException path.
 credentials.addSecretKey(nameT, 
   new String(credential).getBytes(java.nio.charset.StandardCharsets.UTF_8));
 return new CredentialEntry(name, credential);
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Rolls a new version of an existing key: validates the supplied material
 * against the recorded bit length, then stores both the updated metadata
 * and the new version's material as secret keys.
 *
 * @param name name of the key to roll
 * @param material new key material; length must match the key's bit length
 * @return the newly created {@code KeyVersion}
 * @throws IOException if the key is unknown or the material has the wrong length
 */
@Override
public synchronized KeyVersion rollNewVersion(String name,
                 byte[] material) throws IOException {
 Metadata metadata = getMetadata(name);
 if (metadata == null) {
  throw new IOException("Key " + name + " not found");
 }
 int requiredBits = metadata.getBitLength();
 int providedBits = 8 * material.length;
 if (requiredBits != providedBits) {
  throw new IOException("Wrong key length. Required " +
    requiredBits + ", but got " + providedBits);
 }
 // addVersion() bumps and returns the next version number for this key.
 int rolledVersion = metadata.addVersion();
 credentials.addSecretKey(new Text(name), metadata.serialize());
 String rolledVersionName = buildVersionName(name, rolledVersion);
 credentials.addSecretKey(new Text(rolledVersionName), material);
 return new KeyVersion(name, rolledVersionName, material);
}

代码示例来源:origin: apache/hive

/**
 * Lets the table's storage handler contribute to the job configuration and
 * moves any table-level job secrets into the job's {@code Credentials}.
 *
 * @param tableDesc descriptor of the table being configured
 * @param jobConf job configuration to populate
 * @throws RuntimeException wrapping any {@code HiveException} from handler lookup
 */
public static void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
 String handlerClass = tableDesc.getProperties().getProperty(
   org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
 try {
  HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(jobConf, handlerClass);
  if (storageHandler != null) {
   storageHandler.configureJobConf(tableDesc, jobConf);
  }
  if (tableDesc.getJobSecrets() != null) {
   for (Map.Entry<String, String> entry : tableDesc.getJobSecrets().entrySet()) {
    String key = TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT +
        tableDesc.getTableName() + TableDesc.SECRET_DELIMIT + entry.getKey();
    // Use an explicit charset: the no-arg getBytes() depends on the JVM's
    // platform default and could corrupt secret bytes across machines.
    jobConf.getCredentials().addSecretKey(new Text(key),
        entry.getValue().getBytes(java.nio.charset.StandardCharsets.UTF_8));
   }
   // Drop the plaintext secrets from the descriptor once they have been
   // transferred into the credentials store.
   tableDesc.getJobSecrets().clear();
  }
 } catch (HiveException e) {
  throw new RuntimeException(e);
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Creates a brand-new key: rejects duplicates and mismatched material
 * length, records metadata in the cache, and stores both the metadata and
 * version-0 material as secret keys.
 *
 * @param name name for the new key; must not already exist
 * @param material key material; length must match {@code options.getBitLength()}
 * @param options cipher, bit length, description and attributes for the key
 * @return the newly created {@code KeyVersion}
 * @throws IOException on duplicate name or wrong material length
 */
@Override
public synchronized KeyVersion createKey(String name, byte[] material,
               Options options) throws IOException {
 Text keyName = new Text(name);
 if (credentials.getSecretKey(keyName) != null) {
  throw new IOException("Key " + name + " already exists in " + this);
 }
 int requiredBits = options.getBitLength();
 int providedBits = 8 * material.length;
 if (requiredBits != providedBits) {
  throw new IOException("Wrong key length. Required " +
    requiredBits + ", but got " + providedBits);
 }
 // Initial version number is 1; the first stored version name uses index 0.
 Metadata metadata = new Metadata(options.getCipher(), options.getBitLength(),
   options.getDescription(), options.getAttributes(), new Date(), 1);
 cache.put(name, metadata);
 String firstVersionName = buildVersionName(name, 0);
 credentials.addSecretKey(keyName, metadata.serialize());
 credentials.addSecretKey(new Text(firstVersionName), material);
 return new KeyVersion(name, firstVersionName, material);
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Populates keys/values from proto buffer storage.
 *
 * Reads one delimited {@code CredentialsProto} message and re-adds every
 * serialized token and secret key into this instance.
 *
 * @param in - stream ready to read a serialized proto buffer message
 */
void readProto(DataInput in) throws IOException {
 CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream) in);
 for (CredentialsKVProto tokenEntry : storage.getTokensList()) {
  Text tokenAlias = new Text(tokenEntry.getAliasBytes().toByteArray());
  addToken(tokenAlias,
       (Token<? extends TokenIdentifier>) new Token(tokenEntry.getToken()));
 }
 for (CredentialsKVProto secretEntry : storage.getSecretsList()) {
  Text secretAlias = new Text(secretEntry.getAliasBytes().toByteArray());
  addSecretKey(secretAlias, secretEntry.getSecret().toByteArray());
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-mapreduce-client-core

/**
 * Verifies that tokens and secret keys added to the login user's UGI are
 * visible through a freshly constructed Job's credentials.
 */
@Test
 public void testUGICredentialsPropogation() throws Exception {
  Credentials loginCreds = new Credentials();
  Token<?> mockToken = mock(Token.class);
  Text tokenService = new Text("service");
  Text secretName = new Text("secret");
  byte[] secretBytes = new byte[]{};

  loginCreds.addToken(tokenService, mockToken);
  loginCreds.addSecretKey(secretName, secretBytes);
  UserGroupInformation.getLoginUser().addCredentials(loginCreds);

  Job job = new Job(new JobConf());

  // Same object identity, not merely equal contents.
  assertSame(mockToken, job.getCredentials().getToken(tokenService));
  assertSame(secretBytes, job.getCredentials().getSecretKey(secretName));
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core

/**
 * Stores the shuffle secret key in the job's credentials under the
 * well-known SHUFFLE_TOKEN alias.
 *
 * @param key raw shuffle secret bytes
 * @param credentials credentials store to add the key to
 */
@InterfaceAudience.Private
public static void setShuffleSecretKey(byte[] key, Credentials credentials) {
 credentials.addSecretKey(SHUFFLE_TOKEN, key);
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core

/**
 * Stores the encrypted-spill key in the job's credentials under the
 * well-known ENC_SPILL_KEY alias.
 *
 * @param key raw spill-encryption key bytes
 * @param credentials credentials store to add the key to
 */
@InterfaceAudience.Private
public static void setEncryptedSpillKey(byte[] key, Credentials credentials) {
 credentials.addSecretKey(ENC_SPILL_KEY, key);
}

代码示例来源:origin: io.hops/hadoop-mapreduce-client-core

/**
 * Stores the encrypted-spill key in the job's credentials under the
 * well-known ENC_SPILL_KEY alias.
 *
 * @param key raw spill-encryption key bytes
 * @param credentials credentials store to add the key to
 */
@InterfaceAudience.Private
public static void setEncryptedSpillKey(byte[] key, Credentials credentials) {
 credentials.addSecretKey(ENC_SPILL_KEY, key);
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

/**
 * Stores the encrypted-spill key in the job's credentials under the
 * well-known ENC_SPILL_KEY alias.
 *
 * @param key raw spill-encryption key bytes
 * @param credentials credentials store to add the key to
 */
@InterfaceAudience.Private
public static void setEncryptedSpillKey(byte[] key, Credentials credentials) {
 credentials.addSecretKey(ENC_SPILL_KEY, key);
}

代码示例来源:origin: io.hops/hadoop-common

/**
 * Creates a new credential entry under the given alias.
 *
 * @param name alias to store the credential under
 * @param credential the credential material
 * @return the newly created {@code CredentialEntry}
 * @throws IOException if a credential with this alias already exists
 */
@Override
public CredentialEntry createCredentialEntry(String name, char[] credential) 
  throws IOException {
 Text nameT = new Text(name);
 if (credentials.getSecretKey(nameT) != null) {
  throw new IOException("Credential " + name + 
    " already exists in " + this);
 }
 // Reuse nameT rather than allocating a second Text for the same alias,
 // and use the charset constant instead of the "UTF-8" string name.
 credentials.addSecretKey(nameT, 
   new String(credential).getBytes(java.nio.charset.StandardCharsets.UTF_8));
 return new CredentialEntry(name, credential);
}

代码示例来源:origin: io.hops/hadoop-mapreduce-client-core

/**
 * Stores the shuffle secret key in the job's credentials under the
 * well-known SHUFFLE_TOKEN alias.
 *
 * @param key raw shuffle secret bytes
 * @param credentials credentials store to add the key to
 */
@InterfaceAudience.Private
public static void setShuffleSecretKey(byte[] key, Credentials credentials) {
 credentials.addSecretKey(SHUFFLE_TOKEN, key);
}

代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core

/**
 * Stores the encrypted-spill key in the job's credentials under the
 * well-known ENC_SPILL_KEY alias.
 *
 * @param key raw spill-encryption key bytes
 * @param credentials credentials store to add the key to
 */
@InterfaceAudience.Private
public static void setEncryptedSpillKey(byte[] key, Credentials credentials) {
 credentials.addSecretKey(ENC_SPILL_KEY, key);
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Creates a new credential entry under the given alias.
 *
 * @param name alias to store the credential under
 * @param credential the credential material
 * @return the newly created {@code CredentialEntry}
 * @throws IOException if a credential with this alias already exists
 */
@Override
public CredentialEntry createCredentialEntry(String name, char[] credential) 
  throws IOException {
 Text nameT = new Text(name);
 if (credentials.getSecretKey(nameT) != null) {
  throw new IOException("Credential " + name + 
    " already exists in " + this);
 }
 // Reuse nameT rather than allocating a second Text for the same alias,
 // and use the charset constant instead of the "UTF-8" string name.
 credentials.addSecretKey(nameT, 
   new String(credential).getBytes(java.nio.charset.StandardCharsets.UTF_8));
 return new CredentialEntry(name, credential);
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

/**
 * Creates a new credential entry under the given alias.
 *
 * @param name alias to store the credential under
 * @param credential the credential material
 * @return the newly created {@code CredentialEntry}
 * @throws IOException if a credential with this alias already exists
 */
@Override
public CredentialEntry createCredentialEntry(String name, char[] credential) 
  throws IOException {
 Text nameT = new Text(name);
 if (credentials.getSecretKey(nameT) != null) {
  throw new IOException("Credential " + name + 
    " already exists in " + this);
 }
 // Reuse nameT rather than allocating a second Text for the same alias,
 // and use the charset constant instead of the "UTF-8" string name.
 credentials.addSecretKey(nameT, 
   new String(credential).getBytes(java.nio.charset.StandardCharsets.UTF_8));
 return new CredentialEntry(name, credential);
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Creates a new credential entry under the given alias.
 *
 * @param name alias to store the credential under
 * @param credential the credential material
 * @return the newly created {@code CredentialEntry}
 * @throws IOException if a credential with this alias already exists
 */
@Override
public CredentialEntry createCredentialEntry(String name, char[] credential) 
  throws IOException {
 Text nameT = new Text(name);
 if (credentials.getSecretKey(nameT) != null) {
  throw new IOException("Credential " + name + 
    " already exists in " + this);
 }
 // Reuse nameT rather than allocating a second Text for the same alias,
 // and use the charset constant instead of the "UTF-8" string name.
 credentials.addSecretKey(nameT, 
   new String(credential).getBytes(java.nio.charset.StandardCharsets.UTF_8));
 return new CredentialEntry(name, credential);
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-yarn-server-resourcemanager

/**
 * Builds a {@code Credentials} object for the given application attempt,
 * containing the client-token master key when the attempt has one.
 *
 * @param appAttempt attempt to pull the client-token master key from
 * @return credentials holding at most the AM client-token master key
 */
public Credentials getCredentialsFromAppAttempt(RMAppAttempt appAttempt) {
 Credentials result = new Credentials();
 SecretKey masterKey = appAttempt.getClientTokenMasterKey();
 if (masterKey == null) {
  // Nothing to propagate; return the empty credentials.
  return result;
 }
 result.addSecretKey(AM_CLIENT_TOKEN_MASTER_KEY_NAME, masterKey.getEncoded());
 return result;
}

代码示例来源:origin: org.apache.hadoop/hadoop-yarn-server-resourcemanager

/**
 * Builds a {@code Credentials} object for the given application attempt,
 * containing the client-token master key when the attempt has one.
 *
 * @param appAttempt attempt to pull the client-token master key from
 * @return credentials holding at most the AM client-token master key
 */
public Credentials getCredentialsFromAppAttempt(RMAppAttempt appAttempt) {
 Credentials result = new Credentials();
 SecretKey masterKey = appAttempt.getClientTokenMasterKey();
 if (masterKey == null) {
  // Nothing to propagate; return the empty credentials.
  return result;
 }
 result.addSecretKey(AM_CLIENT_TOKEN_MASTER_KEY_NAME, masterKey.getEncoded());
 return result;
}

代码示例来源:origin: ch.cern.hadoop/hadoop-yarn-server-resourcemanager

/**
 * Builds a {@code Credentials} object for the given application attempt,
 * containing the client-token master key when the attempt has one.
 *
 * @param appAttempt attempt to pull the client-token master key from
 * @return credentials holding at most the AM client-token master key
 */
public Credentials getCredentialsFromAppAttempt(RMAppAttempt appAttempt) {
 Credentials result = new Credentials();
 SecretKey masterKey = appAttempt.getClientTokenMasterKey();
 if (masterKey == null) {
  // Nothing to propagate; return the empty credentials.
  return result;
 }
 result.addSecretKey(AM_CLIENT_TOKEN_MASTER_KEY_NAME, masterKey.getEncoded());
 return result;
}

代码示例来源:origin: apache/sqoop

/**
 * Seeds the configuration with a default connection URL and username, and
 * stores the default password as a secret key in the job credentials.
 */
private void setupDefaultConfiguration() {
  conf.set(DBConfiguration.URL_PROPERTY, "localhost:11111");
  conf.set(DBConfiguration.USERNAME_PROPERTY, DEFAULT_FTP_USERNAME);
  // Local variable, so lowerCamelCase (UPPER_SNAKE_CASE is for constants).
  Text passwordSecretKey = new Text(DBConfiguration.PASSWORD_PROPERTY);
  // Use an explicit charset: the no-arg getBytes() depends on the JVM's
  // platform default encoding.
  conf.getCredentials().addSecretKey(passwordSecretKey,
   DEFAULT_FTP_PASSWORD.getBytes(java.nio.charset.StandardCharsets.UTF_8));
 }
}

相关文章