Usage and code examples for the org.apache.hadoop.security.Credentials.numberOfSecretKeys() method


This article collects code examples of the org.apache.hadoop.security.Credentials.numberOfSecretKeys() method in Java, showing how Credentials.numberOfSecretKeys() is used in practice. The examples are drawn from selected open-source projects published on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the Credentials.numberOfSecretKeys() method:
Package path: org.apache.hadoop.security.Credentials
Class name: Credentials
Method name: numberOfSecretKeys

About Credentials.numberOfSecretKeys

No upstream Javadoc is provided; the method simply returns the number of secret keys currently held by the Credentials object.
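
Before the collected examples, here is a minimal, self-contained sketch of the method's behavior (the alias names and key bytes are invented for illustration):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class NumberOfSecretKeysDemo {
  public static void main(String[] args) {
    Credentials creds = new Credentials();
    // arbitrary illustration values
    creds.addSecretKey(new Text("alias1"), "secret-one".getBytes());
    creds.addSecretKey(new Text("alias2"), "secret-two".getBytes());
    System.out.println(creds.numberOfSecretKeys()); // prints 2

    // re-adding under an existing alias replaces the key, so the count is unchanged
    creds.addSecretKey(new Text("alias1"), "replaced".getBytes());
    System.out.println(creds.numberOfSecretKeys()); // still prints 2
  }
}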

Code examples

Code example source: org.apache.tez/tez-runtime-internals

@Override
public String toString() {
 StringBuilder sb = new StringBuilder();
 sb.append("shouldDie: ").append(shouldDie);
 sb.append(", TaskSpec: ");
 if (taskSpec == null) {
  sb.append("none");
 } else {
  sb.append(taskSpec);
 }
 sb.append(", additionalResources: ");
 if (additionalResources == null) {
  sb.append("none");
 } else {
  sb.append(additionalResources);
 }
 sb.append(", credentialsChanged: ").append(credentialsChanged);
 sb.append(", credentials: ");
 if (credentials == null) {
  sb.append("none");
 } else {
  sb.append("#tokens=").append(credentials.numberOfTokens())
  .append(", #secretKeys: ").append(credentials.numberOfSecretKeys());
 }
 return sb.toString();
}
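
A detail worth noting: this toString() reports only the token and secret-key counts via numberOfTokens() and numberOfSecretKeys(), never the credential contents themselves, presumably so that task-status strings can be logged without leaking secret material.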

Code example source: org.apache.hadoop/hadoop-mapred-test

/**
 * attempts to access tokenCache as from client
 */
public void map(IntWritable key, IntWritable value,
  OutputCollector<IntWritable, NullWritable> output, Reporter reporter)
  throws IOException {
 // get token storage and a key
 byte[] key1 = ts.getSecretKey(new Text("alias1"));
 Collection<Token<? extends TokenIdentifier>> dts = ts.getAllTokens();
 int dts_size = 0;
 if(dts != null)
  dts_size = dts.size();
 if(dts_size != 2) { // one job token and one delegation token
  throw new RuntimeException("tokens are not available"); // fail the test
 }
 if(key1 == null || ts == null || ts.numberOfSecretKeys() != NUM_OF_KEYS) {
  throw new RuntimeException("secret keys are not available"); // fail the test
 }
 
 output.collect(new IntWritable(1), NullWritable.get());
}

Code example source: com.github.jiayuhan-it/hadoop-mapreduce-client-core (identical copies of this method also appear in the io.hops/hadoop-mapreduce-client-core, ch.cern.hadoop/hadoop-mapreduce-client-core, and io.prestosql.hadoop/hadoop-apache artifacts; those duplicates are omitted here)

/**
 * load job token from a file
 * @deprecated Use {@link Credentials#readTokenStorageFile} instead;
 * this method is kept for compatibility with Hadoop-1.
 * @param conf
 * @throws IOException
 */
@InterfaceAudience.Private
@Deprecated
public static Credentials loadTokens(String jobTokenFile, JobConf conf)
throws IOException {
 Path localJobTokenFile = new Path ("file:///" + jobTokenFile);
 Credentials ts = Credentials.readTokenStorageFile(localJobTokenFile, conf);
 if(LOG.isDebugEnabled()) {
  LOG.debug("Task: Loaded jobTokenFile from: "+
    localJobTokenFile.toUri().getPath() 
    +"; num of sec keys  = " + ts.numberOfSecretKeys() +
    " Number of tokens " +  ts.numberOfTokens());
 }
 return ts;
}

Code example source: org.apache.hadoop/hadoop-mapred

/**
 * load job token from a file
 * @param conf
 * @throws IOException
 */
@InterfaceAudience.Private
public static Credentials loadTokens(String jobTokenFile, JobConf conf) 
throws IOException {
 Path localJobTokenFile = new Path ("file:///" + jobTokenFile);
 Credentials ts = Credentials.readTokenStorageFile(localJobTokenFile, conf);
 if(LOG.isDebugEnabled()) {
  LOG.debug("Task: Loaded jobTokenFile from: "+
    localJobTokenFile.toUri().getPath() 
    +"; num of sec keys  = " + ts.numberOfSecretKeys() +
    " Number of tokens " +  ts.numberOfTokens());
 }
 return ts;
}

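All of these loadTokens variants delegate to Credentials.readTokenStorageFile. A minimal round trip through the token-storage file format looks like the sketch below (the temporary path is chosen purely for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class TokenStorageRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Credentials creds = new Credentials();
    creds.addSecretKey(new Text("alias1"), "secret".getBytes());

    // write the credentials out in the token-storage file format
    Path file = new Path("file:///tmp/tokens.bin"); // illustrative path
    creds.writeTokenStorageFile(file, conf);

    // read them back and confirm the secret key survived the round trip
    Credentials loaded = Credentials.readTokenStorageFile(file, conf);
    System.out.println(loaded.numberOfSecretKeys()); // prints 1
  }
}
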
Code example source: org.apache.hadoop/hadoop-mapred-test

/**
  * attempts to access tokenCache as from client
  */
 @Override
 public void map(IntWritable key, IntWritable value, Context context)
 throws IOException, InterruptedException {
  // get token storage and a key
  Credentials ts = context.getCredentials();
  byte[] key1 = ts.getSecretKey(new Text("alias1"));
  Collection<Token<? extends TokenIdentifier>> dts = ts.getAllTokens();
  int dts_size = 0;
  if(dts != null)
   dts_size = dts.size();

  if(dts_size != 2) { // one job token and one delegation token
   throw new RuntimeException("tokens are not available"); // fail the test
  }

  if(key1 == null || ts == null || ts.numberOfSecretKeys() != NUM_OF_KEYS) {
   throw new RuntimeException("secret keys are not available"); // fail the test
  } 
  super.map(key, value, context);
 }
}

Code example source: org.apache.hadoop/hadoop-common-test (the same fragment also appears in the ch.cern.hadoop/hadoop-common and com.github.jiayuhan-it/hadoop-common artifacts; those duplicates are omitted here)

// truncated fragment from a Credentials serialization test: after the
// credentials are read back into `ts`, the secret-key count must match the
// size of the original key map `m`, and each key is fetched for comparison
assertEquals(mapLen, ts.numberOfSecretKeys()); // call head reconstructed; the original may carry an assertion message
for(Text a : m.keySet()) {
 byte [] kTS = ts.getSecretKey(a);
 // ... (byte-wise comparison against the original key is elided in the snippet)

Code example source: org.apache.tez/tez-runtime-internals

/**
 * Decide whether the existing child UGI can be re-used for the new task.
 *
 * @param containerTask
 *          the new task specification. Must be a valid task
 * @param childUGI
 *          the old UGI instance being used
 * @return the existing childUGI if the credentials are unchanged, otherwise
 *         a freshly created UGI carrying the updated credentials
 */
UserGroupInformation handleNewTaskCredentials(ContainerTask containerTask,
  UserGroupInformation childUGI) {
 // Re-use the UGI only if the Credentials have not changed.
 Preconditions.checkState(!containerTask.shouldDie());
 Preconditions.checkState(containerTask.getTaskSpec() != null);
 if (containerTask.haveCredentialsChanged()) {
  Credentials taskCreds = containerTask.getCredentials();
  if (taskCreds != null) {
   LOG.info("Refreshing UGI since Credentials have changed. Credentials : #Tokens=" +
     taskCreds.numberOfTokens() + ", #SecretKeys="
     + taskCreds.numberOfSecretKeys());
   childUGI = UserGroupInformation.createRemoteUser(user);
   childUGI.addCredentials(containerTask.getCredentials());
  } else {
   LOG.info("Not loading any credentials, since no credentials provided");
  }
 }
 return childUGI;
}
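
The pattern here follows from the UserGroupInformation API: tokens and secret keys can be added to a UGI but not selectively removed, so when the credentials change the code creates a fresh remote-user UGI and attaches the new credentials to it rather than mutating the old instance.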

Code example source: org.apache.tez/tez-dag

// truncated fragment: the tail of a log statement reporting how many secret
// keys are passed along for NodeManager (NM) use in the common
// ContainerLaunchContext (CLC); the head of the statement is not shown
+ credentials.numberOfSecretKeys() + " secret keys for NM use for launching container in common CLC");

Code example source: org.apache.hadoop/hadoop-mapreduce-client-app

// the LOG.info head below is reconstructed from the surrounding Hadoop
// source (an assumption; only the tail appeared in the original snippet)
LOG.info("Adding #" + credentials.numberOfTokens() + " tokens and #"
  + credentials.numberOfSecretKeys()
  + " secret keys for NM use for launching container");
// taskCredentials starts as a copy of the job credentials (copy constructor)
Credentials taskCredentials = new Credentials(credentials);

Code example source: ch.cern.hadoop/hadoop-common (an identical fragment in com.github.jiayuhan-it/hadoop-common is omitted here)

// fragment from a UserGroupInformation test: a secret key attached to the
// UGI's credentials is retrievable, and the key count reflects it
Credentials ugiCreds = ugi.getCredentials();
assertSame(secretKey, ugiCreds.getSecretKey(secretName));
assertEquals(1, ugiCreds.numberOfSecretKeys());

Code example source: com.github.jiayuhan-it/hadoop-common (an identical mergeAll test in ch.cern.hadoop/hadoop-common is omitted here)

@Test
public void mergeAll() {
 Credentials creds = new Credentials();
 creds.addToken(service[0], token[0]);
 creds.addToken(service[1], token[1]);
 creds.addSecretKey(secret[0], secret[0].getBytes());
 creds.addSecretKey(secret[1], secret[1].getBytes());
 
 Credentials credsToAdd = new Credentials();
 // one duplicate with different value, one new
 credsToAdd.addToken(service[0], token[3]);
 credsToAdd.addToken(service[2], token[2]);
 credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
 credsToAdd.addSecretKey(secret[2], secret[2].getBytes());
 
 creds.mergeAll(credsToAdd);
 assertEquals(3, creds.numberOfTokens());
 assertEquals(3, creds.numberOfSecretKeys());
 // existing token & secret should not be overwritten
 assertEquals(token[0], creds.getToken(service[0]));
 assertEquals(secret[0], new Text(creds.getSecretKey(secret[0])));
 // non-duplicate token & secret should be present
 assertEquals(token[1], creds.getToken(service[1]));
 assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
 // new token & secret should be added
 assertEquals(token[2], creds.getToken(service[2]));
 assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
}

Code example source: com.github.jiayuhan-it/hadoop-common (an identical addAll test in ch.cern.hadoop/hadoop-common is omitted here)

@Test
public void addAll() {
 Credentials creds = new Credentials();
 creds.addToken(service[0], token[0]);
 creds.addToken(service[1], token[1]);
 creds.addSecretKey(secret[0], secret[0].getBytes());
 creds.addSecretKey(secret[1], secret[1].getBytes());
 Credentials credsToAdd = new Credentials();
 // one duplicate with different value, one new
 credsToAdd.addToken(service[0], token[3]);
 credsToAdd.addToken(service[2], token[2]);
 credsToAdd.addSecretKey(secret[0], secret[3].getBytes());
 credsToAdd.addSecretKey(secret[2], secret[2].getBytes());
 
 creds.addAll(credsToAdd);
 assertEquals(3, creds.numberOfTokens());
 assertEquals(3, creds.numberOfSecretKeys());
 // existing token & secret should be overwritten
 assertEquals(token[3], creds.getToken(service[0]));
 assertEquals(secret[3], new Text(creds.getSecretKey(secret[0])));
 // non-duplicate token & secret should be present
 assertEquals(token[1], creds.getToken(service[1]));
 assertEquals(secret[1], new Text(creds.getSecretKey(secret[1])));
 // new token & secret should be added
 assertEquals(token[2], creds.getToken(service[2]));
 assertEquals(secret[2], new Text(creds.getSecretKey(secret[2])));
}

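Taken together, the two tests above pin down the difference between the merge operations: addAll overwrites an existing token or secret key when an alias collides, while mergeAll keeps the existing entry. A compact sketch of that contract, with alias and values invented for illustration:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class MergeSemanticsDemo {
  public static void main(String[] args) {
    Credentials base = new Credentials();
    base.addSecretKey(new Text("alias"), "old".getBytes());

    Credentials incoming = new Credentials();
    incoming.addSecretKey(new Text("alias"), "new".getBytes());

    base.addAll(incoming); // colliding alias is overwritten
    System.out.println(new String(base.getSecretKey(new Text("alias")))); // prints "new"

    // mergeAll, by contrast, would have left the existing "old" value in
    // place; either way the alias is counted once
    System.out.println(base.numberOfSecretKeys()); // prints 1
  }
}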