org.apache.hadoop.hive.shims.Utils.getUGI()方法的使用及代码示例

x33g5p2x  于2022-02-01 转载在 其他  
字(10.8k)|赞(0)|评价(0)|浏览(144)

本文整理了Java中org.apache.hadoop.hive.shims.Utils.getUGI()方法的一些代码示例,展示了Utils.getUGI()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Utils.getUGI()方法的具体详情如下:
包路径:org.apache.hadoop.hive.shims.Utils
类名称:Utils
方法名:getUGI

Utils.getUGI介绍

暂无

代码示例

代码示例来源:origin: apache/hive

/**
 * Resolves the name of the user this process is running as.
 *
 * @return the user name set in hadoop.job.ugi param or the current user from System
 * @throws IOException if the current user cannot be logged in
 */
public String getUser() throws IOException {
 try {
  // Delegate UGI resolution to the shim layer and report its user name.
  return Utils.getUGI().getUserName();
 } catch (LoginException e) {
  // Wrap so callers only have to handle IOException.
  throw new IOException(e);
 }
}

代码示例来源:origin: apache/hive

/**
 * Enables unrestricted impersonation for the current short user name by
 * wildcarding its hadoop.proxyuser groups and hosts entries in {@code conf}.
 */
private void configureImpersonation(Configuration conf) {
 final String shortName;
 try {
  shortName = Utils.getUGI().getShortUserName();
 } catch (Exception e) {
  String msg = "Cannot obtain username: " + e;
  throw new IllegalStateException(msg, e);
 }
 // "*" permits the user to proxy for any group from any host.
 conf.set("hadoop.proxyuser." + shortName + ".groups", "*");
 conf.set("hadoop.proxyuser." + shortName + ".hosts", "*");
}

代码示例来源:origin: apache/hive

/**
 * Builds a cache key from the metastore URIs, the current UGI, the given
 * conf and the calling thread id.
 */
private HiveClientCacheKey(HiveConf hiveConf, final int threadId) throws IOException, LoginException {
 ugi = Utils.getUGI();
 this.hiveConf = hiveConf;
 this.threadId = threadId;
 this.metaStoreURIs = hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS);
}

代码示例来源:origin: apache/hive

/**
 * Constructs the key identifying a cached client: metastore URIs plus the
 * current UGI, the supplied conf and the thread id.
 */
private HiveClientCacheKey(HiveConf hiveConf, final int threadId) throws IOException, LoginException {
 this.hiveConf = hiveConf;
 this.threadId = threadId;
 this.metaStoreURIs = hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS);
 // May throw IOException/LoginException if no user can be resolved.
 ugi = Utils.getUGI();
}

代码示例来源:origin: apache/hive

/**
 * Returns the current UGI on the stack
 *
 * @return UserGroupInformation
 *
 * @throws HiveSQLException if the UGI cannot be resolved
 */
private UserGroupInformation getCurrentUGI() throws HiveSQLException {
 final UserGroupInformation ugi;
 try {
  ugi = Utils.getUGI();
 } catch (Exception e) {
  // Normalize any failure into the service-level exception type.
  throw new HiveSQLException("Unable to get current user", e);
 }
 return ugi;
}

代码示例来源:origin: apache/hive

/**
 * Performs a single copy pass over {@code srcList}, choosing between a
 * regular filesystem copy and DistCp, and flags whether the copy must run
 * as the configured privileged user.
 */
private void doCopyOnce(FileSystem sourceFs, List<Path> srcList,
            FileSystem destinationFs, Path destination,
            boolean useRegularCopy) throws IOException, LoginException {
 String currentUser = Utils.getUGI().getShortUserName();
 // Privilege escalation is needed only when a copy-as user is configured
 // and it differs from the user we are running as.
 boolean usePrivilegedUser = copyAsUser != null && !currentUser.equals(copyAsUser);
 if (!useRegularCopy) {
  doDistCpCopyOnce(sourceFs, srcList, destination, usePrivilegedUser);
 } else {
  doRegularCopyOnce(sourceFs, srcList, destinationFs, destination, usePrivilegedUser);
 }
}

代码示例来源:origin: apache/hive

/**
 * Checks whether the current UGI's user and groups are permitted to perform
 * {@code action} on the file described by {@code stat}.
 */
public static void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
  throws IOException, AccessControlException, LoginException {
 // Permissions are evaluated against the current UGI's identity.
 UserGroupInformation currentUgi = Utils.getUGI();
 String shortName = currentUgi.getShortUserName();
 DefaultFileAccess.checkFileAccess(
   fs, stat, action, shortName, Arrays.asList(currentUgi.getGroupNames()));
}

代码示例来源:origin: apache/hive

/**
 * Captures the configuration and derives the authenticated user name and
 * group list from the current UGI.
 */
@Override
public void setConf(Configuration conf) {
 this.conf = conf;
 final UserGroupInformation ugi;
 try {
  ugi = Utils.getUGI();
 } catch (Exception e) {
  throw new RuntimeException(e);
 }
 if (ugi == null) {
  throw new RuntimeException(
    "Can not initialize HadoopDefaultAuthenticator.");
 }
 this.userName = ugi.getShortUserName();
 // Group names may legitimately be absent; leave groupNames untouched then.
 String[] groups = ugi.getGroupNames();
 if (groups != null) {
  this.groupNames = Arrays.asList(groups);
 }
}

代码示例来源:origin: apache/drill

/**
 * createTezDir creates a temporary directory in the scratchDir folder to
 * be used with Tez. Assumes scratchDir exists.
 */
public Path createTezDir(Path scratchDir, Configuration conf)
  throws IOException {
 // Fallback user name; normally replaced by the UGI's short name below.
 String userName = System.getProperty("user.name");
 try {
  userName = Utils.getUGI().getShortUserName();
 } catch (LoginException e) {
  throw new IOException(e);
 }
 Path userScratchDir = new Path(scratchDir, userName);
 Path tezDir = getTezDir(userScratchDir);
 FileSystem fs = tezDir.getFileSystem(conf);
 LOG.debug("TezDir path set " + tezDir + " for user: " + userName);
 // The user name is part of the scratch path, so the directory needs no
 // extra permissions beyond the defaults.
 fs.mkdirs(tezDir);
 return tezDir;
}

代码示例来源:origin: apache/hive

/**
 * createTezDir creates a temporary directory in the scratchDir folder to
 * be used with Tez. Assumes scratchDir exists.
 */
public Path createTezDir(Path scratchDir, Configuration conf)
  throws IOException {
 // Fallback user name; normally replaced by the UGI's short name below.
 String userName = System.getProperty("user.name");
 try {
  userName = Utils.getUGI().getShortUserName();
 } catch (LoginException e) {
  throw new IOException(e);
 }
 Path userScratchDir = new Path(scratchDir, userName);
 Path tezDir = getTezDir(userScratchDir);
 boolean rpcQueryPlan = HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN);
 if (!rpcQueryPlan) {
  // When the query plan is shipped over RPC, creating the directory is not
  // necessary; otherwise make sure it exists. The user name in the path
  // already isolates users, so no extra permissions are set.
  FileSystem fs = tezDir.getFileSystem(conf);
  LOG.debug("TezDir path set " + tezDir + " for user: " + userName);
  fs.mkdirs(tezDir);
 }
 return tezDir;
}

代码示例来源:origin: apache/drill

/**
 * Stores the configuration and populates the user name / group list for
 * this authenticator from the current UGI.
 */
@Override
public void setConf(Configuration conf) {
 this.conf = conf;
 UserGroupInformation ugi;
 try {
  ugi = Utils.getUGI();
 } catch (Exception e) {
  throw new RuntimeException(e);
 }
 if (ugi == null) {
  throw new RuntimeException("Can not initialize HadoopDefaultAuthenticator.");
 }
 userName = ugi.getShortUserName();
 // Only populate groups when the UGI actually reports any.
 if (ugi.getGroupNames() != null) {
  groupNames = Arrays.asList(ugi.getGroupNames());
 }
}

代码示例来源:origin: apache/hive

/**
 * Returns (creating it if necessary) the per-user ".hiveJars" staging
 * directory under HIVE_USER_INSTALL_DIR.
 *
 * @param conf configuration used to resolve the install dir and FileSystem
 * @return path to destination directory on hdfs
 * @throws LoginException if we are unable to figure user information
 * @throws IOException when any dfs operation fails.
 */
public Path getDefaultDestDir(Configuration conf) throws LoginException, IOException {
 UserGroupInformation ugi = Utils.getUGI();
 String userName = ugi.getShortUserName();
 String userPathStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_USER_INSTALL_DIR);
 Path userPath = new Path(userPathStr);
 FileSystem fs = userPath.getFileSystem(conf);
 // Build the per-user dir with the Path(parent, child) constructor instead
 // of string concatenation so separators are handled uniformly.
 Path hdfsDirPath = new Path(userPath, userName);
 try {
  FileStatus fstatus = fs.getFileStatus(hdfsDirPath);
  // isDirectory() replaces the deprecated FileStatus.isDir(), so the
  // @SuppressWarnings("deprecation") is no longer needed.
  if (!fstatus.isDirectory()) {
   throw new IOException(ErrorMsg.INVALID_DIR.format(hdfsDirPath.toString()));
  }
 } catch (FileNotFoundException e) {
  // directory does not exist, create it
  fs.mkdirs(hdfsDirPath);
 }
 Path retPath = new Path(hdfsDirPath, ".hiveJars");
 fs.mkdirs(retPath);
 return retPath;
}

代码示例来源:origin: apache/hive

/**
 * Stores the delegation token string into the current UGI under the
 * HS2 client-token alias and echoes it to stdout.
 */
private static void storeTokenInJobConf(String tokenStr) throws Exception {
 SessionUtils.setTokenStr(
   Utils.getUGI(), tokenStr, HiveAuthConstants.HS2_CLIENT_TOKEN);
 System.out.println("Stored token " + tokenStr);
}

代码示例来源:origin: apache/hive

/**
 * Checks if a given path has read-only access permissions.
 *
 * @param path The path to check for read-only permissions.
 * @return True if the path is read-only; False otherwise.
 * @throws HiveException If an error occurs while checking file permissions.
 */
private boolean isPathReadOnly(Path path) throws HiveException {
 HiveConf conf = SessionState.get().getConf();
 try {
  FileSystem fs = path.getFileSystem(conf);
  UserGroupInformation ugi = Utils.getUGI();
  FileStatus status = fs.getFileStatus(path);
  // Probe WRITE access; only an AccessControlException below is taken to
  // mean the location is read-only.
  FileUtils.checkFileAccessWithImpersonation(fs, status, FsAction.WRITE, ugi.getUserName());
  return false; // write allowed, so not read-only
 } catch (AccessControlException e) {
  // AccessControlException may have other causes, but we treat any of
  // them as the path being read-only.
  return true;
 } catch (Exception e) {
  throw new HiveException("Unable to determine if " + path + " is read only: " + e, e);
 }
}

代码示例来源:origin: apache/hive

/**
 * Resolves the session user from the current UGI, records the doAs setting
 * and marks the session open.
 */
@Override
public void open() throws LoginException, IOException {
 user = Utils.getUGI().getShortUserName();
 doAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
 setOpen(true);
}

代码示例来源:origin: apache/hive

// Runs the delimited-input transaction batch commit scenario as the
// current user's UGI (obtained via Utils.getUGI()).
@Test
public void testTransactionBatchCommit_DelimitedUGI() throws Exception {
 testTransactionBatchCommit_Delimited(Utils.getUGI());
}
private void testTransactionBatchCommit_Delimited(UserGroupInformation ugi) throws Exception {

代码示例来源:origin: apache/hive

// Runs the regex-input transaction batch commit scenario as the current
// user's UGI (obtained via Utils.getUGI()).
@Test
public void testTransactionBatchCommit_RegexUGI() throws Exception {
 testTransactionBatchCommit_Regex(Utils.getUGI());
}
private void testTransactionBatchCommit_Regex(UserGroupInformation ugi) throws Exception {

代码示例来源:origin: apache/hive

/**
 * Captures the execution context handed to hooks: the query plan and state,
 * derived conf/inputs/outputs/lineage, the current UGI and caller identity.
 */
public HookContext(QueryPlan queryPlan, QueryState queryState,
  Map<String, ContentSummary> inputPathToContentSummary, String userName, String ipAddress,
  String hiveInstanceAddress, String operationId, String sessionId, String threadId,
  boolean isHiveServerQuery, PerfLogger perfLogger, QueryInfo queryInfo) throws Exception {
 // Plain parameter copies.
 this.queryPlan = queryPlan;
 this.queryState = queryState;
 this.inputPathToContentSummary = inputPathToContentSummary;
 this.userName = userName;
 this.ipAddress = ipAddress;
 this.hiveInstanceAddress = hiveInstanceAddress;
 this.operationId = operationId;
 this.sessionId = sessionId;
 this.threadId = threadId;
 this.isHiveServerQuery = isHiveServerQuery;
 this.perfLogger = perfLogger;
 this.queryInfo = queryInfo;
 // Values derived from the plan, the query state and the environment.
 this.conf = queryState.getConf();
 completeTaskList = new ArrayList<>();
 inputs = queryPlan.getInputs();
 outputs = queryPlan.getOutputs();
 ugi = Utils.getUGI();
 linfo = queryState.getLineageState().getLineageInfo();
 depMap = queryState.getLineageState().getIndex();
}

代码示例来源:origin: apache/hive

// Verifies that when the current UGI uses PROXY authentication and the MR
// task is submitted via a child JVM, the spawned child's environment
// contains HADOOP_PROXY_USER set to the real user name.
@Test
public void mrTaskSumbitViaChildWithImpersonation() throws IOException, LoginException {
 // Force the impersonation code path in MapRedTask.
 Utils.getUGI().setAuthenticationMethod(PROXY);
 Context ctx = Mockito.mock(Context.class);
 when(ctx.getLocalTmpPath()).thenReturn(new Path(System.getProperty("java.io.tmpdir")));
 DriverContext dctx = new DriverContext(ctx);
 QueryState queryState = new QueryState.Builder().build();
 HiveConf conf= queryState.getConf();
 // SUBMITVIACHILD makes the task spawn a separate process instead of
 // running in-JVM, which is what this test inspects.
 conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
 MapredWork mrWork = new MapredWork();
 mrWork.setMapWork(Mockito.mock(MapWork.class));
 MapRedTask mrTask = Mockito.spy(new MapRedTask());
 mrTask.setWork(mrWork);
 mrTask.initialize(queryState, null, dctx, null);
 // Stub the job-exec helper so no real child process progress is tracked.
 mrTask.jobExecHelper = Mockito.mock(HadoopJobExecHelper.class);
 when(mrTask.jobExecHelper.progressLocal(Mockito.any(Process.class), Mockito.anyString())).thenReturn(0);
 mrTask.execute(dctx);
 // Capture the environment passed to spawn() and assert the proxy-user
 // variable is present.
 ArgumentCaptor<String[]> captor = ArgumentCaptor.forClass(String[].class);
 verify(mrTask).spawn(Mockito.anyString(), Mockito.anyString(), captor.capture());
 String expected = "HADOOP_PROXY_USER=" + Utils.getUGI().getUserName();
 Assert.assertTrue(Arrays.asList(captor.getValue()).contains(expected));
}

代码示例来源:origin: apache/hive

// Verifies that CopyUtils.doCopy surfaces an IOException when the
// underlying DistCp run reports failure (FileUtils.distCp returns false).
@Test(expected = IOException.class)
 public void shouldThrowExceptionOnDistcpFailure() throws Exception {
  Path destination = mock(Path.class);
  Path source = mock(Path.class);
  FileSystem fs = mock(FileSystem.class);
  List<Path> srcPaths = Arrays.asList(source, source);
  HiveConf conf = mock(HiveConf.class);
  CopyUtils copyUtils = Mockito.spy(new CopyUtils(null, conf));

  // Static mocks so FileUtils.distCp and Utils.getUGI can be stubbed.
  mockStatic(FileUtils.class);
  mockStatic(Utils.class);
  when(destination.getFileSystem(same(conf))).thenReturn(fs);
  when(source.getFileSystem(same(conf))).thenReturn(fs);
  // Simulate a failed DistCp run.
  when(FileUtils.distCp(same(fs), anyListOf(Path.class), same(destination),
             anyBoolean(), eq(null), same(conf),
             same(ShimLoader.getHadoopShims())))
    .thenReturn(false);
  when(Utils.getUGI()).thenReturn(mock(UserGroupInformation.class));
  // Force the DistCp branch instead of a regular copy.
  doReturn(false).when(copyUtils).regularCopy(same(fs), same(fs), anyListOf(ReplChangeManager.FileInfo.class));

  copyUtils.doCopy(destination, srcPaths);
 }
}

相关文章