com.github.sakserv.minicluster.util.WindowsLibsUtils.setHadoopHome()方法的使用及代码示例

x33g5p2x  于2022-02-03 转载在 其他  
字(11.2k)|赞(0)|评价(0)|浏览(128)

本文整理了Java中com.github.sakserv.minicluster.util.WindowsLibsUtils.setHadoopHome()方法的一些代码示例,展示了WindowsLibsUtils.setHadoopHome()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。WindowsLibsUtils.setHadoopHome()方法的具体详情如下:
包路径:com.github.sakserv.minicluster.util.WindowsLibsUtils
类名称:WindowsLibsUtils
方法名:setHadoopHome

WindowsLibsUtils.setHadoopHome介绍

暂无

代码示例

代码示例来源:origin: sakserv/hadoop-mini-clusters

  1. @Override
  2. public void configure() throws Exception {
  3. if(null != hdfsEnableRunningUserAsProxyUser && hdfsEnableRunningUserAsProxyUser) {
  4. hdfsConfig.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*");
  5. hdfsConfig.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*");
  6. }
  7. hdfsConfig.setBoolean("dfs.permissions", hdfsEnablePermissions);
  8. System.setProperty("test.build.data", hdfsTempDir);
  9. // Handle Windows
  10. WindowsLibsUtils.setHadoopHome();
  11. }

代码示例来源:origin: com.github.sakserv/hadoop-mini-clusters-hbase

@Override
public void configure() throws Exception {
    // Delegate the actual setup to the overload that takes the HBase configuration.
    configure(hbaseConfiguration);
    // Handle Windows: set HADOOP_HOME so the bundled native libs/winutils resolve.
    WindowsLibsUtils.setHadoopHome();
}

代码示例来源:origin: sakserv/hadoop-mini-clusters

@Override
public void configure() throws Exception {
    // Handle Windows: set HADOOP_HOME so the bundled native libs/winutils resolve.
    WindowsLibsUtils.setHadoopHome();
    // Pin every ResourceManager endpoint to the values configured on this instance.
    configuration.set(YarnConfiguration.RM_ADDRESS, resourceManagerAddress);
    configuration.set(YarnConfiguration.RM_HOSTNAME, resourceManagerHostname);
    configuration.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, resourceManagerSchedulerAddress);
    configuration.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, resourceManagerResourceTrackerAddress);
    configuration.set(YarnConfiguration.RM_WEBAPP_ADDRESS, resourceManagerWebappAddress);
    // Keep the mini cluster on the configured ports instead of random ephemeral ones.
    configuration.set(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, "true");
    if (getUseInJvmContainerExecutor()) {
        // In-JVM container executor: containers run inside this process.
        configuration.set(YarnConfiguration.NM_CONTAINER_EXECUTOR, inJvmContainerExecutorClass);
        // Explicit filesystem implementations — presumably required by the
        // in-JVM executor's classloading; verify against the executor's docs.
        configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
    }
}

代码示例来源:origin: jetoile/hadoop-unit

@Override
public void configure() throws Exception {
    // Handle Windows: set HADOOP_HOME so the bundled native libs/winutils resolve.
    WindowsLibsUtils.setHadoopHome();
    // Oozie has very particular naming conventions for these directories, don't change
    fullOozieHomeDir = oozieTestDir + "/" + oozieHomeDir;
    fullOozieConfDir = fullOozieHomeDir + "/conf";
    fullOozieHadoopConfDir = fullOozieConfDir + "/hadoop-conf";
    fullOozieActionDir = fullOozieConfDir + "/action-conf";
    // Set the system properties Oozie's Services/XTestCase machinery reads at startup.
    System.setProperty(Services.OOZIE_HOME_DIR, new File(fullOozieHomeDir).getAbsolutePath());
    System.setProperty(ConfigurationService.OOZIE_CONFIG_DIR, fullOozieConfDir);
    System.setProperty("oozielocal.log", fullOozieHomeDir + "/oozielocal.log");
    System.setProperty(XTestCase.OOZIE_TEST_JOB_TRACKER, oozieYarnResourceManagerAddress);
    System.setProperty(XTestCase.OOZIE_TEST_NAME_NODE, oozieHdfsDefaultFs);
    System.setProperty("oozie.test.db.host", "localhost");
    System.setProperty(ConfigurationService.OOZIE_DATA_DIR, fullOozieHomeDir);
    System.setProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
    if (oozieShareLibCreate) {
        // Point the workflow sharelib at the configured HDFS location.
        oozieConf.set("oozie.service.WorkflowAppService.system.libpath",
                oozieHdfsDefaultFs + oozieHdfsShareLibDir);
        oozieConf.set("use.system.libpath.for.mapreduce.and.pig.jobs", "true");
    }
    // Back Oozie's JPA store with an in-memory HSQLDB, created on first connect.
    oozieConf.set("oozie.service.JPAService.jdbc.driver", "org.hsqldb.jdbcDriver");
    oozieConf.set("oozie.service.JPAService.jdbc.url", "jdbc:hsqldb:mem:oozie-db;create=true");
    oozieConf.set(JPAService.CONF_CREATE_DB_SCHEMA, "true");
}

代码示例来源:origin: fr.jetoile.hadoop/hadoop-unit-hive

  1. private HiveConf buildHiveConf() {
  2. // Handle Windows
  3. WindowsLibsUtils.setHadoopHome();
  4. HiveConf hiveConf = new HiveConf();
  5. hiveConf.set("fs.defaultFS", "hdfs://" + configuration.getString(HdfsConfig.HDFS_NAMENODE_HOST_KEY) + ":" + configuration.getInt(HdfsConfig.HDFS_NAMENODE_PORT_KEY));
  6. // hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
  7. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
  8. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
  9. // hiveConf.set("hive.root.logger", "DEBUG,console");
  10. // hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  11. // hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  12. // hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  13. // System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
  14. // System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
  15. return hiveConf;
  16. }

代码示例来源:origin: jetoile/hadoop-unit

  1. private HiveConf buildHiveConf() {
  2. // Handle Windows
  3. WindowsLibsUtils.setHadoopHome();
  4. HiveConf hiveConf = new HiveConf();
  5. hiveConf.set("fs.defaultFS", "hdfs://" + configuration.getString(HdfsConfig.HDFS_NAMENODE_HOST_KEY) + ":" + configuration.getInt(HdfsConfig.HDFS_NAMENODE_PORT_KEY));
  6. // hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
  7. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
  8. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
  9. // hiveConf.set("hive.root.logger", "DEBUG,console");
  10. // hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  11. // hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  12. // hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  13. // System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
  14. // System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
  15. return hiveConf;
  16. }

代码示例来源:origin: fr.jetoile.hadoop/hadoop-unit-hive

  1. private HiveConf buildHiveConf() {
  2. // Handle Windows
  3. WindowsLibsUtils.setHadoopHome();
  4. HiveConf hiveConf = new HiveConf();
  5. hiveConf.set("fs.defaultFS", hdfsUri);
  6. // hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
  7. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
  8. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
  9. // hiveConf.set("hive.root.logger", "DEBUG,console");
  10. // hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  11. // hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  12. // hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  13. // System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
  14. // System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
  15. return hiveConf;
  16. }

代码示例来源:origin: jetoile/hadoop-unit

  1. private HiveConf buildHiveConf() {
  2. // Handle Windows
  3. WindowsLibsUtils.setHadoopHome();
  4. HiveConf hiveConf = new HiveConf();
  5. hiveConf.set("fs.defaultFS", hdfsUri);
  6. // hiveConf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
  7. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON.varname, "true");
  8. // hiveConf.set(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_THREADS.varname, "5");
  9. // hiveConf.set("hive.root.logger", "DEBUG,console");
  10. // hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  11. // hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  12. // hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  13. // System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
  14. // System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
  15. return hiveConf;
  16. }

代码示例来源:origin: sakserv/hadoop-mini-clusters

  1. @Override
  2. public void configure() throws Exception {
  3. hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
  4. "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
  5. hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
  6. hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
  7. "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
  8. hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
  9. hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
  10. hiveConf.set("datanucleus.schema.autoCreateTables", "true");
  11. hiveConf.set("hive.metastore.schema.verification", "false");
  12. // Handle Windows
  13. WindowsLibsUtils.setHadoopHome();
  14. }

代码示例来源:origin: com.github.sakserv/hadoop-mini-clusters-hiveserver2

  1. @Override
  2. public void configure() throws Exception {
  3. // Handle Windows
  4. WindowsLibsUtils.setHadoopHome();
  5. hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
  6. "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
  7. hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
  8. hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
  9. "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
  10. hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
  11. hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
  12. hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, String.valueOf(hiveServer2Hostname));
  13. hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, hiveServer2Port);
  14. hiveConf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, zookeeperConnectionString);
  15. hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, Boolean.TRUE);
  16. }

代码示例来源:origin: fr.jetoile.hadoop/hadoop-unit-hive

  1. @Override
  2. public void configure() throws Exception {
  3. hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
  4. "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
  5. hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
  6. hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
  7. "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
  8. // hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
  9. hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, hiveWarehouseDir);
  10. hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
  11. hiveConf.set("datanucleus.schema.autoCreateTables", "true");
  12. hiveConf.set("hive.metastore.schema.verification", "false");
  13. // Handle Windows
  14. WindowsLibsUtils.setHadoopHome();
  15. }

代码示例来源:origin: jetoile/hadoop-unit

  1. @Override
  2. public void configure() throws Exception {
  3. hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
  4. "thrift://" + hiveMetastoreHostname + ":" + hiveMetastorePort);
  5. hiveConf.setVar(HiveConf.ConfVars.SCRATCHDIR, hiveScratchDir);
  6. hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
  7. "jdbc:derby:;databaseName=" + hiveMetastoreDerbyDbDir + ";create=true");
  8. // hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, new File(hiveWarehouseDir).getAbsolutePath());
  9. hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, hiveWarehouseDir);
  10. hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_IN_TEST, true);
  11. hiveConf.set("datanucleus.schema.autoCreateTables", "true");
  12. hiveConf.set("hive.metastore.schema.verification", "false");
  13. // Handle Windows
  14. WindowsLibsUtils.setHadoopHome();
  15. }

相关文章

WindowsLibsUtils类方法