org.apache.hadoop.mapreduce.Mapper.setup()方法的使用及代码示例

x33g5p2x  于2022-01-25 转载在 其他  
字(7.1k)|赞(0)|评价(0)|浏览(181)

本文整理了Java中org.apache.hadoop.mapreduce.Mapper.setup()方法的一些代码示例,展示了Mapper.setup()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Mapper.setup()方法的具体详情如下:
包路径:org.apache.hadoop.mapreduce.Mapper
类名称:Mapper
方法名:setup

Mapper.setup介绍

[英]Called once at the beginning of the task.
[中]在任务开始时调用一次。

代码示例

代码示例来源:origin: apache/hbase

  1. @Override
  2. protected void setup(Context context) throws IOException,
  3. InterruptedException {
  4. super.setup(context);
  5. Configuration conf = context.getConfiguration();
  6. keyLength = conf.getInt(KEYLEN_CONF, KEYLEN_DEFAULT);
  7. valLength = conf.getInt(VALLEN_CONF, VALLEN_DEFAULT);
  8. multiTableMapper = conf.getBoolean(HFileOutputFormat2.MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY,
  9. false);
  10. if (multiTableMapper) {
  11. tables = TABLE_NAMES;
  12. } else {
  13. tables = new TableName[]{TABLE_NAMES[0]};
  14. }
  15. }

代码示例来源:origin: apache/hbase

  1. @Override
  2. protected void setup(Context context) throws IOException,
  3. InterruptedException {
  4. super.setup(context);
  5. Configuration conf = context.getConfiguration();
  6. keyLength = conf.getInt(KEYLEN_CONF, KEYLEN_DEFAULT);
  7. valLength = conf.getInt(VALLEN_CONF, VALLEN_DEFAULT);
  8. multiTableMapper = conf.getBoolean(HFileOutputFormat2.MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY,
  9. false);
  10. if (multiTableMapper) {
  11. tables = TABLE_NAMES;
  12. } else {
  13. tables = new TableName[]{TABLE_NAMES[0]};
  14. }
  15. }

代码示例来源:origin: apache/incubator-gobblin

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. Map<String, String> configMap = Maps.newHashMap();
  4. SharedResourcesBrokerFactory.addBrokerKeys(configMap, context.getConfiguration());
  5. this.broker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(ConfigFactory.parseMap(configMap),
  6. SimpleScopeType.GLOBAL.defaultScopeInstance());
  7. super.setup(context);
  8. }

代码示例来源:origin: apache/ignite

  1. /** {@inheritDoc} */
  2. @Override protected void setup(Context ctx) throws IOException, InterruptedException {
  3. super.setup(ctx);
  4. wasSetUp = true;
  5. HadoopErrorSimulator.instance().onMapSetup();
  6. }

代码示例来源:origin: thinkaurelius/titan

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. org.apache.hadoop.conf.Configuration hadoopConf = DEFAULT_COMPAT.getContextConfiguration(context);
  5. ModifiableHadoopConfiguration scanConf = ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConf);
  6. job = getJob(scanConf);
  7. metrics = new HadoopContextScanMetrics(context);
  8. Configuration graphConf = getTitanConfiguration(context);
  9. finishSetup(scanConf, graphConf);
  10. }

代码示例来源:origin: apache/metron

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. filter = PcapFilters.valueOf(context.getConfiguration().get(PcapFilterConfigurator.PCAP_FILTER_NAME_CONF)).create();
  5. filter.configure(context.getConfiguration());
  6. start = Long.parseUnsignedLong(context.getConfiguration().get(START_TS_CONF));
  7. end = Long.parseUnsignedLong(context.getConfiguration().get(END_TS_CONF));
  8. }

代码示例来源:origin: apache/phoenix

  1. @Override
  2. protected void setup(final Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. final Configuration configuration = context.getConfiguration();
  5. try {
  6. indxTblColumnMetadata = PhoenixConfigurationUtil.getUpsertColumnMetadataList(context.getConfiguration());
  7. indxWritable.setColumnMetadata(indxTblColumnMetadata);
  8. preUpdateProcessor = PhoenixConfigurationUtil.loadPreUpsertProcessor(configuration);
  9. indexTableName = PhoenixConfigurationUtil.getPhysicalTableName(configuration);
  10. final Properties overrideProps = new Properties ();
  11. String scn = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
  12. String txScnValue = configuration.get(PhoenixConfigurationUtil.TX_SCN_VALUE);
  13. if(txScnValue==null) {
  14. overrideProps.put(PhoenixRuntime.BUILD_INDEX_AT_ATTRIB, scn);
  15. }
  16. connection = ConnectionUtil.getOutputConnection(configuration,overrideProps);
  17. connection.setAutoCommit(false);
  18. final String upsertQuery = PhoenixConfigurationUtil.getUpsertStatement(configuration);
  19. this.pStatement = connection.prepareStatement(upsertQuery);
  20. } catch (SQLException e) {
  21. throw new RuntimeException(e.getMessage());
  22. }
  23. }

代码示例来源:origin: apache/phoenix

  1. @Override
  2. protected void setup(final Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. final Configuration configuration = context.getConfiguration();
  5. writer = new DirectHTableWriter(configuration);

代码示例来源:origin: apache/phoenix

  1. @Override
  2. protected void setup(final Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. final Configuration configuration = context.getConfiguration();
  5. try {

代码示例来源:origin: apache/jena

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. this.tracing = LOG.isTraceEnabled();
  5. }

代码示例来源:origin: apache/jena

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. this.tracing = LOG.isTraceEnabled();
  5. }

代码示例来源:origin: apache/jena

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. this.tracing = LOG.isTraceEnabled();
  5. }

代码示例来源:origin: apache/jena

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. this.tracing = LOG.isTraceEnabled();
  5. }

代码示例来源:origin: apache/jena

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. this.tracing = LOG.isTraceEnabled();
  5. }

代码示例来源:origin: apache/jena

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. this.tracing = LOG.isTraceEnabled();
  5. }

代码示例来源:origin: apache/jena

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. this.invert = context.getConfiguration().getBoolean(RdfMapReduceConstants.FILTER_INVERT, this.invert);
  5. }

代码示例来源:origin: org.apache.mahout/mahout-core

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. Configuration conf = context.getConfiguration();
  5. this.maxShingleSize = conf.getInt(MAX_SHINGLE_SIZE, DEFAULT_MAX_SHINGLE_SIZE);
  6. this.emitUnigrams = conf.getBoolean(CollocDriver.EMIT_UNIGRAMS, CollocDriver.DEFAULT_EMIT_UNIGRAMS);
  7. if (log.isInfoEnabled()) {
  8. log.info("Max Ngram size is {}", this.maxShingleSize);
  9. log.info("Emit Unitgrams is {}", emitUnigrams);
  10. }
  11. }

代码示例来源:origin: org.apache.mahout/mahout-core

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. super.setup(context);
  4. String analyzerClassName = context.getConfiguration().get(DocumentProcessor.ANALYZER_CLASS,
  5. StandardAnalyzer.class.getName());
  6. try {
  7. analyzer = AnalyzerUtils.createAnalyzer(analyzerClassName);
  8. } catch (ClassNotFoundException e) {
  9. throw new IOException("Unable to create analyzer: " + analyzerClassName, e);
  10. }
  11. }
  12. }

代码示例来源:origin: alexholmes/hadoop-book

  1. @Override
  2. protected void setup(Context context)
  3. throws IOException, InterruptedException {
  4. super.setup(context);
  5. log.info("Input split = {}", context.getInputSplit());
  6. }

代码示例来源:origin: org.apache.mahout/mahout-core

  1. @Override
  2. protected void setup(Context context) throws IOException, InterruptedException {
  3. Configuration conf = context.getConfiguration();
  4. String priorClustersPath = conf.get(ClusterIterator.PRIOR_PATH_KEY);
  5. classifier = new ClusterClassifier();
  6. classifier.readFromSeqFiles(conf, new Path(priorClustersPath));
  7. policy = classifier.getPolicy();
  8. policy.update(classifier);
  9. super.setup(context);
  10. }

相关文章