This article collects code examples of the Java method org.apache.hadoop.mapreduce.Mapper.setup(), showing how Mapper.setup() is used in practice. The examples are extracted from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as solid references. Details of the Mapper.setup() method:
Package path: org.apache.hadoop.mapreduce.Mapper
Class name: Mapper
Method name: setup
Description: Called once at the beginning of the task.
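Before the collected examples, here is a minimal, self-contained sketch of the typical pattern (the class name GreetingMapper and the configuration key example.greeting are hypothetical, invented only for this illustration): read configuration once in setup() and reuse it in every map() call.

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class GreetingMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

  // Hypothetical configuration key, used only for this illustration.
  public static final String GREETING_CONF = "example.greeting";

  private String greeting;
  private final IntWritable one = new IntWritable(1);

  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    // setup() runs once per task, before any map() call, so one-time work
    // such as reading configuration belongs here rather than in map().
    greeting = context.getConfiguration().get(GREETING_CONF, "hello");
  }

  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    context.write(new Text(greeting + " " + value), one);
  }
}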
Code example source: apache/hbase
@Override
protected void setup(Context context) throws IOException,
    InterruptedException {
  super.setup(context);
  Configuration conf = context.getConfiguration();
  keyLength = conf.getInt(KEYLEN_CONF, KEYLEN_DEFAULT);
  valLength = conf.getInt(VALLEN_CONF, VALLEN_DEFAULT);
  multiTableMapper = conf.getBoolean(HFileOutputFormat2.MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY,
      false);
  if (multiTableMapper) {
    tables = TABLE_NAMES;
  } else {
    tables = new TableName[]{TABLE_NAMES[0]};
  }
}
Code example source: apache/incubator-gobblin
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  Map<String, String> configMap = Maps.newHashMap();
  SharedResourcesBrokerFactory.addBrokerKeys(configMap, context.getConfiguration());
  this.broker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(ConfigFactory.parseMap(configMap),
      SimpleScopeType.GLOBAL.defaultScopeInstance());
  super.setup(context);
}
Code example source: apache/ignite
/** {@inheritDoc} */
@Override protected void setup(Context ctx) throws IOException, InterruptedException {
  super.setup(ctx);
  wasSetUp = true;
  HadoopErrorSimulator.instance().onMapSetup();
}
Code example source: thinkaurelius/titan
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  org.apache.hadoop.conf.Configuration hadoopConf = DEFAULT_COMPAT.getContextConfiguration(context);
  ModifiableHadoopConfiguration scanConf = ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConf);
  job = getJob(scanConf);
  metrics = new HadoopContextScanMetrics(context);
  Configuration graphConf = getTitanConfiguration(context);
  finishSetup(scanConf, graphConf);
}
Code example source: apache/metron
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  filter = PcapFilters.valueOf(context.getConfiguration().get(PcapFilterConfigurator.PCAP_FILTER_NAME_CONF)).create();
  filter.configure(context.getConfiguration());
  start = Long.parseUnsignedLong(context.getConfiguration().get(START_TS_CONF));
  end = Long.parseUnsignedLong(context.getConfiguration().get(END_TS_CONF));
}
Code example source: apache/phoenix
@Override
protected void setup(final Context context) throws IOException, InterruptedException {
  super.setup(context);
  final Configuration configuration = context.getConfiguration();
  try {
    indxTblColumnMetadata = PhoenixConfigurationUtil.getUpsertColumnMetadataList(context.getConfiguration());
    indxWritable.setColumnMetadata(indxTblColumnMetadata);
    preUpdateProcessor = PhoenixConfigurationUtil.loadPreUpsertProcessor(configuration);
    indexTableName = PhoenixConfigurationUtil.getPhysicalTableName(configuration);
    final Properties overrideProps = new Properties();
    String scn = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
    String txScnValue = configuration.get(PhoenixConfigurationUtil.TX_SCN_VALUE);
    if (txScnValue == null) {
      overrideProps.put(PhoenixRuntime.BUILD_INDEX_AT_ATTRIB, scn);
    }
    connection = ConnectionUtil.getOutputConnection(configuration, overrideProps);
    connection.setAutoCommit(false);
    final String upsertQuery = PhoenixConfigurationUtil.getUpsertStatement(configuration);
    this.pStatement = connection.prepareStatement(upsertQuery);
  } catch (SQLException e) {
    throw new RuntimeException(e.getMessage());
  }
}
Code example source: apache/phoenix
@Override
protected void setup(final Context context) throws IOException, InterruptedException {
  super.setup(context);
  final Configuration configuration = context.getConfiguration();
  writer = new DirectHTableWriter(configuration);
  // ... (snippet truncated in the original article)
Code example source: apache/phoenix
@Override
protected void setup(final Context context) throws IOException, InterruptedException {
  super.setup(context);
  final Configuration configuration = context.getConfiguration();
  try {
    // ... (snippet truncated in the original article)
Code example source: apache/jena
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  this.tracing = LOG.isTraceEnabled();
}
Code example source: apache/jena
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  this.invert = context.getConfiguration().getBoolean(RdfMapReduceConstants.FILTER_INVERT, this.invert);
}
Code example source: org.apache.mahout/mahout-core
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  Configuration conf = context.getConfiguration();
  this.maxShingleSize = conf.getInt(MAX_SHINGLE_SIZE, DEFAULT_MAX_SHINGLE_SIZE);
  this.emitUnigrams = conf.getBoolean(CollocDriver.EMIT_UNIGRAMS, CollocDriver.DEFAULT_EMIT_UNIGRAMS);
  if (log.isInfoEnabled()) {
    log.info("Max Ngram size is {}", this.maxShingleSize);
    log.info("Emit Unitgrams is {}", emitUnigrams);
  }
}
Code example source: org.apache.mahout/mahout-core
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  String analyzerClassName = context.getConfiguration().get(DocumentProcessor.ANALYZER_CLASS,
      StandardAnalyzer.class.getName());
  try {
    analyzer = AnalyzerUtils.createAnalyzer(analyzerClassName);
  } catch (ClassNotFoundException e) {
    throw new IOException("Unable to create analyzer: " + analyzerClassName, e);
  }
}
Code example source: alexholmes/hadoop-book
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  super.setup(context);
  log.info("Input split = {}", context.getInputSplit());
}
Code example source: org.apache.mahout/mahout-core
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  String priorClustersPath = conf.get(ClusterIterator.PRIOR_PATH_KEY);
  classifier = new ClusterClassifier();
  classifier.readFromSeqFiles(conf, new Path(priorClustersPath));
  policy = classifier.getPolicy();
  policy.update(classifier);
  super.setup(context);
}