org.apache.hadoop.mapreduce.RecordReader.getCurrentKey()方法的使用及代码示例

x33g5p2x  于2022-01-28 转载在 其他  
字(6.7k)|赞(0)|评价(0)|浏览(123)

本文整理了Java中org.apache.hadoop.mapreduce.RecordReader.getCurrentKey方法的一些代码示例,展示了RecordReader.getCurrentKey的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。RecordReader.getCurrentKey方法的具体详情如下:
包路径:org.apache.hadoop.mapreduce.RecordReader
类名称:RecordReader
方法名:getCurrentKey

RecordReader.getCurrentKey介绍

[英]Get the current key
[中]获取当前键

代码示例

代码示例来源:origin: apache/ignite

/** {@inheritDoc} */
@Override public Object getCurrentKey() throws IOException, InterruptedException {
  // Prefer the wrapped Hadoop reader when one is present; otherwise fall
  // back to the key supplied by the local input source.
  return reader != null ? reader.getCurrentKey() : input.key();
}

代码示例来源:origin: apache/flink

/**
 * Reads the next key/value pair from the wrapped Hadoop {@code RecordReader}
 * into the reusable tuple.
 *
 * @param record tuple to populate with the current key ({@code f0}) and value ({@code f1})
 * @return the populated tuple, or {@code null} once the input is exhausted
 * @throws IOException if the underlying reader fails or the thread is interrupted
 */
@Override
public Tuple2<K, V> nextRecord(Tuple2<K, V> record) throws IOException {
  if (!this.fetched) {
    fetchNext();
  }
  if (!this.hasNext) {
    return null;
  }
  try {
    record.f0 = recordReader.getCurrentKey();
    record.f1 = recordReader.getCurrentValue();
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers further up the stack can
    // still observe the interruption after we convert it to IOException.
    Thread.currentThread().interrupt();
    throw new IOException("Could not get KeyValue pair.", e);
  }
  // Mark the pair as consumed so the next call fetches a fresh one.
  this.fetched = false;
  return record;
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * {@inheritDoc}.
 *
 * This method will throw a {@link ClassCastException} if type {@link #<D>} is not compatible
 * with type {@link #<K>} if keys are supposed to be read, or if it is not compatible with type
 * {@link #<V>} if values are supposed to be read.
 */
@Override
@SuppressWarnings("unchecked")
public D readRecord(@Deprecated D reuse) throws DataRecordException, IOException {
 try {
  if (this.recordReader.nextKeyValue()) {
   // Depending on configuration, surface either the key or the value
   // of the current pair as the emitted record.
   return this.readKeys ? (D) this.recordReader.getCurrentKey() : (D) this.recordReader.getCurrentValue();
  }
 } catch (InterruptedException ie) {
  // Re-assert the interrupt flag before wrapping, so the interruption
  // is not silently swallowed by the IOException conversion.
  Thread.currentThread().interrupt();
  throw new IOException(ie);
 }
 // Exhausted input: signal end-of-data with null.
 return null;
}

代码示例来源:origin: thinkaurelius/titan

/**
 * Advances the underlying reader until a record deserializes to a non-null
 * {@link TinkerVertex}; null results are skipped. Returns {@code false}
 * once the wrapped reader is exhausted.
 */
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    while (reader.nextKeyValue()) {
        // TODO titan05 integration -- the duplicate() call may be unnecessary
        final TinkerVertex candidate =
            deser.readHadoopVertex(reader.getCurrentKey(), reader.getCurrentValue());
        if (candidate == null) {
            continue; // deserialized to nothing usable; try the next record
        }
        vertex = new VertexWritable(candidate);
        //vertexQuery.filterRelationsOf(vertex); // TODO reimplement vertexquery filtering
        return true;
    }
    return false;
}

代码示例来源:origin: apache/incubator-gobblin

/**
 * Verifies that the record reader produced by {@code GobblinWorkUnitsInputFormat}
 * yields each path of the split in order, keyed by its position, and then
 * reports exhaustion.
 */
@Test
public void testRecordReader()
  throws Exception {
 GobblinWorkUnitsInputFormat.GobblinSplit split =
   new GobblinWorkUnitsInputFormat.GobblinSplit(Lists.newArrayList("/path1", "/path2"));
 GobblinWorkUnitsInputFormat inputFormat = new GobblinWorkUnitsInputFormat();
 TaskAttemptID attemptId = new TaskAttemptID("a", 1, TaskType.MAP, 1, 1);
 RecordReader<LongWritable, Text> reader =
   inputFormat.createRecordReader(split, new TaskAttemptContextImpl(new Configuration(), attemptId));

 reader.nextKeyValue();
 Assert.assertEquals(reader.getCurrentKey().get(), 0);
 Assert.assertEquals(reader.getCurrentValue().toString(), "/path1");
 reader.nextKeyValue();
 Assert.assertEquals(reader.getCurrentKey().get(), 1);
 Assert.assertEquals(reader.getCurrentValue().toString(), "/path2");
 Assert.assertFalse(reader.nextKeyValue());
}

代码示例来源:origin: apache/avro

key = recordReader.getCurrentKey();
value = recordReader.getCurrentValue();
  key == recordReader.getCurrentKey());
assertTrue("getCurrentValue() returned different values for the same record",
  value == recordReader.getCurrentValue());
key = recordReader.getCurrentKey();
value = recordReader.getCurrentValue();

代码示例来源:origin: apache/avro

key = recordReader.getCurrentKey();
value = recordReader.getCurrentValue();
  key == recordReader.getCurrentKey());
assertTrue("getCurrentValue() returned different values for the same record",
  value == recordReader.getCurrentValue());
key = recordReader.getCurrentKey();
value = recordReader.getCurrentValue();

代码示例来源:origin: apache/hbase

byte[] row = rr.getCurrentKey().get();
verifyRowFromMap(rr.getCurrentKey(), rr.getCurrentValue());
rowTracker.addRow(row);

代码示例来源:origin: larsgeorge/hbase-book

LOG.info(String.format("Fill: Collected %d samples from %d splits", counter, i));
 counter++;
 samples.add(ReflectionUtils.copy(job.getConfiguration(), reader.getCurrentKey(), null));
} else {
 if (ind != numSamples) {
  samples.set(ind, ReflectionUtils.copy(job.getConfiguration(),
   reader.getCurrentKey(), null));
  if (counter % 1000 == 0)
   LOG.info(String.format("Replace Random: Collected %d samples from %d splits", counter, i));

代码示例来源:origin: openimaj/openimaj

/** Delegates to the wrapped reader {@code rr} for the current key. */
@Override
public K getCurrentKey() throws IOException, InterruptedException {
    return rr.getCurrentKey();
}

代码示例来源:origin: org.openimaj/core-hadoop

/** Delegates to the wrapped reader {@code rr} for the current key. */
@Override
public K getCurrentKey() throws IOException, InterruptedException {
    return rr.getCurrentKey();
}

代码示例来源:origin: apache/jena

/** Returns the current key of the wrapped reader unchanged. */
@Override
public final LongWritable getCurrentKey() throws IOException, InterruptedException {
    LongWritable currentKey = this.reader.getCurrentKey();
    return currentKey;
}

代码示例来源:origin: kite-sdk/kite

/** Unwraps the delegate's current Avro-wrapped key and returns its datum. */
@Override
 public E getCurrentKey() throws IOException, InterruptedException {
  E datum = delegate.getCurrentKey().datum();
  return datum;
 }
}

代码示例来源:origin: com.google.cloud.bigdataoss/bigquery-connector

/**
 * Copies the delegate reader's current key and value into this reader's
 * fields and bumps the records-read counter. Must be invoked exactly once
 * per new key/value from the delegate, because the counter is incremented
 * unconditionally on every call.
 */
private void populateCurrentKeyValue() throws IOException, InterruptedException {
 this.currentKey = delegateReader.getCurrentKey();
 this.currentValue = delegateReader.getCurrentValue();
 recordsRead++;
}

代码示例来源:origin: cdapio/cdap

/**
 * Returns the delegate's current key unboxed to a {@link Long}, or
 * {@code null} when the delegate reports no current key.
 */
@Override
public Long getCurrentKey() throws IOException, InterruptedException {
 LongWritable key = delegate.getCurrentKey();
 if (key == null) {
  return null;
 }
 return key.get();
}

代码示例来源:origin: kite-sdk/kite

/**
 * Scans forward through the unfiltered reader and returns the first key
 * accepted by the predicate, or {@code null} once the input is exhausted.
 */
private E computeNextKey() throws IOException, InterruptedException {
 while (unfiltered.nextKeyValue()) {
  E candidate = unfiltered.getCurrentKey();
  if (!predicate.apply(candidate)) {
   continue; // rejected by the filter; keep scanning
  }
  return candidate;
 }
 return null;
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core

/**
 * Read the next k,v pair into the head of this object.
 *
 * Note: contrary to the original comment ("true iff exhausted"), this
 * returns {@code true} iff a pair WAS read — i.e. {@code false} exactly
 * when the underlying RecordReader is exhausted. Also note that key/value
 * are refreshed even when nextKeyValue() returned false, so their contents
 * are undefined once this method returns false.
 */
private boolean next() throws IOException, InterruptedException {
 empty = !rr.nextKeyValue();
 key = rr.getCurrentKey();
 value = rr.getCurrentValue();
 return !empty;
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

/**
 * Read the next k,v pair into the head of this object.
 *
 * Note: contrary to the original comment ("true iff exhausted"), this
 * returns {@code true} iff a pair WAS read — i.e. {@code false} exactly
 * when the underlying RecordReader is exhausted. Also note that key/value
 * are refreshed even when nextKeyValue() returned false, so their contents
 * are undefined once this method returns false.
 */
private boolean next() throws IOException, InterruptedException {
 empty = !rr.nextKeyValue();
 key = rr.getCurrentKey();
 value = rr.getCurrentValue();
 return !empty;
}

代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core

/**
 * Read the next k,v pair into the head of this object.
 *
 * Note: contrary to the original comment ("true iff exhausted"), this
 * returns {@code true} iff a pair WAS read — i.e. {@code false} exactly
 * when the underlying RecordReader is exhausted. Also note that key/value
 * are refreshed even when nextKeyValue() returned false, so their contents
 * are undefined once this method returns false.
 */
private boolean next() throws IOException, InterruptedException {
 empty = !rr.nextKeyValue();
 key = rr.getCurrentKey();
 value = rr.getCurrentValue();
 return !empty;
}

代码示例来源:origin: com.twitter.elephantbird/elephant-bird-pig

/**
 * Converts the reader's current key bytes into an object via the configured
 * key converter. The buffer contents are copied into a fresh array first —
 * presumably because the DataInputBuffer's backing array is reused between
 * records (TODO confirm against the reader implementation).
 */
private Object getCurrentKeyObject() throws IOException, InterruptedException {
 DataInputBuffer keyBuffer = (DataInputBuffer) reader.getCurrentKey();
 byte[] keyBytes = Arrays.copyOf(keyBuffer.getData(), keyBuffer.getLength());
 keyDataByteArray.set(keyBytes);
 return config.keyConverter.bytesToObject(keyDataByteArray);
}

相关文章