Usage and code examples for the org.apache.hadoop.record.Buffer.<init>() method

x33g5p2x · reposted 2022-01-17 · category: Other

This article collects Java code examples for the org.apache.hadoop.record.Buffer.<init>() method and shows how Buffer.<init>() is used in practice. The examples are drawn from selected projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of Buffer.<init>() are as follows:
Package: org.apache.hadoop.record
Class: Buffer
Method: <init> (constructor)

About Buffer.<init>

Create a zero-count sequence.

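As a quick orientation, here is a minimal sketch (not taken from any of the projects below) that exercises the two constructors used in the examples that follow, <init>() and <init>(byte[]), together with the basic accessors:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.record.Buffer;

public class BufferConstructorDemo {
  public static void main(String[] args) {
    // <init>() creates a zero-count sequence: no data, no capacity.
    Buffer empty = new Buffer();
    System.out.println(empty.getCount());    // 0
    System.out.println(empty.getCapacity()); // 0

    // <init>(byte[]) wraps an existing byte array as the buffer contents.
    byte[] bytes = "hadoop".getBytes(StandardCharsets.UTF_8);
    Buffer wrapped = new Buffer(bytes);
    System.out.println(wrapped.getCount());  // 6, the length of the wrapped array
  }
}
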
Code examples

Code example source: apache/chukwa

public void add(String key, String value) {
 synchronized (this) {
  if (this.mapFields == null) {
   this.mapFields = new TreeMap<String, Buffer>();
  }
 }
 this.mapFields.put(key, new Buffer(value.getBytes(Charset.forName("UTF-8"))));
}
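
The snippet above stores each field value as the UTF-8 bytes of the string, wrapped in a Buffer and keyed by field name. Reading such a value back needs only get() and getCount(); a minimal sketch (the map and key names here are illustrative, not taken from Chukwa):

import java.nio.charset.StandardCharsets;
import java.util.TreeMap;
import org.apache.hadoop.record.Buffer;

public class MapFieldRoundTrip {
  public static void main(String[] args) {
    TreeMap<String, Buffer> mapFields = new TreeMap<String, Buffer>();
    mapFields.put("hostname", new Buffer("host01".getBytes(StandardCharsets.UTF_8)));

    // get() returns the backing array; only the first getCount() bytes are valid data.
    Buffer buf = mapFields.get("hostname");
    String value = new String(buf.get(), 0, buf.getCount(), StandardCharsets.UTF_8);
    System.out.println(value); // host01
  }
}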

Code example source: ch.cern.hadoop/hadoop-common (essentially the same implementation also appears in org.jvnet.hudson.hadoop/hadoop-core, org.apache.hadoop/hadoop-streaming, com.github.jiayuhan-it/hadoop-common, io.hops/hadoop-common, com.facebook.hadoop/hadoop-core, and io.prestosql.hadoop/hadoop-apache; those duplicates are omitted here)

@Override
public Buffer readBuffer(final String tag) throws IOException {
 final int len = Utils.readVInt(in);
 final byte[] barr = new byte[len];
 in.readFully(barr);
 return new Buffer(barr);
}
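
The readBuffer implementation above decodes a buffer that was written as a variable-length-encoded length followed by the raw bytes. Below is a minimal round-trip sketch of that framing; it assumes the companion Utils.writeVInt(DataOutput, int) writer from the same org.apache.hadoop.record package:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.record.Buffer;
import org.apache.hadoop.record.Utils;

public class BufferVIntRoundTrip {
  public static void main(String[] args) throws IOException {
    Buffer original = new Buffer("payload".getBytes(StandardCharsets.UTF_8));

    // Write the count as a vint, then the valid bytes (assumed writer side of the framing).
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    Utils.writeVInt(out, original.getCount());
    out.write(original.get(), 0, original.getCount());
    out.flush();

    // Read it back exactly as the readBuffer implementation shown above does.
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
    int len = Utils.readVInt(in);
    byte[] barr = new byte[len];
    in.readFully(barr);
    Buffer copy = new Buffer(barr);
    System.out.println(copy.getCount()); // 7
  }
}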

Code example source: org.apache.hadoop/hadoop-common-test

private static void initBuffers(Record[] buffers) {
 final int BUFLEN = 32;
 for (int idx = 0; idx < buffers.length; idx++) {
  buffers[idx] = new RecBuffer();
  int buflen = rand.nextInt(BUFLEN);
  byte[] bytes = new byte[buflen];
  rand.nextBytes(bytes);
  ((RecBuffer)buffers[idx]).setData(new Buffer(bytes));
 }
}

Code example source: org.apache.hadoop/hadoop-common-test

/**
 * Test of getCount method, of class org.apache.hadoop.record.Buffer.
 */
public void testGetCount() {
 final Buffer instance = new Buffer();
 
 final int expResult = 0;
 final int result = instance.getCount();
 assertEquals("getSize failed", expResult, result);
}

Code example source: org.apache.hadoop.hive/hive-contrib (the same snippet also appears verbatim in edu.berkeley.cs.shark/hive-contrib, org.apache.hadoop/hadoop-streaming, and ch.cern.hadoop/hadoop-streaming; those duplicates are omitted here)

public Buffer readBuffer(String tag) throws IOException {
 in.skipType();
 return new Buffer(in.readBytes());
}

Code example source: org.apache.hadoop/hadoop-common-test

/**
 * Test of set method, of class org.apache.hadoop.record.Buffer.
 */
public void testSet() {
 final byte[] bytes = new byte[10];
 final Buffer instance = new Buffer();
 
 instance.set(bytes);
 
 assertEquals("set failed", bytes, instance.get());
}

Code example source: org.apache.hadoop/hadoop-common-test

/**
 * Test of getCapacity method, of class org.apache.hadoop.record.Buffer.
 */
public void testGetCapacity() {
 final Buffer instance = new Buffer();
 
 final int expResult = 0;
 final int result = instance.getCapacity();
 assertEquals("getCapacity failed", expResult, result);
 
 instance.setCapacity(100);
 assertEquals("setCapacity failed", 100, instance.getCapacity());
}

Code example source: org.apache.hadoop/hadoop-streaming (the same snippet also appears verbatim in ch.cern.hadoop/hadoop-streaming; that duplicate is omitted here)

/**
 * Reads the raw bytes following a <code>Type.LIST</code> code.
 * @return the obtained bytes sequence
 * @throws IOException
 */
public byte[] readRawList() throws IOException {
 Buffer buffer = new Buffer(new byte[] { (byte) Type.LIST.code });
 byte[] bytes = readRaw();
 while (bytes != null) {
  buffer.append(bytes);
  bytes = readRaw();
 }
 buffer.append(new byte[] { (byte) Type.MARKER.code });
 return buffer.get();
}
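
readRawList above uses Buffer purely as a growable byte accumulator: it starts from a one-byte type code, appends each raw chunk, then appends a terminator. A minimal sketch of that accumulation with plain byte arrays (readRaw() and the Type codes are typed-bytes streaming internals not reproduced here):

import java.util.Arrays;
import org.apache.hadoop.record.Buffer;

public class BufferAppendDemo {
  public static void main(String[] args) {
    Buffer acc = new Buffer(new byte[] { 0x01 }); // stand-in for the leading type code
    acc.append(new byte[] { 0x02, 0x03 });        // first chunk
    acc.append(new byte[] { 0x04 });              // stand-in for the trailing marker byte
    // get() exposes the backing array; only the first getCount() bytes are meaningful.
    byte[] raw = Arrays.copyOf(acc.get(), acc.getCount());
    System.out.println(Arrays.toString(raw));     // [1, 2, 3, 4]
  }
}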

Code example source: org.apache.hadoop/hadoop-common-test

/**
 * Test of truncate method, of class org.apache.hadoop.record.Buffer.
 */
public void testTruncate() {
 final Buffer instance = new Buffer();
 instance.setCapacity(100);
 assertEquals("setCapacity failed", 100, instance.getCapacity());
 
 instance.truncate();
 assertEquals("truncate failed", 0, instance.getCapacity());
}
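
The testGetCapacity and testTruncate examples above pin down the difference between the count (bytes actually stored) and the capacity (size of the backing storage). A minimal sketch based on the behavior those tests exercise:

import org.apache.hadoop.record.Buffer;

public class BufferCapacityDemo {
  public static void main(String[] args) {
    Buffer buf = new Buffer();              // zero-count sequence
    buf.setCapacity(100);                   // reserve backing storage without adding data
    System.out.println(buf.getCount());     // 0
    System.out.println(buf.getCapacity());  // 100

    buf.append(new byte[] { 1, 2, 3 });     // count grows to 3; capacity stays >= count
    buf.truncate();                         // shrink the capacity down to the current count
    System.out.println(buf.getCount());     // 3
    System.out.println(buf.getCapacity());  // 3
  }
}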

Code example source: edu.berkeley.cs.shark/hive-contrib

/**
 * Reads the raw bytes following a <code>Type.VECTOR</code> code.
 * 
 * @return the obtained bytes sequence
 * @throws IOException
 */
public byte[] readRawVector() throws IOException {
 Buffer buffer = new Buffer();
 int length = readVectorHeader();
 buffer.append(new byte[] {(byte) Type.VECTOR.code,
   (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
   (byte) (0xff & (length >> 8)), (byte) (0xff & length)});
 for (int i = 0; i < length; i++) {
  buffer.append(readRaw());
 }
 return buffer.get();
}
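
The header appended above is the VECTOR type code followed by the element count packed as four big-endian bytes. A tiny self-contained check of that packing (no Hadoop types involved):

public class BigEndianLengthCheck {
  public static void main(String[] args) {
    int length = 0x01020304;
    // Pack exactly as readRawVector does: most significant byte first.
    byte[] header = {
        (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
        (byte) (0xff & (length >> 8)), (byte) (0xff & length)};
    // Unpack and confirm the round trip.
    int decoded = ((header[0] & 0xff) << 24) | ((header[1] & 0xff) << 16)
        | ((header[2] & 0xff) << 8) | (header[3] & 0xff);
    System.out.println(decoded == length); // true
  }
}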
