This article collects code examples of the Java method org.apache.hadoop.record.Buffer.append() and shows how Buffer.append() is used in practice. The examples are extracted from selected open-source projects indexed on platforms such as GitHub, Stack Overflow, and Maven, so they make useful references. Details of the Buffer.append() method:
Package: org.apache.hadoop.record
Class: Buffer
Method: append
Summary: Append specified bytes to the buffer
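Before the project-sourced examples, here is a minimal usage sketch. The byte values and variable names are illustrative only; Buffer, append, getCount, and get are the real org.apache.hadoop.record.Buffer API shown in the examples below.

import org.apache.hadoop.record.Buffer;

public class BufferAppendDemo {
  public static void main(String[] args) {
    Buffer buffer = new Buffer();
    buffer.append(new byte[] {1, 2, 3});           // append a whole array
    buffer.append(new byte[] {4, 5, 6, 7}, 1, 2);  // append only bytes 5 and 6
    System.out.println(buffer.getCount());         // prints 5: valid bytes in the buffer
  }
}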
Code example source: ch.cern.hadoop/hadoop-common
/**
 * Append specified bytes to the buffer
 *
 * @param bytes byte array to be appended
 */
public void append(byte[] bytes) {
  append(bytes, 0, bytes.length);
}
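This one-argument overload simply delegates to append(byte[], int, int) with an offset of 0 and the full array length.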
The same implementation appears verbatim in com.github.jiayuhan-it/hadoop-common, com.facebook.hadoop/hadoop-core, io.prestosql.hadoop/hadoop-apache, org.jvnet.hudson.hadoop/hadoop-core, org.apache.hadoop/hadoop-streaming, and io.hops/hadoop-common.
Code example source: edu.berkeley.cs.shark/hive-contrib
/**
 * Reads the raw bytes following a <code>Type.MAP</code> code.
 *
 * @return the obtained bytes sequence
 * @throws IOException
 */
public byte[] readRawMap() throws IOException {
  Buffer buffer = new Buffer();
  int length = readMapHeader();
  // Re-emit the MAP type code followed by the entry count as 4 big-endian bytes.
  buffer.append(new byte[] {(byte) Type.MAP.code,
      (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
      (byte) (0xff & (length >> 8)), (byte) (0xff & length)});
  for (int i = 0; i < length; i++) {
    buffer.append(readRaw()); // key
    buffer.append(readRaw()); // value
  }
  return buffer.get();
}
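The shift-and-mask expressions above write length as a 32-bit big-endian value. A minimal standalone sketch of the same encoding and its round trip (the names BigEndianDemo, encodeBE, and decodeBE are illustrative, not part of the Hadoop API):

public class BigEndianDemo {
  // Encode an int as 4 big-endian bytes, exactly as readRawMap does for the entry count.
  static byte[] encodeBE(int length) {
    return new byte[] {
        (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
        (byte) (0xff & (length >> 8)), (byte) (0xff & length)};
  }

  // Rebuild the int from the 4 bytes; masking with 0xff undoes sign extension.
  static int decodeBE(byte[] b) {
    return ((b[0] & 0xff) << 24) | ((b[1] & 0xff) << 16)
        | ((b[2] & 0xff) << 8) | (b[3] & 0xff);
  }

  public static void main(String[] args) {
    int n = 305419896; // 0x12345678
    System.out.println(decodeBE(encodeBE(n)) == n); // prints true
  }
}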
The same readRawMap implementation (differing only in whitespace) also appears in org.apache.hadoop.hive/hive-contrib, org.apache.hadoop/hadoop-streaming, and ch.cern.hadoop/hadoop-streaming.
Code example source: ch.cern.hadoop/hadoop-streaming
/**
 * Reads the raw bytes following a <code>Type.VECTOR</code> code.
 * @return the obtained bytes sequence
 * @throws IOException
 */
public byte[] readRawVector() throws IOException {
  Buffer buffer = new Buffer();
  int length = readVectorHeader();
  // Re-emit the VECTOR type code followed by the element count as 4 big-endian bytes.
  buffer.append(new byte[] {
      (byte) Type.VECTOR.code,
      (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
      (byte) (0xff & (length >> 8)), (byte) (0xff & length)
  });
  for (int i = 0; i < length; i++) {
    buffer.append(readRaw());
  }
  return buffer.get();
}
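readRawVector follows the same framing as readRawMap, except that each iteration appends a single raw element rather than a key/value pair.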
Code example source: org.apache.hadoop/hadoop-streaming
/**
 * Reads the raw bytes following a <code>Type.LIST</code> code.
 * @return the obtained bytes sequence
 * @throws IOException
 */
public byte[] readRawList() throws IOException {
  // Seed the buffer with the LIST type code via the Buffer(byte[]) constructor.
  Buffer buffer = new Buffer(new byte[] { (byte) Type.LIST.code });
  byte[] bytes = readRaw();
  while (bytes != null) {
    buffer.append(bytes);
    bytes = readRaw();
  }
  // Terminate the copied framing with the end-of-list marker byte.
  buffer.append(new byte[] { (byte) Type.MARKER.code });
  return buffer.get();
}
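Unlike maps and vectors, lists carry no up-front length: readRawList loops until readRaw() returns null, which presumably happens when the stream reaches the end-of-list marker, and then re-appends the MARKER byte so the copied framing stays intact.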
The same readRawList implementation also appears in ch.cern.hadoop/hadoop-streaming, org.apache.hadoop.hive/hive-contrib, and edu.berkeley.cs.shark/hive-contrib.
The readRawVector implementation shown earlier likewise appears in edu.berkeley.cs.shark/hive-contrib, org.apache.hadoop.hive/hive-contrib, and org.apache.hadoop/hadoop-streaming.
Code example source: org.apache.hadoop/hadoop-common-test
/**
 * Test of append method, of class org.apache.hadoop.record.Buffer.
 */
public void testAppend() {
  final byte[] bytes = new byte[100];
  final int offset = 0;
  final int length = 100;
  for (int idx = 0; idx < 100; idx++) {
    bytes[idx] = (byte) (100 - idx);
  }
  final Buffer instance = new Buffer();
  instance.append(bytes, offset, length);
  assertEquals("Buffer size mismatch", 100, instance.getCount());
  for (int idx = 0; idx < 100; idx++) {
    assertEquals("Buffer contents corrupted", 100 - idx, instance.get()[idx]);
  }
}
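One detail this test relies on: get() returns the backing array without copying, so only indexes 0 through getCount() - 1 hold valid data. A short hedged sketch (variable names are illustrative; once the buffer has grown, the backing array may be longer than the count):

Buffer b = new Buffer();
b.append(new byte[] {1, 2, 3});
byte[] backing = b.get();  // no copy; backing.length may exceed 3
int valid = b.getCount();  // 3 -- only backing[0..valid-1] are meaningful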