本文整理了Java中org.apache.hadoop.record.Utils.writeVInt()
方法的一些代码示例,展示了Utils.writeVInt()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Utils.writeVInt()
方法的具体详情如下:
包路径:org.apache.hadoop.record.Utils
类名称:Utils
方法名:writeVInt
[英]Serializes an int to a binary stream with zero-compressed encoding.
[中]用零压缩编码将int序列化为二进制流。
代码示例来源:origin: ch.cern.hadoop/hadoop-common
// Writes {@code i} to the underlying output stream using zero-compressed
// variable-length encoding; {@code tag} is ignored by the binary format.
@Override
public void writeInt(int i, String tag) throws IOException {
Utils.writeVInt(out, i);
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
// Writes {@code i} to the underlying output stream using zero-compressed
// variable-length encoding; {@code tag} is ignored by the binary format.
@Override
public void writeInt(int i, String tag) throws IOException {
Utils.writeVInt(out, i);
}
代码示例来源:origin: org.apache.hadoop/hadoop-streaming
// Writes {@code i} to the underlying output stream using zero-compressed
// variable-length encoding; {@code tag} is ignored by the binary format.
@Override
public void writeInt(int i, String tag) throws IOException {
Utils.writeVInt(out, i);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-common
// Writes {@code i} to the underlying output stream using zero-compressed
// variable-length encoding; {@code tag} is ignored by the binary format.
@Override
public void writeInt(int i, String tag) throws IOException {
Utils.writeVInt(out, i);
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
// Writes {@code i} to the underlying output stream using zero-compressed
// variable-length encoding; {@code tag} is ignored by the binary format.
public void writeInt(int i, String tag) throws IOException {
Utils.writeVInt(out, i);
}
代码示例来源:origin: io.hops/hadoop-common
// Writes {@code i} to the underlying output stream using zero-compressed
// variable-length encoding; {@code tag} is ignored by the binary format.
@Override
public void writeInt(int i, String tag) throws IOException {
Utils.writeVInt(out, i);
}
代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core
// Writes {@code i} to the underlying output stream using zero-compressed
// variable-length encoding; {@code tag} is ignored by the binary format.
public void writeInt(int i, String tag) throws IOException {
Utils.writeVInt(out, i);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Writes {@code str} as a length-prefixed UTF-8 byte sequence: first the
 * encoded byte count as a zero-compressed vint, then the bytes themselves.
 */
static void toBinaryString(final DataOutput out, final String str)
    throws IOException {
  final int strlen = str.length();
  // Worst case: every code point expands to 4 UTF-8 bytes.
  final byte[] scratch = new byte[strlen * 4];
  int encoded = 0;
  for (int i = 0; i < strlen; ) {
    final int codePoint = str.codePointAt(i);
    i += Character.charCount(codePoint); // 2 chars for supplementary code points
    encoded += writeUtf8(codePoint, scratch, encoded);
  }
  writeVInt(out, encoded);
  out.write(scratch, 0, encoded);
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
/**
 * Writes {@code str} as a length-prefixed UTF-8 byte sequence: first the
 * encoded byte count as a zero-compressed vint, then the bytes themselves.
 */
static void toBinaryString(final DataOutput out, final String str)
    throws IOException {
  final int strlen = str.length();
  // Worst case: every code point expands to 4 UTF-8 bytes.
  final byte[] scratch = new byte[strlen * 4];
  int encoded = 0;
  for (int i = 0; i < strlen; ) {
    final int codePoint = str.codePointAt(i);
    i += Character.charCount(codePoint); // 2 chars for supplementary code points
    encoded += writeUtf8(codePoint, scratch, encoded);
  }
  writeVInt(out, encoded);
  out.write(scratch, 0, encoded);
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
/**
 * Writes the buffer as its valid byte count (zero-compressed vint)
 * followed by that many payload bytes; {@code tag} is not written.
 */
public void writeBuffer(Buffer buf, String tag)
    throws IOException {
  final byte[] payload = buf.get();
  final int count = buf.getCount();
  Utils.writeVInt(out, count);
  out.write(payload, 0, count);
}
代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core
/**
 * Writes the buffer as its valid byte count (zero-compressed vint)
 * followed by that many payload bytes; {@code tag} is not written.
 */
public void writeBuffer(Buffer buf, String tag)
    throws IOException {
  final byte[] payload = buf.get();
  final int count = buf.getCount();
  Utils.writeVInt(out, count);
  out.write(payload, 0, count);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-common
/**
 * Writes {@code str} as a length-prefixed UTF-8 byte sequence: first the
 * encoded byte count as a zero-compressed vint, then the bytes themselves.
 */
static void toBinaryString(final DataOutput out, final String str)
    throws IOException {
  final int strlen = str.length();
  // Worst case: every code point expands to 4 UTF-8 bytes.
  final byte[] scratch = new byte[strlen * 4];
  int encoded = 0;
  for (int i = 0; i < strlen; ) {
    final int codePoint = str.codePointAt(i);
    i += Character.charCount(codePoint); // 2 chars for supplementary code points
    encoded += writeUtf8(codePoint, scratch, encoded);
  }
  writeVInt(out, encoded);
  out.write(scratch, 0, encoded);
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Writes {@code str} as a length-prefixed UTF-8 byte sequence: first the
 * encoded byte count as a zero-compressed vint, then the bytes themselves.
 */
static void toBinaryString(final DataOutput out, final String str)
    throws IOException {
  final int strlen = str.length();
  // Worst case: every code point expands to 4 UTF-8 bytes.
  final byte[] scratch = new byte[strlen * 4];
  int encoded = 0;
  for (int i = 0; i < strlen; ) {
    final int codePoint = str.codePointAt(i);
    i += Character.charCount(codePoint); // 2 chars for supplementary code points
    encoded += writeUtf8(codePoint, scratch, encoded);
  }
  writeVInt(out, encoded);
  out.write(scratch, 0, encoded);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-common
/**
 * Writes the buffer as its valid byte count (zero-compressed vint)
 * followed by that many payload bytes; {@code tag} is not written.
 */
@Override
public void writeBuffer(Buffer buf, String tag)
    throws IOException {
  final byte[] payload = buf.get();
  final int count = buf.getCount();
  Utils.writeVInt(out, count);
  out.write(payload, 0, count);
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Writes the buffer as its valid byte count (zero-compressed vint)
 * followed by that many payload bytes; {@code tag} is not written.
 */
@Override
public void writeBuffer(Buffer buf, String tag)
    throws IOException {
  final byte[] payload = buf.get();
  final int count = buf.getCount();
  Utils.writeVInt(out, count);
  out.write(payload, 0, count);
}
代码示例来源:origin: io.hops/hadoop-common
/**
 * Writes {@code str} as a length-prefixed UTF-8 byte sequence: first the
 * encoded byte count as a zero-compressed vint, then the bytes themselves.
 */
static void toBinaryString(final DataOutput out, final String str)
    throws IOException {
  final int strlen = str.length();
  // Worst case: every code point expands to 4 UTF-8 bytes.
  final byte[] scratch = new byte[strlen * 4];
  int encoded = 0;
  for (int i = 0; i < strlen; ) {
    final int codePoint = str.codePointAt(i);
    i += Character.charCount(codePoint); // 2 chars for supplementary code points
    encoded += writeUtf8(codePoint, scratch, encoded);
  }
  writeVInt(out, encoded);
  out.write(scratch, 0, encoded);
}
代码示例来源:origin: org.apache.hadoop/hadoop-streaming
/**
 * Writes {@code str} as a length-prefixed UTF-8 byte sequence: first the
 * encoded byte count as a zero-compressed vint, then the bytes themselves.
 */
static void toBinaryString(final DataOutput out, final String str)
    throws IOException {
  final int strlen = str.length();
  // Worst case: every code point expands to 4 UTF-8 bytes.
  final byte[] scratch = new byte[strlen * 4];
  int encoded = 0;
  for (int i = 0; i < strlen; ) {
    final int codePoint = str.codePointAt(i);
    i += Character.charCount(codePoint); // 2 chars for supplementary code points
    encoded += writeUtf8(codePoint, scratch, encoded);
  }
  writeVInt(out, encoded);
  out.write(scratch, 0, encoded);
}
代码示例来源:origin: io.hops/hadoop-common
/**
 * Writes the buffer as its valid byte count (zero-compressed vint)
 * followed by that many payload bytes; {@code tag} is not written.
 */
@Override
public void writeBuffer(Buffer buf, String tag)
    throws IOException {
  final byte[] payload = buf.get();
  final int count = buf.getCount();
  Utils.writeVInt(out, count);
  out.write(payload, 0, count);
}
代码示例来源:origin: org.apache.hadoop/hadoop-streaming
/**
 * Writes the buffer as its valid byte count (zero-compressed vint)
 * followed by that many payload bytes; {@code tag} is not written.
 */
@Override
public void writeBuffer(Buffer buf, String tag)
    throws IOException {
  final byte[] payload = buf.get();
  final int count = buf.getCount();
  Utils.writeVInt(out, count);
  out.write(payload, 0, count);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Writes the buffer as its valid byte count (zero-compressed vint)
 * followed by that many payload bytes; {@code tag} is not written.
 */
@Override
public void writeBuffer(Buffer buf, String tag)
    throws IOException {
  final byte[] payload = buf.get();
  final int count = buf.getCount();
  Utils.writeVInt(out, count);
  out.write(payload, 0, count);
}
内容来源于网络,如有侵权,请联系作者删除!