org.apache.hadoop.record.Utils.readVInt()方法的使用及代码示例

x33g5p2x  于2022-02-01 转载在 其他  
字(5.1k)|赞(0)|评价(0)|浏览(144)

本文整理了Java中org.apache.hadoop.record.Utils.readVInt()方法的一些代码示例,展示了Utils.readVInt()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Utils.readVInt()方法的具体详情如下:
包路径:org.apache.hadoop.record.Utils
类名称:Utils
方法名:readVInt

Utils.readVInt介绍

[英]Reads a zero-compressed encoded integer from a stream and returns it.
[中]从流中读取一个零压缩编码整数并返回它。

代码示例

代码示例来源:origin: io.hops/hadoop-common

/**
 * Reads a zero-compressed (variable-length) encoded int from the
 * underlying input stream.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return the decoded int value
 * @throws IOException if the underlying stream read fails
 */
@Override
public int readInt(final String tag) throws IOException {
  final int value = Utils.readVInt(in);
  return value;
}

代码示例来源:origin: org.apache.hadoop/hadoop-streaming

/**
 * Reads a zero-compressed (variable-length) encoded int from the
 * underlying input stream.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return the decoded int value
 * @throws IOException if the underlying stream read fails
 */
@Override
public int readInt(final String tag) throws IOException {
  final int value = Utils.readVInt(in);
  return value;
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Reads a zero-compressed (variable-length) encoded int from the
 * underlying input stream.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return the decoded int value
 * @throws IOException if the underlying stream read fails
 */
@Override
public int readInt(final String tag) throws IOException {
  final int value = Utils.readVInt(in);
  return value;
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Reads a zero-compressed (variable-length) encoded int from the
 * underlying input stream.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return the decoded int value
 * @throws IOException if the underlying stream read fails
 */
public int readInt(final String tag) throws IOException {
  final int value = Utils.readVInt(in);
  return value;
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Reads a zero-compressed (variable-length) encoded int from the
 * underlying input stream.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return the decoded int value
 * @throws IOException if the underlying stream read fails
 */
@Override
public int readInt(final String tag) throws IOException {
  final int value = Utils.readVInt(in);
  return value;
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

/**
 * Reads a zero-compressed (variable-length) encoded int from the
 * underlying input stream.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return the decoded int value
 * @throws IOException if the underlying stream read fails
 */
@Override
public int readInt(final String tag) throws IOException {
  final int value = Utils.readVInt(in);
  return value;
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Reads a zero-compressed (variable-length) encoded int from the
 * underlying input stream.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return the decoded int value
 * @throws IOException if the underlying stream read fails
 */
public int readInt(final String tag) throws IOException {
  final int value = Utils.readVInt(in);
  return value;
}

代码示例来源:origin: io.hops/hadoop-common

/**
 * Reads a length-prefixed byte sequence: a zero-compressed vint length
 * followed by exactly that many raw bytes.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return a Buffer wrapping the bytes read
 * @throws IOException if the stream ends early or the read fails
 */
@Override
public Buffer readBuffer(final String tag) throws IOException {
  final int length = Utils.readVInt(in);
  final byte[] contents = new byte[length];
  in.readFully(contents);
  return new Buffer(contents);
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

/**
 * Reads a length-prefixed byte sequence: a zero-compressed vint length
 * followed by exactly that many raw bytes.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return a Buffer wrapping the bytes read
 * @throws IOException if the stream ends early or the read fails
 */
@Override
public Buffer readBuffer(final String tag) throws IOException {
  final int length = Utils.readVInt(in);
  final byte[] contents = new byte[length];
  in.readFully(contents);
  return new Buffer(contents);
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Reads a length-prefixed byte sequence: a zero-compressed vint length
 * followed by exactly that many raw bytes.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return a Buffer wrapping the bytes read
 * @throws IOException if the stream ends early or the read fails
 */
public Buffer readBuffer(final String tag) throws IOException {
  final int length = Utils.readVInt(in);
  final byte[] contents = new byte[length];
  in.readFully(contents);
  return new Buffer(contents);
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Reads a length-prefixed byte sequence: a zero-compressed vint length
 * followed by exactly that many raw bytes.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return a Buffer wrapping the bytes read
 * @throws IOException if the stream ends early or the read fails
 */
@Override
public Buffer readBuffer(final String tag) throws IOException {
  final int length = Utils.readVInt(in);
  final byte[] contents = new byte[length];
  in.readFully(contents);
  return new Buffer(contents);
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Reads a length-prefixed byte sequence: a zero-compressed vint length
 * followed by exactly that many raw bytes.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return a Buffer wrapping the bytes read
 * @throws IOException if the stream ends early or the read fails
 */
public Buffer readBuffer(final String tag) throws IOException {
  final int length = Utils.readVInt(in);
  final byte[] contents = new byte[length];
  in.readFully(contents);
  return new Buffer(contents);
}

代码示例来源:origin: org.apache.hadoop/hadoop-streaming

/**
 * Reads a length-prefixed byte sequence: a zero-compressed vint length
 * followed by exactly that many raw bytes.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return a Buffer wrapping the bytes read
 * @throws IOException if the stream ends early or the read fails
 */
@Override
public Buffer readBuffer(final String tag) throws IOException {
  final int length = Utils.readVInt(in);
  final byte[] contents = new byte[length];
  in.readFully(contents);
  return new Buffer(contents);
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Reads a length-prefixed byte sequence: a zero-compressed vint length
 * followed by exactly that many raw bytes.
 *
 * @param tag the field tag (unused by this binary codec)
 * @return a Buffer wrapping the bytes read
 * @throws IOException if the stream ends early or the read fails
 */
@Override
public Buffer readBuffer(final String tag) throws IOException {
  final int length = Utils.readVInt(in);
  final byte[] contents = new byte[length];
  in.readFully(contents);
  return new Buffer(contents);
}

代码示例来源:origin: org.apache.hadoop/hadoop-common-test

/**
 * Skips one length-prefixed serialized field (a zero-compressed vint
 * length followed by that many payload bytes) starting at offset s.
 *
 * NOTE(review): returns start - end, i.e. the negated number of bytes
 * consumed — this matches the generated-code convention of the original.
 *
 * @param b serialized record bytes
 * @param s starting offset of the field
 * @param l remaining length budget (tracked but not returned)
 * @return (original s) - (s after the field), a non-positive value
 */
static public int slurpRaw(byte[] b, int s, int l) {
  try {
    final int start = s;
    final int payloadLen = org.apache.hadoop.record.Utils.readVInt(b, s);
    final int headerLen = org.apache.hadoop.record.Utils.getVIntSize(payloadLen);
    final int consumed = headerLen + payloadLen;
    s += consumed;
    l -= consumed;
    return start - s;
  } catch (java.io.IOException e) {
    throw new RuntimeException(e);
  }
}
static public int compareRaw(byte[] b1, int s1, int l1,

代码示例来源:origin: org.apache.hadoop/hadoop-common-test

/**
 * Skips one zero-compressed vint field starting at offset s (header
 * bytes only — the vint itself IS the value, there is no payload).
 *
 * NOTE(review): returns start - end, i.e. the negated number of bytes
 * consumed — this matches the generated-code convention of the original.
 *
 * @param b serialized record bytes
 * @param s starting offset of the field
 * @param l remaining length budget (tracked but not returned)
 * @return (original s) - (s after the field), a non-positive value
 */
static public int slurpRaw(byte[] b, int s, int l) {
  try {
    final int start = s;
    final int value = org.apache.hadoop.record.Utils.readVInt(b, s);
    final int vintSize = org.apache.hadoop.record.Utils.getVIntSize(value);
    s += vintSize;
    l -= vintSize;
    return start - s;
  } catch (java.io.IOException e) {
    throw new RuntimeException(e);
  }
}
static public int compareRaw(byte[] b1, int s1, int l1,

代码示例来源:origin: org.apache.hadoop/hadoop-common-test

/**
 * Skips one length-prefixed serialized field (a zero-compressed vint
 * length followed by that many payload bytes) starting at offset s.
 *
 * NOTE(review): returns start - end, i.e. the negated number of bytes
 * consumed — this matches the generated-code convention of the original.
 *
 * @param b serialized record bytes
 * @param s starting offset of the field
 * @param l remaining length budget (tracked but not returned)
 * @return (original s) - (s after the field), a non-positive value
 */
static public int slurpRaw(byte[] b, int s, int l) {
  try {
    final int start = s;
    final int payloadLen = org.apache.hadoop.record.Utils.readVInt(b, s);
    final int headerLen = org.apache.hadoop.record.Utils.getVIntSize(payloadLen);
    final int consumed = headerLen + payloadLen;
    s += consumed;
    l -= consumed;
    return start - s;
  } catch (java.io.IOException e) {
    throw new RuntimeException(e);
  }
}
static public int compareRaw(byte[] b1, int s1, int l1,

代码示例来源:origin: org.apache.hadoop/hadoop-common-test

/**
 * Skips one length-prefixed serialized field (a zero-compressed vint
 * length followed by that many payload bytes) starting at offset s.
 *
 * NOTE(review): returns start - end, i.e. the negated number of bytes
 * consumed — this matches the generated-code convention of the original.
 *
 * @param b serialized record bytes
 * @param s starting offset of the field
 * @param l remaining length budget (tracked but not returned)
 * @return (original s) - (s after the field), a non-positive value
 */
static public int slurpRaw(byte[] b, int s, int l) {
  try {
    final int start = s;
    final int payloadLen = org.apache.hadoop.record.Utils.readVInt(b, s);
    final int headerLen = org.apache.hadoop.record.Utils.getVIntSize(payloadLen);
    final int consumed = headerLen + payloadLen;
    s += consumed;
    l -= consumed;
    return start - s;
  } catch (java.io.IOException e) {
    throw new RuntimeException(e);
  }
}
static public int compareRaw(byte[] b1, int s1, int l1,

代码示例来源:origin: dnmilne/wikipediaminer

/**
 * Skips two consecutive zero-compressed vint fields starting at offset s
 * (header bytes only — each vint IS its value, there is no payload).
 *
 * NOTE(review): returns start - end, i.e. the negated number of bytes
 * consumed — this matches the generated-code convention of the original.
 *
 * @param b serialized record bytes
 * @param s starting offset of the first field
 * @param l remaining length budget (tracked but not returned)
 * @return (original s) - (s after both fields), a non-positive value
 */
static public int slurpRaw(byte[] b, int s, int l) {
  try {
    final int start = s;
    // The record holds two vint fields; skip each one in turn.
    for (int field = 0; field < 2; field++) {
      final int value = org.apache.hadoop.record.Utils.readVInt(b, s);
      final int vintSize = org.apache.hadoop.record.Utils.getVIntSize(value);
      s += vintSize;
      l -= vintSize;
    }
    return start - s;
  } catch (java.io.IOException e) {
    throw new RuntimeException(e);
  }
}
static public int compareRaw(byte[] b1, int s1, int l1,

代码示例来源:origin: org.apache.hadoop/hadoop-common-test

/**
 * Compares two serialized length-prefixed byte fields lexicographically.
 * Each field is a zero-compressed vint length followed by that many
 * payload bytes.
 *
 * Fixes two defects in the original generated code:
 * <ul>
 *   <li>{@code return (r1<0)?-1:0;} reported 0 (equal) when the payload
 *       comparison said b1 &gt; b2, violating the comparator contract —
 *       it must return 1 in that case;</li>
 *   <li>{@code l1-=i1; l1-=i2;} decremented {@code l1} twice and never
 *       decremented {@code l2} — corrected to {@code l2-=i2}.</li>
 * </ul>
 *
 * @param b1 first serialized record's bytes
 * @param s1 offset of the field in b1
 * @param l1 remaining length budget for b1
 * @param b2 second serialized record's bytes
 * @param s2 offset of the field in b2
 * @param l2 remaining length budget for b2
 * @return -1 or 1 when the payloads differ; otherwise (os1 - s1), the
 *         negated byte count consumed from b1 (generated-code convention)
 */
static public int compareRaw(byte[] b1, int s1, int l1,
                byte[] b2, int s2, int l2) {
 try {
  int os1 = s1;
  {
   int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
   int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
   int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
   int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
   // Step both cursors past the vint length headers.
   s1+=z1; s2+=z2; l1-=z1; l2-=z2;
   int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);
   // Propagate the sign of the payload comparison (was (r1<0)?-1:0,
   // which wrongly reported unequal fields as equal when b1 > b2).
   if (r1 != 0) { return (r1<0)?-1:1; }
   // Step both cursors past the payloads (was l1-=i2, a typo).
   s1+=i1; s2+=i2; l1-=i1; l2-=i2;
  }
  return (os1 - s1);
 } catch(java.io.IOException e) {
  throw new RuntimeException(e);
 }
}
public int compare(byte[] b1, int s1, int l1,

相关文章