This article collects code examples for the Java class org.apache.hadoop.record.Utils and shows how the Utils class is used in practice. The examples were extracted from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they carry real-world reference value. Details of the Utils class:

Package path: org.apache.hadoop.record.Utils
Class name: Utils
Description: Various utility functions for Hadoop record I/O runtime.
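Before the project examples, here is a minimal, self-contained sketch of the most common calls: writing a variable-length integer, reading it back from a byte array, and asking how many bytes the encoding occupies. The value 300 is an arbitrary illustration.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.record.Utils;

public class VIntDemo {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    Utils.writeVInt(out, 300);          // variable-length encoding of an int
    byte[] buf = baos.toByteArray();
    int v = Utils.readVInt(buf, 0);     // the byte[] overload used in the examples below
    int z = Utils.getVIntSize(v);       // size in bytes of the encoded value
    System.out.println(v + " took " + z + " byte(s)");
  }
}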
Code example source: origin: org.apache.hadoop/hadoop-common-test
static public int compareRaw(byte[] b1, int s1, int l1,
byte[] b2, int s2, int l2) {
try {
int os1 = s1;
{
int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1+=z1; s2+=z2; l1-=z1; l2-=z2;
int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);
if (r1 != 0) { return (r1<0)?-1:0; }
s1+=i1; s2+=i2; l1-=i1; l1-=i2;
}
return (os1 - s1);
} catch(java.io.IOException e) {
throw new RuntimeException(e);
}
}
public int compare(byte[] b1, int s1, ... // (excerpt truncated here)
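The compareBytes call in compareRaw above compares two byte ranges lexicographically, returning a negative, zero, or positive result. A minimal standalone sketch (the arrays, offsets, and lengths are made-up illustrations):

import org.apache.hadoop.record.Utils;

public class CompareBytesDemo {
  public static void main(String[] args) {
    byte[] a = {1, 2, 3};
    byte[] b = {1, 2, 4};
    // negative: a sorts before b; zero: equal; positive: a sorts after b
    int r = Utils.compareBytes(a, 0, a.length, b, 0, b.length);
    System.out.println(r < 0 ? "a < b" : (r == 0 ? "a == b" : "a > b"));
  }
}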
Code example source: origin: ch.cern.hadoop/hadoop-common
@Override
public long readLong(final String tag) throws IOException {
return Utils.readVLong(in);
}
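readVLong also has a DataInput overload, which is what this reader uses. A round-trip sketch with plain java.io streams (the value is arbitrary):

import java.io.*;
import org.apache.hadoop.record.Utils;

public class VLongDemo {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Utils.writeVLong(new DataOutputStream(baos), 1234567890123L);
    DataInputStream in = new DataInputStream(
        new ByteArrayInputStream(baos.toByteArray()));
    System.out.println(Utils.readVLong(in)); // prints 1234567890123
  }
}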
Code example source: origin: io.hops/hadoop-common
@Override
public String readString(final String tag) throws IOException {
return Utils.fromBinaryString(in);
}
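As the fromBinaryString excerpt further below shows, the binary string format is a VInt length prefix followed by the string's UTF-8 bytes. A sketch that writes and reads that layout by hand using only the public VInt helpers (fromBinaryString itself is package-private):

import java.io.*;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.record.Utils;

public class BinaryStringLayoutDemo {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    byte[] utf8 = "héllo".getBytes(StandardCharsets.UTF_8);
    Utils.writeVInt(out, utf8.length);   // length prefix
    out.write(utf8);                     // then the raw UTF-8 bytes
    // reading it back mirrors the fromBinaryString excerpt below
    DataInputStream in = new DataInputStream(
        new ByteArrayInputStream(baos.toByteArray()));
    byte[] bytes = new byte[Utils.readVInt(in)];
    in.readFully(bytes);
    System.out.println(new String(bytes, StandardCharsets.UTF_8));
  }
}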
Code example source: origin: org.apache.hadoop/hadoop-common-test
static public int slurpRaw(byte[] b, int s, int l) {
try {
int os = s;
{
int i = org.apache.hadoop.record.Utils.readVInt(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s+=(z+i); l-= (z+i);
}
return (os - s);
} catch(java.io.IOException e) {
throw new RuntimeException(e);
}
}
static public int compareRaw(byte[] b1, int s1, int l1, ... // (excerpt truncated here)
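slurpRaw measures a serialized string/buffer field by reading its VInt length prefix and stepping over the payload. The same skip pattern as a standalone helper (skipField is a hypothetical name; the one-field buffer in main is a made-up illustration):

import org.apache.hadoop.record.Utils;

public class SkipFieldDemo {
  // step over one length-prefixed field starting at offset s; returns the new offset
  static int skipField(byte[] b, int s) throws java.io.IOException {
    int i = Utils.readVInt(b, s);   // payload length
    int z = Utils.getVIntSize(i);   // width of the length prefix itself
    return s + z + i;
  }

  public static void main(String[] args) throws java.io.IOException {
    byte[] buf = {3, 10, 20, 30};   // length prefix 3, then three payload bytes
    System.out.println(skipField(buf, 0)); // prints 4
  }
}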
Code example source: origin: ch.cern.hadoop/hadoop-common
static String fromBinaryString(final DataInput din) throws IOException {
final int utf8Len = readVInt(din);
final byte[] bytes = new byte[utf8Len];
din.readFully(bytes);
// ... (start of the decoding loop elided in this excerpt; b1 is the current
// lead byte, len the cursor into bytes, cpt the decoded code point)
} else if ((b1 & B11111) == B11110) {
int b2 = bytes[len++] & 0xFF;
checkB10(b2);
int b3 = bytes[len++] & 0xFF;
checkB10(b3);
int b4 = bytes[len++] & 0xFF;
checkB10(b4);
cpt = utf8ToCodePoint(b1, b2, b3, b4);
} else if ((b1 & B1111) == B1110) {
int b2 = bytes[len++] & 0xFF;
checkB10(b2);
int b3 = bytes[len++] & 0xFF;
checkB10(b3);
cpt = utf8ToCodePoint(b1, b2, b3);
} else if ((b1 & B111) == B110) {
int b2 = bytes[len++] & 0xFF;
checkB10(b2);
cpt = utf8ToCodePoint(b1, b2);
} else {
throw new IOException("Invalid UTF-8 byte "+Integer.toHexString(b1)+
" at offset "+(len-1)+" in length of "+utf8Len);
}
if (!isValidCodePoint(cpt)) {
throw new IOException("Illegal Unicode Codepoint "+
Integer.toHexString(cpt)+" in stream.");
}
// ... (rest of the method elided)
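The branches above dispatch on the UTF-8 lead byte: 11110xxx opens a 4-byte sequence, 1110xxxx a 3-byte one, 110xxxxx a 2-byte one, and checkB10 asserts that each continuation byte matches 10xxxxxx. A hand-rolled decode of one 3-byte character using the same bit masks (plain Java, not Utils' private helpers):

public class Utf8DecodeDemo {
  public static void main(String[] args) {
    // '€' is E2 82 AC in UTF-8: one 1110xxxx lead byte plus two 10xxxxxx bytes
    int b1 = 0xE2, b2 = 0x82, b3 = 0xAC;
    if ((b1 & 0xF0) == 0xE0 && (b2 & 0xC0) == 0x80 && (b3 & 0xC0) == 0x80) {
      int cpt = ((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F);
      System.out.println(Integer.toHexString(cpt)); // prints 20ac
    }
  }
}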
Code example source: origin: apache/chukwa
int os = s;
// each braced block below skips one serialized field
{
long i = org.apache.hadoop.record.Utils.readVLong(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s += z;
l -= z;
}
{
int i = org.apache.hadoop.record.Utils.readVInt(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s += (z + i);
l -= (z + i);
}
{
int i = org.apache.hadoop.record.Utils.readVInt(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s += (z + i);
l -= (z + i);
}
{
long i = org.apache.hadoop.record.Utils.readVLong(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s += z;
l -= z;
}
Code example source: origin: apache/chukwa
{
long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
if (i1 != i2) {
return ((i1 - i2) < 0) ? -1 : 0;
}
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1 += z1;
s2 += z2;
}
{
int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1 += z1;
s2 += z2;
l1 -= z1;
l2 -= z2;
int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
s2, i2);
if (r1 != 0) {
// ... (return elided in this excerpt)
}
}
{
int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1 += z1;
s2 += z2;
l1 -= z1;
// ... (excerpt truncated)
Code example source: origin: org.apache.hadoop/hadoop-common-test
{
int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
if (i1 != i2) {
return ((i1-i2) < 0) ? -1 : 0;
}
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1+=z1; s2+=z2; l1-=z1; l2-=z2;
}
{
long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
if (i1 != i2) {
return ((i1-i2) < 0) ? -1 : 0;
}
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1+=z1; s2+=z2; l1-=z1; l2-=z2;
}
{
float f1 = org.apache.hadoop.record.Utils.readFloat(b1, s1);
float f2 = org.apache.hadoop.record.Utils.readFloat(b2, s2);
if (f1 != f2) {
return ((f1-f2) < 0) ? -1 : 0;
}
// ... (offset advance elided)
}
{
double d1 = org.apache.hadoop.record.Utils.readDouble(b1, s1);
double d2 = org.apache.hadoop.record.Utils.readDouble(b2, s2);
if (d1 != d2) {
return ((d1-d2) < 0) ? -1 : 0;
}
// ... (offset advance elided)
}
{
int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
// ... (excerpt truncated)
Code example source: origin: io.hops/hadoop-common
@Override
public int readInt(final String tag) throws IOException {
return Utils.readVInt(in);
}
Code example source: origin: io.hops/hadoop-mapreduce-client-core
/**
 * Verify that hash equals HMacHash(msg).
 * @param hash the hash to verify
 * @param msg the message that was hashed
 * @param key the secret key for the HMAC
 * @return true if the hashes are the same
 */
private static boolean verifyHash(byte[] hash, byte[] msg, SecretKey key) {
byte[] msg_hash = generateByteHash(msg, key);
return Utils.compareBytes(msg_hash, 0, msg_hash.length, hash, 0, hash.length) == 0;
}
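Here compareBytes == 0 serves as a byte-wise equality test on two HMACs. The same idiom against a real javax.crypto HMAC (the algorithm, key, and message are made-up illustrations; note that compareBytes does a plain lexicographic comparison, so don't rely on it being constant-time):

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.record.Utils;

public class HmacEqualityDemo {
  public static void main(String[] args) throws Exception {
    SecretKeySpec key = new SecretKeySpec("secret".getBytes("UTF-8"), "HmacSHA1");
    Mac mac = Mac.getInstance("HmacSHA1");
    mac.init(key);
    byte[] h1 = mac.doFinal("message".getBytes("UTF-8"));
    byte[] h2 = mac.doFinal("message".getBytes("UTF-8"));
    // equal-length, equal-content arrays compare as 0
    System.out.println(
        Utils.compareBytes(h1, 0, h1.length, h2, 0, h2.length) == 0); // true
  }
}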
Code example source: origin: org.apache.hadoop/hadoop-streaming
@Override
public void writeBuffer(Buffer buf, String tag)
throws IOException {
printCommaUnlessFirst();
stream.print(Utils.toCSVBuffer(buf));
throwExceptionOnError(tag);
}
Code example source: origin: ch.cern.hadoop/hadoop-common
@Override
public void writeString(String s, String tag) throws IOException {
printCommaUnlessFirst();
stream.print(Utils.toCSVString(s));
throwExceptionOnError(tag);
}
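toCSVString escapes a string for the record CSV serialization so that characters like commas and newlines cannot break field boundaries. A usage sketch, assuming toCSVString is publicly accessible (the caller above lives in Utils' own package); the exact escape rules live in Utils, so the result is printed rather than asserted:

import org.apache.hadoop.record.Utils;

public class CsvStringDemo {
  public static void main(String[] args) {
    // escaped form of a string containing ',' and '\n'
    System.out.println(Utils.toCSVString("a,b\nc"));
  }
}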
Code example source: origin: org.apache.hadoop/hadoop-streaming
@Override
public void writeString(String s, String tag) throws IOException {
Utils.toBinaryString(out, s);
}
Code example source: origin: org.apache.hadoop/hadoop-streaming
/**
 * Decode a string escaped with %xxxx hex sequences.
 * @param s the escaped string
 * @return the decoded string
 */
static String fromXMLString(String s) {
StringBuilder sb = new StringBuilder();
for (int idx = 0; idx < s.length();) {
char ch = s.charAt(idx++);
if (ch == '%') {
int ch1 = h2c(s.charAt(idx++)) << 12;
int ch2 = h2c(s.charAt(idx++)) << 8;
int ch3 = h2c(s.charAt(idx++)) << 4;
int ch4 = h2c(s.charAt(idx++));
char res = (char)(ch1 | ch2 | ch3 | ch4);
sb.append(res);
} else {
sb.append(ch);
}
}
return sb.toString();
}
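The decoder treats '%' as an escape introducing exactly four hex digits, which it reassembles into one UTF-16 code unit via shifts of 12, 8, 4, and 0 bits. A plain-Java illustration of the same logic (not a call into Utils, whose fromXMLString is package-private):

public class XmlUnescapeDemo {
  public static void main(String[] args) {
    String s = "%0041%0042C";            // 'A' and 'B' escaped, 'C' literal
    StringBuilder sb = new StringBuilder();
    for (int idx = 0; idx < s.length();) {
      char ch = s.charAt(idx++);
      if (ch == '%') {
        // four hex digits form one UTF-16 code unit
        int code = Integer.parseInt(s.substring(idx, idx + 4), 16);
        idx += 4;
        sb.append((char) code);
      } else {
        sb.append(ch);
      }
    }
    System.out.println(sb);              // prints ABC
  }
}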
Code example source: origin: org.apache.hadoop/hadoop-common-test
private static void initStrings(Record[] strings) {
final int STRLEN = 32;
for (int idx = 0; idx < strings.length; idx++) {
strings[idx] = new RecString();
int strlen = rand.nextInt(STRLEN);
StringBuilder sb = new StringBuilder(strlen);
for (int ich = 0; ich < strlen; ich++) {
int cpt = 0;
while (true) {
cpt = rand.nextInt(0x10FFFF+1);
if (Utils.isValidCodePoint(cpt)) {
break;
}
}
sb.appendCodePoint(cpt);
}
((RecString)strings[idx]).setData(sb.toString());
}
}
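The retry loop filters random integers through Utils.isValidCodePoint before appending them, since not every int in [0, 0x10FFFF] is a legal code point: the surrogate range U+D800–U+DFFF, for instance, is not. A comparable check using only JDK APIs (Utils' exact validity rules may differ slightly):

public class CodePointDemo {
  public static void main(String[] args) {
    int[] samples = {0x41, 0x20AC, 0xD800, 0x10FFFF};
    for (int cpt : samples) {
      // reject out-of-range values and standalone surrogates
      boolean ok = Character.isValidCodePoint(cpt)
          && !(cpt >= 0xD800 && cpt <= 0xDFFF);
      System.out.println(Integer.toHexString(cpt) + " -> " + ok);
    }
  }
}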
Code example source: origin: apache/chukwa
int os = s;
{
long i = org.apache.hadoop.record.Utils.readVLong(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s += z;
l -= z;
}
{
// map field: entry count, then a key/value pair per entry
int mi1 = org.apache.hadoop.record.Utils.readVInt(b, s);
int mz1 = org.apache.hadoop.record.Utils.getVIntSize(mi1);
s += mz1;
l -= mz1;
for (int midx1 = 0; midx1 < mi1; midx1++) {
{
int i = org.apache.hadoop.record.Utils.readVInt(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s += (z + i);
l -= (z + i);
}
{
int i = org.apache.hadoop.record.Utils.readVInt(b, s);
int z = org.apache.hadoop.record.Utils.getVIntSize(i);
s += z + i;
l -= (z + i);
}
}
}
Code example source: origin: apache/chukwa
{
long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
if (i1 != i2) {
return ((i1 - i2) < 0) ? -1 : 0;
}
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1 += z1;
s2 += z2;
}
{
int mi11 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
int mi21 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
int mz11 = org.apache.hadoop.record.Utils.getVIntSize(mi11);
int mz21 = org.apache.hadoop.record.Utils.getVIntSize(mi21);
s1 += mz11;
s2 += mz21;
// ... (per-entry loop header elided in this excerpt)
{
int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
s1 += z1;
s2 += z2;
l1 -= z1;
l2 -= z2;
int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1,
b2, s2, i2);
if (r1 != 0) {
// ... (excerpt truncated)
Code example source: origin: org.apache.hadoop/hadoop-streaming
@Override
public int readInt(final String tag) throws IOException {
return Utils.readVInt(in);
}