org.apache.hadoop.record.Utils.writeUtf8()方法的使用及代码示例

x33g5p2x  于2022-02-01 转载在 其他  
字(3.2k)|赞(0)|评价(0)|浏览(134)

本文整理了Java中org.apache.hadoop.record.Utils.writeUtf8()方法的一些代码示例,展示了Utils.writeUtf8()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度帮忙到你。Utils.writeUtf8()方法的具体详情如下:
包路径:org.apache.hadoop.record.Utils
类名称:Utils
方法名:writeUtf8

Utils.writeUtf8介绍

将单个 Unicode 码点以 UTF-8 编码写入字节数组的指定偏移处，并返回本次写入的字节数（从下方示例中的调用 `writeUtf8(cpt, bytes, utf8Len)` 可推断）。

代码示例

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Serializes {@code str} to {@code out} as a varint-prefixed UTF-8 byte sequence:
 * first the encoded byte length via {@code writeVInt}, then the bytes themselves.
 *
 * @param out destination stream
 * @param str string to encode
 * @throws IOException if writing to {@code out} fails
 */
static void toBinaryString(final DataOutput out, final String str)
 throws IOException {
 final int charCount = str.length();
 // A single Unicode code point occupies at most 4 bytes in UTF-8.
 final byte[] encoded = new byte[charCount * 4];
 int byteCount = 0;
 for (int pos = 0; pos < charCount; ) {
  final int codePoint = str.codePointAt(pos);
  pos += Character.charCount(codePoint); // 2 chars for supplementary, else 1
  byteCount += writeUtf8(codePoint, encoded, byteCount);
 }
 writeVInt(out, byteCount);
 out.write(encoded, 0, byteCount);
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Writes {@code str} to {@code out} in length-prefixed UTF-8 form: the byte
 * length as a varint (via {@code writeVInt}) followed by the UTF-8 bytes.
 *
 * @param out destination stream
 * @param str string to encode
 * @throws IOException if writing to {@code out} fails
 */
static void toBinaryString(final DataOutput out, final String str)
 throws IOException {
 final int charCount = str.length();
 // UTF-8 needs at most 4 bytes per code point, so this buffer never overflows.
 final byte[] encoded = new byte[charCount * 4];
 int byteCount = 0;
 for (int pos = 0; pos < charCount; ) {
  final int codePoint = str.codePointAt(pos);
  pos += Character.charCount(codePoint); // advance 2 chars for supplementary code points
  byteCount += writeUtf8(codePoint, encoded, byteCount);
 }
 writeVInt(out, byteCount);
 out.write(encoded, 0, byteCount);
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Emits {@code str} as a varint byte-length (via {@code writeVInt}) followed by
 * its UTF-8 encoding, produced code point by code point through {@code writeUtf8}.
 *
 * @param out destination stream
 * @param str string to encode
 * @throws IOException if writing to {@code out} fails
 */
static void toBinaryString(final DataOutput out, final String str)
 throws IOException {
 final int charCount = str.length();
 // Worst case: 4 UTF-8 bytes per code point.
 final byte[] encoded = new byte[charCount * 4];
 int byteCount = 0;
 for (int pos = 0; pos < charCount; ) {
  final int codePoint = str.codePointAt(pos);
  pos += Character.charCount(codePoint); // 2 for supplementary, 1 for BMP
  byteCount += writeUtf8(codePoint, encoded, byteCount);
 }
 writeVInt(out, byteCount);
 out.write(encoded, 0, byteCount);
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

/**
 * Encodes {@code str} as UTF-8 and writes it to {@code out}, prefixed with the
 * encoded byte count as a varint ({@code writeVInt}).
 *
 * @param out destination stream
 * @param str string to encode
 * @throws IOException if writing to {@code out} fails
 */
static void toBinaryString(final DataOutput out, final String str)
 throws IOException {
 final int charCount = str.length();
 // Upper bound: every code point expands to at most 4 UTF-8 bytes.
 final byte[] encoded = new byte[charCount * 4];
 int byteCount = 0;
 for (int pos = 0; pos < charCount; ) {
  final int codePoint = str.codePointAt(pos);
  pos += Character.charCount(codePoint); // supplementary code points span two chars
  byteCount += writeUtf8(codePoint, encoded, byteCount);
 }
 writeVInt(out, byteCount);
 out.write(encoded, 0, byteCount);
}

代码示例来源:origin: io.hops/hadoop-common

/**
 * Serializes {@code str} to {@code out}: a varint holding the UTF-8 byte length
 * (via {@code writeVInt}), then the UTF-8 bytes built with {@code writeUtf8}.
 *
 * @param out destination stream
 * @param str string to encode
 * @throws IOException if writing to {@code out} fails
 */
static void toBinaryString(final DataOutput out, final String str)
 throws IOException {
 final int charCount = str.length();
 // 4 bytes is the maximum UTF-8 width of any code point.
 final byte[] encoded = new byte[charCount * 4];
 int byteCount = 0;
 for (int pos = 0; pos < charCount; ) {
  final int codePoint = str.codePointAt(pos);
  pos += Character.charCount(codePoint); // skip both surrogates of a supplementary pair
  byteCount += writeUtf8(codePoint, encoded, byteCount);
 }
 writeVInt(out, byteCount);
 out.write(encoded, 0, byteCount);
}

代码示例来源:origin: org.apache.hadoop/hadoop-streaming

/**
 * Writes {@code str} to {@code out} as a length-prefixed UTF-8 record:
 * {@code writeVInt} emits the byte length, then the raw UTF-8 bytes follow.
 *
 * @param out destination stream
 * @param str string to encode
 * @throws IOException if writing to {@code out} fails
 */
static void toBinaryString(final DataOutput out, final String str)
 throws IOException {
 final int charCount = str.length();
 // Allocate for the worst case of 4 UTF-8 bytes per code point.
 final byte[] encoded = new byte[charCount * 4];
 int byteCount = 0;
 for (int pos = 0; pos < charCount; ) {
  final int codePoint = str.codePointAt(pos);
  pos += Character.charCount(codePoint); // 2 chars when supplementary, otherwise 1
  byteCount += writeUtf8(codePoint, encoded, byteCount);
 }
 writeVInt(out, byteCount);
 out.write(encoded, 0, byteCount);
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Encodes {@code str} as UTF-8 into a scratch buffer via {@code writeUtf8},
 * then writes the byte count as a varint ({@code writeVInt}) followed by the bytes.
 *
 * @param out destination stream
 * @param str string to encode
 * @throws IOException if writing to {@code out} fails
 */
static void toBinaryString(final DataOutput out, final String str)
 throws IOException {
 final int charCount = str.length();
 // No code point needs more than 4 bytes in UTF-8.
 final byte[] encoded = new byte[charCount * 4];
 int byteCount = 0;
 for (int pos = 0; pos < charCount; ) {
  final int codePoint = str.codePointAt(pos);
  pos += Character.charCount(codePoint); // step over the surrogate pair if supplementary
  byteCount += writeUtf8(codePoint, encoded, byteCount);
 }
 writeVInt(out, byteCount);
 out.write(encoded, 0, byteCount);
}

相关文章