This article collects a number of Java code examples for org.apache.hadoop.io.file.tfile.Utils.readString(), showing how Utils.readString() is used in practice. The examples are drawn from selected open-source projects published on platforms such as GitHub, Stack Overflow, and Maven, so they should serve as useful references. Details of the Utils.readString() method are as follows:
Package: org.apache.hadoop.io.file.tfile
Class: Utils
Method: readString
Javadoc: Read a String as a VInt n, followed by n Bytes in Text format.
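Before the project snippets below, here is a minimal round-trip sketch of where readString() fits: a string is encoded as its VInt length followed by its bytes in Text (UTF-8) form, and readString() decodes it from any DataInput. The use of the companion Utils.writeString() and of an in-memory byte stream is an assumption made for illustration and is not taken from the projects below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.file.tfile.Utils;

public class ReadStringDemo {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    // Assumed counterpart: writes the VInt length, then the string bytes.
    try (DataOutputStream out = new DataOutputStream(bytes)) {
      Utils.writeString(out, "gz"); // e.g. a compression algorithm name
    }
    // readString() works against any DataInput, not just TFile internals.
    try (DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
      String name = Utils.readString(in);
      System.out.println(name); // prints "gz"
    }
  }
}

In the snippets that follow, readString() is used in the same way: TFile's internal readers pull back names (meta block names, compression algorithm names, comparator names) that were written with this VInt-prefixed encoding.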
Code example source: org.apache.hadoop/hadoop-common
public MetaIndexEntry(DataInput in) throws IOException {
  // The full meta block name is stored as a VInt-length-prefixed string.
  String fullMetaName = Utils.readString(in);
  if (fullMetaName.startsWith(defaultPrefix)) {
    metaName =
        fullMetaName.substring(defaultPrefix.length(), fullMetaName.length());
  } else {
    throw new IOException("Corrupted Meta region Index");
  }
  // The compression algorithm name is stored the same way.
  compressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  region = new BlockRegion(in);
}
Code example source: org.apache.hadoop/hadoop-common
public DataIndex(DataInput in) throws IOException {
  // Default compression algorithm name, stored as a VInt-prefixed string.
  defaultCompressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  // Number of block regions, followed by the regions themselves.
  int n = Utils.readVInt(in);
  listRegions = new ArrayList<BlockRegion>(n);
  for (int i = 0; i < n; i++) {
    BlockRegion region = new BlockRegion(in);
    listRegions.add(region);
  }
}
Code example source: org.apache.hadoop/hadoop-common
public TFileMeta(DataInput in) throws IOException {
  version = new Version(in);
  if (!version.compatibleWith(TFile.API_VERSION)) {
    throw new RuntimeException("Incompatible TFile fileVersion.");
  }
  recordCount = Utils.readVLong(in);
  // The comparator is identified by a name read with Utils.readString().
  strComparator = Utils.readString(in);
  comparator = makeComparator(strComparator);
}
Code example source: ch.cern.hadoop/hadoop-common
public MetaIndexEntry(DataInput in) throws IOException {
  String fullMetaName = Utils.readString(in);
  if (fullMetaName.startsWith(defaultPrefix)) {
    metaName =
        fullMetaName.substring(defaultPrefix.length(), fullMetaName.length());
  } else {
    throw new IOException("Corrupted Meta region Index");
  }
  compressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  region = new BlockRegion(in);
}
Code example source: io.hops/hadoop-common
public MetaIndexEntry(DataInput in) throws IOException {
  String fullMetaName = Utils.readString(in);
  if (fullMetaName.startsWith(defaultPrefix)) {
    metaName =
        fullMetaName.substring(defaultPrefix.length(), fullMetaName.length());
  } else {
    throw new IOException("Corrupted Meta region Index");
  }
  compressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  region = new BlockRegion(in);
}
Code example source: com.facebook.hadoop/hadoop-core
public MetaIndexEntry(DataInput in) throws IOException {
  String fullMetaName = Utils.readString(in);
  if (fullMetaName.startsWith(defaultPrefix)) {
    metaName =
        fullMetaName.substring(defaultPrefix.length(), fullMetaName.length());
  } else {
    throw new IOException("Corrupted Meta region Index");
  }
  compressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  region = new BlockRegion(in);
}
Code example source: com.github.jiayuhan-it/hadoop-common
public MetaIndexEntry(DataInput in) throws IOException {
  String fullMetaName = Utils.readString(in);
  if (fullMetaName.startsWith(defaultPrefix)) {
    metaName =
        fullMetaName.substring(defaultPrefix.length(), fullMetaName.length());
  } else {
    throw new IOException("Corrupted Meta region Index");
  }
  compressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  region = new BlockRegion(in);
}
Code example source: org.apache.apex/malhar-library
public MetaIndexEntry(DataInput in) throws IOException {
  String fullMetaName = Utils.readString(in);
  if (fullMetaName.startsWith(defaultPrefix)) {
    metaName =
        fullMetaName.substring(defaultPrefix.length(), fullMetaName.length());
  } else {
    throw new IOException("Corrupted Meta region Index");
  }
  compressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  region = new BlockRegion(in);
}
Code example source: io.hops/hadoop-common
public DataIndex(DataInput in) throws IOException {
  defaultCompressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  int n = Utils.readVInt(in);
  listRegions = new ArrayList<BlockRegion>(n);
  for (int i = 0; i < n; i++) {
    BlockRegion region = new BlockRegion(in);
    listRegions.add(region);
  }
}
Code example source: com.facebook.hadoop/hadoop-core
public DataIndex(DataInput in) throws IOException {
  defaultCompressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  int n = Utils.readVInt(in);
  listRegions = new ArrayList<BlockRegion>(n);
  for (int i = 0; i < n; i++) {
    BlockRegion region = new BlockRegion(in);
    listRegions.add(region);
  }
}
Code example source: org.apache.apex/malhar-library
public DataIndex(DataInput in) throws IOException {
  defaultCompressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  int n = Utils.readVInt(in);
  listRegions = new ArrayList<BlockRegion>(n);
  for (int i = 0; i < n; i++) {
    BlockRegion region = new BlockRegion(in);
    listRegions.add(region);
  }
}
Code example source: com.github.jiayuhan-it/hadoop-common
public DataIndex(DataInput in) throws IOException {
  defaultCompressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  int n = Utils.readVInt(in);
  listRegions = new ArrayList<BlockRegion>(n);
  for (int i = 0; i < n; i++) {
    BlockRegion region = new BlockRegion(in);
    listRegions.add(region);
  }
}
Code example source: ch.cern.hadoop/hadoop-common
public DataIndex(DataInput in) throws IOException {
  defaultCompressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  int n = Utils.readVInt(in);
  listRegions = new ArrayList<BlockRegion>(n);
  for (int i = 0; i < n; i++) {
    BlockRegion region = new BlockRegion(in);
    listRegions.add(region);
  }
}
Code example source: io.prestosql.hadoop/hadoop-apache
public DataIndex(DataInput in) throws IOException {
  defaultCompressionAlgorithm =
      Compression.getCompressionAlgorithmByName(Utils.readString(in));
  int n = Utils.readVInt(in);
  listRegions = new ArrayList<BlockRegion>(n);
  for (int i = 0; i < n; i++) {
    BlockRegion region = new BlockRegion(in);
    listRegions.add(region);
  }
}
Code example source: ch.cern.hadoop/hadoop-common
public TFileMeta(DataInput in) throws IOException {
  version = new Version(in);
  if (!version.compatibleWith(TFile.API_VERSION)) {
    throw new RuntimeException("Incompatible TFile fileVersion.");
  }
  recordCount = Utils.readVLong(in);
  strComparator = Utils.readString(in);
  comparator = makeComparator(strComparator);
}
Code example source: io.prestosql.hadoop/hadoop-apache
public TFileMeta(DataInput in) throws IOException {
  version = new Version(in);
  if (!version.compatibleWith(TFile.API_VERSION)) {
    throw new RuntimeException("Incompatible TFile fileVersion.");
  }
  recordCount = Utils.readVLong(in);
  strComparator = Utils.readString(in);
  comparator = makeComparator(strComparator);
}
Code example source: io.hops/hadoop-common
public TFileMeta(DataInput in) throws IOException {
  version = new Version(in);
  if (!version.compatibleWith(TFile.API_VERSION)) {
    throw new RuntimeException("Incompatible TFile fileVersion.");
  }
  recordCount = Utils.readVLong(in);
  strComparator = Utils.readString(in);
  comparator = makeComparator(strComparator);
}
Code example source: com.github.jiayuhan-it/hadoop-common
public TFileMeta(DataInput in) throws IOException {
  version = new Version(in);
  if (!version.compatibleWith(TFile.API_VERSION)) {
    throw new RuntimeException("Incompatible TFile fileVersion.");
  }
  recordCount = Utils.readVLong(in);
  strComparator = Utils.readString(in);
  comparator = makeComparator(strComparator);
}
Code example source: com.facebook.hadoop/hadoop-core
public TFileMeta(DataInput in) throws IOException {
  version = new Version(in);
  if (!version.compatibleWith(TFile.API_VERSION)) {
    throw new RuntimeException("Incompatible TFile fileVersion.");
  }
  recordCount = Utils.readVLong(in);
  strComparator = Utils.readString(in);
  comparator = makeComparator(strComparator);
}
Code example source: org.apache.apex/malhar-library
public TFileMeta(DataInput in) throws IOException {
  version = new Version(in);
  if (!version.compatibleWith(DTFile.API_VERSION)) {
    throw new RuntimeException("Incompatible TFile fileVersion.");
  }
  recordCount = Utils.readVLong(in);
  strComparator = Utils.readString(in);
  comparator = makeComparator(strComparator);
}
The content above is collected from the Internet; if it infringes your rights, please contact the author to have it removed.