org.apache.hadoop.hdfs.protocol.Block.readFields(): usage and code examples


This article collects Java code examples for the org.apache.hadoop.hdfs.protocol.Block.readFields() method, showing how it is used in practice. The examples are drawn from selected open-source projects published on GitHub, Stack Overflow, Maven, and similar platforms, so they carry reasonable reference value. Details of Block.readFields() are as follows:
Package: org.apache.hadoop.hdfs.protocol
Class: Block
Method: readFields

About Block.readFields

Block implements Hadoop's Writable interface: readFields(DataInput) deserializes a block's fields (block ID, length in bytes, and generation stamp) from a stream, mirroring Block.write(DataOutput).
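
Before the project-sourced examples below, here is a minimal round-trip sketch: a Block is serialized with write() and restored with readFields(). It assumes a Hadoop line in which Block implements Writable (as in the artifacts quoted below); the class name BlockRoundTrip and the field values are illustrative, not taken from any of the quoted projects.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hdfs.protocol.Block;

public class BlockRoundTrip {
  public static void main(String[] args) throws IOException {
    // A block is identified by its ID, length in bytes, and generation stamp.
    Block original = new Block(1L, 1024L, 100L);

    // Serialize the block's fields with write(DataOutput).
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(bytes)) {
      original.write(out);
    }

    // Restore them into a fresh Block with readFields(DataInput).
    Block copy = new Block();
    try (DataInputStream in = new DataInputStream(
        new ByteArrayInputStream(bytes.toByteArray()))) {
      copy.readFields(in);
    }

    // Prints: 1 1024 100
    System.out.println(copy.getBlockId() + " "
        + copy.getNumBytes() + " " + copy.getGenerationStamp());
  }
}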

Code examples

Code example source: org.apache.hadoop/hadoop-hdfs

private static Block[] readBlocks(
  DataInputStream in,
  int logVersion) throws IOException {
 int numBlocks = in.readInt();
 if (numBlocks < 0) {
  throw new IOException("invalid negative number of blocks");
 } else if (numBlocks > MAX_BLOCKS) {
  throw new IOException("invalid number of blocks: " + numBlocks +
    ".  The maximum number of blocks per file is " + MAX_BLOCKS);
 }
 Block[] blocks = new Block[numBlocks];
 for (int i = 0; i < numBlocks; i++) {
  Block blk = new Block();
  blk.readFields(in);
  blocks[i] = blk;
 }
 return blocks;
}

Code example source: org.apache.hadoop/hadoop-hdfs

Block blk = new Block();
int i = 0;
for (; i < numBlocks - 1; i++) {
  blk.readFields(in);
  blocksContiguous[i] = new BlockInfoContiguous(blk, blockReplication);
}
// the last block is under construction
if (numBlocks > 0) {
  blk.readFields(in);
  blocksContiguous[i] = new BlockInfoContiguous(blk, blockReplication);
  blocksContiguous[i].convertToBlockUnderConstruction(
      BlockUCState.UNDER_CONSTRUCTION, null);
}
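This fragment appears to come from the fsimage deserialization path in FSImageSerialization: every block except the last is read back as a completed BlockInfoContiguous, and the final block, if the file has any, is re-read and converted into an under-construction block. (The enclosing method is not shown in the excerpt, so this reading is inferred from the surrounding Hadoop source.)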

Code example source: com.facebook.hadoop/hadoop-core

@Override
public void readFields(DataInput in) throws IOException {
 block.readFields(in);
 wasRecoveredOnStartup = in.readBoolean();
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  lastScanTime = in.readLong();
}

Code example source: com.facebook.hadoop/hadoop-core

/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  lastScanTime = in.readLong();
}

Code example source: com.facebook.hadoop/hadoop-core

static private Block[] readBlocks(DataInputStream in) throws IOException {
 int numBlocks = in.readInt();
 Block[] blocks = new Block[numBlocks];
 for (int i = 0; i < numBlocks; i++) {
  blocks[i] = new Block();
  blocks[i].readFields(in);
 }
 return blocks;
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

static private Block[] readBlocks(DataInputStream in) throws IOException {
  int numBlocks = in.readInt();
  Block[] blocks = new Block[numBlocks];
  for (int i = 0; i < numBlocks; i++) {
    blocks[i] = new Block();
    blocks[i].readFields(in);
  }
  return blocks;
}

Code example source: ch.cern.hadoop/hadoop-hdfs

private static Block[] readBlocks(
  DataInputStream in,
  int logVersion) throws IOException {
 int numBlocks = in.readInt();
 if (numBlocks < 0) {
  throw new IOException("invalid negative number of blocks");
 } else if (numBlocks > MAX_BLOCKS) {
  throw new IOException("invalid number of blocks: " + numBlocks +
    ".  The maximum number of blocks per file is " + MAX_BLOCKS);
 }
 Block[] blocks = new Block[numBlocks];
 for (int i = 0; i < numBlocks; i++) {
  Block blk = new Block();
  blk.readFields(in);
  blocks[i] = blk;
 }
 return blocks;
}

Code example source: io.prestosql.hadoop/hadoop-apache

private static Block[] readBlocks(
  DataInputStream in,
  int logVersion) throws IOException {
 int numBlocks = in.readInt();
 if (numBlocks < 0) {
  throw new IOException("invalid negative number of blocks");
 } else if (numBlocks > MAX_BLOCKS) {
  throw new IOException("invalid number of blocks: " + numBlocks +
    ".  The maximum number of blocks per file is " + MAX_BLOCKS);
 }
 Block[] blocks = new Block[numBlocks];
 for (int i = 0; i < numBlocks; i++) {
  Block blk = new Block();
  blk.readFields(in);
  blocks[i] = blk;
 }
 return blocks;
}

Code example source: com.facebook.hadoop/hadoop-core

public void readFields(DataInput in) throws IOException {
 super.readFields(in);
 int nblocks = in.readInt();
 blocks = new Block[nblocks];
 for (int i = 0; i < nblocks; i++) {
  blocks[i] = new Block();
  blocks[i].readFields(in);
 }
 leaseHolder = in.readUTF();
}

Code example source: com.facebook.hadoop/hadoop-core

public void readFields(DataInput in) throws IOException {
 this.corrupt = in.readBoolean();
 offset = in.readLong();
 this.b = new Block();
 b.readFields(in);
 int count = in.readInt();
 this.locs = new DatanodeInfo[count];
 for (int i = 0; i < locs.length; i++) {
  locs[i] = new DatanodeInfo();
  locs[i].readFields(in);
 }
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

public void readFields(DataInput in) throws IOException {
  this.corrupt = in.readBoolean();
  offset = in.readLong();
  this.b = new Block();
  b.readFields(in);
  int count = in.readInt();
  this.locs = new DatanodeInfo[count];
  for (int i = 0; i < locs.length; i++) {
    locs[i] = new DatanodeInfo();
    locs[i].readFields(in);
  }
}

Code example source: com.facebook.hadoop/hadoop-core

/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  localBlockPath = Text.readString(in);
  localMetaPath = Text.readString(in);
}

Code example source: com.facebook.hadoop/hadoop-core

/** deserialization method */
public void readFields(DataInput in) throws IOException {
 block.readFields(in);
 int len = WritableUtils.readVInt(in); // variable length integer
 datanodeIDs = new String[len];
 for(int i=0; i<len; i++) {
  datanodeIDs[i] = Text.readString(in);
 }
}

Code example source: com.facebook.hadoop/hadoop-core

@Override
public void readFields(DataInput in) throws IOException {
 this.block = new Block();
 this.block.readFields(in);
 this.delHints = Text.readString(in);
}

Code example source: com.facebook.hadoop/hadoop-core

@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  this.setDelHints(Text.readString(in));
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/** deserialization method */
public void readFields(DataInput in) throws IOException {
 block.readFields(in);
 int len = WritableUtils.readVInt(in); // variable length integer
 datanodeIDs = new String[len];
 for(int i=0; i<len; i++) {
  datanodeIDs[i] = Text.readString(in);
 }
}

Code example source: com.facebook.hadoop/hadoop-core

public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  this.blocks = new Block[in.readInt()];
  for (int i = 0; i < blocks.length; i++) {
    blocks[i] = new Block();
    blocks[i].readFields(in);
  }

  this.targets = new DatanodeInfo[in.readInt()][];
  for (int i = 0; i < targets.length; i++) {
    this.targets[i] = new DatanodeInfo[in.readInt()];
    for (int j = 0; j < targets[i].length; j++) {
      targets[i][j] = new DatanodeInfo();
      targets[i][j].readFields(in);
    }
  }
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  this.blocks = new Block[in.readInt()];
  for (int i = 0; i < blocks.length; i++) {
    blocks[i] = new Block();
    blocks[i].readFields(in);
  }

  this.targets = new DatanodeInfo[in.readInt()][];
  for (int i = 0; i < targets.length; i++) {
    this.targets[i] = new DatanodeInfo[in.readInt()];
    for (int j = 0; j < targets[i].length; j++) {
      targets[i][j] = new DatanodeInfo();
      targets[i][j].readFields(in);
    }
  }
}

Code example source: com.facebook.hadoop/hadoop-core

private static Block[] readBlocks(
    DataInputStream in,
    int logVersion) throws IOException {
  int numBlocks = in.readInt();
  Block[] blocks = new Block[numBlocks];
  for (int i = 0; i < numBlocks; i++) {
    Block blk = new Block();
    if (logVersion <= -14) {
      blk.readFields(in);
    } else {
      BlockTwo oldblk = new BlockTwo();
      oldblk.readFields(in);
      blk.set(oldblk.blkid, oldblk.len,
          Block.GRANDFATHER_GENERATION_STAMP);
    }
    blocks[i] = blk;
  }
  return blocks;
}
