Usage of the org.apache.hadoop.hdfs.protocol.Block.write() method, with code examples


This article collects a number of Java code examples of the org.apache.hadoop.hdfs.protocol.Block.write() method and shows how Block.write() is used in practice. The examples are drawn from selected open-source projects published on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. The details of Block.write() are as follows:
Package path: org.apache.hadoop.hdfs.protocol.Block
Class name: Block
Method name: write

Block.write overview

No description is provided by the source.
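For context, Block implements Hadoop's Writable interface: write(DataOutput) serializes the block's fields (block ID, length in bytes, generation stamp) to a stream, and readFields(DataInput) reads them back, which is why the snippets below can embed a Block inside larger serialized structures. Below is a minimal round-trip sketch; the class name BlockWriteExample and the values 1000L, 64L and 1L are arbitrary illustration choices, not taken from any of the quoted projects.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hdfs.protocol.Block;

public class BlockWriteExample {
 public static void main(String[] args) throws IOException {
  // Arbitrary example values: block ID, length in bytes, generation stamp.
  Block original = new Block(1000L, 64L, 1L);

  // Serialize the block with Block.write(DataOutput).
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(buffer);
  original.write(out);
  out.flush();

  // Read the same bytes back into a fresh Block with readFields(DataInput).
  Block copy = new Block();
  DataInputStream in =
    new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
  copy.readFields(in);

  System.out.println("block id: " + copy.getBlockId());
  System.out.println("num bytes: " + copy.getNumBytes());
  System.out.println("generation stamp: " + copy.getGenerationStamp());
 }
}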

Code examples

Code example source: org.apache.hadoop/hadoop-hdfs

private static void writeBlocks(final Block[] blocks,
  final DataOutput out) throws IOException {
 if (blocks == null) {
  out.writeInt(0);
 } else {
  // Block count first, then each block's serialized fields.
  out.writeInt(blocks.length);
  for (Block blk : blocks) {
   blk.write(out);
  }
 }
}

Code example source: com.facebook.hadoop/hadoop-core

@Override
public void write(DataOutput out) throws IOException {
 block.write(out);
 out.writeBoolean(wasRecoveredOnStartup);
}

Code example source: io.prestosql.hadoop/hadoop-apache

private static void writeBlocks(final Block[] blocks,
  final DataOutput out) throws IOException {
 if (blocks == null) {
  out.writeInt(0);
 } else {
  out.writeInt(blocks.length);
  for (Block blk : blocks) {
   blk.write(out);
  }
 }
}

Code example source: com.facebook.hadoop/hadoop-core

/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
 super.write(out);
 out.writeLong(lastScanTime);
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
 super.write(out);
 out.writeLong(lastScanTime);
}

Code example source: ch.cern.hadoop/hadoop-hdfs

private static void writeBlocks(final Block[] blocks,
  final DataOutput out) throws IOException {
 if (blocks == null) {
  out.writeInt(0);
 } else {
  out.writeInt(blocks.length);
  for (Block blk : blocks) {
   blk.write(out);
  }
 }
}

Code example source: com.facebook.hadoop/hadoop-core

public void write(DataOutput out) throws IOException {
 // Corrupt flag and the block's offset within the file, followed by
 // the block itself and its datanode locations.
 out.writeBoolean(corrupt);
 out.writeLong(offset);
 b.write(out);
 out.writeInt(locs.length);
 for (int i = 0; i < locs.length; i++) {
  locs[i].write(out);
 }
}

Code example source: com.facebook.hadoop/hadoop-core

@Override
public void write(DataOutput out) throws IOException {
 this.block.write(out);
 Text.writeString(out, this.delHints);
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

public void write(DataOutput out) throws IOException {
 out.writeBoolean(corrupt);
 out.writeLong(offset);
 b.write(out);
 out.writeInt(locs.length);
 for (int i = 0; i < locs.length; i++) {
  locs[i].write(out);
 }
}

Code example source: com.facebook.hadoop/hadoop-core

public void write(DataOutput out) throws IOException {
 super.write(out);
 // Block count, then each block, then the lease holder's name.
 int nblocks = (blocks == null) ? 0 : blocks.length;
 out.writeInt(nblocks);
 for (int i = 0; i < nblocks; i++) {
  blocks[i].write(out);
 }
 out.writeUTF(leaseHolder);
}

Code example source: com.facebook.hadoop/hadoop-core

@Override
public void write(DataOutput out) throws IOException {
 super.write(out);
 Text.writeString(out, this.delHints);
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

public void write(DataOutput out) throws IOException {
 super.write(out);
 // The blocks, followed by a two-dimensional array of targets
 // (one array of target nodes per block).
 out.writeInt(blocks.length);
 for (int i = 0; i < blocks.length; i++) {
  blocks[i].write(out);
 }
 out.writeInt(targets.length);
 for (int i = 0; i < targets.length; i++) {
  out.writeInt(targets[i].length);
  for (int j = 0; j < targets[i].length; j++) {
   targets[i][j].write(out);
  }
 }
}

Code example source: com.facebook.hadoop/hadoop-core

public void write(DataOutput out) throws IOException {
 super.write(out);
 out.writeInt(blocks.length);
 for (int i = 0; i < blocks.length; i++) {
  blocks[i].write(out);
 }
 out.writeInt(targets.length);
 for (int i = 0; i < targets.length; i++) {
  out.writeInt(targets[i].length);
  for (int j = 0; j < targets[i].length; j++) {
   targets[i][j].write(out);
  }
 }
}

Code example source: com.facebook.hadoop/hadoop-core

/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
 super.write(out);
 Text.writeString(out, localBlockPath);
 Text.writeString(out, localMetaPath);
}

Code example source: com.facebook.hadoop/hadoop-core

/** serialization method */
public void write(DataOutput out) throws IOException {
 block.write(out);
 WritableUtils.writeVInt(out, datanodeIDs.length); // variable length int
 for (String id : datanodeIDs) {
  Text.writeString(out, id);
 }
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/** serialization method */
public void write(DataOutput out) throws IOException {
 block.write(out);
 WritableUtils.writeVInt(out, datanodeIDs.length); // variable length int
 for (String id : datanodeIDs) {
  Text.writeString(out, id);
 }
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

out.writeInt(blocks.length);
for (Block blk : blocks)
 blk.write(out);
FILE_PERM.fromShort(fileINode.getFsPermissionShort());
PermissionStatus.write(out, fileINode.getUserName(),

Code example source: com.facebook.hadoop/hadoop-core

out.writeInt(blocks.length);
for (Block blk : blocks)
 blk.write(out);
filePerm.fromShort(fileINode.getFsPermissionShort());
PermissionStatus.write(out, fileINode.getUserName(),
