org.apache.hadoop.io.Text.writeString()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(6.1k)|赞(0)|评价(0)|浏览(114)

本文整理了Java中org.apache.hadoop.io.Text.writeString()方法的一些代码示例,展示了Text.writeString()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Text.writeString()方法的具体详情如下:
包路径:org.apache.hadoop.io.Text
类名称:Text
方法名:writeString

Text.writeString介绍

[英]Write a UTF8 encoded string to out
[中]将UTF8编码的字符串写入输出

代码示例

代码示例来源:origin: Qihoo360/XLearning

@Override
public void write(DataOutput out) throws IOException {
  // Emit both location strings in a fixed order so the matching
  // readFields implementation can restore them symmetrically.
  Text.writeString(out, localLocation);
  Text.writeString(out, dfsLocation);
}

代码示例来源:origin: apache/incubator-gobblin

@Override
public void write(DataOutput out) throws IOException {
  // This split carries no per-instance state; a fixed marker string
  // is all that needs to go on the wire.
  Text.writeString(out, "split");
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Writes the {@link Enum#name()} of the given constant to the stream
 * as a UTF-8 encoded string.
 *
 * @param out     the {@link DataOutput} stream to write to
 * @param enumVal the enum constant whose name is serialized
 * @throws IOException if writing to {@code out} fails
 */
public static void writeEnum(DataOutput out,  Enum<?> enumVal) 
 throws IOException{
  final String name = enumVal.name();
  Text.writeString(out, name);
}
/**

代码示例来源:origin: apache/kylin

public void write(DataOutput out) throws IOException {
  // Kafka slice descriptor: broker list and topic as text, then the
  // partition id and its [offsetStart, offsetEnd] window, in read order.
  Text.writeString(out, brokers);
  Text.writeString(out, topic);
  out.writeInt(partition);
  out.writeLong(offsetStart);
  out.writeLong(offsetEnd);
}

代码示例来源:origin: Qihoo360/XLearning

@Override
public void write(DataOutput out) throws IOException {
  // Alias first, then a count prefix so the reader knows how many
  // path strings follow.
  Text.writeString(out, aliasName);
  out.writeInt(paths.size());
  for (Path path : paths) {
    Text.writeString(out, path.toString());
  }
}

代码示例来源:origin: apache/incubator-gobblin

@Override
public void write(DataOutput out) throws IOException {
  // Ownership metadata as text, followed by the permission bits,
  // which serialize themselves.
  Text.writeString(out, this.owner);
  Text.writeString(out, this.group);
  this.fsPermission.write(out);
}

代码示例来源:origin: apache/avro

public void write(DataOutput out) throws IOException {
  // Record the concrete split class name so the reader can
  // re-instantiate it reflectively before delegating deserialization
  // of the split's own state.
  Text.writeString(out, inputSplitClass.getName());
  inputSplit.write(out);
  Text.writeString(out, inputFormatClass.getName());
  Text.writeString(out, mapperClass.getName());
  Text.writeString(out, schema.toString());
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Serialize a {@link PermissionStatus} from its base components.
 * Both name strings are bounded by {@code Text.DEFAULT_MAX_LEN} so a
 * corrupt stream cannot force an unbounded read on the other side.
 */
public static void write(DataOutput out,
                         String username,
                         String groupname,
                         FsPermission permission) throws IOException {
  final int maxLen = Text.DEFAULT_MAX_LEN;
  Text.writeString(out, username, maxLen);
  Text.writeString(out, groupname, maxLen);
  permission.write(out);
}

代码示例来源:origin: Qihoo360/XLearning

@Override
public void write(DataOutput out) throws IOException {
  // Container status enum and the nested saved-status object first,
  // then three free-form text fields, all in read order.
  WritableUtils.writeEnum(out, this.xlearningContainerStatus);
  interResultSavedStatus.write(out);
  Text.writeString(out, this.progressLog);
  Text.writeString(out, this.containersStartTime);
  Text.writeString(out, this.containersFinishTime);
}

代码示例来源:origin: apache/flink

@Override
public void write(DataOutput out) throws IOException {
  // Serialize all properties: a count prefix, then key/value string
  // pairs, each followed by the compressed array of sources that last
  // updated that key.
  Properties props = getProps();
  WritableUtils.writeVInt(out, props.size());
  for(Entry<Object, Object> item: props.entrySet()) {
   org.apache.hadoop.io.Text.writeString(out, (String) item.getKey());
   org.apache.hadoop.io.Text.writeString(out, (String) item.getValue());
   // Guard against a null updatingResource map, matching the other
   // variants of this serializer; writeCompressedStringArray accepts
   // a null argument.
   WritableUtils.writeCompressedStringArray(out, updatingResource != null ?
     updatingResource.get(item.getKey()) : null);
  }
}

代码示例来源:origin: apache/flink

@Override
public void write(DataOutput out) throws IOException {
  // Count-prefixed dump of every property, each entry written as a
  // key/value string pair plus its (optional) update-source array.
  final Properties props = getProps();
  WritableUtils.writeVInt(out, props.size());
  for (Map.Entry<Object, Object> entry : props.entrySet()) {
    final String key = (String) entry.getKey();
    org.apache.hadoop.io.Text.writeString(out, key);
    org.apache.hadoop.io.Text.writeString(out, (String) entry.getValue());
    // Source tracking may be absent; writeCompressedStringArray
    // tolerates null.
    WritableUtils.writeCompressedStringArray(
        out, updatingResource == null ? null : updatingResource.get(key));
  }
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Serializes the AccessControlList as its single canonical ACL string.
 */
@Override
public void write(DataOutput out) throws IOException {
  Text.writeString(out, getAclString());
}

代码示例来源:origin: Qihoo360/XLearning

@Override
public void write(DataOutput out) throws IOException {
  // Type tag first so the reader knows how to interpret the message
  // text that follows.
  WritableUtils.writeEnum(out, this.logType);
  Text.writeString(out, message);
}

代码示例来源:origin: elastic/elasticsearch-hadoop

public void write(DataOutput out) throws IOException {
  // Persist the path as text, then let the wrapped split serialize
  // its own state.
  final String location = path.toString();
  Text.writeString(out, location);
  delegate.write(out);
}

代码示例来源:origin: Qihoo360/XLearning

@Override
public void write(DataOutput out) throws IOException {
  // The entire state round-trips through the toString representation.
  Text.writeString(out, this.toString());
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

@Override
public void write(DataOutput out) throws IOException {
  // Write the property count, then each key/value pair and the
  // compressed array of sources that last set the key (if tracked).
  final Properties props = getProps();
  WritableUtils.writeVInt(out, props.size());
  for (Map.Entry<Object, Object> prop : props.entrySet()) {
    final String name = (String) prop.getKey();
    org.apache.hadoop.io.Text.writeString(out, name);
    org.apache.hadoop.io.Text.writeString(out, (String) prop.getValue());
    // A null source map yields a null array, which is accepted.
    WritableUtils.writeCompressedStringArray(
        out, updatingResource == null ? null : updatingResource.get(name));
  }
}

代码示例来源:origin: apache/hive

/** Writes the file header in either the legacy or new-magic layout. */
void writeFileHeader() throws IOException {
  if (!useNewMagic) {
    // Legacy layout: buffer class names precede the flag booleans.
    Text.writeString(out, KeyBuffer.class.getName());
    Text.writeString(out, ValueBuffer.class.getName());
    out.writeBoolean(isCompressed());
    out.writeBoolean(false);
  } else {
    out.writeBoolean(isCompressed());
  }
  if (isCompressed()) {
    // Codec class name lets the reader instantiate the decompressor.
    Text.writeString(out, codec.getClass().getName());
  }
  metadata.write(out);
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/** Writes the header block and flushes it to the underlying stream. */
private void writeFileHeader() 
 throws IOException {
  out.write(VERSION);
  Text.writeString(out, keyClass.getName());
  Text.writeString(out, valClass.getName());

  out.writeBoolean(this.isCompressed());
  out.writeBoolean(this.isBlockCompressed());

  if (this.isCompressed()) {
    // Codec class name is only present for compressed files.
    Text.writeString(out, codec.getClass().getName());
  }
  this.metadata.write(out);
  out.write(sync);   // sync marker bytes that delimit records
  out.flush();       // push the header out immediately
}

代码示例来源:origin: apache/drill

/** Writes the file header for the legacy or new-magic format. */
void writeFileHeader() throws IOException {
  if (useNewMagic) {
    out.writeBoolean(isCompressed());
  } else {
    // Legacy readers expect the buffer class names up front.
    Text.writeString(out, KeyBuffer.class.getName());
    Text.writeString(out, ValueBuffer.class.getName());
    out.writeBoolean(isCompressed());
    out.writeBoolean(false);
  }
  if (isCompressed()) {
    // Name of the codec class so the reader can decompress.
    Text.writeString(out, codec.getClass().getName());
  }
  metadata.write(out);
}

代码示例来源:origin: mahmoudparsian/data-algorithms-book

/**
 * Serializes this pair.
 *
 * @param out where to write the raw byte representation
 * @throws IOException if writing to {@code out} fails
 */
@Override
public void write(DataOutput out) throws IOException {
  // Left element first — readFields must consume in the same order.
  Text.writeString(out, leftElement);
  Text.writeString(out, rightElement);
}

相关文章