本文整理了Java中org.apache.hadoop.io.Text.write()
方法的一些代码示例,展示了Text.write()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Text.write()
方法的具体详情如下:
包路径:org.apache.hadoop.io.Text
类名称:Text
方法名:write
[英]serialize; write this object to out; length uses zero-compressed encoding
[中]序列化;将此对象写入 out;长度采用零压缩编码
代码示例来源:origin: apache/hive
/**
 * Serializes this object by delegating entirely to the wrapped {@code value} field.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
public void write(DataOutput out) throws IOException {
  // value's own write() defines the wire format (for Text: vint length + UTF-8 bytes).
  value.write(out);
}
代码示例来源:origin: apache/kylin
/**
 * Serializes this key: a one-byte type tag followed by the raw key payload.
 *
 * @param dataOutput target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput dataOutput) throws IOException {
  // The type byte is written first so the reader knows how to decode rawKey.
  dataOutput.writeByte(typeId);
  rawKey.write(dataOutput);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Serializes the metadata map: an int entry count, then each key/value pair
 * as consecutive {@code Text} writes.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  // Count prefix tells readFields how many pairs follow.
  out.writeInt(this.theMetadata.size());
  // Enhanced-for uses the same iterator as the original explicit loop,
  // so pairs are emitted in identical map-iteration order.
  for (Map.Entry<Text, Text> entry : this.theMetadata.entrySet()) {
    entry.getKey().write(out);
    entry.getValue().write(out);
  }
}
代码示例来源:origin: apache/incubator-gobblin
/**
 * Serializes this task state: job id, task id (both as Hadoop {@code Text}),
 * three timing longs, then the parent class's fields.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  // Identifiers go out as Text (vint length prefix + UTF-8 bytes); a fresh
  // Text per field produces the same bytes as reusing one buffer with set().
  new Text().set(this.jobId); // no-op placeholder removed below — see next lines
  Text jobIdText = new Text();
  jobIdText.set(this.jobId);
  jobIdText.write(out);
  Text taskIdText = new Text();
  taskIdText.set(this.taskId);
  taskIdText.write(out);
  // Fixed-width timing metrics.
  out.writeLong(this.startTime);
  out.writeLong(this.endTime);
  out.writeLong(this.duration);
  // Parent appends its own serialized state last.
  super.write(out);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Serializes this token. Wire order is fixed and mirrored by readFields:
 * vint-length-prefixed identifier bytes, vint-length-prefixed password bytes,
 * then the kind and service Text fields (each self-delimiting).
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  WritableUtils.writeVInt(out, identifier.length);
  out.write(identifier);
  WritableUtils.writeVInt(out, password.length);
  out.write(password);
  // Text.write emits its own zero-compressed length prefix.
  kind.write(out);
  service.write(out);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Writes the token-identifier fields in their canonical wire order:
 * a format version byte, the three principal Text fields, then the
 * variable-length-encoded dates and counters.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@VisibleForTesting
void writeImpl(DataOutput out) throws IOException {
  // Version byte first so readers can reject incompatible formats.
  out.writeByte(VERSION);
  owner.write(out);
  renewer.write(out);
  realUser.write(out);
  WritableUtils.writeVLong(out, issueDate);
  WritableUtils.writeVLong(out, maxDate);
  WritableUtils.writeVInt(out, sequenceNumber);
  WritableUtils.writeVInt(out, masterKeyId);
}
代码示例来源:origin: apache/incubator-gobblin
/**
 * Serializes one state record to the {@link DataOutput}: the state id first
 * (as a Hadoop {@code Text}, empty string when the id is null), then the
 * state's own Writable payload.
 *
 * @param dataOutput output target receiving the serialized data
 * @param state the state to serialize
 * @throws IOException if the underlying write fails
 */
private void addStateToDataOutputStream(DataOutput dataOutput, T state) throws IOException {
  // nullToEmpty guards against a null id, which Text cannot encode.
  Text idText = new Text(Strings.nullToEmpty(state.getId()));
  idText.write(dataOutput);
  state.write(dataOutput);
}
代码示例来源:origin: apache/incubator-gobblin
/**
 * Writes a single state entry to {@code dataOutput}: a {@code Text} header
 * carrying the state id (empty when absent), followed by the state itself.
 *
 * @param dataOutput output target receiving the serialized data
 * @param state the state to serialize
 * @throws IOException if the underlying write fails
 */
private void addStateToDataOutputStream(DataOutput dataOutput, T state) throws IOException {
  // Normalize a null id to "" — Text rejects null strings.
  String safeId = Strings.nullToEmpty(state.getId());
  new Text(safeId).write(dataOutput);
  state.write(dataOutput);
}
代码示例来源:origin: apache/hive
/**
 * Serializes this record: the mandatory {@code data} field, then a boolean
 * presence flag for the optional partition value, followed by the value
 * itself when present.
 *
 * @param dataOutput target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput dataOutput) throws IOException {
  data.write(dataOutput);
  // Presence flag precedes the optional field so readFields knows whether to read it.
  boolean hasPartVal = (partVal != null);
  dataOutput.writeBoolean(hasPartVal);
  if (hasPartVal) {
    partVal.write(dataOutput);
  }
}
代码示例来源:origin: apache/incubator-gobblin
/**
 * Serializes this job state: job name and id (as Hadoop {@code Text}),
 * timing longs, job-state name, task count, then — when requested — every
 * regular and skipped task state, and finally the parent's fields.
 *
 * @param out target to write the serialized bytes to
 * @param writeTasks whether to serialize the individual task states
 * @param writePreviousWorkUnitStates forwarded to the parent's write
 * @throws IOException if the underlying write fails
 */
public void write(DataOutput out, boolean writeTasks, boolean writePreviousWorkUnitStates)
    throws IOException {
  // A single reusable Text buffer carries all string fields.
  Text scratch = new Text();
  scratch.set(this.jobName);
  scratch.write(out);
  scratch.set(this.jobId);
  scratch.write(out);
  out.writeLong(this.startTime);
  out.writeLong(this.endTime);
  out.writeLong(this.duration);
  scratch.set(this.state.name());
  scratch.write(out);
  out.writeInt(this.taskCount);
  if (!writeTasks) {
    // No task payload: a zero count keeps the stream self-describing.
    out.writeInt(0);
  } else {
    // Regular task states first, then skipped ones; the reader only needs the total.
    out.writeInt(this.taskStates.size() + this.skippedTaskStates.size());
    for (TaskState taskState : this.taskStates.values()) {
      taskState.write(out);
    }
    for (TaskState skipped : this.skippedTaskStates.values()) {
      skipped.write(out);
    }
  }
  super.write(out, writePreviousWorkUnitStates);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Stores all tokens and secret keys to {@code out}. Wire format: a vint token
 * count followed by alias/token pairs, then a vint secret-key count followed
 * by alias plus vint-length-prefixed key bytes.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  // Tokens first.
  WritableUtils.writeVInt(out, tokenMap.size());
  for (Map.Entry<Text, Token<? extends TokenIdentifier>> entry : tokenMap.entrySet()) {
    entry.getKey().write(out);
    entry.getValue().write(out);
  }
  // Secret keys second.
  WritableUtils.writeVInt(out, secretKeysMap.size());
  for (Map.Entry<Text, byte[]> entry : secretKeysMap.entrySet()) {
    entry.getKey().write(out);
    byte[] secret = entry.getValue();
    WritableUtils.writeVInt(out, secret.length);
    out.write(secret);
  }
}
代码示例来源:origin: apache/hive
/**
 * Serializes this record: the mandatory {@code data} field, then two optional
 * fields ({@code partVal}, {@code recId}), each preceded by a boolean
 * presence flag.
 *
 * @param dataOutput target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput dataOutput) throws IOException {
  data.write(dataOutput);
  // Each optional field is guarded by a flag so the reader can skip absent ones.
  boolean hasPartVal = (partVal != null);
  dataOutput.writeBoolean(hasPartVal);
  if (hasPartVal) {
    partVal.write(dataOutput);
  }
  boolean hasRecId = (recId != null);
  dataOutput.writeBoolean(hasRecId);
  if (hasRecId) {
    recId.write(dataOutput);
  }
}
代码示例来源:origin: apache/hive
/**
 * Serializes the two optional fields {@code partVal} and {@code recId}, each
 * written as a boolean presence flag followed by the value when non-null.
 *
 * @param dataOutput target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput dataOutput) throws IOException {
  // Flag-then-value keeps the stream self-describing for nullable fields.
  boolean hasPartVal = (partVal != null);
  dataOutput.writeBoolean(hasPartVal);
  if (hasPartVal) {
    partVal.write(dataOutput);
  }
  boolean hasRecId = (recId != null);
  dataOutput.writeBoolean(hasRecId);
  if (hasRecId) {
    recId.write(dataOutput);
  }
}
@Override
代码示例来源:origin: apache/incubator-gobblin
/**
 * Round-trip check: serialize each sample string with Hadoop's
 * {@code Text.write}, then verify our {@code TextSerializer} reads the same
 * string back.
 */
@Test
public void testDeserialize() throws IOException {
  for (String expected : textsToSerialize) {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // try-with-resources closes the stream just as the original explicit close() did.
    try (DataOutputStream out = new DataOutputStream(buffer)) {
      Text hadoopText = new Text();
      hadoopText.set(expected);
      hadoopText.write(out);
    }
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    String actual = TextSerializer.readTextAsString(in);
    Assert.assertEquals(actual, expected);
  }
}
}
代码示例来源:origin: mahmoudparsian/data-algorithms-book
/**
 * Serializes this composite key field by field; readFields must mirror this
 * exact order.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  // Each component is a Writable and emits its own self-delimiting encoding.
  yearMonth.write(out);
  day.write(out);
  temperature.write(out);
}
代码示例来源:origin: stackoverflow.com
// Demonstrates that Hadoop Text cannot carry a null String.
// NOTE(review): the NullPointerException is most likely thrown already by
// new Text((String) null) — the constructor delegates to set(String), which
// dereferences its argument — not by the later write() call; verify against
// the Text source before relying on the comment below.
Text text = new Text((String)null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
text.write(new DataOutputStream(out)); // throws NullPointerException
System.out.println(Arrays.toString(out.toByteArray()));
代码示例来源:origin: apache/accumulo
/**
 * Serializes this extent: the table id as a {@code Text}, then the optional
 * end row and previous end row, each preceded by a boolean presence flag.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  // Table id goes out as raw UTF-8 wrapped in a Text (vint length prefix).
  new Text(getTableId().getUtf8()).write(out);
  // Optional rows: flag first, value only when present.
  if (getEndRow() == null) {
    out.writeBoolean(false);
  } else {
    out.writeBoolean(true);
    getEndRow().write(out);
  }
  if (getPrevEndRow() == null) {
    out.writeBoolean(false);
  } else {
    out.writeBoolean(true);
    getPrevEndRow().write(out);
  }
}
代码示例来源:origin: apache/accumulo
/**
 * Persists this summary block: the name (UTF), the optional first row
 * (vint 0 marks absence), then the summary list as a vint count followed by
 * (lastRow, count, payload) triples.
 *
 * @param dos destination stream
 * @param symbolTable shared symbol table passed through to saveSummary
 * @throws IOException if the underlying write fails
 */
public void save(DataOutputStream dos, HashMap<String,Integer> symbolTable) throws IOException {
  // Any buffered-but-unflushed rows would be lost; refuse to save in that state.
  Preconditions.checkState(count == 0);
  dos.writeUTF(name);
  // Text.write never emits a zero-length vint for a row key here, so a bare
  // vint 0 unambiguously marks "no first row".
  if (firstRow != null) {
    firstRow.write(dos);
  } else {
    WritableUtils.writeVInt(dos, 0);
  }
  // Summary entries.
  WritableUtils.writeVInt(dos, summaries.size());
  for (SummaryInfo info : summaries) {
    info.getLastRow().write(dos);
    WritableUtils.writeVInt(dos, info.count);
    saveSummary(dos, symbolTable, info.summary);
  }
}
代码示例来源:origin: apache/accumulo
// Fragment (incomplete in this excerpt — the enclosing method and the else
// branch's closing brace lie outside the visible snippet).
// Serializes a column pair: a leading int records how many Text components
// follow — 1 when only the first component is set, 2 when both are.
if (column.getSecond() == null) {
dataOutput.writeInt(1);
column.getFirst().write(dataOutput);
} else {
dataOutput.writeInt(2);
column.getFirst().write(dataOutput);
column.getSecond().write(dataOutput);
代码示例来源:origin: io.hops/hadoop-common
/**
 * Serializes the metadata map as an int entry count followed by each
 * key/value pair written as consecutive {@code Text} values.
 *
 * @param out target to write the serialized bytes to
 * @throws IOException if the underlying write fails
 */
@Override
public void write(DataOutput out) throws IOException {
  out.writeInt(this.theMetadata.size());
  // For-each iterates via the same entry-set iterator as the original
  // while/hasNext loop, preserving emission order exactly.
  for (Map.Entry<Text, Text> metaEntry : this.theMetadata.entrySet()) {
    metaEntry.getKey().write(out);
    metaEntry.getValue().write(out);
  }
}
内容来源于网络,如有侵权,请联系作者删除!