Usage of the org.apache.hadoop.mapred.InputSplit.write() method, with code examples


This article collects Java code examples of the org.apache.hadoop.mapred.InputSplit.write() method and shows how InputSplit.write() is used in practice. The examples are drawn from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the method:

Package path: org.apache.hadoop.mapred.InputSplit
Class name: InputSplit
Method name: write

About InputSplit.write

In the classic mapred API, InputSplit extends org.apache.hadoop.io.Writable, so write(DataOutput out) serializes the split's fields to the given output stream; the companion readFields(DataInput in) restores them on the receiving side. Frameworks use this pair to ship splits from the job client to the tasks that will read them.
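
Because write() is inherited from Writable, any concrete split serialized with write(DataOutput) can be rebuilt with readFields(DataInput). Below is a minimal round-trip sketch using org.apache.hadoop.mapred.FileSplit, one concrete InputSplit implementation; the path, offset, and length are illustrative values, not taken from any of the projects listed later.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileSplit;

public class InputSplitWriteDemo {
 public static void main(String[] args) throws IOException {
  // FileSplit is a concrete InputSplit; these are made-up example values.
  FileSplit original =
    new FileSplit(new Path("/tmp/example.txt"), 0L, 1024L, new String[0]);

  // write(DataOutput) serializes the split's fields to the stream.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  original.write(new DataOutputStream(buffer));

  // readFields(DataInput) restores them into another instance;
  // the placeholder constructor arguments are overwritten by readFields.
  FileSplit restored = new FileSplit(null, 0L, 0L, new String[0]);
  restored.readFields(
    new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

  System.out.println(restored.getPath() + " [" + restored.getStart()
    + ", +" + restored.getLength() + ")");
 }
}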

Code examples

Code example source: apache/flink

private void writeObject(ObjectOutputStream out) throws IOException {
  // serialize the parent fields and the final fields
  out.defaultWriteObject();
  // the job conf knows how to serialize itself
  jobConf.write(out);
  // write the input split
  hadoopInputSplit.write(out);
}

Code example source: apache/hive

@Override
public void write(DataOutput out) throws IOException {
 out.writeUTF(inputSplit.getClass().getName());
 inputSplit.write(out);
 out.writeUTF(inputFormatClassName);
}
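
The class name written first is what lets the reader rebuild the split reflectively. Here is a sketch of a matching readFields(), assuming the same inputSplit and inputFormatClassName fields plus a conf field used for instantiation; since the rest of the Hive class is not shown, this illustrates the pattern rather than Hive's actual code.

@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
 String splitClassName = in.readUTF();
 try {
  Class<? extends InputSplit> splitClass =
    (Class<? extends InputSplit>) Class.forName(splitClassName);
  // ReflectionUtils.newInstance also wires up Configurable/JobConfigurable.
  inputSplit = ReflectionUtils.newInstance(splitClass, conf);
  inputSplit.readFields(in);
  inputFormatClassName = in.readUTF();
 } catch (ClassNotFoundException e) {
  throw new IOException("Cannot locate split class " + splitClassName, e);
 }
}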

Code example source: apache/hive

@Override
public void write(DataOutput out) throws IOException {
 assert (inputSplits != null && inputSplits.length > 0);
 out.writeUTF(inputSplits[0].getClass().getName());
 out.writeInt(inputSplits.length);
 for (InputSplit inputSplit : inputSplits) {
  inputSplit.write(out);
 }
 out.writeUTF(inputFormatClassName);
}

Code example source: apache/drill

@Override
public void write(DataOutput out) throws IOException {
 out.writeUTF(inputSplit.getClass().getName());
 inputSplit.write(out);
 out.writeUTF(inputFormatClassName);
}

Code example source: apache/drill

@Override
public void write(DataOutput out) throws IOException {
 assert (inputSplits != null && inputSplits.length > 0);
 out.writeUTF(inputSplits[0].getClass().getName());
 out.writeInt(inputSplits.length);
 for (InputSplit inputSplit : inputSplits) {
  inputSplit.write(out);
 }
 out.writeUTF(inputFormatClassName);
}

Code example source: elastic/elasticsearch-hadoop

public void write(DataOutput out) throws IOException {
  Text.writeString(out, path.toString());
  delegate.write(out);
}

Code example source: apache/hive

@Override
public void write(DataOutput out) throws IOException {
 super.write(out);
 out.writeBoolean(isTableSplit);
 if (isTableSplit) {
  tableSplit.write(out);
 } else {
  snapshotSplit.write(out);
 }
}
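
Design note: the boolean written after the superclass fields acts as a tag, so the matching readFields() knows whether the bytes that follow belong to tableSplit or snapshotSplit.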

Code example source: apache/drill

/**
 * Serializes each input split to string using Base64 encoding.
 *
 * @return list of serialized input splits
 */
public List<String> serialize() throws IOException {
 List<String> serializedInputSplits = new ArrayList<>();
 for (InputSplit inputSplit : inputSplits) {
  final ByteArrayDataOutput byteArrayOutputStream = ByteStreams.newDataOutput();
  inputSplit.write(byteArrayOutputStream);
  final String encoded = Base64.encodeBase64String(byteArrayOutputStream.toByteArray());
  logger.debug("Encoded split string for split {} : {}", inputSplit, encoded);
  serializedInputSplits.add(encoded);
 }
 return serializedInputSplits;
}
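
The decode direction is not part of the excerpt. A plausible counterpart, assuming the same commons-codec Base64 and Guava ByteStreams utilities as above, and that the caller supplies an empty instance of the concrete split class:

// Hypothetical inverse of serialize(): decode one Base64 string into a split.
public static <T extends InputSplit> T deserialize(String encoded, T emptySplit)
  throws IOException {
 emptySplit.readFields(ByteStreams.newDataInput(Base64.decodeBase64(encoded)));
 return emptySplit;
}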

Code example source: apache/avro

public void write(DataOutput out) throws IOException {
 Text.writeString(out, inputSplitClass.getName());
 inputSplit.write(out);
 Text.writeString(out, inputFormatClass.getName());
 Text.writeString(out, mapperClass.getName());
 Text.writeString(out, schema.toString());
}

Code example source: apache/drill

@Override
public void process(Object[] arguments) throws HiveException {
 String query = stringOI.getPrimitiveJavaObject(arguments[0]);
 int num = intOI.get(arguments[1]);
 PlanFragment fragment = createPlanFragment(query, num);
 TezWork tezWork = fragment.work;
 Schema schema = fragment.schema;
 try {
  for (InputSplit s : getSplits(jc, num, tezWork, schema)) {
   Object[] os = new Object[1];
   bos.reset();
   s.write(dos);
   byte[] frozen = bos.toByteArray();
   os[0] = frozen;
   forward(os);
  }
 } catch (Exception e) {
  throw new HiveException(e);
 }
}

Code example source: apache/hive

// Excerpt (loop body only): serialize one split s into a reusable byte buffer,
// then hand the frozen bytes off as a single-element row.
Object[] os = new Object[1];
bos.reset();
s.write(dos);
byte[] frozen = bos.toByteArray();
os[0] = frozen;
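
Both the Drill process() method and the Hive excerpt above write into stream fields (bos, dos) declared elsewhere in the enclosing class. A plausible setup, stated here as an assumption since the declarations fall outside the excerpts:

// Reusable buffer: bos.reset() between splits avoids reallocating.
private final ByteArrayOutputStream bos = new ByteArrayOutputStream();
private final DataOutputStream dos = new DataOutputStream(bos);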

Code example source: com.hazelcast.jet/hazelcast-jet-hadoop

private void writeObject(ObjectOutputStream out) throws IOException {
  out.writeInt(index);
  out.writeUTF(split.getClass().getName());
  split.write(out);
}

Code example source: com.facebook.presto.hive/hive-apache

@Override
public void write(DataOutput out) throws IOException {
 out.writeUTF(inputSplit.getClass().getName());
 inputSplit.write(out);
 out.writeUTF(inputFormatClassName);
}

Code example source: com.facebook.presto.hive/hive-apache

@Override
public void write(DataOutput out) throws IOException {
 assert (inputSplits != null && inputSplits.length > 0);
 out.writeUTF(inputSplits[0].getClass().getName());
 out.writeInt(inputSplits.length);
 for (InputSplit inputSplit : inputSplits) {
  inputSplit.write(out);
 }
 out.writeUTF(inputFormatClassName);
}

Code example source: org.apache.hadoop.hive/hive-exec

@Override
public void write(DataOutput out) throws IOException {
 assert (inputSplits != null && inputSplits.length > 0);
 out.writeUTF(inputSplits[0].getClass().getName());
 out.writeInt(inputSplits.length);
 for (InputSplit inputSplit : inputSplits) {
  inputSplit.write(out);
 }
 out.writeUTF(inputFormatClassName);
}

Code example source: cwensel/cascading

public void write( DataOutput out ) throws IOException
 {
 out.writeUTF( inputSplit.getClass().getName() );
 String[] keys = config.keySet().toArray( new String[ config.size() ] );
 String[] values = new String[ keys.length ];
 for( int i = 0; i < keys.length; i++ )
  values[ i ] = config.get( keys[ i ] );
 WritableUtils.writeStringArray( out, keys );
 WritableUtils.writeStringArray( out, values );
 inputSplit.write( out );
 }

Code example source: ch.cern.hadoop/hadoop-mapreduce-client-core

public void write(DataOutput out) throws IOException {
 Text.writeString(out, inputSplitClass.getName());
 inputSplit.write(out);
 Text.writeString(out, inputFormatClass.getName());
 Text.writeString(out, mapperClass.getName());
}
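
This is the write() method of Hadoop's TaggedInputSplit, the wrapper that MultipleInputs uses to tag each underlying split with the InputFormat and Mapper that should process it; the matching readFields() reads the same four items back in the same order.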

Code example source: org.jvnet.hudson.hadoop/hadoop-core

public void write(DataOutput out) throws IOException {
 Text.writeString(out, inputSplitClass.getName());
 inputSplit.write(out);
 Text.writeString(out, inputFormatClass.getName());
 Text.writeString(out, mapperClass.getName());
}

Code example source: io.hops/hadoop-mapreduce-client-core

public void write(DataOutput out) throws IOException {
 Text.writeString(out, inputSplitClass.getName());
 inputSplit.write(out);
 Text.writeString(out, inputFormatClass.getName());
 Text.writeString(out, mapperClass.getName());
}

Code example source: dremio/dremio-oss

private SerializedInputSplit serialize(InputSplit split) throws IOException{
 ByteArrayDataOutput output = ByteStreams.newDataOutput();
 split.write(output);
 return SerializedInputSplit.newBuilder()
  .setInputSplitClass(split.getClass().getName())
  .setInputSplit(com.google.protobuf.ByteString.copyFrom(output.toByteArray())).build();
}
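
Reading it back presumably pairs the recorded class name with readFields(). A sketch assuming the protobuf-generated accessors getInputSplitClass() and getInputSplit(); this is hypothetical code, not taken from the Dremio source:

// Hypothetical inverse: instantiate the recorded class, then readFields() the bytes.
private InputSplit deserialize(SerializedInputSplit serialized)
  throws IOException, ClassNotFoundException {
 Class<?> splitClass = Class.forName(serialized.getInputSplitClass());
 InputSplit split = (InputSplit) ReflectionUtils.newInstance(splitClass, new JobConf());
 split.readFields(ByteStreams.newDataInput(serialized.getInputSplit().toByteArray()));
 return split;
}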
