Usage of the org.apache.hadoop.mapred.InputSplit Class, with Code Examples


This article collects Java code examples for the org.apache.hadoop.mapred.InputSplit class and shows how the class is used in practice. The examples are drawn from curated open-source projects on platforms such as GitHub, Stack Overflow, and Maven, so they carry real-world reference value. Details of the InputSplit class:

Package: org.apache.hadoop.mapred
Class name: InputSplit

About InputSplit

InputSplit represents the data to be processed by an individual Mapper.

Typically, it presents a byte-oriented view of the input, and it is the responsibility of the job's RecordReader to process this and present a record-oriented view.
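Because the mapred-era InputSplit is an interface that extends Writable, a concrete split only needs to report its length and its preferred hosts, and to serialize itself. The sketch below is a minimal, hypothetical implementation (ByteRangeSplit is not a real Hadoop class) illustrating the contract that the examples in this article rely on:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.mapred.InputSplit;

// A minimal, hypothetical InputSplit over an abstract byte range.
// Real implementations such as FileSplit also carry a path and an offset.
public class ByteRangeSplit implements InputSplit {
 private long length;
 private String[] hosts;

 public ByteRangeSplit() {} // no-arg constructor, required for deserialization

 public ByteRangeSplit(long length, String[] hosts) {
  this.length = length;
  this.hosts = hosts;
 }

 @Override
 public long getLength() throws IOException {
  return length; // size of this split in bytes
 }

 @Override
 public String[] getLocations() throws IOException {
  return hosts; // hosts where the data is local, used for scheduling
 }

 @Override
 public void write(DataOutput out) throws IOException { // Writable contract
  out.writeLong(length);
  out.writeInt(hosts.length);
  for (String host : hosts) {
   out.writeUTF(host);
  }
 }

 @Override
 public void readFields(DataInput in) throws IOException { // mirrors write()
  length = in.readLong();
  hosts = new String[in.readInt()];
  for (int i = 0; i < hosts.length; i++) {
   hosts[i] = in.readUTF();
  }
 }
}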

Code Examples

Code example source: apache/flink

@Override
public String[] getHostnames() {
  try {
    return this.hadoopInputSplit.getLocations();
  }
  catch (IOException e) {
    // no location info available; report no locality preference
    return new String[0];
  }
}

Code example source: apache/drill

/**
 * @return the total length of all stored input splits
 */
public long getLength() throws IOException {
 long length = 0L;
 for (InputSplit inputSplit: inputSplits) {
  length += inputSplit.getLength();
 }
 return length;
}

Code example source: elastic/elasticsearch-hadoop

public void readFields(DataInput in) throws IOException {
  path = new Path(Text.readString(in));
  delegate.readFields(in);
}

Code example source: Alluxio/alluxio

/**
 * Returns a string representation of an {@link InputSplit}.
 *
 * @param is a Hadoop {@link InputSplit}
 * @return its string representation
 */
public static String toStringHadoopInputSplit(InputSplit is) {
 StringBuilder sb = new StringBuilder("HadoopInputSplit: ");
 try {
  sb.append(" Length: ").append(is.getLength());
  sb.append(" , Locations: ");
  for (String loc : is.getLocations()) {
   sb.append(loc).append(" ; ");
  }
 } catch (IOException e) {
  LOG.error(e.getMessage());
 }
 return sb.toString();
}
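A quick usage sketch with hypothetical values, passing in Hadoop's real FileSplit:

// prints roughly: HadoopInputSplit:  Length: 1024 , Locations: host1 ; host2 ;
String s = toStringHadoopInputSplit(new org.apache.hadoop.mapred.FileSplit(
  new org.apache.hadoop.fs.Path("/tmp/data.txt"), 0L, 1024L,
  new String[] {"host1", "host2"}));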

Code example source: elastic/elasticsearch-hadoop

public void write(DataOutput out) throws IOException {
  Text.writeString(out, path.toString());
  delegate.write(out);
}

Code example source: apache/hive

@Override
public void readFields(DataInput in) throws IOException {
 // the writer recorded the concrete split class first (see write() below)
 String inputSplitClassName = in.readUTF();
 try {
  // reflectively create an instance of that class...
  inputSplit = (InputSplit) ReflectionUtil.newInstance(conf
    .getClassByName(inputSplitClassName), conf);
 } catch (Exception e) {
  throw new IOException(
    "Cannot create an instance of InputSplit class = "
    + inputSplitClassName + ":" + e.getMessage(), e);
 }
 // ...then let it deserialize its own fields
 inputSplit.readFields(in);
 inputFormatClassName = in.readUTF();
}

Code example source: apache/flink

private void writeObject(ObjectOutputStream out) throws IOException {
  // serialize the parent fields and the final fields
  out.defaultWriteObject();
  // the job conf knows how to serialize itself
  jobConf.write(out);
  // write the input split
  hadoopInputSplit.write(out);
}
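Deserialization has to read the same things back in the same order. A hypothetical readObject counterpart is sketched below; the splitClass field and the use of Hadoop's ReflectionUtils are assumptions for illustration, and the real Flink class may re-create the split differently:

private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
  // restore the parent fields and the final fields
  in.defaultReadObject();
  // the job conf knows how to deserialize itself
  jobConf = new JobConf();
  jobConf.readFields(in);
  // re-create the split (splitClass is a hypothetical field restored
  // by defaultReadObject()), then let it read back its own fields
  hadoopInputSplit = (org.apache.hadoop.mapred.InputSplit)
    ReflectionUtils.newInstance(splitClass, jobConf);
  hadoopInputSplit.readFields(in);
}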

Code example source: apache/avro

public void write(DataOutput out) throws IOException {
 Text.writeString(out, inputSplitClass.getName());
 inputSplit.write(out);
 Text.writeString(out, inputFormatClass.getName());
 Text.writeString(out, mapperClass.getName());
 Text.writeString(out, schema.toString());
}

Code example source: apache/hive

@Override
public void readFields(DataInput in) throws IOException {
 super.readFields(in);
 this.isTableSplit = in.readBoolean();
 if (this.isTableSplit) {
  tableSplit.readFields(in);
 } else {
  snapshotSplit.readFields(in);
 }
}

Code example source: apache/hive

@Override
public void write(DataOutput out) throws IOException {
 out.writeUTF(inputSplit.getClass().getName());
 inputSplit.write(out);
 out.writeUTF(inputFormatClassName);
}
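The write()/readFields() pairs above must mirror each other exactly: the writer records the concrete class name first so the reader can reflectively re-create the right subtype before handing it the stream. Below is a self-contained round trip of that pattern, assuming Hadoop's real FileSplit and ReflectionUtils (used here in place of Hive's internal ReflectionUtil) and in-memory streams:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.util.ReflectionUtils;

public class SplitRoundTrip {
 public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  InputSplit original = new FileSplit(new Path("/tmp/data.txt"), 0L, 1024L,
    new String[] {"host1", "host2"});

  // write the concrete class name, then the split's own payload
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(buffer);
  out.writeUTF(original.getClass().getName());
  original.write(out);

  // read the class name back, reflectively instantiate the subtype,
  // then let it deserialize its own fields
  DataInputStream in = new DataInputStream(
    new ByteArrayInputStream(buffer.toByteArray()));
  InputSplit copy = (InputSplit) ReflectionUtils.newInstance(
    conf.getClassByName(in.readUTF()), conf);
  copy.readFields(in);

  System.out.println(copy.getLength()); // 1024
 }
}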

Code example source: qindongliang/hive-solr

@Override
public void write(final DataOutput out) throws IOException {
  Text.writeString(out, path.toString());
  delegate.write(out);
}

Code example source: apache/avro

@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
 // recover the concrete split class and instantiate it reflectively
 inputSplitClass = (Class<? extends InputSplit>) readClass(in);
 inputSplit = (InputSplit) ReflectionUtils
   .newInstance(inputSplitClass, conf);
 inputSplit.readFields(in);
 // then the companion classes and the Avro schema, in write order
 inputFormatClass = (Class<? extends InputFormat>) readClass(in);
 mapperClass = (Class<? extends AvroMapper>) readClass(in);
 String schemaString = Text.readString(in);
 schema = schemaParser.parse(schemaString);
}

Code example source: apache/hive

@Override
public String[] getLocations() throws IOException {
 assert (inputSplits != null && inputSplits.length > 0);
 return inputSplits[0].getLocations();
}

Code example source: apache/avro

public long getLength() throws IOException {
 return inputSplit.getLength();
}

Code example source: apache/hive

@Override
public void readFields(DataInput in) throws IOException {
 // one class name is written for the whole array: all splits are
 // assumed to share the same concrete InputSplit class
 String inputSplitClassName = in.readUTF();
 int numSplits = in.readInt();
 inputSplits = new InputSplit[numSplits];
 for (int i = 0; i < numSplits; i++) {
  try {
   inputSplits[i] = (InputSplit) ReflectionUtil.newInstance(conf
     .getClassByName(inputSplitClassName), conf);
  } catch (Exception e) {
   throw new IOException(
     "Cannot create an instance of InputSplit class = "
       + inputSplitClassName + ":" + e.getMessage(), e);
  }
  inputSplits[i].readFields(in);
 }
 inputFormatClassName = in.readUTF();
}

Code example source: apache/hive

@Override
public void write(DataOutput out) throws IOException {
 assert (inputSplits != null && inputSplits.length > 0);
 // write the shared class name once, then each split's payload
 out.writeUTF(inputSplits[0].getClass().getName());
 out.writeInt(inputSplits.length);
 for (InputSplit inputSplit : inputSplits) {
  inputSplit.write(out);
 }
 out.writeUTF(inputFormatClassName);
}

Code example source: org.elasticsearch/elasticsearch-hadoop

public void write(DataOutput out) throws IOException {
  Text.writeString(out, path.toString());
  delegate.write(out);
}

Code example source: qindongliang/hive-solr

@Override
public void readFields(final DataInput in) throws IOException {
  path = new Path(Text.readString(in));
  delegate.readFields(in);
}

Code example source: apache/hive

@Override
public String[] getLocations() throws IOException {
 return inputSplit.getLocations();
}

Code example source: apache/hive

public long getLength(int idx) {
 if (inputSplits != null) {
  try {
   return inputSplits[idx].getLength();
  } catch (Exception e) {
   throw new RuntimeException(e);
  }
 }
 return -1; // sentinel: splits not initialized
}
