Usage of the org.apache.hadoop.mapred.InputSplit.readFields() method, with code examples


This article collects Java code examples of the org.apache.hadoop.mapred.InputSplit.readFields() method and shows how it is used in practice. The snippets are extracted from selected open-source projects found on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as a useful reference. Details of the method:
Package: org.apache.hadoop.mapred
Class: InputSplit
Method: readFields

About InputSplit.readFields

readFields(DataInput in) deserializes a split's state from a stream. The mapred InputSplit interface extends Writable, so every implementation must read back exactly the fields, in exactly the order, that its write(DataOutput) counterpart wrote.
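
The snippets below all follow that same Writable round-trip. As a minimal sketch of the contract, here is a hypothetical FileRangeSplit (illustrative only, not part of Hadoop) whose readFields() mirrors its write():

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.InputSplit;

public class FileRangeSplit implements InputSplit {
  private String path = "";
  private long start;
  private long length;

  // A no-arg constructor is required: the framework instantiates the
  // split reflectively and only then calls readFields() on it.
  public FileRangeSplit() { }

  @Override
  public void write(DataOutput out) throws IOException {
    Text.writeString(out, path);
    out.writeLong(start);
    out.writeLong(length);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    // Read the same fields in the same order as write().
    path = Text.readString(in);
    start = in.readLong();
    length = in.readLong();
  }

  @Override
  public long getLength() { return length; }

  @Override
  public String[] getLocations() { return new String[0]; }
}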

Code examples

Code example source: elastic/elasticsearch-hadoop (the same snippet also ships in the org.elasticsearch/elasticsearch-hadoop Maven artifact)

public void readFields(DataInput in) throws IOException {
  // Restore the file path first, then let the wrapped split read its own fields.
  path = new Path(Text.readString(in));
  delegate.readFields(in);
}
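
This only round-trips if the write side emits the path before delegating. A plausible counterpart, inferred from the read order (an assumption, not quoted from elasticsearch-hadoop):

// Hypothetical write() mirroring the readFields() above.
public void write(DataOutput out) throws IOException {
  Text.writeString(out, path.toString()); // path first...
  delegate.write(out);                    // ...then the wrapped split
}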

Code example source: apache/hive (identical code also appears in apache/drill)

@Override
public void readFields(DataInput in) throws IOException {
  // The concrete split class name travels first, so the split can be
  // instantiated reflectively before its own fields are read.
  String inputSplitClassName = in.readUTF();
  try {
    inputSplit = (InputSplit) ReflectionUtil.newInstance(
        conf.getClassByName(inputSplitClassName), conf);
  } catch (Exception e) {
    throw new IOException("Cannot create an instance of InputSplit class = "
        + inputSplitClassName + ":" + e.getMessage(), e);
  }
  inputSplit.readFields(in);
  inputFormatClassName = in.readUTF();
}
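
The serialized form this expects is: class name, the split's own fields, then the input format class name. A write() sketch implied by that order (an assumption, not quoted from Hive):

// Hypothetical write() producing the layout readFields() consumes.
public void write(DataOutput out) throws IOException {
  out.writeUTF(inputSplit.getClass().getName());
  inputSplit.write(out);
  out.writeUTF(inputFormatClassName);
}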

Code example source: apache/hive (identical code also appears in com.github.hyukjinkwon/hive-hbase-handler)

@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  // A boolean flag written by the serializer selects which delegate to restore.
  this.isTableSplit = in.readBoolean();
  if (this.isTableSplit) {
    tableSplit.readFields(in);
  } else {
    snapshotSplit.readFields(in);
  }
}

Code example source: apache/hive (identical code also appears in apache/drill)

@Override
public void readFields(DataInput in) throws IOException {
  // One class name is shared by all splits; only the count and the
  // per-split fields follow it on the stream.
  String inputSplitClassName = in.readUTF();
  int numSplits = in.readInt();
  inputSplits = new InputSplit[numSplits];
  for (int i = 0; i < numSplits; i++) {
    try {
      inputSplits[i] = (InputSplit) ReflectionUtil.newInstance(
          conf.getClassByName(inputSplitClassName), conf);
    } catch (Exception e) {
      throw new IOException("Cannot create an instance of InputSplit class = "
          + inputSplitClassName + ":" + e.getMessage(), e);
    }
    inputSplits[i].readFields(in);
  }
  inputFormatClassName = in.readUTF();
}

Code example source: apache/drill

private static List<InputSplit> deserializeInputSplit(List<String> base64, String className)
    throws IOException, ReflectiveOperationException {
  // getDeclaredConstructor() throws NoSuchMethodException rather than
  // returning null, so no null check is needed here.
  Constructor<?> constructor = Class.forName(className).getDeclaredConstructor();
  constructor.setAccessible(true);
  List<InputSplit> splits = new ArrayList<>();
  for (String str : base64) {
    InputSplit split = (InputSplit) constructor.newInstance();
    ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(Base64.decodeBase64(str));
    split.readFields(byteArrayDataInput);
    splits.add(split);
  }
  return splits;
}
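
The base64 strings consumed here must come from a matching serializer. A minimal sketch of that side (the method name and details are assumptions, not Drill's verbatim code):

// Hypothetical serializer producing the base64 form decoded above.
private static String serializeInputSplit(InputSplit split) throws IOException {
  ByteArrayDataOutput out = ByteStreams.newDataOutput();
  split.write(out); // write() mirrors readFields()
  return Base64.encodeBase64String(out.toByteArray());
}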

Code example source: apache/flink

private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
  // read the parent fields and the final fields
  in.defaultReadObject();

  // the job conf knows how to deserialize itself
  jobConf = new JobConf();
  jobConf.readFields(in);

  try {
    hadoopInputSplit = (org.apache.hadoop.mapred.InputSplit) WritableFactories.newInstance(splitType);
  } catch (Exception e) {
    throw new RuntimeException("Unable to instantiate Hadoop InputSplit", e);
  }

  // hand the configuration to the split before it reads its own fields
  if (hadoopInputSplit instanceof Configurable) {
    ((Configurable) hadoopInputSplit).setConf(this.jobConf);
  } else if (hadoopInputSplit instanceof JobConfigurable) {
    ((JobConfigurable) hadoopInputSplit).configure(this.jobConf);
  }
  hadoopInputSplit.readFields(in);
}
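
The serializing side must write the same three pieces in the same order. Roughly, as a sketch of the idea rather than Flink's verbatim code:

// Hypothetical writeObject() mirroring the readObject() above.
private void writeObject(ObjectOutputStream out) throws IOException {
  out.defaultWriteObject();    // parent and final fields, including splitType
  jobConf.write(out);          // JobConf is itself a Writable
  hadoopInputSplit.write(out); // finally the wrapped split's fields
}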

Code example source: apache/avro (the same snippet also ships in the org.apache.avro/avro-mapred Maven artifact)

@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
 inputSplitClass = (Class<? extends InputSplit>) readClass(in);
 inputSplit = (InputSplit) ReflectionUtils
   .newInstance(inputSplitClass, conf);
 inputSplit.readFields(in);
 inputFormatClass = (Class<? extends InputFormat>) readClass(in);
 mapperClass = (Class<? extends AvroMapper>) readClass(in);
 String schemaString = Text.readString(in);
 schema = schemaParser.parse(schemaString);
}
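
readClass is a private helper not shown in the snippet. In Hadoop's TaggedInputSplit, which this code mirrors, it looks roughly like this (a sketch assuming the stock implementation):

// Sketch of the readClass helper: the class name travels as a string
// and is resolved against the job configuration.
private Class<?> readClass(DataInput in) throws IOException {
  String className = Text.readString(in);
  try {
    return conf.getClassByName(className);
  } catch (ClassNotFoundException e) {
    throw new RuntimeException("readClass", e);
  }
}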

Code example source: org.apache.tez/tez-mapreduce

InputSplit readWrappedSplit(DataInput in, Class<? extends InputSplit> clazz)
    throws IOException {
  InputSplit split;
  try {
    // The caller already knows the concrete class, so nothing needs to
    // be read from the stream before instantiation.
    split = ReflectionUtils.newInstance(clazz, conf);
  } catch (Exception e) {
    throw new TezUncheckedException(e);
  }
  split.readFields(in);
  return split;
}

Code example source: hazelcast/hazelcast-jet (identical code also ships in the com.hazelcast.jet/hazelcast-jet-hadoop Maven artifact)

private void readObject(ObjectInputStream in) throws Exception {
  index = in.readInt();
  // The class name travels as a UTF string; the split is instantiated
  // through the context class loader before its fields are read.
  split = ClassLoaderUtil.newInstance(Thread.currentThread().getContextClassLoader(), in.readUTF());
  split.readFields(in);
}

Code example source: dremio/dremio-oss

public static InputSplit deserializeInputSplit(SerializedInputSplit split)
    throws IOException, ReflectiveOperationException {
  // As in the Drill example above, getDeclaredConstructor() throws
  // instead of returning null, so no null check is needed.
  Constructor<?> constructor = Class.forName(split.getInputSplitClass()).getDeclaredConstructor();
  constructor.setAccessible(true);
  InputSplit deserializedSplit = (InputSplit) constructor.newInstance();
  deserializedSplit.readFields(ByteStreams.newDataInput(split.getInputSplit().toByteArray()));
  return deserializedSplit;
}

Code example source: io.hops/hadoop-mapreduce-client-core (identical code also ships in the ch.cern.hadoop/hadoop-mapreduce-client-core, com.github.jiayuhan-it/hadoop-mapreduce-client-core, and io.prestosql.hadoop/hadoop-apache artifacts)

@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
 inputSplitClass = (Class<? extends InputSplit>) readClass(in);
 inputSplit = (InputSplit) ReflectionUtils
   .newInstance(inputSplitClass, conf);
 inputSplit.readFields(in);
 inputFormatClass = (Class<? extends InputFormat>) readClass(in);
 mapperClass = (Class<? extends Mapper>) readClass(in);
}
