Usage of the org.apache.hadoop.io.Text.readString() method, with code examples


This article collects Java code examples of the org.apache.hadoop.io.Text.readString() method and shows how Text.readString() is used in practice. The examples come from selected projects on platforms such as GitHub, Stack Overflow and Maven, so they serve as useful references. Details of the Text.readString() method:
Package path: org.apache.hadoop.io.Text
Class name: Text
Method name: readString

About Text.readString

Reads a UTF-8 encoded string from the given DataInput.
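The common pattern, which the examples below follow, is to pair Text.readString() in a Writable's readFields() with Text.writeString() in its write(), reading the fields back in the same order they were written. A minimal sketch, assuming a made-up PersonWritable with two string fields (not from any of the projects quoted below):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Hypothetical Writable, used only for illustration.
public class PersonWritable implements Writable {
 private String name;
 private String city;

 @Override
 public void write(DataOutput out) throws IOException {
  Text.writeString(out, name);  // writes a vint length followed by the UTF-8 bytes
  Text.writeString(out, city);
 }

 @Override
 public void readFields(DataInput in) throws IOException {
  name = Text.readString(in);   // fields must be read back in the order they were written
  city = Text.readString(in);
 }
}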

Code examples

Code example source: org.apache.hadoop/hadoop-common

/** Read a UTF8 encoded string from in
 */
public static String readString(DataInput in) throws IOException {
 return readString(in, Integer.MAX_VALUE);
}

Code example source: apache/incubator-gobblin

@Override
 public void readFields(DataInput in) throws IOException {
  Text.readString(in);
 }
}

Code example source: Qihoo360/XLearning

@Override
 public void readFields(DataInput dataInput) throws IOException {
  this.localLocation = Text.readString(dataInput);
  this.dfsLocation = Text.readString(dataInput);
 }
}

Code example source: apache/kylin

public void readFields(DataInput in) throws IOException {
  brokers = Text.readString(in);
  topic = Text.readString(in);
  partition = in.readInt();
  offsetStart = in.readLong();
  offsetEnd = in.readLong();
}

Code example source: org.apache.hadoop/hadoop-common

/**
 * Read an Enum value from DataInput, Enums are read and written 
 * using String values. 
 * @param <T> Enum type
 * @param in DataInput to read from 
 * @param enumType Class type of Enum
 * @return Enum represented by String read from DataInput
 * @throws IOException
 */
public static <T extends Enum<T>> T readEnum(DataInput in, Class<T> enumType)
 throws IOException{
 return T.valueOf(enumType, Text.readString(in));
}
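As the Javadoc above notes, enums are serialized by name as strings, so readEnum() ultimately goes through Text.readString(). A hedged usage sketch, assuming a made-up Status enum; WritableUtils.writeEnum() is the matching write-side call:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class EnumIoSketch {
 // Example enum, not part of Hadoop.
 enum Status { RUNNING, FINISHED }

 static void writeStatus(DataOutput out, Status status) throws IOException {
  WritableUtils.writeEnum(out, status);            // writes status.name() as a string
 }

 static Status readStatus(DataInput in) throws IOException {
  return WritableUtils.readEnum(in, Status.class); // reads the name and resolves it via valueOf
 }
}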

Code example source: apache/incubator-gobblin

@Override
public void readFields(DataInput dataInput) throws IOException {
 this.owner = Text.readString(dataInput);
 this.group = Text.readString(dataInput);
 this.fsPermission = FsPermission.read(dataInput);
}

Code example source: Qihoo360/XLearning

@Override
 public void readFields(DataInput dataInput) throws IOException {
  this.aliasName = Text.readString(dataInput);
  this.paths = new ArrayList<>();
  int size = dataInput.readInt();
  for (int i = 0; i < size; i++) {
   this.paths.add(new Path(Text.readString(dataInput)));
  }
 }
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public void readFields(DataInput in) throws IOException {
 username = Text.readString(in, Text.DEFAULT_MAX_LEN);
 groupname = Text.readString(in, Text.DEFAULT_MAX_LEN);
 permission = FsPermission.read(in);
}

Code example source: Qihoo360/XLearning

@Override
public void readFields(DataInput dataInput) throws IOException {
 this.xlearningContainerStatus = WritableUtils.readEnum(dataInput, XLearningContainerStatus.class);
 interResultSavedStatus.readFields(dataInput);
 this.progressLog = Text.readString(dataInput);
 this.containersStartTime = Text.readString(dataInput);
 this.containersFinishTime = Text.readString(dataInput);
}

Code example source: apache/flink

@Override
public void readFields(DataInput in) throws IOException {
 clear();
 int size = WritableUtils.readVInt(in);
 for(int i=0; i < size; ++i) {
  String key = org.apache.hadoop.io.Text.readString(in);
  String value = org.apache.hadoop.io.Text.readString(in);
  set(key, value);
  String sources[] = WritableUtils.readCompressedStringArray(in);
  if(sources != null) {
   updatingResource.put(key, sources);
  }
 }
}

Code example source: Qihoo360/XLearning

@Override
 public void readFields(DataInput dataInput) throws IOException {
  this.logType = WritableUtils.readEnum(dataInput, LogType.class);
  this.message = Text.readString(dataInput);
 }
}

Code example source: org.apache.hadoop/hadoop-common

/**
 * Deserializes the AccessControlList object
 */
@Override
public void readFields(DataInput in) throws IOException {
 String aclString = Text.readString(in);
 buildACL(aclString.split(" ", 2));
}

Code example source: apache/avro

private Class<?> readClass(DataInput in) throws IOException {
 String className = Text.readString(in);
 try {
  return conf.getClassByName(className);
 } catch (ClassNotFoundException e) {
  throw new RuntimeException("readObject can't find class", e);
 }
}

Code example source: Qihoo360/XLearning

@Override
public void readFields(DataInput dataInput) throws IOException {
 this.containerId = ConverterUtils.toContainerId(Text.readString(dataInput));
}

Code example source: elastic/elasticsearch-hadoop

public void readFields(DataInput in) throws IOException {
  path = new Path(Text.readString(in));
  delegate.readFields(in);
}

Code example source: apache/flink

@Override
public void readFields(DataInput in) throws IOException {
  clear();
  int size = WritableUtils.readVInt(in);
  for(int i=0; i < size; ++i) {
    String key = org.apache.hadoop.io.Text.readString(in);
    String value = org.apache.hadoop.io.Text.readString(in);
    set(key, value);
    String sources[] = WritableUtils.readCompressedStringArray(in);
    if (sources != null) {
      putIntoUpdatingResource(key, sources);
    }
  }
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public void readFields(DataInput in) throws IOException {
 clear();
 int size = WritableUtils.readVInt(in);
 for(int i=0; i < size; ++i) {
  String key = org.apache.hadoop.io.Text.readString(in);
  String value = org.apache.hadoop.io.Text.readString(in);
  set(key, value); 
  String sources[] = WritableUtils.readCompressedStringArray(in);
  if (sources != null) {
   putIntoUpdatingResource(key, sources);
  }
 }
}

Code example source: apache/avro

@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
 inputSplitClass = (Class<? extends InputSplit>) readClass(in);
 inputSplit = (InputSplit) ReflectionUtils
   .newInstance(inputSplitClass, conf);
 inputSplit.readFields(in);
 inputFormatClass = (Class<? extends InputFormat>) readClass(in);
 mapperClass = (Class<? extends AvroMapper>) readClass(in);
 String schemaString = Text.readString(in);
 schema = schemaParser.parse(schemaString);
}

Code example source: apache/hbase

private static List<Permission> readWritableUserPermission(DataInput in,
  Configuration conf) throws IOException, ClassNotFoundException {
 assert WritableUtils.readVInt(in) == LIST_CODE;
 int length = in.readInt();
 List<Permission> list = new ArrayList<>(length);
 for (int i = 0; i < length; i++) {
  assert WritableUtils.readVInt(in) == WRITABLE_CODE;
  assert WritableUtils.readVInt(in) == WRITABLE_NOT_ENCODED;
  String className = Text.readString(in);
  Class<? extends Writable> clazz = conf.getClassByName(className).asSubclass(Writable.class);
  Writable instance = WritableFactories.newInstance(clazz, conf);
  instance.readFields(in);
  list.add((Permission) instance);
 }
 return list;
}

Code example source: apache/hbase

public static ListMultimap<String, Permission> readPermissions(byte[] data,
  Configuration conf) throws DeserializationException {
 if (ProtobufUtil.isPBMagicPrefix(data)) {
  int pblen = ProtobufUtil.lengthOfPBMagic();
  try {
   AccessControlProtos.UsersAndPermissions.Builder builder =
    AccessControlProtos.UsersAndPermissions.newBuilder();
   ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
   return AccessControlUtil.toPermission(builder.build());
  } catch (IOException e) {
   throw new DeserializationException(e);
  }
 } else {
  // TODO: We have to re-write non-PB data as PB encoded. Otherwise we will carry old Writables
  // forever (here and a couple of other places).
  ListMultimap<String, Permission> perms = ArrayListMultimap.create();
  try {
   DataInput in = new DataInputStream(new ByteArrayInputStream(data));
   int length = in.readInt();
   for (int i = 0; i < length; i++) {
    String user = Text.readString(in);
    perms.putAll(user, readWritableUserPermission(in, conf));
   }
  } catch (IOException | ClassNotFoundException e) {
   throw new DeserializationException(e);
  }
  return perms;
 }
}
