Usage and code examples of org.apache.hadoop.io.Text.clear()


This article collects Java code examples of the org.apache.hadoop.io.Text.clear() method and shows how Text.clear() is used in practice. The examples are drawn from selected open-source projects on GitHub, Stack Overflow, Maven, and similar platforms, so they make a useful reference. Details of Text.clear() are as follows:
Class path: org.apache.hadoop.io.Text
Class name: Text
Method name: clear

About Text.clear

Clear the string to empty. Note: for performance reasons, this call does not clear the underlying byte array, which is still retrievable via #getBytes(). In order to free the byte-array memory, call #set(byte[]) with an empty byte array (for example, new byte[0]).
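
What this means in practice is easy to miss, so here is a minimal, hedged sketch (class name and printed values are illustrative; the exact backing-array capacity depends on the Hadoop version):

import org.apache.hadoop.io.Text;

public class TextClearDemo {
  public static void main(String[] args) {
    Text t = new Text("hello hadoop");
    System.out.println(t.getLength());       // 12: the logical length
    System.out.println(t.getBytes().length); // >= 12: capacity of the backing array

    t.clear();
    // clear() only resets the logical length; the backing array is kept
    // for reuse, so getBytes() still exposes the old (now stale) bytes.
    System.out.println(t.getLength());       // 0
    System.out.println(t.getBytes().length); // unchanged

    // The Javadoc's recommended way to release the backing memory:
    t.set(new byte[0]);
  }
}

This is also why the snippets below always pair getBytes() with getLength() instead of trusting the array's own length.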

Code examples

Code example source: apache/drill

public Text evaluate(BooleanWritable i) {
 if (i == null) {
  return null;
 } else {
  t.clear();
  t.set(i.get() ? trueBytes : falseBytes);
  return t;
 }
}
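
The snippet assumes a Text field t and cached byte arrays trueBytes/falseBytes defined elsewhere in the UDF. A self-contained sketch of the same reuse idiom (class name and constants are hypothetical):

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.Text;

public class BoolToText {
  // One Text instance is recycled across calls instead of allocating per row.
  private final Text t = new Text();
  private static final byte[] TRUE_BYTES = "TRUE".getBytes(StandardCharsets.UTF_8);
  private static final byte[] FALSE_BYTES = "FALSE".getBytes(StandardCharsets.UTF_8);

  public Text evaluate(BooleanWritable i) {
    if (i == null) {
      return null;
    }
    t.clear();
    t.set(i.get() ? TRUE_BYTES : FALSE_BYTES);
    return t;
  }
}

Strictly speaking, set() alone already replaces the old value; the clear() mirrors the original code's defensive style.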

Code example source: apache/hive (the same method also appears verbatim in apache/drill)

public Text evaluate(Text t, IntWritable pos, IntWritable len) {
 if ((t == null) || (pos == null) || (len == null)) {
  return null;
 }
 // r is a Text field reused across calls; clear() makes the early
 // returns below hand back an empty value instead of a stale one.
 r.clear();
 if (len.get() <= 0) {
  return r;
 }
 String s = t.toString();
 int[] index = makeIndex(pos.get(), len.get(), s.length());
 if (index == null) {
  return r;
 }
 r.set(s.substring(index[0], index[1]));
 return r;
}

Code example source: apache/hive (likewise in apache/drill)

// Fragment: reset a scratch Text buffer before it is re-filled.
escape.clear();

Code example source: apache/nifi

// Fragment: discard the FlowFile and reset the reused key before the next record.
session.remove(flowFile);
key.clear();

Code example source: org.apache.hadoop/hadoop-common

// Fragments from HAR archive-index parsing: `line` is a reused Text buffer,
// cleared after each parsed record.
stores.add(new Store(Long.parseLong(readStr[2]),
    Long.parseLong(readStr[3])));
line.clear();
parsed[0] = decodeFileName(parsed[0]);
archive.put(new Path(parsed[0]), new HarStatus(lineFeed));
line.clear();

Code example source: org.apache.hadoop/hadoop-common

// Fragment from LineReader: reset the caller's Text before the next line's bytes are appended.
str.clear();
int txtLength = 0; // tracks str.getLength(), as an optimization
int newlineLength = 0; // length of terminating newline
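
The fragment above is LineReader resetting the caller's Text before appending the bytes of the next line. A minimal sketch of that clear-then-append pattern:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.Text;

public class AppendDemo {
  public static void main(String[] args) {
    Text str = new Text();
    byte[] chunk1 = "hello ".getBytes(StandardCharsets.UTF_8);
    byte[] chunk2 = "world".getBytes(StandardCharsets.UTF_8);

    str.clear();                          // reset the logical length, keep the capacity
    str.append(chunk1, 0, chunk1.length); // append() grows the backing array as needed
    str.append(chunk2, 0, chunk2.length);
    System.out.println(str);              // "hello world"
  }
}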

Code example source: apache/hive (the same helper also appears verbatim in apache/drill)

public static int unescapeText(Text text) {
 Text escape = new Text(text); // snapshot the original bytes
 text.clear();                 // reuse the caller's Text as the output buffer
 // ... (truncated: the rest of the method appends the unescaped bytes back)
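
The method is truncated above. A hedged sketch of how the pattern completes (the real Hive/Drill code decodes escape sequences in the loop, and the actual return value is not visible in the snippet):

import org.apache.hadoop.io.Text;

public final class UnescapeSketch {
  // Simplified stand-in: copies bytes back verbatim instead of decoding escapes.
  public static int unescapeText(Text text) {
    Text escape = new Text(text); // snapshot the original bytes
    text.clear();                 // reuse the caller's Text as the output buffer
    byte[] b = escape.getBytes();
    for (int i = 0; i < escape.getLength(); i++) {
      // The real implementation inspects b[i] for '\\' escape sequences here.
      text.append(b, i, 1);
    }
    return text.getLength();      // assumed return value: the resulting length
  }
}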

Code example source: org.apache.hadoop/hadoop-common

// A closely related LineReader fragment: the same reset-before-append pattern.
str.clear();
int txtLength = 0; // tracks str.getLength(), as an optimization
long bytesConsumed = 0;

Code example source: com.moz.fiji.mapreduce/fiji-mapreduce

/** {@inheritDoc} */
@Override
public void close() throws IOException {
 // clear() resets only the logical length; per the Javadoc note above,
 // the backing byte array is retained.
 mFileText.clear();
}
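
Because clear() retains the backing array, a close() that must actually release memory would follow the Javadoc's advice and swap in an empty array. A hypothetical variant:

import java.io.Closeable;
import java.io.IOException;
import org.apache.hadoop.io.Text;

// Hypothetical holder, for illustration only.
public class TextHolder implements Closeable {
  private final Text mFileText = new Text();

  @Override
  public void close() throws IOException {
    mFileText.set(new byte[0]); // per the Javadoc, this frees the byte-array memory
  }
}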

Code example source: cloudera/crunch

@Override
public void write(byte[] data, int offset, int len) throws IOException {
 // clear() is not strictly required here: set() below replaces the
 // contents and resets the length in one step.
 target.clear();
 target.set(data, offset, len);
}

Code example source: ch.cern.hadoop/hadoop-streaming (identical in org.apache.hadoop/hadoop-streaming)

@Override
public boolean readKeyValue() throws IOException {
 if (lineReader.readLine(line) <= 0) {
  return false;
 }
 bytes = line.getBytes();
 // set(byte[], int, int) copies the bytes, so the reused `line`
 // buffer can safely be cleared right afterwards.
 key.set(bytes, 0, line.getLength());
 line.clear();
 return true;
}

Code example source: ch.cern.hadoop/hadoop-common (identical in com.github.jiayuhan-it/hadoop-common)

static void randomText(Text val, int id, int recordSize) {
 val.clear(); // reset the reused Text before building the new record
 final StringBuilder ret = new StringBuilder(recordSize);
 ret.append(String.format(REC_FMT, id, id));
 recordSize -= ret.length();
 for (int i = 0; i < recordSize; ++i) {
  ret.append(rand.nextInt(9));
 }
 val.set(ret.toString());
}

Code example source: ch.cern.hadoop/hadoop-common (identical in com.github.jiayuhan-it/hadoop-common)

private static void forOffset(SequenceFile.Reader reader,
  IntWritable key, Text val, int iter, long off, int expectedRecord)
  throws IOException {
 val.clear(); // reset the reused value so stale bytes can't survive a failed read
 reader.sync(off);
 reader.next(key, val);
 assertEquals(key.get(), expectedRecord);
 final String test = String.format(REC_FMT, expectedRecord, expectedRecord);
 assertEquals("Invalid value " + val, 0, val.find(test, 0));
}
