org.apache.hadoop.io.Text.charAt()方法的使用及代码示例

x33g5p2x  于2022-01-29 转载在 其他  
字(6.2k)|赞(0)|评价(0)|浏览(117)

本文整理了Java中org.apache.hadoop.io.Text.charAt()方法的一些代码示例,展示了Text.charAt()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Text.charAt()方法的具体详情如下:
包路径:org.apache.hadoop.io.Text
类名称:Text
方法名:charAt

Text.charAt介绍

[英]Returns the Unicode Scalar Value (32-bit integer value) for the character at position. Note that this method avoids using the converter or doing String instantiation
[中]返回指定位置处字符的Unicode标量值(32位整数值)。请注意,此方法避免使用转换器或进行字符串实例化

代码示例

代码示例来源:origin: apache/hive

int c = text.charAt(i);
byte[] escaped;
int start;

代码示例来源:origin: apache/drill

int c = text.charAt(i);
byte[] escaped;
int start;

代码示例来源:origin: apache/hive

int c = escape.charAt(i);
switch (c) {
case '\\':

代码示例来源:origin: apache/drill

int c = escape.charAt(i);
switch (c) {
case '\\':

代码示例来源:origin: NationalSecurityAgency/datawave

boolean isEventKey(Key k) {
    Text cf = k.getColumnFamily();
    return cf.getLength() > 0
            && cf.find("\u0000") != -1
            && !((cf.charAt(0) == 'f' && cf.charAt(1) == 'i' && cf.charAt(2) == 0) || (cf.getLength() == 1 && cf.charAt(0) == 'd') || (cf
                    .getLength() == 2 && cf.charAt(0) == 't' && cf.charAt(1) == 'f'));
  }
}

代码示例来源:origin: NationalSecurityAgency/datawave

/**
 * Returns true when the key's column family starts with the
 * field-index prefix "fi" followed by a null byte.
 */
private boolean isFieldIndexKey(Key key) {
  Text cf = key.getColumnFamily();
  // Too short to hold the three-byte "fi\0" prefix.
  if (cf.getLength() < 3) {
    return false;
  }
  return cf.charAt(0) == 'f' && cf.charAt(1) == 'i' && cf.charAt(2) == '\0';
}

代码示例来源:origin: NationalSecurityAgency/datawave

/**
 * Returns true when the key's column family contains a null byte and is
 * not a field-index ("fi\0..."), document ("d"), or term-frequency ("tf")
 * family. Text.charAt yields -1 out of range, so the probes are safe.
 */
protected boolean isEventKey(Key k) {
  Text cf = k.getColumnFamily();
  // Reject empty families and families without a null separator up front.
  if (cf.getLength() <= 0 || cf.find("\u0000") == -1) {
    return false;
  }
  boolean fiPrefix = cf.charAt(0) == 'f' && cf.charAt(1) == 'i' && cf.charAt(2) == 0;
  boolean dFamily = cf.getLength() == 1 && cf.charAt(0) == 'd';
  boolean tfFamily = cf.getLength() == 2 && cf.charAt(0) == 't' && cf.charAt(1) == 'f';
  return !fiPrefix && !dFamily && !tfFamily;
}

代码示例来源:origin: NationalSecurityAgency/datawave

/**
 * Checks whether the column family carries the "fi\0" field-index prefix.
 */
private boolean isFieldIndexKey(Key key) {
  Text family = key.getColumnFamily();
  boolean longEnough = family.getLength() >= 3;
  return longEnough
      && family.charAt(0) == 'f'
      && family.charAt(1) == 'i'
      && family.charAt(2) == '\0';
}

代码示例来源:origin: org.terrier/terrier-core

/**
 * Looks up the index bounds for the first character of the key.
 *
 * @param key the term to bound; an empty key yields the default bounds
 * @return the bounds registered for the key's leading character, or
 *         {@code defaultReturn} when the key is empty or unmapped
 */
public int[] searchBounds(Text key) throws IOException {
    if (key.getLength() == 0) {
      return defaultReturn;
    }
    final int[] bounds = map.get(key.charAt(0));
    return bounds != null ? bounds : defaultReturn;
  }
}

代码示例来源:origin: terrier-org/terrier-core

/**
 * Resolves the search bounds keyed by the term's first character,
 * falling back to {@code defaultReturn} for empty or unmapped keys.
 */
public int[] searchBounds(Text key) throws IOException {
    if (key.getLength() == 0) {
      return defaultReturn;
    }
    int[] bounds = map.get(key.charAt(0));
    if (bounds == null) {
      bounds = defaultReturn;
    }
    return bounds;
  }
}

代码示例来源:origin: geftimov/hadoop-map-reduce-patterns

/**
 * Bins each tagged value into listA or listB by its leading tag
 * character ('A' or 'B'), strips the tag, then runs the join logic.
 */
public void reduce(Text key, Iterable<Text> values, Context context)
    throws IOException, InterruptedException {
  // Reset the per-key buffers from the previous reduce call.
  listA.clear();
  listB.clear();
  // Use a single for-each loop: the original called values.iterator()
  // on every hasNext()/next(), which only works because Hadoop hands
  // back the same iterator instance; on a standard Iterable it would
  // loop forever over the first element.
  for (Text value : values) {
    tmp = value;
    if (tmp.charAt(0) == 'A') {
      listA.add(new Text(tmp.toString().substring(1)));
    } else if (tmp.charAt(0) == 'B') {
      // BUG FIX: was tmp.charAt('0') — the char literal '0' is code
      // point 48, so the 'B' branch could never match position 0.
      listB.add(new Text(tmp.toString().substring(1)));
    }
  }
  // Execute our join logic now that the lists are filled
  executeJoinLogic(context);
}

代码示例来源:origin: geftimov/hadoop-map-reduce-patterns

/**
 * Partitions tagged values into listA/listB by their first character
 * ('A' or 'B'), removing the tag, then executes the join.
 */
public void reduce(Text key, Iterable<Text> values, Context context)
    throws IOException, InterruptedException {
  // Clear the buffers carried over from the previous key.
  listA.clear();
  listB.clear();
  // Iterate once with for-each. The original re-created an iterator via
  // values.iterator() inside the loop condition, which relies on Hadoop
  // returning the same iterator each time and is broken for any
  // conforming Iterable.
  for (Text value : values) {
    tmp = value;
    if (tmp.charAt(0) == 'A') {
      listA.add(new Text(tmp.toString().substring(1)));
    } else if (tmp.charAt(0) == 'B') {
      // BUG FIX: original read tmp.charAt('0'), i.e. index 48, so
      // 'B'-tagged records were silently dropped.
      listB.add(new Text(tmp.toString().substring(1)));
    }
  }
  // Execute our join logic now that the lists are filled
  executeJoinLogic(context);
}

代码示例来源:origin: NationalSecurityAgency/timely

/**
 * Returns true when nextRow begins with exactly the previous metric's
 * bytes followed by a null separator — i.e. it belongs to the same metric.
 */
private boolean isNextMetricTheSame(Text nextRow) {
  byte[] nextBytes = nextRow.getBytes();
  int prefixLen = prevMetricBytes.getLength();
  // Strictly longer than the prefix, byte-equal over the prefix,
  // and terminated by the null separator right after it.
  return nextBytes.length > prefixLen
      && prevMetricBytes.compareTo(nextBytes, 0, prefixLen) == 0
      && nextRow.charAt(prefixLen) == 0x00;
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * 
 */
/**
 * Verifies that Text.charAt agrees with String.charAt for every position
 * of an ASCII string, and that out-of-range positions return -1 instead
 * of throwing.
 */
public void testCharAt() {
 final String line = "adsawseeeeegqewgasddga";
 final Text text = new Text(line);
 for (int idx = 0; idx < line.length(); idx++) {
  assertTrue("testCharAt error1 !!!", line.charAt(idx) == text.charAt(idx));
 }
 // Negative and past-the-end indices are reported as -1, not exceptions.
 assertEquals("testCharAt error2 !!!", -1, text.charAt(-1));
 assertEquals("testCharAt error3 !!!", -1, text.charAt(100));
}

代码示例来源:origin: com.facebook.presto.hive/hive-apache

int c = text.charAt(i);
byte[] escaped;
int start;

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * 
 */
/**
 * Checks Text.charAt against String.charAt position by position, and
 * confirms the documented -1 result for out-of-bounds indices.
 */
public void testCharAt() {
 final String line = "adsawseeeeegqewgasddga";
 final Text text = new Text(line);
 int position = 0;
 while (position < line.length()) {
  assertTrue("testCharAt error1 !!!", text.charAt(position) == line.charAt(position));
  position++;
 }
 // Out-of-range lookups yield -1 rather than throwing.
 assertEquals("testCharAt error2 !!!", -1, text.charAt(-1));
 assertEquals("testCharAt error3 !!!", -1, text.charAt(100));
}

代码示例来源:origin: NationalSecurityAgency/datawave

source.seek(new Range(new Key(source.getTopKey().getRow(), cf), false, totalRange.getEndKey(), totalRange.isEndKeyInclusive()),
          columnFamilies, inclusive);
} else if (cf.getLength() == 1 && cf.charAt(0) == 'd') {
  if (log.isDebugEnabled()) {
    log.debug("Seeking over 'd'");
  source.seek(new Range(new Key(source.getTopKey().getRow(), cf), false, totalRange.getEndKey(), totalRange.isEndKeyInclusive()),
          columnFamilies, inclusive);
} else if (cf.getLength() == 2 && cf.charAt(0) == 't' && cf.charAt(1) == 'f') {
  if (log.isDebugEnabled()) {
    log.debug("Seeking over 'tf'");

代码示例来源:origin: NationalSecurityAgency/datawave

source.seek(new Range(new Key(source.getTopKey().getRow(), cf), false, totalRange.getEndKey(), totalRange.isEndKeyInclusive()),
          columnFamilies, inclusive);
} else if (cf.getLength() == 1 && cf.charAt(0) == 'd') {
  if (log.isDebugEnabled()) {
    log.debug("Seeking over 'd'");
  source.seek(new Range(new Key(source.getTopKey().getRow(), cf), false, totalRange.getEndKey(), totalRange.isEndKeyInclusive()),
          columnFamilies, inclusive);
} else if (cf.getLength() == 2 && cf.charAt(0) == 't' && cf.charAt(1) == 'f') {
  if (log.isDebugEnabled()) {
    log.debug("Seeking over 'tf'");

代码示例来源:origin: com.facebook.presto.hive/hive-apache

int c = escape.charAt(i);
switch (c) {
case '\\':

代码示例来源:origin: com.twitter.elephantbird/elephant-bird-core

if (line_.getLength() == 0 || line_.charAt(0) == '\n') {
 HadoopCompat.incrementCounter(emptyLinesCounter, 1);
 continue;

相关文章