Usage and code examples of org.apache.lucene.analysis.Token.setType()


This article collects a number of Java code examples for the org.apache.lucene.analysis.Token.setType() method and shows how it is used in practice. The examples are taken from selected open-source projects published on platforms such as GitHub, Stack Overflow and Maven, and should serve as a useful reference. Details of Token.setType() are as follows:

Package: org.apache.lucene.analysis
Class: Token
Method: setType

About Token.setType

Token.setType(String type) sets the lexical type of the token. The type is a free-form label, such as the default "word" or the constants emitted by StandardTokenizer (for example "<HOST>" or "<ACRONYM>"), and downstream consumers can read it back via Token.type() or through a TypeAttribute.
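
Before the sourced examples, here is a minimal hypothetical sketch of the call (not taken from the projects below; it assumes the classic Token API of Lucene 2.x/3.x that these snippets target):

import org.apache.lucene.analysis.Token;

public class SetTypeExample {
  public static void main(String[] args) {
    // Token with term text "example" and offsets 0..7; its default type is "word".
    Token token = new Token("example", 0, 7);
    System.out.println(token.type());   // prints: word

    // setType replaces the lexical type string carried by the token.
    token.setType("<HOST>");
    System.out.println(token.type());   // prints: <HOST>
  }
}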

Code examples

Code example source: org.apache.lucene/lucene-analyzers

/**
 * Final touch of a shingle token before it is passed on to the consumer from method {@link #incrementToken()}.
 *
 * Calculates and sets type, flags, position increment, start/end offsets and weight.
 *
 * @param token Shingle token
 * @param shingle Tokens used to produce the shingle token.
 * @param currentPermutationStartOffset Start offset in parameter currentPermutationTokens
 * @param currentPermutationRows index to Matrix.Column.Row from the position of tokens in parameter currentPermutationTokens
 * @param currentPermuationTokens tokens of the current permutation of rows in the matrix.
 */
public void updateToken(Token token, List<Token> shingle, int currentPermutationStartOffset, List<Row> currentPermutationRows, List<Token> currentPermuationTokens) {
 token.setType(ShingleMatrixFilter.class.getName());
 token.setFlags(0);
 token.setPositionIncrement(1);
 token.setStartOffset(shingle.get(0).startOffset());
 token.setEndOffset(shingle.get(shingle.size() - 1).endOffset());
 settingsCodec.setWeight(token, calculateShingleWeight(token, shingle, currentPermutationStartOffset, currentPermutationRows, currentPermuationTokens));
}
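
The type written here (the filter's class name) is visible downstream through the TypeAttribute. The following is a hypothetical consumer sketch, assuming a Lucene 3.x lucene-analyzers JAR in which ShingleMatrixFilter and its (TokenStream, minShingleSize, maxShingleSize) constructor are still available; it is not part of the original example:

import java.io.StringReader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.shingle.ShingleMatrixFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.util.Version;

public class ShingleTypeDemo {
  public static void main(String[] args) throws Exception {
    TokenStream base = new WhitespaceTokenizer(Version.LUCENE_36, new StringReader("quick brown fox"));
    TokenStream shingles = new ShingleMatrixFilter(base, 2, 2);

    CharTermAttribute termAtt = shingles.addAttribute(CharTermAttribute.class);
    TypeAttribute typeAtt = shingles.addAttribute(TypeAttribute.class);

    shingles.reset();
    while (shingles.incrementToken()) {
      // Each shingle carries the type set in updateToken(), i.e. the filter's class name.
      System.out.println(termAtt.toString() + "  type=" + typeAtt.type());
    }
    shingles.end();
    shingles.close();
  }
}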

Code example source: org.apache.lucene/com.springsource.org.apache.lucene

if (tokenType == StandardTokenizerImpl.ACRONYM_DEP) { // deprecated acronym handling
  if (replaceInvalidAcronym) {
    reusableToken.setType(StandardTokenizerImpl.TOKEN_TYPES[StandardTokenizerImpl.HOST]);
    reusableToken.setTermLength(reusableToken.termLength() - 1); // remove extra '.'
  } else {
    reusableToken.setType(StandardTokenizerImpl.TOKEN_TYPES[StandardTokenizerImpl.ACRONYM]);
  }
} else {
  reusableToken.setType(StandardTokenizerImpl.TOKEN_TYPES[tokenType]);
}

Code example source: org.apache.lucene/lucene-core-jfrog

if (tokenType == StandardTokenizerImpl.ACRONYM_DEP) { // deprecated acronym handling
  if (replaceInvalidAcronym) {
    reusableToken.setType(StandardTokenizerImpl.TOKEN_TYPES[StandardTokenizerImpl.HOST]);
    reusableToken.setTermLength(reusableToken.termLength() - 1); // remove extra '.'
  } else {
    reusableToken.setType(StandardTokenizerImpl.TOKEN_TYPES[StandardTokenizerImpl.ACRONYM]);
  }
} else {
  reusableToken.setType(StandardTokenizerImpl.TOKEN_TYPES[tokenType]);
}

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextInputToken(Token token) throws IOException {
 if (!input.incrementToken()) return null;
 token.copyBuffer(in_termAtt.buffer(), 0, in_termAtt.length());
 token.setPositionIncrement(in_posIncrAtt.getPositionIncrement());
 token.setFlags(in_flagsAtt.getFlags());
 token.setOffset(in_offsetAtt.startOffset(), in_offsetAtt.endOffset());
 token.setType(in_typeAtt.type());
 token.setPayload(in_payloadAtt.getPayload());
 return token;
}

Code example source: org.infinispan/infinispan-embedded-query

private Token getNextSuffixInputToken(Token token) throws IOException {
 if (!suffix.incrementToken()) return null;
 token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
 token.setPositionIncrement(posIncrAtt.getPositionIncrement());
 token.setFlags(flagsAtt.getFlags());
 token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
 token.setType(typeAtt.type());
 token.setPayload(payloadAtt.getPayload());
 return token;
}

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextPrefixInputToken(Token token) throws IOException {
 if (!prefix.incrementToken()) return null;
 token.copyBuffer(p_termAtt.buffer(), 0, p_termAtt.length());
 token.setPositionIncrement(p_posIncrAtt.getPositionIncrement());
 token.setFlags(p_flagsAtt.getFlags());
 token.setOffset(p_offsetAtt.startOffset(), p_offsetAtt.endOffset());
 token.setType(p_typeAtt.type());
 token.setPayload(p_payloadAtt.getPayload());
 return token;
}

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextSuffixInputToken(Token token) throws IOException {
 if (!suffix.incrementToken()) return null;
 token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
 token.setPositionIncrement(posIncrAtt.getPositionIncrement());
 token.setFlags(flagsAtt.getFlags());
 token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
 token.setType(typeAtt.type());
 token.setPayload(payloadAtt.getPayload());
 return token;
}

Code example source: org.infinispan/infinispan-embedded-query

private Token getNextPrefixInputToken(Token token) throws IOException {
 if (!prefix.incrementToken()) return null;
 token.copyBuffer(p_termAtt.buffer(), 0, p_termAtt.length());
 token.setPositionIncrement(p_posIncrAtt.getPositionIncrement());
 token.setFlags(p_flagsAtt.getFlags());
 token.setOffset(p_offsetAtt.startOffset(), p_offsetAtt.endOffset());
 token.setType(p_typeAtt.type());
 token.setPayload(p_payloadAtt.getPayload());
 return token;
}

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextToken(Token token) throws IOException {
 if (!this.incrementToken()) return null;
 token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
 token.setPositionIncrement(posIncrAtt.getPositionIncrement());
 token.setFlags(flagsAtt.getFlags());
 token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
 token.setType(typeAtt.type());
 token.setPayload(payloadAtt.getPayload());
 return token;
}

Code example source: DiceTechJobs/SolrPlugins

private Collection<Token> getTokens(String q, Analyzer analyzer) throws IOException {
  Collection<Token> result = new ArrayList<Token>();
  assert analyzer != null;
  TokenStream ts = analyzer.tokenStream("", q);
  try {
    ts.reset();
    // TODO: support custom attributes
    CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
    OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
    TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
    FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
    PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
    PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);
    while (ts.incrementToken()){
      Token token = new Token();
      token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
      token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
      token.setType(typeAtt.type());
      token.setFlags(flagsAtt.getFlags());
      token.setPayload(payloadAtt.getPayload());
      token.setPositionIncrement(posIncAtt.getPositionIncrement());
      result.add(token);
    }
    ts.end();
    return result;
  } finally {
    IOUtils.closeWhileHandlingException(ts);
  }
}
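
A hypothetical call site for the helper above (invoked from within the same class, since getTokens is private; the analyzer choice is an assumption, and WhitespaceAnalyzer construction is version-dependent — older Lucene releases require a Version argument):

// Hypothetical usage of getTokens(...) from the snippet above.
Analyzer analyzer = new WhitespaceAnalyzer();
for (Token token : getTokens("Java Developer London", analyzer)) {
    // Token.type() returns the value copied from the stream's TypeAttribute.
    System.out.println(token + "  type=" + token.type()
        + "  posInc=" + token.getPositionIncrement());
}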
