Usage of the org.apache.lucene.analysis.Token.copyBuffer() method, with code examples


This article collects some code examples of the Java method org.apache.lucene.analysis.Token.copyBuffer(), showing how Token.copyBuffer() is used in practice. The examples come from selected projects on platforms such as Github, Stackoverflow, and Maven, and should serve as useful references. Details of Token.copyBuffer() follow:
Package: org.apache.lucene.analysis
Class: Token
Method: copyBuffer

Token.copyBuffer introduction

copyBuffer(char[] buffer, int offset, int length) copies length characters from the given char[] buffer, starting at offset, into the token's internal term buffer, growing the term buffer if necessary.
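
A minimal sketch of the call (the buffer contents, offsets, and class name below are illustrative, not from the article):

import org.apache.lucene.analysis.Token;

public class CopyBufferDemo {
  public static void main(String[] args) {
    char[] source = "hello world".toCharArray();
    Token token = new Token();
    // Copy the 5 characters starting at offset 6 ("world") into the token's term buffer.
    token.copyBuffer(source, 6, 5);
    // Read the term text back out of the token.
    System.out.println(new String(token.buffer(), 0, token.length())); // world
  }
}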

Code examples

Code example source: gncloud/fastcatsearch

/**
 * Copy the prototype token's fields into this one, with a different term. Note: Payloads are shared.
 * @param prototype existing Token
 * @param newTermBuffer buffer containing new term text
 * @param offset the index in the buffer of the first character
 * @param length number of valid characters in the buffer
 */
public void reinit(Token prototype, char[] newTermBuffer, int offset, int length) {
 copyBuffer(newTermBuffer, offset, length);
 positionIncrement = prototype.positionIncrement;
 flags = prototype.flags;
 startOffset = prototype.startOffset;
 endOffset = prototype.endOffset;
 type = prototype.type;
 payload = prototype.payload;
}

Code example source: gncloud/fastcatsearch

/**
 *  Constructs a Token with the given term buffer (offset & length),
 *  plus start and end offsets in the source text.
 * @param startTermBuffer buffer containing term text
 * @param termBufferOffset the index in the buffer of the first character
 * @param termBufferLength number of valid characters in the buffer
 * @param start start offset in the source text
 * @param end end offset in the source text
 */
public Token(char[] startTermBuffer, int termBufferOffset, int termBufferLength, int start, int end) {
 checkOffsets(start, end);
 copyBuffer(startTermBuffer, termBufferOffset, termBufferLength);
 startOffset = start;
 endOffset = end;
}
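
For illustration, a hypothetical call to this constructor (the buffer contents and offsets are made up, not from the source project):

char[] buf = "quick brown fox".toCharArray();
// Term text is "brown": buffer offset 6, length 5; in the source text it
// spans character 6 (inclusive) through 11 (exclusive).
Token t = new Token(buf, 6, 5, 6, 11);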

Code example source: gncloud/fastcatsearch

/**
 * Copy the prototype token's fields into this one. Note: Payloads are shared.
 * @param prototype source Token to copy fields from
 */
public void reinit(Token prototype) {
 copyBuffer(prototype.buffer(), 0, prototype.length());
 positionIncrement = prototype.positionIncrement;
 flags = prototype.flags;
 startOffset = prototype.startOffset;
 endOffset = prototype.endOffset;
 type = prototype.type;
 payload = prototype.payload;
}

Code example source: gncloud/fastcatsearch

/** Shorthand for calling {@link #clear},
 *  {@link #copyBuffer(char[], int, int)},
 *  {@link #setOffset},
 *  {@link #setType}
 *  @return this Token instance */
public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, String newType) {
 checkOffsets(newStartOffset, newEndOffset);
 clearNoTermBuffer();
 copyBuffer(newTermBuffer, newTermOffset, newTermLength);
 payload = null;
 positionIncrement = 1;
 startOffset = newStartOffset;
 endOffset = newEndOffset;
 type = newType;
 return this;
}

Code example source: gncloud/fastcatsearch

/** Shorthand for calling {@link #clear},
 *  {@link #copyBuffer(char[], int, int)},
 *  {@link #setOffset},
 *  {@link #setType} on Token.DEFAULT_TYPE
 *  @return this Token instance */
public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
 checkOffsets(newStartOffset, newEndOffset);
 clearNoTermBuffer();
 copyBuffer(newTermBuffer, newTermOffset, newTermLength);
 startOffset = newStartOffset;
 endOffset = newEndOffset;
 type = DEFAULT_TYPE;
 return this;
}

Code example source: hibernate/hibernate-search

public static Token[] tokensFromAnalysis(Analyzer analyzer, String field, String text) throws IOException {
  final List<Token> tokenList = new ArrayList<Token>();
  final TokenStream stream = analyzer.tokenStream( field, new StringReader( text ) );
  try {
    CharTermAttribute term = stream.addAttribute( CharTermAttribute.class );
    stream.reset();
    while ( stream.incrementToken() ) {
      Token token = new Token();
      token.copyBuffer( term.buffer(), 0, term.length() );
      tokenList.add( token );
    }
    stream.end();
  }
  finally {
    stream.close();
  }
  return tokenList.toArray( new Token[tokenList.size()] );
}
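
A hypothetical caller for this helper; StandardAnalyzer and the field name are illustrative, and on older Lucene versions the analyzer constructor additionally requires a Version argument:

Analyzer analyzer = new StandardAnalyzer();
Token[] tokens = tokensFromAnalysis(analyzer, "content", "Hello Lucene world");
for (Token t : tokens) {
  // Print each term's text, read straight from the token's buffer.
  System.out.println(new String(t.buffer(), 0, t.length()));
}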

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextSuffixInputToken(Token token) throws IOException {
 if (!suffix.incrementToken()) return null;
 token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
 token.setPositionIncrement(posIncrAtt.getPositionIncrement());
 token.setFlags(flagsAtt.getFlags());
 token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
 token.setType(typeAtt.type());
 token.setPayload(payloadAtt.getPayload());
 return token;
}

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextInputToken(Token token) throws IOException {
 if (!input.incrementToken()) return null;
 token.copyBuffer(in_termAtt.buffer(), 0, in_termAtt.length());
 token.setPositionIncrement(in_posIncrAtt.getPositionIncrement());
 token.setFlags(in_flagsAtt.getFlags());
 token.setOffset(in_offsetAtt.startOffset(), in_offsetAtt.endOffset());
 token.setType(in_typeAtt.type());
 token.setPayload(in_payloadAtt.getPayload());
 return token;
}
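
These helpers assume the filter has already pulled attribute references from the streams it wraps. An illustrative setup for the input stream's attributes (assumed field declarations, not from the source):

// Typically initialized once, in the filter's constructor:
private final CharTermAttribute in_termAtt = input.addAttribute(CharTermAttribute.class);
private final PositionIncrementAttribute in_posIncrAtt = input.addAttribute(PositionIncrementAttribute.class);
private final FlagsAttribute in_flagsAtt = input.addAttribute(FlagsAttribute.class);
private final OffsetAttribute in_offsetAtt = input.addAttribute(OffsetAttribute.class);
private final TypeAttribute in_typeAtt = input.addAttribute(TypeAttribute.class);
private final PayloadAttribute in_payloadAtt = input.addAttribute(PayloadAttribute.class);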

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextPrefixInputToken(Token token) throws IOException {
 if (!prefix.incrementToken()) return null;
 token.copyBuffer(p_termAtt.buffer(), 0, p_termAtt.length());
 token.setPositionIncrement(p_posIncrAtt.getPositionIncrement());
 token.setFlags(p_flagsAtt.getFlags());
 token.setOffset(p_offsetAtt.startOffset(), p_offsetAtt.endOffset());
 token.setType(p_typeAtt.type());
 token.setPayload(p_payloadAtt.getPayload());
 return token;
}

Code example source: org.apache.lucene/lucene-analyzers

private Token getNextToken(Token token) throws IOException {
 if (!this.incrementToken()) return null;
 token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
 token.setPositionIncrement(posIncrAtt.getPositionIncrement());
 token.setFlags(flagsAtt.getFlags());
 token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
 token.setType(typeAtt.type());
 token.setPayload(payloadAtt.getPayload());
 return token;
}

Code example source: DiceTechJobs/SolrPlugins

private Collection<Token> getTokens(String q, Analyzer analyzer) throws IOException {
  Collection<Token> result = new ArrayList<Token>();
  assert analyzer != null;
  TokenStream ts = analyzer.tokenStream("", q);
  try {
    ts.reset();
    // TODO: support custom attributes
    CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
    OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
    TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
    FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
    PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
    PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);
    while (ts.incrementToken()){
      Token token = new Token();
      token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
      token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
      token.setType(typeAtt.type());
      token.setFlags(flagsAtt.getFlags());
      token.setPayload(payloadAtt.getPayload());
      token.setPositionIncrement(posIncAtt.getPositionIncrement());
      result.add(token);
    }
    ts.end();
    return result;
  } finally {
    IOUtils.closeWhileHandlingException(ts);
  }
}
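
A hypothetical invocation of getTokens (the analyzer choice is illustrative; some Lucene versions require a Version argument in its constructor):

Collection<Token> tokens = getTokens("quick brown fox", new WhitespaceAnalyzer());
for (Token t : tokens) {
  // Term text plus its character offsets in the original query string.
  System.out.println(new String(t.buffer(), 0, t.length())
      + " [" + t.startOffset() + "," + t.endOffset() + ")");
}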
