Java class org.apache.lucene.analysis.standard.ClassicTokenizer example source code

Project: elasticsearch_my    File: ClassicTokenizerFactory.java
@Override
public Tokenizer create() {
    ClassicTokenizer tokenizer = new ClassicTokenizer();
    tokenizer.setMaxTokenLength(maxTokenLength);
    return tokenizer;
}
Project: Elasticsearch    File: ClassicTokenizerFactory.java
@Override
public Tokenizer create() {
    ClassicTokenizer tokenizer = new ClassicTokenizer();
    tokenizer.setMaxTokenLength(maxTokenLength);
    return tokenizer;
}
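Both factories above simply construct the tokenizer and cap its token length; the standalone sketch below (not taken from either project) shows what the resulting tokenizer produces when driven directly. It assumes Lucene 5.x to 8.x, where ClassicTokenizer lives in org.apache.lucene.analysis.standard, has a no-argument constructor, and receives its input through setReader.

import java.io.StringReader;

import org.apache.lucene.analysis.standard.ClassicTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class ClassicTokenizerDemo {
    public static void main(String[] args) throws Exception {
        // Same setup as the factories above: create, then cap the token length.
        ClassicTokenizer tokenizer = new ClassicTokenizer();
        tokenizer.setMaxTokenLength(255); // 255 is the usual Lucene default

        // ClassicTokenizer keeps e-mail addresses and hostnames as single tokens.
        tokenizer.setReader(new StringReader("Contact support@example.com about the 2 reports."));

        // Standard TokenStream consumption cycle: reset, incrementToken, end, close.
        CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
        tokenizer.reset();
        while (tokenizer.incrementToken()) {
            System.out.println(term.toString());
        }
        tokenizer.end();
        tokenizer.close();
    }
}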
Project: NYBC    File: ClassicTokenizerFactory.java
@Override
public Tokenizer create(Reader input) {
  ClassicTokenizer tokenizer = new ClassicTokenizer(luceneMatchVersion, input); 
  tokenizer.setMaxTokenLength(maxTokenLength);
  return tokenizer;
}
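This variant appears to target the older (Lucene 4.x era) TokenizerFactory contract, where create(Reader) receives the input Reader directly and ClassicTokenizer is constructed with the luceneMatchVersion plus that Reader; in Lucene 5.x and later the Reader is attached afterwards through setReader, as in the two factories above.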
Project: meresco-lucene    File: MerescoStandardAnalyzer.java
public List<String> post_analyse(String fieldName, String string) throws IOException {
    ClassicTokenizer src = new ClassicTokenizer();
    src.setReader(new StringReader(string));
    TokenStream tok = this.post_analyzer(fieldName, src);
    return this.readTokenStream(tok);
}
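post_analyzer and readTokenStream are helpers of MerescoStandardAnalyzer that are not shown in this excerpt. The sketch below is a hypothetical stand-in for readTokenStream, illustrating only the standard TokenStream consumption pattern such a helper would typically follow; it is not the meresco-lucene implementation.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

// Hypothetical helper, not the meresco-lucene code: drains a TokenStream
// into a list of term strings.
private List<String> readTokenStream(TokenStream tok) throws IOException {
    List<String> terms = new ArrayList<>();
    CharTermAttribute termAtt = tok.addAttribute(CharTermAttribute.class);
    tok.reset();                 // must be called before the first incrementToken()
    while (tok.incrementToken()) {
        terms.add(termAtt.toString());
    }
    tok.end();                   // records the final offset state
    tok.close();                 // releases the underlying Reader
    return terms;
}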