/**
 * Returns a reusable {@link TokenStream} for the given field, recycling the
 * tokenizer/filter chain cached via {@code setPreviousTokenStream}.
 *
 * @param fieldName name of the field being analyzed (not used — every field
 *                  gets the same chain)
 * @param reader    source of the characters to tokenize
 * @return a whitespace-tokenized, stop-filtered, Porter-stemmed token stream
 * @throws IOException if the cached tokenizer cannot be reset on the new reader
 */
@Override
public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException {
    SavedStreams streams = (SavedStreams) getPreviousTokenStream();
    if (streams == null) {
        // First use on this thread: build and cache the full chain.
        streams = new SavedStreams();
        streams.source = new WhitespaceTokenizer(reader);
        streams.result = new StopFilter(true, streams.source, stopwords, true);
        // BUG FIX: chain the stemmer onto the stop-filtered stream. The
        // original wrapped streams.source here, which silently bypassed the
        // StopFilter built on the previous line.
        streams.result = new PorterStemFilter(streams.result);
        setPreviousTokenStream(streams);
    } else {
        // Reuse the cached chain on the new input.
        streams.source.reset(reader);
    }
    return streams.result;
}
/** * * @param fieldName * @param reader * @return * @throws IOException */ @Override public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException { SavedStreams streams = (SavedStreams) getPreviousTokenStream(); if (streams == null) { streams = new SavedStreams(); streams.source = new WhitespaceTokenizer(reader); streams.result = new StopFilter(true, streams.source, stopWords, true); // streams.result = new PorterStemFilter(streams.source); setPreviousTokenStream(streams); } else { streams.source.reset(reader); } return streams.result; }
/**
 * Builds the analysis chain for a field: whitespace tokenization followed by
 * lower-casing.
 *
 * @param fieldName field being analyzed (ignored — all fields share one chain)
 * @param reader    character source for the tokenizer
 * @return the tokenizer/filter pair wrapped in a {@code TokenStreamComponents}
 */
@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
    final Tokenizer tokenizer = new WhitespaceTokenizer(matchVersion, reader);
    final TokenStream lowerCased = new LowerCaseFilter(matchVersion, tokenizer);
    return new TokenStreamComponents(tokenizer, lowerCased);
}
public TokenStream tokenStream(String fieldname, Reader reader) { return new LatCyrFilter( new LowerCaseFilter( new WhitespaceTokenizer(reader))); //new StandardTokenizer(reader))); }
/**
 * Creates the filter, splitting {@code in} on whitespace so each
 * whitespace-delimited token can be processed by this filter.
 *
 * @param in character source to tokenize
 */
public DateTokenFilter(Reader in) {
    super(in);
    baseTokeniser = new WhitespaceTokenizer(in);
}
/**
 * Creates the filter, splitting {@code in} on whitespace so each
 * whitespace-delimited token can be processed by this filter.
 *
 * @param in character source to tokenize
 */
public FloatTokenFilter(Reader in) {
    super(in);
    baseTokeniser = new WhitespaceTokenizer(in);
}
/**
 * Creates the filter, splitting {@code in} on whitespace so each
 * whitespace-delimited token can be processed by this filter.
 *
 * @param in character source to tokenize
 */
public DateTimeTokenFilter(Reader in) {
    super(in);
    baseTokeniser = new WhitespaceTokenizer(in);
}
/**
 * Creates the filter, splitting {@code in} on whitespace so each
 * whitespace-delimited token can be processed by this filter.
 *
 * @param in character source to tokenize
 */
public IntegerTokenFilter(Reader in) {
    super(in);
    baseTokeniser = new WhitespaceTokenizer(in);
}
/**
 * Creates the filter, splitting {@code in} on whitespace so each
 * whitespace-delimited token can be processed by this filter.
 *
 * @param in character source to tokenize
 */
public DoubleTokenFilter(Reader in) {
    super(in);
    baseTokeniser = new WhitespaceTokenizer(in);
}
/**
 * Creates the filter, splitting {@code in} on whitespace so each
 * whitespace-delimited token can be processed by this filter.
 *
 * @param in character source to tokenize
 */
public LongTokenFilter(Reader in) {
    super(in);
    baseTokeniser = new WhitespaceTokenizer(in);
}
/**
 * Analyzes text by splitting on whitespace and lower-casing each token.
 *
 * @param fieldName field being analyzed (ignored — all fields share one chain)
 * @param reader    source of the characters to tokenize
 * @return the lower-cased, whitespace-tokenized stream
 */
@Override
public TokenStream tokenStream( String fieldName, Reader reader ) {
    final TokenStream tokens = new WhitespaceTokenizer( LUCENE_VERSION, reader );
    return new LowerCaseFilter( LUCENE_VERSION, tokens );
}
/**
 * Analyzes text by splitting on whitespace only; no further filtering is
 * applied to the tokens.
 *
 * @param fieldName field being analyzed (ignored — all fields share one chain)
 * @param reader    source of the characters to tokenize
 * @return the whitespace-tokenized stream
 */
@Override
public TokenStream tokenStream( String fieldName, Reader reader ) {
    final TokenStream tokens = new WhitespaceTokenizer( LUCENE_VERSION, reader );
    return tokens;
}