Java class org.apache.lucene.analysis.ngram.NGramFilterFactory example source code

Project: cia    File: DataSearchMappingFactory.java
/**
 * Gets the search mapping.
 *
 * @return the search mapping
 */
@Factory
public SearchMapping getSearchMapping() {
    final SearchMapping mapping = new SearchMapping();
    mapping.analyzerDef("ngram", StandardTokenizerFactory.class).filter(LowerCaseFilterFactory.class)
            .filter(NGramFilterFactory.class).param("minGramSize", "3").param("maxGramSize", "3")
            .analyzerDef("se", StandardTokenizerFactory.class).filter(LowerCaseFilterFactory.class)
            .filter(SwedishLightStemFilterFactory.class).analyzerDef("en", StandardTokenizerFactory.class)
            .filter(LowerCaseFilterFactory.class).filter(PorterStemFilterFactory.class)
            .entity(DocumentContentData.class).indexed().property("hjid", ElementType.FIELD).documentId().property("content", ElementType.METHOD).field().analyzer("se").store(Store.NO).analyze(Analyze.YES).property("id", ElementType.METHOD).field()
            .entity(DocumentElement.class).indexed().property("id", ElementType.FIELD).documentId().property("title", ElementType.METHOD).field().analyzer("se").store(Store.NO).analyze(Analyze.YES).property("subTitle", ElementType.METHOD).field().analyzer("se").store(Store.NO).analyze(Analyze.YES)
            .entity(DocumentStatusContainer.class).indexed().property("hjid", ElementType.FIELD).documentId().property("documentCategory", ElementType.METHOD).field().analyzer("se").store(Store.NO).analyze(Analyze.YES);

    return mapping;
}
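
The "ngram" analyzer definition above chains a standard tokenizer, a lowercase filter, and NGramFilterFactory with minGramSize and maxGramSize both set to 3. As a point of comparison (not code from the cia project), the same chain can be sketched directly with Lucene's CustomAnalyzer, which is handy for checking the filter parameters outside of Hibernate Search; this assumes Lucene 5.2 or later, where CustomAnalyzer is available.

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
import org.apache.lucene.analysis.custom.CustomAnalyzer;
import org.apache.lucene.analysis.ngram.NGramFilterFactory;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;

// Sketch only: standard tokenizer -> lowercase -> 3..3 n-grams, mirroring the
// "ngram" analyzer definition above.
public static Analyzer buildNGramAnalyzer() throws IOException {
    return CustomAnalyzer.builder()
            .withTokenizer(StandardTokenizerFactory.class)
            .addTokenFilter(LowerCaseFilterFactory.class)
            .addTokenFilter(NGramFilterFactory.class, "minGramSize", "3", "maxGramSize", "3")
            .build();
}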
Project: hapi-fhir    File: LuceneSearchMappingFactory.java
@Factory
public SearchMapping getSearchMapping() {
    SearchMapping mapping = new SearchMapping();

    mapping.analyzerDef("autocompleteEdgeAnalyzer", PatternTokenizerFactory.class)
            .tokenizerParam("pattern", "(.*)")
            .tokenizerParam("group", "1")
            .filter(LowerCaseFilterFactory.class)
            .filter(StopFilterFactory.class)
            .filter(EdgeNGramFilterFactory.class)
            .param("minGramSize", "3")
            .param("maxGramSize", "50")
        .analyzerDef("autocompletePhoneticAnalyzer", StandardTokenizerFactory.class)
            .filter(StandardFilterFactory.class)
            .filter(StopFilterFactory.class)
            .filter(PhoneticFilterFactory.class)
            .param("encoder", "DoubleMetaphone")
            .filter(SnowballPorterFilterFactory.class)
            .param("language", "English")
        .analyzerDef("autocompleteNGramAnalyzer", StandardTokenizerFactory.class)
            .filter(WordDelimiterFilterFactory.class)
            .filter(LowerCaseFilterFactory.class)
            .filter(NGramFilterFactory.class)
            .param("minGramSize", "3")
            .param("maxGramSize", "20")
        .analyzerDef("standardAnalyzer", StandardTokenizerFactory.class)
            .filter(LowerCaseFilterFactory.class)
        .analyzerDef("exactAnalyzer", StandardTokenizerFactory.class)
        .analyzerDef("conceptParentPidsAnalyzer", WhitespaceTokenizerFactory.class);

    return mapping;
}
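
The mapping above only defines the analyzers; entities then refer to them by name on indexed fields. Below is a minimal sketch (not code from hapi-fhir) of how the "autocompleteNGramAnalyzer" definition could be referenced, assuming Hibernate Search 5.x annotations; the entity and property names are hypothetical.

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.Analyzer;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Store;

// Hypothetical entity illustrating how a named analyzer definition is applied to a field.
@Entity
@Indexed
public class Concept {

    @Id
    private Long id;

    // Index the display text with the n-gram analyzer defined in the mapping above.
    @Field(store = Store.NO, analyze = Analyze.YES,
           analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer"))
    private String display;
}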