public void testMaxSizeEndHighlight() throws Exception {
  TestHighlightRunner helper = new TestHighlightRunner() {
    @Override
    public void run() throws Exception {
      CharacterRunAutomaton stopWords = new CharacterRunAutomaton(new RegExp("i[nt]").toAutomaton());
      TermQuery query = new TermQuery(new Term("text", "searchterm"));
      String text = "this is a text with searchterm in it";
      SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
      Highlighter hg = getHighlighter(query, "text", fm);
      hg.setTextFragmenter(new NullFragmenter());
      hg.setMaxDocCharsToAnalyze(36);
      String match = hg.getBestFragment(
          new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopWords), "text", text);
      assertTrue(
          "Matched text should contain remainder of text after highlighted query",
          match.endsWith("in it"));
    }
  };
  helper.start();
}
public void testStopwords() throws Exception {
  StandardQueryParser qp = new StandardQueryParser();
  CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
  qp.setAnalyzer(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopSet));

  Query result = qp.parse("a:the OR a:foo", "a");
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 0,
      ((BooleanQuery) result).clauses().size() == 0);

  result = qp.parse("a:woo OR a:the", "a");
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a TermQuery", result instanceof TermQuery);

  result = qp.parse("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", "a");
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  if (VERBOSE) System.out.println("Result: " + result);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 2,
      ((BooleanQuery) result).clauses().size() == 2);
}
public void testStopwords() throws Exception {
  CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
  CommonQueryParserConfiguration qp =
      getParserConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopSet));

  Query result = getQuery("field:the OR field:foo", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 0,
      ((BooleanQuery) result).clauses().size() == 0);

  result = getQuery("field:woo OR field:the", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a TermQuery", result instanceof TermQuery);

  result = getQuery("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  if (VERBOSE) System.out.println("Result: " + result);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 2,
      ((BooleanQuery) result).clauses().size() == 2);
}
public void testCustomProvider() throws IOException {
  AutomatonProvider myProvider = new AutomatonProvider() {
    // automaton that matches "quick", "brown" or "bob"
    private Automaton quickBrownAutomaton = Operations.union(Arrays.asList(
        Automata.makeString("quick"),
        Automata.makeString("brown"),
        Automata.makeString("bob")));

    @Override
    public Automaton getAutomaton(String name) {
      if (name.equals("quickBrown")) return quickBrownAutomaton;
      else return null;
    }
  };
  RegexpQuery query = new RegexpQuery(newTerm("<quickBrown>"), RegExp.ALL, myProvider,
      DEFAULT_MAX_DETERMINIZED_STATES);
  assertEquals(1, searcher.search(query, 5).totalHits);
}
/** Test a configuration where two characters make a term */
public void testTwoChars() throws Exception {
  CharacterRunAutomaton single = new CharacterRunAutomaton(new RegExp("..").toAutomaton());
  Analyzer a = new MockAnalyzer(random(), single, false);
  assertAnalyzesTo(a, "foobar",
      new String[] { "fo", "ob", "ar" },
      new int[] { 0, 2, 4 },
      new int[] { 2, 4, 6 });
  // make sure when the last term is a "partial" match that end() is correct
  assertTokenStreamContents(a.tokenStream("bogus", "fooba"),
      new String[] { "fo", "ob" },
      new int[] { 0, 2 },
      new int[] { 2, 4 },
      new int[] { 1, 1 },
      new Integer(5));
  checkRandomData(random(), a, 100);
}
/** Test a configuration where three characters make a term */
public void testThreeChars() throws Exception {
  CharacterRunAutomaton single = new CharacterRunAutomaton(new RegExp("...").toAutomaton());
  Analyzer a = new MockAnalyzer(random(), single, false);
  assertAnalyzesTo(a, "foobar",
      new String[] { "foo", "bar" },
      new int[] { 0, 3 },
      new int[] { 3, 6 });
  // make sure when the last term is a "partial" match that end() is correct
  assertTokenStreamContents(a.tokenStream("bogus", "fooba"),
      new String[] { "foo" },
      new int[] { 0 },
      new int[] { 3 },
      new int[] { 1 },
      new Integer(5));
  checkRandomData(random(), a, 100);
}
/** Test a configuration where a word starts with one uppercase letter */
public void testUppercase() throws Exception {
  CharacterRunAutomaton single = new CharacterRunAutomaton(new RegExp("[A-Z][a-z]*").toAutomaton());
  Analyzer a = new MockAnalyzer(random(), single, false);
  assertAnalyzesTo(a, "FooBarBAZ",
      new String[] { "Foo", "Bar", "B", "A", "Z" },
      new int[] { 0, 3, 6, 7, 8 },
      new int[] { 3, 6, 7, 8, 9 });
  assertAnalyzesTo(a, "aFooBar",
      new String[] { "Foo", "Bar" },
      new int[] { 1, 4 },
      new int[] { 4, 7 });
  checkRandomData(random(), a, 100);
}
@Test
public void testStopwords() throws Exception {
  CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
  QueryParser qp = getParserConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopSet));

  Query result = getQuery("field:the OR field:foo", qp);
  Assert.assertNotNull("result is null and it shouldn't be", result);
  Assert.assertTrue("result is not a BooleanQuery or MatchNoDocsQuery",
      result instanceof BooleanQuery || result instanceof MatchNoDocsQuery);
  if (result instanceof BooleanQuery) {
    Assert.assertEquals(0, ((BooleanQuery) result).clauses().size());
  }

  result = getQuery("field:woo OR field:the", qp);
  Assert.assertNotNull("result is null and it shouldn't be", result);
  Assert.assertTrue("result is not a TermQuery", result instanceof TermQuery);

  result = getQuery("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", qp);
  Assert.assertNotNull("result is null and it shouldn't be", result);
  Assert.assertTrue("result is not a BoostQuery", result instanceof BoostQuery);
  result = ((BoostQuery) result).getQuery();
  Assert.assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  if (VERBOSE) System.out.println("Result: " + result);
  Assert.assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 2,
      ((BooleanQuery) result).clauses().size() == 2);
}
public void testMaxSizeEndHighlight() throws Exception {
  TestHighlightRunner helper = new TestHighlightRunner() {
    @Override
    public void run() throws Exception {
      CharacterRunAutomaton stopWords = new CharacterRunAutomaton(new RegExp("i[nt]").toAutomaton());
      TermQuery query = new TermQuery(new Term("text", "searchterm"));
      String text = "this is a text with searchterm in it";
      SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
      Highlighter hg = getHighlighter(query, "text", fm);
      hg.setTextFragmenter(new NullFragmenter());
      hg.setMaxDocCharsToAnalyze(36);
      String match = hg.getBestFragment(
          new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
      assertTrue(
          "Matched text should contain remainder of text after highlighted query",
          match.endsWith("in it"));
    }
  };
  helper.start();
}
public void testStopwords() throws Exception {
  StandardQueryParser qp = new StandardQueryParser();
  CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
  qp.setAnalyzer(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopSet, true));

  Query result = qp.parse("a:the OR a:foo", "a");
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 0,
      ((BooleanQuery) result).clauses().size() == 0);

  result = qp.parse("a:woo OR a:the", "a");
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a TermQuery", result instanceof TermQuery);

  result = qp.parse("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", "a");
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  if (VERBOSE) System.out.println("Result: " + result);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 2,
      ((BooleanQuery) result).clauses().size() == 2);
}
public void testStopwords() throws Exception {
  CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
  CommonQueryParserConfiguration qp =
      getParserConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopSet, true));

  Query result = getQuery("field:the OR field:foo", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 0,
      ((BooleanQuery) result).clauses().size() == 0);

  result = getQuery("field:woo OR field:the", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a TermQuery", result instanceof TermQuery);

  result = getQuery("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  if (VERBOSE) System.out.println("Result: " + result);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 2,
      ((BooleanQuery) result).clauses().size() == 2);
}
public void testCustomProvider() throws IOException {
  AutomatonProvider myProvider = new AutomatonProvider() {
    // automaton that matches "quick", "brown" or "bob"
    private Automaton quickBrownAutomaton = BasicOperations.union(Arrays.asList(
        BasicAutomata.makeString("quick"),
        BasicAutomata.makeString("brown"),
        BasicAutomata.makeString("bob")));

    @Override
    public Automaton getAutomaton(String name) {
      if (name.equals("quickBrown")) return quickBrownAutomaton;
      else return null;
    }
  };
  RegexpQuery query = new RegexpQuery(newTerm("<quickBrown>"), RegExp.ALL, myProvider);
  assertEquals(1, searcher.search(query, 5).totalHits);
}
public void testStopwords() throws Exception {
  CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
  CommonQueryParserConfiguration qp =
      getParserConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopSet));

  Query result = getQuery("field:the OR field:foo", qp);
  assertNotNull("result is null and it shouldn't be", result);
  if (VERBOSE) System.out.println(result.getClass());
  assertTrue("result is not a SpanOrQuery, BooleanQuery or MatchNoDocsQuery",
      result instanceof SpanOrQuery
          || result instanceof BooleanQuery
          || result instanceof MatchNoDocsQuery);
  if (result instanceof BooleanQuery) {
    assertEquals(0, ((BooleanQuery) result).clauses().size());
  }

  result = getQuery("field:woo OR field:the", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a TermQuery", result instanceof TermQuery);

  result = getQuery("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", qp);
  assertNotNull("result is null and it shouldn't be", result);
  assertTrue("result is not a BoostQuery", result instanceof BoostQuery);
  result = ((BoostQuery) result).getQuery();
  assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
  if (VERBOSE) System.out.println("Result: " + result);
  assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 2,
      ((BooleanQuery) result).clauses().size() == 2);
}
/**
 * Builds a regexp query for a single field, using the field type's own regexpQuery when the
 * field is mapped and not tokenized, and falling back to the default query parser behaviour
 * otherwise. Returns null in lenient mode if query construction fails.
 */
private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
  currentFieldType = null;
  Analyzer oldAnalyzer = getAnalyzer();
  try {
    currentFieldType = context.fieldMapper(field);
    if (currentFieldType != null) {
      if (!settings.forceAnalyzer()) {
        setAnalyzer(context.getSearchAnalyzer(currentFieldType));
      }
      Query query = null;
      if (currentFieldType.tokenized() == false) {
        query = currentFieldType.regexpQuery(termStr, RegExp.ALL,
            getMaxDeterminizedStates(), getMultiTermRewriteMethod(), context);
      }
      if (query == null) {
        query = super.getRegexpQuery(field, termStr);
      }
      return query;
    }
    return super.getRegexpQuery(field, termStr);
  } catch (RuntimeException e) {
    if (settings.lenient()) {
      return null;
    }
    throw e;
  } finally {
    setAnalyzer(oldAnalyzer);
  }
}
/**
 * @param include The regular expression pattern for the terms to be included
 * @param exclude The regular expression pattern for the terms to be excluded
 */
public IncludeExclude(RegExp include, RegExp exclude) {
  if (include == null && exclude == null) {
    throw new IllegalArgumentException();
  }
  this.include = include;
  this.exclude = exclude;
  this.includeValues = null;
  this.excludeValues = null;
  this.incZeroBasedPartition = 0;
  this.incNumPartitions = 0;
}
public void testRegexQueryType() throws Exception {
  String mapping = jsonBuilder().startObject().startObject("type1")
      .startObject("properties").startObject("completion")
          .field("type", "completion")
      .endObject().endObject()
      .endObject().endObject().string();

  DocumentMapper defaultMapper = createIndex("test").mapperService()
      .documentMapperParser().parse("type1", new CompressedXContent(mapping));
  FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
  CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
  Query prefixQuery = completionFieldMapper.fieldType()
      .regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
  assertThat(prefixQuery, instanceOf(RegexCompletionQuery.class));
}
/**
 * Builds a regexp query for a single field, delegating to the field type's regexpQuery when the
 * mapper supports term-level queries on the query string, and falling back to the default query
 * parser behaviour otherwise. Returns null in lenient mode if query construction fails.
 */
private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
  currentFieldType = null;
  Analyzer oldAnalyzer = getAnalyzer();
  try {
    currentFieldType = parseContext.fieldMapper(field);
    if (currentFieldType != null) {
      if (!forcedAnalyzer) {
        setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
      }
      Query query = null;
      if (currentFieldType.useTermQueryWithQueryString()) {
        query = currentFieldType.regexpQuery(termStr, RegExp.ALL,
            maxDeterminizedStates, multiTermRewriteMethod, parseContext);
      }
      if (query == null) {
        query = super.getRegexpQuery(field, termStr);
      }
      return query;
    }
    return super.getRegexpQuery(field, termStr);
  } catch (RuntimeException e) {
    if (settings.lenient()) {
      return null;
    }
    throw e;
  } finally {
    setAnalyzer(oldAnalyzer);
  }
}
/**
 * @param include The regular expression pattern for the terms to be included
 * @param exclude The regular expression pattern for the terms to be excluded
 */
public IncludeExclude(RegExp include, RegExp exclude) {
  if (include == null && exclude == null) {
    throw new IllegalArgumentException();
  }
  this.include = include;
  this.exclude = exclude;
  this.includeValues = null;
  this.excludeValues = null;
}
/**
 * Builds the IncludeExclude instance from either the regular expression patterns or the sets of
 * explicit values, rejecting configurations that mix both styles.
 */
public IncludeExclude includeExclude() {
  RegExp includePattern = include != null ? new RegExp(include) : null;
  RegExp excludePattern = exclude != null ? new RegExp(exclude) : null;
  if (includePattern != null || excludePattern != null) {
    if (includeValues != null || excludeValues != null) {
      throw new IllegalArgumentException(
          "Can only use regular expression include/exclude or a set of values, not both");
    }
    return new IncludeExclude(includePattern, excludePattern);
  } else if (includeValues != null || excludeValues != null) {
    return new IncludeExclude(includeValues, excludeValues);
  } else {
    return null;
  }
}
/**
 * Terms api equivalency
 */
public void assertTermsEquals(String info, IndexReader leftReader, Terms leftTerms, Terms rightTerms, boolean deep) throws IOException {
  if (leftTerms == null || rightTerms == null) {
    assertNull(info, leftTerms);
    assertNull(info, rightTerms);
    return;
  }
  assertTermsStatisticsEquals(info, leftTerms, rightTerms);
  assertEquals(leftTerms.hasOffsets(), rightTerms.hasOffsets());
  assertEquals(leftTerms.hasPositions(), rightTerms.hasPositions());
  assertEquals(leftTerms.hasPayloads(), rightTerms.hasPayloads());

  TermsEnum leftTermsEnum = leftTerms.iterator(null);
  TermsEnum rightTermsEnum = rightTerms.iterator(null);
  assertTermsEnumEquals(info, leftReader, leftTermsEnum, rightTermsEnum, true);

  assertTermsSeekingEquals(info, leftTerms, rightTerms);

  if (deep) {
    int numIntersections = atLeast(3);
    for (int i = 0; i < numIntersections; i++) {
      String re = AutomatonTestUtil.randomRegexp(random());
      CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton());
      if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) {
        // TODO: test start term too
        TermsEnum leftIntersection = leftTerms.intersect(automaton, null);
        TermsEnum rightIntersection = rightTerms.intersect(automaton, null);
        assertTermsEnumEquals(info, leftReader, leftIntersection, rightIntersection, rarely());
      }
    }
  }
}
/**
 * Builds a new RegexpQuery instance
 * @param regexp Regexp term
 * @return new RegexpQuery instance
 */
protected Query newRegexpQuery(Term regexp) {
  RegexpQuery query = new RegexpQuery(regexp, RegExp.ALL, maxDeterminizedStates);
  query.setRewriteMethod(multiTermRewriteMethod);
  return query;
}
public void testPhraseQueryPositionIncrements() throws Exception {
  CharacterRunAutomaton stopStopList =
      new CharacterRunAutomaton(new RegExp("[sS][tT][oO][pP]").toAutomaton());
  CommonQueryParserConfiguration qp = getParserConfig(
      new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false, stopStopList));
  qp.setEnablePositionIncrements(true);

  PhraseQuery phraseQuery = new PhraseQuery();
  phraseQuery.add(new Term("field", "1"));
  phraseQuery.add(new Term("field", "2"), 2);
  assertEquals(phraseQuery, getQuery("\"1 stop 2\"", qp));
}
public void assertTerms(Terms leftTerms, Terms rightTerms, boolean deep) throws Exception {
  if (leftTerms == null || rightTerms == null) {
    assertNull(leftTerms);
    assertNull(rightTerms);
    return;
  }
  assertTermsStatistics(leftTerms, rightTerms);

  // NOTE: we don't assert hasOffsets/hasPositions/hasPayloads because they are allowed to be different
  TermsEnum leftTermsEnum = leftTerms.iterator(null);
  TermsEnum rightTermsEnum = rightTerms.iterator(null);
  assertTermsEnum(leftTermsEnum, rightTermsEnum, true);

  assertTermsSeeking(leftTerms, rightTerms);

  if (deep) {
    int numIntersections = atLeast(3);
    for (int i = 0; i < numIntersections; i++) {
      String re = AutomatonTestUtil.randomRegexp(random());
      CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton());
      if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) {
        // TODO: test start term too
        TermsEnum leftIntersection = leftTerms.intersect(automaton, null);
        TermsEnum rightIntersection = rightTerms.intersect(automaton, null);
        assertTermsEnum(leftIntersection, rightIntersection, rarely());
      }
    }
  }
}
/** check that the # of hits is the same as if the query
 *  is run against the inverted index */
protected void assertSame(String regexp) throws IOException {
  RegexpQuery docValues = new RegexpQuery(new Term(fieldName, regexp), RegExp.NONE);
  docValues.setRewriteMethod(new DocTermOrdsRewriteMethod());
  RegexpQuery inverted = new RegexpQuery(new Term(fieldName, regexp), RegExp.NONE);

  TopDocs invertedDocs = searcher1.search(inverted, 25);
  TopDocs docValuesDocs = searcher2.search(docValues, 25);

  CheckHits.checkEqual(inverted, invertedDocs.scoreDocs, docValuesDocs.scoreDocs);
}
public void testEquals() throws Exception {
  RegexpQuery a1 = new RegexpQuery(new Term(fieldName, "[aA]"), RegExp.NONE);
  RegexpQuery a2 = new RegexpQuery(new Term(fieldName, "[aA]"), RegExp.NONE);
  RegexpQuery b = new RegexpQuery(new Term(fieldName, "[bB]"), RegExp.NONE);
  assertEquals(a1, a2);
  assertFalse(a1.equals(b));

  a1.setRewriteMethod(new DocTermOrdsRewriteMethod());
  a2.setRewriteMethod(new DocTermOrdsRewriteMethod());
  b.setRewriteMethod(new DocTermOrdsRewriteMethod());
  assertEquals(a1, a2);
  assertFalse(a1.equals(b));
  QueryUtils.check(a1);
}
public void testStartPositions() throws Exception {
  Directory dir = newDirectory();

  // mimic StopAnalyzer
  CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|a|of").toAutomaton());
  Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopSet);

  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, analyzer);
  Document doc = new Document();
  doc.add(newTextField("field", "the quick brown fox", Field.Store.NO));
  writer.addDocument(doc);
  Document doc2 = new Document();
  doc2.add(newTextField("field", "quick brown fox", Field.Store.NO));
  writer.addDocument(doc2);

  IndexReader reader = writer.getReader();
  IndexSearcher searcher = newSearcher(reader);

  // user queries on "starts-with quick"
  SpanQuery sfq = new SpanFirstQuery(new SpanTermQuery(new Term("field", "quick")), 1);
  assertEquals(1, searcher.search(sfq, 10).totalHits);

  // user queries on "starts-with the quick"
  SpanQuery include = new SpanFirstQuery(new SpanTermQuery(new Term("field", "quick")), 2);
  sfq = new SpanNotQuery(include, sfq);
  assertEquals(1, searcher.search(sfq, 10).totalHits);

  writer.close();
  reader.close();
  dir.close();
}
/** check that the # of hits is the same as from a very
 *  simple regexpquery implementation. */
protected void assertSame(String regexp) throws IOException {
  RegexpQuery smart = new RegexpQuery(new Term(fieldName, regexp), RegExp.NONE);
  DumbRegexpQuery dumb = new DumbRegexpQuery(new Term(fieldName, regexp), RegExp.NONE);

  TopDocs smartDocs = searcher1.search(smart, 25);
  TopDocs dumbDocs = searcher2.search(dumb, 25);
  CheckHits.checkEqual(smart, smartDocs.scoreDocs, dumbDocs.scoreDocs);
}
/** Test fieldcache rewrite against filter rewrite */
@Override
protected void assertSame(String regexp) throws IOException {
  RegexpQuery fieldCache = new RegexpQuery(new Term(fieldName, regexp), RegExp.NONE);
  fieldCache.setRewriteMethod(new FieldCacheRewriteMethod());

  RegexpQuery filter = new RegexpQuery(new Term(fieldName, regexp), RegExp.NONE);
  filter.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);

  TopDocs fieldCacheDocs = searcher1.search(fieldCache, 25);
  TopDocs filterDocs = searcher2.search(filter, 25);

  CheckHits.checkEqual(fieldCache, fieldCacheDocs.scoreDocs, filterDocs.scoreDocs);
}
public void testEquals() throws Exception {
  RegexpQuery a1 = new RegexpQuery(new Term(fieldName, "[aA]"), RegExp.NONE);
  RegexpQuery a2 = new RegexpQuery(new Term(fieldName, "[aA]"), RegExp.NONE);
  RegexpQuery b = new RegexpQuery(new Term(fieldName, "[bB]"), RegExp.NONE);
  assertEquals(a1, a2);
  assertFalse(a1.equals(b));

  a1.setRewriteMethod(new FieldCacheRewriteMethod());
  a2.setRewriteMethod(new FieldCacheRewriteMethod());
  b.setRewriteMethod(new FieldCacheRewriteMethod());
  assertEquals(a1, a2);
  assertFalse(a1.equals(b));
  QueryUtils.check(a1);
}
/** Test a configuration where each character is a term */
public void testSingleChar() throws Exception {
  CharacterRunAutomaton single = new CharacterRunAutomaton(new RegExp(".").toAutomaton());
  Analyzer a = new MockAnalyzer(random(), single, false);
  assertAnalyzesTo(a, "foobar",
      new String[] { "f", "o", "o", "b", "a", "r" },
      new int[] { 0, 1, 2, 3, 4, 5 },
      new int[] { 1, 2, 3, 4, 5, 6 });
  checkRandomData(random(), a, 100);
}
/** Test a configuration that behaves a lot like LengthFilter */
public void testLength() throws Exception {
  CharacterRunAutomaton length5 = new CharacterRunAutomaton(new RegExp(".{5,}").toAutomaton());
  Analyzer a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true, length5);
  assertAnalyzesTo(a, "ok toolong fine notfine",
      new String[] { "ok", "fine" },
      new int[] { 1, 2 });
}
public void testPhraseQueryPositionIncrements() throws Exception {
  PhraseQuery expected = new PhraseQuery();
  expected.add(new Term("field", "1"));
  expected.add(new Term("field", "2"), 2);

  CharacterRunAutomaton stopList =
      new CharacterRunAutomaton(new RegExp("[sS][tT][oO][pP]").toAutomaton());
  Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false, stopList);

  QueryBuilder builder = new QueryBuilder(analyzer);
  assertEquals(expected, builder.createPhraseQuery("field", "1 stop 2"));
}
@Test
public void testPhraseQueryPositionIncrements() throws Exception {
  CharacterRunAutomaton stopStopList =
      new CharacterRunAutomaton(new RegExp("[sS][tT][oO][pP]").toAutomaton());
  QueryParser qp = getParserConfig(
      new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false, stopStopList));
  qp.setEnablePositionIncrements(true);

  PhraseQuery.Builder phraseQuery = new PhraseQuery.Builder();
  phraseQuery.add(new Term("field", "1"));
  phraseQuery.add(new Term("field", "2"), 2);
  Assert.assertEquals(phraseQuery.build(), getQuery("\"1 stop 2\"", qp));
}