public void testPayloadNear() throws IOException { SpanNearQuery q1, q2; PayloadNearQuery query; //SpanNearQuery(clauses, 10000, false) q1 = spanNearQuery("field2", "twenty two"); q2 = spanNearQuery("field2", "twenty three"); SpanQuery[] clauses = new SpanQuery[2]; clauses[0] = q1; clauses[1] = q2; query = new PayloadNearQuery(clauses, 10, false); //System.out.println(query.toString()); assertEquals(12, searcher.search(query, null, 100).totalHits); /* System.out.println(hits.totalHits); for (int j = 0; j < hits.scoreDocs.length; j++) { ScoreDoc doc = hits.scoreDocs[j]; System.out.println("doc: "+doc.doc+", score: "+doc.score); } */ }
/**
 * Creates a span query from the tokenstream. In the case of a single token,
 * a simple <code>SpanTermQuery</code> is returned. When multiple tokens, an
 * ordered <code>SpanNearQuery</code> with slop of 0 is returned.
 */
protected final SpanQuery createSpanQuery(TokenStream in, String field) throws IOException {
    TermToBytesRefAttribute termAtt = in.getAttribute(TermToBytesRefAttribute.class);
    if (termAtt == null) {
        // Stream carries no term bytes: nothing to build a query from.
        return null;
    }

    List<SpanTermQuery> terms = new ArrayList<>();
    while (in.incrementToken()) {
        terms.add(new SpanTermQuery(new Term(field, termAtt.getBytesRef())));
    }

    switch (terms.size()) {
        case 0:
            return null;
        case 1:
            return terms.get(0);
        default:
            // Multiple tokens: exact-phrase semantics (ordered, slop 0).
            return new SpanNearQuery(terms.toArray(new SpanTermQuery[0]), 0, true);
    }
}
public Query rewrite(SpanNearQuery nq) { // For text queries, set the max to the chunk overlap size. For // meta-data fields, set it to the bump between multiple values // for the same field, *minus one* to prevent matches across // the boundary. // boolean isText = nq.getField().equals("text"); int maxSlop = isText ? docNumMap.getChunkOverlap() : (1000000 - 1); int targetSlop = Math.min(nq.getSlop(), maxSlop); if (targetSlop == nq.getSlop()) return super.rewrite(nq); // Okay, rewrite and reset the slop. SpanNearQuery newQ = (SpanNearQuery)super.rewrite(nq); assert newQ != nq; newQ.setSlop(targetSlop); return newQ; }
/**
 * Rewrite a span NEAR query. Stop words will be bi-grammed into adjacent
 * terms.
 *
 * @param q The query to rewrite
 * @return Rewritten version, or 'q' unchanged if no change needed.
 */
protected Query rewrite(final SpanNearQuery q) {
    // Rewrite each clause. Allow single clauses to be promoted, and
    // do perform bi-gramming.
    //
    return rewriteClauses(q, q.getClauses(), true, true, q.getSlop(),
        new SpanClauseJoiner() {
            // Rejoins the rewritten clauses into a NEAR query with the
            // original slop.
            // NOTE(review): inOrder is hard-coded to false here regardless of
            // the original query's ordering — confirm this is intentional.
            public SpanQuery join(SpanQuery[] clauses) {
                return new SpanNearQuery(clauses, q.getSlop(), false);
            }
        });
}
@SuppressWarnings("rawtypes")
@Override
public void assertInstanceOf(Query q, Class other) {
    // Span queries stand in for several "plain" query types in this parser:
    // unwrap multi-term wrappers and accept the recognized span equivalents
    // before deferring to the base assertion.
    if (q instanceof SpanMultiTermQueryWrapper) {
        super.assertInstanceOf(((SpanMultiTermQueryWrapper) q).getWrappedQuery(), other);
        return;
    }
    if (q instanceof SpanTermQuery && other.equals(TermQuery.class)) {
        assertTrue("termquery", true);
        return;
    }
    if (q instanceof SpanNearQuery && other.equals(PhraseQuery.class)) {
        assertTrue("spannear/phrase", true);
        return;
    }
    if (q instanceof SpanOrQuery && other.equals(BooleanQuery.class)) {
        assertTrue("spanor/boolean", true);
        return;
    }
    super.assertInstanceOf(q, other);
}
@Override public void testPositionIncrement() throws Exception { //For SQP, this only tests whether stop words have been dropped. //PositionIncrements are not available in SpanQueries yet. CommonQueryParserConfiguration qp = getParserConfig( new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET)); //qp.setEnablePositionIncrements(true); String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\""; // 0 2 5 7 8 SpanNearQuery pq = (SpanNearQuery) getQuery(qtxt,qp); SpanQuery[] clauses = pq.getClauses(); assertEquals(clauses.length, 5); Set<Term> expected = new HashSet<Term>(); expected.add(new Term("field", "words")); expected.add(new Term("field", "poisitions")); expected.add(new Term("field", "pos")); expected.add(new Term("field", "stopped")); expected.add(new Term("field", "phrasequery")); }
public void testSpanOrQuery() throws Exception {
    SpanNearQuery quickFox = new SpanNearQuery(new SpanQuery[] { quick, fox }, 1, true);
    SpanNearQuery lazyDog = new SpanNearQuery(new SpanQuery[] { lazy, dog }, 0, true);
    SpanNearQuery sleepyCat = new SpanNearQuery(new SpanQuery[] { sleepy, cat }, 0, true);

    SpanNearQuery foxNearDog =
        new SpanNearQuery(new SpanQuery[] { quickFox, lazyDog }, 3, true);
    assertOnlyBrownFox(foxNearDog);
    dumpSpans(foxNearDog);

    SpanNearQuery foxNearCat =
        new SpanNearQuery(new SpanQuery[] { quickFox, sleepyCat }, 3, true);
    dumpSpans(foxNearCat);

    // Either pairing should match its respective document.
    SpanOrQuery either = new SpanOrQuery(new SpanQuery[] { foxNearDog, foxNearCat });
    assertBothFoxes(either);
    dumpSpans(either);
}
public void testPlay() throws Exception {
    // Dump spans for a few representative query shapes.
    dumpSpans(new SpanOrQuery(new SpanQuery[] { quick, fox }));

    SpanNearQuery quickFox = new SpanNearQuery(new SpanQuery[] { quick, fox }, 1, true);
    dumpSpans(new SpanFirstQuery(quickFox, 4));

    dumpSpans(new SpanTermQuery(new Term("f", "the")));

    // Unordered, strictly adjacent pair.
    dumpSpans(new SpanNearQuery(new SpanQuery[] { quick, brown }, 0, false));
}
/** * Replace PhraseQuery with SpanNearQuery to force in-order * phrase matching rather than reverse. */ protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException { Query orig = super.getFieldQuery(field, queryText, slop); // if (!(orig instanceof PhraseQuery)) { // return orig; // } // PhraseQuery pq = (PhraseQuery) orig; Term[] terms = pq.getTerms(); // SpanTermQuery[] clauses = new SpanTermQuery[terms.length]; for (int i = 0; i < terms.length; i++) { clauses[i] = new SpanTermQuery(terms[i]); } SpanNearQuery query = new SpanNearQuery( // clauses, slop, true); // return query; }
public Query build(QueryNode queryNode) throws QueryNodeException { TokenizedPhraseQueryNode phraseNode = (TokenizedPhraseQueryNode) queryNode; PhraseQuery phraseQuery = new PhraseQuery(); List<QueryNode> children = phraseNode.getChildren(); //D SpanTermQuery[] clauses; if (children != null) { int numTerms = children.size(); clauses = new SpanTermQuery[numTerms]; for (int i=0;i<numTerms;i++) { FieldQueryNode termNode = (FieldQueryNode) children.get(i); TermQuery termQuery = (TermQuery) termNode .getTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID); clauses[i] = new SpanTermQuery(termQuery.getTerm()); } } else { clauses = new SpanTermQuery[0]; } return new SpanNearQuery(clauses, phraseQuery.getSlop(), true); //E }
public void testExtractQueryMetadata_spanNearQuery() {
    SpanTermQuery shortTerm = new SpanTermQuery(new Term("_field", "_short_term"));
    SpanTermQuery longTerm = new SpanTermQuery(new Term("_field", "_very_long_term"));
    SpanNearQuery nearQuery = new SpanNearQuery.Builder("_field", true)
        .addClause(shortTerm)
        .addClause(longTerm)
        .build();

    Result result = analyze(nearQuery);
    // Span-near extraction is never verified; only the longest term survives.
    assertThat(result.verified, is(false));
    assertTermsEqual(result.terms, longTerm.getTerm());
}
/**
 * Recursively applies the given slop to a span query: near queries get a
 * copy with the new slop, OR queries have the slop pushed into each clause,
 * and any other span query is returned unchanged.
 */
private Query addSlopToSpan(SpanQuery query, int slop) {
    if (query instanceof SpanNearQuery) {
        SpanNearQuery near = (SpanNearQuery) query;
        return new SpanNearQuery(near.getClauses(), slop, near.isInOrder());
    }
    if (query instanceof SpanOrQuery) {
        SpanQuery[] original = ((SpanOrQuery) query).getClauses();
        SpanQuery[] rewritten = new SpanQuery[original.length];
        for (int i = 0; i < original.length; i++) {
            rewritten[i] = (SpanQuery) addSlopToSpan(original[i], slop);
        }
        return new SpanOrQuery(rewritten);
    }
    return query;
}
@Override
protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query query,
        SearchContext context) throws IOException {
    assertThat(query, instanceOf(SpanNearQuery.class));
    SpanNearQuery nearQuery = (SpanNearQuery) query;

    // Top-level settings must mirror the builder.
    assertThat(nearQuery.getSlop(), equalTo(queryBuilder.slop()));
    assertThat(nearQuery.isInOrder(), equalTo(queryBuilder.inOrder()));

    // Clauses must correspond one-to-one, in order, with the builder's clauses.
    SpanQuery[] clauses = nearQuery.getClauses();
    assertThat(clauses.length, equalTo(queryBuilder.clauses().size()));
    Iterator<SpanQueryBuilder> expectedClauses = queryBuilder.clauses().iterator();
    for (SpanQuery clause : clauses) {
        assertThat(clause,
            equalTo(expectedClauses.next().toQuery(context.getQueryShardContext())));
    }
}
/**
 * Parses a SpanNear element: the mandatory {@code slop} attribute, the
 * optional {@code inOrder} flag (defaults to unordered), and every child
 * element as a nested span clause.
 */
@Override
public SpanQuery getSpanQuery(Element e) throws ParserException {
    int slop = Integer.parseInt(DOMUtils.getAttributeOrFail(e, "slop"));
    boolean inOrder = DOMUtils.getAttribute(e, "inOrder", false);

    List<SpanQuery> clauses = new ArrayList<>();
    for (Node child = e.getFirstChild(); child != null; child = child.getNextSibling()) {
        if (child.getNodeType() == Node.ELEMENT_NODE) {
            clauses.add(factory.getSpanQuery((Element) child));
        }
    }
    return new SpanNearQuery(clauses.toArray(new SpanQuery[clauses.size()]), slop, inOrder);
}
/**
 * Builds a SpanNearQuery from this distance query's sub-queries, one span
 * clause per sub-query. If any sub-query contributes no clauses, the
 * remaining sub-queries are still visited (so they can report errors) and
 * the shared empty query is returned, because a distance operator requires
 * all of its operands.
 */
public Query getSpanNearQuery(
    IndexReader reader,
    String fieldName,
    float boost,
    BasicQueryFactory qf) throws IOException {
  SpanQuery[] spanClauses = new SpanQuery[getNrSubQueries()];
  Iterator<?> sqi = getSubQueriesIterator();
  int qi = 0;
  while (sqi.hasNext()) {
    // Fresh clause factory per sub-query; it accumulates that sub-query's spans.
    SpanNearClauseFactory sncf = new SpanNearClauseFactory(reader, fieldName, qf);

    ((DistanceSubQuery)sqi.next()).addSpanQueries(sncf);
    if (sncf.size() == 0) { /* distance operator requires all sub queries */
      while (sqi.hasNext()) { /* produce evt. error messages but ignore results */
        ((DistanceSubQuery)sqi.next()).addSpanQueries(sncf);
        // Discard whatever was accumulated — only the side effects matter here.
        sncf.clear();
      }
      return SrndQuery.theEmptyLcnQuery;
    }

    spanClauses[qi] = sncf.makeSpanClause();
    qi++;
  }

  // Operator distance N corresponds to N-1 positions of slop between clauses.
  SpanNearQuery r = new SpanNearQuery(spanClauses, getOpDistance() - 1, subQueriesOrdered());
  r.setBoost(boost);
  return r;
}
/**
 * Translates a span query into a plain (position-free) filter query:
 * composite span types are decomposed recursively, span terms become term
 * queries, wrappers are unwrapped, and anything else is wrapped as-is.
 */
private Query spanFilter(SpanQuery query) {
    if (query instanceof SpanNearQuery) {
        return spanNearFilter((SpanNearQuery) query);
    }
    if (query instanceof SpanNotQuery) {
        return spanNotFilter((SpanNotQuery) query);
    }
    if (query instanceof SpanOrQuery) {
        return spanOrFilter((SpanOrQuery) query);
    }
    if (query instanceof SpanTermQuery) {
        return new TermQuery(((SpanTermQuery) query).getTerm());
    }
    if (query instanceof SpanMultiTermQueryWrapper) {
        return ((SpanMultiTermQueryWrapper) query).getWrappedQuery();
    }
    return new QueryWrapperFilter(query);
}
/** A near query matches only when every clause matches, so AND the clause filters. */
private Query spanNearFilter(SpanNearQuery query) {
    List<Query> clauseFilters = new ArrayList<>();
    for (SpanQuery clause : query.getClauses()) {
        clauseFilters.add(spanFilter(clause));
    }
    return all(clauseFilters);
}
@Test public void testWildcardPlusWithCollection () throws IOException { ki = new KrillIndex(); ki.addDoc(createFieldDoc1()); ki.commit(); // mein+ /+w1:2,s0 &Erfahrung SpanMultiTermQueryWrapper<WildcardQuery> mtq = new SpanMultiTermQueryWrapper<WildcardQuery>( new WildcardQuery(new Term("tokens", "s:mein+"))); // Just to make sure, Lucene internal queries treat SpanOr([]) correctly SpanQuery soq = new SpanNearQuery(new SpanQuery[] { mtq, sq }, 1, true); kr = ki.search(soq, (short) 10); // As described in http://korap.github.io/Koral/, '+' is not a valid wildcard assertEquals(0, kr.getMatches().size()); // Check the reported classed query SpanMultipleDistanceQuery mdsq = new SpanMultipleDistanceQuery( new SpanClassQuery(mtq, (byte) 129), new SpanClassQuery(sq, (byte) 129), constraints, true, true); kr = ki.search(mdsq, (short) 10); assertEquals(0, kr.getMatches().size()); // Check multiple distance query mdsq = new SpanMultipleDistanceQuery(mtq, sq, constraints, true, true); kr = ki.search(mdsq, (short) 10); assertEquals(0, kr.getMatches().size()); }
public void testSparseSpan() throws IOException, InvalidTokenOffsetsException {
    final String TEXT = "the fox did not jump";
    final Directory directory = newDirectory();
    final IndexWriter writer = new IndexWriter(directory,
        newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
    try {
        // Index a single document whose term vector comes from a sparse stream.
        final Document doc = new Document();
        FieldType fieldType = new FieldType(TextField.TYPE_NOT_STORED);
        fieldType.setStoreTermVectorOffsets(true);
        fieldType.setStoreTermVectorPositions(true);
        fieldType.setStoreTermVectors(true);
        doc.add(new Field(FIELD, new TokenStreamSparse(), fieldType));
        writer.addDocument(doc);
    } finally {
        writer.close();
    }

    final IndexReader reader = DirectoryReader.open(directory);
    try {
        assertEquals(1, reader.numDocs());
        final IndexSearcher indexSearcher = newSearcher(reader);
        // "did jump" as an exact ordered span; the sparse stream drops tokens,
        // so this must produce no hits.
        final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
            new SpanTermQuery(new Term(FIELD, "did")),
            new SpanTermQuery(new Term(FIELD, "jump")) }, 0, true);
        TopDocs hits = indexSearcher.search(phraseQuery, 1);
        assertEquals(0, hits.totalHits);

        // Highlighting from the recovered term vector must agree with
        // highlighting straight from the original token stream.
        final Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(),
            new SimpleHTMLEncoder(), new QueryScorer(phraseQuery));
        final TokenStream tokenStream =
            TokenSources.getTokenStream(reader.getTermVector(0, FIELD), false);
        assertEquals(highlighter.getBestFragment(new TokenStreamSparse(), TEXT),
            highlighter.getBestFragment(tokenStream, TEXT));
    } finally {
        reader.close();
        directory.close();
    }
}
private static Query[] getPrebuiltQueries(String field) { // be wary of unanalyzed text return new Query[] { new SpanFirstQuery(new SpanTermQuery(new Term(field, "ford")), 5), new SpanNearQuery(new SpanQuery[]{new SpanTermQuery(new Term(field, "night")), new SpanTermQuery(new Term(field, "trading"))}, 4, false), new SpanNearQuery(new SpanQuery[]{new SpanFirstQuery(new SpanTermQuery(new Term(field, "ford")), 10), new SpanTermQuery(new Term(field, "credit"))}, 10, false), new WildcardQuery(new Term(field, "fo*")), }; }
private static Query[] getPrebuiltQueries(String field) { WildcardQuery wcq = new WildcardQuery(new Term(field, "fo*")); wcq .setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE); // be wary of unanalyzed text return new Query[] { new SpanFirstQuery(new SpanTermQuery(new Term(field, "ford")), 5), new SpanNearQuery(new SpanQuery[] { new SpanTermQuery(new Term(field, "night")), new SpanTermQuery(new Term(field, "trading")) }, 4, false), new SpanNearQuery(new SpanQuery[] { new SpanFirstQuery(new SpanTermQuery(new Term(field, "ford")), 10), new SpanTermQuery(new Term(field, "credit")) }, 10, false), wcq, }; }
/** Counts hits for two regex terms occurring near each other within the given slop. */
private int spanRegexQueryNrHits(String regex1, String regex2, int slop, boolean ordered)
        throws Exception {
    SpanQuery first = new SpanMultiTermQueryWrapper<>(new RegexQuery(newTerm(regex1)));
    SpanQuery second = new SpanMultiTermQueryWrapper<>(new RegexQuery(newTerm(regex2)));
    SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { first, second }, slop, ordered);
    return searcher.search(near, null, 1000).totalHits;
}
/** Two overlapping near-queries on field2, used as clauses elsewhere. */
private SpanQuery[] getClauses() {
    return new SpanQuery[] {
        spanNearQuery("field2", "twenty two"),
        spanNearQuery("field2", "twenty three")
    };
}
/**
 * Builds an unordered near query (slop 10000) of payload term queries, one
 * per whitespace-separated word in {@code words}.
 */
private SpanNearQuery spanNearQuery(String fieldName, String words) {
    String[] tokens = words.split("[\\s]+");
    SpanQuery[] clauses = new SpanQuery[tokens.length];
    for (int i = 0; i < tokens.length; i++) {
        clauses[i] = new PayloadTermQuery(new Term(fieldName, tokens[i]),
            new AveragePayloadFunction());
    }
    return new SpanNearQuery(clauses, 10000, false);
}
@Test public void testFlatQueryShouldBeGeneratedFromSequentiallyShiftedTokens() throws Exception { // prepare test data LinkedList<Token> tokenSequenceWithRepeatedGroup = new LinkedList<Token>(); tokenSequenceWithRepeatedGroup.add(new Token(TEST_QUERY.substring(0, 4), 0, 4, null)); tokenSequenceWithRepeatedGroup.add(new Token(TEST_QUERY.substring(5, 6), 5, 6, null)); tokenSequenceWithRepeatedGroup.add(new Token(TEST_QUERY.substring(6, 10), 6, 10, null)); tokenSequenceWithRepeatedGroup.add(new Token(TEST_QUERY.substring(10, 11), 10, 11, null)); assertTrue("All tokens in test data must be sequentially shifted", parser.isAllTokensSequentiallyShifted(tokenSequenceWithRepeatedGroup)); assertTrue(parser.getEnablePositionIncrements()); LinkedList<LinkedList<Token>> fixedTokenSequences = new LinkedList<LinkedList<Token>>(); fixedTokenSequences.add(tokenSequenceWithRepeatedGroup); // call method to test SpanOrQuery q = parser.generateSpanOrQuery(TEST_FIELD, fixedTokenSequences); // check results assertNotNull(q); SpanQuery[] spanQuery = q.getClauses(); assertEquals("Flat query must be generated", 1, spanQuery.length); assertTrue(spanQuery[0] instanceof SpanNearQuery); SpanNearQuery spanNearQuery = (SpanNearQuery) spanQuery[0]; assertEquals("Slop between term must be 0", 0, spanNearQuery.getSlop()); assertTrue("Terms must be in order", spanNearQuery.isInOrder()); SpanQuery[] termClauses = spanNearQuery.getClauses(); assertEquals("Flat query must be generated (Query: " + q + ")", tokenSequenceWithRepeatedGroup.size(), termClauses.length); for (int i = 0; i < termClauses.length; i++) { assertTrue(termClauses[i] instanceof SpanTermQuery); assertEquals("All tokens must become spanQuery terms", tokenSequenceWithRepeatedGroup.get(i).toString(), ((SpanTermQuery) termClauses[i]).getTerm().text()); } }
@Override
protected void addTextSpanQuery(String field, String first, String last, int slop,
        boolean inOrder, String expandedFieldName, IndexTokenisationMode tokenisationMode,
        BooleanQuery booleanQuery, MLAnalysisMode mlAnalysisMode, Locale locale) {
    // Pair the first and last terms in a proximity query and OR it into
    // the enclosing boolean query.
    SpanQuery start = new SpanTermQuery(new Term(field, first));
    SpanQuery end = new SpanTermQuery(new Term(field, last));
    booleanQuery.add(new SpanNearQuery(new SpanQuery[] { start, end }, slop, inOrder),
        Occur.SHOULD);
}
@Override
protected org.apache.lucene.search.Query addContentSpanQuery(String field, String first,
        String last, int slop, boolean inOrder, String expandedFieldName,
        List<Locale> expandedLocales, MLAnalysisMode mlAnalysisMode) {
    // Proximity query pairing the first and last terms.
    SpanQuery start = new SpanTermQuery(new Term(field, first));
    SpanQuery end = new SpanTermQuery(new Term(field, last));
    return new SpanNearQuery(new SpanQuery[] { start, end }, slop, inOrder);
}
@Override
protected void addMLTextSpanQuery(String field, String first, String last, int slop,
        boolean inOrder, String expandedFieldName, PropertyDefinition propertyDef,
        IndexTokenisationMode tokenisationMode, BooleanQuery booleanQuery,
        MLAnalysisMode mlAnalysisMode, Locale locale) {
    // Pair the first and last terms in a proximity query and OR it into
    // the enclosing boolean query.
    SpanQuery start = new SpanTermQuery(new Term(field, first));
    SpanQuery end = new SpanTermQuery(new Term(field, last));
    booleanQuery.add(new SpanNearQuery(new SpanQuery[] { start, end }, slop, inOrder),
        Occur.SHOULD);
}
private static void searchForColoredFox(IndexSearcher searcher) throws IOException {
    // Mask the color annotation onto the text field so both can be combined
    // positionally, then require it immediately before "fox" (ordered, slop 0).
    final SpanQuery anyColor = new SpanTermQuery(new Term(COLOR_FIELD, ANY_ANNOTATION_TERM));
    final SpanQuery colorOverText = new FieldMaskingSpanQuery(anyColor, TEXT_FIELD);
    final SpanQuery fox = new SpanTermQuery(new Term(TEXT_FIELD, "fox"));
    search(searcher, new SpanNearQuery(new SpanQuery[] { colorOverText, fox }, 0, true));
}
private static void searchForRedAnimal(IndexSearcher searcher) throws IOException {
    // Mask "red" from the color field onto the animal field, then require an
    // animal annotation at the same position (ordered, slop 0).
    final SpanQuery red = new SpanTermQuery(new Term(COLOR_FIELD, "red"));
    final SpanQuery redOverAnimal = new FieldMaskingSpanQuery(red, ANIMAL_FIELD);
    final SpanQuery anyAnimal = new SpanTermQuery(
        new Term(ANIMAL_FIELD, AnyAnnotationTokenFilter.ANY_ANNOTATION_TERM));
    search(searcher, new SpanNearQuery(new SpanQuery[] { redOverAnimal, anyAnimal }, 0, true));
}
private static void searchForColoredFox(IndexSearcher searcher) throws IOException {
    // Multi-position variant: the color annotation spans several positions.
    // Mask it onto the text field, then require it directly before "fox".
    final SpanQuery anyColor =
        new MultiPositionSpanTermQuery(new Term(COLOR_FIELD, ANY_ANNOTATION_TERM));
    final SpanQuery colorOverText = new FieldMaskingSpanQuery(anyColor, TEXT_FIELD);
    final SpanQuery fox = new SpanTermQuery(new Term(TEXT_FIELD, "fox"));
    search(searcher, new SpanNearQuery(new SpanQuery[] { colorOverText, fox }, 0, true));
}
public void testSparseSpan() throws IOException, InvalidTokenOffsetsException {
    final String TEXT = "the fox did not jump";
    final Directory directory = newDirectory();
    final IndexWriter writer = new IndexWriter(directory,
        newIndexWriterConfig(TEST_VERSION_CURRENT,
            new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
    try {
        // Index a single document whose term vector comes from a sparse stream.
        final Document doc = new Document();
        FieldType fieldType = new FieldType(TextField.TYPE_NOT_STORED);
        fieldType.setStoreTermVectorOffsets(true);
        fieldType.setStoreTermVectorPositions(true);
        fieldType.setStoreTermVectors(true);
        doc.add(new Field(FIELD, new TokenStreamSparse(), fieldType));
        writer.addDocument(doc);
    } finally {
        writer.close();
    }

    final IndexReader reader = DirectoryReader.open(directory);
    try {
        assertEquals(1, reader.numDocs());
        final IndexSearcher indexSearcher = newSearcher(reader);
        // "did jump" as an exact ordered span; the sparse stream drops tokens,
        // so this must produce no hits.
        final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
            new SpanTermQuery(new Term(FIELD, "did")),
            new SpanTermQuery(new Term(FIELD, "jump")) }, 0, true);
        TopDocs hits = indexSearcher.search(phraseQuery, 1);
        assertEquals(0, hits.totalHits);

        // Highlighting from the recovered term vector must agree with
        // highlighting straight from the original token stream.
        final Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(),
            new SimpleHTMLEncoder(), new QueryScorer(phraseQuery));
        final TokenStream tokenStream =
            TokenSources.getTokenStream(reader.getTermVector(0, FIELD), false);
        assertEquals(highlighter.getBestFragment(new TokenStreamSparse(), TEXT),
            highlighter.getBestFragment(tokenStream, TEXT));
    } finally {
        reader.close();
        directory.close();
    }
}
/** Counts hits for two regex terms occurring near each other within the given slop. */
private int spanRegexQueryNrHits(String regex1, String regex2, int slop, boolean ordered)
        throws Exception {
    SpanQuery first =
        new SpanMultiTermQueryWrapper<RegexQuery>(new RegexQuery(newTerm(regex1)));
    SpanQuery second =
        new SpanMultiTermQueryWrapper<RegexQuery>(new RegexQuery(newTerm(regex2)));
    SpanNearQuery near = new SpanNearQuery(new SpanQuery[] { first, second }, slop, ordered);
    return searcher.search(near, null, 1000).totalHits;
}
/**
 * Parses a SpanNear element: the mandatory {@code slop} attribute, the
 * optional {@code inOrder} flag (defaults to unordered), and every child
 * element as a nested span clause.
 */
@Override
public SpanQuery getSpanQuery(Element e) throws ParserException {
    int slop = Integer.parseInt(DOMUtils.getAttributeOrFail(e, "slop"));
    boolean inOrder = DOMUtils.getAttribute(e, "inOrder", false);

    List<SpanQuery> clauses = new ArrayList<SpanQuery>();
    for (Node child = e.getFirstChild(); child != null; child = child.getNextSibling()) {
        if (child.getNodeType() == Node.ELEMENT_NODE) {
            clauses.add(factory.getSpanQuery((Element) child));
        }
    }
    return new SpanNearQuery(clauses.toArray(new SpanQuery[clauses.size()]), slop, inOrder);
}