public void testCreateMultiDocumentSearcher() throws Exception {
    int numDocs = randomIntBetween(2, 8);
    List<ParseContext.Document> docs = new ArrayList<>(numDocs);
    for (int i = 0; i < numDocs; i++) {
        docs.add(new ParseContext.Document());
    }

    Analyzer analyzer = new WhitespaceAnalyzer();
    ParsedDocument parsedDocument = new ParsedDocument(null, null, "_id", "_type", null, docs, null, null, null);
    IndexSearcher indexSearcher = PercolateQueryBuilder.createMultiDocumentSearcher(analyzer, parsedDocument);
    assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(numDocs));

    // ensure that any query gets modified so that the nested docs are never included as hits:
    Query query = new MatchAllDocsQuery();
    BooleanQuery result = (BooleanQuery) indexSearcher.createNormalizedWeight(query, true).getQuery();
    assertThat(result.clauses().size(), equalTo(2));
    assertThat(result.clauses().get(0).getQuery(), sameInstance(query));
    assertThat(result.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST));
    assertThat(result.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.MUST_NOT));
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
    BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
    booleanQueryBuilder.setDisableCoord(disableCoord);
    addBooleanClauses(context, booleanQueryBuilder, mustClauses, BooleanClause.Occur.MUST);
    addBooleanClauses(context, booleanQueryBuilder, mustNotClauses, BooleanClause.Occur.MUST_NOT);
    addBooleanClauses(context, booleanQueryBuilder, shouldClauses, BooleanClause.Occur.SHOULD);
    addBooleanClauses(context, booleanQueryBuilder, filterClauses, BooleanClause.Occur.FILTER);
    BooleanQuery booleanQuery = booleanQueryBuilder.build();
    if (booleanQuery.clauses().isEmpty()) {
        return new MatchAllDocsQuery();
    }

    final String minimumShouldMatch;
    if (context.isFilter() && this.minimumShouldMatch == null && shouldClauses.size() > 0) {
        minimumShouldMatch = "1";
    } else {
        minimumShouldMatch = this.minimumShouldMatch;
    }
    Query query = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch);
    return adjustPureNegative ? fixNegativeQueryIfNeeded(query) : query;
}
@SuppressWarnings("unchecked") public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { // intentionally not writing any docs } try (IndexReader indexReader = DirectoryReader.open(directory)) { ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME); aggregationBuilder.mapScript(MAP_SCRIPT); // map script is mandatory, even if its not used in this case ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); assertEquals(0, ((HashMap<Object, String>) scriptedMetric.aggregation()).size()); } } }
/**
 * Without a combine script, the "_aggs" map should contain a list whose size equals the number of matched documents.
 */
@SuppressWarnings("unchecked")
public void testScriptedMetricWithoutCombine() throws IOException {
    try (Directory directory = newDirectory()) {
        int numDocs = randomInt(100);
        try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            for (int i = 0; i < numDocs; i++) {
                indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
            }
        }
        try (IndexReader indexReader = DirectoryReader.open(directory)) {
            ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT);
            ScriptedMetric scriptedMetric =
                search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
            assertEquals(AGG_NAME, scriptedMetric.getName());
            assertNotNull(scriptedMetric.aggregation());
            Map<String, Object> agg = (Map<String, Object>) scriptedMetric.aggregation();
            assertEquals(numDocs, ((List<Integer>) agg.get("collector")).size());
        }
    }
}
/**
 * Test that the combine script sums the list produced by the map script.
 */
public void testScriptedMetricWithCombine() throws IOException {
    try (Directory directory = newDirectory()) {
        // boxed Integer on purpose: the final assertEquals compares it against the boxed aggregation result
        Integer numDocs = randomInt(100);
        try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            for (int i = 0; i < numDocs; i++) {
                indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
            }
        }
        try (IndexReader indexReader = DirectoryReader.open(directory)) {
            ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
            ScriptedMetric scriptedMetric =
                search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
            assertEquals(AGG_NAME, scriptedMetric.getName());
            assertNotNull(scriptedMetric.aggregation());
            assertEquals(numDocs, scriptedMetric.aggregation());
        }
    }
}
public void testCase(MappedFieldType ft, CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                     Consumer<InternalExtendedStats> verify) throws IOException {
    try (Directory directory = newDirectory();
         RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
        buildIndex.accept(indexWriter);
        try (IndexReader reader = indexWriter.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            ExtendedStatsAggregationBuilder aggBuilder = new ExtendedStatsAggregationBuilder("my_agg")
                .field("field")
                .sigma(randomDoubleBetween(0, 10, true));
            InternalExtendedStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
            verify.accept(stats);
        }
    }
}
public void testEmpty() throws Exception {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg")
            .field("field")
            .wrapLongitude(false);
        MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType();
        fieldType.setHasDocValues(true);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            assertTrue(Double.isInfinite(bounds.top));
            assertTrue(Double.isInfinite(bounds.bottom));
            assertTrue(Double.isInfinite(bounds.posLeft));
            assertTrue(Double.isInfinite(bounds.posRight));
            assertTrue(Double.isInfinite(bounds.negLeft));
            assertTrue(Double.isInfinite(bounds.negRight));
        }
    }
}
public void testTopLevel() throws Exception {
    Aggregation result;
    if (randomBoolean()) {
        result = testCase(new MatchAllDocsQuery(), topHits("_name").sort("string", SortOrder.DESC));
    } else {
        Query query = new QueryParser("string", new KeywordAnalyzer()).parse("d^1000 c^100 b^10 a^1");
        result = testCase(query, topHits("_name"));
    }
    SearchHits searchHits = ((TopHits) result).getHits();
    assertEquals(3L, searchHits.getTotalHits());
    assertEquals("3", searchHits.getAt(0).getId());
    assertEquals("type", searchHits.getAt(0).getType());
    assertEquals("2", searchHits.getAt(1).getId());
    assertEquals("type", searchHits.getAt(1).getType());
    assertEquals("1", searchHits.getAt(2).getId());
    assertEquals("type", searchHits.getAt(2).getType());
}
public void testNumericDocValues() throws IOException {
    testCase(new MatchAllDocsQuery(), iw -> {
        // index eight pairs of docs with values 1 and 2, which sum to 8 * (1 + 2) = 24
        for (int i = 0; i < 8; i++) {
            iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 1)));
            iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 2)));
        }
    }, count -> assertEquals(24L, count.getValue(), 0d));
}
public void testMinAggregator_noDocs() throws Exception {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    indexWriter.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

    MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number");
    MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
    fieldType.setName("number");
    try (MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
        assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0);
    }
    indexReader.close();
    directory.close();
}
public void testNoDocs() throws IOException {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    // intentionally not writing any docs
    indexWriter.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    testCase(new MatchAllDocsQuery(), newSearcher(indexReader, false, true), parentToChild -> {
        assertEquals(0, parentToChild.getDocCount());
        assertEquals(Double.POSITIVE_INFINITY,
            ((InternalMin) parentToChild.getAggregations().get("in_child")).getValue(), Double.MIN_VALUE);
    });
    indexReader.close();
    directory.close();
}
private void testCase(IndexSearcher indexSearcher, MappedFieldType genreFieldType, String executionHint,
                      Consumer<InternalSampler> verify) throws IOException {
    MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType();
    idFieldType.setName("id");
    idFieldType.setHasDocValues(true);

    SortedNumericDVIndexFieldData fieldData = new SortedNumericDVIndexFieldData(new Index("index", "index"), "price",
        IndexNumericFieldData.NumericType.DOUBLE);
    FunctionScoreQuery query = new FunctionScoreQuery(new MatchAllDocsQuery(),
        new FieldValueFactorFunction("price", 1, FieldValueFactorFunction.Modifier.RECIPROCAL, null, fieldData));

    DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name")
        .field(genreFieldType.name())
        .executionHint(executionHint)
        .subAggregation(new TermsAggregationBuilder("terms", null).field("id"));

    InternalSampler result = search(indexSearcher, query, builder, genreFieldType, idFieldType);
    verify.accept(result);
}
public void testDiversifiedSampler_noDocs() throws Exception {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    indexWriter.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = new IndexSearcher(indexReader);

    MappedFieldType idFieldType = new KeywordFieldMapper.KeywordFieldType();
    idFieldType.setName("id");
    idFieldType.setHasDocValues(true);

    MappedFieldType genreFieldType = new KeywordFieldMapper.KeywordFieldType();
    genreFieldType.setName("genre");
    genreFieldType.setHasDocValues(true);

    DiversifiedAggregationBuilder builder = new DiversifiedAggregationBuilder("_name")
        .field(genreFieldType.name())
        .subAggregation(new TermsAggregationBuilder("terms", null).field("id"));

    InternalSampler result = search(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType);
    Terms terms = result.getAggregations().get("terms");
    assertEquals(0, terms.getBuckets().size());
    indexReader.close();
    directory.close();
}
public void testEmpty() throws Exception {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    indexWriter.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

    int numFilters = randomIntBetween(1, 10);
    QueryBuilder[] filters = new QueryBuilder[numFilters];
    for (int i = 0; i < filters.length; i++) {
        filters[i] = QueryBuilders.termQuery("field", randomAsciiOfLength(5));
    }
    FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters);
    builder.otherBucketKey("other");
    InternalFilters response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
    assertEquals(response.getBuckets().size(), numFilters);
    for (InternalFilters.InternalBucket filter : response.getBuckets()) {
        assertEquals(filter.getDocCount(), 0);
    }
    indexReader.close();
    directory.close();
}
public void testMatchAllDocs() throws IOException {
    Query query = new MatchAllDocsQuery();

    testSearchCase(query, dataset,
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD),
        histogram -> assertEquals(6, histogram.getBuckets().size())
    );
    testSearchAndReduceCase(query, dataset,
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD),
        histogram -> assertEquals(8, histogram.getBuckets().size())
    );
    testBothCases(query, dataset,
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD).minDocCount(1L),
        histogram -> assertEquals(6, histogram.getBuckets().size())
    );
}
public void testIntervalMonth() throws IOException {
    testBothCases(new MatchAllDocsQuery(),
        Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"),
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MONTH).field(DATE_FIELD),
        histogram -> {
            List<Histogram.Bucket> buckets = histogram.getBuckets();
            assertEquals(3, buckets.size());

            Histogram.Bucket bucket = buckets.get(0);
            assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());

            bucket = buckets.get(1);
            assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString());
            assertEquals(2, bucket.getDocCount());

            bucket = buckets.get(2);
            assertEquals("2017-03-01T00:00:00.000Z", bucket.getKeyAsString());
            assertEquals(3, bucket.getDocCount());
        }
    );
}
public void testMinDocCount() throws Exception {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        for (long value : new long[] {7, 3, -10, -6, 5, 50}) {
            Document doc = new Document();
            doc.add(new SortedNumericDocValuesField("field", value));
            w.addDocument(doc);
        }

        HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
            .field("field")
            .interval(10)
            .minDocCount(2);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            Histogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            assertEquals(2, histogram.getBuckets().size());
            assertEquals(-10d, histogram.getBuckets().get(0).getKey());
            assertEquals(2, histogram.getBuckets().get(0).getDocCount());
            assertEquals(0d, histogram.getBuckets().get(1).getKey());
            assertEquals(3, histogram.getBuckets().get(1).getDocCount());
        }
    }
}
public void testPostFilterDisablesCountOptimization() throws Exception {
    TestSearchContext context = new TestSearchContext(null);
    context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
    context.setSize(0);
    context.setTask(new SearchTask(123L, "", "", "", null));

    final AtomicBoolean collected = new AtomicBoolean();
    IndexSearcher contextSearcher = new IndexSearcher(new MultiReader()) {
        @Override
        protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
            collected.set(true);
            super.search(leaves, weight, collector);
        }
    };

    QueryPhase.execute(context, contextSearcher);
    assertEquals(0, context.queryResult().topDocs().totalHits);
    assertFalse(collected.get());

    context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery()));
    QueryPhase.execute(context, contextSearcher);
    assertEquals(0, context.queryResult().topDocs().totalHits);
    assertTrue(collected.get());
}
public void testMinScoreDisablesCountOptimization() throws Exception {
    TestSearchContext context = new TestSearchContext(null);
    context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
    context.setSize(0);
    context.setTask(new SearchTask(123L, "", "", "", null));

    final AtomicBoolean collected = new AtomicBoolean();
    IndexSearcher contextSearcher = new IndexSearcher(new MultiReader()) {
        @Override
        protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
            collected.set(true);
            super.search(leaves, weight, collector);
        }
    };

    QueryPhase.execute(context, contextSearcher);
    assertEquals(0, context.queryResult().topDocs().totalHits);
    assertFalse(collected.get());

    context.minimumScore(1);
    QueryPhase.execute(context, contextSearcher);
    assertEquals(0, context.queryResult().topDocs().totalHits);
    assertTrue(collected.get());
}
/** User runs a query and aggregates facets by summing their association values. */
private List<FacetResult> sumAssociations() throws IOException {
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);

    FacetsCollector fc = new FacetsCollector();

    // MatchAllDocsQuery is for "browsing" (counts facets
    // for all non-deleted docs in the index); normally
    // you'd use a "normal" query:
    FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);

    Facets tags = new TaxonomyFacetSumIntAssociations("$tags", taxoReader, config, fc);
    Facets genre = new TaxonomyFacetSumFloatAssociations("$genre", taxoReader, config, fc);

    // Retrieve results
    List<FacetResult> results = new ArrayList<FacetResult>();
    results.add(tags.getTopChildren(10, "tags"));
    results.add(genre.getTopChildren(10, "genre"));

    indexReader.close();
    taxoReader.close();

    return results;
}
public void testMinShouldMatchFilterWithoutShouldClauses() throws Exception {
    BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
    boolQueryBuilder.filter(new BoolQueryBuilder().must(new MatchAllQueryBuilder()));
    Query query = boolQueryBuilder.toQuery(createShardContext());
    assertThat(query, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) query;
    assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0));
    assertThat(booleanQuery.clauses().size(), equalTo(1));
    BooleanClause booleanClause = booleanQuery.clauses().get(0);
    assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER));
    assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class));
    BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery();
    // we didn't set minimum_should_match initially; there are no should clauses, so it should be 0
    assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(0));
    assertThat(innerBooleanQuery.clauses().size(), equalTo(1));
    BooleanClause innerBooleanClause = innerBooleanQuery.clauses().get(0);
    assertThat(innerBooleanClause.getOccur(), equalTo(BooleanClause.Occur.MUST));
    assertThat(innerBooleanClause.getQuery(), instanceOf(MatchAllDocsQuery.class));
}
public void testMinShouldMatchFilterWithShouldClauses() throws Exception {
    BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
    boolQueryBuilder.filter(new BoolQueryBuilder().must(new MatchAllQueryBuilder()).should(new MatchAllQueryBuilder()));
    Query query = boolQueryBuilder.toQuery(createShardContext());
    assertThat(query, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) query;
    assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0));
    assertThat(booleanQuery.clauses().size(), equalTo(1));
    BooleanClause booleanClause = booleanQuery.clauses().get(0);
    assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER));
    assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class));
    BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery();
    // we didn't set minimum_should_match initially, but there are should clauses, so it should be 1
    assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(1));
    assertThat(innerBooleanQuery.clauses().size(), equalTo(2));
    BooleanClause innerBooleanClause1 = innerBooleanQuery.clauses().get(0);
    assertThat(innerBooleanClause1.getOccur(), equalTo(BooleanClause.Occur.MUST));
    assertThat(innerBooleanClause1.getQuery(), instanceOf(MatchAllDocsQuery.class));
    BooleanClause innerBooleanClause2 = innerBooleanQuery.clauses().get(1);
    assertThat(innerBooleanClause2.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
    assertThat(innerBooleanClause2.getQuery(), instanceOf(MatchAllDocsQuery.class));
}
public void testBlendNoTermQuery() {
    FakeFieldType ft1 = new FakeFieldType();
    ft1.setName("foo");
    FakeFieldType ft2 = new FakeFieldType() {
        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            return new MatchAllDocsQuery();
        }
    };
    ft2.setName("bar");
    Term[] terms = new Term[] { new Term("foo", "baz") };
    float[] boosts = new float[] {2};
    Query expectedClause1 = BlendedTermQuery.booleanBlendedQuery(terms, boosts, false);
    Query expectedClause2 = new BoostQuery(new MatchAllDocsQuery(), 3);
    Query expected = new BooleanQuery.Builder().setDisableCoord(true)
        .add(expectedClause1, Occur.SHOULD)
        .add(expectedClause2, Occur.SHOULD)
        .build();
    Query actual = MultiMatchQuery.blendTerm(
        indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }),
        new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
    assertEquals(expected, actual);
}
public void testSortValues() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    for (int i = 0; i < 10; i++) {
        Document document = new Document();
        String text = new String(new char[]{(char) (97 + i), (char) (97 + i)});
        document.add(new TextField("str", text, Field.Store.YES));
        document.add(new SortedDocValuesField("str", new BytesRef(text)));
        indexWriter.addDocument(document);
    }
    IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter));
    IndexSearcher searcher = new IndexSearcher(reader);
    TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("str", SortField.Type.STRING)));
    for (int i = 0; i < 10; i++) {
        FieldDoc fieldDoc = (FieldDoc) docs.scoreDocs[i];
        assertThat((BytesRef) fieldDoc.fields[0],
            equalTo(new BytesRef(new String(new char[]{(char) (97 + i), (char) (97 + i)}))));
    }
}
public void testNoTokens() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.KEYWORD_ANALYZER));

    FieldType allFt = getAllFieldType();
    Document doc = new Document();
    doc.add(new Field("_id", "1", StoredField.TYPE));
    doc.add(new AllField("_all", "", 2.0f, allFt));
    indexWriter.addDocument(doc);

    IndexReader reader = DirectoryReader.open(indexWriter);
    IndexSearcher searcher = new IndexSearcher(reader);

    TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
    assertThat(docs.totalHits, equalTo(1));
    assertThat(docs.scoreDocs[0].doc, equalTo(0));
}
/** User runs a query and counts facets. */
private List<FacetResult> search() throws IOException {
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(indexReader);

    // Aggregates the facet counts
    FacetsCollector fc = new FacetsCollector();

    // MatchAllDocsQuery is for "browsing" (counts facets
    // for all non-deleted docs in the index); normally
    // you'd use a "normal" query:
    FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);

    // Retrieve results
    Facets facets = new SortedSetDocValuesFacetCounts(state, fc);

    List<FacetResult> results = new ArrayList<FacetResult>();
    results.add(facets.getTopChildren(10, "Author"));
    results.add(facets.getTopChildren(10, "Publish Year"));

    indexReader.close();
    return results;
}
@Override
public Query createAllDocsQuery(final @NonNull String name) {
    if (name.length() == 0) {
        return new MatchAllDocsQuery();
    } else {
        return new FilteredQuery(new MatchAllDocsQuery(), new HasFieldFilter(name));
    }
}
public void testHitsExecutionNeeded() {
    PercolateQuery percolateQuery = new PercolateQuery(
        "", ctx -> null, new BytesArray("{}"), new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class),
        new MatchAllDocsQuery()
    );
    PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(Settings.EMPTY, emptyMap());
    SearchContext searchContext = Mockito.mock(SearchContext.class);
    Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList()));
    Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());

    assertThat(subFetchPhase.hitsExecutionNeeded(searchContext), is(false));
    Mockito.when(searchContext.query()).thenReturn(percolateQuery);
    assertThat(subFetchPhase.hitsExecutionNeeded(searchContext), is(true));
}
@Override
protected Query rewrite(Query query) throws IOException {
    // TemplateQueryBuilder adds an optimization when the template and query builder have boosts / query names: it wraps
    // the actual QueryBuilder that comes from the template in a BooleanQueryBuilder to give it an outer boost / name.
    // This makes some queries not exactly equal but equivalent, so we need to rewrite them before comparing.
    if (query != null) {
        MemoryIndex idx = new MemoryIndex();
        return idx.createSearcher().rewrite(query);
    }
    return new MatchAllDocsQuery(); // null == *:*
}
private static boolean returnsDocsInOrder(Query query, SortAndFormats sf) {
    if (sf == null || Sort.RELEVANCE.equals(sf.sort)) {
        // sort by score
        // queries that return constant scores will return docs in index
        // order since Lucene tie-breaks on the doc id
        return query.getClass() == ConstantScoreQuery.class
            || query.getClass() == MatchAllDocsQuery.class;
    } else {
        return Sort.INDEXORDER.equals(sf.sort);
    }
}
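/**
 * Illustrative sketch only (not from the original source): exercises returnsDocsInOrder above, with
 * hypothetical field/term names. Under the relevance sort (sf == null), only match-all and
 * constant-score queries qualify, because every doc gets the same score and Lucene breaks score
 * ties by doc id; any other sort must be Sort.INDEXORDER itself.
 */
static void demoReturnsDocsInOrder() {
    // match-all and constant-score queries give every doc the same score
    assert returnsDocsInOrder(new MatchAllDocsQuery(), null);
    assert returnsDocsInOrder(new ConstantScoreQuery(new TermQuery(new Term("field", "value"))), null);
    // a scoring query may reorder docs by score, so it does not qualify
    assert returnsDocsInOrder(new TermQuery(new Term("field", "value")), null) == false;
}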
/** Returns true if the given query might match nested documents. */
public boolean mightMatchNestedDocs(Query query) {
    if (query instanceof ConstantScoreQuery) {
        return mightMatchNestedDocs(((ConstantScoreQuery) query).getQuery());
    } else if (query instanceof BoostQuery) {
        return mightMatchNestedDocs(((BoostQuery) query).getQuery());
    } else if (query instanceof MatchAllDocsQuery) {
        return true;
    } else if (query instanceof MatchNoDocsQuery) {
        return false;
    } else if (query instanceof TermQuery) {
        // We only handle term queries and range queries, which should already
        // cover a high majority of use-cases
        return mightMatchNestedDocs(((TermQuery) query).getTerm().field());
    } else if (query instanceof PointRangeQuery) {
        return mightMatchNestedDocs(((PointRangeQuery) query).getField());
    } else if (query instanceof IndexOrDocValuesQuery) {
        return mightMatchNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery());
    } else if (query instanceof BooleanQuery) {
        final BooleanQuery bq = (BooleanQuery) query;
        final boolean hasRequiredClauses = bq.clauses().stream().anyMatch(BooleanClause::isRequired);
        if (hasRequiredClauses) {
            return bq.clauses().stream()
                .filter(BooleanClause::isRequired)
                .map(BooleanClause::getQuery)
                .allMatch(this::mightMatchNestedDocs);
        } else {
            return bq.clauses().stream()
                .filter(c -> c.getOccur() == Occur.SHOULD)
                .map(BooleanClause::getQuery)
                .anyMatch(this::mightMatchNestedDocs);
        }
    } else if (query instanceof ESToParentBlockJoinQuery) {
        return ((ESToParentBlockJoinQuery) query).getPath() != null;
    } else {
        return true;
    }
}
/** Returns true if the given query might match parent documents or documents
 *  that are nested under a different path. */
public boolean mightMatchNonNestedDocs(Query query, String nestedPath) {
    if (query instanceof ConstantScoreQuery) {
        return mightMatchNonNestedDocs(((ConstantScoreQuery) query).getQuery(), nestedPath);
    } else if (query instanceof BoostQuery) {
        return mightMatchNonNestedDocs(((BoostQuery) query).getQuery(), nestedPath);
    } else if (query instanceof MatchAllDocsQuery) {
        return true;
    } else if (query instanceof MatchNoDocsQuery) {
        return false;
    } else if (query instanceof TermQuery) {
        return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath);
    } else if (query instanceof PointRangeQuery) {
        return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath);
    } else if (query instanceof IndexOrDocValuesQuery) {
        return mightMatchNonNestedDocs(((IndexOrDocValuesQuery) query).getIndexQuery(), nestedPath);
    } else if (query instanceof BooleanQuery) {
        final BooleanQuery bq = (BooleanQuery) query;
        final boolean hasRequiredClauses = bq.clauses().stream().anyMatch(BooleanClause::isRequired);
        if (hasRequiredClauses) {
            return bq.clauses().stream()
                .filter(BooleanClause::isRequired)
                .map(BooleanClause::getQuery)
                .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath));
        } else {
            return bq.clauses().stream()
                .filter(c -> c.getOccur() == Occur.SHOULD)
                .map(BooleanClause::getQuery)
                .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath));
        }
    } else {
        return true;
    }
}
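/**
 * Hypothetical usage sketch (not part of the original class) showing the boolean-query logic shared
 * by the two mightMatch* helpers above: with required clauses, every required clause must be able
 * to match for the conjunction to; with only SHOULD clauses, a single matching clause suffices.
 */
void demoBooleanClauseHandling() {
    // a required MatchNoDocsQuery clause means the conjunction can never match nested docs
    Query conjunction = new BooleanQuery.Builder()
        .add(new MatchAllDocsQuery(), Occur.MUST)
        .add(new MatchNoDocsQuery(), Occur.MUST)
        .build();
    assert mightMatchNestedDocs(conjunction) == false;

    // with only optional clauses, one clause that might match nested docs is enough
    Query disjunction = new BooleanQuery.Builder()
        .add(new MatchNoDocsQuery(), Occur.SHOULD)
        .add(new MatchAllDocsQuery(), Occur.SHOULD)
        .build();
    assert mightMatchNestedDocs(disjunction);
}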
/** Return a query that matches all documents but those that match the given query. */
public static Query not(Query q) {
    return new BooleanQuery.Builder()
        .add(new MatchAllDocsQuery(), Occur.MUST)
        .add(q, Occur.MUST_NOT)
        .build();
}
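/**
 * A minimal sketch (hypothetical demo method, same class assumed) of the clause structure not(q)
 * builds: a MUST match-all clause selects every document and a MUST_NOT clause subtracts the
 * matches of q. The field/term names are illustrative.
 */
static void demoNot() {
    Query q = new TermQuery(new Term("field", "value"));
    BooleanQuery negated = (BooleanQuery) not(q);
    assert negated.clauses().size() == 2;
    assert negated.clauses().get(0).getQuery() instanceof MatchAllDocsQuery;
    assert negated.clauses().get(0).getOccur() == Occur.MUST;
    assert negated.clauses().get(1).getQuery() == q;
    assert negated.clauses().get(1).getOccur() == Occur.MUST_NOT;
}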
/** User runs a query and counts facets only without collecting the matching documents. */
private List<FacetResult> facetsOnly() throws IOException {
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);

    FacetsCollector fc = new FacetsCollector();

    // MatchAllDocsQuery is for "browsing" (counts facets
    // for all non-deleted docs in the index); normally
    // you'd use a "normal" query:
    searcher.search(new MatchAllDocsQuery(), fc);

    // Retrieve results
    List<FacetResult> results = new ArrayList<FacetResult>();

    // Count both "Publish Date" and "Author" dimensions
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
    results.add(facets.getTopChildren(10, "Author"));
    results.add(facets.getTopChildren(10, "Publish Date"));

    indexReader.close();
    taxoReader.close();

    return results;
}
public void testEmptyNumericSegment() throws Exception {
    final Directory dir = newDirectory();
    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("group", 0));
    w.addDocument(doc);
    doc.clear();
    doc.add(new NumericDocValuesField("group", 1));
    w.addDocument(doc);
    w.commit();
    doc.clear();
    doc.add(new NumericDocValuesField("group", 10));
    w.addDocument(doc);
    w.commit();
    doc.clear();
    doc.add(new NumericDocValuesField("category", 0));
    w.addDocument(doc);
    w.commit();
    final IndexReader reader = w.getReader();
    final IndexSearcher searcher = newSearcher(reader);

    SortField sortField = new SortField("group", SortField.Type.LONG);
    sortField.setMissingValue(Long.MAX_VALUE);
    Sort sort = new Sort(sortField);
    final CollapsingTopDocsCollector collapsingCollector =
        CollapsingTopDocsCollector.createNumeric("group", sort, 10, false);
    searcher.search(new MatchAllDocsQuery(), collapsingCollector);
    CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs();
    assertEquals(4, collapseTopFieldDocs.scoreDocs.length);
    assertEquals(4, collapseTopFieldDocs.collapseValues.length);
    assertEquals(0L, collapseTopFieldDocs.collapseValues[0]);
    assertEquals(1L, collapseTopFieldDocs.collapseValues[1]);
    assertEquals(10L, collapseTopFieldDocs.collapseValues[2]);
    assertNull(collapseTopFieldDocs.collapseValues[3]);
    w.close();
    reader.close();
    dir.close();
}
public void testEmptySortedSegment() throws Exception {
    final Directory dir = newDirectory();
    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new SortedDocValuesField("group", new BytesRef("0")));
    w.addDocument(doc);
    doc.clear();
    doc.add(new SortedDocValuesField("group", new BytesRef("1")));
    w.addDocument(doc);
    w.commit();
    doc.clear();
    doc.add(new SortedDocValuesField("group", new BytesRef("10")));
    w.addDocument(doc);
    w.commit();
    doc.clear();
    doc.add(new NumericDocValuesField("category", 0));
    w.addDocument(doc);
    w.commit();
    final IndexReader reader = w.getReader();
    final IndexSearcher searcher = newSearcher(reader);

    Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL));
    final CollapsingTopDocsCollector collapsingCollector =
        CollapsingTopDocsCollector.createKeyword("group", sort, 10, false);
    searcher.search(new MatchAllDocsQuery(), collapsingCollector);
    CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs();
    assertEquals(4, collapseTopFieldDocs.scoreDocs.length);
    assertEquals(4, collapseTopFieldDocs.collapseValues.length);
    assertNull(collapseTopFieldDocs.collapseValues[0]);
    assertEquals(new BytesRef("0"), collapseTopFieldDocs.collapseValues[1]);
    assertEquals(new BytesRef("1"), collapseTopFieldDocs.collapseValues[2]);
    assertEquals(new BytesRef("10"), collapseTopFieldDocs.collapseValues[3]);
    w.close();
    reader.close();
    dir.close();
}
public void testNoDocs() throws IOException {
    testCase(new MatchAllDocsQuery(), iw -> {
        // Intentionally not writing any docs
    }, max -> {
        assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
    });
}
public void testNoMatchingField() throws IOException {
    testCase(new MatchAllDocsQuery(), iw -> {
        iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7)));
        iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1)));
    }, max -> {
        assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
    });
}
public void testEmpty() throws IOException {
    PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg")
        .field("field")
        .method(PercentilesMethod.HDR)
        .values(0.5);
    MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
    fieldType.setName("field");
    try (IndexReader reader = new MultiReader()) {
        IndexSearcher searcher = new IndexSearcher(reader);
        PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
        Percentile rank = ranks.iterator().next();
        assertEquals(Double.NaN, rank.getPercent(), 0d);
        assertEquals(0.5, rank.getValue(), 0d);
    }
}
public void testSimple() throws IOException {
    try (Directory dir = newDirectory();
         RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        for (double value : new double[] {3, 0.2, 10}) {
            Document doc = new Document();
            doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
            w.addDocument(doc);
        }

        PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg")
            .field("field")
            .method(PercentilesMethod.HDR)
            .values(0.1, 0.5, 12);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            Iterator<Percentile> rankIterator = ranks.iterator();
            Percentile rank = rankIterator.next();
            assertEquals(0.1, rank.getValue(), 0d);
            assertThat(rank.getPercent(), Matchers.equalTo(0d));
            rank = rankIterator.next();
            assertEquals(0.5, rank.getValue(), 0d);
            assertThat(rank.getPercent(), Matchers.greaterThan(0d));
            assertThat(rank.getPercent(), Matchers.lessThan(100d));
            rank = rankIterator.next();
            assertEquals(12, rank.getValue(), 0d);
            assertThat(rank.getPercent(), Matchers.equalTo(100d));
            assertFalse(rankIterator.hasNext());
        }
    }
}