@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException {
    perSegCollector = new PerSegmentCollects(ctx);
    entries.add(perSegCollector);

    // Deferring collector
    return new LeafBucketCollector() {
        @Override
        public void setScorer(Scorer scorer) throws IOException {
            perSegCollector.setScorer(scorer);
        }

        @Override
        public void collect(int doc, long bucket) throws IOException {
            perSegCollector.collect(doc, bucket);
        }
    };
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, null);
    if (Lucene.isEmpty(childrenDocIdSet)) {
        return null;
    }

    SortedDocValues globalValues = globalIfd.load(context).getOrdinalsValues(parentType);
    if (globalValues != null) {
        // we forcefully apply live docs here so that deleted children don't give matching parents
        childrenDocIdSet = BitsFilteredDocIdSet.wrap(childrenDocIdSet, context.reader().getLiveDocs());
        DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
        if (innerIterator != null) {
            ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(
                    innerIterator, parentOrds, globalValues
            );
            return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
        }
    }
    return null;
}
/**
 * Check whether at least one document matches the provided query.
 */
public static boolean exists(IndexSearcher searcher, Query query) throws IOException {
    final Weight weight = searcher.createNormalizedWeight(query, false);
    // the scorer API should be more efficient at stopping after the first
    // match than the bulk scorer API
    for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
        final Scorer scorer = weight.scorer(context);
        if (scorer == null) {
            continue;
        }
        final Bits liveDocs = context.reader().getLiveDocs();
        final DocIdSetIterator iterator = scorer.iterator();
        for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
            if (liveDocs == null || liveDocs.get(doc)) {
                return true;
            }
        }
    }
    return false;
}
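A minimal usage sketch of the helper above, assuming it lives on the same Lucene utility class the other snippets reference; the method name, field, and term here are illustrative, not from the source:

// Hypothetical caller: cheaply probe for any live match before starting a
// heavier search phase. "status"/"deleted" are made-up values.
static boolean anyDeleted(IndexSearcher searcher) throws IOException {
    return Lucene.exists(searcher, new TermQuery(new Term("status", "deleted")));
}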
/**
 * Return a Scorer that throws an IllegalStateException
 * on all operations with the given message.
 */
public static Scorer illegalScorer(final String message) {
    return new Scorer(null) {
        @Override
        public float score() throws IOException {
            throw new IllegalStateException(message);
        }

        @Override
        public int freq() throws IOException {
            throw new IllegalStateException(message);
        }

        @Override
        public int docID() {
            throw new IllegalStateException(message);
        }

        @Override
        public DocIdSetIterator iterator() {
            throw new IllegalStateException(message);
        }
    };
}
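A hedged usage sketch: the usual reason for such a scorer is to fail fast if scoring is requested in a context that cannot support it (the surrounding names below are ours, not the source's):

// Hypothetical usage: sub-collectors replayed without scores get a scorer
// that fails loudly instead of silently returning a wrong score.
leafCollector.setScorer(illegalScorer("score() is not available during unscored replay"));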
private void addMatchedQueries(HitContext hitContext, ImmutableMap<String, Query> namedQueries, List<String> matchedQueries) throws IOException {
    for (Map.Entry<String, Query> entry : namedQueries.entrySet()) {
        String name = entry.getKey();
        Query filter = entry.getValue();

        final Weight weight = hitContext.topLevelSearcher().createNormalizedWeight(filter, false);
        final Scorer scorer = weight.scorer(hitContext.readerContext());
        if (scorer == null) {
            continue;
        }
        final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
        if (twoPhase == null) {
            if (scorer.iterator().advance(hitContext.docId()) == hitContext.docId()) {
                matchedQueries.add(name);
            }
        } else {
            if (twoPhase.approximation().advance(hitContext.docId()) == hitContext.docId() && twoPhase.matches()) {
                matchedQueries.add(name);
            }
        }
    }
}
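The two-phase check above is a general pattern: advance the cheap approximation first, and only confirm with matches() on a docId hit. A sketch of the same logic extracted as a standalone helper (the helper name is ours):

// Extracted form of the per-query check above; mirrors the loop body exactly.
static boolean matchesDoc(Scorer scorer, int docId) throws IOException {
    TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
    if (twoPhase == null) {
        return scorer.iterator().advance(docId) == docId;
    }
    return twoPhase.approximation().advance(docId) == docId && twoPhase.matches();
}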
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    final Scorer parentScorer = parentWeight.scorer(context);

    // no matches
    if (parentScorer == null) {
        return null;
    }

    BitSet parents = parentsFilter.getBitSet(context);
    if (parents == null) {
        // No matches
        return null;
    }

    int firstParentDoc = parentScorer.iterator().nextDoc();
    if (firstParentDoc == DocIdSetIterator.NO_MORE_DOCS) {
        // No matches
        return null;
    }
    return new IncludeNestedDocsScorer(this, parentScorer, parents, firstParentDoc);
}
IncludeNestedDocsScorer(Weight weight, Scorer parentScorer, BitSet parentBits, int currentParentPointer) {
    super(weight);
    this.parentScorer = parentScorer;
    this.parentBits = parentBits;
    this.currentParentPointer = currentParentPointer;
    if (currentParentPointer == 0) {
        currentChildPointer = 0;
    } else {
        this.currentChildPointer = this.parentBits.prevSetBit(currentParentPointer - 1);
        if (currentChildPointer == -1) {
            // no previous set parent, so we start from doc 0
            currentChildPointer = 0;
        } else {
            currentChildPointer++; // we only care about children
        }
    }
    currentDoc = currentChildPointer;
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, null);
    // we forcefully apply live docs here so that deleted children don't give matching parents
    childrenDocSet = BitsFilteredDocIdSet.wrap(childrenDocSet, context.reader().getLiveDocs());
    if (Lucene.isEmpty(childrenDocSet)) {
        return null;
    }
    final DocIdSetIterator childIterator = childrenDocSet.iterator();
    if (childIterator == null) {
        return null;
    }
    SortedDocValues bytesValues = globalIfd.load(context).getOrdinalsValues(parentType);
    if (bytesValues == null) {
        return null;
    }

    return new ChildScorer(this, parentIdxs, scores, childIterator, bytesValues);
}
BaseScorer(Weight weight, Scorer approximation, CheckedFunction<Integer, Query, IOException> percolatorQueries, IndexSearcher percolatorIndexSearcher) {
    super(weight);
    this.approximation = approximation;
    this.percolatorQueries = percolatorQueries;
    this.percolatorIndexSearcher = percolatorIndexSearcher;
}
private static double getScore(Scorer scorer) {
    try {
        return scorer.score();
    } catch (IOException e) {
        throw new ElasticsearchException("couldn't lookup score", e);
    }
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    if (remaining == 0) {
        return null;
    }

    if (shortCircuitFilter != null) {
        DocIdSet docIdSet = shortCircuitFilter.getDocIdSet(context, null);
        if (!Lucene.isEmpty(docIdSet)) {
            DocIdSetIterator iterator = docIdSet.iterator();
            if (iterator != null) {
                return ConstantScorer.create(iterator, this, queryWeight);
            }
        }
        return null;
    }

    DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, null);
    if (!Lucene.isEmpty(parentDocIdSet)) {
        // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
        // count down (short circuit) logic will then work as expected.
        parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs());
        DocIdSetIterator innerIterator = parentDocIdSet.iterator();
        if (innerIterator != null) {
            LongBitSet parentOrds = collector.parentOrds;
            SortedDocValues globalValues = globalIfd.load(context).getOrdinalsValues(parentType);
            if (globalValues != null) {
                DocIdSetIterator parentIdIterator = new ParentOrdIterator(innerIterator, parentOrds, globalValues, this);
                return ConstantScorer.create(parentIdIterator, this, queryWeight);
            }
        }
    }
    return null;
}
@Override
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
    LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
    Map<String, Object> ctx = new HashMap<>();
    ctx.putAll(leafLookup.asMap());
    if (vars != null) {
        ctx.putAll(vars);
    }
    AbstractSearchScript leafSearchScript = new AbstractSearchScript() {

        @Override
        public Object run() {
            return script.apply(ctx);
        }

        @Override
        public void setNextVar(String name, Object value) {
            ctx.put(name, value);
        }

        @Override
        public void setScorer(Scorer scorer) {
            super.setScorer(scorer);
            ctx.put("_score", new ScoreAccessor(scorer));
        }
    };
    leafSearchScript.setLookup(leafLookup);
    return leafSearchScript;
}
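Scripts compiled this way read the current score from the ctx map under "_score". A hedged sketch of a compatible script function (the Number cast assumes the "_score" accessor is Number-like, which its use here suggests; verify against the real ScoreAccessor):

// Hypothetical script body compatible with run() above: scale the query score.
Function<Map<String, Object>, Object> script = ctx -> {
    double score = ((Number) ctx.get("_score")).doubleValue();
    return score * 2.0;
};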
private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
    Scorer subQueryScorer = subQueryWeight.scorer(context);
    if (subQueryScorer == null) {
        return null;
    }
    final LeafScoreFunction[] functions = new LeafScoreFunction[filterFunctions.length];
    final Bits[] docSets = new Bits[filterFunctions.length];
    for (int i = 0; i < filterFunctions.length; i++) {
        FilterFunction filterFunction = filterFunctions[i];
        functions[i] = filterFunction.function.getLeafScoreFunction(context);
        Scorer filterScorer = filterWeights[i].scorer(context);
        docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
    }
    return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, needsScores);
}
private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException {
    final Object coreCacheReader = context.reader().getCoreCacheKey();
    final ShardId shardId = ShardUtils.extractShardId(context.reader());
    if (shardId != null // can't require it because of the percolator
            && index.getName().equals(shardId.getIndex()) == false) {
        // insanity
        throw new IllegalStateException("Trying to load bit set for index [" + shardId.getIndex()
                + "] with cache of index [" + index.getName() + "]");
    }
    Cache<Query, Value> filterToFbs = loadedFilters.get(coreCacheReader, new Callable<Cache<Query, Value>>() {
        @Override
        public Cache<Query, Value> call() throws Exception {
            context.reader().addCoreClosedListener(BitsetFilterCache.this);
            return CacheBuilder.newBuilder().build();
        }
    });
    return filterToFbs.get(query, new Callable<Value>() {
        @Override
        public Value call() throws Exception {
            final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
            final IndexSearcher searcher = new IndexSearcher(topLevelContext);
            searcher.setQueryCache(null);
            final Weight weight = searcher.createNormalizedWeight(query, false);
            final Scorer s = weight.scorer(context);
            final BitSet bitSet;
            if (s == null) {
                bitSet = null;
            } else {
                bitSet = BitSet.of(s.iterator(), context.reader().maxDoc());
            }
            Value value = new Value(bitSet, shardId);
            listener.onCache(shardId, value.bitset);
            return value;
        }
    }).bitset;
}
/**
 * Returns the best (deepest) nested {@link ObjectMapper} instance that is in the scope of the specified nested docId.
 */
public ObjectMapper findNestedObjectMapper(int nestedDocId, SearchContext sc, LeafReaderContext context) throws IOException {
    ObjectMapper nestedObjectMapper = null;
    for (ObjectMapper objectMapper : objectMappers().values()) {
        if (!objectMapper.nested().isNested()) {
            continue;
        }

        Query filter = objectMapper.nestedTypeFilter();
        if (filter == null) {
            continue;
        }
        // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and
        // therefore is guaranteed to be a live doc.
        final Weight nestedWeight = filter.createWeight(sc.searcher(), false);
        Scorer scorer = nestedWeight.scorer(context);
        if (scorer == null) {
            continue;
        }

        if (scorer.iterator().advance(nestedDocId) == nestedDocId) {
            // keep the mapper with the longest (most specific) path
            if (nestedObjectMapper == null) {
                nestedObjectMapper = objectMapper;
            } else if (nestedObjectMapper.fullPath().length() < objectMapper.fullPath().length()) {
                nestedObjectMapper = objectMapper;
            }
        }
    }
    return nestedObjectMapper;
}
@Override
protected void doPostCollection() throws IOException {
    IndexReader indexReader = context().searcher().getIndexReader();
    for (LeafReaderContext ctx : indexReader.leaves()) {
        Scorer childDocsScorer = childFilter.scorer(ctx);
        if (childDocsScorer == null) {
            continue;
        }
        DocIdSetIterator childDocsIter = childDocsScorer.iterator();

        final LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(ctx);
        final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);

        // Set the scorer, since we now replay only the child docIds
        sub.setScorer(new ConstantScoreScorer(null, 1f, childDocsIter));

        final Bits liveDocs = ctx.reader().getLiveDocs();
        for (int docId = childDocsIter.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = childDocsIter.nextDoc()) {
            if (liveDocs != null && liveDocs.get(docId) == false) {
                continue;
            }
            long globalOrdinal = globalOrdinals.getOrd(docId);
            if (globalOrdinal != -1) {
                long bucketOrd = parentOrdToBuckets.get(globalOrdinal);
                if (bucketOrd != -1) {
                    collectBucket(sub, docId, bucketOrd);
                    if (multipleBucketsPerParentOrd) {
                        long[] otherBucketOrds = parentOrdToOtherBuckets.get(globalOrdinal);
                        if (otherBucketOrds != null) {
                            for (long otherBucketOrd : otherBucketOrds) {
                                collectBucket(sub, docId, otherBucketOrd);
                            }
                        }
                    }
                }
            }
        }
    }
}
PerParentBucketSamples(long parentBucket, Scorer scorer, LeafReaderContext readerContext) {
    try {
        this.parentBucket = parentBucket;
        tdc = createTopDocsCollector(shardSize);
        currentLeafCollector = tdc.getLeafCollector(readerContext);
        setScorer(scorer);
    } catch (IOException e) {
        throw new ElasticsearchException("IO error creating collector", e);
    }
}
public void setScorer(Scorer scorer) throws IOException {
    this.currentScorer = scorer;
    for (int i = 0; i < perBucketSamples.size(); i++) {
        PerParentBucketSamples perBucketSample = perBucketSamples.get(i);
        if (perBucketSample == null) {
            continue;
        }
        perBucketSample.setScorer(scorer);
    }
}
@Override
public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
    IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(ctx);
    IndexSearcher searcher = new IndexSearcher(topLevelContext);
    searcher.setQueryCache(null);
    Weight weight = searcher.createNormalizedWeight(childFilter, false);
    Scorer childDocsScorer = weight.scorer(ctx);

    final BitSet parentDocs = parentFilter.getBitSet(ctx);
    final DocIdSetIterator childDocs = childDocsScorer != null ? childDocsScorer.iterator() : null;
    return new LeafBucketCollectorBase(sub, null) {
        @Override
        public void collect(int parentDoc, long bucket) throws IOException {
            // if parentDoc is 0 then this means that this parent doesn't have child docs (because child
            // docs always appear before the parent doc), so we can skip:
            if (parentDoc == 0 || parentDocs == null || childDocs == null) {
                return;
            }
            final int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
            int childDocId = childDocs.docID();
            if (childDocId <= prevParentDoc) {
                childDocId = childDocs.advance(prevParentDoc + 1);
            }

            for (; childDocId < parentDoc; childDocId = childDocs.nextDoc()) {
                collectBucket(sub, childDocId, bucket);
            }
        }
    };
}
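The prevSetBit arithmetic above leans on Lucene's block-join invariant that child docs are indexed immediately before their parent. A worked example with illustrative doc ids (not from the source):

// Illustrative segment layout (c = child, p = parent; parent bits set at 2, 5, 6):
//
//   doc:   0  1  2  3  4  5  6
//   type:  c  c  p  c  c  p  p
//
// collect(parentDoc = 5): prevSetBit(4) == 2, so advance the child iterator
// to doc 3 and collect docs 3 and 4; the loop stops once childDocId >= 5.
// collect(parentDoc = 6): prevSetBit(5) == 5, advance(6) moves past every
// remaining child, so nothing is collected; parent 6 has no children.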
@Override
public void setScorer(Scorer s) throws IOException {
    sub.setScorer(s);
    if (values != null) {
        values.setScorer(s);
    }
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    profile.startTime(QueryTimingType.BUILD_SCORER);
    final Scorer subQueryScorer;
    try {
        subQueryScorer = subQueryWeight.scorer(context);
    } finally {
        profile.stopAndRecordTime();
    }
    if (subQueryScorer == null) {
        return null;
    }
    return new ProfileScorer(this, subQueryScorer, profile);
}
private FiltersFunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, FilterFunction[] filterFunctions,
                                    float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner,
                                    boolean needsScores) throws IOException {
    super(scorer, w);
    this.scoreMode = scoreMode;
    this.filterFunctions = filterFunctions;
    this.functions = functions;
    this.docSets = docSets;
    this.scoreCombiner = scoreCombiner;
    this.maxBoost = maxBoost;
    this.needsScores = needsScores;
}
/**
 * Get a {@link DocIdSetIterator} over the inner documents.
 */
public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException {
    final IndexReaderContext topLevelCtx = ReaderUtil.getTopLevelContext(ctx);
    IndexSearcher indexSearcher = new IndexSearcher(topLevelCtx);
    Weight weight = indexSearcher.createNormalizedWeight(innerQuery, false);
    Scorer s = weight.scorer(ctx);
    return s == null ? null : s.iterator();
}
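A hedged caller-side sketch (the method name and counting purpose are ours): the returned iterator must be null-checked and can only be consumed once:

// Hypothetical caller: count the inner (nested) documents of one segment.
int countInnerDocs(LeafReaderContext ctx) throws IOException {
    DocIdSetIterator it = innerDocs(ctx); // may be null when nothing matches
    int count = 0;
    if (it != null) {
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
            count++;
        }
    }
    return count;
}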
private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException {
    final Object coreCacheReader = context.reader().getCoreCacheKey();
    final ShardId shardId = ShardUtils.extractShardId(context.reader());
    if (shardId != null // can't require it because of the percolator
            && indexSettings.getIndex().equals(shardId.getIndex()) == false) {
        // insanity
        throw new IllegalStateException("Trying to load bit set for index " + shardId.getIndex()
                + " with cache of index " + indexSettings.getIndex());
    }
    Cache<Query, Value> filterToFbs = loadedFilters.computeIfAbsent(coreCacheReader, key -> {
        context.reader().addCoreClosedListener(BitsetFilterCache.this);
        return CacheBuilder.<Query, Value>builder().build();
    });

    return filterToFbs.computeIfAbsent(query, key -> {
        final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
        final IndexSearcher searcher = new IndexSearcher(topLevelContext);
        searcher.setQueryCache(null);
        final Weight weight = searcher.createNormalizedWeight(query, false);
        Scorer s = weight.scorer(context);
        final BitSet bitSet;
        if (s == null) {
            bitSet = null;
        } else {
            bitSet = BitSet.of(s.iterator(), context.reader().maxDoc());
        }

        Value value = new Value(bitSet, shardId);
        listener.onCache(shardId, value.bitset);
        return value;
    }).bitset;
}
private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
    Scorer subQueryScorer = subQueryWeight.scorer(context);
    if (subQueryScorer == null) {
        return null;
    }
    LeafScoreFunction leafFunction = null;
    if (function != null) {
        leafFunction = function.getLeafScoreFunction(context);
    }
    return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, needsScores);
}
private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost,
                             CombineFunction scoreCombiner, boolean needsScores) throws IOException {
    super(scorer, w);
    this.function = function;
    this.scoreCombiner = scoreCombiner;
    this.maxBoost = maxBoost;
    this.needsScores = needsScores;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
    final Scorer filterScorer = filter.scorer(context);
    final LeafCollector in = collector.getLeafCollector(context);
    final Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);

    return new FilterLeafCollector(in) {
        @Override
        public void collect(int doc) throws IOException {
            if (bits.get(doc)) {
                in.collect(doc);
            }
        }
    };
}
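A hedged usage sketch, assuming the enclosing class is a Collector wrapper constructed from a delegate collector and a filter Weight, which is what the `collector` and `filter` fields above suggest:

// Hypothetical usage: count only the hits of `query` that also match `filterWeight`.
// The FilteredCollector constructor shape is assumed, not confirmed by the snippet.
TotalHitCountCollector counter = new TotalHitCountCollector();
searcher.search(query, new FilteredCollector(counter, filterWeight));
int filteredHits = counter.getTotalHits();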
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    return new ConstantScoreWeight(this) {
        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
            return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc()));
        }
    };
}
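For context, this weight is effectively a constant-score match-all: DocIdSetIterator.all(maxDoc) iterates every doc id in the segment. A sketch of a minimal enclosing query (the class name is ours; the real source may differ):

// Hedged sketch of a minimal query wrapping the weight above.
final class MatchAllConstantQuery extends Query {
    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
        return new ConstantScoreWeight(this) {
            @Override
            public Scorer scorer(LeafReaderContext context) throws IOException {
                return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc()));
            }
        };
    }

    @Override
    public String toString(String field) {
        return "MatchAllConstantQuery";
    }
}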
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    assert terms.length > 0;
    final LeafReader reader = context.reader();
    PostingsAndFreq[] postingsFreqs = new PostingsAndFreq[terms.length];

    final Terms fieldTerms = reader.terms(field);
    if (fieldTerms == null) {
        return null;
    }

    if (fieldTerms.hasPositions() == false) {
        throw new IllegalStateException("field \"" + terms[0].field()
                + "\" was indexed without position data; cannot run SeqSpanQuery (phrase=" + getQuery() + ")");
    }

    // Reuse single TermsEnum below:
    final TermsEnum te = fieldTerms.iterator();
    float totalMatchCost = 0;

    for (int i = 0; i < terms.length; i++) {
        final Term t = terms[i];
        final TermState state = states[i].get(context.ord);
        if (state == null) {
            // term doesn't exist in this segment
            assert reader.docFreq(t) == 0 : "no termstate found but term exists in reader";
            return null;
        }
        te.seekExact(t.bytes(), state);
        PostingsEnum postingsEnum = te.postings(null, PostingsEnum.POSITIONS);
        postingsFreqs[i] = new PostingsAndFreq(postingsEnum, positions[i], t);
        // totalMatchCost += termPositionsCost(te);
    }

    return new SeqSpanScorer(this, postingsFreqs, similarity.simScorer(stats, context), needsScores, totalMatchCost);
}
@Override
protected void doPostCollection() throws IOException {
    IndexReader indexReader = context().searchContext().searcher().getIndexReader();
    for (LeafReaderContext ctx : indexReader.leaves()) {
        Scorer childDocsScorer = childFilter.scorer(ctx);
        if (childDocsScorer == null) {
            continue;
        }
        DocIdSetIterator childDocsIter = childDocsScorer.iterator();

        final LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(ctx);
        final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);

        // Set the scorer, since we now replay only the child docIds
        sub.setScorer(ConstantScorer.create(childDocsIter, null, 1f));

        final Bits liveDocs = ctx.reader().getLiveDocs();
        for (int docId = childDocsIter.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = childDocsIter.nextDoc()) {
            if (liveDocs != null && liveDocs.get(docId) == false) {
                continue;
            }
            long globalOrdinal = globalOrdinals.getOrd(docId);
            if (globalOrdinal != -1) {
                long bucketOrd = parentOrdToBuckets.get(globalOrdinal);
                if (bucketOrd != -1) {
                    collectBucket(sub, docId, bucketOrd);
                    if (multipleBucketsPerParentOrd) {
                        long[] otherBucketOrds = parentOrdToOtherBuckets.get(globalOrdinal);
                        if (otherBucketOrds != null) {
                            for (long otherBucketOrd : otherBucketOrds) {
                                collectBucket(sub, docId, otherBucketOrd);
                            }
                        }
                    }
                }
            }
        }
    }
}
public PerParentBucketSamples(long parentBucket, Scorer scorer, LeafReaderContext readerContext) {
    try {
        this.parentBucket = parentBucket;
        tdc = createTopDocsCollector(shardSize);
        currentLeafCollector = tdc.getLeafCollector(readerContext);
        setScorer(scorer);
    } catch (IOException e) {
        throw new ElasticsearchException("IO error creating collector", e);
    }
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    profile.startTime(ProfileBreakdown.TimingType.BUILD_SCORER);
    final Scorer subQueryScorer;
    try {
        subQueryScorer = subQueryWeight.scorer(context);
    } finally {
        profile.stopAndRecordTime();
    }
    if (subQueryScorer == null) {
        return null;
    }
    return new ProfileScorer(this, subQueryScorer, profile);
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    DocIdSet parentsSet = parentFilter.getDocIdSet(context, null);
    if (Lucene.isEmpty(parentsSet) || remaining == 0) {
        return null;
    }

    // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
    // count down (short circuit) logic will then work as expected.
    DocIdSetIterator parents = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();

    if (parents != null) {
        SortedDocValues bytesValues = collector.globalIfd.load(context).getOrdinalsValues(parentType);
        if (bytesValues == null) {
            return null;
        }

        if (minChildren > 0 || maxChildren != 0 || scoreType == ScoreType.NONE) {
            switch (scoreType) {
                case NONE:
                    DocIdSetIterator parentIdIterator = new CountParentOrdIterator(this, parents, collector, bytesValues,
                            minChildren, maxChildren);
                    return ConstantScorer.create(parentIdIterator, this, queryWeight);
                case AVG:
                    return new AvgParentCountScorer(this, parents, collector, bytesValues, minChildren, maxChildren);
                default:
                    return new ParentCountScorer(this, parents, collector, bytesValues, minChildren, maxChildren);
            }
        }
        switch (scoreType) {
            case AVG:
                return new AvgParentScorer(this, parents, collector, bytesValues);
            default:
                return new ParentScorer(this, parents, collector, bytesValues);
        }
    }
    return null;
}