/**
 * Returns all {@link News} of the given news group except the {@link News} with the given id.
 * @param newsGroupId the news group id
 * @param exceptId the news which should not be returned
 * @return a {@link List} with all {@link News} of the requested news group except the {@link News} with the exceptId.
 */
private List<News> getNewsOfNewsGroup(final long newsGroupId, final long exceptId) {
    final BooleanQuery query = new BooleanQuery();
    // restrict the search to news documents
    QueryUtil.addTypeConf(query, NewsIndexType.getInstance());
    // exact group match: inclusive numeric range with identical lower/upper bounds
    final NumericRangeQuery<Long> groupQuery = NumericRangeQuery.newLongRange(NewsIndexType.FIELD_NEWSGROUPID, newsGroupId, newsGroupId, true, true);
    query.add(groupQuery, Occur.MUST);
    // exclude news with the given id
    query.add(new TermQuery(new Term(IIndexElement.FIELD_ID, String.valueOf(exceptId))), Occur.MUST_NOT);
    final SearchOptions options = new SearchOptions();
    // newest entries first
    options.setSort(new Sort(ESortField.PUBLISH_DATE.getSortField(ESortOrder.DESC)));
    final DocumentsSearchResult result = IndexSearch.getInstance().search(query, options);
    return NewsIndexType.docsToNews(result.getResults());
}
/**
 * Priority queue used to merge already-sorted per-shard hits into a single sorted stream.
 * Keeps a reference to each shard's ScoreDoc array and builds one comparator per sort field.
 */
MergeSortQueue(Sort sort, CollapseTopFieldDocs[] shardHits) throws IOException {
    super(shardHits.length);
    this.shardHits = new ScoreDoc[shardHits.length][];
    for (int shardIDX = 0; shardIDX < shardHits.length; shardIDX++) {
        final ScoreDoc[] shard = shardHits[shardIDX].scoreDocs;
        if (shard != null) {
            this.shardHits[shardIDX] = shard;
            // Fail gracefully if API is misused:
            // every hit must be a FieldDoc carrying its sort values (checked via assertions only,
            // so this is a no-op unless -ea is enabled)
            for (int hitIDX = 0; hitIDX < shard.length; hitIDX++) {
                final ScoreDoc sd = shard[hitIDX];
                final FieldDoc gd = (FieldDoc) sd;
                assert gd.fields != null;
            }
        }
    }
    final SortField[] sortFields = sort.getSort();
    comparators = new FieldComparator[sortFields.length];
    reverseMul = new int[sortFields.length];
    for (int compIDX = 0; compIDX < sortFields.length; compIDX++) {
        final SortField sortField = sortFields[compIDX];
        // numHits = 1: only the current head of each shard is ever compared
        comparators[compIDX] = sortField.getComparator(1, compIDX);
        reverseMul[compIDX] = sortField.getReverse() ? -1 : 1;
    }
}
/** Verifies that highlighting a MultiPhrasePrefixQuery ("quick brown fo*") marks each matched term. */
public void testMultiPhrasePrefixQuery() throws Exception {
    Analyzer analyzer = new StandardAnalyzer();
    Directory dir = newDirectory();
    String value = "The quick brown fox.";
    IndexReader ir = indexOneDoc(dir, "text", value, analyzer);
    MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery();
    query.add(new Term("text", "quick"));
    query.add(new Term("text", "brown"));
    // prefix term: expands to "fox"
    query.add(new Term("text", "fo"));
    IndexSearcher searcher = newSearcher(ir);
    TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
    assertThat(topDocs.totalHits, equalTo(1));
    int docId = topDocs.scoreDocs[0].doc;
    CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
    CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer, passageFormatter, null, value, false);
    Snippet[] snippets = highlighter.highlightField("text", query, docId, 5);
    assertThat(snippets.length, equalTo(1));
    assertThat(snippets[0].getText(), equalTo("The <b>quick</b> <b>brown</b> <b>fox</b>."));
    ir.close();
    dir.close();
}
/** Highlighting a single-term AllTermQuery must wrap only the matched term. */
public void testAllTermQuery() throws IOException {
    Directory directory = newDirectory();
    String content = "The quick brown fox.";
    Analyzer analyzer = new StandardAnalyzer();
    IndexReader reader = indexOneDoc(directory, "all", content, analyzer);
    IndexSearcher searcher = newSearcher(reader);
    AllTermQuery query = new AllTermQuery(new Term("all", "fox"));
    TopDocs hits = searcher.search(query, 10, Sort.INDEXORDER);
    assertThat(hits.totalHits, equalTo(1));
    int matchedDoc = hits.scoreDocs[0].doc;
    CustomPassageFormatter formatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
    CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer, formatter, null, content, false);
    Snippet[] snippets = highlighter.highlightField("all", query, matchedDoc, 5);
    assertThat(snippets.length, equalTo(1));
    assertThat(snippets[0].getText(), equalTo("The quick brown <b>fox</b>."));
    reader.close();
    directory.close();
}
/** Highlighting a CommonTermsQuery must mark every clause that matched. */
public void testCommonTermsQuery() throws IOException {
    Directory directory = newDirectory();
    String content = "The quick brown fox.";
    Analyzer analyzer = new StandardAnalyzer();
    IndexReader reader = indexOneDoc(directory, "text", content, analyzer);
    CommonTermsQuery query = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 128);
    for (String term : new String[] { "quick", "brown", "fox" }) {
        query.add(new Term("text", term));
    }
    IndexSearcher searcher = newSearcher(reader);
    TopDocs hits = searcher.search(query, 10, Sort.INDEXORDER);
    assertThat(hits.totalHits, equalTo(1));
    int matchedDoc = hits.scoreDocs[0].doc;
    CustomPassageFormatter formatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
    CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer, formatter, null, content, false);
    Snippet[] snippets = highlighter.highlightField("text", query, matchedDoc, 5);
    assertThat(snippets.length, equalTo(1));
    assertThat(snippets[0].getText(), equalTo("The <b>quick</b> <b>brown</b> <b>fox</b>."));
    reader.close();
    directory.close();
}
public void testNoScoring() throws IOException { QueryProfiler profiler = new QueryProfiler(); searcher.setProfiler(profiler); Query query = new TermQuery(new Term("foo", "bar")); searcher.search(query, 1, Sort.INDEXORDER); // scores are not needed List<ProfileResult> results = profiler.getTree(); assertEquals(1, results.size()); Map<String, Long> breakdown = results.get(0).getTimeBreakdown(); assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L)); long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); }
/** AVG sort mode: parents must be ordered by the average "field2" value of their children. */
@Override
protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) throws IOException {
    MultiValueMode sortMode = MultiValueMode.AVG;
    // children are all documents that are not parents
    Query childFilter = Queries.not(parentFilter);
    XFieldComparatorSource nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter));
    Query query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
    Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
    TopDocs topDocs = searcher.search(query, 5, sort);
    assertThat(topDocs.totalHits, equalTo(7));
    assertThat(topDocs.scoreDocs.length, equalTo(5));
    // expected order: docs 11, 7, 3, 15, 19 with averaged child values 2, 2, 3, 3, 3
    assertThat(topDocs.scoreDocs[0].doc, equalTo(11));
    assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2));
    assertThat(topDocs.scoreDocs[1].doc, equalTo(7));
    assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(2));
    assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
    assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(3));
    assertThat(topDocs.scoreDocs[3].doc, equalTo(15));
    assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(3));
    assertThat(topDocs.scoreDocs[4].doc, equalTo(19));
    assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(3));
}
/** AVG sort mode: parents must be ordered by the average "field2" value of their children. */
protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher, IndexFieldData.XFieldComparatorSource innerFieldComparator) throws IOException {
    MultiValueMode sortMode = MultiValueMode.AVG;
    Query childFilter = Queries.not(parentFilter);
    XFieldComparatorSource nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter));
    Query query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
    Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
    TopDocs topDocs = searcher.search(query, 5, sort);
    assertThat(topDocs.totalHits, equalTo(7));
    assertThat(topDocs.scoreDocs.length, equalTo(5));
    // expected (doc id, averaged child value) pairs, in rank order
    int[] expectedDocs = { 11, 7, 3, 15, 19 };
    int[] expectedValues = { 2, 2, 3, 3, 3 };
    for (int rank = 0; rank < expectedDocs.length; rank++) {
        assertThat(topDocs.scoreDocs[rank].doc, equalTo(expectedDocs[rank]));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[rank]).fields[0]).intValue(), equalTo(expectedValues[rank]));
    }
}
/** AVG sort mode: parents must be ordered by the average "field2" value of their children. */
protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) throws IOException {
    MultiValueMode sortMode = MultiValueMode.AVG;
    Query childFilter = Queries.not(parentFilter);
    XFieldComparatorSource nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter));
    Query query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
    Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
    TopDocs topDocs = searcher.search(query, 5, sort);
    assertThat(topDocs.totalHits, equalTo(7));
    assertThat(topDocs.scoreDocs.length, equalTo(5));
    // expected (doc id, averaged child value) pairs, in rank order
    int[] expectedDocs = { 11, 3, 7, 15, 19 };
    int[] expectedValues = { 2, 3, 3, 3, 4 };
    for (int rank = 0; rank < expectedDocs.length; rank++) {
        assertThat(topDocs.scoreDocs[rank].doc, equalTo(expectedDocs[rank]));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[rank]).fields[0]).intValue(), equalTo(expectedValues[rank]));
    }
}
/**
 * Indexes ten docs ("aa", "bb", ... "jj") and verifies STRING sorting returns their
 * doc values in lexicographic order.
 * Fix: the original leaked the IndexWriter, IndexReader and Directory — they are now
 * closed in a finally block so the resources are released even when an assertion fails.
 */
public void testSortValues() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    for (int i = 0; i < 10; i++) {
        Document document = new Document();
        // two identical letters per doc: "aa", "bb", ...
        String text = new String(new char[]{(char) (97 + i), (char) (97 + i)});
        document.add(new TextField("str", text, Field.Store.YES));
        document.add(new SortedDocValuesField("str", new BytesRef(text)));
        indexWriter.addDocument(document);
    }
    IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter));
    try {
        IndexSearcher searcher = new IndexSearcher(reader);
        TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("str", SortField.Type.STRING)));
        for (int i = 0; i < 10; i++) {
            FieldDoc fieldDoc = (FieldDoc) docs.scoreDocs[i];
            assertThat((BytesRef) fieldDoc.fields[0], equalTo(new BytesRef(new String(new char[]{(char) (97 + i), (char) (97 + i)}))));
        }
    } finally {
        // release index resources regardless of assertion outcome
        reader.close();
        indexWriter.close();
        dir.close();
    }
}
/**
 * Converts a Spring Data sort into a Hibernate Search {@link Sort}, mapping entity
 * property names to index field names and honoring direction and null handling.
 * Returns null when the incoming sort contains no orders.
 */
@Override
public Sort convert(org.springframework.data.domain.Sort sort) {
    SortContext context = queryBuilder.sort();
    SortFieldContext currentContext = null;
    for (org.springframework.data.domain.Sort.Order order : sort) {
        // translate the entity property to its indexed field name
        String fieldName = entityMetadataContext.getFieldName(order.getProperty());
        // first field starts the chain; subsequent fields are appended
        if (currentContext == null) {
            currentContext = context.byField(fieldName);
        } else {
            currentContext = currentContext.andByField(fieldName);
        }
        org.springframework.data.domain.Sort.NullHandling nullHandling = order.getNullHandling();
        if (nullHandling == org.springframework.data.domain.Sort.NullHandling.NULLS_FIRST) {
            currentContext = currentContext.onMissingValue().sortFirst();
        } else if (nullHandling == org.springframework.data.domain.Sort.NullHandling.NULLS_LAST) {
            currentContext = currentContext.onMissingValue().sortLast();
        }
        boolean desc = (order.getDirection() == org.springframework.data.domain.Sort.Direction.DESC);
        if (desc) {
            currentContext = currentContext.desc();
        } else {
            currentContext = currentContext.asc();
        }
    }
    // no orders seen -> nothing to build
    return (currentContext != null) ? currentContext.createSort() : null;
}
/**
 * Builds a Lucene {@link Sort} from the given ORDER BY clause, or null when there is
 * nothing to sort by.
 */
@Nullable
public static Sort generateLuceneSort(CollectorContext context, OrderBy orderBy, CollectInputSymbolVisitor<?> inputSymbolVisitor) {
    if (orderBy.orderBySymbols().isEmpty()) {
        return null;
    }
    SortSymbolVisitor visitor = new SortSymbolVisitor(inputSymbolVisitor);
    SortField[] fields = visitor.generateSortFields(
            orderBy.orderBySymbols(),
            context,
            orderBy.reverseFlags(),
            orderBy.nullsFirst());
    return new Sort(fields);
}
/**
 * Creates an iterable over Lucene search results backed by a key/value store.
 *
 * @param store the KV store used to resolve matched keys to values
 * @param index the Lucene index the query runs against
 * @param searchQuery the query to execute
 * @param sort the result ordering
 * @param pageSize number of documents fetched per page
 * @param offset number of leading results to skip
 * @param limit maximum number of results to return
 */
public CoreSearchIterable(
    CoreKVStore<K, V> store,
    LuceneSearchIndex index,
    Query searchQuery,
    Sort sort,
    int pageSize,
    int offset,
    int limit
) {
    this.store = store;
    this.index = index;
    this.searchQuery = searchQuery;
    this.sort = sort;
    this.pageSize = pageSize;
    this.offset = offset;
    this.limit = limit;
}
/**
 * Reader thread: waits for each of the 10000 expected documents to become searchable
 * and verifies exactly one match per key. Any I/O or closed-index failure is recorded
 * in {@code error} and stops the loop.
 * Fix: InterruptedException was silently swallowed; the interrupt status is now
 * restored and the loop exits, per standard Java interruption handling.
 */
@Override
public void run() {
    int i = 0;
    while (i < 10000) {
        try {
            if (data.size() <= i) {
                // writer has not produced document i yet; yield briefly and retry
                sleep(1);
                continue;
            }
            final String key = "key" + i;
            final String val = "value" + i;
            final List<Document> documents = index.searchForDocuments(new TermQuery(new Term(key, val)), 10, new Sort(new SortField(key, SortField.Type.STRING)));
            if (documents.size() != 1) {
                throw new RuntimeException("Invalid number of matching documents for " + key + ", found " + documents);
            }
            ++i;
        } catch (IOException ioe) {
            error = ioe;
            break;
        } catch (InterruptedException e) {
            // restore the interrupt flag and stop instead of swallowing the interrupt
            Thread.currentThread().interrupt();
            break;
        } catch (AlreadyClosedException ace) {
            error = ace;
            break;
        }
    }
}
/**
 * Prefix search over the name field for quick-tip suggestions, ordered by download
 * rank descending. Returns null (not an empty array) for empty or over-long input.
 */
@Override
public ScoreDoc[] prefixSearch(String keywords) throws IOException {
    // reject empty or over-long keywords; callers must handle the null return
    if (StringUtils.isEmpty(keywords) || keywords.length() > appConfig.getKeywordMaxLength()) {
        logger.error("empty keywords or over-length! {}", keywords);
        return null;
    }
    // highest download rank first (reverse = true)
    Sort sort = new Sort(new SortField("downloadRank", SortField.INT, true));
    Term nameFldTerm = new Term(fieldName, keywords);
    PrefixQuery nameFldQuery = new PrefixQuery(nameFldTerm);
    // restrict to the SOFT..GAME catalog range, both ends inclusive
    NumericRangeQuery<Integer> catalogQuery = NumericRangeQuery.newIntRange("catalog", (int) EnumCatalog.SOFT.getCatalog(), (int) EnumCatalog.GAME.getCatalog(), true, true);
    BooleanQuery booleanQuery = new BooleanQuery();
    booleanQuery.add(catalogQuery, Occur.MUST);
    booleanQuery.add(nameFldQuery, Occur.MUST);
    // fetches 2x the configured tip count — presumably headroom for post-filtering; TODO confirm
    TopDocs topDocs = quickTipsSearcher.search(booleanQuery, appConfig.getQuickTipsNum() * 2, sort);
    ScoreDoc[] docs = topDocs.scoreDocs;
    return docs;
}
/** * 查询索引 * * @param keywords * @return * @throws Exception */ public List<Document> searchIndex(Integer typeId, String keywords) throws Exception { // 1.init searcher Analyzer analyzer = new PaodingAnalyzer(); IndexReader reader = IndexReader.open(typeId == appConfig.getGameTypeId() ? appConfig.getGameIndexDir() : appConfig.getSoftIndexDir()); BooleanClause.Occur[] flags = new BooleanClause.Occur[] { BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD }; Query query = MultiFieldQueryParser.parse(keywords, appConfig.getQueryFields(), flags, analyzer); query = query.rewrite(reader); // 2.search List<Document> docs = new ArrayList<Document>(); Hits hits = (typeId == appConfig.getGameTypeId() ? gameSearcher.search(query, Sort.RELEVANCE) : softSearcher .search(query, Sort.RELEVANCE));// searcher.search(query, // Sort.RELEVANCE); for (int i = 0; i < hits.length(); i++) { docs.add(hits.doc(i)); } // 3.return reader.close(); return docs; }
/**
 * Builds a sorting merge policy from request params: SORT_FIELD names the field,
 * SORT_FIELD_TYPE its type (must not be DOC), SORT_DESC the direction (default true).
 * Fix: the try/catch around the boolean parse was dead code — Boolean.valueOf/
 * parseBoolean never throw, they simply yield false for unparsable input — so it is
 * replaced by a plain parseBoolean call with identical behavior.
 *
 * @throws IOException when the sort field type resolves to internal doc order
 */
@Override
public MergePolicy getInstance(Map<String, String> params) throws IOException {
    String field = params.get(SORT_FIELD);
    SortField.Type sortFieldType = SortField.Type.DOC;
    if (params.containsKey(SORT_FIELD_TYPE)) {
        // NOTE(review): toUpperCase() is locale-sensitive; toUpperCase(Locale.ROOT) would be safer
        sortFieldType = SortField.Type.valueOf(params.get(SORT_FIELD_TYPE).toUpperCase());
    }
    if (sortFieldType == SortField.Type.DOC) {
        throw new IOException(
                "Relying on internal lucene DocIDs is not guaranteed to work, this is only an implementation detail.");
    }
    boolean desc = true;
    if (params.containsKey(SORT_DESC)) {
        // parseBoolean never throws: non-"true" values (including null) yield false
        desc = Boolean.parseBoolean(params.get(SORT_DESC));
    }
    SortField sortField = new SortField(field, sortFieldType, desc);
    Sort sort = new Sort(sortField);
    return new SortingMergePolicyDecorator(new TieredMergePolicy(), sort);
}
/**
 * Resolves the Lucene sort field and direction for an expedient query from the
 * pagination parameters. Only the FIRST order entry is honored (note the
 * unconditional break); when no parameters are given the sort defaults to
 * "expedient$identificador" descending.
 */
protected Sort getLuceneSort(
        PaginacioParamsDto paginacioParams,
        List<Camp> informeCamps) {
    String sort = "expedient$identificador";
    boolean asc = false;
    if (paginacioParams != null) {
        for (OrdreDto ordre: paginacioParams.getOrdres()) {
            asc = ordre.getDireccio().equals(OrdreDireccioDto.ASCENDENT);
            // normalize the JSP prefix to the internal expedient prefix
            String clau = ordre.getCamp().replace(
                    net.conselldemallorca.helium.v3.core.api.dto.ExpedientCamps.EXPEDIENT_PREFIX_JSP,
                    net.conselldemallorca.helium.v3.core.api.dto.ExpedientCamps.EXPEDIENT_PREFIX);
            if (ordre.getCamp().contains("dadesExpedient")) {
                // field inside the expedient data map: strip path decorations to the bare field name
                sort = clau.replace("/", ".").replace("dadesExpedient.", "").replace(".valorMostrar", "");
            } else {
                sort = clau.replace(".", net.conselldemallorca.helium.v3.core.api.dto.ExpedientCamps.EXPEDIENT_PREFIX_SEPARATOR);
            }
            break; // only the first order entry is supported
        }
    }
    return getLuceneSort(
            sort,
            asc,
            informeCamps);
}
/**
 * Finds the top {@code count} hits for {@code query} and sorting the hits
 * by {@code sort}. Pending writes are committed first so they are visible
 * to the freshly opened reader.
 * Fix: the reader is now opened in try-with-resources — the original closed it only
 * on the success path and leaked it whenever the search or document loading threw.
 *
 * @param query the {@link Query} to search for
 * @param sort the {@link Sort} to be applied
 * @param count the max number of results to be collected
 * @param fields the names of the fields to be loaded
 * @return the found documents
 * @throws FhirIndexException wrapping any {@link IOException}
 */
public List<Document> search(Query query, Sort sort, Integer count, Set<String> fields) {
    try {
        indexWriter.commit();
        try (IndexReader reader = DirectoryReader.open(directory)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            // rewrite the sort against this searcher (resolves rewritable sort fields)
            sort = sort.rewrite(searcher);
            TopDocs topDocs = searcher.search(query, count, sort);
            List<Document> documents = new LinkedList<>();
            for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
                // load only the requested stored fields
                documents.add(searcher.doc(scoreDoc.doc, fields));
            }
            return documents;
        }
    } catch (IOException e) {
        throw new FhirIndexException(e, "Error while searching");
    }
}
/**
 * Full-text search of posts whose "tags" field contains the given tag phrase,
 * newest first (id descending), paged via {@code paigng}. The total match count
 * is written back onto the paging object as a side effect.
 */
@Override
@SuppressWarnings("unchecked")
public List<PostPO> searchByTag(Paging paigng, String tag) {
    FullTextSession fullTextSession = Search.getFullTextSession(super.session());
    SearchFactory sf = fullTextSession.getSearchFactory();
    QueryBuilder qb = sf.buildQueryBuilder().forEntity(PostPO.class).get();
    // phrase query over the "tags" field
    org.apache.lucene.search.Query luceneQuery = qb.phrase().onField("tags").sentence(tag).createQuery();
    FullTextQuery query = fullTextSession.createFullTextQuery(luceneQuery);
    query.setFirstResult(paigng.getFirstResult());
    query.setMaxResults(paigng.getMaxResults());
    // id descending: reverse = true
    Sort sort = new Sort(new SortField("id", SortField.Type.LONG, true));
    query.setSort(sort);
    // side effect: record the total match count on the paging object
    paigng.setTotalCount(query.getResultSize());
    return query.list();
}
/**
 * Creates one aggregation runner per request that asks for value discovery.
 * The shared docListAndSet on the context is populated lazily, at most once.
 * Note: slots for requests without discover_values remain null in the returned array.
 */
private AggregationWaitable[] buildWaitables(NodeContext context, RequestNode [] requests) throws IOException {
    AggregationWaitable[] runners = new AggregationWaitable[requests.length];
    for(int i = 0; i < requests.length; ++i) {
        if(requests[i].discover_values) {
            // populate required docListAndSet once and only if necessary
            if(context.queryDomainList == null) {
                context.queryDomainList = context.req.getSearcher().getDocListAndSet(new MatchAllDocsQuery(), context.queryDomain, Sort.INDEXORDER, 0, 0);
            }
            FacetFieldAdapter adapter = new FacetFieldAdapter(context, requests[i].type);
            runners[i] = new AggregationWaitable(context, adapter, adapter.field, 0, requests[i].limit);
        }
    }
    return runners;
}
/**
 * Builds one aggregation runner per requested facet value, but only when the adapter
 * supports extensions. The shared docListAndSet on the context is loaded lazily, at
 * most once across all iterations.
 */
private List<AggregationWaitable> buildWaitables(NodeContext context, RequestNode request) throws IOException {
    List<AggregationWaitable> runners = new LinkedList<>();
    FacetFieldAdapter adapter = new FacetFieldAdapter(context, request.type);
    if(request.values != null && adapter.hasExtension()) {
        for (int k = 0; k < request.values.length; ++k) {
            // load required docListAndSet once and only if necessary
            if (context.queryDomainList == null) {
                context.queryDomainList = context.req.getSearcher().getDocListAndSet(new MatchAllDocsQuery(), context.queryDomain, Sort.INDEXORDER, 0, 0);
            }
            // facet values are matched case-insensitively via lower-casing
            String facetQuery = buildFacetQuery(adapter.baseField, request.values[k].toLowerCase());
            runners.add(new AggregationWaitable(context, adapter, facetQuery, adapter.field, k, DEFAULT_NORM_LIMIT));
        }
    }
    return runners;
}
/**
 * Runs the query and returns its TopDocs, honoring the optional sorting from the
 * context. With a sort, a scoring collector is used unless the context explicitly
 * trades correctness for speed, in which case a plain sorted search (no scores) runs.
 */
private TopDocs toTopDocs( Query query, QueryContext context, IndexSearcher searcher ) throws IOException {
    Sort sorting = context != null ? context.getSorting() : null;
    TopDocs topDocs;
    if ( sorting == null && context != null ) {
        // context present but no sort requested: plain top-N search
        topDocs = searcher.search( query, context.getTop() );
    } else {
        if ( context == null || !context.getTradeCorrectnessForSpeed() ) {
            // NOTE(review): a null context reaches this branch and context.getTop()
            // then throws NPE — confirm callers never pass a null context
            TopFieldCollector collector = LuceneDataSource.scoringCollector( sorting, context.getTop() );
            searcher.search( query, collector );
            topDocs = collector.topDocs();
        } else {
            // speed over correctness: sorted search without score tracking
            topDocs = searcher.search( query, null, context.getTop(), sorting );
        }
    }
    return topDocs;
}
/**
 * Loads all categories via full-text search, projected to (id, category) and sorted
 * by category name.
 * NOTE(review): the projection yields one Object[] per row, so the elements of the
 * returned list are Object[] despite the List&lt;String&gt; declaration — callers reading
 * them as String would fail; verify intended result shape.
 */
public List<String> extractCategories() {
    FullTextEntityManager fullTextEntityManager = org.hibernate.search.jpa.Search.getFullTextEntityManager( em );
    QueryBuilder queryBuilder = fullTextEntityManager.getSearchFactory().buildQueryBuilder().forEntity( Categories.class ).get();
    // match-all query over the Categories index
    org.apache.lucene.search.Query query = queryBuilder.all().createQuery();
    FullTextQuery fullTextQuery = fullTextEntityManager.createFullTextQuery( query , Categories.class );
    fullTextQuery.setProjection( FullTextQuery.ID , "category" );
    Sort sort = new Sort( new SortField( "category" , SortField.STRING ) );
    fullTextQuery.setSort( sort );
    //fullTextQuery.initializeObjectsWith(ObjectLookupMethod.SKIP, DatabaseRetrievalMethod.FIND_BY_ID);
    List<String> results = fullTextQuery.getResultList();
    return results;
}
/**
 * Search, sorting by {@link Sort}, and computing
 * drill down and sideways counts.
 */
public DrillSidewaysResult search(DrillDownQuery query, Filter filter, FieldDoc after, int topN, Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException {
    if (filter != null) {
        // fold the filter into the drill-down query
        query = new DrillDownQuery(config, filter, query);
    }
    if (sort != null) {
        // clamp topN to the index size
        int limit = searcher.getIndexReader().maxDoc();
        if (limit == 0) {
            limit = 1; // the collector does not allow numHits = 0
        }
        topN = Math.min(topN, limit);
        final TopFieldCollector hitCollector = TopFieldCollector.create(sort, topN, after, true, doDocScores, doMaxScore, true);
        DrillSidewaysResult r = search(query, hitCollector);
        // keep the facets from the sideways search, but the hits from the sorted collector
        return new DrillSidewaysResult(r.facets, hitCollector.topDocs());
    } else {
        // no sort requested: delegate to the relevance-ranked variant
        return search(after, query, topN);
    }
}
/** Hits collected in ascending doc order must come back sorted by the requested field order. */
@Test
public void shouldReturnIndexHitsInGivenSortOrder() throws Exception {
    // given: a collector that does not track scores
    DocValuesCollector collector = new DocValuesCollector( false );
    IndexReaderStub readerStub = indexReaderWithMaxDocs( 43 );
    // when: docs are collected in ascending order
    collector.doSetNextReader( readerStub.getContext() );
    collector.collect( 1 );
    collector.collect( 3 );
    collector.collect( 37 );
    collector.collect( 42 );
    // then: hits are returned by "id" descending
    Sort byIdDescending = new Sort( new SortField( "id", SortField.Type.LONG, true ) );
    IndexHits<Document> indexHits = collector.getIndexHits( byIdDescending );
    assertEquals( 4, indexHits.size() );
    assertEquals( "42", indexHits.next().get( "id" ) );
    assertEquals( "37", indexHits.next().get( "id" ) );
    assertEquals( "3", indexHits.next().get( "id" ) );
    assertEquals( "1", indexHits.next().get( "id" ) );
    assertFalse( indexHits.hasNext() );
}
/**
 * Builds a spatial "within distance" query context: matches everything inside a circle
 * of MAX_DISTANCE km around the given point and sorts results by distance (ascending).
 */
@Override
public SpatialQueryContext build(Map<String, Object> query) throws Exception {
    Shape shape = parseShape(query);
    double distance = 0;
    // missing MAX_DISTANCE defaults to a zero-radius circle
    Number n = (Number) query.get(MAX_DISTANCE);
    if (n != null) {
        distance = n.doubleValue();
    }
    Point p = (Point) shape;
    // convert the km radius to degrees for the spatial circle
    SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, factory.context().makeCircle(p.getX(), p.getY(), DistanceUtils.dist2Degrees(distance, DistanceUtils.EARTH_MEAN_RADIUS_KM)));
    Filter filter = manager.strategy().makeFilter(args);
    // distance from the query point, used purely for sorting
    ValueSource valueSource = manager.strategy().makeDistanceValueSource(p);
    IndexSearcher searcher = manager.searcher();
    Sort distSort = new Sort(valueSource.getSortField(false)).rewrite(searcher);
    return new SpatialQueryContext(null, searcher, new MatchAllDocsQuery(), filter, distSort).setSpatialArgs(args);
}
public void testEmptyIndex() throws Exception { IndexSearcher empty = newSearcher(new MultiReader()); Query query = new TermQuery(new Term("contents", "foo")); Sort sort = new Sort(); sort.setSort(new SortedSetSortField("sortedset", false)); TopDocs td = empty.search(query, null, 10, sort, true, true); assertEquals(0, td.totalHits); // for an empty index, any selector should work for (SortedSetSortField.Selector v : SortedSetSortField.Selector.values()) { sort.setSort(new SortedSetSortField("sortedset", false, v)); td = empty.search(query, null, 10, sort, true, true); assertEquals(0, td.totalHits); } }
/** Hits must come back ordered by score when RELEVANCE sorting is requested. */
@Test
public void shouldReturnIndexHitsOrderedByRelevance() throws Exception {
    // given: a collector that keeps scores
    DocValuesCollector collector = new DocValuesCollector( true );
    IndexReaderStub readerStub = indexReaderWithMaxDocs( 42 );
    // when: doc 1 scores 1.0, doc 2 scores 2.0
    collector.doSetNextReader( readerStub.getContext() );
    collector.setScorer( constantScorer( 1.0f ) );
    collector.collect( 1 );
    collector.setScorer( constantScorer( 2.0f ) );
    collector.collect( 2 );
    // then: higher-scoring doc 2 is returned first, with its score exposed
    IndexHits<Document> indexHits = collector.getIndexHits( Sort.RELEVANCE );
    assertEquals( 2, indexHits.size() );
    assertEquals( "2", indexHits.next().get( "id" ) );
    assertEquals( 2.0f, indexHits.currentScore(), 0.0f );
    assertEquals( "1", indexHits.next().get( "id" ) );
    assertEquals( 1.0f, indexHits.currentScore(), 0.0f );
    assertFalse( indexHits.hasNext() );
}
public List<Products> extractPromotionalProducts() { FullTextEntityManager fullTextEntityManager = org.hibernate.search.jpa.Search.getFullTextEntityManager( em ); org.apache.lucene.search.Query query = NumericRangeQuery.newDoubleRange( "old_price" , 0.0d , 1000d , false , true ); FullTextQuery fullTextQuery = fullTextEntityManager.createFullTextQuery( query , Products.class ); Sort sort = new Sort( new SortField( "price" , SortField.DOUBLE ) ); fullTextQuery.setSort( sort ); //fullTextQuery.initializeObjectsWith(ObjectLookupMethod.SKIP, DatabaseRetrievalMethod.FIND_BY_ID); List results = fullTextQuery.getResultList(); return results; }
/**
 * Pages through the products of a category (3 per page), sorted by price ascending.
 * Returns a single-entry map from the TOTAL match count to the requested page of
 * products, so callers get both the page and the overall size in one call.
 */
public Map<Integer , List<Products>> extractProducts( String id , int page ) {
    FullTextEntityManager fullTextEntityManager = org.hibernate.search.jpa.Search.getFullTextEntityManager( em );
    QueryBuilder queryBuilder = fullTextEntityManager.getSearchFactory().buildQueryBuilder().forEntity( Products.class ).get();
    // keyword match on the embedded category id
    org.apache.lucene.search.Query query = queryBuilder.keyword().onField( "category.id" ).matching( id ).createQuery();
    FullTextQuery fullTextQuery = fullTextEntityManager.createFullTextQuery( query , Products.class );
    Sort sort = new Sort( new SortField( "price" , SortField.DOUBLE ) );
    fullTextQuery.setSort( sort );
    //fullTextQuery.initializeObjectsWith(ObjectLookupMethod.SKIP, DatabaseRetrievalMethod.FIND_BY_ID);
    // fixed page size of 3
    fullTextQuery.setFirstResult( page * 3 );
    fullTextQuery.setMaxResults( 3 );
    List<Products> results = fullTextQuery.getResultList();
    Map<Integer , List<Products>> results_and_total = new HashMap<Integer , List<Products>>();
    results_and_total.put( fullTextQuery.getResultSize() , results );
    return results_and_total;
}
/**
 * Serializes each search group's sort values into a NamedList keyed by group value.
 * Values are marshalled through their schema field type so they survive the wire;
 * score sorts (no field name) are passed through unchanged.
 */
private NamedList serializeSearchGroup(Collection<SearchGroup<BytesRef>> data, Sort groupSort) {
    NamedList<Object[]> result = new NamedList<>();
    for (SearchGroup<BytesRef> searchGroup : data) {
        Object[] convertedSortValues = new Object[searchGroup.sortValues.length];
        for (int i = 0; i < searchGroup.sortValues.length; i++) {
            Object sortValue = searchGroup.sortValues[i];
            // only sorts backed by a schema field can be marshalled
            SchemaField field = groupSort.getSort()[i].getField() != null ? searcher.getSchema().getFieldOrNull(groupSort.getSort()[i].getField()) : null;
            if (field != null) {
                FieldType fieldType = field.getType();
                if (sortValue != null) {
                    sortValue = fieldType.marshalSortValue(sortValue);
                }
            }
            convertedSortValues[i] = sortValue;
        }
        // null group value means "documents without a value for the group field"
        String groupValue = searchGroup.groupValue != null ? searchGroup.groupValue.utf8ToString() : null;
        result.add(groupValue, convertedSortValues);
    }
    return result;
}
/**
 * Returns an iterator over all NumericDocValues of {@code field}, ordered by the
 * given sort. Falls back to plain index order when no sort (or INDEXORDER) is given.
 *
 * @param field the field that contains the values
 * @param sort how the results should be sorted
 * @return an iterator over all NumericDocValues from the given field with respect to the given sort
 * @throws IOException if reading the index fails
 */
public PrimitiveLongIterator getSortedValuesIterator( String field, Sort sort ) throws IOException {
    // reference comparison relies on Sort.INDEXORDER being a shared constant
    if ( sort == null || sort == Sort.INDEXORDER ) {
        return getValuesIterator( field );
    }
    int size = getTotalHits();
    if ( size == 0 ) {
        return PrimitiveLongCollections.emptyIterator();
    }
    // collect ALL hits (top 'size') so the iterator covers every matching doc
    TopDocs topDocs = getTopDocs( sort, size );
    LeafReaderContext[] contexts = getLeafReaderContexts( getMatchingDocs() );
    return new TopDocsValuesIterator( topDocs, contexts, field );
}
/** tests the returned sort values are correct: sorting by sqrt(_score) must expose sqrt of each doc's score */
public void testSortValues() throws Exception {
    Expression expr = JavascriptCompiler.compile("sqrt(_score)");
    SimpleBindings bindings = new SimpleBindings();
    // bind _score to the relevance score
    bindings.add(new SortField("_score", SortField.Type.SCORE));
    Sort sort = new Sort(expr.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
    for (int i = 0; i < 3; i++) {
        FieldDoc d = (FieldDoc) td.scoreDocs[i];
        float expected = (float) Math.sqrt(d.score);
        float actual = ((Double)d.fields[0]).floatValue();
        // tolerance scales with the magnitude of the values being compared
        assertEquals(expected, actual, CheckHits.explainToleranceDelta(expected, actual));
    }
}
/** Uses variables with $: both "$0" and "$score" bind to the score, so the expression doubles it */
public void testDollarVariable() throws Exception {
    Expression expr = JavascriptCompiler.compile("$0+$score");
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("$0", SortField.Type.SCORE));
    bindings.add(new SortField("$score", SortField.Type.SCORE));
    Sort sort = new Sort(expr.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
    for (int i = 0; i < 3; i++) {
        FieldDoc d = (FieldDoc) td.scoreDocs[i];
        // score + score == 2 * score
        float expected = 2*d.score;
        float actual = ((Double)d.fields[0]).floatValue();
        assertEquals(expected, actual, CheckHits.explainToleranceDelta(expected, actual));
    }
}
/** tests expression referring to another expression: expr2 = 2 * expr1 where expr1 = _score */
public void testExpressionRefersToExpression() throws Exception {
    Expression expr1 = JavascriptCompiler.compile("_score");
    Expression expr2 = JavascriptCompiler.compile("2*expr1");
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("_score", SortField.Type.SCORE));
    // bind the name "expr1" to the first compiled expression
    bindings.add("expr1", expr1);
    Sort sort = new Sort(expr2.getSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
    for (int i = 0; i < 3; i++) {
        FieldDoc d = (FieldDoc) td.scoreDocs[i];
        float expected = 2*d.score;
        float actual = ((Double)d.fields[0]).floatValue();
        assertEquals(expected, actual, CheckHits.explainToleranceDelta(expected, actual));
    }
}
/**
 * Cache key combining query, filters, sort and cache flags. The hash is precomputed
 * once at construction; filter hashes are summed (order-insensitive) while sort
 * fields are mixed in order-sensitively.
 */
public QueryResultKey(Query query, List<Query> filters, Sort sort, int nc_flags) {
    this.query = query;
    this.sort = sort;
    this.filters = filters;
    this.nc_flags = nc_flags;
    int h = query.hashCode();
    if (filters != null) {
        for (Query filt : filters)
            // NOTE: simple summation used here so keys with the same filters but in
            // different orders get the same hashCode
            h += filt.hashCode();
    }
    // a null sort falls back to the shared default sort fields
    sfields = (this.sort !=null) ? this.sort.getSort() : defaultSort;
    for (SortField sf : sfields) {
        h = h*29 + sf.hashCode();
    }
    hc = h;
}
/**
 * Computes the expected group-head doc id for every group whose content matches the
 * search term: docs are bucketed by group value, each bucket sorted by the doc sort,
 * and the first doc of each bucket is its head.
 * Improvement: the containsKey/get/put bucketing is replaced with computeIfAbsent,
 * and the output loop iterates the entry set instead of re-looking up each key —
 * identical behavior, fewer map lookups.
 */
private int[] createExpectedGroupHeads(String searchTerm, GroupDoc[] groupDocs, Sort docSort, boolean sortByScoreOnly, int[] fieldIdToDocID) {
    Map<BytesRef, List<GroupDoc>> groupHeads = new HashMap<>();
    for (GroupDoc groupDoc : groupDocs) {
        // only docs matching the search term participate
        if (!groupDoc.content.startsWith(searchTerm)) {
            continue;
        }
        groupHeads.computeIfAbsent(groupDoc.group, k -> new ArrayList<>()).add(groupDoc);
    }
    int[] allGroupHeads = new int[groupHeads.size()];
    int i = 0;
    for (Map.Entry<BytesRef, List<GroupDoc>> entry : groupHeads.entrySet()) {
        List<GroupDoc> docs = entry.getValue();
        // head = first doc under the requested sort
        Collections.sort(docs, getComparator(docSort, sortByScoreOnly, fieldIdToDocID));
        allGroupHeads[i++] = docs.get(0).id;
    }
    return allGroupHeads;
}
/** Relevance sorting: the node sharing more terms with the query must rank first with a higher score. */
@Test
public void testScoring() {
    Index<Node> index = nodeIndex( LuceneIndexImplementation.FULLTEXT_CONFIG );
    Node node1 = graphDb.createNode();
    Node node2 = graphDb.createNode();
    String key = "text";
    // Where the heck did I get this sentence from?
    index.add( node1, key, "a time where no one was really awake" );
    index.add( node2, key, "once upon a time there was" );
    restartTx();
    IndexHits<Node> hits = index.query( key, new QueryContext( "once upon a time was" ).sort( Sort.RELEVANCE ) );
    Node hit1 = hits.next();
    float score1 = hits.currentScore();
    Node hit2 = hits.next();
    float score2 = hits.currentScore();
    // node2 overlaps the query on more terms, so it must come first and score higher
    assertEquals( node2, hit1 );
    assertEquals( node1, hit2 );
    assertTrue( "Score 1 (" + score1 + ") should have been higher than score 2 (" + score2 + ")", score1 > score2 );
}