/** Diversified sampling must cap each author's contribution per shard at maxDocsPerValue. */
public void testSimpleDiversity() throws Exception {
    final int maxDocsPerAuthor = 1;
    DiversifiedAggregationBuilder agg = new DiversifiedAggregationBuilder("sample").shardSize(100);
    agg.field("author").maxDocsPerValue(maxDocsPerAuthor).executionHint(randomExecutionHint());
    agg.subAggregation(terms("authors").field("author"));
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("genre", "fantasy"))
            .setFrom(0).setSize(60)
            .addAggregation(agg)
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    Sampler samplerResult = searchResponse.getAggregations().get("sample");
    Terms authorTerms = samplerResult.getAggregations().get("authors");
    // Across all shards no author may exceed shardCount * perShardCap docs.
    for (Terms.Bucket bucket : authorTerms.getBuckets()) {
        assertThat(bucket.getDocCount(), lessThanOrEqualTo((long) NUM_SHARDS * maxDocsPerAuthor));
    }
}
/** Plain (non-diversified) sampler: the most prolific author keeps all 3 of their books. */
public void testSimpleSampler() throws Exception {
    SamplerAggregationBuilder agg = sampler("sample").shardSize(100);
    agg.subAggregation(terms("authors").field("author"));
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("genre", "fantasy"))
            .setFrom(0).setSize(60)
            .addAggregation(agg)
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    Sampler samplerResult = searchResponse.getAggregations().get("sample");
    Terms authorTerms = samplerResult.getAggregations().get("authors");
    long largestBucket = 0;
    for (Terms.Bucket bucket : authorTerms.getBuckets()) {
        largestBucket = Math.max(bucket.getDocCount(), largestBucket);
    }
    // With no diversity constraint the dominant author contributes exactly 3 docs.
    assertThat(largestBucket, equalTo(3L));
}
/** Sampling a fully unmapped index yields an empty sample and no author buckets. */
public void testUnmappedChildAggNoDiversity() throws Exception {
    SamplerAggregationBuilder agg = sampler("sample").shardSize(100);
    agg.subAggregation(terms("authors").field("author"));
    SearchResponse searchResponse = client().prepareSearch("idx_unmapped")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("genre", "fantasy"))
            .setFrom(0).setSize(60)
            .addAggregation(agg)
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    Sampler samplerResult = searchResponse.getAggregations().get("sample");
    assertThat(samplerResult.getDocCount(), equalTo(0L));
    Terms authorTerms = samplerResult.getAggregations().get("authors");
    assertThat(authorTerms.getBuckets().size(), equalTo(0));
}
/** With one mapped and one unmapped index, the sampler still gathers docs from the mapped one. */
public void testPartiallyUnmappedChildAggNoDiversity() throws Exception {
    SamplerAggregationBuilder agg = sampler("sample").shardSize(100);
    agg.subAggregation(terms("authors").field("author"));
    SearchResponse searchResponse = client().prepareSearch("idx_unmapped", "test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("genre", "fantasy"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(agg)
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    Sampler samplerResult = searchResponse.getAggregations().get("sample");
    assertThat(samplerResult.getDocCount(), greaterThan(0L));
    Terms authorTerms = samplerResult.getAggregations().get("authors");
    assertThat(authorTerms.getBuckets().size(), greaterThan(0));
}
public void testFilteredAnalysis() throws Exception { SearchResponse response = client().prepareSearch("test") .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("description", "weller")) .setFrom(0).setSize(60).setExplain(true) .addAggregation(significantTerms("mySignificantTerms").field("description") .minDocCount(1).backgroundFilter(QueryBuilders.termsQuery("description", "paul"))) .execute() .actionGet(); assertSearchResponse(response); SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms"); HashSet<String> topWords = new HashSet<String>(); for (Bucket topTerm : topTerms) { topWords.add(topTerm.getKeyAsString()); } //The word "paul" should be a constant of all docs in the background set and therefore not seen as significant assertFalse(topWords.contains("paul")); //"Weller" is the only Paul who was in The Jam and therefore this should be identified as a differentiator from the background of all other Pauls. assertTrue(topWords.contains("jam")); }
/** Explicitly setting JLHScore (the default heuristic) must find the expected terms. */
public void testDefaultSignificanceHeuristic() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(new JLHScore())
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(sigTerms);
}
/** The mutual-information heuristic must also surface the expected significant terms. */
public void testMutualInformation() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(new MutualInformation(false, true))
                    .minDocCount(1))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(sigTerms);
}
/**
 * Tests {@link ElasticsearchQueryBuilderVisitor#buildSimpleExpression(PrimitiveStatement)} for the case where
 * we're building a {@link ConditionType#EQUALS} expression.
 */
@SuppressWarnings("unchecked")
@Test
public void testBuildSimpleEqualsExpression() throws Exception {
    final Object expectedValue = "hello";
    final String propertyName = "property";

    doReturn(expectedValue).when(classValue).getValue();
    doReturn(EQUALS).when(statement).getCondition();
    doReturn(propertyName).when(statement).getProperty();

    // An EQUALS condition must translate into a term query.
    assertThat(visitor.buildSimpleExpression(statement), instanceOf(TermQueryBuilder.class));

    verify(visitor).buildSimpleExpression(statement);
    verify(visitor).doGetPrimitiveFieldClass(statement);
    verify(visitor).validateNotCollectionCheck(statement, classValue);
    verify(visitor).createTermQuery(propertyName, expectedValue);
    verify(visitor).getEnumSafeValue(classValue);
    verify(statement).getProperty();
    verify(statement).getCondition();
    verifyNoMoreCollaboration();
}
@Override public long countReplies(String id) { // Prepare count request SearchRequestBuilder searchRequest = client .prepareSearch(getIndex()) .setTypes(getType()) .setFetchSource(false) .setSearchType(SearchType.QUERY_AND_FETCH) .setSize(0); // Query = filter on reference TermQueryBuilder query = QueryBuilders.termQuery(RecordComment.PROPERTY_REPLY_TO_JSON, id); searchRequest.setQuery(query); // Execute query try { SearchResponse response = searchRequest.execute().actionGet(); return response.getHits().getTotalHits(); } catch(SearchPhaseExecutionException e) { // Failed or no item on index logger.error(String.format("Error while counting comment replies: %s", e.getMessage()), e); } return 1; }
private Driver getDriver(Credentials credentials) { TermQueryBuilder query = QueryBuilders.termQuery("credentialsId", credentials.id()); SearchResponse response = Start.get().getElasticClient()// .prepareSearch(credentials.backendId(), "driver")// .setQuery(query)// .setSize(1)// .get(); if (response.getHits().getTotalHits() != 1) throw Exceptions.illegalArgument("credentials [%s] has more than one driver", credentials.name()); SearchHit hit = response.getHits().getHits()[0]; Driver driver = Json7.toPojo(hit.sourceAsString(), Driver.class); driver.id = hit.id(); return driver; }
void runTest() { System.out.println("Running"); IResult r = null; // StringBuilder buf = new StringBuilder(); // buf.append("{\"from\":"+0+",\"size\":"+30+","); //fails {"size":30, "from":0,"term": {"sbOf": "TypeType"}} //fails {"from":0, "size":30,"query":{"term": {"sbOf": "TypeType"}}} //fails: {"from":0,"size":30,"query":{"term":{"sbOf":"TypeType"}}} //fails: {"from":0,"size":30,"query":{"match":{"sbOf":"TypeType"}}} //http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-from-size.html TermQueryBuilder termQuery = QueryBuilders.termQuery(ITopicQuestsOntology.SUBCLASS_OF_PROPERTY_TYPE, ITopicQuestsOntology.TYPE_TYPE); //StringBuilder buf1 = new StringBuilder("\"query\":{\"term\":{"); // StringBuilder buf1 = new StringBuilder("\"term\": {"); //buf1.append("\""+ITopicQuestsOntology.SUBCLASS_OF_PROPERTY_TYPE+"\":\""+ITopicQuestsOntology.TYPE_TYPE+"\"}}}"); // buf1.append("\""+ITopicQuestsOntology.SUBCLASS_OF_PROPERTY_TYPE+"\": \""+ITopicQuestsOntology.TYPE_TYPE+"\"}}"); // buf.append(termQuery.toString()); // StringBuilder buf = new StringBuilder("{\"term\":{"); // buf.append("\""+ITopicQuestsOntology.SUBCLASS_OF_PROPERTY_TYPE+"\": \""+ITopicQuestsOntology.TYPE_TYPE+"\"},"); // buf.append("\"from\":"+10+",\"size\":"+30+"}"); r = database.runQuery(termQuery.toString(), 3, 2, credentials); System.out.println("Done "+r.getErrorString()+" "+r.getResultObject()); environment.shutDown(); }
/** Rewriting a template query must unwrap to the inner query; an outer boost wraps it in a bool. */
public void testRewriteWithInnerBoost() throws IOException {
    final TermQueryBuilder innerQuery = new TermQueryBuilder("foo", "bar").boost(2);

    // No boost on the template: the rewrite is the inner query verbatim.
    QueryBuilder templateQuery = new TemplateQueryBuilder(new Script(ScriptType.INLINE, "mockscript",
            innerQuery.toString(),
            Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()),
            Collections.emptyMap()));
    assertEquals(innerQuery, templateQuery.rewrite(createShardContext()));

    // A boost on the template itself wraps the inner query in a boosted bool "must".
    templateQuery = new TemplateQueryBuilder(new Script(ScriptType.INLINE, "mockscript",
            innerQuery.toString(),
            Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()),
            Collections.emptyMap())).boost(3);
    assertEquals(new BoolQueryBuilder().must(innerQuery).boost(3), templateQuery.rewrite(createShardContext()));
}
public void testPartiallyUnmappedDiversifyField() throws Exception { // One of the indexes is missing the "author" field used for // diversifying results DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100).field("author") .maxDocsPerValue(1); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped_author", "test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg) .execute().actionGet(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), greaterThan(0L)); Terms authors = sample.getAggregations().get("authors"); assertThat(authors.getBuckets().size(), greaterThan(0)); }
public void testWhollyUnmappedDiversifyField() throws Exception { //All of the indices are missing the "author" field used for diversifying results int MAX_DOCS_PER_AUTHOR = 1; DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped", "idx_unmapped_author").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).execute().actionGet(); assertSearchResponse(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), equalTo(0L)); Terms authors = sample.getAggregations().get("authors"); assertNull(authors); }
public void testStructuredAnalysis() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("fact_category")
                    .executionHint(randomExecutionHint())
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    // Searching a snowboarder's name: the top numeric category must be snowboarding.
    Number topCategory = (Number) sigTerms.getBuckets().iterator().next().getKey();
    assertTrue(topCategory.equals(Long.valueOf(SNOWBOARDING_CATEGORY)));
}
public void testStructuredAnalysisWithIncludeExclude() throws Exception {
    // Excluding the music category forces the "other" category to the top.
    long[] excludeTerms = { MUSIC_CATEGORY };
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "paul"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("fact_category")
                    .executionHint(randomExecutionHint())
                    .minDocCount(1)
                    .includeExclude(new IncludeExclude(null, excludeTerms)))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    Number topCategory = (Number) sigTerms.getBuckets().iterator().next().getKey();
    assertTrue(topCategory.equals(Long.valueOf(OTHER_CATEGORY)));
}
/** Significant terms over an unmapped index must return an empty bucket list. */
public void testUnmapped() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx_unmapped")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("fact_category")
                    .executionHint(randomExecutionHint())
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    assertThat(sigTerms.getBuckets().size(), equalTo(0));
}
public void testTextAnalysis() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(sigTerms);
}
/** Same as the plain text-analysis test, but with the GND significance heuristic. */
public void testTextAnalysisGND() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(new GND(true))
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(sigTerms);
}
/** Same as the plain text-analysis test, but with the chi-square significance heuristic. */
public void testTextAnalysisChiSquare() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(new ChiSquare(false, true))
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(sigTerms);
}
/** Same as the plain text-analysis test, but with the percentage-score heuristic. */
public void testTextAnalysisPercentageScore() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(new PercentageScore())
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(sigTerms);
}
public void testBadFilteredAnalysis() throws Exception { // Deliberately using a bad choice of filter here for the background context in order // to test robustness. // We search for the name of a snowboarder but use music-related content (fact_category:1) // as the background source of term statistics. SearchResponse response = client().prepareSearch("test") .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("description", "terje")) .setFrom(0).setSize(60).setExplain(true) .addAggregation(significantTerms("mySignificantTerms").field("description") .minDocCount(2).backgroundFilter(QueryBuilders.termQuery("fact_category", 1))) .execute() .actionGet(); assertSearchResponse(response); SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms"); // We expect at least one of the significant terms to have been selected on the basis // that it is present in the foreground selection but entirely missing from the filtered // background used as context. boolean hasMissingBackgroundTerms = false; for (Bucket topTerm : topTerms) { if (topTerm.getSupersetDf() == 0) { hasMissingBackgroundTerms = true; break; } } assertTrue(hasMissingBackgroundTerms); }
/** With one mapped and one unmapped index, significant terms still come from the mapped one. */
public void testPartiallyUnmapped() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx_unmapped", "test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .minDocCount(2))
            .execute().actionGet();
    assertSearchResponse(searchResponse);
    SignificantTerms sigTerms = searchResponse.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(sigTerms);
}
/** Raw JSON should be usable as a query, inside a bool, and as a post filter. */
public void testPassQueryOrFilterAsJSONString() throws Exception {
    createIndex("test");
    client().prepareIndex("test", "type1", "1")
            .setSource("field1", "value1_1", "field2", "value2_1")
            .setRefreshPolicy(IMMEDIATE).get();

    String termJson = "{ \"term\" : { \"field1\" : \"value1_1\" } }";
    WrapperQueryBuilder wrapper = new WrapperQueryBuilder(termJson);
    assertHitCount(client().prepareSearch().setQuery(wrapper).get(), 1L);

    BoolQueryBuilder combined = boolQuery()
            .must(wrapper)
            .must(new TermQueryBuilder("field2", "value2_1"));
    assertHitCount(client().prepareSearch().setQuery(combined).get(), 1L);

    // The same JSON accepted as a post filter via the wrapperQuery factory.
    WrapperQueryBuilder wrapperFilter = wrapperQuery(termJson);
    assertHitCount(client().prepareSearch().setPostFilter(wrapperFilter).get(), 1L);
}
/** Picks one of three randomly-boosted query builders for nested-sort testing. */
protected static QueryBuilder randomNestedFilter() {
    // Note: the order of random-* calls is preserved so seeded runs stay reproducible.
    switch (randomIntBetween(0, 2)) {
        case 0:
            return new MatchAllQueryBuilder().boost(randomFloat());
        case 1:
            return new IdsQueryBuilder().boost(randomFloat());
        case 2:
            return new TermQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomDouble())
                    .boost(randomFloat());
        default:
            throw new IllegalStateException("Only three query builders supported for testing sort");
    }
}
/** Rewrite must unwrap the wrapper query while preserving every function-score setting. */
public void testRewrite() throws IOException {
    FunctionScoreQueryBuilder original = new FunctionScoreQueryBuilder(
            new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()))
            .boostMode(CombineFunction.REPLACE)
            .scoreMode(FiltersFunctionScoreQuery.ScoreMode.SUM)
            .setMinScore(1)
            .maxBoost(100);
    FunctionScoreQueryBuilder rewritten =
            (FunctionScoreQueryBuilder) original.rewrite(createShardContext());
    assertNotSame(original, rewritten);
    assertEquals(rewritten.query(), new TermQueryBuilder("foo", "bar"));
    assertEquals(rewritten.boostMode(), CombineFunction.REPLACE);
    assertEquals(rewritten.scoreMode(), FiltersFunctionScoreQuery.ScoreMode.SUM);
    assertEquals(rewritten.getMinScore(), 1f, 0.0001);
    assertEquals(rewritten.maxBoost(), 100f, 0.0001);
}
public void testRewriteWithFunction() throws IOException {
    QueryBuilder wrappedFilter = new WrapperQueryBuilder(new TermQueryBuilder("tq", "1").toString());
    TermQueryBuilder plainFilter = new TermQueryBuilder("tq", "2");
    QueryBuilder baseQuery = randomBoolean()
            ? new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString())
            : new TermQueryBuilder("foo", "bar");
    FunctionScoreQueryBuilder original = new FunctionScoreQueryBuilder(baseQuery,
            new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(wrappedFilter, new RandomScoreFunctionBuilder()),
                    new FunctionScoreQueryBuilder.FilterFunctionBuilder(plainFilter, new RandomScoreFunctionBuilder()) });
    FunctionScoreQueryBuilder rewritten = (FunctionScoreQueryBuilder) original.rewrite(createShardContext());
    assertNotSame(original, rewritten);
    assertEquals(rewritten.query(), new TermQueryBuilder("foo", "bar"));
    // The wrapped filter is rewritten to the concrete term query...
    assertEquals(rewritten.filterFunctionBuilders()[0].getFilter(), new TermQueryBuilder("tq", "1"));
    // ...while the already-concrete filter is reused as the same instance.
    assertSame(rewritten.filterFunctionBuilders()[1].getFilter(), plainFilter);
}
/**
 * Tests {@link ElasticsearchQueryBuilderVisitor#canMergeQueryParts(QueryBuilder, QueryBuilder, ConditionType)} for
 * the case where the previous query part is not a {@link RangeQueryBuilder}. You might see this with something like
 * an expression like {@code foo==3;foo=le=400}.
 */
@Test
public void testCanMergeQueryPartsForNonRangePreviousQueryPart() throws Exception {
    final TermQueryBuilder nonRangePart = mock(TermQueryBuilder.class);
    final RangeQueryBuilder rangePart = mock(RangeQueryBuilder.class);

    // Both parts target the same field, yet a term query can never merge into a range.
    doReturn("taters").when(nonRangePart).fieldName();
    doReturn("taters").when(rangePart).fieldName();

    assertThat(visitor.canMergeQueryParts(nonRangePart, rangePart, ConditionType.AND), is(false));
}
/**
 * Tests {@link ElasticsearchQueryBuilderVisitor#createTermQuery(String, Object)} to make sure it does what we
 * think it does.
 */
@Test
public void testCreateTermQuery() throws Exception {
    doCallRealMethod().when(visitor).createTermQuery(anyString(), any());

    final TermQueryBuilder termQuery = visitor.createTermQuery("fieldNameTaters", "tatersValue");

    // Field name and value must be carried through untouched.
    assertThat(termQuery.fieldName(), is("fieldNameTaters"));
    assertThat(termQuery.value(), is("tatersValue"));

    verify(visitor).createTermQuery("fieldNameTaters", "tatersValue");
    verifyNoMoreCollaboration(visitor);
}
/**
 * Builds a term query for the given field and value, applying the boost only when one
 * was supplied.
 */
private static TermQueryBuilder createTermQuery(String name, Object text, Float boost) {
    TermQueryBuilder query = QueryBuilders.termQuery(name, text);
    return isNotNull(boost) ? query.boost(boost) : query;
}
@Test
public void shouldDeleteAllByQuery() {
    final Person saved = repository.save(PERSON);
    final TermQueryBuilder byFirstname = new TermQueryBuilder("firstname", PERSON.getFirstname());

    repository.deleteAllByQuery(byFirstname);

    // The matching person must have been removed.
    assertFalse(repository.exists(saved));
}
@Test
public void shouldNotDeleteAllByQuery() {
    final Person saved = repository.save(PERSON);
    // Query firstname against the LASTNAME value — deliberately matches nothing.
    final TermQueryBuilder nonMatching = new TermQueryBuilder("firstname", PERSON.getLastname());

    repository.deleteAllByQuery(nonMatching);

    assertTrue(repository.exists(saved));
    repository.delete(saved);
}
@Test
public void shouldFindByFirstname() {
    final Person saved = repository.save(PERSON);
    final TermQueryBuilder byFirstname = new TermQueryBuilder("firstname", PERSON.getFirstname());

    final List<Person> results = repository.search(byFirstname);

    assertEquals(1, results.size());
    assertEquals(saved, results.get(0));
    repository.delete(saved);
}
@Test
public void shouldFindByFirstnameAndLastname() {
    final Person saved = repository.save(PERSON);
    // Both term queries must match for the bool "must" clauses to select the person.
    final BoolQueryBuilder both = new BoolQueryBuilder()
            .must(new TermQueryBuilder("firstname", PERSON.getFirstname()))
            .must(new TermQueryBuilder("lastname", PERSON.getLastname()));

    final List<Person> results = repository.search(both);

    assertEquals(1, results.size());
    assertEquals(saved, results.get(0));
    repository.delete(saved);
}
/**
 * Fetches the most recently created master key for the configured algorithm, sorted
 * newest-first on "create_ts" with size 1, emitting an absent Optional when none exists.
 */
@Override
public Observable<Optional<PersistentMasterKey>> call(Void aVoid) {
    final Elasticsearch elasticSearch = vertxContext.verticle().elasticsearch();
    TermQueryBuilder query = termQuery("algorithm_name", algorithmDef.getAlgorithmName());
    SearchRequestBuilder request = elasticSearch.get()
            .prepareSearch(elasticSearch.masterKeyTypeIndex())
            .setVersion(true)
            .setTypes(elasticSearch.defaultType())
            .addSort("create_ts", DESC)
            .setQuery(query)
            .setSize(1)
            .setTimeout(timeValueMillis(elasticSearch.getDefaultSearchTimeout() - 10));
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(format("Search Request {%s,%s} = %s", elasticSearch.defaultType(), elasticSearch.masterKeyTypeIndex(), Jsonify.toString(request)));
    }
    return elasticSearch.execute(vertxContext, request, elasticSearch.getDefaultSearchTimeout())
            .flatMap(oSearchResponse -> {
                // BUG FIX: only unwrap the Optional when it is present. The previous code
                // called oSearchResponse.get() unconditionally for the debug log, which
                // throws when the response is absent and made the empty branch unreachable.
                if (oSearchResponse.isPresent()) {
                    SearchResponse searchResponse = oSearchResponse.get();
                    if (LOGGER.isDebugEnabled()) {
                        LOGGER.debug(format("Search Response {%s,%s} = %s", elasticSearch.defaultType(), elasticSearch.masterKeyTypeIndex(), Jsonify.toString(searchResponse)));
                    }
                    return from(searchResponse.getHits());
                } else {
                    return from(emptyList());
                }
            })
            .map(PersistentMasterKey::fromSearchHit)
            .map(Optional::of)
            .singleOrDefault(absent());
}
/**
 * Looks up the newest key record for the given container (sorted descending on
 * "create_ts", size 1) and maps the first non-empty hit to a PersistentContainerKey,
 * emitting an absent Optional when no usable hit is found.
 *
 * NOTE(review): {@code oSearchResponse.get()} is called without an {@code isPresent()}
 * check inside the map; if the execute() result can be absent this throws — verify
 * against Elasticsearch.execute's contract.
 */
@Override public Observable<Optional<PersistentContainerKey>> call(final PersistentContainer persistentContainer) { String containerName = persistentContainer.getId(); TermQueryBuilder query = termQuery("container_id", containerName); final Elasticsearch elasticSearch = vertxContext.verticle().elasticsearch(); if (LOGGER.isDebugEnabled()) { LOGGER.debug(format("Search Request {%s,%s} = %s", elasticSearch.defaultType(), elasticSearch.containerKeyIndex(), Jsonify.toString(query))); } SearchRequestBuilder request = elasticSearch.get() .prepareSearch(elasticSearch.containerKeyIndex()) .setTypes(elasticSearch.defaultType()) .addSort("create_ts", DESC) .setQuery(query) .setVersion(true) .setSize(1) .setTimeout(timeValueMillis(elasticSearch.getDefaultSearchTimeout() - 10)); return elasticSearch.execute(vertxContext, request, elasticSearch.getDefaultSearchTimeout()) .map(oSearchResponse -> { SearchResponse searchResponse = oSearchResponse.get(); if (LOGGER.isDebugEnabled()) { LOGGER.debug(format("Search Response {%s,%s} = %s", elasticSearch.defaultType(), elasticSearch.containerKeyIndex(), Jsonify.toString(searchResponse))); } for (SearchHit searchHit : searchResponse.getHits()) { if (!searchHit.isSourceEmpty()) { return of(fromSearchHit(persistentContainer, searchHit)); } } return Optional.<PersistentContainerKey>absent(); }); }
/** Builds the fixed query used by the randomized query-builder test harness. */
@Override
protected ExplorerQueryBuilder doCreateTestQueryBuilder() {
    final ExplorerQueryBuilder queryBuilder = new ExplorerQueryBuilder();
    queryBuilder.query(new TermQueryBuilder("foo", "bar"));
    queryBuilder.statsType("sum_raw_ttf");
    return queryBuilder;
}
/**
 * Register percolate queries on ElasticSearch, one term query per topic.
 *
 * @throws IOException if a registration fails
 */
private void registerPercolateQueries() throws IOException {
    store.registerPercolateQuery(INDEX_NAME, GITHUB_TOPIC, new TermQueryBuilder("content", "github"));
    store.registerPercolateQuery(INDEX_NAME, MALHAR_TOPIC, new TermQueryBuilder("content", "malhar"));
}
@Test public void queryTearm(){ initIndex(); TermQueryBuilder termQuery = QueryBuilders.termQuery("desc", "内心"); Iterator<Book> iterator = repository.search(termQuery).iterator(); while (iterator.hasNext()) { Book book = (Book) iterator.next(); print(book); } elasticsearchTemplate.deleteIndex(Book.class);//删除索引目录 }
/** * scroll search on a bigger index */ @Test public void testScrollSearch() { TermQueryBuilder termQuery = QueryBuilders.termQuery(fieldShakeSpeare, "henry"); SearchResponse scrollResponse = client.prepareSearch(indexShakeSpeare).setSearchType(SearchType.SCAN) .setScroll(new TimeValue(60000)).setQuery(termQuery) // actual retrieval size: number_of_shards * size = 5 * 40 = 200, in this case .setSize(40).execute().actionGet(); // scroll until no hits are returned int searchTime = 1; while (true) { System.out.println("Search Time: " + searchTime++); for (SearchHit hit : scrollResponse.getHits()) { // handle hit System.out.println(hit.sourceAsString()); } scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()).setScroll(new TimeValue(60000)) .execute().actionGet(); // first getHits returns SearchHits, second getHits returns SearchHit[] if (scrollResponse.getHits().getHits().length == 0) { break; } } }