/**
 * Delete documents using a query. Check what would be deleted first with a normal search query!
 * Elasticsearch once provided a native prepareDeleteByQuery method, but this was removed
 * in later versions. Instead, there is a plugin which iterates over search results,
 * see https://www.elastic.co/guide/en/elasticsearch/plugins/current/plugins-delete-by-query.html
 * We simulate the same behaviour here without the need of that plugin.
 *
 * @param indexName the name of the index to delete from
 * @param q the query selecting the documents to delete
 * @return the number of deleted documents
 */
public int deleteByQuery(String indexName, final QueryBuilder q) {
    Map<String, String> ids = new TreeMap<>();
    SearchResponse response = elasticsearchClient.prepareSearch(indexName).setSearchType(SearchType.QUERY_THEN_FETCH)
            .setScroll(new TimeValue(60000)).setQuery(q).setSize(100).execute().actionGet();
    while (true) {
        // accumulate the ids here, don't delete them right now to prevent an interference of the delete with the scroll
        for (SearchHit hit : response.getHits().getHits()) {
            ids.put(hit.getId(), hit.getType());
        }
        response = elasticsearchClient.prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000))
                .execute().actionGet();
        // termination
        if (response.getHits().getHits().length == 0) break;
    }
    return deleteBulk(indexName, ids);
}
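// Hedged usage sketch (not part of the original snippet): the index name and the term query below
// are illustrative assumptions. Run the same query through a normal search first to review what
// would be removed, then call the deleteByQuery(...) helper above.
public int purgeHost(String indexName, String host) {
    QueryBuilder q = QueryBuilders.termQuery("host", host); // hypothetical field name
    return deleteByQuery(indexName, q);
}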
@Override
public JsonNode search(JsonNode jsonQuery) {
    if (jsonQuery == null) {
        throw new IllegalArgumentException("JSON Query can not be null");
    }
    if (this.client == null) {
        throw new IllegalStateException("ElasticSearch client is closed");
    }
    QueryBuilder query = QueryBuilders.wrapperQuery(jsonQuery.toString());
    SearchResponse response = client.prepareSearch(indexNames.toArray(new String[indexNames.size()]))
            .setTypes(typeNames.toArray(new String[typeNames.size()]))
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setQuery(query)
            .get();
    JsonNode result = null;
    try {
        result = mapper.readTree(response.toString());
    } catch (IOException e) {
        log.warn("Can not parse ES response '{}' as JSON. Exception: {}", response.toString(), e);
    }
    return result;
}
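// Hedged usage sketch (illustrative only, not from the original source): serializes a match query to
// JSON with Jackson and hands it to the search(JsonNode) override above; the "title" field name and
// the ObjectMapper instance are assumptions.
public JsonNode searchByTitle(ObjectMapper sketchMapper, String title) throws IOException {
    JsonNode jsonQuery = sketchMapper.readTree("{\"match\": {\"title\": \"" + title + "\"}}");
    return search(jsonQuery); // raw Elasticsearch response parsed back into a JsonNode, or null on parse failure
}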
public void testScore() throws Exception {
    createIndex("test");
    ensureGreen("test");
    indexRandom(true,
            client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"),
            client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"),
            client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye"));
    ScoreFunctionBuilder<?> score = ScoreFunctionBuilders.scriptFunction(
            new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()));
    SearchRequestBuilder req = client().prepareSearch().setIndices("test");
    req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score)
            .boostMode(CombineFunction.REPLACE));
    req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent
    SearchResponse rsp = req.get();
    assertSearchResponse(rsp);
    SearchHits hits = rsp.getHits();
    assertEquals(3, hits.getTotalHits());
    assertEquals("1", hits.getAt(0).getId());
    assertEquals("3", hits.getAt(1).getId());
    assertEquals("2", hits.getAt(2).getId());
}
public RandomizingClient(Client client, Random random) {
    super(client);
    // we don't use the QUERY_AND_FETCH types that break quite a lot of tests
    // given that they return `size*num_shards` hits instead of `size`
    defaultSearchType = RandomPicks.randomFrom(random, Arrays.asList(
            SearchType.DFS_QUERY_THEN_FETCH,
            SearchType.QUERY_THEN_FETCH));
    if (random.nextInt(10) == 0) {
        defaultPreference = RandomPicks.randomFrom(random,
                EnumSet.of(Preference.PRIMARY_FIRST, Preference.LOCAL)).type();
    } else if (random.nextInt(10) == 0) {
        String s = TestUtil.randomRealisticUnicodeString(random, 1, 10);
        defaultPreference = s.startsWith("_") ? null : s; // '_' is a reserved character
    } else {
        defaultPreference = null;
    }
    this.batchedReduceSize = 2 + random.nextInt(10);
}
public String selectAll(String indexs, String types, String condition) {
    try {
        if (client == null) {
            init();
        }
        SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(","));
        request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        request.setQuery(QueryBuilders.queryStringQuery(condition));
        request.setExplain(false);
        SearchResponse response = request.get();
        return response.toString();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
public String selectTermAll(String indexs, String types, String field, String value) {
    try {
        if (StringUtil.isEmpty(indexs)) indexs = "_all";
        if (xclient == null) {
            init();
        }
        SearchSourceBuilder search = new SearchSourceBuilder();
        if (!StringUtil.isEmpty(field) && !StringUtil.isEmpty(value) && !(field.matches(regex) || field.matches(value))) {
            search.query(QueryBuilders.termQuery(field, value));
        }
        search.aggregation(AggregationBuilders.terms("data").field(field + ".keyword"));
        search.explain(false);
        SearchRequest request = new SearchRequest();
        request.searchType(SearchType.DFS_QUERY_THEN_FETCH);
        request.source(search);
        request.indices(indexs.split(","));
        request.types(types.split(","));
        SearchResponse response = xclient.search(request);
        return response.toString();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
protected void innerReadFrom(StreamInput in) throws IOException {
    shardId = ShardId.readShardId(in);
    searchType = SearchType.fromId(in.readByte());
    numberOfShards = in.readVInt();
    scroll = in.readOptionalWriteable(Scroll::new);
    source = in.readOptionalWriteable(SearchSourceBuilder::new);
    types = in.readStringArray();
    aliasFilter = new AliasFilter(in);
    if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
        indexBoost = in.readFloat();
    } else {
        // Nodes < 5.2.0 don't send the index boost. Read it from the source instead.
        if (source != null) {
            Optional<SearchSourceBuilder.IndexBoost> boost = source.indexBoosts()
                    .stream()
                    .filter(ib -> ib.getIndex().equals(shardId.getIndexName()))
                    .findFirst();
            indexBoost = boost.isPresent() ? boost.get().getBoost() : 1.0f;
        } else {
            indexBoost = 1.0f;
        }
    }
    nowInMillis = in.readVLong();
    requestCache = in.readOptionalBoolean();
}
@Override
public String toString() {
    StringBuilder result = new StringBuilder().append(shardTarget());
    if (searchType() != SearchType.DEFAULT) {
        result.append("searchType=[").append(searchType()).append("]");
    }
    if (scrollContext() != null) {
        if (scrollContext().scroll != null) {
            result.append("scroll=[").append(scrollContext().scroll.keepAlive()).append("]");
        } else {
            result.append("scroll=[null]");
        }
    }
    result.append(" query=[").append(query()).append("]");
    return result.toString();
}
private static void deleteES(Client client) {
    BulkRequestBuilder bulkRequest = client.prepareBulk();
    SearchResponse response = client.prepareSearch(index).setTypes(type)
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setQuery(QueryBuilders.matchAllQuery())
            .setFrom(0).setSize(20).setExplain(true).execute().actionGet();
    System.out.println("length: " + response.getHits().getHits().length);
    if (response.getHits().getHits().length != 0) {
        for (SearchHit hit : response.getHits()) {
            String id = hit.getId();
            System.out.println("id: " + id);
            bulkRequest.add(client.prepareDelete(index, type, id).request());
        }
        BulkResponse bulkResponse = bulkRequest.get();
        if (bulkResponse.hasFailures()) {
            for (BulkItemResponse item : bulkResponse.getItems()) {
                System.out.println(item.getFailureMessage());
            }
        } else {
            System.out.println("delete ok");
        }
    } else {
        System.out.println("delete ok");
    }
}
@Override
public SqlElasticSearchRequestBuilder explain() throws SqlParseException {
    this.request = client.prepareSearch();
    setIndicesAndTypes();
    setFields(select.getFields());
    setWhere(select.getWhere());
    setSorts(select.getOrderBys());
    setLimit(select.getOffset(), select.getRowCount());
    boolean usedScroll = useScrollIfNeeded(select.isOrderdSelect());
    if (!usedScroll) {
        request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
    }
    updateRequestWithIndexAndRoutingOptions(select, request);
    updateRequestWithHighlight(select, request);
    SqlElasticSearchRequestBuilder sqlElasticRequestBuilder = new SqlElasticSearchRequestBuilder(request);
    return sqlElasticRequestBuilder;
}
public void testNestedDiversity() throws Exception {
    // Test multiple samples gathered under buckets made by a parent agg
    int MAX_DOCS_PER_AUTHOR = 1;
    TermsAggregationBuilder rootTerms = terms("genres").field("genre");
    DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100);
    sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint());
    sampleAgg.subAggregation(terms("authors").field("author"));
    rootTerms.subAggregation(sampleAgg);
    SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
            .addAggregation(rootTerms).execute().actionGet();
    assertSearchResponse(response);
    Terms genres = response.getAggregations().get("genres");
    Collection<Bucket> genreBuckets = genres.getBuckets();
    for (Terms.Bucket genreBucket : genreBuckets) {
        Sampler sample = genreBucket.getAggregations().get("sample");
        Terms authors = sample.getAggregations().get("authors");
        Collection<Bucket> testBuckets = authors.getBuckets();
        for (Terms.Bucket testBucket : testBuckets) {
            assertThat(testBucket.getDocCount(), lessThanOrEqualTo((long) NUM_SHARDS * MAX_DOCS_PER_AUTHOR));
        }
    }
}
public void testSimpleSampler() throws Exception {
    SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100);
    sampleAgg.subAggregation(terms("authors").field("author"));
    SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60)
            .addAggregation(sampleAgg).execute().actionGet();
    assertSearchResponse(response);
    Sampler sample = response.getAggregations().get("sample");
    Terms authors = sample.getAggregations().get("authors");
    Collection<Bucket> testBuckets = authors.getBuckets();
    long maxBooksPerAuthor = 0;
    for (Terms.Bucket testBucket : testBuckets) {
        maxBooksPerAuthor = Math.max(testBucket.getDocCount(), maxBooksPerAuthor);
    }
    assertThat(maxBooksPerAuthor, equalTo(3L));
}
public void testUnmappedChildAggNoDiversity() throws Exception {
    SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100);
    sampleAgg.subAggregation(terms("authors").field("author"));
    SearchResponse response = client().prepareSearch("idx_unmapped")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("genre", "fantasy"))
            .setFrom(0).setSize(60)
            .addAggregation(sampleAgg)
            .execute()
            .actionGet();
    assertSearchResponse(response);
    Sampler sample = response.getAggregations().get("sample");
    assertThat(sample.getDocCount(), equalTo(0L));
    Terms authors = sample.getAggregations().get("authors");
    assertThat(authors.getBuckets().size(), equalTo(0));
}
public void testPartiallyUnmappedChildAggNoDiversity() throws Exception {
    SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100);
    sampleAgg.subAggregation(terms("authors").field("author"));
    SearchResponse response = client().prepareSearch("idx_unmapped", "test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("genre", "fantasy"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(sampleAgg)
            .execute()
            .actionGet();
    assertSearchResponse(response);
    Sampler sample = response.getAggregations().get("sample");
    assertThat(sample.getDocCount(), greaterThan(0L));
    Terms authors = sample.getAggregations().get("authors");
    assertThat(authors.getBuckets().size(), greaterThan(0));
}
public void deleteAllByQuery(String index, String type, QueryBuilder query) {
    createBulkProcessor();
    SearchResponse scrollResp = getClient().prepareSearch(index).setSearchType(SearchType.QUERY_AND_FETCH)
            .setTypes(type).setScroll(new TimeValue(60000)).setQuery(query).setSize(10000)
            .execute().actionGet();
    while (true) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            DeleteRequest deleteRequest = new DeleteRequest(index, type, hit.getId());
            getBulkProcessor().add(deleteRequest);
        }
        scrollResp = getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000))
                .execute().actionGet();
        if (scrollResp.getHits().getHits().length == 0) {
            break;
        }
    }
    destroyBulkProcessor();
}
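// Hedged usage sketch (index, type and field names are illustrative assumptions): removes every
// "event" document whose "status" field matches the given value via the scroll + bulk helper above.
public void purgeByStatus(String status) {
    deleteAllByQuery("logs", "event", QueryBuilders.termQuery("status", status));
}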
public void testFilteredAnalysis() throws Exception {
    SearchResponse response = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "weller"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms").field("description")
                    .minDocCount(1).backgroundFilter(QueryBuilders.termsQuery("description", "paul")))
            .execute()
            .actionGet();
    assertSearchResponse(response);
    SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
    HashSet<String> topWords = new HashSet<String>();
    for (Bucket topTerm : topTerms) {
        topWords.add(topTerm.getKeyAsString());
    }
    // The word "paul" should be a constant of all docs in the background set and therefore not seen as significant
    assertFalse(topWords.contains("paul"));
    // "Weller" is the only Paul who was in The Jam and therefore this should be identified as a differentiator
    // from the background of all other Pauls.
    assertTrue(topWords.contains("jam"));
}
public void testPartiallyUnmappedWithFormat() throws Exception {
    SearchResponse response = client().prepareSearch("idx_unmapped", "test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(boolQuery().should(termQuery("description", "the")).should(termQuery("description", "terje")))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("fact_category")
                    .executionHint(randomExecutionHint())
                    .minDocCount(1)
                    .format("0000"))
            .execute()
            .actionGet();
    assertSearchResponse(response);
    SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
    for (int i = 1; i <= 3; i++) {
        String key = String.format(Locale.ROOT, "%04d", i);
        SignificantTerms.Bucket bucket = topTerms.getBucketByKey(key);
        assertThat(bucket, notNullValue());
        assertThat(bucket.getKeyAsString(), equalTo(key));
    }
}
public void testDefaultSignificanceHeuristic() throws Exception {
    SearchResponse response = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(new JLHScore())
                    .minDocCount(2))
            .execute()
            .actionGet();
    assertSearchResponse(response);
    SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(topTerms);
}
public void testMutualInformation() throws Exception {
    SearchResponse response = client().prepareSearch("test")
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(new TermQueryBuilder("description", "terje"))
            .setFrom(0).setSize(60).setExplain(true)
            .addAggregation(significantTerms("mySignificantTerms")
                    .field("description")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(new MutualInformation(false, true))
                    .minDocCount(1))
            .execute()
            .actionGet();
    assertSearchResponse(response);
    SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
    checkExpectedStringTermsFound(topTerms);
}
public void testExceptionThrownIfScaleLE0() throws Exception {
    assertAcked(prepareCreate("test").addMapping(
            "type1",
            jsonBuilder().startObject().startObject("type1").startObject("properties")
                    .startObject("test").field("type", "text").endObject()
                    .startObject("num1").field("type", "date").endObject()
                    .endObject().endObject().endObject()));
    client().index(
            indexRequest("test").type("type1").id("1")
                    .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-27").endObject()))
            .actionGet();
    client().index(
            indexRequest("test").type("type1").id("2")
                    .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-28").endObject()))
            .actionGet();
    refresh();
    ActionFuture<SearchResponse> response = client().search(
            searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
                    searchSource().query(
                            functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num1", "2013-05-28", "-1d")))));
    try {
        response.actionGet();
        fail("Expected SearchPhaseExecutionException");
    } catch (SearchPhaseExecutionException e) {
        assertThat(e.getMessage(), is("all shards failed"));
    }
}
public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception {
    assertAcked(prepareCreate("test").addMapping(
            "type",
            jsonBuilder().startObject().startObject("type").startObject("properties")
                    .startObject("test").field("type", "text").endObject()
                    .startObject("num").field("type", "text").endObject()
                    .endObject().endObject().endObject()));
    client().index(
            indexRequest("test").type("type").source(
                    jsonBuilder().startObject().field("test", "value").field("num", Integer.toString(1)).endObject()))
            .actionGet();
    refresh();
    // so, we indexed a string field, but now we try to score a num field
    ActionFuture<SearchResponse> response = client().search(searchRequest().searchType(SearchType.QUERY_THEN_FETCH)
            .source(searchSource().query(functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 0.5))
                    .scoreMode(ScoreMode.MULTIPLY))));
    try {
        response.actionGet();
        fail("Expected SearchPhaseExecutionException");
    } catch (SearchPhaseExecutionException e) {
        assertThat(e.getMessage(), is("all shards failed"));
    }
}
public void testNoQueryGiven() throws Exception {
    assertAcked(prepareCreate("test").addMapping(
            "type",
            jsonBuilder().startObject().startObject("type").startObject("properties")
                    .startObject("test").field("type", "text").endObject()
                    .startObject("num").field("type", "double").endObject()
                    .endObject().endObject().endObject()));
    client().index(
            indexRequest("test").type("type").source(
                    jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()))
            .actionGet();
    refresh();
    // score the num field without providing an explicit query
    ActionFuture<SearchResponse> response = client().search(
            searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
                    searchSource().query(
                            functionScoreQuery(linearDecayFunction("num", 1, 0.5)).scoreMode(
                                    FiltersFunctionScoreQuery.ScoreMode.MULTIPLY))));
    response.actionGet();
}
private SearchRequest mutate(SearchRequest searchRequest) throws IOException {
    SearchRequest mutation = copyRequest(searchRequest);
    List<Runnable> mutators = new ArrayList<>();
    mutators.add(() -> mutation.indices(ArrayUtils.concat(searchRequest.indices(), new String[] { randomAsciiOfLength(10) })));
    mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(searchRequest.indicesOptions(),
            () -> IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()))));
    mutators.add(() -> mutation.types(ArrayUtils.concat(searchRequest.types(), new String[] { randomAsciiOfLength(10) })));
    mutators.add(() -> mutation.preference(randomValueOtherThan(searchRequest.preference(), () -> randomAsciiOfLengthBetween(3, 10))));
    mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAsciiOfLengthBetween(3, 10))));
    mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), () -> randomBoolean()))));
    mutators.add(() -> mutation
            .scroll(randomValueOtherThan(searchRequest.scroll(), () -> new Scroll(new TimeValue(randomNonNegativeLong() % 100000)))));
    mutators.add(() -> mutation.searchType(randomValueOtherThan(searchRequest.searchType(), () -> randomFrom(SearchType.values()))));
    mutators.add(() -> mutation.source(randomValueOtherThan(searchRequest.source(), this::createSearchSourceBuilder)));
    randomFrom(mutators).run();
    return mutation;
}
public void testMustNot() throws IOException, ExecutionException, InterruptedException {
    assertAcked(prepareCreate("test")
            // issue manifested only with shards >= 2
            .setSettings(SETTING_NUMBER_OF_SHARDS, between(2, DEFAULT_MAX_NUM_SHARDS)));
    indexRandom(true,
            client().prepareIndex("test", "test", "1").setSource("description", "foo other anything bar"),
            client().prepareIndex("test", "test", "2").setSource("description", "foo other anything"),
            client().prepareIndex("test", "test", "3").setSource("description", "foo other"),
            client().prepareIndex("test", "test", "4").setSource("description", "foo"));
    SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).get();
    assertHitCount(searchResponse, 4L);
    searchResponse = client().prepareSearch("test").setQuery(
            boolQuery()
                    .mustNot(matchQuery("description", "anything").type(Type.BOOLEAN)))
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).get();
    assertHitCount(searchResponse, 2L);
}
public void testQueryStringParserCache() throws Exception {
    createIndex("test");
    indexRandom(true, false, client().prepareIndex("test", "type", "1").setSource("nameTokens", "xyz"));
    SearchResponse response = client().prepareSearch("test")
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100))
            .get();
    assertThat(response.getHits().getTotalHits(), equalTo(1L));
    assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
    float first = response.getHits().getAt(0).getScore();
    for (int i = 0; i < 100; i++) {
        response = client().prepareSearch("test")
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100))
                .get();
        assertThat(response.getHits().getTotalHits(), equalTo(1L));
        assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
        float actual = response.getHits().getAt(0).getScore();
        assertThat(i + " expected: " + first + " actual: " + actual, Float.compare(first, actual), equalTo(0));
    }
}
public void testDfsSearchType() throws Exception {
    assertAcked(prepareCreate("test")
            .addMapping("parent")
            .addMapping("child", "_parent", "type=parent"));
    ensureGreen();
    // index simple data
    client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
    client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get();
    client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get();
    client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get();
    client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get();
    client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get();
    refresh();
    SearchResponse searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryStringQuery("c_field:*")), ScoreMode.None)))
            .get();
    assertNoFailures(searchResponse);
    searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryStringQuery("p_field:*")), false)))
            .execute().actionGet();
    assertNoFailures(searchResponse);
}
public void testSearchQueryThenFetch() throws Exception {
    interceptTransportActions(SearchTransportService.QUERY_ACTION_NAME,
            SearchTransportService.FETCH_ID_ACTION_NAME, SearchTransportService.FREE_CONTEXT_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH);
    SearchResponse searchResponse = internalCluster().coordOnlyNodeClient().search(searchRequest).actionGet();
    assertNoFailures(searchResponse);
    assertThat(searchResponse.getHits().getTotalHits(), greaterThan(0L));
    clearInterceptedActions();
    assertSameIndices(searchRequest, SearchTransportService.QUERY_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME);
    // free context messages are not necessarily sent, but if they are, check their indices
    assertSameIndicesOptionalRequests(searchRequest, SearchTransportService.FREE_CONTEXT_ACTION_NAME);
}
public void testSearchDfsQueryThenFetch() throws Exception {
    interceptTransportActions(SearchTransportService.DFS_ACTION_NAME, SearchTransportService.QUERY_ID_ACTION_NAME,
            SearchTransportService.FETCH_ID_ACTION_NAME, SearchTransportService.FREE_CONTEXT_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH);
    SearchResponse searchResponse = internalCluster().coordOnlyNodeClient().search(searchRequest).actionGet();
    assertNoFailures(searchResponse);
    assertThat(searchResponse.getHits().getTotalHits(), greaterThan(0L));
    clearInterceptedActions();
    assertSameIndices(searchRequest, SearchTransportService.DFS_ACTION_NAME, SearchTransportService.QUERY_ID_ACTION_NAME,
            SearchTransportService.FETCH_ID_ACTION_NAME);
    // free context messages are not necessarily sent, but if they are, check their indices
    assertSameIndicesOptionalRequests(searchRequest, SearchTransportService.FREE_CONTEXT_ACTION_NAME);
}
@SuppressWarnings("unchecked") protected void innerReadFrom(StreamInput in) throws IOException { index = in.readString(); shardId = in.readVInt(); searchType = SearchType.fromId(in.readByte()); numberOfShards = in.readVInt(); if (in.readBoolean()) { scroll = readScroll(in); } source = in.readBytesReference(); extraSource = in.readBytesReference(); types = in.readStringArray(); filteringAliases = in.readStringArray(); nowInMillis = in.readVLong(); templateSource = in.readBytesReference(); if (in.readBoolean()) { template = Template.readTemplate(in); } requestCache = in.readOptionalBoolean(); }
public String selectMatchAll(String indexs, String types, String field, String value) {
    try {
        if (client == null) {
            init();
        }
        SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(","));
        request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        request.setQuery(QueryBuilders.matchQuery(field, value));
        request.highlighter(new HighlightBuilder().field(field));
        request.addAggregation(AggregationBuilders.terms("data").field(field + ".keyword"));
        request.setExplain(false);
        SearchResponse response = request.get();
        return response.toString();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
public List<DateHistogramValue> calculateStats(String sourceUrl) {
    BoolQueryBuilder filter = QueryBuilders.boolQuery()
            .must(QueryBuilders.rangeQuery("created").gte("now-1M"))
            .must(QueryBuilders.termQuery("source", sourceUrl));
    SearchResponse response = getConnection().getClient()
            .prepareSearch(getIndex())
            .setTypes(getType())
            .setSearchType(SearchType.DEFAULT)
            .setQuery(filter)
            .addAggregation(AggregationBuilders
                    .dateHistogram("urls_over_time")
                    .field("created")
                    .format("yyyy-MM-dd")
                    .dateHistogramInterval(DateHistogramInterval.DAY))
            .setSize(0)
            .setFetchSource(true)
            .setExplain(false)
            .execute()
            .actionGet();
    InternalDateHistogram hits = response.getAggregations().get("urls_over_time");
    return hits.getBuckets().stream()
            .map(b -> new DateHistogramValue(b.getKeyAsString(), b.getDocCount()))
            .collect(Collectors.toList());
}
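// Hedged usage sketch (the source URL is an illustrative assumption): dumps the per-day bucket
// counts produced by calculateStats(...) above.
public void printDailyCounts(String sourceUrl) {
    for (DateHistogramValue value : calculateStats(sourceUrl)) {
        System.out.println(value); // relies on DateHistogramValue#toString, an assumption here
    }
}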
public List<HttpSource> findEnabledSources() {
    BoolQueryBuilder filter = QueryBuilders.boolQuery()
            .must(QueryBuilders.termQuery("enabled", true));
    SearchResponse response = getConnection().getClient()
            .prepareSearch(getIndex())
            .setTypes(getType())
            .setSearchType(SearchType.DEFAULT)
            .setPostFilter(filter)
            .addSort("updated", SortOrder.ASC)
            .setSize(10000)
            .setFetchSource(true)
            .setExplain(false)
            .execute()
            .actionGet();
    SearchHits hits = response.getHits();
    return Arrays.stream(hits.getHits())
            .map(SearchHit::sourceAsMap)
            .map(this::mapToHttpSource)
            .collect(Collectors.toList());
}
public String selectAll(String indexs, String types, String condition) {
    try {
        if (StringUtil.isEmpty(indexs)) indexs = "_all";
        if (xclient == null) {
            init();
        }
        SearchSourceBuilder search = new SearchSourceBuilder();
        search.query(QueryBuilders.queryStringQuery(condition));
        search.explain(false);
        SearchRequest request = new SearchRequest();
        request.searchType(SearchType.DFS_QUERY_THEN_FETCH);
        request.source(search);
        request.indices(indexs.split(","));
        request.types(types.split(","));
        SearchResponse response = xclient.search(request);
        return response.toString();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
public String selectTermAll(String indexs, String types, String field, String value) {
    try {
        if (client == null) {
            init();
        }
        SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(","));
        request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        request.setQuery(QueryBuilders.termQuery(field, value));
        request.highlighter(new HighlightBuilder().field(field));
        request.addAggregation(AggregationBuilders.terms("data").field(field + ".keyword"));
        request.setExplain(false);
        SearchResponse response = request.get();
        return response.toString();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
/**
 * Fetch all records of the given type whose "nsId" field matches the supplied value and delete them.
 *
 * @param type  the document type to scan
 * @param index the index to scan
 * @param nsId  the namespace id used to select the records to delete
 */
private void fetchAndDeleteRecords(String type, String index, String nsId) {
    SearchResponse scrollResp = client.prepareSearch(index)
            .setSearchType(SearchType.SCAN)
            .setTypes(type)
            .setScroll(new TimeValue(60000))
            .setQuery(QueryBuilders.termQuery("nsId", nsId))
            .setSize(100).execute().actionGet(); // 100 hits per shard will be returned for each scroll
    // Scroll until no hits are returned
    while (true) {
        for (SearchHit hit : scrollResp.getHits()) {
            indexer.getTemplate().delete(index, type, String.valueOf(hit.getId()));
            logger.info("Deleted message with id::" + hit.getId() + " and type::" + type + " from ES for nsId " + nsId);
        }
        scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
        // Break condition: no hits are returned
        if (scrollResp.getHits().getHits().length == 0) {
            break;
        }
    }
}
public List<Map<String, Object>> queryWithConstraints(final String indexName, final String fieldName,
        final String fieldValue, final Map<String, String> constraints, boolean latest) throws IOException {
    SearchRequestBuilder request = this.elasticsearchClient.prepareSearch(indexName)
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setFrom(0);
    BoolQueryBuilder bFilter = QueryBuilders.boolQuery();
    bFilter.filter(QueryBuilders.constantScoreQuery(QueryBuilders.constantScoreQuery(QueryBuilders.termQuery(fieldName, fieldValue))));
    for (Object o : constraints.entrySet()) {
        @SuppressWarnings("rawtypes")
        Map.Entry entry = (Map.Entry) o;
        bFilter.filter(QueryBuilders.constantScoreQuery(
                QueryBuilders.termQuery((String) entry.getKey(), ((String) entry.getValue()).toLowerCase())));
    }
    request.setQuery(bFilter);
    // get response
    SearchResponse response = request.execute().actionGet();
    // evaluate search result
    ArrayList<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
    SearchHit[] hits = response.getHits().getHits();
    for (SearchHit hit : hits) {
        Map<String, Object> map = hit.getSourceAsMap();
        result.add(map);
    }
    return result;
}
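// Hedged usage sketch (index, field and constraint names are illustrative assumptions): finds
// documents for one user that also satisfy an additional "screen_name" constraint, using the
// queryWithConstraints(...) helper above.
public List<Map<String, Object>> findUserMessages(String userId, String screenName) throws IOException {
    Map<String, String> constraints = new HashMap<>();
    constraints.put("screen_name", screenName);
    return queryWithConstraints("messages", "user_id", userId, constraints, true);
}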
/**
 * This method searches the available documents.
 */
@Override
public void searchDocument() {
    SearchHits hits = null;
    try {
        client = ESclient.getInstant();
        SearchResponse response = client.prepareSearch("school", "college")
                .setTypes("tenth", "be")
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setQuery(QueryBuilders.termQuery("name", "sundar"))
                .setPostFilter(QueryBuilders.rangeQuery("age").from(15).to(24))
                .setFrom(0).setSize(60).setExplain(true)
                .get();
        if (response != null) {
            hits = response.getHits();
        }
        if (hits != null) {
            // iterate over the hits with a single iterator (calling hits.iterator() inside the
            // loop condition would create a fresh iterator each time and never terminate)
            Iterator<SearchHit> iterator = hits.iterator();
            while (iterator.hasNext()) {
                iterator.next();
            }
        }
    } catch (Exception ex) {
        log.error("Exception occurred while searching documents: " + ex);
    }
}
/**
 * Build a random search request.
 *
 * @param randomSearchSourceBuilder builds a random {@link SearchSourceBuilder}. You can use
 *        {@link #randomSearchSourceBuilder(Supplier, Supplier, Supplier, Supplier, Supplier)}.
 */
public static SearchRequest randomSearchRequest(Supplier<SearchSourceBuilder> randomSearchSourceBuilder) throws IOException {
    SearchRequest searchRequest = new SearchRequest();
    if (randomBoolean()) {
        searchRequest.indices(generateRandomStringArray(10, 10, false, false));
    }
    if (randomBoolean()) {
        searchRequest.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
    }
    if (randomBoolean()) {
        searchRequest.types(generateRandomStringArray(10, 10, false, false));
    }
    if (randomBoolean()) {
        searchRequest.preference(randomAsciiOfLengthBetween(3, 10));
    }
    if (randomBoolean()) {
        searchRequest.requestCache(randomBoolean());
    }
    if (randomBoolean()) {
        searchRequest.routing(randomAsciiOfLengthBetween(3, 10));
    }
    if (randomBoolean()) {
        searchRequest.scroll(randomPositiveTimeValue());
    }
    if (randomBoolean()) {
        searchRequest.searchType(randomFrom(SearchType.values()));
    }
    if (randomBoolean()) {
        searchRequest.source(randomSearchSourceBuilder.get());
    }
    return searchRequest;
}