private SearchResponse minMaxQuery(ScoreMode scoreMode, int minChildren, Integer maxChildren) throws SearchPhaseExecutionException {
    HasChildQueryBuilder hasChildQuery = hasChildQuery("child",
            QueryBuilders.functionScoreQuery(constantScoreQuery(QueryBuilders.termQuery("foo", "two")),
                    new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
                            new FunctionScoreQueryBuilder.FilterFunctionBuilder(weightFactorFunction(1)),
                            new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1)),
                            new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1))
                    }).boostMode(CombineFunction.REPLACE).scoreMode(FiltersFunctionScoreQuery.ScoreMode.SUM),
            scoreMode)
            .minMaxChildren(minChildren, maxChildren != null ? maxChildren : HasChildQueryBuilder.DEFAULT_MAX_CHILDREN);
    return client()
            .prepareSearch("test")
            .setQuery(hasChildQuery)
            .addSort("_score", SortOrder.DESC)
            .addSort("id", SortOrder.ASC)
            .get();
}
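// Hypothetical usage sketch (not in the original source): score each parent by summing
// child scores, and keep only parents with between two and three matching children.
// Passing null as maxChildren falls back to HasChildQueryBuilder.DEFAULT_MAX_CHILDREN.
public void exampleMinMaxQueryUsage() {
    SearchResponse response = minMaxQuery(ScoreMode.Total, 2, 3);
    assertNoFailures(response); // the exact hit count depends on the parent/child fixture data
}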
public void testPostingsHighlighterFuzzyQuery() throws Exception {
    assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping()));
    ensureGreen();

    client().prepareIndex("test", "type1")
            .setSource("field1", "this is a test",
                       "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get();
    refresh();

    for (String type : UNIFIED_AND_NULL) {
        logger.info("--> highlighting and searching on field2");
        SearchSourceBuilder source = searchSource().query(fuzzyQuery("field2", "quck"))
                .highlighter(highlight().field("field2").highlighterType(type));
        SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get();

        assertHighlight(searchResponse, 0, "field2", 0, 1,
                equalTo("The <em>quick</em> brown fox jumps over the lazy dog!"));
    }
}
public void testSingleValuedNumericValueScript() throws Exception {
    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .addAggregation(
                    cardinality("cardinality")
                            .precisionThreshold(precisionThreshold)
                            .field(singleNumericField())
                            .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())))
            .execute().actionGet();

    assertSearchResponse(response);

    Cardinality count = response.getAggregations().get("cardinality");
    assertThat(count, notNullValue());
    assertThat(count.getName(), equalTo("cardinality"));
    assertCount(count, numDocs);
}
/**
 * Delete documents using a query. Check what would be deleted first with a normal search query!
 * Elasticsearch once provided a native prepareDeleteByQuery method, but this was removed
 * in later versions. Instead, there is a plugin which iterates over search results;
 * see https://www.elastic.co/guide/en/elasticsearch/plugins/current/plugins-delete-by-query.html
 * We simulate the same behaviour here without needing that plugin.
 *
 * @param indexName the name of the index to delete from
 * @param q the query that selects the documents to delete
 * @return the number of deleted documents
 */
public int deleteByQuery(String indexName, final QueryBuilder q) {
    Map<String, String> ids = new TreeMap<>();
    SearchResponse response = elasticsearchClient.prepareSearch(indexName).setSearchType(SearchType.QUERY_THEN_FETCH)
            .setScroll(new TimeValue(60000)).setQuery(q).setSize(100).execute().actionGet();
    while (true) {
        // accumulate the ids here, don't delete them right now to prevent an interference of the delete with the
        // scroll
        for (SearchHit hit : response.getHits().getHits()) {
            ids.put(hit.getId(), hit.getType());
        }
        response = elasticsearchClient.prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000))
                .execute().actionGet();
        // termination
        if (response.getHits().getHits().length == 0) break;
    }
    return deleteBulk(indexName, ids);
}
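// A minimal usage sketch for deleteByQuery (not part of the original source). The index name
// "web" and the field "sourcetype" are hypothetical placeholders; any QueryBuilder works. As
// the javadoc advises, run the same query as a search first to see what would be deleted.
public void purgeObsoleteEntries() {
    QueryBuilder q = QueryBuilders.termQuery("sourcetype", "obsolete"); // hypothetical field/value
    long candidates = elasticsearchClient.prepareSearch("web").setQuery(q).setSize(0)
            .execute().actionGet().getHits().getTotalHits();
    if (candidates > 0) {
        int deleted = deleteByQuery("web", q);
        Data.logger.info("deleted " + deleted + " of " + candidates + " candidate documents");
    }
}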
private void assertNoDocCountError(int size, SearchResponse accurateResponse, SearchResponse testResponse) {
    Terms accurateTerms = accurateResponse.getAggregations().get("terms");
    assertThat(accurateTerms, notNullValue());
    assertThat(accurateTerms.getName(), equalTo("terms"));
    assertThat(accurateTerms.getDocCountError(), equalTo(0L));

    Terms testTerms = testResponse.getAggregations().get("terms");
    assertThat(testTerms, notNullValue());
    assertThat(testTerms.getName(), equalTo("terms"));
    assertThat(testTerms.getDocCountError(), equalTo(0L));

    Collection<Bucket> testBuckets = testTerms.getBuckets();
    assertThat(testBuckets.size(), lessThanOrEqualTo(size));
    assertThat(accurateTerms.getBuckets().size(), greaterThanOrEqualTo(testBuckets.size()));

    for (Terms.Bucket testBucket : testBuckets) {
        assertThat(testBucket, notNullValue());
        Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKeyAsString());
        assertThat(accurateBucket, notNullValue());
        assertThat(accurateBucket.getDocCountError(), equalTo(0L));
        assertThat(testBucket.getDocCountError(), equalTo(0L));
    }
}
public void testIndexDirIsDeletedWhenShardRemoved() throws Exception {
    Environment env = getInstanceFromNode(Environment.class);
    Path idxPath = env.sharedDataFile().resolve(randomAsciiOfLength(10));
    logger.info("--> idxPath: [{}]", idxPath);
    Settings idxSettings = Settings.builder()
            .put(IndexMetaData.SETTING_DATA_PATH, idxPath)
            .build();
    createIndex("test", idxSettings);
    ensureGreen("test");
    client().prepareIndex("test", "bar", "1").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get();
    SearchResponse response = client().prepareSearch("test").get();
    assertHitCount(response, 1L);
    client().admin().indices().prepareDelete("test").get();
    assertAllIndicesRemovedAndDeletionCompleted(Collections.singleton(getInstanceFromNode(IndicesService.class)));
    assertPathHasBeenCleared(idxPath);
}
public void testSingleValuedStringValueScript() throws Exception {
    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .addAggregation(
                    cardinality("cardinality")
                            .precisionThreshold(precisionThreshold)
                            .field("str_value")
                            .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())))
            .execute().actionGet();

    assertSearchResponse(response);

    Cardinality count = response.getAggregations().get("cardinality");
    assertThat(count, notNullValue());
    assertThat(count.getName(), equalTo("cardinality"));
    assertCount(count, numDocs);
}
public void testPostingsHighlighterPrefixQuery() throws Exception {
    assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping()));
    ensureGreen();

    client().prepareIndex("test", "type1")
            .setSource("field1", "this is a test",
                       "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get();
    refresh();
    logger.info("--> highlighting and searching on field2");

    for (String type : UNIFIED_AND_NULL) {
        SearchSourceBuilder source = searchSource().query(prefixQuery("field2", "qui"))
                .highlighter(highlight().field("field2").highlighterType(type));
        SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get();

        assertHighlight(searchResponse, 0, "field2", 0, 1,
                equalTo("The <em>quick</em> brown fox jumps over the lazy dog!"));
    }
}
public void testThatCustomHighlighterReceivesFieldsInOrder() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test").setTypes("test")
            .setQuery(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery())
                    .should(QueryBuilders.termQuery("name", "arbitrary")))
            .highlighter(new HighlightBuilder().highlighterType("test-custom")
                    .field("name").field("other_name").field("other_other_name")
                    .useExplicitFieldOrder(true))
            .get();

    assertHighlight(searchResponse, 0, "name", 0, equalTo("standard response for name at position 1"));
    assertHighlight(searchResponse, 0, "other_name", 0, equalTo("standard response for other_name at position 2"));
    assertHighlight(searchResponse, 0, "other_other_name", 0, equalTo("standard response for other_other_name at position 3"));
    assertHighlight(searchResponse, 1, "name", 0, equalTo("standard response for name at position 1"));
    assertHighlight(searchResponse, 1, "other_name", 0, equalTo("standard response for other_name at position 2"));
    assertHighlight(searchResponse, 1, "other_other_name", 0, equalTo("standard response for other_other_name at position 3"));
}
public void testScoreRange() throws Exception {
    // all random scores should be in range [0.0, 1.0]
    createIndex("test");
    ensureGreen();
    int docCount = randomIntBetween(100, 200);
    for (int i = 0; i < docCount; i++) {
        String id = randomRealisticUnicodeOfCodepointLengthBetween(1, 50);
        index("test", "type", id, jsonBuilder().startObject().endObject());
    }
    flush();
    refresh();
    int iters = scaledRandomIntBetween(10, 20);
    for (int i = 0; i < iters; ++i) {
        int seed = randomInt();
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed)))
                .setSize(docCount)
                .execute().actionGet();
        assertNoFailures(searchResponse);
        for (SearchHit hit : searchResponse.getHits().getHits()) {
            assertThat(hit.getScore(), allOf(greaterThanOrEqualTo(0.0f), lessThanOrEqualTo(1.0f)));
        }
    }
}
public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Exception {
    assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping()));
    ensureGreen();

    client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get();
    refresh();

    for (String type : UNIFIED_AND_NULL) {
        logger.info("--> highlighting and searching on field1");
        SearchSourceBuilder source = searchSource()
                .query(boolQuery().must(prefixQuery("field1", "photo"))
                        .should(matchQuery("field1", "test").minimumShouldMatch("0")))
                .highlighter(highlight().field("field1").highlighterType(type));
        SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get();

        assertHighlight(searchResponse, 0, "field1", 0, 1,
                equalTo("The <em>photography</em> word will get highlighted"));
    }
}
public void testMatchQueryNumeric() throws Exception {
    assertAcked(prepareCreate("test").addMapping("type1", "long", "type=long", "double", "type=double"));

    indexRandom(true,
            client().prepareIndex("test", "type1", "1").setSource("long", 1L, "double", 1.0d),
            client().prepareIndex("test", "type1", "2").setSource("long", 2L, "double", 2.0d),
            client().prepareIndex("test", "type1", "3").setSource("long", 3L, "double", 3.0d));

    SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("long", "1")).get();
    assertHitCount(searchResponse, 1L);
    assertFirstHit(searchResponse, hasId("1"));

    searchResponse = client().prepareSearch().setQuery(matchQuery("double", "2")).get();
    assertHitCount(searchResponse, 1L);
    assertFirstHit(searchResponse, hasId("2"));
    expectThrows(SearchPhaseExecutionException.class,
            () -> client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get());
}
public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception {
    // order buckets ascending by the value of the sub-aggregation named "sum"
    // (note: the sub-aggregation is a max aggregation that is simply named "sum")
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(dateHistogram("histo")
                    .field("date")
                    .dateHistogramInterval(DateHistogramInterval.MONTH)
                    .order(Histogram.Order.aggregation("sum", true))
                    .subAggregation(max("sum").field("value")))
            .execute().actionGet();

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    assertThat(histo.getBuckets().size(), equalTo(3));

    int i = 0;
    for (Histogram.Bucket bucket : histo.getBuckets()) {
        assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
        i++;
    }
}
public void testSingleValuedFieldOrderedByKeyAsc() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(dateHistogram("histo")
                    .field("date")
                    .dateHistogramInterval(DateHistogramInterval.MONTH)
                    .order(Histogram.Order.KEY_ASC))
            .execute().actionGet();

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));

    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(3));

    int i = 0;
    for (Histogram.Bucket bucket : buckets) {
        assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
        i++;
    }
}
public void testNestedDefinedAsObject() throws Exception {
    assertAcked(prepareCreate("articles").addMapping("article", "comments", "type=nested", "title", "type=text"));

    List<IndexRequestBuilder> requests = new ArrayList<>();
    requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
            .field("title", "quick brown fox")
            .startObject("comments").field("message", "fox eat quick").endObject()
            .endObject()));
    indexRandom(true, requests);

    SearchResponse response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg)
                    .innerHit(new InnerHitBuilder(), false))
            .get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1"));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(),
            equalTo("comments"));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
}
@Override
public void testUnmapped() throws Exception {
    int sigDigits = randomSignificantDigits();
    SearchResponse searchResponse = client()
            .prepareSearch("idx_unmapped")
            .setQuery(matchAllQuery())
            .addAggregation(percentileRanks("percentile_ranks").method(PercentilesMethod.HDR)
                    .numberOfSignificantValueDigits(sigDigits)
                    .field("value")
                    .values(0, 10, 15, 100))
            .execute().actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));

    PercentileRanks reversePercentiles = searchResponse.getAggregations().get("percentile_ranks");
    assertThat(reversePercentiles, notNullValue());
    assertThat(reversePercentiles.getName(), equalTo("percentile_ranks"));
    assertThat(reversePercentiles.percent(0), equalTo(Double.NaN));
    assertThat(reversePercentiles.percent(10), equalTo(Double.NaN));
    assertThat(reversePercentiles.percent(15), equalTo(Double.NaN));
    assertThat(reversePercentiles.percent(100), equalTo(Double.NaN));
}
public void testSingleValueFieldAsSubAggToGeohashGrid() throws Exception {
    SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME)
            .addAggregation(geohashGrid("geoGrid").field(SINGLE_VALUED_FIELD_NAME)
                    .subAggregation(geoCentroid(aggName).field(SINGLE_VALUED_FIELD_NAME)))
            .execute().actionGet();
    assertSearchResponse(response);

    GeoHashGrid grid = response.getAggregations().get("geoGrid");
    assertThat(grid, notNullValue());
    assertThat(grid.getName(), equalTo("geoGrid"));
    List<GeoHashGrid.Bucket> buckets = grid.getBuckets();
    for (int i = 0; i < buckets.size(); ++i) {
        GeoHashGrid.Bucket cell = buckets.get(i);
        String geohash = cell.getKeyAsString();
        GeoPoint expectedCentroid = expectedCentroidsForGeoHash.get(geohash);
        GeoCentroid centroidAgg = cell.getAggregations().get(aggName);
        assertThat("Geohash " + geohash + " has wrong centroid latitude",
                expectedCentroid.lat(), closeTo(centroidAgg.centroid().lat(), GEOHASH_TOLERANCE));
        assertThat("Geohash " + geohash + " has wrong centroid longitude",
                expectedCentroid.lon(), closeTo(centroidAgg.centroid().lon(), GEOHASH_TOLERANCE));
    }
}
public void testMissingStoredField() throws Exception {
    assertAcked(prepareCreate("test")
            .addMapping("type1", "highlight_field", "type=text,store=true"));
    ensureGreen();
    client().prepareIndex("test", "type1", "1")
            .setSource(jsonBuilder().startObject()
                    .field("field", "highlight")
                    .endObject()).get();
    refresh();

    for (String type : UNIFIED_AND_NULL) {
        // This query used to fail when the field to highlight was absent
        SearchResponse response = client().prepareSearch("test")
                .setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQuery.Type.BOOLEAN))
                .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("highlight_field")
                        .fragmentSize(-1).numOfFragments(1).fragmenter("simple")).highlighterType(type))
                .get();
        assertThat(response.getHits().getHits()[0].getHighlightFields().isEmpty(), equalTo(true));
    }
}
public void testSimplePolygon() throws Exception {
    List<GeoPoint> points = new ArrayList<>();
    points.add(new GeoPoint(40.7, -74.0));
    points.add(new GeoPoint(40.7, -74.1));
    points.add(new GeoPoint(40.8, -74.1));
    points.add(new GeoPoint(40.8, -74.0));
    points.add(new GeoPoint(40.7, -74.0));
    SearchResponse searchResponse = client().prepareSearch("test") // from NY
            .setQuery(boolQuery().must(geoPolygonQuery("location", points)))
            .execute().actionGet();
    assertHitCount(searchResponse, 4);
    assertThat(searchResponse.getHits().getHits().length, equalTo(4));
    for (SearchHit hit : searchResponse.getHits()) {
        assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5")));
    }
}
public void testShapeFilterWithRandomGeoCollection() throws Exception {
    // Create a random geometry collection.
    GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random());

    logger.info("Created Random GeometryCollection containing {} shapes", gcb.numShapes());

    client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree")
            .execute().actionGet();

    XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("location"), null).endObject();
    client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get();

    ShapeBuilder filterShape = (gcb.getShapeAt(randomIntBetween(0, gcb.numShapes() - 1)));

    GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", filterShape);
    filter.relation(ShapeRelation.INTERSECTS);
    SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery())
            .setPostFilter(filter).get();
    assertSearchResponse(result);
    assertHitCount(result, 1);
}
private void testMinDocCountOnHistogram(Histogram.Order order) throws Exception {
    final int interval = randomIntBetween(1, 3);
    final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type")
            .setSize(0)
            .setQuery(QUERY)
            .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(0))
            .execute().actionGet();

    final Histogram allHisto = allResponse.getAggregations().get("histo");

    for (long minDocCount = 0; minDocCount < 50; ++minDocCount) {
        final SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .setSize(0)
                .setQuery(QUERY)
                .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(minDocCount))
                .execute().actionGet();
        assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount);
    }
}
public void testSimpleUnclosedPolygon() throws Exception {
    // same polygon as in testSimplePolygon, but without repeating the first point to close the ring
    List<GeoPoint> points = new ArrayList<>();
    points.add(new GeoPoint(40.7, -74.0));
    points.add(new GeoPoint(40.7, -74.1));
    points.add(new GeoPoint(40.8, -74.1));
    points.add(new GeoPoint(40.8, -74.0));
    SearchResponse searchResponse = client().prepareSearch("test") // from NY
            .setQuery(boolQuery().must(geoPolygonQuery("location", points)))
            .execute().actionGet();
    assertHitCount(searchResponse, 4);
    assertThat(searchResponse.getHits().getHits().length, equalTo(4));
    for (SearchHit hit : searchResponse.getHits()) {
        assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5")));
    }
}
@Override
public void process(ResultItems resultItems, Task task) {
    SpiderInfo spiderInfo = resultItems.get("spiderInfo");
    Webpage webpage = convertResultItems2Webpage(resultItems);
    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(INDEX_NAME)
            .setTypes(TYPE_NAME)
            .setQuery(QueryBuilders.matchQuery("url", webpage.getUrl()));
    SearchResponse response = searchRequestBuilder.execute().actionGet();
    // only index the page if no document with the same url exists yet
    if (response.getHits().totalHits() == 0) {
        try {
            // use the MD5 hash of the url as the document id, so re-indexing the same url is idempotent
            client.prepareIndex(INDEX_NAME, TYPE_NAME)
                    .setId(Hashing.md5().hashString(webpage.getUrl(), Charset.forName("utf-8")).toString())
                    .setSource(gson.toJson(webpage))
                    .get();
        } catch (Exception e) {
            LOG.error("Error indexing Webpage: " + e.getLocalizedMessage());
        }
    }
}
public void testSingleValuedField() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval))
            .execute().actionGet();

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(numValueBuckets));

    for (int i = 0; i < numValueBuckets; ++i) {
        Histogram.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
        assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
    }
}
/**
 * Create an index and index some docs
 */
public void testPutDocument() {
    // TODO: remove when Netty 4.1.5 is upgraded to Netty 4.1.6 including https://github.com/netty/netty/pull/5778
    assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9);
    Client client = getClient();

    // START SNIPPET: java-doc-index-doc-simple
    client.prepareIndex(index, "doc", "1")  // Index, Type, Id
            .setSource("foo", "bar")        // Simple document: { "foo" : "bar" }
            .get();                         // Execute and wait for the result
    // END SNIPPET: java-doc-index-doc-simple

    // START SNIPPET: java-doc-admin-indices-refresh
    // Prepare a refresh action on a given index, execute and wait for the result
    client.admin().indices().prepareRefresh(index).get();
    // END SNIPPET: java-doc-admin-indices-refresh

    // START SNIPPET: java-doc-search-simple
    SearchResponse searchResponse = client.prepareSearch(index).get();
    assertThat(searchResponse.getHits().getTotalHits(), is(1L));
    // END SNIPPET: java-doc-search-simple
}
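// A hedged companion sketch (not one of the original snippets): a document can also be read
// back by id with a get request, which is real-time and does not require a refresh. The id
// "2" and value "baz" are illustrative only.
public void testGetDocumentById() {
    Client client = getClient();
    client.prepareIndex(index, "doc", "2").setSource("foo", "baz").get();
    GetResponse getResponse = client.prepareGet(index, "doc", "2").get();
    assertThat(getResponse.isExists(), is(true));
    assertThat(getResponse.getSource().get("foo"), is("baz"));
}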
public void testIssue8209() throws InterruptedException, ExecutionException {
    assertAcked(client().admin().indices().prepareCreate("test8209").addMapping("type", "d", "type=date").get());
    indexRandom(true,
            client().prepareIndex("test8209", "type").setSource("d", "2014-01-01T00:00:00Z"),
            client().prepareIndex("test8209", "type").setSource("d", "2014-04-01T00:00:00Z"),
            client().prepareIndex("test8209", "type").setSource("d", "2014-04-30T00:00:00Z"));
    ensureSearchable("test8209");
    SearchResponse response = client().prepareSearch("test8209")
            .addAggregation(dateHistogram("histo").field("d")
                    .dateHistogramInterval(DateHistogramInterval.MONTH)
                    .timeZone(DateTimeZone.forID("CET"))
                    .minDocCount(0))
            .execute().actionGet();
    assertSearchResponse(response);
    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo.getBuckets().size(), equalTo(4));
    assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("2014-01-01T00:00:00.000+01:00"));
    assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L));
    assertThat(histo.getBuckets().get(1).getKeyAsString(), equalTo("2014-02-01T00:00:00.000+01:00"));
    assertThat(histo.getBuckets().get(1).getDocCount(), equalTo(0L));
    assertThat(histo.getBuckets().get(2).getKeyAsString(), equalTo("2014-03-01T00:00:00.000+01:00"));
    assertThat(histo.getBuckets().get(2).getDocCount(), equalTo(0L));
    assertThat(histo.getBuckets().get(3).getKeyAsString(), equalTo("2014-04-01T00:00:00.000+02:00"));
    assertThat(histo.getBuckets().get(3).getDocCount(), equalTo(2L));
    internalCluster().wipeIndices("test8209");
}
public void testUnmapped() {
    SearchResponse rsp = client().prepareSearch("idx_unmapped").addAggregation(
            AggregationBuilders.ipRange("my_range")
                    .field("ip")
                    .addUnboundedTo("192.168.1.0")
                    .addRange("192.168.1.0", "192.168.1.10")
                    .addUnboundedFrom("192.168.1.10")).get();
    assertSearchResponse(rsp);
    Range range = rsp.getAggregations().get("my_range");
    assertEquals(3, range.getBuckets().size());

    Range.Bucket bucket1 = range.getBuckets().get(0);
    assertNull(bucket1.getFrom());
    assertEquals("192.168.1.0", bucket1.getTo());
    assertEquals(0, bucket1.getDocCount());

    Range.Bucket bucket2 = range.getBuckets().get(1);
    assertEquals("192.168.1.0", bucket2.getFrom());
    assertEquals("192.168.1.10", bucket2.getTo());
    assertEquals(0, bucket2.getDocCount());

    Range.Bucket bucket3 = range.getBuckets().get(2);
    assertEquals("192.168.1.10", bucket3.getFrom());
    assertNull(bucket3.getTo());
    assertEquals(0, bucket3.getDocCount());
}
@Override
public void testSingleValuedFieldPartiallyUnmapped() throws Exception {
    double sigma = randomDouble() * randomIntBetween(1, 10);
    SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped")
            .setQuery(matchAllQuery())
            .addAggregation(extendedStats("stats").field("value").sigma(sigma))
            .execute().actionGet();

    assertHitCount(searchResponse, 10);

    ExtendedStats stats = searchResponse.getAggregations().get("stats");
    assertThat(stats, notNullValue());
    assertThat(stats.getName(), equalTo("stats"));
    assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10));
    assertThat(stats.getMin(), equalTo(1.0));
    assertThat(stats.getMax(), equalTo(10.0));
    assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10));
    assertThat(stats.getCount(), equalTo(10L));
    assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100));
    assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
    assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)));
    checkUpperLowerBounds(stats, sigma);
}
public void testMultiValuedStringValueScript() throws Exception {
    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .addAggregation(
                    cardinality("cardinality")
                            .precisionThreshold(precisionThreshold)
                            .field("str_values")
                            .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value", emptyMap())))
            .execute().actionGet();

    assertSearchResponse(response);

    Cardinality count = response.getAggregations().get("cardinality");
    assertThat(count, notNullValue());
    assertThat(count.getName(), equalTo("cardinality"));
    assertCount(count, numDocs * 2);
}
public void testRegExpQuerySupportsName() {
    createIndex("test1");
    ensureGreen();

    client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get();
    refresh();

    SearchResponse searchResponse = client().prepareSearch()
            .setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex")).get();
    assertHitCount(searchResponse, 1L);

    for (SearchHit hit : searchResponse.getHits()) {
        if (hit.getId().equals("1")) {
            assertThat(hit.getMatchedQueries().length, equalTo(1));
            assertThat(hit.getMatchedQueries(), hasItemInArray("regex"));
        } else {
            fail("Unexpected document returned with id " + hit.getId());
        }
    }
}
private Response wrap(SearchResponse response) {
    List<SearchFailure> failures;
    if (response.getShardFailures() == null) {
        failures = emptyList();
    } else {
        failures = new ArrayList<>(response.getShardFailures().length);
        for (ShardSearchFailure failure : response.getShardFailures()) {
            String nodeId = failure.shard() == null ? null : failure.shard().getNodeId();
            failures.add(new SearchFailure(failure.getCause(), failure.index(), failure.shardId(), nodeId));
        }
    }
    List<Hit> hits;
    if (response.getHits().getHits() == null || response.getHits().getHits().length == 0) {
        hits = emptyList();
    } else {
        hits = new ArrayList<>(response.getHits().getHits().length);
        for (SearchHit hit : response.getHits().getHits()) {
            hits.add(new ClientHit(hit));
        }
        hits = unmodifiableList(hits);
    }
    return new Response(response.isTimedOut(), failures, response.getHits().getTotalHits(), hits, response.getScrollId());
}
/**
 * Get on-service metadata names. The key of the returned map is the lowercase short
 * name and the value is the original short name.
 *
 * @param es the Elasticsearch client
 * @return a map from lowercase metadata name to original metadata name
 */
private Map<String, String> getOnServiceMetadata(ESDriver es) {
    String indexName = props.getProperty(MudrodConstants.ES_INDEX_NAME);
    String metadataType = props.getProperty("recom_metadataType");
    Map<String, String> shortnameMap = new HashMap<>();
    SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(metadataType)
            .setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100)
            .execute().actionGet();
    while (true) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            Map<String, Object> metadata = hit.getSource();
            String shortName = (String) metadata.get("Dataset-ShortName");
            shortnameMap.put(shortName.toLowerCase(), shortName);
        }
        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000))
                .execute().actionGet();
        if (scrollResp.getHits().getHits().length == 0) {
            break;
        }
    }
    return shortnameMap;
}
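// The scroll loop above is the standard transport-client pattern for walking a full result
// set. A minimal reusable sketch of that pattern (a hypothetical helper, not in the original
// source; requires java.util.function.Consumer) that applies a callback to every hit:
private void forEachHit(Client client, String index, QueryBuilder query, Consumer<SearchHit> action) {
    SearchResponse resp = client.prepareSearch(index)
            .setScroll(new TimeValue(60000))   // keep the scroll context alive between batches
            .setQuery(query)
            .setSize(100)                      // batch size per scroll round-trip
            .execute().actionGet();
    while (true) {
        for (SearchHit hit : resp.getHits().getHits()) {
            action.accept(hit);
        }
        resp = client.prepareSearchScroll(resp.getScrollId())
                .setScroll(new TimeValue(60000))
                .execute().actionGet();
        if (resp.getHits().getHits().length == 0) {
            break;                             // no more batches
        }
    }
}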
/**
 * Make sure that a request using a script does not get cached and a request
 * not using a script does get cached.
 */
public void testDontCacheScripts() throws Exception {
    assertAcked(prepareCreate("cache_test_idx").addMapping("type", "i", "type=integer")
            .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
            .get());
    indexRandom(true,
            client().prepareIndex("cache_test_idx", "type", "1").setSource(jsonBuilder().startObject().field("i", 1).endObject()),
            client().prepareIndex("cache_test_idx", "type", "2").setSource(jsonBuilder().startObject().field("i", 2).endObject()));

    // Make sure we are starting with a clear cache
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getHitCount(), equalTo(0L));
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getMissCount(), equalTo(0L));

    // Test that a request using a script does not get cached
    Map<String, Object> params = new HashMap<>();
    params.put("fieldname", "date");
    SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(
            range("foo").field("i").script(
                    new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())).addRange(0, 10))
            .get();
    assertSearchResponse(r);

    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getHitCount(), equalTo(0L));
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getMissCount(), equalTo(0L));

    // To make sure that the cache is working test that a request not using
    // a script is cached
    r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(range("foo").field("i").addRange(0, 10)).get();
    assertSearchResponse(r);

    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getHitCount(), equalTo(0L));
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getMissCount(), equalTo(1L));
}
public long countLocal(final String index, final String provider_hash) {
    try {
        SearchResponse response = elasticsearchClient.prepareSearch(index)
                .setSize(0)
                .setQuery(QueryBuilders.matchQuery("provider_hash", provider_hash))
                .execute()
                .actionGet();
        return response.getHits().getTotalHits();
    } catch (Throwable e) {
        Data.logger.warn("", e);
        return 0;
    }
}
/**
 * Make sure that a request using a script does not get cached and a request
 * not using a script does get cached.
 */
public void testDontCacheScripts() throws Exception {
    assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long")
            .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
            .get());
    indexRandom(true,
            client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1),
            client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2));

    // Make sure we are starting with a clear cache
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getHitCount(), equalTo(0L));
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getMissCount(), equalTo(0L));

    // Test that a request using a script does not get cached
    SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(
            min("foo").field("d").script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())))
            .get();
    assertSearchResponse(r);

    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getHitCount(), equalTo(0L));
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getMissCount(), equalTo(0L));

    // To make sure that the cache is working test that a request not using
    // a script is cached
    r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(min("foo").field("d")).get();
    assertSearchResponse(r);

    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getHitCount(), equalTo(0L));
    assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
            .getMissCount(), equalTo(1L));
}
@Override
public long countDocuments() {
    IndicesExistsResponse indicesExistsResponse = esClient.prepareExists().get();
    if (indicesExistsResponse.isExists()) {
        SearchResponse response = esClient.prepareSearch().setQuery(QueryBuilders.matchAllQuery()).setSize(1).get();
        return response.getHits().getTotalHits();
    }
    return 0L;
}
public void testHighlightUsesHighlightQuery() throws IOException {
    assertAcked(prepareCreate("test").addMapping("type1", "text",
            "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
    ensureGreen();

    index("test", "type1", "1", "text", "Testing the highlight query feature");
    refresh();

    for (String type : ALL_TYPES) {
        HighlightBuilder.Field field = new HighlightBuilder.Field("text");
        HighlightBuilder highlightBuilder = new HighlightBuilder().field(field).highlighterType(type);
        SearchRequestBuilder search = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing"))
                .highlighter(highlightBuilder);
        Matcher<String> searchQueryMatcher = equalTo("<em>Testing</em> the highlight query feature");

        SearchResponse response = search.get();
        assertHighlight(response, 0, "text", 0, searchQueryMatcher);
        field = new HighlightBuilder.Field("text");

        Matcher<String> hlQueryMatcher = equalTo("Testing the highlight <em>query</em> feature");
        field.highlightQuery(matchQuery("text", "query"));
        highlightBuilder = new HighlightBuilder().field(field);
        search = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing")).highlighter(highlightBuilder);
        response = search.get();
        assertHighlight(response, 0, "text", 0, hlQueryMatcher);

        // Make sure the highlightQuery is taken into account when it is set on the highlight context instead of the field
        highlightBuilder.highlightQuery(matchQuery("text", "query"));
        field.highlighterType(type).highlightQuery(null);
        response = search.get();
        assertHighlight(response, 0, "text", 0, hlQueryMatcher);
    }
}
public void testMultiValuedStringScript() throws Exception {
    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .addAggregation(
                    cardinality("cardinality")
                            .precisionThreshold(precisionThreshold)
                            .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_values'].values", emptyMap())))
            .execute().actionGet();

    assertSearchResponse(response);

    Cardinality count = response.getAggregations().get("cardinality");
    assertThat(count, notNullValue());
    assertThat(count.getName(), equalTo("cardinality"));
    assertCount(count, numDocs * 2);
}
public void testDontExplode() throws Exception {
    SearchResponse response = client()
            .prepareSearch("idx")
            .setTypes("type")
            .addAggregation(terms("terms")
                    .executionHint(randomExecutionHint())
                    .field(TERMS_AGGS_FIELD)
                    .subAggregation(
                            topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1)
                                    .sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))
                    )
            )
            .get();
    assertNoFailures(response);
}
public void testDocCountTopLevel() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                    .extendedBounds(minRandomValue, maxRandomValue))
            .addAggregation(percentilesBucket("percentiles_bucket", "histo>_count")
                    .percents(PERCENTS)).execute().actionGet();

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Histogram.Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(numValueBuckets));

    double[] values = new double[numValueBuckets];
    for (int i = 0; i < numValueBuckets; ++i) {
        Histogram.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
        assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
        values[i] = bucket.getDocCount();
    }

    Arrays.sort(values);

    PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket");
    assertThat(percentilesBucketValue, notNullValue());
    assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket"));
    assertPercentileBucket(PERCENTS, values, percentilesBucketValue);
}