/**
 * Verifies that stored binary fields can be loaded from the given index.
 * Expects exactly 100 documents, each carrying a stored 16-byte "binary" field.
 */
void assertStoredBinaryFields(String indexName, Version version) throws Exception {
    SearchRequestBuilder builder = client().prepareSearch(indexName)
            .setQuery(QueryBuilders.matchAllQuery())
            .setSize(100)
            .addStoredField("binary");
    SearchHits hits = builder.get().getHits();
    assertEquals(100, hits.getHits().length);
    for (SearchHit hit : hits) {
        SearchHitField field = hit.field("binary");
        assertNotNull(field);
        Object value = field.getValue();
        assertTrue(value instanceof BytesArray);
        assertEquals(16, ((BytesArray) value).length());
    }
}
/**
 * Multi-index query example: searches the "service2" and "clients" indices with a
 * bool query (id == 5 OR content starts with "oracle") and prints the total hit count.
 */
public static void multisearch() {
    TransportClient transportClient = null;
    try {
        Settings settings = Settings.settingsBuilder().put("cluster.name", "elasticsearch1").build();
        transportClient = TransportClient.builder().settings(settings).build()
                .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("172.16.2.93"), 9300));
        SearchRequestBuilder searchRequestBuilder = transportClient.prepareSearch("service2", "clients");
        SearchResponse searchResponse = searchRequestBuilder
                .setQuery(QueryBuilders.boolQuery()
                        .should(QueryBuilders.termQuery("id", "5"))
                        .should(QueryBuilders.prefixQuery("content", "oracle")))
                .setFrom(0).setSize(100).setExplain(true)
                .execute().actionGet();
        SearchHits searchHits = searchResponse.getHits();
        System.out.println();
        System.out.println("Total Hits is " + searchHits.totalHits());
        System.out.println();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Fix: the transport client was never closed, leaking threads and connections.
        if (transportClient != null) {
            transportClient.close();
        }
    }
}
/**
 * Deletes all extended-data documents associated with the given element.
 * Finds the documents via a term filter on the extended-data element-id field,
 * then issues an individual delete for each hit.
 *
 * @param element the element whose extended data should be removed
 * @throws MemgraphException if the search or any delete fails
 */
private void deleteExtendedDataForElement(Element element) {
    try {
        QueryBuilder filter = QueryBuilders.termQuery(EXTENDED_DATA_ELEMENT_ID_FIELD_NAME, element.getId());
        SearchRequestBuilder s = getClient().prepareSearch(getIndicesToQuery())
                .setTypes(ELEMENT_TYPE)
                .setQuery(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).filter(filter))
                .storedFields(
                        EXTENDED_DATA_ELEMENT_ID_FIELD_NAME,
                        EXTENDED_DATA_TABLE_NAME_FIELD_NAME,
                        EXTENDED_DATA_TABLE_ROW_ID_FIELD_NAME
                );
        for (SearchHit hit : s.execute().get().getHits()) {
            if (MUTATION_LOGGER.isTraceEnabled()) {
                LOGGER.trace("deleting extended data document %s", hit.getId());
            }
            getClient().prepareDelete(hit.getIndex(), ELEMENT_TYPE, hit.getId()).execute().actionGet();
        }
    } catch (Exception ex) {
        // Fix: preserve the original exception as the cause instead of discarding it
        // (assumes MemgraphException(String, Throwable) exists — standard for exception types).
        throw new MemgraphException("Could not delete extended data for element: " + element.getId(), ex);
    }
}
/** * add the sort order to the request searchRequestBuilder * if the frontend send sort with "path : desc". It should be converted to "path.raw" : {"order" : "desc" } * https://www.elastic.co/guide/en/elasticsearch/guide/current/multi-fields.html#multi-fields * * @param pageable * @param searchRequestBuilder */ private void addPagingAndSortingToSearchRequest(Pageable pageable, SearchRequestBuilder searchRequestBuilder) { //par défaut, renvoi la première page trié sur le _score ou le _doc, si rien n'est spécifié //effectue le tri if (pageable != null) { searchRequestBuilder .setFrom(pageable.getOffset()) .setSize(pageable.getPageSize()); if (pageable.getSort() != null) { pageable.getSort().forEach( order -> searchRequestBuilder.addSort( Constants.ORDER_FIELD_MAPPING.get(order.getProperty()), SortOrder.valueOf(order.getDirection().name())) ); } } }
/**
 * Builds and executes the search for the given document types, applying paging.
 * Logs the query at trace level and result counts at debug level.
 */
private SearchResponse getSearchResponse(EnumSet<ElasticsearchDocumentType> elementType, int skip, int limit, boolean includeAggregations) {
    SearchRequestBuilder q = buildQuery(elementType, includeAggregations)
            .setFrom(skip)
            .setSize(limit);
    if (QUERY_LOGGER.isTraceEnabled()) {
        QUERY_LOGGER.trace("query: %s", q);
    }
    SearchResponse searchResponse = q.execute().actionGet();
    if (LOGGER.isDebugEnabled()) {
        SearchHits hits = searchResponse.getHits();
        LOGGER.debug(
                "elasticsearch results %d of %d (time: %dms)",
                hits.hits().length,
                hits.getTotalHits(),
                searchResponse.getTookInMillis()
        );
    }
    return searchResponse;
}
public void testScore() throws Exception { createIndex("test"); ensureGreen("test"); indexRandom(true, client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"), client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"), client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye")); ScoreFunctionBuilder<?> score = ScoreFunctionBuilders.scriptFunction(new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap())); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent SearchResponse rsp = req.get(); assertSearchResponse(rsp); SearchHits hits = rsp.getHits(); assertEquals(3, hits.getTotalHits()); assertEquals("1", hits.getAt(0).getId()); assertEquals("3", hits.getAt(1).getId()); assertEquals("2", hits.getAt(2).getId()); }
/**
 * Adds the SELECT-ed fields to the search request.
 * Method fields named "script" are handled by delegating to a DefaultQueryAction;
 * other method fields become aggregations (nested under groupByAgg when grouping);
 * plain fields are added as stored fields.
 *
 * @param request    search request to populate
 * @param fields     fields from the parsed SELECT clause
 * @param groupByAgg enclosing group-by aggregation, or null when not grouping
 * @throws SqlParseException if a field kind is not supported
 */
private void explanFields(SearchRequestBuilder request, List<Field> fields, AggregationBuilder groupByAgg) throws SqlParseException {
    for (Field field : fields) {
        if (field instanceof MethodField) {
            if (field.getName().equals("script")) {
                // Script fields: store under the alias and let DefaultQueryAction
                // attach the script field to the request.
                request.addStoredField(field.getAlias());
                DefaultQueryAction defaultQueryAction = new DefaultQueryAction(client, select);
                defaultQueryAction.intialize(request);
                List<Field> tempFields = Lists.newArrayList(field);
                defaultQueryAction.setFields(tempFields);
                continue;
            }
            AggregationBuilder makeAgg = aggMaker.makeFieldAgg((MethodField) field, groupByAgg);
            if (groupByAgg != null) {
                groupByAgg.subAggregation(makeAgg);
            } else {
                request.addAggregation(makeAgg);
            }
        } else if (field instanceof Field) {
            request.addStoredField(field.getName());
        } else {
            throw new SqlParseException("it did not support this field method " + field);
        }
    }
}
/** * Return all records, and truncate the content with the ResultTruncatedContentMapper * * @param pageable * @param version * @param project * @return */ @Override public Page<File> customfindAll(Pageable pageable, List<String> version, List<String> project, List<String> extension) { NativeSearchQueryBuilder nativeSearchQueryBuilder = Queries.constructSearchQueryBuilder(""); NativeSearchQuery nativeSearchQuery = nativeSearchQueryBuilder.build(); SearchRequestBuilder searchRequestBuilder = constructRequestBuilder(nativeSearchQuery, pageable, version, project, extension); SearchResponse response = searchRequestBuilder.execute().actionGet(); SearchHit[] hits = response.getHits().hits(); ResultTruncatedContentMapper mapper = new ResultTruncatedContentMapper(); return mapper.mapResults(response, File.class, nativeSearchQuery.getPageable()); // } }
/**
 * Tests sliced scrolls sorted by _doc across several slicing fields.
 */
public void testDocIdSort() throws Exception {
    int numShards = setupIndex(true);
    SearchResponse sr = client().prepareSearch("test")
            .setQuery(matchAllQuery())
            .setSize(0)
            .get();
    int numDocs = (int) sr.getHits().getTotalHits();
    assertThat(numDocs, equalTo(NUM_DOCS));
    int max = randomIntBetween(2, numShards * 3);
    String[] sliceFields = {"_uid", "random_int", "static_int"};
    for (String field : sliceFields) {
        int fetchSize = randomIntBetween(10, 100);
        SearchRequestBuilder request = client().prepareSearch("test")
                .setQuery(matchAllQuery())
                .setScroll(new Scroll(TimeValue.timeValueSeconds(10)))
                .setSize(fetchSize)
                .addSort(SortBuilders.fieldSort("_doc"));
        assertSearchSlicesWithScroll(request, field, max);
    }
}
/**
 * Attaches a highlighter to the request built from any HIGHLIGHT hints on the select.
 * The highlighter is only set when at least one valid highlight field was parsed.
 */
protected void updateRequestWithHighlight(Select select, SearchRequestBuilder request) {
    HighlightBuilder highlightBuilder = new HighlightBuilder();
    boolean hasHighlights = false;
    for (Hint hint : select.getHints()) {
        if (hint.getType() != HintType.HIGHLIGHT) {
            continue;
        }
        HighlightBuilder.Field highlightField = parseHighlightField(hint.getParams());
        if (highlightField != null) {
            highlightBuilder.field(highlightField);
            hasHighlights = true;
        }
    }
    if (hasHighlights) {
        request.highlighter(highlightBuilder);
    }
}
/**
 * Checks that phrase boosting produces the expected highlight whether the boost is
 * applied via a bool "should" clause, a boosting query's positive boost, or a
 * boosting query's negative boost.
 *
 * @param highlighterType highlighter implementation to exercise
 * @param boost           boost factor applied to the phrase query
 * @param terms           the plain terms query
 * @param phrase          the phrase query whose boost is mutated during the test
 */
private <P extends AbstractQueryBuilder<P>> void phraseBoostTestCaseForClauses(String highlighterType, float boost, QueryBuilder terms, P phrase) {
    Matcher<String> highlightedMatcher = Matchers.either(containsString("<em>highlight words together</em>")).or(
            containsString("<em>highlight</em> <em>words</em> <em>together</em>"));
    SearchRequestBuilder search = client().prepareSearch("test").highlighter(
            new HighlightBuilder().field("field1", 100, 1).order("score").highlighterType(highlighterType).requireFieldMatch(true));
    // Try with a bool query
    phrase.boost(boost);
    SearchResponse response = search.setQuery(boolQuery().must(terms).should(phrase)).get();
    assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher);
    // Reset so the boosting-query variants below control the boost themselves.
    phrase.boost(1);
    // Try with a boosting query
    response = search.setQuery(boostingQuery(phrase, terms).boost(boost).negativeBoost(1)).get();
    assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher);
    // Try with a boosting query using a negative boost
    response = search.setQuery(boostingQuery(phrase, terms).boost(1).negativeBoost(1/boost)).get();
    assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher);
}
/**
 * Runs basic sanity searches against an old index: match-all, sorted match-all,
 * an exists filter, and (for indices created on 2.4.0+) an exists filter on a
 * dotted field name.
 */
void assertBasicSearchWorks(String indexName) {
    logger.info("--> testing basic search");
    SearchRequestBuilder request = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery());
    SearchResponse response = request.get();
    ElasticsearchAssertions.assertNoFailures(response);
    long numDocs = response.getHits().getTotalHits();
    logger.info("Found {} in old index", numDocs);

    logger.info("--> testing basic search with sort");
    request.addSort("long_sort", SortOrder.ASC);
    ElasticsearchAssertions.assertNoFailures(request.get());

    logger.info("--> testing exists filter");
    request = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("string"));
    response = request.get();
    ElasticsearchAssertions.assertNoFailures(response);
    assertEquals(numDocs, response.getHits().getTotalHits());

    GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings(indexName).get();
    Version versionCreated = Version.fromId(Integer.parseInt(getSettingsResponse.getSetting(indexName, "index.version.created")));
    if (versionCreated.onOrAfter(Version.V_2_4_0)) {
        request = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("field.with.dots"));
        response = request.get();
        ElasticsearchAssertions.assertNoFailures(response);
        assertEquals(numDocs, response.getHits().getTotalHits());
    }
}
/**
 * Fetches all hits for one side of a join. When a hint limit exists and fits in a
 * single fetch, runs one bounded search; otherwise scrolls up to the limit.
 */
private List<SearchHit> fetchAllHits(TableInJoinRequestBuilder tableInJoinRequest) {
    Integer hintLimit = tableInJoinRequest.getHintLimit();
    SearchRequestBuilder requestBuilder = tableInJoinRequest.getRequestBuilder();
    if (hintLimit == null || hintLimit >= MAX_RESULTS_ON_ONE_FETCH) {
        return scrollTillLimit(tableInJoinRequest, hintLimit);
    }
    requestBuilder.setSize(hintLimit);
    SearchResponse searchResponse = requestBuilder.get();
    updateMetaSearchResults(searchResponse);
    return Arrays.asList(searchResponse.getHits().getHits());
}
public String selectAll(String indexs,String types,String condition){ try { if(client==null){ init(); } SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(",")); request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); request.setQuery(QueryBuilders.queryStringQuery(condition)); request.setExplain(false); SearchResponse response = request.get(); return response.toString(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; }
public String selectMatchAll(String indexs,String types,String field,String value){ try { if(client==null){ init(); } SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(",")); request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); request.setQuery(QueryBuilders.matchQuery(field, value)); request.highlighter(new HighlightBuilder().field(field)); request.addAggregation(AggregationBuilders.terms("data").field(field+".keyword")); request.setExplain(false); SearchResponse response = request.get(); return response.toString(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; }
/**
 * Retrieves all documents for the given search request by scrolling through the
 * result set in chunks until an empty page is returned.
 *
 * @param searchRequestBuilder prepared search request (scroll settings are applied here)
 * @param clazz                document class used for conversion
 * @param timeoutMillis        scroll keep-alive per request; must be > 0
 * @return all converted documents plus a count
 *
 * NOTE(review): the "total count" returned is documents.size() — the number of
 * documents actually fetched — not searchResponse.getHits().getTotalHits(); confirm
 * this is the intended semantics. Also the scroll context is never cleared
 * explicitly; it only expires after timeoutMillis.
 */
@Nonnull
@Override
public <T extends AbstractEsDocument> DocumentsAndTotalCount<T> getScrollResponse(@Nonnull final SearchRequestBuilder searchRequestBuilder, @Nonnull final Class<T> clazz, final long timeoutMillis) {
    Assert.notNull(searchRequestBuilder, "The search request builder should not be null");
    Assert.notNull(clazz, "The document clazz should not be null");
    Assert.isTrue(timeoutMillis > 0, "The timeout millis should be greater than 0");
    final List<T> documents = new ArrayList<>();
    // Initial request: opens the scroll context and returns the first chunk.
    SearchResponse searchResponse = searchRequestBuilder
            .setScroll(new TimeValue(timeoutMillis))
            .setSize(scrollChunkSize == null ? DEFAULT_SCROLL_CHUNK_SIZE : scrollChunkSize).execute().actionGet();
    while (true) {
        documents.addAll(searchResponseComponent.convertSearchResponseToDocuments(searchResponse, clazz));
        // Fetch the next chunk; the keep-alive is renewed on every scroll request.
        searchResponse = esClient.prepareSearchScroll(searchResponse.getScrollId()).setScroll(new TimeValue(timeoutMillis)).execute().actionGet();
        if (searchResponse.getHits().getHits().length == 0) {
            break;
        }
    }
    return new DocumentsAndTotalCount<>(documents, documents.size());
}
/**
 * Retrieves information related to a query: terms aggregations over named entities
 * (people, locations, institutions) and keywords, plus the most recently gathered
 * matching webpages.
 *
 * @param query query string
 * @param size  number of webpage hits to return
 * @return pair of (aggregation name -> buckets) and the matching webpages
 */
public Pair<Map<String, List<Terms.Bucket>>, List<Webpage>> relatedInfo(String query, int size) {
    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(INDEX_NAME)
            .setTypes(TYPE_NAME)
            .setQuery(QueryBuilders.queryStringQuery(query))
            .addSort("gatherTime", SortOrder.DESC)
            .addAggregation(AggregationBuilders.terms("relatedPeople").field("namedEntity.nr"))
            .addAggregation(AggregationBuilders.terms("relatedLocation").field("namedEntity.ns"))
            .addAggregation(AggregationBuilders.terms("relatedInstitution").field("namedEntity.nt"))
            .addAggregation(AggregationBuilders.terms("relatedKeywords").field("keywords"))
            .setSize(size);
    SearchResponse response = searchRequestBuilder.execute().actionGet();
    Map<String, List<Terms.Bucket>> info = Maps.newHashMap();
    String[] aggNames = {"relatedPeople", "relatedLocation", "relatedInstitution", "relatedKeywords"};
    for (String aggName : aggNames) {
        info.put(aggName, ((Terms) response.getAggregations().get(aggName)).getBuckets());
    }
    return Pair.of(info, warpHits2List(response.getHits()));
}
/** * 统计指定网站每天抓取数量 * * @param domain 网站域名 * @return */ public Map<Date, Long> countDomainByGatherTime(String domain) { AggregationBuilder aggregation = AggregationBuilders .dateHistogram("agg") .field("gatherTime") .dateHistogramInterval(DateHistogramInterval.DAY).order(Histogram.Order.KEY_DESC); SearchRequestBuilder searchRequestBuilder = client.prepareSearch(INDEX_NAME) .setTypes(TYPE_NAME) .setQuery(QueryBuilders.matchQuery("domain", domain)) .addAggregation(aggregation); SearchResponse response = searchRequestBuilder.execute().actionGet(); Histogram agg = response.getAggregations().get("agg"); Map<Date, Long> result = Maps.newHashMap(); for (Histogram.Bucket entry : agg.getBuckets()) { DateTime key = (DateTime) entry.getKey(); // Key long docCount = entry.getDocCount(); // Doc count result.put(key.toDate(), docCount); } return result; }
/**
 * Indexes a crawled webpage if no document with the same URL already exists.
 * The document id is the MD5 hash of the URL, which makes the write idempotent.
 */
@Override
public void process(ResultItems resultItems, Task task) {
    SpiderInfo spiderInfo = resultItems.get("spiderInfo");
    Webpage webpage = convertResultItems2Webpage(resultItems);
    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(INDEX_NAME)
            .setTypes(TYPE_NAME)
            .setQuery(QueryBuilders.matchQuery("url", webpage.getUrl()));
    SearchResponse response = searchRequestBuilder.execute().actionGet();
    if (response.getHits().totalHits() == 0) {
        try {
            client.prepareIndex(INDEX_NAME, TYPE_NAME)
                    .setId(Hashing.md5().hashString(webpage.getUrl(), Charset.forName("utf-8")).toString())
                    .setSource(gson.toJson(webpage))
                    .get();
        } catch (Exception e) {
            // Fix: pass the exception so the full stack trace is logged,
            // not just the localized message.
            LOG.error("索引 Webpage 出错," + e.getLocalizedMessage(), e);
        }
    }
}
/**
 * Opens a scroll over documents whose {@code field} matches {@code value},
 * fetching only {@code includeFields} from the source and including versions.
 *
 * @param size chunk size per scroll request; must be positive
 */
protected <E extends EsDocument> ScrollableResponse<List<E>> retrieveScrollByField(
        String field, Object value, String[] includeFields, int size,
        ThrowingFunction<String, E> createFunc) throws Exception {
    Preconditions.checkArgument(size > 0);
    SearchRequestBuilder builder = esClient.prepareSearch()
            .setIndices(getIndexName())
            .setTypes(getDocTypeName())
            .setScroll(new TimeValue(SCROLLDEFAULTTIMEOUT))
            .setSize(size)
            .setQuery(QueryBuilders.filteredQuery(
                    QueryBuilders.matchAllQuery(),
                    FilterBuilders.termsFilter(field, value)))
            .setFetchSource(includeFields, null)
            .setVersion(true);
    SearchResponse response = builder.execute().actionGet();
    return convertToScrollableResponse(response, createFunc);
}
/**
 * Opens a scroll for an arbitrary query, fetching only {@code includeFields}
 * from the source and including document versions.
 *
 * @param size chunk size per scroll request; must be positive
 */
protected <E extends EsDocument> ScrollableResponse<List<E>> retrieveScrollByQuery(
        QueryBuilder queryBuilder, String[] includeFields, int size,
        ThrowingFunction<String, E> createFunc) throws Exception {
    Preconditions.checkArgument(size > 0);
    SearchRequestBuilder builder = esClient.prepareSearch()
            .setIndices(getIndexName())
            .setTypes(getDocTypeName())
            .setQuery(queryBuilder)
            .setScroll(new TimeValue(SCROLLDEFAULTTIMEOUT))
            .setSize(size)
            .setFetchSource(includeFields, null)
            .setVersion(true);
    SearchResponse response = builder.execute().actionGet();
    return convertToScrollableResponse(response, createFunc);
}
/** * wildcard查询/or条件/and条件 */ public static void wildcardQuery() { try { Settings settings = Settings.settingsBuilder().put("cluster.name", "elasticsearch1").build(); TransportClient transportClient = TransportClient.builder(). settings(settings).build().addTransportAddress( new InetSocketTransportAddress(InetAddress.getByName("172.16.2.94"), 9300)); SearchRequestBuilder searchRequestBuilder = transportClient.prepareSearch("sqd.es_start"); // {"query": {"bool": {"must": [{"or": [{"wildcard": {"content": "*oracle*"}},{"wildcard": {"content": "*mysql*"}}]}],"must_not": [],"should": []}},"from": 0, "size": 10, "sort": [],"aggs": {}} SearchResponse searchResponse = searchRequestBuilder. setQuery(QueryBuilders.boolQuery() .must(QueryBuilders.orQuery(QueryBuilders.wildcardQuery("content","*mysql*"), QueryBuilders.wildcardQuery("content","*oracle*"))) .must(QueryBuilders.termQuery("tbool","false"))) .setFrom(0).setSize(100).setExplain(true).execute().actionGet(); SearchHits searchHits = searchResponse.getHits(); System.out.println(); System.out.println("Total Hits is " + searchHits.totalHits()); System.out.println(); for (int i = 0; i < searchHits.getHits().length; ++i) { System.out.println("content is " + searchHits.getHits()[i].getSource().get("content")); } } catch (Exception e) { e.printStackTrace(); } }
/**
 * Counts the documents in an app's index that match the given terms (and
 * optionally a type). Returns 0 for blank/empty input or when no terms query
 * can be built; logs and returns 0 on search failure.
 */
@Override
public Long getCount(String appid, String type, Map<String, ?> terms) {
    if (StringUtils.isBlank(appid) || terms == null || terms.isEmpty()) {
        return 0L;
    }
    Long count = 0L;
    QueryBuilder query = getTermsQuery(terms, true);
    if (query == null) {
        return count;
    }
    if (!StringUtils.isBlank(type)) {
        query = boolQuery().must(query).must(termQuery(Config._TYPE, type));
    }
    try {
        SearchRequestBuilder crb = client().prepareSearch(getIndexName(appid)).setSize(0).setQuery(query);
        count = crb.execute().actionGet().getHits().getTotalHits();
    } catch (Exception e) {
        Throwable cause = e.getCause();
        String msg = cause != null ? cause.getMessage() : e.getMessage();
        logger.warn("Could not count results in index '{}': {}", appid, msg);
    }
    return count;
}
/**
 * Executes the search described by an EsQueryDo and converts the response into an
 * EsResponse JSON payload (aggregations, total hit count, and parsed hits with
 * optional highlighting).
 *
 * @param esQueryObj query description (index, type, aggregations, highlighting)
 * @return EsResponse wrapping the result JSON, or responseOK(null) when no hits
 * @throws EsException if the index/type validation fails
 */
public EsResponse queryByEsQueryDo(EsQueryDo esQueryObj) throws EsException {
    validationEsQuery(esQueryObj.getIndexName(),esQueryObj.getTypeName());
    // Build the ES search request object
    SearchRequestBuilder esSearch=buildSearchRequest (esQueryObj);
    // Execute the query
    SearchResponse response =esSearch.execute().actionGet();
    JSONObject resObj = new JSONObject();
    // Collect facet/aggregation results when aggregation fields were requested
    if(!Check.NuNObject(esQueryObj.aggregationFields())){
        parseAggregationResult(response, esQueryObj.aggregationFields(), resObj);
    }
    // 1. Fetch the document hits
    SearchHits searchHits = response.getHits();
    if (searchHits == null || searchHits.getTotalHits() == 0) {
        return EsResponse.responseOK(null);
    }
    SearchHit[] hits = searchHits.getHits();
    resObj.put("total", searchHits.getTotalHits());
    // 1.1 Parse the hits (with highlighting when enabled)
    parseSearchResult(hits, esQueryObj.isHighLigth(), esQueryObj, resObj);
    return EsResponse.responseOK(resObj);
}
/**
 * Get recommended datasets for a given dataset.
 *
 * @param type  recommend method (ES type)
 * @param input the given dataset (matched against concept_A)
 * @param num   number of recommendations to return
 * @return the accumulated list of linked terms (self-references are skipped)
 */
public List<LinkedTerm> getRelatedDataFromES(String type, String input, int num) {
    SearchRequestBuilder builder = es.getClient()
            .prepareSearch(props.getProperty(INDEX_NAME))
            .setTypes(type)
            .setQuery(QueryBuilders.termQuery("concept_A", input))
            .addSort(WEIGHT, SortOrder.DESC)
            .setSize(num);
    SearchResponse usrhis = builder.execute().actionGet();
    for (SearchHit hit : usrhis.getHits().getHits()) {
        Map<String, Object> source = hit.getSource();
        String conceptB = (String) source.get("concept_B");
        if (!conceptB.equals(input)) {
            termList.add(new LinkedTerm(conceptB, (double) source.get(WEIGHT), type));
        }
    }
    return termList;
}
/**
 * Finds datasets linked to {@code input} by querying concept_A, sorted by weight
 * descending, and appends each non-self concept_B as a LinkedTerm to termList.
 *
 * @param type  recommend method (ES type)
 * @param input the source dataset
 * @param num   maximum number of hits to examine
 * @return the accumulated term list
 */
public List<LinkedTerm> getRelatedDataFromES(String type, String input, int num) {
    SearchResponse usrhis = es.getClient()
            .prepareSearch(props.getProperty(INDEX_NAME))
            .setTypes(type)
            .setQuery(QueryBuilders.termQuery("concept_A", input))
            .addSort(WEIGHT, SortOrder.DESC)
            .setSize(num)
            .execute().actionGet();
    for (SearchHit hit : usrhis.getHits().getHits()) {
        Map<String, Object> result = hit.getSource();
        String conceptB = (String) result.get("concept_B");
        boolean isSelf = conceptB.equals(input);
        if (!isSelf) {
            LinkedTerm lTerm = new LinkedTerm(conceptB, (double) result.get(WEIGHT), type);
            termList.add(lTerm);
        }
    }
    return termList;
}
/**
 * Returns the highest "weight" value among documents whose concept_A matches the
 * given concept. Defaults to 1.0 when no document is found or the stored maximum
 * is 0.0.
 */
private static double getMaxSimilarity(ESDriver es, String index, String type, String concept) {
    SearchRequestBuilder builder = es.getClient().prepareSearch(index)
            .setTypes(type)
            .setQuery(QueryBuilders.termQuery("concept_A", concept))
            .addSort("weight", SortOrder.DESC)
            .setSize(1);
    SearchHit[] hits = builder.execute().actionGet().getHits().getHits();
    double maxSim = 1.0;
    if (hits.length == 1) {
        maxSim = (double) hits[0].getSource().get("weight");
    }
    // A stored weight of exactly 0.0 falls back to the 1.0 default.
    if (maxSim == 0.0) {
        maxSim = 1.0;
    }
    return maxSim;
}
protected int applyFetch(SearchRequestBuilder searchRequest, ElasticsearchStoragePluginConfig config, ElasticsearchLimit limit, ElasticsearchFilter filter, ElasticsearchSample sample){ final int configuredFetchSize = config.getBatchSize(); int fetch = configuredFetchSize; // If there is a limit or sample, add it to the search builder. if (limit != null) { fetch = limit.getFetchSize(); } else if (sample != null) { fetch = sample.getFetchSize(); } // make sure that limit 100000 doesn't create a fetch size beyond the configured fetch size. fetch = Math.min(fetch, configuredFetchSize); searchRequest.setFrom(0).setSize(fetch); return fetch; }
public List<Map<String, Object>> queryWithConstraints(final String indexName, final String fieldName, final String fieldValue, final Map<String, String> constraints, boolean latest) throws IOException { SearchRequestBuilder request = this.elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setFrom(0); BoolQueryBuilder bFilter = QueryBuilders.boolQuery(); bFilter.filter(QueryBuilders.constantScoreQuery(QueryBuilders.constantScoreQuery(QueryBuilders.termQuery(fieldName, fieldValue)))); for (Object o : constraints.entrySet()) { @SuppressWarnings("rawtypes") Map.Entry entry = (Map.Entry) o; bFilter.filter(QueryBuilders.constantScoreQuery(QueryBuilders.termQuery((String) entry.getKey(), ((String) entry.getValue()).toLowerCase()))); } request.setQuery(bFilter); // get response SearchResponse response = request.execute().actionGet(); // evaluate search result ArrayList<Map<String, Object>> result = new ArrayList<Map<String, Object>>(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit: hits) { Map<String, Object> map = hit.getSourceAsMap(); result.add(map); } return result; }
@Test public void deleteAllTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { delete(String.format("DELETE FROM %s/account_temp", TEST_INDEX)); // Assert no results exist for this type. SearchRequestBuilder request = MainTestSuite.getClient().prepareSearch(TEST_INDEX); request.setTypes("account_temp"); SearchResponse response = request.setQuery(QueryBuilders.matchAllQuery()).get(); assertThat(response.getHits().getTotalHits(), equalTo(0L)); }
/**
 * Verifies that using an expression script (_value) in a terms aggregation on a
 * string field fails with a ScriptException mentioning the text variable —
 * expression scripts are not allowed for term aggregations on strings.
 */
public void testStringSpecialValueVariable() throws Exception {
    // i.e. expression script for term aggregations, which is not allowed
    assertAcked(client().admin().indices().prepareCreate("test")
            .addMapping("doc", "text", "type=keyword").get());
    ensureGreen("test");
    indexRandom(true,
            client().prepareIndex("test", "doc", "1").setSource("text", "hello"),
            client().prepareIndex("test", "doc", "2").setSource("text", "goodbye"),
            client().prepareIndex("test", "doc", "3").setSource("text", "hello"));
    SearchRequestBuilder req = client().prepareSearch().setIndices("test");
    req.setQuery(QueryBuilders.matchAllQuery())
            .addAggregation(
                    AggregationBuilders.terms("term_agg").field("text")
                            .script(
                                    new Script(ScriptType.INLINE, ExpressionScriptEngineService.NAME, "_value", Collections.emptyMap())));
    String message;
    try {
        // shards that don't have docs with the "text" field will not fail,
        // so we may or may not get a total failure
        SearchResponse rsp = req.get();
        assertThat(rsp.getShardFailures().length, greaterThan(0)); // at least the shards containing the docs should have failed
        message = rsp.getShardFailures()[0].reason();
    } catch (SearchPhaseExecutionException e) {
        message = e.toString();
    }
    assertThat(message + "should have contained ScriptException", message.contains("ScriptException"), equalTo(true));
    assertThat(message + "should have contained text variable error", message.contains("text variable"), equalTo(true));
}
/**
 * Tests chained rescorers: two weight-factor rescorers applied in sequence, then a
 * shrunken second rescore window that can no longer affect the result, and finally
 * one rescorer dragging a document into another rescorer's window.
 */
public void testMultipleRescores() throws Exception {
    int numDocs = indexRandomNumbers("keyword", 1, true);
    // Rescorer that boosts the doc containing "eight" with weight 1000 (score replaced).
    QueryRescorerBuilder eightIsGreat = RescoreBuilder
            .queryRescorer(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8)),
                    ScoreFunctionBuilders.weightFactorFunction(1000.0f)).boostMode(CombineFunction.REPLACE))
            .setScoreMode(QueryRescoreMode.Total);
    // Rescorer that boosts the doc containing "seven" even more strongly (weight 10000).
    QueryRescorerBuilder sevenIsBetter = RescoreBuilder
            .queryRescorer(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7)),
                    ScoreFunctionBuilders.weightFactorFunction(10000.0f)).boostMode(CombineFunction.REPLACE))
            .setScoreMode(QueryRescoreMode.Total);
    // First set the rescore window large enough that both rescores take effect
    SearchRequestBuilder request = client().prepareSearch();
    request.addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, numDocs);
    SearchResponse response = request.get();
    assertFirstHit(response, hasId("7"));
    assertSecondHit(response, hasId("8"));
    // Now squash the second rescore window so it never gets to see a seven
    response = request.setSize(1).clearRescorers().addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, 1).get();
    assertFirstHit(response, hasId("8"));
    // We have no idea what the second hit will be because we didn't get a chance to look for seven
    // Now use one rescore to drag the number we're looking for into the window of another
    QueryRescorerBuilder ninetyIsGood = RescoreBuilder.queryRescorer(QueryBuilders
            .functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f))
            .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total);
    QueryRescorerBuilder oneToo = RescoreBuilder.queryRescorer(QueryBuilders
            .functionScoreQuery(QueryBuilders.queryStringQuery("*one*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f))
            .boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total);
    request.clearRescorers().addRescorer(ninetyIsGood, numDocs).addRescorer(oneToo, 10);
    response = request.setSize(2).get();
    assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore()));
    assertFirstHit(response, hasId("91"));
    assertFirstHit(response, hasScore(2001.0f));
    assertSecondHit(response, hasScore(1001.0f)); // Not sure which one it is but it is ninety something
}
public static SearchResponse scrollOneTimeWithHits(Client client, SearchRequestBuilder requestBuilder, Select originalSelect, int resultSize) { SearchResponse responseWithHits;SearchRequestBuilder scrollRequest = requestBuilder .setScroll(new TimeValue(60000)) .setSize(resultSize); boolean ordered = originalSelect.isOrderdSelect(); if(!ordered) scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); responseWithHits = scrollRequest.get(); //on ordered select - not using SCAN , elastic returns hits on first scroll //es5.0 elastic always return docs on scan // if(!ordered) { // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()).setScroll(new TimeValue(600000)).get(); // } return responseWithHits; }
/**
 * Runs a suggest-only search (size 0) with the given suggestions and optional
 * global text, asserting the expected number of failed shards.
 */
protected Suggest searchSuggest(String suggestText, int expectShardsFailed, Map<String, SuggestionBuilder<?>> suggestions) {
    SuggestBuilder suggestBuilder = new SuggestBuilder();
    if (suggestText != null) {
        suggestBuilder.setGlobalText(suggestText);
    }
    for (Entry<String, SuggestionBuilder<?>> suggestion : suggestions.entrySet()) {
        suggestBuilder.addSuggestion(suggestion.getKey(), suggestion.getValue());
    }
    SearchRequestBuilder builder = client().prepareSearch().setSize(0).suggest(suggestBuilder);
    SearchResponse response = builder.execute().actionGet();
    assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(expectShardsFailed));
    return response.getSuggest();
}
/**
 * Verifies that a custom (plugin-registered) suggester is invoked by a search and
 * produces its expected two suggestions built from the random text/field/suffix.
 */
public void testThatCustomSuggestersCanBeRegisteredAndWork() throws Exception {
    createIndex("test");
    client().prepareIndex("test", "test", "1").setSource(jsonBuilder()
            .startObject()
            .field("name", "arbitrary content")
            .endObject())
            .setRefreshPolicy(IMMEDIATE).get();
    String randomText = randomAsciiOfLength(10);
    String randomField = randomAsciiOfLength(10);
    String randomSuffix = randomAsciiOfLength(10);
    SuggestBuilder suggestBuilder = new SuggestBuilder();
    suggestBuilder.addSuggestion("someName", new CustomSuggestionBuilder(randomField, randomSuffix).text(randomText));
    SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setTypes("test").setFrom(0).setSize(1)
            .suggest(suggestBuilder);
    SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
    // TODO: infer type once JI-9019884 is fixed
    // TODO: see also JDK-8039214
    List<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestions = CollectionUtils.<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>iterableAsArrayList(
            searchResponse.getSuggest().getSuggestion("someName"));
    // The custom suggester emits exactly two entries with "-12" and "-123" suffixes.
    assertThat(suggestions, hasSize(2));
    assertThat(suggestions.get(0).getText().string(),
            is(String.format(Locale.ROOT, "%s-%s-%s-12", randomText, randomField, randomSuffix)));
    assertThat(suggestions.get(1).getText().string(),
            is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix)));
}
/**
 * Verifies that when a highlight query is set (on the field or on the highlight
 * context), highlighting uses it instead of the search query, for every
 * highlighter type.
 */
public void testHighlightUsesHighlightQuery() throws IOException {
    assertAcked(prepareCreate("test").addMapping("type1", "text",
            "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
    ensureGreen();
    index("test", "type1", "1", "text", "Testing the highlight query feature");
    refresh();
    for (String type : ALL_TYPES) {
        HighlightBuilder.Field field = new HighlightBuilder.Field("text");
        HighlightBuilder highlightBuilder = new HighlightBuilder().field(field).highlighterType(type);
        SearchRequestBuilder search = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing"))
                .highlighter(highlightBuilder);
        // Without a highlight query, the search query's terms are highlighted.
        Matcher<String> searchQueryMatcher = equalTo("<em>Testing</em> the highlight query feature");
        SearchResponse response = search.get();
        assertHighlight(response, 0, "text", 0, searchQueryMatcher);
    field = new HighlightBuilder.Field("text");
        // With a field-level highlight query, its terms are highlighted instead.
        Matcher<String> hlQueryMatcher = equalTo("Testing the highlight <em>query</em> feature");
        field.highlightQuery(matchQuery("text", "query"));
        highlightBuilder = new HighlightBuilder().field(field);
        search = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing")).highlighter(highlightBuilder);
        response = search.get();
        assertHighlight(response, 0, "text", 0, hlQueryMatcher);
        // Make sure the highlightQuery is taken into account when it is set on the highlight context instead of the field
        highlightBuilder.highlightQuery(matchQuery("text", "query"));
        field.highlighterType(type).highlightQuery(null);
        response = search.get();
        assertHighlight(response, 0, "text", 0, hlQueryMatcher);
    }
}
/**
 * Asserts that a rescore with the given window size is rejected with an error
 * message naming both the limit and the index setting that controls it.
 */
private void assertRescoreWindowFails(int windowSize) {
    SearchRequestBuilder search = client().prepareSearch("idx")
            .addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(windowSize));
    SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> search.get());
    String message = e.toString();
    assertThat(message, containsString("Rescore window [" + windowSize + "] is too large. It must "
            + "be less than [" + IndexSettings.MAX_RESCORE_WINDOW_SETTING.get(Settings.EMPTY)));
    assertThat(message, containsString("This limit can be set by changing the ["
            + IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey() + "] index level setting."));
}
/**
 * Executes the search twice and asserts that the top hit's shard was served by
 * more than one distinct node across the runs.
 *
 * NOTE(review): with only two iterations this requires the two executions to land
 * on different nodes every time; if routing can repeat a node, the final assertion
 * may be flaky — confirm the surrounding test forces round-robin node selection.
 */
private void assertSearchOnRandomNodes(SearchRequestBuilder request) {
    Set<String> hitNodes = new HashSet<>();
    for (int i = 0; i < 2; i++) {
        SearchResponse searchResponse = request.execute().actionGet();
        assertThat(searchResponse.getHits().getHits().length, greaterThan(0));
        hitNodes.add(searchResponse.getHits().getAt(0).getShard().getNodeId());
    }
    assertThat(hitNodes.size(), greaterThan(1));
}