/**
 * Builds a random leaf (non-compound) query for randomized query testing.
 *
 * @param stringFields  candidate string field names
 * @param numericFields candidate numeric field names
 * @param numDocs       number of indexed documents, used to pick valid values
 * @return a randomly chosen terminal {@link QueryBuilder}
 */
private static QueryBuilder randomTerminalQuery(List<String> stringFields, List<String> numericFields, int numDocs) {
    int choice = randomIntBetween(0, 6);
    switch (choice) {
        case 1:
            return randomTermsQuery(stringFields, numDocs);
        case 2:
            return randomRangeQuery(numericFields, numDocs);
        case 3:
            return QueryBuilders.matchAllQuery();
        case 4:
            return randomCommonTermsQuery(stringFields, numDocs);
        case 5:
            return randomFuzzyQuery(stringFields);
        case 6:
            return randomIDsQuery();
        case 0:
        default:
            // case 0 and the (unreachable, but required) default share a builder
            return randomTermQuery(stringFields, numDocs);
    }
}
/** * Delete documents using a query. Check what would be deleted first with a normal search query! * Elasticsearch once provided a native prepareDeleteByQuery method, but this was removed * in later versions. Instead, there is a plugin which iterates over search results, * see https://www.elastic.co/guide/en/elasticsearch/plugins/current/plugins-delete-by-query.html * We simulate the same behaviour here without the need of that plugin. * * @param q * @return delete document count */ public int deleteByQuery(String indexName, final QueryBuilder q) { Map<String, String> ids = new TreeMap<>(); SearchResponse response = elasticsearchClient.prepareSearch(indexName).setSearchType(SearchType.QUERY_THEN_FETCH) .setScroll(new TimeValue(60000)).setQuery(q).setSize(100).execute().actionGet(); while (true) { // accumulate the ids here, don't delete them right now to prevent an interference of the delete with the // scroll for (SearchHit hit : response.getHits().getHits()) { ids.put(hit.getId(), hit.getType()); } response = elasticsearchClient.prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000)) .execute().actionGet(); // termination if (response.getHits().getHits().length == 0) break; } return deleteBulk(indexName, ids); }
/**
 * Executes the given raw JSON query against the configured indices/types and
 * returns the full Elasticsearch response parsed back into a JSON tree.
 *
 * @param jsonQuery the query body, must not be null
 * @return the response as a JsonNode, or null if the response could not be parsed
 * @throws IllegalArgumentException if {@code jsonQuery} is null
 * @throws IllegalStateException if the client has been closed
 */
@Override
public JsonNode search(JsonNode jsonQuery) {
    if (jsonQuery == null) {
        throw new IllegalArgumentException("JSON Query can not be null");
    }
    if (this.client == null) {
        throw new IllegalStateException("ElasticSearch client is closed");
    }
    // wrapperQuery lets callers supply an arbitrary pre-built query body
    QueryBuilder query = QueryBuilders.wrapperQuery(jsonQuery.toString());
    SearchResponse response = client.prepareSearch(indexNames.toArray(new String[indexNames.size()]))
            .setTypes(typeNames.toArray(new String[typeNames.size()]))
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setQuery(query)
            .get();
    JsonNode result = null;
    try {
        result = mapper.readTree(response.toString());
    } catch (IOException e) {
        // pass the exception as the trailing argument with no placeholder so
        // SLF4J logs the full stack trace instead of just e.toString()
        log.warn("Can not parse ES response '{}' as JSON.", response, e);
    }
    return result;
}
/**
 * Searches the property model for entries whose hyponyms, hypernyms or
 * synonyms match the search term, then ranks the results.
 */
@Override
public Scores propertySearch(ModifiableSearchParams searchParams, ModifiableRankParams rankParams) {
    // property searches always run against the property model
    searchParams.model(BuiltInModel.PROPERTY);
    if (rankParams instanceof ModifiableIndraParams) {
        configureDistributionalParams(searchParams.getKbId(), (ModifiableIndraParams) rankParams);
    }
    final String term = searchParams.getSearchTerm();
    // match the term against any of the nested lexical relations; at least one must hit
    BoolQueryBuilder lexicalQuery = boolQuery();
    for (String relation : new String[] {"hyponyms", "hypernyms", "synonyms"}) {
        lexicalQuery.should(nestedQuery(relation, matchQuery(relation + ".word", term), ScoreMode.Max));
    }
    lexicalQuery.minimumNumberShouldMatch(1);
    Searcher searcher = core.getSearcher(searchParams.getKbId());
    Scores scores = searcher.search(new ElasticQueryHolder(lexicalQuery, searchParams));
    return Rankers.apply(scores, rankParams, term);
}
/**
 * Extends the parent filters with an OR of vertex- and/or edge-related
 * filters, depending on which document types are requested.
 */
@Override
protected List<QueryBuilder> getFilters(EnumSet<ElasticsearchDocumentType> elementTypes) {
    List<QueryBuilder> filters = super.getFilters(elementTypes);
    boolean wantsVertices = elementTypes.contains(ElasticsearchDocumentType.VERTEX)
            || elementTypes.contains(ElasticsearchDocumentType.VERTEX_EXTENDED_DATA);
    boolean wantsEdges = elementTypes.contains(ElasticsearchDocumentType.EDGE)
            || elementTypes.contains(ElasticsearchDocumentType.EDGE_EXTENDED_DATA);
    List<QueryBuilder> relatedFilters = new ArrayList<>();
    if (wantsVertices) {
        relatedFilters.add(getVertexFilter(elementTypes));
    }
    if (wantsEdges) {
        relatedFilters.add(getEdgeFilter());
    }
    filters.add(orFilters(relatedFilters));
    return filters;
}
/** * Tests {@link ElasticsearchQueryBuilder#generateQueryBuilder(String)} to make sure it generates the query we expect. */ @Test public void testGenerateQueryBuilder() throws Exception { final String fiqlFilter = "tenantName==taters,(containerName==delicious;tenantName==dinner)"; // Generate one of our newfangled queries final QueryBuilder generatedBuilder = elasticsearchQueryBuilder.generateQueryBuilder(fiqlFilter); // Use out-of-the-box approach final FiqlParser<MetadataRecord> parser = new FiqlParser<>(MetadataRecord.class); final ElasticsearchQueryBuilderVisitor<MetadataRecord> visitor = new ElasticsearchQueryBuilderVisitor<>(); visitor.visit(parser.parse(fiqlFilter)); final QueryBuilder builder = visitor.getQuery(); assertThat(generatedBuilder.toString(), is(builder.toString())); }
/**
 * Translates the lightweight URL search parameters ("q", "df", "analyzer", ...)
 * of a REST request into a query_string query.
 *
 * @param request the incoming REST request
 * @return the query builder, or {@code null} when no "q" parameter is present
 */
public static QueryBuilder urlParamsToQueryBuilder(RestRequest request) {
    String queryString = request.param("q");
    if (queryString == null) {
        // no URL-level query supplied
        return null;
    }
    QueryStringQueryBuilder queryBuilder = QueryBuilders.queryStringQuery(queryString)
            .defaultField(request.param("df"))
            .analyzer(request.param("analyzer"))
            .analyzeWildcard(request.paramAsBoolean("analyze_wildcard", false))
            .lenient(request.paramAsBoolean("lenient", null));
    String defaultOperator = request.param("default_operator");
    if (defaultOperator != null) {
        queryBuilder.defaultOperator(Operator.fromString(defaultOperator));
    }
    return queryBuilder;
}
/**
 * Counts the documents in the app's index, optionally restricted to one type.
 * Failures are logged and reported as a count of 0 (best effort).
 *
 * @param appid app identifier; blank yields 0
 * @param type  optional document type; blank counts everything
 * @return the total hit count, or 0 on error
 */
@Override
public Long getCount(String appid, String type) {
    if (StringUtils.isBlank(appid)) {
        return 0L;
    }
    // count either one specific type or everything in the index
    QueryBuilder query = StringUtils.isBlank(type)
            ? matchAllQuery()
            : termQuery(Config._TYPE, type);
    Long count = 0L;
    try {
        count = client().prepareSearch(getIndexName(appid))
                .setSize(0)
                .setQuery(query)
                .execute().actionGet()
                .getHits().getTotalHits();
    } catch (Exception e) {
        // best effort: log the most specific message available and return 0
        Throwable cause = e.getCause();
        String msg = (cause != null) ? cause.getMessage() : e.getMessage();
        logger.warn("Could not count results in index '{}': {}", appid, msg);
    }
    return count;
}
/**
 * Shared assertion helper: verifies that boosting a phrase clause relative to a
 * terms clause still highlights the phrase, across three equivalent query shapes
 * (bool, boosting, and boosting with a negative boost).
 *
 * @param highlighterType highlighter implementation under test
 * @param boost           boost factor applied to the phrase clause
 * @param terms           the non-phrase clause
 * @param phrase          the phrase clause; its boost is temporarily mutated here
 */
private <P extends AbstractQueryBuilder<P>> void phraseBoostTestCaseForClauses(String highlighterType, float boost, QueryBuilder terms, P phrase) {
    // accept either one merged fragment or three separately highlighted words
    Matcher<String> highlightedMatcher = Matchers.either(containsString("<em>highlight words together</em>")).or(
            containsString("<em>highlight</em> <em>words</em> <em>together</em>"));
    SearchRequestBuilder search = client().prepareSearch("test").highlighter(
            new HighlightBuilder().field("field1", 100, 1).order("score").highlighterType(highlighterType).requireFieldMatch(true));
    // Try with a bool query
    phrase.boost(boost);
    SearchResponse response = search.setQuery(boolQuery().must(terms).should(phrase)).get();
    assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher);
    // reset so the boosting-query variants below carry the boost themselves
    phrase.boost(1);
    // Try with a boosting query
    response = search.setQuery(boostingQuery(phrase, terms).boost(boost).negativeBoost(1)).get();
    assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher);
    // Try with a boosting query using a negative boost
    response = search.setQuery(boostingQuery(phrase, terms).boost(1).negativeBoost(1/boost)).get();
    assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher);
}
/**
 * Verifies that every supported query type survives a store/load round trip:
 * the query is serialized into the percolator's query-builder field at index
 * time and must deserialize back to an equal builder.
 */
public void testStoringQueries() throws Exception {
    addQueryMapping();
    QueryBuilder[] queries = new QueryBuilder[]{
            termQuery("field", "value"), matchAllQuery(), matchQuery("field", "value"), matchPhraseQuery("field", "value"),
            prefixQuery("field", "v"), wildcardQuery("field", "v*"), rangeQuery("number_field").gte(0).lte(9),
            rangeQuery("date_field").from("2015-01-01T00:00").to("2015-01-01T00:00")
    };
    // note: it important that range queries never rewrite, otherwise it will cause results to be wrong.
    // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex)
    for (QueryBuilder query : queries) {
        ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
                XContentFactory.jsonBuilder().startObject()
                .field(fieldName, query)
                .endObject().bytes());
        // the serialized query is stored as a single binary field value
        BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
        assertQueryBuilder(qbSource, query);
    }
}
/**
 * Verifies that one mapping may declare several percolator fields and that each
 * field stores its own serialized copy of the indexed query.
 */
public void testMultiplePercolatorFields() throws Exception {
    String typeName = "another_type";
    String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
            .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
            .startObject("properties")
                .startObject("query_field1").field("type", "percolator").endObject()
                .startObject("query_field2").field("type", "percolator").endObject()
            .endObject()
            .endObject().endObject().string();
    mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true);

    // index the same query under both percolator fields
    QueryBuilder queryBuilder = matchQuery("field", "value");
    ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1",
            jsonBuilder().startObject()
                    .field("query_field1", queryBuilder)
                    .field("query_field2", queryBuilder)
                    .endObject().bytes()
    );
    assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields
    // each percolator field keeps an independent serialized copy of the query
    BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);

    queryBuilderAsBytes = doc.rootDoc().getField("query_field2.query_builder_field").binaryValue();
    assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
}
/**
 * Looks up the logcenter configuration document for the given product line and
 * app name in the metadata index and deserializes its source.
 *
 * @param jsonInput request carrying "productLine" and "appName"
 * @return the parsed configuration from the first matching document
 * @throws LogConsumerException when no matching metadata document exists
 */
public static LogcenterConfig createConfig(JSONObject jsonInput) throws LogConsumerException {
    String productLine = String.valueOf(jsonInput.get("productLine"));
    String appName = String.valueOf(jsonInput.get("appName"));
    // NOTE(review): productLine/appName are concatenated into the query_string
    // unescaped — a value containing quotes or query syntax would break or alter
    // the query; consider a bool query of term queries instead
    QueryBuilder qb = QueryBuilders.queryStringQuery("productLine:'" + productLine + "' AND appName:'" + appName + "'");
    SearchResponse response = ElasticsearchClient.getClient()
            .prepareSearch(Constants.METADATA_INDEX)
            .setTypes(Constants.METADATA_TYPE)
            .setQuery(qb)
            .get();
    JSONObject jsonObject = JSON.parseObject(response.toString());
    JSONArray hitArray = (JSONArray) jsonObject.getJSONObject("hits").get("hits");
    if (hitArray.size() == 0) {
        throw new LogConsumerException("index does not exist,please check the configuration of the .logcenter index");
    }
    // only the first hit is used — presumably (productLine, appName) is unique; verify
    JSONObject document = (JSONObject) hitArray.get(0);
    String jsonStr = document.get("_source").toString();
    return JSONObject.parseObject(jsonStr, LogcenterConfig.class);
}
/** * * @param data * @param queryBuilder * @param postFilter */ @SuppressWarnings("rawtypes") private void queryToList(UAVHttpMessage data, QueryBuilder queryBuilder, QueryBuilder postFilter, SortBuilder[] sorts) { SearchResponse sr = query(data, queryBuilder, postFilter, sorts); SearchHits shits = sr.getHits(); List<Map<String, Object>> records = new ArrayList<Map<String, Object>>(); for (SearchHit sh : shits) { Map<String, Object> record = sh.getSourceAsMap(); if (record == null) { continue; } records.add(record); } data.putResponse("rs", JSONHelper.toString(records)); // 返回总的条数 data.putResponse("count", shits.getTotalHits() + ""); }
/**
 * Finds objects matching the given term map, either all terms or any of them.
 * Empty/unbuildable term sets yield an empty result.
 */
@Override
@SuppressWarnings("unchecked")
public <P extends ParaObject> List<P> findTerms(String appid, String type, Map<String, ?> terms,
        boolean mustMatchAll, Pager... pager) {
    // nothing to match against -> nothing to return
    if (terms == null || terms.isEmpty()) {
        return Collections.emptyList();
    }
    QueryBuilder query = getTermsQuery(terms, mustMatchAll);
    return (query == null) ? Collections.emptyList() : searchQuery(appid, type, query, pager);
}
/**
 * Streams all running metadata entries whose given field equals the requested
 * name, pulling further scroll pages lazily through the returned iterator.
 *
 * @param field field to match on
 * @param name  required field value
 * @return iterator over the matching entries
 */
@Override
public Iterator<EsNameMetaData> getMetaDataByName(String field, String name) throws Exception {
    Preconditions.checkNotNull(field);
    Preconditions.checkNotNull(name);
    // only entries in the "running" state are of interest
    QueryBuilder query = QueryBuilders.boolQuery()
            .must(QueryBuilders.termQuery(field, name))
            .must(QueryBuilders.termQuery("state", "running"));
    ScrollableResponse<List<EsNameMetaData>> firstPage = this.retrieveScrollByQuery(query,
            EsMapper.getIncludeFields(EsNameMetaData.class, this.getClass()), BATCHSIZE,
            str -> updateMapper.readValue(str, EsNameMetaData.class));
    // the iterator transparently fetches the remaining scroll pages
    return new EsIterator<>(firstPage,
            r -> scrollNext(r.getContinousToken(), str -> updateMapper.readValue(str, EsNameMetaData.class)));
}
/**
 * Opens a scrolling search over this store's index/type and returns the first page.
 *
 * @param queryBuilder  query selecting the documents
 * @param includeFields source fields to fetch, or null for all
 * @param size          page size, must be positive
 * @param createFunc    deserializer turning a source string into a document
 * @return the first scrollable page of results
 */
protected <E extends EsDocument> ScrollableResponse<List<E>> retrieveScrollByQuery(
        QueryBuilder queryBuilder, String[] includeFields, int size, ThrowingFunction<String, E> createFunc)
        throws Exception {
    Preconditions.checkArgument(size > 0);
    SearchResponse response = esClient.prepareSearch()
            .setIndices(getIndexName())
            .setTypes(getDocTypeName())
            .setScroll(new TimeValue(SCROLLDEFAULTTIMEOUT))
            .setSize(size)
            .setQuery(queryBuilder)
            .setFetchSource(includeFields, null)
            .setVersion(true)
            .execute().actionGet();
    return convertToScrollableResponse(response, createFunc);
}
/**
 * Associates a filter to the alias. A null builder clears any existing filter;
 * otherwise the builder is rendered to its JSON string form and stored.
 *
 * @param filterBuilder the filter to associate, or null to clear
 * @return this alias, for chaining
 */
public Alias filter(QueryBuilder filterBuilder) {
    if (filterBuilder == null) {
        this.filter = null;
        return this;
    }
    try {
        // render the builder to JSON and keep only the string representation
        XContentBuilder builder = XContentFactory.jsonBuilder();
        filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.close();
        this.filter = builder.string();
    } catch (IOException e) {
        throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
    }
    return this;
}
/** * 所有 字段查询 * * @param data */ private void queryByField(UAVHttpMessage data) { QueryBuilder query = buildQuery(data); if (query == null) { return; } SearchResponse sr = query(data, query, null, buildSorts(data)); List<Map<String, Object>> records = getRecords(sr); long count = getCount(sr); data.putResponse("rs", JSONHelper.toString(records)); data.putResponse("count", count + ""); // 返回总的条数 }
/**
 * Builds a random bool query with 0-5 random sub-queries in each of the
 * must / should / must_not clause lists, recursing with reduced depth.
 */
private static QueryBuilder randomBoolQuery(List<String> stringFields, List<String> numericFields, int numDocs, int depth) {
    BoolQueryBuilder query = QueryBuilders.boolQuery();
    for (int i = randomIntBetween(0, 5); i > 0; i--) {
        query.must(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1));
    }
    for (int i = randomIntBetween(0, 5); i > 0; i--) {
        query.should(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1));
    }
    for (int i = randomIntBetween(0, 5); i > 0; i--) {
        query.mustNot(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1));
    }
    return query;
}
/**
 * Builds the adjacency-matrix aggregator factory, first enforcing the
 * index-level cap on the number of filters (the matrix grows quadratically),
 * then rewriting each keyed filter against the shard's query context.
 *
 * @throws QueryPhaseExecutionException if more filters are given than allowed
 */
@Override
protected AggregatorFactory<?> doBuild(SearchContext context, AggregatorFactory<?> parent,
        Builder subFactoriesBuilder) throws IOException {
    int maxFilters = context.indexShard().indexSettings().getMaxAdjacencyMatrixFilters();
    if (filters.size() > maxFilters) {
        // fixed: the original concatenation produced "].This limit" with no space
        throw new QueryPhaseExecutionException(context,
                "Number of filters is too large, must be less than or equal to: [" + maxFilters
                        + "] but was [" + filters.size() + "]. "
                        + "This limit can be set by changing the ["
                        + IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING.getKey()
                        + "] index level setting.");
    }
    // rewrite each keyed filter before handing them to the factory
    List<KeyedFilter> rewrittenFilters = new ArrayList<>(filters.size());
    for (KeyedFilter kf : filters) {
        rewrittenFilters.add(new KeyedFilter(kf.key(),
                QueryBuilder.rewriteQuery(kf.filter(), context.getQueryShardContext())));
    }
    return new AdjacencyMatrixAggregatorFactory(name, rewrittenFilters, separator, context, parent,
            subFactoriesBuilder, metaData);
}
/**
 * Read from a stream.
 */
public FiltersAggregationBuilder(StreamInput in) throws IOException {
    super(in);
    // NOTE: the read order below must mirror the corresponding writeTo() exactly
    keyed = in.readBoolean();
    int filtersSize = in.readVInt();
    filters = new ArrayList<>(filtersSize);
    if (keyed) {
        // keyed filters serialize their own name
        for (int i = 0; i < filtersSize; i++) {
            filters.add(new KeyedFilter(in));
        }
    } else {
        // anonymous filters are keyed by their position index
        for (int i = 0; i < filtersSize; i++) {
            filters.add(new KeyedFilter(String.valueOf(i), in.readNamedWriteable(QueryBuilder.class)));
        }
    }
    otherBucket = in.readBoolean();
    otherBucketKey = in.readString();
}
/**
 * Streams instances in the given region that are in the "terminated" state and
 * were launched within the last {@code days} days.
 *
 * @param region AWS region to query, must not be null
 * @param days   look-back window in days, must be positive
 * @return iterator lazily pulling all matching instances via scrolling
 */
@Override
public Iterator<EsInstance> getRecentlyTerminatedInstances(Region region, int days) throws Exception {
    Preconditions.checkNotNull(region);
    Preconditions.checkArgument(days > 0);
    DateTime start = getStartSinceDay(days);
    // terminated instances in the region launched on or after the window start
    QueryBuilder query = QueryBuilders.boolQuery()
            .must(QueryBuilders.termQuery("region", region.getName().toLowerCase()))
            .must(QueryBuilders.termQuery("state", "terminated"))
            .must(QueryBuilders.rangeQuery("aws_launch_time").gte(start));
    ScrollableResponse<List<EsInstance>> firstPage = this.retrieveScrollByQuery(query,
            EsMapper.getIncludeFields(getInstanceClass()), BATCHSIZE,
            str -> (EsInstance) insertMapper.readValue(str, getInstanceClass()));
    // the iterator transparently fetches the remaining scroll pages
    return new EsIterator<>(firstPage, r -> scrollNext(r.getContinousToken(),
            str -> (EsInstance) insertMapper.readValue(str, getInstanceClass())));
}
/**
 * Builds a prefix or regexp query for the supported lexical operations
 * ({@code STARTS_WITH}, {@code ENDS_WITH}) against the field's raw sub-field.
 *
 * <p>Bug fix: the original applied {@code boost} and then unconditionally
 * re-assigned the un-boosted query, so the boost was always discarded. The
 * boost is now applied only once, and kept.
 *
 * @param key            field name (queried against {@code key + RAW_APPEND})
 * @param rangeOperation operation name -> operand value
 * @param boost          optional boost, may be null
 * @return the query for the last recognized operation, or null if none matched
 */
private static QueryBuilder createLexicalQuery(String key, Map<String, Object> rangeOperation, Float boost) {
    QueryBuilder queryBuilder = null;
    for (Map.Entry<String, Object> it : rangeOperation.entrySet()) {
        if (it.getKey().equalsIgnoreCase(STARTS_WITH)) {
            PrefixQueryBuilder prefix = QueryBuilders.prefixQuery(key + RAW_APPEND, (String) it.getValue());
            if (isNotNull(boost)) {
                prefix.boost(boost);
            }
            queryBuilder = prefix;
        } else if (it.getKey().equalsIgnoreCase(ENDS_WITH)) {
            // NOTE(review): "~" is the ES regexp complement operator — confirm
            // this expression really expresses "ends with"
            String endsWithRegex = "~" + it.getValue();
            RegexpQueryBuilder regexp = QueryBuilders.regexpQuery(key + RAW_APPEND, endsWithRegex);
            if (isNotNull(boost)) {
                regexp.boost(boost);
            }
            queryBuilder = regexp;
        }
    }
    return queryBuilder;
}
@SuppressWarnings("rawtypes") private void queryToList(UAVHttpMessage data, QueryBuilder queryBuilder, QueryBuilder postFilter, SortBuilder[] sorts) { SearchResponse sr = query(data, queryBuilder, postFilter, sorts); SearchHits shits = sr.getHits(); List<Map<String, Object>> records = new ArrayList<Map<String, Object>>(); for (SearchHit sh : shits) { Map<String, Object> record = sh.getSourceAsMap(); if (record == null) { continue; } records.add(record); } // 如果只存在eline则需要把结果逆序,保证其原始顺序 long startLine = DataConvertHelper.toLong(data.getRequest("sline"), -1); long endLine = DataConvertHelper.toLong(data.getRequest("eline"), -1); if (startLine == -1 && endLine > -1) { Collections.reverse(records); } data.putResponse("rs", JSONHelper.toString(records)); // 返回总条数 data.putResponse("count", shits.getTotalHits() + ""); }
/**
 * Re-parses this alias filter against the given context and returns a rewritten
 * copy, or a copy carrying the original filter when there is nothing to reparse.
 */
AliasFilter rewrite(QueryRewriteContext context) throws IOException {
    QueryBuilder reparsed = reparseFilter(context);
    if (reparsed == null) {
        return new AliasFilter(filter, aliases);
    }
    return new AliasFilter(QueryBuilder.rewriteQuery(reparsed, context), aliases);
}
/**
 * Fetches the labeled entities with the given ids from the entity model of the
 * target knowledge base.
 *
 * @param dbId knowledge base identifier
 * @param ids  entity URIs/ids to fetch
 * @return the matching labeled entities
 */
@Override
public List<LabeledEntity> getEntities(String dbId, List<String> ids) {
    logger.info(marker, "Fetching ids={}", ids);
    Namespace ns = core.getNamespace(dbId);
    // shrink full URIs down to namespace-local ids before querying
    List shortIds = ids.stream().map(ns::shrinkURI).collect(Collectors.toList());
    ModifiableSearchParams searchParams = ModifiableSearchParams.create(dbId).model(BuiltInModel.ENTITY);
    QueryBuilder query = termsQuery("id", shortIds);
    Searcher searcher = core.getSearcher(searchParams.getKbId());
    Scores scores = searcher.search(new ElasticQueryHolder(query, searchParams));
    // every entry of an ENTITY-model search is a LabeledEntity
    return scores.stream().map(score -> (LabeledEntity) score.getEntry()).collect(Collectors.toList());
}
@Override public Scores pivotedSearch(InstanceEntity pivot, ModifiableSearchParams searchParams, ModifiableRankParams rankParams) { searchParams.model(BuiltInModel.FACT); if (rankParams instanceof ModifiableIndraParams) { configureDistributionalParams(searchParams.getKbId(), (ModifiableIndraParams) rankParams); } QueryBuilder queryBuilder = boolQuery() .should(nestedQuery("s", termQuery("s.id", pivot.getId()), ScoreMode.Max)) .should(nestedQuery("o", termQuery("o.id", pivot.getId()), ScoreMode.Max)).minimumNumberShouldMatch(1); Searcher searcher = core.getSearcher(searchParams.getKbId()); Scores scores = searcher.search(new ElasticQueryHolder(queryBuilder, searchParams)); // We have to remap the facts to properties, the real target of the ranker call. // Thus we're discarding the score values from the underlying search engine. Shall we? Scores propScores = new Scores(scores.stream() .map(s -> ((Fact) s.getEntry()).getPredicate()) .distinct() .map(p -> new Score(p, 0)) .collect(Collectors.toList())); return Rankers.apply(propScores, rankParams, searchParams.getSearchTerm()); }
/**
 * Finds objects whose given field starts with the given prefix. In nested mode,
 * property fields are routed through the nested key/value query form.
 *
 * @return the matching objects, or an empty list for blank field/prefix
 */
@Override
public <P extends ParaObject> List<P> findPrefix(String appid, String type, String field, String prefix, Pager... pager) {
    // both a field and a prefix are required
    if (StringUtils.isBlank(field) || StringUtils.isBlank(prefix)) {
        return Collections.emptyList();
    }
    final QueryBuilder qb;
    if (nestedMode() && field.startsWith(PROPS_PREFIX)) {
        // nested properties live under a wrapper object and need a key/value query
        qb = nestedPropsQuery(keyValueBoolQuery(field, prefixQuery(getValueFieldName(prefix), prefix)));
    } else {
        qb = prefixQuery(field, prefix);
    }
    return searchQuery(appid, type, qb, pager);
}
/**
 * Builds an ids query with up to 100 random document ids and, half the time,
 * a random boost.
 */
private static QueryBuilder randomIDsQuery() {
    IdsQueryBuilder idsQuery = QueryBuilders.idsQuery();
    int numIDs = randomInt(100);
    for (int i = 0; i < numIDs; i++) {
        idsQuery.addIds(String.valueOf(randomInt()));
    }
    // sometimes attach a random boost as well
    if (randomBoolean()) {
        idsQuery.boost(randomFloat());
    }
    return idsQuery;
}
/**
 * Builds an OR filter matching the vertices connected to the source vertex
 * (optionally restricted to another vertex / an id set) and, when requested,
 * their extended-data documents.
 */
private QueryBuilder getVertexFilter(EnumSet<ElasticsearchDocumentType> elementTypes) {
    // collect the ids of all connected vertices, honoring the optional filters
    List<String> edgeLabels = getParameters().getEdgeLabels();
    String[] edgeLabelsArray = (edgeLabels == null || edgeLabels.isEmpty())
            ? null
            : edgeLabels.toArray(new String[edgeLabels.size()]);
    Stream<EdgeInfo> edgeInfos = stream(sourceVertex.getEdgeInfos(
            direction,
            edgeLabelsArray,
            getParameters().getAuthorizations()
    ));
    if (otherVertexId != null) {
        edgeInfos = edgeInfos.filter(ei -> ei.getVertexId().equals(otherVertexId));
    }
    if (getParameters().getIds().size() > 0) {
        edgeInfos = edgeInfos.filter(ei -> getParameters().getIds().contains(ei.getVertexId()));
    }
    String[] ids = edgeInfos.map(EdgeInfo::getVertexId).toArray(String[]::new);

    List<QueryBuilder> filters = new ArrayList<>();
    if (elementTypes.contains(ElasticsearchDocumentType.VERTEX)) {
        filters.add(QueryBuilders.idsQuery().addIds(ids));
    }
    if (elementTypes.contains(ElasticsearchDocumentType.VERTEX_EXTENDED_DATA)) {
        // extended data is matched per vertex id via element type + element id
        for (String vertexId : ids) {
            filters.add(QueryBuilders.boolQuery()
                    .must(QueryBuilders.termQuery(
                            Elasticsearch5SearchIndex.ELEMENT_TYPE_FIELD_NAME,
                            ElasticsearchDocumentType.VERTEX_EXTENDED_DATA.getKey()))
                    .must(QueryBuilders.termQuery(
                            Elasticsearch5SearchIndex.EXTENDED_DATA_ELEMENT_ID_FIELD_NAME,
                            vertexId)));
        }
    }
    return orFilters(filters);
}
/**
 * Combines the given filters with OR semantics: a single filter is returned
 * as-is, multiple filters become a bool query where at least one must match.
 */
private QueryBuilder orFilters(List<QueryBuilder> filters) {
    if (filters.size() == 1) {
        return filters.get(0);
    }
    // NOTE(review): an empty list yields a bool query with minimumShouldMatch(1)
    // but no should clauses — verify callers never pass an empty list
    BoolQueryBuilder anyOf = QueryBuilders.boolQuery();
    for (QueryBuilder filter : filters) {
        anyOf.should(filter);
    }
    anyOf.minimumShouldMatch(1);
    return anyOf;
}
/**
 * Picks one of three simple query shapes (match_all, ids, term), each with a
 * random boost, for use as a randomized nested sort filter.
 */
protected static QueryBuilder randomNestedFilter() {
    switch (randomIntBetween(0, 2)) {
        case 0:
            return new MatchAllQueryBuilder().boost(randomFloat());
        case 1:
            return new IdsQueryBuilder().boost(randomFloat());
        case 2:
            return new TermQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomDouble()).boost(randomFloat());
        default:
            throw new IllegalStateException("Only three query builders supported for testing sort");
    }
}
/**
 * Download product from given category.
 *
 * @param request which contains query category
 * @param productPublishSubject the subject which downloaded product should publish to
 */
public void downloadProducts(DownloadProductsRequest request, PublishSubject<Product> productPublishSubject) {
    QueryBuilder queryBuilder = QueryBuilders.termQuery("category", request.getCategory());
    // open a scrolling search so large categories can be streamed in batches
    SearchResponse scrollResponse = esClient
        .prepareSearch(INDEX)
        .setScroll(DEFAULT_SCROLL_TIME_VALUE)
        .setTypes(TYPE)
        .setQuery(queryBuilder)
        .setSize(SCROLL_SIZE)
        .get();
    do {
        scrollResponse.getHits().forEach(hit -> {
            try {
                Product.Builder builder = Product.newBuilder();
                jsonParser.merge(hit.sourceAsString(), builder);
                productPublishSubject.onNext(builder.build());
            } catch (IOException ioe) {
                // Don't fail the whole stream
                // NOTE(review): the comment above contradicts the code — onError
                // terminates the subject and the rethrow aborts the scroll loop,
                // so a single bad record DOES fail the whole stream; confirm intent
                log.error("Unable to read product record", ioe);
                productPublishSubject.onError(ioe);
                throw new IllegalStateException(ioe);
            }
        });
        // Fetch next batch of cite group records
        scrollResponse = esClient
            .prepareSearchScroll(scrollResponse.getScrollId())
            .setScroll(DEFAULT_SCROLL_TIME_VALUE)
            .execute()
            .actionGet();
    } while (scrollResponse.getHits().getHits().length != 0);
    // all batches drained — complete the subject
    productPublishSubject.onComplete();
}
/**
 * Parses a percolator query from the document being indexed, verifies and
 * rewrites it, stores its serialized form in the dedicated binary field, and
 * finally indexes the extracted Lucene query.
 */
@Override
public Mapper parse(ParseContext context) throws IOException {
    QueryShardContext queryShardContext = this.queryShardContext.get();
    if (context.doc().getField(queryBuilderField.name()) != null) {
        // If a percolator query has been defined in an array object then multiple percolator queries
        // could be provided. In order to prevent this we fail if we try to parse more than one query
        // for the current document.
        throw new IllegalArgumentException("a document can only contain one percolator query");
    }

    XContentParser parser = context.parser();
    QueryBuilder queryBuilder = parseQueryBuilder(
            queryShardContext.newParseContext(parser), parser.getTokenLocation()
    );
    verifyQuery(queryBuilder);
    // Fetching of terms, shapes and indexed scripts happen during this rewrite:
    queryBuilder = queryBuilder.rewrite(queryShardContext);

    // persist the rewritten query so it can be deserialized again at percolate time
    try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
        queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
        builder.flush();
        byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
        context.doc().add(new Field(queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType()));
    }

    Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder);
    processQuery(query, context);
    return null;
}
/**
 * Parses the inner query at the parse context's current position, translating
 * any {@link IOException} into a {@link ParsingException} anchored at the
 * given source location.
 */
private static QueryBuilder parseQueryBuilder(QueryParseContext context, XContentLocation location) {
    try {
        return context.parseInnerQueryBuilder();
    } catch (IOException failure) {
        // surface parse failures together with their source location
        throw new ParsingException(location, "Failed to parse", failure);
    }
}
/**
 * Verifies that a percolate query builder refuses to produce a query before it
 * has been rewritten, and that rewriting inlines the fetched document source.
 */
@Override
public void testMustRewrite() throws IOException {
    PercolateQueryBuilder pqb = doCreateTestQueryBuilder(true);
    // converting to a query before rewriting must be rejected
    IllegalStateException e = expectThrows(IllegalStateException.class, () -> pqb.toQuery(createShardContext()));
    assertThat(e.getMessage(), equalTo("query builder must be rewritten first"));
    QueryBuilder rewritten = pqb.rewrite(createShardContext());
    // after rewriting, the builder should carry the document source inline
    PercolateQueryBuilder expected = new PercolateQueryBuilder(pqb.getField(), pqb.getDocumentType(),
            documentSource, XContentType.JSON);
    assertEquals(expected, rewritten);
}
/**
 * Validates the query against the "test" index with explanations enabled and
 * asserts the single resulting explanation matches the given matcher.
 *
 * @param queryBuilder query under validation
 * @param matcher      expected explanation text
 * @param withRewrite  whether to ask the validate API to rewrite the query
 */
private static void assertExplanation(QueryBuilder queryBuilder, Matcher<String> matcher, boolean withRewrite) {
    ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("test")
            .setTypes("type1")
            .setQuery(queryBuilder)
            .setExplain(true)
            .setRewrite(withRewrite)
            .execute().actionGet();
    // exactly one shard explanation, with no error and a matching text
    assertThat(response.isValid(), equalTo(true));
    assertThat(response.getQueryExplanation().size(), equalTo(1));
    assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
    assertThat(response.getQueryExplanation().get(0).getExplanation(), matcher);
}
/**
 * Verifies boost handling when a template query is rewritten: a boost inside
 * the templated query is preserved as-is, while a boost set on the template
 * builder itself wraps the inner query in a boosted bool query.
 */
public void testRewriteWithInnerBoost() throws IOException {
    final TermQueryBuilder query = new TermQueryBuilder("foo", "bar").boost(2);
    QueryBuilder builder = new TemplateQueryBuilder(new Script(ScriptType.INLINE, "mockscript", query.toString(),
            Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()), Collections.emptyMap()));
    // the inner boost survives the rewrite unchanged
    assertEquals(query, builder.rewrite(createShardContext()));

    builder = new TemplateQueryBuilder(new Script(ScriptType.INLINE, "mockscript", query.toString(),
            Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()), Collections.emptyMap())).boost(3);
    // an outer boost cannot be pushed into the inner query, so it is applied via a wrapping bool query
    assertEquals(new BoolQueryBuilder().must(query).boost(3), builder.rewrite(createShardContext()));
}
/**
 * Smoke test: a boosted constant-score wrapper around a term query can be
 * built and submitted through the client.
 */
@Test
public void testForClient() throws Exception {
    // constant-score wrapper around a term query, boosted to 2.0
    QueryBuilder query = QueryBuilders.constantScoreQuery(
            QueryBuilders.termQuery("name", "kimchy")
    ).boost(2.0f);
    client.prepareSearch().setQuery(query).execute().actionGet();
}