/**
 * Reference snippet establishing common QueryBuilder construction patterns.
 * (Removed a dead, never-initialized {@code SearchSourceBuilder} local.)
 */
void test() {
    BoolQueryBuilder qb = QueryBuilders.boolQuery();
    // Full-text patterns.
    QueryBuilder qbmm = QueryBuilders.multiMatchQuery("name", "fieldNames");
    QueryBuilder qbm = QueryBuilders.matchQuery("name", "text");
    QueryBuilder qbmp = QueryBuilders.matchPhraseQuery("name", "text");
    // Exact-term patterns on ontology properties.
    QueryBuilder qb1 = QueryBuilders.termQuery(ITQCoreOntology.TUPLE_SUBJECT_PROPERTY, "");
    QueryBuilder qb2 = QueryBuilders.termQuery(ITQCoreOntology.TUPLE_OBJECT_PROPERTY, "");
    QueryBuilder qb3 = QueryBuilders.termQuery(ITQCoreOntology.INSTANCE_OF_PROPERTY_TYPE, "");
    // Boolean composition: required type match plus optional subject/object matches.
    qb.must(qb3);
    qb.should(qb1);
    qb.should(qb2);
    // Partial-match patterns.
    QueryBuilder partial = QueryBuilders.regexpQuery("name", "regexp");
    QueryBuilder wildcard = QueryBuilders.wildcardQuery("name", "query");
}
/**
 * Percolator cannot handle range queries whose bound resolves at query time:
 * a "now"-based bound must be rejected wherever it appears (top level or
 * nested inside bool / constant_score / boosting / function_score), and the
 * join queries has_child / has_parent are rejected outright.
 */
public void testUnsupportedQueries() {
    // Fixed date bounds are fine; "now" makes the query time-dependent.
    RangeQueryBuilder rangeQuery1 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("2017-01-01||/D");
    RangeQueryBuilder rangeQuery2 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("now");
    PercolatorFieldMapper.verifyQuery(rangeQuery1);
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(rangeQuery2));
    // The same pair checked when nested inside each compound query type.
    PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(rangeQuery1));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(rangeQuery2)));
    PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1)));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder(rangeQuery2)));
    PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery1, new MatchAllQueryBuilder()));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery2, new MatchAllQueryBuilder())));
    PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery1, new RandomScoreFunctionBuilder()));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery2, new RandomScoreFunctionBuilder())));
    // Join queries are unsupported at any nesting level.
    HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasChildQuery));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));
    HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasParentQuery));
    expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new
    BoolQueryBuilder().must(hasParentQuery)));
}
/**
 * Analyzed match query on goods name AND description, sorted by price
 * descending. Expects exactly two matching documents in the test index.
 */
@Test
public void testSelectSort() {
    // Assemble the query: both fields must match the analyzed term.
    BoolQueryBuilder builder = boolQuery();
    builder.must(matchQuery("goodsName", "百事")).must(matchQuery("description", "百事"));
    SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
    // Sort the results by price, highest first.
    searchQuery.addSort(new Sort(Sort.Direction.DESC, new String[]{"price"}));
    Page<GoodsModel> page = elasticsearchTemplate.queryForPage(searchQuery, GoodsModel.class);
    System.out.println(page.getSize());
    List<GoodsModel> GoodsESDocs = page.getContent();
    System.out.println(JSON.toJSONString(GoodsESDocs));
    Assert.assertThat(page.getTotalElements(), is(2L));
}
/**
 * Builds an Elasticsearch search source that matches {@code textQuery}
 * against each of the given fields (OR semantics across fields).
 *
 * @param textQuery the free-text query
 * @param isPhraseQuery when true use match_phrase, otherwise plain match
 * @param fieldNames the fields to search across
 * @param start result offset
 * @param count page size; a negative value leaves the default size
 * @return the search source serialized as a JSON string
 */
public String fullTextQueryMultipleFields(String textQuery, boolean isPhraseQuery, List<String> fieldNames, int start, int count) {
    BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
    for (String field : fieldNames) {
        QueryBuilder clause = isPhraseQuery
                ? QueryBuilders.matchPhraseQuery(field, textQuery)
                : QueryBuilders.matchQuery(field, textQuery);
        boolQuery.should(clause);
    }
    SearchSourceBuilder source = new SearchSourceBuilder();
    source.query(boolQuery);
    source.from(start);
    if (count > -1) {
        source.size(count);
    }
    return source.toString();
}
private QueryBuilder preparse(String q, int timezoneOffset) { // detect usage of OR connector usage. q = fixQueryMistakes(q); List<String> terms = splitIntoORGroups(q); // OR binds stronger than AND if (terms.size() == 0) return QueryBuilders.constantScoreQuery(QueryBuilders.matchAllQuery()); // special handling if (terms.size() == 1) return parse(terms.get(0), timezoneOffset); // generic handling BoolQueryBuilder aquery = QueryBuilders.boolQuery(); for (String t: terms) { QueryBuilder partial = parse(t, timezoneOffset); aquery.filter(partial); } return aquery; }
/**
 * Verifies that the GET issued internally by a terms-lookup query carries the
 * request context and headers: index a followers list, query via terms
 * lookup, then assert the captured GET requests contained the headers.
 */
public void testThatTermsLookupGetRequestContainsContextAndHeaders() throws Exception {
    // Document holding the list of terms to be looked up.
    transportClient().prepareIndex(lookupIndex, "type", "1")
        .setSource(jsonBuilder().startObject().array("followers", "foo", "bar", "baz").endObject()).get();
    // Document that should be matched through the lookup.
    transportClient().prepareIndex(queryIndex, "type", "1")
        .setSource(jsonBuilder().startObject().field("username", "foo").endObject()).get();
    transportClient().admin().indices().prepareRefresh(queryIndex, lookupIndex).get();
    TermsLookup termsLookup = new TermsLookup(lookupIndex, "type", "1", "followers");
    TermsQueryBuilder termsLookupFilterBuilder = QueryBuilders.termsLookupQuery("username", termsLookup);
    BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(termsLookupFilterBuilder);
    SearchResponse searchResponse = transportClient()
        .prepareSearch(queryIndex)
        .setQuery(queryBuilder)
        .get();
    assertNoFailures(searchResponse);
    assertHitCount(searchResponse, 1);
    // The internal lookup GET must have propagated context and headers.
    assertGetRequestsContainHeaders();
}
/**
 * Exercises zero_terms_query on match queries: the classic analyzer drops
 * the stopword "a", yielding a zero-term clause. With NONE that clause
 * matches nothing; with ALL it degrades to match_all.
 */
public void testMatchQueryZeroTermsQuery() {
    assertAcked(prepareCreate("test")
        .addMapping("type1", "field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic"));
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get();
    client().prepareIndex("test", "type1", "2").setSource("field1", "value2").get();
    refresh();
    // NONE: the empty clause matches nothing, so the conjunction finds 0 hits.
    BoolQueryBuilder boolQuery = boolQuery()
        .must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE))
        .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE));
    SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get();
    assertHitCount(searchResponse, 0L);
    // ALL: the empty clause becomes match_all, so only "value1" restricts.
    boolQuery = boolQuery()
        .must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL))
        .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
    searchResponse = client().prepareSearch().setQuery(boolQuery).get();
    assertHitCount(searchResponse, 1L);
    // ALL with only the empty clause matches every document.
    boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
    searchResponse = client().prepareSearch().setQuery(boolQuery).get();
    assertHitCount(searchResponse, 2L);
}
/**
 * Same zero_terms_query contract as the match-query test, but via
 * multi_match over two fields (fields are ORed together within a clause).
 */
public void testMultiMatchQueryZeroTermsQuery() {
    assertAcked(prepareCreate("test")
        .addMapping("type1", "field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic"));
    client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();
    client().prepareIndex("test", "type1", "2").setSource("field1", "value3", "field2", "value4").get();
    refresh();
    // NONE: the stopword-only clause matches nothing, so the conjunction fails.
    BoolQueryBuilder boolQuery = boolQuery()
        .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE))
        .must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)); // Fields are ORed together
    SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get();
    assertHitCount(searchResponse, 0L);
    // ALL: the empty clause becomes match_all; "value4" selects document 2.
    boolQuery = boolQuery()
        .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL))
        .must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
    searchResponse = client().prepareSearch().setQuery(boolQuery).get();
    assertHitCount(searchResponse, 1L);
    // ALL with only the empty clause matches every document.
    boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
    searchResponse = client().prepareSearch().setQuery(boolQuery).get();
    assertHitCount(searchResponse, 2L);
}
/**
 * Builds a random bool query with 0-5 must, 0-5 should and 0-5 must_not
 * clauses, each a recursively generated random query of reduced depth.
 */
private static QueryBuilder randomBoolQuery(List<String> stringFields, List<String> numericFields, int numDocs, int depth) {
    // A typed local avoids the repeated (BoolQueryBuilder) casts.
    BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
    int mustCount = randomIntBetween(0, 5);
    for (int i = 0; i < mustCount; i++) {
        boolQuery.must(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1));
    }
    int shouldCount = randomIntBetween(0, 5);
    for (int i = 0; i < shouldCount; i++) {
        boolQuery.should(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1));
    }
    int mustNotCount = randomIntBetween(0, 5);
    for (int i = 0; i < mustNotCount; i++) {
        boolQuery.mustNot(randomQueryBuilder(stringFields, numericFields, numDocs, depth - 1));
    }
    return boolQuery;
}
/**
 * Counts, per day over the last month, how many URLs from the given source
 * were created.
 *
 * @param sourceUrl the source whose URLs are counted
 * @return one value per day bucket (key formatted yyyy-MM-dd) with its doc count
 */
public List<DateHistogramValue> calculateStats(String sourceUrl) {
    // Restrict to documents created within the last month for this source.
    BoolQueryBuilder filter = QueryBuilders.boolQuery()
        .must(QueryBuilders.rangeQuery("created").gte("now-1M"))
        .must(QueryBuilders.termQuery("source", sourceUrl));
    SearchResponse response = getConnection().getClient()
        .prepareSearch(getIndex())
        .setTypes(getType())
        .setSearchType(SearchType.DEFAULT)
        .setQuery(filter)
        .addAggregation(AggregationBuilders
            .dateHistogram("urls_over_time")
            .field("created")
            .format("yyyy-MM-dd")
            .dateHistogramInterval(DateHistogramInterval.DAY))
        .setSize(0) // aggregation only -- individual hits are not needed
        .setFetchSource(true)
        .setExplain(false)
        .execute()
        .actionGet();
    InternalDateHistogram hits = response.getAggregations().get("urls_over_time");
    return hits.getBuckets().stream()
        .map(b -> new DateHistogramValue(b.getKeyAsString(), b.getDocCount()))
        .collect(Collectors.toList());
}
/**
 * Finds the newest URLs with the given status and source.
 *
 * @param status the status value, matched via its string form
 *               (NOTE(review): raw {@code Enum} parameter -- a typed enum would be safer)
 * @param source the source to filter on
 * @param count maximum number of URLs to return
 * @return matching URLs ordered by creation time, newest first
 */
public List<HttpUrl> findUrlsByStatusAndSource(Enum status, String source, int count) {
    BoolQueryBuilder filter = QueryBuilders.boolQuery()
        .must(QueryBuilders.termQuery("status", String.valueOf(status)))
        .must(QueryBuilders.termQuery("source", source));
    SearchResponse response = getConnection().getClient()
        .prepareSearch(getIndex())
        .setTypes(getType())
        .setSearchType(SearchType.DEFAULT)
        .setPostFilter(filter) // post filter: applied after the (match-all) query phase
        .addSort("created", SortOrder.DESC)
        .setSize(count)
        .setFetchSource(true)
        .setExplain(false)
        .execute()
        .actionGet();
    SearchHits hits = response.getHits();
    // Map each hit's source map onto an HttpUrl value object.
    return Arrays.stream(hits.getHits())
        .map(SearchHit::getSource)
        .map(s -> {
            HttpUrl httpUrl = new HttpUrl();
            httpUrl.setUrl(Objects.toString(s.get("url"), null));
            httpUrl.setPublished(Objects.toString(s.get("published"), null));
            httpUrl.setDiscovered(EsDataParser.nullOrDate(s.get("created")));
            httpUrl.setSource(source);
            return httpUrl;
        })
        .collect(Collectors.toList());
}
/**
 * Fetches every source flagged enabled=true, ordered by last update
 * (oldest first), capped at 10000 hits.
 */
public List<HttpSource> findEnabledSources() {
    BoolQueryBuilder enabledOnly = QueryBuilders.boolQuery()
        .must(QueryBuilders.termQuery("enabled", true));
    SearchResponse response = getConnection().getClient()
        .prepareSearch(getIndex())
        .setTypes(getType())
        .setSearchType(SearchType.DEFAULT)
        .setPostFilter(enabledOnly)
        .addSort("updated", SortOrder.ASC)
        .setSize(10000)
        .setFetchSource(true)
        .setExplain(false)
        .execute()
        .actionGet();
    // Convert each hit's source map into an HttpSource.
    return Arrays.stream(response.getHits().getHits())
        .map(hit -> mapToHttpSource(hit.sourceAsMap()))
        .collect(Collectors.toList());
}
private void applyFieldStrategy(BoolQueryBuilder rootQuery, Function<String, QueryBuilder> fieldQueryResolver, Set<? extends FactSearchCriteria.FieldStrategy> fieldStrategies, FactSearchCriteria.MatchStrategy matchStrategy) { // Determine all fields to query. Set<String> fieldsToQuery = fieldStrategies.stream() .flatMap(strategy -> strategy.getFields().stream()) .collect(Collectors.toSet()); BoolQueryBuilder strategyQuery = boolQuery(); for (String field : fieldsToQuery) { if (matchStrategy == FactSearchCriteria.MatchStrategy.all) { // Field query must match all fields. strategyQuery.filter(fieldQueryResolver.apply(field)); } else { // Field query should match at least one field. strategyQuery.should(fieldQueryResolver.apply(field)); } } rootQuery.filter(strategyQuery); }
/**
 * Adds the access-control portion of the Fact search query. A Fact is
 * visible when any of the following holds:
 *  - it is Public,
 *  - it is Explicit and the current user appears in its ACL,
 *  - it is RoleBased and the user is in the ACL or has access to the owning organization.
 */
private void applyAccessControlQuery(FactSearchCriteria criteria, BoolQueryBuilder rootQuery) {
    // Query to verify that user has access to Fact ...
    BoolQueryBuilder accessQuery = boolQuery()
        // ... if Fact is public.
        .should(termQuery("accessMode", FactDocument.AccessMode.Public))
        // ... if AccessMode == Explicit user must be in ACL.
        .should(boolQuery()
            .filter(termQuery("accessMode", FactDocument.AccessMode.Explicit))
            .filter(termQuery("acl", criteria.getCurrentUserID()))
        )
        // ... if AccessMode == RoleBased user must be in ACL or have access to the owning Organization.
        .should(boolQuery()
            .filter(termQuery("accessMode", FactDocument.AccessMode.RoleBased))
            .filter(boolQuery()
                .should(termQuery("acl", criteria.getCurrentUserID()))
                .should(termsQuery("organizationID", criteria.getAvailableOrganizationID()))
            )
        );
    // Always apply access control query.
    rootQuery.filter(accessQuery);
}
/**
 * Finds objects having any of the given terms in the specified field.
 * In nested mode, properties fields are matched via per-term key/value
 * queries ORed together; otherwise a plain terms query is used.
 *
 * @param appid the application id
 * @param type the object type to search within
 * @param field the field to match against
 * @param terms the candidate values; blank field or null/empty terms yield an empty result
 * @param pager optional paging
 * @return matching objects, never null
 */
@Override
public <P extends ParaObject> List<P> findTermInList(String appid, String type, String field, List<?> terms, Pager... pager) {
    // An empty term list can never match anything; bailing out early also
    // prevents passing a null sub-query to nestedPropsQuery() below (the
    // original code hit that path when terms was empty in nested mode).
    if (StringUtils.isBlank(field) || terms == null || terms.isEmpty()) {
        return Collections.emptyList();
    }
    QueryBuilder qb;
    if (nestedMode() && field.startsWith(PROPS_PREFIX)) {
        // Nested properties: OR together one key/value query per term.
        QueryBuilder lastTermQuery = null;
        BoolQueryBuilder orQuery = boolQuery();
        for (Object term : terms) {
            lastTermQuery = keyValueBoolQuery(field, String.valueOf(term));
            orQuery.should(lastTermQuery);
        }
        // A single term needs no bool wrapper.
        qb = nestedPropsQuery(terms.size() > 1 ? orQuery : lastTermQuery);
    } else {
        qb = termsQuery(field, terms);
    }
    return searchQuery(appid, type, qb, pager);
}
/**
 * Builds a DFS-only search request whose query ORs together a term query
 * for every term of every (selected) field in the given term vectors.
 *
 * @param termVectorsFields term vectors to extract terms from
 * @param indices indices to run the DFS phase against
 * @param types mapping types to target
 * @param selectedFields when non-null, only these field names are included
 * @throws IOException if reading the term vectors fails
 */
public DfsOnlyRequest(Fields termVectorsFields, String[] indices, String[] types, Set<String> selectedFields) throws IOException {
    super(indices);
    // build a search request with a query of all the terms
    final BoolQueryBuilder boolBuilder = boolQuery();
    for (String fieldName : termVectorsFields) {
        // Skip fields the caller did not select.
        if ((selectedFields != null) && (!selectedFields.contains(fieldName))) {
            continue;
        }
        Terms terms = termVectorsFields.terms(fieldName);
        TermsEnum iterator = terms.iterator();
        // One should-clause per term in this field.
        while (iterator.next() != null) {
            String text = iterator.term().utf8ToString();
            boolBuilder.should(QueryBuilders.termQuery(fieldName, text));
        }
    }
    // wrap a search request object
    this.searchRequest = new SearchRequest(indices).types(types).source(new SearchSourceBuilder().query(boolBuilder));
}
/**
 * Provides a mechanism to create a composite expression, consisting of two or more leaf-level expressions that
 * actually compare property values.
 *
 * @param conditionType The {@link ConditionType} representing the composite operation taking place (EG: {@code AND}, @{code OR}).
 * @param parts The parts of the composite expression to apply the {@link ConditionType} to.
 * @return A composite {@link QueryBuilder} consisting of two or more expressions.
 * @see #buildSimpleExpression(PrimitiveStatement)
 */
QueryBuilder buildCompositeExpression(final ConditionType conditionType, final List<QueryBuilder> parts) {
    final BoolQueryBuilder boolQueryBuilder = createBoolQueryBuilder();
    for (int partIndex = 0; partIndex < parts.size(); partIndex++) {
        final QueryBuilder part = parts.get(partIndex);
        // The first part has nothing earlier to fold into.
        final QueryBuilder previousPart = partIndex > 0 ? getPreviousQueryPart(boolQueryBuilder, conditionType) : null;
        // Try and fold this into the previous statement part IFF possible.
        if (null != previousPart && canMergeQueryParts(previousPart, part, conditionType)) {
            mergeQueryParts(previousPart, part);
        } else {
            // If no folding is possible, just keep on going...
            // AND maps to must(); every other condition type maps to should().
            if (conditionType.equals(ConditionType.AND)) {
                boolQueryBuilder.must(part);
            } else {
                boolQueryBuilder.should(part);
            }
        }
    }
    return boolQueryBuilder;
}
/**
 * Tests {@link ElasticsearchQueryBuilderVisitor#buildCompositeExpression(ConditionType, List)} for the case
 * where the passed {@link ConditionType} is {@link ConditionType#AND}. We should AND together the set of parts.
 */
@Test
public void testBuildCompositeExpressionForAnd() throws Exception {
    final ConditionType conditionType = AND;
    final QueryBuilder part = mock(QueryBuilder.class);
    final BoolQueryBuilder boolQueryBuilder = mock(BoolQueryBuilder.class);
    final List<QueryBuilder> parts = Collections.singletonList(part);
    // Hand the visitor our mock builder so interactions can be verified.
    doReturn(boolQueryBuilder).when(visitor).createBoolQueryBuilder();
    visitor.buildCompositeExpression(conditionType, parts);
    verify(visitor).buildCompositeExpression(conditionType, parts);
    verify(visitor).createBoolQueryBuilder();
    // AND must route the part through must().
    verify(boolQueryBuilder).must(part);
    verifyNoMoreCollaboration(part, boolQueryBuilder);
}
/**
 * Tests {@link ElasticsearchQueryBuilderVisitor#buildCompositeExpression(ConditionType, List)} for the case
 * where the passed {@link ConditionType} is anything but {@link ConditionType#AND}. We should OR together the set
 * of parts.
 */
@Test
public void testBuildCompositeExpressionForNotAnd() throws Exception {
    // Exercise every non-AND condition type with a fresh spy each time.
    Stream.of(ConditionType.values())
        .filter(type -> !type.equals(AND))
        .forEach(type -> {
            final ElasticsearchQueryBuilderVisitor<MetadataRecord> localVisitor = spy(new ElasticsearchQueryBuilderVisitor<>());
            final QueryBuilder part = mock(QueryBuilder.class);
            final BoolQueryBuilder boolQueryBuilder = mock(BoolQueryBuilder.class);
            final List<QueryBuilder> parts = Collections.singletonList(part);
            doReturn(boolQueryBuilder).when(localVisitor).createBoolQueryBuilder();
            localVisitor.buildCompositeExpression(type, parts);
            verify(localVisitor).buildCompositeExpression(type, parts);
            verify(localVisitor).createBoolQueryBuilder();
            // Every non-AND type must route the part through should().
            verify(boolQueryBuilder).should(part);
            verifyNoMoreCollaboration(part, boolQueryBuilder);
        });
}
/**
 * Tests {@link ElasticsearchQueryBuilderVisitor#buildCompositeExpression(ConditionType, List)} for the case
 * where we have multiple query parts and cannot merge them.
 */
@Test
public void testBuildCompositeExpressionWithMultipleUnmergeableParts() throws Exception {
    // Force the "cannot merge" path so both parts must be added individually.
    doReturn(false).when(visitor).canMergeQueryParts(any(QueryBuilder.class), any(QueryBuilder.class), any(ConditionType.class));
    final ConditionType conditionType = AND;
    final QueryBuilder part = mock(QueryBuilder.class);
    final QueryBuilder secondPart = mock(QueryBuilder.class);
    final BoolQueryBuilder boolQueryBuilder = mock(BoolQueryBuilder.class);
    final List<QueryBuilder> parts = Arrays.asList(part, secondPart);
    doReturn(boolQueryBuilder).when(visitor).createBoolQueryBuilder();
    doReturn(part).when(visitor).getPreviousQueryPart(boolQueryBuilder, conditionType);
    visitor.buildCompositeExpression(conditionType, parts);
    verify(visitor).buildCompositeExpression(conditionType, parts);
    verify(visitor).createBoolQueryBuilder();
    verify(visitor).getPreviousQueryPart(boolQueryBuilder, conditionType);
    verify(visitor).canMergeQueryParts(part, secondPart, conditionType);
    // Both unmergeable parts end up as separate must clauses.
    verify(boolQueryBuilder).must(part);
    verify(boolQueryBuilder).must(secondPart);
    verifyNoMoreCollaboration(part, boolQueryBuilder);
}
/**
 * Method to remove invalid logs through IP address
 *
 * @param es an instantiated es driver
 * @param ip invalid IP address
 * @throws ElasticsearchException ElasticsearchException
 * @throws IOException IOException
 */
public void deleteInvalid(ESDriver es, String ip) throws IOException {
    // Match every log entry recorded for the offending IP.
    BoolQueryBuilder filterAll = new BoolQueryBuilder();
    filterAll.must(QueryBuilders.termQuery("IP", ip));
    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(this.cleanupType).setScroll(new TimeValue(60000)).setQuery(filterAll).setSize(100).execute().actionGet();
    // Scroll through all hits; each session is flagged invalid rather than deleted.
    while (true) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            update(es, logIndex, cleanupType, hit.getId(), "SessionID", "invalid");
        }
        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
        if (scrollResp.getHits().getHits().length == 0) {
            break;
        }
    }
    // NOTE(review): the scroll context is never cleared explicitly and only
    // expires with its keep-alive -- consider a clearScroll call here.
}
/**
 * Method to add the additional search query like range query , exists - not exist filter etc.
 *
 * @param query the bool query clauses are appended to
 * @param entry either a FILTERS map (one ES operation per sub-entry) or an
 *              EXISTS / NOT_EXISTS entry (handled as a single operation)
 * @param constraintsMap per-field boost constraints forwarded to the operation builders
 */
@SuppressWarnings("unchecked")
private static void addAdditionalProperties(BoolQueryBuilder query, Entry<String, Object> entry, Map<String, Float> constraintsMap) {
    long startTime = System.currentTimeMillis();
    ProjectLogger.log("ElasticSearchUtil addAdditionalProperties method started at ==" +startTime, LoggerEnum.PERF_LOG);
    String key = entry.getKey();
    if (key.equalsIgnoreCase(JsonKey.FILTERS)) {
        // Each filter entry becomes its own ES operation on the query.
        Map<String, Object> filters = (Map<String, Object>) entry.getValue();
        for (Map.Entry<String, Object> en : filters.entrySet()) {
            createFilterESOpperation(en, query, constraintsMap);
        }
    } else if (key.equalsIgnoreCase(JsonKey.EXISTS) || key.equalsIgnoreCase(JsonKey.NOT_EXISTS)) {
        // exists / not-exists filters are handled as one operation.
        createESOpperation(entry, query, constraintsMap);
    }
    long stopTime = System.currentTimeMillis();
    long elapsedTime = stopTime - startTime;
    ProjectLogger.log("ElasticSearchUtil addAdditionalProperties method end at ==" +stopTime+" ,Total time elapsed = "+elapsedTime, LoggerEnum.PERF_LOG);
}
/**
 * Attaches a sub-query to the bool query, first wrapping it for nested or
 * parent-child conditions, and choosing the clause by the condition's
 * connector (AND -> must, otherwise -> should).
 *
 * @param boolQuery the compound query being built
 * @param where the condition carrying nesting info and the connector
 * @param subQuery the already-built leaf query for this condition
 */
private void addSubQuery(BoolQueryBuilder boolQuery, Where where, QueryBuilder subQuery) {
    if(where instanceof Condition){
        Condition condition = (Condition) where;
        if(condition.isNested()){
            // Wrap in a nested query targeting the condition's nested path.
            subQuery = QueryBuilders.nestedQuery(condition.getNestedPath(), subQuery, ScoreMode.None);
        } else if(condition.isChildren()) {
            // Wrap in a has_child query targeting the condition's child type.
            subQuery = QueryBuilders.hasChildQuery(condition.getChildType(), subQuery, ScoreMode.None);
        }
    }
    if (where.getConn() == CONN.AND) {
        boolQuery.must(subQuery);
    } else {
        boolQuery.should(subQuery);
    }
}
/**
 * Queries documents by protocol message body content (the "content" request
 * parameter), matching any body field of the requested types.
 *
 * @param data the request/response carrier
 */
@SuppressWarnings("rawtypes")
private void queryByBodyContent(UAVHttpMessage data) {
    String[] types = buildTypes(data);
    String content = data.getRequest("content");
    if (types.length == 0) {
        // No explicit types requested: search across every known type.
        types = typeMap.values().toArray(types);
    }
    BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
    for (String type : types) {
        for (String field : typeBodyMap.get(type)) {
            // Any body field may match the content (OR semantics).
            queryBuilder.should(QueryBuilders.matchQuery(field, content));
        }
    }
    SortBuilder[] sorts = buildSort(data);
    queryToList(data, queryBuilder, null, sorts);
}
/**
 * Fetches, for each requested dump timestamp, the thread-dump records of one
 * JVM instance (ipport), then delegates the multi-dump comparison to the
 * ThreadAnalyser and writes the result into the response.
 */
private void queryMultiDumpInfo(UAVHttpMessage data) {
    String ipport = data.getRequest("ipport");
    String timesStr = data.getRequest("times");
    List<String> times = JSONHelper.toObjectArray(timesStr, String.class);
    List<List<Map<String, Object>>> records = new ArrayList<>();
    for (String time : times) {
        long timestamp = DataConvertHelper.toLong(time, -1L);
        // build query builder
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
        // gte == lte pins the range to the exact dump timestamp.
        queryBuilder.must(QueryBuilders.rangeQuery("time").gte(timestamp).lte(timestamp));
        queryBuilder.must(QueryBuilders.termQuery("ipport", ipport));
        SearchResponse sr = query(data, queryBuilder, null, buildSorts(data));
        List<Map<String, Object>> record = getRecords(sr);
        records.add(record);
    }
    ThreadAnalyser ta = (ThreadAnalyser) getConfigManager().getComponent(feature, "ThreadAnalyser");
    List<Map<String, String>> rs = ta.queryMutilDumpInfo(times, records);
    data.putResponse("rs", JSONHelper.toString(rs));
}
/**
 * Like queryMultiDumpInfo, but builds the comparison graph for a selected
 * set of thread ids across the requested dump timestamps.
 */
private void queryMultiDumpGraph(UAVHttpMessage data) {
    String ipport = data.getRequest("ipport");
    String timesStr = data.getRequest("times");
    String threadIdsStr = data.getRequest("threadIds");
    List<String> times = JSONHelper.toObjectArray(timesStr, String.class);
    List<String> threadIds = JSONHelper.toObjectArray(threadIdsStr, String.class);
    List<List<Map<String, Object>>> records = new ArrayList<>();
    for (String time : times) {
        long timestamp = DataConvertHelper.toLong(time, -1L);
        // build query builder
        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery();
        // gte == lte pins the range to the exact dump timestamp.
        queryBuilder.must(QueryBuilders.rangeQuery("time").gte(timestamp).lte(timestamp));
        queryBuilder.must(QueryBuilders.termQuery("ipport", ipport));
        SearchResponse sr = query(data, queryBuilder, null, buildSorts(data));
        List<Map<String, Object>> record = getRecords(sr);
        records.add(record);
    }
    ThreadAnalyser ta = (ThreadAnalyser) getConfigManager().getComponent(feature, "ThreadAnalyser");
    Map<String, Object> rs = ta.queryMutilDumpGraph(threadIds, records);
    data.putResponse("rs", JSONHelper.toString(rs));
}
@Override public Page<File> findWithHighlightedSummary(Pageable pageable, String query, List<String> version, List<String> project) { //QueryBuilder searchQuery = Queries.constructQuery(query); //return elasticsearchTemplate.queryForPage(new NativeSearchQuery(searchQuery), File.class, new ResultHighlightMapper()); NativeSearchQueryBuilder nativeQuery = Queries.constructQueryWithHighlight(query, pageable, 3); BoolQueryBuilder ensembleVersion = QueryBuilders.boolQuery(); BoolQueryBuilder ensembleProjet = QueryBuilders.boolQuery(); if (version != null && !version.isEmpty()) { ensembleVersion = ensembleVersion.should(QueryBuilders.termsQuery("version.raw", version)); } if (project != null && !project.isEmpty()) { ensembleProjet = ensembleProjet.should(QueryBuilders.termsQuery("project.raw", project)); } nativeQuery = nativeQuery.withFilter(QueryBuilders.boolQuery().must(ensembleVersion).must(ensembleProjet)); log.debug("query : {}", nativeQuery.toString()); SearchQuery searchQuery = nativeQuery.build(); log.debug("query : {}", searchQuery.getQuery()); log.debug("filter: {}", searchQuery.getFilter()); return elasticsearchTemplate.queryForPage(searchQuery, File.class, new ResultHighlightMapper()); // SearchResponse searchResponse = searchRequestBuilder.execute().actionGet(); }
/** * 根据条件查询 * * @param filedContentMap 不能为null * @return */ @Override public boolean deleteByQuery(Map<String, Object> filedContentMap) { try { DeleteQuery dq = new DeleteQuery(); BoolQueryBuilder qb = QueryBuilders.boolQuery(); if (filedContentMap != null) for (String key : filedContentMap.keySet()) {//字段查询 qb.must(QueryBuilders.matchQuery(key, filedContentMap.get(key))); } dq.setQuery(qb); elasticsearchTemplate.delete(dq, entityClass); return true; } catch (Exception e) { e.printStackTrace(); return false; } }
/** * 根据条件查询 * * @param filedContentMap 不能为null * @return */ public boolean deleteByQuery(Map<String, Object> filedContentMap) { try { DeleteQuery dq = new DeleteQuery(); BoolQueryBuilder qb = QueryBuilders.boolQuery(); if (filedContentMap != null) for (String key : filedContentMap.keySet()) {//字段查询 qb.must(QueryBuilders.matchQuery(key, filedContentMap.get(key))); } dq.setQuery(qb); ; elasticsearchTemplate.delete(dq, GoodsModel.class); return true; } catch (Exception e) { e.printStackTrace(); return false; } }
/**
 * Analyzed match query on goods name only; expects exactly two matching
 * documents in the test index.
 */
@Test
public void testSelect1() {
    // Assemble the query: goodsName must match the analyzed term.
    BoolQueryBuilder builder = boolQuery();
    builder.must(matchQuery("goodsName", "百事"));
    SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
    Page<GoodsModel> page = elasticsearchTemplate.queryForPage(searchQuery, GoodsModel.class);
    System.out.println(page.getSize());
    List<GoodsModel> GoodsESDocs = page.getContent();
    System.out.println(JSON.toJSONString(GoodsESDocs));
    Assert.assertThat(page.getTotalElements(), is(2L));
}
/**
 * Analyzed match query on goods name AND description; expects exactly two
 * matching documents in the test index.
 */
@Test
public void testSelect2() {
    // Assemble the query: both fields must match the analyzed term.
    BoolQueryBuilder builder = boolQuery();
    builder.must(matchQuery("goodsName", "百事")).must(matchQuery("description", "百事"));
    SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
    Page<GoodsModel> page = elasticsearchTemplate.queryForPage(searchQuery, GoodsModel.class);
    System.out.println(page.getSize());
    List<GoodsModel> GoodsESDocs = page.getContent();
    System.out.println(JSON.toJSONString(GoodsESDocs));
    Assert.assertThat(page.getTotalElements(), is(2L));
}
/** * 根据条件查询 * * @return */ @Test public void deleteByQuery() { try { DeleteQuery dq = new DeleteQuery(); Map<String, Object> filedContentMap = Maps.newHashMap(); filedContentMap.put("id", "1"); BoolQueryBuilder qb = QueryBuilders.boolQuery(); if (filedContentMap != null) for (String key : filedContentMap.keySet()) {//字段查询 qb.must(QueryBuilders.matchQuery(key, filedContentMap.get(key))); } dq.setQuery(qb); elasticsearchTemplate.delete(dq, GoodsModel.class); } catch (Exception e) { e.printStackTrace(); } }
/**
 * Multi-signal goods search: wildcard, query_string and term queries over
 * the pinyin fields, each boosted so goods-name matches rank highest
 * (boost 2) and sku-attribute/level-1-category matches rank lowest (0.8).
 *
 * NOTE(review): the method-level type parameter {@code <Goods>} shadows any
 * class named Goods -- confirm this is intentional.
 *
 * @param sortField field to sort on
 * @param desc whether to sort descending
 * @param from result offset
 * @param size page size
 * @param value the user-entered search text
 * @return the paginated search result
 */
private static <Goods> Pagination<Goods> boolSearch(String sortField, boolean desc, int from, int size, String value) {
    BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
    boolQueryBuilder
        // Substring matches on each pinyin field, weighted by field importance.
        .should(QueryBuilders.wildcardQuery("goodsNamePinyin", "*" + value + "*").boost(2f))
        .should(QueryBuilders.wildcardQuery("categoryName1Pinyin", "*" + value + "*").boost(0.8f))
        .should(QueryBuilders.wildcardQuery("categoryName2Pinyin", "*" + value + "*").boost(1f))
        .should(QueryBuilders.wildcardQuery("categoryName3Pinyin", "*" + value + "*").boost(1.5f))
        .should(QueryBuilders.wildcardQuery("goodsSkuAttrPinyin", "*" + value + "*").boost(0.8f))
        // Analyzed query-string search across the same fields and weights.
        .should(QueryBuilders.queryStringQuery(value).field("goodsNamePinyin", 2f)
            .field("categoryName1Pinyin", 0.8f)
            .field("categoryName2Pinyin", 1f)
            .field("categoryName3Pinyin", 1.5f)
            .field("goodsSkuAttrPinyin", 0.8f))
        // Exact-term matches, again with the same weights.
        .should(QueryBuilders.termQuery("goodsNamePinyin", value).boost(2f))
        .should(QueryBuilders.termQuery("categoryName1Pinyin", value).boost(0.8f))
        .should(QueryBuilders.termQuery("categoryName2Pinyin", value).boost(1f))
        .should(QueryBuilders.termQuery("categoryName3Pinyin", value).boost(1.5f))
        .should(QueryBuilders.termQuery("goodsSkuAttrPinyin", value).boost(0.8f));
    return search(boolQueryBuilder, IndexType.GOODS, sortField, desc, from, size);
}
/**
 * Method to build a matchPhraseQuery {@code QueryBuilder}
 *
 * @param query
 *            - The {@code String} query e.g. turnips
 * @param within
 *            - The {@code String} base64 encoded within String
 * @return {@code QueryBuilder} combining the phrase and manifest match, or
 *         null when the within value cannot be decoded
 */
private QueryBuilder buildQuery(String query, String within) {
    String decodedWithinUrl = textUtils.decodeWithinUrl(within);
    if (null != decodedWithinUrl) {
        BoolQueryBuilder must = QueryBuilders.boolQuery();
        // Phrase match on the text field, restricted to the decoded manifest.
        QueryBuilder builder = QueryBuilders.matchPhraseQuery(TEXT_FIELD_NAME, query);
        must.must(QueryBuilders.matchQuery("manifestId", decodedWithinUrl));
        must.must(builder);
        return must;
    } else {
        LOG.info("Unable to decode the within " + within);
        return null;
    }
}
/**
 * Builds the query for a search option: a multi-match of the keyword over
 * the configured search fields (AND operator when requested), wrapped in a
 * bool query when additional per-field match conditions are present.
 */
private QueryBuilder buildQueryBuilder(ElasticsearchSearchOption option) {
    String[] fields = option.getSearchFields().stream().toArray(String[]::new);
    MultiMatchQueryBuilder keywordQuery = multiMatchQuery(option.getKeyword(), fields);
    // Enum == is null-safe, so the explicit null check is unnecessary.
    if (ElasticsearchSearchOption.Operator.AND == option.getOperator()) {
        keywordQuery = keywordQuery.operator(MatchQueryBuilder.Operator.AND);
    }
    if (option.getOptionalSearchConditions().isEmpty()) {
        return keywordQuery;
    }
    // Extra conditions: AND each field/value pair onto the keyword query.
    BoolQueryBuilder combined = boolQuery().must(keywordQuery);
    option.getOptionalSearchConditions().forEach((field, value) -> combined.must(matchQuery(field, value)));
    return combined;
}
/**
 * Streams, as JSON lines, every document whose node_id equals this nodeId.
 *
 * NOTE(review): the exists clause looks redundant given the term clause on
 * the same field -- confirm before removing.
 */
@Override
public Observable<Void> call(Void aVoid) {
    Elasticsearch elasticsearch = vertxContext.verticle().elasticsearch();
    BoolQueryBuilder query = boolQuery()
        .must(existsQuery("node_id"))
        .must(termQuery("node_id", nodeId));
    // Scan-and-scroll over the index, writing each hit as one JSON line.
    ScanAndScrollStreamProducer producer = new ScanAndScrollStreamProducer(vertxContext, query)
        .setIndeces(index)
        .setTypes(elasticsearch.defaultType());
    SearchHitEndableWriteStreamToJsonLine consumer = new SearchHitEndableWriteStreamToJsonLine(bufferStreamConsumer);
    return pump(producer, consumer);
}
/**
 * Builds a bool query restricted to this unit's type and the given document,
 * plus one must clause per configured analysis filter.
 *
 * @param documentId identifier of the document to query within
 * @return the assembled bool query
 * @throws IllegalStateException when type or analysisFilters were never set
 */
BoolQueryBuilder queryInDocument(String documentId) {
    if (type == null) {
        throw new IllegalStateException("type not initialized");
    }
    if (analysisFilters == null) {
        throw new IllegalStateException("analysisFilters not initialized");
    }
    BoolQueryBuilder query = QueryBuilders.boolQuery()
        .must(QueryBuilders.termQuery("types", type))
        .must(QueryBuilders.termQuery("documentIdentifier", documentId));
    // Every configured filter further narrows the result.
    for (UnitOfAnalysisFilter filter : analysisFilters) {
        query.must(filter.buildQuery());
    }
    return query;
}
/**
 * Looks up stored features matching {@code featureNamesQuery} in the feature
 * store index and dispatches the search asynchronously. A trailing "*" turns
 * the name into a prefix query; "*" alone matches all features.
 */
private void fetchFeaturesFromStore() {
    SearchRequest srequest = new SearchRequest(store);
    srequest.setParentTask(clusterService.localNode().getId(), task.getId());
    QueryBuilder nameQuery;
    if (featureNamesQuery.endsWith("*")) {
        // Strip the trailing wildcard(s) and match against the prefix sub-field.
        String parsed = featureNamesQuery.replaceAll("[*]+$", "");
        if (parsed.isEmpty()) {
            nameQuery = QueryBuilders.matchAllQuery();
        } else {
            nameQuery = QueryBuilders.matchQuery("name.prefix", parsed);
        }
    } else {
        nameQuery = QueryBuilders.matchQuery("name", featureNamesQuery);
    }
    BoolQueryBuilder bq = QueryBuilders.boolQuery();
    bq.must(nameQuery);
    bq.must(QueryBuilders.matchQuery("type", StoredFeature.TYPE));
    srequest.types(IndexFeatureStore.ES_TYPE);
    srequest.source().query(bq);
    srequest.source().fetchSource(true);
    srequest.source().size(StoredFeatureSet.MAX_FEATURES);
    // BUG FIX: the request was previously executed twice -- once synchronously
    // (its ActionFuture ignored) and once asynchronously. Keep only the async
    // dispatch with the response/failure callbacks.
    searchAction.execute(srequest, wrap(this::onSearchResponse, this::onSearchFailure));
}