public static SignificanceHeuristic parse(QueryParseContext context)
        throws IOException, QueryShardException {
    XContentParser parser = context.parser();
    String heuristicName = parser.currentName();
    Script script = null;
    XContentParser.Token token;
    String currentFieldName = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token.equals(XContentParser.Token.FIELD_NAME)) {
            currentFieldName = parser.currentName();
        } else {
            if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
                script = Script.parse(parser);
            } else {
                throw new ElasticsearchParseException(
                        "failed to parse [{}] significance heuristic. unknown object [{}]",
                        heuristicName, currentFieldName);
            }
        }
    }
    if (script == null) {
        throw new ElasticsearchParseException(
                "failed to parse [{}] significance heuristic. no script found in script_heuristic",
                heuristicName);
    }
    return new ScriptHeuristic(script);
}
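// A minimal sketch of the request fragment the parser above walks: a
// "script_heuristic" object holding a single "script" sub-object (the name
// "script_heuristic" comes from the error message above; the script body and
// its exact key layout here are illustrative and version-dependent).
String scriptHeuristicJson =
        "{\n" +
        "  \"script_heuristic\": {\n" +
        "    \"script\": {\n" +
        "      \"inline\": \"_subset_freq / (_superset_freq + 1)\"\n" +
        "    }\n" +
        "  }\n" +
        "}";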
@Override
public SignificanceHeuristic parse(QueryParseContext context)
        throws IOException, QueryShardException {
    XContentParser parser = context.parser();
    String givenName = parser.currentName();
    boolean includeNegatives = false;
    boolean backgroundIsSuperset = true;
    XContentParser.Token token = parser.nextToken();
    while (!token.equals(XContentParser.Token.END_OBJECT)) {
        if (INCLUDE_NEGATIVES_FIELD.match(parser.currentName())) {
            parser.nextToken();
            includeNegatives = parser.booleanValue();
        } else if (BACKGROUND_IS_SUPERSET.match(parser.currentName())) {
            parser.nextToken();
            backgroundIsSuperset = parser.booleanValue();
        } else {
            throw new ElasticsearchParseException(
                    "failed to parse [{}] significance heuristic. unknown field [{}]",
                    givenName, parser.currentName());
        }
        token = parser.nextToken();
    }
    return newHeuristic(includeNegatives, backgroundIsSuperset);
}
@Override
public SignificanceHeuristic parse(QueryParseContext context)
        throws IOException, QueryShardException {
    XContentParser parser = context.parser();
    String givenName = parser.currentName();
    boolean backgroundIsSuperset = true;
    XContentParser.Token token = parser.nextToken();
    while (!token.equals(XContentParser.Token.END_OBJECT)) {
        if (BACKGROUND_IS_SUPERSET.match(parser.currentName())) {
            parser.nextToken();
            backgroundIsSuperset = parser.booleanValue();
        } else {
            throw new ElasticsearchParseException(
                    "failed to parse [{}] significance heuristic. unknown field [{}]",
                    givenName, parser.currentName());
        }
        token = parser.nextToken();
    }
    return newHeuristic(true, backgroundIsSuperset);
}
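// Sketch of the object the two flag-based parsers above consume, assuming the
// heuristic is registered under a public name such as "mutual_information" and
// that INCLUDE_NEGATIVES_FIELD / BACKGROUND_IS_SUPERSET correspond to the DSL
// keys "include_negatives" / "background_is_superset"; both flags are optional.
String nxyHeuristicJson =
        "{\n" +
        "  \"mutual_information\": {\n" +
        "    \"include_negatives\": true,\n" +
        "    \"background_is_superset\": false\n" +
        "  }\n" +
        "}";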
protected static Nested resolveNested(QueryShardContext context, String nestedPath, QueryBuilder nestedFilter)
        throws IOException {
    Nested nested = null;
    if (nestedPath != null) {
        BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
        ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
        if (nestedObjectMapper == null) {
            throw new QueryShardException(context,
                    "[nested] failed to find nested object under path [" + nestedPath + "]");
        }
        if (!nestedObjectMapper.nested().isNested()) {
            throw new QueryShardException(context,
                    "[nested] nested object under path [" + nestedPath + "] is not of nested type");
        }
        Query innerDocumentsQuery;
        if (nestedFilter != null) {
            context.nestedScope().nextLevel(nestedObjectMapper);
            innerDocumentsQuery = QueryBuilder.rewriteQuery(nestedFilter, context).toFilter(context);
            context.nestedScope().previousLevel();
        } else {
            innerDocumentsQuery = nestedObjectMapper.nestedTypeFilter();
        }
        nested = new Nested(rootDocumentsFilter, innerDocumentsQuery);
    }
    return nested;
}
@Override
public Query doToQuery(QueryShardContext context, FeatureSet set, Map<String, Object> params) {
    List<String> missingParams = queryParams.stream()
            .filter((x) -> params == null || !params.containsKey(x))
            .collect(Collectors.toList());
    if (!missingParams.isEmpty()) {
        String names = missingParams.stream().collect(Collectors.joining(","));
        throw new IllegalArgumentException("Missing required param(s): [" + names + "]");
    }

    String query = MustacheUtils.execute(template, params);
    try {
        XContentParser parser = XContentFactory.xContent(query).createParser(context.getXContentRegistry(), query);
        QueryParseContext parserContext = context.newParseContext(parser);
        QueryBuilder queryBuilder = parserContext.parseInnerQueryBuilder().orElseThrow(
                () -> new ParsingException(parser.getTokenLocation(), "ltr inner query cannot be empty"));
        // XXX: QueryShardContext extends QueryRewriteContext (for now)
        return QueryBuilder.rewriteQuery(queryBuilder, context).toQuery(context);
    } catch (IOException | ParsingException | IllegalArgumentException e) {
        // wrap common exceptions as well so we can attach the feature's name to the stack
        throw new QueryShardException(context, "Cannot create query while parsing feature [" + name + "]", e);
    }
}
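// Illustrative usage only: a mustache feature template with one declared query
// parameter ("user_query"); if params lacks that key, the missing-params check
// above throws IllegalArgumentException before the template is rendered. The
// field name, parameter name and value are made up for this sketch.
String featureTemplate =
        "{ \"match\": { \"title\": \"{{user_query}}\" } }";
Map<String, Object> params = Collections.singletonMap("user_query", "rambo");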
public void testPercolatorFieldMapperUnMappedField() throws Exception {
    addQueryMapping();
    MapperParsingException exception = expectThrows(MapperParsingException.class, () -> {
        mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject()
                .field(fieldName, termQuery("unmapped_field", "value"))
                .endObject().bytes());
    });
    assertThat(exception.getCause(), instanceOf(QueryShardException.class));
    assertThat(exception.getCause().getMessage(),
            equalTo("No field mapping can be found for the field with name [unmapped_field]"));
}
public static SignificanceHeuristic parse(QueryParseContext context)
        throws IOException, QueryShardException {
    XContentParser parser = context.parser();
    // move to the closing bracket
    if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
        throw new ElasticsearchParseException(
                "failed to parse [jlh] significance heuristic. expected an empty object, but found [{}] instead",
                parser.currentToken());
    }
    return new JLHScore();
}
public static SignificanceHeuristic parse(QueryParseContext context) throws IOException, QueryShardException { XContentParser parser = context.parser(); // move to the closing bracket if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken()); } return new PercentageScore(); }
@Override
public SortFieldAndFormat build(QueryShardContext context) throws IOException {
    if (DOC_FIELD_NAME.equals(fieldName)) {
        if (order == SortOrder.DESC) {
            return SORT_DOC_REVERSE;
        } else {
            return SORT_DOC;
        }
    } else {
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType == null) {
            if (unmappedType != null) {
                fieldType = context.getMapperService().unmappedFieldType(unmappedType);
            } else {
                throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on");
            }
        }

        MultiValueMode localSortMode = null;
        if (sortMode != null) {
            localSortMode = MultiValueMode.fromString(sortMode.toString());
        }

        boolean reverse = (order == SortOrder.DESC);
        if (localSortMode == null) {
            localSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
        }

        final Nested nested = resolveNested(context, nestedPath, nestedFilter);
        IndexFieldData<?> fieldData = context.getForField(fieldType);
        if (fieldData instanceof IndexNumericFieldData == false
                && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) {
            throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields");
        }
        IndexFieldData.XFieldComparatorSource fieldComparatorSource = fieldData
                .comparatorSource(missing, localSortMode, nested);
        SortField field = new SortField(fieldType.name(), fieldComparatorSource, reverse);
        return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null));
    }
}
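// Sketch of a field sort clause exercising the options handled above. The key
// names follow the public sort DSL ("order", "mode", "missing", "unmapped_type",
// "nested_path"); the field names and values are illustrative.
String fieldSortJson =
        "{\n" +
        "  \"sort\": [ {\n" +
        "    \"offer.price\": {\n" +
        "      \"order\": \"desc\",\n" +
        "      \"mode\": \"avg\",\n" +
        "      \"missing\": \"_last\",\n" +
        "      \"unmapped_type\": \"double\",\n" +
        "      \"nested_path\": \"offer\"\n" +
        "    }\n" +
        "  } ]\n" +
        "}";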
@Override
protected ScoreFunction doToFunction(QueryShardContext context) {
    try {
        SearchScript searchScript = context.getSearchScript(script, ScriptContext.Standard.SEARCH);
        return new ScriptScoreFunction(script, searchScript);
    } catch (Exception e) {
        throw new QueryShardException(context, "script_score: the script could not be loaded", e);
    }
}
public void testQueryShardException() throws IOException {
    QueryShardException ex = serialize(new QueryShardException(new Index("foo", "_na_"), "fobar", null));
    assertEquals(ex.getIndex().getName(), "foo");
    assertEquals(ex.getMessage(), "fobar");

    ex = serialize(new QueryShardException((Index) null, null, null));
    assertNull(ex.getIndex());
    assertNull(ex.getMessage());
}
@Override public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "Percolator fields are not searchable directly, use a percolate query instead"); }
@Override
public SortFieldAndFormat build(QueryShardContext context) throws IOException {
    final SearchScript searchScript = context.getSearchScript(script, ScriptContext.Standard.SEARCH);

    MultiValueMode valueMode = null;
    if (sortMode != null) {
        valueMode = MultiValueMode.fromString(sortMode.toString());
    }
    boolean reverse = (order == SortOrder.DESC);
    if (valueMode == null) {
        valueMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
    }

    final Nested nested = resolveNested(context, nestedPath, nestedFilter);
    final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
    switch (type) {
        case STRING:
            fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
                LeafSearchScript leafScript;
                @Override
                protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException {
                    leafScript = searchScript.getLeafSearchScript(context);
                    final BinaryDocValues values = new BinaryDocValues() {
                        final BytesRefBuilder spare = new BytesRefBuilder();
                        @Override
                        public BytesRef get(int docID) {
                            leafScript.setDocument(docID);
                            spare.copyChars(leafScript.run().toString());
                            return spare.get();
                        }
                    };
                    return FieldData.singleton(values, null);
                }
                @Override
                protected void setScorer(Scorer scorer) {
                    leafScript.setScorer(scorer);
                }
            };
            break;
        case NUMBER:
            fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
                LeafSearchScript leafScript;
                @Override
                protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException {
                    leafScript = searchScript.getLeafSearchScript(context);
                    final NumericDoubleValues values = new NumericDoubleValues() {
                        @Override
                        public double get(int docID) {
                            leafScript.setDocument(docID);
                            return leafScript.runAsDouble();
                        }
                    };
                    return FieldData.singleton(values, null);
                }
                @Override
                protected void setScorer(Scorer scorer) {
                    leafScript.setScorer(scorer);
                }
            };
            break;
        default:
            throw new QueryShardException(context, "custom script sort type [" + type + "] not supported");
    }

    return new SortFieldAndFormat(new SortField("_script", fieldComparatorSource, reverse), DocValueFormat.RAW);
}
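// Sketch of the _script sort clause this builder corresponds to: "type" selects
// the STRING/NUMBER branch above and "order" the reverse flag. The script body,
// field name and the exact script-object layout are illustrative and
// version-dependent.
String scriptSortJson =
        "{\n" +
        "  \"sort\": {\n" +
        "    \"_script\": {\n" +
        "      \"type\": \"number\",\n" +
        "      \"order\": \"asc\",\n" +
        "      \"script\": { \"inline\": \"doc['rating'].value * 2\" }\n" +
        "    }\n" +
        "  }\n" +
        "}";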
@Override public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "Binary fields do not support searching"); }
@Override public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "The _source field is not searchable"); }
@Override public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "The _version field is not searchable"); }
@Override public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead"); }
@Override public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead: [" + name() + "]"); }
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) {
    throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
}
public Query regexpQuery(String value, int flags, int maxDeterminizedStates,
        @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) {
    throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name
            + "] which is of type [" + typeName() + "]");
}
public static SignificanceHeuristic parse(QueryParseContext context)
        throws IOException, QueryShardException {
    // this heuristic takes no parameters, so just consume the (empty) object
    context.parser().nextToken();
    return new SimpleHeuristic();
}
public void testWriteThrowable() throws IOException {
    final QueryShardException queryShardException = new QueryShardException(new Index("foo", "_na_"), "foobar", null);
    final UnknownException unknownException = new UnknownException("this exception is unknown", queryShardException);

    final Exception[] causes = new Exception[]{
            new IllegalStateException("foobar"),
            new IllegalArgumentException("alalaal"),
            new NullPointerException("boom"),
            new EOFException("dadada"),
            new ElasticsearchSecurityException("nono!"),
            new NumberFormatException("not a number"),
            new CorruptIndexException("baaaam booom", "this is my resource"),
            new IndexFormatTooNewException("tooo new", 1, 2, 3),
            new IndexFormatTooOldException("tooo new", 1, 2, 3),
            new IndexFormatTooOldException("tooo new", "very old version"),
            new ArrayIndexOutOfBoundsException("booom"),
            new StringIndexOutOfBoundsException("booom"),
            new FileNotFoundException("booom"),
            new NoSuchFileException("booom"),
            new AlreadyClosedException("closed!!", new NullPointerException()),
            new LockObtainFailedException("can't lock directory", new NullPointerException()),
            unknownException};
    for (final Exception cause : causes) {
        ElasticsearchException ex = new ElasticsearchException("topLevel", cause);
        ElasticsearchException deserialized = serialize(ex);
        assertEquals(deserialized.getMessage(), ex.getMessage());
        assertTrue("Expected: " + deserialized.getCause().getMessage() + " to contain: "
                        + ex.getCause().getClass().getName() + " but it didn't",
                deserialized.getCause().getMessage().contains(ex.getCause().getMessage()));
        if (ex.getCause().getClass() != UnknownException.class) { // unknown exception is not directly mapped
            assertEquals(deserialized.getCause().getClass(), ex.getCause().getClass());
        } else {
            assertEquals(deserialized.getCause().getClass(), NotSerializableExceptionWrapper.class);
        }
        assertArrayEquals(deserialized.getStackTrace(), ex.getStackTrace());
        assertTrue(deserialized.getStackTrace().length > 1);
        assertVersionSerializable(VersionUtils.randomVersion(random()), cause);
        assertVersionSerializable(VersionUtils.randomVersion(random()), ex);
        assertVersionSerializable(VersionUtils.randomVersion(random()), deserialized);
    }
}
@Override public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "Murmur3 fields are not searchable: [" + name() + "]"); }
@Override public Query termQuery(final Object value, final QueryShardContext context) { throw new QueryShardException(context, "MinHash fields do not support searching"); }