/**
 * Serializes {@code o} to the response stream, applying the optional "fields"
 * query parameter as a partial-response filter. When no filter is requested,
 * delegates to the default provider behavior unchanged.
 *
 * Fix: the TokenBuffer and the parser derived from it were never closed
 * (leaked on both success and failure paths); now managed via try-with-resources.
 */
@Override
public void writeTo(Object o, Class<?> type, Type genericType, Annotation[] annotations,
        MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream os)
        throws IOException {
    String fields = uriInfo.getQueryParameters() == null
            ? null
            : uriInfo.getQueryParameters().getFirst("fields");
    FieldFilter fieldFilter = FieldFilter.create(fields);
    if (!fieldFilter.hasFilters()) {
        super.writeTo(o, type, genericType, annotations, mediaType, httpHeaders, os);
        return;
    }
    JsonGenerator jgen = objectMapper.getFactory().createGenerator(os);
    // Buffer the full serialization, then replay it through the field filter.
    try (TokenBuffer tokenBuffer = new TokenBuffer(objectMapper, false)) {
        objectMapper.writeValue(tokenBuffer, o);
        try (JsonParser jsonParser = tokenBuffer.asParser()) {
            fieldFilter.writeJson(jsonParser, jgen);
        }
    }
    // Flush rather than close: closing the generator would close the
    // container-owned output stream by default.
    jgen.flush();
}
/**
 * Converts literal tuples into a JSON array tree: one object per row,
 * keyed by the row type's field names in positional order.
 */
private static JsonNode convertToJsonNode(RelDataType rowType,
        ImmutableList<ImmutableList<RexLiteral>> tuples) throws IOException {
    TokenBuffer buffer = new TokenBuffer(MAPPER.getFactory().getCodec(), false);
    JsonOutput output = new ExtendedJsonOutput(buffer);
    String[] fieldNames = rowType.getFieldNames().toArray(new String[rowType.getFieldCount()]);

    output.writeStartArray();
    for (List<RexLiteral> row : tuples) {
        output.writeStartObject();
        int column = 0;
        for (RexLiteral literal : row) {
            output.writeFieldName(fieldNames[column]);
            writeLiteral(literal, output);
            column++;
        }
        output.writeEndObject();
    }
    output.writeEndArray();
    output.flush();
    return buffer.asParser().readValueAsTree();
}
/**
 * Builds the JSON result node {@code [{"count": <count>}]} reporting a row count.
 *
 * @param count number of affected rows
 * @return the result as a JSON tree
 * @throws RuntimeException wrapping any IOException (not expected: all writes
 *         are in-memory token-buffer operations)
 */
private JsonNode getResultsNode(long count) {
    try {
        TokenBuffer out = new TokenBuffer(MAPPER.getFactory().getCodec(), false);
        JsonOutput json = new ExtendedJsonOutput(out);
        json.writeStartArray();
        json.writeStartObject();
        json.writeFieldName("count");
        json.writeBigInt(count);
        json.writeEndObject();
        json.writeEndArray();
        json.flush();
        return out.asParser().readValueAsTree();
    } catch (IOException ex) {
        // Guava's Throwables.propagate is deprecated; for a checked IOException it
        // simply wrapped in RuntimeException — do that directly (same behavior).
        throw new RuntimeException(ex);
    }
}
/**
 * Renders the given literal tuples as a JSON tree: an array of objects whose
 * keys come from {@code rowType}'s field names, in column order.
 */
private static JsonNode convertToJsonNode(RelDataType rowType,
        ImmutableList<ImmutableList<RexLiteral>> tuples) throws IOException {
    final TokenBuffer sink = new TokenBuffer(MAPPER.getFactory().getCodec(), false);
    final JsonOutput writer = new ExtendedJsonOutput(sink);
    final String[] names = rowType.getFieldNames().toArray(new String[rowType.getFieldCount()]);

    writer.writeStartArray();
    for (List<RexLiteral> tuple : tuples) {
        writer.writeStartObject();
        int col = 0;
        for (RexLiteral value : tuple) {
            writer.writeFieldName(names[col++]);
            writeLiteral(value, writer);
        }
        writer.writeEndObject();
    }
    writer.writeEndArray();
    writer.flush();
    return sink.asParser().readValueAsTree();
}
@Override public <T> T unmarshall(final Object marshalled, final Class<T> type) throws Exception { checkNotNull(marshalled); checkState(marshalled instanceof Map, "Marshalled data must be a Map; found: %s", marshalled.getClass()); // FIXME: This allows the top-level object to be created, but if any children objects of this are missing // FIXME: ... no-arg CTOR then Jackson will fail to construct them. // FIXME: Is there any way to configure the basic instance creation for Jackson? Object value = instanceCreator.newInstance(type); // performs same basic logic as ObjectMapper.convertValue(Object, Class) helper ObjectReader reader = objectMapper.readerForUpdating(value); TokenBuffer buff = new TokenBuffer(objectMapper, false); objectMapper.writeValue(buff, marshalled); reader.readValue(buff.asParser()); return type.cast(value); }
FieldMetadata deserializeMetadata(ObjectMapper mapper, String fieldName, JsonNode fieldJson) throws JsonParseException, IOException { TokenBuffer tb = new TokenBuffer(null, false); JsonParser jp = fieldJson.traverse(mapper); jp.nextToken(); // Get to Start TOKEN JsonToken token = jp.getCurrentToken(); tb.writeStringField(NAME_PROP, fieldName); do { jp.nextToken(); token = jp.getCurrentToken(); tb.copyCurrentStructure(jp); } while(token != JsonToken.END_OBJECT); jp = tb.asParser(jp); jp.nextToken(); tb.close(); FieldMetadata diff = mapper.readValue(jp, FieldMetadata.class); return defaultMetaProvider.mergeWithDefault(diff); }
private JsonParser enrichJsonObject(ObjectMapper mapper, String fieldName, JsonNode fieldJson) throws IOException { TokenBuffer tb = new TokenBuffer(null, false); JsonParser jp = fieldJson.traverse(mapper); JsonToken token = jp.nextToken(); // Get to Start TOKEN tb.writeStringField(NAME_PROP, fieldName); do { jp.nextToken(); token = jp.getCurrentToken(); tb.copyCurrentStructure(jp); } while(token != JsonToken.END_OBJECT); jp = tb.asParser(jp); jp.nextToken(); tb.close(); return jp; }
@SuppressWarnings("unchecked") protected static <T> T convert(Object fromValue, TypeReference<T> toValueType, Class<?> view) { // sanity check for null first: if (fromValue == null) return null; /* * Then use TokenBuffer, which is a JsonGenerator: (see [JACKSON-175]) */ TokenBuffer buf = new TokenBuffer(MAPPER); try { MAPPER.writerWithView(view).writeValue(buf, fromValue); // and provide as with a JsonParser for contents as well! JsonParser jp = buf.asParser(); Object result = MAPPER.readValue(jp, toValueType); jp.close(); return (T) result; } catch (IOException e) { // should not occur, no real i/o... throw new IllegalArgumentException(e.getMessage(), e); } }
/**
 * Fallback used when no type id property was found: tries the configured default
 * implementation, then "natural" JSON values, then wrapper-array form; otherwise
 * reports the missing type id property.
 */
protected Object _deserializeTypedUsingDefaultImpl(JsonParser jp,
        DeserializationContext ctxt, TokenBuffer tb) {
    JsonDeserializer deser = _findDefaultImplDeserializer(ctxt);
    if (deser != null) {
        if (tb != null) {
            // Close the buffered object and replay it (buffer first, then advance).
            tb.writeEndObject();
            JsonParser bufferParser = tb.asParser(jp);
            jp = bufferParser;
            bufferParser.nextToken();
        }
        return deser.deserialize(jp, ctxt);
    }
    // Maybe the value is a "natural" one (boolean/int/double/String)?
    Object natural = TypeDeserializer.deserializeIfNatural(jp, ctxt, this._baseType);
    if (natural != null) {
        return natural;
    }
    // "as-property" cannot apply; the input may have used "wrapper-array" style instead.
    if (jp.getCurrentToken() == JsonToken.START_ARRAY) {
        return super.deserializeTypedFromAny(jp, ctxt);
    }
    throw ctxt.wrongTokenException(jp, JsonToken.FIELD_NAME,
            "missing property '" + this._typePropertyName
            + "' that is to contain type id (for class " + baseTypeName() + ")");
}
/**
 * Deserializes a value carrying external type information (possibly wrapped in a
 * 2-element array), optionally re-exposing the type property to the value
 * deserializer when type ids are visible.
 */
private final Object _deserialize(JsonParser jp, DeserializationContext ctxt) {
    final boolean wrappedInArray = jp.isExpectedStartArrayToken();
    final String typeId = _locateTypeId(jp, ctxt);
    final JsonDeserializer deser = _findDeserializer(ctxt, typeId);
    if (this._typeIdVisible && jp.getCurrentToken() == JsonToken.START_OBJECT) {
        // Prepend START_OBJECT plus the type property so the downstream
        // deserializer also sees the type id as a regular field.
        TokenBuffer tb = new TokenBuffer(null);
        tb.writeStartObject();
        tb.writeFieldName(this._typePropertyName);
        tb.writeString(typeId);
        JsonParserSequence seq = JsonParserSequence.createFlattened(tb.asParser(jp), jp);
        jp = seq;
        seq.nextToken();
    }
    Object value = deser.deserialize(jp, ctxt);
    // If wrapped in array form, the trailing END_ARRAY must follow the value.
    if (wrappedInArray && jp.nextToken() != JsonToken.END_ARRAY) {
        throw ctxt.wrongTokenException(jp, JsonToken.END_ARRAY,
                "expected closing END_ARRAY after type information and deserialized value");
    }
    return value;
}
/**
 * Deserializes a value in As.WRAPPER_OBJECT form: a single-property JSON object
 * whose property name is the type id and whose value is the payload. Verifies
 * the wrapper's START_OBJECT / FIELD_NAME / END_OBJECT framing.
 */
private final Object _deserialize(JsonParser jp, DeserializationContext ctxt) {
    if (jp.getCurrentToken() != JsonToken.START_OBJECT) {
        throw ctxt.wrongTokenException(jp, JsonToken.START_OBJECT,
                "need JSON Object to contain As.WRAPPER_OBJECT type information for class " + baseTypeName());
    }
    if (jp.nextToken() != JsonToken.FIELD_NAME) {
        throw ctxt.wrongTokenException(jp, JsonToken.FIELD_NAME,
                "need JSON String that contains type id (for subtype of " + baseTypeName() + ")");
    }
    final String typeId = jp.getText();
    final JsonDeserializer deser = _findDeserializer(ctxt, typeId);
    jp.nextToken();
    if (this._typeIdVisible && jp.getCurrentToken() == JsonToken.START_OBJECT) {
        // Re-expose the type id as a regular property for the value deserializer.
        TokenBuffer tb = new TokenBuffer(null);
        tb.writeStartObject();
        tb.writeFieldName(this._typePropertyName);
        tb.writeString(typeId);
        JsonParserSequence seq = JsonParserSequence.createFlattened(tb.asParser(jp), jp);
        jp = seq;
        seq.nextToken();
    }
    Object value = deser.deserialize(jp, ctxt);
    // The wrapper object must be closed immediately after the payload.
    if (jp.nextToken() != JsonToken.END_OBJECT) {
        throw ctxt.wrongTokenException(jp, JsonToken.END_OBJECT,
                "expected closing END_OBJECT after type information and deserialized value");
    }
    return value;
}
/**
 * Handles the polymorphic case where the Creator-produced bean may have a more
 * specific deserializer than this one; replays any buffered unknown tokens and
 * finishes with the live parser (when provided).
 */
protected Object handlePolymorphic(JsonParser jp, DeserializationContext ctxt,
        Object bean, TokenBuffer unknownTokens) {
    JsonDeserializer subDeser = _findSubclassDeserializer(ctxt, bean, unknownTokens);
    if (subDeser == null) {
        // No more specific deserializer available: treat buffered tokens as
        // unknown properties, then let this deserializer consume the rest.
        if (unknownTokens != null) {
            bean = handleUnknownProperties(ctxt, bean, unknownTokens);
        }
        if (jp != null) {
            bean = deserialize(jp, ctxt, bean);
        }
        return bean;
    }
    if (unknownTokens != null) {
        // Terminate the buffered object and replay it through the subtype deserializer.
        unknownTokens.writeEndObject();
        JsonParser buffered = unknownTokens.asParser();
        buffered.nextToken();
        bean = subDeser.deserialize(buffered, ctxt, bean);
    }
    if (jp != null) {
        bean = subDeser.deserialize(jp, ctxt, bean);
    }
    return bean;
}
private JsonNode valueToTree(ObjectMapper mapper, ObjectWriter writer, Object o) { if (o == null) { return null; } TokenBuffer buf = new TokenBuffer(mapper, false); JsonNode result; try { writer.writeValue(buf, o); JsonParser jp = buf.asParser(); result = mapper.readTree(jp); jp.close(); } catch (IOException e) { // should not occur, no real i/o... throw new IllegalArgumentException(e.getMessage(), e); } return result; }
/**
 * Method that handles type information wrapper, locates actual
 * subtype deserializer to use, and calls it to do actual
 * deserialization.
 */
private final Object _deserialize(JsonParser jp, DeserializationContext ctxt)
    throws IOException, JsonProcessingException
{
    // Remember whether value arrived in "[ typeId, value ]" array form so the
    // trailing END_ARRAY can be verified after the value is read.
    boolean hadStartArray = jp.isExpectedStartArrayToken();
    String typeId = _locateTypeId(jp, ctxt);
    JsonDeserializer<Object> deser = _findDeserializer(ctxt, typeId);
    // Minor complication: we may need to merge type id in?
    if (_typeIdVisible && jp.getCurrentToken() == JsonToken.START_OBJECT) {
        // but what if there's nowhere to add it in? Error? Or skip? For now, skip.
        TokenBuffer tb = new TokenBuffer(null);
        tb.writeStartObject(); // recreate START_OBJECT
        tb.writeFieldName(_typePropertyName);
        tb.writeString(typeId);
        // Chain the buffered type-property tokens in front of the remaining input.
        jp = JsonParserSequence.createFlattened(tb.asParser(jp), jp);
        jp.nextToken();
    }
    Object value = deser.deserialize(jp, ctxt);
    // And then need the closing END_ARRAY
    if (hadStartArray && jp.nextToken() != JsonToken.END_ARRAY) {
        throw ctxt.wrongTokenException(jp, JsonToken.END_ARRAY,
            "expected closing END_ARRAY after type information and deserialized value");
    }
    return value;
}
protected final Object _deserialize(JsonParser jp, DeserializationContext ctxt, int index) throws IOException, JsonProcessingException { TokenBuffer merged = new TokenBuffer(jp.getCodec()); merged.writeStartArray(); merged.writeString(_typeIds[index]); JsonParser p2 = _tokens[index].asParser(jp); p2.nextToken(); merged.copyCurrentStructure(p2); merged.writeEndArray(); // needs to point to START_OBJECT (or whatever first token is) p2 = merged.asParser(jp); p2.nextToken(); return _properties[index].getProperty().deserialize(p2, ctxt); }
protected final void _deserializeAndSet(JsonParser jp, DeserializationContext ctxt, Object bean, int index) throws IOException, JsonProcessingException { /* Ok: time to mix type id, value; and we will actually use "wrapper-array" * style to ensure we can handle all kinds of JSON constructs. */ TokenBuffer merged = new TokenBuffer(jp.getCodec()); merged.writeStartArray(); merged.writeString(_typeIds[index]); JsonParser p2 = _tokens[index].asParser(jp); p2.nextToken(); merged.copyCurrentStructure(p2); merged.writeEndArray(); // needs to point to START_OBJECT (or whatever first token is) p2 = merged.asParser(jp); p2.nextToken(); _properties[index].getProperty().deserializeAndSet(p2, ctxt, bean); }
/** * Reverse of {@link #treeToValue}; given a value (usually bean), will * construct equivalent JSON Tree representation. Functionally same * as if serializing value into JSON and parsing JSON as tree, but * more efficient. * * @param <T> Actual node type; usually either basic {@link JsonNode} or * {@link com.fasterxml.jackson.databind.node.ObjectNode} * @param fromValue Bean value to convert * @return Root node of the resulting JSON tree */ @SuppressWarnings("unchecked") public <T extends JsonNode> T valueToTree(Object fromValue) throws IllegalArgumentException { if (fromValue == null) return null; TokenBuffer buf = new TokenBuffer(this); JsonNode result; try { writeValue(buf, fromValue); JsonParser jp = buf.asParser(); result = readTree(jp); jp.close(); } catch (IOException e) { // should not occur, no real i/o... throw new IllegalArgumentException(e.getMessage(), e); } return (T) result; }
/**
 * Reads current value including objects and arrays as an efficient token buffer.
 * Use of Jackson's own mechanisms is important to preserve custom elements
 * such as special embedded objects in BSON or other data formats.
 *
 * @return {@link TokenBuffer} holding the copied value tokens
 * @throws IOException if an error occurred while copying from the parser
 */
public final TokenBuffer nextTokenBuffer() throws IOException {
    TokenBuffer buffer = new TokenBuffer(parser);
    // If the token was already consumed but the underlying parser is still
    // sitting on it, this moves forward so the copy starts at the right place.
    // NOTE(review): exact peek semantics depend on requirePeek/clearPeek, which
    // are defined elsewhere — confirm against their implementations.
    requirePeek();
    buffer.copyCurrentStructure(parser);
    // When we return to reading from the reader, peek state is cleared so the
    // next nextToken() call resumes from fresh input.
    clearPeek();
    return buffer;
}
/**
 * Serializes {@code node} under the Db view and reads it back as a JSON tree via
 * an in-memory TokenBuffer. Returns null (after logging) if conversion fails.
 */
public static JsonNode objectToJsonNode(Object node, ObjectMapper objectMapper) {
    try {
        final TokenBuffer tokens = new TokenBuffer(objectMapper, false);
        objectMapper.writerWithView(View.Db.class).writeValue(tokens, node);
        return objectMapper.readTree(tokens.asParser());
    } catch (IOException exc) {
        logger.error("Failed to generate object for: " + node, exc);
        return null;
    }
}
/**
 * Processes the results from the sql {@link ResultSet} and writes them out as
 * the json format returned for a {@link com.yahoo.bard.webservice.druid.model.query.GroupByQuery}.
 *
 * @return the equivalent json.
 */
public JsonNode buildDruidResponse() {
    // Per-column converters from sql strings to the numeric types Druid reports.
    Map<String, Function<String, Number>> resultTypeMapper = getAggregationTypeMapper(druidQuery);

    try (TokenBuffer jsonWriter = new TokenBuffer(objectMapper, true)) {
        // Emit one {"timestamp": ..., "event": {...}} object per sql result row.
        jsonWriter.writeStartArray();
        for (String[] row : sqlResults) {
            jsonWriter.writeStartObject();
            DateTime timestamp;
            if (AllGranularity.INSTANCE.equals(druidQuery.getGranularity())) {
                // "all" granularity: a single bucket starting at the query's first interval.
                timestamp = druidQuery.getIntervals().get(0).getStart();
            } else {
                timestamp = sqlTimeConverter.getIntervalStart(
                        groupByDimensionsCount,
                        row,
                        druidQuery
                );
            }
            // all druid results are returned in UTC timestamps
            jsonWriter.writeStringField("timestamp", timestamp.toDateTime(DateTimeZone.UTC).toString());

            jsonWriter.writeObjectFieldStart("event");
            processRow(resultTypeMapper, jsonWriter, row);
            jsonWriter.writeEndObject();

            jsonWriter.writeEndObject();
        }
        jsonWriter.writeEndArray();

        // Replay the buffered tokens directly into a JSON tree.
        return jsonWriter.asParser().readValueAsTree();
    } catch (IOException e) {
        throw new RuntimeException("Failed to write json.", e);
    }
}
@Override protected void readFields(final ODocument document, final HttpClientConfiguration entity) throws Exception { ObjectReader reader = objectMapper.readerForUpdating(entity); TokenBuffer buff = new TokenBuffer(objectMapper, false); Map<String, Object> fields = document.toMap(); // strip out id/class synthetics fields.remove("@rid"); fields.remove("@class"); log.trace("Reading fields: {}", fields); objectMapper.writeValue(buff, fields); reader.readValue(buff.asParser()); }
/**
 * Invokes the method (through its interceptor chain) and converts the returned
 * value to a JSON tree using the method's declared return type for serialization.
 */
private JsonNode invoke(MethodDefinition method, Object[] args) {
    try {
        MethodInvocation invocation = newMethodInvocation(method, args, getInterceptors(method));
        final Object result = invocation.proceed();

        final TokenBuffer tokens = new TokenBuffer(objectMapper, false);
        objectMapper.writerFor(method.getReturnTypeRef()).writeValue(tokens, result);
        return objectMapper.readTree(tokens.asParser());
    } catch (Throwable e) {
        throw propagate(e);
    }
}
/**
 * Returns the dedicated deserializer for Jackson-internal types (TokenBuffer,
 * JavaType subtypes), or null if {@code rawType} is not one of them.
 */
public static JsonDeserializer<?> find(Class<?> rawType) {
    if (rawType == TokenBuffer.class) {
        return TokenBufferDeserializer.instance;
    }
    if (JavaType.class.isAssignableFrom(rawType)) {
        return JavaTypeDeserializer.instance;
    }
    return null;
}
/**
 * Per-call copy constructor: shares the immutable property metadata with the
 * blueprint but allocates fresh per-deserialization type-id and token slots.
 */
protected ExternalTypeHandler(ExternalTypeHandler src) {
    this._properties = src._properties;
    this._nameToPropertyIndex = src._nameToPropertyIndex;
    final int slots = this._properties.length;
    this._typeIds = new String[slots];
    this._tokens = new TokenBuffer[slots];
}
/**
 * Full constructor: wires in the property metadata, the name-to-index lookup,
 * and the pre-allocated per-property type-id and token-buffer arrays.
 */
protected ExternalTypeHandler(ExtTypedProperty[] properties,
        HashMap<String, Integer> nameToPropertyIndex,
        String[] typeIds, TokenBuffer[] tokens) {
    this._properties = properties;
    this._nameToPropertyIndex = nameToPropertyIndex;
    this._typeIds = typeIds;
    this._tokens = tokens;
}
/**
 * Merges the given type id with the buffered value tokens for the property at
 * {@code index} into "[ typeId, value ]" wrapper-array form, then deserializes it.
 */
protected final Object _deserialize(JsonParser jp, DeserializationContext ctxt,
        int index, String typeId) {
    TokenBuffer merged = new TokenBuffer(jp.getCodec());
    merged.writeStartArray();
    merged.writeString(typeId);
    JsonParser bufferedValue = this._tokens[index].asParser(jp);
    bufferedValue.nextToken();
    merged.copyCurrentStructure(bufferedValue);
    merged.writeEndArray();
    // Re-parse the merged sequence, positioned on its first token.
    JsonParser mergedParser = merged.asParser(jp);
    mergedParser.nextToken();
    return this._properties[index].getProperty().deserialize(mergedParser, ctxt);
}
/**
 * Deserializes the buffered external-type property at {@code index} using the
 * given type id (wrapper-array merge) and assigns the value onto {@code bean}.
 */
protected final void _deserializeAndSet(JsonParser jp, DeserializationContext ctxt,
        Object bean, int index, String typeId) {
    TokenBuffer merged = new TokenBuffer(jp.getCodec());
    merged.writeStartArray();
    merged.writeString(typeId);
    JsonParser bufferedValue = this._tokens[index].asParser(jp);
    bufferedValue.nextToken();
    merged.copyCurrentStructure(bufferedValue);
    merged.writeEndArray();
    // Re-parse the merged sequence, positioned on its first token.
    JsonParser mergedParser = merged.asParser(jp);
    mergedParser.nextToken();
    this._properties[index].getProperty().deserializeAndSet(mergedParser, ctxt, bean);
}
/**
 * Finalizes external-type handling: for every property, resolves its type id
 * (explicit, natural scalar, or default) and deserializes the buffered value
 * onto {@code bean}.
 *
 * Fix: the original (decompiler output) used {@code break label263;} targeting a
 * label placed later in the loop body — not valid Java. Restructured as a
 * {@code for} loop where those jumps become {@code continue}, preserving the
 * exact control flow and all messages.
 */
public Object complete(JsonParser jp, DeserializationContext ctxt, Object bean) {
    for (int i = 0; i < this._properties.length; i++) {
        String typeId = this._typeIds[i];
        if (typeId == null) {
            TokenBuffer buffered = this._tokens[i];
            // Neither type id nor value seen: nothing to do for this property.
            if (buffered == null) {
                continue;
            }
            JsonToken first = buffered.firstToken();
            if (first != null && first.isScalarValue()) {
                // Value present without a type id: maybe a "natural" scalar.
                JsonParser bufferedParser = buffered.asParser(jp);
                bufferedParser.nextToken();
                SettableBeanProperty prop = this._properties[i].getProperty();
                Object natural = TypeDeserializer.deserializeIfNatural(bufferedParser, ctxt, prop.getType());
                if (natural != null) {
                    prop.set(bean, natural);
                    continue;
                }
                if (!this._properties[i].hasDefaultType()) {
                    throw ctxt.mappingException("Missing external type id property '"
                            + this._properties[i].getTypePropertyName() + "'");
                }
                typeId = this._properties[i].getDefaultTypeId();
            }
        } else if (this._tokens[i] == null) {
            // Type id without a value token buffer: the value property was missing.
            SettableBeanProperty prop = this._properties[i].getProperty();
            throw ctxt.mappingException("Missing property '" + prop.getName()
                    + "' for external type id '" + this._properties[i].getTypePropertyName());
        }
        _deserializeAndSet(jp, ctxt, bean, i, typeId);
    }
    return bean;
}
/**
 * Records either the type id or the value tokens for an externally-typed
 * property; once both halves are present (and a bean exists) deserializes and
 * assigns the value immediately. Returns false if the name is not one of ours.
 */
public boolean handlePropertyValue(JsonParser jp, DeserializationContext ctxt,
        String propName, Object bean) {
    Integer indexObj = (Integer) this._nameToPropertyIndex.get(propName);
    if (indexObj == null) {
        return false;
    }
    int index = indexObj.intValue();
    boolean readyToDeserialize;
    if (this._properties[index].hasTypePropertyName(propName)) {
        // This is the type-id half; remember it and skip any nested content.
        this._typeIds[index] = jp.getText();
        jp.skipChildren();
        readyToDeserialize = (bean != null) && (this._tokens[index] != null);
    } else {
        // This is the value half; buffer its tokens for later replay.
        TokenBuffer buffered = new TokenBuffer(jp.getCodec());
        buffered.copyCurrentStructure(jp);
        this._tokens[index] = buffered;
        readyToDeserialize = (bean != null) && (this._typeIds[index] != null);
    }
    if (readyToDeserialize) {
        String typeId = this._typeIds[index];
        // Clear state first so the slot is reusable.
        this._typeIds[index] = null;
        _deserializeAndSet(jp, ctxt, bean, index, typeId);
        this._tokens[index] = null;
    }
    return true;
}
/**
 * Replays the buffered (unwrapped) tokens once per tracked property, letting
 * each property pick out and assign its own value onto {@code bean}.
 */
public Object processUnwrapped(JsonParser originalParser, DeserializationContext ctxt,
        Object bean, TokenBuffer buffered) {
    for (int i = 0, end = this._properties.size(); i < end; i++) {
        SettableBeanProperty prop = (SettableBeanProperty) this._properties.get(i);
        // Fresh parser over the same buffer for every property.
        JsonParser bufferedParser = buffered.asParser();
        bufferedParser.nextToken();
        prop.deserializeAndSet(bufferedParser, ctxt, bean);
    }
    return bean;
}
/**
 * Replays every buffered unknown property through the standard
 * handleUnknownProperty hook. The buffer is first terminated with END_OBJECT so
 * the replay loop has a well-defined end marker.
 */
protected Object handleUnknownProperties(DeserializationContext ctxt, Object bean,
        TokenBuffer unknownTokens) {
    unknownTokens.writeEndObject();
    JsonParser bufferedParser = unknownTokens.asParser();
    while (bufferedParser.nextToken() != JsonToken.END_OBJECT) {
        String propName = bufferedParser.getCurrentName();
        bufferedParser.nextToken(); // advance onto the property's value
        handleUnknownProperty(bufferedParser, ctxt, bean, propName);
    }
    return bean;
}
/**
 * Converts a value to its JSON tree form using the given codec, via an
 * in-memory TokenBuffer round-trip. Returns null for null input.
 */
public static JsonNode valueToTree(ObjectCodec oc, Object fromValue) throws Exception {
    if (fromValue == null) {
        return null;
    }
    final TokenBuffer buffer = new TokenBuffer(oc);
    oc.writeValue(buffer, fromValue);
    final JsonParser parser = buffer.asParser();
    final JsonNode tree = oc.readTree(parser);
    parser.close();
    return tree;
}
public V clone() { try { final ObjectMapper mapper = Mason.getMapper(); // Similar to MAPPER.treeToValue(MAPPER.valueToTree(this), this.getClass()), // but skips tokens -> tree -> tokens. See ObjectMapper#valueToTree(Object). TokenBuffer buf = new TokenBuffer(Mason.getMapper(), false); mapper.writer().writeValue(buf, this); JsonParser json = buf.asParser(); V result = mapper.reader().withType(this.getClass()).readValue(json); json.close(); return result; } catch (IOException e) { throw Throwables.propagate(e); } }
protected Object _deserializeTypedUsingDefaultImpl(JsonParser jp, DeserializationContext ctxt, TokenBuffer tb) throws IOException, JsonProcessingException { // As per [JACKSON-614], may have default implementation to use if (_defaultImpl != null) { JsonDeserializer<Object> deser = _findDefaultImplDeserializer(ctxt); if (tb != null) { tb.writeEndObject(); jp = tb.asParser(jp); // must move to point to the first token: jp.nextToken(); } return deser.deserialize(jp, ctxt); } // or, perhaps we just bumped into a "natural" value (boolean/int/double/String)? Object result = _deserializeIfNatural(jp, ctxt); if (result != null) { return result; } // or, something for which "as-property" won't work, changed into "wrapper-array" type: if (jp.getCurrentToken() == JsonToken.START_ARRAY) { return super.deserializeTypedFromAny(jp, ctxt); } throw ctxt.wrongTokenException(jp, JsonToken.FIELD_NAME, "missing property '"+_typePropertyName+"' that is to contain type id (for class "+baseTypeName()+")"); }
/**
 * Method that handles type information wrapper, locates actual
 * subtype deserializer to use, and calls it to do actual
 * deserialization.
 */
private final Object _deserialize(JsonParser jp, DeserializationContext ctxt)
    throws IOException, JsonProcessingException
{
    // first, sanity checks
    if (jp.getCurrentToken() != JsonToken.START_OBJECT) {
        throw ctxt.wrongTokenException(jp, JsonToken.START_OBJECT,
            "need JSON Object to contain As.WRAPPER_OBJECT type information for class "+baseTypeName());
    }
    // should always get field name, but just in case...
    if (jp.nextToken() != JsonToken.FIELD_NAME) {
        throw ctxt.wrongTokenException(jp, JsonToken.FIELD_NAME,
            "need JSON String that contains type id (for subtype of "+baseTypeName()+")");
    }
    // The wrapper's single property name IS the type id.
    final String typeId = jp.getText();
    JsonDeserializer<Object> deser = _findDeserializer(ctxt, typeId);
    jp.nextToken(); // advance onto the wrapped value itself
    // Minor complication: we may need to merge type id in?
    if (_typeIdVisible && jp.getCurrentToken() == JsonToken.START_OBJECT) {
        // but what if there's nowhere to add it in? Error? Or skip? For now, skip.
        TokenBuffer tb = new TokenBuffer(null);
        tb.writeStartObject(); // recreate START_OBJECT
        tb.writeFieldName(_typePropertyName);
        tb.writeString(typeId);
        // Chain the synthetic type-property tokens in front of the remaining input.
        jp = JsonParserSequence.createFlattened(tb.asParser(jp), jp);
        jp.nextToken();
    }
    Object value = deser.deserialize(jp, ctxt);
    // And then need the closing END_OBJECT
    if (jp.nextToken() != JsonToken.END_OBJECT) {
        throw ctxt.wrongTokenException(jp, JsonToken.END_OBJECT,
            "expected closing END_OBJECT after type information and deserialized value");
    }
    return value;
}
/**
 * Method called in cases where we may have polymorphic deserialization
 * case: that is, type of Creator-constructed bean is not the type
 * of deserializer itself. It should be a sub-class or implementation
 * class; either way, we may have more specific deserializer to use
 * for handling it.
 *
 * @param jp (optional) If not null, parser that has more properties to handle
 *   (in addition to buffered properties); if null, all properties are passed
 *   in buffer
 * @param ctxt active deserialization context
 * @param bean the (partially populated) bean instance
 * @param unknownTokens buffered properties not recognized so far, or null
 * @return the fully populated bean (possibly handled by a subtype deserializer)
 */
protected Object handlePolymorphic(JsonParser jp, DeserializationContext ctxt,
        Object bean, TokenBuffer unknownTokens)
    throws IOException, JsonProcessingException
{
    // First things first: maybe there is a more specific deserializer available?
    JsonDeserializer<Object> subDeser = _findSubclassDeserializer(ctxt, bean, unknownTokens);
    if (subDeser != null) {
        if (unknownTokens != null) {
            // need to add END_OBJECT marker first
            unknownTokens.writeEndObject();
            JsonParser p2 = unknownTokens.asParser();
            p2.nextToken(); // to get to first data field
            bean = subDeser.deserialize(p2, ctxt, bean);
        }
        // Original parser may also have some leftovers
        if (jp != null) {
            bean = subDeser.deserialize(jp, ctxt, bean);
        }
        return bean;
    }
    // nope; need to use this deserializer. Unknowns we've seen so far?
    if (unknownTokens != null) {
        bean = handleUnknownProperties(ctxt, bean, unknownTokens);
    }
    // and/or things left to process via main parser?
    if (jp != null) {
        bean = deserialize(jp, ctxt, bean);
    }
    return bean;
}
@Override public TokenBuffer deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { TokenBuffer tb = new TokenBuffer(jp.getCodec()); // quite simple, given that TokenBuffer is a JsonGenerator: tb.copyCurrentStructure(jp); return tb; }