Example source code for the Java class com.esotericsoftware.kryo.io.InputChunked

Project: subzero    File: KryoStrategy.java
public T read(InputStream in) throws IOException {
    KryoContext kryoContext = KRYOS.get();
    InputChunked input = kryoContext.getInputChunked();
    input.setInputStream(in);
    T object = readObject(kryoContext.getKryo(), input);
    return object;
}
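
The write-side counterpart in subzero presumably mirrors this method. The sketch below is only that: getOutputChunked() and writeObject(...) are assumed by analogy with getInputChunked() and readObject(...) above, not copied from the project.

public void write(T object, OutputStream out) throws IOException {
    KryoContext kryoContext = KRYOS.get();
    OutputChunked output = kryoContext.getOutputChunked(); // assumed accessor, see KryoContext below
    output.setOutputStream(out);
    writeObject(kryoContext.getKryo(), output, object);    // assumed counterpart of readObject(...)
    output.endChunks(); // terminate the chunk set so the reader knows where the object ends
}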
Project: EsperDist    File: DeflateSerializer.java
public Object read (Kryo kryo, Input input, Class type) {
    // The inflater would read from input beyond the compressed bytes if chunked encoding wasn't used.
    Inflater inflater = new Inflater(noHeaders);
    try {
        InflaterInputStream inflaterStream = new InflaterInputStream(new InputChunked(input, 256), inflater);
        return serializer.read(kryo, new Input(inflaterStream, 256), type);
    } finally {
        inflater.end();
    }
}
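
The matching write method pairs this with OutputChunked, so the reader can stop exactly at the end of the compressed bytes. The sketch below follows upstream Kryo's DeflateSerializer rather than the EsperDist copy verbatim; compressionLevel stands for the serializer's configured compression level and is not shown in the snippet above.

public void write (Kryo kryo, Output output, Object object) {
    Deflater deflater = new Deflater(compressionLevel, noHeaders);
    OutputChunked outputChunked = new OutputChunked(output, 256);
    DeflaterOutputStream deflaterStream = new DeflaterOutputStream(outputChunked, deflater);
    Output deflaterOutput = new Output(deflaterStream, 256);
    serializer.write(kryo, deflaterOutput, object);
    deflaterOutput.flush();
    try {
        deflaterStream.finish(); // push the remaining compressed bytes through
    } catch (IOException ex) {
        throw new KryoException(ex);
    } finally {
        deflater.end();
    }
    outputChunked.endChunks(); // write the end-of-chunks marker the reader relies on
}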
Project: JourneyPlanner    File: DeflateSerializer.java
public Object read (Kryo kryo, Input input, Class type) {
    // The inflater would read from input beyond the compressed bytes if chunked encoding wasn't used.
    Inflater inflater = new Inflater(noHeaders);
    try {
        InflaterInputStream inflaterStream = new InflaterInputStream(new InputChunked(input, 256), inflater);
        return serializer.read(kryo, new Input(inflaterStream, 256), type);
    } finally {
        inflater.end();
    }
}
Project: kryo-mavenized    File: ChunkedTest.java
public void testChunks () {
    Output output = new Output(512);
    output.writeInt(1234);
    OutputChunked outputChunked = new OutputChunked(output);
    outputChunked.writeInt(1);
    outputChunked.endChunks();
    outputChunked.writeInt(2);
    outputChunked.endChunks();
    outputChunked.writeInt(3);
    outputChunked.endChunks();
    outputChunked.writeInt(4);
    outputChunked.endChunks();
    outputChunked.writeInt(5);
    outputChunked.endChunks();
    output.writeInt(5678);
    output.close();

    Input input = new Input(output.getBuffer());
    assertEquals(1234, input.readInt());
    InputChunked inputChunked = new InputChunked(input);
    assertEquals(1, inputChunked.readInt());
    inputChunked.nextChunks();
    inputChunked.nextChunks(); // skip 2
    assertEquals(3, inputChunked.readInt());
    inputChunked.nextChunks();
    inputChunked.nextChunks(); // skip 4
    assertEquals(5, inputChunked.readInt());
    assertEquals(5678, input.readInt());
    input.close();
}
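
Not taken from the test above: a minimal sketch of the same write-chunks/skip-chunks pattern over java.io streams instead of a pre-sized buffer, using the stream-based constructors of OutputChunked and InputChunked.

public void chunkedStreamRoundTrip () {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    OutputChunked out = new OutputChunked(bytes, 64);
    out.writeString("first");
    out.endChunks(); // flush the chunk and write the end-of-chunks marker
    out.writeString("second");
    out.endChunks();

    InputChunked in = new InputChunked(new ByteArrayInputStream(bytes.toByteArray()), 64);
    in.nextChunks(); // skip the whole first set of chunks without reading it
    assertEquals("second", in.readString());
}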
Project: subzero    File: KryoStrategy.java
protected KryoContext initialValue() {
    Kryo kryo = newKryoInstance();
    OutputChunked output = new OutputChunked(BUFFER_SIZE);
    InputChunked input = new InputChunked(BUFFER_SIZE);
    return new KryoContext(kryo, input, output);
}
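
For context, this initialValue() is presumably the body of the thread-local that the read(InputStream) snippet above consults via KRYOS.get(); the field declaration below is an assumed wiring, not copied from subzero.

private final ThreadLocal<KryoContext> KRYOS = new ThreadLocal<KryoContext>() {
    @Override
    protected KryoContext initialValue() {
        Kryo kryo = newKryoInstance();
        OutputChunked output = new OutputChunked(BUFFER_SIZE);
        InputChunked input = new InputChunked(BUFFER_SIZE);
        return new KryoContext(kryo, input, output);
    }
};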
Project: subzero    File: KryoContext.java
KryoContext(Kryo kryo, InputChunked inputChunked, OutputChunked outputChunked) {
    this.kryo = kryo;
    this.inputChunked = inputChunked;
    this.outputChunked = outputChunked;
}
Project: subzero    File: KryoContext.java
public InputChunked getInputChunked() {
    return inputChunked;
}
Project: EsperDist    File: CompatibleFieldSerializer.java
public T read (Kryo kryo, Input input, Class<T> type) {
    T object = create(kryo, input, type);
    kryo.reference(object);
    ObjectMap context = kryo.getGraphContext();
    CachedField[] fields = (CachedField[])context.get(this);
    if (fields == null) {
        int length = input.readVarInt(true);
        if (TRACE) trace("kryo", "Read " + length + " field names.");
        String[] names = new String[length];
        for (int i = 0; i < length; i++)
            names[i] = input.readString();

        fields = new CachedField[length];
        CachedField[] allFields = getFields();
        outer:
        for (int i = 0, n = names.length; i < n; i++) {
            String schemaName = names[i];
            for (int ii = 0, nn = allFields.length; ii < nn; ii++) {
                if (allFields[ii].field.getName().equals(schemaName)) {
                    fields[i] = allFields[ii];
                    continue outer;
                }
            }
            if (TRACE) trace("kryo", "Ignore obsolete field: " + schemaName);
        }
        context.put(this, fields);
    }

    InputChunked inputChunked = new InputChunked(input, 1024);
    boolean hasGenerics = getGenerics() != null;
    for (int i = 0, n = fields.length; i < n; i++) {
        CachedField cachedField = fields[i];
        if(cachedField != null && hasGenerics) {
            // Generic type used to instantiate this field could have 
            // been changed in the meantime. Therefore take the most 
            // up-to-date definition of a field
            cachedField = getField(cachedField.field.getName());
        }
        if (cachedField == null) {
            if (TRACE) trace("kryo", "Skip obsolete field.");
            inputChunked.nextChunks();
            continue;
        }
        cachedField.read(inputChunked, object);
        inputChunked.nextChunks();
    }
    return object;
}
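
The chunk boundaries consumed above (one nextChunks() per field) are produced by the serializer's write method, which wraps the output in an OutputChunked and calls endChunks() after every field. A sketch based on upstream Kryo's CompatibleFieldSerializer, not the EsperDist copy verbatim:

public void write (Kryo kryo, Output output, T object) {
    CachedField[] fields = getFields();
    ObjectMap context = kryo.getGraphContext();
    if (!context.containsKey(this)) {
        // First use of this serializer in the graph: write the field-name schema.
        context.put(this, null);
        if (TRACE) trace("kryo", "Write " + fields.length + " field names.");
        output.writeVarInt(fields.length, true);
        for (int i = 0, n = fields.length; i < n; i++)
            output.writeString(fields[i].field.getName());
    }

    OutputChunked outputChunked = new OutputChunked(output, 1024);
    for (int i = 0, n = fields.length; i < n; i++) {
        fields[i].write(outputChunked, object);
        outputChunked.endChunks(); // one chunk set per field, so unknown fields can be skipped
    }
}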
Project: EsperDist    File: DeflateSerializer.java
public Object read (Kryo kryo, Input input, Class type) {
    // The inflater would read from input beyond the compressed bytes if chunked encoding wasn't used.
    InflaterInputStream inflaterStream = new InflaterInputStream(new InputChunked(input, 256), new Inflater(noHeaders));
    return kryo.readObject(new Input(inflaterStream, 256), type, serializer);
}
Project: EsperDist    File: CompatibleFieldSerializer.java
public T read (Kryo kryo, Input input, Class<T> type) {
    T object = create(kryo, input, type);
    kryo.reference(object);
    ObjectMap context = kryo.getGraphContext();
    CachedField[] fields = (CachedField[])context.get(this);
    if (fields == null) {
        int length = input.readVarInt(true);
        if (TRACE) trace("kryo", "Read " + length + " field names.");
        String[] names = new String[length];
        for (int i = 0; i < length; i++)
            names[i] = input.readString();

        fields = new CachedField[length];
        CachedField[] allFields = getFields();
        outer:
        for (int i = 0, n = names.length; i < n; i++) {
            String schemaName = names[i];
            for (int ii = 0, nn = allFields.length; ii < nn; ii++) {
                if (allFields[ii].field.getName().equals(schemaName)) {
                    fields[i] = allFields[ii];
                    continue outer;
                }
            }
            if (TRACE) trace("kryo", "Ignore obsolete field: " + schemaName);
        }
        context.put(this, fields);
    }

    InputChunked inputChunked = new InputChunked(input, 1024);
    for (int i = 0, n = fields.length; i < n; i++) {
        CachedField cachedField = fields[i];
        if (cachedField == null) {
            if (TRACE) trace("kryo", "Skip obsolete field.");
            inputChunked.nextChunks();
            continue;
        }
        cachedField.read(inputChunked, object);
        inputChunked.nextChunks();
    }
    return object;
}
Project: JourneyPlanner    File: CompatibleFieldSerializer.java
public T read (Kryo kryo, Input input, Class<T> type) {
    T object = create(kryo, input, type);
    kryo.reference(object);
    ObjectMap context = kryo.getGraphContext();
    CachedField[] fields = (CachedField[])context.get(this);
    if (fields == null) {
        int length = input.readVarInt(true);
        if (TRACE) trace("kryo", "Read " + length + " field names.");
        String[] names = new String[length];
        for (int i = 0; i < length; i++)
            names[i] = input.readString();

        fields = new CachedField[length];
        CachedField[] allFields = getFields();
        outer:
        for (int i = 0, n = names.length; i < n; i++) {
            String schemaName = names[i];
            for (int ii = 0, nn = allFields.length; ii < nn; ii++) {
                if (allFields[ii].field.getName().equals(schemaName)) {
                    fields[i] = allFields[ii];
                    continue outer;
                }
            }
            if (TRACE) trace("kryo", "Ignore obsolete field: " + schemaName);
        }
        context.put(this, fields);
    }

    InputChunked inputChunked = new InputChunked(input, 1024);
    boolean hasGenerics = getGenerics() != null;
    for (int i = 0, n = fields.length; i < n; i++) {
        CachedField cachedField = fields[i];
        if(cachedField != null && hasGenerics) {
            // Generic type used to instantiate this field could have 
            // been changed in the meantime. Therefore take the most 
            // up-to-date definition of a field
            cachedField = getField(cachedField.field.getName());
        }
        if (cachedField == null) {
            if (TRACE) trace("kryo", "Skip obsolete field.");
            inputChunked.nextChunks();
            continue;
        }
        cachedField.read(inputChunked, object);
        inputChunked.nextChunks();
    }
    return object;
}
Project: geowave    File: FeatureSerializationTest.java
@Test
public void test()
        throws SchemaException {
    final Kryo kryo = new Kryo();

    kryo.register(
            SimpleFeatureImpl.class,
            new FeatureSerializer());

    final SimpleFeatureType schema = DataUtilities.createType(
            "testGeo",
            "location:Point:srid=4326,name:String");
    final List<AttributeDescriptor> descriptors = schema.getAttributeDescriptors();
    final Object[] defaults = new Object[descriptors.size()];
    int p = 0;
    for (final AttributeDescriptor descriptor : descriptors) {
        defaults[p++] = descriptor.getDefaultValue();
    }

    final SimpleFeature feature = SimpleFeatureBuilder.build(
            schema,
            defaults,
            UUID.randomUUID().toString());
    final GeometryFactory geoFactory = new GeometryFactory();

    feature.setAttribute(
            "location",
            geoFactory.createPoint(new Coordinate(
                    -45,
                    45)));
    final Output output = new OutputChunked();
    kryo.getSerializer(
            SimpleFeatureImpl.class).write(
            kryo,
            output,
            feature);
    final Input input = new InputChunked();
    input.setBuffer(output.getBuffer());
    final SimpleFeature f2 = (SimpleFeature) kryo.getSerializer(
            SimpleFeatureImpl.class).read(
            kryo,
            input,
            SimpleFeatureImpl.class);
    assertEquals(
            feature,
            f2);

}
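
FeatureSerializer itself is not shown on this page; it is a custom Kryo serializer for SimpleFeatureImpl registered above. Its outline follows Kryo's Serializer contract, roughly as in this skeleton (the bodies are placeholders, not geowave's implementation):

public class FeatureSerializer extends Serializer<SimpleFeatureImpl> {
    @Override
    public void write(Kryo kryo, Output output, SimpleFeatureImpl feature) {
        // write the feature type information and each attribute value
    }

    @Override
    public SimpleFeatureImpl read(Kryo kryo, Input input, Class<SimpleFeatureImpl> type) {
        // rebuild the SimpleFeature from the type information and attribute values
        throw new UnsupportedOperationException("illustrative skeleton only");
    }
}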
Project: kryo-mavenized    File: DeflateSerializer.java
public Object read (Kryo kryo, Input input, Class type) {
    // The inflater would read from input beyond the compressed bytes if chunked encoding wasn't used.
    InflaterInputStream inflaterStream = new InflaterInputStream(new InputChunked(input, 256), new Inflater(noHeaders));
    return kryo.readObject(new Input(inflaterStream, 256), type, serializer);
}
Project: kryo-mavenized    File: CompatibleFieldSerializer.java
public T read (Kryo kryo, Input input, Class<T> type) {
    T object = create(kryo, input, type);
    kryo.reference(object);
    ObjectMap context = kryo.getGraphContext();
    CachedField[] fields = (CachedField[])context.get(this);
    if (fields == null) {
        int length = input.readVarInt(true);
        if (TRACE) trace("kryo", "Read " + length + " field names.");
        String[] names = new String[length];
        for (int i = 0; i < length; i++)
            names[i] = input.readString();

        fields = new CachedField[length];
        CachedField[] allFields = getFields();
        outer:
        for (int i = 0, n = names.length; i < n; i++) {
            String schemaName = names[i];
            for (int ii = 0, nn = allFields.length; ii < nn; ii++) {
                if (allFields[ii].field.getName().equals(schemaName)) {
                    fields[i] = allFields[ii];
                    continue outer;
                }
            }
            if (TRACE) trace("kryo", "Ignore obsolete field: " + schemaName);
        }
        context.put(this, fields);
    }

    InputChunked inputChunked = new InputChunked(input, 1024);
    for (int i = 0, n = fields.length; i < n; i++) {
        CachedField cachedField = fields[i];
        if (cachedField == null) {
            if (TRACE) trace("kryo", "Skip obsolete field.");
            inputChunked.nextChunks();
            continue;
        }
        cachedField.read(inputChunked, object);
        inputChunked.nextChunks();
    }
    return object;
}