private void serializeOneNestedSet(
    Object children, CodedOutputStream codedOut, Map<Object, byte[]> childToDigest)
    throws IOException, SerializationException {
  // Serialize nested set into an inner byte array so we can take its digest
  ByteArrayOutputStream childOutputStream = new ByteArrayOutputStream();
  HashingOutputStream hashingOutputStream =
      new HashingOutputStream(Hashing.md5(), childOutputStream);
  CodedOutputStream childCodedOut = CodedOutputStream.newInstance(hashingOutputStream);
  if (children instanceof Object[]) {
    serializeMultiItemChildArray((Object[]) children, childToDigest, childCodedOut);
  } else if (children != NestedSet.EMPTY_CHILDREN) {
    serializeSingleItemChildArray(children, childCodedOut);
  } else {
    // Empty set
    childCodedOut.writeInt32NoTag(0);
  }
  childCodedOut.flush();
  byte[] digest = hashingOutputStream.hash().asBytes();
  codedOut.writeByteArrayNoTag(digest);
  byte[] childBytes = childOutputStream.toByteArray();
  codedOut.writeByteArrayNoTag(childBytes);
  childToDigest.put(children, digest);
}
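A minimal, self-contained sketch of the same "buffer, digest, then emit" pattern, without the NestedSet-specific helpers above. The class name, the string items, and the SHA-256 choice are illustrative assumptions, not taken from the original codec.

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import com.google.protobuf.CodedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public final class DigestThenWriteSketch {
  // Serializes a child list into an in-memory buffer through a HashingOutputStream,
  // then writes the digest followed by the serialized bytes on the outer stream.
  static void writeChildWithDigest(CodedOutputStream codedOut, String[] items) throws IOException {
    ByteArrayOutputStream childBuffer = new ByteArrayOutputStream();
    HashingOutputStream hashingOut = new HashingOutputStream(Hashing.sha256(), childBuffer);
    CodedOutputStream childCodedOut = CodedOutputStream.newInstance(hashingOut);

    // Length-prefixed list of strings, written into the inner buffer.
    childCodedOut.writeInt32NoTag(items.length);
    for (String item : items) {
      childCodedOut.writeStringNoTag(item);
    }
    childCodedOut.flush();

    // The digest covers exactly the bytes buffered above.
    byte[] digest = hashingOut.hash().asBytes();
    codedOut.writeByteArrayNoTag(digest);
    codedOut.writeByteArrayNoTag(childBuffer.toByteArray());
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream outer = new ByteArrayOutputStream();
    CodedOutputStream codedOut = CodedOutputStream.newInstance(outer);
    writeChildWithDigest(codedOut, new String[] {"a", "b", "c"});
    codedOut.flush();
    System.out.println("outer stream holds " + outer.size() + " bytes");
  }
}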
private static String computeHash(ByteSource source, HashFunction hashFunction)
    throws IOException {
  try (InputStream inputStream = source.openStream();
      HashingOutputStream outputStream =
          new HashingOutputStream(
              hashFunction,
              new OutputStream() {
                @Override
                public void write(int b) throws IOException {
                  // Do nothing.
                }
              })) {
    ByteStreams.copy(inputStream, outputStream);
    return outputStream.hash().toString();
  }
}
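A runnable sketch of the same idea: hash a ByteSource by streaming it into a HashingOutputStream whose sink discards the bytes. Guava's ByteStreams.nullOutputStream() plays the role of the anonymous no-op OutputStream above; the class name and input string are made up for illustration.

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import com.google.common.io.ByteSource;
import com.google.common.io.ByteStreams;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public final class ByteSourceHashDemo {
  public static void main(String[] args) throws IOException {
    ByteSource source = ByteSource.wrap("hello world".getBytes(StandardCharsets.UTF_8));
    try (InputStream in = source.openStream();
        HashingOutputStream out =
            new HashingOutputStream(Hashing.sha256(), ByteStreams.nullOutputStream())) {
      ByteStreams.copy(in, out);
      // Prints the hex-encoded SHA-256 of the source bytes.
      System.out.println(out.hash());
    }
  }
}

Note that Guava also offers ByteSource.hash(HashFunction), which performs the same computation in a single call; the stream-based form is useful when the data is only available as an OutputStream pipeline.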
public ReadPayloadInfo readPayload(OutputStream outStream) throws IOException {
  assertTrue(
      nextPayloadToBeRead < thriftData.getPayloadsSize(),
      "Trying to download payload index=[%s] but the thriftData only contains [%s] payloads.",
      nextPayloadToBeRead,
      thriftData.getPayloadsSize());
  long payloadSizeBytes =
      assertNotNull(thriftData.getPayloads(), "Payloads[] cannot be null.")
          .get(nextPayloadToBeRead)
          .getSizeBytes();
  try (HashingOutputStream wrappedOutputStream =
      new HashingOutputStream(MD5_HASH_FUNCTION, outStream)) {
    copyExactly(responseStream, wrappedOutputStream, payloadSizeBytes);
    ++nextPayloadToBeRead;
    return new ReadPayloadInfo(payloadSizeBytes, wrappedOutputStream.hash().toString());
  }
}
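The snippet above depends on thriftData, responseStream, and a copyExactly helper that are not shown. The sketch below reproduces just the hashing pattern, assuming copyExactly means "copy exactly N bytes or fail"; the bounded copy is done here with Guava's ByteStreams.limit, and all names are hypothetical.

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import com.google.common.io.ByteStreams;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public final class BoundedCopyHashSketch {
  // Copies exactly n bytes from in to out and returns their MD5 hex digest.
  static String copyAndHash(InputStream in, OutputStream out, long n) throws IOException {
    HashingOutputStream hashingOut = new HashingOutputStream(Hashing.md5(), out);
    long copied = ByteStreams.copy(ByteStreams.limit(in, n), hashingOut);
    if (copied != n) {
      throw new IOException("Expected " + n + " bytes but stream ended after " + copied);
    }
    return hashingOut.hash().toString();
  }

  public static void main(String[] args) throws IOException {
    byte[] payload = "payload-bytes".getBytes();
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    String md5 = copyAndHash(new ByteArrayInputStream(payload), sink, payload.length);
    System.out.println(md5 + " over " + sink.size() + " bytes");
  }
}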
/** Encodes the given element using the given coder and hashes the encoding. */
static <T> long hash(T element, Coder<T> coder) throws CoderException, IOException {
  try (HashingOutputStream stream =
      new HashingOutputStream(Hashing.murmur3_128(), ByteStreams.nullOutputStream())) {
    coder.encode(element, stream, Context.OUTER);
    return stream.hash().asLong();
  }
}
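The method above relies on a Coder from the Beam/Dataflow SDK. To keep the idea self-contained, the sketch below swaps in plain Java serialization as the encoder; everything else (murmur3_128 over a null sink, hash taken as a long) follows the same pattern. Class and method names are invented for illustration.

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import com.google.common.io.ByteStreams;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;

public final class EncodedHashSketch {
  // Hashes an object's serialized form without retaining the encoded bytes.
  static long encodedHash(Serializable element) throws IOException {
    HashingOutputStream hashingOut =
        new HashingOutputStream(Hashing.murmur3_128(), ByteStreams.nullOutputStream());
    try (ObjectOutputStream objectOut = new ObjectOutputStream(hashingOut)) {
      objectOut.writeObject(element);
    }
    // close() has flushed every encoded byte through hashingOut; take the hash now.
    return hashingOut.hash().asLong();
  }

  public static void main(String[] args) throws IOException {
    System.out.println(encodedHash(new ArrayList<>(Arrays.asList("a", "b"))));
  }
}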
public void download(BackupMetadata backup, String filename, OutputStream out) throws IOException {
  if (!(backup.isSuccessful() || backup.isFailed())) {
    throw new IllegalStateException("Attempted to download a non-complete backup.");
  }
  final Timer.Context context = DOWNLOAD_TIMES.time();
  try {
    ACTIVE_DOWNLOADS.inc();
    for (Chunk chunk : backup.getChunks(filename)) {
      final CompressionCodec compressionCodec = chunk.getCompressionCodec();
      final StreamCodec codec = codecFactory.get(compressionCodec, backup.getModelVersion() < 1);
      try (final InputStream in =
          codec.input(this.openStreamFromStorage(backup.getService(), chunk.getPath()))) {
        final HashingOutputStream md5out = new HashingOutputStream(Hashing.md5(), out);
        final long size = ByteStreams.copy(in, md5out);
        final String hash = md5out.hash().toString();
        if (!hash.equalsIgnoreCase(chunk.getHash())) {
          throw new InvalidMD5Exception(chunk.getHash(), hash);
        }
        DOWNLOAD_SIZES.update(size);
      }
    }
  } catch (IOException e) {
    LOG.error("Failed to download backup: " + backup, e);
    throw e;
  } finally {
    ACTIVE_DOWNLOADS.dec();
    context.stop();
  }
}
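A stripped-down sketch of the verification step used above: copy a stream to its destination through a HashingOutputStream and reject the copy if the observed MD5 does not match the expected value. It throws a plain IOException instead of the project's InvalidMD5Exception and omits the metrics and codec plumbing; names are illustrative.

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import com.google.common.io.ByteStreams;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public final class VerifiedCopySketch {
  // Copies in to out, failing if the streamed bytes do not match expectedMd5.
  static long copyVerified(InputStream in, OutputStream out, String expectedMd5) throws IOException {
    HashingOutputStream md5Out = new HashingOutputStream(Hashing.md5(), out);
    long size = ByteStreams.copy(in, md5Out);
    String actual = md5Out.hash().toString();
    if (!actual.equalsIgnoreCase(expectedMd5)) {
      throw new IOException("MD5 mismatch: expected " + expectedMd5 + " but was " + actual);
    }
    return size;
  }

  public static void main(String[] args) throws IOException {
    byte[] data = "chunk-contents".getBytes();
    String expected = Hashing.md5().hashBytes(data).toString();
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    long copied = copyVerified(new ByteArrayInputStream(data), sink, expected);
    System.out.println("verified " + copied + " bytes");
  }
}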
/**
 * Sets the input data, compressed with the given Compression. Automatically
 * calculates the md5 sum and length properties.
 * @param in InputStream
 * @param compression Compression to apply, or null for none
 * @return Builder
 * @throws IOException
 */
public Builder data(InputStream in, Compression<?, ?> compression) throws IOException {
  StringWriter writer = new StringWriter();
  OutputStream out = BaseEncoding.base64Url().encodingStream(writer);
  if (compression != null) {
    out = compression.compressor(out);
  }
  // The hash and length are computed over the raw input bytes; the compressed,
  // base64url-encoded form ends up in the writer.
  HashingOutputStream hout = new HashingOutputStream(Hashing.md5(), out);
  byte[] buf = new byte[1024];
  int r = -1;
  long size = 0;
  while ((r = in.read(buf)) > -1) {
    hout.write(buf, 0, r);
    size += r;
  }
  set("length", size);
  if (compression != null) {
    set("compression", compression.label());
    compression.finish(out);
  }
  hout.close();
  set("md5", hout.hash().toString());
  return set("data", writer.toString());
}
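The Builder, Compression, and set(...) helpers above are project-specific. The following sketch isolates the reusable part of the pipeline: bytes written to the HashingOutputStream are hashed and counted as raw input, while the wrapped BaseEncoding stream produces the base64url text. The class name and payload are invented.

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import com.google.common.io.BaseEncoding;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;

public final class Base64DataSketch {
  public static void main(String[] args) throws IOException {
    byte[] input = "example payload".getBytes(StandardCharsets.UTF_8);

    StringWriter writer = new StringWriter();
    OutputStream base64Out = BaseEncoding.base64Url().encodingStream(writer);
    HashingOutputStream hashingOut = new HashingOutputStream(Hashing.md5(), base64Out);

    // The hash and length describe the raw input bytes; the writer receives
    // the base64url-encoded form of the same bytes.
    hashingOut.write(input);
    hashingOut.close(); // also closes base64Out, finishing the encoding

    System.out.println("length = " + input.length);
    System.out.println("md5    = " + hashingOut.hash());
    System.out.println("data   = " + writer.toString());
  }
}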
public String writeTo(final OutputStream outputStream) throws IOException {
  checkState(appManifest != null || filesetManifest != null,
      "neither AppManifest nor a FilesetManifest is given");

  final OutputStream bufferedOutputStream;
  if (buffering) {
    bufferedOutputStream = new BufferedOutputStream(outputStream);
  } else {
    bufferedOutputStream = outputStream;
  }

  // TODO add encryption capabilities (before or after hashing/compression?) (https://github.com/coreos/rocket/issues/233)
  final OutputStream compressedOutputStream = compression.wrapStream(bufferedOutputStream);
  final HashingOutputStream hashingOutputStream =
      new HashingOutputStream(Hashing.sha256(), compressedOutputStream);
  final TarArchiveOutputStream imageOutputStream = new TarArchiveOutputStream(hashingOutputStream);

  if (appManifest != null) {
    final String appManifestJson = JSON_MAPPER.writeValueAsString(appManifest);
    // Use the encoded byte length, not the character count, so the entry size
    // stays correct for non-ASCII manifests.
    final byte[] appManifestBytes = appManifestJson.getBytes(MANIFEST_CHARSET);
    final TarArchiveEntry appManifestEntry = new TarArchiveEntry("/app");
    appManifestEntry.setSize(appManifestBytes.length);
    imageOutputStream.putArchiveEntry(appManifestEntry);
    imageOutputStream.write(appManifestBytes);
    imageOutputStream.closeArchiveEntry();
  }

  if (filesetManifest != null) {
    final String filesetManifestJson = JSON_MAPPER.writeValueAsString(filesetManifest);
    final byte[] filesetManifestBytes = filesetManifestJson.getBytes(MANIFEST_CHARSET);
    final TarArchiveEntry filesetManifestEntry = new TarArchiveEntry("/fileset");
    filesetManifestEntry.setSize(filesetManifestBytes.length);
    imageOutputStream.putArchiveEntry(filesetManifestEntry);
    imageOutputStream.write(filesetManifestBytes);
    imageOutputStream.closeArchiveEntry();
  }

  for (final Content content : contents) {
    content.addToImage(imageOutputStream);
  }

  imageOutputStream.flush();
  return hashingOutputStream.hash().toString();
}
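Because the TarArchiveOutputStream is layered on top of the HashingOutputStream, every byte of the archive, including entry headers and the trailing records, contributes to the returned digest. The standalone sketch below shows that layering with commons-compress and SHA-256; the class name, entry name, and manifest content are made up, and the buffering/compression wrappers from the original are omitted.

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

public final class TarDigestSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    HashingOutputStream hashingOut = new HashingOutputStream(Hashing.sha256(), sink);

    try (TarArchiveOutputStream tarOut = new TarArchiveOutputStream(hashingOut)) {
      byte[] manifest = "{\"name\":\"example\"}".getBytes(StandardCharsets.UTF_8);
      TarArchiveEntry entry = new TarArchiveEntry("/app");
      entry.setSize(manifest.length);
      tarOut.putArchiveEntry(entry);
      tarOut.write(manifest);
      tarOut.closeArchiveEntry();
    }

    // The digest covers every byte of the tar stream, including archive metadata.
    System.out.println("sha256 = " + hashingOut.hash());
    System.out.println("archive bytes = " + sink.size());
  }
}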