Source code examples for the Java class com.google.common.hash.HashingInputStream
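The snippets below are collected from open-source projects and show typical uses of Guava's HashingInputStream, which feeds every byte read from the wrapped stream into a hash function; hash() is meaningful once the stream has been fully consumed. A minimal, self-contained sketch of the core pattern (the input data is made up for illustration):

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingInputStream;
import com.google.common.io.ByteStreams;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class HashingInputStreamSketch {
    public static void main(String[] args) throws IOException {
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
        try (HashingInputStream in =
                new HashingInputStream(Hashing.sha256(), new ByteArrayInputStream(data))) {
            ByteStreams.exhaust(in);        // drain to EOF; hashing happens as a side effect of reading
            System.out.println(in.hash());  // lowercase hex digest of every byte that passed through
        }
    }
}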

Project: openscoring    File: ModelRegistry.java
@SuppressWarnings("resource")
public Model load(InputStream is) throws Exception {
    CountingInputStream countingIs = new CountingInputStream(is);

    HashingInputStream hashingIs = new HashingInputStream(Hashing.md5(), countingIs);

    ModelEvaluator<?> evaluator = unmarshal(hashingIs, this.validate);

    PMML pmml = evaluator.getPMML();

    for(Class<? extends Visitor> visitorClazz : this.visitorClazzes){
        Visitor visitor = visitorClazz.newInstance();

        visitor.applyTo(pmml);
    }

    evaluator.verify();

    Model model = new Model(evaluator);
    model.putProperty(Model.PROPERTY_FILE_SIZE, countingIs.getCount());
    model.putProperty(Model.PROPERTY_FILE_MD5SUM, hashingIs.hash().toString());

    return model;
}
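Note the ordering above: unmarshal reads the stream to the end, so getCount() and hash() observe the complete document. A hedged call sketch; the registry instance and the file path are assumptions, not part of the excerpt:

// Hypothetical call site; modelRegistry and the path are assumed for illustration.
try (InputStream is = java.nio.file.Files.newInputStream(java.nio.file.Paths.get("model.pmml"))) {
    Model model = modelRegistry.load(is);
    // model now carries the PROPERTY_FILE_SIZE and PROPERTY_FILE_MD5SUM values set above
}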
Project: okhttp-awssigner    File: JCloudTools.java
/**
 * Hashes the given input stream with SHA-256.
 *
 * @param is a stream of bytes to hash
 * @return the raw SHA-256 digest bytes
 */
public static byte[] hash(InputStream is) {
    HashingInputStream his = new HashingInputStream(Hashing.sha256(), is);
    try {
        ByteStreams.copy(his, ByteStreams.nullOutputStream());
        return his.hash().asBytes();
    } catch (IOException e) {
        throw new RuntimeException("Unable to compute hash while signing request: " + e.getMessage(), e);
    }
}
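A possible call site, sketched under the assumption that the caller wants lowercase hex (the usual form in AWS request signing, which this project targets); the payload is made up:

// Hypothetical usage; the payload bytes are invented for illustration.
byte[] payload = "request body".getBytes(StandardCharsets.UTF_8);
byte[] digest = JCloudTools.hash(new ByteArrayInputStream(payload));
String hex = BaseEncoding.base16().lowerCase().encode(digest); // e.g. for an x-amz-content-sha256 header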
Project: nexus-public    File: Hashes.java
/**
 * Computes the hash of the given stream using the given algorithm.
 */
public static HashCode hash(final HashAlgorithm algorithm, final InputStream inputStream) throws IOException {
  checkNotNull(algorithm);
  checkNotNull(inputStream);

  try (HashingInputStream hashingStream = new HashingInputStream(algorithm.function(), inputStream)) {
    ByteStreams.copy(hashingStream, ByteStreams.nullOutputStream());
    return hashingStream.hash();
  }
}
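A hedged sketch of calling this helper. HashAlgorithm is Nexus's wrapper pairing an algorithm name with a Guava HashFunction; the SHA1 constant and the blobPath variable are assumptions for illustration:

// Assumed usage; HashAlgorithm.SHA1 and blobPath are not part of the excerpt.
try (InputStream in = java.nio.file.Files.newInputStream(blobPath)) {
    HashCode sha1 = Hashes.hash(HashAlgorithm.SHA1, in);
    System.out.println("sha1=" + sha1);
}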
Project: mccy-engine    File: ZipMiningServiceImpl.java
@Override
public String interrogate(InputStream rawInputStream,
                          Optional<List<ZipMiningHandler.Entry>> handlers) throws IOException {
    final HashingInputStream hashingInputStream = new HashingInputStream(fileIdHash, rawInputStream);

    final ZipInputStream zipInputStream = new ZipInputStream(hashingInputStream);

    ZipEntry zipEntry;
    while ((zipEntry = zipInputStream.getNextEntry()) != null) {
        if (handlers != null) {
            final String name = zipEntry.getName();
            handlers.orElse(Collections.emptyList()).stream()
                    .filter(e -> e.getPath().matcher(name).matches())
                    .forEach(e -> {
                        final ZipMiningHandler handler = e.getHandler();
                        try {
                            handler.handleZipContentFile(name, StreamUtils.nonClosing(zipInputStream));
                        } catch (IOException | MccyException e1) {
                            LOG.warn("Handler {} failed to process zip content file named {}", handler, name, e1);
                        }
                    });
        }
    }

    // Need to read off the remaining content of the zip file to generate the full hash
    final byte[] buffer = new byte[1024];
    //noinspection StatementWithEmptyBody
    while (hashingInputStream.read(buffer) != -1){}

    return hashingInputStream.hash().toString();
}
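The trailing read loop only pulls the archive's remaining bytes (central directory and trailing records) through the hasher so the digest covers the whole file. Guava ships an equivalent one-liner; a sketch of the same drain, assuming the discarded bytes are not needed elsewhere:

// Equivalent to the manual loop above: read to EOF and discard the bytes.
ByteStreams.exhaust(hashingInputStream);
return hashingInputStream.hash().toString();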
Project: mccy-engine    File: ZipMiningServiceTest.java
@Test
public void testRawHashOfFile() throws Exception {
    final ClassPathResource worldResource = new ClassPathResource("NewWorld.zip");
    try (InputStream in = worldResource.getInputStream()) {
        final HashingInputStream hashingInputStream = new HashingInputStream(Hashing.md5(), in);
        final byte[] buffer = new byte[1024];
        while (hashingInputStream.read(buffer) != -1){}

        assertEquals("2a9eaf43128d05bbebbb9a0d4b9f8892", hashingInputStream.hash().toString());
    }

}
Project: omakase    File: ManifestTool.java
private static String copy(ProtocolHandler sourceProtocolHandler, ProtocolHandler destinationProtocolHandler, long contentLength)
        throws IOException, NoSuchAlgorithmException {

    try (InputStream inputStream = sourceProtocolHandler.openStream();
            HashingInputStream hashingInputStream = new HashingInputStream(Hashing.md5(), inputStream);
            CountingInputStream countingInputStream = new CountingInputStream(hashingInputStream)) {
        destinationProtocolHandler.copyTo(countingInputStream, contentLength);
        return hashingInputStream.hash().toString();
    }
}
Project: flow    File: HashEncoder.java
@Nonnull
public String process(@Nonnull InputStream in) throws IOException {
    checkNotNull(in);
    try {
        HashingInputStream hashIn = new HashingInputStream(function, in);
        IOUtils.copy(hashIn, ByteStreams.nullOutputStream());
        return encoding.encode(hashIn.hash().asBytes());
    } finally {
        IOUtils.closeQuietly(in);
    }
}
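The excerpt does not show how the function and encoding fields are injected; a hedged wiring sketch with an assumed constructor:

// Assumed construction; HashEncoder's real constructor is not in the excerpt.
HashEncoder encoder = new HashEncoder(Hashing.sha256(), BaseEncoding.base64Url());
String token = encoder.process(java.nio.file.Files.newInputStream(path)); // process() closes the stream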
Project: backups    File: BackupProcessor.java
private long storeChunk(BackupMetadata backup, InputStream in, final String path, final String filename, final CompressionCodec compressionCodec) throws IOException {
    final CountingOutputStream countingOut = new CountingOutputStream(localStorage.upload(backup.getService(), path));

    final StreamCodec codec = codecFactory.get(compressionCodec, backup.getModelVersion() < 1);

    try (final OutputStream out = codec.output(countingOut)) {
        final HashingInputStream md5in = new HashingInputStream(Hashing.md5(), in);

        LOG.debug("Storing {} chunk {} locally", backup, path);

        final long originalSize = ByteStreams.copy(md5in, out);
        if (originalSize > 0) {
            final long size = countingOut.getCount();
            final String hash = md5in.hash().toString();

            this.update(backup, new Function<BackupMetadata, BackupMetadata>() {
                @Override
                public BackupMetadata apply(BackupMetadata input) {
                    input.addChunk(filename, path, originalSize, size, hash, getNodeName(), compressionCodec);
                    return input;
                }
            });

            LOG.debug("Stored {} chunk {} (originalSize: {}, size: {}) locally", backup, path, originalSize, size);

            // Upload this chunk offsite in another thread
            this.uploadChunk(backup, path);
        }

        return originalSize;
    }
}
Project: rsc    File: ResourceStore.java
private void fetchResource(String name, File target) throws IOException {
    try (final HashingInputStream in = new HashingInputStream(HASH_FUNCTION, fetchResource(name))) {
        Files.asByteSink(target).writeFrom(in);

        LOG.debug("Downloaded {} (hash: {})", name, in.hash());
    }
}
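Files.asByteSink streams the download straight to disk while the wrapper accumulates the digest, so no second pass over the file is needed. The same pattern with explicit names; HASH_FUNCTION in the snippet is project-defined, and SHA-256 plus the source URL are assumed here:

// Standalone version of the pattern; the hash function and source are assumptions.
try (HashingInputStream in = new HashingInputStream(Hashing.sha256(), url.openStream())) {
    Files.asByteSink(new File("resource.bin")).writeFrom(in);
    System.out.println("downloaded, sha256=" + in.hash());
}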
Project: buck    File: CustomJarOutputStreamTest.java
@Test
public void manifestContainsEntryHashesOfHashedEntries() throws IOException {
  String entryName = "A";
  InputStream contents = new ByteArrayInputStream("contents".getBytes(StandardCharsets.UTF_8));
  try (HashingInputStream hashingContents =
      new HashingInputStream(Hashing.murmur3_128(), contents)) {
    writer.writeEntry(entryName, hashingContents);
    writer.close();

    String expectedHash = hashingContents.hash().toString();
    assertEntryHash(entryName, expectedHash);
  }
}
Project: buck    File: CustomJarOutputStreamTest.java
@Test
public void manifestContainsEntryHashesOfEmptyHashedEntries() throws IOException {
  String entryName = "A";
  InputStream contents = new ByteArrayInputStream(new byte[0]);
  try (HashingInputStream hashingContents =
      new HashingInputStream(Hashing.murmur3_128(), contents)) {
    writer.putNextEntry(new CustomZipEntry(entryName));
    writer.closeEntry();
    writer.close();

    String expectedHash = hashingContents.hash().toString();
    assertEntryHash(entryName, expectedHash);
  }
}
Project: mccy-engine    File: ZipMiningServiceTest.java
@Test
public void testInterceptFiles() throws Exception {
    final ClassPathResource worldResource = new ClassPathResource("NewWorld.zip");
    final String id;
    CompletableFuture<String> hashOfLevelDat = new CompletableFuture<>();

    final String[] expectedDataDatFiles = new String[]{
            "Mineshaft.dat",
            "Temple.dat",
            "villages.dat",
            "villages_end.dat",
            "villages_nether.dat"
    };

    AtomicInteger dataDatCount = new AtomicInteger();

    try (InputStream worldIn = worldResource.getInputStream()) {

        id = service.interrogate(worldIn, ZipMiningHandler.listBuilder()
                .add(".*/level.dat", ((path, in) -> {
                    final HashingInputStream hashLevelDatIn = new HashingInputStream(Hashing.md5(), in);
                    byte[] buffer = new byte[1024];
                    try {
                        while (hashLevelDatIn.read(buffer) != -1){}
                        hashOfLevelDat.complete(hashLevelDatIn.hash().toString());
                    } catch (IOException e) {
                        hashOfLevelDat.completeExceptionally(e);
                    }
                })
                .add(".*/data/.*\\.dat", (path, in) -> {

                    final int pos = path.lastIndexOf("/");
                    assertThat(path.substring(pos + 1), Matchers.isIn(expectedDataDatFiles));

                    dataDatCount.incrementAndGet();
                })
                .build());

        assertTrue(hashOfLevelDat.isDone());
        assertEquals("7661c3e52e999aeb6b5593072d189be0", hashOfLevelDat.get());

        assertEquals(5L, dataDatCount.longValue());
    }
    assertEquals("2a9eaf43128d05bbebbb9a0d4b9f8892", id);
}
Project: backups    File: BackupProcessor.java
public void store(BackupMetadata backup, Optional<String> contentMD5, InputStream in, String filename) throws IOException {
    final Timer.Context context = STORE_TIMES.time();

    try {
        ACTIVE_STORES.inc();
        incrementPendingStores(backup, filename);

        // If the file is already compressed then don't bother compressing it again
        final boolean originallyCompressed = isCompressed(filename);
        final CompressionCodec compressionCodec = originallyCompressed ? CompressionCodec.NONE : codecFactory.getDefaultCompressionCodec();

        LOG.trace("Compressing {} file {} using {}", backup, filename, compressionCodec);

        try (final HashingInputStream md5in = new HashingInputStream(Hashing.md5(), in)) {
            // Store all chunks
            final long size = this.storeChunks(backup, md5in, filename, compressionCodec);
            if (size == 0) {
                throw new NoContentException(String.format("No content received for %s (service: %s, id: %s)", filename, backup.getService(), backup.getId()));
            }

            STORE_SIZES.update(size);

            final String calculatedMD5 = md5in.hash().toString();
            if (contentMD5.isPresent() && !calculatedMD5.equalsIgnoreCase(contentMD5.get())) {
                throw new InvalidMD5Exception(contentMD5.get(), calculatedMD5);
            }
        }

        decrementPendingStores(backup, filename);
        fireBackupUploaded(backup, filename);
    }
    catch (final Exception e) {
        this.update(backup, new Function<BackupMetadata, BackupMetadata>() {
            @Override
            public BackupMetadata apply(BackupMetadata input) {
                LOG.error("Failed to store backup: " + input, e);
                input.setFailed("Failed to store backup: " + e.getMessage());
                return input;
            }
        });

        throw e;
    }
    finally {
        ACTIVE_STORES.dec();
        context.stop();
    }
}
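The equalsIgnoreCase comparison works because HashCode.toString() emits lowercase hex. An alternative sketch that compares HashCode values instead of strings; expectedHex is a hypothetical variable, and HashCode.fromString accepts lowercase hex only:

// Alternative check via Guava's HashCode type; expectedHex is assumed.
HashCode calculated = md5in.hash();
if (!calculated.equals(HashCode.fromString(expectedHex))) {
    throw new IOException("MD5 mismatch: expected " + expectedHex + ", got " + calculated);
}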
Project: buck    File: JarDumper.java
private Stream<String> dumpBinaryFile(String name, InputStream inputStream) throws IOException {
  try (HashingInputStream is = new HashingInputStream(Hashing.murmur3_128(), inputStream)) {
    ByteStreams.exhaust(is);
    return Stream.of(String.format("Murmur3-128 of %s: %s", name, is.hash().toString()));
  }
}
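Across these projects, three equivalent idioms force the wrapped stream to EOF before hash() is called; a side-by-side sketch, where his stands in for any HashingInputStream:

ByteStreams.exhaust(his);                               // 1: discard everything, returns the byte count
ByteStreams.copy(his, ByteStreams.nullOutputStream());  // 2: copy into a sink that drops its input
byte[] buf = new byte[1024];                            // 3: the manual loop used in the tests above
while (his.read(buf) != -1) { /* drain */ }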