public ExternalResourceMetaData getMetaData(URI location, boolean revalidate) {
    LOGGER.debug("Attempting to get resource metadata: {}", location);
    S3Object s3Object = s3Client.getMetaData(location);
    if (s3Object == null) {
        return null;
    }
    try {
        ObjectMetadata objectMetadata = s3Object.getObjectMetadata();
        return new DefaultExternalResourceMetaData(location,
            objectMetadata.getLastModified().getTime(),
            objectMetadata.getContentLength(),
            objectMetadata.getContentType(),
            objectMetadata.getETag(),
            null); // Passing null for sha1 - TODO - consider using the etag which is an MD5 hash of the file (when less than 5Gb)
    } finally {
        IoActions.closeQuietly(s3Object);
    }
}
private S3Object doGetS3Object(URI uri, boolean isLightWeight) {
    S3RegionalResource s3RegionalResource = new S3RegionalResource(uri);
    String bucketName = s3RegionalResource.getBucketName();
    String s3BucketKey = s3RegionalResource.getKey();
    configureClient(s3RegionalResource);

    GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, s3BucketKey);
    if (isLightWeight) {
        // Skip content download
        getObjectRequest.setRange(0, 0);
    }

    try {
        return amazonS3Client.getObject(getObjectRequest);
    } catch (AmazonServiceException e) {
        String errorCode = e.getErrorCode();
        if (null != errorCode && errorCode.equalsIgnoreCase("NoSuchKey")) {
            return null;
        }
        throw ResourceExceptions.getFailed(uri, e);
    }
}
/**
 * Performs a {@link GetObjectRequest} against the S3 bucket to retrieve the file with the given id.
 *
 * @param fileLocationId id of the file to search for
 * @return the content of the file found in S3, or {@code null} if no bucket name is configured
 */
@Override
public InputStream getFileByLocationId(String fileLocationId) {
    final String bucketName = environment.getProperty(Constants.BUCKET_NAME_ENV_VARIABLE);
    if (Strings.isNullOrEmpty(bucketName)) {
        API_LOG.warn("No bucket name is specified.");
        return null;
    }

    GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, fileLocationId);
    S3Object s3Object = amazonS3.getObject(getObjectRequest);

    API_LOG.info("Successfully retrieved the file from S3 bucket {}", getObjectRequest.getBucketName());

    return s3Object.getObjectContent();
}
@Override
public InputStream readBlob(String blobName) throws IOException {
    int retry = 0;
    while (retry <= blobStore.numberOfRetries()) {
        try {
            S3Object s3Object = SocketAccess.doPrivileged(() ->
                blobStore.client().getObject(blobStore.bucket(), buildKey(blobName)));
            return s3Object.getObjectContent();
        } catch (AmazonClientException e) {
            if (blobStore.shouldRetry(e) && (retry < blobStore.numberOfRetries())) {
                retry++;
            } else {
                if (e instanceof AmazonS3Exception) {
                    if (404 == ((AmazonS3Exception) e).getStatusCode()) {
                        throw new NoSuchFileException("Blob object [" + blobName + "] not found: " + e.getMessage());
                    }
                }
                throw e;
            }
        }
    }
    throw new BlobStoreException("retries exhausted while attempting to access blob object [name:" + blobName
        + ", bucket:" + blobStore.bucket() + "]");
}
@Override
public S3Object getObject(GetObjectRequest getObjectRequest) throws AmazonClientException, AmazonServiceException {
    // in ESBlobStoreContainerTestCase.java, the prefix is empty,
    // so the key and blobName are equivalent to each other
    String blobName = getObjectRequest.getKey();

    if (!blobs.containsKey(blobName)) {
        throw new AmazonS3Exception("[" + blobName + "] does not exist.");
    }

    // the HTTP request attribute is irrelevant for reading
    S3ObjectInputStream stream = new S3ObjectInputStream(blobs.get(blobName), null, false);
    S3Object s3Object = new S3Object();
    s3Object.setObjectContent(stream);
    return s3Object;
}
/**
 * Tests if an object can be uploaded asynchronously.
 *
 * @throws Exception not expected
 */
@Test
public void shouldUploadInParallel() throws Exception {
    final File uploadFile = new File(UPLOAD_FILE_NAME);

    s3Client.createBucket(BUCKET_NAME);

    final TransferManager transferManager = createDefaultTransferManager();
    final Upload upload =
        transferManager.upload(new PutObjectRequest(BUCKET_NAME, UPLOAD_FILE_NAME, uploadFile));
    final UploadResult uploadResult = upload.waitForUploadResult();

    assertThat(uploadResult.getKey(), equalTo(UPLOAD_FILE_NAME));

    final S3Object getResult = s3Client.getObject(BUCKET_NAME, UPLOAD_FILE_NAME);

    assertThat(getResult.getKey(), equalTo(UPLOAD_FILE_NAME));
}
private void writeLocalFile(final S3Object s3Object, final File file) {
    try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
        IOUtils.copy(s3Object.getObjectContent(), fileOutputStream);
    } catch (IOException ioException) {
        throw new SecretsLockerException(ioException);
    }
    file.deleteOnExit();
}
/**
 * Gets the original inventory report from S3, unzips it, and transfers it into a String.
 * @return the inventory report as a String
 * @throws IOException when getting the object from S3 fails,
 * or when the checksum of the inventory report does not match the checksum specified in the manifest file
 */
public String getInventoryReportToString() throws IOException {
    String inventReportKey = locator.getKey();
    String bucketName = inventoryManifest.getSourceBucket();

    try (S3Object s3InventoryReport = s3Client.getObject(
            new GetObjectRequest(bucketName, inventReportKey))) {
        InputStream objectData = s3InventoryReport.getObjectContent();
        byte[] zippedData = IOUtils.toByteArray(objectData);
        String actualChecksum = DigestUtils.md5Hex(zippedData);
        String expectedChecksum = locator.getMD5checksum();
        if (!actualChecksum.equals(expectedChecksum)) {
            throw new ChecksumMismatchException(expectedChecksum, actualChecksum);
        }
        return IOUtils.toString(new GZIPInputStream(new ByteArrayInputStream(zippedData)));
    }
}
/**
 * Checks if the MD5 of manifest.json equals the value in manifest.checksum;
 * if so, pulls out the manifest file and maps it into a POJO.
 * @return an InventoryManifest, which stores all the elements of the manifest.json file
 */
public InventoryManifest getInventoryManifest() throws Exception {
    // Get manifest.json and transfer it to String
    GetObjectRequest requestJson = new GetObjectRequest(bucketName, bucketKeyJson);
    S3Object jsonObject = s3Client.getObject(requestJson);
    String jsonFile = inputStreamToString(jsonObject.getObjectContent());
    jsonObject.close();

    // Get manifest.checksum and transfer it to String with no whitespace
    GetObjectRequest requestChecksum = new GetObjectRequest(bucketName, bucketKeyChecksum);
    S3Object checksumObject = s3Client.getObject(requestChecksum);
    String expectedChecksum = inputStreamToString(checksumObject.getObjectContent())
            .replaceAll("\\s", "");
    checksumObject.close();

    // Compare manifest.json's MD5 value against manifest.checksum
    String actualChecksum = DigestUtils.md5Hex(jsonFile);
    if (!actualChecksum.equals(expectedChecksum)) {
        throw new ChecksumMismatchException(expectedChecksum, actualChecksum);
    }

    return mapper.readValue(jsonFile, InventoryManifest.class);
}
@SuppressWarnings("resource") @Override public PutObjectResult putObject(PutObjectRequest putObjectRequest) throws AmazonClientException, AmazonServiceException { putObjectRequests.add(putObjectRequest); S3Object s3Object = new S3Object(); s3Object.setBucketName(putObjectRequest.getBucketName()); s3Object.setKey(putObjectRequest.getKey()); if (putObjectRequest.getFile() != null) { try { s3Object.setObjectContent(new FileInputStream(putObjectRequest.getFile())); } catch (FileNotFoundException e) { throw new AmazonServiceException("Cannot store the file object.", e); } } else { s3Object.setObjectContent(putObjectRequest.getInputStream()); } objects.add(s3Object); PutObjectResult putObjectResult = new PutObjectResult(); putObjectResult.setETag("3a5c8b1ad448bca04584ecb55b836264"); return putObjectResult; }
/**
 * Puts an object; copies that object to a new bucket; downloads the object from the new bucket;
 * compares checksums of the original and the copied object.
 *
 * @throws Exception if an Exception occurs
 */
@Test
public void shouldCopyObject() throws Exception {
    final File uploadFile = new File(UPLOAD_FILE_NAME);
    final String sourceKey = UPLOAD_FILE_NAME;
    final String destinationBucketName = "destinationBucket";
    final String destinationKey = "copyOf/" + sourceKey;

    final PutObjectResult putObjectResult =
        s3Client.putObject(new PutObjectRequest(BUCKET_NAME, sourceKey, uploadFile));

    final CopyObjectRequest copyObjectRequest =
        new CopyObjectRequest(BUCKET_NAME, sourceKey, destinationBucketName, destinationKey);
    s3Client.copyObject(copyObjectRequest);

    final com.amazonaws.services.s3.model.S3Object copiedObject =
        s3Client.getObject(destinationBucketName, destinationKey);
    final String copiedHash = HashUtil.getDigest(copiedObject.getObjectContent());
    copiedObject.close();

    assertThat("Sourcefile and copied File should have same Hashes",
        copiedHash, is(equalTo(putObjectResult.getETag())));
}
@Override
public boolean load(BuildCacheKey key, BuildCacheEntryReader reader) {
    if (s3.doesObjectExist(bucketName, key.getHashCode())) {
        logger.info("Found cache item '{}' in S3 bucket", key.getHashCode());
        S3Object object = s3.getObject(bucketName, key.getHashCode());
        try (InputStream is = object.getObjectContent()) {
            reader.readFrom(is);
            return true;
        } catch (IOException e) {
            throw new BuildCacheException("Error while reading cache object from S3 bucket", e);
        }
    } else {
        logger.info("Did not find cache item '{}' in S3 bucket", key.getHashCode());
        return false;
    }
}
@Test
public void copyOneObject() throws Exception {
    client.putObject("source", "data", inputData);
    Path sourceBaseLocation = new Path("s3://source/");
    Path replicaLocation = new Path("s3://target/");
    List<Path> sourceSubLocations = new ArrayList<>();
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    Metrics metrics = s3s3Copier.copy();

    assertThat(metrics.getBytesReplicated(), is(7L));
    assertThat(metrics.getMetrics().get(S3S3CopierMetrics.Metrics.TOTAL_BYTES_TO_REPLICATE.name()), is(7L));
    S3Object object = client.getObject("target", "data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
    assertThat(registry.getGauges().containsKey(RunningMetrics.S3S3_CP_BYTES_REPLICATED.name()), is(true));
}
@Test
public void copyMultipleObjects() throws Exception {
    // Make sure we only request one file at a time, so we need to loop
    ListObjectsRequestFactory mockListObjectRequestFactory = Mockito.mock(ListObjectsRequestFactory.class);
    when(mockListObjectRequestFactory.newInstance()).thenReturn(new ListObjectsRequest().withMaxKeys(1));

    client.putObject("source", "bar/data1", inputData);
    client.putObject("source", "bar/data2", inputData);
    Path sourceBaseLocation = new Path("s3://source/bar/");
    Path replicaLocation = new Path("s3://target/foo/");
    List<Path> sourceSubLocations = new ArrayList<>();

    S3S3Copier s3s3Copier = new S3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation,
        s3ClientFactory, transferManagerFactory, mockListObjectRequestFactory, registry, s3S3CopierOptions);
    Metrics metrics = s3s3Copier.copy();

    assertThat(metrics.getBytesReplicated(), is(14L));
    S3Object object1 = client.getObject("target", "foo/data1");
    String data1 = IOUtils.toString(object1.getObjectContent());
    assertThat(data1, is("bar foo"));
    S3Object object2 = client.getObject("target", "foo/data2");
    String data2 = IOUtils.toString(object2.getObjectContent());
    assertThat(data2, is("bar foo"));
}
/**
 * Attempt to fetch a secret from S3.
 *
 * @param s3path where to fetch it from
 * @return the content of the file found on S3
 * @throws IOException on problems streaming the content of the file
 * @throws AmazonS3Exception on problems communicating with Amazon
 */
private String getS3Value(final SecretPath s3path) throws IOException, AmazonS3Exception {
    LOG.info("Fetching secret from s3://" + s3path.bucket + "/" + s3path.key);

    if (s3Client == null) {
        if (awsCredentialsProvider != null) {
            s3Client = AmazonS3ClientBuilder.standard()
                .withCredentials(awsCredentialsProvider)
                .build();
        } else {
            s3Client = AmazonS3ClientBuilder.standard().build();
        }
    }

    final S3Object s3object = s3Client.getObject(new GetObjectRequest(s3path.bucket, s3path.key));
    final BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    final StringBuilder b = new StringBuilder();
    String line;
    while ((line = reader.readLine()) != null) {
        b.append(line);
    }
    LOG.info("Found secret");
    reader.close();
    return b.toString();
}
@Override
public ObjectMetadata getObject(final GetObjectRequest getObjectRequest, File destinationFile)
        throws SdkClientException, AmazonServiceException {
    rejectNull(destinationFile,
        "The destination file parameter must be specified when downloading an object directly to a file");

    S3Object s3Object = ServiceUtils.retryableDownloadS3ObjectToFile(destinationFile,
        new ServiceUtils.RetryableS3DownloadTask() {
            @Override
            public S3Object getS3ObjectStream() {
                return getObject(getObjectRequest);
            }

            @Override
            public boolean needIntegrityCheck() {
                return !skipMd5CheckStrategy.skipClientSideValidationPerRequest(getObjectRequest);
            }
        }, ServiceUtils.OVERWRITE_MODE);
    // getObject can return null if constraints were specified but not met
    if (s3Object == null) return null;

    return s3Object.getObjectMetadata();
}
/**
 * Retrieves an instruction file from S3; or null if no instruction file is
 * found.
 *
 * @param s3ObjectId
 *            the S3 object id (not the instruction file id)
 * @param instFileSuffix
 *            suffix of the instruction file to be retrieved; or null to use
 *            the default suffix.
 * @return an instruction file, or null if no instruction file is found.
 */
final S3ObjectWrapper fetchInstructionFile(S3ObjectId s3ObjectId, String instFileSuffix) {
    try {
        S3Object o = s3.getObject(createInstructionGetRequest(s3ObjectId, instFileSuffix));
        return o == null ? null : new S3ObjectWrapper(o, s3ObjectId);
    } catch (AmazonServiceException e) {
        // If no instruction file is found, log a debug message, and return null.
        if (log.isDebugEnabled()) {
            log.debug("Unable to retrieve instruction file : " + e.getMessage());
        }
        return null;
    }
}
/**
 * Same as {@link #decipher(GetObjectRequest, long[], long[], S3Object)}
 * but makes use of an instruction file with the specified suffix.
 *
 * @param instFileSuffix never null or empty (which is assumed to have been
 *            sanitized upstream.)
 */
private S3Object decipherWithInstFileSuffix(GetObjectRequest req,
        long[] desiredRange, long[] cryptoRange, S3Object retrieved,
        String instFileSuffix) {
    final S3ObjectId id = req.getS3ObjectId();
    // Check if encrypted info is in an instruction file
    final S3ObjectWrapper ifile = fetchInstructionFile(id, instFileSuffix);
    if (ifile == null) {
        throw new SdkClientException("Instruction file with suffix "
            + instFileSuffix + " is not found for " + retrieved);
    }
    try {
        return decipherWithInstructionFile(req, desiredRange, cryptoRange,
            new S3ObjectWrapper(retrieved, id), ifile);
    } finally {
        closeQuietly(ifile, log);
    }
}
@Override
public Stream<String> lines() {
    LOGGER.debug("starting download from {}", uri);
    AmazonS3URI s3URI = new AmazonS3URI(uri);
    S3Object s3Object = s3Client.getObject(s3URI.getBucket(), s3URI.getKey());
    InputStream stream = s3Object.getObjectContent();
    return new BufferedReader(new InputStreamReader(stream)).lines();
}
/**
 * Stores a file in a previously created bucket. Downloads the file again and compares checksums.
 *
 * @throws Exception if FileStreams can not be read
 */
@Test
public void shouldUploadAndDownloadObject() throws Exception {
    final File uploadFile = new File(UPLOAD_FILE_NAME);
    s3Client.createBucket(BUCKET_NAME);
    s3Client.putObject(new PutObjectRequest(BUCKET_NAME, uploadFile.getName(), uploadFile));

    final S3Object s3Object = s3Client.getObject(BUCKET_NAME, uploadFile.getName());

    final InputStream uploadFileIS = new FileInputStream(uploadFile);
    final String uploadHash = HashUtil.getDigest(uploadFileIS);
    final String downloadedHash = HashUtil.getDigest(s3Object.getObjectContent());
    uploadFileIS.close();
    s3Object.close();

    assertThat("Up- and downloaded Files should have equal Hashes",
        uploadHash, is(equalTo(downloadedHash)));
}
public ExternalResourceReadResponse openResource(URI location, boolean revalidate) {
    LOGGER.debug("Attempting to get resource: {}", location);
    S3Object s3Object = s3Client.getResource(location);
    if (s3Object == null) {
        return null;
    }
    return new S3Resource(s3Object, location);
}
private String getObjectContent(String key) {
    S3Object stored = amazonS3Client.getObject(bucketName, key);
    try {
        return IOUtils.toString(stored.getObjectContent(), "UTF-8");
    } catch (IOException ioe) {
        fail("should have content");
    }
    return "";
}
/**
 * Verifies multipart copy.
 *
 * @throws InterruptedException
 */
@Test
public void multipartCopy() throws InterruptedException, IOException, NoSuchAlgorithmException {
    final int contentLen = 3 * _1MB;

    final ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setContentLength(contentLen);

    final String assumedSourceKey = UUID.randomUUID().toString();

    final Bucket sourceBucket = s3Client.createBucket(UUID.randomUUID().toString());
    final Bucket targetBucket = s3Client.createBucket(UUID.randomUUID().toString());
    final TransferManager transferManager = createTransferManager(_2MB, _1MB, _2MB, _1MB);

    final InputStream sourceInputStream = randomInputStream(contentLen);
    final Upload upload = transferManager
        .upload(sourceBucket.getName(), assumedSourceKey, sourceInputStream, objectMetadata);

    final UploadResult uploadResult = upload.waitForUploadResult();

    assertThat(uploadResult.getKey(), is(assumedSourceKey));

    final String assumedDestinationKey = UUID.randomUUID().toString();
    final Copy copy = transferManager.copy(sourceBucket.getName(), assumedSourceKey,
        targetBucket.getName(), assumedDestinationKey);
    final CopyResult copyResult = copy.waitForCopyResult();
    assertThat(copyResult.getDestinationKey(), is(assumedDestinationKey));

    final S3Object copiedObject = s3Client.getObject(targetBucket.getName(), assumedDestinationKey);

    assertThat("Hashes for source and target S3Object do not match.",
        HashUtil.getDigest(copiedObject.getObjectContent()) + "-1",
        is(uploadResult.getETag()));
}
@Override
public S3Object getObject(String bucketName, String key)
        throws AmazonClientException, AmazonServiceException {
    if (shouldFail(bucketName, key, readFailureRate)) {
        logger.info("--> random read failure on getObject method: throwing an exception for [bucket={}, key={}]",
            bucketName, key);
        AmazonS3Exception ex = new AmazonS3Exception("Random S3 read exception");
        ex.setStatusCode(404);
        throw ex;
    } else {
        return super.getObject(bucketName, key);
    }
}
private S3Object downloadS3Object(final String fileName) {
    return s3Client.getObject(new GetObjectRequest(bucketName, buildS3ObjectName(fileName)));
}
/**
 * Gets the content of the specified text file from the specified S3 bucket.
 *
 * @param bucketName name of the S3 bucket
 * @param fileName key of the text file within the bucket
 * @return the file content as a String
 */
public String readTextFileContentFromBucket(String bucketName, String fileName) {
    final S3Object s3object = s3client.getObject(bucketName, fileName);
    final S3ObjectInputStream inputStream = s3object.getObjectContent();
    final StringWriter writer = new StringWriter();
    try {
        IOUtils.copy(inputStream, writer, "UTF-8");
    } catch (IOException ex) {
        log.error("Error copying file from s3: " + ex);
    }
    return writer.toString();
}
/**
 * Reads a file from S3 into a String object.
 * @param s3Uri the S3 URI (e.g. s3://bucket/file.ext)
 * @return String containing the content of the file in S3
 * @throws IOException if an error occurs while reading the file
 */
public String readFileFromS3(String s3Uri) throws IOException {
    AmazonS3URI s3FileUri = new AmazonS3URI(s3Uri);
    S3Object s3object = amazonS3Client.getObject(new GetObjectRequest(s3FileUri.getBucket(), s3FileUri.getKey()));
    return IOUtils.toString(s3object.getObjectContent());
}
@Test
public void testReadFileFromS3() throws IOException {
    final String testInput = "Test Input";
    final S3ObjectInputStream s3ObjectInputStream = new S3ObjectInputStream(
        new ByteArrayInputStream(testInput.getBytes()), mock(HttpRequestBase.class), false);
    final S3Object s3Object = mock(S3Object.class);
    when(s3Object.getObjectContent()).thenReturn(s3ObjectInputStream);
    when(amazonS3Client.getObject(any(GetObjectRequest.class))).thenReturn(s3Object);

    assertThat(awsHelperService.readFileFromS3("http://bucket.s3.amazonaws.com"), equalTo(testInput));
}
/**
 * Downloads an object's data to a file.
 *
 * @param remoteObjectName the name of the object/key whose contents should be downloaded
 * @param localFileName the location and file name on the local machine where the file will be downloaded
 * @throws S3OperationException if there is an error during data transfer
 */
@PublicAtsApi
public void download(String remoteObjectName, String localFileName) throws S3OperationException,
                                                                     IllegalArgumentException {
    AmazonS3 s3Client = getClient();
    localFileName = IoUtils.normalizeFilePath(localFileName);
    String localDirName = IoUtils.getFilePath(localFileName);
    String localFileOnlyName = IoUtils.getFileName(localFileName);
    File localDir = new File(localDirName);
    if (localDir.exists()) {
        if (localDir.isFile()) {
            throw new IllegalArgumentException("Could not create file " + localFileOnlyName
                                               + " into existing file " + localDirName);
        }
        // else dir exists
    } else {
        LOG.debug("Creating target directory path " + localDirName);
        if (!localDir.mkdirs()) {
            throw new S3OperationException("Could not create local directory path '" + localDirName
                                           + "' for local file specified '" + localFileName + "'");
        }
    }

    S3Object obj = s3Client.getObject(bucketName, remoteObjectName);
    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(new File(localFileName)));
         S3ObjectInputStream s3is = obj.getObjectContent()) {
        byte[] readBuffArr = new byte[4096];
        int readBytes = 0;
        while ((readBytes = s3is.read(readBuffArr)) >= 0) {
            bos.write(readBuffArr, 0, readBytes);
        }
    } catch (Exception e) {
        handleExeption(e, "Error while downloading object " + remoteObjectName + " to local file "
                          + localFileName
                          + ". If error persists check your endpoint, credentials and permissions.");
    }
    LOG.info("S3 object '" + remoteObjectName + "' is downloaded successfully from bucket '" + bucketName
             + "' to file " + localFileName);
}
private Position getPositionFromKey(String key) {
    try {
        S3Object object = s3.getObject(new GetObjectRequest(BUCKET_NAME, key));
        BufferedReader reader = new BufferedReader(new InputStreamReader(object.getObjectContent()));
        StringBuilder sb = new StringBuilder();
        while (true) {
            String line = reader.readLine();
            if (line == null) break;
            sb.append(line);
        }
        return new Gson().fromJson(sb.toString(), Position.class);
    } catch (Exception e) {
        return null;
    }
}
@Override
public S3Object getObject(String bucketName, String key)
        throws AmazonClientException, AmazonServiceException {
    for (S3Object s3Object : objects) {
        if (bucketName.equals(s3Object.getBucketName()) && key.equals(s3Object.getKey())) {
            return s3Object;
        }
    }
    return null;
}
@Test
public void copyOneObjectUsingKeys() throws Exception {
    client.putObject("source", "bar/data", inputData);
    Path sourceBaseLocation = new Path("s3://source/bar/");
    Path replicaLocation = new Path("s3://target/foo/");
    List<Path> sourceSubLocations = new ArrayList<>();
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    s3s3Copier.copy();

    S3Object object = client.getObject("target", "foo/data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
}
@Test
public void copyOneObjectPartitioned() throws Exception {
    client.putObject("source", "year=2016/data", inputData);
    Path sourceBaseLocation = new Path("s3://source/");
    Path replicaLocation = new Path("s3://target/foo/");
    List<Path> sourceSubLocations = Lists.newArrayList(new Path(sourceBaseLocation, "year=2016"));
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    Metrics metrics = s3s3Copier.copy();
    assertThat(metrics.getBytesReplicated(), is(7L));

    S3Object object = client.getObject("target", "foo/year=2016/data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
}
@Test
public void copyOneObjectPartitionedHandlingS3ASchemes() throws Exception {
    client.putObject("source", "year=2016/data", inputData);
    Path sourceBaseLocation = new Path("s3a://source/");
    Path replicaLocation = new Path("s3a://target/foo/");
    List<Path> sourceSubLocations = Lists.newArrayList(new Path(sourceBaseLocation, "year=2016"));
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    s3s3Copier.copy();

    S3Object object = client.getObject("target", "foo/year=2016/data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
}