private S3Object doGetS3Object(URI uri, boolean isLightWeight) {
    S3RegionalResource s3RegionalResource = new S3RegionalResource(uri);
    String bucketName = s3RegionalResource.getBucketName();
    String s3BucketKey = s3RegionalResource.getKey();
    configureClient(s3RegionalResource);

    GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, s3BucketKey);
    if (isLightWeight) {
        // Skip content download
        getObjectRequest.setRange(0, 0);
    }

    try {
        return amazonS3Client.getObject(getObjectRequest);
    } catch (AmazonServiceException e) {
        String errorCode = e.getErrorCode();
        if (null != errorCode && errorCode.equalsIgnoreCase("NoSuchKey")) {
            return null;
        }
        throw ResourceExceptions.getFailed(uri, e);
    }
}
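The lightweight branch above only requests byte 0, so it can serve as a cheap existence probe. A minimal caller sketch, assuming it lives in the same class; resourceExists is a hypothetical helper, not part of the original code:

// Hypothetical helper built on doGetS3Object(uri, true); not part of the original class.
private boolean resourceExists(URI uri) throws IOException {
    S3Object lightweight = doGetS3Object(uri, true); // range 0-0, so no content is downloaded
    if (lightweight == null) {
        return false; // NoSuchKey was translated to null above
    }
    lightweight.close(); // release the (empty) HTTP connection
    return true;
}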
/**
 * Performs a {@link GetObjectRequest} against the S3 bucket for the given file id.
 *
 * @param fileLocationId Id of the file to search for
 * @return file found from S3
 */
@Override
public InputStream getFileByLocationId(String fileLocationId) {
    final String bucketName = environment.getProperty(Constants.BUCKET_NAME_ENV_VARIABLE);
    if (Strings.isNullOrEmpty(bucketName)) {
        API_LOG.warn("No bucket name is specified.");
        return null;
    }

    GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, fileLocationId);
    S3Object s3Object = amazonS3.getObject(getObjectRequest);

    API_LOG.info("Successfully retrieved the file from S3 bucket {}", getObjectRequest.getBucketName());

    return s3Object.getObjectContent();
}
@Override
public S3Object getObject(GetObjectRequest getObjectRequest) throws AmazonClientException, AmazonServiceException {
    // in ESBlobStoreContainerTestCase.java, the prefix is empty,
    // so the key and blobName are equivalent to each other
    String blobName = getObjectRequest.getKey();
    if (!blobs.containsKey(blobName)) {
        throw new AmazonS3Exception("[" + blobName + "] does not exist.");
    }

    // the HTTP request attribute is irrelevant for reading
    S3ObjectInputStream stream = new S3ObjectInputStream(blobs.get(blobName), null, false);
    S3Object s3Object = new S3Object();
    s3Object.setObjectContent(stream);
    return s3Object;
}
/**
 * Get the original inventory report from S3, unzip it, and transfer it into a String format.
 *
 * @return inventReport String
 * @throws IOException when getting the object from S3 fails,
 *     or the checksum of the inventory report and the checksum specified in the manifest file do not match
 */
public String getInventoryReportToString() throws IOException {
    String inventReportKey = locator.getKey();
    String bucketName = inventoryManifest.getSourceBucket();

    try (S3Object s3InventoryReport = s3Client.getObject(
            new GetObjectRequest(bucketName, inventReportKey))) {
        InputStream objectData = s3InventoryReport.getObjectContent();
        byte[] zippedData = IOUtils.toByteArray(objectData);
        String actualChecksum = DigestUtils.md5Hex(zippedData);
        String expectedChecksum = locator.getMD5checksum();
        if (!actualChecksum.equals(expectedChecksum)) {
            throw new ChecksumMismatchException(expectedChecksum, actualChecksum);
        }
        return IOUtils.toString(new GZIPInputStream(new ByteArrayInputStream(zippedData)));
    }
}
/**
 * Check that the MD5 of manifest.json matches the value stored in manifest.checksum;
 * if so, pull out the manifest file and map it into a POJO.
 *
 * @return inventoryManifestStorage InventoryManifest, which stores all the elements of the manifest.json file
 */
public InventoryManifest getInventoryManifest() throws Exception {
    // Get manifest.json and transfer it to String
    GetObjectRequest requestJson = new GetObjectRequest(bucketName, bucketKeyJson);
    S3Object jsonObject = s3Client.getObject(requestJson);
    String jsonFile = inputStreamToString(jsonObject.getObjectContent());
    jsonObject.close();

    // Get manifest.checksum and transfer it to String with no whitespace
    GetObjectRequest requestChecksum = new GetObjectRequest(bucketName, bucketKeyChecksum);
    S3Object checksumObject = s3Client.getObject(requestChecksum);
    String expectedChecksum = inputStreamToString(checksumObject.getObjectContent())
            .replaceAll("\\s", "");
    checksumObject.close();

    // Compare manifest.json and manifest.checksum's MD5 value
    String actualChecksum = DigestUtils.md5Hex(jsonFile);
    if (!actualChecksum.equals(expectedChecksum)) {
        throw new ChecksumMismatchException(expectedChecksum, actualChecksum);
    }

    return mapper.readValue(jsonFile, InventoryManifest.class);
}
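inputStreamToString is a helper the example assumes but does not show. A minimal sketch of one way it might be implemented, assuming UTF-8 content and Apache Commons IO on the classpath:

// Hypothetical helper; the original implementation is not shown.
private static String inputStreamToString(InputStream input) throws IOException {
    return IOUtils.toString(input, StandardCharsets.UTF_8);
}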
@Test
public void getInventReportSuccess() throws Exception {
    testLocator.setMD5checksum(testMD5);
    testManifest.setFileSchema("storageClass, size");
    reportRetriever = new InventoryReportRetriever(mockS3Client, testLocator, testManifest);

    String expectedInventoryReportString = "testString";
    byte[] expectedInventoryReportBytes = inventReportBytes(expectedInventoryReportString);
    when(mockS3Object.getObjectContent()).thenReturn(new S3ObjectInputStream(
            new ByteArrayInputStream(expectedInventoryReportBytes), null));
    when(mockS3Client.getObject(getObjectRequestCaptor.capture())).thenReturn(mockS3Object);

    String result = reportRetriever.getInventoryReportToString();
    assertThat(result, is(expectedInventoryReportString));

    GetObjectRequest request = getObjectRequestCaptor.getValue();
    assertThat(request.getBucketName(), is("testBucket"));
    assertThat(request.getKey(), is("testInventReportKey"));
}
@Test
public void getInventoryManifestSuccess() throws Exception {
    InventoryManifest expectedManifest = manifest();
    byte[] expectedManifestBytes = manifestBytes(expectedManifest);
    when(mockS3JsonObject.getObjectContent()).thenReturn(new S3ObjectInputStream(
            new ByteArrayInputStream(expectedManifestBytes), null));

    String expectedChecksum = "a6121a6a788be627a68d7e9def9f6968";
    byte[] expectedChecksumBytes = expectedChecksum.getBytes(StandardCharsets.UTF_8);
    when(mockS3ChecksumObject.getObjectContent()).thenReturn(new S3ObjectInputStream(
            new ByteArrayInputStream(expectedChecksumBytes), null));

    when(mockS3Client.getObject(getObjectRequestCaptor.capture()))
            .thenReturn(mockS3JsonObject)
            .thenReturn(mockS3ChecksumObject);

    InventoryManifest result = retriever.getInventoryManifest();
    assertThat(result, is(expectedManifest));

    List<GetObjectRequest> request = getObjectRequestCaptor.getAllValues();
    assertThat(request.get(0).getBucketName(), is("testBucketName"));
    assertThat(request.get(0).getKey(), is("testBucketKey/manifest.json"));
    assertThat(request.get(1).getBucketName(), is("testBucketName"));
    assertThat(request.get(1).getKey(), is("testBucketKey/manifest.checksum"));
}
/**
 * Attempt to fetch a secret from S3.
 *
 * @param s3path where to fetch it from
 * @return the content of the file found on S3
 * @throws IOException on problems streaming the content of the file
 * @throws AmazonS3Exception on problems communicating with amazon
 */
private String getS3Value(final SecretPath s3path) throws IOException, AmazonS3Exception {
    LOG.info("Fetching secret from s3://" + s3path.bucket + "/" + s3path.key);

    if (s3Client == null) {
        if (awsCredentialsProvider != null) {
            s3Client = AmazonS3ClientBuilder.standard()
                    .withCredentials(awsCredentialsProvider)
                    .build();
        } else {
            s3Client = AmazonS3ClientBuilder.standard().build();
        }
    }

    final S3Object s3object = s3Client.getObject(new GetObjectRequest(s3path.bucket, s3path.key));
    final BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    final StringBuilder b = new StringBuilder();
    String line;
    while ((line = reader.readLine()) != null) {
        b.append(line);
    }
    LOG.info("Found secret");
    reader.close();
    return b.toString();
}
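A possible variant of the read loop above that uses try-with-resources so the reader (and the underlying S3 stream) is closed even when reading fails; like the original, it drops line separators. This is a sketch rather than the project's code, and readAllLines is a hypothetical name:

// Sketch: same line-concatenating read, with automatic resource cleanup.
private static String readAllLines(S3Object s3object) throws IOException {
    final StringBuilder b = new StringBuilder();
    try (BufferedReader reader =
             new BufferedReader(new InputStreamReader(s3object.getObjectContent()))) {
        String line;
        while ((line = reader.readLine()) != null) {
            b.append(line); // line separators are dropped, as in the original
        }
    }
    return b.toString();
}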
@Override
public ObjectMetadata getObject(final GetObjectRequest getObjectRequest, File destinationFile)
        throws SdkClientException, AmazonServiceException {
    rejectNull(destinationFile,
            "The destination file parameter must be specified when downloading an object directly to a file");

    S3Object s3Object = ServiceUtils.retryableDownloadS3ObjectToFile(destinationFile,
            new ServiceUtils.RetryableS3DownloadTask() {
                @Override
                public S3Object getS3ObjectStream() {
                    return getObject(getObjectRequest);
                }

                @Override
                public boolean needIntegrityCheck() {
                    return !skipMd5CheckStrategy.skipClientSideValidationPerRequest(getObjectRequest);
                }
            }, ServiceUtils.OVERWRITE_MODE);

    // getObject can return null if constraints were specified but not met
    if (s3Object == null) return null;

    return s3Object.getObjectMetadata();
}
DownloadCallable(AmazonS3 s3, CountDownLatch latch, GetObjectRequest req,
                 boolean resumeExistingDownload, DownloadImpl download, File dstfile,
                 long origStartingByte, long expectedFileLength, long timeout,
                 ScheduledExecutorService timedExecutor, ExecutorService executor,
                 Integer lastFullyDownloadedPartNumber, boolean isDownloadParallel,
                 boolean resumeOnRetry) {
    if (s3 == null || latch == null || req == null || dstfile == null || download == null)
        throw new IllegalArgumentException();

    this.s3 = s3;
    this.latch = latch;
    this.req = req;
    this.resumeExistingDownload = resumeExistingDownload;
    this.download = download;
    this.dstfile = dstfile;
    this.origStartingByte = origStartingByte;
    this.expectedFileLength = expectedFileLength;
    this.timeout = timeout;
    this.timedExecutor = timedExecutor;
    this.executor = executor;
    this.futureFiles = new ArrayList<Future<File>>();
    this.lastFullyMergedPartNumber = lastFullyDownloadedPartNumber;
    this.isDownloadParallel = isDownloadParallel;
    this.resumeOnRetry = resumeOnRetry;
}
/**
 * Downloads each part of the object into a separate file synchronously and
 * combines all the files into a single file.
 */
private void downloadInParallel(int partCount) throws Exception {
    if (lastFullyMergedPartNumber == null) {
        lastFullyMergedPartNumber = 0;
    }

    for (int i = lastFullyMergedPartNumber + 1; i <= partCount; i++) {
        GetObjectRequest getPartRequest =
                new GetObjectRequest(req.getBucketName(), req.getKey(), req.getVersionId())
                        .withUnmodifiedSinceConstraint(req.getUnmodifiedSinceConstraint())
                        .withModifiedSinceConstraint(req.getModifiedSinceConstraint())
                        .withResponseHeaders(req.getResponseHeaders())
                        .withSSECustomerKey(req.getSSECustomerKey())
                        .withGeneralProgressListener(req.getGeneralProgressListener());

        getPartRequest.setMatchingETagConstraints(req.getMatchingETagConstraints());
        getPartRequest.setNonmatchingETagConstraints(req.getNonmatchingETagConstraints());
        getPartRequest.setRequesterPays(req.isRequesterPays());

        futureFiles.add(
                executor.submit(new DownloadPartCallable(s3, getPartRequest.withPartNumber(i), dstfile)));
    }

    truncateDestinationFileIfNecessary();

    Future<File> future = executor.submit(
            new CompleteMultipartDownload(futureFiles, dstfile, download, ++lastFullyMergedPartNumber));
    ((DownloadMonitor) download.getMonitor()).setFuture(future);
}
/**
 * Resumes a download operation. This download operation uses the same
 * configuration as the original download. Any data already fetched will be
 * skipped, and only the remaining data is retrieved from Amazon S3.
 *
 * @param persistableDownload
 *            the download to resume.
 * @return A new <code>Download</code> object to use to check the state of
 *         the download, listen for progress notifications, and otherwise
 *         manage the download.
 *
 * @throws AmazonClientException
 *             If any errors are encountered in the client while making the
 *             request or handling the response.
 * @throws AmazonServiceException
 *             If any errors occurred in Amazon S3 while processing the
 *             request.
 */
public Download resumeDownload(PersistableDownload persistableDownload) {
    assertParameterNotNull(persistableDownload, "PausedDownload is mandatory to resume a download.");

    GetObjectRequest request = new GetObjectRequest(
            persistableDownload.getBucketName(),
            persistableDownload.getKey(),
            persistableDownload.getVersionId());
    if (persistableDownload.getRange() != null && persistableDownload.getRange().length == 2) {
        long[] range = persistableDownload.getRange();
        request.setRange(range[0], range[1]);
    }
    request.setRequesterPays(persistableDownload.isRequesterPays());
    request.setResponseHeaders(persistableDownload.getResponseHeaders());

    return doDownload(request, new File(persistableDownload.getFile()), null, null,
            APPEND_MODE, 0, persistableDownload.getLastFullyDownloadedPartNumber(),
            persistableDownload.getlastModifiedTime());
}
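A usage sketch of the pause/resume flow this method supports, assuming an already-configured TransferManager; the bucket, key, and file names here are illustrative:

// Sketch: pause a TransferManager download and resume it later from the persisted state.
static void pauseAndResume(TransferManager tm) throws InterruptedException {
    Download download = tm.download(new GetObjectRequest("example-bucket", "example-key"),
            new File("/tmp/example-download"));
    PersistableDownload state = download.pause();   // capture resumable state
    // ... later, possibly after a restart ...
    Download resumed = tm.resumeDownload(state);    // skips already-fetched bytes
    resumed.waitForCompletion();                    // blocks until the remaining data arrives
}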
/**
 * Same as {@link #decipher(GetObjectRequest, long[], long[], S3Object)}
 * but makes use of an instruction file with the specified suffix.
 *
 * @param instFileSuffix never null or empty (which is assumed to have been
 *            sanitized upstream.)
 */
private S3Object decipherWithInstFileSuffix(GetObjectRequest req,
        long[] desiredRange, long[] cryptoRange, S3Object retrieved,
        String instFileSuffix) {
    final S3ObjectId id = req.getS3ObjectId();

    // Check if encrypted info is in an instruction file
    final S3ObjectWrapper ifile = fetchInstructionFile(id, instFileSuffix);
    if (ifile == null) {
        throw new SdkClientException("Instruction file with suffix "
                + instFileSuffix + " is not found for " + retrieved);
    }

    try {
        return decipherWithInstructionFile(req, desiredRange, cryptoRange,
                new S3ObjectWrapper(retrieved, id), ifile);
    } finally {
        closeQuietly(ifile, log);
    }
}
/**
 * Verify that range-downloads work.
 *
 * @throws Exception not expected
 */
@Test
public void checkRangeDownloads() throws Exception {
    final File uploadFile = new File(UPLOAD_FILE_NAME);
    s3Client.createBucket(BUCKET_NAME);

    final TransferManager transferManager = createDefaultTransferManager();
    final Upload upload =
            transferManager.upload(new PutObjectRequest(BUCKET_NAME, UPLOAD_FILE_NAME, uploadFile));
    upload.waitForUploadResult();

    final File downloadFile = File.createTempFile(UUID.randomUUID().toString(), null);
    transferManager
            .download(new GetObjectRequest(BUCKET_NAME, UPLOAD_FILE_NAME).withRange(1, 2), downloadFile)
            .waitForCompletion();
    assertThat("Invalid file length", downloadFile.length(), is(2L));

    transferManager
            .download(new GetObjectRequest(BUCKET_NAME, UPLOAD_FILE_NAME).withRange(0, 1000), downloadFile)
            .waitForCompletion();
    assertThat("Invalid file length", downloadFile.length(), is(uploadFile.length()));
}
@Override
public void retrieve( char[] path )
        throws IOException {
    String pathValue = String.valueOf( path );
    try {
        LOG.log( Level.FINE, () -> "Retrieving " + getBucketName() + ":" + pathValue );

        S3Object obj = getS3().getObject( new GetObjectRequest( getBucketName(), pathValue ) );
        FileSystemUtils.copyFromRemote( () -> obj.getObjectContent(), getDelegate(), path );

        LOG.log( Level.FINE, () -> "Retrieved " + getBucketName() + ":" + pathValue );
    } catch( AmazonS3Exception ex ) {
        LOG.log( Level.FINE, () -> "Error " + ex.getStatusCode() + " " + getBucketName() + ":" + pathValue );
        if( ex.getStatusCode() == 404 ) {
            throw new FileNotFoundException( pathValue );
        }
        throw new IOException( "Cannot access " + pathValue, ex );
    }
}
@Override
public void retrieveFile(final PersistentFileMetadata metadata, final OutputStream outputStream) {
    final S3Util.BucketObjectPair s3Object = S3Util.parseResourceURL(metadata.getResourceUrl());
    try {
        final GetObjectRequest getObjectRequest =
                new GetObjectRequest(s3Object.getBucketName(), s3Object.getKey());
        try (final InputStream is = this.amazonS3.getObject(getObjectRequest).getObjectContent()) {
            ByteStreams.copy(is, outputStream);
        }
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
@Override
public InputStream downloadFile(String filePath) throws BusinessException {
    if (!Optional.ofNullable(filePath).isPresent()) {
        throw new BusinessException(Validations.INVALID_PATH.getCode());
    }
    client.getClient(credentials.getCredentials());
    try {
        if (filePath.contains("/")) {
            filePath = filePath.split("/")[filePath.split("/").length - 1];
        }
        S3Object object = s3Client.getObject(new GetObjectRequest(cdnConfig.getName(), filePath));
        return object.getObjectContent();
    } catch (AmazonServiceException e) {
        throw new BusinessException(Validations.INVALID_S3_BUCKET_CREDENTIALS.getCode());
    }
}
@Override
public void setConf(AbstractConfig config) {
    this.config = (GeoIpOperationConfig) config;
    AmazonS3Client client = this.s3Factory.newInstance();

    AmazonS3URI uri = new AmazonS3URI(this.config.getGeoLiteDb());
    GetObjectRequest req = new GetObjectRequest(uri.getBucket(), uri.getKey());
    S3Object obj = client.getObject(req);

    try {
        this.databaseReader =
                new DatabaseReader.Builder(obj.getObjectContent()).withCache(new CHMCache()).build();
    } catch (IOException e) {
        throw new ConfigurationException("Unable to read " + this.config.getGeoLiteDb(), e);
    }
}
/**
 * Reads bytes into {@link #target}, until either the end of {@link #target} or the end of the S3 object is
 * reached.
 *
 * @param s3Object the S3 object
 * @param getObjectRequest the S3 get-object request used for retrieving {@code s3Object}
 * @return the total size of the S3 object
 * @throws AmazonClientException if a call to {@link S3ObjectInputStream#read(byte[], int, int)} does not read
 *     any bytes even though it should have
 * @throws IOException if a call to {@link S3ObjectInputStream#read(byte[], int, int)} throws an I/O exception
 */
private long readS3Object(@Nullable S3Object s3Object, GetObjectRequest getObjectRequest) throws IOException {
    long totalSize;
    if (s3Object == null) {
        totalSize = s3Client.getObjectMetadata(bucketName, key).getInstanceLength();
        if (offsetInS3Object < totalSize) {
            throw new AmazonClientException(String.format(
                "Could not read %s (range: %s), because AmazonS3#getObject() returned null.",
                key, Arrays.toString(getObjectRequest.getRange())
            ));
        }
    } else {
        totalSize = s3Object.getObjectMetadata().getInstanceLength();

        // Note that the (int) cast is safe because target.length is of type int.
        int remainingBytesToRead = (int) Math.max(0,
            Math.min(target.length - posInTarget, totalSize - offsetInS3Object));
        S3ObjectInputStream inputStream = s3Object.getObjectContent();
        int bytesRead;
        while (remainingBytesToRead > 0) {
            // read() promises to read "up to" remainingBytesToRead bytes. There is no guarantee that
            // this many bytes are read, even if enough bytes are available. In fact, experiments showed
            // that read() sometimes only returns 2^15 bytes.
            bytesRead = inputStream.read(target, posInTarget, remainingBytesToRead);
            posInTarget += bytesRead;
            remainingBytesToRead -= bytesRead;
            if (bytesRead <= 0) {
                // This should not happen and indicates a logical bug. We therefore fail here.
                throw new AmazonClientException(String.format(
                    "Could not read %s (range: %s). Requested %d bytes from input stream, but "
                        + "S3ObjectInputStream#read() returned %d.",
                    key, Arrays.toString(getObjectRequest.getRange()), remainingBytesToRead, bytesRead
                ));
            }
        }
    }
    return totalSize;
}
/**
 * Performs the actual data transfer.
 */
private void transfer() {
    // Strangely, withRange() expects an inclusive end parameter
    GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, key)
        .withRange(offsetInS3Object + posInTarget, offsetInS3Object + target.length - 1);

    // getObject() may return null if some constraints of the request cannot be met. Thanks to JDK-7020047,
    // the try-with-resources statement protects the automatic call to close() with a non-null check.
    try (@Nullable S3Object s3Object = s3Client.getObject(getObjectRequest)) {
        long totalSize = readS3Object(s3Object, getObjectRequest);
        promise.complete(totalSize);
    } catch (IOException exception) {
        AmazonClientException amazonClientException = new AmazonClientException(String.format(
            "Could not read %s (range: %s).", key, Arrays.toString(getObjectRequest.getRange())
        ), exception);
        boolean couldRecover = couldRecoverFromException(amazonClientException, getObjectRequest);
        if (!couldRecover) {
            promise.completeExceptionally(amazonClientException);
        }
    }
}
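couldRecoverFromException is a helper the example assumes but does not show. One plausible shape, treating retryable client exceptions as recoverable; this is an assumption, not the project's actual logic:

// Hypothetical shape of the recovery check used above; the real implementation is not shown.
private boolean couldRecoverFromException(AmazonClientException exception, GetObjectRequest request) {
    // A retryable client exception (for example, an I/O error while streaming the ranged GET)
    // could be recovered from by re-issuing the same request.
    return exception.isRetryable();
}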
/**
 * Strict authenticated encryption mode does not support ranged GETs. This is because we must use AES/CTR for
 * ranged GETs, which is not an authenticated encryption algorithm. To do a partial get using authenticated
 * encryption, you have to get the whole object and filter to the data you want.
 */
public void strictAuthenticatedEncryption_RangeGet_CustomerManagedKey() throws NoSuchAlgorithmException {
    SecretKey secretKey = KeyGenerator.getInstance("AES").generateKey();
    AmazonS3Encryption s3Encryption = AmazonS3EncryptionClientBuilder
            .standard()
            .withRegion(Regions.US_WEST_2)
            .withCryptoConfiguration(new CryptoConfiguration(CryptoMode.StrictAuthenticatedEncryption))
            .withEncryptionMaterials(new StaticEncryptionMaterialsProvider(new EncryptionMaterials(secretKey)))
            .build();

    s3Encryption.putObject(BUCKET_NAME, ENCRYPTED_KEY, "some contents");

    try {
        s3Encryption.getObject(new GetObjectRequest(BUCKET_NAME, ENCRYPTED_KEY).withRange(0, 2));
    } catch (SecurityException e) {
        System.err.println("Range GET is not supported with authenticated encryption");
    }
}
@Test
public void testGet() {
    AmazonS3 client = mock(AmazonS3.class);
    S3StoreService service = new S3StoreService(client, S3_BUCKET, S3_PREFIX);

    String path = "path";
    String value = "value";

    ArgumentCaptor<GetObjectRequest> request = ArgumentCaptor.forClass(GetObjectRequest.class);
    S3Object s3Object = new S3Object();
    s3Object.setObjectContent(new S3ObjectInputStream(IOUtils.toInputStream(value), mock(HttpRequestBase.class)));
    when(client.getObject(request.capture())).thenReturn(s3Object);

    // invoke method under test
    Optional<String> result = service.get(path);

    assertTrue(result.isPresent());
    assertEquals(value, result.get());
    assertEquals(S3_BUCKET, request.getValue().getBucketName());
    assertEquals(S3_PREFIX + "/" + path, request.getValue().getKey());
}
@Test
public void testGetNoSuchKey() {
    AmazonS3 client = mock(AmazonS3.class);
    S3StoreService service = new S3StoreService(client, S3_BUCKET, S3_PREFIX);

    String path = "path";
    String value = "value";

    ArgumentCaptor<GetObjectRequest> request = ArgumentCaptor.forClass(GetObjectRequest.class);
    S3Object s3Object = new S3Object();
    s3Object.setObjectContent(new S3ObjectInputStream(IOUtils.toInputStream(value), mock(HttpRequestBase.class)));

    AmazonServiceException error = new AmazonServiceException("fake expected exception");
    error.setErrorCode("NoSuchKey");
    when(client.getObject(request.capture())).thenThrow(error);

    // invoke method under test
    Optional<String> result = service.get(path);

    assertFalse(result.isPresent());
    assertEquals(S3_BUCKET, request.getValue().getBucketName());
    assertEquals(S3_PREFIX + "/" + path, request.getValue().getKey());
}
private Note getNote(String key) throws IOException {
    GsonBuilder gsonBuilder = new GsonBuilder();
    gsonBuilder.setPrettyPrinting();
    Gson gson = gsonBuilder.create();

    S3Object s3object = s3client.getObject(new GetObjectRequest(bucketName, key));

    InputStream ins = s3object.getObjectContent();
    String json = IOUtils.toString(ins, conf.getString(ConfVars.ZEPPELIN_ENCODING));
    ins.close();

    Note note = gson.fromJson(json, Note.class);

    for (Paragraph p : note.getParagraphs()) {
        if (p.getStatus() == Status.PENDING || p.getStatus() == Status.RUNNING) {
            p.setStatus(Status.ABORT);
        }
    }

    return note;
}
@Override
public StreamingOutput getArtifactStream(IndexArtifact artifact, String filename) {
    final String artifactPath = getPath() + artifact.getLocation() + "/" + filename;

    if (client.doesObjectExist(bucketName, artifactPath)) {
        return new StreamingOutput() {
            @Override
            public void write(OutputStream os) throws IOException, WebApplicationException {
                GetObjectRequest gor = new GetObjectRequest(bucketName, artifactPath);
                S3Object so = client.getObject(gor);
                ByteStreams.copy(so.getObjectContent(), os);
                so.close();
            }
        };
    } else {
        throw new NotFoundException();
    }
}
/**
 * S3 block read is achieved through the AmazonS3 client. The steps are:
 * (1) Create the objectRequest from bucketName and filePath.
 * (2) Set the range on the above created objectRequest.
 * (3) Get the object portion through the AmazonS3 client API.
 * (4) Get the object content from the above object portion.
 *
 * @param bytesFromCurrentOffset
 *            bytes read till now from current offset
 * @param bytesToFetch
 *            the number of bytes to be fetched
 * @return the number of bytes read, -1 if 0 bytes read
 * @throws IOException
 */
@Override
protected int readData(final long bytesFromCurrentOffset, final int bytesToFetch) throws IOException {
    GetObjectRequest rangeObjectRequest = new GetObjectRequest(s3Params.bucketName, s3Params.filePath);
    rangeObjectRequest.setRange(offset + bytesFromCurrentOffset,
            offset + bytesFromCurrentOffset + bytesToFetch - 1);
    S3Object objectPortion = s3Params.s3Client.getObject(rangeObjectRequest);
    S3ObjectInputStream wrappedStream = objectPortion.getObjectContent();
    buffer = ByteStreams.toByteArray(wrappedStream);
    wrappedStream.close();
    int bufferLength = buffer.length;
    if (bufferLength <= 0) {
        return -1;
    }
    return bufferLength;
}
/**
 * S3 block read is achieved through the AmazonS3 client. The steps are:
 * (1) Create the objectRequest from bucketName and filePath.
 * (2) Set the range on the above created objectRequest.
 * (3) Get the object portion through the AmazonS3 client API.
 * (4) Get the object content from the above object portion.
 *
 * @return the block entity
 * @throws IOException
 */
@Override
protected Entity readEntity() throws IOException {
    entity.clear();
    GetObjectRequest rangeObjectRequest = new GetObjectRequest(bucketName, filePath);
    rangeObjectRequest.setRange(offset, blockMetadata.getLength() - 1);
    S3Object objectPortion = s3Client.getObject(rangeObjectRequest);
    S3ObjectInputStream wrappedStream = objectPortion.getObjectContent();
    byte[] record = ByteStreams.toByteArray(wrappedStream);
    entity.setUsedBytes(record.length);
    entity.setRecord(record);
    wrappedStream.close();
    return entity;
}
public OSM get (String id) {
    try {
        return osmCache.get(id, () -> {
            String cleanId = cleanId(id);
            File cacheFile = new File(cacheDir, cleanId + ".pbf");
            if (!cacheFile.exists()) {
                // fetch from S3
                s3.getObject(new GetObjectRequest(bucket, cleanId + ".pbf"), cacheFile);
            }

            OSM ret = new OSM(null);
            ret.intersectionDetection = true;
            ret.readFromFile(cacheFile.getAbsolutePath());
            return ret;
        });
    } catch (ExecutionException e) {
        throw new RuntimeException(e);
    }
}
@Test
public void testFetchZeroSize() throws Exception {
    final S3Object expected = new S3Object();
    expected.setKey("object-key");
    expected.setBucketName("bucket-name");

    when(mockS3.getObject(any(GetObjectRequest.class)))
            .thenReturn(expected);

    try {
        final S3Object download = downloader.fetch(record);
        assertThat(download).isNotNull();
        failBecauseExceptionWasNotThrown(AmazonS3ZeroSizeException.class);
    } catch (AmazonS3ZeroSizeException e) {
    }

    verify(mockS3).getObject(any(GetObjectRequest.class));
}
private Note getNote(String key) throws IOException {
    S3Object s3object;
    try {
        s3object = s3client.getObject(new GetObjectRequest(bucketName, key));
    } catch (AmazonClientException ace) {
        throw new IOException("Unable to retrieve object from S3: " + ace, ace);
    }

    Note note;
    try (InputStream ins = s3object.getObjectContent()) {
        String json = IOUtils.toString(ins, conf.getString(ConfVars.ZEPPELIN_ENCODING));
        note = Note.fromJson(json);
    }

    for (Paragraph p : note.getParagraphs()) {
        if (p.getStatus() == Status.PENDING || p.getStatus() == Status.RUNNING) {
            p.setStatus(Status.ABORT);
        }
    }

    return note;
}
public InputStream getResourceStream(String reference) throws IOException {
    logger.info("Reading file from s3://" + reference);

    AmazonS3Client client;
    if (creds != null) {
        client = new AmazonS3Client(creds);
    } else {
        client = new AmazonS3Client();
    }

    String[] bucketAndFile = reference.split("/", 2);
    if (bucketAndFile.length != 2) {
        return null;
    }

    S3Object object = client.getObject(new GetObjectRequest(bucketAndFile[0], bucketAndFile[1]));
    if (reference.endsWith(".gz")) {
        return new S3ObjectInputStreamWrapper(new GZIPInputStream(object.getObjectContent()), client);
    } else {
        return new S3ObjectInputStreamWrapper(object.getObjectContent(), client);
    }
}
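S3ObjectInputStreamWrapper is an assumed helper that is not shown above. A minimal sketch of what such a wrapper might do: delegate reads and shut the per-call client down when the stream is closed. The class body below is an assumption, not the project's implementation:

// Sketch of the assumed wrapper, built on java.io.FilterInputStream.
public class S3ObjectInputStreamWrapper extends FilterInputStream {
    private final AmazonS3Client client;

    public S3ObjectInputStreamWrapper(InputStream delegate, AmazonS3Client client) {
        super(delegate);
        this.client = client;
    }

    @Override
    public void close() throws IOException {
        try {
            super.close();          // close the underlying S3 object stream
        } finally {
            client.shutdown();      // release the client created for this read
        }
    }
}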
@Override
protected InputStream getInputStream(int x, int y) throws IOException {
    try {
        GetObjectRequest req = new GetObjectRequest(bucketName, String.format("%d/%d.pbf.gz", x, y));
        // the LODES bucket is requester-pays.
        req.setRequesterPays(true);
        return s3.getObject(req).getObjectContent();
    } catch (AmazonS3Exception e) {
        // there is no data in this tile
        if ("NoSuchKey".equals(e.getErrorCode()))
            return null;
        else
            // re-throw, something else is amiss
            throw e;
    }
}
static S3Object getObject(
    AmazonS3 s3Client,
    String bucket,
    String objectKey,
    boolean useSSE,
    CredentialValue customerKey,
    CredentialValue customerKeyMd5
) throws StageException {
    GetObjectRequest getObjectRequest = new GetObjectRequest(bucket, objectKey);
    if (useSSE) {
        SSECustomerKey sseCustomerKey = new SSECustomerKey(customerKey.get());
        sseCustomerKey.setMd5(customerKeyMd5.get());
        getObjectRequest.setSSECustomerKey(sseCustomerKey);
    }
    return s3Client.getObject(getObjectRequest);
}
static S3Object getObjectRange(
    AmazonS3 s3Client,
    String bucket,
    String objectKey,
    long range,
    boolean useSSE,
    CredentialValue customerKey,
    CredentialValue customerKeyMd5
) throws StageException {
    GetObjectRequest getObjectRequest = new GetObjectRequest(bucket, objectKey).withRange(0, range);
    if (useSSE) {
        SSECustomerKey sseCustomerKey = new SSECustomerKey(customerKey.get());
        sseCustomerKey.setMd5(customerKeyMd5.get());
        getObjectRequest.setSSECustomerKey(sseCustomerKey);
    }
    return s3Client.getObject(getObjectRequest);
}
protected void downloadInputFileToLocal() {
    try {
        AmazonS3Client s3Client = awsConnection.getS3Client();
        s3Client.getObject(
                new GetObjectRequest(options.getS3PathBucketName(options.getCorrectionInputPath()),
                        options.getS3PathKey(options.getCorrectionInputPath())),
                new File(options.getTmpCorrectionInputPath()));
    } catch (AmazonServiceException ase) {
        String errorStr = "Error: Failed to download given file from S3 path: "
                + options.getCorrectionInputPath() + " , please check your path.";
        logger.error(errorStr);
        System.exit(1);
    } catch (Exception e) {
        logger.error("Exception!", e);
        e.printStackTrace();
        System.exit(1);
    }
}
@Test
public void testConditionalGet() throws Exception {
    assumeTrue(!blobStoreType.equals("b2"));

    String blobName = "blob-name";
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(BYTE_SOURCE.size());
    PutObjectResult result = client.putObject(containerName, blobName,
            BYTE_SOURCE.openStream(), metadata);

    S3Object object = client.getObject(
            new GetObjectRequest(containerName, blobName)
                    .withMatchingETagConstraint(result.getETag()));
    try (InputStream is = object.getObjectContent()) {
        assertThat(is).isNotNull();
        ByteStreams.copy(is, ByteStreams.nullOutputStream());
    }

    object = client.getObject(
            new GetObjectRequest(containerName, blobName)
                    .withNonmatchingETagConstraint(result.getETag()));
    assertThat(object).isNull();
}
@Override
public boolean downloadEntity(String bucketName, String keyNotAvailable, File destinationFile) {
    LOG.info("Gets the object metadata for the object stored in Amazon S3 under the specified bucket "
            + bucketName + " and key " + keyNotAvailable
            + ", and saves the object contents to the specified file " + destinationFile);
    try {
        ObjectMetadata objectMetadata =
                amazonS3Client.getObject(new GetObjectRequest(bucketName, keyNotAvailable), destinationFile);
        if (objectMetadata != null) {
            return true;
        }
    } catch (AmazonServiceException ase) {
        LOG.warn(ase.getMessage(), ase);
    } catch (AmazonClientException ace) {
        LOG.warn(ace.getMessage(), ace);
    }
    return false;
}
@Override
public Movie get( final MovieId movieId ) {
    final String key = movieId.getMovieId();
    logger.debug( "Downloading {} from S3", key );
    final GetObjectRequest getObjectRequest = new GetObjectRequest( S3_BUCKET_HOOD_ETS_SOURCE, key );
    try {
        final Path outputPath = Files.createTempFile( "movie", key );
        final File outputFile = outputPath.toFile();
        this.transferManager.getAmazonS3Client().getObject( getObjectRequest, outputFile );
        return new Movie( new MovieId( outputFile.getAbsolutePath() ), outputPath );
    } catch ( final Exception e ) {
        logger.error( "Exception while downloading", e );
    }
    return null;
}
/***
 * Download an S3 object to a local directory
 *
 * @param s3ObjectSummary S3 object summary for the object to download
 * @param targetDirectory Local target directory to download the object to
 * @throws IOException If any errors were encountered in downloading the object
 */
public void downloadS3Object(S3ObjectSummary s3ObjectSummary, String targetDirectory) throws IOException {
    final AmazonS3 amazonS3 = getS3Client();
    final GetObjectRequest getObjectRequest = new GetObjectRequest(
            s3ObjectSummary.getBucketName(), s3ObjectSummary.getKey());
    final S3Object s3Object = amazonS3.getObject(getObjectRequest);

    final String targetFile =
            StringUtils.removeEnd(targetDirectory, File.separator) + File.separator + s3Object.getKey();
    FileUtils.copyInputStreamToFile(s3Object.getObjectContent(), new File(targetFile));

    LOGGER.info("S3 object downloaded to file: " + targetFile);
}
private synchronized void connectToS3() throws IOException {
    if (! connected) {
        try {
            String s3key;
            try {
                s3key = java.net.URLDecoder.decode(s3uri.getKey(), "UTF-8");
            } catch (final UnsupportedEncodingException e) {
                LOG.warn("failed to decode key, using raw key instead", e);
                // TODO: Better error handling with badly encoded URLs?
                s3key = s3uri.getKey();
            }
            s3object = s3Client.getObject(new GetObjectRequest(s3uri.getBucket(), s3key));
            connected = true;
        } catch (final AmazonServiceException ase) {
            throw new IOException("Amazon S3 service failure for error type " + ase.getErrorType(), ase);
        } catch (final AmazonClientException ace) {
            throw new IOException("Amazon S3 client failure", ace);
        }
    }
}