/**
 * Uploads {@code bytes} to S3 as a single (non-multipart) PUT and verifies the
 * write by comparing the response ETag against the locally computed MD5.
 *
 * @param client S3 client used for the upload
 * @param bytes  full object content, held in memory
 * @param bucket destination bucket name
 * @param key    destination object key
 * @return the metadata that was sent with the request
 * @throws Exception if the returned ETag does not match the local MD5
 */
public static ObjectMetadata simpleUploadFile(S3Client client, byte[] bytes, String bucket, String key) throws Exception {
    // Digest computed once: sent base64-encoded as Content-MD5 (so S3 rejects
    // corrupted uploads server-side) and compared hex-encoded to the ETag below.
    byte[] md5 = md5(bytes, bytes.length);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(bytes.length);
    metadata.setLastModified(new Date());
    metadata.setContentMD5(S3Utils.toBase64(md5));
    PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, new ByteArrayInputStream(bytes), metadata);
    PutObjectResult putObjectResult = client.putObject(putObjectRequest);
    // NOTE(review): for a plain single-part PUT the ETag is the hex MD5 of the
    // body; this check would spuriously fail for SSE-KMS objects — confirm if
    // this path is ever used with server-side encryption.
    if ( !putObjectResult.getETag().equals(S3Utils.toHex(md5)) ) {
        throw new Exception("Unable to match MD5 for config");
    }
    return metadata;
}
/**
 * In-memory test double for putObject: stores the blob under its key and
 * echoes back the MD5 of the bytes the caller has read through the supplied
 * DigestInputStream.
 *
 * @throws AmazonS3Exception if an object with the same key already exists
 */
@Override
public PutObjectResult putObject(PutObjectRequest putObjectRequest) throws AmazonClientException, AmazonServiceException {
    String blobName = putObjectRequest.getKey();
    // Assumes callers always wrap their content in a DigestInputStream — TODO
    // confirm; a plain stream would fail this cast with a ClassCastException.
    DigestInputStream stream = (DigestInputStream) putObjectRequest.getInputStream();
    if (blobs.containsKey(blobName)) {
        throw new AmazonS3Exception("[" + blobName + "] already exists.");
    }
    blobs.put(blobName, stream);
    // input and output md5 hashes need to match to avoid an exception
    // NOTE(review): digest() reflects only the bytes consumed so far; this
    // presumes the stream was fully read before putObject was invoked — verify.
    String md5 = Base64.encodeAsString(stream.getMessageDigest().digest());
    PutObjectResult result = new PutObjectResult();
    result.setContentMd5(md5);
    return result;
}
/**
 * Recording test double for putObject: remembers every request, captures the
 * uploaded content as an S3Object, and answers with a fixed ETag.
 *
 * @throws AmazonServiceException if the request's file cannot be opened
 */
@SuppressWarnings("resource")
@Override
public PutObjectResult putObject(PutObjectRequest putObjectRequest) throws AmazonClientException, AmazonServiceException {
    // Keep the raw request so tests can assert on it later.
    putObjectRequests.add(putObjectRequest);

    final S3Object stored = new S3Object();
    stored.setBucketName(putObjectRequest.getBucketName());
    stored.setKey(putObjectRequest.getKey());

    final File source = putObjectRequest.getFile();
    if (source == null) {
        // Stream-based upload: adopt the caller's stream directly.
        stored.setObjectContent(putObjectRequest.getInputStream());
    } else {
        try {
            stored.setObjectContent(new FileInputStream(source));
        } catch (FileNotFoundException e) {
            throw new AmazonServiceException("Cannot store the file object.", e);
        }
    }
    objects.add(stored);

    // Tests only need a stable, non-null ETag.
    final PutObjectResult response = new PutObjectResult();
    response.setETag("3a5c8b1ad448bca04584ecb55b836264");
    return response;
}
/**
 * Convenience overload that uploads a UTF-8 string as a text/plain object by
 * delegating to the request-based putObject.
 *
 * @throws IllegalArgumentException (via rejectNull) if any argument is null
 */
@Override
public PutObjectResult putObject(String bucketName, String key, String content) throws AmazonServiceException, SdkClientException {
    rejectNull(bucketName, "Bucket name must be provided");
    rejectNull(key, "Object key must be provided");
    rejectNull(content, "String content must be provided");

    final byte[] payload = content.getBytes(StringUtils.UTF8);
    final ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setContentType("text/plain");
    objectMetadata.setContentLength(payload.length);
    return putObject(new PutObjectRequest(bucketName, key, new ByteArrayInputStream(payload), objectMetadata));
}
/**
 * Encrypts and uploads the object, storing the content-encryption material
 * (wrapped CEK, IV, ...) in the S3 object's user metadata rather than in a
 * separate instruction file.
 */
private PutObjectResult putObjectUsingMetadata(PutObjectRequest req) {
    ContentCryptoMaterial cekMaterial = createContentCryptoMaterial(req);
    // Wraps the object data with a cipher input stream
    // Keep references to the original data source: wrapWithCipher swaps them
    // out on the request, but they still need to be closed afterwards.
    final File fileOrig = req.getFile();
    final InputStream isOrig = req.getInputStream();
    PutObjectRequest wrappedReq = wrapWithCipher(req, cekMaterial);
    // Update the metadata
    req.setMetadata(updateMetadataWithContentCryptoMaterial(
            req.getMetadata(), req.getFile(), cekMaterial));
    // Put the encrypted object into S3
    try {
        return s3.putObject(wrappedReq);
    } finally {
        // Always release both the original and the cipher-wrapped sources.
        cleanupDataSource(req, fileOrig, isOrig, wrappedReq.getInputStream(), log);
    }
}
/**
 * Puts an Object; Copies that object to a new bucket; Downloads the object
 * from the new bucket; compares checksums of original and copied object.
 *
 * @throws Exception if an Exception occurs
 */
@Test
public void shouldCopyObject() throws Exception {
    final File uploadFile = new File(UPLOAD_FILE_NAME);
    final String sourceKey = UPLOAD_FILE_NAME;
    final String destinationBucketName = "destinationBucket";
    final String destinationKey = "copyOf/" + sourceKey;

    final PutObjectResult putObjectResult =
            s3Client.putObject(new PutObjectRequest(BUCKET_NAME, sourceKey, uploadFile));

    final CopyObjectRequest copyObjectRequest =
            new CopyObjectRequest(BUCKET_NAME, sourceKey, destinationBucketName, destinationKey);
    s3Client.copyObject(copyObjectRequest);

    final com.amazonaws.services.s3.model.S3Object copiedObject =
            s3Client.getObject(destinationBucketName, destinationKey);
    // For a single-part PUT the ETag is the MD5 of the body, so the copy's
    // digest must equal the original upload's ETag.
    final String copiedHash = HashUtil.getDigest(copiedObject.getObjectContent());
    copiedObject.close();

    assertThat("Sourcefile and copied File should have same Hashes",
            copiedHash, is(equalTo(putObjectResult.getETag())));
}
/**
 * Tests if the Metadata of an existing file can be retrieved, and that asking
 * for the metadata of a missing key fails with a 404.
 */
@Test
public void shouldGetObjectMetadata() {
    final String nonExistingFileName = "nonExistingFileName";
    final File uploadFile = new File(UPLOAD_FILE_NAME);
    s3Client.createBucket(BUCKET_NAME);
    final PutObjectResult putObjectResult =
            s3Client.putObject(new PutObjectRequest(BUCKET_NAME, UPLOAD_FILE_NAME, uploadFile));
    final ObjectMetadata metadataExisting =
            s3Client.getObjectMetadata(BUCKET_NAME, UPLOAD_FILE_NAME);

    assertThat("The ETags should be identically!",
            metadataExisting.getETag(), is(putObjectResult.getETag()));

    // The ExpectedException rule must be armed before the throwing call.
    thrown.expect(AmazonS3Exception.class);
    thrown.expectMessage(containsString("Status Code: 404"));
    s3Client.getObjectMetadata(BUCKET_NAME, nonExistingFileName);
}
/**
 * Verifies that uploadPage pushes the rendered content to S3 exactly once and
 * returns the object's public URL.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
@Test
public void uploadPage() throws IOException, TemplateException {
    // Use the real implementation for the method under test; mock the S3 calls.
    doCallRealMethod().when(uploader).uploadPage(any());
    doReturn(mock(PutObjectResult.class)).when(s3client).putObject(any());
    URL url = new URL("http://www.example.com");
    doReturn(url).when(s3client).getUrl(anyString(), anyString());

    String result = uploader.uploadPage("content");

    verify(s3client, times(1)).putObject(any());
    verify(s3client, times(1)).getUrl(anyString(), anyString());
    // assertEquals takes (expected, actual); the original had them swapped,
    // which only garbles the failure message but is worth fixing.
    assertEquals("http://www.example.com", result);
}
/**
 * Verifies that ModelBucket.put forwards bucket, key, stream, and content
 * length to S3 and attaches a single public-read ACL grant.
 */
@Test
public void testPut() {
    ModelBucket bucket = getService(ModelBucket.class);
    InputStream stream = new ByteArrayInputStream("file content".getBytes());
    ArgumentCaptor<PutObjectRequest> requestCaptor = ArgumentCaptor.forClass(PutObjectRequest.class);
    PutObjectResult expected = new PutObjectResult();
    when(amazonS3Client.putObject(requestCaptor.capture())).thenReturn(expected);

    // put() should hand back the SDK's result object untouched.
    assertEquals(expected, bucket.put("path", stream, 12L));

    PutObjectRequest request = requestCaptor.getValue();
    assertEquals("model-bucket", request.getBucketName());
    assertEquals("path", request.getKey());
    assertEquals(stream, request.getInputStream());
    assertEquals(12L, request.getMetadata().getContentLength());
    // Exactly one grant: world-readable.
    List<Grant> grants = request.getAccessControlList().getGrantsAsList();
    assertEquals(1, grants.size());
    assertEquals(GroupGrantee.AllUsers, grants.get(0).getGrantee());
    assertEquals(Permission.Read, grants.get(0).getPermission());
}
/**
 * Per-test setup: builds an operator context rooted under
 * target/&lt;class&gt;/&lt;method&gt;, configures an S3Reconciler, and wires in
 * a mocked S3 client whose putObject answers with a predictable ETag
 * (the output path).
 */
@Override
protected void starting(Description description) {
    super.starting(description);
    outputPath = new File(
            "target" + Path.SEPARATOR + description.getClassName() + Path.SEPARATOR
                    + description.getMethodName()).getPath();

    Attribute.AttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
    attributes.put(DAG.DAGContext.APPLICATION_ID, description.getClassName());
    attributes.put(DAG.DAGContext.APPLICATION_PATH, outputPath);
    context = mockOperatorContext(1, attributes);

    underTest = new S3Reconciler();
    underTest.setAccessKey("");
    underTest.setSecretKey("");
    underTest.setup(context);

    // initMocks must run before the s3clientMock stubbing below.
    MockitoAnnotations.initMocks(this);

    PutObjectResult result = new PutObjectResult();
    result.setETag(outputPath);
    when(s3clientMock.putObject((PutObjectRequest)any())).thenReturn(result);
    underTest.setS3client(s3clientMock);
}
@Override public PutObjectResult uploadObject(final String bucketName, final String fileName, final InputStream inputStream, final CannedAccessControlList cannedAcl) throws AmazonClientException, AmazonServiceException, IOException { LOGGER.info("uploadObject invoked, bucketName: {} , fileName: {}, cannedAccessControlList: {}", bucketName, fileName, cannedAcl); File tempFile = null; PutObjectRequest putObjectRequest = null; PutObjectResult uploadResult = null; try { // Create temporary file from stream to avoid 'out of memory' exception tempFile = AWSUtil.createTempFileFromStream(inputStream); putObjectRequest = new PutObjectRequest(bucketName, fileName, tempFile).withCannedAcl(cannedAcl); uploadResult = uploadObject(putObjectRequest); } finally { AWSUtil.deleteTempFile(tempFile); // Delete the temporary file once uploaded } return uploadResult; }
@Override public PutObjectResult createDirectory(final String bucketName, final String dirName, final boolean isPublicAccessible) throws AmazonClientException, AmazonServiceException { LOGGER.info("createDirectory invoked, bucketName: {}, dirName: {} and isPublicAccessible: {}", bucketName, dirName, isPublicAccessible); final ObjectMetadata metadata = new ObjectMetadata(); metadata.setContentLength(0); // Create empty content,since creating empty folder needs an empty content final InputStream emptyContent = new ByteArrayInputStream(new byte[0]); // Create a PutObjectRequest passing the directory name suffixed by '/' final PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, dirName + AWSUtilConstants.SEPARATOR, emptyContent, metadata); if(isPublicAccessible){ putObjectRequest.setCannedAcl(CannedAccessControlList.PublicRead); } return s3client.putObject(putObjectRequest); }
/**
 * Verifies conditional GETs: a matching If-Match ETag returns the object,
 * while a matching If-None-Match ETag yields null.
 */
@Test
public void testConditionalGet() throws Exception {
    // Backblaze B2 does not support this, so skip for that provider.
    assumeTrue(!blobStoreType.equals("b2"));

    String blobName = "blob-name";
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(BYTE_SOURCE.size());
    PutObjectResult result = client.putObject(containerName, blobName,
            BYTE_SOURCE.openStream(), metadata);

    S3Object object = client.getObject(
            new GetObjectRequest(containerName, blobName)
                    .withMatchingETagConstraint(result.getETag()));
    try (InputStream is = object.getObjectContent()) {
        assertThat(is).isNotNull();
        // Drain the body fully so the connection can be reused.
        ByteStreams.copy(is, ByteStreams.nullOutputStream());
    }

    object = client.getObject(
            new GetObjectRequest(containerName, blobName)
                    .withNonmatchingETagConstraint(result.getETag()));
    // The SDK signals a failed ETag precondition by returning null.
    assertThat(object).isNull();
}
/**
 * Uploads the given file to S3, returning true on success and false if the
 * SDK raised an error (which is logged rather than propagated).
 */
@Override
public boolean uploadEntity(String bucketName, String keyName, File file) {
    LOG.info("Uploads the specified file " + file + " to Amazon S3 under the specified bucket "
            + bucketName + " and key name " + keyName);
    try {
        // A non-null result means S3 accepted the PUT.
        return amazonS3Client.putObject(bucketName, keyName, file) != null;
    } catch (AmazonServiceException ase) {
        LOG.error(ase.getMessage(), ase);
    } catch (AmazonClientException ace) {
        LOG.error(ace.getMessage(), ace);
    }
    return false;
}
/**
 * Uploads the given stream with explicit object metadata, returning true on
 * success and false if the SDK raised an error (logged at WARN, not thrown).
 */
@Override
public boolean uploadEntity(String bucketName, String keyName, InputStream inputStream, ObjectMetadata metadata) {
    LOG.info("Uploads the specified input stream " + inputStream
            + " and object metadata to Amazon S3 under the specified bucket " + bucketName
            + " and key name " + keyName);
    try {
        final PutObjectResult outcome =
                amazonS3Client.putObject(bucketName, keyName, inputStream, metadata);
        if (outcome != null) {
            LOG.info("Upload the specified input stream " + inputStream + " state: " + outcome);
            return true;
        }
    } catch (AmazonServiceException ase) {
        LOG.warn(ase.getMessage(), ase);
    } catch (AmazonClientException ace) {
        LOG.warn(ace.getMessage(), ace);
    }
    return false;
}
/**
 * Stores the given content in S3 under the given bucket/key and returns the
 * resulting media metadata. The data is spooled to a temp file first so the
 * exact content length is known up front.
 *
 * @param lastModified optional last-modified timestamp; when null the current
 *                     time is used and no user-metadata entry is written
 * @return metadata describing the stored object
 * @throws IOException if caching the stream or the S3 upload fails
 */
protected MediaMetadata store(String bucketName, String key, String fileName, InputStream data,
        MediaType contentType, Date lastModified) throws IOException {
    File tmpFile = cache(data);
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentType(contentType.toString());
    metadata.setContentLength(tmpFile.length());
    metadata.setLastModified(lastModified == null ? new Date() : lastModified);
    if (lastModified != null) {
        metadata.addUserMetadata("last-modified",
                DateUtils.format(lastModified, DateUtils.FORMAT_RFC_3339.get(0)));
    }
    // try-with-resources closes the stream on every path; the original leaked
    // it when putObject threw anything other than AmazonClientException.
    try (InputStream is = new FileInputStream(tmpFile)) {
        s3Client.putObject(bucketName, key, is, metadata);
        return new S3Metadata(fileName, contentType, lastModified, metadata.getContentLength());
    } catch (AmazonClientException ex) {
        throw new IOException("Cannot create object due to S3 error.", ex);
    } finally {
        tmpFile.delete();
    }
}
/**
 * Verifies that closing the resource's output stream uploads exactly the
 * written bytes to the expected bucket/key.
 */
@Test
public void writeFile_forNewFile_writesFileContent() throws Exception {
    //Arrange
    AmazonS3 amazonS3 = mock(AmazonS3.class);
    SimpleStorageResource simpleStorageResource = new SimpleStorageResource(amazonS3,
            "bucketName", "objectName", new SyncTaskExecutor());
    String messageContext = "myFileContent";
    // The assertions live inside the stubbed putObject answer: they run when
    // close() below triggers the actual upload.
    when(amazonS3.putObject(eq("bucketName"), eq("objectName"), any(InputStream.class),
            any(ObjectMetadata.class))).thenAnswer((Answer<PutObjectResult>) invocation -> {
        assertEquals("bucketName", invocation.getArguments()[0]);
        assertEquals("objectName", invocation.getArguments()[1]);
        byte[] content = new byte[messageContext.length()];
        // NOTE(review): a single read() may return fewer bytes in general;
        // fine here because the source is an in-memory buffer.
        assertEquals(content.length, ((InputStream) invocation.getArguments()[2]).read(content));
        assertEquals(messageContext, new String(content));
        return new PutObjectResult();
    });

    OutputStream outputStream = simpleStorageResource.getOutputStream();

    //Act
    outputStream.write(messageContext.getBytes());
    outputStream.flush();
    outputStream.close();

    //Assert
    // (assertions executed inside the mock answer above)
}
/**
 * Creates a bucket, PUTs a small text object, GETs it back and verifies the
 * round-tripped content, then deletes the bucket.
 * NOTE(review): the method name has a typo ("Buckjet"); left unchanged to
 * keep the test's identity stable.
 */
@Test
public void testCreateBuckjet_thenPutAndGet() throws IOException {
    byte[] content = "qwer".getBytes("UTF-8");
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(content.length);

    client.createBucket(bucketName);
    PutObjectRequest s3request = new PutObjectRequest(bucketName, "qwer.txt",
            new ByteArrayInputStream(content), metadata);
    PutObjectResult response = client.putObject(s3request);
    assertNotNull(response);

    S3Object getResponse = client.getObject(bucketName, "qwer.txt");
    String getContent = inputStreamToString(getResponse.getObjectContent());
    assertEquals("qwer", getContent);

    // NOTE(review): real S3 rejects deleting a non-empty bucket — presumably
    // the client under test tolerates it; confirm.
    client.deleteBucket(bucketName);
}
/**
 * AwsOcket can write ocket content.
 * @throws Exception If fails
 */
@Test
public void writesContentToAwsObject() throws Exception {
    // Mock the whole bucket -> region -> AWS client chain.
    final AmazonS3 aws = Mockito.mock(AmazonS3.class);
    Mockito.doReturn(new PutObjectResult()).when(aws).putObject(
        Mockito.any(PutObjectRequest.class)
    );
    final Region region = Mockito.mock(Region.class);
    Mockito.doReturn(aws).when(region).aws();
    final Bucket bucket = Mockito.mock(Bucket.class);
    Mockito.doReturn(region).when(bucket).region();
    final Ocket ocket = new AwsOcket(bucket, "test-3.txt");
    // Non-ASCII plus control characters to exercise binary-safe writing.
    final String content = "text \u20ac\n\t\rtest";
    ocket.write(
        new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)),
        new ObjectMetadata()
    );
    Mockito.verify(aws).putObject(Mockito.any(PutObjectRequest.class));
}
/**
 * OverridingBundle can override a file in AWS S3.
 * @throws Exception If something is wrong
 */
@Test
public void overridesFileInAws() throws Exception {
    final String bucket = "some-bucket";
    final String key = "some-key";
    final File war = this.temp.newFile("temp.war");
    FileUtils.writeStringToFile(war, "broken JAR file content");
    final AmazonS3 client = Mockito.mock(AmazonS3.class);
    Mockito.doReturn(new PutObjectResult())
        .when(client).putObject(bucket, key, war);
    // An empty listing means the key does not exist yet, forcing an upload.
    Mockito.doReturn(new ObjectListing())
        .when(client).listObjects(Mockito.any(ListObjectsRequest.class));
    final Bundle bundle = new OverridingBundle(client, bucket, key, war);
    MatcherAssert.assertThat(
        bundle.name(),
        Matchers.equalTo(key)
    );
    MatcherAssert.assertThat(
        bundle.location().getS3Key(),
        Matchers.equalTo(key)
    );
}
/**
 * OverridingBundle caches result of location() method.
 * @throws Exception If something is wrong
 */
@Test
public void cachesResultOfLocation() throws Exception {
    final AmazonS3 client = Mockito.mock(AmazonS3.class);
    final File war = this.temp.newFile("temp1.war");
    FileUtils.writeStringToFile(war, "some JAR file content");
    final String bucket = "some-bucket-for-cache";
    final String key = "some-key-for-cache";
    Mockito.doReturn(new PutObjectResult())
        .when(client).putObject(bucket, key, war);
    Mockito.doReturn(new ObjectListing())
        .when(client).listObjects(Mockito.any(ListObjectsRequest.class));
    final Bundle bundle = new OverridingBundle(client, bucket, key, war);
    // Two calls, but the upload must happen exactly once.
    bundle.location();
    bundle.location();
    Mockito.verify(client, Mockito.times(1)).putObject(bucket, key, war);
}
/**
 * Uploads the named local file to the S3 bucket configured in {@code properties}
 * (creating the bucket if needed) and logs the resulting ETag.
 *
 * @param fileName path of the local file to upload; the object key is its base name
 */
private void copyToS3( String fileName ) {
    String bucketName = ( String ) properties.get( BUCKET_PROPNAME );
    String accessId = ( String ) properties.get( ACCESS_ID_PROPNAME );
    String secretKey = ( String ) properties.get( SECRET_KEY_PROPNAME );

    // The original built a jclouds override Properties and Guice MODULES set
    // here that were never used anywhere in the method; removed as dead code.
    AWSCredentials credentials = new BasicAWSCredentials(accessId, secretKey);
    ClientConfiguration clientConfig = new ClientConfiguration();
    // NOTE(review): plain HTTP sends content and signatures unencrypted;
    // consider Protocol.HTTPS unless the endpoint is local/test-only.
    clientConfig.setProtocol( Protocol.HTTP);
    AmazonS3Client s3Client = new AmazonS3Client(credentials, clientConfig);

    s3Client.createBucket( bucketName );
    File uploadFile = new File( fileName );
    PutObjectResult putObjectResult =
            s3Client.putObject( bucketName, uploadFile.getName(), uploadFile );
    logger.info("Uploaded file etag={}", putObjectResult.getETag());
}
@Override public PutObjectResult putObject(String bucketName, String key, File file) throws AmazonClientException, AmazonServiceException { throwException(putObjectException); List<String> keys = files.get(bucketName); if (keys == null) { throw new AmazonClientException("Bucket do not exist"); } keys.add(key); files.put(bucketName, keys); PutObjectResult result = new PutObjectResult(); try { result.setContentMd5(new String(Md5Utils.md5AsBase64(file))); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } return result; }
@Override public void save(String path, String fileName, byte[] bytes) { byte[] md5 = DigestUtils.md5(bytes); InputStream is = new ByteArrayInputStream(bytes); ObjectMetadata metadata = new ObjectMetadata(); metadata.setContentLength(bytes.length); //setting max-age to 15 days metadata.setCacheControl("max-age=1296000"); metadata.setContentMD5(new String(Base64.encodeBase64(md5))); PutObjectRequest request = new PutObjectRequest(bucketName, subPath + path + fileName, is, metadata); request.setCannedAcl(CannedAccessControlList.PublicRead); PutObjectResult result = s3Client.putObject(request); logger.debug("Etag:" + result.getETag() + "-->" + result); }
/**
 * Background task body: uploads the oldest queued camera image to S3.
 *
 * @return the queue row's serial on success, or null when the network rules
 *         skip the upload, the queue is empty, or the upload fails
 */
@Override
public Long doInBackground(Void... unused) {
    Log.d(TAG, "Upload Blob Task:");
    MainActivity.YassPreferences preferences = new MainActivity.YassPreferences(context);

    // Skip entirely when offline, or when the user restricted camera uploads
    // to Wifi and the active network is something else.
    ConnectivityManager cm =
            (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
    NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
    boolean isConnected = activeNetwork != null && activeNetwork.isConnectedOrConnecting();
    if (!isConnected) {
        Log.d(TAG, "Skipping camera upload because network is disconnected");
        return null;
    }
    if (preferences.cameraUploadOnlyOnWifi
            && activeNetwork.getType() != ConnectivityManager.TYPE_WIFI) {
        Log.d(TAG, "Skipping camera upload because Wifi is disconnected");
        return null;
    }

    long serial;
    Uri uri;
    String fileName;
    long fileSize;
    // Pull the single oldest pending row from the camera_uploads queue.
    SQLiteDatabase db = new YassDbHelper(context).getReadableDatabase();
    try {
        String[] projection = { "serial", "file_uri", "file_name", "file_size" };
        String selection = null;
        String[] selectionArgs = null;
        String groupBy = null;
        String having = null;
        String orderBy = "serial ASC";
        String limit = "1";
        Cursor cursor = db.query("camera_uploads", projection, selection, selectionArgs,
                groupBy, having, orderBy, limit);
        try {
            if (!cursor.moveToNext()) {
                Log.d(TAG, "Did not find image to upload");
                return null;
            }
            serial = cursor.getLong(cursor.getColumnIndexOrThrow("serial"));
            uri = Uri.parse(cursor.getString(cursor.getColumnIndexOrThrow("file_uri")));
            fileName = cursor.getString(cursor.getColumnIndexOrThrow("file_name"));
            fileSize = cursor.getLong(cursor.getColumnIndexOrThrow("file_size"));
        } finally {
            cursor.close();
        }
    } finally {
        db.close();
    }
    Log.d(TAG, "Found image to upload: " + fileName);

    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(fileSize);
    metadata.setContentType(context.getContentResolver().getType(uri));
    // NOTE(review): the PutObjectResult is never inspected; the serial is the
    // only success signal passed back to onPostExecute.
    PutObjectResult result;
    try (InputStream is = context.getContentResolver().openInputStream(uri)) {
        // TODO: limited to 5 GB
        result =
                MainActivity.getS3Client(preferences).putObject(preferences.bucketName,
                        "Camera Uploads/" + fileName, is, metadata);
    } catch (AmazonClientException | IOException e) {
        Log.e(TAG, "Could not upload file: " + e.getMessage());
        return null;
    }
    return serial;
}
/**
 * Streams the configured file to S3 under the configured bucket/key with a
 * public-read ACL. Content length is set explicitly so the SDK does not need
 * to buffer the stream to determine it.
 */
@Override
public void run() {
    ObjectMetadata meta_data = new ObjectMetadata();
    if (p_content_type != null) {
        meta_data.setContentType(p_content_type);
    }
    meta_data.setContentLength(p_size);
    PutObjectRequest putObjectRequest =
            new PutObjectRequest(p_bucket_name, p_s3_key, p_file_stream, meta_data);
    putObjectRequest.setCannedAcl(CannedAccessControlList.PublicRead);
    // The result (ETag etc.) was bound to an unused local; call for effect only.
    s3Client.putObject(putObjectRequest);
}
/**
 * Uploads the in-memory buffer as a single (non-multipart) PUT via the shared
 * executor, blocking until the upload completes.
 *
 * @throws IOException if the upload fails
 */
private void putObject() throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Executing regular upload for bucket '{}' key '{}'", bucket, key);
    }

    final ObjectMetadata om = createDefaultMetadata();
    om.setContentLength(buffer.size());

    final PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key,
            new ByteArrayInputStream(buffer.toByteArray()), om);
    putObjectRequest.setCannedAcl(cannedACL);
    putObjectRequest.setGeneralProgressListener(progressListener);

    // Submit to the executor, then block on the future so callers observe
    // synchronous semantics.
    ListenableFuture<PutObjectResult> putObjectResult =
            executorService.submit(new Callable<PutObjectResult>() {
                @Override
                public PutObjectResult call() throws Exception {
                    return client.putObject(putObjectRequest);
                }
            });
    //wait for completion
    try {
        putObjectResult.get();
    } catch (InterruptedException ie) {
        // Restore the interrupt flag. NOTE(review): the method then returns
        // normally, so the upload may not have completed — confirm callers
        // tolerate this best-effort behavior.
        LOG.warn("Interrupted object upload:" + ie, ie);
        Thread.currentThread().interrupt();
    } catch (ExecutionException ee) {
        // Unwrap so callers see the real upload failure as the cause.
        throw new IOException("Regular upload failed", ee.getCause());
    }
}
/**
 * Builds a PutObjectResult that mirrors every response-level field carried on
 * the given ObjectMetadata (version, SSE details, expiration, ETag, requester
 * charged) and retains the metadata itself.
 */
private static PutObjectResult createPutObjectResult(ObjectMetadata metadata) {
    final PutObjectResult mapped = new PutObjectResult();
    mapped.setVersionId(metadata.getVersionId());
    mapped.setSSEAlgorithm(metadata.getSSEAlgorithm());
    mapped.setSSECustomerAlgorithm(metadata.getSSECustomerAlgorithm());
    mapped.setSSECustomerKeyMd5(metadata.getSSECustomerKeyMd5());
    mapped.setExpirationTime(metadata.getExpirationTime());
    mapped.setExpirationTimeRuleId(metadata.getExpirationTimeRuleId());
    mapped.setETag(metadata.getETag());
    mapped.setMetadata(metadata);
    mapped.setRequesterCharged(metadata.isRequesterCharged());
    return mapped;
}
/**
 * Uploads the given request in a single chunk and returns the result.
 */
private UploadResult uploadInOneChunk() {
    final PutObjectResult response = s3.putObject(origReq);
    // Translate the SDK response into the TransferManager result type.
    final UploadResult outcome = new UploadResult();
    outcome.setBucketName(origReq.getBucketName());
    outcome.setKey(origReq.getKey());
    outcome.setETag(response.getETag());
    outcome.setVersionId(response.getVersionId());
    return outcome;
}
@Override public PutObjectResult putObjectSecurely(PutObjectRequest req) { // TODO: consider cloning req before proceeding further to reduce side // effects appendUserAgent(req, USER_AGENT); return cryptoConfig.getStorageMode() == InstructionFile ? putObjectUsingInstructionFile(req) : putObjectUsingMetadata(req); }
/**
 * Puts an encrypted object into S3, and puts an instruction file into S3.
 * Encryption info is stored in the instruction file.
 *
 * @param putObjectRequest
 *            The request object containing all the parameters to upload a
 *            new object to Amazon S3.
 * @return A {@link PutObjectResult} object containing the information
 *         returned by Amazon S3 for the new, created object.
 */
private PutObjectResult putObjectUsingInstructionFile(
        PutObjectRequest putObjectRequest) {
    // Keep references to the original data source: wrapWithCipher replaces
    // them on the request, but they still need to be closed afterwards.
    final File fileOrig = putObjectRequest.getFile();
    final InputStream isOrig = putObjectRequest.getInputStream();
    // Clone the request for the instruction file; it must carry no payload of
    // its own, and its key gets the instruction-file suffix.
    final PutObjectRequest putInstFileRequest = putObjectRequest.clone()
        .withFile(null)
        .withInputStream(null)
        ;
    putInstFileRequest.setKey(putInstFileRequest.getKey() + DOT
        + DEFAULT_INSTRUCTION_FILE_SUFFIX);
    // Create instruction
    ContentCryptoMaterial cekMaterial = createContentCryptoMaterial(putObjectRequest);
    // Wraps the object data with a cipher input stream; note the metadata
    // is mutated as a side effect.
    PutObjectRequest req = wrapWithCipher(putObjectRequest, cekMaterial);
    // Put the encrypted object into S3
    final PutObjectResult result;
    try {
        result = s3.putObject(req);
    } finally {
        // Release the original and cipher-wrapped sources on every path.
        cleanupDataSource(putObjectRequest, fileOrig, isOrig,
                req.getInputStream(), log);
    }
    // Put the instruction file into S3
    s3.putObject(updateInstructionPutRequest(putInstFileRequest, cekMaterial));
    // Return the result of the encrypted object PUT.
    return result;
}
/**
 * Delegates the secure put to the encryption-only or authenticated-encryption
 * module depending on the configured default crypto mode.
 */
@Override
public PutObjectResult putObjectSecurely(PutObjectRequest putObjectRequest) {
    if (defaultCryptoMode == EncryptionOnly) {
        return eo.putObjectSecurely(putObjectRequest);
    }
    return ae.putObjectSecurely(putObjectRequest);
}
/**
 * Delegates the instruction-file put to the encryption-only or
 * authenticated-encryption module depending on the default crypto mode.
 */
@Override
public PutObjectResult putInstructionFileSecurely(
        PutInstructionFileRequest req) {
    if (defaultCryptoMode == EncryptionOnly) {
        return eo.putInstructionFileSecurely(req);
    }
    return ae.putInstructionFileSecurely(req);
}