/**
 * Helper method that parses a JSON object from a resource on the classpath
 * as an instance of the provided type.
 *
 * @param resource
 *            the path to the resource (relative to this class)
 * @param clazz
 *            the type to parse the JSON into
 */
public static <T> T parse(String resource, Class<T> clazz) throws IOException {
    InputStream stream = TestUtils.class.getResourceAsStream(resource);
    try {
        if (clazz == S3Event.class) {
            String json = IOUtils.toString(stream);
            S3EventNotification event = S3EventNotification.parseJson(json);
            @SuppressWarnings("unchecked")
            T result = (T) new S3Event(event.getRecords());
            return result;
        } else if (clazz == SNSEvent.class) {
            return snsEventMapper.readValue(stream, clazz);
        } else if (clazz == DynamodbEvent.class) {
            return dynamodbEventMapper.readValue(stream, clazz);
        } else {
            return mapper.readValue(stream, clazz);
        }
    } finally {
        stream.close();
    }
}
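A minimal usage sketch of the helper above, assuming a hypothetical fixture file "/s3-event.json" on the test classpath; the fixture name and the accessed field are illustrative, not part of the original test suite.

// Sketch only: "/s3-event.json" is a hypothetical fixture path.
S3Event event = TestUtils.parse("/s3-event.json", S3Event.class);
// The parsed records can then be handed to the code under test.
String key = event.getRecords().get(0).getS3().getObject().getKey();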
private void writeLocalFile(final S3Object s3Object, final File file) {
    try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
        IOUtils.copy(s3Object.getObjectContent(), fileOutputStream);
    } catch (IOException ioException) {
        throw new SecretsLockerException(ioException);
    }
    file.deleteOnExit();
}
@Test
public void copyOneObject() throws Exception {
    client.putObject("source", "data", inputData);
    Path sourceBaseLocation = new Path("s3://source/");
    Path replicaLocation = new Path("s3://target/");
    List<Path> sourceSubLocations = new ArrayList<>();
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    Metrics metrics = s3s3Copier.copy();
    assertThat(metrics.getBytesReplicated(), is(7L));
    assertThat(metrics.getMetrics().get(S3S3CopierMetrics.Metrics.TOTAL_BYTES_TO_REPLICATE.name()), is(7L));
    S3Object object = client.getObject("target", "data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
    assertThat(registry.getGauges().containsKey(RunningMetrics.S3S3_CP_BYTES_REPLICATED.name()), is(true));
}
@Test
public void copyMultipleObjects() throws Exception {
    // Make sure we only request one file at a time so the copier has to loop.
    ListObjectsRequestFactory mockListObjectRequestFactory = Mockito.mock(ListObjectsRequestFactory.class);
    when(mockListObjectRequestFactory.newInstance()).thenReturn(new ListObjectsRequest().withMaxKeys(1));
    client.putObject("source", "bar/data1", inputData);
    client.putObject("source", "bar/data2", inputData);
    Path sourceBaseLocation = new Path("s3://source/bar/");
    Path replicaLocation = new Path("s3://target/foo/");
    List<Path> sourceSubLocations = new ArrayList<>();
    S3S3Copier s3s3Copier = new S3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation, s3ClientFactory,
        transferManagerFactory, mockListObjectRequestFactory, registry, s3S3CopierOptions);
    Metrics metrics = s3s3Copier.copy();
    assertThat(metrics.getBytesReplicated(), is(14L));
    S3Object object1 = client.getObject("target", "foo/data1");
    String data1 = IOUtils.toString(object1.getObjectContent());
    assertThat(data1, is("bar foo"));
    S3Object object2 = client.getObject("target", "foo/data2");
    String data2 = IOUtils.toString(object2.getObjectContent());
    assertThat(data2, is("bar foo"));
}
private SNSEvent createSnsEvent(final String githubEvent) {
    SNSEvent.SNS sns = new SNSEvent.SNS();
    sns.setMessageAttributes(new HashMap<String, SNSEvent.MessageAttribute>(1, 1) {
        {
            SNSEvent.MessageAttribute attr = new SNSEvent.MessageAttribute();
            attr.setValue(githubEvent);
            put("X-Github-Event", attr);
        }
    });
    try (InputStream is = getClass().getResourceAsStream("/github-push-payload.json")) {
        sns.setMessage(IOUtils.toString(is));
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    }
    SNSEvent.SNSRecord record = new SNSEvent.SNSRecord();
    record.setSns(sns);
    SNSEvent snsEvent = new SNSEvent();
    snsEvent.setRecords(Collections.singletonList(record));
    return snsEvent;
}
private byte[] fromStdin() {
    try {
        InputStream inputStream = System.in;
        BufferedReader inputReader = new BufferedReader(new InputStreamReader(inputStream));
        if (!inputReader.ready()) {
            // Interactive
            char[] secretValue = System.console().readPassword("Enter secret value:");
            if (secretValue == null) {
                throw new IllegalArgumentException("A secret value must be specified");
            }
            return asBytes(secretValue);
        } else {
            // Piped in
            return IOUtils.toByteArray(inputStream);
        }
    } catch (IOException e) {
        throw new RuntimeException("Failed to read secret value from stdin", e);
    }
}
private void handleErrorResponse(InputStream errorStream, int statusCode, String responseMessage) throws IOException {
    String errorCode = null;

    // Parse the error stream returned from the service.
    if (errorStream != null) {
        String errorResponse = IOUtils.toString(errorStream);
        try {
            JsonNode node = Jackson.jsonNodeOf(errorResponse);
            JsonNode code = node.get("code");
            JsonNode message = node.get("message");
            if (code != null && message != null) {
                errorCode = code.asText();
                responseMessage = message.asText();
            }
        } catch (Exception exception) {
            LOG.debug("Unable to parse error stream");
        }
    }

    AmazonServiceException ase = new AmazonServiceException(responseMessage);
    ase.setStatusCode(statusCode);
    ase.setErrorCode(errorCode);
    throw ase;
}
/**
 * Reads to the end of the inputStream returning a byte array of the contents
 *
 * @param inputStream
 *            InputStream to drain
 * @return Remaining data in stream as a byte array
 */
public static byte[] drainInputStream(InputStream inputStream) {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    try {
        byte[] buffer = new byte[1024];
        long bytesRead = 0;
        while ((bytesRead = inputStream.read(buffer)) > -1) {
            byteArrayOutputStream.write(buffer, 0, (int) bytesRead);
        }
        return byteArrayOutputStream.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(byteArrayOutputStream, null);
    }
}
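A hedged usage sketch of the drain helper above; the S3 client, bucket, and key names are illustrative assumptions, not part of the original code.

// Illustrative only: reads a (small) S3 object fully into memory with the helper above.
byte[] readSmallObject(AmazonS3 s3Client) throws IOException {
    S3Object smallObject = s3Client.getObject("my-bucket", "config/settings.json"); // hypothetical bucket/key
    try (InputStream content = smallObject.getObjectContent()) {
        return drainInputStream(content);
    }
}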
/**
 * Used for performance testing purposes only.
 */
private void putLocalObject(final UploadObjectRequest reqIn, OutputStream os) throws IOException {
    UploadObjectRequest req = reqIn.clone();
    final File fileOrig = req.getFile();
    final InputStream isOrig = req.getInputStream();
    if (isOrig == null) {
        if (fileOrig == null)
            throw new IllegalArgumentException("Either a file or input stream must be specified");
        req.setInputStream(new FileInputStream(fileOrig));
        req.setFile(null);
    }
    try {
        IOUtils.copy(req.getInputStream(), os);
    } finally {
        cleanupDataSource(req, fileOrig, isOrig, req.getInputStream(), log);
        IOUtils.closeQuietly(os, log);
    }
}
/**
 * {@inheritDoc}
 *
 * Delegates to {@link S3ObjectInputStream#abort()} if there is any data
 * remaining in the stream. Otherwise, it safely closes the stream.
 *
 * @see {@link S3ObjectInputStream#abort()}
 */
@Override
public void close() throws IOException {
    if (bytesRead >= contentLength || eofReached) {
        super.close();
    } else {
        LOG.warn("Not all bytes were read from the S3ObjectInputStream, aborting HTTP connection. This is likely an error and "
            + "may result in sub-optimal behavior. Request only the bytes you need via a ranged GET or drain the input "
            + "stream after use.");
        if (httpRequest != null) {
            httpRequest.abort();
        }
        IOUtils.closeQuietly(in, null);
    }
}
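The warning above offers callers two remedies; here is a minimal sketch of the "drain before close" option, assuming an AWS SDK v1 AmazonS3 client. The bucket, key, and processHeader consumer are placeholders for illustration.

// Sketch only: drain the remaining body so the connection can be reused instead of aborted.
S3Object object = s3.getObject("example-bucket", "example-key"); // placeholder bucket/key
try (S3ObjectInputStream content = object.getObjectContent()) {
    byte[] header = new byte[16];
    int headerBytes = content.read(header);                // read only the bytes that are needed...
    processHeader(header, headerBytes);                    // hypothetical consumer of those bytes
    com.amazonaws.util.IOUtils.drainInputStream(content);  // ...then drain the rest before close()
}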
@Override
public void close() throws IOException {
    // If not already closed, then close the input stream.
    if (!this.closed) {
        this.closed = true;
        // If the user read to the end of the virtual stream, then drain
        // the wrapped stream so the HTTP client can keep this connection
        // alive if possible.
        // This should not have too much overhead since, if we've reached the
        // end of the virtual stream, there should be at most 31 bytes left
        // (2 * JceEncryptionConstants.SYMMETRIC_CIPHER_BLOCK_SIZE - 1) in the
        // stream.
        // See: S3CryptoModuleBase#getCipherBlockUpperBound
        if (this.virtualAvailable == 0) {
            IOUtils.drainInputStream(decryptedContents);
        }
        this.decryptedContents.close();
    }
    abortIfNeeded();
}
@Override
public final void putLocalObjectSecurely(final UploadObjectRequest reqIn, String uploadId, OutputStream os)
        throws IOException {
    UploadObjectRequest req = reqIn.clone();
    final File fileOrig = req.getFile();
    final InputStream isOrig = req.getInputStream();
    final T uploadContext = multipartUploadContexts.get(uploadId);
    ContentCryptoMaterial cekMaterial = uploadContext.getContentCryptoMaterial();
    req = wrapWithCipher(req, cekMaterial);
    try {
        IOUtils.copy(req.getInputStream(), os);
        // so it won't fail with a false negative at the end (not really relevant here)
        uploadContext.setHasFinalPartBeenSeen(true);
    } finally {
        cleanupDataSource(req, fileOrig, isOrig, req.getInputStream(), log);
        IOUtils.closeQuietly(os, log);
    }
}
/**
 * Seeks and downloads the index for the given BAM URI. If no custom index URI
 * is specified, it looks for an index file with the same name in the BAM directory.
 *
 * @param bamURI an http address of the required file.
 * @return an Optional containing a SeekableStream over the index contents,
 *         or empty if no index was found
 */
Optional<SeekableStream> loadIndex(AmazonS3URI bamURI) throws IOException {
    LOG.info("Trying to set index file for " + bamURI.toString());
    Optional<AmazonS3URI> index = providedIndexURI()
        .map(Optional::of)
        .orElseGet(() -> nearbyIndexURI(bamURI));
    if (!index.isPresent()) {
        LOG.info("Index wasn't provided for " + bamURI.toString());
        return Optional.empty();
    }
    LOG.info("Start download index: " + index.get());
    AmazonS3URI indexURI = index.get();
    S3InputStreamFactory streamFactory = new S3InputStreamFactory(client);
    InputStream stream = streamFactory.loadFully(indexURI);
    long fileSize = client.getFileSize(indexURI);
    byte[] buffer = IOUtils.toByteArray(stream);
    if (fileSize != buffer.length) {
        throw new IOException("Failed to fully download index " + indexURI);
    }
    LOG.info("Finished download index: " + index.get());
    return Optional.of(new SeekableMemoryStream(buffer, indexURI.toString()));
}
private static void escrowDecrypt(final String fileName) throws Exception {
    // You can decrypt the stream using only the private key.
    // This method does not call KMS.

    // 1. Instantiate the SDK
    final AwsCrypto crypto = new AwsCrypto();

    // 2. Instantiate a JCE master key provider
    // This method call uses the escrowed private key, not null
    final JceMasterKey escrowPriv = JceMasterKey.getInstance(publicEscrowKey, privateEscrowKey, "Escrow", "Escrow",
        "RSA/ECB/OAEPWithSHA-512AndMGF1Padding");

    // 3. Decrypt the file
    // To simplify the code, we omit the encryption context. Production code should always
    // use an encryption context. For an example, see the other SDK samples.
    final FileInputStream in = new FileInputStream(fileName + ".encrypted");
    final FileOutputStream out = new FileOutputStream(fileName + ".deescrowed");
    final CryptoOutputStream<?> decryptingStream = crypto.createDecryptingStream(escrowPriv, out);
    IOUtils.copy(in, decryptingStream);
    in.close();
    decryptingStream.close();
}
@Override
protected Profile getBaseProfile(String name, String version, String outputFile) {
    try {
        return new Profile(name, version, outputFile,
            IOUtils.toString(profileRegistry.readProfile(getArtifact().getName(), version, name)));
    } catch (RetrofitError | IOException e) {
        throw new HalException(
            new ConfigProblemBuilder(FATAL, "Unable to retrieve profile \"" + name + "\": " + e.getMessage()).build(),
            e);
    }
}
/**
 * Helper method that parses a JSON object from a resource on the classpath
 * as an instance of the provided type.
 *
 * @param resource the path to the resource (relative to this class)
 * @param clazz the type to parse the JSON into
 */
public static <T> T parse(String resource, Class<T> clazz) throws IOException {
    InputStream stream = TestUtils.class.getResourceAsStream(resource);
    try {
        if (clazz == S3Event.class) {
            String json = IOUtils.toString(stream);
            S3EventNotification event = S3EventNotification.parseJson(json);
            @SuppressWarnings("unchecked")
            T result = (T) new S3Event(event.getRecords());
            return result;
        } else {
            return mapper.readValue(stream, clazz);
        }
    } finally {
        stream.close();
    }
}
@RequestMapping(value = "/image/view/{userUid}", method = RequestMethod.GET) public ResponseEntity<byte[]> viewProfileImage(@PathVariable String userUid) { try { String imageKey = userProfileImagesFolder + "/" + userUid; MediaFunction mediaFunction = MediaFunction.USER_PROFILE_IMAGE; MediaFileRecord mediaFileRecord = mediaFileBroker.load(mediaFunction, imageKey); byte[] data; if (mediaFileRecord != null) { File imageFile = storageBroker.fetchFileFromRecord(mediaFileRecord); data = IOUtils.toByteArray(new FileInputStream(imageFile)); } else { InputStream in = getClass().getResourceAsStream("/static/images/user.png"); data = IOUtils.toByteArray(in); } HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.IMAGE_PNG); ResponseEntity<byte[]> response = new ResponseEntity(data, headers, HttpStatus.OK); return response; } catch (Exception e) { log.error("Failed to fetch user profile image for user with uid: " + userUid, e); return new ResponseEntity(null, HttpStatus.INTERNAL_SERVER_ERROR); } }
@Override
public DynamoSessionItem toSessionItem(Session session) {
    ObjectOutputStream oos = null;
    try {
        ByteArrayOutputStream fos = new ByteArrayOutputStream();
        oos = new ObjectOutputStream(fos);
        ((StandardSession) session).writeObjectData(oos);
        oos.close();
        DynamoSessionItem sessionItem = new DynamoSessionItem(session.getIdInternal());
        sessionItem.setSessionData(ByteBuffer.wrap(fos.toByteArray()));
        return sessionItem;
    } catch (Exception e) {
        IOUtils.closeQuietly(oos, null);
        throw new SessionConversionException("Unable to convert Tomcat Session into Dynamo storage representation", e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public void decryptFile(final String encryptedFilename, final String decryptedFilename) {
    final KmsMasterKeyProvider provider = new KmsMasterKeyProvider(new DefaultAWSCredentialsProviderChain());
    final AwsCrypto awsCrypto = new AwsCrypto();
    try (final FileInputStream fileInputStream = new FileInputStream(encryptedFilename);
         final FileOutputStream fileOutputStream = new FileOutputStream(decryptedFilename);
         final CryptoInputStream<?> decryptingStream = awsCrypto.createDecryptingStream(provider, fileInputStream)) {
        IOUtils.copy(decryptingStream, fileOutputStream);
    } catch (IOException exception) {
        throw new DecryptionException(exception);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public String decryptFile(final String encryptedFilename) {
    final KmsMasterKeyProvider provider = new KmsMasterKeyProvider(new DefaultAWSCredentialsProviderChain());
    final AwsCrypto awsCrypto = new AwsCrypto();
    try (final FileInputStream fileInputStream = new FileInputStream(encryptedFilename);
         final CryptoInputStream<?> decryptingStream = awsCrypto.createDecryptingStream(provider, fileInputStream)) {
        return IOUtils.toString(decryptingStream);
    } catch (IOException exception) {
        throw new DecryptionException(exception);
    }
}
/**
 * Reads a file from S3 into a String object.
 *
 * @param s3Uri the S3 URI of the file (e.g. s3://bucket/file.ext)
 * @return String containing the content of the file in S3
 * @throws IOException if there is an error reading the file
 */
public String readFileFromS3(String s3Uri) throws IOException {
    AmazonS3URI s3FileUri = new AmazonS3URI(s3Uri);
    S3Object s3object = amazonS3Client.getObject(new GetObjectRequest(s3FileUri.getBucket(), s3FileUri.getKey()));
    return IOUtils.toString(s3object.getObjectContent());
}
@Test
public void copyOneObjectUsingKeys() throws Exception {
    client.putObject("source", "bar/data", inputData);
    Path sourceBaseLocation = new Path("s3://source/bar/");
    Path replicaLocation = new Path("s3://target/foo/");
    List<Path> sourceSubLocations = new ArrayList<>();
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    s3s3Copier.copy();
    S3Object object = client.getObject("target", "foo/data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
}
@Test
public void copyOneObjectPartitioned() throws Exception {
    client.putObject("source", "year=2016/data", inputData);
    Path sourceBaseLocation = new Path("s3://source/");
    Path replicaLocation = new Path("s3://target/foo/");
    List<Path> sourceSubLocations = Lists.newArrayList(new Path(sourceBaseLocation, "year=2016"));
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    Metrics metrics = s3s3Copier.copy();
    assertThat(metrics.getBytesReplicated(), is(7L));
    S3Object object = client.getObject("target", "foo/year=2016/data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
}
@Test
public void copyOneObjectPartitionedSourceBaseNested() throws Exception {
    client.putObject("source", "nested/year=2016/data", inputData);
    Path sourceBaseLocation = new Path("s3://source/nested"); // no slash at the end
    Path replicaLocation = new Path("s3://target/foo/");
    List<Path> sourceSubLocations = Lists.newArrayList(new Path(sourceBaseLocation, "year=2016"));
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    s3s3Copier.copy();
    S3Object object = client.getObject("target", "foo/year=2016/data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
}
@Test
public void copyOneObjectPartitionedHandlingS3ASchemes() throws Exception {
    client.putObject("source", "year=2016/data", inputData);
    Path sourceBaseLocation = new Path("s3a://source/");
    Path replicaLocation = new Path("s3a://target/foo/");
    List<Path> sourceSubLocations = Lists.newArrayList(new Path(sourceBaseLocation, "year=2016"));
    S3S3Copier s3s3Copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);
    s3s3Copier.copy();
    S3Object object = client.getObject("target", "foo/year=2016/data");
    String data = IOUtils.toString(object.getObjectContent());
    assertThat(data, is("bar foo"));
}
public static ArrayList<File> unzip(InputStream fin, String location) {
    dirChecker("", location);
    ArrayList<File> savedFiles = new ArrayList<>();
    try {
        ZipInputStream zin = new ZipInputStream(fin);
        ZipEntry entry;
        while ((entry = zin.getNextEntry()) != null) {
            File file = new File(location + "/" + entry.getName());
            savedFiles.add(file);
            if (file.exists())
                continue;
            if (entry.isDirectory()) {
                dirChecker(entry.getName(), location);
            } else {
                FileOutputStream fOut = new FileOutputStream(file);
                fOut.write(IOUtils.toByteArray(zin));
                zin.closeEntry();
                fOut.close();
            }
        }
        zin.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return savedFiles;
}
public SecureShellAuthentication(Bucket bucket, AmazonS3 client) {
    factory = new JschConfigSessionFactory() {
        @Override
        public synchronized RemoteSession getSession(URIish uri, CredentialsProvider credentialsProvider, FS fs, int tms)
                throws TransportException {
            // Do not check for default ssh user config
            fs.setUserHome(null);
            return super.getSession(uri, credentialsProvider, fs, tms);
        }

        @Override
        protected void configure(OpenSshConfig.Host host, Session session) {
            session.setConfig("HashKnownHosts", "no");
            if ("localhost".equalsIgnoreCase(host.getHostName())) {
                session.setConfig("StrictHostKeyChecking", "no");
            }
        }

        @Override
        protected void configureJSch(JSch jsch) {
            S3Object file;
            file = client.getObject(bucket.getName(), ".ssh/known_hosts");
            try (InputStream is = file.getObjectContent()) {
                jsch.setKnownHosts(is);
            } catch (IOException | JSchException e) {
                throw new IllegalArgumentException("Missing known hosts file on s3: .ssh/known_hosts", e);
            }
            file = client.getObject(bucket.getName(), ".ssh/id_rsa");
            try (InputStream is = file.getObjectContent()) {
                jsch.addIdentity("git", IOUtils.toByteArray(is), null, new byte[0]);
            } catch (IOException | JSchException e) {
                throw new IllegalArgumentException("Missing key file on s3: .ssh/id_rsa", e);
            }
        }
    };
}
private boolean walk(Iterator<S3ObjectSummary> iter, ObjectId file, String path) throws IOException {
    byte[] content;
    byte[] newHash;
    LOG.debug("Start processing file: {}", path);
    try (DigestInputStream is = new DigestInputStream(repository.open(file).openStream(), DigestUtils.getMd5Digest())) {
        // Get content
        content = IOUtils.toByteArray(is);
        // Get hash
        newHash = is.getMessageDigest().digest();
    }
    if (isUploadFile(iter, path, Hex.encodeHexString(newHash))) {
        LOG.info("Uploading file: {}", path);
        ObjectMetadata bucketMetadata = new ObjectMetadata();
        bucketMetadata.setContentMD5(Base64.encodeAsString(newHash));
        bucketMetadata.setContentLength(content.length);
        // Give Tika a few hints for the content detection
        Metadata tikaMetadata = new Metadata();
        tikaMetadata.set(Metadata.RESOURCE_NAME_KEY, FilenameUtils.getName(FilenameUtils.normalize(path)));
        // Fire!
        try (InputStream bis = TikaInputStream.get(content, tikaMetadata)) {
            bucketMetadata.setContentType(TIKA_DETECTOR.detect(bis, tikaMetadata).toString());
            s3.putObject(bucket.getName(), path, bis, bucketMetadata);
            return true;
        }
    }
    LOG.info("Skipping file (same checksum): {}", path);
    return false;
}
private byte[] readJson(InputStream jsonStream) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        IOUtils.copy(jsonStream, baos);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
    return baos.toByteArray();
}
private Partitions loadPartitionFromStream(InputStream stream, String location) {
    try {
        return mapper.readValue(stream, Partitions.class);
    } catch (IOException e) {
        throw new SdkClientException("Error while loading partitions file from " + location, e);
    } finally {
        IOUtils.closeQuietly(stream, null);
    }
}
/**
 * Static factory method to create a JsonContent object from the contents of the
 * HttpResponse provided.
 */
public static JsonContent createJsonContent(HttpResponse httpResponse, JsonFactory jsonFactory) {
    byte[] rawJsonContent = null;
    try {
        if (httpResponse.getContent() != null) {
            rawJsonContent = IOUtils.toByteArray(httpResponse.getContent());
        }
    } catch (Exception e) {
        LOG.info("Unable to read HTTP response content", e);
    }
    return new JsonContent(rawJsonContent,
        new ObjectMapper(jsonFactory).configure(JsonParser.Feature.ALLOW_COMMENTS, true));
}
private RegionMetadata loadOverrideMetadataIfExists() {
    RegionMetadata metadata = loadFromSystemProperty();
    if (metadata == null) {
        InputStream override = RegionUtils.class.getResourceAsStream(OVERRIDE_ENDPOINTS_RESOURCE_PATH);
        if (override != null) {
            metadata = loadFromStream(override);
            IOUtils.closeQuietly(override, LOG);
        }
    }
    return metadata;
}
private String contentToString(InputStream content, String idString) throws Exception {
    try {
        return IOUtils.toString(content);
    } catch (Exception e) {
        log.info(String.format("Unable to read input stream to string (%s)", idString), e);
        throw e;
    }
}
@Override
public final void release() {
    // Don't call IOUtils.release(in, null), or else it could lead to an infinite loop
    IOUtils.closeQuietly(this, null);
    if (out instanceof Releasable) {
        // This allows any underlying stream that has the close operation
        // disabled to be truly released
        Releasable r = (Releasable) out;
        r.release();
    }
}
@Override
public String parseErrorCode(HttpResponse response, JsonContent jsonContents) {
    IonReader reader = ionSystem.newReader(jsonContents.getRawContent());
    try {
        IonType type = reader.next();
        if (type != IonType.STRUCT) {
            throw new SdkClientException(String.format("Can only get error codes from structs (saw %s), request id %s",
                type, getRequestId(response)));
        }
        boolean errorCodeSeen = false;
        String errorCode = null;
        String[] annotations = reader.getTypeAnnotations();
        for (String annotation : annotations) {
            if (annotation.startsWith(TYPE_PREFIX)) {
                if (errorCodeSeen) {
                    throw new SdkClientException(String.format("Multiple error code annotations found for request id %s",
                        getRequestId(response)));
                } else {
                    errorCodeSeen = true;
                    errorCode = annotation.substring(TYPE_PREFIX.length());
                }
            }
        }
        return errorCode;
    } finally {
        IOUtils.closeQuietly(reader, log);
    }
}
/**
 * Connects to the given endpoint to read the resource
 * and returns the text contents.
 *
 * @param endpoint
 *            The service endpoint to connect to.
 * @param retryPolicy
 *            The custom retry policy that determines whether a
 *            failed request should be retried or not.
 *
 * @return The text payload returned from the Amazon EC2 endpoint
 *         service for the specified resource path.
 *
 * @throws IOException
 *             If any problems were encountered while connecting to the
 *             service for the requested resource path.
 * @throws SdkClientException
 *             If the requested service is not found.
 */
public String readResource(URI endpoint, CredentialsEndpointRetryPolicy retryPolicy) throws IOException {
    int retriesAttempted = 0;
    InputStream inputStream = null;

    while (true) {
        try {
            HttpURLConnection connection = connectionUtils.connectToEndpoint(endpoint);
            int statusCode = connection.getResponseCode();

            if (statusCode == HttpURLConnection.HTTP_OK) {
                inputStream = connection.getInputStream();
                return IOUtils.toString(inputStream);
            } else if (statusCode == HttpURLConnection.HTTP_NOT_FOUND) {
                // This is to preserve the existing behavior of the EC2 instance metadata service.
                throw new SdkClientException("The requested metadata is not found at " + connection.getURL());
            } else {
                if (!retryPolicy.shouldRetry(retriesAttempted++,
                        CredentialsEndpointRetryParameters.builder().withStatusCode(statusCode).build())) {
                    inputStream = connection.getErrorStream();
                    handleErrorResponse(inputStream, statusCode, connection.getResponseMessage());
                }
            }
        } catch (IOException ioException) {
            if (!retryPolicy.shouldRetry(retriesAttempted++,
                    CredentialsEndpointRetryParameters.builder().withException(ioException).build())) {
                throw ioException;
            }
            LOG.debug("An IOException occurred when connecting to service endpoint: " + endpoint
                + "\n Retrying to connect again.");
        } finally {
            IOUtils.closeQuietly(inputStream, LOG);
        }
    }
}
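The retry policy above is only consulted through shouldRetry(retriesAttempted, parameters). A minimal, hypothetical policy that simply caps the number of attempts might look like the sketch below; the class name and the limit of 3 are assumptions, not part of the original code.

// Hypothetical sketch: retry up to a fixed number of attempts, regardless of the cause.
public class MaxAttemptsRetryPolicy implements CredentialsEndpointRetryPolicy {
    private static final int MAX_RETRIES = 3; // assumed limit, for illustration only

    @Override
    public boolean shouldRetry(int retriesAttempted, CredentialsEndpointRetryParameters retryParams) {
        return retriesAttempted < MAX_RETRIES;
    }
}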
/**
 * WARNING: A subclass that overrides this method must NOT call
 * super.release(), or else it would lead to an infinite loop.
 * <p>
 * {@inheritDoc}
 */
@Override
public void release() {
    // Don't call IOUtils.release(in, null), or else it could lead to an infinite loop
    IOUtils.closeQuietly(this, null);
    InputStream in = getWrappedInputStream();
    if (in instanceof Releasable) {
        // This allows any underlying stream that has the close operation
        // disabled to be truly released
        Releasable r = (Releasable) in;
        r.release();
    }
}
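Both release() overrides above assume a wrapped stream that implements Releasable and may have close() disabled. A hypothetical minimal example of such a wrapper follows; the class name and behavior are illustrative assumptions, not an SDK type.

// Illustrative sketch of a stream whose close() is a no-op but which can still be
// released explicitly. Assumes java.io.FilterInputStream and the Releasable interface
// shown above (a single release() method).
public class NonClosingReleasableInputStream extends FilterInputStream implements Releasable {

    public NonClosingReleasableInputStream(InputStream delegate) {
        super(delegate);
    }

    @Override
    public void close() {
        // Intentionally disabled; callers must use release() to free the underlying stream.
    }

    @Override
    public void release() {
        try {
            super.close(); // actually closes the wrapped stream
        } catch (IOException ignored) {
            // best-effort release
        }
    }
}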
public static HttpResponseHandler<AmazonWebServiceResponse<String>> stringResponseHandler() {
    return responseHandler(new FunctionWithException<HttpResponse, String>() {
        @Override
        public String apply(HttpResponse in) throws Exception {
            return IOUtils.toString(in.getContent());
        }
    });
}