private void start() throws IOException {
    //this.port = findFreeLocalPort(); @see https://github.com/findify/sqsmock/pull/7
    this.api = new SQSService(this.port, 1);
    this.api.start();
    AWSCredentials credentials = new AnonymousAWSCredentials();
    this.sqsClient = new MockSQSClient(credentials);
    this.endpoint = String.format("http://localhost:%s", this.port);
    this.sqsClient.setEndpoint(endpoint);
    this.sqsUrl = this.sqsClient.createQueue(this.getClass().getSimpleName()).getQueueUrl();
    ((MockSQSClient) this.sqsClient).setQueueUrl(this.sqsUrl);
    this.started = true;
}
@Override
public AmazonS3 newS3Client(S3SinkConnectorConfig config) {
    final AWSCredentialsProvider provider = new AWSCredentialsProvider() {
        private final AnonymousAWSCredentials credentials = new AnonymousAWSCredentials();

        @Override
        public AWSCredentials getCredentials() {
            return credentials;
        }

        @Override
        public void refresh() {
        }
    };

    AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard()
        .withAccelerateModeEnabled(config.getBoolean(S3SinkConnectorConfig.WAN_MODE_CONFIG))
        .withPathStyleAccessEnabled(true)
        .withCredentials(provider);

    builder = url == null
        ? builder.withRegion(config.getString(S3SinkConnectorConfig.REGION_CONFIG))
        : builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(url, ""));

    return builder.build();
}
private static AWSRequestSigningApacheInterceptor createInterceptor() {
    AWSCredentialsProvider anonymousCredentialsProvider =
        new AWSStaticCredentialsProvider(new AnonymousAWSCredentials());
    return new AWSRequestSigningApacheInterceptor("servicename",
        new AddHeaderSigner("Signature", "wuzzle"),
        anonymousCredentialsProvider);
}
private AmazonS3 configureAWS() {
    ClientConfiguration configuration = new ClientConfiguration()
        .withMaxConnections(Configuration.getNumberOfConnections())
        .withMaxErrorRetry(MAX_RETRY)
        .withConnectionTimeout(TIMEOUT)
        .withSocketTimeout(TIMEOUT)
        .withTcpKeepAlive(true);

    if (credentialsExist()) {
        return new AmazonS3Client(providerChain, configuration);
    } else {
        return new AmazonS3Client(new AnonymousAWSCredentials(), configuration);
    }
}
static AWSLogs createLogsClient(CloudwatchLogsConfig config) {
    AWSLogsClientBuilder builder = AWSLogsClientBuilder.standard();
    if (config.getEndpoint() != null) {
        // Non-AWS mock endpoint
        builder.setCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()));
        builder.setEndpointConfiguration(
            new AwsClientBuilder.EndpointConfiguration(config.getEndpoint(), config.getRegion()));
    } else {
        builder.setRegion(config.getRegion());
    }
    return builder.build();
}
@Before
public void setup() throws Exception {
    sender = KinesisSender.builder()
        .streamName("test")
        .endpointConfiguration(new EndpointConfiguration(server.url("/").toString(), "us-east-1"))
        .credentialsProvider(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
        .build();
}
public AWSAnonInterface() {
    String weatherPipeBinaryPath =
        WeatherPipe.class.getProtectionDomain().getCodeSource().getLocation().getPath();
    String log4jConfPath =
        weatherPipeBinaryPath.substring(0, weatherPipeBinaryPath.lastIndexOf("/")) + "/log4j.properties";
    PropertyConfigurator.configure(log4jConfPath);

    AWSCredentials creds = new AnonymousAWSCredentials();
    s3AnonClient = new AmazonS3Client(creds);
}
@Test
@PrepareForTest(TerrapinUtil.class)
public void testGetS3FileList() throws Exception {
    AmazonS3Client s3Client = mock(AmazonS3Client.class);
    ObjectListing objectListing = mock(ObjectListing.class);

    S3ObjectSummary summary1 = new S3ObjectSummary();
    S3ObjectSummary summary2 = new S3ObjectSummary();
    S3ObjectSummary summary3 = new S3ObjectSummary();
    summary1.setKey("/abc/123");
    summary2.setKey("/abc/456");
    summary3.setKey("/def/123");
    summary1.setSize(32432);
    summary2.setSize(213423);
    summary3.setSize(2334);
    List<S3ObjectSummary> summaries = ImmutableList.of(summary1, summary2, summary3);

    whenNew(AmazonS3Client.class).withAnyArguments().thenReturn(s3Client);
    when(s3Client.listObjects(any(ListObjectsRequest.class))).thenReturn(objectListing);
    when(objectListing.getObjectSummaries()).thenReturn(summaries);

    List<Pair<Path, Long>> results =
        TerrapinUtil.getS3FileList(new AnonymousAWSCredentials(), "bucket", "/abc");

    assertEquals(2, results.size());
    assertTrue(results.get(0).getLeft().toString().endsWith(summary1.getKey()));
    assertEquals(new Long(summary1.getSize()), results.get(0).getRight());
    assertTrue(results.get(1).getLeft().toString().endsWith(summary2.getKey()));
    assertEquals(new Long(summary2.getSize()), results.get(1).getRight());
}
@Bean
public AmazonS3Provider amazonS3Provider() {
    return () -> {
        final AmazonS3Client client = new AmazonS3Client(new AnonymousAWSCredentials());
        client.setEndpoint("http://127.0.0.1:" + s3MockPort);
        client.createBucket("s3-content-service1");
        client.createBucket("s3-content-service2");
        // Amazon S3 reads region from endpoint (127.0.0.1...)
        return new AmazonS3TestWrapper(client);
    };
}
@Before
public void setUp() throws Exception {
    TestUtils.S3ProxyLaunchInfo info = TestUtils.startS3Proxy("s3proxy-anonymous.conf");
    awsCreds = new AnonymousAWSCredentials();
    context = info.getBlobStore().getContext();
    s3Proxy = info.getS3Proxy();
    s3Endpoint = info.getSecureEndpoint();
    servicePath = info.getServicePath();
    s3EndpointConfig = new EndpointConfiguration(
        s3Endpoint.toString() + servicePath, "us-east-1");
    client = AmazonS3ClientBuilder.standard()
        .withCredentials(new AWSStaticCredentialsProvider(awsCreds))
        .withEndpointConfiguration(s3EndpointConfig)
        .build();

    containerName = createRandomContainerName();
    info.getBlobStore().createContainerInLocation(null, containerName);

    blobStoreType = context.unwrap().getProviderMetadata().getId();
    if (Quirks.OPAQUE_ETAG.contains(blobStoreType)) {
        System.setProperty(
            SkipMd5CheckStrategy.DISABLE_GET_OBJECT_MD5_VALIDATION_PROPERTY, "true");
        System.setProperty(
            SkipMd5CheckStrategy.DISABLE_PUT_OBJECT_MD5_VALIDATION_PROPERTY, "true");
    }
}
@Test
public void testStartTwoServers() throws Exception {
    S3Server instanceA = null;
    S3Server instanceB = null;
    AmazonS3Client client = null;
    try {
        instanceA = S3Server.createHttpServer();
        instanceB = S3Server.createHttpServer();
        instanceA.start();
        instanceB.start();

        client = new AmazonS3Client(new StaticCredentialsProvider(new AnonymousAWSCredentials()));
        client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
        client.setEndpoint(instanceA.getAddress());
        BasicTestSuperclass.createDefaultBucket(client);

        S3Object response = client.getObject("bucketname", "asdf.txt");
        String content = inputStreamToString(response.getObjectContent());
        assertEquals("asdf", content);
        assertFalse(instanceA.getAddress().equals(instanceB.getAddress()));
    } finally {
        if (client != null) client.shutdown();
        if (instanceA != null) instanceA.stop();
        if (instanceB != null) instanceB.stop();
    }
}
@Test
public void testStartTwoHttpsServers() throws Exception {
    S3Server instanceA = null;
    S3Server instanceB = null;
    AmazonS3Client client = null;
    try {
        instanceA = S3Server.createHttpsServer(
            MultipleRequestTest.class.getResourceAsStream("/keystore.jks"),
            "password".toCharArray());
        instanceB = S3Server.createHttpsServer(
            MultipleRequestTest.class.getResourceAsStream("/keystore.jks"),
            "password".toCharArray());
        instanceA.start();
        instanceB.start();

        client = new AmazonS3Client(new StaticCredentialsProvider(new AnonymousAWSCredentials()));
        client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
        client.setEndpoint(instanceA.getAddress());
        BasicTestSuperclass.createDefaultBucket(client);

        S3Object response = client.getObject("bucketname", "asdf.txt");
        String content = inputStreamToString(response.getObjectContent());
        assertEquals("asdf", content);
        assertFalse(instanceA.getAddress().equals(instanceB.getAddress()));
    } finally {
        if (client != null) client.shutdown();
        if (instanceA != null) instanceA.stop();
        if (instanceB != null) instanceB.stop();
    }
}
@Test
public void testStartBothHttpsAndHttpServers() throws Exception {
    S3Server instanceA = null;
    S3Server instanceB = null;
    AmazonS3Client client = null;
    try {
        instanceA = S3Server.createHttpsServer(
            MultipleRequestTest.class.getResourceAsStream("/keystore.jks"),
            "password".toCharArray());
        instanceB = S3Server.createHttpServer();
        instanceA.start();
        instanceB.start();

        client = new AmazonS3Client(new StaticCredentialsProvider(new AnonymousAWSCredentials()));
        client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
        client.setEndpoint(instanceA.getAddress());
        BasicTestSuperclass.createDefaultBucket(client);

        S3Object response = client.getObject("bucketname", "asdf.txt");
        String content = inputStreamToString(response.getObjectContent());
        assertEquals("asdf", content);
        assertFalse(instanceA.getAddress().equals(instanceB.getAddress()));
    } finally {
        if (client != null) client.shutdown();
        if (instanceA != null) instanceA.stop();
        if (instanceB != null) instanceB.stop();
    }
}
@Before
public void setUp() throws Exception {
    instance = S3Server.createHttpServer();
    instance.start();
    client = new AmazonS3Client(new StaticCredentialsProvider(new AnonymousAWSCredentials()));
    client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
    client.setEndpoint(instance.getAddress());
    createDefaultBucket(client);
}
public AWSCredentials getCredentials() {
    return new AnonymousAWSCredentials();
}
@BeforeMethod
public void setUp() throws Exception {
    sqsmock = System.getProperty("sqsmock");
    sqsmock = sqsmock != null ? sqsmock : "http://localhost:9324";
    client = new AmazonSQSClient(new AnonymousAWSCredentials());
    client.setEndpoint(sqsmock);
}
NetcdfFile loadNCFileFromS3(String bucketName, String key) {
    ClientConfiguration conf = new ClientConfiguration(); // 2 minute timeout
    AmazonS3Client s3 = new AmazonS3Client(new AnonymousAWSCredentials(), conf);
    Region usEast1 = Region.getRegion(Regions.US_EAST_1);
    s3.setRegion(usEast1);

    S3Object object;
    byte[] buf = new byte[1024];
    int len;
    GZIPInputStream gunzip;
    @SuppressWarnings("resource")
    ByteArrayOutputStream byteArrayStream = new ByteArrayOutputStream();
    ByteArrayOutputStream originalByteArrayStream = new ByteArrayOutputStream();
    S3ObjectInputStream objectInputStream;
    Level level;
    Logger logger;
    NetcdfFile ncfile = null;

    // log4j stuff
    BasicConfigurator.configure();
    level = Level.OFF;
    logger = org.apache.log4j.Logger.getRootLogger();
    logger.setLevel(level);

    try {
        // Download the gzipped object and buffer it in memory.
        object = s3.getObject(bucketName, key);
        objectInputStream = object.getObjectContent();
        while ((len = objectInputStream.read(buf)) != -1) {
            originalByteArrayStream.write(buf, 0, len);
        }

        // Decompress the buffered bytes and open the result as an in-memory NetCDF file.
        gunzip = new GZIPInputStream(new ByteArrayInputStream(originalByteArrayStream.toByteArray()));
        originalByteArrayStream.close();
        while ((len = gunzip.read(buf)) != -1) {
            byteArrayStream.write(buf, 0, len);
        }
        ncfile = NetcdfFile.openInMemory(key, byteArrayStream.toByteArray());
    } catch (IOException | IllegalStateException ioe) {
        System.out.println("Data file " + key + " was unable to be loaded.");
        System.out.println(ExceptionUtils.getStackTrace(ioe));
        return null;
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
            + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
            + "a serious internal problem while trying to communicate with S3, "
            + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
    return ncfile;
}
@Before
public void setup() throws PersistenceException {
    context.registerAdapter(ResourceResolver.class, AssetManager.class,
        new Function<ResourceResolver, AssetManager>() {
            @Nullable
            @Override
            public AssetManager apply(@Nullable ResourceResolver input) {
                return assetManager;
            }
        });

    context.create().resource("/content/dam", JcrConstants.JCR_PRIMARYTYPE, "sling:Folder");
    context.resourceResolver().commit();

    ingestor = new S3AssetIngestor(context.getService(MimeTypeService.class));
    ingestor.jcrBasePath = "/content/dam";
    ingestor.ignoreFileList = Collections.emptyList();
    ingestor.ignoreExtensionList = Collections.emptyList();
    ingestor.ignoreFolderList = Arrays.asList(".ds_store");
    ingestor.existingAssetAction = AssetIngestor.AssetAction.skip;

    int port = FreePortFinder.findFreeLocalPort();
    s3Mock = new S3Mock.Builder().withPort(port).withInMemoryBackend().build();
    s3Mock.start();

    S3ClientOptions options = S3ClientOptions.builder().setPathStyleAccess(true).build();
    s3Client = new AmazonS3Client(new AnonymousAWSCredentials());
    s3Client.setS3ClientOptions(options);
    s3Client.setEndpoint("http://localhost:" + port);
    ingestor.s3Client = s3Client;
    ingestor.bucket = TEST_BUCKET;
    s3Client.createBucket(TEST_BUCKET);

    doAnswer(new Answer() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            CheckedConsumer<ResourceResolver> method =
                (CheckedConsumer<ResourceResolver>) invocation.getArguments()[0];
            method.accept(context.resourceResolver());
            return null;
        }
    }).when(actionManager).deferredWithResolver(any(CheckedConsumer.class));
}
/**
 * <p>
 * Constructs a new Amazon S3 Encryption client that will make <b>anonymous</b>
 * requests to Amazon S3. If {@link #getObject(String, String)} is called,
 * the object contents will be decrypted with the encryption materials provided.
 * </p>
 * <p>
 * Only a subset of the Amazon S3 API will work with anonymous
 * <i>(i.e. unsigned)</i> requests, but this can prove useful in some situations.
 * For example:
 * <ul>
 * <li>If an Amazon S3 bucket has {@link Permission#Read} permission for the
 * {@link GroupGrantee#AllUsers} group, anonymous clients can call
 * {@link #listObjects(String)} to see what objects are stored in a bucket.</li>
 * <li>If an object has {@link Permission#Read} permission for the
 * {@link GroupGrantee#AllUsers} group, anonymous clients can call
 * {@link #getObject(String, String)} and
 * {@link #getObjectMetadata(String, String)} to pull object content and
 * metadata.</li>
 * <li>If a bucket has {@link Permission#Write} permission for the
 * {@link GroupGrantee#AllUsers} group, anonymous clients can upload objects
 * to the bucket.</li>
 * </ul>
 * </p>
 *
 * @param encryptionMaterialsProvider
 *            A provider for the encryption materials to be used to encrypt and decrypt data.
 * @deprecated use {@link AmazonS3EncryptionClientBuilder#withEncryptionMaterials(EncryptionMaterialsProvider)}
 */
@Deprecated
public AmazonS3EncryptionClient(EncryptionMaterialsProvider encryptionMaterialsProvider) {
    this(new StaticCredentialsProvider(new AnonymousAWSCredentials()),
        encryptionMaterialsProvider,
        configFactory.getConfig(),
        new CryptoConfiguration());
}
/**
 * <p>
 * Constructs a new Amazon S3 Encryption client that will make <b>anonymous</b>
 * requests to Amazon S3. If {@link #getObject(String, String)} is called,
 * the object contents will be decrypted with the encryption materials provided.
 * The encryption implementation of the provided crypto provider will be
 * used to encrypt and decrypt data.
 * </p>
 * <p>
 * Only a subset of the Amazon S3 API will work with anonymous
 * <i>(i.e. unsigned)</i> requests, but this can prove useful in some situations.
 * For example:
 * <ul>
 * <li>If an Amazon S3 bucket has {@link Permission#Read} permission for the
 * {@link GroupGrantee#AllUsers} group, anonymous clients can call
 * {@link #listObjects(String)} to see what objects are stored in a bucket.</li>
 * <li>If an object has {@link Permission#Read} permission for the
 * {@link GroupGrantee#AllUsers} group, anonymous clients can call
 * {@link #getObject(String, String)} and
 * {@link #getObjectMetadata(String, String)} to pull object content and
 * metadata.</li>
 * <li>If a bucket has {@link Permission#Write} permission for the
 * {@link GroupGrantee#AllUsers} group, anonymous clients can upload objects
 * to the bucket.</li>
 * </ul>
 * </p>
 *
 * @param encryptionMaterialsProvider
 *            A provider for the encryption materials to be used to encrypt and decrypt data.
 * @param cryptoConfig
 *            The crypto configuration whose parameters will be used to encrypt and decrypt data.
 * @deprecated use {@link AmazonS3EncryptionClientBuilder#withEncryptionMaterials(EncryptionMaterialsProvider)} and
 *             {@link AmazonS3EncryptionClientBuilder#withCryptoConfiguration(CryptoConfiguration)}
 */
@Deprecated
public AmazonS3EncryptionClient(
        EncryptionMaterialsProvider encryptionMaterialsProvider,
        CryptoConfiguration cryptoConfig) {
    this(new StaticCredentialsProvider(new AnonymousAWSCredentials()),
        encryptionMaterialsProvider,
        configFactory.getConfig(),
        cryptoConfig);
}
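The two deprecated constructors above point at AmazonS3EncryptionClientBuilder as their replacement. The following is a minimal sketch, not the library's documented example, of building the same anonymous encryption client through the builder; the AES key, bucket name, object key, and region are placeholder assumptions, and any EncryptionMaterialsProvider could be substituted.

import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3EncryptionClientBuilder;
import com.amazonaws.services.s3.model.CryptoConfiguration;
import com.amazonaws.services.s3.model.EncryptionMaterials;
import com.amazonaws.services.s3.model.StaticEncryptionMaterialsProvider;

public class AnonymousEncryptionClientSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder client-side key; in practice the materials come from the caller.
        SecretKey key = KeyGenerator.getInstance("AES").generateKey();

        AmazonS3 s3 = AmazonS3EncryptionClientBuilder.standard()
            // Anonymous (unsigned) requests, matching the deprecated constructors above.
            .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
            .withEncryptionMaterials(new StaticEncryptionMaterialsProvider(new EncryptionMaterials(key)))
            .withCryptoConfiguration(new CryptoConfiguration())
            .withRegion("us-east-1") // assumed region
            .build();

        // Only publicly readable objects are accessible without credentials;
        // bucket and key names here are hypothetical.
        s3.getObject("some-public-bucket", "some-object-key");
    }
}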