The following 32 code examples, extracted from open-source Python projects, illustrate how to use boto.s3.connection.OrdinaryCallingFormat().

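As a quick orientation before the examples: OrdinaryCallingFormat tells boto to issue path-style requests (https://s3.amazonaws.com/bucket/key) instead of the virtual-hosted style produced by the default SubdomainCallingFormat (https://bucket.s3.amazonaws.com/key). A minimal sketch, with placeholder credentials and bucket name:

from boto.s3.connection import S3Connection, OrdinaryCallingFormat

# 'MY_ACCESS_KEY', 'MY_SECRET_KEY' and 'my.bucket' are placeholders.
conn = S3Connection('MY_ACCESS_KEY', 'MY_SECRET_KEY',
                    calling_format=OrdinaryCallingFormat())
# With path-style addressing the bucket name never becomes part of the
# hostname, so bucket names containing dots work over HTTPS.
bucket = conn.get_bucket('my.bucket')
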
def __init__(self, region, access_key, secret_key, bucket_name,
             secure=True, num_retries=5, socket_timeout=15):
    self.region = region
    self.access_key = access_key
    self.secret_key = secret_key
    self.secure = secure
    self.num_retries = num_retries
    self.socket_timeout = socket_timeout
    # monkey patch for bucket_name with dots
    # https://github.com/boto/boto/issues/2836
    if self.secure and '.' in bucket_name:
        self.calling_format = OrdinaryCallingFormat()
    else:
        self.calling_format = SubdomainCallingFormat()
    for section in boto.config.sections():
        boto.config.remove_section(section)
    boto.config.add_section('Boto')
    boto.config.setbool('Boto', 'is_secure', self.secure)
    boto.config.set('Boto', 'http_socket_timeout', str(self.socket_timeout))
    boto.config.set('Boto', 'num_retries', str(self.num_retries))
    self._conn = None
    self.connect()

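The pattern above recurs in several examples below: under HTTPS, a dotted bucket name breaks wildcard-certificate validation with the default SubdomainCallingFormat, so path-style addressing is the safe fallback (https://github.com/boto/boto/issues/2836). A standalone sketch of that selection logic; the helper name pick_calling_format is ours, not from the source project:

from boto.s3.connection import OrdinaryCallingFormat, SubdomainCallingFormat

def pick_calling_format(bucket_name, is_secure=True):
    # my.bucket.s3.amazonaws.com does not match the *.s3.amazonaws.com
    # wildcard certificate, so use path-style requests for dotted names
    # over HTTPS; otherwise keep boto's default virtual-hosted style.
    if is_secure and '.' in bucket_name:
        return OrdinaryCallingFormat()
    return SubdomainCallingFormat()
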
def list_command(args):
    tiers_config = get_tiers_config()
    conn = connect_to_region(tiers_config["region"],
                             calling_format=OrdinaryCallingFormat())
    bucket_name = "{}.{}".format(tiers_config["bucket"], tiers_config["domain"])
    print "List of all tiers registered at http://{}/{}".format(bucket_name, "tiers")
    bucket = conn.get_bucket(bucket_name)
    for file_key in bucket.list("tiers/", "/"):
        head, tail = os.path.split(file_key.name)
        root, ext = os.path.splitext(tail)
        if ext == ".json":
            if args.verbose:
                print bcolors.BOLD + "Tier: " + root + bcolors.ENDC
                json_text = file_key.get_contents_as_string()
                print json_text
            else:
                print " ", root

def post_pdfs_to_s3(self):
    conn = boto.s3.connect_to_region('us-east-1',
                                     aws_access_key_id=S3_ACCESS_KEY_FOR_MANUAL,
                                     aws_secret_access_key=S3_SECRET_KEY_FOR_MANUAL,
                                     calling_format=OrdinaryCallingFormat())
    bucket_name = S3_BUCKET_FOR_MANUAL
    bucket_dir = S3_DIRECTORY_FOR_MANUAL
    bucket = conn.get_bucket(bucket_name, validate=False)
    source_dir = os.path.join(ZENDESK_UTIL_DIR, 'gen/pdf/')
    print "posting pdfs from %s" % source_dir
    section_dict = {}
    for fn in os.listdir(source_dir):
        with open(source_dir + fn, 'r') as pdf_file:
            chunks = fn.split('-')
            category = chunks[0]
            filename = '-'.join(chunks[1:len(chunks)])
            if not category in section_dict:
                section_dict[category] = ''
            section_dict[category] += '<tr><td style="padding-right:10px;padding-bottom:5px"><a href=http://{}/{}/{}/{}>{}</a></td><td>http://{}/{}/{}/{}</td></tr>'.format(bucket_name, bucket_dir, category, filename, filename, bucket_name, bucket_dir, category, filename)
            k = Key(bucket)
            k.key = '/' + bucket_dir + '/' + category + '/' + filename
            print("POSTING PDF to S3: " + k.key)
            k.set_contents_from_file(pdf_file, cb=self.percent_cb, num_cb=1)
    self.post_inventory_html(section_dict, bucket, bucket_name)

def _get_s3bucket(host, bucket, access_key, secret_key,
                  force_bucket_suffix=True, create_if_missing=True):
    from boto.s3.connection import S3Connection, OrdinaryCallingFormat, S3ResponseError
    s3con = S3Connection(aws_access_key_id=access_key,
                         aws_secret_access_key=secret_key,
                         host=host, is_secure=False,
                         calling_format=OrdinaryCallingFormat())
    # append the access key as a suffix to the bucket name,
    # unless explicitly prohibited
    if force_bucket_suffix and not bucket.lower().endswith('-' + access_key.lower()):
        bucket = bucket + '-' + access_key.lower()
    try:
        return s3con.get_bucket(bucket)
    except S3ResponseError as ex:
        if ex.status == 404:
            if create_if_missing:
                return s3con.create_bucket(bucket)
            else:
                raise IOError("Bucket {} does not exist".format(bucket))
        raise

def get_s3_bucket(tiers_config):
    conn = connect_to_region(tiers_config["region"],
                             calling_format=OrdinaryCallingFormat())
    bucket_name = "{}.{}".format(tiers_config["bucket"], tiers_config["domain"])
    bucket = conn.get_bucket(bucket_name)
    return bucket

def fetch(path):
    """Read the contents of the file or url pointed to by 'path'."""
    try:
        with open(path) as f:
            return f.read()
    except Exception as e1:
        pass
    try:
        r = requests.get(path)
        r.raise_for_status()
        return r.text
    except Exception as e2:
        pass
    try:
        region, bucket_name, key_name = path.split("/", 2)
        conn = connect_to_region(region, calling_format=OrdinaryCallingFormat())
        bucket = conn.lookup(bucket_name)
        data = bucket.get_key(key_name).get_contents_as_string()
        return data
    except Exception as e3:
        pass
    print "Can't fetch '{}'".format(path)
    print "  Not a file:", e1
    print "  Not an URL:", e2
    print "  Not a bucket:", e3

def connect_walrus(host=None, aws_access_key_id=None,
                   aws_secret_access_key=None, port=8773,
                   path='/services/Walrus', is_secure=False, **kwargs):
    """
    Connect to a Walrus service.

    :type host: string
    :param host: the host name or ip address of the Walrus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Walrus
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    # Check for values in boto config, if not supplied as args
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials', 'euca_access_key_id', None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials', 'euca_secret_access_key', None)
    if not host:
        host = config.get('Boto', 'walrus_host', None)

    return S3Connection(aws_access_key_id, aws_secret_access_key,
                        host=host, port=port, path=path,
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)

def connect_ia(ia_access_key_id=None, ia_secret_access_key=None,
               is_secure=False, **kwargs):
    """
    Connect to the Internet Archive via their S3-like API.

    :type ia_access_key_id: string
    :param ia_access_key_id: Your IA Access Key ID. This will also look
        in your boto config file for an entry in the Credentials
        section called "ia_access_key_id"

    :type ia_secret_access_key: string
    :param ia_secret_access_key: Your IA Secret Access Key. This will also
        look in your boto config file for an entry in the Credentials
        section called "ia_secret_access_key"

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to the Internet Archive
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    access_key = config.get('Credentials', 'ia_access_key_id',
                            ia_access_key_id)
    secret_key = config.get('Credentials', 'ia_secret_access_key',
                            ia_secret_access_key)

    return S3Connection(access_key, secret_key,
                        host='s3.us.archive.org',
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)

def main():
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True),
            target_bucket=dict(required=False, default=None),
            target_prefix=dict(required=False, default=""),
            state=dict(required=False, default='present', choices=['present', 'absent'])
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)

    if region in ('us-east-1', '', None):
        # S3ism for the US Standard region
        location = Location.DEFAULT
    else:
        # Boto uses symbolic names for locations but region strings will
        # actually work fine for everything except us-east-1 (US Standard)
        location = region

    try:
        connection = boto.s3.connect_to_region(location, is_secure=True,
                                               calling_format=OrdinaryCallingFormat(),
                                               **aws_connect_params)
        # use this as fallback because connect_to_region seems to fail
        # in boto + non 'classic' aws accounts in some cases
        if connection is None:
            connection = boto.connect_s3(**aws_connect_params)
    except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
        module.fail_json(msg=str(e))

    state = module.params.get("state")

    if state == 'present':
        enable_bucket_logging(connection, module)
    elif state == 'absent':
        disable_bucket_logging(connection, module)

def __connect_to_bucket(self):
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat
    if self.verbose:
        print "DEBUG: Setting up S3Connection to", \
            self.host + ":" + self.bucket_name
    self.conn = S3Connection(
        host=self.host,
        calling_format=OrdinaryCallingFormat()
    )
    self.bucket = self.conn.get_bucket(self.bucket_name, validate=False)

def conn(self):
    from boto.s3 import connect_to_region
    from boto.s3.connection import OrdinaryCallingFormat
    from baiji.config import settings
    if not self._connected:
        self._conn = connect_to_region(
            settings.region,
            aws_access_key_id=settings.key,
            aws_secret_access_key=settings.secret,
            calling_format=OrdinaryCallingFormat(),
            suppress_consec_slashes=False)
        self._connected = True
    return self._conn

def authenticate(function):
    """
    A decorator that authenticates a request and provides a connection.

    Args:
        function: Any function that requires authentication.
    """
    @functools.wraps(function)
    def decorated_function(*args, **kwargs):
        try:
            _, token = request.headers['Authorization'].split()
        except KeyError:
            token = request.args.get('key')
        if token is None:
            return error('Login required.', HTTP_UNAUTHORIZED)

        try:
            user = get_user(token)
        except (TokenNotFound, TokenExpired) as token_error:
            return error(str(token_error), HTTP_UNAUTHORIZED)

        if user not in s3_connection_cache:
            valid_users = current_app.config['USERS']
            if user not in valid_users:
                return error('Invalid token: user not configured.')

            s3_connection_cache[user] = S3Connection(
                aws_access_key_id=valid_users[user]['aws_access_key'],
                aws_secret_access_key=valid_users[user]['aws_secret_key'],
                is_secure=current_app.config['S3_USE_SSL'],
                host=current_app.config['S3_HOST'],
                port=current_app.config['S3_PORT'],
                calling_format=OrdinaryCallingFormat()
            )

        kwargs['conn'] = s3_connection_cache[user]
        return function(*args, **kwargs)
    return decorated_function

def get_website_endpoint(bucket_name):
    # bucket names with .'s in them need to use the calling_format option,
    # otherwise the connection will fail.
    # See https://github.com/boto/boto/issues/2836
    bucket = connect_s3(
        calling_format=OrdinaryCallingFormat()
    ).get_bucket(bucket_name, validate=False)
    return bucket.get_website_endpoint()

def download_s3(self, package):
    """ Download from private s3 distributions. """
    package_path = path_join(self.path, package)
    aws_access_key_id = get_env_variable_or_raise_error("AWS_ACCESS_KEY_ID")
    aws_secret_access_key = get_env_variable_or_raise_error("AWS_SECRET_ACCESS_KEY")
    bucket_name = getenv("BUCKET", DIST_HOST)

    # connect to the bucket
    conn = boto.s3.connect_to_region(
        "eu-west-1",
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        is_secure=True,
        calling_format=OrdinaryCallingFormat()
    )
    bucket = conn.get_bucket(bucket_name)

    # Get the Key object of the given key, in the bucket
    k = Key(bucket, package)

    # Ensure the destination exists
    try:
        makedirs(self.path)
    except OSError:
        pass

    self.write("Downloading from aws bucket %s... " % bucket_name)

    # Get the contents of the key into a file
    k.get_contents_to_filename(package_path)
    return package_path

def web_publish(assets_path):
    from boto.s3.connection import S3Connection, OrdinaryCallingFormat

    site_path = 'website/site'
    os.environ['S3_USE_SIGV4'] = 'True'
    conn = S3Connection(host='s3.eu-central-1.amazonaws.com',
                        calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket('spacy.io', validate=False)

    keys_left = set([k.name for k in bucket.list()
                     if not k.name.startswith('resources')])
    for root, dirnames, filenames in os.walk(site_path):
        for dirname in dirnames:
            target = os.path.relpath(os.path.join(root, dirname), site_path)
            source = os.path.join(target, 'index.html')
            if os.path.exists(os.path.join(root, dirname, 'index.html')):
                key = bucket.new_key(source)
                key.set_redirect('//%s/%s' % (bucket.name, target))
                print('adding redirect for %s' % target)
                keys_left.remove(source)
        for filename in filenames:
            source = os.path.join(root, filename)
            target = os.path.relpath(root, site_path)
            if target == '.':
                target = filename
            elif filename != 'index.html':
                target = os.path.join(target, filename)
            key = bucket.new_key(target)
            key.set_metadata('Content-Type', 'text/html')
            key.set_contents_from_filename(source)
            print('uploading %s' % target)
            keys_left.remove(target)
    for key_name in keys_left:
        print('deleting %s' % key_name)
        bucket.delete_key(key_name)
    local('aws s3 sync --delete %s s3://spacy.io/resources' % assets_path)

def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
    """
    Opens a connection to appropriate provider, depending on provider
    portion of URI. Requires Credentials defined in boto config file (see
    boto/pyami/config.py).

    @type storage_uri: StorageUri
    @param storage_uri: StorageUri specifying a bucket or a bucket+object

    @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
    @return: A connection to storage service provider of the given URI.
    """
    connection_args = dict(self.connection_args or ())

    if (hasattr(self, 'suppress_consec_slashes') and
            'suppress_consec_slashes' not in connection_args):
        connection_args['suppress_consec_slashes'] = (
            self.suppress_consec_slashes)
    connection_args.update(kwargs)
    if not self.connection:
        if self.scheme in self.provider_pool:
            self.connection = self.provider_pool[self.scheme]
        elif self.scheme == 's3':
            from boto.s3.connection import S3Connection
            self.connection = S3Connection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
            self.provider_pool[self.scheme] = self.connection
        elif self.scheme == 'gs':
            from boto.gs.connection import GSConnection
            # Use OrdinaryCallingFormat instead of boto-default
            # SubdomainCallingFormat because the latter changes the hostname
            # that's checked during cert validation for HTTPS connections,
            # which will fail cert validation (when cert validation is
            # enabled).
            #
            # The same is not true for S3's HTTPS certificates. In fact,
            # we don't want to do this for S3 because S3 requires the
            # subdomain to match the location of the bucket. If the proper
            # subdomain is not used, the server will return a 301 redirect
            # with no Location header.
            #
            # Note: the following import can't be moved up to the
            # start of this file else it causes a config import failure when
            # run from the resumable upload/download tests.
            from boto.s3.connection import OrdinaryCallingFormat
            connection_args['calling_format'] = OrdinaryCallingFormat()
            self.connection = GSConnection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
            self.provider_pool[self.scheme] = self.connection
        elif self.scheme == 'file':
            from boto.file.connection import FileConnection
            self.connection = FileConnection(self)
        else:
            raise InvalidUriError('Unrecognized scheme "%s"' % self.scheme)
    self.connection.debug = self.debug
    return self.connection

def connect(self):
    """
    Establishes the actual connection to the referred RSE.

    :param: credentials needed to establish a connection with the storage.

    :raises RSEAccessDenied: if no connection could be established.
    """
    try:
        scheme, prefix = self.attributes.get('scheme'), self.attributes.get('prefix')
        netloc, port = self.attributes['hostname'], self.attributes.get('port', 80)
        service_url = '%(scheme)s://%(netloc)s:%(port)s' % locals()

        access_key, secret_key, is_secure = None, None, None
        if 'S3_ACCESS_KEY' in os.environ:
            access_key = os.environ['S3_ACCESS_KEY']
        if 'S3_SECRET_KEY' in os.environ:
            secret_key = os.environ['S3_SECRET_KEY']
        if 'S3_IS_SECURE' in os.environ:
            if str(os.environ['S3_IS_SECURE']).lower() == 'true':
                is_secure = True
            elif str(os.environ['S3_IS_SECURE']).lower() == 'false':
                is_secure = False

        if is_secure is None or access_key is None or secret_key is None:
            credentials = get_rse_credentials()
            self.rse['credentials'] = credentials.get(self.rse['rse'])

            if not access_key:
                access_key = self.rse['credentials']['access_key']
            if not secret_key:
                secret_key = self.rse['credentials']['secret_key']
            if not is_secure:
                is_secure = self.rse['credentials'].get('is_secure', {}).\
                    get(service_url, False)

        self._disable_http_proxy()
        self.__conn = connect_s3(host=self.attributes['hostname'],
                                 port=int(port),
                                 aws_access_key_id=access_key,
                                 aws_secret_access_key=secret_key,
                                 is_secure=is_secure,
                                 calling_format=OrdinaryCallingFormat())
        self._reset_http_proxy()
    except Exception as e:
        self._reset_http_proxy()
        raise exception.RSEAccessDenied(e)

def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
    """
    Opens a connection to appropriate provider, depending on provider
    portion of URI. Requires Credentials defined in boto config file (see
    boto/pyami/config.py).

    @type storage_uri: StorageUri
    @param storage_uri: StorageUri specifying a bucket or a bucket+object

    @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
    @return: A connection to storage service provider of the given URI.
    """
    connection_args = dict(self.connection_args or ())
    # Use OrdinaryCallingFormat instead of boto-default
    # SubdomainCallingFormat because the latter changes the hostname
    # that's checked during cert validation for HTTPS connections,
    # which will fail cert validation (when cert validation is enabled).
    # Note: the following import can't be moved up to the start of
    # this file else it causes a config import failure when run from
    # the resumable upload/download tests.
    from boto.s3.connection import OrdinaryCallingFormat
    connection_args['calling_format'] = OrdinaryCallingFormat()
    if (hasattr(self, 'suppress_consec_slashes') and
            'suppress_consec_slashes' not in connection_args):
        connection_args['suppress_consec_slashes'] = (
            self.suppress_consec_slashes)
    connection_args.update(kwargs)
    if not self.connection:
        if self.scheme in self.provider_pool:
            self.connection = self.provider_pool[self.scheme]
        elif self.scheme == 's3':
            from boto.s3.connection import S3Connection
            self.connection = S3Connection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
            self.provider_pool[self.scheme] = self.connection
        elif self.scheme == 'gs':
            from boto.gs.connection import GSConnection
            self.connection = GSConnection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
            self.provider_pool[self.scheme] = self.connection
        elif self.scheme == 'file':
            from boto.file.connection import FileConnection
            self.connection = FileConnection(self)
        else:
            raise InvalidUriError('Unrecognized scheme "%s"' % self.scheme)
    self.connection.debug = self.debug
    return self.connection

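To make the calling-format reasoning in the two connect() variants above concrete, here is a small sketch of ours (not from the source projects) using boto's calling-format helper build_host to show the hostname each format produces for a dotted bucket name:

from boto.s3.connection import OrdinaryCallingFormat, SubdomainCallingFormat

print(SubdomainCallingFormat().build_host('s3.amazonaws.com', 'my.bucket'))
# my.bucket.s3.amazonaws.com: the extra dots mean the *.s3.amazonaws.com
# wildcard certificate no longer matches, so HTTPS cert validation fails.
print(OrdinaryCallingFormat().build_host('s3.amazonaws.com', 'my.bucket'))
# s3.amazonaws.com: the hostname is unchanged; the bucket name is sent
# as part of the request path instead.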