The following code examples were extracted from open-source Python projects to illustrate how to use the boto.exception module.
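As an orientation before the project snippets, here is a minimal sketch (not taken from any of the projects below) of the usual pattern: let boto raise a subclass of boto.exception.BotoServerError and catch the specific class you care about. The bucket name is a placeholder, and a boto 2.x installation with configured credentials is assumed.

import boto
import boto.exception

# Connect using credentials from the environment or ~/.boto (boto 2.x).
conn = boto.connect_s3()

try:
    # get_bucket() validates the bucket by default and raises on failure.
    bucket = conn.get_bucket('example-bucket-name')  # placeholder name
except boto.exception.S3ResponseError as err:
    # S3ResponseError subclasses BotoServerError, so status, reason and
    # error_code are populated from the HTTP error response.
    print('S3 call failed: %s %s (%s)' % (err.status, err.reason, err.error_code))
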
def download_video_to_working_directory(self, key, file_name, file_extension):
    """
    Downloads the video from S3 to the working directory and returns whether it was
    successfully downloaded or not.

    Arguments:
        key: An S3 key whose content is going to be downloaded
        file_name: Name of the file once it is in the working directory
        file_extension: Extension of this file
    """
    if len(file_extension) == 3:
        file_name = u'{file_name}.{ext}'.format(file_name=file_name, ext=file_extension)

    try:
        key.get_contents_to_filename(os.path.join(self.node_work_directory, file_name))
        file_ingested = True
    except S3DataError:
        file_ingested = False
        LOGGER.exception('[File Ingest] Error downloading the file into node working directory.')

    return file_ingested

def parse_transcript_preferences(self, course_id, transcript_preferences):
    """
    Parses and validates transcript preferences.

    Arguments:
        course_id: Course id identifying a course run.
        transcript_preferences: A serialized dict containing third party transcript preferences.
    """
    try:
        transcript_preferences = json.loads(transcript_preferences)
        TranscriptCredentials.objects.get(
            org=extract_course_org(course_id),
            provider=transcript_preferences.get('provider')
        )
    except (TypeError, TranscriptCredentials.DoesNotExist):
        # Raised when the preferences are not set, OR they are set to data in an invalid
        # format, OR they don't have associated 3rd party transcription provider API keys.
        transcript_preferences = None
    except ValueError:
        LOGGER.exception('[File Discovery] Invalid transcripts preferences=%s', transcript_preferences)
        transcript_preferences = None

    return transcript_preferences

def test_retryable_exception_recovery(self):
    """
    Tests handling of a retryable exception
    """
    # Test one of the RETRYABLE_EXCEPTIONS.
    exception = ResumableUploadHandler.RETRYABLE_EXCEPTIONS[0]
    harness = CallbackTestHarness(exception=exception)
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    small_src_file_as_string, small_src_file = self.make_small_file()
    small_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    dst_key.set_contents_from_file(
        small_src_file, cb=harness.call,
        res_upload_handler=res_upload_handler)
    # Ensure the uploaded object has the correct content.
    self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
    self.assertEqual(small_src_file_as_string,
                     dst_key.get_contents_as_string())

def test_broken_pipe_recovery(self):
    """
    Tests handling of a Broken Pipe (which interacts with an httplib bug)
    """
    exception = IOError(errno.EPIPE, "Broken pipe")
    harness = CallbackTestHarness(exception=exception)
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    small_src_file_as_string, small_src_file = self.make_small_file()
    small_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    dst_key.set_contents_from_file(
        small_src_file, cb=harness.call,
        res_upload_handler=res_upload_handler)
    # Ensure the uploaded object has the correct content.
    self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
    self.assertEqual(small_src_file_as_string,
                     dst_key.get_contents_as_string())

def test_non_retryable_exception_handling(self):
    """
    Tests a resumable upload that fails with a non-retryable exception
    """
    harness = CallbackTestHarness(
        exception=OSError(errno.EACCES, 'Permission denied'))
    res_upload_handler = ResumableUploadHandler(num_retries=1)
    small_src_file_as_string, small_src_file = self.make_small_file()
    small_src_file.seek(0)
    dst_key = self._MakeKey(set_contents=False)
    try:
        dst_key.set_contents_from_file(
            small_src_file, cb=harness.call,
            res_upload_handler=res_upload_handler)
        self.fail('Did not get expected OSError')
    except OSError as e:
        # Ensure the error was re-raised.
        self.assertEqual(e.errno, 13)

def test_retryable_exception_recovery(self):
    """
    Tests handling of a retryable exception
    """
    # Test one of the RETRYABLE_EXCEPTIONS.
    exception = ResumableDownloadHandler.RETRYABLE_EXCEPTIONS[0]
    harness = CallbackTestHarness(exception=exception)
    res_download_handler = ResumableDownloadHandler(num_retries=1)
    dst_fp = self.make_dst_fp()
    small_src_key_as_string, small_src_key = self.make_small_key()
    small_src_key.get_contents_to_file(
        dst_fp, cb=harness.call,
        res_download_handler=res_download_handler)
    # Ensure the downloaded object has the correct content.
    self.assertEqual(SMALL_KEY_SIZE, get_cur_file_size(dst_fp))
    self.assertEqual(small_src_key_as_string,
                     small_src_key.get_contents_as_string())

def test_broken_pipe_recovery(self):
    """
    Tests handling of a Broken Pipe (which interacts with an httplib bug)
    """
    exception = IOError(errno.EPIPE, "Broken pipe")
    harness = CallbackTestHarness(exception=exception)
    res_download_handler = ResumableDownloadHandler(num_retries=1)
    dst_fp = self.make_dst_fp()
    small_src_key_as_string, small_src_key = self.make_small_key()
    small_src_key.get_contents_to_file(
        dst_fp, cb=harness.call,
        res_download_handler=res_download_handler)
    # Ensure the downloaded object has the correct content.
    self.assertEqual(SMALL_KEY_SIZE, get_cur_file_size(dst_fp))
    self.assertEqual(small_src_key_as_string,
                     small_src_key.get_contents_as_string())

def get_domain(self, domain_name, validate=True):
    """
    Retrieves a :py:class:`boto.sdb.domain.Domain` object whose name
    matches ``domain_name``.

    :param str domain_name: The name of the domain to retrieve
    :keyword bool validate: When ``True``, check to see if the domain
        actually exists. If ``False``, blindly return a
        :py:class:`Domain <boto.sdb.domain.Domain>` object with the
        specified name set.

    :raises:
        :py:class:`boto.exception.SDBResponseError` if ``validate`` is
        ``True`` and no match could be found.

    :rtype: :py:class:`boto.sdb.domain.Domain`
    :return: The requested domain
    """
    domain = Domain(self, domain_name)
    if validate:
        self.select(domain, """select * from `%s` limit 1""" % domain_name)
    return domain

def lookup(self, domain_name, validate=True):
    """
    Lookup an existing SimpleDB domain. This differs from
    :py:meth:`get_domain` in that ``None`` is returned if ``validate`` is
    ``True`` and no match was found (instead of raising an exception).

    :param str domain_name: The name of the domain to retrieve

    :param bool validate: If ``True``, a ``None`` value will be returned
        if the specified domain can't be found. If ``False``, a
        :py:class:`Domain <boto.sdb.domain.Domain>` object will be dumbly
        returned, regardless of whether it actually exists.

    :rtype: :class:`boto.sdb.domain.Domain` object or ``None``
    :return: The Domain object or ``None`` if the domain does not exist.
    """
    try:
        domain = self.get_domain(domain_name, validate)
    except:
        domain = None
    return domain

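A small usage sketch for the two SimpleDB helpers above (the domain name is a placeholder; a boto 2.x SimpleDB connection with valid credentials is assumed):

import boto
import boto.exception

sdb = boto.connect_sdb()

# get_domain() raises SDBResponseError when validate=True and the domain is missing.
try:
    domain = sdb.get_domain('example-domain')
except boto.exception.SDBResponseError:
    domain = None

# lookup() swallows that exception internally and simply returns None instead.
domain = sdb.lookup('example-domain')
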
def get_domain_and_name(self, domain_or_name):
    """
    Given a ``str`` or :class:`boto.sdb.domain.Domain`, return a
    ``tuple`` with the following members (in order):

        * An instance of :class:`boto.sdb.domain.Domain` for the
          requested domain
        * The domain's name as a ``str``

    :type domain_or_name: ``str`` or :class:`boto.sdb.domain.Domain`
    :param domain_or_name: The domain or domain name to get the domain
        and name for.

    :raises: :class:`boto.exception.SDBResponseError` when an invalid
        domain name is specified.

    :rtype: tuple
    :return: A ``tuple`` with contents outlined as per above.
    """
    if isinstance(domain_or_name, Domain):
        return (domain_or_name, domain_or_name.name)
    else:
        return (self.get_domain(domain_or_name), domain_or_name)

def _check_num_ops(self, type_, response_num):
    """Raise exception if number of ops in response doesn't match commit

    :type type_: str
    :param type_: Type of commit operation: 'add' or 'delete'

    :type response_num: int
    :param response_num: Number of adds or deletes in the response.

    :raises: :class:`boto.cloudsearch.document.CommitMismatchError`
    """
    commit_num = len([d for d in self.doc_service.documents_batch
                      if d['type'] == type_])

    if response_num != commit_num:
        raise CommitMismatchError(
            'Incorrect number of {0}s returned. Commit: {1} Response: {2}'
            .format(type_, commit_num, response_num))

def set_xml_logging(self, logging_str, headers=None):
    """
    Set logging on a bucket directly to the given xml string.

    :type logging_str: unicode string
    :param logging_str: The XML for the bucketloggingstatus which
        will be set. The string will be converted to utf-8 before
        it is sent. Usually, you will obtain this XML from the
        BucketLogging object.

    :rtype: bool
    :return: True if ok or raises an exception.
    """
    body = logging_str
    if not isinstance(body, bytes):
        body = body.encode('utf-8')
    response = self.connection.make_request('PUT', self.name, data=body,
                                            query_args='logging',
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        return True
    else:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)

def enable_logging(self, target_bucket, target_prefix='',
                   grants=None, headers=None):
    """
    Enable logging on a bucket.

    :type target_bucket: bucket or string
    :param target_bucket: The bucket to log to.

    :type target_prefix: string
    :param target_prefix: The prefix which should be prepended to the
        generated log files written to the target_bucket.

    :type grants: list of Grant objects
    :param grants: A list of extra permissions which will be granted on
        the log files which are created.

    :rtype: bool
    :return: True if ok or raises an exception.
    """
    if isinstance(target_bucket, Bucket):
        target_bucket = target_bucket.name
    blogging = BucketLogging(target=target_bucket, prefix=target_prefix,
                             grants=grants)
    return self.set_xml_logging(blogging.to_xml(), headers=headers)

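A hedged call-site sketch for enable_logging() above (bucket names are placeholders; the target bucket is assumed to already grant write access to the S3 log delivery group):

import boto
import boto.exception

conn = boto.connect_s3()
source = conn.get_bucket('example-source-bucket')

try:
    source.enable_logging('example-log-bucket', target_prefix='logs/source/')
except boto.exception.S3ResponseError as err:
    # Raised, for example, when the target bucket does not accept log delivery.
    print('Could not enable logging: %s' % err.error_code)
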
def __init__(self, **args):
    self.args = args
    self.check_for_credential_file()
    self.check_for_env_url()
    if 'host' not in self.args:
        if self.Regions:
            region_name = self.args.get('region_name',
                                        self.Regions[0]['name'])
            for region in self.Regions:
                if region['name'] == region_name:
                    self.args['host'] = region['endpoint']
    if 'path' not in self.args:
        self.args['path'] = self.Path
    if 'port' not in self.args:
        self.args['port'] = self.Port
    try:
        super(AWSQueryService, self).__init__(**self.args)
        self.aws_response = None
    except boto.exception.NoAuthHandlerFound:
        raise NoCredentialsError()

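The NoAuthHandlerFound branch above mirrors what plain boto does when no credentials can be located; a minimal sketch of catching it directly (assumes no credentials are configured anywhere on the machine):

import boto
import boto.exception

try:
    conn = boto.connect_s3()  # raises if boto cannot find any credentials
except boto.exception.NoAuthHandlerFound:
    print('No AWS credentials found; set AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY '
          'or populate ~/.boto')
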
def set_xml_logging(self, logging_str, headers=None):
    """
    Set logging on a bucket directly to the given xml string.

    :type logging_str: unicode string
    :param logging_str: The XML for the bucketloggingstatus which
        will be set. The string will be converted to utf-8 before
        it is sent. Usually, you will obtain this XML from the
        BucketLogging object.

    :rtype: bool
    :return: True if ok or raises an exception.
    """
    body = logging_str.encode('utf-8')
    response = self.connection.make_request('PUT', self.name, data=body,
                                            query_args='logging',
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        return True
    else:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)

def __init__(self, **args):
    self.args = args
    self.check_for_credential_file()
    self.check_for_env_url()
    if 'host' not in self.args:
        if self.Regions:
            region_name = self.args.get('region_name',
                                        self.Regions[0]['name'])
            for region in self.Regions:
                if region['name'] == region_name:
                    self.args['host'] = region['endpoint']
    if 'path' not in self.args:
        self.args['path'] = self.Path
    if 'port' not in self.args:
        self.args['port'] = self.Port
    try:
        boto.connection.AWSQueryConnection.__init__(self, **self.args)
        self.aws_response = None
    except boto.exception.NoAuthHandlerFound:
        raise NoCredentialsError()

def wait_exception(lfunction):
    """Returns with the exception or raises one."""
    start_time = time.time()
    while True:
        try:
            lfunction()
        except BaseException as exc:
            LOG.info('Exception in %d seconds', time.time() - start_time)
            return exc
        dtime = time.time() - start_time
        if dtime > CONF.boto.build_timeout:
            raise TestCase.failureException("Wait timeout exceeded! (%ds)" %
                                            dtime)
        time.sleep(CONF.boto.build_interval)


# TODO(afazekas): consider strategy design pattern..

def create_bucket(**kwargs):
    """ Create a bucket for a user """
    s3conn = s3connect(kwargs['user'])
    try:
        s3conn.create_bucket(kwargs['bucket_name'])
    except boto.exception.S3CreateError:
        return False
    return True

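A hypothetical call site for the helper above (the user and bucket names are placeholders; s3connect() is project-specific and assumed to return a boto S3 connection):

if create_bucket(user='alice', bucket_name='example-bucket-name'):
    print('bucket created')
else:
    # create_bucket() returns False when boto raises S3CreateError,
    # e.g. because the bucket name is already taken.
    print('bucket was not created')
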
def _make_request(self, action, params=None):
    """Make a call to the SES API.

    :type action: string
    :param action: The API method to use (e.g. SendRawEmail)

    :type params: dict
    :param params: Parameters that will be sent as POST data with the API
        call.
    """
    ct = 'application/x-www-form-urlencoded; charset=UTF-8'
    headers = {'Content-Type': ct}
    params = params or {}
    params['Action'] = action

    for k, v in params.items():
        if isinstance(v, six.text_type):  # UTF-8 encode only if it's Unicode
            params[k] = v.encode('utf-8')

    response = super(SESConnection, self).make_request(
        'POST',
        '/',
        headers=headers,
        data=urllib.parse.urlencode(params)
    )
    body = response.read().decode('utf-8')
    if response.status == 200:
        list_markers = ('VerifiedEmailAddresses', 'Identities',
                        'DkimTokens', 'VerificationAttributes',
                        'SendDataPoints')
        item_markers = ('member', 'item', 'entry')

        e = boto.jsonresponse.Element(list_marker=list_markers,
                                      item_marker=item_markers)
        h = boto.jsonresponse.XmlHandler(e, None)
        h.parse(body)
        return e
    else:
        # HTTP codes other than 200 are considered errors. Go through
        # some error handling to determine which exception gets raised.
        self._handle_error(response, body)

def load_object(self, obj):
    if not obj._loaded:
        a = self.domain.get_attributes(obj.id, consistent_read=self.consistent)
        if '__type__' in a:
            for prop in obj.properties(hidden=False):
                if prop.name in a:
                    value = self.decode_value(prop, a[prop.name])
                    value = prop.make_value_from_datastore(value)
                    try:
                        setattr(obj, prop.name, value)
                    except Exception as e:
                        boto.log.exception(e)
        obj._loaded = True

def __init__(self, response, doc_service, sdf):
    self.response = response
    self.doc_service = doc_service
    self.sdf = sdf

    _body = response.content.decode('utf-8')

    try:
        self.content = json.loads(_body)
    except:
        boto.log.error('Error indexing documents.\nResponse Content:\n{0}\n\n'
                       'SDF:\n{1}'.format(_body, self.sdf))
        raise boto.exception.BotoServerError(self.response.status_code, '',
                                             body=_body)

    self.status = self.content['status']
    if self.status == 'error':
        self.errors = [e.get('message') for e in self.content.get('errors',
                                                                   [])]
        for e in self.errors:
            if "Illegal Unicode character" in e:
                raise EncodingError("Illegal Unicode character in document")
            elif e == "The Content-Length is too long":
                raise ContentTooLongError("Content was too long")
        if 'adds' not in self.content or 'deletes' not in self.content:
            raise SearchServiceException("Error indexing documents"
                                         " => %s" % self.content.get('message', ''))
    else:
        self.errors = []

    self.adds = self.content['adds']
    self.deletes = self.content['deletes']
    self._check_num_ops('add', self.adds)
    self._check_num_ops('delete', self.deletes)

def __init__(self, response, doc_service, sdf):
    self.response = response
    self.doc_service = doc_service
    self.sdf = sdf

    _body = response.content.decode('utf-8')

    try:
        self.content = json.loads(_body)
    except:
        boto.log.error('Error indexing documents.\nResponse Content:\n{0}'
                       '\n\nSDF:\n{1}'.format(_body, self.sdf))
        raise boto.exception.BotoServerError(self.response.status_code, '',
                                             body=_body)

    self.status = self.content['status']
    if self.status == 'error':
        self.errors = [e.get('message') for e in self.content.get('errors',
                                                                   [])]
        for e in self.errors:
            if "Illegal Unicode character" in e:
                raise EncodingError("Illegal Unicode character in document")
            elif e == "The Content-Length is too long":
                raise ContentTooLongError("Content was too long")
    else:
        self.errors = []

    self.adds = self.content['adds']
    self.deletes = self.content['deletes']
    self._check_num_ops('add', self.adds)
    self._check_num_ops('delete', self.deletes)

def _check_num_ops(self, type_, response_num):
    """Raise exception if number of ops in response doesn't match commit

    :type type_: str
    :param type_: Type of commit operation: 'add' or 'delete'

    :type response_num: int
    :param response_num: Number of adds or deletes in the response.

    :raises: :class:`boto.cloudsearch2.document.CommitMismatchError`
    """
    commit_num = len([d for d in self.doc_service.documents_batch
                      if d['type'] == type_])

    if response_num != commit_num:
        boto.log.debug(self.response.content)
        # There will always be a commit mismatch error if there are any
        # errors on cloudsearch. self.errors gets lost when this
        # CommitMismatchError is raised. Whoever is using boto has no idea
        # why their commit failed. They can't even notify the user of the
        # cause by parsing the error messages from amazon. So let's
        # attach self.errors to the exception if we already spent
        # time and effort collecting them out of the response.
        exc = CommitMismatchError(
            'Incorrect number of {0}s returned. Commit: {1} Response: {2}'
            .format(type_, commit_num, response_num)
        )
        exc.errors = self.errors
        raise exc

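Because the method above attaches the collected messages to the exception, a caller can surface them; a minimal sketch, assuming a boto.cloudsearch2 DocumentServiceConnection named doc_service has been built and loaded with documents elsewhere:

from boto.cloudsearch2.document import CommitMismatchError

try:
    doc_service.commit()
except CommitMismatchError as exc:
    # exc.errors was populated by _check_num_ops() above.
    for message in exc.errors:
        print('CloudSearch rejected a document: %s' % message)
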
def make_request(self, action, body='', object_hook=None):
    """
    :raises: ``SWFResponseError`` if response status is not 200.
    """
    headers = {'X-Amz-Target': '%s.%s' % (self.ServiceName, action),
               'Host': self.region.endpoint,
               'Content-Type': 'application/json; charset=UTF-8',
               'Content-Encoding': 'amz-1.0',
               'Content-Length': str(len(body))}
    http_request = self.build_base_http_request('POST', '/', '/',
                                                {}, headers, body, None)
    response = self._mexe(http_request, sender=None,
                          override_num_retries=10)
    response_body = response.read().decode('utf-8')
    boto.log.debug(response_body)
    if response.status == 200:
        if response_body:
            return json.loads(response_body, object_hook=object_hook)
        else:
            return None
    else:
        json_body = json.loads(response_body)
        fault_name = json_body.get('__type', None)
        # Certain faults get mapped to more specific exception classes.
        excp_cls = self._fault_excp.get(fault_name, self.ResponseError)
        raise excp_cls(response.status, response.reason, body=json_body)

# Actions related to Activities

def delete_stream(self, stream_name):
    """
    This operation deletes a stream and all of its shards and data.
    You must shut down any applications that are operating on the
    stream before you delete the stream. If an application attempts
    to operate on a deleted stream, it will receive the exception
    `ResourceNotFoundException`.

    If the stream is in the ACTIVE state, you can delete it. After a
    `DeleteStream` request, the specified stream is in the DELETING
    state until Amazon Kinesis completes the deletion.

    **Note:** Amazon Kinesis might continue to accept data read and
    write operations, such as PutRecord and GetRecords, on a stream
    in the DELETING state until the stream deletion is complete.

    When you delete a stream, any shards in that stream are also
    deleted.

    You can use the DescribeStream operation to check the state of
    the stream, which is returned in `StreamStatus`.

    `DeleteStream` has a limit of 5 transactions per second per
    account.

    :type stream_name: string
    :param stream_name: The name of the stream to delete.
    """
    params = {'StreamName': stream_name, }
    return self.make_request(action='DeleteStream',
                             body=json.dumps(params))

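As the docstring notes, operating on a deleted stream surfaces ResourceNotFoundException; a hedged sketch of calling the method above through a boto 2.x Kinesis connection (region and stream name are placeholders):

import boto.kinesis
from boto.kinesis.exceptions import ResourceNotFoundException

kinesis = boto.kinesis.connect_to_region('us-east-1')

try:
    kinesis.delete_stream('example-stream')
except ResourceNotFoundException:
    # The stream was already deleted or never existed.
    pass
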
def _make_request(self, action, params=None):
    """Make a call to the SES API.

    :type action: string
    :param action: The API method to use (e.g. SendRawEmail)

    :type params: dict
    :param params: Parameters that will be sent as POST data with the API
        call.
    """
    ct = 'application/x-www-form-urlencoded; charset=UTF-8'
    headers = {'Content-Type': ct}
    params = params or {}
    params['Action'] = action

    for k, v in params.items():
        if isinstance(v, six.text_type):  # UTF-8 encode only if it's Unicode
            params[k] = v.encode('utf-8')

    response = super(SESConnection, self).make_request(
        'POST',
        '/',
        headers=headers,
        data=urllib.parse.urlencode(params)
    )
    body = response.read().decode('utf-8')
    if response.status == 200:
        list_markers = ('VerifiedEmailAddresses', 'Identities',
                        'DkimTokens', 'DkimAttributes',
                        'VerificationAttributes', 'SendDataPoints')
        item_markers = ('member', 'item', 'entry')

        e = boto.jsonresponse.Element(list_marker=list_markers,
                                      item_marker=item_markers)
        h = boto.jsonresponse.XmlHandler(e, None)
        h.parse(body)
        return e
    else:
        # HTTP codes other than 200 are considered errors. Go through
        # some error handling to determine which exception gets raised.
        self._handle_error(response, body)

def encode_string(self, value):
    """Convert ASCII, Latin-1 or UTF-8 to pure Unicode"""
    if not isinstance(value, str):
        return value
    try:
        return six.text_type(value, 'utf-8')
    except:
        # really, this should throw an exception.
        # in the interest of not breaking current
        # systems, however:
        arr = []
        for ch in value:
            arr.append(six.unichr(ord(ch)))
        return u"".join(arr)

def disable_logging(self, headers=None):
    """
    Disable logging on a bucket.

    :rtype: bool
    :return: True if ok or raises an exception.
    """
    blogging = BucketLogging()
    return self.set_xml_logging(blogging.to_xml(), headers=headers)

def __init__(self, response, doc_service, sdf, signed_request=False):
    self.response = response
    self.doc_service = doc_service
    self.sdf = sdf
    self.signed_request = signed_request

    if self.signed_request:
        self.content = response
    else:
        _body = response.content.decode('utf-8')

        try:
            self.content = json.loads(_body)
        except:
            boto.log.error('Error indexing documents.\nResponse Content:\n{0}'
                           '\n\nSDF:\n{1}'.format(_body, self.sdf))
            raise boto.exception.BotoServerError(self.response.status_code, '',
                                                 body=_body)

    self.status = self.content['status']
    if self.status == 'error':
        self.errors = [e.get('message') for e in self.content.get('errors',
                                                                   [])]
        for e in self.errors:
            if "Illegal Unicode character" in e:
                raise EncodingError("Illegal Unicode character in document")
            elif e == "The Content-Length is too long":
                raise ContentTooLongError("Content was too long")
    else:
        self.errors = []

    self.adds = self.content['adds']
    self.deletes = self.content['deletes']
    self._check_num_ops('add', self.adds)
    self._check_num_ops('delete', self.deletes)

def _check_num_ops(self, type_, response_num):
    """Raise exception if number of ops in response doesn't match commit

    :type type_: str
    :param type_: Type of commit operation: 'add' or 'delete'

    :type response_num: int
    :param response_num: Number of adds or deletes in the response.

    :raises: :class:`boto.cloudsearch2.document.CommitMismatchError`
    """
    commit_num = len([d for d in self.doc_service.documents_batch
                      if d['type'] == type_])

    if response_num != commit_num:
        if self.signed_request:
            boto.log.debug(self.response)
        else:
            boto.log.debug(self.response.content)
        # There will always be a commit mismatch error if there are any
        # errors on cloudsearch. self.errors gets lost when this
        # CommitMismatchError is raised. Whoever is using boto has no idea
        # why their commit failed. They can't even notify the user of the
        # cause by parsing the error messages from amazon. So let's
        # attach self.errors to the exception if we already spent
        # time and effort collecting them out of the response.
        exc = CommitMismatchError(
            'Incorrect number of {0}s returned. Commit: {1} Response: {2}'
            .format(type_, commit_num, response_num)
        )
        exc.errors = self.errors
        raise exc

def delete_stream(self, stream_name):
    """
    Deletes a stream and all its shards and data. You must shut down
    any applications that are operating on the stream before you
    delete the stream. If an application attempts to operate on a
    deleted stream, it will receive the exception
    `ResourceNotFoundException`.

    If the stream is in the `ACTIVE` state, you can delete it. After
    a `DeleteStream` request, the specified stream is in the
    `DELETING` state until Amazon Kinesis completes the deletion.

    **Note:** Amazon Kinesis might continue to accept data read and
    write operations, such as PutRecord, PutRecords, and GetRecords,
    on a stream in the `DELETING` state until the stream deletion is
    complete.

    When you delete a stream, any shards in that stream are also
    deleted, and any tags are dissociated from the stream.

    You can use the DescribeStream operation to check the state of
    the stream, which is returned in `StreamStatus`.

    `DeleteStream` has a limit of 5 transactions per second per
    account.

    :type stream_name: string
    :param stream_name: The name of the stream to delete.
    """
    params = {'StreamName': stream_name, }
    return self.make_request(action='DeleteStream',
                             body=json.dumps(params))

def _make_request(self, action, params=None):
    """Make a call to the SES API.

    :type action: string
    :param action: The API method to use (e.g. SendRawEmail)

    :type params: dict
    :param params: Parameters that will be sent as POST data with the API
        call.
    """
    ct = 'application/x-www-form-urlencoded; charset=UTF-8'
    headers = {'Content-Type': ct}
    params = params or {}
    params['Action'] = action

    for k, v in params.items():
        if isinstance(v, unicode):  # UTF-8 encode only if it's Unicode
            params[k] = v.encode('utf-8')

    response = super(SESConnection, self).make_request(
        'POST',
        '/',
        headers=headers,
        data=urllib.urlencode(params)
    )
    body = response.read()
    if response.status == 200:
        list_markers = ('VerifiedEmailAddresses', 'Identities',
                        'VerificationAttributes', 'SendDataPoints')
        item_markers = ('member', 'item', 'entry')

        e = boto.jsonresponse.Element(list_marker=list_markers,
                                      item_marker=item_markers)
        h = boto.jsonresponse.XmlHandler(e, None)
        h.parse(body)
        return e
    else:
        # HTTP codes other than 200 are considered errors. Go through
        # some error handling to determine which exception gets raised.
        self._handle_error(response, body)