我们从 Python 开源项目中提取了以下 50 个代码示例,用于说明如何使用 requests.RequestException()。
def scan_file(self, this_file):
    """Submit a file to be scanned by VirusTotal.

    :param this_file: File to be scanned (32MB file size limit). May be a
        path string, a StringIO instance, or an already-open file object.
    :return: JSON response that contains scan_id and permalink, or a dict
        with an 'error' key on failure.
    """
    params = {'apikey': self.api_key}
    try:
        # A path on disk is opened for upload; other inputs pass through
        # (StringIO content is read eagerly).
        # NOTE(review): a handle opened here is never explicitly closed;
        # it is released when garbage-collected.
        if isinstance(this_file, str) and os.path.isfile(this_file):
            files = {'file': (this_file, open(this_file, 'rb'))}
        elif isinstance(this_file, StringIO.StringIO):
            files = {'file': this_file.read()}
        else:
            files = {'file': this_file}
    except TypeError as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    try:
        response = requests.post(self.base + 'file/scan', files=files,
                                 params=params, proxies=self.proxies)
    except requests.RequestException as e:
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def rescan_file(self, this_hash):
    """Rescan a previously submitted file or schedule a future scan.

    :param this_hash: An md5/sha1/sha256 hash, or a CSV list of up to 25
        such hashes for a single batch request. The file(s) must already
        be present in the VirusTotal file store.
    :return: JSON response that contains scan_id and permalink, or a dict
        with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'resource': this_hash}
    try:
        response = requests.post(self.base + 'file/rescan', params=params,
                                 proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_file_report(self, this_hash):
    """Get the scan results for a file.

    :param this_hash: md5/sha1/sha256 hash or scan_id, or a CSV list of
        up to 4 such resources (standard request rate) for one batch call.
    :return: JSON response, or a dict with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'resource': this_hash}
    try:
        response = requests.get(self.base + 'file/report', params=params,
                                proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def scan_url(self, this_url):
    """Submit a URL to be scanned by VirusTotal.

    :param this_url: The URL(s) to scan; up to 4 newline-separated URLs
        per call at the standard request rate.
    :return: JSON response that contains scan_id and permalink, or a dict
        with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'url': this_url}
    try:
        response = requests.post(self.base + 'url/scan', params=params,
                                 proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_url_report(self, this_url, scan='0'):
    """Get scan results for a URL (supports batches like get_file_report).

    :param this_url: A URL, a scan_id (sha256-timestamp), or a CSV list
        (newline-separated for multiples, up to 4 resources per call).
    :param scan: Optional; '1' auto-submits the URL when no report exists
        (the result then carries a scan_id to query later).
    :return: JSON response, or a dict with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'resource': this_url, 'scan': scan}
    try:
        response = requests.get(self.base + 'url/report', params=params,
                                proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def scan_file(self, this_file, notify_url=None, notify_changes_only=None):
    """Submit a file for scanning (32MB limit; larger files need a
    special upload URL).

    :param this_file: Path of the file to be uploaded.
    :param notify_url: A URL to which a POST notification should be sent
        when the scan finishes.
    :param notify_changes_only: With notify_url, only notify when the
        results differ from the previous analysis.
    :return: JSON response that contains scan_id and permalink, or a dict
        with an 'error' key on failure.
    """
    params = {'apikey': self.api_key}
    # Context manager closes the file handle (it previously leaked).
    with open(this_file, 'rb') as file_handle:
        files = {'file': (this_file, file_handle)}
        try:
            response = requests.post(self.base + 'file/scan', files=files,
                                     params=params, proxies=self.proxies)
        except requests.RequestException as e:
            # str(e) instead of e.message: .message was removed in Python 3.
            return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_upload_url(self):
    """Get a special upload URL for files bigger than 32MB (up to 200MB).

    :return: The special upload URL string on success, otherwise a dict
        with the response code or transport error.
    """
    params = {'apikey': self.api_key}
    try:
        response = requests.get(self.base + 'file/scan/upload_url',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    if response.status_code == requests.codes.ok:
        return response.json()['upload_url']
    else:
        return dict(response_code=response.status_code)
def get_file_behaviour(self, this_hash):
    """Get the sandboxed (Cuckoo) behaviour report for a file.

    Only Portable Executables under 10MB are executed, best-effort, upon
    first submission — a report is not guaranteed to exist for every file.

    :param this_hash: The md5/sha1/sha256 hash of the file.
    :return: Full Cuckoo JSON report, or a dict with an 'error' key on
        failure.
    """
    params = {'apikey': self.api_key, 'hash': this_hash}
    try:
        response = requests.get(self.base + 'file/behaviour',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_file(self, this_hash):
    """Download a file from VirusTotal's store by one of its hashes.

    :param this_hash: The md5/sha1/sha256 hash of the file to download.
    :return: The downloaded content (bytes) on success, otherwise a dict
        describing the error / response code.
    """
    params = {'apikey': self.api_key, 'hash': this_hash}
    try:
        response = requests.get(self.base + 'file/download',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    if response.status_code == requests.codes.ok:
        return response.content
    elif response.status_code == 403:
        return dict(error='You tried to perform calls to functions for which you require a Private API key.',
                    response_code=response.status_code)
    elif response.status_code == 404:
        return dict(error='File not found.',
                    response_code=response.status_code)
    else:
        return dict(response_code=response.status_code)
def get_url_report(self, this_url, scan='0', allinfo=1):
    """Get scan results for a URL (variant that requests extra info).

    :param this_url: URL, scan_id, or CSV list of up to 25 resources
        separated by newlines.
    :param scan: Optional; '1' auto-submits unknown URLs for analysis.
    :param allinfo: Optional; '1' adds VirusTotal metadata (first/last
        seen, downloaded files, other tools' output) to the report.
    :return: JSON response, or a dict with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'resource': this_url, 'scan': scan,
              'allinfo': allinfo}
    try:
        response = requests.get(self.base + 'url/report', params=params,
                                proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_url_distribution(self, after=None, reports='true', limit=1000):
    """Get a live feed of the latest URLs submitted to VirusTotal.

    :param after: Optional timestamp; retrieve URLs received after it, in
        ascending timestamp order.
    :param reports: Optional; when 'true' each item includes full scan
        results, otherwise only the URL and its detection ratio.
    :param limit: Optional maximum number of items (default 1000).
    :return: JSON response, or a dict with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'after': after, 'reports': reports,
              'limit': limit}
    try:
        response = requests.get(self.base + 'url/distribution',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_ip_report(self, this_ip):
    """Get a report on an IP address (includes passive DNS data).

    :param this_ip: A valid IPv4 address in dotted quad notation; only
        IPv4 addresses are supported for the time being.
    :return: JSON response, or a dict with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'ip': this_ip}
    try:
        response = requests.get(self.base + 'ip-address/report',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def put_comments(self, resource, comment):
    """Post a VirusTotal Community comment on a file or URL.

    :param resource: md5/sha1/sha256 hash of the file, or the URL itself,
        to comment on.
    :param comment: The review text; supports '#tag' and '@user' syntax.
    :return: JSON response, or a dict with an 'error' key on failure.
    """
    params = {'apikey': self.api_key, 'resource': resource,
              'comment': comment}
    try:
        response = requests.post(self.base + 'comments/put',
                                 params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_comments(self, resource, before=None):
    """Get VirusTotal Community comments for a file or URL.

    :param resource: md5/sha1/sha256 hash of the file, or the URL itself.
    :param before: Optional datetime token for paging items commented on
        more than 25 times.
    :return: JSON response with comments sorted in descending date order,
        or a dict with an 'error' key on failure.
    """
    params = dict(apikey=self.api_key, resource=resource, before=before)
    try:
        response = requests.get(self.base + 'comments/get',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    return _return_response_and_status_code(response)
def get_hashes_from_search(self, query, page=None):
    """Run a VirusTotal Intelligence search programmatically.

    :param query: A VirusTotal Intelligence search string, per the file
        search documentation
        <https://www.virustotal.com/intelligence/help/file-search/>.
    :param page: The next_page property of a previously issued query;
        omit for the first page of results.
    :return: (next_page, response) tuple on success; on transport failure
        a dict with an 'error' key (note the asymmetric return type,
        preserved for compatibility).
    """
    params = {'query': query, 'apikey': self.api_key, 'page': page}
    try:
        response = requests.get(self.base + 'search/programmatic/',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        # str(e) instead of e.message: .message was removed in Python 3.
        return dict(error=str(e))
    # .get avoids a KeyError when the response carries no next_page.
    return response.json().get('next_page'), response
def view_autocomplete(self, request, group, **kwargs):
    # Autocomplete endpoint for the issue-link form: proxies the query to
    # the tracker's "search" API and reshapes matching stories for the UI.
    field = request.GET.get('autocomplete_field')
    query = request.GET.get('autocomplete_query')
    # Only the issue_id field is supported, and an empty query returns an
    # empty result set immediately.
    if field != 'issue_id' or not query:
        return Response({'issue_id': []})
    # NOTE(review): encoding before urlencode suggests Python 2 here;
    # under Python 3 urlencode would percent-encode the bytes repr —
    # confirm target interpreter.
    query = query.encode('utf-8')
    _url = '%s?%s' % (self.build_api_url(group, 'search'),
                      urlencode({'query': query}))
    try:
        req = self.make_api_request(group.project, _url)
        body = safe_urlread(req)
    except (requests.RequestException, PluginError) as e:
        return self.handle_api_error(e)
    try:
        json_resp = json.loads(body)
    except ValueError as e:
        return self.handle_api_error(e)
    # The payload nests stories one level deep: {'stories': {'stories': []}}.
    resp = json_resp.get('stories', {})
    stories = resp.get('stories', [])
    # Render entries as "(#id) name" for the autocomplete widget.
    issues = [{'text': '(#%s) %s' % (i['id'], i['name']),
               'id': i['id']} for i in stories]
    return Response({field: issues})
def link_issue(self, request, group, form_data, **kwargs):
    # Post the user's comment onto the linked Pivotal story.
    comment = form_data.get('comment')
    # Nothing to send without a comment.
    if not comment:
        return
    _url = '%s/%s/comments' % (self.build_api_url(group, 'stories'),
                               form_data['issue_id'])
    try:
        req = self.make_api_request(group.project, _url,
                                    json_data={"text": comment})
        body = safe_urlread(req)
    except requests.RequestException as e:
        msg = six.text_type(e)
        raise PluginError('Error communicating with Pivotal: %s' % (msg, ))
    try:
        json_resp = json.loads(body)
    except ValueError as e:
        msg = six.text_type(e)
        raise PluginError('Error communicating with Pivotal: %s' % (msg, ))
    # Pivotal can return an error document with a parsable body; surface
    # it as a plugin error for any 4xx/5xx status.
    if req.status_code > 399:
        raise PluginError(json_resp['error'])
def _stream(self, url, headers=None):
    """Stream the body of `url`, yielding 64 KiB pieces.

    Transport failures are normalized to a retryable error; if the server
    declared a Content-Length, the received byte count is verified after
    the stream completes.

    :param url: The URL to stream.
    :param headers: Optional extra request headers.
    :raises exceptions.RetryableIOError: on a transport failure mid-stream.
    :raises exceptions.ContentLengthMismatch: on a short/long read.
    """
    # None default replaces the shared mutable-default-argument {};
    # behavior for callers is unchanged.
    if headers is None:
        headers = {}
    response = self.__get(url, headers=headers, stream=True,
                          timeout=self.timeout)
    length = 0
    piece_size = 65536
    try:
        for piece in response.iter_content(piece_size):
            length += len(piece)
            yield piece
    except requests.RequestException as re:
        raise exceptions.RetryableIOError(re)
    if CONTENT_LENGTH in response.headers:
        content_length = int(response.headers[CONTENT_LENGTH])
        if content_length != length:
            raise exceptions.ContentLengthMismatch(
                "Length mismatch {} != {}".format(content_length, length))
def test_push_results_to_db_request_post_failed(self):
    """push_results_to_db must return False and log when the POST raises."""
    dic = self._get_env_dict(None)
    CONST.__setattr__('results_test_db_url', self.db_url)
    with mock.patch.dict(os.environ, dic, clear=True), \
            mock.patch('functest.utils.functest_utils.logger.error') \
            as mock_logger_error, \
            mock.patch('functest.utils.functest_utils.requests.post',
                       side_effect=requests.RequestException):
        self.assertFalse(functest_utils.push_results_to_db(
            self.project, self.case_name, self.start_date,
            self.stop_date, self.result, self.details))
        # Raw string for the regex: \S and \s in a normal literal trigger
        # an invalid-escape DeprecationWarning on Python 3.6+.
        mock_logger_error.assert_called_once_with(test_utils.RegexMatch(
            r"Pushing Result to DB(\S+\s*) failed:"))
def callback(request_id, message, callback_url, scheduled_at,
             last_retry=None, retry_delay=None, _attempts=None):
    # Deliver `message` to `callback_url`; on any transport failure or
    # HTTP error status, reschedule this task with the configured delay.
    if retry_delay is None:
        retry_delay = DEFAULT_RETRY_DELAY
    try:
        response = requests.post(callback_url, data=message)
        # Treat HTTP error statuses the same as transport failures
        # (raise_for_status raises an HTTPError, a RequestException).
        response.raise_for_status()
    except requests.RequestException:
        callback.retry(
            request_id=request_id,
            message=message,
            callback_url=callback_url,
            scheduled_at=scheduled_at,
            # Record when this retry happened, as a unix timestamp.
            last_retry=int(time.time()),
            retry_delay=retry_delay,
            delay=retry_delay,
            taskid=request_id,
            _attempts=_attempts
        )
def http_request(method, url, session=requests, **kwargs):
    """Wrapper for 'requests' silencing exceptions a little bit.

    :param method: HTTP verb name, matched case-insensitively to a
        session method (get/post/...).
    :param session: Object exposing requests-style verb methods; defaults
        to the `requests` module itself.
    :return: The response object, or None when the request failed.
    """
    kwargs.setdefault('timeout', 30.0)
    # NOTE(review): verify=False disables TLS certificate validation by
    # default — presumably deliberate for this tool; confirm.
    kwargs.setdefault('verify', False)
    try:
        return getattr(session, method.lower())(url, **kwargs)
    except (requests.exceptions.MissingSchema,
            requests.exceptions.InvalidSchema):
        print_error("Invalid URL format: {}".format(url))
        return
    except requests.exceptions.ConnectionError:
        print_error("Connection error: {}".format(url))
        return
    # Order matters: the specific requests exceptions above must precede
    # this catch-all for the requests hierarchy.
    except requests.RequestException as error:
        print_error(error)
        return
    except socket.error as err:
        print_error(err)
        return
    except KeyboardInterrupt:
        print_info()
        print_status("Module has been stopped")
def get_product_info(self, serial, retry=True):
    """Look up product info for `serial`.

    On a 401 the API is assumed to be rate limiting: wait 30 seconds and
    retry once (retry=False on the recursive call), then give up.

    :param serial: Product serial/ID to query.
    :param retry: Whether a 401 may trigger one retry.
    :return: Parsed JSON dict, or None on failure.
    """
    if self.debug:
        print('\t[+] Checking possible product "%s"' % serial)
    timeout = 10
    try:
        resp = self.requests.get(self.url + '?productId=' + serial,
                                 verify=True, timeout=timeout)
        msg = 'Status code: %s' % str(resp.status_code)
        if str(resp.status_code) == '401':
            print('\t[!] HTTP error. Message was: %s' % msg)
            print('\t[!] waiting for 30 seconds to let the api server calm down')
            # Suspecting blockage due to too many api calls: pause 30s.
            time.sleep(30)
            if retry:
                print('\n[!] Retry')
                # Bug fix: the retry result was previously discarded, so
                # every retried lookup returned None even on success.
                return self.get_product_info(serial, False)
            else:
                return None
        else:
            return resp.json()
    except requests.RequestException as e:
        self.error_msg(e)
        return None
def get_access_key(self):
    """Obtain an HP API OAuth access token via client credentials.

    :return: The access token string. On a transport failure the error is
        reported and the process exits (behavior preserved).
    """
    if self.debug:
        # print() call form is Python 2/3 compatible (was a py2 statement).
        print('\t[+] Getting HP access token')
    timeout = 10
    payload = {
        'apiKey': self.api_key,
        'apiSecret': self.api_secret,
        'grantType': 'client_credentials',
        'scope': 'warranty'
    }
    headers = {
        'Accept': 'application/json',
        'Content-type': 'application/x-www-form-urlencoded'
    }
    try:
        resp = requests.post(self.url + '/oauth/v1/token', data=payload,
                             headers=headers, verify=True, timeout=timeout)
        result = json.loads(resp.text)
        return result['access_token']
    except requests.RequestException as e:
        self.error_msg(e)
        sys.exit()
def http_request(self, call, url, **kwargs):
    """Invoke `call` (a requests verb function) on `url`.

    Non-200 statuses become LunrHttpError (carrying the server 'reason'),
    transport failures become LunrError, and success returns the wrapped
    JSON payload plus status code.
    """
    try:
        # Remove args with no value
        kwargs = self.unused(kwargs)
        if self.client.timeout:
            kwargs['timeout'] = self.client.timeout
        if self.debug:
            print("-- %s on %s with %s " % (call.__name__.upper(), url, kwargs))
        resp = call(url, **kwargs)
        if self.debug:
            print("-- response: %s " % resp.text)
        if resp.status_code != 200:
            # The error body is JSON with a 'reason' field.
            raise LunrHttpError("%s returned '%s' with '%s'" %
                                (url, resp.status_code,
                                 json.loads(resp.text)['reason']),
                                resp.status_code)
        return response(json.loads(resp.text), resp.status_code)
    except requests.RequestException as e:
        raise LunrError(str(e))
def _get_freegeoip() -> Optional[Dict[str, Any]]:
    """Query freegeoip.io for location data.

    Returns a normalized location dict, or None when the request fails or
    the body is not valid JSON.
    """
    try:
        raw_info = requests.get(FREEGEO_API, timeout=5).json()
    except (requests.RequestException, ValueError):
        return None
    # freegeoip's field names already match our schema one-to-one.
    wanted_keys = (
        'ip', 'country_code', 'country_name', 'region_code',
        'region_name', 'city', 'zip_code', 'time_zone',
        'latitude', 'longitude',
    )
    return {key: raw_info.get(key) for key in wanted_keys}
def _get_ip_api() -> Optional[Dict[str, Any]]:
    """Query ip-api.com for location data.

    Returns a normalized location dict, or None when the request fails or
    the body is not valid JSON.
    """
    try:
        raw_info = requests.get(IP_API, timeout=5).json()
    except (requests.RequestException, ValueError):
        return None
    # Map our schema's keys to ip-api.com's camelCase field names.
    field_map = {
        'ip': 'query',
        'country_code': 'countryCode',
        'country_name': 'country',
        'region_code': 'region',
        'region_name': 'regionName',
        'city': 'city',
        'zip_code': 'zip',
        'time_zone': 'timezone',
        'latitude': 'lat',
        'longitude': 'lon',
    }
    return {ours: raw_info.get(theirs) for ours, theirs in field_map.items()}
def pull_poloniex_data():
    """Fetch the Poloniex ticker and persist it as an ExchangeData row.

    :return: An error string on transport failure, otherwise None.
    """
    try:
        logger.info("pulling Poloniex data...")
        req = get('https://poloniex.com/public?command=returnTicker')
        data = req.json()
        timestamp = time.time()
        # Store the raw ticker snapshot for audit/replay.
        poloniex_data_point = ExchangeData.objects.create(
            source=POLONIEX,
            data=json.dumps(data),
            timestamp=timestamp
        )
        logger.info("Saving Poloniex price, volume data...")
        # Derive per-pair price/volume rows from the same snapshot.
        _save_prices_and_volumes(data, timestamp)
    except RequestException:
        # NOTE(review): a non-JSON body (ValueError from .json()) is not
        # caught here — confirm whether that is intended.
        return 'Error to collect data from Poloniex'
def request(self, method, url, **kwargs):
    """Issue an HTTP request, ensuring a User-Agent header is present and
    mapping transport failures to RequestError."""
    supplied = kwargs.get('headers')
    if not supplied:
        merged = self.default_headers
    else:
        # Copy so the caller's dict is never mutated.
        merged = dict(supplied)
        # Header names are case-insensitive; normalize to Title-Case for
        # the membership check.
        if 'User-Agent' not in map(str.title, merged.keys()):
            merged['User-Agent'] = self.user_agent
    kwargs['headers'] = merged
    try:
        return super(RequestsSession, self).request(method, url, **kwargs)
    except requests.RequestException:
        raise RequestError
def working(self, priority: int, url: str, keys: dict, deep: int,
            repeat: int, proxies=None) -> (int, bool, object):
    """
    working function, must "try, except" and don't change the parameters
    and return

    :return (fetch_result, proxies_state, content):
        fetch_result: -2 (fetch failed, stop thread), -1 (fetch failed),
            0 (need repeat), 1 (fetch success)
        proxies_state: True (available), False (unavailable)
        content: any object, for example string, list, etc
    """
    logging.debug("%s start: %s", self.__class__.__name__,
                  CONFIG_FETCH_MESSAGE % (priority, keys, deep, repeat, url))
    # Random jitter so a pool of fetchers doesn't hit the target in sync.
    time.sleep(random.randint(0, self._sleep_time))
    try:
        fetch_result, proxies_state, content = self.url_fetch(
            priority, url, keys, deep, repeat, proxies=proxies)
    except requests.RequestException:
        # Transport errors are retryable (result 0) until _max_repeat.
        if repeat >= self._max_repeat:
            fetch_result, proxies_state, content = -1, True, None
            logging.error("%s error: %s, %s", self.__class__.__name__,
                          extract_error_info(),
                          CONFIG_FETCH_MESSAGE % (priority, keys, deep,
                                                  repeat, url))
        else:
            fetch_result, proxies_state, content = 0, True, None
            logging.debug("%s repeat: %s, %s", self.__class__.__name__,
                          extract_error_info(),
                          CONFIG_FETCH_MESSAGE % (priority, keys, deep,
                                                  repeat, url))
    except Exception:
        # Any other failure is terminal for this URL (no retry).
        fetch_result, proxies_state, content = -1, True, None
        logging.error("%s error: %s, %s", self.__class__.__name__,
                      extract_error_info(),
                      CONFIG_FETCH_MESSAGE % (priority, keys, deep, repeat,
                                              url))
    logging.debug("%s end: fetch_result=%s, proxies_state=%s, url=%s",
                  self.__class__.__name__, fetch_result, proxies_state, url)
    return fetch_result, proxies_state, content
def _request_call(self, uri, method='get', wait=False, **kwargs):
    # Try each configured etcd URL in turn (or only the primary when
    # reconnects are disallowed); the first success wins, and only when
    # every host fails is an EtcdException raised with all errors.
    if self._allow_reconnect:
        urls = self._urls
    else:
        urls = [self._urls[0]]
    error_messages = []
    for u in urls:
        try:
            url = u + uri
            return EtcdResult(getattr(self._session, method)(url, **kwargs))
        except RequestException as err:
            # Remember which host failed and why, then try the next one.
            error_messages.append("%s: %s" % (u, err))
    raise EtcdException('No more hosts to connect.\nErrors: %s'
                        % '\n'.join(error_messages))
def grant_access_token(self, code):
    """Exchange an OAuth2 authorization code for access/refresh tokens.

    Stores the tokens on the client and returns the raw token payload.

    :param code: The authorization code from the OAuth2 redirect.
    :raises BitbucketAPIError: when Bitbucket rejects the exchange.
    """
    res = self._session.post(
        'https://bitbucket.org/site/oauth2/access_token',
        data={
            'grant_type': 'authorization_code',
            'code': code,
        },
        auth=HTTPBasicAuth(self._oauth_key, self._oauth_secret)
    )
    try:
        res.raise_for_status()
    except requests.RequestException as reqe:
        # Bitbucket returns a JSON error document on failure; surface its
        # fields on the API error along with the request/response pair.
        error_info = res.json()
        raise BitbucketAPIError(
            res.status_code,
            error_info.get('error', ''),
            error_info.get('error_description', ''),
            request=reqe.request,
            response=reqe.response
        )
    data = res.json()
    self._access_token = data['access_token']
    self._refresh_token = data['refresh_token']
    self._token_type = data['token_type']
    return data
def _test_connection(self):
    # Probe the Oozie /versions endpoint; any communication problem or a
    # server that doesn't speak API version 2 raises a communication error.
    response = None
    try:
        response = requests.get('{}/versions'.format(self._url),
                                timeout=self._timeout)
        response.raise_for_status()
        self._stats.update(response)
    except requests.RequestException as err:
        # response stays None when the request itself failed to connect.
        self._stats.update(response)
        if self._verbose and response is not None:
            self.logger.error(response.headers)
        message = "Unable to contact Oozie server at {}".format(self._url)
        raise exceptions.OozieException.communication_error(message, err)
    try:
        versions = response.json()
    except ValueError as err:
        message = "Invalid response from Oozie server at {} ".format(self._url)
        raise exceptions.OozieException.communication_error(message, err)
    # The endpoint returns the list of supported API versions.
    if 2 not in versions:
        message = "Oozie server at {} does not support API version 2 (supported: {})".format(self._url, versions)
        raise exceptions.OozieException.communication_error(message)
def write(self, data):
    """Write data to the file.

    :param data: data to be written
    :raises: VimConnectionException, VimException
    """
    try:
        self._file_handle.send(data)
    except requests.RequestException as excep:
        # Transport-level failures map to the connection exception.
        excep_msg = _("Connection error occurred while writing data to"
                      " %s.") % self._url
        LOG.exception(excep_msg)
        raise exceptions.VimConnectionException(excep_msg, excep)
    except Exception as excep:
        # TODO(vbala) We need to catch and raise specific exceptions
        # related to connection problems, invalid request and invalid
        # arguments.
        excep_msg = _("Error occurred while writing data to"
                      " %s.") % self._url
        LOG.exception(excep_msg)
        raise exceptions.VimException(excep_msg, excep)
def write(self, data):
    """Write data to the file.

    :param data: data to be written
    :raises: VimConnectionException, VimException
    """
    try:
        self._file_handle.send(data)
        # Track total bytes written for progress/verification purposes.
        self._bytes_written += len(data)
    except requests.RequestException as excep:
        # Transport-level failures map to the connection exception.
        excep_msg = _("Connection error occurred while writing data to"
                      " %s.") % self._url
        LOG.exception(excep_msg)
        raise exceptions.VimConnectionException(excep_msg, excep)
    except Exception as excep:
        # TODO(vbala) We need to catch and raise specific exceptions
        # related to connection problems, invalid request and invalid
        # arguments.
        excep_msg = _("Error occurred while writing data to"
                      " %s.") % self._url
        LOG.exception(excep_msg)
        raise exceptions.VimException(excep_msg, excep)
def get_fees():
    """Fetch the recommended fee-per-kB; fall back to the default on error.

    :return: dict with 'per_kb', 'per_input' and 'per_output' fees.
    :raises exceptions.UnreasonableFeeError: when the fee is out of range.
    """
    try:
        response = requests.get(_fee_host + "v1/fees/recommended")
        if response.status_code == 200:
            # API reports satoshis per byte; convert to per kB.
            fee_per_kb = response.json()['halfHourFee'] * 1000
        else:
            # Deliberately raised inside the try: ConnectionError is a
            # RequestException subclass, so unexpected status codes reuse
            # the same fallback path below.
            raise requests.ConnectionError('Received status_code %d'
                                           % response.status_code)
    except requests.RequestException as error:
        fee_per_kb = DEFAULT_FEE_PER_KB
        logger.error(
            "Error getting recommended fees from server: %s. Using defaults."
            % error)
    # Sanity check: refuse fees outside [0, 2x default].
    if not 0 <= fee_per_kb <= 2 * DEFAULT_FEE_PER_KB:
        raise exceptions.UnreasonableFeeError(
            'Unreasonable fee per kB: %s' % fee_per_kb)
    return {
        'per_kb': fee_per_kb,
        'per_input': int(DEFAULT_INPUT_SIZE_KB * fee_per_kb),
        'per_output': int(DEFAULT_OUTPUT_SIZE_KB * fee_per_kb)
    }
def check_url(url, expected_status=200, timeout=5):
    """Probe `url` and report whether it answered with `expected_status`."""
    def _result(healthy, message):
        # All exits share the same two-key result shape.
        return {HealthCheck.HEALTHY: healthy,
                HealthCheck.STATUS_MESSAGE: message}

    if not url:
        return _result(False, 'No URL specified to check.')
    try:
        response = requests.get(url, timeout=timeout)
    except requests.RequestException as exc:
        return _result(False, 'Error connecting to URL: {}'.format(str(exc)))
    if response.status_code == expected_status:
        return _result(True, 'URL is available')
    return _result(False,
                   'server responded with unexpected status code: {}'.format(
                       response.status_code))
def get_versions(): """ Wrap in a function to ensure that we don't run this every time a CLI command runs (yuck!) Also protects import of `requests` from issues when grabbed by setuptools. More on that inline """ # import in the func (rather than top-level scope) so that at setup time, # `requests` isn't required -- otherwise, setuptools will fail to run # because requests isn't installed yet. import requests try: version_data = requests.get( "https://pypi.python.org/pypi/globus-cli/json").json() latest = max(LooseVersion(v) for v in version_data["releases"]) return latest, LooseVersion(__version__) # if the fetch from pypi fails except requests.RequestException: return None, LooseVersion(__version__)
def trigger(self):
    # Push any new pushable events for this hook's group to its webhook,
    # then advance the high-water mark (last_event_id) on success.
    if not self.revoked and self.url != None:
        try:
            # Find the events
            events = sg.db.session.query(EVENT).filter(
                EVENT.id > self.last_event_id,
                EVENT.notif_to_push == True,
                EVENT.group_id == self.group_id
            ).order_by(asc(EVENT.time)).all()
            res = []
            max_id = 0
            for event in events:
                # Track the highest id seen so the watermark can advance.
                max_id = max(event.id, max_id)
                res.append({'id': event.id,
                            'notif': event.notif.encode(sg.DEFAULT_CHARSET)})
            # Send the data
            if len(res) > 0:
                try:
                    headers = {'Authorization': self.jwt}
                    r = requests.post(self.url, headers=headers, json=res,
                                      timeout=1)
                    # Update the hook only after a send that didn't raise.
                    self.last_event_id = max_id
                    sg.db.session.add(self)
                    sg.db.session.commit()
                except requests.RequestException as e:
                    sg.logger.warning('Unable to send events for reverse hook %s (%s) and url %s : %s' % (self.name, self.id, self.url, str(e), ))
        except NoResultFound:
            sg.logger.warning('No event found corresponding to the reverse hook %s (%s)' % (self.name, self.id, ))
def connected():
    """Check if we can connect to the Amazon oAuth2 endpoint.

    :return: True when the token URL is reachable, False otherwise.
    """
    # print() call form is Python 2/3 compatible (was a py2 statement).
    print("Checking Internet Connection")
    try:
        requests.get(AlexaService.AMAZON_TOKEN_URL)
        print("Connection OK")
        return True
    except requests.exceptions.Timeout as exception:
        # str(exception) instead of .message: BaseException.message was
        # removed in Python 3.
        print("Error: Timeout / " + str(exception))
    except requests.exceptions.TooManyRedirects as exception:
        print("Error: Invalid URL provided / " + str(exception))
    except requests.RequestException as exception:
        print("Error: Connection Failed / " + str(exception))
    return False
def read(self, n=None):
    """Read the file stream.

    Args:
        n (int, optional): The bytes to read from the stream, if n is
            None, it means read the whole data stream.

    Returns:
        The data in bytes, if all data has been read, returns an empty
        string.
    """
    # Lazily create the underlying response on first read.
    if self.r is None:
        self._init_r()
    try:
        if n is None:
            # .content buffers the entire remaining body in memory.
            return self.r.content
        return self.r.raw.read(n)
    except requests.RequestException:
        raise_with_traceback(exceptions.HTTPError,
                             'Failed to read the response body')
def _RequestUrl(self, url, verb, data=None):
    """
    Request a url.

    :param url: The web location we want to retrieve.
    :param verb: GET only (for now).
    :param data: A dict of (str, unicode) key/value pairs.
    :return: The requests response object for GET; 0 for any other verb.
    """
    if verb == 'GET':
        # Query parameters are folded into the URL before the request.
        url = self._BuildUrl(url, extra_params=data)
        try:
            return requests.get(
                url,
                auth=self.__auth,
                timeout=self._timeout
            )
        except requests.RequestException as e:
            raise ZillowError(str(e))
    # Unsupported verbs fall through to a sentinel return value.
    return 0
def query_leveldb(query_db, save_db, queryed_db):
    '''query by leveldb'''
    # For each name not already saved or marked as queried, look up the
    # (possibly truncated) name and persist both the results and an
    # "already queried" marker. Nesting below is reconstructed from
    # collapsed source — verify against the original layout.
    try:
        with requests.Session() as session:
            # Session-scoped token is threaded through successive queries.
            _token = ''
            for _name, _code in query_db.RangeIter():
                if not util.has_key(save_db, _name) and not util.has_key(queryed_db, _name):
                    # Truncate long names to 18 characters for the query.
                    _subname = _name[0: 18] if len(_name) > 18 else _name
                    logging.info(_name + ' -> ' + _subname)
                    _query_code, _token = query_keyword(session, _subname, _token)
                    if _query_code:
                        for _r in _query_code:
                            logging.info(_r[0].decode() + ' : ' + _r[1].decode())
                            save_db.Put(_r[0], _r[1], sync=True)
                    # Mark the name as processed regardless of results.
                    queryed_db.Put(_name, '', sync=True)
        return True
    except requests.RequestException as _e:
        logging.error(_e)
        return False
def query_leveldb(query_db, save_db, queryed_db):
    '''Query codes by keyword for every name stored in *query_db*.

    Names already present in *save_db* or *queryed_db* are skipped.
    Results are persisted to *save_db* and each processed name is
    recorded in *queryed_db*.  Returns True when the full iteration
    completed, False on any requests error.
    '''
    try:
        with requests.Session() as session:
            for name, _ in query_db.RangeIter():
                # Skip names that were already saved or already queried.
                if util.has_key(save_db, name) or util.has_key(queryed_db, name):
                    continue
                # Truncate long names to the first 18 characters.
                keyword = name if len(name) <= 18 else name[0: 18]
                logging.info(name + ' -> ' + keyword)
                found = query_keyword(session, keyword)
                if found:
                    for row in found:
                        logging.info(row[0] + ' : ' + row[1])
                        save_db.Put(row[0], row[1], sync=True)
                queryed_db.Put(name, '', sync=True)
        return True
    except requests.RequestException as err:
        logging.error(err)
        return False
def send_request(self, method_name, is_notification, params):
    """Issue the HTTP request to the server and return the method result
    (if not a notification).

    Raises TransportError when the HTTP request fails, when the server
    answers with a non-200 status, or when the body cannot be decoded.
    """
    request_body = self.serialize(method_name, params, is_notification)
    try:
        response = self.request(data=request_body)
    except requests.RequestException as requests_exception:
        raise TransportError('Error calling method %r' % method_name,
                             requests_exception)
    if response.status_code != requests.codes.ok:
        raise TransportError(response.status_code)
    if is_notification:
        # Notifications carry no result payload.
        return None
    try:
        parsed = response.json()
    except ValueError as value_error:
        raise TransportError('Cannot deserialize response body', value_error)
    return self.parse_result(parsed)
def _send_notification(self, data: dict) -> Response:
    """POST the notification payload to the provider endpoint.

    :param data: Provider-specific payload, sent form-encoded.
    :return: A Response built via create_response(), carrying the HTTP
        response and any errors encountered.
    """
    response_data = {
        'provider_name': self.provider_name,
        'data': data
    }
    try:
        response = requests.post(self.base_url, data=data)
        response.raise_for_status()
        response_data['response'] = response
    except requests.RequestException as e:
        if e.response is not None:
            response_data['response'] = e.response
            try:
                # The provider normally reports {'message': ...} on errors,
                # but guard against non-JSON bodies / missing keys so a
                # decode error does not mask the original HTTP failure.
                response_data['errors'] = [e.response.json()['message']]
            except (ValueError, KeyError):
                response_data['errors'] = [e.response.text]
        else:
            response_data['errors'] = [str(e)]
    return create_response(**response_data)
def _send_notification(self, data: dict) -> Response:
    """POST *data* as JSON to the webhook URL carried inside the payload.

    :param data: Payload including a 'webhook_url' key (popped before send).
    :return: A Response built via create_response().
    """
    url = data.pop('webhook_url')
    response_data = {
        'provider_name': self.provider_name,
        'data': data
    }
    try:
        response = requests.post(url, json=data)
        response.raise_for_status()
    except requests.RequestException as e:
        if e.response is None:
            # No HTTP response at all (connection error, timeout, ...).
            response_data['errors'] = [str(e)]
        else:
            response_data['response'] = e.response
            response_data['errors'] = [e.response.text]
    else:
        response_data['response'] = response
    return create_response(**response_data)
def _send_notification(self, data: dict) -> Response:
    """POST the notification payload to the provider endpoint.

    :param data: Provider-specific payload, sent form-encoded.
    :return: A Response built via create_response(), carrying the HTTP
        response and any errors encountered.
    """
    response_data = {
        'provider_name': self.provider_name,
        'data': data
    }
    try:
        response = requests.post(self.base_url, data=data)
        response.raise_for_status()
        response_data['response'] = response
    except requests.RequestException as e:
        if e.response is not None:
            response_data['response'] = e.response
            try:
                # The provider normally reports {'errors': [...]} on errors,
                # but guard against non-JSON bodies / missing keys so a
                # decode error does not mask the original HTTP failure.
                response_data['errors'] = e.response.json()['errors']
            except (ValueError, KeyError):
                response_data['errors'] = [e.response.text]
        else:
            response_data['errors'] = [str(e)]
    return create_response(**response_data)
def _send_notification(self, data: dict) -> Response:
    """Format the token into the base URL's 'sendMessage' endpoint and
    POST the remaining payload as JSON.

    :param data: Payload including a 'token' key (popped before send).
    :return: A Response built via create_response().
    """
    token = data.pop('token')
    url = self.base_url.format(token=token, method='sendMessage')
    response_data = {
        'provider_name': self.provider_name,
        'data': data
    }
    try:
        response = requests.post(url, json=data)
        response.raise_for_status()
    except requests.RequestException as e:
        if e.response is None:
            # No HTTP response at all (connection error, timeout, ...).
            response_data['errors'] = [str(e)]
        else:
            response_data['response'] = e.response
            response_data['errors'] = [e.response.json()['description']]
    else:
        response_data['response'] = response
    return create_response(**response_data)
def _send_notification(self, data: dict) -> Response:
    """GET the provider endpoint with *data* as query parameters.

    Besides HTTP-level failures, also surfaces application-level errors
    reported through the JSON body's 'success' / 'errorMessage' fields.

    :return: A Response built via create_response().
    """
    response_data = {
        'provider_name': self.provider_name,
        'data': data
    }
    try:
        response = requests.get(self.base_url, params=data)
        response.raise_for_status()
        response_data['response'] = response
        body = response.json()
        if not body['success']:
            # HTTP 200 but the provider reports an application error.
            response_data['errors'] = [body['errorMessage']]
    except requests.RequestException as e:
        if e.response is None:
            response_data['errors'] = [str(e)]
        else:
            response_data['response'] = e.response
            response_data['errors'] = [e.response.json()['errorMessage']]
    return create_response(**response_data)