The following 47 code examples, extracted from open-source Python projects, illustrate how to use aiohttp.ClientError().
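
Before diving into the examples, here is a minimal, self-contained sketch of the common pattern, assuming aiohttp 3.x (the function name fetch_text and the URL are illustrative, not taken from any of the projects below): perform a request inside a ClientSession and catch aiohttp.ClientError, the base class for aiohttp's client-side exceptions. Note that timeouts surface as asyncio.TimeoutError, which is not a ClientError subclass, which is why many of the examples below catch both.

import asyncio

import aiohttp


async def fetch_text(url: str) -> str:
    # One session per call keeps this sketch self-contained; real code
    # should reuse a single ClientSession across requests.
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get(url, timeout=aiohttp.ClientTimeout(total=10)) as resp:
                # Raises ClientResponseError (a ClientError subclass) on 4xx/5xx.
                resp.raise_for_status()
                return await resp.text()
        except aiohttp.ClientError as exc:
            # ClientError is the base class of aiohttp's client-side errors:
            # connection failures, SSL problems, bad responses, etc.
            raise RuntimeError('Request to {} failed: {}'.format(url, exc)) from exc


asyncio.run(fetch_text('https://example.com'))
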
async def send_sms(recipients: Iterable[str], msg: str, username: str, api_key: str, sender: str):
    data = {
        'messages': [],
    }  # type: Dict[str, List]
    for recipient in recipients:
        data['messages'].append({
            'source': 'python',
            'from': sender,
            'body': msg[:140],
            'to': recipient,
            'schedule': ''
        })
    try:
        async with aiohttp.ClientSession(headers={'Content-Type': 'application/json'},
                                         auth=aiohttp.BasicAuth(username, api_key)) as session:
            async with session.post(CLICKSEND_URL, data=json.dumps(data), timeout=30) as resp:
                if resp.status != 200:
                    log.msg('Error sending clicksend sms notification: http status %s' % (str(resp.status)),
                            'NOTIFICATION')
    except aiohttp.ClientError as e:
        log.msg('Error sending clicksend sms notification: %s' % (str(e)), 'NOTIFICATIONS')

def async_unsubscribe_all(self):
    """
    Disconnect from device.
    This removes all UpnpServices.
    """
    _LOGGER.debug('%s.async_disconnect()', self)

    if not self._device:
        return

    for service in self._device.services.values():
        try:
            sid = service.subscription_sid
            if sid:
                self._callback_view.unregister_service(sid)
                yield from service.async_unsubscribe(True)
        except (asyncio.TimeoutError, aiohttp.ClientError):
            pass

async def do_record(self):
    self.tag = self.request.pop('tag')
    self.type = self.request.pop('type')
    self.domain_id = self.request.pop('domain_id')
    self.pid = self.request.pop('pid')
    self.rid = self.request.pop('rid')
    self.lang = self.request.pop('lang')
    self.code = self.request.pop('code')
    try:
        if self.type == 0:
            await self.do_submission()
        elif self.type == 1:
            await self.do_pretest()
        else:
            raise Exception('Unsupported type: {}'.format(self.type))
    except CompileError:
        self.end(status=STATUS_COMPILE_ERROR, score=0, time_ms=0, memory_kb=0)
    except ClientError:
        raise
    except Exception as e:
        logger.exception(e)
        self.next(judge_text=repr(e))
        self.end(status=STATUS_SYSTEM_ERROR, score=0, time_ms=0, memory_kb=0)

def test_ws_loop_exception(client):
    """Test websocket loop exception."""
    @asyncio.coroutine
    def side_effect(*args, **kwargs):
        raise aiohttp.ClientError("Mock Exception")

    mock_ws = AsyncMock()
    mock_ws.receive.side_effect = side_effect
    client._ws_connection = mock_ws
    client.ws_close = AsyncMock()
    client._handle_event = MagicMock()

    with pytest.raises(exceptions.TransportError):
        client.loop.run_until_complete(client._ws_loop())

    assert client.ws_close.called
    assert len(client.ws_close.mock_calls) == 1
    assert client._handle_event.called
    assert len(client._handle_event.mock_calls) == 1
    assert client._handle_event.mock_calls[0][1][0] == 'closed'
    assert client._handle_event.mock_calls[0][1][1] is None

def test_ws_close_exception(client):
    """Test websocket close exception."""
    @asyncio.coroutine
    def side_effect(*args, **kwargs):
        raise aiohttp.ClientError("Mock Exception")

    mock_ws = AsyncMock()
    mock_ws.send_str.side_effect = side_effect
    client._ws_connection = mock_ws
    client._ws_session_data = {}
    client._handle_event = MagicMock()

    client.loop.run_until_complete(client.ws_close())

    assert mock_ws.close.called
    assert len(mock_ws.close.mock_calls) == 1
    assert mock_ws.send_str.called
    assert len(mock_ws.send_str.mock_calls) == 1
    assert mock_ws.send_str.mock_calls[0][1][0] == '41'

def ws_connect(self):
    """Open a websocket connection for real time events."""
    if self.ws_connected:
        raise exceptions.TransportError('Connection already open.')

    _LOGGER.info("Opening websocket connection.")
    try:
        # Open an engineIO session
        session_data = yield from self._get_engineio_session()

        # Now that the session data has been fetched, open the actual
        # websocket connection.
        ws_connection = yield from self._get_ws_connection(session_data)

        # Finalize connection status
        self._ws_connection = ws_connection
        self._ws_session_data = session_data

        # Send the first ping packet
        self.loop.create_task(self._ping())
    except (ClientError, HttpProcessingError, asyncio.TimeoutError) as exc:
        raise exceptions.TransportError from exc

    return self.loop.create_task(self._ws_loop())

def _ws_loop(self):
    """Run the websocket loop listening for messages."""
    msg = None
    try:
        while True:
            msg = yield from self._ws_connection.receive()
            if msg.type == aiohttp.WSMsgType.TEXT:
                self._handle_packet(msg.data)
            elif msg.type == aiohttp.WSMsgType.CLOSED:
                break
            elif msg.type == aiohttp.WSMsgType.ERROR:
                break
    except (ClientError, HttpProcessingError, asyncio.TimeoutError) as exc:
        raise exceptions.TransportError from exc
    finally:
        yield from self.ws_close()
        self._handle_event(EVENT_WS_CLOSED, None)

    if msg is not None and msg.type == aiohttp.WSMsgType.ERROR:
        raise exceptions.TransportError(
            'Websocket error detected. Connection closed.')

def ws_close(self):
    """Close the websocket connection."""
    if not self.ws_connected:
        return

    # Try to gracefully end the connection
    try:
        yield from self._ws_connection.send_str('41')
        yield from self._ws_connection.send_str('1')
    except (ClientError, HttpProcessingError, asyncio.TimeoutError):
        pass

    # Close any remaining ping handles
    handle = self._ws_session_data.get(ATTR_PING_INTERVAL_HANDLE)
    if handle:
        handle.cancel()
    handle = self._ws_session_data.get(ATTR_PING_TIMEOUT_HANDLE)
    if handle:
        handle.cancel()

    yield from self._ws_connection.close()
    self._ws_connection = None
    self._ws_session_data = None

async def retry_create_artifact(*args, **kwargs):
    """Retry create_artifact() calls.

    Args:
        *args: the args to pass on to create_artifact
        **kwargs: the args to pass on to create_artifact
    """
    await retry_async(
        create_artifact,
        retry_exceptions=(
            ScriptWorkerRetryException,
            aiohttp.ClientError
        ),
        args=args,
        kwargs=kwargs
    )


# create_artifact {{{1

async def test_single_proxy(self, proxy):
    """
    Test a single proxy.
    :param proxy:
    :return:
    """
    conn = aiohttp.TCPConnector(verify_ssl=False)
    async with aiohttp.ClientSession(connector=conn) as session:
        try:
            if isinstance(proxy, bytes):
                proxy = proxy.decode('utf-8')
            real_proxy = 'http://' + proxy
            print('Testing', proxy)
            async with session.get(TEST_URL, proxy=real_proxy, timeout=15,
                                   allow_redirects=False) as response:
                if response.status in VALID_STATUS_CODES:
                    self.redis.max(proxy)
                    print('Proxy is valid', proxy)
                else:
                    self.redis.decrease(proxy)
                    print('Invalid response status', response.status, 'IP', proxy)
        except (ClientError, aiohttp.client_exceptions.ClientConnectorError,
                asyncio.TimeoutError, AttributeError):
            self.redis.decrease(proxy)
            print('Proxy request failed', proxy)

async def orly(self, ctx, title, guide, author, *, top_text=''):
    """Generates O'Reilly book covers."""
    api_base = 'https://orly-appstore.herokuapp.com/generate?'

    url = (api_base +
           f'title={urlescape(title)}&top_text={urlescape(top_text)}&image_code={randrange(0, 41)}' +
           f'&theme={randrange(0, 17)}&author={urlescape(author)}&guide_text={urlescape(guide)}' +
           f'&guide_text_placement=bottom_right')

    try:
        async with ctx.typing():
            async with ctx.bot.session.get(url) as resp:
                with BytesIO(await resp.read()) as bio:
                    await ctx.send(file=discord.File(filename='orly.png', fp=bio))
    except aiohttp.ClientError:
        await ctx.send("Couldn't contact the API.")

async def test_perform_request_ssl_error(auto_close, loop):
    for exc, expected in [
        (aiohttp.ClientConnectorCertificateError(mock.Mock(), mock.Mock()), SSLError),  # noqa
        (aiohttp.ClientConnectorSSLError(mock.Mock(), mock.Mock()), SSLError),
        (aiohttp.ClientSSLError(mock.Mock(), mock.Mock()), SSLError),
        (aiohttp.ClientError('Other'), ConnectionError),
        (asyncio.TimeoutError, ConnectionTimeout),
    ]:
        session = aiohttp.ClientSession(loop=loop)

        @asyncio.coroutine
        def request(*args, **kwargs):
            raise exc

        session._request = request
        conn = auto_close(AIOHttpConnection(session=session, loop=loop, use_ssl=True))
        with pytest.raises(expected):
            await conn.perform_request('HEAD', '/')

async def fetch(url: str,
                session: aiohttp.ClientSession,
                timeout: float = None,
                loop: AbstractEventLoop = None) -> Dict[Any, Any]:
    ''' Fetch JSON format data from a web resource and return a dict '''
    try:
        logger.debug('fetching %s', url)
        async with session.get(url, timeout=timeout) as resp:
            if not resp.status == 200:
                raise Exception('Fetch failed {}: {}'.format(resp.status, url))
            data = await resp.json()
            return data
    except asyncio.TimeoutError:
        raise Exception('Request timed out to {}'.format(url)) from None
    except aiohttp.ClientError as exc:
        raise Exception('Client error {}, {}'.format(exc, url)) from None

def _load_remote_data(self, url):
    result = None

    async def _load_remote_data_async():
        nonlocal result
        with aiohttp.Timeout(self.load_timeout):
            response = await self.session.get(url)
            result = await response.read()
            try:
                response.raise_for_status()
            except aiohttp.ClientError as exc:
                raise TransportError(
                    message=str(exc),
                    status_code=response.status,
                    content=result
                ).with_traceback(exc.__traceback__) from exc

    # Block until we have the data
    self.loop.run_until_complete(_load_remote_data_async())
    return result

async def fetch(self, url, max_redirect):
    tries = 0
    exception = None
    while tries < self.max_tries:
        try:
            response = await self.session.get(
                url, allow_redirects=False)
            break
        except aiohttp.ClientError as client_error:
            exception = client_error
        tries += 1
    else:
        return

    try:
        next_url = await self.parse_link(response)
        print('{} has finished'.format(url))
        if next_url is not None:
            self.add_url(next_url, max_redirect)
    finally:
        response.release()

async def handle(self, url):
    tries = 0
    while tries < self.max_tries:
        try:
            response = await self.session.get(
                url, allow_redirects=False)
            break
        except aiohttp.ClientError:
            pass
        tries += 1

    try:
        doc = await self.fetch_etree(response)
        if is_root_url(url):
            print('root:{}'.format(url))
            self.parse_root_etree(doc)
        else:
            print('second level:{}'.format(url))
            self.parse_second_etree(doc, url)
    finally:
        await response.release()

async def download_content(self, url, count):
    total_size = 0
    errors = []
    start = time()
    for _ in range(count):
        try:
            async with self.session.get(url) as r:
                content = await r.read()
                total_size += len(content)
                if r.status != 200:
                    errors.append(f'{r.status} length: {len(content)}')
        except ClientError as e:
            errors.append(f'{e.__class__.__name__}: {e}')
    output = f'{time() - start:0.2f}s, {count} downloads, total size: {total_size}'
    if errors:
        output += ', errors: ' + ', '.join(errors)
    await self.redis.rpush(R_OUTPUT, output.encode())
    return total_size

async def http_post(shark, url, data):
    log = shark.log.bind(url=url)
    opts = shark.config['HTTP']
    if opts.get('ssl_cafile'):
        ssl_context = ssl.create_default_context(cafile=opts['ssl_cafile'])
    else:
        ssl_context = None
    conn = aiohttp.TCPConnector(ssl_context=ssl_context)
    async with aiohttp.ClientSession(connector=conn) as session:
        wait = opts['wait']
        for n in range(opts['tries']):
            if n > 0:
                await asyncio.sleep(wait)
            try:
                log.debug('http request', data=data)
                async with session.post(url, json=data,
                                        timeout=opts['timeout']) as resp:
                    if resp.status == 429:
                        # Too many requests.
                        wait = _get_rate_limit_wait(log, resp, opts)
                        continue
                    else:
                        wait = opts['wait']
                    resp.raise_for_status()
                    data = await resp.json()
                    log.debug('http response', data=data)
                    return data
            except aiohttp.ClientError:
                log.exception('unhandled exception in http_post')
            except asyncio.TimeoutError:
                log.exception('timeout in http_post')
    return {'status': 'error', 'error': c.ERR_SERVICE_UNAVAILABLE}

async def send_slack_notification(url: str, attachments: List[Dict]):
    data = {
        'attachments': attachments
    }
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(url, data=json.dumps(data), timeout=30) as resp:
                if resp.status != 200:
                    log.msg('Error sending slack notification: http status %s' % (str(resp.status)),
                            'NOTIFICATION')
    except aiohttp.ClientError as e:
        log.msg('Error sending slack notification: %s' % (str(e)), 'NOTIFICATIONS')

async def send_http_notification(url: str, in_data: Any):
    out_data = json.dumps(in_data)
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(url, data=out_data, timeout=10) as resp:
                if resp.status != 200:
                    log.msg('Error sending http notification: http status %s' % (str(resp.status)),
                            'NOTIFICATION')
    except aiohttp.ClientError as e:
        log.msg('Error sending http notification: %s' % (str(e)), 'NOTIFICATIONS')

def async_http_request(self, method, url, headers=None, body=None):
    websession = async_get_clientsession(self.hass)
    try:
        with async_timeout.timeout(5, loop=self.hass.loop):
            response = yield from websession.request(method, url,
                                                     headers=headers, data=body)
            response_body = yield from response.text()
    except (asyncio.TimeoutError, aiohttp.ClientError) as ex:
        _LOGGER.debug("Error in %s.async_call_action(): %s", self, ex)
        raise
    return response.status, response.headers, response_body

def async_update(self):
    """Retrieve the latest data."""
    _LOGGER.debug('%s.async_update()', self)
    if not self._device:
        _LOGGER.debug('%s.async_update(): no device', self)
        try:
            yield from self._async_init_device()
        except (asyncio.TimeoutError, aiohttp.ClientError):
            # Not yet seen alive, leave for now, gracefully
            _LOGGER.debug('%s._async_update(): device not seen yet, leaving', self)
            return

    # XXX TODO: if re-connected, then (re-)subscribe

    # call GetTransportInfo/GetPositionInfo regularly
    try:
        _LOGGER.debug('%s.async_update(): calling...', self)
        avt_service = self._service('AVT')
        if avt_service:
            get_transport_info_action = avt_service.action('GetTransportInfo')
            state = yield from self._async_poll_transport_info(get_transport_info_action)

            if state == STATE_PLAYING or state == STATE_PAUSED:
                # playing something... get position info
                get_position_info_action = avt_service.action('GetPositionInfo')
                yield from self._async_poll_position_info(get_position_info_action)
        else:
            _LOGGER.debug('%s.async_update(): pinging...', self)
            yield from self._device.async_ping()
        self._is_connected = True
    except (asyncio.TimeoutError, aiohttp.ClientError) as ex:
        _LOGGER.debug('%s.async_update(): error on update: %s', self, ex)
        self._is_connected = False
        yield from self.async_unsubscribe_all()

def async_unsubscribe(self, force=False):
    """UNSUBSCRIBE from events on StateVariables."""
    if not force and not self._subscription_sid:
        raise RuntimeError('Cannot unsubscribe, subscribe first')

    subscription_sid = self._subscription_sid
    if force:
        # we don't care what happens further, make sure we are unsubscribed
        self._subscription_sid = None

    headers = {
        'Host': urllib.parse.urlparse(self.event_sub_url).netloc,
        'SID': subscription_sid,
    }
    try:
        response_status, _, _ = \
            yield from self._requester.async_http_request('UNSUBSCRIBE',
                                                          self.event_sub_url,
                                                          headers)
    except (asyncio.TimeoutError, aiohttp.ClientError):
        if not force:
            raise
        return

    if response_status != 200:
        _LOGGER.error('Did not receive 200, but %s', response_status)
        return

    self._subscription_sid = None

async def claim_work(context):
    """Find and claim the next pending task in the queue, if any.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Returns:
        dict: a dict containing a list of the task definitions of the tasks claimed.
    """
    log.debug("Calling claimWork...")
    payload = {
        'workerGroup': context.config['worker_group'],
        'workerId': context.config['worker_id'],
        # Hardcode one task at a time.  Make this a pref if we allow for
        # parallel tasks in multiple `work_dir`s.
        'tasks': 1,
    }
    try:
        return await context.queue.claimWork(
            context.config['provisioner_id'],
            context.config['worker_type'],
            payload
        )
    except (taskcluster.exceptions.TaskclusterFailure, aiohttp.ClientError) as exc:
        log.warning("{} {}".format(exc.__class__, exc))

async def sendToTelegram(self):
    session = SessionManager.get()
    TELEGRAM_BASE_URL = "https://api.telegram.org/bot{token}/sendVenue".format(token=conf.TELEGRAM_BOT_TOKEN)
    title = self.name
    try:
        minutes, seconds = divmod(self.tth, 60)
        description = 'Expires at: {} ({:.0f}m{:.0f}s left)'.format(self.expire_time, minutes, seconds)
    except AttributeError:
        description = "It'll expire between {} & {}.".format(self.min_expire_time, self.max_expire_time)
    try:
        title += ' ({}/{}/{})'.format(self.attack, self.defense, self.stamina)
    except AttributeError:
        pass

    payload = {
        'chat_id': conf.TELEGRAM_CHAT_ID,
        'latitude': self.coordinates[0],
        'longitude': self.coordinates[1],
        'title': title,
        'address': description,
    }

    try:
        async with session.post(TELEGRAM_BASE_URL, data=payload) as resp:
            self.log.info('Sent a Telegram notification about {}.', self.name)
            return True
    except ClientResponseError as e:
        self.log.error('Error {} from Telegram: {}', e.code, e.message)
    except ClientError as e:
        self.log.error('{} during Telegram notification.', e.__class__.__name__)
    except CancelledError:
        raise
    except Exception:
        self.log.exception('Exception caught in Telegram notification.')
    return False

async def hook_post(self, w, session, payload, headers={'content-type': 'application/json'}):
    try:
        async with session.post(w, json=payload, timeout=4, headers=headers) as resp:
            return True
    except ClientResponseError as e:
        self.log.error('Error {} from webhook {}: {}', e.code, w, e.message)
    except (TimeoutError, ServerTimeoutError):
        self.log.error('Response timeout from webhook: {}', w)
    except ClientError as e:
        self.log.error('{} on webhook: {}', e.__class__.__name__, w)
    except CancelledError:
        raise
    except Exception:
        self.log.exception('Error from webhook: {}', w)
    return False

async def test_status_response_handle_client_errors(cli):
    async def error_task(product, version):
        raise ClientError('Error message')

    error_endpoint = status_response(error_task)
    request = mock.MagicMock()
    request.match_info = {"product": "firefox", "version": "57.0"}

    resp = await error_endpoint(request)
    assert json.loads(resp.body.decode()) == {
        "status": Status.ERROR.value,
        "message": "Error message",
    }

async def get_maps(self):
    try:
        async with aiohttp.get("https://www.hotslogs.com/API/Data/Maps") as response:
            maps = await response.json()
            return [m["PrimaryName"] for m in maps]
    except aiohttp.ClientError:
        raise APIError()

async def get_mmr(self, tag):
    if "#" not in tag:
        raise ValueError("battle tag must include '#'")
    try:
        async with aiohttp.get("https://www.hotslogs.com/API/Players/1/" + tag.replace("#", "_")) as r:
            response = await r.json()
    except aiohttp.ClientError:
        raise APIError()
    if not response:
        return MMRInfo(MMRInfo.NO_INFO)
    rankings = response.get("LeaderboardRankings")
    if not rankings:
        return MMRInfo(MMRInfo.NO_INFO)
    qm_mmr = 0
    hl_mmr = 0
    for ranking in rankings:
        if ranking["GameMode"] == "QuickMatch":
            qm_mmr = ranking["CurrentMMR"]
        elif ranking["GameMode"] == "HeroLeague":
            hl_mmr = ranking["CurrentMMR"]
    return MMRInfo(MMRInfo.PRESENT, qm_mmr, hl_mmr)

async def ping(url, timeout, *, loop):
    async with aiohttp.ClientSession(loop=loop) as session:
        with async_timeout.timeout(timeout, loop=loop):
            try:
                async with session.get(url) as response:
                    logger.debug(response.status)
            except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
                logger.exception(exc)

async def steal_emoji(self, ctx: DogbotContext, emoji: EmojiStealer, name=None):
    """
    Imports an external emoji into this server.

    You can specify an emoji ID, the custom emoji itself, or "recent" to make the bot scan
    for recent messages with a custom emoji that isn't already in this server.

    If you provide a name, the bot will use that when uploading the emoji, instead of the
    name it finds. The name parameter is mandatory if you specify an emoji ID.
    """
    emoji_url = f'https://cdn.discordapp.com/emojis/{emoji[0]}.png'

    if not emoji.name and not name:
        return await ctx.send('You must provide the name for the stolen emoji.')

    # strip colons from name if they are there
    name = None if not name else name.strip(':')

    msg = await ctx.send('Downloading...')

    try:
        async with ctx.bot.session.get(emoji_url) as emoji_resp:
            emoji_bytes = await emoji_resp.read()
            if emoji_resp.status != 200 or not emoji_bytes:
                return await ctx.send('Failed to download the emoji.')

        # steal
        emoji = await ctx.guild.create_custom_emoji(name=name or emoji.name, image=emoji_bytes)

        # as confirmation, attempt to react to the command message with it, and fall back to ok
        try:
            await msg.edit(content=str(emoji))
            await msg.add_reaction(f'{emoji.name}:{emoji.id}')
        except discord.HTTPException:
            await ctx.ok()
    except aiohttp.ClientError:
        await msg.edit(content='Failed to download the emoji.')
    except discord.HTTPException:
        await msg.edit(content='Failed to upload the emoji.')

async def urban(self, ctx: DogbotContext, *, word):
    """Finds UrbanDictionary definitions."""
    async with ctx.channel.typing():
        try:
            result = await UrbanDefinition.query(self.bot.session, word)
        except ClientError:
            return await ctx.send('Failed to look up that word!')

        if not result:
            return await ctx.send('No results.')

        await ctx.send(embed=result.embed)

async def shibe(self, ctx: DogbotContext):
    """Posts a random Shiba Inu picture."""
    async with ctx.typing():
        try:
            resp = await utils.get_json(ctx.bot.session, SHIBE_ENDPOINT)
        except aiohttp.ClientError:
            return await ctx.send('Failed to contact the Shibe API. Please try again later.')
        await ctx.send(embed=discord.Embed().set_image(url=resp[0]))

async def __post(self, data):
    with aiohttp.Timeout(self.timeout, loop=self.loop):
        try:
            response = await self.__session.post(str(self.__url), data=data, headers=self.__headers)
        except aiohttp.ClientError as e:
            log.debug('Caught during POST request: %r', e)
            raise ConnectionError(str(self.url))
        else:
            if response.status == CSRF_ERROR_CODE:
                # Send request again with CSRF header
                self.__headers[CSRF_HEADER] = response.headers[CSRF_HEADER]
                log.debug('Setting CSRF header: %s = %s',
                          CSRF_HEADER, response.headers[CSRF_HEADER])
                await response.release()
                return await self.__post(data)
            elif response.status == AUTH_ERROR_CODE:
                await response.release()
                log.debug('Authentication failed')
                raise AuthError(str(self.url))
            else:
                try:
                    answer = await response.json()
                except aiohttp.ClientResponseError as e:
                    text = textwrap.shorten(await response.text(), 50, placeholder='...')
                    raise RPCError('Server sent malformed JSON: {}'.format(text))
                else:
                    return answer

async def __send_request(self, post_data):
    """Send RPC POST request to daemon

    post_data: Any valid RPC request as JSON string

    If applicable, returns response['arguments']['torrents'] or
    response['arguments'], otherwise response.

    Raises ClientError.
    """
    try:
        answer = await self.__post(post_data)
    except OSError as e:
        log.debug('Caught OSError: %r', e)
        raise ConnectionError(str(self.url))
    except asyncio.TimeoutError as e:
        log.debug('Caught TimeoutError: %r', e)
        raise ConnectionError('Timeout after {}s: {}'.format(self.timeout, self.url))
    else:
        if answer['result'] != 'success':
            raise RPCError(answer['result'].capitalize())
        else:
            if 'arguments' in answer:
                if 'torrents' in answer['arguments']:
                    return answer['arguments']['torrents']
                else:
                    return answer['arguments']
            return answer

async def fetch_listing(session, url):
    try:
        data = await fetch_json(session, url)
        return data['prefixes'], data['files']
    except (aiohttp.ClientError, KeyError, ValueError) as e:
        raise ValueError("Could not fetch '{}': {}".format(url, e))

async def fetch_release_candidate_metadata(session, record):
    """A JSON file containing build info is published along the nightly build
    archive.
    """
    global _rc_metadata

    url = record['download']['url']

    # Make sure the rc URL is turned into a en-US one.
    rc_url = localize_release_candidate_url(url)

    if rc_url in _rc_metadata:
        return _rc_metadata[rc_url]

    product = record['source']['product']
    if product == 'devedition':
        product = 'firefox'

    if product == 'fennec':
        metadata_url = re.sub(r'\.({})$'.format(FILE_EXTENSIONS), '.json', rc_url)
    else:
        major_version = record['target']['version'].split('rc')[0]
        parts = rc_url.split('/')
        parts[-1] = '{}-{}.json'.format(product, major_version)
        metadata_url = '/'.join(parts)

    try:
        metadata = await fetch_json(session, metadata_url)
    except aiohttp.ClientError as e:
        # Old RC like https://archive.mozilla.org/pub/firefox/releases/1.0rc1/
        # don't have metadata.
        logger.warning("Could not fetch metadata for '%s' from '%s'" % (
            record['id'], metadata_url))
        _rc_metadata[rc_url] = None  # Don't try it anymore.
        return None

    m = re.search(r'/build(\d+)/', url)
    metadata['buildnumber'] = int(m.group(1))

    _rc_metadata[rc_url] = metadata
    return metadata

async def _do_http_request(self, url, body, headers):
    try:
        return await self._do_http_request_impl(url, body, headers)
    except asyncio.TimeoutError:
        raise PyVLXException("Request timeout when talking to VELUX API")
    except aiohttp.ClientError:
        raise PyVLXException("HTTP error when talking to VELUX API")
    except OSError:
        raise PyVLXException("OS error when talking to VELUX API")

async def booru(self, ctx, booru, tags):
    if '[jose:no_nsfw]' in ctx.channel.topic:
        return

    # taxxx
    await self.jcoin.pricing(ctx, self.prices['API'])

    try:
        # grab posts
        posts = await booru.get_posts(ctx.bot, tags)
        if not posts:
            return await ctx.send('Found nothing.')

        # grab random post
        post = random.choice(posts)
        post_id = post.get('id')
        post_author = booru.get_author(post)
        log.info('%d posts from %s, chose %d', len(posts), booru.__name__, post_id)

        tags = (post['tags'].replace('_', '\\_'))[:500]

        # add stuffs
        embed = discord.Embed(title=f'Posted by {post_author}')
        embed.set_image(url=post['file_url'])
        embed.add_field(name='Tags', value=tags)
        embed.add_field(name='URL', value=booru.url_post.format(post_id))

        # hypnohub doesn't have this
        if 'fav_count' in post and 'score' in post:
            embed.add_field(name='Votes/Favorites',
                            value=f"{post['score']} votes, {post['fav_count']} favorites")

        # send
        await ctx.send(embed=embed)
    except BooruError as err:
        raise self.SayException(f'Error while fetching posts: `{err!r}`')
    except aiohttp.ClientError as err:
        log.exception('client error')
        raise self.SayException(f'Something went wrong. Sorry! `{err!r}`')

def fetch(url, session, params, method='GET', t=10):
    with timeout(t):
        resp = yield from session.request(method.lower(), url, params=params)
        try:
            return resp
        except (asyncio.TimeoutError, aiohttp.ClientError) as err:
            LOG.exception(err)

async def process(self, task):
    status, tasks, items = 0, set(), set()
    with (await self.manager.semaphore):
        try:
            request = Request(url=task.url)
            request = await self.pipeline.requests.process(request)
            request = await self.middleware.http.before(request)
            response = await self.downloader.process(request)
            response = await self.middleware.http.after(response)
            response = await self.pipeline.responses.process(response)
            if response.status in constants.HTTP_FAILED:
                status = response.status
            else:
                tasks, items = await self.spider.process(task=task, response=response)
                if tasks:
                    tasks = await self.pipeline.tasks.process(tasks)
                if items:
                    items = await self.pipeline.items.process(items)
            await self.pipeline.stats.process(stats=await self.stats())
        except aiohttp.ClientError as e:
            log.exception(e)
            status = constants.status.RETRIAL
            self.session.close()
        except Exception as e:
            log.exception(e)
            status = constants.status.FAILED
    result = Result(status=status, task=task, tasks=tasks, items=items)
    await self.manager.process(result=result)
    return result

async def request(self):
    """Refresh drawing plan data."""
    current_time = int(time.time())
    url = DRAWING_DATA_URL.format(current_time)
    try:
        async with self.session.get(url) as resp:
            data = await resp.json(content_type=None)
        self.start_x = data['startX']
        self.start_y = data['startY']
        self.colours = data['colors']
        self.kill = data['kill']
        self.version = data['newVersion']
        self.height = len(self.colours)
        if self.height > 0:
            self.width = max(len(row) for row in self.colours)
        else:
            self.width = 0
        logger.debug("Successfully updated drawing plan.")
        logger.debug("Start X: %d, start y: %d, kill: %s",
                     self.start_x, self.start_y, self.kill)
        return True
    except (aiohttp.ClientError, KeyError) as e:
        logger.exception(e)
        return False

async def run_loop(context, creds_key="credentials"):
    """Split this out of the async_main while loop for easier testing.

    args:
        context (scriptworker.context.Context): the scriptworker context.
        creds_key (str, optional): when reading the creds file, this dict key
            corresponds to the credentials value we want to use.  Defaults to
            "credentials".

    Returns:
        int: status
        None: if no task run.
    """
    loop = asyncio.get_event_loop()
    tasks = await claim_work(context)
    status = None
    if not tasks:
        await asyncio.sleep(context.config['poll_interval'])
        return status

    # Assume only a single task, but should more than one fall through,
    # run them sequentially.  A side effect is our return status will
    # be the status of the final task run.
    for task_defn in tasks.get('tasks', []):
        status = 0
        prepare_to_run_task(context, task_defn)
        loop.create_task(reclaim_task(context, context.task))
        try:
            if context.config['verify_chain_of_trust']:
                chain = ChainOfTrust(context, context.config['cot_job_type'])
                await verify_chain_of_trust(chain)
            status = await run_task(context)
            generate_cot(context)
        except ScriptWorkerException as e:
            status = worst_level(status, e.exit_code)
            log.error("Hit ScriptWorkerException: {}".format(e))
        try:
            await upload_artifacts(context)
        except ScriptWorkerException as e:
            status = worst_level(status, e.exit_code)
            log.error("Hit ScriptWorkerException: {}".format(e))
        except aiohttp.ClientError as e:
            status = worst_level(status, STATUSES['intermittent-task'])
            log.error("Hit aiohttp error: {}".format(e))
        await complete_task(context, status)
        cleanup(context)
    return status

def __getattr__(self, method):
    """Return asyncio coroutine that sends RPC request and returns response

    method: Any method from the RPC specs with every '-' replaced with '_'.
            For arguments see the RPC specs.

    Example:
    >>> stats = await client.session_stats()
    >>> torrents = await client.torrent_get(ids=(1,2,3), fields=('status','name'))

    Raises RPCError, ConnectionError, AuthError
    """
    async def request(arguments={}, autoconnect=True, **kwargs):
        async with self.__request_lock:
            if not self.connected:
                if autoconnect:
                    log.debug('Autoconnecting for %r', method)
                    await self.connect()
                else:
                    log.debug('Not connected and autoconnect=%r - %r returns None',
                              autoconnect, method)
                    return None

            arguments.update(**kwargs)
            rpc_request = json.dumps({'method': method.replace('_', '-'),
                                      'arguments': arguments})

            try:
                return await self.__send_request(rpc_request)
            except ClientError as e:
                log.debug('Caught ClientError in %r request: %r', method, e)

                # RPCError does not mean host is unreachable, there was just a
                # misunderstanding, so we're still connected.
                if not isinstance(e, RPCError) and self.connected:
                    await self.disconnect(str(e))

                self.__on_error.send(self.__url, error=e)
                raise

    request.__name__ = method
    request.__qualname__ = method
    return request

async def fetch_nightly_metadata(session, record):
    """A JSON file containing build info is published along the nightly build
    archive.
    """
    global _nightly_metadata

    url = record['download']['url']

    # Make sure the nightly_url is turned into a en-US one.
    nightly_url = localize_nightly_url(url)

    if nightly_url in _nightly_metadata:
        return _nightly_metadata[nightly_url]

    try:
        metadata_url = re.sub(r'\.({})$'.format(FILE_EXTENSIONS), '.json', nightly_url)
        metadata = await fetch_json(session, metadata_url)
        _nightly_metadata[nightly_url] = metadata
        return metadata
    except aiohttp.ClientError:
        # Very old nightly metadata is published as .txt files.
        try:
            # e.g. https://archive.mozilla.org/pub/firefox/nightly/2011/05/
            # 2011-05-05-03-mozilla-central/firefox-6.0a1.en-US.mac.txt
            old_metadata_url = re.sub(r'\.({})$'.format(FILE_EXTENSIONS), '.txt', nightly_url)
            async with session.get(old_metadata_url) as response:
                old_metadata = await response.text()
                m = re.search(r'^(\d+)\n(http.+)/rev/(.+)$', old_metadata)
                if m:
                    metadata = {
                        'buildid': m.group(1),
                        'moz_source_repo': m.group(2),
                        'moz_source_stamp': m.group(3),
                    }
                    _nightly_metadata[nightly_url] = metadata
                    return metadata
                # e.g. https://archive.mozilla.org/pub/firefox/nightly/2010/07/2010-07-04-05
                # -mozilla-central/firefox-4.0b2pre.en-US.win64-x86_64.txt
                m = re.search(r'^(\d+) (.+)$', old_metadata)
                if m:
                    metadata = {
                        'buildid': m.group(1),
                        'moz_source_stamp': m.group(2),
                        'moz_source_repo': 'http://hg.mozilla.org/mozilla-central',
                    }
                    _nightly_metadata[nightly_url] = metadata
                    return metadata
        except aiohttp.ClientError as e:
            pass

    logger.warning("Could not fetch metadata for '%s' from '%s'" % (
        record['id'], metadata_url))
    _nightly_metadata[url] = None  # Don't try it anymore.
    return None

async def fetch_release_metadata(session, record):
    """The `candidates` folder contains build info about recent released
    versions.
    """
    global _candidates_build_folder

    product = record['source']['product']
    version = record['target']['version']
    platform = record['target']['platform']
    locale = 'en-US'

    try:
        latest_build_folder = _candidates_build_folder[product][version]
    except KeyError:
        # Version is not listed in candidates. Give up.
        return None

    build_number = int(latest_build_folder.strip('/')[-1])  # build3 -> 3

    # Metadata for EME-free and sha1 repacks are the same as original release.
    platform = re.sub('-(eme-free|sha1)', '', platform, flags=re.I)

    url = archive_url(product, version, platform, locale,
                      candidate='/' + latest_build_folder)

    # We already have the metadata for this platform and version.
    if url in _release_metadata:
        return _release_metadata[url]

    try:
        _, files = await fetch_listing(session, url)
    except ValueError:
        # Some partial update don't have metadata. eg. /47.0.1-candidates/
        _release_metadata[url] = None
        return None

    for f in files:
        filename = f['name']
        if is_release_build_metadata(product, version, filename):
            try:
                metadata = await fetch_json(session, url + filename)
                metadata['buildnumber'] = build_number
                _release_metadata[url] = metadata
                return metadata
            except aiohttp.ClientError as e:
                # Sometimes, some XML comes out \o/ (see #259)
                pass

    # Version exists in candidates but has no metadata!
    _release_metadata[url] = None  # Don't try it anymore.
    raise ValueError('Missing metadata for candidate {}'.format(url))

async def fetch(self, url, max_redirect):
    tries = 0
    exception = None
    while tries < self.max_tries:
        try:
            response = await self.session.get(
                url, allow_redirects=False)
            break
        except aiohttp.ClientError as client_error:
            exception = client_error
        tries += 1
    else:
        self.record_statistic(FetchStatistic(url=url,
                                             next_url=None,
                                             status=None,
                                             exception=exception,
                                             size=0,
                                             content_type=None,
                                             encoding=None,
                                             num_urls=0,
                                             num_new_urls=0))
        return

    try:
        if is_redirect(response):
            location = response.headers['location']
            next_url = urllib.parse.urljoin(url, location)
            self.record_statistic(FetchStatistic(url=url,
                                                 next_url=next_url,
                                                 status=response.status,
                                                 exception=None,
                                                 size=0,
                                                 content_type=None,
                                                 encoding=None,
                                                 num_urls=0,
                                                 num_new_urls=0))

            if next_url in self.seen_urls:
                return
            if max_redirect > 0:
                self.add_url(next_url, max_redirect - 1)
            else:
                print('redirect limit reached for %r from %r',
                      next_url, url)
        else:
            stat, links = await self.parse_links(response)
            self.record_statistic(stat)
            for link in links.difference(self.seen_urls):
                self.q.put_nowait((link, self.max_redirect))
            self.seen_urls.update(links)
    finally:
        await response.release()