我们从Python开源项目中,提取了以下35个代码示例,用于说明如何使用aiohttp.BasicAuth()。
async def credential_verfication(self, username, password):
    """Verify MyAnimeList credentials.

    Returns True when the API accepts them, False otherwise; auth
    failures are reported to the channel through the bot.
    """
    auth = aiohttp.BasicAuth(login=username, password=password)
    url = "https://myanimelist.net/api/account/verify_credentials.xml"
    # ClientSession is an async context manager, and the awaits below
    # require this to be a coroutine function (the original plain `def`
    # with `await` inside would not even parse).
    async with aiohttp.ClientSession(auth=auth) as session:
        async with session.get(url) as response:
            status = response.status
            if status == 200:
                return True
            if status == 401:
                await self.bot.say("Username and Password is incorrect.")
                return False
            if status == 403:
                # Fixed the missing space between the two concatenated
                # string fragments ("thecorrect" -> "the correct").
                await self.bot.say("Too many failed login attempts. Try putting in the "
                                   "correct credentials after some time has passed.")
                return False
            # Unexpected status: fail explicitly instead of implicitly
            # returning None.
            return False
async def send_sms(recipients: Iterable[str], msg: str, username: str,
                   api_key: str, sender: str):
    """Send `msg` (truncated to 140 characters) to each recipient via ClickSend.

    Failures are logged, never raised to the caller.
    """
    data = {
        'messages': [],
    }  # type: Dict[str, List]
    for recipient in recipients:
        data['messages'].append({
            'source': 'python',
            'from': sender,
            'body': msg[:140],  # ClickSend SMS body limit
            'to': recipient,
            'schedule': ''
        })
    try:
        # ClientSession and the POST response are async context managers,
        # so this function must be declared `async def` (the original plain
        # `def` around `async with` is a SyntaxError).
        async with aiohttp.ClientSession(
                headers={'Content-Type': 'application/json'},
                auth=aiohttp.BasicAuth(username, api_key)) as session:
            async with session.post(CLICKSEND_URL, data=json.dumps(data),
                                    timeout=30) as resp:
                if resp.status != 200:
                    # NOTE(review): log tag differs from the except branch
                    # ('NOTIFICATION' vs 'NOTIFICATIONS') -- confirm which
                    # tag is intended before unifying.
                    log.msg('Error sending clicksend sms notification: http status %s'
                            % (str(resp.status)), 'NOTIFICATION')
    except aiohttp.ClientError as e:
        log.msg('Error sending clicksend sms notification: %s' % (str(e)),
                'NOTIFICATIONS')
async def fetch_url(url: str, auth: aiohttp.BasicAuth,
                    headers: Dict[str, str] = None):
    """
    Function used to eliminate code reuse with creating ClientSessions

    :param str url: The URL you're fetching
    :param aiohttp.BasicAuth auth: The BasicAuth object being passed to aiohttp.ClientSession
    :param dict headers: The headers to be passed to `myanimelist`_
    :rtype: bytes

    Example usage:

    .. code-block:: py

        import asyncio
        loop = asyncio.get_event_loop()
        loop.run_until_complete(fetch_url(
            "url",
            aiohttp.BasicAuth(login="username", password="password"),
            {"User-Agent": "PyAnimeList"}))
    """
    if not headers:
        headers = {}
    # Both context managers are asynchronous, so the function must be a
    # coroutine; `response.read()` is itself a coroutine and has to be
    # awaited before its bytes can be returned (the original returned an
    # un-awaited coroutine object).
    async with aiohttp.ClientSession(auth=auth, headers=headers) as session:
        async with session.get(url) as response:
            return await response.read()
def update_proxy(self, proxy, proxy_auth, proxy_headers):
    """Validate and store the proxy endpoint, its credentials and headers.

    Raises ValueError for an unsupported scheme or an auth object that
    does not match the proxy scheme.
    """
    if proxy and proxy.scheme not in ('http', 'socks4', 'socks5'):
        raise ValueError(
            "Only http, socks4 and socks5 proxies are supported")
    if proxy and proxy_auth:
        # Each scheme demands a specific auth class; dispatch on the
        # scheme instead of chaining isinstance checks.
        expectations = {
            'http': (aiohttp.BasicAuth,
                     "proxy_auth must be None or "
                     "BasicAuth() tuple for http proxy"),
            'socks4': (Socks4Auth,
                       "proxy_auth must be None or Socks4Auth() "
                       "tuple for socks4 proxy"),
            'socks5': (Socks5Auth,
                       "proxy_auth must be None or Socks5Auth() "
                       "tuple for socks5 proxy"),
        }
        auth_cls, complaint = expectations[proxy.scheme]
        if not isinstance(proxy_auth, auth_cls):
            raise ValueError(complaint)
    self.proxy = proxy
    self.proxy_auth = proxy_auth
    self.proxy_headers = proxy_headers
async def oc_classify(records, one_codex_api_key, progress=False, stdout=False):
    """Classify sequence records via the One Codex API, building annotated
    records as tasks complete.

    Optionally shows a tqdm progress bar and streams each record as FASTA
    to stdout. Returns the list of built records.
    """
    oc_auth = aiohttp.BasicAuth(one_codex_api_key)
    conn = aiohttp.TCPConnector(limit=10)
    # The sessions are async context managers and the loop below awaits,
    # so this must be `async def` + `async with` (the original plain `def`
    # with `await` inside is a SyntaxError).
    async with aiohttp.ClientSession(auth=oc_auth, connector=conn) as oc_session:
        async with aiohttp.ClientSession(connector=conn) as ebi_session:
            tasks = [classify_taxify(oc_session, ebi_session, r.id, str(r.seq))
                     for r in records]
            # No async generators in 3.5... :'(
            # return [await f for f in tqdm.tqdm(asyncio.as_completed(tasks), total=len(tasks))]
            records = []
            for f in tqdm.tqdm(asyncio.as_completed(tasks),
                               disable=not progress,
                               total=len(tasks)):
                response = await f
                record = build_record(response[0], response[1])
                if stdout:
                    print(record.format('fasta'), end='')
                records.append(record)
            return records


# --------------------------------------------------------------------------------------------------
def create_session(self, loop):
    """Build an aiohttp ClientSession, routed through the configured proxy
    (with HTTP Basic credentials when a proxy user is set)."""
    if self.proxy and self.proxy_user:
        connector = aiohttp.ProxyConnector(
            loop=loop,
            limit=self.parallel,
            proxy=self.proxy,
            proxy_auth=aiohttp.BasicAuth(self.proxy_user, self.proxy_password),
        )
    elif self.proxy:
        connector = aiohttp.ProxyConnector(loop=loop, limit=self.parallel,
                                           proxy=self.proxy)
    else:
        connector = aiohttp.TCPConnector(loop=loop, limit=self.parallel)
    return aiohttp.ClientSession(connector=connector)
async def image(message, query):
    """
    Search Bing for an image and returns a URL to that image.

    Example::

        image socially awkward penguin

    """
    # `await` requires a coroutine function (the original plain `def` with
    # `await` inside is a SyntaxError). The adult filter is keyed off the
    # channel's NSFW configuration.
    r = await http.get(SEARCH_URL, params=[
        ('$format', 'json'),
        ('$top', '20'),
        ('Adult', "'Off'" if scoped_config.get(nsfw, message.channel) else "'Strict'"),
        ('Query', "'{}'".format(query)),
    ], auth=BasicAuth("", password=api_key()))
    results = SafeStructure(r.json()).d.results
    if results:
        return Response(random.choice(results).MediaUrl)
    else:
        raise CommandError("no results found")
def __init__(self, hass, device_info):
    """Initialize a generic camera."""
    super().__init__()
    self.hass = hass
    self._authentication = device_info.get(CONF_AUTHENTICATION)
    self._name = device_info.get(CONF_NAME)
    self._still_image_url = device_info[CONF_STILL_IMAGE_URL]
    self._still_image_url.hass = hass
    self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE]
    username = device_info.get(CONF_USERNAME)
    password = device_info.get(CONF_PASSWORD)
    # Default to unauthenticated; pick digest or basic auth only when both
    # credentials were configured.
    self._auth = None
    if username and password:
        if self._authentication == HTTP_DIGEST_AUTHENTICATION:
            self._auth = HTTPDigestAuth(username, password)
        else:
            self._auth = aiohttp.BasicAuth(username, password=password)
    self._last_url = None
    self._last_image = None
async def get_xml(self, nature, name):
    """Query MyAnimeList's search API and return the raw XML response text.

    `nature` selects the endpoint (e.g. anime or manga); spaces in `name`
    are replaced with underscores before interpolation into the URL.
    """
    username = self.credentials["Username"]
    password = self.credentials["Password"]
    name = name.replace(" ", "_")
    auth = aiohttp.BasicAuth(login=username, password=password)
    url = 'https://myanimelist.net/api/{}/search.xml?q={}'.format(nature, name)
    # ClientSession must be entered with `async with`, which in turn forces
    # this method to be a coroutine (the original plain `def` around
    # `async with`/`await` would not parse).
    async with aiohttp.ClientSession(auth=auth) as session:
        async with session.get(url) as response:
            data = await response.text()
    return data
def __init__(self, solver=None, proxy=None, logger=None):
    """VK client constructor.

    :param solver: captcha solver handed to the auth flow
    :param proxy: optional proxy spec (see the unpack note below)
    :param logger: logger to use; a fresh "vk_client" Logger otherwise
    """
    if logger:
        self.logger = logger
    else:
        self.logger = logging.Logger("vk_client")
    self.solver = solver
    self.auth = Auth(self, logger=self.logger)
    self.req_kwargs = {}
    if proxy:
        # NOTE(review): this starred assignment only succeeds when `proxy`
        # yields exactly one item (the URL); the three None values pad out
        # username/password/encoding. Confirm callers never pass longer
        # tuples -- any other length raises at runtime.
        url, username, password, encoding = *proxy, None, None, None
        self.req_kwargs["proxy"] = url
        if username:
            # Empty password and latin1 encoding are the fallbacks.
            self.req_kwargs["proxy_auth"] = aiohttp.BasicAuth(
                username,
                password if password else "",
                encoding if encoding else "latin1")
    self.session = aiohttp.ClientSession()
    self.queue = RequestsQueue(self, logger=self.logger)
    # Credentials/identity fields are filled in later by the auth flow.
    self.username = ""
    self.password = ""
    self.app_id = None
    self.scope = None
    self.group_id = 0
    self.user_id = 0
    self.token = ""
def __init__(self, username: str, password: str, enable_scraper: bool = False,
             user_agent: str = None):
    """
    :param str username: The username of the account that is being used to access the API
    :param str password: The password of the account that is being used to access the API
    :param bool enable_scraper:
    :param str user_agent: UserAgent of the application
    """
    self._auth = aiohttp.BasicAuth(login=username, password=password)
    # Fall back to the package default UA when none (or an empty one) is given.
    self.user_agent = UA if not user_agent else user_agent
def basic_auth(self, username, password):
    """Set the Authorization header from HTTP Basic credentials."""
    encoded = aiohttp.BasicAuth(username, password).encode()
    self.set_header('Authorization', encoded)  # pragma: no cover

# HTTP Methods
async def _mal_fetch(session, kind, query, username, password):
    """Returns a bs4 tag or a string.

    session is an aiohttp.ClientSession
    kind should be either anime or manga
    query is self-explanatory
    username is self-explanatory
    password is self-explanatory
    """
    auth = aiohttp.BasicAuth(username, password)
    query = urllib.parse.quote(query)
    url = BASE_URL_MYANIMELIST_SEARCH.format(kind, query)
    try:
        # This is gross, but MAL doesn't respond nicely.
        # `async with`/`await` force this helper to be a coroutine (the
        # original plain `def` would not parse).
        async with session.request("GET", url, auth=auth) as response:
            if response.status == 200:
                xml = await response.text()
                soup = BeautifulSoup(xml)
                entry = soup.find("entry")
                return entry
            else:
                message = "Could not reach MyAnimeList. x.x"
                return message
    except aiohttp.ClientResponseError:
        message = ("No results found. Make sure you use spaces (e.g. "
                   "`one piece`, not `onepiece`). Also make sure to spell things right.")
        return message
def test_auth_is_set_correctly(event_loop):
    """Tuple and "user:secret" string auth must both yield the same BasicAuth."""
    expected = aiohttp.BasicAuth('user', 'secret')
    for http_auth in (('user', 'secret'), 'user:secret'):
        connection = AIOHttpConnection(http_auth=http_auth, loop=event_loop)
        assert connection.session._default_auth == expected
def __init__(self, bot):
    """Set up waifu-war vote state and a Safebooru HTTP session,
    authenticated when credentials exist in data/auth/auth.ini."""
    self.bot = bot
    self.waifu1votes = 0
    self.waifu2votes = 0
    self.startTime = 0
    # Length of a vote round, in seconds.
    self.duration = 120
    #Initialize empty set for users that have already voted
    self.alreadyVoted = set({})
    #Initialize empty list for channels to announce winners to
    self.channels = set({})
    #Login stuff from Safebooru
    parser = configparser.ConfigParser()
    parser.read('data/auth/auth.ini')
    self.has_login = False
    if not parser.has_section("Safebooru Login"):
        self.session = aiohttp.ClientSession()
        print("No Safebooru credentials provided; api calls will be anonymous")
    else:
        self.loginName = parser['Safebooru Login']['Username']
        self.loginToken = parser['Safebooru Login']['Token']
        # Only authenticate when both fields are actually populated.
        if self.loginName != "" and self.loginToken != "":
            self.session = aiohttp.ClientSession(
                auth=aiohttp.BasicAuth(self.loginName, self.loginToken))
            self.has_login = True
        else:
            self.session = aiohttp.ClientSession()
    # Close the HTTP session when the interpreter exits.
    atexit.register(self.closeConnection)

#Starts the waifu war
def __init__(self, bot):
    """Load saved waifu lists from disk and set up a Safebooru HTTP session,
    authenticated when credentials exist in data/auth/auth.ini."""
    self.bot = bot
    self.waifuLists = {}
    self.lastWaifuRolled = {}
    self.LISTSIZE = 5
    self.MAXLISTS = 5
    self.TRADELISTSIZE = 10
    self.MAXTRADEREQS = 10
    invalidLists = []
    # Each file under WaifuList is "<userId>.json"; [:-5] strips ".json"
    # to recover the user id used as the dict key.
    for userId in os.listdir("data/safebooru/WaifuList"):
        if not dataIO.is_valid_json("data/safebooru/WaifuList/" + userId):
            invalidLists.append(userId + "\n")
        else:
            self.waifuLists[userId[:-5]] = dataIO.load_json(
                "data/safebooru/WaifuList/" + userId)
    if not len(invalidLists) == 0:
        print("Warning: the following files were not saved properly, and have been lost: \n")
        for user in invalidLists:
            print(user)
    parser = configparser.ConfigParser()
    parser.read('data/auth/auth.ini')
    self.has_login = False
    if not parser.has_section("Safebooru Login"):
        self.session = aiohttp.ClientSession()
        print("No Safebooru credentials provided; api calls will be anonymous")
    else:
        self.loginName = parser['Safebooru Login']['Username']
        self.loginToken = parser['Safebooru Login']['Token']
        # Only authenticate when both fields are actually populated.
        if self.loginName != "" and self.loginToken != "":
            self.session = aiohttp.ClientSession(
                auth=aiohttp.BasicAuth(self.loginName, self.loginToken))
            self.has_login = True
        else:
            self.session = aiohttp.ClientSession()
    # Close the HTTP session when the interpreter exits.
    atexit.register(self.closeConnection)
def __init__(self, agent_addr, *, min_success_rate=.0, min_count=0,
             update_interval=300, auth=None, params=None, timeout=20,
             loop=None):
    """
    SimpleProxyPool constructor.

    agent_addr - Proxy agent address.
    min_success_rate - (optional) The minimum acceptable success rate of a proxy.
    min_count - (optional) The least number of proxies in the proxy list.
        It works when pass the `min_success_rate` parameter.
    update_interval - (optional) Time interval to update the proxy list from proxy agent.
    auth - (optional) Http Basic Auth tuple.
    params - (optional) Prameters dictionary be sent in the query.
    timeout - (optional) Timeout when connects proxy agent.
    loop - (optional) Event loop.
    """
    self.agent_addr = agent_addr
    # Without an explicit loop the pool runs in "synchronous" mode on a
    # private event loop; otherwise it joins the caller's loop.
    if loop is None:
        self.asyn = False
        self.loop = asyncio.new_event_loop()
    else:
        self.asyn = True
        self.loop = loop
    self.auth = auth
    if self.auth is not None:
        # Accept either a (user, password) tuple or a ready-made BasicAuth.
        if isinstance(self.auth, tuple):
            self.auth = aiohttp.BasicAuth(*self.auth)
        elif not isinstance(self.auth, aiohttp.BasicAuth):
            raise TypeError('The type of "auth" must be tuple or aiohttp.BasicAuth')
    self.params = params or {}
    self.timeout = timeout
    self.update_interval = update_interval
    self.min_success_rate = min_success_rate
    self.min_count = min_count
    # Timestamp of the last proxy-list refresh; 0 forces an initial update.
    self._last_update = 0
    self._update_lock = asyncio.Lock(loop=self.loop)
    self.proxies = []
async def send(self, *, from_addr, to_addrs, subject, body):
    """Send an email through the Mailgun HTTP API.

    Raises the session's HTTP error when Mailgun responds with a
    non-success status.
    """
    # Mailgun uses HTTP Basic auth with the literal user "api".
    request = self._session.post(
        f'https://api.mailgun.net/v3/{self._domain}/messages',
        auth=aiohttp.BasicAuth("api", self._api_key),
        data={"from": from_addr,
              "to": to_addrs,
              "subject": subject,
              "text": body,
              })
    # The pending request is an async context manager, which means this
    # method must be a coroutine (the original plain `def` around
    # `async with` is a SyntaxError).
    async with request as response:
        response.raise_for_status()
def test_proxy_client_request_invalid(loop):
    """Every unsupported scheme/auth pairing must raise a descriptive ValueError."""
    bad_combos = [
        ('socks6://proxy.org', None,
         'Only http, socks4 and socks5 proxies are supported'),
        ('http://proxy.org', Socks4Auth('l'),
         'proxy_auth must be None or BasicAuth() tuple for http proxy'),
        ('socks4://proxy.org', BasicAuth('l'),
         'proxy_auth must be None or Socks4Auth() tuple for socks4 proxy'),
        ('socks5://proxy.org', Socks4Auth('l'),
         'proxy_auth must be None or Socks5Auth() tuple for socks5 proxy'),
    ]
    for proxy_url, proxy_auth, expected in bad_combos:
        with pytest.raises(ValueError) as cm:
            ProxyClientRequest(
                'GET', URL('http://python.org'), loop=loop,
                proxy=URL(proxy_url), proxy_auth=proxy_auth)
        assert expected in str(cm)
def test_auth_str(auto_close, loop):
    """A "user:pass" string should be parsed into an equivalent BasicAuth."""
    conn = auto_close(AIOHttpConnection(http_auth='user:pass', loop=loop))
    assert conn.http_auth == aiohttp.BasicAuth('user', 'pass')
def test_auth_tuple(auto_close, loop):
    """A (user, pass) tuple should be converted into an equivalent BasicAuth."""
    conn = auto_close(AIOHttpConnection(http_auth=('user', 'pass'), loop=loop))
    assert conn.http_auth == aiohttp.BasicAuth('user', 'pass')
def test_auth_basicauth(auto_close, loop):
    """A ready-made BasicAuth instance should be stored untouched."""
    credentials = aiohttp.BasicAuth('user', 'pass')
    conn = auto_close(AIOHttpConnection(http_auth=credentials, loop=loop))
    assert conn.http_auth == credentials
def __init__(self, url_builder, headers, user = None, password = None, verify_ssl_certs = True):
    """Store request configuration; BasicAuth is built only when both a
    user and a password were supplied."""
    self.url_builder = url_builder
    self.headers = headers
    self.verify_ssl_certs = verify_ssl_certs
    if user is None or password is None:
        self.basic_auth_credentials = None
    else:
        self.basic_auth_credentials = aiohttp.BasicAuth(login = user, password = password)
def __init__(self, login: str, password: str):
    """Open an aiohttp session pre-configured with HTTP Basic credentials."""
    credentials = aiohttp.BasicAuth(login=login, password=password)
    self.session = aiohttp.ClientSession(auth=credentials)
async def anime(message):
    """
    Gets information about an anime using myanimelist.net.

    Example::

        /anime code geass

    """
    query = message.content.strip()
    if not len(query):
        raise CommandError("Supply the name of an anime to search.")
    auth = aiohttp.BasicAuth(username(), password())
    try:
        # `await` forces this handler to be a coroutine function (the
        # original plain `def` with `await` inside is a SyntaxError).
        r = await http.get("https://myanimelist.net/api/anime/search.xml", params=[
            ('q', query)
        ], auth=auth)
    except BadStatusCodeError as e:
        # MAL signals "no results" with 204/404; surface it as a command error.
        if e.http_code in (204, 404):
            raise CommandError("No anime results for '{query}'.".format(query=query))
        raise
    doc = BeautifulSoup(r.text(), features="lxml")
    entries = doc.anime.find_all("entry", recursive=False)
    if not len(entries):
        raise CommandError("No results found.")
    entry = entries[0]
    return "{image}\n\n" \
           "**{name}** ({type})\n\n" \
           "**Score:** {score}\n" \
           "**Episodes:** {ep_count}\n" \
           "**Air Dates:** {start}-{end}\n\n" \
           "{synopsis}\n".format(
               image=entry.image.text,
               type=entry.type.text,
               name=entry.title.text,
               score=entry.score.text,
               ep_count=entry.episodes.text,
               start=entry.start_date.text,
               end=entry.end_date.text,
               synopsis=strip_html(entry.synopsis.text),
           )
def fetch_template_data(username, token):
    """Yield template data for the GitHub links found in the Cookiecutter
    readme, skipping links that did not parse into a valid template.

    Runs the async GitHub calls to completion on the current event loop.
    """
    semaphore = asyncio.Semaphore(10)
    loop = asyncio.get_event_loop()
    auth = aiohttp.BasicAuth(username, token)
    # NOTE(review): sync `with` on ClientSession only works on the old
    # aiohttp release this code targets; newer versions require `async with`.
    with aiohttp.ClientSession(loop=loop, auth=auth) as client:
        logger.debug('Load Cookiecutter readme')
        cookiecutter_readme = loop.run_until_complete(
            github_api.get_readme(semaphore, client, 'audreyr', 'cookiecutter'))
        if not cookiecutter_readme:
            raise CookiecutterReadmeError
        logger.debug('Find GitHub links in Cookiecutter readme')
        github_links, _ = readme_parser.read(cookiecutter_readme)
        if not github_links:
            raise UnableToFindTemplateLinks
        tasks = [github_api.get_template(semaphore, client, link)
                 for link in github_links]
        logger.debug('Fetch template data from links')
        results = loop.run_until_complete(asyncio.gather(*tasks))
        yield from filter(None, results)  # Ignore all invalid templates
def __init__(self, hass, device_info):
    """Initialize a MJPEG camera."""
    super().__init__()
    self._name = device_info.get(CONF_NAME)
    self._authentication = device_info.get(CONF_AUTHENTICATION)
    self._username = device_info.get(CONF_USERNAME)
    self._password = device_info.get(CONF_PASSWORD)
    self._mjpeg_url = device_info[CONF_MJPEG_URL]
    # Basic auth applies only when both credentials are present AND basic
    # authentication was selected in the configuration.
    wants_basic = (self._username and self._password
                   and self._authentication == HTTP_BASIC_AUTHENTICATION)
    self._auth = (aiohttp.BasicAuth(self._username, password=self._password)
                  if wants_basic else None)
async def anime(self, ctx, *, animeName:str):
    ''' Gives you the details of an anime '''

    # Make sure there are the correct tokens in the bot
    tokens = getTokens()
    userPass = tokens['MyAnimeList']

    # Authenticate
    auth = BasicAuth(userPass['Username'], userPass['Password'])
    url = 'https://myanimelist.net/api/anime/search.xml?q=' + animeName.replace(' ', '+')

    # Send the request (`async with`/`await` require this command handler
    # to be a coroutine; the original plain `def` would not parse)
    async with self.session.get(url, auth=auth) as r:
        resp = r.status
        data = await r.text()

    # Make sure everything's alright
    if resp == 204:
        await self.sparcli.say('The anime with the title `{}` could not be found.'.format(animeName.title()))
        return
    elif resp == 200:
        pass
    else:
        await self.sparcli.say('There was an error with this bot\'s authentication details.')
        return

    # Parse the XML data
    root = ET.fromstring(data)
    anime = root[0]
    o = OrderedDict()

    # Plonk it into an embed
    v = htmlFixer(anime[10].text)
    v = v if len(v) < 1000 else v[:1000]
    # NOTE(review): the '...' suffix is appended even when the synopsis was
    # not truncated, and the strip loop would IndexError on an all-punctuation
    # summary -- confirm intent before changing either behavior.
    while v[-1] in ' .,?;\'"/!':
        v = v[:-1]
    v = v + '...'
    o['Summary'] = (v, False)
    # The MAL search XML lays out child elements by fixed position.
    o['Episodes'] = anime[4].text
    o['Rating'] = anime[5].text + '/10.00'
    o['Media Type'] = anime[6].text
    o['Status'] = anime[7].text
    image = anime[11].text
    title = anime[1].text

    # Echo out to the user
    e = makeEmbed(author=title, image=image, fields=o)
    await self.sparcli.say(embed=e)
def __init__(self, host: str = 'localhost', port: int = 8086,
             username: Optional[str] = None, password: Optional[str] = None,
             db: str = 'testdb', database: Optional[str] = None,
             loop: asyncio.BaseEventLoop = None, log_level: int = 30,
             mode: str = 'async'):
    """
    The AsyncInfluxDBClient object holds information necessary to interact with InfluxDB.
    It is async by default, but can also be used as a sync/blocking client and even generate
    Pandas DataFrames from queries.
    The three main public methods are the three endpoints of the InfluxDB API, namely:

    1) AsyncInfluxDBClient.ping
    2) AsyncInfluxDBClient.write
    3) AsyncInfluxDBClient.query

    See each of the above methods documentation for further usage details.
    See also: https://docs.influxdata.com/influxdb/v1.2/tools/api/

    :param host: Hostname to connect to InfluxDB.
    :param port: Port to connect to InfluxDB.
    :param username: Username to use to connect to InfluxDB.
    :param password: User password.
    :param db: Default database to be used by the client.
    :param database: Default database to be used by the client.
        This field is for argument consistency with the official InfluxDB Python client.
    :param loop: Event loop used for processing HTTP requests.
    :param log_level: Logging level. The lower the more verbose. Defaults to INFO (30).
    :param mode: Mode in which client should run.
        Available options are: 'async', 'blocking' and 'dataframe'.

        - 'async': Default mode. Each query/request to the backend is handled
          asynchronously (methods return coroutines).
        - 'blocking': Behaves in sync/blocking fashion, similar to the official
          InfluxDB-Python client.
        - 'dataframe': Behaves in a sync/blocking fashion, but parsing results
          into Pandas DataFrames. Similar to InfluxDB-Python's `DataFrameClient`.
    """
    # Validate the mode BEFORE allocating network resources; the original
    # created the ClientSession first, so an invalid mode raised ValueError
    # while leaking an open session.
    if mode not in {'async', 'blocking', 'dataframe'}:
        raise ValueError('Invalid mode')
    self._logger = self._make_logger(log_level)
    self._loop = asyncio.get_event_loop() if loop is None else loop
    # Auth only when both credentials are supplied.
    self._auth = aiohttp.BasicAuth(username, password) if username and password else None
    self._session = aiohttp.ClientSession(loop=self._loop, auth=self._auth)
    self._url = f'http://{host}:{port}/{{endpoint}}'
    self.host = host
    self.port = port
    # `database` (official-client spelling) wins over the `db` shorthand.
    self.db = database or db
    self.mode = mode
async def _coro_make_request(*, endpoint: APITestEndPoint,
                             proxy: ProxyConfig = None,
                             download_content: bool = False,
                             accept_selfsigned_certs: bool = True):
    """A coroutine that makes the requests

    :param endpoint: APITestEndPoint object instance
    :type endpoint: APITestEndPoint

    :param proxy: ProxyConfig object instance
    :type proxy: ProxyConfig

    :return: a RequestResponse holding the status, optional body text and headers
    """
    assert isinstance(endpoint, APITestEndPoint)
    # `proxy` defaults to None, so only type-check it when given; the
    # original unconditional isinstance assert rejected its own default.
    assert proxy is None or isinstance(proxy, ProxyConfig)

    # Proxy needs authentication?
    proxy_auth = None
    if proxy is not None and proxy.user is not None:
        proxy_auth = aiohttp.BasicAuth(proxy.user, proxy.password)

    # Extract info
    url = endpoint.request.url
    body = transform_apitest_body_to_queryable(endpoint.request.body)
    method = endpoint.request.method
    headers = {header.key: header.value for header in endpoint.request.headers}

    # Check SSL?
    conn = None
    if accept_selfsigned_certs:
        conn = aiohttp.TCPConnector(verify_ssl=False)

    # Do the Requests (`async with` requires this to be `async def`; the
    # original plain `def` would not parse)
    async with aiohttp.ClientSession(connector=conn) as session:
        async with session.request(method=method, url=url, headers=headers,
                                   data=body,
                                   proxy=proxy.url if proxy is not None else None,
                                   proxy_auth=proxy_auth) as resp:
            if resp.status == 200:
                log.console(" - Response OK for URL: '{}'".format(url))
            else:
                log.console(" - Non-200 response for URL: '{}'".format(url))
                log.console(" \_ HTTP status code: '{}'".format(resp.status))

            # Download content?
            content = None
            if download_content:
                content = await resp.text()
                log.console(" \_{}".format(content))

            return RequestResponse(status=resp.status, content=content,
                                   headers=resp.headers)
async def danbooru(self, message):
    """
    Get a random image from Danbooru
    :param message:
    :return:
    """
    cursor = self.db.server_backend.find({"serv_id": message.server.id})
    for c in cursor:
        nsfw_status = c['nsfw']['nsfw_status']
        nsfw_chan_id = c['nsfw']['nsfw_chan_id']
        if nsfw_status == "on|global" or nsfw_chan_id == message.channel.id:
            tag = message.content[10:]
            tags = str(tag).rstrip()
            # Same endpoint with or without a tags filter; the two
            # near-identical request blocks are folded into one URL choice.
            if tags == "":
                url = "https://danbooru.donmai.us/posts.json?"
            else:
                url = "https://danbooru.donmai.us/posts.json?tags={}".format(tag)
            # ClientSession and the GET response are async context managers,
            # so this handler must be `async def` (the original plain `def`
            # with `await` inside is a SyntaxError).
            async with aiohttp.ClientSession(
                    auth=aiohttp.BasicAuth(
                        login=settings["DANBOORU_ID"],
                        password=settings["DANBOORU_PASSWORD"])) as session:
                async with session.get(url) as resp:
                    if resp.status == 200:
                        data = await resp.json()
                    else:
                        return
            if not data:
                await self.ctx.send_message(message.channel, "**Error**: `Tag {} did not return any results.`".format(tag))
                return
            data_len = len(data)
            if data_len == 0:
                await self.ctx.send_message(message.channel, "**Error**: `Tag {} did not return any results.`".format(tag))
                return
            try:
                ran = random.randint(0, data_len - 1)
                lucky = data[ran]
                a = "https://danbooru.donmai.us"
                img = a + lucky['file_url']
                await self.ctx.send_message(message.channel, img)
                return
            except KeyError:
                pass
            except IndexError:
                pass
def __init__(self, agent_addr, *, pool_size=100, block_time=3600,
             max_fail_times=2, update_interval=300, auth=None, params=None,
             timeout=20, loop=None):
    """
    ProxyPool constructor.

    agent_addr - Proxy agent address.
    pool_size - (optional) The size of the pool.
    block_time - (optional) Time for blocking a proxy.
    max_fail_times - (optional) The maximum acceptable number of
        the continuous failure of a proxy.
    update_interval - (optional) Time interval to update the proxy list
        from proxy agent.
    auth - (optional) Http Basic Auth tuple.
    params - (optional) Prameters dictionary be sent in the query.
    timeout - (optional) Timeout when connects proxy agent.
    loop - (optional) Event loop.
    """
    self.agent_addr = agent_addr
    # Without an explicit loop the pool runs in "synchronous" mode on a
    # private event loop; otherwise it joins the caller's loop.
    if loop is None:
        self.asyn = False
        self.loop = asyncio.new_event_loop()
    else:
        self.asyn = True
        self.loop = loop
    self.auth = auth
    if self.auth is not None:
        # Accept either a (user, password) tuple or a ready-made BasicAuth.
        if isinstance(self.auth, tuple):
            self.auth = aiohttp.BasicAuth(*self.auth)
        elif not isinstance(self.auth, aiohttp.BasicAuth):
            raise TypeError('The type of "auth" must be tuple or aiohttp.BasicAuth')
    self.params = params or {}
    self.timeout = timeout
    self.update_interval = update_interval
    # Timestamp of the last proxy-list refresh; 0 forces an initial update.
    self._last_update = 0
    self._update_lock = asyncio.Lock(loop=self.loop)
    self.max_fail_times = max_fail_times
    self._proxies = {}
    # Active pool plus positive/negative priority views of it.
    self._pool = PriorityQueue(pool_size)
    self._pool_p = PriorityQueue(pool_size)
    self._pool_n = PriorityQueue(pool_size)
    # Backup pool is kept at five times the active pool size.
    self.backup_size = pool_size * 5
    self._backup = PriorityQueue(self.backup_size)
    self._backup_p = PriorityQueue(self.backup_size)
    self._backup_n = PriorityQueue(self.backup_size)
    self.block_time = block_time
    # Blocked proxies and the order in which they were blocked.
    self._trash = {}
    self._block_queue = deque()
def __init__(self, host='localhost', port=9200, http_auth=None, use_ssl=False,
             ssl_context=None, verify_certs=False, maxsize=10, headers=None,
             *, loop, **kwargs):
    """aiohttp-backed Elasticsearch connection.

    `http_auth` may be an aiohttp.BasicAuth, a "user:pass" string, or a
    (user, pass) tuple/list; anything else raises TypeError.
    """
    super().__init__(host=host, port=port, use_ssl=use_ssl, **kwargs)
    if headers is None:
        headers = {}
    self.headers = headers
    self.headers.setdefault('Content-Type', 'application/json')
    self.loop = loop
    if http_auth is not None:
        if isinstance(http_auth, aiohttp.BasicAuth):
            pass
        elif isinstance(http_auth, str):
            # Split only on the first ':' so passwords may contain colons.
            http_auth = aiohttp.BasicAuth(*http_auth.split(':', 1))
        elif isinstance(http_auth, (tuple, list)):
            http_auth = aiohttp.BasicAuth(*http_auth)
        else:
            raise TypeError("Expected str, list, tuple or "
                            "aiohttp.BasicAuth as http_auth parameter,"
                            "got {!r}".format(http_auth))
    self.http_auth = http_auth
    self.verify_certs = verify_certs
    self.base_url = URL.build(scheme='https' if self.use_ssl else 'http',
                              host=host, port=port, path=self.url_prefix)
    # Reuse a caller-provided session when one is passed via kwargs;
    # otherwise build a session with a bounded connector.
    self.session = kwargs.get('session')
    if self.session is None:
        self.session = aiohttp.ClientSession(
            auth=self.http_auth,
            connector=aiohttp.TCPConnector(
                limit=maxsize,
                use_dns_cache=kwargs.get('use_dns_cache', False),
                ssl_context=ssl_context,
                verify_ssl=self.verify_certs,
                loop=self.loop,
            ),
        )
async def manga(message):
    """
    Gets information about an manga using myanimelist.net.

    Example::

        /manga naruto

    """
    query = message.content.strip()
    if not len(query):
        raise CommandError("Supply the name of a manga to search.")
    auth = aiohttp.BasicAuth(username(), password())
    try:
        # `await` forces this handler to be a coroutine function (the
        # original plain `def` with `await` inside is a SyntaxError).
        r = await http.get("https://myanimelist.net/api/manga/search.xml", params=[
            ('q', query)
        ], auth=auth)
    except BadStatusCodeError as e:
        # MAL signals "no results" with 204/404; surface it as a command error.
        if e.http_code in (204, 404):
            raise CommandError("No manga results for '{query}'.".format(query=query))
        raise
    doc = BeautifulSoup(r.text(), features="lxml")
    entries = doc.manga.find_all("entry", recursive=False)
    if not len(entries):
        raise CommandError("No results found.")
    entry = entries[0]
    return "{image}\n\n" \
           "**{name}** ({type})\n\n" \
           "**Status:** {status}\n" \
           "**Score:** {score}\n" \
           "**Chapters:** {chapters}\n" \
           "**Run Dates:** {start}-{end}\n\n" \
           "{synopsis}\n".format(
               image=entry.image.text,
               type=entry.type.text,
               name=entry.title.text,
               status=entry.status.text,
               score=entry.score.text,
               chapters=entry.chapters.text,
               start=entry.start_date.text,
               end=entry.end_date.text,
               synopsis=strip_html(entry.synopsis.text),
           )