def go(dirname, filename):
    """Exercise a TLS static-file route: happy path, missing file, traversal.

    NOTE(review): uses `self` without declaring it, so this is presumably a
    helper nested inside a unittest-style test method — confirm context.
    """
    # Self-signed certificate pair shipped with the test fixtures.
    ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    ssl_ctx.load_cert_chain(
        os.path.join(dirname, 'sample.crt'),
        os.path.join(dirname, 'sample.key')
    )
    app, _, url = yield from self.create_server(
        'GET', '/static/' + filename, ssl_ctx=ssl_ctx
    )
    app.router.add_static('/static', dirname)

    # The server uses a self-signed cert, so certificate checking is disabled.
    conn = TCPConnector(verify_ssl=False, loop=self.loop)
    session = ClientSession(connector=conn)

    # Happy path: existing file served with default content type, no encoding.
    resp = yield from session.request('GET', url)
    self.assertEqual(200, resp.status)
    txt = yield from resp.text()
    self.assertEqual('file content', txt.rstrip())
    ct = resp.headers['CONTENT-TYPE']
    self.assertEqual('application/octet-stream', ct)
    self.assertEqual(resp.headers.get('CONTENT-ENCODING'), None)
    resp.close()

    # Unknown file -> 404.
    resp = yield from session.request('GET', url + 'fake')
    self.assertEqual(404, resp.status)
    resp.close()

    # Path-traversal attempt must also yield 404, not escape the static root.
    resp = yield from session.request('GET', url + '/../../')
    self.assertEqual(404, resp.status)
    resp.close()
class Client:
    """Service-discovery-aware HTTP client backed by aiohttp.

    Picks a random registered instance of *service* (from the consul data
    held on the app) for every request.
    """

    def __init__(self, app, service, **kwargs):
        if not app.config['CONSUL_ENABLED']:
            raise ValueError(
                'Current app can\'t use `Client`, enabled consul support firstly!'
            )
        self._client = ClientSession(loop=app.loop, **kwargs)
        self._app = app
        self.services = app.services[service]

    def handler_url(self, api):
        """Build a URL for *api* against a randomly chosen service instance."""
        s = random.choice(list(self.services))
        return f'http://{s.service_address}:{s.service_port}/{api}'

    def request(self, method, api, **kwargs):
        """Issue *method* against *api* on a random service instance.

        FIX: the request context manager was previously created and dropped,
        so the HTTP call never actually ran and its result was lost.  It is
        now returned so callers can ``async with`` / await it.
        """
        url = self.handler_url(api)
        return self._client.request(method, url, **kwargs)

    def cli(self, req):
        """Build a traced connection wrapper for *req*.

        NOTE(review): ``self._url`` is never assigned in this class and the
        operation name is an empty f-string — confirm intent with the author.
        """
        operation = f''
        span = opentracing.tracer.start_span(operation_name='get',
                                             child_of=req['span'])
        return ClientSessionConn(self, url=self._url, span=span)

    async def close(self):
        """Dispose of the underlying aiohttp session."""
        await self._client.close()
class Amari:
    """Scrapes AmariBot leaderboard pages for a user's rank/score values."""

    def __init__(self):
        # Long-lived session; NOTE(review): never closed anywhere in this class.
        self.session = ClientSession()

    async def get_amari_rank(self, guild: int, user: discord.User):
        """Return the value of column 4 of the user's leaderboard row.

        NOTE(review): this fetches ``weekly.php`` — the same URL as
        :meth:`get_weekly_rank` — which looks like a copy/paste slip for a
        non-weekly endpoint; confirm the intended URL.
        Returns ``None`` when the user row is not found, ``0`` when the row
        cannot be parsed by the regex.
        """
        gid = guild
        username = user.name
        url = f"https://lb.amaribot.com/weekly.php?gID={gid}"
        async with self.session.request("GET", url) as response:
            text = await response.text()
        obj = BeautifulSoup(text, "html.parser")
        # Rows of the leaderboard table.
        rank_list = obj.body.main.findAll("div")[2].div.find("table").findAll("tr")
        tag = None
        # Linear scan for the first row mentioning this username.
        for tag in rank_list:
            if username in str(tag):
                break
        # Row shape: rank | name | <num> | <num>.
        check = re.compile(
            r"<tr><td>(\d+)<\/td><td>({})<\/td><td>(\d+)<\/td><td>(\d+)<\/td><\/tr>".format(
                re.escape(username)
            )
        )
        if not tag:
            return None
        match = re.match(check, str(tag))
        try:
            # Last numeric column of the row.
            return int(match.group(4))
        except (TypeError, AttributeError):
            return 0

    async def get_weekly_rank(self, guild: int, user: discord.User):
        """Return the value of column 3 of the user's weekly leaderboard row.

        NOTE(review): near-duplicate of :meth:`get_amari_rank`; differs only
        in the regex group used (3 vs 4) and in returning a bare ``None``
        (implicit) when no row matches — confirm both differences are
        intentional.
        """
        gid = guild
        username = user.name
        url = f"https://lb.amaribot.com/weekly.php?gID={gid}"
        async with self.session.request("GET", url) as response:
            text = await response.text()
        obj = BeautifulSoup(text, "html.parser")
        rank_list = obj.body.main.findAll("div")[2].div.find("table").findAll("tr")
        tag = None
        for tag in rank_list:
            if username in str(tag):
                break
        check = re.compile(
            r"<tr><td>(\d+)<\/td><td>({})<\/td><td>(\d+)<\/td><td>(\d+)<\/td><\/tr>".format(
                re.escape(username)
            )
        )
        if not tag:
            return
        match = re.match(check, str(tag))
        try:
            # Third numeric column of the row.
            return int(match.group(3))
        except (TypeError, AttributeError):
            return 0
class AsyncClientSession:
    """Process-wide singleton wrapper around an aiohttp ``ClientSession``."""

    __slots__ = ("session", )

    def __new__(cls, *args, **kwargs):
        # Lazily create and cache the sole instance on the class itself.
        if not hasattr(cls, '_instance'):
            cls._instance = super(AsyncClientSession, cls).__new__(cls)
        return cls._instance

    async def init_session(self) -> ClientSession:
        """Create the underlying session with a pooled TCP connector."""
        connector = TCPConnector(
            keepalive_timeout=15,
            limit=600,
            limit_per_host=300,
        )
        self.session = ClientSession(connector=connector)
        return self.session

    async def request(self, method, url, **kwargs):
        """Proxy straight through to ``ClientSession.request``."""
        return await self.session.request(method, url, **kwargs)

    @catch_exc(calc_time=True)
    async def fetch_json(self, method, url, **kwargs):
        """Perform a request and return the decoded JSON body."""
        logging.debug(f"{method} {url} {kwargs}")
        async with self.session.request(method, url, **kwargs) as response:
            return await response.json()

    async def close(self):
        """Dispose of the underlying session."""
        await self.session.close()
async def _request(
    self,
    method: str,
    url: str,
    *,
    headers: Optional[dict] = None,
    params: Optional[dict] = None,
    json: Optional[dict] = None,
) -> dict:
    """Make a request against the RainMachine device.

    Reuses the caller-supplied session when it is still open; otherwise a
    short-lived session is created and closed here.  ClientError is wrapped
    in the library's RequestError.
    """
    reuse = self._session and not self._session.closed
    if reuse:
        session = self._session
    else:
        session = ClientSession(
            timeout=ClientTimeout(total=DEFAULT_TIMEOUT))
    assert session

    try:
        async with session.request(
                method, url, headers=headers, params=params,
                json=json) as resp:
            resp.raise_for_status()
            data: dict = await resp.json(content_type=None)
            return data
    except ClientError as err:
        raise RequestError(f"Error requesting data from {url}: {err}")
    finally:
        # Only tear down sessions we created ourselves.
        if not reuse:
            await session.close()
async def _process(self, request: Request, session: ClientSession,
                   throttle: Throttle) -> Response:
    """Execute *request* under the per-host throttle, retrying on timeouts.

    Retries only on ``asyncio.TimeoutError``, up to *retry* extra attempts
    with *retry_interval* between them; any exception (including the final
    timeout) is converted into a Response via ``_make_response``.
    """
    self._logger.debug(f'{request} pending')
    # Limit concurrency per target host.
    async with throttle.request(request.url.host):
        self._logger.debug(f'{request} processing')
        timeout, retry, retry_interval, sleep, req_params = self._make_aio_req_params(
            request)
        try:
            for _ in range(retry + 1):
                try:
                    async with session.request(**req_params) as aio_resp:
                        response = await self._make_response(
                            request, aio_resp)
                    break
                except asyncio.TimeoutError:
                    await asyncio.sleep(retry_interval)
            else:
                # Every attempt timed out: raise a TimeoutError carrying the
                # configured timeout so the handler below can report it.
                raise asyncio.TimeoutError(f'{timeout}s')
        except Exception as exc:
            # Timeouts/cancellation are expected failure modes; anything
            # else gets a full traceback in the log.
            if not isinstance(
                    exc, (asyncio.TimeoutError, asyncio.CancelledError)):
                self._logger.exception('unexpected exception')
            # Error responses are synthesized from the exception object.
            response = await self._make_response(request, exc)
        finally:
            # Inter-request politeness delay, still inside the throttle slot.
            await asyncio.sleep(sleep)
    self._logger.debug(f'{request} complete '
                       f'({response.status}: {response.reason})')
    return response
async def _async_request(
    self, method: str, endpoint: str, **kwargs: Dict[str, str]
) -> Dict[str, Any]:
    """Make a request against OpenUV.

    Injects the API-key header and the lat/lng/alt query parameters, then
    returns the decoded JSON payload.

    FIX: the temporary session is now closed in a ``finally`` block so it is
    not leaked when the request or ``raise_for_status`` raises.
    """
    kwargs.setdefault("headers", {})
    kwargs["headers"]["x-access-token"] = self._api_key
    kwargs.setdefault("params", {})
    kwargs["params"]["lat"] = self.latitude
    kwargs["params"]["lng"] = self.longitude
    kwargs["params"]["alt"] = self.altitude

    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(total=DEFAULT_TIMEOUT))
    assert session

    try:
        async with session.request(
            method, f"{API_URL_SCAFFOLD}/{endpoint}", **kwargs
        ) as resp:
            resp.raise_for_status()
            data = await resp.json()
    finally:
        # Only close sessions created here, never the caller's.
        if not use_running_session:
            await session.close()

    self._logger.debug("Received data for %s: %s", endpoint, data)
    return cast(Dict[str, Any], data)
async def _perform_request(self, session: aiohttp.ClientSession,
                           config: _NotifyConfiguration) -> None:
    """Send the periodic ``local_reg`` keep-alive to the device.

    Uses POST for first contact and PUT once the device is known to be
    available; flips ``config.device.available`` according to the outcome.

    :raises ConnectionError: when the device does not answer 202 Accepted.
    """
    method = 'PUT' if config.device.available else 'POST'
    # Ask the device to poll us when commands are queued for it.
    self._json['local_reg']['notify'] = int(
        config.device.commands_queue.qsize() > 0)
    url = 'http://{}/local_reg.json'.format(config.device.ip_address)
    try:
        logging.debug('[KeepAlive] Sending {} {} {}'.format(
            method, url, json.dumps(self._json)))
        async with session.request(method, url, json=self._json,
                                   headers=config.headers) as resp:
            # The device acks a successful registration with 202 Accepted.
            if resp.status != HTTPStatus.ACCEPTED.value:
                resp_data = await resp.text()
                logging.error(
                    '[KeepAlive] Sending local_reg failed: {}, {}'.format(
                        resp.status, resp_data))
                raise ConnectionError(
                    'Sending local_reg failed: {}, {}'.format(
                        resp.status, resp_data))
    # NOTE(review): bare except also fires on CancelledError; it re-raises,
    # but still marks the device down first — confirm that is intended.
    except:
        config.device.available = False
        raise
    else:
        config.device.available = True
async def _async_request(self, method: str, url: str,
                         **kwargs: Dict[str, Any]) -> Dict[str, Any]:
    """Make a request against the IQVIA API.

    Injects the JSON content type, a Referer derived from *url* and the
    default User-Agent, fetches ``<url>/<zip_code>`` and returns the JSON
    payload.

    FIX: the temporary session is now closed in a ``finally`` block so it is
    not leaked when the request or ``raise_for_status`` raises.
    """
    url_pieces = urlparse(url)
    kwargs.setdefault("headers", {})
    kwargs["headers"]["Content-Type"] = "application/json"
    kwargs["headers"][
        "Referer"] = f"{url_pieces.scheme}://{url_pieces.netloc}"
    kwargs["headers"]["User-Agent"] = DEFAULT_USER_AGENT

    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(
            total=DEFAULT_TIMEOUT))
    assert session

    try:
        async with session.request(method, f"{url}/{self.zip_code}",
                                   **kwargs) as resp:
            resp.raise_for_status()
            data = await resp.json()
    finally:
        # Only close sessions created here, never the caller's.
        if not use_running_session:
            await session.close()

    self._logger.debug("Received data for %s: %s", url, data)
    return cast(Dict[str, Any], data)
async def subprocess_based_service(cmd: List[str], service_url: str,
                                   log_file: TextIO) -> WebDriver:
    """Launch a webdriver binary and wait until its /status endpoint answers.

    Spawns *cmd*, polls ``service_url + '/status'`` (~30 tries, 0.5s apart),
    and returns a WebDriver wired to the running service.  All acquired
    resources are registered as closers and rolled back in reverse order on
    failure.
    """
    check_event_loop()
    closers = []
    try:
        # NOTE(review): this compares a TextIO against the *string*
        # os.devnull — it only matches when the caller passed os.devnull
        # itself, not an opened devnull file; confirm intent.
        if log_file is os.devnull:
            log_file = DEVNULL
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=log_file,
            stderr=log_file,
        )
        closers.append(partial(stop_process, process))
        session = ClientSession()
        closers.append(sync_factory(session.close))
        count = 0
        while True:
            try:
                # NOTE(review): the response object is never released here,
                # and the bare except treats every failure mode the same.
                await tasked(
                    session.request(url=service_url + '/status',
                                    method='GET'))
                break
            except:  # TODO: make this better
                count += 1
                if count > 30:
                    raise Exception('not starting?')
                await asyncio.sleep(0.5)
        return WebDriver(
            Connection(session, service_url),
            closers,
        )
    except:
        # Unwind in LIFO order, mirroring acquisition order.
        for closer in reversed(closers):
            await closer()
        raise
async def _request(self, method: str, endpoint: str, **kwargs) -> dict:
    """Make a request the API.com.

    Adds the token header when available, decodes the JSON body (read before
    ``raise_for_status`` so error payloads are still available) and maps
    failures to the library's exception types.

    FIX: ``data`` used to be referenced in the except-branch even when the
    request failed before the body was decoded, raising UnboundLocalError
    and masking the real error; it is now pre-initialised and the error
    message falls back to ``str(err)`` when no error payload exists.
    """
    url: str = f"{API_BASE}/{endpoint}"
    kwargs.setdefault("headers", {})
    if self._token:
        kwargs["headers"]["Authorization"] = f"Token token={self._token}"

    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(
            total=DEFAULT_TIMEOUT))

    data: dict = {}
    try:
        async with session.request(method, url, **kwargs) as resp:
            data = await resp.json(content_type=None)
            resp.raise_for_status()
            return data
    except ClientError as err:
        if "401" in str(err):
            raise InvalidCredentialsError("Invalid credentials") from err
        try:
            message = data["errors"][0]["title"]
        except (KeyError, IndexError, TypeError):
            message = str(err)
        raise RequestError(message) from err
    finally:
        if not use_running_session:
            await session.close()
async def async_request(self, method: str, endpoint: str,
                        rawdata: bool = False) -> dict:
    """Make a request against the SmartWeather API.

    Returns the body decoded as UTF-8 text, or the raw bytes when *rawdata*
    is True.

    FIXES: the timeout message was missing its ``f`` prefix (it reported the
    literal ``{endpoint}``), and the final bare ``except`` is narrowed to
    ``except Exception`` so task cancellation propagates.
    """
    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(
            total=DEFAULT_TIMEOUT))
    try:
        async with session.request(method, endpoint) as resp:
            resp.raise_for_status()
            data = await resp.read()
            if rawdata:
                return data
            return data.decode("utf-8")
    except asyncio.TimeoutError:
        raise RequestError(f"Request to endpoint timed out: {endpoint}")
    except ClientError as err:
        raise RequestError(f"Error requesting data from {endpoint}: {err}")
    except Exception:
        raise RequestError(f"Error occurred: {sys.exc_info()[1]}")
    finally:
        if not use_running_session:
            await session.close()
async def async_request(self, method: str, endpoint: str) -> dict:
    """Make a request against the SmartWeather API.

    FIX: the timeout error message was missing its ``f`` prefix, so the
    literal text ``{endpoint}`` was reported instead of the endpoint.
    """
    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(
            total=DEFAULT_TIMEOUT))
    try:
        async with session.request(method, f"{BASE_URL}/{endpoint}") as resp:
            resp.raise_for_status()
            data = await resp.json()
            return data
    except asyncio.TimeoutError:
        raise RequestError(f"Request to endpoint timed out: {endpoint}")
    except ClientError as err:
        # The upstream API encodes auth/lookup failures in the reason text.
        if "Unauthorized" in str(err):
            raise InvalidApiKey(
                "Your API Key is invalid or does not support this operation"
            )
        elif "Not Found" in str(err):
            raise ResultError("The Station ID does not exist")
        else:
            raise RequestError(
                f"Error requesting data from {endpoint}: {err}") from None
    finally:
        if not use_running_session:
            await session.close()
async def request(url, session: aiohttp.ClientSession, *, timeout=180,
                  method: str = "GET", return_text=False,
                  **kwargs) -> typing.Union[dict, str, bytes]:
    """Perform an HTTP request and decode the response.

    Returns text when *return_text* is set, otherwise JSON when possible,
    falling back to raw bytes.  Raises the project's HTTP errors for
    ratelimits, 404s and any other non-2xx status.
    """
    async with session.request(method, url, timeout=timeout,
                               **kwargs) as resp:
        status = resp.status
        if status == 429:
            log.info(f"Ratelimited while requesting {url}")
            raise errors.HTTPRatelimited(resp)
        # TODO: Make it better
        if status == 404:
            log.info(f"404 while requesting {url}")
            raise errors.HTTPException(resp)
        if not 200 <= status < 300:
            raise errors.HTTPException(resp)
        if return_text is True:
            return await resp.text()
        try:
            return await resp.json()
        except aiohttp.ContentTypeError:
            return await resp.read()
async def async_http_request(name, session: ClientSession, **kwargs) -> str:
    """
    Invokes aiohttp client session request

    :param name: logical name of the call, used only for logging
    :param session: aiohttp client session to perform the request with
    :param kwargs: request arguments; reads 'username', 'url' and 'method'
        for logging, and optionally 'data', 'headers' and 'params'
    :return: response body text
    :raises ClientResponseError: when the status is outside 200..208
    """
    # Project-specific serialization of the request arguments.
    _kwargs = serialize(kwargs)
    async with session.request(timeout=DEFAULT_REST_REQUEST_TIMEOUT,
                               **_kwargs) as resp:
        resp_data = await resp.text()
        description = HTTPCodesDescription.get_description(
            resp.status, **kwargs)
        logger.debug("'%s' '%s' %s %s, status: %s, description: %s"
                     "\n\tpayload: %s\n\trequest headers: %s\n\tparams: %s"
                     "\n\tresponse data: %s\n\tresponse headers: %s"
                     % (kwargs['username'], name, kwargs['url'],
                        kwargs['method'].upper(), resp.status, description,
                        kwargs.get('data'), kwargs.get('headers'),
                        kwargs.get('params'), resp_data,
                        dict(resp.headers)))
        # TODO: replace dirty hack — request_info/history here are not the
        # types ClientResponseError normally expects.
        if resp.status not in list(range(200, 209)):
            raise ClientResponseError(request_info=kwargs,
                                      history='',
                                      code=resp.status)
        return resp_data
async def _http_request(
    self,
    session: aiohttp.ClientSession,
    method: str,
    request: GraphQLRequest,
    **kwargs: Any,
):
    """
    Helper method to make an http request using the provided *session*.

    :param session: Session to use when making the request.
    :param method: HTTP method to use when making the request.
    :param request: Prepared GraphQL request to dispatch to the server.
    :param kwargs: Additional arguments to pass to
        :method:`aiohttp.ClientSession.request` when making the request.
    :raises: :class:`GraphQLRequestException` when the server responds
        with a non 200 status code.
    :return: Query response.
    """
    async with session.request(method=method,
                               url=self.endpoint,
                               headers=request.headers,
                               **kwargs) as resp:
        payload = await resp.json()
        result = GraphQLResponse(request=request, json=payload)
        # Anything outside the 2xx range is surfaced as an exception that
        # still carries the parsed response.
        if not 200 <= resp.status < 300:
            raise GraphQLRequestException(result)
        return result
async def fetch(
    self,
    session: aiohttp.ClientSession,
    url: str,
    method: str,
    response_method: str,
    *args,
    **kwargs,
) -> Response:
    """Perform a request and package the response.

    *response_method* selects how the body is decoded: ``json``, ``read``
    or ``text``; any other value raises ValueError.
    """
    async with session.request(method, url, *args, **kwargs) as response:
        readers: dict[str, Any] = {
            "json": response.json,
            "read": response.read,
            "text": response.text,
        }
        if response_method not in readers:
            raise ValueError(
                f"Invalid response_method value: {response_method}")
        body = await readers[response_method]()
        return Response(
            response.status,
            response.reason,
            body,
            response.url,
            response.headers,
        )
async def _async_request(self, method: str, endpoint: str,
                         **kwargs) -> ElementTree:
    """Make a request to the API and return the XML response.

    Adds the auth token as a query parameter, parses the body as XML and
    runs the project's response-error check before returning the root.
    """
    kwargs.setdefault("params", {})
    kwargs["params"]["auth_token"] = self._api_token

    reuse = self._session and not self._session.closed
    if reuse:
        session = self._session
    else:
        session = ClientSession(
            timeout=ClientTimeout(total=DEFAULT_TIMEOUT))

    try:
        async with session.request(
                method, f"{API_URL_BASE}/{endpoint}", **kwargs) as resp:
            resp.raise_for_status()
            body = await resp.text()
            _LOGGER.debug("Response text for %s: %s", endpoint, body)
            root = ElementTree.fromstring(body.encode("utf-8"))
            # Surface API-level errors embedded in an otherwise-200 body.
            raise_on_response_error(root)
            return root
    except ClientError as err:
        raise RequestError(err) from None
    finally:
        if not reuse:
            await session.close()
async def async_request(self, method: str, endpoint: str) -> dict:
    """Make a request against the SmartWeather (Meteobridge) endpoint.

    FIXES: the timeout message was missing its ``f`` prefix, and error
    classification now matches on ``str(err)`` — as the sibling clients in
    this file do — instead of ``err.message``, which only exists on
    ``ClientResponseError`` and raised AttributeError for other
    ``ClientError`` subclasses.
    """
    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(
            total=DEFAULT_TIMEOUT))
    try:
        async with session.request(method, endpoint) as resp:
            resp.raise_for_status()
            data = await resp.read()
            return data.decode("utf-8")
    except asyncio.TimeoutError:
        raise RequestError(f"Request to endpoint timed out: {endpoint}")
    except ClientError as err:
        if "Unauthorized" in str(err):
            raise InvalidCredentials(
                "Your Username/Password combination is not correct")
        elif "Not Found" in str(err):
            raise ResultError(
                "The Meteobridge cannot not be found on this IP Address")
        else:
            raise RequestError(
                f"Error requesting data from {endpoint}: {err}") from None
    finally:
        if not use_running_session:
            await session.close()
async def check_password_moxahttp_1_0(cls, client: aiohttp.ClientSession,
                                      context: ssl.SSLContext, content: str,
                                      router: BaseIndustrialRouter):
    """
    Method for checking the passwords in MoxaHttp/1.0 router authentication schemas.

    :param client: ClientSession for the connection to the router.
    :param context: SSLContext of the connection.
    :param content: Content of the response of the router.
    :param router: :class:`aztarna.industrialrouters.scanner.BaseIndustrialRouter` router to check.
    """
    # Moxa 1.0 login echoes a server-provided challenge back in the URL.
    challenge = cls.get_challenge_moxahttp_1_0(content)
    for clear_password, password in cls.default_credentials_http1:
        uri = '{}://{}:{}/home.htm?Password={}&Submit=Submit&token_text=&FakeChallenge={}' \
            .format(router.protocol, router.address, router.port, password,
                    challenge)
        try:
            async with client.request('GET', uri, ssl=context) as response:
                content = str(await response.content.read())
                # NOTE(review): this checks the MoxaHttp/2.2 success marker
                # inside the 1.0 checker — confirm this is intentional.
                if cls.valid_login_text_moxahttp_2_2 in content:
                    router.valid_credentials.append(clear_password)
        # Best-effort scan: any connection failure is only logged.
        except:
            logger.warning('[-] Connection to {} failed'.format(
                router.address))
async def _async_request(self, method: str, url: str, data: str = "",
                         **kwargs) -> list:
    """Make a request against the API and return the decoded JSON payload.

    FIX: a non-ClientError exception used to be logged and then silently
    swallowed, making the coroutine return ``None`` despite the declared
    ``list`` result; it is now re-raised as a RequestError.
    """
    kwargs.setdefault("headers", {})
    kwargs.setdefault("proxy", SYSTEM_PROXY)

    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(
            total=DEFAULT_TIMEOUT))
    try:
        async with session.request(method, url, data=data,
                                   **kwargs) as resp:
            resp.raise_for_status()
            return await resp.json(content_type=None)
    except ClientError as err:
        _LOGGER.error(f"Error requesting data from {url}: {err}")
        raise RequestError(f"Error requesting data from {url}: {err}")
    except Exception as e:
        _LOGGER.error(f"Error requesting data from {url}: {e}")
        raise RequestError(f"Error requesting data from {url}: {e}") from e
    finally:
        if not use_running_session:
            await session.close()
async def async_request(self, method: str, endpoint: str) -> dict:
    """Make a request against the Weatherbit API.

    FIX: the final bare ``except`` is narrowed to ``except Exception`` so
    task cancellation is no longer swallowed into a RequestError.
    """
    use_running_session = self._session and not self._session.closed
    if use_running_session:
        session = self._session
    else:
        session = ClientSession(timeout=ClientTimeout(
            total=DEFAULT_TIMEOUT))
    try:
        async with session.request(method, f"{BASE_URL}/{endpoint}") as resp:
            resp.raise_for_status()
            data = await resp.json()
            return data
    except asyncio.TimeoutError:
        raise RequestError(
            f"Request to endpoint timed out: {BASE_URL}/{endpoint}")
    except ClientError as err:
        # Weatherbit reports a bad or unentitled key as 403 Forbidden.
        if "Forbidden" in str(err):
            raise InvalidApiKey(
                "Your API Key is invalid or does not support this operation"
            )
        else:
            raise RequestError(
                f"Error requesting data from {BASE_URL}: {str(err)}")
    except Exception:
        raise RequestError(f"Error occurred: {sys.exc_info()[1]}")
    finally:
        if not use_running_session:
            await session.close()
async def perform_client_request(
        session: aiohttp.ClientSession,
        request: aiohttp.web.Request,
        proxy: typing.Optional[str] = None) -> aiohttp.web.Response:
    '''
    Perform a client request.

    :param session: session to perform requests with
    :param request: request to perform
    :param proxy: proxy to use, if any
    :return: response from client
    '''
    http_kwargs = {}
    headers = dict(request.headers)
    headers['Content-Type'] = request.content_type
    if request.can_read_body:
        body = await request.read()
        # FIX: ClientSession.request() takes the payload via ``data=``;
        # ``body=`` is not an accepted keyword argument.
        http_kwargs['data'] = body
        handle_aiohttp_decompression(body, headers)
    if 'Connection' in headers:
        # Do not hold the upstream connection open on the client's behalf.
        headers['Connection'] = 'close'
    async with session.request(request.method, request.url,
                               headers=headers, params=request.query,
                               proxy=proxy, **http_kwargs) as response:
        return await make_response(response)
async def _request(self, method: str, endpoint: str, **kwargs) -> list:
    """Make a request against the API.

    Adds the Ambient API/application keys as query parameters and returns
    the decoded JSON payload; failures surface as RequestError.
    """
    # In order to deal with Ambient's fairly aggressive rate limiting, we
    # pause for a second before continuing in case any requests came before
    # this.
    # https://ambientweather.docs.apiary.io/#introduction/rate-limiting
    await asyncio.sleep(1)

    url = f"{REST_API_BASE}/v{self._api_version}/{endpoint}"
    params = kwargs.setdefault("params", {})
    params["apiKey"] = self._api_key
    params["applicationKey"] = self._application_key

    reuse = self._session and not self._session.closed
    if reuse:
        session = self._session
    else:
        session = ClientSession(
            timeout=ClientTimeout(total=DEFAULT_TIMEOUT))

    try:
        async with session.request(method, url, **kwargs) as resp:
            resp.raise_for_status()
            return await resp.json(content_type=None)
    except ClientError as err:
        raise RequestError(f"Error requesting data from {url}: {err}")
    finally:
        # Only tear down sessions created by this call.
        if not reuse:
            await session.close()
async def doAsyncAction(self, queue: asyncio.Queue,
                        session: aiohttp.ClientSession):
    """Run this action's HTTP request once, driving the retry/callback chain.

    Records start/end timestamps and bumps the retry counter; timeouts and
    generic client errors are logged and swallowed, while connection errors
    are re-raised so the caller can react.
    """
    self.retryCounter += 1
    self.startTime = datetime.utcnow()
    try:
        async with session.request(self.method, self.url,
                                   **self.requestParams) as response:
            # A state object threads the response through each processing
            # step; every step may replace it.
            state = RequestState(action=self, response=response,
                                 queue=queue)
            state = await self.waitForResponseText(state)
            if self.retryOnFail:
                state = await self.checkForRetry(state)
            if self.callback:
                state = await self.callback(state)
            if self.storeResults:
                self.completedActionData = state.responseText
            return
    except asyncio.TimeoutError as e:
        logger.exception(e)
    except aiohttp.ClientConnectionError as e:
        # Connection problems are fatal for this action: log and propagate.
        logger.exception(f"Connection error in {self} ")
        raise e
    except aiohttp.ClientError as e:
        logger.exception(e)
    finally:
        # endTime is recorded on both success and failure paths.
        self.endTime = datetime.utcnow()
async def _get_article_urls_from_nav_page(sess: aiohttp.ClientSession,
                                          category: int, date: str,
                                          page: int) -> List[str]:
    """Scrape one Naver News navigation page and return its article URLs.

    FIXES: the empty-container ``continue`` previously did not advance the
    scan cursor, so if that branch ever fired the loop would spin forever —
    the cursor now advances before the blank check.  The ``re.search``
    result is also guarded instead of unconditionally dereferenced.
    """
    nav_url = ('https://news.naver.com/main/list.nhn?mode=LSD&mid=shm'
               '&sid1={category}&date={date}&page={page}'.format(
                   category=category, date=date, page=page))

    # Read navigation page and extract article links.
    async with sess.request('GET', nav_url) as resp:
        document = await resp.text()
        document = document[document.find('<ul class="type06_headline">'):]

    # Extract article url containers (the headline list plus the list that
    # immediately follows it).
    list1 = document[:document.find('</ul>')]
    list2 = document[document.find('</ul>') + 5:]
    list2 = list2[:list2.find('</ul>')]
    document = list1 + list2

    # Extract all article urls from their containers.
    article_urls = []
    while '<dt>' in document:
        document = document[document.find('<dt>'):]
        container = document[:document.find('</dt>')]
        # Advance past this container BEFORE any skip, so the loop always
        # makes progress.
        document = document[document.find('</dt>'):]
        if not container.strip():
            continue
        match = re.search(r'<a href="(.*?)"', container)
        if match:
            article_urls.append(match.group(1))

    return article_urls
async def send_log(self, session: aiohttp.ClientSession,
                   request_dict: Dict[str, Any]):
    """Ship a batch of logs to the log server and verify it was accepted."""
    method = request_dict["method"]
    url = request_dict["url"]
    async with session.request(method, url,
                               **request_dict["request_obj"]) as resp:
        body = await resp.text()
        self.logger().debug(
            f"Sent logs: {resp.status} {resp.url} {body} ",
            extra={"do_not_send": True})
        # 404/405/400 are treated as permanent, non-retryable outcomes.
        if resp.status != 200 and resp.status not in {404, 405, 400}:
            raise EnvironmentError("Failed sending logs to log server.")
async def get_section_links(
        session: aiohttp.ClientSession) -> bs4.element.ResultSet:
    """Fetching links to sections"""
    # TLS verification is disabled for the main page fetch.
    async with session.request('get', MAIN_PAGE,
                               verify_ssl=False) as resp:
        main_html = await resp.text()
    soup = bs4.BeautifulSoup(main_html, PARSER)
    # All anchors inside the contents wrapper are section links.
    return soup.find('div', class_='contents-wrap').find_all('a')
async def execute(self, session: ClientSession):
    """Run the prepared request, funnelling success and HTTP errors through
    their respective processors."""
    try:
        async with session.request(**self._request_params()) as response:
            return await self.process_response(response)
    except ClientResponseError as error:
        return await self.process_response_error(error)
async def worker_single(target):
    """Fetch a single *target* URL and build a result document.

    Creates a throw-away ClientSession per call (optionally with TLS checks
    disabled for certificate collection), performs the request, and falls
    back to digging the peer IP out of aiohttp internals when it is not
    otherwise available.  On any error a template error record is returned.
    """

    def return_ip_from_deep(sess, response) -> str:
        # Best-effort peer-IP extraction: first from the response transport,
        # then from the connector's pooled connections.
        try:
            ip_port = response.connection.transport.get_extra_info('peername')
            if check_ip(ip_port[0]):
                return ip_port[0]
        except:
            pass
        try:
            _tmp_conn_key = sess.connector._conns.items()
            for k, v in _tmp_conn_key:
                _h = v[0][0]
                ip_port = _h.transport.get_extra_info('peername')
                if check_ip(ip_port[0]):
                    return ip_port[0]
        except:
            pass
        return ''

    result = None
    timeout = ClientTimeout(target.timeout)
    if target.sslcheck:
        # Certificate-collection mode: one connection per host, wrapped
        # response class that records TLS details.
        conn = TCPConnector(ssl=False, limit_per_host=1)
        session = ClientSession(timeout=timeout,
                                connector=conn,
                                response_class=WrappedResponseClass)
    else:
        session = ClientSession(timeout=timeout)
    try:
        # files = {'file': open('payload', 'rb')}
        # async with session.request(target.method,
        #                            target.url,
        #                            allow_redirects=target.allow_redirects,
        #                            data=files) as response:
        # https://docs.aiohttp.org/en/stable/client_quickstart.html
        # post-a-multipart-encoded-file
        async with session.request(target.method,
                                   target.url,
                                   headers=target.headers,
                                   allow_redirects=target.allow_redirects,
                                   data=target.payload,
                                   timeout=timeout) as response:
            result = await make_document_from_response(response, target)
            # Getting at the server IP is not officially supported by
            # aiohttp (per its author), so return_ip_from_deep rummages
            # through private internals.
            # link: https://github.com/aio-libs/aiohttp/issues/4249
            # It is ugly, but there are nuances — leave it this way for now.
            if len(result['ip']) == 0:
                _ip = return_ip_from_deep(session, response)
                result['ip'] = _ip
        await asyncio.sleep(0.05)
        await session.close()
    except Exception as e:
        result = create_template_error(target, str(e))
        await asyncio.sleep(0.05)
        await session.close()
    return result
class CRLeaderBoard(commands.Cog):
    """Cog that builds a Clash Royale trophy leaderboard across fixed clans."""

    def __init__(self, bot):
        self.bot = bot
        self.loop = asyncio.get_event_loop()
        # Long-lived session shared by all fetches.
        self.session = ClientSession(loop=self.loop)

    async def crtoken(self):
        # Load the Clash Royale API token shared with the bot.
        token = await self.bot.get_shared_api_tokens("clashroyale")
        if token['token'] is None:
            print("CR token not set")
        CRKEY = token['token']
        # NOTE(review): `headers` is published as a module-level global and
        # only exists after crtoken() has run; fetch() depends on it.
        global headers
        headers = {"Authorization": f'Bearer {CRKEY}'}

    async def fetch(self, url):
        """GET *url* with the CR auth headers and return the raw body."""
        async with self.session.request("GET", url=url,
                                        headers=headers) as response:
            return await response.read()

    async def get_member_data(self, tag):
        """Merge a clan's members into the name -> trophies mapping.

        NOTE(review): `player_trophy_list` is defined outside this class and
        never cleared, so entries accumulate across invocations.
        """
        url = f"https://proxy.royaleapi.dev/v1/clans/%23{tag}"
        clan_data = json.loads(await self.fetch(url))
        async for member in AsyncIter(clan_data["memberList"]):
            name = member['name'] + "(" + member['tag'] + ")"
            player_trophy_list[name] = member['trophies']
        return player_trophy_list

    @commands.command()
    async def legendleaderboard(self, ctx):
        """Paginated embeds listing members of the configured clans by trophies."""
        await ctx.channel.trigger_typing()
        desc = ""
        # Hard-coded tags of the participating clans.
        clan_tags = [
            "8QRQQ8RG", "L8J2VC20", "YLULCRQJ", "99R2PQVR", "PRCRJYCR",
            "J0CQ9R9", "P9GG9QQY", "Y8G9C09", "2CJ88808", "80CC8",
            "RY9QJU2", "9P2PQULQ", "9PJYVVL2", "VJQ0GJ0", "29YPJYY",
            "Q0JJ2GG2"
        ]
        embed_list = []
        # Collect every clan's members into the shared mapping.
        async for tag in AsyncIter(clan_tags):
            member_list = await (self.get_member_data(tag))
        # NOTE(review): indentation reconstructed from flattened source —
        # confirm the aggregation below runs after all clans are fetched.
        # Sort all collected members by trophy count, descending.
        final = {
            k: v
            for k, v in sorted(
                member_list.items(), key=lambda x: x[1], reverse=True)
        }
        async for k, v in AsyncIter(enumerate(final)):
            trophies = final[v]
            desc += f"**Number {k+1}**- **{v}** - **{trophies}** trophies \n"
            # One embed page per 20 entries.
            if (k + 1) % 20 == 0:
                embed = discord.Embed(color=discord.Color.blue(),
                                      title="Legend Trophy Leaderboard",
                                      description=desc)
                embed_list.append(embed)
                desc = ""
        await menu(ctx, embed_list, DEFAULT_CONTROLS)
class FirebaseHTTP:
    """
    HTTP Client for Firebase.

    Args:
        base_url (str): URL to your data.
        auth (string): Auth key.
        loop (class:`asyncio.BaseEventLoop`): Loop.
    """

    def __init__(self, base_url, auth=None, loop=None):
        """Initialise the class."""
        self._loop = loop or asyncio.get_event_loop()
        self._base_url = base_url
        self._auth = auth
        self._session = ClientSession(loop=self._loop)

    async def close(self):
        """Gracefully close the session."""
        await self._session.close()

    async def get(self, *, path=None, params=None):
        """Perform a GET request."""
        return await self._request(method='GET', path=path, params=params)

    async def put(self, *, value, path=None, params=None):
        """Perform a put request."""
        return await self._request(method='PUT', value=value, path=path,
                                   params=params)

    async def post(self, *, value, path=None, params=None):
        """Perform a POST request."""
        return await self._request(method='POST', value=value, path=path,
                                   params=params)

    async def patch(self, *, value, path=None, params=None):
        """Perform a PATCH request."""
        return await self._request(method='PATCH', value=value, path=path,
                                   params=params)

    async def delete(self, *, path=None, params=None):
        """Perform a DELETE request."""
        return await self._request(method='DELETE', path=path, params=params)

    async def stream(self, *, callback, path=None):
        """Hook up to the EventSource stream.

        FIX: previously the *coroutine* ``resp.content.read()`` was handed
        to an ``async for`` (a TypeError) inside ``while True``, which would
        then have spun forever once the stream closed.  The StreamReader is
        async-iterable itself, so it is passed directly and consumed once,
        until the server closes the stream.
        """
        url = posixpath.join(self._base_url, path) if path else self._base_url
        headers = {'accept': 'text/event-stream'}
        async with self._session.get(url, headers=headers) as resp:
            await FirebaseHTTP._iterate_over_stream(resp.content, callback)

    @staticmethod
    async def _iterate_over_stream(iterable, callback):
        """Iterate over the EventSource stream and pass the event and data to
        the callback as and when we receive it."""
        # FIX: `event` could previously be unbound if a data line arrived
        # before any event line.
        event = None
        async for msg in iterable:
            msg_str = msg.decode('utf-8').strip()
            if not msg_str:
                continue
            key, value = msg_str.split(':', 1)
            # SSE fields arrive as "key: value" — strip the leading space so
            # event-name comparisons below can match.
            value = value.strip()
            if key == 'event' and value == 'cancel':
                raise StreamCancelled('The requested location is no longer '
                                      'allowed due to security/rules changes.')
            elif key == 'event' and value == 'auth_revoked':
                raise StreamAuthRevoked('The auth credentials has expired.')
            elif key == 'event':
                event = value
            elif key == 'data':
                await callback(event=event, data=json.loads(value))

    async def _request(self, *, method, value=None, path=None, params=None):
        """Perform a request to Firebase."""
        url = (posixpath.join(self._base_url, path.strip('/'))
               if path else self._base_url)
        url += '.json'
        data = json.dumps(value) if value else None
        params = params or {}
        headers = {}
        if self._auth:
            params.update({'auth': self._auth})
            headers.update({'typ': 'JWT', 'alg': 'HS256'})
        async with self._session.request(method, url, data=data,
                                         params=params,
                                         headers=headers) as resp:
            # NOTE(review): assert is stripped under -O; consider
            # resp.raise_for_status() instead.
            assert resp.status == 200
            return await resp.json()
class TestClient:
    """
    A test client implementation, for a aiohttp.web.Application.

    Spins up the application on an unused localhost port at construction
    time and tears everything down in :meth:`close`.

    NOTE(review): relies on the legacy aiohttp API (``app.make_handler``,
    ``handler.finish_connections``, ``@asyncio.coroutine``); calling
    ``run_until_complete`` from ``__del__`` can fail if the loop is already
    closed at interpreter shutdown.

    :param app: the aiohttp.web application passed to create_test_server
    :type app: aiohttp.web.Application
    :param protocol: URL scheme used when building request URLs
    :type protocol: str
    """

    def __init__(self, app, protocol="http"):
        self._app = app
        self._loop = loop = app.loop
        self.port = unused_port()
        self._handler = handler = app.make_handler()
        # The server starts listening immediately on 127.0.0.1:<port>.
        self._server = loop.run_until_complete(loop.create_server(
            handler, '127.0.0.1', self.port
        ))
        self._session = ClientSession(loop=self._loop)
        self._root = "{}://127.0.0.1:{}".format(
            protocol, self.port
        )
        self._closed = False

    def request(self, method, url, *args, **kwargs):
        # Context-manager wrapper so callers can use `async with`.
        return _RequestContextManager(self._request(
            method, url, *args, **kwargs
        ))

    @asyncio.coroutine
    def _request(self, method, url, *args, **kwargs):
        """
        routes a request to the http server.
        the interface is identical to asyncio.request,
        except the loop kwarg is overriden
        by the instance used by the application.
        """
        return (yield from self._session.request(
            method, self._root + url, *args, **kwargs
        ))

    def close(self):
        # Idempotent teardown: session, handler connections, app
        # finalizers, then the listening socket.
        if not self._closed:
            loop = self._loop
            loop.run_until_complete(self._session.close())
            loop.run_until_complete(self._handler.finish_connections())
            loop.run_until_complete(self._app.finish())
            self._server.close()
            loop.run_until_complete(self._server.wait_closed())
            self._closed = True

    def __del__(self):
        self.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()