async def setUp(self):
    """Prepare per-test fixtures: result buffers and a pooled aiohttp session."""
    self.message_results = []
    self.webhook_results = []
    self.port = 0
    # Cap total connections (16) and per-host connections (4) for the tests.
    self.connector = TCPConnector(limit=16, limit_per_host=4)
    # Build the session from the args dict — the original constructed this
    # dict and then ignored it, re-spelling the same arguments inline.
    session_args = {"cookie_jar": DummyCookieJar(), "connector": self.connector}
    self.client_session = ClientSession(**session_args)
async def test_aiohttp_extra_args(event_loop, aiohttp_server):
    """Check that extra kwargs reach both ClientSession and the POST call."""

    async def handler(request):
        return web.Response(
            text=query1_server_answer, content_type="application/json"
        )

    app = web.Application()
    app.router.add_route("POST", "/", handler)
    server = await aiohttp_server(app)
    url = server.make_url("/")

    # passing extra arguments to aiohttp.ClientSession
    jar = DummyCookieJar()
    sample_transport = AIOHTTPTransport(
        url=url,
        timeout=10,
        client_session_args={"version": "1.1", "cookie_jar": jar},
    )

    async with Client(transport=sample_transport) as session:
        query = gql(query1_str)
        # Passing extra arguments to the post method of aiohttp
        result = await session.execute(
            query, extra_args={"allow_redirects": False}
        )
        africa = result["continents"][0]
        assert africa["code"] == "AF"
def create_session(cls, connector: TCPConnector = None, timeout: int = 5, headers: dict = None) -> ClientSession:
    """
    Create and return an aiohttp ClientSession with default or some custom parameters.

    :param connector: Connector for standard TCP Sockets; when omitted, a
        pooled default connector is built (the original accepted this
        parameter and then unconditionally overwrote it).
    :param timeout: Maximum time to wait for an HTTP response
    :param headers: Spoofed request headers to trick some websites; falls
        back to the module-level HEADERS when omitted (the original ignored
        this parameter entirely).
    :return session: Session ready to go
    """
    if connector is None:
        # NOTE: in the original, `resolver` was always truthy here, so the
        # `resolver if resolver else cls.create_resolver()` branch was dead.
        resolver = AsyncResolver(nameservers=DNS_NAMESERVERS)
        connector = TCPConnector(
            limit=100,
            limit_per_host=0,
            resolver=resolver,
            use_dns_cache=False,
            force_close=True,
            family=socket.AF_INET,
            ssl=False,
        )
    session = ClientSession(
        cookie_jar=DummyCookieJar(),
        connector=connector,
        timeout=ClientTimeout(total=timeout),
        headers=headers if headers is not None else HEADERS,
        connector_owner=True,
    )
    return session
async def _process_webhooks(self):
    """Continuously poll webhook queue and dispatch to targets."""
    session_args = {}
    collector: Collector = await self.context.inject(Collector, required=False)
    if collector:
        session_args["trace_configs"] = [
            StatsTracer(collector, "webhook-http:")
        ]
    session_args["cookie_jar"] = DummyCookieJar()
    self.webhook_session = ClientSession(**session_args)
    self.webhook_processor = TaskProcessor(max_pending=20)
    async for topic, payload in self.webhook_queue:
        # Fan the event out to any live websocket listeners first.
        for queue in self.websocket_queues.values():
            await queue.enqueue({"topic": topic, "payload": payload})
        if self.webhook_targets:
            targets = self.webhook_targets.copy()
            for idx, target in targets.items():
                if not target.topic_filter or topic in target.topic_filter:
                    retries = (
                        self.webhook_retries
                        if target.retries is None
                        else target.retries
                    )
                    await self.webhook_processor.run_retry(
                        # Bind loop variables as lambda defaults: run_retry
                        # may re-invoke this callable on later retries, after
                        # the loop has moved on, and a plain closure would
                        # then see the wrong target/topic/payload
                        # (late-binding bug in the original).
                        lambda pending, endpoint=target.endpoint, topic=topic, payload=payload: (
                            self._perform_send_webhook(
                                endpoint, topic, payload, pending.attempts + 1
                            )
                        ),
                        ident=(target.endpoint, topic),
                        retries=retries,
                    )
        self.webhook_queue.task_done()
async def run(self):
    """Run all prepared tasks concurrently and return their gathered results."""
    # TODO: rework proxy
    session = ClientSession(
        cookie_jar=DummyCookieJar(),
        connector=ProxyConnector(),
        request_class=ProxyClientRequest,
    )
    async with session:
        pending = self.prepare_tasks(session)
        return await asyncio.gather(*pending)
def __init__(self):
    """Initialize the Sayonika app: JSON encoding, HTTP session, error handlers."""
    super().__init__("Sayonika")
    self.route_dir = ""
    self.json_encoder = CombinedEncoder
    # Cookie-less outbound session; raise on any non-success response.
    self.aioh_sess = ClientSession(
        cookie_jar=DummyCookieJar(), raise_for_status=True
    )
    # Register one handler per configured HTTP error code.
    for status, handler in exception_handlers.items():
        self.register_error_handler(status, handler)
async def start(self):
    """Start the transport."""
    kwargs = {}
    if self.collector:
        # Attach request tracing when a stats collector is configured.
        kwargs["trace_configs"] = [StatsTracer(self.collector, "outbound-http:")]
    kwargs["cookie_jar"] = DummyCookieJar()
    self.client_session = ClientSession(**kwargs)
    return self
async def _setup_session(self, app):
    """Handle context sessions nicely.
    `See docs <https://docs.aiohttp.org/en/latest/client_advanced.html#persistent-session>`_"""
    self._key_session = ClientSession(raise_for_status=True)
    self._upstream_session = ClientSession(
        cookie_jar=DummyCookieJar(),
        raise_for_status=False,
        auto_decompress=False,
    )
    yield
    # Teardown: close both sessions concurrently.
    closers = (self._key_session.close(), self._upstream_session.close())
    await asyncio.gather(*closers)
async def __aiter__(self):
    """Yield each task's result as soon as it completes."""
    # TODO: rework proxy
    session = ClientSession(
        cookie_jar=DummyCookieJar(),
        connector=ProxyConnector(),
        request_class=ProxyClientRequest,
    )
    async with session:
        for future in asyncio.as_completed(self.prepare_tasks(session)):
            yield await future
async def on_start(self):
    """Create the crawler's shared aiohttp session, honoring DISABLE_COOKIES."""
    self.disable_cookies = self.crawler.config.get("DISABLE_COOKIES", False)
    self.connector = TCPConnector(limit=None)
    # Use a no-op cookie jar when cookies are disabled. (The original had a
    # duplicated assignment here: `self.session = self.session = ...`.)
    if self.disable_cookies:
        self.session = ClientSession(
            connector=self.connector, cookie_jar=DummyCookieJar()
        )
    else:
        self.session = ClientSession(connector=self.connector)
    self.crawler.session = self.session
async def test_dummy_cookie_jar() -> None:
    """DummyCookieJar must stay empty, ignore updates, and yield nothing."""
    dummy_jar = DummyCookieJar()
    assert len(dummy_jar) == 0
    # Updates are silently dropped.
    dummy_jar.update_cookies(SimpleCookie('foo=bar; Domain=example.com;'))
    assert len(dummy_jar) == 0
    # Iteration is exhausted immediately.
    with pytest.raises(StopIteration):
        next(iter(dummy_jar))
    # Filtering never produces cookies.
    assert not dummy_jar.filter_cookies(URL("http://example.com/"))
    dummy_jar.clear()
async def start(self):
    """Start the transport."""
    self.connector = TCPConnector(limit=200, limit_per_host=50)
    kwargs = {"cookie_jar": DummyCookieJar(), "connector": self.connector}
    if self.collector:
        # Attach request tracing when a stats collector is configured.
        kwargs["trace_configs"] = [StatsTracer(self.collector, "outbound-http:")]
    self.client_session = ClientSession(**kwargs)
    return self
async def load_page(url=None, method='GET', body=None):
    """
    Send request to the schedule url

    :param url: Target URL; defaults to options.SCHEDULE_URL when omitted.
    :param method: HTTP method to use (default 'GET').
    :param body: Optional request payload passed as `data`.
    Returns:
        str: decoded body of the response
    """
    if not url:
        url = options.SCHEDULE_URL
    async with ClientSession(cookie_jar=DummyCookieJar()) as session:
        # Use the response as a context manager so the connection is
        # released even if reading the body raises (the original never
        # released the response explicitly).
        async with session.request(url=url, method=method, data=body) as response:
            raw_response_body = await response.content.read()
    return raw_response_body.decode(options.BASE_ENCODING)
def get_client(self):
    """Lazily build and cache the outbound aiohttp client session."""
    from aiohttp import ClientSession, DummyCookieJar, UnixConnector, TCPConnector

    if self._client is not None:
        return self._client
    # Prefer a unix-domain-socket connector when one is configured.
    if self.outbound_unix_socket:
        connector = UnixConnector(path=self.outbound_unix_socket)
    else:
        connector = TCPConnector(limit=30)
    self._client = ClientSession(
        connector=connector,
        auto_decompress=False,
        cookie_jar=DummyCookieJar(),
    )
    return self._client
def test_dummy_cookie_jar(loop) -> None:
    """DummyCookieJar ignores updates, iterates empty, and filters nothing."""
    jar = DummyCookieJar(loop=loop)
    assert len(jar) == 0
    # Updates are silently dropped.
    jar.update_cookies(SimpleCookie('foo=bar; Domain=example.com;'))
    assert len(jar) == 0
    # Iteration is exhausted immediately.
    with pytest.raises(StopIteration):
        next(iter(jar))
    # Filtering never produces cookies.
    assert not jar.filter_cookies(URL("http://example.com/"))
    jar.clear()
async def create_new_client(self, request_timeout: int, request_limit: int) -> None:
    """
    Create a new client

    Args:
        request_timeout: The timeout which should be used for making requests
        request_limit: The maximal number of requests per session
    """
    # Tear down any sessions left over from a previous configuration.
    if self.client_session_list:
        await self.close_client()
    # One ClientSession per 500 requested slots, rounded up.
    session_count: int = math.ceil(request_limit / 500)
    # ClientTimeout is immutable, so a single instance is safely shared.
    timeout = ClientTimeout(total=request_timeout)
    for _ in range(session_count):
        connector = TCPConnector(limit=request_limit, enable_cleanup_closed=True)
        session = ClientSession(
            connector=connector, timeout=timeout, cookie_jar=DummyCookieJar()
        )
        self.client_session_list.append(session)
async def start(self):
    """Start the outbound transport."""
    # Cookie-less session: outbound deliveries carry no session state.
    session = ClientSession(cookie_jar=DummyCookieJar())
    self.client_session = session
    return self