def _create_aiohttp_client_session():
    """Build an aiohttp client session that resolves DNS via DNS_SERVICE.

    Uses the current event loop for the resolver and a 30 second
    request timeout.
    """
    event_loop = asyncio.get_event_loop()
    dns_resolver = DNS_SERVICE.aiohttp_resolver(loop=event_loop)
    return aiohttp_client_session(resolver=dns_resolver, timeout=30)
async def _async_init(self):
    """Lazily create the DNS resolver and the HTTP session on first use.

    Subsequent calls are no-ops once both attributes are set.
    """
    if self.resolver is None:
        event_loop = asyncio.get_event_loop()
        if self.dns_service is not None:
            # Prefer the project's DNS service when one was provided.
            self.resolver = self.dns_service.aiohttp_resolver(loop=event_loop)
        else:
            self.resolver = aiohttp.AsyncResolver(loop=event_loop)
    if self.session is None:
        self.session = aiohttp_client_session(
            resolver=self.resolver,
            timeout=self.request_timeout,
        )
async def _async_init(self):
    """Create the shared HTTP session on first call; later calls are no-ops."""
    if self.session is not None:
        return
    self.session = aiohttp_client_session(timeout=self.timeout)
async def image_proxy(request, url, referer=None):
    """Proxy an image at *url* to the client, streaming the body.

    Sends the upstream request with a Referer header first; if the origin
    rejects it (status in REFERER_DENY_STATUS) the request is retried
    without the Referer. Responses larger than MAX_IMAGE_SIZE are refused
    (413) or aborted mid-stream.

    :param request: incoming aiohttp web request being served.
    :param url: image URL to fetch.
    :param referer: explicit referer; when falsy (or the URL forces it)
        one is derived via get_referer_of_url.
    :returns: a StreamResponse on success, or an error response.
    """
    if not referer or is_referer_force_url(url):
        referer = get_referer_of_url(url)
    LOG.info(f'proxy image {url} referer={referer}')
    session = response = None

    async def do_cleanup():
        # Close the upstream response and session, whichever exist so far.
        nonlocal session, response
        if response:
            response.close()
        if session:
            await session.close()

    try:
        # Reject URLs that resolve to private/internal addresses (SSRF guard).
        await check_private_address(url)
        user_agent = DEFAULT_USER_AGENT
        if callable(user_agent):
            user_agent = user_agent()
        headers = {'User-Agent': user_agent}
        for h in PROXY_REQUEST_HEADERS:
            if h in request.headers:
                headers[h] = request.headers[h]
        referer_headers = dict(headers)
        referer_headers['Referer'] = referer
        request_timeout = 30
        session = aiohttp_client_session(
            auto_decompress=False,
            timeout=request_timeout,
        )
        # Try the request with a Referer first; retry without it on denial.
        response = await get_response(session, url, referer_headers)
        if response.status in REFERER_DENY_STATUS:
            LOG.info(f'proxy image {url} referer={referer} '
                     f'failed {response.status}, will try without referer')
            response.close()
            response = await get_response(session, response.url, headers)
        is_chunked = response.headers.get(
            'Transfer-Encoding', '').lower() == 'chunked'
        # using chunked encoding is forbidden for HTTP/1.0
        if is_chunked and request.version < HttpVersion11:
            version = 'HTTP/{0.major}.{0.minor}'.format(request.version)
            error_msg = f"using chunked encoding is forbidden for {version}"
            LOG.info(
                f'proxy image {url} referer={referer} failed: {error_msg}')
            response.close()
            raise ImageProxyError(error_msg)
    except ImageProxyError as ex:
        await do_cleanup()
        return ex.to_response()
    except Exception:
        await do_cleanup()
        raise
    try:
        my_response = StreamResponse(status=response.status)
        # 'Content-Length', 'Content-Type', 'Transfer-Encoding'
        if is_chunked:
            my_response.enable_chunked_encoding()
        elif response.headers.get('Transfer-Encoding'):
            my_response.headers['Transfer-Encoding'] = response.headers[
                'Transfer-Encoding']
        if response.headers.get('Content-Length'):
            content_length = int(response.headers['Content-Length'])
            if content_length > MAX_IMAGE_SIZE:
                # BUGFIX: this early return previously leaked the open
                # upstream response and session; clean up before returning.
                await do_cleanup()
                return json_response({'message': 'image too large'}, status=413)
            my_response.content_length = content_length
        if response.headers.get('Content-Type'):
            my_response.content_type = response.headers['Content-Type']
        for h in PROXY_RESPONSE_HEADERS:
            if h in response.headers:
                my_response.headers[h] = response.headers[h]
        await my_response.prepare(request)
    except Exception:
        await do_cleanup()
        raise
    try:
        # Stream the body in 8 KiB chunks, aborting if the (possibly
        # undeclared) length exceeds MAX_IMAGE_SIZE.
        content_length = 0
        async for chunk in response.content.iter_chunked(8 * 1024):
            content_length += len(chunk)
            if content_length > MAX_IMAGE_SIZE:
                LOG.warning(f'image too large, abort the response, url={url}')
                my_response.force_close()
                break
            await my_response.write(chunk)
        await my_response.write_eof()
    finally:
        await do_cleanup()
    return my_response
async def _async_test_dns_service_aiohttp(url):
    """Verify that aiohttp can fetch *url* via the DNS_SERVICE resolver."""
    session = aiohttp_client_session(resolver=DNS_SERVICE.aiohttp_resolver())
    async with session:
        async with session.get(url) as response:
            assert response.status == 200
async def _async_init(self):
    """Lazily build the aiodns resolver and the HTTP client session."""
    if self.resolver is None:
        current_loop = asyncio.get_event_loop()
        self.resolver = aiodns.DNSResolver(loop=current_loop)
    if self.session is None:
        self.session = aiohttp_client_session(timeout=self.request_timeout)
def _create_session(self, proxy_url: str = None):
    """Build a client session bound to this instance's resolver and timeout.

    ``proxy_url`` optionally routes all requests through a proxy.
    """
    session_options = dict(
        resolver=self.resolver,
        proxy_url=proxy_url,
        timeout=self.request_timeout,
    )
    return aiohttp_client_session(**session_options)