def test_force_close(self):
    """After force_close(), the response must no longer report keep-alive."""
    request = self.make_request('GET', '/')
    response = StreamResponse(request)
    # A fresh response defaults to keep-alive enabled.
    self.assertTrue(response.keep_alive)
    response.force_close()
    self.assertFalse(response.keep_alive)
async def test_start_force_close():
    """force_close() disables keep-alive, and prepare() must not re-enable it."""
    request = make_request('GET', '/')
    response = StreamResponse()
    response.force_close()
    assert not response.keep_alive
    # Preparing the response must leave the forced-close state intact.
    await response.prepare(request)
    assert not response.keep_alive
def test_start_force_close():
    """force_close() disables keep-alive; prepare() must not re-enable it."""
    request = make_request('GET', '/')
    response = StreamResponse()
    response.force_close()
    assert not response.keep_alive
    # Old-style coroutine test: drive prepare() via yield from.
    yield from response.prepare(request)
    assert not response.keep_alive
async def test_start_force_close() -> None:
    """Keep-alive stays disabled through prepare() after force_close()."""
    request = make_request("GET", "/")
    response = StreamResponse()
    response.force_close()
    assert not response.keep_alive
    # prepare() must honour the earlier force_close().
    await response.prepare(request)
    assert not response.keep_alive
def test_start_force_close(self):
    """force_close() disables keep-alive and marks the prepared message closing."""
    request = self.make_request("GET", "/")
    response = StreamResponse()
    response.force_close()
    self.assertFalse(response.keep_alive)
    # Run prepare() to completion on the test loop and inspect the message.
    message = self.loop.run_until_complete(response.prepare(request))
    self.assertFalse(response.keep_alive)
    self.assertTrue(message.closing)
def test_start_force_close(self):
    """A force-closed response stays non-keep-alive and its message is closing."""
    request = self.make_request('GET', '/')
    response = StreamResponse()
    response.force_close()
    self.assertFalse(response.keep_alive)
    # Drive prepare() synchronously via the test event loop.
    message = self.loop.run_until_complete(response.prepare(request))
    self.assertFalse(response.keep_alive)
    self.assertTrue(message.closing)
def test_start_force_close(self):
    """start() on a force-closed response yields a closing message."""
    request = self.make_request('GET', '/')
    response = StreamResponse()
    response.force_close()
    self.assertFalse(response.keep_alive)
    # start() returns the underlying protocol message for inspection.
    message = response.start(request)
    self.assertFalse(response.keep_alive)
    self.assertTrue(message.closing)
def test_start_force_close():
    """force_close() disables keep-alive; the prepared message must be closing."""
    request = make_request('GET', '/')
    response = StreamResponse()
    response.force_close()
    assert not response.keep_alive
    # Old-style coroutine test: prepare() returns the protocol message.
    message = yield from response.prepare(request)
    assert not response.keep_alive
    assert message.closing
async def final_endpoint(request):
    """Stream a plain-text body of `char_qty` bytes and close the connection.

    Builds a 200 StreamResponse, writes the payload via the module-level
    `_write_resp` helper, then force-closes so the connection is not reused.
    """
    # Content-Length is derived from the module-level char_qty value.
    headers = {'Content-Type': 'text/plain', 'Content-Length': f'{char_qty}'}
    response = StreamResponse(status=200, reason='OK', headers=headers)
    await response.prepare(request)
    await _write_resp(response)
    # Explicitly drop keep-alive once the body has been written.
    response.force_close()
    return response
async def stream_file(
        self, request: Request, node: typing.Optional['Node'] = None) -> StreamResponse:
    """Serve this stream's decrypted content to a browser as an HTTP 206 range response.

    Parses the request's Range header, starts the download (if needed), then
    writes decrypted blob data chunk-by-chunk to a 206 StreamResponse. The
    response is tracked in self.streaming_responses while active and always
    removed (and force-closed) on exit.

    Raises:
        asyncio.CancelledError: if the client resets the connection mid-stream.
    """
    log.info("stream file to browser for lbry://%s#%s (sd hash %s...)", self.claim_name, self.claim_id,
             self.sd_hash[:6])
    # Translate the Range header into response headers plus how many whole
    # blobs to skip and the byte offset inside the first delivered blob.
    headers, size, skip_blobs, first_blob_start_offset = self._prepare_range_response_headers(
        request.headers.get('range', 'bytes=0-')
    )
    await self.start(node)
    response = StreamResponse(status=206, headers=headers)
    await response.prepare(request)
    # Register this (request, response) pair so other code can see/cancel
    # active browser streams; self.streaming signals at least one is live.
    self.streaming_responses.append((request, response))
    self.streaming.set()
    wrote = 0
    try:
        async for blob_info, decrypted in self._aiter_read_stream(skip_blobs, connection_id=self.STREAMING_ID):
            if not wrote:
                # First chunk: honor the intra-blob start offset of the range.
                decrypted = decrypted[first_blob_start_offset:]
            if (blob_info.blob_num == len(self.descriptor.blobs) - 2) or (len(decrypted) + wrote >= size):
                # Last content blob (or we've reached the requested size):
                # zero-pad so the total written matches the advertised size.
                # NOTE(review): padding arithmetic assumes each skipped blob
                # contributes MAX_BLOB_SIZE - 1 payload bytes — confirm.
                decrypted += (b'\x00' * (size - len(decrypted) - wrote - (skip_blobs * (MAX_BLOB_SIZE - 1))))
                log.debug("sending browser final blob (%i/%i)", blob_info.blob_num + 1,
                          len(self.descriptor.blobs) - 1)
                await response.write_eof(decrypted)
            else:
                log.debug("sending browser blob (%i/%i)", blob_info.blob_num + 1,
                          len(self.descriptor.blobs) - 1)
                await response.write(decrypted)
            wrote += len(decrypted)
            log.info("sent browser %sblob %i/%i", "(final) " if response._eof_sent else "",
                     blob_info.blob_num + 1, len(self.descriptor.blobs) - 1)
            # _eof_sent is aiohttp-private; used here to stop after write_eof.
            if response._eof_sent:
                break
        return response
    except ConnectionResetError:
        # Client went away mid-transfer; surface as cancellation.
        log.warning("connection was reset after sending browser %i blob bytes", wrote)
        raise asyncio.CancelledError("range request transport was reset")
    finally:
        # Always close and deregister; clear the streaming flag when no
        # browser streams remain.
        response.force_close()
        if (request, response) in self.streaming_responses:
            self.streaming_responses.remove((request, response))
        if not self.streaming_responses:
            self.streaming.clear()
async def stream_file(
        self, request: Request, node: typing.Optional['Node'] = None) -> StreamResponse:
    """Serve this stream's decrypted content to a browser as an HTTP 206 range response.

    Starts the download (if needed), parses the Range header, then writes
    decrypted blob data to a 206 StreamResponse. The response is tracked in
    self.streaming_responses while active and always removed (and
    force-closed) on exit.
    """
    log.info("stream file to browser for lbry://%s#%s (sd hash %s...)", self.claim_name, self.claim_id,
             self.sd_hash[:6])
    await self.start(node)
    # Translate the Range header into response headers, the total byte size
    # to deliver, and how many whole blobs to skip.
    headers, size, skip_blobs = self._prepare_range_response_headers(
        request.headers.get('range', 'bytes=0-')
    )
    response = StreamResponse(status=206, headers=headers)
    await response.prepare(request)
    # Register this (request, response) pair so other code can see/cancel
    # active browser streams; self.streaming signals at least one is live.
    self.streaming_responses.append((request, response))
    self.streaming.set()
    try:
        wrote = 0
        # connection_id=2 distinguishes this read from other stream readers.
        async for blob_info, decrypted in self._aiter_read_stream(skip_blobs, connection_id=2):
            if (blob_info.blob_num == len(self.descriptor.blobs) - 2) or (len(decrypted) + wrote >= size):
                # Last content blob (or requested size reached): zero-pad so
                # the total written matches the advertised size.
                # NOTE(review): 2097151 is presumably MAX_BLOB_SIZE - 1
                # (payload bytes per skipped blob) — confirm.
                decrypted += (b'\x00' * (size - len(decrypted) - wrote - (skip_blobs * 2097151)))
                await response.write_eof(decrypted)
            else:
                await response.write(decrypted)
            wrote += len(decrypted)
            log.info("sent browser %sblob %i/%i", "(final) " if response._eof_sent else "",
                     blob_info.blob_num + 1, len(self.descriptor.blobs) - 1)
            # _eof_sent is aiohttp-private; used here to stop after write_eof.
            if response._eof_sent:
                break
        return response
    finally:
        # Always close and deregister; clear the streaming flag when no
        # browser streams remain.
        response.force_close()
        if (request, response) in self.streaming_responses:
            self.streaming_responses.remove((request, response))
        if not self.streaming_responses:
            self.streaming.clear()
def test_force_close() -> None:
    """keep_alive starts undecided (None) and becomes False after force_close()."""
    response = StreamResponse()
    # Before any decision is made, keep_alive is the None sentinel.
    assert response.keep_alive is None
    response.force_close()
    assert response.keep_alive is False
def test_force_close(self):
    """keep_alive starts undecided (None) and becomes falsy after force_close()."""
    response = StreamResponse()
    self.assertIsNone(response.keep_alive)
    response.force_close()
    self.assertFalse(response.keep_alive)
def test_force_close():
    """keep_alive starts undecided (None) and becomes False after force_close()."""
    response = StreamResponse()
    # No keep-alive decision yet: the property is the None sentinel.
    assert response.keep_alive is None
    response.force_close()
    assert response.keep_alive is False
async def image_proxy(request, url, referer=None):
    """Proxy an image at `url` back to the client, spoofing/stripping Referer.

    Fetches the image (first with a Referer header, retrying without one if
    the upstream rejects it), copies through selected headers, enforces
    MAX_IMAGE_SIZE both via Content-Length and while streaming, and always
    closes the upstream response and session on every exit path.

    Returns:
        A StreamResponse with the proxied bytes, an ImageProxyError response,
        or a 413 json_response when Content-Length exceeds MAX_IMAGE_SIZE.
    """
    if not referer or is_referer_force_url(url):
        referer = get_referer_of_url(url)
    LOG.info(f'proxy image {url} referer={referer}')
    session = response = None

    async def do_cleanup():
        # Close upstream response and session, whichever were created.
        nonlocal session, response
        if response:
            response.close()
        if session:
            await session.close()

    try:
        # Reject URLs that resolve to private/internal addresses (SSRF guard).
        await check_private_address(url)
        user_agent = DEFAULT_USER_AGENT
        if callable(user_agent):
            user_agent = user_agent()
        headers = {'User-Agent': user_agent}
        # Forward a whitelist of request headers from the client.
        for h in PROXY_REQUEST_HEADERS:
            if h in request.headers:
                headers[h] = request.headers[h]
        referer_headers = dict(headers)
        referer_headers['Referer'] = referer
        request_timeout = 30
        # auto_decompress=False: pass the body through untouched.
        session = aiohttp_client_session(
            auto_decompress=False,
            timeout=request_timeout,
        )
        # First try the request with a Referer; if refused, retry without it.
        response = await get_response(session, url, referer_headers)
        if response.status in REFERER_DENY_STATUS:
            LOG.info(f'proxy image {url} referer={referer} '
                     f'failed {response.status}, will try without referer')
            response.close()
            # Retry against response.url (follows any redirect target).
            response = await get_response(session, response.url, headers)
        is_chunked = response.headers.get('Transfer-Encoding', '').lower() == 'chunked'
        # Using chunked encoding is forbidden for HTTP/1.0 clients.
        if is_chunked and request.version < HttpVersion11:
            version = 'HTTP/{0.major}.{0.minor}'.format(request.version)
            error_msg = f"using chunked encoding is forbidden for {version}"
            LOG.info(
                f'proxy image {url} referer={referer} failed: {error_msg}')
            response.close()
            raise ImageProxyError(error_msg)
    except ImageProxyError as ex:
        await do_cleanup()
        return ex.to_response()
    except Exception:
        await do_cleanup()
        raise
    try:
        my_response = StreamResponse(status=response.status)
        # Mirror framing headers: 'Content-Length', 'Content-Type',
        # 'Transfer-Encoding'.
        if is_chunked:
            my_response.enable_chunked_encoding()
        elif response.headers.get('Transfer-Encoding'):
            my_response.headers['Transfer-Encoding'] = response.headers[
                'Transfer-Encoding']
        if response.headers.get('Content-Length'):
            content_length = int(response.headers['Content-Length'])
            # Early size check based on the declared Content-Length.
            if content_length > MAX_IMAGE_SIZE:
                return json_response({'message': 'image too large'}, status=413)
            my_response.content_length = content_length
        if response.headers.get('Content-Type'):
            my_response.content_type = response.headers['Content-Type']
        # Copy through a whitelist of upstream response headers.
        for h in PROXY_RESPONSE_HEADERS:
            if h in response.headers:
                my_response.headers[h] = response.headers[h]
        await my_response.prepare(request)
    except Exception:
        await do_cleanup()
        raise
    try:
        # Stream the body, re-checking the size limit on actual bytes in
        # case Content-Length was absent or lied.
        content_length = 0
        async for chunk in response.content.iter_chunked(8 * 1024):
            content_length += len(chunk)
            if content_length > MAX_IMAGE_SIZE:
                LOG.warning(f'image too large, abort the response, url={url}')
                # Force-close so the client sees an aborted transfer.
                my_response.force_close()
                break
            await my_response.write(chunk)
        await my_response.write_eof()
    finally:
        await do_cleanup()
    return my_response