def test_chunked_encoding_forbidden_for_http_10(self):
    """Enabling chunked encoding must make prepare() fail for HTTP/1.0."""
    request = self.make_request("GET", "/", version=HttpVersion10)
    response = StreamResponse()
    response.enable_chunked_encoding()
    expected = "Using chunked encoding is forbidden for HTTP/1.0"
    with self.assertRaisesRegex(RuntimeError, expected):
        self.loop.run_until_complete(response.prepare(request))
def test_chunked_encoding_forbidden_for_http_10(self):
    """start() must reject chunked encoding on an HTTP/1.0 request."""
    request = self.make_request('GET', '/', version=HttpVersion10)
    response = StreamResponse()
    response.enable_chunked_encoding()
    expected = "Using chunked encoding is forbidden for HTTP/1.0"
    with self.assertRaisesRegex(RuntimeError, expected):
        response.start(request)
def test_chunked_encoding_forbidden_for_http_10():
    """prepare() raises RuntimeError when chunking is enabled under HTTP/1.0."""
    request = make_request('GET', '/', version=HttpVersion10)
    response = StreamResponse()
    response.enable_chunked_encoding()
    with pytest.raises(RuntimeError) as excinfo:
        yield from response.prepare(request)
    assert re.match("Using chunked encoding is forbidden for HTTP/1.0",
                    str(excinfo.value))
def test_chunked_encoding_forbidden_for_http_10():
    """prepare() must raise RuntimeError when chunking is enabled for HTTP/1.0.

    Fix: ``pytest.raises_regexp`` is not part of the pytest API (it would
    fail with AttributeError at runtime); the message pattern belongs in
    the ``match=`` keyword of ``pytest.raises``.
    """
    req = make_request('GET', '/', version=HttpVersion10)
    resp = StreamResponse()
    resp.enable_chunked_encoding()
    with pytest.raises(
            RuntimeError,
            match="Using chunked encoding is forbidden for HTTP/1.0"):
        yield from resp.prepare(req)
def test_chunked_encoding(self, ResponseImpl):
    """Chunked flag is off by default and toggled by enable_chunked_encoding()."""
    request = self.make_request("GET", "/")
    response = StreamResponse()
    self.assertFalse(response.chunked)
    response.enable_chunked_encoding()
    self.assertTrue(response.chunked)
    message = self.loop.run_until_complete(response.prepare(request))
    self.assertTrue(message.chunked)
async def test_chunked_encoding():
    """enable_chunked_encoding() flips the flag and marks the writer chunked."""
    request = make_request('GET', '/')
    response = StreamResponse()
    assert not response.chunked
    response.enable_chunked_encoding()
    assert response.chunked
    writer = await response.prepare(request)
    assert writer.chunked
def test_chunked_encoding(self, ResponseImpl):
    """start() must hand the chunked flag through to the response message."""
    request = self.make_request('GET', '/')
    response = StreamResponse()
    self.assertFalse(response.chunked)
    response.enable_chunked_encoding()
    self.assertTrue(response.chunked)
    message = response.start(request)
    self.assertTrue(message.chunked)
def test_chunked_encoding():
    """prepare() produces a chunked message once chunking is enabled."""
    request = make_request('GET', '/')
    response = StreamResponse()
    assert not response.chunked
    response.enable_chunked_encoding()
    assert response.chunked
    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        message = yield from response.prepare(request)
        assert message.chunked
def test_chunk_size():
    """enable_chunked_encoding(chunk_size=...) turns on chunking on the writer."""
    request = make_request('GET', '/', payload_writer=mock.Mock())
    response = StreamResponse()
    assert not response.chunked
    response.enable_chunked_encoding(chunk_size=8192)
    assert response.chunked
    writer = yield from response.prepare(request)
    assert writer.chunked
    assert writer.enable_chunking.called
    assert writer.filter is not None
def test_chunk_size(self, ResponseImpl):
    """The chunk_size argument must be forwarded to the chunking filter."""
    request = self.make_request("GET", "/")
    response = StreamResponse()
    self.assertFalse(response.chunked)
    response.enable_chunked_encoding(chunk_size=8192)
    self.assertTrue(response.chunked)
    message = self.loop.run_until_complete(response.prepare(request))
    self.assertTrue(message.chunked)
    message.add_chunking_filter.assert_called_with(8192)
    self.assertIsNotNone(message.filter)
def test_chunk_size(self, ResponseImpl):
    """start() must pass chunk_size through to the message's chunking filter."""
    request = self.make_request('GET', '/')
    response = StreamResponse()
    self.assertFalse(response.chunked)
    response.enable_chunked_encoding(chunk_size=8192)
    self.assertTrue(response.chunked)
    message = response.start(request)
    self.assertTrue(message.chunked)
    message.add_chunking_filter.assert_called_with(8192)
    self.assertIsNotNone(message.filter)
def test_chunk_size():
    """Chunking with an explicit chunk_size is enabled on the payload writer."""
    request = make_request('GET', '/')
    response = StreamResponse()
    assert not response.chunked
    response.enable_chunked_encoding(chunk_size=8192)
    assert response.chunked
    with mock.patch('aiohttp.web_response.PayloadWriter'):
        writer = yield from response.prepare(request)
        assert writer.chunked
        assert writer.enable_chunking.called
        assert writer.filter is not None
def test_chunk_size():
    """Chunking filter must be installed with the requested chunk size."""
    request = make_request('GET', '/')
    response = StreamResponse()
    assert not response.chunked
    response.enable_chunked_encoding(chunk_size=8192)
    assert response.chunked
    with mock.patch('aiohttp.web_reqrep.ResponseImpl'):
        message = yield from response.prepare(request)
        assert message.chunked
        message.add_chunking_filter.assert_called_with(8192)
        assert message.filter is not None
async def test_chunk_size():
    """Passing chunk_size is deprecated but must still enable chunking."""
    request = make_request('GET', '/')
    response = StreamResponse()
    assert not response.chunked
    with pytest.warns(DeprecationWarning):
        response.enable_chunked_encoding(chunk_size=8192)
    assert response.chunked
    writer = await response.prepare(request)
    assert writer.chunked
    assert writer.enable_chunking.called
    assert writer.filter is not None
async def dump_snapshots(request):
    """Stream stream-snapshots to the client as chunked, compressed JSONL.

    For debugging purposes only. Snapshots may be skipped due to race
    conditions between creating the objectid and finishing the DB insert.

    Query parameters (all optional):
        streamId        -- restrict the dump to one stream
        afterSnapshotId -- resume the dump after the given snapshot id
        tail            -- "1" to keep polling for new snapshots (follow mode)
    """
    model = request.app['model']
    stream_id = request.query.get('streamId')
    after_snapshot_id = request.query.get('afterSnapshotId')
    # tail=1 means keep the response open and poll for new snapshots.
    tail = bool(int(request.query.get('tail', 0)))
    # Preload all streams into a dict for label lookups below.
    streams = await model.streams.list_all()
    streams = {s.id: s for s in streams}
    res = StreamResponse()
    res.headers['Content-Type'] = 'text/plain'
    # Chunked encoding: the total size is unknown up front.
    res.enable_chunked_encoding()
    res.enable_compression()
    await res.prepare(request)
    try:
        while True:
            t = monotime()
            snapshots = await model.stream_snapshots.dump(
                stream_id=stream_id,
                after_snapshot_id=after_snapshot_id)
            if not snapshots:
                if not tail:
                    # No more data and not tailing -> finish the dump.
                    break
                logger.debug('No snapshots dumped, sleeping')
                await sleep(1)
                continue
            logger.debug(
                'Dumped %s snapshots %s - %s in %.3f s',
                len(snapshots), snapshots[0].id, snapshots[-1].id,
                monotime() - t)
            parts = []
            for snapshot in snapshots:
                stream = streams.get(snapshot.stream_id)
                if not stream:
                    # Stream was created after the initial list_all();
                    # fetch it individually.
                    stream = await model.streams.get_by_id(snapshot.stream_id)
                    assert stream.id == snapshot.stream_id
                record = {
                    'id': str(snapshot.id),
                    'date': snapshot.date.isoformat(),
                    'stream': {
                        'id': snapshot.stream_id,
                        'label': stream.label,
                    },
                    'state_json': snapshot.state_json,
                }
                line = to_compact_json(record)
                parts.append(line.encode())
                parts.append(b'\n')
                # Advance the cursor so the next dump() call resumes here.
                after_snapshot_id = snapshot.id
            # Free the snapshot objects before building the (large) chunk.
            del snapshots
            chunk = b''.join(parts)
            logger.debug(
                'Sending %.2f kB of JSONL response chunk',
                len(chunk) / 1024)
            await res.write(chunk)
        await res.write_eof()
    except CancelledError as e:
        # Client disconnects surface as cancellation; this is the normal
        # way a tail=1 request ends.
        logger.debug('dump_snapshots finished: %r', e)
    except Exception as e:
        logger.exception('dump_snapshots failed: %r', e)
    return res
def test_enable_chunked_encoding_with_content_length() -> None:
    """Chunked encoding cannot be enabled once Content-Length is set."""
    response = StreamResponse()
    response.content_length = 234
    with pytest.raises(RuntimeError):
        response.enable_chunked_encoding()
def test_content_length_setter_with_enable_chunked_encoding():
    """Setting Content-Length after enabling chunking must raise."""
    response = StreamResponse()
    response.enable_chunked_encoding()
    with pytest.raises(RuntimeError):
        response.content_length = 234
def test_enable_chunked_encoding_with_content_length():
    """enable_chunked_encoding() must fail when Content-Length is already set."""
    response = StreamResponse()
    response.content_length = 234
    with pytest.raises(RuntimeError):
        response.enable_chunked_encoding()
async def stream(request):
    """Serve a Server-Sent-Events stream of build events from Redis pub/sub.

    Validates Accept header, method and auth token, subscribes to the
    'builds' Redis channel, and forwards matching events to the client
    as chunked text/event-stream frames until the worker enqueues None.
    """
    client_guid = str(uuid4())
    sentry.tags_context({'client_guid': client_guid})
    # SSE clients must explicitly accept text/event-stream.
    if request.headers.get('accept') != 'text/event-stream':
        return Response(status=406)
    if request.method != 'GET':
        return Response(status=405)
    token = request.query.get('token')
    if not token:
        return Response(status=401)
    # Optional filters: only forward events for these builds / repos.
    build_ids = frozenset(request.query.get('build') or [])
    repo_ids = frozenset(request.query.get('repo') or [])
    token = auth.parse_token(token)
    if not token:
        return Response(status=401)
    if 'uid' in token:
        sentry.user_context({'id': token['uid']})
    current_app.logger.debug('pubsub.client.connected guid=%s tenant=%s',
                             client_guid, token)
    loop = request.app.loop
    parts = urlparse(current_app.config['REDIS_URL'])
    conn = await aioredis.create_redis(
        # NOTE(review): '6379' is a str while urlparse yields an int port —
        # confirm aioredis accepts a string port here.
        address=(parts.hostname or 'localhost', parts.port or '6379'),
        # NOTE(review): split('1', 1)[:-1] looks wrong for extracting the
        # db index from the URL path (and yields a list, not an int) —
        # presumably this was meant to parse '/<db>'; verify.
        db=parts.path.split('1', 1)[:-1] or 0,
        password=parts.password,
        loop=loop,
    )
    try:
        queue = asyncio.Queue(loop=loop)
        res = await conn.subscribe('builds')
        # Background worker filters pub/sub messages into our queue.
        asyncio.ensure_future(worker(res[0], queue, token, repo_ids, build_ids))
        resp = StreamResponse(status=200, reason='OK')
        resp.headers['Content-Type'] = 'text/event-stream'
        resp.headers['Cache-Control'] = 'no-cache'
        resp.headers['Connection'] = 'keep-alive'
        if 'Origin' in request.headers and is_valid_origin(request):
            resp.headers['Access-Control-Allow-Origin'] = request.headers.get(
                'Origin')
            resp.headers['Access-Control-Expose-Headers'] = '*'
        resp.enable_chunked_encoding()
        await resp.prepare(request)
        # loop.create_task(ping(loop, resp, client_guid))
        # resp.write(b'retry: 100\r\n\r\n')
        while True:
            event = await queue.get()
            if event is None:
                # Sentinel from the worker: stream is finished.
                break
            # Build one SSE frame: optional id/event/data fields + blank line.
            buffer = io.BytesIO()
            if event.id:
                buffer.write(b"id: %s\r\n" % (event.id.encode('utf-8'), ))
            if event.event:
                buffer.write(b"event: %s\r\n" % (event.event.encode('utf-8')))
            if event.data:
                buffer.write(b"data: %s\r\n" % (json.dumps(
                    event.data).encode('utf-8')))
            buffer.write(b'\r\n')
            resp.write(buffer.getvalue())
            queue.task_done()
            current_app.logger.debug('pubsub.event.sent qsize=%s',
                                     queue.qsize())
            # Yield to the scheduler so other processes do stuff.
            await resp.drain()
        await resp.write_eof()
        return resp
    finally:
        # Always release the Redis connection, even on client disconnect.
        conn.close()
        await conn.wait_closed()
        current_app.logger.debug('client.disconnected guid=%s', client_guid,
                                 exc_info=True)
def test_content_length_setter_with_enable_chunked_encoding() -> None:
    """Assigning Content-Length on a chunked response must raise."""
    response = StreamResponse()
    response.enable_chunked_encoding()
    with pytest.raises(RuntimeError):
        response.content_length = 234
async def stream(request):
    """Serve a Server-Sent-Events stream of build events from Redis pub/sub.

    Validates Accept header, method and auth token, subscribes to the
    'builds' Redis channel, and forwards matching events to the client
    as chunked text/event-stream frames until the worker enqueues None.
    """
    client_guid = str(uuid4())
    sentry.tags_context({"client_guid": client_guid})
    # SSE clients must explicitly accept text/event-stream.
    if request.headers.get("accept") != "text/event-stream":
        return Response(status=406)
    if request.method != "GET":
        return Response(status=405)
    token = request.query.get("token")
    if not token:
        return Response(status=401)
    # Optional filters: only forward events for these builds / repos.
    build_ids = frozenset(request.query.get("build") or [])
    repo_ids = frozenset(request.query.get("repo") or [])
    token = auth.parse_token(token)
    if not token:
        return Response(status=401)
    if "uid" in token:
        sentry.user_context({"id": token["uid"]})
    current_app.logger.debug("pubsub.client.connected guid=%s tenant=%s",
                             client_guid, token)
    loop = request.app.loop
    parts = urlparse(current_app.config["REDIS_URL"])
    conn = await aioredis.create_redis(
        # NOTE(review): "6379" is a str while urlparse yields an int port —
        # confirm aioredis accepts a string port here.
        address=(parts.hostname or "localhost", parts.port or "6379"),
        # NOTE(review): split("1", 1)[:-1] looks wrong for extracting the
        # db index from the URL path (and yields a list, not an int) —
        # presumably this was meant to parse "/<db>"; verify.
        db=parts.path.split("1", 1)[:-1] or 0,
        password=parts.password,
        loop=loop,
    )
    try:
        queue = asyncio.Queue(loop=loop)
        res = await conn.subscribe("builds")
        # Background worker filters pub/sub messages into our queue.
        asyncio.ensure_future(worker(res[0], queue, token, repo_ids, build_ids))
        resp = StreamResponse(status=200, reason="OK")
        resp.headers["Content-Type"] = "text/event-stream"
        resp.headers["Cache-Control"] = "no-cache"
        resp.headers["Connection"] = "keep-alive"
        if "Origin" in request.headers and is_valid_origin(request):
            resp.headers["Access-Control-Allow-Origin"] = request.headers.get(
                "Origin")
            resp.headers["Access-Control-Expose-Headers"] = "*"
        resp.enable_chunked_encoding()
        await resp.prepare(request)
        # loop.create_task(ping(loop, resp, client_guid))
        # resp.write(b'retry: 100\r\n\r\n')
        while True:
            event = await queue.get()
            if event is None:
                # Sentinel from the worker: stream is finished.
                break
            # Build one SSE frame: optional id/event/data fields + blank line.
            buffer = io.BytesIO()
            if event.id:
                buffer.write(b"id: %s\r\n" % (event.id.encode("utf-8"), ))
            if event.event:
                buffer.write(b"event: %s\r\n" % (event.event.encode("utf-8")))
            if event.data:
                buffer.write(b"data: %s\r\n" % (json.dumps(
                    event.data).encode("utf-8")))
            buffer.write(b"\r\n")
            resp.write(buffer.getvalue())
            queue.task_done()
            current_app.logger.debug("pubsub.event.sent qsize=%s",
                                     queue.qsize())
            # Yield to the scheduler so other processes do stuff.
            await resp.drain()
        await resp.write_eof()
        return resp
    finally:
        # Always release the Redis connection, even on client disconnect.
        conn.close()
        await conn.wait_closed()
        current_app.logger.debug("client.disconnected guid=%s", client_guid,
                                 exc_info=True)
async def image_proxy(request, url, referer=None):
    """Proxy a remote image to the client, retrying without Referer if denied.

    Fetches *url* with forwarded request headers, mirrors selected response
    headers, enforces MAX_IMAGE_SIZE, and streams the body through a
    StreamResponse. Returns an error response for proxy-specific failures.
    """
    if not referer or is_referer_force_url(url):
        referer = get_referer_of_url(url)
    LOG.info(f'proxy image {url} referer={referer}')
    session = response = None

    async def do_cleanup():
        # Close the upstream response and session, whichever exist.
        nonlocal session, response
        if response:
            response.close()
        if session:
            await session.close()

    try:
        user_agent = DEFAULT_USER_AGENT
        if callable(user_agent):
            user_agent = user_agent()
        headers = {'User-Agent': user_agent}
        # Forward a whitelist of request headers to the upstream server.
        for h in PROXY_REQUEST_HEADERS:
            if h in request.headers:
                headers[h] = request.headers[h]
        referer_headers = dict(headers)
        referer_headers['Referer'] = referer
        session = _create_aiohttp_client_session()
        # First try the request with a Referer header; if the server
        # refuses it, retry without the Referer.
        response = await get_response(session, url, referer_headers)
        if response.status in REFERER_DENY_STATUS:
            LOG.info(f'proxy image {url!r} referer={referer!r} '
                     f'failed {response.status}, will try without referer')
            response.close()
            response = await get_response(session, response.url, headers)
        is_chunked = response.headers.get(
            'Transfer-Encoding', '').lower() == 'chunked'
        # using chunked encoding is forbidden for HTTP/1.0
        if is_chunked and request.version < HttpVersion11:
            version = 'HTTP/{0.major}.{0.minor}'.format(request.version)
            error_msg = f"using chunked encoding is forbidden for {version}"
            LOG.info(f'proxy image {url!r} referer={referer!r} '
                     f'failed: {error_msg}')
            response.close()
            raise ImageProxyError(error_msg)
    except ImageProxyError as ex:
        await do_cleanup()
        return ex.to_response()
    except Exception:
        await do_cleanup()
        raise
    try:
        my_response = StreamResponse(status=response.status)
        # Mirror 'Content-Length', 'Content-Type', 'Transfer-Encoding'.
        if is_chunked:
            my_response.enable_chunked_encoding()
        elif response.headers.get('Transfer-Encoding'):
            my_response.headers['Transfer-Encoding'] = \
                response.headers['Transfer-Encoding']
        if response.headers.get('Content-Length'):
            content_length = int(response.headers['Content-Length'])
            # Reject oversized images up front when the size is declared.
            if content_length > MAX_IMAGE_SIZE:
                message = 'image too large, size={}'.format(content_length)
                return json_response({'message': message}, status=413)
            my_response.content_length = content_length
        if response.headers.get('Content-Type'):
            my_response.content_type = response.headers['Content-Type']
        # Forward a whitelist of response headers back to the client.
        for h in PROXY_RESPONSE_HEADERS:
            if h in response.headers:
                my_response.headers[h] = response.headers[h]
        await my_response.prepare(request)
    except Exception:
        await do_cleanup()
        raise
    try:
        content_length = 0
        async for chunk in response.content.iter_chunked(8 * 1024):
            content_length += len(chunk)
            # Enforce the size limit even when no Content-Length was sent.
            if content_length > MAX_IMAGE_SIZE:
                LOG.warning(f'image too large, abort the response, url={url!r}')
                my_response.force_close()
                break
            await my_response.write(chunk)
        await my_response.write_eof()
    except _IMAGE_NETWORK_ERROR_S as ex:
        # Best-effort: upstream network errors are logged, not raised.
        msg = "image proxy failed {}: {} url={!r}".format(
            type(ex).__name__, ex, url)
        LOG.warning(msg)
    finally:
        await do_cleanup()
    # NOTE(review): write_eof() may already have run in the try block above;
    # presumably the second call is a harmless no-op in aiohttp — confirm.
    my_response.force_close()
    await my_response.write_eof()
    return my_response