async def test_middleware_with_default_transport(tracer, fake_transport):
    """A traced request emits exactly one record (tagged with the route);
    a request sampled out via ``X-B3-Sampled: 0`` emits none.

    Uses the old (pre-aiohttp-3) middleware protocol:
    ``factory(app, handler)`` returns the middleware coroutine.
    """
    app = web.Application()
    az.setup(app, tracer)

    async def handler(request):
        return web.Response(body=b'data')

    req = make_mocked_request('GET', '/aa', headers={'token': 'x'}, app=app)
    # Give the mocked route a canonical pattern so the HTTP_ROUTE tag is set.
    req.match_info.route.resource.canonical = '/{pid}'
    middleware_factory = middleware_maker()
    middleware = await middleware_factory(app, handler)
    await middleware(req)
    span = az.request_span(req)
    assert span
    assert len(fake_transport.records) == 1

    rec = fake_transport.records[0]
    assert rec.asdict()['tags'][az.HTTP_ROUTE] == '/{pid}'

    # noop span does not produce records
    headers = {'X-B3-Sampled': '0'}
    req_noop = make_mocked_request('GET', '/', headers=headers, app=app)
    await middleware(req_noop)
    span = az.request_span(req_noop)
    assert span
    # Still 1: the sampled-out request added nothing.
    assert len(fake_transport.records) == 1
async def test_middleware_with_default_transport(tracer: az.Tracer, fake_transport: Any) -> None:
    """Traced requests produce exactly one record each; a request sampled
    out with ``X-B3-Sampled: 0`` produces none."""
    app = web.Application()
    az.setup(app, tracer)

    async def handler(request: web.Request) -> web.StreamResponse:
        return web.Response(body=b"data")

    traced_req = make_mocked_request("GET", "/aa", headers={"token": "x"}, app=app)
    assert traced_req.match_info.route.resource is not None
    # Give the mocked route a canonical pattern so the route tag is emitted.
    traced_req.match_info.route.resource.canonical = "/{pid}"  # type: ignore[misc]

    middleware = middleware_maker()
    await middleware(traced_req, handler)

    assert az.request_span(traced_req)
    assert len(fake_transport.records) == 1
    record = fake_transport.records[0]
    assert record.asdict()["tags"][az.HTTP_ROUTE] == "/{pid}"

    # noop span does not produce records
    noop_req = make_mocked_request(
        "GET", "/", headers={"X-B3-Sampled": "0"}, app=app)
    await middleware(noop_req, handler)
    assert az.request_span(noop_req)
    assert len(fake_transport.records) == 1
async def test_middleware_with_default_transport(tracer, fake_transport):
    """One record per traced request; none for a sampled-out request."""
    app = web.Application()
    az.setup(app, tracer)

    async def handler(request):
        return web.Response(body=b'data')

    traced_req = make_mocked_request('GET', '/aa', headers={'token': 'x'}, app=app)
    # Canonical pattern on the mocked route drives the HTTP_ROUTE tag.
    traced_req.match_info.route.resource.canonical = '/{pid}'

    middleware = middleware_maker()
    await middleware(traced_req, handler)

    assert az.request_span(traced_req)
    assert len(fake_transport.records) == 1
    record = fake_transport.records[0]
    assert record.asdict()['tags'][az.HTTP_ROUTE] == '/{pid}'

    # noop span does not produce records
    noop_req = make_mocked_request(
        'GET', '/', headers={'X-B3-Sampled': '0'}, app=app)
    await middleware(noop_req, handler)
    assert az.request_span(noop_req)
    assert len(fake_transport.records) == 1
async def handle_run(request):
    """Serve the page for a single run.

    Looks up the run by the ``run_id`` path component.  Responds 404 when
    the run does not exist, or when a ``pkg`` path component is present
    but names a different package than the run belongs to.
    """
    from .common import get_run
    from .pkg import generate_run_file

    span = aiozipkin.request_span(request)
    run_id = request.match_info["run_id"]
    pkg = request.match_info.get("pkg")
    async with request.app.database.acquire() as conn:
        with span.new_child('sql:run'):
            run = await get_run(conn, run_id)
        if run is None:
            raise web.HTTPNotFound(text="No run with id %r" % run_id)
    # BUG FIX: the original guarded this raise with a second
    # ``if run is None`` check, which could never be true here (we already
    # returned 404 above), so a pkg/run mismatch was silently ignored.
    # Treat a mismatch as "no such run" for the requested package.
    if pkg is not None and pkg != run['package']:
        raise web.HTTPNotFound(text="No run with id %r" % run_id)
    return await generate_run_file(
        request.app.database,
        request.app.http_client_session,
        request.app.config,
        request.app.differ_url,
        request.app.logfile_manager,
        run,
        request.app.vcs_store_url,
        is_admin=is_admin(request),
        span=span,
    )
async def handle_pkg(request):
    """Render the per-package page: metadata row, associated merge
    proposals, suites the package can be published to, and its runs."""
    from .pkg import generate_pkg_file

    span = aiozipkin.request_span(request)
    package_name = request.match_info["pkg"]
    async with request.app.database.acquire() as conn:
        with span.new_child('sql:package'):
            package = await conn.fetchrow(
                'SELECT name, vcswatch_status, maintainer_email, vcs_type, '
                'vcs_url, branch_url, vcs_browse, removed FROM package WHERE name = $1',
                package_name)
        if package is None:
            raise web.HTTPNotFound(text="No package with name %s" % package_name)
        with span.new_child('sql:merge-proposals'):
            # One row per proposal URL, preferring the most recent
            # successful run for the suite attribution.
            merge_proposals = await conn.fetch(
                """\
SELECT DISTINCT ON (merge_proposal.url)
merge_proposal.url AS url,
merge_proposal.status AS status,
run.suite AS suite
FROM
merge_proposal
LEFT JOIN run
ON merge_proposal.revision = run.revision AND run.result_code = 'success'
WHERE run.package = $1
ORDER BY merge_proposal.url, run.finish_time DESC
""", package['name'])
        with span.new_child('sql:publishable-suites'):
            available_suites = await state.iter_publishable_suites(
                conn, package_name)
        with span.new_child('sql:runs'):
            # iter_runs takes the pool, not this connection; it iterates lazily.
            runs = state.iter_runs(request.app.database, package=package['name'])
    return await generate_pkg_file(request.app.database,
                                   request.app.config,
                                   package, merge_proposals, runs,
                                   available_suites, span)
async def handle_chat(request: web.Request):
    """Build template context for the chat page: obtain a chat key for
    the current user/friend pair (traced as a child span) and compose the
    final chat URL.

    Raises HTTPBadRequest when the CHAT_URL environment variable is unset.
    """
    chat_url = os.getenv('CHAT_URL')
    if not chat_url:
        raise web.HTTPBadRequest(reason='CHAT_URL env not set')
    session = await aiohttp_session.get_session(request)
    uid = session["uid"]
    form = await request.post()
    friend_id = form.get('user_id')
    client_session = request.app['client_session']
    tracer = az.get_tracer(request.app)
    span = az.request_span(request)
    with tracer.new_child(span.context) as child_span:
        child_span.name("chat_api_get_key")
        child_span.tag('user_id', uid)
        child_span.tag('friend_id', friend_id)
        chat_key = await chat_api_get_key(client_session, user_id=uid, friend_id=friend_id)
        child_span.tag('chat_key', chat_key)
    # NOTE(review): aiohttp_session conventionally stores the storage under
    # STORAGE_KEY on the app/config_dict, not on the request mapping --
    # confirm request.get() actually resolves it here.
    storage: EncryptedCookieStorage = request.get(aiohttp_session.STORAGE_KEY)
    chat_session = storage.load_cookie(request)
    new_chat_url = update_url(
        chat_url, dict(chat_key=chat_key, session=chat_session, userId=uid))
    return dict(uid=uid, chat_url=new_chat_url)
async def get(request):
    """Forward the request to the backend service (propagating the trace
    context) and report how long the backend task took."""
    http_session = request.app["session"]
    trace_ctx = {'span_context': az.request_span(request).context}
    payload = await fetch(http_session, "http://backend:8081/", trace_ctx)
    task_time = payload["task_time"]
    return web.Response(text=f"The task was completed in: {task_time}")
async def error_handler(request):
    """Deliberately request an unresolvable host so the traced client
    call records a failure."""
    trace_ctx = {'span_context': az.request_span(request).context}
    client = request.app['session']
    bad_url = 'http://4c2a7f53-9468-43a5-9c7d-466591eda953'
    await client.get(bad_url, trace_request_ctx=trace_ctx)
    return web.Response(body=b'')
async def error_handler(request: web.Request) -> web.StreamResponse:
    """Deliberately request an unresolvable host so the traced client
    call records a failure."""
    trace_ctx = {"span_context": az.request_span(request).context}
    client = request.app["session"]
    bad_url = "http://4c2a7f53-9468-43a5-9c7d-466591eda953"
    await client.get(bad_url, trace_request_ctx=trace_ctx)
    return web.Response(body=b"")
async def handler(request):
    """Fetch httpbin.org/get with trace propagation and echo its body."""
    trace_ctx = {'span_context': az.request_span(request).context}
    client = request.app['session']
    response = await client.get('https://httpbin.org/get',
                                trace_request_ctx=trace_ctx)
    body = await response.text()
    return web.Response(body=body)
async def handler(request: web.Request) -> web.StreamResponse:
    """Fetch httpbin.org/get with trace propagation and echo its body."""
    trace_ctx = {"span_context": az.request_span(request).context}
    client = request.app["session"]
    response = await client.get("https://httpbin.org/get",
                                trace_request_ctx=trace_ctx)
    body = await response.text()
    return web.Response(body=body)
async def handle(request):
    """Trace a simulated database call as a child span, then greet."""
    tracer = az.get_tracer(request.app)
    parent = az.request_span(request)
    with tracer.new_child(parent.context) as db_span:
        db_span.name('mysql:select')
        # Stand-in for real query latency.
        await asyncio.sleep(0.01)
    return web.Response(text='Hello')
async def handle_new_upstream_pkg(request):
    """Render the new-upstream page for one package (optionally one run)."""
    from .common import generate_pkg_context

    info = request.match_info
    app = request.app
    return await generate_pkg_context(
        app.database,
        app.config,
        info["suite"],
        app.policy,
        app.http_client_session,
        app.differ_url,
        app.vcs_store_url,
        info["pkg"],
        aiozipkin.request_span(request),
        info.get("run_id"),
    )
async def handle_lintian_fixes_pkg(request):
    """Render the lintian-fixes page for one package (optionally one run)."""
    # TODO(jelmer): Handle Accept: text/diff
    info = request.match_info
    app = request.app
    return await generate_pkg_file(
        app.database,
        app.config,
        app.policy,
        app.http_client_session,
        app.differ_url,
        app.vcs_store_url,
        info["pkg"],
        aiozipkin.request_span(request),
        info.get("run_id"),
    )
async def test_middleware_with_default_transport(tracer, fake_transport):
    """A traced request emits one record; a request sampled out via
    ``X-B3-Sampled: 0`` emits none.

    Uses the old (pre-aiohttp-3) middleware protocol:
    ``factory(app, handler)`` returns the middleware coroutine.
    """
    app = web.Application()
    az.setup(app, tracer)

    async def handler(request):
        return web.Response(body=b'data')

    req = make_mocked_request('GET', '/', headers={'token': 'x'}, app=app)
    middleware_factory = middleware_maker()
    middleware = await middleware_factory(app, handler)
    await middleware(req)
    span = az.request_span(req)
    assert span
    assert len(fake_transport.records) == 1

    # noop span does not produce records
    headers = {'X-B3-Sampled': '0'}
    req_noop = make_mocked_request('GET', '/', headers=headers, app=app)
    await middleware(req_noop)
    span = az.request_span(req_noop)
    assert span
    # Still 1: the sampled-out request added nothing.
    assert len(fake_transport.records) == 1
async def handle_repo_list(request):
    """List repositories for a VCS, negotiating the response format from
    the Accept header (JSON, plain text, or HTML; JSON is the default)."""
    vcs = request.match_info["vcs"]
    span = aiozipkin.request_span(request)
    with span.new_child('list-repositories'):
        names = sorted(request.app.vcs_manager.list_repositories(vcs))
    for accept in iter_accept(request):
        if accept == 'application/json':
            return web.json_response(names)
        if accept == 'text/plain':
            body = ''.join(line + '\n' for line in names)
            return web.Response(text=body, content_type='text/plain')
        if accept == 'text/html':
            template = site_env.get_template('repo-list.html')
            rendered = await template.render_async(vcs=vcs, repositories=names)
            return web.Response(text=rendered, content_type='text/html')
    return web.json_response(names)
async def index(request):
    """Emit a PRODUCER span for a click event, forward it to the backend
    service, and serve the demo page."""
    tracer = az.get_tracer(request.app)
    parent = az.request_span(request)
    client = request.app['session']
    with tracer.new_child(parent.context) as producer:
        producer.kind(az.PRODUCER)
        producer.name('produce event click')
        producer.remote_endpoint('broker', ipv4='127.0.0.1', port=9011)
        trace_headers = producer.context.make_headers()
        message = {'payload': 'click', 'headers': trace_headers}
        response = await client.post(backend_service, json=message)
        body = await response.text()
        assert body == 'ok'
        await asyncio.sleep(0.01)
    return web.Response(text=page, content_type='text/html')
async def handle_scrub_obsolete_pkg(request):
    """Render the scrub-obsolete page for one package (optionally one run)."""
    from .common import generate_pkg_context

    # TODO(jelmer): Handle Accept: text/diff
    info = request.match_info
    app = request.app
    return await generate_pkg_context(
        app.database,
        app.config,
        "scrub-obsolete",
        app.policy,
        app.http_client_session,
        app.differ_url,
        app.vcs_store_url,
        info["pkg"],
        aiozipkin.request_span(request),
        info.get("run_id"),
    )
async def index(request):
    """Emit a PRODUCER span for a click event, forward it to the backend
    service, and serve the demo page."""
    tracer = az.get_tracer(request.app)
    parent = az.request_span(request)
    client = request.app["session"]
    with tracer.new_child(parent.context) as producer:
        producer.kind(az.PRODUCER)
        producer.name("produce event click")
        producer.remote_endpoint("broker", ipv4="127.0.0.1", port=9011)
        trace_headers = producer.context.make_headers()
        message = {"payload": "click", "headers": trace_headers}
        response = await client.post(backend_service, json=message)
        body = await response.text()
        assert body == "ok"
        await asyncio.sleep(0.01)
    return web.Response(text=page, content_type="text/html")
async def dulwich_refs(request):
    """Serve the git smart-HTTP ref advertisement (``info/refs``) for a
    package repository, using dulwich's service handlers."""
    package = request.match_info["package"]
    allow_writes = await is_worker(request.app.db, request)
    span = aiozipkin.request_span(request)
    with span.new_child('open-repo'):
        repo = await _git_open_repo(request.app.vcs_manager,
                                    request.app.db, package)
    r = repo._git
    service = request.query.get("service")
    # Rejects unknown services; write services require worker credentials.
    _git_check_service(service, allow_writes)

    # Smart-HTTP responses must never be cached.
    headers = {
        "Expires": "Fri, 01 Jan 1980 00:00:00 GMT",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache, max-age=0, must-revalidate",
    }
    handler_cls = DULWICH_SERVICE_HANDLERS[service.encode("ascii")]

    response = web.StreamResponse(status=200, headers=headers)
    response.content_type = "application/x-%s-advertisement" % service
    await response.prepare(request)

    # The advertisement needs no request body, so the read side is a
    # fresh empty buffer; output accumulates in `out`.
    out = BytesIO()
    proto = ReceivableProtocol(BytesIO().read, out.write)
    handler = handler_cls(DictBackend({".": r}), ["."], proto,
                          stateless_rpc=True, advertise_refs=True)
    # Smart-HTTP advertisements start with a "# service=..." pkt-line
    # followed by a flush-pkt (None).
    handler.proto.write_pkt_line(b"# service=" + service.encode("ascii") + b"\n")
    handler.proto.write_pkt_line(None)
    # dulwich's handler is synchronous; keep it off the event loop.
    await asyncio.to_thread(handler.handle)
    await response.write(out.getvalue())
    await response.write_eof()
    return response
async def city(request):
    """Return geoname records matching the requested city name as JSON.

    Example::

        curl -H "Content-Type: application/json" \
            'http://localhost:8080/city/sydney'

    responds with a list of objects such as geonameid, name, latitude,
    longitude, country_code, admin1, admin2 and geom.
    """
    tracer = az.get_tracer(request.app)
    parent = az.request_span(request)
    with tracer.new_child(parent.context) as db_span:
        db_span.name('postgres:select:get_city')
        # call to external service like https://python.org
        # or database query
        requested = request.match_info['city_name']
        try:
            rows = await db.get_city(request.app['db'], requested.title())
        except db.RecordNotFound as e:
            raise web.HTTPNotFound(text=str(e))
    return web.json_response([dict(row) for row in rows])
async def handler(request):
    """Call services B and E under separate child spans, propagating the
    trace via headers, and concatenate their replies."""
    parent = az.request_span(request)
    tracer = az.get_tracer(request.app)
    await asyncio.sleep(0.01)
    client = request.app['session']
    with tracer.new_child(parent.context) as span_b:
        resp_b = await client.get(service_b_api,
                                  headers=span_b.context.make_headers())
        data_b = await resp_b.text()
    with tracer.new_child(parent.context) as span_e:
        resp_e = await client.get(service_e_api,
                                  headers=span_e.context.make_headers())
        data_e = await resp_e.text()
    return web.Response(text=' '.join(['service_a', data_b, data_e]))
async def neighbors(request):
    """Return the k nearest neighbors of a point as JSON.

    Example::

        curl 'http://localhost:8080/neighbors?longitude=-73.98569&latitude=40.74844&number=10'

    responds with a list of objects such as id, name, geom, latitude,
    longitude, country_code, admin1 and admin2.
    """
    tracer = az.get_tracer(request.app)
    parent = az.request_span(request)
    with tracer.new_child(parent.context) as db_span:
        db_span.name('postgres:select:get_neighbors')
        query = request.query
        lon = float(query["longitude"])
        lat = float(query["latitude"])
        count = int(query["number"])
        rows = await db.get_neighbors(request.app['db'], lon, lat, count)
    return web.json_response([dict(row) for row in rows])
async def handler(request):
    """Call services C and D with propagated trace headers and return
    this node's subtree of the demo service graph."""
    await asyncio.sleep(0.01)
    client = request.app['session']
    trace_ctx = {
        'span_context': az.request_span(request).context,
        'propagate_headers': True,
    }
    resp_c = await client.get(service_c_api, trace_request_ctx=trace_ctx)
    data_c = await resp_c.json()
    resp_d = await client.get(service_d_api, trace_request_ctx=trace_ctx)
    data_d = await resp_d.json()
    return web.json_response({
        'name': 'service_b',
        'host': host,
        'port': port,
        'children': [data_c, data_d],
    })
async def handler(request):
    """Fan out to services B and E with trace propagation, then render
    the service-tree index page."""
    await asyncio.sleep(0.01)
    client = request.app['session']
    trace_ctx = {'span_context': az.request_span(request).context}
    resp_b = await client.get(service_b_api, trace_request_ctx=trace_ctx)
    data_b = await resp_b.json()
    resp_e = await client.get(service_e_api, trace_request_ctx=trace_ctx)
    data_e = await resp_e.json()
    tree = {
        'name': 'service_a',
        'host': host,
        'port': port,
        'children': [data_b, data_e],
    }
    template_ctx = {'zipkin': zipkin_address, 'service': tree}
    return aiohttp_jinja2.render_template('index.html', request, template_ctx)
async def handle_set_git_remote(request):
    """Set the URL (and a standard fetch refspec) for a named remote in a
    package's git repository configuration."""
    package = request.match_info["package"]
    remote = request.match_info["remote"]
    span = aiozipkin.request_span(request)
    with span.new_child('open-repo'):
        repo = await _git_open_repo(request.app.vcs_manager,
                                    request.app.db, package)
    post = await request.post()
    r = repo._git
    c = r.get_config()
    section = ("remote", remote)
    c.set(section, "url", post["url"])
    c.set(section, "fetch", "+refs/heads/*:refs/remotes/%s/*" % remote)
    b = BytesIO()
    c.write_to_file(b)
    # Persist the rewritten config back into the repository control dir.
    r._controltransport.put_bytes("config", b.getvalue())
    # TODO(jelmer): Run 'git fetch $remote'?
    return web.Response()
async def handle(request: web.Request) -> web.StreamResponse:
    """Trace a simulated database call as a child span, then serve a
    small demo HTML page."""
    tracer = az.get_tracer(request.app)
    span = az.request_span(request)
    with tracer.new_child(span.context) as child_span:
        child_span.name("mysql:select")
        # call to external service like https://python.org
        # or database query
        await asyncio.sleep(0.01)

    text = """
    <html lang="en">
    <head>
    <title>aiohttp simple example</title>
    </head>
    <body>
    <h3>This page was traced by aiozipkin</h3>
    <p><a href="http://127.0.0.1:9001/status">Go to not traced page</a></p>
    </body>
    </html>
    """
    return web.Response(text=text, content_type="text/html")
async def handle(request):
    """Trace a simulated database call as a child span, then serve a
    small demo HTML page."""
    tracer = az.get_tracer(request.app)
    span = az.request_span(request)
    with tracer.new_child(span.context) as child_span:
        child_span.name('mysql:select')
        # call to external service like https://python.org
        # or database query
        await asyncio.sleep(0.01)

    text = """
    <html lang="en">
    <head>
    <title>aiohttp simple example</title>
    </head>
    <body>
    <h3>This page was traced by aiozipkin</h3>
    <p><a href="http://127.0.0.1:9001/status">Go to not traced page</a></p>
    </body>
    </html>
    """
    return web.Response(text=text, content_type='text/html')
async def handle_run_post(request):
    """Update a run's review status/comment (QA reviewers only).

    ``reschedule`` re-queues the run's package/suite for a fresh attempt
    and records the existing run as rejected; ``approved`` additionally
    asks the publisher to consider publishing the run.
    """
    run_id = request.match_info["run_id"]
    check_qa_reviewer(request)
    span = aiozipkin.request_span(request)
    post = await request.post()
    review_status = post.get("review-status")
    review_comment = post.get("review-comment")
    if review_status:
        async with request.app.db.acquire() as conn:
            review_status = review_status.lower()
            if review_status == "reschedule":
                with span.new_child('sql:run'):
                    run = await conn.fetchrow(
                        'SELECT package, suite FROM run WHERE id = $1', run_id)
                with span.new_child('schedule'):
                    await do_schedule(
                        conn,
                        run['package'],
                        run['suite'],
                        refresh=True,
                        requestor="reviewer",
                        bucket="default",
                    )
                # A rescheduled run is recorded as rejected.
                review_status = "rejected"
            with span.new_child('sql:update-run'):
                await conn.execute(
                    "UPDATE run SET review_status = $1, review_comment = $2 WHERE id = $3",
                    review_status,
                    review_comment,
                    run_id,
                )
            if review_status == 'approved':
                # NOTE(review): elsewhere this app is accessed via
                # attributes (request.app.http_client_session); confirm the
                # dict-style key used here is also populated.
                await consider_publishing(request.app['http_client_session'],
                                          request.app.publisher_url, run_id)
    return web.json_response({
        "review-status": review_status,
        "review-comment": review_comment
    })
async def handle_publish_ready(request):
    """Return a JSON list of ``(package, run id, result branch roles)``
    for runs ready to publish, filtered by suite / review status /
    needs-review / publishability query parameters."""
    suite = request.match_info.get("suite")
    review_status = request.query.get("review-status")
    span = aiozipkin.request_span(request)
    publishable_only = request.query.get("publishable_only", "true") == "true"
    if 'needs-review' in request.query:
        needs_review = (request.query['needs-review'] == 'true')
    else:
        needs_review = None
    # NOTE(review): `limit` is parsed here but never applied to the query
    # below -- confirm whether iter_publish_ready should receive it.
    limit = request.query.get("limit", 200)
    if limit:
        limit = int(limit)
    else:
        limit = None
    ret = []
    async with request.app.db.acquire() as conn:
        with span.new_child('sql:publish-ready'):
            # NOTE(review): the unpacked `needs_review` below rebinds the
            # filter variable of the same name; harmless (the keyword
            # argument is evaluated once, before iteration) but confusing.
            async for (
                run,
                value,
                maintainer_email,
                uploader_emails,
                changelog_mode,
                command,
                qa_review_policy,
                needs_review,
                unpublished_branches,
            ) in state.iter_publish_ready(
                conn,
                suites=([suite] if suite else None),
                review_status=review_status,
                needs_review=needs_review,
                publishable_only=publishable_only,
            ):
                ret.append((run.package, run.id,
                            [rb[0] for rb in run.result_branches]))
    return web.json_response(ret, status=200)
async def diff_request(request):
    """Stream the VCS diff between the base and result revisions of a
    run's branch with the given role.

    Uses ``git diff`` when both revisions are git revisions in a git
    repository, otherwise falls back to ``brz diff``.  Responds 404 for
    an unknown run or role, 503 when the local repository is unavailable,
    408 when diff generation times out.
    """
    run_id = request.match_info["run_id"]
    role = request.match_info["role"]
    span = aiozipkin.request_span(request)
    with span.new_child('sql:run'):
        async with request.app.db.acquire() as conn:
            row = await conn.fetchrow(
                """\
SELECT
package,
new_result_branch.base_revision AS base_revision,
new_result_branch.revision AS revision
FROM run
LEFT JOIN new_result_branch ON new_result_branch.run_id = run.id
WHERE id = $1 AND new_result_branch.role = $2
""", run_id, role)
    if not row:
        raise web.HTTPNotFound(text="No such run: %r" % run_id)
    try:
        repo = request.app.vcs_manager.get_repository(row['package'])
    except NotBranchError:
        repo = None
    if repo is None:
        raise web.HTTPServiceUnavailable(
            text="Local VCS repository for %s temporarily inaccessible" %
            row['package'])
    if row['revision'] is None:
        raise web.HTTPNotFound(text="No branch with role %s" % role)
    old_revid = row['base_revision'].encode('utf-8')
    new_revid = row['revision'].encode('utf-8')
    # Breezy stores git revision ids with a "git-v1:" prefix; when both
    # ends are git revisions we can shell out to the faster `git diff`.
    if (hasattr(repo, '_git') and old_revid.startswith(b'git-v1:')
            and new_revid.startswith(b'git-v1:')):
        args = [
            "git",
            "diff",
            old_revid.decode()[len('git-v1:'):],
            new_revid.decode()[len('git-v1:'):],
        ]
        p = await asyncio.create_subprocess_exec(
            *args,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            stdin=asyncio.subprocess.PIPE,
            cwd=repo.user_transport.local_abspath('.'),
        )
        with span.new_child('subprocess:git-diff'):
            # TODO(jelmer): Stream this
            try:
                (stdout, stderr) = await asyncio.wait_for(
                    p.communicate(b""), 30.0)
            except asyncio.TimeoutError:
                raise web.HTTPRequestTimeout(text='diff generation timed out')
            if p.returncode == 0:
                return web.Response(body=stdout, content_type="text/x-diff")
            logging.warning('git diff failed: %s', stderr.decode())
            raise web.HTTPInternalServerError(
                text='git diff failed: %s' % stderr)
    else:
        # Fall back to breezy
        args = [
            sys.executable, '-m', 'breezy', "diff",
            '-rrevid:%s..revid:%s' % (
                old_revid.decode(),
                new_revid.decode(),
            ), repo.user_url
        ]
        p = await asyncio.create_subprocess_exec(
            *args,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            stdin=asyncio.subprocess.PIPE,
        )
        with span.new_child('subprocess:brz-diff'):
            # TODO(jelmer): Stream this
            try:
                (stdout, stderr) = await asyncio.wait_for(
                    p.communicate(b""), 30.0)
            except asyncio.TimeoutError:
                raise web.HTTPRequestTimeout(text='diff generation timed out')
            # `brz diff` exits 3 on internal error; other codes indicate
            # success with or without differences.
            if p.returncode != 3:
                return web.Response(body=stdout, content_type="text/x-diff")
            logging.warning('bzr diff failed: %s', stderr.decode())
            raise web.HTTPInternalServerError(
                text='bzr diff failed: %s' % stderr)
async def git_backend(request):
    """Proxy a git smart-HTTP request to ``git http-backend`` as a CGI
    subprocess, streaming the request body in and the response body out.
    """
    package = request.match_info["package"]
    subpath = request.match_info["subpath"]
    allow_writes = await is_worker(request.app.db, request)
    service = request.query.get("service")
    if service is not None:
        # Rejects unknown services; write services require worker creds.
        _git_check_service(service, allow_writes)
    span = aiozipkin.request_span(request)
    with span.new_child('open-repo'):
        repo = await _git_open_repo(request.app.vcs_manager,
                                    request.app.db, package)

    args = ["/usr/bin/git"]
    if allow_writes:
        # Enable receive-pack (pushes) only for authenticated workers.
        args.extend(["-c", "http.receivepack=1"])
    args.append("http-backend")
    local_path = repo.user_transport.local_abspath(".")
    full_path = os.path.join(local_path, subpath.lstrip('/'))
    # Minimal CGI environment expected by git-http-backend(1).
    env = {
        "GIT_HTTP_EXPORT_ALL": "true",
        "REQUEST_METHOD": request.method,
        "REMOTE_ADDR": request.remote,
        "CONTENT_TYPE": request.content_type,
        "PATH_TRANSLATED": full_path,
        "QUERY_STRING": request.query_string,
        # REMOTE_USER is not set
    }
    if request.content_type is not None:
        env['CONTENT_TYPE'] = request.content_type
    for key, value in request.headers.items():
        env["HTTP_" + key.replace("-", "_").upper()] = value
    # Framing/encoding headers must not be forwarded to the CGI child.
    for name in [
            "HTTP_CONTENT_ENCODING", "HTTP_CONTENT_LENGTH",
            "HTTP_TRANSFER_ENCODING"
    ]:
        try:
            del env[name]
        except KeyError:
            pass
    p = await asyncio.create_subprocess_exec(
        *args,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        env=env,
        stdin=asyncio.subprocess.PIPE,
    )

    async def feed_stdin(stream):
        # Forward the HTTP request body to git's stdin, then close it so
        # the child sees EOF.
        async for chunk in request.content.iter_any():
            stream.write(chunk)
            await stream.drain()
        stream.close()

    async def read_stderr(stream):
        # Surface git's diagnostics in our own log.
        line = await stream.readline()
        while line:
            logging.warning("git: %s", line.decode().rstrip('\n'))
            line = await stream.readline()

    async def read_stdout(stream):
        # Parse the CGI header block (terminated by a blank CRLF line),
        # then relay the body either whole (Content-Length known) or
        # chunked/streamed.
        b = BytesIO()
        line = await stream.readline()
        while line != b'\r\n':
            b.write(line)
            line = await stream.readline()
        b.seek(0)
        headers = parse_headers(b)
        # CGI passes the HTTP status via a "Status" pseudo-header.
        status = headers.get("Status")
        if status:
            del headers["Status"]
            (status_code, status_reason) = status.split(" ", 1)
            status_code = int(status_code)
            status_reason = status_reason
        else:
            status_code = 200
            status_reason = "OK"
        if 'Content-Length' in headers:
            content_length = int(headers['Content-Length'])
            return web.Response(headers=headers, status=status_code,
                                reason=status_reason,
                                body=await p.stdout.read(content_length))
        else:
            response = web.StreamResponse(
                headers=headers,
                status=status_code,
                reason=status_reason,
            )
            # Chunked transfer encoding is only valid for HTTP/1.1.
            if tuple(request.version) == (1, 1):
                response.enable_chunked_encoding()
            await response.prepare(request)
            chunk = await p.stdout.read(GIT_BACKEND_CHUNK_SIZE)
            while chunk:
                await response.write(chunk)
                chunk = await p.stdout.read(GIT_BACKEND_CHUNK_SIZE)
            await response.write_eof()
        return response

    with span.new_child('git-backend'):
        try:
            # Run stdin feeding, stderr logging and stdout relaying
            # concurrently; read_stdout yields the prepared response.
            unused_stderr, response, unused_stdin = await asyncio.gather(
                *[
                    read_stderr(p.stderr),
                    read_stdout(p.stdout),
                    feed_stdin(p.stdin),
                ], return_exceptions=False)
        except asyncio.CancelledError:
            # Client went away: kill the child rather than leak it.
            p.terminate()
            await p.wait()
            raise
    return response