async def handle_login(request):
    """Start an OAuth2/OpenID Connect login flow.

    Redirects the browser to the provider's authorization endpoint with a
    freshly generated ``state`` token, stored in a short-lived cookie for
    CSRF protection.  An optional ``url`` query parameter is remembered in
    a ``back_url`` cookie so the callback can send the user back afterwards.

    Raises:
      HTTPNotFound: if no OpenID configuration was discovered (login disabled).
      HTTPBadRequest: if the ``url`` parameter cannot be made relative.
    """
    state = str(uuid.uuid4())
    callback_path = request.app.router["oauth2-callback"].url_for()
    if not request.app.openid_config:
        raise web.HTTPNotFound(text='login is disabled on this instance')
    location = URL(
        request.app.openid_config["authorization_endpoint"]).with_query({
            "client_id": request.app.config.oauth2_provider.client_id
            or os.environ['OAUTH2_CLIENT_ID'],
            "redirect_uri": str(request.app.external_url.join(callback_path)),
            "response_type": "code",
            "scope": "openid",
            "state": state,
        })
    response = web.HTTPFound(location)
    # Short-lived, HTTP-only cookie scoped to the callback path; the
    # callback compares it against the returned state parameter.
    response.set_cookie("state", state, max_age=60, path=callback_path,
                        httponly=True, secure=True)
    if "url" in request.query:
        try:
            # .relative() strips scheme/host, so the eventual redirect can
            # only point back into this site (no open redirect).
            # BUGFIX: mark the back_url cookie httponly/secure like the
            # state cookie above; it is only read server-side.
            response.set_cookie(
                "back_url", str(URL(request.query["url"]).relative()),
                httponly=True, secure=True)
        except ValueError:
            # 'url' is not a URL
            raise web.HTTPBadRequest(text='invalid url')
    return response
async def discover_openid_config(app):
    """Fetch the provider's OpenID Connect discovery document.

    On success the parsed document is stored on ``app.openid_config``;
    on a non-200 response a warning is logged and nothing is stored.
    """
    discovery_url = URL(app.config.oauth2_provider.base_url).join(
        URL("/.well-known/openid-configuration"))
    async with app.http_client_session.get(discovery_url) as resp:
        if resp.status == 200:
            app.openid_config = await resp.json()
        else:
            # TODO(jelmer): Fail? Set flag?
            logging.warning(
                "Unable to find openid configuration (%s): %s",
                resp.status,
                await resp.read(),
            )
async def listen_to_result(app):
    """Forward runner result messages to the notifications topic."""
    ws_url = URL(app.runner_url) / "ws/result"
    async for message in pubsub_reader(app.http_client_session, ws_url):
        app.topic_notifications.publish(["result", message])
async def listen_to_queue(app):
    """Track the runner queue and forward queue messages to subscribers.

    Keeps the latest queue message available as ``app['runner_status']``.
    """
    ws_url = URL(app.runner_url) / "ws/queue"
    async for message in pubsub_reader(app.http_client_session, ws_url):
        app['runner_status'] = message
        app.topic_notifications.publish(["queue", message])
async def listen_to_publisher_mp(app):
    """Forward publisher merge-proposal messages to the notifications topic."""
    ws_url = URL(app.publisher_url) / "ws/merge-proposal"
    async for message in pubsub_reader(app.http_client_session, ws_url):
        app.topic_notifications.publish(["merge-proposal", message])
async def listen_to_publisher_publish(app):
    """Forward publisher publish messages to the notifications topic."""
    ws_url = URL(app.publisher_url) / "ws/publish"
    async for message in pubsub_reader(app.http_client_session, ws_url):
        app.topic_notifications.publish(["publish", message])
async def get_credentials(session, publisher_url):
    """Retrieve the credentials document from the publisher service.

    Args:
      session: aiohttp client session to issue the request with
      publisher_url: base URL of the publisher service
    Returns:
      the decoded JSON credentials document
    Raises:
      Exception: if the publisher responds with a non-200 status
    """
    url = URL(publisher_url) / "credentials"
    async with session.get(url=url) as resp:
        if resp.status != 200:
            # BUGFIX: include the status code so failures are diagnosable;
            # the message previously gave no hint at what went wrong.
            raise Exception("unexpected response: %d" % resp.status)
        return await resp.json()
async def create_app(config, policy_config, minified=False,
                     external_url=None, debugtoolbar=None, runner_url=None,
                     publisher_url=None, archiver_url=None,
                     vcs_store_url=None, differ_url=None,
                     listen_address=None, port=None):
    """Assemble the janitor site aiohttp application.

    Sets up routing, zipkin tracing, pub/sub forwarding from the runner and
    publisher services, the GPG context, artifact/logfile managers and the
    API sub-application.

    Args:
      config: site configuration object
      policy_config: policy configuration, exposed as ``app.policy``
      minified: if False, serve ".min." variants of bundled static assets
      external_url: externally visible base URL, if any
      debugtoolbar: optional host list enabling aiohttp_debugtoolbar
      runner_url, publisher_url, archiver_url, vcs_store_url, differ_url:
        base URLs of the companion services
      listen_address, port: advertised endpoint for zipkin tracing
    Returns:
      a configured ``web.Application``
    """
    if minified:
        minified_prefix = ""
    else:
        minified_prefix = "min."
    trailing_slash_redirect = normalize_path_middleware(append_slash=True)
    app = web.Application(middlewares=[trailing_slash_redirect])
    setup_metrics(app)
    app.topic_notifications = Topic("notifications")
    app.router.add_get(
        "/ws/notifications",
        functools.partial(pubsub_handler, app.topic_notifications),  # type: ignore
        name="ws-notifications",
    )
    endpoint = aiozipkin.create_endpoint(
        "janitor.site", ipv4=listen_address, port=port)
    if config.zipkin_address:
        tracer = await aiozipkin.create(
            config.zipkin_address, endpoint, sample_rate=1.0)
    else:
        tracer = await aiozipkin.create_custom(endpoint)
    trace_configs = [aiozipkin.make_trace_config(tracer)]
    aiozipkin.setup(app, tracer, skip_routes=[
        app.router['metrics'],
        app.router['ws-notifications'],
    ])

    async def setup_client_session(app):
        app.http_client_session = ClientSession(trace_configs=trace_configs)

    async def close_client_session(app):
        await app.http_client_session.close()

    app.on_startup.append(setup_client_session)
    app.on_cleanup.append(close_client_session)

    async def start_gpg_context(app):
        # Isolated GPG home, removed again on application cleanup.
        gpg_home = tempfile.TemporaryDirectory()
        gpg_context = gpg.Context(home_dir=gpg_home.name)
        app.gpg = gpg_context.__enter__()

        async def cleanup_gpg(app):
            gpg_context.__exit__(None, None, None)
            # BUGFIX: shutil.rmtree() was passed the TemporaryDirectory
            # object itself (not a path) and would raise TypeError; let
            # the TemporaryDirectory remove itself instead.
            gpg_home.cleanup()

        app.on_cleanup.append(cleanup_gpg)

    async def discover_openid_config(app):
        url = URL(app.config.oauth2_provider.base_url).join(
            URL("/.well-known/openid-configuration"))
        async with app.http_client_session.get(url) as resp:
            if resp.status != 200:
                # TODO(jelmer): Fail? Set flag?
                logging.warning(
                    "Unable to find openid configuration (%s): %s",
                    resp.status,
                    await resp.read(),
                )
                return
            app.openid_config = await resp.json()

    async def start_pubsub_forwarder(app):
        # Forward events from the runner/publisher websockets into this
        # app's notifications topic.
        async def listen_to_publisher_publish(app):
            url = URL(app.publisher_url) / "ws/publish"
            async for msg in pubsub_reader(app.http_client_session, url):
                app.topic_notifications.publish(["publish", msg])

        async def listen_to_publisher_mp(app):
            url = URL(app.publisher_url) / "ws/merge-proposal"
            async for msg in pubsub_reader(app.http_client_session, url):
                app.topic_notifications.publish(["merge-proposal", msg])

        app['runner_status'] = None

        async def listen_to_queue(app):
            url = URL(app.runner_url) / "ws/queue"
            async for msg in pubsub_reader(app.http_client_session, url):
                app['runner_status'] = msg
                app.topic_notifications.publish(["queue", msg])

        async def listen_to_result(app):
            url = URL(app.runner_url) / "ws/result"
            async for msg in pubsub_reader(app.http_client_session, url):
                app.topic_notifications.publish(["result", msg])

        for cb, title in [
            (listen_to_publisher_publish, 'publisher publish listening'),
            (listen_to_publisher_mp, 'merge proposal listiening'),
            (listen_to_queue, 'queue listening'),
            (listen_to_result, 'result listening'),
        ]:
            listener = create_background_task(cb(app), title)

            # BUGFIX: bind the task via a default argument; the previous
            # late-binding closure made all four cleanup hooks cancel only
            # the task created in the *last* loop iteration.
            async def stop_listener(app, listener=listener):
                listener.cancel()
                await listener

            app.on_cleanup.append(stop_listener)

    # Simple template-backed pages.
    for path, templatename in [
        ("/", "index"),
        ("/contact", "contact"),
        ("/about", "about"),
        ("/apt", "apt"),
        ("/cupboard/", "cupboard"),
    ]:
        app.router.add_get(
            path,
            functools.partial(handle_simple, templatename + ".html"),
            name=templatename,
        )
    app.router.add_get("/credentials", handle_credentials, name="credentials")
    app.router.add_get("/ssh_keys", handle_ssh_keys, name="ssh-keys")
    app.router.add_get(r"/pgp_keys{extension:(\.asc)?}",
                       handle_pgp_keys, name="pgp-keys")
    from .lintian_fixes import register_lintian_fixes_endpoints
    register_lintian_fixes_endpoints(app.router)
    from .multiarch_hints import register_multiarch_hints_endpoints
    register_multiarch_hints_endpoints(app.router)
    from .orphan import register_orphan_endpoints
    register_orphan_endpoints(app.router)
    from .debianize import register_debianize_endpoints
    register_debianize_endpoints(app.router)
    from .scrub_obsolete import register_scrub_obsolete_endpoints
    register_scrub_obsolete_endpoints(app.router)
    from .new_upstream import register_new_upstream_endpoints
    register_new_upstream_endpoints(app.router)
    # Alternation of all configured suite names, for use in route patterns.
    SUITE_REGEX = "|".join([re.escape(suite.name) for suite in config.suite])
    app.router.add_get(
        "/{suite:%s}/merge-proposals" % SUITE_REGEX,
        handle_merge_proposals,
        name="suite-merge-proposals",
    )
    app.router.add_get("/{suite:%s}/ready" % SUITE_REGEX,
                       handle_ready_proposals, name="suite-ready")
    app.router.add_get(
        "/{suite:%s}/maintainer" % SUITE_REGEX,
        handle_maintainer_list,
        name="suite-maintainer-list",
    )
    app.router.add_get("/{suite:%s}/pkg/" % SUITE_REGEX,
                       handle_pkg_list, name="suite-package-list")
    app.router.add_get("/{vcs:git|bzr}/", handle_repo_list, name="repo-list")
    app.router.add_get("/{suite:unchanged}", handle_apt_repo,
                       name="unchanged-start")
    app.router.add_get("/cupboard/history", handle_history, name="history")
    app.router.add_get("/cupboard/queue", handle_queue, name="queue")
    app.router.add_get("/cupboard/result-codes/", handle_result_codes,
                       name="result-code-list")
    app.router.add_get("/cupboard/result-codes/{code}", handle_result_codes,
                       name="result-code")
    app.router.add_get("/cupboard/never-processed", handle_never_processed,
                       name="never-processed")
    app.router.add_get(
        "/cupboard/maintainer-stats",
        handle_cupboard_maintainer_stats,
        name="cupboard-maintainer-stats",
    )
    app.router.add_get("/cupboard/maintainer", handle_maintainer_list,
                       name="maintainer-list")
    app.router.add_get(
        "/cupboard/maintainer/{maintainer}",
        handle_maintainer_overview,
        name="cupboard-maintainer-overview",
    )
    app.router.add_get(
        "/maintainer/{maintainer}",
        handle_maintainer_overview,
        name="maintainer-overview",
    )
    app.router.add_get("/m/", handle_maintainer_index,
                       name="maintainer-index-short")
    app.router.add_get("/m/{maintainer}", handle_maintainer_overview,
                       name="maintainer-overview-short")
    app.router.add_get("/cupboard/publish", handle_publish_history,
                       name="publish-history")
    app.router.add_get("/cupboard/ready", handle_ready_proposals,
                       name="cupboard-ready")
    app.router.add_get("/cupboard/pkg/", handle_pkg_list, name="package-list")
    app.router.add_get("/cupboard/pkg/{pkg}/", handle_pkg,
                       name="cupboard-package")
    app.router.add_get("/cupboard/pkg/{pkg}/{run_id}/", handle_run,
                       name="cupboard-run")
    app.router.add_get("/cupboard/review", handle_review,
                       name="cupboard-review")
    app.router.add_get("/cupboard/rejected", handle_rejected,
                       name="cupboard-rejected")
    app.router.add_post("/cupboard/review", handle_review_post,
                        name="cupboard-review-post")
    app.router.add_get(
        "/cupboard/failed-lintian-brush-fixers/",
        handle_failed_lintian_brush_fixers_list,
        name="failed-lintian-brush-fixer-list",
    )
    app.router.add_get(
        "/cupboard/failed-lintian-brush-fixers/{fixer}",
        handle_failed_lintian_brush_fixers,
        name="failed-lintian-brush-fixer",
    )
    app.router.add_get(
        "/cupboard/lintian-brush-regressions/",
        handle_lintian_brush_regressions,
        name="lintian-brush-regressions",
    )
    app.router.add_get(
        "/cupboard/pkg/{pkg}/{run_id}/{filename:.+}",
        handle_result_file,
        name="cupboard-result-file",
    )
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/pkg/{pkg}/{run_id}/{filename:.+}",
        handle_result_file,
        name="result-file",
    )
    app.router.add_get("/{suite:" + SUITE_REGEX + "}/",
                       handle_generic_start, name="generic-start")
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/candidates",
        handle_generic_candidates,
        name="generic-candidates",
    )
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/pkg/{pkg}/",
        handle_generic_pkg,
        name="generic-package",
    )
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/pkg/{pkg}/{run_id}",
        handle_generic_pkg,
        name="generic-run",
    )
    app.router.add_get("/cupboard/vcs-regressions/",
                       handle_vcs_regressions, name="vcs-regressions")
    app.router.add_get("/cupboard/broken-merge-proposals", handle_broken_mps,
                       name="broken-mps")
    app.router.add_get("/login", handle_login, name="login")
    for entry in os.scandir(os.path.join(os.path.dirname(__file__),
                                         "_static")):
        app.router.add_get(
            "/_static/%s" % entry.name,
            functools.partial(handle_static_file, entry.path),
        )
    app.router.add_static("/_static/images/datatables",
                          "/usr/share/javascript/jquery-datatables/images")
    # System-installed javascript/css bundles; serve minified variants
    # unless 'minified' was requested, and skip assets not installed.
    for (name, kind, basepath) in [
        ("chart", "js", "/usr/share/javascript/chart.js/Chart"),
        ("chart", "css", "/usr/share/javascript/chart.js/Chart"),
        ("jquery", "js", "/usr/share/javascript/jquery/jquery"),
        (
            "jquery.typeahead",
            "js",
            "/usr/share/javascript/jquery-typeahead/jquery.typeahead",
        ),
        (
            "jquery.datatables",
            "js",
            "/usr/share/javascript/jquery-datatables/jquery.dataTables",
        ),
        ("moment", "js", "/usr/share/javascript/moment/moment"),
    ]:
        if not os.path.exists(basepath + "." + kind):
            continue
        app.router.add_get(
            "/_static/%s.%s" % (name, kind),
            functools.partial(
                handle_static_file,
                "%s.%s%s" % (basepath, minified_prefix, kind)),
        )
    app.router.add_get("/oauth/callback", handle_oauth_callback,
                       name="oauth2-callback")
    from .api import create_app as create_api_app
    from .webhook import process_webhook, is_webhook_request

    async def handle_post_root(request):
        # POST / is only meaningful for VCS-forge webhooks.
        if is_webhook_request(request):
            return await process_webhook(request, request.app.database)
        raise web.HTTPMethodNotAllowed(
            method='POST', allowed_methods=['GET', 'HEAD'])

    app.runner_url = runner_url
    app.archiver_url = archiver_url
    app.differ_url = differ_url
    app.policy = policy_config
    app.publisher_url = publisher_url
    app.vcs_store_url = vcs_store_url
    if config.oauth2_provider and config.oauth2_provider.base_url:
        app.on_startup.append(discover_openid_config)
    else:
        # No oauth2 provider configured: login is disabled.
        app.openid_config = None
    app.on_startup.append(start_pubsub_forwarder)
    app.on_startup.append(start_gpg_context)
    if external_url:
        app.external_url = URL(external_url)
    else:
        app.external_url = None
    database = state.Database(config.database_location)
    app.database = database
    from .stats import stats_app
    app.add_subapp("/cupboard/stats",
                   stats_app(database, config, app.external_url))
    app.config = config
    from janitor.site import env
    app.jinja_env = env
    from janitor.artifacts import get_artifact_manager

    async def startup_artifact_manager(app):
        app['artifact_manager'] = get_artifact_manager(
            config.artifact_location, trace_configs=trace_configs)
        await app['artifact_manager'].__aenter__()

    async def turndown_artifact_manager(app):
        await app['artifact_manager'].__aexit__(None, None, None)

    app.on_startup.append(startup_artifact_manager)
    app.on_cleanup.append(turndown_artifact_manager)
    setup_debsso(app)
    app.router.add_post("/", handle_post_root, name="root-post")
    app.router.add_get("/health", handle_health, name="health")
    app.add_subapp(
        "/api",
        create_api_app(
            app.database,
            publisher_url,
            runner_url,  # type: ignore
            archiver_url,
            vcs_store_url,
            differ_url,
            config,
            policy_config,
            external_url=(app.external_url.join(URL("api"))
                          if app.external_url else None),
            trace_configs=trace_configs,
        ),
    )
    import aiohttp_apispec
    app.router.add_static(
        '/static/swagger',
        os.path.join(os.path.dirname(aiohttp_apispec.__file__), "static"))
    if debugtoolbar:
        import aiohttp_debugtoolbar

        # install aiohttp_debugtoolbar
        aiohttp_debugtoolbar.setup(app, hosts=debugtoolbar)

    async def setup_logfile_manager(app):
        app.logfile_manager = get_log_manager(
            config.logs_location, trace_configs=trace_configs)

    app.on_startup.append(setup_logfile_manager)
    return app
async def handle_oauth_callback(request):
    """Complete the OAuth2 authorization-code flow.

    Validates the CSRF ``state`` cookie against the ``state`` query
    parameter, exchanges the authorization code for tokens, fetches the
    user info and stores it in a new site session, then redirects back to
    the remembered ``back_url`` (default ``/``).

    Raises:
      HTTPNotFound: if no OpenID configuration was discovered (login disabled).
    """
    code = request.query.get("code")
    state_code = request.query.get("state")
    # BUGFIX: require a state parameter to be present.  Previously a
    # request with neither a state query parameter nor a state cookie
    # compared None == None and passed the CSRF check.
    if state_code is None or request.cookies.get("state") != state_code:
        return web.Response(status=400, text="state variable mismatch")
    if not request.app.openid_config:
        raise web.HTTPNotFound(text='login disabled')
    token_url = URL(request.app.openid_config["token_endpoint"])
    redirect_uri = (request.app.external_url or request.url).join(
        request.app.router["oauth2-callback"].url_for())
    params = {
        "code": code,
        "client_id": request.app.config.oauth2_provider.client_id
        or os.environ['OAUTH2_CLIENT_ID'],
        "client_secret": request.app.config.oauth2_provider.client_secret
        or os.environ['OAUTH2_CLIENT_SECRET'],
        "grant_type": "authorization_code",
        "redirect_uri": str(redirect_uri),
    }
    async with request.app.http_client_session.post(
            token_url, params=params) as resp:
        if resp.status != 200:
            return web.json_response(
                status=resp.status, data={"error": "token-error"})
        resp = await resp.json()
        if resp["token_type"] != "Bearer":
            return web.Response(
                status=500,
                text="Expected bearer token, got %s" % resp["token_type"],
            )
        refresh_token = resp["refresh_token"]  # noqa: F841
        access_token = resp["access_token"]
    try:
        back_url = request.cookies["back_url"]
    except KeyError:
        back_url = "/"
    async with request.app.http_client_session.get(
        request.app.openid_config["userinfo_endpoint"],
        headers={"Authorization": "Bearer %s" % access_token},
    ) as resp:
        if resp.status != 200:
            raise Exception("unable to get user info (%s): %s"
                            % (resp.status, await resp.read()))
        userinfo = await resp.json()
    session_id = str(uuid.uuid4())
    async with request.app.database.acquire() as conn:
        # Upsert so a colliding session id simply refreshes the user info.
        await conn.execute(
            """
            INSERT INTO site_session (id, userinfo) VALUES ($1, $2)
            ON CONFLICT (id) DO UPDATE SET userinfo = EXCLUDED.userinfo
            """,
            session_id, userinfo)

    # TODO(jelmer): Store access token / refresh token?
    resp = web.HTTPFound(back_url)
    resp.del_cookie("state")
    resp.del_cookie("back_url")
    resp.set_cookie("session_id", session_id, secure=True, httponly=True)
    return resp
async def handle_repo_list(request):
    """Render the repository listing for the requested VCS kind."""
    vcs_kind = request.match_info["vcs"]
    listing_url = URL(request.app.vcs_store_url) / vcs_kind
    async with request.app.http_client_session.get(listing_url) as resp:
        repositories = await resp.json()
    return {"vcs": vcs_kind, "repositories": repositories}