async def test_client_signals(tracer, fake_transport):
    trace_config = az.make_trace_config(tracer)
    session = aiohttp.ClientSession(trace_configs=[trace_config])

    with tracer.new_trace() as span:
        span.name('client:signals')
        url = 'https://httpbin.org/get'
        # do not propagate headers
        ctx = {'span_context': span.context, 'propagate_headers': False}
        resp = await session.get(url, trace_request_ctx=ctx)
        data = await resp.read()
        assert len(data) > 0
        assert az.make_context(resp.request_info.headers) is None

        # by default headers added
        ctx = {'span_context': span.context}
        resp = await session.get(url, trace_request_ctx=ctx)
        data = await resp.text()
        assert len(data) > 0
        context = az.make_context(resp.request_info.headers)
        assert context.trace_id == span.context.trace_id

    await session.close()

    assert len(fake_transport.records) == 3
    record1 = fake_transport.records[0].asdict()
    record2 = fake_transport.records[1].asdict()
    record3 = fake_transport.records[2].asdict()
    assert record2['parentId'] == record3['id']
    assert record1['parentId'] == record3['id']
    assert record3['name'] == 'client:signals'
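The `tracer` and `fake_transport` arguments in the test above are pytest fixtures that are not part of this listing. A minimal sketch of what they could look like follows; the `FakeTransport` class name and the `az.Tracer`/`az.Sampler` constructor signatures are assumptions and may differ between aiozipkin versions.

import pytest
import aiozipkin as az


class FakeTransport:
    """Stub transport that keeps emitted span records in memory."""

    def __init__(self):
        self.records = []

    def send(self, record):
        # Each record exposes .asdict(), which the assertions above rely on.
        self.records.append(record)


@pytest.fixture
def fake_transport():
    return FakeTransport()


@pytest.fixture
def tracer(fake_transport):
    # Sample every trace so each finished span ends up in fake_transport.records.
    sampler = az.Sampler(sample_rate=1.0)
    endpoint = az.create_endpoint('test_service', ipv4='127.0.0.1', port=8080)
    return az.Tracer(fake_transport, sampler, endpoint)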
Example #2
async def make_app():

    app = web.Application()
    app.router.add_get('/api/v1/data', handler)
    app.router.add_get('/', handler)

    endpoint = az.create_endpoint('service_a', ipv4=host, port=port)
    tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)

    trace_config = az.make_trace_config(tracer)

    session = aiohttp.ClientSession(trace_configs=[trace_config])
    app['session'] = session

    async def close_session(app):
        await app['session'].close()

    app.on_cleanup.append(close_session)

    az.setup(app, tracer)

    TEMPLATES_ROOT = pathlib.Path(__file__).parent / 'templates'
    aiohttp_jinja2.setup(
        app, loader=jinja2.FileSystemLoader(str(TEMPLATES_ROOT))
    )

    return app
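The `handler` registered above is not shown in this listing. A minimal sketch, assuming the intent is to forward the incoming server span to a downstream call (the URL is a placeholder): `az.request_span()` returns the server span created by the aiozipkin middleware installed with `az.setup()`, and passing its context via `trace_request_ctx` makes the traced session record the outgoing request as a child client span.

async def handler(request):
    span = az.request_span(request)
    session = request.app['session']

    # Placeholder downstream address, e.g. the service_b app shown in a later
    # example of this listing.
    url = 'http://127.0.0.1:9011/api/v1/data'
    ctx = {'span_context': span.context}
    async with session.get(url, trace_request_ctx=ctx) as resp:
        data = await resp.text()

    return web.Response(text=data)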
Example #3
async def test_client_signals(tracer, fake_transport):
    trace_config = az.make_trace_config(tracer)
    session = aiohttp.ClientSession(trace_configs=[trace_config])

    with tracer.new_trace() as span:
        span.name('client:signals')
        url = 'https://httpbin.org/get'
        # do not propagate headers
        ctx = {'span_context': span.context, 'propagate_headers': False}
        resp = await session.get(url, trace_request_ctx=ctx)
        await resp.text()
        assert resp.status == 200
        assert az.make_context(resp.request_info.headers) is None

        # by default headers added
        ctx = {'span_context': span.context}
        resp = await session.get(url, trace_request_ctx=ctx)
        await resp.text()
        assert resp.status == 200
        context = az.make_context(resp.request_info.headers)
        assert context.trace_id == span.context.trace_id

    await session.close()

    assert len(fake_transport.records) == 3
    record1 = fake_transport.records[0].asdict()
    record2 = fake_transport.records[1].asdict()
    record3 = fake_transport.records[2].asdict()
    assert record2['parentId'] == record3['id']
    assert record1['parentId'] == record3['id']
    assert record3['name'] == 'client:signals'
Example #4
async def make_app():

    app = web.Application()
    app.router.add_get("/api/v1/data", handler)
    app.router.add_get("/", handler)

    endpoint = az.create_endpoint("service_a", ipv4=host, port=port)
    tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)

    trace_config = az.make_trace_config(tracer)

    session = aiohttp.ClientSession(trace_configs=[trace_config])
    app["session"] = session

    async def close_session(app):
        await app["session"].close()

    app.on_cleanup.append(close_session)

    az.setup(app, tracer)

    TEMPLATES_ROOT = pathlib.Path(__file__).parent / "templates"
    aiohttp_jinja2.setup(app,
                         loader=jinja2.FileSystemLoader(str(TEMPLATES_ROOT)))

    return app
Example #5
async def setup_zipkin(app, loop, context):
    endpoint = az.create_endpoint(
        sanic_zipkin.service, ipv4=sanic_zipkin.host, port=sanic_zipkin.port)
    context.tracer = await az.create(
        sanic_zipkin.zipkin_address, endpoint, sample_rate=1.0)
    trace_config = az.make_trace_config(context.tracer)
    context.aio_session = aiohttp.ClientSession(trace_configs=[trace_config])
    context.span = []
    context.zipkin_headers = []
Example #6
async def test_client_signals_no_span(tracer, fake_transport):
    trace_config = az.make_trace_config(tracer)
    session = aiohttp.ClientSession(trace_configs=[trace_config])

    url = 'https://httpbin.org/get'
    resp = await session.get(url)
    data = await resp.read()
    assert len(data) > 0
    await session.close()
    assert len(fake_transport.records) == 0
Example #7
async def client(aiohttp_client: Any, tracer: az.Tracer) -> Any:
    app = web.Application()
    app.router.add_get("/simple", handler)
    app.router.add_get("/error", error_handler)

    trace_config = az.make_trace_config(tracer)
    session = aiohttp.ClientSession(trace_configs=[trace_config])
    app["session"] = session

    az.setup(app, tracer)
    c = await aiohttp_client(app)
    yield c

    await session.close()
Example #8
async def client(test_client, tracer):
    app = web.Application()
    app.router.add_get('/simple', handler)
    app.router.add_get('/error', error_handler)

    trace_config = az.make_trace_config(tracer)
    session = aiohttp.ClientSession(trace_configs=[trace_config])
    app['session'] = session

    az.setup(app, tracer)
    c = await test_client(app)
    await yield_(c)

    await session.close()
Example #10
async def make_app(host, port):
    app = web.Application()
    app.add_routes([web.get("/", get)])

    endpoint = az.create_endpoint("frontend", ipv4=host, port=port)
    zipkin_address = "http://zipkin:9411/api/v1/spans"
    tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)

    trace_config = az.make_trace_config(tracer)

    session = ClientSession(trace_configs=[trace_config])
    app["session"] = session

    async def close_session(app):
        await app["session"].close()

    app.on_cleanup.append(close_session)
    az.setup(app, tracer)
    return app
Example #11
async def make_app():
    app = web.Application()
    app.router.add_get('/api/v1/data', handler)

    zipkin_address = 'http://127.0.0.1:9411/api/v2/spans'
    endpoint = az.create_endpoint('service_b', ipv4=host, port=port)
    tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)
    az.setup(app, tracer)

    trace_config = az.make_trace_config(tracer)

    session = aiohttp.ClientSession(trace_configs=[trace_config])
    app['session'] = session

    async def close_session(app):
        await app['session'].close()

    app.on_cleanup.append(close_session)
    return app
Example #12
async def make_app():
    app = web.Application()
    app.router.add_get('/api/v1/data', handler)

    zipkin_address = 'http://127.0.0.1:9411'
    endpoint = az.create_endpoint('service_b', ipv4=host, port=port)
    tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)
    az.setup(app, tracer)

    trace_config = az.make_trace_config(tracer)

    session = aiohttp.ClientSession(trace_configs=[trace_config])
    app['session'] = session

    async def close_session(app):
        await app['session'].close()

    app.on_cleanup.append(close_session)
    return app
Example #13
async def test_client_signals(tracer: az.Tracer, fake_transport: Any) -> None:
    trace_config = az.make_trace_config(tracer)
    session = aiohttp.ClientSession(trace_configs=[trace_config])

    with tracer.new_trace() as span:
        span.name("client:signals")
        url = "https://httpbin.org/get"
        # do not propagate headers
        ctx = {"span_context": span.context, "propagate_headers": False}
        resp = await session.get(url, trace_request_ctx=ctx)
        data = await resp.read()
        assert len(data) > 0
        assert az.make_context(resp.request_info.headers) is None

        ctx_ns = SimpleNamespace(span_context=span.context,
                                 propagate_headers=False)
        resp = await session.get(url, trace_request_ctx=ctx_ns)
        data = await resp.read()
        assert len(data) > 0
        assert az.make_context(resp.request_info.headers) is None

        # by default headers added
        ctx = {"span_context": span.context}
        resp = await session.get(url, trace_request_ctx=ctx)
        data = await resp.text()
        assert len(data) > 0
        context = az.make_context(resp.request_info.headers)
        assert context is not None
        assert context.trace_id == span.context.trace_id

    await session.close()

    assert len(fake_transport.records) == 4
    record1 = fake_transport.records[0].asdict()
    record2 = fake_transport.records[1].asdict()
    record3 = fake_transport.records[2].asdict()
    record4 = fake_transport.records[3].asdict()
    assert record3["parentId"] == record4["id"]
    assert record2["parentId"] == record4["id"]
    assert record1["parentId"] == record4["id"]
    assert record4["name"] == "client:signals"
Example #14
async def init(app, loop):
    tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)
    trace_config = az.make_trace_config(tracer)
    app.aiohttp_session = aiohttp.ClientSession(trace_configs=[trace_config], loop=loop)
    app.tracer = tracer
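A minimal sketch of a Sanic handler that could use the tracer and traced session attached in `init` above; the route function, upstream URL, and response helper are assumptions, not part of the original listing.

from sanic import response


async def fetch_data(request):
    app = request.app
    # Open a new trace and hand its context to the traced ClientSession so the
    # outgoing request is recorded as a child span and B3 headers are added.
    with app.tracer.new_trace(sampled=True) as span:
        span.name('client:fetch_data')
        ctx = {'span_context': span.context}
        async with app.aiohttp_session.get(
                'http://127.0.0.1:9011/api/v1/data',  # assumed upstream URL
                trace_request_ctx=ctx) as resp:
            body = await resp.text()
    return response.text(body)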
Example #15
async def main(argv=None):
    import argparse

    parser = argparse.ArgumentParser(prog="janitor.debian.archive")
    parser.add_argument(
        "--listen-address", type=str, help="Listen address", default="localhost"
    )
    parser.add_argument("--port", type=int, help="Listen port", default=9914)
    parser.add_argument(
        "--config", type=str, default="janitor.conf", help="Path to configuration."
    )
    parser.add_argument("--dists-directory", type=str, help="Dists directory")
    parser.add_argument("--cache-directory", type=str, help="Cache directory")
    parser.add_argument("--verbose", action='store_true')
    parser.add_argument("--gcp-logging", action='store_true', help='Use Google cloud logging.')
    parser.add_argument(
        "--runner-url", type=str, default=None, help="URL to reach runner at."
    )

    args = parser.parse_args()
    if not args.dists_directory:
        parser.print_usage()
        sys.exit(1)

    if args.gcp_logging:
        import google.cloud.logging
        client = google.cloud.logging.Client()
        client.get_default_handler()
        client.setup_logging()
    elif args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    with open(args.config, "r") as f:
        config = read_config(f)

    os.makedirs(args.dists_directory, exist_ok=True)

    db = state.Database(config.database_location)

    endpoint = aiozipkin.create_endpoint("janitor.debian.archive", ipv4=args.listen_address, port=args.port)
    if config.zipkin_address:
        tracer = await aiozipkin.create(config.zipkin_address, endpoint, sample_rate=1.0)
    else:
        tracer = await aiozipkin.create_custom(endpoint)
    trace_configs = [aiozipkin.make_trace_config(tracer)]

    artifact_manager = get_artifact_manager(config.artifact_location, trace_configs=trace_configs)

    gpg_context = gpg.Context()

    package_info_provider = PackageInfoProvider(artifact_manager)
    if args.cache_directory:
        os.makedirs(args.cache_directory, exist_ok=True)
        package_info_provider = CachingPackageInfoProvider(
            package_info_provider, args.cache_directory
        )

    generator_manager = GeneratorManager(
        args.dists_directory, db, config, package_info_provider, gpg_context,
    )

    loop = asyncio.get_event_loop()
    tasks = [
        loop.create_task(
            run_web_server(
                args.listen_address,
                args.port,
                args.dists_directory,
                config,
                generator_manager,
                tracer,
            )
        ),
        loop.create_task(loop_publish(config, generator_manager)),
        ]

    if args.runner_url:
        tasks.append(loop.create_task(
            listen_to_runner(
                args.runner_url,
                generator_manager)))

    async with package_info_provider:
        await asyncio.gather(*tasks)
Example #16
async def main(argv=None):
    import argparse

    parser = argparse.ArgumentParser(prog="janitor.differ")
    parser.add_argument(
        "--listen-address", type=str, help="Listen address", default="localhost"
    )
    parser.add_argument("--port", type=int, help="Listen port", default=9920)
    parser.add_argument(
        "--config", type=str, default="janitor.conf", help="Path to configuration."
    )
    parser.add_argument("--cache-path", type=str, default=None, help="Path to cache.")
    parser.add_argument(
        "--runner-url", type=str, default=None, help="URL to reach runner at."
    )
    parser.add_argument(
        '--task-memory-limit', help='Task memory limit (in MB)',
        type=int, default=1500)
    parser.add_argument(
        '--task-timeout', help='Task timeout (in seconds)',
        type=int, default=60)
    parser.add_argument('--gcp-logging', action='store_true')

    args = parser.parse_args()

    if args.gcp_logging:
        import google.cloud.logging
        client = google.cloud.logging.Client()
        client.get_default_handler()
        client.setup_logging()
    else:
        logging.basicConfig(level=logging.INFO)

    with open(args.config, "r") as f:
        config = read_config(f)

    endpoint = aiozipkin.create_endpoint("janitor.differ", ipv4=args.listen_address, port=args.port)
    if config.zipkin_address:
        tracer = await aiozipkin.create(config.zipkin_address, endpoint, sample_rate=1.0)
    else:
        tracer = await aiozipkin.create_custom(endpoint)
    trace_configs = [aiozipkin.make_trace_config(tracer)]

    artifact_manager = get_artifact_manager(
        config.artifact_location, trace_configs=trace_configs)

    db = state.Database(config.database_location)
    loop = asyncio.get_event_loop()

    if args.cache_path and not os.path.isdir(args.cache_path):
        os.makedirs(args.cache_path)

    app = DifferWebApp(
        db=db,
        config=config,
        cache_path=args.cache_path,
        artifact_manager=artifact_manager,
        task_memory_limit=args.task_memory_limit,
        task_timeout=args.task_timeout,
    )

    tasks = [loop.create_task(run_web_server(app, args.listen_address, args.port, tracer))]

    if args.runner_url:
        tasks.append(loop.create_task(listen_to_runner(args.runner_url, app)))

    await asyncio.gather(*tasks)
Example #17
async def create_app(config,
                     policy_config,
                     minified=False,
                     external_url=None,
                     debugtoolbar=None,
                     runner_url=None,
                     publisher_url=None,
                     archiver_url=None,
                     vcs_store_url=None,
                     differ_url=None,
                     listen_address=None,
                     port=None):
    if minified:
        minified_prefix = ""
    else:
        minified_prefix = "min."

    trailing_slash_redirect = normalize_path_middleware(append_slash=True)
    app = web.Application(middlewares=[trailing_slash_redirect])

    setup_metrics(app)
    app.topic_notifications = Topic("notifications")
    app.router.add_get(
        "/ws/notifications",
        functools.partial(pubsub_handler,
                          app.topic_notifications),  # type: ignore
        name="ws-notifications",
    )

    endpoint = aiozipkin.create_endpoint("janitor.site",
                                         ipv4=listen_address,
                                         port=port)
    if config.zipkin_address:
        tracer = await aiozipkin.create(config.zipkin_address,
                                        endpoint,
                                        sample_rate=1.0)
    else:
        tracer = await aiozipkin.create_custom(endpoint)
    trace_configs = [aiozipkin.make_trace_config(tracer)]

    aiozipkin.setup(app,
                    tracer,
                    skip_routes=[
                        app.router['metrics'],
                        app.router['ws-notifications'],
                    ])

    async def setup_client_session(app):
        app.http_client_session = ClientSession(trace_configs=trace_configs)

    async def close_client_session(app):
        await app.http_client_session.close()

    app.on_startup.append(setup_client_session)
    app.on_cleanup.append(close_client_session)

    async def start_gpg_context(app):
        gpg_home = tempfile.TemporaryDirectory()
        gpg_context = gpg.Context(home_dir=gpg_home.name)
        app.gpg = gpg_context.__enter__()

        async def cleanup_gpg(app):
            gpg_context.__exit__(None, None, None)
            shutil.rmtree(gpg_home)

        app.on_cleanup.append(cleanup_gpg)

    async def discover_openid_config(app):
        url = URL(app.config.oauth2_provider.base_url).join(
            URL("/.well-known/openid-configuration"))
        async with app.http_client_session.get(url) as resp:
            if resp.status != 200:
                # TODO(jelmer): Fail? Set flag?
                logging.warning(
                    "Unable to find openid configuration (%s): %s",
                    resp.status,
                    await resp.read(),
                )
                return
            app.openid_config = await resp.json()

    async def start_pubsub_forwarder(app):
        async def listen_to_publisher_publish(app):
            url = URL(app.publisher_url) / "ws/publish"
            async for msg in pubsub_reader(app.http_client_session, url):
                app.topic_notifications.publish(["publish", msg])

        async def listen_to_publisher_mp(app):
            url = URL(app.publisher_url) / "ws/merge-proposal"
            async for msg in pubsub_reader(app.http_client_session, url):
                app.topic_notifications.publish(["merge-proposal", msg])

        app['runner_status'] = None

        async def listen_to_queue(app):
            url = URL(app.runner_url) / "ws/queue"
            async for msg in pubsub_reader(app.http_client_session, url):
                app['runner_status'] = msg
                app.topic_notifications.publish(["queue", msg])

        async def listen_to_result(app):
            url = URL(app.runner_url) / "ws/result"
            async for msg in pubsub_reader(app.http_client_session, url):
                app.topic_notifications.publish(["result", msg])

        for cb, title in [
            (listen_to_publisher_publish, 'publisher publish listening'),
            (listen_to_publisher_mp, 'merge proposal listening'),
            (listen_to_queue, 'queue listening'),
            (listen_to_result, 'result listening'),
        ]:
            listener = create_background_task(cb(app), title)

            async def stop_listener(app):
                listener.cancel()
                await listener

            app.on_cleanup.append(stop_listener)

    for path, templatename in [
        ("/", "index"),
        ("/contact", "contact"),
        ("/about", "about"),
        ("/apt", "apt"),
        ("/cupboard/", "cupboard"),
    ]:
        app.router.add_get(
            path,
            functools.partial(handle_simple, templatename + ".html"),
            name=templatename,
        )
    app.router.add_get("/credentials", handle_credentials, name="credentials")
    app.router.add_get("/ssh_keys", handle_ssh_keys, name="ssh-keys")
    app.router.add_get(r"/pgp_keys{extension:(\.asc)?}",
                       handle_pgp_keys,
                       name="pgp-keys")
    from .lintian_fixes import register_lintian_fixes_endpoints
    register_lintian_fixes_endpoints(app.router)
    from .multiarch_hints import register_multiarch_hints_endpoints
    register_multiarch_hints_endpoints(app.router)
    from .orphan import register_orphan_endpoints
    register_orphan_endpoints(app.router)
    from .debianize import register_debianize_endpoints
    register_debianize_endpoints(app.router)
    from .scrub_obsolete import register_scrub_obsolete_endpoints
    register_scrub_obsolete_endpoints(app.router)
    from .new_upstream import register_new_upstream_endpoints
    register_new_upstream_endpoints(app.router)
    SUITE_REGEX = "|".join([re.escape(suite.name) for suite in config.suite])
    app.router.add_get(
        "/{suite:%s}/merge-proposals" % SUITE_REGEX,
        handle_merge_proposals,
        name="suite-merge-proposals",
    )
    app.router.add_get("/{suite:%s}/ready" % SUITE_REGEX,
                       handle_ready_proposals,
                       name="suite-ready")
    app.router.add_get(
        "/{suite:%s}/maintainer" % SUITE_REGEX,
        handle_maintainer_list,
        name="suite-maintainer-list",
    )
    app.router.add_get("/{suite:%s}/pkg/" % SUITE_REGEX,
                       handle_pkg_list,
                       name="suite-package-list")
    app.router.add_get("/{vcs:git|bzr}/", handle_repo_list, name="repo-list")
    app.router.add_get("/{suite:unchanged}",
                       handle_apt_repo,
                       name="unchanged-start")
    app.router.add_get("/cupboard/history", handle_history, name="history")
    app.router.add_get("/cupboard/queue", handle_queue, name="queue")
    app.router.add_get("/cupboard/result-codes/",
                       handle_result_codes,
                       name="result-code-list")
    app.router.add_get("/cupboard/result-codes/{code}",
                       handle_result_codes,
                       name="result-code")
    app.router.add_get("/cupboard/never-processed",
                       handle_never_processed,
                       name="never-processed")
    app.router.add_get(
        "/cupboard/maintainer-stats",
        handle_cupboard_maintainer_stats,
        name="cupboard-maintainer-stats",
    )
    app.router.add_get("/cupboard/maintainer",
                       handle_maintainer_list,
                       name="maintainer-list")
    app.router.add_get(
        "/cupboard/maintainer/{maintainer}",
        handle_maintainer_overview,
        name="cupboard-maintainer-overview",
    )
    app.router.add_get(
        "/maintainer/{maintainer}",
        handle_maintainer_overview,
        name="maintainer-overview",
    )
    app.router.add_get("/m/",
                       handle_maintainer_index,
                       name="maintainer-index-short")
    app.router.add_get("/m/{maintainer}",
                       handle_maintainer_overview,
                       name="maintainer-overview-short")
    app.router.add_get("/cupboard/publish",
                       handle_publish_history,
                       name="publish-history")
    app.router.add_get("/cupboard/ready",
                       handle_ready_proposals,
                       name="cupboard-ready")
    app.router.add_get("/cupboard/pkg/", handle_pkg_list, name="package-list")
    app.router.add_get("/cupboard/pkg/{pkg}/",
                       handle_pkg,
                       name="cupboard-package")
    app.router.add_get("/cupboard/pkg/{pkg}/{run_id}/",
                       handle_run,
                       name="cupboard-run")
    app.router.add_get("/cupboard/review",
                       handle_review,
                       name="cupboard-review")
    app.router.add_get("/cupboard/rejected",
                       handle_rejected,
                       name="cupboard-rejected")
    app.router.add_post("/cupboard/review",
                        handle_review_post,
                        name="cupboard-review-post")
    app.router.add_get(
        "/cupboard/failed-lintian-brush-fixers/",
        handle_failed_lintian_brush_fixers_list,
        name="failed-lintian-brush-fixer-list",
    )
    app.router.add_get(
        "/cupboard/failed-lintian-brush-fixers/{fixer}",
        handle_failed_lintian_brush_fixers,
        name="failed-lintian-brush-fixer",
    )
    app.router.add_get(
        "/cupboard/lintian-brush-regressions/",
        handle_lintian_brush_regressions,
        name="lintian-brush-regressions",
    )
    app.router.add_get(
        "/cupboard/pkg/{pkg}/{run_id}/{filename:.+}",
        handle_result_file,
        name="cupboard-result-file",
    )
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/pkg/{pkg}/{run_id}/{filename:.+}",
        handle_result_file,
        name="result-file",
    )
    app.router.add_get("/{suite:" + SUITE_REGEX + "}/",
                       handle_generic_start,
                       name="generic-start")
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/candidates",
        handle_generic_candidates,
        name="generic-candidates",
    )
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/pkg/{pkg}/",
        handle_generic_pkg,
        name="generic-package",
    )
    app.router.add_get(
        "/{suite:" + SUITE_REGEX + "}/pkg/{pkg}/{run_id}",
        handle_generic_pkg,
        name="generic-run",
    )
    app.router.add_get("/cupboard/vcs-regressions/",
                       handle_vcs_regressions,
                       name="vcs-regressions")
    app.router.add_get("/cupboard/broken-merge-proposals",
                       handle_broken_mps,
                       name="broken-mps")
    app.router.add_get("/login", handle_login, name="login")
    for entry in os.scandir(os.path.join(os.path.dirname(__file__),
                                         "_static")):
        app.router.add_get(
            "/_static/%s" % entry.name,
            functools.partial(handle_static_file, entry.path),
        )
    app.router.add_static("/_static/images/datatables",
                          "/usr/share/javascript/jquery-datatables/images")
    for (name, kind, basepath) in [
        ("chart", "js", "/usr/share/javascript/chart.js/Chart"),
        ("chart", "css", "/usr/share/javascript/chart.js/Chart"),
        ("jquery", "js", "/usr/share/javascript/jquery/jquery"),
        (
            "jquery.typeahead",
            "js",
            "/usr/share/javascript/jquery-typeahead/jquery.typeahead",
        ),
        (
            "jquery.datatables",
            "js",
            "/usr/share/javascript/jquery-datatables/jquery.dataTables",
        ),
        ("moment", "js", "/usr/share/javascript/moment/moment"),
    ]:
        if not os.path.exists(basepath + "." + kind):
            continue
        app.router.add_get(
            "/_static/%s.%s" % (name, kind),
            functools.partial(handle_static_file,
                              "%s.%s%s" % (basepath, minified_prefix, kind)),
        )
    app.router.add_get("/oauth/callback",
                       handle_oauth_callback,
                       name="oauth2-callback")

    from .api import create_app as create_api_app
    from .webhook import process_webhook, is_webhook_request

    async def handle_post_root(request):
        if is_webhook_request(request):
            return await process_webhook(request, request.app.database)
        raise web.HTTPMethodNotAllowed(method='POST',
                                       allowed_methods=['GET', 'HEAD'])

    app.runner_url = runner_url
    app.archiver_url = archiver_url
    app.differ_url = differ_url
    app.policy = policy_config
    app.publisher_url = publisher_url
    app.vcs_store_url = vcs_store_url
    if config.oauth2_provider and config.oauth2_provider.base_url:
        app.on_startup.append(discover_openid_config)
    else:
        app.openid_config = None
    app.on_startup.append(start_pubsub_forwarder)
    app.on_startup.append(start_gpg_context)
    if external_url:
        app.external_url = URL(external_url)
    else:
        app.external_url = None
    database = state.Database(config.database_location)
    app.database = database
    from .stats import stats_app

    app.add_subapp("/cupboard/stats",
                   stats_app(database, config, app.external_url))
    app.config = config
    from janitor.site import env

    app.jinja_env = env
    from janitor.artifacts import get_artifact_manager

    async def startup_artifact_manager(app):
        app['artifact_manager'] = get_artifact_manager(
            config.artifact_location, trace_configs=trace_configs)
        await app['artifact_manager'].__aenter__()

    async def turndown_artifact_manager(app):
        await app['artifact_manager'].__aexit__(None, None, None)

    app.on_startup.append(startup_artifact_manager)
    app.on_cleanup.append(turndown_artifact_manager)
    setup_debsso(app)
    app.router.add_post("/", handle_post_root, name="root-post")
    app.router.add_get("/health", handle_health, name="health")
    app.add_subapp(
        "/api",
        create_api_app(
            app.database,
            publisher_url,
            runner_url,  # type: ignore
            archiver_url,
            vcs_store_url,
            differ_url,
            config,
            policy_config,
            external_url=(app.external_url.join(URL("api"))
                          if app.external_url else None),
            trace_configs=trace_configs,
        ),
    )
    import aiohttp_apispec
    app.router.add_static(
        '/static/swagger',
        os.path.join(os.path.dirname(aiohttp_apispec.__file__), "static"))

    if debugtoolbar:
        import aiohttp_debugtoolbar
        # install aiohttp_debugtoolbar
        aiohttp_debugtoolbar.setup(app, hosts=debugtoolbar)

    async def setup_logfile_manager(app):
        app.logfile_manager = get_log_manager(config.logs_location,
                                              trace_configs=trace_configs)

    app.on_startup.append(setup_logfile_manager)
    return app
Example #18
async def make_app(host, port):
    app = web.Application()
    app['instance_id'] = os.getenv('INSTANCE_ID', '1')
    app['tasks'] = []

    jaeger_address = os.getenv('JAEGER_ADDRESS')
    az.aiohttp_helpers._set_span_properties = _set_span_properties
    if jaeger_address:
        endpoint = az.create_endpoint(
            f"social_net_server_{app['instance_id']}", ipv4=host, port=port)
        tracer = await az.create(jaeger_address, endpoint, sample_rate=1.0)

        trace_config = az.make_trace_config(tracer)
        app['client_session'] = aiohttp.ClientSession(
            trace_configs=[trace_config])
    else:
        app['client_session'] = aiohttp.ClientSession()

    async def close_session(app):
        await app["client_session"].close()

    app.on_cleanup.append(close_session)

    app.add_routes([
        web.static('/static', STATIC_DIR),
        web.get("/", handle_index, name='index'),
        web.get("/login/", handle_login_get, name='login'),
        web.post("/login/", handle_login_post),
        web.get("/logout/", handle_logout_post),
        web.get("/register/", handle_register),
        web.post("/register/", handle_register),
        web.get("/userpage/", handle_userpage, name='user_page'),
        web.post("/chat/", handle_chat, name='chat_page'),
        web.get("/userpage/{uid}/", handle_userpage),
        web.get("/userlist/", hanlde_userlist),
        web.post("/add_friend/{uid}/", hanlde_add_friend),
        web.post("/del_friend/{uid}/", hanlde_del_friend),
        web.get("/newspage/", hanlde_newspage, name='news_page'),
        web.post("/add_post/", hanlde_add_post),
        web.view("/news_ws/", handle_news_ws),
        web.get('/api/user/', api_user.handle_user),
    ])

    # secret_key must be 32 url-safe base64-encoded bytes
    fernet_key = os.getenv('FERNET_KEY', fernet.Fernet.generate_key())
    secret_key = base64.urlsafe_b64decode(fernet_key)
    aiohttp_session.setup(app, EncryptedCookieStorage(secret_key))
    logging.debug('fernet_key: %r secret_key: %r', fernet_key, secret_key)

    app.middlewares.append(check_login)

    aiohttp_jinja2.setup(
        app,
        loader=jinja2.FileSystemLoader(TEMPLATE_DIR),
        context_processors=[username_ctx_processor],
    )

    database_url = os.getenv('CLEARDB_DATABASE_URL', None) or os.getenv(
        'DATABASE_URL', None)

    pool = await aiomysql.create_pool(**extract_database_credentials(
        database_url),
                                      maxsize=50,
                                      autocommit=True)

    app['db_pool'] = pool
    app.on_shutdown.append(lambda _app: close_db_pool(_app['db_pool']))

    database_ro_url = os.getenv('DATABASE_RO_URL', None)
    if database_ro_url:
        ro_pool = await aiomysql.create_pool(**extract_database_credentials(
            database_ro_url),
                                             maxsize=50,
                                             autocommit=True)

        app['db_ro_pool'] = ro_pool
        app.on_shutdown.append(lambda _app: close_db_pool(_app['db_ro_pool']))
    else:
        logging.warning('DATABASE_RO_URL not set')
        app['db_ro_pool'] = pool

    redis_url = os.getenv('REDIS_URL', None)
    if redis_url:
        app['arq_pool'] = await arq.create_pool(
            arq.connections.RedisSettings.from_dsn(redis_url))

        async def close_arq_pool(_app):
            _app['arq_pool'].close()
            await _app['arq_pool'].wait_closed()

        app.on_shutdown.append(close_arq_pool)

    tarantool_url = os.getenv('TARANTOOL_URL', None)
    if tarantool_url:

        app['tnt'] = asynctnt.Connection(
            **extract_tarantool_credentials(tarantool_url))
        await app['tnt'].connect()
        app.on_shutdown.append(app['tnt'].disconnect)

    rabbit_url = os.getenv('CLOUDAMQP_URL', os.getenv('RABBIT_URL', None))
    if rabbit_url:
        connection: aio_pika.Connection = await aio_pika.connect_robust(
            rabbit_url)
        app['rabbit'] = connection  # await connection.channel()
        await start_background_task(app, listen_news_updates(app))

    app['news_subscribers'] = defaultdict(dict)

    app.on_shutdown.append(stop_tasks)

    if jaeger_address:
        az.setup(app, tracer)

    await migrate_schema(pool)
    return app