Example #1
async def authenticate(host: str, port: str, method: str, path: str,
                       authorization_headers: dict[str, str]) -> str:
    """Authenticate a request based on its headers.

    :param host: The Authentication Service host.
    :param port: The Authentication Service port.
    :param method: The Authentication Service method.
    :param path: The Authentication Service path.
    :param authorization_headers: The headers that contain the authentication metadata.
    :return: The authenticated user identifier.
    """
    authentication_url = URL(f"http://{host}:{port}{path}")
    authentication_method = method
    logger.info("Authenticating request...")

    try:
        async with ClientSession(headers=authorization_headers) as session:
            async with session.request(method=authentication_method,
                                       url=authentication_url) as response:
                if not response.ok:
                    raise web.HTTPUnauthorized(
                        text="The given request does not have authorization "
                             "to be forwarded."
                    )

                payload = await response.json()
                return payload["sub"]

    except ClientConnectorError:
        raise web.HTTPGatewayTimeout(
            text="The Authentication Service is not available.")
Example #2
    async def _precache():
        try:
            return await precache(request.app, old_run['id'], new_run['id'])
        except ArtifactsMissing as e:
            raise web.HTTPNotFound(
                text="No artifacts for run id: %r" % e,
                headers={"unavailable_run_id": e.args[0]},
            )
        except asyncio.TimeoutError:
            raise web.HTTPGatewayTimeout(text="Timeout retrieving artifacts")
Example #3
async def precache(app, old_id, new_id):
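    """Retrieve the binary artifacts for two runs and pre-generate the
    cached debdiff and diffoscope results for them."""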
    with ExitStack() as es:
        old_dir = es.enter_context(TemporaryDirectory())
        new_dir = es.enter_context(TemporaryDirectory())

        await asyncio.gather(
            app.artifact_manager.retrieve_artifacts(
                old_id, old_dir, filter_fn=is_binary, timeout=PRECACHE_RETRIEVE_TIMEOUT
            ),
            app.artifact_manager.retrieve_artifacts(
                new_id, new_dir, filter_fn=is_binary, timeout=PRECACHE_RETRIEVE_TIMEOUT
            ),
        )

        old_binaries = find_binaries(old_dir)
        if not old_binaries:
            raise ArtifactsMissing(old_id)

        new_binaries = find_binaries(new_dir)
        if not new_binaries:
            raise ArtifactsMissing(new_id)

        debdiff_cache_path = app.debdiff_cache_path(old_id, new_id)

        if debdiff_cache_path and not os.path.exists(debdiff_cache_path):
            with open(debdiff_cache_path, "wb") as f:
                f.write(
                    await run_debdiff(
                        [p for (n, p) in old_binaries], [p for (n, p) in new_binaries]
                    )
                )
            logging.info("Precached debdiff result for %s/%s", old_id, new_id)

        diffoscope_cache_path = app.diffoscope_cache_path(old_id, new_id)
        if diffoscope_cache_path and not os.path.exists(diffoscope_cache_path):
            try:
                diffoscope_diff = await asyncio.wait_for(
                    run_diffoscope(
                        old_binaries, new_binaries,
                        lambda: _set_limits(app.task_memory_limit)), app.task_timeout
                )
            except MemoryError:
                raise web.HTTPServiceUnavailable(text="diffoscope used too much memory")
            except asyncio.TimeoutError:
                raise web.HTTPGatewayTimeout(text="diffoscope timed out")

            try:
                with open(diffoscope_cache_path, "w") as f:
                    json.dump(diffoscope_diff, f)
            except (TypeError, ValueError) as e:
                raise web.HTTPServerError(text=str(e))
            logging.info("Precached diffoscope result for %s/%s", old_id, new_id)
Example #4
    def _try_validator_request(self, message_type, content):
        """
        Sends a protobuf message to the validator
        Handles a possible timeout if validator is unresponsive
        """
        timeout = 5
        timeout_msg = 'Could not reach validator, validator timed out'

        if isinstance(content, BaseMessage):
            content = content.SerializeToString()

        future = self._stream.send(message_type=message_type, content=content)

        try:
            response = future.result(timeout=timeout)
        except FutureTimeoutError as e:
            print(str(e))
            raise web.HTTPGatewayTimeout(reason=timeout_msg)

        return response.content
Example #5
async def discover(host: str, port: int, path: str, verb: str,
                   endpoint: str) -> dict[str, Any]:
    """Call discovery service and get microservice connection data.

    :param host: Discovery host name.
    :param port: Discovery port.
    :param path: Discovery path.
    :param verb: Endpoint verb.
    :param endpoint: Endpoint URL.
    :return: The response from the Discovery Service.
    """

    url = URL.build(scheme="http",
                    host=host,
                    port=port,
                    path=path,
                    query={
                        "verb": verb,
                        "path": endpoint
                    })
    try:
        async with ClientSession() as session:
            async with session.get(url=url) as response:
                if not response.ok:
                    if response.status == 404:
                        raise web.HTTPNotFound(
                            text=f"The {endpoint!r} path is not available "
                                 f"for the {verb!r} method."
                        )
                    raise web.HTTPBadGateway(
                        text="The Discovery Service response is wrong.")

                data = await response.json()
    except ClientConnectorError:
        raise web.HTTPGatewayTimeout(
            text="The Discovery Service is not available.")

    data["port"] = int(data["port"])

    return data
Example #6
async def get_metrics(request: web.Request) -> web.Response:
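    """Scrape counters from the switch named by the ``target`` query
    parameter and return them in Prometheus text exposition format."""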
    try:
        target = request.query['target']
    except KeyError:
        raise web.HTTPBadRequest(text='target parameter omitted')
    cache = request.app['cache']
    switch = cache.get(target)
    timeout = request.app['scrape_timeout']
    try:
        with async_timeout.timeout(timeout), switch:
            counters = await switch.scrape()
    except asyncio.CancelledError:
        raise
    except asyncio.TimeoutError:
        raise web.HTTPGatewayTimeout(
            text='Scrape timed out after {}s'.format(timeout))
    except Exception as exc:
        # Possibly a failed connection, so reset it
        switch.destroy()
        raise web.HTTPInternalServerError(text='Scrape failed: ' + str(exc))
    else:
        content = prometheus_client.generate_latest(counters).decode()
        return web.Response(text=content)
Example #7
    async def _redirect(self, request, connector):
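        """Forward the incoming request upstream via the given connector and
        stream the response back to the client."""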

        if request.host in (
            socket.getfqdn(),
            f'{self.config["hostname"]}:{self.config["port"]}',
        ):
            raise web.HTTPBadRequest(
                text=(
                    "ERROR: Recursion error. "
                    "Invalid 'Host' header specified.\n"
                )
            )

        self.logger.debug(
            f"Request (redirecting): method={request.method!r} "
            f"path={request.path!r}, query_string={request.query_string!r}, "
            f"headers={request.headers!r}, remote={request.remote!r}"
        )

        req_headers = request.headers
        body = await request.read()

        # modify headers
        if self.config["num_forwarded"]:
            req_headers = copy.deepcopy(dict(req_headers))
            req_headers["X-Forwarded-For"] = request.remote

            self.logger.debug(
                f"Request headers (redirecting, modified): {req_headers!r}"
            )

        try:
            async with aiohttp.ClientSession(
                headers=req_headers,
                connector=connector,
                timeout=self.client_timeout,
                connector_owner=False,
                auto_decompress=False,
            ) as session:
                async with session.request(
                    request.method,
                    request.url,
                    data=body,
                ) as resp:

                    self.logger.debug(
                        f"Response: {resp.reason}: resp.status={resp.status}, "
                        f"resp.request_info={resp.request_info}, "
                        f"resp.url={resp.url}, resp.headers={resp.headers}"
                    )
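                    # Mirror the upstream status and headers, and stream the
                    # body back to the client as it arrives.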
                    proxied_response = web.StreamResponse(
                        headers=resp.headers, status=resp.status
                    )
                    if (
                        resp.headers.get("Transfer-Encoding", "").lower()
                        == "chunked"
                    ):
                        proxied_response.enable_chunked_encoding()

                    await proxied_response.prepare(request)

                    async for data in resp.content.iter_any():
                        await proxied_response.write(data)

                    await proxied_response.write_eof()
                    return proxied_response

        except ConnectionResetError as err:
            self.logger.warning(f"Connection reset by peer: {err}")
            # TODO(damb): Would implementing a retry mechanism be an
            # alternative?
            raise web.HTTPNoContent()
        except asyncio.TimeoutError as err:
            self.logger.warning(
                f"Error while executing request: error={type(err)}, "
                f"url={request.url!r}, method={request.method!r}"
            )
            raise web.HTTPGatewayTimeout(text=f"ERROR: {str(type(err))}\n")

        except aiohttp.ClientError as err:
            msg = (
                f"Error while executing request: error={type(err)}, "
                f"msg={err}, url={request.url!r}, method={request.method!r}"
            )
            if isinstance(err, aiohttp.ClientOSError):
                msg += f", errno={err.errno}"

            self.logger.warning(msg)
            raise web.HTTPServiceUnavailable(text=f"ERROR: {str(type(err))}\n")
Example #8
async def handle_debdiff(request):
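    """Return the debdiff between the artifacts of two runs, generating
    and caching it on demand."""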
    old_id = request.match_info["old_id"]
    new_id = request.match_info["new_id"]

    old_run, new_run = await get_run_pair(request.app.db, old_id, new_id)

    cache_path = request.app.debdiff_cache_path(old_run['id'], new_run['id'])
    if cache_path:
        try:
            with open(cache_path, "rb") as f:
                debdiff = f.read()
        except FileNotFoundError:
            debdiff = None
    else:
        debdiff = None

    if debdiff is None:
        logging.info(
            "Generating debdiff between %s (%s/%s/%s) and %s (%s/%s/%s)",
            old_run['id'],
            old_run['package'],
            old_run['build_version'],
            old_run['suite'],
            new_run['id'],
            new_run['package'],
            new_run['build_version'],
            new_run['suite'],
        )
        with ExitStack() as es:
            old_dir = es.enter_context(TemporaryDirectory())
            new_dir = es.enter_context(TemporaryDirectory())

            try:
                await asyncio.gather(
                    request.app.artifact_manager.retrieve_artifacts(
                        old_run['id'], old_dir, filter_fn=is_binary
                    ),
                    request.app.artifact_manager.retrieve_artifacts(
                        new_run['id'], new_dir, filter_fn=is_binary
                    ),
                )
            except ArtifactsMissing as e:
                raise web.HTTPNotFound(
                    text="No artifacts for run id: %r" % e,
                    headers={"unavailable_run_id": e.args[0]},
                )
            except asyncio.TimeoutError:
                raise web.HTTPGatewayTimeout(text="Timeout retrieving artifacts")

            old_binaries = find_binaries(old_dir)
            if not old_binaries:
                raise web.HTTPNotFound(
                    text="No artifacts for run id: %s" % old_run['id'],
                    headers={"unavailable_run_id": old_run['id']},
                )

            new_binaries = find_binaries(new_dir)
            if not new_binaries:
                raise web.HTTPNotFound(
                    text="No artifacts for run id: %s" % new_run['id'],
                    headers={"unavailable_run_id": new_run['id']},
                )

            try:
                debdiff = await run_debdiff(
                    [p for (n, p) in old_binaries], [p for (n, p) in new_binaries]
                )
            except DebdiffError as e:
                return web.Response(status=400, text=e.args[0])

        if cache_path:
            with open(cache_path, "wb") as f:
                f.write(debdiff)

    if "filter_boring" in request.query:
        debdiff = filter_debdiff_boring(
            debdiff.decode(), str(old_run['build_version']), str(new_run['build_version'])
        ).encode()

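    # Content negotiation: serve the first media type listed in the Accept
    # header that we can produce.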
    for accept in request.headers.get("ACCEPT", "*/*").split(","):
        if accept in ("text/x-diff", "text/plain", "*/*"):
            return web.Response(body=debdiff, content_type="text/plain")
        if accept == "text/markdown":
            return web.Response(
                text=markdownify_debdiff(debdiff.decode("utf-8", "replace")),
                content_type="text/markdown",
            )
        if accept == "text/html":
            return web.Response(
                text=htmlize_debdiff(debdiff.decode("utf-8", "replace")),
                content_type="text/html",
            )
    raise web.HTTPNotAcceptable(
        text="Acceptable content types: text/html, text/plain, text/markdown"
    )
Example #9
async def handle_diffoscope(request):
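    """Return a diffoscope report for the artifacts of two runs, generating
    and caching it on demand."""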
    for accept in request.headers.get("ACCEPT", "*/*").split(","):
        if accept in ("text/plain", "*/*"):
            content_type = "text/plain"
            break
        elif accept in ("text/html",):
            content_type = "text/html"
            break
        elif accept in ("application/json",):
            content_type = "application/json"
            break
        elif accept in ("text/markdown",):
            content_type = "text/markdown"
            break
    else:
        raise web.HTTPNotAcceptable(
            text="Acceptable content types: text/html, text/plain, "
                 "application/json, text/markdown"
        )

    old_id = request.match_info["old_id"]
    new_id = request.match_info["new_id"]

    old_run, new_run = await get_run_pair(request.app.db, old_id, new_id)

    cache_path = request.app.diffoscope_cache_path(old_run['id'], new_run['id'])
    if cache_path:
        try:
            with open(cache_path, "rb") as f:
                diffoscope_diff = json.load(f)
        except FileNotFoundError:
            diffoscope_diff = None
    else:
        diffoscope_diff = None

    if diffoscope_diff is None:
        logging.info(
            "Generating diffoscope between %s (%s/%s/%s) and %s (%s/%s/%s)",
            old_run['id'],
            old_run['package'],
            old_run['build_version'],
            old_run['suite'],
            new_run['id'],
            new_run['package'],
            new_run['build_version'],
            new_run['suite'],
        )
        with ExitStack() as es:
            old_dir = es.enter_context(TemporaryDirectory())
            new_dir = es.enter_context(TemporaryDirectory())

            try:
                await asyncio.gather(
                    request.app.artifact_manager.retrieve_artifacts(
                        old_run['id'], old_dir, filter_fn=is_binary
                    ),
                    request.app.artifact_manager.retrieve_artifacts(
                        new_run['id'], new_dir, filter_fn=is_binary
                    ),
                )
            except ArtifactsMissing as e:
                raise web.HTTPNotFound(
                    text="No artifacts for run id: %r" % e,
                    headers={"unavailable_run_id": e.args[0]},
                )
            except asyncio.TimeoutError:
                raise web.HTTPGatewayTimeout(text="Timeout retrieving artifacts")

            old_binaries = find_binaries(old_dir)
            if not old_binaries:
                raise web.HTTPNotFound(
                    text="No artifacts for run id: %s" % old_run['id'],
                    headers={"unavailable_run_id": old_run['id']},
                )

            new_binaries = find_binaries(new_dir)
            if not new_binaries:
                raise web.HTTPNotFound(
                    text="No artifacts for run id: %s" % new_run['id'],
                    headers={"unavailable_run_id": new_run['id']},
                )

            try:
                diffoscope_diff = await asyncio.wait_for(
                    run_diffoscope(
                        old_binaries, new_binaries,
                        lambda: _set_limits(request.app.task_memory_limit)),
                    request.app.task_timeout
                )
            except MemoryError:
                raise web.HTTPServiceUnavailable(text="diffoscope used too much memory")
            except asyncio.TimeoutError:
                raise web.HTTPGatewayTimeout(text="diffoscope timed out")

        if cache_path is not None:
            with open(cache_path, "w") as f:
                json.dump(diffoscope_diff, f)

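    # Label the two sides of the report with package, version and suite.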
    diffoscope_diff["source1"] = "%s version %s (%s)" % (
        old_run['package'],
        old_run['build_version'],
        old_run['suite'],
    )
    diffoscope_diff["source2"] = "%s version %s (%s)" % (
        new_run['package'],
        new_run['build_version'],
        new_run['suite'],
    )

    filter_diffoscope_irrelevant(diffoscope_diff)

    title = "diffoscope for %s applied to %s" % (new_run['suite'], new_run['package'])

    if "filter_boring" in request.query:
        filter_diffoscope_boring(
            diffoscope_diff,
            str(old_run['build_version']),
            str(new_run['build_version']),
            old_run['suite'],
            new_run['suite'],
        )
        title += " (filtered)"

    debdiff = await format_diffoscope(
        diffoscope_diff,
        content_type,
        title=title,
        jquery_url=request.query.get("jquery_url"),
        css_url=request.query.get("css_url"),
    )

    return web.Response(text=debdiff, content_type=content_type)