async def test_get_always_the_same_client_session():
    """The session created for an app is cached and returned on every call."""
    app = web.Application()
    session = get_client_session(app)

    # the session must be stored on the app under the well-known key
    assert session in app.values()
    assert app[APP_CLIENT_SESSION_KEY] == session

    # repeated calls keep handing back the very same instance
    assert all(get_client_session(app) == session for _ in range(3))
async def update_service(
    app: web.Application,
    user_id: int,
    service_key: str,
    service_version: str,
    product_name: str,
    update_data: Dict[str, Any],
) -> Dict[str, Any]:
    """Patches a catalog service on behalf of a user and returns the updated entry.

    :raises web.HTTPServiceUnavailable: if the catalog times out or cannot be reached
    """
    # service_key may contain '/' so it is quoted into a single path segment
    url = (
        URL(app[KCATALOG_ORIGIN])
        .with_path(
            app[KCATALOG_VERSION_PREFIX]
            + f"/services/{urllib.parse.quote_plus(service_key)}/{service_version}"
        )
        .with_query({"user_id": user_id})
    )
    session = get_client_session(app)
    try:
        async with session.patch(
            url,
            headers={X_PRODUCT_NAME_HEADER: product_name},
            json=update_data,
        ) as resp:
            resp.raise_for_status()  # FIXME: error handling for session and response exceptions
            return await resp.json()
    # also map connection failures (not only timeouts) to 503, consistently
    # with make_request_and_envelope_response
    except (asyncio.TimeoutError, ClientConnectionError) as err:
        logger.warning("Catalog service connection error: %s", err)
        raise web.HTTPServiceUnavailable(
            reason="catalog is currently unavailable") from err
# Example #3
    async def copy_file_s3_datcore(self, user_id: str, dest_uuid: str,
                                   source_uuid: str):
        """Copies a file stored in simcore S3 into DATCore via a local temp copy."""
        session = get_client_session(self.app)

        # the source lives in S3: build a presigned download link for it
        src_bucket = self.simcore_bucket_name
        src_object = source_uuid
        src_filename = source_uuid.split("/")[-1]

        s3_download_url = self.s3_client.create_presigned_get_url(
            src_bucket, src_object)

        with tempfile.TemporaryDirectory() as tmp_dir:
            # FIXME: connect download and upload streams
            tmp_file = os.path.join(tmp_dir, src_filename)

            # S3 -> local
            await download_to_file_or_raise(session, s3_download_url, tmp_file)

            # local -> DATCore
            await self.upload_file_to_datcore(
                _user_id=user_id,
                _local_file_path=tmp_file,
                _destination_id=dest_uuid,
            )
# Example #4
def _get_storage_client(app: web.Application) -> Tuple[ClientSession, URL]:
    """Returns the shared client session together with the storage API endpoint."""
    settings: StorageSettings = get_plugin_settings(app)
    return get_client_session(app), URL(settings.base_url)
async def get_service(
    app: web.Application,
    user_id: int,
    service_key: str,
    service_version: str,
    product_name: str,
) -> Dict[str, Any]:
    """Fetches a single catalog service entry visible to a user.

    :raises web.HTTPServiceUnavailable: if the catalog times out or cannot be reached
    """
    session: ClientSession = get_client_session(app)
    settings: CatalogSettings = get_plugin_settings(app)

    # service_key may contain '/' so it is quoted into a single path segment
    url = (
        URL(settings.api_base_url)
        / f"services/{urllib.parse.quote_plus(service_key)}/{service_version}"
    ).with_query({"user_id": user_id})

    try:
        async with session.get(
            url, headers={X_PRODUCT_NAME_HEADER: product_name}
        ) as resp:
            resp.raise_for_status()  # FIXME: error handling for session and response exceptions
            return await resp.json()

    # also map connection failures (not only timeouts) to 503, consistently
    # with make_request_and_envelope_response
    except (asyncio.TimeoutError, ClientConnectionError) as err:
        logger.warning("Catalog service connection error: %s", err)
        raise web.HTTPServiceUnavailable(
            reason="catalog is currently unavailable") from err
# Example #6
async def make_request_and_envelope_response(
    app: web.Application,
    method: str,
    url: URL,
    headers: Optional[Dict[str, str]] = None,
    data: Optional[bytes] = None,
) -> web.Response:
    """
    Helper to forward a request to the catalog service
    """
    session = get_client_session(app)

    try:
        async with session.request(method, url, headers=headers,
                                   data=data) as resp:
            payload = await resp.json()

            try:
                resp.raise_for_status()
                enveloped = wrap_as_envelope(data=payload)
            except ClientResponseError as err:
                # a 5xx from the catalog is treated as the catalog being down
                if err.status >= 500:
                    raise
                # 4xx responses are enveloped and forwarded to the caller
                enveloped = wrap_as_envelope(error=payload["errors"])

            return web.json_response(enveloped, status=resp.status)

    except (asyncio.TimeoutError, ClientConnectionError,
            ClientResponseError) as err:
        logger.warning("Catalog service errors upon request %s %s: %s", method,
                       url.relative(), err)
        raise web.HTTPServiceUnavailable(
            reason="catalog is currently unavailable") from err
async def get_services_for_user_in_product(
        app: web.Application, user_id: int, product_name: str, *,
        only_key_versions: bool) -> List[Dict]:
    """Lists the catalog services a user can see within a product.

    Returns an empty list if the catalog answers with an error status.
    """
    url = (
        URL(app[KCATALOG_ORIGIN])
        .with_path(app[KCATALOG_VERSION_PREFIX] + "/services")
        .with_query({
            "user_id": user_id,
            "details": f"{not only_key_versions}",
        })
    )
    session = get_client_session(app)
    try:
        async with session.get(
                url,
                headers={X_PRODUCT_NAME_HEADER: product_name},
        ) as resp:
            if resp.status < 400:
                return await resp.json()
            # degrade gracefully on catalog-side errors
            logger.warning(
                "Error while retrieving services for user %s. Returning an empty list",
                user_id,
            )
            return []
    except asyncio.TimeoutError as err:
        logger.warning("Catalog service connection timeout error")
        raise web.HTTPServiceUnavailable(
            reason="catalog is currently unavailable") from err
def _get_director_client(app: web.Application) -> Tuple[ClientSession, URL]:
    """Returns the shared client session plus the director API endpoint from config."""
    cfg: Dict[str, Any] = get_config(app)

    # e.g. http://<host>:<port>/<version>
    endpoint = URL.build(
        scheme="http", host=cfg["host"], port=cfg["port"]
    ).with_path(cfg["version"])

    return get_client_session(app), endpoint
# Example #9
 def acquire_instance(cls, app: MutableMapping[str, Any],
                      settings: SciCrunchSettings) -> "SciCrunch":
     """Returns single instance for the application and stores it"""
     app_key = f"{__name__}.{cls.__name__}"
     instance = app.get(app_key)
     if instance is None:
         # lazily create the singleton and cache it on the app
         instance = cls(get_client_session(app), settings)
         app[app_key] = instance
     return instance
# Example #10
async def get_storage_locations_for_user(app: web.Application,
                                         user_id: int) -> List[Dict[str, Any]]:
    """Queries the storage service for the locations accessible to a user."""
    session = get_client_session(app)
    locations_url: URL = _get_base_storage_url(app) / "locations"

    async with session.get(locations_url, ssl=False,
                           params=dict(user_id=user_id)) as resp:
        payload, _ = cast(List[Dict[str, Any]], await safe_unwrap(resp))
        return payload
# Example #11
async def get_file_upload_url(app: web.Application, location_id: str,
                              fileId: str, user_id: int) -> str:
    """Requests an upload link from storage for a file in the given location."""
    session = get_client_session(app)

    # fileId may contain '/' characters, so quote it into a single path segment
    quoted_file_id = urllib.parse.quote(fileId, safe="")
    upload_url: URL = (_get_base_storage_url(app) / "locations" / location_id /
                       "files" / quoted_file_id)

    async with session.put(upload_url, ssl=False,
                           params=dict(user_id=user_id)) as resp:
        payload, _ = await safe_unwrap(resp)
        return extract_link(payload)
    def _get_client_session_info():
        """Collects diagnostics about the app-wide aiohttp client session."""
        client: ClientSession = get_client_session(request.app)
        info = {"instance": str(client)}

        # connector settings are only accessible while the session is open
        if not client.closed:
            info["limit"] = client.connector.limit
            info["limit_per_host"] = client.connector.limit_per_host

        return info
def app_mock():
    """Builds a minimal app carrying only the client session and director config."""
    app = web.Application()

    # the client session is created lazily on first access (normally at startup)
    session = get_client_session(app)
    assert session
    assert get_client_session(app) is session

    # mocks loading config
    settings = DirectorSettings()
    app[director_config.APP_CONFIG_KEY] = {
        director_config.CONFIG_SECTION_NAME:
        json.loads(settings.json(exclude={"url"}, by_alias=True))
    }

    assert setup_director(app, disable_routes=True)

    return app
async def test_app_client_session_json_serialize(
    server: TestServer, fake_data_dict: Dict[str, Any]
):
    """The app session serializes JSON payloads using the custom json_dumps."""
    session = get_client_session(server.app)

    resp = await session.post(server.make_url("/echo"), json=fake_data_dict)
    assert resp.status == 200

    echoed = await resp.json()

    # the round-trip must match what the custom serializer would produce
    assert echoed == json.loads(json_dumps(fake_data_dict))
# Example #15
async def fake_app(mock_env_devel_environment, loop):
    # .env-devel guarantees every required variable is at least defined
    print("app's environment variables", format(mock_env_devel_environment))

    app = {}
    setup_scicrunch_submodule(app)
    assert app

    yield app

    # teardown: release the client session created by the submodule
    await get_client_session(app).close()
# Example #16
async def get_status(request: aiohttp.web.Request):
    """Aggregates per-node cpu/memory usage and resource limits from prometheus.

    Returns a dict mapping node_id -> {"stats": {...}, "limits": {...}}.
    Raises HTTPNoContent when no data was collected at all.
    """
    session = get_client_session(request.app)
    user_id = request.get(RQT_USERID_KEY, -1)  # -1 when no user in the request

    config = request.app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]
    url = URL.build(
        scheme="http",
        host=config["prometheus_host"],
        port=config["prometheus_port"],
        path=f"/api/{config['prometheus_api_version']}/query",
    )
    # run the three prometheus queries concurrently; failures are captured as
    # exceptions in `results` (return_exceptions=True) and replaced by
    # defaults via get_prometheus_result_or_default below
    results = await asyncio.gather(
        get_cpu_usage(session, url, user_id),
        get_memory_usage(session, url, user_id),
        get_container_metric_for_labels(session, url, user_id),
        return_exceptions=True,
    )
    cpu_usage = get_prometheus_result_or_default(results[0], [])
    mem_usage = get_prometheus_result_or_default(results[1], [])
    metric = get_prometheus_result_or_default(results[2], [])

    res = defaultdict(dict)
    for node in cpu_usage:
        node_id = node["metric"]["container_label_node_id"]
        usage = float(node["value"][1])
        res[node_id] = {"stats": {"cpuUsage": usage}}

    # merge memory usage into existing node entries (or create new ones)
    for node in mem_usage:
        node_id = node["metric"]["container_label_node_id"]
        usage = float(node["value"][1])
        if node_id in res:
            res[node_id]["stats"]["memUsage"] = usage
        else:
            res[node_id] = {"stats": {"memUsage": usage}}

    # attach per-node resource limits; defaultdict creates entries for nodes
    # that appeared in neither usage query
    for node in metric:
        limits = {"cpus": 0.0, "mem": 0.0}
        metric_labels = node["metric"]
        limits["cpus"] = float(
            metric_labels.get("container_label_nano_cpus_limit", 0)) / pow(
                10, 9)  # Nanocpus to cpus
        limits["mem"] = float(metric_labels.get("container_label_mem_limit",
                                                0)) / pow(1024, 2)  # In MB
        node_id = metric_labels.get("container_label_node_id")
        res[node_id]["limits"] = limits

    if not res:
        raise aiohttp.web.HTTPNoContent

    return dict(res)
async def fake_app(mock_env_devel_environment, loop):
    # .env-devel guarantees every required variable is at least defined
    print("app's environment variables", format(mock_env_devel_environment))

    app = create_safe_application()
    setup_settings(app)
    setup_scicrunch(app)

    yield app

    # teardown: release the app-wide client session
    await get_client_session(app).close()
def app_mock():
    """Builds a minimal app with only the client session and director settings."""
    app = web.Application()

    # the client session is created lazily on first access (normally at startup)
    session = get_client_session(app)
    assert session
    assert get_client_session(app) is session

    # mocks loading config
    settings = DirectorSettings()

    app[director_config.APP_CONFIG_KEY] = {
        director_config.CONFIG_SECTION_NAME: {
            "host": settings.DIRECTOR_HOST,
            "port": settings.DIRECTOR_PORT,
            "version": settings.DIRECTOR_VTAG,
            "enabled": True,
        }
    }

    assert setup_director(app, disable_routes=True)

    return app
# Example #19
async def get_status(request: aiohttp.web.Request):
    """Aggregates per-node cpu/memory usage and resource limits from prometheus."""
    session = get_client_session(request.app)
    user_id = request.get(RQT_USERID_KEY, -1)

    prometheus_settings = get_plugin_settings(request.app)
    url = URL(prometheus_settings.base_url)

    # run the three prometheus queries concurrently; failures are captured
    # as exceptions and replaced by defaults below
    results = await asyncio.gather(
        get_cpu_usage(session, url, user_id),
        get_memory_usage(session, url, user_id),
        get_container_metric_for_labels(session, url, user_id),
        return_exceptions=True,
    )
    cpu_usage = get_prometheus_result_or_default(results[0], [])
    mem_usage = get_prometheus_result_or_default(results[1], [])
    metrics = get_prometheus_result_or_default(results[2], [])

    per_node = defaultdict(dict)

    for sample in cpu_usage:
        node_id = sample["metric"]["container_label_node_id"]
        per_node[node_id] = {"stats": {"cpuUsage": float(sample["value"][1])}}

    # merge memory usage into existing node entries (or create new ones)
    for sample in mem_usage:
        node_id = sample["metric"]["container_label_node_id"]
        mem = float(sample["value"][1])
        if node_id in per_node:
            per_node[node_id]["stats"]["memUsage"] = mem
        else:
            per_node[node_id] = {"stats": {"memUsage": mem}}

    # attach per-node resource limits (nanocpus -> cpus, bytes -> MB)
    for sample in metrics:
        labels = sample["metric"]
        limits = {
            "cpus":
            float(labels.get("container_label_nano_cpus_limit", 0)) /
            pow(10, 9),
            "mem":
            float(labels.get("container_label_mem_limit", 0)) / pow(1024, 2),
        }
        per_node[labels.get("container_label_node_id")]["limits"] = limits

    if not per_node:
        raise aiohttp.web.HTTPNoContent

    return dict(per_node)
# Example #20
async def _request_storage(request: web.Request, method: str, **kwargs):
    """Forwards the incoming request to the storage service, returning its JSON payload."""
    await extract_and_validate(request)

    url = _resolve_storage_url(request)
    # _token_data, _token_secret = _get_token_key_and_secret(request)

    body = await request.json() if request.can_read_body else None

    session = get_client_session(request.app)
    async with session.request(method.upper(),
                               url,
                               ssl=False,
                               json=body,
                               **kwargs) as resp:
        return await resp.json()
async def is_service_responsive(app: web.Application):
    """Returns true if catalog is ready.

    Performs a short GET probe (2s total, 1s connect) against the catalog
    base URL and returns False on any connection/response/timeout error.
    """
    try:
        session: ClientSession = get_client_session(app)
        settings: CatalogSettings = get_plugin_settings(app)

        # use a context manager so the response (and its pooled connection)
        # is always released — the original `await session.get(...)` leaked it
        async with session.get(
            settings.base_url,
            ssl=False,
            raise_for_status=True,
            timeout=ClientTimeout(total=2, connect=1),
        ):
            pass

    # asyncio.TimeoutError added: the 2s ClientTimeout raises it, and the
    # original let it propagate instead of reporting "not responsive"
    except (asyncio.TimeoutError, ClientConnectionError, ClientResponseError,
            InvalidURL, ValueError) as err:
        logger.warning("Catalog service unresponsive: %s", err)
        return False
    else:
        return True
# Example #22
async def get_project_files_metadata(app: web.Application, location_id: str,
                                     uuid_filter: str,
                                     user_id: int) -> List[Dict[str, Any]]:
    """Lists the metadata of files in storage matching uuid_filter.

    :raises web.HTTPInternalServerError: if storage returns no/invalid payload
    """
    session = get_client_session(app)

    url: URL = (_get_base_storage_url(app) / "locations" / location_id /
                "files" / "metadata")
    params = dict(user_id=user_id, uuid_filter=uuid_filter)
    async with session.get(url, ssl=False, params=params) as resp:
        data, _ = await safe_unwrap(resp)

        # NOTE: web.HTTPException is the abstract base (no concrete status)
        # and cannot be raised directly; use a concrete 5xx instead
        if data is None:
            raise web.HTTPInternalServerError(
                reason=f"No payload found in response: '{data}'")
        if not isinstance(data, list):
            raise web.HTTPInternalServerError(
                reason=f"No list payload received as data: '{data}'")

        return data
# Example #23
async def _assemble_cached_indexes(app: web.Application):
    """
    Fetches and caches the index.html of every available frontend app.

    The static resources contain one folder per frontend (e.g. osparc, s4l,
    tis), each holding an index.html served as the root of the site for that
    frontend. These indexes are fetched once at startup and cached on the app.
    """
    settings: StaticWebserverModuleSettings = get_settings(app)
    cached_indexes: Dict[str, str] = {}  # frontend name -> patched index body

    session: ClientSession = get_client_session(app)

    for frontend_name in FRONTEND_APPS_AVAILABLE:
        url = URL(settings.static_web_server_url) / frontend_name
        log.info("Fetching index from %s", url)

        try:
            # web-static server might still not be up
            # NOTE(review): if all retries fail with a non-ClientError,
            # tenacity raises RetryError which this except does not catch —
            # confirm RETRY_ON_STARTUP_POLICY sets reraise=True
            async for attempt in AsyncRetrying(**RETRY_ON_STARTUP_POLICY):
                with attempt:
                    response = await session.get(url, raise_for_status=True)

            body = await response.text()

        except ClientError as err:
            log.error("Could not fetch index from static server: %s", err)

            # ANE: Yes this is supposed to fail the boot process
            raise RuntimeError(
                f"Could not fetch index at {str(url)}. Stopping application boot"
            ) from err

        # fixes relative paths
        body = body.replace(f"../resource/{frontend_name}",
                            f"resource/{frontend_name}")
        body = body.replace("boot.js", f"{frontend_name}/boot.js")

        log.info("Storing index for %s", url)
        cached_indexes[frontend_name] = body

    app[APP_FRONTEND_CACHED_INDEXES_KEY] = cached_indexes
# Example #24
    async def copy_file_datcore_s3(
        self,
        user_id: str,
        dest_uuid: str,
        source_uuid: str,
        filename_missing: bool = False,
    ):
        """Copies a file from DATCore into simcore S3 via a local temp copy.

        :param filename_missing: when True, appends the source filename to
            dest_uuid to build the final destination
        :returns: the (possibly completed) destination uuid
        """
        session = get_client_session(self.app)

        # 2 steps: Get download link for local copy, the upload link to s3
        # TODO: This should be a redirect stream!
        dc_link, filename = await self.download_link_datcore(
            user_id=user_id, file_id=source_uuid)
        if filename_missing:
            dest_uuid = str(Path(dest_uuid) / filename)

        s3_upload_link = await self.upload_link(user_id, dest_uuid)

        with tempfile.TemporaryDirectory() as tmpdir:
            # FIXME: connect download and upload streams

            local_file_path = os.path.join(tmpdir, filename)

            # Downloads DATCore -> local
            await download_to_file_or_raise(session, dc_link, local_file_path)

            # Uploads local -> S3
            s3_upload_link = URL(s3_upload_link)
            # open via a context manager so the file handle is always closed
            # (the original passed an open handle that was never closed)
            with Path(local_file_path).open("rb") as file_obj:
                async with session.put(
                        s3_upload_link,
                        data=file_obj,
                        raise_for_status=True,
                ) as resp:
                    logger.debug(
                        "Uploaded local -> SIMCore %s . Status %s",
                        s3_upload_link,
                        resp.status,
                    )

        return dest_uuid
async def is_service_responsive(app: web.Application):
    """Returns true if catalog is ready.

    Performs a short GET probe (2s total, 1s connect) against the catalog
    origin and returns False on any connection/response/timeout error.
    """
    try:
        origin: Optional[URL] = app.get(KCATALOG_ORIGIN)
        if not origin:
            raise ValueError(
                "KCATALOG_ORIGIN was not initialized (app module was not enabled?)"
            )

        client: ClientSession = get_client_session(app)
        # use a context manager so the response (and its pooled connection)
        # is always released — the original `await client.get(...)` leaked it
        async with client.get(
            origin,
            ssl=False,
            raise_for_status=True,
            timeout=ClientTimeout(total=2, connect=1),
        ):
            pass

    # asyncio.TimeoutError added: the 2s ClientTimeout raises it, and the
    # original let it propagate instead of reporting "not responsive"
    except (asyncio.TimeoutError, ClientConnectionError, ClientResponseError,
            InvalidURL, ValueError) as err:
        logger.warning("Catalog service unresponsive: %s", err)
        return False
    else:
        return True
# Example #26
async def get_status(request: aiohttp.web.Request):
    """Aggregates per-node cpu/memory usage, limits and queued-task markers.

    Combines three prometheus queries with a celery inspection and returns a
    dict mapping node_id -> {"stats": {...}, "limits": {...}, "queued": True?}.
    Raises HTTPNoContent when no data was collected at all.
    """
    session = get_client_session(request.app)
    user_id = request.get(RQT_USERID_KEY, -1)  # -1 when no user in the request

    config = request.app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]
    url = URL.build(
        scheme="http",
        host=config["prometheus_host"],
        port=config["prometheus_port"],
        path=f"/api/{config['prometheus_api_version']}/query",
    )
    # all queries run concurrently; failures are captured as exceptions in
    # `results` (return_exceptions=True) and replaced by defaults below
    results = await asyncio.gather(
        get_cpu_usage(session, url, user_id),
        get_memory_usage(session, url, user_id),
        get_celery_reserved(request.app),
        get_container_metric_for_labels(session, url, user_id),
        return_exceptions=True,
    )
    cpu_usage = get_prometheus_result_or_default(results[0], [])
    mem_usage = get_prometheus_result_or_default(results[1], [])
    metric = get_prometheus_result_or_default(results[3], [])
    celery_inspect = results[2]

    res = defaultdict(dict)
    for node in cpu_usage:
        node_id = node["metric"]["container_label_node_id"]
        usage = float(node["value"][1])
        res[node_id] = {"stats": {"cpuUsage": usage}}

    # merge memory usage into existing node entries (or create new ones)
    for node in mem_usage:
        node_id = node["metric"]["container_label_node_id"]
        usage = float(node["value"][1])
        if node_id in res:
            res[node_id]["stats"]["memUsage"] = usage
        else:
            res[node_id] = {"stats": {"memUsage": usage}}

    # attach per-node resource limits; defaultdict creates entries for nodes
    # that appeared in neither usage query
    for node in metric:
        limits = {"cpus": 0, "mem": 0}
        metric_labels = node["metric"]
        limits["cpus"] = float(
            metric_labels.get("container_label_nano_cpus_limit", 0)
        ) / pow(
            10, 9
        )  # Nanocpus to cpus
        limits["mem"] = float(metric_labels.get("container_label_mem_limit", 0)) / pow(
            1024, 2
        )  # In MB
        node_id = metric_labels.get("container_label_node_id")
        res[node_id]["limits"] = limits

    # celery_inspect may be an exception instance (gather with
    # return_exceptions=True), hence the duck-typed hasattr check
    if hasattr(celery_inspect, "items"):
        for dummy_worker_id, worker in celery_inspect.items():
            for task in worker:
                # NOTE(review): task["args"] appears to be a stringified tuple;
                # positions 0 and 2 presumably hold user_id and node_id — confirm
                values = task["args"][1:-1].split(", ")
                if values[0] == str(user_id):  # Extracts user_id from task's args
                    node_id = values[2][1:-1]  # Extracts node_id from task's args
                    res[node_id]["queued"] = True

    if not res:
        raise aiohttp.web.HTTPNoContent

    return dict(res)
# Example #27
def test_setup_scicrunch_submodule(fake_app):
    """After setup, the SciCrunch singleton exists and reuses the app session."""
    # the submodule setup must have registered an instance on the app
    instance = SciCrunch.get_instance(fake_app)
    assert instance
    # ... and it must share the application-wide client session
    assert instance.client == get_client_session(fake_app)
# Example #28
def _get_director_client(app: web.Application) -> Tuple[ClientSession, URL]:
    """Returns the shared client session together with the director API endpoint."""
    settings: DirectorSettings = get_plugin_settings(app)
    return get_client_session(app), settings.base_url