Example #1
    async def copy_file_datcore_s3(
        self,
        user_id: str,
        dest_uuid: str,
        source_uuid: str,
        filename_missing: bool = False,
    ):
        # 2 steps: get a download link for a local copy, then an upload link to S3
        # TODO: This should be a redirect stream!
        dc_link, filename = await self.download_link_datcore(
            user_id=user_id, file_id=source_uuid)
        if filename_missing:
            dest_uuid = str(Path(dest_uuid) / filename)

        s3_upload_link = await self.upload_link(user_id, dest_uuid)

        tmp_dirpath = tempfile.mkdtemp()
        local_file_path = os.path.join(tmp_dirpath, filename)
        session = get_client_session(self.app)

        try:
            # download the datcore file into the temporary directory
            async with session.get(dc_link) as resp:
                if resp.status == 200:
                    async with aiofiles.open(local_file_path, mode="wb") as f:
                        await f.write(await resp.read())
                    # then upload the local copy to S3 through the presigned link
                    s3_upload_link = URL(s3_upload_link)
                    with Path(local_file_path).open("rb") as file_obj:
                        async with session.put(
                                s3_upload_link, data=file_obj) as resp:
                            if resp.status > 299:
                                _response_text = await resp.text()
        finally:
            # always clean up the temporary download directory
            shutil.rmtree(tmp_dirpath)

        return dest_uuid
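
The TODO above hints at streaming the copy instead of staging a temporary file. A minimal sketch of that idea, assuming the same dc_link and s3_upload_link as in the method (note that a presigned S3 PUT usually expects a Content-Length, so a chunked body may need extra headers or buffering in practice):

import aiohttp

async def stream_copy(session: aiohttp.ClientSession,
                      dc_link: str, s3_upload_link: str) -> None:
    # forward download chunks straight into the upload body,
    # never touching the local filesystem
    async with session.get(dc_link) as download:
        download.raise_for_status()
        async with session.put(
            s3_upload_link,
            data=download.content.iter_chunked(64 * 1024),
        ) as upload:
            upload.raise_for_status()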
Example #2
async def get_status(request: aiohttp.web.Request):
    session = get_client_session(request.app)

    user_id = request.get(RQT_USERID_KEY, -1)

    config = request.app[APP_CONFIG_KEY]["activity"]
    url = (
        URL(config.get("prometheus_host"))
        .with_port(config.get("prometheus_port"))
        .with_path("api/" + config.get("prometheus_api_version") + "/query")
    )
    results = await asyncio.gather(
        get_cpu_usage(session, url, user_id),
        get_memory_usage(session, url, user_id),
        get_celery_reserved(request.app),
        get_container_metric_for_labels(session, url, user_id),
        return_exceptions=True,
    )
    cpu_usage = get_prometheus_result_or_default(results[0], [])
    mem_usage = get_prometheus_result_or_default(results[1], [])
    celery_inspect = results[2]
    metric = get_prometheus_result_or_default(results[3], [])

    res = defaultdict(dict)
    for node in cpu_usage:
        node_id = node["metric"]["container_label_node_id"]
        usage = float(node["value"][1])
        res[node_id] = {"stats": {"cpuUsage": usage}}

    for node in mem_usage:
        node_id = node["metric"]["container_label_node_id"]
        usage = float(node["value"][1])
        if node_id in res:
            res[node_id]["stats"]["memUsage"] = usage
        else:
            res[node_id] = {"stats": {"memUsage": usage}}

    for node in metric:
        metric_labels = node["metric"]
        limits = {
            # nanocpus to cpus
            "cpus": float(
                metric_labels.get("container_label_nano_cpus_limit", 0)) / pow(10, 9),
            # bytes to MB
            "mem": float(
                metric_labels.get("container_label_mem_limit", 0)) / pow(1024, 2),
        }
        node_id = metric_labels.get("container_label_node_id")
        res[node_id]["limits"] = limits

    if hasattr(celery_inspect, "items"):
        for _worker_id, worker in celery_inspect.items():
            for task in worker:
                # task["args"] is a string like "(user_id, project_id, 'node_id')"
                values = task["args"][1:-1].split(", ")
                if values[0] == str(user_id):  # extracts user_id from the task's args
                    node_id = values[2][1:-1]  # extracts node_id, stripping its quotes
                    res[node_id]["queued"] = True

    if not res:
        raise aiohttp.web.HTTPNoContent

    return dict(res)
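
A sketch of how a handler like this could be wired into an aiohttp application; the route path here is an assumption for illustration, not the service's actual route table:

import aiohttp.web

app = aiohttp.web.Application()
# hypothetical path; the real route is registered elsewhere in the service
app.router.add_get("/v0/activity/status", get_status)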
Example #3
    async def copy_file_s3_datcore(self, user_id: str, dest_uuid: str,
                                   source_uuid: str):
        # source is S3: get a presigned link, download a local copy, then upload to datcore
        bucket_name = self.simcore_bucket_name
        object_name = source_uuid
        filename = source_uuid.split("/")[-1]
        tmp_dirpath = tempfile.mkdtemp()
        local_file_path = os.path.join(tmp_dirpath, filename)
        url = self.s3_client.create_presigned_get_url(bucket_name, object_name)
        session = get_client_session(self.app)
        try:
            async with session.get(url) as resp:
                if resp.status == 200:
                    async with aiofiles.open(local_file_path, mode="wb") as f:
                        await f.write(await resp.read())
                    # and then upload
                    await self.upload_file_to_datcore(
                        user_id=user_id,
                        local_file_path=local_file_path,
                        destination_id=dest_uuid,
                    )
        finally:
            # clean up the temporary directory even if the download or upload fails
            shutil.rmtree(tmp_dirpath)
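
For reference, a presigned GET like the one create_presigned_get_url returns here can be produced with boto3 directly; the bucket, key, and expiry below are placeholders, not values from the service:

import boto3

s3 = boto3.client("s3")
# placeholder bucket/key; a one-hour expiry is an arbitrary choice
url = s3.generate_presigned_url(
    "get_object",
    Params={"Bucket": "simcore-bucket", "Key": "some/object/key"},
    ExpiresIn=3600,
)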