async def stop_pipeline(request: web.Request) -> web.Response:
    client = DirectorV2ApiClient(request.app)
    run_policy = get_project_run_policy(request.app)
    assert run_policy  # nosec

    user_id = UserID(request[RQT_USERID_KEY])
    project_id = ProjectID(request.match_info["project_id"])

    try:
        project_ids: List[ProjectID] = await run_policy.get_runnable_projects_ids(
            request, project_id
        )
        log.debug("Project %s will stop %d variants", project_id, len(project_ids))

        await asyncio.gather(*(client.stop(pid, user_id) for pid in project_ids))

        # FIXME: our middleware has this issue
        #
        #  if 'return web.HTTPNoContent()' then 'await response.json()' raises ContentTypeError
        #  if 'raise web.HTTPNoContent()' then 'await response.json() == None'
        #
        raise web.HTTPNoContent()

    except DirectorServiceError as exc:
        return create_error_response(
            exc,
            reason=exc.reason,
            http_error_cls=get_http_error(exc.status) or web.HTTPServiceUnavailable,
        )
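
# A hedged sketch (not from the source) of how a handler like `stop_pipeline`
# is registered in an aiohttp application; the route path below is an
# assumption for illustration only.
def _setup_stop_pipeline_route(app: web.Application) -> None:
    app.router.add_post("/v0/computations/{project_id}:stop", stop_pipeline)
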
def parse_dask_job_id(
    job_id: str,
) -> Tuple[ServiceKeyStr, ServiceVersionStr, UserID, ProjectID, NodeID]:
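    # Expected job_id layout, inferred from the parsing below (the trailing
    # unique suffix is length-checked but discarded):
    #   {key}:{version}:userid_{uid}:projectid_{pid}:nodeid_{nid}:{suffix}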
    parts = job_id.split(":")
    assert len(parts) == 6  # nosec
    return (
        parts[0],
        parts[1],
        UserID(parts[2][len("userid_") :]),
        ProjectID(parts[3][len("projectid_") :]),
        NodeID(parts[4][len("nodeid_") :]),
    )
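
# A hedged round-trip example for `parse_dask_job_id`; the ids below are
# invented, only the colon-separated layout is taken from the parser itself.
_example_job_id = (
    "simcore/services/comp/sleeper:1.0.0"
    ":userid_3"
    ":projectid_9c86b9ae-9f4f-4a5c-8b1a-5b3d1f6c2a10"
    ":nodeid_6e4a2f3b-1d2c-4e5f-9a8b-7c6d5e4f3a21"
    ":uuid_0f1d2c3b-4a5e-6f7a-8b9c-0d1e2f3a4b5c"
)
# parse_dask_job_id(_example_job_id) would return:
#   ("simcore/services/comp/sleeper", "1.0.0", UserID(3), ProjectID(...), NodeID(...))
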
async def projects_redirection_middleware(request: web.Request, handler: _Handler):
    """Intercepts /projects/{project_id}* requests and redirect them to the copy @HEAD

    Any given project has a unique identifier 'project_id' but, when activated,
    it also has a version history (denoted 'checkpoints' in the API).

    In that case, GET /projects/1234 shall refer to the HEAD version of the project
    with id 1234, also denoted the project's working copy (in short 'workcopy project')

    All metaprojects are versioned so this middleware intercepts calls to GET project
    and ensures that the response body includes the correct workcopy of the requested
    project.
    """

    if URL_PATTERN.match(f"{request.rel_url}"):
        #
        # TODO: because hierarchical design is not guaranteed, we find ourselves with
        # entries like /v0/computation/pipeline/{project_id}:start which might also need
        # indirection
        #

        project_id, path_param = _match_project_id(request)
        if project_id and path_param:
            vc_repo = VersionControlForMetaModeling(request)

            if repo_id := await vc_repo.get_repo_id(ProjectID(project_id)):
                # Replaces the resolved project_id path parameter with the
                # project's working copy
                # TODO: optimize db calls
                #
                workcopy_project_id = await vc_repo.get_workcopy_project_id(repo_id)
                request.match_info[path_param] = f"{workcopy_project_id}"

                if f"{workcopy_project_id}" != f"{project_id}":
                    request[RQ_REQUESTED_REPO_PROJECT_UUID_KEY] = workcopy_project_id
                    log.debug(
                        "Redirecting request with %s to working copy %s",
                        f"{project_id=}",
                        f"{workcopy_project_id=}",
                    )

    # a middleware must always hand over to the next handler in the chain
    return await handler(request)
async def _create_meta_project_iterations_handler(
    request: web.Request,
) -> web.Response:
    # TODO: check access to non owned projects user_id = request[RQT_USERID_KEY]
    # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735

    url_for = create_url_for_function(request)
    vc_repo = VersionControlForMetaModeling(request)

    _project_uuid = ProjectID(request.match_info["project_uuid"])
    _ref_id = request.match_info["ref_id"]
    try:
        commit_id = CommitID(_ref_id)
    except ValueError as err:
        # e.g. HEAD
        raise NotImplementedError(
            "cannot convert ref (e.g. HEAD) -> commit id") from err

    # core function ----
    project_iterations = await create_or_get_project_iterations(
        vc_repo, _project_uuid, commit_id)

    # parse and validate response ----
    iterations_items = [
        ProjectIterationAsItem(
            name=f"projects/{_project_uuid}/checkpoint/{commit_id}/iterations/{iter_id}",
            parent=ParentMetaProjectRef(project_id=_project_uuid, ref_id=commit_id),
            workcopy_project_id=wcp_id,
            workcopy_project_url=url_for(
                "get_project",
                project_id=wcp_id,
            ),
            url=url_for(
                f"{__name__}._create_meta_project_iterations_handler",
                project_uuid=_project_uuid,
                ref_id=commit_id,
            ),
        )
        for wcp_id, iter_id in project_iterations
    ]

    return envelope_json_response(iterations_items, web.HTTPCreated)
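
# A hedged sketch of the enveloped 201 payload this handler returns; values are
# invented, the shape follows `ProjectIterationAsItem` and the `{"data": ...}`
# envelope used elsewhere in this section:
# {
#   "data": [
#     {
#       "name": "projects/<project_uuid>/checkpoint/<commit_id>/iterations/<iter_id>",
#       "parent": {"project_id": "<project_uuid>", "ref_id": <commit_id>},
#       "workcopy_project_id": "<uuid>",
#       "workcopy_project_url": "<url>",
#       "url": "<url>"
#     }
#   ]
# }
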
@classmethod
def from_service(cls, service: Dict[str, Any]) -> "ServiceLabelsStoredData":
    labels = service["Spec"]["Labels"]
    params = dict(
        service_name=service["Spec"]["Name"],
        node_uuid=NodeID(labels["uuid"]),
        key=labels["service_key"],
        version=labels["service_tag"],
        paths_mapping=PathMappingsLabel.parse_raw(labels["paths_mapping"]),
        dynamic_sidecar_network_name=labels["traefik.docker.network"],
        simcore_traefik_zone=labels["io.simcore.zone"],
        service_port=labels["service_port"],
        project_id=ProjectID(labels["study_id"]),
        user_id=int(labels["user_id"]),
    )
    if "compose_spec" in labels:
        params["compose_spec"] = labels["compose_spec"]
    if "container_http_entry" in labels:
        params["container_http_entry"] = labels["container_http_entry"]
    if "restart_policy" in labels:
        params["restart_policy"] = labels["restart_policy"]
    return cls(**params)
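
# A hedged sketch of the Docker service-inspection payload `from_service`
# expects. The keys mirror the lookups above; all values (and the JSON shape of
# "paths_mapping") are invented for illustration.
_example_service = {
    "Spec": {
        "Name": "dy-sidecar_example",
        "Labels": {
            "uuid": "6e4a2f3b-1d2c-4e5f-9a8b-7c6d5e4f3a21",
            "service_key": "simcore/services/dynamic/jupyter",
            "service_tag": "2.1.0",
            "paths_mapping": '{"inputs_path": "/inputs", "outputs_path": "/outputs"}',  # assumed shape
            "traefik.docker.network": "dy-sidecar-net",
            "io.simcore.zone": "internal",
            "service_port": "8888",
            "study_id": "9c86b9ae-9f4f-4a5c-8b1a-5b3d1f6c2a10",
            "user_id": "3",
        },
    }
}
# ServiceLabelsStoredData.from_service(_example_service) builds the model from
# these labels; optional keys ("compose_spec", "container_http_entry",
# "restart_policy") may be present as well.
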
def project_id(faker: Faker) -> ProjectID:
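    # NOTE: presumably a @pytest.fixture in the source (decorator not shown);
    # builds a random, valid ProjectID for tests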
    return ProjectID(faker.uuid4())
async def start_pipeline(request: web.Request) -> web.Response:
    client = DirectorV2ApiClient(request.app)

    run_policy = get_project_run_policy(request.app)
    assert run_policy  # nosec

    user_id = UserID(request[RQT_USERID_KEY])
    project_id = ProjectID(request.match_info["project_id"])

    subgraph: Set[str] = set()
    force_restart: bool = False  # TODO: deprecate this entry
    cluster_id: NonNegativeInt = 0

    if request.can_read_body:
        body = await request.json()
        subgraph = set(body.get("subgraph", []))  # keep the Set[str] annotation honest
        force_restart = bool(body.get("force_restart", force_restart))
        cluster_id = body.get("cluster_id", cluster_id)  # preserve the 0 default

    options = {
        "start_pipeline": True,
        "subgraph": list(subgraph),  # sets are not natively json serializable
        "force_restart": force_restart,
        "cluster_id": cluster_id,
    }

    try:
        running_project_ids: List[ProjectID]
        project_vc_commits: List[CommitID]

        (
            running_project_ids,
            project_vc_commits,
        ) = await run_policy.get_or_create_runnable_projects(request, project_id)
        log.debug(
            "Project %s will start %d variants: %s",
            f"{project_id=}",
            len(running_project_ids),
            f"{running_project_ids=}",
        )

        assert running_project_ids  # nosec
        assert (  # nosec
            len(running_project_ids) == len(project_vc_commits)
            if project_vc_commits
            else True
        )

        # asyncio.gather returns a list, one started-pipeline id per project
        _started_pipelines_ids: List[str] = await asyncio.gather(
            *(client.start(pid, user_id, **options) for pid in running_project_ids)
        )

        assert set(_started_pipelines_ids) == set(
            map(str, running_project_ids)
        )  # nosec

        data: Dict[str, Any] = {
            "pipeline_id": project_id,
        }
        # Optional
        if project_vc_commits:
            data["ref_ids"] = project_vc_commits

        return web.json_response(
            {"data": data},
            status=web.HTTPCreated.status_code,
            dumps=json_dumps,
        )

    except DirectorServiceError as exc:
        return create_error_response(
            exc,
            reason=exc.reason,
            http_error_cls=get_http_error(exc.status) or web.HTTPServiceUnavailable,
        )
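
# A hedged example of the optional JSON body `start_pipeline` accepts,
# reconstructed from the `body.get(...)` lookups above; values are invented.
_example_start_pipeline_body = {
    "subgraph": ["6e4a2f3b-1d2c-4e5f-9a8b-7c6d5e4f3a21"],  # node ids to run
    "force_restart": False,  # defaults to False when omitted
    "cluster_id": 0,  # defaults to 0 when omitted
}
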
async def replace_project(request: web.Request):
    """Implements PUT /projects

     In a PUT request, the enclosed entity is considered to be a modified version of
     the resource stored on the origin server, and the client is requesting that the
     stored version be replaced.

     With PATCH, however, the enclosed entity contains a set of instructions describing how a
     resource currently residing on the origin server should be modified to produce a new version.

     Also, another difference is that when you want to update a resource with PUT request, you have to send
     the full payload as the request whereas with PATCH, you only send the parameters which you want to update.

    :raises web.HTTPNotFound: cannot find project id in repository
    """
    user_id: int = request[RQT_USERID_KEY]
    try:
        project_uuid = ProjectID(request.match_info["project_id"])
        new_project = await request.json()

        # Prune state field (just in case)
        new_project.pop("state", None)

    except AttributeError as err:
        # NOTE: if new_project is not a dict, .pop will raise this error
        raise web.HTTPBadRequest(
            reason="Invalid request payload, expected a project model"
        ) from err
    except KeyError as err:
        raise web.HTTPBadRequest(
            reason=f"Invalid request parameter {err}") from err
    except json.JSONDecodeError as exc:
        raise web.HTTPBadRequest(reason="Invalid request body") from exc

    db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI]
    await check_permission(
        request,
        "project.update | project.workbench.node.inputs.update",
        context={
            "dbapi": db,
            "project_id": f"{project_uuid}",
            "user_id": user_id,
            "new_data": new_project,
        },
    )

    try:
        await projects_api.validate_project(request.app, new_project)

        current_project = await projects_api.get_project_for_user(
            request.app,
            project_uuid=f"{project_uuid}",
            user_id=user_id,
            include_templates=True,
            include_state=True,
        )

        if current_project["accessRights"] != new_project["accessRights"]:
            await check_permission(request, "project.access_rights.update")

        if await director_v2_api.is_pipeline_running(
            request.app, user_id, project_uuid
        ):
            if any_node_inputs_changed(new_project, current_project):
                # NOTE: This is a conservative measure that we take
                #  until nodeports logic is re-designed to tackle this
                #  particular state.
                #
                # This measure avoids a state with different node *links* in the
                # comp-tasks table and in the project's workbench column.
                # The limitation is that nodeports only "sees" the links in the
                # comp-tasks table, and that table does not get the new ones since
                # it remains "blocked" for modification from that project while the
                # pipeline runs. Therefore any extra link created while the pipeline
                # is running cannot be managed by nodeports because it basically
                # "cannot see it".
                #
                # Responds https://httpstatuses.com/409:
                #  The request could not be completed due to a conflict with the
                #  current state of the target resource (i.e. pipeline is running).
                #  This code is used in situations where the user might be able to
                #  resolve the conflict and resubmit the request (front-end will
                #  show a pop-up with the message below)
                #
                raise web.HTTPConflict(
                    reason=f"Project {project_uuid} cannot be modified while pipeline is still running."
                )

        new_project = await db.replace_user_project(
            new_project, user_id, f"{project_uuid}", include_templates=True
        )
        await director_v2_api.create_or_update_pipeline(
            request.app, user_id, project_uuid
        )
        # Appends state
        new_project = await projects_api.add_project_states_for_user(
            user_id=user_id,
            project=new_project,
            is_template=False,
            app=request.app,
        )

    except ValidationError as exc:
        raise web.HTTPBadRequest(
            reason=f"Invalid project update: {exc.message}") from exc

    except ProjectInvalidRightsError as exc:
        raise web.HTTPForbidden(
            reason="You do not have sufficient rights to save the project"
        ) from exc

    except ProjectNotFoundError as exc:
        raise web.HTTPNotFound() from exc

    return {"data": new_project}
async def _list_meta_project_iterations_handler(
    request: web.Request,
) -> web.Response:
    # TODO: check access to non owned projects user_id = request[RQT_USERID_KEY]
    # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735

    # parse and validate request ----
    url_for = create_url_for_function(request)
    vc_repo = VersionControlForMetaModeling(request)

    _project_uuid = ProjectID(request.match_info["project_uuid"])
    _ref_id = request.match_info["ref_id"]

    _limit = int(request.query.get("limit", DEFAULT_NUMBER_OF_ITEMS_PER_PAGE))
    _offset = int(request.query.get("offset", 0))

    try:
        commit_id = CommitID(_ref_id)
    except ValueError as err:
        # e.g. HEAD
        raise NotImplementedError(
            "cannot convert ref (e.g. HEAD) -> commit id") from err

    # core function ----
    iterations = await _get_project_iterations_range(
        vc_repo, _project_uuid, commit_id, offset=_offset, limit=_limit
    )

    if iterations.total_count == 0:
        raise web.HTTPNotFound(
            reason=f"No iterations found for project {_project_uuid=}/{commit_id=}"
        )

    assert len(iterations.items) <= _limit  # nosec

    # parse and validate response ----
    page_items = [
        ProjectIterationAsItem(
            name=f"projects/{_project_uuid}/checkpoint/{commit_id}/iterations/{iter_id}",
            parent=ParentMetaProjectRef(project_id=_project_uuid, ref_id=commit_id),
            workcopy_project_id=wcp_id,
            workcopy_project_url=url_for(
                "get_project",
                project_id=wcp_id,
            ),
            url=url_for(
                f"{__name__}._list_meta_project_iterations_handler",
                project_uuid=_project_uuid,
                ref_id=commit_id,
            ),
        )
        for wcp_id, iter_id in iterations.items
    ]

    page = Page[ProjectIterationAsItem].parse_obj(
        paginate_data(
            chunk=page_items,
            request_url=request.url,
            total=iterations.total_count,
            limit=_limit,
            offset=_offset,
        ))
    return web.Response(
        text=page.json(**RESPONSE_MODEL_POLICY),
        content_type="application/json",
    )
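
# A hedged example of paging through the endpoint above. The route path is an
# assumption (derived from the iteration `name` built in the handler); the
# `limit`/`offset` query parameters come straight from the handler itself:
#   GET /v0/projects/{project_uuid}/checkpoint/{ref_id}/iterations?limit=20&offset=40
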
def parse_workcopy_project_tag_name(name: str) -> Optional[ProjectID]:
    if m := re.match(rf"^project:(?P<workcopy_project_id>{UUID_RE})$", name):
        data = m.groupdict()
        return ProjectID(data["workcopy_project_id"])
    return None
async def get_or_create_runnable_projects(
    request: web.Request,
    project_uuid: ProjectID,
) -> Tuple[List[ProjectID], List[CommitID]]:
    """
    Returns ids and refid of projects that can run
    If project_uuid is a std-project, then it returns itself
    If project_uuid is a meta-project, then it returns iterations
    """

    vc_repo = VersionControlForMetaModeling(request)
    assert vc_repo.user_id  # nosec

    try:
        project: ProjectDict = await vc_repo.get_project(str(project_uuid))
    except UserUndefined as err:
        raise web.HTTPForbidden(reason="Unauthenticated request") from err

    project_nodes: Dict[NodeID, Node] = {
        nid: Node.parse_obj(n)
        for nid, n in project["workbench"].items()
    }

    # init returns
    runnable_project_vc_commits: List[CommitID] = []
    runnable_project_ids: List[ProjectID] = [
        project_uuid,
    ]

    # auto-commit
    #   because it will run in parallel -> needs an independent working copy
    repo_id = await vc_repo.get_repo_id(project_uuid)
    if repo_id is None:
        repo_id = await vc_repo.init_repo(project_uuid)

    main_commit_id = await vc_repo.commit(
        repo_id,
        tag=f"auto:main/{project_uuid}",
        message=f"auto-commit {now_str()}",
    )
    runnable_project_vc_commits.append(main_commit_id)

    # std-project
    is_meta_project = any(
        is_iterator_service(node.key) for node in project_nodes.values())
    if not is_meta_project:
        return runnable_project_ids, runnable_project_vc_commits

    # meta-project: resolve project iterations
    runnable_project_ids = []
    runnable_project_vc_commits = []

    iterations = _build_project_iterations(project_nodes)
    log.debug(
        "Project %s with %s parameters, produced %s variants",
        project_uuid,
        # each iteration is a (parameters, updated_nodes) pair, so count the
        # parameters of the first iteration
        len(iterations[0][0]) if iterations else 0,
        len(iterations),
    )

    # Each iteration generates a set of 'parameters'
    #  - parameters are set in the corresponding outputs of the meta-nodes
    #
    parameters: Parameters
    updated_nodes: NodesDict
    total_count = len(iterations)
    original_name = project["name"]

    # FIXME: in an optimization, iteration_index should start with LAST iterated index
    for iteration_index, (parameters, updated_nodes) in enumerate(
        iterations, start=1
    ):
        log.debug(
            "Creating snapshot of project %s with parameters=%s [%s]",
            f"{project_uuid=}",
            f"{parameters=}",
            f"{updated_nodes=}",
        )

        project["name"] = f"{original_name}/{iteration_index}"
        project["workbench"].update({
            # converts model in dict patching first thumbnail
            nid: n.copy(update={
                "thumbnail": n.thumbnail or ""
            }).dict(by_alias=True, exclude_unset=True)
            for nid, n in updated_nodes.items()
        })

        project_iteration = ProjectIteration(
            repo_id=repo_id,
            repo_commit_id=main_commit_id,
            iteration_index=iteration_index,
            total_count=total_count,
            parameters_checksum=_compute_params_checksum(parameters),
        )

        # tag to identify this iteration
        branch_name = tag_name = project_iteration.to_tag_name()

        commit_id = await vc_repo.create_workcopy_and_branch_from_commit(
            repo_id,
            start_commit_id=main_commit_id,
            project=project,
            branch_name=branch_name,
            tag_name=tag_name,
            tag_message=json.dumps(parameters),
        )

        workcopy_project_id = await vc_repo.get_workcopy_project_id(repo_id, commit_id)

        runnable_project_ids.append(ProjectID(workcopy_project_id))
        runnable_project_vc_commits.append(commit_id)

    return runnable_project_ids, runnable_project_vc_commits
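
# A hedged sketch (not from the source) of how a run policy uses this function
# from a start-pipeline handler:
#
#   project_ids, commit_ids = await get_or_create_runnable_projects(request, project_uuid)
#   for pid in project_ids:
#       await client.start(pid, user_id)
#
# A std-project yields [project_uuid] plus a single auto-commit; a meta-project
# yields one working-copy project id (and one commit) per iteration.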