async def _set_states_following_failed_to_aborted(
    self, project_id: ProjectID, dag: nx.DiGraph
) -> Dict[str, CompTaskAtDB]:
    tasks: Dict[str, CompTaskAtDB] = await self._get_pipeline_tasks(project_id, dag)
    # collect every node downstream of a FAILED task; the failed task itself
    # keeps its FAILED state, only its descendants get aborted
    tasks_to_set_aborted: Set[NodeIDStr] = set()
    for task in tasks.values():
        if task.state == RunningState.FAILED:
            tasks_to_set_aborted.update(nx.bfs_tree(dag, f"{task.node_id}"))
            tasks_to_set_aborted.remove(f"{task.node_id}")
    for node_id_str in tasks_to_set_aborted:
        tasks[f"{node_id_str}"].state = RunningState.ABORTED
    if tasks_to_set_aborted:
        # persist the updated states back in the DB
        comp_tasks_repo: CompTasksRepository = cast(
            CompTasksRepository,
            get_repository(self.db_engine, CompTasksRepository),
        )
        await comp_tasks_repo.set_project_tasks_state(
            project_id,
            [NodeID(n) for n in tasks_to_set_aborted],
            RunningState.ABORTED,
        )
    return tasks
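
The key trick here is nx.bfs_tree: rooted at a failed node, it yields that node plus everything reachable downstream in the DAG. A minimal, self-contained sketch of the same pattern, using plain strings instead of NodeIDs and no database:

import networkx as nx

# toy pipeline: a -> b -> c, plus a -> d
dag = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "d")])

failed = "a"
# bfs_tree includes the source node, so drop it: only descendants are aborted
to_abort = set(nx.bfs_tree(dag, failed))
to_abort.remove(failed)
print(sorted(to_abort))  # ['b', 'c', 'd']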
Example #2
def creator(
    user: Dict[str, Any], project: ProjectAtDB, **overrides_kwargs
) -> List[CompTaskAtDB]:
    created_tasks: List[CompTaskAtDB] = []
    for internal_id, (node_id, node_data) in enumerate(project.workbench.items()):
        task_config = {
            "project_id": f"{project.uuid}",
            "node_id": f"{node_id}",
            "schema": {"inputs": {}, "outputs": {}},
            # pydantic models must be serialized to plain dicts before insertion
            "inputs": {
                key: json.loads(value.json(by_alias=True, exclude_unset=True))
                if isinstance(value, BaseModel)
                else value
                for key, value in node_data.inputs.items()
            }
            if node_data.inputs
            else {},
            "outputs": {
                key: json.loads(value.json(by_alias=True, exclude_unset=True))
                if isinstance(value, BaseModel)
                else value
                for key, value in node_data.outputs.items()
            }
            if node_data.outputs
            else {},
            "image": Image(name=node_data.key, tag=node_data.version).dict(
                by_alias=True, exclude_unset=True
            ),
            "node_class": to_node_class(node_data.key),
            "internal_id": internal_id + 1,
            "submit": datetime.utcnow(),
            "job_id": generate_dask_job_id(
                service_key=node_data.key,
                service_version=node_data.version,
                user_id=user["id"],
                project_id=project.uuid,
                node_id=NodeID(node_id),
            ),
        }
        task_config.update(**overrides_kwargs)
        with postgres_db.connect() as conn:
            result = conn.execute(
                comp_tasks.insert()
                .values(**task_config)
                .returning(sa.literal_column("*"))
            )
            new_task = CompTaskAtDB.parse_obj(result.first())
            created_tasks.append(new_task)
        # register each id once for cleanup (created_task_ids comes from the
        # enclosing fixture scope); re-extending with the whole list on every
        # iteration, as the original did, would register duplicates
        if new_task.task_id:
            created_task_ids.append(new_task.task_id)
    return created_tasks
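
Such a creator is typically the return value of a pytest factory fixture and is called inside tests, with keyword overrides becoming column values. A hypothetical usage (the fixture names and the state override are assumptions, not taken from the source):

def test_creates_one_task_per_node(tasks_factory, registered_user, project):
    # hypothetical: override the default state column for all created tasks
    tasks = tasks_factory(registered_user, project, state=RunningState.PUBLISHED)
    assert len(tasks) == len(project.workbench)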
Example #3
def parse_dask_job_id(
    job_id: str,
) -> Tuple[ServiceKeyStr, ServiceVersionStr, UserID, ProjectID, NodeID]:
    # a dask job id carries 6 ":"-separated parts:
    # {service_key}:{service_version}:userid_{user_id}:projectid_{project_id}:nodeid_{node_id}:{suffix}
    # the trailing suffix is not needed here and is dropped
    parts = job_id.split(":")
    assert len(parts) == 6  # nosec
    return (
        parts[0],
        parts[1],
        UserID(parts[2][len("userid_") :]),
        ProjectID(parts[3][len("projectid_") :]),
        NodeID(parts[4][len("nodeid_") :]),
    )
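
A quick illustration with a shape-correct but made-up job id (the service key, version, and all UUIDs are invented for the example):

job_id = (
    "simcore/services/comp/itis/sleeper:1.0.0"
    ":userid_2"
    ":projectid_15d79982-9273-435b-bab6-e5366ba19165"
    ":nodeid_3a710d8b-565c-5f46-870b-b45ebe195fc7"
    ":a8f94d9b-4f85-4a5e-9af3-0d9e1f1d2c3b"
)
key, version, user_id, project_id, node_id = parse_dask_job_id(job_id)
assert key == "simcore/services/comp/itis/sleeper"
assert user_id == 2  # assuming UserID is an int-based type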
Example #4

def _convert_to_pipeline_details(
    project: ProjectAtDB,
    exp_pipeline_adj_list: Dict[int, List[int]],
    exp_node_states: Dict[int, Dict[str, Any]],
) -> PipelineDetails:
    # the expected topology is expressed with workbench positions (ints);
    # map those indices back to the real node UUIDs
    workbench_node_uuids = list(project.workbench.keys())
    converted_adj_list: Dict[NodeID, List[NodeID]] = {}
    for node_key, next_nodes in exp_pipeline_adj_list.items():
        converted_adj_list[NodeID(workbench_node_uuids[node_key])] = [
            NodeID(workbench_node_uuids[n]) for n in next_nodes
        ]
    converted_node_states: Dict[NodeID, NodeState] = {
        NodeID(workbench_node_uuids[n]): NodeState(
            modified=s["modified"],
            dependencies={workbench_node_uuids[dep_n] for dep_n in s["dependencies"]},
            currentStatus=s.get("currentStatus", RunningState.NOT_STARTED),
        )
        for n, s in exp_node_states.items()
    }
    return PipelineDetails(
        adjacency_list=converted_adj_list, node_states=converted_node_states
    )
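
Concretely, a two-node workbench where the first node feeds the second can be described positionally as below (a hypothetical call; project stands for a ProjectAtDB fixture):

exp_adj_list = {0: [1], 1: []}
exp_node_states = {
    0: {"modified": True, "dependencies": set()},
    1: {"modified": True, "dependencies": {0}},
}
details = _convert_to_pipeline_details(project, exp_adj_list, exp_node_states)
assert len(details.adjacency_list) == 2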
Example #5
@classmethod
def from_service(cls, service: Dict[str, Any]) -> "ServiceLabelsStoredData":
    labels = service["Spec"]["Labels"]
    params = dict(
        service_name=service["Spec"]["Name"],
        node_uuid=NodeID(labels["uuid"]),
        key=labels["service_key"],
        version=labels["service_tag"],
        paths_mapping=PathMappingsLabel.parse_raw(labels["paths_mapping"]),
        dynamic_sidecar_network_name=labels["traefik.docker.network"],
        simcore_traefik_zone=labels["io.simcore.zone"],
        service_port=labels["service_port"],
        project_id=ProjectID(labels["study_id"]),
        user_id=int(labels["user_id"]),
    )
    # optional labels are only forwarded when the docker service defines them
    if "compose_spec" in labels:
        params["compose_spec"] = labels["compose_spec"]
    if "container_http_entry" in labels:
        params["container_http_entry"] = labels["container_http_entry"]
    if "restart_policy" in labels:
        params["restart_policy"] = labels["restart_policy"]
    return cls(**params)
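
The service argument mirrors what Docker's service-inspect API returns. A minimal sketch of the expected shape (all values invented; in particular, the field names inside the paths_mapping JSON are assumptions about what PathMappingsLabel accepts):

service = {
    "Spec": {
        "Name": "dy-sidecar_3a710d8b-565c-5f46-870b-b45ebe195fc7",
        "Labels": {
            "uuid": "3a710d8b-565c-5f46-870b-b45ebe195fc7",
            "service_key": "simcore/services/dynamic/jupyter-math",
            "service_tag": "2.0.1",
            "paths_mapping": '{"inputs_path": "/inputs", "outputs_path": "/outputs"}',
            "traefik.docker.network": "dy-sidecar-network",
            "io.simcore.zone": "internal_simcore",
            "service_port": "8888",
            "study_id": "15d79982-9273-435b-bab6-e5366ba19165",
            "user_id": "2",
        },
    }
}
stored = ServiceLabelsStoredData.from_service(service)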
Example #6
@pytest.fixture
def node_id(faker: Faker) -> NodeID:
    # a fresh, valid node id per test, via Faker's uuid4 provider
    return NodeID(faker.uuid4())
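
As a pytest fixture it is injected by name, so any test declaring a node_id parameter receives a new random id. A hypothetical test (relying on NodeID stringifying to the canonical 36-character UUID form):

def test_node_id_is_a_valid_uuid(node_id: NodeID):
    assert len(f"{node_id}") == 36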
Example #7
         not_forced_exp_dag={
             "e1e2ea96-ce8f-5abc-8712-b8ed312a782c": [
                 "6ede1209-b459-5735-91fc-761aa584808d"
             ],
             "415fefd1-d08b-53c1-adb0-16bed3a687ef": [
                 "6ede1209-b459-5735-91fc-761aa584808d"
             ],
             "6ede1209-b459-5735-91fc-761aa584808d": [],
         },
     ),
     id="no sub selection returns the full graph",
 ),
 pytest.param(
     MinimalGraphTest(
         subgraph=[
             NodeID("8902d36c-bc65-5b0d-848f-88aed72d7849"),
             NodeID("3a710d8b-565c-5f46-870b-b45ebe195fc7"),
             NodeID("415fefd1-d08b-53c1-adb0-16bed3a687ef"),
             NodeID("e1e2ea96-ce8f-5abc-8712-b8ed312a782c"),
             NodeID("6ede1209-b459-5735-91fc-761aa584808d"),
             NodeID("82d7a25c-18d4-44dc-a997-e5c9a745e7fd"),
         ],
         force_exp_dag={
             "3a710d8b-565c-5f46-870b-b45ebe195fc7": [
                 "415fefd1-d08b-53c1-adb0-16bed3a687ef"
             ],
             "e1e2ea96-ce8f-5abc-8712-b8ed312a782c": [
                 "6ede1209-b459-5735-91fc-761aa584808d"
             ],
             "415fefd1-d08b-53c1-adb0-16bed3a687ef": [
                 "6ede1209-b459-5735-91fc-761aa584808d"