async def test_failed_or_aborted_task_cleans_output_files(
    scheduler: BaseCompScheduler,
    minimal_app: FastAPI,
    user_id: PositiveInt,
    aiopg_engine: Iterator[aiopg.sa.engine.Engine],  # type: ignore
    mocked_dask_client_send_task: mock.MagicMock,
    published_project: PublishedProject,
    state: RunningState,
    mocked_clean_task_output_fct: mock.MagicMock,
    mocked_scheduler_task: None,
):
    # the published project fixture leaves the comp services in PUBLISHED state;
    # here we artificially invoke the scheduler's completion handler
    dask_scheduler = cast(DaskScheduler, scheduler)
    job_id = generate_dask_job_id(
        "simcore/service/comp/pytest/fake",
        "12.34.55",
        user_id,
        published_project.project.uuid,
        published_project.tasks[0].node_id,
    )
    state_event = TaskStateEvent(
        job_id=job_id,
        msg=TaskOutputData.parse_obj({"output_1": "some fake data"}).json(),
        state=state,
    )
    await dask_scheduler._on_task_completed(state_event)
    await assert_comp_tasks_state(
        aiopg_engine,
        published_project.project.uuid,
        [published_project.tasks[0].node_id],
        exp_state=state,
    )

    mocked_clean_task_output_fct.assert_called_once()
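
The `state` argument above is presumably supplied by a `pytest.mark.parametrize` decorator that is not part of this snippet. A minimal sketch of what it might look like, assuming `RunningState` lives in `models_library.projects_state` as elsewhere in this codebase:

import pytest
from models_library.projects_state import RunningState

# hypothetical parametrization matching the test name; the real decorator is
# not shown in this example
@pytest.mark.parametrize("state", [RunningState.FAILED, RunningState.ABORTED])
async def test_failed_or_aborted_task_cleans_output_files(state: RunningState):
    ...  # body as in the example above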
Example 2
async def test_parse_output_data(
    aiopg_engine: aiopg.sa.engine.Engine,  # type: ignore
    published_project: PublishedProject,
    user_id: UserID,
    fake_io_schema: Dict[str, Dict[str, str]],
    fake_task_output_data: TaskOutputData,
    mocker: MockerFixture,
):
    # seed the DB with a fake io schema and empty outputs for the task
    sleeper_task: CompTaskAtDB = published_project.tasks[1]
    no_outputs = {}
    await set_comp_task_outputs(aiopg_engine, sleeper_task.node_id,
                                fake_io_schema, no_outputs)
    # mock the set_value function so we can test it is called correctly
    mocked_node_ports_set_value_fct = mocker.patch(
        "simcore_sdk.node_ports_v2.port.Port.set_value")

    # test
    dask_job_id = generate_dask_job_id(
        sleeper_task.image.name,
        sleeper_task.image.tag,
        user_id,
        published_project.project.uuid,
        sleeper_task.node_id,
    )
    await parse_output_data(aiopg_engine, dask_job_id, fake_task_output_data)

    # FileUrl values are expected to be reduced to their plain URL
    expected_values = {
        k: v.url if isinstance(v, FileUrl) else v
        for k, v in fake_task_output_data.items()
    }
    mocked_node_ports_set_value_fct.assert_has_calls(
        [mock.call(value) for value in expected_values.values()])
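
For reference, a hypothetical shape for `fake_task_output_data`, mixing a plain value with a file entry. The exact `FileUrl` schema and the import path are assumptions; the test only relies on `FileUrl` exposing a `.url` attribute:

from dask_task_models_library.container_tasks.io import FileUrl, TaskOutputData  # path assumed

# illustration only: the field names inside the file entry are assumed
data = TaskOutputData.parse_obj(
    {
        "output_1": 42,  # non-file values are forwarded unchanged
        "output_2": {"url": "s3://bucket/fake_file.txt"},  # parsed as a FileUrl
    }
)
# what the test expects Port.set_value to receive, port by port
expected = {k: v.url if isinstance(v, FileUrl) else v for k, v in data.items()}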
Example 3
def creator(
    user: Dict[str, Any], project: ProjectAtDB, **overrides_kwargs
) -> List[CompTaskAtDB]:
    created_tasks: List[CompTaskAtDB] = []
    for internal_id, (node_id, node_data) in enumerate(project.workbench.items()):
        task_config = {
            "project_id": f"{project.uuid}",
            "node_id": f"{node_id}",
            "schema": {"inputs": {}, "outputs": {}},
            "inputs": {
                key: json.loads(value.json(by_alias=True, exclude_unset=True))
                if isinstance(value, BaseModel)
                else value
                for key, value in node_data.inputs.items()
            }
            if node_data.inputs
            else {},
            "outputs": {
                key: json.loads(value.json(by_alias=True, exclude_unset=True))
                if isinstance(value, BaseModel)
                else value
                for key, value in node_data.outputs.items()
            }
            if node_data.outputs
            else {},
            "image": Image(name=node_data.key, tag=node_data.version).dict(
                by_alias=True, exclude_unset=True
            ),
            "node_class": to_node_class(node_data.key),
            "internal_id": internal_id + 1,
            "submit": datetime.utcnow(),
            "job_id": generate_dask_job_id(
                service_key=node_data.key,
                service_version=node_data.version,
                user_id=user["id"],
                project_id=project.uuid,
                node_id=NodeID(node_id),
            ),
        }
        task_config.update(**overrides_kwargs)
        with postgres_db.connect() as conn:
            result = conn.execute(
                comp_tasks.insert()
                .values(**task_config)
                .returning(sa.literal_column("*"))
            )
            new_task = CompTaskAtDB.parse_obj(result.first())
            created_tasks.append(new_task)
    # record the ids once, after the loop; extending inside the loop would
    # register each task id multiple times
    created_task_ids.extend(t.task_id for t in created_tasks if t.task_id)
    return created_tasks
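
A hypothetical call site for this factory; the `state` override and the `StateType` import path are assumptions, used only to show that `overrides_kwargs` is applied to every inserted row:

from simcore_postgres_database.models.comp_pipeline import StateType  # path assumed

# hypothetical usage: each comp_tasks row gets the overridden column value
tasks = creator(user, project, state=StateType.PUBLISHED)
assert all(t.task_id is not None for t in tasks)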
Example 4
def test_dask_job_id_serialization(
    service_key: str,
    service_version: str,
    user_id: UserID,
    project_id: ProjectID,
    node_id: NodeID,
):
    dask_job_id = generate_dask_job_id(service_key, service_version, user_id,
                                       project_id, node_id)
    (
        parsed_service_key,
        parsed_service_version,
        parsed_user_id,
        parsed_project_id,
        parsed_node_id,
    ) = parse_dask_job_id(dask_job_id)
    assert service_key == parsed_service_key
    assert service_version == parsed_service_version
    assert user_id == parsed_user_id
    assert project_id == parsed_project_id
    assert node_id == parsed_node_id
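
The round trip above works because the job id is just a delimited string. A minimal sketch of the pair, assuming ":"-separated, prefix-tagged segments (the actual encoding in the dask utilities may differ):

from uuid import UUID

# assumption: ":"-joined, prefix-tagged segments; shown only to illustrate
# why parse_dask_job_id can invert generate_dask_job_id
def _generate(key: str, version: str, user_id: int, project_id: UUID, node_id: UUID) -> str:
    return f"{key}:{version}:userid_{user_id}:projectid_{project_id}:nodeid_{node_id}"

def _parse(job_id: str) -> tuple:
    key, version, user_s, project_s, node_s = job_id.split(":")
    return (
        key,
        version,
        int(user_s.removeprefix("userid_")),
        UUID(project_s.removeprefix("projectid_")),
        UUID(node_s.removeprefix("nodeid_")),
    )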