async def test_misconfigured_pipeline_is_not_scheduled(
    mocked_scheduler_task: None,
    scheduler: BaseCompScheduler,
    minimal_app: FastAPI,
    user_id: PositiveInt,
    project: Callable[..., ProjectAtDB],
    pipeline: Callable[..., CompPipelineAtDB],
    fake_workbench_without_outputs: Dict[str, Any],
    fake_workbench_adjacency: Dict[str, Any],
    aiopg_engine: Iterator[aiopg.sa.engine.Engine],  # type: ignore
):
    """A pipeline which comp_tasks are missing should not be scheduled.
    It shall be aborted and shown as such in the comp_runs db"""
    sleepers_project = project(workbench=fake_workbench_without_outputs)
    sleepers_pipeline = pipeline(
        project_id=f"{sleepers_project.uuid}",
        dag_adjacency_list=fake_workbench_adjacency,
    )
    # check the pipeline is correctly added to the scheduled pipelines
    await scheduler.run_new_pipeline(
        user_id=user_id,
        project_id=sleepers_project.uuid,
        cluster_id=minimal_app.state.settings.DASK_SCHEDULER.DASK_DEFAULT_CLUSTER_ID,
    )
    assert len(scheduler.scheduled_pipelines) == 1
    assert (
        scheduler.wake_up_event.is_set()
    ), "the scheduler was NOT woken up on the scheduled pipeline!"
    for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items():
        assert u_id == user_id
        assert p_id == sleepers_project.uuid
        assert it > 0
        assert params.mark_for_cancellation is False
    # check the database was properly updated
    async with aiopg_engine.acquire() as conn:  # type: ignore
        result = await conn.execute(
            comp_runs.select().where(
                (comp_runs.c.user_id == user_id)
                & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}")
            )  # there is only one entry
        )
        run_entry = CompRunsAtDB.parse_obj(await result.first())
    assert run_entry.result == RunningState.PUBLISHED
    # let the scheduler kick in
    await manually_run_comp_scheduler(scheduler)
    # check the scheduled pipelines list is empty again since the pipeline is misconfigured
    assert len(scheduler.scheduled_pipelines) == 0
    # check the database entry is correctly updated
    async with aiopg_engine.acquire() as conn:  # type: ignore
        result = await conn.execute(
            comp_runs.select().where(
                (comp_runs.c.user_id == user_id)
                & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}")
            )  # there is only one entry
        )
        run_entry = CompRunsAtDB.parse_obj(await result.first())
    assert run_entry.result == RunningState.ABORTED
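
The test drives the scheduler explicitly via manually_run_comp_scheduler, which is not shown in this excerpt. A minimal sketch of such a helper, assuming BaseCompScheduler exposes a schedule_all_pipelines coroutine (an assumption, not confirmed by this excerpt):

async def manually_run_comp_scheduler(scheduler: BaseCompScheduler) -> None:
    # trigger one scheduling pass directly instead of waiting for the
    # background scheduler task to wake up (schedule_all_pipelines is assumed)
    await scheduler.schedule_all_pipelines()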
Code example #2
def creator(project: ProjectAtDB, **run_kwargs) -> CompRunsAtDB:
    # insert a comp_runs row for the given project, with defaults
    # that can be overridden through run_kwargs
    run_config = {
        "project_uuid": f"{project.uuid}",
        "user_id": f"{user_db['id']}",
        "iteration": 1,
        "result": StateType.NOT_STARTED,
    }
    run_config.update(**run_kwargs)
    with postgres_db.connect() as conn:
        result = conn.execute(
            comp_runs.insert().values(**run_config).returning(literal_column("*"))
        )
        new_run = CompRunsAtDB.parse_obj(result.first())
        # keep track of the created rows so the fixture can clean them up
        created_run_ids.append(new_run.run_id)
        return new_run
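
Code example #2 is the inner factory of a pytest fixture that inserts comp_runs rows with overridable defaults. A hypothetical usage inside a test, assuming the factory is exposed as a fixture named runs (the name and the override are assumptions):

# the fixture name "runs" and the keyword override are assumptions
run = runs(project=sleepers_project, result=StateType.PUBLISHED)
assert run.iteration == 1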
Code example #3
async def _assert_comp_run_state(
    aiopg_engine: Iterator[aiopg.sa.engine.Engine],
    user_id: UserID,
    project_uuid: ProjectID,
    exp_state: RunningState,
):
    # check the database is correctly updated with the expected state
    async with aiopg_engine.acquire() as conn:  # type: ignore
        result = await conn.execute(
            comp_runs.select().where(
                (comp_runs.c.user_id == user_id)
                & (comp_runs.c.project_uuid == f"{project_uuid}")
            )  # there is only one entry
        )
        run_entry = CompRunsAtDB.parse_obj(await result.first())
    assert run_entry.result == exp_state
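
Such a helper keeps the state checks in the tests short. A hypothetical call, reusing the fixtures from code example #1:

# hypothetical usage with the fixtures from code example #1
await _assert_comp_run_state(
    aiopg_engine, user_id, sleepers_project.uuid, exp_state=RunningState.ABORTED
)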