async def assert_comp_tasks_state(
    aiopg_engine: Iterator[aiopg.sa.engine.Engine],  # type: ignore
    project_uuid: ProjectID,
    task_ids: List[NodeID],
    exp_state: RunningState,
):
    # check the database is correctly updated, the run is published
    async with aiopg_engine.acquire() as conn:  # type: ignore
        result = await conn.execute(
            comp_tasks.select().where(
                (comp_tasks.c.project_id == f"{project_uuid}")
                & (comp_tasks.c.node_id.in_([f"{n}" for n in task_ids]))
            )  # there is only one entry
        )
        tasks = parse_obj_as(List[CompTaskAtDB], await result.fetchall())
    assert all(  # pylint: disable=use-a-generator
        [t.state == exp_state for t in tasks]
    ), f"expected state: {exp_state}, found: {[t.state for t in tasks]}"
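# Hedged usage sketch (not from the original module): how a test might drive
# assert_comp_tasks_state after scheduling a pipeline. The `published_project`
# fixture, its `uuid`/`workbench` attributes, and the `run_pipeline` helper are
# assumptions for illustration only; an async-capable pytest plugin is assumed.
async def test_tasks_are_published(aiopg_engine, published_project):
    task_ids = [NodeID(node_id) for node_id in published_project.workbench]
    await run_pipeline(published_project.uuid)
    await assert_comp_tasks_state(
        aiopg_engine,
        published_project.uuid,
        task_ids,
        exp_state=RunningState.PUBLISHED,
    )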
def updator(project_uuid: str):
    with postgres_db.connect() as con:
        result = con.execute(projects.select().where(projects.c.uuid == project_uuid))
        prj_row = result.first()
        prj_workbench = prj_row.workbench

        result = con.execute(
            comp_tasks.select().where(comp_tasks.c.project_id == project_uuid)
        )
        # let's get the results and run_hash
        for task_row in result:
            # pass these to the project workbench
            prj_workbench[task_row.node_id]["outputs"] = task_row.outputs
            prj_workbench[task_row.node_id]["runHash"] = task_row.run_hash

        con.execute(
            projects.update()
            .values(workbench=prj_workbench)
            .where(projects.c.uuid == project_uuid)
        )
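# Hedged usage sketch (illustration only): once a pipeline has run, `updator`
# can back-fill the project's workbench from comp_tasks, and the result can be
# checked directly in the database. The `project` fixture and its `uuid`
# attribute are assumptions, and every workbench node is assumed to have a
# corresponding comp_tasks entry.
def test_workbench_is_synced_with_comp_tasks(project):
    updator(f"{project.uuid}")
    with postgres_db.connect() as con:
        prj_row = con.execute(
            projects.select().where(projects.c.uuid == f"{project.uuid}")
        ).first()
        # after the update, each workbench node should expose its outputs
        assert all("outputs" in node for node in prj_row.workbench.values())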
async def _get_node_from_db(
    node_uuid: str, connection: aiopg.sa.SAConnection
) -> aiopg.sa.result.RowProxy:
    log.debug(
        "Reading from comp_tasks table for node uuid %s, project %s",
        node_uuid,
        config.PROJECT_ID,
    )
    result = await connection.execute(
        comp_tasks.select().where(
            and_(
                comp_tasks.c.node_id == node_uuid,
                comp_tasks.c.project_id == config.PROJECT_ID,
            )
        )
    )
    if result.rowcount > 1:
        log.error("the node id %s is not unique", node_uuid)
    node = await result.fetchone()
    if not node:
        log.error("the node id %s was not found", node_uuid)
        raise NodeNotFound(node_uuid)
    return node
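# Hedged sketch (illustration only, not the sdk's actual API): how
# _get_node_from_db could be combined with an aiopg engine to read a node's
# stored outputs. The helper name `get_node_outputs` is an assumption.
from typing import Any, Dict


async def get_node_outputs(
    node_uuid: str, engine: aiopg.sa.engine.Engine
) -> Dict[str, Any]:
    async with engine.acquire() as connection:
        node_row = await _get_node_from_db(node_uuid, connection)
        # comp_tasks keeps the node's outputs in a JSON column; default to {}
        return node_row.outputs or {}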