Example #1
def task(postgres_db: sa.engine.Engine) -> Callable[..., str]:
    created_task_ids: List[int] = []

    def creator(project_id: str, node_uuid: str, **overrides) -> str:
        task_config = {
            "project_id": project_id,
            "node_id": node_uuid,
        }
        task_config.update(**overrides)
        with postgres_db.connect() as conn:
            result = conn.execute(
                comp_tasks.insert()  # pylint: disable=no-value-for-parameter
                .values(**task_config)
                .returning(comp_tasks.c.task_id)
            )
            new_task_id = result.first()[comp_tasks.c.task_id]
        created_task_ids.append(new_task_id)
        return node_uuid

    yield creator

    # cleanup
    with postgres_db.connect() as conn:
        conn.execute(
            comp_tasks.delete().where(  # pylint: disable=no-value-for-parameter
                comp_tasks.c.task_id.in_(created_task_ids)
            )
        )
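Example #1 reads like a yield-style pytest fixture following the factory-as-fixture pattern (the @pytest.fixture decorator is presumably applied in the original source but is not shown): the test receives a `creator` callable, every inserted task id is recorded, and teardown deletes exactly those rows. Below is a minimal usage sketch under those assumptions; the `project` fixture and its `uuid` attribute are hypothetical stand-ins for an already inserted project row.

# Hypothetical test using the `task` factory fixture from Example #1.
# `project` is an assumed fixture providing an existing project row; it is not
# part of the example above.
from uuid import uuid4


def test_single_task_is_created_and_cleaned_up(project, task):
    node_uuid = f"{uuid4()}"
    # the factory returns the node uuid it was given; the inserted row is
    # tracked in created_task_ids and deleted again at fixture teardown
    assert task(project_id=f"{project.uuid}", node_uuid=node_uuid) == node_uuid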
Example #2
def tasks(
    postgres_db: sa.engine.Engine
) -> Iterable[Callable[..., List[CompTaskAtDB]]]:
    created_task_ids: List[int] = []

    def creator(project: ProjectAtDB, **overrides) -> List[CompTaskAtDB]:
        created_tasks: List[CompTaskAtDB] = []
        for internal_id, (node_id, node_data) in enumerate(project.workbench.items()):
            task_config = {
                "project_id": f"{project.uuid}",
                "node_id": f"{node_id}",
                "schema": {"inputs": {}, "outputs": {}},
                "inputs": {
                    key: json.loads(value.json(by_alias=True, exclude_unset=True))
                    if isinstance(value, BaseModel)
                    else value
                    for key, value in node_data.inputs.items()
                }
                if node_data.inputs
                else {},
                "outputs": {
                    key: json.loads(value.json(by_alias=True, exclude_unset=True))
                    if isinstance(value, BaseModel)
                    else value
                    for key, value in node_data.outputs.items()
                }
                if node_data.outputs
                else {},
                "image": Image(
                    name=node_data.key,
                    tag=node_data.version,
                ).dict(by_alias=True, exclude_unset=True),
                "node_class": to_node_class(node_data.key),
                "internal_id": internal_id + 1,
                "submit": datetime.utcnow(),
            }
            task_config.update(**overrides)
            with postgres_db.connect() as conn:
                result = conn.execute(
                    comp_tasks.insert()
                    .values(**task_config)
                    .returning(literal_column("*"))
                )
                new_task = CompTaskAtDB.parse_obj(result.first())
                created_tasks.append(new_task)
            # track only the newly inserted row's id so the teardown below can
            # delete it (extending with the whole list each iteration would
            # register duplicates)
            if new_task.task_id:
                created_task_ids.append(new_task.task_id)
        return created_tasks

    yield creator

    # cleanup
    with postgres_db.connect() as conn:
        conn.execute(
            comp_tasks.delete().where(comp_tasks.c.task_id.in_(created_task_ids))
        )
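Example #2 is the same factory pattern, but the creator derives one comp_tasks row per node in the project's workbench and returns them as CompTaskAtDB models. A usage sketch under the same assumptions as before (a hypothetical `project` fixture yielding a ProjectAtDB with a populated workbench; the `state` override is purely illustrative):

# Hypothetical test using the `tasks` factory fixture from Example #2.
def test_tasks_created_for_every_workbench_node(project, tasks):
    created = tasks(project, state="PUBLISHED")  # illustrative column override
    assert len(created) == len(project.workbench)
    assert {f"{t.node_id}" for t in created} == set(project.workbench.keys())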
Example #3
def minimal_configuration(  # pylint:disable=too-many-arguments
    loop: asyncio.AbstractEventLoop,
    sleeper_service: Dict,
    dy_static_file_server_dynamic_sidecar_service: Dict,
    dy_static_file_server_dynamic_sidecar_compose_spec_service: Dict,
    redis_service: RedisConfig,
    postgres_db: sa.engine.Engine,
    postgres_host_config: Dict[str, str],
    rabbit_service: RabbitConfig,
    simcore_services_ready: None,
    storage_service: URL,
    dask_scheduler_service: None,
    dask_sidecar_service: None,
    ensure_swarm_and_networks: None,
) -> Iterator[None]:
    node_ports_config.STORAGE_ENDPOINT = (
        f"{storage_service.host}:{storage_service.port}")
    with postgres_db.connect() as conn:
        # pylint: disable=no-value-for-parameter
        conn.execute(comp_tasks.delete())
        conn.execute(comp_pipeline.delete())
        yield
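Example #3 assembles the whole service stack the test needs (postgres, redis, rabbitmq, storage, dask, a docker swarm) and empties the comp_tasks and comp_pipeline tables before yielding to the test. A sketch of how a test might opt into it; the async test style (e.g. pytest-asyncio) and the test body are assumptions:

# Hypothetical test relying on the `minimal_configuration` fixture above.
import pytest


@pytest.mark.usefixtures("minimal_configuration")
async def test_starts_from_empty_comp_tables(postgres_db: sa.engine.Engine):
    with postgres_db.connect() as conn:
        # both tables were wiped by the fixture before the test body runs
        assert conn.execute(comp_tasks.select()).first() is None
        assert conn.execute(comp_pipeline.select()).first() is None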
Example #4
async def pipeline(
    sidecar_config: None,
    postgres_db: sa.engine.Engine,
    storage_service: URL,
    osparc_service: Dict[str, str],
    user_id: int,
    project_id: str,
    pipeline_cfg: Dict,
    mock_dir: Path,
    request,
) -> str:
    """creates a full pipeline.
    NOTE: 'pipeline', defined as parametrization
    """

    tasks = {key: osparc_service for key in pipeline_cfg}
    dag = {key: pipeline_cfg[key]["next"] for key in pipeline_cfg}
    inputs = {key: pipeline_cfg[key]["inputs"] for key in pipeline_cfg}

    np = importlib.import_module(f".{request.param}", package="simcore_sdk")

    async def _create(
        tasks: Dict[str, Any],
        dag: Dict[str, List[str]],
        inputs: Dict[str, Dict[str, Any]],
    ) -> str:

        # add a pipeline
        with postgres_db.connect() as conn:
            conn.execute(
                comp_pipeline.insert().values(  # pylint: disable=no-value-for-parameter
                    project_id=project_id, dag_adjacency_list=dag
                )
            )

            # create the tasks for each pipeline's node
            for node_uuid, service in tasks.items():
                node_inputs = inputs[node_uuid]
                conn.execute(
                    comp_tasks.insert().values(  # pylint: disable=no-value-for-parameter
                        project_id=project_id,
                        node_id=node_uuid,
                        schema=service["schema"],
                        image=service["image"],
                        inputs=node_inputs,
                        state="PENDING",
                        outputs={},
                    )
                )

        # check whether any node input references a file that must be uploaded
        for node_uuid, service in tasks.items():
            node_inputs = inputs[node_uuid]
            for input_key in node_inputs:
                if (
                    isinstance(node_inputs[input_key], dict)
                    and "path" in node_inputs[input_key]
                ):
                    # upload the files in mock_dir to S3
                    print("--" * 10)
                    print_module_variables(module=np.node_config)
                    print("--" * 10)

                    PORTS = await np.ports(user_id, project_id, node_uuid)
                    await (await PORTS.inputs)[input_key].set(
                        mock_dir / node_inputs[input_key]["path"]
                    )
        return project_id

    yield await _create(tasks, dag, inputs)

    # cleanup
    with postgres_db.connect() as conn:
        conn.execute(comp_tasks.delete().where(comp_tasks.c.project_id == project_id))
        conn.execute(
            comp_pipeline.delete().where(comp_pipeline.c.project_id == project_id)
        )
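Example #4 is an indirectly parametrized async fixture: `request.param` names the simcore_sdk node-ports module to import, and the yielded value is the project_id of the fully populated pipeline. A parametrization sketch follows; the module name "node_ports_v2" and the test body are assumptions, not taken from the example:

# Hypothetical parametrized test for the `pipeline` fixture from Example #4.
import pytest


@pytest.mark.parametrize("pipeline", ["node_ports_v2"], indirect=True)
async def test_pipeline_rows_are_inserted(
    pipeline: str, postgres_db: sa.engine.Engine, project_id: str
):
    assert pipeline == project_id  # the fixture yields the pipeline's project id
    with postgres_db.connect() as conn:
        row = conn.execute(
            comp_pipeline.select().where(comp_pipeline.c.project_id == project_id)
        ).first()
        assert row is not None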