Example #1
def pipeline(
    postgres_db: sa.engine.Engine,
) -> Iterable[Callable[..., CompPipelineAtDB]]:
    created_pipeline_ids: List[str] = []

    def creator(**overrides) -> CompPipelineAtDB:
        pipeline_config = {
            "project_id": f"{uuid4()}",
            "dag_adjacency_list": {},
            "state": StateType.NOT_STARTED,
        }
        pipeline_config.update(**overrides)
        with postgres_db.connect() as conn:
            result = conn.execute(comp_pipeline.insert().values(
                **pipeline_config).returning(literal_column("*")))
            new_pipeline = CompPipelineAtDB.parse_obj(result.first())
            created_pipeline_ids.append(f"{new_pipeline.project_id}")
            return new_pipeline

    yield creator

    # cleanup
    with postgres_db.connect() as conn:
        conn.execute(comp_pipeline.delete().where(
            comp_pipeline.c.project_id.in_(created_pipeline_ids)))
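
A minimal usage sketch, assuming the factory above is registered as a pytest fixture (the decorator is not shown in the snippet) and that uuid4 is in scope as in the fixture body; the test name and the override value are illustrative only:

def test_pipeline_factory_creates_row(pipeline):
    # any column of comp_pipeline can be overridden through keyword arguments
    wanted_project_id = f"{uuid4()}"
    new_pipeline = pipeline(project_id=wanted_project_id)
    assert f"{new_pipeline.project_id}" == wanted_project_id
    # rows created through the factory are deleted by the fixture's cleanup phase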
Example #2
def pipeline(postgres_db: sa.engine.Engine) -> Callable[[str], str]:
    created_pipeline_ids: List[str] = []

    def creator(project_id: str) -> str:
        with postgres_db.connect() as conn:
            result = conn.execute(comp_pipeline.insert()  # pylint: disable=no-value-for-parameter
                                  .values(project_id=project_id).returning(
                                      comp_pipeline.c.project_id))
            new_pipeline_id = result.first()[comp_pipeline.c.project_id]
        created_pipeline_ids.append(f"{new_pipeline_id}")
        return new_pipeline_id

    yield creator

    # cleanup
    with postgres_db.connect() as conn:
        conn.execute(comp_pipeline.delete().where(  # pylint: disable=no-value-for-parameter
            comp_pipeline.c.project_id.in_(created_pipeline_ids)))
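
For contrast with Example #1, a short sketch of consuming this variant, which takes the project_id from the caller and returns it once the row is inserted; again assuming pytest fixture registration and uuid4 in scope:

def test_pipeline_created_with_given_id(pipeline):
    wanted_id = f"{uuid4()}"
    # the creator returns exactly the project_id it inserted into comp_pipeline
    assert pipeline(wanted_id) == wanted_id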
Example #3
def minimal_configuration(  # pylint:disable=too-many-arguments
    loop: asyncio.AbstractEventLoop,
    sleeper_service: Dict,
    dy_static_file_server_dynamic_sidecar_service: Dict,
    dy_static_file_server_dynamic_sidecar_compose_spec_service: Dict,
    redis_service: RedisConfig,
    postgres_db: sa.engine.Engine,
    postgres_host_config: Dict[str, str],
    rabbit_service: RabbitConfig,
    simcore_services_ready: None,
    storage_service: URL,
    dask_scheduler_service: None,
    dask_sidecar_service: None,
    ensure_swarm_and_networks: None,
) -> Iterator[None]:
    node_ports_config.STORAGE_ENDPOINT = (
        f"{storage_service.host}:{storage_service.port}")
    with postgres_db.connect() as conn:
        # pylint: disable=no-value-for-parameter
        conn.execute(comp_tasks.delete())
        conn.execute(comp_pipeline.delete())
        yield
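
A hedged sketch of a test relying on this fixture; it only illustrates that comp_pipeline and comp_tasks are emptied before the test body runs, and assumes the fixture and the table objects from the snippets above are available in the test module:

def test_comp_tables_start_empty(
    minimal_configuration: None, postgres_db: sa.engine.Engine
):
    # the fixture wiped both tables before yielding control to the test
    with postgres_db.connect() as conn:
        assert conn.execute(comp_pipeline.select()).first() is None
        assert conn.execute(comp_tasks.select()).first() is None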
Example #4
async def pipeline(
    sidecar_config: None,
    postgres_db: sa.engine.Engine,
    storage_service: URL,
    osparc_service: Dict[str, str],
    user_id: int,
    project_id: str,
    pipeline_cfg: Dict,
    mock_dir: Path,
    request,
) -> str:
    """creates a full pipeline.
    NOTE: 'pipeline', defined as parametrization
    """

    tasks = {key: osparc_service for key in pipeline_cfg}
    dag = {key: pipeline_cfg[key]["next"] for key in pipeline_cfg}
    inputs = {key: pipeline_cfg[key]["inputs"] for key in pipeline_cfg}

    np = importlib.import_module(f".{request.param}", package="simcore_sdk")

    async def _create(
        tasks: Dict[str, Any],
        dag: Dict[str, List[str]],
        inputs: Dict[str, Dict[str, Any]],
    ) -> str:

        # add a pipeline
        with postgres_db.connect() as conn:
            conn.execute(
                comp_pipeline.insert().values(  # pylint: disable=no-value-for-parameter
                    project_id=project_id, dag_adjacency_list=dag
                )
            )

            # create the tasks for each pipeline's node
            for node_uuid, service in tasks.items():
                node_inputs = inputs[node_uuid]
                conn.execute(
                    comp_tasks.insert().values(  # pylint: disable=no-value-for-parameter
                        project_id=project_id,
                        node_id=node_uuid,
                        schema=service["schema"],
                        image=service["image"],
                        inputs=node_inputs,
                        state="PENDING",
                        outputs={},
                    )
                )

        # check whether any input is a file that must be uploaded to storage
        for node_uuid, service in tasks.items():
            node_inputs = inputs[node_uuid]
            for input_key in node_inputs:
                if (
                    isinstance(node_inputs[input_key], dict)
                    and "path" in node_inputs[input_key]
                ):
                    # upload the files in mock_dir to S3
                    print("--" * 10)
                    print_module_variables(module=np.node_config)
                    print("--" * 10)

                    PORTS = await np.ports(user_id, project_id, node_uuid)
                    await (await PORTS.inputs)[input_key].set(
                        mock_dir / node_inputs[input_key]["path"]
                    )
        return project_id

    yield await _create(tasks, dag, inputs)

    # cleanup
    with postgres_db.connect() as conn:
        conn.execute(comp_tasks.delete().where(comp_tasks.c.project_id == project_id))
        conn.execute(
            comp_pipeline.delete().where(comp_pipeline.c.project_id == project_id)
        )
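
A sketch of a consumer, assuming the coroutine above is exposed as an async pytest fixture (e.g. via pytest-asyncio) and is parametrized indirectly; the parameter values "node_ports" and "node_ports_v2" are assumptions about the simcore_sdk submodules, not taken from the snippet, and the remaining names (sa, comp_pipeline, postgres_db) are assumed to be in scope as above:

import pytest  # requires pytest-asyncio (e.g. asyncio_mode=auto) to run the async test


@pytest.mark.parametrize("pipeline", ["node_ports", "node_ports_v2"], indirect=True)
async def test_pipeline_rows_exist(pipeline: str, postgres_db: sa.engine.Engine):
    # the fixture yields the project_id of the pipeline it just created
    with postgres_db.connect() as conn:
        row = conn.execute(
            comp_pipeline.select().where(comp_pipeline.c.project_id == pipeline)
        ).first()
    assert row is not None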