Example 1
def _get_fake_service_details(service: ServiceKeyVersion) -> ServiceDockerData:
    """Return hard-coded metadata for frontend-only services.

    Frontend services have no docker image to query, so their details are
    faked here. Only the file-picker is currently supported.

    :raises ValueError: if *service* is not a known frontend service
    """
    if "file-picker" in service.key:
        file_picker_outputs = {
            "outFile": {
                "label": "the output",
                "displayOrder": 0,
                "description": "a file",
                "type": "data:*/*",
            }
        }
        return ServiceDockerData(
            **service.dict(),
            name="file-picker",
            description="file-picks",
            authors=[
                Author(name="ITIS",
                       email="*****@*****.**",
                       affiliation="IT'IS")
            ],
            contact="*****@*****.**",
            inputs={},
            outputs=file_picker_outputs,
            type=ServiceType.FRONTEND,
        )
    # BUGFIX: was `raise ValueError("")` — an empty message gives the caller
    # no clue which service was rejected
    raise ValueError(f"No fake service details available for {service.key}")
Example 2
    async def list_services_from_published_templates(self) -> List[ServiceKeyVersion]:
        """Return the services used by all published template projects.

        Frontend-only nodes (file-picker, nodes-group) are skipped outright;
        workbench entries that fail validation are logged and skipped too.
        """
        published_services: List[ServiceKeyVersion] = []
        query = sa.select([projects]).where(
            (projects.c.type == ProjectType.TEMPLATE)
            & (projects.c.published == True)
        )
        async for row in self.connection.execute(query):
            for service in row.workbench.values():
                if "file-picker" in service["key"] or "nodes-group" in service["key"]:
                    # these 2 are not going to pass the validation tests,
                    # they are frontend only nodes.
                    continue
                try:
                    published_services.append(ServiceKeyVersion(**service))
                except ValidationError:
                    logger.warning(
                        "service %s could not be validated", service, exc_info=True
                    )

        return published_services
Example 3
    async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None:
        """Prepare the service environment inside the dynamic sidecar.

        Pulls the output ports (and, unless dev-features are enabled,
        restores state via nodeports), then creates the output directories
        declared in the service's ``io.simcore.outputs`` label.
        """
        settings: AppSettings = app.state.settings
        sidecar_client = get_dynamic_sidecar_client(app)
        sidecar_endpoint = scheduler_data.dynamic_sidecar.endpoint

        async with disabled_directory_watcher(sidecar_client, sidecar_endpoint):
            pending = [sidecar_client.service_pull_output_ports(sidecar_endpoint)]
            if not settings.DIRECTOR_V2_DEV_FEATURES_ENABLED:
                # When enabled no longer downloads state via nodeports;
                # S3 is used to store state paths
                pending.append(sidecar_client.service_restore_state(sidecar_endpoint))
            await logged_gather(*pending)

            # inside this directory create the missing dirs, fetched from the labels
            director_v0_client: DirectorV0Client = _get_director_v0_client(app)
            labels: SimcoreServiceLabels = await director_v0_client.get_service_labels(
                service=ServiceKeyVersion(
                    key=scheduler_data.key, version=scheduler_data.version
                )
            )
            outputs_labels = json.loads(
                labels.dict().get("io.simcore.outputs", "{}")
            ).get("outputs", {})
            logger.debug(
                "Creating dirs from service outputs labels: %s", outputs_labels
            )
            await sidecar_client.service_outputs_create_dirs(
                sidecar_endpoint, outputs_labels
            )

            scheduler_data.dynamic_sidecar.service_environment_prepared = True
Example 4
async def _generate_tasks_list_from_project(
    project: ProjectAtDB,
    director_client: DirectorV0Client,
    published_nodes: List[NodeID],
) -> List[CompTaskAtDB]:
    """Build the list of DB task rows for every node of *project*.

    Frontend nodes take their metadata from the static catalog; all other
    nodes are queried from director-v0. Nodes without details are skipped.
    Computational nodes listed in *published_nodes* are marked PUBLISHED.
    """
    tasks: List[CompTaskAtDB] = []
    for internal_id, node_id in enumerate(project.workbench, 1):
        node: Node = project.workbench[node_id]
        key_version = ServiceKeyVersion(key=node.key, version=node.version)
        node_class = to_node_class(key_version.key)

        details: Optional[ServiceDockerData] = None
        extras: Optional[ServiceExtras] = None
        if node_class == NodeClass.FRONTEND:
            details = _FRONTEND_SERVICES_CATALOG.get(key_version.key, None)
        else:
            details, extras = await asyncio.gather(
                director_client.get_service_details(key_version),
                director_client.get_service_extras(key_version),
            )

        if not details:
            # e.g. a frontend node missing from the catalog
            continue

        image = Image(
            name=key_version.key,
            tag=key_version.version,
            node_requirements=extras.node_requirements if extras else None,
        )

        assert node.state is not None  # nosec
        task_state = node.state.current_status
        if node_id in published_nodes and node_class == NodeClass.COMPUTATIONAL:
            task_state = RunningState.PUBLISHED

        tasks.append(
            CompTaskAtDB(
                project_id=project.uuid,
                node_id=node_id,
                schema=NodeSchema.parse_obj(
                    details.dict(
                        exclude_unset=True,
                        by_alias=True,
                        include={"inputs", "outputs"},
                    )
                ),
                inputs=node.inputs,
                outputs=node.outputs,
                image=image,
                submit=datetime.utcnow(),
                state=task_state,
                internal_id=internal_id,
                node_class=node_class,
            )
        )
    return tasks
Example 5
    async def upsert_tasks_from_project(self, project: ProjectAtDB,
                                        director_client: DirectorV0Client,
                                        publish: bool) -> None:
        """Re-create the comp_tasks rows for every node of *project*.

        Any existing rows for the project are deleted first. Frontend nodes
        use fake (hard-coded) service details; all other nodes are resolved
        via director-v0. When *publish* is True, computational tasks are
        created in the PUBLISHED state, otherwise NOT_STARTED.
        """
        # start by removing the old tasks if they exist
        await self.connection.execute(
            sa.delete(comp_tasks).where(
                comp_tasks.c.project_id == str(project.uuid)))

        # create the tasks, numbering them from 1 in workbench order
        # (was a manually incremented counter)
        for internal_id, node_id in enumerate(project.workbench, start=1):
            node: Node = project.workbench[node_id]

            service_key_version = ServiceKeyVersion(
                key=node.key,
                version=node.version,
            )
            node_class = to_node_class(service_key_version.key)
            # FIX: annotations were `ServiceDockerData = None` / `ServiceExtras = None`
            # which are not Optional; node_extras stays None for frontend nodes
            node_details: Optional[ServiceDockerData] = None
            node_extras: Optional[ServiceExtras] = None
            if node_class == NodeClass.FRONTEND:
                node_details = _get_fake_service_details(service_key_version)
            else:
                node_details = await director_client.get_service_details(
                    service_key_version)
                node_extras = await director_client.get_service_extras(
                    service_key_version)

            requires_mpi = False
            requires_gpu = False
            if node_extras:
                requires_gpu = node_extras.node_requirements == NodeRequirement.GPU
                requires_mpi = node_extras.node_requirements == NodeRequirement.MPI
            image = Image(
                name=service_key_version.key,
                tag=service_key_version.version,
                requires_gpu=requires_gpu,
                requires_mpi=requires_mpi,
            )

            comp_state = RunningState.PUBLISHED if publish else RunningState.NOT_STARTED
            task_db = CompTaskAtDB(
                project_id=project.uuid,
                node_id=node_id,
                schema=NodeSchema(inputs=node_details.inputs,
                                  outputs=node_details.outputs),
                inputs=node.inputs,
                outputs=node.outputs,
                image=image,
                submit=datetime.utcnow(),
                # only computational nodes honour the publish flag
                state=comp_state if node_class == NodeClass.COMPUTATIONAL else
                RunningState.NOT_STARTED,
                internal_id=internal_id,
                node_class=node_class,
            )

            await self.connection.execute(
                insert(comp_tasks).values(
                    **task_db.dict(by_alias=True, exclude_unset=True)))
Example 6
async def merge_settings_before_use(
    director_v0_client: DirectorV0Client,
    service_key: str,
    service_tag: str,
    service_user_selection_boot_options: Dict[EnvVarKey, str],
) -> SimcoreServiceSettingsLabel:
    """Assemble the settings of the service and of every oSPARC service it
    involves (via its compose-spec) into a single settings label.

    Boot options selected by the user are injected as env vars targeted at
    the container they belong to.
    """
    simcore_service_labels: SimcoreServiceLabels = (
        await director_v0_client.get_service_labels(
            service=ServiceKeyVersion(key=service_key, version=service_tag)))
    log.info("image=%s, tag=%s, labels=%s", service_key, service_tag,
             simcore_service_labels)

    # paths_mapping express how to map dynamic-sidecar paths to the compose-spec volumes
    # where the service expects to find its certain folders

    labels_for_involved_services: Dict[
        str,
        SimcoreServiceLabels] = await _extract_osparc_involved_service_labels(
            director_v0_client=director_v0_client,
            service_key=service_key,
            service_tag=service_tag,
            service_labels=simcore_service_labels,
        )
    # BUGFIX: was `logging.info(...)`, which went through the root logger
    # instead of this module's `log` used everywhere else
    log.info("labels_for_involved_services=%s", labels_for_involved_services)

    # merge the settings from all the involved services
    settings: Deque[SimcoreServiceSettingLabelEntry] = deque()  # TODO: fix typing here
    for compose_spec_key, service_labels in labels_for_involved_services.items():
        service_settings: SimcoreServiceSettingsLabel = cast(
            SimcoreServiceSettingsLabel, service_labels.settings)

        settings.extend(
            # inject compose spec key, used to target container specific services
            _add_compose_destination_container_to_settings_entries(
                settings=service_settings,
                destination_container=compose_spec_key))

        # inject boot options as env vars
        labels_boot_options = _get_boot_options(service_labels)
        if labels_boot_options:
            # create a new setting from SimcoreServiceSettingsLabel as env var to pass to target container
            boot_options_settings_env_vars = _assemble_env_vars_for_boot_options(
                labels_boot_options, service_user_selection_boot_options)
            settings.extend(
                # inject compose spec key, used to target container specific services
                _add_compose_destination_container_to_settings_entries(
                    settings=boot_options_settings_env_vars,
                    destination_container=compose_spec_key,
                ))

    settings = _merge_resources_in_settings(settings)
    settings = _patch_target_service_into_env_vars(settings)

    return SimcoreServiceSettingsLabel.parse_obj(settings)
Example 7
async def test_get_service_extras(
    minimal_app: FastAPI,
    mocked_director_service_fcts,
    fake_service_extras: ServiceExtras,
):
    """The director-v0 client returns the extras served by the mocked director."""
    client: DirectorV0Client = minimal_app.state.director_v0_client
    extras: ServiceExtras = await client.get_service_extras(
        ServiceKeyVersion(key="simcore/services/dynamic/myservice", version="1.3.4")
    )
    assert mocked_director_service_fcts["get_service_extras"].called
    assert extras == fake_service_extras
async def create_dynamic_service(
    service: DynamicServiceCreate,
    x_dynamic_sidecar_request_dns: str = Header(...),
    x_dynamic_sidecar_request_scheme: str = Header(...),
    director_v0_client: DirectorV0Client = Depends(get_director_v0_client),
    dynamic_services_settings: DynamicServicesSettings = Depends(
        get_dynamic_services_settings),
    scheduler: DynamicSidecarsScheduler = Depends(get_scheduler),
) -> Union[DynamicServiceOut, RedirectResponse]:
    """Start a dynamic service.

    Legacy services (those not needing a dynamic-sidecar) are redirected
    to director-v0; all others are handed to the dynamic-sidecar scheduler.
    """
    labels: SimcoreServiceLabels = await director_v0_client.get_service_labels(
        service=ServiceKeyVersion(key=service.key, version=service.version)
    )

    # LEGACY (backwards compatibility): forward to director-v0
    if not labels.needs_dynamic_sidecar:
        redirect_url_with_query = director_v0_client.client.base_url.copy_with(
            path="/v0/running_interactive_services",
            params={
                "user_id": f"{service.user_id}",
                "project_id": f"{service.project_id}",
                "service_uuid": f"{service.node_uuid}",
                "service_key": f"{service.key}",
                "service_tag": f"{service.version}",
                "service_basepath": f"{service.basepath}",
            },
        )
        logger.debug("Redirecting %s", redirect_url_with_query)
        return RedirectResponse(str(redirect_url_with_query))

    # schedule the sidecar-backed service unless it is already running
    already_running = await is_dynamic_service_running(
        service.node_uuid, dynamic_services_settings.DYNAMIC_SIDECAR
    )
    if not already_running:
        scheduler_data = SchedulerData.from_http_request(
            service=service,
            simcore_service_labels=labels,
            port=dynamic_services_settings.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_PORT,
            request_dns=x_dynamic_sidecar_request_dns,
            request_scheme=x_dynamic_sidecar_request_scheme,
        )
        await scheduler.add_service(scheduler_data)

    stack_status = await scheduler.get_stack_status(service.node_uuid)
    return cast(DynamicServiceOut, stack_status)
Example 9
async def _extract_osparc_involved_service_labels(
    director_v0_client: DirectorV0Client,
    service_key: str,
    service_tag: str,
    service_labels: SimcoreServiceLabels,
) -> Dict[str, SimcoreServiceLabels]:
    """
    Returns all the involved oSPARC services from the provided service labels.

    If the service contains a compose-spec that will also be parsed for images.
    Searches for images like the following in the spec:
    - `${REGISTRY_URL}/**SOME_SERVICE_NAME**:${SERVICE_TAG}`
    - `${REGISTRY_URL}/**SOME_SERVICE_NAME**:1.2.3` where `1.2.3` is a hardcoded tag

    :raises DynamicSidecarError: if the reverse mapping could not be filled
        consistently with the discovered labels
    """

    # initialize with existing labels
    # stores labels mapped by image_name service:tag
    docker_image_name_by_services: Dict[str, SimcoreServiceLabels] = {
        _assemble_key(service_key=service_key, service_tag=service_tag):
        service_labels
    }
    compose_spec: ComposeSpecLabel = cast(ComposeSpecLabel,
                                          service_labels.compose_spec)
    if compose_spec is None:
        return docker_image_name_by_services

    # maps from image_name to compose_spec key
    reverse_mapping: Dict[str, str] = {}

    compose_spec_services = compose_spec.get("services", {})
    image = None
    for compose_service_key, service_data in compose_spec_services.items():
        image = service_data.get("image", None)
        if image is None:
            continue

        # skip images that do not have one of these formats:
        # - `${SIMCORE_REGISTRY}/**SOME_SERVICE_NAME**:${SERVICE_VERSION}`
        # - `${SIMCORE_REGISTRY}/**SOME_SERVICE_NAME**:1.2.3` a hardcoded tag
        # (merged from two conditions that both re-tested startswith)
        # NOTE(review): the endswith(MATCH_IMAGE_END) test looks like it
        # rejects hardcoded tags despite the docstring above -- confirm the
        # value of MATCH_IMAGE_END covers that case
        if not (
            image.startswith(MATCH_IMAGE_START)
            and ":" in image
            and image.endswith(MATCH_IMAGE_END)
        ):
            continue

        # strips `${REGISTRY_URL}/`; replaces `${SERVICE_TAG}` with `service_tag`
        osparc_image_key = image.replace(MATCH_SERVICE_VERSION,
                                         service_tag).replace(
                                             MATCH_IMAGE_START, "")
        current_service_key, current_service_tag = osparc_image_key.split(":")
        involved_key = _assemble_key(service_key=current_service_key,
                                     service_tag=current_service_tag)
        reverse_mapping[involved_key] = compose_service_key

        simcore_service_labels: SimcoreServiceLabels = (
            await
            director_v0_client.get_service_labels(service=ServiceKeyVersion(
                key=current_service_key, version=current_service_tag)))
        docker_image_name_by_services[involved_key] = simcore_service_labels

    if len(reverse_mapping) != len(docker_image_name_by_services):
        message = (
            f"Extracting labels for services in '{image}' could not fill "
            f"reverse_mapping={reverse_mapping}; "
            f"docker_image_name_by_services={docker_image_name_by_services}")
        log.error(message)
        raise DynamicSidecarError(message)

    # remaps from image_name as key to compose_spec key
    compose_spec_mapped_labels = {
        reverse_mapping[k]: v
        for k, v in docker_image_name_by_services.items()
    }
    return compose_spec_mapped_labels
Example 10
def mock_service_key_version() -> ServiceKeyVersion:
    """Build a ServiceKeyVersion from the module's mock key/version constants."""
    return ServiceKeyVersion(
        key=MOCK_SERVICE_KEY,
        version=MOCK_SERVICE_VERSION,
    )