Example 1
    def test_get_data_paths_works_as_expected(self):
        with self.assertRaises(VolumeNotFoundError):
            get_data_paths(['path1', 'path2'])

        assert get_data_paths(['data2']) == {'data2': '/data/2'}
        assert get_data_paths(['data3']) == {'data3': 'gs://data-bucket'}
        assert get_data_paths(['data2', 'data3']) == {
            'data2': '/data/2',
            'data3': 'gs://data-bucket'
        }
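The assertions above pin down the expected behaviour of get_data_paths: each requested data name is mapped to its configured mount path or bucket URL, and unknown names raise VolumeNotFoundError. A minimal sketch consistent with those assertions follows; the PERSISTENCE_DATA mapping, its shape, and the locally defined exception are assumptions for illustration, not the actual Polyaxon settings or imports.

# Sketch only: the real helper reads persistence definitions from settings;
# PERSISTENCE_DATA is a hypothetical stand-in for that configuration.
class VolumeNotFoundError(Exception):
    """Stand-in for the real exception imported in the tests above."""


PERSISTENCE_DATA = {
    'data2': {'mountPath': '/data/2'},
    'data3': {'bucket': 'gs://data-bucket'},
}


def get_data_paths(persistence_data):
    # Default to all configured data volumes when none are requested.
    persistence_data = persistence_data or PERSISTENCE_DATA.keys()
    data_paths = {}
    for name in persistence_data:
        if name not in PERSISTENCE_DATA:
            raise VolumeNotFoundError(
                'Data volume with name `{}` was not found.'.format(name))
        definition = PERSISTENCE_DATA[name]
        # Host-mounted volumes expose a mount path, stores expose a bucket URL.
        data_paths[name] = definition.get('mountPath') or definition.get('bucket')
    return data_paths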
Example 2
    def get_pod_container(self,
                          volume_mounts,
                          env_vars=None,
                          command=None,
                          args=None,
                          persistence_outputs=None,
                          persistence_data=None,
                          outputs_refs_jobs=None,
                          outputs_refs_experiments=None,
                          secret_refs=None,
                          configmap_refs=None,
                          resources=None,
                          ephemeral_token=None):
        """Pod job container for task."""
        assert self.cluster_def is not None

        # Env vars preparations
        env_vars = get_list(env_vars)
        outputs_path = get_experiment_outputs_path(
            persistence_outputs=persistence_outputs,
            experiment_name=self.experiment_name,
            original_name=self.original_name,
            cloning_strategy=self.cloning_strategy)
        env_vars += get_job_env_vars(
            log_level=self.log_level,
            outputs_path=outputs_path,
            data_paths=get_data_paths(persistence_data),
            logs_path=get_experiment_logs_path(self.experiment_name),
            outputs_refs_jobs=outputs_refs_jobs,
            outputs_refs_experiments=outputs_refs_experiments,
            ephemeral_token=ephemeral_token,
        )
        env_vars += [
            get_env_var(name=constants.CONFIG_MAP_CLUSTER_KEY_NAME,
                        value=json.dumps(self.cluster_def)),
            get_env_var(name=constants.CONFIG_MAP_DECLARATIONS_KEY_NAME,
                        value=self.declarations),
            get_env_var(name=constants.CONFIG_MAP_EXPERIMENT_INFO_KEY_NAME,
                        value=json.dumps(self.experiment_labels)),
        ]
        env_vars += get_resources_env_vars(resources=resources)

        # Env from configmap and secret refs
        env_from = get_pod_env_from(secret_refs=secret_refs,
                                    configmap_refs=configmap_refs)

        ports = [
            client.V1ContainerPort(container_port=port) for port in self.ports
        ]
        return client.V1Container(name=self.job_container_name,
                                  image=self.job_docker_image,
                                  command=command,
                                  args=args,
                                  ports=ports,
                                  env=env_vars,
                                  env_from=env_from,
                                  resources=get_resources(resources),
                                  volume_mounts=volume_mounts)
Example 3
    def get_pod_container(self,
                          volume_mounts,
                          persistence_outputs,
                          persistence_data,
                          outputs_refs_jobs=None,
                          outputs_refs_experiments=None,
                          secret_refs=None,
                          configmap_refs=None,
                          env_vars=None,
                          command=None,
                          args=None,
                          resources=None):
        """Pod job container for task."""
        # Env vars preparation
        env_vars = get_list(env_vars)
        env_vars += get_job_env_vars(
            log_level=self.log_level,
            outputs_path=get_job_outputs_path(
                persistence_outputs=persistence_outputs,
                job_name=self.job_name),
            data_paths=get_data_paths(persistence_data),
            logs_path=get_job_logs_path(job_name=self.job_name),
            outputs_refs_jobs=outputs_refs_jobs,
            outputs_refs_experiments=outputs_refs_experiments)
        env_vars += [
            get_env_var(name=constants.CONFIG_MAP_JOB_INFO_KEY_NAME,
                        value=json.dumps(self.labels)),
        ]

        env_vars += get_resources_env_vars(resources=resources)

        # Env from configmap and secret refs
        env_from = get_pod_env_from(secret_refs=secret_refs,
                                    configmap_refs=configmap_refs)

        ports = [
            client.V1ContainerPort(container_port=port) for port in self.ports
        ]
        return client.V1Container(name=self.job_container_name,
                                  image=self.job_docker_image,
                                  command=command,
                                  args=args,
                                  ports=ports or None,
                                  env=env_vars,
                                  env_from=env_from,
                                  resources=get_resources(resources),
                                  volume_mounts=volume_mounts)
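Both variants of get_pod_container return a kubernetes client.V1Container. For context, the surrounding spawner code typically embeds that container in a pod spec; the snippet below is a minimal sketch of that step, with a trivial placeholder container and an empty volume list standing in for the values the spawner actually builds.

from kubernetes import client

# Sketch only: in the spawner, the container comes from get_pod_container and
# `volumes` is built alongside `volume_mounts`; both are placeholders here.
pod_container = client.V1Container(name='plx-job', image='busybox')
volumes = []
pod_spec = client.V1PodSpec(restart_policy='Never',
                            containers=[pod_container],
                            volumes=volumes)
pod_template = client.V1PodTemplateSpec(
    metadata=client.V1ObjectMeta(labels={'app': 'polyaxon-job'}),
    spec=pod_spec)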
Example 4
    def start_notebook(self,
                       image,
                       persistence_outputs=None,
                       persistence_data=None,
                       outputs_refs_jobs=None,
                       outputs_refs_experiments=None,
                       resources=None,
                       secret_refs=None,
                       configmap_refs=None,
                       node_selector=None,
                       affinity=None,
                       tolerations=None,
                       allow_commits=False):
        ports = [self.request_notebook_port()]
        target_ports = [self.PORT]
        volumes, volume_mounts = get_pod_volumes(
            persistence_outputs=persistence_outputs,
            persistence_data=persistence_data)
        refs_volumes, refs_volume_mounts = get_pod_refs_outputs_volumes(
            outputs_refs=outputs_refs_jobs,
            persistence_outputs=persistence_outputs)
        volumes += refs_volumes
        volume_mounts += refs_volume_mounts
        refs_volumes, refs_volume_mounts = get_pod_refs_outputs_volumes(
            outputs_refs=outputs_refs_experiments,
            persistence_outputs=persistence_outputs)
        volumes += refs_volumes
        volume_mounts += refs_volume_mounts
        shm_volumes, shm_volume_mounts = get_shm_volumes()
        volumes += shm_volumes
        volume_mounts += shm_volume_mounts
        env_vars = get_job_env_vars(
            outputs_path=get_notebook_job_outputs_path(
                persistence_outputs=persistence_outputs,
                notebook_job=self.job_name),
            data_paths=get_data_paths(persistence_data),
            outputs_refs_jobs=outputs_refs_jobs,
            outputs_refs_experiments=outputs_refs_experiments)
        secret_refs = validate_secret_refs(secret_refs)
        configmap_refs = validate_configmap_refs(configmap_refs)
        env_from = get_pod_env_from(secret_refs=secret_refs,
                                    configmap_refs=configmap_refs)
        code_volume, code_volume_mount = self.get_notebook_code_volume()
        volumes.append(code_volume)
        volume_mounts.append(code_volume_mount)
        deployment_name = constants.JOB_NAME.format(
            name=self.NOTEBOOK_JOB_NAME, job_uuid=self.job_uuid)

        node_selector = get_node_selector(
            node_selector=node_selector,
            default_node_selector=settings.NODE_SELECTOR_EXPERIMENTS)
        affinity = get_affinity(affinity=affinity,
                                default_affinity=settings.AFFINITY_EXPERIMENTS)
        tolerations = get_tolerations(
            tolerations=tolerations,
            default_tolerations=settings.TOLERATIONS_EXPERIMENTS)
        deployment = deployments.get_deployment(
            namespace=self.namespace,
            app=settings.APP_LABELS_NOTEBOOK,
            name=self.NOTEBOOK_JOB_NAME,
            project_name=self.project_name,
            project_uuid=self.project_uuid,
            job_name=self.job_name,
            job_uuid=self.job_uuid,
            volume_mounts=volume_mounts,
            volumes=volumes,
            image=image,
            command=["/bin/sh", "-c"],
            args=self.get_notebook_args(deployment_name=deployment_name,
                                        ports=ports,
                                        allow_commits=allow_commits),
            ports=target_ports,
            container_name=settings.CONTAINER_NAME_PLUGIN_JOB,
            env_vars=env_vars,
            env_from=env_from,
            resources=resources,
            node_selector=node_selector,
            affinity=affinity,
            tolerations=tolerations,
            role=settings.ROLE_LABELS_DASHBOARD,
            type=settings.TYPE_LABELS_RUNNER)
        deployment_labels = deployments.get_labels(
            app=settings.APP_LABELS_NOTEBOOK,
            project_name=self.project_name,
            project_uuid=self.project_uuid,
            job_name=self.job_name,
            job_uuid=self.job_uuid,
            role=settings.ROLE_LABELS_DASHBOARD,
            type=settings.TYPE_LABELS_RUNNER)
        dep_resp, _ = self.create_or_update_deployment(name=deployment_name,
                                                       data=deployment)
        service = services.get_service(namespace=self.namespace,
                                       name=deployment_name,
                                       labels=deployment_labels,
                                       ports=ports,
                                       target_ports=target_ports,
                                       service_type=self._get_service_type())

        service_resp, _ = self.create_or_update_service(name=deployment_name,
                                                        data=service)
        results = {
            'deployment': dep_resp.to_dict(),
            'service': service_resp.to_dict()
        }

        if self._use_ingress():
            annotations = json.loads(settings.K8S_INGRESS_ANNOTATIONS)
            paths = [{
                'path':
                '/notebook/{}'.format(self.project_name.replace('.', '/')),
                'backend': {
                    'serviceName': deployment_name,
                    'servicePort': ports[0]
                }
            }]
            ingress = ingresses.get_ingress(namespace=self.namespace,
                                            name=deployment_name,
                                            labels=deployment_labels,
                                            annotations=annotations,
                                            paths=paths)
            self.create_or_update_ingress(name=deployment_name, data=ingress)
        return results
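For reference, a hedged usage sketch: assuming `spawner` is an already-constructed instance of the notebook spawner class above (with project and job identifiers set elsewhere), a call might look like the following. The image name and persistence names are placeholders, not values taken from the source.

# Hypothetical call; `spawner` and all argument values are placeholders.
results = spawner.start_notebook(
    image='polyaxon/polyaxon-notebook:latest',  # placeholder image
    persistence_outputs='outputs',              # placeholder persistence name
    persistence_data=['data2'],                 # placeholder data volume
    allow_commits=True)
results['deployment']  # dict form of the created/updated Deployment
results['service']     # dict form of the created/updated Service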
Example 5
    def test_get_data_paths_raises_for_unrecognised_paths(self):
        with self.assertRaises(VolumeNotFoundError):
            get_data_paths(['path1', 'path2'])
Example 6
def get_job_env_vars(persistence_outputs,
                     outputs_path,
                     persistence_data,
                     log_level=None,
                     logs_path=None,
                     outputs_refs_jobs=None,
                     outputs_refs_experiments=None,
                     ephemeral_token=None):
    env_vars = [
        get_env_var(name=API_HTTP_URL, value=get_settings_http_api_url()),
        get_env_var(name=API_WS_HOST, value=get_settings_ws_api_url()),
        get_env_var(name=constants.CONFIG_MAP_IN_CLUSTER, value=True),
        get_env_var(name=constants.CONFIG_MAP_API_VERSION, value=VERSION_V1),
        get_env_var(name=constants.CONFIG_MAP_INTERNAL_HEADER,
                    value=settings.HEADERS_INTERNAL.replace('_', '-')),
        get_env_var(name=constants.CONFIG_MAP_INTERNAL_HEADER_SERVICE,
                    value=settings.INTERNAL_SERVICES.RUNNER),
    ]
    if log_level:
        env_vars.append(
            get_env_var(name=constants.CONFIG_MAP_LOG_LEVEL_KEY_NAME,
                        value=log_level))

    if logs_path:
        env_vars.append(
            get_env_var(name=constants.CONFIG_MAP_RUN_LOGS_PATH_KEY_NAME,
                        value=logs_path))

    # Data and outputs paths
    data_paths = get_data_paths(persistence_data)
    env_vars += [
        get_env_var(name=constants.CONFIG_MAP_RUN_OUTPUTS_PATH_KEY_NAME,
                    value=outputs_path),
        get_env_var(name=constants.CONFIG_MAP_RUN_DATA_PATHS_KEY_NAME,
                    value=data_paths)
    ]

    refs_outputs = {}
    outputs_jobs_paths = get_paths_from_specs(specs=outputs_refs_jobs)
    if outputs_jobs_paths:
        refs_outputs['jobs'] = outputs_jobs_paths
    outputs_experiments_paths = get_paths_from_specs(
        specs=outputs_refs_experiments)
    if outputs_experiments_paths:
        refs_outputs['experiments'] = outputs_experiments_paths
    if refs_outputs:
        env_vars.append(
            get_env_var(name=constants.CONFIG_MAP_REFS_OUTPUTS_PATHS_KEY_NAME,
                        value=refs_outputs))

    env_vars += get_job_stores_secrets_env_vars(
        persistence_outputs=persistence_outputs,
        outputs_path=outputs_path,
        persistence_data=persistence_data,
        data_paths=data_paths,
        outputs_refs_jobs=outputs_refs_jobs,
        outputs_refs_experiments=outputs_refs_experiments)
    if ephemeral_token:
        env_vars.append(
            get_env_var(name=constants.SECRET_EPHEMERAL_TOKEN,
                        value=ephemeral_token))
    return env_vars
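get_job_env_vars passes both plain strings and structured values (booleans, dicts such as data_paths and refs_outputs) to get_env_var, which suggests the helper serializes non-string values before wrapping them in a kubernetes V1EnvVar, whose value must be a string. A minimal sketch under that assumption:

import json

from kubernetes import client


def get_env_var(name, value):
    # Sketch: V1EnvVar values must be strings, so non-string values are
    # assumed to be JSON-encoded before being attached to the container.
    if not isinstance(value, str):
        value = json.dumps(value)
    return client.V1EnvVar(name=name, value=value)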