def get_notebook_args(self, deployment_name, ports, mount_code_in_notebooks=False, backend=None):
    """Build the container args that launch the notebook backend for a deployment.

    The command is returned as a single-element list (one shell string) so it
    can be handed to a container spec's ``args``.

    Args:
        deployment_name: name of the k8s deployment the notebook runs in.
        ports: list of exposed ports; ``ports[0]`` is used to build the proxy URL.
        mount_code_in_notebooks: when True, serve notebooks from the project's
            repo checkout instead of the container's working directory.
        backend: jupyter backend to launch; falls back to the
            ``NOTEBOOK_BACKEND`` config option when not given.
    """
    backend = backend or conf.get('NOTEBOOK_BACKEND')
    token = self.get_notebook_token()
    base_url = self._get_proxy_url(
        namespace=self.namespace,
        job_name=NOTEBOOK_JOB_NAME,
        deployment_name=deployment_name,
        port=ports[0])
    if mount_code_in_notebooks:
        # Repo checkouts live under <repos_path>/<repo_name>, where the repo
        # name matches the last path component of the project repos path.
        repos_path = get_project_repos_path(self.project_name)
        notebook_dir = '{}/{}'.format(repos_path, repos_path.split('/')[-1])
    else:
        notebook_dir = '.'
    flags = [
        'jupyter {}'.format(backend),
        '--no-browser',
        '--port={}'.format(self.PORT),
        '--ip=0.0.0.0',
        '--allow-root',
        '--NotebookApp.token={}'.format(token),
        '--NotebookApp.trust_xheaders=True',
        '--NotebookApp.base_url={}'.format(base_url),
        '--NotebookApp.notebook_dir={}'.format(notebook_dir),
    ]
    # Join with single spaces and keep the historical trailing space so the
    # emitted command string is byte-identical to the previous implementation.
    return [' '.join(flags) + ' ']
def get_notebook_args(self, deployment_name, ports, allow_commits=False):
    """Build the ``jupyter notebook`` launch command for a deployment.

    Returns a single-element list (one shell string) suitable for a container
    spec's ``args``.

    Args:
        deployment_name: name of the k8s deployment the notebook runs in.
        ports: list of exposed ports; ``ports[0]`` is used for the proxy URL.
        allow_commits: when True, serve notebooks from the project's repo
            checkout (so edits can be committed); otherwise serve the
            container's working directory.
    """
    token = self.get_notebook_token()
    base_url = self._get_proxy_url(
        namespace=self.namespace,
        job_name=self.NOTEBOOK_JOB_NAME,
        deployment_name=deployment_name,
        port=ports[0])
    if allow_commits:
        # Repo checkouts live under <repos_path>/<repo_name>, where the repo
        # name matches the last path component of the project repos path.
        repos_path = get_project_repos_path(self.project_name)
        notebook_dir = '{}/{}'.format(repos_path, repos_path.split('/')[-1])
    else:
        notebook_dir = '.'
    flags = [
        'jupyter notebook',
        '--no-browser',
        '--port={}'.format(self.PORT),
        '--ip=0.0.0.0',
        '--allow-root',
        '--NotebookApp.token={}'.format(token),
        '--NotebookApp.trust_xheaders=True',
        '--NotebookApp.base_url={}'.format(base_url),
        '--NotebookApp.notebook_dir={}'.format(notebook_dir),
    ]
    # Join with single spaces and keep the historical trailing space so the
    # emitted command string is byte-identical to the previous implementation.
    return [' '.join(flags) + ' ']
def test_project_logs_path_creation_deletion(self):
    """Logs/repos paths are created by creation signals and removed by the
    deletion helpers.

    Fix: the original called ``open(experiment_logs_path, '+w')`` and never
    closed the handle, leaking a file descriptor (and, on some platforms,
    keeping the file locked so the deletion below could fail). The handle is
    now closed immediately; the file is still created, which is all the test
    needs.
    """
    # Mock the build task so creating the experiment does not schedule real
    # work via its post-save signal.
    # NOTE(review): the original indentation was lost; assuming only the
    # factory call runs under the mock — confirm against VCS history.
    with patch('scheduler.tasks.experiments.experiments_build.apply_async') as _:  # noqa
        experiment = ExperimentFactory(user=self.project.user, project=self.project)
    experiment_logs_path = stores.get_experiment_logs_path(
        experiment_name=experiment.unique_name,
        temp=False)
    stores.create_experiment_logs_path(experiment_name=experiment.unique_name, temp=False)
    # Touch the log file; close right away instead of leaking the handle.
    open(experiment_logs_path, '+w').close()
    project_logs_path = stores.get_project_logs_path(
        project_name=self.project.unique_name)
    project_repos_path = get_project_repos_path(self.project.unique_name)
    # Should be true, created by the signal
    assert os.path.exists(experiment_logs_path) is True
    assert os.path.exists(project_logs_path) is True
    assert os.path.exists(project_repos_path) is True
    # Deleting the project's log subpath and repos must remove everything,
    # including the per-experiment log file nested under the project path.
    stores_schedule_logs_deletion(persistence=None, subpath=self.project.subpath)
    delete_project_repos(self.project.unique_name)
    assert os.path.exists(experiment_logs_path) is False
    assert os.path.exists(project_logs_path) is False
    assert os.path.exists(project_repos_path) is False
def start_notebook(self, image, resources=None, node_selectors=None):
    """Create or update the k8s objects that run a Jupyter notebook.

    Builds and applies a Deployment and a Service (and an Ingress when
    ingress is enabled) for the notebook job, then returns the created
    resources as dicts: ``{'deployment': ..., 'service': ...}``.

    Args:
        image: container image to run the notebook in.
        resources: optional k8s resource requirements for the container.
        node_selectors: optional node selector for scheduling the pod.
    """
    # One externally-requested port mapped onto the fixed in-container PORT.
    ports = [self.request_notebook_port()]
    target_ports = [self.PORT]
    volumes, volume_mounts = get_pod_volumes()
    # This variant always mounts the project code into the notebook pod.
    code_volume, code_volume_mount = self.get_notebook_code_volume()
    volumes.append(code_volume)
    volume_mounts.append(code_volume_mount)
    deployment_name = constants.JOB_NAME.format(name=self.NOTEBOOK_JOB_NAME,
                                                job_uuid=self.job_uuid)
    notebook_token = self.get_notebook_token()
    # base_url routes the notebook behind the API proxy.
    notebook_url = self._get_proxy_url(
        namespace=self.namespace,
        job_name=self.NOTEBOOK_JOB_NAME,
        deployment_name=deployment_name,
        port=ports[0])
    # Serve from <repos_path>/<repo_name>; the repo name matches the last
    # path component of the project repos path.
    notebook_dir = get_project_repos_path(self.project_name)
    notebook_dir = '{}/{}'.format(notebook_dir, notebook_dir.split('/')[-1])
    deployment = deployments.get_deployment(
        namespace=self.namespace,
        app=settings.APP_LABELS_NOTEBOOK,
        name=self.NOTEBOOK_JOB_NAME,
        project_name=self.project_name,
        project_uuid=self.project_uuid,
        job_name=self.job_name,
        job_uuid=self.job_uuid,
        volume_mounts=volume_mounts,
        volumes=volumes,
        image=image,
        # The jupyter command is run through a shell so it stays one string.
        command=["/bin/sh", "-c"],
        args=[
            "jupyter notebook "
            "--no-browser "
            "--port={port} "
            "--ip=0.0.0.0 "
            "--allow-root "
            "--NotebookApp.token={token} "
            "--NotebookApp.trust_xheaders=True "
            "--NotebookApp.base_url={base_url} "
            "--NotebookApp.notebook_dir={notebook_dir} ".format(
                port=self.PORT,
                token=notebook_token,
                base_url=notebook_url,
                notebook_dir=notebook_dir)],
        ports=target_ports,
        container_name=settings.CONTAINER_NAME_PLUGIN_JOB,
        resources=resources,
        node_selector=node_selectors,
        role=settings.ROLE_LABELS_DASHBOARD,
        type=settings.TYPE_LABELS_EXPERIMENT)
    # Labels shared by the service (and ingress) so they select this pod.
    deployment_labels = deployments.get_labels(app=settings.APP_LABELS_NOTEBOOK,
                                               project_name=self.project_name,
                                               project_uuid=self.project_uuid,
                                               job_name=self.job_name,
                                               job_uuid=self.job_uuid,
                                               role=settings.ROLE_LABELS_DASHBOARD,
                                               type=settings.TYPE_LABELS_EXPERIMENT)
    dep_resp, _ = self.create_or_update_deployment(name=deployment_name, data=deployment)
    service = services.get_service(
        namespace=self.namespace,
        name=deployment_name,
        labels=deployment_labels,
        ports=ports,
        target_ports=target_ports,
        service_type=self._get_service_type())
    service_resp, _ = self.create_or_update_service(name=deployment_name, data=service)
    results = {'deployment': dep_resp.to_dict(), 'service': service_resp.to_dict()}
    if self._use_ingress():
        # Ingress annotations come from config as a JSON string.
        annotations = json.loads(settings.K8S_INGRESS_ANNOTATIONS)
        # Project names are dotted (<user>.<project>); the ingress path uses
        # slashes instead.
        paths = [{
            'path': '/notebook/{}'.format(self.project_name.replace('.', '/')),
            'backend': {
                'serviceName': deployment_name,
                'servicePort': ports[0]
            }
        }]
        ingress = ingresses.get_ingress(namespace=self.namespace,
                                        name=deployment_name,
                                        labels=deployment_labels,
                                        annotations=annotations,
                                        paths=paths)
        self.create_or_update_ingress(name=deployment_name, data=ingress)
    # NOTE(review): original indentation was lost in this source; the return
    # is placed at function level so non-ingress setups also get results.
    return results