def test_spawner_can_use_list_of_image_pull_secrets():
    """image_pull_secrets accepts both a list of names and a list of dicts."""
    secret_names = ["ecr", "regcred", "artifactory"]

    # Plain list of secret-name strings.
    c = Config()
    c.KubeSpawner.image_spec = "private.docker.registry/jupyter:1.2.3"
    c.KubeSpawner.image_pull_secrets = secret_names
    spawner = KubeSpawner(hub=Hub(), config=c, _mock=True)
    assert spawner.image_pull_secrets == secret_names

    # Same secrets expressed as V1LocalObjectReference-style dicts.
    secret_dicts = [dict(name=name) for name in secret_names]
    c = Config()
    c.KubeSpawner.image_spec = "private.docker.registry/jupyter:1.2.3"
    c.KubeSpawner.image_pull_secrets = secret_dicts
    spawner = KubeSpawner(hub=Hub(), config=c, _mock=True)
    assert spawner.image_pull_secrets == secret_dicts
async def test_start_normal(self):
    """start() creates a cluster and returns the master node's zonal DNS name."""
    operation = operations_pb2.Operation()

    # Mock the Dataproc API client.
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    mock_client.create_cluster.return_value = operation
    # Force no existing clusters to bypass the check in the spawner.
    mock_client.get_cluster.return_value = None

    spawner = DataprocSpawner(
        hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)

    # Test that the traitlets work.
    spawner.project = "test-create"
    assert spawner.project == "test-create"
    assert spawner.region == self.region

    (ip, port) = await spawner.start()
    assert ip == f'fake-jupyterhub-m.{self.zone}.c.{spawner.project}.internal'
    # JupyterHub defaults to 0 if no port set.
    assert port == 0

    mock_client.create_cluster.assert_called_once()

    assert spawner.cluster_data['cluster_name'] == 'fake-jupyterhub'
    expected_zone_uri = (
        'https://www.googleapis.com/compute/v1/projects/'
        f'{spawner.project}/zones/{spawner.zone}')
    assert spawner.cluster_data['config']['gce_cluster_config'][
        'zone_uri'] == expected_zone_uri

    # The hub environment must be serialized into the cluster properties.
    env = json.loads(
        spawner.cluster_data['config']['software_config']['properties'][
            'dataproc:jupyter.hub.env'])
    assert env['JUPYTERHUB_API_URL'] is not None
async def test_spawn(kube_ns, kube_client, config):
    """Full lifecycle: poll (stopped) -> start -> poll (running) -> stop -> poll."""
    spawner = KubeSpawner(hub=Hub(), user=MockUser(), config=config)

    # An empty spawner isn't running: poll returns an exit status.
    status = await spawner.poll()
    assert isinstance(status, int)

    # Start the spawner and verify its pod appears in the namespace.
    await spawner.start()
    pod_names = [
        p.metadata.name for p in kube_client.list_namespaced_pod(kube_ns).items
    ]
    assert "jupyter-%s" % spawner.user.name in pod_names

    # While running, poll returns None.
    status = await spawner.poll()
    assert status is None

    # Stop the pod and verify it is gone.
    await spawner.stop()
    pod_names = [
        p.metadata.name for p in kube_client.list_namespaced_pod(kube_ns).items
    ]
    assert "jupyter-%s" % spawner.user.name not in pod_names

    # After stopping, poll reports an exit status again.
    status = await spawner.poll()
    assert isinstance(status, int)
def test_specification():
    """_build_specification() maps spawner traits onto the skein app spec."""
    spawner = YarnSpawner(hub=Hub(), user=MockUser())
    spawner.queue = 'myqueue'
    spawner.prologue = 'Do this first'
    spawner.epilogue = 'Do this after'
    spawner.mem_limit = '1 G'
    spawner.cpu_limit = 2
    spawner.localize_files = {
        'environment': 'environment.tar.gz',
        'file2': {
            'source': 'path/to/file',
            'visibility': 'public',
        },
    }
    spawner.environment = {'TEST_ENV_VAR': 'TEST_VALUE'}

    spec = spawner._build_specification()

    assert spec.user == 'myname'
    assert spec.queue == 'myqueue'

    # Prologue / launch command / epilogue all land in the master script.
    script = spec.master.script
    assert 'Do this first\n' in script
    assert 'python -m yarnspawner.singleuser' in script
    assert 'Do this after' in script

    assert spec.master.resources == skein.Resources(memory='1 GiB', vcores=2)

    # Localized files and their visibility are carried through.
    assert 'environment' in spec.master.files
    assert 'file2' in spec.master.files
    assert spec.master.files['file2'].visibility == 'public'

    # User environment plus the hub-injected API token.
    assert 'TEST_ENV_VAR' in spec.master.env
    assert 'JUPYTERHUB_API_TOKEN' in spec.master.env
async def test_spawn_progress(kube_ns, kube_client, config):
    """progress() yields JSON-serializable events while the pod starts."""
    spawner = KubeSpawner(hub=Hub(), user=MockUser(name="progress"), config=config)

    # An empty spawner isn't running.
    status = await spawner.poll()
    assert isinstance(status, int)

    # Kick off the start without awaiting it yet, then consume progress events.
    start_future = spawner.start()
    messages = []
    async for progress in spawner.progress():
        assert 'progress' in progress
        assert isinstance(progress['progress'], int)
        assert 'message' in progress
        assert isinstance(progress['message'], str)
        messages.append(progress['message'])

        # Ensure we can serialize whatever we return.
        with open(os.devnull, "w") as devnull:
            json.dump(progress, devnull)

    assert 'Started container' in '\n'.join(messages)

    await start_future
    # Stop the pod.
    await spawner.stop()
def hub_ssl(kube_ns, hub_pod_ssl):
    """Return a Hub object pointing at a running hub pod with internal_ssl enabled."""
    hub_host = f"{hub_pod_ssl.metadata.name}.{kube_ns}"
    return Hub(
        proto="https",
        ip=hub_host,
        port=8081,
        base_url="/hub/",
    )
def spawner(tmpdir):
    """Build a TerraformSpawner wired to the bundled mock terraform module."""
    tf_spawner = TerraformSpawner(hub=Hub(), user=MockUser())
    tf_spawner.tf_bin = os.path.join(sys.base_exec_prefix, 'bin', 'terraform')
    tf_spawner.tf_dir = tmpdir.dirname
    tf_spawner.tf_module_source = os.path.join(
        os.path.dirname(__file__), 'terraform-mock-jupyterhub-singleuser')
    return tf_spawner
def init_hub(self):
    """Load the Hub config into the database"""
    # Construct the Hub object from the configured connection settings.
    self.hub = Hub(
        ip=self.hub_ip,
        port=self.hub_port,
        base_url=self.hub_prefix,
        cookie_name='jupyter-hub-token',
        public_host=self.subdomain_host,
    )
async def test_progress(self, monkeypatch):
    """progress() converts Cloud Logging entries into progress events."""
    fake_creds = AnonymousCredentials()
    mock_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_logging_client = mock.create_autospec(
        logging_v2.LoggingServiceV2Client(credentials=fake_creds))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_client,
        user=MockUser(),
        _mock=True,
        logging=mock_logging_client,
        gcs_notebooks=self.gcs_notebooks)
    spawner.project = "test-progress"

    async def collect(ait):
        """Drain an async iterator into a list."""
        items = []
        async for value in ait:
            items.append(value)
        return items

    def create_logs():
        """Five fake LogEntry records with method/message payloads."""
        return [
            LogEntry(
                insert_id=f'entry_{i}',
                json_payload=ParseDict(
                    {'method': 'method', 'message': f'message_{i}'}, Struct()))
            for i in range(5)
        ]

    def create_expected():
        """Progress events the spawner should emit for create_logs()."""
        progress = 5
        expected = []
        for i, _ in enumerate(create_logs()):
            # Progress approaches 90 by quarters of the remaining distance.
            progress += math.ceil((90 - progress) / 4)
            expected.append({
                'progress': progress,
                'message': f'method: message_{i}',
            })
        expected.append({'message': 'operation.done()', 'progress': 71})
        return expected

    def test_list_log_entries(*args, **kwargs):
        return create_logs()

    op = MockOperation('op1', 'cluster1-op1')
    monkeypatch.setattr(mock_logging_client, 'list_log_entries',
                        test_list_log_entries)
    monkeypatch.setattr(spawner, 'operation', op)

    _, _ = await spawner.start()
    assert await collect(spawner.progress()) == create_expected()
def test_minimium_cluster_definition(self, monkeypatch):
    """ Some keys must always be present for JupyterHub to work. """
    # NOTE: the original body had an unused local `import yaml`; removed.

    def test_read_file(*args, **kwargs):
        # Prevents a call to GCS: return the local fixture file instead.
        config_string = open('./tests/test_data/minimum.yaml', 'r').read()
        return config_string

    def test_clustername(*args, **kwargs):
        return 'test-clustername'

    fake_creds = AnonymousCredentials()
    mock_dataproc_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_gcs_client = mock.create_autospec(
        storage.Client(credentials=fake_creds, project='project'))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_dataproc_client,
        gcs=mock_gcs_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
    monkeypatch.setattr(spawner, "clustername", test_clustername)

    spawner.project = "test-project"
    spawner.zone = "test-self1-b"
    spawner.env_str = "test-env-str"
    spawner.args_str = "test-args-str"

    config_built = spawner._build_cluster_config()

    assert 'project_id' in config_built
    assert 'cluster_name' in config_built
    assert config_built['project_id'] == 'test-project'
    assert config_built['cluster_name'] == 'test-clustername'
    assert config_built['config']['gce_cluster_config']['zone_uri'].split(
        '/')[-1] == 'test-self1-b'

    # Jupyter must be enabled as an optional component.
    assert Component['JUPYTER'].value in config_built['config'][
        'software_config']['optional_components']
    assert Component['ANACONDA'].value in config_built['config'][
        'software_config']['optional_components']

    assert 'dataproc:jupyter.hub.args' in config_built['config'][
        'software_config']['properties']
    assert 'dataproc:jupyter.hub.enabled' in config_built['config'][
        'software_config']['properties']
    # assert 'dataproc:jupyter.notebook.gcs.dir' in config_built['config']['software_config']['properties']
    assert 'dataproc:jupyter.hub.env' in config_built['config'][
        'software_config']['properties']
async def test_domain_scoped_zonal_dns(self):
    """A 'domain:project' project id maps to the reversed-domain DNS form."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    spawner = DataprocSpawner(
        hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)

    spawner.project = "test:domain-scoped"
    assert spawner.project == "test:domain-scoped"

    (ip, port) = await spawner.start()
    assert ip == f'fake-jupyterhub-m.{self.zone}.c.domain-scoped.test.internal'
    assert port == 0
async def test_poll_no_cluster(self):
    """poll() reports exit status 1 when no cluster exists."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    mock_client.get_cluster.return_value = None

    spawner = DataprocSpawner(
        hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)
    spawner.project = "test-poll-no-cluster"
    assert spawner.project == "test-poll-no-cluster"

    assert await spawner.poll() == 1
def new_spawner(db, **kwargs):
    """Build an SSHSpawner for the first ORM user, with sane test defaults."""
    user = kwargs.setdefault("user", User(db.query(orm.User).first(), {}))
    defaults = {
        "cmd": [sys.executable, "-c", _echo_sleep],
        "hub": Hub(),
        "notebook_dir": os.getcwd(),
        "default_url": "/user/{username}/lab",
        "oauth_client_id": "mock-client-id",
        "interrupt_timeout": 1,
        "term_timeout": 1,
        "kill_timeout": 1,
        "poll_interval": 1,
    }
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    return user._new_spawner("", spawner_class=SSHSpawner, **kwargs)
def new_spawner(db, **kwargs):
    """Build an SSHSpawner for the first ORM user, with sane test defaults."""
    user = kwargs.setdefault('user', User(db.query(orm.User).first(), {}))
    defaults = {
        'cmd': [sys.executable, '-c', _echo_sleep],
        'hub': Hub(),
        'notebook_dir': os.getcwd(),
        'default_url': '/user/{username}/lab',
        'oauth_client_id': 'mock-client-id',
        'interrupt_timeout': 1,
        'term_timeout': 1,
        'kill_timeout': 1,
        'poll_interval': 1,
    }
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    return user._new_spawner('', spawner_class=SSHSpawner, **kwargs)
def test_locations(self, monkeypatch):
    """User-selected zone/region flow into the built cluster config URIs."""
    # NOTE: the original body had an unused local `import yaml`; removed.

    def test_read_file(*args, **kwargs):
        # Prevents a call to GCS: return the local fixture file instead.
        config_string = open('./tests/test_data/basic_uri.yaml', 'r').read()
        return config_string

    def test_clustername(*args, **kwargs):
        return 'test-clustername'

    fake_creds = AnonymousCredentials()
    mock_dataproc_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_gcs_client = mock.create_autospec(
        storage.Client(credentials=fake_creds, project='project'))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_dataproc_client,
        gcs=mock_gcs_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
    monkeypatch.setattr(spawner, "clustername", test_clustername)

    spawner.project = "test-project"
    spawner.region = "us-east1"
    spawner.zone = "us-east1-d"
    spawner.env_str = "test-env-str"
    spawner.args_str = "test-args-str"
    spawner.user_options = {
        'cluster_type': 'basic_uri.yaml',
        'cluster_zone': 'us-east1-d',
    }

    user_zone = spawner.user_options['cluster_zone']
    # Region is the zone minus its trailing "-<letter>" suffix.
    user_region = user_zone[:-2]

    config_built = spawner._build_cluster_config()

    assert config_built['config']['gce_cluster_config'][
        'subnetwork_uri'].split('/')[-3] == user_region
    assert config_built['config']['master_config'][
        'machine_type_uri'] == 'n1-standard-4'
    assert config_built['config']['worker_config'][
        'machine_type_uri'] == 'n1-highmem-16'
    assert config_built['config']['secondary_worker_config'][
        'machine_type_uri'] == 'n1-standard-4'
    assert config_built['config']['master_config']['accelerators'][0][
        'accelerator_type_uri'] == 'nvidia-tesla-v100'
async def test_stop_normal(self):
    """stop() deletes the user's cluster via the Dataproc API."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    spawner = DataprocSpawner(
        hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)

    spawner.project = "test-stop"
    assert spawner.project == "test-stop"
    assert spawner.region == self.region

    response = await spawner.stop()

    mock_client.delete_cluster.assert_called_once_with(
        "test-stop", self.region, 'fake-jupyterhub')
async def test_stop_no_cluster(self):
    """stop() is a no-op when there is no cluster to delete."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    mock_client.get_cluster.return_value = None

    spawner = DataprocSpawner(
        hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)
    spawner.project = "test-stop-no-cluster"
    assert spawner.project == "test-stop-no-cluster"

    response = await spawner.stop()
    mock_client.delete_cluster.assert_not_called()
async def test_multi_namespace_spawn():
    """Spawn/poll/stop in a per-user namespace, cleaning the namespace up after.

    We cannot use the fixtures, because they assume the standard
    namespace and client for that namespace.
    """
    spawner = KubeSpawner(
        hub=Hub(),
        user=MockUser(),
        config=Config(),
        enable_user_namespaces=True,
    )

    # empty spawner isn't running
    status = await spawner.poll()
    assert isinstance(status, int)

    # get a client for the user namespace
    kube_ns = spawner.namespace
    load_kube_config()
    client = shared_client('CoreV1Api')

    # The spawner will create the namespace on its own.
    # Wrap in a try block so we clean up the namespace.
    saved_exception = None
    try:
        # start the spawner
        await spawner.start()
        # verify the pod exists
        pods = client.list_namespaced_pod(kube_ns).items
        pod_names = [p.metadata.name for p in pods]
        assert "jupyter-%s" % spawner.user.name in pod_names
        # verify poll while running
        status = await spawner.poll()
        assert status is None
        # stop the pod
        await spawner.stop()
        # verify pod is gone
        pods = client.list_namespaced_pod(kube_ns).items
        pod_names = [p.metadata.name for p in pods]
        assert "jupyter-%s" % spawner.user.name not in pod_names
        # verify exit status
        status = await spawner.poll()
        assert isinstance(status, int)
    except Exception as e:
        # BUG FIX: the original used `except Exception as saved_exception`,
        # but Python 3 deletes the `as` name when the except clause exits,
        # so the re-raise check below always hit a NameError (it even
        # shadowed the `saved_exception = None` sentinel). Capture the
        # exception into a separate variable instead; we raise it after
        # namespace removal.
        saved_exception = e

    # remove namespace
    client.delete_namespace(kube_ns, body={})

    if saved_exception is not None:
        raise saved_exception
async def test_pod_connect_ip(kube_ns, kube_client, config):
    """pod_connect_ip template expands {username} and {servername} correctly."""
    config.KubeSpawner.pod_connect_ip = (
        "jupyter-{username}--{servername}.foo.example.com")

    # Without a named server: the "--{servername}" segment collapses away.
    spawner = KubeSpawner(hub=Hub(), user=MockUser(), config=config)
    res = await spawner.start()
    assert res == ("jupyter-fake.foo.example.com", 8888)
    await spawner.stop()

    # With a named server: servername is included in the hostname.
    spawner = KubeSpawner(
        hub=Hub(), user=MockUser(), config=config, orm_spawner=MockOrmSpawner())
    res = await spawner.start()
    assert res == ("jupyter-fake--server.foo.example.com", 8888)
    # BUG FIX: the original never stopped the second spawner, leaking its
    # pod into the test namespace and polluting subsequent tests.
    await spawner.stop()
def test_cluster_definition_overrides(self, monkeypatch):
    """Check that config settings incompatible with JupyterHub are overwritten correctly."""
    # NOTE: the original body had an unused local `import yaml`; removed.

    def test_read_file(*args, **kwargs):
        # Prevents a call to GCS: return the local fixture file instead.
        config_string = open('./tests/test_data/export.yaml', 'r').read()
        return config_string

    def test_clustername(*args, **kwargs):
        return 'test-clustername'

    fake_creds = AnonymousCredentials()
    mock_dataproc_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_gcs_client = mock.create_autospec(
        storage.Client(credentials=fake_creds, project='project'))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_dataproc_client,
        gcs=mock_gcs_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
    monkeypatch.setattr(spawner, "clustername", test_clustername)

    spawner.project = "test-project"
    spawner.region = "us-east1"
    spawner.zone = "us-east1-d"
    spawner.env_str = "test-env-str"
    spawner.args_str = "test-args-str"
    spawner.user_options = {
        'cluster_type': 'export.yaml',
        'cluster_zone': 'test-form1-a',
    }

    config_built = spawner._build_cluster_config()

    # Verify that we disable Component Gateway (temporarily)
    assert config_built['config']['endpoint_config'][
        'enable_http_port_access'] == False
    # Verify that we disable preemptibility (temporarily)
    assert 'preemptibility' not in config_built['config']['master_config']
    assert 'preemptibility' not in config_built['config']['worker_config']
    # Verify that we removed cluster-specific namenode properties
    assert 'hdfs:dfs.namenode.lifeline.rpc-address' not in config_built[
        'config']['software_config']['properties']
    assert 'hdfs:dfs.namenode.servicerpc-address' not in config_built[
        'config']['software_config']['properties']
async def test_start_existing_clustername(self):
    """start() reuses an existing cluster instead of creating a new one."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    spawner = DataprocSpawner(
        hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)

    spawner.project = "test-create-existing"
    assert spawner.project == "test-create-existing"

    (ip, port) = await spawner.start()
    assert ip == f'fake-jupyterhub-m.{self.zone}.c.{spawner.project}.internal'
    assert port == 0

    # No create call: the cluster already existed.
    mock_client.create_cluster.assert_not_called()
def test_image_version_supports_component_gateway(self):
    """Component Gateway support is decided correctly per image version string."""
    fake_creds = AnonymousCredentials()
    mock_dataproc_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_gcs_client = mock.create_autospec(
        storage.Client(credentials=fake_creds, project='project'))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_dataproc_client,
        gcs=mock_gcs_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    # (version string, expected support) — unparseable versions default to True.
    cases = [
        ('1.3', True),
        ('1.3-debian9', True),
        ('1.3.6-debian9', False),
        ('1.3.59-debian9', True),
        ('1.3.999-debian9', True),
        ('1.4-debian10', True),
        ('1.4.6-debian10', False),
        ('1.4.31-debian10', True),
        ('1.5-debian10', True),
        ('1.5.0-debian10', False),
        ('1.5.5-debian10', True),
        ('2', True),
        ('2.0', True),
        ('2.0.0', True),
        ('2.3.0', True),
        ('2.0.0-RC1-preview', True),
        ('weird-unexpected-version-124.3.v2.2020-02-15', True),
        ('1.3.weird-version-again', True),
    ]
    for version, expected in cases:
        assert spawner._validate_image_version_supports_component_gateway(
            version) is expected, version
def test_cluster_definition_keep_core_values(self, monkeypatch):
    """ Some system's default values must remain no matter what. """
    # NOTE: the original body had an unused local `import yaml`; removed.

    def test_read_file(*args, **kwargs):
        # Prevents a call to GCS: return the local fixture file instead.
        config_string = open('./tests/test_data/basic.yaml', 'r').read()
        return config_string

    def test_clustername(*args, **kwargs):
        return 'test-clustername'

    fake_creds = AnonymousCredentials()
    mock_dataproc_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_gcs_client = mock.create_autospec(
        storage.Client(credentials=fake_creds, project='project'))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_dataproc_client,
        gcs=mock_gcs_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
    monkeypatch.setattr(spawner, "clustername", test_clustername)

    spawner.project = "test-project"
    spawner.region = "us-east1"
    spawner.zone = "us-east1-d"
    spawner.env_str = "test-env-str"
    spawner.args_str = "test-args-str"
    spawner.user_options = {
        'cluster_type': 'basic.yaml',
        'cluster_zone': 'test-form1-a',
    }

    config_built = spawner._build_cluster_config()

    assert config_built['project_id'] == 'test-project'
    assert config_built['cluster_name'] == 'test-clustername'
    assert config_built['config']['software_config']['properties'][
        'dataproc:jupyter.hub.args'] == 'test-args-str'
    assert config_built['config']['software_config']['properties'][
        'dataproc:jupyter.hub.enabled'] == 'true'
    # assert config_built['config']['software_config']['properties']['dataproc:jupyter.notebook.gcs.dir'] == f'gs://users-notebooks/fake'
    assert config_built['config']['software_config']['properties'][
        'dataproc:jupyter.hub.env'] == 'test-env-str'
def new_spawner(db, spawner_class=BatchDummy, **kwargs):
    """Construct a batch spawner for the first ORM user with test defaults."""
    kwargs.setdefault("cmd", ["singleuser_command"])
    orm_user = db.query(orm.User).first()
    user = User(orm_user, {})
    # Server is set after construction because it isn't a traitlet.
    server = Server()
    kwargs.setdefault("hub", Hub())
    kwargs.setdefault("user", user)
    kwargs.setdefault("poll_interval", 1)

    spawner = user._new_spawner("", spawner_class=spawner_class, **kwargs)
    # These are not traitlets so we have to set them here.
    spawner.server = server
    spawner.mock_port = testport
    return spawner
async def test_poll_no_cluster(self):
    """poll() reports exit status 1 when no cluster exists."""
    fake_creds = AnonymousCredentials()
    mock_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_client.get_cluster.return_value = None

    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)
    spawner.project = 'test-poll-no-cluster'
    assert spawner.project == 'test-poll-no-cluster'

    assert await spawner.poll() == 1
def test_clean_gcs_path(self, monkeypatch):
    """_clean_gcs_path() normalizes trailing slashes and the gs:// prefix."""
    path = "gs://bucket/config/"
    fake_creds = AnonymousCredentials()
    mock_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    # Default: strip the trailing slash, keep the gs:// scheme.
    assert spawner._clean_gcs_path(path) == "gs://bucket/config"
    # return_gs=False drops the scheme as well.
    assert spawner._clean_gcs_path(path, return_gs=False) == "bucket/config"
    # return_slash=True preserves the trailing slash.
    assert spawner._clean_gcs_path(path, return_slash=True) == "gs://bucket/config/"
async def test_domain_scoped_zonal_dns(self):
    """A 'domain:project' project id maps to the reversed-domain DNS form."""
    fake_creds = AnonymousCredentials()
    mock_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    spawner.project = "test:domain-scoped"
    assert spawner.project == "test:domain-scoped"

    (ip, port) = await spawner.start()
    assert ip == f'dataprochub-fake-m.{self.zone}.c.domain-scoped.test.internal'
    assert port == 0
def test_deprecated_config():
    """Deprecated config is handled correctly"""
    c = Config()
    # When both the deprecated and the new name are set, the new name wins.
    c.KubeSpawner.singleuser_fs_gid = 5
    c.KubeSpawner.fs_gid = 10
    # When only the deprecated name is set, it should still take effect.
    c.KubeSpawner.hub_connect_ip = '10.0.1.1'
    c.KubeSpawner.singleuser_extra_pod_config = extra_pod_config = {"key": "value"}
    c.KubeSpawner.image_spec = 'abc:123'

    spawner = KubeSpawner(hub=Hub(), config=c, _mock=True)

    assert spawner.hub.connect_ip == '10.0.1.1'
    assert spawner.fs_gid == 10
    assert spawner.extra_pod_config == extra_pod_config

    # Reading through the deprecated names yields the same values.
    assert spawner.singleuser_fs_gid == spawner.fs_gid
    assert spawner.singleuser_extra_pod_config == spawner.extra_pod_config
    assert spawner.image == 'abc:123'
async def test_poll_create(self):
    """poll() returns None (still running) while the cluster is CREATING."""
    creating_cluster = clusters_pb2.Cluster(
        **{"status": {"state": "CREATING"}})

    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    mock_client.get_cluster.return_value = creating_cluster

    spawner = DataprocSpawner(
        hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)
    spawner.project = "test-poll-create"
    assert spawner.project == "test-poll-create"

    assert await spawner.poll() == None
def test_duration(self, monkeypatch):
    """Initialization-action timeouts parse from both strings and Durations."""
    # NOTE: the original body had an unused local `import yaml`; removed.

    def test_read_file(*args, **kwargs):
        # Prevents a call to GCS: return the local fixture file instead.
        config_string = open('./tests/test_data/duration.yaml', 'r').read()
        return config_string

    def test_clustername(*args, **kwargs):
        return 'test-clustername'

    fake_creds = AnonymousCredentials()
    mock_dataproc_client = mock.create_autospec(
        ClusterControllerClient(credentials=fake_creds))
    mock_gcs_client = mock.create_autospec(
        storage.Client(credentials=fake_creds, project='project'))
    spawner = DataprocSpawner(
        hub=Hub(),
        dataproc=mock_dataproc_client,
        gcs=mock_gcs_client,
        user=MockUser(),
        _mock=True,
        gcs_notebooks=self.gcs_notebooks)

    monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
    monkeypatch.setattr(spawner, "clustername", test_clustername)

    spawner.project = "test-project"
    spawner.region = "us-east1"
    spawner.zone = "us-east1-d"
    spawner.env_str = "test-env-str"
    spawner.args_str = "test-args-str"
    spawner.user_options = {
        'cluster_type': 'duration.yaml',
        'cluster_zone': 'test-form1-a',
    }

    config_built = spawner._build_cluster_config()

    # Test "600s" string form.
    assert config_built['config']['initialization_actions'][0][
        'execution_timeout']['seconds'] == 600
    # Test Duration protobuf form.
    assert config_built['config']['initialization_actions'][1][
        'execution_timeout']['seconds'] == 600