def test_get_cluster(self):
    """get_cluster returns the stubbed cluster and sends exactly one request."""
    # Proto the stubbed channel will hand back.
    stubbed_cluster = clusters_pb2.Cluster(
        project_id="projectId2939242356",
        cluster_name="clusterName2875867491",
        cluster_uuid="clusterUuid-1017854240",
    )

    # Route the client's gRPC channel creation to our stub.
    channel = ChannelStub(responses=[stubbed_cluster])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    project_id = "projectId-1969970175"
    region = "region-934795532"
    cluster_name = "clusterName-1018081872"
    assert client.get_cluster(project_id, region, cluster_name) == stubbed_cluster

    # Exactly one RPC must have gone out, carrying the expected request proto.
    assert len(channel.requests) == 1
    want = clusters_pb2.GetClusterRequest(
        project_id=project_id, region=region, cluster_name=cluster_name
    )
    assert channel.requests[0][1] == want
def test_list_clusters(self):
    """list_clusters pages through the stubbed response and issues one RPC."""
    # One page containing a single (empty) cluster entry and no next page.
    page = clusters_pb2.ListClustersResponse(next_page_token="", clusters=[{}])

    channel = ChannelStub(responses=[page])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    project_id = "projectId-1969970175"
    region = "region-934795532"
    resources = list(client.list_clusters(project_id, region))

    # The pager yields exactly the one cluster from the stubbed page.
    assert len(resources) == 1
    assert resources[0] == page.clusters[0]

    assert len(channel.requests) == 1
    want = clusters_pb2.ListClustersRequest(project_id=project_id, region=region)
    assert channel.requests[0][1] == want
def test_create_cluster(self):
    """create_cluster returns an LRO whose result is the stubbed cluster."""
    # Cluster proto the finished operation carries as its packed response.
    stubbed_cluster = clusters_pb2.Cluster(
        project_id='projectId2939242356',
        cluster_name='clusterName-1018081872',
        cluster_uuid='clusterUuid-1017854240',
    )
    operation = operations_pb2.Operation(
        name='operations/test_create_cluster', done=True)
    operation.response.Pack(stubbed_cluster)

    channel = ChannelStub(responses=[operation])
    client = dataproc_v1beta2.ClusterControllerClient(channel=channel)

    project_id = 'projectId-1969970175'
    region = 'region-934795532'
    cluster = {}
    future = client.create_cluster(project_id, region, cluster)
    assert future.result() == stubbed_cluster

    # A single CreateClusterRequest must have been sent over the wire.
    assert len(channel.requests) == 1
    want = clusters_pb2.CreateClusterRequest(
        project_id=project_id, region=region, cluster=cluster)
    assert channel.requests[0][1] == want
async def test_start_normal(self): operation = operations_pb2.Operation() # Mock the Dataproc API client mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient()) mock_client.create_cluster.return_value = operation # Force no existing clusters to bypass the check in the spawner mock_client.get_cluster.return_value = None spawner = DataprocSpawner(hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True) # Test that the traitlets work spawner.project = "test-create" assert spawner.project == "test-create" assert spawner.region == self.region (ip, port) = await spawner.start() assert ip == f'fake-jupyterhub-m.{self.zone}.c.{spawner.project}.internal' # JupyterHub defaults to 0 if no port set assert port == 0 mock_client.create_cluster.assert_called_once() assert spawner.cluster_data['cluster_name'] == 'fake-jupyterhub' assert spawner.cluster_data['config']['gce_cluster_config']['zone_uri'] == f'https://www.googleapis.com/compute/v1/projects/{spawner.project}/zones/{spawner.zone}' env = json.loads(spawner.cluster_data['config']['software_config']['properties']['dataproc:jupyter.hub.env']) assert env['JUPYTERHUB_API_URL'] is not None
def test_diagnose_cluster(self):
    """diagnose_cluster returns an LRO that resolves to an Empty response."""
    empty = empty_pb2.Empty()
    operation = operations_pb2.Operation(
        name='operations/test_diagnose_cluster', done=True)
    operation.response.Pack(empty)

    channel = ChannelStub(responses=[operation])
    client = dataproc_v1beta2.ClusterControllerClient(channel=channel)

    project_id = 'projectId-1969970175'
    region = 'region-934795532'
    cluster_name = 'clusterName-1018081872'
    future = client.diagnose_cluster(project_id, region, cluster_name)
    assert future.result() == empty

    # Exactly one DiagnoseClusterRequest should have been issued.
    assert len(channel.requests) == 1
    want = clusters_pb2.DiagnoseClusterRequest(
        project_id=project_id, region=region, cluster_name=cluster_name)
    assert channel.requests[0][1] == want
def test_list_clusters(self):
    """Paging over list_clusters yields the single stubbed cluster."""
    page = clusters_pb2.ListClustersResponse(next_page_token='', clusters=[{}])

    channel = ChannelStub(responses=[page])
    client = dataproc_v1beta2.ClusterControllerClient(channel=channel)

    project_id = 'projectId-1969970175'
    region = 'region-934795532'
    resources = list(client.list_clusters(project_id, region))

    assert len(resources) == 1
    assert resources[0] == page.clusters[0]

    # Verify the request proto that went over the stubbed channel.
    assert len(channel.requests) == 1
    want = clusters_pb2.ListClustersRequest(project_id=project_id, region=region)
    assert channel.requests[0][1] == want
def create_cluster(cluster_json_path, cluster_name, region):
    """Create a Dataproc cluster from a JSON (Jinja) template file.

    Fixes the previous docstring, which documented parameters this function
    does not take and claimed it returned the cluster client.

    :param cluster_json_path: path to the cluster-config JSON template,
        rendered with Jinja using ``cluster_name``
    :param cluster_name: cluster name substituted into the template
    :param region: region where the cluster is created; also selects the
        regional Dataproc API endpoint
    :return: None; blocks until the create operation completes and prints
        a success message
    """
    # Use the regional gRPC endpoint matching the target region.
    cluster_client = dataproc_v1beta2.ClusterControllerClient(
        client_options={
            'api_endpoint': '{}-dataproc.googleapis.com:443'.format(region)
        })

    # The json is created under the cluster config folder
    with open(cluster_json_path, 'r') as f:
        cluster_json_template = Template(f.read())

    cluster_json = cluster_json_template.render(cluster_name=cluster_name)
    cluster_config = json_format.Parse(cluster_json, clusters.Cluster())
    # The project id is taken from the rendered config itself.
    project_id = cluster_config.project_id

    operation = cluster_client.create_cluster(project_id, region, cluster_config)
    # Block until the long-running operation finishes.
    result = operation.result()

    # Output a success message.
    print('Cluster created successfully : {}'.format(result.cluster_name))
def test_get_cluster(self):
    """get_cluster returns the stubbed cluster and sends one GetClusterRequest."""
    stubbed_cluster = clusters_pb2.Cluster(
        project_id='projectId2939242356',
        cluster_name='clusterName2875867491',
        cluster_uuid='clusterUuid-1017854240',
    )

    channel = ChannelStub(responses=[stubbed_cluster])
    client = dataproc_v1beta2.ClusterControllerClient(channel=channel)

    project_id = 'projectId-1969970175'
    region = 'region-934795532'
    cluster_name = 'clusterName-1018081872'
    assert client.get_cluster(project_id, region, cluster_name) == stubbed_cluster

    assert len(channel.requests) == 1
    want = clusters_pb2.GetClusterRequest(
        project_id=project_id, region=region, cluster_name=cluster_name)
    assert channel.requests[0][1] == want
def _update_cluster_list_on_filter(self, _widget, _event, data):
    """Refresh the cluster dropdown when the filter widget changes.

    ``data`` is the widget event payload, forwarded to ``get_cluster_pool``
    (presumably the filter value — confirm against the widget callback
    signature). On failure the dropdown placeholders are reset and the error
    is shown to the user instead of raising.
    """
    self.initialize_credentials_with_auth_account_selection(
        self.account_widget.v_model)
    # Keep the project widget in sync with the spawner's known project.
    if self.project_widget.v_model != self.project and self.project is not None:
        self.project_widget.v_model = self.project
    # We need to update filters and clusters now.
    if self.region_widget.v_model is not None:
        try:
            # Regional Dataproc endpoint for the currently selected region.
            client = dataproc_v1beta2.ClusterControllerClient(
                credentials=self.credentials,
                client_options={
                    "api_endpoint":
                    f"{self.region_widget.v_model}-dataproc.googleapis.com:443"
                })
            # Update the cluster dropdown (filter items are ignored here).
            self.cluster_widget.items, _ = get_cluster_pool(
                self.project_widget.v_model, self.region_widget.v_model,
                client, data)
            self._update_widgets_placeholder_text()
        except Exception as caught_exc:
            # Broad on purpose: any client/RPC failure becomes a UI message.
            self.cluster_widget.placeholder = constants.NO_CLUSTERS_FOUND_MESSAGE
            self.filter_widget.placeholder = constants.NO_FILTERS_FOUND_MESSAGE
            ipython_display.send_error(f"Failed to create a client with the api_endpoint: "\
                f"{self.region_widget.v_model}-dataproc.googleapis.com:443 due to an error: "\
                f"{str(caught_exc)}")
def _update_cluster_list_on_region(self, _widget, _event, data):
    """Refresh the cluster/filter dropdowns when the region selection changes.

    ``data`` is the newly selected region string delivered by the widget
    event. On failure the widgets are cleared and an error message is shown
    to the user instead of raising.
    """
    if self.account_widget.v_model is not None and self.project_widget.v_model is not None:
        self.initialize_credentials_with_auth_account_selection(
            self.account_widget.v_model)
        try:
            # Probe the project/region pair first; raises if it is invalid.
            _, _ = get_component_gateway_url(self.project_widget.v_model, data,
                                             None, self.credentials)
            self.region_widget.error = False
            self.project_widget.error = False
            # Talk to the regional Dataproc endpoint for the chosen region.
            client = dataproc_v1beta2.ClusterControllerClient(
                credentials=self.credentials,
                client_options={
                    "api_endpoint": f"{data}-dataproc.googleapis.com:443"
                })
            self.cluster_widget.items, self.filter_widget.items = get_cluster_pool(
                self.project_widget.v_model, data, client)
            self._update_widgets_placeholder_text()
        except IndexError:
            # Nothing to pick from; not a user-input error. (Redundant
            # ``pass`` after the assignment removed.)
            self.region_widget.error = False
        except Exception:
            # Was a bare ``except:``, which would also swallow SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            self.region_widget.error = True
            ipython_display.send_error("Please make sure you have entered a correct Project "\
                "ID and Region.")
            self.cluster_widget.placeholder = constants.NO_CLUSTERS_FOUND_MESSAGE
            self.filter_widget.placeholder = constants.NO_FILTERS_FOUND_MESSAGE
            self.cluster_widget.items = []
            self.filter_widget.items = []
def test_diagnose_cluster(self):
    """diagnose_cluster LRO resolves to Empty; one request goes over the wire."""
    empty = empty_pb2.Empty()
    operation = operations_pb2.Operation(
        name="operations/test_diagnose_cluster", done=True
    )
    operation.response.Pack(empty)

    channel = ChannelStub(responses=[operation])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    project_id = "projectId-1969970175"
    region = "region-934795532"
    cluster_name = "clusterName-1018081872"
    future = client.diagnose_cluster(project_id, region, cluster_name)
    assert future.result() == empty

    assert len(channel.requests) == 1
    want = clusters_pb2.DiagnoseClusterRequest(
        project_id=project_id, region=region, cluster_name=cluster_name
    )
    assert channel.requests[0][1] == want
def test_update_cluster_exception(self):
    """A failed update operation surfaces its status via the future's exception."""
    error = status_pb2.Status()
    operation = operations_pb2.Operation(
        name="operations/test_update_cluster_exception", done=True
    )
    operation.error.CopyFrom(error)

    channel = ChannelStub(responses=[operation])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    future = client.update_cluster(
        "projectId-1969970175", "region-934795532", "clusterName-1018081872",
        {}, {}
    )
    # The operation's error status must be attached to the raised exception.
    assert future.exception().errors[0] == error
def test_list_clusters(self):
    """Smoke test: list clusters in the ``global`` region of the env project.

    Requires ``PROJECT_ID`` in the environment and live credentials; only
    checks that the call does not raise. The redundant
    ``project_id_2 = project_id`` alias and the unused ``response`` binding
    were removed.
    """
    project_id = os.environ['PROJECT_ID']
    client = dataproc_v1beta2.ClusterControllerClient()
    region = 'global'
    client.list_clusters(project_id, region)
def test_list_clusters(self):
    """Smoke test against the live API using the request-dict call style.

    Requires ``PROJECT_ID`` in the environment; only checks the call
    completes without raising. The redundant ``project_id_2`` alias and the
    unused ``response`` binding were removed.
    """
    project_id = os.environ["PROJECT_ID"]
    client = dataproc_v1beta2.ClusterControllerClient()
    region = "global"
    client.list_clusters(request={
        "project_id": project_id,
        "region": region
    })
async def test_domain_scoped_zonal_dns(self):
    """A domain-scoped project id yields the <project>.<domain> internal DNS form."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    spawner = DataprocSpawner(hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)

    # "test:domain-scoped" is a domain-scoped project id (domain:project).
    spawner.project = "test:domain-scoped"
    assert spawner.project == "test:domain-scoped"

    ip, port = await spawner.start()
    assert ip == f'fake-jupyterhub-m.{self.zone}.c.domain-scoped.test.internal'
    assert port == 0
async def test_poll_no_cluster(self):
    """poll() reports exit status 1 when no cluster exists for the spawner."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    # Simulate "no cluster" on the API side.
    mock_client.get_cluster.return_value = None

    spawner = DataprocSpawner(hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)
    spawner.project = "test-poll-no-cluster"
    assert spawner.project == "test-poll-no-cluster"

    assert await spawner.poll() == 1
def test_list_clusters_exception(self):
    """An error from the channel propagates out of the list_clusters pager."""
    channel = ChannelStub(responses=[CustomException()])
    client = dataproc_v1beta2.ClusterControllerClient(channel=channel)

    pager = client.list_clusters('projectId-1969970175', 'region-934795532')
    # The error only surfaces when the pager is actually consumed.
    with pytest.raises(CustomException):
        list(pager)
def test_get_cluster_exception(self):
    """A channel error propagates from get_cluster as CustomException."""
    channel = ChannelStub(responses=[CustomException()])
    client = dataproc_v1beta2.ClusterControllerClient(channel=channel)

    with pytest.raises(CustomException):
        client.get_cluster('projectId-1969970175', 'region-934795532',
                           'clusterName-1018081872')
async def test_stop_no_cluster(self):
    """stop() must not call delete_cluster when no cluster exists.

    The unused ``response`` binding was removed; only the side effect on the
    mocked client matters here.
    """
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    # Simulate "cluster not found" on the API side.
    mock_client.get_cluster.return_value = None

    spawner = DataprocSpawner(hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)
    spawner.project = "test-stop-no-cluster"
    assert spawner.project == "test-stop-no-cluster"

    await spawner.stop()
    mock_client.delete_cluster.assert_not_called()
async def test_stop_normal(self):
    """stop() deletes the spawner's cluster exactly once with the right args."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    spawner = DataprocSpawner(hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)

    spawner.project = "test-stop"
    assert spawner.project == "test-stop"
    assert spawner.region == self.region

    response = await spawner.stop()
    mock_client.delete_cluster.assert_called_once_with("test-stop", self.region, 'fake-jupyterhub')
def test_list_clusters_exception(self):
    """Channel errors surface when the list_clusters pager is consumed."""
    channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    pager = client.list_clusters("projectId-1969970175", "region-934795532")
    with pytest.raises(CustomException):
        list(pager)
def __init__(self, *args, **kwargs):
    """Wire up the Dataproc client, or accept an injected test double.

    When ``_mock=True`` is passed, ``self.dataproc`` is taken from the
    ``dataproc`` kwarg. Otherwise a real ClusterControllerClient is built
    over a regional gRPC transport for ``self.region``.
    """
    # Pop before super() so the parent never sees the test-only flag.
    _mock = kwargs.pop('_mock', False)
    super().__init__(*args, **kwargs)
    if _mock:
        # Mock the API.
        # NOTE(review): read with .get(), so a missing ``dataproc`` kwarg
        # silently yields None — confirm callers always pass it.
        self.dataproc = kwargs.get('dataproc')
    else:
        # Regional Dataproc gRPC endpoint for self.region.
        self.client_transport = (
            cluster_controller_grpc_transport.
            ClusterControllerGrpcTransport(
                address=f'{self.region}-dataproc.googleapis.com:443'))
        self.dataproc = dataproc_v1beta2.ClusterControllerClient(
            self.client_transport)
async def test_start_existing_clustername(self):
    """start() must not create a new cluster when one already exists."""
    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    spawner = DataprocSpawner(hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)

    spawner.project = "test-create-existing"
    assert spawner.project == "test-create-existing"

    ip, port = await spawner.start()
    assert ip == f'fake-jupyterhub-m.{self.zone}.c.{spawner.project}.internal'
    assert port == 0
    # With an existing cluster reported, no create call is expected.
    mock_client.create_cluster.assert_not_called()
def test_get_cluster_exception(self):
    """A channel error propagates out of get_cluster."""
    channel = ChannelStub(responses=[CustomException()])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    with pytest.raises(CustomException):
        client.get_cluster("projectId-1969970175", "region-934795532",
                           "clusterName-1018081872")
def test_update_cluster(self):
    """update_cluster returns an LRO resolving to the updated cluster proto."""
    updated = clusters_pb2.Cluster(
        project_id="projectId2939242356",
        cluster_name="clusterName2875867491",
        cluster_uuid="clusterUuid-1017854240",
    )
    operation = operations_pb2.Operation(
        name="operations/test_update_cluster", done=True
    )
    operation.response.Pack(updated)

    channel = ChannelStub(responses=[operation])
    with mock.patch("google.api_core.grpc_helpers.create_channel") as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    project_id = "projectId-1969970175"
    region = "region-934795532"
    cluster_name = "clusterName-1018081872"
    cluster = {}
    update_mask = {}
    future = client.update_cluster(
        project_id, region, cluster_name, cluster, update_mask
    )
    assert future.result() == updated

    # One UpdateClusterRequest with every field we supplied must be sent.
    assert len(channel.requests) == 1
    want = clusters_pb2.UpdateClusterRequest(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        cluster=cluster,
        update_mask=update_mask,
    )
    assert channel.requests[0][1] == want
async def test_poll_create(self):
    """poll() returns None (still running) while the cluster is CREATING.

    Fixed ``== None`` to the idiomatic identity check ``is None`` (PEP 8).
    """
    # Cluster reported by the API while creation is in progress.
    creating_cluster = clusters_pb2.Cluster(status={"state": "CREATING"})

    mock_client = mock.create_autospec(dataproc_v1beta2.ClusterControllerClient())
    mock_client.get_cluster.return_value = creating_cluster

    spawner = DataprocSpawner(hub=Hub(), dataproc=mock_client, user=MockUser(), _mock=True)
    spawner.project = "test-poll-create"
    assert spawner.project == "test-poll-create"

    assert await spawner.poll() is None
def test_diagnose_cluster_exception(self):
    """A failed diagnose operation exposes its error status via exception()."""
    error = status_pb2.Status()
    operation = operations_pb2.Operation(
        name='operations/test_diagnose_cluster_exception', done=True)
    operation.error.CopyFrom(error)

    channel = ChannelStub(responses=[operation])
    client = dataproc_v1beta2.ClusterControllerClient(channel=channel)

    future = client.diagnose_cluster('projectId-1969970175', 'region-934795532',
                                     'clusterName-1018081872')
    assert future.exception().errors[0] == error
def get_component_gateway_url(project_id, region, cluster_name, credentials):
    """Gets the component gateway Livy url for a cluster name, project id, and region

    Fixes the previous docstring (``region`` was described as a project id and
    the return was documented as a single str) and removes two no-op
    ``try/except: raise`` wrappers, which only re-raised.

    Args:
        project_id (str): The project id to use for the url
        region (str): The region to use for the url
        cluster_name (Optional[str]): The cluster name to use for the url;
            when None, a random cluster from the project/region pool is used
        credentials (google.oauth2.credentials.Credentials): The authorization
            credentials to attach to requests.
    Returns:
        Tuple[str, str]: the component gateway Livy endpoint url and the
        cluster name it belongs to
    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Regional endpoint is required to see clusters in this region.
    client = dataproc_v1beta2.ClusterControllerClient(
        credentials=credentials,
        client_options={
            "api_endpoint": f"{region}-dataproc.googleapis.com:443"
        })

    # If they do not enter a cluster name, we get a random one for them.
    if cluster_name is None:
        cluster_pool, _ = get_cluster_pool(project_id, region, client)
        cluster_name = random.choice(cluster_pool)

    response = client.get_cluster(project_id=project_id,
                                  region=region,
                                  cluster_name=cluster_name)
    # Any exposed http_ports entry points at the component gateway host.
    url = response.config.endpoint_config.http_ports.popitem()[1]
    parsed_uri = urllib3.util.parse_url(url)
    endpoint_address = f"{parsed_uri.scheme}://{parsed_uri.netloc}/gateway/default/livy/v1"
    return endpoint_address, cluster_name
def test_create_cluster_exception(self):
    """A failed create operation exposes its error status via exception()."""
    error = status_pb2.Status()
    operation = operations_pb2.Operation(
        name='operations/test_create_cluster_exception', done=True)
    operation.error.CopyFrom(error)

    channel = ChannelStub(responses=[operation])
    with mock.patch('google.api_core.grpc_helpers.create_channel') as create_channel:
        create_channel.return_value = channel
        client = dataproc_v1beta2.ClusterControllerClient()

    future = client.create_cluster('projectId-1969970175', 'region-934795532', {})
    assert future.exception().errors[0] == error
def __init__(self, project_id, cluster_name, bucket_name):
    """Set up regional Dataproc cluster and job clients for one cluster.

    Args:
        project_id: GCP project to operate in.
        cluster_name: name of the Dataproc cluster this helper manages.
        bucket_name: GCS bucket associated with the cluster/jobs.
    """
    # NOTE(review): ``credentials`` is not a parameter — it comes from an
    # enclosing/module scope; confirm it is defined before instantiation.
    self.credentials = credentials
    # Zone is hard-coded; region below is presumably derived from it via
    # _get_region_from_zone() — confirm against that helper.
    self.zone = 'europe-west6-b'
    self.project_id = project_id
    self.cluster_name = cluster_name
    self.bucket_name = bucket_name
    self.region = self._get_region_from_zone()
    # Use a regional gRPC endpoint. See:
    # https://cloud.google.com/dataproc/docs/concepts/regional-endpoints
    self.client_transport = \
        cluster_controller_grpc_transport.ClusterControllerGrpcTransport(credentials=self.credentials,
                                                                         address='{}-dataproc.googleapis.com:443' \
                                                                         .format(self.region))
    self.job_transport = \
        job_controller_grpc_transport.JobControllerGrpcTransport(credentials=self.credentials,
                                                                 address='{}-dataproc.googleapis.com:443'.format(
                                                                     self.region))
    # Separate clients for cluster lifecycle and job submission.
    self.dataproc_cluster_client = dataproc_v1beta2.ClusterControllerClient(
        self.client_transport)
    self.dataproc_job_client = dataproc_v1beta2.JobControllerClient(
        self.job_transport)