def test_update(self):
    """Cluster.update() sends UpdateCluster and wraps the returned op."""
    from gcloud._testing import _Monkey
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable._generated import operations_pb2
    from gcloud.bigtable._testing import _FakeStub
    from gcloud.bigtable import cluster as MUT

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    serve_nodes = 81
    display_name = 'display_name'
    timeout_seconds = 9

    client = _Client(project, timeout_seconds=timeout_seconds)
    cluster = self._makeOne(zone, cluster_id, client,
                            display_name=display_name,
                            serve_nodes=serve_nodes)

    # The request protobuf the stub should receive.
    cluster_name = ('projects/' + project + '/zones/' + zone +
                    '/clusters/' + cluster_id)
    request_pb = data_pb2.Cluster(
        name=cluster_name,
        display_name=display_name,
        serve_nodes=serve_nodes,
    )

    # The canned response carries a long-running operation.
    current_op = operations_pb2.Operation()
    response_pb = data_pb2.Cluster(current_operation=current_op)

    # Patch the stub used by the API method.
    client._cluster_stub = stub = _FakeStub(response_pb)

    # The Operation object update() is expected to build.
    op_id = 5678
    op_begin = object()
    expected_result = MUT.Operation('update', op_id, op_begin,
                                    cluster=cluster)

    # Replace _process_operation and record what it was called with.
    process_operation_called = []

    def mock_process_operation(operation_pb):
        process_operation_called.append(operation_pb)
        return op_id, op_begin

    with _Monkey(MUT, _process_operation=mock_process_operation):
        result = cluster.update()

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'UpdateCluster',
        (request_pb, timeout_seconds),
        {},
    )])
    self.assertEqual(process_operation_called, [current_op])
def test_with_create_cluster_metadata(self):
    """A packed CreateClusterMetadata round-trips through the helper."""
    from gcloud.bigtable._generated import any_pb2
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable._generated import (
        bigtable_cluster_service_messages_pb2 as messages_pb2)
    from gcloud.bigtable._generated.timestamp_pb2 import Timestamp

    type_url = ('type.googleapis.com/' +
                messages_pb2._CREATECLUSTERMETADATA.full_name)
    metadata = messages_pb2.CreateClusterMetadata(
        request_time=Timestamp(seconds=1, nanos=1234),
        finish_time=Timestamp(seconds=10, nanos=891011),
        original_request=messages_pb2.CreateClusterRequest(
            name='foo',
            cluster_id='bar',
            cluster=data_pb2.Cluster(
                display_name='quux',
                serve_nodes=1337,
            ),
        ),
    )
    # Pack the metadata into an Any and expect the helper to unpack it.
    wrapped = any_pb2.Any(
        type_url=type_url,
        value=metadata.SerializeToString(),
    )
    self.assertEqual(self._callFUT(wrapped), metadata)
def update(self):
    """Update this cluster.

    .. note::

        Updates the ``display_name`` and ``serve_nodes``. If you'd like to
        change them before updating, reset the values via

        .. code:: python

            cluster.display_name = 'New display name'
            cluster.serve_nodes = 3

        before calling :meth:`update`.

    :rtype: :class:`Operation`
    :returns: The long-running operation corresponding to the
              update operation.
    """
    request_pb = data_pb2.Cluster(
        name=self.name,
        display_name=self.display_name,
        serve_nodes=self.serve_nodes,
    )
    # The stub answers with a
    # `._generated.bigtable_cluster_data_pb2.Cluster`.
    response_pb = self._client._cluster_stub.UpdateCluster(
        request_pb, self._client.timeout_seconds)

    op_id, op_begin = _process_operation(response_pb.current_operation)
    return Operation('update', op_id, op_begin, cluster=self)
def test_it(self):
    """The helper returns the value stored in the named field."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    node_count = 119
    cluster_pb = data_pb2.Cluster(serve_nodes=node_count)
    self.assertEqual(self._callFUT(cluster_pb, 'serve_nodes'), node_count)
def test_with_update_cluster_metadata(self):
    """A packed UpdateClusterMetadata round-trips through the helper."""
    from google.protobuf import any_pb2
    from google.protobuf.timestamp_pb2 import Timestamp
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable._generated import (
        bigtable_cluster_service_messages_pb2 as messages_pb2)

    type_url = ('type.googleapis.com/' +
                messages_pb2._UPDATECLUSTERMETADATA.full_name)
    metadata = messages_pb2.UpdateClusterMetadata(
        request_time=Timestamp(seconds=1, nanos=1234),
        finish_time=Timestamp(seconds=10, nanos=891011),
        cancel_time=Timestamp(seconds=100, nanos=76543),
        original_request=data_pb2.Cluster(
            display_name='the-end',
            serve_nodes=42,
        ),
    )
    # Pack the metadata into an Any and expect the helper to unpack it.
    wrapped = any_pb2.Any(
        type_url=type_url,
        value=metadata.SerializeToString(),
    )
    self.assertEqual(self._callFUT(wrapped), metadata)
def test_set_message_field(self):
    """Explicitly assigning a (default) message marks the field as set."""
    from google.protobuf.timestamp_pb2 import Timestamp
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    cluster_pb = data_pb2.Cluster(delete_time=Timestamp())
    self.assertTrue(self._callFUT(cluster_pb, 'delete_time'))
def test_op_name_parsing_failure(self):
    """An operation name that cannot be parsed makes the helper raise."""
    from google.longrunning import operations_pb2
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    bad_op = operations_pb2.Operation(name='invalid')
    cluster = data_pb2.Cluster(current_operation=bad_op)
    with self.assertRaises(ValueError):
        self._callFUT(cluster)
def test_from_pb_bad_cluster_name(self):
    """A name not shaped like projects/*/zones/*/clusters/* is rejected."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    cluster_pb = data_pb2.Cluster(name='INCORRECT_FORMAT')
    klass = self._getTargetClass()
    with self.assertRaises(ValueError):
        klass.from_pb(cluster_pb, None)
def test__update_from_pb_no_serve_nodes(self):
    """A protobuf missing serve_nodes is rejected without mutation."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES

    cluster_pb = data_pb2.Cluster(display_name='name')
    cluster = self._makeOne(None, None, None)

    # Optional config values start at their defaults.
    self.assertEqual(cluster.display_name, None)
    self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES)

    # The update is refused ...
    with self.assertRaises(ValueError):
        cluster._update_from_pb(cluster_pb)

    # ... and nothing was changed.
    self.assertEqual(cluster.display_name, None)
    self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES)
def test_reload(self):
    """Cluster.reload() sends GetCluster and refreshes config values."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable._generated import (
        bigtable_cluster_service_messages_pb2 as messages_pb2)
    from gcloud.bigtable._testing import _FakeStub
    from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    timeout_seconds = 123

    client = _Client(project, timeout_seconds=timeout_seconds)
    cluster = self._makeOne(zone, cluster_id, client)

    # The request protobuf the stub should receive.
    cluster_name = ('projects/' + project + '/zones/' + zone +
                    '/clusters/' + cluster_id)
    request_pb = messages_pb2.GetClusterRequest(name=cluster_name)

    # The canned response carries fresh config values.
    serve_nodes = 31
    display_name = u'hey-hi-hello'
    response_pb = data_pb2.Cluster(
        display_name=display_name,
        serve_nodes=serve_nodes,
    )

    # Patch the stub used by the API method.
    client._cluster_stub = stub = _FakeStub(response_pb)

    # reload() has no return value.
    expected_result = None

    # Optional config values still hold their defaults beforehand.
    self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES)
    self.assertEqual(cluster.display_name, cluster_id)

    result = cluster.reload()
    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'GetCluster',
        (request_pb, timeout_seconds),
        {},
    )])

    # Optional config values were refreshed from the response.
    self.assertEqual(cluster.serve_nodes, serve_nodes)
    self.assertEqual(cluster.display_name, display_name)
def test_unset_message_field(self):
    """Reading a message field attaches it but leaves it unset."""
    from google.protobuf.timestamp_pb2 import Timestamp
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    cluster_pb = data_pb2.Cluster()
    # A freshly created message has no attached fields.
    self.assertEqual(len(cluster_pb._fields), 0)
    # Reading the message field returns its default ...
    self.assertEqual(cluster_pb.delete_time, Timestamp())
    # ... and, as a side effect, attaches the field to the message,
    # even though its value remains unset.
    self.assertEqual([field.name for field in cluster_pb._fields],
                     ['delete_time'])
    # The helper must still report the field as not set.
    self.assertFalse(self._callFUT(cluster_pb, 'delete_time'))
def test__update_from_pb_success(self):
    """_update_from_pb() copies both optional config values."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES

    display_name = 'display_name'
    serve_nodes = 8
    cluster_pb = data_pb2.Cluster(
        display_name=display_name,
        serve_nodes=serve_nodes,
    )

    cluster = self._makeOne(None, None, None)
    # Optional config values start at their defaults.
    self.assertEqual(cluster.display_name, None)
    self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES)

    cluster._update_from_pb(cluster_pb)
    self.assertEqual(cluster.display_name, display_name)
    self.assertEqual(cluster.serve_nodes, serve_nodes)
def test_from_pb_project_mistmatch(self):
    """from_pb() rejects a name whose project differs from the client's."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    alt_project = 'ALT_PROJECT'

    client = _Client(project=alt_project)
    self.assertNotEqual(project, alt_project)

    cluster_name = ('projects/' + project + '/zones/' + zone +
                    '/clusters/' + cluster_id)
    cluster_pb = data_pb2.Cluster(name=cluster_name)
    klass = self._getTargetClass()
    with self.assertRaises(ValueError):
        klass.from_pb(cluster_pb, client)
def _prepare_create_request(cluster):
    """Creates a protobuf request for a CreateCluster request.

    :type cluster: :class:`Cluster`
    :param cluster: The cluster to be created.

    :rtype: :class:`.messages_pb2.CreateClusterRequest`
    :returns: The CreateCluster request object containing the cluster info.
    """
    # The parent resource is the fully-qualified zone name.
    parent_name = ('projects/' + cluster._client.project +
                   '/zones/' + cluster.zone)
    return messages_pb2.CreateClusterRequest(
        name=parent_name,
        cluster_id=cluster.cluster_id,
        cluster=data_pb2.Cluster(
            display_name=cluster.display_name,
            serve_nodes=cluster.serve_nodes,
        ),
    )
def test_from_pb_success(self):
    """from_pb() parses zone / cluster-id and binds the client."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    client = _Client(project=project)

    cluster_name = ('projects/' + project + '/zones/' + zone +
                    '/clusters/' + cluster_id)
    cluster_pb = data_pb2.Cluster(
        name=cluster_name,
        display_name=cluster_id,
        serve_nodes=331,
    )

    klass = self._getTargetClass()
    cluster = klass.from_pb(cluster_pb, client)
    self.assertTrue(isinstance(cluster, klass))
    self.assertEqual(cluster._client, client)
    self.assertEqual(cluster.zone, zone)
    self.assertEqual(cluster.cluster_id, cluster_id)
def test_set_simple_field(self):
    """A scalar field holding a non-default value counts as set."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    cluster_pb = data_pb2.Cluster(serve_nodes=119)
    self.assertTrue(self._callFUT(cluster_pb, 'serve_nodes'))
def test_list_clusters(self):
    """list_clusters() returns (clusters, failed_zones) from the stub."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable._generated import (
        bigtable_cluster_service_messages_pb2 as messages_pb2)
    from gcloud.bigtable._testing import _FakeStub

    credentials = _Credentials()
    project = 'PROJECT'
    timeout_seconds = 8004
    client = self._makeOne(project=project, credentials=credentials,
                           admin=True, timeout_seconds=timeout_seconds)

    # The request protobuf the stub should receive.
    request_pb = messages_pb2.ListClustersRequest(
        name='projects/' + project,
    )

    # The canned response: one failed zone and two clusters.
    zone = 'foo'
    failed_zone = 'bar'
    cluster_id1 = 'cluster-id1'
    cluster_id2 = 'cluster-id2'
    cluster_name1 = ('projects/' + project + '/zones/' + zone +
                     '/clusters/' + cluster_id1)
    cluster_name2 = ('projects/' + project + '/zones/' + zone +
                     '/clusters/' + cluster_id2)
    response_pb = messages_pb2.ListClustersResponse(
        failed_zones=[
            data_pb2.Zone(display_name=failed_zone),
        ],
        clusters=[
            data_pb2.Cluster(
                name=cluster_name1,
                display_name=cluster_name1,
                serve_nodes=3,
            ),
            data_pb2.Cluster(
                name=cluster_name2,
                display_name=cluster_name2,
                serve_nodes=3,
            ),
        ],
    )

    # Patch the stub used by the API method.
    client._cluster_stub_internal = stub = _FakeStub(response_pb)

    # The value list_clusters() is expected to return.
    failed_zones = [failed_zone]
    clusters = [
        client.cluster(zone, cluster_id1),
        client.cluster(zone, cluster_id2),
    ]
    expected_result = (clusters, failed_zones)

    result = client.list_clusters()
    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'ListClusters',
        (request_pb, timeout_seconds),
        {},
    )])
def test_with_value_unset_on_pb(self):
    """Asking for a field that was never set raises ValueError."""
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)

    empty_pb = data_pb2.Cluster()
    with self.assertRaises(ValueError):
        self._callFUT(empty_pb, 'serve_nodes')
def test_create(self):
    """Cluster.create() sends CreateCluster and wraps the returned op."""
    from google.longrunning import operations_pb2
    from gcloud._testing import _Monkey
    from gcloud.bigtable._generated import (
        bigtable_cluster_data_pb2 as data_pb2)
    from gcloud.bigtable._testing import _FakeStub
    from gcloud.bigtable import cluster as MUT

    project = 'PROJECT'
    zone = 'zone'
    cluster_id = 'cluster-id'
    timeout_seconds = 578

    client = _Client(project, timeout_seconds=timeout_seconds)
    cluster = self._makeOne(zone, cluster_id, client)

    # Just a sentinel request since _prepare_create_request is
    # monkey-patched below.
    request_pb = object()

    # The canned response carries a long-running operation whose name
    # encodes the operation id.
    op_id = 5678
    op_begin = object()
    op_name = ('operations/projects/%s/zones/%s/clusters/%s/'
               'operations/%d' % (project, zone, cluster_id, op_id))
    current_op = operations_pb2.Operation(name=op_name)
    response_pb = data_pb2.Cluster(current_operation=current_op)

    # Patch the stub used by the API method.
    client._cluster_stub = stub = _FakeStub(response_pb)

    # The Operation object create() is expected to build.
    expected_result = MUT.Operation('create', op_id, op_begin,
                                    cluster=cluster)

    # Mocks that record their calls.
    prep_create_called = []

    def mock_prep_create_req(cluster):
        prep_create_called.append(cluster)
        return request_pb

    process_operation_called = []

    def mock_process_operation(operation_pb):
        process_operation_called.append(operation_pb)
        return op_id, op_begin

    with _Monkey(MUT,
                 _prepare_create_request=mock_prep_create_req,
                 _process_operation=mock_process_operation):
        result = cluster.create()

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'CreateCluster',
        (request_pb, timeout_seconds),
        {},
    )])
    self.assertEqual(prep_create_called, [cluster])
    self.assertEqual(process_operation_called, [current_op])