def test_it(self):
    """The stub wrapper forwards certs and the metadata transformer.

    Patches ``get_certs`` and ``MetadataTransformer`` in ``_helpers``,
    then verifies the custom stub factory is invoked with the host/port
    plus the secure-channel keyword arguments.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _MockWithAttachedMethods
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import _helpers as MUT

    # Sentinels returned by the patched collaborators.
    mock_result = object()
    custom_factory = _MockCalled(mock_result)
    transformed = object()
    transformer = _MockCalled(transformed)

    host = 'HOST'
    port = 1025
    certs = 'FOOBAR'
    client = _MockWithAttachedMethods()
    with _Monkey(MUT, get_certs=lambda: certs,
                 MetadataTransformer=transformer):
        result = self._callFUT(client, custom_factory, host, port)

    # The factory's return value is passed through untouched.
    self.assertTrue(result is mock_result)
    custom_factory.check_called(
        self,
        [(host, port)],
        [{
            'metadata_transformer': transformed,
            'secure': True,
            'root_certificates': certs,
        }],
    )
    # The transformer was built from the client; the client itself
    # had no methods invoked on it.
    transformer.check_called(self, [(client,)])
    self.assertEqual(client._called, [])
def test_from_service_account_p12(self):
    """The P12 factory reads the key file and builds signed credentials.

    ``SignedJwtAssertionCredentials`` and ``_get_contents`` are patched so
    no real key material or crypto is involved.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _MockWithAttachedMethods
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import client as MUT

    klass = self._getTargetClass()
    scoped_creds = object()
    credentials = _MockWithAttachedMethods(scoped_creds)
    signed_creds = _MockCalled(credentials)

    private_key = 'PRIVATE_KEY'
    mock_get_contents = _MockCalled(private_key)
    client_email = 'CLIENT_EMAIL'
    private_key_path = 'PRIVATE_KEY_PATH'

    with _Monkey(MUT, SignedJwtAssertionCredentials=signed_creds,
                 _get_contents=mock_get_contents):
        client = klass.from_service_account_p12(
            client_email, private_key_path, project_id=PROJECT_ID)

    self.assertEqual(client.project_id, PROJECT_ID)
    # The client holds the *scoped* credentials, not the raw ones.
    self.assertTrue(client._credentials is scoped_creds)
    expected_scopes = [MUT.DATA_SCOPE]
    self.assertEqual(credentials._called, [
        ('create_scoped', (expected_scopes,), {}),
    ])
    # SignedJwtAssertionCredentials() called with only kwargs
    signed_creds_kw = {
        'private_key': private_key,
        'service_account_name': client_email,
    }
    signed_creds.check_called(self, [()], [signed_creds_kw])
    # Load private key (via _get_contents) from the key path.
    mock_get_contents.check_called(self, [(private_key_path,)])
def test_it(self):
    """The helper extracts the operation ID and begin time from a pb.

    The operation ID is parsed from the trailing segment of the
    operation name; the begin time comes from the decoded metadata's
    ``request_time`` converted to a datetime.
    """
    from gcloud_bigtable._generated import bigtable_cluster_service_messages_pb2 as messages_pb2
    from gcloud_bigtable._generated import operations_pb2
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import cluster as MUT

    expected_operation_id = 234
    # Adjacent literals concatenate before the % formatting applies.
    operation_name = "operations/projects/%s/zones/%s/clusters/%s/" "operations/%d" % (
        PROJECT_ID,
        ZONE,
        CLUSTER_ID,
        expected_operation_id,
    )
    current_op = operations_pb2.Operation(name=operation_name)

    # Patched converters return these sentinels.
    request_metadata = messages_pb2.CreateClusterMetadata()
    mock_parse_pb_any_to_native = _MockCalled(request_metadata)
    expected_operation_begin = object()
    mock_pb_timestamp_to_datetime = _MockCalled(expected_operation_begin)
    with _Monkey(
        MUT,
        _parse_pb_any_to_native=mock_parse_pb_any_to_native,
        _pb_timestamp_to_datetime=mock_pb_timestamp_to_datetime,
    ):
        operation_id, operation_begin = self._callFUT(current_op)

    self.assertEqual(operation_id, expected_operation_id)
    self.assertTrue(operation_begin is expected_operation_begin)
    # The metadata Any-pb was decoded, then its request_time converted.
    mock_parse_pb_any_to_native.check_called(self, [(current_op.metadata,)])
    mock_pb_timestamp_to_datetime.check_called(
        self, [(request_metadata.request_time,)])
def test_rows_with_results(self):
    """rows() returns (key, pairs) only for rows the read actually found.

    Two keys are requested but the mocked read returns only one row;
    that row's cells are mapped through the patched ``_cells_to_pairs``.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import table as MUT
    from gcloud_bigtable.row_data import PartialRowData

    row_key1 = 'row-key1'
    row_key2 = 'row-key2'
    rows = [row_key1, row_key2]
    name = 'table-name'
    connection = None
    table = self._makeOne(name, connection)
    table._low_level_table = _MockLowLevelTable()

    row1 = PartialRowData(row_key1)
    # Return row1 but not row2
    rr_result = _MockPartialRowsData(rows={row_key1: row1})
    table._low_level_table.read_rows_result = rr_result
    self.assertEqual(rr_result.consume_all_calls, 0)

    # Sentinels returned by the patched filter/pair helpers.
    fake_rows_filter = object()
    mock_row_keys_filter_helper = _MockCalled(fake_rows_filter)
    fake_filter = object()
    mock_filter_chain_helper = _MockCalled(fake_filter)
    fake_pair = object()
    mock_cells_to_pairs = _MockCalled([fake_pair])

    col_fam = u'cf1'
    qual = b'qual'
    fake_cells = object()
    row1._cells = {col_fam: {qual: fake_cells}}
    include_timestamp = object()
    with _Monkey(MUT,
                 _row_keys_filter_helper=mock_row_keys_filter_helper,
                 _filter_chain_helper=mock_filter_chain_helper,
                 _cells_to_pairs=mock_cells_to_pairs):
        result = table.rows(rows, include_timestamp=include_timestamp)

    # read_rows_result == PartialRowsData with row_key1
    expected_result = {col_fam.encode('ascii') + b':' + qual: fake_pair}
    self.assertEqual(result, [(row_key1, expected_result)])

    # read_rows() was called once, keyword-only, and fully consumed.
    read_rows_args = ()
    read_rows_kwargs = {'filter_': fake_filter}
    self.assertEqual(table._low_level_table.read_rows_calls, [
        (read_rows_args, read_rows_kwargs),
    ])
    self.assertEqual(rr_result.consume_all_calls, 1)

    mock_row_keys_filter_helper.check_called(self, [(rows,)])
    expected_kwargs = {
        'filters': [fake_rows_filter],
        'versions': 1,
        'timestamp': None,
    }
    mock_filter_chain_helper.check_called(self, [()], [expected_kwargs])
    to_pairs_kwargs = {'include_timestamp': include_timestamp}
    mock_cells_to_pairs.check_called(
        self, [(fake_cells,)], [to_pairs_kwargs])
def test_cells_with_results(self):
    """cells() pipes the found row's cells through _cells_to_pairs.

    ``versions``/``timestamp`` are opaque sentinels; the test only checks
    they are forwarded verbatim to the patched filter-chain helper.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import table as MUT
    from gcloud_bigtable.row_data import PartialRowData

    row_key = 'row-key'
    name = 'table-name'
    connection = None
    table = self._makeOne(name, connection)
    table._low_level_table = _MockLowLevelTable()
    partial_row = PartialRowData(row_key)
    table._low_level_table.read_row_result = partial_row

    # These are all passed to mocks.
    versions = object()
    timestamp = object()
    include_timestamp = object()

    fake_filter = object()
    mock_filter_chain_helper = _MockCalled(fake_filter)
    fake_result = object()
    mock_cells_to_pairs = _MockCalled(fake_result)

    col_fam = 'cf1'
    qual = 'qual'
    fake_cells = object()
    partial_row._cells = {col_fam: {qual: fake_cells}}
    column = col_fam + ':' + qual
    with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper,
                 _cells_to_pairs=mock_cells_to_pairs):
        result = table.cells(row_key, column, versions=versions,
                             timestamp=timestamp,
                             include_timestamp=include_timestamp)

    # The method's return value comes straight from _cells_to_pairs.
    self.assertEqual(result, fake_result)
    read_row_args = (row_key,)
    read_row_kwargs = {'filter_': fake_filter}
    self.assertEqual(table._low_level_table.read_row_calls, [
        (read_row_args, read_row_kwargs),
    ])

    filter_kwargs = {
        'column': column,
        'versions': versions,
        'timestamp': timestamp,
    }
    mock_filter_chain_helper.check_called(self, [()], [filter_kwargs])
    to_pairs_kwargs = {'include_timestamp': include_timestamp}
    mock_cells_to_pairs.check_called(
        self, [(fake_cells,)], [to_pairs_kwargs])
def _start_method_helper(self, admin):
    """Helper: start() opens one stub, or four when ``admin`` is True.

    :param admin: whether the client under test was built as an admin
        client (which additionally needs cluster/operations/table stubs).
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _MockWithAttachedMethods
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import client as MUT

    scoped_creds = object()
    credentials = _MockWithAttachedMethods(scoped_creds)
    client = self._makeOne(credentials, project_id=PROJECT_ID, admin=admin)

    # Every make_stub() call returns the same fake stub object.
    stub = _FakeStub()
    mock_make_stub = _MockCalled(stub)
    with _Monkey(MUT, make_stub=mock_make_stub):
        client.start()

    self.assertTrue(client._data_stub is stub)
    if admin:
        # Admin clients also enter the three admin stubs.
        self.assertTrue(client._cluster_stub is stub)
        self.assertTrue(client._operations_stub is stub)
        self.assertTrue(client._table_stub is stub)
        self.assertEqual(stub._entered, 4)
    else:
        self.assertTrue(client._cluster_stub is None)
        self.assertTrue(client._operations_stub is None)
        self.assertTrue(client._table_stub is None)
        self.assertEqual(stub._entered, 1)
    # start() enters stub contexts but never exits them.
    self.assertEqual(stub._exited, [])
def test_from_service_account_json(self):
    """The JSON-keyfile factory loads credentials and scopes them."""
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _MockWithAttachedMethods
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import client as MUT

    # The loader mock hands back credentials that scope to a sentinel.
    scoped_credentials = object()
    raw_credentials = _MockWithAttachedMethods(scoped_credentials)
    mock_from_file = _MockCalled(raw_credentials)
    creds_path = 'JSON_CREDENTIALS_PATH'
    client_class = self._getTargetClass()

    monkey_kwargs = {
        '_get_application_default_credential_from_file': mock_from_file,
    }
    with _Monkey(MUT, **monkey_kwargs):
        client = client_class.from_service_account_json(
            creds_path, project_id=PROJECT_ID)

    self.assertEqual(client.project_id, PROJECT_ID)
    self.assertTrue(client._credentials is scoped_credentials)
    # The raw credentials were scoped exactly once, with the data scope.
    self.assertEqual(
        raw_credentials._called,
        [('create_scoped', ([MUT.DATA_SCOPE],), {})])
    # The loader received only the positional path argument.
    mock_from_file.check_called(self, [(creds_path,)])
def _constructor_test_helper(self, expected_scopes, project_id=None,
                             read_only=False, admin=False,
                             user_agent=None):
    """Helper: construct a client and verify scoping + project inference.

    :param expected_scopes: the scopes ``create_scoped`` must receive for
        this combination of ``read_only``/``admin`` flags.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _MockWithAttachedMethods
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import client as MUT

    scoped_creds = object()
    credentials = _MockWithAttachedMethods(scoped_creds)
    determined_project_id = object()
    mock_determine_project_id = _MockCalled(determined_project_id)
    with _Monkey(MUT, _determine_project_id=mock_determine_project_id):
        client = self._makeOne(credentials, project_id=project_id,
                               read_only=read_only, admin=admin,
                               user_agent=user_agent)

    self.assertTrue(client._credentials is scoped_creds)
    self.assertEqual(credentials._called, [
        ('create_scoped', (expected_scopes,), {}),
    ])
    # The stored project ID is whatever _determine_project_id() resolved.
    self.assertTrue(client._project_id is determined_project_id)
    self.assertEqual(client.timeout_seconds, MUT.DEFAULT_TIMEOUT_SECONDS)
    self.assertEqual(client.user_agent, user_agent)
    mock_determine_project_id.check_called(self, [(project_id,)])
def test_cells_empty_row(self):
    """cells() returns an empty list when the low-level read finds no row."""
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import table as MUT

    table = self._makeOne('table-name', None)
    table._low_level_table = _MockLowLevelTable()
    # A read_row() result of None means the row does not exist.
    table._low_level_table.read_row_result = None

    fake_filter = object()
    mock_filter_chain_helper = _MockCalled(fake_filter)

    row_key = 'row-key'
    column = 'fam:col1'
    with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper):
        result = table.cells(row_key, column)

    self.assertEqual(result, [])
    # read_row() was invoked once, with the chained filter as a keyword.
    self.assertEqual(
        table._low_level_table.read_row_calls,
        [((row_key,), {'filter_': fake_filter})])
    # The filter-chain helper saw only keyword arguments.
    mock_filter_chain_helper.check_called(
        self, [()],
        [{'column': column, 'versions': None, 'timestamp': None}])
def test_families(self):
    """families() converts each column family's GC rule via _gc_rule_to_dict."""
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import table as MUT

    table = self._makeOne('table-name', None)
    table._low_level_table = _MockLowLevelTable()

    # One column family with a sentinel GC rule attached.
    family_name = 'fam'
    gc_rule = object()
    column_family = _MockLowLevelColumnFamily(family_name, gc_rule=gc_rule)
    table._low_level_table.column_families = {family_name: column_family}

    converted = object()
    mock_gc_rule_to_dict = _MockCalled(converted)
    with _Monkey(MUT, _gc_rule_to_dict=mock_gc_rule_to_dict):
        result = table.families()

    self.assertEqual(result, {family_name: converted})
    self.assertEqual(table._low_level_table.list_column_families_calls, 1)
    # The converter received the GC rule exactly once.
    mock_gc_rule_to_dict.check_called(self, [(gc_rule,)])
def test__make_table_stub(self):
    """_make_table_stub() defers to make_stub with the table-admin settings."""
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _MockWithAttachedMethods
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import client as MUT
    from gcloud_bigtable.client import TABLE_ADMIN_HOST
    from gcloud_bigtable.client import TABLE_ADMIN_PORT
    from gcloud_bigtable.client import TABLE_STUB_FACTORY

    credentials = _MockWithAttachedMethods(object())
    client = self._makeOne(credentials, project_id=PROJECT_ID)

    stub_sentinel = object()
    mock_make_stub = _MockCalled(stub_sentinel)
    with _Monkey(MUT, make_stub=mock_make_stub):
        result = client._make_table_stub()

    # Whatever make_stub() produced is returned untouched.
    self.assertTrue(result is stub_sentinel)
    # make_stub() received the client plus the table-admin constants.
    mock_make_stub.check_called(self, [
        (client, TABLE_STUB_FACTORY, TABLE_ADMIN_HOST, TABLE_ADMIN_PORT),
    ])
def test_row_with_results(self):
    """row() converts the low-level cells into happybase-style pairs.

    The result dict is keyed by ``b'family:qualifier'`` and valued by
    whatever the patched ``_cells_to_pairs`` returned.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import table as MUT
    from gcloud_bigtable.row_data import PartialRowData

    row_key = 'row-key'
    name = 'table-name'
    connection = None
    table = self._makeOne(name, connection)
    table._low_level_table = _MockLowLevelTable()
    partial_row = PartialRowData(row_key)
    table._low_level_table.read_row_result = partial_row

    fake_filter = object()
    mock_filter_chain_helper = _MockCalled(fake_filter)
    fake_pair = object()
    mock_cells_to_pairs = _MockCalled([fake_pair])

    col_fam = u'cf1'
    qual = b'qual'
    fake_cells = object()
    partial_row._cells = {col_fam: {qual: fake_cells}}
    include_timestamp = object()
    with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper,
                 _cells_to_pairs=mock_cells_to_pairs):
        result = table.row(row_key, include_timestamp=include_timestamp)

    # The results come from _cells_to_pairs.
    expected_result = {col_fam.encode('ascii') + b':' + qual: fake_pair}
    self.assertEqual(result, expected_result)

    read_row_args = (row_key,)
    read_row_kwargs = {'filter_': fake_filter}
    self.assertEqual(table._low_level_table.read_row_calls, [
        (read_row_args, read_row_kwargs),
    ])

    # No column/row-key filters here: only versions/timestamp defaults.
    expected_kwargs = {
        'filters': [],
        'versions': 1,
        'timestamp': None,
    }
    mock_filter_chain_helper.check_called(self, [()], [expected_kwargs])
    to_pairs_kwargs = {'include_timestamp': include_timestamp}
    mock_cells_to_pairs.check_called(
        self, [(fake_cells,)], [to_pairs_kwargs])
def test_rows_with_columns(self):
    """rows() chains the column filter before the row-key filter.

    An empty read result means no rows come back, but the filter helpers
    must still be called with the right inputs and in the right order.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import table as MUT

    name = 'table-name'
    connection = None
    table = self._makeOne(name, connection)
    table._low_level_table = _MockLowLevelTable()
    rr_result = _MockPartialRowsData()
    table._low_level_table.read_rows_result = rr_result
    self.assertEqual(rr_result.consume_all_calls, 0)

    # Sentinels returned by the three patched filter helpers.
    fake_col_filter = object()
    mock_columns_filter_helper = _MockCalled(fake_col_filter)
    fake_rows_filter = object()
    mock_row_keys_filter_helper = _MockCalled(fake_rows_filter)
    fake_filter = object()
    mock_filter_chain_helper = _MockCalled(fake_filter)

    rows = ['row-key']
    columns = object()
    with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper,
                 _row_keys_filter_helper=mock_row_keys_filter_helper,
                 _columns_filter_helper=mock_columns_filter_helper):
        result = table.rows(rows, columns=columns)

    # read_rows_result == Empty PartialRowsData --> No results.
    self.assertEqual(result, [])

    read_rows_args = ()
    read_rows_kwargs = {'filter_': fake_filter}
    self.assertEqual(table._low_level_table.read_rows_calls, [
        (read_rows_args, read_rows_kwargs),
    ])
    self.assertEqual(rr_result.consume_all_calls, 1)

    mock_columns_filter_helper.check_called(self, [(columns,)])
    mock_row_keys_filter_helper.check_called(self, [(rows,)])
    # Column filter precedes the row-key filter in the chain.
    expected_kwargs = {
        'filters': [fake_col_filter, fake_rows_filter],
        'versions': 1,
        'timestamp': None,
    }
    mock_filter_chain_helper.check_called(self, [()], [expected_kwargs])
def test_create(self):
    """create() sends CreateCluster and records the pending operation.

    The request construction and operation parsing are patched out, so
    this only verifies wiring: stub call, stored operation state, and
    the arguments handed to the two helpers.
    """
    from gcloud_bigtable._generated import bigtable_cluster_data_pb2 as data_pb2
    from gcloud_bigtable._generated import operations_pb2
    from gcloud_bigtable._grpc_mocks import StubMock
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import cluster as MUT

    client = _Client(PROJECT_ID)
    cluster = self._makeOne(ZONE, CLUSTER_ID, client)

    # Create request_pb. Just a mock since we monkey patch
    # _prepare_create_request
    request_pb = object()

    # Create response_pb
    current_op = operations_pb2.Operation()
    response_pb = data_pb2.Cluster(current_operation=current_op)

    # Patch the stub used by the API method.
    client.cluster_stub = stub = StubMock(response_pb)

    # Create expected_result.
    expected_result = None

    # Perform the method and check the result.
    timeout_seconds = 578
    mock_prepare_create_request = _MockCalled(request_pb)
    op_id = 5678
    op_begin = object()
    mock_process_operation = _MockCalled((op_id, op_begin))
    with _Monkey(
        MUT,
        _prepare_create_request=mock_prepare_create_request,
        _process_operation=mock_process_operation
    ):
        result = cluster.create(timeout_seconds=timeout_seconds)

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls,
                     [("CreateCluster", (request_pb, timeout_seconds), {})])
    # The cluster tracks the in-flight operation's metadata.
    self.assertEqual(cluster._operation_type, "create")
    self.assertEqual(cluster._operation_id, op_id)
    self.assertTrue(cluster._operation_begin is op_begin)
    mock_prepare_create_request.check_called(self, [(cluster,)])
    # create() parses the *current_operation* embedded in the response.
    mock_process_operation.check_called(self, [(current_op,)])
def test_read_rows(self):
    """read_rows() issues ReadRows and wraps the stream in PartialRowsData.

    ``_create_row_request`` is patched, so this checks that every keyword
    is forwarded to it and that the stub receives the prepared request.
    """
    from gcloud_bigtable._grpc_mocks import StubMock
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.row_data import PartialRowsData
    from gcloud_bigtable import table as MUT

    client = _Client()
    cluster_name = ('projects/' + PROJECT_ID + '/zones/' + ZONE +
                    '/clusters/' + CLUSTER_ID)
    cluster = _Cluster(cluster_name, client=client)
    table = self._makeOne(TABLE_ID, cluster)

    # Create request_pb
    request_pb = object()  # Returned by our mock.
    mock_create_row_request = _MockCalled(request_pb)

    # Create response_iterator
    response_iterator = object()

    # Patch the stub used by the API method.
    client.data_stub = stub = StubMock(response_iterator)

    # Create expected_result.
    expected_result = PartialRowsData(response_iterator)

    # Perform the method and check the result.
    start_key = b'start-key'
    end_key = b'end-key'
    filter_obj = object()
    allow_row_interleaving = True
    limit = 22
    timeout_seconds = 1111
    with _Monkey(MUT, _create_row_request=mock_create_row_request):
        result = table.read_rows(
            start_key=start_key, end_key=end_key, filter_=filter_obj,
            allow_row_interleaving=allow_row_interleaving, limit=limit,
            timeout_seconds=timeout_seconds)

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls, [(
        'ReadRows',
        (request_pb, timeout_seconds),
        {},
    )])
    # Every read option was forwarded to the request builder by keyword.
    created_kwargs = {
        'start_key': start_key,
        'end_key': end_key,
        'filter_': filter_obj,
        'allow_row_interleaving': allow_row_interleaving,
        'limit': limit,
    }
    mock_create_row_request.check_called(self, [(table.name,)],
                                         [created_kwargs])
def _read_row_helper(self, chunks):
    """Helper: read_row() returns PartialRowData for chunks, else None.

    :param chunks: the chunk list placed in the fake ReadRowsResponse;
        empty/falsy chunks simulate a missing row.
    """
    from gcloud_bigtable._generated import (
        bigtable_service_messages_pb2 as messages_pb2)
    from gcloud_bigtable._grpc_mocks import StubMock
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.row_data import PartialRowData
    from gcloud_bigtable import table as MUT

    client = _Client()
    cluster_name = ('projects/' + PROJECT_ID + '/zones/' + ZONE +
                    '/clusters/' + CLUSTER_ID)
    cluster = _Cluster(cluster_name, client=client)
    table = self._makeOne(TABLE_ID, cluster)

    # Create request_pb
    request_pb = object()  # Returned by our mock.
    mock_create_row_request = _MockCalled(request_pb)

    # Create response_iterator
    row_key = b'row-key'
    response_pb = messages_pb2.ReadRowsResponse(row_key=row_key,
                                                chunks=chunks)
    response_iterator = [response_pb]

    # Patch the stub used by the API method.
    client.data_stub = stub = StubMock(response_iterator)

    # Create expected_result.
    if chunks:
        expected_result = PartialRowData(row_key)
        expected_result._committed = True
        expected_result._chunks_encountered = True
    else:
        # No chunks at all --> the row does not exist.
        expected_result = None

    # Perform the method and check the result.
    filter_obj = object()
    timeout_seconds = 596
    with _Monkey(MUT, _create_row_request=mock_create_row_request):
        result = table.read_row(row_key, filter_=filter_obj,
                                timeout_seconds=timeout_seconds)

    self.assertEqual(result, expected_result)
    # read_row() is implemented on top of the streaming ReadRows RPC.
    self.assertEqual(stub.method_calls, [(
        'ReadRows',
        (request_pb, timeout_seconds),
        {},
    )])
    mock_create_row_request.check_called(
        self, [(table.name,)],
        [{'row_key': row_key, 'filter_': filter_obj}])
def test_create_table(self):
    """create_table() builds one table plus a column family per entry.

    Two families are passed (one with a trailing colon, which must be
    stripped); dict iteration order is non-deterministic, so per-family
    assertions sort the created instances first.
    """
    import operator
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import connection as MUT

    cluster = _Cluster()  # Avoid implicit environ check.
    connection = self._makeOne(autoconnect=False, cluster=cluster)
    mock_gc_rule = object()
    mock_parse_family_option = _MockCalled(mock_gc_rule)

    name = 'table-name'
    col_fam1 = 'cf1'
    col_fam_option1 = object()
    col_fam2 = 'cf2'
    col_fam_option2 = object()
    families = {
        col_fam1: col_fam_option1,
        # A trailing colon is also allowed.
        col_fam2 + ':': col_fam_option2,
    }

    # The mock classes record every instance they create here.
    table_instances = []
    col_fam_instances = []
    with _Monkey(MUT, _LowLevelTable=_MockLowLevelTable,
                 _parse_family_option=mock_parse_family_option):
        _MockLowLevelTable._instances = table_instances
        _MockLowLevelColumnFamily._instances = col_fam_instances
        connection.create_table(name, families)

    # Just one table would have been created.
    table_instance, = table_instances
    self.assertEqual(table_instance.args, (name, cluster))
    self.assertEqual(table_instance.kwargs, {})
    self.assertEqual(table_instance.create_calls, 1)

    # Check if our mock was called twice, but we don't know the order.
    mock_called = mock_parse_family_option.called_args
    self.assertEqual(len(mock_called), 2)
    self.assertEqual([len(args) for args in mock_called], [1, 1])
    self.assertEqual(set(mock_called[0] + mock_called[1]),
                     set([col_fam_option1, col_fam_option2]))

    # We expect two column family instances created, but don't know the
    # order due to non-deterministic dict.items().
    col_fam_instances.sort(key=operator.attrgetter('column_family_id'))
    self.assertEqual(col_fam_instances[0].column_family_id, col_fam1)
    self.assertEqual(col_fam_instances[0].gc_rule, mock_gc_rule)
    self.assertEqual(col_fam_instances[0].create_calls, 1)
    self.assertEqual(col_fam_instances[1].column_family_id, col_fam2)
    self.assertEqual(col_fam_instances[1].gc_rule, mock_gc_rule)
    self.assertEqual(col_fam_instances[1].create_calls, 1)
def _helper(self, num_mocks_called, mock_output, method_input):
    """Helper: the project-ID fallback chain stops at the first hit.

    :param num_mocks_called: how many of the three lookup fallbacks
        (environment, App Engine, Compute Engine — in that order) are
        expected to run before one succeeds.
    :param mock_output: value the last-consulted fallback returns; None
        together with ``num_mocks_called == 3`` means total failure.
    :param method_input: explicit project ID passed in; when truthy it
        short-circuits the fallbacks entirely.
    """
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import client as MUT

    mock_project_id_from_environment = _MockCalled(None)
    mock_project_id_from_app_engine = _MockCalled(None)
    mock_project_id_from_compute_engine = _MockCalled(None)
    monkey_kwargs = {
        '_project_id_from_environment': mock_project_id_from_environment,
        '_project_id_from_app_engine': mock_project_id_from_app_engine,
        '_project_id_from_compute_engine': (
            mock_project_id_from_compute_engine),
    }
    # Need the mocks in order they are called, so we can
    # access them based on `num_mocks_called`.
    mocks = [
        mock_project_id_from_environment,
        mock_project_id_from_app_engine,
        mock_project_id_from_compute_engine,
    ]
    # Only the last mock consulted yields a (possibly None) value.
    mocks[num_mocks_called - 1].result = mock_output

    with _Monkey(MUT, **monkey_kwargs):
        if num_mocks_called == 3 and mock_output is None:
            # Every fallback failed --> the function raises.
            with self.assertRaises(EnvironmentError):
                self._callFUT(method_input)
        else:
            result = self._callFUT(method_input)
            self.assertEqual(result, method_input or mock_output)

    # Make sure our mocks were called with no arguments.
    for mock in mocks[:num_mocks_called]:
        mock.check_called(self, [()])
    for mock in mocks[num_mocks_called:]:
        mock.check_called(self, [])
def test_constructor_missing_cluster(self):
    """With no cluster given, the constructor falls back to _get_cluster()."""
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import connection as MUT

    inferred_cluster = _Cluster()
    timeout = object()
    mock_get_cluster = _MockCalled(inferred_cluster)
    with _Monkey(MUT, _get_cluster=mock_get_cluster):
        connection = self._makeOne(autoconnect=False, cluster=None,
                                   timeout=timeout)

    self.assertEqual(connection.table_prefix, None)
    self.assertEqual(connection.table_prefix_separator, '_')
    # The inferred cluster ends up stored on the connection.
    self.assertEqual(connection._cluster, inferred_cluster)
    # _get_cluster() was called once, with only the timeout keyword.
    mock_get_cluster.check_called(self, [()], [{'timeout': timeout}])
def test_update(self):
    """update() sends UpdateCluster and records the pending operation.

    Unlike create(), the request pb here is the real Cluster message
    built from the object's own name/display_name/serve_nodes.
    """
    from gcloud_bigtable._generated import bigtable_cluster_data_pb2 as data_pb2
    from gcloud_bigtable._generated import operations_pb2
    from gcloud_bigtable._grpc_mocks import StubMock
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import cluster as MUT

    client = _Client(PROJECT_ID)
    serve_nodes = 81
    display_name = "display_name"
    cluster = self._makeOne(ZONE, CLUSTER_ID, client,
                            display_name=display_name,
                            serve_nodes=serve_nodes)

    # Create request_pb
    cluster_name = "projects/" + PROJECT_ID + "/zones/" + ZONE + "/clusters/" + CLUSTER_ID
    request_pb = data_pb2.Cluster(name=cluster_name,
                                  display_name=display_name,
                                  serve_nodes=serve_nodes)

    # Create response_pb
    current_op = operations_pb2.Operation()
    response_pb = data_pb2.Cluster(current_operation=current_op)

    # Patch the stub used by the API method.
    client.cluster_stub = stub = StubMock(response_pb)

    # Create expected_result.
    expected_result = None

    # We must create the cluster object with the client passed in.
    timeout_seconds = 9
    op_id = 5678
    op_begin = object()
    mock_process_operation = _MockCalled((op_id, op_begin))
    with _Monkey(MUT, _process_operation=mock_process_operation):
        result = cluster.update(timeout_seconds=timeout_seconds)

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls,
                     [("UpdateCluster", (request_pb, timeout_seconds), {})])
    # The cluster tracks the in-flight operation's metadata.
    self.assertEqual(cluster._operation_type, "update")
    self.assertEqual(cluster._operation_id, op_id)
    self.assertTrue(cluster._operation_begin is op_begin)
    # update() parses the *current_operation* embedded in the response.
    mock_process_operation.check_called(self, [(current_op,)])
def test_constructor_infers_cluster(self):
    """A pool built without a cluster infers one via _get_cluster()."""
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase.connection import Connection
    from gcloud_bigtable.happybase import pool as MUT

    size = 1
    cluster_copy = _Cluster()
    # Each Connection() copies the cluster, so hand out one copy per slot.
    inferred_cluster = _Cluster(copies=[cluster_copy] * size)
    mock_get_cluster = _MockCalled(inferred_cluster)
    with _Monkey(MUT, _get_cluster=mock_get_cluster):
        pool = self._makeOne(size)

    for connection in pool._queue.queue:
        self.assertTrue(isinstance(connection, Connection))
        # We know that the Connection() constructor will
        # call cluster.copy().
        self.assertTrue(connection._cluster is cluster_copy)
    # The inference happened once, with the default (None) timeout.
    mock_get_cluster.check_called(self, [()], [{'timeout': None}])
def test_undelete(self):
    """undelete() sends UndeleteCluster; the response IS the operation pb.

    Unlike create()/update(), the RPC response is itself an Operation,
    so ``_process_operation`` receives the whole response.
    """
    from gcloud_bigtable._generated import bigtable_cluster_service_messages_pb2 as messages_pb2
    from gcloud_bigtable._generated import operations_pb2
    from gcloud_bigtable._grpc_mocks import StubMock
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable import cluster as MUT

    client = _Client(PROJECT_ID)
    cluster = self._makeOne(ZONE, CLUSTER_ID, client)

    # Create request_pb
    cluster_name = "projects/" + PROJECT_ID + "/zones/" + ZONE + "/clusters/" + CLUSTER_ID
    request_pb = messages_pb2.UndeleteClusterRequest(name=cluster_name)

    # Create response_pb
    response_pb = operations_pb2.Operation()

    # Patch the stub used by the API method.
    client.cluster_stub = stub = StubMock(response_pb)

    # Create expected_result.
    expected_result = None

    # Perform the method and check the result.
    timeout_seconds = 78
    op_id = 5678
    op_begin = object()
    mock_process_operation = _MockCalled((op_id, op_begin))
    with _Monkey(MUT, _process_operation=mock_process_operation):
        result = cluster.undelete(timeout_seconds=timeout_seconds)

    self.assertEqual(result, expected_result)
    self.assertEqual(stub.method_calls,
                     [("UndeleteCluster", (request_pb, timeout_seconds), {})])
    self.assertEqual(cluster._operation_type, "undelete")
    self.assertEqual(cluster._operation_id, op_id)
    self.assertTrue(cluster._operation_begin is op_begin)
    mock_process_operation.check_called(self, [(response_pb,)])
def _scan_test_helper(self, row_start=None, row_stop=None, row_prefix=None,
                      columns=None, filter_=None, timestamp=None,
                      include_timestamp=False, limit=None, rr_result=None,
                      expected_result=None):
    """Helper: exercise scan() with any combination of range options.

    :param rr_result: optional fake PartialRowsData to return from the
        low-level read; defaults to an empty one (no results).
    :param expected_result: list the consumed generator must equal;
        defaults to the empty list.
    """
    import types
    from gcloud_bigtable._testing import _MockCalled
    from gcloud_bigtable._testing import _Monkey
    from gcloud_bigtable.happybase import table as MUT

    name = 'table-name'
    connection = None
    table = self._makeOne(name, connection)
    table._low_level_table = _MockLowLevelTable()
    rr_result = rr_result or _MockPartialRowsData()
    table._low_level_table.read_rows_result = rr_result
    self.assertEqual(rr_result.consume_next_calls, 0)

    # Sentinels returned by the patched filter helpers.
    fake_col_filter = object()
    mock_columns_filter_helper = _MockCalled(fake_col_filter)
    fake_filter = object()
    mock_filter_chain_helper = _MockCalled(fake_filter)

    with _Monkey(MUT, _filter_chain_helper=mock_filter_chain_helper,
                 _columns_filter_helper=mock_columns_filter_helper):
        result = table.scan(row_start=row_start, row_stop=row_stop,
                            row_prefix=row_prefix, columns=columns,
                            filter=filter_, timestamp=timestamp,
                            include_timestamp=include_timestamp,
                            limit=limit)
        # scan() is lazy: it hands back a generator.
        self.assertTrue(isinstance(result, types.GeneratorType))
        # Need to consume the result while the monkey patch is applied.
        # read_rows_result == Empty PartialRowsData --> No results.
        expected_result = expected_result or []
        self.assertEqual(list(result), expected_result)

    read_rows_args = ()
    if row_prefix:
        # A prefix overrides start/stop: scan [prefix, successor(prefix)).
        row_start = row_prefix
        row_stop = MUT._string_successor(row_prefix)
    read_rows_kwargs = {
        'end_key': row_stop,
        'filter_': fake_filter,
        'limit': limit,
        'start_key': row_start,
    }
    self.assertEqual(table._low_level_table.read_rows_calls, [
        (read_rows_args, read_rows_kwargs),
    ])
    # One consume_next() per yielded batch, plus the final empty one.
    self.assertEqual(rr_result.consume_next_calls,
                     rr_result.iterations + 1)

    if columns is not None:
        mock_columns_filter_helper.check_called(self, [(columns,)])
    else:
        mock_columns_filter_helper.check_called(self, [])

    # The explicit filter (if any) precedes the column filter.
    filters = []
    if filter_ is not None:
        filters.append(filter_)
    if columns:
        filters.append(fake_col_filter)
    expected_kwargs = {
        'filters': filters,
        'versions': 1,
        'timestamp': timestamp,
    }
    mock_filter_chain_helper.check_called(self, [()], [expected_kwargs])