def test_consume_next(self):
    """A single response with no chunks creates one empty PartialRowData."""
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_pb2)
    from gcloud.bigtable.row_data import PartialRowData

    row_key = b'row-key'
    response_pb = messages_pb2.ReadRowsResponse(row_key=row_key)
    iterator = _MockCancellableIterator(response_pb)
    rows_data = self._makeOne(iterator)

    # No rows parsed before consuming.
    self.assertEqual(rows_data.rows, {})
    rows_data.consume_next()
    # Consuming the response registers an (empty) partial row for the key.
    self.assertEqual(rows_data.rows, {row_key: PartialRowData(row_key)})
def test_update_from_read_rows_row_key_disagree(self):
    """A response whose row key differs from the row's must raise ValueError."""
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_pb2)

    row_key1 = b'row-key1'
    row_key2 = b'row-key2'
    self.assertNotEqual(row_key1, row_key2)

    row_data = self._makeOne(row_key1)
    self.assertFalse(row_data._chunks_encountered)

    # Feed a response keyed to a *different* row.
    response_pb = messages_pb2.ReadRowsResponse(row_key=row_key2)
    with self.assertRaises(ValueError):
        row_data.update_from_read_rows(response_pb)
    # The failed update must leave no chunks recorded.
    self.assertFalse(row_data._chunks_encountered)
def test_consume_next_row_exists(self):
    """Consuming a commit chunk updates an already-tracked row in place."""
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_pb2)
    from gcloud.bigtable.row_data import PartialRowData

    row_key = b'row-key'
    commit_chunk = messages_pb2.ReadRowsResponse.Chunk(commit_row=True)
    response_pb = messages_pb2.ReadRowsResponse(
        row_key=row_key, chunks=[commit_chunk])
    iterator = _MockCancellableIterator(response_pb)
    rows_data = self._makeOne(iterator)

    # Pre-register a partial row so consume_next() re-uses it.
    existing = PartialRowData(row_key)
    rows_data._rows[row_key] = existing
    self.assertFalse(existing.committed)

    rows_data.consume_next()
    # The commit chunk flips the committed flag but adds no cells.
    self.assertTrue(existing.committed)
    self.assertEqual(existing.cells, {})
def test_update_from_read_rows_empty_chunk(self):
    """A chunk with no oneof field set is rejected with ValueError."""
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_pb2)

    row_key = b'row-key'
    row_data = self._makeOne(row_key)
    self.assertFalse(row_data._chunks_encountered)

    empty_chunk = messages_pb2.ReadRowsResponse.Chunk()
    # No oneof member populated -> an "empty" chunk.
    self.assertEqual(empty_chunk.WhichOneof('chunk'), None)
    response_pb = messages_pb2.ReadRowsResponse(
        row_key=row_key, chunks=[empty_chunk])

    with self.assertRaises(ValueError):
        row_data.update_from_read_rows(response_pb)
    # The failed update must leave no chunks recorded.
    self.assertFalse(row_data._chunks_encountered)
def test_update_from_read_rows(self):
    """A reset_row chunk discards earlier contents; later chunks stick."""
    from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2
    from gcloud.bigtable._generated import (
        bigtable_service_messages_pb2 as messages_pb2)

    row_key = b'row-key'
    row_data = self._makeOne(row_key)

    # Chunk 1: contents that chunk 2's reset should wipe out.
    ignored_family_name = u'ignore-name'
    chunk1 = messages_pb2.ReadRowsResponse.Chunk(
        row_contents=data_pb2.Family(name=ignored_family_name))
    # Chunk 2: reset the row.
    chunk2 = messages_pb2.ReadRowsResponse.Chunk(reset_row=True)
    # Chunk 3: a column family with no columns, added after the reset.
    family_name = u'name'
    chunk3 = messages_pb2.ReadRowsResponse.Chunk(
        row_contents=data_pb2.Family(name=family_name))
    # Chunk 4: commit the row.
    chunk4 = messages_pb2.ReadRowsResponse.Chunk(commit_row=True)

    response_pb = messages_pb2.ReadRowsResponse(
        row_key=row_key, chunks=[chunk1, chunk2, chunk3, chunk4])

    # Sanity-check the row is pristine before parsing.
    self.assertEqual(row_data.cells, {})
    self.assertFalse(row_data.committed)
    self.assertFalse(row_data._chunks_encountered)

    row_data.update_from_read_rows(response_pb)
    # Only the post-reset family survives; the row is committed.
    self.assertEqual(row_data.cells, {family_name: {}})
    self.assertFalse(ignored_family_name in row_data.cells)
    self.assertTrue(row_data.committed)
    self.assertTrue(row_data._chunks_encountered)