def test_recursive_delete(self):
    client = self._make_default_one()
    client._firestore_api_internal = mock.Mock(spec=["run_query"])
    collection_ref = client.collection("my_collection")

    results = []
    for index in range(10):
        results.append(
            RunQueryResponse(document=Document(name=f"{collection_ref.id}/{index}"))
        )

    # Simulate the backend returning the 10 documents in four chunks of <= 3.
    chunks = [
        results[:3],
        results[3:6],
        results[6:9],
        results[9:],
    ]

    def _get_chunk(*args, **kwargs):
        return iter(chunks.pop(0))

    client._firestore_api_internal.run_query.side_effect = _get_chunk

    bulk_writer = mock.MagicMock()
    bulk_writer.mock_add_spec(spec=["delete", "close"])

    num_deleted = client.recursive_delete(
        collection_ref, bulk_writer=bulk_writer, chunk_size=3
    )
    self.assertEqual(num_deleted, len(results))

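# For context: `Client.recursive_delete` is the public API exercised above. A
# minimal usage sketch (hedged; assumes a configured `firestore.Client` and an
# existing "my_collection"):
#
#     client = firestore.Client()
#     num_deleted = client.recursive_delete(
#         client.collection("my_collection"), chunk_size=500
#     )
#
# Documents are fetched in `chunk_size` batches and deleted via a BulkWriter;
# the return value is the total number of documents removed.
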
async def test_chunkify(self):
    client = _make_client()
    col = client.collection("my-collection")
    client._firestore_api_internal = mock.Mock(spec=["run_query"])

    results = []
    for index in range(10):
        results.append(
            RunQueryResponse(
                document=Document(
                    name=f"projects/project-project/databases/(default)/documents/my-collection/{index}",
                ),
            ),
        )

    chunks = [
        results[:3],
        results[3:6],
        results[6:9],
        results[9:],
    ]

    async def _get_chunk(*args, **kwargs):
        return AsyncIter(chunks.pop(0))

    client._firestore_api_internal.run_query.side_effect = _get_chunk

    counter = 0
    expected_lengths = [3, 3, 3, 1]
    async for chunk in col._chunkify(3):
        msg = (
            f"Expected chunk of length {expected_lengths[counter]} at index "
            f"{counter}. Saw {len(chunk)}."
        )
        self.assertEqual(len(chunk), expected_lengths[counter], msg)
        counter += 1

def _list_documents_helper(self, page_size=None, retry=None, timeout=None):
    from google.cloud.firestore_v1 import _helpers as _fs_v1_helpers
    from google.api_core.page_iterator import Iterator
    from google.api_core.page_iterator import Page
    from google.cloud.firestore_v1.document import DocumentReference
    from google.cloud.firestore_v1.services.firestore.client import FirestoreClient
    from google.cloud.firestore_v1.types.document import Document

    class _Iterator(Iterator):
        def __init__(self, pages):
            super(_Iterator, self).__init__(client=None)
            self._pages = pages

        def _next_page(self):
            if self._pages:
                page, self._pages = self._pages[0], self._pages[1:]
                return Page(self, page, self.item_to_value)

    client = _test_helpers.make_client()
    template = client._database_string + "/documents/{}"
    document_ids = ["doc-1", "doc-2"]
    documents = [
        Document(name=template.format(document_id)) for document_id in document_ids
    ]
    iterator = _Iterator(pages=[documents])

    api_client = mock.create_autospec(FirestoreClient)
    api_client.list_documents.return_value = iterator
    client._firestore_api_internal = api_client

    collection = self._make_one("collection", client=client)
    kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout)

    if page_size is not None:
        documents = list(collection.list_documents(page_size=page_size, **kwargs))
    else:
        documents = list(collection.list_documents(**kwargs))

    # Verify the response and the mocks.
    self.assertEqual(len(documents), len(document_ids))
    for document, document_id in zip(documents, document_ids):
        self.assertIsInstance(document, DocumentReference)
        self.assertEqual(document.parent, collection)
        self.assertEqual(document.id, document_id)

    parent, _ = collection._parent_info()
    api_client.list_documents.assert_called_once_with(
        request={
            "parent": parent,
            "collection_id": collection.id,
            "page_size": page_size,
            "show_missing": True,
            "mask": {"field_paths": None},
        },
        metadata=client._rpc_metadata,
        **kwargs,
    )

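# For context: `CollectionReference.list_documents` is the call under test. A
# minimal usage sketch (hedged; assumes a configured client named `client`):
#
#     for doc_ref in client.collection("collection").list_documents(page_size=100):
#         print(doc_ref.id)
#
# It yields DocumentReference objects rather than snapshots, and (as the
# asserted request shows) passes show_missing=True, so references to "missing"
# documents that only have subcollections are included.
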
async def _list_documents_helper(page_size=None, retry=None, timeout=None):
    from google.cloud.firestore_v1 import _helpers
    from google.api_core.page_iterator_async import AsyncIterator
    from google.api_core.page_iterator import Page
    from google.cloud.firestore_v1.async_document import AsyncDocumentReference
    from google.cloud.firestore_v1.types.document import Document

    class _AsyncIterator(AsyncIterator):
        def __init__(self, pages):
            super(_AsyncIterator, self).__init__(client=None)
            self._pages = pages

        async def _next_page(self):
            if self._pages:
                page, self._pages = self._pages[0], self._pages[1:]
                return Page(self, page, self.item_to_value)

    client = _make_client()
    template = client._database_string + "/documents/{}"
    document_ids = ["doc-1", "doc-2"]
    documents = [
        Document(name=template.format(document_id)) for document_id in document_ids
    ]
    iterator = _AsyncIterator(pages=[documents])

    firestore_api = AsyncMock()
    firestore_api.mock_add_spec(spec=["list_documents"])
    firestore_api.list_documents.return_value = iterator
    client._firestore_api_internal = firestore_api

    collection = _make_async_collection_reference("collection", client=client)
    kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)

    if page_size is not None:
        documents = [
            i
            async for i in collection.list_documents(page_size=page_size, **kwargs)
        ]
    else:
        documents = [i async for i in collection.list_documents(**kwargs)]

    # Verify the response and the mocks.
    assert len(documents) == len(document_ids)
    for document, document_id in zip(documents, document_ids):
        assert isinstance(document, AsyncDocumentReference)
        assert document.parent == collection
        assert document.id == document_id

    parent, _ = collection._parent_info()
    firestore_api.list_documents.assert_called_once_with(
        request={
            "parent": parent,
            "collection_id": collection.id,
            "page_size": page_size,
            "show_missing": True,
            "mask": {"field_paths": None},
        },
        metadata=client._rpc_metadata,
        **kwargs,
    )

def test_watch_on_snapshot_document_change_changed():
    from google.cloud.firestore_v1.types.document import Document
    from google.cloud.firestore_v1.watch import WATCH_TARGET_ID

    inst = _make_watch()
    proto = _make_listen_response()
    proto.target_change = None
    proto.document_change.target_ids = [WATCH_TARGET_ID]
    proto.document_change.document = Document(name="fred")
    inst.on_snapshot(proto)
    assert inst.change_map["fred"].data == {}

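# For context: `Watch.on_snapshot` above is the internal entry point behind the
# public listener API. A hedged sketch of that public counterpart (assumes a
# configured client and an existing "cities/SF" document):
#
#     def callback(docs, changes, read_time):
#         for doc in docs:
#             print(f"Received: {doc.id}")
#
#     watch = client.collection("cities").document("SF").on_snapshot(callback)
#     watch.unsubscribe()  # stop listening when done
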
def test_unnecessary_chunkify(self):
    client = _make_client()

    firestore_api = mock.Mock(spec=["run_query"])
    firestore_api.run_query.return_value = iter(
        [
            RunQueryResponse(
                document=Document(
                    name=f"projects/project-project/databases/(default)/documents/asdf/{index}",
                ),
            )
            for index in range(5)
        ]
    )
    client._firestore_api_internal = firestore_api

    query = client.collection("asdf")._query()
    for chunk in query.limit(5)._chunkify(10):
        self.assertEqual(len(chunk), 5)

def _add_bundle_element(
    self, bundle_element: BundleElement, *, client: BaseClient, type: str
):  # type: ignore
    """Applies BundleElements to this FirestoreBundle instance as a part of
    deserializing a FirestoreBundle string.
    """
    from google.cloud.firestore_v1.types.document import Document

    if getattr(self, "_doc_metadata_map", None) is None:
        self._doc_metadata_map = {}
    if type == "metadata":
        self._deserialized_metadata = bundle_element.metadata  # type: ignore
    elif type == "namedQuery":
        self.named_queries[bundle_element.named_query.name] = (
            bundle_element.named_query  # type: ignore
        )
    elif type == "documentMetadata":
        self._doc_metadata_map[
            bundle_element.document_metadata.name
        ] = bundle_element.document_metadata
    elif type == "document":
        doc_ref_value = _helpers.DocumentReferenceValue(bundle_element.document.name)
        snapshot = DocumentSnapshot(
            data=_helpers.decode_dict(
                Document(mapping=bundle_element.document).fields, client
            ),
            exists=True,
            reference=DocumentReference(
                doc_ref_value.collection_name,
                doc_ref_value.document_id,
                client=client,
            ),
            read_time=self._doc_metadata_map[bundle_element.document.name].read_time,
            create_time=bundle_element.document.create_time,  # type: ignore
            update_time=bundle_element.document.update_time,  # type: ignore
        )
        self.add_document(snapshot)
        bundled_document = self.documents.get(snapshot.reference._document_path)
        for query_name in self._doc_metadata_map[bundle_element.document.name].queries:
            bundled_document.metadata.queries.append(query_name)  # type: ignore
    else:
        raise ValueError(f"Unexpected type of BundleElement: {type}")

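# For context: `_add_bundle_element` reverses the serialization performed by
# the public builder API. A hedged sketch of that forward direction (assumes a
# configured client and an existing document "col/doc-1"):
#
#     from google.cloud.firestore_bundle import FirestoreBundle
#
#     snapshot = client.collection("col").document("doc-1").get()
#     bundle = FirestoreBundle("my-bundle")
#     bundle.add_document(snapshot)
#     serialized = bundle.build()  # the string this method helps deserialize
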
def test_watch_on_snapshot_document_change_changed_docname_db_prefix():
    # TODO: Verify the current behavior. The change map currently contains
    # the db-prefixed document name and not the bare document name.
    from google.cloud.firestore_v1.types.document import Document
    from google.cloud.firestore_v1.watch import WATCH_TARGET_ID

    inst = _make_watch()
    proto = _make_listen_response()
    proto.target_change = None
    proto.document_change.target_ids = [WATCH_TARGET_ID]
    proto.document_change.document = Document(name="abc://foo/documents/fred")
    inst._set_documents_pfx("abc://foo")
    inst.on_snapshot(proto)
    assert inst.change_map["abc://foo/documents/fred"].data == {}

def _bundled_collection_helper(
    self,
    document_ids: typing.Optional[typing.List[str]] = None,
    data: typing.Optional[typing.List[typing.Dict]] = None,
) -> collection.CollectionReference:
    """Builder of a mocked Query for the sake of testing Bundles.

    Bundling queries involves loading the actual documents for cold storage,
    and this method arranges all of the necessary mocks so that unit tests
    can think they are evaluating a live query.
    """
    from google.cloud.firestore_v1 import _helpers
    from google.cloud.firestore_v1.types.document import Document
    from google.cloud.firestore_v1.types.firestore import RunQueryResponse
    from google.protobuf.timestamp_pb2 import Timestamp  # type: ignore

    client = self.get_client()
    template = client._database_string + "/documents/col/{}"
    document_ids = document_ids or ["doc-1", "doc-2"]

    def _index_from_data(index: int):
        if data is None or len(data) < index + 1:
            return None
        return data[index]

    documents = [
        RunQueryResponse(
            transaction=b"",
            document=Document(
                name=template.format(document_id),
                fields=_helpers.encode_dict(
                    _index_from_data(index) or {"hello": "world"}
                ),
                create_time=Timestamp(seconds=1, nanos=1),
                update_time=Timestamp(seconds=1, nanos=1),
            ),
            read_time=_test_helpers.build_timestamp(),
        )
        for index, document_id in enumerate(document_ids)
    ]

    iterator = self.build_results_iterable(documents)
    api_client = self.get_internal_client_mock()
    api_client.run_query.return_value = iterator
    client._firestore_api_internal = api_client
    return self.get_collection_class()("col", client=client)

def test_chunkify():
    from google.cloud.firestore_v1.types.document import Document
    from google.cloud.firestore_v1.types.firestore import RunQueryResponse
    from tests.unit.v1 import _test_helpers

    client = _test_helpers.make_client()
    col = client.collection("my-collection")
    client._firestore_api_internal = mock.Mock(spec=["run_query"])

    results = []
    for index in range(10):
        results.append(
            RunQueryResponse(
                document=Document(
                    name=f"projects/project-project/databases/(default)/documents/my-collection/{index}",
                ),
            ),
        )

    chunks = [
        results[:3],
        results[3:6],
        results[6:9],
        results[9:],
    ]

    def _get_chunk(*args, **kwargs):
        return iter(chunks.pop(0))

    client._firestore_api_internal.run_query.side_effect = _get_chunk

    counter = 0
    expected_lengths = [3, 3, 3, 1]
    for chunk in col._chunkify(3):
        assert len(chunk) == expected_lengths[counter]
        counter += 1

def test_recursive_delete_from_document(self):
    client = self._make_default_one()
    client._firestore_api_internal = mock.Mock(
        spec=["run_query", "list_collection_ids"]
    )
    collection_ref = client.collection("my_collection")

    collection_1_id: str = "collection_1_id"
    collection_2_id: str = "collection_2_id"

    parent_doc = collection_ref.document("parent")

    collection_1_results = []
    collection_2_results = []
    for index in range(10):
        collection_1_results.append(
            RunQueryResponse(
                document=Document(name=f"{collection_1_id}/{index}"),
            ),
        )
        collection_2_results.append(
            RunQueryResponse(
                document=Document(name=f"{collection_2_id}/{index}"),
            ),
        )

    col_1_chunks = [
        collection_1_results[:3],
        collection_1_results[3:6],
        collection_1_results[6:9],
        collection_1_results[9:],
    ]

    col_2_chunks = [
        collection_2_results[:3],
        collection_2_results[3:6],
        collection_2_results[6:9],
        collection_2_results[9:],
    ]

    def _get_chunk(*args, **kwargs):
        # Route each mocked run_query call to the chunks of whichever
        # subcollection the query's start_at cursor points into.
        start_at = (
            kwargs["request"]["structured_query"].start_at.values[0].reference_value
        )
        if collection_1_id in start_at:
            return iter(col_1_chunks.pop(0))
        return iter(col_2_chunks.pop(0))

    client._firestore_api_internal.run_query.side_effect = _get_chunk
    client._firestore_api_internal.list_collection_ids.return_value = [
        collection_1_id,
        collection_2_id,
    ]

    bulk_writer = mock.MagicMock()
    bulk_writer.mock_add_spec(spec=["delete", "close"])

    num_deleted = client.recursive_delete(
        parent_doc, bulk_writer=bulk_writer, chunk_size=3
    )

    # 10 documents from each subcollection, plus the parent document itself.
    expected_len = len(collection_1_results) + len(collection_2_results) + 1
    self.assertEqual(num_deleted, expected_len)