def test_update(self):
    from google.cloud.firestore_v1beta1.proto import common_pb2
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.proto import write_pb2

    client = _make_client()
    batch = self._make_one(client)
    # A fresh batch starts with no queued writes.
    self.assertEqual(batch._write_pbs, [])

    reference = client.document('cats', 'cradle')
    field_path = 'head.foot'
    value = u'knees toes shoulders'

    result = batch.update(reference, {field_path: value})
    self.assertIsNone(result)

    # The dotted field path must be expanded into a nested map value.
    expected_map = document_pb2.MapValue(fields={
        'foot': _value_pb(string_value=value),
    })
    expected_write = write_pb2.Write(
        update=document_pb2.Document(
            name=reference._document_path,
            fields={'head': _value_pb(map_value=expected_map)},
        ),
        update_mask=common_pb2.DocumentMask(field_paths=[field_path]),
        current_document=common_pb2.Precondition(exists=True),
    )
    self.assertEqual(batch._write_pbs, [expected_write])
def test_paths(self):
    from google.cloud.firestore_v1beta1.proto import common_pb2

    paths = ["a.b", "c"]
    mask = self._call_fut(paths)
    # The helper should wrap the paths in a ``DocumentMask`` verbatim.
    self.assertEqual(mask, common_pb2.DocumentMask(field_paths=paths))
def test_get_all(self):
    from google.cloud.firestore_v1beta1.proto import common_pb2
    from google.cloud.firestore_v1beta1.document import DocumentSnapshot

    data1 = {'a': u'cheese'}
    data2 = {'b': True, 'c': 18}
    info = self._info_for_get_all(data1, data2)
    client, document1, document2, response1, response2 = info

    # Drive the mocked ``batch_get_documents`` through the helper.
    field_paths = ['a', 'b']
    snapshots = self._get_all_helper(
        client,
        [document1, document2],
        [response1, response2],
        field_paths=field_paths,
    )

    self.assertEqual(len(snapshots), 2)
    for snapshot, document, data in zip(
            snapshots, (document1, document2), (data1, data2)):
        self.assertIsInstance(snapshot, DocumentSnapshot)
        self.assertIs(snapshot._reference, document)
        self.assertEqual(snapshot._data, data)

    # Confirm the exact request sent to the mocked API surface.
    doc_paths = [document1._document_path, document2._document_path]
    mask = common_pb2.DocumentMask(field_paths=field_paths)
    client._firestore_api.batch_get_documents.assert_called_once_with(
        client._database_string, doc_paths, mask, transaction=None,
        metadata=client._rpc_metadata)
def _get_update_mask(self, allow_empty_mask=False):
    """Build the update mask from the non-transform top-level paths.

    NOTE(review): ``allow_empty_mask`` is accepted for signature parity with
    sibling implementations but is not consulted here — a mask is always
    returned, even when empty.
    """
    mask_paths = [
        field_path.to_api_repr()
        for field_path in self.top_level_paths
        if field_path not in self.transform_paths
    ]
    return common_pb2.DocumentMask(field_paths=mask_paths)
def _get_update_mask(self, allow_empty_mask=False):
    """Build a mask of dotted/quoted merge paths, excluding pure transforms.

    Returns ``None`` (implicitly) when there is nothing to mask and an
    empty mask was not explicitly allowed.
    """
    mask_paths = []
    for field_path in self.merge:
        if field_path not in self.transform_merge:
            mask_paths.append(field_path.to_api_repr())

    if mask_paths or allow_empty_mask:
        return common_pb2.DocumentMask(field_paths=mask_paths)
def _get_update_mask(self, allow_empty_mask=False):
    """Build the update mask from the top-level paths.

    Non-transform paths are masked directly; for a transform-only path,
    its parent path (when one exists) is masked instead.
    """
    mask_paths = []
    for field_path in self.top_level_paths:
        if field_path in self.transform_paths:
            # Mask the parent of a nested transform path, if any.
            prefix = FieldPath(*field_path.parts[:-1])
            if prefix.parts:
                mask_paths.append(prefix.to_api_repr())
        else:
            mask_paths.append(field_path.to_api_repr())

    return common_pb2.DocumentMask(field_paths=mask_paths)
def _write_pb_for_update(document_path, update_values, field_paths):
    """Build the ``Write`` protobuf expected from an ``update()`` call."""
    from google.cloud.firestore_v1beta1.proto import common_pb2
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.proto import write_pb2
    from google.cloud.firestore_v1beta1 import _helpers

    document = document_pb2.Document(
        name=document_path,
        fields=_helpers.encode_dict(update_values),
    )
    return write_pb2.Write(
        update=document,
        update_mask=common_pb2.DocumentMask(field_paths=field_paths),
        # ``update()`` implies a precondition that the document exists.
        current_document=common_pb2.Precondition(exists=True),
    )
def _helper(self, option=None, do_transform=False, **write_kwargs):
    from google.cloud.firestore_v1beta1.gapic import enums
    from google.cloud.firestore_v1beta1.proto import common_pb2
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.proto import write_pb2
    from google.cloud.firestore_v1beta1.client import Client
    from google.cloud.firestore_v1beta1.constants import SERVER_TIMESTAMP

    document_path = _make_ref_string(
        u'toy', u'car', u'onion', u'garlic')
    field_path1 = 'bitez.yum'
    value = b'\x00\x01'
    field_path2 = 'blog.internet'

    field_updates = {field_path1: value}
    if do_transform:
        field_updates[field_path2] = SERVER_TIMESTAMP

    # ``Client.write_option()`` is a ``@staticmethod``, so the class
    # itself stands in for a client instance.
    write_pbs = self._call_fut(
        Client, document_path, field_updates, option)

    # The dotted path is expanded into a nested map value.
    map_pb = document_pb2.MapValue(fields={
        'yum': _value_pb(bytes_value=value),
    })
    expected_update_pb = write_pb2.Write(
        update=document_pb2.Document(
            name=document_path,
            fields={'bitez': _value_pb(map_value=map_pb)},
        ),
        update_mask=common_pb2.DocumentMask(field_paths=[field_path1]),
        **write_kwargs
    )
    expected_pbs = [expected_update_pb]

    if do_transform:
        # A server-timestamp field produces a second, transform-only write.
        server_val = enums.DocumentTransform.FieldTransform.ServerValue
        expected_transform_pb = write_pb2.Write(
            transform=write_pb2.DocumentTransform(
                document=document_path,
                field_transforms=[
                    write_pb2.DocumentTransform.FieldTransform(
                        field_path=field_path2,
                        set_to_server_value=server_val.REQUEST_TIME,
                    ),
                ],
            ),
        )
        expected_pbs.append(expected_transform_pb)

    self.assertEqual(write_pbs, expected_pbs)
def _write_pb_for_set(document_path, document_data, merge):
    """Build the ``Write`` protobuf expected from a ``set()`` call."""
    from google.cloud.firestore_v1beta1.proto import common_pb2
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.proto import write_pb2
    from google.cloud.firestore_v1beta1 import _helpers

    write_pb = write_pb2.Write(
        update=document_pb2.Document(
            name=document_path,
            fields=_helpers.encode_dict(document_data),
        ),
    )
    if merge:
        # A merge carries an update mask built from the data's field paths.
        _, _, field_paths = _helpers.process_server_timestamp(
            document_data, split_on_dots=False)
        field_paths = _helpers.canonicalize_field_paths(field_paths)
        mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
        write_pb.update_mask.CopyFrom(mask)
    return write_pb
def pbs_for_update(client, document_path, field_updates, option):
    """Make ``Write`` protobufs for ``update()`` methods.

    Args:
        client (~.firestore_v1beta1.client.Client): A client that has a
            write option factory.
        document_path (str): A fully-qualified document path.
        field_updates (dict): Field names or paths to update and values
            to update with.
        option (optional[~.firestore_v1beta1.client.WriteOption]): A write
            option to make assertions / preconditions on the server state
            of the document before applying changes.

    Returns:
        List[google.cloud.firestore_v1beta1.types.Write]: One or two
        ``Write`` protobuf instances for ``update()``.

    Raises:
        ValueError: If ``field_updates`` is empty or contains nothing but
            server-timestamp sentinels.
    """
    if option is None:
        # Default uses ``exists=True``.
        option = client.write_option(exists=True)

    transform_paths, actual_updates, field_paths = (
        process_server_timestamp(field_updates))
    if not (transform_paths or actual_updates):
        raise ValueError('There are only ServerTimeStamp objects or is empty.')

    # NOTE: ``field_paths`` is rebound here; the paths from
    # ``process_server_timestamp`` above are intentionally discarded.
    update_values, field_paths = FieldPathHelper.to_field_paths(actual_updates)
    field_paths = canonicalize_field_paths(field_paths)

    update_pb = write_pb2.Write(
        update=document_pb2.Document(
            name=document_path,
            fields=encode_dict(update_values),
        ),
        update_mask=common_pb2.DocumentMask(field_paths=field_paths),
    )
    # The default above guarantees ``option`` is not ``None`` here.
    option.modify_write(update_pb, field_paths=field_paths)
    write_pbs = [update_pb]

    if transform_paths:
        # NOTE: We **explicitly** don't set any write option on
        # the ``transform_pb``.
        write_pbs.append(get_transform_pb(document_path, transform_paths))

    return write_pbs
def _write_pb_for_set(document_path, document_data, merge):
    """Build the ``Write`` protobuf expected from a ``set()`` call."""
    from google.cloud.firestore_v1beta1.proto import common_pb2
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.proto import write_pb2
    from google.cloud.firestore_v1beta1 import _helpers

    write_pb = write_pb2.Write(update=document_pb2.Document(
        name=document_path, fields=_helpers.encode_dict(document_data)))
    if merge:
        # Collect every field path present in the data, then sort the
        # ``FieldPath`` objects before converting to API strings, and the
        # strings again for the mask (the two orderings may differ).
        paths = [
            path
            for path, _ in _helpers.extract_fields(
                document_data, _helpers.FieldPath())
        ]
        api_reprs = [path.to_api_repr() for path in sorted(paths)]
        mask = common_pb2.DocumentMask(field_paths=sorted(api_reprs))
        write_pb.update_mask.CopyFrom(mask)
    return write_pb
def pbs_for_set(document_path, document_data, merge=False, exists=None):
    """Make ``Write`` protobufs for ``set()`` methods.

    Args:
        document_path (str): A fully-qualified document path.
        document_data (dict): Property names and values to use for
            replacing a document.
        merge (bool): Whether to merge the fields or replace them.
        exists (bool): If set, a precondition to indicate whether the
            document should exist or not. Used for create.

    Returns:
        List[google.cloud.firestore_v1beta1.types.Write]: One or two
        ``Write`` protobuf instances for ``set()``.
    """
    transform_paths, actual_data, field_paths = process_server_timestamp(
        document_data, False)

    update_pb = write_pb2.Write(update=document_pb2.Document(
        name=document_path,
        fields=encode_dict(actual_data),
    ), )
    if exists is not None:
        update_pb.current_document.CopyFrom(
            common_pb2.Precondition(exists=exists))
    if merge:
        field_paths = canonicalize_field_paths(field_paths)
        update_pb.update_mask.CopyFrom(
            common_pb2.DocumentMask(field_paths=sorted(field_paths)))

    write_pbs = [update_pb]
    if transform_paths:
        # NOTE: We **explicitly** don't set any write option on
        # the ``transform_pb``.
        transform_pb = get_transform_pb(document_path, transform_paths)
        if not actual_data:
            # Nothing but transforms: the transform write stands alone.
            return [transform_pb]
        write_pbs.append(transform_pb)
    return write_pbs
def get(self, field_paths=None, transaction=None):
    """Retrieve a snapshot of the current document.

    See :meth:`~.firestore_v1beta1.client.Client.field_path` for more
    information on **field paths**.

    If a ``transaction`` is used and it already has write operations
    added, this method cannot be used (i.e. read-after-write is not
    allowed).

    Args:
        field_paths (Optional[Iterable[str, ...]]): An iterable of field
            paths (``.``-delimited list of field names) to use as a
            projection of document fields in the returned results. If no
            value is provided, all fields will be returned.
        transaction (Optional[~.firestore_v1beta1.transaction.\
            Transaction]): An existing transaction that this reference
            will be retrieved in.

    Returns:
        ~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of the
        current document. If the document does not exist at the time of
        `snapshot`, the snapshot `reference`, `data`, `update_time`, and
        `create_time` attributes will all be `None` and `exists` will be
        `False`.

    Raises:
        ValueError: If ``field_paths`` is passed as a single string
            instead of a sequence of paths.
    """
    if isinstance(field_paths, six.string_types):
        raise ValueError("'field_paths' must be a sequence of paths, not a string.")

    if field_paths is None:
        mask = None
    else:
        mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))

    firestore_api = self._client._firestore_api
    try:
        document_pb = firestore_api.get_document(
            self._document_path,
            mask=mask,
            transaction=_helpers.get_transaction_id(transaction),
            metadata=self._client._rpc_metadata,
        )
    except exceptions.NotFound:
        # Missing document: surface an "empty" snapshot rather than raise.
        data = None
        exists = False
        create_time = None
        update_time = None
    else:
        data = _helpers.decode_dict(document_pb.fields, self._client)
        exists = True
        create_time = document_pb.create_time
        update_time = document_pb.update_time

    return DocumentSnapshot(
        reference=self,
        data=data,
        exists=exists,
        read_time=None,  # No server read_time available
        create_time=create_time,
        update_time=update_time,
    )
def _get_helper(self, field_paths=None, use_transaction=False, not_found=False):
    from google.api_core.exceptions import NotFound
    from google.cloud.firestore_v1beta1.proto import common_pb2
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.transaction import Transaction

    # Build a minimal fake GAPIC with a canned response.
    create_time = 123
    update_time = 234
    firestore_api = mock.Mock(spec=["get_document"])
    response = mock.create_autospec(document_pb2.Document)
    response.fields = {}
    response.create_time = create_time
    response.update_time = update_time
    if not_found:
        firestore_api.get_document.side_effect = NotFound("testing")
    else:
        firestore_api.get_document.return_value = response

    client = _make_client("donut-base")
    client._firestore_api_internal = firestore_api
    document = self._make_one("where", "we-are", client=client)

    if use_transaction:
        transaction = Transaction(client)
        transaction_id = transaction._id = b"asking-me-2"
    else:
        transaction = None

    snapshot = document.get(
        field_paths=field_paths, transaction=transaction)

    self.assertIs(snapshot.reference, document)
    if not_found:
        # Missing documents yield an "empty" snapshot, not an exception.
        self.assertIsNone(snapshot._data)
        self.assertFalse(snapshot.exists)
        self.assertIsNone(snapshot.read_time)
        self.assertIsNone(snapshot.create_time)
        self.assertIsNone(snapshot.update_time)
    else:
        self.assertEqual(snapshot.to_dict(), {})
        self.assertTrue(snapshot.exists)
        self.assertIsNone(snapshot.read_time)
        self.assertIs(snapshot.create_time, create_time)
        self.assertIs(snapshot.update_time, update_time)

    # Confirm the request that reached the fake API.
    if field_paths is None:
        mask = None
    else:
        mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
    expected_transaction_id = transaction_id if use_transaction else None
    firestore_api.get_document.assert_called_once_with(
        document._document_path,
        mask=mask,
        transaction=expected_transaction_id,
        metadata=client._rpc_metadata,
    )
def pbs_for_update(client, document_path, field_updates, option):
    """Make ``Write`` protobufs for ``update()`` methods.

    Args:
        client (~.firestore_v1beta1.client.Client): A client that has a
            write option factory.
        document_path (str): A fully-qualified document path.
        field_updates (dict): Field names or paths to update and values
            to update with.
        option (optional[~.firestore_v1beta1.client.WriteOption]): A write
            option to make assertions / preconditions on the server state
            of the document before applying changes.

    Returns:
        List[google.cloud.firestore_v1beta1.types.Write]: One or two
        ``Write`` protobuf instances for ``update()``.

    Raises:
        ValueError: If ``field_updates`` is empty or contains nothing but
            server-timestamp sentinels.
    """
    if option is None:
        # Default uses ``exists=True``.
        option = client.write_option(exists=True)

    transform_paths, actual_updates, field_paths = (process_server_timestamp(
        field_updates, split_on_dots=True))
    if not (transform_paths or actual_updates):
        raise ValueError('There are only ServerTimeStamp objects or is empty.')

    update_values, field_paths = FieldPathHelper.to_field_paths(actual_updates)
    update_paths = field_paths[:]

    # The conformance tests expect the parent of a nested transform path
    # to appear in the update mask as well.
    for transform_path in transform_paths:
        if len(transform_path.parts) > 1:
            parent_fp = FieldPath(*transform_path.parts[:-1])
            if parent_fp not in update_paths:
                update_paths.append(parent_fp)

    field_paths = canonicalize_field_paths(field_paths)
    update_paths = canonicalize_field_paths(update_paths)

    write_pbs = []
    if update_values:
        update_pb = write_pb2.Write(
            update=document_pb2.Document(
                name=document_path,
                fields=encode_dict(update_values),
            ),
            update_mask=common_pb2.DocumentMask(field_paths=update_paths),
        )
        # The default above guarantees ``option`` is not ``None`` here.
        option.modify_write(update_pb)
        write_pbs.append(update_pb)

    if transform_paths:
        transform_pb = get_transform_pb(document_path, transform_paths)
        if not update_values:
            # NOTE: the write option goes on the ``transform_pb`` only when
            # there is no ``update_pb`` to carry it.
            option.modify_write(transform_pb)
        write_pbs.append(transform_pb)

    return write_pbs
def pbs_for_set_with_merge(document_path, document_data, merge):
    """Make ``Write`` protobufs for ``set()`` methods.

    Args:
        document_path (str): A fully-qualified document path.
        document_data (dict): Property names and values to use for
            replacing a document.
        merge (Optional[bool] or Optional[List<apispec>]): If True, merge
            all fields; else, merge only the named fields.

    Returns:
        List[google.cloud.firestore_v1beta1.types.Write]: One or two
        ``Write`` protobuf instances for ``set()``.
    """
    create_empty = not document_data

    # ``merge`` is rebound to the normalized list of merge paths.
    if merge is True:
        (
            transform_paths,
            actual_data,
            data_merge,
            transform_merge,
            merge,
        ) = all_merge_paths(document_data)
    else:
        (
            transform_paths,
            actual_data,
            data_merge,
            transform_merge,
            merge,
        ) = normalize_merge_paths(document_data, merge)

    write_pbs = []
    update_pb = write_pb2.Write()

    update_paths = set(data_merge)
    # The conformance tests expect the parent of a nested transform path
    # in the update mask (see set-st-merge-nonleaf-alone.textproto).
    for transform_path in transform_paths:
        if len(transform_path.parts) > 1:
            parent_fp = FieldPath(*transform_path.parts[:-1])
            update_paths.add(parent_fp)

    if actual_data or create_empty or update_paths:
        update = document_pb2.Document(
            name=document_path,
            fields=encode_dict(actual_data),
        )
        update_pb.update.CopyFrom(update)

        mask_paths = [
            fp.to_api_repr() for fp in merge if fp not in transform_merge
        ]
        if mask_paths or create_empty:
            mask = common_pb2.DocumentMask(field_paths=mask_paths)
            update_pb.update_mask.CopyFrom(mask)

        write_pbs.append(update_pb)

    # Keep only the transform paths covered by a merge path (itself or a
    # parent), preserving merge-path order.
    new_transform_paths = []
    for merge_fp in merge:
        new_transform_paths.extend(
            fp for fp in transform_paths if merge_fp.eq_or_parent(fp)
        )
    transform_paths = new_transform_paths

    if transform_paths:
        transform_pb = get_transform_pb(document_path, transform_paths)
        write_pbs.append(transform_pb)

    return write_pbs