def _write_pb_for_create(document_path, document_data):
    """Build a ``Write`` proto that creates a document at *document_path*.

    The ``exists=False`` precondition makes the write fail when the
    document already exists, matching Firestore "create" semantics.
    """
    from google.cloud.firestore_v1.types import common
    from google.cloud.firestore_v1.types import document
    from google.cloud.firestore_v1.types import write
    from google.cloud.firestore_v1 import _helpers

    doc_pb = document.Document(
        name=document_path,
        fields=_helpers.encode_dict(document_data),
    )
    return write.Write(
        update=doc_pb,
        current_document=common.Precondition(exists=False),
    )
def _write_pb_for_create(document_path, document_data):
    """Build a ``Write`` protobuf creating *document_path* (proto API).

    Uses the ``exists=False`` precondition so the write only succeeds if
    the document does not yet exist.
    """
    from google.cloud.firestore_v1.proto import common_pb2
    from google.cloud.firestore_v1.proto import document_pb2
    from google.cloud.firestore_v1.proto import write_pb2
    from google.cloud.firestore_v1 import _helpers

    doc_pb = document_pb2.Document(
        name=document_path,
        fields=_helpers.encode_dict(document_data),
    )
    precondition = common_pb2.Precondition(exists=False)
    return write_pb2.Write(update=doc_pb, current_document=precondition)
def _write_pb_for_update(document_path, update_values, field_paths):
    """Build a ``Write`` proto that updates only *field_paths*.

    The ``exists=True`` precondition makes the write fail when the
    document is missing, matching Firestore "update" semantics; the
    ``DocumentMask`` restricts the update to the listed field paths.
    """
    from google.cloud.firestore_v1.types import common
    from google.cloud.firestore_v1.types import document
    from google.cloud.firestore_v1.types import write
    from google.cloud.firestore_v1 import _helpers

    doc_pb = document.Document(
        name=document_path,
        fields=_helpers.encode_dict(update_values),
    )
    return write.Write(
        update=doc_pb,
        update_mask=common.DocumentMask(field_paths=field_paths),
        current_document=common.Precondition(exists=True),
    )
def _write_pb_for_update(document_path, update_values, field_paths):
    """Build an update ``Write`` protobuf (proto API variant).

    Combines an ``exists=True`` precondition (document must already
    exist) with a field mask limiting the update to *field_paths*.
    """
    from google.cloud.firestore_v1.proto import common_pb2
    from google.cloud.firestore_v1.proto import document_pb2
    from google.cloud.firestore_v1.proto import write_pb2
    from google.cloud.firestore_v1 import _helpers

    doc_pb = document_pb2.Document(
        name=document_path,
        fields=_helpers.encode_dict(update_values),
    )
    mask = common_pb2.DocumentMask(field_paths=field_paths)
    precondition = common_pb2.Precondition(exists=True)
    return write_pb2.Write(
        update=doc_pb,
        update_mask=mask,
        current_document=precondition,
    )
def _doc_get_info(ref_string, values):
    """Return a ``(document_pb, read_time)`` pair for a fake "get".

    ``update_time`` is 100 seconds before "now" and ``create_time`` 200
    seconds before, so the three timestamps are distinct and ordered
    (create < update < read).
    """
    from google.cloud.firestore_v1.proto import document_pb2
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.firestore_v1 import _helpers

    # NOTE(review): naive UTC datetimes are used here — assumed to be
    # what _datetime_to_pb_timestamp expects; confirm before switching
    # to timezone-aware datetimes.
    now = datetime.datetime.utcnow()
    step = datetime.timedelta(seconds=100)
    read_time = _datetime_to_pb_timestamp(now)
    update_time = _datetime_to_pb_timestamp(now - step)
    create_time = _datetime_to_pb_timestamp(now - 2 * step)

    doc_pb = document_pb2.Document(
        name=ref_string,
        fields=_helpers.encode_dict(values),
        create_time=create_time,
        update_time=update_time,
    )
    return doc_pb, read_time
def _write_pb_for_set(document_path, document_data, merge):
    """Build a ``Write`` proto for a "set", optionally with a merge mask.

    When *merge* is true, an ``update_mask`` listing every field path in
    *document_data* is attached, so only those paths are written.
    """
    from google.cloud.firestore_v1.types import common
    from google.cloud.firestore_v1.types import document
    from google.cloud.firestore_v1.types import write
    from google.cloud.firestore_v1 import _helpers

    write_pb = write.Write(
        update=document.Document(
            name=document_path,
            fields=_helpers.encode_dict(document_data),
        )
    )
    if merge:
        extracted = [
            path
            for path, _ in _helpers.extract_fields(
                document_data, _helpers.FieldPath()
            )
        ]
        # Sort FieldPath objects first, then (as the original did) sort
        # the string representations again when building the mask.
        reprs = [path.to_api_repr() for path in sorted(extracted)]
        mask = common.DocumentMask(field_paths=sorted(reprs))
        # proto-plus wrappers: copy into the raw ._pb message directly.
        write_pb._pb.update_mask.CopyFrom(mask._pb)
    return write_pb
def _doc_get_info(ref_string, values):
    """Return a ``(document_pb, read_time)`` pair for a fake "get"
    (types API variant).

    The timestamps are staggered 100 seconds apart so that
    create < update < read, keeping them distinct for assertions.
    """
    from google.cloud.firestore_v1.types import document
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.firestore_v1 import _helpers

    # NOTE(review): naive UTC datetimes — assumed compatible with
    # _datetime_to_pb_timestamp; confirm before modernizing.
    now = datetime.datetime.utcnow()
    step = datetime.timedelta(seconds=100)
    read_time = _datetime_to_pb_timestamp(now)
    update_time = _datetime_to_pb_timestamp(now - step)
    create_time = _datetime_to_pb_timestamp(now - 2 * step)

    doc_pb = document.Document(
        name=ref_string,
        fields=_helpers.encode_dict(values),
        create_time=create_time,
        update_time=update_time,
    )
    return doc_pb, read_time
def _bundled_collection_helper(
    self,
    document_ids: typing.Optional[typing.List[str]] = None,
    data: typing.Optional[typing.List[typing.Dict]] = None,
) -> collection.CollectionReference:
    """Builder of a mocked Query for the sake of testing Bundles.

    Bundling queries involves loading the actual documents for cold
    storage, and this method arranges all of the necessary mocks so that
    unit tests can think they are evaluating a live query over
    *document_ids* (defaulting to ``["doc-1", "doc-2"]``), with each
    document's fields taken from *data* when provided.
    """
    from google.cloud.firestore_v1 import _helpers
    from google.cloud.firestore_v1.types.document import Document
    from google.cloud.firestore_v1.types.firestore import RunQueryResponse
    from google.protobuf.timestamp_pb2 import Timestamp  # type: ignore

    client = self.get_client()
    name_template = client._database_string + "/documents/col/{}"
    ids = document_ids or ["doc-1", "doc-2"]

    def _payload_at(pos: int):
        # None when no payload was supplied for this position; the
        # caller then substitutes a default payload.
        if data is not None and len(data) > pos:
            return data[pos]
        return None

    responses = []
    for pos, doc_id in enumerate(ids):
        fields = _helpers.encode_dict(_payload_at(pos) or {"hello": "world"})
        responses.append(
            RunQueryResponse(
                transaction=b"",
                document=Document(
                    name=name_template.format(doc_id),
                    fields=fields,
                    create_time=Timestamp(seconds=1, nanos=1),
                    update_time=Timestamp(seconds=1, nanos=1),
                ),
                read_time=_test_helpers.build_timestamp(),
            )
        )

    iterator = self.build_results_iterable(responses)
    api_client = self.get_internal_client_mock()
    api_client.run_query.return_value = iterator
    client._firestore_api_internal = api_client
    return self.get_collection_class()("col", client=client)
def _write_pb_for_set(document_path, document_data, merge):
    """Build a set ``Write`` protobuf (proto API variant).

    When *merge* is true, attaches an ``update_mask`` covering every
    field path found in *document_data*.
    """
    from google.cloud.firestore_v1.proto import common_pb2
    from google.cloud.firestore_v1.proto import document_pb2
    from google.cloud.firestore_v1.proto import write_pb2
    from google.cloud.firestore_v1 import _helpers

    write_pb = write_pb2.Write(
        update=document_pb2.Document(
            name=document_path,
            fields=_helpers.encode_dict(document_data),
        )
    )
    if merge:
        extracted = [
            path
            for path, _ in _helpers.extract_fields(
                document_data, _helpers.FieldPath()
            )
        ]
        # Sort FieldPath objects first, then (as the original did) sort
        # the string representations again when building the mask.
        reprs = [path.to_api_repr() for path in sorted(extracted)]
        mask = common_pb2.DocumentMask(field_paths=sorted(reprs))
        write_pb.update_mask.CopyFrom(mask)
    return write_pb