Example #1
0
    def bulk_update(
        self,
        docs: List[Union[BaseModel, dict]],
        owner: Optional[str] = None,
        force: bool = False,
        merge: bool = False,
        batch_size: int = 300,
    ) -> None:
        """Upsert many documents using Firestore batched writes.

        Every document is first normalized to a plain dict through
        ``self.update(..., dry_run=True)`` (validation/serialization with no
        database write), then written in commits of at most ``batch_size``
        operations.

        Args:
            docs: Documents to write, as models or plain dicts.
            owner: Owner forwarded to ``self.update``.
            force: Forwarded to ``self.update``.
            merge: If True, merge each document into any existing one
                instead of overwriting it.
            batch_size: Maximum writes per committed batch; must be > 0.

        Raises:
            ValueError: If ``batch_size`` is not positive or ``docs`` is
                empty.
        """
        if batch_size <= 0:
            raise ValueError("`batch_size` must be larger than 0")
        if not docs:
            raise ValueError("No documents provided")

        # Normalize every document to a dict without touching the database.
        parsed_docs = [
            self.update(doc=doc, owner=owner, force=force, dry_run=True)
            for doc in docs
        ]

        write_batch = WriteBatch(client=self._client)

        for i, doc in enumerate(parsed_docs):
            # Use the document's own id when present; otherwise mint one.
            doc_id = doc.pop("id", None)
            if doc_id is None:
                doc_id = str(ObjectId())

            write_batch.set(
                reference=self.collection.document(doc_id),
                document_data=doc,
                merge=merge,
            )

            # Commit and start a fresh batch every `batch_size` writes.
            if (i + 1) % batch_size == 0:
                write_batch.commit()
                write_batch = WriteBatch(client=self._client)

        # Flush the final, partially filled batch (if any writes remain).
        if len(parsed_docs) % batch_size != 0:
            write_batch.commit()
Example #2
0
    def batch(self):
        """Create a new write batch bound to this client.

        Returns:
            ~.firestore_v1.batch.WriteBatch: An empty batch that
            accumulates document changes locally and sends them all
            at once on commit.
        """
        new_batch = WriteBatch(self)
        return new_batch
Example #3
0
    def batch(self) -> WriteBatch:
        """Create a write batch tied to this client.

        Returns:
            :class:`~google.cloud.firestore_v1.batch.WriteBatch`:
            An empty batch that collects document changes and commits
            them in a single request.
        """
        new_batch = WriteBatch(self)
        return new_batch
Example #4
0
    def bulk_delete(
        self,
        doc_ids: List[str],
        owner: Optional[str] = None,
        force: bool = False,
        batch_size: int = 300,
    ) -> None:
        """Delete many documents by id using Firestore batched writes.

        For schemas derived from ``SchemaWithOwner`` with an ``owner``
        given, each document is first updated (``deleted=True`` plus, when
        the collection is updatable, ``updated_at``/``updated_by``) so
        change listeners can attribute the deletion, and then deleted in
        the same batch.

        Args:
            doc_ids: Ids of the documents to delete.
            owner: User performing the deletion; required (unless
                ``force``) when the schema is owned and ownership is
                enforced.
            force: Skip the ownership requirement.
            batch_size: Maximum operations per committed batch; must be
                > 0. Internally halved when each id costs two operations.

        Raises:
            ValueError: If ``batch_size`` is not positive, ``doc_ids`` is
                empty, or a required ``owner`` is missing.
        """
        if batch_size <= 0:
            raise ValueError("`batch_size` must be larger than 0")
        if not doc_ids:
            raise ValueError("No document IDs provided")

        # Decide whether to write a tombstone update before each delete so
        # the change stream records who performed the deletion.
        update_before_delete = False
        if issubclass(self.schema, SchemaWithOwner):
            if not force and (owner is None and self.force_ownership):
                raise ValueError(
                    f"An `owner` must be defined for collection {self.name}")

            if owner is not None:
                update_before_delete = True

        write_batch = WriteBatch(client=self._client)

        if update_before_delete:
            # Each id then costs two batch operations (set + delete), so
            # halve the batch size to respect the per-commit limit.
            batch_size = max(1, batch_size // 2)

        for i, doc_id in enumerate(doc_ids):
            if update_before_delete:
                tombstone = {"deleted": True}
                if self.is_updatable:
                    # Timezone-aware "now"; datetime.utcnow() is deprecated.
                    tombstone["updated_at"] = datetime.now(timezone.utc)
                    tombstone["updated_by"] = owner
                write_batch.set(
                    reference=self.collection.document(doc_id),
                    document_data=tombstone,
                    merge=True,
                )

            write_batch.delete(reference=self.collection.document(doc_id))

            # Commit and start a fresh batch every `batch_size` ids.
            if (i + 1) % batch_size == 0:
                write_batch.commit()
                write_batch = WriteBatch(client=self._client)

        # Flush the final, partially filled batch (if any operations remain).
        if len(doc_ids) % batch_size != 0:
            write_batch.commit()
Example #5
0
def _make_write_batch(*args, **kwargs):
    """Build a ``WriteBatch``; the import is deferred to call time."""
    from google.cloud.firestore_v1.batch import WriteBatch

    batch = WriteBatch(*args, **kwargs)
    return batch