def test_basebulkwriter_update_retriable(self):
        """Update ops should be retried as long as the error handler says so."""
        from google.cloud.firestore_v1.bulk_writer import (
            BulkRetry,
            BulkWriteFailure,
            BulkWriterOptions,
        )

        client = self._make_client()
        bw = _make_no_send_bulk_writer(
            client, options=BulkWriterOptions(retry=BulkRetry.immediate)
        )
        # Rig the first document of every batch to fail.
        bw._fail_indices = [0]
        bw._total_retries = 0
        max_retries = 6

        def maybe_retry(failure, writer) -> bool:
            assert isinstance(failure, BulkWriteFailure)
            retry = failure.attempts < max_retries
            if retry:
                writer._total_retries += 1
            return retry

        bw.on_write_error(maybe_retry)

        for doc_ref, doc_data in self._doc_iter(client, 1):
            bw.update(doc_ref, doc_data)
        bw.flush()

        # One failing doc, retried exactly max_retries times; queue drained.
        assert bw._total_retries == max_retries
        assert len(bw._operations) == 0
 def test_serial_calls_send_correctly(self):
     """Serial mode should flush batches one at a time, in order."""
     writer = NoSendBulkWriter(
         self.client, options=BulkWriterOptions(mode=SendMode.serial)
     )
     for doc_ref, doc_data in self._doc_iter(101):
         writer.create(doc_ref, doc_data)
     writer.flush()
     # 101 docs -> five full 20-item batches plus one trailing 1-item batch.
     self._verify_bw_activity(writer, [(20, 5), (1, 1)])
    def test_default_error_handler(self):
        """The stock error handler should retry a failing write 15 times."""
        writer = NoSendBulkWriter(
            self.client, options=BulkWriterOptions(retry=BulkRetry.immediate)
        )
        writer._attempts = 0

        def track_attempts(error, bw):
            # Record the attempt count, then defer to the default policy.
            bw._attempts = error.attempts
            return bw._default_on_error(error, bw)

        writer.on_write_error(track_attempts)

        # Rig the first document of every batch to fail.
        writer._fail_indices = [0]
        for doc_ref, doc_data in self._doc_iter(1):
            writer.create(doc_ref, doc_data)
        writer.flush()
        self.assertEqual(writer._attempts, 15)
    def test_basebulkwriter_handles_errors_and_successes_correctly(self):
        """Success, batch, and error callbacks should all fire with correct counts."""
        from google.cloud.firestore_v1.bulk_writer import (
            BulkRetry,
            BulkWriteFailure,
            BulkWriterOptions,
        )

        client = self._make_client()
        bw = _make_no_send_bulk_writer(
            client, options=BulkWriterOptions(retry=BulkRetry.immediate)
        )
        # Rig the first document of every batch to fail.
        bw._fail_indices = [0]
        bw._sent_batches = 0
        bw._sent_documents = 0
        bw._total_retries = 0

        max_retries = 1

        def count_batch(batch, response, writer):
            writer._sent_batches += 1

        def count_write(ref, result, writer):
            writer._sent_documents += 1

        def maybe_retry(failure, writer) -> bool:
            assert isinstance(failure, BulkWriteFailure)
            retry = failure.attempts < max_retries
            if retry:
                writer._total_retries += 1
            return retry

        bw.on_batch_result(count_batch)
        bw.on_write_result(count_write)
        bw.on_write_error(maybe_retry)

        for doc_ref, doc_data in self._doc_iter(client, 40):
            bw.create(doc_ref, doc_data)
        bw.flush()

        # Two 20-doc batches each lose their first doc -> 19 successes apiece,
        # one retry per batch, and 4 batch sends overall.
        assert bw._sent_documents == 38
        assert bw._total_retries == max_retries * 2
        assert bw._sent_batches == 4
        assert len(bw._operations) == 0
    def test_basebulkwriter_serial_calls_send_correctly(self):
        """Serial mode should flush batches one at a time, in order."""
        from google.cloud.firestore_v1.bulk_writer import BulkWriterOptions
        from google.cloud.firestore_v1.bulk_writer import SendMode

        client = self._make_client()
        bw = _make_no_send_bulk_writer(
            client, options=BulkWriterOptions(mode=SendMode.serial)
        )
        for doc_ref, doc_data in self._doc_iter(client, 101):
            bw.create(doc_ref, doc_data)
        bw.flush()
        # 101 docs -> five full 20-item batches plus one trailing 1-item batch.
        self._verify_bw_activity(bw, [(20, 5), (1, 1)])
    def _basebulkwriter_ctor_helper(self, **kw):
        """Assert the BulkWriter ctor wires up its client and options correctly."""
        from google.cloud.firestore_v1.bulk_writer import BulkWriterOptions

        client = self._make_client()

        if not self._PRESERVES_CLIENT:
            # Flavors that don't keep the client swap in a synchronous copy.
            sync_copy = client._sync_copy = object()

        bw = _make_no_send_bulk_writer(client, **kw)

        expected_client = client if self._PRESERVES_CLIENT else sync_copy
        assert bw._client is expected_client

        if "options" in kw:
            # Explicit options must be stored by identity, not copied.
            assert bw._options is kw["options"]
        else:
            assert bw._options == BulkWriterOptions()
    def test_handles_errors_and_successes_correctly(self):
        """Success, batch, and error callbacks should all fire with correct counts."""
        writer = NoSendBulkWriter(
            self.client, options=BulkWriterOptions(retry=BulkRetry.immediate)
        )
        # Rig the first document of every batch to fail.
        writer._fail_indices = [0]
        writer._sent_batches = 0
        writer._sent_documents = 0
        writer._total_retries = 0

        max_retries = 1

        def count_batch(batch, response, bw):
            bw._sent_batches += 1

        def count_write(ref, result, bw):
            bw._sent_documents += 1

        def maybe_retry(failure, bw) -> bool:
            assert isinstance(failure, BulkWriteFailure)
            retry = failure.attempts < max_retries
            if retry:
                bw._total_retries += 1
            return retry

        writer.on_batch_result(count_batch)
        writer.on_write_result(count_write)
        writer.on_write_error(maybe_retry)

        for doc_ref, doc_data in self._doc_iter(40):
            writer.create(doc_ref, doc_data)
        writer.flush()

        # Two 20-doc batches each lose their first doc -> 19 successes apiece,
        # one retry per batch, and 4 batch sends overall.
        self.assertEqual(writer._sent_documents, 38)
        self.assertEqual(writer._total_retries, max_retries * 2)
        self.assertEqual(writer._sent_batches, 4)
        self.assertEqual(len(writer._operations), 0)
    def test_basebulkwriter_default_error_handler(self):
        """The stock error handler should retry a failing write 15 times."""
        from google.cloud.firestore_v1.bulk_writer import BulkRetry
        from google.cloud.firestore_v1.bulk_writer import BulkWriterOptions

        client = self._make_client()
        bw = _make_no_send_bulk_writer(
            client, options=BulkWriterOptions(retry=BulkRetry.immediate)
        )
        bw._attempts = 0

        def track_attempts(error, writer):
            # Record the attempt count, then defer to the default policy.
            writer._attempts = error.attempts
            return writer._default_on_error(error, writer)

        bw.on_write_error(track_attempts)

        # Rig the first document of every batch to fail.
        bw._fail_indices = [0]
        for doc_ref, doc_data in self._doc_iter(client, 1):
            bw.create(doc_ref, doc_data)
        bw.flush()
        assert bw._attempts == 15
    def test_invokes_error_callbacks_successfully_multiple_retries(self):
        """A doc failing repeatedly should be retried until the handler gives up."""
        writer = NoSendBulkWriter(
            self.client, options=BulkWriterOptions(retry=BulkRetry.immediate)
        )
        # Rig the first document of every batch to fail.
        writer._fail_indices = [0]
        writer._sent_batches = 0
        writer._sent_documents = 0
        writer._total_retries = 0

        max_retries = 10

        def count_batch(batch, response, bw):
            bw._sent_batches += 1

        def count_write(ref, result, bw):
            bw._sent_documents += 1

        def maybe_retry(failure, bw) -> bool:
            assert isinstance(failure, BulkWriteFailure)
            retry = failure.attempts < max_retries
            if retry:
                bw._total_retries += 1
            return retry

        writer.on_batch_result(count_batch)
        writer.on_write_result(count_write)
        writer.on_write_error(maybe_retry)

        for doc_ref, doc_data in self._doc_iter(2):
            writer.create(doc_ref, doc_data)
        writer.flush()

        # Only the second doc ever lands; the first is retried max_retries
        # times, producing one extra batch per retry.
        self.assertEqual(writer._sent_documents, 1)
        self.assertEqual(writer._total_retries, max_retries)
        self.assertEqual(writer._sent_batches, max_retries + 1)
        self.assertEqual(len(writer._operations), 0)
    def test_update_retriable(self):
        """Update ops should be retried as long as the error handler says so."""
        writer = NoSendBulkWriter(
            self.client, options=BulkWriterOptions(retry=BulkRetry.immediate)
        )
        # Rig the first document of every batch to fail.
        writer._fail_indices = [0]
        writer._total_retries = 0
        max_retries = 6

        def maybe_retry(failure, bw) -> bool:
            assert isinstance(failure, BulkWriteFailure)
            retry = failure.attempts < max_retries
            if retry:
                bw._total_retries += 1
            return retry

        writer.on_write_error(maybe_retry)

        for doc_ref, doc_data in self._doc_iter(1):
            writer.update(doc_ref, doc_data)
        writer.flush()

        # One failing doc, retried exactly max_retries times; queue drained.
        self.assertEqual(writer._total_retries, max_retries)
        self.assertEqual(len(writer._operations), 0)
async def test_async_recursive_delete_serialized(client, cleanup):
    """Recursive delete should also work with a serial-mode BulkWriter."""
    from google.cloud.firestore_v1.bulk_writer import BulkWriterOptions, SendMode

    serial_options = BulkWriterOptions(mode=SendMode.serial)
    bw = client.bulk_writer(options=serial_options)
    await _do_recursive_delete_with_bulk_writer(client, bw)
    def test_basebulkwriter_ctor_explicit(self):
        """Explicitly-passed options should be stored on the writer."""
        from google.cloud.firestore_v1.bulk_writer import BulkRetry
        from google.cloud.firestore_v1.bulk_writer import BulkWriterOptions

        self._basebulkwriter_ctor_helper(
            options=BulkWriterOptions(retry=BulkRetry.immediate)
        )