def test_retry_idempotent(identity):
    """Verify that get_user retries through transient errors until it succeeds."""
    # Three failures followed by a successful response, consumed in order.
    canned = (
        exceptions.Aborted(message='derp de derp'),
        exceptions.ServiceUnavailable(message='whups'),
        exceptions.Unknown(message='huh?'),
        showcase_v1alpha3.User(name='users/0', display_name='Guido'),
    )
    stub = mock.Mock(side_effect=canned)

    # Patch the transport so each RPC attempt pops the next canned response,
    # and stub out time.sleep so retry back-off does not slow the test down.
    transport = type(identity).get_transport_class()
    patch_rpc = mock.patch.object(
        transport, 'get_user',
        new_callable=mock.PropertyMock(return_value=stub))
    with patch_rpc, mock.patch.object(time, 'sleep'):
        response = identity.get_user({'name': 'users/0'})

    assert response.name == 'users/0'
    assert response.display_name == 'Guido'
    assert stub.call_count == 4
def _open(
    self,
    initial_request: gapic_types.AppendRowsRequest,
    timeout: float = _DEFAULT_TIMEOUT,
) -> "AppendRowsFuture":
    """Open an append rows stream.

    This is automatically called by the first call to the
    :attr:`google.cloud.bigquery_storage_v1beta2.writer.AppendRowsStream.send`
    method.

    Args:
        initial_request:
            The initial request to start the stream. Must have
            :attr:`google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest.write_stream`
            and ``proto_rows.writer_schema.proto_descriptor`` and
            properties populated.
        timeout:
            How long (in seconds) to wait for the stream to be ready.
            ``None`` means wait indefinitely.

    Returns:
        A future, which can be used to process the response to the initial
        request when it arrives.

    Raises:
        ValueError: If this manager is already open.
        bqstorage_exceptions.StreamClosedError:
            If the manager was closed and can not be re-used.
        exceptions.Unknown:
            If the background consumer died while opening the stream.
    """
    # Guard against double-open and use-after-close.
    if self.is_active:
        raise ValueError("This manager is already open.")

    if self._closed:
        raise bqstorage_exceptions.StreamClosedError(
            "This manager has been closed and can not be re-used.")

    start_time = time.monotonic()

    # Merge the caller's initial request on top of the request template
    # captured at construction time; fields set in ``initial_request`` win.
    # NOTE: ``_inital_request_template`` preserves a historical misspelling
    # of the attribute name set elsewhere in this class — do not "fix" it
    # here without renaming the attribute everywhere it is used.
    request = gapic_types.AppendRowsRequest()
    gapic_types.AppendRowsRequest.copy_from(request, self._inital_request_template)
    request._pb.MergeFrom(initial_request._pb)
    self._stream_name = request.write_stream

    # Queue a future for the response to this first request so the
    # response-handling callback can resolve futures in FIFO order.
    inital_response_future = AppendRowsFuture(self)
    self._futures_queue.put(inital_response_future)

    self._rpc = bidi.BidiRpc(
        self._client.append_rows,
        initial_request=request,
        # TODO: pass in retry and timeout. Blocked by
        # https://github.com/googleapis/python-api-core/issues/262
        metadata=tuple(
            itertools.chain(
                self._metadata,
                # This header is required so that the BigQuery Storage API
                # knows which region to route the request to.
                (
                    ("x-goog-request-params", f"write_stream={self._stream_name}"),
                ),
            )),
    )
    self._rpc.add_done_callback(self._on_rpc_done)

    # Responses are consumed on a background thread.
    self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response)
    self._consumer.start()

    # Make sure RPC has started before returning.
    # Without this, consumers may get:
    #
    #     ValueError: Can not send() on an RPC that has never been open()ed.
    #
    # when they try to send a request.
    while not self._rpc.is_active and self._consumer.is_active:
        # Avoid 100% CPU while waiting for RPC to be ready.
        time.sleep(_WRITE_OPEN_INTERVAL)

        # TODO: Check retry.deadline instead of (per-request) timeout.
        # Blocked by
        # https://github.com/googleapis/python-api-core/issues/262
        if timeout is None:
            continue
        current_time = time.monotonic()
        if current_time - start_time > timeout:
            break

    # Something went wrong when opening the RPC.
    if not self._consumer.is_active:
        # TODO: Share the exception from _rpc.open(). Blocked by
        # https://github.com/googleapis/python-api-core/issues/268
        request_exception = exceptions.Unknown(
            "There was a problem opening the stream. "
            "Try turning on DEBUG level logs to see the error.")
        self.close(reason=request_exception)
        raise request_exception

    return inital_response_future
def test_unknown(core_retry):
    """Unknown errors count as transient even when core's predicate declines."""
    core_retry.if_transient_error.return_value = False
    exc = core_exceptions.Unknown("testing")

    assert _retry.is_transient_error(exc) is True
    core_retry.if_transient_error.assert_called_once_with(exc)