def test_consumer_unexpected_error(self, caplog):
    """A non-API error from recv() is logged and shuts the consumer down."""
    caplog.set_level(logging.DEBUG)

    rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
    rpc.is_active = True
    rpc.recv.side_effect = ValueError()
    callback = mock.Mock(spec=["__call__"])

    consumer = bidi.BackgroundConsumer(rpc, callback)
    consumer.start()

    # Spin until the consumer's background thread shuts itself down.
    while consumer.is_active:
        pass

    callback.assert_not_called()
    rpc.recv.assert_called_once()
    assert "caught unexpected exception" in caplog.text
def test_consumer_expected_error(self, caplog):
    """An expected API error from recv() is logged and ends consumption."""
    caplog.set_level(logging.DEBUG)

    rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
    rpc.is_active = True
    rpc.recv.side_effect = exceptions.ServiceUnavailable("Gone away")
    callback = mock.Mock(spec=["__call__"])

    consumer = bidi.BackgroundConsumer(rpc, callback)
    consumer.start()

    # Spin until the consumer's background thread shuts itself down.
    while consumer.is_active:
        pass

    callback.assert_not_called()
    rpc.recv.assert_called_once()
    assert "caught error" in caplog.text
def open(self, callback):
    """Start pulling messages from the stream.

    Args:
        callback (Callable[None, google.cloud.pubsub_v1.message.Messages]):
            A callback that will be called for each message received on the
            stream.
    """
    # Refuse to open twice, or to reopen after close().
    if self.is_active:
        raise ValueError("This manager is already open.")
    if self._closed:
        raise ValueError("This manager has been closed and can not be re-used.")

    self._callback = functools.partial(_wrap_callback_errors, callback)

    # Build the resumable streaming-pull RPC and hook its termination
    # notification.
    self._rpc = bidi.ResumableBidiRpc(
        start_rpc=self._client.api.streaming_pull,
        initial_request=self._get_initial_request,
        should_recover=self._should_recover,
    )
    self._rpc.add_done_callback(self._on_rpc_done)

    # Construct all helper threads before starting any of them.
    self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue)
    self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response)
    self._leaser = leaser.Leaser(self)
    self._heartbeater = heartbeater.Heartbeater(self)

    # Start request dispatching, message consumption, lease maintenance,
    # and stream heartbeats — in that order.
    for helper in (
        self._dispatcher,
        self._consumer,
        self._leaser,
        self._heartbeater,
    ):
        helper.start()
def test_double_stop(self, caplog):
    """Calling stop() a second time must be a harmless no-op."""
    caplog.set_level(logging.DEBUG)

    rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
    rpc.is_active = True
    callback = mock.Mock(spec=["__call__"])

    def mark_inactive():
        rpc.is_active = False

    rpc.close.side_effect = mark_inactive

    consumer = bidi.BackgroundConsumer(rpc, callback)
    consumer.start()
    assert consumer.is_active is True

    consumer.stop()
    assert consumer.is_active is False

    # calling stop twice should not result in an error.
    consumer.stop()
def test_consume_once_then_exit(self):
    """The consumer delivers one response, then exits once the RPC dies."""
    rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
    rpc.is_active = True
    rpc.recv.side_effect = [mock.sentinel.response_1]
    received = threading.Event()

    def callback(response):
        assert response == mock.sentinel.response_1
        # Flip the RPC inactive so the consumer loop terminates after this
        # single response.
        rpc.is_active = False
        received.set()

    consumer = bidi.BackgroundConsumer(rpc, callback)
    consumer.start()

    received.wait()

    rpc.recv.assert_called_once()
    assert rpc.is_active is False

    consumer.stop()

    rpc.close.assert_called_once()
    assert consumer.is_active is False
def _open(
    self,
    initial_request: gapic_types.AppendRowsRequest,
    timeout: float = _DEFAULT_TIMEOUT,
) -> "AppendRowsFuture":
    """Open an append rows stream.

    This is automatically called by the first call to the
    :attr:`google.cloud.bigquery_storage_v1beta2.writer.AppendRowsStream.send`
    method.

    Args:
        initial_request:
            The initial request to start the stream. Must have
            :attr:`google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest.write_stream`
            and ``proto_rows.writer_schema.proto_descriptor`` and
            properties populated.
        timeout:
            How long (in seconds) to wait for the stream to be ready.

    Returns:
        A future, which can be used to process the response to the initial
        request when it arrives.

    Raises:
        ValueError: If the manager is already open.
        bqstorage_exceptions.StreamClosedError: If the manager was closed.
        exceptions.Unknown: If the RPC could not be started.
    """
    if self.is_active:
        raise ValueError("This manager is already open.")

    if self._closed:
        raise bqstorage_exceptions.StreamClosedError(
            "This manager has been closed and can not be re-used.")

    start_time = time.monotonic()

    # Merge the stream-level request template into the caller's first
    # request so the opening message carries both the template fields and
    # the write stream name.
    # NOTE: the attribute name keeps its historical "inital" spelling; it
    # is assigned elsewhere in this class.
    request = gapic_types.AppendRowsRequest()
    gapic_types.AppendRowsRequest.copy_from(request, self._inital_request_template)
    request._pb.MergeFrom(initial_request._pb)
    self._stream_name = request.write_stream

    # Queue the future for the initial request before opening the stream so
    # the first response is matched to it.
    initial_response_future = AppendRowsFuture(self)
    self._futures_queue.put(initial_response_future)

    self._rpc = bidi.BidiRpc(
        self._client.append_rows,
        initial_request=request,
        # TODO: pass in retry and timeout. Blocked by
        # https://github.com/googleapis/python-api-core/issues/262
        metadata=tuple(
            itertools.chain(
                self._metadata,
                # This header is required so that the BigQuery Storage API
                # knows which region to route the request to.
                (
                    ("x-goog-request-params", f"write_stream={self._stream_name}"),
                ),
            )),
    )
    self._rpc.add_done_callback(self._on_rpc_done)

    self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response)
    self._consumer.start()

    # Make sure RPC has started before returning.
    # Without this, consumers may get:
    #
    # ValueError: Can not send() on an RPC that has never been open()ed.
    #
    # when they try to send a request.
    while not self._rpc.is_active and self._consumer.is_active:
        # Avoid 100% CPU while waiting for RPC to be ready.
        time.sleep(_WRITE_OPEN_INTERVAL)

        # TODO: Check retry.deadline instead of (per-request) timeout.
        # Blocked by
        # https://github.com/googleapis/python-api-core/issues/262
        if timeout is None:
            continue
        current_time = time.monotonic()
        if current_time - start_time > timeout:
            break

    # Something went wrong when opening the RPC.
    if not self._consumer.is_active:
        # TODO: Share the exception from _rpc.open(). Blocked by
        # https://github.com/googleapis/python-api-core/issues/268
        request_exception = exceptions.Unknown(
            "There was a problem opening the stream. "
            "Try turning on DEBUG level logs to see the error.")
        self.close(reason=request_exception)
        raise request_exception

    return initial_response_future
def test_pause_resume_and_close(self):
    # This test is relatively complex. It attempts to start the consumer,
    # consume one item, pause the consumer, check the state of the world,
    # then resume the consumer. Doing this in a deterministic fashion
    # requires a bit more mocking and patching than usual.

    bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
    bidi_rpc.is_active = True

    def close_side_effect():
        bidi_rpc.is_active = False

    bidi_rpc.close.side_effect = close_side_effect

    # These are used to coordinate the two threads to ensure deterministic
    # execution.
    should_continue = threading.Event()
    responses_and_events = {
        mock.sentinel.response_1: threading.Event(),
        mock.sentinel.response_2: threading.Event(),
    }
    bidi_rpc.recv.side_effect = [
        mock.sentinel.response_1, mock.sentinel.response_2
    ]

    recved_responses = []
    # Bound before on_response is defined so the closure can reference the
    # consumer; reassigned to the real consumer below (late binding).
    consumer = None

    def on_response(response):
        # Pause immediately on the first response so the consumer thread
        # parks before delivering anything else.
        if response == mock.sentinel.response_1:
            consumer.pause()

        recved_responses.append(response)
        # Signal the test thread that this response arrived, then block the
        # consumer thread until the test thread allows it to proceed.
        responses_and_events[response].set()
        should_continue.wait()

    consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
    consumer.start()

    # Wait for the first response to be recved.
    responses_and_events[mock.sentinel.response_1].wait()

    # Ensure only one item has been recved and that the consumer is paused.
    assert recved_responses == [mock.sentinel.response_1]
    assert consumer.is_paused is True
    assert consumer.is_active is True

    # Unpause the consumer, wait for the second item, then close the
    # consumer.
    should_continue.set()
    consumer.resume()

    responses_and_events[mock.sentinel.response_2].wait()

    assert recved_responses == [
        mock.sentinel.response_1, mock.sentinel.response_2
    ]

    consumer.stop()
    assert consumer.is_active is False
def open(self, callback, on_callback_error):
    """Begin consuming messages.

    Args:
        callback (Callable[None, google.cloud.pubsub_v1.message.Message]):
            A callback that will be called for each message received on the
            stream.
        on_callback_error (Callable[Exception]):
            A callable that will be called if an exception is raised in
            the provided `callback`.

    Raises:
        ValueError: If the manager is already open, or was closed.
    """
    if self.is_active:
        raise ValueError("This manager is already open.")

    if self._closed:
        raise ValueError(
            "This manager has been closed and can not be re-used.")

    self._callback = functools.partial(_wrap_callback_errors, callback,
                                       on_callback_error)

    # Create the RPC
    # We must use a fixed value for the ACK deadline, as we cannot read it
    # from the subscription. The latter would require `pubsub.subscriptions.get`
    # permission, which is not granted to the default subscriber role
    # `roles/pubsub.subscriber`.
    # See also https://github.com/googleapis/google-cloud-python/issues/9339
    #
    # When dynamic lease management is enabled for the "on hold" messages,
    # the default stream ACK deadline should again be set based on the
    # historic ACK timing data, i.e. `self.ack_histogram.percentile(99)`.
    stream_ack_deadline_seconds = _DEFAULT_STREAM_ACK_DEADLINE

    get_initial_request = functools.partial(self._get_initial_request,
                                            stream_ack_deadline_seconds)
    self._rpc = bidi.ResumableBidiRpc(
        start_rpc=self._client.api.streaming_pull,
        initial_request=get_initial_request,
        should_recover=self._should_recover,
        should_terminate=self._should_terminate,
        throttle_reopen=True,
    )
    self._rpc.add_done_callback(self._on_rpc_done)

    # Use lazy %-style arguments so the message is only formatted when
    # DEBUG logging is actually enabled (output is unchanged).
    _LOGGER.debug(
        "Creating a stream, default ACK deadline set to %s seconds.",
        stream_ack_deadline_seconds,
    )

    # Create references to threads
    self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue)
    self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response)
    self._leaser = leaser.Leaser(self)
    self._heartbeater = heartbeater.Heartbeater(self)

    # Start the thread to pass the requests.
    self._dispatcher.start()

    # Start consuming messages.
    self._consumer.start()

    # Start the lease maintainer thread.
    self._leaser.start()

    # Start the stream heartbeater thread.
    self._heartbeater.start()