Example #1
def test_publish_with_ordering_key():
    creds = mock.Mock(spec=credentials.Credentials)
    publisher_options = types.PublisherOptions(enable_message_ordering=True)
    client = publisher.Client(publisher_options=publisher_options,
                              credentials=creds)

    # Use a mock in lieu of the actual batch class.
    batch = mock.Mock(spec=client._batch_class)
    # Set the mock up to claim indiscriminately that it accepts all messages.
    batch.will_accept.return_value = True
    batch.publish.side_effect = (mock.sentinel.future1, mock.sentinel.future2)

    topic = "topic/path"
    ordering_key = "k1"
    client._set_batch(topic, batch, ordering_key=ordering_key)

    # Begin publishing.
    future1 = client.publish(topic, b"spam", ordering_key=ordering_key)
    future2 = client.publish(topic,
                             b"foo",
                             bar="baz",
                             ordering_key=ordering_key)

    assert future1 is mock.sentinel.future1
    assert future2 is mock.sentinel.future2

    # Check mock.
    batch.publish.assert_has_calls([
        mock.call(types.PubsubMessage(data=b"spam", ordering_key="k1")),
        mock.call(
            types.PubsubMessage(data=b"foo",
                                attributes={"bar": "baz"},
                                ordering_key="k1")),
    ])
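For orientation, a minimal end-to-end sketch of the same ordering-key flow through the public API could look as follows; the project, topic, and key names are placeholders, not values from the test above.

from google.cloud import pubsub_v1

# Ordering must be enabled up front; publish() rejects a non-empty
# ordering_key when enable_message_ordering is False.
publisher = pubsub_v1.PublisherClient(
    publisher_options=pubsub_v1.types.PublisherOptions(enable_message_ordering=True)
)
topic_path = publisher.topic_path("my-project", "my-topic")  # hypothetical names

# Messages sharing an ordering key are committed in publish order.
for payload in (b"first", b"second", b"third"):
    future = publisher.publish(topic_path, payload, ordering_key="k1")
    print(future.result())

In production, ordered delivery also depends on publishing through a regional endpoint; that configuration is omitted here.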
Example #2
def test_ordered_sequencer_cleaned_up(creds):
    # Max latency is infinite so a commit thread is not created.
    # We don't want a commit thread to interfere with this test.
    batch_settings = types.BatchSettings(max_latency=float("inf"))
    publisher_options = types.PublisherOptions(enable_message_ordering=True)
    client = publisher.Client(
        batch_settings=batch_settings,
        publisher_options=publisher_options,
        credentials=creds,
    )

    topic = "topic"
    ordering_key = "ord_key"
    sequencer = mock.Mock(spec=ordered_sequencer.OrderedSequencer)
    sequencer.is_finished.return_value = False
    client._set_sequencer(topic=topic,
                          sequencer=sequencer,
                          ordering_key=ordering_key)

    assert len(client._sequencers) == 1
    # 'sequencer' is not finished yet so don't remove it.
    client._commit_sequencers()
    assert len(client._sequencers) == 1

    sequencer.is_finished.return_value = True
    # 'sequencer' is finished so remove it.
    client._commit_sequencers()
    assert len(client._sequencers) == 0
Example #3
def test_resume_publish_ordering_keys_not_enabled(creds):
    publisher_options = types.PublisherOptions(enable_message_ordering=False)
    client = publisher.Client(publisher_options=publisher_options,
                              credentials=creds)

    # Throw on calling resume_publish() when enable_message_ordering is False.
    with pytest.raises(ValueError):
        client.resume_publish("topic", "ord_key")
Example #4
def test_resume_publish_no_sequencer_found(creds):
    publisher_options = types.PublisherOptions(enable_message_ordering=True)
    client = publisher.Client(publisher_options=publisher_options,
                              credentials=creds)

    # Check no exception is thrown if a sequencer with the (topic, ordering_key)
    # pair does not exist.
    client.resume_publish("topic", "ord_key")
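Examples #3 and #4 only exercise the guard rails of resume_publish(). In actual use, a failed publish pauses its ordering key and later publishes for that key fail immediately until the key is resumed; a hedged sketch of that recovery pattern (function and argument names are illustrative):

def publish_with_recovery(publisher, topic_path, data, ordering_key):
    """Publish one message and unpause the ordering key if it fails."""
    future = publisher.publish(topic_path, data, ordering_key=ordering_key)
    try:
        return future.result()
    except Exception:
        # A failure pauses the key; resume it so subsequent publishes for the
        # same key are not rejected outright, then let the caller decide.
        publisher.resume_publish(topic_path, ordering_key)
        raise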
Example #5
def test_publish_empty_ordering_key_when_message_ordering_enabled(creds):
    client = publisher.Client(
        publisher_options=types.PublisherOptions(enable_message_ordering=True),
        credentials=creds,
    )
    topic = "topic/path"
    assert client.publish(topic, b"bytestring body",
                          ordering_key="") is not None
Example #6
def test_message_ordering_enabled(creds):
    client = publisher.Client(credentials=creds)
    assert not client._enable_message_ordering

    client = publisher.Client(
        publisher_options=types.PublisherOptions(enable_message_ordering=True),
        credentials=creds,
    )
    assert client._enable_message_ordering
Example #7
    def __init__(self,
                 name,
                 dist,
                 emulator_host,
                 emulator_port,
                 max_bytes,
                 max_latency,
                 max_messages,
                 ordering=False,
                 client_options=None,
                 credentials=None,
                 services_service=None,
                 **config):
        services_service(plugin.Plugin.__init__,
                         self,
                         name,
                         dist,
                         emulator_host=emulator_host,
                         emulator_port=emulator_port,
                         max_bytes=max_bytes,
                         max_latency=max_latency,
                         max_messages=max_messages,
                         ordering=False,
                         client_options=None,
                         credentials=None,
                         **config)

        batch_settings = types.BatchSettings(max_bytes=max_bytes,
                                             max_latency=max_latency,
                                             max_messages=max_messages)

        publisher_options = types.PublisherOptions(
            enable_message_ordering=ordering)

        settings = {}
        if client_options is not None:
            if isinstance(client_options, (str, type(u''))):
                client_options = services_service(
                    reference.load_object(client_options)[0])
            settings['client_options'] = client_options

        if emulator_host:
            channel = grpc.insecure_channel('{}:{}'.format(
                emulator_host, emulator_port))
            transport = PublisherGrpcTransport(channel=channel)
        else:
            transport = None

            if credentials is not None:
                settings['credentials'] = services_service(
                    reference.load_object(credentials)[0])

        self.__class__.proxy_target = PublisherClient(batch_settings,
                                                      publisher_options,
                                                      transport=transport,
                                                      **settings)
Example #8
def test_message_ordering_changes_retry_deadline():
    creds = mock.Mock(spec=credentials.Credentials)

    client = publisher.Client(credentials=creds)
    assert client.api._method_configs["Publish"].retry._deadline == 60

    client = publisher.Client(
        publisher_options=types.PublisherOptions(enable_message_ordering=True),
        credentials=creds,
    )
    assert client.api._method_configs["Publish"].retry._deadline == 2 ** 32 / 1000
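The expected value in the second assertion is simply 2**32 milliseconds converted to seconds, i.e. an effectively unbounded retry window:

# 2**32 ms expressed in seconds: about 4.29 million seconds, roughly 49.7 days.
assert 2 ** 32 / 1000 == 4294967.296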
Example #9
def test_resume_publish():
    creds = mock.Mock(spec=credentials.Credentials)
    publisher_options = types.PublisherOptions(enable_message_ordering=True)
    client = publisher.Client(publisher_options=publisher_options, credentials=creds)

    topic = "topic"
    ordering_key = "ord_key"
    sequencer = mock.Mock(spec=ordered_sequencer.OrderedSequencer)
    client._set_sequencer(topic=topic, sequencer=sequencer, ordering_key=ordering_key)

    client.resume_publish(topic, ordering_key)
    sequencer.unpause.assert_called_once()
Example #10
def test_publish_custom_timeout_overrides_configured_timeout(creds):
    client = publisher.Client(
        credentials=creds,
        publisher_options=types.PublisherOptions(
            timeout=mock.sentinel.publish_timeout),
    )

    topic = "topic/path"
    client._flow_controller = mock.Mock()
    fake_sequencer = mock.Mock()
    client._get_or_create_sequencer = mock.Mock(return_value=fake_sequencer)
    client.publish(topic, b"hello!", timeout=mock.sentinel.custom_timeout)

    fake_sequencer.publish.assert_called_once_with(
        mock.ANY, retry=mock.ANY, timeout=mock.sentinel.custom_timeout)
    message = fake_sequencer.publish.call_args.args[0]
    assert message.data == b"hello!"
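The same precedence holds for callers of the public API: a timeout passed to publish() overrides the one configured in PublisherOptions. A short sketch with placeholder values:

from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient(
    publisher_options=pubsub_v1.types.PublisherOptions(timeout=60.0)
)
topic_path = publisher.topic_path("my-project", "my-topic")  # hypothetical names

# The per-call timeout (5 seconds) wins over the configured 60-second default.
future = publisher.publish(topic_path, b"hello!", timeout=5.0)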
Example #11
    def __init__(self, batch_settings=(), publisher_options=(), **kwargs):
        assert (
            type(batch_settings) is types.BatchSettings
            or len(batch_settings) == 0
        ), "batch_settings must be of type BatchSettings or an empty tuple."
        assert (
            type(publisher_options) is types.PublisherOptions
            or len(publisher_options) == 0
        ), "publisher_options must be of type PublisherOptions or an empty tuple."

        # Sanity check: Is our goal to use the emulator?
        # If so, create a grpc insecure channel with the emulator host
        # as the target.
        if os.environ.get("PUBSUB_EMULATOR_HOST"):
            kwargs["client_options"] = {
                "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST")
            }
            kwargs["credentials"] = AnonymousCredentials()

        # For a transient failure, retry publishing the message infinitely.
        self.publisher_options = types.PublisherOptions(*publisher_options)
        self._enable_message_ordering = self.publisher_options[0]

        # Add the metrics headers, and instantiate the underlying GAPIC
        # client.
        self.api = publisher_client.PublisherClient(**kwargs)
        self._target = self.api._transport._host
        self._batch_class = thread.Batch
        self.batch_settings = types.BatchSettings(*batch_settings)

        # The batches on the publisher client are responsible for holding
        # messages. One batch exists for each topic.
        self._batch_lock = self._batch_class.make_lock()
        # (topic, ordering_key) => sequencers object
        self._sequencers = {}
        self._is_stopped = False
        # Thread created to commit all sequencers after a timeout.
        self._commit_thread = None

        # The object controlling the message publishing flow
        self._flow_controller = FlowController(
            self.publisher_options.flow_control)
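The PUBSUB_EMULATOR_HOST branch above means local testing can be redirected to the emulator purely through the environment; a hedged sketch, assuming an emulator is already listening on localhost:8085:

import os

from google.cloud import pubsub_v1

# Must be set before the client is constructed; the constructor then swaps in
# the emulator endpoint and anonymous credentials instead of the production API.
os.environ["PUBSUB_EMULATOR_HOST"] = "localhost:8085"

publisher = pubsub_v1.PublisherClient()  # all calls now target the local emulator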
Example #12
def test_publish_error_exceeding_flow_control_limits(creds):
    publisher_options = types.PublisherOptions(
        flow_control=types.PublishFlowControl(
            message_limit=10,
            byte_limit=150,
            limit_exceeded_behavior=types.LimitExceededBehavior.ERROR,
        ))
    client = publisher.Client(credentials=creds,
                              publisher_options=publisher_options)

    mock_batch = mock.Mock(spec=client._batch_class)
    topic = "topic/path"
    client._set_batch(topic, mock_batch)

    future1 = client.publish(topic, b"a" * 100)
    future2 = client.publish(topic, b"b" * 100)

    future1.result()  # no error, still within flow control limits
    with pytest.raises(exceptions.FlowControlLimitError):
        future2.result()
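ERROR is the fail-fast option; a hedged sketch of the alternative configuration, where publish() blocks until flow-control capacity frees up (the limits are illustrative):

from google.cloud import pubsub_v1

flow_control = pubsub_v1.types.PublishFlowControl(
    message_limit=100,
    byte_limit=10 * 1024 * 1024,
    limit_exceeded_behavior=pubsub_v1.types.LimitExceededBehavior.BLOCK,
)
publisher = pubsub_v1.PublisherClient(
    publisher_options=pubsub_v1.types.PublisherOptions(flow_control=flow_control),
)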
Example #13
def test_publish_with_ordering_key_uses_extended_retry_deadline():
    creds = mock.Mock(spec=credentials.Credentials)
    client = publisher.Client(
        credentials=creds,
        publisher_options=types.PublisherOptions(enable_message_ordering=True),
    )

    # Use mocks in lieu of the actual batch class.
    batch = mock.Mock(spec=client._batch_class)
    future = mock.sentinel.future
    future.add_done_callback = mock.Mock(spec=["__call__"])
    batch.publish.return_value = future

    topic = "topic/path"
    client._set_batch(topic, batch)

    # Actually mock the batch class now.
    batch_class = mock.Mock(spec=(), return_value=batch)
    client._set_batch_class(batch_class)

    # Publish a message with custom retry settings.
    custom_retry = retries.Retry(
        initial=1,
        maximum=20,
        multiplier=3.3,
        deadline=999,
        predicate=retries.if_exception_type(TimeoutError, KeyboardInterrupt),
    )
    future = client.publish(topic,
                            b"foo",
                            ordering_key="first",
                            retry=custom_retry)
    assert future is mock.sentinel.future

    # Check the retry settings used for the batch.
    batch_class.assert_called_once()
    _, kwargs = batch_class.call_args

    batch_commit_retry = kwargs["commit_retry"]
    expected_retry = custom_retry.with_deadline(2.0**32)
    _assert_retries_equal(batch_commit_retry, expected_retry)
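_assert_retries_equal is a test helper not shown here; the behavior under test is only that the client widens whatever deadline the caller supplied. With google.api_core the transformation can be sketched like this, mirroring the private _deadline attribute the tests above inspect:

from google.api_core import retry as retries

custom_retry = retries.Retry(initial=1, maximum=20, multiplier=3.3, deadline=999)
extended = custom_retry.with_deadline(2.0 ** 32)  # returns a new Retry instance
assert extended._deadline == 2.0 ** 32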
Example #14
    def __init__(self, batch_settings=(), publisher_options=(), **kwargs):
        assert (
            type(batch_settings) is types.BatchSettings
            or len(batch_settings) == 0
        ), "batch_settings must be of type BatchSettings or an empty tuple."
        assert (
            type(publisher_options) is types.PublisherOptions
            or len(publisher_options) == 0
        ), "publisher_options must be of type PublisherOptions or an empty tuple."

        # Sanity check: Is our goal to use the emulator?
        # If so, create a grpc insecure channel with the emulator host
        # as the target.
        if os.environ.get("PUBSUB_EMULATOR_HOST"):
            kwargs["channel"] = grpc.insecure_channel(
                target=os.environ.get("PUBSUB_EMULATOR_HOST"))

        # The GAPIC client has mTLS logic to determine the api endpoint and the
        # ssl credentials to use. Here we create a GAPIC client to help compute the
        # api endpoint and ssl credentials. The api endpoint will be used to set
        # `self._target`, and ssl credentials will be passed to
        # `grpc_helpers.create_channel` to establish a mTLS channel (if ssl
        # credentials is not None).
        client_options = kwargs.get("client_options", None)
        credentials = kwargs.get("credentials", None)
        client_for_mtls_info = publisher_client.PublisherClient(
            credentials=credentials, client_options=client_options)

        self._target = client_for_mtls_info._transport._host

        # Use a custom channel.
        # We need this in order to set appropriate default message size and
        # keepalive options.
        if "transport" not in kwargs:
            channel = kwargs.pop("channel", None)
            if channel is None:
                channel = grpc_helpers.create_channel(
                    credentials=kwargs.pop("credentials", None),
                    target=self.target,
                    ssl_credentials=client_for_mtls_info._transport.
                    _ssl_channel_credentials,
                    scopes=publisher_client.PublisherClient._DEFAULT_SCOPES,
                    options={
                        "grpc.max_send_message_length": -1,
                        "grpc.max_receive_message_length": -1,
                    }.items(),
                )
            # cannot pass both 'channel' and 'credentials'
            kwargs.pop("credentials", None)
            transport = publisher_grpc_transport.PublisherGrpcTransport(
                channel=channel)
            kwargs["transport"] = transport

        # For a transient failure, retry publishing the message infinitely.
        self.publisher_options = types.PublisherOptions(*publisher_options)
        self._enable_message_ordering = self.publisher_options[0]

        # Add the metrics headers, and instantiate the underlying GAPIC
        # client.
        self.api = publisher_client.PublisherClient(**kwargs)
        self._batch_class = thread.Batch
        self.batch_settings = types.BatchSettings(*batch_settings)

        # The batches on the publisher client are responsible for holding
        # messages. One batch exists for each topic.
        self._batch_lock = self._batch_class.make_lock()
        # (topic, ordering_key) => sequencers object
        self._sequencers = {}
        self._is_stopped = False
        # Thread created to commit all sequencers after a timeout.
        self._commit_thread = None

        # The object controlling the message publishing flow
        self._flow_controller = FlowController(
            self.publisher_options.flow_control)
    def __init__(self, batch_settings=(), publisher_options=(), **kwargs):
        assert (
            type(batch_settings) is types.BatchSettings
            or len(batch_settings) == 0
        ), "batch_settings must be of type BatchSettings or an empty tuple."
        assert (
            type(publisher_options) is types.PublisherOptions
            or len(publisher_options) == 0
        ), "publisher_options must be of type PublisherOptions or an empty tuple."

        # Sanity check: Is our goal to use the emulator?
        # If so, create a grpc insecure channel with the emulator host
        # as the target.
        if os.environ.get("PUBSUB_EMULATOR_HOST"):
            kwargs["channel"] = grpc.insecure_channel(
                target=os.environ.get("PUBSUB_EMULATOR_HOST"))

        client_options = kwargs.pop("client_options", None)
        if (client_options and "api_endpoint" in client_options and isinstance(
                client_options["api_endpoint"], six.string_types)):
            self._target = client_options["api_endpoint"]
        else:
            self._target = publisher_client.PublisherClient.SERVICE_ADDRESS

        # Use a custom channel.
        # We need this in order to set appropriate default message size and
        # keepalive options.
        if "transport" not in kwargs:
            channel = kwargs.pop("channel", None)
            if channel is None:
                channel = grpc_helpers.create_channel(
                    credentials=kwargs.pop("credentials", None),
                    target=self.target,
                    scopes=publisher_client.PublisherClient._DEFAULT_SCOPES,
                    options={
                        "grpc.max_send_message_length": -1,
                        "grpc.max_receive_message_length": -1,
                    }.items(),
                )
            # cannot pass both 'channel' and 'credentials'
            kwargs.pop("credentials", None)
            transport = publisher_grpc_transport.PublisherGrpcTransport(
                channel=channel)
            kwargs["transport"] = transport

        # For a transient failure, retry publishing the message infinitely.
        self.publisher_options = types.PublisherOptions(*publisher_options)
        self._enable_message_ordering = self.publisher_options[0]
        if self._enable_message_ordering:
            # Set retry timeout to "infinite" when message ordering is enabled.
            # Note that this then also impacts messages added with an empty ordering
            # key.
            client_config = _set_nested_value(
                kwargs.pop("client_config", {}),
                2**32,
                [
                    "interfaces",
                    "google.pubsub.v1.Publisher",
                    "retry_params",
                    "messaging",
                    "total_timeout_millis",
                ],
            )
            kwargs["client_config"] = client_config

        # Add the metrics headers, and instantiate the underlying GAPIC
        # client.
        self.api = publisher_client.PublisherClient(**kwargs)
        self._batch_class = thread.Batch
        self.batch_settings = types.BatchSettings(*batch_settings)

        # The batches on the publisher client are responsible for holding
        # messages. One batch exists for each topic.
        self._batch_lock = self._batch_class.make_lock()
        # (topic, ordering_key) => sequencers object
        self._sequencers = {}
        self._is_stopped = False
        # Thread created to commit all sequencers after a timeout.
        self._commit_thread = None

        # The object controlling the message publishing flow
        self._flow_controller = FlowController(
            self.publisher_options.flow_control)
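_set_nested_value is a private helper of the surrounding module and is not reproduced above; a hypothetical equivalent, just to make the client_config override concrete, might look like this:

def set_nested_value(config, value, keys):
    """Hypothetical stand-in: set config[k1][k2]...[kn] = value, creating dicts on the way."""
    node = config
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value
    return config

client_config = set_nested_value(
    {},
    2 ** 32,
    [
        "interfaces",
        "google.pubsub.v1.Publisher",
        "retry_params",
        "messaging",
        "total_timeout_millis",
    ],
)
# Result: {"interfaces": {"google.pubsub.v1.Publisher":
#     {"retry_params": {"messaging": {"total_timeout_millis": 4294967296}}}}}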