Example #1
def publish_messages(project_number, cloud_region, zone_id, topic_id):
    # [START pubsublite_quickstart_publisher]
    from google.cloud.pubsublite.cloudpubsub import PublisherClient
    from google.cloud.pubsublite.types import (
        CloudRegion,
        CloudZone,
        MessageMetadata,
        TopicPath,
    )

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"

    location = CloudZone(CloudRegion(cloud_region), zone_id)
    topic_path = TopicPath(project_number, location, topic_id)

    # PublisherClient() must be used in a `with` block or have __enter__() called before use.
    with PublisherClient() as publisher_client:
        data = "Hello world!"
        api_future = publisher_client.publish(topic_path, data.encode("utf-8"))
        # result() blocks. To resolve API futures asynchronously, use add_done_callback().
        message_id = api_future.result()
        message_metadata = MessageMetadata.decode(message_id)
        print(
            f"Published a message to partition {message_metadata.partition.value} and offset {message_metadata.cursor.offset}."
        )
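
The result() comments in these samples point at add_done_callback() for non-blocking publishing; a minimal sketch of that pattern, reusing MessageMetadata from the imports above (the callback name is illustrative):

def on_publish_done(future):
    # Runs once the publish completes; future.result() no longer blocks here.
    metadata = MessageMetadata.decode(future.result())
    print(f"Published to partition {metadata.partition.value}.")

api_future.add_done_callback(on_publish_done)
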
def subscription(client: AdminClient,
                 topic: Topic) -> Generator[Subscription, None, None]:
    # Assumes the test module's imports: AdminClient, Subscription, Topic from
    # google.cloud.pubsublite; BacklogLocation, CloudRegion, CloudZone,
    # SubscriptionPath from google.cloud.pubsublite.types; NotFound from
    # google.api_core.exceptions; plus module-level CLOUD_REGION, ZONE_ID,
    # PROJECT_NUMBER, and SUBSCRIPTION_ID constants.
    location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
    subscription_path = SubscriptionPath(PROJECT_NUMBER, location,
                                         SUBSCRIPTION_ID)

    subscription = Subscription(
        name=str(subscription_path),
        topic=topic.name,
        delivery_config=Subscription.DeliveryConfig(
            delivery_requirement=Subscription.DeliveryConfig.DeliveryRequirement.DELIVER_IMMEDIATELY,
        ),
    )

    try:
        response = client.get_subscription(subscription.name)
    except NotFound:
        # This subscription will start receiving the first message in the topic.
        response = client.create_subscription(subscription,
                                              BacklogLocation.BEGINNING)

    yield response

    try:
        client.delete_subscription(response.name)
    except NotFound as e:
        print(e.message)
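
These yield-style helpers read as pytest fixtures (setup, yield the resource, tear down); a minimal sketch of how such a generator is registered and consumed (decorator placement and test name are assumptions):

import pytest

@pytest.fixture
def subscription(client, topic):
    ...  # body as above: yields the Subscription, then deletes it

def test_receive(subscription):
    # pytest injects the yielded Subscription value here.
    assert subscription.name
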
def receive_messages(project_number,
                     cloud_region,
                     zone_id,
                     subscription_id,
                     timeout=90):
    # [START pubsublite_quickstart_subscriber]
    from concurrent.futures import TimeoutError
    from google.cloud.pubsublite.cloudpubsub import SubscriberClient
    from google.cloud.pubsublite.types import (
        CloudRegion,
        CloudZone,
        FlowControlSettings,
        MessageMetadata,
        SubscriptionPath,
    )
    from google.pubsub_v1 import PubsubMessage

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # subscription_id = "your-subscription-id"
    # timeout = 90

    location = CloudZone(CloudRegion(cloud_region), zone_id)
    subscription_path = SubscriptionPath(project_number, location,
                                         subscription_id)
    # Configure when to pause the message stream for more incoming messages based on the
    # maximum size or number of messages that a single-partition subscriber has received,
    # whichever condition is met first.
    per_partition_flow_control_settings = FlowControlSettings(
        # 1,000 outstanding messages. Must be >0.
        messages_outstanding=1000,
        # 10 MiB. Must be greater than the allowed size of the largest message (1 MiB).
        bytes_outstanding=10 * 1024 * 1024,
    )

    def callback(message: PubsubMessage):
        message_data = message.data.decode("utf-8")
        metadata = MessageMetadata.decode(message.message_id)
        print(
            f"Received {message_data} of ordering key {message.ordering_key} with id {metadata}."
        )
        message.ack()

    # SubscriberClient() must be used in a `with` block or have __enter__() called before use.
    with SubscriberClient() as subscriber_client:

        streaming_pull_future = subscriber_client.subscribe(
            subscription_path,
            callback=callback,
            per_partition_flow_control_settings=per_partition_flow_control_settings,
        )

        print(f"Listening for messages on {str(subscription_path)}...")

        try:
            streaming_pull_future.result(timeout=timeout)
        except (TimeoutError, KeyboardInterrupt):
            streaming_pull_future.cancel()
            assert streaming_pull_future.done()
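
A hypothetical invocation, pairing this subscriber with the publisher sample above (all values are placeholders):

receive_messages(
    project_number=1122334455,
    cloud_region="us-central1",
    zone_id="a",
    subscription_id="your-subscription-id",
    timeout=90,
)
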
async def test_iterator(
    default_subscriber,
    subscriber_factory,
    multiplexed_client: AsyncSubscriberClientInterface,
):
    # Drives the async iterator returned by subscribe(): one successful read,
    # then an error pushed through the mock read queue, which must close the
    # underlying subscriber.
    read_queues = wire_queues(default_subscriber.read)
    subscription = SubscriptionPath(1, CloudZone.parse("us-central1-a"), "abc")
    message = Message(PubsubMessage(message_id="1")._pb, "", 0, None)
    async with multiplexed_client:
        iterator = await multiplexed_client.subscribe(
            subscription, DISABLED_FLOW_CONTROL
        )
        subscriber_factory.assert_has_calls(
            [call(subscription, None, DISABLED_FLOW_CONTROL)]
        )
        read_fut_1 = asyncio.ensure_future(iterator.__anext__())
        assert not read_fut_1.done()
        await read_queues.called.get()
        default_subscriber.read.assert_has_calls([call()])
        await read_queues.results.put(message)
        assert await read_fut_1 is message
        read_fut_2 = asyncio.ensure_future(iterator.__anext__())
        assert not read_fut_2.done()
        await read_queues.called.get()
        default_subscriber.read.assert_has_calls([call(), call()])
        await read_queues.results.put(FailedPrecondition(""))
        with pytest.raises(FailedPrecondition):
            await read_fut_2
        default_subscriber.__aexit__.assert_called_once()
def topic(client: AdminClient) -> Generator[Topic, None, None]:
    # Assumes the same module-level imports and constants as the
    # `subscription` fixture above, plus TopicPath and TOPIC_ID.
    location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
    topic_path = TopicPath(PROJECT_NUMBER, location, TOPIC_ID)

    # A topic with 2 partitions, each storing up to 30 GiB, with publish
    # throughput capacity of 4 MiB/s and subscribe throughput capacity of
    # 8 MiB/s per partition.
    topic = Topic(
        name=str(topic_path),
        partition_config=Topic.PartitionConfig(
            count=2,
            capacity=Topic.PartitionConfig.Capacity(
                publish_mib_per_sec=4, subscribe_mib_per_sec=8,
            ),
        ),
        retention_config=Topic.RetentionConfig(
            per_partition_bytes=30 * 1024 * 1024 * 1024,
        ),
    )

    try:
        response = client.get_topic(topic.name)
    except NotFound:
        response = client.create_topic(topic)

    yield response

    try:
        client.delete_topic(response.name)
    except NotFound as e:
        print(e.message)
Example #6
def subscription_path(client):
    location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
    subscription_path = str(SubscriptionPath(PROJECT_NUMBER, location, SUBSCRIPTION_ID))
    yield subscription_path
    try:
        client.delete_subscription(subscription_path)
    except NotFound:
        pass
def topic_path(client):
    location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
    topic_path = str(TopicPath(PROJECT_NUMBER, location, TOPIC_ID))
    yield topic_path
    try:
        client.delete_topic(topic_path)
    except NotFound:
        pass
Example #8
def update_lite_topic(project_number, cloud_region, zone_id, topic_id):
    # [START pubsublite_update_topic]
    from google.api_core.exceptions import NotFound
    from google.cloud.pubsublite import AdminClient, Topic
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, TopicPath
    from google.protobuf.duration_pb2 import Duration
    from google.protobuf.field_mask_pb2 import FieldMask

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    topic_path = TopicPath(project_number, location, topic_id)

    # Defines which topic fields to update.
    field_mask = FieldMask(paths=[
        "partition_config.scale",
        "retention_config.per_partition_bytes",
        "retention_config.period",
    ])

    # Defines how to update the topic fields.
    topic = Topic(
        name=str(topic_path),
        partition_config=Topic.PartitionConfig(
            # Set publish throughput capacity to 2x the standard partition
            # throughput of 4 MiB per second. This must be in the range [1, 4].
            # A topic with `scale` of 2 and `count` of 10 is charged for 20
            # partitions.
            scale=2,
        ),
        retention_config=Topic.RetentionConfig(
            # Set storage per partition to 100 GiB. This must be in the range
            # 30 GiB-10 TiB. If the number of bytes stored in any of the
            # topic's partitions grows beyond this value, older messages will
            # be dropped to make room for newer ones, regardless of the value
            # of `period`.
            # Be careful when decreasing storage per partition, as it may
            # cause message loss.
            per_partition_bytes=100 * 1024 * 1024 * 1024,
            # Allow messages to be stored for 14 days.
            period=Duration(seconds=60 * 60 * 24 * 14),
        ),
    )

    client = AdminClient(cloud_region)
    try:
        response = client.update_topic(topic, field_mask)
        print(f"{response.name} updated successfully.")
    except NotFound:
        print(f"{topic_path} not found.")
def seek_lite_subscription(project_number, cloud_region, zone_id,
                           subscription_id, seek_target, wait_for_operation):
    # [START pubsublite_seek_subscription]
    from google.api_core.exceptions import NotFound
    from google.cloud.pubsublite import AdminClient
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, SubscriptionPath

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # subscription_id = "your-subscription-id"
    # seek_target = BacklogLocation.BEGINNING
    # wait_for_operation = False

    # Possible values for seek_target:
    # - BacklogLocation.BEGINNING: replays from the beginning of all retained
    #   messages.
    # - BacklogLocation.END: skips past all current published messages.
    # - PublishTime(<datetime>): delivers messages with publish time greater
    #   than or equal to the specified timestamp.
    # - EventTime(<datetime>): seeks to the first message with event time
    #   greater than or equal to the specified timestamp.

    # Waiting for the seek operation to complete is optional. It indicates when
    # subscribers for all partitions are receiving messages from the seek
    # target. If subscribers are offline, the operation will complete once they
    # are online.

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    subscription_path = SubscriptionPath(project_number, location,
                                         subscription_id)

    client = AdminClient(cloud_region)
    try:
        # Initiate an out-of-band seek for a subscription to the specified
        # target. If an operation is returned, the seek has been successfully
        # registered and will eventually propagate to subscribers.
        seek_operation = client.seek_subscription(subscription_path,
                                                  seek_target)
        print(f"Seek operation: {seek_operation.operation.name}")
    except NotFound:
        print(f"{subscription_path} not found.")
        return

    if wait_for_operation:
        print("Waiting for operation to complete...")
        seek_operation.result()
        print(f"Operation completed. Metadata:\n{seek_operation.metadata}")
def publish_with_batch_settings(
    project_number, cloud_region, zone_id, topic_id, num_messages
):
    # [START pubsublite_publish_batch]
    from google.cloud.pubsub_v1.types import BatchSettings
    from google.cloud.pubsublite.cloudpubsub import PublisherClient
    from google.cloud.pubsublite.types import (
        CloudRegion,
        CloudZone,
        MessageMetadata,
        TopicPath,
    )

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"
    # num_messages = 100

    location = CloudZone(CloudRegion(cloud_region), zone_id)
    topic_path = TopicPath(project_number, location, topic_id)
    batch_settings = BatchSettings(
        # 2 MiB. Defaults to 3 MiB. Must be less than gRPC's 4 MiB per-message
        # limit.
        max_bytes=2 * 1024 * 1024,
        # 100 ms. Defaults to 50 ms.
        max_latency=0.1,
        # Defaults to 1000.
        max_messages=100,
    )

    # PublisherClient() must be used in a `with` block or have __enter__() called before use.
    with PublisherClient(
        per_partition_batching_settings=batch_settings
    ) as publisher_client:
        for message in range(num_messages):
            data = f"{message}"
            api_future = publisher_client.publish(topic_path, data.encode("utf-8"))
            # result() blocks. To resolve API futures asynchronously, use add_done_callback().
            message_id = api_future.result()
            message_metadata = MessageMetadata.decode(message_id)
            print(
                f"Published {data} to partition {message_metadata.partition.value} and offset {message_metadata.cursor.offset}."
            )

    print(
        f"Finished publishing {num_messages} messages with batch settings to {str(topic_path)}."
    )
Example #11
def create_lite_subscription(project_number, cloud_region, zone_id, topic_id,
                             subscription_id):
    # [START pubsublite_create_subscription]
    from google.api_core.exceptions import AlreadyExists
    from google.cloud.pubsublite import AdminClient, Subscription
    from google.cloud.pubsublite.types import (
        CloudRegion,
        CloudZone,
        SubscriptionPath,
        TopicPath,
    )

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"
    # subscription_id = "your-subscription-id"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    topic_path = TopicPath(project_number, location, topic_id)
    subscription_path = SubscriptionPath(project_number, location,
                                         subscription_id)
    subscription = Subscription(
        name=str(subscription_path),
        topic=str(topic_path),
        delivery_config=Subscription.DeliveryConfig(
            # Possible values for delivery_requirement:
            # - `DELIVER_IMMEDIATELY`
            # - `DELIVER_AFTER_STORED`
            # You may choose whether to wait for a published message to be successfully written
            # to storage before the server delivers it to subscribers. `DELIVER_IMMEDIATELY` is
            # suitable for applications that need higher throughput.
            delivery_requirement=Subscription.DeliveryConfig.DeliveryRequirement.DELIVER_IMMEDIATELY,
        ),
    )

    client = AdminClient(cloud_region)
    try:
        response = client.create_subscription(subscription)
        print(f"{response.name} created successfully.")
    except AlreadyExists:
        print(f"{subscription_path} already exists.")
Example #12
def publish_with_ordering_key(project_number, cloud_region, zone_id, topic_id,
                              num_messages):
    # [START pubsublite_publish_ordering_key]
    from google.cloud.pubsublite.cloudpubsub import PublisherClient
    from google.cloud.pubsublite.types import (
        CloudRegion,
        CloudZone,
        MessageMetadata,
        TopicPath,
    )

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"
    # num_messages = 100

    location = CloudZone(CloudRegion(cloud_region), zone_id)
    topic_path = TopicPath(project_number, location, topic_id)

    # PublisherClient() must be used in a `with` block or have __enter__() called before use.
    with PublisherClient() as publisher_client:
        for message in range(num_messages):
            data = f"{message}"
            # Messages of the same ordering key will always get published to the same partition.
            # When ordering_key is unset, messages can get published to different partitions if
            # more than one partition exists for the topic.
            api_future = publisher_client.publish(topic_path,
                                                  data.encode("utf-8"),
                                                  ordering_key="testing")
            # result() blocks. To resolve API futures asynchronously, use add_done_callback().
            message_id = api_future.result()
            message_metadata = MessageMetadata.decode(message_id)
            print(
                f"Published {data} to partition {message_metadata.partition.value} and offset {message_metadata.cursor.offset}."
            )

    print(
        f"Finished publishing {num_messages} messages with an ordering key to {str(topic_path)}."
    )
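
In practice the ordering key is usually derived from an entity identifier so that all related messages land on, and are therefore ordered within, a single partition. A sketch of that idea, reusing publisher_client and topic_path from the sample above (user_id is hypothetical):

user_id = "alice"
publisher_client.publish(
    topic_path, b"profile-updated", ordering_key=f"user-{user_id}"
)
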
def list_lite_subscriptions_in_project(project_number, cloud_region, zone_id):
    # [START pubsublite_list_subscriptions_in_project]
    from google.cloud.pubsublite import AdminClient
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, LocationPath

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    location_path = LocationPath(project_number, location)

    client = AdminClient(cloud_region)
    response = client.list_subscriptions(location_path)

    for subscription in response:
        print(subscription.name)

    print(f"{len(response)} subscription(s) listed in your project and location.")
Example #14
def update_lite_subscription(project_number, cloud_region, zone_id,
                             subscription_id):
    # [START pubsublite_update_subscription]
    from google.api_core.exceptions import NotFound
    from google.cloud.pubsublite import AdminClient, Subscription
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, SubscriptionPath
    from google.protobuf.field_mask_pb2 import FieldMask

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"
    # subscription_id = "your-subscription-id"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    subscription_path = SubscriptionPath(project_number, location,
                                         subscription_id)
    field_mask = FieldMask(paths=["delivery_config.delivery_requirement"])

    subscription = Subscription(
        name=str(subscription_path),
        delivery_config=Subscription.DeliveryConfig(
            # Possible values for delivery_requirement:
            # - `DELIVER_IMMEDIATELY`
            # - `DELIVER_AFTER_STORED`
            # `DELIVER_AFTER_STORED` requires a published message to be successfully written
            # to storage before the server delivers it to subscribers.
            delivery_requirement=Subscription.DeliveryConfig.DeliveryRequirement.DELIVER_AFTER_STORED,
        ),
    )

    client = AdminClient(cloud_region)
    try:
        response = client.update_subscription(subscription, field_mask)
        print(f"{response.name} updated successfully.")
    except NotFound:
        print(f"{subscription_path} not found.")
def delete_lite_topic(project_number, cloud_region, zone_id, topic_id):
    # [START pubsublite_delete_topic]
    from google.api_core.exceptions import NotFound
    from google.cloud.pubsublite import AdminClient
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, TopicPath

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    topic_path = TopicPath(project_number, location, topic_id)

    client = AdminClient(cloud_region)
    try:
        client.delete_topic(topic_path)
        print(f"{topic_path} deleted successfully.")
    except NotFound:
        print(f"{topic_path} not found.")
def get_lite_topic(project_number, cloud_region, zone_id, topic_id):
    # [START pubsublite_get_topic]
    from google.api_core.exceptions import NotFound
    from google.cloud.pubsublite import AdminClient
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, TopicPath

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    topic_path = TopicPath(project_number, location, topic_id)

    client = AdminClient(cloud_region)
    try:
        response = client.get_topic(topic_path)
        num_partitions = client.get_topic_partition_count(topic_path)
        print(f"{response.name} has {num_partitions} partition(s).")
    except NotFound:
        print(f"{topic_path} not found.")
def list_lite_subscriptions_in_topic(project_number, cloud_region, zone_id,
                                     topic_id):
    # [START pubsublite_list_subscriptions_in_topic]
    from google.cloud.pubsublite import AdminClient
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, TopicPath

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # topic_id = "your-topic-id"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    topic_path = TopicPath(project_number, location, topic_id)

    client = AdminClient(cloud_region)
    response = client.list_topic_subscriptions(topic_path)

    for subscription_path in response:
        print(subscription_path)

    print(f"{len(response)} subscription(s) listed in your topic.")
Example #18
def get_lite_subscription(project_number, cloud_region, zone_id,
                          subscription_id):
    # [START pubsublite_get_subscription]
    from google.api_core.exceptions import NotFound
    from google.cloud.pubsublite import AdminClient
    from google.cloud.pubsublite.types import CloudRegion, CloudZone, SubscriptionPath

    # TODO(developer):
    # project_number = 1122334455
    # cloud_region = "us-central1"
    # zone_id = "a"
    # subscription_id = "your-subscription-id"

    cloud_region = CloudRegion(cloud_region)
    location = CloudZone(cloud_region, zone_id)
    subscription_path = SubscriptionPath(project_number, location,
                                         subscription_id)

    client = AdminClient(cloud_region)
    try:
        response = client.get_subscription(subscription_path)
        print(f"{response.name} exists.")
    except NotFound:
        print(f"{subscription_path} not found.")
Example #19
from concurrent.futures import TimeoutError
from google.cloud.pubsublite.cloudpubsub import SubscriberClient
from google.cloud.pubsublite.types import (
    CloudRegion,
    CloudZone,
    FlowControlSettings,
    MessageMetadata,
    SubscriptionPath,
)

project_number = 533637743951
cloud_region = "asia-east1"
zone_id = "a"
subscription_id = "test_sub_one"
timeout = 90

location = CloudZone(CloudRegion(cloud_region), zone_id)
subscription_path = SubscriptionPath(project_number, location, subscription_id)
# Configure when to pause the message stream for more incoming messages based on the
# maximum size or number of messages that a single-partition subscriber has received,
# whichever condition is met first.
per_partition_flow_control_settings = FlowControlSettings(
    # 1,000 outstanding messages. Must be >0.
    messages_outstanding=1000,
    # 10 MiB. Must be greater than the allowed size of the largest message (1 MiB).
    bytes_outstanding=10 * 1024 * 1024,
)


def callback(message):
    message_data = message.data.decode("utf-8")
    print(message_data)
    # metadata = MessageMetadata.decode(message.message_id)
    message.ack()
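
# A subscribe loop to complete this script, mirroring the quickstart
# subscriber sample above (the listing ends at the callback, so the tail
# below is an assumption):
with SubscriberClient() as subscriber_client:
    streaming_pull_future = subscriber_client.subscribe(
        subscription_path,
        callback=callback,
        per_partition_flow_control_settings=per_partition_flow_control_settings,
    )
    print(f"Listening for messages on {str(subscription_path)}...")
    try:
        streaming_pull_future.result(timeout=timeout)
    except (TimeoutError, KeyboardInterrupt):
        streaming_pull_future.cancel()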