from azure.eventhub import EventHubConsumerClient
from azure.identity import DefaultAzureCredential
from azure.schemaregistry import SchemaRegistryClient
from azure.schemaregistry.serializer.avroserializer import AvroSerializer


def on_event(partition_context, event):
    bytes_payload = b"".join(b for b in event.body)
    print('The received bytes of the EventData are {}.'.format(bytes_payload))

    # Use the deserialize method to convert the bytes into a dict object.
    # The deserialize method extracts the schema id from the payload and automatically retrieves
    # the Avro schema from the Schema Registry service. The schema is cached locally for future use.
    deserialized_data = avro_serializer.deserialize(bytes_payload)
    print(
        'The dict data after deserialization is {}'.format(deserialized_data))


# create an EventHubConsumerClient instance
eventhub_consumer = EventHubConsumerClient.from_connection_string(
    conn_str=EVENTHUB_CONNECTION_STR,
    consumer_group='$Default',
    eventhub_name=EVENTHUB_NAME,
)

# create an AvroSerializer instance
avro_serializer = AvroSerializer(
    client=SchemaRegistryClient(
        fully_qualified_namespace=SCHEMAREGISTRY_FULLY_QUALIFIED_NAMESPACE,
        credential=DefaultAzureCredential()),
    group_name=GROUP_NAME,
    auto_register_schemas=True)

try:
    with eventhub_consumer, avro_serializer:
        eventhub_consumer.receive(
            on_event=on_event,
            starting_position="-1",  # "-1" is from the beginning of the partition.
        )
except KeyboardInterrupt:
    print('Stopped receiving.')


# Example #2
import os
from azure.eventhub import EventHubConsumerClient
from azure.eventhub.extensions.checkpointstoreblob import BlobCheckpointStore

CONNECTION_STR = os.environ["EVENT_HUB_CONN_STR"]
STORAGE_CONNECTION_STR = os.environ["AZURE_STORAGE_CONN_STR"]
BLOB_CONTAINER_NAME = "your-blob-container-name"  # Please make sure the blob container resource exists.
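
# The checkpoint blob container must already exist before receiving starts. As an optional
# sketch (assuming azure-storage-blob is installed alongside the checkpoint store extension),
# the container can be created up front and the call ignored if it is already there:
from azure.core.exceptions import ResourceExistsError
from azure.storage.blob import ContainerClient

try:
    ContainerClient.from_connection_string(
        STORAGE_CONNECTION_STR, container_name=BLOB_CONTAINER_NAME).create_container()
except ResourceExistsError:
    pass  # the container already exists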


def on_event(partition_context, event):
    # Put your code here.
    # Avoid time-consuming operations.
    print(event)
    partition_context.update_checkpoint(event)


if __name__ == '__main__':
    checkpoint_store = BlobCheckpointStore.from_connection_string(
        STORAGE_CONNECTION_STR,
        container_name=BLOB_CONTAINER_NAME,
    )
    client = EventHubConsumerClient.from_connection_string(
        CONNECTION_STR, "$Default", checkpoint_store=checkpoint_store)

    try:
        client.receive(on_event)
    except KeyboardInterrupt:
        client.close()
    print("Received event from partition: {}".format(
        partition_context.partition_id))

    # Put your code here to do some operations on the event.
    # Avoid time-consuming operations.
    print(event)

    partition_context.update_checkpoint(event)


if __name__ == '__main__':
    checkpoint_store = BlobCheckpointStore.from_connection_string(
        STORAGE_CONNECTION_STR, "eventprocessor")
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STR,
        consumer_group='$Default',
        checkpoint_store=checkpoint_store,  # For load-balancing and checkpoint. Leave None for no load-balancing
    )

    try:
        with consumer_client:
            """
            Without specified partition_id, the receive will try to receive events from all partitions and if provided with
            partition manager, the client will load-balance partition assignment with other EventHubConsumerClient instances
            which also try to receive events from all partitions and use the same storage resource.
            """
            consumer_client.receive(on_event=on_event)
            # With specified partition_id, load-balance will be disabled
            # client.receive(on_event=on_event, consumer_group='$Default', partition_id='0')
    except KeyboardInterrupt:
        print('Stop receiving.')


from azure.eventhub import EventHubConsumerClient
import logging

# EVENT HUB DETAILS
connection_str = 'Endpoint=sb://streamuckafka.servicebus.windows.net/;SharedAccessKeyName=StreamUCPolicy;SharedAccessKey=<your-shared-access-key>'
eventhub_name = 'streamuckafkaeventhub'
consumer_group = "$default"
client = EventHubConsumerClient.from_connection_string(
    connection_str, consumer_group, eventhub_name=eventhub_name)

logger = logging.getLogger("azure.eventhub")
logging.basicConfig(level=logging.INFO)


def on_event(partition_context, event):
    logger.info("Received event from partition {}".format(
        partition_context.partition_id))
    logger.info("Message received:{}".format(event))
    partition_context.update_checkpoint(event)


with client:
    client.receive(on_event=on_event, starting_position="-1")
# Example #5
EVENT_POSITION = EventPosition("-1")
PARTITION = "0"


def on_event(partition_context, event):
    print("Received event from partition {}".format(
        partition_context.partition_id))

    # Put your code here to do some operations on the event.
    # Avoid time-consuming operations.
    print(event)

    print("Last enqueued event properties from partition: {} is: {}".format(
        partition_context.partition_id, event.last_enqueued_event_properties))


if __name__ == '__main__':
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STR,
        event_hub_path=EVENT_HUB,
    )

    try:
        with consumer_client:
            consumer_client.receive(on_event=on_event,
                                    consumer_group='$Default',
                                    partition_id='0',
                                    track_last_enqueued_event_properties=True)
    except KeyboardInterrupt:
        print('Stop receiving.')
# Example #6

def on_error(partition_context, error):
    # put your code here
    print("Partition: {} met an exception during receiving: {}".format(
        partition_context.partition_id, error))


def on_events(partition_context, events):
    # put your code here
    print("Received events: {} from partition: {}".format(
        len(events), partition_context.partition_id))
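

# The receive call below also references on_partition_initialize and on_partition_close
# handlers that are not included in this snippet. A minimal sketch of matching callbacks,
# assuming the standard azure-eventhub callback signatures, could be:
def on_partition_initialize(partition_context):
    # put your code here
    print("Partition: {} has been initialized.".format(
        partition_context.partition_id))


def on_partition_close(partition_context, reason):
    # put your code here
    print("Partition: {} has been closed, reason for closing: {}.".format(
        partition_context.partition_id, reason))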


if __name__ == '__main__':
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STR,
        eventhub_name=EVENTHUB_NAME,
    )

    try:
        with consumer_client:
            consumer_client.receive(
                on_events=on_events,
                consumer_group='$Default',
                on_partition_initialize=on_partition_initialize,
                on_partition_close=on_partition_close,
                on_error=on_error)
    except KeyboardInterrupt:
        print('Stop receiving.')


        command_to_device_message_properties[
            'onePropertyNameIShouldGiveAProperName'] = 'youShouldChangeThis'

        aux_iot_hub_send_message_to_device(
            device_name=event_device_name,
            message_body=command_to_device_message_body,
            message_properties=command_to_device_message_properties)


if __name__ == '__main__':

    #FIRST CHECK IF THE CONNECTION STRING IS OK
    aux_validate_connection_string()

    #CONNECT TO THE IOT HUB BUILT-IN ENDPOINT
    aux_iot_hub_built_in_event_hub_consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=AUX_EVENT_HUB_NAMESPACE_CONNECTION_STRING,
        consumer_group='app',
        eventhub_name=AUX_EVENTHUB_NAME)

    MAXIMUM_TEMPERATURE = 38

    try:
        with aux_iot_hub_built_in_event_hub_consumer_client:
            print("Starting sample temperature monitor application")
            print("Maximum temperature set to {} degrees".format(
                MAXIMUM_TEMPERATURE))
            #EVERY TIME WE RECEIVE AN EVENT WE CALL THE on_iot_hub_message_event HANDLER
            aux_iot_hub_built_in_event_hub_consumer_client.receive(
                on_event=on_iot_hub_message_event, starting_position="@latest")
    except KeyboardInterrupt:
        print('Stopped receiving.')


CONNECTION_STR = os.environ["EVENT_HUB_CONN_STR"]
STORAGE_CONNECTION_STR = os.environ["AZURE_STORAGE_CONN_STR"]

logging.basicConfig(level=logging.INFO)


def do_operation(event):
    # put your code here
    print(event)


def process_events(partition_context, events):
    # put your code here
    print("received events: {} from partition: {}".format(
        len(events), partition_context.partition_id))
    for event in events:
        do_operation(event)
    partition_context.update_checkpoint(events[-1])


if __name__ == '__main__':
    container_client = ContainerClient.from_connection_string(
        STORAGE_CONNECTION_STR, "eventprocessor")
    partition_manager = BlobPartitionManager(container_client=container_client)
    client = EventHubConsumerClient.from_connection_string(
        CONNECTION_STR, partition_manager=partition_manager)
    try:
        client.receive(process_events, "$default")
    except KeyboardInterrupt:
        client.close()


def test_send_amqp_annotated_message(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        sequence_body = [b'message', 123.456, True]
        footer = {'footer_key': 'footer_value'}
        prop = {"subject": "sequence"}
        seq_app_prop = {"body_type": "sequence"}

        sequence_message = AmqpAnnotatedMessage(
            sequence_body=sequence_body,
            footer=footer,
            properties=prop,
            application_properties=seq_app_prop
        )

        value_body = {b"key": [-123, b'data', False]}
        header = {"priority": 10}
        anno = {"ann_key": "ann_value"}
        value_app_prop = {"body_type": "value"}
        value_message = AmqpAnnotatedMessage(
            value_body=value_body,
            header=header,
            annotations=anno,
            application_properties=value_app_prop
        )

        data_body = [b'aa', b'bb', b'cc']
        data_app_prop = {"body_type": "data"}
        del_anno = {"delann_key": "delann_value"}
        data_message = AmqpAnnotatedMessage(
            data_body=data_body,
            header=header,
            delivery_annotations=del_anno,
            application_properties=data_app_prop
        )

        body_ed = """{"json_key": "json_val"}"""
        prop_ed = {"raw_prop": "raw_value"}
        cont_type_ed = "text/plain"
        corr_id_ed = "corr_id"
        mess_id_ed = "mess_id"
        event_data = EventData(body_ed)
        event_data.content_type = cont_type_ed
        event_data.correlation_id = corr_id_ed
        event_data.message_id = mess_id_ed

        batch = client.create_batch()
        batch.add(data_message)
        batch.add(value_message)
        batch.add(sequence_message)
        batch.add(event_data)
        client.send_batch(batch)
        client.send_batch([data_message, value_message, sequence_message, event_data])

    received_count = {}
    received_count["data_msg"] = 0
    received_count["seq_msg"] = 0
    received_count["value_msg"] = 0
    received_count["normal_msg"] = 0

    def check_values(event):
        raw_amqp_message = event.raw_amqp_message
        if raw_amqp_message.body_type == AmqpMessageBodyType.DATA:
            if raw_amqp_message.application_properties and raw_amqp_message.application_properties.get(b'body_type') == b'data':
                body = [data for data in raw_amqp_message.body]
                assert data_body == body
                assert event.body_as_str() == "aabbcc"
                assert raw_amqp_message.delivery_annotations[b'delann_key'] == b'delann_value'
                assert raw_amqp_message.application_properties[b'body_type'] == b'data'
                received_count["data_msg"] += 1
            else:
                assert event.body_as_json() == {'json_key': 'json_val'}
                assert event.correlation_id == corr_id_ed
                assert event.message_id == mess_id_ed
                assert event.content_type == cont_type_ed
                assert event.body_type == AmqpMessageBodyType.DATA
                received_count["normal_msg"] += 1
        elif raw_amqp_message.body_type == AmqpMessageBodyType.SEQUENCE:
            body = [sequence for sequence in raw_amqp_message.body]
            assert [sequence_body] == body
            assert event.body_as_str() == "['message', 123.456, True]"
            assert raw_amqp_message.footer[b'footer_key'] == b'footer_value'
            assert raw_amqp_message.properties.subject == b'sequence'
            assert raw_amqp_message.application_properties[b'body_type'] == b'sequence'
            received_count["seq_msg"] += 1
        elif raw_amqp_message.body_type == AmqpMessageBodyType.VALUE:
            assert raw_amqp_message.body == value_body
            assert event.body_as_str() == "{'key': [-123, 'data', False]}"
            assert raw_amqp_message.annotations[b'ann_key'] == b'ann_value'
            assert raw_amqp_message.application_properties[b'body_type'] == b'value'
            received_count["value_msg"] += 1

    def on_event(partition_context, event):
        on_event.received.append(event)

    on_event.received = []
    client = EventHubConsumerClient.from_connection_string(connection_str,
                                                           consumer_group='$default')
    with client:
        thread = threading.Thread(target=client.receive, args=(on_event,),
                                  kwargs={"starting_position": "-1"})
        thread.start()
        time.sleep(15)
        for event in on_event.received:
            check_values(event)

    assert len(on_event.received) == 8
    assert received_count["data_msg"] == 2
    assert received_count["seq_msg"] == 2
    assert received_count["value_msg"] == 2
    assert received_count["normal_msg"] == 2
# Example #10
PARTITION = "0"
HTTP_PROXY = {
    'proxy_hostname': '127.0.0.1',  # proxy hostname
    'proxy_port': 3128,  # proxy port
    'username': '******',  # username used for proxy authentication if needed
    'password': '******'  # password used for proxy authentication if needed
}


def on_event(partition_context, event):
    print("received event from partition: {}".format(partition_context.partition_id))
    # do some operations on the event
    print(event)


consumer_client = EventHubConsumerClient.from_connection_string(
    conn_str=CONNECTION_STR, consumer_group='$Default', eventhub_name=EVENTHUB_NAME, http_proxy=HTTP_PROXY)
producer_client = EventHubProducerClient.from_connection_string(
    conn_str=CONNECTION_STR, eventhub_name=EVENTHUB_NAME, http_proxy=HTTP_PROXY)

with producer_client:
    event_data_batch = producer_client.create_batch(max_size_in_bytes=10000)
    while True:
        try:
            event_data_batch.add(EventData('Message inside EventBatchData'))
        except ValueError:
            # EventDataBatch object reaches max_size.
            # New EventDataBatch object can be created here to send more data
            break
    producer_client.send_batch(event_data_batch)
    print('Finished sending.')
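
# The consumer_client and on_event handler defined above are not exercised in this snippet.
# As a sketch following the pattern of the other examples on this page, a matching receive
# loop ("-1" starts from the beginning of each partition) could look like:
with consumer_client:
    consumer_client.receive(
        on_event=on_event,
        starting_position="-1",
    )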


import asyncio
from app import app
from app.constants import Config

from azure.eventhub import EventHubConsumerClient

client = EventHubConsumerClient.from_connection_string(
    Config.EVENT_HUB_CONNNECTION_STRING,
    Config.CONSUMER_GROUP,
    eventhub_name=Config.EVENT_HUB_NAME)


def on_event(partition_context, event):
    from app.loggers import context
    # context.do_logging("ID: {} Message: {}".format(
    #     partition_context.partition_id, event.body_as_str(encoding='UTF-8')))
    partition_context.update_checkpoint(event)


def receive_data():
    with client:
        client.receive(
            on_event=on_event,
            # "-1" is from the beginning of the partition.
            starting_position="-1",
        )


receive_data()
# Example #12
import os

from azure.eventhub import EventHubConsumerClient
from azure.eventhub.extensions.checkpointstoreblob import BlobCheckpointStore

CONNECTION_STR = os.environ["EVENT_HUB_CONN_STR"]
EVENTHUB_NAME = os.environ['EVENT_HUB_NAME']
STORAGE_CONNECTION_STR = os.environ["AZURE_STORAGE_CONN_STR"]
BLOB_CONTAINER_NAME = "your-blob-container-name"  # Please make sure the blob container resource exists.


def on_event(partition_context, event):
    # Put your code here.
    # Avoid time-consuming operations.
    print(event)
    partition_context.update_checkpoint(event)


if __name__ == '__main__':
    checkpoint_store = BlobCheckpointStore.from_connection_string(
        STORAGE_CONNECTION_STR,
        container_name=BLOB_CONTAINER_NAME,
    )
    client = EventHubConsumerClient.from_connection_string(
        CONNECTION_STR,
        consumer_group='$Default',
        eventhub_name=EVENTHUB_NAME,
        checkpoint_store=checkpoint_store)

    try:
        client.receive(on_event)
    except KeyboardInterrupt:
        client.close()
# Example #13
import os
import sys
import time

from azure.eventhub import EventHubConsumerClient, EventHubProducerClient

# Connection String
CONN = sys.argv[1]
CONSUMER_GROUP = "$Default"

client = EventHubConsumerClient.from_connection_string(CONN, CONSUMER_GROUP)


def on_event(partition_context, event):
    print("event: {}", event)
    print("Received event from partition {}".format(
        partition_context.partition_id))
    partition_context.update_checkpoint(event)


with client:
    client.receive(
        on_event=on_event,
        starting_position="-1",  # "-1" is from the beginning of the partition.
    )
# Example #14

def on_event(partition_context, event):
    print("Received event from partition: {}".format(
        partition_context.partition_id))

    # Put your code here to do some operations on the event.
    # Avoid time-consuming operations.
    print(event)

    partition_context.update_checkpoint(event)


if __name__ == '__main__':
    container_client = ContainerClient.from_connection_string(
        STORAGE_CONNECTION_STR, "eventprocessor")
    partition_manager = BlobPartitionManager(container_client)
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STR,
        partition_manager=partition_manager,  # For load balancing and checkpoint. Leave None for no load balancing
    )

    try:
        with consumer_client:
            """
            Without specified partition_id, the receive will try to receive events from all partitions and if provided with
            partition manager, the client will load-balance partition assignment with other EventHubConsumerClient instances
            which also try to receive events from all partitions and use the same storage resource.
            """
            consumer_client.receive(on_event=on_event,
                                    consumer_group='$Default')
            # With specified partition_id, load-balance will be disabled
            # client.receive(on_event=on_event, consumer_group='$Default', partition_id='0')
    except KeyboardInterrupt: