Example No. 1
def test_client_azure_sas_credential(live_eventhub):
    # This should "just work" to validate known-good.
    hostname = live_eventhub['hostname']
    producer_client = EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'],
        eventhub_name=live_eventhub['event_hub'])

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'],
                                             live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = credential.get_token(auth_uri).token.decode()
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=AzureSasCredential(token))

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)
Example No. 2
def test_get_properties_with_auth_error_sync(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'], '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    "AaBbCcDdEeFf="))
    with client:
        with pytest.raises(AuthenticationError) as e:
            client.get_eventhub_properties()

    client = EventHubConsumerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'], '$default',
        EventHubSharedKeyCredential("invalid", live_eventhub['access_key']))
    with client:
        with pytest.raises(AuthenticationError) as e:
            client.get_eventhub_properties()
Example No. 3
def create_eventhub_consumer_client():
    # [START create_eventhub_consumer_client_from_conn_str_sync]
    import os
    from azure.eventhub import EventHubConsumerClient
    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    eventhub_name = os.environ['EVENT_HUB_NAME']
    consumer = EventHubConsumerClient.from_connection_string(
        conn_str=event_hub_connection_str, eventhub_name=eventhub_name)
    # [END create_eventhub_consumer_client_from_conn_str_sync]

    # [START create_eventhub_consumer_client_sync]
    import os
    from azure.eventhub import EventHubConsumerClient, EventHubSharedKeyCredential

    fully_qualified_namespace = os.environ['EVENT_HUB_HOSTNAME']
    eventhub_name = os.environ['EVENT_HUB_NAME']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    credential = EventHubSharedKeyCredential(shared_access_policy,
                                             shared_access_key)
    consumer = EventHubConsumerClient(
        fully_qualified_namespace=fully_qualified_namespace,
        eventhub_name=eventhub_name,
        credential=credential)
    # [END create_eventhub_consumer_client_sync]
    return consumer
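The consumer returned above is typically handed to a receive loop. A minimal, self-contained sketch (assuming the azure-eventhub 5.x API and the same environment variable names as the sample; the handler itself is illustrative):

import os
from azure.eventhub import EventHubConsumerClient

def on_event(partition_context, event):
    # Illustrative handler: print which partition the event came from and its body.
    print("Partition {}: {}".format(partition_context.partition_id, event.body_as_str()))

consumer = EventHubConsumerClient.from_connection_string(
    conn_str=os.environ['EVENT_HUB_CONN_STR'],
    consumer_group='$Default',
    eventhub_name=os.environ['EVENT_HUB_NAME'])
with consumer:
    # starting_position="-1" reads each partition from the beginning; receive() blocks until the client is closed.
    consumer.receive(on_event=on_event, starting_position="-1")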
Example No. 4
def create_producer_client():
    print('Examples showing how to create producer client.')

    # Create producer client from connection string.

    producer_client = EventHubProducerClient.from_connection_string(
        conn_str=CONNECTION_STRING  # The connection string already contains the Event Hub name.
    )

    # Illustration of commonly used parameters.
    producer_client = EventHubProducerClient.from_connection_string(
        conn_str=CONNECTION_STRING,
        eventhub_name=EVENTHUB_NAME,  # The Event Hub name must be specified if it is not included in the connection string.
        logging_enable=False,  # Set logging_enable to True to enable network trace logging.
        retry_total=3,  # Retry failed operations up to 3 times.
        transport_type=TransportType.Amqp  # Use AMQP as the underlying transport protocol.
    )

    # Create producer client from constructor.

    producer_client = EventHubProducerClient(
        fully_qualified_namespace=FULLY_QUALIFIED_NAMESPACE,
        eventhub_name=EVENTHUB_NAME,
        credential=EventHubSharedKeyCredential(
            policy=SAS_POLICY,
            key=SAS_KEY
        ),
        logging_enable=False,  # Set logging_enable to True to enable network trace logging.
        retry_total=3,  # Retry failed operations up to 3 times.
        transport_type=TransportType.Amqp  # Use AMQP as the underlying transport protocol.
    )

    print("Calling producer client get eventhub properties:", producer_client.get_eventhub_properties())
    def create_client(self, client_class):
        if self.args.conn_str:
            client = client_class.from_connection_string(
                self.args.conn_str,
                event_hub_path=self.args.eventhub,
                network_tracing=False)
        elif self.args.address:
            client = client_class(host=self.args.address,
                                  event_hub_path=self.args.eventhub,
                                  credential=EventHubSharedKeyCredential(
                                      self.args.sas_policy, self.args.sas_key),
                                  auth_timeout=240,
                                  network_tracing=False)
        elif self.args.aad_client_id:
            client = client_class(host=self.args.address,
                                  event_hub_path=self.args.eventhub,
                                  credential=ClientSecretCredential(
                                      self.args.aad_client_id,
                                      self.args.aad_secret,
                                      self.args.tenant_id),
                                  network_tracing=False)
        else:
            raise ValueError(
                "Argument error. Must have one of connection string, sas and aad credentials"
            )

        return client
def example_create_eventhub_consumer_client():
    # [START create_eventhub_consumer_client_from_conn_str_sync]
    import os
    from azure.eventhub import EventHubConsumerClient
    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    eventhub_name = os.environ['EVENT_HUB_NAME']
    consumer = EventHubConsumerClient.from_connection_string(
        conn_str=event_hub_connection_str,
        consumer_group='$Default',
        eventhub_name=eventhub_name  # The Event Hub name must be specified if it is not included in the connection string.
    )
    # [END create_eventhub_consumer_client_from_conn_str_sync]

    # [START create_eventhub_consumer_client_sync]
    import os
    from azure.eventhub import EventHubConsumerClient, EventHubSharedKeyCredential

    fully_qualified_namespace = os.environ['EVENT_HUB_HOSTNAME']
    eventhub_name = os.environ['EVENT_HUB_NAME']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    credential = EventHubSharedKeyCredential(shared_access_policy, shared_access_key)
    consumer = EventHubConsumerClient(
        fully_qualified_namespace=fully_qualified_namespace,
        eventhub_name=eventhub_name,
        consumer_group='$Default',
        credential=credential)
    # [END create_eventhub_consumer_client_sync]
    return consumer
def create_async_eventhub_consumer_client():
    # [START create_eventhub_consumer_client_from_conn_str_async]
    import os
    from azure.eventhub.aio import EventHubConsumerClient
    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    event_hub = os.environ['EVENT_HUB_NAME']
    consumer = EventHubConsumerClient.from_connection_string(conn_str=event_hub_connection_str,
                                                             event_hub_path=event_hub)
    # [END create_eventhub_consumer_client_from_conn_str_async]

    # [START create_eventhub_consumer_client_async]
    import os
    from azure.eventhub import EventHubSharedKeyCredential
    from azure.eventhub.aio import EventHubConsumerClient

    hostname = os.environ['EVENT_HUB_HOSTNAME']
    event_hub = os.environ['EVENT_HUB_NAME']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    consumer = EventHubConsumerClient(host=hostname,
                                      event_hub_path=event_hub,
                                      credential=EventHubSharedKeyCredential(shared_access_policy, shared_access_key))
    # [END create_eventhub_consumer_client_async]
    return consumer
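The snippet above uses the preview-era host/event_hub_path constructor. For reference, a minimal sketch of receiving with the async client under the current azure-eventhub 5.x API (constructor keywords and the handler follow that API, not the sample above):

import asyncio
import os
from azure.eventhub.aio import EventHubConsumerClient

async def on_event(partition_context, event):
    # Illustrative async handler.
    print("Partition {}: {}".format(partition_context.partition_id, event.body_as_str()))

async def receive():
    consumer = EventHubConsumerClient.from_connection_string(
        conn_str=os.environ['EVENT_HUB_CONN_STR'],
        consumer_group='$Default',
        eventhub_name=os.environ['EVENT_HUB_NAME'])
    async with consumer:
        # Runs until the client is closed or the task is cancelled.
        await consumer.receive(on_event=on_event, starting_position="-1")

asyncio.run(receive())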
Example No. 8
async def main():
    client = EventHubClient(host=HOSTNAME, event_hub_path=EVENT_HUB, credential=EventHubSharedKeyCredential(USER, KEY),
                            network_tracing=False)
    consumer = client.create_consumer(consumer_group="$default", partition_id="0", event_position=EVENT_POSITION)
    async with consumer:
        async for item in consumer:
            print(item)
async def test_get_partition_ids(live_eventhub):
    client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key'])
    )
    partition_ids = await client.get_partition_ids()
    assert partition_ids == ['0', '1']
    await client.close()
Example No. 10
    async def open_clients_async(self):
        """
        Responsible for establishing the connection to the Event Hub client.
        Throws EventHubsException, IOException, InterruptedException, ExecutionException.
        """
        await self.partition_context.get_initial_offset_async()
        # Create event hub client and receive handler and set options
        hostname = "{}.{}".format(self.host.eh_config.sb_name,
                                  self.host.eh_config.namespace_suffix)
        event_hub_path = self.host.eh_config.eh_name
        shared_key_cred = EventHubSharedKeyCredential(
            self.host.eh_config.policy, self.host.eh_config.sas_key)

        self.eh_client = EventHubClient(
            hostname,
            event_hub_path,
            shared_key_cred,
            network_tracing=self.host.eph_options.debug_trace,
            http_proxy=self.host.eph_options.http_proxy)
        self.partition_receive_handler = self.eh_client.create_consumer(
            partition_id=self.partition_context.partition_id,
            consumer_group=self.partition_context.consumer_group_name,
            event_position=EventPosition(self.partition_context.offset),
            prefetch=self.host.eph_options.prefetch_count,
            loop=self.loop)
        self.partition_receiver = PartitionReceiver(self)
Example No. 11
    async def get_partition_ids_async(self):
        """
        Returns a list of all the event hub partition IDs.

        :rtype: list[str]
        """
        if not self.partition_ids:
            hostname = "{}.{}".format(self.host.eh_config.sb_name,
                                      self.host.eh_config.namespace_suffix)
            event_hub_path = self.host.eh_config.eh_name
            shared_key_cred = EventHubSharedKeyCredential(
                self.host.eh_config.policy, self.host.eh_config.sas_key)

            eh_client = EventHubClient(
                hostname,
                event_hub_path,
                shared_key_cred,
                network_tracing=self.host.eph_options.debug_trace,
                # http_proxy=self.host.eph_options.http_proxy,
            )
            try:
                eh_info = await eh_client.get_properties()
                self.partition_ids = eh_info['partition_ids']
            except Exception as err:  # pylint: disable=broad-except
                raise Exception("Failed to get partition ids", repr(err))
        return self.partition_ids
Example No. 12
def test_get_partition_ids(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'], '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with client:
        partition_ids = client.get_partition_ids()
        assert partition_ids == ['0', '1']
Example No. 13
async def test_long_running_receive_async(connection_str):
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--consumer",
                        help="Consumer group name",
                        default="$default")
    parser.add_argument("--partitions", help="Comma seperated partition IDs")
    parser.add_argument("--offset", help="Starting offset", default="-1")
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    loop = asyncio.get_event_loop()
    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str,
            event_hub_path=args.eventhub,
            auth_timeout=240,
            network_tracing=False)
    elif args.address:
        client = EventHubClient(host=args.address,
                                event_hub_path=args.eventhub,
                                credential=EventHubSharedKeyCredential(
                                    args.sas_policy, args.sas_key),
                                auth_timeout=240,
                                network_tracing=False)

    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    if not args.partitions:
        partitions = await client.get_partition_ids()
    else:
        partitions = args.partitions.split(",")
    pumps = []
    for pid in partitions:
        receiver = client.create_consumer(consumer_group="$default",
                                          partition_id=pid,
                                          event_position=EventPosition(
                                              args.offset),
                                          prefetch=300,
                                          loop=loop)
        pumps.append(pump(pid, receiver, args, args.duration))
    await asyncio.gather(*pumps)
Example No. 14
def create_client(args):

    if args.storage_conn_str:
        checkpoint_store = BlobCheckpointStore.from_connection_string(
            args.storage_conn_str, args.storage_container_name)
    else:
        checkpoint_store = None

    transport_type = TransportType.Amqp if args.transport_type == 0 else TransportType.AmqpOverWebsocket
    http_proxy = None
    if args.proxy_hostname:
        http_proxy = {
            "proxy_hostname": args.proxy_hostname,
            "proxy_port": args.proxy_port,
            "username": args.proxy_username,
            "password": args.proxy_password,
        }

    if args.conn_str:
        client = EventHubConsumerClientTest.from_connection_string(
            args.conn_str,
            args.consumer_group,
            eventhub_name=args.eventhub,
            checkpoint_store=checkpoint_store,
            load_balancing_interval=args.load_balancing_interval,
            auth_timeout=args.auth_timeout,
            http_proxy=http_proxy,
            transport_type=transport_type,
            logging_enable=args.uamqp_logging_enable)
    elif args.hostname:
        client = EventHubConsumerClientTest(
            fully_qualified_namespace=args.hostname,
            eventhub_name=args.eventhub,
            consumer_group=args.consumer_group,
            credential=EventHubSharedKeyCredential(args.sas_policy,
                                                   args.sas_key),
            checkpoint_store=checkpoint_store,
            load_balancing_interval=args.load_balancing_interval,
            auth_timeout=args.auth_timeout,
            http_proxy=http_proxy,
            transport_type=transport_type,
            logging_enable=args.uamqp_logging_enable)
    elif args.aad_client_id:
        credential = ClientSecretCredential(args.tenant_id, args.aad_client_id,
                                            args.aad_secret)
        client = EventHubConsumerClientTest(
            fully_qualified_namespace=args.hostname,
            eventhub_name=args.eventhub,
            consumer_group=args.consumer_group,
            credential=credential,
            checkpoint_store=checkpoint_store,
            load_balancing_interval=args.load_balancing_interval,
            auth_timeout=args.auth_timeout,
            http_proxy=http_proxy,
            transport_type=transport_type,
            logging_enable=args.uamqp_logging_enable)

    return client
    def create_client(self, client_class, is_async=False):

        transport_type = TransportType.Amqp if self.args.transport_type == 0 else TransportType.AmqpOverWebsocket
        http_proxy = None
        retry_options = {
            "retry_total": self.args.retry_total,
            "retry_backoff_factor": self.args.retry_backoff_factor,
            "retry_backoff_max": self.args.retry_backoff_max
        }
        if self.args.proxy_hostname:
            http_proxy = {
                "proxy_hostname": self.args.proxy_hostname,
                "proxy_port": self.args.proxy_port,
                "username": self.args.proxy_username,
                "password": self.args.proxy_password,
            }

        if self.args.conn_str:
            client = client_class.from_connection_string(
                self.args.conn_str,
                eventhub_name=self.args.eventhub,
                auth_timeout=self.args.auth_timeout,
                http_proxy=http_proxy,
                transport_type=transport_type,
                logging_enable=self.args.uamqp_logging_enable,
                **retry_options
            )
        elif self.args.hostname:
            client = client_class(
                fully_qualified_namespace=self.args.hostname,
                eventhub_name=self.args.eventhub,
                credential=EventHubSharedKeyCredential(self.args.sas_policy, self.args.sas_key),
                auth_timeout=self.args.auth_timeout,
                http_proxy=http_proxy,
                transport_type=transport_type,
                logging_enable=self.args.uamqp_logging_enable,
                **retry_options
            )
        elif self.args.aad_client_id:
            if is_async:
                credential = ClientSecretCredentialAsync(self.args.tenant_id, self.args.aad_client_id, self.args.aad_secret)
            else:
                credential = ClientSecretCredential(self.args.tenant_id, self.args.aad_client_id, self.args.aad_secret)
            client = client_class(
                fully_qualified_namespace=self.args.hostname,
                eventhub_name=self.args.eventhub,
                auth_timeout=self.args.auth_timeout,
                credential=credential,
                http_proxy=http_proxy,
                transport_type=transport_type,
                logging_enable=self.args.uamqp_logging_enable,
                **retry_options
            )
        else:
            raise ValueError("Argument error. Must have one of connection string, sas and aad credentials")

        return client
def test_get_properties(live_eventhub):
    client = EventHubClient(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    properties = client.get_properties()
    assert properties['path'] == live_eventhub['event_hub'] and properties[
        'partition_ids'] == ['0', '1']
    client.close()
Example No. 17
def test_get_properties_with_connect_error(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'], "invalid", '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with client:
        with pytest.raises(ConnectError) as e:
            client.get_eventhub_properties()

    client = EventHubConsumerClient(
        "invalid.servicebus.windows.net", live_eventhub['event_hub'],
        '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with client:
        with pytest.raises(
                EventHubError
        ) as e:  # This can be either ConnectError or ConnectionLostError
            client.get_eventhub_properties()
async def main():
    if not HOSTNAME:
        raise ValueError("No EventHubs URL supplied.")
    client = EventHubClient(host=HOSTNAME,
                            event_hub_path=EVENT_HUB,
                            credential=EventHubSharedKeyCredential(USER, KEY),
                            network_tracing=False)
    consumer = client.create_consumer(consumer_group="$default",
                                      partition_id="0",
                                      event_position=EVENT_POSITION)
    await iter_consumer(consumer)
async def test_get_partition_properties(live_eventhub):
    client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key'])
    )
    properties = await client.get_partition_properties('0')
    assert properties['event_hub_path'] == live_eventhub['event_hub'] \
        and properties['id'] == '0' \
        and 'beginning_sequence_number' in properties \
        and 'last_enqueued_sequence_number' in properties \
        and 'last_enqueued_offset' in properties \
        and 'last_enqueued_time_utc' in properties \
        and 'is_empty' in properties
Example No. 20
def test_send_with_long_interval_sync(live_eventhub, sleep):
    test_partition = "0"
    sender = EventHubProducerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with sender:
        batch = sender.create_batch(partition_id=test_partition)
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)
        if sleep:
            time.sleep(250)
        else:
            sender._producers[
                test_partition]._handler._connection._conn.destroy()
        batch = sender.create_batch(partition_id=test_partition)
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)

    received = []

    uri = "sb://{}/{}".format(live_eventhub['hostname'],
                              live_eventhub['event_hub'])
    sas_auth = authentication.SASTokenAuth.from_shared_access_key(
        uri, live_eventhub['key_name'], live_eventhub['access_key'])

    source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        live_eventhub['consumer_group'], test_partition)
    receiver = uamqp.ReceiveClient(source,
                                   auth=sas_auth,
                                   debug=False,
                                   timeout=5000,
                                   prefetch=500)
    try:
        receiver.open()
        # receive_message_batch() returns as soon as it has received any messages, before max_batch_size
        # is reached or the timeout expires. It may return anywhere from 1 to max_batch_size messages,
        # so call it twice to make sure both events are received.
        received.extend([
            EventData._from_message(x)
            for x in receiver.receive_message_batch(max_batch_size=1,
                                                    timeout=5000)
        ])
        received.extend([
            EventData._from_message(x)
            for x in receiver.receive_message_batch(max_batch_size=1,
                                                    timeout=5000)
        ])
    finally:
        receiver.close()
    assert len(received) == 2
    assert list(received[0].body)[0] == b"A single event"
Example No. 21
def init_event_hub():
    with open('config.json', 'r') as json_file:
        config = json.load(json_file)
    client = EventHubClient(host=config["EH_HOST"],
                            event_hub_path=config["EH_NAME"],
                            credential=EventHubSharedKeyCredential(
                                config["EVENT_HUB_SAS_POLICY"],
                                config["EVENT_HUB_SAS_KEY"]),
                            network_tracing=False)

    for i in range(NUM_PARTITIONS):
        event_producer_list.append(client.create_producer(partition_id=str(i)))
def test_long_running_send(connection_str):
    if sys.platform.startswith('darwin'):
        import pytest
        pytest.skip("Skipping on OSX")
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--payload",
                        help="payload size",
                        type=int,
                        default=512)
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str, event_hub_path=args.eventhub)
    elif args.address:
        client = EventHubClient(host=args.address,
                                event_hub_path=args.eventhub,
                                credential=EventHubSharedKeyCredential(
                                    args.sas_policy, args.sas_key),
                                auth_timeout=240,
                                network_tracing=False)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        partition_ids = client.get_partition_ids()
        threads = []
        for pid in partition_ids:
            sender = client.create_producer(partition_id=pid)
            thread = threading.Thread(target=send, args=(sender, args))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
    except KeyboardInterrupt:
        pass
Example No. 23
def test_client_sas_credential(live_eventhub):
    # This should "just work" to validate known-good.
    hostname = live_eventhub['hostname']
    producer_client = EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'],
        eventhub_name=live_eventhub['event_hub'])

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'],
                                             live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = credential.get_token(auth_uri).token
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=EventHubSASTokenCredential(token,
                                              time.time() + 3000))

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # Finally let's do it with SAS token + conn str
    token_conn_str = "Endpoint=sb://{}/;SharedAccessSignature={};".format(
        hostname, token.decode())
    conn_str_producer_client = EventHubProducerClient.from_connection_string(
        token_conn_str, eventhub_name=live_eventhub['event_hub'])

    with conn_str_producer_client:
        batch = conn_str_producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        conn_str_producer_client.send_batch(batch)
async def test_long_running_partition_send_async(connection_str):
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30)
    parser.add_argument("--payload", help="payload size", type=int, default=1024)
    parser.add_argument("--partitions", help="Comma separated partition IDs")
    parser.add_argument("--conn-str", help="EventHub connection string", default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument("--sas-policy", help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")
    parser.add_argument("--logger-name", help="Unique log file ID")

    loop = asyncio.get_event_loop()
    args, _ = parser.parse_known_args()

    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str,
            event_hub_path=args.eventhub, network_tracing=False)
    elif args.address:
        client = EventHubClient(host=args.address,
                                event_hub_path=args.eventhub,
                                credential=EventHubSharedKeyCredential(args.sas_policy, args.sas_key),
                                network_tracing=False)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        if not args.partitions:
            partitions = await client.get_partition_ids()
        else:
            pid_range = args.partitions.split("-")
            if len(pid_range) > 1:
                partitions = [str(i) for i in range(int(pid_range[0]), int(pid_range[1]) + 1)]
            else:
                partitions = args.partitions.split(",")
        pumps = []
        for pid in partitions:
            sender = client.create_producer(partition_id=pid, send_timeout=0)
            pumps.append(pump(pid, sender, args, args.duration))
        results = await asyncio.gather(*pumps, return_exceptions=True)
        assert not any(results)  # none of the pumps should have returned an exception
    except Exception as e:
        logger.error("EventHubProducer failed: {}".format(e))
def create_eventhub_client(live_eventhub_config):
    # [START create_eventhub_client]
    import os
    from azure.eventhub import EventHubClient, EventHubSharedKeyCredential

    host = os.environ['EVENT_HUB_HOSTNAME']
    event_hub_path = os.environ['EVENT_HUB_NAME']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    client = EventHubClient(host=host,
                            event_hub_path=event_hub_path,
                            credential=EventHubSharedKeyCredential(
                                shared_access_policy, shared_access_key))
    # [END create_eventhub_client]
    return client
def test_send_with_long_interval_sync(live_eventhub, sleep):
    sender = EventHubProducerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with sender:
        batch = sender.create_batch()
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)
        for _ in range(1):
            if sleep:
                time.sleep(300)
            else:
                sender._producers[-1]._handler._connection._conn.destroy()
            batch = sender.create_batch()
            batch.add(EventData(b"A single event"))
            sender.send_batch(batch)
        partition_ids = sender.get_partition_ids()

    received = []
    for p in partition_ids:
        uri = "sb://{}/{}".format(live_eventhub['hostname'],
                                  live_eventhub['event_hub'])
        sas_auth = authentication.SASTokenAuth.from_shared_access_key(
            uri, live_eventhub['key_name'], live_eventhub['access_key'])

        source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
            live_eventhub['hostname'], live_eventhub['event_hub'],
            live_eventhub['consumer_group'], p)
        receiver = uamqp.ReceiveClient(source,
                                       auth=sas_auth,
                                       debug=False,
                                       timeout=5000,
                                       prefetch=500)
        try:
            receiver.open()
            received.extend([
                EventData._from_message(x)
                for x in receiver.receive_message_batch(timeout=5000)
            ])
        finally:
            receiver.close()

    assert len(received) == 2
    assert list(received[0].body)[0] == b"A single event"
Example No. 27
HOSTNAME = os.environ[
    'EVENT_HUB_HOSTNAME']  # <mynamespace>.servicebus.windows.net
EVENT_HUB = os.environ['EVENT_HUB_NAME']

USER = os.environ['EVENT_HUB_SAS_POLICY']
KEY = os.environ['EVENT_HUB_SAS_KEY']

EVENT_POSITION = EventPosition("-1")
PARTITION = "0"

total = 0
last_sn = -1
last_offset = "-1"
client = EventHubClient(host=HOSTNAME,
                        event_hub_path=EVENT_HUB,
                        credential=EventHubSharedKeyCredential(USER, KEY),
                        network_tracing=False)

consumer = client.create_consumer(consumer_group="$default",
                                  partition_id=PARTITION,
                                  event_position=EVENT_POSITION,
                                  prefetch=5000)
with consumer:
    start_time = time.time()
    batch = consumer.receive(timeout=5)
    while batch:
        for event_data in batch:
            last_offset = event_data.offset
            last_sn = event_data.sequence_number
            print("Received: {}, {}".format(last_offset, last_sn))
            print(event_data.body_as_str())
        batch = consumer.receive(timeout=5)  # fetch the next batch; the loop ends once no more events are returned
Example No. 28
    event_data_batch = producer.create_batch(max_size=10000)
    while True:
        try:
            event_data_batch.try_add(EventData('Message inside EventBatchData'))
        except ValueError:
            # EventDataBatch object reaches max_size.
            # New EventDataBatch object can be created here to send more data
            break
    return event_data_batch


try:
    if not HOSTNAME:
        raise ValueError("No EventHubs URL supplied.")

    client = EventHubClient(host=HOSTNAME, event_hub_path=EVENT_HUB, credential=EventHubSharedKeyCredential(USER, KEY),
                            network_tracing=False)
    producer = client.create_producer()

    try:
        start_time = time.time()
        with producer:
            event_data_batch = create_batch_data(producer)
            producer.send(event_data_batch)
    except:
        raise
    finally:
        end_time = time.time()
        run_time = end_time - start_time
        logger.info("Runtime: {} seconds".format(run_time))
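As a closing note, the "create a new EventDataBatch once the current one is full" pattern hinted at in the comment inside create_batch_data() above looks roughly like this under the current azure-eventhub 5.x API (create_batch()/add()/send_batch() instead of the preview-era try_add()/send(); the environment variable names are illustrative):

import os
from azure.eventhub import EventHubProducerClient, EventData

producer = EventHubProducerClient.from_connection_string(
    os.environ['EVENT_HUB_CONN_STR'], eventhub_name=os.environ['EVENT_HUB_NAME'])

with producer:
    batch = producer.create_batch()
    for i in range(1000):
        event = EventData("Message {}".format(i))
        try:
            batch.add(event)
        except ValueError:
            # The current batch is full: send it and start a new one for this event.
            producer.send_batch(batch)
            batch = producer.create_batch()
            batch.add(event)
    if len(batch) > 0:
        # Send whatever is left over.
        producer.send_batch(batch)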