async def test_get_partition_ids(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'],
        live_eventhub['event_hub'],
        '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    partition_ids = await client.get_partition_ids()
    assert partition_ids == ['0', '1']
    await client.close()

async def test_get_properties(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'],
        live_eventhub['event_hub'],
        '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    async with client:
        properties = await client.get_eventhub_properties()
        assert properties['eventhub_name'] == live_eventhub['event_hub'] \
            and properties['partition_ids'] == ['0', '1']

async def test_send_with_long_interval_async(live_eventhub, sleep):
    test_partition = "0"
    sender = EventHubProducerClient(
        live_eventhub['hostname'],
        live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    async with sender:
        batch = await sender.create_batch(partition_id=test_partition)
        batch.add(EventData(b"A single event"))
        await sender.send_batch(batch)

        if sleep:
            await asyncio.sleep(250)  # EH server-side idle timeout is 240 seconds.
        else:
            await sender._producers[test_partition]._handler._connection._conn.destroy()

        batch = await sender.create_batch(partition_id=test_partition)
        batch.add(EventData(b"A single event"))
        await sender.send_batch(batch)

    received = []
    uri = "sb://{}/{}".format(live_eventhub['hostname'], live_eventhub['event_hub'])
    sas_auth = authentication.SASTokenAuth.from_shared_access_key(
        uri, live_eventhub['key_name'], live_eventhub['access_key'])
    source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
        live_eventhub['hostname'],
        live_eventhub['event_hub'],
        live_eventhub['consumer_group'],
        test_partition)
    receiver = uamqp.ReceiveClient(source, auth=sas_auth, debug=False, timeout=10000, prefetch=10)
    try:
        receiver.open()
        # receive_message_batch() returns as soon as any messages arrive, before either
        # max_batch_size or the timeout is reached, so it may return anywhere from 1 to
        # max_batch_size events. Call it twice to ensure both events are received.
        received.extend([
            EventData._from_message(x)
            for x in receiver.receive_message_batch(max_batch_size=1, timeout=5000)
        ])
        received.extend([
            EventData._from_message(x)
            for x in receiver.receive_message_batch(max_batch_size=1, timeout=5000)
        ])
    finally:
        receiver.close()

    assert len(received) == 2
    assert list(received[0].body)[0] == b"A single event"

async def consume(config: ConsumerConfig, delegate: MessageStorageDelegate):
    """
    Set up and start a message topic consumer and storage delegate.

    :param config: A ConsumerConfig object
    :param delegate: A storage delegate object
    :return: None
    """
    # Create a consumer client for the event hub.
    logger.info(f"Consuming {config}")
    if config.checkpoint_store_conn_str and config.checkpoint_store_container_name:
        # Use an Azure Blob Storage container to store the position within each partition.
        checkpoint_store = BlobCheckpointStore.from_connection_string(
            config.checkpoint_store_conn_str, config.checkpoint_store_container_name)
        client = EventHubConsumerClient(
            fully_qualified_namespace=config.fully_qualified_namespace,
            consumer_group=config.consumer_group,
            eventhub_name=config.topic,
            credential=EventHubSharedKeyCredential(config.shared_access_policy, config.key),
            checkpoint_store=checkpoint_store
        )
    else:
        client = EventHubConsumerClient(
            fully_qualified_namespace=config.fully_qualified_namespace,
            consumer_group=config.consumer_group,
            eventhub_name=config.topic,
            credential=EventHubSharedKeyCredential(config.shared_access_policy, config.key)
        )
    handler = MessageHandler(
        storage_delegate=delegate,
        buffer_size=config.buffer_size,
        max_buffer_time_in_sec=config.max_buffer_time_in_seconds,
        max_time_to_keep_data_in_seconds=config.max_time_to_keep_data_in_seconds,
        data_eviction_interval_in_seconds=config.data_eviction_interval_in_seconds,
        checkpoint_after_messages=config.checkpoint_after_messages
    )
    async with client:
        await client.receive(
            on_event=handler.received_event,
            on_error=errored,
            on_partition_close=partition_closed,
            on_partition_initialize=partition_initialized,
            starting_position=-1)

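# A minimal sketch (not part of the original module) of how consume() might be started
# from a synchronous entry point. The ConsumerConfig construction is assumed here; its
# exact constructor signature is not shown in this snippet, and a concrete storage
# delegate implementation must be supplied in place of the placeholder.
if __name__ == "__main__":
    import asyncio

    consumer_config = ConsumerConfig(...)  # assumed: namespace, consumer group, topic, credentials, buffer settings
    storage_delegate = ...                 # assumed: a concrete MessageStorageDelegate implementation
    asyncio.run(consume(consumer_config, storage_delegate))
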
async def test_get_properties_with_connect_error(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'],
        "invalid",
        '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    async with client:
        with pytest.raises(ConnectError) as e:
            await client.get_eventhub_properties()

    client = EventHubConsumerClient(
        "invalid.servicebus.windows.net",
        live_eventhub['event_hub'],
        '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    async with client:
        # This can be either ConnectError or ConnectionLostError.
        with pytest.raises(EventHubError) as e:
            await client.get_eventhub_properties()

def client(self) -> EventHubProducerClient:
    """Get an Event Hub producer client."""
    return EventHubProducerClient(
        fully_qualified_namespace=f"{self.event_hub_namespace}.servicebus.windows.net",
        eventhub_name=self.event_hub_instance_name,
        credential=EventHubSharedKeyCredential(  # type: ignore
            policy=self.event_hub_sas_policy, key=self.event_hub_sas_key),
        **ADDITIONAL_ARGS,
    )

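# A hedged usage sketch, not part of the original class: sending one event with the
# producer returned above. "hub" stands in for an instance of the enclosing class
# (its name is not shown in this snippet), and the sketch assumes client is exposed
# as a property; if it is a plain method, call hub.client() instead. The synchronous
# EventHubProducerClient is assumed; the azure.eventhub.aio variant follows the same
# pattern with "async with" and "await".
from azure.eventhub import EventData

producer = hub.client
with producer:
    batch = producer.create_batch()
    batch.add(EventData("example payload"))
    producer.send_batch(batch)
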
async def test_get_partition_properties(live_eventhub):
    client = EventHubProducerClient(
        live_eventhub['hostname'],
        live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    async with client:
        properties = await client.get_partition_properties('0')
        assert properties['eventhub_name'] == live_eventhub['event_hub'] \
            and properties['id'] == '0' \
            and 'beginning_sequence_number' in properties \
            and 'last_enqueued_sequence_number' in properties \
            and 'last_enqueued_offset' in properties \
            and 'last_enqueued_time_utc' in properties \
            and 'is_empty' in properties

async def test_send_with_long_interval_async(live_eventhub, sleep):
    sender = EventHubProducerClient(
        live_eventhub['hostname'],
        live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    async with sender:
        batch = await sender.create_batch()
        batch.add(EventData(b"A single event"))
        await sender.send_batch(batch)

        for _ in range(1):
            if sleep:
                await asyncio.sleep(300)
            else:
                await sender._producers[-1]._handler._connection._conn.destroy()
            batch = await sender.create_batch()
            batch.add(EventData(b"A single event"))
            await sender.send_batch(batch)

        partition_ids = await sender.get_partition_ids()

    received = []
    for p in partition_ids:
        uri = "sb://{}/{}".format(live_eventhub['hostname'], live_eventhub['event_hub'])
        sas_auth = authentication.SASTokenAuth.from_shared_access_key(
            uri, live_eventhub['key_name'], live_eventhub['access_key'])
        source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
            live_eventhub['hostname'],
            live_eventhub['event_hub'],
            live_eventhub['consumer_group'],
            p)
        receiver = uamqp.ReceiveClient(source, auth=sas_auth, debug=False, timeout=5000, prefetch=500)
        try:
            receiver.open()
            received.extend([
                EventData._from_message(x)
                for x in receiver.receive_message_batch(timeout=5000)
            ])
        finally:
            receiver.close()

    assert len(received) == 2
    assert list(received[0].body)[0] == b"A single event"

async def test_client_sas_credential_async(self, eventhub, eventhub_namespace,
                                           eventhub_namespace_key_name,
                                           eventhub_namespace_primary_key,
                                           eventhub_namespace_connection_string, **kwargs):
    # This should "just work" to validate known-good.
    hostname = "{}.servicebus.windows.net".format(eventhub_namespace.name)
    producer_client = EventHubProducerClient.from_connection_string(
        eventhub_namespace_connection_string, eventhub_name=eventhub.name)

    async with producer_client:
        batch = await producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        await producer_client.send_batch(batch)

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(eventhub_namespace_key_name,
                                             eventhub_namespace_primary_key)
    hostname = "{}.servicebus.windows.net".format(eventhub_namespace.name)
    auth_uri = "sb://{}/{}".format(hostname, eventhub.name)
    token = (await credential.get_token(auth_uri)).token
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=eventhub.name,
        credential=EventHubSASTokenCredential(token, time.time() + 3000))

    async with producer_client:
        batch = await producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        await producer_client.send_batch(batch)

    # Finally, do it with SAS token + connection string.
    token_conn_str = "Endpoint=sb://{}/;SharedAccessSignature={};".format(
        hostname, token.decode())
    conn_str_producer_client = EventHubProducerClient.from_connection_string(
        token_conn_str, eventhub_name=eventhub.name)

    async with conn_str_producer_client:
        batch = await conn_str_producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        await conn_str_producer_client.send_batch(batch)

async def create_consumer_client():
    print('Examples showing how to create consumer client.')

    # Create consumer client from connection string.
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STRING,  # Connection string contains EventHub name.
        consumer_group=CONSUMER_GROUP)

    # Illustration of commonly used parameters.
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STRING,
        consumer_group=CONSUMER_GROUP,
        eventhub_name=EVENTHUB_NAME,  # EventHub name should be specified if it doesn't show up in connection string.
        logging_enable=False,  # To enable network tracing log, set logging_enable to True.
        retry_total=3,  # Retry up to 3 times to re-do failed operations.
        transport_type=TransportType.Amqp  # Use Amqp as the underlying transport protocol.
    )

    # Create consumer client from constructor.
    consumer_client = EventHubConsumerClient(
        fully_qualified_namespace=FULLY_QUALIFIED_NAMESPACE,
        eventhub_name=EVENTHUB_NAME,
        consumer_group=CONSUMER_GROUP,
        credential=EventHubSharedKeyCredential(policy=SAS_POLICY, key=SAS_KEY),
        logging_enable=False,  # To enable network tracing log, set logging_enable to True.
        retry_total=3,  # Retry up to 3 times to re-do failed operations.
        transport_type=TransportType.Amqp  # Use Amqp as the underlying transport protocol.
    )

    async with consumer_client:
        print("Calling consumer client get eventhub properties:",
              await consumer_client.get_eventhub_properties())

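# A hedged follow-on sketch: once a consumer client exists (created as above), events
# are typically read by passing a callback to receive(). The on_event_example and
# receive_events names are illustrative only and are not part of the original sample.
async def on_event_example(partition_context, event):
    # Print the event body and record progress for this partition (persisting the
    # position requires a checkpoint_store to be configured on the client).
    print("Received event from partition {}: {}".format(
        partition_context.partition_id, event.body_as_str()))
    await partition_context.update_checkpoint(event)

async def receive_events():
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STRING,
        consumer_group=CONSUMER_GROUP)
    async with consumer_client:
        # "-1" starts reading from the beginning of each partition's stream.
        await consumer_client.receive(on_event=on_event_example, starting_position="-1")
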
async def test_client_sas_credential_async(live_eventhub):
    # This should "just work" to validate known-good.
    hostname = live_eventhub['hostname']
    producer_client = EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'], eventhub_name=live_eventhub['event_hub'])

    async with producer_client:
        batch = await producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        await producer_client.send_batch(batch)

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'],
                                             live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = (await credential.get_token(auth_uri)).token
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=EventHubSASTokenCredential(token, time.time() + 3000))

    async with producer_client:
        batch = await producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        await producer_client.send_batch(batch)

    # Finally, do it with SAS token + connection string.
    token_conn_str = "Endpoint=sb://{}/;SharedAccessSignature={};".format(
        hostname, token.decode())
    conn_str_producer_client = EventHubProducerClient.from_connection_string(
        token_conn_str, eventhub_name=live_eventhub['event_hub'])

    async with conn_str_producer_client:
        batch = await conn_str_producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        await conn_str_producer_client.send_batch(batch)