async def test_receive_with_invalid_policy_async(invalid_policy):
    client = EventHubClient.from_connection_string(invalid_policy)
    receiver = client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition("-1"))
    with pytest.raises(AuthenticationError):
        await receiver.receive(timeout=3)
    await receiver.close()
    await client.close()


async def test_loadbalancer_list_ownership_error(connstr_senders):
    class ErrorPartitionManager(InMemoryPartitionManager):
        async def list_ownership(self, fully_qualified_namespace, eventhub_name, consumer_group_name):
            raise RuntimeError("Test runtime error")

    connection_str, senders = connstr_senders
    for sender in senders:
        sender.send(EventData("EventProcessor Test"))
    eventhub_client = EventHubClient.from_connection_string(connection_str, receive_timeout=3)
    partition_manager = ErrorPartitionManager()

    event_processor = EventProcessor(eventhub_client=eventhub_client,
                                     consumer_group_name='$default',
                                     partition_manager=partition_manager,
                                     event_handler=event_handler,
                                     error_handler=None,
                                     partition_initialize_handler=None,
                                     partition_close_handler=None,
                                     polling_interval=1)
    task = asyncio.ensure_future(event_processor.start())
    await asyncio.sleep(5)
    # list_ownership always raises, so the processor keeps running but never claims a partition.
    assert event_processor._running is True
    assert len(event_processor._tasks) == 0
    await event_processor.stop()
    # task.cancel()
    await eventhub_client.close()


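# NOTE: the EventProcessor tests above and below reference an `event_handler`
# callback that is not defined in this excerpt. A minimal no-op sketch (an
# assumption, not necessarily the repo's original helper) matching the expected
# (partition_context, events) signature:
async def event_handler(partition_context, events):
    # Intentionally ignore received events; these tests only exercise ownership
    # claiming and task balancing, not event processing.
    pass

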
async def test_non_existing_entity_sender_async(connection_str):
    client = EventHubClient.from_connection_string(connection_str, event_hub_path="nemo")
    sender = client._create_producer(partition_id="1")
    with pytest.raises(AuthenticationError):
        await sender.send(EventData("test data"))
    await sender.close()
    await client.close()


async def test_non_existing_entity_receiver_async(connection_str):
    client = EventHubClient.from_connection_string(connection_str, event_hub_path="nemo")
    receiver = client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition("-1"))
    with pytest.raises(AuthenticationError):
        await receiver.receive(timeout=5)
    await receiver.close()
    await client.close()


async def test_send_with_create_event_batch_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(
        connection_str, transport_type=TransportType.AmqpOverWebsocket)
    sender = client._create_producer()

    # Fill a batch until try_add raises ValueError (batch is full), then send it.
    event_data_batch = await sender.create_batch(max_size=100000)
    while True:
        try:
            event_data_batch.try_add(EventData('A single event data'))
        except ValueError:
            break
    await sender.send(event_data_batch)

    # Repeat with a second batch to confirm the producer can be reused.
    event_data_batch = await sender.create_batch(max_size=100000)
    while True:
        try:
            event_data_batch.try_add(EventData('A single event data'))
        except ValueError:
            break
    await sender.send(event_data_batch)

    await sender.close()
    await client.close()


async def test_multiple_receiver_async(connstr_senders):
    connection_str, senders = connstr_senders
    senders[0].send(EventData(b"Receiving only a single event"))

    client = EventHubClient.from_connection_string(connection_str)
    partitions = await client.get_properties()
    assert partitions["partition_ids"] == ["0", "1"]
    receivers = []
    for i in range(2):
        receivers.append(client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition("-1"), prefetch=10))
    try:
        more_partitions = await client.get_properties()
        assert more_partitions["partition_ids"] == ["0", "1"]
        outputs = [0, 0]
        outputs[0] = await pump(receivers[0])
        outputs[1] = await pump(receivers[1])
        assert isinstance(outputs[0], int) and outputs[0] == 1
        assert isinstance(outputs[1], int) and outputs[1] == 1
    finally:
        for r in receivers:
            await r.close()
        await client.close()


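# NOTE: several receive tests call a `pump` helper that is not part of this
# excerpt. A minimal sketch of what such a helper could look like (the timeout
# value and return type are assumptions consistent with how the tests use it,
# not the repo's original implementation):
async def pump(receiver, sleep=None):
    """Drain a consumer once and return how many events were received."""
    received = 0
    if sleep:
        await asyncio.sleep(sleep)
    async with receiver:
        batch = await receiver.receive(timeout=10)
        received += len(batch)
    return received

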
async def test_send_with_partition_key_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer()
    async with sender:
        data_val = 0
        for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]:
            partition_key = b"test_partition_" + partition
            for i in range(50):
                data = EventData(str(data_val))
                # data.partition_key = partition_key
                data_val += 1
                await sender.send(data, partition_key=partition_key)

    found_partition_keys = {}
    for index, partition in enumerate(receivers):
        received = partition.receive(timeout=5)
        for message in received:
            try:
                existing = found_partition_keys[message.partition_key]
                assert existing == index
            except KeyError:
                found_partition_keys[message.partition_key] = index
    await client.close()


async def test_send_batch_with_app_prop_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    app_prop_key = "raw_prop"
    app_prop_value = "raw_value"
    app_prop = {app_prop_key: app_prop_value}

    def batched():
        for i in range(10):
            ed = EventData("Event number {}".format(i))
            ed.application_properties = app_prop
            yield ed
        for i in range(10, 20):
            ed = EventData("Event number {}".format(i))
            ed.application_properties = app_prop
            yield ed

    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer()
    async with sender:
        await sender.send(batched())

    time.sleep(1)

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=3))

    assert len(received) == 20
    for index, message in enumerate(received):
        assert list(message.body)[0] == "Event number {}".format(index).encode('utf-8')
        assert (app_prop_key.encode('utf-8') in message.application_properties) \
            and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8'))
    await client.close()


async def test_exclusive_receiver_after_non_exclusive_receiver_async(connstr_senders):
    connection_str, senders = connstr_senders
    senders[0].send(EventData(b"Receiving only a single event"))

    client = EventHubClient.from_connection_string(connection_str)
    receiver1 = client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition("-1"), prefetch=10)
    receiver2 = client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition("-1"), owner_level=15, prefetch=10)
    try:
        await pump(receiver1)
        output2 = await pump(receiver2)
        # The exclusive (owner_level) receiver forces the non-exclusive one off the partition.
        with pytest.raises(ConnectionLostError):
            await receiver1.receive(timeout=3)
        assert output2 == 1
    finally:
        await receiver1.close()
        await receiver2.close()
        await client.close()


async def test_receive_with_datetime_async(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str)
    receiver = client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition('@latest'))
    async with receiver:
        received = await receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Data"))
        received = await receiver.receive(timeout=5)
        assert len(received) == 1
        offset = received[0].enqueued_time

    offset_receiver = client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition(offset))
    async with offset_receiver:
        received = await offset_receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Message after timestamp"))
        time.sleep(1)
        received = await offset_receiver.receive(timeout=5)
        assert len(received) == 1
    await client.close()


async def test_get_partition_ids(live_eventhub):
    client = EventHubClient(live_eventhub['hostname'],
                            live_eventhub['event_hub'],
                            EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    partition_ids = await client.get_partition_ids()
    assert partition_ids == ['0', '1']
    await client.close()


async def test_loadbalancer_balance(connstr_senders):
    connection_str, senders = connstr_senders
    for sender in senders:
        sender.send(EventData("EventProcessor Test"))
    eventhub_client = EventHubClient.from_connection_string(connection_str, receive_timeout=3)
    partition_manager = InMemoryPartitionManager()
    tasks = []

    event_processor1 = EventProcessor(eventhub_client=eventhub_client,
                                      consumer_group_name='$default',
                                      partition_manager=partition_manager,
                                      event_handler=event_handler,
                                      error_handler=None,
                                      partition_initialize_handler=None,
                                      partition_close_handler=None,
                                      polling_interval=1)
    tasks.append(asyncio.ensure_future(event_processor1.start()))
    await asyncio.sleep(3)
    assert len(event_processor1._tasks) == 2  # event_processor1 claims both partitions

    event_processor2 = EventProcessor(eventhub_client=eventhub_client,
                                      consumer_group_name='$default',
                                      partition_manager=partition_manager,
                                      event_handler=event_handler,
                                      error_handler=None,
                                      partition_initialize_handler=None,
                                      partition_close_handler=None,
                                      polling_interval=1)
    tasks.append(asyncio.ensure_future(event_processor2.start()))
    await asyncio.sleep(3)
    assert len(event_processor1._tasks) == 1  # the two event processors balance, so each owns one partition
    assert len(event_processor2._tasks) == 1

    event_processor3 = EventProcessor(eventhub_client=eventhub_client,
                                      consumer_group_name='$default',
                                      partition_manager=partition_manager,
                                      event_handler=event_handler,
                                      error_handler=None,
                                      partition_initialize_handler=None,
                                      partition_close_handler=None,
                                      polling_interval=1)
    tasks.append(asyncio.ensure_future(event_processor3.start()))
    await asyncio.sleep(3)
    assert len(event_processor3._tasks) == 0  # only two partitions exist, so the third processor claims none
    await event_processor3.stop()

    await event_processor1.stop()
    await asyncio.sleep(3)
    assert len(event_processor2._tasks) == 2  # event_processor2 takes over the partition released by event_processor1
    await event_processor2.stop()
    '''
    for task in tasks:
        task.cancel()
    '''
    await eventhub_client.close()


async def test_send_with_invalid_policy_async(invalid_policy, connstr_receivers):
    _, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(invalid_policy)
    sender = client._create_producer()
    with pytest.raises(AuthenticationError):
        await sender.send(EventData("test data"))
    await sender.close()
    await client.close()


async def test_send_to_invalid_partitions_async(connection_str):
    partitions = ["XYZ", "-1", "1000", "-"]
    for p in partitions:
        client = EventHubClient.from_connection_string(connection_str)
        sender = client._create_producer(partition_id=p)
        with pytest.raises(ConnectError):
            await sender.send(EventData("test data"))
        await sender.close()
        await client.close()


async def test_receive_from_invalid_partitions_async(connection_str):
    partitions = ["XYZ", "-1", "1000", "-"]
    for p in partitions:
        client = EventHubClient.from_connection_string(connection_str)
        receiver = client._create_consumer(consumer_group="$default", partition_id=p, event_position=EventPosition("-1"))
        with pytest.raises(ConnectError):
            await receiver.receive(timeout=5)
        await receiver.close()
        await client.close()


async def test_create_batch_with_too_large_size_async(connection_str):
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer()
    try:
        with pytest.raises(ValueError):
            batch_event_data = await sender.create_batch(max_size=5 * 1024 * 1024)
    finally:
        await sender.close()
        await client.close()


async def test_create_batch_with_invalid_hostname_async(invalid_hostname):
    client = EventHubClient.from_connection_string(invalid_hostname)
    sender = client._create_producer()
    try:
        with pytest.raises(AuthenticationError):
            batch_event_data = await sender.create_batch(max_size=300)
    finally:
        await sender.close()
        await client.close()


async def test_send_null_body_async(connection_str):
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer()
    try:
        with pytest.raises(ValueError):
            data = EventData(None)
            await sender.send(data)
    finally:
        await sender.close()
        await client.close()


async def test_partition_processor(connstr_senders):
    lock = asyncio.Lock()
    event_map = {}
    checkpoint = None
    close_reason = None
    error = None

    async def partition_initialize_handler(partition_context):
        assert partition_context

    async def event_handler(partition_context, events):
        async with lock:
            if events:
                nonlocal checkpoint, event_map
                event_map[partition_context.partition_id] = event_map.get(partition_context.partition_id, 0) + len(events)
                offset, sn = events[-1].offset, events[-1].sequence_number
                checkpoint = (offset, sn)
                await partition_context.update_checkpoint(events[-1])

    async def partition_close_handler(partition_context, reason):
        nonlocal close_reason
        close_reason = reason
        assert partition_context and reason

    async def error_handler(partition_context, err):
        nonlocal error
        error = err
        assert partition_context and err

    connection_str, senders = connstr_senders
    for sender in senders:
        sender.send(EventData("EventProcessor Test"))
    eventhub_client = EventHubClient.from_connection_string(connection_str, receive_timeout=3)
    partition_manager = InMemoryPartitionManager()

    event_processor = EventProcessor(eventhub_client=eventhub_client,
                                     consumer_group_name='$default',
                                     partition_manager=partition_manager,
                                     event_handler=event_handler,
                                     error_handler=error_handler,
                                     partition_initialize_handler=partition_initialize_handler,
                                     partition_close_handler=partition_close_handler,
                                     polling_interval=1)
    task = asyncio.ensure_future(event_processor.start())
    await asyncio.sleep(10)
    assert len(event_processor._tasks) == 2
    await event_processor.stop()
    task.cancel()
    await eventhub_client.close()

    # One event was sent to each of the two partitions and a checkpoint was recorded.
    assert event_map['0'] == 1 and event_map['1'] == 1
    assert checkpoint is not None
    assert close_reason == CloseReason.SHUTDOWN
    assert error is None


async def test_send_partition_key_with_partition_async(connection_str):
    pytest.skip("No longer raises ValueError. EventData will be sent to partition_id.")
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer(partition_id="1")
    try:
        data = EventData(b"Data")
        with pytest.raises(ValueError):
            await sender.send(EventData("test data"))
    finally:
        await sender.close()
        await client.close()


async def test_send_partition_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer(partition_id="1")
    async with sender:
        await sender.send(EventData(b"Data"))

    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 0
    partition_1 = receivers[1].receive(timeout=2)
    assert len(partition_1) == 1
    await client.close()


async def test_send_too_large_message_async(connection_str):
    if sys.platform.startswith('darwin'):
        pytest.skip("Skipping on OSX - open issue regarding message size")
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer()
    try:
        data = EventData(b"A" * 1100000)
        with pytest.raises(EventDataSendError):
            await sender.send(data)
    finally:
        await sender.close()
        await client.close()


async def test_send_non_ascii_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer(partition_id="0")
    async with sender:
        await sender.send(EventData("é,è,à,ù,â,ê,î,ô,û"))
        await sender.send(EventData(json.dumps({"foo": "漢字"})))

    await asyncio.sleep(1)
    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 2
    assert partition_0[0].body_as_str() == "é,è,à,ù,â,ê,î,ô,û"
    assert partition_0[1].body_as_json() == {"foo": "漢字"}
    await client.close()


async def test_get_partition_properties(live_eventhub):
    client = EventHubClient(live_eventhub['hostname'],
                            live_eventhub['event_hub'],
                            EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    properties = await client.get_partition_properties('0')
    assert properties['event_hub_path'] == live_eventhub['event_hub'] \
        and properties['id'] == '0' \
        and 'beginning_sequence_number' in properties \
        and 'last_enqueued_sequence_number' in properties \
        and 'last_enqueued_offset' in properties \
        and 'last_enqueued_time_utc' in properties \
        and 'is_empty' in properties
    await client.close()


async def test_send_array_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer()
    async with sender:
        await sender.send(EventData([b"A", b"B", b"C"]))

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=1))

    assert len(received) == 1
    assert list(received[0].body) == [b"A", b"B", b"C"]
    await client.close()


async def test_send_single_event_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer()
    async with sender:
        await sender.send(EventData(b"A single event"))

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=1))

    assert len(received) == 1
    assert list(received[0].body)[0] == b"A single event"
    await client.close()


async def test_send_multiple_clients_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str)
    sender_0 = client._create_producer(partition_id="0")
    sender_1 = client._create_producer(partition_id="1")
    async with sender_0:
        await sender_0.send(EventData(b"Message 0"))
    async with sender_1:
        await sender_1.send(EventData(b"Message 1"))

    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 1
    partition_1 = receivers[1].receive(timeout=2)
    assert len(partition_1) == 1
    await client.close()


async def test_receive_end_of_stream_async(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str)
    receiver = client._create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition('@latest'))
    async with receiver:
        received = await receiver.receive(timeout=5)
        assert len(received) == 0

        senders[0].send(EventData(b"Receiving only a single event"))
        received = await receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[-1].body)[0] == b"Receiving only a single event"
    await client.close()


async def test_send_partition_batch_async(connstr_receivers):
    connection_str, receivers = connstr_receivers

    def batched():
        for i in range(10):
            yield EventData("Event number {}".format(i))

    client = EventHubClient.from_connection_string(connection_str)
    sender = client._create_producer(partition_id="1")
    async with sender:
        await sender.send(batched())

    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 0
    partition_1 = receivers[1].receive(timeout=2)
    assert len(partition_1) == 10
    await client.close()


async def test_max_receivers_async(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str)
    receivers = []
    for i in range(6):
        receivers.append(client._create_consumer(consumer_group="$default", partition_id="0", prefetch=1000, event_position=EventPosition('@latest')))
    # Event Hubs allows at most 5 concurrent receivers per partition per consumer group,
    # so exactly one of the 6 pumps is expected to fail.
    outputs = await asyncio.gather(
        pump(receivers[0]),
        pump(receivers[1]),
        pump(receivers[2]),
        pump(receivers[3]),
        pump(receivers[4]),
        pump(receivers[5]),
        return_exceptions=True)
    print(outputs)
    failed = [o for o in outputs if isinstance(o, EventHubError)]
    assert len(failed) == 1
    print(failed[0].message)
    await client.close()