async def test_receive_with_inclusive_offset_async(connstr_senders):
    """An inclusive offset re-delivers the event the offset was taken from."""
    connection_str, senders = connstr_senders
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest'))
    await client.run_async()
    try:
        # Nothing should be buffered before anything is sent.
        assert len(await receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Data"))
        time.sleep(1)
        batch = await receiver.receive(timeout=5)
        assert len(batch) == 1
        checkpoint = batch[0].offset
        # inclusive=True means the checkpointed event itself is replayed.
        offset_receiver = client.add_async_receiver(
            "$default", "0", offset=Offset(checkpoint.value, inclusive=True))
        await client.run_async()
        assert len(await offset_receiver.receive(timeout=5)) == 1
    finally:
        await client.stop_async()
def test_receive_with_datetime_sync(connstr_senders):
    """A datetime offset resumes delivery from events enqueued after that time."""
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    info = client.get_eventhub_info()
    assert info["partition_ids"] == ["0", "1"]
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()
        # Hub metadata is still reachable once the client is running.
        refreshed = client.get_eventhub_info()
        assert refreshed["partition_ids"] == ["0", "1"]
        assert len(receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Data"))
        batch = receiver.receive(timeout=5)
        assert len(batch) == 1
        enqueue_time = batch[0].enqueued_time
        assert list(batch[0].body) == [b'Data']
        assert batch[0].body_as_str() == "Data"
        # Receiving from the first event's enqueue time skips that event...
        dt_receiver = client.add_receiver("$default", "0", offset=Offset(enqueue_time))
        client.run()
        assert len(dt_receiver.receive(timeout=5)) == 0
        # ...but delivers anything sent afterwards.
        senders[0].send(EventData(b"Message after timestamp"))
        assert len(dt_receiver.receive(timeout=5)) == 1
    finally:
        client.stop()
def test_receive_with_sequence_no(connstr_senders):
    """A sequence-number offset resumes delivery with the next event in sequence."""
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()
        assert len(receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Data"))
        time.sleep(1)
        batch = receiver.receive(timeout=5)
        assert len(batch) == 1
        seq_no = batch[0].sequence_number
        # A sequence-number offset is exclusive: the checkpointed event is skipped.
        seq_receiver = client.add_receiver("$default", "0", offset=Offset(seq_no))
        client.run()
        assert len(seq_receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Message next in sequence"))
        time.sleep(1)
        assert len(seq_receiver.receive(timeout=5)) == 1
    finally:
        client.stop()
def test_receive_with_inclusive_offset(connstr_senders):
    """An inclusive offset re-delivers the event the offset was taken from."""
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()
        assert len(receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Data"))
        time.sleep(1)
        batch = receiver.receive(timeout=5)
        assert len(batch) == 1
        checkpoint = batch[0].offset
        assert list(batch[0].body) == [b'Data']
        assert batch[0].body_as_str() == "Data"
        # inclusive=True means the checkpointed event itself is replayed.
        offset_receiver = client.add_receiver(
            "$default", "0", offset=Offset(checkpoint.value, inclusive=True))
        client.run()
        assert len(offset_receiver.receive(timeout=5)) == 1
    finally:
        client.stop()
async def test_receive_with_datetime_async(connection_str, senders):
    """A datetime offset resumes async delivery from events enqueued after it."""
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest'))
    await client.run_async()
    try:
        assert len(await receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Data"))
        batch = await receiver.receive(timeout=5)
        assert len(batch) == 1
        enqueue_time = batch[0].enqueued_time
        # Receiving from the first event's enqueue time skips that event...
        dt_receiver = client.add_async_receiver("$default", "0", offset=Offset(enqueue_time))
        await client.run_async()
        assert len(await dt_receiver.receive(timeout=5)) == 0
        # ...but delivers anything sent afterwards.
        senders[0].send(EventData(b"Message after timestamp"))
        time.sleep(1)
        assert len(await dt_receiver.receive(timeout=5)) == 1
    finally:
        await client.stop_async()
class Consumer:
    """Reads events from partition "0" of an Event Hub, checkpointing the
    consumer's position in a Redis cache keyed per hub and consumer group.
    """

    consumer_group = None
    eventhubs_client = None
    offset = Offset("-1")  # default start position: beginning of the stream
    redis_cache = None

    def __init__(self, eventhub, address, user, key, consumer_group,
                 redis_hostname, redis_key):
        """Create the Event Hubs client and the Redis-backed offset cache.

        :param eventhub: Event Hub (entity) name, used only in the Redis topic key.
        :param address: full AMQP address of the Event Hub.
        :param user: SAS policy name.
        :param key: SAS key.
        :param consumer_group: consumer group to receive on.
        :param redis_hostname: Redis host for offset storage.
        :param redis_key: Redis access key.
        """
        self.consumer_group = consumer_group
        self.eventhubs_client = EventHubClient(address, debug=False,
                                               username=user, password=key)
        redis_topic = f"eventhubs-{eventhub}-{consumer_group}"
        self.redis_cache = RedisCache(redis_hostname, redis_key, redis_topic)

    def recieve(self):
        """Receive one batch (up to the prefetch window) from partition "0",
        starting at the offset stored in Redis.

        Kept under its original misspelled name for backward compatibility;
        prefer the correctly spelled ``receive`` alias in new code.
        """
        start_offset = Offset(self.redis_cache.get_offset())
        receiver = self.eventhubs_client.add_receiver(
            self.consumer_group, "0", prefetch=5000, offset=start_offset)
        self.eventhubs_client.run()
        messages = receiver.receive(timeout=100)
        self.eventhubs_client.stop()
        return messages

    # Correctly spelled alias for `recieve` (typo preserved above for existing callers).
    receive = recieve

    def commit(self, event_data):
        """Persist the position of a processed event to Redis.

        NOTE(review): this stores the event's sequence_number, while
        ``recieve`` reads the cached value back as an Offset — confirm the
        cache is meant to hold sequence numbers rather than offset values.
        """
        self.redis_cache.set_offset(event_data.sequence_number)
def test_receive_with_custom_datetime_sync(connstr_senders):
    """Events enqueued before a chosen timestamp are skipped by a datetime offset."""
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    for _ in range(5):
        senders[0].send(EventData(b"Message before timestamp"))
    # Leave a clear gap so the minute-truncated cutoff falls after the first batch.
    time.sleep(60)
    now = datetime.datetime.utcnow()
    cutoff = datetime.datetime(now.year, now.month, now.day, now.hour, now.minute)
    for _ in range(5):
        senders[0].send(EventData(b"Message after timestamp"))
    receiver = client.add_receiver("$default", "0", offset=Offset(cutoff))
    try:
        client.run()
        collected = []
        batch = receiver.receive(timeout=1)
        while batch:
            collected.extend(batch)
            batch = receiver.receive(timeout=1)
        # Only the five post-cutoff events should arrive.
        assert len(collected) == 5
        for event in collected:
            assert event.body_as_str() == "Message after timestamp"
            assert event.enqueued_time > cutoff
    finally:
        client.stop()
def run():
    """Fan events from every IoT Hub partition into InfluxDB via a thread pool."""
    logger.info('starting')
    iot_conf, influx_conf = get_config('conf/config.ini')
    influxdb = InfluxAdapter(influx_conf['HOSTNAME'], influx_conf['PORT'],
                             influx_conf['USER'], influx_conf['PASSWORD'],
                             influx_conf['DATABASE'])
    client = EventHubClient.from_iothub_connection_string(
        iot_conf['IOTHUB_CONNSTR'], debug=False)
    partition_count = iot_conf.getint('PARTITION_COUNT')
    # One '@latest' receiver per partition on the IoT Hub events endpoint.
    receivers = [
        client.add_receiver("$default", str(pid), offset=Offset("@latest"),
                            operation='/messages/events')
        for pid in range(partition_count)
    ]
    try:
        client.run()
        # dummy.Pool = thread pool; one worker per partition receiver.
        pool = multiprocessing.dummy.Pool(partition_count)
        pool.map(functools.partial(receivefunc, influxdb=influxdb), receivers)
    except KeyboardInterrupt:
        logger.info('stopping')
        stop_event.set()
    finally:
        client.stop()
def test_receive_with_offset_sync(connection_str, senders):
    """An offset taken from a received event resumes delivery after that event."""
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    assert client.get_eventhub_info()["partition_ids"] == ["0", "1"]
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()
        # Metadata is still reachable once the client is running.
        assert client.get_eventhub_info()["partition_ids"] == ["0", "1"]
        assert len(receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Data"))
        batch = receiver.receive(timeout=5)
        assert len(batch) == 1
        checkpoint = batch[0].offset
        # Resuming from the checkpoint excludes the checkpointed event itself.
        offset_receiver = client.add_receiver("$default", "0", offset=checkpoint)
        client.run()
        assert len(offset_receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Message after offset"))
        assert len(offset_receiver.receive(timeout=5)) == 1
    finally:
        client.stop()
def get():
    """HTTP endpoint: stream events since ?since=<offset> as one JSON array."""
    since = request.args.get('since')
    if since is None:
        since = -1  # no checkpoint supplied: read from the start of the stream
    client = EventHubClient(address, debug=False, username=user, password=key)
    receiver = client.add_receiver(consumergroup, PARTITION, prefetch=1000,
                                   offset=Offset(since), keep_alive=72000)
    client.run()

    def generate():
        # Emit a JSON array incrementally, batch by batch.
        batch = receiver.receive(max_batch_size=100, timeout=500)
        yield '['
        emitted = 0
        while batch:
            for event_data in batch:
                if emitted > 0:
                    yield ','
                last_sn = event_data.sequence_number
                entity = literal_eval(str(event_data.message))
                # Expose the sequence number as the entity's update marker.
                entity.update({"_updated": str(last_sn)})
                yield json.dumps(entity)
                emitted += 1
            batch = receiver.receive(max_batch_size=100, timeout=500)
        yield ']'

    return Response(generate(), mimetype='application/json')
async def test_max_receivers_async(connstr_senders):
    """Opening six receivers on one partition makes exactly one fail (5 max)."""
    connection_str, senders = connstr_senders
    client = EventHubClientAsync.from_connection_string(connection_str, debug=True)
    receivers = [
        client.add_async_receiver("$default", "0", prefetch=1000,
                                  offset=Offset('@latest'))
        for _ in range(6)
    ]
    try:
        await client.run_async()
        # Pump all six concurrently; one should surface an EventHubError.
        outputs = await asyncio.gather(*(pump(r) for r in receivers),
                                       return_exceptions=True)
        print(outputs)
        failed = [o for o in outputs if isinstance(o, EventHubError)]
        assert len(failed) == 1
        print(failed[0].message)
    finally:
        await client.stop_async()
def cosmosDBServiceToCosmosDB(self):
    """Relay events from an Event Hub partition into a Cosmos DB collection.

    Replays partition "0" from offset "0" and inserts every payload longer
    than 5 characters as a document into COLLECTION_ID.
    """
    database_link = 'dbs/' + DATABASE_ID
    collection_link = database_link + '/colls/' + COLLECTION_ID
    counter = 0    # NOTE(review): never read or updated below — appears unused
    filepath = ''  # NOTE(review): never used below — appears unused
    CONSUMER_GROUP = "$Default"
    OFFSET = Offset("0")  # replay the partition from the very beginning
    PARTITION = "0"
    eh_client = EventHubClient('amqps://xxxxx.servicebus.windows.net/txxxxqueue', debug=True, username='******', password='******')
    receiver = eh_client.add_receiver(CONSUMER_GROUP, PARTITION, prefetch=300, offset=OFFSET)
    try:
        eh_client.run()
        while True:
            for event_data in receiver.receive(timeout=100):
                rcv_msg = str(event_data.message)
                # Filter out null/empty messages
                if len(rcv_msg)>5:
                    # Insert the message into CosmosDB
                    cosmos_client.CreateDocument(collection_link, json.loads(str(event_data.message)))
            # NOTE(review): stopping the client inside the endless loop looks
            # wrong — the next iteration receives on a stopped client. Confirm
            # whether this should be a `break` or belongs after the loop.
            eh_client.stop()
    except Exception as e:
        print("Failed Receiving Record {}".format(str(e)) )
    finally:
        eh_client.stop()
async def test_max_receivers_async(connection_str, senders):
    """Only five concurrent receivers per partition are allowed; one must fail."""
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    receivers = [
        client.add_async_receiver("$default", "0", prefetch=1000,
                                  offset=Offset('@latest'))
        for _ in range(6)
    ]
    await client.run_async()
    try:
        # Pump all six concurrently and count the EventHubError failures.
        outputs = await asyncio.gather(*(pump(r) for r in receivers),
                                       return_exceptions=True)
        errors = [o for o in outputs if isinstance(o, EventHubError)]
        assert len(errors) == 1
    finally:
        await client.stop_async()
def test_long_running_receive_async(connection_str):
    """Long-running async receive stress test, configurable via CLI flags.

    Builds an EventHubClientAsync from either --conn-str or --address,
    starts one async receiver per partition, and pumps them concurrently
    for --duration seconds on the current event loop.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30)
    parser.add_argument("--consumer", help="Consumer group name", default="$default")
    parser.add_argument("--partitions", help="Comma seperated partition IDs")
    parser.add_argument("--offset", help="Starting offset", default="-1")
    parser.add_argument("--conn-str", help="EventHub connection string", default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")
    loop = asyncio.get_event_loop()
    args, _ = parser.parse_known_args()
    # Prefer a connection string; fall back to an explicit address + SAS pair.
    if args.conn_str:
        client = EventHubClientAsync.from_connection_string(
            args.conn_str, eventhub=args.eventhub, auth_timeout=240, debug=False)
    elif args.address:
        client = EventHubClientAsync(args.address, auth_timeout=240,
                                     username=args.sas_policy,
                                     password=args.sas_key)
    else:
        # No endpoint supplied: skip under pytest, otherwise fail loudly.
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")
    try:
        # Default to pumping every partition the hub reports.
        if not args.partitions:
            partitions = loop.run_until_complete(get_partitions(client))
        else:
            partitions = args.partitions.split(",")
        pumps = []
        for pid in partitions:
            receiver = client.add_async_receiver(consumer_group=args.consumer,
                                                 partition=pid,
                                                 offset=Offset(args.offset),
                                                 prefetch=50)
            pumps.append(pump(pid, receiver, args, args.duration))
        loop.run_until_complete(client.run_async())
        loop.run_until_complete(asyncio.gather(*pumps))
    finally:
        loop.run_until_complete(client.stop_async())
def recieve(self):
    """Receive one batch from partition "0", starting at the Redis-stored offset.

    NOTE: method name spelling ("recieve") is preserved for existing callers.
    """
    start_offset = Offset(self.redis_cache.get_offset())
    receiver = self.eventhubs_client.add_receiver(
        self.consumer_group, "0", prefetch=5000, offset=start_offset)
    self.eventhubs_client.run()
    batch = receiver.receive(timeout=100)
    self.eventhubs_client.stop()
    return batch
def test_example_eventhub_sync_receiver_ops(live_eventhub_config, connection_str): import os # [START create_eventhub_client_receiver_instance] from azure.eventhub import EventHubClient, Offset client = EventHubClient.from_connection_string(connection_str) receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest')) # [END create_eventhub_client_receiver_instance] # [START eventhub_client_receiver_open] client = EventHubClient.from_connection_string(connection_str) receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest')) try: # Open the Receiver using the supplied conneciton. receiver.open() # Start receiving except: raise finally: # Close down the receive handler. receiver.close() # [END eventhub_client_receiver_open] # [START eventhub_client_receiver_close] client = EventHubClient.from_connection_string(connection_str) receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest')) try: # Open the Receiver using the supplied conneciton. receiver.open() # Start receiving except: raise finally: # Close down the receive handler. receiver.close()
class _EventHubTest(PerfStressTest):
    """Base class for Event Hubs perf-stress tests.

    Holds one sync and one async client. By default the clients are shared
    across all test instances via class attributes; pass --no-client-share
    to give each instance its own pair.
    """
    # Shared (class-level) clients, populated lazily on first construction.
    eventhub_client = None
    async_eventhub_client = None
    consumer_group = '$Default'
    partition = '0'
    # Default starting position: the beginning of the partition.
    offset = Offset('-1')

    def __init__(self, arguments):
        """Build (or reuse) the sync and async clients from environment config.

        Reads AZURE_EVENTHUB_CONNECTION_STRING and AZURE_EVENTHUB_NAME.
        """
        super().__init__(arguments)
        connection_string = self.get_from_env(
            "AZURE_EVENTHUB_CONNECTION_STRING")
        eventhub_name = self.get_from_env("AZURE_EVENTHUB_NAME")
        if self.args.no_client_share:
            # Private clients for this instance only.
            self.eventhub_client = EventHubClient.from_connection_string(
                connection_string, eventhub=eventhub_name)
            self.async_eventhub_client = EventHubClientAsync.from_connection_string(
                connection_string, eventhub=eventhub_name)
        else:
            # Lazily initialize the class-level shared clients once.
            if not _EventHubTest.eventhub_client:
                _EventHubTest.eventhub_client = EventHubClient.from_connection_string(
                    connection_string, eventhub=eventhub_name)
                _EventHubTest.async_eventhub_client = EventHubClientAsync.from_connection_string(
                    connection_string, eventhub=eventhub_name)
            self.eventhub_client = _EventHubTest.eventhub_client
            self.async_eventhub_client = _EventHubTest.async_eventhub_client

    async def close(self):
        """Stop both clients, then run the base-class teardown."""
        self.eventhub_client.stop()
        await self.async_eventhub_client.stop_async()
        await super().close()

    @staticmethod
    def add_arguments(parser):
        """Register the Event Hubs-specific CLI options on *parser*."""
        super(_EventHubTest, _EventHubTest).add_arguments(parser)
        parser.add_argument(
            '--event-size',
            nargs='?',
            type=int,
            help='Size of a single event. Defaults to 100 bytes',
            default=100)
        parser.add_argument(
            '--no-client-share',
            action='store_true',
            help=
            'Create one EventHubClient per test instance. Default is to share a single EventHubClient.',
            default=False)
        parser.add_argument(
            '--num-events',
            nargs='?',
            type=int,
            help='Number of events to send or receive. Defaults to 100',
            default=100)
def test_message_body_types(connstr_senders):
    """EventData bodies round-trip as bytes, str, JSON text, and AMQP values."""
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))

    def send_and_receive(payload):
        # Send one event and return the single event that arrives for it.
        senders[0].send(EventData(payload))
        time.sleep(1)
        batch = receiver.receive(timeout=5)
        assert len(batch) == 1
        return batch[0]

    try:
        client.run()
        assert len(receiver.receive(timeout=5)) == 0

        event = send_and_receive(b"Bytes Data")
        assert list(event.body) == [b'Bytes Data']
        assert event.body_as_str() == "Bytes Data"
        with pytest.raises(TypeError):
            event.body_as_json()

        event = send_and_receive("Str Data")
        assert list(event.body) == [b'Str Data']
        assert event.body_as_str() == "Str Data"
        with pytest.raises(TypeError):
            event.body_as_json()

        event = send_and_receive(b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}')
        assert list(event.body) == [b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}']
        assert event.body_as_str() == '{"test_value": "JSON bytes data", "key1": true, "key2": 42}'
        assert event.body_as_json() == {"test_value": "JSON bytes data", "key1": True, "key2": 42}

        event = send_and_receive('{"test_value": "JSON str data", "key1": true, "key2": 42}')
        assert list(event.body) == [b'{"test_value": "JSON str data", "key1": true, "key2": 42}']
        assert event.body_as_str() == '{"test_value": "JSON str data", "key1": true, "key2": 42}'
        assert event.body_as_json() == {"test_value": "JSON str data", "key1": True, "key2": 42}

        # Non-string payloads come back as raw AMQP values.
        event = send_and_receive(42)
        assert event.body_as_str() == "42"
        assert event.body == 42
    except:
        raise
    finally:
        client.stop()
def receivers(connection_str):
    """Fixture: yield one '@latest' receiver per partition, pre-drained.

    Fix: removed the unused ``recv_offset`` local left over from an earlier
    refactor (each receiver already builds its own Offset).
    """
    client = EventHubClient.from_connection_string(connection_str, debug=True)
    eh_hub_info = client.get_eventhub_info()
    partitions = eh_hub_info["partition_ids"]
    receivers = []
    for p in partitions:
        receivers.append(
            client.add_receiver("$default", p, prefetch=500,
                                offset=Offset("@latest")))
    client.run()
    # Drain anything immediately available so tests start from a clean stream.
    for r in receivers:
        r.receive(timeout=1)
    yield receivers
    client.stop()
async def test_receive_batch_async(connection_str, senders):
    """max_batch_size caps how many buffered events one receive call returns."""
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    receiver = client.add_async_receiver("$default", "0", prefetch=500,
                                         offset=Offset('@latest'))
    await client.run_async()
    try:
        assert len(await receiver.receive(timeout=5)) == 0
        for _ in range(10):
            senders[0].send(EventData(b"Data"))
        # Ten events are available but the batch is capped at five.
        batch = await receiver.receive(max_batch_size=5, timeout=5)
        assert len(batch) == 5
    finally:
        await client.stop_async()
def test_receive_with_datetime(connection_str, senders):
    """A datetime offset resumes delivery from events enqueued after that time."""
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()
        assert len(receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Data"))
        batch = receiver.receive(timeout=5)
        assert len(batch) == 1
        enqueue_time = batch[0].enqueued_time
        # Receiving from the first event's enqueue time skips that event...
        dt_receiver = client.add_receiver("$default", "0", offset=Offset(enqueue_time))
        client.run()
        assert len(dt_receiver.receive(timeout=5)) == 0
        # ...but delivers anything sent afterwards.
        senders[0].send(EventData(b"Message after timestamp"))
        assert len(dt_receiver.receive(timeout=5)) == 1
    finally:
        client.stop()
def receive_one():
    """Fetch events once, starting inclusively from a hard-coded offset."""
    auth, db = get_firebase_auth_and_db()
    user = auth.sign_in_with_email_and_password(USER, PASSWORD)
    id_token = user['idToken']
    # NOTE(review): this local is never used below — the hard-coded
    # offset_value is used instead. Confirm whether the Firebase-stored
    # offset was meant to drive the Offset, or whether constructing
    # LastReceivedEventOffset has needed side effects; otherwise remove it.
    last_received_event_offset = LastReceivedEventOffset(db, id_token)
    offset_value = "72500"
    # inclusive=True: the event at this offset is itself delivered.
    offset = Offset(offset_value, inclusive=True)
    client = build_client()
    event_hub_retriever = EventHubRetriever(client, offset)
    event_hub_retriever.fetch()
    client.stop()
async def test_receive_end_of_stream_async(connection_str, senders):
    """'@latest' delivers only events sent after the receiver attached."""
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest'))
    await client.run_async()
    try:
        assert len(await receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Receiving only a single event"))
        batch = await receiver.receive(timeout=5)
        assert len(batch) == 1
        assert list(batch[-1].body)[0] == b"Receiving only a single event"
    finally:
        await client.stop_async()
def eventhubReceiveToFile(test_queue_url):
    """Drain an Event Hub partition to rotating local files.

    Replays partition "0" from offset "0" and appends each payload (longer
    than 5 chars) to a file, starting a new file via createfile() on the
    first message and again after 50000 messages.
    """
    # next, we dequeue these messages - 10 messages at a time
    # (SQS max limit) till the queue is exhausted.
    # in production/real setup, I suggest using long polling as
    # you get billed for each request, regardless of an empty response
    # NOTE(review): the comment above talks about SQS but the code reads
    # from Event Hubs — likely copied from an SQS sample; confirm intent.
    counter = 0
    filepath = ''
    CONSUMER_GROUP = "$Default"
    OFFSET = Offset("0")  # replay the partition from the very beginning
    PARTITION = "0"
    client = EventHubClient('amqps://xxxxx.servicebus.windows.net/txxxxxqueue', debug=True, username='******', password='******')
    receiver = client.add_receiver(CONSUMER_GROUP, PARTITION, prefetch=300, offset=OFFSET)
    try:
        client.run()
        while True:
            for event_data in receiver.receive(timeout=100):
                rcv_msg = str(event_data.message)
                #print((rcv_msg))
                if len(rcv_msg)>=5:
                    if counter!=0 and counter <= 50000:
                        # Mid-run: append to the current file.
                        #print(message['Body'])
                        file = open(filepath,'a')
                        file.write(rcv_msg)
                        file.write('\n')
                        # next, we delete the message from the queue so no one else will process it again
                    elif counter == 0:
                        # First message: start a fresh file.
                        filepath = createfile()
                        # print(filepath)
                        file = open(filepath,'w')
                    else:
                        # Rolled past 50000 messages: rotate to a new file.
                        filepath = createfile()
                        #print(filepath)
                        counter = 1
                        file = open(filepath,'w')
                    # NOTE(review): the file is reopened/closed per message,
                    # and messages arriving when counter==0 or on rotation are
                    # never written — confirm whether that loss is intended.
                    file.close()
                    counter = counter + 1
    except Exception as e:
        print("Failed Receiving Record {}".format(str(e)) )
    finally:
        client.stop()
async def open_clients_async(self):
    """Connect to the Event Hub and wire up this partition's receiver.

    Resolves the checkpointed starting offset first, then creates the
    async client, the receive handler, and the PartitionReceiver wrapper.
    throws EventHubsException, IOException, InterruptedException,
    ExecutionException
    """
    # Must know where to start from before opening the receiver.
    await self.partition_context.get_initial_offset_async()
    self.eh_client = EventHubClientAsync(
        self.host.eh_config.client_address,
        debug=self.host.eph_options.debug_trace)
    self.partition_receive_handler = self.eh_client.add_async_receiver(
        self.partition_context.consumer_group_name,
        self.partition_context.partition_id,
        Offset(self.partition_context.offset),
        prefetch=self.host.eph_options.prefetch_count,
        loop=self.loop)
    self.partition_receiver = PartitionReceiver(self)
def test_long_running_receive():
    """Long-running synchronous receive stress test, configurable via CLI flags.

    Builds an EventHubClient from either --conn-str (default: the
    EVENT_HUB_CONNECTION_STR env var) or --address, starts one receiver per
    partition, and pumps them for --duration seconds.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30)
    parser.add_argument("--consumer", help="Consumer group name", default="$default")
    parser.add_argument("--partitions", help="Comma seperated partition IDs")
    parser.add_argument("--offset", help="Starting offset", default="-1")
    parser.add_argument("--conn-str", help="EventHub connection string", default=os.environ.get('EVENT_HUB_CONNECTION_STR'))
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument("--sas-policy", help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")
    args, _ = parser.parse_known_args()
    # Prefer a connection string; fall back to an explicit address + SAS pair.
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str, eventhub=args.eventhub, debug=False)
    elif args.address:
        client = EventHubClient(
            args.address,
            username=args.sas_policy,
            password=args.sas_key)
    else:
        # No endpoint supplied: skip under pytest, otherwise fail loudly.
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")
    try:
        # Default to pumping every partition the hub reports.
        if not args.partitions:
            partitions = get_partitions(client)
        else:
            partitions = args.partitions.split(",")
        pumps = {}
        for pid in partitions:
            pumps[pid] = client.add_receiver(
                consumer_group=args.consumer,
                partition=pid,
                offset=Offset(args.offset),
                prefetch=50)
        client.run()
        pump(pumps, args.duration)
    finally:
        client.stop()
async def test_receive_batch_with_app_prop_async(connstr_senders):
    """Application properties on a batched event appear on every received event."""
    pytest.skip("Waiting on uAMQP release")
    connection_str, senders = connstr_senders

    def batched():
        # Mixed generator: first ten plain strings, then ten EventData objects.
        for i in range(10):
            yield "Event Data {}".format(i)
        for i in range(10, 20):
            yield EventData("Event Data {}".format(i))

    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    receiver = client.add_async_receiver("$default", "0", prefetch=500,
                                         offset=Offset('@latest'))
    try:
        await client.run_async()
        assert len(await receiver.receive(timeout=5)) == 0
        app_prop_key = "raw_prop"
        app_prop_value = "raw_value"
        batch_event = EventData(batch=batched())
        batch_event.application_properties = {app_prop_key: app_prop_value}
        senders[0].send(batch_event)
        await asyncio.sleep(1)
        received = await receiver.receive(max_batch_size=15, timeout=5)
        assert len(received) == 15
        key_bytes = app_prop_key.encode('utf-8')
        for index, message in enumerate(received):
            assert list(message.body)[0] == "Event Data {}".format(index).encode('utf-8')
            # The batch-level properties are stamped onto every event.
            assert key_bytes in message.application_properties
            assert dict(message.application_properties)[key_bytes] == app_prop_value.encode('utf-8')
    finally:
        await client.stop_async()
def test_receive_end_of_stream(connstr_senders):
    """'@latest' delivers only events sent after the receiver attached."""
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()
        assert len(receiver.receive(timeout=5)) == 0
        senders[0].send(EventData(b"Receiving only a single event"))
        batch = receiver.receive(timeout=5)
        assert len(batch) == 1
        assert batch[0].body_as_str() == "Receiving only a single event"
        assert list(batch[-1].body)[0] == b"Receiving only a single event"
    finally:
        client.stop()
def receive_all_partitions(self):
    """Drain one batch from every partition, tagging each message with its
    partition id. Each partition resumes from its own Redis-stored offset.
    """
    collected = []
    for pid in self.partition_ids:
        start = Offset(self.redis_cache_partition_aware[pid].get_offset())
        receiver = self.eventhubs_client.add_receiver(
            self.consumer_group, pid, prefetch=5000, offset=start)
        self.eventhubs_client.run()
        for msg in receiver.receive(timeout=100):
            collected.append({
                "message": msg,
                "partition_id": pid
            })
        self.eventhubs_client.stop()
    return collected
def receive_all_using_last_received_event_sequence():
    """Drain every pending event, persisting each offset back to Firebase.

    Resumes inclusively from the last stored offset and keeps polling until
    a receive window comes back empty.
    """
    auth, db = get_firebase_auth_and_db()
    user = auth.sign_in_with_email_and_password(USER, PASSWORD)
    id_token = user['idToken']
    total = 0
    has_data = True
    client = build_client()
    tracker = LastReceivedEventOffset(db, id_token)
    last_offset = tracker.get_offset()
    receiver = add_receiver(client, Offset(last_offset, inclusive=True))
    start_time = time.time()
    while has_data:
        # Assume the stream is drained unless this window yields something.
        has_data = False
        for event_data in receiver.receive(timeout=10):
            has_data = True
            sequence_number = event_data.sequence_number
            offset = event_data.offset
            message = event_data.message
            # Checkpoint after every event so a crash loses at most one.
            tracker.write_offset(offset.value)
            logger.info(
                f"Offset:{offset.value}||Sequence:{sequence_number}==>Message:{message}"
            )
            total += 1
    run_time = time.time() - start_time
    logger.info(f"Received {total} messages in {run_time} seconds")
    client.stop()