async def test_send_with_partition_key_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    sender = client.add_async_sender()
    await client.run_async()

    data_val = 0
    for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]:
        partition_key = b"test_partition_" + partition
        for i in range(50):
            data = EventData(str(data_val))
            data.partition_key = partition_key
            data_val += 1
            await sender.send(data)
    await client.stop_async()

    found_partition_keys = {}
    for index, partition in enumerate(receivers):
        received = partition.receive(timeout=5)
        for message in received:
            try:
                existing = found_partition_keys[message.partition_key]
                assert existing == index
            except KeyError:
                found_partition_keys[message.partition_key] = index
async def test_send_partition_key_with_partition_async(connection_str):
    client = EventHubClientAsync.from_connection_string(connection_str, debug=True)
    sender = client.add_async_sender(partition="1")
    try:
        await client.run_async()
        data = EventData(b"Data")
        data.partition_key = b"PKey"
        with pytest.raises(ValueError):
            await sender.send(data)
    finally:
        await client.stop_async()
async def test_send_batch_with_app_prop_async(connstr_receivers):
    pytest.skip("Waiting on uAMQP release")
    connection_str, receivers = connstr_receivers

    def batched():
        for i in range(10):
            yield "Event number {}".format(i)
        for i in range(10, 20):
            yield EventData("Event number {}".format(i))

    client = EventHubClientAsync.from_connection_string(connection_str, debug=False)
    sender = client.add_async_sender()
    try:
        await client.run_async()
        app_prop_key = "raw_prop"
        app_prop_value = "raw_value"
        batch_app_prop = {app_prop_key: app_prop_value}

        batch_event = EventData(batch=batched())
        batch_event.application_properties = batch_app_prop
        await sender.send(batch_event)
    except:
        raise
    finally:
        await client.stop_async()

    time.sleep(1)
    received = []
    for r in receivers:
        received.extend(r.receive(timeout=3))

    assert len(received) == 20
    for index, message in enumerate(received):
        assert list(message.body)[0] == "Event number {}".format(index).encode('utf-8')
        assert (app_prop_key.encode('utf-8') in message.application_properties) \
            and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8'))
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import os
from datetime import datetime

from azure.eventhub import EventHubConsumerClient, EventHubProducerClient, EventData

RECEIVE_TIMEOUT = 30
CONSUMER_GROUP = "$Default"
STARTING_POSITION = "-1"
TEST_EVENTS = [
    EventData(b"Test Event 1 in Python"),
    EventData(b"Test Event 2 in Python"),
    EventData(b"Test Event 3 in Python"),
]


class EventHub:
    def __init__(self):
        # This test requires a previously created Event Hub.
        # In this example the name is "myeventhub", but it can be changed below.
        connection_string = os.environ["EVENT_HUBS_CONNECTION_STRING"]
        event_hub_name = "myeventhub"

        self.consumer_client = EventHubConsumerClient.from_connection_string(
            connection_string, CONSUMER_GROUP, idle_timeout=RECEIVE_TIMEOUT)
        self.producer_client = EventHubProducerClient.from_connection_string(
            connection_string)
        self.received_event_count = 0
def send_event(self, payload):
    data = json.dumps(payload)
    logger.info("Sending message: {}".format(data))
    self.sender.send(EventData(data))
def receive(self):
    time.sleep(0.5)
    self._on_event_received(EventData("test data"))
try:
    if not ADDRESS:
        raise ValueError("No EventHubs URL supplied.")

    # Create Event Hubs client
    client = EventHubClient(ADDRESS, debug=False, username=USER, password=KEY)
    sender = client.add_sender(partition="0")
    client.run()
    try:
        start_time = time.time()
        for i in range(20):
            json_string = "{'passenger_count': '%s', 'trip_time_in_secs': '%s', 'trip_distance': '%s', 'total_amount': '%s', 'created_datetime': '%s'}" % (
                get_passenger(), get_trip_time(), get_distance(), get_total_amount(), str(datetime.datetime.now()))
            print("Sending message: {}".format(json_string))
            sender.send(EventData(json_string))
            time.sleep(1)
    except:
        raise
    finally:
        end_time = time.time()
        client.stop()
        run_time = end_time - start_time
        logger.info("Runtime: {} seconds".format(run_time))
except KeyboardInterrupt:
    pass
def test_example_eventhub_sync_send_and_receive(live_eventhub_config):
    # [START create_eventhub_client_connstr]
    import os
    from azure.eventhub import EventHubClient

    connection_str = "Endpoint=sb://{}/;SharedAccessKeyName={};SharedAccessKey={};EntityPath={}".format(
        os.environ['EVENT_HUB_HOSTNAME'],
        os.environ['EVENT_HUB_SAS_POLICY'],
        os.environ['EVENT_HUB_SAS_KEY'],
        os.environ['EVENT_HUB_NAME'])
    client = EventHubClient.from_connection_string(connection_str)
    # [END create_eventhub_client_connstr]

    from azure.eventhub import EventData, Offset

    # [START create_eventhub_client_sender]
    client = EventHubClient.from_connection_string(connection_str)
    # Add a sender to the client object.
    sender = client.add_sender(partition="0")
    # [END create_eventhub_client_sender]

    # [START create_eventhub_client_receiver]
    client = EventHubClient.from_connection_string(connection_str)
    # Add a receiver to the client object.
    receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest'))
    # [END create_eventhub_client_receiver]

    # [START create_eventhub_client_epoch_receiver]
    client = EventHubClient.from_connection_string(connection_str)
    # Add a receiver to the client object with an epoch value.
    epoch_receiver = client.add_epoch_receiver(consumer_group="$default", partition="0", epoch=42)
    # [END create_eventhub_client_epoch_receiver]

    # [START eventhub_client_run]
    client = EventHubClient.from_connection_string(connection_str)
    # Add Senders/Receivers
    try:
        client.run()  # Start sending and receiving
    except:
        raise
    finally:
        client.stop()
    # [END eventhub_client_run]

    client = EventHubClient.from_connection_string(connection_str)
    sender = client.add_sender(partition="0")
    receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest'))
    try:
        # Opens the connection and starts running all Sender/Receiver clients.
        client.run()  # Start sending and receiving

        # [START create_event_data]
        event_data = EventData("String data")
        event_data = EventData(b"Bytes data")
        event_data = EventData([b"A", b"B", b"C"])

        def batched():
            for i in range(10):
                yield "Batch data, Event number {}".format(i)

        event_data = EventData(batch=batched())
        # [END create_event_data]

        # [START eventhub_client_sync_send]
        event_data = EventData(b"A single event")
        sender.send(event_data)
        # [END eventhub_client_sync_send]

        time.sleep(1)

        # [START eventhub_client_sync_receive]
        logger = logging.getLogger("azure.eventhub")
        received = receiver.receive(timeout=5, max_batch_size=1)
        for event_data in received:
            logger.info("Message received:{}".format(event_data.body_as_str()))
        # [END eventhub_client_sync_receive]

        assert len(received) == 1
        assert received[0].body_as_str() == "A single event"
        assert list(received[-1].body)[0] == b"A single event"
    except:
        raise
    finally:
        client.stop()

    # [START eventhub_client_stop]
    client = EventHubClient.from_connection_string(connection_str)
    # Add Senders/Receivers
    try:
        client.run()  # Start sending and receiving
    except:
        raise
    finally:
        client.stop()
def test_receive_over_websocket_sync(connstr_senders):
    app_prop = {"raw_prop": "raw_value"}
    content_type = "text/plain"
    message_id_base = "mess_id_sample_"

    def on_event(partition_context, event):
        on_event.received.append(event)
        on_event.app_prop = event.properties

    on_event.received = []
    on_event.app_prop = None

    connection_str, senders = connstr_senders
    client = EventHubConsumerClient.from_connection_string(
        connection_str,
        consumer_group='$default',
        transport_type=TransportType.AmqpOverWebsocket)

    event_list = []
    for i in range(5):
        ed = EventData("Event Number {}".format(i))
        ed.properties = app_prop
        ed.content_type = content_type
        ed.correlation_id = message_id_base
        ed.message_id = message_id_base + str(i)
        event_list.append(ed)
    senders[0].send(event_list)

    single_ed = EventData("Event Number {}".format(6))
    single_ed.properties = app_prop
    single_ed.content_type = content_type
    single_ed.correlation_id = message_id_base
    single_ed.message_id = message_id_base + str(6)
    senders[0].send(single_ed)

    with client:
        thread = threading.Thread(target=client.receive, args=(on_event,),
                                  kwargs={"partition_id": "0", "starting_position": "-1"})
        thread.start()
        time.sleep(10)
        assert len(on_event.received) == 6
        for ed in on_event.received:
            assert ed.correlation_id == message_id_base
            assert message_id_base in ed.message_id
            assert ed.content_type == "text/plain"
            assert ed.properties[b"raw_prop"] == b"raw_value"
from kafka import KafkaConsumer
from azure.eventhub import EventHubProducerClient, EventData

# Initialize Kafka Consumer for the topic
consumer = KafkaConsumer('MockStreamTopic')

# EVENT HUB DETAILS
connection_str = 'Endpoint=sb://streamuckafka.servicebus.windows.net/;SharedAccessKeyName=StreamUCPolicy;SharedAccessKey=C1ds2c1s+nsO0OWk+IlGRfDn3IY2pnWVTrXJ1EF2kMs='
eventhub_name = 'streamuckafkaeventhub'

producer = EventHubProducerClient.from_connection_string(connection_str, eventhub_name=eventhub_name)
event_data_batch = producer.create_batch()

i = 0
for message in consumer:
    try:
        event_data_batch.add(EventData(str(message)))
        print(i, "message added")
        i += 1
    except ValueError:
        # The current batch is full: send it and start a new one with this message.
        print("New batch")
        producer.send_batch(event_data_batch)
        event_data_batch = producer.create_batch()
        print("Sending the batch")
        event_data_batch.add(EventData(str(message)))

print("Sending the batch")
producer.send_batch(event_data_batch)
print("Closing")
producer.close()
def test_body_wrong_json():
    event_data = EventData('aaa')
    with pytest.raises(TypeError):
        event_data.body_as_json()
def test_body_json():
    event_data = EventData('{"a":"b"}')
    assert str(event_data) == "{ body: '{\"a\":\"b\"}', properties: {} }"
    assert repr(event_data) == "EventData(body='{\"a\":\"b\"}', properties={}, offset=None, sequence_number=None, partition_key=None, enqueued_time=None)"
    jo = event_data.body_as_json()
    assert jo["a"] == "b"
def main(req: func.HttpRequest) -> func.HttpResponse:
    logger = logging.getLogger(__name__)
    formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s: %(message)s')

    func_context = os.environ['FUNCTION_CONTEXT']
    logger.debug(f"Function context --> {func_context}")

    credentials = None
    subscription_id = None
    kv_credentials = None
    kv_subscription_id = None
    if func_context == 'local':
        filehandler = logging.FileHandler('func.log')
        filehandler.setFormatter(formatter)
        logger.addHandler(filehandler)
        logger.setLevel(logging.DEBUG)
        credentials, subscription_id = get_local_credentials()
    else:
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(formatter)
        credentials, subscription_id = get_azure_credentials()

    logger.debug('Python HTTP trigger function processed a request.')
    logger.debug(f"method={req.method}, url={req.url}, params={req.params}")
    logger.debug(f"body={req.get_json()}")

    # Handle WebHook.
    webhook = req.get_json()

    # Get resource information, specifically tags, if this is an alert.
    resource_id = ""
    if check_keys(webhook, 'data', 'context', 'resourceId'):
        resource_id = webhook['data']['context']['resourceId']
    elif check_keys(webhook, 'data', 'context', 'activityLog', 'resourceId'):
        resource_id = webhook['data']['context']['activityLog']['resourceId']
    elif check_keys(webhook, 'data', 'context', 'scope'):
        resource_id = webhook['data']['context']['scope']
    elif check_keys(webhook, 'data', 'context', 'activityLog', 'authorization', 'scope'):
        resource_id = webhook['data']['context']['activityLog']['authorization']['scope']

    if resource_id:
        resource_client = ResourceManagementClient(credentials, subscription_id)
        try:
            resource = resource_client.resources.get_by_id(resource_id, api_version='2018-06-01')
            if resource.tags:
                webhook['tags'] = resource.tags
                logger.info(f"adding tags {resource.tags}")
            else:
                logger.info(f"no tags found in resource {resource_id}")
        except:
            logger.error(f"received exception from ResourceManagementClient for {resource_id}")
    else:
        logger.info("no resource_id found in webhook")

    # Key Vault lookups for the Event Hub settings.
    kv_mgmt_client = KeyVaultManagementClient(credentials, subscription_id)
    kv_client = KeyVaultClient(credentials)
    namespace = get_kv_secret(kv_client, 'EventHubNamespace')
    event_hub = get_kv_secret(kv_client, 'EventHub')
    user = get_kv_secret(kv_client, 'EventHubKeyName')
    key = get_kv_secret(kv_client, 'EventHubKey')
    amqp_uri = f"https://{namespace}.servicebus.windows.net/{event_hub}"

    eh_client = EventHubClient(amqp_uri, debug=False, username=user, password=key)
    eh_sender = eh_client.add_sender(partition="0")
    eh_client.run()
    eh_sender.send(EventData(json.dumps(webhook)))
    logger.info(f"sending event to {amqp_uri}, {json.dumps(webhook)}")

    date = datetime.datetime.now()
    return func.HttpResponse(
        json.dumps({'status': 'SUCCESS'})
    )
async def example_eventhub_async_send_and_receive():
    producer = example_create_async_eventhub_producer_client()
    consumer = example_create_async_eventhub_consumer_client()
    try:
        # [START eventhub_producer_client_create_batch_async]
        from azure.eventhub import EventData

        event_data_batch = await producer.create_batch()
        while True:
            try:
                event_data_batch.add(EventData('Message inside EventBatchData'))
            except ValueError:
                # The EventDataBatch object reaches its max_size.
                # You can send the full EventDataBatch object and create a new one here.
                break
        # [END eventhub_producer_client_create_batch_async]

        # [START eventhub_producer_client_send_async]
        async with producer:
            event_data_batch = await producer.create_batch()
            while True:
                try:
                    event_data_batch.add(EventData('Message inside EventBatchData'))
                except ValueError:
                    # The EventDataBatch object reaches its max_size.
                    # You can send the full EventDataBatch object and create a new one here.
                    break
            await producer.send_batch(event_data_batch)
        # [END eventhub_producer_client_send_async]

        await asyncio.sleep(1)

        # [START eventhub_consumer_client_receive_async]
        logger = logging.getLogger("azure.eventhub")

        async def on_event(partition_context, event):
            # Put your code here.
            # If the operation is i/o intensive, async will have better performance.
            logger.info("Received event from partition: {}".format(partition_context.partition_id))

        async with consumer:
            await consumer.receive(
                on_event=on_event,
                starting_position="-1",  # "-1" is from the beginning of the partition.
            )
        # [END eventhub_consumer_client_receive_async]

        consumer = example_create_async_eventhub_consumer_client()

        # [START eventhub_consumer_client_receive_batch_async]
        logger = logging.getLogger("azure.eventhub")

        async def on_event_batch(partition_context, event_batch):
            # Put your code here.
            # If the operation is i/o intensive, async will have better performance.
            logger.info("{} events received from partition: {}".format(
                len(event_batch), partition_context.partition_id))

        async with consumer:
            await consumer.receive_batch(
                on_event_batch=on_event_batch,
                starting_position="-1",  # "-1" is from the beginning of the partition.
            )
        # [END eventhub_consumer_client_receive_batch_async]
    finally:
        pass
import os
import time

from azure.eventhub import EventHubProducerClient, EventData

EVENT_HUB_CONNECTION_STR = os.environ['EVENT_HUB_CONN_STR']
EVENTHUB_NAME = os.environ['EVENT_HUB_NAME']

producer = EventHubProducerClient.from_connection_string(conn_str=EVENT_HUB_CONNECTION_STR,
                                                          eventhub_name=EVENTHUB_NAME)

start_time = time.time()
with producer:
    # Without specifying partition_id or partition_key,
    # the events will be distributed to available partitions via round-robin.
    event_data_batch = producer.create_batch(max_size_in_bytes=10000)

    # Specifying partition_id:
    # event_data_batch = producer.create_batch(partition_id='0')

    # Specifying partition_key:
    # event_data_batch = producer.create_batch(partition_key='pkey')

    while True:
        try:
            event_data_batch.add(EventData('Message inside EventBatchData'))
        except ValueError:
            # The EventDataBatch object has reached its max_size.
            # A new EventDataBatch object can be created here to send more data.
            break
    producer.send_batch(event_data_batch)

print("Send messages in {} seconds".format(time.time() - start_time))
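The comments in the sample above describe sending the full batch and starting a new one when EventDataBatch.add raises ValueError, but the loop itself simply breaks. A minimal sketch of that drain pattern, assuming the same producer object; the helper name send_all and the events iterable of EventData are illustrative assumptions, not part of the original sample:

def send_all(producer, events):
    # Drain an iterable of EventData into successive batches,
    # sending each batch as soon as it is full.
    batch = producer.create_batch()
    for event in events:
        try:
            batch.add(event)
        except ValueError:
            # The current batch is full: send it and start a new one with this event.
            producer.send_batch(batch)
            batch = producer.create_batch()
            batch.add(event)
    if len(batch) > 0:
        # Send whatever remains in the final, partially filled batch.
        producer.send_batch(batch)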
def test_message_body_types(connection_str, senders):
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()

        received = receiver.receive(timeout=5)
        assert len(received) == 0

        senders[0].send(EventData(b"Bytes Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'Bytes Data']
        assert received[0].body_as_str() == "Bytes Data"
        with pytest.raises(TypeError):
            received[0].body_as_json()

        senders[0].send(EventData("Str Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'Str Data']
        assert received[0].body_as_str() == "Str Data"
        with pytest.raises(TypeError):
            received[0].body_as_json()

        senders[0].send(EventData(b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}'))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}']
        assert received[0].body_as_str() == '{"test_value": "JSON bytes data", "key1": true, "key2": 42}'
        assert received[0].body_as_json() == {"test_value": "JSON bytes data", "key1": True, "key2": 42}

        senders[0].send(EventData('{"test_value": "JSON str data", "key1": true, "key2": 42}'))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'{"test_value": "JSON str data", "key1": true, "key2": 42}']
        assert received[0].body_as_str() == '{"test_value": "JSON str data", "key1": true, "key2": 42}'
        assert received[0].body_as_json() == {"test_value": "JSON str data", "key1": True, "key2": 42}

        senders[0].send(EventData(42))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert received[0].body_as_str() == "42"
        assert received[0].body == 42
    except:
        raise
    finally:
        client.stop()
try:
    client = EventHubClient(ADDRESS, debug=True, username=USER, password=KEY)
    sender = client.add_sender(partition="0")
    client.run()
    try:
        start_time = time.time()
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind((HOST, PORT))
        s.listen(1)
        conn, addr = s.accept()
        print('Connected by', addr)
        count = 0
        while True:
            data = conn.recv(1024)
            if not data:
                break
            sender.send(EventData(data))
            count = count + 1
            if count % 30 == 0:
                print('Uploaded %d messages' % count)
        s.close()
    except:
        raise
    finally:
        end_time = time.time()
        client.stop()
        run_time = end_time - start_time
        logger.info("Runtime: {} seconds".format(run_time))
except KeyboardInterrupt:
    pass
#!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

"""
An example to show authentication using AAD credentials.
"""

import os

from azure.eventhub import EventData, EventHubProducerClient
from azure.identity import EnvironmentCredential

HOSTNAME = os.environ['EVENT_HUB_HOSTNAME']
EVENT_HUB = os.environ['EVENT_HUB_NAME']

credential = EnvironmentCredential()

producer = EventHubProducerClient(host=HOSTNAME,
                                  event_hub_path=EVENT_HUB,
                                  credential=credential)

with producer:
    event = EventData(body='A single message')
    producer.send(event, partition_id='0')
def run_sync(self):
    event = EventData(body=self.data)
    self.sender.send(event)
async def main():
    async with client, producer_client:
        event_data_batch = await producer_client.create_batch()
        event_data_batch.add(EventData('Single Message'))
        await producer_client.send_batch(event_data_batch)

        time_start = time.perf_counter()
        client_live = await client.live_events.begin_create(
            resource_group_name=resource_group,
            account_name=account_name,
            live_event_name=live_event_name,
            parameters=live_event_create,
            auto_start=False)
        time_end = time.perf_counter()
        execution_time = time_end - time_start

        if client_live:
            print(f"Live Event Created - long running operation complete! Name: {live_event_name}")
            print(f"Execution time to create LiveEvent: {execution_time:.2f} seconds")
            print()
            poller = client_live
            print(await poller.result())
        else:
            raise ValueError('Live Event creation failed!')

        # Create an Asset for the LiveOutput to use. Think of this as the "tape" that will be recorded to.
        # The asset entity points to a folder/container in your Azure Storage account.
        print(f"Creating an asset named: {asset_name}")
        print()

        out_alternate_id = f'outputALTid-{uniqueness}'
        out_description = f'outputdescription-{uniqueness}'

        # Create an output asset object
        out_asset = Asset(alternate_id=out_alternate_id, description=out_description)

        # Create an output asset
        output_asset = await client.assets.create_or_update(resource_group, account_name, asset_name, out_asset)

        if output_asset:
            # Print the output asset name
            print(f"The output asset name is: {output_asset.name}")
            print()
        else:
            raise ValueError('Output Asset creation failed!')

        # Create the Live Output - think of this as the "tape recorder" for the live event.
        # Live outputs are optional, but are required if you want to archive the event to storage,
        # use the asset for on-demand playback later, or if you want to enable cloud DVR time-shifting.
        # We will use the asset created above as the "tape" to record to.
        manifest_name = "output"

        # See the REST API for details on each of the settings on Live Output
        # https://docs.microsoft.com/rest/api/media/liveoutputs/create
        print(f"Creating a live output named: {live_output_name}")
        print()

        if output_asset:
            time_start = time.perf_counter()
            live_output_create = LiveOutput(
                description="Optional description when using more than one live output",
                asset_name=output_asset.name,
                # The HLS and DASH manifest file name. Setting this is recommended if you
                # want a deterministic manifest path up front.
                manifest_name=manifest_name,
                # Sets a one-hour time-shift DVR window. Uses ISO 8601 format string.
                archive_window_length=timedelta(hours=1),
                hls=Hls(
                    fragments_per_ts_segment=1  # Advanced setting when using HLS TS output only.
                ))
            print(f"live_output_create object is {live_output_create}")
            print()

            # Create and await the live output
            live_output_await = await client.live_outputs.begin_create(
                resource_group_name=resource_group,
                account_name=account_name,
                live_event_name=live_event_name,
                live_output_name=live_output_name,
                parameters=live_output_create)
            if live_output_await:
                print(f"Live Output created: {live_output_name}")
                poller = live_output_await
                print(await poller.result())
                time_end = time.perf_counter()
                execution_time = time_end - time_start
                print(f"Execution time to create Live Output: {execution_time:.2f} seconds")
                print()
            else:
                raise Exception("Live Output creation failed!")

        # Refresh the LiveEvent object's settings after starting it...
        live_event = await client.live_events.get(resource_group, account_name, live_event_name)

        # Get the RTMP ingest URL to configure in OBS Studio.
        # The endpoints property is a collection of RTMP primary and secondary, and RTMPS primary and secondary URLs.
        # To get the primary secure RTMPS URL, it is usually going to be index 3, but you could add a loop here to confirm.
        if live_event.input.endpoints:
            ingest_url = live_event.input.endpoints[0].url
            print("The RTMP ingest URL to enter into OBS Studio is:")
            print(f"RTMP ingest: {ingest_url}")
            print("Make sure to enter a Stream Key into the OBS Studio settings. It can be any value, or you can repeat the accessToken used in the ingest URL path.")
            print()

        if live_event.preview.endpoints:
            # Use the preview_endpoint to preview and verify that the input from the encoder is actually being received.
            # The preview endpoint URL also supports the addition of various format strings for HLS (format=m3u8-cmaf) and DASH (format=mpd-time-cmaf), for example.
            # The default manifest is Smooth.
            preview_endpoint = live_event.preview.endpoints[0].url
            print(f"The preview url is: {preview_endpoint}")
            print()
            print("Open the live preview in your browser and use any DASH and HLS player to monitor the preview playback.")
            print(f"https://ampdemo.azureedge.net/?url={preview_endpoint}(format=mpd-time-cmaf)&heuristicprofile=lowlatency")
            print("You will need to refresh the player page SEVERAL times until enough data has arrived to allow for manifest creation.")
            print("In a production player, the player can inspect the manifest to see if it contains enough content for the player to load and auto reload.")
            print()

        print("Start the live stream now, sending the input to the ingest url and verify that it is arriving with the preview url.")
        print("IMPORTANT TIP!: Make CERTAIN that the video is flowing to the Preview URL before continuing!")

        # Create the Streaming Locator URL for playback of the contents in the Live Output recording.
        print(f"Creating a streaming locator named: {streaming_locator_name}")
        print()
        streaming_locator = StreamingLocator(asset_name=asset_name,
                                             streaming_policy_name="Predefined_ClearStreamingOnly")
        locator = await client.streaming_locators.create(
            resource_group_name=resource_group,
            account_name=account_name,
            streaming_locator_name=streaming_locator_name,
            parameters=streaming_locator)

        # Get the default streaming endpoint on the account
        streaming_endpoint = await client.streaming_endpoints.get(
            resource_group_name=resource_group,
            account_name=account_name,
            streaming_endpoint_name=streaming_endpoint_name)

        if streaming_endpoint.resource_state != "Running":
            print(f"Streaming endpoint is stopped. Starting the endpoint named {streaming_endpoint_name}...")
            poller = await client.streaming_endpoints.begin_start(resource_group, account_name, streaming_endpoint_name)
            client_streaming_begin = await poller.result()
            print("Streaming Endpoint started.")
            if not client_streaming_begin:
                print("Streaming Endpoint was already started.")

        # Get the URLs to stream the output.
        print("The streaming URLs to stream the live output from a client player")
        print()

        host_name = streaming_endpoint.host_name
        scheme = 'https'

        # If you wish to get the streaming manifest ahead of time, make sure to set the manifest name in the LiveOutput as done above.
        # This allows you to have a deterministic manifest path:
        #   <streaming endpoint hostname>/<streaming locator ID>/manifestName.ism/manifest(<format string>)
        # Building the paths statically.
        # This is highly recommended when you want to share the stream manifests
        # with a player application or CMS system ahead of the live event.
        hls_format = "format=m3u8-cmaf"
        dash_format = "format=mpd-time-cmaf"

        manifest_base = f"{scheme}://{host_name}/{locator.streaming_locator_id}/{manifest_name}.ism/manifest"

        hls_manifest = f'{manifest_base}({hls_format})'
        print(f"The HLS (MP4) manifest URL is: {hls_manifest}")
        print("Open the following URL to playback the live stream in an HLS compliant player (HLS.js, Shaka, ExoPlayer) or directly on an iOS device")
        print(hls_manifest)
        print()

        dash_manifest = f'{manifest_base}({dash_format})'
        print(f"The DASH manifest URL is: {dash_manifest}")
        print("Open the following URL to playback the live stream from the LiveOutput in the Azure Media Player")
        print(f"https://ampdemo.azureedge.net/?url={dash_manifest}&heuristicprofile=lowlatency")
        print()

    # closing media client
    print('Closing media client')
    await client.close()

    # closing eventhub producer client
    print('Closing eventhub producer client')
    await producer_client.close()

    # closing credential client
    print('Closing credential client')
    await default_credential.close()
async def run_async(self):
    event = EventData(body=self.data)
    await self.async_sender.send(event)
#!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

"""
An example to show sending an event to an IoT Hub device over the device-bound endpoint.
"""

import os
import logging

from azure.eventhub import EventData, EventHubClient

logger = logging.getLogger('azure.eventhub')

iot_device_id = os.environ['IOTHUB_DEVICE']
iot_connection_str = os.environ['IOTHUB_CONNECTION_STR']

client = EventHubClient.from_connection_string(iot_connection_str, network_tracing=False)
try:
    producer = client.create_producer(operation='/messages/devicebound')
    with producer:
        producer.send(EventData(b"A single event", to_device=iot_device_id))
except KeyboardInterrupt:
    pass
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_send_list_partition_async(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    payload = "A1"
    async with client:
        await client.send_batch([EventData(payload)], partition_id="0")
        message = receivers[0].receive_message_batch(timeout=10000)[0]
        received = EventData._from_message(message)
        assert received.body_as_str() == payload


@pytest.mark.parametrize("to_send, exception_type",
                         [([EventData("A" * 1024)] * 1100, ValueError),
                          ("any str", AttributeError)])
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_send_list_wrong_data_async(connection_str, to_send, exception_type):
    client = EventHubProducerClient.from_connection_string(connection_str)
    async with client:
        with pytest.raises(exception_type):
            await client.send_batch(to_send)


@pytest.mark.parametrize("partition_id, partition_key", [("0", None), (None, "pk")])
@pytest.mark.liveTest
@pytest.mark.asyncio
async def receive(self):
    await asyncio.sleep(0.1)
    await self._on_event_received(EventData("mock events"))
async def test_send_amqp_annotated_message(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    async with client:
        sequence_body = [b'message', 123.456, True]
        footer = {'footer_key': 'footer_value'}
        prop = {"subject": "sequence"}
        seq_app_prop = {"body_type": "sequence"}
        sequence_message = AmqpAnnotatedMessage(
            sequence_body=sequence_body,
            footer=footer,
            properties=prop,
            application_properties=seq_app_prop)

        value_body = {b"key": [-123, b'data', False]}
        header = {"priority": 10}
        anno = {"ann_key": "ann_value"}
        value_app_prop = {"body_type": "value"}
        value_message = AmqpAnnotatedMessage(
            value_body=value_body,
            header=header,
            annotations=anno,
            application_properties=value_app_prop)

        data_body = [b'aa', b'bb', b'cc']
        data_app_prop = {"body_type": "data"}
        del_anno = {"delann_key": "delann_value"}
        data_message = AmqpAnnotatedMessage(
            data_body=data_body,
            header=header,
            delivery_annotations=del_anno,
            application_properties=data_app_prop)

        body_ed = """{"json_key": "json_val"}"""
        prop_ed = {"raw_prop": "raw_value"}
        cont_type_ed = "text/plain"
        corr_id_ed = "corr_id"
        mess_id_ed = "mess_id"
        event_data = EventData(body_ed)
        event_data.content_type = cont_type_ed
        event_data.correlation_id = corr_id_ed
        event_data.message_id = mess_id_ed

        batch = await client.create_batch()
        batch.add(data_message)
        batch.add(value_message)
        batch.add(sequence_message)
        batch.add(event_data)
        await client.send_batch(batch)
        await client.send_batch([data_message, value_message, sequence_message, event_data])

    received_count = {}
    received_count["data_msg"] = 0
    received_count["seq_msg"] = 0
    received_count["value_msg"] = 0
    received_count["normal_msg"] = 0

    def check_values(event):
        raw_amqp_message = event.raw_amqp_message
        if raw_amqp_message.body_type == AmqpMessageBodyType.DATA:
            if raw_amqp_message.application_properties and raw_amqp_message.application_properties.get(b'body_type') == b'data':
                body = [data for data in raw_amqp_message.body]
                assert data_body == body
                assert event.body_as_str() == "aabbcc"
                assert raw_amqp_message.delivery_annotations[b'delann_key'] == b'delann_value'
                assert raw_amqp_message.application_properties[b'body_type'] == b'data'
                received_count["data_msg"] += 1
            else:
                assert event.body_as_json() == {'json_key': 'json_val'}
                assert event.correlation_id == corr_id_ed
                assert event.message_id == mess_id_ed
                assert event.content_type == cont_type_ed
                assert event.body_type == AmqpMessageBodyType.DATA
                received_count["normal_msg"] += 1
        elif raw_amqp_message.body_type == AmqpMessageBodyType.SEQUENCE:
            body = [sequence for sequence in raw_amqp_message.body]
            assert [sequence_body] == body
            assert event.body_as_str() == "['message', 123.456, True]"
            assert raw_amqp_message.footer[b'footer_key'] == b'footer_value'
            assert raw_amqp_message.properties.subject == b'sequence'
            assert raw_amqp_message.application_properties[b'body_type'] == b'sequence'
            received_count["seq_msg"] += 1
        elif raw_amqp_message.body_type == AmqpMessageBodyType.VALUE:
            assert raw_amqp_message.body == value_body
            assert event.body_as_str() == "{'key': [-123, 'data', False]}"
            assert raw_amqp_message.annotations[b'ann_key'] == b'ann_value'
            assert raw_amqp_message.application_properties[b'body_type'] == b'value'
            received_count["value_msg"] += 1

    async def on_event(partition_context, event):
        on_event.received.append(event)

    on_event.received = []
    client = EventHubConsumerClient.from_connection_string(connection_str, consumer_group='$default')
    async with client:
        task = asyncio.ensure_future(client.receive(on_event, starting_position="-1"))
        await asyncio.sleep(15)
        for event in on_event.received:
            check_values(event)

    await task

    assert len(on_event.received) == 8
    assert received_count["data_msg"] == 2
    assert received_count["seq_msg"] == 2
    assert received_count["value_msg"] == 2
    assert received_count["normal_msg"] == 2
def receive(self):
    time.sleep(0.1)
    self._on_event_received(EventData(""))
def run_sync(self):
    event = EventData(batch=self.data_generator())
    self.sender.send(event)
# "amqps://<URL-encoded-SAS-policy>:<URL-encoded-SAS-key>@<mynamespace>.servicebus.windows.net/myeventhub" # "amqps://<mynamespace>.servicebus.windows.net/myeventhub" ADDRESS = os.environ.get('EVENT_HUB_ADDRESS') # SAS policy and key are not required if they are encoded in the URL USER = os.environ.get('EVENT_HUB_SAS_POLICY') KEY = os.environ.get('EVENT_HUB_SAS_KEY') try: if not ADDRESS: raise ValueError("No EventHubs URL supplied.") client = EventHubClient(ADDRESS, debug=False, username=USER, password=KEY) sender = client.add_sender(partition="1") client.run() try: start_time = time.time() for i in range(100): logger.info("Sending message: {}".format(i)) sender.send(EventData(str(i))) except: raise finally: end_time = time.time() client.stop() run_time = end_time - start_time logger.info("Runtime: {} seconds".format(run_time)) except KeyboardInterrupt: pass
async def run_async(self):
    event = EventData(batch=self.data_generator())
    await self.async_sender.send(event)
try:
    start_time = time.time()
    devices = []
    for x in range(0, 10):
        devices.append(str(uuid.uuid4()))

    for y in range(0, 100000):
        for dev in devices:
            reading = {
                'source': 'python-code-caio-sensor' + str(random.randint(1, 3)),
                'id': dev,
                'timestamp': str(datetime.datetime.utcnow()),
                'uv': random.random(),
                'temperature': random.randint(70, 100),
                'humidity': random.randint(70, 100),
                'motion': random.randint(0, 1)
            }
            message = json.dumps(reading)
            # message = "Message {}".format(i)
            print("Sending Message" + message)
            sender.send(EventData(message))
except KeyboardInterrupt:
    pass
except:
    raise
USER = os.environ.get('EVENT_HUB_SAS_POLICY')
KEY = os.environ.get('EVENT_HUB_SAS_KEY')

try:
    if not HOSTNAME:
        raise ValueError("No EventHubs URL supplied.")

    client = EventHubClient(host=HOSTNAME,
                            event_hub_path=EVENT_HUB,
                            credential=EventHubSharedKeyCredential(USER, KEY),
                            network_tracing=False)
    producer = client.create_producer(partition_id="0")
    try:
        start_time = time.time()
        with producer:
            # Not performance optimal, but it works. Please send events in batches for much better performance.
            for i in range(100):
                ed = EventData("msg")
                logger.info("Sending message: {}".format(i))
                producer.send(ed)
    except:
        raise
    finally:
        end_time = time.time()
        run_time = end_time - start_time
        logger.info("Runtime: {} seconds".format(run_time))
except KeyboardInterrupt:
    pass
async def send(snd, count):
    for i in range(count):
        logger.info("Sending message: {}".format(i))
        data = EventData(str(i))
        data.partition_key = b'SamplePartitionKey'
        await snd.send(data)
def test_message_body_types(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str)
    receiver = client._create_consumer(consumer_group="$default",
                                       partition_id="0",
                                       event_position=EventPosition('@latest'))
    try:
        received = receiver.receive(timeout=5)
        assert len(received) == 0

        senders[0].send(EventData(b"Bytes Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'Bytes Data']
        assert received[0].body_as_str() == "Bytes Data"
        with pytest.raises(TypeError):
            received[0].body_as_json()

        senders[0].send(EventData("Str Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'Str Data']
        assert received[0].body_as_str() == "Str Data"
        with pytest.raises(TypeError):
            received[0].body_as_json()

        senders[0].send(EventData(b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}'))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}']
        assert received[0].body_as_str() == '{"test_value": "JSON bytes data", "key1": true, "key2": 42}'
        assert received[0].body_as_json() == {"test_value": "JSON bytes data", "key1": True, "key2": 42}

        senders[0].send(EventData('{"test_value": "JSON str data", "key1": true, "key2": 42}'))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'{"test_value": "JSON str data", "key1": true, "key2": 42}']
        assert received[0].body_as_str() == '{"test_value": "JSON str data", "key1": true, "key2": 42}'
        assert received[0].body_as_json() == {"test_value": "JSON str data", "key1": True, "key2": 42}

        senders[0].send(EventData(42))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert received[0].body_as_str() == "42"
        assert received[0].body == 42
    except:
        raise
    finally:
        receiver.close()
        client.close()
async def test_example_eventhub_async_send_and_receive(live_eventhub_config):
    # [START create_eventhub_client_async]
    from azure.eventhub import EventHubClientAsync
    import os

    connection_str = "Endpoint=sb://{}/;SharedAccessKeyName={};SharedAccessKey={};EntityPath={}".format(
        os.environ['EVENT_HUB_HOSTNAME'],
        os.environ['EVENT_HUB_SAS_POLICY'],
        os.environ['EVENT_HUB_SAS_KEY'],
        os.environ['EVENT_HUB_NAME'])
    client = EventHubClientAsync.from_connection_string(connection_str)
    # [END create_eventhub_client_async]

    from azure.eventhub import EventData, Offset

    # [START create_eventhub_client_async_sender]
    client = EventHubClientAsync.from_connection_string(connection_str)
    # Add an async sender to the async client object.
    sender = client.add_async_sender(partition="0")
    # [END create_eventhub_client_async_sender]

    # [START create_eventhub_client_async_receiver]
    client = EventHubClientAsync.from_connection_string(connection_str)
    # Add an async receiver to the async client object.
    receiver = client.add_async_receiver(consumer_group="$default", partition="0", offset=Offset('@latest'))
    # [END create_eventhub_client_async_receiver]

    # [START create_eventhub_client_async_epoch_receiver]
    client = EventHubClientAsync.from_connection_string(connection_str)
    # Add an async epoch receiver to the async client object.
    epoch_receiver = client.add_async_epoch_receiver(consumer_group="$default", partition="0", epoch=42)
    # [END create_eventhub_client_async_epoch_receiver]

    # [START eventhub_client_run_async]
    client = EventHubClientAsync.from_connection_string(connection_str)
    # Add AsyncSenders/AsyncReceivers
    try:
        # Opens the connection and starts running all AsyncSender/AsyncReceiver clients.
        await client.run_async()  # Start sending and receiving
    except:
        raise
    finally:
        await client.stop_async()
    # [END eventhub_client_run_async]

    client = EventHubClientAsync.from_connection_string(connection_str)
    sender = client.add_async_sender(partition="0")
    receiver = client.add_async_receiver(consumer_group="$default", partition="0", offset=Offset('@latest'))
    try:
        # Opens the connection and starts running all AsyncSender/AsyncReceiver clients.
        await client.run_async()

        # [START eventhub_client_async_send]
        event_data = EventData(b"A single event")
        await sender.send(event_data)
        # [END eventhub_client_async_send]

        time.sleep(1)

        # [START eventhub_client_async_receive]
        logger = logging.getLogger("azure.eventhub")
        received = await receiver.receive(timeout=5)
        for event_data in received:
            logger.info("Message received:{}".format(event_data.body_as_str()))
        # [END eventhub_client_async_receive]

        assert len(received) == 1
        assert received[0].body_as_str() == "A single event"
        assert list(received[-1].body)[0] == b"A single event"
    except:
        raise
    finally:
        await client.stop_async()

    # [START eventhub_client_async_stop]
    client = EventHubClientAsync.from_connection_string(connection_str)
    # Add AsyncSenders/AsyncReceivers
    try:
        # Opens the connection and starts running all AsyncSender/AsyncReceiver clients.
        await client.run_async()  # Start sending and receiving
    except:
        raise
    finally:
        await client.stop_async()
def batched():
    for i in range(10):
        yield EventData("Event number {}".format(i))