def test_backoff_fixed_retry():
    """Verify 'fixed' retry mode sleeps for less time than exponential backoff would.

    For retried_times=1, exponential mode would sleep at least
    backoff_factor * (2 ** 1); fixed mode sleeps only backoff_factor,
    so the measured elapsed time must stay below the exponential bound.
    """
    # FIX: the body was copy-pasted verbatim for the string and enum spellings
    # of the retry mode; run both through one loop instead.
    for retry_mode in ('fixed', RetryMode.Fixed):
        client = EventHubProducerClient(
            'fake.host.com', 'fake_eh', None, retry_mode=retry_mode)
        backoff = client._config.backoff_factor
        start_time = time.time()
        client._backoff(
            retried_times=1, last_exception=Exception('fake'), timeout_time=None)
        sleep_time = time.time() - start_time
        # exp = 0.8 * (2 ** 1) = 1.6
        # time.sleep() in _backoff will take AT LEAST time 'exp' for 'exponential'
        # check that fixed is less than 'exp'
        assert sleep_time < backoff * (2 ** 1)
def example_create_eventhub_producer_client():
    """Sample: build an EventHubProducerClient from a connection string, then from a shared-key credential."""
    # [START create_eventhub_producer_client_from_conn_str_sync]
    import os
    from azure.eventhub import EventHubProducerClient

    conn_str = os.environ['EVENT_HUB_CONN_STR']
    hub_name = os.environ['EVENT_HUB_NAME']
    producer = EventHubProducerClient.from_connection_string(
        conn_str=conn_str,
        # EventHub name should be specified if it doesn't show up in connection string.
        eventhub_name=hub_name
    )
    # [END create_eventhub_producer_client_from_conn_str_sync]

    # [START create_eventhub_producer_client_sync]
    import os
    from azure.eventhub import EventHubProducerClient, EventHubSharedKeyCredential

    namespace = os.environ['EVENT_HUB_HOSTNAME']
    hub_name = os.environ['EVENT_HUB_NAME']
    policy = os.environ['EVENT_HUB_SAS_POLICY']
    key = os.environ['EVENT_HUB_SAS_KEY']
    producer = EventHubProducerClient(
        fully_qualified_namespace=namespace,
        # EventHub name should be specified if it doesn't show up in connection string.
        eventhub_name=hub_name,
        credential=EventHubSharedKeyCredential(policy, key)
    )
    # [END create_eventhub_producer_client_sync]
    return producer
def create_eventhub_producer_client():
    """Sample (legacy API): build an EventHubProducerClient two ways and return the last one."""
    # [START create_eventhub_producer_client_from_conn_str_sync]
    import os
    from azure.eventhub import EventHubProducerClient

    connection_str = os.environ['EVENT_HUB_CONN_STR']
    hub_path = os.environ['EVENT_HUB_NAME']
    producer = EventHubProducerClient.from_connection_string(
        conn_str=connection_str, event_hub_path=hub_path)
    # [END create_eventhub_producer_client_from_conn_str_sync]

    # [START create_eventhub_producer_client_sync]
    import os
    from azure.eventhub import EventHubProducerClient, EventHubSharedKeyCredential

    host = os.environ['EVENT_HUB_HOSTNAME']
    hub_path = os.environ['EVENT_HUB_NAME']
    policy = os.environ['EVENT_HUB_SAS_POLICY']
    key = os.environ['EVENT_HUB_SAS_KEY']
    credential = EventHubSharedKeyCredential(policy, key)
    producer = EventHubProducerClient(
        host=host, event_hub_path=hub_path, credential=credential)
    # [END create_eventhub_producer_client_sync]
    return producer
def test_client_azure_sas_credential(live_eventhub):
    """Send with a known-good connection string, then again via an AzureSasCredential token."""
    hostname = live_eventhub['hostname']

    def _send_single(client):
        # Send a one-event batch to partition 0 and close the client.
        with client:
            batch = client.create_batch(partition_id='0')
            batch.add(EventData(body='A single message'))
            client.send_batch(batch)

    # This should "just work" to validate known-good.
    _send_single(EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'],
        eventhub_name=live_eventhub['event_hub']))

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'],
                                             live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = credential.get_token(auth_uri).token.decode()
    _send_single(EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=AzureSasCredential(token)))
def test_client_secret_credential(aad_credential, live_eventhub):
    """End-to-end AAD-credential test (legacy API): send one event and receive it back.

    Skipped when azure-identity is not installed.
    """
    try:
        from azure.identity import EnvironmentCredential
    except ImportError:
        pytest.skip("No azure identity library")
    credential = EnvironmentCredential()
    producer_client = EventHubProducerClient(
        host=live_eventhub['hostname'],
        event_hub_path=live_eventhub['event_hub'],
        credential=credential,
        user_agent='customized information')
    consumer_client = EventHubConsumerClient(
        host=live_eventhub['hostname'],
        event_hub_path=live_eventhub['event_hub'],
        credential=credential,
        user_agent='customized information')
    with producer_client:
        producer_client.send(EventData(body='A single message'))

    def on_events(partition_context, events):
        # Callback run on the receive thread; validates the round-tripped event.
        assert partition_context.partition_id == '0'
        assert len(events) == 1
        assert list(events[0].body)[0] == 'A single message'.encode('utf-8')

    with consumer_client:
        worker = threading.Thread(target=consumer_client.receive,
                                  args=(on_events, ),
                                  kwargs={
                                      "consumer_group": '$default',
                                      "partition_id": '0'
                                  })
        worker.start()
        time.sleep(2)
    # FIX: the receive thread was never joined, leaking a running thread past
    # the end of the test; closing the client unblocks receive(), so join here.
    worker.join()
def create_producer_client():
    """Demonstrate the different ways of constructing an EventHubProducerClient."""
    print('Examples showing how to create producer client.')

    # Create producer client from connection string.
    producer_client = EventHubProducerClient.from_connection_string(
        conn_str=CONNECTION_STRING  # connection string contains EventHub name.
    )

    # Illustration of commonly used parameters.
    producer_client = EventHubProducerClient.from_connection_string(
        conn_str=CONNECTION_STRING,
        eventhub_name=EVENTHUB_NAME,  # EventHub name should be specified if it doesn't show up in connection string.
        logging_enable=False,  # To enable network tracing log, set logging_enable to True.
        retry_total=3,  # Retry up to 3 times to re-do failed operations.
        transport_type=TransportType.Amqp  # Use Amqp as the underlying transport protocol.
    )

    # Create producer client from constructor.
    shared_key_credential = EventHubSharedKeyCredential(policy=SAS_POLICY, key=SAS_KEY)
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=FULLY_QUALIFIED_NAMESPACE,
        eventhub_name=EVENTHUB_NAME,
        credential=shared_key_credential,
        logging_enable=False,  # To enable network tracing log, set logging_enable to True.
        retry_total=3,  # Retry up to 3 times to re-do failed operations.
        transport_type=TransportType.Amqp  # Use Amqp as the underlying transport protocol.
    )

    print("Calling producer client get eventhub properties:",
          producer_client.get_eventhub_properties())
def test_send_with_long_interval_sync(live_eventhub, sleep):
    """Send, wait (or kill the underlying connection), send again, then verify
    both events arrive on the target partition via a raw uamqp receiver."""
    partition = "0"
    producer = EventHubProducerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with producer:
        first = producer.create_batch(partition_id=partition)
        first.add(EventData(b"A single event"))
        producer.send_batch(first)

        if sleep:
            time.sleep(250)
        else:
            # Simulate a dropped connection instead of waiting it out.
            producer._producers[partition]._handler._connection._conn.destroy()

        second = producer.create_batch(partition_id=partition)
        second.add(EventData(b"A single event"))
        producer.send_batch(second)

    uri = "sb://{}/{}".format(live_eventhub['hostname'],
                              live_eventhub['event_hub'])
    sas_auth = authentication.SASTokenAuth.from_shared_access_key(
        uri, live_eventhub['key_name'], live_eventhub['access_key'])
    source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        live_eventhub['consumer_group'], partition)
    receiver = uamqp.ReceiveClient(source,
                                   auth=sas_auth,
                                   debug=False,
                                   timeout=5000,
                                   prefetch=500)
    events = []
    try:
        receiver.open()
        # receive_message_batch() returns immediately once it receives any messages before the max_batch_size
        # and timeout reach. Could be 1, 2, or any number between 1 and max_batch_size.
        # So call it twice to ensure the two events are received.
        for _ in range(2):
            events.extend(
                EventData._from_message(m)
                for m in receiver.receive_message_batch(max_batch_size=1,
                                                        timeout=5000))
    finally:
        receiver.close()

    assert len(events) == 2
    assert list(events[0].body)[0] == b"A single event"
def test_custom_certificate():
    """The connection_verify keyword should be stored verbatim on both client configs."""
    producer_cert = '/usr/bin/local/cert'
    consumer_cert = 'D:/local/certfile'

    producer = EventHubProducerClient(
        "fake.host.com", "fake_eh", None, connection_verify=producer_cert)
    assert producer._config.connection_verify == producer_cert

    consumer = EventHubConsumerClient(
        "fake.host.com", "fake_eh", "fake_group", None,
        connection_verify=consumer_cert)
    assert consumer._config.connection_verify == consumer_cert
def test_send_with_long_interval_sync(live_eventhub, sleep):
    """Send two events with a long gap (or a forced connection drop) between them,
    then sweep every partition with a raw uamqp receiver and verify both arrived.
    """
    sender = EventHubProducerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with sender:
        batch = sender.create_batch()
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)

        # FIX: dropped a pointless `for _ in range(1):` wrapper that ran
        # the following statements exactly once anyway.
        if sleep:
            time.sleep(300)
        else:
            # Simulate a dropped connection instead of waiting it out.
            sender._producers[-1]._handler._connection._conn.destroy()
        batch = sender.create_batch()
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)

        partition_ids = sender.get_partition_ids()

    received = []
    for p in partition_ids:
        uri = "sb://{}/{}".format(live_eventhub['hostname'],
                                  live_eventhub['event_hub'])
        sas_auth = authentication.SASTokenAuth.from_shared_access_key(
            uri, live_eventhub['key_name'], live_eventhub['access_key'])
        source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
            live_eventhub['hostname'], live_eventhub['event_hub'],
            live_eventhub['consumer_group'], p)
        receiver = uamqp.ReceiveClient(source,
                                       auth=sas_auth,
                                       debug=False,
                                       timeout=5000,
                                       prefetch=500)
        try:
            receiver.open()
            received.extend([
                EventData._from_message(x)
                for x in receiver.receive_message_batch(timeout=5000)
            ])
        finally:
            receiver.close()

    assert len(received) == 2
    assert list(received[0].body)[0] == b"A single event"
def EventhubSender(eventhubAppCredential):
    """Build a producer with the given service-principal credential and send one batch."""
    print("Initiating sender client..")
    # Creating a Eventhub producer client using ServicePrincipal credentials
    producer = EventHubProducerClient(
        fully_qualified_namespace=fully_qualified_namespace,
        eventhub_name=eventhub_name,
        credential=eventhubAppCredential)
    with producer:
        batch = producer.create_batch()
        try:
            batch.add(EventData('Message inside EventBatchData'))
        except ValueError:
            print("error")
        producer.send_batch(batch)
        print('One Batch sent successfully.')
def test_client_secret_credential(aad_credential, live_eventhub):
    """Round-trip one event through a live hub using an AAD EnvironmentCredential.

    Skipped when azure-identity is not installed.
    """
    try:
        from azure.identity import EnvironmentCredential
    # FIX: was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; only a failed import should trigger the skip.
    except ImportError:
        pytest.skip("No azure identity library")
    credential = EnvironmentCredential()
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=live_eventhub['hostname'],
        eventhub_name=live_eventhub['event_hub'],
        credential=credential,
        user_agent='customized information')
    consumer_client = EventHubConsumerClient(
        fully_qualified_namespace=live_eventhub['hostname'],
        eventhub_name=live_eventhub['event_hub'],
        consumer_group='$default',
        credential=credential,
        user_agent='customized information')
    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    def on_event(partition_context, event):
        # Record receipt on the function object so the main thread can assert on it.
        on_event.called = True
        on_event.partition_id = partition_context.partition_id
        on_event.event = event

    on_event.called = False
    with consumer_client:
        worker = threading.Thread(target=consumer_client.receive,
                                  args=(on_event, ),
                                  kwargs={
                                      "partition_id": '0',
                                      "starting_position": '-1'
                                  })
        worker.start()
        time.sleep(13)
    worker.join()

    assert on_event.called is True
    assert on_event.partition_id == "0"
    assert list(on_event.event.body)[0] == 'A single message'.encode('utf-8')
def test_client_sas_credential(live_eventhub):
    """Validate sending via connection string, SAS-token credential, and SAS-in-connection-string."""
    def _send_single(client):
        # Send a one-event batch to partition 0 and close the client.
        with client:
            batch = client.create_batch(partition_id='0')
            batch.add(EventData(body='A single message'))
            client.send_batch(batch)

    hostname = live_eventhub['hostname']

    # This should "just work" to validate known-good.
    _send_single(EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'],
        eventhub_name=live_eventhub['event_hub']))

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'],
                                             live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = credential.get_token(auth_uri).token
    _send_single(EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=EventHubSASTokenCredential(token, time.time() + 3000)))

    # Finally let's do it with SAS token + conn str
    token_conn_str = "Endpoint=sb://{}/;SharedAccessSignature={};".format(
        hostname, token.decode())
    _send_single(EventHubProducerClient.from_connection_string(
        token_conn_str, eventhub_name=live_eventhub['event_hub']))
def test_custom_endpoint():
    """custom_endpoint_address should set the parsed hostname/port on the client
    config and force the AMQP-over-WebSocket transport; without it, plain AMQP
    on port 5671 is used.
    """
    # No custom endpoint: plain AMQP over port 5671 for both client types.
    producer = EventHubProducerClient(
        "fake.host.com",
        "fake_eh",
        None,
    )
    assert not producer._config.custom_endpoint_hostname
    assert producer._config.transport_type == TransportType.Amqp
    assert producer._config.connection_port == 5671

    consumer = EventHubConsumerClient(
        "fake.host.com",
        "fake_eh",
        "fake_group",
        None,
    )
    assert not consumer._config.custom_endpoint_hostname
    assert consumer._config.transport_type == TransportType.Amqp
    assert consumer._config.connection_port == 5671

    # FIX: the original repeated the same three assertions eight times with
    # only the address varying; the cases are now data-driven.
    # (address, expected hostname, expected port)
    producer_cases = [
        ("https://12.34.56.78", '12.34.56.78', 443),
        ("sb://fake.endpoint.com:443", 'fake.endpoint.com', 443),
        ("https://fake.endpoint.com:200", 'fake.endpoint.com', 200),
        ("fake.endpoint.com:200", 'fake.endpoint.com', 200),
    ]
    for address, hostname, port in producer_cases:
        producer = EventHubProducerClient(
            "fake.host.com", "fake_eh", None, custom_endpoint_address=address)
        assert producer._config.custom_endpoint_hostname == hostname
        assert producer._config.transport_type == TransportType.AmqpOverWebsocket
        assert producer._config.connection_port == port

    consumer_cases = [
        ("https://12.34.56.78/", '12.34.56.78', 443),
        ("sb://fake.endpoint.com:443", 'fake.endpoint.com', 443),
        ("https://fake.endpoint.com:200", 'fake.endpoint.com', 200),
        ("fake.endpoint.com:200", 'fake.endpoint.com', 200),
    ]
    for address, hostname, port in consumer_cases:
        consumer = EventHubConsumerClient(
            "fake.host.com", "fake_eh", "fake_group", None,
            custom_endpoint_address=address)
        assert consumer._config.custom_endpoint_hostname == hostname
        assert consumer._config.transport_type == TransportType.AmqpOverWebsocket
        assert consumer._config.connection_port == port
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Function entry point: enrich an alert webhook with resource and
    subscription tags, then forward it to an Event Hub.

    :param req: the incoming HTTP request whose JSON body is the alert webhook.
    :return: HTTP response whose JSON body carries the send date and 'SUCCESS'.
    """
    logger = logging.getLogger(__name__)
    formatter = logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s: %(message)s')
    func_context = os.environ['FUNCTION_CONTEXT']
    logger.debug(f"Function context --> {func_context}")

    credentials = None
    subscription_id = None
    kv_credentials = None
    kv_subscription_id = None
    sub_cred = None
    if func_context == 'local':
        # Local runs: verbose file logging plus developer credentials.
        filehandler = logging.FileHandler('func.log')
        filehandler.setFormatter(formatter)
        logger.addHandler(filehandler)
        logger.setLevel(logging.DEBUG)
        credentials, kv_credentials, subscription_id = get_local_credentials()
        sub_cred = credentials
    else:
        # In Azure: console logging and managed-identity auth for subscriptions.
        from msrestazure.azure_active_directory import MSIAuthentication
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(formatter)
        credentials, kv_credentials, subscription_id = get_azure_credentials()
        sub_cred = MSIAuthentication()

    logger.debug('Python HTTP trigger function processed a request.')
    logger.debug(f"method={req.method}, url={req.url}, params={req.params}")
    logger.debug(f"body={req.get_json()}")

    # Handle WebHook
    webhook = req.get_json()

    # Get resource information specifically tags if this is an alert
    resource_id = None
    if "azureMonitorCommonAlertSchema" in webhook["schemaId"]:
        if check_keys(webhook, 'data', 'essentials', 'alertTargetIDs'):
            resource_id = webhook["data"]["essentials"]["alertTargetIDs"]

    if resource_id:
        resource_client = ResourceManagementClient(credentials, subscription_id)
        try:
            resource = resource_client.resources.get_by_id(
                resource_id[0], api_version='2018-06-01')
            if resource.tags:
                # webhook['resource_tags'] = resource.tags
                logger.info(f"found resource tags {resource.tags}")
            else:
                logger.info(f"no tags found in resource {resource_id}")
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; tag lookup remains best-effort but only for
        # ordinary exceptions.
        except Exception:
            logger.error(
                f"received exception from ResourceManagementClient for {resource_id}")
    else:
        logger.info("no resource_id found in webhook")

    subscription_client = SubscriptionClient(sub_cred)
    subscription = None
    for sub in subscription_client.subscriptions.list():
        if subscription_id == sub.subscription_id:
            subscription = sub
            break

    webhook['additionalData'] = {}
    if subscription and 'motsID' in subscription.tags.keys():
        webhook['additionalData']['motsID'] = subscription.tags['motsID']
        logger.info(f"added subscription tags={subscription.tags}")
    elif not subscription:
        logger.error(f"subscription not found via MSI for {subscription_id}")
    else:
        logger.info(f"motsID not found in {subscription_id} tags")

    if 'EVENT_HUB_NAMESPACE' in os.environ and 'EVENT_HUB' in os.environ:
        # Event Hub location supplied directly via environment.
        namespace = os.environ['EVENT_HUB_NAMESPACE']
        event_hub = os.environ['EVENT_HUB']
        eh_prod_client = EventHubProducerClient(
            fully_qualified_namespace=namespace,
            eventhub_name=event_hub,
            credential=credentials)
    else:
        # Key Vault stuff
        kv_mgmt_client = KeyVaultManagementClient(credentials, subscription_id)
        kv_client = SecretClient(
            vault_url=os.environ['KEY_VAULT_URI'], credential=kv_credentials)
        namespace = kv_client.get_secret('EventHubNamespace').value
        event_hub = kv_client.get_secret('EventHub').value
        user = kv_client.get_secret('EventHubKeyName').value
        key = kv_client.get_secret('EventHubKey').value

        # Check whether connection string exists in Key Vault
        kv_prop = kv_client.list_properties_of_secrets()
        if 'EventHubConnectionString' in [prop.name for prop in kv_prop]:
            conn_string = get_kv_secret(
                kv_client, 'EventHubConnectionString').value
        else:
            conn_string = f"Endpoint=sb://{namespace}.servicebus.windows.net/;SharedAccessKeyName={user};SharedAccessKey={key}"
        eh_prod_client = EventHubProducerClient.from_connection_string(
            conn_string, eventhub_name=event_hub)

    event_data_batch = eh_prod_client.create_batch()
    event_data_batch.add(EventData(json.dumps(webhook)))
    eh_prod_client.send_batch(event_data_batch)
    logger.info(f"sending event to {namespace}, {json.dumps(webhook)}")

    date = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
    return func.HttpResponse(
        json.dumps({
            'date': date,
            'status': 'SUCCESS'
        })
    )
return AccessToken(self.token, self.expiry) # Target namespace and hub must also be specified. Consumer group is set to default unless required otherwise. FULLY_QUALIFIED_NAMESPACE = os.environ['EVENT_HUB_HOSTNAME'] EVENTHUB_NAME = os.environ['EVENT_HUB_NAME'] # The following part creates a SAS token. Users can use any way to create a SAS token. SAS_POLICY = os.environ['EVENT_HUB_SAS_POLICY'] SAS_KEY = os.environ['EVENT_HUB_SAS_KEY'] uri = "sb://{}/{}".format(FULLY_QUALIFIED_NAMESPACE, EVENTHUB_NAME) token_ttl = 3000 # seconds sas_token = generate_sas_token(uri, SAS_POLICY, SAS_KEY, token_ttl) # end of creating a SAS token producer_client = EventHubProducerClient( fully_qualified_namespace=FULLY_QUALIFIED_NAMESPACE, eventhub_name=EVENTHUB_NAME, credential=CustomizedSASCredential(sas_token, time.time() + token_ttl), logging_enable=True) start_time = time.time() with producer_client: event_data_batch = producer_client.create_batch() event_data_batch.add(EventData('Single message')) producer_client.send_batch(event_data_batch) print("Send messages in {} seconds.".format(time.time() - start_time))
from azure.identity import EnvironmentCredential

fully_qualified_namespace = os.environ['EVENT_HUB_HOSTNAME']
eventhub_name = os.environ['EVENT_HUB_NAME']

# Note: One has other options to specify the credential.  For instance, DefaultAzureCredential.
# Default Azure Credentials attempt a chained set of authentication methods, per documentation here: https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/identity/azure-identity
# For example user to be logged in can be specified by the environment variable AZURE_USERNAME, consumed via the ManagedIdentityCredential
# Alternately, one can specify the AZURE_TENANT_ID, AZURE_CLIENT_ID, and AZURE_CLIENT_SECRET to use the EnvironmentCredentialClass.
# The docs above specify all mechanisms which the defaultCredential internally support.
# credential = DefaultAzureCredential()
credential = EnvironmentCredential()

producer = EventHubProducerClient(
    fully_qualified_namespace=fully_qualified_namespace,
    eventhub_name=eventhub_name,
    credential=credential)

with producer:
    # Fill a single batch to capacity, then send it.
    event_data_batch = producer.create_batch()
    while True:
        try:
            event_data_batch.add(EventData('Message inside EventBatchData'))
        except ValueError:
            # EventDataBatch object reaches max_size.
            # New EventDataBatch object can be created here to send more data.
            break
    producer.send_batch(event_data_batch)

print('Finished sending.')
""" import os import time from azure.core.credentials import AzureNamedKeyCredential from azure.eventhub import EventHubProducerClient, EventData # Target namespace and hub must also be specified. FULLY_QUALIFIED_NAMESPACE = os.environ['EVENT_HUB_HOSTNAME'] EVENTHUB_NAME = os.environ['EVENT_HUB_NAME'] EVENTHUB_POLICY_NAME = os.environ['EVENT_HUB_SAS_POLICY'] EVENTHUB_KEY = os.environ['EVENT_HUB_SAS_KEY'] credential = AzureNamedKeyCredential(EVENTHUB_POLICY_NAME, EVENTHUB_KEY) producer_client = EventHubProducerClient( fully_qualified_namespace=FULLY_QUALIFIED_NAMESPACE, eventhub_name=EVENTHUB_NAME, credential=credential, logging_enable=True ) start_time = time.time() with producer_client: event_data_batch = producer_client.create_batch() event_data_batch.add(EventData('Single message')) producer_client.send_batch(event_data_batch) print("Send messages in {} seconds.".format(time.time() - start_time))
#!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
"""
An example to show authentication using aad credentials
"""

import os

from azure.eventhub import EventData, EventHubProducerClient
from azure.identity import EnvironmentCredential

HOSTNAME = os.environ['EVENT_HUB_HOSTNAME']
EVENT_HUB = os.environ['EVENT_HUB_NAME']

# EnvironmentCredential picks up AAD settings from environment variables.
credential = EnvironmentCredential()
producer = EventHubProducerClient(
    host=HOSTNAME, event_hub_path=EVENT_HUB, credential=credential)

with producer:
    # Publish a single event to partition 0.
    event = EventData(body='A single message')
    producer.send(event, partition_id='0')