Example #1
def test_send_with_long_interval_sync(live_eventhub, sleep):
    test_partition = "0"
    sender = EventHubProducerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with sender:
        batch = sender.create_batch(partition_id=test_partition)
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)
        if sleep:
            time.sleep(250)
        else:
            sender._producers[
                test_partition]._handler._connection._conn.destroy()
        batch = sender.create_batch(partition_id=test_partition)
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)

    received = []

    uri = "sb://{}/{}".format(live_eventhub['hostname'],
                              live_eventhub['event_hub'])
    sas_auth = authentication.SASTokenAuth.from_shared_access_key(
        uri, live_eventhub['key_name'], live_eventhub['access_key'])

    source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        live_eventhub['consumer_group'], test_partition)
    receiver = uamqp.ReceiveClient(source,
                                   auth=sas_auth,
                                   debug=False,
                                   timeout=5000,
                                   prefetch=500)
    try:
        receiver.open()
        # receive_message_batch() returns as soon as it has received any
        # messages, before max_batch_size is filled or the timeout expires,
        # so a single call may yield anywhere from 1 to max_batch_size
        # events. Call it twice to ensure both events are received.
        received.extend([
            EventData._from_message(x)
            for x in receiver.receive_message_batch(max_batch_size=1,
                                                    timeout=5000)
        ])
        received.extend([
            EventData._from_message(x)
            for x in receiver.receive_message_batch(max_batch_size=1,
                                                    timeout=5000)
        ])
    finally:
        receiver.close()
    assert len(received) == 2
    assert list(received[0].body)[0] == b"A single event"
Example #2
def test_client_azure_sas_credential(live_eventhub):
    # This should "just work" to validate known-good.
    hostname = live_eventhub['hostname']
    producer_client = EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'],
        eventhub_name=live_eventhub['event_hub'])

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'],
                                             live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = credential.get_token(auth_uri).token.decode()
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=AzureSasCredential(token))

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)
Example #3
def test_send_with_long_interval_sync(live_eventhub, sleep):
    sender = EventHubProducerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'],
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with sender:
        batch = sender.create_batch()
        batch.add(EventData(b"A single event"))
        sender.send_batch(batch)
        for _ in range(1):
            if sleep:
                time.sleep(300)
            else:
                sender._producers[-1]._handler._connection._conn.destroy()
            batch = sender.create_batch()
            batch.add(EventData(b"A single event"))
            sender.send_batch(batch)
        partition_ids = sender.get_partition_ids()

    received = []
    for p in partition_ids:
        uri = "sb://{}/{}".format(live_eventhub['hostname'],
                                  live_eventhub['event_hub'])
        sas_auth = authentication.SASTokenAuth.from_shared_access_key(
            uri, live_eventhub['key_name'], live_eventhub['access_key'])

        source = "amqps://{}/{}/ConsumerGroups/{}/Partitions/{}".format(
            live_eventhub['hostname'], live_eventhub['event_hub'],
            live_eventhub['consumer_group'], p)
        receiver = uamqp.ReceiveClient(source,
                                       auth=sas_auth,
                                       debug=False,
                                       timeout=5000,
                                       prefetch=500)
        try:
            receiver.open()
            received.extend([
                EventData._from_message(x)
                for x in receiver.receive_message_batch(timeout=5000)
            ])
        finally:
            receiver.close()

    assert len(received) == 2
    assert list(received[0].body)[0] == b"A single event"
Example #4
def stress_send_sync(producer: EventHubProducerClient, args, logger):
    batch = producer.create_batch(partition_id=args.send_partition_id,
                                  partition_key=args.send_partition_key)
    try:
        while True:
            event_data = EventData(body=b"D" * args.payload)
            batch.add(event_data)
    except ValueError:
        producer.send_batch(batch)
        return len(batch)
Example #5
def stress_send_sync(producer: EventHubProducerClient, args, stress_logger,
                     azure_monitor_metric):
    try:
        batch = producer.create_batch(partition_id=args.send_partition_id,
                                      partition_key=args.send_partition_key)
        while True:
            event_data = EventData(body=b"D" * args.payload)
            batch.add(event_data)
    except ValueError:
        try:
            producer.send_batch(batch)
        except EventHubError as e:
            return handle_exception(e, args.ignore_send_failure,
                                    stress_logger, azure_monitor_metric)
    except EventHubError as e:
        return handle_exception(e, args.ignore_send_failure,
                                stress_logger, azure_monitor_metric)
    return len(batch)
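Example #5 calls a handle_exception helper that is not shown in the source. A plausible sketch, reconstructed from the inline handling in Example #6 below (the azure_monitor_metric parameter is accepted for parity, but its API is not shown in the source, so nothing is recorded here):

def handle_exception(error, ignore_send_failure, stress_logger,
                     azure_monitor_metric):
    # Mirrors Example #6: swallow the failure when configured to,
    # otherwise re-raise. azure_monitor_metric could record the failure
    # here (its interface is not shown in the source).
    if ignore_send_failure:
        stress_logger.warning("Sync send failed due to error: %r.", error)
        return 0
    raise error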
Example #6
def stress_send_sync(producer: EventHubProducerClient, args, logger):
    batch = producer.create_batch(partition_id=args.send_partition_id,
                                  partition_key=args.send_partition_key)
    try:
        while True:
            event_data = EventData(body=b"D" * args.payload)
            batch.add(event_data)
    except ValueError:
        try:
            producer.send_batch(batch)
        except EventHubError as e:
            if args.ignore_send_failure:
                logger.warning("Sync send failed due to error: %r.", e)
                return 0
            raise
    return len(batch)
Example #7
def send_event_data_batch(producer: EventHubProducerClient, data: str):
    """Send/publish data to the event hub synchronously.

    Without specifying partition_id or partition_key, the events will be
    distributed to available partitions via round-robin.

    Parameters
    ----------
    producer: the eventhub client
    data: data to be published in string format

    Returns
    -------
    none
    """
    event_data_batch = producer.create_batch()
    event_data_batch.add(EventData(data))
    producer.send_batch(event_data_batch)
Example #8
def send_event_data_batch_with_partition_id(producer: EventHubProducerClient,
                                            data: str, pid: int):
    """Send/publish data to the event hub synchronously.

    Specifying partition_id pins all events in the batch to one partition.

    Parameters
    ----------
    producer: the eventhub client
    data: data to be published in string format
    pid: partition id (converted to a string for the service)

    Returns
    -------
    none
    """
    event_data_batch_with_partition_id = producer.create_batch(
        partition_id=str(pid))
    event_data_batch_with_partition_id.add(EventData(data))
    producer.send_batch(event_data_batch_with_partition_id)
Example #9
def EventhubSender(eventhubAppCredential):
    print("Initiating sender client..")
    # Create an Event Hub producer client using service principal credentials.
    producer = EventHubProducerClient(
        fully_qualified_namespace=fully_qualified_namespace,
        eventhub_name=eventhub_name,
        credential=eventhubAppCredential)

    with producer:
        event_data_batch = producer.create_batch()
        try:
            event_data_batch.add(EventData('Message inside EventBatchData'))
        except ValueError:
            print("Event could not be added: the batch is already full.")
        producer.send_batch(event_data_batch)

    print('One batch sent successfully.')
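The function above receives eventhubAppCredential without showing how it is constructed. A minimal sketch using azure-identity's ClientSecretCredential, assuming an AAD app registration (the placeholder values are illustrative, not from the source):

from azure.identity import ClientSecretCredential

# Illustrative only -- supply your own AAD app registration values.
eventhubAppCredential = ClientSecretCredential(
    tenant_id="<tenant-id>",          # placeholder
    client_id="<client-id>",          # placeholder
    client_secret="<client-secret>")  # placeholder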
Example #10
def send_event_data_batch_with_properties(producer: EventHubProducerClient,
                                          data: str,
                                          properties: Dict[str, str]):
    """Send/publish data to the event hub synchronously.

    Parameters
    ----------
    producer: the eventhub client
    data: data to be published in string format
    properties: a dictionary of key=value application properties to be
        added to the message

    Returns
    -------
    none
    """

    event_data_batch = producer.create_batch()
    event_data = EventData(data)
    event_data.properties = properties
    event_data_batch.add(event_data)
    producer.send_batch(event_data_batch)
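A hypothetical call site for the helper above; the environment variable names are assumptions, not part of the original sample:

producer = EventHubProducerClient.from_connection_string(
    os.environ['EVENT_HUB_CONN_STR'],  # assumed env var
    eventhub_name=os.environ['EVENT_HUB_NAME'])
with producer:
    send_event_data_batch_with_properties(
        producer, "telemetry-payload", {"origin": "sensor-1"})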
Example #11
def test_client_secret_credential(aad_credential, live_eventhub):
    try:
        from azure.identity import EnvironmentCredential
    except ImportError:
        pytest.skip("No azure identity library")
    credential = EnvironmentCredential()
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=live_eventhub['hostname'],
        eventhub_name=live_eventhub['event_hub'],
        credential=credential,
        user_agent='customized information')
    consumer_client = EventHubConsumerClient(
        fully_qualified_namespace=live_eventhub['hostname'],
        eventhub_name=live_eventhub['event_hub'],
        consumer_group='$default',
        credential=credential,
        user_agent='customized information')
    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    def on_event(partition_context, event):
        on_event.called = True
        on_event.partition_id = partition_context.partition_id
        on_event.event = event

    on_event.called = False
    with consumer_client:
        worker = threading.Thread(target=consumer_client.receive,
                                  args=(on_event, ),
                                  kwargs={
                                      "partition_id": '0',
                                      "starting_position": '-1'
                                  })
        worker.start()
        time.sleep(13)

    worker.join()
    assert on_event.called is True
    assert on_event.partition_id == "0"
    assert list(on_event.event.body)[0] == 'A single message'.encode('utf-8')
Example #12
def test_client_sas_credential(live_eventhub):
    # This should "just work" to validate known-good.
    hostname = live_eventhub['hostname']
    producer_client = EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'],
        eventhub_name=live_eventhub['event_hub'])

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'],
                                             live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = credential.get_token(auth_uri).token
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=EventHubSASTokenCredential(token,
                                              time.time() + 3000))

    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # Finally let's do it with SAS token + conn str
    token_conn_str = "Endpoint=sb://{}/;SharedAccessSignature={};".format(
        hostname, token.decode())
    conn_str_producer_client = EventHubProducerClient.from_connection_string(
        token_conn_str, eventhub_name=live_eventhub['event_hub'])

    with conn_str_producer_client:
        batch = conn_str_producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        conn_str_producer_client.send_batch(batch)
Example #13
from azure.core.credentials import AccessToken


class CustomizedSASCredential(object):
    # Reconstructed from the truncated snippet: a minimal credential object
    # that hands a pre-generated SAS token to the client on demand.
    def __init__(self, token, expiry):
        self.token = token
        self.expiry = expiry

    def get_token(self, *scopes, **kwargs):
        return AccessToken(self.token, self.expiry)


# Target namespace and hub must also be specified.  Consumer group is set to default unless required otherwise.
FULLY_QUALIFIED_NAMESPACE = os.environ['EVENT_HUB_HOSTNAME']
EVENTHUB_NAME = os.environ['EVENT_HUB_NAME']

# The following part creates a SAS token; any method of generating a valid token works.
SAS_POLICY = os.environ['EVENT_HUB_SAS_POLICY']
SAS_KEY = os.environ['EVENT_HUB_SAS_KEY']
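
# The sample assumes a generate_sas_token() helper; a minimal sketch follows.
# Any HMAC-SHA256 SAS generator works here -- this one signs "<uri>\n<expiry>"
# with the shared access key, the common pattern for Service Bus namespaces.
import base64
import hashlib
import hmac
import time
import urllib.parse

def generate_sas_token(uri, sas_name, sas_value, token_ttl):
    sas = sas_value.encode('utf-8')
    expiry = str(int(time.time() + token_ttl))
    string_to_sign = (uri + '\n' + expiry).encode('utf-8')
    signed_hmac = hmac.HMAC(sas, string_to_sign, hashlib.sha256)
    signature = urllib.parse.quote(base64.b64encode(signed_hmac.digest()))
    return 'SharedAccessSignature sr={}&sig={}&se={}&skn={}'.format(
        uri, signature, expiry, sas_name)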

uri = "sb://{}/{}".format(FULLY_QUALIFIED_NAMESPACE, EVENTHUB_NAME)
token_ttl = 3000  # seconds
sas_token = generate_sas_token(uri, SAS_POLICY, SAS_KEY, token_ttl)
# end of creating a SAS token

producer_client = EventHubProducerClient(
    fully_qualified_namespace=FULLY_QUALIFIED_NAMESPACE,
    eventhub_name=EVENTHUB_NAME,
    credential=CustomizedSASCredential(sas_token,
                                       time.time() + token_ttl),
    logging_enable=True)

start_time = time.time()
with producer_client:
    event_data_batch = producer_client.create_batch()
    event_data_batch.add(EventData('Single message'))
    producer_client.send_batch(event_data_batch)

print("Send messages in {} seconds.".format(time.time() - start_time))
Example #14
fully_qualified_namespace = os.environ['EVENT_HUB_HOSTNAME']
eventhub_name = os.environ['EVENT_HUB_NAME']

credential = EnvironmentCredential()

# Note: there are other options for specifying the credential, e.g. DefaultAzureCredential.
# DefaultAzureCredential attempts a chained set of authentication methods, per the documentation here: https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/identity/azure-identity
# For example, the user to sign in can be specified via the environment variables AZURE_USERNAME and AZURE_PASSWORD,
# or a service principal can be specified via AZURE_TENANT_ID, AZURE_CLIENT_ID, and AZURE_CLIENT_SECRET; both are consumed by EnvironmentCredential.
# The docs above describe all mechanisms that DefaultAzureCredential supports internally.
# credential = DefaultAzureCredential()

producer = EventHubProducerClient(
    fully_qualified_namespace=fully_qualified_namespace,
    eventhub_name=eventhub_name,
    credential=credential)

with producer:
    event_data_batch = producer.create_batch()
    while True:
        try:
            event_data_batch.add(EventData('Message inside EventBatchData'))
        except ValueError:
            # EventDataBatch object reaches max_size.
            # New EventDataBatch object can be created here to send more data.
            break
    producer.send_batch(event_data_batch)

print('Finished sending.')
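
The comment in the example above notes that a new EventDataBatch can be created once the current one is full. A minimal sketch of that pattern, assuming a producer client and an iterable of EventData (the helper name is illustrative, not from the source):

def send_all(producer, events):
    batch = producer.create_batch()
    for event in events:
        try:
            batch.add(event)
        except ValueError:
            # Batch reached max_size: flush it and retry the event in a
            # fresh batch.
            producer.send_batch(batch)
            batch = producer.create_batch()
            batch.add(event)
    if len(batch) > 0:
        producer.send_batch(batch)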
Example #15
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
"""
An example to show authentication using aad credentials
"""

import os
from azure.eventhub import EventData, EventHubProducerClient
from azure.identity import EnvironmentCredential

fully_qualified_namespace = os.environ['EVENT_HUB_HOSTNAME']
eventhub_name = os.environ['EVENT_HUB_NAME']

credential = EnvironmentCredential()
producer = EventHubProducerClient(
    fully_qualified_namespace=fully_qualified_namespace,
    eventhub_name=eventhub_name,
    credential=credential)

with producer:
    event_data_batch = producer.create_batch(max_size_in_bytes=10000)
    while True:
        try:
            event_data_batch.add(EventData('Message inside EventBatchData'))
        except ValueError:
            # EventDataBatch object reaches max_size.
            # New EventDataBatch object can be created here to send more data
            break
    producer.send_batch(event_data_batch)
Example #16
def main(req: func.HttpRequest) -> func.HttpResponse:
    logger = logging.getLogger(__name__)
    formatter = logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s: %(message)s')
    func_context = os.environ['FUNCTION_CONTEXT']
    logger.debug(f"Function context --> {func_context}")

    credentials = None
    subscription_id = None
    kv_credentials = None
    kv_subscription_id = None
    sub_cred = None
    if func_context == 'local':
        filehandler = logging.FileHandler('func.log')
        filehandler.setFormatter(formatter)
        logger.addHandler(filehandler)
        logger.setLevel(logging.DEBUG)
        credentials, kv_credentials, subscription_id = get_local_credentials()
        sub_cred = credentials
    else:
        from msrestazure.azure_active_directory import MSIAuthentication
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(formatter)
        credentials, kv_credentials, subscription_id = get_azure_credentials()
        sub_cred = MSIAuthentication()

    logger.debug('Python HTTP trigger function processed a request.')
    logger.debug(f"method={req.method}, url={req.url}, params={req.params}")
    logger.debug(f"body={req.get_json()}")

    # Handle WebHook
    webhook = req.get_json()
    # Get resource information specifically tags if this is an alert
    resource_id = None
    if "azureMonitorCommonAlertSchema" in webhook["schemaId"]:
        if check_keys(webhook, 'data', 'essentials', 'alertTargetIDs'):
            resource_id = webhook["data"]["essentials"]["alertTargetIDs"]

    if resource_id:
        resource_client = ResourceManagementClient(
            credentials, subscription_id)
        try:
            resource = resource_client.resources.get_by_id(
                resource_id[0], api_version='2018-06-01')
            if resource.tags:
                # webhook['resource_tags'] = resource.tags
                logger.info(f"found resource tags {resource.tags}")
            else:
                logger.info(f"no tags found in resource {resource_id}")
        except Exception:
            logger.error(
                f"received exception from ResourceManagementClient for {resource_id}")
    else:
        logger.info("no resource_id found in webhook")

    subscription_client = SubscriptionClient(sub_cred)
    subscription = None
    for sub in subscription_client.subscriptions.list():
        if subscription_id == sub.subscription_id:
            subscription = sub
            break
    webhook['additionalData'] = {}
    if subscription and subscription.tags and 'motsID' in subscription.tags:
        webhook['additionalData']['motsID'] = subscription.tags['motsID']
        logger.info(f"added subscription tags={subscription.tags}")
    elif not subscription:
        logger.error(f"subscription not found via MSI for {subscription_id}")
    else:
        logger.info(f"motsID not found in {subscription_id} tags")

    if 'EVENT_HUB_NAMESPACE' in os.environ and 'EVENT_HUB' in os.environ:
        namespace = os.environ['EVENT_HUB_NAMESPACE']
        event_hub = os.environ['EVENT_HUB']
        eh_prod_client = EventHubProducerClient(
            fully_qualified_namespace=namespace,
            eventhub_name=event_hub,
            credential=credentials)
    else:
        # Fall back to Key Vault for the Event Hub connection details.
        kv_mgmt_client = KeyVaultManagementClient(credentials, subscription_id)
        kv_client = SecretClient(
            vault_url=os.environ['KEY_VAULT_URI'],
            credential=kv_credentials)
        namespace = kv_client.get_secret('EventHubNamespace').value
        event_hub = kv_client.get_secret('EventHub').value
        user = kv_client.get_secret('EventHubKeyName').value
        key = kv_client.get_secret('EventHubKey').value
        # Check whether connection string exists in Key Vault
        kv_prop = kv_client.list_properties_of_secrets()
        if 'EventHubConnectionString' in [prop.name for prop in kv_prop]:
            conn_string = get_kv_secret(
                kv_client, 'EventHubConnectionString').value
        else:
            conn_string = f"Endpoint=sb://{namespace}.servicebus.windows.net/;SharedAccessKeyName={user};SharedAccessKey={key}"

        eh_prod_client = EventHubProducerClient.from_connection_string(
            conn_string, eventhub_name=event_hub)
    event_data_batch = eh_prod_client.create_batch()
    event_data_batch.add(EventData(json.dumps(webhook)))
    eh_prod_client.send_batch(event_data_batch)
    logger.info(f"sending event to {namespace}, {json.dumps(webhook)}")
    date = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
    return func.HttpResponse(
        json.dumps({
            'date': date,
            'status': 'SUCCESS'
        })
    )