def test_example_eventhub_sync_sender_ops(live_eventhub_config, connection_str):
    import os
    # [START create_eventhub_client_sender_instance]
    from azure.eventhub import EventHubClient

    client = EventHubClient.from_connection_string(connection_str)
    sender = client.add_sender(partition="0")
    # [END create_eventhub_client_sender_instance]

    # [START eventhub_client_sender_open]
    client = EventHubClient.from_connection_string(connection_str)
    sender = client.add_sender(partition="0")
    try:
        # Open the Sender using the supplied connection.
        sender.open()
        # Start sending
    except:
        raise
    finally:
        # Close down the send handler.
        sender.close()
    # [END eventhub_client_sender_open]

    # [START eventhub_client_sender_close]
    client = EventHubClient.from_connection_string(connection_str)
    sender = client.add_sender(partition="0")
    try:
        # Open the Sender using the supplied connection.
        sender.open()
        # Start sending
    except:
        raise
    finally:
        # Close down the send handler.
        sender.close()
    # [END eventhub_client_sender_close]
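The snippets above only open and close the handler without sending anything. A minimal sketch of the full send path with the same legacy API is shown below; it assumes `connection_str` holds a valid Event Hub connection string.

from azure.eventhub import EventHubClient, EventData

client = EventHubClient.from_connection_string(connection_str)
sender = client.add_sender(partition="0")
try:
    # Start the client's connection and open all registered senders.
    client.run()
    # Send a single event to partition "0".
    sender.send(EventData(b"example payload"))
finally:
    # Tear down the connection and all handlers.
    client.stop()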
Example #2
def add_receiver(client: EventHubClient, offset: Offset):
    receiver = client.add_receiver(CONSUMER_GROUP,
                                   EVENT_HUB_PARTITION,
                                   prefetch=PREFETCH,
                                   offset=offset)
    client.run()
    return receiver
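A possible way to drive this helper is sketched below. The CONSUMER_GROUP, EVENT_HUB_PARTITION and PREFETCH constants and the connection string are placeholders, since the original module defines them elsewhere.

from azure.eventhub import EventHubClient, Offset

# Placeholder constants; the real values live elsewhere in the original module.
CONSUMER_GROUP = "$default"
EVENT_HUB_PARTITION = "0"
PREFETCH = 300

client = EventHubClient.from_connection_string("<connection-string>")
receiver = add_receiver(client, Offset("-1"))  # client.run() is called inside the helper
try:
    for event in receiver.receive(timeout=10):
        print(event.body_as_str())
finally:
    client.stop()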
Example #3
class Consumer:
    consumer_group = None
    eventhubs_client = None
    offset = Offset("-1")
    redis_cache = None

    def __init__(self, eventhub, address, user, key, consumer_group,
                 redis_hostname, redis_key):
        self.consumer_group = consumer_group
        self.eventhubs_client = EventHubClient(address,
                                               debug=False,
                                               username=user,
                                               password=key)

        redis_topic = f"eventhubs-{eventhub}-{consumer_group}"
        self.redis_cache = RedisCache(redis_hostname, redis_key, redis_topic)

    def receive(self):
        OFFSET = Offset(self.redis_cache.get_offset())
        receiver = self.eventhubs_client.add_receiver(self.consumer_group,
                                                      "0",
                                                      prefetch=5000,
                                                      offset=OFFSET)
        self.eventhubs_client.run()
        messages = receiver.receive(timeout=100)
        self.eventhubs_client.stop()
        return messages

    def commit(self, event_data):
        self.redis_cache.set_offset(event_data.sequence_number)
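A hedged usage sketch for this class follows; the addresses, keys and the `process` handler are hypothetical, and `RedisCache` is assumed to come from the surrounding project.

consumer = Consumer(eventhub="myhub",
                    address="amqps://mynamespace.servicebus.windows.net/myhub",
                    user="RootManageSharedAccessKey",
                    key="<sas-key>",
                    consumer_group="$default",
                    redis_hostname="mycache.redis.cache.windows.net",
                    redis_key="<redis-key>")

for event_data in consumer.receive():
    process(event_data)          # hypothetical message handler
    consumer.commit(event_data)  # checkpoint the sequence number in Redis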
Example #4
def test_client_secret_credential(aad_credential, live_eventhub):
    try:
        from azure.identity import ClientSecretCredential
    except ImportError:
        pytest.skip("No azure identity library")
    client_id, secret, tenant_id = aad_credential
    credential = ClientSecretCredential(client_id=client_id,
                                        secret=secret,
                                        tenant_id=tenant_id)
    client = EventHubClient(host=live_eventhub['hostname'],
                            event_hub_path=live_eventhub['event_hub'],
                            credential=credential,
                            user_agent='customized information')
    sender = client.create_producer(partition_id='0')
    receiver = client.create_consumer(consumer_group="$default",
                                      partition_id='0',
                                      event_position=EventPosition("@latest"))

    with receiver:
        received = receiver.receive(timeout=1)
        assert len(received) == 0

        with sender:
            event = EventData(body='A single message')
            sender.send(event)
        time.sleep(1)

        received = receiver.receive(timeout=1)

        assert len(received) == 1
        assert list(received[0].body)[0] == 'A single message'.encode('utf-8')
Example #5
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    connection_str = "Endpoint=sb://{}/;SharedAccessKeyName={};SharedAccessKey={};EntityPath={}".format(
        os.environ['EVENT_HUB_HOSTNAME'], os.environ['EVENT_HUB_SAS_POLICY'],
        os.environ['EVENT_HUB_SAS_KEY'], os.environ['EVENT_HUB_NAME'])

    req_body = req.get_json()
    req_payload = json.dumps(req_body)

    client = EventHubClient.from_connection_string(connection_str)
    sender = client.add_sender(partition="0")

    try:
        client.run()
        logging.info('Send Alert with this payload: %s', req_payload)
        event_data = EventData(req_payload)
        sender.send(event_data)
        logging.info('Sent payload to Event Hub!')
    except:
        raise
    finally:
        client.stop()

    return func.HttpResponse(f"Hello {req_body}!")
Example #6
def get():
    if request.args.get('since') is None:
        since = -1
    else:
        since = request.args.get('since')

    client = EventHubClient(address, debug=False, username=user, password=key)

    receiver = client.add_receiver(consumergroup, PARTITION, prefetch=1000, offset=Offset(since), keep_alive=72000)
    client.run()

    def generate():
        batched_events = receiver.receive(max_batch_size=100, timeout=500)
        yield '['
        index = 0
        while batched_events:
            for event_data in batched_events:
                if index > 0:
                    yield ','
                last_sn = event_data.sequence_number
                data = str(event_data.message)
                output_entity = literal_eval(data)
                output_entity.update({"_updated": str(last_sn)})
                yield json.dumps(output_entity)
                index = index + 1
            batched_events = receiver.receive(max_batch_size=100, timeout=500)
        yield ']'
    return Response(generate(), mimetype='application/json')
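Consuming the streaming endpoint could be done as sketched below; the host, port and route are hypothetical.

import requests

resp = requests.get("http://localhost:5000/events", params={"since": "12345"})
for entity in resp.json():
    # "_updated" carries the sequence number the event was read at.
    print(entity["_updated"], entity)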
Example #7
    def __init__(self, address, user, key):
        self.client = EventHubClient(address,
                                     debug=False,
                                     username=user,
                                     password=key)
        self.sender = self.client.add_sender(partition="0")
        self.client.run()
Example #8
    def __init__(self, config):
        """
        Class to create an EventHubStreamingClient instance.

        :param config: Dictionary file with all the relevant parameters.
        """
        super().__init__()
        self.message_callback = None
        self.config = config
        self.storage_account_name = self.config.get("AZURE_STORAGE_ACCOUNT")
        self.storage_key = self.config.get("AZURE_STORAGE_ACCESS_KEY")
        self.lease_container_name = self.config.get("LEASE_CONTAINER_NAME")
        self.namespace = self.config.get("EVENT_HUB_NAMESPACE")
        self.eventhub = self.config.get("EVENT_HUB_NAME")
        self.consumer_group = self.config.get("EVENT_HUB_CONSUMER_GROUP")
        if self.consumer_group is None:
            self.consumer_group = '$default'

        self.user = self.config.get("EVENT_HUB_SAS_POLICY")
        self.key = self.config.get("EVENT_HUB_SAS_KEY")
        if self.config.get("TIMEOUT"):
            try:
                self.timeout = int(self.config.get("TIMEOUT"))
            except ValueError:
                self.timeout = None
        else:
            self.timeout = None

        # Create EPH Client
        if self.storage_account_name is not None and self.storage_key is not None:
            self.eph_client = EventHubConfig(
                sb_name=self.namespace,
                eh_name=self.eventhub,
                policy=self.user,
                sas_key=self.key,
                consumer_group=self.consumer_group)
            self.eh_options = EPHOptions()
            self.eh_options.release_pump_on_timeout = True
            self.eh_options.auto_reconnect_on_error = False
            self.eh_options.debug_trace = False
            self.storage_manager = AzureStorageCheckpointLeaseManager(
                self.storage_account_name, self.storage_key,
                self.lease_container_name)

        # Create Send client
        else:
            address = "amqps://" + self.namespace + \
                      ".servicebus.windows.net/" + self.eventhub
            try:
                self.send_client = EventHubClient(address,
                                                  debug=False,
                                                  username=self.user,
                                                  password=self.key)
                self.sender = self.send_client.add_sender()
                self.send_client.run()
            except Exception as e:
                logger.error('Failed to init EH send client: ' + str(e))
                raise
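A sketch of the config dictionary the constructor expects is shown below; only the keys are taken from the `config.get(...)` calls above, and the values are placeholders.

config = {
    "AZURE_STORAGE_ACCOUNT": "mystorageaccount",   # with the storage keys set,
    "AZURE_STORAGE_ACCESS_KEY": "<storage-key>",   # the EPH receive path is used
    "LEASE_CONTAINER_NAME": "leases",
    "EVENT_HUB_NAMESPACE": "mynamespace",
    "EVENT_HUB_NAME": "myhub",
    "EVENT_HUB_CONSUMER_GROUP": "$default",
    "EVENT_HUB_SAS_POLICY": "RootManageSharedAccessKey",
    "EVENT_HUB_SAS_KEY": "<sas-key>",
    "TIMEOUT": "60",
}

streaming_client = EventHubStreamingClient(config)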
Example #9
def test_long_running_receive(connection_str):
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--consumer",
                        help="Consumer group name",
                        default="$default")
    parser.add_argument("--partitions", help="Comma seperated partition IDs")
    parser.add_argument("--offset", help="Starting offset", default="-1")
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str, event_hub_path=args.eventhub, network_tracing=False)
    elif args.address:
        client = EventHubClient(host=args.address,
                                event_hub_path=args.eventhub,
                                credential=EventHubSharedKeyCredential(
                                    args.sas_policy, args.sas_key),
                                auth_timeout=240,
                                network_tracing=False)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    if args.partitions:
        partitions = args.partitions.split(",")
    else:
        partitions = client.get_partition_ids()

    threads = []
    for pid in partitions:
        consumer = client.create_consumer(consumer_group="$default",
                                          partition_id=pid,
                                          event_position=EventPosition(
                                              args.offset),
                                          prefetch=300)
        thread = threading.Thread(target=pump, args=(consumer, args.duration))
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
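The test hands each consumer to a `pump` callable that is not shown in this excerpt. A minimal sketch, assuming it simply drains the consumer for the requested number of seconds, could look like this:

import time

def pump(consumer, duration):
    # Sketch only: drain events for roughly `duration` seconds, then let the
    # context manager close the consumer.
    deadline = time.time() + duration
    total = 0
    with consumer:
        while time.time() < deadline:
            total += len(consumer.receive(timeout=5))
    print("received {} events".format(total))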
Example #10
    def __init__(self, eventhub, address, user, key, consumer_group,
                 redis_hostname, redis_key):
        self.consumer_group = consumer_group
        self.eventhubs_client = EventHubClient(address,
                                               debug=False,
                                               username=user,
                                               password=key)

        redis_topic = f"eventhubs-{eventhub}-{consumer_group}"
        self.redis_cache = RedisCache(redis_hostname, redis_key, redis_topic)
Example #11
    def cosmosDBServiceToCosmosDB(self):
        
        database_link = 'dbs/' + DATABASE_ID
        collection_link = database_link + '/colls/' + COLLECTION_ID

        counter = 0
        filepath = ''

        CONSUMER_GROUP = "$Default"
        OFFSET = Offset("0")
        PARTITION = "0"
        eh_client = EventHubClient('amqps://xxxxx.servicebus.windows.net/txxxxqueue', 
                                   debug=True, 
                                   username='******', 
                                   password='******')
        receiver = eh_client.add_receiver(CONSUMER_GROUP, PARTITION, 
                                          prefetch=300, offset=OFFSET)
        try:    
            eh_client.run()
            while True:
                for event_data in receiver.receive(timeout=100):
                    rcv_msg = str(event_data.message)
                    # Filter the Null messages
                    if len(rcv_msg)>5:
                        # Load the messages in CosmosDB
                        cosmos_client.CreateDocument(collection_link, 
                                                     json.loads(str(event_data.message)))
   
        except Exception as e:
            print("Failed Receiving Record {}".format(str(e)) ) 
        finally:
            eh_client.stop()
Example #12
    def __init__(self, address, user, key):
        self.address = address
        self.user = user
        self.key = key
        self.counter = 0
        self.client_batch = EventHubClient(self.address,
                                           debug=False,
                                           username=self.user,
                                           password=self.key)
        self.sender = self.client_batch.add_sender()
        self.client_batch.run()
Example #13
def main(req: func.HttpRequest) -> func.HttpResponse:
    logger = logging.getLogger(__name__)
    formatter = logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s: %(message)s')
    func_context = os.environ['FUNCTION_CONTEXT']
    logger.debug(f"Function context --> {func_context}")

    credentials = None
    subscription_id = None
    kv_credentials = None
    kv_subscription_id = None
    if func_context == 'local':
        filehandler = logging.FileHandler('func.log')
        filehandler.setFormatter(formatter)
        logger.addHandler(filehandler)
        logger.setLevel(logging.DEBUG)
        credentials, subscription_id = get_local_credentials()
    else:
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(formatter)
        credentials, subscription_id = get_azure_credentials()

    logger.debug('Python HTTP trigger function processed a request.')
    logger.debug(f"method={req.method}, url={req.url}, params={req.params}")
    logger.debug(f"body={req.get_json()}")

    # Handle WebHook
    webhook = req.get_json()

    #Create an empty dict within webhook for motsID
    webhook['additionalData'] = {}
    addl_data = webhook['additionalData']
    addl_data['motsID'] = "0000"

    # Key Vault stuff.
    kv_mgmt_client = KeyVaultManagementClient(credentials, subscription_id)
    kv_client = KeyVaultClient(credentials)
    namespace = get_kv_secret(kv_client, 'EventHubNamespace')
    event_hub = get_kv_secret(kv_client, 'EventHub')
    user = get_kv_secret(kv_client, 'EventHubKeyName')
    key = get_kv_secret(kv_client, 'EventHubKey')

    amqp_uri = f"https://{namespace}.servicebus.windows.net/{event_hub}"
    eh_client = EventHubClient(amqp_uri,
                               debug=False,
                               username=user,
                               password=key)
    eh_sender = eh_client.add_sender(partition="0")
    eh_client.run()
    eh_sender.send(EventData(json.dumps(webhook)))
    logger.info(f"sending event to {amqp_uri}, {json.dumps(webhook)}")
    date = datetime.datetime.now()
    return func.HttpResponse(json.dumps({'date': date.isoformat(), 'status': 'SUCCESS'}))
Example #14
    def __init__(self):
        #load config
        with open('config.json', 'r') as json_file:
            self.config = json.load(json_file)

        # Create Event Hubs client
        client = EventHubClient(self.config["EH_ADDRESS"],
                                debug=False,
                                username=self.config["EH_USER"],
                                password=self.config["EH_KEY"])
        Worker.event_hub_sender = client.add_sender(partition="0")
        client.run()
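The constructor reads `config.json` from the working directory. A sketch of a matching file is shown below; the values are placeholders and only the keys come from the code above.

import json

config = {
    "EH_ADDRESS": "amqps://mynamespace.servicebus.windows.net/myhub",
    "EH_USER": "RootManageSharedAccessKey",
    "EH_KEY": "<sas-key>",
}
with open("config.json", "w") as json_file:
    json.dump(config, json_file, indent=2)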
Example #15
def init_event_hub():
    with open('config.json', 'r') as json_file:
        config = json.load(json_file)
    client = EventHubClient(host=config["EH_HOST"],
                            event_hub_path=config["EH_NAME"],
                            credential=EventHubSharedKeyCredential(
                                config["EVENT_HUB_SAS_POLICY"],
                                config["EVENT_HUB_SAS_KEY"]),
                            network_tracing=False)

    for i in range(NUM_PARTITIONS):
        event_producer_list.append(client.create_producer(partition_id=str(i)))
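`init_event_hub` relies on the module-level `NUM_PARTITIONS` and `event_producer_list` names, which are defined elsewhere. A sketch of plausible definitions and of sending through one of the cached producers follows (the partition count is an assumption, and a matching `config.json` is assumed to exist).

from azure.eventhub import EventData

NUM_PARTITIONS = 2          # assumed partition count
event_producer_list = []    # filled by init_event_hub()

init_event_hub()
try:
    # Route an event to partition "0" through its cached producer.
    event_producer_list[0].send(EventData(b"hello from partition 0"))
finally:
    # Mirror the context-manager teardown used elsewhere in these examples.
    for producer in event_producer_list:
        producer.close()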
Example #16
def test_get_partition_properties(live_eventhub):
    client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'],
                                    EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key'])
                                    )
    properties = client.get_partition_properties('0')
    assert properties['event_hub_path'] == live_eventhub['event_hub'] \
        and properties['id'] == '0' \
        and 'beginning_sequence_number' in properties \
        and 'last_enqueued_sequence_number' in properties \
        and 'last_enqueued_offset' in properties \
        and 'last_enqueued_time_utc' in properties \
        and 'is_empty' in properties
def test_long_running_send(connection_str):
    if sys.platform.startswith('darwin'):
        import pytest
        pytest.skip("Skipping on OSX")
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--payload",
                        help="payload size",
                        type=int,
                        default=512)
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str, event_hub_path=args.eventhub)
    elif args.address:
        client = EventHubClient(host=args.address,
                                event_hub_path=args.eventhub,
                                credential=EventHubSharedKeyCredential(
                                    args.sas_policy, args.sas_key),
                                auth_timeout=240,
                                network_tracing=False)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        partition_ids = client.get_partition_ids()
        threads = []
        for pid in partition_ids:
            sender = client.create_producer(partition_id=pid)
            thread = threading.Thread(target=send, args=(sender, args))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
    except KeyboardInterrupt:
        pass
Example #18
    def __init__(self, devenv='doh'):
        """ Create Event Hub Sender """
        if devenv == 'prod':
            ADDRESS = "amqps://doh-airqual-eventhub.servicebus.windows.net/doh-airqual-event-hub-prod"
            USER = "******"
            KEY = "1p1aHCJc5IbamvnzlnvUa2wlvXsaJpSAbAORGlPRaQ4="
        elif devenv == "dev":
            ADDRESS = "amqps://az-doh-airqual-eventhub.servicebus.windows.net/az-doh-airqual-eventhub"
            USER = "******"
            KEY = "pfWGtB6obtiUCwwAobAAuaq7B9SZSsnpHY6ArEbeS1A="
        self.client = EventHubClient(ADDRESS, debug=False, username=USER, password=KEY)
        self.sender = self.client.add_sender(partition="1")
        self.client.run()
Example #19
    def __init__(self):
        NAMESPACE = os.environ['EVENT_HUB_NAMESPACE']
        EHNAME = os.environ['EVENT_HUB_NAME']
        ADDRESS = "amqps://" + NAMESPACE + ".servicebus.windows.net/" + EHNAME

        # SAS policy and key are not required if they are encoded in the URL
        USER = os.environ.get('EVENT_HUB_SAS_POLICY')
        KEY = os.environ.get('EVENT_HUB_SAS_KEY')

        self.client = EventHubClient(ADDRESS,
                                     debug=False,
                                     username=USER,
                                     password=KEY)
        self.sender = self.client.add_sender(partition="0")
        self.client.run()
Example #20
def test_send_with_partition_key(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()

        data_val = 0
        for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]:
            partition_key = b"test_partition_" + partition
            for i in range(50):
                data = EventData(str(data_val))
                data.partition_key = partition_key
                data_val += 1
                sender.send(data)
    except:
        raise
    finally:
        client.stop()

    found_partition_keys = {}
    for index, partition in enumerate(receivers):
        received = partition.receive(timeout=5)
        for message in received:
            try:
                existing = found_partition_keys[message.partition_key]
                assert existing == index
            except KeyError:
                found_partition_keys[message.partition_key] = index
def test_long_running_send(connection_str):
    if sys.platform.startswith('darwin'):
        import pytest
        pytest.skip("Skipping on OSX")
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30)
    parser.add_argument("--payload", help="payload size", type=int, default=512)
    parser.add_argument("--batch", help="Number of events to send and wait", type=int, default=1)
    parser.add_argument("--conn-str", help="EventHub connection string", default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument("--sas-policy", help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str,
            eventhub=args.eventhub)
    elif args.address:
        client = EventHubClient(
            args.address,
            username=args.sas_policy,
            password=args.sas_key)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        main(client, args)
    except KeyboardInterrupt:
        pass
def create_eventhub_client_from_iothub_connection_string(live_eventhub_config):
    # [START create_eventhub_client_iot_connstr]
    import os
    from azure.eventhub import EventHubClient

    iot_connection_str = os.environ['IOTHUB_CONNECTION_STR']
    client = EventHubClient.from_connection_string(iot_connection_str)
    # [END create_eventhub_client_iot_connstr]
Example #23
def test_receive_with_custom_datetime_sync(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    for i in range(5):
        senders[0].send(EventData(b"Message before timestamp"))
    time.sleep(60)

    now = datetime.datetime.utcnow()
    offset = datetime.datetime(now.year, now.month, now.day, now.hour, now.minute)
    for i in range(5):
        senders[0].send(EventData(b"Message after timestamp"))

    receiver = client.add_receiver("$default", "0", offset=Offset(offset))
    try:
        client.run()
        all_received = []
        received = receiver.receive(timeout=1)
        while received:
            all_received.extend(received)
            received = receiver.receive(timeout=1)

        assert len(all_received) == 5
        for received_event in all_received:
            assert received_event.body_as_str() == "Message after timestamp"
            assert received_event.enqueued_time > offset
    except:
        raise
    finally:
        client.stop()
Example #24
def test_receive_with_inclusive_offset(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str,
                                                   network_tracing=False)
    receiver = client.create_consumer(consumer_group="$default",
                                      partition_id="0",
                                      event_position=EventPosition('@latest'))

    with receiver:
        received = receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        offset = received[0].offset

        assert list(received[0].body) == [b'Data']
        assert received[0].body_as_str() == "Data"

    offset_receiver = client.create_consumer(consumer_group="$default",
                                             partition_id="0",
                                             event_position=EventPosition(
                                                 offset, inclusive=True))
    with offset_receiver:
        received = offset_receiver.receive(timeout=5)
        assert len(received) == 1
Example #25
def test_receive_with_offset_sync(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    partitions = client.get_eventhub_info()
    assert partitions["partition_ids"] == ["0", "1"]
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()
        more_partitions = client.get_eventhub_info()
        assert more_partitions["partition_ids"] == ["0", "1"]

        received = receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Data"))
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        offset = received[0].offset

        assert list(received[0].body) == [b'Data']
        assert received[0].body_as_str() == "Data"

        offset_receiver = client.add_receiver("$default", "0", offset=offset)
        client.run()
        received = offset_receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Message after offset"))
        received = offset_receiver.receive(timeout=5)
        assert len(received) == 1
    except:
        raise
    finally:
        client.stop()
Example #26
def test_receive_with_sequence_no(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()

        received = receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        offset = received[0].sequence_number

        offset_receiver = client.add_receiver("$default", "0", offset=Offset(offset))
        client.run()
        received = offset_receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Message next in sequence"))
        time.sleep(1)
        received = offset_receiver.receive(timeout=5)
        assert len(received) == 1
    except:
        raise
    finally:
        client.stop()
Example #27
def test_send_with_partition_key(connection_str, receivers):
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()

        data_val = 0
        for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]:
            partition_key = b"test_partition_" + partition
            for i in range(50):
                data = EventData(str(data_val))
                data.partition_key = partition_key
                data_val += 1
                sender.send(data)
    except:
        raise
    finally:
        client.stop()

    found_partition_keys = {}
    for index, partition in enumerate(receivers):
        received = partition.receive(timeout=5)
        for message in received:
            try:
                existing = found_partition_keys[message.partition_key]
                assert existing == index
            except KeyError:
                found_partition_keys[message.partition_key] = index
Example #28
def test_receive_with_inclusive_offset(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()

        received = receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        offset = received[0].offset

        assert list(received[0].body) == [b'Data']
        assert received[0].body_as_str() == "Data"

        offset_receiver = client.add_receiver("$default", "0", offset=Offset(offset.value, inclusive=True))
        client.run()
        received = offset_receiver.receive(timeout=5)
        assert len(received) == 1
    except:
        raise
    finally:
        client.stop()
Example #29
def test_send_with_forced_conn_close_sync(connstr_receivers, sleep):
    pytest.skip("This test is similar to the above one")
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str,
                                                   network_tracing=False)
    sender = client.create_producer()
    with sender:
        sender.send(EventData(b"A single event"))
        sender._handler._connection._conn.destroy()
        if sleep:
            time.sleep(300)
        else:
            sender._handler._connection._conn.destroy()
        sender.send(EventData(b"A single event"))
        sender.send(EventData(b"A single event"))
        if sleep:
            time.sleep(300)
        else:
            sender._handler._connection._conn.destroy()
        sender.send(EventData(b"A single event"))
        sender.send(EventData(b"A single event"))

    received = []
    for r in receivers:
        if not sleep:
            r._handler._connection._conn.destroy()
        received.extend(r.receive(timeout=1))
    assert len(received) == 5
    assert list(received[0].body)[0] == b"A single event"
def create_eventhub_client_from_iothub_connection_string(live_eventhub_config):
    # [START create_eventhub_client_iot_connstr]
    import os
    from azure.eventhub import EventHubClient

    iot_connection_str = os.environ['IOTHUB_CONNECTION_STR']
    client = EventHubClient.from_iothub_connection_string(iot_connection_str)
    # [END create_eventhub_client_iot_connstr]
Example #31
    def __init__(self):
        # This test requires a previously created Event Hub.
        # In this example the name is "myeventhub", but it can be changed below.
        connectionString = os.environ["EVENT_HUBS_CONNECTION_STRING"]
        eventHubName = "myeventhub"
        self.client = EventHubClient.from_connection_string(
            connectionString, eventHubName)
Example #32
    def __init__(self):
        connection_string = "Endpoint=sb://{}/;SharedAccessKeyName={};SharedAccessKey={};EntityPath={}".format(
            Settings.eh.hostname, Settings.eh.sas_policy, Settings.eh.sas_key,
            Settings.eh.event_hub_name)
        self.client = EventHubClient.from_connection_string(connection_string)
        self.sender = self.client.add_sender(partition="0")
        self.client.run()
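This wrapper only builds the sender; a hypothetical companion method for pushing a JSON payload might look like the sketch below (it assumes `json` and `EventData` are imported in the surrounding module).

    def send_json(self, payload):
        # Hypothetical helper: serialize a dict and send it to partition "0".
        self.sender.send(EventData(json.dumps(payload)))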
Example #33
def test_send_batch_with_app_prop_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    app_prop_key = "raw_prop"
    app_prop_value = "raw_value"
    app_prop = {app_prop_key: app_prop_value}

    def batched():
        for i in range(10):
            ed = EventData("Event number {}".format(i))
            ed.application_properties = app_prop
            yield ed
        for i in range(10, 20):
            ed = EventData("Event number {}".format(i))
            ed.application_properties = app_prop
            yield ed

    client = EventHubClient.from_connection_string(connection_str, network_tracing=False)
    sender = client.create_producer()
    with sender:
        sender.send(batched())

    time.sleep(1)

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=3))

    assert len(received) == 20
    for index, message in enumerate(received):
        assert list(message.body)[0] == "Event number {}".format(index).encode('utf-8')
        assert (app_prop_key.encode('utf-8') in message.application_properties) \
            and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8'))
def start_event_generation_sync_impl(cancellation_token):

    random.seed(int(time.time())) # use ticks as seed
    
    client = EventHubClient.from_connection_string(conn_str=EVENTHUB_CONNECTION_STRING, eventhub=EVENTHUB_NAME)
    sender = client.add_sender()
    client.run()

    while cancellation_token.is_set():
        try:
            # Simulate sending data from 100 weather sensors
            devices_data = []
        
            for i in range(0, 100):
                scale_factor = random.randrange(0,25)
                windturbine_measure = generate_turbine_measure("Python_Turbine_" + str(i), scale_factor)
                ev_data = serialize_windturbine_to_eventdata(windturbine_measure)
                devices_data.append(ev_data)

            sender.send(EventData(batch=[event for event in devices_data])) 
            logger.info("100 events sent!")
            print(".", end='', flush=True)

        except Exception as e:
            logger.error(e)
    
    client.stop()
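`generate_turbine_measure` and `serialize_windturbine_to_eventdata` are called above but not shown in this excerpt. A hedged sketch of what they might look like, with invented field names, follows.

import json
import random
import time
from azure.eventhub import EventData

def generate_turbine_measure(turbine_id, scale_factor):
    # Hypothetical payload shape; only the function name comes from the caller above.
    return {
        "turbineId": turbine_id,
        "timestamp": time.time(),
        "windSpeed": 10 + scale_factor * random.random(),
        "powerOutput": 1000 + 40 * scale_factor,
    }

def serialize_windturbine_to_eventdata(windturbine_measure):
    # Wrap the JSON-serialized measure in an EventData body.
    return EventData(json.dumps(windturbine_measure))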
Example #35
def test_send_to_invalid_partitions(connection_str):
    partitions = ["XYZ", "-1", "1000", "-" ]
    for p in partitions:
        client = EventHubClient.from_connection_string(connection_str, debug=False)
        sender = client.add_sender(partition=p)
        try:
            with pytest.raises(EventHubError):
                client.run()
        finally:
            client.stop()
def create_eventhub_client_from_sas_token(live_eventhub_config):
    # [START create_eventhub_client_sas_token]
    import os
    from azure.eventhub import EventHubClient

    address = os.environ['EVENT_HUB_ADDRESS']
    sas_token = os.environ['EVENT_HUB_SAS_TOKEN']

    client = EventHubClient.from_sas_token(
        address=address,
        sas_token=sas_token)
    # [END create_eventhub_client_sas_token]
Example #37
def test_receive_from_invalid_partitions_sync(connection_str):
    partitions = ["XYZ", "-1", "1000", "-" ]
    for p in partitions:
        client = EventHubClient.from_connection_string(connection_str, debug=True)
        receiver = client.add_receiver("$default", p)
        try:
            with pytest.raises(EventHubError):
                client.run()
                receiver.receive(timeout=10)
        finally:
            client.stop()
def test_iothub_receive_sync(iot_connection_str, device_id):
    client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True)
    receiver = client.add_receiver("$default", "0", operation='/messages/events')
    try:
        client.run()
        partitions = client.get_eventhub_info()
        assert partitions["partition_ids"] == ["0", "1", "2", "3"]
        received = receiver.receive(timeout=5)
        assert len(received) == 0
    finally:
        client.stop()
Example #39
def test_send_null_body(connection_str):
    partitions = ["XYZ", "-1", "1000", "-" ]
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()
        with pytest.raises(ValueError):
            data = EventData(None)
            sender.send(data)
    finally:
        client.stop()
Example #40
def test_iothub_send_single_event(iot_connection_str, device_id):
    client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True)
    sender = client.add_sender(operation='/messages/devicebound')
    try:
        client.run()
        outcome = sender.send(EventData(b"A single event", to_device=device_id))
        assert outcome.value == 0
    except:
        raise
    finally:
        client.stop()
Example #41
def test_send_partition_key_with_partition_sync(connection_str):
    client = EventHubClient.from_connection_string(connection_str, debug=True)
    sender = client.add_sender(partition="1")
    try:
        client.run()
        data = EventData(b"Data")
        data.partition_key = b"PKey"
        with pytest.raises(ValueError):
            sender.send(data)
    finally:
        client.stop()
Example #42
def connstr_senders(connection_str):
    client = EventHubClient.from_connection_string(connection_str, debug=True)
    eh_hub_info = client.get_eventhub_info()
    partitions = eh_hub_info["partition_ids"]

    senders = []
    for p in partitions:
        senders.append(client.add_sender(partition=p))

    client.run()
    yield connection_str, senders
    client.stop()
Example #43
def test_send_too_large_message(connection_str):
    if sys.platform.startswith('darwin'):
        pytest.skip("Skipping on OSX - open issue regarding message size")
    client = EventHubClient.from_connection_string(connection_str, debug=True)
    sender = client.add_sender()
    try:
        client.run()
        data = EventData(b"A" * 300000)
        with pytest.raises(EventHubError):
            sender.send(data)
    finally:
        client.stop()
Example #44
def test_message_body_types(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    receiver = client.add_receiver("$default", "0", offset=Offset('@latest'))
    try:
        client.run()

        received = receiver.receive(timeout=5)
        assert len(received) == 0
        senders[0].send(EventData(b"Bytes Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'Bytes Data']
        assert received[0].body_as_str() == "Bytes Data"
        with pytest.raises(TypeError):
            received[0].body_as_json()

        senders[0].send(EventData("Str Data"))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'Str Data']
        assert received[0].body_as_str() == "Str Data"
        with pytest.raises(TypeError):
            received[0].body_as_json()

        senders[0].send(EventData(b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}'))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'{"test_value": "JSON bytes data", "key1": true, "key2": 42}']
        assert received[0].body_as_str() == '{"test_value": "JSON bytes data", "key1": true, "key2": 42}'
        assert received[0].body_as_json() == {"test_value": "JSON bytes data", "key1": True, "key2": 42}

        senders[0].send(EventData('{"test_value": "JSON str data", "key1": true, "key2": 42}'))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert list(received[0].body) == [b'{"test_value": "JSON str data", "key1": true, "key2": 42}']
        assert received[0].body_as_str() == '{"test_value": "JSON str data", "key1": true, "key2": 42}'
        assert received[0].body_as_json() == {"test_value": "JSON str data", "key1": True, "key2": 42}

        senders[0].send(EventData(42))
        time.sleep(1)
        received = receiver.receive(timeout=5)
        assert len(received) == 1
        assert received[0].body_as_str() == "42"
        assert received[0].body == 42
    except:
        raise
    finally:
        client.stop()
def test_example_eventhub_sync_receiver_ops(live_eventhub_config, connection_str):
    import os
    # [START create_eventhub_client_receiver_instance]
    from azure.eventhub import EventHubClient, Offset

    client = EventHubClient.from_connection_string(connection_str)
    receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest'))
    # [END create_eventhub_client_receiver_instance]

    # [START eventhub_client_receiver_open]
    client = EventHubClient.from_connection_string(connection_str)
    receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest'))
    try:
        # Open the Receiver using the supplied connection.
        receiver.open()
        # Start receiving
    except:
        raise
    finally:
        # Close down the receive handler.
        receiver.close()
    # [END eventhub_client_receiver_open]        

    # [START eventhub_client_receiver_close]
    client = EventHubClient.from_connection_string(connection_str)
    receiver = client.add_receiver(consumer_group="$default", partition="0", offset=Offset('@latest'))
    try:
        # Open the Receiver using the supplied connection.
        receiver.open()
        # Start receiving
    except:
        raise
    finally:
        # Close down the receive handler.
        receiver.close()
    # [END eventhub_client_receiver_close]
Example #46
def test_send_partition(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender(partition="1")
    try:
        client.run()
        sender.send(EventData(b"Data"))
    except:
        raise
    finally:
        client.stop()

    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 0
    partition_1 = receivers[1].receive(timeout=2)
    assert len(partition_1) == 1
Example #47
def connstr_receivers(connection_str):
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    eh_hub_info = client.get_eventhub_info()
    partitions = eh_hub_info["partition_ids"]

    recv_offset = Offset("@latest")
    receivers = []
    for p in partitions:
        receivers.append(client.add_receiver("$default", p, prefetch=500, offset=recv_offset))

    client.run()

    for r in receivers:
        r.receive(timeout=1)
    yield connection_str, receivers

    client.stop()
Example #48
def test_send_non_ascii(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender(partition="0")
    try:
        client.run()
        sender.send(EventData(u"é,è,à,ù,â,ê,î,ô,û"))
        sender.send(EventData(json.dumps({"foo": u"漢字"})))
    except:
        raise
    finally:
        client.stop()

    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 2
    assert partition_0[0].body_as_str() == u"é,è,à,ù,â,ê,î,ô,û"
    assert partition_0[1].body_as_json() == {"foo": u"漢字"}
Example #49
def test_send_multiple_clients(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender_0 = client.add_sender(partition="0")
    sender_1 = client.add_sender(partition="1")
    try:
        client.run()
        sender_0.send(EventData(b"Message 0"))
        sender_1.send(EventData(b"Message 1"))
    except:
        raise
    finally:
        client.stop()

    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 1
    partition_1 = receivers[1].receive(timeout=2)
    assert len(partition_1) == 1
Example #50
def test_send_single_event(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()
        sender.send(EventData(b"A single event"))
    except:
        raise
    finally:
        client.stop()

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=1))

    assert len(received) == 1
    assert list(received[0].body)[0] == b"A single event"
Example #51
def test_send_array_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=True)
    sender = client.add_sender()
    try:
        client.run()
        sender.send(EventData([b"A", b"B", b"C"]))
    except:
        raise
    finally:
        client.stop()

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=1))

    assert len(received) == 1
    assert list(received[0].body) == [b"A", b"B", b"C"]
Example #52
def test_send_with_long_interval_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=True)
    sender = client.add_sender()
    try:
        client.run()
        sender.send(EventData(b"A single event"))
        for _ in range(2):
            time.sleep(300)
            sender.send(EventData(b"A single event"))
    finally:
        client.stop()

    received = []
    for r in receivers:
       received.extend(r.receive(timeout=1))

    assert len(received) == 3
    assert list(received[0].body)[0] == b"A single event"
def test_long_running_receive(connection_str):
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30)
    parser.add_argument("--consumer", help="Consumer group name", default="$default")
    parser.add_argument("--partitions", help="Comma seperated partition IDs")
    parser.add_argument("--offset", help="Starting offset", default="-1")
    parser.add_argument("--conn-str", help="EventHub connection string", default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument("--sas-policy", help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str,
            eventhub=args.eventhub, debug=False)
    elif args.address:
        client = EventHubClient(
            args.address,
            username=args.sas_policy,
            password=args.sas_key)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        if not args.partitions:
            partitions = get_partitions(client)
        else:
            partitions = args.partitions.split(",")
        pumps = {}
        for pid in partitions:
            pumps[pid] = client.add_receiver(
                consumer_group=args.consumer,
                partition=pid,
                offset=Offset(args.offset),
                prefetch=50)
        client.run()
        pump(pumps, args.duration)
    finally:
        client.stop()
Example #54
def test_send_and_receive_large_body_size(connstr_receivers):
    if sys.platform.startswith('darwin'):
        pytest.skip("Skipping on OSX - open issue regarding message size")
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()
        payload = 250 * 1024
        sender.send(EventData("A" * payload))
    except:
        raise
    finally:
        client.stop()

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=4))

    assert len(received) == 1
    assert len(list(received[0].body)[0]) == payload
Example #55
def test_send_partition_batch(connstr_receivers):
    connection_str, receivers = connstr_receivers
    def batched():
        for i in range(10):
            yield "Event number {}".format(i)

    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender(partition="1")
    try:
        client.run()
        sender.send(EventData(batch=batched()))
        time.sleep(1)
    except:
        raise
    finally:
        client.stop()

    partition_0 = receivers[0].receive(timeout=2)
    assert len(partition_0) == 0
    partition_1 = receivers[1].receive(timeout=2)
    assert len(partition_1) == 10
def test_example_eventhub_transfer(connection_str):
    import os
    from azure.eventhub import EventHubClient, EventData

    client = EventHubClient.from_connection_string(connection_str)
    sender = client.add_sender()

    try:
        client.run()
        # [START eventhub_client_transfer]
        logger = logging.getLogger("azure.eventhub")
        def callback(outcome, condition):
            logger.info("Message sent. Outcome: {}, Condition: {}".format(
                outcome, condition))

        event_data = EventData(b"A single event")
        sender.transfer(event_data, callback=callback)
        sender.wait()
        # [END eventhub_client_transfer]
    except:
        raise
    finally:
        client.stop()
Example #57
def test_send_with_forced_conn_close_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubClient.from_connection_string(connection_str, debug=True)
    sender = client.add_sender()
    try:
        client.run()
        sender.send(EventData(b"A single event"))
        sender._handler._message_sender.destroy()
        time.sleep(300)
        sender.send(EventData(b"A single event"))
        sender.send(EventData(b"A single event"))
        sender._handler._message_sender.destroy()
        time.sleep(300)
        sender.send(EventData(b"A single event"))
        sender.send(EventData(b"A single event"))
    finally:
        client.stop()
    
    received = []
    for r in receivers:
       received.extend(r.receive(timeout=1))
    assert len(received) == 5
    assert list(received[0].body)[0] == b"A single event"
Example #58
def test_send_batch_with_app_prop_sync(connstr_receivers):
    pytest.skip("Waiting on uAMQP release")
    connection_str, receivers = connstr_receivers
    def batched():
        for i in range(10):
            yield "Event number {}".format(i)
        for i in range(10, 20):
            yield EventData("Event number {}".format(i))

    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()

        app_prop_key = "raw_prop"
        app_prop_value = "raw_value"
        batch_app_prop = {app_prop_key:app_prop_value}
        batch_event = EventData(batch=batched())
        batch_event.application_properties = batch_app_prop

        sender.send(batch_event)
    except:
        raise
    finally:
        client.stop()

    time.sleep(1)

    received = []
    for r in receivers:
        received.extend(r.receive(timeout=3))

    assert len(received) == 20
    for index, message in enumerate(received):
        assert list(message.body)[0] == "Event number {}".format(index).encode('utf-8')
        assert (app_prop_key.encode('utf-8') in message.application_properties) \
            and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8'))
Example #59
def test_send_batch_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    def batched():
        for i in range(10):
            yield "Event number {}".format(i)

    client = EventHubClient.from_connection_string(connection_str, debug=False)
    sender = client.add_sender()
    try:
        client.run()
        sender.send(EventData(batch=batched()))
    except:
        raise
    finally:
        client.stop()

    time.sleep(1)
    received = []
    for r in receivers:
        received.extend(r.receive(timeout=3))

    assert len(received) == 10
    for index, message in enumerate(received):
        assert list(message.body)[0] == "Event number {}".format(index).encode('utf-8')
Example #60
import os
import time
import logging

from azure.eventhub import EventHubClient, EventData

logger = logging.getLogger(__name__)

ADDRESS = os.environ.get('EVENT_HUB_ADDRESS')
# SAS policy and key are not required if they are encoded in the URL
USER = os.environ.get('EVENT_HUB_SAS_POLICY')
KEY = os.environ.get('EVENT_HUB_SAS_KEY')


def callback(outcome, condition):
    logger.info("Message sent. Outcome: {}, Condition: {}".format(
        outcome, condition))


try:
    if not ADDRESS:
        raise ValueError("No EventHubs URL supplied.")

    client = EventHubClient(ADDRESS, debug=False, username=USER, password=KEY)
    sender = client.add_sender(partition="1")
    client.run()
    try:
        start_time = time.time()
        for i in range(100):
            sender.transfer(EventData(str(i)), callback=callback)
        logger.info("Queued 100 messages.")
        sender.wait()
        logger.info("Finished processing queue.")
    except:
        raise
    finally:
        end_time = time.time()
        client.stop()
        run_time = end_time - start_time