def create_eventhub_consumer_client():
    # [START create_eventhub_consumer_client_from_conn_str_sync]
    import os
    from azure.eventhub import EventHubConsumerClient
    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    event_hub = os.environ['EVENT_HUB_NAME']
    consumer = EventHubConsumerClient.from_connection_string(
        conn_str=event_hub_connection_str, event_hub_path=event_hub)
    # [END create_eventhub_consumer_client_from_conn_str_sync]

    # [START create_eventhub_consumer_client_sync]
    import os
    from azure.eventhub import EventHubConsumerClient, EventHubSharedKeyCredential

    hostname = os.environ['EVENT_HUB_HOSTNAME']
    event_hub = os.environ['EVENT_HUB_NAME']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    consumer = EventHubConsumerClient(host=hostname,
                                      event_hub_path=event_hub,
                                      credential=EventHubSharedKeyCredential(
                                          shared_access_policy,
                                          shared_access_key))
    # [END create_eventhub_consumer_client_sync]
    return consumer
def example_create_eventhub_consumer_client():
    # [START create_eventhub_consumer_client_from_conn_str_sync]
    import os
    from azure.eventhub import EventHubConsumerClient
    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    eventhub_name = os.environ['EVENT_HUB_NAME']
    consumer = EventHubConsumerClient.from_connection_string(
        conn_str=event_hub_connection_str,
        consumer_group='$Default',
        eventhub_name=eventhub_name  # The event hub name should be specified if it is not included in the connection string.
    )
    # [END create_eventhub_consumer_client_from_conn_str_sync]
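    # For illustration only (placeholder values, not from the sample's environment): if the
    # connection string already contains an "EntityPath" segment naming the event hub, e.g.
    # "Endpoint=sb://<namespace>.servicebus.windows.net/;SharedAccessKeyName=<policy>;SharedAccessKey=<key>;EntityPath=<eventhub>",
    # the eventhub_name argument can be omitted.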

    # [START create_eventhub_consumer_client_sync]
    import os
    from azure.eventhub import EventHubConsumerClient, EventHubSharedKeyCredential

    fully_qualified_namespace = os.environ['EVENT_HUB_HOSTNAME']
    eventhub_name = os.environ['EVENT_HUB_NAME']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    credential = EventHubSharedKeyCredential(shared_access_policy, shared_access_key)
    consumer = EventHubConsumerClient(
        fully_qualified_namespace=fully_qualified_namespace,
        eventhub_name=eventhub_name,
        consumer_group='$Default',
        credential=credential)
    # [END create_eventhub_consumer_client_sync]
    return consumer
Example #3
def test_receive_load_balancing(connstr_senders):
    if sys.platform.startswith('darwin'):
        pytest.skip("Skipping on OSX - test code using multiple threads. Sometimes OSX aborts python process")

    connection_str, senders = connstr_senders
    pm = InMemoryPartitionManager()
    client1 = EventHubConsumerClient.from_connection_string(
        connection_str, partition_manager=pm, load_balancing_interval=1)
    client2 = EventHubConsumerClient.from_connection_string(
        connection_str, partition_manager=pm, load_balancing_interval=1)

    def on_event(partition_context, event):
        pass

    with client1, client2:
        worker1 = threading.Thread(target=client1.receive,
                                   args=(on_event,),
                                   kwargs={"consumer_group": "$default", "initial_event_position": "-1"})

        worker2 = threading.Thread(target=client2.receive,
                                   args=(on_event,),
                                   kwargs={"consumer_group": "$default", "initial_event_position": "-1"})

        worker1.start()
        worker2.start()
        time.sleep(10)
        assert len(client1._event_processors[("$default", ALL_PARTITIONS)]._consumers) == 1
        assert len(client2._event_processors[("$default", ALL_PARTITIONS)]._consumers) == 1
Example #4
def test_receive_with_event_position_sync(connstr_senders, position, inclusive,
                                          expected_result):
    def on_event(partition_context, event):
        assert partition_context.last_enqueued_event_properties.get(
            'sequence_number') == event.sequence_number
        assert partition_context.last_enqueued_event_properties.get(
            'offset') == event.offset
        assert partition_context.last_enqueued_event_properties.get(
            'enqueued_time') == event.enqueued_time
        assert partition_context.last_enqueued_event_properties.get(
            'retrieval_time') is not None

        if position == "offset":
            on_event.event_position = event.offset
        elif position == "sequence":
            on_event.event_position = event.sequence_number
        else:
            on_event.event_position = event.enqueued_time
        on_event.event = event

    on_event.event_position = None
    connection_str, senders = connstr_senders
    senders[0].send(EventData(b"Inclusive"))
    senders[1].send(EventData(b"Inclusive"))
    client = EventHubConsumerClient.from_connection_string(
        connection_str, consumer_group='$default')
    with client:
        thread = threading.Thread(target=client.receive,
                                  args=(on_event, ),
                                  kwargs={
                                      "starting_position": "-1",
                                      "starting_position_inclusive": inclusive,
                                      "track_last_enqueued_event_properties":
                                      True
                                  })
        thread.daemon = True
        thread.start()
        time.sleep(10)
        assert on_event.event_position is not None
    thread.join()
    senders[0].send(EventData(expected_result))
    senders[1].send(EventData(expected_result))
    client2 = EventHubConsumerClient.from_connection_string(
        connection_str, consumer_group='$default')
    with client2:
        thread = threading.Thread(target=client2.receive,
                                  args=(on_event, ),
                                  kwargs={
                                      "starting_position":
                                      on_event.event_position,
                                      "starting_position_inclusive": inclusive,
                                      "track_last_enqueued_event_properties":
                                      True
                                  })
        thread.daemon = True
        thread.start()
        time.sleep(10)
        assert on_event.event.body_as_str() == expected_result

    thread.join()
def test_receive_owner_level(connstr_senders):
    def on_event(partition_context, event):
        pass
    def on_error(partition_context, error):
        on_error.error = error

    on_error.error = None
    connection_str, senders = connstr_senders
    client1 = EventHubConsumerClient.from_connection_string(connection_str, consumer_group='$default')
    client2 = EventHubConsumerClient.from_connection_string(connection_str, consumer_group='$default')
    with client1, client2:
        thread1 = threading.Thread(target=client1.receive, args=(on_event,),
                                   kwargs={"partition_id": "0", "starting_position": "-1",
                                           "on_error": on_error})
        thread1.start()
        for i in range(5):
            ed = EventData("Event Number {}".format(i))
            senders[0].send(ed)
        time.sleep(10)
        thread2 = threading.Thread(target=client2.receive, args=(on_event,),
                                   kwargs = {"partition_id": "0", "starting_position": "-1", "owner_level": 1})
        thread2.start()
        for i in range(5):
            ed = EventData("Event Number {}".format(i))
            senders[0].send(ed)
        time.sleep(20)
    thread1.join()
    thread2.join()
    assert isinstance(on_error.error, EventHubError)
Example #6
def test_get_partition_ids(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'], '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    with client:
        partition_ids = client.get_partition_ids()
        assert partition_ids == ['0', '1']
Example #7
def test_get_properties(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'], '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    live_eventhub['access_key']))
    properties = client.get_eventhub_properties()
    assert properties['eventhub_name'] == live_eventhub[
        'event_hub'] and properties['partition_ids'] == ['0', '1']
    client.close()
def test_get_partition_properties(live_eventhub):
    client = EventHubConsumerClient(live_eventhub['hostname'], live_eventhub['event_hub'],
                            EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    properties = client.get_partition_properties('0')
    assert properties['event_hub_path'] == live_eventhub['event_hub'] \
        and properties['id'] == '0' \
        and 'beginning_sequence_number' in properties \
        and 'last_enqueued_sequence_number' in properties \
        and 'last_enqueued_offset' in properties \
        and 'last_enqueued_time_utc' in properties \
        and 'is_empty' in properties
    client.close()
Example #9
def consume_events(starting_position):
    global consumer_group
    global events_received_count
    global max_events_to_receive
    global client
    event_count  = 0
    eh_conn_str  = os.environ['AZURE_STREAMPOC_EVENTHUB_CONN_STRING']
    eh_namespace = os.environ['AZURE_STREAMPOC_EVENTHUB_NAMESPACE'] 
    eh_hubname   = os.environ['AZURE_STREAMPOC_EVENTHUB_HUBNAME']
    zipcodes     = read_nc_zipcodes_data()
    start_epoch  = arrow.utcnow().timestamp
    consumer_group = reformat_consumer_group(consumer_group)

    print('consume_events')
    print('consumer_group:        {}'.format(consumer_group))
    print('starting_position:     {}'.format(starting_position))
    print('max_events_to_receive: {}'.format(max_events_to_receive))
    print('eventhub namespace:    {}'.format(eh_namespace))
    print('eventhub hubname:      {}'.format(eh_hubname))
    print('eventhub conn_str:     {}'.format(eh_conn_str))
    event_count = 0

    if max_events_to_receive > 0:
        print('creating EventHubConsumerClient')
        client = EventHubConsumerClient.from_connection_string(
            eh_conn_str, consumer_group, eventhub_name=eh_hubname)

        # "-1" is from the beginning of the partition.
        client.receive(on_event=on_event_received, starting_position=starting_position)  
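
# A minimal sketch (assumed, not shown in the snippet above) of the on_event_received callback
# that consume_events passes to client.receive(); the body is illustrative only.
def on_event_received(partition_context, event):
    global events_received_count
    events_received_count += 1
    print('partition {} event: {}'.format(
        partition_context.partition_id, event.body_as_str()))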
Example #10
def test_client_secret_credential(aad_credential, live_eventhub):
    try:
        from azure.identity import EnvironmentCredential
    except ImportError:
        pytest.skip("No azure identity library")
    credential = EnvironmentCredential()
    producer_client = EventHubProducerClient(
        host=live_eventhub['hostname'],
        event_hub_path=live_eventhub['event_hub'],
        credential=credential,
        user_agent='customized information')
    consumer_client = EventHubConsumerClient(
        host=live_eventhub['hostname'],
        event_hub_path=live_eventhub['event_hub'],
        credential=credential,
        user_agent='customized information')
    with producer_client:
        producer_client.send(EventData(body='A single message'))

    def on_events(partition_context, events):
        assert partition_context.partition_id == '0'
        assert len(events) == 1
        assert list(events[0].body)[0] == 'A single message'.encode('utf-8')

    with consumer_client:
        worker = threading.Thread(target=consumer_client.receive,
                                  args=(on_events, ),
                                  kwargs={
                                      "consumer_group": '$default',
                                      "partition_id": '0'
                                  })
        worker.start()
        time.sleep(2)
Example #11
def test_receive_end_of_stream(connstr_senders):
    def on_event(partition_context, event):
        if partition_context.partition_id == "0":
            assert event.body_as_str() == "Receiving only a single event"
            assert list(event.body)[0] == b"Receiving only a single event"
            on_event.called = True

    on_event.called = False
    connection_str, senders = connstr_senders
    client = EventHubConsumerClient.from_connection_string(connection_str)
    with client:
        thread = threading.Thread(target=client.receive,
                                  args=(on_event, "$default"),
                                  kwargs={
                                      "partition_id": "0",
                                      "initial_event_position": "@latest"
                                  })
        thread.daemon = True
        thread.start()
        time.sleep(10)
        assert on_event.called is False
        senders[0].send(EventData(b"Receiving only a single event"))
        time.sleep(10)
        assert on_event.called is True
    thread.join()
Example #12
def test_receive_over_websocket_sync(connstr_senders):
    app_prop = {"raw_prop": "raw_value"}

    def on_event(partition_context, event):
        on_event.received.append(event)
        on_event.app_prop = event.application_properties

    on_event.received = []
    on_event.app_prop = None
    connection_str, senders = connstr_senders
    client = EventHubConsumerClient.from_connection_string(
        connection_str, transport_type=TransportType.AmqpOverWebsocket)

    event_list = []
    for i in range(5):
        ed = EventData("Event Number {}".format(i))
        ed.application_properties = app_prop
        event_list.append(ed)
    senders[0].send(event_list)

    with client:
        thread = threading.Thread(target=client.receive,
                                  args=(on_event, "$default"),
                                  kwargs={
                                      "partition_id": "0",
                                      "initial_event_position": "-1"
                                  })
        thread.start()
        time.sleep(10)
    assert len(on_event.received) == 5
    for ed in on_event.received:
        assert ed.application_properties[b"raw_prop"] == b"raw_value"
Example #13
    def __init__(self, arguments):
        super().__init__(arguments)
        connection_string = self.get_from_env("AZURE_EVENTHUB_CONNECTION_STRING")
        eventhub_name = self.get_from_env("AZURE_EVENTHUB_NAME")
        self.async_producer = AsyncEventHubProducerClient.from_connection_string(connection_string, eventhub_name=eventhub_name)
        self.consumer = EventHubConsumerClient.from_connection_string(connection_string, _EventHubTest.consumer_group, eventhub_name=eventhub_name)
        self.async_consumer = AsyncEventHubConsumerClient.from_connection_string(connection_string, _EventHubTest.consumer_group, eventhub_name=eventhub_name)
def example_eventhub_consumer_ops():
    # [START eventhub_consumer_client_close_sync]
    import os
    import logging
    import threading
    import time

    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    eventhub_name = os.environ['EVENT_HUB_NAME']

    from azure.eventhub import EventHubConsumerClient
    consumer = EventHubConsumerClient.from_connection_string(
        conn_str=event_hub_connection_str,
        consumer_group="$Default",
        eventhub_name=eventhub_name)

    logger = logging.getLogger("azure.eventhub")

    def on_event(partition_context, event):
        logger.info("Received event from partition: {}".format(
            partition_context.partition_id))
        # Do ops on the received event

    # The 'receive' method is a blocking call, so it can be executed in a thread for
    # non-blocking behavior and combined with the 'close' method.

    worker = threading.Thread(target=consumer.receive,
                              kwargs={"on_event": on_event})
    worker.start()
    time.sleep(10)  # Keep receiving for 10s then close.
    # Close down the consumer handler explicitly.
    consumer.close()
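    # Once close() is called, the blocking 'receive' call in the worker thread returns,
    # so the thread can be joined to finish cleanly.
    worker.join()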
Example #15
    def __init__(self, opts):
        self._opts = opts
        self._client = None
        self._mode = None
        self._state = None
        self._verbose = False
        conn_str = opts['conn_str']
        hub_name = opts['hub_name']

        if 'verbose' in opts:
            if opts['verbose'] == True:
                self._verbose = True

        if self.is_consumer():
            consumer_group = opts['consumer_group']
            self._client = EventHubConsumerClient.from_connection_string(
                conn_str, consumer_group, eventhub_name=hub_name)
            self._mode = 'consumer'
            self._state = 'open'
        else:
            self._client = EventHubProducerClient.from_connection_string(
                conn_str, eventhub_name=hub_name)
            self._mode = 'producer'
            self._state = 'open'

        if self._verbose:
            print("EventHub.__init__ client: {}".format(str(type(
                self._client))))
Example #16
def test_receive_end_of_stream(connstr_senders):
    def on_event(partition_context, event):
        if partition_context.partition_id == "0":
            assert event.body_as_str() == "Receiving only a single event"
            assert list(event.body)[0] == b"Receiving only a single event"
            on_event.called = True
            assert event.partition_key == b'0'
            event_str = str(event)
            assert ", offset: " in event_str
            assert ", sequence_number: " in event_str
            assert ", enqueued_time: " in event_str
            assert ", partition_key: 0" in event_str

    on_event.called = False
    connection_str, senders = connstr_senders
    client = EventHubConsumerClient.from_connection_string(
        connection_str, consumer_group='$default')
    with client:
        thread = threading.Thread(target=client.receive,
                                  args=(on_event, ),
                                  kwargs={
                                      "partition_id": "0",
                                      "starting_position": "@latest"
                                  })
        thread.daemon = True
        thread.start()
        time.sleep(10)
        assert on_event.called is False
        senders[0].send(EventData(b"Receiving only a single event"),
                        partition_key='0')
        time.sleep(10)
        assert on_event.called is True
    thread.join()
Example #17
def test_receive_no_partition(connstr_senders):
    connection_str, senders = connstr_senders
    senders[0].send(EventData("Test EventData"))
    senders[1].send(EventData("Test EventData"))
    client = EventHubConsumerClient.from_connection_string(connection_str, consumer_group='$default', receive_timeout=1)

    def on_event(partition_context, event):
        on_event.received += 1
        partition_context.update_checkpoint(event)
        on_event.namespace = partition_context.fully_qualified_namespace
        on_event.eventhub_name = partition_context.eventhub_name
        on_event.consumer_group = partition_context.consumer_group
        on_event.offset = event.offset
        on_event.sequence_number = event.sequence_number

    on_event.received = 0
    on_event.namespace = None
    on_event.eventhub_name = None
    on_event.consumer_group = None
    on_event.offset = None
    on_event.sequence_number = None

    with client:
        worker = threading.Thread(target=client.receive,
                                  args=(on_event,),
                                  kwargs={"starting_position": "-1"})
        worker.start()
        time.sleep(10)
        assert on_event.received == 2
        checkpoints = list(client._event_processors.values())[0]._checkpoint_store.list_checkpoints(
            on_event.namespace, on_event.eventhub_name, on_event.consumer_group
        )
        assert len([checkpoint for checkpoint in checkpoints if checkpoint["offset"] == on_event.offset]) > 0
        assert len([checkpoint for checkpoint in checkpoints if checkpoint["sequence_number"] == on_event.sequence_number]) > 0
Example #18
    def run(self, config: IotHubConfig) -> None:
        """
        Process queue and upload to CDF

        Args:
            config: Configuration parameters
        """

        CONNECTION_STR = f"Endpoint={config.azureiothub.eventhub_compatible_endpoint}/;SharedAccessKeyName=service;SharedAccessKey={config.azureiothub.iot_sas_key};EntityPath={config.azureiothub.eventhub_compatible_path}"

        client = EventHubConsumerClient.from_connection_string(
            conn_str=CONNECTION_STR,
            consumer_group="$default",
            # transport_type=TransportType.AmqpOverWebsocket,  # uncomment it if you want to use web socket
            # http_proxy={  # uncomment if you want to use proxy
            #     'proxy_hostname': '127.0.0.1',  # proxy hostname.
            #     'proxy_port': 3128,  # proxy port.
            #     'username': '******',
            #     'password': '******'
            # }
        )

        self.asset_id = cdf_client.assets.retrieve(
            external_id=config.azureiothub.iot_root).id

        try:
            with client:
                client.receive_batch(
                    on_event_batch=self.on_event_batch,
                    on_error=self.on_error,
                )
        except KeyboardInterrupt:
            print("Receiving has stopped.")
def example_eventhub_consumer_ops():
    # [START eventhub_consumer_client_close_sync]
    import os
    import logging
    import threading
    import time

    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    event_hub = os.environ['EVENT_HUB_NAME']

    from azure.eventhub import EventHubConsumerClient
    consumer = EventHubConsumerClient.from_connection_string(
        conn_str=event_hub_connection_str, event_hub_path=event_hub)

    logger = logging.getLogger("azure.eventhub")

    def on_events(partition_context, events):
        logger.info("Received {} messages from partition: {}".format(
            len(events), partition_context.partition_id))
        # Do ops on received events

    # The receive method is a blocking call, so execute it in a thread to
    # better demonstrate how to stop the receiving by calling the close method.

    worker = threading.Thread(target=consumer.receive,
                              kwargs={
                                  "on_events": on_events,
                                  "consumer_group": "$Default"
                              })
    worker.start()
    time.sleep(10)  # Keep receiving for 10s then close.
    # Close down the consumer handler explicitly.
    consumer.close()
Example #20
def test_receive_connection_idle_timeout_and_reconnect_sync(connstr_senders):
    connection_str, senders = connstr_senders
    client = EventHubConsumerClient.from_connection_string(
        conn_str=connection_str, consumer_group='$default', idle_timeout=10)

    def on_event_received(event):
        on_event_received.event = event

    with client:
        consumer = client._create_consumer("$default", "0", "-1",
                                           on_event_received)
        with consumer:
            consumer._open()
            time.sleep(11)

            ed = EventData("Event")
            senders[0].send(ed)

            consumer._handler.do_work()
            assert consumer._handler._connection._state == c_uamqp.ConnectionState.DISCARDING

            duration = 10
            now_time = time.time()
            end_time = now_time + duration

            while now_time < end_time:
                consumer.receive()
                time.sleep(0.01)
                now_time = time.time()

            assert on_event_received.event.body_as_str() == "Event"
Example #21
def test_receive_partition(connstr_senders):
    connection_str, senders = connstr_senders
    senders[0].send(EventData("Test EventData"))
    client = EventHubConsumerClient.from_connection_string(
        connection_str, consumer_group='$default')

    def on_event(partition_context, event):
        on_event.received += 1
        on_event.partition_id = partition_context.partition_id
        on_event.consumer_group = partition_context.consumer_group
        on_event.fully_qualified_namespace = partition_context.fully_qualified_namespace
        on_event.eventhub_name = partition_context.eventhub_name

    on_event.received = 0
    with client:
        worker = threading.Thread(target=client.receive,
                                  args=(on_event, ),
                                  kwargs={
                                      "starting_position": "-1",
                                      "partition_id": "0"
                                  })
        worker.start()
        time.sleep(10)
        assert on_event.received == 1
        assert on_event.partition_id == "0"
        assert on_event.consumer_group == "$default"
        assert on_event.fully_qualified_namespace in connection_str
        assert on_event.eventhub_name == senders[0]._client.eventhub_name
Example #22
def test_receive_batch_early_callback(connstr_senders):
    ''' Test that the callback is invoked as soon as max_batch_size is reached, before max_wait_time elapses.
    '''
    connection_str, senders = connstr_senders
    for _ in range(10):
        senders[0].send(EventData("Test EventData"))
    client = EventHubConsumerClient.from_connection_string(
        connection_str, consumer_group='$default')

    def on_event_batch(partition_context, event_batch):
        on_event_batch.received += len(event_batch)

    on_event_batch.received = 0

    with client:
        worker = threading.Thread(target=client.receive_batch,
                                  args=(on_event_batch, ),
                                  kwargs={
                                      "max_batch_size": 10,
                                      "max_wait_time": 100,
                                      "starting_position": "-1",
                                      "partition_id": "0"
                                  })
        worker.start()
        time.sleep(10)
        assert on_event_batch.received == 10
    worker.join()
Example #23
def event_hub_task():
    # Create the event hub client to receive messages from IoT hub
    client = EventHubConsumerClient.from_connection_string(
        conn_str=connection_string, consumer_group=consumer_group_name)

    # Set up the batch receiving of messages
    with client:
        client.receive_batch(on_event_batch=on_event_batch)
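
# A minimal sketch (assumed, not shown in the snippet above) of the on_event_batch callback
# referenced by event_hub_task; receive_batch invokes it with a partition context and a list of events.
def on_event_batch(partition_context, events):
    for event in events:
        print("Partition {}: {}".format(
            partition_context.partition_id, event.body_as_str()))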
Example #24
    def send(self):
        client = EventHubConsumerClient.from_connection_string(
            conn_str=self.connectionString,
            consumer_group="$default",
        )
        try:
            with client:
                client.receive_batch(on_event_batch=self.on_event_batch,
                                     on_error=self.on_error)
        except KeyboardInterrupt:
            print("Receiving has stopped.")
Example #25
    def __init__(self):
        # This test requires a previously created Event Hub.
        # In this example the name is "myeventhub", but it can be changed below.
        connection_string = os.environ["EVENT_HUBS_CONNECTION_STRING"]
        event_hub_name = "myeventhub"
        self.consumer_client = EventHubConsumerClient.from_connection_string(
            connection_string, CONSUMER_GROUP, idle_timeout=RECEIVE_TIMEOUT)
        self.producer_client = EventHubProducerClient.from_connection_string(
            connection_string)

        self.received_event_count = 0
Example #26
def main():
    client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STR_SUB,
        consumer_group="$default",
    )
    try:
        with client:
            client.receive_batch(on_event_batch=on_event_batch,
                                 on_error=on_error)
    except KeyboardInterrupt:
        print("Receiving has stopped.")
Example #27
    def createClients(self):
        try:
            client = EventHubConsumerClient.from_connection_string(
                conn_str=self.CONNECTION_STR_LSM,
                consumer_group="$default",
            )
            clientJob = ClientJob(client, "LSM")
            clientJob.start()

        except KeyboardInterrupt:
            print("Primanje je završilo.")
def test_receive_with_invalid_hostname_sync(invalid_hostname):
    def on_event(partition_context, event):
        pass

    client = EventHubConsumerClient.from_connection_string(
        invalid_hostname, consumer_group='$default')
    with client:
        thread = threading.Thread(target=client.receive, args=(on_event, ))
        thread.start()
        time.sleep(2)
        assert len(client._event_processors) == 1
    thread.join()
Example #29
def test_custom_certificate():
    producer = EventHubProducerClient("fake.host.com",
                                      "fake_eh",
                                      None,
                                      connection_verify='/usr/bin/local/cert')
    assert producer._config.connection_verify == '/usr/bin/local/cert'

    consumer = EventHubConsumerClient("fake.host.com",
                                      "fake_eh",
                                      "fake_group",
                                      None,
                                      connection_verify='D:/local/certfile')
    assert consumer._config.connection_verify == 'D:/local/certfile'
Example #30
def test_get_properties_with_auth_error_sync(live_eventhub):
    client = EventHubConsumerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'], '$default',
        EventHubSharedKeyCredential(live_eventhub['key_name'],
                                    "AaBbCcDdEeFf="))
    with client:
        with pytest.raises(AuthenticationError) as e:
            client.get_eventhub_properties()

    client = EventHubConsumerClient(
        live_eventhub['hostname'], live_eventhub['event_hub'], '$default',
        EventHubSharedKeyCredential("invalid", live_eventhub['access_key']))
    with client:
        with pytest.raises(AuthenticationError) as e:
            client.get_eventhub_properties()