def send_once():
    client = EventHubProducerClient.from_connection_string(
        connection_str, eventhub_name=eventhub_name
    )
    with client:
        event_data = client.create_batch()
        event_data.add(EventData('{"film":"Avengers", "rank": "3"}'))
        client.send_batch(event_data)

def test_send_partition(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        client.send(EventData(b"Data"), partition_id="1")

    partition_0 = receivers[0].receive_message_batch(timeout=5000)
    assert len(partition_0) == 0
    partition_1 = receivers[1].receive_message_batch(timeout=5000)
    assert len(partition_1) == 1

def test_send_too_large_message(connection_str):
    if sys.platform.startswith('darwin'):
        pytest.skip("Skipping on OSX - open issue regarding message size")
    client = EventHubProducerClient.from_connection_string(connection_str)
    try:
        data = EventData(b"A" * 1100000)
        with pytest.raises(EventDataSendError):
            client.send(data)
    finally:
        client.close()

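# Hedged companion sketch, not part of the original tests: rather than
# hard-coding 1100000 bytes, the link's permitted payload size can be read off
# a batch created by the client; max_size_in_bytes is a property of
# EventDataBatch in the v5 SDK. print_max_message_size is a hypothetical name.
def print_max_message_size(connection_str):
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        batch = client.create_batch()
        print("max batch size in bytes:", batch.max_size_in_bytes)
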
def test_send_batch_with_invalid_key(live_eventhub):
    conn_str = live_eventhub["connection_str"].format(
        live_eventhub['hostname'],
        live_eventhub['key_name'],
        'invalid',
        live_eventhub['event_hub'])
    client = EventHubProducerClient.from_connection_string(conn_str)
    with pytest.raises(ConnectError):
        batch = EventDataBatch()
        batch.add(EventData("test data"))
        client.send_batch(batch)
    client.close()

def test_send_batch_pid_pk(invalid_hostname, partition_id, partition_key):
    # Use invalid_hostname because this is not a live test.
    client = EventHubProducerClient.from_connection_string(invalid_hostname)
    batch = EventDataBatch(partition_id=partition_id, partition_key=partition_key)
    with client:
        with pytest.raises(TypeError):
            client.send_batch(batch, partition_id=partition_id, partition_key=partition_key)

def test_send_batch_null_body(connection_str):
    client = EventHubProducerClient.from_connection_string(connection_str)
    try:
        with pytest.raises(ValueError):
            data = EventData(None)
            batch = client.create_batch()
            batch.add(data)
            client.send_batch(batch)
    finally:
        client.close()

def test_send_and_receive_small_body(connstr_receivers, payload):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        client.send(EventData(payload))

    received = []
    for r in receivers:
        received.extend([EventData._from_message(x) for x in r.receive_message_batch(timeout=5000)])
    assert len(received) == 1
    assert list(received[0].body)[0] == payload

def test_send_non_ascii(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        client.send(EventData(u"é,è,à,ù,â,ê,î,ô,û"), partition_id="0")
        client.send(EventData(json.dumps({"foo": u"漢字"})), partition_id="0")
    time.sleep(1)

    partition_0 = [EventData._from_message(x) for x in receivers[0].receive_message_batch(timeout=5000)]
    assert len(partition_0) == 2
    assert partition_0[0].body_as_str() == u"é,è,à,ù,â,ê,î,ô,û"
    assert partition_0[1].body_as_json() == {"foo": u"漢字"}

def __init__(self):
    # This test requires a previously created Event Hub.
    # In this example the name is "myeventhub", but it can be changed below.
    connection_string = os.environ["EVENT_HUBS_CONNECTION_STRING"]
    event_hub_name = "myeventhub"
    self.consumer_client = EventHubConsumerClient.from_connection_string(
        connection_string, CONSUMER_GROUP, idle_timeout=RECEIVE_TIMEOUT)
    self.producer_client = EventHubProducerClient.from_connection_string(
        connection_string)
    self.received_event_count = 0

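# Minimal sketch, not part of the original class, of one way the clients built
# in __init__ above might be driven: EventHubConsumerClient.receive() takes an
# on_event callback and blocks until stopped or until idle_timeout elapses.
# The run() method name is an assumption for illustration.
def run(self):
    def on_event(partition_context, event):
        # Count every event the service hands us.
        self.received_event_count += 1

    with self.consumer_client:
        self.consumer_client.receive(on_event=on_event, starting_position="-1")
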
def test_send_batch_with_invalid_hostname(invalid_hostname):
    if sys.platform.startswith('darwin'):
        pytest.skip(
            "Skipping on OSX - it keeps reporting 'Unable to set external certificates' "
            "and blocking other tests")
    client = EventHubProducerClient.from_connection_string(invalid_hostname)
    with client:
        with pytest.raises(ConnectError):
            batch = EventDataBatch()
            batch.add(EventData("test data"))
            client.send_batch(batch)

def test_send_batch_to_invalid_partitions(connection_str):
    partitions = ["XYZ", "-1", "1000", "-"]
    for p in partitions:
        client = EventHubProducerClient.from_connection_string(connection_str)
        try:
            with pytest.raises(ConnectError):
                batch = client.create_batch(partition_id=p)
                batch.add(EventData("test data"))
                client.send_batch(batch)
        finally:
            client.close()

def connstr_senders(connection_str):
    client = EventHubProducerClient.from_connection_string(connection_str)
    partitions = client.get_partition_ids()

    senders = []
    for p in partitions:
        sender = client._create_producer(partition_id=p)
        senders.append(sender)
    yield connection_str, senders

    for s in senders:
        s.close()
    client.close()

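# Hedged usage sketch: connstr_senders yields and then cleans up, so in the
# original suite it is presumably registered with @pytest.fixture(). A
# hypothetical test consuming it could look like this:
def test_uses_senders(connstr_senders):
    connection_str, senders = connstr_senders
    # One low-level producer was created per partition.
    assert len(senders) > 0
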
def test_send_list(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    payload = "A1"
    with client:
        client.send_batch([EventData(payload)])

    received = []
    for r in receivers:
        received.extend([EventData._from_message(x) for x in r.receive_message_batch(timeout=10000)])
    assert len(received) == 1
    assert received[0].body_as_str() == payload

def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')
    connection_str = os.environ.get("EventHubConnectionString")
    eventhub_name = 'newactivityhub'
    client = EventHubProducerClient.from_connection_string(connection_str, eventhub_name=eventhub_name)
    with client:
        event_data_batch = client.create_batch()
        event_data_batch.add(EventData('Message inside EventBatchData'))
        client.send_batch(event_data_batch)
    # The signature promises an HttpResponse, so return one; the with block
    # above also ensures the producer is closed.
    return func.HttpResponse("Event sent.", status_code=200)

def test_send_over_websocket_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(
        connection_str, transport_type=TransportType.AmqpOverWebsocket)
    with client:
        for i in range(20):
            client.send(EventData("Event Number {}".format(i)))
    time.sleep(1)

    received = []
    for r in receivers:
        received.extend(r.receive_message_batch(timeout=5000))
    assert len(received) == 20

def test_send_over_websocket_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(
        connection_str, transport_type=TransportType.AmqpOverWebsocket)
    with client:
        batch = client.create_batch(partition_id="0")
        batch.add(EventData("Event Data"))
        client.send_batch(batch)
    time.sleep(1)

    received = []
    received.extend(receivers[0].receive_message_batch(max_batch_size=5, timeout=10000))
    assert len(received) == 1

def main(event: func.EventHubEvent):
    storage_orders_conn_str = os.environ.get('storage_orders_conn_str')
    storage_orders_container = os.environ.get('storage_orders_container')
    eventhub_ns_sap_sl = os.environ.get('eventhub_ns_sap_sl')
    eventhub_order_combine_files = os.environ.get('eventhub_order_combine_files')

    container = ContainerClient.from_connection_string(
        conn_str=storage_orders_conn_str, container_name=storage_orders_container)

    event_body = event.get_body().decode('utf-8')
    logging.info("OrderFileSeeker-event_body" + event_body)
    event_json = json.loads(event_body)

    for e in event_json:
        url = e["data"]["url"]
        if 'orders' not in url:
            return
        url_dirname = os.path.dirname(url)
        url_basename = os.path.basename(url)
        order_id = re.findall(r'\d+', url_basename)[0]

        blobs = list(container.list_blobs(name_starts_with=order_id))
        blob_ts = {b['name']: (b['last_modified'], b['etag']) for b in blobs}
        blob_ts_max = max(blob_ts.values())

        doc = dict()
        if len(blobs) == 3 and blob_ts[url_basename] == blob_ts_max:
            for b in blobs:
                blob_name = b['name']
                file_type = order_file_type.get(
                    re.findall(r'(?<=-)\w+(?=\.)', blob_name)[0].lower())
                doc[file_type] = f'{url_dirname}/{blob_name}'
            doc_json = json.dumps(doc)

            producer = EventHubProducerClient.from_connection_string(
                conn_str=eventhub_ns_sap_sl, eventhub_name=eventhub_order_combine_files)
            try:
                event_data_batch = producer.create_batch()
                event_data_batch.add(EventData(doc_json))
                producer.send_batch(event_data_batch)
            finally:
                producer.close()

def test_client_sas_credential(live_eventhub):
    # This should "just work" to validate known-good.
    hostname = live_eventhub['hostname']
    producer_client = EventHubProducerClient.from_connection_string(
        live_eventhub['connection_str'], eventhub_name=live_eventhub['event_hub'])
    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # This should also work, but now using SAS tokens.
    credential = EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key'])
    auth_uri = "sb://{}/{}".format(hostname, live_eventhub['event_hub'])
    token = credential.get_token(auth_uri).token
    producer_client = EventHubProducerClient(
        fully_qualified_namespace=hostname,
        eventhub_name=live_eventhub['event_hub'],
        credential=EventHubSASTokenCredential(token, time.time() + 3000))
    with producer_client:
        batch = producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        producer_client.send_batch(batch)

    # Finally let's do it with SAS token + conn str
    token_conn_str = "Endpoint=sb://{}/;SharedAccessSignature={};".format(
        hostname, token.decode())
    conn_str_producer_client = EventHubProducerClient.from_connection_string(
        token_conn_str, eventhub_name=live_eventhub['event_hub'])
    with conn_str_producer_client:
        batch = conn_str_producer_client.create_batch(partition_id='0')
        batch.add(EventData(body='A single message'))
        conn_str_producer_client.send_batch(batch)

def test_send_no_partition_batch(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        event_batch = client.create_batch()
        try:
            while True:
                event_batch.try_add(EventData(b"Data"))
        except ValueError:
            client.send(event_batch)

    partition_0 = receivers[0].receive(timeout=2)
    partition_1 = receivers[1].receive(timeout=2)
    assert len(partition_0) + len(partition_1) > 10

def producer_connecting_to_custom_endpoint():
    producer_client = EventHubProducerClient.from_connection_string(
        conn_str=CONNECTION_STR,
        eventhub_name=EVENTHUB_NAME,
        custom_endpoint_address=CUSTOM_ENDPOINT_ADDRESS,
        connection_verify=CUSTOM_CA_BUNDLE_PATH,
    )
    with producer_client:
        # Without specifying partition_id or partition_key,
        # the events will be distributed to available partitions via round-robin.
        event_data_batch = producer_client.create_batch()
        event_data_batch.add(EventData('Single message'))
        producer_client.send_batch(event_data_batch)
        print("Sent a message.")

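# Hedged note for the sample above: the module-level constants it references
# are assumptions drawn from typical SDK samples, along the lines of:
CONNECTION_STR = os.environ['EVENT_HUB_CONN_STR']
EVENTHUB_NAME = os.environ['EVENT_HUB_NAME']
# Address of an intermediary (e.g. an application gateway) to route through.
CUSTOM_ENDPOINT_ADDRESS = 'sb://<custom_endpoint_hostname>:<custom_endpoint_port>'
# Optional path to a CA bundle used to verify the custom endpoint's TLS certificate.
CUSTOM_CA_BUNDLE_PATH = '<path_to_custom_ca_bundle.pem>'
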
def test_send_and_receive_large_body_size(connstr_receivers):
    if sys.platform.startswith('darwin'):
        pytest.skip("Skipping on OSX - open issue regarding message size")
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        payload = 250 * 1024
        client.send(EventData("A" * payload))

    received = []
    for r in receivers:
        received.extend([EventData._from_message(x) for x in r.receive_message_batch(timeout=10000)])
    assert len(received) == 1
    assert len(list(received[0].body)[0]) == payload

def main(documents: func.DocumentList) -> str:
    eventhub_ns_sap_sl = os.environ.get('eventhub_ns_sap_sl')
    eventhub_cosmosdb_change_feed = os.environ.get('eventhub_cosmosdb_change_feed')
    producer = EventHubProducerClient.from_connection_string(
        conn_str=eventhub_ns_sap_sl, eventhub_name=eventhub_cosmosdb_change_feed)
    event_data_batch = producer.create_batch()
    for doc in documents:
        doc['document_source'] = 'rating'
        event_data_batch.add(EventData(doc.to_json()))
    try:
        producer.send_batch(event_data_batch)
    finally:
        producer.close()

def example_eventhub_producer_ops():
    # [START eventhub_producer_client_close_sync]
    import os
    from azure.eventhub import EventHubProducerClient, EventData

    event_hub_connection_str = os.environ['EVENT_HUB_CONN_STR']
    event_hub = os.environ['EVENT_HUB_NAME']
    producer = EventHubProducerClient.from_connection_string(
        conn_str=event_hub_connection_str, event_hub_path=event_hub)
    try:
        producer.send(EventData(b"A single event"))
    finally:
        # Close down the producer handler.
        producer.close()

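# The snippet above targets an early v5 preview (the event_hub_path kwarg and
# producer.send). A minimal sketch of the same operation against the GA v5
# API, assuming the same environment variables:
def example_eventhub_producer_ops_ga():
    import os
    from azure.eventhub import EventHubProducerClient, EventData

    producer = EventHubProducerClient.from_connection_string(
        conn_str=os.environ['EVENT_HUB_CONN_STR'],
        eventhub_name=os.environ['EVENT_HUB_NAME'])
    try:
        # GA v5 sends batches rather than bare events.
        batch = producer.create_batch()
        batch.add(EventData(b"A single event"))
        producer.send_batch(batch)
    finally:
        producer.close()
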
def send_events(do_send, max_events):
    event_count = 0
    eh_conn_str = os.environ['AZURE_STREAMPOC_EVENTHUB_CONN_STRING']
    eh_namespace = os.environ['AZURE_STREAMPOC_EVENTHUB_NAMESPACE']
    eh_hubname = os.environ['AZURE_STREAMPOC_EVENTHUB_HUBNAME']
    zipcodes = read_nc_zipcodes_data()
    start_epoch = arrow.utcnow().timestamp

    print('send_events')
    print('do_send: {}'.format(do_send))
    print('max_events: {}'.format(max_events))
    print('sleep_seconds: {}'.format(sleep_seconds))
    print('eventhub namespace: {}'.format(eh_namespace))
    print('eventhub hubname: {}'.format(eh_hubname))
    print('eventhub conn_str: {}'.format(eh_conn_str))
    print('# zipcodes loaded: {}'.format(len(zipcodes)))

    if do_send > 0:
        print('creating EventHubProducerClient')
        client = EventHubProducerClient.from_connection_string(
            eh_conn_str, eventhub_name=eh_hubname)
        while event_count < max_events:
            try:
                event_count = event_count + 1
                evt = json.dumps(random_zipcode(zipcodes))
                batch_of_1 = client.create_batch()
                batch_of_1.add(EventData(evt))
                print("\nsending event: {}".format(evt))
                client.send_batch(batch_of_1)
                time.sleep(sleep_seconds)
            except:
                sys.stderr.write('Exception encountered')
                traceback.print_exc(file=sys.stderr)
        if client:
            client.close()
            time.sleep(2)
        print("query cosmosdb with: SELECT * FROM c where c.sender = '{}' and c.epoch >= {}".format(
            'python_ms_sdk', start_epoch))
    else:
        while event_count < max_events:
            event_count = event_count + 1
            zipcode = random_zipcode(zipcodes)
            evt = json.dumps(zipcode, sort_keys=False, indent=2)
            print(evt)
        print('end-of-job producer no-send path')

def test_send_non_ascii(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        batch = client.create_batch(partition_id="0")
        batch.add(EventData(u"é,è,à,ù,â,ê,î,ô,û"))
        batch.add(EventData(json.dumps({"foo": u"漢字"})))
        client.send_batch(batch)
    time.sleep(1)

    # receive_message_batch() returns as soon as it receives any messages,
    # before max_batch_size or the timeout is reached. It could return 1, 2,
    # or any number up to max_batch_size, so call it twice to ensure both
    # events are received.
    partition_0 = [EventData._from_message(x) for x in receivers[0].receive_message_batch(timeout=5000)] + \
                  [EventData._from_message(x) for x in receivers[0].receive_message_batch(timeout=5000)]
    assert len(partition_0) == 2
    assert partition_0[0].body_as_str() == u"é,è,à,ù,â,ê,î,ô,û"
    assert partition_0[1].body_as_json() == {"foo": u"漢字"}

def test_send_connection_idle_timeout_and_reconnect_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(conn_str=connection_str, idle_timeout=10)
    with client:
        ed = EventData('data')
        sender = client._create_producer(partition_id='0')
        with sender:
            sender._open_with_retry()
            time.sleep(11)
            sender._unsent_events = [ed.message]
            ed.message.on_send_complete = sender._on_outcome
            with pytest.raises((uamqp.errors.ConnectionClose,
                                uamqp.errors.MessageHandlerError,
                                OperationTimeoutError)):
                # Mac may raise OperationTimeoutError or MessageHandlerError
                sender._send_event_data()
            sender._send_event_data_with_retry()

    messages = receivers[0].receive_message_batch(max_batch_size=10, timeout=10000)
    received_ed1 = EventData._from_message(messages[0])
    assert received_ed1.body_as_str() == 'data'

def test_send_multiple_partitions_with_app_prop(connstr_receivers):
    connection_str, receivers = connstr_receivers
    app_prop_key = "raw_prop"
    app_prop_value = "raw_value"
    app_prop = {app_prop_key: app_prop_value}

    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        ed0 = EventData(b"Message 0")
        ed0.properties = app_prop
        client.send(ed0, partition_id="0")

        ed1 = EventData(b"Message 1")
        ed1.properties = app_prop
        client.send(ed1, partition_id="1")

    partition_0 = [EventData._from_message(x) for x in receivers[0].receive_message_batch(timeout=5000)]
    assert len(partition_0) == 1
    assert partition_0[0].properties[b"raw_prop"] == b"raw_value"

    partition_1 = [EventData._from_message(x) for x in receivers[1].receive_message_batch(timeout=5000)]
    assert len(partition_1) == 1
    assert partition_1[0].properties[b"raw_prop"] == b"raw_value"

def eventhub_setup():
    """
    Read the config file and create an Event Hubs producer client synchronously.

    Parameters
    ----------
    none

    Returns
    -------
    the Event Hubs publisher object needed to publish events
    """
    _configuration = AzureCredentials()
    _eventhub_name = _configuration.get_eventhub_name()
    _conn_str = _configuration.get_eventhub_conn_string()
    print("connection string is: ", _conn_str)
    print("event hub name is: ", _eventhub_name)
    _publisher = EventHubProducerClient.from_connection_string(
        conn_str=_conn_str, eventhub_name=_eventhub_name)
    return _publisher

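# Hedged usage sketch for the publisher returned above; publish_example and
# its payload are hypothetical, and EventData is assumed to be imported from
# azure.eventhub alongside EventHubProducerClient.
def publish_example():
    publisher = eventhub_setup()
    try:
        batch = publisher.create_batch()
        batch.add(EventData('{"status": "ok"}'))
        publisher.send_batch(batch)
    finally:
        publisher.close()
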
def test_send_with_create_event_batch_with_app_prop_sync(connstr_receivers):
    connection_str, receivers = connstr_receivers
    app_prop_key = "raw_prop"
    app_prop_value = "raw_value"
    app_prop = {app_prop_key: app_prop_value}

    client = EventHubProducerClient.from_connection_string(
        connection_str, transport_type=TransportType.AmqpOverWebsocket)
    with client:
        event_data_batch = client.create_batch(max_size_in_bytes=100000)
        while True:
            try:
                ed = EventData('A single event data')
                ed.properties = app_prop
                event_data_batch.add(ed)
            except ValueError:
                break
        client.send_batch(event_data_batch)

    received = []
    for r in receivers:
        received.extend(r.receive_message_batch(timeout=5000))
    assert len(received) >= 1
    assert EventData._from_message(received[0]).properties[b"raw_prop"] == b"raw_value"

def test_send_with_partition_key(connstr_receivers):
    connection_str, receivers = connstr_receivers
    client = EventHubProducerClient.from_connection_string(connection_str)
    with client:
        data_val = 0
        for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]:
            partition_key = b"test_partition_" + partition
            for i in range(50):
                data = EventData(str(data_val))
                data_val += 1
                client.send(data, partition_key=partition_key)

    found_partition_keys = {}
    for index, partition in enumerate(receivers):
        received = partition.receive_message_batch(timeout=5000)
        for message in received:
            try:
                event_data = EventData._from_message(message)
                existing = found_partition_keys[event_data.partition_key]
                assert existing == index
            except KeyError:
                found_partition_keys[event_data.partition_key] = index