Example #1
def get():
    if request.args.get('since') is None:
        since = -1
    else:
        since = request.args.get('since')

    client = EventHubClient(address, debug=False, username=user, password=key)

    receiver = client.add_receiver(consumergroup, PARTITION, prefetch=1000, offset=Offset(since), keep_alive=72000)
    client.run()

    def generate():
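        # Stream the response as one JSON array: keep draining the receiver in
        # batches until an empty batch comes back, separating objects with commas.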
        batched_events = receiver.receive(max_batch_size=100, timeout=500)
        yield '['
        index = 0
        while batched_events:
            for event_data in batched_events:
                if index > 0:
                    yield ','
                last_sn = event_data.sequence_number
                data = str(event_data.message)
                output_entity = literal_eval(data)
                output_entity.update({"_updated": str(last_sn)})
                yield json.dumps(output_entity)
                index = index + 1
            batched_events = receiver.receive(max_batch_size=100, timeout=500)
        yield ']'
    return Response(generate(), mimetype='application/json')
Example #2
def test_client_secret_credential(aad_credential, live_eventhub):
    try:
        from azure.identity import ClientSecretCredential
    except ImportError:
        pytest.skip("No azure identity library")
    client_id, secret, tenant_id = aad_credential
    credential = ClientSecretCredential(client_id=client_id,
                                        secret=secret,
                                        tenant_id=tenant_id)
    client = EventHubClient(host=live_eventhub['hostname'],
                            event_hub_path=live_eventhub['event_hub'],
                            credential=credential,
                            user_agent='customized information')
    sender = client.create_producer(partition_id='0')
    receiver = client.create_consumer(consumer_group="$default",
                                      partition_id='0',
                                      event_position=EventPosition("@latest"))

    with receiver:
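        # The first read should return nothing; then send a single event and
        # verify it arrives on the next read.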
        received = receiver.receive(timeout=1)
        assert len(received) == 0

        with sender:
            event = EventData(body='A single message')
            sender.send(event)
        time.sleep(1)

        received = receiver.receive(timeout=1)

        assert len(received) == 1
        assert list(received[0].body)[0] == 'A single message'.encode('utf-8')
Example #3
    def __init__(self, address, user, key):
        self.client = EventHubClient(address,
                                     debug=False,
                                     username=user,
                                     password=key)
        self.sender = self.client.add_sender(partition="0")
        self.client.run()
Example #4
    def cosmosDBServiceToCosmosDB(self):
        
        database_link = 'dbs/' + DATABASE_ID
        collection_link = database_link + '/colls/' + COLLECTION_ID

        counter = 0
        filepath = ''

        CONSUMER_GROUP = "$Default"
        OFFSET = Offset("0")
        PARTITION = "0"
        eh_client = EventHubClient('amqps://xxxxx.servicebus.windows.net/txxxxqueue', 
                                   debug=True, 
                                   username='******', 
                                   password='******')
        receiver = eh_client.add_receiver(CONSUMER_GROUP, PARTITION, 
                                          prefetch=300, offset=OFFSET)
        try:    
            eh_client.run()
            while True:
                for event_data in receiver.receive(timeout=100):
                    rcv_msg = str(event_data.message)
                    # Filter the Null messages
                    if len(rcv_msg)>5:
                        # Load the messages in CosmosDB
                        cosmos_client.CreateDocument(collection_link, 
                                                     json.loads(str(event_data.message)))
   
        except Exception as e:
            print("Failed Receiving Record {}".format(str(e)) ) 
        finally:
            eh_client.stop()
Example #5
def isNewLabeledData(eh_url, eh_offset_url, eh_account, eh_key):
    '''
    Examines the EventHub to identify whether sufficient new training data is available to trigger a re-train
    ''' 
    
    CONSUMER_GROUP = "$default"
    PARTITION = "0"
    
    offset_client = EventHubClient(eh_offset_url, debug=False, username=eh_account, password=eh_key)
    offset_receiver = offset_client.add_receiver(CONSUMER_GROUP, PARTITION, prefetch=5000)
    offset_sender = offset_client.add_sender(partition="0")
    offset_client.run()

    #Retrieves the current offset/sequence number for the write event queue from the dedicated offset queue
    offsets = offset_receiver.receive(timeout=50)
    current_offset = -1 #Default to -1 (read the entire feed) if no stored offset is retrieved
    logging.info("{0} write messages recieved".format(len(offsets)))
    for offset in offsets:
        offset_event = json.loads(offset.body_as_str())
        current_offset = offset_event['CURRENT_OFFSET']
        logging.info("Retrieved previous offset event {0}".format(offset_event))
    
    #Use the retrieved offset/sequence number to retrieve new writes
    event_client = EventHubClient(eh_url, debug=False, username=eh_account, password=eh_key)
    receiver = event_client.add_receiver(CONSUMER_GROUP, PARTITION, prefetch=5000, offset=Offset(current_offset))
    event_client.run()
    batch = receiver.receive(timeout=50)
    new_label_count = len(batch)
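    # Track the highest sequence number seen so it can be stored as the new offset below.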
    for stuff in batch:
        logging.info("Offset {0}".format(stuff.sequence_number))
        current_offset = int(stuff.sequence_number) if int(stuff.sequence_number) > current_offset else current_offset
        logging.info("Message {0}".format(stuff.body_as_str()))
    logging.info("Processed {0} new label writes".format(new_label_count))
    
    #Write the last retrieved offset/sequence number to the offset message queue to be used in the next read
    offset_sender.send(EventData(json.dumps({"TIMESTAMP": datetime.datetime.now().timestamp(), "CURRENT_OFFSET": current_offset})))
    logging.info("Stored current offset event {0}".format(current_offset))
    #sender.send(EventData(json.dumps({"EVENT_TYPE": "LABEL_WRITE", "LABEL_INDEX":face_hash, "WRITE_TIMESTAMP": datetime.datetime.now().timestamp()})))
    
    #Close queue clients
    offset_client.stop()
    event_client.stop()
    
    #Return True if enough new label writes were found to trigger a retrain
    return new_label_count > 5
Example #6
    def __init__(self, config):
        """
        Class to create an EventHubStreamingClient instance.

        :param config: Dictionary with all the relevant configuration parameters.
        """
        super().__init__()
        self.message_callback = None
        self.config = config
        self.storage_account_name = self.config.get("AZURE_STORAGE_ACCOUNT")
        self.storage_key = self.config.get("AZURE_STORAGE_ACCESS_KEY")
        self.lease_container_name = self.config.get("LEASE_CONTAINER_NAME")
        self.namespace = self.config.get("EVENT_HUB_NAMESPACE")
        self.eventhub = self.config.get("EVENT_HUB_NAME")
        self.consumer_group = self.config.get("EVENT_HUB_CONSUMER_GROUP")
        if self.consumer_group is None:
            self.consumer_group = '$default'

        self.user = self.config.get("EVENT_HUB_SAS_POLICY")
        self.key = self.config.get("EVENT_HUB_SAS_KEY")
        if self.config.get("TIMEOUT"):
            try:
                self.timeout = int(self.config.get("TIMEOUT"))
            except ValueError:
                self.timeout = None
        else:
            self.timeout = None

        # Create EPH Client
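        # Storage credentials present: configure an Event Processor Host for
        # receiving; otherwise fall through to the plain send client below.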
        if self.storage_account_name is not None and self.storage_key is not None:
            self.eph_client = EventHubConfig(
                sb_name=self.namespace,
                eh_name=self.eventhub,
                policy=self.user,
                sas_key=self.key,
                consumer_group=self.consumer_group)
            self.eh_options = EPHOptions()
            self.eh_options.release_pump_on_timeout = True
            self.eh_options.auto_reconnect_on_error = False
            self.eh_options.debug_trace = False
            self.storage_manager = AzureStorageCheckpointLeaseManager(
                self.storage_account_name, self.storage_key,
                self.lease_container_name)

        # Create Send client
        else:
            address = "amqps://" + self.namespace + \
                      ".servicebus.windows.net/" + self.eventhub
            try:
                self.send_client = EventHubClient(address,
                                                  debug=False,
                                                  username=self.user,
                                                  password=self.key)
                self.sender = self.send_client.add_sender()
                self.send_client.run()
            except Exception as e:
                logger.error('Failed to init EH send client: ' + str(e))
                raise
Example #7
def step_impl(context):
    from azure.eventhub import EventHubClient
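    # Build an Event Hub client and sender from the scenario's eh_config.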
    address = "sb://{}/{}".format(context.eh_config['hostname'],
                                  context.eh_config['event_hub'])
    context.client = EventHubClient(address,
                                    username=context.eh_config['key_name'],
                                    password=context.eh_config['access_key'])
    context.sender = context.client.add_sender()
    context.client.run()
Example #8
def test_long_running_receive(connection_str):
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--consumer",
                        help="Consumer group name",
                        default="$default")
    parser.add_argument("--partitions", help="Comma seperated partition IDs")
    parser.add_argument("--offset", help="Starting offset", default="-1")
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
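    # Prefer a connection string; otherwise fall back to a host address plus SAS credential.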
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str, event_hub_path=args.eventhub, network_tracing=False)
    elif args.address:
        client = EventHubClient(host=args.address,
                                event_hub_path=args.eventhub,
                                credential=EventHubSharedKeyCredential(
                                    args.sas_policy, args.sas_key),
                                auth_timeout=240,
                                network_tracing=False)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    if args.partitions:
        partitions = args.partitions.split(",")
    else:
        partitions = client.get_partition_ids()

    threads = []
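    # Start one receiver pump thread per partition and wait for all of them to finish.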
    for pid in partitions:
        consumer = client.create_consumer(consumer_group=args.consumer,
                                          partition_id=pid,
                                          event_position=EventPosition(
                                              args.offset),
                                          prefetch=300)
        thread = threading.Thread(target=pump, args=(consumer, args.duration))
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
Example #9
    def __init__(self, eventhub, address, user, key, consumer_group,
                 redis_hostname, redis_key):
        self.consumer_group = consumer_group
        self.eventhubs_client = EventHubClient(address,
                                               debug=False,
                                               username=user,
                                               password=key)

        redis_topic = f"eventhubs-{eventhub}-{consumer_group}"
        self.redis_cache = RedisCache(redis_hostname, redis_key, redis_topic)
Example #10
    def __init__(self, address, user, key):
        self.address = address
        self.user = user
        self.key = key
        self.counter = 0
        self.client_batch = EventHubClient(self.address,
                                           debug=False,
                                           username=self.user,
                                           password=self.key)
        self.sender = self.client_batch.add_sender()
        self.client_batch.run()
Example #11
File: eventhub.py  Project: rymurr/pyiot
def to_azure(events):
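    # Send every event to partition "0", stopping the client when done or on error.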
    client = EventHubClient(ADDRESS, debug=True)
    sender = client.add_sender(partition="0")
    client.run()
    try:
        for event in events:
            sender.send(EventData(event))
    finally:
        client.stop()
Example #12
def main(req: func.HttpRequest) -> func.HttpResponse:
    logger = logging.getLogger(__name__)
    formatter = logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s: %(message)s')
    func_context = os.environ['FUNCTION_CONTEXT']
    logger.debug(f"Function context --> {func_context}")

    credentials = None
    subscription_id = None
    kv_credentials = None
    kv_subscription_id = None
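    # FUNCTION_CONTEXT selects file logging with local credentials versus
    # console logging with Azure credentials.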
    if func_context == 'local':
        filehandler = logging.FileHandler('func.log')
        filehandler.setFormatter(formatter)
        logger.addHandler(filehandler)
        logger.setLevel(logging.DEBUG)
        credentials, subscription_id = get_local_credentials()
    else:
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(formatter)
        logger.addHandler(console)
        credentials, subscription_id = get_azure_credentials()

    logger.debug('Python HTTP trigger function processed a request.')
    logger.debug(f"method={req.method}, url={req.url}, params={req.params}")
    logger.debug(f"body={req.get_json()}")

    # Handle WebHook
    webhook = req.get_json()

    #Create an empty dict within webhook for motsID
    webhook['additionalData'] = {}
    addl_data = webhook['additionalData']
    addl_data['motsID'] = "0000"

    # Key Vault stuff.
    kv_mgmt_client = KeyVaultManagementClient(credentials, subscription_id)
    kv_client = KeyVaultClient(credentials)
    namespace = get_kv_secret(kv_client, 'EventHubNamespace')
    event_hub = get_kv_secret(kv_client, 'EventHub')
    user = get_kv_secret(kv_client, 'EventHubKeyName')
    key = get_kv_secret(kv_client, 'EventHubKey')

    amqp_uri = f"https://{namespace}.servicebus.windows.net/{event_hub}"
    eh_client = EventHubClient(amqp_uri,
                               debug=False,
                               username=user,
                               password=key)
    eh_sender = eh_client.add_sender(partition="0")
    eh_client.run()
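    # Forward the enriched webhook payload to Event Hubs as a single JSON event.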
    eh_sender.send(EventData(json.dumps(webhook)))
    logger.info(f"sending event to {amqp_uri}, {json.dumps(webhook)}")
    date = datetime.datetime.now().isoformat()
    return func.HttpResponse(json.dumps({'date': date, 'status': 'SUCCESS'}))
Example #13
def test_get_partition_properties(live_eventhub):
    client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'],
                            EventHubSharedKeyCredential(live_eventhub['key_name'], live_eventhub['access_key']))
    properties = client.get_partition_properties('0')
    assert properties['event_hub_path'] == live_eventhub['event_hub'] \
        and properties['id'] == '0' \
        and 'beginning_sequence_number' in properties \
        and 'last_enqueued_sequence_number' in properties \
        and 'last_enqueued_offset' in properties \
        and 'last_enqueued_time_utc' in properties \
        and 'is_empty' in properties
Example #14
    def __init__(self):
        #load config
        with open('config.json', 'r') as json_file:
            self.config = json.load(json_file)

        # Create Event Hubs client
        client = EventHubClient(self.config["EH_ADDRESS"],
                                debug=False,
                                username=self.config["EH_USER"],
                                password=self.config["EH_KEY"])
        Worker.event_hub_sender = client.add_sender(partition="0")
        client.run()
Example #15
def init_event_hub():
    with open('config.json', 'r') as json_file:
        config = json.load(json_file)
    client = EventHubClient(host=config["EH_HOST"],
                            event_hub_path=config["EH_NAME"],
                            credential=EventHubSharedKeyCredential(
                                config["EVENT_HUB_SAS_POLICY"],
                                config["EVENT_HUB_SAS_KEY"]),
                            network_tracing=False)
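    # Create one producer per partition up front.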

    for i in range(NUM_PARTITIONS):
        event_producer_list.append(client.create_producer(partition_id=str(i)))
Example #16
def test_long_running_send(connection_str):
    if sys.platform.startswith('darwin'):
        import pytest
        pytest.skip("Skipping on OSX")
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--payload",
                        help="payload size",
                        type=int,
                        default=512)
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str, event_hub_path=args.eventhub)
    elif args.address:
        client = EventHubClient(host=args.address,
                                event_hub_path=args.eventhub,
                                credential=EventHubSharedKeyCredential(
                                    args.sas_policy, args.sas_key),
                                auth_timeout=240,
                                network_tracing=False)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        partition_ids = client.get_partition_ids()
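        # Start one sender thread per partition.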
        threads = []
        for pid in partition_ids:
            sender = client.create_producer(partition_id=pid)
            thread = threading.Thread(target=send, args=(sender, args))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
    except KeyboardInterrupt:
        pass
Example #17
def test_long_running_receive(connection_str):
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--consumer",
                        help="Consumer group name",
                        default="$default")
    parser.add_argument("--partitions", help="Comma seperated partition IDs")
    parser.add_argument("--offset", help="Starting offset", default="-1")
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(args.conn_str,
                                                       eventhub=args.eventhub,
                                                       debug=False)
    elif args.address:
        client = EventHubClient(args.address,
                                username=args.sas_policy,
                                password=args.sas_key)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        if not args.partitions:
            partitions = get_partitions(client)
        else:
            partitions = args.partitions.split(",")
        pumps = {}
        for pid in partitions:
            pumps[pid] = client.add_receiver(consumer_group=args.consumer,
                                             partition=pid,
                                             offset=Offset(args.offset),
                                             prefetch=50)
        client.run()
        pump(pumps, args.duration)
    finally:
        client.stop()
Example #18
    def __init__(self, devenv='doh'):
        """ Create Event Hub Sender """
        if devenv == 'prod':
            ADDRESS = "amqps://doh-airqual-eventhub.servicebus.windows.net/doh-airqual-event-hub-prod"
            USER = "******"
            KEY = "1p1aHCJc5IbamvnzlnvUa2wlvXsaJpSAbAORGlPRaQ4="
        elif devenv == "dev":
            ADDRESS = "amqps://az-doh-airqual-eventhub.servicebus.windows.net/az-doh-airqual-eventhub"
            USER = "******"
            KEY = "pfWGtB6obtiUCwwAobAAuaq7B9SZSsnpHY6ArEbeS1A="
        self.client = EventHubClient(ADDRESS, debug=False, username=USER, password=KEY)
        self.sender = self.client.add_sender(partition="1")
        self.client.run()
Example #19
def create_eventhub_client(live_eventhub_config):
    # [START create_eventhub_client]
    import os
    from azure.eventhub import EventHubClient

    address = os.environ['EVENT_HUB_ADDRESS']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    client = EventHubClient(address=address,
                            username=shared_access_policy,
                            password=shared_access_key)
    # [END create_eventhub_client]
    return client
Example #20
    def __init__(self):
        NAMESPACE = os.environ['EVENT_HUB_NAMESPACE']
        EHNAME = os.environ['EVENT_HUB_NAME']
        ADDRESS = "amqps://" + NAMESPACE + ".servicebus.windows.net/" + EHNAME

        # SAS policy and key are not required if they are encoded in the URL
        USER = os.environ.get('EVENT_HUB_SAS_POLICY')
        KEY = os.environ.get('EVENT_HUB_SAS_KEY')

        self.client = EventHubClient(ADDRESS,
                                     debug=False,
                                     username=USER,
                                     password=KEY)
        self.sender = self.client.add_sender(partition="0")
        self.client.run()
Example #21
def loadConfig():
    with open('config/EventHubsConfig.json') as f:
        configObj = json.load(f)
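    # Create one client per Event Hub entry in the config file.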

    for client in configObj['EventHubs']:
        eventHubClient = EventHubClient(
            client['url'],
            username=client['username'],
            password=client['password'],
        )
        eventhubs.append(eventHubClient)
        
    global token
    token = configObj['GithubToken']
    global ENDPOINT
    ENDPOINT = configObj['GithubEndPoint']
Example #22
def create_eventhub_client(live_eventhub_config):
    # [START create_eventhub_client]
    import os
    from azure.eventhub import EventHubClient, EventHubSharedKeyCredential

    host = os.environ['EVENT_HUB_HOSTNAME']
    event_hub_path = os.environ['EVENT_HUB_NAME']
    shared_access_policy = os.environ['EVENT_HUB_SAS_POLICY']
    shared_access_key = os.environ['EVENT_HUB_SAS_KEY']

    client = EventHubClient(host=host,
                            event_hub_path=event_hub_path,
                            credential=EventHubSharedKeyCredential(
                                shared_access_policy, shared_access_key))
    # [END create_eventhub_client]
    return client
Example #23
    def eventhubReceiveToFile(test_queue_url):
        # Continuously receive messages from the Event Hub and persist them to
        # local files.
        counter = 0
        filepath = ''

        CONSUMER_GROUP = "$Default"
        OFFSET = Offset("0")
        PARTITION = "0"

        client = EventHubClient('amqps://xxxxx.servicebus.windows.net/txxxxxqueue', 
                                debug=True, 
                                username='******', 
                                password='******')
        receiver = client.add_receiver(CONSUMER_GROUP, 
                                       PARTITION, prefetch=300, offset=OFFSET)
        try:    
            client.run()
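            # Append each received message to the current file, rolling over to a new file after 50,000 messages.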
            while True:
                for event_data in receiver.receive(timeout=100):
                    rcv_msg = str(event_data.message)
                    #print((rcv_msg))
                    if len(rcv_msg)>=5:
                        if counter!=0 and counter <= 50000:
                            #print(message['Body'])
                            file = open(filepath,'a')
                            file.write(rcv_msg)
                            file.write('\n')
                        elif counter == 0:
                            filepath = createfile()
                            # print(filepath)
                            file = open(filepath,'w') 
                        else:
                            filepath = createfile()
                            #print(filepath)
                            counter = 1
                            file = open(filepath,'w') 
                        file.close() 
                        counter = counter + 1
        except Exception as e:
            print("Failed Receiving Record {}".format(str(e)) ) 
        finally:
            client.stop()
Example #24
def test_long_running_send(connection_str):
    if sys.platform.startswith('darwin'):
        import pytest
        pytest.skip("Skipping on OSX")
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration",
                        help="Duration in seconds of the test",
                        type=int,
                        default=30)
    parser.add_argument("--payload",
                        help="payload size",
                        type=int,
                        default=512)
    parser.add_argument("--batch",
                        help="Number of events to send and wait",
                        type=int,
                        default=1)
    parser.add_argument("--conn-str",
                        help="EventHub connection string",
                        default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument(
        "--sas-policy",
        help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(args.conn_str,
                                                       eventhub=args.eventhub)
    elif args.address:
        client = EventHubClient(args.address,
                                username=args.sas_policy,
                                password=args.sas_key)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        main(client, args)
    except KeyboardInterrupt:
        pass
Example #25
def send_msg(addr, user, password):
    from azure.eventhub import EventHubClient, Sender, EventData
    client = EventHubClient(addr, username=user, password=password)
    sender = client.add_sender(partition='1')
    client.run()
    try:
        from datetime import datetime
        current_time = datetime.now().isoformat()
        message = "hi from event hub {}".format(current_time)
        event_data = EventData(message)
        sender.send(event_data)
    finally:
        client.stop()
Example #26
    def __init__(self,
                 poll_frequency: int = 1,
                 event_hub_address: str = None,
                 event_hub_user: str = None,
                 event_hub_key: str = None,
                 *args,
                 **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.poll_frequency = poll_frequency
        self.event_hub_address = event_hub_address
        self.event_hub_user = event_hub_user
        self.event_hub_key = event_hub_key
        self._validate_parameters()
        self.client = EventHubClient(self.event_hub_address,
                                     debug=False,
                                     username=event_hub_user,
                                     password=event_hub_key)
        self.sender = self.client.add_sender(partition="0")
Example #27
    def __init__(self, eventhub, address, user, key, consumer_group,
                 redis_hostname, redis_key):
        self.consumer_group = consumer_group
        self.eventhubs_client = EventHubClient(address,
                                               debug=False,
                                               username=user,
                                               password=key)

        # Leaving in here for backward compatibility
        redis_topic = f"eventhubs-{eventhub}-{consumer_group}"
        self.redis_cache = RedisCache(redis_hostname, redis_key, redis_topic)

        self.partition_ids = self.eventhubs_client.get_eventhub_info(
        )['partition_ids']

        for partition_id in self.partition_ids:
            redis_topic = f"eventhubs-{eventhub}-{consumer_group}-{partition_id}"
            self.redis_cache_partition_aware[partition_id] = RedisCache(
                redis_hostname, redis_key, redis_topic)
Example #28
    def __init__(self, output_buffer, ADDRESS, USER, KEY, CONSUMER_GROUP, OFFSET, PARTITION, parent=None):
        super(RetrieveEventHub, self).__init__(parent)

        self.address = ADDRESS

        # SAS policy and key are not required if they are encoded in the URL
        self.user = USER
        self.key = KEY
        self.CONSUMER_GROUP = CONSUMER_GROUP
        self.OFFSET = OFFSET
        self.PARTITION = PARTITION
        self.total = 0
        self.last_sn = -1
        self.last_offset = "-1"
        self.client = EventHubClient(self.address, debug=False, username=self.user, password=self.key)
        self.receiver = self.client.add_receiver(self.CONSUMER_GROUP, self.PARTITION, prefetch=1000, offset=self.OFFSET)
    
        self.output_buffer = output_buffer
        self.last_frame = -1
Example #29
    def execute(self):
        dataset_id = '{}_{}'.format(self.dataset_url, self.dataset_filename)
        latest_status = self.redis_client.get(dataset_id)
        if latest_status is not None:
            latest_status = latest_status.decode(ENCODING)

        client = EventHubClient(
            read_config.cfg.get('nulplabs', 'hub_address'),
            debug=False,
            username=read_config.cfg.get('nulplabs', 'hub_user'),
            password=read_config.cfg.get('nulplabs', 'hub_passwd'))
        sender = client.add_sender(partition="0")
        client.run()

        if latest_status == 'COMPLETED' or latest_status == 'ATTEMPT TO REFILL':
            self.redis_client.set(dataset_id, 'ATTEMPT TO REFILL')
            print('ATTEMPT TO REFILL')
            return False

        socrata_client = Socrata(self.dataset_url, None)
        self.redis_client.set(dataset_id, 'STARTED')
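        # Fetch the dataset page by page and forward each page to Event Hubs as one batched event.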

        for i in range(int(NUMBER_OF_MESSAGES / MESSAGES_PER_FETCH)):
            results = socrata_client.get(self.dataset_filename,
                                         limit=MESSAGES_PER_FETCH,
                                         offset=MESSAGES_PER_FETCH * i)
            results_df = pd.DataFrame.from_records(results)
            # print(json.dumps(results))
            current_progress = '{} - {}'.format(
                str(i * MESSAGES_PER_FETCH + 1),
                str((i + 1) * MESSAGES_PER_FETCH))

            message = EventData(batch=self.data_generator(results))
            sender.send(message)
            self.redis_client.set(dataset_id, current_progress)

            # print('Progress {}'.format(current_progress))
            # print(results_df)

        self.redis_client.set(self.dataset_url + "_" + self.dataset_filename,
                              'COMPLETED')
        print('COMPLETED')
Example #30
def run_job():
    # Create Event Hubs client
    client = EventHubClient(config.ADDRESS,
                            debug=False,
                            username=config.USER,
                            password=config.KEY)
    sender = client.add_sender(partition="0")
    client.run()
    while True:
        try:
            messages = [
                create_advert_event(brands),
                create_impression_event(urls)
            ]
            for message in messages:
                print('Sending message to Event Hubs: ' + str(message))
                sender.send(EventData(message))
                time.sleep(1)
        except:
            raise