Example #1
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.appProc = AppProcessor()
        self.urlProc = UrlProcessor()
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic,
                                                       "testapp_sig_gen")
Example #2
def main():
    pubsub_endpoint = "localhost:9092"
    topic = "out_topic_ad_notif_test"

    try:
        opts, args = getopt.getopt(sys.argv[1:], "hs:t:",
                                   ["server=", "topic="])
    except getopt.GetoptError:
        printhelp()
        sys.exit(2)

    for opt, arg in opts:
        if opt == '-h':
            printhelp()
            sys.exit()
        elif opt in ("-s", "--server"):
            pubsub_endpoint = arg
        elif opt in ("-t", "--topic"):
            topic = arg

    # print out configs
    print(f"Endpoint: {pubsub_endpoint}")
    print(f"Topic: {topic}")

    kafka_mgr = PubSubManagerKafka(PubSubConnectionKafka(pubsub_endpoint))
    consumer = kafka_mgr.create_consumer([topic], "testapp_sig_gen", "testapp")

    print("Starting listening for notifications ...")

    while True:
        (t, o) = consumer.poll()
        if o is not None:
            print(f"Getting message from topic t={t}:")
            print(f"{o}")
            print(f"")
Example #3
    def __init__(self, config: Config):

        orchestration_topic = config.try_get(
            'servicesettings.orchestration-channel-in',
            default.ORCHESTRATION_NOTIF_TOPIC_DEFAULT)
        client_name = config.try_get('servicesettings.service-client-name',
                                     None)
        self.__notification_service_store_name = config.try_get(
            'servicesettings.notification-service-store-name',
            NotificationService.NOTIFICATION_SERVICE_STORE_NAME_DEFAULT)

        # load configurations
        self.__db_conn = CouchDbConnection(config['store.couchdb.connection'],
                                           config['store.couchdb.username'],
                                           config['store.couchdb.usertoken'])
        self.__pubsub_conn = PubSubConnectionKafka(
            config['eventqueue.kafka.server'])

        # initialize db readers
        self.__noti_repo = NotificationDefinitionRepository(
            self.__db_conn, self.__notification_service_store_name)

        # initialize message bus manager
        self.__pubsub_mgr = PubSubManagerKafka(self.__pubsub_conn,
                                               acquire_service_logger())

        super().__init__(orchestration_topic, self.__pubsub_mgr, client_name)

        # initialize handlers
        self.__update_handlers()

        # logging the client name for debugging
        self.log.debug(
            f"Created instance with client name {self._BaseWorkerService__client_name}"
        )
Example #4
    def signal_thread(self, notif_callback):

        kafka_mgr = PubSubManagerKafka(PubSubConnectionKafka(self.pubsub_endpoint))
        consumer = kafka_mgr.create_consumer([self.topic], "testapp_sig_gen", "testapp")

        print("Starting listening for notifications ...")

        while not self.terminating_thread:
            (t, o) = consumer.poll(1)
            if o is not None:
                print(f"Getting message from topic t={t}:")
                print(f"{o}")
                print(f"")
                notif_callback(o.notification_id)
Example #5
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(pubsub_endpoint))
        self.producer = self.kafka_mgr.create_producer(topic,
                                                       "testapp_sig_gen")
        self.params = {
            'dim': 1,
            'sequence_length': 37,
            'noise_scale': 0.05,
            'mixed_sample_size': 1,
            'anomaly_magnitude': 0.5,
            'anomaly_window_size': 13
        }
        self.sequence = np.zeros(
            (self.params['dim'], self.params['sequence_length']))
        self.abnormal_peak = np.zeros(
            (self.params['dim'], self.params['sequence_length'] +
             self.params['anomaly_window_size']))
        self.time = 0
Example #6
    def signal_thread(self, terminating_thread):

        kafka_mgr = PubSubManagerKafka(
            PubSubConnectionKafka(self.pubsub_endpoint))
        consumer = kafka_mgr.create_consumer([self.topic], "testapp_sig_gen",
                                             "testapp")

        print("Starting listening for notifications ...")
        while not terminating_thread():
            (t, o) = consumer.poll(1)
            print(t)
            if o is not None:
                print(terminating_thread())
                if o.notification_id == 'is_not_me':
                    self.data = 'not_me'
                else:
                    self.data = 'me'
                with self.condition:
                    self.condition.notify_all()
Example #7
    def __init__(self, pubsub_endpoint: str, topic: str):
        self.kafka_mgr = PubSubManagerKafka(PubSubConnectionKafka(pubsub_endpoint))
        # uncomment the following code to reset the kafka db
        # self.kafka_mgr.reset()
        # time.sleep(1000)
        self.producer = self.kafka_mgr.create_producer(topic, "testapp_sig_gen")
        self.data = np.zeros([1, 64, 10])
        self.data_delta = {
            "mhhd_s1": 0.0,
            "mhhd_s2": 0.0,
            "mhhd_s3": 0.0,
            "sesor3": 0.0,
            "sesor4": 0.0,
            "sesor5": 0.0,
            "sesor6": 0.0,
            "sesor7": 0.0,
            "sesor8": 0.0,
            "sesor9": 0.0
        }
        self.interval_begin = datetime.datetime.now()
Example #8
    def __init__(self, config: Config):

        orch_infer_topic = config.try_get(
            'servicesettings.orchestration-inference-channel-in',
            default.ORCHESTRATION_INFER_TOPIC_DEFAULT)
        orch_notif_topic = config.try_get(
            'servicesettings.orchestration-notification-channel-in',
            default.ORCHESTRATION_NOTIF_TOPIC_DEFAULT)
        self.__scenario_store_name = config.try_get(
            'servicesettings.orchestration-service-store-name',
            OrchestrationService.ORCHESTRATION_SERVICE_STORE_NAME_DEFAULT)
        self.__client_name = config.try_get(
            'servicesettings.service-client-name', None)

        # load configurations
        self.__db_conn = CouchDbConnection(config['store.couchdb.connection'],
                                           config['store.couchdb.username'],
                                           config['store.couchdb.usertoken'])
        self.__pubsub_conn = PubSubConnectionKafka(
            config['eventqueue.kafka.server'])

        # initialize db readers
        self.__scn_repo = ScenarioRepository(self.__db_conn,
                                             self.__scenario_store_name)

        # initialize message bus manager and producers
        self.__pubsub_mgr = PubSubManagerKafka(self.__pubsub_conn,
                                               acquire_service_logger())
        self.__pubsub_mgr.create_topic_if_not_exist(orch_infer_topic)
        self.__pubsub_mgr.create_topic_if_not_exist(orch_notif_topic)
        self.__producers = {
            'infer':
            self.__pubsub_mgr.create_producer(orch_infer_topic,
                                              self.__client_name),
            'notif':
            self.__pubsub_mgr.create_producer(orch_notif_topic,
                                              self.__client_name),
        }

        super().__init__()
Example #9
def setup_empty_test_pubsub_server() -> PubSubConnectionKafka:
    client = docker.from_env()

    # setup zookeeper/kafka
    kafka_test_config = yaml.load(StringIO(kafka_test_config_content),
                                  Loader=yaml.SafeLoader)

    test_project_name = "unittest"

    conn_obj = PubSubConnectionKafka(
        f"{kafka_test_config['kafka_host_name']}:{kafka_test_config['kafka_container_port']}"
    )

    if f"{test_project_name}_kafka_1" not in [
            c.name for c in client.containers.list()
    ]:
        setup_test_containers_with_docker_compose(
            kafka_test_docker_compose_content, test_project_name)

        # test the connection to wait for container launch
        pubsub = PubSubManagerKafka(conn_obj)
        if not pubsub.test_connection():
            print("Waiting for kafka container to start ...")
            time.sleep(5)
            while not pubsub.test_connection():
                print("Still waiting ...")
                time.sleep(5)
        print(
            f"Test container '{test_project_name}_kafka_1' has been started")

        # hack: leave some time for kafka to warm up
        print("Pausing a bit for kafka to warm up ...")
        time.sleep(30)
    else:
        print(
            f"Skipping test container creation: '{test_project_name}_kafka_1' already exists"
        )

    return conn_obj
Example #10
    def __init__(self, config: Config):

        orchestration_topic = config.try_get(
            'servicesettings.orchestration-channel-in',
            default.ORCHESTRATION_INFER_TOPIC_DEFAULT)
        client_name = config.try_get('servicesettings.service-client-name',
                                     None)
        self.__inference_service_store_name = config.try_get(
            'servicesettings.inference-service-store-name',
            InferenceService.INFERENCE_SERVICE_STORE_NAME_DEFAULT)

        # load configurations
        self.__db_conn = CouchDbConnection(config['store.couchdb.connection'],
                                           config['store.couchdb.username'],
                                           config['store.couchdb.usertoken'])
        self.__pubsub_conn = PubSubConnectionKafka(
            config['eventqueue.kafka.server'])
        self.__model_cache_location = config.try_get(
            'servicesettings.model_cache_location',
            default.MODEL_CACHE_LOCATION)

        # initialize db readers
        self.__infer_repo = InferenceDefinitionRepository(
            self.__db_conn, self.__inference_service_store_name)

        # initialize message bus manager
        self.__pubsub_mgr = PubSubManagerKafka(self.__pubsub_conn,
                                               acquire_service_logger())

        super().__init__(orchestration_topic, self.__pubsub_mgr, client_name)

        # initialize model cache
        self.__model_cache = ModelCache(self.__model_cache_location, self.log)

        # logging the client name for debugging
        self.log.debug(
            f"Created instance with client name {self._BaseWorkerService__client_name}"
        )