# Standard-library imports needed by the code below; ProducerConsumerAPI,
# DataParserInterface, RedisInterface, DockerAPIInterface and
# logging_to_console_and_syslog are project-specific imports from the
# surrounding repository.
import os
import subprocess
import sys
import threading
import time
import traceback
import unittest
from datetime import datetime


class MachineLearningWorker:
    def __init__(self):
        # strip() drops the trailing newline from the shell output so it
        # does not leak into identifiers such as "Consumer_{cont_id}".
        self.hostname = os.popen("cat /etc/hostname").read().strip()
        self.cont_id = os.popen(
            "cat /proc/self/cgroup | head -n 1 | cut -d '/' -f3").read().strip()
        self.producer_consumer_queue_type = None
        self.load_environment_variables()
        self.consumer_instance = None
        self.data_parser_instance = None
        self.redis_instance = None
        self.instantiate_objects()

    def load_environment_variables(self):
        while self.producer_consumer_queue_type is None:
            time.sleep(1)
            self.producer_consumer_queue_type = os.getenv(
                "producer_consumer_queue_type_key", default=None)

        logging_to_console_and_syslog(
            "producer_consumer_queue_type={}".format(
                self.producer_consumer_queue_type))

    def instantiate_objects(self):
        self.consumer_instance = ProducerConsumerAPI(
            is_consumer=True,
            thread_identifier="Consumer_{}".format(self.cont_id),
            type_of_messaging_queue=self.producer_consumer_queue_type)
        self.data_parser_instance = DataParserInterface()
        self.redis_instance = RedisInterface("Consumer_{}".format(
            self.cont_id))

    def cleanup(self):
        self.consumer_instance.cleanup()

    def process_job(self, message):
        self.data_parser_instance.process_job(message)

    def dequeue_and_process_jobs(self):
        message = self.consumer_instance.dequeue()
        if message:
            try:
                event = "Consumer: Successfully dequeued a message = {} from msgQ.".format(
                    message)
                self.redis_instance.write_an_event_in_redis_db(event)
                self.redis_instance.increment_dequeue_count()
                start_time = datetime.now()
                self.process_job(message)
                time_elapsed = datetime.now() - start_time
                event = 'Time taken to process {} = (hh:mm:ss.ms) {}'.format(
                    message, time_elapsed)
                self.redis_instance.write_an_event_in_redis_db(event)
            except Exception:
                # Log the full traceback, then rebuild the consumer, parser
                # and Redis handles so the worker can keep processing jobs.
                print("Exception in dequeue_and_process_jobs:")
                print("-" * 60)
                traceback.print_exc(file=sys.stdout)
                print("-" * 60)
                self.cleanup()
                self.instantiate_objects()
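
# Illustrative driver sketch (an assumption, not from the original source):
# dequeue_and_process_jobs() handles one message per call, so a worker
# process would invoke it in a loop and clean up on shutdown.
def run_machine_learning_worker():
    worker = MachineLearningWorker()
    try:
        while True:
            worker.dequeue_and_process_jobs()
    except KeyboardInterrupt:
        # Cooperative shutdown: release the consumer's queue resources.
        worker.cleanup()
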
class DirectoryWatch:
    def __init__(self):
        self.before = {}
        self.after = {}
        self.video_file_path = None
        self.producer_consumer_type = None
        self.redis_instance = RedisInterface("Producer")
        self.load_environment_variables()
        self.producer_instance = ProducerConsumerAPI(is_producer=True,
                                                     thread_identifier="Producer",
                                                     type_of_messaging_queue=self.producer_consumer_type)

    def load_environment_variables(self):
        while self.video_file_path is None or \
              self.producer_consumer_type is None:
            time.sleep(1)
            self.video_file_path = os.getenv("video_file_path_key", default=None)
            self.producer_consumer_type = os.getenv("producer_consumer_queue_type_key", default=None)
        logging_to_console_and_syslog(("video_file_path={}".format(self.video_file_path)))
        logging_to_console_and_syslog(("producer_consumer_type={}".format(self.producer_consumer_type)))

    def cleanup(self):
        self.producer_instance.cleanup()

    def process_new_file(self, file_name):
        # post the file_name into the producer queue.
        self.producer_instance.enqueue(file_name)
        event = "Producer: Successfully posted a message = {} into msgQ.".format(file_name)
        self.redis_instance.write_an_event_in_redis_db(event)
        self.redis_instance.increment_enqueue_count()

    def watch_a_directory(self):
        # Poll the directory once a second and enqueue any newly added files.
        self.before = {}
        while True:
            time.sleep(1)
            self.after = dict.fromkeys(os.listdir(self.video_file_path))
            added = [f for f in self.after if f not in self.before]
            removed = [f for f in self.before if f not in self.after]
            if added:
                logging_to_console_and_syslog("Added: " + str(added))
                for filename in added:
                    self.process_new_file(filename)
            if removed:
                logging_to_console_and_syslog("Removed: " + str(removed))
            self.before = self.after
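
# Illustrative usage sketch (an assumption, not from the original source):
# watch_a_directory() blocks forever, so the producer side would typically
# be a dedicated process that owns one DirectoryWatch instance.
def run_directory_watch():
    watcher = DirectoryWatch()
    try:
        watcher.watch_a_directory()
    finally:
        # Release the producer's queue resources on the way out.
        watcher.cleanup()
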
class TestProducerConsumer(unittest.TestCase):
    def setUp(self):
        os.environ["broker_name_key"] = "localhost:9094"
        os.environ["topic_key"] = "video-file-name"
        os.environ["redis_log_keyname_key"] = "briefcam"
        os.environ["total_job_enqueued_count_redis_name_key"] = "enqueue"
        os.environ["total_job_dequeued_count_redis_name_key"] = "dequeue"
        os.environ["redis_server_hostname_key"] = "localhost"
        os.environ["redis_server_port_key"] = "6379"
        self.dirname = os.path.dirname(os.path.realpath(__file__))
        self.max_consumer_threads = 10
        self.create_test_docker_container()
        self.producer_instance = None
        self.consumer_threads = None

    @staticmethod
    def run_consumer_instance(*args, **kwargs):
        perform_subscription = False
        msgq_type = None
        t = threading.current_thread()
        logging_to_console_and_syslog("Starting {}".format(t.getName()))
        for name, value in kwargs.items():
            logging_to_console_and_syslog("name={},value={}".format(
                name, value))
            if name == 'msgq_type':
                msgq_type = value
            elif name == 'perform_subscription':
                perform_subscription = value

        consumer_instance = ProducerConsumerAPI(
            is_consumer=True,
            thread_identifier=t.getName(),
            perform_subscription=perform_subscription,
            type_of_messaging_queue=msgq_type)
        # Poll until the owning test flips this thread's do_run flag.
        while getattr(t, "do_run", True):
            message = consumer_instance.dequeue()
            if message:
                logging_to_console_and_syslog(
                    "Consumer {}: Dequeued Message = {}".format(
                        t.getName(), message))
                time.sleep(5)
        consumer_instance.cleanup()
        logging_to_console_and_syslog("Consumer {}: Exiting".format(
            t.getName()))

    def create_consumer_threads(self, msgq_type, perform_subscription):
        # Seed with None so the assertIsNotNone check below is meaningful.
        self.consumer_threads = [None] * self.max_consumer_threads
        for index in range(self.max_consumer_threads):
            self.consumer_threads[index] = threading.Thread(
                name="consumer_{}".format(index),
                target=TestProducerConsumer.run_consumer_instance,
                args=(),
                kwargs={
                    'msgq_type': msgq_type,
                    'perform_subscription': perform_subscription
                })
            # do_run is polled inside run_consumer_instance; setting it to
            # False later asks the thread to exit its loop.
            self.consumer_threads[index].do_run = True
            self.consumer_threads[index].start()

    def create_consumers(self, msgq_type, perform_subscription):
        self.create_consumer_threads(msgq_type, perform_subscription)
        logging_to_console_and_syslog(
            "Validating consumer threads to be not null.")
        for index in range(self.max_consumer_threads):
            self.assertIsNotNone(self.consumer_threads[index])

    def create_producer_and_produce_jobs(self, msgq_type):
        self.producer_instance = ProducerConsumerAPI(
            is_producer=True,
            thread_identifier="Producer",
            type_of_messaging_queue=msgq_type)
        logging_to_console_and_syslog("Posting messages.")
        self.assertTrue(self.post_messages())

    def perform_enqueue_dequeue(self, msgq_type, perform_subscription=False):
        logging_to_console_and_syslog(
            "Creating consumer threads to consume jobs.")
        self.create_consumers(msgq_type, perform_subscription)
        time.sleep(10)
        logging_to_console_and_syslog(
            "Creating producer instance and producing jobs.")
        self.create_producer_and_produce_jobs(msgq_type)
        time.sleep(120)
        logging_to_console_and_syslog(
            "Validating if the consumer successfully dequeued messages.")
        redis_instance = RedisInterface(threading.current_thread().getName())
        self.assertEqual(redis_instance.get_current_enqueue_count(),
                         redis_instance.get_current_dequeue_count())
        logging_to_console_and_syslog(
            "enqueue_count={},dequeue_count={}".format(
                redis_instance.get_current_enqueue_count(),
                redis_instance.get_current_dequeue_count()))

    def test_run(self):
        #logging_to_console_and_syslog("Validating **************** KAFKA MSGQ *****************.")
        #self.perform_enqueue_dequeue(ProducerConsumerAPI.kafkaMsgQType)
        #self.cleanup_test_environment()
        logging_to_console_and_syslog(
            "Validating **************** KAFKA MSGQ + SUBSCRIPTION *****************.")
        self.perform_enqueue_dequeue(ProducerConsumerAPI.kafkaMsgQType,
                                     perform_subscription=True)
        #logging_to_console_and_syslog("Validating **************** RABBIT MSGQ *****************.")
        #self.perform_enqueue_dequeue(ProducerConsumerAPI.rabbitMsgQType)
        #self.cleanup_test_environment()

    def post_messages(self):
        messages = [str(x) for x in range(100)]
        for message in messages:
            self.producer_instance.enqueue(message)
        self.producer_instance.cleanup()
        return True

    def create_test_docker_container(self):
        completed_process = subprocess.run(
            ["docker-compose", "-f",
             "{}/docker-compose_wurstmeister_kafka.yml".format(self.dirname),
             "up", "-d"],
            stdout=subprocess.PIPE)
        # subprocess.run() never returns None, so assert on the exit status.
        self.assertEqual(completed_process.returncode, 0)
        # time.sleep(120)

    def delete_test_docker_container(self):
        completed_process = subprocess.run(
            ["docker-compose", "-f",
             "{}/docker-compose_wurstmeister_kafka.yml".format(self.dirname),
             "down"],
            stdout=subprocess.PIPE)
        self.assertEqual(completed_process.returncode, 0)

    def cleanup_test_environment(self):
        # Ask every consumer thread to exit its polling loop.
        for index in range(self.max_consumer_threads):
            self.consumer_threads[index].do_run = False
        time.sleep(5)
        self.delete_test_docker_container()
        for index in range(self.max_consumer_threads):
            logging_to_console_and_syslog("Trying to join thread {}.".format(
                self.consumer_threads[index].getName()))
            self.consumer_threads[index].join(1.0)
            time.sleep(1)
            if self.consumer_threads[index].is_alive():
                try:
                    logging_to_console_and_syslog(
                        "Trying to __stop thread {}.".format(
                            self.consumer_threads[index].getName()))
                    # Thread.__stop() was a CPython 2 internal; under
                    # Python 3 (and with name mangling) this call always
                    # raises AttributeError, which drops us into the
                    # Docker fallback below.
                    self.consumer_threads[index].__stop()
                except Exception:
                    logging_to_console_and_syslog(
                        "Caught an exception while stopping thread {}".format(
                            self.consumer_threads[index].getName()))
                    docker_api_interface_instance = DockerAPIInterface(
                        image_name=self.dirname.split('/')[-2],
                        dockerfile_directory_name=self.dirname)
                    docker_api_interface_instance.stop_docker_container_by_name()

    def tearDown(self):
        self.cleanup_test_environment()
        time.sleep(5)
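
# Conventional unittest entry point (not part of the original excerpt);
# allows running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()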