Code example #1
    def run_consumer_instance():
        logging_to_console_and_syslog("Starting {}".format(
            threading.current_thread().getName()))
        # Hold a reference to this thread; the loop below stops when the
        # owner sets its "do_run" attribute to False.
        t = threading.current_thread()
        redis_instance = RedisInterface("Consumer{}".format(
            threading.current_thread().getName()))

        consumer_instance = ConfluentKafkaMsgQAPI(
            is_consumer=True,
            thread_identifier=threading.current_thread().getName(),
            perform_subscription=True)
        while getattr(t, "do_run", True):
            message = consumer_instance.dequeue()
            if message:
                logging_to_console_and_syslog(
                    "Consumer {}: Dequeued Message = {}".format(
                        threading.current_thread().getName(), message))
                redis_instance.increment_dequeue_count()
                redis_instance.write_an_event_in_redis_db(
                    "Consumer {}: Dequeued Message = {}".format(
                        threading.current_thread().getName(), message))
            time.sleep(5)
        consumer_instance.cleanup()
        logging_to_console_and_syslog("Consumer {}: Exiting".format(
            threading.current_thread().getName()))
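
A minimal driver sketch for the loop above, assuming run_consumer_instance is callable as a plain function; the stop signal is the same "do_run" attribute that the loop reads with getattr, and the 60-second run time is arbitrary.

import threading
import time

# Hypothetical driver: run the consumer loop on a worker thread and stop it
# later by flipping the "do_run" attribute that the loop polls.
worker = threading.Thread(target=run_consumer_instance, name="1")
worker.start()

time.sleep(60)          # let the consumer dequeue for a while

worker.do_run = False   # the loop exits after its current iteration
worker.join()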
Code example #2
    def create_local_consumer2(self):
        c = None
        redis_instance = RedisInterface(threading.current_thread().getName())
        conf = {
            'bootstrap.servers': "localhost:9092",
            'group.id': "video-file-name",
            'session.timeout.ms': 6000,
            'auto.offset.reset': 'earliest'
        }
        # Keep consuming until the dequeue count recorded in Redis catches up
        # with the enqueue count.
        while redis_instance.get_current_enqueue_count() != \
                redis_instance.get_current_dequeue_count():
            if not c:
                c = Consumer(conf)
                c.subscribe(["video-file-name"], on_assign=print_assignment)
            msg = c.poll(timeout=1.0)
            if msg is None or msg.error():
                continue
            else:
                logging_to_console_and_syslog(
                    '%% %s [%d] at offset %d with key %s:\n' %
                    (msg.topic(), msg.partition(), msg.offset(), str(
                        msg.key())))
                logging_to_console_and_syslog("msg.value()={}".format(
                    msg.value()))
                redis_instance.increment_dequeue_count()
                # The consumer is closed and recreated for every message
                # processed in this example.
                c.close()
                c = None
                time.sleep(5)
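
The on_assign callback print_assignment is not shown in this excerpt. A minimal sketch, assuming it only logs the assigned partitions; confluent-kafka invokes the callback with the consumer and the list of assigned TopicPartition objects.

def print_assignment(consumer, partitions):
    # Called by confluent-kafka when the group coordinator assigns partitions
    # to this consumer; simply log the assignment.
    logging_to_console_and_syslog("Assignment: {}".format(partitions))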
Code example #3
class MachineLearningWorker:
    def __init__(self):
        # Read the container hostname and container id from the filesystem;
        # strip() drops the trailing newline emitted by the shell commands.
        self.hostname = os.popen("cat /etc/hostname").read().strip()
        self.cont_id = os.popen(
            "cat /proc/self/cgroup | head -n 1 | cut -d '/' -f3").read().strip()
        self.producer_consumer_queue_type = None
        self.load_environment_variables()
        self.consumer_instance = None
        self.data_parser_instance = None
        self.redis_instance = None
        self.instantiate_objects()

    def load_environment_variables(self):
        while self.producer_consumer_queue_type is None:
            time.sleep(1)
            self.producer_consumer_queue_type = os.getenv(
                "producer_consumer_queue_type_key", default=None)

        logging_to_console_and_syslog(
            ("producer_consumer_queue_type={}".format(
                self.producer_consumer_queue_type)))

    def instantiate_objects(self):
        self.consumer_instance = ProducerConsumerAPI(
            is_consumer=True,
            thread_identifier="Consumer_{}".format(self.cont_id),
            type_of_messaging_queue=self.producer_consumer_queue_type)
        self.data_parser_instance = DataParserInterface()
        self.redis_instance = RedisInterface("Consumer_{}".format(
            self.cont_id))

    def cleanup(self):
        self.consumer_instance.cleanup()

    def process_job(self, message):
        self.data_parser_instance.process_job(message)

    def dequeue_and_process_jobs(self):
        message = self.consumer_instance.dequeue()
        if message:
            try:
                event = "Consumer: Successfully dequeued a message = {} from msgQ.".format(
                    message)
                self.redis_instance.write_an_event_in_redis_db(event)
                self.redis_instance.increment_dequeue_count()
                start_time = datetime.now()
                self.process_job(message)
                time_elapsed = datetime.now() - start_time
                event = 'Time taken to process {} = (hh:mm:ss.ms) {}'.format(
                    message, time_elapsed)
                self.redis_instance.write_an_event_in_redis_db(event)
            except Exception:
                print("Exception in dequeue_and_process_jobs:")
                print("-" * 60)
                traceback.print_exc(file=sys.stdout)
                print("-" * 60)
                self.cleanup()
                self.instantiate_objects()
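
An assumed entry point for this worker, showing how dequeue_and_process_jobs might be driven in a polling loop; it relies on the same module-level imports (time, etc.) as the class above and is not part of the original snippet.

if __name__ == "__main__":
    # Hypothetical main loop: poll for one message at a time, forever.
    worker = MachineLearningWorker()
    try:
        while True:
            worker.dequeue_and_process_jobs()
            time.sleep(1)   # avoid a hot loop when the queue is empty
    except KeyboardInterrupt:
        worker.cleanup()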
Code example #4
    def create_local_consumer(self):
        redis_instance = RedisInterface(threading.current_thread().getName())
        consumer_instance = ConfluentKafkaMsgQAPI(
            is_consumer=True,
            thread_identifier=threading.current_thread().getName(),
            perform_subscription=True)
        # Keep consuming until the dequeue count recorded in Redis catches up
        # with the enqueue count.
        while redis_instance.get_current_enqueue_count() != \
                redis_instance.get_current_dequeue_count():

            message = consumer_instance.dequeue()
            if message is None or message.error():
                continue
            else:
                logging_to_console_and_syslog(
                    "Consumer {}: Dequeued Message = {}".format(
                        threading.current_thread().getName(), message))
                redis_instance.increment_dequeue_count()
                redis_instance.write_an_event_in_redis_db(
                    "Consumer {}: Dequeued Message = {}".format(
                        threading.current_thread().getName(), message))
        consumer_instance.cleanup()
Code example #5
class ProducerConsumerAPI:
    """
    This is a factory design pattern.
    This class produces messages into
    1. Kafka Queue.
    2. Rabbit Message Queue.
    """
    rabbitMsgQType = "Rabbit"
    kafkaMsgQType = "Kafka"
    confluentKafkaMsgQType = "ConfluentKafka"

    def __init__(self,
                 is_producer=False,
                 is_consumer=False,
                 perform_subscription=False,
                 type_of_messaging_queue=None,
                 thread_identifier=None):
        self.message_queue_instance = None
        self.redis_instance = None
        self.is_producer = is_producer
        self.is_consumer = is_consumer
        self.perform_subscription = perform_subscription
        self.type_of_messaging_queue = type_of_messaging_queue
        self.thread_identifier = thread_identifier
        self.read_environment_variables()
        #self.__connect()

    def read_environment_variables(self):
        """
        This method is used to read the environment variables defined in the OS.
        :return:
        """
        while self.type_of_messaging_queue is None:
            time.sleep(2)
            logging_to_console_and_syslog(
                "ProducerConsumerAPI: "
                "Trying to read the environment variables...")
            self.type_of_messaging_queue = os.getenv(
                "type_of_messaging_queue_key", default=None)
        logging_to_console_and_syslog("ProducerConsumerAPI:"
                                      "type_of_messaging_queue={}".format(
                                          self.type_of_messaging_queue))

    def __connect(self):
        """
        This method tries to connect to the messaging queue.
        :return:
        """
        if self.message_queue_instance is None:
            try:
                if self.type_of_messaging_queue == ProducerConsumerAPI.kafkaMsgQType:
                    self.message_queue_instance = KafkaMsgQAPI(
                        is_producer=self.is_producer,
                        is_consumer=self.is_consumer,
                        perform_subscription=self.perform_subscription,
                        thread_identifier=self.thread_identifier)
                elif self.type_of_messaging_queue == ProducerConsumerAPI.rabbitMsgQType:
                    self.message_queue_instance = RabbitMsgQAPI(
                        is_producer=self.is_producer,
                        is_consumer=self.is_consumer,
                        perform_subscription=self.perform_subscription,
                        thread_identifier=self.thread_identifier)
                elif self.type_of_messaging_queue == ProducerConsumerAPI.confluentKafkaMsgQType:
                    self.message_queue_instance = ConfluentKafkaMsgQAPI(
                        is_producer=self.is_producer,
                        is_consumer=self.is_consumer,
                        perform_subscription=self.perform_subscription,
                        thread_identifier=self.thread_identifier)
                if not self.redis_instance:
                    if self.is_producer:
                        self.redis_instance = RedisInterface(
                            "Producer{}".format(self.thread_identifier))
                    elif self.is_consumer:
                        self.redis_instance = RedisInterface(
                            "Consumer{}".format(self.thread_identifier))
            except Exception:
                print("Exception in user code:")
                print("-" * 60)
                traceback.print_exc(file=sys.stdout)
                print("-" * 60)
                time.sleep(5)
            else:
                logging_to_console_and_syslog(
                    "ProducerConsumerAPI: Successfully "
                    "created producer instance for messageQ type ={}".format(
                        self.type_of_messaging_queue))

    def enqueue(self, filename):
        """
        This method tries to post a message.
        :param filename:
        :return True or False:
        """
        status = False

        if filename is None or len(filename) == 0:
            logging_to_console_and_syslog("filename is None or invalid")
            return status

        if self.message_queue_instance is None:
            self.__connect()

        if hasattr(self.message_queue_instance, 'enqueue'):
            status = self.message_queue_instance.enqueue(filename)
            event = "Producer: Successfully posted a message = {} into msgQ. Status={}".format(
                filename, status)
            self.redis_instance.write_an_event_in_redis_db(event)
            self.redis_instance.increment_enqueue_count()

        return status

    def dequeue(self):
        """
        This method tries to post a message.
        :return Freezes the current context and yeilds a message:
        Please make sure to iterate this over to unfreeze the context.
        """
        if self.message_queue_instance is None:
            self.__connect()
        msg = None
        if hasattr(self.message_queue_instance, 'dequeue'):
            msg = self.message_queue_instance.dequeue()
            if msg:
                self.redis_instance.increment_dequeue_count()
                self.redis_instance.write_an_event_in_redis_db(
                    "Consumer {}: Dequeued Message = {}".format(
                        self.thread_identifier, msg))
                # The messaging queue instance is torn down after every
                # successful dequeue; the next dequeue() call reconnects.
                self.cleanup()
        return msg

    def cleanup(self):
        if self.message_queue_instance:
            self.message_queue_instance.cleanup()
            self.message_queue_instance = None
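
A short, assumed usage sketch for the factory above. Passing type_of_messaging_queue explicitly skips the environment-variable wait in read_environment_variables; it presumes the Kafka and Redis services referenced by the underlying classes are reachable, and the file name is illustrative.

# Hypothetical usage of the ProducerConsumerAPI factory.
producer = ProducerConsumerAPI(
    is_producer=True,
    thread_identifier="Producer_demo",
    type_of_messaging_queue=ProducerConsumerAPI.confluentKafkaMsgQType)
producer.enqueue("video1.mp4")

consumer = ProducerConsumerAPI(
    is_consumer=True,
    perform_subscription=True,
    thread_identifier="Consumer_demo",
    type_of_messaging_queue=ProducerConsumerAPI.confluentKafkaMsgQType)
message = consumer.dequeue()
if message:
    print("Dequeued: {}".format(message))
consumer.cleanup()
producer.cleanup()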