def run(self):
    """
    Read messages from the queue and dispatch tasks to the worker thread.

    TODO: Remove the Kafka-related setup from this function.
    """
    # Brokers should be in the URI path.
    # path.strip() returns type 'unicode' and pykafka expects a str, so
    # convert unicode to str.
    brokers = Consumer._Brokers.strip('/').encode('ascii', 'ignore')
    self.kafka_client = PyKafkaClient(hosts=brokers)
    kafka_topic = self.kafka_client.topics[self.topic]
    self.kafka_consumer = kafka_topic.get_balanced_consumer(
        consumer_group=Consumer._Consumer_group.encode('ascii', 'ignore'),
        **Consumer._Args)
    produce_topic = self.kafka_client.topics["%s-data" % self.topic]
    self.kafka_producer = produce_topic.get_producer()

    # Create the worker object for this topic.
    handler_desc = _handler_dict[self.topic]
    worker_class = get_class(handler_desc[0])
    worker = worker_class()
    # Enable the timer routine that consumes the worker's tasks.
    worker.enable_task_routine(task_callback=self.task_callback)

    while True:
        msg_list = kafka_queue_read(self.kafka_consumer)
        if len(msg_list) == 0:
            # No data; try again later.
            sleep(1)
            continue

        _logger.debug(len(msg_list))
        for msg in msg_list:
            task_info = json2dict(msg.value)
            worker.add_task(task_info)

        # Wait for the worker to finish all queued tasks before committing,
        # so the offset only advances past fully processed messages.
        worker.wait_all_tasks_done()
        # Commit the Kafka read offset.
        self.kafka_consumer.commit_offsets()
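# A minimal sketch of how the Kafka setup could be factored out of run(),
# per the TODO above. The helper name _setup_kafka is hypothetical; it reuses
# the same Consumer class attributes (_Brokers, _Consumer_group, _Args) that
# run() reads today, so run() could begin with self._setup_kafka() instead.
def _setup_kafka(self):
    """Create the Kafka client, balanced consumer, and data producer."""
    brokers = Consumer._Brokers.strip('/').encode('ascii', 'ignore')
    self.kafka_client = PyKafkaClient(hosts=brokers)
    kafka_topic = self.kafka_client.topics[self.topic]
    self.kafka_consumer = kafka_topic.get_balanced_consumer(
        consumer_group=Consumer._Consumer_group.encode('ascii', 'ignore'),
        **Consumer._Args)
    produce_topic = self.kafka_client.topics["%s-data" % self.topic]
    self.kafka_producer = produce_topic.get_producer()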
def start():
    # Instantiate one trigger worker per entry in _trigger_dict.
    for trigger_name, trigger_desc in _trigger_dict.iteritems():
        _logger.debug("%s" % trigger_name)
        trigger_class = get_class(trigger_desc[0])
        _trigger_workers[trigger_name] = trigger_class()
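# A minimal sketch of what the get_class() helper used above might look like,
# assuming trigger_desc[0] / handler_desc[0] hold a dotted "pkg.module.Class"
# path. The real helper may differ; this is illustrative only.
#
#     import importlib
#
#     def get_class(dotted_path):
#         """Resolve a dotted 'pkg.module.Class' string to the class object."""
#         module_path, class_name = dotted_path.rsplit('.', 1)
#         module = importlib.import_module(module_path)
#         return getattr(module, class_name)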