def run(self):
    """Start the semantics-queue daemon.

    Creates a consumer on SEMANTICS_QUEUE, a response publisher, and
    ``self.num_workers`` worker threads sharing the consumer's message
    and confirmation queues, then blocks until the consumer exits.
    """
    # Parameters to be passed on to the queue worker
    queue_name = "SEMANTICS_QUEUE"
    options = {
        "exchange_name": "metadata",
        "exchange_type": "fanout",
        "durable_queue": True,
        # One unacknowledged message per worker thread at a time.
        "prefetch_count": self.num_workers,
    }
    # Use the queue_name local instead of repeating the literal.
    drop_consumer = Consumer(
        "semanticsqueue-consumer", self.mq_host, queue_name, options)
    # FIX: was the bare name `mq_host` (NameError at runtime);
    # use the instance attribute like the Consumer above does.
    drop_publisher = Publisher("Response Publisher", self.mq_host)

    for x in range(self.num_workers):
        SemanticsQueueWorker(
            "semanticsqueue-worker-" + str(x),
            drop_consumer.message_queue,
            drop_consumer.confirm_queue,
            self.api_url,
            drop_publisher,
            self.lock,
            self.max_retries,
            self.sleep_time,
            self.retry_cache,
        )
    log.info("Workers started")
    # Block the daemon thread until the consumer terminates.
    drop_consumer.join()
    log.info("Exiting")
def run(self):
    """Start the semantics-queue daemon.

    Spawns ``self.num_workers`` SemanticsQueueWorker threads fed by a
    single SEMANTICS_QUEUE consumer, then blocks until the consumer exits.
    """
    # Parameters to be passed on to the queue worker
    queue_name = 'SEMANTICS_QUEUE'
    options = {
        'exchange_name': 'metadata',
        'exchange_type': 'fanout',
        'durable_queue': True,
        # Limit in-flight deliveries to one per worker thread.
        'prefetch_count': self.num_workers,
    }
    # Pass the queue_name local rather than duplicating the literal.
    drop_consumer = Consumer(
        "semanticsqueue-consumer", self.mq_host, queue_name, options)
    # FIX: previously passed the undefined bare name `mq_host`;
    # the host lives on the instance, as used by the Consumer above.
    drop_publisher = Publisher("Response Publisher", self.mq_host)

    for x in range(self.num_workers):
        SemanticsQueueWorker(
            "semanticsqueue-worker-" + str(x),
            drop_consumer.message_queue,
            drop_consumer.confirm_queue,
            self.api_url,
            drop_publisher,
            self.lock,
            self.max_retries,
            self.sleep_time,
            self.retry_cache,
        )
    log.info("Workers started")
    # Keep the daemon alive while the consumer runs.
    drop_consumer.join()
    log.info("Exiting")
def run(self):
    """Initializes the daemon and spawns a set of workers to listen
    on the MQ for buckets that are ready to push drops."""
    options = {
        'exchange_name': 'chatter',
        'exchange_type': 'topic',
        # Only bucket-push notifications from the web frontend.
        'routing_key': 'web.bucket.push.*',
        'durable_exchange': True,
    }
    # Consumer for USHAHIDI_POST_QUEUE
    postqueue_consumer = Consumer(
        "ushahidi-postqueue-consumer", self.mq_host,
        "USHAHIDI_POST_QUEUE", options)

    # Spawn a set of workers to listen for buckets that are ready
    # to post drops
    for x in range(self.num_workers):
        UshahidiPostQueueWorker(
            "ushahidi-postqueue-worker" + str(x),
            postqueue_consumer.message_queue,
            postqueue_consumer.confirm_queue,
            self)
    log.info("Workers started")
    # Block until the consumer terminates.
    postqueue_consumer.join()
    log.info("Exiting...")
def run(self):
    """Initializes the daemon and spawns a set of workers to listen
    on the MQ for buckets that are ready to push drops."""
    options = {
        'exchange_name': 'chatter',
        'exchange_type': 'topic',
        # Match all bucket-push events published by the web app.
        'routing_key': 'web.bucket.push.*',
        'durable_exchange': True,
    }
    # Consumer for USHAHIDI_POST_QUEUE
    postqueue_consumer = Consumer(
        "ushahidi-postqueue-consumer", self.mq_host,
        "USHAHIDI_POST_QUEUE", options)

    # Spawn a set of workers to listen for buckets that are ready
    # to post drops
    for x in range(self.num_workers):
        UshahidiPostQueueWorker(
            "ushahidi-postqueue-worker" + str(x),
            postqueue_consumer.message_queue,
            postqueue_consumer.confirm_queue,
            self)
    log.info("Workers started")
    # Wait here until the consumer shuts down.
    postqueue_consumer.join()
    log.info("Exiting...")
def run(self):
    """Start the media-extractor daemon.

    Creates a consumer on MEDIA_EXTRACTOR_QUEUE, a response publisher,
    and ``self.num_workers`` worker threads, then blocks until the
    consumer exits.
    """
    options = {
        'exchange_name': 'metadata',
        'exchange_type': 'fanout',
        'durable_queue': True,
        # One in-flight message per worker thread.
        'prefetch_count': self.num_workers,
    }
    drop_consumer = Consumer(
        "mediaextractor-consumer", self.mq_host,
        'MEDIA_EXTRACTOR_QUEUE', options)
    # FIX: was the bare name `mq_host` (undefined here); use the
    # instance attribute, consistent with the Consumer above.
    drop_publisher = Publisher("Response Publisher", self.mq_host)

    for x in range(self.num_workers):
        MediaExtractorQueueWorker(
            "mediaextractor-worker-" + str(x),
            drop_consumer.message_queue,
            drop_consumer.confirm_queue,
            drop_publisher,
            self.cf_options,
            self.url_services)
    log.info("Workers started")
    # Block until the consumer terminates.
    drop_consumer.join()
    log.info("Exiting")
def run(self):
    """Start the RSS-fetcher daemon.

    Creates a consumer on RSS_FETCH_QUEUE plus drop/response publishers,
    spawns ``self.num_workers`` worker threads, and blocks until the
    consumer exits. Unhandled exceptions are logged rather than raised.
    """
    try:
        consumer = Consumer(
            "rss-fetcher-consumer", self.mq_host,
            'RSS_FETch_QUEUE'.upper(),  # NOTE(review): literal kept as 'RSS_FETCH_QUEUE'
            {'prefetch_count': self.num_workers})
        # FIX: both publishers previously received the undefined bare
        # name `mq_host`; the host is held on the instance.
        drop_publisher = DropPublisher(self.mq_host)
        response_publisher = ResponsePublisher(self.mq_host)
        for x in range(self.num_workers):
            RssFetcherWorker(
                "worker-" + str(x),
                consumer.message_queue,
                consumer.confirm_queue,
                self.db_config,
                drop_publisher,
                response_publisher)
        log.info("Workers started")
        consumer.join()
    except Exception as e:  # Catch unhandled exceptions
        # FIX: replaced Python-2-only `except Exception, e:` with the
        # `as` form (valid in 2.6+ and required in Python 3).
        log.exception(e)
def run(self):
    """Start the media-extractor daemon.

    Spawns ``self.num_workers`` MediaExtractorQueueWorker threads fed by
    a single MEDIA_EXTRACTOR_QUEUE consumer, then blocks until the
    consumer exits.
    """
    options = {
        'exchange_name': 'metadata',
        'exchange_type': 'fanout',
        'durable_queue': True,
        # Limit unacknowledged deliveries to one per worker.
        'prefetch_count': self.num_workers,
    }
    drop_consumer = Consumer(
        "mediaextractor-consumer", self.mq_host,
        'MEDIA_EXTRACTOR_QUEUE', options)
    # FIX: previously passed the bare, undefined name `mq_host`;
    # use the instance attribute like the Consumer above.
    drop_publisher = Publisher("Response Publisher", self.mq_host)

    for x in range(self.num_workers):
        MediaExtractorQueueWorker(
            "mediaextractor-worker-" + str(x),
            drop_consumer.message_queue,
            drop_consumer.confirm_queue,
            drop_publisher,
            self.cf_options,
            self.url_services)
    log.info("Workers started")
    # Keep the daemon alive while the consumer runs.
    drop_consumer.join()
    log.info("Exiting")
def run(self):
    """Start the RSS-fetcher daemon.

    Creates an RSS_FETCH_QUEUE consumer and drop/response publishers,
    spawns ``self.num_workers`` RssFetcherWorker threads, then blocks
    until the consumer exits. Unhandled exceptions are logged.
    """
    try:
        consumer = Consumer(
            "rss-fetcher-consumer",
            self.mq_host,
            "RSS_FETCH_QUEUE",
            {"prefetch_count": self.num_workers},
        )
        # FIX: both publishers previously received the undefined bare
        # name `mq_host`; the MQ host lives on the instance.
        drop_publisher = DropPublisher(self.mq_host)
        response_publisher = ResponsePublisher(self.mq_host)
        for x in range(self.num_workers):
            RssFetcherWorker(
                "worker-" + str(x),
                consumer.message_queue,
                consumer.confirm_queue,
                self.db_config,
                drop_publisher,
                response_publisher,
            )
        log.info("Workers started")
        consumer.join()
    except Exception as e:  # Catch unhandled exceptions
        # FIX: replaced Python-2-only `except Exception, e:` with the
        # `as` form (valid in 2.6+ and required in Python 3).
        log.exception(e)