Example #1
    def run(self):
        # Parameters to be passed on to the queue consumer
        queue_name = 'SEMANTICS_QUEUE'
        options = {
            'exchange_name': 'metadata',
            'exchange_type': 'fanout',
            'durable_queue': True,
            'prefetch_count': self.num_workers
        }

        drop_consumer = Consumer("semanticsqueue-consumer", self.mq_host,
                                 'SEMANTICS_QUEUE', options)

        drop_publisher = Publisher("Response Publisher", mq_host)

        for x in range(self.num_workers):
            SemanticsQueueWorker("semanticsqueue-worker-" + str(x),
                                 drop_consumer.message_queue,
                                 drop_consumer.confirm_queue, self.api_url,
                                 drop_publisher, self.lock, self.max_retries,
                                 self.sleep_time, self.retry_cache)

        log.info("Workers started")
        drop_consumer.join()
        log.info("Exiting")
    def run(self):
        # Parameters to be passed on to the queue consumer
        queue_name = "SEMANTICS_QUEUE"
        options = {
            "exchange_name": "metadata",
            "exchange_type": "fanout",
            "durable_queue": True,
            "prefetch_count": self.num_workers,
        }

        drop_consumer = Consumer("semanticsqueue-consumer", self.mq_host, "SEMANTICS_QUEUE", options)

        drop_publisher = Publisher("Response Publisher", mq_host)

        for x in range(self.num_workers):
            SemanticsQueueWorker(
                "semanticsqueue-worker-" + str(x),
                drop_consumer.message_queue,
                drop_consumer.confirm_queue,
                self.api_url,
                drop_publisher,
                self.lock,
                self.max_retries,
                self.sleep_time,
                self.retry_cache,
            )

        log.info("Workers started")
        drop_consumer.join()
        log.info("Exiting")
Example #3
    def run(self):
        """Initializes the daemon and spawns s set of workers to listen
        on the MQ for buckets that are ready to push drops"""

        options = {
            'exchange_name': 'chatter',
            'exchange_type': 'topic',
            'routing_key': 'web.bucket.push.*',
            'durable_exchange': True
        }

        # Consumer for USHAHIDI_POST_QUEUE
        postqueue_consumer = Consumer("ushahidi-postqueue-consumer",
                                      self.mq_host, "USHAHIDI_POST_QUEUE",
                                      options)

        # Spawn a set of workers to listen for buckets that are ready
        # to post drops
        for x in range(self.num_workers):
            UshahidiPostQueueWorker("ushahidi-postqueue-worker" + str(x),
                                    postqueue_consumer.message_queue,
                                    postqueue_consumer.confirm_queue, self)

        log.info("Workers started")

        postqueue_consumer.join()
        log.info("Exiting...")
Example #4
    def run(self):
        """Initializes the daemon and spawns s set of workers to listen
        on the MQ for buckets that are ready to push drops"""

        options = {'exchange_name': 'chatter',
                   'exchange_type': 'topic',
                   'routing_key': 'web.bucket.push.*',
                   'durable_exchange': True}

        # Consumer for USHAHIDI_POST_QUEUE
        postqueue_consumer = Consumer(
            "ushahidi-postqueue-consumer",
            self.mq_host,
            "USHAHIDI_POST_QUEUE",
            options)

        # Spawn a set of workers to listen for buckets that are ready
        # to post drops
        for x in range(self.num_workers):
            UshahidiPostQueueWorker(
                "ushahidi-postqueue-worker" + str(x),
                postqueue_consumer.message_queue,
                postqueue_consumer.confirm_queue,
                self)

        log.info("Workers started")

        postqueue_consumer.join()
        log.info("Exiting...")
Example #5
    def run(self):
        log.info("Firehose Started")
        self.drop_publisher = DropPublisher(self.mq_host)

        consumer = Consumer("firehose-consumer", self.mq_host,
                            utils.FIREHOSE_QUEUE)

        self.run_firehose(consumer.message_queue, consumer.confirm_queue)
Example #6
    def run(self):
        options = {'exchange_name': 'metadata',
                   'exchange_type': 'fanout',
                   'durable_queue': True,
                   'prefetch_count': self.num_workers}
        drop_consumer = Consumer("mediaextractor-consumer", self.mq_host,
                                 'MEDIA_EXTRACTOR_QUEUE', options)
        drop_publisher = Publisher("Response Publisher", self.mq_host)

        for x in range(self.num_workers):
            MediaExtractorQueueWorker("mediaextractor-worker-" + str(x), 
                                     drop_consumer.message_queue, 
                                     drop_consumer.confirm_queue, 
                                     drop_publisher, self.cf_options, 
                                     self.url_services)
        log.info("Workers started")

        drop_consumer.join()
        log.info("Exiting")
    def run(self):
        try:
            consumer = Consumer("rss-fetcher-consumer", self.mq_host,
                                'RSS_FETCH_QUEUE',
                                {'prefetch_count': self.num_workers})

            drop_publisher = DropPublisher(self.mq_host)
            response_publisher = ResponsePublisher(self.mq_host)

            for x in range(self.num_workers):
                RssFetcherWorker("worker-" + str(x), consumer.message_queue,
                                 consumer.confirm_queue, self.db_config,
                                 drop_publisher, response_publisher)

            log.info("Workers started")
            consumer.join()
        except Exception as e:
            # Catch unhandled exceptions
            log.exception(e)
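The prefetch_count option that appears throughout these examples caps the number of unacknowledged messages the broker will push to the consumer at once; matching it to num_workers keeps every worker thread busy without flooding the in-process queue. Continuing the pika sketch above (again an assumption about what the Consumer wrapper does, not confirmed SwiftRiver code):

channel.basic_qos(prefetch_count=num_workers)
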
Example #8
    def run(self):
        options = {
            'exchange_name': 'metadata',
            'exchange_type': 'fanout',
            'durable_queue': True,
            'prefetch_count': self.num_workers
        }
        drop_consumer = Consumer("mediaextractor-consumer", self.mq_host,
                                 'MEDIA_EXTRACTOR_QUEUE', options)
        drop_publisher = Publisher("Response Publisher", mq_host)

        for x in range(self.num_workers):
            MediaExtractorQueueWorker("mediaextractor-worker-" + str(x),
                                      drop_consumer.message_queue,
                                      drop_consumer.confirm_queue,
                                      drop_publisher, self.cf_options,
                                      self.url_services)
        log.info("Workers started")

        drop_consumer.join()
        log.info("Exiting")
    def run(self):
        log.info("SwiftRiver RSS Fetcher Started")
        
        self.init_cache()
        self.fetch_publisher = RssFetchPublisher(self.mq_host)

        # Start a pool of threads to handle responses from
        # fetchers and update rss_urls
        fetcher_response_consumer = Consumer("fetcher-response-consumer",
                                             self.mq_host,
                                             'RSS_FETCH_RESPONSE',
                                             {'durable_queue': False,
                                              'prefetch_count': self.num_response_workers})

        for x in range(self.num_response_workers):
            FetcherResponseHandler("response-handler-" + str(x),
                                   fetcher_response_consumer.message_queue,
                                   fetcher_response_consumer.confirm_queue,
                                   self)

        # Start a pool of workers to handle new/removed channel options
        # published by the web front end
        options = {'exchange_name': 'chatter',
                   'exchange_type': 'topic',
                   'routing_key': ['web.river.*',
                                   'web.channel.rss.*'],
                   'durable_exchange':  True,
                   'prefetch_count': self.num_channel_update_workers}
        channel_update_consumer = Consumer("channel-update-consumer",
                                           self.mq_host,
                                           'RSS_UPDATE_QUEUE', options)

        for x in range(self.num_channel_update_workers):
            ChannelUpdateHandler("channel-handler-" + str(x),
                                 channel_update_consumer.message_queue,
                                 channel_update_consumer.confirm_queue, self)

        self.run_scheduler()
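Note the list passed as 'routing_key' for the channel-update consumer: a queue can be bound to a topic exchange under several routing keys, and a message matching any of the bindings is delivered to the queue. In raw AMQP terms this is simply one bind per key (continuing the earlier pika sketch; not SwiftRiver's actual wrapper code):

for key in ("web.river.*", "web.channel.rss.*"):
    channel.queue_bind(queue="RSS_UPDATE_QUEUE", exchange="chatter",
                       routing_key=key)
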
Example #10
    def run(self):
        try:
            consumer = Consumer(
                "rss-fetcher-consumer", self.mq_host, "RSS_FETCH_QUEUE", {"prefetch_count": self.num_workers}
            )

            drop_publisher = DropPublisher(self.mq_host)
            response_publisher = ResponsePublisher(self.mq_host)

            for x in range(self.num_workers):
                RssFetcherWorker(
                    "worker-" + str(x),
                    consumer.message_queue,
                    consumer.confirm_queue,
                    self.db_config,
                    drop_publisher,
                    response_publisher,
                )

            log.info("Workers started")
            consumer.join()
        except Exception as e:
            # Catch unhandled exceptions
            log.exception(e)