Example #1
class MessageHandler:
    def __init__(self):
        self.conn = Connection('amqp://*****:*****@localhost:5672//')

    def add_message(self, message, queue_name):
        simple_queue = self.conn.SimpleQueue(queue_name)
        simple_queue.put(message)
        simple_queue.close()

    def get_message(self, queue_name):
        simple_queue = self.conn.SimpleQueue(queue_name)
        message = simple_queue.get(block=True, timeout=1)
        message.ack()
        simple_queue.close()
        return message
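A minimal round trip with this handler might look like the sketch below; the queue name and payload are placeholders, and a broker is assumed to be reachable at the connection URL above (with real credentials substituted).

handler = MessageHandler()
handler.add_message('hello', 'demo_queue')   # publish
msg = handler.get_message('demo_queue')      # consume and ack
print(msg.payload)                           # -> 'hello'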
Example #2
    def __init__(self, uri=None, host_url='localhost', queue='logging'):
        logging.Handler.__init__(self)
        if not uri:
            raise ValueError('a broker URI is required')
        connection = Connection(uri)
        self.queue = connection.SimpleQueue(queue)
        self.host = host_url
Example #3
    def inner(*args, **kwargs):
        connection = Connection(settings.BROKER_URL)
        connection.connect()

        queue = connection.SimpleQueue(settings.QUEUE_NAME)
        try:
            return func(*args, **kwargs, queue=queue)
        finally:
            # Release the connection even if the wrapped function raises.
            connection.release()
Example #4
class CeleryQueue(QueueBase):
    """ At some point this will adapt `django-signalqueue` for use
        with popular `(dj)celery` platform (but not today).
        
        When this class is done, I will discuss it here. """
    def __init__(self, *args, **kwargs):
        super(CeleryQueue, self).__init__(*args, **kwargs)

        self.celery_queue_name = self.queue_options.pop(
            'celery_queue_name', 'inactive')
        self.serializer = self.queue_options.pop('serializer', 'json')
        self.compression = self.queue_options.pop('compression', None)
        self.kc = Connection(**self.queue_options)
        self.kc.connect()

        self.qc = self.kc.SimpleQueue(name=self.celery_queue_name)

    def ping(self):
        return self.kc.connected and not self.qc.channel.closed

    def push(self, value):
        self.qc.put(value, compression=self.compression,
                    serializer=self.serializer)

    def pop(self):
        virtual_message = self.qc.get(block=False)
        return virtual_message.payload

    def count(self):
        try:
            return self.qc.qsize()
        except kombu.exceptions.StdChannelError:
            self.qc.queue.declare()
            return 0

    def clear(self):
        self.qc.clear()

    def values(self, **kwargs):
        return []

    def __getitem__(self, idx):
        #return self.values().__getitem__(idx)
        return ''

    def dispatch(self, signal, sender=None, **kwargs):
        task_name = "%s:%s" % (signal.regkey, signal.name)
        try:
            result = tasks[task_name].delay(sender=sender, **kwargs)
        except tasks.NotRegistered:
            pass
        else:
            return result
Example #5
class KombuMailbox(AckableMailbox):
    def __init__(self,
                 address,
                 name,
                 transport_options,
                 ssl=False,
                 no_ack=True,
                 queue_opts=None,
                 exchange_opts=None):
        from kombu import Connection
        self._conn = Connection(address,
                                transport_options=transport_options,
                                ssl=ssl)
        self._queue = self._conn.SimpleQueue(name, no_ack, queue_opts,
                                             exchange_opts)
        self._no_ack = no_ack
        self._last_msg = None

    def get(self):
        self._last_msg = self._queue.get()
        return decode(
            unpackb(self._last_msg.body, encoding='utf-8', use_list=False))

    def put(self, message):
        return self._queue.put(
            packb(encode(message), encoding='utf-8', use_bin_type=True))

    def ack(self):
        if self._no_ack:
            return
        if self._last_msg is not None:
            self._last_msg.ack()
            self._last_msg = None

    def encode(self):
        raise NotImplementedError

    @staticmethod
    def decode(params):
        raise NotImplementedError

    def __enter__(self):
        return self

    def __exit__(self, *exc_details):
        self.__del__()

    def __del__(self):
        if hasattr(self, '_queue'):
            self._queue.close()
        if hasattr(self, '_conn'):
            self._conn.close()
Example #6
    def send_task(obj, message):
        log.info('send message: %s' % message['event_type'])
        try:
            connection = Connection(PLAYOUT_BROKER_URL)
            simple_queue = connection.SimpleQueue(BROKER_QUEUE)
            simple_queue.put(json.dumps(message))
            simple_queue.close()
            connection.close()
        except Exception as e:
            log.error('error sending message: %s' % e)
Example #7
    def reset(self):
        logging.debug("Attempting kombu reset: %s" % self.uri)
        conn = Connection(self.uri, connect_timeout=.5)
        try:
            logging.debug("Connected ... creating queue: %s" % self.queue_name)
            q = conn.SimpleQueue(self.queue_name)
            while True:
                logging.debug("Attempting to read msg from queue ...")
                m = q.get(block=False)
                m.ack()
        except Queue.Empty:
            pass
        finally:
            conn.close()
        logging.debug("Done with purge ...")
Example #8
def connect_queue(QUEUE_USERNAME, QUEUE_PASSWORD, QUEUE_HOST,
                  QUEUE_VIRTUAL_HOST, QUEUE):
    #example of parameter
    #QUEUE_USERNAME='******'
    #QUEUE_PASSWORD='******'
    #QUEUE_HOST='queue.platform.acommercedev.com'
    #QUEUE_VIRTUAL_HOST='myvhost'
    #QUEUE='partner_master'

    url = "amqp://{0}:{1}@{2}:5672/{3}".format(QUEUE_USERNAME, QUEUE_PASSWORD,
                                               QUEUE_HOST, QUEUE_VIRTUAL_HOST)
    print("url :") + url
    #### open connection to QUEUE####
    con = Connection(url)
    msg_queue = con.SimpleQueue(QUEUE)
    return msg_queue
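A hypothetical call to this helper; every argument below is a placeholder.

msg_queue = connect_queue('guest', 'guest', 'localhost', 'myvhost', 'partner_master')
msg_queue.put('{"sku": "ABC-1"}')   # message body is published to the queue
msg_queue.close()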
Example #9
    def __init__(self, crawl_name, num_urls=DEFAULT_NUM_URLS):
        """
        Create a NutchUrlTrails instance for visualizing a running Nutch crawl in real-time using Bokeh
        :param crawl_name: The name of the crawl (as identified by the queue)
        :param num_urls: The number of URLs to display in the visualization
        :return: A NutchUrlTrails instance
        """
        self.crawl_name = crawl_name
        self.num_urls = num_urls
        self.open_urls = {}
        self.closed_urls = {}
        self.old_segments = None
        self.old_circles = None

        self.session = Session()
        self.session.use_doc(self.crawl_name)
        self.document = Document()

        con = Connection()

        exchange = Exchange(EXCHANGE_NAME, 'direct', durable=False)
        queue = Queue(crawl_name, exchange=exchange, routing_key=crawl_name)
        self.queue = con.SimpleQueue(name=queue)
Example #10
class CommissaireService(ConsumerMixin, BusMixin):
    """
    Commissaire service class.
    """

    #: Default configuration file if none is specified. Subclasses
    #: should override this.
    _default_config_file = C.DEFAULT_CONFIGURATION_FILE

    def __init__(self,
                 exchange_name,
                 connection_url,
                 qkwargs,
                 config_file=None):
        """
        Initializes a new Service instance.

        :param exchange_name: Name of the topic exchange.
        :type exchange_name: str
        :param connection_url: Kombu connection url.
        :type connection_url: str
        :param qkwargs: One or more dicts keyword arguments for queue creation
        :type qkwargs: list
        :param config_file: Path to the configuration file location.
        :type config_file: str or None
        """
        name = self.__class__.__name__
        self.logger = logging.getLogger(name)
        self.logger.debug('Initializing {}'.format(name))

        # If we are given no default, use the global one
        # Read the configuration file
        self._config_data = read_config_file(config_file,
                                             self._default_config_file)

        if connection_url is None and 'bus_uri' in self._config_data:
            connection_url = self._config_data.get('bus_uri')
            self.logger.debug('Using connection_url=%s from config file',
                              connection_url)
        if exchange_name is None and 'bus_exchange' in self._config_data:
            exchange_name = self._config_data.get('bus_exchange')
            self.logger.debug('Using exchange_name=%s from config file',
                              exchange_name)

        self.connection = Connection(connection_url)
        self._channel = self.connection.default_channel
        self._exchange = Exchange(exchange_name,
                                  type='topic').bind(self._channel)
        self._exchange.declare()

        # Set up queues
        self._queues = []
        for kwargs in qkwargs:
            queue = Queue(**kwargs)
            queue.exchange = self._exchange
            queue = queue.bind(self._channel)
            self._queues.append(queue)
            self.logger.debug(queue.as_dict())

        # Create producer for publishing on topics
        self.producer = Producer(self._channel, self._exchange)
        self.logger.debug('Initializing of {} finished'.format(name))

    def get_consumers(self, Consumer, channel):
        """
        Returns a list of consumers to watch. Called by the parent Mixin.

        :param Consumer: Message consumer class.
        :type Consumer: kombu.Consumer
        :param channel: An opened channel.
        :type channel: kombu.transport.*.Channel
        :returns: A list of Consumer instances.
        :rtype: list
        """
        consumers = []
        self.logger.debug('Setting up consumers')
        for queue in self._queues:
            self.logger.debug('Will consume on {}'.format(queue.name))
            consumers.append(Consumer(queue, callbacks=[self.on_message]))
        self.logger.debug('Consumers: {}'.format(consumers))
        return consumers

    def on_message(self, body, message):
        """
        Called when a new message arrives.

        :param body: Body of the message.
        :type body: dict or json string
        :param message: The message instance.
        :type message: kombu.message.Message
        """
        self.logger.debug('Received message "{}" {}'.format(
            message.delivery_tag, body))
        expected_method = message.delivery_info['routing_key'].rsplit('.',
                                                                      1)[1]

        # If we don't get a valid message we default to -1 for the id
        response = {'jsonrpc': '2.0', 'id': -1}
        try:
            # If we don't have a dict then it should be a json string
            if isinstance(body, str):
                body = json.loads(body)

            # If we have a method and it matches the routing key treat it
            # as a jsonrpc call
            if (isinstance(body, dict) and 'method' in body.keys()
                    and body.get('method') == expected_method):
                response['id'] = body.get('id', -1)
                method = getattr(self, 'on_{}'.format(body['method']))
                if type(body.setdefault('params', {})) is dict:
                    result = method(message=message, **body['params'])
                else:
                    result = method(message, *body['params'])
                response['result'] = result

                self.logger.debug('Result for "{}": "{}"'.format(
                    response['id'], result))
            else:
                # Drop it
                self.logger.error('Dropping unknown message: payload="{}", '
                                  'properties="{}"'.format(
                                      body, message.properties))
        except Exception as error:
            # Subclasses of RemoteProcedureCallError are re-created and
            # raised on the client-side.
            if isinstance(error, RemoteProcedureCallError):
                response['error'] = {
                    'code': error.code,
                    'message': str(error),
                    'data': error.data
                }
            else:
                jsonrpc_error_code = C.JSONRPC_ERRORS['INVALID_REQUEST']
                # If there is an attribute error then use the Method Not Found
                # code in the error response
                if type(error) is AttributeError:
                    jsonrpc_error_code = C.JSONRPC_ERRORS['METHOD_NOT_FOUND']
                elif type(error) is json.decoder.JSONDecodeError:
                    jsonrpc_error_code = C.JSONRPC_ERRORS['INVALID_JSON']
                response['error'] = {
                    'code': jsonrpc_error_code,
                    'message': str(error),
                    'data': {
                        'exception': str(type(error))
                    }
                }
                self.logger.warn(
                    'Exception raised during method call:\n{}'.format(
                        traceback.format_exc()))

        # Reply back if needed
        if message.properties.get('reply_to'):
            self.logger.debug('Responding to {}'.format(
                message.properties['reply_to']))
            response_queue = self.connection.SimpleQueue(
                message.properties['reply_to'])
            response_queue.put(json.dumps(response))
            response_queue.close()

        message.ack()
        self.logger.debug('Message "{}" {} ackd'.format(
            message.delivery_tag,
            ('was' if message.acknowledged else 'was not')))

    def respond(self, queue_name, id, payload, **kwargs):
        """
        Sends a response to a simple queue. Responses are sent back in
        reply to a request; this method should never own the queue.

        :param queue_name: The name of the queue to use.
        :type queue_name: str
        :param id: The unique request id
        :type id: str
        :param payload: The content of the message.
        :type payload: dict
        :param kwargs: Keyword arguments to pass to SimpleQueue
        :type kwargs: dict
        """
        self.logger.debug('Sending response for message id "{}"'.format(id))
        send_queue = self.connection.SimpleQueue(queue_name, **kwargs)
        jsonrpc_msg = {
            'jsonrpc': "2.0",
            'id': id,
            'result': payload,
        }
        self.logger.debug('jsonrpc msg: {}'.format(jsonrpc_msg))
        send_queue.put(jsonrpc_msg)
        self.logger.debug('Sent response for message id "{}"'.format(id))
        send_queue.close()

    def on_connection_revived(self):  # pragma: no cover
        """
        Called when a reconnection occurs.
        """
        self.logger.info('Connection (re)established')

    def on_consume_ready(self, connection, channel,
                         consumers):  # pragma: no cover
        """
        Called when the service is ready to consume messages.

        :param connection: The current connection instance.
        :type connection: kombu.Connection
        :param channel: The current channel.
        :type channel: kombu.transport.*.Channel
        :param consumers: A list of consumers.
        :type consumers: list
        """
        self.logger.info('Ready to consume')
        if self.logger.level == logging.DEBUG:
            queue_names = []
            for consumer in consumers:
                queue_names += [x.name for x in consumer.queues]
            self.logger.debug(
                'Consuming via connection "{}" and channel "{}" on '
                'the following queues: "{}"'.format(connection.as_uri(),
                                                    channel,
                                                    '", "'.join(queue_names)))

    def on_consume_end(self, connection, channel):  # pragma: no cover
        """
        Called when the service stops consuming.

        :param connection: The current connection instance.
        :type connection: kombu.Connection
        :param channel: The current channel.
        :type channel: kombu.transport.*.Channel
        """
        self.logger.warn('Consuming has ended')
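Since on_message publishes its JSON-RPC response to the queue named in message.properties['reply_to'], a caller can drive this service with a plain kombu producer plus a SimpleQueue for the reply. The sketch below is illustrative only: the broker URL, exchange name, routing key, and the existence of an on_ping handler are all assumptions.

import json
from kombu import Connection, Exchange, Producer

with Connection('amqp://localhost//') as conn:          # assumed broker URL
    channel = conn.default_channel
    # Assumed exchange name; the running service must have declared it
    # and bound a queue whose routing key matches 'service.ping'.
    exchange = Exchange('commissaire', type='topic').bind(channel)
    reply_queue = conn.SimpleQueue('reply-1234')        # service publishes here
    Producer(channel, exchange).publish(
        json.dumps({'jsonrpc': '2.0', 'id': 1, 'method': 'ping', 'params': {}}),
        routing_key='service.ping',   # last segment must equal the method name
        reply_to='reply-1234')
    response = json.loads(reply_queue.get(timeout=5).payload)
    reply_queue.close()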
Example #11
class Queue(object):
    def __init__(self,
                 url=None,
                 hostname=None,
                 userid=None,
                 password=None,
                 queue_name="basic_queue",
                 serializer="pickle",
                 compression=None):
        # current_queue
        self.is_local = True
        self.localized = None
        self.distributed = None
        if (hostname is None or userid is None
                or password is None) and url is None:
            self.localized = multiprocessing.Queue()
        else:
            self.is_local = False
            self.serializer = serializer
            self.compression = compression
            self.connection = Connection(url, heartbeat=5)
            self.distributed = self.connection.SimpleQueue(queue_name)

    def __len__(self):
        return self.qsize()

    def join(self):
        pass

    def qsize(self) -> int:
        if self.is_local:
            return self.localized.qsize()
        return self.distributed.qsize()

    def clear_queue(self):
        if self.is_local:
            while not self.empty():
                self.get_nowait()
        else:
            self.distributed.clear()

    def empty(self) -> bool:
        if self.is_local:
            return self.localized.empty()
        return self.distributed.qsize() == 0

    def put(self, item, block=True, timeout=None):
        if self.is_local:
            self.localized.put(item, block=block, timeout=timeout)
        else:
            self.distributed.put(item,
                                 serializer=self.serializer,
                                 compression=self.compression)

    def get(self, block=True, timeout=None):
        if self.is_local:
            return self.localized.get(block=block, timeout=timeout)
        else:
            return self.distributed.get(block=block, timeout=timeout)

    def put_nowait(self, item):
        if self.is_local:
            self.localized.put_nowait(item)
        else:
            # kombu's SimpleQueue publishes without blocking, so put()
            # is already the non-blocking operation here.
            self.distributed.put(item,
                                 serializer=self.serializer,
                                 compression=self.compression)

    def get_nowait(self):
        if self.is_local:
            return self.localized.get_nowait()
        return self.distributed.get_nowait()

    def close(self):
        """ Close the distributed queue; a local queue needs no cleanup. """
        if not self.is_local:
            self.distributed.close()
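The class exposes one interface over two backends; note the asymmetry that the kombu-backed get() returns a kombu Message rather than the raw item. A sketch, with an illustrative AMQP URL:

local_q = Queue()                      # no URL or credentials -> multiprocessing
local_q.put({'n': 1})
print(local_q.get())                   # -> {'n': 1}

remote_q = Queue(url='amqp://guest:guest@localhost:5672//')   # kombu-backed
remote_q.put({'n': 2})
print(remote_q.get().payload)          # kombu returns a Message; payload is the body
remote_q.close()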
Example #12
class Bus(BusMixin):
    """
    Connection to a bus.
    """

    def __init__(self, exchange_name, connection_url, qkwargs):
        """
        Initializes a new Bus instance.

        :param exchange_name: Name of the topic exchange.
        :type exchange_name: str
        :param connection_url: Kombu connection url.
        :type connection_url: str
        :param qkwargs: One or more dicts keyword arguments for queue creation
        :type qkwargs: list
        """
        self.logger = logging.getLogger('Bus')
        self.logger.debug('Initializing bus connection')
        self.connection = None
        self._channel = None
        self._exchange = None
        self.exchange_name = exchange_name
        self.connection_url = connection_url
        self.qkwargs = qkwargs
        self.storage = StorageClient(self)

    @property
    def init_kwargs(self):
        """
        Returns the initializing kwargs for this instance.

        :returns: the initializing kwargs for this instance.
        :rtype: dict
        """
        return {
            'exchange_name': self.exchange_name,
            'connection_url': self.connection_url,
            'qkwargs': self.qkwargs,
        }

    def connect(self):
        """
        'Connects' to the bus.

        :returns: The same instance.
        :rtype: commissaire_http.bus.Bus
        """
        if self.connection is not None:
            self.logger.warn('Bus already connected.')
            return self

        self.connection = Connection(self.connection_url)
        self._channel = self.connection.channel()
        self._exchange = Exchange(
            self.exchange_name, type='topic').bind(self._channel)
        self._exchange.declare()

        # Create queues
        self._queues = []
        for kwargs in self.qkwargs:
            queue = Queue(**kwargs)
            queue.exchange = self._exchange
            queue = queue.bind(self._channel)
            self._queues.append(queue)
            self.logger.debug('Created queue {}'.format(queue.as_dict()))

        # Create producer for publishing on topics
        self.producer = Producer(self._channel, self._exchange)
        self.logger.debug('Bus connection finished')
        return self

    def respond(self, queue_name, id, payload, **kwargs):  # pragma: no cover
        """
        Sends a response to a simple queue. Responses are sent back in
        reply to a request; this method should never own the queue.

        :param queue_name: The name of the queue to use.
        :type queue_name: str
        :param id: The unique request id
        :type id: str
        :param payload: The content of the message.
        :type payload: dict
        :param kwargs: Keyword arguments to pass to SimpleQueue
        :type kwargs: dict
        """
        self.logger.debug('Sending response for message id "{}"'.format(id))
        send_queue = self.connection.SimpleQueue(queue_name, **kwargs)
        jsonrpc_msg = {
            'jsonrpc': "2.0",
            'id': id,
            'result': payload,
        }
        self.logger.debug('jsonrpc msg: {}'.format(jsonrpc_msg))
        send_queue.put(jsonrpc_msg)
        self.logger.debug('Sent response for message id "{}"'.format(id))
        send_queue.close()
Example #13
def setup():
    global conn, q
    conn = Connection(connectionString)
    conn.connect()
    q = conn.SimpleQueue(queueName)
Example #14
if settings.MOESIF_MIDDLEWARE.get('USE_CELERY', False):
    if BROKER_URL:
        try:
            from apscheduler.schedulers.background import BackgroundScheduler
            from apscheduler.triggers.interval import IntervalTrigger
            import atexit
            from kombu import Connection
            from kombu.exceptions import ChannelError

            scheduler = BackgroundScheduler(daemon=True)
            scheduler.start()
            config, config_etag, sampling_percentage, last_updated_time = \
                get_config()
            try:
                conn = Connection(BROKER_URL)
                moesif_events_queue = conn.SimpleQueue('moesif_events_queue')
                scheduler.add_job(
                    func=lambda: async_client_create_event(
                        moesif_events_queue, config, config_etag,
                        last_updated_time),
                    trigger=IntervalTrigger(seconds=5),
                    id='moesif_events_batch_job',
                    name='Schedule events batch job every 5 second',
                    replace_existing=True)

                # Exit handler when exiting the app
                atexit.register(
                    lambda: exit_handler(moesif_events_queue, scheduler))
            except Exception:
                if DEBUG:
                    print("Error while connecting to - {0}".format(BROKER_URL))
Example #15
class KombuQueue(object):
    """
    kombu is a high-level interface for multiple message queue backends.

    KombuQueue is built on top of kombu API.
    """

    Empty = BaseQueue.Empty
    Full = BaseQueue.Full
    max_timeout = 0.3

    def __init__(self, name, url="amqp://", maxsize=0, lazy_limit=True):
        """
        Constructor for KombuQueue

        url:        http://kombu.readthedocs.org/en/latest/userguide/connections.html#urls
        maxsize:    an integer that sets the upper bound limit on the number of
                    items that can be placed in the queue.
        """
        self.name = name
        self.conn = Connection(url)
        self.queue = self.conn.SimpleQueue(self.name, no_ack=True, serializer='umsgpack')

        self.maxsize = maxsize
        self.lazy_limit = lazy_limit
        if self.lazy_limit and self.maxsize:
            self.qsize_diff_limit = int(self.maxsize * 0.1)
        else:
            self.qsize_diff_limit = 0
        self.qsize_diff = 0

    def qsize(self):
        try:
            return self.queue.qsize()
        except ChannelError:
            return 0

    def empty(self):
        return self.qsize() == 0

    def full(self):
        return bool(self.maxsize) and self.qsize() >= self.maxsize

    def put(self, obj, block=True, timeout=None):
        if not block:
            return self.put_nowait(obj)

        start_time = time.time()
        while True:
            try:
                return self.put_nowait(obj)
            except BaseQueue.Full:
                if timeout:
                    lasted = time.time() - start_time
                    if timeout > lasted:
                        time.sleep(min(self.max_timeout, timeout - lasted))
                    else:
                        raise
                else:
                    time.sleep(self.max_timeout)

    def put_nowait(self, obj):
        if self.lazy_limit and self.qsize_diff < self.qsize_diff_limit:
            pass
        elif self.full():
            raise BaseQueue.Full
        else:
            self.qsize_diff = 0
        return self.queue.put(obj)

    def get(self, block=True, timeout=None):
        try:
            ret = self.queue.get(block, timeout)
            return ret.payload
        except self.queue.Empty:
            raise BaseQueue.Empty

    def get_nowait(self):
        try:
            ret = self.queue.get_nowait()
            return ret.payload
        except self.queue.Empty:
            raise BaseQueue.Empty

    def delete(self):
        self.queue.queue.delete()

    def __del__(self):
        self.queue.close()
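A short usage sketch (broker URL, queue name, and payload are placeholders; the 'umsgpack' serializer the constructor requests must already be registered with kombu):

q = KombuQueue('demo', url='amqp://guest:guest@localhost:5672//', maxsize=10)
q.put({'job': 1})            # waits if the queue reports full
print(q.get(timeout=1))      # -> {'job': 1}; raises BaseQueue.Empty on timeout
q.delete()                   # removes the underlying queue from the broker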
Example #16
from kombu import Connection

conn = Connection("amqp://*****:*****@localhost:5672/")

queue = conn.SimpleQueue('example1')
queue.put("test from producer")

conn.close()
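The matching consumer side of this snippet might look like the sketch below; the credentials are placeholders for the masked ones above.

from kombu import Connection

with Connection("amqp://guest:guest@localhost:5672/") as conn:  # credentials assumed
    queue = conn.SimpleQueue('example1')
    message = queue.get(block=True, timeout=5)
    print(message.payload)   # -> "test from producer"
    message.ack()
    queue.close()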
Example #17
    def __init__(self, uri=None, queue='logging'):
        logging.Handler.__init__(self)
        if not uri:
            raise ValueError('a broker URI is required')
        connection = Connection(uri)
        self.queue = connection.SimpleQueue(queue)
Example #18
sys.path.append(os.getcwd() + "/..")

from scraper import scraper
from repo import db_repo

CYCLONE_LIST_QUEUE = 'cyclones.list'
CYCLONE_DETAILS_QUEUE = 'cyclones.details'
CYCLONE_HISTORY_QUEUE = "cyclone.history"
CYCLONE_FORECAST_QUEUE = "cyclone.forecast"

BROKER_URL = "redis://redis"

kombu_conn = Connection(BROKER_URL)

list_queue = kombu_conn.SimpleQueue(CYCLONE_LIST_QUEUE)
details_queue = kombu_conn.SimpleQueue(CYCLONE_DETAILS_QUEUE)
history_queue = kombu_conn.SimpleQueue(CYCLONE_HISTORY_QUEUE)
forecast_queue = kombu_conn.SimpleQueue(CYCLONE_FORECAST_QUEUE)

CELERY_BROKER_URL = 'redis://redis'

app = Celery('tasks', broker=CELERY_BROKER_URL)


@app.task
def extract_cyclones_list():
    log.info("extract list of cyclones")
    cyclones = scraper.extract_cyclones_list()
    for c in cyclones:
        log.info("adding %s", c)
Example #19
class StreamArchiver(LogMixin, NotificationMixin, StreamListener):
    def __init__(self, archives, verbosity=1, *args, **kwargs):

        super().__init__(*args, **kwargs)

        # A temporary storage for use in exception forensics
        self.raw_data = None

        # All archives in a stream belong to the same user
        self.user = archives[0].user

        self.verbosity = verbosity
        self.connection = Connection(settings.BROKER_URL)

        self.channels = []
        for archive in archives:
            with Connection(settings.BROKER_URL) as connection:
                consumer = ArchiveConsumer(archive,
                                           connection,
                                           verbosity=self.verbosity)
                consumer.start()
                self.channels.append({
                    "archive": archive,
                    "consumer": consumer,
                    "queue": self.connection.SimpleQueue(
                        "archiver:{}".format(archive.pk)),
                })

    def set_verbosity(self, verbosity):
        self.logger.info("Setting {} stream verbosity to {}".format(
            self.user, verbosity))
        self.verbosity = verbosity
        for channel in self.channels:
            channel["consumer"].set_verbosity(self.verbosity)

    def on_data(self, raw_data):
        self.raw_data = raw_data
        return StreamListener.on_data(self, raw_data)

    def on_status(self, status):

        for channel in self.channels:

            archive = channel["archive"]
            queue = channel["queue"]
            query = archive.query.lower()

            if query in status.text.lower():
                queue.put(status._json)
            elif hasattr(status, "retweeted_status"):
                if query in status.retweeted_status.text.lower():
                    queue.put(status._json)
                elif hasattr(status.retweeted_status, "quoted_status"):
                    if query in status.retweeted_status.quoted_status[
                            "text"].lower():  # NOQA: E501
                        queue.put(status._json)
            elif hasattr(status, "quoted_status"):
                if query in status.quoted_status["text"].lower():
                    queue.put(status._json)

    def on_exception(self, exception):

        additional = "Source: {}".format(self.raw_data)

        self._alert("Collector exception [listener]", exception, additional)

        stderr.write("\n\nEXCEPTION:\n{}\n\nSource: {}\n".format(
            exception, additional))

        self.close_log()

        return False

    def on_error(self, status_code):

        message = str(status_code)
        if status_code == 401:
            message = (
                f"Twitter issued a 401 for {self.user}, so they've been "
                f"kicked.")
            self.user.status = User.STATUS_DISABLED
            self.user.save(update_fields=("status", ))

        self._alert("Collector Twitter error", message)

        stderr.write("ERROR: Twitter responded with {}".format(status_code))

        self.close_log()

        return False

    def on_disconnect(self, notice):
        """
        This is what happens if *Twitter* sends a disconnect, not if we
        disconnect from the stream ourselves.
        """
        self._alert("Collector disconnect", str(notice))
        stderr.write("\n\nTwitter disconnect: {}\n\n\n".format(notice))
        self.close_log()
        return False

    def close_log(self):

        self.connection.close()
        self.connection.release()

        # Set `should_stop` which queues the consumer to close everything up
        for channel in self.channels:
            channel["consumer"].should_stop = True

        # Now wait until the consumer has confirmed that it's finished
        for channel in self.channels:
            while not channel["consumer"].is_stopped:
                if self.verbosity > 1:
                    self.logger.info("Waiting for {} to stop".format(
                        channel["consumer"].archive))
                sleep(0.1)

        stdout.flush()

        Archive.objects.filter(
            pk__in=[__["archive"].pk
                    for __ in self.channels]).update(is_running=False)
Example #20
class KomBuClient(ConsumerMixin):
    def __init__(self, hosts_conf, exchange_name='', exchange_type='', exchange_arguments=None,
                 queue_name='', routing_key='', queue_arguments=None, callback=None, no_ack=True):
        self.hosts_conf = hosts_conf
        self.hosts = self.create_hosts()
        self.connection = Connection(self.hosts)
        self.task_exchange = Exchange(name=exchange_name, type=exchange_type, arguments=exchange_arguments)
        self.task_queues = [Queue(name=queue_name, exchange=self.task_exchange, routing_key=routing_key,
                                  queue_arguments=queue_arguments)]
        self.callback = callback
        self.no_ack = no_ack

    def queue_size(self, queue_list, queue_arguments=None):
        result = dict()
        for i in queue_list:
            queue_size = self.connection.SimpleQueue(name=Queue(name=i, queue_arguments=queue_arguments)).qsize()
            result[i] = queue_size
        return result

    def create_hosts(self):
        hosts_list = []
        for i in self.hosts_conf:
            host = i.get('host', '127.0.0.1')
            port = i.get('port', '5672')
            username = i.get('username', 'guest')
            passwd = i.get('passwd', 'guest')
            auth = "amqp://{username}:{passwd}@{host}:{port}//".format(username=username, passwd=passwd,
                                                                       host=host, port=port)
            hosts_list.append(auth)
        return hosts_list

    def get_consumers(self, Consumer, channel):
        channel.basic_qos(prefetch_size=0, prefetch_count=1, a_global=False)
        return [Consumer(queues=self.task_queues,
                         accept=['json', 'pickle', 'msgpack', 'yaml'],
                         callbacks=[self.callback], no_ack=self.no_ack)]

    def process_task(self, body, message):
        print(self.hosts)
        print(body, message.properties)
        message.ack()

    def start(self):
        self.run()

    def send_task(self, payload, routing_key=None, priority=0, content_type=None, content_encoding=None,
                  serializer=None, headers=None, compression=None, exchange=None, retry=False,
                  retry_policy=None, declare=[], expiration=None):
        try:
            with producers[self.connection].acquire(block=True) as producer:
                producer.publish(payload,
                                 serializer=serializer, compression=compression, exchange=exchange,
                                 declare=declare, routing_key=routing_key, priority=priority,
                                 content_type=content_type, content_encoding=content_encoding, headers=headers,
                                 retry=retry, retry_policy=retry_policy, expiration=expiration)
        except Exception as error:
            return False, error
        return True, None

    def close(self):
        self.connection.close()
Example #21
class connection:
    """
    
    Class to connect to a BITCloud server.
    
    Kwargs:
        
    
    Kwrvals:
        
    
    See Also:
        
    
    Notes:
        
    
    Example:
        
    
    References:
        .. [1]
        
    """
    TRANSPORT = 'amqp'
    VHOST = '/'
    RABBIT_PORT = 5672
    MONGO_HOST = '193.136.222.234'
    MONGO_PORT = 27017

    def __init__(self,
                 host='localhost',
                 user=None,
                 passwd=None,
                 queue='BiometricsQ',
                 dbName='BiometricsExperiments'):
        """
        
        Establish the connection to the server.
        
        Kwargs:
            
        
        Kwrvals:
            
        
        See Also:
            
        
        Notes:
            
        
        Example:
            
        
        References:
            .. [1]
            
        """

        # connect to RabbitMQ server
        self.conn = Connection(hostname=host,
                               port=self.RABBIT_PORT,
                               transport=self.TRANSPORT,
                               userid=user,
                               password=passwd,
                               virtual_host=self.VHOST)

        # declare queue (persistent queue)
        self.queue = self.conn.SimpleQueue(queue)

        # connect to MongoDB
        self.mongo_conn = pmg.Connection(self.MONGO_HOST, self.MONGO_PORT)
        self.mongo_db = self.mongo_conn[dbName]
        self.mongo_collection = self.mongo_db['experiments']
        self.mongo_id = self.mongo_db['IDTracker']

        # check if ID tracker exists
        res = self.mongo_id.find_one({'_id': 0}, {'_id': 1})
        if res is None:
            self.mongo_id.insert({'_id': 0, 'nextID': 0})

        # check if user exists in MongoDB
        collection = self.mongo_db['users']
        doc = collection.find_one({'username': user}, {'_id': 1})
        if doc is None:
            s = input(
                "Please register your E-Mail to be notified of task completion:\n"
            )
            while True:
                if s == '':
                    collection.insert({'username': user})
                    print("User %s registered without E-Mail.\nYou will not be able to receive notifications." % user)
                    break
                elif '@' not in s:
                    s = input(
                        "Badly formatted E-Mail address - missing '@' character.\nYou provided %s.\nPlease retype the E-Mail address:\n"
                        % s)
                elif '.' not in s.split('@')[1]:
                    s = input(
                        "Badly formatted E-Mail address - missing domain name.\nYou provided %s.\nPlease retype the E-Mail address:\n"
                        % s)
                else:
                    collection.insert({'username': user, 'email': s})
                    print("User %s registered with E-Mail %s." % (user, s))
                    break

        # extra self things
        self.user = user

    def addTask(self, task=None):

        # check inputs
        if task is None:
            raise TypeError("Please specify a task.")

        # get a new experiment ID from MongoDB (atomic operation)
        expID = self.mongo_id.find_and_modify(query={'_id': 0},
                                              update={'$inc': {
                                                  'nextID': 1
                                              }})['nextID']

        task.update({
            '_id': expID,
            'user': self.user,
            'status': 'waiting',
            'seen': False
        })

        # save to MongoDB
        self.mongo_collection.insert(task)

        # confirm insert
        _ = self.mongo_collection.find_one({'_id': expID}, {'status': 1})

        # publish to RabbitMQ
        self.queue.put({'_id': expID})

        return expID

    def cancelTask(self, expID):
        # cancel a task (will be ignored by worker when delivered)

        ### update status checks on worker

        # update and get task
        qry = {'_id': expID}
        upd = {
            '$set': {
                'user': self.user,
                'status': 'finished',
                'seen': True,
                'results': None,
                'worker': None,
            },
        }
        self.mongo_collection.update(qry, upd)

    def requeue(self, expID):
        # requeue a task

        # update and get task
        qry = {'_id': expID}
        upd = {'$set': {'user': self.user, 'status': 'waiting', 'seen': False}}
        self.mongo_collection.update(qry, upd)

        # publish to RabbitMQ
        self.queue.put(qry)

    def listTasks(self, status='all', seen=None):
        # list tasks belonging to user
        # status = 'waiting' | 'running' | 'finished' | 'error' | 'all'

        search = {'user': self.user}

        # update the search
        if status != 'all':
            search.update({'status': status})

        if seen is not None:
            search.update({'seen': seen})

        # perform the search
        doc = self.mongo_collection.find(search, {'_id': 1})
        res = [item['_id'] for item in doc]

        return res

    def getTaskInfo(self, expID):
        # retrieve a task

        doc = self.mongo_collection.find_one({'_id': expID})

        return doc

    def search(self, spec=None, fields=None):
        # search task DB

        return self.mongo_collection.find(spec, fields)

    def collectResults(self, expIDList, mapper, basePath, headerOrder=None):
        # collect the results of a list of experiments, selecting the items in mapper

        if headerOrder is None:
            # use default order
            headerOrder = ['starting_data', 'train', 'test', 'dimreduction']

        # determine number of columns per row
        keyOrder = []
        for item in headerOrder:
            try:
                aux = len(mapper[item]['items'])
            except KeyError:
                pass
            else:
                if aux > 0:
                    keyOrder.append(item)

        rows = []
        for item in expIDList:
            task = self.getTaskInfo(item)
            mrow = []

            # task ID
            mrow.append(task['_id'])

            # general parameters
            for c in keyOrder:
                for pair in mapper[c]['items']:
                    mrow.append(misc.slasherDict(task[c], pair[1]))

            # classifiers
            for name in mapper['classifier']['instances']:
                for clf in mapper['classifier']['instances'][name]:
                    row = copy.deepcopy(mrow)

                    # append name
                    row.append(name)

                    # append parameters
                    try:
                        for pair in mapper['classifier']['items']:
                            row.append(
                                misc.slasherDict(task['classifier'][clf],
                                                 pair[1]))
                    except KeyError:
                        # clf not present
                        continue

                    # load global results
                    res = datamanager.skLoad(
                        os.path.join(basePath, 'Exp-%d' % item, 'results', clf,
                                     'results-global.dict'))
                    EER = 100. * res['global']['authentication']['rates'][
                        'EER'][0, 1]
                    EID = 100. * res['global']['identification']['rates']['Err']
                    row.append(EER)
                    row.append(EID)

                    # append row
                    rows.append(row)

        # build header
        header = [[], []]

        # task ID
        header[0].append('Task ID')
        header[1].append('')

        # general parameters
        for c in keyOrder:
            header[0].append(mapper[c]['name'])
            nb = len(mapper[c]['items'])
            header[0].extend((nb - 1) * [''])
            for i in range(nb):
                header[1].append(mapper[c]['items'][i][0])

        # classifier
        header[0].append(mapper['classifier']['name'])
        nb = len(mapper['classifier']['items'])
        header[0].extend((nb + 2) * [''])  # do not forget name, EER, EID
        header[1].append('Name')
        for i in range(nb):
            header[1].append(mapper['classifier']['items'][i][0])
        header[1].append('EER (%)')
        header[1].append('EID (%)')

        return {'rows': rows, 'header': header}

    def markSeen(self, expID):
        # mark a task (or list of tasks) as seen

        if type(expID) is int:
            expID = [expID]
        elif type(expID) is list:
            pass
        else:
            raise TypeError("The input must be of type int or list")

        for item in expID:
            self.mongo_collection.update({'_id': item},
                                         {'$set': {
                                             'seen': True
                                         }})

    def close(self):
        # close connection to RabbitMQ
        self.conn.release()
        # close connection to MongoDB
        self.mongo_conn.close()
Example #22
class CommissaireService(ConsumerMixin, BusMixin):
    """
    An example prototype CommissaireService base class.
    """

    def __init__(self, exchange_name, connection_url, qkwargs):
        """
        Initializes a new Service instance.

        :param exchange_name: Name of the topic exchange.
        :type exchange_name: str
        :param connection_url: Kombu connection url.
        :type connection_url: str
        :param qkwargs: One or more dicts keyword arguments for queue creation
        :type qkwargs: list
        """
        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.debug('Initializing {0}'.format(self.__class__.__name__))
        self.connection = Connection(connection_url)
        self._channel = self.connection.channel()
        self._exchange = Exchange(
            exchange_name, type='topic').bind(self._channel)
        self._exchange.declare()

        # Set up queues
        self._queues = []
        for kwargs in qkwargs:
            queue = Queue(**kwargs)
            queue.exchange = self._exchange
            queue = queue.bind(self._channel)
            self._queues.append(queue)
            self.logger.debug(queue.as_dict())

        # Create producer for publishing on topics
        self.producer = Producer(self._channel, self._exchange)
        self.logger.debug('Initializing finished')

    def get_consumers(self, Consumer, channel):
        """
        Returns a list of consumers to watch. Called by the parent Mixin.

        :param Consumer: Message consumer class.
        :type Consumer: kombu.Consumer
        :param channel: An opened channel.
        :type channel: kombu.transport.*.Channel
        :returns: A list of Consumer instances.
        :rtype: list
        """
        consumers = []
        self.logger.debug('Setting up consumers')
        for queue in self._queues:
            self.logger.debug('Will consume on {0}'.format(queue.name))
            consumers.append(
                Consumer(queue, callbacks=[self._wrap_on_message]))
        self.logger.debug('Consumers: {}'.format(consumers))
        return consumers

    def on_message(self, body, message):
        """
        Called when a non-jsonrpc message arrives.

        :param body: Body of the message.
        :type body: dict
        :param message: The message instance.
        :type message: kombu.message.Message
        """
        self.logger.error(
            'Dropping unknown message: payload="{}", properties="{}"'.format(
                body, message.properties))

    def _wrap_on_message(self, body, message):
        """
        Wraps on_message for jsonrpc routing and logging.

        :param body: Body of the message.
        :type body: dict or json string
        :param message: The message instance.
        :type message: kombu.message.Message
        """
        self.logger.debug('Received message "{}" {}'.format(
            message.delivery_tag, body))
        expected_method = message.delivery_info['routing_key'].rsplit(
            '.', 1)[1]

        # If we don't get a valid message we default to -1 for the id
        response = {'jsonrpc': '2.0', 'id': -1}
        try:
            # If we don't have a dict then it should be a json string
            if isinstance(body, str):
                body = json.loads(body)

            # If we have a method and it matches the routing key treat it
            # as a jsonrpc call
            if (
                    isinstance(body, dict) and
                    'method' in body.keys() and
                    body.get('method') == expected_method):
                response['id'] = body.get('id', -1)
                method = getattr(self, 'on_{}'.format(body['method']))
                if type(body.setdefault('params', {})) is dict:
                    result = method(message=message, **body['params'])
                else:
                    result = method(message, *body['params'])
                response['result'] = result

                self.logger.debug('Result for "{}": "{}"'.format(
                    response['id'], result))
            # Otherwise send it to on_message
            else:
                self.on_message(body, message)
        except Exception as error:
            jsonrpc_error_code = -32600
            # If there is an attribute error then use the Method Not Found
            # code in the error response
            if type(error) is AttributeError:
                jsonrpc_error_code = -32601
            elif type(error) is json.decoder.JSONDecodeError:
                jsonrpc_error_code = -32700  # Parser error
            response['error'] = {
                'code': jsonrpc_error_code,
                'message': str(error),
                'data': {
                    'exception': str(type(error))
                }
            }
            self.logger.warn(
                'Exception raised during method call:\n{}'.format(
                    traceback.format_exc()))

        # Reply back if needed
        if message.properties.get('reply_to'):
            self.logger.debug('Responding to {0}'.format(
                message.properties['reply_to']))
            response_queue = self.connection.SimpleQueue(
                message.properties['reply_to'])
            response_queue.put(json.dumps(response))
            response_queue.close()

        message.ack()
        self.logger.debug('Message "{0}" {1} ackd'.format(
            message.delivery_tag,
            ('was' if message.acknowledged else 'was not')))

    def respond(self, queue_name, id, payload, **kwargs):
        """
        Sends a response to a simple queue. Responses are sent back in
        reply to a request; this method should never own the queue.

        :param queue_name: The name of the queue to use.
        :type queue_name: str
        :param id: The unique request id
        :type id: str
        :param payload: The content of the message.
        :type payload: dict
        :param kwargs: Keyword arguments to pass to SimpleQueue
        :type kwargs: dict
        """
        self.logger.debug('Sending response for message id "{}"'.format(id))
        send_queue = self.connection.SimpleQueue(queue_name, **kwargs)
        jsonrpc_msg = {
            'jsonrpc': "2.0",
            'id': id,
            'result': payload,
        }
        self.logger.debug('jsonrpc msg: {}'.format(jsonrpc_msg))
        send_queue.put(jsonrpc_msg)
        self.logger.debug('Sent response for message id "{}"'.format(id))
        send_queue.close()

    def on_connection_revived(self):  # pragma: no cover
        """
        Called when a reconnection occurs.
        """
        self.logger.info('Connection (re)established')

    def on_consume_ready(
            self, connection, channel, consumers):  # pragma: no cover
        """
        Called when the service is ready to consume messages.

        :param connection: The current connection instance.
        :type connection: kombu.Connection
        :param channel: The current channel.
        :type channel: kombu.transport.*.Channel
        :param consumers: A list of consumers.
        :type consumers: list
        """
        self.logger.info('Ready to consume')
        if self.logger.level == logging.DEBUG:
            queue_names = []
            for consumer in consumers:
                queue_names += [x.name for x in consumer.queues]
            self.logger.debug(
                'Consuming via connection "{0}" and channel "{1}" on '
                'the following queues: "{2}"'.format(
                    connection.as_uri(), channel, '", "'.join(queue_names)))

    def on_consume_end(self, connection, channel):  # pragma: no cover
        """
        Called when the service stops consuming.

        :param connection: The current connection instance.
        :type connection: kombu.Connection
        :param channel: The current channel.
        :type channel: kombu.transport.*.Channel
        """
        self.logger.warn('Consuming has ended')