def test_clone(self):
    hostname = 'sqlite:///celerydb.sqlite'
    x = Connection('+'.join(['sqla', hostname]))
    self.assertEqual(x.uri_prefix, 'sqla')
    self.assertEqual(x.hostname, hostname)
    clone = x.clone()
    self.assertEqual(clone.hostname, hostname)
    self.assertEqual(clone.uri_prefix, 'sqla')
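The test above exercises kombu's Connection.clone(): the clone is built from the same connection parameters (including the 'sqla' URI prefix) but is a separate, unconnected object. A minimal standalone sketch of the same idea, outside a test class:

from kombu import Connection

conn = Connection('sqla+sqlite:///celerydb.sqlite')
copy = conn.clone()
# The clone carries the same parameters but shares no transport state.
assert copy is not conn
assert copy.hostname == conn.hostname
assert copy.uri_prefix == conn.uri_prefix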
Example #3
class EventsConsumer(threading.Thread):
    '''
    Consumer Events Processing
    '''

    def __init__(self):
        threading.Thread.__init__(self)
        self.rabbit_url = "amqp://localhost:5672/"
        self.conn = Connection(self.rabbit_url, heartbeat=10)
        self.channel = self.conn.channel()
        self.exchange = Exchange(name="gateway-exchange", type="fanout")
        self.queue = Queue(name="gateway-queue", exchange=self.exchange, routing_key="gateway")


    def process_message(self, body, message):
        '''
        Append the new location to the trip locations if the trip status is 0 (i.e. the bike is in use)
        '''

        logging.info('*** Event received is: {}'.format(body))

        if Trip_db.objects(status='0').filter(bike_id=body['id']):
            trip = Trip_db.objects(status='0').get(bike_id=body['id'])
            trip.update(add_to_set__locations=body['location'])     # Append to the trip locations
            trip.save()
            logging.info('*** New trip location appended')
        message.ack()


    def consume(self):
        new_conn = self.establish_connection()
        while True:
            try:
                new_conn.drain_events(timeout=2)
            except socket.timeout:
                new_conn.heartbeat_check()


    def establish_connection(self):
        consumer = Consumer(self.conn, queues=self.queue, callbacks=[self.process_message], accept=["application/json"])
        consumer.consume()

        revived_connection = self.conn.clone()
        revived_connection.ensure_connection(max_retries=3)
        channel = revived_connection.channel()
        consumer.revive(channel)
        consumer.consume()
        return revived_connection
    

    def consumer_deamon(self):
        while True:
            try:
                self.consume()
            except self.conn.connection_errors:
                logging.warning('*** Connection revived')
Example #4
class EventsConsumer(threading.Thread):
    '''
    Consumer Events Processing
    '''
    def __init__(self):
        threading.Thread.__init__(self)
        self.rabbit_url = "amqp://localhost:5672/"
        self.conn = Connection(self.rabbit_url, heartbeat=10)
        self.channel = self.conn.channel()
        self.exchange = Exchange(name="gateway-exchange", type="fanout")
        self.queue = Queue(name="gateway-queue",
                           exchange=self.exchange,
                           routing_key="gateway")

    def process_message(self, body, message):
        '''
        Update the bike location if bike id found
        '''

        logging.info('*** Event received is: {}'.format(body))

        if Bike_db.objects.with_id(body['id']):
            bike = Bike_db.objects.get(id=body['id'])
            bike.update(location=body['location'])
            bike.save()
            logging.info('*** Bike location updated')

        message.ack()

    def consume(self):
        new_conn = self.establish_connection()
        while True:
            try:
                new_conn.drain_events(timeout=2)
            except socket.timeout:
                new_conn.heartbeat_check()

    def establish_connection(self):
        consumer = Consumer(self.conn,
                            queues=self.queue,
                            callbacks=[self.process_message],
                            accept=["application/json"])
        consumer.consume()

        revived_connection = self.conn.clone()
        revived_connection.ensure_connection(max_retries=3)
        channel = revived_connection.channel()
        consumer.revive(channel)
        consumer.consume()
        return revived_connection

    def consumer_deamon(self):
        while True:
            try:
                self.consume()
            except self.conn.connection_errors:
                logging.warning('*** Connection revived')
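Both EventsConsumer variants above follow the same recovery pattern: drain events on a connection and, when that fails, build a revived connection with clone() + ensure_connection() and re-attach the consumer to a fresh channel. A condensed, standalone sketch of that pattern (broker URL, exchange and queue names are placeholders):

import socket

from kombu import Connection, Consumer, Exchange, Queue


def handle(body, message):
    print(body)
    message.ack()


conn = Connection("amqp://localhost:5672/", heartbeat=10)
exchange = Exchange("demo-exchange", type="fanout")
queue = Queue("demo-queue", exchange=exchange, routing_key="demo")
consumer = Consumer(conn, queues=[queue], callbacks=[handle], accept=["application/json"])
consumer.consume()


def establish_connection(conn, consumer):
    # clone() copies the connection parameters into a new, unconnected object,
    # so the connect can be retried and the consumer rebound to a fresh channel.
    revived = conn.clone()
    revived.ensure_connection(max_retries=3)
    channel = revived.channel()
    consumer.revive(channel)
    consumer.consume()
    return revived


while True:
    try:
        conn.drain_events(timeout=2)
    except socket.timeout:
        conn.heartbeat_check()
    except conn.connection_errors:
        conn = establish_connection(conn, consumer)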
Example #5
def test_clone(self):
    hostname = 'sqlite:///celerydb.sqlite'
    x = Connection('+'.join(['sqla', hostname]))
    try:
        assert x.uri_prefix == 'sqla'
        assert x.hostname == hostname
        clone = x.clone()
        try:
            assert clone.hostname == hostname
            assert clone.uri_prefix == 'sqla'
        finally:
            clone.release()
    finally:
        x.release()
class Worker(object):
    def __init__(self, queue_name):
        self.queue_name = queue_name
        self.serializer = "pickle"
        self.rabbit_connect()
        self.poll_messages()

    def rabbit_connect(self):
        url = "amqp://{}:{}@{}:5672/".format(config.rabbitmq.username, config.rabbitmq.password, config.rabbitmq.host)
        self.connection = Connection(url)
        self.channel = self.connection.channel()
        self.channel.basic_qos(prefetch_size=0, prefetch_count=1, a_global=False)
        self.exchange = Exchange("", type="direct", durable=True)

        self.queue = Queue(name=self.queue_name, exchange=self.exchange, routing_key=self.queue_name)
        self.queue.maybe_bind(self.connection)
        self.queue.declare()

        self.producer = Producer(exchange=self.exchange, channel=self.channel, serializer=self.serializer)
        self.consumer = Consumer(self.connection, queues=self.queue, callbacks=[self.message_callback], accept=["application/x-python-serialize"])
        #self.consumer.qos(prefetch_count = 1)

    def poll_messages(self):
        while True:
            try:
                self.process_messages()
            except self.connection.connection_errors:
                pass

    def process_messages(self):
        self.connection = self.renew_connection()
        while True:
            try:
                self.connection.drain_events(timeout=5)
            except socket.timeout:
                pass

    def renew_connection(self):
        new_connection = self.connection.clone()
        new_connection.ensure_connection(max_retries=10)
        self.channel = new_connection.channel()
        self.channel.basic_qos(prefetch_size=0, prefetch_count=1, a_global=False)
        self.consumer.revive(self.channel)
        self.producer.revive(self.channel)
        self.consumer.consume()
        return new_connection

    def message_callback(self, body, message):
        # Convert body to UTF-8 string
        body = body.decode('utf-8')

        # Process message
        self.process_message(body)

        # Tell RabbitMQ that we processed the message
        message.ack()

    def process_message(self, message):
        # Generic message processing stub
        print ("Message from queue '{}': '{}'".format(self.queue_name, message))
        
    def produce_message(self, message):
        self.producer.publish(message.encode('utf-8'), routing_key=self.queue_name, retry=True, delivery_mode=2)
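A minimal way to run the Worker above; the queue name is a placeholder, and the config module with RabbitMQ credentials is assumed to be importable as in the snippet. Note that __init__ calls poll_messages(), so constructing a Worker blocks in the consume loop:

if __name__ == "__main__":
    Worker("example-queue")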
Example #7
def test_none_hostname_persists(self):
    conn = Connection(hostname=None, transport=SQS.Transport)
    assert conn.hostname == conn.clone().hostname
Example #8
class AMQP:
    def __init__(self, mesh):
        self.mesh = mesh
        self.logger = mesh.init_logger()

        self.app_id = mesh.config.get('AMQP_APP_ID')
        self.base_url = 'amqp://{}/'.format(self.app_id or '')
        self.connection_prototype = Connection(mesh.config['AMQP_DSN'])

        self.sessions = []
        self.mutex = Lock()

        self.task_callbacks = {}
        self.connection = None
        self.consumers = {}
        self.running = False

        mesh.teardown_context(self.release_session)
        atexit.register(self.close)

    def close(self):
        while self.sessions:
            session = self.sessions.pop()
            session.close()
        for consumer in self.consumers.values():
            consumer.close()
        if self.connection is not None:
            self.connection.close()

    @property
    def session(self):
        context = self.mesh.current_context()
        session = getattr(context, 'amqp_session', None)
        if session is None:
            with self.mutex:
                try:
                    session = self.sessions.pop()
                except IndexError:
                    connection = self.connection_prototype.clone()
                    session = Session(self.mesh, self.app_id, connection)
            session.begin()
            setattr(context, 'amqp_session', session)
        return session

    def release_session(self, exc=None):
        context = self.mesh.current_context()
        session = getattr(context, 'amqp_session', None)
        if session is not None:
            session.rollback()
            if session.connected:
                with self.mutex:
                    self.sessions.append(session)

    def task(self, message_type, consumer_name='default'):
        def decorator(callback):
            self.task_callbacks[consumer_name, message_type] = callback
            return callback

        return decorator

    def init_connection(self):
        connection = self.connection
        if connection is None:
            connection = self.connection_prototype.clone(heartbeat=60)
            connection.ensure_connection(max_retries=3)
            self.connection = connection
        return connection

    def init_consumer(self, consumer_name='default'):
        consumer = self.consumers.get(consumer_name)
        if consumer is None:
            connection = self.init_connection()
            consumer = connection.Consumer(on_message=self.process_message,
                                           tag_prefix=f'{consumer_name}/',
                                           auto_declare=False)
            self.consumers[consumer_name] = consumer
        return consumer

    def make_exchange(self, **kwargs):
        return Exchange(channel=self.init_connection(), **kwargs)

    def make_queue(self, **kwargs):
        return Queue(channel=self.init_connection(), **kwargs)

    def run(self):
        self.running = True
        signal(SIGINT, self.stop)
        signal(SIGTERM, self.stop)

        for consumer in self.consumers.values():
            consumer.consume()

        while self.running:
            try:
                self.connection.drain_events(timeout=5)
            except socket.timeout:
                self.connection.heartbeat_check()
            except self.connection.connection_errors:
                self.connection.close()
                self.connection.ensure_connection(max_retries=3)
                for consumer in self.consumers.values():
                    consumer.revive(self.connection)
                    consumer.consume()

    def stop(self, signo=None, frame=None):
        self.running = False

    def process_message(self, message):
        consumer_name, __, __ = message.delivery_info['consumer_tag'].partition('/')  # noqa
        message_type = message.properties.get('type')

        context = self.mesh.make_context(
            method='CONSUME',
            base_url=self.base_url,
            path=f'/{consumer_name}/{message_type}',
            headers=message.properties,
            content_type=message.content_type,
            data=message.body)
        context.amqp_message = message

        with context:
            try:
                callback = self.task_callbacks[consumer_name, message_type]
                callback(message)
            except Exception:
                self.logger.exception('Exception occurred')
            finally:
                if not message.acknowledged:
                    message.reject()
Example #9
class RabbitMQProvider(BrokerProvider):
    def __init__(self, broker_configuration: BrokerSettings):
        self.last_message_tag = {}
        self.config = broker_configuration

        self.connection = Connection(
            hostname=self.config.get_host(),
            port=self.config.get_port(),
            userid=self.config.get_user(),
            password=self.config.get_password(),
            virtual_host=self.config.get_virtual_host(),
            heartbeat=0,
            connect_timeout=30)
        self.connection.connect()

        self.send_connection = Connection(
            hostname=self.config.get_host(),
            port=self.config.get_port(),
            userid=self.config.get_user(),
            password=self.config.get_password(),
            virtual_host=self.config.get_virtual_host(),
            connect_timeout=30,
            heartbeat=0)
        self.queues = {}

    def establish_connection(self):
        revived_connection = self.connection.clone()
        revived_connection.ensure_connection(max_retries=3)
        return revived_connection

    def disconnect(self):
        self.connection.close()

    def rabbit_disconnected(self, queue_name):
        return self.queues[
            queue_name].consumer.connection is None or not self.queues[
                queue_name].consumer.connection.connected

    def get_simple_queue(self, queue_name):
        if queue_name not in self.queues or self.rabbit_disconnected(queue_name):
            conn = self.establish_connection()
            self.queues[queue_name] = conn.SimpleQueue(
                name=Queue(name=queue_name, channel=conn))

        return self.queues[queue_name]

    def process_next_message(self,
                             queue_name,
                             callback,
                             model_validator,
                             max_retry=0):
        sub_queue = self.get_simple_queue(queue_name)
        retry_count = 0
        while True:
            try:
                msg = sub_queue.get(block=False, timeout=20)
                try:
                    e = Event(**msg.payload)
                except ValueError as ve:
                    logger.error(
                        f"Rejecting invalid event payload: {msg.payload}")
                    msg.ack()
                    return True
                try:
                    if model_validator is not None:
                        try:
                            call_result = callback(
                                e, model_validator(**e.payload))
                        except Exception as error:
                            logger.error(
                                f"Invalid payload for type.  Errors: {str(error)}"
                            )
                            msg.ack()
                            return True
                    else:
                        call_result = callback(e)
                    if call_result:
                        msg.ack()
                        return True
                    else:
                        logger.debug("Callback returning false")
                        msg.requeue()
                        return False
                except:
                    msg.requeue()
                    return False
            except IOError:
                logger.error("Lost connection with Rabbit")
                self.queues = {}
                return False
            except Empty as e:
                if retry_count < max_retry:
                    sleep(0.1)
                    retry_count += 1
                else:
                    return True

    def declare_topic(self, topic_name):
        with self.connection as _conn:
            _conn.connect()
            channel = _conn.channel()
            topic = Exchange(
                name=f"{self.config.get_event_name_prefix()}{topic_name}",
                type="topic",
                channel=channel)
            topic.declare()

    def append_to_topic(self, topic_name, queue_name, routing_key=None):
        with self.connection as _conn:
            _conn.connect()
            channel = _conn.channel()

            if not topic_name.startswith(self.config.get_event_name_prefix()):
                topic_name = f"{self.config.get_event_name_prefix()}{topic_name}"

            topic = Exchange(name=f"{topic_name}",
                             type="topic",
                             channel=channel)
            topic.declare()

            queue = Queue(name=queue_name,
                          channel=channel,
                          routing_key=routing_key)
            queue.declare()

            queue.bind_to(exchange=f"{topic_name}", routing_key=routing_key)

    def send_message(self, message, topic):
        # with self.send_connection as _conn:
        _conn = self.send_connection
        _conn.connect()
        # channel = _conn.channel()
        with _conn.channel() as channel:
            producer = Producer(channel)

            logger.debug(f"Insert data on TOPIC: {topic}")

            if not topic.startswith(self.config.get_event_name_prefix()):
                topic = f"{self.config.get_event_name_prefix()}{topic}"

            producer.publish(body=message, exchange=topic, routing_key=None)

            logger.debug(f"Message {message} sent to topic {topic}!")
Example #10
class RabbitMQ(object):
    def __init__(self, app=None):
        self.send_exchange_name = None
        self.send_exchange_type = None
        self.config = None
        self.consumer = None
        self.connection = None
        self.send_connection = None
        self.app = app
        self.message_callback_list = []
        self.wait_send_queue = None
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        self.app = app
        self.config = app.config
        self.connection = Connection(self.config.get('RABMQ_RABBITMQ_URL'))
        self.consumer = CP(self.connection, self.message_callback_list)
        self.send_connection = self.connection.clone()
        self.send_exchange_name = self.config.get('RABMQ_SEND_EXCHANGE_NAME')
        self.send_exchange_type = self.config.get(
            'RABMQ_SEND_EXCHANGE_TYPE') or ExchangeType.TOPIC
        self.wait_send_queue = SendQueue(maxsize=1)

    def run_consumer(self):
        self._run()

    def _run(self):
        thread = Thread(target=self.consumer.run)
        thread.daemon = True
        thread.start()

    def queue(self,
              exchange_name,
              routing_key,
              queue_name=None,
              exchange_type=None,
              retry_count=3):
        def decorator(f):
            self.add_message_rule(f,
                                  queue_name=queue_name,
                                  exchange_type=exchange_type,
                                  exchange_name=exchange_name,
                                  routing_key=routing_key,
                                  retry_count=retry_count)
            return f

        return decorator

    @setup_method
    def add_message_rule(self,
                         func,
                         queue_name,
                         routing_key,
                         exchange_name,
                         exchange_type=ExchangeType.DEFAULT,
                         retry_count=3):
        if not queue_name:
            queue_name = func.__name__
        if not routing_key:
            raise RoutingKeyError('routing_key not specified')

        if not exchange_name:
            raise ExchangeNameError('exchange_name not specified')

        def _callback(body, message):
            try:
                handler_flag = ''.join(random.sample('0123456789', 10))
                logger.info(handler_flag, 'message handler start: %s',
                            func.__name__)
                try:
                    logger.info(handler_flag,
                                'received_message-route_key:%s-exchange:%s',
                                routing_key, exchange_name)
                    logger.info(handler_flag, 'received data:%s', body)
                    if is_py2:
                        if isinstance(body, (str, eval('unicode'))):
                            message_id = json.loads(body).get('message_id')
                        else:
                            message_id = body.get('message_id')
                    else:
                        if isinstance(body, str):
                            message_id = json.loads(body).get('message_id')
                        else:
                            message_id = body.get('message_id')
                    if not message_id:
                        logger.error(handler_flag, 'message not id: %s', body)
                        message.ack()
                        return True
                except:
                    logger.error(handler_flag, 'parse message body failed:%s',
                                 body)
                    message.ack()
                    return True
                try:
                    if is_py2:
                        if not isinstance(body, (str, eval('unicode'))):
                            body = json.dumps(body)
                    else:
                        if not isinstance(body, str):
                            body = json.dumps(body)
                    with self.app.app_context():
                        result = func(body)
                    if result:
                        message.ack()
                        return True
                    else:
                        logger.info(handler_flag, 'no ack message')
                        if int(message.headers.get('retry')
                               or 0) >= retry_count:
                            message.ack()
                            logger.info(handler_flag,
                                        'retry %s count handler failed: %s',
                                        retry_count, body)
                            return True
                        headers = {
                            'retry': int(message.headers.get('retry') or 0) + 1
                        }
                        message.ack()
                        self.retry_send(body=body,
                                        queue_name=queue_name,
                                        headers=headers,
                                        log_flag=handler_flag)
                        return False
                except ConnectionError:  # unexpected connection error
                    logger.info(handler_flag, 'Connection Error pass: %s',
                                traceback.format_exc())
                    return True
                except KombuError:  # unexpected kombu error
                    logger.info(handler_flag, 'Kombu Error pass: %s',
                                traceback.format_exc())
                    return True
                except Exception as e:
                    logger.info(handler_flag, 'handler message failed: %s',
                                traceback.format_exc())
                    headers = {
                        'retry': int(message.headers.get('retry') or 0) + 1
                    }
                    message.ack()
                    self.retry_send(body=body,
                                    queue_name=queue_name,
                                    headers=headers,
                                    log_flag=handler_flag)
                    return False
                finally:
                    logger.info(handler_flag, 'message handler end: %s',
                                func.__name__)
            except Exception as e:
                logger.info('unknown error: %s' % traceback.format_exc())
                return True

        exchange = Exchange(name=exchange_name,
                            type=exchange_type or ExchangeType.DEFAULT)
        queue = Queue(name=queue_name,
                      exchange=exchange,
                      routing_key=routing_key)
        tmp_dict = {'queue': queue, 'callback': _callback}
        self.message_callback_list.append(tmp_dict)

    def send(self,
             body,
             routing_key,
             exchange_name=None,
             exchange_type=None,
             headers=None,
             log_flag=None):
        exchange = Exchange(name=exchange_name or self.send_exchange_name,
                            type=exchange_type or self.send_exchange_type,
                            auto_delete=False,
                            durable=True)
        while True:
            try:
                self.wait_send_queue.join()
                self.wait_send_queue.put(body, block=False)
                break
            except Exception as E:
                logger.info('wait lock')
                pass
        try:
            channel = self.send_connection.default_channel
            exchange.declare(channel=channel)
            self.consumer.producer.publish(
                body=self.wait_send_queue.get(),
                exchange=exchange,
                routing_key=routing_key,
                retry=True,
                headers=headers,
            )
            logger.info(log_flag, 'send data: %s', body)
        except Exception as E:
            pass
        finally:
            self.wait_send_queue.task_done()

    def retry_send(self,
                   body,
                   queue_name,
                   headers=None,
                   log_flag='',
                   **kwargs):
        logger.info(log_flag, 'send data: %s', body)
        simple_queue = self.consumer.connection.SimpleQueue(queue_name)
        simple_queue.put(body, headers=headers, retry=True, **kwargs)

    def delay_send(self,
                   body,
                   routing_key,
                   delay=None,
                   exchange_name=None,
                   log_flag=None,
                   **kwargs):
        logger.info(log_flag, 'send data: %s', body)
        dead_letter_params = {
            'x-dead-letter-routing-key': routing_key,
            'x-dead-letter-exchange': exchange_name
        }
        queue_name = '%s_%s' % (exchange_name,
                                re.sub('[^0-9a-zA-Z]+', '', routing_key))
        simple_queue = self.consumer.connection.SimpleQueue(
            queue_name, queue_args=dead_letter_params)
        simple_queue.put(body, retry=True, expiration=delay, **kwargs)
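A hypothetical wiring sketch for the RabbitMQ helper above. The config keys mirror the ones read in init_app, the Flask app, broker URL and names are placeholders, and the helper's own dependencies (CP, SendQueue, logger, setup_method, ...) must be importable exactly as in the snippet. A truthy return from the handler is what _callback treats as a successful ack:

from flask import Flask

app = Flask(__name__)
app.config['RABMQ_RABBITMQ_URL'] = 'amqp://localhost:5672/'
app.config['RABMQ_SEND_EXCHANGE_NAME'] = 'example-exchange'

mq = RabbitMQ(app)


@mq.queue(exchange_name='example-exchange', routing_key='example.key')
def handle_example(body):
    # body is a JSON string carrying at least a message_id
    return True  # truthy return acks the message


mq.run_consumer()
# mq.send(body, routing_key=...) would publish through send_connection to the
# exchange named by RABMQ_SEND_EXCHANGE_NAME.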
class MessageBusService(object):

    _uri: str
    _connection: Connection
    _connection_producer: Connection
    _consuming: bool = False
    _producer: Producer
    _producer_reply_to_consumer: Consumer
    _future: StandardQueue
    _logger: Optional[logging.Logger]

    def __init__(self, uri: str, logger: Optional[logging.Logger] = None):
        self._uri = uri
        self._logger = logger
        self._future = StandardQueue()
        self.connect()

    def connect(self):
        self._connection = Connection(self._uri)
        self._connection.connect()
        self._connection_producer = self._connection.clone()
        self._producer = Producer(self._connection_producer)
        #
        reply_queue = Queue(
            channel=self._producer.channel,
            name="amq.rabbitmq.reply-to",
            no_ack=True,
            durable=False,
        )
        self._producer_reply_to_consumer = self._producer.channel.Consumer(
            queues=[reply_queue],
            no_ack=True,
            auto_declare=True,
            callbacks=[self.on_reply_to_message],
            accept=["json"],
        )
        self._producer_reply_to_consumer.consume(no_ack=True)

    def disconnect(self):
        self._producer.close()
        self._connection_producer.close()
        self._connection.close()

    def start_consuming(
        self,
        callback: Callable,
        queue_name: str,
        prefetch_count: int = 1,
        no_ack: bool = False,
        expires: int = None,
        callback_ready: Callable = None,
    ):
        if self._logger is not None:
            self._logger.debug("Start consuming queue: %s" % queue_name)
        self._consuming = True
        while self._consuming:
            revived_connection = self._connection.clone()
            revived_connection.ensure_connection()
            channel = revived_connection.channel()
            channel.basic_qos(0, prefetch_count, True)
            queues = []
            queue_obj = Queue(
                channel=channel,
                name=queue_name,
                no_ack=no_ack,
                durable=False,
                expires=expires,
                queue_arguments={"x-max-priority": 255},
            )
            queue_obj.declare()
            queues.append(queue_obj)
            consumer = Consumer(
                revived_connection,
                queues,
                callbacks=[callback],
                accept=["json"],
                auto_declare=False,
                prefetch_count=prefetch_count,
            )
            consumer.revive(channel)
            consumer.consume()
            while self._consuming:
                if callback_ready is not None:
                    callback_ready()
                try:
                    revived_connection.drain_events(timeout=2)
                except socket.timeout:
                    revived_connection.heartbeat_check()
                except self._connection.connection_errors + (
                    AMQPError,
                    ConnectionForced,
                    ConnectionError,
                ):  # pragma: no cover
                    if self._logger is not None:
                        self._logger.exception("Connection error", stack_info=True)
                    break

    def start_consuming_replies(
        self, callback: Callable, prefetch_count: int = 1, no_ack: bool = False
    ):
        self._consuming = True
        while self._consuming:
            revived_connection = self._connection_producer.clone()
            revived_connection.ensure_connection()
            while self._consuming:
                try:
                    revived_connection.drain_events(timeout=2)
                except socket.timeout:
                    revived_connection.heartbeat_check()
                except self._connection.connection_errors + (
                    AMQPError,
                    ConnectionForced,
                    ConnectionError,
                ):  # pragma: no cover
                    if self._logger is not None:
                        self._logger.exception("Connection error", stack_info=True)
                    break

    def stop_consuming(self):
        if self._logger is not None:
            self._logger.debug("Stop consuming...")
        self._consuming = False

    def publish(
        self,
        body: dict,
        exchange: str = "",
        queue_name: str = "",
        priority: int = None,
        reply_to: str = None,
        expiration: int = None,
        correlation_id: str = None,
    ):
        while True:
            try:
                self._connection_producer.ensure_connection()
                self._producer.publish(
                    body=body,
                    exchange=exchange,
                    routing_key=queue_name,
                    priority=priority,
                    reply_to=str(reply_to) if reply_to is not None else None,
                    expiration=expiration if expiration is not None else None,
                    correlation_id=str(correlation_id)
                    if correlation_id is not None
                    else None,
                )
            except self._connection_producer.connection_errors + (
                AMQPError,
                ConnectionForced,
                ConnectionError,
            ):  # pragma: no cover
                self._connection_producer = self._connection.clone()
                self._producer.revive(self._connection_producer)
            else:
                break

    def on_reply_to_message(self, body, message):
        self._future.put(body)

    def publish_and_get_reply(self, *args, timeout: int = 1, **kw) -> Optional[dict]:
        kw['reply_to'] = "amq.rabbitmq.reply-to"
        self.publish(*args, **kw)
        try:
            self._producer_reply_to_consumer.connection.drain_events(timeout=timeout)
        except socket.timeout:  # pragma: no cover
            return None
        #
        return self._future.get(block=False)
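A minimal request/reply sketch for MessageBusService; the broker URI and queue name are placeholders, and a responder must already be consuming the queue and publishing its answer to the reply_to address for a reply to come back:

bus = MessageBusService("amqp://localhost:5672/")
reply = bus.publish_and_get_reply({"action": "ping"}, queue_name="rpc-queue", timeout=2)
print(reply)  # None if nothing replied within the timeout
bus.disconnect()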
Example #12
class ConsumerMixin(KombuConsumer):
    consumer_args = {}

    def __init__(self, subscribe=None, **kwargs):
        super(ConsumerMixin, self).__init__(**kwargs)
        name = '{name}.{id}'.format(name=self._name, id=os.urandom(3).hex())
        if subscribe:
            exchange = Exchange(subscribe['exchange_name'], subscribe['exchange_type'])

        self.__connection = Connection(self.url)
        self.__exchange = exchange if subscribe else self._default_exchange
        self.__subscriptions = defaultdict(list)
        self.__queue = Queue(name=name, auto_delete=True, durable=False)
        self.__lock = Lock()
        self.create_connection()

        try:
            self._register_thread('consumer', self.__run, on_stop=self.__stop)
        except AttributeError:
            pass

    @property
    @contextmanager
    def __binding_channel(self):
        if not self.__connection.connected:
            self.__connection.connect()
        yield self.__connection.default_channel

    def __create_binding(self, headers, routing_key):
        binding = Binding(self.__exchange, routing_key, headers, headers)
        self.__queue.bindings.add(binding)
        if self.is_running:
            try:
                with self.__binding_channel as channel:
                    self.__queue.queue_declare(passive=True, channel=channel)
                    binding.bind(self.__queue, channel=channel)
            except self.__connection.connection_errors as e:
                self.log.error('Connection error while creating binding: %s', e)
            except NotFound:
                self.log.error(
                    'Queue %s doesn\'t exist on the server', self.__queue.name
                )
        return binding

    def __remove_binding(self, binding):
        self.__queue.bindings.remove(binding)
        if self.is_running:
            try:
                with self.__binding_channel as channel:
                    self.__queue.queue_declare(passive=True, channel=channel)
                    binding.unbind(self.__queue, channel=channel)
            except self.__connection.connection_errors:
                self.log.exception('Connection error while removing binding')
            except NotFound:
                pass

    def __dispatch(self, event_name, payload, headers=None):
        with self.__lock:
            subscriptions = self.__subscriptions[event_name].copy()
        for (handler, _) in subscriptions:
            try:
                handler(payload)
            except Exception:
                self.log.exception(
                    'Handler \'%s\' for event \'%s\' failed',
                    getattr(handler, '__name__', handler),
                    event_name,
                )
            continue

    def __extract_event_from_message(self, message):
        event_name = None
        headers = message.headers
        payload = message.payload

        if 'name' in headers:
            event_name = headers['name']
        elif isinstance(payload, dict) and 'name' in payload:
            event_name = payload['name']
        else:
            raise ValueError('Received invalid message; no event name could be found.')
        return event_name, headers, payload

    def subscribe(
        self,
        event_name,
        handler,
        headers=None,
        routing_key=None,
        headers_match_all=True,
    ):
        headers = dict(headers or {})
        headers.update(name=event_name)
        if self.__exchange.type == 'headers':
            headers.setdefault('x-match', 'all' if headers_match_all else 'any')

        binding = self.__create_binding(headers, routing_key)
        subscription = Subscription(handler, binding)
        with self.__lock:
            self.__subscriptions[event_name].append(subscription)
        self.log.debug(
            'Registered handler \'%s\' to event \'%s\'',
            getattr(handler, '__name__', handler),
            event_name,
        )

    def unsubscribe(self, event_name, handler):
        with self.__lock:
            subscriptions = self.__subscriptions[event_name].copy()
        try:
            for subscription in subscriptions:
                if subscription.handler == handler:
                    with self.__lock:
                        self.__subscriptions[event_name].remove(subscription)
                    self.__remove_binding(subscription.binding)
                    self.log.debug(
                        'Unregistered handler \'%s\' from \'%s\'',
                        getattr(handler, '__name__', handler),
                        event_name,
                    )
                    return True
            return False
        finally:
            if not self.__subscriptions[event_name]:
                with self.__lock:
                    self.__subscriptions.pop(event_name)

    def get_consumers(self, Consumer, channel):
        self.__exchange.bind(channel).declare()
        return [
            Consumer(
                queues=[self.__queue],
                callbacks=[self.__on_message_received],
                auto_declare=True,
            )
        ]

    def __on_message_received(self, body, message):
        event_name, headers, payload = self.__extract_event_from_message(message)
        if event_name not in self.__subscriptions:
            return
        try:
            headers, payload = self._unmarshal(event_name, headers, payload)
        except Exception:
            raise
        else:
            self.__dispatch(event_name, payload, headers)
        finally:
            message.ack()

    def on_connection_error(self, exc, interval):
        self.log.error(
            'Broker connection error: %s, trying to reconnect in %s seconds...',
            exc,
            interval,
        )
        if self.should_stop:
            # Workaround to force kill the threaded consumer when a stop has been issued
            # instead of looping forever to reestablish the connection
            raise SystemExit

    def create_connection(self):
        self.connection = self.__connection.clone()
        return self.connection

    @property
    def is_running(self):
        try:
            is_running = self.connection.connected
        except AttributeError:
            is_running = False
        return super(ConsumerMixin, self).is_running and is_running

    @property
    def should_stop(self):
        return getattr(self, 'is_stopping', True)

    def on_consume_ready(self, connection, channel, consumers, **kwargs):
        if 'ready_flag' in kwargs:
            ready_flag = kwargs.pop('ready_flag')
            ready_flag.set()

    def __run(self, ready_flag, **kwargs):
        super(ConsumerMixin, self).run(ready_flag=ready_flag, **self.consumer_args)

    def __stop(self):
        self.__connection.release()
Example #13
class KombuSubscriber:

    def __init__(self,
                 name=ev("SUBSCRIBER_NAME", "kombu-subscriber"),
                 auth_url=ev("BROKER_URL", "redis://localhost:6379/0"),
                 ssl_options={},
                 max_general_failures=-1):  # infinite retries

        """
        Available Brokers:
        http://docs.celeryproject.org/en/latest/getting-started/brokers/index.html

        Redis:
        http://docs.celeryproject.org/en/latest/getting-started/brokers/redis.html

        RabbitMQ:
        http://docs.celeryproject.org/en/latest/getting-started/brokers/rabbitmq.html

        SQS:
        http://docs.celeryproject.org/en/latest/getting-started/brokers/sqs.html
        """

        self.state = "not_ready"
        self.name = name
        self.auth_url = auth_url
        self.ssl_options = ssl_options

        self.conn = None
        self.new_conn = None
        self.channel = None
        self.consumer = None
        self.process_message_callback = None
        self.drain_time = 1.0
        self.num_setup_failures = 0
        self.num_consume_failures = 0
        self.max_general_failures = max_general_failures

        self.exchange = None
        self.exchange_name = ""
        self.routing_key = ""
        self.serializer = "json"
        self.queue = None
        self.queue_name = ""
        self.consume_from_queues = []

    # end of __init__

    def setup_routing(self,
                      exchange,
                      consume_queue_names,
                      process_message_callback,
                      routing_key=None,
                      heartbeat=60,
                      serializer="application/json",
                      transport_options={}):

        self.state = "not_ready"
        self.exchange = None
        self.exchange_name = exchange
        self.routing_key = routing_key
        self.serializer = serializer
        self.queue = None

        if self.routing_key:
            log.debug(("creating Exchange={} topic for rk={}")
                      .format(self.exchange_name, self.routing_key))
            self.exchange = Exchange(self.exchange_name, type="topic")
        else:
            log.debug(("creating Exchange={} direct")
                      .format(self.exchange_name, self.routing_key))
            self.exchange = Exchange(self.exchange_name, type="direct")
        # end of if/else

        self.consume_from_queues = []
        for queue_name in consume_queue_names:

            new_queue = None
            if self.routing_key:
                log.debug(("creating Queue={} topic rk={} from Exchange={}")
                          .format(queue_name,
                                  self.routing_key,
                                  self.exchange_name))
                new_queue = Queue(queue_name, exchange=self.exchange, routing_key=self.routing_key)
            else:
                log.debug(("creating Queue={} direct from Exchange={}")
                          .format(queue_name,
                                  self.exchange_name))
                new_queue = Queue(queue_name, exchange=self.exchange)
            # end of handling queues with direct/topic routing

            self.consume_from_queues.append(new_queue)

            if not self.queue:
                self.queue_name = queue_name
                self.queue = new_queue

        # end of building new consume queues

        # https://redis.io/topics/security
        #
        # Redis does not support encryption, but I would like to try out ssl-termination
        # using an haproxy/nginx container running as an ssl-proxy to see if this works.

        # import ssl
        # Connection("amqp://", login_method='EXTERNAL', ssl={
        #               "ca_certs": '/etc/pki/tls/certs/something.crt',
        #               "keyfile": '/etc/something/system.key',
        #               "certfile": '/etc/something/system.cert',
        #               "cert_reqs": ssl.CERT_REQUIRED,
        #          })
        #
        self.conn = Connection(self.auth_url,
                               heartbeat=heartbeat,
                               transport_options=transport_options)

        self.channel = self.conn.channel()

        self.process_message_callback = process_message_callback

        log.debug(("creating kombu.Consumer "
                   "broker={} ssl={} ex={} rk={} "
                   "queue={} serializer={}")
                  .format(self.auth_url,
                          self.ssl_options,
                          self.exchange_name,
                          self.routing_key,
                          self.queue_name,
                          self.serializer))

        self.consumer = Consumer(self.conn,
                                 queues=self.consume_from_queues,
                                 auto_declare=True,
                                 callbacks=[self.process_message_callback],
                                 accept=["{}".format(self.serializer)])

        log.debug(("creating kombu.Exchange={}")
                  .format(self.exchange))

        self.consumer.declare()

        log.debug(("creating kombu.Queue={}")
                  .format(self.queue_name))

        self.queue.maybe_bind(self.conn)
        self.queue.declare()

        self.consumer.consume()

        self.state = "ready"
    # end of setup_routing

    def establish_connection(self):
        revived_connection = self.conn.clone()
        revived_connection.ensure_connection(max_retries=3)
        channel = revived_connection.channel()
        self.consumer.revive(channel)
        self.consumer.consume()

        return revived_connection
    # end of establish_connection

    def try_consume_from_queue(self,
                               time_to_wait=5.0):

        success = False
        try:
            self.consumer.consume()
            log.debug(("draining events time_to_wait={}")
                      .format(time_to_wait))
            self.conn.drain_events(timeout=time_to_wait)
            self.num_consume_failures = 0
            success = True
        except socket.timeout as t:
            log.debug(("detected socket.timeout - "
                       "running heartbeat check={}")
                      .format(t))
            self.conn.heartbeat_check()
        except ConnectionRefusedError:  # noqa
            sleep_duration = calc_backoff_timer(num=self.num_consume_failures)
            log.debug(("{} - kombu.subscriber - consume - hit "
                       "connection refused sleep seconds={}")
                      .format(self.name,
                              sleep_duration))
            self.state = "connection refused"
            self.num_consume_failures += 1
            time.sleep(sleep_duration)
        except ConnectionResetError:  # noqa
            sleep_duration = calc_backoff_timer(num=self.num_consume_failures)
            log.info(("{} - kombu.subscriber - consume - hit "
                      "connection reset sleep seconds={}")
                     .format(self.name,
                             sleep_duration))
            self.state = "connection reset"
            self.num_consume_failures += 1
            time.sleep(sleep_duration)
        except Exception as e:
            sleep_duration = calc_backoff_timer(num=self.num_consume_failures)
            log.info(("{} - kombu.subscriber - consume - hit "
                      "general exception={} queue={} sleep seconds={}")
                     .format(self.name,
                             e,
                             self.queue_name,
                             sleep_duration))
            self.state = "general error - consume"
            self.num_consume_failures += 1
            time.sleep(sleep_duration)
        # end of supported errors
        # end of try/ex

        return success
    # end of try_consume_from_queue

    def restore_connection(self,
                           callback,
                           queue,
                           exchange=None,
                           routing_key=None,
                           heartbeat=60,
                           serializer="application/json",
                           silent=False,
                           transport_options={},
                           *args,
                           **kwargs):

        ready_for_consume = False

        if self.state != "ready" or queue != self.queue_name:
            if not silent:
                log.info("setup routing")

            try:
                if exchange and routing_key:
                    self.setup_routing(exchange,
                                       [queue],
                                       callback,
                                       routing_key,
                                       heartbeat=heartbeat,
                                       serializer=serializer)
                else:
                    self.setup_routing(queue,
                                       [queue],
                                       callback,
                                       routing_key,
                                       heartbeat=heartbeat,
                                       serializer=serializer)

                ready_for_consume = True
                self.num_setup_failures = 0
                self.num_consume_failures = 0
            except ConnectionRefusedError:  # noqa
                sleep_duration = calc_backoff_timer(num=self.num_setup_failures)
                log.info(("{} - kombu.subscriber - setup - hit "
                          "connection refused sleep seconds={}")
                         .format(self.name,
                                 sleep_duration))

                self.state = "connection refused"
                self.num_setup_failures += 1
                time.sleep(sleep_duration)
                ready_for_consume = False
            except Exception as e:
                sleep_duration = calc_backoff_timer(num=self.num_setup_failures)
                log.info(("{} - kombu.subscriber - setup - hit "
                          "exception={} queue={} sleep seconds={}")
                         .format(self.name,
                                 e,
                                 self.queue_name,
                                 sleep_duration))
                self.state = "general error - setup"
                self.num_setup_failures += 1
                ready_for_consume = False
                time.sleep(sleep_duration)
            # end of initializing connection
        # end of if need to restore connection for a
        # broker restart or queues are deleted/need to be created
        else:
            ready_for_consume = True
        # else we're already good to go

        return ready_for_consume
    # end of restore_connection

    def consume(self,
                callback,
                queue,
                exchange=None,
                routing_key=None,
                heartbeat=60,
                serializer="application/json",
                time_to_wait=5.0,
                forever=False,
                silent=False,
                transport_options={},
                *args,
                **kwargs):

        """
        Redis does not have an Exchange or Routing Keys, but RabbitMQ does.

        Redis producers uses only the queue name to both publish and consume messages:
        http://docs.celeryproject.org/en/latest/getting-started/brokers/redis.html#configuration
        """

        if not callback:
            log.info(("Please pass in a valid callback "
                      "function or class method"))
            return

        not_done = True
        self.num_setup_failures = 0
        self.num_consume_failures = 0

        while not_done:

            # each loop starts with the incoming forever arg
            not_done = forever

            ready_for_consume = self.restore_connection(
                                        callback=callback,
                                        queue=queue,
                                        exchange=exchange,
                                        routing_key=routing_key,
                                        heartbeat=heartbeat,
                                        serializer=serializer,
                                        silent=silent,
                                        transport_options=transport_options)

            if ready_for_consume:

                if not forever and not silent:
                    log.info(("{} - kombu.subscriber queues={} "
                              "consuming with callback={}")
                             .format(self.name,
                                     self.queue_name,
                                     callback.__name__))

                self.try_consume_from_queue(time_to_wait=time_to_wait)
            # end of trying to consume

            # only allow error-retry-stop if not going forever
            if not forever and self.max_general_failures > 0:

                if self.num_setup_failures > self.max_general_failures \
                   or self.num_consume_failures > self.max_general_failures:

                    log.error(("Stopping consume for max={} "
                               "setup_failures={} consume_failures={} queue={}")
                              .format(self.max_general_failures,
                                      self.num_setup_failures,
                                      self.num_consume_failures,
                                      self.queue_name))

                    not_done = False
                # if we're over the retry limits

            else:
                if ev("TEST_STOP_DONE", "0") == "1":
                    not_done = False
                else:
                    not_done = True
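A minimal usage sketch for KombuSubscriber with a Redis broker; the URL and queue name are placeholders. With no exchange or routing key given, restore_connection falls back to a direct exchange named after the queue:

def handle_message(body, message):
    print("received: {}".format(body))
    message.ack()


sub = KombuSubscriber(name="demo-subscriber", auth_url="redis://localhost:6379/0")
sub.consume(callback=handle_message, queue="demo-queue", time_to_wait=5.0, forever=False)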