Example #1
    def test_manual_declare(self):
        channel = self.connection.channel()
        queue = Queue('qname', self.exchange, 'rkey')
        consumer = Consumer(channel, queue, auto_declare=False)
        self.assertIsNot(consumer.queues[0], queue)
        self.assertTrue(consumer.queues[0].is_bound)
        self.assertTrue(consumer.queues[0].exchange.is_bound)
        self.assertIsNot(consumer.queues[0].exchange, self.exchange)

        for meth in ('exchange_declare', 'queue_declare', 'basic_consume'):
            self.assertNotIn(meth, channel)

        consumer.declare()
        for meth in ('exchange_declare', 'queue_declare', 'queue_bind'):
            self.assertIn(meth, channel)
        self.assertNotIn('basic_consume', channel)

        consumer.consume()
        self.assertIn('basic_consume', channel)
Example #2
        def config_request_rpc(self) -> NoReturn:
            """
            Initial RPC of this service to request the configuration.
            The RPC is blocked until the configuration service replies back.
            """
            self.correlation_id = uuid()
            callback_queue = Queue(
                uuid(),
                durable=False,
                auto_delete=True,
                max_priority=4,
                consumer_arguments={"x-priority": 4},
            )

            self.producer.publish(
                "",
                exchange="",
                routing_key="config-request-queue",
                reply_to=callback_queue.name,
                correlation_id=self.correlation_id,
                retry=True,
                declare=[
                    Queue(
                        "config-request-queue",
                        durable=False,
                        max_priority=4,
                        consumer_arguments={"x-priority": 4},
                    ),
                    callback_queue,
                ],
                priority=4,
                serializer="ujson",
            )
            with Consumer(
                self.connection,
                on_message=self.handle_config_request_reply,
                queues=[callback_queue],
                accept=["ujson"],
            ):
                while self.rules is None:
                    self.connection.drain_events()
            log.debug("{}".format(self.rules))
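The replying side of this pattern is not shown here; it would consume from config-request-queue and publish the configuration to the queue named in the incoming message's reply_to property, echoing the correlation_id. A minimal hypothetical sketch under those assumptions (function name is illustrative, and the plain json serializer stands in for the project's ujson):

# Hypothetical responder sketch for the RPC pattern above; names are illustrative.
from kombu import Connection, Consumer, Producer, Queue


def serve_config_requests(broker_url, config_payload):
    request_queue = Queue("config-request-queue", durable=False, max_priority=4)
    with Connection(broker_url) as connection:

        def on_request(body, message):
            # Send the configuration back to the caller's reply queue,
            # copying correlation_id so the caller can match the reply.
            with Producer(connection) as producer:
                producer.publish(
                    config_payload,
                    exchange="",
                    routing_key=message.properties["reply_to"],
                    correlation_id=message.properties.get("correlation_id"),
                    serializer="json",
                    retry=True,
                )
            message.ack()

        with Consumer(connection, queues=[request_queue],
                      callbacks=[on_request], accept=["json"]):
            while True:
                connection.drain_events()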
Example #3
        def on_modified(self, event):
            if event.is_directory:
                return None
            elif event.src_path == self.path:
                with open(self.path, 'r') as f:
                    content = f.readlines()
                # Take any action here when the file is modified.
                changes = ''.join(difflib.unified_diff(self.content, content))
                if len(changes) > 0:
                    self.response = None
                    self.correlation_id = uuid()
                    callback_queue = Queue(
                        uuid(),
                        durable=False,
                        auto_delete=True,
                        max_priority=4,
                        consumer_arguments={'x-priority': 4})
                    with Producer(self.connection) as producer:
                        producer.publish(content,
                                         exchange='',
                                         routing_key='config-modify-queue',
                                         serializer='yaml',
                                         retry=True,
                                         declare=[callback_queue],
                                         reply_to=callback_queue.name,
                                         correlation_id=self.correlation_id,
                                         priority=4)
                    with Consumer(self.connection,
                                  on_message=self.on_response,
                                  queues=[callback_queue],
                                  no_ack=True):
                        while self.response is None:
                            self.connection.drain_events()

                    if self.response['status'] == 'accepted':
                        text = 'new configuration accepted:\n{}'.format(
                            changes)
                        log.info(text)
                        self.content = content
                    else:
                        log.error('invalid configuration:\n{}'.format(content))
                    self.response = None
Example #4
    def __init__(self):
        client_id = settings.DISCORD_APP_ID

        intents = discord.Intents.default()
        intents.members = True

        super().__init__(
            command_prefix=DISCORD_BOT_PREFIX,
            description=description,
            intents=intents,
        )

        self.redis = None
        self.redis = self.loop.run_until_complete(
            aioredis.create_pool(getattr(settings, "BROKER_URL",
                                         "redis://localhost:6379/0"),
                                 minsize=5,
                                 maxsize=10))
        print('redis pool started', self.redis)
        self.client_id = client_id
        self.session = aiohttp.ClientSession(loop=self.loop)
        self.tasks = []

        self.message_connection = Connection(
            getattr(settings, "BROKER_URL", 'redis://localhost:6379/0'))
        queues = []
        for que in queue_keys:
            queues.append(Queue(que))
        self.message_consumer = Consumer(self.message_connection,
                                         queues,
                                         callbacks=[self.on_queue_message],
                                         accept=['json'])

        django.setup()

        for hook in hooks.get_hooks("discord_cogs_hook"):
            for cog in hook():
                try:
                    self.load_extension(cog)
                except Exception as e:
                    print(f"Failed to load cog {cog}", file=sys.stderr)
                    traceback.print_exc()
Example #5
    def test_produce__consume_large_messages(self,
                                             bytes=1048576,
                                             n=10,
                                             charset=string.punctuation +
                                             string.letters + string.digits):
        if not self.verify_alive():
            return
        bytes = min(filter(None, [bytes, self.message_size_limit]))
        messages = [
            "".join(random.choice(charset) for j in xrange(bytes)) + "--%s" % n
            for i in xrange(n)
        ]
        digests = []
        chan1 = self.connection.channel()
        consumer = Consumer(chan1, self.queue)
        for queue in consumer.queues:
            queue.purge()
        producer = Producer(chan1, self.exchange)
        for i, message in enumerate(messages):
            producer.publish({
                "text": message,
                "i": i
            },
                             routing_key=self.prefix)
            digests.append(self._digest(message))

        received = [(msg["i"], msg["text"])
                    for msg in consumeN(self.connection, consumer, n)]
        self.assertEqual(len(received), n)
        ordering = [i for i, _ in received]
        if ordering != range(n):
            warnings.warn("%s did not deliver messages in FIFO order: %r" %
                          (self.transport, ordering))

        for i, text in received:
            if text != messages[i]:
                raise AssertionError("%i: %r is not %r" %
                                     (i, text[-100:], messages[i][-100:]))
            self.assertEqual(self._digest(text), digests[i])

        chan1.close()
        self.purge([self.queue.name])
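consumeN is a helper from the same functional-test suite and is not shown on this page; it drains the connection until n decoded message bodies have been collected. A plausible sketch of what it does (the real helper may differ in detail):

# Plausible sketch of the consumeN test helper used above; illustrative only.
def consumeN(conn, consumer, n=1, timeout=30):
    messages = []

    def callback(message_data, message):
        messages.append(message_data)
        message.ack()

    consumer.register_callback(callback)
    with consumer:  # entering the context calls consumer.consume()
        while len(messages) < n:
            conn.drain_events(timeout=timeout)
    return messages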
Example #6
def get_things_info(thing_status, item_status):
    print("API get things info with thing_status and item_status")

    if (thing_status in ["active", "inactive", "all"]) \
            and (item_status in ["active", "inactive", "all"]):

        message_request = {
            'reply_to': 'registry.response.api.api_get_things',
            'thing_status': thing_status,
            'item_status': item_status
        }

        # Request api_get_things from the Registry
        queue_response = Queue(
            name='registry.response.api.api_get_things',
            exchange=exchange,
            routing_key='registry.response.api.api_get_things')
        request_routing_key = 'registry.request.api_get_things'
        rabbitmq_connection.ensure_connection()
        with Producer(rabbitmq_connection) as producer:
            producer.publish(json.dumps(message_request),
                             exchange=exchange.name,
                             routing_key=request_routing_key,
                             declare=[queue_response],
                             retry=True)

        message_response = None

        def on_response(body, message):
            nonlocal message_response
            message_response = json.loads(body)

        with Consumer(rabbitmq_connection,
                      queues=queue_response,
                      callbacks=[on_response],
                      no_ack=True):
            while message_response is None:
                rabbitmq_connection.drain_events()

        return message_response
    else:
        return None
Example #7
    def listen_for_response(self, connection):
        """
        Set up a consumer and listen for a response. If successful, this will
        return the contents of :py:attr:`self.result` which will have been set
        up by the callback.

        :param connection:
           A Kombu Connection instance.

        :return:
            A dictionary containing the result, or None if the request failed.

        """
        self.result = None
        queue = self.get_response_queue(connection)
        with Consumer(connection, queue, callbacks=[self.callback]):
            while self.result is None:
                connection.drain_events(timeout=self.amqp_timeout)

        return self.process_response(self.result)
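The callback referenced here is expected to store the decoded reply on self.result so the loop above can exit. A minimal hypothetical version, assuming the surrounding class also tracks a correlation id for the outstanding request:

    # Hypothetical callback sketch for the listener above; attribute names are assumptions.
    def callback(self, body, message):
        # Ignore replies that do not belong to the outstanding request.
        if message.properties.get("correlation_id") == self.correlation_id:
            self.result = body
        message.ack()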
Example #8
 def control_with_reply(self, command, timeout=5):
     logger.warn('checking {} {} for {}'.format(self.service, command,
                                                self.queuename))
     reply_queue = Queue(name="amq.rabbitmq.reply-to")
     self.result = None
     with Connection(settings.BROKER_URL) as conn:
         with Consumer(conn,
                       reply_queue,
                       callbacks=[self.process_message],
                       no_ack=True):
             self.publish({'control': command},
                          conn,
                          reply_to='amq.rabbitmq.reply-to')
             try:
                 conn.drain_events(timeout=timeout)
             except socket.timeout:
                 logger.error('{} did not reply within {}s'.format(
                     self.service, timeout))
                 raise
     return self.result
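amq.rabbitmq.reply-to is RabbitMQ's direct reply-to pseudo-queue: the client must already be consuming from it in no-ack mode when the request is published, which is why the Consumer context is entered before publish. A minimal hypothetical process_message that completes the call could be:

     # Hypothetical reply handler for the control call above; the real
     # project's implementation may differ.
     def process_message(self, body, message):
         # Store the decoded reply so control_with_reply() can return it.
         self.result = body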
Example #9
    def test_auto_declare(self):
        channel = self.connection.channel()
        queue = Queue('qname', self.exchange, 'rkey')
        consumer = Consumer(channel, queue, auto_declare=True)
        consumer.consume()
        consumer.consume()  # twice is a noop
        assert consumer.queues[0] is not queue
        assert consumer.queues[0].is_bound
        assert consumer.queues[0].exchange.is_bound
        assert consumer.queues[0].exchange is not self.exchange

        for meth in ('exchange_declare', 'queue_declare', 'queue_bind',
                     'basic_consume'):
            assert meth in channel
        assert channel.called.count('basic_consume') == 1
        assert consumer._active_tags

        consumer.cancel_by_queue(queue.name)
        consumer.cancel_by_queue(queue.name)
        assert not consumer._active_tags
Example #10
def run():
    queue_notification = Queue(
        name='monitor.request.alert',
        exchange=exchange,
        routing_key='monitor.request.alert')  #, message_ttl=20)

    while 1:
        try:
            consumer_connection.ensure_connection(max_retries=1)
            with nested(
                    Consumer(consumer_connection,
                             queues=queue_notification,
                             callbacks=[handle_notification],
                             no_ack=True)):
                while True:
                    consumer_connection.drain_events()
        except (ConnectionRefusedError, exceptions.OperationalError):
            print('Connection lost')
        except consumer_connection.connection_errors:
            print('Connection error')
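nested here presumably comes from kombu.utils.compat and only stacks context managers, so with a single Consumer a plain with statement does the same job. A simplified, self-contained sketch of the same reconnect loop (broker URL, exchange and handler are illustrative assumptions):

# Simplified hypothetical variant of the loop above without nested();
# broker URL, exchange, queue and handler are illustrative.
from kombu import Connection, Consumer, Exchange, Queue, exceptions


def run_without_nested():
    connection = Connection("amqp://guest:guest@localhost//")
    exchange = Exchange("monitor", type="direct")
    queue_notification = Queue(name="monitor.request.alert",
                               exchange=exchange,
                               routing_key="monitor.request.alert")

    def handle_notification(body, message):
        print("notification:", body)

    while True:
        try:
            connection.ensure_connection(max_retries=1)
            with Consumer(connection,
                          queues=queue_notification,
                          callbacks=[handle_notification],
                          no_ack=True):
                while True:
                    connection.drain_events()
        except (ConnectionRefusedError, exceptions.OperationalError):
            print('Connection lost')
        except connection.connection_errors:
            print('Connection error')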
Example #11
    def test_auto_declare(self):
        channel = self.connection.channel()
        queue = Queue('qname', self.exchange, 'rkey')
        consumer = Consumer(channel, queue, auto_declare=True)
        consumer.consume()
        consumer.consume()  # twice is a noop
        self.assertIsNot(consumer.queues[0], queue)
        self.assertTrue(consumer.queues[0].is_bound)
        self.assertTrue(consumer.queues[0].exchange.is_bound)
        self.assertIsNot(consumer.queues[0].exchange, self.exchange)

        for meth in ('exchange_declare', 'queue_declare', 'queue_bind',
                     'basic_consume'):
            self.assertIn(meth, channel)
        self.assertEqual(channel.called.count('basic_consume'), 1)
        self.assertTrue(consumer._active_tags)

        consumer.cancel_by_queue(queue.name)
        consumer.cancel_by_queue(queue.name)
        self.assertFalse(consumer._active_tags)
Example #12
    def call(self, message: dict, event_type: str, entity_type: str,
             site: Site) -> any:
        self.response = None
        self.correlation_id = uuid()
        with Producer(self.connection) as producer:
            _logger.info(
                f'CELERY RPC call {site.domain} with {event_type} - {entity_type} {message} reply to {self.correlation_id}'
            )
            producer.publish(
                {
                    "event_type": event_type,
                    "entity_type": entity_type,
                    "entity_site": site.domain,
                    "data": message
                },
                exchange='',
                routing_key=self.routing_key,
                declare=[self.callback_queue],
                reply_to=self.callback_queue.name,
                correlation_id=self.correlation_id,
                serializer='msgpack',
            )

        with Consumer(self.connection,
                      on_message=self.on_response,
                      queues=[self.callback_queue],
                      no_ack=True):
            _logger.info(
                f'CELERY RPC call consume {site.domain} with {event_type} - {entity_type} {message} reply to {self.correlation_id}'
            )
            t_current = time.time()
            while self.response is None:
                self.connection.drain_events(timeout=1)
                # time.sleep(0.25)  # sleep for 250 milliseconds
                # if time.time() >= t_current + 60000:
                #     break
        _logger.info(
            f'CELERY RPC call consume {site.domain} with response {self.response}'
        )

        return self.response
Example #13
    def test_rpc_client(self):
        """Check behaviour of client """
        self.pre_declare_queues(['qurator.booya'])
        consumer = Qurator(exchange=self._exchange)

        @consumer.rpc
        def booya(*args, **kwargs):
            return {"msg": "Wooot"}

        payload = {"msg": "Boooya"}
        client = RpcClient(exchange=self._exchange, prefix='qurator')
        client.rpc('booya', payload)
        booya_queue = consumer.queues['booya']
        booya_callbacks = consumer.callbacks['booya']

        conn = self._connection
        with Consumer(conn, booya_queue, callbacks=booya_callbacks):
            conn.drain_events(timeout=1)
        reply = client.retrieve_messages()
        self.assertIn('msg', reply)
        self.assertEqual(reply['msg'], 'Wooot')
Example #14
        def do_rpc(*args, **kwargs):
            self.response = None
            self.correlation_id = uuid()
            with Producer(self.connection) as producer:
                producer.publish(
                    pickle.dumps((name, args, kwargs)),
                    exchange='',
                    routing_key=settings.RPC_QUEUE,
                    declare=[self.callback_queue],
                    reply_to=self.callback_queue.name,
                    correlation_id=self.correlation_id,
                )

            with Consumer(self.connection,
                          on_message=self.on_response,
                          queues=[self.callback_queue],
                          no_ack=True,
                          accept=['json', 'pickle', 'msgpack']):
                while self.response is None:
                    self.connection.drain_events()
            return self.response
Example #15
 def __init__(self, logs, connection, name, exchange, routing_key,
              queue_name):
     self.__logs = logs
     self.__ignore_some_stuff = False
     self.name = name
     self.__event_callbacks = []
     if queue_name is None:
         queue_name = ''
         exclusive = True
     else:
         exclusive = False
     chan = connection.channel()
     ex = Exchange(exchange, 'topic', channel=chan)
     queue = Queue(exchange=ex,
                   routing_key=routing_key,
                   exclusive=exclusive)
     consumer = Consumer(chan,
                         queues=[queue],
                         callbacks=[self.__message_cb])
     consumer.consume()
     self.exchange = ex
Example #16
 def call(self, method):
     if method not in ALLOWED_METHODS:
         raise Exception("Method '{}' not allowed".format(method))
     self.response = None
     self.correlation_id = uuid()
     with Producer(self.connection) as producer:
         producer.publish(
             {'method': method},
             exchange=self.exchange,
             routing_key=self.routing_key,
             declare=[self.callback_queue],
             reply_to=self.callback_queue.name,
             correlation_id=self.correlation_id,
         )
     with Consumer(self.connection,
                   on_message=self.on_response,
                   queues=[self.callback_queue],
                   no_ack=True):
         while self.response is None:
             self.connection.drain_events()
     return self.response
Example #17
def run(rabbit_url):
    print(rabbit_url)
    conn = Connection(rabbit_url)
    conn.ensure_connection()
    conn.connect()
    exchange = Exchange(config.EXCHANGE_NAME, type='direct')
    queue = Queue(name=config.QUEUE_NAME,
                  exchange=exchange,
                  routing_key=config.ROUTING_KEY)
    consumer = Consumer(conn,
                        queues=queue,
                        callbacks=[process_message],
                        accept=['text/plain'])
    consumer.consume()

    while True:
        try:
            print('drain_events')
            conn.drain_events(timeout=2)  # timeout
        except socket.timeout:
            pass
Example #18
def main(arguments):
    hub = Hub()
    exchange = Exchange('asynt')
    queue = Queue('asynt', exchange, 'asynt')

    def send_message(conn):
        producer = Producer(conn)
        producer.publish('hello world', exchange=exchange, routing_key='asynt')
        print('message sent')

    def on_message(message):
        print('received: {0!r}'.format(message.body))
        message.ack()
        hub.stop()  # <-- exit after one message

    conn = Connection('redis://localhost:6379')
    conn.register_with_event_loop(hub)

    with Consumer(conn, [queue], on_message=on_message):
        send_message(conn)
        hub.run_forever()
Example #19
    def send_request(self, fun, args, kwargs):
        payload = {'fun': fun, 'args': args, 'kwargs': kwargs}

        with Producer(self.connection) as producer:
            producer.publish(
                payload,
                exchange='',
                routing_key='rpc_queue',
                declare=[self.callback_queue],
                reply_to=self.callback_queue.name,
                correlation_id=self.correlation_id,
            )

        # Consume the reply published by the server; the on_response callback fires when it arrives
        with Consumer(self.connection,
                      on_message=self.on_response,
                      queues=[self.callback_queue],
                      no_ack=True):
            while self.response is None:
                self.connection.drain_events()
        return self.response
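With on_message (unlike callbacks), the handler receives only the kombu Message object. A minimal hypothetical on_response for the client above, matching replies by correlation_id (attribute names are assumptions about the surrounding class):

    # Hypothetical on_response sketch for the RPC client above.
    def on_response(self, message):
        if message.properties.get("correlation_id") == self.correlation_id:
            self.response = message.payload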
Example #20
 def send(self, hijack_key, comment):
     log.debug(
         "Send 'comment' hijack message with key: {}".format(hijack_key))
     self.response = None
     self.correlation_id = uuid()
     callback_queue = Queue(
         uuid(),
         durable=False,
         exclusive=True,
         auto_delete=True,
         max_priority=4,
         consumer_arguments={"x-priority": 4},
     )
     with Connection(RABBITMQ_URI) as connection:
         with Producer(connection) as producer:
             producer.publish(
                 {
                     "key": hijack_key,
                     "comment": comment
                 },
                 exchange="",
                 routing_key="database.rpc.hijack-comment",
                 retry=True,
                 declare=[callback_queue],
                 reply_to=callback_queue.name,
                 correlation_id=self.correlation_id,
                 priority=4,
                 serializer="ujson",
             )
         with Consumer(
                 connection,
                 on_message=self.on_response,
                 queues=[callback_queue],
                 accept=["ujson"],
         ):
             while self.response is None:
                 connection.drain_events()
     if self.response["status"] == "accepted":
         return "Comment saved.", True
     return "Error while saving.", False
Example #21
    def pull_messages(self,
                      qurator,
                      queues=None,
                      callbacks=None,
                      command=None):
        """Helper to pull messages from a particular queue.

        :qurator: Qurator object
        :command: queue set to pull from
        :callbacks: callbacks to invoke for received messages
        """
        if (queues is None or callbacks is None) and command is None:
            raise Exception("Unable to determine "
                            "which queue or callback to use. "
                            "Please provide either both queues and callbacks "
                            "or a command to check")
        if queues is None:
            queues = qurator.queues[command]
        if callbacks is None:
            callbacks = qurator.callbacks[command]
        with Consumer(self._connection, queues, callbacks=callbacks):
            self._connection.drain_events(timeout=1)
Example #22
def main():
    signal.signal(signal.SIGINT, sigint_handler)
    signal.signal(signal.SIGTERM, sigterm_handler)
    try:
        conn.register_with_event_loop(hub)
    except ConnectionRefusedError as err:
        logger.critical("Cannot connect to %s: %s", AMQP_URI, err)
        logger.critical("Exiting.")
        sys.exit(1)

    logger.info("Configuration: %s", config)
    if config.reporter_type == "elasticsearch":
        report = es_reporter()
    elif config.reporter_type == "email":
        report = smtp_reporter()
    with report as on_message_report:
        with Consumer(conn, [queue],
                      prefetch_count=config.amqp_prefetch,
                      on_message=on_message_report,
                      auto_declare=False):
            logger.info("Starting main loop")
            hub.run_forever()
Example #23
 def get_consumers(self, _, channel):
     all_consumer = []
     self.consumers_channels.add(channel)
     service_name = self.container.service_cls.name
     config = self.container.config.get(AMQP_CONFIG_KEY, {}) or {}
     maxqos = config.get('qos', DEFAULT_AMQP_QOS) or DEFAULT_AMQP_QOS
     for extension in self.extensions:
         queue_name = get_queue_name(service_name, extension.obj_name)
         queue = Queue(queue_name, **extension.queue_options)
         msg = '{} -LISTEN-> {} -BIND-> {}'.format(extension.obj_name,
                                                   queue_name,
                                                   extension.exchange.name)
         logger.debug(msg)
         on_message = as_wraps_partial(self.on_message, extension)
         _channel = channel.connection.channel()
         consumer = Consumer(_channel,
                             queues=[queue],
                             callbacks=[on_message],
                             no_ack=True)
         consumer.qos(prefetch_count=maxqos)
         all_consumer.append(consumer)
     return all_consumer
Example #24
    def purge(self, timeout: int = 5) -> None:
        def _purge_errors(exc, interval):
            self._logger.error(
                'Purging error: %s, will retry triggering in %s seconds',
                exc,
                interval,
                exc_info=True)

        def _purge_messages(cnsmr: BrightsideConsumer):
            cnsmr.purge()
            self._message = None

        connection = BrokerConnection(hostname=self._amqp_uri)
        with connections[connection].acquire(block=True) as conn:
            self._logger.debug('Got connection: %s', conn.as_uri())
            with Consumer([self._queue],
                          callbacks=[_purge_messages]) as consumer:
                ensure_kwargs = self.RETRY_OPTIONS.copy()
                ensure_kwargs['errback'] = _purge_errors
                safe_purge = conn.ensure(consumer, _purge_messages,
                                         **ensure_kwargs)
                safe_purge(consumer)
Example #25
    def _consume(self):
        routing_key = '{0}{1}'.format(self._key_prefix, self._binding_key)

        while self._running:
            try:
                with connections[self._connection].acquire(block=True) as conn:
                    queue = Queue(exchange=self._exchange, routing_key=routing_key, channel=conn,
                                  durable=False, exclusive=True, auto_delete=True)

                    with Consumer(conn, queue, callbacks=[self._on_message]):
                        try:
                            while self._running:
                                try:
                                    conn.drain_events(timeout=10)
                                except timeout:
                                    pass
                        except Exception as e:
                            logger.debug('Error when draining message queue: {0}'.format(e))
            except IOError as e:
                logger.info('Disconnected from MQ Server. Reconnecting in {0} seconds.'.format(
                    self._reconnect_timeout))
                sleep(self._reconnect_timeout)
Example #26
def request_service(conn, message_request, exchange_request,
                    request_routing_key):
    id_response = uuid()
    queue_response = Queue(name=id_response,
                           exchange=exchange_request,
                           routing_key=id_response,
                           exclusive=True,
                           auto_delete=True)
    message_request['reply_to'] = id_response
    conn.ensure_connection()
    with Producer(conn) as producer:
        producer.publish(json.dumps(message_request),
                         exchange=exchange_request.name,
                         routing_key=request_routing_key,
                         declare=[queue_response],
                         retry=True)

    message_response = None

    def on_response(body, message):
        nonlocal message_response
        message_response = json.loads(body)

    try:

        with Consumer(conn,
                      queues=queue_response,
                      callbacks=[on_response],
                      no_ack=True):
            try:
                while message_response is None:
                    conn.drain_events(timeout=10)
            except socket.timeout:
                return {'error': 'Can not connect to service'}
    except Exception:
        print("cannot create Consumer: " + request_routing_key)
        return {'error': 'Cannot create Consumer'}

    return message_response
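A hypothetical caller of request_service; the broker URL, exchange and routing key below are purely illustrative:

# Hypothetical usage sketch for request_service above; URL, exchange and
# routing key are illustrative assumptions.
from kombu import Connection, Exchange


def example_call():
    exchange = Exchange("registry", type="topic")
    with Connection("amqp://guest:guest@localhost//") as conn:
        reply = request_service(conn,
                                {"action": "ping"},
                                exchange,
                                "registry.request.ping")
        print(reply)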
Example #27
def get_list_platforms(platform_status):
    print("API list platforms from Registry")

    if platform_status in ['active', "inactive", "all"]:
        message_request = {
            'reply_to': 'registry.response.api.api_get_list_platforms',
            'platform_status': platform_status
        }

        # Request api_get_list_platforms from the Registry
        queue_response = Queue(
            name='registry.response.api.api_get_list_platforms',
            exchange=exchange,
            routing_key='registry.response.api.api_get_list_platforms')
        request_routing_key = 'registry.request.api_get_list_platforms'
        rabbitmq_connection.ensure_connection()
        with Producer(rabbitmq_connection) as producer:
            producer.publish(json.dumps(message_request),
                             exchange=exchange.name,
                             routing_key=request_routing_key,
                             declare=[queue_response],
                             retry=True)

        message_response = None

        def on_response(body, message):
            nonlocal message_response
            message_response = json.loads(body)

        with Consumer(rabbitmq_connection,
                      queues=queue_response,
                      callbacks=[on_response],
                      no_ack=True):
            while message_response is None:
                rabbitmq_connection.drain_events()

        return message_response
    else:
        return None
Example #28
    def receive(self, timeout: int) -> BrightsideMessage:

        self._message = BrightsideMessage(
            BrightsideMessageHeader(uuid4(), "", BrightsideMessageType.none),
            BrightsideMessageBody(""))

        def _consume(cnx: BrokerConnection, timesup: int) -> None:
            try:
                cnx.drain_events(timeout=timesup)
            except kombu_exceptions.TimeoutError:
                pass

        def _consume_errors(exc, interval: int) -> None:
            self._logger.error(
                'Draining error: %s, will retry triggering in %s seconds',
                exc,
                interval,
                exc_info=True)

        def _read_message(body: str, msg: KombuMessage) -> None:
            self._logger.debug(
                "Monitoring event received at: %s headers: %s payload: %s",
                datetime.utcnow().isoformat(), msg.headers, body)
            self._msg = msg
            self._message = self._message_factory.create_message(msg)

        connection = BrokerConnection(hostname=self._amqp_uri)
        with connections[connection].acquire(block=True) as conn:
            self._logger.debug('Got connection: %s', conn.as_uri())
            with Consumer(conn,
                          queues=[self._queue],
                          callbacks=[_read_message]) as consumer:
                consumer.qos(prefetch_count=1)
                ensure_kwargs = self.RETRY_OPTIONS.copy()
                ensure_kwargs['errback'] = _consume_errors
                safe_drain = conn.ensure(consumer, _consume, **ensure_kwargs)
                safe_drain(conn, timeout)

        return self._message
Example #29
    def test_produce_consume_noack(self):
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer = Consumer(channel, self.q, no_ack=True)

        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')

        _received = []

        def callback(message_data, message):
            _received.append(message)

        consumer.register_callback(callback)
        consumer.consume()

        while 1:
            if len(_received) == 10:
                break
            self.c.drain_events()

        self.assertEqual(len(_received), 10)
Example #30
    def test_produce__consume_multiple(self):
        if not self.verify_alive():
            return
        chan1 = self.connection.channel()
        producer = Producer(chan1, self.exchange)
        b1 = Queue(self.P("b1"), self.exchange, "b1")(chan1)
        b2 = Queue(self.P("b2"), self.exchange, "b2")(chan1)
        b3 = Queue(self.P("b3"), self.exchange, "b3")(chan1)
        [q.declare() for q in (b1, b2, b3)]
        [q.purge() for q in (b1, b2, b3)]

        producer.publish("b1", routing_key="b1")
        producer.publish("b2", routing_key="b2")
        producer.publish("b3", routing_key="b3")
        chan1.close()

        chan2 = self.connection.channel()
        consumer = Consumer(chan2, [b1, b2, b3])
        messages = consumeN(self.connection, consumer, 3)
        self.assertItemsEqual(messages, ["b1", "b2", "b3"])
        chan2.close()
        self.purge([self.P("b1"), self.P("b2"), self.P("b3")])