def _publish_consume(self):
    """Round-trip a message through the 'tasks' queue and assert it arrives.

    Publishes a dict via ``self.producer``, drains events with a traced
    consumer, and checks the consumed body equals what was published.
    """
    received = []

    def on_message(body, message):
        # Collect the decoded payload and acknowledge delivery.
        received.append(body)
        message.ack()

    queue = kombu.Queue('tasks', kombu.Exchange('tasks'), routing_key='tasks')
    payload = {'hello': 'world'}
    self.producer.publish(
        payload,
        exchange=queue.exchange,
        routing_key=queue.routing_key,
        declare=[queue],
    )
    with kombu.Consumer(self.conn, [queue], accept=['json'],
                        callbacks=[on_message]) as consumer:
        # Attach the test tracer to the consumer before draining.
        Pin.override(consumer, service='kombu-patch', tracer=self.tracer)
        self.conn.drain_events(timeout=2)
    self.assertEqual(received[0], payload)
def __init__(self, conf):
    """Initialize the RPC client: connection, exchange, reply queue, consumer.

    :param conf: mapping of transport options (host, port, credentials,
        durability flags, timeout, ...); missing keys fall back to defaults.
    """
    super(KombuRPCClient, self).__init__(conf)

    # Transport and addressing options with conservative defaults.
    self.exchange = conf.get('exchange', '')
    self.user_id = conf.get('user_id', 'guest')
    self.password = conf.get('password', 'guest')
    self.topic = conf.get('topic', 'mistral')
    self.server_id = conf.get('server_id', '')
    self.host = conf.get('host', 'localhost')
    self.port = conf.get('port', 5672)
    self.virtual_host = conf.get('virtual_host', '/')
    self.durable_queue = conf.get('durable_queues', False)
    self.auto_delete = conf.get('auto_delete', False)
    self._timeout = conf.get('timeout', 60)

    self.conn = self._make_connection(
        self.host,
        self.port,
        self.user_id,
        self.password,
        self.virtual_host,
    )

    # Create exchange.
    rpc_exchange = self._make_exchange(
        self.exchange,
        durable=self.durable_queue,
        auto_delete=self.auto_delete,
    )

    # Create an exclusive, auto-deleting reply queue with a unique name;
    # replies are routed back by using the queue name as the routing key.
    reply_queue_name = utils.generate_unicode_uuid()
    self.callback_queue = kombu.Queue(
        reply_queue_name,
        exchange=rpc_exchange,
        routing_key=reply_queue_name,
        durable=False,
        exclusive=True,
        auto_delete=True,
    )

    # Create consumer; prefetch one message at a time.
    self.consumer = kombu.Consumer(
        channel=self.conn.channel(),
        queues=self.callback_queue,
        callbacks=[self._on_response],
        accept=['pickle', 'json'],
    )
    self.consumer.qos(prefetch_count=1)
def test_publish_consume(self, connection):
    """A message published with a 2s expiration must be gone after 3s.

    The callback must never fire; draining after the TTL has passed should
    time out instead of delivering the expired message.
    """
    ttl_queue = kombu.Queue('ttl_test', routing_key='ttl_test')

    def on_expired_message(body, message):
        assert False, 'Callback should not be called'

    with connection as conn:
        with conn.channel() as channel:
            kombu.Producer(channel).publish(
                {'hello': 'world'},
                retry=True,
                exchange=ttl_queue.exchange,
                routing_key=ttl_queue.routing_key,
                declare=[ttl_queue],
                serializer='pickle',
                expiration=2,
            )
            consumer = kombu.Consumer(conn, [ttl_queue], accept=['pickle'])
            consumer.register_callback(on_expired_message)
            # Wait past the 2-second TTL so the broker drops the message.
            sleep(3)
            with consumer:
                with pytest.raises(socket.timeout):
                    conn.drain_events(timeout=1)
def ensure_consuming(self, force=False):
    """Ensure a host-specific queue exists and a consumer is attached to it.

    Retries on BrokenPipeError (the loop starts over); any other exception
    aborts the attempt. A no-op when the client is closed (unless *force*)
    or when a queue is already set up.
    """
    if self.closed and not force:
        return
    while True:
        try:
            # Already set up on a previous iteration (or earlier call).
            if self.queue:
                return
            with ConnectionContext(self.connection, no_release=True) as conn:
                channel = conn.default_channel
                # Debug output of the connection internals.
                print('channel:', conn.__dict__)
                # Create queue.
                # NOTE(review): `sock` is presumably the stdlib `socket`
                # module imported under an alias — confirm at file top.
                self.queue = kombu.Queue(
                    sock.gethostname() + "-(ensure_consuming)-" + self.pid,
                    auto_delete=True,
                    expires=RABBITMQ_QUEUE_EXPIRES,
                    channel=channel)
                self.queue.declare()
                # Create consumer.
                self.consumer = kombu.Consumer(
                    channel,
                    [self.queue],
                    no_ack=False,
                    # on_message=
                    prefetch_count=1)
                self.consumer.consume()
        except BrokenPipeError as err:
            # Broker pipe broke: reset and let the while-loop retry.
            # NOTE(review): `conn` may be unbound here if ConnectionContext
            # itself raised before the `with` body ran — verify.
            self.queue = None
            conn.release()
        except Exception as err:
            # Any other failure: reset, release, and give up (break below).
            print(err)
            self.queue = None
            conn.release()
            break
#exchange = kombu.Exchange('some-exchange') #queue = kombu.Queue(name='some-queue', exchange=exchange) exchange = kombu.Exchange('kombu_demo', type='direct') queue = kombu.Queue('kombu_demo', exchange, routing_key='kombu_demo') cnt = 0 def callback(body, message): global cnt print('%d: got msg - %s' % (cnt, body)) message.ack() cnt += 1 consumer = kombu.Consumer(channel, queues=queue, callbacks=[callback]) #consumer.consume() while True: try: #consumer = kombu.Consumer(channel, queues=queue, callbacks=[callback]) consumer.consume() connection.drain_events() time.sleep(1) except connection.connection_errors + connection.channel_errors: connection.close() print("Host down, connecting to the next one.") connection.ensure_connection() channel = connection.channel() consumer = kombu.Consumer(channel, queues=queue, callbacks=[callback]) consumer.consume()
def _consume(self, connection, queue):
    """Drain pending events from *queue* (pickle payloads) for at most one
    second, dispatching each message to ``self._callback``."""
    receiver = kombu.Consumer(connection, [queue], accept=['pickle'])
    receiver.register_callback(self._callback)
    # Entering the context starts consuming; leaving it cancels.
    with receiver:
        connection.drain_events(timeout=1)
def _create_consumer(self, connection, queue):
    """Build and return a pickle-accepting consumer on *queue* that routes
    messages to ``self._callback``."""
    new_consumer = kombu.Consumer(connection, [queue], accept=['pickle'])
    new_consumer.register_callback(self._callback)
    return new_consumer
def __init__(
    self,
    channel,  # type: amqp.channel.Channel
    routing_key,  # type: str
    queue,  # type: str
    exchange,  # type: str
    queue_arguments,  # type: Dict[str, str]
    func,  # type: Callable[[Any], Any]
    backoff_func=None  # type: Optional[Callable[[int], float]]
):
    # type: (...) -> None
    """Set up the worker, retry and archive queues, their producers, and a
    consumer on the worker queue, all bound to *channel*.

    :param channel: open AMQP channel every entity is bound to.
    :param routing_key: routing key binding the worker queue to its exchange.
    :param queue: base queue name; retry/archive queue names derive from it.
    :param exchange: key into ``settings.EXCHANGES`` naming the exchange.
    :param queue_arguments: extra AMQP arguments for the worker queue.
    :param func: message-handling callable.
    :param backoff_func: retry-delay function; defaults to ``self.backoff``.
    :raises NoExchange: if *exchange* is missing from ``settings.EXCHANGES``.
    """
    self.channel = channel
    self.routing_key = routing_key
    self.queue = queue  # queue name
    self.exchange = exchange  # `settings.EXCHANGES` config key
    self.func = func
    self.backoff_func = backoff_func or self.backoff
    # The default (nameless) exchange plus every configured exchange,
    # each bound to this channel.
    self.exchanges = {
        DEFAULT_EXCHANGE: kombu.Exchange(channel=self.channel)
    }
    for name, exchange_settings in settings.EXCHANGES.items():
        self.exchanges[name] = kombu.Exchange(channel=self.channel,
                                              **exchange_settings)
    try:
        self.worker_queue = kombu.Queue(
            name=self.queue,
            exchange=self.exchanges[exchange],
            routing_key=self.routing_key,
            channel=self.channel,
            queue_arguments=queue_arguments,
        )
        self.retry_queue = kombu.Queue(
            name='{queue}.retry'.format(queue=queue),
            exchange=self.exchanges[DEFAULT_EXCHANGE],
            routing_key='{queue}.retry'.format(queue=queue),
            # N.B. default exchange automatically routes messages to a queue
            # with the same name as the routing key provided.
            queue_arguments={
                # Dead-letter messages from the retry queue back onto the
                # worker queue via the default ("") exchange.
                "x-dead-letter-exchange": "",
                "x-dead-letter-routing-key": self.queue,
            },
            channel=self.channel,
        )
        self.archive_queue = kombu.Queue(
            name='{queue}.archived'.format(queue=queue),
            exchange=self.exchanges[DEFAULT_EXCHANGE],
            routing_key='{queue}.archived'.format(queue=queue),
            queue_arguments=settings.ARCHIVE_QUEUE_ARGS,
            channel=self.channel,
        )
    except KeyError as key_exc:
        # The exchange lookup failed — surface a descriptive domain error.
        raise NoExchange(
            "The exchange {exchange} was not found in settings.EXCHANGES.\n"
            "settings.EXCHANGES = {exchanges}".format(
                exchange=key_exc,
                exchanges=settings.EXCHANGES))
    # Producers used to re-publish messages to the retry/archive queues.
    self.retry_producer = kombu.Producer(
        channel,
        exchange=self.retry_queue.exchange,
        routing_key=self.retry_queue.routing_key,
        serializer=settings.SERIALIZER,
    )
    self.archive_producer = kombu.Producer(
        channel,
        exchange=self.archive_queue.exchange,
        routing_key=self.archive_queue.routing_key,
        serializer=settings.SERIALIZER,
    )
    # callbacks=[self]: the instance itself handles messages — presumably
    # via __call__; confirm against the rest of the class.
    self.consumer = kombu.Consumer(
        channel,
        queues=[self.worker_queue],
        callbacks=[self],
        accept=settings.ACCEPT,
    )
    self.consumer.qos(prefetch_count=settings.PREFETCH_COUNT)
def __init__(self,
             channel,  # type: amqp.channel.Channel
             routing_key,  # type: str
             queue,  # type: str
             exchange,  # type: str
             func,  # type: Callable[[Any], Any, kombu.Message]
             backoff_func=None,  # type: Optional[Callable[[int], float]],
             auto_delete=False,  # type: bool
             durable=True  # type: bool
             ):
    # type: (...) -> None
    """Set up the worker, retry and archive queues, their producers, and a
    consumer on the worker queue, all bound to *channel*.

    :param channel: open AMQP channel every entity is bound to.
    :param routing_key: routing key binding the worker queue to its exchange.
    :param queue: base queue name; retry/archive queue names derive from it.
    :param exchange: key into ``settings.EXCHANGES`` naming the exchange.
    :param func: message-handling callable.
    :param backoff_func: retry-delay function; defaults to ``self.backoff``.
    :param auto_delete: whether the declared queues auto-delete.
    :param durable: whether the declared queues survive broker restart.
    :raises NoExchange: if *exchange* is missing from ``settings.EXCHANGES``.
    """
    self.channel = channel
    self.routing_key = routing_key
    self.queue = queue  # queue name
    self.exchange = exchange  # `settings.EXCHANGES` config key
    self.func = func
    self.backoff_func = backoff_func or self.backoff
    self._auto_delete = auto_delete
    self._durable = durable
    # The default (nameless) exchange plus every configured exchange,
    # each bound to this channel.
    self.exchanges = {
        DEFAULT_EXCHANGE: kombu.Exchange(channel=self.channel)
    }
    for name, exchange_settings in settings.EXCHANGES.items():
        self.exchanges[name] = kombu.Exchange(
            channel=self.channel,
            **exchange_settings
        )
    try:
        self.worker_queue = kombu.Queue(
            name=self.queue,
            exchange=self.exchanges[exchange],
            routing_key=self.routing_key,
            channel=self.channel,
            auto_delete=self._auto_delete,
            durable=self._durable
        )
        self.retry_queue = kombu.Queue(
            name='{0}.retry'.format(queue),
            exchange=self.exchanges[DEFAULT_EXCHANGE],
            routing_key='{0}.retry'.format(queue),
            # N.B. default exchange automatically routes messages to a queue
            # with the same name as the routing key provided.
            queue_arguments={
                # Dead-letter messages from the retry queue back onto the
                # worker queue via the default ("") exchange.
                "x-dead-letter-exchange": "",
                "x-dead-letter-routing-key": self.queue,
            },
            channel=self.channel,
            auto_delete=self._auto_delete,
            durable=self._durable
        )
        self.archive_queue = kombu.Queue(
            name='{0}.archived'.format(queue),
            exchange=self.exchanges[DEFAULT_EXCHANGE],
            routing_key='{0}.archived'.format(queue),
            queue_arguments={
                "x-expires": settings.ARCHIVE_EXPIRY,  # Messages dropped after this
                "x-max-length": 1000000,  # Maximum size of the queue
                "x-queue-mode": "lazy",  # Keep messages on disk (reqs. rabbitmq 3.6.0+)
            },
            channel=self.channel,
            auto_delete=self._auto_delete,
            durable=self._durable
        )
    except KeyError as key_exc:
        # The exchange lookup failed — surface a descriptive domain error.
        raise NoExchange(
            "The exchange {0} was not found in settings.EXCHANGES. \n"
            "settings.EXCHANGES = {1}".format(
                key_exc,
                settings.EXCHANGES
            )
        )
    # Producers used to re-publish messages to the retry/archive queues.
    self.retry_producer = kombu.Producer(
        channel,
        exchange=self.retry_queue.exchange,
        routing_key=self.retry_queue.routing_key,
        serializer=settings.SERIALIZER,
    )
    self.archive_producer = kombu.Producer(
        channel,
        exchange=self.archive_queue.exchange,
        routing_key=self.archive_queue.routing_key,
        serializer=settings.SERIALIZER,
    )
    # callbacks=[self]: the instance itself handles messages — presumably
    # via __call__; confirm against the rest of the class.
    # NOTE(review): unlike the sibling implementation in this file, no
    # qos/prefetch_count is set on this consumer — confirm that is intended.
    self.consumer = kombu.Consumer(
        channel,
        queues=[self.worker_queue],
        callbacks=[self],
        accept=settings.ACCEPT,
    )
def get_consumers(self, channel):
    """Return the single JSON-accepting consumer this worker uses, bound to
    *channel* and dispatching to ``self.handle_message``."""
    consumer = kombu.Consumer(
        channel,
        queues=[queue],
        callbacks=[self.handle_message],
        accept=['json'],
    )
    return [consumer]