def ingest(self):
    """ Monitor AMQP for messages """
    if self.stoq.worker.name:
        # Define our RabbitMQ route
        routing_key = self.stoq.worker.name

        # If this is an error message, let's make sure our queue
        # has "-errors" affixed to it
        if self.stoq.worker.error_queue is True:
            routing_key = routing_key + "-errors"

        exchange = Exchange(self.exchange_name, type=self.exchange_type)
        queue = Queue(routing_key, exchange, routing_key=routing_key)

        # Setup our broker connection with RabbitMQ
        with Connection(hostname=self.host,
                        port=self.port,
                        userid=self.user,
                        password=self.password,
                        virtual_host=self.virtual_host) as conn:
            consumer = Consumer(conn, queue, callbacks=[self.queue_callback])
            consumer.qos(prefetch_count=int(self.prefetch))
            consumer.consume()
            while True:
                conn.drain_events()
    else:
        self.stoq.log.error("No worker name defined!")

def add_queue_rule(self, handler, name, autoack=True, prefetch_size=0,
                   prefetch_count=0, **kwargs):
    """Add queue rule to Microservice

    :param handler: function for handling messages
    :type handler: callable object
    :param name: name of queue
    :type name: str
    :param autoack: if True, message.ack() is called after the callback
    :param prefetch_size: prefetch window size in bytes
    :param prefetch_count: number of messages to prefetch from the broker
    """
    if self.with_pool:
        if self.workers_override_prefetch:
            prefetch_count = self.workers
        rule = Rule(name, handler, self.logger, autoack=autoack,
                    deferred_callbacks=self.deferred_callbacks,
                    pool=self.pool, **kwargs)
    else:
        rule = Rule(name, handler, self.logger, autoack=autoack, **kwargs)
    self.connect()
    consumer = Consumer(self.connection, queues=[Queue(rule.name)],
                        callbacks=[rule.callback], auto_declare=True)
    consumer.qos(prefetch_count=prefetch_count, prefetch_size=prefetch_size)
    self.consumers.append(consumer)
    self.logger.debug('Rule "%s" added!', rule.name)

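# A standalone sketch of roughly what add_queue_rule wires up, using only
# public kombu APIs; the broker URL, queue name, and handler below are
# assumptions for illustration, not part of the original service.
import socket
from kombu import Connection, Queue, Consumer

def example_handler(body, message):
    print('received:', body)  # a Rule callback would dispatch here
    message.ack()

with Connection('amqp://guest:guest@localhost//') as conn:
    consumer = Consumer(conn, queues=[Queue('example-queue')],
                        callbacks=[example_handler], auto_declare=True)
    consumer.qos(prefetch_count=10, prefetch_size=0)
    with consumer:  # consume() on enter, cancel() on exit
        try:
            conn.drain_events(timeout=2)
        except socket.timeout:
            pass  # no messages within the window
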
def test_consumer_tag_prefix(self):
    channel = self.connection.channel()
    queue = Queue('qname', self.exchange, 'rkey')
    consumer = Consumer(channel, queue, tag_prefix='consumer_')
    consumer.consume()

    assert consumer._active_tags[queue.name].startswith('consumer_')

def test_on_callback_error_with_continue(self):
    channel = self.connection.channel()
    b1 = Queue('qname1', self.exchange, 'rkey')
    callback_exception = Exception('callback_exception')
    received, called = [], []

    def first_callback(message_data, message):
        called.append('first')

    def second_callback(message_data, message):
        called.append('second')
        raise callback_exception

    def third_callback(message_data, message):
        called.append('third')

    def on_callback_error(message_data, message, exc, callback):
        received.append((message_data, message, exc, callback))
        return True  # continue callbacks

    callbacks = [first_callback, second_callback, third_callback]
    consumer = Consumer(channel, [b1], callbacks=callbacks,
                        on_callback_error=on_callback_error)
    consumer._receive_callback({'foo': 'bar'})

    self.assertEqual(called, ['first', 'second', 'third'])
    self.assertEqual(len(received), 1)
    message_data, message, exc, callback = received[0]
    self.assertEqual(message_data, {'foo': 'bar'})
    self.assertEqual(exc, callback_exception)
    self.assertEqual(callback, second_callback)

def test_enter_exit_cancel_raises(self):
    c = Consumer(self.connection)
    c.cancel = Mock(name='Consumer.cancel')
    c.cancel.side_effect = KeyError('foo')
    with c:
        pass
    c.cancel.assert_called_with()

def _kconsume(n, name, conn, durable=DURABLE):
    channel = conn.channel()
    exchange = Exchange(name, type="direct", durable=durable,
                        auto_delete=True)
    queue = Queue(name, exchange, name, durable=durable, auto_delete=True)
    consumer = Consumer(channel, queue)
    ucon = conn.connect()

    i = [0]

    def callback(message_data, message=None):
        i[0] += 1
        if not i[0] % 10000:
            print(i[0])

    consumer.register_callback(callback)
    consumer.consume(no_ack=True)
    start = time()
    while i[0] < n:
        try:
            conn.drain_events()
        except socket.timeout:
            pass
    print(time() - start)

def test_decode_error(self):
    channel = self.connection.channel()
    b1 = Queue("qname1", self.exchange, "rkey")
    consumer = Consumer(channel, [b1])
    consumer.channel.throw_decode_error = True

    with self.assertRaises(ValueError):
        consumer._receive_callback({"foo": "bar"})

def test_enter_exit_cancel_not_called_on_connection_error(self):
    c = Consumer(self.connection)
    c.cancel = Mock(name='Consumer.cancel')
    assert self.connection.connection_errors
    with pytest.raises(self.connection.connection_errors[0]):
        with c:
            raise self.connection.connection_errors[0]()
    c.cancel.assert_not_called()

def test_consume__cancel(self):
    channel = self.connection.channel()
    queue = Queue('qname', self.exchange, 'rkey')
    consumer = Consumer(channel, queue, auto_declare=True)
    consumer.consume()
    consumer.cancel()
    assert 'basic_cancel' in channel
    assert not consumer._active_tags

def test_consume__cancel(self):
    channel = self.connection.channel()
    queue = Queue('qname', self.exchange, 'rkey')
    consumer = Consumer(channel, queue, auto_declare=True)
    consumer.consume()
    consumer.cancel()
    self.assertIn('basic_cancel', channel)
    self.assertFalse(consumer._active_tags)

def test_decode_error(self):
    channel = self.connection.channel()
    b1 = Queue('qname1', self.exchange, 'rkey')
    consumer = Consumer(channel, [b1])
    consumer.channel.throw_decode_error = True

    with self.assertRaises(ValueError):
        consumer._receive_callback({'foo': 'bar'})

def run(self):
    consumer = Consumer(self.channel, self.queue,
                        callbacks=[self.handle_message])
    consumer.consume()
    while True:
        try:
            self.connection.drain_events()
        except KeyboardInterrupt:
            return

def test_timeout(self):
    if not self.verify_alive():
        return
    chan = self.connection.channel()
    self.purge([self.queue.name])
    consumer = Consumer(chan, self.queue)
    self.assertRaises(socket.timeout, self.connection.drain_events,
                      timeout=0.3)
    consumer.cancel()

def test_timeout(self):
    if not self.connected:
        raise SkipTest("Broker not running.")
    chan = self.connection.channel()
    self.purge([self.queue.name])
    consumer = Consumer(chan, self.queue)
    self.assertRaises(socket.timeout, self.connection.drain_events,
                      timeout=0.3)
    consumer.cancel()

def consumer(self, wakeup=True):
    """Create event consumer."""
    consumer = Consumer(self.connection,
                        queues=[self.queue],
                        no_ack=True)
    consumer.register_callback(self._receive)
    with consumer:
        if wakeup:
            self.wakeup_workers(channel=consumer.channel)
        yield consumer

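# A standalone sketch of the same no_ack event-consumer pattern used above,
# built only from public kombu APIs; the broker URL and queue name are
# assumptions for illustration.
import socket
from kombu import Connection, Queue, Consumer

def on_event(body, message):
    print('event:', body)  # no explicit ack: the consumer runs with no_ack=True

with Connection('amqp://guest:guest@localhost//') as conn:
    with Consumer(conn, queues=[Queue('events')], no_ack=True,
                  callbacks=[on_event]):
        try:
            conn.drain_events(timeout=2)
        except socket.timeout:
            pass  # no events within the window
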
def test_purge(self):
    channel = self.connection.channel()
    b1 = Queue("qname1", self.exchange, "rkey")
    b2 = Queue("qname2", self.exchange, "rkey")
    b3 = Queue("qname3", self.exchange, "rkey")
    b4 = Queue("qname4", self.exchange, "rkey")
    consumer = Consumer(channel, [b1, b2, b3, b4], auto_declare=True)
    consumer.purge()
    self.assertEqual(channel.called.count("queue_purge"), 4)

def test_revive(self):
    channel = self.connection.channel()
    b1 = Queue('qname1', self.exchange, 'rkey')
    consumer = Consumer(channel, [b1])
    channel2 = self.connection.channel()
    consumer.revive(channel2)
    self.assertIs(consumer.channel, channel2)
    self.assertIs(consumer.queues[0].channel, channel2)
    self.assertIs(consumer.queues[0].exchange.channel, channel2)

def test_purge(self):
    channel = self.connection.channel()
    b1 = Queue('qname1', self.exchange, 'rkey')
    b2 = Queue('qname2', self.exchange, 'rkey')
    b3 = Queue('qname3', self.exchange, 'rkey')
    b4 = Queue('qname4', self.exchange, 'rkey')
    consumer = Consumer(channel, [b1, b2, b3, b4], auto_declare=True)
    consumer.purge()
    self.assertEqual(channel.called.count('queue_purge'), 4)

def connect(self, hostname="localhost", userid="guest",
            password="******", virtual_host="/"):
    self.conn = BrokerConnection(hostname, userid, password, virtual_host)
    # define Web2Server exchange
    exchange = Exchange(self.kwargs["X7_E"], type="direct")
    self.queue = Queue(self.kwargs["X7_Q"], exchange,
                       routing_key=self.kwargs["X7_RK"])
    channel = self.conn.channel()
    consumer = Consumer(channel, self.queue, callbacks=[self.callback])
    consumer.consume()

def test_produce_consume(self):
    channel = self.c.channel()
    producer = Producer(channel, self.e)
    consumer1 = Consumer(channel, self.q)
    consumer2 = Consumer(channel, self.q2)
    self.q2(channel).declare()

    for i in range(10):
        producer.publish({'foo': i}, routing_key='test_transport_memory')
    for i in range(10):
        producer.publish({'foo': i}, routing_key='test_transport_memory2')

    _received1 = []
    _received2 = []

    def callback1(message_data, message):
        _received1.append(message)
        message.ack()

    def callback2(message_data, message):
        _received2.append(message)
        message.ack()

    consumer1.register_callback(callback1)
    consumer2.register_callback(callback2)

    consumer1.consume()
    consumer2.consume()

    while 1:
        if len(_received1) + len(_received2) == 20:
            break
        self.c.drain_events()

    self.assertEqual(len(_received1) + len(_received2), 20)

    # compression
    producer.publish({'compressed': True},
                     routing_key='test_transport_memory',
                     compression='zlib')
    m = self.q(channel).get()
    self.assertDictEqual(m.payload, {'compressed': True})

    # queue.delete
    for i in range(10):
        producer.publish({'foo': i}, routing_key='test_transport_memory')
    self.assertTrue(self.q(channel).get())
    self.q(channel).delete()
    self.q(channel).declare()
    self.assertIsNone(self.q(channel).get())

    # queue.purge
    for i in range(10):
        producer.publish({'foo': i}, routing_key='test_transport_memory2')
    self.assertTrue(self.q2(channel).get())
    self.q2(channel).purge()
    self.assertIsNone(self.q2(channel).get())

def test_receive_callback_accept(self):
    message = Mock(name='Message')
    callback = Mock(name='on_message')
    c = Consumer(self.connection, accept=['json'], on_message=callback)
    c.channel = Mock(name='channel')
    c.channel.message_to_python = None

    c._receive_callback(message)
    callback.assert_called_with(message)
    self.assertSetEqual(message.accept, c.accept)

def _get_receiver(self, receiver, callback):
    queue = Queue(name=receiver)
    result = Consumer(
        self.conn,
        queues=queue,
        no_ack=(not self.safe),
        callbacks=(lambda x, y: callback(x),))
    result.consume()
    return result

def test___enter____exit__(self):
    channel = self.connection.channel()
    queue = Queue('qname', self.exchange, 'rkey')
    consumer = Consumer(channel, queue, auto_declare=True)
    context = consumer.__enter__()
    assert context is consumer
    assert consumer._active_tags
    res = consumer.__exit__(None, None, None)
    assert not res
    assert 'basic_cancel' in channel
    assert not consumer._active_tags

def test___enter____exit__(self):
    channel = self.connection.channel()
    queue = Queue('qname', self.exchange, 'rkey')
    consumer = Consumer(channel, queue, auto_declare=True)
    context = consumer.__enter__()
    self.assertIs(context, consumer)
    self.assertTrue(consumer._active_tags)
    res = consumer.__exit__(None, None, None)
    self.assertFalse(res)
    self.assertIn('basic_cancel', channel)
    self.assertFalse(consumer._active_tags)

def test_basic_reject__requeue(self):
    channel = self.connection.channel()
    b1 = Queue('qname1', self.exchange, 'rkey')
    consumer = Consumer(channel, [b1])

    def callback(message_data, message):
        message.requeue()

    consumer.register_callback(callback)
    consumer._receive_callback({'foo': 'bar'})
    self.assertIn('basic_reject:requeue', channel)

def test_basic_reject__requeue(self):
    channel = self.connection.channel()
    b1 = Queue("qname1", self.exchange, "rkey")
    consumer = Consumer(channel, [b1])

    def callback(message_data, message):
        message.requeue()

    consumer.register_callback(callback)
    consumer._receive_callback({"foo": "bar"})
    self.assertIn("basic_reject:requeue", channel)

def test_receive_callback_accept(self):
    message = Mock(name='Message')
    message.errors = []
    callback = Mock(name='on_message')
    c = Consumer(self.connection, accept=['json'], on_message=callback)
    c.on_decode_error = None
    c.channel = Mock(name='channel')
    c.channel.message_to_python = None

    c._receive_callback(message)
    callback.assert_called_with(message)
    assert message.accept == c.accept

def test_basic_ack_twice(self):
    channel = self.connection.channel()
    b1 = Queue('qname1', self.exchange, 'rkey')
    consumer = Consumer(channel, [b1])

    def callback(message_data, message):
        message.ack()
        message.ack()

    consumer.register_callback(callback)
    with self.assertRaises(MessageStateError):
        consumer._receive_callback({'foo': 'bar'})

def test_basic_reject_twice(self):
    channel = self.connection.channel()
    b1 = Queue("qname1", self.exchange, "rkey")
    consumer = Consumer(channel, [b1])

    def callback(message_data, message):
        message.reject()
        message.reject()

    consumer.register_callback(callback)
    with self.assertRaises(MessageStateError):
        consumer._receive_callback({"foo": "bar"})
    self.assertIn("basic_reject", channel)

def test_basic_reject(self):
    channel = self.connection.channel()
    b1 = Queue('qname1', self.exchange, 'rkey')
    consumer = Consumer(channel, [b1])

    def callback(message_data, message):
        message.reject()

    consumer.register_callback(callback)
    consumer._receive_callback({'foo': 'bar'})
    assert 'basic_reject' in channel

def purge(self, timeout: int = 5) -> None:

    def _purge_errors(exc, interval):
        self._logger.error('Purging error: %s, will retry triggering in %s seconds',
                           exc, interval, exc_info=True)

    def _purge_messages(cnsmr: BrightsideConsumer):
        cnsmr.purge()
        self._message = None

    connection = BrokerConnection(hostname=self._amqp_uri)
    with connections[connection].acquire(block=True) as conn:
        self._logger.debug('Got connection: %s', conn.as_uri())
        # The consumer needs the acquired connection (or a channel) as its
        # first argument; the queue list follows.
        with Consumer(conn, [self._queue],
                      callbacks=[_purge_messages]) as consumer:
            ensure_kwargs = self.RETRY_OPTIONS.copy()
            ensure_kwargs['errback'] = _purge_errors
            safe_purge = conn.ensure(consumer, _purge_messages, **ensure_kwargs)
            safe_purge(consumer)

def run(self):
    queue_write_db = Queue(name='dbwriter.request.api_write_db',
                           exchange=self.exchange,
                           routing_key='dbwriter.request.api_write_db')
    while 1:
        try:
            self.consumer_connection.ensure_connection(max_retries=1)
            with Consumer(self.consumer_connection,
                          queues=queue_write_db,
                          callbacks=[self.api_write_db],
                          no_ack=True):
                while True:
                    self.consumer_connection.drain_events()
        except (ConnectionRefusedError, exceptions.OperationalError):
            print('Connection lost')
        except self.consumer_connection.connection_errors:
            print('Connection error')

def call(self, key):
    self.response = None
    self.correlation_id = uuid()
    with Producer(self.connection) as producer:
        producer.publish({'key': key},
                         exchange='',
                         routing_key='db.read',
                         declare=[self.callback_queue],
                         reply_to=self.callback_queue.name,
                         correlation_id=self.correlation_id)
    with Consumer(self.connection,
                  on_message=self.on_response,
                  queues=[self.callback_queue],
                  no_ack=True):
        while self.response is None:
            self.connection.drain_events()
    return self.response['result']

def test_receive_callback_accept(self):
    message = Mock(name='Message')
    message.errors = []
    callback = Mock(name='on_message')
    c = Consumer(self.connection, accept=['json'], on_message=callback)
    c.on_decode_error = None
    c.channel = Mock(name='channel')
    c.channel.message_to_python = None

    c._receive_callback(message)
    callback.assert_called_with(message)
    self.assertSetEqual(message.accept, c.accept)

def consumer_ack_wait():
    connection, channel = getChannelAndConnection()
    exchange = Exchange(CONF.region_id + "_ack", 'direct', channel)
    queue = Queue(CONF.region_id + "_ack", exchange=exchange,
                  routing_key=CONF.region_id + "_ack", channel=channel)
    consumer = Consumer(channel, queues=[queue], callbacks=[process_ack_msg])
    consumer.consume()
    try:
        while True:
            connection.drain_events()
    finally:
        # cancel() was unreachable after the infinite loop; run it on the way out
        consumer.cancel()

def get_things_info_by_platform_id(platform_id, thing_status, item_status):
    print("API get things info in platform_id with thing_status and item_status")
    if (thing_status in ["active", "inactive", "all"]) \
            and (item_status in ["active", "inactive", "all"]):
        message_request = {
            'reply_to': 'registry.response.api.api_get_things_by_platform_id',
            'thing_status': thing_status,
            'item_status': item_status,
            'platform_id': platform_id
        }

        # request to api_get_things of Registry
        queue_response = Queue(
            name='registry.response.api.api_get_things_by_platform_id',
            exchange=exchange,
            routing_key='registry.response.api.api_get_things_by_platform_id')
        request_routing_key = 'registry.request.api_get_things_by_platform_id'
        rabbitmq_connection.ensure_connection()
        with Producer(rabbitmq_connection) as producer:
            producer.publish(json.dumps(message_request),
                             exchange=exchange.name,
                             routing_key=request_routing_key,
                             declare=[queue_response],
                             retry=True)

        message_response = None

        def on_response(body, message):
            nonlocal message_response
            message_response = json.loads(body)

        with Consumer(rabbitmq_connection,
                      queues=queue_response,
                      callbacks=[on_response],
                      no_ack=True):
            while message_response is None:
                rabbitmq_connection.drain_events()
        return message_response
    else:
        return None

def call(self, n):
    self.response = None
    self.correlation_id = uuid()
    with Producer(self.connection) as producer:
        producer.publish(
            {'n': n},
            exchange='',
            routing_key='rpc_queue',
            declare=[self.callback_queue],
            # reply_to must be the queue *name*, not the Queue object
            reply_to=self.callback_queue.name,
            correlation_id=self.correlation_id,
        )
    with Consumer(self.connection,
                  on_message=self.on_response,
                  queues=[self.callback_queue],
                  no_ack=True):
        while self.response is None:
            self.connection.drain_events()
    return self.response

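# To make the round trip above concrete, a hedged sketch of a matching
# server side, modeled on the standard RabbitMQ RPC pattern; the broker URL
# and the echoed reply payload shape are assumptions, not taken from the
# original client.
from kombu import Connection, Queue, Consumer, Producer

def on_request(body, message):
    with Producer(message.channel) as producer:
        producer.publish(
            {'result': body['n']},  # hypothetical reply payload
            exchange='',
            routing_key=message.properties['reply_to'],
            correlation_id=message.properties['correlation_id'],
        )
    message.ack()

with Connection('amqp://guest:guest@localhost//') as conn:
    with Consumer(conn, queues=[Queue('rpc_queue')], callbacks=[on_request]):
        conn.drain_events(timeout=5)
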
def start_consuming(
    self,
    callback: Callable,
    queue_name: str,
    prefetch_count: int = 1,
    no_ack: bool = False,
    expires: int = None,
    callback_ready: Callable = None,
):
    if self._logger is not None:
        self._logger.debug("Start consuming queue: %s" % queue_name)
    self._consuming = True
    while self._consuming:
        revived_connection = self._connection.clone()
        revived_connection.ensure_connection()
        channel = revived_connection.channel()
        channel.basic_qos(0, prefetch_count, True)
        queues = []
        queue_obj = Queue(
            channel=channel,
            name=queue_name,
            no_ack=no_ack,
            durable=False,
            expires=expires,
            queue_arguments={"x-max-priority": 255},
        )
        queue_obj.declare()
        queues.append(queue_obj)
        consumer = Consumer(
            revived_connection,
            queues,
            callbacks=[callback],
            accept=["json"],
            auto_declare=False,
            prefetch_count=prefetch_count,
        )
        consumer.revive(channel)
        consumer.consume()
        while self._consuming:
            if callback_ready is not None:
                callback_ready()
            try:
                revived_connection.drain_events(timeout=2)
            except socket.timeout:
                revived_connection.heartbeat_check()
            except self._connection.connection_errors + (
                AMQPError,
                ConnectionForced,
                ConnectionError,
            ):  # pragma: no cover
                if self._logger is not None:
                    self._logger.exception("Connection error", stack_info=True)
                break

def send_task(self, payload, routing_key, local=False, get_response=False,
              timeout=10):
    if local:
        declare_queues = self.control_queues
    else:
        declare_queues = self.declare_queues

    reply_to = None
    callback_queue = []
    if get_response:
        reply_to = self.callback_queue.name
        callback_queue = [self.callback_queue]
        self.correlation_id = uuid()

    try:
        with producers[self.connection].acquire(block=True,
                                                timeout=10) as producer:
            producer.publish(
                payload,
                exchange=None if local else self.exchange,
                declare=declare_queues,
                routing_key=routing_key,
                reply_to=reply_to,
                correlation_id=self.correlation_id,
                retry=True,
                headers={'epoch': time.time()},
            )
        if get_response:
            with Consumer(self.connection,
                          on_message=self.on_response,
                          queues=callback_queue,
                          no_ack=True):
                while self.response is self._response:
                    self.connection.drain_events(timeout=timeout)
            return self.response
    except socket.timeout:
        log.exception("Error waiting for task: '%s' sent with routing key '%s'",
                      payload, routing_key)
    except Exception:
        log.exception("Error queueing async task: '%s'. for %s",
                      payload, routing_key)

def send(self, hijack_key, prefix, type_, hijack_as, action):
    log.debug(
        "Send 'learn_new_rule - {0}' hijack message with key: {1}".format(
            action, hijack_key))
    self.response = None
    self.correlation_id = uuid()
    callback_queue = Queue(
        uuid(),
        durable=False,
        exclusive=True,
        auto_delete=True,
        max_priority=4,
        consumer_arguments={"x-priority": 4},
    )
    with Connection(RABBITMQ_URI) as connection:
        with Producer(connection) as producer:
            producer.publish(
                {
                    "key": hijack_key,
                    "prefix": prefix,
                    "type": type_,
                    "hijack_as": hijack_as,
                    "action": action,
                },
                exchange="",
                routing_key="configuration.rpc.hijack-learn-rule",
                retry=True,
                declare=[callback_queue],
                reply_to=callback_queue.name,
                correlation_id=self.correlation_id,
                priority=4,
                serializer="ujson",
            )
        with Consumer(
            connection,
            on_message=self.on_response,
            queues=[callback_queue],
            accept=["ujson"],
        ):
            while self.response is None:
                connection.drain_events()
        if self.response["success"]:
            return self.response["new_yaml_conf"], True
        return self.response["new_yaml_conf"], False

def create_consumer(self, exchange_name, queue_name, process):
    try:
        channel = self.connection.channel()
        exchange = Exchange(exchange_name, type="topic")
        queue = Queue(queue_name, exchange, routing_key="notifications.#")
        consumer = Consumer(channel, queue)
        consumer.register_callback(process)
        consumer.consume()
        log.info("create consumer: " + repr(consumer))
    except OSError:
        raise MQConnectionError("please check your mq user, password "
                                "and host configuration.")
    return self.connection

def _read_monitoring_messages(self):

    def _drain(cnx, timeout):
        try:
            cnx.drain_events(timeout=timeout)
        except kombu_exceptions.TimeoutError:
            pass

    def _drain_errors(exc, interval):
        self._logger.error('Draining error: %s, will retry triggering in %s seconds',
                           exc, interval, exc_info=True)

    def _read_message(body, message):
        self._logger.debug("Monitoring event received at: %s headers: %s payload: %s",
                           datetime.utcnow().isoformat(),
                           message.headers, message.payload)
        now = datetime.utcnow().isoformat()
        activity = body
        print("{time}: {event}".format(time=now, event=activity))
        message.ack()

    # read the next batch number of monitoring messages from the control bus
    # evaluate for color coding (error is red)
    # print to stdout
    connection = BrokerConnection(hostname=self._amqp_uri)
    with connections[connection].acquire(block=True) as conn:
        self._logger.debug('Got connection: %s', conn.as_uri())
        with Consumer(conn, [self._monitoring_queue],
                      callbacks=[_read_message],
                      accept=['json', 'text/plain']) as consumer:
            self._running.set()
            ensure_kwargs = self.RETRY_OPTIONS.copy()
            ensure_kwargs['errback'] = _drain_errors
            lines = 0
            updates = 0
            while self._running.is_set():
                # page size number before we sleep
                safe_drain = conn.ensure(consumer, _drain, **ensure_kwargs)
                safe_drain(conn, DRAIN_EVENTS_TIMEOUT)
                lines += 1
                if lines == self.page_size:
                    if self.limit != -1 and updates > self.limit:
                        self._running.clear()
                    else:
                        sleep(self.delay_between_refreshes)
                    lines = 0
                    updates += 1

def get_consumers(self, channel):
    ep = settings.RABBIT_SETTINGS['RabbitEndpoints']['Events']
    ep_tasks = settings.RABBIT_SETTINGS['RabbitEndpoints']['Tasks']
    event_queue = Queue(
        ep['Queue'],
        Exchange(name=ep['Exchange'], type=ep['ExchangeType'], durable=False),
        ep['RoutingKey'])
    task_queue = Queue(
        ep_tasks['Queue'],
        Exchange(name=ep_tasks['Exchange'], type=ep_tasks['ExchangeType'],
                 durable=False),
        ep_tasks['RoutingKey'])
    return [
        Consumer(channel,
                 queues=[event_queue],
                 callbacks=[self.handle_message],
                 accept=['json'])
    ]

def receive_event(self):

    def handle_notification(body, message):
        self.logger.info("Receive Event!")
        # event_name = json.loads(body)["event_name"]
        event_id = json.loads(body)["event_id"]
        event_source = json.loads(body)["event_source"]
        trigger_id = json.loads(body)["trigger_id"]
        # event_generator_id = json.loads(body)["event_generator_id"]
        time = json.loads(body)["time"]

        # mapping trigger_id to condition_id and action_id
        condition_id, condition_type, condition_content, action_id, action_content = self.mapping(
            trigger_id)

        # Check whether the condition with this condition_id holds
        is_condition_satisfice = self.check_condition(
            condition_id, condition_type, condition_content)
        self.logger.info("check condition result: " + str(is_condition_satisfice))
        if is_condition_satisfice:
            # Execute an action
            self.call_to_action(action_id, action_content)
        self.logger.info("Finish receive one event!\n\n\n")
    # End handle_notification

    try:
        self.consumer_connection.ensure_connection(max_retries=1)
        with nested(
                Consumer(self.consumer_connection,
                         queues=self.queue_get_states,
                         callbacks=[handle_notification],
                         no_ack=True)):
            while True:
                self.consumer_connection.drain_events()
    except (ConnectionRefusedError, exceptions.OperationalError):
        self.logger.error("Connection lost", exc_info=True)
    except self.consumer_connection.connection_errors:
        self.logger.error("Connection error", exc_info=True)
    except Exception as e:
        self.logger.error("Error receive event", exc_info=True)

def check_changes(self):
    with open(self.path, "r") as f:
        content = f.readlines()
    # Take any action here when the file is modified.
    changes = "".join(difflib.unified_diff(self.content, content))
    if changes:
        self.response = None
        self.correlation_id = uuid()
        callback_queue = Queue(
            uuid(),
            durable=False,
            auto_delete=True,
            max_priority=4,
            consumer_arguments={"x-priority": 4},
        )
        with Producer(self.connection) as producer:
            producer.publish(
                content,
                exchange="",
                routing_key="configuration.rpc.modify",
                serializer="yaml",
                retry=True,
                declare=[callback_queue],
                reply_to=callback_queue.name,
                correlation_id=self.correlation_id,
                priority=4,
            )
        with Consumer(
            self.connection,
            on_message=self.on_response,
            queues=[callback_queue],
            accept=["ujson"],
        ):
            while self.response is None:
                self.connection.drain_events()
        if self.response["status"] == "accepted":
            text = "new configuration accepted:\n{}".format(changes)
            log.info(text)
            self.content = content
        else:
            log.error("invalid configuration:\n{}".format(content))
        self.response = None

def main():
    parser = ArgumentParser(
        description="Spam a particular individual in `spam_domain`")
    parser.add_argument("--name",
                        help='The local name of the email to spam',
                        default='randomuser')
    args = parser.parse_args()
    spammer = Spammer(args.name)
    with Connection(config.get_rabbit_url()) as connection:
        main_queue = Queue(args.name,
                           exchange=spammer_exchange,
                           routing_key=args.name)
        with Consumer(connection,
                      queues=[main_queue],
                      callbacks=[spammer.spam],
                      prefetch_count=1):
            while True:
                connection.drain_events()  # start consuming

def run():
    queue_notification = Queue(name='monitor.request.alert',
                               exchange=exchange,
                               routing_key='monitor.request.alert')
    while 1:
        try:
            consumer_connection.ensure_connection(max_retries=1)
            with nested(
                    Consumer(consumer_connection,
                             queues=queue_notification,
                             callbacks=[handle_notification],
                             no_ack=True)):
                while True:
                    consumer_connection.drain_events()
        except (ConnectionRefusedError, exceptions.OperationalError):
            print('Connection lost')
        except consumer_connection.connection_errors:
            print('Connection error')

def ingest(self):
    """ Monitor AMQP for messages """
    # Define our RabbitMQ route
    if self.stoq.worker.source_queue:
        routing_key = self.stoq.worker.source_queue
    else:
        routing_key = self.stoq.worker.name

    # If this is an error message, let's make sure our queue
    # has "-errors" affixed to it
    if self.stoq.worker.error_queue is True:
        routing_key = routing_key + "-errors"

    exchange = Exchange(self.exchange_name, type=self.exchange_type)
    queue_arguments = {'x-max-priority': 10}
    queue = Queue(routing_key, exchange, routing_key=routing_key,
                  queue_arguments=queue_arguments)

    self.log.info("Monitoring {} queue for messages...".format(routing_key))

    # Setup our broker connection with RabbitMQ
    with Connection(hostname=self.host,
                    port=self.port,
                    userid=self.user,
                    password=self.password,
                    virtual_host=self.virtual_host,
                    ssl=self.ssl_config) as conn:
        conn.connect()
        consumer = Consumer(conn, queue, callbacks=[self.queue_callback])
        consumer.qos(prefetch_count=int(self.prefetch))
        consumer.consume()

        while True:
            try:
                conn.drain_events()
            except Exception as err:
                self.log.critical("Unable to process queue: {}".format(err),
                                  exc_info=True)
                conn.release()
                break

def __init__(self):
    client_id = settings.DISCORD_APP_ID

    intents = discord.Intents.default()
    intents.members = True

    super().__init__(
        command_prefix=DISCORD_BOT_PREFIX,
        description=description,
        intents=intents,
    )

    self.redis = None
    self.redis = self.loop.run_until_complete(
        aioredis.create_pool(
            getattr(settings, "BROKER_URL", "redis://localhost:6379/0"),
            minsize=5, maxsize=10))
    print('redis pool started', self.redis)

    self.client_id = client_id
    self.session = aiohttp.ClientSession(loop=self.loop)
    self.tasks = []

    self.message_connection = Connection(
        getattr(settings, "BROKER_URL", 'redis://localhost:6379/0'))
    queues = []
    for que in queue_keys:
        queues.append(Queue(que))
    self.message_consumer = Consumer(self.message_connection,
                                     queues,
                                     callbacks=[self.on_queue_message],
                                     accept=['json'])

    django.setup()

    for hook in hooks.get_hooks("discord_cogs_hook"):
        for cog in hook():
            try:
                self.load_extension(cog)
            except Exception as e:
                print(f"Failed to load cog {cog}", file=sys.stderr)
                traceback.print_exc()

def on_modified(self, event):
    if event.is_directory:
        return None
    elif event.src_path == self.path:
        with open(self.path, 'r') as f:
            content = f.readlines()
        # Take any action here when the file is modified.
        changes = ''.join(difflib.unified_diff(self.content, content))
        if len(changes) > 0:
            self.response = None
            self.correlation_id = uuid()
            callback_queue = Queue(uuid(),
                                   durable=False,
                                   auto_delete=True,
                                   max_priority=4,
                                   consumer_arguments={'x-priority': 4})
            with Producer(self.connection) as producer:
                producer.publish(content,
                                 exchange='',
                                 routing_key='config-modify-queue',
                                 serializer='yaml',
                                 retry=True,
                                 declare=[callback_queue],
                                 reply_to=callback_queue.name,
                                 correlation_id=self.correlation_id,
                                 priority=4)
            with Consumer(self.connection,
                          on_message=self.on_response,
                          queues=[callback_queue],
                          no_ack=True):
                while self.response is None:
                    self.connection.drain_events()
            if self.response['status'] == 'accepted':
                text = 'new configuration accepted:\n{}'.format(changes)
                log.info(text)
                self.content = content
            else:
                log.error('invalid configuration:\n{}'.format(content))
            self.response = None

def config_request_rpc(self) -> NoReturn:
    """
    Initial RPC of this service to request the configuration.
    The RPC is blocked until the configuration service replies back.
    """
    self.correlation_id = uuid()
    callback_queue = Queue(
        uuid(),
        durable=False,
        auto_delete=True,
        max_priority=4,
        consumer_arguments={"x-priority": 4},
    )

    self.producer.publish(
        "",
        exchange="",
        routing_key="config-request-queue",
        reply_to=callback_queue.name,
        correlation_id=self.correlation_id,
        retry=True,
        declare=[
            Queue(
                "config-request-queue",
                durable=False,
                max_priority=4,
                consumer_arguments={"x-priority": 4},
            ),
            callback_queue,
        ],
        priority=4,
        serializer="ujson",
    )

    with Consumer(
        self.connection,
        on_message=self.handle_config_request_reply,
        queues=[callback_queue],
        accept=["ujson"],
    ):
        while self.rules is None:
            self.connection.drain_events()
    log.debug("{}".format(self.rules))

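# The reply handler wired in above is not shown here; a hedged sketch of what
# it plausibly looks like, inferred only from self.correlation_id and the
# `while self.rules is None` loop above (the payload shape is an assumption):
def handle_config_request_reply(self, message):
    message.ack()
    if self.correlation_id == message.properties.get('correlation_id'):
        # unblock config_request_rpc by populating self.rules
        self.rules = message.payload.get('rules')
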
def test_produce__consume_large_messages(
        self, bytes=1048576, n=10,
        charset=string.punctuation + string.letters + string.digits):
    if not self.verify_alive():
        return
    bytes = min(filter(None, [bytes, self.message_size_limit]))
    messages = ["".join(random.choice(charset)
                        for j in xrange(bytes)) + "--%s" % n
                for i in xrange(n)]
    digests = []
    chan1 = self.connection.channel()
    consumer = Consumer(chan1, self.queue)
    for queue in consumer.queues:
        queue.purge()
    producer = Producer(chan1, self.exchange)
    for i, message in enumerate(messages):
        producer.publish({"text": message, "i": i},
                         routing_key=self.prefix)
        digests.append(self._digest(message))

    received = [(msg["i"], msg["text"])
                for msg in consumeN(self.connection, consumer, n)]
    self.assertEqual(len(received), n)

    ordering = [i for i, _ in received]
    if ordering != range(n):
        warnings.warn("%s did not deliver messages in FIFO order: %r" % (
            self.transport, ordering))

    for i, text in received:
        if text != messages[i]:
            raise AssertionError("%i: %r is not %r" % (
                i, text[-100:], messages[i][-100:]))
        self.assertEqual(self._digest(text), digests[i])

    chan1.close()
    self.purge([self.queue.name])

def control_with_reply(self, command, timeout=5):
    logger.warn('checking {} {} for {}'.format(self.service, command,
                                               self.queuename))
    reply_queue = Queue(name="amq.rabbitmq.reply-to")
    self.result = None
    with Connection(settings.BROKER_URL) as conn:
        with Consumer(conn, reply_queue, callbacks=[self.process_message],
                      no_ack=True):
            self.publish({'control': command}, conn,
                         reply_to='amq.rabbitmq.reply-to')
            try:
                conn.drain_events(timeout=timeout)
            except socket.timeout:
                logger.error('{} did not reply within {}s'.format(
                    self.service, timeout))
                raise
    return self.result

def listen_for_response(self, connection):
    """
    Set up a consumer and listen for a response. If successful, this will
    return the contents of :py:attr:`self.result` which will have been
    set up by the callback.

    :param connection: A Kombu Connection instance.
    :return: A dictionary containing the result, or None if the request
        failed.
    """
    self.result = None
    queue = self.get_response_queue(connection)
    with Consumer(connection, queue, callbacks=[self.callback]):
        while self.result is None:
            connection.drain_events(timeout=self.amqp_timeout)
    return self.process_response(self.result)

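# Note that drain_events raises socket.timeout once amqp_timeout elapses with
# no reply, so callers typically wrap the loop; a minimal standalone sketch
# under that assumption (broker URL and queue name are hypothetical):
import socket
from kombu import Connection, Queue, Consumer

replies = []

with Connection('amqp://guest:guest@localhost//') as conn:
    with Consumer(conn, Queue('replies'), no_ack=True,
                  callbacks=[lambda body, message: replies.append(body)]):
        try:
            while not replies:
                conn.drain_events(timeout=3)
        except socket.timeout:
            pass  # request failed: no reply within the window

print(replies[0] if replies else None)
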
def call(self, message: dict, event_type: str, entity_type: str,
         site: Site) -> any:
    self.response = None
    self.correlation_id = uuid()
    with Producer(self.connection) as producer:
        _logger.info(
            f'CELERY RPC call {site.domain} with {event_type} - {entity_type} {message} reply to {self.correlation_id}'
        )
        producer.publish(
            {
                "event_type": event_type,
                "entity_type": entity_type,
                "entity_site": site.domain,
                "data": message
            },
            exchange='',
            routing_key=self.routing_key,
            declare=[self.callback_queue],
            reply_to=self.callback_queue.name,
            correlation_id=self.correlation_id,
            serializer='msgpack',
        )
    with Consumer(self.connection,
                  on_message=self.on_response,
                  queues=[self.callback_queue],
                  no_ack=True):
        _logger.info(
            f'CELERY RPC call consume {site.domain} with {event_type} - {entity_type} {message} reply to {self.correlation_id}'
        )
        t_current = time.time()
        while self.response is None:
            self.connection.drain_events(timeout=1)
            # time.sleep(0.25)  # sleep for 250 milliseconds
            # if time.time() >= t_current + 60000:
            #     break
    _logger.info(
        f'CELERY RPC call consume {site.domain} with response {self.response}'
    )
    return self.response

def consume(self, conn, queue):
    with conn.channel() as channel:
        consumer = Consumer(
            channel=channel,
            queues=[queue],
            accept=['json'],
        )
        consumer.register_callback(self._process_message)
        consumer.consume()

        while True:
            try:
                conn.drain_events(timeout=5)
            except socket.timeout:
                self.app.logger.debug('Timeout waiting for events')
                conn.heartbeat_check()

def call(self, method):
    if method not in ALLOWED_METHODS:
        raise Exception("Method '{}' not allowed".format(method))

    self.response = None
    self.correlation_id = uuid()
    with Producer(self.connection) as producer:
        producer.publish(
            {'method': method},
            exchange=self.exchange,
            routing_key=self.routing_key,
            declare=[self.callback_queue],
            reply_to=self.callback_queue.name,
            correlation_id=self.correlation_id,
        )
    with Consumer(self.connection,
                  on_message=self.on_response,
                  queues=[self.callback_queue],
                  no_ack=True):
        while self.response is None:
            self.connection.drain_events()
    return self.response

def send_request(self, fun, args, kwargs):
    payload = {'fun': fun, 'args': args, 'kwargs': kwargs}
    with Producer(self.connection) as producer:
        producer.publish(
            payload,
            exchange='',
            routing_key='rpc_queue',
            declare=[self.callback_queue],
            reply_to=self.callback_queue.name,
            correlation_id=self.correlation_id,
        )
    # Consume the reply published by the server; on receipt, the
    # on_response callback fires.
    with Consumer(self.connection,
                  on_message=self.on_response,
                  queues=[self.callback_queue],
                  no_ack=True):
        while self.response is None:
            self.connection.drain_events()
    return self.response