def init_adversary():
    global __username, __my_type, __connection, __global_exchange, __producer, \
        __backchannel, __backch_producer, __adv_exchange, __adv_queue  # , __my_queue
    __username = '******'
    __my_type = 'adversary'
    # adversary has broadcast-only access to the regular exchange
    __connection = Connection('amqp://')
    __connection.connect()
    __global_exchange = Exchange('broadcast', type='fanout', durable=False,
                                 delivery_mode=1)
    __global_exchange.maybe_bind(__connection)
    __producer = __connection.Producer(__connection)

    __backchannel = Connection('amqp://')
    __backchannel.connect()
    __adv_exchange = Exchange('adversary', durable=False, delivery_mode=1)
    __adv_exchange.maybe_bind(__backchannel)
    __backch_producer = __backchannel.Producer(__backchannel)
    __adv_queue = Queue('adversary', exchange=__adv_exchange,
                        routing_key='adversary', durable=False)
    __adv_queue = __adv_queue(__backchannel)
    __adv_queue.declare()
def get_new_connection_pipe(self):
    """
    Create a new queue connection.
    :return:
    """
    with self.mutex:
        if self.available <= 0:
            raise GetConnectionException
        self.available -= 1
    try:
        conn = Connection(hostname=self.host,
                          port=self.port,
                          virtual_host=self.virtual_host,
                          heartbeat=self.heartbeat_interval,
                          userid=self.name,
                          password=self.password)
        producer = conn.Producer()
        return ConnectionPipe(conn, producer)
    except Exception:
        # creation failed: give back the slot reserved above
        with self.mutex:
            self.available += 1
        raise GetConnectionException
class MessageQueuePipeline(object):
    """Emit processed items to a RabbitMQ exchange/queue"""

    def __init__(self, host_name, exchange_name):
        self.q_connection = Connection('amqp://' + host_name)
        self.q_exchange = Exchange(exchange_name, 'direct', durable=True)
        dispatcher.connect(self.spider_opened, signals.spider_opened)
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    @classmethod
    def from_settings(cls, settings):
        host_name = settings.get('BROKER_HOST')
        exchange_name = settings.get('EXCHANGE_NAME')
        return cls(host_name, exchange_name)

    def spider_opened(self, spider):
        self.producer = self.q_connection.Producer(serializer='json',
                                                   exchange=self.q_exchange)

    def spider_closed(self, spider):
        self.producer.close()

    def process_item(self, item, spider):
        return deferToThread(self._process_item, item, spider)

    def _process_item(self, item, spider):
        self.producer.publish(dict(item))
        return item
class RabbitMQ:
    config = None
    connection = None
    exchange = None

    def init_app(self, app):
        self.config = app.config
        self.connection = Connection(self.config.get("RABMQ_RABBITMQ_URL"))
        self.exchange = Exchange(
            name=self.config.get("RABMQ_SEND_EXCHANGE_NAME"),
            type=self.config.get("RABMQ_SEND_EXCHANGE_TYPE") or "topic",
            auto_delete=False,
            durable=True,
        )

    def send(self, body, routing_key):
        correlation_id = str(uuid.uuid4())
        # Dedicated reply-to queue, expiring after RABMQ_REPLY_EXPIRES
        reply_to = Queue(
            name=correlation_id,
            expires=self.config.get("RABMQ_REPLY_EXPIRES")
        )
        producer = self.connection.Producer(serializer="json")
        producer.publish(
            body,
            exchange=self.exchange,
            routing_key=routing_key,
            reply_to=reply_to.name,
            correlation_id=correlation_id,
        )
        return correlation_id
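A minimal sketch of how a caller might collect the RPC-style reply that send() above sets up. The wait_for_reply helper, the rabbitmq instance, and the 5-second timeout are illustrative assumptions, not part of the original class; the reply queue name equals the correlation id that send() returns.

from kombu import Queue

def wait_for_reply(rabbitmq, correlation_id, timeout=5):
    # send() names the dedicated reply queue after the correlation id.
    reply_queue = Queue(name=correlation_id)
    result = {}

    def on_reply(body, message):
        # Match the reply to our request via the correlation id property.
        if message.properties.get('correlation_id') == correlation_id:
            result['body'] = body
        message.ack()

    with rabbitmq.connection.Consumer(queues=[reply_queue],
                                      callbacks=[on_reply]):
        rabbitmq.connection.drain_events(timeout=timeout)
    return result.get('body')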
def connect_rabbitmq():
    current_app.logger.info('Connecting to rabbitmq on %s',
                            current_app.config['RABBITMQ_CONNECTION_STRING'])
    connection = Connection(current_app.config['RABBITMQ_CONNECTION_STRING'])
    producer = connection.Producer()
    return producer
class RabbitMQPublisher(object):
    def __init__(self, source_id, network_id, model_key=None, run_key=None):
        broker_url = get_broker_url(model_key)
        self.conn = Connection(broker_url)
        self.conn.connect()
        self.channel = 'oa-{source_id}-{network_id}-{model_key}'.format(
            source_id=source_id,
            network_id=network_id,
            model_key=model_key or environ.get(constants.MODEL_KEY)
        )
        if run_key is not None:
            self.channel += '-{}'.format(run_key)
        self.producer = self.conn.Producer(serializer='json')

    # set up the Exchange, Queue, and Producer
    media_exchange = Exchange('media', 'direct', durable=True)
    video_queue = Queue('video', exchange=media_exchange, routing_key='video')

    # publish updates
    def publish(self, payload):
        # Producer.publish() takes the message body first; the routing key is
        # a keyword argument (the original passed the two the other way round).
        self.producer.publish(json.dumps(payload), routing_key=self.channel)

    def close(self):
        self.conn.release()
class KombuMessenger(object):
    def __init__(self):
        from kombu import Connection, Exchange
        self.conn = Connection(settings.MESSAGE_BUS['URL'],
                               **settings.MESSAGE_BUS.get('OPTIONS', {}))
        self.exchange = Exchange(**settings.MESSAGE_BUS['EXCHANGE'])
        self.messenger = self.conn.Producer()

    def send_message(self, topic, msg):
        self.messenger.publish(msg, exchange=self.exchange, routing_key=topic)
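Hypothetical usage of KombuMessenger above, assuming Django-style settings shaped roughly as in the comment; the URL, exchange name, and topic are placeholders.

# settings.py (assumed shape, not from the original source):
# MESSAGE_BUS = {
#     'URL': 'amqp://guest:guest@localhost:5672//',
#     'OPTIONS': {},
#     'EXCHANGE': {'name': 'bus', 'type': 'topic'},
# }
messenger = KombuMessenger()
messenger.send_message('users.created', {'id': 42})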
def postLogs(logcache):
    '''post logs to rabbitmq
       expects a queue object from the multiprocessing library
       looks for a list of servers in options.mqservers separated by commas
       creates connections to each, initializes an exchange and a producer
       and randomly chooses one to publish incoming messages to.
    '''
    mqproducers = list()
    canQuit = False
    logger.info('starting message queue posting process')
    # connect and declare the message queue/kombu objects,
    # with a list of producers for every potential message queue server.
    for server in options.mqservers.split(','):
        connString = 'amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,
                                                       options.mqpassword,
                                                       server,
                                                       options.mqport)
        mqConn = Connection(connString)
        eventTaskExchange = Exchange(name=options.taskexchange, type='direct',
                                     durable=True)
        eventTaskExchange(mqConn).declare()
        mqproducer = mqConn.Producer(serializer='json')
        ensurePublish = mqConn.ensure(mqproducer, mqproducer.publish,
                                      max_retries=10)
        mqproducers.append(ensurePublish)

    while True:
        try:
            # see if we have anything to post,
            # waiting a bit to not end until we are told we can stop.
            postdata = logcache.get(True, 1)
            if postdata is None:
                # signalled from parent process that it's ok to stop.
                logcache.task_done()
                canQuit = True
            elif len(postdata) > 0:
                # post to eventtask exchange
                try:
                    publisher = random.choice(mqproducers)
                    publisher(postdata,
                              exchange=eventTaskExchange,
                              routing_key=options.taskexchange)
                except Exception as e:
                    logger.error('Exception while posting message: %r' % e)
                logcache.task_done()
        except Empty:
            if canQuit:
                logger.info('shutting down message queue publisher')
                break

    logger.info('{0} done'.format('log posting task'))
def publish(queue, routing_key, body):
    connection = Connection(app.config.get('AMQP_QUEUE_URI'))
    try:
        with connection.Producer(serializer='json') as producer:
            producer.publish(body,
                             exchange=exchange,
                             routing_key=routing_key,
                             declare=[queue])
    finally:
        connection.release()
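A usage sketch for publish() above, assuming the module-level exchange it references is a durable direct exchange; the exchange, queue, and payload names are illustrative.

from kombu import Exchange, Queue

exchange = Exchange('events', type='direct', durable=True)
events_queue = Queue('events', exchange=exchange, routing_key='events')

# Declares the queue on first use, then publishes a JSON-serialized body.
publish(events_queue, 'events', {'event': 'user.created', 'id': 42})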
def test_retry_policy(self):
    with pytest.raises(OperationalError):
        conn = Connection('pyamqp://localhost:8000')
        producer = conn.Producer(serializer='json')
        producer.publish('Hello world!', retry=True, retry_policy={
            'interval_start': 0,
            'interval_step': 1,
            'interval_max': 5,
            'max_retries': 3,
        })
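For reference, the retry_policy above roughly yields waits of interval_start + n * interval_step seconds between attempts, capped at interval_max, for up to max_retries retries. The sketch below is an approximation of that schedule for illustration, not kombu's actual retry code.

def backoff_schedule(interval_start=0, interval_step=1,
                     interval_max=5, max_retries=3):
    # Approximate waits between publish attempts: 0s, 1s, 2s for the
    # policy used in the test above.
    return [min(interval_start + n * interval_step, interval_max)
            for n in range(max_retries)]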
def init(username, msgtype):
    global __username, __my_type, __connection, __global_exchange, __producer, \
        __my_queue, __backchannel, __backch_producer, __adv_exchange, __adv_queue
    __username = username
    __my_type = msgtype
    print "Username: "******"Q:"
    #print __my_queue
    #__consumer = __connection.Consumer(queues=__my_queue)
    #we're not using consumer 'cause we're using queue get
    #__recv_loop = drain_consumer(__consumer, timeout=1)
    __backchannel = Connection('amqp://')
    __backchannel.connect()
    __adv_exchange = Exchange('adversary', durable=False, delivery_mode=1)
    __adv_exchange.maybe_bind(__backchannel)
    __backch_producer = __backchannel.Producer(__backchannel)
    __adv_queue = Queue(username + '-adv', exchange=__adv_exchange,
                        routing_key=username + '-adv', durable=False)
    __adv_queue = __adv_queue(__backchannel)
    __adv_queue.declare()
class KombuPublisher(AbstractPublisher):
    """
    Uses redis as the broker. This is the simplest way to use redis and is
    unreliable; it can easily lose large numbers of messages. If you insist on
    using redis as the broker, use one of the other redis consumer types.
    """

    def custom_init(self):
        self._kombu_broker_url_prefix = frame_config.KOMBU_URL.split(":")[0]
        logger_name = f'{self._logger_prefix}{self.__class__.__name__}--{self._kombu_broker_url_prefix}--{self._queue_name}'
        self.logger = LogManager(logger_name).get_logger_and_add_handlers(
            self._log_level_int,
            log_filename=f'{logger_name}.log' if self._is_add_file_handler else None,
            formatter_template=frame_config.NB_LOG_FORMATER_INDEX_FOR_CONSUMER_AND_PUBLISHER,
        )

    def init_broker(self):
        self.exchange = Exchange('distributed_framework_exchange', 'direct',
                                 durable=True)
        self.queue = Queue(self._queue_name, exchange=self.exchange,
                           routing_key=self._queue_name, auto_delete=False)
        self.conn = Connection(frame_config.KOMBU_URL)
        self.queue(self.conn).declare()
        self.producer = self.conn.Producer(serializer='json')
        self.channel = self.producer.channel  # type: Channel
        self.channel.body_encoding = 'no_encode'
        # self.channel = self.conn.channel()  # type: Channel
        # self.channel.exchange_declare(exchange='distributed_framework_exchange', durable=True, type='direct')
        # self.queue = self.channel.queue_declare(queue=self._queue_name, durable=True)
        self.logger.warning('using the kombu library to connect to the broker')

    @deco_mq_conn_error
    def concrete_realization_of_publish(self, msg):
        self.producer.publish(json.loads(msg), exchange=self.exchange,
                              routing_key=self._queue_name,
                              declare=[self.queue])

    @deco_mq_conn_error
    def clear(self):
        self.channel.queue_purge(self._queue_name)

    @deco_mq_conn_error
    def get_message_count(self):
        # queue = self.channel.queue_declare(queue=self._queue_name, durable=True)
        # return queue.method.message_count
        # self.logger.warning(self.channel._size(self._queue_name))
        if self._kombu_broker_url_prefix == 'amqp':
            '''amqp tries to use librabbitmq but falls back to pyamqp.'''
            queue_declare_ok_t_named_tuple = self.channel.queue_declare(
                queue=self._queue_name, durable=True, auto_delete=False)
            # queue_declare_ok_t(queue='test_rabbit_queue2', message_count=100000, consumer_count=0)
            # print(type(queue_declare_ok_t_named_tuple), queue_declare_ok_t_named_tuple)
            return queue_declare_ok_t_named_tuple.message_count
        # noinspection PyProtectedMember
        return self.channel._size(self._queue_name)

    def close(self):
        self.channel.close()
        self.conn.close()
        self.logger.warning('closing the kombu connection')
def test_accept__content_disallowed(self):
    conn = Connection('memory://')
    q = Queue('foo', exchange=self.exchange)
    p = conn.Producer()
    p.publish(
        {'complex': object()},
        declare=[q],
        exchange=self.exchange,
        serializer='pickle',
    )
    callback = Mock(name='callback')
    with conn.Consumer(queues=[q], callbacks=[callback]) as consumer:
        with self.assertRaises(consumer.ContentDisallowed):
            conn.drain_events(timeout=1)
    callback.assert_not_called()
def test_accept__content_allowed(self):
    conn = Connection('memory://')
    q = Queue('foo', exchange=self.exchange)
    p = conn.Producer()
    p.publish(
        {'complex': object()},
        declare=[q],
        exchange=self.exchange,
        serializer='pickle',
    )
    callback = Mock(name='callback')
    with conn.Consumer(queues=[q], accept=['pickle'], callbacks=[callback]):
        conn.drain_events(timeout=1)
    callback.assert_called()
    body, message = callback.call_args[0]
    self.assertTrue(body['complex'])
def broadcastAttacker(attacker):
    '''
    send this attacker info to our message queue
    '''
    try:
        connString = 'amqp://{0}:{1}@{2}:{3}/{4}'.format(options.mquser,
                                                         options.mqpassword,
                                                         options.mqserver,
                                                         options.mqport,
                                                         options.mqvhost)
        if options.mqprotocol == 'amqps':
            mqSSL = True
        else:
            mqSSL = False
        mqConn = Connection(connString, ssl=mqSSL)

        alertExchange = Exchange(name=options.alertexchange,
                                 type='topic',
                                 durable=True)
        alertExchange(mqConn).declare()
        mqproducer = mqConn.Producer(serializer='json')
        logger.debug('Kombu configured')
    except Exception as e:
        logger.error('Exception while configuring kombu for alerts: {0}'.format(e))
    try:
        # generate an 'alert' structure for this attacker:
        mqAlert = dict(severity='NOTICE', category='attacker')
        if 'datecreated' in attacker.keys():
            mqAlert['utctimestamp'] = attacker['datecreated'].isoformat()
        mqAlert['summary'] = 'New Attacker: {0} events: {1}, alerts: {2}'.format(
            attacker['indicators'],
            attacker['eventscount'],
            attacker['alertscount'])
        logger.debug(mqAlert)
        ensurePublish = mqConn.ensure(mqproducer,
                                      mqproducer.publish,
                                      max_retries=10)
        ensurePublish(
            mqAlert,
            exchange=alertExchange,
            routing_key=options.routingkey
        )
    except Exception as e:
        logger.error('Exception while publishing attacker: {0}'.format(e))
class Amqp(object):
    def __init__(self, url, exchange, queue, routing_key):
        self.conn = Connection(url)
        self.exchange = Exchange(exchange, 'direct')
        self.routing_key = routing_key
        self.queue = Queue(queue, self.exchange, self.routing_key)
        self.producer = None
        self.consumer = None

    def send(self, obj):
        if not self.producer:
            self.producer = self.conn.Producer()
        self.producer.publish(obj,
                              exchange=self.exchange,
                              routing_key=self.routing_key,
                              declare=[self.queue],
                              serializer='json',
                              compression='zlib')

    def poll(self, cb_func):
        if not self.consumer:
            self.consumer = self.conn.Consumer(self.queue,
                                               callbacks=[cb_func])
            self.consumer.qos(prefetch_count=1)
        self.consumer.consume()
        while True:
            self.conn.drain_events()

    def _release(self):
        if self.consumer:
            self.consumer.close()
            self.consumer = None
        if self.producer:
            self.producer.close()
            self.producer = None
        if self.conn:
            self.conn.release()
            self.conn = None

    def __enter__(self):
        return self

    def __exit__(self, exec_type, exc_value, traceback):
        self._release()
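A hedged usage sketch for the Amqp helper above; the broker URL and the exchange/queue names are placeholders. Using it as a context manager guarantees _release() runs even if publishing fails.

with Amqp('amqp://guest:guest@localhost:5672//',
          exchange='jobs', queue='jobs', routing_key='jobs') as amqp:
    # Queue is declared on publish; body is JSON-serialized and zlib-compressed.
    amqp.send({'task': 'resize', 'image_id': 7})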
def handle(self, command):
    command_body = {
        "command": command.command,
        "argument": command.argument,
        "user_id": command.user_id,
        "chatroom_id": command.chatroom_id,
    }
    connection = Connection(settings.AMQP_ADDRESS)
    connection.connect()
    producer = connection.Producer()
    producer.publish(
        command_body,
        retry=True,
        exchange=settings.EXCHANGE,
        routing_key=settings.SENDER_ROUTING_KEY,
        declare=[settings.BOT_QUEUE],
    )
    connection.release()
class AmqpConnection(object):
    """
    Connect to the AMQP backend for easy publishing without having to
    hack our way through Celery.
    """

    def __init__(self):
        self.conn = None
        self.default_exchange = Exchange('vumi', 'direct', durable=True)
        self.metrics_exchange = Exchange('vumi.metrics', 'direct',
                                         durable=True)

    def connect(self, dsn=None):
        if dsn is None:
            from django.conf import settings
            dsn = 'librabbitmq://%s:%s@%s:%s/%s' % (
                settings.BROKER_USER, settings.BROKER_PASSWORD,
                settings.BROKER_HOST, settings.BROKER_PORT,
                settings.BROKER_VHOST)
        self.conn = Connection(dsn)
        self.producer = self.conn.Producer()

    def is_connected(self):
        return self.conn and self.conn.connected

    def publish(self, message, exchange, routing_key):
        self.producer.publish(message, exchange=exchange,
                              routing_key=routing_key)

    def publish_command_message(self, command):
        return self.publish(command.to_json(),
                            exchange=self.default_exchange,
                            routing_key='vumi.api')

    def publish_metric_message(self, metric):
        return self.publish(metric.to_json(),
                            exchange=self.metrics_exchange,
                            routing_key='vumi.metrics')

    def get_metric_publisher(self):
        return MetricPublisher(self)
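Hypothetical usage of AmqpConnection above, assuming Django settings provide the BROKER_* values the default DSN is built from; the payload is illustrative.

amqp = AmqpConnection()
amqp.connect()  # falls back to settings.BROKER_* when no dsn is given
if amqp.is_connected():
    amqp.publish({'hello': 'world'},
                 exchange=amqp.default_exchange,
                 routing_key='vumi.api')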
class SearchEngineMQ(object):
    def __init__(self, conf, env, user_info_export):
        self.conf = conf
        self.env = env
        self.user_info_export = user_info_export
        self._gen_id()
        self._stat_init()
        self.consumer = SEConsumer(self.connect, self.id)
        self.consumer.register_callbacks(self.user_info_export.export)

    def _gen_id(self):
        try:
            f = open(self.env.uuid_file())
            self.id = f.read()
            f.close()
        except IOError:
            self.id = str(uuid.uuid1())
            f = open(self.env.uuid_file(), "w")
            f.write(self.id)
            f.close()

    def _stat_init(self):
        self.stat_exchange = Exchange(SE_STATS_EXCHANGE, "topic",
                                      delivery_mode=1)
        self.stat_routing_key = ".".join([self.id, SE_STATS_TOPIC_SUFFIX])
        self.connect = Connection(self.conf.rabbit_connection())
        self.stat_producer = self.connect.Producer(
            exchange=self.stat_exchange,
            routing_key=self.stat_routing_key)

    def publish_stats(self, stat):
        msg = {MESSAGE_ID: self.id, MESSAGE_STATS: stat}
        self.stat_producer.publish(msg)

    def run(self):
        self.consumer.run()
class ReportMQ(object):
    def __init__(self, report_gen, conf):
        self.report_gen = report_gen
        self.conf = conf
        self.connect = Connection(self.conf.rabbit_connection())
        self.report_res_exchange = Exchange(REPORT_RESPONSE_EXCHANGE, "direct",
                                            delivery_mode=1)
        self.report_res = self.connect.Producer(
            exchange=self.report_res_exchange,
            routing_key=REPORT_RESPONSE_ROUTING_KEY)
        self.report_req_consumer = ReportRequestConsumer(self.connect, self)

    def report_request(self, body):
        status = HTTP_INTERNAL_ERROR
        if self.report_gen.report_request(body[REPORT_USERNAME],
                                          body[REPORT_REQUEST_START_TIME],
                                          body[REPORT_REQUEST_END_TIME]):
            status = HTTP_OK
        msg = {REPORT_USERNAME: body[REPORT_USERNAME],
               REPORT_STATUS: status}
        self.report_res.publish(msg)

    def run(self):
        self.report_req_consumer.run()
def revoke(app_name, app_env, task_uuid: Union[str, List[str]], args):
    # Check if agent is local
    if not settings.LEEK_ENABLE_AGENT:
        return responses.control_operations_not_supported

    # Retrieve subscription
    found, subscription = lookup_subscription(app_name, app_env)
    if not found:
        return responses.task_retry_subscription_not_found

    # Prepare connection/producer
    # noinspection PyBroadException
    try:
        connection = Connection(subscription["broker"])
        connection.ensure_connection(max_retries=2)
        producer = connection.Producer()
    except AccessRefused:
        return responses.wrong_access_refused
    except Exception:
        return responses.broker_not_reachable

    arguments = {
        "task_id": task_uuid,
        **args,
    }

    # noinspection PyBroadException
    try:
        broadcast_worker_command("revoke", arguments, producer)
    except Exception as ex:
        logger.error(ex)
        return responses.task_revocation_failed

    connection.release()
    revocation_count = len(task_uuid) if isinstance(task_uuid, List) else 1
    return {"acknowledged": True, "revocation_count": revocation_count}, 200
class test_Producer(Case):

    def setUp(self):
        self.exchange = Exchange('foo', 'direct')
        self.connection = Connection(transport=Transport)
        self.connection.connect()
        self.assertTrue(self.connection.connection.connected)
        self.assertFalse(self.exchange.is_bound)

    def test_repr(self):
        p = Producer(self.connection)
        self.assertTrue(repr(p))

    def test_pickle(self):
        chan = Mock()
        producer = Producer(chan, serializer='pickle')
        p2 = pickle.loads(pickle.dumps(producer))
        self.assertEqual(p2.serializer, producer.serializer)

    def test_no_channel(self):
        p = Producer(None)
        self.assertFalse(p._channel)

    @patch('kombu.messaging.maybe_declare')
    def test_maybe_declare(self, maybe_declare):
        p = self.connection.Producer()
        q = Queue('foo')
        p.maybe_declare(q)
        maybe_declare.assert_called_with(q, p.channel, False)

    @patch('kombu.common.maybe_declare')
    def test_maybe_declare_when_entity_false(self, maybe_declare):
        p = self.connection.Producer()
        p.maybe_declare(None)
        self.assertFalse(maybe_declare.called)

    def test_auto_declare(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, auto_declare=True)
        self.assertIsNot(p.exchange, self.exchange,
                         'creates Exchange clone at bind')
        self.assertTrue(p.exchange.is_bound)
        self.assertIn('exchange_declare', channel,
                      'auto_declare declares exchange')

    def test_manual_declare(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, auto_declare=False)
        self.assertTrue(p.exchange.is_bound)
        self.assertNotIn('exchange_declare', channel,
                         'auto_declare=False does not declare exchange')
        p.declare()
        self.assertIn('exchange_declare', channel,
                      'p.declare() declares exchange')

    def test_prepare(self):
        message = {'the quick brown fox': 'jumps over the lazy dog'}
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        m, ctype, cencoding = p._prepare(message, headers={})
        self.assertDictEqual(message, anyjson.loads(m))
        self.assertEqual(ctype, 'application/json')
        self.assertEqual(cencoding, 'utf-8')

    def test_prepare_compression(self):
        message = {'the quick brown fox': 'jumps over the lazy dog'}
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        headers = {}
        m, ctype, cencoding = p._prepare(message, compression='zlib',
                                         headers=headers)
        self.assertEqual(ctype, 'application/json')
        self.assertEqual(cencoding, 'utf-8')
        self.assertEqual(headers['compression'], 'application/x-gzip')
        import zlib
        self.assertEqual(
            anyjson.loads(zlib.decompress(m).decode('utf-8')),
            message,
        )

    def test_prepare_custom_content_type(self):
        message = 'the quick brown fox'.encode('utf-8')
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        m, ctype, cencoding = p._prepare(message, content_type='custom')
        self.assertEqual(m, message)
        self.assertEqual(ctype, 'custom')
        self.assertEqual(cencoding, 'binary')
        m, ctype, cencoding = p._prepare(message, content_type='custom',
                                         content_encoding='alien')
        self.assertEqual(m, message)
        self.assertEqual(ctype, 'custom')
        self.assertEqual(cencoding, 'alien')

    def test_prepare_is_already_unicode(self):
        message = 'the quick brown fox'
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        m, ctype, cencoding = p._prepare(message, content_type='text/plain')
        self.assertEqual(m, message.encode('utf-8'))
        self.assertEqual(ctype, 'text/plain')
        self.assertEqual(cencoding, 'utf-8')
        m, ctype, cencoding = p._prepare(message, content_type='text/plain',
                                         content_encoding='utf-8')
        self.assertEqual(m, message.encode('utf-8'))
        self.assertEqual(ctype, 'text/plain')
        self.assertEqual(cencoding, 'utf-8')

    def test_publish_with_Exchange_instance(self):
        p = self.connection.Producer()
        p.channel = Mock()
        p.publish('hello', exchange=Exchange('foo'),
                  delivery_mode='transient')
        self.assertEqual(
            p._channel.basic_publish.call_args[1]['exchange'], 'foo',
        )

    def test_set_on_return(self):
        chan = Mock()
        chan.events = defaultdict(Mock)
        p = Producer(ChannelPromise(lambda: chan), on_return='on_return')
        p.channel
        chan.events['basic_return'].add.assert_called_with('on_return')

    def test_publish_retry_calls_ensure(self):
        p = Producer(Mock())
        p._connection = Mock()
        ensure = p.connection.ensure = Mock()
        p.publish('foo', exchange='foo', retry=True)
        self.assertTrue(ensure.called)

    def test_publish_retry_with_declare(self):
        p = self.connection.Producer()
        p.maybe_declare = Mock()
        p.connection.ensure = Mock()
        ex = Exchange('foo')
        p._publish('hello', 0, '', '', {}, {}, 'rk', 0, 0, ex, declare=[ex])
        p.maybe_declare.assert_called_with(ex)

    def test_revive_when_channel_is_connection(self):
        p = self.connection.Producer()
        p.exchange = Mock()
        new_conn = Connection('memory://')
        defchan = new_conn.default_channel
        p.revive(new_conn)
        self.assertIs(p.channel, defchan)
        p.exchange.revive.assert_called_with(defchan)

    def test_enter_exit(self):
        p = self.connection.Producer()
        p.release = Mock()
        self.assertIs(p.__enter__(), p)
        p.__exit__()
        p.release.assert_called_with()

    def test_connection_property_handles_AttributeError(self):
        p = self.connection.Producer()
        p.channel = object()
        p.__connection__ = None
        self.assertIsNone(p.connection)

    def test_publish(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        message = {'the quick brown fox': 'jumps over the lazy dog'}
        ret = p.publish(message, routing_key='process')
        self.assertIn('prepare_message', channel)
        self.assertIn('basic_publish', channel)
        m, exc, rkey = ret
        self.assertDictEqual(message, anyjson.loads(m['body']))
        self.assertDictContainsSubset(
            {
                'content_type': 'application/json',
                'content_encoding': 'utf-8',
                'priority': 0
            }, m)
        self.assertDictContainsSubset({'delivery_mode': 2}, m['properties'])
        self.assertEqual(exc, p.exchange.name)
        self.assertEqual(rkey, 'process')

    def test_no_exchange(self):
        chan = self.connection.channel()
        p = Producer(chan)
        self.assertFalse(p.exchange.name)

    def test_revive(self):
        chan = self.connection.channel()
        p = Producer(chan)
        chan2 = self.connection.channel()
        p.revive(chan2)
        self.assertIs(p.channel, chan2)
        self.assertIs(p.exchange.channel, chan2)

    def test_on_return(self):
        chan = self.connection.channel()

        def on_return(exception, exchange, routing_key, message):
            pass

        p = Producer(chan, on_return=on_return)
        self.assertTrue(on_return in chan.events['basic_return'])
        self.assertTrue(p.on_return)
# get config info:
parser = OptionParser()
parser.add_option("-c",
                  dest='configfile',
                  default=os.path.join(os.path.dirname(__file__),
                                       __file__).replace('.py', '.conf'),
                  help="configuration file to use")
(options, args) = parser.parse_args()
initConfig()

# connect and declare the message queue/kombu objects.
connString = 'amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,
                                               options.mqpassword,
                                               options.mqserver,
                                               options.mqport)
mqConn = Connection(connString)
eventTaskExchange = Exchange(name=options.taskexchange, type='direct',
                             durable=True)
eventTaskExchange(mqConn).declare()
eventTaskQueue = Queue(options.taskexchange, exchange=eventTaskExchange)
eventTaskQueue(mqConn).declare()
mqproducer = mqConn.Producer(serializer='json')

if __name__ == "__main__":
    run(host=options.listen_host, port=8080)
else:
    application = default_app()
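A sketch of how a request handler in this module might use the module-level producer declared above; the helper name and payload are assumptions.

def publish_event(event):
    # Publish to the task exchange declared at import time.
    mqproducer.publish(event,
                       exchange=eventTaskExchange,
                       routing_key=options.taskexchange)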
class test_Producer:

    def setup(self):
        self.exchange = Exchange('foo', 'direct')
        self.connection = Connection(transport=Transport)
        self.connection.connect()
        assert self.connection.connection.connected
        assert not self.exchange.is_bound

    def test_repr(self):
        p = Producer(self.connection)
        assert repr(p)

    def test_pickle(self):
        chan = Mock()
        producer = Producer(chan, serializer='pickle')
        p2 = pickle.loads(pickle.dumps(producer))
        assert p2.serializer == producer.serializer

    def test_no_channel(self):
        p = Producer(None)
        assert not p._channel

    @patch('kombu.messaging.maybe_declare')
    def test_maybe_declare(self, maybe_declare):
        p = self.connection.Producer()
        q = Queue('foo')
        p.maybe_declare(q)
        maybe_declare.assert_called_with(q, p.channel, False)

    @patch('kombu.common.maybe_declare')
    def test_maybe_declare_when_entity_false(self, maybe_declare):
        p = self.connection.Producer()
        p.maybe_declare(None)
        maybe_declare.assert_not_called()

    def test_auto_declare(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, auto_declare=True)
        # creates Exchange clone at bind
        assert p.exchange is not self.exchange
        assert p.exchange.is_bound
        # auto_declare declares exchange
        assert 'exchange_declare' not in channel
        p.publish('foo')
        assert 'exchange_declare' in channel

    def test_manual_declare(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, auto_declare=False)
        assert p.exchange.is_bound
        # auto_declare=False does not declare exchange
        assert 'exchange_declare' not in channel
        # p.declare() declares exchange
        p.declare()
        assert 'exchange_declare' in channel

    def test_prepare(self):
        message = {'the quick brown fox': 'jumps over the lazy dog'}
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        m, ctype, cencoding = p._prepare(message, headers={})
        assert json.loads(m) == message
        assert ctype == 'application/json'
        assert cencoding == 'utf-8'

    def test_prepare_compression(self):
        message = {'the quick brown fox': 'jumps over the lazy dog'}
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        headers = {}
        m, ctype, cencoding = p._prepare(message, compression='zlib',
                                         headers=headers)
        assert ctype == 'application/json'
        assert cencoding == 'utf-8'
        assert headers['compression'] == 'application/x-gzip'
        import zlib
        assert json.loads(zlib.decompress(m).decode('utf-8')) == message

    def test_prepare_custom_content_type(self):
        message = 'the quick brown fox'.encode('utf-8')
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        m, ctype, cencoding = p._prepare(message, content_type='custom')
        assert m == message
        assert ctype == 'custom'
        assert cencoding == 'binary'
        m, ctype, cencoding = p._prepare(message, content_type='custom',
                                         content_encoding='alien')
        assert m == message
        assert ctype == 'custom'
        assert cencoding == 'alien'

    def test_prepare_is_already_unicode(self):
        message = 'the quick brown fox'
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        m, ctype, cencoding = p._prepare(message, content_type='text/plain')
        assert m == message.encode('utf-8')
        assert ctype == 'text/plain'
        assert cencoding == 'utf-8'
        m, ctype, cencoding = p._prepare(message, content_type='text/plain',
                                         content_encoding='utf-8')
        assert m == message.encode('utf-8')
        assert ctype == 'text/plain'
        assert cencoding == 'utf-8'

    def test_publish_with_Exchange_instance(self):
        p = self.connection.Producer()
        p.channel = Mock()
        p.channel.connection.client.declared_entities = set()
        p.publish('hello', exchange=Exchange('foo'),
                  delivery_mode='transient')
        assert p._channel.basic_publish.call_args[1]['exchange'] == 'foo'

    def test_publish_with_expiration(self):
        p = self.connection.Producer()
        p.channel = Mock()
        p.channel.connection.client.declared_entities = set()
        p.publish('hello', exchange=Exchange('foo'), expiration=10)
        properties = p._channel.prepare_message.call_args[0][5]
        assert properties['expiration'] == '10000'

    def test_publish_with_reply_to(self):
        p = self.connection.Producer()
        p.channel = Mock()
        p.channel.connection.client.declared_entities = set()
        assert not p.exchange.name
        p.publish('hello', exchange=Exchange('foo'), reply_to=Queue('foo'))
        properties = p._channel.prepare_message.call_args[0][5]
        assert properties['reply_to'] == 'foo'

    def test_set_on_return(self):
        chan = Mock()
        chan.events = defaultdict(Mock)
        p = Producer(ChannelPromise(lambda: chan), on_return='on_return')
        p.channel
        chan.events['basic_return'].add.assert_called_with('on_return')

    def test_publish_retry_calls_ensure(self):
        p = Producer(Mock())
        p._connection = Mock()
        p._connection.declared_entities = set()
        ensure = p.connection.ensure = Mock()
        p.publish('foo', exchange='foo', retry=True)
        ensure.assert_called()

    def test_publish_retry_with_declare(self):
        p = self.connection.Producer()
        p.maybe_declare = Mock()
        p.connection.ensure = Mock()
        ex = Exchange('foo')
        p._publish('hello', 0, '', '', {}, {}, 'rk', 0, 0, ex, declare=[ex])
        p.maybe_declare.assert_called_with(ex)

    def test_revive_when_channel_is_connection(self):
        p = self.connection.Producer()
        p.exchange = Mock()
        new_conn = Connection('memory://')
        defchan = new_conn.default_channel
        p.revive(new_conn)
        assert p.channel is defchan
        p.exchange.revive.assert_called_with(defchan)

    def test_enter_exit(self):
        p = self.connection.Producer()
        p.release = Mock()
        assert p.__enter__() is p
        p.__exit__()
        p.release.assert_called_with()

    def test_connection_property_handles_AttributeError(self):
        p = self.connection.Producer()
        p.channel = object()
        p.__connection__ = None
        assert p.connection is None

    def test_publish(self):
        channel = self.connection.channel()
        p = Producer(channel, self.exchange, serializer='json')
        message = {'the quick brown fox': 'jumps over the lazy dog'}
        ret = p.publish(message, routing_key='process')
        assert 'prepare_message' in channel
        assert 'basic_publish' in channel
        m, exc, rkey = ret
        assert json.loads(m['body']) == message
        assert m['content_type'] == 'application/json'
        assert m['content_encoding'] == 'utf-8'
        assert m['priority'] == 0
        assert m['properties']['delivery_mode'] == 2
        assert exc == p.exchange.name
        assert rkey == 'process'

    def test_no_exchange(self):
        chan = self.connection.channel()
        p = Producer(chan)
        assert not p.exchange.name

    def test_revive(self):
        chan = self.connection.channel()
        p = Producer(chan)
        chan2 = self.connection.channel()
        p.revive(chan2)
        assert p.channel is chan2
        assert p.exchange.channel is chan2

    def test_on_return(self):
        chan = self.connection.channel()

        def on_return(exception, exchange, routing_key, message):
            pass

        p = Producer(chan, on_return=on_return)
        assert on_return in chan.events['basic_return']
        assert p.on_return
def retry_task(app_name, task_doc):
    if task_doc.get("state") not in STATES_TERMINAL:
        return responses.task_retry_state_precondition_failed

    # Check if task is routable
    if not task_doc.get("exchange", "tasks") and not task_doc.get("routing_key"):
        return responses.task_not_routable

    # Check if agent is local
    if not settings.LEEK_ENABLE_AGENT:
        return responses.control_operations_not_supported

    # Retrieve subscription
    found, subscription = lookup_subscription(app_name, task_doc['app_env'])
    if not found:
        return responses.task_retry_subscription_not_found

    # Prepare connection/producer
    # noinspection PyBroadException
    try:
        connection = Connection(subscription["broker"])
        connection.ensure_connection(max_retries=2)
        producer = connection.Producer()
    except AccessRefused:
        return responses.wrong_access_refused
    except Exception:
        return responses.broker_not_reachable

    # Prepare args
    argsrepr = task_doc.get("args") or "()"
    kwargsrepr = task_doc.get("kwargs") or "{}"
    # noinspection PyBroadException
    try:
        args = ast.literal_eval(argsrepr)
        kwargs = ast.literal_eval(kwargsrepr)
    except Exception:
        return responses.malformed_args_or_kwarg_repr

    # Prepare task ids
    task_id = uuid()
    if not task_doc.get("root_id"):
        root_id = task_id
    else:
        root_id = None

    headers = {
        "lang": "py",
        "task": task_doc["name"],
        "id": task_id,
        "shadow": None,
        "eta": None,
        "expires": None,
        "group": None,
        "group_index": None,
        "retries": 0,
        "timelimit": [None, None],
        "root_id": root_id,
        "parent_id": task_doc.get("parent_id"),
        "argsrepr": argsrepr,
        "kwargsrepr": kwargsrepr,
        "origin": "leek@control",
        "ignore_result": True,
    }
    properties = {
        "correlation_id": task_id,
        "reply_to": '',
    }
    body = (
        args, kwargs, {
            "callbacks": None,
            "errbacks": None,
            "chain": None,
            "chord": None,
        },
    )

    # Queue actual task
    try:
        producer.publish(
            body,
            exchange=task_doc["exchange"],
            routing_key=task_doc["routing_key"],
            serializer="json",
            compression=None,
            retry=False,
            delivery_mode=2,  # Persistent
            headers=headers,
            **properties
        )
    except Exception as ex:
        logger.error(ex)
        return responses.task_retry_failed

    # Send task-sent event
    sent_event = {
        "type": "task-sent",
        "uuid": task_id,
        "root_id": root_id,
        "parent_id": task_doc.get("parent_id"),
        "name": task_doc["name"],
        "args": argsrepr,
        "kwargs": kwargsrepr,
        "retries": 0,
        "eta": None,
        "expires": None,
        # --
        "queue": task_doc["queue"],
        "exchange": task_doc["exchange"],
        "routing_key": task_doc["routing_key"],
        # --
        "hostname": "leek@control",
        "utcoffset": time.timezone // 3600,
        "pid": 1,
        "clock": 1,
        "timestamp": time.time(),
    }
    # noinspection PyBroadException
    try:
        producer.publish(
            sent_event,
            routing_key=subscription["routing_key"],
            exchange=subscription["exchange"],
            retry=False,
            serializer="json",
            headers={"hostname": "leek@control"},
            delivery_mode=2,
            expiration=60 * 60 * 24 * 2  # EXPIRES IN 2 DAYS
        )
    except Exception as ex:
        logger.warning(f"Failed to send `task-sent` event for the retried task! with exception: {ex}")

    connection.release()
    return {"task_id": task_id}, 200
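A tiny illustration of the argsrepr/kwargsrepr round-trip that retry_task() relies on: the string reprs stored with the task document are parsed back into Python values with ast.literal_eval before re-publishing. The values shown are illustrative.

import ast

argsrepr, kwargsrepr = "(1, 'x')", "{'n': 2}"
args = ast.literal_eval(argsrepr)      # -> (1, 'x')
kwargs = ast.literal_eval(kwargsrepr)  # -> {'n': 2}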
class RabbitMQConnection:
    """ Class handling receiving and publishing messages on the RabbitMQ
    message bus """

    def __init__(self, message_callback):
        self.message_callback = message_callback
        self.exchange = Exchange(CFG.exchange)
        self.connection = Connection(transport='amqp',
                                     hostname=CFG.host,
                                     port=CFG.port,
                                     userid=CFG.username,
                                     password=CFG.password,
                                     virtual_host=CFG.vhost,
                                     ssl=True)
        self.connection.connect()
        self.producer = self.connection.Producer(serializer='json',
                                                 auto_declare=True)
        self.queue = Queue(channel=self.connection.channel(),
                           name=CFG.queue,
                           routing_key=CFG.routing_key)
        self.queue.declare()
        self.queue.bind_to(exchange=Exchange(CFG.exchange),
                           routing_key=CFG.routing_key)
        self.consumer = self.connection.Consumer(
            queues=self.queue,
            callbacks=[self._handle_message],
            prefetch_count=CFG.prefetch_count)
        self.consuming = True

    def _handle_message(self, body, message):
        """
        Callback called by consumer.
        :param body:
        :param message:
        :return:
        """
        # body is sometimes dict and sometimes str;
        # make sure it's a json dict before passing it on
        json_body = dict()
        if isinstance(body, dict):
            json_body = body
        elif isinstance(body, str):
            json_body = json.loads(body)
        self.message_callback(json_body)
        message.ack()

    def publish_message(self, message):
        """
        Publishes passed message on the RabbitMQ message bus
        :param message:
        :return:
        """
        self.producer.publish(message,
                              retry=True,
                              retry_policy={
                                  'interval_start': 0,
                                  'interval_step': 2,
                                  'interval_max': 30,
                                  'max_retries': 30,
                              },
                              exchange=self.exchange,
                              routing_key=CFG.routing_key)

    def read_messages(self):
        """
        Method reading messages from the queue in a while-true loop.
        Callback is defined in __init__
        :return:
        """
        with self.consumer:
            while self.consuming:
                self.connection.drain_events()

    def close_connection(self):
        """
        Closes the channels/connections.
        :return:
        """
        # for now called when you press Ctrl-C
        self.consuming = False
        self.producer.release()
        self.connection.release()
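A sketch of wiring RabbitMQConnection above into a simple service; the handler function and the CFG values it depends on are assumptions.

def on_message(body):
    print('received:', body)

mq = RabbitMQConnection(on_message)
mq.publish_message({'ping': 1})
try:
    mq.read_messages()  # blocks, draining events while self.consuming is True
except KeyboardInterrupt:
    mq.close_connection()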
class test_PyroTransport:

    def setup(self):
        self.c = Connection(transport='pyro', virtual_host="kombu.broker")
        self.e = Exchange('test_transport_pyro')
        self.q = Queue('test_transport_pyro',
                       exchange=self.e,
                       routing_key='test_transport_pyro')
        self.q2 = Queue('test_transport_pyro2',
                        exchange=self.e,
                        routing_key='test_transport_pyro2')
        self.fanout = Exchange('test_transport_pyro_fanout', type='fanout')
        self.q3 = Queue('test_transport_pyro_fanout1', exchange=self.fanout)
        self.q4 = Queue('test_transport_pyro_fanout2', exchange=self.fanout)

    def test_driver_version(self):
        assert self.c.transport.driver_version()

    @pytest.mark.skip("requires running Pyro nameserver and Kombu Broker")
    def test_produce_consume_noack(self):
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer = Consumer(channel, self.q, no_ack=True)

        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_pyro')

        _received = []

        def callback(message_data, message):
            _received.append(message)

        consumer.register_callback(callback)
        consumer.consume()

        while 1:
            if len(_received) == 10:
                break
            self.c.drain_events()

        assert len(_received) == 10

    def test_drain_events(self):
        with pytest.raises(socket.timeout):
            self.c.drain_events(timeout=0.1)

        c1 = self.c.channel()
        c2 = self.c.channel()

        with pytest.raises(socket.timeout):
            self.c.drain_events(timeout=0.1)
        del (c1)  # so pyflakes doesn't complain.
        del (c2)

    @pytest.mark.skip("requires running Pyro nameserver and Kombu Broker")
    def test_drain_events_unregistered_queue(self):
        c1 = self.c.channel()
        producer = self.c.Producer()
        consumer = self.c.Consumer([self.q2])

        producer.publish(
            {'hello': 'world'},
            declare=consumer.queues,
            routing_key=self.q2.routing_key,
            exchange=self.q2.exchange,
        )
        message = consumer.queues[0].get()._raw

        class Cycle(object):
            def get(self, callback, timeout=None):
                return (message, 'foo'), c1

        self.c.transport.cycle = Cycle()
        self.c.drain_events()

    @pytest.mark.skip("requires running Pyro nameserver and Kombu Broker")
    def test_queue_for(self):
        chan = self.c.channel()
        x = chan._queue_for('foo')
        assert x
        assert chan._queue_for('foo') is x
class test_MemoryTransport:

    def setup(self):
        self.c = Connection(transport='memory')
        self.e = Exchange('test_transport_memory')
        self.q = Queue('test_transport_memory',
                       exchange=self.e,
                       routing_key='test_transport_memory')
        self.q2 = Queue('test_transport_memory2',
                        exchange=self.e,
                        routing_key='test_transport_memory2')
        self.fanout = Exchange('test_transport_memory_fanout', type='fanout')
        self.q3 = Queue('test_transport_memory_fanout1', exchange=self.fanout)
        self.q4 = Queue('test_transport_memory_fanout2', exchange=self.fanout)

    def test_driver_version(self):
        assert self.c.transport.driver_version()

    def test_produce_consume_noack(self):
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer = Consumer(channel, self.q, no_ack=True)

        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')

        _received = []

        def callback(message_data, message):
            _received.append(message)

        consumer.register_callback(callback)
        consumer.consume()

        while 1:
            if len(_received) == 10:
                break
            self.c.drain_events()

        assert len(_received) == 10

    def test_produce_consume_fanout(self):
        producer = self.c.Producer()
        consumer = self.c.Consumer([self.q3, self.q4])

        producer.publish(
            {'hello': 'world'},
            declare=consumer.queues,
            exchange=self.fanout,
        )

        assert self.q3(self.c).get().payload == {'hello': 'world'}
        assert self.q4(self.c).get().payload == {'hello': 'world'}
        assert self.q3(self.c).get() is None
        assert self.q4(self.c).get() is None

    def test_produce_consume(self):
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer1 = Consumer(channel, self.q)
        consumer2 = Consumer(channel, self.q2)
        self.q2(channel).declare()

        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory2')

        _received1 = []
        _received2 = []

        def callback1(message_data, message):
            _received1.append(message)
            message.ack()

        def callback2(message_data, message):
            _received2.append(message)
            message.ack()

        consumer1.register_callback(callback1)
        consumer2.register_callback(callback2)

        consumer1.consume()
        consumer2.consume()

        while 1:
            if len(_received1) + len(_received2) == 20:
                break
            self.c.drain_events()

        assert len(_received1) + len(_received2) == 20

        # compression
        producer.publish({'compressed': True},
                         routing_key='test_transport_memory',
                         compression='zlib')
        m = self.q(channel).get()
        assert m.payload == {'compressed': True}

        # queue.delete
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory')
        assert self.q(channel).get()
        self.q(channel).delete()
        self.q(channel).declare()
        assert self.q(channel).get() is None

        # queue.purge
        for i in range(10):
            producer.publish({'foo': i}, routing_key='test_transport_memory2')
        assert self.q2(channel).get()
        self.q2(channel).purge()
        assert self.q2(channel).get() is None

    def test_drain_events(self):
        with pytest.raises(socket.timeout):
            self.c.drain_events(timeout=0.1)

        c1 = self.c.channel()
        c2 = self.c.channel()

        with pytest.raises(socket.timeout):
            self.c.drain_events(timeout=0.1)
        del (c1)  # so pyflakes doesn't complain.
        del (c2)

    def test_drain_events_unregistered_queue(self):
        c1 = self.c.channel()
        producer = self.c.Producer()
        consumer = self.c.Consumer([self.q2])

        producer.publish(
            {'hello': 'world'},
            declare=consumer.queues,
            routing_key=self.q2.routing_key,
            exchange=self.q2.exchange,
        )
        message = consumer.queues[0].get()._raw

        class Cycle:
            def get(self, callback, timeout=None):
                return (message, 'foo'), c1

        self.c.transport.cycle = Cycle()
        self.c.drain_events()

    def test_queue_for(self):
        chan = self.c.channel()
        chan.queues.clear()
        x = chan._queue_for('foo')
        assert x
        assert chan._queue_for('foo') is x

    # see the issue https://github.com/celery/kombu/issues/1050
    def test_producer_on_return(self):
        def on_return(_exception, _exchange, _routing_key, _message):
            pass

        channel = self.c.channel()
        producer = Producer(channel, on_return=on_return)
        consumer = self.c.Consumer([self.q3])

        producer.publish(
            {'hello': 'on return'},
            declare=consumer.queues,
            exchange=self.fanout,
        )

        assert self.q3(self.c).get().payload == {'hello': 'on return'}
        assert self.q3(self.c).get() is None
""" Example that use memory transport for message produce. """ import time from kombu import Connection, Exchange, Queue, Consumer media_exchange = Exchange('media', 'direct') video_queue = Queue('video', exchange=media_exchange, routing_key='video') task_queues = [video_queue] def handle_message(body, message): print(f"{time.time()} RECEIVED MESSAGE: {body!r}") message.ack() # 基于内存的传输连接,可以用于单元测试等 # producer和consumer需要在同一个进程 connection = Connection("memory:///") consumer = Consumer(connection, task_queues, callbacks=[handle_message]) producer = connection.Producer(serializer='json') producer.publish({"foo": "bar"}, exchange=media_exchange, routing_key='video', declare=task_queues) consumer.consume() connection.drain_events()
class KombuMessenger:
    """ Sends messages via Kombu.
    """

    def __init__(self, queueHost, queueName, id, hostname, pid, type):
        """ Initializer.
        """
        self._queueHost = queueHost
        self._queueName = queueName
        self._id = id
        self._hostname = hostname
        self._pid = pid
        self._type = type
        self._connection = Connection('pyamqp://*****:*****@%s:5672//' % self._queueHost)
        self._connection.ensure_connection()
        self._exchange = Exchange(self._queueName, type='direct')
        self._queue = Queue(self._queueName, self._exchange,
                            routing_key=self._queueName)
        self._producer = self._connection.Producer()
        self._publish = self._connection.ensure(self._producer,
                                                self._producer.publish,
                                                max_retries=3)
    # end def

    def __del__(self):
        """ Finalizer.
        """
        self._connection.close()
    # end def

    def __str__(self):
        """ Gets the string representation of this object.

        @return: the string representation of this object.
        @rtype: str
        """
        return 'connection: "%s", id: "%s", queueName: "%s", hostname: "%s", pid: "%s", type: "%s"' % (
            self._connection, self._id, self._queueName, self._hostname,
            self._pid, self._type)
    # end def

    def send(self, chunk):
        """ Send stream chunk with JSON descriptor.
        """
        context = {
            'id': self._id,
            'datetime': datetime.isoformat(datetime.now()),
            'hostname': self._hostname,
            'pid': self._pid,
            'type': self._type,
            'chunk': chunk
        }
        #contextStr = json.dumps(context)
        self._publish(context, routing_key=self._queueName,
                      declare=[self._queue])
    # end def