def init_rabbit_mq(self):
    """Declare the airtime-pypo exchange/queue and bind a SimpleQueue to it.

    Returns True on success and False on any connection/declaration error,
    so a caller retry loop (``while not self.init_rabbit_mq()``) terminates.

    Fixes: removed the no-op empty string literal and the dead, commented-out
    connection block (which embedded credentials); converted the Python-2-only
    ``except Exception, e`` syntax; added the missing ``return True``.
    """
    self.logger.info("Initializing RabbitMQ stuff")
    try:
        schedule_exchange = Exchange("airtime-pypo", "direct",
                                     durable=True, auto_delete=True)
        schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange,
                               key="foo")
        connection = BrokerConnection(self.config["rabbitmq_host"],
                                      self.config["rabbitmq_user"],
                                      self.config["rabbitmq_password"],
                                      self.config["rabbitmq_vhost"])
        channel = connection.channel()
        self.simple_queue = SimpleQueue(channel, schedule_queue)
    except Exception as e:
        self.logger.error(e)
        return False
    return True
def _do_test():
    """Check that channel and transport expose their error/client metadata."""
    connection = BrokerConnection(transport=Transport)
    channel = connection.channel()
    for required in (channel.Client,
                     channel.ResponseError,
                     connection.transport.connection_errors,
                     connection.transport.channel_errors):
        self.assertTrue(required)
def create_connection():
    """Open a broker connection with the fixed dev credentials; return a channel."""
    broker = BrokerConnection("localhost", "fred", "fred123", "home")
    return broker.channel()
def test_close_disconnects(self):
    """Closing a channel must disconnect both underlying client connections."""
    channel = BrokerConnection(transport=Transport).channel()
    client_conn = channel.client.connection
    subclient_conn = channel.subclient.connection
    channel.close()
    for raw_conn in (client_conn, subclient_conn):
        self.assertTrue(raw_conn.disconnected)
def test_publish__consume(self):
    """Round-trip one message and verify the poller start/stop conditions."""
    conn = BrokerConnection(transport=Transport)
    chan = conn.channel()
    producer = Producer(chan, self.exchange, routing_key="test_Redis")
    consumer = Consumer(chan, self.queue)
    producer.publish({"hello2": "world2"})
    received = []

    def on_message(body, message):
        received.append(body)
        message.ack()

    consumer.register_callback(on_message)
    consumer.consume()
    # the poller may start only while a consumer is active
    self.assertTrue(chan._poller._can_start())
    try:
        conn.drain_events(timeout=1)
        self.assertTrue(received)
        self.assertFalse(chan._poller._can_start())
        self.assertRaises(socket.timeout, conn.drain_events, timeout=0.01)
    finally:
        chan.close()
def get_connection(self, vhost):
    """Return a broker connection for *vhost*, connecting lazily.

    Bug fix: the original checked ``self.connections`` but never stored a
    newly created connection there, so every cache miss for the same vhost
    opened another connection. New connections are now cached.
    """
    if vhost in self.connections:
        connection = self.connections[vhost]
    else:
        connection = BrokerConnection(settings.AMQP_URL + vhost)
        self.connections[vhost] = connection  # remember for the next call
    if not connection.connected:
        connection.connect()
    return connection
def setup_rabbit_mq_channel(self):
    """Create the producer that publishes on the acord control exchange."""
    control_exchange = Exchange(self.acord_control_exchange, "topic",
                                durable=False)
    # connections/channels
    broker = BrokerConnection(self.rabbit_host, self.rabbit_user,
                              self.rabbit_password)
    logging.info("Connection to RabbitMQ server successful")
    chan = broker.channel()
    # produce
    self.producer = Producer(chan, exchange=control_exchange,
                             routing_key='notifications.info')
def create_connection(config):
    """Build a broker channel from the [connection] section of *config*."""
    section = "connection"
    broker = BrokerConnection(config.get(section, "hostname"),
                              config.get(section, "userid"),
                              config.get(section, "password"),
                              config.get(section, "virtual_host"))
    return broker.channel()
def test_parse_generated_as_uri(self):
    """info() reflects the parsed URL; as_uri() hides the password by default."""
    conn = BrokerConnection(self.url)
    parsed = conn.info()
    for key, expected in self.expected.items():
        self.assertEqual(parsed[key], expected)
    # by default almost the same - no password
    self.assertEqual(conn.as_uri(), self.nopass)
    self.assertEqual(conn.as_uri(include_password=True), self.url)
def setup_rabbit_mq_channel():
    """Create the module-level vCPE notification producer.

    Reads the module-level rabbit_* settings and leaves the Producer in the
    global ``producer``. Fix: the Python-2-only ``print`` statement is now a
    ``print()`` call, valid on both Python 2 and 3 (the file mixes both).
    """
    global producer
    global rabbit_user, rabbit_password, rabbit_host, \
        vcpeservice_rabbit_exchange, cpe_publisher_id
    vcpeservice_exchange = Exchange(vcpeservice_rabbit_exchange, "topic",
                                    durable=False)
    # connections/channels
    connection = BrokerConnection(rabbit_host, rabbit_user, rabbit_password)
    print('Connection to RabbitMQ server successful')
    channel = connection.channel()
    # produce
    producer = Producer(channel, exchange=vcpeservice_exchange,
                        routing_key='notifications.info')
def init_rabbit_mq(self):
    """Declare the airtime-pypo exchange/queue and bind a SimpleQueue.

    Returns True on success and False on failure (fixes the missing
    ``return True`` — on success the original returned None, which is falsy,
    so caller retry loops of the form ``while not init_rabbit_mq()`` would
    never terminate). Also converts the Python-2-only except syntax.
    """
    self.logger.info("Initializing RabbitMQ stuff")
    try:
        schedule_exchange = Exchange("airtime-pypo", "direct",
                                     durable=True, auto_delete=True)
        schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange,
                               key="foo")
        connection = BrokerConnection(config["rabbitmq_host"],
                                      config["rabbitmq_user"],
                                      config["rabbitmq_password"],
                                      config["rabbitmq_vhost"])
        channel = connection.channel()
        self.simple_queue = SimpleQueue(channel, schedule_queue)
    except Exception as e:
        self.logger.error(e)
        return False
    return True
def setup_rabbit_mq_channel(self):
    """Create the udpservice notification producer from oslo.config settings.

    Fix: the Python-2-only ``print`` statement is now a ``print()`` call,
    valid on both Python 2 and 3.
    """
    service_exchange = Exchange(cfg.CONF.udpservice.acord_control_exchange,
                                "topic", durable=False)
    rabbit_host = cfg.CONF.udpservice.rabbit_hosts
    rabbit_user = cfg.CONF.udpservice.rabbit_userid
    rabbit_password = cfg.CONF.udpservice.rabbit_password
    # connections/channels
    connection = BrokerConnection(rabbit_host, rabbit_user, rabbit_password)
    print('Connection to RabbitMQ server successful')
    channel = connection.channel()
    # produce
    self.producer = Producer(channel, exchange=service_exchange,
                             routing_key='notifications.info')
class AMQPWorker(Worker):
    """Worker that repeatedly drains a fixed list of AMQP queues.

    Each entry in ``queues`` maps a queue/routing key to the name of a
    handler method on this class.
    """

    queues = [
        {'routing_key': 'test', 'name': 'test', 'handler': 'handle_test'}
    ]
    # Broker connection, created lazily in handle(); closed on quit/exit.
    _connection = None

    def handle_test(self, body, message):
        # Example handler: log and acknowledge.
        log.debug("Handle message: %s" % body)
        message.ack()

    def handle(self):
        """Bind a consumer per configured queue, drain pending events, then close."""
        log.debug("Start consuming")
        exchange = Exchange('amqp.topic', type='direct', durable=True)
        self._connection = BrokerConnection(*CONNECTION)
        channel = self._connection.channel()
        for entry in self.queues:
            log.debug("prepare to consume %s" % entry['routing_key'])
            queue = Queue(entry['name'], exchange=exchange,
                          routing_key=entry['routing_key'])
            consumer = Consumer(channel, queue)
            # dispatch to the method named in the queue entry
            consumer.register_callback(getattr(self, entry['handler']))
            consumer.consume()
        log.debug("start consuming...")
        while True:
            try:
                self._connection.drain_events()
            except socket.timeout:
                # nothing left to deliver this round; stop and reconnect later
                log.debug("nothing to consume...")
                break
        self._connection.close()

    def run(self):
        # Keep handling until the worker is told to stop; any unexpected
        # exception marks the worker dead and is re-raised.
        while self.alive:
            try:
                self.handle()
            except Exception:
                self.alive = False
                raise

    def handle_quit(self, sig, frame):
        # Signal handler: close the connection and stop the run loop.
        if self._connection is not None:
            self._connection.close()
        self.alive = False

    def handle_exit(self, sig, frame):
        # Signal handler: close the connection and exit the process.
        if self._connection is not None:
            self._connection.close()
        self.alive = False
        sys.exit(0)
def send_end(num):
    """Publish a single "end" marker message on routing key end<num>."""
    broker = BrokerConnection(hostname='myhost', userid='webfis',
                              password='******',
                              virtual_host='webfishost', port=5672)
    key = "end" + str(num)
    publisher = Publisher(connection=broker, exchange="end",
                          routing_key=key, exchange_type="direct")
    publisher.send("end")
    publisher.close()
    broker.release()
def setup_rabbit_mq_channel():
    """Create the vCPE notification producer and tag the publisher id with the host name.

    Bug fix: ``hostname`` output from ``p.communicate()`` ends with a newline,
    which was previously embedded in ``cpe_publisher_id``; it is now stripped.
    """
    global producer
    global rabbit_user, rabbit_password, rabbit_host, \
        vcpeservice_rabbit_exchange, cpe_publisher_id
    vcpeservice_exchange = Exchange(vcpeservice_rabbit_exchange, "topic",
                                    durable=False)
    # connections/channels
    connection = BrokerConnection(rabbit_host, rabbit_user, rabbit_password)
    logger.info('Connection to RabbitMQ server successful')
    channel = connection.channel()
    # produce
    producer = Producer(channel, exchange=vcpeservice_exchange,
                        routing_key='notifications.info')
    p = subprocess.Popen('hostname', shell=True, stdout=subprocess.PIPE)
    (hostname, error) = p.communicate()
    cpe_publisher_id = cpe_publisher_id + '_on_' + hostname.strip()
    logger.info('cpe_publisher_id=%s', cpe_publisher_id)
def test_start__consume_messages(self):
    """start() stops when consume_messages raises; qos is synced exactly once first."""
    class _QoS(object):
        # prev/value model the last-applied and pending prefetch settings;
        # update() applies the pending value.
        prev = 3
        value = 4

        def update(self):
            self.prev = self.value

    class _Consumer(MyKombuConsumer):
        iterations = 0

        def reset_connection(self):
            # fail the reconnect once the first iteration has run
            if self.iterations >= 1:
                raise KeyError("foo")

    init_callback = Mock()
    l = _Consumer(self.ready_queue, self.eta_schedule, self.logger,
                  send_events=False, init_callback=init_callback)
    l.task_consumer = Mock()
    l.broadcast_consumer = Mock()
    l.qos = _QoS()
    l.connection = BrokerConnection()
    l.iterations = 0

    def raises_KeyError(limit=None):
        # first call syncs qos; second call aborts the consume loop
        l.iterations += 1
        if l.qos.prev != l.qos.value:
            l.qos.update()
        if l.iterations >= 2:
            raise KeyError("foo")

    l.consume_messages = raises_KeyError
    self.assertRaises(KeyError, l.start)
    self.assertTrue(init_callback.call_count)
    self.assertEqual(l.iterations, 1)
    # qos must have been applied before the loop aborted
    self.assertEqual(l.qos.prev, l.qos.value)

    init_callback.reset_mock()
    l = _Consumer(self.ready_queue, self.eta_schedule, self.logger,
                  send_events=False, init_callback=init_callback)
    l.qos = _QoS()
    l.task_consumer = Mock()
    l.broadcast_consumer = Mock()
    l.connection = BrokerConnection()
    # socket errors from consume_messages must propagate out of start()
    l.consume_messages = Mock(side_effect=socket.error("foo"))
    self.assertRaises(socket.error, l.start)
    self.assertTrue(init_callback.call_count)
    self.assertTrue(l.consume_messages.call_count)
def test_db_values(self):
    """virtual_host is coerced to an integer Redis db; non-numeric values raise.

    Bug fix: the last assertion passed only the callable to assertRaises,
    so assertRaises treated it as the exception class and the failure path
    was never actually exercised. The expected exception class is now given.
    """
    c1 = BrokerConnection(virtual_host=1, transport=Transport).channel()
    self.assertEqual(c1.client.db, 1)

    c2 = BrokerConnection(virtual_host="1", transport=Transport).channel()
    self.assertEqual(c2.client.db, 1)

    c3 = BrokerConnection(virtual_host="/1", transport=Transport).channel()
    self.assertEqual(c3.client.db, 1)

    # "/foo" cannot be parsed as an int db index -> ValueError from int()
    self.assertRaises(ValueError,
                      BrokerConnection(virtual_host="/foo",
                                       transport=Transport).channel)
def test_close_survives_connerror(self):
    """close() must swallow errors the transport declares as connection errors."""
    class _Boom(Exception):
        pass

    class _FailingTransport(Transport):
        connection_errors = (_Boom, )

        def close_connection(self, connection):
            raise _Boom("foo")

    conn = BrokerConnection(transport=_FailingTransport)
    conn.connect()
    conn.close()
    self.assertTrue(conn._closed)
def setUp(self):
    """Build a memory-transport mailbox plus one bound node for the tests."""
    class Mailbox(pidbox.Mailbox):
        def _collect(self, *args, **kwargs):
            return "COLLECTED"

    connection = BrokerConnection(transport="memory")
    mailbox = Mailbox("test_pidbox")
    self.mailbox = mailbox
    self.connection = connection
    self.state = {"var": 1}
    self.handlers = {"mymethod": self._handler}
    self.bound = mailbox(connection)
    self.default_chan = connection.channel()
    self.node = self.bound.Node("test_pidbox", state=self.state,
                                handlers=self.handlers,
                                channel=self.default_chan)
def test_db_port(self):
    """A missing port falls back to the transport default; explicit ports win."""
    for given, expected in ((None, Transport.default_port), (9999, 9999)):
        chan = BrokerConnection(port=given, transport=Transport).channel()
        self.assertEqual(chan.client.port, expected)
        chan.close()
def test_default_port(self):
    """With port=None the amqplib transport connects on its default port."""
    class Transport(amqplib.Transport):
        Connection = MockConnection

    info = BrokerConnection(port=None, transport=Transport).connect()
    expected = "127.0.0.1:%s" % (Transport.default_port, )
    self.assertEqual(info["host"], expected)
def test_custom_port(self):
    """An explicitly given port must appear in the connect() host string."""
    class Transport(pyamqplib.Transport):
        Connection = MockConnection

    info = BrokerConnection(port=1337, transport=Transport).connect()
    self.assertEqual(info["host"], "localhost:1337")
class AirtimeNotifier(Notifier):
    """pyinotify Notifier that also consumes Airtime config messages from RabbitMQ."""

    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None, airtime_config=None,
                 api_client=None, bootstrap=None, mmc=None):
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        self.logger = logging.getLogger()
        self.config = airtime_config
        self.api_client = api_client
        self.bootstrap = bootstrap
        self.md_manager = AirtimeMetadata()
        self.import_processes = {}
        self.watched_folders = []
        self.mmc = mmc
        self.wm = watch_manager
        self.mask = pyinotify.ALL_EVENTS
        # Block until RabbitMQ is reachable; init_rabbit_mq returns False on
        # error, True on success.
        while not self.init_rabbit_mq():
            self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")
            time.sleep(5)

    def init_rabbit_mq(self):
        """Declare the media-monitor exchange/queue and start consuming.

        Returns True on success, False on any error (constructor retries).
        """
        self.logger.info("Initializing RabbitMQ stuff")
        try:
            schedule_exchange = Exchange("airtime-media-monitor", "direct",
                                         durable=True, auto_delete=True)
            schedule_queue = Queue("media-monitor",
                                   exchange=schedule_exchange,
                                   key="filesystem")
            self.connection = BrokerConnection(
                self.config.cfg["rabbitmq_host"],
                self.config.cfg["rabbitmq_user"],
                self.config.cfg["rabbitmq_password"],
                self.config.cfg["rabbitmq_vhost"])
            channel = self.connection.channel()
            consumer = Consumer(channel, schedule_queue)
            consumer.register_callback(self.handle_message)
            consumer.consume()
        except Exception, e:
            self.logger.error(e)
            return False
        return True
def test_connection_errors(self):
    """connection_errors is taken straight from the transport class."""
    class MyTransport(Transport):
        connection_errors = (KeyError, ValueError)

    errors = BrokerConnection(transport=MyTransport).connection_errors
    self.assertTupleEqual(errors, (KeyError, ValueError))
class PypoFetch(Thread):
    """Thread that pulls schedule updates for pypo from RabbitMQ."""

    def __init__(self, q):
        Thread.__init__(self)
        logger = logging.getLogger('fetch')
        self.api_client = api_client.api_client_factory(config)
        self.set_export_source('scheduler')
        # queue used to hand fetched schedules to the rest of pypo
        self.queue = q
        logger.info("PypoFetch: init complete")

    def init_rabbit_mq(self):
        """Declare the airtime-schedule exchange/queue and start consuming.

        Returns True on success, False on any error.
        """
        logger = logging.getLogger('fetch')
        logger.info("Initializing RabbitMQ stuff")
        try:
            schedule_exchange = Exchange("airtime-schedule", "direct",
                                         durable=True, auto_delete=True)
            schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange,
                                   key="foo")
            # NOTE(review): the vhost is hard-coded to "/" here, unlike the
            # config-driven variants elsewhere in this file — confirm intended.
            self.connection = BrokerConnection(config["rabbitmq_host"],
                                               config["rabbitmq_user"],
                                               config["rabbitmq_password"],
                                               "/")
            channel = self.connection.channel()
            consumer = Consumer(channel, schedule_queue)
            # handle_message is a module-level callback, not a method here
            consumer.register_callback(handle_message)
            consumer.consume()
        except Exception, e:
            logger.error(e)
            return False
        return True
def __init__(self, hostname="127.0.0.1", userid="guest", password="******",
             virtual_host="/", port=5672, name="", routing_key=""):
    """
    Setup a connection to the AMQP server, get a channel and create an
    exchange. A specific service listener implementation overrides the
    name and routing_key
    """
    if name == "":
        raise Exception("Name must be non-empty string")
    self.name = name
    self.routing_key = routing_key
    # Derive the exchange type from the routing key: empty -> fanout,
    # wildcard characters -> topic, otherwise direct.
    if routing_key == "":
        exchange_type = "fanout"
    elif "*" in routing_key or "#" in routing_key:
        exchange_type = "topic"
    else:
        exchange_type = "direct"
    # NOTE(review): the `port` parameter is ignored — the connection is
    # hard-wired to port 443 (with ssl=False); confirm whether intentional.
    self.connection = BrokerConnection(hostname=hostname, userid=userid,
                                       password=password,
                                       virtual_host=virtual_host,
                                       port=443, insist=False, ssl=False)
    self.channel = self.connection.channel()
    self.exchange = Exchange(name=self.name, type=exchange_type,
                             durable=False, channel=self.channel)
    self.connection.connect()
    return
def test_default_port(self):
    """With port=None the pyamqplib transport connects on its default port."""
    class Transport(pyamqplib.Transport):
        Connection = dict

    info = BrokerConnection(port=None, transport=Transport).connect()
    expected = "localhost:%s" % (Transport.default_port, )
    self.assertEqual(info["host"], expected)
def setup_rabbit_mq_channel():
    """Create the module-level notification producer; tag publisher_id with the host name.

    Bug fix: ``hostname`` from ``p.communicate()`` ends with a newline, which
    was previously embedded in ``publisher_id``; it is now stripped.
    """
    global producer
    global rabbit_user, rabbit_password, rabbit_host, rabbit_exchange, \
        publisher_id
    service_exchange = Exchange(rabbit_exchange, "topic", durable=False)
    # connections/channels
    connection = BrokerConnection(rabbit_host, rabbit_user, rabbit_password)
    logging.info('Connection to RabbitMQ server successful')
    channel = connection.channel()
    # produce
    producer = Producer(channel, exchange=service_exchange,
                        routing_key='notifications.info')
    p = subprocess.Popen('hostname', shell=True, stdout=subprocess.PIPE)
    (hostname, error) = p.communicate()
    publisher_id = publisher_id + '_on_' + hostname.strip()
    logging.info('publisher_id=%s', publisher_id)
def test_url_parser(self):
    """Both sqlalchemy scheme aliases parse; an unknown scheme raises KeyError."""
    try:
        import sqlalchemy  # noqa
    except ImportError:
        raise SkipTest("sqlalchemy not installed")
    with patch("kombu.transport.sqlalchemy.Channel._open"):
        for scheme in ("sqlalchemy+sqlite", "sqla+sqlite"):
            BrokerConnection("%s://celerydb.sqlite" % scheme).connect()
        # Should prevent regression fixed by f187ccd
        with self.assertRaises(KeyError):
            BrokerConnection("sqlb+sqlite://celerydb.sqlite").connect()
def test_url_parser(self):
    """mongodb:// URLs map host, node list and path onto the client database name."""
    from kombu.transport import mongodb
    from pymongo.errors import ConfigurationError

    class Transport(mongodb.Transport):
        Connection = MockConnection

    # bare scheme -> default db name and localhost
    url = "mongodb://"
    c = BrokerConnection(url, transport=Transport).connect()
    client = c.channels[0].client
    self.assertEquals(client.name, "kombu_default")
    self.assertEquals(client.connection.host, "127.0.0.1")

    # explicit host, no path -> still the default db name
    url = "mongodb://localhost"
    c = BrokerConnection(url, transport=Transport).connect()
    client = c.channels[0].client
    self.assertEquals(client.name, "kombu_default")

    # path component becomes the db name
    url = "mongodb://localhost/dbname"
    c = BrokerConnection(url, transport=Transport).connect()
    client = c.channels[0].client
    self.assertEquals(client.name, "dbname")

    # multiple hosts become connection nodes
    url = "mongodb://localhost,example.org:29017/dbname"
    c = BrokerConnection(url, transport=Transport).connect()
    client = c.channels[0].client

    nodes = client.connection.nodes
    self.assertEquals(len(nodes), 2)
    self.assertTrue(("example.org", 29017) in nodes)
    self.assertEquals(client.name, "dbname")

    # Passing options breaks kombu's _init_params method
    # url = "mongodb://localhost,localhost2:29017/dbname?safe=true"
    # c = BrokerConnection(url, transport=Transport).connect()
    # client = c.channels[0].client

    url = "mongodb://*****:*****@localhost/dbname"
    c = BrokerConnection(url, transport=Transport).connect()
    # Assuming there's no user 'username' with password 'password'
    # configured in mongodb

    # Needed, otherwise the error would be rose before
    # the assertRaises is called
    def get_client():
        c.channels[0].client

    self.assertRaises(ConfigurationError, get_client)
def test__enter____exit__(self):
    """The connection works as a context manager and releases on exit."""
    conn = BrokerConnection(transport=Transport)
    self.assertIs(conn.__enter__(), conn)
    conn.connect()
    self.assertTrue(conn.connection.connected)
    conn.__exit__()
    self.assertIsNone(conn.connection)
    conn.close()  # closing again is a no-op
def test__enter____exit__(self):
    """The backend-based connection works as a context manager too."""
    conn = BrokerConnection(backend_cls=Backend)
    self.assertIs(conn.__enter__(), conn)
    conn.connect()
    self.assertTrue(conn.connection.connected)
    conn.__exit__()
    self.assertIsNone(conn.connection)
    conn.close()  # closing again is a no-op
def _rabbit_configured(self):
    # Data message should be forwarded to AMQP
    try:
        with BrokerConnection(settings.BROKER_URL) as conn:
            return conn.connect().connected
    except socket.error:
        return False
def setUp(self):
    """One memory connection, one exchange, two queues bound by routing key."""
    self.c = BrokerConnection(transport="memory")
    self.e = Exchange("test_transport_memory")
    self.q, self.q2 = [
        Queue(name, exchange=self.e, routing_key=name)
        for name in ("test_transport_memory", "test_transport_memory2")
    ]
def init_rabbit_mq(self):
    """Declare the airtime-pypo exchange/queue and bind a SimpleQueue.

    Returns True on success and False on failure (fixes the missing
    ``return True`` — siblings of this method in this codebase return True,
    and callers loop ``while not init_rabbit_mq()``). Also converts the
    Python-2-only except syntax.
    """
    self.logger.info("Initializing RabbitMQ stuff")
    try:
        schedule_exchange = Exchange("airtime-pypo", "direct",
                                     durable=True, auto_delete=True)
        schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange,
                               key="foo")
        connection = BrokerConnection(self.config["rabbitmq_host"],
                                      self.config["rabbitmq_user"],
                                      self.config["rabbitmq_password"],
                                      self.config["rabbitmq_vhost"])
        channel = connection.channel()
        self.simple_queue = SimpleQueue(channel, schedule_queue)
    except Exception as e:
        self.logger.error(e)
        return False
    return True
def get_connection(self):
    """
    Return a connection instance configured as described in the
    settings file.
    """
    broker_cfg = settings.MESSAGE_BROKER
    extra_options = broker_cfg.get("options", {})
    return BrokerConnection(transport=broker_cfg['transport'],
                            **extra_options)
def test_revive_when_channel_is_connection(self):
    """revive(connection) rebinds producer and exchange to its default channel."""
    producer = self.connection.Producer()
    producer.exchange = Mock()
    replacement = BrokerConnection("memory://")
    chan = replacement.default_channel
    producer.revive(replacement)
    self.assertIs(producer.channel, chan)
    producer.exchange.revive.assert_called_with(chan)
def setup_rabbit_mq_channel(self):
    """Create the notification producer plus a retrying publish callable."""
    notif_exchange = Exchange(self.rabbit_exchange, "topic", durable=False)
    # connections/channels
    broker = BrokerConnection(self.rabbit_host, self.rabbit_user,
                              self.rabbit_password)
    LOG.info("BroadViewPublisher: Connection to RabbitMQ server successful")
    chan = broker.channel()
    # produce
    self._producer = Producer(chan, exchange=notif_exchange,
                              routing_key='notifications.info')
    # wrap publish() so transient broker errors are retried up to 3 times
    self._publish = broker.ensure(self._producer, self._producer.publish,
                                  errback=self.errback, max_retries=3)
class AirtimeNotifier(Loggable):
    """
    AirtimeNotifier is responsible for interecepting RabbitMQ messages and
    feeding them to the event_handler object it was initialized with. The
    only thing it does to the messages is parse them from json
    """

    def __init__(self, cfg, message_receiver):
        self.cfg = cfg
        self.handler = message_receiver
        # Block until RabbitMQ is reachable; init_rabbit_mq returns False
        # on error, True on success.
        while not self.init_rabbit_mq():
            self.logger.error(
                "Error connecting to RabbitMQ Server. Trying again in few seconds"
            )
            time.sleep(5)

    def init_rabbit_mq(self):
        """Declare the media-monitor exchange/queue and bind a SimpleQueue.

        Returns True on success, False on any error (constructor retries).
        """
        try:
            self.logger.info("Initializing RabbitMQ message consumer...")
            schedule_exchange = Exchange("airtime-media-monitor", "direct",
                                         durable=True, auto_delete=True)
            schedule_queue = Queue("media-monitor",
                                   exchange=schedule_exchange,
                                   key="filesystem")
            self.connection = BrokerConnection(self.cfg["rabbitmq_host"],
                                               self.cfg["rabbitmq_user"],
                                               self.cfg["rabbitmq_password"],
                                               self.cfg["rabbitmq_vhost"])
            channel = self.connection.channel()
            self.simple_queue = SimpleQueue(channel, schedule_queue)
            self.logger.info("Initialized RabbitMQ consumer.")
        except Exception as e:
            self.logger.info("Failed to initialize RabbitMQ consumer")
            self.logger.error(e)
            return False
        return True

    def handle_message(self, message):
        """
        Messages received from RabbitMQ are handled here. These messages
        instruct media-monitor of events such as a new directory being watched,
        file metadata has been changed, or any other changes to the config of
        media-monitor via the web UI.
        """
        self.logger.info("Received md from RabbitMQ: %s" % str(message))
        m = json.loads(message)
        # TODO : normalize any other keys that could be used to pass
        # directories
        if 'directory' in m:
            m['directory'] = normpath(m['directory'])
        self.handler.message(m)
def init_rabbit_mq(self):
    """Declare the media-monitor exchange/queue and start consuming.

    Returns True on success and False on failure (fixes the missing
    ``return True``, matching the sibling implementations in this codebase
    whose callers loop ``while not init_rabbit_mq()``). Also converts the
    Python-2-only except syntax.
    """
    self.logger.info("Initializing RabbitMQ stuff")
    try:
        schedule_exchange = Exchange("airtime-media-monitor", "direct",
                                     durable=True, auto_delete=True)
        schedule_queue = Queue("media-monitor", exchange=schedule_exchange,
                               key="filesystem")
        # NOTE(review): vhost is hard-coded to "/" here, unlike the
        # config-driven variants elsewhere — confirm this is intended.
        self.connection = BrokerConnection(
            self.config.cfg["rabbitmq_host"],
            self.config.cfg["rabbitmq_user"],
            self.config.cfg["rabbitmq_password"],
            "/")
        channel = self.connection.channel()
        consumer = Consumer(channel, schedule_queue)
        consumer.register_callback(self.handle_message)
        consumer.consume()
    except Exception as e:
        self.logger.error(e)
        return False
    return True
def check_end(num):
    """Return True when an "end" marker message is waiting on queue end<num>."""
    broker = BrokerConnection(hostname='myhost', userid='webfis',
                              password='******',
                              virtual_host='webfishost', port=5672)
    key = "end" + str(num)
    consumer = Consumer(connection=broker, queue=key, exchange="end",
                        routing_key=key, exchange_type="direct")
    message = consumer.fetch()
    end = bool(message and message.payload == "end")
    consumer.close()
    broker.release()
    return end
def main():
    """ Query the master server and add the results to the database """
    parser = OptionParser('usage: masterserver.py [options]')
    parser.add_option("-d", "--debug",
                      action="store_true", dest="debug",
                      default=False, help="enable debug messages")
    options, args = parser.parse_args()
    if args:
        parser.error('incorrect number of arguments')

    logging.basicConfig(level=logging.DEBUG)
    if not options.debug:
        logging.disable(logging.DEBUG)

    # declare exchange
    server_exchange = Exchange('servers', type='fanout')

    # set up our amqp connection
    connection = BrokerConnection(hostname='localhost',
                                  userid='gamelion',
                                  password='******',
                                  virtual_host='/')
    channel = connection.channel()
    producer = Producer(channel, server_exchange, serializer="pickle")

    # run through all the master servers we know of and ask them for ips
    banner = '*' * 60
    for server_address in master_servers:
        logging.debug(banner)
        logging.debug('NEW SERVER: %s', str(server_address))
        logging.debug(banner)
        run_full_query(server_address, producer)

    channel.close()
    connection.release()
class Sender():
    """
    Simple class to wrap the operations needed for an AMQP listener
    """

    def __init__(self, hostname="127.0.0.1", userid="guest",
                 password="******", virtual_host="/", port=5672,
                 name="", routing_key=""):
        """
        Setup a connection to the AMQP server, get a channel and create an
        exchange. A specific service listener implementation overrides the
        name and routing_key
        """
        if name == "":
            raise Exception("Name must be non-empty string")
        self.name = name
        self.routing_key = routing_key
        # Derive the exchange type from the routing key: empty -> fanout,
        # wildcard characters -> topic, otherwise direct.
        if routing_key == "":
            exchange_type = "fanout"
        elif "*" in routing_key or "#" in routing_key:
            exchange_type = "topic"
        else:
            exchange_type = "direct"
        # NOTE(review): the `port` parameter is ignored — the connection is
        # hard-wired to port 443 (with ssl=False); confirm whether intentional.
        self.connection = BrokerConnection(hostname=hostname, userid=userid,
                                           password=password,
                                           virtual_host=virtual_host,
                                           port=443, insist=False, ssl=False)
        self.channel = self.connection.channel()
        self.exchange = Exchange(name=self.name, type=exchange_type,
                                 durable=False, channel=self.channel)
        self.connection.connect()
        return

    def send(self, msg):
        """
        Publishes a message to the AMQP server on the initialized exchange
        msg is a string, usually a JSON dump
        """
        self.exchange.publish(self.exchange.Message(msg),
                              routing_key=self.routing_key)
        return
def init_rabbit_mq(self):
    """Declare the media-monitor exchange/queue and start consuming.

    Returns True on success and False on failure (fixes the missing
    ``return True`` so caller retry loops terminate); also converts the
    Python-2-only except syntax.
    """
    self.logger.info("Initializing RabbitMQ stuff")
    try:
        schedule_exchange = Exchange("airtime-media-monitor", "direct",
                                     durable=True, auto_delete=True)
        schedule_queue = Queue("media-monitor", exchange=schedule_exchange,
                               key="filesystem")
        self.connection = BrokerConnection(
            self.config.cfg["rabbitmq_host"],
            self.config.cfg["rabbitmq_user"],
            self.config.cfg["rabbitmq_password"],
            self.config.cfg["rabbitmq_vhost"])
        channel = self.connection.channel()
        consumer = Consumer(channel, schedule_queue)
        consumer.register_callback(self.handle_message)
        consumer.consume()
    except Exception as e:
        self.logger.error(e)
        return False
    return True
def init_rabbit_mq(self):
    """Declare the airtime-schedule exchange/queue and start consuming.

    Returns True on success and False on failure (fixes the missing
    ``return True`` so caller retry loops terminate); also converts the
    Python-2-only except syntax.
    """
    logger = logging.getLogger('fetch')
    logger.info("Initializing RabbitMQ stuff")
    try:
        schedule_exchange = Exchange("airtime-schedule", "direct",
                                     durable=True, auto_delete=True)
        schedule_queue = Queue("pypo-fetch", exchange=schedule_exchange,
                               key="foo")
        # NOTE(review): vhost is hard-coded to "/" — confirm this is intended.
        self.connection = BrokerConnection(config["rabbitmq_host"],
                                           config["rabbitmq_user"],
                                           config["rabbitmq_password"],
                                           "/")
        channel = self.connection.channel()
        consumer = Consumer(channel, schedule_queue)
        # handle_message is a module-level callback, not a method here
        consumer.register_callback(handle_message)
        consumer.consume()
    except Exception as e:
        logger.error(e)
        return False
    return True
class AirtimeNotifier(Loggable):
    """
    AirtimeNotifier is responsible for interecepting RabbitMQ messages and
    feeding them to the event_handler object it was initialized with. The
    only thing it does to the messages is parse them from json
    """

    def __init__(self, cfg, message_receiver):
        self.cfg = cfg
        self.handler = message_receiver
        # Block until RabbitMQ is reachable; init_rabbit_mq returns False
        # on error, True on success.
        while not self.init_rabbit_mq():
            self.logger.error("Error connecting to RabbitMQ Server. Trying again in few seconds")
            time.sleep(5)

    def init_rabbit_mq(self):
        """Declare the media-monitor exchange/queue and bind a SimpleQueue.

        Returns True on success, False on any error (constructor retries).
        Reads the nested ``cfg["rabbitmq"]`` settings mapping.
        """
        try:
            self.logger.info("Initializing RabbitMQ message consumer...")
            schedule_exchange = Exchange("airtime-media-monitor", "direct",
                                         durable=True, auto_delete=True)
            schedule_queue = Queue("media-monitor",
                                   exchange=schedule_exchange,
                                   key="filesystem")
            self.connection = BrokerConnection(
                self.cfg["rabbitmq"]["host"],
                self.cfg["rabbitmq"]["user"],
                self.cfg["rabbitmq"]["password"],
                self.cfg["rabbitmq"]["vhost"],
            )
            channel = self.connection.channel()
            self.simple_queue = SimpleQueue(channel, schedule_queue)
            self.logger.info("Initialized RabbitMQ consumer.")
        except Exception as e:
            self.logger.info("Failed to initialize RabbitMQ consumer")
            self.logger.error(e)
            return False
        return True

    def handle_message(self, message):
        """
        Messages received from RabbitMQ are handled here. These messages
        instruct media-monitor of events such as a new directory being watched,
        file metadata has been changed, or any other changes to the config of
        media-monitor via the web UI.
        """
        self.logger.info("Received md from RabbitMQ: %s" % str(message))
        m = json.loads(message)
        # TODO : normalize any other keys that could be used to pass
        # directories
        if "directory" in m:
            m["directory"] = normpath(m["directory"])
        self.handler.message(m)
def test_establish_connection(self):
    """connect() opens the backend connection; close() tears it down."""
    conn = BrokerConnection(port=5672, backend_cls=Backend)
    conn.connect()
    self.assertTrue(conn.connection.connected)
    self.assertEqual(conn.host, "localhost:5672")
    chan = conn.channel()
    self.assertTrue(chan.open)
    self.assertEqual(conn.drain_events(), "event")
    backend_connection = conn.connection
    conn.close()
    self.assertFalse(backend_connection.connected)
    self.assertIsInstance(conn.backend, Backend)
def handle(self):
    """Bind a consumer per configured queue, drain everything queued, then close."""
    log.debug("Start consuming")
    exchange = Exchange('amqp.topic', type='direct', durable=True)
    self._connection = BrokerConnection(*CONNECTION)
    channel = self._connection.channel()
    for entry in self.queues:
        log.debug("prepare to consume %s" % entry['routing_key'])
        bound_queue = Queue(entry['name'], exchange=exchange,
                            routing_key=entry['routing_key'])
        consumer = Consumer(channel, bound_queue)
        consumer.register_callback(getattr(self, entry['handler']))
        consumer.consume()
    log.debug("start consuming...")
    drained = False
    while not drained:
        try:
            self._connection.drain_events()
        except socket.timeout:
            log.debug("nothing to consume...")
            drained = True
    self._connection.close()
def test_publish__consume(self):
    """A published message reaches the consumer; the channel joins the poll cycle."""
    conn = BrokerConnection(transport=Transport)
    chan = conn.channel()
    producer = Producer(chan, self.exchange, routing_key="test_Redis")
    consumer = Consumer(chan, self.queue)
    producer.publish({"hello2": "world2"})
    received = []

    def on_message(body, message):
        received.append(body)
        message.ack()

    consumer.register_callback(on_message)
    consumer.consume()
    self.assertIn(chan, chan.connection.cycle._channels)
    try:
        conn.drain_events(timeout=1)
        self.assertTrue(received)
        self.assertRaises(socket.timeout, conn.drain_events, timeout=0.01)
    finally:
        chan.close()
def setUp(self):
    """Each test gets a fresh connection carrying the extra transport options."""
    self.conn = BrokerConnection(transport=Transport,
                                 transport_options=self._extra_args,
                                 port=5672)
def create_resource(self, limit, preload):
    """Build a ChannelPool holding *limit* channels, *preload* opened eagerly."""
    connection = BrokerConnection(port=5672, transport=Transport)
    return connection.ChannelPool(limit, preload)
class test_MemoryTransport(unittest.TestCase):
    """Exercise produce/consume, delete/purge and drain_events on the memory transport."""

    def setUp(self):
        # one in-memory connection, one exchange, two queues bound by routing key
        self.c = BrokerConnection(transport="memory")
        self.e = Exchange("test_transport_memory")
        self.q = Queue("test_transport_memory",
                       exchange=self.e,
                       routing_key="test_transport_memory")
        self.q2 = Queue("test_transport_memory2",
                        exchange=self.e,
                        routing_key="test_transport_memory2")

    def test_produce_consume_noack(self):
        """With no_ack=True all published messages are delivered without acking."""
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer = Consumer(channel, self.q, no_ack=True)

        for i in range(10):
            producer.publish({"foo": i}, routing_key="test_transport_memory")

        _received = []

        def callback(message_data, message):
            _received.append(message)

        consumer.register_callback(callback)
        consumer.consume()

        # drain until all ten messages have arrived
        while 1:
            if len(_received) == 10:
                break
            self.c.drain_events()

        self.assertEqual(len(_received), 10)

    def test_produce_consume(self):
        """Messages route to the right queue; compression, delete and purge work."""
        channel = self.c.channel()
        producer = Producer(channel, self.e)
        consumer1 = Consumer(channel, self.q)
        consumer2 = Consumer(channel, self.q2)
        self.q2(channel).declare()

        for i in range(10):
            producer.publish({"foo": i}, routing_key="test_transport_memory")
        for i in range(10):
            producer.publish({"foo": i}, routing_key="test_transport_memory2")

        _received1 = []
        _received2 = []

        def callback1(message_data, message):
            _received1.append(message)
            message.ack()

        def callback2(message_data, message):
            _received2.append(message)
            message.ack()

        consumer1.register_callback(callback1)
        consumer2.register_callback(callback2)

        consumer1.consume()
        consumer2.consume()

        # drain until both consumers have everything
        while 1:
            if len(_received1) + len(_received2) == 20:
                break
            self.c.drain_events()

        self.assertEqual(len(_received1) + len(_received2), 20)

        # compression
        producer.publish({"compressed": True},
                         routing_key="test_transport_memory",
                         compression="zlib")
        m = self.q(channel).get()
        self.assertDictEqual(m.payload, {"compressed": True})

        # queue.delete
        for i in range(10):
            producer.publish({"foo": i}, routing_key="test_transport_memory")
        self.assertTrue(self.q(channel).get())
        self.q(channel).delete()
        self.q(channel).declare()
        self.assertIsNone(self.q(channel).get())

        # queue.purge
        for i in range(10):
            producer.publish({"foo": i}, routing_key="test_transport_memory2")
        self.assertTrue(self.q2(channel).get())
        self.q2(channel).purge()
        self.assertIsNone(self.q2(channel).get())

    def test_drain_events(self):
        """drain_events validates its arguments and honours the timeout."""
        self.assertRaises(ValueError, self.c.drain_events, timeout=0.1)

        c1 = self.c.channel()
        c2 = self.c.channel()

        self.assertRaises(socket.timeout, self.c.drain_events, timeout=0.1)
        del(c1)  # so pyflakes doesn't complain.
        del(c2)

    def test_drain_events_unregistered_queue(self):
        """A delivery for a queue nobody registered raises KeyError."""
        c1 = self.c.channel()

        class Cycle(object):
            def get(self):
                # fake a delivery for an unknown queue "foo"
                return ("foo", "foo"), c1

        self.c.transport.cycle = Cycle()
        self.assertRaises(KeyError, self.c.drain_events)
class test_Connection(unittest.TestCase):
    """Connection lifecycle, helper constructors and error metadata on the test transport."""

    def setUp(self):
        self.conn = BrokerConnection(port=5672, transport=Transport)

    def test_establish_connection(self):
        """connect() opens the transport connection; close() disconnects it."""
        conn = self.conn
        conn.connect()
        self.assertTrue(conn.connection.connected)
        self.assertEqual(conn.host, "localhost:5672")
        channel = conn.channel()
        self.assertTrue(channel.open)
        self.assertEqual(conn.drain_events(), "event")
        _connection = conn.connection
        conn.close()
        self.assertFalse(_connection.connected)
        self.assertIsInstance(conn.transport, Transport)

    def test__enter____exit__(self):
        """Context-manager protocol yields the connection and releases it on exit."""
        conn = self.conn
        context = conn.__enter__()
        self.assertIs(context, conn)
        conn.connect()
        self.assertTrue(conn.connection.connected)
        conn.__exit__()
        self.assertIsNone(conn.connection)
        conn.close()  # again

    def test_close_survives_connerror(self):
        """close() must swallow errors listed in transport.connection_errors."""
        class _CustomError(Exception):
            pass

        class MyTransport(Transport):
            connection_errors = (_CustomError, )

            def close_connection(self, connection):
                raise _CustomError("foo")

        conn = BrokerConnection(transport=MyTransport)
        conn.connect()
        conn.close()
        self.assertTrue(conn._closed)

    def test_ensure_connection(self):
        self.assertTrue(self.conn.ensure_connection())

    def test_SimpleQueue(self):
        """SimpleQueue auto-closes its own channel unless one is passed in."""
        conn = self.conn
        q = conn.SimpleQueue("foo")
        self.assertTrue(q.channel)
        self.assertTrue(q.channel_autoclose)
        chan = conn.channel()
        q2 = conn.SimpleQueue("foo", channel=chan)
        self.assertIs(q2.channel, chan)
        self.assertFalse(q2.channel_autoclose)

    def test_SimpleBuffer(self):
        """SimpleBuffer mirrors SimpleQueue's channel-ownership rules."""
        conn = self.conn
        q = conn.SimpleBuffer("foo")
        self.assertTrue(q.channel)
        self.assertTrue(q.channel_autoclose)
        chan = conn.channel()
        q2 = conn.SimpleBuffer("foo", channel=chan)
        self.assertIs(q2.channel, chan)
        self.assertFalse(q2.channel_autoclose)

    def test__repr__(self):
        self.assertTrue(repr(self.conn))

    def test__reduce__(self):
        # connections must survive a pickle round-trip with equal info()
        x = pickle.loads(pickle.dumps(self.conn))
        self.assertDictEqual(x.info(), self.conn.info())

    def test_channel_errors(self):
        """channel_errors is taken straight from the transport class."""
        class MyTransport(Transport):
            channel_errors = (KeyError, ValueError)

        conn = BrokerConnection(transport=MyTransport)
        self.assertTupleEqual(conn.channel_errors, (KeyError, ValueError))

    def test_connection_errors(self):
        """connection_errors is taken straight from the transport class."""
        class MyTransport(Transport):
            connection_errors = (KeyError, ValueError)

        conn = BrokerConnection(transport=MyTransport)
        self.assertTupleEqual(conn.connection_errors,
                              (KeyError, ValueError))
def setUp(self):
    """Each test gets a fresh connection on the test transport."""
    self.conn = BrokerConnection(transport=Transport, port=5672)
def _amqp_connection():
    """Build a fresh broker connection from the configured BROKER_URL."""
    broker_url = settings.BROKER_URL
    return BrokerConnection(broker_url)