def start(self): """ Enable AMQP queueing. This method puts up the event processor and sets it to "active". """ self.log.debug("enabling AMQP queueing") # Evaluate username user = self.env.config.get("amqp.id", default=None) if not user: user = self.env.uuid # Create initial broker connection url = "%s:%s" % (self.url['host'], self.url['port']) self._conn = Connection.establish(url, reconnect=self.reconnect, username=user, password=self.env.config.get("amqp.key"), transport=self.url['transport'], reconnect_interval=self.reconnect_interval, reconnect_limit=self.reconnect_limit) # Do automatic broker failover if requested if self.env.config.get('amqp.failover', default=False): auto_fetch_reconnect_urls(self._conn) # Create event provider self._eventProvider = EventProvider(self.env, self._conn)
def checkAuth(self, user, password): """ This function checks a username / password combination using the AMQP service' SASL configuration. =============== ============ Parameter Description =============== ============ user Username password Password =============== ============ ``Return:`` Bool, success or failure """ # Strip username/password parts of url url = "%s:%s" % (self.url['host'], self.url['port']) # Don't allow blank authentication if user == "" or password == "": return False try: conn = Connection.establish(url, transport=self.url['transport'], username=user, password=password) conn.close() except ConnectionError as e: self.log.debug("AMQP service authentication reports: %s" % str(e)) return False except Exception as e: self.log.critical("cannot proceed with authentication") self.log.exception(e) return False return True
def checkOverStockedQueues(host): connection = Connection.establish(host) broker = BrokerAgent(connection) queues = broker.getAllQueues() result = list() for q in queues: if (q.msgDepth != 0): print(q.name + " " + str(q.msgDepth)) result.append([q.name, q.msgDepth]) return result
def __init__(self, url, receiver_name, sender_name='pulp.task', asserting=False, **options):
    """Connect to *url* and wire up one session with a receiver and a sender.

    Extra keyword arguments are forwarded to Connection.establish().
    """
    # Plain bookkeeping state
    self.url = url
    self.receiver_name = receiver_name
    self.sender_name = sender_name
    self._asserting = asserting
    self.last_sent = None
    self.last_fetched = None
    self._timeout = None

    # AMQP plumbing: one session, a receiver on our own (auto-created)
    # queue, and a sender towards the task queue
    connection = Connection.establish(url, **options)
    self.session = connection.session()
    self.receiver = self.session.receiver("%s; {create: always}" % receiver_name)
    self.sender = self.session.sender(sender_name)
def __init__(self, url, domain="org.clacks", xquery=".", callback=None): # Build connection url = parseURL(url) _url = "%s:%s" % (url['host'], url['port']) self.__conn = Connection.establish(_url, reconnect=True, username=url['user'], password=url['password'], transport=url['transport'], reconnect_interval=3, reconnect_limit=0) # Do automatic broker failover if requested #TODO: configure reconnect #auto_fetch_reconnect_urls(self.__conn) # Assemble subscription query queue = 'event-listener-%s' % uuid4() address = """%s; { create: always, delete:always, node: { durable: False, x-declare: { exclusive: True, auto-delete: True } }, link: { x-bindings: [ { exchange: '%s', queue: %s, key: event, arguments: { xquery: %r} } ] } }""" % (queue, domain, queue, xquery) # Add processor for core.event queue self.__callback = callback self.__eventWorker = AMQPStandaloneWorker( self.__conn, r_address=address, workers=1, callback=self.__eventProcessor)
def start(self): """ Enable AMQP queueing. This method puts up the event processor and sets it to "active". """ self.log.debug("enabling AMQP queueing") # Evaluate username user = self.config.get("amqp.id", default=None) if not user: user = self.env.uuid password = self.config.get("amqp.key") # Create initial broker connection url = "%s:%s" % (self.url['host'], self.url['port']) self._conn = Connection.establish(url, reconnect=self.reconnect, username=user, password=password, transport=self.url['transport'], reconnect_interval=self.reconnect_interval, reconnect_limit=self.reconnect_limit) # Do automatic broker failover if requested if self.config.get('amqp.failover', False): auto_fetch_reconnect_urls(self._conn) # Create event exchange socket = connect(self.url['host'], self.url['port']) if self.url['scheme'][-1] == 's': socket = ssl(socket) user = self.config.get("amqp.id", default=None) if not user: user = self.env.uuid connection = DirectConnection(sock=socket, username=user, password=self.config.get("amqp.key")) connection.start() session = connection.session(str(uuid4())) # pylint: disable=E1103 session.exchange_declare(exchange=self.env.domain, type="xml") connection.close() # Create event provider self._eventProvider = EventProvider(self.env, self.getConnection())
def __init__(self, url, receiver_name, sender_name='pulp.task', asserting=False, auth=None, **options):
    '''establishes a connection to given url; initializes session, sender and receiver

    Fix: the default was previously ``auth=Authenticator()``, which is
    evaluated once at function-definition time, so every instance silently
    shared the very same Authenticator object. ``None`` now acts as a
    sentinel and each instance gets its own fresh Authenticator; callers
    that passed an explicit authenticator are unaffected.
    '''
    self.url = url
    self.receiver_name = receiver_name
    self.sender_name = sender_name
    self._asserting = asserting
    self.last_sent = None
    self.last_fetched = None
    # One session carrying both directions of the agent conversation
    self.session = Connection.establish(self.url, **options).session()
    self.receiver = self.session.receiver(
        "pulp.agent.%s; {create: always}" % self.receiver_name)
    self.sender = self.session.sender(self.sender_name)
    self._timeout = None
    self.auth = auth if auth is not None else Authenticator()
def get_args():
    # Parse the command line for this exchange-dumping tool.
    parser = argparse.ArgumentParser(description="print messages sent to a qpid exchange")
    parser.add_argument("-e", "--exchange", help="name of a qpid exchange (default: amq.topic)", default="amq.topic")
    parser.add_argument("-s", "--subject", help="message subject to bind to", required=False)
    parser.add_argument("-a", "--address", help="hostname to connect to (default:localhost)", default="localhost")
    parser.add_argument("-p", "--port", help="port to connect to (default: 5672)", default="5672")
    parser.add_argument("-q", "--quiet", help="show message subject only", default=False, action="store_true")
    return parser.parse_args()

args = get_args()

# Build the receiver source: "exchange" or "exchange/subject" when a
# subject filter was requested.
source = args.exchange
if args.subject:
    source = "%s/%s" % (source, args.subject)

receiver = Connection.establish("%s:%s" % (args.address, args.port)).session().receiver(source)

# Dump messages forever until the user hits Ctrl-C. (Python 2 syntax:
# print is a statement here.)
try:
    while True:
        message = receiver.fetch()
        if args.quiet:
            print message.subject
        else:
            print "------------------"
            print message
except KeyboardInterrupt:
    # Finish with a clean newline instead of a traceback on Ctrl-C
    print ""
consumers = get('/consumers/') return [ c['id'] for c in consumers if c['notes'].get('_child-node', False) ] def get_nodes_repos(node): " Returns a child node's 'bound' repos." bindings = get("/consumers/%s/bindings/" % (node)) return [ repo['repo_id'] for repo in bindings ] if __name__ == '__main__': args = get_args() # qpid connection address = "%s:%s" % (args.host, args.port) receiver = Connection.establish(address).session().receiver(args.exchange) try: while True: message = receiver.fetch() json_message = json.loads(message.content) if json_message['payload']['result'] == 'success': repo_id = json_message['payload']['repo_id'] else: continue nodes = get_nodes() for node in nodes: repos = get_nodes_repos(node) if repo_id in repos:
def __init__(self, address, **kwargs):
    """Establish an admin connection to the HA broker at *address*.

    Extra keyword arguments are forwarded to Connection.establish().
    """
    admin_props = {"qpid.ha-admin": 1}
    self._connection = Connection.establish(address, client_properties=admin_props, **kwargs)
    self._agent = BrokerAgent(self._connection)
def __init__(self, serviceURL, serviceAddress=None, serviceName=None, conn=None, worker=None, methods=None):
    """
    Build an AMQP service proxy with a pool of pre-instantiated sessions.

    serviceURL      broker URL, parsed with parseURL()
    serviceAddress  explicit target queue; derived from the URL's domain
                    (path) when omitted
    serviceName     method name used by __call__; normally resolved later
    conn            optional pre-established qpid Connection to reuse
    worker          optional pre-built worker/session pool
    methods         optional pre-fetched method dictionary (skips the
                    initial getMethods round trip)

    Raises AMQPException when neither serviceAddress nor a domain is given.
    """
    self.__URL = url = parseURL(serviceURL)
    self.__serviceURL = serviceURL
    self.__serviceName = serviceName
    self.__serviceAddress = serviceAddress
    self.__worker = worker
    self.__domain = url['path']
    self.__methods = methods

    # Prepare AMQP connection if not already there
    if not conn:
        _url = "%s:%s" % (url['host'], url['port'])
        # reconnect_limit=0 means unlimited reconnect attempts
        conn = Connection.establish(_url, reconnect=True,
            username=url['user'],
            password=url['password'],
            transport=url['transport'],
            reconnect_interval=3,
            reconnect_limit=0)
        #TODO: configure reconnect
        #auto_fetch_reconnect_urls(conn)

    # Prefill __serviceAddress correctly if domain is given
    if self.__domain:
        self.__serviceAddress = '%s.command.core' % self.__domain

    if not self.__serviceAddress:
        raise AMQPException("no serviceAddress or domain specified")

    if not self.__worker:
        self.__worker = {self.__serviceAddress: {}}

    # Pre instanciate core sessions, one sender/receiver pair per worker
    for i in range(0, WORKERS):
        ssn = conn.session(str(uuid4()))
        self.__worker[self.__serviceAddress][i] = {
            'ssn': ssn,
            'sender': ssn.sender(self.__serviceAddress),
            'receiver': ssn.receiver('reply-%s; {create:always, delete:always, node: { type: queue, durable: False, x-declare: { exclusive: False, auto-delete: True } }}' % ssn.name),
            'locked': False}

    # Store connection
    self.__conn = conn
    self.__ssn = None
    self.__sender = None
    self.__receiver = None
    self.__sess = None

    # Retrieve methods via a temporary "getMethods" call if not supplied
    if not self.__methods:
        self.__serviceName = "getMethods"
        self.__methods = self.__call__()
        self.__serviceName = None

    # If we've no direct queue, we need to push to different queues
    if self.__domain:
        queues = set([x['target'] for x in self.__methods.itervalues() if x['target'] != 'core'])

        # Pre instanciate queue sessions
        for queue in queues:
            # Fix: initialize the per-queue slot once, *before* the worker
            # loop. The original reset self.__worker[queue] = {} inside the
            # inner loop, discarding every worker but the last one.
            self.__worker[queue] = {}
            for i in range(0, WORKERS):
                ssn = conn.session(str(uuid4()))
                self.__worker[queue][i] = {
                    'ssn': ssn,
                    'sender': ssn.sender("%s.command.%s" % (self.__domain, queue)),
                    'receiver': ssn.receiver('reply-%s; {create:always, delete:always, node: { type: queue, durable: False, x-declare: { exclusive: False, auto-delete: True } }}' % ssn.name),
                    'locked': False}
from mock import MagicMock from mock import patch except ImportError: print 'Cannot run test without python MagicMock' print 'Please install MagicMock: pip install mock' exit(3) connection = None broker = None try: logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) logger = logging.getLogger(__name__) # setup broker connection connection = Connection.establish('127.0.0.1') broker = BrokerAgent(connection) # add test service busname busname = 'test-lofarbus-%s' % (uuid.uuid1()) broker.addExchange('topic', busname) # the system under test is the service and the rpc, not the RADatabase # so, patch (mock) the RADatabase class during these tests. # when the service instantiates an RADatabase it will get the mocked class. with patch('lofar.sas.resourceassignment.database.radb.RADatabase', autospec=True) as MockRADatabase: mock = MockRADatabase.return_value # modify the return values of the various RADatabase methods with pre-cooked answers mock.getTaskStatuses.return_value = [{'id': 1, 'name': 'opened'}, {'id': 2, 'name': 'scheduled'}] mock.getTaskTypes.return_value = [{'id': 0, 'name': 'OBSERVATION'}, {'id': 1, 'name': 'PIPELINE'}] mock.getResourceClaimStatuses.return_value = [{'id': 0, 'name': 'CLAIMED'},{'id': 1, 'name': 'ALLOCATED'},{'id': 2, 'name': 'CONFLICT'}]
def __init__(self, address, **kwargs):
    """Connect to the broker at *address* as a qpid HA admin client.

    Extra keyword arguments are forwarded to Connection.establish().
    """
    self._connection = Connection.establish(address,
        client_properties={"qpid.ha-admin": 1}, **kwargs)
    agent = BrokerAgent(self._connection)
    self._agent = agent
    # Abort construction when the target broker has no HA module loaded
    assert agent.getHaBroker(), "HA module not loaded in broker at: %s" % (address)
def test_08_integration_test_with_messagebus(self):
    """ Full blown integration test listening for notifications on the bus,
    and checking which dir is up for a visit next.
    Needs a working local qpid broker. Test is skipped if qpid not available.
    """
    try:
        # Predeclare so the finally-block can safely test them even when
        # the imports below fail.
        broker = None
        connection = None

        # Imports live inside the test so a missing qpid stack only skips
        # this test (via the except ImportError below) instead of breaking
        # the whole module.
        import uuid
        from threading import Event
        from qpid.messaging import Connection, ConnectError
        from qpidtoollibs import BrokerAgent
        from lofar.messaging.messagebus import ToBus
        from lofar.messaging.messages import EventMessage
        from lofar.lta.ingest.common.config import DEFAULT_INGEST_NOTIFICATION_PREFIX

        # setup broker connection (local broker assumed)
        connection = Connection.establish('127.0.0.1')
        broker = BrokerAgent(connection)

        # add test service bus with a unique name so parallel runs don't clash
        busname = 'test-LTASOIngestEventHandler-%s' % (uuid.uuid1())
        broker.addExchange('topic', busname)

        # Event used to synchronize the listener thread with this test thread
        sync_event = Event()

        class SyncedLTASOIngestEventHandler(LTASOIngestEventHandler):
            """This derived LTASOIngestEventHandler behaves exactly like the
            normal object under test LTASOIngestEventHandler, but it also
            sets a sync_event to sync between the listener thread and this
            main test thread"""
            def _handleMessage(self, msg):
                super(SyncedLTASOIngestEventHandler, self)._handleMessage(msg)
                sync_event.set()

        with SyncedLTASOIngestEventHandler(self.dbcreds, busname=busname):
            for site in self.db.sites():
                for root_dir in self.db.rootDirectoriesForSite(site['id']):
                    # Reset state: everything looks recently visited, so any
                    # newly-scheduled dir must stand out afterwards.
                    self._markAllDirectoriesRecentlyVisited()

                    # create the subdir surl
                    sub_dir_name = '/foo'
                    sub_dir_path = root_dir['dir_name'] + sub_dir_name
                    surl = site['url'] + sub_dir_path

                    # Publish a TaskFinished event for the new surl
                    with ToBus(busname) as sender:
                        msg = EventMessage(
                            subject=DEFAULT_INGEST_NOTIFICATION_PREFIX + "TaskFinished",
                            content={'srm_url': surl})
                        sender.send(msg)

                    # wait for the handler to have processed the message
                    self.assertTrue(sync_event.wait(2))
                    sync_event.clear()

                    # surl should have been scheduled for a visit, all other dir's were marked as visited already...
                    # so there should be a new dir for this surl, and it should be the least_recent_visited_dir
                    site_visit_stats = self.db.visitStats(
                        datetime.utcnow())[site['name']]

                    least_recent_visited_dir_id = site_visit_stats.get(
                        'least_recent_visited_dir_id')
                    self.assertIsNotNone(least_recent_visited_dir_id)

                    least_recent_visited_dir = self.db.directory(
                        least_recent_visited_dir_id)
                    self.assertEqual(sub_dir_path,
                                     least_recent_visited_dir['dir_name'])

    except ImportError as e:
        # qpid / lofar messaging stack not installed: skip silently
        logger.warning("skipping test due to: %s", e)
    except ConnectError as e:
        # no local broker running: skip silently
        logger.warning("skipping test due to: %s", e)
    finally:
        # cleanup test bus and exit
        if broker:
            broker.delExchange(busname)
        if connection:
            connection.close()
def SetBroker(self, brokerUrl):
    """Point this object at *brokerUrl* and (re)establish the connection.

    Uses the module-level conn_options for Connection.establish().
    """
    self.url = brokerUrl
    conn = Connection.establish(brokerUrl, **conn_options)
    self.connection = conn
    self.broker = BrokerAgent(conn)
'--port', help='port to connect to (default: 5672)', default='5672') parser.add_argument('-q', '--quiet', help='show message subject only', default=False, action='store_true') return parser.parse_args() args = get_args() source = args.exchange if args.subject: source = '%s/%s' % (source, args.subject) receiver = Connection.establish( '%s:%s' % (args.address, args.port)).session().receiver(source) try: while True: message = receiver.fetch() if args.quiet: print message.subject else: print '------------------' print message except KeyboardInterrupt: print ''
def __init__(self, address, **kwargs):
    """Open an HA-admin connection to *address* and verify that the broker
    actually runs the HA module."""
    props = {"qpid.ha-admin": 1}
    conn = Connection.establish(address, client_properties=props, **kwargs)
    self._connection = conn
    self._agent = BrokerAgent(conn)
    ha_broker = self._agent.getHaBroker()
    assert ha_broker, "HA module not loaded in broker at: %s" % (address)
from mock import patch except ImportError: print 'Cannot run test without python MagicMock' print 'Please install MagicMock: pip install mock' exit(3) connection = None broker = None try: logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) logger = logging.getLogger(__name__) # setup broker connection connection = Connection.establish('127.0.0.1') broker = BrokerAgent(connection) # add test service busname busname = 'test-lofarbus-%s' % (uuid.uuid1()) broker.addExchange('topic', busname) # the system under test is the service and the rpc, not the RADatabase # so, patch (mock) the RADatabase class during these tests. # when the service instantiates an RADatabase it will get the mocked class. with patch('lofar.sas.resourceassignment.database.radb.RADatabase', autospec=True) as MockRADatabase: mock = MockRADatabase.return_value # modify the return values of the various RADatabase methods with pre-cooked answers mock.getTaskStatuses.return_value = [{ 'id': 1,
def __init__(self, address, **kwargs):
    """Create an HA-admin broker connection plus a management agent for it.

    Any additional keyword arguments go straight to Connection.establish().
    """
    conn = Connection.establish(address, client_properties={"qpid.ha-admin":1}, **kwargs)
    self._connection = conn
    self._agent = BrokerAgent(conn)