def main():
    """Read JSON progress messages from stdin and republish them to AMQP."""
    if len(sys.argv) != 2:
        sys.stderr.write("Usage: %s <instance_name>\n" % PROGNAME)
        return 1
    instance_name = sys.argv[1]

    # WARNING: This assumes that instance names
    # are of the form prefix-id, and uses prefix to
    # determine the routekey for AMPQ
    prefix = instance_name.split('-')[0]
    route_key = "ganeti.%s.event.progress" % prefix

    client = AMQPClient(confirm_buffer=10)
    client.connect()
    client.exchange_declare(settings.EXCHANGE_GANETI, "topic")

    for msg in jsonstream(sys.stdin):
        msg['event_time'] = split_time(time.time())
        msg['instance'] = instance_name
        serialized = json.dumps(msg)
        # Log to stderr, then forward the same payload over AMQP.
        sys.stderr.write("[MONITOR] %s\n" % serialized)
        client.basic_publish(exchange=settings.EXCHANGE_GANETI,
                             routing_key=route_key,
                             body=serialized)

    client.close()
    return 0
def drain_queue(queue): """Strip a (declared) queue from all outstanding messages""" if not queue: return if not queue in queues.QUEUES: print "Queue %s not configured" % queue return print "Queue to be drained: %s" % queue if not get_user_confirmation(): return client = AMQPClient() client.connect() tag = client.basic_consume(queue=queue, callback=callbacks.dummy_proc) print "Queue draining about to start, hit Ctrl+c when done" time.sleep(2) print "Queue draining starting" num_processed = 0 while True: client.basic_wait() num_processed += 1 sys.stderr.write("Ignored %d messages\r" % num_processed) client.basic_cancel(tag) client.close()
def __init__(self, logger, cluster_name):
    """Set up AMQP connectivity and the Ganeti operation dispatch table.

    NOTE(review): assumes the enclosing class subclasses
    pyinotify.ProcessEvent — confirm against the class header.
    """
    pyinotify.ProcessEvent.__init__(self)
    self.logger = logger
    self.cluster_name = cluster_name

    # max_retries=0 requests unlimited connection retries.
    client = AMQPClient(hosts=settings.AMQP_HOSTS, confirm_buffer=25,
                        max_retries=0, logger=logger)
    logger.info("Attempting to connect to RabbitMQ hosts")
    client.connect()
    logger.info("Connected successfully")
    self.client = client

    self.ganeti_master = get_ganeti_master()
    logger.debug("Ganeti Master Node: %s", self.ganeti_master)
    self.ganeti_node = get_ganeti_node()
    logger.debug("Current Ganeti Node: %s", self.ganeti_node)

    # Report whether we are running on the cluster's master node.
    is_master = self.ganeti_master == self.ganeti_node
    logger.info("Checking if this is Ganeti Master of %s cluster: %s",
                self.cluster_name, "YES" if is_master else "NO")

    self.client.exchange_declare(settings.EXCHANGE_GANETI, type='topic')

    # Map Ganeti operation kinds to their handler methods.
    self.op_handlers = {
        "INSTANCE": self.process_instance_op,
        "NETWORK": self.process_network_op,
        "CLUSTER": self.process_cluster_op,
        # "GROUP": self.process_group_op,
        "TAGS": self.process_tag_op,
    }
def __init__(self, logger, cluster_name):
    """Connect to AMQP, declare the Ganeti exchange, build the op dispatch table.

    Args:
        logger: logger instance used for progress/diagnostic output.
        cluster_name: name of the Ganeti cluster this handler serves.
    """
    pyinotify.ProcessEvent.__init__(self)
    self.logger = logger
    self.cluster_name = cluster_name
    # Set max_retries to 0 for unlimited retries.
    self.client = AMQPClient(hosts=settings.AMQP_HOSTS, confirm_buffer=25,
                             max_retries=0, logger=logger)
    # BUG FIX: the original logged via the module-global 'handler_logger'
    # instead of the 'logger' argument it receives and stores on self,
    # inconsistent with the sibling implementation of this constructor.
    logger.info("Attempting to connect to RabbitMQ hosts")
    self.client.connect()
    logger.info("Connected successfully")  # typo fix: was "succesfully"
    self.client.exchange_declare(settings.EXCHANGE_GANETI, type='topic')
    # Map Ganeti operation kinds to their handler methods.
    self.op_handlers = {
        "INSTANCE": self.process_instance_op,
        "NETWORK": self.process_network_op,
        "CLUSTER": self.process_cluster_op,
        # "GROUP": self.process_group_op,
        "TAGS": self.process_tag_op,
    }
def handle(self, *args, **options):
    """Inspect messages on a queue, acking (removing) them unless requeued."""
    if len(args) != 1:
        raise CommandError("Please provide a queue")
    queue = args[0]
    interactive = options['interactive']
    requeue = options['requeue']

    client = AMQPClient()
    client.connect()
    printer = pprint.PrettyPrinter(indent=4, width=4, stream=self.stdout)

    count = 0
    separator = '-' * 80
    while True:
        msg = client.basic_get(queue=queue)
        if not msg:
            break
        count += 1
        self.stderr.write(separator + "\n")
        self.stderr.write('Message %d:\n' % count)
        self.stderr.write(separator + "\n")
        printer.pprint(msg)
        # Requeue mode leaves the message on the queue, unless running
        # interactively, where the user decides per message.
        if requeue and not interactive:
            continue
        if interactive and not get_user_confirmation():
            continue
        # Acknowledging the message will remove it from the queue
        client.basic_ack(msg)
def _init(self):
    """Declare all exchanges, queues and consumers used by the dispatcher.

    Declares the Ganeti exchange and its dead-letter counterpart, one
    mirrored queue (plus dead-letter queue) per configured queue, binds
    every queue to its callback handler, and finally sets up the
    per-process request queue used for status-check requests.
    """
    log.info("Initializing")

    # Set confirm buffer to 1 for heartbeat messages
    self.client = AMQPClient(logger=log_amqp, confirm_buffer=1)

    # Connect to AMQP host
    self.client.connect()

    # Declare queues and exchanges
    exchange = settings.EXCHANGE_GANETI
    exchange_dl = queues.convert_exchange_to_dead(exchange)
    self.client.exchange_declare(exchange=exchange, type="topic")
    self.client.exchange_declare(exchange=exchange_dl, type="topic")

    for queue in queues.QUEUES:
        # Queues are mirrored to all RabbitMQ brokers
        self.client.queue_declare(queue=queue, mirrored=True,
                                  dead_letter_exchange=exchange_dl)
        # Declare the corresponding dead-letter queue
        queue_dl = queues.convert_queue_to_dead(queue)
        self.client.queue_declare(queue=queue_dl, mirrored=True)

    # Bind queues to handler methods
    for binding in queues.BINDINGS:
        try:
            callback = getattr(callbacks, binding[3])
        except AttributeError:
            log.error("Cannot find callback %s", binding[3])
            raise SystemExit(1)
        queue = binding[0]
        exchange = binding[1]
        routing_key = binding[2]
        self.client.queue_bind(queue=queue, exchange=exchange,
                               routing_key=routing_key)
        self.client.basic_consume(queue=queue, callback=callback,
                                  prefetch_count=5)

        # Bind the corresponding dead-letter queue
        queue_dl = queues.convert_queue_to_dead(queue)
        exchange_dl = queues.convert_exchange_to_dead(exchange)
        self.client.queue_bind(queue=queue_dl, exchange=exchange_dl,
                               routing_key=routing_key)

        log.debug("Binding %s(%s) to queue %s with handler %s",
                  exchange, routing_key, queue, binding[3])

    # Declare the queue that will be used for receiving requests, e.g. a
    # status check request
    hostname, pid = get_hostname(), os.getpid()
    queue = queues.get_dispatcher_request_queue(hostname, pid)
    self.client.queue_declare(queue=queue, mirrored=True,
                              ttl=REQUEST_QUEUE_TTL)
    self.client.basic_consume(queue=queue, callback=handle_request)
    # BUG FIX: the original log line said 'hadle_request' (typo) and
    # reported the stale 'exchange'/'routing_key' values left over from
    # the binding loop above; the request queue is consumed directly and
    # is not bound to that exchange.
    log.debug("Consuming from queue %s with handler 'handle_request'",
              queue)
def __init__(self, **params):
    """Connect to AMQP and declare the topic exchange used for publishing.

    Expects 'hosts', 'exchange' and 'client_id' keys in params.
    """
    hosts = params['hosts']
    self.exchange = params['exchange']
    self.client_id = params['client_id']
    # Open the broker connection and make sure the exchange exists.
    self.client = AMQPClient(hosts=hosts)
    self.client.connect()
    self.client.exchange_declare(exchange=self.exchange, type='topic')
def _init(self):
    """Connect to AMQP and declare every exchange, queue and consumer."""
    log.info("Initializing")

    self.client = AMQPClient(logger=log_amqp)
    # Connect to AMQP host
    self.client.connect()

    # Primary exchange and its dead-letter counterpart.
    exchange = settings.EXCHANGE_GANETI
    exchange_dl = queues.convert_exchange_to_dead(exchange)
    self.client.exchange_declare(exchange=exchange, type="topic")
    self.client.exchange_declare(exchange=exchange_dl, type="topic")

    # Mirrored queue plus matching dead-letter queue for each configured
    # queue. Queues are mirrored to all RabbitMQ brokers.
    for q in queues.QUEUES:
        self.client.queue_declare(queue=q, mirrored=True,
                                  dead_letter_exchange=exchange_dl)
        self.client.queue_declare(queue=queues.convert_queue_to_dead(q),
                                  mirrored=True)

    # Wire each configured binding to its callback handler.
    for binding in queues.BINDINGS:
        handler_name = binding[3]
        try:
            callback = getattr(callbacks, handler_name)
        except AttributeError:
            log.error("Cannot find callback %s", handler_name)
            raise SystemExit(1)

        queue, exchange, routing_key = binding[0], binding[1], binding[2]
        self.client.queue_bind(queue=queue, exchange=exchange,
                               routing_key=routing_key)
        self.client.basic_consume(queue=queue, callback=callback,
                                  prefetch_count=5)

        # Mirror the binding on the dead-letter side.
        queue_dl = queues.convert_queue_to_dead(queue)
        exchange_dl = queues.convert_exchange_to_dead(exchange)
        self.client.queue_bind(queue=queue_dl, exchange=exchange_dl,
                               routing_key=routing_key)

        log.debug("Binding %s(%s) to queue %s with handler %s",
                  exchange, routing_key, queue, handler_name)
def purge_exchanges(): """Delete declared exchanges from RabbitMQ, after removing all queues""" purge_queues() client = AMQPClient() client.connect() exchanges = queues.EXCHANGES print "Exchanges to be deleted: ", exchanges if not get_user_confirmation(): return for exch in exchanges: result = client.exchange_delete(exchange=exch) print "Deleting exchange %s. Result: %s" % (exch, result) client.close()
def purge_queues(): """ Delete declared queues from RabbitMQ. Use with care! """ client = AMQPClient(max_retries=120) client.connect() print "Queues to be deleted: ", queues.QUEUES if not get_user_confirmation(): return for queue in queues.QUEUES: result = client.queue_delete(queue=queue) print "Deleting queue %s. Result: %s" % (queue, result) client.close()
def handle(self, *args, **options):
    """Drain every dead-letter queue, passing each message to handle_message."""
    verbose = (options["verbosity"] == "2")
    self.keep_zombies = options["keep_zombies"]
    log.setLevel(logging.DEBUG if verbose else logging.WARNING)

    client = AMQPClient(confirms=False)
    client.connect()
    self.client = client

    for queue in queues.QUEUES:
        dead_queue = queues.convert_queue_to_dead(queue)
        # Pull messages until the dead-letter queue is empty.
        message = client.basic_get(dead_queue)
        while message:
            log.debug("Received message %s", message)
            self.handle_message(message)
            message = client.basic_get(dead_queue)

    client.close()
    return 0
def check_dispatcher_status(pid_file):
    """Check the status of a running snf-dispatcher process.

    Check the status of a running snf-dispatcher process, the PID of which
    is contained in the 'pid_file'. This function will send a 'status-check'
    message to the running snf-dispatcher, wait for dispatcher's response
    and pretty-print the results.
    """
    dispatcher_pid = pidlockfile.read_pid_from_pidfile(pid_file)
    if dispatcher_pid is None:
        sys.stdout.write("snf-dispatcher with PID file '%s' is not running."
                         " PID file does not exist\n" % pid_file)
        sys.exit(1)
    sys.stdout.write("snf-dispatcher (PID: %s): running\n" % dispatcher_pid)

    hostname = get_hostname()
    # Reply queue unique to this check-tool invocation.
    local_queue = "snf:check_tool:%s:%s" % (hostname, os.getpid())
    dispatcher_queue = queues.get_dispatcher_request_queue(hostname,
                                                           dispatcher_pid)
    log_amqp.setLevel(logging.WARNING)
    try:
        client = AMQPClient(logger=log_amqp)
        client.connect()
        client.queue_declare(queue=local_queue, mirrored=False,
                             exclusive=True)
        client.basic_consume(queue=local_queue, callback=lambda x, y: 0,
                             no_ack=True)
        msg = json.dumps({"action": "status-check", "reply_to": local_queue})
        client.basic_publish("", dispatcher_queue, msg)
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed
        # SystemExit/KeyboardInterrupt.
        sys.stdout.write("Error while connecting with AMQP\nError:\n")
        traceback.print_exc()
        sys.exit(1)

    # First wait for the dispatcher to acknowledge the request.
    sys.stdout.write("AMQP -> snf-dispatcher: ")
    msg = client.basic_wait(timeout=CHECK_TOOL_ACK_TIMEOUT)
    if msg is None:
        sys.stdout.write("fail\n")
        # Typo fix: was "snf-dipatcher".
        sys.stdout.write("ERROR: No reply from snf-dispatcher after '%s'"
                         " seconds.\n" % CHECK_TOOL_ACK_TIMEOUT)
        sys.exit(1)
    else:
        try:
            body = json.loads(msg["body"])
            assert (body["action"] == "status-check"), "Invalid action"
            assert (body["status"] == "started"), "Invalid status"
            sys.stdout.write("ok\n")
        except Exception as e:
            sys.stdout.write("Received invalid msg from snf-dispatcher:"
                             " msg: %s error: %s\n" % (msg, e))
            sys.exit(1)

    # Then wait for the full per-backend status report.
    msg = client.basic_wait(timeout=CHECK_TOOL_REPORT_TIMEOUT)
    if msg is None:
        sys.stdout.write("fail\n")
        # Typo fix: was "status repot".
        sys.stdout.write("ERROR: No status report after '%s'"
                         " seconds.\n" % CHECK_TOOL_REPORT_TIMEOUT)
        sys.exit(1)

    sys.stdout.write("Backends:\n")
    status = json.loads(msg["body"])["status"]
    for backend, bstatus in sorted(status.items()):
        sys.stdout.write(" * %s: \n" % backend)
        sys.stdout.write(" snf-dispatcher -> ganeti: %s\n"
                         % bstatus["RAPI"])
        sys.stdout.write(" snf-ganeti-eventd -> AMQP: %s\n"
                         % bstatus["eventd"])
    sys.exit(0)
def main():
    """Consume messages from a RabbitMQ broker, or publish a test message."""
    parser = OptionParser()
    parser.add_option('-v', '--verbose', action='store_true', default=False,
                      dest='verbose', help='Enable verbose logging')
    parser.add_option('--host', default=BROKER_HOST, dest='host',
                      help='RabbitMQ host (default: %s)' % BROKER_HOST)
    parser.add_option('--port', default=BROKER_PORT, dest='port',
                      help='RabbitMQ port (default: %s)' % BROKER_PORT,
                      type='int')
    parser.add_option('--user', default=BROKER_USER, dest='user',
                      help='RabbitMQ user (default: %s)' % BROKER_USER)
    parser.add_option('--password', default=BROKER_PASSWORD, dest='password',
                      help='RabbitMQ password (default: %s)' % BROKER_PASSWORD)
    parser.add_option('--vhost', default=BROKER_VHOST, dest='vhost',
                      help='RabbitMQ vhost (default: %s)' % BROKER_VHOST)
    parser.add_option('--queue', default=CONSUMER_QUEUE, dest='queue',
                      help='RabbitMQ queue (default: %s)' % CONSUMER_QUEUE)
    parser.add_option('--exchange', default=CONSUMER_EXCHANGE,
                      dest='exchange',
                      help='RabbitMQ exchange (default: %s)'
                           % CONSUMER_EXCHANGE)
    parser.add_option('--key', default=CONSUMER_KEY, dest='key',
                      help='RabbitMQ key (default: %s)' % CONSUMER_KEY)
    parser.add_option('--callback', default=None, dest='callback',
                      help='Callback function to consume messages')
    parser.add_option('--test', action='store_true', default=False,
                      dest='test', help='Produce a dummy message for testing')
    opts, args = parser.parse_args()

    # --verbose switches the root logging level to DEBUG.
    debug = bool(opts.verbose)
    logging.basicConfig(
        format='%(asctime)s [%(levelname)s] %(name)s %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        level=logging.DEBUG if debug else logging.INFO)
    logger = logging.getLogger('dispatcher')

    broker_url = 'amqp://%s:%s@%s:%s' % (opts.user, opts.password,
                                         opts.host, opts.port)
    queue = opts.queue
    key = opts.key
    exchange = opts.exchange

    client = AMQPClient(hosts=[broker_url])
    client.connect()

    if opts.test:
        # Publish a single dummy message and exit immediately.
        client.exchange_declare(exchange=exchange, type='topic')
        client.basic_publish(exchange=exchange, routing_key=key,
                             body=json.dumps({"test": "0123456789"}))
        client.close()
        sys.exit()

    # Resolve an optional dotted-path callback, e.g. "pkg.mod.func".
    callback = None
    if opts.callback:
        parts = opts.callback.rsplit('.', 1)
        if len(parts) == 2:
            __import__(parts[0])
            callback = getattr(sys.modules[parts[0]], parts[1])

    def handle_message(client, msg):
        # Hand each message to the callback (if any), then acknowledge it.
        logger.debug('%s', msg)
        if callback:
            callback(msg)
        client.basic_ack(msg)

    client.queue_declare(queue=queue)
    client.queue_bind(queue=queue, exchange=exchange, routing_key=key)
    client.basic_consume(queue=queue, callback=handle_message)

    try:
        while True:
            client.basic_wait()
    except KeyboardInterrupt:
        pass
    finally:
        client.close()