@gen.engine
def on_channel_created(self, channel):
    # gen.engine is assumed here: the yield gen.Task calls below need a
    # generator-driving decorator to run under tornado
    AMQPClient.on_channel_created(self, channel)
    yield gen.Task(channel.exchange_declare,
                   exchange=options.rpc_exchange,
                   exchange_type='topic')
    yield gen.Task(channel.exchange_declare,
                   exchange=options.reports_exchange,
                   exchange_type='topic')
    yield gen.Task(channel.queue_declare, queue=options.reports_queue)
    log.debug("Exchanges for Manager are declared")
    # Bind with '#' so the manager receives reports from every node
    yield gen.Task(channel.queue_bind,
                   queue=options.reports_queue,
                   exchange=options.reports_exchange,
                   routing_key='#')
    log.debug("reports queue %s is bound" % options.reports_queue)
    self.channel.basic_consume(
        consumer_callback=self.get_consumer_callback(options.reports_queue),
        queue=options.reports_queue,
        no_ack=False)
def send_task(self, task):
    "Put task msg to rpc exchange"
    log.debug("Sending %s" % task.to_json())
    self.channel.basic_publish(
        body=task.to_json(),
        exchange=options.rpc_exchange,
        routing_key=task.node_oid,
        properties=pika.BasicProperties(content_type='application/json'))
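# Usage sketch (hypothetical task type, not defined in this module): any
# task that serializes to JSON and carries the oid of its target node can
# be dispatched this way; the topic exchange routes it to that node's
# rpc queue, which is bound with the node oid as routing key (see the
# node-side on_channel_created below).
#
#     task = PingTask(node_oid=node.oid)   # hypothetical Task subclass
#     manager.send_task(task)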
def on_mngr_msg(client, body, routing_key):
    from swarm.reports.base_report import BaseReport
    global CLIENT
    CLIENT = client
    entity = Entity.from_json(body)
    log.debug("got msg %s, client is %s" % (entity.__class__.__name__, CLIENT))
    if isinstance(entity, BaseReport):
        return on_report(entity)
def publish_entity(self, entity, exchange):
    self.channel.basic_publish(
        body=entity.to_json(),
        exchange=exchange,
        # delivery_mode=1 marks the message as non-persistent
        properties=pika.BasicProperties(
            content_type="application/json",
            delivery_mode=1),
        # "<node oid>.<entity class>" matches the manager's '#' binding
        # on the reports exchange
        routing_key="%s.%s" % (self.oid, entity.__class__.__name__))
    log.debug("%s is published" % entity.to_json())
def channel_created(client):
    log.debug("Channel %s to RabbitMQ is created" % client.channel)
    on_node_started(client)
    smanager = SubprocessManager(client)
    smanager.add_report(IFConfigReport, 30)
    smanager.add_report(BrctlShowReport, 300)
    smanager.add_report(DFReport, 300)
    smanager.start()
    # Re-announce the node every 15 seconds
    heartbeat = PeriodicCallback(partial(on_node_started, client), 15000)
    heartbeat.start()
    client.io_loop.add_timeout(time.time() + 15, partial(vm_inventory, client))
def load_fixtures(node_oid):
    "Load demo data for development"
    from swarm.scenarios import on_report
    from swarm.tests import fixtures
    from swarm.reports import (NodeOnlineReport, VmXMLReport,
                               IFConfigReport, BrctlShowReport, DFReport)
    log.debug('Loading test data')
    storage_oid = str(uuid.uuid4())
    on_report(NodeOnlineReport.create(
        node_oid,
        hostname='testhost',
        storages=[dict(storage_oid=storage_oid,
                       path='/home/vgdcloud/storage1')]))
    on_report(IFConfigReport.create(node_oid, raw_data=fixtures.IFCONFIG_DATA))
    on_report(BrctlShowReport.create(node_oid, raw_data=fixtures.BRCTL_SHOW_DATA))
    on_report(VmXMLReport.create(node_oid, raw_data=fixtures.LIBVIRT_XML))
    on_report(DFReport.create(node_oid, raw_data=fixtures.DF_RAW))
def wait_and_publish(self, proc, report_class, interval):
    log.debug("wait for %s to collect result" % report_class)
    returncode = proc.poll()
    if returncode is None:
        # Process still running: poll again in a second
        self.io_loop.add_timeout(
            time.time() + 1,
            partial(self.wait_and_publish, proc, report_class, interval))
        return
    if returncode == 0:
        data = proc.communicate()[0]
        self.report_done(report_class, data)
    else:
        err_data = proc.communicate()[1]
        self.report_failed(report_class, err_data)
    # Schedule the next collection cycle
    self.io_loop.add_timeout(
        time.time() + interval,
        partial(self.start_report_process, report_class, interval))
@gen.engine
def on_channel_created(self, channel):
    # gen.engine is assumed, as above, for the yield gen.Task calls
    self.channel = channel
    frame = yield gen.Task(channel.queue_declare,
                           exclusive=True, auto_delete=True)
    self.rpc_queue = frame.method.queue
    yield gen.Task(channel.queue_bind,
                   exchange=options.rpc_exchange,
                   routing_key=self.oid,
                   queue=self.rpc_queue)
    log.debug("rpc_queue %s for node %s is created" % (
        self.rpc_queue, self.oid))
    self.channel.basic_consume(self.get_consumer_callback(self.rpc_queue),
                               queue=self.rpc_queue)
    if self._on_channel_created:
        self._on_channel_created(self)
    else:
        on_node_started(self)
def vm_inventory(client):
    log.debug('Inventory VMs')
    task = VMInventoryTask(node_oid=client.oid)
    worker = TaskThreadWorker(client, task)
    worker.start()
def open(self, vm_uuid):
    "Browser connected"
    log.debug("Open web socket connection for %s" % vm_uuid)
    self.vm_uuid = vm_uuid
    self.stream = self.get_stream()
def start(self):
    "Start performing data collection and sending"
    for report_class, interval in self._reports.items():
        self.start_report_process(report_class, interval)
    log.debug("SubprocessManager started with %s reports" % (
        str(self._reports.keys())))
def start_report_process(self, report_class, interval):
    log.debug("Start reporting by %s" % report_class)
    proc = Popen(report_class.cmd, stderr=PIPE, stdout=PIPE)
    self.io_loop.add_callback(
        partial(self.wait_and_publish, proc, report_class, interval))
def report_done(self, report_class, data):
    log.debug("data collected for %s" % report_class)
    report = report_class.create(self.client.oid, raw_data=data)
    self.client.publish_report(report)
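# A minimal, self-contained sketch of the poll-and-reschedule pattern that
# start_report_process and wait_and_publish implement above (standalone
# names are assumed, not part of the project code): run a command without
# blocking the IOLoop and hand its output to a callback once it exits.
# Note that with PIPE a child producing very large output can fill the
# pipe and stall before exiting, so this suits short report commands.
import time
from subprocess import Popen, PIPE
from tornado.ioloop import IOLoop

def run_and_collect(cmd, on_done, io_loop=None):
    io_loop = io_loop or IOLoop.instance()
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE)

    def check():
        if proc.poll() is None:
            # Still running: look again in a second
            io_loop.add_timeout(time.time() + 1, check)
        else:
            out, err = proc.communicate()
            on_done(proc.returncode, out, err)

    io_loop.add_callback(check)

# Usage: run_and_collect(['df', '-h'], handle_df)   # handle_df(rc, out, err)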