class Consumer(kombu.mixins.ConsumerMixin):
    """Generic topic-exchange consumer that persists raw notifications.

    Binds the queues described by ``topics`` to ``exchange``, saves each
    message through ``views.process_raw_data`` and dispatches it to the
    matching entry in ``POST_PROCESS_METHODS``.  Periodically logs process
    memory growth per message batch.
    """

    def __init__(self, name, connection, deployment, durable,
                 queue_arguments, exchange, topics, connect_max_retries=10):
        self.connect_max_retries = connect_max_retries
        self.retry_attempts = 0
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
        self.queue_arguments = queue_arguments
        self.name = name
        self.last_time = None
        self.pmi = None
        self.processed = 0          # messages since the last memory report
        self.total_processed = 0    # messages over the consumer's lifetime
        self.topics = topics
        self.exchange = exchange
        # Allow a clean stop on process termination.
        # NOTE(review): self._shutdown is not defined in this class as shown
        # here -- presumably provided elsewhere; confirm before relying on it.
        signal.signal(signal.SIGTERM, self._shutdown)

    def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
        """Build an exchange of the given type via message_service."""
        return message_service.create_exchange(name, exchange_type=type,
                                               exclusive=exclusive,
                                               durable=self.durable,
                                               auto_delete=auto_delete)

    def _create_queue(self, name, nova_exchange, routing_key,
                      exclusive=False, auto_delete=False):
        """Build a queue bound to nova_exchange with routing_key.

        Bug fix: ``auto_delete`` and ``exclusive`` were previously swapped
        when forwarded to message_service.create_queue.
        """
        return message_service.create_queue(
            name, nova_exchange, durable=self.durable,
            auto_delete=auto_delete, exclusive=exclusive,
            queue_arguments=self.queue_arguments,
            routing_key=routing_key)

    def get_consumers(self, Consumer, channel):
        """ConsumerMixin hook: one consumer covering every configured topic."""
        exchange = self._create_exchange(self.exchange, "topic")
        queues = [self._create_queue(topic['queue'], exchange,
                                     topic['routing_key'])
                  for topic in self.topics]
        return [Consumer(queues=queues, callbacks=[self.on_nova])]

    def _process(self, message):
        """Persist one raw notification, ack it, then post-process it."""
        routing_key = message.delivery_info['routing_key']
        body = str(message.body)
        args = (routing_key, json.loads(body))
        asJson = json.dumps(args)

        # save raw and ack the message
        raw, notif = views.process_raw_data(
            self.deployment, args, asJson, self.exchange)
        self.processed += 1
        message.ack()
        POST_PROCESS_METHODS[raw.get_name()](raw, notif)

        self._check_memory()

    def _check_memory(self):
        """Log RAM growth, at most once per ~30 seconds of activity."""
        if not self.pmi:
            self.pmi = ProcessMemoryInfo()
            self.last_vsz = self.pmi.vsz
            self.initial_vsz = self.pmi.vsz

        utc = datetime.datetime.utcnow()
        check = self.last_time is None
        if self.last_time:
            diff = utc - self.last_time
            if diff.seconds > 30:
                check = True
        if check:
            self.last_time = utc
            self.pmi.update()
            diff = (self.pmi.vsz - self.last_vsz) / 1000
            idiff = (self.pmi.vsz - self.initial_vsz) / 1000
            self.total_processed += self.processed
            per_message = 0
            if self.total_processed:
                per_message = idiff / self.total_processed
            _get_child_logger().debug("%20s %20s %6dk/%6dk ram, "
                                      "%3d/%4d msgs @ %6dk/msg" %
                                      (self.name, self.exchange, diff, idiff,
                                       self.processed, self.total_processed,
                                       per_message))
            self.last_vsz = self.pmi.vsz
            self.processed = 0

    def on_nova(self, body, message):
        """Callback for incoming messages; logs and re-raises on failure."""
        try:
            self._process(message)
        except Exception as e:
            # Bug fix: log the raw body directly instead of re-parsing it
            # with json.loads -- a malformed body would raise again inside
            # this handler and mask the original error.
            _get_child_logger().debug("Problem: %s\nFailed message body:\n%s"
                                      % (e, str(message.body)))
            raise
class NovaConsumer(kombu.mixins.ConsumerMixin):
    """Consume nova notifications from the monitor.info/monitor.error queues.

    Each message is persisted through ``views.process_raw_data``, acked, and
    handed to ``views.post_process``; memory growth is logged periodically.
    """

    def __init__(self, name, connection, deployment, durable,
                 queue_arguments):
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
        self.queue_arguments = queue_arguments
        self.name = name
        self.last_time = None
        self.pmi = None
        self.processed = 0          # messages since the last memory report
        self.total_processed = 0    # messages over the consumer's lifetime

    def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
        """Build a kombu exchange of the given type."""
        return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
                                     durable=self.durable,
                                     auto_delete=auto_delete)

    def _create_queue(self, name, nova_exchange, routing_key,
                      exclusive=False, auto_delete=False):
        """Build a queue bound to nova_exchange with routing_key.

        Bug fix: ``auto_delete`` and ``exclusive`` were previously swapped
        when forwarded to kombu.Queue.
        """
        return kombu.Queue(name, nova_exchange, durable=self.durable,
                           auto_delete=auto_delete, exclusive=exclusive,
                           queue_arguments=self.queue_arguments,
                           routing_key=routing_key)

    def get_consumers(self, Consumer, channel):
        """ConsumerMixin hook: consume monitor.info and monitor.error."""
        nova_exchange = self._create_exchange("nova", "topic")
        nova_queues = [
            self._create_queue('monitor.info', nova_exchange, 'monitor.info'),
            self._create_queue('monitor.error', nova_exchange,
                               'monitor.error')
        ]
        return [Consumer(queues=nova_queues, callbacks=[self.on_nova])]

    def _process(self, message):
        """Persist one raw notification, ack it, then post-process it."""
        routing_key = message.delivery_info['routing_key']
        body = str(message.body)
        args = (routing_key, json.loads(body))
        asJson = json.dumps(args)

        # save raw and ack the message
        raw = views.process_raw_data(self.deployment, args, asJson)
        if raw:
            self.processed += 1
        # Always ack so an unprocessable message is not redelivered forever.
        message.ack()
        if raw:
            views.post_process(raw, args[1])

        self._check_memory()

    def _check_memory(self):
        """Log RAM growth, at most once per ~30 seconds of activity."""
        if not self.pmi:
            self.pmi = ProcessMemoryInfo()
            self.last_vsz = self.pmi.vsz
            self.initial_vsz = self.pmi.vsz

        utc = datetime.datetime.utcnow()
        check = self.last_time is None
        if self.last_time:
            diff = utc - self.last_time
            if diff.seconds > 30:
                check = True
        if check:
            self.last_time = utc
            self.pmi.update()
            diff = (self.pmi.vsz - self.last_vsz) / 1000
            idiff = (self.pmi.vsz - self.initial_vsz) / 1000
            self.total_processed += self.processed
            per_message = 0
            if self.total_processed:
                per_message = idiff / self.total_processed
            LOG.debug("%20s %6dk/%6dk ram, "
                      "%3d/%4d msgs @ %6dk/msg" %
                      (self.name, diff, idiff, self.processed,
                       self.total_processed, per_message))
            self.last_vsz = self.pmi.vsz
            self.processed = 0

    def on_nova(self, body, message):
        """Callback for incoming messages; logs and re-raises on failure."""
        try:
            self._process(message)
        except Exception as e:
            # Bug fix: log the raw body directly instead of re-parsing it
            # with json.loads -- a malformed body would raise again inside
            # this handler and mask the original error.
            LOG.debug("Problem: %s\nFailed message body:\n%s" %
                      (e, str(message.body)))
            raise
class NovaConsumer(kombu.mixins.ConsumerMixin):
    """Consume nova notifications from the monitor.info/monitor.error queues.

    Each message is persisted through ``views.process_raw_data`` and acked
    unconditionally in ``on_nova``; memory growth is logged periodically.
    """

    def __init__(self, name, connection, deployment, durable,
                 queue_arguments):
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
        self.queue_arguments = queue_arguments
        self.name = name
        self.last_time = None
        self.pmi = None
        self.processed = 0          # messages since the last memory report
        self.total_processed = 0    # messages over the consumer's lifetime

    def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
        """Build a kombu exchange of the given type."""
        return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
                                     durable=self.durable,
                                     auto_delete=auto_delete)

    def _create_queue(self, name, nova_exchange, routing_key,
                      exclusive=False, auto_delete=False):
        """Build a queue bound to nova_exchange with routing_key.

        Bug fix: ``auto_delete`` and ``exclusive`` were previously swapped
        when forwarded to kombu.Queue.
        """
        return kombu.Queue(name, nova_exchange, durable=self.durable,
                           auto_delete=auto_delete, exclusive=exclusive,
                           queue_arguments=self.queue_arguments,
                           routing_key=routing_key)

    def get_consumers(self, Consumer, channel):
        """ConsumerMixin hook: consume monitor.info and monitor.error."""
        nova_exchange = self._create_exchange("nova", "topic")
        nova_queues = [
            self._create_queue('monitor.info', nova_exchange, 'monitor.info'),
            self._create_queue('monitor.error', nova_exchange,
                               'monitor.error')
        ]
        return [Consumer(queues=nova_queues, callbacks=[self.on_nova])]

    def _process(self, message):
        """Persist one raw notification; acking is handled by on_nova."""
        routing_key = message.delivery_info['routing_key']
        body = str(message.body)
        args = (routing_key, json.loads(body))
        asJson = json.dumps(args)

        raw = views.process_raw_data(self.deployment, args, asJson)
        if raw:
            self.processed += 1

        self._check_memory()

    def _check_memory(self):
        """Log RAM growth, at most once per ~30 seconds of activity."""
        if not self.pmi:
            self.pmi = ProcessMemoryInfo()
            self.last_vsz = self.pmi.vsz
            self.initial_vsz = self.pmi.vsz

        utc = datetime.datetime.utcnow()
        check = self.last_time is None
        if self.last_time:
            diff = utc - self.last_time
            if diff.seconds > 30:
                check = True
        if check:
            self.last_time = utc
            self.pmi.update()
            diff = (self.pmi.vsz - self.last_vsz) / 1000
            idiff = (self.pmi.vsz - self.initial_vsz) / 1000
            self.total_processed += self.processed
            per_message = 0
            if self.total_processed:
                per_message = idiff / self.total_processed
            LOG.debug("%20s %6dk/%6dk ram, "
                      "%3d/%4d msgs @ %6dk/msg" %
                      (self.name, diff, idiff, self.processed,
                       self.total_processed, per_message))
            self.last_vsz = self.pmi.vsz
            self.processed = 0

    def on_nova(self, body, message):
        """Callback for incoming messages.

        Failures are logged but swallowed, and the message is acked either
        way so a poison message is never redelivered forever.
        """
        try:
            self._process(message)
        except Exception as e:
            LOG.exception("Problem %s" % e)
        # Ack unconditionally: _process does not ack, and acking only on
        # error would leave every successful message unacked.
        message.ack()
class Consumer(kombu.mixins.ConsumerMixin):
    """Generic topic-exchange consumer with an externally observable stats dict.

    Binds the queues described by ``topics`` to ``exchange``, saves each
    message through ``views.process_raw_data`` and dispatches it to the
    matching entry in ``POST_PROCESS_METHODS``.  Progress counters are
    mirrored into ``stats`` for outside monitoring.
    """

    def __init__(self, name, connection, deployment, durable,
                 queue_arguments, exchange, topics, connect_max_retries=10,
                 stats=None):
        self.connect_max_retries = connect_max_retries
        self.retry_attempts = 0
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
        self.queue_arguments = queue_arguments
        self.name = name
        self.last_time = None
        self.pmi = None
        self.processed = 0          # messages since the last memory report
        self.total_processed = 0    # messages over the consumer's lifetime
        self.topics = topics
        self.exchange = exchange
        # A caller-supplied dict lets the owner observe progress; default is
        # created per-instance (never a shared mutable default argument).
        if stats is not None:
            self.stats = stats
        else:
            self.stats = dict()
        # Allow a clean stop on process termination.
        # NOTE(review): self._shutdown is not defined in this class as shown
        # here -- presumably provided elsewhere; confirm before relying on it.
        signal.signal(signal.SIGTERM, self._shutdown)

    def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
        """Build an exchange of the given type via message_service."""
        return message_service.create_exchange(name, exchange_type=type,
                                               exclusive=exclusive,
                                               durable=self.durable,
                                               auto_delete=auto_delete)

    def _create_queue(self, name, nova_exchange, routing_key,
                      exclusive=False, auto_delete=False):
        """Build a queue bound to nova_exchange with routing_key.

        Bug fix: ``auto_delete`` and ``exclusive`` were previously swapped
        when forwarded to message_service.create_queue.
        """
        return message_service.create_queue(
            name, nova_exchange, durable=self.durable,
            auto_delete=auto_delete, exclusive=exclusive,
            queue_arguments=self.queue_arguments,
            routing_key=routing_key)

    def get_consumers(self, Consumer, channel):
        """ConsumerMixin hook: one consumer covering every configured topic."""
        exchange = self._create_exchange(self.exchange, "topic")
        queues = [
            self._create_queue(topic['queue'], exchange,
                               topic['routing_key'])
            for topic in self.topics
        ]
        return [Consumer(queues=queues, callbacks=[self.on_nova])]

    def _process(self, message):
        """Persist one raw notification, ack it, then post-process it."""
        routing_key = message.delivery_info['routing_key']
        body = str(message.body)
        args = (routing_key, json.loads(body))
        asJson = json.dumps(args)

        # save raw and ack the message
        raw, notif = views.process_raw_data(self.deployment, args, asJson,
                                            self.exchange)
        self.processed += 1
        message.ack()
        POST_PROCESS_METHODS[raw.get_name()](raw, notif)

        self._check_memory()

    def _check_memory(self):
        """Log RAM growth and refresh the stats dict every ~30 seconds."""
        if not self.pmi:
            self.pmi = ProcessMemoryInfo()
            self.last_vsz = self.pmi.vsz
            self.initial_vsz = self.pmi.vsz

        utc = datetime.datetime.utcnow()
        check = self.last_time is None
        if self.last_time:
            diff = utc - self.last_time
            if diff.seconds > 30:
                check = True
        if check:
            self.last_time = utc
            self.pmi.update()
            diff = (self.pmi.vsz - self.last_vsz) / 1000
            idiff = (self.pmi.vsz - self.initial_vsz) / 1000
            self.total_processed += self.processed
            per_message = 0
            if self.total_processed:
                per_message = idiff / self.total_processed
            _get_child_logger().debug(
                "%20s %20s %6dk/%6dk ram, "
                "%3d/%4d msgs @ %6dk/msg" %
                (self.name, self.exchange, diff, idiff, self.processed,
                 self.total_processed, per_message))
            # Mirror counters for external observers before resetting.
            self.stats['timestamp'] = utc
            self.stats['total_processed'] = self.total_processed
            self.stats['processed'] = self.processed
            self.last_vsz = self.pmi.vsz
            self.processed = 0

    def on_nova(self, body, message):
        """Callback for incoming messages.

        Malformed (non-JSON) bodies are logged and acked so they are not
        re-read; any other failure is logged and re-raised.
        """
        try:
            self._process(message)
        except ValueError as e:
            _get_child_logger().error(
                "Error: %s\nMalformed message body found : \n%s" %
                (e, str(message.body)))
            # Mark message as read to avoid re-reading the malformed message.
            message.ack()
        except Exception as e:
            _get_child_logger().error("Problem: %s\nFailed message body:\n%s"
                                      % (e, str(message.body)))
            raise
class NovaConsumer(object): def __init__(self, name, connection, deployment, durable): self.connection = connection self.deployment = deployment self.durable = durable self.name = name self.last_time = None self.pmi = None self.processed = 0 self.total_processed = 0 self.channel = connection.channel() self.nova_exchange = kombu.entity.Exchange("nova", type="topic", exclusive=False, durable=self.durable, auto_delete=False) self.nova_queues = [ kombu.Queue("stacktash.notifications.info", self.nova_exchange, durable=self.durable, auto_delete=False, exclusive=False, routing_key='notifications.info'), kombu.Queue("stacktash.notifications.error", self.nova_exchange, durable=self.durable, auto_delete=False, exclusive=False, routing_key='notifications.error'), ] def run(self): self.consumer = Consumer(channel=self.channel, queues=self.nova_queues, callbacks=[self.on_nova]) while True: self.consumer.consume() self.connection.drain_events() eventlet.sleep(0) def _process(self, body, message): routing_key = message.delivery_info['routing_key'] payload = (routing_key, body) # make sure jsonable body. 
json.dumps(payload) body = str(message.body) args = (routing_key, json.loads(body)) asJson = json.dumps(args) raw = dbapi.process_raw_data(self.deployment, args, asJson) if raw: self.processed += 1 self._check_memory() def _check_memory(self): if not self.pmi: self.pmi = ProcessMemoryInfo() self.last_vsz = self.pmi.vsz self.initial_vsz = self.pmi.vsz utc = datetime.datetime.utcnow() check = self.last_time is None if self.last_time: diff = utc - self.last_time if diff.seconds > 30: check = True if check: self.last_time = utc self.pmi.update() diff = (self.pmi.vsz - self.last_vsz) / 1000 idiff = (self.pmi.vsz - self.initial_vsz) / 1000 self.total_processed += self.processed per_message = 0 if self.total_processed: per_message = idiff / self.total_processed LOG.debug("%20s %6dk/%6dk ram, " "%3d/%4d msgs @ %6dk/msg" % (self.name, diff, idiff, self.processed, self.total_processed, per_message)) self.last_vsz = self.pmi.vsz self.processed = 0 def on_nova(self, body, message): try: self._process(body, message) except Exception, e: LOG.exception("Problem %s" % e) finally: