def test_message_id_only(self):
    message_id = str(uuid.uuid4())
    properties = spec.BasicProperties(
        'application/json', message_id=message_id)
    self.assertEqual(utils.message_info('', '', properties), message_id)

def invoke_consumer(self, message):
    """Wrap the actual processor processing bits

    :param rejected.data.Message message: The message to process

    """
    # Only allow for a single message to be processed at a time
    with (yield self.consumer_lock.acquire()):
        if self.is_idle:
            if message.channel.is_closed:
                LOGGER.warning(
                    'Channel %s is closed on connection "%s", discarding '
                    'local copy of message %s',
                    message.channel.channel_number, message.connection,
                    utils.message_info(message.exchange, message.routing_key,
                                       message.properties))
                self.counters[self.CLOSED_ON_START] += 1
                self.maybe_get_next_message()
                return
            self.set_state(self.STATE_PROCESSING)
            self.delivery_time = start_time = time.time()
            self.active_message = message
            self.measurement = data.Measurement()
            if message.method.redelivered:
                self.counters[self.REDELIVERED] += 1
                self.measurement.set_tag(self.REDELIVERED, True)
            try:
                result = yield self.consumer.execute(message, self.measurement)
            except Exception as error:
                LOGGER.exception(
                    'Unhandled exception from consumer in process. '
                    'This should not happen. %s', error)
                result = data.MESSAGE_REQUEUE
            LOGGER.debug('Finished processing message: %r', result)
            self.on_processed(message, result, start_time)
        elif self.is_waiting_to_shutdown:
            LOGGER.info('Requeueing pending message due to pending shutdown')
            self.reject(message, True)
            self.shutdown_connections()
        elif self.is_shutting_down:
            LOGGER.info('Requeueing pending message due to shutdown')
            self.reject(message, True)
            self.on_ready_to_stop()
        else:
            LOGGER.warning(
                'Exiting invoke_consumer without processing, this should '
                'not happen. State: %s', self.state_description)
            self.maybe_get_next_message()

def test_message_info_output_no_correlation_id(self):
    message_id = str(uuid.uuid4())
    exchange = str(uuid.uuid4())
    routing_key = str(uuid.uuid4())
    expectation = '{} published to "{}" using "{}"'.format(
        message_id, exchange, routing_key)
    properties = spec.BasicProperties(
        'application/json', message_id=message_id)
    self.assertEqual(
        utils.message_info(exchange, routing_key, properties), expectation)

def on_return(self, channel, method, properties, body):
    """Invoked by RabbitMQ when it returns a message that was published.

    :param channel: The channel the message was delivered on
    :type channel: pika.channel.Channel
    :param method: The AMQP method frame
    :type method: pika.frame.Frame
    :param properties: The AMQP message properties
    :type properties: pika.spec.Basic.Properties
    :param bytes body: The message body

    """
    pending = self.pending_confirmations()
    if not pending:  # Exit early if there are no pending messages
        self.logger.warning(
            'RabbitMQ returned message %s and no pending messages '
            'are unconfirmed',
            utils.message_info(method.exchange, method.routing_key,
                               properties))
        return

    self.logger.warning(
        'RabbitMQ returned message %s: (%s) %s',
        utils.message_info(method.exchange, method.routing_key, properties),
        method.reply_code, method.reply_text)

    # Try to match the exact message, or the first message published that
    # matches the exchange and routing key
    for offset, msg in pending:
        if (msg.message_id == properties.message_id or
                (msg.exchange == method.exchange and
                 msg.routing_key == method.routing_key)):
            self.published_messages[offset].future.set_result(False)
            return

    # Handle the case where we can only go on message ordering
    self.published_messages[0].future.set_result(False)

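# Illustrative sketch only, not the library's actual data structure: the
# matching loop in on_return() assumes each pending publish is tracked with
# the fields it compares (message_id, exchange, routing_key) plus a future
# that the publishing coroutine is awaiting. A hypothetical record with just
# those attributes could look like this:
import collections

PublishedMessage = collections.namedtuple(
    'PublishedMessage', ['message_id', 'exchange', 'routing_key', 'future'])
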
def process(self):
    self.logger.info(
        'Processing message %s',
        utils.message_info(self.exchange, self.routing_key,
                           self._message.properties))
    with self.stats_track_duration('async_fetch'):
        results = yield [
            self.http_client.fetch('http://www.google.com'),
            self.http_client.fetch('http://www.bing.com')
        ]
    self.logger.info('HTTP Status Codes: %r', [r.code for r in results])
    result = yield self.publish_message(
        self.exchange, self.routing_key,
        {'correlation_id': self.message_id,
         'message_id': str(uuid.uuid4()),
         'type': 'example',
         'timestamp': int(time.time())},
        'async_fetch request')
    self.logger.info('Confirmation result: %r', result)
    yield gen.sleep(1)

def test_no_identifiable_info(self):
    properties = spec.BasicProperties('application/json')
    self.assertEqual(utils.message_info('', '', properties), '')

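# Taken together, the three tests above pin down the observable contract of
# utils.message_info: no message_id yields an empty string, a message_id with
# an empty exchange and routing key yields just the message_id, and all three
# yield the '<id> published to "<exchange>" using "<routing key>"' form.
# A minimal sketch satisfying only these tests (the real helper likely also
# folds in correlation_id, which is not exercised here) might look like:
def message_info(exchange, routing_key, properties):
    """Return a human-readable identifier for the message, if possible."""
    if not properties.message_id:
        return ''
    if not exchange and not routing_key:
        return properties.message_id
    return '{} published to "{}" using "{}"'.format(
        properties.message_id, exchange, routing_key)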