def test_consume_from_2_queues(self): """ Check that worker consumes messages from 2 queues properly """ self.counter = 0 def show_queues(): queues = get_queues(_get_properties()) total_messages = 0 for queue in queues: print "queue %s: %s messages" % (queue, _queue_size(queue)) total_messages += _queue_size(queue) print "total messages: %s" % total_messages def check_queue_size(*args, **kwargs): """ Closure to attach to _dispatch counts how many messages processed """ self.counter += 1 #SetUp the TaskBroker but override _dispatch task_broker = _task_broker_factory(dispatch_func=check_queue_size) #Publish a Couple of Messages to both queues channel = task_broker.channel properties = {DEVICE_GROUP: "test", DEVICE_NAME: "testname"} queues = get_queues(properties) self.assertEquals(len(queues), 2) for queue in queues: foo_cmd_msg = CommandMessage(['foo'], '', 1, timeout=1) foo_msg = pack_message(foo_cmd_msg) channel.basic_publish(foo_msg, mandatory=True, exchange=queue, routing_key=queue) # bar_cmd_msg = CommandMessage(['bar'], '', 1, timeout=1) bar_msg = pack_message(bar_cmd_msg) channel.basic_publish(bar_msg, mandatory=True, exchange=queue, routing_key=queue) # baz_cmd_msg = CommandMessage(['bar'], '', 1, timeout=1) baz_msg = pack_message(baz_cmd_msg) channel.basic_publish(baz_msg, mandatory=True, exchange=queue, routing_key=queue) #Set to Consume task_broker._start_consume() while self.counter < 6: # Process all messages channel.wait() for queue in queues: # Make sure all queues are empty self.assertEquals(_queue_size(queue), 0)
def test_on_message_failing_commands(self):
    """
    Send a failing command multiple times and watch the response
    queue for the resulting state changes.
    """
    response_queue = 'test_response_queue'
    # Two commands that cannot be executed on the worker
    messages = [
        pack_message(CommandMessage(['asdfasdfsadfs', '1'],
                                    response_queue, 1, timeout=2)),
        pack_message(CommandMessage(['asdfasdfsadfs', 'foo'],
                                    response_queue, 1, timeout=1)),
    ]
    task_broker = _task_broker_factory()
    channel = task_broker.channel
    _init_queue(channel, response_queue, response_queue, response_queue)
    # Send some commands
    for message in messages:
        channel.basic_publish(message,
                              mandatory=True,
                              exchange="test",
                              routing_key="test")
    # Set to consume
    task_broker._start_consume()
    channel.wait()
    time.sleep(1)
    channel.wait()
    time.sleep(1)
    # We should have 0 tasks in the queue and STARTED, Exception and
    # FINISHED for both tasks in response queue
    self.assertEquals(_queue_size("test"), 0)
    self.assertEquals(_queue_size(response_queue), 10)
def test_on_message(self):
    """
    Send a sleep command and an echo command, then watch the
    response queue for the resulting state changes.
    """
    response_queue = 'test_response_queue'
    messages = [
        pack_message(CommandMessage(['sleep', '1'],
                                    response_queue, 1, timeout=2)),
        pack_message(CommandMessage(['echo', 'foo'],
                                    response_queue, 1, timeout=1)),
    ]
    task_broker = _task_broker_factory()
    channel = task_broker.channel
    _init_queue(channel, response_queue, response_queue, response_queue)
    # Send some commands
    for message in messages:
        channel.basic_publish(message,
                              mandatory=True,
                              exchange="test",
                              routing_key="test")
    # Set to consume
    task_broker._start_consume()
    channel.wait()
    time.sleep(1)
    channel.wait()
    time.sleep(1)
    # We should have 0 tasks in the queue and STARTED and FINISHED for
    # both tasks in response queue
    self.assertEquals(_queue_size("test"), 0)
    self.assertEquals(_queue_size(response_queue), 8)
def _publish_task_state_change(self, task_id, response_queue):
    """
    Inform the response queue of the status of the Task

    @type task_id: C{int}
    @param task_id: The id of the Task

    @type response_queue: string
    @param response_queue: The name of the response queue
    """
    state = self._task_state.next()
    self._log.debug("Task in state: '%s'" % (state))
    # Pick the monitor event type: TASK_ENDED once the task has
    # reached its final state, TASK_ONGOING otherwise.
    if state == TaskCondition.FINISH:
        event_type = MonitorType.TASK_ENDED
    else:
        event_type = MonitorType.TASK_ONGOING
    # Publish the monitor event first, then the state change message,
    # both on the same response queue.
    for payload in (Monitor(event_type, gethostname(), task_id),
                    StateChangeMessage(task_id, state)):
        self.channel.basic_publish(pack_message(payload),
                                   mandatory=True,
                                   exchange=response_queue,
                                   routing_key=response_queue)
def test_is_version_compatible(self):
    """Check worker version compatibility filtering of messages."""
    task_broker = _task_broker_factory()
    # A requirement far in the future must be rejected...
    incompatible = pack_message(
        CommandMessage(["ls"], "foo", 111, timeout=2,
                       min_worker_version=100))
    self.assertFalse(task_broker._is_version_compatible(incompatible))
    # ...while an old-enough requirement is accepted.
    compatible = pack_message(
        CommandMessage(["ls"], "foo", 111, timeout=2,
                       min_worker_version=0.7))
    self.assertTrue(task_broker._is_version_compatible(compatible))
def set_state(self, event_type, description):
    """Create a testrun state change Monitor message and send it"""
    event = Monitor(event_type=event_type,
                    sender=gethostname(),
                    description=description)
    message = pack_message(event)
    self._send_message(message)
def set_state(self, event_type, description):
    """Build a Monitor DTO for the testrun state change and send it"""
    state_event = Monitor(event_type=event_type,
                          sender=gethostname(),
                          description=description)
    self._send_message(pack_message(state_event))
def _dispatch_tasks(self):
    """
    Publish the Tasks to the RabbitMQ
    """
    self.timeout_handler.start_queue_timeout()
    for task in self._tasks:
        LOGGER.debug("Sending command '%s' with key '%s'"
                     % (task.command, self._routing_key))
        # Send a "task in queue" event carrying the task id
        send_monitor_event(MonitorType.TASK_INQUEUE,
                           __name__,
                           task.task_id)
        message = pack_message(
            CommandMessage(task.command,
                           self._testrun_queue,
                           task.task_id,
                           timeout=self._execution_timeout,
                           xml_file=task.xml_file,
                           min_worker_version=self._min_worker_version))
        self._channel.basic_publish(message,
                                    exchange=self._services_exchange,
                                    routing_key=self._routing_key)
def _dispatch_tasks(self):
    """
    Publish the Tasks to the RabbitMQ
    """
    self.timeout_handler.start_queue_timeout()
    for task in self._tasks:
        log_msg = ("Sending command '%s' with key '%s'"
                   % (task.command, self._routing_key))
        LOGGER.debug(log_msg)
        # Emit a "task in queue" monitor event with the task id
        send_monitor_event(MonitorType.TASK_INQUEUE,
                           __name__,
                           task.task_id)
        cmd_msg = CommandMessage(
            task.command,
            self._testrun_queue,
            task.task_id,
            timeout=self._execution_timeout,
            xml_file=task.xml_file,
            min_worker_version=self._min_worker_version)
        self._channel.basic_publish(pack_message(cmd_msg),
                                    exchange=self._services_exchange,
                                    routing_key=self._routing_key)
def test_on_message_not_version_compatible(self):
    """
    Check that incompatible versions dont pull messages from the queue
    """
    #self.assertTrue(_queue_size("test_v") is None)
    msg = pack_message(CommandMessage(['echo', 'foo'], 'test', 1,
                                      timeout=1))
    task_broker = _task_broker_factory()
    channel = task_broker.channel
    channel.basic_publish(msg,
                          mandatory=True,
                          exchange="test",
                          routing_key="test")
    self.assertEquals(1, _queue_size("test"))
    task_broker._start_consume()
    channel.wait()
    self.assertEquals(3, _queue_size("test"))
    #Check that the message can be pulled by another consumer
    connection = amqp.Connection(host="localhost",
                                 userid="guest",
                                 password="******",
                                 virtual_host="/",
                                 insist=False)
    channel = connection.channel()
    self.received = False

    def on_message(message):
        """Ack the message and record that it was received."""
        channel.basic_ack(delivery_tag=message.delivery_tag)
        self.received = True

    channel.basic_consume("test", callback=on_message)
    channel.wait()
    self.assertTrue(self.received)
    self.assertEquals(0, _queue_size("test"))
def test_is_version_compatible(self):
    """Version gate: too-new requirements rejected, old ones accepted."""
    task_broker = _task_broker_factory()
    cases = [(100, False), (0.7, True)]
    for min_version, expected in cases:
        packed = pack_message(
            CommandMessage(["ls"], "foo", 111, timeout=2,
                           min_worker_version=min_version))
        self.assertEquals(
            expected, task_broker._is_version_compatible(packed))
def add_result(self, filename, content, origin="Unknown",
               test_package="Unknown", environment="Unknown"):
    """Calls OTSMessageIO to create result object message"""
    result_msg = Results(filename,
                         content,
                         package=test_package,
                         hostname=origin,
                         environment=environment)
    self._send_message(pack_message(result_msg))
def test_consume(self):
    """
    Check that the consume sets the prefetch correctly
    """
    def assert_expected_size(*args, **kwargs):
        """
        Closure attached to _dispatch that checks the queue size
        """
        self.assertEquals(self.expected_size, _queue_size("test"))

    self.assertTrue(_queue_size("test") is None)
    # Set up the TaskBroker but override _dispatch
    task_broker = _task_broker_factory(dispatch_func=assert_expected_size)
    # Publish a couple of messages
    channel = task_broker.channel
    for command in ('foo', 'bar'):
        cmd_msg = CommandMessage([command], '', 1, timeout=1)
        channel.basic_publish(pack_message(cmd_msg),
                              mandatory=True,
                              exchange="test",
                              routing_key="test")
    self.assertEquals(2, _queue_size("test"))
    task_broker._start_consume()
    # One message should remain after the first dispatch, none after
    # the second.
    self.expected_size = 1
    channel.wait()
    self.expected_size = 0
    channel.wait()
    time.sleep(5)
def send_quit(self):
    """Publish a 'quit' command on a fresh connection."""
    quit_msg = pack_message(CommandMessage(["quit"], self.queue, 111))
    connection = amqp.Connection(host="localhost",
                                 userid="guest",
                                 password="******",
                                 virtual_host="/",
                                 insist=False)
    connection.channel().basic_publish(quit_msg,
                                       exchange=ROUTING_KEY,
                                       routing_key=ROUTING_KEY)
def test_consume(self):
    """
    Check that the consume sets the prefetch correctly
    """
    def check_queue_size(*args, **kwargs):
        """
        Closure to attach to _dispatch to check the queue size
        """
        self.assertEquals(self.expected_size, _queue_size("test"))

    self.assertTrue(_queue_size("test") is None)
    # SetUp the TaskBroker but override _dispatch
    task_broker = _task_broker_factory(dispatch_func=check_queue_size)
    # Publish a couple of messages
    channel = task_broker.channel
    foo_msg = pack_message(CommandMessage(['foo'], '', 1, timeout=1))
    channel.basic_publish(foo_msg,
                          mandatory=True,
                          exchange="test",
                          routing_key="test")
    bar_msg = pack_message(CommandMessage(['bar'], '', 1, timeout=1))
    channel.basic_publish(bar_msg,
                          mandatory=True,
                          exchange="test",
                          routing_key="test")
    self.assertEquals(2, _queue_size("test"))
    task_broker._start_consume()
    # Expect the queue to drain by one message per dispatch
    self.expected_size = 1
    channel.wait()
    self.expected_size = 0
    channel.wait()
    time.sleep(5)
def send_quit(self):
    """Open a throwaway connection and publish a 'quit' command."""
    message = pack_message(CommandMessage(["quit"], self.queue, 111))
    conn = amqp.Connection(host="localhost",
                           userid="guest",
                           password="******",
                           virtual_host="/",
                           insist=False)
    channel = conn.channel()
    channel.basic_publish(message,
                          exchange=ROUTING_KEY,
                          routing_key=ROUTING_KEY)
def _publish_exception(self, task_id, response_queue, exception):
    """
    Put an Exception on the response queue and move the Task
    onto the next state

    @type response_queue: C{str}
    @param response_queue: The name of the response queue

    @type exception: L{OTSException}
    @param exception: An OTSException
    """
    self._log.debug("publishing exception")
    packed = pack_message(exception)
    try:
        self.channel.basic_publish(packed,
                                   mandatory=True,
                                   exchange=response_queue,
                                   routing_key=response_queue)
    except AMQPChannelException:
        # Best-effort: log but do not propagate the channel error
        self._log.error("Can't publish exception")
def emit(self, record): """ @type record : C{logging.LogRecord} @param record : The Log Record """ if self.channel is not None \ and self.queue is not None \ and self.exchange is not None: #FIXME: This rudely ignores the exc_info #as Python can't pickle the traceback record.exc_info = None # message = pack_message(record) try: self.channel.basic_publish(message, mandatory=True, exchange=self.exchange, routing_key=self.queue) except AMQPChannelException: print "Can't log to %s" % (self.queue)
def emit(self, record): """ @type record : C{logging.LogRecord} @param record : The Log Record """ if self.channel is not None \ and self.queue is not None \ and self.exchange is not None: #FIXME: This rudely ignores the exc_info #as Python can't pickle the traceback record.exc_info = None # message = pack_message(record) try: self.channel.basic_publish(message, mandatory = True, exchange = self.exchange, routing_key = self.queue) except AMQPChannelException: print "Can't log to %s" % (self.queue)
def test_on_message_not_version_compatible(self):
    """
    Check that incompatible versions dont pull messages from the queue
    """
    #self.assertTrue(_queue_size("test_v") is None)
    cmd_msg = CommandMessage(['echo', 'foo'], 'test', 1, timeout=1)
    task_broker = _task_broker_factory()
    channel = task_broker.channel
    channel.basic_publish(pack_message(cmd_msg),
                          mandatory=True,
                          exchange="test",
                          routing_key="test")
    self.assertEquals(1, _queue_size("test"))
    task_broker._start_consume()
    channel.wait()
    self.assertEquals(3, _queue_size("test"))
    # Check that the message can be pulled by another consumer
    connection = amqp.Connection(host="localhost",
                                 userid="guest",
                                 password="******",
                                 virtual_host="/",
                                 insist=False)
    channel = connection.channel()
    self.received = False

    def ack_and_flag(message):
        """Ack the delivery and flag that it arrived."""
        channel.basic_ack(delivery_tag=message.delivery_tag)
        self.received = True

    channel.basic_consume("test", callback=ack_and_flag)
    channel.wait()
    self.assertTrue(self.received)
    self.assertEquals(0, _queue_size("test"))
def add_executed_packages(self, environment, packages):
    """Calls OTSMessageIO to create test package list"""
    packages_msg = Packages(environment, packages)
    message = pack_message(packages_msg)
    self._send_message(message)
def set_error(self, error_info, error_code):
    """Calls OTSMessageIO to create testrun error message"""
    error = OTSException(error_code, error_info)
    self._send_message(pack_message(error))
def test_consume_from_2_queues(self): """ Check that worker consumes messages from 2 queues properly """ self.counter = 0 def show_queues(): queues = get_queues(_get_properties()) total_messages = 0 for queue in queues: print "queue %s: %s messages" % (queue, _queue_size(queue)) total_messages += _queue_size(queue) print "total messages: %s" % total_messages def check_queue_size(*args,**kwargs): """ Closure to attach to _dispatch counts how many messages processed """ self.counter += 1 #SetUp the TaskBroker but override _dispatch task_broker = _task_broker_factory(dispatch_func = check_queue_size) #Publish a Couple of Messages to both queues channel = task_broker.channel properties = {DEVICE_GROUP : "test", DEVICE_NAME : "testname"} queues = get_queues(properties) self.assertEquals(len(queues), 2) for queue in queues: foo_cmd_msg = CommandMessage(['foo'],'', 1, timeout = 1) foo_msg = pack_message(foo_cmd_msg) channel.basic_publish(foo_msg, mandatory = True, exchange = queue, routing_key = queue) # bar_cmd_msg = CommandMessage(['bar'],'', 1, timeout = 1) bar_msg = pack_message(bar_cmd_msg) channel.basic_publish(bar_msg, mandatory = True, exchange = queue, routing_key = queue) # baz_cmd_msg = CommandMessage(['bar'],'', 1, timeout = 1) baz_msg = pack_message(baz_cmd_msg) channel.basic_publish(baz_msg, mandatory = True, exchange = queue, routing_key = queue) #Set to Consume task_broker._start_consume() while self.counter < 6: # Process all messages channel.wait() for queue in queues: # Make sure all queues are empty self.assertEquals(_queue_size(queue), 0)