def add_data_and_message():
    """Build a valid test payload dict and a Message populated from it."""
    payload = {
        'data': '\\\\isis\\inst$\\NDXTESTINSTRUMENT\\Instrument\\data\\cycle_21_1\\data.nxs',
        'facility': 'ISIS',
        'instrument': 'TESTINSTRUMENT',
        'rb_number': '1234',
        'run_number': '4321',
        'run_version': 1,
        'reduction_script': 'print("hello")',  # not actually used for the reduction
        'reduction_arguments': {
            "standard_vars": {
                "arg1": "differentvalue",
                "arg2": 321
            },
            "advanced_vars": {
                "adv_arg1": "advancedvalue2",
                "adv_arg2": ""
            }
        },
        'description': 'This is a test',
        'software': {
            "name": "Mantid",
            "version": "latest",
        },
    }
    msg = Message()
    msg.populate(payload)
    return payload, msg
def add_bad_data_and_message():
    """Build an intentionally malformed test payload dict and a Message populated from it."""
    payload = {
        'data': CYCLE_DIRECTORY % ("TESTINSTRUMENT", "21_1") + '/data.nxs',
        'facility': 'ISIS',
        'instrument': 'TESTINSTRUMENT',
        'rb_number': '1234',
        'run_number': '4321',
        'run_version': 1,
        'reduction_script': 'print("hello")',  # not actually used for the reduction
        'reduction_arguments': "None",
        'description': 'This is a test',
        'software': "6.2.0",
        "reduction_data": "/instrument/TESTINSTRUMENT/RBNumber/RB1234/autoreduced/Test run name/run-version-1",
        "reduction_log": "Running reduction script: /isis/NDXTESTINSTRUMENT/user/scripts/autoreduction/reduce.py"
    }
    msg = Message()
    msg.populate(payload)
    return payload, msg
def test_populate_with_invalid_key(self):
    """
    Test: A warning is logged
    When: An unknown key is used to populate the Message
    """
    msg = Message()
    with self.assertRaises(ValueError):
        msg.populate({'unknown': True})
def _handle_error(self, reduction_run: ReductionRun, message: Message, err: Exception):
    """
    Mark the run as errored because the state could not be saved in the
    database properly, recording the error on the message.
    """
    err_msg = "Encountered error when saving run variables"
    self._logger.error("%s\n%s", err_msg, str(err))
    # Surface the failure on the message so it is visible downstream
    message.message = err_msg
    message.reduction_log = str(err)
    self.reduction_error(reduction_run, message)
def test_send_with_message_instance(self, mock_stomp_send):
    """
    Test: send forwards the serialized Message via stomp.send
    When: send is called with a Message instance argument for message
    """
    client = QueueClient()
    msg = Message(description="test-message")
    client.send('dataready', msg)
    positional_args = mock_stomp_send.call_args[0]
    self.assertEqual(positional_args[0], 'dataready')
    self.assertEqual(positional_args[1], msg.serialize())
def setUp(self):
    """Create one valid and one invalid Message fixture for the tests."""
    self.valid_message = Message(run_number=1234,
                                 instrument='GEM',
                                 rb_number=1000000,
                                 started_by=-1,
                                 data='test/file/path')
    # Every field here carries the wrong type/value on purpose
    self.invalid_message = Message(run_number='12345',
                                   instrument='not inst',
                                   rb_number=-1,
                                   started_by='test',
                                   data=123)
def test_validate_data_ready_valid(self):
    """
    Test: No exception is raised
    When: Calling validate for data_ready with a valid message
    """
    msg = Message(instrument='GEM',
                  run_number=111,
                  rb_number=2222222,
                  data='file/path',
                  facility="ISIS",
                  started_by=0)
    try:
        result = msg.validate('/queue/DataReady')
    except RuntimeError:
        self.fail()
    else:
        self.assertIsNone(result)
def _populated():
    """Create and return a populated Message plus the dict it should serialize to."""
    run_number = 11111
    rb_number = 2222222
    description = 'test message'
    msg = Message(run_number=run_number, rb_number=rb_number, description=description)
    expected = {
        'description': description,
        'facility': "ISIS",
        'run_number': run_number,
        'instrument': None,
        'rb_number': rb_number,
        'started_by': None,
        'data': None,
        'overwrite': None,
        'run_version': None,
        'job_id': None,
        'reduction_script': None,
        'reduction_arguments': {},
        'reduction_log': "",
        'admin_log': "",
        'message': None,
        'retry_in': None,
        'reduction_data': None,
        'software': None,
        'flat_output': False
    }
    return msg, expected
def test_validate_data_ready_invalid(self):
    """
    Test: An exception is raised
    When: An invalid Message is validated
    """
    msg = Message(instrument='Not an inst')
    with self.assertRaises(RuntimeError):
        msg.validate('/queue/DataReady')
def send_retry_message(user_id: int, most_recent_run: ReductionRun, run_description: str, script_text: str,
                       new_script_arguments: dict, overwrite_previous_data: bool) -> None:
    """
    Create and send a retry message built from the most recent run.

    :param user_id: The user submitting the run
    :param most_recent_run: The most recent run, used for fields shared across the
                            two runs (run number, instrument name, etc.)
    :param run_description: Description of the rerun
    :param script_text: The script that will NOT be used for this reduction, because of a known issue
                        https://github.com/ISISScientificComputing/autoreduce/issues/1115
    :param new_script_arguments: Dict of arguments that will be used for the reduction
    :param overwrite_previous_data: Whether to overwrite the previous data in the data location
    """
    retry_message = Message(started_by=user_id,
                            description=run_description,
                            run_number=most_recent_run.run_number,
                            instrument=most_recent_run.instrument.name,
                            rb_number=most_recent_run.experiment.reference_number,
                            data=most_recent_run.data_location.first().file_path,
                            reduction_script=script_text,
                            reduction_arguments=new_script_arguments,
                            run_version=most_recent_run.run_version,
                            facility=FACILITY,
                            software=str(most_recent_run.software),
                            overwrite=overwrite_previous_data)
    MessagingUtils.send(retry_message)
def setUp(self):
    """
    Build mocked collaborators and construct a Consumer with its Kafka
    dependencies patched out, so no real broker connection is attempted.
    """
    self.mocked_producer = mock.Mock(spec=Publisher)
    self.mocked_handler = mock.MagicMock(spec=HandleMessage)
    # A plain string cannot be parsed into a Message -> used to exercise decode errors
    self.bad_message = "bad_message"
    self.good_message = Message()
    self.mock_confluent_message = mock.MagicMock(spec=confluent_kafka.Message)
    # Patch the handler, the confluent consumer class, and the logger for the
    # duration of Consumer construction only; the mocks are kept for assertions.
    with mock.patch("autoreduce_qp.queue_processor.confluent_consumer.HandleMessage",
                    return_value=self.mocked_handler), \
            mock.patch("autoreduce_qp.queue_processor.confluent_consumer.DeserializingConsumer")\
            as mock_confluent_consumer, mock.patch("logging.getLogger") as patched_logger:
        self.consumer = Consumer()
        self.mocked_logger = patched_logger.return_value
        self.mock_confluent_consumer = mock_confluent_consumer.return_value
        # Constructing the Consumer is expected to subscribe to the transactions topic
        self.mock_confluent_consumer.subscribe.assert_called_with([TRANSACTIONS_TOPIC])
def send_message_onwards(self, reduction_run: ReductionRun, message: Message, instrument: Instrument,
                         software: Software):
    """
    Send the message onwards for processing when validation passes and the
    instrument is active; otherwise mark the run as skipped or errored.
    """
    skip_reason = self.find_reason_to_skip_run(reduction_run, message, instrument)
    if skip_reason is not None:
        message.message = skip_reason
        message.reduction_log = skip_reason
        self.reduction_skipped(reduction_run, message)
        return
    if message.message:
        # The message already carries an error from earlier processing
        self.reduction_error(reduction_run, message)
        return
    # Activate instrument if script was found
    self.activate_db_inst(instrument)
    self.do_reduction(reduction_run, message, software)
def find_reason_to_skip_run(reduction_run: ReductionRun, message: Message, instrument) -> Optional[str]:
    """
    Return a human-readable reason to skip this run, or None if it should proceed.

    The run is skipped when the reduction script is empty, the message fails
    validation, or the instrument is inactive or paused.
    """
    if reduction_run.script.text == "":
        return "Script text for current instrument is empty"

    try:
        message.validate("data_ready")
    except RuntimeError as validation_err:
        return f"Validation error from handler: {validation_err}"

    if not instrument.is_active:
        return f"Run {message.run_number} has been skipped because the instrument {instrument.name} is inactive"

    if instrument.is_paused:
        return f"Run {message.run_number} has been skipped because the instrument {instrument.name} is paused"

    return None
def setUp(self): """ Start all external services """ # Get all clients try: self.publisher, self.consumer = setup_kafka_connections() except ConnectionException as err: raise RuntimeError( "Could not connect to Kafka - check your credentials. If running locally check that " "the Kafka Docker container is running") from err # Add placeholder variables: # these are used to ensure runs are deleted even if test fails before completion self.instrument = 'ARMI' self.instrument_obj, _ = Instrument.objects.get_or_create( name=self.instrument, is_active=True) self.rb_number = 1234567 self.run_number = 101 self.run_title = "test title" self.software = { "name": "Mantid", "version": "latest", } # Create test archive and add data self.data_archive = DataArchive([self.instrument], 19, 19) self.data_archive.create() # Create and send json message to Kafka self.data_ready_message = Message(rb_number=self.rb_number, instrument=self.instrument, run_number=self.run_number, run_title=self.run_title, description="This is a system test", facility="ISIS", software=self.software, started_by=0, reduction_script=None, reduction_arguments=None) if self.software.get("name") == "Mantid": expected_mantid_py = f"{MANTID_PATH}/mantid.py" if not os.path.exists(expected_mantid_py): os.makedirs(MANTID_PATH) with open(expected_mantid_py, mode="w", encoding="utf-8") as self.test_mantid_py: self.test_mantid_py.write(FAKE_MANTID) else: # Mantid is installed, don't create or delete (in tearDown) anything self.test_mantid_py = None
def on_message(self, incoming_message):
    """Decode an incoming Kafka message and dispatch it based on its topic."""
    with self.mark_processing():
        topic = incoming_message.topic()
        raw_payload = incoming_message.value()

        try:
            parsed = Message.parse_raw(raw_payload)
        except (ValidationError, TypeError):
            self.logger.error("Could not decode message: %s", raw_payload)
            return

        try:
            if topic == 'data_ready':
                self.message_handler.data_ready(parsed)
            else:
                self.logger.error("Received a message on an unknown topic '%s'", topic)
        except Exception as exc:  # pylint:disable=broad-except
            # Last-resort guard: log the full traceback rather than kill the consumer loop
            self.logger.error("Unhandled exception encountered: %s %s\n\n%s",
                              type(exc).__name__, exc, traceback.format_exc())
def _empty():
    """Create and return an empty Message plus the dict it should serialize to."""
    msg = Message()
    expected = {
        'description': None,
        'facility': "ISIS",
        'run_number': None,
        'instrument': None,
        'rb_number': None,
        'started_by': None,
        'file_path': None,
        'overwrite': None,
        'run_version': None,
        'job_id': None,
        'reduction_script': None,
        'reduction_arguments': {},
        'reduction_log': "",
        'admin_log': "",
        'return_message': None,
        'retry_in': None,
        'software': None,
        'flat_output': False
    }
    return msg, expected
def run(self) -> Message:
    """
    Run the reduction subprocess inside a Docker container and return the
    resulting Message. On any unexpected failure the error is recorded on
    self.message, which is returned instead.
    """
    try:
        # We need to run the reduction in a new process, otherwise scripts
        # will fail when they use things that require access to a main loop
        # e.g. a GUI main loop, for matplotlib or Mantid
        serialized_vars = self.message.serialize()
        # Truncated copy used only for logging (keeps the script text out of logs)
        serialized_vars_truncated = self.message.serialize(limit_reduction_script=True)
        args = ["autoreduce-runner-start", serialized_vars, self.run_name]
        logger.info("Calling: %s %s %s %s ", "python3", "runner.py", serialized_vars_truncated, self.run_name)

        # Return a client configured from environment variables
        # The environment variables used are the same as those used by the Docker command-line client
        # https://docs.docker.com/engine/reference/commandline/cli/#environment-variables
        client = docker.from_env()
        image = get_correct_image(client, self.software)

        # Outside production, make sure the host-side mount points exist and are writable
        if "AUTOREDUCTION_PRODUCTION" not in os.environ:
            if not os.path.exists(ARCHIVE_ROOT):
                Path(ARCHIVE_ROOT).mkdir(parents=True, exist_ok=True)
            if not os.path.exists(self.reduced_data_path):
                self.reduced_data_path.mkdir(parents=True, exist_ok=True)
            # Run chmod's to make sure the directories are writable
            self.reduced_data_path.chmod(0o777)
            Path(ARCHIVE_ROOT).chmod(0o777)
            Path(AUTOREDUCE_HOME_ROOT).chmod(0o777)
            Path(f'{AUTOREDUCE_HOME_ROOT}/logs/autoreduce.log').chmod(0o777)

        # detach defaults to False, so this blocks until the container exits
        # and returns its logs as bytes
        container = client.containers.run(
            image=image,
            command=args,
            volumes={
                AUTOREDUCE_HOME_ROOT: {
                    'bind': '/home/isisautoreduce/.autoreduce/',
                    'mode': 'rw'
                },
                self.mantid_path: {
                    'bind': '/home/isisautoreduce/.mantid/',
                    'mode': 'rw'
                },
                ARCHIVE_ROOT: {
                    'bind': '/isis/',
                    'mode': 'rw'
                },
                self.reduced_data_path: {
                    'bind': '/instrument/',
                    'mode': 'rw'
                },
            },
            stdin_open=True,
            environment=["AUTOREDUCTION_PRODUCTION=1", "PYTHONIOENCODING=utf-8"],
            stdout=True,
            stderr=True,
        )
        logger.info("Container logs %s", container.decode("utf-8"))

        # The runner writes its result message to output.txt inside the shared home mount
        with open(f'{AUTOREDUCE_HOME_ROOT}/output.txt', encoding="utf-8", mode='r') as out_file:
            result_message_raw = out_file.read()
        result_message = Message()
        # NOTE(review): populate is given the raw file contents here - presumably it
        # accepts a serialized string as well as a dict; confirm against Message.populate
        result_message.populate(result_message_raw)
    # If the specified image does not exist.
    except ImageNotFound as exc:
        raise exc
    # If the server returns an error.
    except APIError as exc:
        raise exc
    # If the container exits with a non-zero exit code and detach is False.
    except ContainerError as exc:
        raise exc
    except Exception:  # pylint:disable=broad-except
        logger.error("Processing encountered an error: %s", traceback.format_exc())
        self.message.message = f"Processing encountered an error: {traceback.format_exc()}"
        result_message = self.message
    return result_message
class KafkaTestCase(TestCase):
    """Unit tests for the Kafka Consumer message loop and its error handling."""

    def setUp(self):
        # Build mocked collaborators and construct a Consumer with its Kafka
        # dependencies patched out, so no real broker connection is attempted.
        self.mocked_producer = mock.Mock(spec=Publisher)
        self.mocked_handler = mock.MagicMock(spec=HandleMessage)
        # A plain string cannot be parsed into a Message -> used to exercise decode errors
        self.bad_message = "bad_message"
        self.good_message = Message()
        self.mock_confluent_message = mock.MagicMock(spec=confluent_kafka.Message)
        with mock.patch("autoreduce_qp.queue_processor.confluent_consumer.HandleMessage",
                        return_value=self.mocked_handler), \
                mock.patch("autoreduce_qp.queue_processor.confluent_consumer.DeserializingConsumer")\
                as mock_confluent_consumer, mock.patch("logging.getLogger") as patched_logger:
            self.consumer = Consumer()
            self.mocked_logger = patched_logger.return_value
            self.mock_confluent_consumer = mock_confluent_consumer.return_value
            # Constructing the Consumer is expected to subscribe to the transactions topic
            self.mock_confluent_consumer.subscribe.assert_called_with([TRANSACTIONS_TOPIC])

    def test_on_message_unknown_topic(self):
        """Test receiving a message on an unknown topic"""
        fake_topic = "fake_topic"
        self.mock_confluent_message.topic.return_value = fake_topic
        self.mock_confluent_message.value.return_value = self.good_message.json()
        self.consumer.on_message(self.mock_confluent_message)
        self.mocked_logger.error.assert_called_with("Received a message on an unknown topic '%s'", fake_topic)

    def test_on_message_bad_message(self):
        """Test receiving a bad (corrupt) message"""
        fake_topic = "fake_topic"
        self.mock_confluent_message.topic.return_value = fake_topic
        self.mock_confluent_message.value.return_value = self.bad_message
        self.consumer.on_message(self.mock_confluent_message)
        self.mocked_logger.error.assert_called_with("Could not decode message: %s", self.bad_message)

    def test_on_message_handler_catches_exceptions(self):
        """Test on_message correctly handles an exception being raised"""

        def raise_expected_exception(msg):
            raise Exception(msg)

        self.mocked_handler.data_ready.side_effect = raise_expected_exception
        self.mock_confluent_message.value.return_value = self.good_message.json()
        self.consumer.on_message(self.mock_confluent_message)
        self.mocked_logger.error.assert_called_once()

    def test_success_run(self):
        """ Test that the poll loop runs successfully """
        self.mock_confluent_message.error.return_value = None
        self.mock_confluent_consumer.poll.return_value = self.mock_confluent_message
        # Don't call the on_message method
        self.consumer.on_message = mock.Mock()
        # Stop the thread after 5 seconds
        run = threading.Timer(5, self.consumer.stop)
        run.start()
        # Start the thread
        self.consumer.run()
        self.mock_confluent_consumer.poll.assert_called_with(timeout=1.0)
        self.consumer.on_message.assert_called_with(self.mock_confluent_message)
        self.consumer.stop()

    def test_run_error_message(self):
        """ Test that the poll loop raises when the polled message carries an error """
        self.mock_confluent_consumer.poll.return_value = self.mock_confluent_message
        # Don't call the on_message method
        self.consumer.on_message = mock.Mock()
        # Stop the thread after 5 seconds
        run = threading.Timer(5, self.consumer.stop)
        run.start()
        # Start the thread; the mocked message's error() is truthy, so run() should raise
        self.assertRaises(confluent_kafka.KafkaException, self.consumer.run)
        self.mock_confluent_consumer.poll.assert_called_with(timeout=1.0)
        self.mocked_logger.error.assert_called_with("Undefined error in consumer loop")

    def test_stop_method(self):
        """ Test that the stop method works """
        self.consumer.stop()
        self.assertTrue(self.consumer.stopped)

    def test_stop_consumer(self):
        """ Test if the consumer is stopped via Event.set() """
        self.mock_confluent_message.error.return_value = None
        self.mock_confluent_consumer.poll.return_value = self.mock_confluent_message
        # Don't call the on_message method
        self.consumer.on_message = mock.Mock()
        run = threading.Timer(5, self.consumer.stop)
        run.start()
        self.consumer.run()
        self.mocked_logger.info.assert_called_with("Stopping the consumer")

    def test_setup_connection_exception(self):
        """ Test that the init of Consumer can handle not being able to connect to Kafka """
        with mock.patch("autoreduce_qp.queue_processor.confluent_consumer.DeserializingConsumer",
                        side_effect=confluent_kafka.KafkaException):
            self.assertRaises(ConnectionException, setup_connection)