def submit_run(self, summary_rb_number, run_number, file_name):
    """
    Submit a run to ActiveMQ
    :param summary_rb_number: RB number of the experiment as read from the summary file
    :param run_number: Run number as it appears in lastrun.txt
    :param file_name: File name e.g. GEM1234.nxs
    """
    # Check to see if the last run exists, if not then raise an exception
    file_path = os.path.join(self.data_dir, CYCLE_FOLDER, file_name)
    if os.path.isfile(file_path):
        # Attempt to read an RB number from the Nexus file. This is most
        # useful for high frequency instruments like ENGINX where reading
        # from the summary is unreliable.
        rb_number = read_rb_number_from_nexus_file(file_path)
        if rb_number is None:
            rb_number = summary_rb_number
        EORM_LOG.info("Submitting '%s' with RB number '%s'", file_name, rb_number)
        message = Message(instrument=self.instrument_name,
                          rb_number=rb_number,
                          run_number=run_number,
                          data=file_path,
                          started_by=0)  # Autoreduction service code
        message.validate('/queue/DataReady')
        self.client.send('/queue/DataReady', message, priority='9')
    else:
        raise FileNotFoundError("File does not exist '{}'".format(file_path))
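A minimal usage sketch for the method above. The monitor class name `EndOfRunMonitor`, its constructor arguments and the example values are assumptions for illustration; only the `submit_run` signature and the `FileNotFoundError` behaviour come from the code above.

# Hypothetical caller of submit_run; class name, constructor and values are illustrative only.
monitor = EndOfRunMonitor(instrument_name="GEM", data_dir="/archive/NDXGEM/Instrument/data")
try:
    # summary_rb_number is read from the summary file, run_number from lastrun.txt
    monitor.submit_run(summary_rb_number="2010163", run_number="1234", file_name="GEM1234.nxs")
except FileNotFoundError as err:
    # raised when the expected Nexus file is not present in the cycle folder
    EORM_LOG.error("Could not submit run: %s", err)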
def setUp(self):
    self.mocked_client = mock.Mock(spec=QueueListener)
    self.msg = Message()
    self.msg.populate({
        "run_number": 7654321,
        "rb_number": 1234567,
        "run_version": 0,
        "reduction_data": "/path/1",
        "started_by": -1,
        "data": "/path",
        "description": "This is a fake description",
        "instrument": "ARMI"  # Autoreduction Mock Instrument
    })
    with patch("logging.getLogger") as patched_logger:
        self.handler = HandleMessage(self.mocked_client)
        self.mocked_logger = patched_logger.return_value

    db_handle = model.database.access.start_database()
    self.data_model = db_handle.data_model
    self.variable_model = db_handle.variable_model

    self.experiment, _ = self.data_model.Experiment.objects.get_or_create(reference_number=1231231)
    self.instrument, _ = self.data_model.Instrument.objects.get_or_create(name="ARMI")
    status = STATUS.get_queued()
    fake_script_text = "scripttext"
    self.reduction_run = create_reduction_run_record(self.experiment, self.instrument, FakeMessage(), 0,
                                                     fake_script_text, status)
    self.reduction_run.save()
def run(self) -> Message:
    """
    Runs the reduction subprocess
    """
    try:
        # We need to run the reduction in a new process, otherwise scripts
        # will fail when they use things that require access to a main loop
        # e.g. a GUI main loop, for matplotlib or Mantid
        python_path = sys.executable
        with tempfile.NamedTemporaryFile("w+") as temp_output_file:
            args = [python_path, RUNNER_PATH, self.message.serialize(), temp_output_file.name]
            logging.info("Calling: %s %s %s %s", python_path, RUNNER_PATH,
                         self.message.serialize(limit_reduction_script=True), temp_output_file.name)

            # run process until finished and check the exit code for success
            subprocess.run(args, check=True)

            # the subprocess will write out the result message in the tempfile, read it back
            result_message_raw = temp_output_file.file.read()
            result_message = Message()
            result_message.populate(result_message_raw)
    except subprocess.CalledProcessError:
        logging.error("Processing encountered an error: %s", traceback.format_exc())
        self.message.message = f"Processing encountered an error: {traceback.format_exc()}"
        result_message = self.message
    return result_message
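A minimal sketch of the contract the script at RUNNER_PATH has to fulfil, inferred from the arguments built above: argv carries the serialized Message and the temp output file path, and the parent reads the result message back from that file. The module layout, the import path and the placeholder reduction step are assumptions; only the argv layout and the write-back come from the code above.

# Hypothetical runner entry point; import path and reduction step are assumed.
import sys

from model.message.message import Message  # import path assumed


def main():
    serialized_message, output_path = sys.argv[1], sys.argv[2]
    message = Message()
    message.populate(serialized_message)

    # ... perform the reduction here and record the outcome on `message` ...

    # write the (possibly updated) message back so the parent process can read it
    with open(output_path, "w") as output_file:
        output_file.write(message.serialize())


if __name__ == "__main__":
    main()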
def on_message(self, headers, message):
    """ Handle a message consumed from the queue. """
    with self.mark_processing():
        destination = headers["destination"]
        priority = headers["priority"]
        self.logger.info("Destination: %s Priority: %s", destination, priority)
        # Load the JSON message and header into dictionaries
        try:
            if not isinstance(message, Message):
                json_string = message
                message = Message()
                message.populate(json_string)
        except ValueError:
            self.logger.error("Could not decode message from %s\n\n%s", destination, traceback.format_exc())
            return

        # the connection is configured with client-individual acknowledgement, so each
        # client has to explicitly acknowledge every message it receives, otherwise the
        # message stays on the queue and is redelivered
        self.client.ack(headers["message-id"], headers["subscription"])

        try:
            if destination == '/queue/DataReady':
                self.message_handler.data_ready(message)
            else:
                self.logger.error("Received a message on an unknown topic '%s'", destination)
        except Exception as exp:  # pylint:disable=broad-except
            self.logger.error("Unhandled exception encountered: %s %s\n\n%s", type(exp).__name__, exp,
                              traceback.format_exc())
def setUp(self): """Setup values for Post-Process Admin""" self.data = { 'data': '\\\\isis\\inst$\\data.nxs', 'facility': 'ISIS', 'instrument': 'GEM', 'rb_number': '1234', 'run_number': '4321', 'reduction_script': 'print(\'hello\')', 'reduction_arguments': 'None' } self.test_fname = "111.txt" self.test_root = os.path.join("instrument", "GEM", "RBNumber", "RB2010163", "autoreduced", "111") self.test_paths = [ os.path.join(self.test_root, "0"), os.path.join(self.test_root, "1"), os.path.join(self.test_root, "2") ] self.message = Message() self.message.populate(self.data) self.ceph_directory = MISC['ceph_directory'] % ( self.data["instrument"], self.data["rb_number"], self.data["run_number"]) self.temporary_directory = MISC['temp_root_directory'] self.log_and_err_name = f"RB{self.data['rb_number']}Run{self.data['run_number']}" self.reduce_result_dir = self.temporary_directory + self.ceph_directory
def on_message(self, headers, message):
    """ Handle a message consumed from the queue. """
    destination = headers["destination"]
    self._priority = headers["priority"]
    self._logger.info("Destination: %s Priority: %s", destination, self._priority)
    # Load the JSON message and header into dictionaries
    try:
        if not isinstance(message, Message):
            json_string = message
            message = Message()
            message.populate(json_string)
    except ValueError:
        self._logger.error("Could not decode message from %s", destination)
        self._logger.error(sys.exc_info()[1])
        return

    try:
        if destination == '/queue/DataReady':
            self._message_handler.data_ready(message)
        elif destination == '/queue/ReductionStarted':
            self._message_handler.reduction_started(message)
        elif destination == '/queue/ReductionComplete':
            self._message_handler.reduction_complete(message)
        elif destination == '/queue/ReductionError':
            self._message_handler.reduction_error(message)
        elif destination == '/queue/ReductionSkipped':
            self._message_handler.reduction_skipped(message)
        else:
            self._logger.warning("Received a message on an unknown topic '%s'", destination)
    except InvalidStateException as exp:
        self._logger.error("Stomp Client message handling exception: %s %s", type(exp).__name__, exp)
        self._logger.error(traceback.format_exc())
def test_populate_with_invalid_key(self, mock_log):
    """
    Test: A warning is logged
    When: An unknown key is used to populate the Message
    """
    args = {'unknown': True}
    msg = Message()
    msg.populate(args)
    mock_log.assert_called_once()
def test_populate_with_invalid_key(self):
    """
    Test: A ValueError is raised
    When: An unknown key is used to populate the Message
    """
    args = {'unknown': True}
    msg = Message()
    with self.assertRaises(ValueError):
        msg.populate(args)
def test_send_with_message_instance(self, mock_stomp_send):
    """
    Test: send sends the given data using stomp.send
    When: send is called with a Message instance argument for message
    """
    client = QueueClient()
    message = Message(description="test-message")
    client.send('dataready', message)
    (args, _) = mock_stomp_send.call_args
    self.assertEqual(args[0], 'dataready')
    self.assertEqual(args[1], message.serialize())
def test_on_message_can_receive_a_prepopulated_message(self):
    """Test receiving an already constructed Message object"""
    message = Message()
    message.populate({"run_number": 1234567})

    self.listener.on_message(self.headers, message)

    self.assertFalse(self.listener.is_processing_message())
    self.mocked_logger.info.assert_called_once()
    self.mocked_client.ack.assert_called_once_with(self.headers["message-id"], self.headers["subscription"])
    self.mocked_handler.data_ready.assert_called_once()
    self.assertIsInstance(self.mocked_handler.data_ready.call_args[0][0], Message)
def setUp(self):
    self.valid_message = Message(
        run_number=1234,
        instrument='GEM',
        rb_number=1,
        started_by=-1,
        data='test/file/path',
    )
    self.invalid_message = Message(run_number='12345',
                                   instrument='not inst',
                                   rb_number=-1,
                                   started_by='test',
                                   data=123)
class TestReductionProcessManager(unittest.TestCase):
    def setUp(self) -> None:
        self.data = {
            'data': '\\\\isis\\inst$\\data.nxs',
            'facility': 'ISIS',
            'instrument': 'GEM',
            'rb_number': '1234',
            'run_number': '4321',
            'reduction_script': 'print(\'hello\')',  # not actually used for the reduction
            'reduction_arguments': 'None'
        }
        self.message = Message()
        self.message.populate(self.data)

    def test_init(self):
        """Test that the constructor is doing what's expected"""
        rpm = ReductionProcessManager(self.message)
        assert rpm.message == self.message

    @patch('queue_processors.queue_processor.reduction.process_manager.subprocess.run')
    def test_run_subprocess_error(self, subprocess_run: Mock):
        """Test proper handling of a subprocess encountering an error"""
        def side_effect(args, **_kwargs):
            raise CalledProcessError(1, args)

        subprocess_run.side_effect = side_effect

        rpm = ReductionProcessManager(self.message)
        rpm.run()

        subprocess_run.assert_called_once()
        assert "Processing encountered an error" in rpm.message.message

    @patch('queue_processors.queue_processor.reduction.process_manager.subprocess.run')
    def test_run(self, subprocess_run: Mock):
        """Tests the success path - the side effect writes the expected output file
        rather than raising an exception"""
        def side_effect(args, **_kwargs):
            with open(args[-1], 'w') as tmpfile:
                tmpfile.write(self.message.serialize())

        subprocess_run.side_effect = side_effect

        rpm = ReductionProcessManager(self.message)
        result_message = rpm.run()

        assert result_message == self.message
def setUp(self) -> None:
    self.data = {
        'data': '\\\\isis\\inst$\\data.nxs',
        'facility': 'ISIS',
        'instrument': 'GEM',
        'rb_number': '1234',
        'run_number': '4321',
        'reduction_script': 'print(\'hello\')',  # not actually used for the reduction
        'reduction_arguments': 'None'
    }
    self.message = Message()
    self.message.populate(self.data)
def test_validate_data_ready_valid(self):
    """
    Test: No exception is raised
    When: Calling validate for data_ready with a valid message
    """
    message = Message(instrument='GEM',
                      run_number=111,
                      rb_number=222,
                      data='file/path',
                      facility="ISIS",
                      started_by=0)
    try:
        self.assertIsNone(message.validate('/queue/DataReady'))
    except RuntimeError:
        self.fail()
def on_message(self, headers, data):
    """ Handle a consumed message. """
    destination = headers['destination']
    logger.debug("Received frame destination: %s", destination)
    logger.debug("Received frame priority: %s", headers["priority"])

    self.update_child_process_list()
    message = Message()
    message.populate(data)

    if message.cancel:
        self.add_cancel(message)
        return

    self.hold_message(destination, data, headers)
def _populated():
    """ Create and return a populated message object and corresponding dictionary """
    run_number = 11111
    rb_number = 22222
    description = 'test message'
    populated_msg = Message(run_number=run_number, rb_number=rb_number, description=description)
    populated_dict = {
        'description': description,
        'facility': "ISIS",
        'run_number': run_number,
        'instrument': None,
        'rb_number': rb_number,
        'started_by': None,
        'data': None,
        'overwrite': None,
        'run_version': None,
        'job_id': None,
        'reduction_script': None,
        'reduction_arguments': None,
        'reduction_log': "",
        'admin_log': "",
        'message': None,
        'retry_in': None,
        'reduction_data': None,
        'cancel': None,
        'software': None,
    }
    return populated_msg, populated_dict
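A sketch of how the fixture above might be exercised. It assumes `_populated` is module-level (it takes no self) and that every key in the returned dictionary is mirrored by a Message attribute of the same name; both assumptions are illustrative, not confirmed by the snippet.

# Hypothetical round-trip check built on the _populated fixture above.
def check_populated_matches_dict():
    populated_msg, populated_dict = _populated()
    for key, expected in populated_dict.items():
        # each dictionary key is assumed to map to a Message attribute of the same name
        assert getattr(populated_msg, key) == expected, key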
def test_wish_user_script_failure(self):
    """
    Test that a WISH run with a failing user script is handled by the system
    and ends up in the Error state
    """
    # Set meta data for test
    self.instrument = 'WISH'
    self.rb_number = 222
    self.run_number = 101

    # Create supporting data structures e.g. Data Archive, Reduce directory
    file_location = self._setup_data_structures(reduce_script='fail', vars_script='')

    # Create and send json message to ActiveMQ
    data_ready_message = Message(rb_number=self.rb_number,
                                 instrument=self.instrument,
                                 data=file_location,
                                 run_number=self.run_number,
                                 facility="ISIS",
                                 started_by=0)
    self.queue_client.send('/queue/DataReady', data_ready_message)

    # Get Result from database
    results = self._find_run_in_database()

    # Validate
    self.assertEqual(self.instrument, results[0].instrument.name)
    self.assertEqual(self.rb_number, results[0].experiment.reference_number)
    self.assertEqual(self.run_number, results[0].run_number)
    self.assertEqual('e', results[0].status.value)  # verbose value = "Error"
def _make_pending_msg(reduction_run):
    """
    Creates a Message from the given run, ready to be sent to ReductionPending.
    :param reduction_run: (ReductionRun) database object representing a reduction run / job
    :return: (Message) A constructed Message object from the meta data in the reduction_run
    """
    # Deferred import to avoid circular dependencies
    # pylint:disable=import-outside-toplevel
    from queue_processors.queue_processor.queueproc_utils.reduction_run_utils import ReductionRunUtils
    script, arguments = ReductionRunUtils().get_script_and_arguments(reduction_run)

    # Currently only support single location
    model = access.start_database().data_model
    data_location = model.DataLocation.filter_by(reduction_run_id=reduction_run.id).first()
    if data_location:
        data_path = data_location.file_path
    else:
        raise RuntimeError("No data path found for reduction run")

    message = Message(run_number=reduction_run.run_number,
                      instrument=reduction_run.instrument.name,
                      rb_number=str(reduction_run.experiment.reference_number),
                      data=data_path,
                      reduction_script=script,
                      reduction_arguments=arguments,
                      run_version=reduction_run.run_version,
                      facility=FACILITY)
    return message
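A minimal usage sketch for the helper above, following the `client.send(destination, message)` pattern used in the other snippets. The exact destination string and the `queue_client` variable are assumptions; the docstring only says the message is bound for ReductionPending.

# Hypothetical caller; queue name and client variable are assumed for illustration.
pending_message = _make_pending_msg(reduction_run)
queue_client.send('/queue/ReductionPending', pending_message)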
def test_on_message_stores_priority(self):
    """
    Tests that the client stores the priority from the msg header
    """
    headers = self._get_header()
    self.listener.on_message(headers=headers, message=Message())
    self.assertEqual(headers["priority"], self.listener._priority)
def send_retry_message(user_id: int, most_recent_run: ReductionRun, run_description: str, script_text: str,
                       new_script_arguments: dict, overwrite_previous_data: bool):
    """
    Creates & sends a retry message given the parameters

    :param user_id: The user submitting the run
    :param most_recent_run: The most recent run, used for common things across the two runs
                            like run number, instrument name, etc
    :param run_description: Description of the rerun
    :param script_text: The script that will NOT be used for this reduction, because of a known issue
                        https://github.com/ISISScientificComputing/autoreduce/issues/1115
    :param new_script_arguments: Dict of arguments that will be used for the reduction
    :param overwrite_previous_data: Whether to overwrite the previous data in the data location
    """
    message = Message(started_by=user_id,
                      description=run_description,
                      run_number=most_recent_run.run_number,
                      instrument=most_recent_run.instrument.name,
                      rb_number=most_recent_run.experiment.reference_number,
                      data=most_recent_run.data_location.first().file_path,
                      reduction_script=script_text,
                      reduction_arguments=new_script_arguments,
                      run_version=most_recent_run.run_version,
                      facility=FACILITY,
                      software=most_recent_run.software,
                      overwrite=overwrite_previous_data)
    MessagingUtils.send(message)
def setUp(self): """ Start all external services """ # Get all clients self.database_client = DatabaseClient() self.database_client.connect() try: self.queue_client, self.listener = main() except ConnectionException as err: raise RuntimeError( "Could not connect to ActiveMQ - check you credentials. If running locally check that " "ActiveMQ is running and started by `python setup.py start`" ) from err # Add placeholder variables: # these are used to ensure runs are deleted even if test fails before completion self.instrument = 'ARMI' self.rb_number = 1234567 self.run_number = 101 # Create test archive and add data self.data_archive = DataArchive([self.instrument], 19, 19) self.data_archive.create() # Create and send json message to ActiveMQ self.data_ready_message = Message(rb_number=self.rb_number, instrument=self.instrument, run_number=self.run_number, description="This is a system test", facility="ISIS", started_by=0)
def create_run_records(self, message: Message):
    """
    Creates or gets the necessary records to construct a ReductionRun
    """
    # This must be done before looking up the run version to make sure the record exists
    experiment = db_access.get_experiment(message.rb_number)
    run_version = db_access.find_highest_run_version(experiment, run_number=str(message.run_number))
    message.run_version = run_version
    instrument = db_access.get_instrument(str(message.instrument))

    script = ReductionScript(instrument.name)
    script_text = script.text()

    # Make the new reduction run with the information collected so far
    reduction_run = db_records.create_reduction_run_record(experiment=experiment,
                                                           instrument=instrument,
                                                           message=message,
                                                           run_version=run_version,
                                                           script_text=script_text,
                                                           status=self.status.get_queued())
    reduction_run.save()

    # Create a new data location entry which has a foreign key linking it to the current
    # reduction run. The file path itself will point to a datafile
    # (e.g. "/isis/inst$/NDXWISH/Instrument/data/cycle_17_1/WISH00038774.nxs")
    data_location = self.data_model.DataLocation(file_path=message.data, reduction_run_id=reduction_run.pk)
    data_location.save()

    return reduction_run, message, instrument
def test_validate_data_ready_invalid(self):
    """
    Test: An exception is raised
    When: An invalid Message is validated
    """
    message = Message(instrument='Not an inst')
    self.assertRaises(RuntimeError, message.validate, '/queue/DataReady')
def create_run_variables(self, reduction_run, message: Message, instrument):
    """
    Creates the RunVariables for this ReductionRun
    """
    # Create all of the variables for the run that are described in its reduce_vars.py
    self._logger.info('Creating variables for run')
    variables = self.instrument_variable.create_run_variables(reduction_run, message.reduction_arguments)
    if not variables:
        self._logger.warning("No instrument variables found on %s for run %s", instrument.name,
                             message.run_number)

    self._logger.info('Getting script and arguments')
    message.reduction_script = reduction_run.script
    message.reduction_arguments = self.get_script_arguments(variables)
    return message
def find_reason_to_skip_run(reduction_run, message: Message, instrument) -> Optional[str]:
    """
    Determines whether the processing should be skipped.
    The run will be skipped if the reduction script is empty, the message
    validation fails, or the instrument is paused.
    """
    if reduction_run.script == "":
        return "Script text for current instrument is empty"

    try:
        message.validate("/queue/DataReady")
    except RuntimeError as validation_err:
        return f"Validation error from handler: {str(validation_err)}"

    if instrument.is_paused:
        return f"Run {message.run_number} has been skipped because the instrument {instrument.name} is paused"

    return None
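A sketch of how the skip check might be exercised in a test, reusing the fixture names from the HandleMessage setUp shown earlier (`self.reduction_run`, `self.msg`, `self.instrument`). That those fixtures satisfy the other two conditions (non-empty script, valid message) is an assumption for illustration.

# Hypothetical test of the paused-instrument branch; fixture behaviour is assumed.
def test_skip_reason_for_paused_instrument(self):
    self.instrument.is_paused = True
    reason = find_reason_to_skip_run(self.reduction_run, self.msg, self.instrument)
    assert reason is not None
    assert "is paused" in reason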
def _construct_and_send_skipped(self, rb_number, reason, message: Message):
    """
    Construct a message and send it to the skipped reduction queue
    :param rb_number: The RB Number associated with the reduction job
    :param reason: The error that caused the run to be skipped
    :param message: The Message to update with the reason and send
    """
    self._logger.warning("Skipping non-integer RB number: %s", rb_number)
    msg = 'Reduction Skipped: {}. Assuming run number to be a calibration run.'.format(reason)
    message.message = msg
    skipped_queue = ACTIVEMQ_SETTINGS.reduction_skipped
    self._client.send_message(skipped_queue, message)
def hold_message(self, destination, data, headers):
    """ Calls the reduction script. """
    logger.debug("holding thread")
    message = Message()
    message.populate(data)

    self.update_child_process_list()
    if not self.should_proceed(message):  # wait while the run shouldn't proceed
        # pylint: disable=maybe-no-member
        reactor.callLater(10, self.hold_message,  # pragma: no cover
                          destination, data, headers)
        return

    if self.should_cancel(message):
        self.cancel_run(message)  # pylint: disable=maybe-no-member
        return

    if not os.path.isfile(MISC['post_process_directory']):
        logger.warning("Could not find autoreduction post processing file "
                       "- please contact a system administrator")

    python_path = sys.executable
    logger.info("Calling: %s %s %s %s", python_path, MISC['post_process_directory'], destination,
                message.serialize(limit_reduction_script=True))
    proc = subprocess.Popen([python_path, MISC['post_process_directory'], destination,
                             message.serialize()])  # PPA expects json data
    self.add_process(proc, message)
def test_new_reduction_data_path_overwrite_paths_exist(self, _):
    """
    Test: The given path is returned with a 0 directory appended
    When: _new_reduction_data_path is called on an existing path with overwrite: True
    """
    self.setup_test_dir_structure(self.test_paths)
    mock_self = Mock()
    mock_self.message = Message(overwrite=True)

    expected = append_path(self.test_root, "0")
    actual = PostProcessAdmin._new_reduction_data_path(mock_self, self.test_root)
    self.assertEqual(expected, actual)
def test_new_reduction_data_only_root_path_exists(self):
    """
    Test: The given path is returned with a 0 directory appended
    When: _new_reduction_data_path is called on a path without version sub-directories
    """
    self.setup_test_dir_structure([self.test_root])
    mock_self = Mock()
    mock_self.message = Message(overwrite=None)

    expected = append_path(self.test_root, "0")
    actual = PostProcessAdmin._new_reduction_data_path(mock_self, self.test_root)
    self.assertEqual(expected, actual)
def submit_run(active_mq_client, rb_number, instrument, data_file_location, run_number):
    """
    Submit a new run for autoreduction
    :param active_mq_client: The client for access to ActiveMQ
    :param rb_number: desired experiment RB number
    :param instrument: name of the instrument
    :param data_file_location: location of the data file
    :param run_number: run number of the experiment
    """
    if active_mq_client is None:
        print("ActiveMQ not connected, cannot submit runs")
        return

    message = Message(rb_number=rb_number,
                      instrument=instrument,
                      data=data_file_location,
                      run_number=run_number,
                      facility="ISIS",
                      started_by=-1)
    active_mq_client.send('/queue/DataReady', message, priority=1)
    print("Submitted run: \r\n" + message.serialize(indent=1))
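A minimal sketch of a manual submission using the helper above. The QueueClient import path, the `connect()` call and all argument values are assumptions made for illustration; only the `submit_run` signature and the QueueClient/`send` pattern come from the snippets above.

# Hypothetical manual submission; import path, connect() and values are assumed.
from utils.clients.queue_client import QueueClient  # import path assumed

client = QueueClient()
client.connect()  # assumed to mirror the DatabaseClient.connect() call seen earlier
submit_run(active_mq_client=client,
           rb_number="2010163",
           instrument="GEM",
           data_file_location="\\\\isis\\inst$\\NDXGEM\\Instrument\\data\\cycle_19_1\\GEM1234.nxs",
           run_number=1234)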