def _get_state_gui_model():
    """Build a StateGuiModel from the module-level sample user file.

    Writes the sample user file to a temporary location, parses it, deletes
    the temporary file again, and returns the populated model.
    """
    path = create_user_file(sample_user_file)
    items = UserFileReader(path).read_user_file()
    # The parsed items are all we need - drop the temporary file straight away.
    if os.path.exists(path):
        os.remove(path)
    return StateGuiModel(items)
def _get_state_gui_model():
    """Parse the sample user file into a StateGuiModel.

    The sample user file is materialised on disk only long enough to be read;
    the temporary file is removed before the model is returned.
    """
    user_file = create_user_file(sample_user_file)
    reader = UserFileReader(user_file)
    parsed_items = reader.read_user_file()
    # Clean up the temporary file; only the parsed items are needed from here on.
    if os.path.exists(user_file):
        os.remove(user_file)
    return StateGuiModel(parsed_items)
def test_that_create_state_creates_correct_state(self):
    """create_state built from the sample user file should carry the requested sample scatter run."""
    user_file_path = create_user_file(sample_user_file)
    try:
        user_file_reader = UserFileReader(user_file_path)
        user_file_items = user_file_reader.read_user_file()
        state_from_view = StateGuiModel(user_file_items)
        state = create_state(state_from_view, "SANS2D00022024", '', SANSFacility.ISIS)
        self.assertEqual(state.data.sample_scatter, "SANS2D00022024")
    finally:
        # Fix: the original never removed the temporary user file, leaking it on
        # every run; sibling helpers in this file always clean up after reading.
        if os.path.exists(user_file_path):
            os.remove(user_file_path)
def _process_user_file(self, command):
    """
    Processes a user file and retains the parsed tags.

    The parsed entries are merged into this object's processed state settings
    via add_to_processed_state_settings.

    @param command: the command with the user file path (path is values[0])
    """
    file_name = command.values[0]
    user_file_reader = UserFileReader(file_name)
    new_state_entries = user_file_reader.read_user_file()
    self.add_to_processed_state_settings(new_state_entries)
def test_that_create_state_creates_correct_state(self):
    """The state produced from the sample user file exposes the requested run as sample scatter."""
    user_file_path = create_user_file(sample_user_file)
    try:
        user_file_reader = UserFileReader(user_file_path)
        user_file_items = user_file_reader.read_user_file()
        state_from_view = StateGuiModel(user_file_items)
        state = create_state(state_from_view, "SANS2D00022024", '', SANSFacility.ISIS)
        self.assertEqual(state.data.sample_scatter, "SANS2D00022024")
    finally:
        # Fix: remove the temporary user file created above - the original test
        # leaked it, unlike the other helpers/tests in this file which clean up.
        if os.path.exists(user_file_path):
            os.remove(user_file_path)
def create_gui_state_from_userfile(row_user_file, state_model):
    """Create a StateGuiModel from a row-level user file.

    Resolves the user file through FileFinder, parses it, and copies the
    save types across from the supplied state model.

    :param row_user_file: the user file name attached to a table row
    :param state_model: the existing state model whose save_types are carried over
    :return: a StateGuiModel populated from the row's user file
    :raises RuntimeError: if the resolved user file path does not exist
    """
    resolved_path = FileFinder.getFullPath(row_user_file)
    if not os.path.exists(resolved_path):
        raise RuntimeError("The user path {} does not exist. Make sure a valid user file path"
                           " has been specified.".format(resolved_path))

    parsed_items = UserFileReader(resolved_path).read_user_file()

    gui_state = StateGuiModel(parsed_items)
    gui_state.save_types = state_model.save_types
    return gui_state
def on_user_file_load(self):
    """
    Loads the user file. Populates the models and the view.

    On any failure the error is logged and shown to the user in a warning
    box; no exception escapes this handler.
    """
    try:
        # 1. Get the user file path from the view
        user_file_path = self._view.get_user_file_path()
        if not user_file_path:
            # Nothing selected in the view - silently do nothing.
            return
        # 2. Get the full file path
        user_file_path = FileFinder.getFullPath(user_file_path)
        if not os.path.exists(user_file_path):
            raise RuntimeError("The user path {} does not exist. Make sure a valid user file path"
                               " has been specified.".format(user_file_path))

        # Remember the resolved user file on the table model before repopulating.
        self._table_model.user_file = user_file_path
        # Clear out the current view
        self._view.reset_all_fields_to_default()

        # 3. Read and parse the user file
        user_file_reader = UserFileReader(user_file_path)
        user_file_items = user_file_reader.read_user_file()

        # 4. Populate the model
        self._state_model = StateGuiModel(user_file_items)
        # 5. Update the views.
        self._update_view_from_state_model()
        self._beam_centre_presenter.update_centre_positions(self._state_model)

        # Notify child presenters so their rows reflect the new state.
        self._beam_centre_presenter.on_update_rows()
        self._masking_table_presenter.on_update_rows()
        self._workspace_diagnostic_presenter.on_user_file_load(user_file_path)
    except Exception as e:
        self.sans_logger.error("Loading of the user file failed. {}".format(str(e)))
        self.display_warning_box('Warning', 'Loading of the user file failed.', str(e))
def _create_states(self, state_model, table_model, row_index=None):
    """
    Here we create the states based on the settings in the models

    :param state_model: the state model object
    :param table_model: the table model object
    :param row_index: the selected row, if None then all rows are generated
    :return: a dict of generated states keyed by row (filled by
             self._create_row_state), or None if row_index is out of range
    :raises RuntimeError: if a row-level user file cannot be found on disk
    """
    number_of_rows = self._view.get_number_of_rows()
    if row_index is not None:
        # Check if the selected index is valid
        if row_index >= number_of_rows:
            return None
        rows = [row_index]
    else:
        rows = range(number_of_rows)
    states = {}
    # Default director used for rows without their own user file.
    gui_state_director = GuiStateDirector(table_model, state_model, self._facility)
    for row in rows:
        self.sans_logger.information("Generating state for row {}".format(row))
        if not self.is_empty_row(row):
            row_user_file = table_model.get_row_user_file(row)
            if row_user_file:
                # This row overrides the global user file: parse its own file
                # and build a dedicated state model/director for it.
                user_file_path = FileFinder.getFullPath(row_user_file)
                if not os.path.exists(user_file_path):
                    raise RuntimeError("The user path {} does not exist. Make sure a valid user file path"
                                       " has been specified.".format(user_file_path))

                user_file_reader = UserFileReader(user_file_path)
                user_file_items = user_file_reader.read_user_file()

                row_state_model = StateGuiModel(user_file_items)
                row_gui_state_director = GuiStateDirector(table_model, row_state_model, self._facility)
                self._create_row_state(row_gui_state_director, states, row)
            else:
                self._create_row_state(gui_state_director, states, row)
    return states
def on_user_file_load(self):
    """
    Loads the user file. Populates the models and the view.

    Failures are logged (including a hint about Mantid search directories);
    no exception escapes this handler.
    """
    try:
        # 1. Get the user file path from the view
        user_file_path = self._view.get_user_file_path()
        if not user_file_path:
            # Nothing selected - leave the current state untouched.
            return
        # 2. Get the full file path
        user_file_path = FileFinder.getFullPath(user_file_path)
        if not os.path.exists(user_file_path):
            raise RuntimeError("The user path {} does not exist. Make sure a valid user file path"
                               " has been specified.".format(user_file_path))

        # Clear out the current view
        self._view.reset_all_fields_to_default()

        # 3. Read and parse the user file
        user_file_reader = UserFileReader(user_file_path)
        user_file_items = user_file_reader.read_user_file()

        # 4. Populate the model
        self._state_model = StateGuiModel(user_file_items)
        # 5. Update the views.
        self._update_view_from_state_model()

        # 6. Perform calls on child presenters
        self._masking_table_presenter.on_update_rows()
        self._settings_diagnostic_tab_presenter.on_update_rows()
    except Exception as e:
        self.sans_logger.error("Loading of the user file failed. Ensure that the path to your files has been added "
                               "to the Mantid search directories! See here for more details: {}".format(str(e)))
def _process_user_file(self, command, file_information):
    """
    Processes a user file and retains the parsed tags.

    TOML files are parsed with TomlParser; legacy user files are parsed with
    UserFileReader and then adapted into state objects via
    CommandInterfaceAdapter. In both cases the data info held by this object
    is attached to the result.

    @param command: the command with the user file path (path is values[0])
    @param file_information: file information forwarded to the parsers/adapters
    @return: the new state entries with .data set to self._data_info
    """
    file_name = command.values[0]
    # Case-insensitive extension check decides between TOML and legacy format.
    if file_name.casefold().endswith(".toml".casefold()):
        toml_file_reader = TomlParser()
        new_state_entries = toml_file_reader.parse_toml_file(toml_file_path=file_name,
                                                             file_information=file_information)
    else:
        # Now comes the fun part where we try to coerce this to put out a State* object
        user_file_reader = UserFileReader(file_name)
        old_param_mapping = user_file_reader.read_user_file()
        command_adapter = CommandInterfaceAdapter(processed_state=old_param_mapping,
                                                  file_information=file_information)
        new_state_entries = command_adapter.get_all_states(file_information=file_information)
    new_state_entries.data = self._data_info
    return new_state_entries
def test_that_can_read_user_file(self):
    """Parsing the sample user file yields exactly the expected tag -> values mapping."""
    # Arrange
    user_file_path = create_user_file(sample_user_file)
    reader = UserFileReader(user_file_path)

    # Act
    output = reader.read_user_file()

    # Assert
    # Expected parse result for every tag present in sample_user_file.
    expected_values = {LimitsId.wavelength: [simple_range(start=1.5, stop=12.5, step=0.125,
                                                          step_type=RangeStepType.Lin)],
                       LimitsId.q: [q_rebin_values(min=.001, max=.2,
                                                   rebin_string="0.001,0.001,0.0126,-0.08,0.2")],
                       LimitsId.qxy: [simple_range(0, 0.05, 0.001, RangeStepType.Lin)],
                       BackId.single_monitors: [back_single_monitor_entry(1, 35000, 65000),
                                                back_single_monitor_entry(2, 85000, 98000)],
                       DetectorId.reduction_mode: [ISISReductionMode.LAB],
                       GravityId.on_off: [True],
                       FitId.general: [fit_general(start=1.5, stop=12.5, fit_type=FitType.Logarithmic,
                                                   data_type=None, polynomial_order=0)],
                       MaskId.vertical_single_strip_mask: [single_entry_with_detector(191, DetectorType.LAB),
                                                           single_entry_with_detector(191, DetectorType.HAB),
                                                           single_entry_with_detector(0, DetectorType.LAB),
                                                           single_entry_with_detector(0, DetectorType.HAB)],
                       MaskId.horizontal_single_strip_mask: [single_entry_with_detector(0, DetectorType.LAB),
                                                             single_entry_with_detector(0, DetectorType.HAB)],
                       MaskId.horizontal_range_strip_mask: [range_entry_with_detector(190, 191, DetectorType.LAB),
                                                            range_entry_with_detector(167, 172, DetectorType.LAB),
                                                            range_entry_with_detector(190, 191, DetectorType.HAB),
                                                            range_entry_with_detector(156, 159, DetectorType.HAB)],
                       MaskId.time: [range_entry_with_detector(17500, 22000, None)],
                       MonId.direct: [monitor_file("DIRECTM1_15785_12m_31Oct12_v12.dat", DetectorType.LAB),
                                      monitor_file("DIRECTM1_15785_12m_31Oct12_v12.dat", DetectorType.HAB)],
                       MonId.spectrum: [monitor_spectrum(1, True, True),
                                        monitor_spectrum(1, False, True)],
                       SetId.centre: [position_entry(155.45, -169.6, DetectorType.LAB)],
                       SetId.scales: [set_scales_entry(0.074, 1.0, 1.0, 1.0, 1.0)],
                       SampleId.offset: [53.0],
                       DetectorId.correction_x: [single_entry_with_detector(-16.0, DetectorType.LAB),
                                                 single_entry_with_detector(-44.0, DetectorType.HAB)],
                       DetectorId.correction_y: [single_entry_with_detector(-20.0, DetectorType.HAB)],
                       DetectorId.correction_z: [single_entry_with_detector(47.0, DetectorType.LAB),
                                                 single_entry_with_detector(47.0, DetectorType.HAB)],
                       DetectorId.correction_rotation: [single_entry_with_detector(0.0, DetectorType.HAB)],
                       LimitsId.events_binning: ["7000.0,500.0,60000.0"],
                       MaskId.clear_detector_mask: [True],
                       MaskId.clear_time_mask: [True],
                       LimitsId.radius: [range_entry(12, 15)],
                       TransId.spec_shift: [-70.],
                       PrintId.print_line: ["for changer"],
                       BackId.all_monitors: [range_entry(start=3500, stop=4500)],
                       FitId.monitor_times: [range_entry(start=1000, stop=2000)],
                       TransId.spec: [4],
                       BackId.trans: [range_entry(start=123, stop=466)],
                       TransId.radius: [7.0],
                       TransId.roi: ["test.xml", "test2.xml"],
                       TransId.mask: ["test3.xml", "test4.xml"],
                       SampleId.path: [True],
                       LimitsId.radius_cut: [200.0],
                       LimitsId.wavelength_cut: [8.0],
                       QResolutionId.on: [True],
                       QResolutionId.delta_r: [11.],
                       QResolutionId.collimation_length: [12.],
                       QResolutionId.a1: [13.],
                       QResolutionId.a2: [14.],
                       QResolutionId.moderator: ["moderator_rkh_file.txt"],
                       TubeCalibrationFileId.file: ["TUBE_SANS2D_BOTH_31681_25Sept15.nxs"]}

    # The parsed output must contain exactly the expected tags, and for each
    # tag exactly the expected entries (order-insensitive: both sides are
    # sorted before comparison).
    self.assertEqual(len(expected_values), len(output))
    for key, value in list(expected_values.items()):
        self.assertTrue(key in output)
        self.assertEqual(len(output[key]), len(value))
        elements = output[key]
        # Make sure that the different entries are sorted
        UserFileReaderTest._sort_list(elements)
        UserFileReaderTest._sort_list(value)
        self.assertEqual(elements, value, "{} is not {}".format(elements, value))

    # clean up
    if os.path.exists(user_file_path):
        os.remove(user_file_path)
def test_that_can_read_user_file(self):
    """Parsing the sample user file yields exactly the expected tag -> values mapping."""
    # Arrange
    user_file_path = create_user_file(sample_user_file)
    reader = UserFileReader(user_file_path)

    # Act
    output = reader.read_user_file()

    # Assert
    # Expected parse result for every tag present in sample_user_file.
    expected_values = {
        LimitsId.wavelength: [
            simple_range(start=1.5, stop=12.5, step=0.125, step_type=RangeStepType.Lin)
        ],
        LimitsId.q: [
            q_rebin_values(min=.001, max=.2, rebin_string="0.001,0.001,0.0126,-0.08,0.2")
        ],
        LimitsId.qxy: [simple_range(0, 0.05, 0.001, RangeStepType.Lin)],
        BackId.single_monitors: [
            back_single_monitor_entry(1, 35000, 65000),
            back_single_monitor_entry(2, 85000, 98000)
        ],
        DetectorId.reduction_mode: [ISISReductionMode.LAB],
        GravityId.on_off: [True],
        FitId.general: [
            fit_general(start=1.5, stop=12.5, fit_type=FitType.Logarithmic,
                        data_type=None, polynomial_order=0)
        ],
        MaskId.vertical_single_strip_mask: [
            single_entry_with_detector(191, DetectorType.LAB),
            single_entry_with_detector(191, DetectorType.HAB),
            single_entry_with_detector(0, DetectorType.LAB),
            single_entry_with_detector(0, DetectorType.HAB)
        ],
        MaskId.horizontal_single_strip_mask: [
            single_entry_with_detector(0, DetectorType.LAB),
            single_entry_with_detector(0, DetectorType.HAB)
        ],
        MaskId.horizontal_range_strip_mask: [
            range_entry_with_detector(190, 191, DetectorType.LAB),
            range_entry_with_detector(167, 172, DetectorType.LAB),
            range_entry_with_detector(190, 191, DetectorType.HAB),
            range_entry_with_detector(156, 159, DetectorType.HAB)
        ],
        MaskId.time: [range_entry_with_detector(17500, 22000, None)],
        MonId.direct: [
            monitor_file("DIRECTM1_15785_12m_31Oct12_v12.dat", DetectorType.LAB),
            monitor_file("DIRECTM1_15785_12m_31Oct12_v12.dat", DetectorType.HAB)
        ],
        MonId.spectrum: [
            monitor_spectrum(1, True, True),
            monitor_spectrum(1, False, True)
        ],
        SetId.centre: [position_entry(155.45, -169.6, DetectorType.LAB)],
        SetId.scales: [set_scales_entry(0.074, 1.0, 1.0, 1.0, 1.0)],
        SampleId.offset: [53.0],
        DetectorId.correction_x: [
            single_entry_with_detector(-16.0, DetectorType.LAB),
            single_entry_with_detector(-44.0, DetectorType.HAB)
        ],
        DetectorId.correction_y: [single_entry_with_detector(-20.0, DetectorType.HAB)],
        DetectorId.correction_z: [
            single_entry_with_detector(47.0, DetectorType.LAB),
            single_entry_with_detector(47.0, DetectorType.HAB)
        ],
        DetectorId.correction_rotation: [single_entry_with_detector(0.0, DetectorType.HAB)],
        LimitsId.events_binning: ["7000.0,500.0,60000.0"],
        MaskId.clear_detector_mask: [True],
        MaskId.clear_time_mask: [True],
        LimitsId.radius: [range_entry(12, 15)],
        TransId.spec_shift: [-70.],
        PrintId.print_line: ["for changer"],
        BackId.all_monitors: [range_entry(start=3500, stop=4500)],
        FitId.monitor_times: [range_entry(start=1000, stop=2000)],
        TransId.spec: [4],
        BackId.trans: [range_entry(start=123, stop=466)],
        TransId.radius: [7.0],
        TransId.roi: ["test.xml", "test2.xml"],
        TransId.mask: ["test3.xml", "test4.xml"],
        SampleId.path: [True],
        LimitsId.radius_cut: [200.0],
        LimitsId.wavelength_cut: [8.0],
        QResolutionId.on: [True],
        QResolutionId.delta_r: [11.],
        QResolutionId.collimation_length: [12.],
        QResolutionId.a1: [13.],
        QResolutionId.a2: [14.],
        QResolutionId.moderator: ["moderator_rkh_file.txt"],
        TubeCalibrationFileId.file: ["TUBE_SANS2D_BOTH_31681_25Sept15.nxs"]
    }

    # Fix: use assertEqual instead of assertTrue(a == b) so that failures
    # report the mismatching values - consistent with the sibling variants of
    # this test elsewhere in the codebase.
    self.assertEqual(len(expected_values), len(output))
    for key, value in list(expected_values.items()):
        self.assertTrue(key in output)
        self.assertEqual(len(output[key]), len(value))
        elements = output[key]
        # Make sure that the different entries are sorted
        UserFileReaderTest._sort_list(elements)
        UserFileReaderTest._sort_list(value)
        self.assertEqual(elements, value, "{} is not {}".format(elements, value))

    # clean up
    if os.path.exists(user_file_path):
        os.remove(user_file_path)
def test_that_can_read_user_file(self):
    """Parsing the sample user file yields exactly the expected tag -> values mapping."""
    # Arrange
    user_file_path = create_user_file(sample_user_file)
    reader = UserFileReader(user_file_path)

    # Act
    output = reader.read_user_file()

    # Assert
    # Expected parse result for every tag present in sample_user_file.
    expected_values = {
        LimitsId.WAVELENGTH: [
            simple_range(start=1.5, stop=12.5, step=0.125, step_type=RangeStepType.LIN)
        ],
        LimitsId.Q: [
            q_rebin_values(min=.001, max=.2, rebin_string="0.001,0.001,0.0126,-0.08,0.2")
        ],
        LimitsId.QXY: [simple_range(0, 0.05, 0.001, RangeStepType.LIN)],
        BackId.SINGLE_MONITORS: [
            back_single_monitor_entry(1, 35000, 65000),
            back_single_monitor_entry(2, 85000, 98000)
        ],
        DetectorId.REDUCTION_MODE: [ReductionMode.LAB],
        GravityId.ON_OFF: [True],
        FitId.GENERAL: [
            fit_general(start=1.5, stop=12.5, fit_type=FitType.LOGARITHMIC,
                        data_type=None, polynomial_order=0)
        ],
        MaskId.VERTICAL_SINGLE_STRIP_MASK: [
            single_entry_with_detector(191, DetectorType.LAB),
            single_entry_with_detector(191, DetectorType.HAB),
            single_entry_with_detector(0, DetectorType.LAB),
            single_entry_with_detector(0, DetectorType.HAB)
        ],
        MaskId.HORIZONTAL_SINGLE_STRIP_MASK: [
            single_entry_with_detector(0, DetectorType.LAB),
            single_entry_with_detector(0, DetectorType.HAB)
        ],
        MaskId.HORIZONTAL_RANGE_STRIP_MASK: [
            range_entry_with_detector(190, 191, DetectorType.LAB),
            range_entry_with_detector(167, 172, DetectorType.LAB),
            range_entry_with_detector(190, 191, DetectorType.HAB),
            range_entry_with_detector(156, 159, DetectorType.HAB)
        ],
        MaskId.TIME: [range_entry_with_detector(17500, 22000, None)],
        MonId.DIRECT: [
            monitor_file("DIRECTM1_15785_12m_31Oct12_v12.dat", DetectorType.LAB),
            monitor_file("DIRECTM1_15785_12m_31Oct12_v12.dat", DetectorType.HAB)
        ],
        MonId.SPECTRUM: [
            monitor_spectrum(1, True, True),
            monitor_spectrum(1, False, True)
        ],
        SetId.CENTRE: [position_entry(155.45, -169.6, DetectorType.LAB)],
        SetId.SCALES: [set_scales_entry(0.074, 1.0, 1.0, 1.0, 1.0)],
        SampleId.OFFSET: [53.0],
        DetectorId.CORRECTION_X: [
            single_entry_with_detector(-16.0, DetectorType.LAB),
            single_entry_with_detector(-44.0, DetectorType.HAB)
        ],
        DetectorId.CORRECTION_Y: [single_entry_with_detector(-20.0, DetectorType.HAB)],
        DetectorId.CORRECTION_Z: [
            single_entry_with_detector(47.0, DetectorType.LAB),
            single_entry_with_detector(47.0, DetectorType.HAB)
        ],
        DetectorId.CORRECTION_ROTATION: [single_entry_with_detector(0.0, DetectorType.HAB)],
        LimitsId.EVENTS_BINNING: ["7000.0,500.0,60000.0"],
        MaskId.CLEAR_DETECTOR_MASK: [True],
        MaskId.CLEAR_TIME_MASK: [True],
        LimitsId.RADIUS: [range_entry(12, 15)],
        TransId.SPEC_4_SHIFT: [-70.],
        PrintId.PRINT_LINE: ["for changer"],
        BackId.ALL_MONITORS: [range_entry(start=3500, stop=4500)],
        FitId.MONITOR_TIMES: [range_entry(start=1000, stop=2000)],
        TransId.SPEC: [4],
        BackId.TRANS: [range_entry(start=123, stop=466)],
        TransId.RADIUS: [7.0],
        TransId.ROI: ["test.xml", "test2.xml"],
        TransId.MASK: ["test3.xml", "test4.xml"],
        SampleId.PATH: [True],
        LimitsId.RADIUS_CUT: [200.0],
        LimitsId.WAVELENGTH_CUT: [8.0],
        QResolutionId.ON: [True],
        QResolutionId.DELTA_R: [11.],
        QResolutionId.COLLIMATION_LENGTH: [12.],
        QResolutionId.A1: [13.],
        QResolutionId.A2: [14.],
        QResolutionId.MODERATOR: ["moderator_rkh_file.txt"],
        TubeCalibrationFileId.FILE: ["TUBE_SANS2D_BOTH_31681_25Sept15.nxs"]
    }

    # The parsed output must contain exactly the expected tags, and for each
    # tag exactly the expected entries (order-insensitive: both sides are
    # sorted before comparison).
    self.assertEqual(len(expected_values), len(output))
    for key, value in list(expected_values.items()):
        self.assertTrue(key in output)
        self.assertEqual(len(output[key]), len(value))
        elements = output[key]
        # Make sure that the different entries are sorted
        UserFileReaderTest._sort_list(elements)
        UserFileReaderTest._sort_list(value)
        self.assertEqual(elements, value, "{} is not {}".format(elements, value))

    # clean up
    if os.path.exists(user_file_path):
        os.remove(user_file_path)