def handle_show_fit_output_clicked(self, row_index: int) -> None:
    """Handles when the Show Output button is clicked in one of the table rows."""
    run, group = self.view.get_run_and_group_for_row(row_index)
    table_name, matrix_name = self.model.create_background_output_workspaces_for(run, group)
    # Nothing to display when the model could not produce both output workspaces.
    if table_name is None or matrix_name is None:
        return
    self.view.show_table_workspace_display(retrieve_ws(table_name), "Parameter Table")
    self.view.show_table_workspace_display(retrieve_ws(matrix_name), "Normalised Covariance Matrix")
def _add_simultaneous_fit_results_to_ADS_and_context(self, input_workspace_names: list, parameter_table,
                                                     output_group_workspace, covariance_matrix,
                                                     global_parameters: list) -> None:
    """Adds the results of a simultaneous fit to the ADS and fitting context.

    :param input_workspace_names: names of the workspaces that were fitted simultaneously.
    :param parameter_table: the fit parameter table workspace produced by the fit.
    :param output_group_workspace: the group workspace holding the fitted output workspaces.
    :param covariance_matrix: the normalised covariance matrix workspace produced by the fit.
    :param global_parameters: parameters that were tied across all fitted domains.
    """
    function_name = self.fitting_context.function_name
    output_workspace_wraps, directory = self._create_output_workspace_wraps(
        input_workspace_names, function_name, output_group_workspace)
    # Output names are based on the first input workspace plus a "+ ..." marker
    # to indicate a multi-workspace simultaneous fit.
    parameter_table_name, _ = create_parameter_table_name(
        input_workspace_names[0] + "+ ...", function_name)
    covariance_matrix_name, _ = create_covariance_matrix_name(
        input_workspace_names[0] + "+ ...", function_name)
    # The tables must be in the ADS before they can be retrieved and wrapped below.
    self._add_workspace_to_ADS(parameter_table, parameter_table_name, directory)
    self._add_workspace_to_ADS(covariance_matrix, covariance_matrix_name, directory)
    parameter_workspace_wrap = StaticWorkspaceWrapper(
        parameter_table_name, retrieve_ws(parameter_table_name))
    covariance_workspace_wrap = StaticWorkspaceWrapper(
        covariance_matrix_name, retrieve_ws(covariance_matrix_name))
    # the directory returns with a slash, so lets remove it
    self._add_workspaces_to_group(
        [parameter_table_name, covariance_matrix_name], directory[:-1])
    self._add_fit_to_context(input_workspace_names, output_workspace_wraps,
                             parameter_workspace_wrap, covariance_workspace_wrap,
                             global_parameters)
def _run_rebin(self, name, rebin_type, params):
    """Rebins the counts workspace of the named group and registers the result.

    :param name: name of the group whose counts workspace is rebinned.
    :param rebin_type: either "Fixed" (step size) or "Variable" (bin boundaries).
    :param params: the rebin parameters forwarded to rebin_ws.
    :raises ValueError: if rebin_type is not "Fixed" or "Variable". The original
        code left rebin_index/rebin_option unbound and the output name as None
        in that case, which failed later with an opaque error.
    """
    if rebin_type == "Fixed":
        rebin_index = 1
        rebin_option = "Steps: " + str(params) + " KeV"
        rebined_run_name = str(name) + REBINNED_FIXED_WS_SUFFIX
    elif rebin_type == "Variable":
        rebin_index = 2
        rebin_option = "Bin Boundaries: " + str(params)
        rebined_run_name = str(name) + REBINNED_VARIABLE_WS_SUFFIX
    else:
        raise ValueError(f"Unknown rebin type '{rebin_type}'; expected 'Fixed' or 'Variable'.")

    # Remove any stale result from a previous rebin before recreating it.
    remove_ws_if_present(rebined_run_name)
    raw_workspace = self.group_context[name].get_counts_workspace_for_run()
    CloneWorkspace(InputWorkspace=raw_workspace, OutputWorkspace=rebined_run_name)
    rebin_ws(rebined_run_name, params)
    workspace = retrieve_ws(rebined_run_name)
    # Attach the rebinned workspace to the run's group workspace in the ADS.
    group_workspace = retrieve_ws(self.group_context[name].run_number)
    group_workspace.addWorkspace(workspace)
    group = self.group_context[name]
    group.update_workspaces(str(workspace), rebin=True, rebin_index=rebin_index,
                            rebin_option=rebin_option)
    self.update_plots_notifier.notify_subscribers(workspace)
def test_add_to_group_multiple(self):
    """add_to_group should group workspaces (including tables) per run for the instrument/extension."""
    instrument = "MUSR"
    extension = "MA"
    run = "62260"
    run2 = "06226"
    ws = create_workspace(instrument + run + "fwd" + extension)
    ws2 = create_workspace(instrument + run + "bwd" + extension)
    ws3 = create_workspace(instrument + run2 + "fwd" + extension)
    ws4 = create_workspace(instrument + run2 + "bwd" + extension)
    # These two should NOT be picked up (wrong instrument / wrong extension).
    _ = create_workspace("EMU" + run + "fwd" + extension)
    _ = create_workspace(instrument + run + "fwd" + "FD")
    # there was a bug that meant tables didnt work
    table_name = create_table_workspace(instrument + run + "table" + extension)

    add_to_group(instrument, extension)

    def assert_group_contents(group_name, expected_names):
        group = retrieve_ws(group_name)
        self.assertEqual(len(group.getNames()), len(expected_names))
        for member in group.getNames():
            self.assertTrue(member in expected_names)
            expected_names.remove(member)

    # check run
    assert_group_contents(instrument + run, [ws.name(), ws2.name(), table_name])
    # check run2
    assert_group_contents(instrument + run2, [ws3.name(), ws4.name()])
def get_shade_lines(self, name, index):
    """Returns the x, y1 and y2 shade lines for the named workspace, converting
    histogram data to points for the calculation and back again afterwards."""
    ws = retrieve_ws(name)
    if not ws.isHistogramData():
        return self._plot_model.get_shade_lines(ws, index)
    # Histogram data: convert to point data, compute, then restore the histogram.
    point_ws = retrieve_ws(run_convert_to_points(name))
    x_data, y1_data, y2_data = self._plot_model.get_shade_lines(point_ws, index)
    run_convert_to_histogram(name)
    return x_data, y1_data, y2_data
def test_that_create_x_and_y_parameter_combination_workspace_will_create_the_expected_parameter_workspaces(
        self):
    self.model.result_table_names = self.result_table_names
    self.model.current_result_table_index = 0

    table = create_results_table()
    add_ws_to_ads("Result1", table)

    self.model.create_x_and_y_parameter_combinations()
    # Request several (x, y) parameter combinations.
    for x_parameter, y_parameter in [("workspace_name", "A0"), ("A1", "A0"),
                                     ("workspace_name", "A1"), ("Sigma", "Lambda"),
                                     ("f1.Sigma", "f1.Lambda")]:
        self.model.create_x_and_y_parameter_combination_workspace(x_parameter, y_parameter)

    # These combination workspaces should have been created.
    for expected_name in ["Result1; Parameter Combinations",
                          "Result1; A0 vs workspace_name",
                          "Result1; A1 vs workspace_name",
                          "Result1; A0 vs A1",
                          "Result1; Lambda vs Sigma",
                          "Result1; f1.Lambda vs f1.Sigma"]:
        self.assertTrue(check_if_workspace_exist(expected_name))

    # These combinations should NOT exist (reversed or self-paired parameters).
    for unexpected_name in ["Result1; workspace_name vs A0",
                            "Result1; workspace_name vs A1",
                            "Result1; A1 vs A0",
                            "Result1; workspace_name vs workspace_name",
                            "Result1; A0 vs A0",
                            "Result1; A1 vs A1"]:
        self.assertTrue(not check_if_workspace_exist(unexpected_name))

    # Check the units on the created combination workspaces.
    for workspace_name, y_unit in [("Result1; Lambda vs Sigma", 'Lambda ($\\mu$ $s^{-1}$)'),
                                   ("Result1; f1.Lambda vs f1.Sigma", 'f1.Lambda ($\\mu$ $s^{-1}$)')]:
        unit_test_ws = retrieve_ws(workspace_name)
        self.assertTrue(str(unit_test_ws.getAxis(0).getUnit().symbol()) == '\\mu s^{-1}')
        self.assertTrue(str(unit_test_ws.YUnit()) == y_unit)
def _current_dead_times(self) -> list:
    """Returns a list of dead times for the currently displayed run and dead time mode."""
    table_name = self._corrections_context.current_dead_time_table_name_for_run(
        self._data_context.instrument, self._corrections_model.current_runs())
    if table_name:
        table = retrieve_ws(table_name)
        if table is not None:
            return table.toDict()[DEAD_TIME_TABLE_KEY]
    # No table name or no table found: no dead times to report.
    return []
def validate_selected_dead_time_workspace(self, table_name: str) -> str:
    """Validates the selected dead time workspace. Returns a string containing
    an error message if its invalid."""
    if not check_if_workspace_exist(table_name):
        return f"Workspace '{table_name}' does not exist in the ADS."
    return self._validate_dead_time_table(retrieve_ws(table_name))
def test_remove_workspace_ws(self):
    """remove_workspace should notify every sub-context and both notifiers."""
    sub_contexts = [self.context.data_context,
                    self.context.group_pair_context,
                    self.context.phase_context,
                    self.context.fitting_context]
    for sub_context in sub_contexts:
        sub_context.remove_workspace_by_name = mock.Mock()
    self.context.update_view_from_model_notifier.notify_subscribers = mock.Mock()
    self.context.deleted_plots_notifier.notify_subscribers = mock.Mock()

    name = run_create_workspace([0, 1], [0, 1], "test")
    ws = retrieve_ws("test")
    self.context.remove_workspace(ws)

    for sub_context in sub_contexts:
        sub_context.remove_workspace_by_name.assert_called_once_with(name)
    self.context.update_view_from_model_notifier.notify_subscribers.assert_called_once_with(name)
    self.context.deleted_plots_notifier.notify_subscribers.assert_called_once_with(ws)
def create_empty_table(name):
    """Creates an empty table workspace with the given name, stores it in the
    ADS and returns it."""
    table_alg = mantid.AlgorithmManager.create("CreateEmptyTableWorkspace")
    table_alg.initialize()
    # Store in the ADS so the table can be retrieved by name afterwards.
    table_alg.setAlwaysStoreInADS(True)
    table_alg.setProperty("OutputWorkspace", name)
    table_alg.execute()
    return retrieve_ws(name)
def __del__(self):
    # Remove all ADS workspaces owned by this group when the group object is
    # garbage collected: the counts workspace, its rebinned variant, the peak
    # table and the matches table (plus the matches table's members).
    try:
        remove_ws_if_present(self.get_counts_workspace_for_run())
        if self.is_rebinned_workspace_present():
            remove_ws_if_present(
                self.get_counts_workspace_for_run(rebin=True))
        if self.is_peak_table_present():
            remove_ws_if_present(self.get_peak_table())
        if self.is_matches_table_present():
            # The matches table is a workspace group: delete its members
            # before removing the group itself.
            matches_table = retrieve_ws(self.get_matches_table())
            for table in matches_table.getNames():
                remove_ws_if_present(table)
            remove_ws_if_present(self.get_matches_table())
    except Exception as error:
        """ If ADS is deleted before group is deleted, boost.python.ArgumentError is raised and boost.python.ArgumentError are not catchable """
        # Detect the uncatchable boost error by its message text and ignore it;
        # anything else is unexpected and is reported via the error notifier.
        if "Python argument types in" not in str(error):
            if self.error_notifier:
                error_message = f"Unexpected error occurred when deleting group {self.name}: " + str(
                    error)
                self.error_notifier.notify_subscribers(error_message)
def test_rebin(self, mock_get_item, mock_remove_ws, mock_retrieve_ws):
    """_run_rebin with 'Variable' params should produce the expected rebinned data."""
    mock_get_item.return_value = EAGroup("9999; Detector 1", "detector 1", "9999")
    name = '9999; Detector 1'
    rebinned_name = name + REBINNED_VARIABLE_WS_SUFFIX
    mock_params = "0, 2, 9"
    CreateWorkspace(OutputWorkspace=name,
                    DataX=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
                    DataY=[1, 1, 1, 1, 1, 1, 1, 1, 1])

    self.context._run_rebin("9999; Detector 1", "Variable", mock_params)

    correct_data = CreateWorkspace(OutputWorkspace="correct_data",
                                   DataX=[0, 2, 4, 6, 8, 9],
                                   DataY=[2, 2, 2, 2, 1])

    # Assert Statements
    self.assert_workspace_equal(correct_data, retrieve_ws(rebinned_name))
    mock_remove_ws.assert_has_calls([mock.call(rebinned_name)])
    mock_retrieve_ws.assert_has_calls([mock.call("9999")])
    mock_get_item.assert_has_calls([mock.call(name)])

    # clean up
    for leftover in (name, "correct_data", rebinned_name):
        remove_ws_if_present(leftover)
def update_view(self):
    """
    Checks context for loaded workspaces and add to values find peak combobox
    Checks all tables in load run's groups and add to show peaks and show matches combobox
    """
    find_peak_workspaces = {}
    show_peaks_options = {}
    show_matches_options = {}
    for group in self.context.group_context.groups:
        run = group.run_number
        # Every run's find-peak list starts with the "All" option.
        find_peak_workspaces.setdefault(run, ["All"]).append(group.detector)
        if group.is_peak_table_present():
            show_peaks_options.setdefault(run, []).append(group.get_peak_table())
        if group.is_matches_table_present():
            matches_group_workspace = retrieve_ws(group.get_matches_table())
            show_matches_options.setdefault(run, []).extend(matches_group_workspace.getNames())

    self.view.add_options_to_find_peak_combobox(find_peak_workspaces)
    self.view.add_options_to_show_peak_combobox(show_peaks_options)
    self.view.add_options_to_show_matches_combobox(show_matches_options)

    # Update peak info label
    peak_label_info = self.model.current_peak_table_info
    if peak_label_info["workspace"] is not None and peak_label_info["number_of_peaks"] is not None:
        self.view.set_peak_info(**peak_label_info)
def _handle_find_peak_algorithm_outputs(self, group_workspace, workspace, delay_errors):
    """Processes the output tables of the find-peak algorithm. Returns True when
    peak matching should be skipped because no peaks were found."""
    # The errors and refitted-peaks tables are by-products we never display.
    remove_ws(workspace + ERRORS_WS_SUFFIX)
    remove_ws(workspace + REFITTED_PEAKS_WS_SUFFIX)
    peaks_table_name = workspace + PEAKS_WS_SUFFIX
    peak_table = retrieve_ws(peaks_table_name)
    peak_count = peak_table.rowCount()
    self.current_peak_table_info["workspace"] = workspace
    self.current_peak_table_info["number_of_peaks"] = peak_count

    if peak_count != 0:
        group_workspace.add(peaks_table_name)
        self.context.group_context[workspace].update_peak_table(peaks_table_name)
        return False

    # No peaks: discard the empty table and either queue a warning or fail hard.
    peak_table.delete()
    if not delay_errors:
        raise RuntimeError(
            f"No peaks found in {workspace} try reducing acceptance threshold"
        )
    self.warnings.put(
        f"No peaks found in {workspace} try reducing acceptance threshold"
    )
    return True
def _parameter_combination_workspace_exists(self, workspace_name: str, x_values: list,
                                            y_values: list, y_errors: list) -> bool:
    """Returns true if a parameter combination workspace exists and contains the same data."""
    if not check_if_workspace_exist(workspace_name):
        return False
    workspace = retrieve_ws(workspace_name)
    comparisons = ((workspace.dataX(0), x_values),
                   (workspace.dataY(0), y_values),
                   (workspace.dataE(0), y_errors))
    return all(self._lists_are_equal(actual, expected) for actual, expected in comparisons)
def set_reconstructed_data(self, ws, table_name):
    """Stores the reconstructed data workspace name and, when a table is given,
    records each row's "Group" value keyed by row index."""
    # get the indices and groups first
    self.reconstructed_data_name = ws
    if not table_name:
        return
    table = retrieve_ws(table_name)
    for row_index in range(table.rowCount()):
        self.reconstructed_data[row_index] = table.row(row_index)["Group"]
def remove_workspace_names_from_plot(self, workspace_names: List[str]):
    """Removes the input workspace names from the plot"""
    for name in workspace_names:
        try:
            ws = retrieve_ws(name)
        except RuntimeError:
            # Workspace no longer in the ADS: nothing to remove for this name.
            continue
        self._view.remove_workspace_from_plot(ws)
    self._view.redraw_figure()
def _extract_x_and_y_from_current_result_table(self) -> None:
    """Extracts the X, Y and error values from the currently selected result
    table and saves them in the context."""
    table_name = self.current_result_table_name
    if table_name is None or not check_if_workspace_exist(table_name):
        return
    results_table = retrieve_ws(table_name)
    for column_index, column_name in enumerate(results_table.getColumnNames()):
        self._save_values_from_table_column(column_name, results_table.column(column_index))
    self._populate_empty_parameter_errors(results_table.rowCount())
def create_workspace(name):
    """Creates a minimal two-point workspace with the given name, stores it in
    the ADS and returns it."""
    ws_alg = simpleapi.AlgorithmManager.create("CreateWorkspace")
    ws_alg.initialize()
    ws_alg.setAlwaysStoreInADS(True)
    # Silence per-call logging to keep test output clean.
    ws_alg.setLogging(False)
    for property_name, value in (("dataX", [0, 1]),
                                 ("dataY", [0, 1]),
                                 ("OutputWorkspace", name)):
        ws_alg.setProperty(property_name, value)
    ws_alg.execute()
    return retrieve_ws(name)
def _run_peak_algorithms(self, parameters):
    """Runs the find-peak and peak-matching algorithms for the workspace named
    in the parameters; when the name is a whole run, runs them for every group
    belonging to that run."""
    workspace = parameters["workspace"]
    run, detector = self.split_run_and_detector(workspace)

    if run is not None and detector is not None:
        # A single run/detector workspace: run both algorithms directly.
        group_workspace = retrieve_ws(run)
        self._run_find_peak_algorithm(parameters, group_workspace)
        self._run_peak_matching_algorithm(workspace, group_workspace)
        return

    # The name is a whole run: process each of its groups separately.
    group_workspace = retrieve_ws(workspace)
    for group in self.context.group_context.groups:
        if group.run_number != workspace:
            continue
        workspace_name = group.get_counts_workspace_for_run(rebin=False)
        tmp_parameters = copy.deepcopy(parameters)
        tmp_parameters["workspace"] = workspace_name
        if not self._run_find_peak_algorithm(tmp_parameters, group_workspace, True):
            self._run_peak_matching_algorithm(workspace_name, group_workspace)
def test_safe_to_add_to_group_wrong_extension(self):
    """A workspace with the wrong extension must not be safe to add to a group."""
    instrument = "MUSR"
    extension = "MA"
    ws = create_workspace(instrument + "test" + "FD")
    dummy_ws = create_workspace("dummy")
    make_group([dummy_ws], "group")
    # get the group
    group = retrieve_ws("group")
    self.assertEqual(safe_to_add_to_group(ws, instrument, [group], extension), False)
def _create_workspace_group_to_store_combination_workspaces(self) -> WorkspaceGroup:
    """Return the Workspace Group used to store the different parameter
    combination matrix workspaces."""
    group_name = self.parameter_combination_group_name()
    if group_name is None:
        return None
    if check_if_workspace_exist(group_name):
        return retrieve_ws(group_name)
    # First use: create a fresh group and register it in the ADS.
    new_group = WorkspaceGroup()
    add_ws_to_ads(group_name, new_group)
    return new_group
def test_check_not_in_group(self):
    """check_not_in_group is False for members of a group and True otherwise."""
    ws_in_group = create_workspace("in")
    ws_out_group = create_workspace("out")
    make_group([ws_in_group], "group")
    # get the group
    group = retrieve_ws("group")
    member_names = group.getNames()
    self.assertEqual(len(member_names), 1)
    self.assertEqual(member_names[0], "in")
    self.assertEqual(check_not_in_group([group], ws_in_group.name()), False)
    self.assertEqual(check_not_in_group([group], ws_out_group.name()), True)
def update_match_table(self, likelihood_table_name, workspace_name):
    """Builds a match-table entry (run, detector, top elements) from the
    likelihood table and queues it for display."""
    likelihood_table = retrieve_ws(likelihood_table_name)
    entry = list(self.split_run_and_detector(workspace_name))
    elements_list = likelihood_table.toDict()["Element"]
    # Only show the most likely elements when the table is long.
    if likelihood_table.rowCount() > NUMBER_OF_ELEMENTS_DISPLAYED:
        elements_list = elements_list[:NUMBER_OF_ELEMENTS_DISPLAYED]
    entry.append(" , ".join(elements_list))
    self.table_entries.put(entry)
def find_peak_algorithm(workspace, spectrum_number, min_energy, max_energy, threshold,
                        min_width, estimate_width, max_width):
    """Runs FindPeaksAutomatic on the named workspace over the given energy
    window, writing the peak (and refitted peak) property tables to the ADS."""
    input_workspace = retrieve_ws(workspace)
    peaks_table_name = workspace + PEAKS_WS_SUFFIX
    refitted_table_name = workspace + REFITTED_PEAKS_WS_SUFFIX
    FindPeaksAutomatic(InputWorkspace=input_workspace,
                       SpectrumNumber=spectrum_number,
                       StartXValue=min_energy,
                       EndXValue=max_energy,
                       AcceptanceThreshold=threshold,
                       PeakPropertiesTableName=peaks_table_name,
                       RefitPeakPropertiesTableName=refitted_table_name,
                       MinPeakSigma=min_width,
                       EstimatePeakSigma=estimate_width,
                       MaxPeakSigma=max_width)
def _create_output_workspace_wraps_for_a_multi_domain_fit(self, input_workspace_names: list,
                                                          output_group_workspace,
                                                          output_group_name: str) -> list:
    """Returns a list of StaticWorkspaceWrapper objects containing the fitted
    output workspaces for many domains."""
    self._add_workspace_to_ADS(output_group_workspace, output_group_name, "")
    renamed_names = self._rename_members_of_fitted_workspace_group(
        input_workspace_names, output_group_name)
    wraps = []
    for workspace_name in renamed_names:
        wraps.append(StaticWorkspaceWrapper(workspace_name, retrieve_ws(workspace_name)))
    return wraps
def extract_rows(self, table_name):
    """Copies information in a table given the name of the table, returning a
    list of rows where every cell is stringified."""
    table = retrieve_ws(table_name)
    table_data = table.toDict()
    # One inner list per row, cells ordered by the table's column order.
    return [[str(table_data[column][row]) for column in table_data]
            for row in range(table.rowCount())]
def _run_rebin(self, name, rebin):
    """Rebins the named workspace using the GUI context settings and returns
    the rebinned name; returns the original name when rebinning is disabled."""
    if not rebin:
        return name
    params = "1"  # fallback when neither rebin setting is usable
    if self.gui_context['RebinType'] == 'Variable' and self.gui_context["RebinVariable"]:
        params = self.gui_context["RebinVariable"]
    if self.gui_context['RebinType'] == 'Fixed' and self.gui_context["RebinFixed"]:
        # Fixed rebinning scales the workspace's original bin width.
        x_data = retrieve_ws(name).dataX(0)
        original_step = x_data[1] - x_data[0]
        params = float(self.gui_context["RebinFixed"]) * original_step
    return rebin_ws(name, params)
def x_limits_of_workspace(
        workspace_name: str,
        default_limits: tuple = (DEFAULT_X_LOWER, DEFAULT_X_UPPER)) -> tuple:
    """Returns the x data limits of a provided workspace.

    :param workspace_name: name of the workspace to inspect (may be None).
    :param default_limits: limits returned when the workspace is missing or empty.
    """
    if workspace_name is not None and check_if_workspace_exist(workspace_name):
        x_data = retrieve_ws(workspace_name).dataX(0)
        if len(x_data) > 0:
            # Use min/max rather than sorting: dataX(0) returns a writable
            # reference to the workspace's x data, so the previous in-place
            # sort silently mutated the workspace as a side effect.
            x_lower, x_higher = min(x_data), max(x_data)
            # An offset is applied because if the x_lower is rounded up due to the precision of the Muon GUI, then some
            # data points could be missed out unintentionally. A similar issue could happen if the x_higher were rounded
            # down due to the GUI precision.
            return x_lower - X_OFFSET, x_higher + X_OFFSET
    return default_limits
def show_table(self, table_name):
    """Shows the named table workspace in a popup, warning instead when no
    table is selected or the table is missing from the ADS."""
    if table_name == "":
        message_box.warning("ERROR : No selected table", None)
        return
    if not check_if_workspace_exist(table_name):
        message_box.warning(f"ERROR : {table_name} Table does not exist", None)
        return

    self.popup_table = EAAutoPopupTable(table_name)
    table = retrieve_ws(table_name)
    self.popup_table.create_table(table.getColumnNames())
    # Populate the popup row by row from the stringified table contents.
    for row_entry in self.extract_rows(table_name):
        self.popup_table.add_entry_to_table(row_entry)
    self.popup_table.show()