def _add_simultaneous_fit_results_to_ADS_and_context(
        self, input_workspace_names: list, parameter_table,
        output_group_workspace, covariance_matrix,
        global_parameters: list) -> None:
    """Adds the results of a simultaneous fit to the ADS and fitting context.

    :param input_workspace_names: names of the workspaces that were fitted simultaneously.
    :param parameter_table: the fit parameter table workspace to store.
    :param output_group_workspace: the group workspace holding the fitted output workspaces.
    :param covariance_matrix: the normalised covariance matrix workspace to store.
    :param global_parameters: names of the parameters tied globally across the fit.
    """
    function_name = self.fitting_context.function_name
    # The helper both stores the output workspaces in the ADS and returns the
    # directory all related results should be grouped under.
    output_workspace_wraps, directory = self._create_output_workspace_wraps(
        input_workspace_names, function_name, output_group_workspace)
    # "+ ..." marks names derived from a multi-input simultaneous fit; only the
    # first input name is used as the base of the generated name.
    parameter_table_name, _ = create_parameter_table_name(
        input_workspace_names[0] + "+ ...", function_name)
    covariance_matrix_name, _ = create_covariance_matrix_name(
        input_workspace_names[0] + "+ ...", function_name)
    self._add_workspace_to_ADS(parameter_table, parameter_table_name, directory)
    self._add_workspace_to_ADS(covariance_matrix, covariance_matrix_name, directory)
    # Re-fetch from the ADS so the wrappers reference the stored workspaces,
    # not the pre-storage objects passed in.
    parameter_workspace_wrap = StaticWorkspaceWrapper(
        parameter_table_name, retrieve_ws(parameter_table_name))
    covariance_workspace_wrap = StaticWorkspaceWrapper(
        covariance_matrix_name, retrieve_ws(covariance_matrix_name))
    self._add_fit_to_context(input_workspace_names, output_workspace_wraps,
                             parameter_workspace_wrap,
                             covariance_workspace_wrap, global_parameters)
def _run_rebin(self, name, rebin_type, params):
    """Rebins the counts workspace of the named group and registers the result.

    :param name: the group name whose counts workspace should be rebinned.
    :param rebin_type: either "Fixed" (fixed step) or "Variable" (bin boundaries).
    :param params: the rebin parameters passed to the Rebin algorithm.
    :raises ValueError: if rebin_type is not "Fixed" or "Variable".
    """
    if rebin_type == "Fixed":
        rebin_index = 1
        rebin_option = "Steps: " + str(params) + " KeV"
        rebinned_run_name = str(name) + REBINNED_FIXED_WS_SUFFIX
    elif rebin_type == "Variable":
        rebin_index = 2
        rebin_option = "Bin Boundaries: " + str(params)
        rebinned_run_name = str(name) + REBINNED_VARIABLE_WS_SUFFIX
    else:
        # Previously an unrecognised type left rebin_index/rebin_option unbound
        # and the run name as None, causing a confusing failure further down.
        # Fail fast with a clear message instead.
        raise ValueError(f"Unknown rebin type '{rebin_type}': expected 'Fixed' or 'Variable'.")

    # Replace any stale rebinned workspace from a previous run.
    remove_ws_if_present(rebinned_run_name)

    raw_workspace = self.group_context[name].get_counts_workspace_for_run()
    CloneWorkspace(InputWorkspace=raw_workspace, OutputWorkspace=rebinned_run_name)
    rebin_ws(rebinned_run_name, params)

    workspace = retrieve_ws(rebinned_run_name)
    # Attach the rebinned workspace to the run's group workspace in the ADS.
    group_workspace = retrieve_ws(self.group_context[name].run_number)
    group_workspace.addWorkspace(workspace)

    group = self.group_context[name]
    group.update_workspaces(str(workspace), rebin=True,
                            rebin_index=rebin_index, rebin_option=rebin_option)
    self.update_plots_notifier.notify_subscribers(workspace)
def handle_show_fit_output_clicked(self, row_index: int) -> None:
    """Handles when the Show Output button is clicked in one of the table rows."""
    run, group = self.view.get_run_and_group_for_row(row_index)
    parameter_table_name, covariance_matrix_name = \
        self.model.create_background_output_workspaces_for(run, group)
    # Nothing to display if either output workspace could not be created.
    if parameter_table_name is None or covariance_matrix_name is None:
        return
    self.view.show_table_workspace_display(retrieve_ws(parameter_table_name),
                                           "Parameter Table")
    self.view.show_table_workspace_display(retrieve_ws(covariance_matrix_name),
                                           "Normalised Covariance Matrix")
def __del__(self):
    """Best-effort removal of all ADS workspaces owned by this group on deletion."""
    try:
        remove_ws_if_present(self.get_counts_workspace_for_run())
        if self.is_rebinned_workspace_present():
            remove_ws_if_present(self.get_counts_workspace_for_run(rebin=True))
        if self.is_peak_table_present():
            remove_ws_if_present(self.get_peak_table())
        if self.is_matches_table_present():
            # The matches table is a workspace group: remove its members first,
            # then the group itself.
            matches_table = retrieve_ws(self.get_matches_table())
            for table in matches_table.getNames():
                remove_ws_if_present(table)
            remove_ws_if_present(self.get_matches_table())
    except Exception as error:
        # If the ADS is deleted before this group, boost.python.ArgumentError is
        # raised; that exception type is not directly catchable from Python, so
        # it is identified by its message text and deliberately ignored.
        # (This replaces a stray string literal the original used as a comment.)
        if "Python argument types in" not in str(error):
            if self.error_notifier:
                error_message = f"Unexpected error occurred when deleting group {self.name}: {error}"
                self.error_notifier.notify_subscribers(error_message)
def _handle_find_peak_algorithm_outputs(self, group_workspace, workspace, delay_errors):
    """Processes the outputs of the peak finding algorithm for a workspace.

    Returns True if peak matching should be skipped (no peaks were found and
    errors are being delayed), otherwise False.
    """
    # Intermediate outputs are not needed; discard them up front.
    remove_ws(workspace + ERRORS_WS_SUFFIX)
    remove_ws(workspace + REFITTED_PEAKS_WS_SUFFIX)

    peak_table_name = workspace + PEAKS_WS_SUFFIX
    peak_table = retrieve_ws(peak_table_name)
    peak_count = peak_table.rowCount()
    self.current_peak_table_info["workspace"] = workspace
    self.current_peak_table_info["number_of_peaks"] = peak_count

    if peak_count != 0:
        group_workspace.add(peak_table_name)
        self.context.group_context[workspace].update_peak_table(peak_table_name)
        return False

    # No peaks: discard the empty table and either queue a warning or fail now.
    peak_table.delete()
    message = f"No peaks found in {workspace} try reducing acceptance threshold"
    if not delay_errors:
        raise RuntimeError(message)
    self.warnings.put(message)
    return True
def _current_dead_times(self) -> list:
    """Returns a list of dead times for the currently displayed run and dead time mode."""
    table_name = self._corrections_context.current_dead_time_table_name_for_run(
        self._data_context.instrument, self._corrections_model.current_runs())
    if not table_name:
        return []
    table = retrieve_ws(table_name)
    if table is None:
        return []
    return table.toDict()[DEAD_TIME_TABLE_KEY]
def validate_selected_dead_time_workspace(self, table_name: str) -> str:
    """Validates the selected dead time workspace.

    Returns a string containing an error message if it is invalid.
    """
    if not check_if_workspace_exist(table_name):
        return f"Workspace '{table_name}' does not exist in the ADS."
    return self._validate_dead_time_table(retrieve_ws(table_name))
def test_rebin(self, mock_get_item, mock_remove_ws, mock_retrieve_ws):
    """Checks that _run_rebin produces a correctly variable-rebinned workspace
    and calls the expected ADS helpers along the way."""
    mock_get_item.return_value = EAGroup("9999; Detector 1", "detector 1", "9999")
    name = '9999; Detector 1'
    rebinned_name = '9999; Detector 1' + REBINNED_VARIABLE_WS_SUFFIX
    mock_params = "0, 2, 9"
    # 9 unit-height bins on [0, 9]; rebinning with boundaries "0, 2, 9"
    # (step 2, final partial bin) should pairwise-sum the counts.
    x_data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    y_data = [1, 1, 1, 1, 1, 1, 1, 1, 1]
    CreateWorkspace(OutputWorkspace=name, DataX=x_data, DataY=y_data)

    self.context._run_rebin("9999; Detector 1", "Variable", mock_params)

    # Expected result: bins of width 2 containing 2 counts each, plus the
    # trailing width-1 bin with 1 count.
    correct_data = CreateWorkspace(OutputWorkspace="correct_data",
                                   DataX=[0, 2, 4, 6, 8, 9],
                                   DataY=[2, 2, 2, 2, 1])

    # Assert Statements
    self.assert_workspace_equal(correct_data, retrieve_ws(rebinned_name))
    mock_remove_ws.assert_has_calls([mock.call(rebinned_name)])
    mock_retrieve_ws.assert_has_calls([mock.call("9999")])
    mock_get_item.assert_has_calls([mock.call(name)])

    # clean up
    remove_ws_if_present(name)
    remove_ws_if_present("correct_data")
    remove_ws_if_present(rebinned_name)
def create_empty_table(name):
    """Creates an empty table workspace in the ADS under the given name and returns it."""
    algorithm = mantid.AlgorithmManager.create("CreateEmptyTableWorkspace")
    algorithm.initialize()
    # Store in the ADS so the table can be fetched back by name below.
    algorithm.setAlwaysStoreInADS(True)
    algorithm.setProperty("OutputWorkspace", name)
    algorithm.execute()
    return retrieve_ws(name)
def set_reconstructed_data(self, ws, table_name):
    """Records the reconstructed data workspace name and, if a table is given,
    the group of each of its rows."""
    self.reconstructed_data_name = ws
    if not table_name:
        return
    table = retrieve_ws(table_name)
    for row_index in range(table.rowCount()):
        self.reconstructed_data[row_index] = table.row(row_index)["Group"]
def _run_peak_algorithms(self, parameters):
    """Runs the find-peak and peak-matching algorithms for the workspace named
    in *parameters*, either for a single detector workspace or for every group
    of a run."""
    workspace = parameters["workspace"]
    run, detector = self.split_run_and_detector(workspace)
    if run is None or detector is None:
        # The name did not split into run + detector, so it appears to be a
        # bare run name: process every group belonging to that run.
        group_workspace = retrieve_ws(workspace)
        for group in self.context.group_context.groups:
            if group.run_number == workspace:
                workspace_name = group.get_counts_workspace_for_run(rebin=False)
                # Deep copy so each group's run gets its own parameter dict and
                # does not mutate the shared input.
                tmp_parameters = copy.deepcopy(parameters)
                tmp_parameters["workspace"] = workspace_name
                # Peak matching is skipped when find-peak reports it should be
                # ignored (e.g. no peaks found with errors delayed).
                if not self._run_find_peak_algorithm(tmp_parameters,
                                                     group_workspace, True):
                    self._run_peak_matching_algorithm(workspace_name,
                                                      group_workspace)
    else:
        # Single detector workspace: run both algorithms directly.
        group_workspace = retrieve_ws(run)
        self._run_find_peak_algorithm(parameters, group_workspace)
        self._run_peak_matching_algorithm(workspace, group_workspace)
def _parameter_combination_workspace_exists(self, workspace_name: str, x_values: list,
                                            y_values: list, y_errors: list) -> bool:
    """Returns true if a parameter combination workspace exists and contains the same data."""
    if not check_if_workspace_exist(workspace_name):
        return False
    existing = retrieve_ws(workspace_name)
    pairs = ((existing.dataX(0), x_values),
             (existing.dataY(0), y_values),
             (existing.dataE(0), y_errors))
    return all(self._lists_are_equal(stored, expected) for stored, expected in pairs)
def extract_rows(self, table_name):
    """Copies the contents of the named table into a list of rows, with every
    cell converted to a string."""
    table = retrieve_ws(table_name)
    data = table.toDict()
    # toDict maps column name -> list of values; transpose it into rows.
    return [[str(data[column][row]) for column in data]
            for row in range(table.rowCount())]
def update_match_table(self, likelihood_table_name, workspace_name):
    """Queues a match-table entry of run, detector and the most likely elements
    taken from the likelihood table."""
    likelihood_table = retrieve_ws(likelihood_table_name)
    elements = likelihood_table.toDict()["Element"]
    # Cap the number of elements shown in the table.
    if likelihood_table.rowCount() > NUMBER_OF_ELEMENTS_DISPLAYED:
        elements = elements[:NUMBER_OF_ELEMENTS_DISPLAYED]
    entry = list(self.split_run_and_detector(workspace_name))
    entry.append(" , ".join(elements))
    self.table_entries.put(entry)
def _create_workspace_group_to_store_combination_workspaces(self) -> WorkspaceGroup:
    """Return the Workspace Group used to store the different parameter combination
    matrix workspaces, creating and registering it in the ADS if necessary."""
    group_name = self.parameter_combination_group_name()
    if group_name is None:
        return None
    if check_if_workspace_exist(group_name):
        return retrieve_ws(group_name)
    new_group = WorkspaceGroup()
    add_ws_to_ads(group_name, new_group)
    return new_group
def find_peak_algorithm(workspace, spectrum_number, min_energy, max_energy,
                        threshold, min_width, estimate_width, max_width):
    """Runs FindPeaksAutomatic over the given workspace, writing the peak
    properties (and refitted peak properties) tables into the ADS."""
    algorithm_arguments = {
        "InputWorkspace": retrieve_ws(workspace),
        "SpectrumNumber": spectrum_number,
        "StartXValue": min_energy,
        "EndXValue": max_energy,
        "AcceptanceThreshold": threshold,
        "PeakPropertiesTableName": workspace + PEAKS_WS_SUFFIX,
        "RefitPeakPropertiesTableName": workspace + REFITTED_PEAKS_WS_SUFFIX,
        "MinPeakSigma": min_width,
        "EstimatePeakSigma": estimate_width,
        "MaxPeakSigma": max_width,
    }
    FindPeaksAutomatic(**algorithm_arguments)
def _create_output_workspace_wraps_for_a_multi_domain_fit(
        self, input_workspace_names: list, output_group_workspace,
        output_group_name: str) -> list:
    """Returns a list of StaticWorkspaceWrapper objects containing the fitted
    output workspaces for many domains."""
    self._add_workspace_to_ADS(output_group_workspace, output_group_name, "")
    renamed_workspaces = self._rename_members_of_fitted_workspace_group(
        input_workspace_names, output_group_name)
    wraps = []
    for workspace_name in renamed_workspaces:
        wraps.append(StaticWorkspaceWrapper(workspace_name, retrieve_ws(workspace_name)))
    return wraps
def _extract_x_and_y_from_current_result_table(self) -> None:
    """Extracts the X, Y and error values from the currently selected result
    table and saves them in the context."""
    table_name = self.current_result_table_name
    if table_name is None or not check_if_workspace_exist(table_name):
        return
    results_table = retrieve_ws(table_name)
    for column_index, column_name in enumerate(results_table.getColumnNames()):
        self._save_values_from_table_column(column_name,
                                            results_table.column(column_index))
    self._populate_empty_parameter_errors(results_table.rowCount())
def x_limits_of_workspace(
        workspace_name: str,
        default_limits: tuple = (DEFAULT_X_LOWER, DEFAULT_X_UPPER)) -> tuple:
    """Returns the x data limits of a provided workspace.

    :param workspace_name: the workspace to inspect, or None.
    :param default_limits: returned when the workspace is missing or has no x data.
    """
    if workspace_name is not None and check_if_workspace_exist(workspace_name):
        x_data = retrieve_ws(workspace_name).dataX(0)
        if len(x_data) > 0:
            # Use min/max rather than the previous in-place sort: dataX(0) can
            # return a view onto the workspace's data, so sorting it would
            # silently reorder the workspace's x values.
            x_lower, x_higher = min(x_data), max(x_data)
            # An offset is applied because if the x_lower is rounded up due to the
            # precision of the Muon GUI, then some data points could be missed out
            # unintentionally. A similar issue could happen if the x_higher were
            # rounded down due to the GUI precision.
            return x_lower - X_OFFSET, x_higher + X_OFFSET
    return default_limits
def _run_rebin(self, name, rebin):
    """Rebins the named workspace using the GUI-context settings when *rebin*
    is set; otherwise returns the name unchanged."""
    if not rebin:
        return name
    params = "1"
    rebin_type = self.gui_context['RebinType']
    if rebin_type == 'Variable' and self.gui_context["RebinVariable"]:
        params = self.gui_context["RebinVariable"]
    if rebin_type == 'Fixed' and self.gui_context["RebinFixed"]:
        # A fixed rebin scales the workspace's original bin width.
        x_data = retrieve_ws(name).dataX(0)
        original_step = x_data[1] - x_data[0]
        params = float(self.gui_context["RebinFixed"]) * original_step
    return rebin_ws(name, params)
def show_table(self, table_name):
    """Displays the named table in a popup, warning instead if no table is
    selected or the table does not exist."""
    if table_name == "":
        message_box.warning("ERROR : No selected table", None)
        return
    if not check_if_workspace_exist(table_name):
        message_box.warning(f"ERROR : {table_name} Table does not exist", None)
        return
    self.popup_table = EAAutoPopupTable(table_name)
    self.popup_table.create_table(retrieve_ws(table_name).getColumnNames())
    for row in self.extract_rows(table_name):
        self.popup_table.add_entry_to_table(row)
    self.popup_table.show()
def _create_output_workspace_wraps(self, input_workspace_names: list,
                                   function_name: str,
                                   output_group_workspace) -> tuple:
    """Returns a list of StaticWorkspaceWrapper objects containing the fitted
    output workspaces, together with the output directory."""
    base_name = input_workspace_names[0]
    if self.fitting_context.number_of_datasets > 1:
        # Simultaneous/multi-domain fit: the group helper stores and wraps the
        # per-domain outputs.
        group_name, directory = create_multi_domain_fitted_workspace_name(
            base_name, function_name)
        wraps = self._create_output_workspace_wraps_for_a_multi_domain_fit(
            input_workspace_names, output_group_workspace, group_name)
        return wraps, directory
    # Single-dataset fit: store the output directly and wrap it.
    workspace_name, directory = create_fitted_workspace_name(base_name, function_name)
    self._add_workspace_to_ADS(output_group_workspace, workspace_name, directory)
    wraps = [StaticWorkspaceWrapper(workspace_name, retrieve_ws(workspace_name))]
    return wraps, directory
def update_view(self):
    """
    Checks context for loaded workspaces and add to values find peak combobox
    Checks all tables in load run's groups and add to show peaks and show matches combobox
    """
    find_peak_workspaces = {}
    show_peaks_options = {}
    show_matches_options = {}
    for group in self.context.group_context.groups:
        run = group.run_number
        # Every run always offers an "All" detectors option first.
        find_peak_workspaces.setdefault(run, ["All"]).append(group.detector)
        if group.is_peak_table_present():
            show_peaks_options.setdefault(run, []).append(group.get_peak_table())
        if group.is_matches_table_present():
            matches_group = retrieve_ws(group.get_matches_table())
            show_matches_options.setdefault(run, []).extend(matches_group.getNames())

    self.view.add_options_to_find_peak_combobox(find_peak_workspaces)
    self.view.add_options_to_show_peak_combobox(show_peaks_options)
    self.view.add_options_to_show_matches_combobox(show_matches_options)

    # Update peak info label
    peak_label_info = self.model.current_peak_table_info
    if peak_label_info["workspace"] is not None \
            and peak_label_info["number_of_peaks"] is not None:
        self.view.set_peak_info(**peak_label_info)
def _set_x_label(self, workspace_name: str, axis_label: str) -> None:
    """Sets the label and unit for the X axis of a workspace."""
    unit = self._get_unit_from_sample_logs(axis_label)
    x_axis = retrieve_ws(workspace_name).getAxis(0)
    x_axis.setUnit("Label").setLabel(axis_label, unit)