def populate_ADS(self):
    """Calculate and show all groups and pairs, then register a PhaseQuad entry.

    Creates a minimal placeholder workspace so the phase-quad entry added to
    the phase context refers to an item that actually exists in the ADS.
    """
    self.context.calculate_all_groups()
    self.context.show_all_groups()
    self.context.calculate_all_pairs()
    self.context.show_all_pairs()
    # One-point placeholder workspace under the exact PhaseQuad name.
    CreateWorkspace(
        [0], [0],
        OutputWorkspace='EMU19489; PhaseQuad; PhaseTable EMU19489')
    self.context.phase_context.add_phase_quad(
        MuonWorkspaceWrapper('EMU19489; PhaseQuad; PhaseTable EMU19489'),
        '19489')
def test_that_setting_a_new_workspace_removes_the_previous_one_from_the_ADS(
        self):
    """Assigning a replacement workspace must delete the previously shown one."""
    wrapper = MuonWorkspaceWrapper(workspace=self.workspace)
    wrapper.show("name1")
    replacement = create_simple_workspace(data_x=[5, 6, 7, 8],
                                          data_y=[20, 20, 20, 20])
    # The shown workspace exists until the wrapped workspace is swapped out.
    self.assertTrue(simpleapi.mtd.doesExist("name1"))
    wrapper.workspace = replacement
    self.assertFalse(simpleapi.mtd.doesExist("name1"))
def test_that_workspace_added_correctly_for_doubly_nested_structure(self):
    """Showing under 'group1/group2/ws1' must create both groups, nested."""
    ws = create_simple_workspace(data_x=[1, 2, 3, 4],
                                 data_y=[10, 10, 10, 10])
    wrapper = MuonWorkspaceWrapper(workspace=ws)
    wrapper.show("group1/group2/ws1")
    for group_name in ("group1", "group2"):
        self.assert_group_workspace_exists(group_name)
    self.assert_group1_is_inside_group2("group2", "group1")
    self.assert_workspace_in_group("ws1", "group2")
def test_that_setting_a_new_workspace_resets_the_name_to_empty_string(
        self):
    """Assigning a replacement workspace must clear the wrapper's name."""
    wrapper = MuonWorkspaceWrapper(workspace=self.workspace)
    wrapper.show("name1")
    replacement = create_simple_workspace(data_x=[5, 6, 7, 8],
                                          data_y=[20, 20, 20, 20])
    # Name is set by show() and reset to "" on workspace replacement.
    self.assertEqual(wrapper.name, "name1")
    wrapper.workspace = replacement
    self.assertEqual(wrapper.name, "")
def create_fake_workspace(self, name):
    """Build an in-memory EMU sample workspace wrapped as a load result.

    The ``name`` argument is not used by the fake; the returned dict mimics
    a loader's output with a single-period 'OutputWorkspace' list.
    """
    fake_ws = CreateSampleWorkspace(StoreInADS=False)
    LoadInstrument(Workspace=fake_ws,
                   InstrumentName='EMU',
                   RewriteSpectraMap=False,
                   StoreInADS=False)
    wrapped = [MuonWorkspaceWrapper(fake_ws)]
    return {'OutputWorkspace': wrapped, 'MainFieldDirection': 'transverse'}
def add_maxent_workspace_to_ADS(self, input_workspace, maxent_workspace, alg):
    """Show the MaxEnt result in the ADS, register it with the frequency
    context and publish any optional algorithm outputs.
    """
    # First run of digits in the input workspace name identifies the run.
    run = re.search('[0-9]+', input_workspace).group()
    base_name = get_maxent_workspace_name(input_workspace)
    group = get_maxent_workspace_group_name(base_name,
                                            self.load.data_context.instrument,
                                            self.load.workspace_suffix)
    directory = get_base_data_directory(self.load, run) + group
    muon_workspace_wrapper = MuonWorkspaceWrapper(maxent_workspace,
                                                  directory + base_name)
    muon_workspace_wrapper.show()
    maxent_output_options = self.get_maxent_output_options()
    self.load._frequency_context.add_maxEnt(run, maxent_workspace)
    self.add_optional_outputs_to_ADS(alg, maxent_output_options, base_name,
                                     directory)
def add_fft_workspace_to_ADS(self, input_workspace,
                             imaginary_input_workspace, fft_workspace):
    """Show the computed FFT workspace in the ADS under its run's directory."""
    run = re.search('[0-9]+', input_workspace).group()
    ws_name = get_fft_workspace_name(input_workspace,
                                     imaginary_input_workspace)
    group = get_fft_workspace_group_name(ws_name,
                                         self.load.data_context.instrument)
    full_path = get_base_data_directory(self.load, run) + group + ws_name
    MuonWorkspaceWrapper(fft_workspace, full_path).show()
def combine_loaded_runs(model, run_list, delete_added=False):
    """Co-add all runs in ``run_list`` period-by-period and store the result
    in the loaded-data store as a single "Co-added" entry.

    Raises RuntimeError if the runs have differing period counts.
    NOTE(review): ``delete_added`` is accepted but never used in this body.
    """
    # All runs must contain the same number of periods to be co-added.
    period_list = [model._data_context.num_periods([run]) for run in run_list]
    if max(period_list) != min(period_list):
        raise RuntimeError(
            'Inconsistent periods across co-added runs. This is not supported.'
        )
    return_ws = model._loaded_data_store.get_data(
        run=[run_list[0]])["workspace"]
    running_total = []
    for index in range(min(period_list)):
        workspace = return_ws["OutputWorkspace"][index]
        # Clone the first run's period so the accumulation does not mutate it.
        running_total_item = workspace.workspace.name() + 'CoAdd'
        CloneWorkspace(InputWorkspace=workspace.workspace.name(),
                       OutputWorkspace=running_total_item)
        for run in run_list[1:]:
            ws = model._loaded_data_store.get_data(
                run=[run])["workspace"]["OutputWorkspace"][index].workspace
            Plus(LHSWorkspace=running_total_item,
                 RHSWorkspace=ws,
                 AllowDifferentNumberSpectra=False,
                 OutputWorkspace=running_total_item)
        running_total.append(running_total_item)
    # Carry over the metadata that is always present.
    return_ws_actual = {
        key: return_ws[key]
        for key in ['MainFieldDirection', 'TimeZero', 'FirstGoodData']
    }
    try:
        return_ws_actual['DetectorGroupingTable'] = return_ws[
            'DetectorGroupingTable']
    except KeyError:
        pass  # PSI Data does not include Detector Grouping table as it's read from sample logs instead
    try:
        return_ws_actual['DeadTimeTable'] = return_ws['DeadTimeTable']
    except KeyError:
        pass  # Again, PSI data does not always include DeadTimeTable either
    return_ws_actual["OutputWorkspace"] = [
        MuonWorkspaceWrapper(running_total_period)
        for running_total_period in running_total
    ]
    return_ws_actual['DataDeadTimeTable'] = CloneWorkspace(
        InputWorkspace=return_ws['DataDeadTimeTable'],
        OutputWorkspace=return_ws['DataDeadTimeTable'] + 'CoAdd').name()
    # Replace the individual run entries with the single co-added entry.
    model._loaded_data_store.remove_data(
        run=flatten_run_list(run_list),
        instrument=model._data_context.instrument)
    model._loaded_data_store.add_data(
        run=flatten_run_list(run_list),
        workspace=return_ws_actual,
        filename="Co-added",
        instrument=model._data_context.instrument)
def add_phase_table_to_ADS(self, base_name, detector_table):
    """Show a phase table in the ADS and register it with the phase context."""
    run = re.search('[0-9]+', base_name).group()
    group_name = get_phase_table_workspace_group_name(
        base_name, self.context.data_context.instrument)
    target_path = get_base_data_directory(self.context,
                                          run) + group_name + base_name
    wrapper = MuonWorkspaceWrapper(detector_table, target_path)
    wrapper.show()
    self.context.phase_context.add_phase_table(wrapper)
def add_fitting_info_to_ADS_if_required(self, base_name, fit_workspace_name):
    """Show the phase-fit workspace in the ADS when the view requests it."""
    # Nothing to do unless the user asked for fit information output.
    if not self.view.output_fit_information:
        return
    run = re.search('[0-9]+', base_name).group()
    group_name = get_phase_table_workspace_group_name(
        base_name, self.context.data_context.instrument,
        self.context.workspace_suffix)
    base_path = get_base_data_directory(self.context, run) + group_name
    MuonWorkspaceWrapper(base_path + fit_workspace_name).show()
def show_pair_data(self, pair_name, show=True, rebin=False):
    """Calculate pair data for each current run, store it on the pair and
    optionally show it in the ADS.
    """
    for run in self.current_runs:
        run_string = run_list_to_string(run)
        ws_name = get_pair_data_workspace_name(self, pair_name, run_string,
                                               rebin)
        directory = (get_base_data_directory(self, run_string)
                     + get_pair_data_directory(self, run_string))
        pair_workspace = calculate_pair_data(self, pair_name, run, rebin)
        # Rebinned and raw results live in separate per-run dictionaries.
        if rebin:
            target = self._pairs[pair_name].workspace_rebin
        else:
            target = self._pairs[pair_name].workspace
        target[str(run)] = MuonWorkspaceWrapper(pair_workspace)
        if show:
            target[str(run)].show(directory + ws_name)
def load_workspace_from_filename(filename,
                                 input_properties=DEFAULT_INPUTS,
                                 output_properties=DEFAULT_OUTPUTS):
    """Load a muon data file and normalise the result for the data store.

    Tries the full path first; if that load fails, retries with just the
    file name so Mantid can search its configured data directories.

    :param filename: path (or name) of the file to load.
    :param input_properties: properties forwarded to the load algorithm.
    :param output_properties: algorithm properties copied into the result.
    :return: tuple of (load_result dict, run number, resolved filename).
    """
    try:
        alg = create_load_algorithm(filename, input_properties)
        alg.execute()
    except Exception:
        # Narrowed from a bare ``except:`` -- a bare except also swallows
        # KeyboardInterrupt/SystemExit. Retry with the bare file name.
        alg = create_load_algorithm(
            filename.split(os.sep)[-1], input_properties)
        alg.execute()
    workspace = alg.getProperty("OutputWorkspace").value
    load_result = _get_algorithm_properties(alg, output_properties)
    if is_workspace_group(workspace):  # handle multi-period data
        load_result["OutputWorkspace"] = [
            MuonWorkspaceWrapper(ws)
            for ws in load_result["OutputWorkspace"]
        ]
        run = get_run_from_multi_period_data(workspace)
        load_result["DataDeadTimeTable"] = load_result["DeadTimeTable"][0]
    else:  # single period data: normalise to a one-element list
        load_result["OutputWorkspace"] = [
            MuonWorkspaceWrapper(load_result["OutputWorkspace"])
        ]
        run = int(workspace.getRunNumber())
        load_result["DataDeadTimeTable"] = load_result["DeadTimeTable"]
    # First good data is stored relative to time zero, rounded to 2 d.p.
    # (hoisted here -- it was duplicated identically in both branches).
    load_result["FirstGoodData"] = round(
        load_result["FirstGoodData"] - load_result['TimeZero'], 2)
    load_result["DeadTimeTable"] = None
    filename = alg.getProperty("Filename").value
    return load_result, run, filename
def handleFinished(self):
    """Handle completion of the MaxEnt calculation: re-enable the view,
    notify subscribers and publish any requested optional outputs.
    """
    self.activate()
    self.calculation_finished_notifier.notify_subscribers(
        self._maxent_output_workspace_name)
    # If reconstructed spectra were requested and produced, publish them.
    if self.view.output_reconstructed_spectra and SPECTRA in self._optional_output_names.keys():
        ws = MuonWorkspaceWrapper(
            self._optional_output_names[SPECTRA]).workspace
        if self.use_groups:
            table = MuonWorkspaceWrapper(
                self._optional_output_names[GROUPINGTABLE]).workspace
            self.new_reconstructed_data.notify_subscribers(
                {"ws": ws.name(), "table": table.name()})
        else:
            self.new_reconstructed_data.notify_subscribers(
                {"ws": ws.name(), "table": None})
    # If a phase table was output, register it with the appropriate context.
    if self.view.output_phase_table and PHASETABLE in self._optional_output_names.keys():
        name = self._optional_output_names[PHASETABLE]
        if self.use_groups:
            # NOTE(review): get_num_groups is accessed without parentheses --
            # presumably a property; confirm at its definition site.
            num_groups = self.get_num_groups
            self.context.frequency_context.add_group_phase_table(
                MuonWorkspaceWrapper(name), num_groups)
        else:
            self.context.phase_context.add_phase_table(
                MuonWorkspaceWrapper(name))
        self.new_phase_table.notify_subscribers()
        self.update_phase_table_options()
    # clear optional outputs
    self._optional_output_names = {}
def add_phase_quad_to_ADS(self, input_workspace, phasequad_workspace_name):
    """Show a PhaseQuad workspace in the ADS, register it with the phase
    context and notify subscribers that the calculation completed.
    """
    # Extract the run token (digits, commas, dashes) that follows the
    # instrument prefix in the workspace name.
    run = re.search(
        '^{}([0-9, -]+)[;,_]?'.format(
            self.context.data_context.instrument),
        input_workspace).group(1)
    directory = get_base_data_directory(self.context, run)
    muon_workspace_wrapper = MuonWorkspaceWrapper(
        directory + phasequad_workspace_name)
    muon_workspace_wrapper.show()
    self.context.phase_context.add_phase_quad(muon_workspace_wrapper, run)
    # NOTE(review): 'nofifier' looks like a typo for 'notifier', but the
    # attribute name must match its definition site -- do not rename here alone.
    self.phase_quad_calculation_complete_nofifier.notify_subscribers()
def _calculate_phasequads(self, phasequad_obj, rebin):
    """Calculate phasequad workspaces for every current run, show them in
    the ADS and update the phasequad object's asymmetry workspaces.

    Raises ValueError for multi-period runs, which are not supported.
    """
    for run in self._data_context.current_runs:
        if self._data_context.num_periods(run) > 1:
            raise ValueError("Cannot support multiple periods")
        ws_list = self.calculate_phasequad(phasequad_obj, run, rebin)
        base_dir = get_base_data_directory(self, run_list_to_string(run))
        for ws_name in ws_list:
            MuonWorkspaceWrapper(base_dir + ws_name).show()
        phasequad_obj.update_asymmetry_workspaces(ws_list, run, rebin=rebin)
def add_maxent_workspace_to_ADS(self, input_workspace, maxent_workspace, alg):
    """Show the MaxEnt result in the ADS, register it with the frequency
    context, publish optional outputs and record the output name for the
    calculation-finished signal.
    """
    run = re.search('[0-9]+', input_workspace).group()
    base_name = get_maxent_workspace_name(input_workspace,
                                          self.view.get_method)
    directory = get_maxent_workspace_group_name(
        base_name, self.context.data_context.instrument,
        self.context.workspace_suffix)
    muon_workspace_wrapper = MuonWorkspaceWrapper(directory + base_name)
    muon_workspace_wrapper.show()
    maxent_output_options = self.get_maxent_output_options()
    self.context.frequency_context.add_maxEnt(run, maxent_workspace)
    self.add_optional_outputs_to_ADS(alg, maxent_output_options, base_name,
                                     directory)
    # Storing this on the class so it can be sent as part of the calculation
    # finished signal.
    self._maxent_output_workspace_name = base_name
def show_group_data(self, group_name, show=True, rebin=False):
    """Calculate counts and asymmetry-estimate workspaces for a group over
    all current runs, store them on the group and optionally show them.
    """
    for run in self.current_runs:
        run_as_string = run_list_to_string(run)
        group_workspace = calculate_group_data(self, group_name, run,
                                               rebin)
        group_asymmetry = estimate_group_asymmetry_data(
            self, group_name, run, rebin)
        directory = get_base_data_directory(
            self, run_as_string) + get_group_data_directory(
                self, run_as_string)
        name = get_group_data_workspace_name(self, group_name,
                                             run_as_string, rebin)
        asym_name = get_group_asymmetry_name(self, group_name,
                                             run_as_string, rebin)
        if not rebin:
            # Raw (un-rebinned) results, keyed by the run list's string form.
            self._groups[group_name]._workspace[str(
                run)] = MuonWorkspaceWrapper(group_workspace)
            self._groups[group_name]._asymmetry_estimate[str(
                run)] = MuonWorkspaceWrapper(group_asymmetry)
            if show:
                # NOTE(review): reads via 'workspace' after writing
                # '_workspace' -- presumably a property over the same dict;
                # verify at the group class definition.
                self._groups[group_name].workspace[str(run)].show(
                    directory + name)
                self._groups[group_name]._asymmetry_estimate[str(
                    run)].show(directory + asym_name)
        else:
            # Rebinned results go to the *_rebin dictionaries.
            self._groups[group_name]._workspace_rebin[str(
                run)] = MuonWorkspaceWrapper(group_workspace)
            self._groups[group_name]._asymmetry_estimate_rebin[str(
                run)] = MuonWorkspaceWrapper(group_asymmetry)
            if show:
                self._groups[group_name]._workspace_rebin[str(run)].show(
                    directory + name)
                self._groups[group_name]._asymmetry_estimate_rebin[str(
                    run)].show(directory + asym_name)
def test_that_any_multi_period_data_will_mark_everything_as_multiperiod(
        self):
    """If any current run holds multi-period data, the whole context is
    reported as multi-period."""
    periods = [
        MuonWorkspaceWrapper(f'raw_data_{i + 1}') for i in range(4)
    ]
    self.loaded_data.add_data(workspace={'OutputWorkspace': periods},
                              run=[84447],
                              filename='workspace_filename',
                              instrument='EMU')
    # Second run has no multi-period entry; one is enough to trip the flag.
    self.context._current_runs = [[84447], [19489]]
    self.assertTrue(self.context.is_multi_period())
def add_phase_quad_to_ADS(self, input_workspace, input_phase_table,
                          phase_quad):
    """Show a computed PhaseQuad workspace in the ADS, register it with the
    phase context and notify subscribers of completion.
    """
    run = re.search('[0-9]+', input_workspace).group()
    phasequad_workspace_name = get_phase_quad_workspace_name(
        input_workspace, input_phase_table)
    phase_table_group = get_phase_table_workspace_group_name(
        phasequad_workspace_name, self.context.data_context.instrument)
    directory = get_base_data_directory(self.context,
                                        run) + phase_table_group
    muon_workspace_wrapper = MuonWorkspaceWrapper(
        phase_quad, directory + phasequad_workspace_name)
    muon_workspace_wrapper.show()
    # NOTE(review): add_phase_quad is called without a run argument here;
    # confirm against the phase context's signature.
    self.context.phase_context.add_phase_quad(muon_workspace_wrapper)
    # NOTE(review): 'nofifier' is likely a typo for 'notifier'; the attribute
    # name must match its definition site, so it is not renamed here.
    self.phase_quad_calculation_complete_nofifier.notify_subscribers()
def add_fft_workspace_to_ADS(self, input_workspace,
                             imaginary_input_workspace,
                             fft_workspace_label):
    """Extract the Re/Im/Mod spectra from a computed FFT workspace (looked
    up by ADS name), register each with the frequency context and show them.
    """
    run = re.search('[0-9]+', input_workspace).group()
    fft_workspace = mantid.AnalysisDataService.retrieve(
        fft_workspace_label)
    # An empty imaginary input means no imaginary run number to record.
    Im_run = ""
    if imaginary_input_workspace != "":
        Im_run = re.search('[0-9]+', imaginary_input_workspace).group()
    fft_workspace_name = get_fft_workspace_name(input_workspace,
                                                imaginary_input_workspace)
    directory = get_fft_workspace_group_name(
        fft_workspace_name, self.load.data_context.instrument,
        self.load.workspace_suffix)
    Re = get_group_or_pair_from_name(input_workspace)
    Im = get_group_or_pair_from_name(imaginary_input_workspace)
    # When the FFT output has 6 histograms the wanted spectra sit at
    # indices 3-5 -- presumably a phase-quad layout; confirm with the FFT
    # algorithm's output ordering.
    shift = 3 if fft_workspace.getNumberHistograms() == 6 else 0
    spectra = {
        "_" + FREQUENCY_EXTENSIONS["RE"]: 0 + shift,
        "_" + FREQUENCY_EXTENSIONS["IM"]: 1 + shift,
        "_" + FREQUENCY_EXTENSIONS["MOD"]: 2 + shift
    }
    for spec_type in list(spectra.keys()):
        extracted_ws = extract_single_spec(
            fft_workspace, spectra[spec_type],
            fft_workspace_name + spec_type)
        if 'PhaseQuad' in self.view.workspace:
            self.load._frequency_context.add_FFT(
                fft_workspace_name + spec_type,
                run, Re, Im_run, Im,
                phasequad=True)
        else:
            self.load._frequency_context.add_FFT(
                fft_workspace_name + spec_type, run, Re, Im_run, Im)
        muon_workspace_wrapper = MuonWorkspaceWrapper(extracted_ws)
        muon_workspace_wrapper.show(
            directory + fft_workspace_name + spec_type)
    # This is a small hack to get the output name to a location where it can
    # be part of the calculation finished signal.
    self._output_workspace_name = fft_workspace_name + '_mod'
def show_raw_data(self):
    """Show the loaded raw data and dead-time tables for every current run
    in the ADS, suspending rename observation for the duration.
    """
    # Suspend rename observation while workspaces are shown/renamed so the
    # observer does not react to our own changes.
    self.ads_observer.observeRename(False)
    for run in self.data_context.current_runs:
        with WorkspaceGroupDefinition():
            run_string = run_list_to_string(run)
            loaded_workspace = \
                self.data_context._loaded_data.get_data(run=run, instrument=self.data_context.instrument)['workspace'][
                    'OutputWorkspace']
            loaded_workspace_deadtime_table = self.data_context._loaded_data.get_data(
                run=run, instrument=self.data_context.instrument
            )['workspace']['DataDeadTimeTable']
            directory = get_base_data_directory(self, run_string)
            deadtime_name = get_deadtime_data_workspace_name(
                self.data_context.instrument,
                str(run[0]),
                workspace_suffix=self.workspace_suffix)
            MuonWorkspaceWrapper(loaded_workspace_deadtime_table).show(
                directory + deadtime_name)
            # Replace the stored table handle with the shown ADS name.
            self.data_context._loaded_data.get_data(
                run=run, instrument=self.data_context.instrument
            )['workspace']['DataDeadTimeTable'] = deadtime_name
            if len(loaded_workspace) > 1:
                # Multi-period data: show one workspace per period.
                for i, single_ws in enumerate(loaded_workspace):
                    name = directory + get_raw_data_workspace_name(
                        self.data_context.instrument,
                        run_string,
                        multi_period=True,
                        period=str(i + 1),
                        workspace_suffix=self.workspace_suffix)
                    single_ws.show(name)
            else:
                # Single period data
                name = directory + get_raw_data_workspace_name(
                    self.data_context.instrument,
                    run_string,
                    multi_period=False,
                    workspace_suffix=self.workspace_suffix)
                loaded_workspace[0].show(name)
    self.ads_observer.observeRename(True)
def combine_loaded_runs(model, run_list):
    """Co-add runs period-by-period into the first run's workspace entry and
    replace the individual entries with one "Co-added" entry.

    NOTE(review): accumulates directly onto the first run's workspaces (no
    clone), so the originals are consumed by the Plus -- confirm intended.
    """
    return_ws = model._loaded_data_store.get_data(
        run=[run_list[0]])["workspace"]
    running_total = []
    for index, workspace in enumerate(return_ws["OutputWorkspace"]):
        running_total.append(workspace.workspace)
        for run in run_list[1:]:
            ws = model._loaded_data_store.get_data(
                run=[run])["workspace"]["OutputWorkspace"][index].workspace
            running_total[index] = algorithm_utils.run_Plus({
                "LHSWorkspace": running_total[index],
                "RHSWorkspace": ws,
                "AllowDifferentNumberSpectra": False}
            )
    return_ws["OutputWorkspace"] = [
        MuonWorkspaceWrapper(running_total_period)
        for running_total_period in running_total
    ]
    model._loaded_data_store.remove_data(
        run=flatten_run_list(run_list),
        instrument=model._data_context.instrument)
    model._loaded_data_store.add_data(
        run=flatten_run_list(run_list), workspace=return_ws,
        filename="Co-added", instrument=model._data_context.instrument)
def combine_loaded_runs(model, run_list, delete_added=False):
    """Co-add all runs in ``run_list`` period-by-period (via cloned
    accumulators) and store the result as a single "Co-added" entry.

    NOTE(review): ``delete_added`` is accepted but never used. Unlike the
    sibling variant, this body assumes 'DeadTimeTable' and
    'DetectorGroupingTable' are always present -- a KeyError is possible for
    data that lacks them; confirm which loaders feed this path.
    """
    return_ws = model._loaded_data_store.get_data(
        run=[run_list[0]])["workspace"]
    running_total = []
    for index, workspace in enumerate(return_ws["OutputWorkspace"]):
        # Clone the first run's period so accumulation does not mutate it.
        running_total_item = workspace.workspace.name() + 'CoAdd'
        CloneWorkspace(InputWorkspace=workspace.workspace.name(),
                       OutputWorkspace=running_total_item)
        for run in run_list[1:]:
            ws = model._loaded_data_store.get_data(
                run=[run])["workspace"]["OutputWorkspace"][index].workspace
            Plus(LHSWorkspace=running_total_item,
                 RHSWorkspace=ws,
                 AllowDifferentNumberSpectra=False,
                 OutputWorkspace=running_total_item)
        running_total.append(running_total_item)
    return_ws_actual = {
        key: return_ws[key]
        for key in [
            'MainFieldDirection', 'TimeZero', 'FirstGoodData',
            'DeadTimeTable', 'DetectorGroupingTable'
        ]
    }
    return_ws_actual["OutputWorkspace"] = [
        MuonWorkspaceWrapper(running_total_period)
        for running_total_period in running_total
    ]
    return_ws_actual['DataDeadTimeTable'] = CloneWorkspace(
        InputWorkspace=return_ws['DataDeadTimeTable'],
        OutputWorkspace=return_ws['DataDeadTimeTable'] + 'CoAdd').name()
    # Replace the individual run entries with the single co-added entry.
    model._loaded_data_store.remove_data(
        run=flatten_run_list(run_list),
        instrument=model._data_context.instrument)
    model._loaded_data_store.add_data(
        run=flatten_run_list(run_list),
        workspace=return_ws_actual,
        filename="Co-added",
        instrument=model._data_context.instrument)
def update_workspaces(self, run, counts_workspace, asymmetry_workspace,
                      asymmetry_workspace_unnorm, rebin):
    """Store wrapped counts/asymmetry workspaces for a run, routed to the
    rebinned or raw dictionaries according to ``rebin``.
    """
    key = str(run)
    if rebin:
        stores = (self._counts_workspace_rebin,
                  self._asymmetry_estimate_rebin,
                  self._asymmetry_estimate_rebin_unormalised)
    else:
        stores = (self._counts_workspace,
                  self._asymmetry_estimate,
                  self._asymmetry_estimate_unormalised)
    values = (counts_workspace, asymmetry_workspace,
              asymmetry_workspace_unnorm)
    for store, ws in zip(stores, values):
        store[key] = MuonWorkspaceWrapper(ws)
def add_fft_workspace_to_ADS(self, input_workspace,
                             imaginary_input_workspace, fft_workspace):
    """Split an FFT result into Re/Im/Mod spectra, register each with the
    frequency context and show them in the ADS.
    """
    run = re.search('[0-9]+', input_workspace).group()
    Im_run = re.search('[0-9]+', imaginary_input_workspace).group()
    fft_workspace_name = get_fft_workspace_name(input_workspace,
                                                imaginary_input_workspace)
    group = get_fft_workspace_group_name(fft_workspace_name,
                                         self.load.data_context.instrument,
                                         self.load.workspace_suffix)
    directory = get_base_data_directory(self.load, run) + group
    Re = get_group_or_pair_from_name(input_workspace)
    Im = get_group_or_pair_from_name(imaginary_input_workspace)
    # When the FFT output has 6 histograms the wanted spectra sit at
    # indices 3-5 -- presumably a phase-quad layout; confirm with the FFT
    # algorithm's output ordering.
    shift = 3 if fft_workspace.getNumberHistograms() == 6 else 0
    spectra = {
        "_" + FREQUENCY_EXTENSIONS["RE"]: 0 + shift,
        "_" + FREQUENCY_EXTENSIONS["IM"]: 1 + shift,
        "_" + FREQUENCY_EXTENSIONS["MOD"]: 2 + shift
    }
    for spec_type in list(spectra.keys()):
        extracted_ws = extract_single_spec(fft_workspace,
                                           spectra[spec_type])
        if 'PhaseQuad' in self.view.workspace:
            self.load._frequency_context.add_FFT(
                fft_workspace_name + spec_type,
                run, Re, Im_run, Im,
                phasequad=True)
        else:
            self.load._frequency_context.add_FFT(
                fft_workspace_name + spec_type, run, Re, Im_run, Im)
        muon_workspace_wrapper = MuonWorkspaceWrapper(
            extracted_ws, directory + fft_workspace_name + spec_type)
        muon_workspace_wrapper.show()
def combine_loaded_runs(model, run_list):
    """Sum all runs in ``run_list`` into one workspace, remove the
    per-run entries from the store and add a single "Co-added" entry.

    NOTE(review): single-workspace (non-list) 'OutputWorkspace' variant;
    per-run entries are removed as they are consumed inside the loop.
    """
    return_ws = model._loaded_data_store.get_data(
        run=run_list[0])["workspace"]
    running_total = return_ws["OutputWorkspace"].workspace
    for run in run_list[1:]:
        ws = model._loaded_data_store.get_data(
            run=run)["workspace"]["OutputWorkspace"].workspace
        running_total = algorithm_utils.run_Plus({
            "LHSWorkspace": running_total,
            "RHSWorkspace": ws,
            "AllowDifferentNumberSpectra": False
        })
        # remove the single loaded filename
        model._loaded_data_store.remove_data(run=run)
    model._loaded_data_store.remove_data(run=run_list[0])
    return_ws["OutputWorkspace"] = MuonWorkspaceWrapper(running_total)
    model._loaded_data_store.add_data(run=flatten_run_list(run_list),
                                      workspace=return_ws,
                                      filename="Co-added")
def test_that_cannot_initialize_with_WorkspaceGroup_object(self):
    """Wrapping a WorkspaceGroup is invalid and must raise AttributeError."""
    group = api.WorkspaceGroup()
    # Sanity check the fixture type before asserting the failure mode.
    assert isinstance(group, WorkspaceGroup)
    with self.assertRaises(AttributeError):
        MuonWorkspaceWrapper(workspace=group)
def test_that_can_initialize_with_Workspace2D_object(self):
    """A Workspace2D must be accepted by the wrapper without raising."""
    MuonWorkspaceWrapper(workspace=self.workspace)
def test_that_can_initialize_with_TableWorkspace_object(self):
    """A TableWorkspace must be accepted by the wrapper without raising."""
    table = create_simple_table_workspace()
    # Sanity check the fixture type before constructing the wrapper.
    assert isinstance(table, ITableWorkspace)
    MuonWorkspaceWrapper(workspace=table)
def test_that_cannot_initialize_without_supplying_a_workspace(self):
    """Constructing the wrapper with no workspace argument raises TypeError."""
    with self.assertRaises(TypeError):
        MuonWorkspaceWrapper()