def load_workspace_from_filename(filename,
                                 input_properties=DEFAULT_INPUTS,
                                 output_properties=DEFAULT_OUTPUTS):
    try:
        alg, psi_data = create_load_algorithm(filename, input_properties)
        alg.execute()
    except Exception:
        # Fall back to loading by the base file name if the full path fails
        alg, psi_data = create_load_algorithm(
            filename.split(os.sep)[-1], input_properties)
        alg.execute()

    workspace = AnalysisDataService.retrieve(
        alg.getProperty("OutputWorkspace").valueAsStr)

    if is_workspace_group(workspace):
        # handle multi-period data
        load_result = _get_algorithm_properties(alg, output_properties)
        load_result["OutputWorkspace"] = [
            MuonWorkspaceWrapper(ws) for ws in workspace.getNames()
        ]
        run = get_run_from_multi_period_data(workspace)

        if not psi_data:
            # Keep the first dead-time table and delete the rest
            deadtime_tables = AnalysisDataService.retrieve(
                load_result["DeadTimeTable"]).getNames()
            load_result["DataDeadTimeTable"] = deadtime_tables[0]
            for deadtime_table in deadtime_tables[1:]:
                DeleteWorkspace(Workspace=deadtime_table)

            load_result["FirstGoodData"] = round(
                load_result["FirstGoodData"] - load_result['TimeZero'], 2)
            UnGroupWorkspace(load_result["DeadTimeTable"])
            load_result["DeadTimeTable"] = None
            UnGroupWorkspace(workspace.name())
        else:
            load_result["DataDeadTimeTable"] = None
            load_result["FirstGoodData"] = round(load_result["FirstGoodData"], 2)
    else:  # single period data
        load_result = _get_algorithm_properties(alg, output_properties)
        load_result["OutputWorkspace"] = [
            MuonWorkspaceWrapper(load_result["OutputWorkspace"])
        ]
        run = int(workspace.getRunNumber())

        if not psi_data:
            load_result["DataDeadTimeTable"] = load_result["DeadTimeTable"]
            load_result["DeadTimeTable"] = None
            load_result["FirstGoodData"] = round(
                load_result["FirstGoodData"] - load_result['TimeZero'], 2)
        else:
            load_result["DataDeadTimeTable"] = None
            load_result["FirstGoodData"] = round(load_result["FirstGoodData"], 2)

    return load_result, run, filename, psi_data
def load_workspace_from_filename(filename,
                                 input_properties=DEFAULT_INPUTS,
                                 output_properties=DEFAULT_OUTPUTS):
    try:
        alg, psi_data = create_load_algorithm(filename, input_properties)
        alg.execute()
    except Exception:
        # Fall back to loading by the base file name if the full path fails
        alg, psi_data = create_load_algorithm(
            filename.split(os.sep)[-1], input_properties)
        alg.execute()

    # The filename given to the loading algorithm can be different to the file that
    # was actually loaded. Pulling the filename back out of the algorithm after
    # loading ensures that the path is accurate.
    filename = alg.getProperty("Filename").value

    workspace = AnalysisDataService.retrieve(
        alg.getProperty("OutputWorkspace").valueAsStr)

    if is_workspace_group(workspace):
        # handle multi-period data
        load_result = _get_algorithm_properties(alg, output_properties)
        load_result["OutputWorkspace"] = [
            MuonWorkspaceWrapper(ws) for ws in workspace.getNames()
        ]
        run = get_run_from_multi_period_data(workspace)

        # Keep the first dead-time table and delete the rest
        deadtime_tables = AnalysisDataService.retrieve(
            load_result["DeadTimeTable"]).getNames()
        load_result["DataDeadTimeTable"] = deadtime_tables[0]
        for table in deadtime_tables[1:]:
            DeleteWorkspace(Workspace=table)

        load_result["FirstGoodData"] = round(
            load_result["FirstGoodData"] - load_result['TimeZero'], 3)
        UnGroupWorkspace(load_result["DeadTimeTable"])
        load_result["DeadTimeTable"] = None
        UnGroupWorkspace(workspace.name())
    else:  # single period data
        load_result = _get_algorithm_properties(alg, output_properties)
        load_result["OutputWorkspace"] = [
            MuonWorkspaceWrapper(load_result["OutputWorkspace"])
        ]
        run = int(workspace.getRunNumber())
        load_result["DataDeadTimeTable"] = load_result["DeadTimeTable"]
        load_result["DeadTimeTable"] = None
        load_result["FirstGoodData"] = round(
            load_result["FirstGoodData"] - load_result['TimeZero'], 3)

    return load_result, run, filename, psi_data
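# --- Hedged usage sketch (not part of the original module) -----------------------
# Shows how a caller might use load_workspace_from_filename() above to pull a muon
# run into the ADS and read back the corrected path, run number and first-good-data
# value. The file name "EMU00019489.nxs" is an illustrative assumption; substitute
# any muon NeXus file visible to Mantid's data search directories.
load_result, run, loaded_path, psi_data = load_workspace_from_filename("EMU00019489.nxs")
print(f"run {run} loaded from {loaded_path} (PSI format: {psi_data})")
print(f"first good data: {load_result['FirstGoodData']}")
for wrapper in load_result["OutputWorkspace"]:  # one MuonWorkspaceWrapper per period
    print(wrapper)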
def __call__(self, vesuvio_input, iteration, verbose_output=False):
    vesuvio_output = VesuvioTOFFitOutput(
        lambda index: vesuvio_input.sample_data.getSpectrum(
            index).getSpectrumNo())

    if vesuvio_input.using_back_scattering_spectra:
        fit_profile_collection = self._mass_profile_collection.filter(
            ignore_hydrogen_filter)
    else:
        fit_profile_collection = self._mass_profile_collection

    all_mass_values = self._mass_profile_collection.masses
    fit_mass_values = fit_profile_collection.masses

    for index in range(vesuvio_input.spectra_number):
        self._fit_namer.set_index(index)
        all_profiles = ";".join(
            self._mass_profile_collection.functions(index))
        fit_profiles = ";".join(fit_profile_collection.functions(index))

        # Calculate pre-fit to retrieve parameter approximations for corrections
        prefit_result = self._prefit(vesuvio_input.sample_data, index,
                                     fit_mass_values, fit_profiles)

        # Calculate corrections
        corrections_result = self._corrections(
            vesuvio_input.sample_data, vesuvio_input.container_data, index,
            all_mass_values, all_profiles, prefit_result[1], verbose_output)

        # Calculate final fit
        fit_result = self._final_fit(corrections_result[-1],
                                     fit_mass_values, fit_profiles)

        # Update output with results from fit
        _update_output(vesuvio_output, prefit_result, corrections_result,
                       fit_result)

        # Clear ADS of intermediate workspaces and workspace group
        if verbose_output:
            UnGroupWorkspace(corrections_result[0])
            UnGroupWorkspace(corrections_result[1])
        mtd.remove(prefit_result[1].getName())
        mtd.remove(corrections_result[-1].getName())
        mtd.remove(fit_result[1].getName())

    return vesuvio_output
def calibrate_tube(workspace: WorkspaceTypes, tube_name: str,
                   output_peak_table: str = 'PeakTable',
                   output_parameters_table: str = 'ParametersTable',
                   output_peak_y_table: str = 'PeakYTable',
                   shadow_height: float = 1000,
                   shadow_width: float = 4,
                   fit_domain: float = 7) -> TableWorkspace:
    r"""
    Calibration table for one tube of CORELLI

    This function creates TableWorkspace 'CalibTable', TableWorkspace 'PeakTable', and
    WorkspaceGroup 'ParametersTable' containing TableWorkspace 'ParametersTableI',
    where 'I' is the tube number.

    :param workspace: string or handle to ~mantid.dataobjects.Workspace2D
    :param tube_name: string uniquely representing one tube e.g. 'bank88/sixteenpack/tube3'
    :param output_peak_table: name of the output table of shadow positions along the tube, in pixel units
    :param output_parameters_table: name of the output table of optimized polynomial coefficients A0, A1, A2,
        and chi-square
    :param output_peak_y_table: name of the output table of shadow positions along the vertical axis
    :param shadow_height: estimated dip in the background intensity. Dips typical of Cd-wire runs
        are around 1000 neutron counts.
    :param shadow_width: estimated width of the shadow cast by the wire, in pixel units. The Cd-wire
        typically casts a shadow over four pixels.
    :param fit_domain: estimated range, in pixel units, over which to carry out the fit. An appropriate
        value is about twice the shadow width.
    :return: table containing detector ID and position vector
    """
    message = f'Cannot process workspace {workspace}. Pass the name of an existing workspace or a workspace handle'
    assert isinstance(workspace, (str, Workspace2D)), message
    assert shadow_height > 0, 'shadow height must be positive'
    for marker in ('bank', 'sixteenpack', 'tube'):
        assert marker in tube_name, f'{tube_name} does not uniquely specify one tube'
    peak_height, peak_width = -shadow_height, shadow_width

    # Initial guess for the peak positions, assuming:
    # - the center of the wire mesh coincides with the center of the tube
    # - wires cast a shadow on a perfectly calibrated tube
    fit_extent = (fit_domain / PIXELS_PER_TUBE) * TUBE_LENGTH  # fit domain in meters
    assert fit_extent < WIRE_GAP, 'The fit domain cannot be larger than the distance between consecutive wires'
    wire_pixel_positions = wire_positions(units='pixels')[1: -1]
    fit_par = TubeCalibFitParams(wire_pixel_positions, height=peak_height, width=peak_width, margin=fit_domain)
    fit_par.setAutomatic(True)

    # Generate the calibration table, the peak table, and the parameters table
    peaks_form = [1] * len(wire_pixel_positions)  # signals we'll be fitting dips (peaks with negative heights)
    calibration_table, _ = tube.calibrate(workspace, tube_name, wire_positions(units='meters')[1: -1],
                                          peaks_form, fitPar=fit_par, outputPeak=True,
                                          parameters_table_group='ParametersTableGroup')
    calibration_table = trim_calibration_table(calibration_table)  # discard X and Z coordinates

    # Additional workspaces
    # Table with shadow positions along the tube, in pixel units
    if output_peak_table != 'PeakTable':  # 'PeakTable' is output by tube.calibrate
        RenameWorkspace(InputWorkspace='PeakTable', OutputWorkspace=output_peak_table)
    # Table with shadow positions along the vertical axis
    calculate_peak_y_table(output_peak_table, 'ParametersTableGroup', output_workspace=output_peak_y_table)
    # Table with optimized parameters for the polynomial coefficients A0, A1, A2, and chi-square
    RenameWorkspace(InputWorkspace=mtd['ParametersTableGroup'].getItem(0),
                    OutputWorkspace=output_parameters_table)
    UnGroupWorkspace(InputWorkspace='ParametersTableGroup')
    return calibration_table
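# --- Hedged usage sketch (not part of the original module) -----------------------
# Illustrates the call pattern described in the docstring of calibrate_tube() above:
# integrate a Cd-wire run to counts per pixel, then calibrate a single tube. The run
# number, workspace names and tube choice are illustrative assumptions.
from mantid.simpleapi import Integration, LoadEventNexus

LoadEventNexus(Filename='CORELLI_124023', OutputWorkspace='wire_run')
Integration(InputWorkspace='wire_run', OutputWorkspace='wire_counts')  # counts per pixel
calibration = calibrate_tube('wire_counts', 'bank87/sixteenpack/tube12',
                             output_peak_table='peaks_tube12',
                             output_parameters_table='polyfit_tube12')
print(calibration.rowCount(), 'calibrated pixel positions')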
def setUpClass(cls):
    LoadNexusProcessed(Filename="VULCAN_192227_diagnostics.nxs", OutputWorkspace="diagtest")
    UnGroupWorkspace("diagtest")
    cls.workspaces = [
        "diag_dspacing", "diag_fitted", "diag_fitparam", "strain",
        "single_strain", "difference", "single_diff", "center_tof"
    ]
def test_ungrouping_in_ads_calls_any_change_handle(self):
    CreateSampleWorkspace(OutputWorkspace="ws1")
    CreateSampleWorkspace(OutputWorkspace="ws2")
    GroupWorkspaces(InputWorkspaces="ws1,ws2", OutputWorkspace="NewGroup")

    self.project.anyChangeHandle = mock.MagicMock()
    UnGroupWorkspace(InputWorkspace="NewGroup")

    # One notification for the ungroup operation and one for removing the old
    # group object from the ADS
    self.assertEqual(2, self.project.anyChangeHandle.call_count)
def remove_unwanted_workspaces(workspace_name, temp_workspace_name, period):
    # Delete all entries except for the period which is requested
    workspaces_to_keep = temp_workspace_name + "_" + str(period)
    group_workspace = mtd[temp_workspace_name]
    workspace_names_to_remove = [element.name() for element in group_workspace
                                 if element.name() != workspaces_to_keep]
    for to_remove in workspace_names_to_remove:
        DeleteWorkspace(to_remove)
    # We need to ungroup the group workspace, which now contains only a single workspace
    UnGroupWorkspace(group_workspace)
    RenameWorkspace(InputWorkspace=workspaces_to_keep, OutputWorkspace=workspace_name)
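# --- Hedged usage sketch (not part of the original module) -----------------------
# Exercises remove_unwanted_workspaces() above on a synthetic "multi-period" group:
# keep only period 2 and publish it under the final name. All workspace names here
# are illustrative assumptions.
from mantid.simpleapi import CreateSampleWorkspace, GroupWorkspaces, mtd

for period in (1, 2, 3):
    CreateSampleWorkspace(OutputWorkspace="raw_tmp_" + str(period))
GroupWorkspaces(InputWorkspaces="raw_tmp_1,raw_tmp_2,raw_tmp_3", OutputWorkspace="raw_tmp")

remove_unwanted_workspaces("sample_run", "raw_tmp", period=2)
print(mtd.doesExist("sample_run"))  # expected: True (renamed from raw_tmp_2)
print(mtd.doesExist("raw_tmp_1"))   # expected: False (deleted)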
def test_observeUnGroup_calls_unGroupHandle_when_set_on_ads_and_a_group_is_ungrouped(self):
    CreateSampleWorkspace(OutputWorkspace="ws1")
    CreateSampleWorkspace(OutputWorkspace="ws2")
    GroupWorkspaces(InputWorkspaces="ws1,ws2", OutputWorkspace="NewGroup")

    self.fake_class.observeUnGroup(True)
    self.fake_class.unGroupHandle = mock.MagicMock()
    UnGroupWorkspace(InputWorkspace="NewGroup")

    self.assertEqual(self.fake_class.unGroupHandle.call_count, 1)
def test_observeAll_calls_anyChangeHandle_when_set_on_ads_ungroup_performed(self):
    self.fake_class.observeAll(True)

    CreateSampleWorkspace(OutputWorkspace="ws1")
    expected_count = 1
    CreateSampleWorkspace(OutputWorkspace="ws2")
    expected_count += 1

    GroupWorkspaces(InputWorkspaces="ws1,ws2", OutputWorkspace="NewGroup")
    # One for grouping the workspaces
    expected_count += 1
    # One for adding the group to the ADS
    expected_count += 1

    UnGroupWorkspace(InputWorkspace="NewGroup")
    # One for ungrouping the workspaces
    expected_count += 1
    # One for removing the grouped workspace object from the ADS
    expected_count += 1

    self.assertEqual(self.fake_class.anyChangeHandle.call_count, expected_count)
def __load_cached_data(cache_files, sha1, abs_method="", prefix_name=""):
    """try to load cached data from memory and disk

    :param abs_method: absorption calculation method
    :param sha1: SHA1 that identifies the cached workspace
    :param cache_files: list of cache file names to search
    :param prefix_name: prefix to add to wkspname for caching

    :return: found_abs_wksp_sample, found_abs_wksp_container,
             abs_wksp_sample, abs_wksp_container, cache_files[0]
    """
    # init
    abs_wksp_sample, abs_wksp_container = "", ""
    found_abs_wksp_sample, found_abs_wksp_container = False, False

    # step_0: depending on the abs_method, suffix will be different
    if abs_method == "SampleOnly":
        abs_wksp_sample = f"{prefix_name}_ass"
        found_abs_wksp_container = True
    elif abs_method == "SampleAndContainer":
        abs_wksp_sample = f"{prefix_name}_ass"
        abs_wksp_container = f"{prefix_name}_acc"
    elif abs_method == "FullPaalmanPings":
        abs_wksp_sample = f"{prefix_name}_assc"
        abs_wksp_container = f"{prefix_name}_ac"
    else:
        raise ValueError(
            "Unrecognized absorption correction method '{}'".format(abs_method))

    # step_1: check memory
    if mtd.doesExist(abs_wksp_sample):
        found_abs_wksp_sample = mtd[abs_wksp_sample].run()["absSHA1"].value == sha1
    if mtd.doesExist(abs_wksp_container):
        found_abs_wksp_container = mtd[abs_wksp_container].run()["absSHA1"].value == sha1

    # step_2: load from disk if either is not found in memory
    if (not found_abs_wksp_sample) or (not found_abs_wksp_container):
        for candidate in cache_files:
            if os.path.exists(candidate):
                wsntmp = "tmpwsg"
                Load(Filename=candidate, OutputWorkspace=wsntmp)
                wstype = mtd[wsntmp].id()
                if wstype == "Workspace2D":
                    RenameWorkspace(InputWorkspace=wsntmp, OutputWorkspace=abs_wksp_sample)
                elif wstype == "WorkspaceGroup":
                    UnGroupWorkspace(InputWorkspace=wsntmp)
                else:
                    raise ValueError(f"Unsupported cached workspace type: {wstype}")
                break

    # step_3: check memory again
    if mtd.doesExist(abs_wksp_sample):
        found_abs_wksp_sample = mtd[abs_wksp_sample].run()["absSHA1"].value == sha1
    if mtd.doesExist(abs_wksp_container):
        found_abs_wksp_container = mtd[abs_wksp_container].run()["absSHA1"].value == sha1

    return found_abs_wksp_sample, found_abs_wksp_container, abs_wksp_sample, abs_wksp_container, cache_files[0]
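# --- Hedged usage sketch (not part of the original module) -----------------------
# Minimal illustration of the in-memory cache-hit path (step_1 above): the function
# looks for a workspace named "<prefix_name>_ass" carrying an "absSHA1" sample log
# equal to the requested SHA1. The workspace name, SHA1 value and cache path are
# illustrative assumptions; real caches come from the absorption-correction workflow.
from mantid.simpleapi import AddSampleLog, CreateSampleWorkspace

CreateSampleWorkspace(OutputWorkspace="demo_abs_ass")
AddSampleLog(Workspace="demo_abs_ass", LogName="absSHA1", LogText="deadbeef", LogType="String")

found_sample, found_container, ws_sample, ws_container, first_cache_file = __load_cached_data(
    cache_files=["/tmp/demo_abs_cache.nxs"],  # illustrative path; nothing is read from disk here
    sha1="deadbeef",
    abs_method="SampleOnly",
    prefix_name="demo_abs")
print(found_sample, ws_sample)  # expected: True demo_abs_ass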