def _filterEvents(self, run, ws_name):
    r"""
    Apply the 'ExcludeTimeSegment' property to a workspace, if an entry
    for this run is present.

    The property value is a comma-separated list of ``run:segment``
    fragments. Only the first fragment matching *run* is applied.

    Parameters
    ----------
    run: str
        run number
    ws_name : str
        name of the workspace to filter
    """
    fragments = self.getProperty('ExcludeTimeSegment').value.split(',')
    for fragment in fragments:
        # Skip fragments that do not belong to this run
        if run + ':' not in fragment:
            continue
        # Build the 'splitter' workspace from the time segment spec
        self.generateSplitterWorkspace(fragment.split(':')[1])
        sapi.FilterEvents(InputWorkspace=ws_name,
                          SplitterWorkspace='splitter',
                          OutputWorkspaceBaseName='splitted',
                          GroupWorkspaces=True,
                          OutputWorkspaceIndexedFrom1=True,
                          RelativeTime=True)
        # Keep only the first filtered workspace, under the original name
        sapi.UnGroupWorkspace('splitted')
        sapi.RenameWorkspace(InputWorkspace='splitted_0',
                             OutputWorkspace=ws_name)
        break  # only one segment per run is processed
def fit_tof_iteration(sample_data, container_data, runs, flags):
    """
    Performs a single iterations of the time of flight corrections and
    fitting workflow.

    :param sample_data: Loaded sample data workspaces
    :param container_data: Loaded container data workspaces
    :param runs: A string specifying the runs to process
    :param flags: A dictionary of flags to control the processing
    :return: Tuple of (workspace group name, pre correction fit parameters,
             final fit parameters, chi^2 values)
    """
    # Transform inputs into something the algorithm can understand
    if isinstance(flags['masses'][0], list):
        # Per-spectrum mass profiles: mass values are taken from the FIRST
        # spectrum's spec only (deep-copied so the helper cannot mutate
        # flags), while a profile string is built for every spectrum.
        mass_values = _create_profile_strs_and_mass_list(
            copy.deepcopy(flags['masses'][0]))[0]
        profiles_strs = []
        for mass_spec in flags['masses']:
            profiles_strs.append(
                _create_profile_strs_and_mass_list(mass_spec)[1])
    else:
        # Single mass spec shared by all spectra
        mass_values, profiles_strs = _create_profile_strs_and_mass_list(
            flags['masses'])
    background_str = _create_background_str(flags.get('background', None))
    intensity_constraints = _create_intensity_constraint_str(
        flags['intensity_constraints'])
    ties = _create_user_defined_ties_str(flags['masses'])

    num_spec = sample_data.getNumberHistograms()
    # Parameter tables are lazily created on the first loop iteration
    pre_correct_pars_workspace = None
    pars_workspace = None
    fit_workspace = None
    max_fit_iterations = flags.get('max_fit_iterations', 5000)

    output_groups = []
    chi2_values = []
    data_workspaces = []
    result_workspaces = []
    group_name = runs + '_result'
    for index in range(num_spec):
        # profiles_strs is a list only in the per-spectrum case above
        if isinstance(profiles_strs, list):
            profiles = profiles_strs[index]
        else:
            profiles = profiles_strs
        suffix = _create_fit_workspace_suffix(index,
                                              sample_data,
                                              flags['fit_mode'],
                                              flags['spectra'],
                                              flags.get('iteration', None))

        # Corrections
        corrections_args = dict()

        # Need to do a fit first to obtain the parameter table
        pre_correction_pars_name = runs + "_params_pre_correction" + suffix
        corrections_fit_name = "__vesuvio_corrections_fit"
        ms.VesuvioTOFFit(InputWorkspace=sample_data,
                         WorkspaceIndex=index,
                         Masses=mass_values,
                         MassProfiles=profiles,
                         Background=background_str,
                         IntensityConstraints=intensity_constraints,
                         Ties=ties,
                         OutputWorkspace=corrections_fit_name,
                         FitParameters=pre_correction_pars_name,
                         MaxIterations=max_fit_iterations,
                         Minimizer=flags['fit_minimizer'])
        # Only the parameter table is kept; the fit workspace is discarded
        ms.DeleteWorkspace(corrections_fit_name)
        corrections_args['FitParameters'] = pre_correction_pars_name

        # Add the multiple scattering arguments
        corrections_args.update(flags['ms_flags'])

        corrected_data_name = runs + "_tof_corrected" + suffix
        linear_correction_fit_params_name = \
            runs + "_correction_fit_scale" + suffix

        if flags.get('output_verbose_corrections', False):
            corrections_args[
                "CorrectionWorkspaces"] = runs + "_correction" + suffix
            corrections_args[
                "CorrectedWorkspaces"] = runs + "_corrected" + suffix

        if container_data is not None:
            corrections_args["ContainerWorkspace"] = container_data

        ms.VesuvioCorrections(
            InputWorkspace=sample_data,
            OutputWorkspace=corrected_data_name,
            LinearFitResult=linear_correction_fit_params_name,
            WorkspaceIndex=index,
            GammaBackground=flags.get('gamma_correct', False),
            Masses=mass_values,
            MassProfiles=profiles,
            IntensityConstraints=intensity_constraints,
            MultipleScattering=True,
            GammaBackgroundScale=flags.get('fixed_gamma_scaling', 0.0),
            ContainerScale=flags.get('fixed_container_scaling', 0.0),
            **corrections_args)

        # Final fit
        fit_ws_name = runs + "_data" + suffix
        pars_name = runs + "_params" + suffix
        # NOTE: the corrected workspace holds a single spectrum, hence
        # WorkspaceIndex=0 here rather than `index`.
        fit_result = ms.VesuvioTOFFit(
            InputWorkspace=corrected_data_name,
            WorkspaceIndex=0,
            Masses=mass_values,
            MassProfiles=profiles,
            Background=background_str,
            IntensityConstraints=intensity_constraints,
            Ties=ties,
            OutputWorkspace=fit_ws_name,
            FitParameters=pars_name,
            MaxIterations=max_fit_iterations,
            Minimizer=flags['fit_minimizer'])
        # Last element of the algorithm's output tuple is the chi^2 value
        chi2_values.append(fit_result[-1])
        ms.DeleteWorkspace(corrected_data_name)

        # Process parameter tables
        # Created once (first iteration) and filled in per spectrum below
        if pre_correct_pars_workspace is None:
            pre_correct_pars_workspace = _create_param_workspace(
                num_spec, mtd[pre_correction_pars_name])

        if pars_workspace is None:
            pars_workspace = _create_param_workspace(num_spec,
                                                     mtd[pars_name])

        if fit_workspace is None:
            fit_workspace = _create_param_workspace(
                num_spec, mtd[linear_correction_fit_params_name])

        spec_num_str = str(sample_data.getSpectrum(index).getSpectrumNo())
        current_spec = 'spectrum_' + spec_num_str

        _update_fit_params(pre_correct_pars_workspace, index,
                           mtd[pre_correction_pars_name], current_spec)
        _update_fit_params(pars_workspace, index, mtd[pars_name],
                           current_spec)
        _update_fit_params(fit_workspace, index,
                           mtd[linear_correction_fit_params_name],
                           current_spec)

        # The per-spectrum tables have been copied into the accumulators;
        # remove them from the ADS
        ms.DeleteWorkspace(pre_correction_pars_name)
        ms.DeleteWorkspace(pars_name)
        ms.DeleteWorkspace(linear_correction_fit_params_name)

        # Process spectrum group
        # Note the ordering of operations here gives the order in the
        # WorkspaceGroup
        output_workspaces = []
        data_workspaces.append(fit_ws_name)
        if flags.get('output_verbose_corrections', False):
            output_workspaces += mtd[
                corrections_args["CorrectionWorkspaces"]].getNames()
            output_workspaces += mtd[
                corrections_args["CorrectedWorkspaces"]].getNames()
            ms.UnGroupWorkspace(corrections_args["CorrectionWorkspaces"])
            ms.UnGroupWorkspace(corrections_args["CorrectedWorkspaces"])

            for workspace in output_workspaces:
                # NOTE(review): this rebinds `group_name` (initialised to
                # runs + '_result' above) whenever verbose corrections are
                # enabled; the later GroupWorkspaces call then uses the
                # iteration-based name — presumably intentional, verify.
                group_name = runs + '_iteration_' + str(
                    flags.get('iteration', None))
                name = group_name + '_' + workspace.split(
                    '_')[1] + '_' + workspace.split('_')[-1]
                result_workspaces.append(name)
                # First spectrum seeds the workspace; later spectra are
                # appended to it
                if index == 0:
                    ms.RenameWorkspace(InputWorkspace=workspace,
                                       OutputWorkspace=name)
                else:
                    ms.ConjoinWorkspaces(InputWorkspace1=name,
                                         InputWorkspace2=workspace)

    # Output the parameter workspaces
    params_pre_corr = runs + "_params_pre_correction_iteration_" + str(
        flags['iteration'])
    params_name = runs + "_params_iteration_" + str(flags['iteration'])
    fit_name = runs + "_correction_fit_scale_iteration_" + str(
        flags['iteration'])
    AnalysisDataService.Instance().addOrReplace(
        params_pre_corr, pre_correct_pars_workspace)
    AnalysisDataService.Instance().addOrReplace(params_name,
                                                pars_workspace)
    AnalysisDataService.Instance().addOrReplace(fit_name, fit_workspace)

    if result_workspaces:
        output_groups.append(
            ms.GroupWorkspaces(InputWorkspaces=result_workspaces,
                               OutputWorkspace=group_name))

    if data_workspaces:
        output_groups.append(
            ms.GroupWorkspaces(InputWorkspaces=data_workspaces,
                               OutputWorkspace=group_name + '_data'))
    else:
        # No data workspaces collected: fall back to the last fit workspace
        output_groups.append(fit_ws_name)

    if len(output_groups) > 1:
        result_ws = output_groups
    else:
        result_ws = output_groups[0]

    return result_ws, pre_correct_pars_workspace, pars_workspace, chi2_values