Code example #1
0
File: polaris_algs.py  Project: bethhampshire/mantid
def save_unsplined_vanadium(vanadium_ws, output_path):
    """Save every entry of a vanadium WorkspaceGroup to one NeXus file.

    Each member workspace is converted to TOF units (when it is not already
    in TOF), renamed to ``van_bank_<n>`` (1-based), regrouped, and the group
    is written to *output_path*. The temporary group is deleted afterwards.

    :param vanadium_ws: WorkspaceGroup with one workspace per bank
    :param output_path: destination file path for SaveNexus
    """
    renamed_banks = []
    for entry_index in range(vanadium_ws.getNumberOfEntries()):
        bank_ws = vanadium_ws.getItem(entry_index)
        current_units = bank_ws.getAxis(0).getUnit().unitID()

        # Make sure every bank is stored in TOF units before saving
        if current_units != WORKSPACE_UNITS.tof:
            bank_ws = mantid.ConvertUnits(InputWorkspace=bank_ws,
                                          Target=WORKSPACE_UNITS.tof)

        bank_ws = mantid.RenameWorkspace(
            InputWorkspace=bank_ws,
            OutputWorkspace="van_bank_{}".format(entry_index + 1))
        renamed_banks.append(bank_ws)

    bank_name_list = ",".join(ws.name() for ws in renamed_banks)
    grouped_banks = mantid.GroupWorkspaces(bank_name_list)
    mantid.SaveNexus(InputWorkspace=grouped_banks,
                     Filename=output_path,
                     Append=False)
    # The group was only needed for a single SaveNexus call
    mantid.DeleteWorkspace(grouped_banks)
コード例 #2
0
    def _calculate_total_dos_with_scale(self, ions, frequencies, eigenvectors,
                                        weights):
        """
        Calculate the complete Density of States for all the ions of interest to the user with scaled intensities
        @param frequencies      :: frequency data from file
        @param eigenvectors     :: eigenvector data from file
        @param weights          :: weight data from file
        """
        # Group ion indices by species name
        all_ions = {
            species: [i['index'] for i in ions if i['species'] == species]
            for species in {i['species'] for i in ions}
        }

        partial_workspaces, sum_workspace = self._compute_partial_ion_workflow(
            all_ions, frequencies, eigenvectors, weights)

        # The per-ion partial workspaces are intermediate only — drop them
        for partial in partial_workspaces:
            s_api.DeleteWorkspace(partial)

        # The summed workspace becomes the algorithm's output
        s_api.RenameWorkspace(InputWorkspace=sum_workspace,
                              OutputWorkspace=self._out_ws_name)
コード例 #3
0
    def _filterEvents(self, run, ws_name):
        r"""Filter out ExcludeTimeSegment if applicable

        Parameters
        ----------
        run: str
            run number
        ws_name : str
            name of the workspace to filter
        """
        excluded_segments = self.getProperty("ExcludeTimeSegment").value
        for fragment in excluded_segments.split(';'):
            # Only the fragment belonging to this run (prefix "run:") applies
            if run + ':' not in fragment:
                continue
            self.generateSplitterWorkspace(fragment.split(':')[1])
            sapi.FilterEvents(InputWorkspace=ws_name,
                              SplitterWorkspace='splitter',
                              OutputWorkspaceBaseName='splitted',
                              GroupWorkspaces=True,
                              OutputWorkspaceIndexedFrom1=True,
                              RelativeTime=True)
            # Keep only the first split and put it back under the input name
            sapi.UnGroupWorkspace('splitted')
            sapi.RenameWorkspace(InputWorkspace='splitted_0',
                                 OutputWorkspace=ws_name)
            break
コード例 #4
0
    def edit_matrix_workspace(sq_name,
                              scale_factor,
                              shift,
                              edited_sq_name=None):
        """
        Edit the matrix workspace of S(Q) by scaling and shift
        :param sq_name: name of the SofQ workspace
        :param scale_factor: multiplicative factor applied to S(Q)
        :param shift: additive offset applied to S(Q)
        :param edited_sq_name: workspace name for the edited S(Q); if None,
            the input workspace is edited without renaming the result
        :return:
        """
        # get the workspace
        if not AnalysisDataService.doesExist(sq_name):
            raise RuntimeError(
                'S(Q) workspace {0} cannot be found in ADS.'.format(sq_name))

        if edited_sq_name is not None:
            simpleapi.CloneWorkspace(InputWorkspace=sq_name,
                                     OutputWorkspace=edited_sq_name)
            sq_ws = AnalysisDataService.retrieve(edited_sq_name)
        else:
            sq_ws = AnalysisDataService.retrieve(sq_name)

        # scale and shift; workspace arithmetic creates new workspaces, so
        # the result may no longer carry the requested output name
        sq_ws = sq_ws * scale_factor
        sq_ws = sq_ws + shift
        # BUGFIX: only rename when an output name was actually requested.
        # Previously, when edited_sq_name was None, the inequality was always
        # true and DeleteWorkspace/RenameWorkspace were called with None.
        if edited_sq_name is not None and sq_ws.name() != edited_sq_name:
            simpleapi.DeleteWorkspace(Workspace=edited_sq_name)
            simpleapi.RenameWorkspace(InputWorkspace=sq_ws,
                                      OutputWorkspace=edited_sq_name)

        assert sq_ws is not None, 'S(Q) workspace cannot be None.'
        print('[DB...BAT] S(Q) workspace that is edit is {0}'.format(sq_ws))

        return
コード例 #5
0
def generate_ts_pdf(run_number,
                    focus_file_path,
                    merge_banks=False,
                    q_lims=None,
                    cal_file_name=None,
                    sample_details=None,
                    delta_r=None,
                    delta_q=None,
                    pdf_type="G(r)",
                    lorch_filter=None,
                    freq_params=None,
                    debug=False):
    """Produce a total-scattering PDF from a focused POLARIS run.

    Loads the focused run, subtracts a self-scattering correction computed
    from the raw run, optionally merges banks over a Q range, applies a
    Fourier filter and transforms S(Q)-1 to the requested PDF type.

    :param run_number: run identifier used to load POLARIS<run>.nxs and to
        name output workspaces (concatenated with strings below — assumes it
        is a str; TODO confirm against callers)
    :param focus_file_path: path passed to _obtain_focused_run
    :param merge_banks: if True merge all banks into one spectrum over q_lims
    :param q_lims: Q limits consumed by _load_qlims when merging
    :param cal_file_name: calibration file for the self-scattering correction
    :param sample_details: object providing material_object.crystal_density
    :param delta_r: DeltaR passed to PDFFourierTransform
    :param delta_q: if set, rebin the focused data in Q with these params
    :param pdf_type: PDFType for PDFFourierTransform (default "G(r)")
    :param lorch_filter: Filter flag for PDFFourierTransform
    :param freq_params: parameters forwarded to fast_fourier_filter
    :param debug: if True, keep the intermediate self-scattering workspace
    :return: the PDF output workspace (or WorkspaceGroup)
    """
    focused_ws = _obtain_focused_run(run_number, focus_file_path)
    # Work in momentum transfer for the S(Q) manipulations below
    focused_ws = mantid.ConvertUnits(InputWorkspace=focused_ws,
                                     Target="MomentumTransfer",
                                     EMode='Elastic')

    # Raw run is needed to compute the per-detector self-scattering correction
    raw_ws = mantid.Load(Filename='POLARIS' + str(run_number) + '.nxs')
    sample_geometry = common.generate_sample_geometry(sample_details)
    sample_material = common.generate_sample_material(sample_details)
    self_scattering_correction = mantid.TotScatCalculateSelfScattering(
        InputWorkspace=raw_ws,
        CalFileName=cal_file_name,
        SampleGeometry=sample_geometry,
        SampleMaterial=sample_material,
        CrystalDensity=sample_details.material_object.crystal_density)

    # Split the correction into one workspace per spectrum so it can be
    # regrouped/rebinned to match the focused data layout
    ws_group_list = []
    for i in range(self_scattering_correction.getNumberHistograms()):
        ws_name = 'correction_' + str(i)
        mantid.ExtractSpectra(InputWorkspace=self_scattering_correction,
                              OutputWorkspace=ws_name,
                              WorkspaceIndexList=[i])
        ws_group_list.append(ws_name)
    self_scattering_correction = mantid.GroupWorkspaces(
        InputWorkspaces=ws_group_list)
    self_scattering_correction = mantid.RebinToWorkspace(
        WorkspaceToRebin=self_scattering_correction,
        WorkspaceToMatch=focused_ws)

    focused_ws = mantid.Subtract(LHSWorkspace=focused_ws,
                                 RHSWorkspace=self_scattering_correction)
    focused_ws -= 1  # This -1 to the correction has been moved out of CalculatePlaczekSelfScattering
    if delta_q:
        focused_ws = mantid.Rebin(InputWorkspace=focused_ws, Params=delta_q)
    if merge_banks:
        q_min, q_max = _load_qlims(q_lims)
        merged_ws = mantid.MatchAndMergeWorkspaces(InputWorkspaces=focused_ws,
                                                   XMin=q_min,
                                                   XMax=q_max,
                                                   CalculateScale=False)
        fast_fourier_filter(merged_ws, freq_params=freq_params)
        # NOTE(review): keyword is spelled 'Inputworkspace' (lower-case 'w');
        # the Mantid property is 'InputWorkspace' — confirm this resolves.
        # The string "merged_ws" relies on the ADS entry created by simpleapi
        # matching the local variable name.
        pdf_output = mantid.PDFFourierTransform(
            Inputworkspace="merged_ws",
            InputSofQType="S(Q)-1",
            PDFType=pdf_type,
            Filter=lorch_filter,
            DeltaR=delta_r,
            rho0=sample_details.material_object.crystal_density)
    else:
        # Filter each bank individually before transforming the whole group
        for ws in focused_ws:
            fast_fourier_filter(ws, freq_params=freq_params)
        pdf_output = mantid.PDFFourierTransform(
            Inputworkspace='focused_ws',
            InputSofQType="S(Q)-1",
            PDFType=pdf_type,
            Filter=lorch_filter,
            DeltaR=delta_r,
            rho0=sample_details.material_object.crystal_density)
        # NOTE(review): rebins the group to match its own 5th member —
        # presumably to force a common R grid across banks; confirm.
        pdf_output = mantid.RebinToWorkspace(WorkspaceToRebin=pdf_output,
                                             WorkspaceToMatch=pdf_output[4],
                                             PreserveEvents=True)
    if not debug:
        common.remove_intermediate_workspace('self_scattering_correction')
    # Rename output ws
    # NOTE(review): run_number is used here without str(); assumes it is
    # already a string — confirm, since str(run_number) was needed above.
    if 'merged_ws' in locals():
        mantid.RenameWorkspace(InputWorkspace='merged_ws',
                               OutputWorkspace=run_number + '_merged_Q')
    mantid.RenameWorkspace(InputWorkspace='focused_ws',
                           OutputWorkspace=run_number + '_focused_Q')
    target_focus_ws_name = run_number + '_focused_Q_'
    target_pdf_ws_name = run_number + '_pdf_R_'
    # Normalise member names of the focused group to <run>_focused_Q_<n>
    if isinstance(focused_ws, WorkspaceGroup):
        for i in range(len(focused_ws)):
            if str(focused_ws[i]) != (target_focus_ws_name + str(i + 1)):
                mantid.RenameWorkspace(InputWorkspace=focused_ws[i],
                                       OutputWorkspace=target_focus_ws_name +
                                       str(i + 1))
    mantid.RenameWorkspace(InputWorkspace='pdf_output',
                           OutputWorkspace=run_number + '_pdf_R')
    # Normalise member names of the PDF group to <run>_pdf_R_<n>
    if isinstance(pdf_output, WorkspaceGroup):
        for i in range(len(pdf_output)):
            if str(pdf_output[i]) != (target_pdf_ws_name + str(i + 1)):
                mantid.RenameWorkspace(InputWorkspace=pdf_output[i],
                                       OutputWorkspace=target_pdf_ws_name +
                                       str(i + 1))
    return pdf_output
コード例 #6
0
    def rebin_workspace(self, input_ws, binning_param_list, output_ws_name):
        """
        rebin input workspace with user specified binning parameters
        :param input_ws: workspace to rebin (histogram data)
        :param binning_param_list: list of Rebin parameter sequences, one per
            spectrum of input_ws; an entry of None leaves that spectrum
            un-rebinned. If the whole list is None the input is just cloned.
        :param output_ws_name: name for the output workspace in the ADS
        :return: output workspace registered under output_ws_name
        :raises RuntimeError: if a binning parameter sequence has an even
            number of values (Rebin requires min, step, max, ... — odd count)
        """
        if binning_param_list is None:
            # no re-binning is required: clone the output workspace
            output_workspace = api.CloneWorkspace(
                InputWorkspace=input_ws, OutputWorkspace=output_ws_name)

        else:
            # rebin each spectrum individually, then conjoin the results
            processed_single_spec_ws_list = list()
            for ws_index in range(input_ws.getNumberHistograms()):
                # rebin on each
                temp_out_name = output_ws_name + '_' + str(ws_index)
                processed_single_spec_ws_list.append(temp_out_name)
                # extract a spectrum out
                api.ExtractSpectra(input_ws,
                                   WorkspaceIndexList=[ws_index],
                                   OutputWorkspace=temp_out_name)
                # get binning parameter
                bin_params = binning_param_list[ws_index]
                if bin_params is None:
                    continue
                # check: an even number of values cannot be valid Rebin
                # binning parameters (they come as min, step, max, ...)
                if len(bin_params) % 2 == 0:
                    raise RuntimeError(
                        'Binning parameter {0} cannot be accepted.'.format(
                            bin_params))

                api.Rebin(InputWorkspace=temp_out_name,
                          OutputWorkspace=temp_out_name,
                          Params=bin_params,
                          PreserveEvents=True)
                rebinned_ws = AnalysisDataService.retrieve(temp_out_name)
                self.log().warning(
                    'Rebinned workspace Size(x) = {0}, Size(y) = {1}'.format(
                        len(rebinned_ws.readX(0)), len(rebinned_ws.readY(0))))

                # Upon this point, the workspace is still HistogramData.
                # Check whether it is necessary to reset the X-values to
                # the reference TOF from VDRIVE
                if len(bin_params) == 2 * len(rebinned_ws.readX(0)) - 1:
                    reset_bins = True
                else:
                    reset_bins = False

                # convert to point data
                api.ConvertToPointData(InputWorkspace=temp_out_name,
                                       OutputWorkspace=temp_out_name)
                # align the bin boundaries if necessary
                temp_out_ws = AnalysisDataService.retrieve(temp_out_name)

                if reset_bins:
                    # good to align: truncate each X to one decimal place of
                    # the reference TOF from the binning parameters
                    for tof_i in range(len(temp_out_ws.readX(0))):
                        temp_out_ws.dataX(0)[tof_i] = int(
                            bin_params[2 * tof_i] * 10) / 10.
                    # END-FOR (tof-i)
                # END-IF (align)
            # END-FOR

            # merge together: first spectrum becomes the output, the rest
            # are conjoined onto it in order
            api.RenameWorkspace(
                InputWorkspace=processed_single_spec_ws_list[0],
                OutputWorkspace=output_ws_name)
            for ws_index in range(1, len(processed_single_spec_ws_list)):
                api.ConjoinWorkspaces(
                    InputWorkspace1=output_ws_name,
                    InputWorkspace2=processed_single_spec_ws_list[ws_index])
            # END-FOR
            output_workspace = AnalysisDataService.retrieve(output_ws_name)
        # END-IF-ELSE

        return output_workspace
コード例 #7
0
 def read(self, number, panel, extension):
     """Load a run (or merged run pair) for a panel and normalise it.

     Loads the data, converts to wavelength, crops to the instrument
     wavelength range, normalises to the incident monitor (twice: plain and
     integrated), then converts back to TOF and cleans special values.

     :param number: run number (int) or a run-range string handled by
         split_run_string
     :param panel: panel number; 0 means the whole instrument
     :param extension: file extension / load mode (e.g. "nxs_event",
         "nxs_event_<label>_<tmin>_<tmax>")
     :return: name of the processed output workspace
     """
     if type(number) is int:
         filename = self.datafile
         logger.notice("will be reading filename...{}".format(filename))
         # NOTE(review): the tuple is unpacked as (spectra_min, spectra_max)
         # but passed below as (..., spectra_max, spectra_min, ...) — confirm
         # shared_load_files expects max before min.
         spectra_min, spectra_max = self.return_panel_van.get(panel) if self.is_vanadium else \
             self.return_panel.get(panel)
         if panel != 0:
             output = "w{0}-{1}".format(number, panel)
         else:
             output = "w{}".format(number)
         shared_load_files(extension, filename, output, spectra_max,
                           spectra_min, False)
         if extension == "nxs_event":
             simple.LoadEventNexus(Filename=filename,
                                   OutputWorkspace=output,
                                   LoadMonitors='1')
             self.read_event_nexus(number, output, panel)
         # Event data with a time window encoded in the extension string
         if extension[:10] == "nxs_event_":
             label, tmin, tmax = split_string_event(extension)
             output = output + "_" + label
             if tmax == "end":
                 # Open-ended window: filter from tmin to the end of the run
                 simple.LoadEventNexus(Filename=filename,
                                       OutputWorkspace=output,
                                       FilterByTimeStart=tmin,
                                       LoadMonitors='1',
                                       MonitorsAsEvents='1',
                                       FilterMonByTimeStart=tmin)
             else:
                 simple.LoadEventNexus(Filename=filename,
                                       OutputWorkspace=output,
                                       FilterByTimeStart=tmin,
                                       FilterByTimeStop=tmax,
                                       LoadMonitors='1',
                                       MonitorsAsEvents='1',
                                       FilterMonByTimeStart=tmin,
                                       FilterMonByTimeStop=tmax)
             self.read_event_nexus(number, output, panel)
     else:
         # Run-range string: load both parts and merge them
         num_1, num_2 = split_run_string(number)
         output = "w{0}_{1}-{2}".format(num_1, num_2, panel)
         output1 = self.load_multi_run_part(extension, num_1, panel)
         output2 = self.load_multi_run_part(extension, num_2, panel)
         simple.MergeRuns(output1 + "," + output2, output)
         simple.DeleteWorkspace(output1)
         simple.DeleteWorkspace(output2)
     simple.ConvertUnits(InputWorkspace=output,
                         OutputWorkspace=output,
                         Target="Wavelength",
                         Emode="Elastic")
     lmin, lmax = Wish.LAMBDA_RANGE
     simple.CropWorkspace(InputWorkspace=output,
                          OutputWorkspace=output,
                          XMin=lmin,
                          XMax=lmax)
     # Reuse a cached monitor workspace for this run if one exists
     monitor_run = "monitor{}".format(number)
     if monitor_run not in simple.mtd:
         monitor = self.process_incidentmon(number,
                                            extension,
                                            spline_terms=70)
     else:
         monitor = simple.mtd[monitor_run]
     simple.NormaliseToMonitor(InputWorkspace=output,
                               OutputWorkspace=output + "norm1",
                               MonitorWorkspace=monitor)
     # Second pass normalises by the monitor integrated over 0.7-10.35 A
     simple.NormaliseToMonitor(InputWorkspace=output + "norm1",
                               OutputWorkspace=output + "norm2",
                               MonitorWorkspace=monitor,
                               IntegrationRangeMin=0.7,
                               IntegrationRangeMax=10.35)
     # Keep only the final normalised workspace under the original name
     simple.DeleteWorkspace(output)
     simple.DeleteWorkspace(output + "norm1")
     simple.RenameWorkspace(InputWorkspace=output + "norm2",
                            OutputWorkspace=output)
     simple.ConvertUnits(InputWorkspace=output,
                         OutputWorkspace=output,
                         Target="TOF",
                         EMode="Elastic")
     # Replace NaN/Inf produced by the normalisation with zeros
     simple.ReplaceSpecialValues(InputWorkspace=output,
                                 OutputWorkspace=output,
                                 NaNValue=0.0,
                                 NaNError=0.0,
                                 InfinityValue=0.0,
                                 InfinityError=0.0)
     return output
コード例 #8
0
def generate_ts_pdf(run_number,
                    focus_file_path,
                    sample_details,
                    merge_banks=False,
                    q_lims=None,
                    cal_file_name=None,
                    delta_r=None,
                    delta_q=None,
                    pdf_type="G(r)",
                    lorch_filter=None,
                    freq_params=None,
                    debug=False):
    """Produce a total-scattering PDF from a focused POLARIS run.

    Subtracts a self-scattering correction computed from the raw run,
    converts the differential cross-section to S(Q)-1 using the sample's
    scattering cross-sections, optionally merges banks over a Q range,
    applies a Fourier filter and transforms to the requested PDF type.

    :param run_number: run identifier used to load POLARIS<run> and to name
        output workspaces (concatenated with strings below — assumes str;
        TODO confirm against callers)
    :param focus_file_path: path passed to _obtain_focused_run
    :param sample_details: SampleDetails-like object; must not be None and
        must provide generate_sample_geometry/material and material_object
    :param merge_banks: if True merge all banks into one spectrum over q_lims
    :param q_lims: Q limits consumed by _load_qlims when merging
    :param cal_file_name: calibration file for the self-scattering correction
    :param delta_r: DeltaR passed to PDFFourierTransform
    :param delta_q: if set, rebin the focused data in Q with these params
    :param pdf_type: PDFType for PDFFourierTransform (default "G(r)")
    :param lorch_filter: Filter flag for PDFFourierTransform
    :param freq_params: parameters forwarded to fast_fourier_filter
    :param debug: if True, keep intermediate workspaces (correction, clones)
    :return: the PDF output workspace (or WorkspaceGroup)
    :raises RuntimeError: if sample_details is None or the correction is
        incompatible with the focused data
    """
    if sample_details is None:
        raise RuntimeError(
            "A SampleDetails object was not set. Please create a SampleDetails object and set the "
            "relevant properties it. Then set the new sample by calling set_sample_details()"
        )
    focused_ws = _obtain_focused_run(run_number, focus_file_path)
    # Work in momentum transfer for the S(Q) manipulations below
    focused_ws = mantid.ConvertUnits(InputWorkspace=focused_ws,
                                     Target="MomentumTransfer",
                                     EMode='Elastic')

    # Raw run is needed to compute the per-detector self-scattering correction
    raw_ws = mantid.Load(Filename='POLARIS' + str(run_number))
    sample_geometry_json = sample_details.generate_sample_geometry()
    sample_material_json = sample_details.generate_sample_material()

    self_scattering_correction = mantid.TotScatCalculateSelfScattering(
        InputWorkspace=raw_ws,
        CalFileName=cal_file_name,
        SampleGeometry=sample_geometry_json,
        SampleMaterial=sample_material_json)

    # Split the correction into one workspace per spectrum so it can be
    # regrouped/rebinned to match the focused data layout
    ws_group_list = []
    for i in range(self_scattering_correction.getNumberHistograms()):
        ws_name = 'correction_' + str(i)
        mantid.ExtractSpectra(InputWorkspace=self_scattering_correction,
                              OutputWorkspace=ws_name,
                              WorkspaceIndexList=[i])
        ws_group_list.append(ws_name)
    self_scattering_correction = mantid.GroupWorkspaces(
        InputWorkspaces=ws_group_list)
    self_scattering_correction = mantid.RebinToWorkspace(
        WorkspaceToRebin=self_scattering_correction,
        WorkspaceToMatch=focused_ws)
    if not compare_ws_compatibility(focused_ws, self_scattering_correction):
        raise RuntimeError(
            "To use create_total_scattering_pdf you need to run focus with "
            "do_van_normalisation=true first.")
    focused_ws = mantid.Subtract(LHSWorkspace=focused_ws,
                                 RHSWorkspace=self_scattering_correction)
    if debug:
        # Keep a snapshot of the correction-subtracted cross-section
        dcs_corrected = mantid.CloneWorkspace(InputWorkspace=focused_ws)

    # convert diff cross section to S(Q) - 1
    material_builder = MaterialBuilder()
    sample = material_builder.setFormula(
        sample_details.material_object.chemical_formula).build()
    sample_total_scatter_cross_section = sample.totalScatterXSection()
    sample_coh_scatter_cross_section = sample.cohScatterXSection()
    # S(Q)-1 = (dcs - sigma_tot/4pi) * 4pi / sigma_coh
    focused_ws = focused_ws - sample_total_scatter_cross_section / (4 *
                                                                    math.pi)
    focused_ws = focused_ws * 4 * math.pi / sample_coh_scatter_cross_section
    if debug:
        # Keep a snapshot of S(Q)-1 before filtering/transforming
        s_of_q_minus_one = mantid.CloneWorkspace(InputWorkspace=focused_ws)

    if delta_q:
        focused_ws = mantid.Rebin(InputWorkspace=focused_ws, Params=delta_q)
    if merge_banks:
        q_min, q_max = _load_qlims(q_lims)
        merged_ws = mantid.MatchAndMergeWorkspaces(InputWorkspaces=focused_ws,
                                                   XMin=q_min,
                                                   XMax=q_max,
                                                   CalculateScale=False)
        fast_fourier_filter(merged_ws,
                            rho0=sample_details.material_object.number_density,
                            freq_params=freq_params)
        # NOTE(review): keyword is spelled 'Inputworkspace' (lower-case 'w');
        # the Mantid property is 'InputWorkspace' — confirm this resolves.
        # The string "merged_ws" relies on the ADS entry created by simpleapi
        # matching the local variable name.
        pdf_output = mantid.PDFFourierTransform(
            Inputworkspace="merged_ws",
            InputSofQType="S(Q)-1",
            PDFType=pdf_type,
            Filter=lorch_filter,
            DeltaR=delta_r,
            rho0=sample_details.material_object.number_density)
    else:
        # Filter each bank individually before transforming the whole group
        for ws in focused_ws:
            fast_fourier_filter(
                ws,
                rho0=sample_details.material_object.number_density,
                freq_params=freq_params)
        pdf_output = mantid.PDFFourierTransform(
            Inputworkspace='focused_ws',
            InputSofQType="S(Q)-1",
            PDFType=pdf_type,
            Filter=lorch_filter,
            DeltaR=delta_r,
            rho0=sample_details.material_object.number_density)
        # NOTE(review): rebins the group to match its own 5th member —
        # presumably to force a common R grid across banks; confirm.
        pdf_output = mantid.RebinToWorkspace(WorkspaceToRebin=pdf_output,
                                             WorkspaceToMatch=pdf_output[4],
                                             PreserveEvents=True)
    if not debug:
        common.remove_intermediate_workspace('self_scattering_correction')
    # Rename output ws
    # NOTE(review): run_number is used here without str(); assumes it is
    # already a string — confirm, since str(run_number) was needed above.
    if 'merged_ws' in locals():
        mantid.RenameWorkspace(InputWorkspace='merged_ws',
                               OutputWorkspace=run_number + '_merged_Q')
    mantid.RenameWorkspace(InputWorkspace='focused_ws',
                           OutputWorkspace=run_number + '_focused_Q')
    target_focus_ws_name = run_number + '_focused_Q_'
    target_pdf_ws_name = run_number + '_pdf_R_'
    # Normalise member names of the focused group to <run>_focused_Q_<n>
    if isinstance(focused_ws, WorkspaceGroup):
        for i in range(len(focused_ws)):
            if str(focused_ws[i]) != (target_focus_ws_name + str(i + 1)):
                mantid.RenameWorkspace(InputWorkspace=focused_ws[i],
                                       OutputWorkspace=target_focus_ws_name +
                                       str(i + 1))
    mantid.RenameWorkspace(InputWorkspace='pdf_output',
                           OutputWorkspace=run_number + '_pdf_R')
    # Normalise member names of the PDF group to <run>_pdf_R_<n>
    if isinstance(pdf_output, WorkspaceGroup):
        for i in range(len(pdf_output)):
            if str(pdf_output[i]) != (target_pdf_ws_name + str(i + 1)):
                mantid.RenameWorkspace(InputWorkspace=pdf_output[i],
                                       OutputWorkspace=target_pdf_ws_name +
                                       str(i + 1))
    return pdf_output
コード例 #9
0
File: commands.py  Project: tomgriffinstfc/mantid
def fit_tof_iteration(sample_data, container_data, runs, flags):
    """
    Performs a single iterations of the time of flight corrections and fitting
    workflow.

    :param sample_data: Loaded sample data workspaces
    :param container_data: Loaded container data workspaces
    :param runs: A string specifying the runs to process
    :param flags: A dictionary of flags to control the processing
    :return: Tuple of (workspace group name, pre correction fit parameters,
             final fit parameters, chi^2 values)
    """
    # Transform inputs into something the algorithm can understand
    # Per-spectrum mass profiles when flags['masses'][0] is itself a list;
    # otherwise one shared profile string for all spectra.
    if isinstance(flags['masses'][0], list):
        mass_values = _create_profile_strs_and_mass_list(
            copy.deepcopy(flags['masses'][0]))[0]
        profiles_strs = []
        for mass_spec in flags['masses']:
            profiles_strs.append(
                _create_profile_strs_and_mass_list(mass_spec)[1])
    else:
        mass_values, profiles_strs = _create_profile_strs_and_mass_list(
            flags['masses'])
    background_str = _create_background_str(flags.get('background', None))
    intensity_constraints = _create_intensity_constraint_str(
        flags['intensity_constraints'])
    ties = _create_user_defined_ties_str(flags['masses'])

    num_spec = sample_data.getNumberHistograms()
    # Parameter tables are created lazily on the first spectrum
    pre_correct_pars_workspace = None
    pars_workspace = None
    fit_workspace = None
    max_fit_iterations = flags.get('max_fit_iterations', 5000)

    output_groups = []
    chi2_values = []
    data_workspaces = []
    result_workspaces = []
    group_name = runs + '_result'
    for index in range(num_spec):
        if isinstance(profiles_strs, list):
            profiles = profiles_strs[index]
        else:
            profiles = profiles_strs

        suffix = _create_fit_workspace_suffix(index, sample_data,
                                              flags['fit_mode'],
                                              flags['spectra'],
                                              flags.get('iteration', None))

        # Corrections
        corrections_args = dict()

        # Need to do a fit first to obtain the parameter table
        pre_correction_pars_name = runs + "_params_pre_correction" + suffix
        corrections_fit_name = "__vesuvio_corrections_fit"
        ms.VesuvioTOFFit(InputWorkspace=sample_data,
                         WorkspaceIndex=index,
                         Masses=mass_values,
                         MassProfiles=profiles,
                         Background=background_str,
                         IntensityConstraints=intensity_constraints,
                         Ties=ties,
                         OutputWorkspace=corrections_fit_name,
                         FitParameters=pre_correction_pars_name,
                         MaxIterations=max_fit_iterations,
                         Minimizer=flags['fit_minimizer'])
        ms.DeleteWorkspace(corrections_fit_name)
        corrections_args['FitParameters'] = pre_correction_pars_name

        # Add the multiple scattering arguments
        corrections_args.update(flags['ms_flags'])

        corrected_data_name = runs + "_tof_corrected" + suffix
        linear_correction_fit_params_name = runs + "_correction_fit_scale" + suffix

        if flags.get('output_verbose_corrections', False):
            corrections_args[
                "CorrectionWorkspaces"] = runs + "_correction" + suffix
            corrections_args[
                "CorrectedWorkspaces"] = runs + "_corrected" + suffix

        if container_data is not None:
            corrections_args["ContainerWorkspace"] = container_data

        ms.VesuvioCorrections(
            InputWorkspace=sample_data,
            OutputWorkspace=corrected_data_name,
            LinearFitResult=linear_correction_fit_params_name,
            WorkspaceIndex=index,
            GammaBackground=flags.get('gamma_correct', False),
            Masses=mass_values,
            MassProfiles=profiles,
            IntensityConstraints=intensity_constraints,
            MultipleScattering=True,
            GammaBackgroundScale=flags.get('fixed_gamma_scaling', 0.0),
            ContainerScale=flags.get('fixed_container_scaling', 0.0),
            **corrections_args)

        # Final fit
        fit_ws_name = runs + "_data" + suffix
        pars_name = runs + "_params" + suffix
        fit_result = ms.VesuvioTOFFit(
            InputWorkspace=corrected_data_name,
            WorkspaceIndex=0,
            Masses=mass_values,
            MassProfiles=profiles,
            Background=background_str,
            IntensityConstraints=intensity_constraints,
            Ties=ties,
            OutputWorkspace=fit_ws_name,
            FitParameters=pars_name,
            MaxIterations=max_fit_iterations,
            Minimizer=flags['fit_minimizer'])
        # Last element of the fit result tuple is the chi^2 value
        chi2_values.append(fit_result[-1])
        ms.DeleteWorkspace(corrected_data_name)

        # Process parameter tables
        if pre_correct_pars_workspace is None:
            pre_correct_pars_workspace = _create_param_workspace(
                num_spec, mtd[pre_correction_pars_name])

        if pars_workspace is None:
            pars_workspace = _create_param_workspace(num_spec, mtd[pars_name])

        if fit_workspace is None:
            fit_workspace = _create_param_workspace(
                num_spec, mtd[linear_correction_fit_params_name])

        spec_num_str = str(sample_data.getSpectrum(index).getSpectrumNo())
        current_spec = 'spectrum_' + spec_num_str

        _update_fit_params(pre_correct_pars_workspace, index,
                           mtd[pre_correction_pars_name], current_spec)
        _update_fit_params(pars_workspace, index, mtd[pars_name], current_spec)

        _update_fit_params(fit_workspace, index,
                           mtd[linear_correction_fit_params_name],
                           current_spec)

        ms.DeleteWorkspace(pre_correction_pars_name)
        ms.DeleteWorkspace(pars_name)
        ms.DeleteWorkspace(linear_correction_fit_params_name)

        # Process spectrum group
        # Note the ordering of operations here gives the order in the WorkspaceGroup
        output_workspaces = []
        data_workspaces.append(fit_ws_name)
        if flags.get('output_verbose_corrections', False):
            output_workspaces += mtd[
                corrections_args["CorrectionWorkspaces"]].getNames()
            output_workspaces += mtd[
                corrections_args["CorrectedWorkspaces"]].getNames()
            ms.UnGroupWorkspace(corrections_args["CorrectionWorkspaces"])
            ms.UnGroupWorkspace(corrections_args["CorrectedWorkspaces"])

            for workspace in output_workspaces:

                # NOTE(review): this reassigns group_name, which is later used
                # for the final GroupWorkspaces call outside the loop —
                # confirm the '_iteration_<n>' name is the intended group name
                # when verbose corrections are enabled.
                group_name = runs + '_iteration_' + str(
                    flags.get('iteration', None))
                name = group_name + '_' + workspace.split(
                    '_')[1] + '_' + workspace.split('_')[-1]
                result_workspaces.append(name)
                # First spectrum creates the output; later spectra are
                # conjoined onto it
                if index == 0:
                    ms.RenameWorkspace(InputWorkspace=workspace,
                                       OutputWorkspace=name)
                else:
                    ms.ConjoinWorkspaces(InputWorkspace1=name,
                                         InputWorkspace2=workspace)

        # Output the parameter workspaces
        # NOTE(review): this block runs on every spectrum iteration with the
        # same names — each pass replaces the previous entry; presumably
        # harmless but could sit after the loop. Confirm before moving.
        params_pre_corr = runs + "_params_pre_correction_iteration_" + str(
            flags['iteration'])
        params_name = runs + "_params_iteration_" + str(flags['iteration'])
        fit_name = runs + "_correction_fit_scale_iteration_" + str(
            flags['iteration'])
        AnalysisDataService.Instance().addOrReplace(
            params_pre_corr, pre_correct_pars_workspace)
        AnalysisDataService.Instance().addOrReplace(params_name,
                                                    pars_workspace)
        AnalysisDataService.Instance().addOrReplace(fit_name, fit_workspace)

    if result_workspaces:
        output_groups.append(
            ms.GroupWorkspaces(InputWorkspaces=result_workspaces,
                               OutputWorkspace=group_name))

    if data_workspaces:
        output_groups.append(
            ms.GroupWorkspaces(InputWorkspaces=data_workspaces,
                               OutputWorkspace=group_name + '_data'))
    else:
        output_groups.append(fit_ws_name)

    # Single group collapses to a scalar result for backwards compatibility
    if len(output_groups) > 1:
        result_ws = output_groups
    else:
        result_ws = output_groups[0]

    return result_ws, pre_correct_pars_workspace, pars_workspace, chi2_values
コード例 #10
0
    def PyExec(self):
        """Sort the spectra of the input workspaces by the norm of q.

        Reads the "qvectors" workspace from the input list to establish a
        sort order (by the value returned from GetXValue per spectrum), then
        rebuilds every input workspace with its spectra in that order,
        renumbering spectra to int(norm * 1000).
        """
        # get parameter values
        wsString = self.getPropertyValue("InputWorkspace").strip()
        #internal values
        wsOutput = "__OutputWorkspace"
        wsTemp = "__Sort_temp"
        #get the workspace list
        wsNames = []
        for wsName in wsString.split(","):
            ws = mtd[wsName.strip()]
            # Flatten groups into their member workspace names
            if type(ws) == WorkspaceGroup:
                wsNames.extend(ws.getNames())
            else:
                wsNames.append(wsName)

        if wsOutput in mtd:
            ms.DeleteWorkspace(Workspace=wsOutput)
        sortStat = []
        for wsName in wsNames:
            if "qvectors" in wsName:
                #extract the spectrum
                ws = mtd[wsName.strip()]
                for s in range(0, ws.getNumberHistograms()):
                    y_s = ws.readY(s)
                    # (sort key, original spectrum index) pairs
                    stuple = (self.GetXValue(y_s), s)
                    sortStat.append(stuple)
                sortStat.sort()
        if len(sortStat) == 0:
            raise RuntimeError("Cannot find file with qvectors, aborting")
        #sort spectra using norm of q
        for wsName in wsNames:
            ws = mtd[wsName.strip()]
            yUnit = ws.getAxis(1).getUnit().unitID()
            transposed = False
            # Transpose when spectra count is smaller than the sort list so
            # the sorted axis lines up; transposed back at the end
            if ws.getNumberHistograms() < len(sortStat):
                ms.Transpose(InputWorkspace=wsName, OutputWorkspace=wsName)
                transposed = True
            for norm, spec in sortStat:
                ms.ExtractSingleSpectrum(InputWorkspace=wsName,
                                         OutputWorkspace=wsTemp,
                                         WorkspaceIndex=spec)
                # First extracted spectrum seeds the output; subsequent ones
                # are conjoined in sorted order
                if wsOutput in mtd:
                    ms.ConjoinWorkspaces(InputWorkspace1=wsOutput,
                                         InputWorkspace2=wsTemp,
                                         CheckOverlapping=False)
                    if wsTemp in mtd:
                        ms.DeleteWorkspace(Workspace=wsTemp)
                else:
                    ms.RenameWorkspace(InputWorkspace=wsTemp,
                                       OutputWorkspace=wsOutput)

            #put norm as y value and copy units from input
            loopIndex = 0
            wsOut = mtd[wsOutput]
            for norm, spec in sortStat:
                wsOut.getSpectrum(loopIndex).setSpectrumNo(int(norm * 1000))
                loopIndex = loopIndex + 1
            if len(yUnit) > 0:
                wsOut.getAxis(1).setUnit(yUnit)
            if transposed:
                ms.Transpose(InputWorkspace=wsOutput, OutputWorkspace=wsOutput)
            # Replace the original workspace with its sorted version
            ms.RenameWorkspace(InputWorkspace=wsOutput, OutputWorkspace=wsName)