Example 1
    def PyExec(self):
        # Progress reporter for algorithm initialization
        prog_reporter = Progress(self, start=0.0, end=0.1, nreports=4)

        raw_xvals, raw_yvals, raw_error, error_ws = self.load_data(
            prog_reporter)

        # Convert the data to point data
        prog_reporter.report('Converting to point data')
        raw_data_ws = ConvertToPointData(error_ws, StoreInADS=False)
        raw_xvals = raw_data_ws.readX(0).copy()
        raw_yvals = raw_data_ws.readY(0).copy()

        raw_xvals, raw_yvals, raw_error = self.crop_data(
            raw_xvals, raw_yvals, raw_error, prog_reporter)

        # Find the best peaks
        (peakids, peak_table, refit_peak_table), baseline = self.process(
            raw_xvals,
            raw_yvals,
            raw_error,
            acceptance=self._acceptance,
            average_window=self._smooth_window,
            bad_peak_to_consider=self._bad_peak_to_consider,
            use_poisson=self._use_poisson_cost,
            peak_width_estimate=self._estimate_peak_sigma,
            fit_to_baseline=self._fit_to_baseline,
            prog_reporter=prog_reporter)

        if self._plot_peaks:
            self.plot_peaks(raw_xvals, raw_yvals, baseline, peakids)

        self.set_output_properties(peak_table, refit_peak_table)
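
The conversion step above is the core of every example on this page: a histogram workspace stores N+1 bin edges per spectrum, while point data stores the N bin midpoints, so after conversion readX and readY have the same length. A minimal, self-contained sketch (illustrative only, assuming a running Mantid Python session):

    from mantid.simpleapi import CreateWorkspace, ConvertToPointData

    # histogram data: 4 bin edges for 3 counts, so len(X) == len(Y) + 1
    hist_ws = CreateWorkspace(DataX=[0.0, 1.0, 2.0, 3.0], DataY=[10.0, 20.0, 30.0],
                              NSpec=1, UnitX='Wavelength', StoreInADS=False)
    point_ws = ConvertToPointData(InputWorkspace=hist_ws, StoreInADS=False)
    print(point_ws.readX(0))  # [0.5 1.5 2.5] -- bin centres, now len(X) == len(Y)
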
Example 2
 def _toPointData(self, ws):
     """Convert ws from binned to point data."""
     pointWSName = self._names.withSuffix('as_points')
     pointWS = ConvertToPointData(InputWorkspace=ws,
                                  OutputWorkspace=pointWSName,
                                  EnableLogging=self._subalgLogging)
     pointWS.setDx(0, ws.readDx(0))
     self._cleanup.cleanup(ws)
     return pointWS
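
Note the setDx call: this method re-attaches the X uncertainties by hand after the conversion, which suggests ConvertToPointData does not propagate them on the Mantid version it targets. A minimal sketch of the same pattern, with illustrative values:

    from mantid.simpleapi import CreateWorkspace, ConvertToPointData

    ws = CreateWorkspace(DataX=[0.0, 1.0, 2.0], DataY=[5.0, 6.0], Dx=[0.1, 0.2],
                         NSpec=1, StoreInADS=False)
    point_ws = ConvertToPointData(InputWorkspace=ws, StoreInADS=False)
    point_ws.setDx(0, ws.readDx(0))  # carry the X errors over, as in the method above
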
Example 3
    def testCalculateEfficiencyCorrectionAlphaForEventWksp(self):
        self._input_wksp = "input_wksp"
        self._correction_wksp = "correction_wksp"
        self._output_wksp = "output_wksp"

        # Create an exponentially decaying function in wavelength to simulate
        # measured sample
        CreateSampleWorkspace(
            WorkspaceType="Event",
            Function="User Defined",
            UserDefinedFunction="name=ExpDecay, Height=100, Lifetime=4",
            Xmin=0.2,
            Xmax=4.0,
            BinWidth=0.01,
            XUnit="Wavelength",
            NumEvents=10000,
            NumBanks=1,
            BankPixelWidth=1,
            OutputWorkspace=self._input_wksp)

        # Calculate the efficiency correction
        alg_test = run_algorithm("CalculateEfficiencyCorrection",
                                 InputWorkspace=self._input_wksp,
                                 Alpha=self._alpha,
                                 OutputWorkspace=self._correction_wksp)
        self.assertTrue(alg_test.isExecuted())
        ConvertToPointData(InputWorkspace=self._input_wksp,
                           OutputWorkspace=self._input_wksp)
        self.checkResults(eventCheck=True)
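
run_algorithm here appears to come from Mantid's testhelpers module; it returns the executed algorithm object so the test can assert on isExecuted(). Outside a test harness the same call is just the simpleapi function. A hedged equivalent, where the Alpha value is a placeholder since self._alpha is defined elsewhere in the test class:

    from mantid.simpleapi import CalculateEfficiencyCorrection

    correction = CalculateEfficiencyCorrection(InputWorkspace='input_wksp',
                                               Alpha=0.693,  # placeholder value
                                               OutputWorkspace='correction_wksp')
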
Example 4
    def create_merged_workspace(self, workspace_list):
        if workspace_list:
            # get max number of bins and max X range
            max_num_bins = 0
            for ws_name in workspace_list:
                if ws_name:
                    ws = AnalysisDataService.retrieve(ws_name)
                    max_num_bins = max(ws.blocksize(), max_num_bins)

            # create single ws for the merged data, use original ws as a template
            template_ws = next(ws for ws in workspace_list if ws is not None)
            merged_ws = WorkspaceFactory.create(
                AnalysisDataService.retrieve(template_ws),
                NVectors=NUM_FILES_PER_DETECTOR,
                XLength=max_num_bins,
                YLength=max_num_bins)

            # create a merged workspace based on every entry from workspace list
            for i in range(0, NUM_FILES_PER_DETECTOR):
                # load in ws - first check workspace exists
                if workspace_list[i]:
                    ws = AnalysisDataService.retrieve(workspace_list[i])
                    # check if histogram data, and convert if necessary
                    if ws.isHistogramData():
                        ws = ConvertToPointData(InputWorkspace=ws.name(),
                                                OutputWorkspace=ws.name())
                    # find max x val
                    max_x = np.max(ws.readX(0))
                    # get current number of bins
                    num_bins = ws.blocksize()
                    # pad bins
                    X_padded = np.empty(max_num_bins)
                    X_padded.fill(max_x)
                    X_padded[:num_bins] = ws.readX(0)
                    Y_padded = np.zeros(max_num_bins)
                    Y_padded[:num_bins] = ws.readY(0)
                    E_padded = np.zeros(max_num_bins)
                    E_padded[:num_bins] = ws.readE(0)

                    # set row of merged workspace
                    merged_ws.setX(i, X_padded)
                    merged_ws.setY(i, Y_padded)
                    merged_ws.setE(i, E_padded)

                    # set y axis labels
                    self.set_y_axis_labels(merged_ws, SPECTRUM_INDEX)

                    # remove workspace from ADS
                    DeleteWorkspace(ws)

            return merged_ws
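
The padding logic in create_merged_workspace can be checked in isolation with plain numpy: each spectrum is right-padded to max_num_bins, with X padded by repeating that spectrum's own maximum X value and Y/E padded with zeros. A standalone sketch:

    import numpy as np

    max_num_bins = 5
    x = np.array([0.0, 1.0, 2.0])       # a spectrum with only 3 points
    y = np.array([10.0, 20.0, 30.0])

    X_padded = np.empty(max_num_bins)
    X_padded.fill(np.max(x))            # pad X with the spectrum's own max X
    X_padded[:x.size] = x
    Y_padded = np.zeros(max_num_bins)   # pad Y (and E) with zeros
    Y_padded[:y.size] = y
    print(X_padded)                     # [0. 1. 2. 2. 2.]
    print(Y_padded)                     # [10. 20. 30.  0.  0.]
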
Example 5
 def _toPointData(self, ws, extraLabel=''):
     """Convert ws from binned to point data."""
     pointWSName = self._names.withSuffix(extraLabel + 'as_points')
     pointWS = ConvertToPointData(InputWorkspace=ws,
                                  OutputWorkspace=pointWSName,
                                  EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(ws)
     return pointWS
Example 6
def GetIncidentSpectrumFromMonitor(Filename,
                                   OutputWorkspace="IncidentWorkspace",
                                   IncidentIndex=0,
                                   TransmissionIndex=1,
                                   Binning=".1,6000,2.9",
                                   BinType="ResampleX"):

    # -------------------------------------------------
    # Joerg's read_bm.pro code

    # Loop workspaces to get each incident spectrum
    monitor = 'monitor'
    LoadNexusMonitors(Filename=Filename, OutputWorkspace=monitor)
    ConvertUnits(InputWorkspace=monitor,
                 OutputWorkspace=monitor,
                 Target='Wavelength',
                 EMode='Elastic')
    lambdaMin, lambdaBinning, lambdaMax = [
        float(x) for x in Binning.split(',')
    ]
    for x in [lambdaMin, lambdaBinning, lambdaMax]:
        print(x, type(x))
    if BinType == 'ResampleX':
        ResampleX(
            InputWorkspace=monitor,
            OutputWorkspace=monitor,
            XMin=[lambdaMin],  # TODO change ResampleX
            XMax=[lambdaMax],
            NumberBins=abs(int(lambdaBinning)),
            LogBinning=(int(lambdaBinning) < 0),
            PreserveEvents=True)
    elif BinType == 'Rebin':
        Rebin(InputWorkspace=monitor,
              OutputWorkspace=monitor,
              Params=[lambdaMin, lambdaBinning, lambdaMax],
              PreserveEvents=True)
    ConvertToPointData(InputWorkspace=monitor, OutputWorkspace=monitor)

    lam = mtd[monitor].readX(IncidentIndex)  # wavelength in A
    bm = mtd[monitor].readY(IncidentIndex)  # neutron counts / microsecond
    p = 0.000794807  # Pressure
    thickness = .1  # 1 mm = .1 cm
    abs_xs_3He = 5333.0  # barns for lambda == 1.798 A
    p_to_rho = 2.43e-5  # pressure to rho (atoms/angstroms^3)
    # p is set to give efficiency of 1.03 10^-5 at 1.8 A
    e0 = abs_xs_3He * lam / 1.798 * p_to_rho * p * thickness
    print('Efficiency:', 1. - np.exp(-e0))
    bmeff = bm / (1. - np.exp(-e0))  # neutron counts / microsecond
    print(bmeff)
    # bmeff = bmeff / constants.micro      # neutron counts / second

    CreateWorkspace(DataX=lam,
                    DataY=bmeff,
                    OutputWorkspace=OutputWorkspace,
                    UnitX='Wavelength')
    mtd[OutputWorkspace].setYUnit('Counts')
    return mtd[OutputWorkspace]
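
The efficiency factor above follows from the 1/v law for 3He absorption: the cross section scales linearly with wavelength from its tabulated value at 1.798 Å, so e0 = σ_abs(1.798 Å) · (λ/1.798) · ρ · t with ρ = p_to_rho · p. A standalone check of the number quoted in the comment, with the values copied from the function:

    import numpy as np

    p = 0.000794807      # pressure
    thickness = 0.1      # cm (1 mm)
    abs_xs_3He = 5333.0  # barns at 1.798 angstroms
    p_to_rho = 2.43e-5   # pressure to number density (atoms/angstrom^3)

    lam = 1.8            # angstroms
    e0 = abs_xs_3He * lam / 1.798 * p_to_rho * p * thickness
    print(1.0 - np.exp(-e0))  # ~1.03e-5, matching the comment in the source
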
Example 7
    def PyExec(self):
        self._setup()
        if not self.getProperty("InputWorkspace").isDefault:
            self._output_ws = CloneWorkspace(InputWorkspace=self._input_ws,
                                             OutputWorkspace=self._output_ws,
                                             StoreInADS=False)
        else:
            self._output_ws = CreateWorkspace(NSpec=1, DataX=[0], DataY=[0],
                                              UnitX='Wavelength', Distribution=False,
                                              StoreInADS=False)
            self._output_ws = Rebin(InputWorkspace=self._output_ws,
                                    Params=self._bin_params,
                                    StoreInADS=False)

        if self._output_ws.isDistribution():
            ConvertFromDistribution(Workspace=self._output_ws,
                                    StoreInADS=False)

        self._output_ws = ConvertToPointData(InputWorkspace=self._output_ws,
                                             StoreInADS=False)
        self._output_ws = ConvertUnits(InputWorkspace=self._output_ws,
                                       Target='Wavelength',
                                       EMode='Elastic',
                                       StoreInADS=False)

        if self.getProperty('Alpha').isDefault:
            if self._density_type == 'Mass Density':
                SetSampleMaterial(
                    InputWorkspace=self._output_ws,
                    ChemicalFormula=self._chemical_formula,
                    SampleMassDensity=self._density,
                    StoreInADS=False)
                self._density = self._output_ws.sample().getMaterial().numberDensityEffective
            elif self._density_type == 'Number Density':
                SetSampleMaterial(
                    InputWorkspace=self._output_ws,
                    ChemicalFormula=self._chemical_formula,
                    SampleNumberDensity=self._density,
                    StoreInADS=False)
            else:
                raise RuntimeError(f'Unknown "DensityType": {self._density_type}')
            if self.getProperty('MeasuredEfficiency').isDefault:
                self._calculate_area_density_from_density()
            else:
                self._calculate_area_density_from_efficiency()
            self._calculate_alpha_absXS_term()
            if self._xsection_type == "TotalXSection":
                self._calculate_alpha_scatXS_term()

        wavelengths = self._output_ws.readX(0)
        efficiency = self._calculate_efficiency(wavelengths)
        for histo in range(self._output_ws.getNumberHistograms()):
            self._output_ws.setY(histo, efficiency)

        self.setProperty('OutputWorkspace', self._output_ws)
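
Since the algorithm's output is a point-data workspace of efficiency values on the same grid as the input, applying the correction amounts to converting the measured data to point data and multiplying. A hedged usage sketch, where the workspace names and the Alpha value are placeholders:

    from mantid.simpleapi import (CreateSampleWorkspace, ConvertToPointData,
                                  CalculateEfficiencyCorrection, Multiply)

    sample = CreateSampleWorkspace(XUnit='Wavelength', XMin=0.2, XMax=4.0,
                                   BinWidth=0.01, NumBanks=1, BankPixelWidth=1)
    sample = ConvertToPointData(InputWorkspace=sample)  # match the correction's X grid
    correction = CalculateEfficiencyCorrection(InputWorkspace=sample, Alpha=0.693)
    corrected = Multiply(LHSWorkspace=sample, RHSWorkspace=correction)
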
Example 9
    def setUp(self):
        '''
        This file is the back-calculated spectrum of a polyethylene moderator (ambient 300K)
        prior to the efficiency correction described in Eq. (3) of:
          Mildner et al. "A cooled polyethylene moderator on a pulsed neutron source",
          Nucl. Instr. Meth. 152, 1978, doi:10.1016/0029-554X(78)90043-5

        After the correction is applied, the workspace will replicate curve (b) in Fig. 2 of this article.

        Similar results are shown in Fig. 1a for "ambient (300K)" in:
          S. Howells, "On the choice of moderator for a liquids diffractometer on a pulsed neutron source",
          Nucl. Instr. Meth. Phys. Res. 223, 1984, doi:10.1016/0167-5087(84)90256-4
        '''
        LoadAscii(
            OutputWorkspace=self._input_wksp,
            Filename=
            "CalculateEfficiencyCorrection_milder_moderator_polyethlyene_300K.txt",
            Unit="Wavelength")
        ConvertToPointData(InputWorkspace=self._input_wksp,
                           OutputWorkspace=self._input_wksp)
Example 10
    def PyExec(self):
        """
        Main method to execute the algorithm
        """
        # Get input
        wksp = self.getProperty('InputWorkspace').value
        assert wksp, 'Input workspace cannot be None'

        # process workspace to make it a PointData workspace
        if wksp.isHistogramData():
            wksp_name = wksp.name()
            wksp = ConvertToPointData(InputWorkspace=wksp_name,
                                      OutputWorkspace=wksp_name)

        gsas_content = self._write_gsas_fxye(wksp)

        # Get output and write file
        gsas_name = self.getProperty('OutputFilename').value
        with open(gsas_name, 'w') as gsas_file:
            gsas_file.write(gsas_content)
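
The isHistogramData guard makes the conversion safe to run on either kind of input: workspaces that are already point data pass through untouched, so the writer never converts twice. A minimal sketch of the guard acting as a no-op:

    from mantid.simpleapi import CreateWorkspace, ConvertToPointData

    ws = CreateWorkspace(DataX=[0.5, 1.5], DataY=[1.0, 2.0], NSpec=1)  # already point data
    if ws.isHistogramData():  # False here, so no conversion happens
        ws = ConvertToPointData(InputWorkspace=ws)
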
Example 11
    def setUp(self):
        """
        Create sample workspaces.
        """

        # Create some test data
        sample = CreateSampleWorkspace(NumBanks=1,
                                       BankPixelWidth=1,
                                       XUnit='Wavelength',
                                       XMin=6.8,
                                       XMax=7.9,
                                       BinWidth=0.1)

        self._sample_ws = sample

        # Create empty test data not in wavelength
        sample_empty_unit = CreateSampleWorkspace(NumBanks=1,
                                                  BankPixelWidth=1,
                                                  XUnit='Empty',
                                                  XMin=6.8,
                                                  XMax=7.9,
                                                  BinWidth=0.1)

        SetInstrumentParameter(Workspace=sample_empty_unit,
                               ParameterName='Efixed',
                               ParameterType='Number',
                               Value='5.')

        self._sample_empty_unit = sample_empty_unit

        empty_unit_point = ConvertToPointData(sample_empty_unit)

        self._empty_unit_point = empty_unit_point

        can = Scale(InputWorkspace=sample, Factor=1.2)
        self._can_ws = can

        self._corrections_ws_name = 'corrections'
Example 12
class FitIncidentSpectrumTest(unittest.TestCase):

    incident_wksp_name = 'incident_spectrum_wksp'
    phiMax = 6324
    phiEpi = 786
    alpha = 0.099
    lambda1 = 0.67143
    lambda2 = 0.06075
    lambdaT = 1.58
    binning_default = "0.2,0.01,4.0"

    def setUp(self):
        # Create the workspace to hold the already corrected incident spectrum
        self.incident_wksp = CreateWorkspace(
            OutputWorkspace=self.incident_wksp_name,
            NSpec=1,
            DataX=[0],
            DataY=[0],
            UnitX='Wavelength',
            VerticalAxisUnit='Text',
            VerticalAxisValues='IncidentSpectrum')
        self.incident_wksp = Rebin(InputWorkspace=self.incident_wksp,
                                   OutputWorkspace="foobar",
                                   Params=self.binning_default)
        self.incident_wksp = ConvertToPointData(
            InputWorkspace=self.incident_wksp, OutputWorkspace="foobar")
        # Add the incident spectrum to the workspace
        corrected_spectrum = self.generate_incident_spectrum(
            self.incident_wksp.readX(0), self.phiMax, self.phiEpi, self.alpha,
            self.lambda1, self.lambda2, self.lambdaT)
        self.incident_wksp.setY(0, corrected_spectrum)
        self.agl_instance = FitIncidentSpectrum

    def generate_incident_spectrum(self, wavelengths, phi_max, phi_epi, alpha,
                                   lambda_1, lambda_2, lambda_T):
        delta_term = 1. / (1. + np.exp((wavelengths - lambda_1) / lambda_2))
        term1 = (phi_max * (lambda_T**4. / wavelengths**5.)
                 * np.exp(-(lambda_T / wavelengths)**2.))
        term2 = phi_epi * delta_term / (wavelengths**(1 + 2 * alpha))
        return term1 + term2

    def test_fit_cubic_spline_with_gauss_conv_produces_fit_with_same_range_as_binning_for_calc(
            self):
        binning_for_calc = "0.2,0.1,3.0"
        binning_for_fit = "0.2,0.1,4.0"
        alg_test = run_algorithm("FitIncidentSpectrum",
                                 InputWorkspace=self.incident_wksp,
                                 OutputWorkspace="fit_wksp",
                                 BinningForCalc=binning_for_calc,
                                 BinningForFit=binning_for_fit,
                                 FitSpectrumWith="GaussConvCubicSpline")
        self.assertTrue(alg_test.isExecuted())
        fit_wksp = AnalysisDataService.retrieve("fit_wksp")
        # element-wise comparison against the BinningForCalc grid
        np.testing.assert_allclose(fit_wksp.readX(0), np.arange(0.2, 3, 0.1))

    def test_fit_cubic_spline_produces_fit_with_same_range_as_binning_for_calc(
            self):
        binning_for_calc = "0.2,0.1,3.0"
        binning_for_fit = "0.2,0.1,4.0"
        alg_test = run_algorithm("FitIncidentSpectrum",
                                 InputWorkspace=self.incident_wksp,
                                 OutputWorkspace="fit_wksp",
                                 BinningForCalc=binning_for_calc,
                                 BinningForFit=binning_for_fit,
                                 FitSpectrumWith="CubicSpline")
        self.assertTrue(alg_test.isExecuted())
        fit_wksp = AnalysisDataService.retrieve("fit_wksp")
        np.testing.assert_allclose(fit_wksp.readX(0), np.arange(0.2, 3, 0.1))

    def test_fit_cubic_spline_via_mantid_produces_fit_with_same_range_as_binning_for_calc(
            self):
        binning_for_calc = "0.2,0.1,3.0"
        binning_for_fit = "0.2,0.1,4.0"
        alg_test = run_algorithm("FitIncidentSpectrum",
                                 InputWorkspace=self.incident_wksp,
                                 OutputWorkspace="fit_wksp",
                                 BinningForCalc=binning_for_calc,
                                 BinningForFit=binning_for_fit,
                                 FitSpectrumWith="CubicSplineViaMantid")
        self.assertTrue(alg_test.isExecuted())
        fit_wksp = AnalysisDataService.retrieve("fit_wksp")
        np.testing.assert_allclose(fit_wksp.readX(0), np.arange(0.2, 3, 0.1))
Example 13
        binning = "%s,%s,%s" % (lam_lo, lam_delta, lam_hi)
        for moderator, spectrum_params in incident_spectrums.items():
            color = next(ax_bm._get_lines.prop_cycler)['color']

            incident_ws = 'howells_%s' % moderator
            CreateWorkspace(OutputWorkspace=incident_ws,
                            NSpec=1,
                            DataX=[0],
                            DataY=[0],
                            UnitX='Wavelength',
                            VerticalAxisUnit='Text',
                            VerticalAxisValues='IncidentSpectrum')
            Rebin(InputWorkspace=incident_ws,
                  OutputWorkspace=incident_ws,
                  Params=binning)
            ConvertToPointData(InputWorkspace=incident_ws,
                               OutputWorkspace=incident_ws)

            wavelengths = mtd[incident_ws].readX(0)
            incident_spectrum = calc_HowellsFunction(wavelengths,
                                                     **spectrum_params)
            mtd[incident_ws].setY(0, incident_spectrum)
            ax_bm.plot(mtd[incident_ws],
                       '-',
                       color=color,
                       wkspIndex=0,
                       label=moderator)

            eff_ws = 'efficiency'
            CalculateEfficiencyCorrection(InputWorkspace=incident_ws,
                                          Alpha=-0.693,
                                          OutputWorkspace=eff_ws)
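
This snippet relies on names defined earlier in its script: ax_bm is an axes created with Mantid's matplotlib projection (which is what lets ax_bm.plot accept a workspace plus wkspIndex), and lam_lo/lam_delta/lam_hi and incident_spectrums come from the surrounding study. A hedged sketch of that assumed setup, with hypothetical values:

    import matplotlib.pyplot as plt
    from mantid import plots  # noqa: F401 -- registers the 'mantid' projection

    fig, ax_bm = plt.subplots(subplot_kw={'projection': 'mantid'})
    lam_lo, lam_delta, lam_hi = 0.2, 0.01, 4.0  # hypothetical binning values
    # hypothetical moderator parameters, keyed for the calc_HowellsFunction kwargs
    incident_spectrums = {'ambient_300K': dict(phi_max=6324, phi_epi=786, alpha=0.099,
                                               lambda_1=0.67143, lambda_2=0.06075,
                                               lambda_T=1.58)}
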
Example 14
    def PyExec(self):
        input_workspaces = self._expand_groups()
        outWS = self.getPropertyValue("OutputWorkspace")
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             InstrumentWorkspace=input_workspaces[0],
                             NumberOfPeaks=0,
                             OutputWorkspace=outWS,
                             EnableLogging=False)

        method = self.getProperty("Method").value
        n_bkgr_pts = self.getProperty("NumBackgroundPts").value
        n_fwhm = self.getProperty("WidthScale").value
        scale = self.getProperty("ScaleFactor").value
        chisqmax = self.getProperty("ChiSqMax").value
        signalNoiseMin = self.getProperty("SignalNoiseMin").value
        ll = self.getProperty("LowerLeft").value
        ur = self.getProperty("UpperRight").value
        startX = self.getProperty('StartX').value
        endX = self.getProperty('EndX').value
        use_lorentz = self.getProperty("ApplyLorentz").value
        optimize_q = self.getProperty("OptimizeQVector").value
        output_fit = self.getProperty("OutputFitResults").value

        if output_fit and method != "Counts":
            fit_results = WorkspaceGroup()
            AnalysisDataService.addOrReplace(outWS + "_fit_results",
                                             fit_results)

        for inWS in input_workspaces:
            tmp_inWS = '__tmp_' + inWS
            IntegrateMDHistoWorkspace(InputWorkspace=inWS,
                                      P1Bin=f'{ll[1]},{ur[1]}',
                                      P2Bin=f'{ll[0]},{ur[0]}',
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)
            ConvertMDHistoToMatrixWorkspace(tmp_inWS,
                                            OutputWorkspace=tmp_inWS,
                                            EnableLogging=False)
            data = ConvertToPointData(tmp_inWS,
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)

            run = mtd[inWS].getExperimentInfo(0).run()
            scan_log = 'omega' if np.isclose(run.getTimeAveragedStd('phi'),
                                             0.0) else 'phi'
            scan_axis = run[scan_log].value
            scan_step = (scan_axis[-1] - scan_axis[0]) / (scan_axis.size - 1)
            data.setX(0, scan_axis)

            y = data.extractY().flatten()
            x = data.extractX().flatten()

            __tmp_pw = CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                                            InstrumentWorkspace=inWS,
                                            NumberOfPeaks=0,
                                            EnableLogging=False)

            if method != "Counts":
                # fit against gaussian with flat background for both the Fitted and CountsWithFitting methods
                fit_result = self._fit_gaussian(inWS, data, x, y, startX, endX,
                                                output_fit)

                if fit_result and fit_result.OutputStatus == 'success' and fit_result.OutputChi2overDoF < chisqmax:
                    B, A, peak_centre, sigma, _ = fit_result.OutputParameters.toDict(
                    )['Value']
                    _, errA, _, errs, _ = fit_result.OutputParameters.toDict(
                    )['Error']

                    if method == "Fitted":
                        integrated_intensity = A * sigma * np.sqrt(2 * np.pi)

                        # Convert correlation back into covariance
                        cor_As = (
                            fit_result.OutputNormalisedCovarianceMatrix.cell(
                                1, 4) / 100 *
                            fit_result.OutputParameters.cell(1, 2) *
                            fit_result.OutputParameters.cell(3, 2))
                        # σ^2 = 2π (A^2 σ_s^2 + σ_A^2 s^2 + 2 A s σ_As)
                        integrated_intensity_error = np.sqrt(
                            2 * np.pi * (A**2 * errs**2 + sigma**2 * errA**2 +
                                         2 * A * sigma * cor_As))

                    elif method == "CountsWithFitting":
                        y = y[slice(
                            np.searchsorted(
                                x, peak_centre - 2.3548 * sigma * n_fwhm / 2),
                            np.searchsorted(
                                x, peak_centre + 2.3548 * sigma * n_fwhm / 2))]
                        # subtract out the fitted flat background
                        integrated_intensity = (y.sum() -
                                                B * y.size) * scan_step
                        integrated_intensity_error = np.sum(
                            np.sqrt(y)) * scan_step

                    # update the goniometer position based on the fitted peak center
                    if scan_log == 'omega':
                        SetGoniometer(Workspace=__tmp_pw,
                                      Axis0=f'{peak_centre},0,1,0,-1',
                                      Axis1='chi,0,0,1,-1',
                                      Axis2='phi,0,1,0,-1',
                                      EnableLogging=False)
                    else:
                        SetGoniometer(Workspace=__tmp_pw,
                                      Axis0='omega,0,1,0,-1',
                                      Axis1='chi,0,0,1,-1',
                                      Axis2=f'{peak_centre},0,1,0,-1',
                                      EnableLogging=False)
                else:
                    if fit_result:
                        self.log().warning(
                            "Failed to fit workspace {}: Output Status={}, ChiSq={}"
                            .format(inWS, fit_result.OutputStatus,
                                    fit_result.OutputChi2overDoF))
                    else:
                        # no algorithm result to report on, so keep the message short
                        self.log().warning(
                            "Failed to fit workspace {}".format(inWS))
                    self._delete_tmp_workspaces(str(__tmp_pw), tmp_inWS)
                    continue
            else:
                integrated_intensity, integrated_intensity_error = self._counts_integration(
                    data, n_bkgr_pts, scan_step)

                # set the goniometer position to use the average of the scan
                SetGoniometer(Workspace=__tmp_pw,
                              Axis0='omega,0,1,0,-1',
                              Axis1='chi,0,0,1,-1',
                              Axis2='phi,0,1,0,-1',
                              EnableLogging=False)

            integrated_intensity *= scale
            integrated_intensity_error *= scale

            peak = __tmp_pw.createPeakHKL([
                run['h'].getStatistics().median,
                run['k'].getStatistics().median,
                run['l'].getStatistics().median
            ])
            peak.setWavelength(float(run['wavelength'].value))
            peak.setIntensity(integrated_intensity)
            peak.setSigmaIntensity(integrated_intensity_error)

            if integrated_intensity / integrated_intensity_error > signalNoiseMin:
                __tmp_pw.addPeak(peak)

                # correct q-vector using CentroidPeaksMD
                if optimize_q:
                    __tmp_q_ws = HB3AAdjustSampleNorm(InputWorkspaces=inWS,
                                                      NormaliseBy='None',
                                                      EnableLogging=False)
                    __tmp_pw = CentroidPeaksMD(__tmp_q_ws,
                                               __tmp_pw,
                                               EnableLogging=False)
                    DeleteWorkspace(__tmp_q_ws, EnableLogging=False)

                if use_lorentz:
                    # ILL Neutron Data Booklet, Second Edition, Section 2.9, Part 4.1, Equation 7
                    peak = __tmp_pw.getPeak(0)
                    lorentz = abs(
                        np.sin(peak.getScattering() *
                               np.cos(peak.getAzimuthal())))
                    peak.setIntensity(peak.getIntensity() * lorentz)
                    peak.setSigmaIntensity(peak.getSigmaIntensity() * lorentz)

                CombinePeaksWorkspaces(outWS,
                                       __tmp_pw,
                                       OutputWorkspace=outWS,
                                       EnableLogging=False)

                if output_fit and method != "Counts":
                    fit_results.addWorkspace(
                        RenameWorkspace(tmp_inWS + '_Workspace',
                                        outWS + "_" + inWS + '_Workspace',
                                        EnableLogging=False))
                    fit_results.addWorkspace(
                        RenameWorkspace(tmp_inWS + '_Parameters',
                                        outWS + "_" + inWS + '_Parameters',
                                        EnableLogging=False))
                    fit_results.addWorkspace(
                        RenameWorkspace(
                            tmp_inWS + '_NormalisedCovarianceMatrix',
                            outWS + "_" + inWS + '_NormalisedCovarianceMatrix',
                            EnableLogging=False))
                    fit_results.addWorkspace(
                        IntegrateMDHistoWorkspace(
                            InputWorkspace=inWS,
                            P1Bin=f'{ll[1]},0,{ur[1]}',
                            P2Bin=f'{ll[0]},0,{ur[0]}',
                            P3Bin='0,{}'.format(
                                mtd[inWS].getDimension(2).getNBins()),
                            OutputWorkspace=outWS + "_" + inWS + "_ROI",
                            EnableLogging=False))
            else:
                self.log().warning(
                    "Skipping peak from {} because Signal/Noise={:.3f} which is less than {}"
                    .format(inWS,
                            integrated_intensity / integrated_intensity_error,
                            signalNoiseMin))

            self._delete_tmp_workspaces(str(__tmp_pw), tmp_inWS)

        self.setProperty("OutputWorkspace", mtd[outWS])
Example 15
    def PyExec(self):
        runs = self.getProperty('Filename').value
        runs_as_str = self.getPropertyValue('Filename')
        number_runs = runs_as_str.count(',') + runs_as_str.count('+') + 1
        self._progress = Progress(self,
                                  start=0.0,
                                  end=1.0,
                                  nreports=number_runs)
        self._loader = self.getPropertyValue('LoaderName')
        self._version = self.getProperty('LoaderVersion').value
        self._loader_options = self.getProperty('LoaderOptions').value
        merge_options = self.getProperty('MergeRunsOptions').value
        output = self.getPropertyValue('OutputWorkspace')
        self._prefix = ''
        if output.startswith('__'):
            self._prefix = '__'

        # get the first run
        to_group = []
        first_run = runs[0]
        if isinstance(first_run, list):
            first_run = first_run[0]

        if self._loader == 'Load':
            # figure out the winning loader
            winning_loader = FileLoaderRegistry.Instance().chooseLoader(
                first_run)
            self._loader = winning_loader.name()
            self._version = winning_loader.version()
            self.setPropertyValue('LoaderName', self._loader)
            self.setProperty('LoaderVersion', self._version)

        for runs_to_sum in runs:
            if not isinstance(runs_to_sum, list):
                run = runs_to_sum
                runnumber = self._prefix + os.path.basename(run).split('.')[0]
                self._load(run, runnumber)
                to_group.append(runnumber)
            else:
                runnumbers = self._prefix
                merged = ''
                for i, run in enumerate(runs_to_sum):
                    runnumber = os.path.basename(run).split('.')[0]
                    runnumbers += '_' + runnumber
                    runnumber = self._prefix + runnumber
                    self._load(run, runnumber)
                    if i == 0:
                        merged = runnumber
                    else:
                        # we need to merge to a temp name and rename later,
                        # since if the merged workspace is a group workspace,
                        # its items will be orphaned
                        tmp_merged = '__tmp_' + merged
                        MergeRuns(InputWorkspaces=[merged, runnumber],
                                  OutputWorkspace=tmp_merged,
                                  **merge_options)
                        DeleteWorkspace(Workspace=runnumber)
                        DeleteWorkspace(Workspace=merged)
                        RenameWorkspace(InputWorkspace=tmp_merged,
                                        OutputWorkspace=merged)

                runnumbers = runnumbers[1:]
                RenameWorkspace(InputWorkspace=merged,
                                OutputWorkspace=runnumbers)
                to_group.append(runnumbers)

        if len(to_group) != 1:
            if self.getPropertyValue('OutputBehaviour') == 'Group':
                GroupWorkspaces(InputWorkspaces=to_group,
                                OutputWorkspace=output)
            else:
                log_as_x = self.getPropertyValue('SampleLogAsXAxis')
                # first ensure point data before attempting conjoin, as it is undefined for histograms
                for ws in to_group:
                    ConvertToPointData(InputWorkspace=ws, OutputWorkspace=ws)
                if log_as_x:
                    ConjoinXRuns(InputWorkspaces=to_group,
                                 OutputWorkspace=output,
                                 SampleLogAsXAxis=log_as_x)
                else:
                    ConjoinXRuns(InputWorkspaces=to_group,
                                 OutputWorkspace=output,
                                 LinearizeAxis=True)
        else:
            RenameWorkspace(InputWorkspace=to_group[0], OutputWorkspace=output)

        self.setProperty('OutputWorkspace', mtd[output])
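
The comment before the conversion loop is worth a standalone illustration: ConjoinXRuns builds one spectrum whose X axis has one value per input point, which is only well defined for point data, so every input is converted first. A minimal hedged sketch of the same pattern on two toy single-point workspaces:

    from mantid.simpleapi import CreateWorkspace, ConvertToPointData, ConjoinXRuns

    for name, y in [('run1', [1.0]), ('run2', [2.0])]:
        CreateWorkspace(DataX=[0.0, 1.0], DataY=y, NSpec=1, OutputWorkspace=name)
        ConvertToPointData(InputWorkspace=name, OutputWorkspace=name)  # edges -> centre
    ConjoinXRuns(InputWorkspaces='run1,run2', OutputWorkspace='joined',
                 LinearizeAxis=True)  # 'joined' holds one spectrum with two points
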
Example 16
    def PyExec(self):
        input_workspaces = self._expand_groups()
        outWS = self.getPropertyValue("OutputWorkspace")
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             InstrumentWorkspace=input_workspaces[0],
                             NumberOfPeaks=0,
                             OutputWorkspace=outWS,
                             EnableLogging=False)

        scale = self.getProperty("ScaleFactor").value
        chisqmax = self.getProperty("ChiSqMax").value
        signalNoiseMin = self.getProperty("SignalNoiseMin").value
        ll = self.getProperty("LowerLeft").value
        ur = self.getProperty("UpperRight").value
        startX = self.getProperty('StartX').value
        endX = self.getProperty('EndX').value
        use_lorentz = self.getProperty("ApplyLorentz").value
        optimize_q = self.getProperty("OptimizeQVector").value
        output_fit = self.getProperty("OutputFitResults").value

        if output_fit:
            fit_results = WorkspaceGroup()
            AnalysisDataService.addOrReplace(outWS + "_fit_results",
                                             fit_results)

        for inWS in input_workspaces:
            tmp_inWS = '__tmp_' + inWS
            IntegrateMDHistoWorkspace(InputWorkspace=inWS,
                                      P1Bin=f'{ll[1]},{ur[1]}',
                                      P2Bin=f'{ll[0]},{ur[0]}',
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)
            ConvertMDHistoToMatrixWorkspace(tmp_inWS,
                                            OutputWorkspace=tmp_inWS,
                                            EnableLogging=False)
            data = ConvertToPointData(tmp_inWS,
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)

            run = mtd[inWS].getExperimentInfo(0).run()
            scan_log = 'omega' if np.isclose(run.getTimeAveragedStd('phi'),
                                             0.0) else 'phi'
            scan_axis = run[scan_log].value
            data.setX(0, scan_axis)

            y = data.extractY().flatten()
            x = data.extractX().flatten()
            function = f"name=FlatBackground, A0={np.nanmin(y)};" \
                f"name=Gaussian, PeakCentre={x[np.nanargmax(y)]}, Height={np.nanmax(y)-np.nanmin(y)}, Sigma=0.25"
            constraints = f"f0.A0 > 0, f1.Height > 0, {x.min()} < f1.PeakCentre < {x.max()}"
            try:
                fit_result = Fit(function,
                                 data,
                                 Output=str(data),
                                 IgnoreInvalidData=True,
                                 OutputParametersOnly=not output_fit,
                                 Constraints=constraints,
                                 StartX=startX,
                                 EndX=endX,
                                 EnableLogging=False)
            except RuntimeError as e:
                self.log().warning("Failed to fit workspace {}: {}".format(
                    inWS, e))
                continue

            if fit_result.OutputStatus == 'success' and fit_result.OutputChi2overDoF < chisqmax:
                __tmp_pw = CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                                                InstrumentWorkspace=inWS,
                                                NumberOfPeaks=0,
                                                EnableLogging=False)

                # avoid shadowing the x data array extracted above
                _, A, peak_centre, s, _ = fit_result.OutputParameters.toDict()['Value']
                _, errA, _, errs, _ = fit_result.OutputParameters.toDict()['Error']

                if scan_log == 'omega':
                    SetGoniometer(Workspace=__tmp_pw,
                                  Axis0=f'{peak_centre},0,1,0,-1',
                                  Axis1='chi,0,0,1,-1',
                                  Axis2='phi,0,1,0,-1',
                                  EnableLogging=False)
                else:
                    SetGoniometer(Workspace=__tmp_pw,
                                  Axis0='omega,0,1,0,-1',
                                  Axis1='chi,0,0,1,-1',
                                  Axis2=f'{peak_centre},0,1,0,-1',
                                  EnableLogging=False)

                peak = __tmp_pw.createPeakHKL([
                    run['h'].getStatistics().median,
                    run['k'].getStatistics().median,
                    run['l'].getStatistics().median
                ])
                peak.setWavelength(float(run['wavelength'].value))

                integrated_intensity = A * s * np.sqrt(2 * np.pi) * scale
                peak.setIntensity(integrated_intensity)

                # Convert correlation back into covariance
                cor_As = (
                    fit_result.OutputNormalisedCovarianceMatrix.cell(1, 4) /
                    100 * fit_result.OutputParameters.cell(1, 2) *
                    fit_result.OutputParameters.cell(3, 2))
                # σ^2 = 2π (A^2 σ_s^2 + σ_A^2 s^2 + 2 A s σ_As)
                integrated_intensity_error = np.sqrt(
                    2 * np.pi * (A**2 * errs**2 + s**2 * errA**2 +
                                 2 * A * s * cor_As)) * scale
                peak.setSigmaIntensity(integrated_intensity_error)

                if integrated_intensity / integrated_intensity_error > signalNoiseMin:
                    __tmp_pw.addPeak(peak)

                    # correct q-vector using CentroidPeaksMD
                    if optimize_q:
                        __tmp_q_ws = HB3AAdjustSampleNorm(InputWorkspaces=inWS,
                                                          NormaliseBy='None',
                                                          EnableLogging=False)
                        __tmp_pw = CentroidPeaksMD(__tmp_q_ws,
                                                   __tmp_pw,
                                                   EnableLogging=False)
                        DeleteWorkspace(__tmp_q_ws, EnableLogging=False)

                    if use_lorentz:
                        # ILL Neutron Data Booklet, Second Edition, Section 2.9, Part 4.1, Equation 7
                        peak = __tmp_pw.getPeak(0)
                        lorentz = abs(
                            np.sin(peak.getScattering() *
                                   np.cos(peak.getAzimuthal())))
                        peak.setIntensity(peak.getIntensity() * lorentz)
                        peak.setSigmaIntensity(peak.getSigmaIntensity() *
                                               lorentz)

                    CombinePeaksWorkspaces(outWS,
                                           __tmp_pw,
                                           OutputWorkspace=outWS,
                                           EnableLogging=False)
                    DeleteWorkspace(__tmp_pw, EnableLogging=False)

                    if output_fit:
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS + '_Workspace',
                                            outWS + "_" + inWS + '_Workspace',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS + '_Parameters',
                                            outWS + "_" + inWS + '_Parameters',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS +
                                            '_NormalisedCovarianceMatrix',
                                            outWS + "_" + inWS +
                                            '_NormalisedCovarianceMatrix',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            IntegrateMDHistoWorkspace(
                                InputWorkspace=inWS,
                                P1Bin=f'{ll[1]},0,{ur[1]}',
                                P2Bin=f'{ll[0]},0,{ur[0]}',
                                P3Bin='0,{}'.format(
                                    mtd[inWS].getDimension(2).getNBins()),
                                OutputWorkspace=outWS + "_" + inWS + "_ROI",
                                EnableLogging=False))
                else:
                    self.log().warning(
                        "Skipping peak from {} because Signal/Noise={:.3f} which is less than {}"
                        .format(
                            inWS,
                            integrated_intensity / integrated_intensity_error,
                            signalNoiseMin))
            else:
                self.log().warning(
                    "Failed to fit workspace {}: Output Status={}, ChiSq={}".
                    format(inWS, fit_result.OutputStatus,
                           fit_result.OutputChi2overDoF))

            for tmp_ws in (tmp_inWS, tmp_inWS + '_Workspace',
                           tmp_inWS + '_Parameters',
                           tmp_inWS + '_NormalisedCovarianceMatrix'):
                if mtd.doesExist(tmp_ws):
                    DeleteWorkspace(tmp_ws, EnableLogging=False)

        self.setProperty("OutputWorkspace", mtd[outWS])