Code example #1
import numpy as np
import mantid.simpleapi as mantid


def fast_fourier_filter(ws, freq_params=None):
    if not freq_params:
        return
    # This is a simple Fourier filter: use FFTSmooth to get a WS with only the low-radius components, then
    # subtract that from the merged WS.

    x_range = ws.dataX(0)
    # The param p in FFTSmooth is defined such that if the input ws has Nx bins, then in the Fourier-space ws it will
    # cut off all frequencies in bins nk=Nk/p and above, calculated by p = pi/(k_c*dQ) where k_c is the desired
    # cutoff frequency.
    # The input ws of FFTSmooth has binning [x_min, dx, x_max], with Nx bins.
    # FFTSmooth doubles the length of the input ws and performs an FFT with output ws binning
    # [0, dk, k_max]=[0, 1/2*(x_max-x_min), 1/(2*dx)], and Nk=Nx bins.
    # k_max/k_c = Nk/nk
    # 1/(k_c*2*dx) = p
    # Because the FFT uses sin(2*pi*k*x) while PDFFourierTransform uses sin(Q*r), we need to include a factor of 2*pi:
    # p = pi/(k_c*dQ)
    lower_freq_param = round(np.pi / (freq_params[0] * (x_range[1] - x_range[0])))
    # This gives FFTSmooth the data in the form S(Q)-1; later we use PDFFourierTransform with Q(S(Q)-1), but
    # it does not matter which form we use in this case.
    tmp = mantid.FFTSmooth(InputWorkspace=ws, Filter="Zeroing", Params=str(lower_freq_param), StoreInADS=False,
                           IgnoreXBins=True)
    mantid.Minus(LHSWorkspace=ws, RHSWorkspace=tmp, OutputWorkspace=ws)
    if len(freq_params) > 1:
        upper_freq_param = round(np.pi / (freq_params[1] * (x_range[1] - x_range[0])))
        mantid.FFTSmooth(InputWorkspace=ws, OutputWorkspace=ws, Filter="Zeroing",
                         Params=str(upper_freq_param), IgnoreXBins=True)
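As a standalone check of the comment block above, here is a minimal sketch of how the Zeroing parameter is derived; the bin width and cutoff values are invented for illustration:

import numpy as np

dQ = 0.02   # bin width of the input workspace (illustrative)
k_c = 3.0   # desired cutoff passed as freq_params[0] (illustrative)

# p = pi/(k_c*dQ), rounded exactly as in fast_fourier_filter above
p = round(np.pi / (k_c * dQ))
print(p)  # -> 52, so FFTSmooth with Params="52" zeroes Fourier bins nk >= Nk/52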
Code example #2
def subtract_summed_runs(ws_to_correct, empty_sample):
    """
    Subtracts the given empty sample workspace
    from the workspace specified. Returns the subtracted workspace.
    :param ws_to_correct: The workspace to correct
    :param empty_sample: The empty workspace to subtract
    :return: The workspace with the empty runs subtracted
    """
    # Skip this step if the workspace has no current, as subtracting empty
    # would give us negative counts
    if workspace_has_current(ws_to_correct):
        try:
            mantid.Minus(LHSWorkspace=ws_to_correct,
                         RHSWorkspace=empty_sample,
                         OutputWorkspace=ws_to_correct)
        except ValueError:
            raise ValueError(
                "The empty run(s) specified for this file do not have matching binning. Do the TOF windows of"
                " the empty and sample match?")
    else:
        ws_to_correct = copy.deepcopy(ws_to_correct)

    remove_intermediate_workspace(empty_sample)

    return ws_to_correct
Code example #3
    def calculate_scaled_hab_output(self, shift, scale, sample_count_secondary,
                                    sample_norm_secondary, can_count_secondary,
                                    can_norm_secondary):
        scaled_norm_front = mantid_api.Scale(
            InputWorkspace=sample_norm_secondary,
            Factor=1.0 / scale,
            Operation='Multiply',
            StoreInADS=False)
        shifted_norm_front = mantid_api.Scale(
            InputWorkspace=sample_norm_secondary,
            Factor=shift,
            Operation='Multiply',
            StoreInADS=False)
        numerator = mantid_api.Plus(LHSWorkspace=sample_count_secondary,
                                    RHSWorkspace=shifted_norm_front,
                                    StoreInADS=False)
        hab_sample = mantid_api.Divide(LHSWorkspace=numerator,
                                       RHSWorkspace=scaled_norm_front,
                                       StoreInADS=False)

        if can_count_secondary is not None and can_norm_secondary is not None:
            scaled_norm_front_can = mantid_api.Scale(
                InputWorkspace=can_norm_secondary,
                Factor=1.0 / scale,
                Operation='Multiply',
                StoreInADS=False)
            hab_can = mantid_api.Divide(LHSWorkspace=can_count_secondary,
                                        RHSWorkspace=scaled_norm_front_can,
                                        StoreInADS=False)
            hab_sample = mantid_api.Minus(LHSWorkspace=hab_sample,
                                          RHSWorkspace=hab_can,
                                          StoreInADS=False)

        return hab_sample
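The Scale/Plus/Divide chain above amounts to scale * (count / norm + shift); a quick numpy check of that identity, with made-up arrays standing in for the workspaces:

import numpy as np

count = np.array([4.0, 9.0, 16.0])  # stand-in for sample_count_secondary
norm = np.array([2.0, 3.0, 4.0])    # stand-in for sample_norm_secondary
scale, shift = 0.8, 0.1

hab = (count + shift * norm) / (norm / scale)   # the workspace arithmetic above
assert np.allclose(hab, scale * (count / norm + shift))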
Code example #4
def subtract_summed_runs(ws_to_correct,
                         empty_sample_ws_string,
                         instrument,
                         scale_factor=None):
    """
    Loads the list of empty runs specified by the empty_sample_ws_string and subtracts
    them from the workspace specified. Returns the subtracted workspace.
    :param ws_to_correct: The workspace to subtract the empty instrument runs from
    :param empty_sample_ws_string: The empty run numbers to subtract from the workspace
    :param instrument: The instrument object these runs belong to
    :param scale_factor: The percentage to scale the loaded runs by
    :return: The workspace with the empty runs subtracted
    """
    # If no empty run string was specified, just return to skip this step
    if empty_sample_ws_string is None:
        return ws_to_correct

    empty_sample = load_current_normalised_ws_list(
        run_number_string=empty_sample_ws_string,
        instrument=instrument,
        input_batching=INPUT_BATCHING.Summed)
    empty_sample = empty_sample[0]
    if scale_factor:
        empty_sample = mantid.Scale(InputWorkspace=empty_sample,
                                    OutputWorkspace=empty_sample,
                                    Factor=scale_factor,
                                    Operation="Multiply")
    mantid.Minus(LHSWorkspace=ws_to_correct,
                 RHSWorkspace=empty_sample,
                 OutputWorkspace=ws_to_correct)
    remove_intermediate_workspace(empty_sample)

    return ws_to_correct
Code example #5
def subtract_summed_runs(ws_to_correct, empty_sample_ws_string, instrument, scale_factor=None):
    """
    Loads the list of empty runs specified by the empty_sample_ws_string and subtracts
    them from the workspace specified. Returns the subtracted workspace.
    :param ws_to_correct: The workspace to subtract the empty instrument runs from
    :param empty_sample_ws_string: The empty run numbers to subtract from the workspace
    :param instrument: The instrument object these runs belong to
    :param scale_factor: The percentage to scale the loaded runs by
    :return: The workspace with the empty runs subtracted
    """
    # Skip this step if no empty run string was specified,
    # or if the workspace has no current, as subtracting the empty would give us negative counts
    if empty_sample_ws_string is None or not workspace_has_current(ws_to_correct):
        return ws_to_correct

    empty_sample = load_current_normalised_ws_list(run_number_string=empty_sample_ws_string, instrument=instrument,
                                                   input_batching=INPUT_BATCHING.Summed)
    empty_sample = empty_sample[0]
    if scale_factor:
        empty_sample = mantid.Scale(InputWorkspace=empty_sample, OutputWorkspace=empty_sample, Factor=scale_factor,
                                    Operation="Multiply")
    try:
        mantid.Minus(LHSWorkspace=ws_to_correct, RHSWorkspace=empty_sample, OutputWorkspace=ws_to_correct)
    except ValueError:
        raise ValueError("The empty run(s) specified for this file do not have matching binning. Do the TOF windows of"
                         " the empty and sample match?")

    remove_intermediate_workspace(empty_sample)

    return ws_to_correct
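The try/except above turns a binning mismatch into a friendlier message. The same control flow can be sketched on plain numpy arrays, where a length mismatch also raises ValueError:

import numpy as np

def subtract(sample, empty):
    # Stand-ins for workspaces; mismatched lengths raise ValueError on '-'.
    try:
        return sample - empty
    except ValueError:
        raise ValueError("The empty run(s) specified for this file do not have matching binning. "
                         "Do the TOF windows of the empty and sample match?")

subtract(np.zeros(10), np.zeros(10))   # fine
# subtract(np.zeros(10), np.zeros(8))  # raises the friendlier ValueError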
Code example #6
    def _correct_sample_can(self):
        """
        Correct for sample and container.
        """

        logger.information('Correcting sample and container')
        corrected_can_ws = '__corrected_can'

        factor_types = ['_ass']
        if self._use_can:
            factor_types.extend(['_acc', '_acsc', '_assc'])
        corr_unit = s_api.mtd[self._corrections +
                              '_ass'].getAxis(0).getUnit().unitID()
        for f_type in factor_types:
            self._convert_units_wavelength(corr_unit,
                                           self._corrections + f_type,
                                           self._corrections + f_type,
                                           "Wavelength")

        if self._rebin_container_ws:
            s_api.RebinToWorkspace(
                WorkspaceToRebin=self._scaled_container_wavelength,
                WorkspaceToMatch=self._corrections + '_acc',
                OutputWorkspace=self._scaled_container_wavelength)

        # Acc
        s_api.Divide(LHSWorkspace=self._scaled_container_wavelength,
                     RHSWorkspace=self._corrections + '_acc',
                     OutputWorkspace=corrected_can_ws)

        # Acsc
        s_api.Multiply(LHSWorkspace=corrected_can_ws,
                       RHSWorkspace=self._corrections + '_acsc',
                       OutputWorkspace=corrected_can_ws)
        s_api.Minus(LHSWorkspace=self._sample_ws_wavelength,
                    RHSWorkspace=corrected_can_ws,
                    OutputWorkspace=self._output_ws_name)

        # Assc
        s_api.Divide(LHSWorkspace=self._output_ws_name,
                     RHSWorkspace=self._corrections + '_assc',
                     OutputWorkspace=self._output_ws_name)

        for f_type in factor_types:
            self._convert_units_wavelength(corr_unit,
                                           self._corrections + f_type,
                                           self._corrections + f_type,
                                           corr_unit)

        s_api.DeleteWorkspace(corrected_can_ws)
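In array form, the Divide/Multiply/Minus/Divide sequence above is the usual Paalman-Pings container subtraction, (sample - (can/Acc)*Acsc)/Assc; a numpy sketch with invented signals and scalar attenuation factors:

import numpy as np

sample = np.array([10.0, 12.0, 11.0])  # sample-plus-can signal (illustrative)
can = np.array([2.0, 2.5, 2.2])        # scaled container signal (illustrative)
acc, acsc, assc = 0.9, 0.95, 0.85      # attenuation factors (illustrative)

corrected = (sample - (can / acc) * acsc) / assc
print(corrected)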
Code example #7
    def _fr_correction(self):
        """
        Applies the flipping-ratio correction
        according to J. Appl. Cryst. 42, 69-84 (2009)
        and creates the corrected workspaces.
        """
        wslist = []
        # 1. retrieve NiCr and Background
        sf_nicr = api.AnalysisDataService.retrieve(self.input_workspaces['SF_NiCr'])
        nsf_nicr = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_NiCr'])
        sf_bkgr = api.AnalysisDataService.retrieve(self.input_workspaces['SF_Background'])
        nsf_bkgr = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_Background'])

        # 2. subtract background from NiCr
        _sf_nicr_bg_ = sf_nicr - sf_bkgr
        wslist.append(_sf_nicr_bg_.name())
        _nsf_nicr_bg_ = nsf_nicr - nsf_bkgr
        wslist.append(_nsf_nicr_bg_.name())
        # check negative values, throw exception
        sf_arr = np.array(_sf_nicr_bg_.extractY()).flatten()
        nsf_arr = np.array(_nsf_nicr_bg_.extractY()).flatten()
        sf_neg_values = np.where(sf_arr < 0)[0]
        nsf_neg_values = np.where(nsf_arr < 0)[0]
        if len(sf_neg_values) or len(nsf_neg_values):
            self.cleanup(wslist)
            message = "Background is higher than NiCr signal!"
            self.log().error(message)
            raise RuntimeError(message)

        # 3. calculate flipping ratio F - 1 = (NiCr - Bkg)NSF/(NiCr - Bkg)SF - 1
        _coef_ws_ = api.Divide(LHSWorkspace=_nsf_nicr_bg_, RHSWorkspace=_sf_nicr_bg_, WarnOnZeroDivide=True) - 1.0
        wslist.append(_coef_ws_.name())
        # 4. apply correction raw data
        sf_data_ws = api.AnalysisDataService.retrieve(self.input_workspaces['SF_Data'])
        nsf_data_ws = api.AnalysisDataService.retrieve(self.input_workspaces['NSF_Data'])
        # NSF_corr[i] = NSF[i] + (NSF[i] - SF[i])/(F[i] - 1)
        _diff_ws_ = nsf_data_ws - sf_data_ws
        wslist.append(_diff_ws_.name())
        _tmp_ws_ = api.Divide(LHSWorkspace=_diff_ws_, RHSWorkspace=_coef_ws_, WarnOnZeroDivide=True)
        _tmp_ws_.setYUnit(nsf_data_ws.YUnit())
        api.Plus(LHSWorkspace=nsf_data_ws, RHSWorkspace=_tmp_ws_, OutputWorkspace=self.nsf_outws_name)
        # SF_corr[i] = SF[i] - (NSF[i] - SF[i])/(F[i] - 1)
        api.Minus(LHSWorkspace=sf_data_ws, RHSWorkspace=_tmp_ws_, OutputWorkspace=self.sf_outws_name)
        api.DeleteWorkspace(_tmp_ws_)

        # cleanup
        self.cleanup(wslist)
        return
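A scalar sketch of the two correction formulas quoted in the comments above; the counts and the F-1 coefficient are made up:

nsf, sf = 100.0, 20.0   # raw non-spin-flip / spin-flip intensities (illustrative)
coef = 9.0              # F - 1 from the NiCr measurement (illustrative)

tmp = (nsf - sf) / coef   # the _tmp_ws_ term above
nsf_corr = nsf + tmp      # NSF_corr = NSF + (NSF - SF)/(F - 1)
sf_corr = sf - tmp        # SF_corr  = SF  - (NSF - SF)/(F - 1)
print(nsf_corr, sf_corr)  # note nsf_corr + sf_corr == nsf + sf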
Code example #8
    def _subtract_background(self, datalist, bglist, deterota):
        """
        Subtracts background.
        """
        result = dict.fromkeys(deterota)
        for angle in deterota:
            wsname = datalist[angle] + "-bg"
            api.Minus(datalist[angle], bglist[angle], OutputWorkspace=wsname)
            self.toremove.append(wsname)
            if self._is_negative(wsname):
                message = "Background " + bglist[angle] + " is higher than Vanadium " + datalist[angle] + " signal!"
                self.cleanup(self.toremove)
                raise RuntimeError(message)
            result[angle] = wsname

        return result
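A reduced numpy version of the subtract-then-check-for-negatives guard used above (and in code example #7); the arrays are invented:

import numpy as np

vanadium = np.array([5.0, 6.0, 4.0])
background = np.array([1.0, 7.0, 2.0])  # second bin exceeds the vanadium signal

diff = vanadium - background
if (diff < 0).any():
    raise RuntimeError("Background is higher than Vanadium signal!")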
Code example #9
    def _subtract(self):
        """
        Do a simple container subtraction (when no corrections are given).
        """

        logger.information('Using simple container subtraction')

        if self._rebin_container_ws:
            logger.information('Rebinning container to ensure Minus')
            s_api.RebinToWorkspace(
                WorkspaceToRebin=self._scaled_container_wavelength,
                WorkspaceToMatch=self._sample_ws_wavelength,
                OutputWorkspace=self._scaled_container_wavelength)

        s_api.Minus(LHSWorkspace=self._sample_ws_wavelength,
                    RHSWorkspace=self._scaled_container_wavelength,
                    OutputWorkspace=self._output_ws_name)
Code example #10
File: DPDFreduction.py  Project: luzpaz/mantid
    def PyExec(self):
        self._runs = self.getProperty('RunNumbers').value
        self._vanfile = self.getProperty('Vanadium').value
        self._ecruns = self.getProperty('EmptyCanRunNumbers').value
        self._ebins = (self.getProperty('EnergyBins').value).tolist()
        self._qbins = (self.getProperty('MomentumTransferBins').value).tolist()
        self._snorm = self.getProperty('NormalizeSlices').value
        self._clean = self.getProperty('CleanWorkspaces').value
        wn_sqes = self.getPropertyValue("OutputWorkspace")

        # workspace names
        prefix = ''
        if self._clean:
            prefix = '__'
        # "wn" denotes workspace name
        wn_data = prefix + 'data'  # Accumulated data events
        wn_data_mon = prefix + 'data_monitors'  # Accumulated monitors for data
        wn_van = prefix + 'vanadium'  # White-beam vanadium
        wn_van_st = prefix + 'vanadium_S_theta'
        wn_reduced = prefix + 'reduced'  # data after DGSReduction
        wn_ste = prefix + 'S_theta_E'  # data after grouping by theta angle
        wn_sten = prefix + 'S_theta_E_normalized'
        wn_steni = prefix + 'S_theta_E_interp'
        wn_sqe = prefix + 'S_Q_E'
        wn_sqeb = prefix + 'S_Q_E_binned'
        wn_sqesn = prefix + wn_sqes + '_norm'
        # Empty can files
        wn_ec_data = prefix + 'ec_data'  # Accumulated empty can data
        wn_ec_data_mon = prefix + 'ec_data_monitors'  # Accumulated monitors for empty can
        wn_ec_reduced = prefix + 'ec_reduced'  # empty can data after DGSReduction
        wn_ec_ste = prefix + 'ec_S_theta_E'  # empty can data after grouping by theta angle

        # Save current configuration
        facility = config['default.facility']
        instrument = config['default.instrument']
        datasearch = config["datasearch.searcharchive"]
        # Allows searching for ARCS run numbers
        config['default.facility'] = 'SNS'
        config['default.instrument'] = 'ARCS'
        config["datasearch.searcharchive"] = "On"

        try:
            # Load the vanadium file, assumed to be preprocessed, meaning that
            # for every detector all events within a particular wide wavelength
            # range have been rebinned into a single histogram
            self._load(self._vanfile, wn_van)
            # Check for white-beam vanadium, true if the vertical chopper is absent (vChTrans==2)
            if api.mtd[wn_van].run().getProperty('vChTrans').value[0] != 2:
                raise ValueError("White-vanadium is required")

            # Load several event files into a single workspace. The nominal incident
            # energy should be the same to avoid difference in energy resolution
            self._load(self._runs, wn_data)

            # Load empty can event files, if present
            if self._ecruns:
                self._load(self._ecruns, wn_ec_data)

        finally:
            # Recover the default configuration
            config['default.facility'] = facility
            config['default.instrument'] = instrument
            config["datasearch.searcharchive"] = datasearch

        # Obtain incident energy as the mean of the nominal Ei values.
        # There is one nominal value for each run number.
        ws_data = sapi.mtd[wn_data]
        Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
        Ei_std = ws_data.getRun()['EnergyRequest'].getStatistics(
        ).standard_deviation

        # Verify empty can runs were obtained at similar energy
        if self._ecruns:
            ws_ec_data = sapi.mtd[wn_ec_data]
            ec_Ei = ws_ec_data.getRun()['EnergyRequest'].getStatistics().mean
            if abs(Ei - ec_Ei) > Ei_std:
                raise RuntimeError(
                    'Empty can runs were obtained at a significantly' +
                    ' different incident energy than the sample runs')

        # Obtain energy range. If user did not supply a triad
        # [Estart, Ewidth, Eend] but only Ewidth, then estimate
        # Estart and Eend from the nominal energies
        if len(self._ebins) == 1:
            ws_data = sapi.mtd[wn_data]
            Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
            self._ebins.insert(0, -0.5 * Ei)  # prepend
            self._ebins.append(0.95 * Ei)  # append

        # Enforce that the elastic energy (E=0) lies in the middle of the
        # central bin with an appropriate small shift in the energy range
        Ei_min_reduced = self._ebins[0] / self._ebins[1]
        remainder = Ei_min_reduced - int(Ei_min_reduced)
        if remainder >= 0.0:
            erange_shift = self._ebins[1] * (0.5 - remainder)
        else:
            erange_shift = self._ebins[1] * (-0.5 - remainder)
        self._ebins[0] += erange_shift  # shift minimum energy
        self._ebins[-1] += erange_shift  # shift maximum energy

        # Convert to energy transfer. Normalize by proton charge.
        # The output workspace is S(detector-id,E)
        factor = 0.1  # use a finer energy bin than the one passed (self._ebins[1])
        Erange = '{0},{1},{2}'.format(self._ebins[0], factor * self._ebins[1],
                                      self._ebins[2])
        Ei_calc, T0 = sapi.GetEiT0atSNS(MonitorWorkspace=wn_data_mon,
                                        IncidentEnergyGuess=Ei)
        sapi.MaskDetectors(Workspace=wn_data,
                           MaskedWorkspace=wn_van)  # Use vanadium mask
        sapi.DgsReduction(SampleInputWorkspace=wn_data,
                          SampleInputMonitorWorkspace=wn_data_mon,
                          IncidentEnergyGuess=Ei_calc,
                          UseIncidentEnergyGuess=1,
                          TimeZeroGuess=T0,
                          EnergyTransferRange=Erange,
                          IncidentBeamNormalisation='ByCurrent',
                          OutputWorkspace=wn_reduced)

        if self._ecruns:
            sapi.MaskDetectors(Workspace=wn_ec_data, MaskedWorkspace=wn_van)
            sapi.DgsReduction(SampleInputWorkspace=wn_ec_data,
                              SampleInputMonitorWorkspace=wn_ec_data_mon,
                              IncidentEnergyGuess=Ei_calc,
                              UseIncidentEnergyGuess=1,
                              TimeZeroGuess=T0,
                              EnergyTransferRange=Erange,
                              IncidentBeamNormalisation='ByCurrent',
                              OutputWorkspace=wn_ec_reduced)

        # Obtain maximum and minimum |Q| values, as well as dQ if none passed
        if len(self._qbins) < 3:
            if not self._qbins:
                # insert dQ if empty qbins. The minimal momentum transfer
                # is the result of an event where the initial energy was
                # Ei and the final energy was Ei+dE.
                dE = self._ebins[1]
                self._qbins.append(
                    numpy.sqrt((Ei + dE) / ENERGY_TO_WAVEVECTOR) -
                    numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR))
            mins, maxs = sapi.ConvertToMDMinMaxLocal(wn_reduced,
                                                     Qdimensions='|Q|',
                                                     dEAnalysisMode='Direct')
            self._qbins.insert(0, mins[0])  # prepend minimum Q
            self._qbins.append(maxs[0])  # append maximum Q

        # Delete sample and empty can event workspaces to free memory.
        if self._clean:
            sapi.DeleteWorkspace(wn_data)
            if self._ecruns:
                sapi.DeleteWorkspace(wn_ec_data)

        # Convert to S(theta,E)
        ki = numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR)
        # If dE is the smallest energy transfer considered,
        # then dQ/ki is the smallest dtheta (in radians)
        dtheta = self._qbins[1] / ki * (180.0 / numpy.pi)
        # Use a finer dtheta than the nominal smallest value
        factor = 1. / 5  # a reasonable (heuristic) value
        dtheta *= factor
        # Fix: a very small dtheta (<0.15 degrees) prevents correct interpolation
        dtheta = max(0.15, dtheta)
        # Group detectors according to theta angle for the sample runs
        group_file_os_handle, group_file_name = mkstemp(suffix='.xml')
        group_file_handle = os.fdopen(group_file_os_handle, 'w')
        sapi.GenerateGroupingPowder(InputWorkspace=wn_reduced,
                                    AngleStep=dtheta,
                                    GroupingFilename=group_file_name)
        group_file_handle.close()
        sapi.GroupDetectors(InputWorkspace=wn_reduced,
                            MapFile=group_file_name,
                            OutputWorkspace=wn_ste)
        # Group detectors according to theta angle for the empty can run
        if self._ecruns:
            sapi.GroupDetectors(InputWorkspace=wn_ec_reduced,
                                MapFile=group_file_name,
                                OutputWorkspace=wn_ec_ste)
            # Subtract the empty can from the can+sample
            sapi.Minus(LHSWorkspace=wn_ste,
                       RHSWorkspace=wn_ec_ste,
                       OutputWorkspace=wn_ste)

        # Normalize by the vanadium intensity, but before that we need S(theta)
        # for the vanadium. Recall every detector has all energies into a single
        # bin, so we get S(theta) instead of S(theta,E)
        sapi.GroupDetectors(InputWorkspace=wn_van,
                            MapFile=group_file_name,
                            OutputWorkspace=wn_van_st)
        # Divide by vanadium. Make sure it is integrated in the energy domain
        sapi.Integration(wn_van_st, OutputWorkspace=wn_van_st)
        sapi.Divide(wn_ste, wn_van_st, OutputWorkspace=wn_sten)
        sapi.ClearMaskFlag(Workspace=wn_sten)

        # Temporary file generated by GenerateGroupingPowder to be removed
        os.remove(group_file_name)  # no need for this file
        os.remove(os.path.splitext(group_file_name)[0] + ".par")

        max_i_theta = 0.0
        min_i_theta = 0.0

        # Linear interpolation for those theta values with low intensity
        # First, find minimum theta index with a non-zero histogram
        ws_sten = sapi.mtd[wn_sten]

        for i_theta in range(ws_sten.getNumberHistograms()):
            if ws_sten.dataY(i_theta).any():
                min_i_theta = i_theta
                break
        # second, find maximum theta with a non-zero histogram
        for i_theta in range(ws_sten.getNumberHistograms() - 1, -1, -1):
            if ws_sten.dataY(i_theta).any():
                max_i_theta = i_theta
                break

        # Scan a range of theta angles and apply interpolation to those theta angles
        # with considerably low intensity (gaps)
        delta_theta = max_i_theta - min_i_theta
        gaps = self._findGaps(wn_sten, int(min_i_theta + 0.1 * delta_theta),
                              int(max_i_theta - 0.1 * delta_theta))
        sapi.CloneWorkspace(InputWorkspace=wn_sten, OutputWorkspace=wn_steni)
        for gap in gaps:
            self._interpolate(wn_steni, gap)  # interpolate this gap

        # Convert S(theta,E) to S(Q,E), then rebin in |Q| and E to MD workspace
        sapi.ConvertToMD(InputWorkspace=wn_steni,
                         QDimensions='|Q|',
                         dEAnalysisMode='Direct',
                         OutputWorkspace=wn_sqe)
        Qmin = self._qbins[0]
        Qmax = self._qbins[-1]
        dQ = self._qbins[1]
        Qrange = '|Q|,{0},{1},{2}'.format(Qmin, Qmax, int((Qmax - Qmin) / dQ))
        Ei_min = self._ebins[0]
        Ei_max = self._ebins[-1]
        dE = self._ebins[1]
        deltaErange = 'DeltaE,{0},{1},{2}'.format(Ei_min, Ei_max,
                                                  int((Ei_max - Ei_min) / dE))
        sapi.BinMD(InputWorkspace=wn_sqe,
                   AxisAligned=1,
                   AlignedDim0=Qrange,
                   AlignedDim1=deltaErange,
                   OutputWorkspace=wn_sqeb)

        # Slice the data by transforming to a MatrixWorkspace,
        # with deltaE along the vertical axis
        sapi.ConvertMDHistoToMatrixWorkspace(
            InputWorkspace=wn_sqeb,
            Normalization='NumEventsNormalization',
            OutputWorkspace=wn_sqes)

        # Ensure correct units
        sapi.mtd[wn_sqes].getAxis(0).setUnit("MomentumTransfer")
        sapi.mtd[wn_sqes].getAxis(1).setUnit("DeltaE")

        # Shift the energy axis, since the reported values should be the center
        # of the bins, instead of the minimum bin boundary
        ws_sqes = sapi.mtd[wn_sqes]
        Eaxis = ws_sqes.getAxis(1)
        e_shift = self._ebins[1] / 2.0
        for i in range(Eaxis.length()):
            Eaxis.setValue(i, Eaxis.getValue(i) + e_shift)

        # Normalize each slice, if requested
        if self._snorm:
            sapi.Integration(InputWorkspace=wn_sqes, OutputWorkspace=wn_sqesn)
            sapi.Divide(LHSWorkspace=wn_sqes,
                        RHSWorkspace=wn_sqesn,
                        OutputWorkspace=wn_sqes)

        # Clean up workspaces from intermediate steps
        if self._clean:
            for name in (wn_van, wn_reduced, wn_ste, wn_van_st, wn_sten,
                         wn_steni, wn_sqe, wn_sqeb, wn_sqesn,
                         'PreprocessedDetectorsWS'):
                if sapi.mtd.doesExist(name):
                    sapi.DeleteWorkspace(name)

        # Output some info as a Notice in the log
        ebins = ', '.join(['{0:.2f}'.format(x) for x in self._ebins])
        qbins = ', '.join(['{0:.2f}'.format(x) for x in self._qbins])
        tbins = '{0:.2f} {1:.2f} {2:.2f}'.format(min_i_theta * dtheta, dtheta,
                                                 max_i_theta * dtheta)
        message = '\n******  SOME OUTPUT INFORMATION ***' + \
                  '\nEnergy bins: ' + ebins + \
                  '\nQ bins: ' + qbins + \
                  '\nTheta bins: '+tbins
        kapi.logger.notice(message)

        self.setProperty("OutputWorkspace", sapi.mtd[wn_sqes])
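The erange_shift logic above can be exercised on its own; with an invented [Estart, Ewidth, Eend] triad, the shift leaves E=0 a half-integer number of bin widths from Estart, i.e. at the centre of a bin:

ebins = [-5.2, 1.0, 9.7]   # [Estart, Ewidth, Eend] (illustrative)

ratio = ebins[0] / ebins[1]
remainder = ratio - int(ratio)
if remainder >= 0.0:
    shift = ebins[1] * (0.5 - remainder)
else:
    shift = ebins[1] * (-0.5 - remainder)
ebins[0] += shift
ebins[-1] += shift
print((0.0 - ebins[0]) / ebins[1])  # -> 5.5: E=0 sits at a bin centre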
Code example #11
    def PyExec(self):
        ms.ExtractSingleSpectrum(InputWorkspace=self._input_ws,
                                 OutputWorkspace=self._output_ws,
                                 WorkspaceIndex=self._spec_idx)

        # Performs corrections
        self._define_corrections()

        # The workspaces to fit for correction scale factors
        fit_corrections = [
            wks for wks in self._correction_workspaces
            if 'MultipleScattering' not in wks
        ]

        # Perform fitting of corrections
        fixed_params = {}

        fixed_gamma_factor = self.getProperty("GammaBackgroundScale").value
        if fixed_gamma_factor != 0.0 and not self._back_scattering:
            fixed_params['GammaBackground'] = fixed_gamma_factor

        fixed_container_scale = self.getProperty("ContainerScale").value
        if fixed_container_scale != 0.0:
            fixed_params['Container'] = fixed_container_scale

        params_ws = self._fit_corrections(fit_corrections,
                                          self._linear_fit_table,
                                          **fixed_params)
        self.setProperty("LinearFitResult", params_ws)

        # Scale gamma background
        if self.getProperty(
                "GammaBackground").value and not self._back_scattering:
            gamma_correct_ws = self._get_correction_workspace(
                'GammaBackground')[1]
            gamma_factor = self._get_correction_scale_factor(
                'GammaBackground', fit_corrections, params_ws)
            ms.Scale(InputWorkspace=gamma_correct_ws,
                     OutputWorkspace=gamma_correct_ws,
                     Factor=gamma_factor)

        # Scale multiple scattering
        if self.getProperty("MultipleScattering").value:
            # Use factor of total scattering as this includes single and multiple scattering
            multi_scatter_correct_ws = self._get_correction_workspace(
                'MultipleScattering')[1]
            total_scatter_correct_ws = self._get_correction_workspace(
                'TotalScattering')[1]
            total_scatter_factor = self._get_correction_scale_factor(
                'TotalScattering', fit_corrections, params_ws)
            ms.Scale(InputWorkspace=multi_scatter_correct_ws,
                     OutputWorkspace=multi_scatter_correct_ws,
                     Factor=total_scatter_factor)
            ms.Scale(InputWorkspace=total_scatter_correct_ws,
                     OutputWorkspace=total_scatter_correct_ws,
                     Factor=total_scatter_factor)

        # Scale by container
        if self._container_ws != "":
            container_correct_ws = self._get_correction_workspace(
                'Container')[1]
            container_factor = self._get_correction_scale_factor(
                'Container', fit_corrections, params_ws)
            ms.Scale(InputWorkspace=container_correct_ws,
                     OutputWorkspace=container_correct_ws,
                     Factor=container_factor)

        # Calculate and output corrected workspaces as a WorkspaceGroup
        if self._corrected_wsg != "":
            corrected_workspaces = [
                ws_name.replace(self._correction_wsg, self._corrected_wsg)
                for ws_name in self._correction_workspaces
            ]
            for corrected, correction in zip(corrected_workspaces,
                                             self._correction_workspaces):
                ms.Minus(LHSWorkspace=self._output_ws,
                         RHSWorkspace=correction,
                         OutputWorkspace=corrected)
            ms.GroupWorkspaces(InputWorkspaces=corrected_workspaces,
                               OutputWorkspace=self._corrected_wsg)
            self.setProperty("CorrectedWorkspaces", self._corrected_wsg)

        # Apply corrections
        for correction in self._correction_workspaces:
            if 'TotalScattering' not in correction:
                ms.Minus(LHSWorkspace=self._output_ws,
                         RHSWorkspace=correction,
                         OutputWorkspace=self._output_ws)

        self.setProperty("OutputWorkspace", self._output_ws)

        # Remove correction workspaces if they are no longer required
        if self._correction_wsg == "":
            for wksp in self._correction_workspaces:
                ms.DeleteWorkspace(wksp)
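The final loop above subtracts every correction except TotalScattering from the output; the same bookkeeping in plain arrays (names and values invented):

import numpy as np

output = np.array([10.0, 10.0, 10.0])
corrections = {
    "GammaBackground": np.array([1.0, 0.5, 0.2]),
    "MultipleScattering": np.array([0.8, 0.8, 0.8]),
    "TotalScattering": np.array([2.0, 2.0, 2.0]),  # used only for scaling, never subtracted
}

for name, correction in corrections.items():
    if "TotalScattering" not in name:
        output = output - correction
print(output)  # -> [8.2 8.7 9. ]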
Code example #12
File: SANSReduction.py  Project: chatcannon/mantid
    def _py_exec(self):
        filename = self.getProperty("Filename").value
        output_ws = self.getPropertyValue("OutputWorkspace")
        property_manager_name = self.getProperty("ReductionProperties").value
        property_manager = PropertyManagerDataService.retrieve(
            property_manager_name)

        property_list = [p.name for p in property_manager.getProperties()]

        output_msg = ""
        # Find the beam center
        if "SANSBeamFinderAlgorithm" in property_list:
            p = property_manager.getProperty("SANSBeamFinderAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            if alg.existsProperty("OutputMessage"):
                output_msg += alg.getProperty("OutputMessage").value + '\n'

        # Load the sample data
        msg = self._multiple_load(filename, output_ws, property_manager,
                                  property_manager_name)
        output_msg += "Loaded %s\n" % filename
        output_msg += msg

        # Perform the main corrections on the sample data
        output_msg += self.process_data_file(output_ws)

        # Sample data transmission correction
        beam_center_x = None
        beam_center_y = None
        if "TransmissionBeamCenterAlgorithm" in property_list:
            # Execute the beam finding algorithm and set the beam
            # center for the transmission calculation
            p = property_manager.getProperty("TransmissionBeamCenterAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            beam_center_x = alg.getProperty("FoundBeamCenterX").value
            beam_center_y = alg.getProperty("FoundBeamCenterY").value

        if "TransmissionAlgorithm" in property_list:
            p = property_manager.getProperty("TransmissionAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            alg.setProperty("InputWorkspace", output_ws)
            alg.setProperty("OutputWorkspace", output_ws)

            if alg.existsProperty("BeamCenterX") \
                and alg.existsProperty("BeamCenterY") \
                and beam_center_x is not None \
                and beam_center_y is not None:
                alg.setProperty("BeamCenterX", beam_center_x)
                alg.setProperty("BeamCenterY", beam_center_y)

            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()

            if alg.existsProperty("MeasuredTransmission"):
                meas_trans = alg.getProperty("MeasuredTransmission").value
                if property_manager.existsProperty(
                        "MeasuredTransmissionValue"):
                    property_manager.setProperty("MeasuredTransmissionValue",
                                                 meas_trans)
                else:
                    property_manager.declareProperty(
                        "MeasuredTransmissionValue", meas_trans)
            if alg.existsProperty("MeasuredError"):
                meas_err = alg.getProperty("MeasuredError").value
                if property_manager.existsProperty(
                        "MeasuredTransmissionError"):
                    property_manager.setProperty("MeasuredTransmissionError",
                                                 meas_err)
                else:
                    property_manager.declareProperty(
                        "MeasuredTransmissionError", meas_err)

            if alg.existsProperty("OutputMessage"):
                output_msg += alg.getProperty("OutputMessage").value + '\n'

        # Process background data
        if "BackgroundFiles" in property_list:
            background = property_manager.getProperty("BackgroundFiles").value
            background_ws = "__background_%s" % output_ws
            msg = self._multiple_load(background, background_ws,
                                      property_manager, property_manager_name)
            bck_msg = "Loaded background %s\n" % background
            bck_msg += msg

            # Process background like we processed the sample data
            bck_msg += self.process_data_file(background_ws)

            trans_beam_center_x = None
            trans_beam_center_y = None
            if "BckTransmissionBeamCenterAlgorithm" in property_list:
                # Execute the beam finding algorithm and set the beam
                # center for the transmission calculation
                p = property_manager.getProperty(
                    "BckTransmissionBeamCenterAlgorithm")
                alg = Algorithm.fromString(p.valueAsStr)
                if alg.existsProperty("ReductionProperties"):
                    alg.setProperty("ReductionProperties",
                                    property_manager_name)
                alg.execute()
                trans_beam_center_x = alg.getProperty("FoundBeamCenterX").value
                trans_beam_center_y = alg.getProperty("FoundBeamCenterY").value

            # Background transmission correction
            if "BckTransmissionAlgorithm" in property_list:
                p = property_manager.getProperty("BckTransmissionAlgorithm")
                alg = Algorithm.fromString(p.valueAsStr)
                alg.setProperty("InputWorkspace", background_ws)
                alg.setProperty("OutputWorkspace",
                                '__' + background_ws + "_reduced")

                if alg.existsProperty("BeamCenterX") \
                    and alg.existsProperty("BeamCenterY") \
                    and trans_beam_center_x is not None \
                    and trans_beam_center_y is not None:
                    alg.setProperty("BeamCenterX", trans_beam_center_x)
                    alg.setProperty("BeamCenterY", trans_beam_center_y)

                if alg.existsProperty("ReductionProperties"):
                    alg.setProperty("ReductionProperties",
                                    property_manager_name)
                alg.execute()

                if alg.existsProperty("MeasuredTransmission"):
                    meas_trans = alg.getProperty("MeasuredTransmission").value
                    if property_manager.existsProperty(
                            "MeasuredBckTransmissionValue"):
                        property_manager.setProperty(
                            "MeasuredBckTransmissionValue", meas_trans)
                    else:
                        property_manager.declareProperty(
                            "MeasuredBckTransmissionValue", meas_trans)
                if alg.existsProperty("MeasuredError"):
                    meas_err = alg.getProperty("MeasuredError").value
                    if property_manager.existsProperty(
                            "MeasuredBckTransmissionError"):
                        property_manager.setProperty(
                            "MeasuredBckTransmissionError", meas_err)
                    else:
                        property_manager.declareProperty(
                            "MeasuredBckTransmissionError", meas_err)

                if alg.existsProperty("OutputMessage"):
                    output_msg += alg.getProperty("OutputMessage").value + '\n'
                else:
                    output_msg += "Transmission correction applied\n"
                background_ws = '__' + background_ws + '_reduced'

            # Subtract background
            api.RebinToWorkspace(WorkspaceToRebin=background_ws,
                                 WorkspaceToMatch=output_ws,
                                 OutputWorkspace=background_ws + '_rebin',
                                 PreserveEvents=True)
            api.Minus(LHSWorkspace=output_ws,
                      RHSWorkspace=background_ws + '_rebin',
                      OutputWorkspace=output_ws)

            bck_msg = bck_msg.replace('\n', '\n   |')
            output_msg += "Background subtracted [%s]\n   %s\n" % (
                background_ws, bck_msg)

        # Absolute scale correction
        output_msg += self._simple_execution("AbsoluteScaleAlgorithm",
                                             output_ws)

        # Geometry correction
        output_msg += self._simple_execution("GeometryAlgorithm", output_ws)

        # Compute I(q)
        iq_output = None
        if "IQAlgorithm" in property_list:
            iq_output = self.getPropertyValue("OutputWorkspace")
            iq_output = iq_output + '_Iq'
            p = property_manager.getProperty("IQAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            alg.setProperty("InputWorkspace", output_ws)
            alg.setProperty("OutputWorkspace", iq_output)
            alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            if alg.existsProperty("OutputMessage"):
                output_msg += alg.getProperty("OutputMessage").value + '\n'

        # Compute I(qx,qy)
        iqxy_output = None
        if "IQXYAlgorithm" in property_list:
            iq_output_name = self.getPropertyValue("OutputWorkspace")
            iqxy_output = iq_output_name + '_Iqxy'
            p = property_manager.getProperty("IQXYAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            alg.setProperty("InputWorkspace", output_ws)
            alg.setProperty("OutputWorkspace", iq_output_name)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            if alg.existsProperty("OutputMessage"):
                output_msg += alg.getProperty("OutputMessage").value + '\n'

        # Verify output directory and save data
        if "OutputDirectory" in property_list:
            output_dir = property_manager.getProperty("OutputDirectory").value
            #if len(output_dir)==0:
            #    output_dir = os.path.dirname(filename)
            if os.path.isdir(output_dir):
                # Check whether we were in frame-skipping mode
                if iq_output is not None \
                and not AnalysisDataService.doesExist(iq_output):
                    for i in [1, 2]:
                        iq_frame = iq_output.replace('_Iq', '_frame%s_Iq' % i)
                        iqxy_frame = None
                        if iqxy_output is not None:
                            iqxy_frame = iqxy_output.replace(
                                '_Iqxy', '_frame%s_Iqxy' % i)
                        if AnalysisDataService.doesExist(iq_frame):
                            output_msg += self._save_output(
                                iq_frame, iqxy_frame, output_dir,
                                property_manager)
                else:
                    output_msg += self._save_output(iq_output, iqxy_output,
                                                    output_dir,
                                                    property_manager)
                Logger("SANSReduction").notice("Output saved in %s" %
                                               output_dir)
            elif len(output_dir) > 0:
                msg = "Output directory doesn't exist: %s\n" % output_dir
                Logger("SANSReduction").error(msg)

        self.setProperty("OutputMessage", output_msg)
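The background branch above rebins to the sample binning before Minus; reduced to numpy, that is roughly an interpolation onto the sample grid followed by a subtraction (grids and shapes invented, and np.interp is only a crude stand-in for RebinToWorkspace):

import numpy as np

sample_x = np.linspace(0.0, 10.0, 11)   # sample grid (illustrative)
sample_y = np.full_like(sample_x, 5.0)
bck_x = np.linspace(0.0, 10.0, 21)      # finer background grid (illustrative)
bck_y = 0.1 * bck_x                     # made-up background shape

bck_on_sample_grid = np.interp(sample_x, bck_x, bck_y)
subtracted = sample_y - bck_on_sample_grid
print(subtracted)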
Code example #13
    def process_vanadium(self,
                         vanadium,
                         empty,
                         panel,
                         height,
                         radius,
                         cycle_van="09_3",
                         cycle_empty="09_3"):
        user_data_directory = self.use_folder + cycle_van + '/'
        self.set_data_directory(user_data_directory)
        self.datafile = self.get_file_name(vanadium, "raw")
        vanadium_ws = self.read(vanadium, panel, "raw")
        user_data_directory = self.use_folder + cycle_empty + '/'
        self.set_data_directory(user_data_directory)
        self.datafile = self.get_file_name(empty, "raw")
        empty_ws = self.read(empty, panel, "raw")
        simple.Minus(LHSWorkspace=vanadium_ws,
                     RHSWorkspace=empty_ws,
                     OutputWorkspace=vanadium_ws)
        simple.DeleteWorkspace(empty_ws)
        absorption_corrections(4.8756, height, 0.07118, radius, 5.16,
                               vanadium_ws)
        vanfoc = self.focus(vanadium_ws, panel)

        panel_crop = {
            1: (0.95, 53.3),
            2: (0.58, 13.1),
            3: (0.44, 7.77),
            4: (0.38, 5.86),
            5: (0.35, 4.99),
            6: (0.35, 4.99),
            7: (0.38, 5.86),
            8: (0.44, 7.77),
            9: (0.58, 13.1),
            10: (0.95, 53.3)
        }
        d_min, d_max = panel_crop.get(panel)
        simple.CropWorkspace(InputWorkspace=vanfoc,
                             OutputWorkspace=vanfoc,
                             XMin=d_min,
                             XMax=d_max)
        spline_coefficient = {
            1: 120,
            2: 120,
            3: 120,
            4: 130,
            5: 140,
            6: 140,
            7: 130,
            8: 120,
            9: 120,
            10: 120
        }
        simple.SplineBackground(InputWorkspace=vanfoc,
                                OutputWorkspace=vanfoc,
                                NCoeff=spline_coefficient.get(panel))
        smoothing_coefficient = "30" if panel == 3 else "40"
        simple.SmoothData(InputWorkspace=vanfoc,
                          OutputWorkspace=vanfoc,
                          NPoints=smoothing_coefficient)
        return
Code example #14
    def PyExec(self):
        config['default.facility'] = 'SNS'
        config['default.instrument'] = 'ARCS'
        self._runs = self.getProperty('RunNumbers').value
        self._vanfile = self.getProperty('Vanadium').value
        self._ecruns = self.getProperty('EmptyCanRunNumbers').value
        self._ebins_str = self.getProperty('EnergyBins').value
        self._qbins_str = self.getProperty('MomentumTransferBins').value
        self._snorm = self.getProperty('NormalizeSlices').value
        self._clean = self.getProperty('CleanWorkspaces').value
        wn_sqes = self.getPropertyValue("OutputWorkspace")

        # workspace names
        prefix = ''
        if self._clean:
            prefix = '__'
        # Sample files
        wn_data = prefix + 'data'
        wn_van = prefix + 'vanadium'
        wn_reduced = prefix + 'reduced'
        wn_ste = prefix + 'S_theta_E'
        wn_van_st = prefix + 'vanadium_S_theta'
        wn_sten = prefix + 'S_theta_E_normalized'
        wn_steni = prefix + 'S_theta_E_normalized_interp'
        wn_sqe = prefix + 'S_Q_E'
        wn_sqeb = prefix + 'S_Q_E_binned'
        wn_sqesn = prefix + wn_sqes + '_norm'
        # Empty can files
        wn_ec_data = prefix + 'ec_data'
        wn_ec_reduced = prefix + 'ec_reduced'
        wn_ec_ste = prefix + 'ec_S_theta_E'

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Load several event files into a single workspace. The nominal incident
        # energy should be the same to avoid difference in energy resolution
        api.Load(Filename=self._runs, OutputWorkspace=wn_data)

        # Load the vanadium file, assumed to be preprocessed, meaning that
        # for every detector all events within a particular wide wavelength
        # range have been rebinned into a single histogram
        api.Load(Filename=self._vanfile, OutputWorkspace=wn_van)

        # Load empty can event files, if present
        if self._ecruns:
            api.Load(Filename=self._ecruns, OutputWorkspace=wn_ec_data)

        # Retrieve the mask from the vanadium workspace, and apply it to the data
        # (and empty can, if submitted)
        api.MaskDetectors(Workspace=wn_data, MaskedWorkspace=wn_van)
        if self._ecruns:
            api.MaskDetectors(Workspace=wn_ec_data, MaskedWorkspace=wn_van)

        # Obtain incident energy as the mean of the nominal Ei values.
        # There is one nominal value per events file.
        ws_data = api.mtd[wn_data]
        Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
        Ei_std = ws_data.getRun()['EnergyRequest'].getStatistics(
        ).standard_deviation

        # Verify empty can runs were obtained at similar energy
        if self._ecruns:
            ws_ec_data = api.mtd[wn_ec_data]
            ec_Ei = ws_ec_data.getRun()['EnergyRequest'].getStatistics().mean
            if abs(Ei - ec_Ei) > Ei_std:
                raise RuntimeError(
                    'Empty can runs were obtained at a significantly' +
                    ' different incident energy than the sample runs')

        # Obtain energy range
        self._ebins = [
            float(x)
            for x in re.compile(r'\d+[\.\d+]*').findall(self._ebins_str)
        ]
        if len(self._ebins) == 1:
            ws_data = api.mtd[wn_data]
            Ei = ws_data.getRun()['EnergyRequest'].getStatistics().mean
            self._ebins.insert(0, -0.5 * Ei)  # prepend
            self._ebins.append(0.95 * Ei)  # append

        # Enforce that the elastic energy (E=0) lies in the middle of the
        # central bin with an appropriate small shift in the energy range
        Ei_min_reduced = self._ebins[0] / self._ebins[1]
        remainder = Ei_min_reduced - int(Ei_min_reduced)
        if remainder >= 0.0:
            erange_shift = self._ebins[1] * (0.5 - remainder)
        else:
            erange_shift = self._ebins[1] * (-0.5 - remainder)
        self._ebins[0] += erange_shift  # shift minimum energy
        self._ebins[-1] += erange_shift  # shift maximum energy

        # Convert to energy transfer. Normalize by proton charge.
        # The output workspace is S(detector-id,E)
        factor = 0.1  # a fine energy bin
        Erange = '{0},{1},{2}'.format(self._ebins[0], factor * self._ebins[1],
                                      self._ebins[2])
        api.DgsReduction(SampleInputWorkspace=wn_data,
                         EnergyTransferRange=Erange,
                         OutputWorkspace=wn_reduced)
        if self._ecruns:
            api.DgsReduction(SampleInputWorkspace=wn_ec_data,
                             EnergyTransferRange=Erange,
                             IncidentBeamNormalisation='ByCurrent',
                             OutputWorkspace=wn_ec_reduced)

        # Obtain maximum and minimum |Q| values, as well as dQ if none passed
        self._qbins = [
            float(x)
            for x in re.compile(r'\d+[\.\d+]*').findall(self._qbins_str)
        ]
        if len(self._qbins) < 3:
            if not self._qbins:
                # insert dQ if empty qbins
                dE = self._ebins[1]
                self._qbins.append(
                    numpy.sqrt((Ei + dE) / ENERGY_TO_WAVEVECTOR) -
                    numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR))
            mins, maxs = api.ConvertToMDMinMaxLocal(wn_reduced,
                                                    Qdimensions='|Q|',
                                                    dEAnalysisMode='Direct')
            self._qbins.insert(0, mins[0])  # prepend minimum Q
            self._qbins.append(maxs[0])  # append maximum Q

        # Clean up the events files. They take a lot of space in memory
        api.DeleteWorkspace(wn_data)
        if self._ecruns:
            api.DeleteWorkspace(wn_ec_data)

        # Convert to S(theta,E)
        ki = numpy.sqrt(Ei / ENERGY_TO_WAVEVECTOR)
        factor = 1. / 5  # a reasonable (heuristic) value
        # If dE is the smallest energy transfer considered,
        # then dQ/ki is the smallest dtheta (in radians)
        dtheta = factor * self._qbins[1] / ki * (180.0 / numpy.pi)
        # very small dtheta (<0.15 degrees) prevents interpolation
        dtheta = max(0.15, dtheta)
        group_file_os_handle, group_file_name = mkstemp(suffix='.xml')
        group_file_handle = os.fdopen(group_file_os_handle, 'w')
        api.GenerateGroupingPowder(InputWorkspace=wn_reduced,
                                   AngleStep=dtheta,
                                   GroupingFilename=group_file_name)
        group_file_handle.close()
        api.GroupDetectors(InputWorkspace=wn_reduced,
                           MapFile=group_file_name,
                           OutputWorkspace=wn_ste)
        if self._ecruns:
            api.GroupDetectors(InputWorkspace=wn_ec_reduced,
                               MapFile=group_file_name,
                               OutputWorkspace=wn_ec_ste)
            # Subtract the empty can from the can+sample
            api.Minus(LHSWorkspace=wn_ste,
                      RHSWorkspace=wn_ec_ste,
                      OutputWorkspace=wn_ste)

        # Normalize by the vanadium intensity, but before that we need S(theta)
        # for the vanadium. Recall every detector has all energies into a single
        # bin, so we get S(theta) instead of S(theta,E)
        api.GroupDetectors(InputWorkspace=wn_van,
                           MapFile=group_file_name,
                           OutputWorkspace=wn_van_st)
        os.remove(group_file_name)  # no need for this file
        api.Divide(wn_ste, wn_van_st, OutputWorkspace=wn_sten)
        api.ClearMaskFlag(Workspace=wn_sten)

        max_i_theta = 0.0
        min_i_theta = 0.0

        # Linear interpolation
        # First, find minimum theta index with a non-zero histogram
        ws_sten = api.mtd[wn_sten]
        for i_theta in range(ws_sten.getNumberHistograms()):
            if ws_sten.dataY(i_theta).any():
                min_i_theta = i_theta
                break
        # second, find maximum theta with a non-zero histogram
        for i_theta in range(ws_sten.getNumberHistograms() - 1, -1, -1):
            if ws_sten.dataY(i_theta).any():
                max_i_theta = i_theta
                break
        # Scan the region [min_i_theta, max_i_theta] and apply interpolation to
        # theta angles with no signal whatsoever, S(theta*, E)=0.0 for all energies
        api.CloneWorkspace(InputWorkspace=wn_sten, OutputWorkspace=wn_steni)
        ws_steni = api.mtd[wn_steni]
        i_theta = 1 + min_i_theta
        while i_theta < max_i_theta:
            if not ws_steni.dataY(i_theta).any():
                nonnull_i_theta_start = i_theta - 1  # angle index of non-null histogram
                # scan until we find a non-null histogram
                while not ws_steni.dataY(i_theta).any():
                    i_theta += 1
                nonnull_i_theta_end = i_theta  # angle index of non-null histogram
                # The range [1+nonnull_i_theta_start, nonnull_i_theta_end]
                # contains only null-histograms. Interpolate!
                y_start = ws_steni.dataY(nonnull_i_theta_start)
                y_end = ws_steni.dataY(nonnull_i_theta_end)
                intercept = y_start
                slope = (y_end - y_start) / (nonnull_i_theta_end -
                                             nonnull_i_theta_start)
                for null_i_theta in range(1 + nonnull_i_theta_start,
                                          nonnull_i_theta_end):
                    ws_steni.dataY(null_i_theta)[:] = intercept + slope * (
                        null_i_theta - nonnull_i_theta_start)
            i_theta += 1

        # Convert S(theta,E) to S(Q,E), then rebin in |Q| and E to MD workspace
        api.ConvertToMD(InputWorkspace=wn_steni,
                        QDimensions='|Q|',
                        dEAnalysisMode='Direct',
                        OutputWorkspace=wn_sqe)
        Qmin = self._qbins[0]
        Qmax = self._qbins[-1]
        dQ = self._qbins[1]
        Qrange = '|Q|,{0},{1},{2}'.format(Qmin, Qmax, int((Qmax - Qmin) / dQ))
        Ei_min = self._ebins[0]
        Ei_max = self._ebins[-1]
        dE = self._ebins[1]
        deltaErange = 'DeltaE,{0},{1},{2}'.format(Ei_min, Ei_max,
                                                  int((Ei_max - Ei_min) / dE))
        api.BinMD(InputWorkspace=wn_sqe,
                  AxisAligned=1,
                  AlignedDim0=Qrange,
                  AlignedDim1=deltaErange,
                  OutputWorkspace=wn_sqeb)

        # Slice the data by transforming to a MatrixWorkspace, with deltaE along the vertical axis
        api.ConvertMDHistoToMatrixWorkspace(
            InputWorkspace=wn_sqeb,
            Normalization='NumEventsNormalization',
            OutputWorkspace=wn_sqes)

        # Shift the energy axis, since the reported values should be the center
        # of the bins, instead of the minimum bin boundary
        ws_sqes = api.mtd[wn_sqes]
        Eaxis = ws_sqes.getAxis(1)
        e_shift = self._ebins[1] / 2.0
        for i in range(Eaxis.length()):
            Eaxis.setValue(i, Eaxis.getValue(i) + e_shift)

        # Normalize each slice
        if self._snorm:
            api.Integration(InputWorkspace=wn_sqes, OutputWorkspace=wn_sqesn)
            api.Divide(LHSWorkspace=wn_sqes,
                       RHSWorkspace=wn_sqesn,
                       OutputWorkspace=wn_sqes)

        # Clean up workspaces from intermediate steps
        if self._clean:
            for name in (wn_van, wn_reduced, wn_ste, wn_van_st, wn_sten,
                         wn_steni, wn_sqe, wn_sqeb, wn_sqesn):
                api.DeleteWorkspace(name)
            if api.mtd.doesExist('PreprocessedDetectorsWS'):
                api.DeleteWorkspace('PreprocessedDetectorsWS')

        # Output some info
        message = '\n******  SOME OUTPUT INFORMATION ***' + \
                  '\nEnergy bins: ' + ', '.join(['{0:.2f}'.format(x) for x in self._ebins]) + \
                  '\nQ bins: ' + ', '.join(['{0:.2f}'.format(x) for x in self._qbins]) + \
                  '\nTheta bins: {0:.2f} {1:.2f} {2:.2f}'.format(min_i_theta * dtheta, dtheta, max_i_theta * dtheta)
        logger.notice(message)

        self.setProperty("OutputWorkspace", api.mtd[wn_sqes])
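The in-place gap filling in the while loop above is plain linear interpolation between the bracketing non-null histograms; the same logic reduced to one intensity per theta index (invented data, with zeros marking null histograms between non-null endpoints):

import numpy as np

s_theta = np.array([3.0, 4.0, 0.0, 0.0, 7.0, 8.0])

i = 1
while i < len(s_theta) - 1:
    if s_theta[i] == 0.0:
        start = i - 1                   # last non-null index
        while s_theta[i] == 0.0:
            i += 1
        end = i                         # next non-null index
        slope = (s_theta[end] - s_theta[start]) / (end - start)
        for j in range(start + 1, end):
            s_theta[j] = s_theta[start] + slope * (j - start)
    i += 1
print(s_theta)  # -> [3. 4. 5. 6. 7. 8.]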
Code example #15
def _create_van(instrument,
                van,
                empty,
                output_van_file_name,
                num_of_splines=60,
                absorb=True,
                gen_absorb=False):
    cycle_information = instrument._get_cycle_information(van)

    input_van_ws = _read_ws(number=van, instrument=instrument)
    input_empty_ws = _read_ws(number=empty, instrument=instrument)

    corrected_van_ws = mantid.Minus(LHSWorkspace=input_van_ws,
                                    RHSWorkspace=input_empty_ws)

    remove_intermediate_workspace(input_empty_ws)
    remove_intermediate_workspace(input_van_ws)

    calibration_full_paths = instrument._get_calibration_full_paths(
        cycle=cycle_information["cycle"])
    tof_binning = instrument._get_create_van_tof_binning()

    if absorb:
        corrected_van_ws = _apply_absorb_corrections(calibration_full_paths,
                                                     corrected_van_ws,
                                                     gen_absorb)

    corrected_van_ws = mantid.ConvertUnits(InputWorkspace=corrected_van_ws,
                                           Target="TOF")
    corrected_van_ws = mantid.Rebin(InputWorkspace=corrected_van_ws,
                                    Params=tof_binning["1"])

    corrected_van_ws = mantid.AlignDetectors(
        InputWorkspace=corrected_van_ws,
        CalibrationFile=calibration_full_paths["calibration"])

    focused_van_file = mantid.DiffractionFocussing(
        InputWorkspace=corrected_van_ws,
        GroupingFileName=calibration_full_paths["grouping"])

    focused_van_file = mantid.ConvertUnits(InputWorkspace=focused_van_file,
                                           Target="TOF")

    focused_van_file = mantid.Rebin(InputWorkspace=focused_van_file,
                                    Params=tof_binning["2"])
    focused_van_file = mantid.ConvertUnits(InputWorkspace=focused_van_file,
                                           Target="dSpacing")

    remove_intermediate_workspace(corrected_van_ws)

    splined_ws_list = instrument._spline_background(
        focused_van_file, num_of_splines,
        cycle_information["instrument_version"])

    if instrument._PEARL_use_full_path():
        out_van_file_path = output_van_file_name
    else:
        out_van_file_path = instrument.calibration_dir + output_van_file_name

    append = False
    for ws in splined_ws_list:
        mantid.SaveNexus(Filename=out_van_file_path,
                         InputWorkspace=ws,
                         Append=append)
        remove_intermediate_workspace(ws)
        append = True

    mantid.LoadNexus(Filename=out_van_file_path, OutputWorkspace="Van_data")
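The append flag in the save loop above makes the first SaveNexus call create the file and every later call append to it; the same first-write-then-append pattern in plain Python (file name invented):

entries = ["spline_1", "spline_2", "spline_3"]  # stand-ins for splined_ws_list

append = False
for entry in entries:
    with open("van_out.txt", "a" if append else "w") as handle:
        handle.write(entry + "\n")  # first pass truncates, later passes append
    append = True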