Example #1
def _calibration_processing(calibration_dir, calibration_runs, cross_correlate_params, get_det_offset_params,
                            grouping_file_name, input_ws, instrument, offset_file, rebin_1_params, rebin_2_params):
    calibration_ws = input_ws
    if calibration_ws.getAxis(0).getUnit().unitID() != WORKSPACE_UNITS.d_spacing:
        calibration_ws = mantid.Rebin(InputWorkspace=input_ws, Params=rebin_1_params)
        calibration_ws = mantid.ConvertUnits(InputWorkspace=calibration_ws, Target="dSpacing")
    spectrum_list = []
    for i in range(0, calibration_ws.getNumberHistograms()):
        try:
            calibration_ws.getDetector(i)

        except RuntimeError:
            pass
        else:
            spectrum_list.append(i)
    calibration_ws = mantid.ExtractSpectra(InputWorkspace=calibration_ws, WorkspaceIndexList=spectrum_list)
    rebinned = mantid.Rebin(InputWorkspace=calibration_ws, Params=rebin_2_params)
    cross_correlated = mantid.CrossCorrelate(InputWorkspace=rebinned, **cross_correlate_params)

    # Offsets workspace must be referenced as string so it can be deleted, as simpleapi doesn't recognise it as a ws
    offsets_ws_name = "offsets"
    mantid.GetDetectorOffsets(InputWorkspace=cross_correlated, GroupingFileName=offset_file,
                              OutputWorkspace=offsets_ws_name, **get_det_offset_params)
    rebinned_tof = mantid.ConvertUnits(InputWorkspace=rebinned, Target="TOF")
    aligned = mantid.AlignDetectors(InputWorkspace=rebinned_tof, CalibrationFile=offset_file)
    grouping_file = os.path.join(calibration_dir, grouping_file_name)
    focused = mantid.DiffractionFocussing(InputWorkspace=aligned, GroupingFileName=grouping_file,
                                          OutputWorkspace=instrument._generate_output_file_name(calibration_runs)
                                          + "_grouped")
    print("Saved cal file to " + offset_file)
    common.remove_intermediate_workspace([calibration_ws, rebinned, cross_correlated, rebinned_tof,
                                          offsets_ws_name])
    return focused
Example #2
def _generate_grouped_ts_pdf(focused_ws, q_lims):
    focused_ws = mantid.ConvertUnits(InputWorkspace=focused_ws,
                                     Target="MomentumTransfer",
                                     EMode='Elastic')
    min_x = np.inf
    max_x = -np.inf
    num_x = -np.inf
    for ws in focused_ws:
        x_data = ws.dataX(0)
        min_x = min(np.min(x_data), min_x)
        max_x = max(np.max(x_data), max_x)
        num_x = max(x_data.size, num_x)
    binning = [min_x, (max_x - min_x) / num_x, max_x]
    focused_ws = mantid.Rebin(InputWorkspace=focused_ws, Params=binning)
    focused_data_combined = mantid.ConjoinSpectra(InputWorkspaces=focused_ws)
    mantid.MatchSpectra(InputWorkspace=focused_data_combined,
                        OutputWorkspace=focused_data_combined,
                        ReferenceSpectrum=5)
    if isinstance(q_lims, str):
        q_min = []
        q_max = []
        try:
            with open(q_lims, 'r') as f:
                line_list = [line.rstrip('\n') for line in f]
                for line in line_list[1:]:
                    value_list = line.split()
                    q_min.append(float(value_list[2]))
                    q_max.append(float(value_list[3]))
            q_min = np.array(q_min)
            q_max = np.array(q_max)
        except IOError:
            raise RuntimeError("q_lims is not valid")
    elif isinstance(q_lims, (list, np.ndarray)):
        q_min = q_lims[0, :]
        q_max = q_lims[1, :]
    else:
        raise RuntimeError("q_lims is not valid")
    bin_width = np.inf
    for i in range(q_min.size):
        pdf_x_array = focused_data_combined.readX(i)
        tmp1 = np.where(pdf_x_array >= q_min[i])
        tmp2 = np.amin(tmp1)
        q_min[i] = pdf_x_array[tmp2]
        q_max[i] = pdf_x_array[np.amax(np.where(pdf_x_array <= q_max[i]))]
        bin_width = min(pdf_x_array[1] - pdf_x_array[0], bin_width)
    focused_data_combined = mantid.CropWorkspaceRagged(
        InputWorkspace=focused_data_combined, XMin=q_min, XMax=q_max)
    focused_data_combined = mantid.Rebin(
        InputWorkspace=focused_data_combined,
        Params=[min(q_min), bin_width, max(q_max)])
    focused_data_combined = mantid.SumSpectra(
        InputWorkspace=focused_data_combined,
        WeightedSum=True,
        MultiplyBySpectra=False)
    pdf_output = mantid.PDFFourierTransform(
        InputWorkspace=focused_data_combined,
        InputSofQType="S(Q)",
        PDFType="G(r)",
        Filter=True)
    return pdf_output
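The q_lims argument above is either a 2 x N array of per-bank limits or the path to a whitespace-delimited text file whose first line is a header and whose third and fourth columns hold q_min and q_max. A minimal sketch of both forms; the bank limits and file name are illustrative, inferred from the parser above rather than taken from the original source:

import numpy as np

q_lims_array = np.array([[2.5, 3.0, 4.0],       # q_min for three banks
                         [18.0, 20.0, 22.0]])   # q_max for three banks

with open("q_limits.txt", "w") as f:            # hypothetical file name
    f.write("bank tt_deg q_min q_max\n")        # header line, skipped by the parser
    f.write("1 35.0 2.5 18.0\n")
    f.write("2 52.0 3.0 20.0\n")
    f.write("3 67.0 4.0 22.0\n")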
Example #3
def create_calibration(calibration_runs, instrument, offset_file_name,
                       grouping_file_name, calibration_dir, rebin_1_params,
                       rebin_2_params, cross_correlate_params,
                       get_det_offset_params):
    """
    Create a calibration file from (usually) a ceria run
    :param calibration_runs: Run number(s) for this run
    :param instrument: The PEARL instrument object
    :param offset_file_name: Name of the file to write detector offset information to
    :param grouping_file_name: Name of grouping calibration file
    :param calibration_dir: Path to directory containing calibration information
    :param rebin_1_params: Parameters for the first rebin step (as a string in the usual format)
    :param rebin_2_params: Parameters for the second rebin step (as a string in the usual format)
    :param cross_correlate_params: Parameters for CrossCorrelate (as a dictionary PropertyName: PropertyValue)
    :param get_det_offset_params: Parameters for GetDetectorOffsets (as a dictionary PropertyName: PropertyValue)
    """
    input_ws_list = common.load_current_normalised_ws_list(
        run_number_string=calibration_runs,
        instrument=instrument,
        input_batching=INPUT_BATCHING.Summed)

    input_ws = input_ws_list[0]
    calibration_ws = mantid.Rebin(InputWorkspace=input_ws,
                                  Params=rebin_1_params)

    if calibration_ws.getAxis(
            0).getUnit().unitID() != WORKSPACE_UNITS.d_spacing:
        calibration_ws = mantid.ConvertUnits(InputWorkspace=calibration_ws,
                                             Target="dSpacing")

    rebinned = mantid.Rebin(InputWorkspace=calibration_ws,
                            Params=rebin_2_params)
    cross_correlated = mantid.CrossCorrelate(InputWorkspace=rebinned,
                                             **cross_correlate_params)

    offset_file = os.path.join(calibration_dir, offset_file_name)
    # Offsets workspace must be referenced as string so it can be deleted, as simpleapi doesn't recognise it as a ws
    offsets_ws_name = "offsets"
    mantid.GetDetectorOffsets(InputWorkspace=cross_correlated,
                              GroupingFileName=offset_file,
                              OutputWorkspace=offsets_ws_name,
                              **get_det_offset_params)

    rebinned_tof = mantid.ConvertUnits(InputWorkspace=rebinned, Target="TOF")
    aligned = mantid.AlignDetectors(InputWorkspace=rebinned_tof,
                                    CalibrationFile=offset_file)

    grouping_file = os.path.join(calibration_dir, grouping_file_name)
    focused = mantid.DiffractionFocussing(
        InputWorkspace=aligned,
        GroupingFileName=grouping_file,
        OutputWorkspace=instrument._generate_output_file_name(calibration_runs)
        + "_grouped")

    common.remove_intermediate_workspace([
        calibration_ws, rebinned, cross_correlated, rebinned_tof, aligned,
        offsets_ws_name
    ])
    return focused
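A hedged usage sketch for create_calibration; the run string, file names, calibration directory and the pearl_instrument object are placeholders, and the parameter values are only illustrative (borrowed from the similar hard-coded calls in the later examples), not the values used by the original caller:

cross_correlate_params = {"ReferenceSpectra": 20,
                          "WorkspaceIndexMin": 9,
                          "WorkspaceIndexMax": 1063,
                          "XMin": 1.8, "XMax": 2.1}
get_det_offset_params = {"Step": 0.002,
                         "DReference": 1.912795,
                         "XMin": -200, "XMax": 200}

focused = create_calibration(calibration_runs="98507_98592",          # placeholder run range
                             instrument=pearl_instrument,             # assumed instrument object
                             offset_file_name="pearl_offset.cal",
                             grouping_file_name="pearl_group.cal",
                             calibration_dir="/path/to/calibration",
                             rebin_1_params="100,-0.0006,19950",
                             rebin_2_params="1.8,0.002,2.1",
                             cross_correlate_params=cross_correlate_params,
                             get_det_offset_params=get_det_offset_params)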
Example #4
    def _generate_flux_spectrum(self, run_set, sam_ws):
        r"""
        Retrieve the aggregate flux and create a spectrum of intensities
        versus wavelength such that intensities will be similar for any
        of the possible flux normalization types.

        Parameters
        ----------
        run_set: list
            Run numbers contributing to the aggregated sample
        sam_ws: str
            Name of aggregated sample workspace

        Returns
        -------
        str
            Name of aggregated flux workspace (output workspace)
        """

        flux_binning = [1.5, 0.0005, 7.5]  # wavelength binning
        suffix = re.sub('[^0-9a-zA-Z]+', '_', self._flux_normalization_type)
        flux_ws = tws(self._make_run_name(run_set[0]) + '_' + suffix)
        if self._MonNorm:
            self._sum_monitors(run_set, flux_ws)
            rpf = self._elucidate_reflection_parameter_file(sam_ws)
            sapi.LoadParameterFile(Workspace=flux_ws, Filename=rpf)
            sapi.ModeratorTzeroLinear(InputWorkspace=flux_ws,
                                      OutputWorkspace=flux_ws)
            sapi.Rebin(InputWorkspace=flux_ws,
                       OutputWorkspace=flux_ws,
                       Params='10',  # 10 microseconds TOF bin width
                       PreserveEvents=False)
            sapi.ConvertUnits(InputWorkspace=flux_ws,
                              OutputWorkspace=flux_ws,
                              Target='Wavelength')
            sapi.OneMinusExponentialCor(InputWorkspace=flux_ws,
                                        OutputWorkspace=flux_ws,
                                        C='0.20749999999999999',
                                        C1='0.001276')
            sapi.Scale(InputWorkspace=flux_ws, OutputWorkspace=flux_ws,
                       Factor='1e-06')
            sapi.Rebin(InputWorkspace=flux_ws, OutputWorkspace=flux_ws,
                       Params=flux_binning)
        else:
            ws = mtd[sam_ws].getRun()
            if self._flux_normalization_type == 'Proton Charge':
                aggregate_flux = ws.getProtonCharge()
            elif self._flux_normalization_type == 'Duration':
                aggregate_flux = ws.getProperty('duration').value
            # These factors ensure intensities typical of flux workspaces
            # derived from monitor data
            f = {'Proton Charge': 0.00874, 'Duration': 0.003333}
            x = np.arange(flux_binning[0], flux_binning[2], flux_binning[1])
            y = f[self._flux_normalization_type] * \
                aggregate_flux * np.ones(len(x) - 1)
            _flux_ws = sapi.CreateWorkspace(OutputWorkspace=flux_ws, DataX=x,
                                            DataY=y, UnitX='Wavelength')
            _flux_ws.setYUnit(mtd[sam_ws].YUnit())
        return flux_ws
Example #5
def extract_roi(workspace, step='0.01', roi=[162, 175, 112, 145]):
    """
        Returns a spectrum (Counts/proton charge vs lambda) given a Mantid
        workspace (or its name), the lambda step size and the corners of the ROI.

        :param str workspace: Mantid workspace name
        :param float step: wavelength bin width for rebinning
        :param list roi: [x_min, x_max, y_min, y_max] pixels
    """
    _workspace = str(workspace)
    if mantid.mtd[_workspace].getRun()['gd_prtn_chrg'].value > 0:
        api.NormaliseByCurrent(InputWorkspace=_workspace,
                               OutputWorkspace=_workspace)
    api.ConvertUnits(InputWorkspace=_workspace,
                     Target='Wavelength',
                     OutputWorkspace=_workspace)
    api.Rebin(InputWorkspace=_workspace,
              Params=step,
              OutputWorkspace=_workspace)
    api.RefRoi(InputWorkspace=_workspace,
               NXPixel=304,
               NYPixel=256,
               SumPixels=True,
               XPixelMin=roi[0],
               XPixelMax=roi[1],
               YPixelMin=roi[2],
               YPixelMax=roi[3],
               IntegrateY=True,
               ConvertToQ=False,
               OutputWorkspace=_workspace)
    api.SumSpectra(InputWorkspace=_workspace, OutputWorkspace=_workspace)
    return _workspace
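A minimal usage sketch, assuming a workspace named "REF_M_12345" (placeholder) is already loaded in the analysis data service:

spectrum_ws = extract_roi("REF_M_12345", step='0.01', roi=[162, 175, 112, 145])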
Example #6
 def _group_and_SofQW(self, wsName, etRebins, isSample=True):
     """ Transforms from wavelength and detector ID to S(Q,E)
     @param wsName: workspace as a function of wavelength and detector id
     @return: S(Q,E)
     """
     api.ConvertUnits(InputWorkspace=wsName,
                      OutputWorkspace=wsName,
                      Target='DeltaE',
                      EMode='Indirect')
     api.CorrectKiKf(InputWorkspace=wsName,
                     OutputWorkspace=wsName,
                     EMode='Indirect')
     api.Rebin(InputWorkspace=wsName,
               OutputWorkspace=wsName,
               Params=etRebins)
     if self._groupDetOpt != "None":
         if self._groupDetOpt == "Low-Resolution":
             grp_file = "BASIS_Grouping_LR.xml"
         else:
             grp_file = "BASIS_Grouping.xml"
         # If mask override used, we need to add default grouping file
         # location to search paths
         if self._overrideMask:
             config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
             api.GroupDetectors(InputWorkspace=wsName,
                                OutputWorkspace=wsName,
                                MapFile=grp_file,
                                Behaviour="Sum")
     wsSqwName = wsName + '_divided_sqw' if isSample and self._doNorm else wsName + '_sqw'
     api.SofQW3(InputWorkspace=wsName,
                OutputWorkspace=wsSqwName,
                QAxisBinning=self._qBins,
                EMode='Indirect',
                EFixed='2.0826')
     return wsSqwName
Example #7
    def test_unit_conversion(self):
        import mantid.simpleapi as mantid
        eventWS = self.base_event_ws
        ws = mantid.Rebin(eventWS, 10000, PreserveEvents=False)
        tmp = scn.mantid.convert_Workspace2D_to_data_array(ws)
        target_tof = tmp.coords['tof']
        ws = mantid.ConvertUnits(InputWorkspace=ws,
                                 Target="Wavelength",
                                 EMode="Elastic")
        converted_mantid = scn.mantid.convert_Workspace2D_to_data_array(ws)

        da = scn.mantid.convert_EventWorkspace_to_data_array(
            eventWS, load_pulse_times=False)
        da = sc.histogram(da, bins=target_tof)
        d = sc.Dataset(data={da.name: da})
        converted = scn.convert(d, 'tof', 'wavelength', scatter=True)

        self.assertTrue(
            np.all(np.isclose(converted_mantid.values, converted[""].values)))
        self.assertTrue(
            np.all(
                np.isclose(
                    converted_mantid.coords['wavelength'].values,
                    converted.coords['wavelength'].values,
                )))
Example #8
    def test_Workspace2D_not_common_bins_masks(self):
        import mantid.simpleapi as mantid
        eventWS = self.base_event_ws
        ws = mantid.Rebin(eventWS, 10000, PreserveEvents=False)
        ws = mantid.ConvertUnits(ws,
                                 "Wavelength",
                                 EMode="Direct",
                                 EFixed=0.1231)

        # these X values will mask different number of bins
        masked_ws = self._mask_bins_and_spectra(ws,
                                                -214,
                                                -192,
                                                num_spectra=3,
                                                indices='0-40')

        self.assertFalse(masked_ws.isCommonBins())

        ds = scn.mantid.convert_Workspace2D_to_data_array(masked_ws)

        # bin with 3 masks
        np.testing.assert_array_equal(ds.masks["bin"].values[0],
                                      [True, True, False])

        # bin with only 2
        np.testing.assert_array_equal(ds.masks["bin"].values[31],
                                      [True, True, False])

        np.testing.assert_array_equal(ds.masks["spectrum"].values[0:3],
                                      [True, True, True])
Example #9
def convert_to_y_space_and_symmetrise(ws_name,mass):
    # phenomenological rule of thumb to define the y-range for a given mass
    max_Y = np.ceil(2.5*mass+27)
    rebin_parameters = str(-max_Y)+","+str(2.*max_Y/120)+","+str(max_Y)
    # converting to y-space, rebinning, and defining a normalisation matrix to take into account the kinetic cut-off
    sapi.ConvertToYSpace(InputWorkspace=ws_name,Mass=mass,OutputWorkspace=ws_name+"_JoY",QWorkspace=ws_name+"_Q")
    ws = sapi.Rebin(InputWorkspace=ws_name+"_JoY", Params = rebin_parameters,FullBinsOnly=True, OutputWorkspace= ws_name+"_JoY")
    tmp=sapi.CloneWorkspace(InputWorkspace=ws_name+"_JoY")
    for j in range(tmp.getNumberHistograms()):
        for k in range(tmp.blocksize()):
            tmp.dataE(j)[k] =0.
            if np.isnan( tmp.dataY(j)[k] ) :
                ws.dataY(j)[k] =0.
                tmp.dataY(j)[k] =0.
            if (tmp.dataY(j)[k]!=0):
                tmp.dataY(j)[k] =1.
    tmp=sapi.SumSpectra('tmp')
    sapi.SumSpectra(InputWorkspace=ws_name+"_JoY",OutputWorkspace=ws_name+"_JoY_sum")
    sapi.Divide(LHSWorkspace=ws_name+"_JoY_sum", RHSWorkspace="tmp", OutputWorkspace =ws_name+"_JoY_sum")
    #rewriting the temporary workspaces ws and tmp
    ws=sapi.mtd[ws_name+"_JoY_sum"]
    tmp=sapi.CloneWorkspace(InputWorkspace=ws_name+"_JoY_sum")
    for k in range(tmp.blocksize()):
        tmp.dataE(0)[k] =(ws.dataE(0)[k]+ws.dataE(0)[ws.blocksize()-1-k])/2.
        tmp.dataY(0)[k] =(ws.dataY(0)[k]+ws.dataY(0)[ws.blocksize()-1-k])/2.
    sapi.RenameWorkspace(InputWorkspace="tmp",OutputWorkspace=ws_name+"_JoY_sym")
    normalise_workspace(ws_name+"_JoY_sym")
    return max_Y
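The symmetrisation loop above averages each bin with its mirror bin about y = 0; a numpy-only illustration of that step (the values are made up, not from the original source):

import numpy as np

y = np.array([0.0, 1.0, 2.0, 4.0, 2.5, 1.5, 0.5])
y_sym = 0.5 * (y + y[::-1])   # same as (y[k] + y[N-1-k]) / 2 for every bin k
print(y_sym)                  # [0.25 1.25 2.25 4.   2.25 1.25 0.25]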
Example #10
    def _calibData(self, sam_ws, mon_ws):
        api.MaskDetectors(Workspace=sam_ws, DetectorList=self._dMask)
        api.ModeratorTzeroLinear(InputWorkspace=sam_ws,
                                 OutputWorkspace=sam_ws)
        api.LoadParameterFile(Workspace=sam_ws,
                              Filename=config.getInstrumentDirectory() +
                              'BASIS_silicon_111_Parameters.xml')
        api.ConvertUnits(InputWorkspace=sam_ws,
                         OutputWorkspace=sam_ws,
                         Target='Wavelength',
                         EMode='Indirect')

        if not self._noMonNorm:
            api.ModeratorTzeroLinear(InputWorkspace=mon_ws,
                                     OutputWorkspace=mon_ws)
            api.Rebin(InputWorkspace=mon_ws,
                      OutputWorkspace=mon_ws,
                      Params='10')
            api.ConvertUnits(InputWorkspace=mon_ws,
                             OutputWorkspace=mon_ws,
                             Target='Wavelength')
            api.OneMinusExponentialCor(InputWorkspace=mon_ws,
                                       OutputWorkspace=mon_ws,
                                       C='0.20749999999999999',
                                       C1='0.001276')
            api.Scale(InputWorkspace=mon_ws,
                      OutputWorkspace=mon_ws,
                      Factor='9.9999999999999995e-07')
            api.RebinToWorkspace(WorkspaceToRebin=sam_ws,
                                 WorkspaceToMatch=mon_ws,
                                 OutputWorkspace=sam_ws)
            api.Divide(LHSWorkspace=sam_ws,
                       RHSWorkspace=mon_ws,
                       OutputWorkspace=sam_ws)
Example #11
 def test_Workspace2D(self):
     # This is from the Mantid system-test data
     filename = 'CNCS_51936_event.nxs'
     eventWS = mantid.LoadEventNexus(filename)
     ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)
     d = mantidcompat.to_dataset(ws)
     print(d)
Example #12
    def _calibData(self, sam_ws, mon_ws):
        sapi.MaskDetectors(Workspace=sam_ws,
                           DetectorList=self._dMask)
        sapi.ModeratorTzeroLinear(InputWorkspace=sam_ws,
                                  OutputWorkspace=sam_ws)
        sapi.LoadParameterFile(Workspace=sam_ws,
                               Filename=pjoin(DEFAULT_CONFIG_DIR,
                                              self._reflection["parameter_file"]))
        sapi.ConvertUnits(InputWorkspace=sam_ws,
                          OutputWorkspace=sam_ws,
                          Target='Wavelength',
                          EMode='Indirect')

        if self._MonNorm:
            sapi.ModeratorTzeroLinear(InputWorkspace=mon_ws,
                                      OutputWorkspace=mon_ws)
            sapi.Rebin(InputWorkspace=mon_ws,
                       OutputWorkspace=mon_ws,
                       Params='10')
            sapi.ConvertUnits(InputWorkspace=mon_ws,
                              OutputWorkspace=mon_ws,
                              Target='Wavelength')
            sapi.OneMinusExponentialCor(InputWorkspace=mon_ws,
                                        OutputWorkspace=mon_ws,
                                        C='0.20749999999999999',
                                        C1='0.001276')
            sapi.Scale(InputWorkspace=mon_ws,
                       OutputWorkspace=mon_ws,
                       Factor='1e-06')
            sapi.RebinToWorkspace(WorkspaceToRebin=sam_ws,
                                  WorkspaceToMatch=mon_ws,
                                  OutputWorkspace=sam_ws)
            sapi.Divide(LHSWorkspace=sam_ws,
                        RHSWorkspace=mon_ws,
                        OutputWorkspace=sam_ws)
Example #13
def generate_ts_pdf(run_number, focus_file_path, merge_banks=False, q_lims=None, cal_file_name=None,
                    sample_details=None, delta_r=None, delta_q=None, pdf_type="G(r)", lorch_filter=None,
                    freq_params=None, debug=False):
    focused_ws = _obtain_focused_run(run_number, focus_file_path)
    focused_ws = mantid.ConvertUnits(InputWorkspace=focused_ws, Target="MomentumTransfer", EMode='Elastic')

    raw_ws = mantid.Load(Filename='POLARIS'+str(run_number)+'.nxs')
    sample_geometry = common.generate_sample_geometry(sample_details)
    sample_material = common.generate_sample_material(sample_details)
    self_scattering_correction = mantid.TotScatCalculateSelfScattering(
        InputWorkspace=raw_ws,
        CalFileName=cal_file_name,
        SampleGeometry=sample_geometry,
        SampleMaterial=sample_material,
        CrystalDensity=sample_details.material_object.crystal_density)

    ws_group_list = []
    for i in range(self_scattering_correction.getNumberHistograms()):
        ws_name = 'correction_' + str(i)
        mantid.ExtractSpectra(InputWorkspace=self_scattering_correction, OutputWorkspace=ws_name,
                              WorkspaceIndexList=[i])
        ws_group_list.append(ws_name)
    self_scattering_correction = mantid.GroupWorkspaces(InputWorkspaces=ws_group_list)
    self_scattering_correction = mantid.RebinToWorkspace(WorkspaceToRebin=self_scattering_correction,
                                                         WorkspaceToMatch=focused_ws)

    focused_ws = mantid.Subtract(LHSWorkspace=focused_ws, RHSWorkspace=self_scattering_correction)
    if delta_q:
        focused_ws = mantid.Rebin(InputWorkspace=focused_ws, Params=delta_q)
    if merge_banks:
        q_min, q_max = _load_qlims(q_lims)
        merged_ws = mantid.MatchAndMergeWorkspaces(InputWorkspaces=focused_ws, XMin=q_min, XMax=q_max,
                                                   CalculateScale=False)
        fast_fourier_filter(merged_ws, freq_params=freq_params)
        pdf_output = mantid.PDFFourierTransform(Inputworkspace="merged_ws", InputSofQType="S(Q)-1", PDFType=pdf_type,
                                                Filter=lorch_filter, DeltaR=delta_r,
                                                rho0=sample_details.material_object.crystal_density)
    else:
        for ws in focused_ws:
            fast_fourier_filter(ws, freq_params=freq_params)
        pdf_output = mantid.PDFFourierTransform(InputWorkspace='focused_ws', InputSofQType="S(Q)-1", PDFType=pdf_type,
                                                Filter=lorch_filter, DeltaR=delta_r,
                                                rho0=sample_details.material_object.crystal_density)
        pdf_output = mantid.RebinToWorkspace(WorkspaceToRebin=pdf_output, WorkspaceToMatch=pdf_output[4],
                                             PreserveEvents=True)
    if not debug:
        common.remove_intermediate_workspace('self_scattering_correction')
    # Rename output ws
    if 'merged_ws' in locals():
        mantid.RenameWorkspace(InputWorkspace='merged_ws', OutputWorkspace=run_number + '_merged_Q')
    mantid.RenameWorkspace(InputWorkspace='focused_ws', OutputWorkspace=run_number+'_focused_Q')
    if isinstance(focused_ws, WorkspaceGroup):
        for i in range(len(focused_ws)):
            mantid.RenameWorkspace(InputWorkspace=focused_ws[i], OutputWorkspace=run_number+'_focused_Q_'+str(i+1))
    mantid.RenameWorkspace(InputWorkspace='pdf_output', OutputWorkspace=run_number+'_pdf_R')
    if isinstance(pdf_output, WorkspaceGroup):
        for i in range(len(pdf_output)):
            mantid.RenameWorkspace(InputWorkspace=pdf_output[i], OutputWorkspace=run_number+'_pdf_R_'+str(i+1))
    return pdf_output
Example #14
def align_and_focus(run_number, nexus_file_name, target_unit, binning_parameters, convert_to_matrix):
    """
    align and focus a run
    :param run_number:
    :param nexus_file_name:
    :param target_unit:
    :param binning_parameters:
    :param convert_to_matrix:
    :return:
    """
    # check inputs ... blabla

    # load data
    output_ws_name = 'VULCAN_{0}_events'.format(run_number)
    mantidapi.Load(Filename=nexus_file_name, OutputWorkspace=output_ws_name)
    mantidapi.CompressEvents(InputWorkspace=output_ws_name,
                             OutputWorkspace=output_ws_name,
                             Tolerance='0.01')

    # TODO FIXME - the calibration file name shall be set from user configuration through function method
    VULCAN_FOCUS_CAL = '/SNS/VULCAN/shared/CALIBRATION/2017_8_11_CAL/VULCAN_calibrate_2017_08_17.h5'
    VULCAN_FOCUS_CAL_GEN1 = '/SNS/VULCAN/shared/CALIBRATION/2011_1_7_CAL/vulcan_foc_all_2bank_11p.cal'

    # calibration file: pick based on the extension of the input NeXus file
    if nexus_file_name.endswith('.h5'):
        cal_file_name = VULCAN_FOCUS_CAL
    else:
        cal_file_name = VULCAN_FOCUS_CAL_GEN1

    # align and focus
    final_ws_name = 'VULCAN_{0}'.format(run_number)
    print(output_ws_name)
    print(final_ws_name)
    print(cal_file_name)

    # output is TOF
    mantidapi.AlignAndFocusPowder(InputWorkspace=output_ws_name,
                                  OutputWorkspace=final_ws_name,
                                  CalFileName=cal_file_name,
                                  Params='-0.001',
                                  DMin='0.5', DMax='3.5',
                                  PreserveEvents=not convert_to_matrix)

    # clean
    mantidapi.DeleteWorkspace(Workspace=output_ws_name)

    # convert unit
    if target_unit == 'dSpacing':
        mantidapi.ConvertUnits(InputWorkspace=final_ws_name,
                               OutputWorkspace=final_ws_name,
                               Target='dSpacing',
                               EMode='Elastic')

    # binning
    mantidapi.Rebin(InputWorkspace=final_ws_name,
                    OutputWorkspace=final_ws_name,
                    Params=numpy.array(binning_parameters))

    return final_ws_name
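A hedged usage sketch; the run number, NeXus path and binning parameters are placeholders, not values taken from the original workflow:

ws_name = align_and_focus(run_number=123456,
                          nexus_file_name='/SNS/VULCAN/IPTS-0000/nexus/VULCAN_123456.nxs.h5',
                          target_unit='dSpacing',
                          binning_parameters=[0.3, -0.001, 3.5],
                          convert_to_matrix=True)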
Example #15
def save_mantid_gsas(gsas_ws_name, gda_file_name, binning_parameters):
    """
    Save temporary GSAS file
    :param gsas_ws_name:
    :param gda_file_name:
    :param binning_parameters:
    :return:
    """
    temp1_ws = ADS.retrieve(gsas_ws_name)
    print('[DB...BAT] Before aligned {0}.. vec x: {1}... is histograms? {2}'
          ''.format(type(temp1_ws),
                    temp1_ws.readX(2)[0], temp1_ws.isHistogramData()))

    aligned_gss_ws_name = '{0}_temp'.format(gsas_ws_name)

    if isinstance(binning_parameters, numpy.ndarray):
        # align to VDRIVE
        align_to_vdrive_bin(gsas_ws_name, binning_parameters,
                            aligned_gss_ws_name)
    elif binning_parameters is not None:
        api.Rebin(InputWorkspace=gsas_ws_name,
                  OutputWorkspace=aligned_gss_ws_name,
                  Params=binning_parameters)
    # END-IF (rebin)

    aws = ADS.retrieve(aligned_gss_ws_name)
    print('[DB...INFO] Save Mantid GSS: {} is histogram: {}'.format(
        aligned_gss_ws_name, aws.isHistogramData()))

    # Convert from PointData to Histogram
    api.ConvertToHistogram(InputWorkspace=aligned_gss_ws_name,
                           OutputWorkspace=aligned_gss_ws_name)

    # Save
    print('[DB...VERY IMPORTANT] Save to GSAS File {0} as a temporary output'.
          format(gda_file_name))
    curr_ws = ADS.retrieve(aligned_gss_ws_name)
    print(
        '[DB...INFO] Into SaveGSS: number of histograms = {}, Bank 1/2 size = {}, Bank3 size = {}'
        ''.format(curr_ws.getNumberHistograms(), len(curr_ws.readX(0)),
                  len(curr_ws.readX(2))))
    print('[DB...INFO] B1[0] = {}, B1[-1] = {}, B3[0] = {}, B3[-1] = {}'
          ''.format(
              curr_ws.readX(0)[0],
              curr_ws.readX(0)[-1],
              curr_ws.readX(2)[0],
              curr_ws.readX(2)[-1]))
    api.SaveGSS(InputWorkspace=aligned_gss_ws_name,
                Filename=gda_file_name,
                SplitFiles=False,
                Append=False,
                Format="SLOG",
                MultiplyByBinWidth=False,
                ExtendedHeader=False,
                UseSpectrumNumberAsBankID=True)

    return gda_file_name
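A minimal usage sketch, assuming a focused workspace named 'VULCAN_123456' is already in the analysis data service (workspace name, output path and binning string are placeholders):

gda_path = save_mantid_gsas(gsas_ws_name='VULCAN_123456',
                            gda_file_name='/tmp/VULCAN_123456.gda',
                            binning_parameters='5000,-0.001,70000')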
Example #16
def calc_calibration_without_vanadium(focused_ws, index, instrument):
    focus_spectrum = mantid.ExtractSingleSpectrum(InputWorkspace=focused_ws,
                                                  WorkspaceIndex=index)
    focus_spectrum = mantid.ConvertUnits(InputWorkspace=focus_spectrum,
                                         Target="TOF")
    focus_spectrum = mantid.Rebin(InputWorkspace=focus_spectrum,
                                  Params=instrument.tof_binning)
    focus_calibrated = mantid.CropWorkspace(InputWorkspace=focus_spectrum,
                                            XMin=0.1)
    return focus_calibrated
Example #17
def align_to_vdrive_bin(input_ws, vec_ref_tof, output_ws_name):
    """
    Rebin the input workspace (to histogram data)
    in order to match VULCAN's VDRIVE-generated GSAS file
    :param input_ws: focused workspace
    :param vec_ref_tof: vector of TOF bins
    :param output_ws_name:
    :return:
    """
    # Create a complicated bin parameter
    params = []
    dx = None
    for ibin in range(len(vec_ref_tof) - 1):
        x0 = vec_ref_tof[ibin]
        xf = vec_ref_tof[ibin + 1]
        dx = xf - x0
        params.append(x0)
        params.append(dx)

    # last bin
    assert dx is not None, 'Vector of refT has less than 2 values.  It is not supported.'
    x0 = vec_ref_tof[-1]
    xf = 2 * dx + x0
    params.extend([x0, 2 * dx, xf])

    # Rebin
    tempws = api.Rebin(InputWorkspace=input_ws,
                       Params=params,
                       PreserveEvents=True)

    # Map to a new workspace with 'vdrive-bin', which is the integer value of log bins
    numhist = tempws.getNumberHistograms()
    newvecx = []
    newvecy = []
    newvece = []
    for iws in range(numhist):
        vecx = tempws.readX(iws)
        vecy = tempws.readY(iws)
        vece = tempws.readE(iws)
        for i in range(len(vecx) - 1):
            newvecx.append(int(vecx[i] * 10) / 10.)
            newvecy.append(vecy[i])
            newvece.append(vece[i])
            # ENDFOR (i)
    # ENDFOR (iws)
    api.DeleteWorkspace(Workspace=tempws)
    gsaws = api.CreateWorkspace(DataX=newvecx,
                                DataY=newvecy,
                                DataE=newvece,
                                NSpec=numhist,
                                UnitX="TOF",
                                ParentWorkspace=input_ws,
                                OutputWorkspace=output_ws_name)

    return gsaws
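A worked illustration (not from the original source) of the ragged Rebin parameter list built above, using a short reference TOF vector:

vec_ref_tof = [10000.0, 10010.0, 10025.0]
params = []
for ibin in range(len(vec_ref_tof) - 1):
    x0, xf = vec_ref_tof[ibin], vec_ref_tof[ibin + 1]
    params.extend([x0, xf - x0])
dx = vec_ref_tof[-1] - vec_ref_tof[-2]             # width of the last reference bin
params.extend([vec_ref_tof[-1], 2 * dx, vec_ref_tof[-1] + 2 * dx])
print(params)   # [10000.0, 10.0, 10010.0, 15.0, 10025.0, 30.0, 10055.0]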
Example #18
 def _group_and_SofQW(self, wsName, etRebins, isSample=True):
     """ Transforms from wavelength and detector ID to S(Q,E)
     @param wsName: workspace as a function of wavelength and detector id
     @param etRebins: final energy domain and bin width
     @param isSample: discriminates between sample and vanadium
     @return: S(Q,E)
     """
     sapi.ConvertUnits(InputWorkspace=wsName,
                       OutputWorkspace=wsName,
                       Target='DeltaE',
                       EMode='Indirect')
     sapi.CorrectKiKf(InputWorkspace=wsName,
                      OutputWorkspace=wsName,
                      EMode='Indirect')
     sapi.Rebin(InputWorkspace=wsName,
                OutputWorkspace=wsName,
                Params=etRebins)
     if self._groupDetOpt != "None":
         if self._groupDetOpt == "Low-Resolution":
             grp_file = "BASIS_Grouping_LR.xml"
         else:
             grp_file = "BASIS_Grouping.xml"
         # If mask override used, we need to add default grouping file
         # location to search paths
         if self._overrideMask:
             config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
             sapi.GroupDetectors(InputWorkspace=wsName,
                                 OutputWorkspace=wsName,
                                 MapFile=grp_file,
                                 Behaviour="Sum")
     wsSqwName = wsName + '_divided_sqw' if isSample and self._doNorm else wsName + '_sqw'
     sapi.SofQW3(InputWorkspace=wsName,
                 QAxisBinning=self._qBins,
                 EMode='Indirect',
                 EFixed=self._reflection["default_energy"],
                 OutputWorkspace=wsSqwName)
     # Rebin the vanadium within the elastic line
     if not isSample:
         sapi.Rebin(InputWorkspace=wsSqwName,
                    OutputWorkspace=wsSqwName,
                    Params=self._reflection["vanadium_bins"])
     return wsSqwName
Example #19
def rebin_time(bin_param, wsname):
    """
    rebin by time

    @param bin_param:: the parameters to rebin to
    @param wsname:: the workspace to rebin
    @return: the rebinned workspace
    """
    output = "engg_focus_input_ws"
    ws = simple.Rebin(InputWorkspace=wsname, Params=bin_param, OutputWorkspace=output)
    return ws
Example #20
def filter_cross_sections(file_path, events=True, histo=False):
    """
        Filter events according to polarization state.
        :param str file_path: file to read
        :param bool events: if True, an event nexus file will be written
        :param bool histo: if True, a histo nexus file will be written
    """
    cross_sections = {}
    cross_sections_histo = {}

    xs_list = api.MRFilterCrossSections(file_path,
                                        PolState=POL_STATE,
                                        AnaState=ANA_STATE,
                                        PolVeto=POL_VETO,
                                        AnaVeto=ANA_VETO)

    if len(xs_list) > 0:
        tof_min, tof_max = get_tof_range(xs_list[0])

    for workspace in xs_list:
        if "cross_section_id" in workspace.getRun():
            entry = workspace.getRun().getProperty("cross_section_id").value
        else:
            entry = 'Off_Off'
            api.AddSampleLog(Workspace=workspace,
                             LogName='cross_section_id',
                             LogText=entry)
        if workspace.getNumberEvents() < 5:
            logging.warning("No events in %s", entry)
            continue

        run_number = workspace.getRunNumber()
        if events:
            events_file = "/tmp/filtered_%s_%s_%s.nxs" % (run_number, entry,
                                                          "events")
            api.SaveNexus(InputWorkspace=workspace,
                          Filename=events_file,
                          Title='entry_%s' % entry)
            cross_sections['entry-%s' % entry] = events_file
        if histo:
            #tof_min = workspace.getTofMin()
            #tof_max = workspace.getTofMax()
            ws_binned = api.Rebin(InputWorkspace=workspace,
                                  Params="%s, %s, %s" %
                                  (tof_min, TOF_BIN, tof_max),
                                  PreserveEvents=False)
            histo_file = "/tmp/filtered_%s_%s_%s.nxs" % (run_number, entry,
                                                         "histo")
            api.SaveNexus(InputWorkspace=ws_binned,
                          Filename=histo_file,
                          Title='entry_%s' % entry)
            cross_sections_histo['entry-%s' % entry] = histo_file

    return cross_sections, cross_sections_histo
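A hedged usage sketch; the file path is a placeholder, and the POL_STATE/ANA_STATE/POL_VETO/ANA_VETO module constants are assumed to be defined as in the original module:

xs_events, xs_histo = filter_cross_sections("/SNS/REF_M/IPTS-0000/nexus/REF_M_12345.nxs.h5",
                                            events=True,
                                            histo=True)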
Example #21
    def _do_silicon_calibration(self, runs_to_process, cal_file_name, grouping_file_name):
        create_si_ws = common._read_ws(number=runs_to_process, instrument=self)
        cycle_details = self._get_cycle_information(runs_to_process)
        instrument_version = cycle_details["instrument_version"]

        if instrument_version == "new" or instrument_version == "new2":
            create_si_ws = mantid.Rebin(InputWorkspace=create_si_ws, Params="100,-0.0006,19950")

        create_si_d_spacing_ws = mantid.ConvertUnits(InputWorkspace=create_si_ws, Target="dSpacing")

        if instrument_version == "new2":
            create_si_d_spacing_rebin_ws = mantid.Rebin(InputWorkspace=create_si_d_spacing_ws, Params="1.71,0.002,2.1")
            create_si_cross_corr_ws = mantid.CrossCorrelate(InputWorkspace=create_si_d_spacing_rebin_ws,
                                                            ReferenceSpectra=20, WorkspaceIndexMin=9,
                                                            WorkspaceIndexMax=1063, XMin=1.71, XMax=2.1)
        elif instrument_version == "new":
            create_si_d_spacing_rebin_ws = mantid.Rebin(InputWorkspace=create_si_d_spacing_ws, Params="1.85,0.002,2.05")
            create_si_cross_corr_ws = mantid.CrossCorrelate(InputWorkspace=create_si_d_spacing_rebin_ws,
                                                            ReferenceSpectra=20, WorkspaceIndexMin=9,
                                                            WorkspaceIndexMax=943, XMin=1.85, XMax=2.05)
        elif instrument_version == "old":
            create_si_d_spacing_rebin_ws = mantid.Rebin(InputWorkspace=create_si_d_spacing_ws, Params="3,0.002,3.2")
            create_si_cross_corr_ws = mantid.CrossCorrelate(InputWorkspace=create_si_d_spacing_rebin_ws,
                                                            ReferenceSpectra=500, WorkspaceIndexMin=1,
                                                            WorkspaceIndexMax=1440, XMin=3, XMax=3.2)
        else:
            raise NotImplementedError("The instrument version is not supported for creating a silicon calibration")

        common.remove_intermediate_workspace(create_si_d_spacing_ws)
        common.remove_intermediate_workspace(create_si_d_spacing_rebin_ws)

        calibration_output_path = self.calibration_dir + cal_file_name
        create_si_offsets_ws = mantid.GetDetectorOffsets(InputWorkspace=create_si_cross_corr_ws,
                                                         Step=0.002, DReference=1.920127251, XMin=-200, XMax=200,
                                                         GroupingFileName=calibration_output_path)
        create_si_aligned_ws = mantid.AlignDetectors(InputWorkspace=create_si_ws,
                                                     CalibrationFile=calibration_output_path)
        grouping_output_path = self.calibration_dir + grouping_file_name
        create_si_grouped_ws = mantid.DiffractionFocussing(InputWorkspace=create_si_aligned_ws,
                                                           GroupingFileName=grouping_output_path)
        del create_si_offsets_ws, create_si_grouped_ws
Example #22
def load_and_crop_data(runs, spectra, ip_file, diff_mode='single',
                       fit_mode='spectra', rebin_params=None):
    """
    @param runs The string giving the runs to load
    @param spectra A list of spectra to load
    @param ip_file A string denoting the IP file
    @param diff_mode Either 'double' or 'single'
    @param fit_mode If bank then the loading is changed to summing each bank to a separate spectrum
    @param rebin_params Rebin parameter string to rebin data by (no rebin if None)
    """
    instrument = VESUVIO()
    load_banks = (fit_mode == 'bank')
    output_name = _create_tof_workspace_suffix(runs, spectra)

    if load_banks:
        sum_spectra = True
        if spectra == "forward":
            bank_ranges = instrument.forward_banks
        elif spectra == "backward":
            bank_ranges = instrument.backward_banks
        else:
            raise ValueError("Fitting by bank requires selecting either 'forward' or 'backward' "
                             "for the spectra to load")
        bank_ranges = ["{0}-{1}".format(x, y) for x, y in bank_ranges]
        spectra = ";".join(bank_ranges)
    else:
        sum_spectra = False
        if spectra == "forward":
            spectra = "{0}-{1}".format(*instrument.forward_spectra)
        elif spectra == "backward":
            spectra = "{0}-{1}".format(*instrument.backward_spectra)

    if diff_mode == "double":
        diff_mode = "DoubleDifference"
    else:
        diff_mode = "SingleDifference"

    kwargs = {"Filename": runs,
              "Mode": diff_mode, "InstrumentParFile": ip_file,
              "SpectrumList": spectra, "SumSpectra": sum_spectra,
              "OutputWorkspace": output_name}
    full_range = ms.LoadVesuvio(**kwargs)
    tof_data = ms.CropWorkspace(InputWorkspace=full_range,
                                XMin=instrument.tof_range[0],
                                XMax=instrument.tof_range[1],
                                OutputWorkspace=output_name)

    if rebin_params is not None:
        tof_data = ms.Rebin(InputWorkspace=tof_data,
                            OutputWorkspace=output_name,
                            Params=rebin_params)

    return tof_data
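A minimal usage sketch; the run string and IP file name are placeholders rather than real VESUVIO inputs:

tof_ws = load_and_crop_data(runs="15039-15045",
                            spectra="forward",
                            ip_file="IP_file.par",
                            diff_mode='single',
                            fit_mode='spectra')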
Example #23
    def _create_calibration(self, calibration_runs, offset_file_name, grouping_file_name):
        input_ws = common._read_ws(number=calibration_runs, instrument=self)
        cycle_information = self._get_cycle_information(calibration_runs)

        # TODO move these hard coded params to instrument specific
        if cycle_information["instrument_version"] == "new" or cycle_information["instrument_version"] == "new2":
            input_ws = mantid.Rebin(InputWorkspace=input_ws, Params="100,-0.0006,19950")

        d_spacing_cal = mantid.ConvertUnits(InputWorkspace=input_ws, Target="dSpacing")
        d_spacing_cal = mantid.Rebin(InputWorkspace=d_spacing_cal, Params="1.8,0.002,2.1")

        if cycle_information["instrument_version"] == "new2":
            cross_cor_ws = mantid.CrossCorrelate(InputWorkspace=d_spacing_cal, ReferenceSpectra=20,
                                                 WorkspaceIndexMin=9, WorkspaceIndexMax=1063, XMin=1.8, XMax=2.1)

        elif cycle_information["instrument_version"] == "new":
            cross_cor_ws = mantid.CrossCorrelate(InputWorkspace=d_spacing_cal, ReferenceSpectra=20,
                                                 WorkspaceIndexMin=9, WorkspaceIndexMax=943, XMin=1.8, XMax=2.1)
        else:
            cross_cor_ws = mantid.CrossCorrelate(InputWorkspace=d_spacing_cal, ReferenceSpectra=500,
                                                 WorkspaceIndexMin=1, WorkspaceIndexMax=1440, XMin=1.8, XMax=2.1)
        if self._old_api_uses_full_paths:  # Workaround for old API setting full paths
            grouping_file_path = grouping_file_name
            offset_file_path = offset_file_name
        else:
            offset_file_path = self.calibration_dir + offset_file_name
            grouping_file_path = self.calibration_dir + grouping_file_name

        # CeO2 cell refined to 5.4102(3), so the (220) d-spacing is 1.912795
        offset_output_path = mantid.GetDetectorOffsets(InputWorkspace=cross_cor_ws, Step=0.002, DReference=1.912795,
                                                       XMin=-200, XMax=200, GroupingFileName=offset_file_path)
        del offset_output_path  # This isn't used so delete it to keep linters happy
        aligned_ws = mantid.AlignDetectors(InputWorkspace=input_ws, CalibrationFile=offset_file_path)
        cal_grouped_ws = mantid.DiffractionFocussing(InputWorkspace=aligned_ws, GroupingFileName=grouping_file_path)

        common.remove_intermediate_workspace(d_spacing_cal)
        common.remove_intermediate_workspace(cross_cor_ws)
        common.remove_intermediate_workspace(aligned_ws)
        common.remove_intermediate_workspace(cal_grouped_ws)
Example #24
    def test_EventWorkspace(self):
        # This is from the Mantid system-test data
        filename = 'CNCS_51936_event.nxs'
        eventWS = mantid.LoadEventNexus(filename)
        ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)

        binned_mantid = mantidcompat.to_dataset(ws)

        tof = sp.Variable(binned_mantid[sp.Coord.Tof])
        d = mantidcompat.to_dataset(eventWS)
        binned = sp.histogram(d, tof)

        delta = sp.sum(binned_mantid - binned, sp.Dim.Position)
        print(delta)
Example #25
def calculate_mantid_resolutions(ws_name, mass):
    max_Y = np.ceil(2.5*mass+27)
    rebin_parameters = str(-max_Y)+","+str(2.*max_Y/240)+","+str(max_Y) # twice the binning as for the data
    ws= sapi.mtd[ws_name]
    for index in range(ws.getNumberHistograms()):
        sapi.VesuvioResolution(Workspace=ws,WorkspaceIndex=index,Mass=mass,OutputWorkspaceYSpace="tmp")
        tmp=sapi.Rebin("tmp",rebin_parameters)
        if index == 0:
            sapi.RenameWorkspace(tmp,"resolution")
        else:
            sapi.AppendSpectra("resolution", tmp, OutputWorkspace= "resolution")
    sapi.SumSpectra(InputWorkspace="resolution",OutputWorkspace="resolution")
    normalise_workspace("resolution")
    safe_delete_ws(tmp)
Example #26
    def _rebinVdrive(self, inputws, vec_refT, outputwsname):
        """ Rebin to match VULCAN's VDRIVE-generated GSAS file
        Arguments:
         - inputws : focussed workspace
         - vec_refT: list of TOF bins
        """
        # Create a complicated bin parameter
        params = []
        for ibin in range(len(vec_refT) - 1):
            x0 = vec_refT[ibin]
            xf = vec_refT[ibin + 1]
            dx = xf - x0
            params.append(x0)
            params.append(dx)

        # last bin
        x0 = vec_refT[-1]
        xf = 2 * dx + x0
        params.extend([x0, 2 * dx, xf])

        # Rebin
        tempws = api.Rebin(InputWorkspace=inputws,
                           Params=params,
                           PreserveEvents=False)

        # Map to a new workspace with 'vdrive-bin', which is the integer value of log bins
        numhist = tempws.getNumberHistograms()
        newvecx = []
        newvecy = []
        newvece = []
        for iws in range(numhist):
            vecx = tempws.readX(iws)
            vecy = tempws.readY(iws)
            vece = tempws.readE(iws)
            for i in range(len(vecx) - 1):
                newvecx.append(int(vecx[i] * 10) / 10.)
                newvecy.append(vecy[i])
                newvece.append(vece[i])
            # ENDFOR (i)
        # ENDFOR (iws)
        api.DeleteWorkspace(Workspace=tempws)
        gsaws = api.CreateWorkspace(DataX=newvecx,
                                    DataY=newvecy,
                                    DataE=newvece,
                                    NSpec=numhist,
                                    UnitX="TOF",
                                    ParentWorkspace=inputws,
                                    OutputWorkspace=outputwsname)

        return gsaws
Example #27
 def test_Workspace2D(self):
     import mantid.simpleapi as mantid
     eventWS = self.base_event_ws
     ws = mantid.Rebin(eventWS, 10000, PreserveEvents=False)
     d = scn.mantid.convert_Workspace2D_to_data_array(ws)
     self.assertEqual(
         d.attrs["run_start"].value,
         "2012-05-21T15:14:56.279289666",
     )
     self.assertEqual(d.data.unit, sc.units.counts)
     for i in range(ws.getNumberHistograms()):
         assert np.all(np.equal(d.values[i], ws.readY(i)))
         assert np.all(np.equal(d.variances[i], ws.readE(i) * ws.readE(i)))
     self.assertEqual(d.coords['spectrum'].dtype, sc.DType.int32)
     self.assertEqual(d.coords['tof'].dtype, sc.DType.float64)
Example #28
    def test_EventWorkspace(self):
        import mantid.simpleapi as mantid
        eventWS = self.base_event_ws
        ws = mantid.Rebin(eventWS, 10000)

        binned_mantid = scn.mantid.convert_Workspace2D_to_data_array(ws)

        target_tof = binned_mantid.coords['tof']
        d = scn.mantid.convert_EventWorkspace_to_data_array(
            eventWS, load_pulse_times=False)
        binned = sc.histogram(d, bins=target_tof)

        delta = sc.sum(binned_mantid - binned, 'spectrum')
        delta = sc.sum(delta, 'tof')
        self.assertLess(np.abs(delta.value), 1e-5)
Example #29
def absorption_correction(filename,
                          lambda_binning=(0.7, 10.35, 5615),
                          **mantid_args):
    """
    This method is a straightforward wrapper exposing CylinderAbsorption
    through scipp

    CylinderAbsorption calculates an approximation of the
    attenuation due to absorption and single scattering in a 'cylindrical'
    shape.

    Requirements:
    - The instrument associated with the workspace must be fully defined.
      (As this is a WISH-centric implementation, this is done with the
      predefined instrument definition file.)

    Parameters
    ----------
    filename: Path to the file with data

    lambda_binning: min, max and number of steps for binning in wavelength

    mantid_args: additional arguments to be passed to Mantid's
                 CylinderAbsorption method.

    Returns
    -------
    Scipp dataset containing absorption correction in Wavelength units.

    """

    # Create empty workspace with proper dimensions.
    workspace = simpleapi.LoadEventNexus(filename,
                                         MetaDataOnly=True,
                                         LoadMonitors=False,
                                         LoadLogs=False)
    workspace.getAxis(0).setUnit('Wavelength')

    # Rebin the resulting correction based on default WISH binning
    lambda_min, lambda_max, number_bins = lambda_binning
    bin_width = (lambda_max - lambda_min) / number_bins
    workspace = simpleapi.Rebin(workspace,
                                params=[lambda_min, bin_width, lambda_max],
                                FullBinsOnly=True)

    correction = simpleapi.CylinderAbsorption(workspace, **mantid_args)

    return scn.from_mantid(correction)
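A hedged usage sketch; the WISH file name and sample dimensions are placeholders, and the extra keyword arguments are simply forwarded to Mantid's CylinderAbsorption:

correction = absorption_correction("WISH00043525.nxs",
                                   lambda_binning=(0.7, 10.35, 5615),
                                   CylinderSampleHeight=4.0,
                                   CylinderSampleRadius=0.4,
                                   NumberOfSlices=10)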
Example #30
    def test_function_attached_as_workspace_method_does_the_same_as_the_free_function(self):
        # Use Rebin as a test
        ws1 = simpleapi.CreateWorkspace(DataX=[1.5,2.0,2.5,3.0],DataY=[1,2,3],NSpec=1,UnitX='Wavelength')
        self.assertTrue(hasattr(ws1, "rebin"))

        ws2 = simpleapi.Rebin(ws1,Params=[1.5,1.5,3])
        ws3 = ws1.rebin(Params=[1.5,1.5,3])
        ws4 = ws1.rebin([1.5,1.5,3])
        result = simpleapi.CompareWorkspaces(ws2,ws3)
        self.assertTrue(result[0])
        result = simpleapi.CompareWorkspaces(ws2,ws4)
        self.assertTrue(result[0])

        simpleapi.DeleteWorkspace(ws1)
        simpleapi.DeleteWorkspace(ws2)
        simpleapi.DeleteWorkspace(ws3)