Example #1
def sumToShim(rnum, output_dir=None):
    """
    Combine both spin states into a single workspace

    Parameters
    ----------
    rnum : int
      The run number to be shimmed
    output_dir : str, optional
      If given, the folder where the workspace should be saved

    """
    try:
        wtemp = Load(BASE.format(rnum), LoadMonitors=True)
        RebinToWorkspace('wtemp_1',
                         'wtemp_monitors_1',
                         PreserveEvents=False,
                         OutputWorkspace='wtemp_1')
        RebinToWorkspace('wtemp_2',
                         'wtemp_monitors_1',
                         PreserveEvents=False,
                         OutputWorkspace='wtemp_2')
        # ConjoinWorkspaces appends to the first workspace in place and returns nothing
        ConjoinWorkspaces('wtemp_monitors_1', 'wtemp_1')
        ConjoinWorkspaces('wtemp_monitors_2', 'wtemp_2')
    except Exception:
        # fall back to a plain load if the monitors cannot be loaded separately
        wtemp_monitors = Load(BASE.format(rnum))
    wtempShim = mtd['wtemp_monitors_1'] + mtd['wtemp_monitors_2']
    RenameWorkspace(wtempShim, 'LARMOR{:08d}'.format(rnum))
    if output_dir:
        SaveNexusProcessed(
            'LARMOR{:08d}'.format(rnum),
            os.path.join(output_dir, "LARMOR{:08d}-add.nxs".format(rnum)))
    RenameWorkspace('LARMOR{:08d}'.format(rnum),
                    'LARMOR{:08d}-add'.format(rnum))
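
A hypothetical usage sketch of the helper above, assuming Mantid's simpleapi has been imported and that BASE holds the instrument file-name pattern; the run number, pattern, and output directory are illustrative only and not taken from the source.

import os
from mantid.simpleapi import *  # Load, RebinToWorkspace, ConjoinWorkspaces, mtd, ...

BASE = 'LARMOR{:08d}.nxs'  # assumed file-name pattern
sumToShim(4444, output_dir=os.path.expanduser('~'))
# the combined workspace is left in the ADS as 'LARMOR00004444-add'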
Example #2
def create_test_workspace(workspace_name, ragged=False):
    CreateWorkspace([0, 1, 2, 3, 4, 2, 3, 4, 5, 6],
                    [0, 1, 2, 3, 4, 3, 4, 5, 6, 7],
                    NSpec=2,
                    OutputWorkspace=workspace_name)
    if ragged:
        CreateWorkspace([1, 2, 3, 4], [1, 2, 3, 4],
                        NSpec=1,
                        OutputWorkspace='__temp1')
        CreateWorkspace([2, 3, 4, 5, 6], [3, 4, 5, 6, 7],
                        NSpec=1,
                        OutputWorkspace='__temp2')
        ConjoinWorkspaces(workspace_name, '__temp1', CheckOverlapping=False)
        ConjoinWorkspaces(workspace_name, '__temp2', CheckOverlapping=False)
    return AnalysisDataService.retrieve(workspace_name)
Example #3
    def PyExec(self):
        ws_list = self.getProperty('InputWorkspaces').value
        x_min = self.getProperty('XMin').value
        x_max = self.getProperty('XMax').value
        scale_bool = self.getProperty('CalculateScale').value
        offset_bool = self.getProperty('CalculateOffset').value
        flattened_list = self.unwrap_groups(ws_list)
        largest_range_spectrum, rebin_param = self.get_common_bin_range_and_largest_spectra(flattened_list)
        CloneWorkspace(InputWorkspace=flattened_list[0], OutputWorkspace='ws_conjoined')
        Rebin(InputWorkspace='ws_conjoined', OutputWorkspace='ws_conjoined', Params=rebin_param)
        for ws in flattened_list[1:]:
            temp = CloneWorkspace(InputWorkspace=ws)
            temp = Rebin(InputWorkspace=temp, Params=rebin_param)
            ConjoinWorkspaces(InputWorkspace1='ws_conjoined',
                              InputWorkspace2=temp,
                              CheckOverlapping=False)
        ws_conjoined = AnalysisDataService.retrieve('ws_conjoined')
        ref_spec = ws_conjoined.getSpectrum(largest_range_spectrum).getSpectrumNo()
        ws_conjoined, offset, scale, chisq = MatchSpectra(InputWorkspace=ws_conjoined,
                                                          ReferenceSpectrum=ref_spec,
                                                          CalculateScale=scale_bool,
                                                          CalculateOffset=offset_bool)
        x_min, x_max, bin_width = self.fit_x_lims_to_match_histogram_bins(ws_conjoined, x_min, x_max)

        ws_conjoined = CropWorkspaceRagged(InputWorkspace=ws_conjoined, XMin=x_min, XMax=x_max)
        ws_conjoined = Rebin(InputWorkspace=ws_conjoined, Params=[min(x_min), bin_width, max(x_max)])
        merged_ws = SumSpectra(InputWorkspace=ws_conjoined, WeightedSum=True, MultiplyBySpectra=False, StoreInADS=False)
        DeleteWorkspace(ws_conjoined)
        self.setProperty('OutputWorkspace', merged_ws)
Example #4
 def setUpClass(cls):
     cls.g1da = config['graph1d.autodistribution']
     config['graph1d.autodistribution'] = 'On'
     cls.ws2d_histo = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30],
                                      DataY=[2, 3, 4, 5],
                                      DataE=[1, 2, 3, 4],
                                      NSpec=2,
                                      Distribution=True,
                                      YUnitLabel="Counts per $\\AA$",
                                      UnitX='Wavelength',
                                      VerticalAxisUnit='DeltaE',
                                      VerticalAxisValues=[4, 6, 8],
                                      OutputWorkspace='ws2d_histo')
     cls.ws2d_histo_non_dist = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30],
                                               DataY=[2, 3, 4, 5],
                                               DataE=[1, 2, 3, 4],
                                               NSpec=2,
                                               Distribution=False,
                                               YUnitLabel='Counts',
                                               UnitX='Wavelength',
                                               OutputWorkspace='ws2d_histo_non_dist')
     cls.ws2d_histo_rag = CreateWorkspace(DataX=[1, 2, 3, 4, 5, 2, 4, 6, 8, 10],
                                          DataY=[2] * 8,
                                          NSpec=2,
                                          VerticalAxisUnit='DeltaE',
                                          VerticalAxisValues=[5, 7, 9],
                                          OutputWorkspace='ws2d_histo_rag')
     cls.ws_MD_2d = CreateMDHistoWorkspace(Dimensionality=3,
                                           Extents='-3,3,-10,10,-1,1',
                                           SignalInput=range(25),
                                           ErrorInput=range(25),
                                           NumberOfEvents=10 * np.ones(25),
                                           NumberOfBins='5,5,1',
                                           Names='Dim1,Dim2,Dim3',
                                           Units='MomentumTransfer,EnergyTransfer,Angstrom',
                                           OutputWorkspace='ws_MD_2d')
     cls.ws_MD_1d = CreateMDHistoWorkspace(Dimensionality=3,
                                           Extents='-3,3,-10,10,-1,1',
                                           SignalInput=range(5),
                                           ErrorInput=range(5),
                                           NumberOfEvents=10 * np.ones(5),
                                           NumberOfBins='1,5,1',
                                           Names='Dim1,Dim2,Dim3',
                                           Units='MomentumTransfer,EnergyTransfer,Angstrom',
                                           OutputWorkspace='ws_MD_1d')
     cls.ws2d_point_uneven = CreateWorkspace(DataX=[10, 20, 30],
                                             DataY=[1, 2, 3],
                                             NSpec=1,
                                             OutputWorkspace='ws2d_point_uneven')
     wp = CreateWorkspace(DataX=[15, 25, 35, 45], DataY=[1, 2, 3, 4], NSpec=1)
     ConjoinWorkspaces(cls.ws2d_point_uneven, wp, CheckOverlapping=False)
     cls.ws2d_point_uneven = mantid.mtd['ws2d_point_uneven']
     cls.ws2d_histo_uneven = CreateWorkspace(DataX=[10, 20, 30, 40],
                                             DataY=[1, 2, 3],
                                             NSpec=1,
                                             OutputWorkspace='ws2d_histo_uneven')
     AddTimeSeriesLog(cls.ws2d_histo, Name="my_log", Time="2010-01-01T00:00:00", Value=100)
     AddTimeSeriesLog(cls.ws2d_histo, Name="my_log", Time="2010-01-01T00:30:00", Value=15)
     AddTimeSeriesLog(cls.ws2d_histo, Name="my_log", Time="2010-01-01T00:50:00", Value=100.2)
Example #5
    def PyExec(self):
        # get parameter values
        wsString = self.getPropertyValue("InputWorkspace").strip()
        # internal values
        wsOutput = "__OutputWorkspace"
        wsTemp = "__Sort_temp"
        # get the workspace list
        wsNames = []
        for wsName in wsString.split(","):
            ws = mtd[wsName.strip()]
            if isinstance(ws, WorkspaceGroup):
                wsNames.extend(ws.getNames())
            else:
                wsNames.append(wsName)

        if wsOutput in mtd:
            DeleteWorkspace(Workspace=wsOutput)
        sortStat = []
        for wsName in wsNames:
            if "qvectors" in wsName:
                # extract the spectrum
                ws = mtd[wsName.strip()]
                for s in range(0, ws.getNumberHistograms()):
                    y_s = ws.readY(s)
                    sortStat.append((self.GetXValue(y_s), s))  # (norm, spectrum index)
                sortStat.sort()
        if len(sortStat) == 0:
            raise RuntimeError("Cannot find file with qvectors, aborting")
        # sort spectra using the norm of q
        for wsName in wsNames:
            ws = mtd[wsName.strip()]
            yUnit = ws.getAxis(1).getUnit().unitID()
            transposed = False
            if ws.getNumberHistograms() < len(sortStat):
                Transpose(InputWorkspace=wsName, OutputWorkspace=wsName)
                transposed = True
            for norm, spec in sortStat:
                ExtractSingleSpectrum(InputWorkspace=wsName, OutputWorkspace=wsTemp, WorkspaceIndex=spec)
                if wsOutput in mtd:
                    ConjoinWorkspaces(InputWorkspace1=wsOutput,InputWorkspace2=wsTemp,CheckOverlapping=False)
                    if wsTemp in mtd:
                        DeleteWorkspace(Workspace=wsTemp)
                else:
                    RenameWorkspace(InputWorkspace=wsTemp, OutputWorkspace=wsOutput)

            # put norm as y value and copy units from input
            loopIndex = 0
            wsOut = mtd[wsOutput]
            for norm, spec in sortStat:
                wsOut.getSpectrum(loopIndex).setSpectrumNo(int(norm*1000))
                loopIndex = loopIndex + 1
            if len(yUnit) > 0:
                wsOut.getAxis(1).setUnit(yUnit)
            if transposed:
                Transpose(InputWorkspace=wsOutput, OutputWorkspace=wsOutput)
            RenameWorkspace(InputWorkspace=wsOutput, OutputWorkspace=wsName)
Example #6
def handle_saving_event_workspace_when_saving_as_histogram(binning, runs, def_type, inst):
    ws_in_monitor = mtd[ADD_FILES_SUM_TEMPORARY_MONITORS]
    if binning == 'Monitors':
        mon_x = ws_in_monitor.dataX(0)
        binning = str(mon_x[0])
        bin_gap = mon_x[1] - mon_x[0]
        binning = binning + "," + str(bin_gap)
        for j in range(2, len(mon_x)):
            next_bin_gap = mon_x[j] - mon_x[j-1]
            if next_bin_gap != bin_gap:
                bin_gap = next_bin_gap
                binning = binning + "," + str(mon_x[j-1]) + "," + str(bin_gap)
        binning = binning + "," + str(mon_x[len(mon_x)-1])

    sanslog.notice(binning)
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY, OutputWorkspace='AddFilesSumTemporary_Rebin', Params=binning,
          PreserveEvents=False)

    # loading the nexus file using LoadNexus is necessary because it carries some metadata
    # that LoadEventNexus does not provide. This must be fixed.
    filename, ext = _make_filename(runs[0], def_type, inst)
    workspace_type = get_workspace_type(filename)
    if workspace_type is WorkspaceType.MultiperiodEvent:
        # If we are dealing with multi-period event workspaces then there is no way of getting any other
        # sample log information, hence we make a copy of the monitor workspace and use that
        # instead of reloading the first file again
        CloneWorkspace(InputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS, OutputWorkspace=ADD_FILES_SUM_TEMPORARY)
    else:
        LoadNexus(Filename=filename, OutputWorkspace=ADD_FILES_SUM_TEMPORARY,
                  SpectrumMax=ws_in_monitor.getNumberHistograms())
    # User may have selected a binning which is different from the default
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY, OutputWorkspace=ADD_FILES_SUM_TEMPORARY, Params=binning)
    # For now the monitor binning must be the same as the detector binning
    # since otherwise both cannot exist in the same output histogram file
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS, OutputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS,
          Params=binning)

    ws_in_monitor = mtd[ADD_FILES_SUM_TEMPORARY_MONITORS]
    wsOut = mtd[ADD_FILES_SUM_TEMPORARY]
    ws_in_detector = mtd['AddFilesSumTemporary_Rebin']

    # We lose the added sample log information since we reload a single-run workspace
    # and conjoin it with the added workspace. In order to preserve some of the added
    # sample logs we need to transfer them at this point
    transfer_special_sample_logs(from_ws=ws_in_detector, to_ws=wsOut)

    mon_n = ws_in_monitor.getNumberHistograms()
    for i in range(mon_n):
        wsOut.setY(i, ws_in_monitor.dataY(i))
        wsOut.setE(i, ws_in_monitor.dataE(i))
    ConjoinWorkspaces(wsOut, ws_in_detector, CheckOverlapping=True)

    if 'AddFilesSumTemporary_Rebin' in mtd:
        DeleteWorkspace('AddFilesSumTemporary_Rebin')
Example #7
 def test_MatchAndMergeWorkspaces_accepts_a_mixture_of_ws_size(self):
     x_min = np.array([2, 5, 10, 15, 20])
     x_max = np.array([10, 20, 30, 40, 45])
     ws_group = AnalysisDataService.retrieve('ws_group')
     ConjoinWorkspaces(InputWorkspace1=ws_group[3],
                       InputWorkspace2=ws_group[4],
                       CheckOverlapping=False)
     ws_list = [ws_group[0], ws_group[1], ws_group[2], ws_group[3]]
     ws_merged = MatchAndMergeWorkspaces(InputWorkspaces=ws_list, XMin=x_min, XMax=x_max)
     self.assertIsInstance(ws_merged, MatrixWorkspace)
     self.assertEqual(ws_merged.getNumberHistograms(), 1)
     self.assertAlmostEqual(min(ws_merged.dataX(0)), 2, places=0)
     self.assertAlmostEqual(max(ws_merged.dataX(0)), 45, places=0)
Example #8
    def setUpClass(cls):
        cls.ws_widget = WorkspaceWidget(QMainWindow())
        mat_ws = CreateSampleWorkspace()
        table_ws = CreateEmptyTableWorkspace()
        group_ws = GroupWorkspaces([mat_ws, table_ws])
        single_val_ws = CreateSingleValuedWorkspace(5, 6)

        # Create ragged workspace
        ws2d_ragged = CreateWorkspace(DataX=[10, 20, 30],
                                      DataY=[1, 2, 3],
                                      NSpec=1,
                                      OutputWorkspace='Ragged')
        temp = CreateWorkspace(DataX=[15, 25, 35, 45],
                               DataY=[1, 2, 3, 4],
                               NSpec=1)
        ConjoinWorkspaces(ws2d_ragged, temp, CheckOverlapping=False)
        ws2d_ragged = AnalysisDataService.retrieve('Ragged')

        cls.w_spaces = [mat_ws, table_ws, group_ws, single_val_ws]
        cls.ws_names = ['MatWS', 'TableWS', 'GroupWS', 'SingleValWS']
        # create md workspace
        md_ws = CreateMDHistoWorkspace(SignalInput='1,2,3,4,2,1',
                                       ErrorInput='1,1,1,1,1,1',
                                       Dimensionality=3,
                                       Extents='-1,1,-1,1,0.5,6.5',
                                       NumberOfBins='1,1,6',
                                       Names='x,y,|Q|',
                                       Units='mm,km,AA^-1',
                                       OutputWorkspace='MDHistoWS1D')
        # self.w_spaces = [mat_ws, table_ws, group_ws, single_val_ws, md_ws]
        # self.ws_names = ['MatWS', 'TableWS', 'GroupWS', 'SingleValWS', 'MDHistoWS1D']
        for ws_name, ws in zip(cls.ws_names, cls.w_spaces):
            cls.ws_widget._ads.add(ws_name, ws)
        cls.ws_names.append(md_ws.name())
        cls.w_spaces.append(md_ws)
        cls.ws_names.append(ws2d_ragged.name())
        cls.w_spaces.append(ws2d_ragged)
Example #9
def _conjoin(workspace1, workspace2):
    workspace_name = workspace1.getName()
    ConjoinWorkspaces(InputWorkspace1=workspace_name,
                      InputWorkspace2=workspace2,
                      EnableLogging=False)
    return mtd[workspace_name]
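
ConjoinWorkspaces appends the spectra of its second input to the first and removes the second workspace from the ADS, which is why the helper above re-fetches the result from mtd by name. A minimal, hypothetical demonstration (the workspaces below are not from the original source):

from mantid.simpleapi import *  # CreateWorkspace, ExtractSingleSpectrum, ConjoinWorkspaces, mtd

CreateWorkspace(DataX=[1, 2, 3, 1, 2, 3], DataY=[1, 2, 3, 4, 5, 6], NSpec=2,
                OutputWorkspace='parent')
first = ExtractSingleSpectrum('parent', WorkspaceIndex=0, OutputWorkspace='first')
second = ExtractSingleSpectrum('parent', WorkspaceIndex=1, OutputWorkspace='second')
joined = _conjoin(first, second)  # 'second' is consumed; 'first' holds both spectra again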
Example #10
    def PyExec(self):
        inputWS = self.getProperty("InputWorkspace").value
        outputWS = self.getProperty("OutputWorkspace").valueAsStr
        xmins = self.getProperty("XMin").value
        xmaxs = self.getProperty("XMax").value
        deltas = self.getProperty("Delta").value
        preserveEvents = self.getProperty("PreserveEvents").value

        if self.__use_simple_rebin(xmins, xmaxs, deltas):
            # a single plain Rebin covers the whole workspace in this case
            name = "__{}_rebinned_".format(outputWS)
            params = (xmins[0], deltas[0], xmaxs[0])
            Rebin(InputWorkspace=inputWS,
                  OutputWorkspace=name,
                  Params=params,
                  PreserveEvents=preserveEvents)
            self.setProperty("OutputWorkspace", mtd[name])
            DeleteWorkspace(name)
        else:
            numSpec = inputWS.getNumberHistograms()

            # fill out the values for min and max as appropriate
            xmins = self.__extend_value(numSpec, xmins, replaceNan=True)
            xmaxs = self.__extend_value(numSpec, xmaxs, replaceNan=True)
            deltas = self.__extend_value(numSpec, deltas, replaceNan=False)

            self.log().debug("MIN:  " + str(xmins))
            self.log().debug("DELTA:" + str(deltas))
            self.log().debug("MAX:  " + str(xmaxs))

            # temporary workspaces should be hidden
            names = [
                "__{}_spec_{}".format(outputWS, i) for i in range(len(xmins))
            ]

            # how much the progress bar moves forward for each spectrum
            progStep = float(1) / float(3 * numSpec)

            # crop out each spectra and conjoin to a temporary workspace
            accumulationWS = None
            for i, (name, xmin, xmax,
                    delta) in enumerate(zip(names, xmins, xmaxs, deltas)):
                # don't go beyond the range of the data
                x = inputWS.readX(i)
                if xmin == Property.EMPTY_DBL:
                    xmin = x[0]
                if xmax == Property.EMPTY_DBL:
                    xmax = x[-1]

                progStart = 3 * i * progStep

                try:
                    # extract the range of the spectrum requested
                    ExtractSpectra(InputWorkspace=inputWS,
                                   OutputWorkspace=name,
                                   StartWorkspaceIndex=i,
                                   EndWorkspaceIndex=i,
                                   XMin=xmin,
                                   XMax=xmax,
                                   startProgress=progStart,
                                   endProgress=(progStart + progStep),
                                   EnableLogging=False)

                    # rebin the data
                    Rebin(InputWorkspace=name,
                          OutputWorkspace=name,
                          Params=(xmin, delta, xmax),
                          PreserveEvents=preserveEvents,
                          startProgress=progStart,
                          endProgress=(progStart + progStep),
                          EnableLogging=False)
                except Exception as e:
                    raise RuntimeError('for index={}: {}'.format(i, e)) from e

                # accumulate
                if accumulationWS is None:
                    accumulationWS = name  # messes up progress during very first step
                else:
                    # this deletes both input workspaces
                    ConjoinWorkspaces(InputWorkspace1=accumulationWS,
                                      InputWorkspace2=name,
                                      startProgress=(progStart + 2 * progStep),
                                      endProgress=(progStart + 3 * progStep),
                                      EnableLogging=False)
            self.setProperty("OutputWorkspace", mtd[accumulationWS])
            DeleteWorkspace(accumulationWS, EnableLogging=False)
Example #11
    def PyExec(self):
        inputWS = self.getProperty('InputWorkspace').value
        outputWS = self.getProperty('OutputWorkspace').valueAsStr
        xmins = self.getProperty('XMin').value
        xmaxs = self.getProperty('XMax').value

        if len(xmins) == 1 and len(xmaxs) == 1:
            name = '__{}_cropped_'.format(outputWS)
            CropWorkspace(InputWorkspace=inputWS,
                          OutputWorkspace=name,
                          XMin=xmins[0],
                          XMax=xmaxs[0])
            self.setProperty('OutputWorkspace', mtd[name])
            DeleteWorkspace(name)
        else:
            numSpec = inputWS.getNumberHistograms()

            # fill out the values for min and max as appropriate
            # numpy 1.7 (on rhel7) doesn't have np.full
            if len(xmins) == 0:
                xmins = np.array([Property.EMPTY_DBL] * numSpec)
            elif len(xmins) == 1:
                xmins = np.array([xmins[0]] * numSpec)
            if len(xmaxs) == 0:
                xmaxs = np.array([Property.EMPTY_DBL] * numSpec)
            elif len(xmaxs) == 1:
                xmaxs = np.array([xmaxs[0]] * numSpec)

            # replace nan with EMPTY_DBL in xmin/xmax
            indices = np.where(np.invert(np.isfinite(xmins)))
            xmins[indices] = Property.EMPTY_DBL
            indices = np.where(np.invert(np.isfinite(xmaxs)))
            xmaxs[indices] = Property.EMPTY_DBL

            self.log().information('MIN: ' + str(xmins))
            self.log().information('MAX: ' + str(xmaxs))

            # temporary workspaces should be hidden
            names = [
                '__{}_spec_{}'.format(outputWS, i) for i in range(len(xmins))
            ]

            # how much the progress bar moves forward for each spectrum
            progStep = float(1) / float(2 * numSpec)

            # crop out each spectra and conjoin to a temporary workspace
            accumulationWS = None
            for i, (name, xmin, xmax) in enumerate(zip(names, xmins, xmaxs)):
                # don't go beyond the range of the data
                x = inputWS.readX(i)
                if xmin < x[0]:
                    xmin = Property.EMPTY_DBL
                if xmax > x[-1]:
                    xmax = Property.EMPTY_DBL

                progStart = 2 * i * progStep

                # extract the range of the spectrum requested
                CropWorkspace(InputWorkspace=inputWS,
                              OutputWorkspace=name,
                              StartWorkspaceIndex=i,
                              EndWorkspaceIndex=i,
                              XMin=xmin,
                              XMax=xmax,
                              startProgress=progStart,
                              endProgress=(progStart + progStep),
                              EnableLogging=False)

                # accumulate
                if accumulationWS is None:
                    accumulationWS = name  # messes up progress during very first step
                else:
                    ConjoinWorkspaces(InputWorkspace1=accumulationWS,
                                      InputWorkspace2=name,
                                      startProgress=(progStart + progStep),
                                      endProgress=(progStart + 2 * progStep),
                                      EnableLogging=False)
            self.setProperty('OutputWorkspace', mtd[accumulationWS])
            DeleteWorkspace(accumulationWS)
Example #12
    def PyExec(self):
        data = self.getProperty("InputWorkspace").value  # [1~n]
        bkg = self.getProperty("BackgroundWorkspace").value  # [1~n]
        cal = self.getProperty("CalibrationWorkspace").value  # [1]
        xMin = self.getProperty("XMin").value
        xMax = self.getProperty("XMax").value
        numberBins = self.getProperty("NumberBins").value
        outWS = self.getPropertyValue("OutputWorkspace")

        # NOTE:
        # StringArrayProperty cannot be optional, so the background can only be passed in as a string
        # or a list, which will be manually unpacked here
        if bkg != "":
            bkg = [
                AnalysisDataService.retrieve(me)
                for me in map(str.strip, bkg.split(","))
            ]

        # NOTE:
        # xMin and xMax are initialized as empty numpy.array (np.array([])).
        _xMin, _xMax = self._locate_global_xlimit()
        xMin = _xMin if xMin.size == 0 else xMin
        xMax = _xMax if xMax.size == 0 else xMax

        # BEGIN_FOR: process_spectra
        for n, _wsn in enumerate(data):
            _mskn = f"__mask_{n}"  # calculated in previous loop
            _ws = AnalysisDataService.retrieve(_wsn)

            # resample spectra
            _ws_resampled = ResampleX(
                InputWorkspace=f"__ws_{n}",
                XMin=xMin,
                XMax=xMax,
                NumberBins=numberBins,
                EnableLogging=False,
            )

            # calibration
            if cal is not None:
                _ws_cal_resampled = self._resample_calibration(_ws, _mskn, xMin, xMax)
                _ws_resampled = Divide(
                    LHSWorkspace=_ws_resampled,
                    RHSWorkspace=_ws_cal_resampled,
                    EnableLogging=False,
                )
            else:
                _ws_cal_resampled = None

            _ws_resampled = Scale(
                InputWorkspace=_ws_resampled,
                Factor=self._get_scale(cal) / self._get_scale(_ws),
                EnableLogging=False,
            )

            # background
            if bkg != "":
                bgn = bkg[n] if isinstance(bkg, list) else bkg

                _ws_bkg_resampled = self._resample_background(
                    bgn, _ws, _mskn, xMin, xMax, _ws_cal_resampled
                )

                _ws_resampled = Minus(
                    LHSWorkspace=_ws_resampled,
                    RHSWorkspace=_ws_bkg_resampled,
                    EnableLogging=False,
                )

            # conjoin
            if n < 1:
                CloneWorkspace(
                    InputWorkspace=_ws_resampled,
                    OutputWorkspace="__ws_conjoined",
                    EnableLogging=False,
                )
            else:
                ConjoinWorkspaces(
                    InputWorkspace1="__ws_conjoined",
                    InputWorkspace2=_ws_resampled,
                    CheckOverlapping=False,
                    EnableLogging=False,
                )
        # END_FOR: process_spectra

        # Step_3: sum all spectra
        # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html
        if cal is not None:
            SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=False,
                EnableLogging=False,
            )
        else:
            SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=True,
                EnableLogging=False,
            )

        self.setProperty("OutputWorkspace", outWS)

        # Step_4: remove temp workspaces
        [
            DeleteWorkspace(ws, EnableLogging=False)
            for ws in self.temp_workspace_list
            if mtd.doesExist(ws)
        ]
Example #13
 def setUpClass(cls):
     cls.g1da = config['graph1d.autodistribution']
     config['graph1d.autodistribution'] = 'On'
     cls.ws2d_non_distribution = CreateWorkspace(
         DataX=[10, 20, 30, 10, 20, 30],
         DataY=[2, 3, 4, 5],
         DataE=[1, 2, 3, 4],
         NSpec=2,
         Distribution=False,
         UnitX='Wavelength',
         YUnitLabel='Counts per microAmp.hour',
         VerticalAxisUnit='DeltaE',
         VerticalAxisValues=[4, 6, 8],
         OutputWorkspace='ws2d_non_distribution')
     cls.ws2d_distribution = CreateWorkspace(
         DataX=[10, 20, 30, 10, 20, 30],
         DataY=[2, 3, 4, 5, 6],
         DataE=[1, 2, 3, 4, 6],
         NSpec=1,
         Distribution=True,
         UnitX='Wavelength',
         YUnitLabel='Counts per microAmp.hour',
         OutputWorkspace='ws2d_distribution')
     cls.ws2d_histo = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30],
                                      DataY=[2, 3, 4, 5],
                                      DataE=[1, 2, 3, 4],
                                      NSpec=2,
                                      Distribution=True,
                                      UnitX='Wavelength',
                                      VerticalAxisUnit='DeltaE',
                                      VerticalAxisValues=[4, 6, 8],
                                      OutputWorkspace='ws2d_histo')
     cls.ws2d_point = CreateWorkspace(
         DataX=[1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4],
         DataY=[2] * 12,
         NSpec=3,
         OutputWorkspace='ws2d_point')
     cls.ws1d_point = CreateWorkspace(DataX=[1, 2],
                                      DataY=[1, 2],
                                      NSpec=1,
                                      Distribution=False,
                                      OutputWorkspace='ws1d_point')
     cls.ws2d_histo_rag = CreateWorkspace(
         DataX=[1, 2, 3, 4, 5, 2, 4, 6, 8, 10],
         DataY=[2] * 8,
         NSpec=2,
         VerticalAxisUnit='DeltaE',
         VerticalAxisValues=[5, 7, 9],
         OutputWorkspace='ws2d_histo_rag')
     cls.ws2d_point_rag = CreateWorkspace(DataX=[1, 2, 3, 4, 2, 4, 6, 8],
                                          DataY=[2] * 8,
                                          NSpec=2,
                                          OutputWorkspace='ws2d_point_rag')
     cls.ws_MD_2d = CreateMDHistoWorkspace(
         Dimensionality=3,
         Extents='-3,3,-10,10,-1,1',
         SignalInput=range(25),
         ErrorInput=range(25),
         NumberOfEvents=10 * np.ones(25),
         NumberOfBins='5,5,1',
         Names='Dim1,Dim2,Dim3',
         Units='MomentumTransfer,EnergyTransfer,Angstrom',
         OutputWorkspace='ws_MD_2d')
     cls.ws_MD_1d = CreateMDHistoWorkspace(
         Dimensionality=3,
         Extents='-3,3,-10,10,-1,1',
         SignalInput=range(5),
         ErrorInput=range(5),
         NumberOfEvents=10 * np.ones(5),
         NumberOfBins='1,5,1',
         Names='Dim1,Dim2,Dim3',
         Units='MomentumTransfer,EnergyTransfer,Angstrom',
         OutputWorkspace='ws_MD_1d')
     cls.ws2d_point_uneven = CreateWorkspace(
         DataX=[10, 20, 30],
         DataY=[1, 2, 3],
         NSpec=1,
         OutputWorkspace='ws2d_point_uneven')
     cls.ws2d_high_counting_detector = CreateWorkspace(
         DataX=[1, 2, 3, 4] * 1000,
         DataY=[2] * 4 * 12 + [200] * 4 + [2] * 987 * 4,
         NSpec=1000,
         OutputWorkspace='ws2d_high_counting_detector')
     wp = CreateWorkspace(DataX=[15, 25, 35, 45],
                          DataY=[1, 2, 3, 4],
                          NSpec=1)
     ConjoinWorkspaces(cls.ws2d_point_uneven, wp, CheckOverlapping=False)
     cls.ws2d_point_uneven = mantid.mtd['ws2d_point_uneven']
     cls.ws2d_histo_uneven = CreateWorkspace(
         DataX=[10, 20, 30, 40],
         DataY=[1, 2, 3],
         NSpec=1,
         OutputWorkspace='ws2d_histo_uneven')
     wp = CreateWorkspace(DataX=[15, 25, 35, 45, 55],
                          DataY=[1, 2, 3, 4],
                          NSpec=1)
     ConjoinWorkspaces(cls.ws2d_histo_uneven, wp, CheckOverlapping=False)
     cls.ws2d_histo_uneven = mantid.mtd['ws2d_histo_uneven']
     newYAxis = mantid.api.NumericAxis.create(3)
     newYAxis.setValue(0, 10)
     newYAxis.setValue(1, 15)
     newYAxis.setValue(2, 25)
     cls.ws2d_histo_uneven.replaceAxis(1, newYAxis)
     AddTimeSeriesLog(cls.ws2d_histo,
                      Name="my_log",
                      Time="2010-01-01T00:00:00",
                      Value=100)
     AddTimeSeriesLog(cls.ws2d_histo,
                      Name="my_log",
                      Time="2010-01-01T00:30:00",
                      Value=15)
     AddTimeSeriesLog(cls.ws2d_histo,
                      Name="my_log",
                      Time="2010-01-01T00:50:00",
                      Value=100.2)
Example #14
    def PyExec(self):
        data = self._expand_groups()
        bkg = self.getProperty(
            "BackgroundWorkspace").valueAsStr  # same background for all
        cal = self.getProperty(
            "CalibrationWorkspace").value  # same calibration for all
        numberBins = self.getProperty("NumberBins").value
        outWS = self.getPropertyValue("OutputWorkspace")
        summing = self.getProperty("Sum").value  # [Yes or No]

        # convert all of the input workspaces into spectra in the "target" units (generally angle)
        data, masks = self._convert_data(data)

        # determine x-range
        xMin, xMax = self._locate_global_xlimit(data)

        # BEGIN_FOR: process_spectra
        for n, (_wsn, _mskn) in enumerate(zip(data, masks)):
            # resample spectra
            ResampleX(
                InputWorkspace=_wsn,
                OutputWorkspace=_wsn,
                XMin=xMin,
                XMax=xMax,
                NumberBins=numberBins,
                EnableLogging=False,
            )

            # calibration
            if cal is not None:
                _ws_cal_resampled = self._resample_calibration(
                    _wsn, _mskn, xMin, xMax)
                Divide(
                    LHSWorkspace=_wsn,
                    RHSWorkspace=_ws_cal_resampled,
                    OutputWorkspace=_wsn,
                    EnableLogging=False,
                )
            else:
                _ws_cal_resampled = None

            Scale(
                InputWorkspace=_wsn,
                OutputWorkspace=_wsn,
                Factor=self._get_scale(cal) / self._get_scale(_wsn),
                EnableLogging=False,
            )

            # background
            if bkg:
                _ws_bkg_resampled = self._resample_background(
                    bkg, _wsn, _mskn, xMin, xMax, _ws_cal_resampled)

                Minus(
                    LHSWorkspace=_wsn,
                    RHSWorkspace=_ws_bkg_resampled,
                    OutputWorkspace=_wsn,
                    EnableLogging=False,
                )

            if summing:
                # conjoin
                if n < 1:
                    RenameWorkspace(
                        InputWorkspace=_wsn,
                        OutputWorkspace="__ws_conjoined",
                        EnableLogging=False,
                    )
                else:
                    # this adds to `InputWorkspace1`
                    ConjoinWorkspaces(
                        InputWorkspace1="__ws_conjoined",
                        InputWorkspace2=_wsn,
                        CheckOverlapping=False,
                        EnableLogging=False,
                    )

        # END_FOR: process_spectra
        # Step_3: sum all spectra
        # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html
        if summing:
            if cal is not None:
                outWS = SumSpectra(
                    InputWorkspace="__ws_conjoined",
                    OutputWorkspace=outWS,
                    WeightedSum=True,
                    MultiplyBySpectra=not bool(cal),
                    EnableLogging=False,
                )
            else:
                outWS = SumSpectra(
                    InputWorkspace="__ws_conjoined",
                    OutputWorkspace=outWS,
                    WeightedSum=True,
                    MultiplyBySpectra=True,
                    EnableLogging=False,
                )
        else:
            if len(data) == 1:
                outWS = RenameWorkspace(InputWorkspace=data[0],
                                        OutputWorkspace=outWS)
            else:
                outWS = GroupWorkspaces(InputWorkspaces=data,
                                        OutputWorkspace=outWS)

        self.setProperty("OutputWorkspace", outWS)

        # Step_4: remove temp workspaces
        [
            DeleteWorkspace(ws, EnableLogging=False)
            for ws in self.temp_workspace_list if mtd.doesExist(ws)
        ]
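
Taken together, the examples rely on one recurring pattern: build or extract per-spectrum workspaces, append them with ConjoinWorkspaces (passing CheckOverlapping=False when spectrum numbers would collide), and then retrieve the grown first workspace from the ADS, since the second input is deleted. A minimal sketch of that pattern, assuming only mantid.simpleapi and mantid.api; the workspace names are illustrative:

from mantid.simpleapi import CreateWorkspace, ConjoinWorkspaces
from mantid.api import AnalysisDataService

CreateWorkspace(DataX=[1, 2, 3], DataY=[1, 2, 3], NSpec=1, OutputWorkspace='combined')
CreateWorkspace(DataX=[2, 3, 4, 5], DataY=[2, 3, 4, 5], NSpec=1, OutputWorkspace='__piece')
# both workspaces carry spectrum number 1, so disable the overlap check
ConjoinWorkspaces(InputWorkspace1='combined', InputWorkspace2='__piece',
                  CheckOverlapping=False)
combined = AnalysisDataService.retrieve('combined')  # now a ragged two-spectrum workspace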