Example #1
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty(
            "EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        self._normalizeToVanadium = self.getProperty("GroupDetectors").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS',
                     OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._doNorm = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._doNorm)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            self._normWs = self._sum_and_calibrate(norm_set,
                                                   extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._doNorm == "by detector ID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [
                    normRange[0], normRange[1] - normRange[0], normRange[1]
                ]
                api.Rebin(InputWorkspace=self._normWs,
                          OutputWorkspace=self._normWs,
                          Params=self._normRange)

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._doNorm == "by Q slice":
                self._normWs = self._group_and_SofQW(self._normWs,
                                                     self._etBins,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Mask detectors with insufficient Vanadium signal
            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
            # Divide by Vanadium
            if self._doNorm == "by detector ID":
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   self._etBins,
                                                   isSample=True)
            # Divide by Vanadium
            if self._doNorm == "by Q slice":
                api.Integration(InputWorkspace=self._normWs,
                                OutputWorkspace=self._normWs,
                                RangeLower=DEFAULT_VANADIUM_ENERGY_RANGE[0],
                                RangeUpper=DEFAULT_VANADIUM_ENERGY_RANGE[1])
                api.Divide(LHSWorkspace=self._samSqwWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            api.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
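
A note on the single-bin Rebin above: in the "by detector ID" branch the Params list is built as [start, end - start, end], i.e. one bin spanning the whole NormWavelengthRange, so the rebin integrates all Vanadium counts in that window. Below is a minimal NumPy-only sketch of that arithmetic; the wavelength values and the single_bin_params helper are illustrative, not part of the algorithm.

import numpy as np

def single_bin_params(wav_range):
    """Build Rebin-style Params [start, width, end] describing one bin."""
    start, end = wav_range
    return [start, end - start, end]

# hypothetical wavelength window, in Angstroms
params = single_bin_params([6.24, 6.30])
# a single histogram bin over the window integrates every event inside it
events = np.random.uniform(6.20, 6.35, size=1000)
counts, edges = np.histogram(events, bins=[params[0], params[2]])
print(params)   # [6.24, ~0.06, 6.30]
print(counts)   # one number: all events that fell inside the window
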
Example #2
    def _PyExec(self):
        # Collect Flux Normalization
        if self.getProperty('DoFluxNormalization').value:
            self._flux_normalization_type =\
                self.getProperty('FluxNormalizationType').value
            if self._flux_normalization_type == 'Monitor':
                self._MonNorm = True

        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value

        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary

        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])

        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection['mask_file']

        self._maskWs = tws('BASIS_MASK')
        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace=self._maskWs,
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask(InputWorkspace=self._maskWs,
                                  OutputWorkspace=tws('ExtractMask'))
        self._dMask = _dMask[1]

        #
        #  Process the Vanadium
        #
        norm_runs = self.getProperty('NormRunNumbers').value
        if self._doNorm and bool(norm_runs):
            self._normalizationType = self.getProperty(
                'NormalizationType').value
            self.log().information('Divide by Vanadium with normalization ' +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._get_runs(norm_runs, doIndiv=False)[0]
            normWs = tws(self._make_run_name(norm_set[0]) + '_vanadium')
            self._sum_and_calibrate(norm_set, normWs)

            normRange = self._reflection['vanadium_wav_range']
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == 'by detector ID':
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            self._normMask = tws('BASIS_NORM_MASK')
            sapi.FindDetectorsOutsideLimits(
                InputWorkspace=normWs,
                LowThreshold=1.0 * bin_width,
                # exclude events outside the range
                RangeLower=normRange[0],
                RangeUpper=normRange[1],
                OutputWorkspace=self._normMask)
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == 'by Q slice':
                self._normWs = self._group_and_SofQW(normWs,
                                                     normWs,
                                                     self._etBins,
                                                     isSample=False)
        #
        #  Process the sample
        #
        self._run_list = self._get_runs(self.getProperty('RunNumbers').value,
                                        doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = tws(self._make_run_name(run_set[0]))
            self._sum_and_calibrate(run_set, self._samWs)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == 'by detector ID':
                # Mask detectors with low Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace=self._normMask)
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            prefix = self._make_run_name(run_set[0])
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   prefix,
                                                   self._etBins,
                                                   isSample=True)
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == 'by Q slice':
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = '_divided.dat' if self._doNorm else '.dat'
            dave_grp_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = '_divided_sqw.nxs' if self._doNorm else '_sqw.nxs'
            processed_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty('OutputSusceptibility').value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace('sqw', 'Xqw')
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target='DeltaE_inFrequency')
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace(
                    'sqw', 'Xqw')
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)
            if self.getProperty('OutputPowderSpectrum').value:
                self.generatePowderSpectrum()
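
A note on the Q-binning adjustment near the top of this version: the three MomentumTransferBins values are treated as [first bin centre, bin width, last bin centre] (per the in-code comments), and the outer centres are shifted by half a width to become bin boundaries, giving the [start, width, end] form expected downstream. A standalone sketch under that reading, with made-up numbers and an illustrative helper name:

import numpy as np

def centres_to_boundaries(qbins):
    """Shift the outer Q values from bin centres to bin boundaries."""
    q = list(qbins)
    q[0] -= q[1] / 2.0  # leftmost bin boundary
    q[2] += q[1] / 2.0  # rightmost bin boundary
    return q

# made-up [first centre, width, last centre] in inverse Angstroms
qbins = centres_to_boundaries([0.3, 0.2, 1.9])
edges = np.arange(qbins[0], qbins[2] + qbins[1] / 2.0, qbins[1])
print(qbins)  # [0.2, 0.2, 2.0] -> Rebin-style [start, width, end]
print(edges)  # boundaries 0.2, 0.4, ..., 2.0
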
Example #3
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._reflection = REFLECTIONS_DICT[self.getProperty(
            "ReflectionType").value]
        self._doIndiv = self.getProperty("DoIndividual").value
        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty("EnergyBins").value
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty(
                "NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detector ID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [
                    normRange[0], normRange[1] - normRange[0], normRange[1]
                ]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs,
                                                     self._etBins,
                                                     isSample=False)
            if not self._debugMode and self._normalizationType == "by Q slice":
                sapi.DeleteWorkspace(normWs)  # Delete vanadium events file

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
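
A note on the Transpose / ConvertToPointData / Transpose sequence in the sample loop above: it converts the vertical Q axis of S(Q,w) from histogram boundaries to point data (bin centres); the transposes are needed only because ConvertToPointData acts on the X axis. A NumPy-only sketch of the boundary-to-centre step it amounts to, with illustrative Q values:

import numpy as np

# N+1 histogram boundaries on the Q axis (values are illustrative)
q_boundaries = np.arange(0.2, 2.0 + 1e-9, 0.2)
# the equivalent point data: N bin centres
q_centres = 0.5 * (q_boundaries[:-1] + q_boundaries[1:])
print(q_boundaries)  # 10 boundaries
print(q_centres)     # 9 centres
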
Example #4
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty(
            "EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS',
                     OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        # Do normalization if run numbers are present
        norm_runs = self.getProperty("NormRunNumbers").value
        self._doNorm = bool(norm_runs)
        self.log().information("Do Norm: " + str(self._doNorm))
        if self._doNorm:
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            # Set up the integration (rebin) parameters
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [
                normRange[0], normRange[1] - normRange[0], normRange[1]
            ]

            # Process normalization runs
            self._norm_run_list = self._getRuns(norm_runs)
            for norm_set in self._norm_run_list:
                extra_extension = "_norm"
                self._normWs = self._makeRunName(norm_set[0])
                self._normWs += extra_extension
                self._normMonWs = self._normWs + "_monitors"
                self._sumRuns(norm_set, self._normWs, self._normMonWs,
                              extra_extension)
                self._calibData(self._normWs, self._normMonWs)

            api.Rebin(InputWorkspace=self._normWs,
                      OutputWorkspace=self._normWs,
                      Params=self._normRange)
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

        self._run_list = self._getRuns(self.getProperty("RunNumbers").value)
        for run_set in self._run_list:
            self._samWs = self._makeRunName(run_set[0])
            self._samMonWs = self._samWs + "_monitors"
            self._samWsRun = str(run_set[0])

            self._sumRuns(run_set, self._samWs, self._samMonWs)
            # After files are all added, run the reduction
            self._calibData(self._samWs, self._samMonWs)

            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)

            api.ConvertUnits(InputWorkspace=self._samWs,
                             OutputWorkspace=self._samWs,
                             Target='DeltaE',
                             EMode='Indirect')
            api.CorrectKiKf(InputWorkspace=self._samWs,
                            OutputWorkspace=self._samWs,
                            EMode='Indirect')

            api.Rebin(InputWorkspace=self._samWs,
                      OutputWorkspace=self._samWs,
                      Params=self._etBins)
            if self._groupDetOpt != "None":
                if self._groupDetOpt == "Low-Resolution":
                    grp_file = "BASIS_Grouping_LR.xml"
                else:
                    grp_file = "BASIS_Grouping.xml"
                # If mask override used, we need to add default grouping file location to
                # search paths
                if self._overrideMask:
                    config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)

                api.GroupDetectors(InputWorkspace=self._samWs,
                                   OutputWorkspace=self._samWs,
                                   MapFile=grp_file,
                                   Behaviour="Sum")

            self._samSqwWs = self._samWs + '_sqw'
            api.SofQW3(InputWorkspace=self._samWs,
                       OutputWorkspace=self._samSqwWs,
                       QAxisBinning=self._qBins,
                       EMode='Indirect',
                       EFixed=DEFAULT_ENERGY)

            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + ".dat"
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + "_sqw.nxs"
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
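
A note on the energy-bin conversion in this early version: EnergyBins is entered in micro-eV while the reduction rebins energy transfer in milli-eV, hence the division by MICROEV_TO_MILLIEV (the later versions write the same conversion as 1.E-03 *). A short sketch assuming the constant equals 1000; the bin parameters are made up:

# Assumed value of the module-level constant: 1000 micro-eV per milli-eV
MICROEV_TO_MILLIEV = 1000.0

# illustrative EnergyBins property value in micro-eV: [start, step, end]
energy_bins_microev = [-120.0, 0.4, 120.0]
et_bins_milliev = [e / MICROEV_TO_MILLIEV for e in energy_bins_microev]
print(et_bins_milliev)  # [-0.12, 0.0004, 0.12], Rebin parameters in milli-eV
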
Example #5
    def PyExec(self):
        config['default.facility'] = 'SNS'
        config['default.instrument'] = self._long_inst
        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value
        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1]/2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1]/2.0  # rightmost bin boundary
        self._MonNorm = self.getProperty('MonitorNorm').value
        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            normRange = self.getProperty("NormWavelengthRange").value
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detector ID":
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            LowThreshold=1.0*bin_width,
                                            # exclude events outside the range
                                            RangeLower=normRange[0],
                                            RangeUpper=normRange[1],
                                            OutputWorkspace='BASIS_NORM_MASK')
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs, self._etBins,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs, self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty("OutputSusceptibility").value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace("sqw", "Xqw")
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target="DeltaE_inFrequency",
                                  Emode="Indirect")
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace("sqw", "Xqw")
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
                if self._normalizationType == "by Q slice":
                    sapi.DeleteWorkspace(normWs)  # Delete vanadium events file
            if self.getProperty("ExcludeTimeSegment").value:
                sapi.DeleteWorkspace('splitter')
                [sapi.DeleteWorkspace(name) for name in
                 ('splitted_unfiltered', 'TOFCorrectWS') if
                 AnalysisDataService.doesExist(name)]
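
A note on the output-file naming shared by all versions above: the DAVE group and NeXus names derive from the run name, a "_divided" marker is added only when Vanadium division was requested, and the susceptibility file swaps "sqw" for "Xqw". A minimal sketch of that logic; the run name and the output_names helper are hypothetical stand-ins for _makeRunName / _make_run_name:

def output_names(run_name, divided_by_vanadium):
    """Reproduce the extension logic used when saving the reduced data."""
    dat_ext = '_divided.dat' if divided_by_vanadium else '.dat'
    nxs_ext = '_divided_sqw.nxs' if divided_by_vanadium else '_sqw.nxs'
    dave_grp = run_name + dat_ext
    nexus = run_name + nxs_ext
    susceptibility = nexus.replace('sqw', 'Xqw')  # OutputSusceptibility file
    return dave_grp, nexus, susceptibility

print(output_names('BSS_12345', True))
# ('BSS_12345_divided.dat', 'BSS_12345_divided_sqw.nxs', 'BSS_12345_divided_Xqw.nxs')
print(output_names('BSS_12345', False))
# ('BSS_12345.dat', 'BSS_12345_sqw.nxs', 'BSS_12345_Xqw.nxs')
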