Example #1
 def _group_and_SofQW(self, wsName, etRebins, isSample=True):
     """ Transforms from wavelength and detector ID to S(Q,E)
     @param wsName: workspace as a function of wavelength and detector id
     @param etRebins: final energy domain and bin width
     @param isSample: discriminates between sample and vanadium
     @return: S(Q,E)
     """
     api.ConvertUnits(InputWorkspace=wsName,
                      OutputWorkspace=wsName,
                      Target='DeltaE',
                      EMode='Indirect')
     api.CorrectKiKf(InputWorkspace=wsName,
                     OutputWorkspace=wsName,
                     EMode='Indirect')
     api.Rebin(InputWorkspace=wsName,
               OutputWorkspace=wsName,
               Params=etRebins)
     if self._groupDetOpt != "None":
         if self._groupDetOpt == "Low-Resolution":
             grp_file = "BASIS_Grouping_LR.xml"
         else:
             grp_file = "BASIS_Grouping.xml"
         # If mask override used, we need to add default grouping file
         # location to search paths
         if self._overrideMask:
             config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
         # Grouping is applied whenever a grouping option is selected,
         # not only when the mask file is overridden
         api.GroupDetectors(InputWorkspace=wsName,
                            OutputWorkspace=wsName,
                            MapFile=grp_file,
                            Behaviour="Sum")
     wsSqwName = wsName + '_divided_sqw' if isSample and self._doNorm else wsName + '_sqw'
     api.SofQW3(InputWorkspace=wsName,
                OutputWorkspace=wsSqwName,
                QAxisBinning=self._qBins,
                EMode='Indirect',
                EFixed='2.0826')
     return wsSqwName
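The etRebins argument above is handed straight to Mantid's Rebin as its Params list, i.e. a [start, step, end] triplet of bin boundaries in meV once ConvertUnits(Target='DeltaE') has run. A minimal, hedged sketch of a call site follows; the energy values, the reducer instance, and the workspace name are hypothetical, not taken from the source.

    # Hypothetical illustration only: Rebin Params as [start, step, end] in meV
    etRebins = [-0.12, 0.0004, 0.12]  # e.g. -120 ueV to +120 ueV in 0.4 ueV steps (placeholder values)
    sqw_name = reducer._group_and_SofQW('BSS_12345', etRebins, isSample=True)  # 'reducer' and run name are placeholders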
Example #2
 def _group_and_SofQW(self, wsName, etRebins, isSample=True):
     """ Transforms from wavelength and detector ID to S(Q,E)
     @param wsName: workspace as a function of wavelength and detector id
     @param etRebins: final energy domain and bin width
     @param isSample: discriminates between sample and vanadium
     @return: S(Q,E)
     """
     sapi.ConvertUnits(InputWorkspace=wsName,
                       OutputWorkspace=wsName,
                       Target='DeltaE',
                       EMode='Indirect')
     sapi.CorrectKiKf(InputWorkspace=wsName,
                      OutputWorkspace=wsName,
                      EMode='Indirect')
     sapi.Rebin(InputWorkspace=wsName,
                OutputWorkspace=wsName,
                Params=etRebins)
     if self._groupDetOpt != "None":
         if self._groupDetOpt == "Low-Resolution":
             grp_file = "BASIS_Grouping_LR.xml"
         else:
             grp_file = "BASIS_Grouping.xml"
         # If mask override used, we need to add default grouping file
         # location to search paths
         if self._overrideMask:
             config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
         # Grouping is applied whenever a grouping option is selected,
         # not only when the mask file is overridden
         sapi.GroupDetectors(InputWorkspace=wsName,
                             OutputWorkspace=wsName,
                             MapFile=grp_file,
                             Behaviour="Sum")
     wsSqwName = wsName + '_divided_sqw' if isSample and self._doNorm else wsName + '_sqw'
     sapi.SofQW3(InputWorkspace=wsName,
                 QAxisBinning=self._qBins,
                 EMode='Indirect',
                 EFixed=self._reflection["default_energy"],
                 OutputWorkspace=wsSqwName)
     # Rebin the vanadium within the elastic line
     if not isSample:
         sapi.Rebin(InputWorkspace=wsSqwName,
                    OutputWorkspace=wsSqwName,
                    Params=self._reflection["vanadium_bins"])
     return wsSqwName
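This version reads the fixed final energy and the vanadium elastic-line binning from self._reflection. Below is a minimal sketch (an assumption, not code from the source) of the dictionary entries these calls require; the 'default_energy' value matches the EFixed used in Example #1, while the 'vanadium_bins' numbers are purely illustrative.

    # Sketch of the self._reflection entries used above (values illustrative)
    self._reflection = {
        'default_energy': 2.0826,                    # fixed final energy in meV, as in Example #1
        'vanadium_bins': [-0.0034, 0.0068, 0.0034],  # hypothetical [start, step, end]; the step spans the
                                                     # whole range, so Rebin yields a single elastic bin
    }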
Example #3
    def _group_and_SofQW(self, wsName, prefix, etRebins, isSample=True):
        r"""
        Transforms from wavelength and detector ID to S(Q,E)

        Parameters
        ----------
        wsName: str
            Name of a workspace as a function of wavelength and detector id
        prefix: str
            Name prefix for output workspaces and files
        etRebins: list
            Final energy domain and bin width
        isSample: bool
            Discriminates between sample and vanadium

        Returns
        -------
        str
            Name of S(Q,E) workspace
        """
        sapi.ConvertUnits(InputWorkspace=wsName,
                          OutputWorkspace=wsName,
                          Target='DeltaE',
                          EMode='Indirect',
                          EFixed=self._reflection['default_energy'])
        sapi.CorrectKiKf(InputWorkspace=wsName,
                         OutputWorkspace=wsName,
                         EMode='Indirect',
                         EFixed=self._reflection['default_energy'])
        sapi.Rebin(InputWorkspace=wsName,
                   OutputWorkspace=wsName,
                   Params=etRebins)
        if self._groupDetOpt != 'None':
            if self._groupDetOpt == 'Low-Resolution':
                grp_file = 'BASIS_Grouping_LR.xml'
            else:
                grp_file = 'BASIS_Grouping.xml'
            # If mask override used, we need to add default grouping file
            # location to search paths
            if self._overrideMask:
                mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            # Grouping is applied whenever a grouping option is selected,
            # not only when the mask file is overridden
            sapi.GroupDetectors(InputWorkspace=wsName,
                                OutputWorkspace=wsName,
                                MapFile=grp_file,
                                Behaviour='Sum')

        # Output NXSPE file (must be done before transforming the
        # vertical axis to point data)
        if isSample and self._nsxpe_do:
            extension = '.nxspe'
            run = mtd[wsName].getRun()
            if run.hasProperty(self._nxspe_psi_angle_log):
                psi_angle_logproperty = \
                    run.getProperty(self._nxspe_psi_angle_log)
                psi_angle = np.average(psi_angle_logproperty.value)
                psi_angle += self._nxspe_offset
                nxspe_filename = prefix + extension
                sapi.SaveNXSPE(InputWorkspace=wsName,
                               Filename=nxspe_filename,
                               Efixed=self._reflection['default_energy'],
                               Psi=psi_angle,
                               KiOverKfScaling=1)
            else:
                error_message = 'Runs have no log entry named {}'\
                    .format(self._nxspe_psi_angle_log)
                self.log().error(error_message)

        wsSqwName = prefix if isSample else wsName
        wsSqwName += '_divided_sqw' if self._doNorm else '_sqw'

        sapi.SofQW3(InputWorkspace=wsName,
                    QAxisBinning=self._qBins,
                    EMode='Indirect',
                    EFixed=self._reflection['default_energy'],
                    OutputWorkspace=wsSqwName)
        # Rebin the vanadium within the elastic line
        if not isSample:
            sapi.Rebin(InputWorkspace=wsSqwName,
                       OutputWorkspace=wsSqwName,
                       Params=self._reflection['vanadium_bins'])
        return wsSqwName
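For context, here is a hedged usage sketch (not part of the source) of how a reduction loop in the style of Example #4 might call this version of the method and persist the result; prefix, self._samWs, and self._etBins follow the naming used elsewhere in these examples.

    # Hypothetical call site, mirroring the save pattern of Example #4
    wsSqwName = self._group_and_SofQW(self._samWs, prefix, self._etBins, isSample=True)
    sapi.SaveDaveGrp(Filename=prefix + '.dat',
                     InputWorkspace=wsSqwName,
                     ToMicroEV=True)
    sapi.SaveNexus(Filename=prefix + '_sqw.nxs',
                   InputWorkspace=wsSqwName)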
Example #4
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty(
            "EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS',
                     OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        # Do normalization if run numbers are present
        norm_runs = self.getProperty("NormRunNumbers").value
        self._doNorm = bool(norm_runs)
        self.log().information("Do Norm: " + str(self._doNorm))
        if self._doNorm:
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            # Setup the integration (rebin) parameters
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [
                normRange[0], normRange[1] - normRange[0], normRange[1]
            ]

            # Process normalization runs
            self._norm_run_list = self._getRuns(norm_runs)
            for norm_set in self._norm_run_list:
                extra_extension = "_norm"
                self._normWs = self._makeRunName(norm_set[0])
                self._normWs += extra_extension
                self._normMonWs = self._normWs + "_monitors"
                self._sumRuns(norm_set, self._normWs, self._normMonWs,
                              extra_extension)
                self._calibData(self._normWs, self._normMonWs)

            api.Rebin(InputWorkspace=self._normWs,
                      OutputWorkspace=self._normWs,
                      Params=self._normRange)
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

        self._run_list = self._getRuns(self.getProperty("RunNumbers").value)
        for run_set in self._run_list:
            self._samWs = self._makeRunName(run_set[0])
            self._samMonWs = self._samWs + "_monitors"
            self._samWsRun = str(run_set[0])

            self._sumRuns(run_set, self._samWs, self._samMonWs)
            # After files are all added, run the reduction
            self._calibData(self._samWs, self._samMonWs)

            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)

            api.ConvertUnits(InputWorkspace=self._samWs,
                             OutputWorkspace=self._samWs,
                             Target='DeltaE',
                             EMode='Indirect')
            api.CorrectKiKf(InputWorkspace=self._samWs,
                            OutputWorkspace=self._samWs,
                            EMode='Indirect')

            api.Rebin(InputWorkspace=self._samWs,
                      OutputWorkspace=self._samWs,
                      Params=self._etBins)
            if self._groupDetOpt != "None":
                if self._groupDetOpt == "Low-Resolution":
                    grp_file = "BASIS_Grouping_LR.xml"
                else:
                    grp_file = "BASIS_Grouping.xml"
                # If mask override used, we need to add default grouping file location to
                # search paths
                if self._overrideMask:
                    config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)

                api.GroupDetectors(InputWorkspace=self._samWs,
                                   OutputWorkspace=self._samWs,
                                   MapFile=grp_file,
                                   Behaviour="Sum")

            self._samSqwWs = self._samWs + '_sqw'
            api.SofQW3(InputWorkspace=self._samWs,
                       OutputWorkspace=self._samSqwWs,
                       QAxisBinning=self._qBins,
                       EMode='Indirect',
                       EFixed=DEFAULT_ENERGY)

            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + ".dat"
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + "_sqw.nxs"
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
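The _normRange construction in this example turns the two-element NormWavelengthRange into a single-bin Rebin specification: [start, end - start, end] has a step equal to the full range, so the vanadium normalization run is integrated over the whole wavelength window before FindDetectorsOutsideLimits is applied. A small worked example with hypothetical numbers:

    normRange = [6.24, 6.30]   # hypothetical wavelength window in Angstroms
    _normRange = [normRange[0], normRange[1] - normRange[0], normRange[1]]
    # -> [6.24, 0.06, 6.30]: a single bin from 6.24 to 6.30 Angstroms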