def iliad_wrapper(*args):
        #seq = inspect.stack()
        # output workspace name.
        try:
            _,r = funcreturns.lhs_info('both')
            out_ws_name = r[0]
        except:
            out_ws_name = None

        args = list(args)  # make args mutable so the file name can be replaced below
        host = args[0]
        if len(args) > 1:
            input_file = args[1]
            if len(args) > 2:
                output_directory = args[2]
            else:
                output_directory = None
        else:
            input_file = None
            output_directory = None
        # add input file folder to data search directory if file has it
        if input_file and isinstance(input_file,str):
            data_path = os.path.dirname(input_file)
            if len(data_path) > 0:
                try:
                    config.appendDataSearchDir(str(data_path))
                    args[1] = os.path.basename(input_file)
                #pylint: disable=bare-except
                except: # if mantid is not available, this should ignore config
                    pass
        if output_directory:
            config['defaultsave.directory'] = str(output_directory)

        #pylint: disable=protected-access
        if host._run_from_web:
            #pylint: disable=protected-access
            web_vars = host._wvs.get_all_vars()
            host.reducer.prop_man.set_input_parameters(**web_vars)
        else:
            pass  # variables are already set up in the hosting script

        custom_print_function = host.set_custom_output_filename()
        if custom_print_function is not None:
            PropertyManager.save_file_name.set_custom_print(custom_print_function)
        #
        rez = reduce(*args)

        # prohibit returning workspace to web services.
        #pylint: disable=protected-access
        if host._run_from_web and not isinstance(rez,str):
            rez = ""
        else:
            if isinstance(rez, list):
                # multirep run, just return as it is
                return rez
            if rez is not None and out_ws_name and rez.name() != out_ws_name:
                # the function does not return None here; pylint is wrong
                #pylint: disable=W1111
                rez = RenameWorkspace(InputWorkspace=rez, OutputWorkspace=out_ws_name)

        return rez
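The renaming at the end relies on funcreturns.lhs_info, which inspects the calling frame for the variable name on the left-hand side of the assignment. A minimal pure-Python sketch of that trick (hypothetical names, no Mantid needed); it also shows why the bare except above is justified, since the lookup fails whenever there is no simple assignment at the call site:

import inspect

def lhs_name():
    """Best-effort guess at the variable the caller's caller assigns to.

    Only handles simple 'name = func(...)' call sites; returns None otherwise.
    """
    frame = inspect.stack()[2]
    line = frame.code_context[0] if frame.code_context else ''
    return line.split('=')[0].strip() if '=' in line else None

def tracked_compute():
    out_name = lhs_name()   # e.g. 'result' for 'result = tracked_compute()'
    value = 42              # stand-in for the actual reduction output
    print('caller assigns to:', out_name)
    return value

result = tracked_compute()  # prints: caller assigns to: result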
Example #2
 def _group_and_SofQW(self, wsName, etRebins, isSample=True):
     """ Transforms from wavelength and detector ID to S(Q,E)
     @param wsName: workspace as a function of wavelength and detector id
     @param etRebins: final energy domain and bin width
     @param isSample: discriminates between sample and vanadium
     @return: S(Q,E)
     """
     api.ConvertUnits(InputWorkspace=wsName,
                      OutputWorkspace=wsName,
                      Target='DeltaE', EMode='Indirect')
     api.CorrectKiKf(InputWorkspace=wsName,
                     OutputWorkspace=wsName,
                     EMode='Indirect')
     api.Rebin(InputWorkspace=wsName,
               OutputWorkspace=wsName,
               Params=etRebins)
     if self._groupDetOpt != "None":
         if self._groupDetOpt == "Low-Resolution":
             grp_file = "BASIS_Grouping_LR.xml"
         else:
             grp_file = "BASIS_Grouping.xml"
         # If mask override used, we need to add default grouping file
         # location to search paths
         if self._overrideMask:
             config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
         api.GroupDetectors(InputWorkspace=wsName,
                            OutputWorkspace=wsName,
                            MapFile=grp_file, Behaviour="Sum")
     wsSqwName = wsName+'_divided_sqw' if isSample and self._doNorm else wsName+'_sqw'
     api.SofQW3(InputWorkspace=wsName,
                OutputWorkspace=wsSqwName,
                QAxisBinning=self._qBins, EMode='Indirect',
                EFixed=self._reflection["default_energy"])
     return wsSqwName
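The etRebins argument follows Mantid's Rebin convention of a [start, step, end] triplet. A small plain-numpy sketch (the values are made up) of the histogram edges such a triplet describes:

import numpy as np

# [start, step, end] in meV, as accepted by Rebin's Params property
et_rebins = [-120.0, 0.4, 120.0]
start, step, end = et_rebins

# for a constant positive step these are the bin edges Rebin would produce
n_bins = int(round((end - start) / step))
edges = np.linspace(start, end, n_bins + 1)
print(n_bins, 'bins; first edges:', edges[:3])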
Example #3
 def _group_and_SofQW(self, wsName, etRebins, isSample=True):
     """ Transforms from wavelength and detector ID to S(Q,E)
     @param wsName: workspace as a function of wavelength and detector id
     @return: S(Q,E)
     """
     api.ConvertUnits(InputWorkspace=wsName,
                      OutputWorkspace=wsName,
                      Target='DeltaE',
                      EMode='Indirect')
     api.CorrectKiKf(InputWorkspace=wsName,
                     OutputWorkspace=wsName,
                     EMode='Indirect')
     api.Rebin(InputWorkspace=wsName,
               OutputWorkspace=wsName,
               Params=etRebins)
     if self._groupDetOpt != "None":
         if self._groupDetOpt == "Low-Resolution":
             grp_file = "BASIS_Grouping_LR.xml"
         else:
             grp_file = "BASIS_Grouping.xml"
         # If mask override used, we need to add default grouping file
         # location to search paths
         if self._overrideMask:
             config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
         api.GroupDetectors(InputWorkspace=wsName,
                            OutputWorkspace=wsName,
                            MapFile=grp_file,
                            Behaviour="Sum")
     wsSqwName = wsName + '_divided_sqw' if isSample and self._doNorm else wsName + '_sqw'
     api.SofQW3(InputWorkspace=wsName,
                OutputWorkspace=wsSqwName,
                QAxisBinning=self._qBins,
                EMode='Indirect',
                EFixed=2.0826)
     return wsSqwName
Example #4
def main(input_file=None, output_dir=None):
    """ This method is used to run code from the web service
        and should not be touched unless you change the name of the
        particular ReductionWrapper class (MERLINReduction here)

        The only exception is changing the output folder to save data to
    """
    inst_dir = '/ceph/home/isis_direct_soft/InstrumentFiles/merlin/'
    data_dir1 = r'//isis/inst$/NDXMERLIN/Instrument/data/cycle_17_1'
    data_dir2 = '/archive/NDXMERLIN/Instrument/data/cycle_16_5/;/archive/NDXMERLIN/Instrument/data/cycle_16_4/'
    config.appendDataSearchDir('{0};{1};{2}'.format(inst_dir, data_dir1, data_dir2))

    web_var.advanced_vars['hardmaskPlus'] = os.path.join(AUTOREDUCTION_DIR, web_var.advanced_vars['hardmaskPlus'])
    web_var.advanced_vars['det_cal_file'] = os.path.join(AUTOREDUCTION_DIR, web_var.advanced_vars['det_cal_file'])
    web_var.advanced_vars['map_file'] = os.path.join(AUTOREDUCTION_DIR, web_var.advanced_vars['map_file'])

    # note web variables initialization
    rd = mer_red.MERLINReduction(web_var)

    fname, ext = os.path.splitext(input_file)
    fext = rd.reducer.prop_man.data_file_ext

    input_file = fname + fext

    rd.reduce(input_file,output_dir)
    
    # Define folder for web service to copy results to
    output_folder = ''
    return output_folder
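A hedged example of how the web service (or a user at a terminal) might call this entry point; the run file and output folder below are made up:

if __name__ == '__main__':
    # hypothetical MERLIN run file and output folder
    main(input_file='/archive/NDXMERLIN/Instrument/data/cycle_17_1/MER42341.raw',
         output_dir='/tmp/reduced')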
Example #5
    def iliad_wrapper(*args):
        #seq = inspect.stack()

        host = args[0]
        if len(args) > 1:
            input_file = args[1]
            if len(args) > 2:
                output_directory = args[2]
            else:
                output_directory = None
        else:
            input_file = None
            output_directory = None

        use_web_variables = False
        if host._web_var and output_directory:
            use_web_variables = True
            config.appendDataSearchDir(str(output_directory))
            web_vars = dict(host._web_var.standard_vars)
            web_vars.update(host._web_var.advanced_vars)
            host.iliad_prop.set_input_parameters(**web_vars)
            host.iliad_prop.sample_run = input_file

        rez = main(*args)
        # prohibit returning workspace to web services.
        if use_web_variables and not isinstance(rez, str):
            rez = ""
        return rez
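The merge of standard and advanced variables above uses dict() plus update() so it runs on both Python 2 and 3; on Python 3 alone, dict unpacking is a terser equivalent. A sketch with plain dicts standing in for the web_var attributes (keys are made up):

standard_vars = {'incident_energy': 50.0, 'map_file': '4to1.map'}
advanced_vars = {'det_cal_file': 'det_corr.dat'}

# Python 3: unpacking merges both; advanced values win on key clashes
web_vars = {**standard_vars, **advanced_vars}
print(web_vars)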
Example #6
    def PyExec(self):
        inst = self.getProperty('Instrument').value
        fname = self.getProperty('Filename').value

        diclookup = {
            "AMOR":"amor.dic",
            "BOA":"boa.dic",
            "DMC":"dmc.dic",
            "FOCUS":"focus.dic",
            "HRPT":"hrpt.dic",
            "MARSI":"marsin.dic",
            "MARSE":"marse.dic",
            "POLDI_legacy":"poldi_legacy.dic",
            "POLDI":"poldi.dic",
            "RITA-2":"rita.dic",
            "SANS":"sans.dic",
            "SANS2":"sans.dic",
            "TRICS":"trics.dic"
        }

        lookupInstrumentName = inst
        if inst == 'POLDI':
            lookupInstrumentName = self._getPoldiLookupName(fname, lookupInstrumentName)

        dictsearch = os.path.join(config['instrumentDefinition.directory'],"nexusdictionaries")
        dicname = os.path.join(dictsearch, diclookup[lookupInstrumentName])
        wname = "__tmp"
        ws = mantid.simpleapi.LoadFlexiNexus(fname,dicname,OutputWorkspace=wname)

        if inst == "POLDI":
            if ws.getNumberHistograms() == 800:
                ws.maskDetectors(SpectraList=list(range(0,800))[::2])

                config.appendDataSearchDir(config['groupingFiles.directory'])
                grp_file = "POLDI_Grouping_800to400.xml"
                ws = mantid.simpleapi.GroupDetectors(InputWorkspace=ws,
                                                     OutputWorkspace=wname,
                                                     MapFile=grp_file, Behaviour="Sum")

            # Reverse direction of POLDI data so that low index corresponds to low 2theta.
            histogramCount = ws.getNumberHistograms()
            oldYData = []
            for i in range(histogramCount):
                oldYData.append([x for x in ws.readY(i)])

            for i in range(histogramCount):
                ws.setY(i, np.array(oldYData[histogramCount - 1 - i]))

        elif inst == "TRICS":
            ws = mantid.simpleapi.LoadFlexiNexus(fname,dicname,OutputWorkspace=wname)
            ws = mantid.simpleapi.SINQTranspose3D(ws,OutputWorkspace=wname)

        # Attach workspace to the algorithm property
        self.setProperty("OutputWorkspace", ws)
        # delete temporary reference
        mantid.simpleapi.DeleteWorkspace(wname,EnableLogging=False)
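The POLDI spectrum-reversal loop copies every histogram and writes it back in opposite order, so that a low workspace index corresponds to low 2theta. The same index arithmetic in plain numpy, on a stand-in 2-D array:

import numpy as np

y = np.arange(12).reshape(4, 3)   # 4 histograms, 3 bins each
reversed_rows = y[::-1]           # row i receives the data of row (n-1-i)
assert (reversed_rows[0] == y[3]).all()
print(reversed_rows)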
Example #8
def iliad_maps_setup():

    # where to save results (usually specified in Mantid, data search directories)
    save_dir = config.getString('defaultsave.directory')
    if len(save_dir) == 0:
        config['defaultsave.directory'] = os.getcwd()
        save_dir = config.getString('defaultsave.directory')

    print("Data will be saved into: ", save_dir)
    # map mask and cal file, again the values from Mantid, data search directories can be modified here
    config.appendDataSearchDir('/home/maps/mprogs/InstrumentFiles/maps')
    # data (raw or nxs) run files -- values from data search directories can be modified here
    config.appendDataSearchDir('/isisdatar55/NDXMAPS/Instrument/data/cycle_14_2')
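A hedged variant of the same setup that appends only directories that actually exist, so a typo in a hard-coded path is reported instead of silently ignored (the paths are the ones assumed above):

import os
from mantid import config

candidate_dirs = ['/home/maps/mprogs/InstrumentFiles/maps',
                  '/isisdatar55/NDXMAPS/Instrument/data/cycle_14_2']
for d in candidate_dirs:
    if os.path.isdir(d):
        config.appendDataSearchDir(d)
    else:
        print('skipping missing data directory:', d)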
Example #9
    def iliad_wrapper(*args):
        #seq = inspect.stack()
        # output workspace name.
        try:
            n,r = funcreturns.lhs_info('both')
            out_ws_name = r[0]
        except:
            out_ws_name = None

        args = list(args)  # make args mutable so the file name can be replaced below
        host = args[0]
        if len(args) > 1:
            input_file = args[1]
            if len(args) > 2:
                output_directory = args[2]
            else:
                output_directory = None
        else:
            input_file = None
            output_directory = None
        # add input file folder to data search directory if file has it
        if input_file and isinstance(input_file, str):
            data_path = os.path.dirname(input_file)
            if len(data_path) > 0:
                try:
                    config.appendDataSearchDir(str(data_path))
                    args[1] = os.path.basename(input_file)
                except:  # if mantid is not available, this should ignore config
                    pass
        if output_directory:
            config['defaultsave.directory'] = output_directory

        if host._run_from_web:
            web_vars = dict(host._wvs.standard_vars)
            web_vars.update(host._wvs.advanced_vars)
            host.reducer.prop_man.set_input_parameters(**web_vars)
        else:
            pass  # variables are already set up in the hosting script

        rez = reduce(*args)

        # prohibit returning workspace to web services.
        if host._run_from_web and not isinstance(rez, str):
            rez = ""
        else:
            if isinstance(rez, list):
                # multirep run, just return as it is
                return rez
            if rez is not None and out_ws_name and rez.name() != out_ws_name:
                rez = RenameWorkspace(InputWorkspace=rez, OutputWorkspace=out_ws_name)

        return rez
Example #10
 def _group_and_SofQW(self, wsName, etRebins, isSample=True):
     """ Transforms from wavelength and detector ID to S(Q,E)
     @param wsName: workspace as a function of wavelength and detector id
     @param etRebins: final energy domain and bin width
     @param isSample: discriminates between sample and vanadium
     @return: S(Q,E)
     """
     sapi.ConvertUnits(InputWorkspace=wsName,
                       OutputWorkspace=wsName,
                       Target='DeltaE',
                       EMode='Indirect')
     sapi.CorrectKiKf(InputWorkspace=wsName,
                      OutputWorkspace=wsName,
                      EMode='Indirect')
     sapi.Rebin(InputWorkspace=wsName,
                OutputWorkspace=wsName,
                Params=etRebins)
     if self._groupDetOpt != "None":
         if self._groupDetOpt == "Low-Resolution":
             grp_file = "BASIS_Grouping_LR.xml"
         else:
             grp_file = "BASIS_Grouping.xml"
         # If mask override used, we need to add default grouping file
         # location to search paths
         if self._overrideMask:
             config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
         sapi.GroupDetectors(InputWorkspace=wsName,
                             OutputWorkspace=wsName,
                             MapFile=grp_file,
                             Behaviour="Sum")
     wsSqwName = wsName + '_divided_sqw' if isSample and self._doNorm else wsName + '_sqw'
     sapi.SofQW3(InputWorkspace=wsName,
                 QAxisBinning=self._qBins,
                 EMode='Indirect',
                 EFixed=self._reflection["default_energy"],
                 OutputWorkspace=wsSqwName)
     # Rebin the vanadium within the elastic line
     if not isSample:
         sapi.Rebin(InputWorkspace=wsSqwName,
                    OutputWorkspace=wsSqwName,
                    Params=self._reflection["vanadium_bins"])
     return wsSqwName
Example #11
 def test_load_banks(self):
     # loading a non-existing file
     with self.assertRaises(AssertionError) as exception_info:
         load_banks('I_am_no_here', '58', output_workspace='jambalaya')
     assert 'File I_am_no_here does not exist' in str(
         exception_info.exception)
     # loading an event nexus file will take too much time, so it's left as a system test.
     # loading a nexus processed file
     for directory in config.getDataSearchDirs():
         if 'UnitTest' in directory:
             data_dir = path.join(directory, 'CORELLI', 'calibration')
             config.appendDataSearchDir(data_dir)
             break
     workspace = load_banks(path.join(data_dir,
                                      'CORELLI_123454_bank58.nxs'),
                            '58',
                            output_workspace='jambalaya')
     self.assertAlmostEqual(workspace.readY(42)[0], 13297.0)
     DeleteWorkspaces(['jambalaya'])
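The assertRaises context-manager pattern used here generalizes to any callable. A self-contained sketch with a stand-in load_banks that mimics only the path check (everything below is illustrative, not the real loader):

import os
import unittest

def load_banks(filename, banks, output_workspace=None):
    # stand-in for the real loader: it validates the file path first
    assert os.path.exists(filename), 'File {} does not exist'.format(filename)

class LoadBanksTest(unittest.TestCase):
    def test_missing_file(self):
        with self.assertRaises(AssertionError) as exception_info:
            load_banks('I_am_no_here', '58', output_workspace='jambalaya')
        self.assertIn('does not exist', str(exception_info.exception))

if __name__ == '__main__':
    unittest.main()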
Example #12
    def PyExec(self):
        inst = self.getProperty('Instrument').value
        fname = self.getProperty('Filename').value

        diclookup = {
            "AMOR":"amor.dic",
            "BOA":"boa.dic",
            "DMC":"dmc.dic",
            "FOCUS":"focus.dic",
            "HRPT":"hrpt.dic",
            "MARSI":"marsin.dic",
            "MARSE":"marse.dic",
            "POLDI":"poldi.dic",
            "RITA-2":"rita.dic",
            "SANS":"sans.dic",
            "SANS2":"sans.dic",
            "TRICS":"trics.dic"
        }
        dictsearch = os.path.join(config['instrumentDefinition.directory'],"nexusdictionaries")
        dicname = os.path.join(dictsearch, diclookup[inst])
        wname = "__tmp"
        ws = mantid.simpleapi.LoadFlexiNexus(fname,dicname,OutputWorkspace=wname)

        if inst == "POLDI":
            if ws.getNumberHistograms() == 800:
                ws.maskDetectors(SpectraList=list(range(0, 800))[::2])

            config.appendDataSearchDir(config['groupingFiles.directory'])
            grp_file = "POLDI_Grouping_800to400.xml"
            ws = mantid.simpleapi.GroupDetectors(InputWorkspace=ws,
                                                 OutputWorkspace=wname,
                                                 MapFile=grp_file, Behaviour="Sum")
        elif inst == "TRICS":
            ws = mantid.simpleapi.LoadFlexiNexus(fname,dicname,OutputWorkspace=wname)
            ws = mantid.simpleapi.SINQTranspose3D(ws,OutputWorkspace=wname)

        # Attach workspace to the algorithm property
        self.setProperty("OutputWorkspace", ws)
        # delete temporary reference
        mantid.simpleapi.DeleteWorkspace(wname,EnableLogging=False)
Example #13
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty(
            "EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        self._normalizeToVanadium = self.getProperty("GroupDetectors").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS',
                     OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._doNorm = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._doNorm)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            self._normWs = self._sum_and_calibrate(norm_set,
                                                   extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._doNorm == "by detectorID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [
                    normRange[0], normRange[1] - normRange[0], normRange[1]
                ]
                api.Rebin(InputWorkspace=self._normWs,
                          OutputWorkspace=self._normWs,
                          Params=self._normRange)

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._doNorm == "by Q slice":
                self._normWs = self._group_and_SofQW(self._normWs,
                                                     self._etBins,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Mask detectors with insufficient Vanadium signal
            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
            # Divide by Vanadium
            if self._doNorm == "by detector ID":
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   self._etBins,
                                                   isSample=True)
            # Divide by Vanadium
            if self._doNorm == "by Q slice":
                api.Integration(InputWorkspace=self._normWs,
                                OutputWorkspace=self._normWs,
                                RangeLower=DEFAULT_VANADIUM_ENERGY_RANGE[0],
                                RangeUpper=DEFAULT_VANADIUM_ENERGY_RANGE[1])
                api.Divide(LHSWorkspace=self._samSqwWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            api.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
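A note on the unit handling: EnergyBins is entered in micro-eV and divided by MICROEV_TO_MILLIEV to obtain milli-eV, which implies the constant equals 1000 (later revisions simply multiply by 1.E-03). A one-line check under that assumption:

MICROEV_TO_MILLIEV = 1000.0  # assumed value of the module-level constant
energy_bins_uev = [-120.0, 0.4, 120.0]
energy_bins_mev = [v / MICROEV_TO_MILLIEV for v in energy_bins_uev]
print(energy_bins_mev)  # [-0.12, 0.0004, 0.12]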
Example #14
    def _group_and_SofQW(self, wsName, prefix, etRebins, isSample=True):
        r"""
        Transforms from wavelength and detector ID to S(Q,E)

        Parameters
        ----------
        wsName: str
            Name of a workspace as a function of wavelength and detector id
        prefix: str
            Name prefix for output workspaces and files
        etRebins: list
            Final energy domain and bin width
        isSample: bool
            Discriminates between sample and vanadium

        Returns
        -------
        str
            Name of S(Q,E) workspace
        """
        sapi.ConvertUnits(InputWorkspace=wsName,
                          OutputWorkspace=wsName,
                          Target='DeltaE',
                          EMode='Indirect',
                          EFixed=self._reflection['default_energy'])
        sapi.CorrectKiKf(InputWorkspace=wsName,
                         OutputWorkspace=wsName,
                         EMode='Indirect',
                         EFixed=self._reflection['default_energy'])
        sapi.Rebin(InputWorkspace=wsName,
                   OutputWorkspace=wsName,
                   Params=etRebins)
        if self._groupDetOpt != 'None':
            if self._groupDetOpt == 'Low-Resolution':
                grp_file = 'BASIS_Grouping_LR.xml'
            else:
                grp_file = 'BASIS_Grouping.xml'
            # If mask override used, we need to add default grouping file
            # location to search paths
            if self._overrideMask:
                mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            sapi.GroupDetectors(InputWorkspace=wsName,
                                OutputWorkspace=wsName,
                                MapFile=grp_file,
                                Behaviour='Sum')

        # Output NXSPE file (must be done before transforming the
        # vertical axis to point data)
        if isSample and self._nsxpe_do:
            extension = '.nxspe'
            run = mtd[wsName].getRun()
            if run.hasProperty(self._nxspe_psi_angle_log):
                psi_angle_logproperty = \
                    run.getProperty(self._nxspe_psi_angle_log)
                psi_angle = np.average(psi_angle_logproperty.value)
                psi_angle += self._nxspe_offset
                nxspe_filename = prefix + extension
                sapi.SaveNXSPE(InputWorkspace=wsName,
                               Filename=nxspe_filename,
                               Efixed=self._reflection['default_energy'],
                               Psi=psi_angle,
                               KiOverKfScaling=1)
            else:
                error_message = 'Runs have no log entry named {}'\
                    .format(self._nxspe_psi_angle_log)
                self.log().error(error_message)

        wsSqwName = prefix if isSample is True else wsName
        wsSqwName += '_divided_sqw' if self._doNorm is True else '_sqw'

        sapi.SofQW3(InputWorkspace=wsName,
                    QAxisBinning=self._qBins,
                    EMode='Indirect',
                    EFixed=self._reflection['default_energy'],
                    OutputWorkspace=wsSqwName)
        # Rebin the vanadium within the elastic line
        if not isSample:
            sapi.Rebin(InputWorkspace=wsSqwName,
                       OutputWorkspace=wsSqwName,
                       Params=self._reflection['vanadium_bins'])
        return wsSqwName
Example #15
    def _PyExec(self):
        # Collect Flux Normalization
        if self.getProperty('DoFluxNormalization').value is True:
            self._flux_normalization_type =\
                self.getProperty('FluxNormalizationType').value
            if self._flux_normalization_type == 'Monitor':
                self._MonNorm = True

        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value

        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary

        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])

        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection['mask_file']

        self._maskWs = tws('BASIS_MASK')
        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace=self._maskWs,
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask(InputWorkspace=self._maskWs,
                                  OutputWorkspace=tws('ExtractMask'))
        self._dMask = _dMask[1]

        #
        #  Process the Vanadium
        #
        norm_runs = self.getProperty('NormRunNumbers').value
        if self._doNorm and bool(norm_runs):
            self._normalizationType = self.getProperty(
                'NormalizationType').value
            self.log().information('Divide by Vanadium with normalization ' +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._get_runs(norm_runs, doIndiv=False)[0]
            normWs = tws(self._make_run_name(norm_set[0]) + '_vanadium')
            self._sum_and_calibrate(norm_set, normWs)

            normRange = self._reflection['vanadium_wav_range']
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == 'by detector ID':
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # Detectors outside limits are substituted by MedianDetectorTest
            self._normMask = tws('BASIS_NORM_MASK')
            sapi.FindDetectorsOutsideLimits(
                InputWorkspace=normWs,
                LowThreshold=1.0 * bin_width,
                # no count events outside ranges
                RangeLower=normRange[0],
                RangeUpper=normRange[1],
                OutputWorkspace=self._normMask)
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == 'by Q slice':
                self._normWs = self._group_and_SofQW(normWs,
                                                     normWs,
                                                     self._etBins,
                                                     isSample=False)
        #
        #  Process the sample
        #
        self._run_list = self._get_runs(self.getProperty('RunNumbers').value,
                                        doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = tws(self._make_run_name(run_set[0]))
            self._sum_and_calibrate(run_set, self._samWs)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == 'by detector ID':
                # Mask detectors with low Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace=self._normMask)
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            prefix = self._make_run_name(run_set[0])
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   prefix,
                                                   self._etBins,
                                                   isSample=True)
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == 'by Q slice':
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = '_divided.dat' if self._doNorm else '.dat'
            dave_grp_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = '_divided_sqw.nxs' if self._doNorm else '_sqw.nxs'
            processed_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty('OutputSusceptibility').value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace('sqw', 'Xqw')
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target='DeltaE_inFrequency')
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace(
                    'sqw', 'Xqw')
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)
            if self.getProperty('OutputPowderSpectrum').value:
                self.generatePowderSpectrum()
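The two in-place adjustments to _qBins turn user-supplied bin centres into Rebin-style boundaries by pushing each end outward by half a bin width. A worked numeric sketch with made-up values:

q_bins = [0.5, 0.25, 2.0]     # [first centre, bin width, last centre]
q_bins[0] -= q_bins[1] / 2.0  # leftmost boundary:  0.5 - 0.125 = 0.375
q_bins[2] += q_bins[1] / 2.0  # rightmost boundary: 2.0 + 0.125 = 2.125
print(q_bins)                 # [0.375, 0.25, 2.125]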
Example #16
import time

#instrument name:
inst='map'
iliad_setup(inst)
ext='.raw'

# where to save results (usually specified in Mantid, data search directories)
save_dir = config.getString('defaultsave.directory')
if len(save_dir) == 0:
    config['defaultsave.directory'] = os.getcwd()
    save_dir = config.getString('defaultsave.directory')

print("Data will be saved into: ", save_dir)
# map mask and cal file, again the values from Mantid, data search directories can be modified here
config.appendDataSearchDir('/home/maps/mprogs/InstrumentFiles/maps') 
# data (raw or nxs) run files -- values from data search directories can be modified here
config.appendDataSearchDir('/isisdatar55/NDXMAPS/Instrument/data/cycle_12_3') 

maskfile='4to1_022.msk' #'testMask2.msk'#hard mask out the edges of detectors, which tend to be noisy

#map file
mapfile='4to1' #single crystal mapping file
#mapfile='/opt/Mantid/instrument/mapfiles/maps/parker_rings' #powder mapping file
mv_mapfile='4to1_mid_lowang'

#If run number is 00000 (from updatestore) delete existing workspace so that new raw data file is loaded
try:
    map00000 = CloneWorkspace('MAP00000')
    DeleteWorkspace('MAP00000')
    DeleteWorkspace('map00000')
except:  # workspace not present yet -- nothing to delete
    pass
Example #17
from mantid.simpleapi import *
from mantid import config

#import dgreduce_old as dgrd
import dgreduce as dgrd
import time

#dgrd = reload(dgrd)
#instrument name:
inst='map'
#iliad_setup(inst)
dgrd.setup(inst)
ext='.raw'

maps_dir = 'c:/Users/wkc26243/Documents/work/Libisis/InstrumentFiles/maps/'
config.appendDataSearchDir(maps_dir)
data_dir = r'd:\Data\Fe\Feb2013\Ei200'
config.appendDataSearchDir(data_dir)
config['defaultsave.directory'] = r'd:\Data\Fe\July2013'

maskfile='4to1_022.msk' #'testMask2.msk'#hard mask out the edges of detectors, which tend to be noisy

#map file
mapfile='4to1' #single crystal mapping file
#mapfile='/opt/Mantid/instrument/mapfiles/maps/parker_rings' #powder mapping file
mv_mapfile='4to1_mid_lowang'

# latest white beam vanadium file for bad detector diagnosis
wbvan=15527

#Run numbers can be specified as a list:
Example #18
import time

#instrument name:
inst = 'map'
iliad_setup(inst)
ext = '.raw'

# where to save results (usually specified in Mantid, data search directories)
save_dir = config.getString('defaultsave.directory')
if len(save_dir) == 0:
    config['defaultsave.directory'] = os.getcwd()
    save_dir = config.getString('defaultsave.directory')

print("Data will be saved into: ", save_dir)
# map mask and cal file, again the values from Mantid, data search directories can be modified here
config.appendDataSearchDir('/home/maps/mprogs/InstrumentFiles/maps')
# data (raw or nxs) run files -- values from data search directories can be modified here
config.appendDataSearchDir('/isisdatar55/NDXMAPS/Instrument/data/cycle_12_3')

maskfile = '4to1_022.msk'  #'testMask2.msk'#hard mask out the edges of detectors, which tend to be noisy

#map file
mapfile = '4to1'  #single crystal mapping file
#mapfile='/opt/Mantid/instrument/mapfiles/maps/parker_rings' #powder mapping file
mv_mapfile = '4to1_mid_lowang'

#If run number is 00000 (from updatestore) delete existing workspace so that new raw data file is loaded
try:
    map00000 = CloneWorkspace('MAP00000')
    DeleteWorkspace('MAP00000')
    DeleteWorkspace('map00000')
except:  # workspace not present yet -- nothing to delete
    pass
Example #19
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._reflection = REFLECTIONS_DICT[self.getProperty("ReflectionType").value]
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = 1.E-03 * self.getProperty("EnergyBins").value  # micro-eV to milli-eV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._qBins[0] -= self._qBins[1]/2.0  # self._qBins[0] is leftmost bin boundary
        self._qBins[2] += self._qBins[1]/2.0  # self._qBins[2] is rightmost bin boundary
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        api.LoadMask(Instrument='BASIS', OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" + self._normalizationType)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            self._normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detectorID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [normRange[0], normRange[1]-normRange[0], normRange[1]]
                api.Rebin(InputWorkspace=self._normWs, OutputWorkspace=self._normWs,
                          Params=self._normRange)

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(self._normWs,
                                                     DEFAULT_VANADIUM_BINS,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Mask detectors with insufficient Vanadium signal
            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
            # Divide by Vanadium
            if self._normalizationType == "by detector ID":
                api.Divide(LHSWorkspace=self._samWs, RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs, self._etBins, isSample=True)
            # Divide by Vanadium
            if self._normalizationType == "by Q slice":
                api.Divide(LHSWorkspace=self._samSqwWs, RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            api.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
Example #20
import time

#instrument name:
inst=''
iliad_setup(inst)
ext='.raw'

# where to save results (usually specified in Mantid, data search directories)
save_dir = config.getString('defaultsave.directory')
if len(save_dir) == 0:
    config['defaultsave.directory'] = os.getcwd()
    save_dir = config.getString('defaultsave.directory')

print("Data will be saved into: ", save_dir)
# map mask and cal file, again the values from Mantid, data search directories can be modified here
config.appendDataSearchDir('/usr/local/mprogs/InstrumentFiles/mari') 
# data (raw or nxs) run files -- values from data search directories can be modified here
config.appendDataSearchDir('/isisdatar55/NDXMARI/Instrument/data/cycle_05_1') 
config.appendDataSearchDir(r'd:/Data/MantidSystemTests/Data') 


maskfile='mar11015.msk' #'testMask2.msk'#hard mask out the edges of detectors, which tend to be noisy

#map file
mapfile='mari_res.map' # mapping file
#mapfile='/opt/Mantid/instrument/mapfiles/maps/parker_rings' #powder mapping file
mv_mapfile='4to1_mid_lowang'

# latest white beam vanadium file for bad detector diagnosis
wbvan=11060
Example #21
    def PyExec(self):
        config['default.facility'] = 'SNS'
        config['default.instrument'] = self._long_inst
        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value
        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1]/2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1]/2.0  # rightmost bin boundary
        self._MonNorm = self.getProperty('MonitorNorm').value
        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            normRange = self.getProperty("NormWavelengthRange").value
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detector ID":
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            LowThreshold=1.0*bin_width,
                                            # no count events outside ranges
                                            RangeLower=normRange[0],
                                            RangeUpper=normRange[1],
                                            OutputWorkspace='BASIS_NORM_MASK')
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs, self._etBins,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs, self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty("OutputSusceptibility").value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace("sqw", "Xqw")
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target="DeltaE_inFrequency",
                                  Emode="Indirect")
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace("sqw", "Xqw")
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
                if self._normalizationType == "by Q slice":
                    sapi.DeleteWorkspace(normWs)  # Delete vanadium events file
            if self.getProperty("ExcludeTimeSegment").value:
                sapi.DeleteWorkspace('splitter')
                [sapi.DeleteWorkspace(name) for name in
                 ('splitted_unfiltered', 'TOFCorrectWS') if
                 AnalysisDataService.doesExist(name)]
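The guarded deletions at the end generalize to a small helper; a hedged sketch of the same doesExist pattern (the helper name is made up):

from mantid.api import AnalysisDataService
from mantid.simpleapi import DeleteWorkspace

def delete_if_present(*names):
    """Delete any of the named workspaces that are currently registered."""
    for name in names:
        if AnalysisDataService.doesExist(name):
            DeleteWorkspace(name, EnableLogging=False)

# usage mirroring the clean-up block above
delete_if_present('splitter', 'splitted_unfiltered', 'TOFCorrectWS')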
Example #22
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty("EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS', OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        # Do normalization if run numbers are present
        norm_runs = self.getProperty("NormRunNumbers").value
        self._doNorm = bool(norm_runs)
        self.log().information("Do Norm: " + str(self._doNorm))
        if self._doNorm:
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            # Setup the integration (rebin) parameters
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [normRange[0], normRange[1] - normRange[0],
                               normRange[1]]

            # Process normalization runs
            self._norm_run_list = self._getRuns(norm_runs)
            for norm_set in self._norm_run_list:
                extra_extension = "_norm"
                self._normWs = self._makeRunName(norm_set[0])
                self._normWs += extra_extension
                self._normMonWs = self._normWs + "_monitors"
                self._sumRuns(norm_set, self._normWs, self._normMonWs,
                              extra_extension)
                self._calibData(self._normWs, self._normMonWs)

            api.Rebin(InputWorkspace=self._normWs,
                      OutputWorkspace=self._normWs,
                      Params=self._normRange)
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

        self._run_list = self._getRuns(self.getProperty("RunNumbers").value)
        for run_set in self._run_list:
            self._samWs = self._makeRunName(run_set[0])
            self._samMonWs = self._samWs + "_monitors"
            self._samWsRun = str(run_set[0])

            self._sumRuns(run_set, self._samWs, self._samMonWs)
            # After files are all added, run the reduction
            self._calibData(self._samWs, self._samMonWs)

            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)

            api.ConvertUnits(InputWorkspace=self._samWs, 
                             OutputWorkspace=self._samWs, 
                             Target='DeltaE', EMode='Indirect')
            api.CorrectKiKf(InputWorkspace=self._samWs, 
                            OutputWorkspace=self._samWs, 
                            EMode='Indirect')
                   
            api.Rebin(InputWorkspace=self._samWs, 
                      OutputWorkspace=self._samWs, 
                      Params=self._etBins)
            if self._groupDetOpt != "None":
                if self._groupDetOpt == "Low-Resolution":
                    grp_file = "BASIS_Grouping_LR.xml"
                else:
                    grp_file = "BASIS_Grouping.xml"
                # If mask override used, we need to add default grouping file location to
                # search paths
                if self._overrideMask:
                    config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)

                api.GroupDetectors(InputWorkspace=self._samWs, 
                                   OutputWorkspace=self._samWs,
                                   MapFile=grp_file, Behaviour="Sum")
                
            self._samSqwWs = self._samWs+'_sqw'
            api.SofQW3(InputWorkspace=self._samWs, 
                       OutputWorkspace=self._samSqwWs,
                       QAxisBinning=self._qBins, EMode='Indirect', 
                       EFixed='2.0826')
            
            dave_grp_filename = self._makeRunName(self._samWsRun, 
                                                  False) + ".dat"
            api.SaveDaveGrp(Filename=dave_grp_filename, 
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            processed_filename = self._makeRunName(self._samWsRun, 
                                                   False) + "_sqw.nxs"
            api.SaveNexus(Filename=processed_filename, 
                          InputWorkspace=self._samSqwWs) 
Example #23
    def iliad_wrapper(*args):
        #seq = inspect.stack()
        # output workspace name.
        try:
            name = funcinspect.lhs_info('names')
            out_ws_name = name[0]
        # no exception type(s) specified: who knows what exception this
        # internal procedure raises...
        #pylint: disable=W0702
        except:
            out_ws_name = None

        host = args[0]
        if len(args) > 1:
            input_file = args[1]
            if len(args) > 2:
                output_directory = args[2]
            else:
                output_directory = None
        else:
            input_file = None
            output_directory = None
        # add input file folder to data search directory if file has it
        if input_file and isinstance(input_file, string_types):
            data_path = os.path.dirname(input_file)
            if len(data_path) > 0:
                try:
                    config.appendDataSearchDir(str(data_path))
                    # args is a tuple; switch to a list so the file name
                    # can be replaced by its basename
                    args = list(args)
                    args[1] = os.path.basename(input_file)
                #pylint: disable=bare-except
                except:  # if mantid is not available, this should ignore config
                    pass
        if output_directory:
            config['defaultsave.directory'] = str(output_directory)

        #pylint: disable=protected-access
        if host._run_from_web:
            #pylint: disable=protected-access
            web_vars = host._wvs.get_all_vars()
            host.reducer.prop_man.set_input_parameters(**web_vars)
        else:
            pass  # variables should already have been set up by the caller

        custom_print_function = host.set_custom_output_filename()
        if custom_print_function is not None:
            PropertyManager.save_file_name.set_custom_print(
                custom_print_function)
        #
        rez = reduce(*args)

        # prohibit returning workspace to web services.
        #pylint: disable=protected-access
        if host._run_from_web and not isinstance(rez, string_types):
            rez = ""
        else:
            if isinstance(rez, list):
                # multirep run, just return as it is
                return rez
            if rez is not None and out_ws_name and rez.name() != out_ws_name:
                # the function does not return None, pylint is wrong
                #pylint: disable=W1111
                rez = PropertyManager.sample_run.synchronize_ws(
                    rez, out_ws_name)
        return rez
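
A short sketch of the behaviour this wrapper provides (the decorated entry
point run_reduction and the file name below are hypothetical): the left-hand
side of the assignment is detected through funcinspect.lhs_info, so the
reduced workspace is synchronized with the name of the assignment target.

# 'ws_out' is picked up via funcinspect.lhs_info inside the wrapper,
# so the reduced workspace also ends up named 'ws_out'
ws_out = run_reduction('MAR21334.nxs', '/tmp/output')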
Example #24
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._reflection = REFLECTIONS_DICT[self.getProperty(
            "ReflectionType").value]
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = 1.E-03 * self.getProperty(
            "EnergyBins").value  # micro-eV to milli-eV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._qBins[0] -= self._qBins[
            1] / 2.0  # self._qBins[0] is leftmost bin boundary
        self._qBins[2] += self._qBins[
            1] / 2.0  # self._qBins[2] is rightmost bin boundary
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
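        # ExtractMask returns a pair (mask workspace, list of masked
        # detector IDs); keep only the detector ID list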
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty(
                "NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detector ID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [
                    normRange[0], normRange[1] - normRange[0], normRange[1]
                ]
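                # Params = [xmin, xmax - xmin, xmax] defines a single bin
                # covering the whole wavelength range, so the Rebin below
                # integrates all counts into one histogram bin per detector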
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs,
                                                     self._etBins,
                                                     isSample=False)
            if not self._debugMode:
                sapi.DeleteWorkspace(normWs)  # Delete vanadium events file

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
Example #25
File: reduce_mari.py Project: abuts/Python
# Run number and Ei
Load('MAR21334_fixed.nxs', OutputWorkspace='live')
runno = mtd['live']  # 'live'

sum_runs = False
ei = 50

# White vanadium run number
wbvan=21334
# Default save directory
import os  # for locating this script's directory
#config['defaultsave.directory'] = '/instrument/MARI/RBNumber/RB1610190'  # data_dir
data_dir = os.path.dirname(os.path.realpath(__file__))
config['defaultsave.directory'] = data_dir
#config.appendDataSearchDir(map_mask_dir)
config.appendDataSearchDir(data_dir)

# Absolute normalisation parameters
#monovan=21803
#sam_mass=41.104
#sam_rmm=398.9439
monovan=0
sam_mass=0
sam_rmm=0

# Set to true to remove the constant ToF background from the data.
remove_bkg = True

# If necessary, add any sequence of reduction parameters defined in the MARIParameters.xml file
# to the end of the iliad call using the form: property=value
# (e.g.: iliad_mari(runno,ei,wbvan,monovan,sam_mass,sam_rmm,sum_runs,check_background=False))
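# A minimal sketch of the resulting call (commented out; iliad_mari is assumed
# to be provided by the MARI reduction scripts imported by this setup, and the
# check_background keyword follows the example above):
#iliad_mari(runno, ei, wbvan, monovan, sam_mass, sam_rmm, sum_runs,
#           check_background=remove_bkg)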
Example #26
    def build_or_validate_result(self, Error=1.e-6, ToleranceRelErr=True):
        """ Method validates results of the reduction against reference file or workspace.

            Inputs:
            sample_run     -- the run number to reduce or validate against existing result
            validation_file -- The name of nxs file, containing workspace, produced by reducing SampleRun,
                              or the pointer to the workspace, which is the reference workspace
                              for SampleRun reduction.

            Returns:
            True   if reduction for sample_run produces result within Error from the reference file
                   as reported by CompareWorkspaces.
            False  if CompareWorkspaces comparison between sample and reduction is unsuccessful

            True  if it was not able to load the reference file. In this case, the algorithm builds
                  a validation file and returns True if the reduction and saving of this file succeed

        """
        # this row defines location of the validation file
        validation_file = self.validation_file_name()
        sample_run = self.validate_run_number
        if isinstance(validation_file, string_types):
            path, name = os.path.split(validation_file)
            if name in mtd:
                reference_ws = mtd[name]
                build_validation = False
                fileName = "workspace:" + reference_ws.name()
            else:
                if len(path) > 0:
                    config.appendDataSearchDir(path)
                # is there a bug in getFullPath? It returns its input unchanged
                # when given a full path, even if the file has not been found
                #pylint: disable=unused-variable
                name, fext = os.path.splitext(name)
                fileName = FileFinder.getFullPath(name + '.nxs')
                if len(fileName) > 0:
                    build_validation = False
                    try:
                        reference_ws = Load(fileName)
                    #pylint: disable=bare-except
                    except:
                        build_validation = True
                else:
                    build_validation = True
        elif isinstance(validation_file, api.Workspace):
            # it's a workspace:
            reference_ws = validation_file
            build_validation = False
            fileName = "workspace:" + reference_ws.name()
        else:
            build_validation = True
        #--------------------------------------------------------
        if build_validation:
            self.reducer.prop_man.save_file_name = validation_file
            self.reducer.prop_man.log\
                ("*** WARNING:can not find or load validation file {0}\n"
                 "    Building validation file for run N:{1}".format(validation_file,sample_run),'warning')
        else:
            self.reducer.prop_man.log\
                 ("*** FOUND VALIDATION FILE: {0}\n"
                  "    Validating run {1} against this file".format(fileName,sample_run),'warning')

        # just in case, to be sure
        current_web_state = self._run_from_web
        current_wait_state = self.wait_for_file
        # disable wait for input and
        self._run_from_web = False
        self.wait_for_file = False
        #
        self.def_advanced_properties()
        self.def_main_properties()
        #
        self.reducer.sample_run = sample_run
        self.reducer.prop_man.save_format = None

        reduced = self.reduce()

        if build_validation:
            self.reducer.prop_man.save_file_name = None
            result_name = os.path.splitext(validation_file)[0]
            self.reducer.prop_man.log(
                "*** Saving validation file with name: {0}.nxs".format(
                    result_name), 'notice')
            SaveNexus(reduced, Filename=result_name + '.nxs')
            return True, 'Created validation file {0}.nxs'.format(result_name)
        else:
            if isinstance(reduced,
                          list):  # check only first result in multirep
                reduced = reduced[0]
            # Cheat! Counterintuitive!
            if self._tolerr:
                TOLL = self._tolerr
            else:
                TOLL = Error
            result = CompareWorkspaces(Workspace1=reference_ws,
                                       Workspace2=reduced,
                                       Tolerance=TOLL,
                                       CheckSample=False,
                                       CheckInstrument=False,
                                       ToleranceRelErr=ToleranceRelErr)

        self.wait_for_file = current_wait_state
        self._run_from_web = current_web_state
        if result[0]:
            return True,'Reference file and reduced workspace are equal with accuracy {0:<3.2f}'\
                        .format(TOLL)
        else:
            fname, _ = os.path.splitext(fileName)
            filename = fname + '-mismatch.nxs'
            self.reducer.prop_man.log(
                "***WARNING: can not get results matching the reference file.\n"
                "   Saving new results to file {0}".format(filename),
                'warning')
            SaveNexus(reduced, Filename=filename)
            return False, result
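
A minimal usage sketch, following the MERLIN script conventions shown later in
this listing (the module alias mpr and the run number are illustrative):

rd = mpr.MERLINReduction()
rd.validate_run_number = 21334  # hypothetical run to reduce and validate
ok, message = rd.build_or_validate_result(Error=1.e-6)
print(message)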
Example #27
    def _group_and_SofQW(self, wsName, etRebins, isSample=True):
        """ Transforms from wavelength and detector ID to S(Q,E)
        @param wsName: workspace as a function of wavelength and detector id
        @param etRebins: final energy domain and bin width
        @param isSample: discriminates between sample and vanadium
        @return: string name of S(Q,E)
        """
        sapi.ConvertUnits(InputWorkspace=wsName,
                          OutputWorkspace=wsName,
                          Target='DeltaE',
                          EMode='Indirect')
        sapi.CorrectKiKf(InputWorkspace=wsName,
                         OutputWorkspace=wsName,
                         EMode='Indirect')
        sapi.Rebin(InputWorkspace=wsName,
                   OutputWorkspace=wsName,
                   Params=etRebins)
        if self._groupDetOpt != "None":
            if self._groupDetOpt == "Low-Resolution":
                grp_file = "BASIS_Grouping_LR.xml"
            else:
                grp_file = "BASIS_Grouping.xml"
            # If mask override used, we need to add default grouping file
            # location to search paths
            if self._overrideMask:
                config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            sapi.GroupDetectors(InputWorkspace=wsName,
                                OutputWorkspace=wsName,
                                MapFile=grp_file,
                                Behaviour="Sum")

        # Output NXSPE file (must be done before transforming the
        # vertical axis to point data)
        if isSample and self._nsxpe_do:
            extension = '.nxspe'
            run = mtd[wsName].getRun()
            if run.hasProperty(self._nxspe_psi_angle_log):
                psi_angle_logproperty = \
                    run.getProperty(self._nxspe_psi_angle_log)
                psi_angle = np.average(psi_angle_logproperty.value)
                psi_angle += self._nxspe_offset
                nxspe_filename = wsName + extension
                sapi.SaveNXSPE(InputWorkspace=wsName,
                               Filename=nxspe_filename,
                               Efixed=self._reflection['default_energy'],
                               Psi=psi_angle,
                               KiOverKfScaling=1)
            else:
                error_message = 'Runs have no log entry named {}'\
                    .format(self._nxspe_psi_angle_log)
                self.log().error(error_message)

        wsSqwName = wsName + '_divided_sqw' \
            if isSample and self._doNorm else wsName + '_sqw'
        sapi.SofQW3(InputWorkspace=wsName,
                    QAxisBinning=self._qBins,
                    EMode='Indirect',
                    EFixed=self._reflection["default_energy"],
                    OutputWorkspace=wsSqwName)
        # Rebin the vanadium within the elastic line
        if not isSample:
            sapi.Rebin(InputWorkspace=wsSqwName,
                       OutputWorkspace=wsSqwName,
                       Params=self._reflection["vanadium_bins"])
        return wsSqwName
Example #28
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = self.getProperty(
            "EnergyBins").value / MICROEV_TO_MILLIEV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Handle masking file override if necessary
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = DEFAULT_MASK_FILE

        api.LoadMask(Instrument='BASIS',
                     OutputWorkspace='BASIS_MASK',
                     InputFile=self._maskFile)

        # Work around length issue
        _dMask = api.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        api.DeleteWorkspace(_dMask[0])

        # Do normalization if run numbers are present
        norm_runs = self.getProperty("NormRunNumbers").value
        self._doNorm = bool(norm_runs)
        self.log().information("Do Norm: " + str(self._doNorm))
        if self._doNorm:
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            # Setup the integration (rebin) parameters
            normRange = self.getProperty("NormWavelengthRange").value
            self._normRange = [
                normRange[0], normRange[1] - normRange[0], normRange[1]
            ]

            # Process normalization runs
            self._norm_run_list = self._getRuns(norm_runs)
            for norm_set in self._norm_run_list:
                extra_extension = "_norm"
                self._normWs = self._makeRunName(norm_set[0])
                self._normWs += extra_extension
                self._normMonWs = self._normWs + "_monitors"
                self._sumRuns(norm_set, self._normWs, self._normMonWs,
                              extra_extension)
                self._calibData(self._normWs, self._normMonWs)

            api.Rebin(InputWorkspace=self._normWs,
                      OutputWorkspace=self._normWs,
                      Params=self._normRange)
            api.FindDetectorsOutsideLimits(InputWorkspace=self._normWs,
                                           OutputWorkspace="BASIS_NORM_MASK")

        self._run_list = self._getRuns(self.getProperty("RunNumbers").value)
        for run_set in self._run_list:
            self._samWs = self._makeRunName(run_set[0])
            self._samMonWs = self._samWs + "_monitors"
            self._samWsRun = str(run_set[0])

            self._sumRuns(run_set, self._samWs, self._samMonWs)
            # After files are all added, run the reduction
            self._calibData(self._samWs, self._samMonWs)

            if self._doNorm:
                api.MaskDetectors(Workspace=self._samWs,
                                  MaskedWorkspace='BASIS_NORM_MASK')
                api.Divide(LHSWorkspace=self._samWs,
                           RHSWorkspace=self._normWs,
                           OutputWorkspace=self._samWs)

            api.ConvertUnits(InputWorkspace=self._samWs,
                             OutputWorkspace=self._samWs,
                             Target='DeltaE',
                             EMode='Indirect')
            api.CorrectKiKf(InputWorkspace=self._samWs,
                            OutputWorkspace=self._samWs,
                            EMode='Indirect')

            api.Rebin(InputWorkspace=self._samWs,
                      OutputWorkspace=self._samWs,
                      Params=self._etBins)
            if self._groupDetOpt != "None":
                if self._groupDetOpt == "Low-Resolution":
                    grp_file = "BASIS_Grouping_LR.xml"
                else:
                    grp_file = "BASIS_Grouping.xml"
                # If mask override used, we need to add default grouping file location to
                # search paths
                if self._overrideMask:
                    config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)

                api.GroupDetectors(InputWorkspace=self._samWs,
                                   OutputWorkspace=self._samWs,
                                   MapFile=grp_file,
                                   Behaviour="Sum")

            self._samSqwWs = self._samWs + '_sqw'
            api.SofQW3(InputWorkspace=self._samWs,
                       OutputWorkspace=self._samSqwWs,
                       QAxisBinning=self._qBins,
                       EMode='Indirect',
                       EFixed=DEFAULT_ENERGY)

            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + ".dat"
            api.SaveDaveGrp(Filename=dave_grp_filename,
                            InputWorkspace=self._samSqwWs,
                            ToMicroEV=True)
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + "_sqw.nxs"
            api.SaveNexus(Filename=processed_filename,
                          InputWorkspace=self._samSqwWs)
Example #29
import MERReductionSrRuO3 as mpr
from mantid.simpleapi import *
from mantid import config

reload(mpr)
rd = mpr.MERLINReduction()

map_mask_dir = '/home/zmp58988/RB1510482'
config.appendDataSearchDir(map_mask_dir)

# set up advanced and main properties
rd.def_advanced_properties()
rd.def_main_properties()

#Filename?
rd.set_custom_output_filename()


def iliad_merlin_crystal(runno, ei, wbvan, rebin_pars, monovan, sam_mass,
                         sam_rmm, bg_range):

    rd.reducer.prop_man.map_file = 'one2one_143.map'
    #rd.reducer.prop_man.hardmaskOnly = "MER23698.msk"
    rd.reducer.prop_man.hardmaskPlus = "MER23698.msk"

    rd.reducer.prop_man.incident_energy = ei

    rd.reducer.prop_man.sample_run = runno
    rd.reducer.prop_man.wb_run = wbvan
    rd.reducer.prop_man.energy_bins = rebin_pars
Example #30
    def build_or_validate_result(self,Error=1.e-6,ToleranceRelErr=True):
        """ Method validates results of the reduction against reference file or workspace.

            Inputs:
            sample_run     -- the run number to reduce or validate against existing result
            validation_file -- The name of nxs file, containing workspace, produced by reducing SampleRun,
                              or the pointer to the workspace, which is the reference workspace
                              for SampleRun reduction.

            Returns:
            True   if reduction for sample_run produces result within Error from the reference file
                   as reported by CheckWorkspacesMatch.
            False  if CheckWorkspacesMatch comparison between sample and reduction is unsuccessful

            True  if it was not able to load the reference file. In this case, the algorithm builds
                  a validation file and returns True if the reduction and saving of this file succeed

        """
        # this row defines location of the validation file
        validation_file = self.validation_file_name()
        sample_run = self.validate_run_number
        if isinstance(validation_file,str):
            path,name = os.path.split(validation_file)
            if name in mtd:
                reference_ws = mtd[name]
                build_validation = False
                fileName = "workspace:"+reference_ws.name()
            else:
                if len(path)>0:
                    config.appendDataSearchDir(path)
                # is there a bug in getFullPath? It returns its input unchanged
                # when given a full path, even if the file has not been found
                name, fext = os.path.splitext(name)
                fileName = FileFinder.getFullPath(name+'.nxs')
                if len(fileName)>0:
                    build_validation = False
                    try:
                        reference_ws = Load(fileName)
                    except:
                        build_validation = True
                else:
                    build_validation = True
        elif isinstance(validation_file, api.Workspace):
            # it's a workspace:
            reference_ws = validation_file
            build_validation = False
            fileName = "workspace:"+reference_ws.name()
        else:
            build_validation = True
        #--------------------------------------------------------
        if build_validation:
            self.reducer.prop_man.save_file_name = validation_file
            self.reducer.prop_man.log\
                 ("*** WARNING:can not find or load validation file {0}\n"\
                  "    Building validation file for run N:{1}".format(validation_file,sample_run),'warning')
        else:
            self.reducer.prop_man.log\
                 ("*** FOUND VALIDATION FILE: {0}\n"\
                  "    Validating run {1} against this file".format(fileName,sample_run),'warning')

        # just in case, to be sure
        current_web_state = self._run_from_web
        current_wait_state = self.wait_for_file
        # disable wait for input and
        self._run_from_web = False
        self.wait_for_file = False
        #
        self.def_advanced_properties()
        self.def_main_properties()
        #
        self.reducer.sample_run = sample_run
        self.reducer.prop_man.save_format = None

        reduced = self.reduce()

        if build_validation:
            self.reducer.prop_man.save_file_name = None
            result_name = os.path.splitext(validation_file)[0]
            self.reducer.prop_man.log("*** Saving validation file with name: {0}.nxs".format(result_name),'notice')
            SaveNexus(reduced,Filename=result_name + '.nxs')
            return True,'Created validation file {0}.nxs'.format(result_name)
        else:
            if isinstance(reduced,list): # check only first result in multirep
                reduced = reduced[0]
            # Cheat! Counterintuitive!
            if self._tolerr:
                TOLL=self._tolerr
            else:
                TOLL = Error
            result = CheckWorkspacesMatch(Workspace1=reference_ws,Workspace2=reduced,\
                                      Tolerance=TOLL,CheckSample=False,\
                                      CheckInstrument=False,ToleranceRelErr=ToleranceRelErr)

        self.wait_for_file = current_wait_state
        self._run_from_web = current_web_state
        if result == 'Success!':
            return True,'Reference file and reduced workspace are equal with accuracy {0:<3.2f}'\
                        .format(TOLL)
        else:
            fname,ext = os.path.splitext(fileName)
            filename = fname+'-mismatch.nxs'
            self.reducer.prop_man.log("***WARNING: can not get results matching the reference file.\n"\
                                      "   Saving new results to file {0}".format(filename),'warning')
            SaveNexus(reduced,Filename=filename)
            return False,result
Example #31
import MERReductionSrRuO3 as mpr
from mantid.simpleapi import *
from mantid import config

reload(mpr)
rd = mpr.MERLINReduction()

map_mask_dir = '/home/zmp58988/RB1510482'
config.appendDataSearchDir(map_mask_dir)

# set up advanced and main properties
rd.def_advanced_properties()
rd.def_main_properties()

#Filename?
rd.set_custom_output_filename()

def iliad_merlin_crystal(runno, ei, wbvan, rebin_pars, monovan, sam_mass,
                         sam_rmm, bg_range):

    rd.reducer.prop_man.map_file = 'one2one_143.map'
    #rd.reducer.prop_man.hardmaskOnly = "MER23698.msk"
    rd.reducer.prop_man.hardmaskPlus = "MER23698.msk"

    rd.reducer.prop_man.incident_energy = ei

    rd.reducer.prop_man.sample_run = runno
    rd.reducer.prop_man.wb_run = wbvan
    rd.reducer.prop_man.energy_bins = rebin_pars

    rd.reducer.prop_man.bkgd_range = bg_range