Code example #1
    def validateInputs(self):
        """
        Checks for invalid input properties.
        """
        from IndirectCommon import CheckHistZero
        issues = dict()

        input_workspace_name = self.getPropertyValue('InputWorkspace')

        # Validate spectra range
        spectra_range = self.getProperty('SpectraRange').value
        if len(spectra_range) != 0 and len(spectra_range) != 2:
            issues['SpectraRange'] = 'Must be in format "spec_min,spec_max"'

        if len(spectra_range) == 2:
            spec_min = spectra_range[0]
            spec_max = spectra_range[1]

            num_sample_spectra, _ = CheckHistZero(input_workspace_name)
            min_spectra_number = mtd[input_workspace_name].getSpectrum(0).getSpectrumNo()
            max_spectra_number = mtd[input_workspace_name].getSpectrum(num_sample_spectra - 1).getSpectrumNo()

            if spec_min < min_spectra_number:
                issues['SpectraRange'] = 'Minimum spectra must be greater than or equal to %d' % min_spectra_number

            if spec_max > max_spectra_number:
                issues['SpectraRange'] = 'Maximum spectra must be less than or equal to %d' % max_spectra_number

            if spec_max < spec_min:
                issues['SpectraRange'] = 'Minimum spectra must be smaller than maximum spectra'

        # Validate X range
        x_min = self.getProperty('XMin').value
        if x_min < -1e-5:
            issues['XMin'] = 'XMin must be greater than or equal to zero'

        x_max = self.getProperty('XMax').value
        if x_max < 1e-5:
            issues['XMax'] = 'XMax must be greater than zero'

        if math.fabs(x_max - x_min) < 1e-5:
            issues['XMin'] = 'X range is close to zero'
            issues['XMax'] = 'X range is close to zero'

        if x_max < x_min:
            issues['XMin'] = 'XMin must be less than XMax'
            issues['XMax'] = 'XMax must be greater than XMin'

        # Validate X range against workspace X range
        sample_x = mtd[input_workspace_name].readX(0)
        sample_x_min = sample_x.min()
        sample_x_max = sample_x.max()

        if x_max > sample_x_max:
            issues['XMax'] = 'XMax value (%f) is greater than largest X value (%f)' % (x_max, sample_x_max)

        # XMin is treated as a magnitude on the negative side of the energy range,
        # so its negation is compared with the smallest X value
        if -x_min < sample_x_min:
            issues['XMin'] = 'Negative XMin value (%f) is less than smallest X value (%f)' % (-x_min, sample_x_min)

        return issues
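Every example in this listing imports CheckHistZero from IndirectCommon, but its source is not reproduced here. Judging only from the call sites (e.g. num_sample_spectra, _ = CheckHistZero(input_workspace_name)), it is expected to return a (number_of_histograms, number_of_bins) pair for a named workspace and to fail on empty data. The sketch below is an assumption about that contract, not Mantid's implementation; the name check_hist_zero_sketch is made up for illustration.

from mantid.simpleapi import mtd

# Hypothetical sketch of the contract the call sites rely on; not the
# IndirectCommon implementation.
def check_hist_zero_sketch(ws_name):
    workspace = mtd[ws_name]                    # look the workspace up in the ADS
    num_hist = workspace.getNumberHistograms()
    num_bins = workspace.blocksize()
    if num_hist == 0 or num_bins == 0:
        raise ValueError('%s contains no data' % ws_name)
    return num_hist, num_bins

# Usage mirrors the examples: num_spectra, num_bins = check_hist_zero_sketch('sample_ws')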
Code example #2
    def _transform(self):
        """
        Run TransformToIqt.
        """
        from IndirectCommon import CheckHistZero, CheckHistSame, CheckAnalysers
        try:
            CheckAnalysers(self._sample, self._resolution)
        except ValueError:
            # A genuine error showing that the two runs are incompatible
            raise
        except:
            # Checking could not be performed due to incomplete or no instrument
            logger.warning(
                'Could not check for matching analyser and reflection')

        # Process resolution data
        num_res_hist = CheckHistZero(self._resolution)[0]
        if num_res_hist > 1:
            CheckHistSame(self._sample, 'Sample', self._resolution,
                          'Resolution')

        iqt = CalculateIqt(InputWorkspace=self._sample,
                           ResolutionWorkspace=self._resolution,
                           EnergyMin=self._e_min,
                           EnergyMax=self._e_max,
                           EnergyWidth=self._e_width,
                           NumberOfIterations=self._number_of_iterations,
                           SeedValue=self._seed,
                           StoreInADS=False,
                           OutputWorkspace="__ciqt")

        # Set Y axis unit and label
        iqt.setYUnit('')
        iqt.setYUnitLabel('Intensity')
        return iqt
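CheckHistSame follows the same pattern throughout these examples: when the resolution workspace has more than one histogram it must be commensurate with the sample. A hedged sketch of the check these calls imply (illustrative only, not the IndirectCommon source; the string arguments are assumed to be labels used purely in the error message):

from mantid.simpleapi import mtd

# Illustrative sketch of the consistency check implied by the call sites above.
def check_hist_same_sketch(ws1_name, ws1_label, ws2_name, ws2_label):
    n1 = mtd[ws1_name].getNumberHistograms()
    n2 = mtd[ws2_name].getNumberHistograms()
    if n1 != n2:
        raise ValueError('%s (%s) and %s (%s) do not contain the same number of spectra'
                         % (ws1_label, ws1_name, ws2_label, ws2_name))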
Code example #3
    def _setup(self):
        """
        Get the algorithm properties and validate them.
        """
        from IndirectCommon import CheckHistZero

        self._sample = self.getPropertyValue('Sample')

        self._x_min = math.fabs(self.getProperty('XMin').value)
        self._x_max = math.fabs(self.getProperty('XMax').value)

        self._verbose = self.getProperty('Verbose').value
        self._plot = self.getProperty('Plot').value
        self._save = self.getProperty('Save').value

        self._spectra_range = self.getProperty('SpectraRange').value
        # If the user did not enter a spectra range, use the spectra range of the workspace
        if len(self._spectra_range) == 0:
            num_sample_spectra, _ = CheckHistZero(self._sample)
            min_spectra_number = mtd[self._sample].getSpectrum(0).getSpectrumNo()
            max_spectra_number = mtd[self._sample].getSpectrum(num_sample_spectra - 1).getSpectrumNo()
            self._spectra_range = [min_spectra_number, max_spectra_number]

        self._output_workspace = self.getPropertyValue('OutputWorkspace')
        self._props_output_workspace = self.getPropertyValue('OutputPropertiesTable')
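Both Mantid property accessors appear in this example: getPropertyValue returns the property as a string (used here for workspace names), while getProperty(...).value returns the declared type (used for the numeric and boolean options). A minimal illustration of the distinction, assuming an algorithm with a float 'XMin' property:

        # Illustration of the two accessors used throughout these examples
        x_min_text = self.getPropertyValue('XMin')    # string form, e.g. '0.5'
        x_min_value = self.getProperty('XMin').value  # typed form, e.g. 0.5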
Code example #4
File: TransformToIqt.py  Project: yutiansut/mantid
    def _transform(self):
        """
        Run TransformToIqt.
        """
        from IndirectCommon import CheckHistZero, CheckHistSame

        # Process resolution data
        res_number_of_histograms = CheckHistZero(self._resolution)[0]
        sample_number_of_histograms = CheckHistZero(self._sample)[0]
        if res_number_of_histograms > 1 and sample_number_of_histograms != res_number_of_histograms:
            CheckHistSame(self._sample, 'Sample', self._resolution,
                          'Resolution')

        calculateiqt_alg = self.createChildAlgorithm(name='CalculateIqt',
                                                     startProgress=0.3,
                                                     endProgress=1.0,
                                                     enableLogging=True)
        calculateiqt_alg.setAlwaysStoreInADS(False)
        args = {
            "InputWorkspace": self._sample,
            "OutputWorkspace": "iqt",
            "ResolutionWorkspace": self._resolution,
            "EnergyMin": self._e_min,
            "EnergyMax": self._e_max,
            "EnergyWidth": self._e_width,
            "CalculateErrors": self._calculate_errors,
            "NumberOfIterations": self._number_of_iterations,
            "SeedValue": self._seed
        }
        for key, value in args.items():
            calculateiqt_alg.setProperty(key, value)
        calculateiqt_alg.execute()

        iqt = calculateiqt_alg.getProperty("OutputWorkspace").value

        # Set Y axis unit and label
        iqt.setYUnit('')
        iqt.setYUnitLabel('Intensity')
        return iqt
Code example #5
File: TimeSlice.py  Project: yutiansut/mantid
    def _process_raw_file(self, raw_file):
        """
        Process a raw sample file.

        @param raw_file Name of file to process
        """
        from IndirectCommon import CheckHistZero

        # Crop the raw file to the desired range of spectra
        # (spectrum number less one because CropWorkspace indices are zero based)
        CropWorkspace(InputWorkspace=raw_file,
                      OutputWorkspace=raw_file,
                      StartWorkspaceIndex=int(self._spectra_range[0]) - 1,
                      EndWorkspaceIndex=int(self._spectra_range[1]) - 1)

        num_hist = CheckHistZero(raw_file)[0]

        # Use calibration file if desired
        if self._calib_ws is not None:
            Divide(LHSWorkspace=raw_file,
                   RHSWorkspace=self._calib_ws,
                   OutputWorkspace=raw_file)

        # Construct output workspace name
        run = mtd[raw_file].getRun().getLogData('run_number').value
        inst = mtd[raw_file].getInstrument().getName()
        slice_file = inst.lower() + run + self._output_ws_name_suffix

        if self._background_range is None:
            Integration(InputWorkspace=raw_file,
                        OutputWorkspace=slice_file,
                        RangeLower=self._peak_range[0],
                        RangeUpper=self._peak_range[1],
                        StartWorkspaceIndex=0,
                        EndWorkspaceIndex=num_hist - 1)
        else:
            CalculateFlatBackground(InputWorkspace=raw_file,
                                    OutputWorkspace=slice_file,
                                    StartX=self._background_range[0],
                                    EndX=self._background_range[1],
                                    Mode='Mean')
            Integration(InputWorkspace=slice_file,
                        OutputWorkspace=slice_file,
                        RangeLower=self._peak_range[0],
                        RangeUpper=self._peak_range[1],
                        StartWorkspaceIndex=0,
                        EndWorkspaceIndex=num_hist - 1)

        return slice_file
Code example #6
    def _transform(self):
        """
        Run TransformToIqt.
        """
        from IndirectCommon import CheckHistZero, CheckHistSame, CheckAnalysers

        try:
            CheckAnalysers(self._sample, self._resolution)
        except ValueError:
            # A genuine error showing that the two runs are incompatible
            raise
        except BaseException:
            # Checking could not be performed due to incomplete or no
            # instrument
            logger.warning(
                'Could not check for matching analyser and reflection')

        # Process resolution data
        num_res_hist = CheckHistZero(self._resolution)[0]
        if num_res_hist > 1:
            CheckHistSame(self._sample, 'Sample', self._resolution,
                          'Resolution')

        calculateiqt_alg = self.createChildAlgorithm(name='CalculateIqt',
                                                     startProgress=0.3,
                                                     endProgress=1.0,
                                                     enableLogging=True)
        calculateiqt_alg.setAlwaysStoreInADS(False)
        args = {
            "InputWorkspace": self._sample,
            "OutputWorkspace": "iqt",
            "ResolutionWorkspace": self._resolution,
            "EnergyMin": self._e_min,
            "EnergyMax": self._e_max,
            "EnergyWidth": self._e_width,
            "CalculateErrors": self._calculate_errors,
            "NumberOfIterations": self._number_of_iterations,
            "SeedValue": self._seed
        }
        for key, value in args.items():
            calculateiqt_alg.setProperty(key, value)
        calculateiqt_alg.execute()

        iqt = calculateiqt_alg.getProperty("OutputWorkspace").value

        # Set Y axis unit and label
        iqt.setYUnit('')
        iqt.setYUnitLabel('Intensity')
        return iqt
Code example #7
File: BayesQuasi.py  Project: mcvine/mantid
    def PyExec(self):

        # Check for platform support
        if not is_supported_f2py_platform():
            unsupported_msg = ("This algorithm can only be run on valid platforms. "
                               "Please view the algorithm documentation to see "
                               "which platforms are currently supported.")
            raise RuntimeError(unsupported_msg)

        from IndirectBayes import (CalcErange, GetXYE, ReadNormFile,
                                   ReadWidthFile, QLAddSampleLogs, C2Fw, C2Se,
                                   QuasiPlot)
        from IndirectCommon import (CheckXrange, CheckAnalysers, getEfixed,
                                    GetThetaQ, CheckHistZero, CheckHistSame,
                                    IndentifyDataBoundaries)
        setup_prog = Progress(self, start=0.0, end=0.3, nreports=5)
        self.log().information('BayesQuasi input')

        erange = [self._e_min, self._e_max]
        nbins = [self._sam_bins, self._res_bins]
        setup_prog.report('Converting to binary for Fortran')
        # Convert True/False to 1/0 for Fortran
        o_el = 1 if self._elastic else 0
        o_w1 = 1 if self._width else 0
        o_res = 1 if self._res_norm else 0

        # The Fortran code encodes the background choice with the following numbers
        setup_prog.report('Encoding input options')
        if self._background == 'Sloping':
            o_bgd = 2
        elif self._background == 'Flat':
            o_bgd = 1
        elif self._background == 'Zero':
            o_bgd = 0

        fitOp = [o_el, o_bgd, o_w1, o_res]

        setup_prog.report('Establishing save path')
        workdir = config['defaultsave.directory']
        if not os.path.isdir(workdir):
            workdir = os.getcwd()
            logger.information(
                'Default save directory is not set. Defaulting to the current working directory: '
                + workdir)

        array_len = 4096  # length of array in Fortran
        setup_prog.report('Checking X Range')
        CheckXrange(erange, 'Energy')

        nbin, nrbin = nbins[0], nbins[1]

        logger.information('Sample is ' + self._samWS)
        logger.information('Resolution is ' + self._resWS)

        # Check for trailing and leading zeros in data
        setup_prog.report(
            'Checking for leading and trailing zeros in the data')
        first_data_point, last_data_point = IndentifyDataBoundaries(
            self._samWS)
        if first_data_point > self._e_min:
            logger.warning(
                "Sample workspace contains leading zeros within the energy range."
            )
            logger.warning("Updating eMin: eMin = " + str(first_data_point))
            self._e_min = first_data_point
        if last_data_point < self._e_max:
            logger.warning(
                "Sample workspace contains trailing zeros within the energy range."
            )
            logger.warning("Updating eMax: eMax = " + str(last_data_point))
            self._e_max = last_data_point

        # update erange with new values
        erange = [self._e_min, self._e_max]

        setup_prog.report('Checking Analysers')
        CheckAnalysers(self._samWS, self._resWS)
        setup_prog.report('Obtaining EFixed, theta and Q')
        efix = getEfixed(self._samWS)
        theta, Q = GetThetaQ(self._samWS)

        nsam, ntc = CheckHistZero(self._samWS)

        totalNoSam = nsam

        # Check if we're performing a sequential fit
        if not self._loop:
            nsam = 1

        nres = CheckHistZero(self._resWS)[0]

        setup_prog.report('Checking Histograms')
        if self._program == 'QL':
            if nres == 1:
                prog = 'QLr'  # res file
            else:
                prog = 'QLd'  # data file
                CheckHistSame(self._samWS, 'Sample', self._resWS, 'Resolution')
        elif self._program == 'QSe':
            if nres == 1:
                prog = 'QSe'  # res file
            else:
                raise ValueError('Stretched Exp ONLY works with RES file')

        logger.information('Version is ' + prog)
        logger.information(' Number of spectra = ' + str(nsam))
        logger.information(' Erange : ' + str(erange[0]) + ' to ' +
                           str(erange[1]))

        setup_prog.report('Reading files')
        Wy, We = ReadWidthFile(self._width, self._wfile, totalNoSam)
        dtn, xsc = ReadNormFile(self._res_norm, self._resnormWS, totalNoSam)

        setup_prog.report('Establishing output workspace name')
        fname = self._samWS[:-4] + '_' + prog
        probWS = fname + '_Prob'
        fitWS = fname + '_Fit'
        wrks = os.path.join(workdir, self._samWS[:-4])
        logger.information(' lptfile : ' + wrks + '_' + prog + '.lpt')
        lwrk = len(wrks)
        wrks = wrks.ljust(140, ' ')  # ljust returns a new string; assign it so the padding takes effect
        wrkr = self._resWS
        wrkr = wrkr.ljust(140, ' ')

        setup_prog.report('Initialising probability list')
        # initialise probability list
        if self._program == 'QL':
            prob0 = []
            prob1 = []
            prob2 = []
        xQ = np.array([Q[0]])
        for m in range(1, nsam):
            xQ = np.append(xQ, Q[m])
        xProb = xQ
        xProb = np.append(xProb, xQ)
        xProb = np.append(xProb, xQ)
        eProb = np.zeros(3 * nsam)

        group = ''
        workflow_prog = Progress(self, start=0.3, end=0.7, nreports=nsam * 3)
        for m in range(0, nsam):
            logger.information('Group ' + str(m) + ' at angle ' +
                               str(theta[m]))
            nsp = m + 1
            nout, bnorm, Xdat, Xv, Yv, Ev = CalcErange(self._samWS, m, erange,
                                                       nbin)
            Ndat = nout[0]
            Imin = nout[1]
            Imax = nout[2]
            if prog == 'QLd':
                mm = m
            else:
                mm = 0
            Nb, Xb, Yb, Eb = GetXYE(self._resWS, mm,
                                    array_len)  # get resolution data
            numb = [nsam, nsp, ntc, Ndat, nbin, Imin, Imax, Nb, nrbin]
            rscl = 1.0
            reals = [efix, theta[m], rscl, bnorm]

            if prog == 'QLr':
                workflow_prog.report(
                    'Processing Sample number %i as Lorentzian' % nsam)
                nd, xout, yout, eout, yfit, yprob = QLr.qlres(
                    numb, Xv, Yv, Ev, reals, fitOp, Xdat, Xb, Yb, Wy, We, dtn,
                    xsc, wrks, wrkr, lwrk)
                message = ' Log(prob) : ' + str(yprob[0]) + ' ' + str(
                    yprob[1]) + ' ' + str(yprob[2]) + ' ' + str(yprob[3])
                logger.information(message)
            if prog == 'QLd':
                workflow_prog.report('Processing Sample number %i' % nsam)
                nd, xout, yout, eout, yfit, yprob = QLd.qldata(
                    numb, Xv, Yv, Ev, reals, fitOp, Xdat, Xb, Yb, Eb, Wy, We,
                    wrks, wrkr, lwrk)
                message = ' Log(prob) : ' + str(yprob[0]) + ' ' + str(
                    yprob[1]) + ' ' + str(yprob[2]) + ' ' + str(yprob[3])
                logger.information(message)
            if prog == 'QSe':
                workflow_prog.report(
                    'Processing Sample number %i as Stretched Exp' % nsam)
                nd, xout, yout, eout, yfit, yprob = Qse.qlstexp(numb, Xv, Yv, Ev, reals, fitOp,
                                                                Xdat, Xb, Yb, Wy, We, dtn, xsc,
                                                                wrks, wrkr, lwrk)
            dataX = xout[:nd]
            dataX = np.append(dataX, 2 * xout[nd - 1] - xout[nd - 2])
            yfit_list = np.split(yfit[:4 * nd], 4)
            dataF1 = yfit_list[1]
            if self._program == 'QL':
                dataF2 = yfit_list[2]
            workflow_prog.report('Processing data')
            dataG = np.zeros(nd)
            datX = dataX
            datY = yout[:nd]
            datE = eout[:nd]
            datX = np.append(datX, dataX)
            datY = np.append(datY, dataF1[:nd])
            datE = np.append(datE, dataG)
            res1 = dataF1[:nd] - yout[:nd]
            datX = np.append(datX, dataX)
            datY = np.append(datY, res1)
            datE = np.append(datE, dataG)
            nsp = 3
            names = 'data,fit.1,diff.1'
            res_plot = [0, 1, 2]
            if self._program == 'QL':
                workflow_prog.report('Processing Lorentzian result data')
                datX = np.append(datX, dataX)
                datY = np.append(datY, dataF2[:nd])
                datE = np.append(datE, dataG)
                res2 = dataF2[:nd] - yout[:nd]
                datX = np.append(datX, dataX)
                datY = np.append(datY, res2)
                datE = np.append(datE, dataG)
                nsp += 2
                names += ',fit.2,diff.2'
                res_plot.append(4)
                prob0.append(yprob[0])
                prob1.append(yprob[1])
                prob2.append(yprob[2])

            # create result workspace
            fitWS = fname + '_Workspaces'
            fout = fname + '_Workspace_' + str(m)

            workflow_prog.report('Creating OutputWorkspace')
            CreateWorkspace(OutputWorkspace=fout, DataX=datX, DataY=datY, DataE=datE,
                            Nspec=nsp, UnitX='DeltaE', VerticalAxisUnit='Text',
                            VerticalAxisValues=names)

            # append workspace to list of results
            group += fout + ','

        comp_prog = Progress(self, start=0.7, end=0.8, nreports=2)
        comp_prog.report('Creating Group Workspace')
        GroupWorkspaces(InputWorkspaces=group, OutputWorkspace=fitWS)

        if self._program == 'QL':
            comp_prog.report('Processing Lorentzian probability data')
            yPr0 = np.array([prob0[0]])
            yPr1 = np.array([prob1[0]])
            yPr2 = np.array([prob2[0]])
            for m in range(1, nsam):
                yPr0 = np.append(yPr0, prob0[m])
                yPr1 = np.append(yPr1, prob1[m])
                yPr2 = np.append(yPr2, prob2[m])
            yProb = yPr0
            yProb = np.append(yProb, yPr1)
            yProb = np.append(yProb, yPr2)
            probWs = CreateWorkspace(OutputWorkspace=probWS, DataX=xProb, DataY=yProb, DataE=eProb,
                                     Nspec=3, UnitX='MomentumTransfer')
            outWS = C2Fw(self._samWS[:-4], fname)
            if self._plot != 'None':
                QuasiPlot(fname, self._plot, res_plot, self._loop)
        if self._program == 'QSe':
            comp_prog.report('Running C2Se')
            outWS = C2Se(fname)
            if self._plot != 'None':
                QuasiPlot(fname, self._plot, res_plot, self._loop)

        log_prog = Progress(self, start=0.8, end=1.0, nreports=8)
        # Add some sample logs to the output workspaces
        log_prog.report('Copying Logs to outputWorkspace')
        CopyLogs(InputWorkspace=self._samWS, OutputWorkspace=outWS)
        log_prog.report('Adding Sample logs to Output workspace')
        QLAddSampleLogs(outWS, self._resWS, prog, self._background,
                        self._elastic, erange, (nbin, nrbin), self._resnormWS,
                        self._wfile)
        log_prog.report('Copying logs to fit Workspace')
        CopyLogs(InputWorkspace=self._samWS, OutputWorkspace=fitWS)
        log_prog.report('Adding sample logs to Fit workspace')
        QLAddSampleLogs(fitWS, self._resWS, prog, self._background,
                        self._elastic, erange, (nbin, nrbin), self._resnormWS,
                        self._wfile)
        log_prog.report('Finalising log copying')

        if self._save:
            log_prog.report('Saving workspaces')
            fit_path = os.path.join(workdir, fitWS + '.nxs')
            SaveNexusProcessed(InputWorkspace=fitWS, Filename=fit_path)
            out_path = os.path.join(workdir,
                                    outWS + '.nxs')  # path name for nxs file
            SaveNexusProcessed(InputWorkspace=outWS, Filename=out_path)
            logger.information('Output fit file created : ' + fit_path)
            logger.information('Output parameter file created : ' + out_path)

        self.setProperty('OutputWorkspaceFit', fitWS)
        self.setProperty('OutputWorkspaceResult', outWS)
        log_prog.report('Setting workspace properties')

        if self._program == 'QL':
            self.setProperty('OutputWorkspaceProb', probWS)
Code example #8
File: BayesStretch.py  Project: yutiansut/mantid
    def PyExec(self):
        run_f2py_compatibility_test()

        from IndirectBayes import (CalcErange, GetXYE)
        from IndirectCommon import (CheckXrange, CheckAnalysersOrEFixed,
                                    getEfixed, GetThetaQ, CheckHistZero)
        setup_prog = Progress(self, start=0.0, end=0.3, nreports=5)
        logger.information('BayesStretch input')
        logger.information('Sample is %s' % self._sam_name)
        logger.information('Resolution is %s' % self._res_name)

        setup_prog.report('Converting to binary for Fortran')
        fitOp = self._encode_fit_ops(self._elastic, self._background)

        setup_prog.report('Establishing save path')
        workdir = self._establish_save_path()

        setup_prog.report('Checking X Range')
        CheckXrange(self._erange, 'Energy')

        setup_prog.report('Checking Analysers')
        CheckAnalysersOrEFixed(self._sam_name, self._res_name)
        setup_prog.report('Obtaining EFixed, theta and Q')
        efix = getEfixed(self._sam_name)
        theta, Q = GetThetaQ(self._sam_name)

        setup_prog.report('Checking Histograms')
        nsam, ntc = CheckHistZero(self._sam_name)

        # check if we're performing a sequential fit
        if not self._loop:
            nsam = 1

        logger.information('Version is Stretch')
        logger.information('Number of spectra = %s ' % nsam)
        logger.information('Erange : %f to %f ' %
                           (self._erange[0], self._erange[1]))

        setup_prog.report('Creating FORTRAN Input')
        fname = self._sam_name[:-4] + '_Stretch'
        wrks = os.path.join(workdir, self._sam_name[:-4])
        logger.information('lptfile : %s_Qst.lpt' % wrks)
        lwrk = len(wrks)
        wrks = wrks.ljust(140, ' ')  # ljust returns a new string; assign it so the padding takes effect
        wrkr = self._res_name
        wrkr = wrkr.ljust(140, ' ')
        eBet0 = np.zeros(self._nbet)  # set errors to zero
        eSig0 = np.zeros(self._nsig)  # set errors to zero
        rscl = 1.0
        Qaxis = ''

        workflow_prog = Progress(self, start=0.3, end=0.7, nreports=nsam * 3)

        # Empty arrays to hold Sigma and Bet x,y,e values
        xSig, ySig, eSig = [], [], []
        xBet, yBet, eBet = [], [], []

        for m in range(nsam):
            logger.information('Group %i at angle %f' % (m, theta[m]))
            nsp = m + 1
            nout, bnorm, Xdat, Xv, Yv, Ev = CalcErange(self._sam_name, m,
                                                       self._erange,
                                                       self._nbins[0])
            Ndat = nout[0]
            Imin = nout[1]
            Imax = nout[2]

            # get resolution data (4096 = FORTRAN array length)
            Nb, Xb, Yb, _ = GetXYE(self._res_name, 0, 4096)
            numb = [
                nsam, nsp, ntc, Ndat, self._nbins[0], Imin, Imax, Nb,
                self._nbins[1], self._nbet, self._nsig
            ]
            reals = [efix, theta[m], rscl, bnorm]

            workflow_prog.report('Processing spectrum number %i' % m)
            xsout, ysout, xbout, ybout, zpout = Que.quest(
                numb, Xv, Yv, Ev, reals, fitOp, Xdat, Xb, Yb, wrks, wrkr, lwrk)
            dataXs = xsout[:self._nsig]  # reduce from fixed FORTRAN array
            dataYs = ysout[:self._nsig]
            dataXb = xbout[:self._nbet]
            dataYb = ybout[:self._nbet]
            zpWS = fname + '_Zp' + str(m)
            if m > 0:
                Qaxis += ','
            Qaxis += str(Q[m])

            dataXz = []
            dataYz = []
            dataEz = []

            for n in range(self._nsig):
                yfit_list = np.split(zpout[:self._nsig * self._nbet],
                                     self._nsig)
                dataYzp = yfit_list[n]

                dataXz = np.append(dataXz, xbout[:self._nbet])
                dataYz = np.append(dataYz, dataYzp[:self._nbet])
                dataEz = np.append(dataEz, eBet0)

            zpWS = fname + '_Zp' + str(m)
            self._create_workspace(zpWS, [dataXz, dataYz, dataEz], self._nsig,
                                   dataXs, True)

            xSig = np.append(xSig, dataXs)
            ySig = np.append(ySig, dataYs)
            eSig = np.append(eSig, eSig0)
            xBet = np.append(xBet, dataXb)
            yBet = np.append(yBet, dataYb)
            eBet = np.append(eBet, eBet0)

            if m == 0:
                groupZ = zpWS
            else:
                groupZ = groupZ + ',' + zpWS

        # create workspaces for sigma and beta
        workflow_prog.report('Creating OutputWorkspace')
        self._create_workspace(fname + '_Sigma', [xSig, ySig, eSig], nsam,
                               Qaxis)
        self._create_workspace(fname + '_Beta', [xBet, yBet, eBet], nsam,
                               Qaxis)

        group = fname + '_Sigma,' + fname + '_Beta'
        fit_ws = fname + '_Fit'
        s_api.GroupWorkspaces(InputWorkspaces=group, OutputWorkspace=fit_ws)
        contour_ws = fname + '_Contour'
        s_api.GroupWorkspaces(InputWorkspaces=groupZ,
                              OutputWorkspace=contour_ws)

        # Add some sample logs to the output workspaces
        log_prog = Progress(self, start=0.8, end=1.0, nreports=6)
        log_prog.report('Copying Logs to Fit workspace')
        copy_log_alg = self.createChildAlgorithm('CopyLogs',
                                                 enableLogging=False)
        copy_log_alg.setProperty('InputWorkspace', self._sam_name)
        copy_log_alg.setProperty('OutputWorkspace', fit_ws)
        copy_log_alg.execute()

        log_prog.report('Adding Sample logs to Fit workspace')
        self._add_sample_logs(fit_ws, self._erange, self._nbins[0])

        log_prog.report('Copying logs to Contour workspace')
        copy_log_alg.setProperty('InputWorkspace', self._sam_name)
        copy_log_alg.setProperty('OutputWorkspace', contour_ws)
        copy_log_alg.execute()

        log_prog.report('Adding sample logs to Contour workspace')
        self._add_sample_logs(contour_ws, self._erange, self._nbins[0])
        log_prog.report('Finalising log copying')

        # sort x axis
        s_api.SortXAxis(InputWorkspace=fit_ws,
                        OutputWorkspace=fit_ws,
                        EnableLogging=False)
        s_api.SortXAxis(InputWorkspace=contour_ws,
                        OutputWorkspace=contour_ws,
                        EnableLogging=False)

        self.setProperty('OutputWorkspaceFit', fit_ws)
        self.setProperty('OutputWorkspaceContour', contour_ws)
        log_prog.report('Setting workspace properties')
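Example #7 builds the Fortran option list inline, whereas this example delegates to self._encode_fit_ops, whose body is not shown in this listing. A plausible helper, sketched by analogy with the inline encoding in example #7, is given below; this is an assumption about BayesStretch, not its actual source, and the exact shape of the returned list is not known from these excerpts.

    # Hypothetical helper mirroring the inline encoding in example #7:
    # booleans become 1/0 and the background choice becomes 2/1/0 for Fortran.
    def _encode_fit_ops(self, elastic, background):
        o_el = 1 if elastic else 0
        background_codes = {'Sloping': 2, 'Flat': 1, 'Zero': 0}
        o_bgd = background_codes[background]
        return [o_el, o_bgd]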
Code example #9
    def _transform(self):
        """
        Run TransformToIqt.
        """
        from IndirectCommon import CheckHistZero, CheckHistSame, CheckAnalysers
        trans_prog = Progress(self, start=0.3, end=0.8, nreports=15)
        try:
            CheckAnalysers(self._sample, self._resolution)
        except ValueError:
            # A genuine error showing that the two runs are incompatible
            raise
        except:
            # Checking could not be performed due to incomplete or no instrument
            logger.warning(
                'Could not check for matching analyser and reflection')

        # Process resolution data
        num_res_hist = CheckHistZero(self._resolution)[0]
        if num_res_hist > 1:
            CheckHistSame(self._sample, 'Sample', self._resolution,
                          'Resolution')

        rebin_param = str(self._e_min) + ',' + str(self._e_width) + ',' + str(
            self._e_max)
        trans_prog.report('Rebinning Workspace')
        Rebin(InputWorkspace=self._sample,
              OutputWorkspace='__sam_data',
              Params=rebin_param,
              FullBinsOnly=True)

        # Sample
        trans_prog.report('Rebinning sample')
        Rebin(InputWorkspace='__sam_data',
              OutputWorkspace='__sam_data',
              Params=rebin_param)
        trans_prog.report('Integrating Sample')
        Integration(InputWorkspace='__sam_data', OutputWorkspace='__sam_int')
        trans_prog.report('Converting Sample to data points')
        ConvertToPointData(InputWorkspace='__sam_data',
                           OutputWorkspace='__sam_data')
        trans_prog.report('Extracting FFT spectrum for Sample')
        ExtractFFTSpectrum(InputWorkspace='__sam_data',
                           OutputWorkspace='__sam_fft',
                           FFTPart=2)
        trans_prog.report('Dividing Sample')
        Divide(LHSWorkspace='__sam_fft',
               RHSWorkspace='__sam_int',
               OutputWorkspace='__sam')

        # Resolution
        trans_prog.report('Rebinning Resolution')
        Rebin(InputWorkspace=self._resolution,
              OutputWorkspace='__res_data',
              Params=rebin_param)
        trans_prog.report('Integrating Resolution')
        Integration(InputWorkspace='__res_data', OutputWorkspace='__res_int')
        trans_prog.report('Converting Resolution to data points')
        ConvertToPointData(InputWorkspace='__res_data',
                           OutputWorkspace='__res_data')
        trans_prog.report('Extracting FFT Resolution spectrum')
        ExtractFFTSpectrum(InputWorkspace='__res_data',
                           OutputWorkspace='__res_fft',
                           FFTPart=2)
        trans_prog.report('Dividing Resolution')
        Divide(LHSWorkspace='__res_fft',
               RHSWorkspace='__res_int',
               OutputWorkspace='__res')

        trans_prog.report('Dividing Workspaces')
        Divide(LHSWorkspace='__sam',
               RHSWorkspace='__res',
               OutputWorkspace=self._output_workspace)

        # Cleanup sample workspaces
        trans_prog.report('Deleting Sample temp')
        DeleteWorkspace('__sam_data')
        DeleteWorkspace('__sam_int')
        DeleteWorkspace('__sam_fft')
        DeleteWorkspace('__sam')

        # Crop nonsense values off workspace
        binning = int(math.ceil(mtd[self._output_workspace].blocksize() / 2.0))
        bin_v = mtd[self._output_workspace].dataX(0)[binning]
        trans_prog.report('Cropping output')
        CropWorkspace(InputWorkspace=self._output_workspace,
                      OutputWorkspace=self._output_workspace,
                      XMax=bin_v)

        # Set Y axis unit and label
        mtd[self._output_workspace].setYUnit('')
        mtd[self._output_workspace].setYUnitLabel('Intensity')

        trans_prog.report('Deleting Resolution temp')
        # Clean up resolution workspaces
        DeleteWorkspace('__res_data')
        DeleteWorkspace('__res_int')
        DeleteWorkspace('__res_fft')
        DeleteWorkspace('__res')
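Examples #9 and #11 perform the I(Q,t) transform by hand: rebin, integrate, take the FFT of each spectrum (FFTPart=2 is taken here to be the modulus part) and divide it by the spectrum's integral, then divide the sample result by the resolution result. The normalisation step can be illustrated with plain NumPy; this is a standalone sketch, independent of the Mantid API, and any overall scale factor cancels in the final sample/resolution division.

import numpy as np

# Standalone illustration of the FFT-over-integral normalisation used above.
def fft_over_integral(y, dx):
    integral = np.sum(y) * dx        # rectangle-rule stand-in for Integration
    modulus = np.abs(np.fft.fft(y))  # stand-in for ExtractFFTSpectrum with FFTPart=2 (modulus)
    return modulus / integral

counts = np.exp(-np.linspace(-2.0, 2.0, 128) ** 2)  # toy elastic-like spectrum
normalised = fft_over_integral(counts, dx=4.0 / 128)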
Code example #10
    def PyExec(self):
        from IndirectCommon import CheckHistZero, CheckElimits, getDefaultWorkingDirectory

        workflow_prog = Progress(self, start=0.0, end=1.0, nreports=20)
        workflow_prog.report('Setting up algorithm')
        sample_workspace = self.getPropertyValue('Sample')
        output_workspace = self.getPropertyValue('OutputWorkspace')
        factor = self.getProperty('Scale').value
        emin = self.getProperty('EnergyMin').value
        emax = self.getProperty('EnergyMax').value
        erange = [emin, emax]

        Plot = self.getProperty('Plot').value
        Save = self.getProperty('Save').value

        workflow_prog.report('Validating input')
        num_spectra,num_w = CheckHistZero(sample_workspace)

        logger.information('Sample %s has %d Q values & %d w values' % (sample_workspace, num_spectra, num_w))

        x_data = np.asarray(mtd[sample_workspace].readX(0))
        CheckElimits(erange,x_data)

        workflow_prog.report('Cropping Workspace')
        samWS = '__temp_sqw_moments_cropped'
        CropWorkspace(InputWorkspace=sample_workspace, OutputWorkspace=samWS,
                      XMin=erange[0], XMax=erange[1])

        logger.information('Energy range is %f to %f' % (erange[0], erange[1]))

        if factor > 0.0:
            workflow_prog.report('Scaling Workspace by factor %f' % factor)
            Scale(InputWorkspace=samWS, OutputWorkspace=samWS, Factor=factor, Operation='Multiply')
            logger.information('y(q,w) scaled by %f' % factor)

        #calculate delta x
        workflow_prog.report('Converting to point data')
        ConvertToPointData(InputWorkspace=samWS, OutputWorkspace=samWS)
        x_data = np.asarray(mtd[samWS].readX(0))
        workflow_prog.report('Creating temporary data workspace')
        x_workspace = CreateWorkspace(OutputWorkspace="__temp_sqw_moments_x",
                                      DataX=x_data, DataY=x_data, UnitX="DeltaE")

        #calculate moments
        moments_0 = output_workspace + '_M0'
        moments_1 = output_workspace + '_M1'
        moments_2 = output_workspace + '_M2'
        moments_3 = output_workspace + '_M3'
        moments_4 = output_workspace + '_M4'

        workflow_prog.report('Multiplying Workspaces by moments')
        Multiply(x_workspace, samWS, OutputWorkspace=moments_1)
        Multiply(x_workspace, moments_1, OutputWorkspace=moments_2)
        Multiply(x_workspace, moments_2, OutputWorkspace=moments_3)
        Multiply(x_workspace, moments_3, OutputWorkspace=moments_4)
        DeleteWorkspace(moments_3)

        workflow_prog.report('Converting to Histogram')
        ConvertToHistogram(InputWorkspace=samWS, OutputWorkspace=samWS)
        workflow_prog.report('Integrating result')
        Integration(samWS, OutputWorkspace=moments_0)

        moments = [moments_1, moments_2, moments_4]
        for moment_ws in moments:
            workflow_prog.report('Processing workspace %s' % moment_ws)
            ConvertToHistogram(InputWorkspace=moment_ws, OutputWorkspace=moment_ws)
            Integration(moment_ws, OutputWorkspace=moment_ws)
            Divide(moment_ws, moments_0, OutputWorkspace=moment_ws)

        workflow_prog.report('Deleting Workspaces')
        DeleteWorkspace(samWS)
        DeleteWorkspace(x_workspace)

        #create output workspace
        extensions = ['_M0', '_M1', '_M2', '_M4']
        for ext in extensions:
            ws_name = output_workspace+ext
            workflow_prog.report('Processing Workspace %s' % ext)
            Transpose(InputWorkspace=ws_name, OutputWorkspace=ws_name)
            ConvertToHistogram(InputWorkspace=ws_name, OutputWorkspace=ws_name)
            ConvertUnits(InputWorkspace=ws_name, OutputWorkspace=ws_name,
                         Target='MomentumTransfer', EMode='Indirect')

            CopyLogs(InputWorkspace=sample_workspace, OutputWorkspace=ws_name)
            workflow_prog.report('Adding Sample logs to %s' % ws_name)
            AddSampleLog(Workspace=ws_name, LogName="energy_min",
                         LogType="Number", LogText=str(emin))
            AddSampleLog(Workspace=ws_name, LogName="energy_max",
                         LogType="Number", LogText=str(emax))
            AddSampleLog(Workspace=ws_name, LogName="scale_factor",
                         LogType="Number", LogText=str(factor))

        # Group output workspace
        workflow_prog.report('Grouping OutputWorkspace')
        group_workspaces = ','.join([output_workspace+ext for ext in extensions])
        GroupWorkspaces(InputWorkspaces=group_workspaces, OutputWorkspace=output_workspace)

        if Save:
            workflow_prog.report('Saving Workspace')
            workdir = getDefaultWorkingDirectory()
            opath = os.path.join(workdir,output_workspace+'.nxs')
            SaveNexusProcessed(InputWorkspace=output_workspace, Filename=opath)
            logger.information('Output file : ' + opath)

        if Plot:
            workflow_prog.report('Plotting Workspace')
            self._plot_moments(output_workspace)

        self.setProperty("OutputWorkspace", output_workspace)
        workflow_prog.report('Algorithm complete')
Code example #11
File: Fury.py  Project: chatcannon/mantid
    def _fury(self):
        """
        Run Fury.
        """
        from IndirectCommon import CheckHistZero, CheckHistSame, CheckAnalysers

        # Process resolution data
        CheckAnalysers(self._sample, self._resolution, self._verbose)
        num_res_hist = CheckHistZero(self._resolution)[0]
        if num_res_hist > 1:
            CheckHistSame(self._sample, 'Sample', self._resolution,
                          'Resolution')

        rebin_param = str(self._e_min) + ',' + str(self._e_width) + ',' + str(
            self._e_max)
        Rebin(InputWorkspace=self._sample,
              OutputWorkspace='__sam_rebin',
              Params=rebin_param,
              FullBinsOnly=True)

        Rebin(InputWorkspace=self._resolution,
              OutputWorkspace='__res_data',
              Params=rebin_param)
        Integration(InputWorkspace='__res_data', OutputWorkspace='__res_int')
        ConvertToPointData(InputWorkspace='__res_data',
                           OutputWorkspace='__res_data')
        ExtractFFTSpectrum(InputWorkspace='__res_data',
                           OutputWorkspace='__res_fft',
                           FFTPart=2)
        Divide(LHSWorkspace='__res_fft',
               RHSWorkspace='__res_int',
               OutputWorkspace='__res')

        Rebin(InputWorkspace='__sam_rebin',
              OutputWorkspace='__sam_data',
              Params=rebin_param)
        Integration(InputWorkspace='__sam_data', OutputWorkspace='__sam_int')
        ConvertToPointData(InputWorkspace='__sam_data',
                           OutputWorkspace='__sam_data')
        ExtractFFTSpectrum(InputWorkspace='__sam_data',
                           OutputWorkspace='__sam_fft',
                           FFTPart=2)
        Divide(LHSWorkspace='__sam_fft',
               RHSWorkspace='__sam_int',
               OutputWorkspace='__sam')

        Divide(LHSWorkspace='__sam',
               RHSWorkspace='__res',
               OutputWorkspace=self._output_workspace)

        # Cleanup sample workspaces
        DeleteWorkspace('__sam_rebin')
        DeleteWorkspace('__sam_data')
        DeleteWorkspace('__sam_int')
        DeleteWorkspace('__sam_fft')
        DeleteWorkspace('__sam')

        # Crop nonsense values off workspace
        binning = int(math.ceil(mtd[self._output_workspace].blocksize() / 2.0))
        bin_v = mtd[self._output_workspace].dataX(0)[binning]
        CropWorkspace(InputWorkspace=self._output_workspace,
                      OutputWorkspace=self._output_workspace,
                      XMax=bin_v)

        # Clean up resolution workspaces
        DeleteWorkspace('__res_data')
        DeleteWorkspace('__res_int')
        DeleteWorkspace('__res_fft')
        DeleteWorkspace('__res')