Example #1
    def getVectorProcessVanToPlot(self, exp, scan, tempdata=False):
        """ Get vec x and y for the processed vanadium spectrum
        """
        # get on hold of processed vanadium data workspace
        wsmanager = self.getWorkspace(exp, scan, raiseexception=True)

        if tempdata is True:
            procVanWs = wsmanager.getProcessedVanadiumWSTemp()
        else:
            procVanWs = wsmanager.getProcessedVanadiumWS()

        if procVanWs is None:
            raise RuntimeError(
                "Exp %d Scan %d does not have processed vanadium workspace." %
                (exp, scan))

        # convert to point data if necessary
        if len(procVanWs.readX(0)) != len(procVanWs.readY(0)):
            wsname = procVanWs.name() + "_pd"
            api.ConvertToPointData(InputWorkspace=procVanWs,
                                   OutputWorkspace=wsname)
            outws = AnalysisDataService.retrieve(wsname)
        else:
            outws = procVanWs

        # get vectors
        return outws.readX(0), outws.readY(0)
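
A minimal sketch (assuming a working Mantid installation; all names are illustrative) of the length check used above: histogram data carries one more X value (bin boundaries) than Y values (counts), and ConvertToPointData replaces the boundaries with bin centres so the lengths match.

from mantid.simpleapi import ConvertToPointData, CreateWorkspace

# 3 bins -> 4 boundaries in X and 3 counts in Y: a histogram workspace
hist_ws = CreateWorkspace(DataX=[0.0, 1.0, 2.0, 3.0],
                          DataY=[10.0, 20.0, 15.0],
                          NSpec=1, UnitX='TOF')
assert len(hist_ws.readX(0)) == len(hist_ws.readY(0)) + 1

# after conversion, X holds the bin centres and matches Y in length
point_ws = ConvertToPointData(InputWorkspace=hist_ws)
assert len(point_ws.readX(0)) == len(point_ws.readY(0))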
Example #2
def _plot_with_options(axes_option, workspace, options_list, plot_number):
    """
    Enable/disable legend, grid, limits according to
    options (ops) for the given axes (ax).
    Plot with or without errorbars.
    """
    ws_plot = api.ConvertToPointData(workspace)
    if options_list['errorbars']:
        axes_option.errorbar(ws_plot.readX(0),
                             ws_plot.readY(0),
                             yerr=ws_plot.readE(0),
                             label=workspace.name())
    else:
        axes_option.plot(ws_plot.readX(0),
                         ws_plot.readY(0),
                         label=workspace.name())

    axes_option.grid(options_list['grid'])
    axes_option.set_xscale(options_list['xScale'])
    axes_option.set_yscale(options_list['yScale'])
    if options_list['xLimits'] != 'auto':
        axes_option.set_xlim(options_list['xLimits'])
    if options_list['yLimits'] != 'auto':
        axes_option.set_ylim(options_list['yLimits'])

    # If a list of titles was given, use it to title each subplot
    # (check for list/tuple explicitly: strings are also iterable)
    if isinstance(options_list['title'], (list, tuple)):
        axes_option.set_title(options_list['title'][plot_number])
    if options_list['legend'] and isinstance(options_list['legendLocation'],
                                             (list, tuple)):
        axes_option.legend(loc=options_list['legendLocation'][plot_number])
    elif options_list['legend']:
        axes_option.legend(loc=options_list['legendLocation'])
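
A hypothetical usage sketch for _plot_with_options: the option keys mirror those read in the function body, while the workspace name, data values and chosen settings are illustrative assumptions.

import matplotlib.pyplot as plt
from mantid.simpleapi import CreateWorkspace

ws = CreateWorkspace(DataX=[0.0, 1.0, 2.0, 3.0], DataY=[5.0, 8.0, 6.0],
                     DataE=[1.0, 1.5, 1.2], NSpec=1, UnitX='TOF',
                     OutputWorkspace='demo_ws')
options = {'errorbars': True,           # plot with Y error bars
           'grid': True,
           'xScale': 'linear',
           'yScale': 'linear',
           'xLimits': 'auto',           # or a (min, max) pair
           'yLimits': (0.0, 10.0),
           'title': ['demo plot'],      # one title per subplot
           'legend': True,
           'legendLocation': 'best'}
fig, ax = plt.subplots()
_plot_with_options(ax, ws, options, plot_number=0)
plt.show()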
Example #3
    def export_to_rmcprofile(ws_name,
                             output_file_name,
                             comment='',
                             ws_index=0):
        """ Export a workspace 2D to a 2 column data for RMCProfile
        """
        # check inputs
        assert isinstance(ws_name, str), \
            'Workspace name {0} must be a string but not a {1}.'.format(
                ws_name, type(ws_name))
        assert isinstance(output_file_name, str), \
            'Output file name {0} must be a string but not a {1}.'.format(
                output_file_name, type(output_file_name))
        assert isinstance(comment, str), \
            'Comment {0} must be a string but not a {1}.'.format(
                comment, type(comment))
        assert isinstance(ws_index, int), \
            'Workspace index {0} must be an integer but not a {1}.'.format(
                ws_index, type(ws_index))

        # convert to point data from histogram
        simpleapi.ConvertToPointData(InputWorkspace=ws_name,
                                     OutputWorkspace=ws_name)

        # get workspace for vecX and vecY
        if AnalysisDataService.doesExist(ws_name):
            workspace = AnalysisDataService.retrieve(ws_name)
        else:
            raise RuntimeError(
                'Workspace {0} does not exist in ADS.'.format(ws_name))
        if not 0 <= ws_index < workspace.getNumberHistograms():
            raise RuntimeError(
                'Workspace index {0} is out of range.'.format(ws_index))

        vec_x = workspace.readX(ws_index)
        vec_y = workspace.readY(ws_index)

        # write to buffer
        wbuf = ''
        wbuf += '{0}\n'.format(len(vec_x))
        wbuf += '{0}\n'.format(comment)
        for index in range(len(vec_x)):
            wbuf += ' {0} {1}\n'.format(vec_x[index], vec_y[index])

        # write to file
        try:
            with open(output_file_name, 'w') as ofile:
                ofile.write(wbuf)
        except IOError as io_err:
            raise RuntimeError(
                'Unable to export data to file {0} in RMCProfile format due to {1}.'
                .format(output_file_name, io_err))

        return
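
A hypothetical usage sketch, assuming export_to_rmcprofile is reachable as a static helper; the workspace and file names are illustrative. The written file holds the number of points on the first line, the comment on the second, then one "x y" pair per line.

from mantid.simpleapi import CreateWorkspace

# a 2-bin histogram; the export converts it to 2 points internally
CreateWorkspace(DataX=[0.0, 1.0, 2.0], DataY=[3.0, 4.0],
                NSpec=1, UnitX='MomentumTransfer',
                OutputWorkspace='sofq_demo')
export_to_rmcprofile('sofq_demo', 'sofq_demo.dat', comment='demo S(Q)')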
Example #4
    def load_sq(self, file_name):
        """
        Load S(Q) to a numpy
        Guarantees: the file is loaded to self._currSQX, _currSQY and _currSQE
        Parameters
        ----------
        file_name :: name of the S(Q)

        Returns
        -------
        2-tuple range of Q
        """
        # generate S(Q) workspace name
        sq_ws_name = os.path.basename(file_name).split('.')[0]

        # call mantid LoadAscii
        ext = file_name.upper().split('.')[-1]
        if ext == 'NXS':
            simpleapi.LoadNexusProcessed(Filename=file_name,
                                         OutputWorkspace=sq_ws_name)
            simpleapi.ConvertUnits(InputWorkspace=sq_ws_name,
                                   OutputWorkspace=sq_ws_name,
                                   EMode='Elastic',
                                   Target='MomentumTransfer')
            simpleapi.ConvertToPointData(
                InputWorkspace=sq_ws_name,
                OutputWorkspace=sq_ws_name)  # TODO REMOVE THIS LINE
        elif ext in ('DAT', 'TXT'):  # note: ext was upper-cased above
            try:
                simpleapi.LoadAscii(Filename=file_name,
                                    OutputWorkspace=sq_ws_name,
                                    Unit='MomentumTransfer')
            except RuntimeError:
                sq_ws_name, q_min, q_max = "InvalidInput", 0, 0
                return sq_ws_name, q_min, q_max
            # The .dat/.txt file actually stores S(Q)-1, so add 1 to the
            # workspace to recover S(Q)
            out_ws = AnalysisDataService.retrieve(sq_ws_name)
            out_ws += 1

        assert AnalysisDataService.doesExist(
            sq_ws_name), 'Unable to load S(Q) file %s.' % file_name

        # set to the current S(Q) workspace name
        self._currSqWsName = sq_ws_name
        self._sqIndexDict[self._currSqWsName] = 0

        # get range of Q from the loading
        sq_ws = AnalysisDataService.retrieve(sq_ws_name)
        q_min = sq_ws.readX(0)[0]
        q_max = sq_ws.readX(0)[-1]

        return sq_ws_name, q_min, q_max
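
A minimal sketch of the in-place arithmetic used above: Mantid workspaces support scalar operators, so adding 1 turns the S(Q)-1 values stored in the ASCII file into S(Q). The data values here are illustrative.

from mantid.simpleapi import CreateWorkspace

sq_ws = CreateWorkspace(DataX=[0.5, 1.0, 1.5], DataY=[-0.2, 0.1, 0.05],
                        NSpec=1, UnitX='MomentumTransfer')
sq_ws += 1  # Y becomes [0.8, 1.1, 1.05]
print(sq_ws.readY(0))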
Example #5
def create_merged_workspace(workspace_list):
    if workspace_list:
        # get max number of bins and max X range
        max_num_bins = 0
        for ws_name in workspace_list:
            if ws_name:
                ws = mantid.mtd[ws_name]
                max_num_bins = max(ws.blocksize(), max_num_bins)

        # create a single ws for the merged data, using the first valid ws as
        # a template (num_files_per_detector is a module-level constant in the
        # original source)
        template_ws_name = next(name for name in workspace_list
                                if name is not None)
        merged_ws = mantid.WorkspaceFactory.create(
            mantid.mtd[template_ws_name],
            NVectors=num_files_per_detector,
            XLength=max_num_bins,
            YLength=max_num_bins)

        # create a merged workspace based on every entry from workspace list
        for i in range(0, num_files_per_detector):
            # load in ws - first check workspace exists
            if workspace_list[i]:
                ws = mantid.mtd[workspace_list[i]]
                # check if histogram data, and convert if necessary
                if ws.isHistogramData():
                    ws = mantid.ConvertToPointData(InputWorkspace=ws.name(),
                                                   OutputWorkspace=ws.name())
                # find max x val
                max_x = np.max(ws.readX(0))
                # get current number of bins
                num_bins = ws.blocksize()
                # pad bins
                X_padded = np.empty(max_num_bins)
                X_padded.fill(max_x)
                X_padded[:num_bins] = ws.readX(0)
                Y_padded = np.zeros(max_num_bins)
                Y_padded[:num_bins] = ws.readY(0)
                E_padded = np.zeros(max_num_bins)
                E_padded[:num_bins] = ws.readE(0)

                # set row of merged workspace
                merged_ws.setX(i, X_padded)
                merged_ws.setY(i, Y_padded)
                merged_ws.setE(i, E_padded)

                # remove workspace from ADS
                mantid.AnalysisDataService.remove(ws.name())

        return merged_ws
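
A numpy-only sketch of the padding scheme above: X is padded by repeating its maximum value and Y (and E) are padded with zeros, so that every spectrum reaches max_num_bins entries.

import numpy as np

max_num_bins = 6
x = np.array([0.0, 1.0, 2.0, 3.0])
y = np.array([5.0, 7.0, 6.0, 4.0])

x_padded = np.full(max_num_bins, x.max())
x_padded[:x.size] = x  # -> [0. 1. 2. 3. 3. 3.]
y_padded = np.zeros(max_num_bins)
y_padded[:y.size] = y  # -> [5. 7. 6. 4. 0. 0.]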
Example #6
    def get_bragg_data(self, ws_group_name, bank_id, x_unit):
        """ Get Bragg diffraction data of 1 bank
        Args:
            ws_group_name
            bank_id:
            x_unit:
        Returns:
        3-tuple of numpy 1D array for X, Y and E
        """
        # check
        assert isinstance(bank_id, int) and bank_id > 0
        msg = 'Workspace group {} does not exist in controller. '.format(
            ws_group_name)
        msg += 'Currently existing groups are {}.'.format(
            self._braggDataDict.keys())
        assert ws_group_name in self._braggDataDict, msg

        ws_name = '%s_bank%d' % (ws_group_name.split('_group')[0], bank_id)
        error_message = 'Bank %d is not found in group %s. Available bank IDs are %s.' % (
            bank_id, ws_group_name, str(self._braggDataDict[ws_group_name][1]))
        assert ws_name in self._braggDataDict[ws_group_name][1], error_message

        # FIXME - It is quite messy here! Using dictionary or forming workspace name?
        # construct bank workspace name
        # ws_name = self._braggDataDict[ws_group_name][1][bank_id]
        assert AnalysisDataService.doesExist(
            ws_name), 'Workspace %s does not exist.' % ws_name

        # convert units if necessary
        bank_ws = AnalysisDataService.retrieve(ws_name)
        curr_unit = bank_ws.getAxis(0).getUnit().unitID()
        if curr_unit != x_unit:
            simpleapi.ConvertToHistogram(InputWorkspace=ws_name,
                                         OutputWorkspace=ws_name)
            simpleapi.ConvertUnits(InputWorkspace=ws_name,
                                   OutputWorkspace=ws_name,
                                   Target=x_unit,
                                   EMode='Elastic')

        # convert to point data for plotting
        simpleapi.ConvertToPointData(InputWorkspace=ws_name,
                                     OutputWorkspace=ws_name)

        # get workspace
        bank_ws = AnalysisDataService.retrieve(ws_name)

        return bank_ws.readX(0), bank_ws.readY(0), bank_ws.readE(0)
Example #7
    def get_ws_data(ws_name):
        """

        Parameters
        ----------
        ws_name

        Returns
        -------

        """
        # convert to point data for plotting
        simpleapi.ConvertToPointData(InputWorkspace=ws_name,
                                     OutputWorkspace=ws_name)

        out_ws = AnalysisDataService.retrieve(ws_name)

        return out_ws.readX(0), out_ws.readY(0), out_ws.readE(0)
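
A hypothetical usage sketch, assuming get_ws_data is exposed as a static helper; the workspace name and data are illustrative.

from mantid.simpleapi import CreateWorkspace

CreateWorkspace(DataX=[0.0, 1.0, 2.0], DataY=[3.0, 4.0], DataE=[0.5, 0.6],
                NSpec=1, UnitX='TOF', OutputWorkspace='demo_ws')
vec_x, vec_y, vec_e = get_ws_data('demo_ws')  # point-data X, Y and E arrays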
Example #8
    def getMergedVector(self, mkey):
        """ Get vector X and Y from merged scans
        """
        if mkey in self._myMergedWSDict:
            wksp = self._myMergedWSDict[mkey]

            # convert to point data if necessary
            if len(wksp.readX(0)) != len(wksp.readY(0)):
                wsname = wksp.name() + "_pd"
                api.ConvertToPointData(InputWorkspace=wksp,
                                       OutputWorkspace=wsname)
                wksp = AnalysisDataService.retrieve(wsname)

            vecx = wksp.readX(0)
            vecy = wksp.readY(0)
        else:
            raise NotImplementedError("No merged workspace for key = %s." %
                                      (str(mkey)))

        return (vecx, vecy)
Example #9
    def getVectorToPlot(self, exp, scan):
        """ Get vec x and vec y of the reduced workspace to plot
        """
        # get on hold of reduced workspace
        wsmanager = self.getWorkspace(exp, scan, raiseexception=True)
        reducedws = wsmanager.reducedws
        if reducedws is None:
            raise RuntimeError(
                "Exp %d Scan %d does not have reduced workspace." %
                (exp, scan))

        # convert to point data if necessary
        if len(reducedws.readX(0)) != len(reducedws.readY(0)):
            wsname = reducedws.name() + "_pd"
            api.ConvertToPointData(InputWorkspace=reducedws,
                                   OutputWorkspace=wsname)
            outws = AnalysisDataService.retrieve(wsname)
        else:
            outws = reducedws

        # get vectors
        return outws.readX(0), outws.readY(0)
Example #10
    def _plotTimeCounts(self, wksp):
        """ Plot time/counts
        """
        import datetime
        # Rebin events by pulse time
        try:
            # Get run start and run stop
            if wksp.getRun().hasProperty("run_start"):
                runstart = wksp.getRun().getProperty("run_start").value
            else:
                runstart = wksp.getRun().getProperty("proton_charge").times[0]
            runstop = wksp.getRun().getProperty("proton_charge").times[-1]

            runstart = str(runstart).split(".")[0].strip()
            runstop = str(runstop).split(".")[0].strip()

            t0 = datetime.datetime.strptime(runstart, "%Y-%m-%dT%H:%M:%S")
            tf = datetime.datetime.strptime(runstop, "%Y-%m-%dT%H:%M:%S")

            # Calculate duration and time resolution
            dt = tf - t0
            timeduration = dt.days * 3600 * 24 + dt.seconds

            timeres = float(timeduration) / MAXTIMEBINSIZE
            if timeres < 1.0:
                timeres = 1.0

            sumwsname = "_Summed_%s" % (str(wksp))
            if not AnalysisDataService.doesExist(sumwsname):
                sumws = api.SumSpectra(InputWorkspace=wksp,
                                       OutputWorkspace=sumwsname)
                sumws = api.RebinByPulseTimes(InputWorkspace=sumws,
                                              OutputWorkspace=sumwsname,
                                              Params="%f" % (timeres))
                sumws = api.ConvertToPointData(InputWorkspace=sumws,
                                               OutputWorkspace=sumwsname)
            else:
                sumws = AnalysisDataService.retrieve(sumwsname)
        except RuntimeError as e:
            return str(e)

        vecx = sumws.readX(0)
        vecy = sumws.readY(0)

        xmin = min(vecx)
        xmax = max(vecx)
        ymin = min(vecy)
        ymax = max(vecy)

        # Reset graph
        self.ui.mainplot.set_xlim(xmin, xmax)
        self.ui.mainplot.set_ylim(ymin, ymax)

        self.ui.mainplot.set_xlabel('Time (seconds)', fontsize=13)
        self.ui.mainplot.set_ylabel('Counts', fontsize=13)

        # Set up main line
        setp(self.mainline, xdata=vecx, ydata=vecy)

        # Reset slide
        newslidery = [min(vecy), max(vecy)]

        newleftx = xmin + (xmax - xmin) * self._leftSlideValue * 0.01
        setp(self.leftslideline, xdata=[newleftx, newleftx], ydata=newslidery)

        newrightx = xmin + (xmax - xmin) * self._rightSlideValue * 0.01
        setp(self.rightslideline,
             xdata=[newrightx, newrightx],
             ydata=newslidery)

        self.ui.graphicsView.draw()

        return
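
A minimal sketch of the duration arithmetic above: dt.days * 3600 * 24 + dt.seconds is the whole-second part of a non-negative timedelta, i.e. int(dt.total_seconds()).

import datetime

t0 = datetime.datetime(2024, 1, 1, 10, 0, 0)
tf = datetime.datetime(2024, 1, 2, 11, 30, 0)
dt = tf - t0
assert dt.days * 3600 * 24 + dt.seconds == int(dt.total_seconds())  # 91800 s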
Example #11
    def rebin_workspace(self, input_ws, binning_param_list, output_ws_name):
        """
        rebin input workspace with user specified binning parameters
        :param input_ws:
        :param binning_param_list:
        :param output_ws_name:
        :return:
        """
        if binning_param_list is None:
            # no re-binning is required: clone the output workspace
            output_workspace = api.CloneWorkspace(
                InputWorkspace=input_ws, OutputWorkspace=output_ws_name)

        else:
            # rebin input workspace
            processed_single_spec_ws_list = list()
            for ws_index in range(input_ws.getNumberHistograms()):
                # rebin on each
                temp_out_name = output_ws_name + '_' + str(ws_index)
                processed_single_spec_ws_list.append(temp_out_name)
                # extract a spectrum out
                api.ExtractSpectra(input_ws,
                                   WorkspaceIndexList=[ws_index],
                                   OutputWorkspace=temp_out_name)
                # get binning parameter
                bin_params = binning_param_list[ws_index]
                if bin_params is None:
                    continue
                # rebin
                # check: valid binning parameters alternate boundaries and
                # step widths, so the list length must be odd
                if len(bin_params) % 2 == 0:
                    # an even number of values cannot be binning parameters
                    raise RuntimeError(
                        'Binning parameter {0} cannot be accepted.'.format(
                            bin_params))

                api.Rebin(InputWorkspace=temp_out_name,
                          OutputWorkspace=temp_out_name,
                          Params=bin_params,
                          PreserveEvents=True)
                rebinned_ws = AnalysisDataService.retrieve(temp_out_name)
                self.log().warning(
                    'Rebinned workspace Size(x) = {0}, Size(y) = {1}'.format(
                        len(rebinned_ws.readX(0)), len(rebinned_ws.readY(0))))

                # Up to this point the workspace still holds histogram data.
                # Check whether the X-values must be reset to the reference TOF from VDRIVE
                temp_out_ws = AnalysisDataService.retrieve(temp_out_name)
                if len(bin_params) == 2 * len(temp_out_ws.readX(0)) - 1:
                    reset_bins = True
                else:
                    reset_bins = False

                # convert to point data
                api.ConvertToPointData(InputWorkspace=temp_out_name,
                                       OutputWorkspace=temp_out_name)
                # align the bin boundaries if necessary
                temp_out_ws = AnalysisDataService.retrieve(temp_out_name)

                if reset_bins:
                    # good to align:
                    for tof_i in range(len(temp_out_ws.readX(0))):
                        temp_out_ws.dataX(0)[tof_i] = int(
                            bin_params[2 * tof_i] * 10) / 10.
                    # END-FOR (tof-i)
                # END-IF (align)
            # END-FOR

            # merge together
            api.RenameWorkspace(
                InputWorkspace=processed_single_spec_ws_list[0],
                OutputWorkspace=output_ws_name)
            for ws_index in range(1, len(processed_single_spec_ws_list)):
                api.ConjoinWorkspaces(
                    InputWorkspace1=output_ws_name,
                    InputWorkspace2=processed_single_spec_ws_list[ws_index])
            # END-FOR
            output_workspace = AnalysisDataService.retrieve(output_ws_name)
        # END-IF-ELSE

        return output_workspace
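
A minimal sketch of the Rebin parameter convention checked above: a valid Params list alternates bin boundaries and step widths, [x0, dx0, x1, dx1, ..., xn], so it always has odd length (a negative step requests logarithmic binning).

def is_valid_binning(bin_params):
    # boundaries and steps alternate, ending on a boundary -> odd length
    return len(bin_params) % 2 == 1

assert is_valid_binning([1000.0, -0.001, 30000.0])  # xmin, -dx (log), xmax
assert not is_valid_binning([1000.0, 30000.0])      # even count: rejected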
Example #12
    def _PyExec(self):
        # Collect Flux Normalization
        if self.getProperty('DoFluxNormalization').value is True:
            self._flux_normalization_type =\
                self.getProperty('FluxNormalizationType').value
            if self._flux_normalization_type == 'Monitor':
                self._MonNorm = True

        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value

        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary

        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])

        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            mantid_config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection['mask_file']

        self._maskWs = tws('BASIS_MASK')
        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace=self._maskWs,
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask(InputWorkspace=self._maskWs,
                                  OutputWorkspace=tws('ExtractMask'))
        self._dMask = _dMask[1]

        #
        #  Process the Vanadium
        #
        norm_runs = self.getProperty('NormRunNumbers').value
        if self._doNorm and bool(norm_runs):
            self._normalizationType = self.getProperty(
                'NormalizationType').value
            self.log().information('Divide by Vanadium with normalization ' +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._get_runs(norm_runs, doIndiv=False)[0]
            normWs = tws(self._make_run_name(norm_set[0]) + '_vanadium')
            self._sum_and_calibrate(norm_set, normWs)

            normRange = self._reflection['vanadium_wav_range']
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == 'by detector ID':
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            self._normMask = tws('BASIS_NORM_MASK')
            sapi.FindDetectorsOutsideLimits(
                InputWorkspace=normWs,
                LowThreshold=1.0 * bin_width,
                # no count events outside ranges
                RangeLower=normRange[0],
                RangeUpper=normRange[1],
                OutputWorkspace=self._normMask)
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == 'by Q slice':
                self._normWs = self._group_and_SofQW(normWs,
                                                     normWs,
                                                     self._etBins,
                                                     isSample=False)
        #
        #  Process the sample
        #
        self._run_list = self._get_runs(self.getProperty('RunNumbers').value,
                                        doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = tws(self._make_run_name(run_set[0]))
            self._sum_and_calibrate(run_set, self._samWs)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == 'by detector ID':
                # Mask detectors with low Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace=self._normMask)
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            prefix = self._make_run_name(run_set[0])
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   prefix,
                                                   self._etBins,
                                                   isSample=True)
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == 'by Q slice':
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = '_divided.dat' if self._doNorm else '.dat'
            dave_grp_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = '_divided_sqw.nxs' if self._doNorm else '_sqw.nxs'
            processed_filename = self._make_run_name(self._samWsRun, False) + \
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty('OutputSusceptibility').value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace('sqw', 'Xqw')
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target='DeltaE_inFrequency')
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace(
                    'sqw', 'Xqw')
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)
            if self.getProperty('OutputPowderSpectrum').value:
                self.generatePowderSpectrum()
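
A minimal sketch of the transpose trick used near the end of this example (and in Examples 14 and 15): ConvertToPointData only acts on the X axis, so the vertical (Q) axis is moved into X, converted from bin boundaries to centres, and moved back. It assumes a workspace with a numeric vertical axis.

from mantid.simpleapi import ConvertToPointData, Transpose

def vertical_axis_to_points(ws_name):
    Transpose(InputWorkspace=ws_name, OutputWorkspace=ws_name)
    ConvertToPointData(InputWorkspace=ws_name, OutputWorkspace=ws_name)
    Transpose(InputWorkspace=ws_name, OutputWorkspace=ws_name)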
Example #13
def do_fitting_benchmark_one_problem(prob,
                                     minimizers,
                                     use_errors=True,
                                     count=0,
                                     previous_name="none"):
    """
    One problem with potentially several starting points, returns a list (start points) of
    lists (minimizers).

    @param prob :: fitting problem
    @param minimizers :: list of minimizers to evaluate/compare
    @param use_errors :: whether to use observational errors when evaluating accuracy (in the
                         cost function)
    @param count :: the current count for the number of different start values for a given problem
    """

    wks, cost_function = prepare_wks_cost_function(prob, use_errors)

    # Each NIST problem generates two results per file - from two different starting points
    results_fit_problem = []

    # Get function definitions for the problem - one for each starting point
    function_defs = get_function_definitions(prob)
    # search for lowest chi2
    min_sum_err_sq = 1.e20
    # Loop over the different starting points
    for user_func in function_defs:
        results_problem_start = []
        for minimizer_name in minimizers:
            t_start = time.perf_counter()  # time.clock() was removed in Python 3.8

            status, chi2, fit_wks, params, errors = run_fit(
                wks,
                prob,
                function=user_func,
                minimizer=minimizer_name,
                cost_function=cost_function)
            t_end = time.perf_counter()
            print("*** with minimizer {0}, Status: {1}, chi2: {2}".format(
                minimizer_name, status, chi2))
            print("   params: {0}, errors: {1}".format(params, errors))

            def sum_of_squares(values):
                return np.sum(np.square(values))

            if fit_wks:
                sum_err_sq = sum_of_squares(fit_wks.readY(2))
                # print " output simulated values: {0}".format(fit_wks.readY(1))
                if sum_err_sq < min_sum_err_sq:
                    tmp = msapi.ConvertToPointData(fit_wks)
                    best_fit = data(minimizer_name, tmp.readX(1), tmp.readY(1))
                    min_sum_err_sq = sum_err_sq
            else:
                sum_err_sq = float("inf")
                print(" WARNING: no output fit workspace")
            print("   sum sq: {0}".format(sum_err_sq))
            result = test_result.FittingTestResult()
            result.problem = prob
            result.fit_status = status
            result.fit_chi2 = chi2
            result.params = params
            result.errors = errors
            result.sum_err_sq = sum_err_sq
            # If the fit has failed, also set the runtime to NaN
            result.runtime = t_end - t_start if not np.isnan(chi2) else np.nan
            print("Result object: {0}".format(result))
            results_problem_start.append(result)
        results_fit_problem.append(results_problem_start)
        # make plots
        fig = plot()
        best_fit.markers = ''
        best_fit.linestyle = '-'
        best_fit.colour = 'green'
        best_fit.order_data()
        fig.add_data(best_fit)
        tmp = msapi.ConvertToPointData(wks)
        xData = tmp.readX(0)
        yData = tmp.readY(0)
        eData = tmp.readE(0)
        raw = data("Data", xData, yData, eData)
        raw.showError = True
        raw.linestyle = ''
        fig.add_data(raw)
        fig.labels['y'] = "Arbitrary units"
        fig.labels['x'] = "Time ($\mu s$)"
        if prob.name == previous_name:
            count += 1
        else:
            count = 1
            previous_name = prob.name
        fig.labels['title'] = prob.name[:-4] + " " + str(count)
        fig.title_size = 10
        fit_result = msapi.Fit(user_func,
                               wks,
                               Output='ws_fitting_test',
                               Minimizer='Levenberg-Marquardt',
                               CostFunction='Least squares',
                               IgnoreInvalidData=True,
                               StartX=prob.start_x,
                               EndX=prob.end_x,
                               MaxIterations=0)
        tmp = msapi.ConvertToPointData(fit_result.OutputWorkspace)
        xData = tmp.readX(1)
        yData = tmp.readY(1)
        startData = data("Start Guess", xData, yData)
        startData.order_data()
        startData.colour = "blue"
        startData.markers = ''
        startData.linestyle = "-"
        start_fig = plot()
        start_fig.add_data(raw)
        start_fig.add_data(startData)
        start_fig.labels['x'] = r"Time ($\mu s$)"
        start_fig.labels['y'] = "Arbitrary units"
        title = user_func[27:-1]
        title = splitByString(title, 30)
        # remove the extension (e.g. .nxs) if there is one
        run_ID = prob.name
        k = run_ID.rfind(".")
        if k != -1:
            run_ID = run_ID[:k]

        start_fig.labels['title'] = run_ID + " " + str(count) + "\n" + title
        start_fig.title_size = 10
        fig.make_scatter_plot("Fit for " + run_ID + " " + str(count) + ".pdf")
        start_fig.make_scatter_plot("start for " + run_ID + " " + str(count) +
                                    ".pdf")
    return results_fit_problem
Example #14
    def PyExec(self):
        config['default.facility'] = "SNS"
        config['default.instrument'] = self._long_inst
        self._reflection = REFLECTIONS_DICT[self.getProperty(
            "ReflectionType").value]
        self._doIndiv = self.getProperty("DoIndividual").value
        self._etBins = 1.E-03 * self.getProperty(
            "EnergyBins").value  # micro-eV to milli-eV
        self._qBins = self.getProperty("MomentumTransferBins").value
        self._qBins[0] -= self._qBins[1] / 2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1] / 2.0  # rightmost bin boundary
        self._noMonNorm = self.getProperty("NoMonitorNorm").value
        self._maskFile = self.getProperty("MaskFile").value
        self._groupDetOpt = self.getProperty("GroupDetectors").value
        self._normalizeToFirst = self.getProperty("NormalizeToFirst").value
        self._doNorm = self.getProperty("DivideByVanadium").value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty(
                "NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # The following steps are common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detectorID":
                normRange = self.getProperty("NormWavelengthRange").value
                self._normRange = [
                    normRange[0], normRange[1] - normRange[0], normRange[1]
                ]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)

            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            OutputWorkspace="BASIS_NORM_MASK")

            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs,
                                                     self._etBins,
                                                     isSample=False)
            if not self._debugMode:
                sapi.DeleteWorkspace(normWs)  # Delete vanadium events file

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs,
                                                   self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)
            # Transform the vertical axis to point data
            sapi.Transpose(
                InputWorkspace=self._samSqwWs,
                OutputWorkspace=self._samSqwWs)  # Q-values are in X-axis now
            sapi.ConvertToPointData(
                InputWorkspace=self._samSqwWs,
                OutputWorkspace=self._samSqwWs)  # from histo to point
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs
                           )  # Q-values back to vertical axis
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun,
                                                  False) + extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun,
                                                   False) + extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
Example #15
    def PyExec(self):
        config['default.facility'] = 'SNS'
        config['default.instrument'] = self._long_inst
        self._reflection =\
            REFLECTIONS_DICT[self.getProperty('ReflectionType').value]
        self._doIndiv = self.getProperty('DoIndividual').value
        # micro-eV to milli-eV
        self._etBins = 1.E-03 * self.getProperty('EnergyBins').value
        self._qBins = self.getProperty('MomentumTransferBins').value
        self._qBins[0] -= self._qBins[1]/2.0  # leftmost bin boundary
        self._qBins[2] += self._qBins[1]/2.0  # rightmost bin boundary
        self._MonNorm = self.getProperty('MonitorNorm').value
        maskfile = self.getProperty('MaskFile').value
        self._maskFile = maskfile if maskfile else\
            pjoin(DEFAULT_MASK_GROUP_DIR, self._reflection['mask_file'])
        self._groupDetOpt = self.getProperty('GroupDetectors').value
        self._normalizeToFirst = self.getProperty('NormalizeToFirst').value
        self._doNorm = self.getProperty('DivideByVanadium').value

        # retrieve properties pertaining to saving to NXSPE file
        self._nsxpe_do = self.getProperty('SaveNXSPE').value
        if self._nsxpe_do:
            self._nxspe_psi_angle_log = self.getProperty('PsiAngleLog').value
            self._nxspe_offset = self.getProperty('PsiOffset').value

        datasearch = config["datasearch.searcharchive"]
        if datasearch != "On":
            config["datasearch.searcharchive"] = "On"

        # Apply default mask if not supplied by user
        self._overrideMask = bool(self._maskFile)
        if not self._overrideMask:
            config.appendDataSearchDir(DEFAULT_MASK_GROUP_DIR)
            self._maskFile = self._reflection["mask_file"]

        sapi.LoadMask(Instrument='BASIS',
                      OutputWorkspace='BASIS_MASK',
                      InputFile=self._maskFile)

        # Work around length issue
        _dMask = sapi.ExtractMask('BASIS_MASK')
        self._dMask = _dMask[1]
        sapi.DeleteWorkspace(_dMask[0])

        ############################
        ##  Process the Vanadium  ##
        ############################

        norm_runs = self.getProperty("NormRunNumbers").value
        if self._doNorm and bool(norm_runs):
            if ";" in norm_runs:
                raise SyntaxError("Normalization does not support run groups")
            self._normalizationType = self.getProperty("NormalizationType").value
            self.log().information("Divide by Vanadium with normalization" +
                                   self._normalizationType)

            # Following steps common to all types of Vanadium normalization

            # norm_runs encompasses a single set, thus _getRuns returns
            # a list of only one item
            norm_set = self._getRuns(norm_runs, doIndiv=False)[0]
            normWs = self._sum_and_calibrate(norm_set, extra_extension="_norm")

            normRange = self.getProperty("NormWavelengthRange").value
            bin_width = normRange[1] - normRange[0]
            # This rebin integrates counts onto a histogram of a single bin
            if self._normalizationType == "by detector ID":
                self._normRange = [normRange[0], bin_width, normRange[1]]
                sapi.Rebin(InputWorkspace=normWs,
                           OutputWorkspace=normWs,
                           Params=self._normRange)
                self._normWs = normWs
            # FindDetectorsOutsideLimits to be substituted by MedianDetectorTest
            sapi.FindDetectorsOutsideLimits(InputWorkspace=normWs,
                                            LowThreshold=1.0*bin_width,
                                            # no count events outside ranges
                                            RangeLower=normRange[0],
                                            RangeUpper=normRange[1],
                                            OutputWorkspace='BASIS_NORM_MASK')
            # additional reduction steps when normalizing by Q slice
            if self._normalizationType == "by Q slice":
                self._normWs = self._group_and_SofQW(normWs, self._etBins,
                                                     isSample=False)

        ##########################
        ##  Process the sample  ##
        ##########################
        self._run_list = self._getRuns(self.getProperty("RunNumbers").value,
                                       doIndiv=self._doIndiv)
        for run_set in self._run_list:
            self._samWs = self._sum_and_calibrate(run_set)
            self._samWsRun = str(run_set[0])
            # Divide by Vanadium detector ID, if pertinent
            if self._normalizationType == "by detector ID":
                # Mask detectors with insufficient Vanadium signal before dividing
                sapi.MaskDetectors(Workspace=self._samWs,
                                   MaskedWorkspace='BASIS_NORM_MASK')
                sapi.Divide(LHSWorkspace=self._samWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samWs)
            # additional reduction steps
            self._samSqwWs = self._group_and_SofQW(self._samWs, self._etBins,
                                                   isSample=True)
            if not self._debugMode:
                sapi.DeleteWorkspace(self._samWs)  # delete events file
            # Divide by Vanadium Q slice, if pertinent
            if self._normalizationType == "by Q slice":
                sapi.Divide(LHSWorkspace=self._samSqwWs,
                            RHSWorkspace=self._normWs,
                            OutputWorkspace=self._samSqwWs)
            # Clear mask from reduced file. Needed for binary operations
            # involving this S(Q,w)
            sapi.ClearMaskFlag(Workspace=self._samSqwWs)
            # Scale so that elastic line has Y-values ~ 1
            if self._normalizeToFirst:
                self._ScaleY(self._samSqwWs)

            # Transform the vertical axis (Q) to point data
            # Q-values are in X-axis now
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            # from histo to point
            sapi.ConvertToPointData(InputWorkspace=self._samSqwWs,
                                    OutputWorkspace=self._samSqwWs)
            # Q-values back to vertical axis
            sapi.Transpose(InputWorkspace=self._samSqwWs,
                           OutputWorkspace=self._samSqwWs)
            self.serialize_in_log(self._samSqwWs)  # store the call
            # Output Dave and Nexus files
            extension = "_divided.dat" if self._doNorm else ".dat"
            dave_grp_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveDaveGrp(Filename=dave_grp_filename,
                             InputWorkspace=self._samSqwWs,
                             ToMicroEV=True)
            extension = "_divided_sqw.nxs" if self._doNorm else "_sqw.nxs"
            processed_filename = self._makeRunName(self._samWsRun, False) +\
                extension
            sapi.SaveNexus(Filename=processed_filename,
                           InputWorkspace=self._samSqwWs)

            # additional output
            if self.getProperty("OutputSusceptibility").value:
                temperature = mtd[self._samSqwWs].getRun().\
                    getProperty(TEMPERATURE_SENSOR).getStatistics().mean
                samXqsWs = self._samSqwWs.replace("sqw", "Xqw")
                sapi.ApplyDetailedBalance(InputWorkspace=self._samSqwWs,
                                          OutputWorkspace=samXqsWs,
                                          Temperature=str(temperature))
                sapi.ConvertUnits(InputWorkspace=samXqsWs,
                                  OutputWorkspace=samXqsWs,
                                  Target="DeltaE_inFrequency",
                                  Emode="Indirect")
                self.serialize_in_log(samXqsWs)
                susceptibility_filename = processed_filename.replace("sqw", "Xqw")
                sapi.SaveNexus(Filename=susceptibility_filename,
                               InputWorkspace=samXqsWs)

        if not self._debugMode:
            sapi.DeleteWorkspace("BASIS_MASK")  # delete the mask
            if self._doNorm and bool(norm_runs):
                sapi.DeleteWorkspace("BASIS_NORM_MASK")  # delete vanadium mask
                sapi.DeleteWorkspace(self._normWs)  # Delete vanadium S(Q)
                if self._normalizationType == "by Q slice":
                    sapi.DeleteWorkspace(normWs)  # Delete vanadium events file
            if self.getProperty("ExcludeTimeSegment").value:
                sapi.DeleteWorkspace('splitter')
                [sapi.DeleteWorkspace(name) for name in
                 ('splitted_unfiltered', 'TOFCorrectWS') if
                 AnalysisDataService.doesExist(name)]
Example #16
    def _plotTimeCounts(self, wksp):
        """ Plot time/counts
        """
        import datetime
        # Rebin events by pulse time
        try:
            # Get run start
            if wksp.getRun().hasProperty("run_start"):
                runstart = wksp.getRun().getProperty("run_start").value
            elif wksp.getRun().hasProperty("proton_charge"):
                runstart = wksp.getRun().getProperty("proton_charge").times[0]
            else:
                runstart = wksp.getRun().getProperty("start_time").value

            # get run stop
            if wksp.getRun().hasProperty("proton_charge"):
                runstop = wksp.getRun().getProperty("proton_charge").times[-1]
                runstop = str(runstop).split(".")[0].strip()
                tf = datetime.datetime.strptime(runstop, "%Y-%m-%dT%H:%M:%S")
            else:
                last_pulse = wksp.getPulseTimeMax().toISO8601String()
                tf = datetime.datetime.strptime(last_pulse[:19],
                                                "%Y-%m-%dT%H:%M:%S")
                tf += datetime.timedelta(0, wksp.getTofMax() / 1000000)

            runstart = str(runstart).split(".")[0].strip()

            t0 = datetime.datetime.strptime(runstart, "%Y-%m-%dT%H:%M:%S")

            # Calculate
            dt = tf - t0
            timeduration = dt.days * 3600 * 24 + dt.seconds
            timeres = float(timeduration) / MAXTIMEBINSIZE
            if timeres < 1.0:
                timeres = 1.0

            sumwsname = '_Summed_{}'.format(wksp)
            if not AnalysisDataService.doesExist(sumwsname):
                sumws = api.SumSpectra(InputWorkspace=wksp,
                                       OutputWorkspace=sumwsname)
                sumws = api.RebinByPulseTimes(InputWorkspace=sumws,
                                              OutputWorkspace=sumwsname,
                                              Params='{}'.format(timeres))
                sumws = api.ConvertToPointData(InputWorkspace=sumws,
                                               OutputWorkspace=sumwsname)
            else:
                sumws = AnalysisDataService.retrieve(sumwsname)
        except RuntimeError as e:
            return str(e)

        vecx = sumws.readX(0)
        vecy = sumws.readY(0)

        # If there is only one X bin in the summed workspace, we have an event
        # file without pulse information; in that case use the original
        # workspace time limits
        if len(vecx) == 1:
            xmin = min(wksp.readX(0)) / 1000000
            xmax = max(wksp.readX(0)) / 1000000
        else:
            xmin = min(vecx)
            xmax = max(vecx)

        ymin = min(vecy)
        ymax = max(vecy)

        # Reset graph
        self.ui.mainplot.set_xlim(xmin, xmax)
        self.ui.mainplot.set_ylim(ymin, ymax)

        self.ui.mainplot.set_xlabel('Time (seconds)', fontsize=13)
        self.ui.mainplot.set_ylabel('Counts', fontsize=13)

        # Set up main line
        setp(self.mainline, xdata=vecx, ydata=vecy)

        # Reset slide
        newslidery = [min(vecy), max(vecy)]

        newleftx = xmin + (xmax - xmin) * self._leftSlideValue * 0.01
        setp(self.leftslideline, xdata=[newleftx, newleftx], ydata=newslidery)

        newrightx = xmin + (xmax - xmin) * self._rightSlideValue * 0.01
        setp(self.rightslideline,
             xdata=[newrightx, newrightx],
             ydata=newslidery)
        self.canvas.draw()