Example #1
def convert_to_y_space_and_symmetrise(ws_name,mass):
    # phenomenological rule of thumb to define the y-range for a given mass
    max_Y = np.ceil(2.5*mass+27)
    rebin_parameters = str(-max_Y)+","+str(2.*max_Y/120)+","+str(max_Y)
    # converting to y-space, rebinning, and defining a normalisation matrix to take into account the kinetic cut-off
    sapi.ConvertToYSpace(InputWorkspace=ws_name,Mass=mass,OutputWorkspace=ws_name+"_JoY",QWorkspace=ws_name+"_Q")
    ws = sapi.Rebin(InputWorkspace=ws_name+"_JoY", Params=rebin_parameters, FullBinsOnly=True, OutputWorkspace=ws_name+"_JoY")
    tmp = sapi.CloneWorkspace(InputWorkspace=ws_name+"_JoY")
    for j in range(tmp.getNumberHistograms()):
        for k in range(tmp.blocksize()):
            tmp.dataE(j)[k] = 0.
            if np.isnan(tmp.dataY(j)[k]):
                ws.dataY(j)[k] = 0.
                tmp.dataY(j)[k] = 0.
            if tmp.dataY(j)[k] != 0:
                tmp.dataY(j)[k] = 1.
    tmp = sapi.SumSpectra('tmp')
    sapi.SumSpectra(InputWorkspace=ws_name+"_JoY", OutputWorkspace=ws_name+"_JoY_sum")
    sapi.Divide(LHSWorkspace=ws_name+"_JoY_sum", RHSWorkspace="tmp", OutputWorkspace=ws_name+"_JoY_sum")
    # rewriting the temporary workspaces ws and tmp
    ws = sapi.mtd[ws_name+"_JoY_sum"]
    tmp = sapi.CloneWorkspace(InputWorkspace=ws_name+"_JoY_sum")
    for k in range(tmp.blocksize()):
        tmp.dataE(0)[k] = (ws.dataE(0)[k] + ws.dataE(0)[ws.blocksize()-1-k]) / 2.
        tmp.dataY(0)[k] = (ws.dataY(0)[k] + ws.dataY(0)[ws.blocksize()-1-k]) / 2.
    sapi.RenameWorkspace(InputWorkspace="tmp", OutputWorkspace=ws_name+"_JoY_sym")
    normalise_workspace(ws_name+"_JoY_sym")
    return max_Y
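A minimal usage sketch for the routine above. The workspace name and the mass loop are illustrative, and normalise_workspace is assumed to be defined elsewhere in the same script:

# hypothetical driver, assuming a TOF workspace named "vesuvio_data" is already in the ADS
for mass in [1.0079, 27.0]:  # e.g. hydrogen and aluminium, in a.m.u.
    max_Y = convert_to_y_space_and_symmetrise("vesuvio_data", mass)
    print("symmetrised J(y) for mass {} computed on [-{}, {}]".format(mass, max_Y, max_Y))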
Example #2
def _generate_grouped_ts_pdf(focused_ws, q_lims):
    focused_ws = mantid.ConvertUnits(InputWorkspace=focused_ws,
                                     Target="MomentumTransfer",
                                     EMode='Elastic')
    min_x = np.inf
    max_x = -np.inf
    num_x = -np.inf
    for ws in focused_ws:
        x_data = ws.dataX(0)
        min_x = min(np.min(x_data), min_x)
        max_x = max(np.max(x_data), max_x)
        num_x = max(x_data.size, num_x)
    binning = [min_x, (max_x - min_x) / num_x, max_x]
    focused_ws = mantid.Rebin(InputWorkspace=focused_ws, Params=binning)
    focused_data_combined = mantid.ConjoinSpectra(InputWorkspaces=focused_ws)
    mantid.MatchSpectra(InputWorkspace=focused_data_combined,
                        OutputWorkspace=focused_data_combined,
                        ReferenceSpectrum=5)
    if isinstance(q_lims, str):
        q_min = []
        q_max = []
        try:
            with open(q_lims, 'r') as f:
                line_list = [line.rstrip('\n') for line in f]
                for line in line_list[1:]:
                    value_list = line.split()
                    q_min.append(float(value_list[2]))
                    q_max.append(float(value_list[3]))
            q_min = np.array(q_min)
            q_max = np.array(q_max)
        except IOError:
            raise RuntimeError("q_lims is not valid")
    elif isinstance(q_lims, (list, np.ndarray)):
        q_min = q_lims[0, :]
        q_max = q_lims[1, :]
    else:
        raise RuntimeError("q_lims is not valid")
    bin_width = np.inf
    for i in range(q_min.size):
        pdf_x_array = focused_data_combined.readX(i)
        # snap each q limit onto an actual bin boundary and track the finest bin width
        q_min[i] = pdf_x_array[np.amin(np.where(pdf_x_array >= q_min[i]))]
        q_max[i] = pdf_x_array[np.amax(np.where(pdf_x_array <= q_max[i]))]
        bin_width = min(pdf_x_array[1] - pdf_x_array[0], bin_width)
    focused_data_combined = mantid.CropWorkspaceRagged(
        InputWorkspace=focused_data_combined, XMin=q_min, XMax=q_max)
    focused_data_combined = mantid.Rebin(
        InputWorkspace=focused_data_combined,
        Params=[min(q_min), bin_width, max(q_max)])
    focused_data_combined = mantid.SumSpectra(
        InputWorkspace=focused_data_combined,
        WeightedSum=True,
        MultiplyBySpectra=False)
    pdf_output = mantid.PDFFourierTransform(
        InputWorkspace=focused_data_combined,
        InputSofQType="S(Q)",
        PDFType="G(r)",
        Filter=True)
    return pdf_output
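A hedged sketch of how this helper might be driven. The workspace name and q limits are illustrative; q_lims is passed as a 2xN array of per-spectrum lower/upper limits, matching the array branch above:

# illustrative call, assuming 'focused' is a focused workspace group in the ADS
q_limits = np.array([[2.0, 2.5, 3.0, 2.5, 2.0],
                     [18.0, 19.0, 20.0, 19.0, 18.0]])
pdf_ws = _generate_grouped_ts_pdf('focused', q_limits)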
Example #3
def extract_roi(workspace, step=0.01, roi=[162, 175, 112, 145]):
    """
        Returns a spectrum (Counts/proton charge vs lambda) given a filename
        or run number and the lambda step size and the corner of the ROI.

        :param str workspace: Mantid workspace name
        :param float step: wavelength bin width for rebinning
        :param list roi: [x_min, x_max, y_min, y_max] pixels
    """
    _workspace = str(workspace)
    if mantid.mtd[_workspace].getRun()['gd_prtn_chrg'].value > 0:
        api.NormaliseByCurrent(InputWorkspace=_workspace,
                               OutputWorkspace=_workspace)
    api.ConvertUnits(InputWorkspace=_workspace,
                     Target='Wavelength',
                     OutputWorkspace=_workspace)
    api.Rebin(InputWorkspace=_workspace,
              Params=step,
              OutputWorkspace=_workspace)
    api.RefRoi(InputWorkspace=_workspace,
               NXPixel=304,
               NYPixel=256,
               SumPixels=True,
               XPixelMin=roi[0],
               XPixelMax=roi[1],
               YPixelMin=roi[2],
               YPixelMax=roi[3],
               IntegrateY=True,
               ConvertToQ=False,
               OutputWorkspace=_workspace)
    api.SumSpectra(InputWorkspace=_workspace, OutputWorkspace=_workspace)
    return _workspace
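An illustrative call, assuming a reflectometry run has already been loaded into the ADS under the given name (the 304x256 detector geometry is hard-coded above); the workspace name is hypothetical:

spectrum_ws = extract_roi('REF_L_123456', step=0.01, roi=[162, 175, 112, 145])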
Example #4
    def PyExec(self):
        # Input
        vana_input = self.getProperty("VanadiumWorkspaces").value
        bg_input = self.getProperty("BackgroundWorkspaces").value
        vana_workspaces = self._expand_groups(vana_input)
        bg_workspaces = self._expand_groups(bg_input)
        self.log().notice("Input Vanadium workspaces: " + str(vana_workspaces))
        self.log().notice("Input Background workspaces: " + str(bg_workspaces))

        # number of vanadium and background workspaces must match
        if len(vana_workspaces) != len(bg_workspaces):
            raise RuntimeError("Number of Vanadium and background workspaces doe not match!")

        # compare optional sample logs, throw warnings
        result = api.CompareSampleLogs(vana_workspaces+bg_workspaces, self.properties_to_compare, 5e-3)
        if result:
            self.log().warning("Following properties do not match: " + result)

        # split input workspaces to groups SF/NSF and detector angles
        deterota = self._get_detector_positions(vana_workspaces)
        sfvana, nsfvana = self._sort_workspaces(vana_workspaces, deterota)
        sfbg, nsfbg = self._sort_workspaces(bg_workspaces, deterota)

        # subtract background
        sfv = self._subtract_background(sfvana, sfbg, deterota)
        nsfv = self._subtract_background(nsfvana, nsfbg, deterota)
        total = self._sum_signal(sfv, nsfv, deterota)

        # compute vmean
        _mean_ws_ = api.Mean(",".join(list(total.values())))     # Mean takes string
        self.toremove.append(_mean_ws_.name())
        num = self._get_notmasked_detectors_number(_mean_ws_)
        if num == 0:
            self.cleanup(self.toremove)
            raise RuntimeError("All detectors are masked! Cannot compute coefficients.")
        _vana_mean_ = api.SumSpectra(_mean_ws_)/num
        self.toremove.append(_vana_mean_.name())

        # compute coefficients k_i = (VSF_i + VNSF_i)/Vmean
        outws_name = self.getPropertyValue("OutputWorkspace")
        # for only one detector position only one workspace will be created
        if len(deterota) == 1:
            api.Divide(list(total.values())[0], _vana_mean_, OutputWorkspace=outws_name)
        else:
            # for many detector positions group of workspaces will be created
            results = []
            for angle in deterota:
                wsname = outws_name + '_2theta' + str(angle)
                api.Divide(total[angle], _vana_mean_, OutputWorkspace=wsname)
                results.append(wsname)

            api.GroupWorkspaces(results, OutputWorkspace=outws_name)

        self.cleanup(self.toremove)
        outws = api.AnalysisDataService.retrieve(outws_name)
        self.setProperty("OutputWorkspace", outws)

        return
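If this PyExec belongs to a registered Python algorithm, it would be invoked through simpleapi roughly as below. The registered name is not shown in the snippet, so 'DNSComputeDetEffCorrCoefs' is an assumption; the property names come from the getProperty calls above, and the group names are illustrative:

# hypothetical invocation; algorithm name is assumed, workspace groups must exist in the ADS
coefs = api.DNSComputeDetEffCorrCoefs(VanadiumWorkspaces='vana_group',
                                      BackgroundWorkspaces='bg_group',
                                      OutputWorkspace='vana_coefs')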
Example #5
def calculate_mantid_resolutions(ws_name, mass):
    max_Y = np.ceil(2.5*mass+27)
    rebin_parameters = str(-max_Y)+","+str(2.*max_Y/240)+","+str(max_Y)  # twice as many bins as for the data
    ws = sapi.mtd[ws_name]
    for index in range(ws.getNumberHistograms()):
        sapi.VesuvioResolution(Workspace=ws, WorkspaceIndex=index, Mass=mass, OutputWorkspaceYSpace="tmp")
        tmp = sapi.Rebin("tmp", rebin_parameters)
        if index == 0:
            sapi.RenameWorkspace(tmp, "resolution")
        else:
            sapi.AppendSpectra("resolution", tmp, OutputWorkspace="resolution")
    sapi.SumSpectra(InputWorkspace="resolution", OutputWorkspace="resolution")
    normalise_workspace("resolution")
    safe_delete_ws(tmp)
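A short usage sketch following the same conventions as example #1; the workspace name is illustrative, and normalise_workspace and safe_delete_ws are assumed to be defined in the same script:

# hypothetical call: leaves a summed, normalised workspace named "resolution" in the ADS
calculate_mantid_resolutions("vesuvio_data", 1.0079)
res_ws = sapi.mtd["resolution"]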
Example #6
    def _plotTimeCounts(self, wksp):
        """ Plot time/counts
        """
        import datetime
        # Rebin events by pulse time
        try:
            # Get run start and run stop
            if wksp.getRun().hasProperty("run_start"):
                runstart = wksp.getRun().getProperty("run_start").value
            else:
                runstart = wksp.getRun().getProperty("proton_charge").times[0]
            runstop = wksp.getRun().getProperty("proton_charge").times[-1]

            runstart = str(runstart).split(".")[0].strip()
            runstop = str(runstop).split(".")[0].strip()

            t0 = datetime.datetime.strptime(runstart, "%Y-%m-%dT%H:%M:%S")
            tf = datetime.datetime.strptime(runstop, "%Y-%m-%dT%H:%M:%S")

            # Calculate elapsed time
            dt = tf - t0
            timeduration = dt.days * 3600 * 24 + dt.seconds

            timeres = float(timeduration) / MAXTIMEBINSIZE
            if timeres < 1.0:
                timeres = 1.0

            sumwsname = "_Summed_%s" % (str(wksp))
            if not AnalysisDataService.doesExist(sumwsname):
                sumws = api.SumSpectra(InputWorkspace=wksp,
                                       OutputWorkspace=sumwsname)
                sumws = api.RebinByPulseTimes(InputWorkspace=sumws,
                                              OutputWorkspace=sumwsname,
                                              Params="%f" % (timeres))
                sumws = api.ConvertToPointData(InputWorkspace=sumws,
                                               OutputWorkspace=sumwsname)
            else:
                sumws = AnalysisDataService.retrieve(sumwsname)
        except RuntimeError as e:
            return str(e)

        vecx = sumws.readX(0)
        vecy = sumws.readY(0)

        xmin = min(vecx)
        xmax = max(vecx)
        ymin = min(vecy)
        ymax = max(vecy)

        # Reset graph
        self.ui.mainplot.set_xlim(xmin, xmax)
        self.ui.mainplot.set_ylim(ymin, ymax)

        self.ui.mainplot.set_xlabel('Time (seconds)', fontsize=13)
        self.ui.mainplot.set_ylabel('Counts', fontsize=13)

        # Set up main line
        setp(self.mainline, xdata=vecx, ydata=vecy)

        # Reset slide
        newslidery = [min(vecy), max(vecy)]

        newleftx = xmin + (xmax - xmin) * self._leftSlideValue * 0.01
        setp(self.leftslideline, xdata=[newleftx, newleftx], ydata=newslidery)

        newrightx = xmin + (xmax - xmin) * self._rightSlideValue * 0.01
        setp(self.rightslideline,
             xdata=[newrightx, newrightx],
             ydata=newslidery)

        self.ui.graphicsView.draw()

        return
Example #7
def compare(
        pack="C25B/eightpack-bottom",
        nxspath="/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5",  #C60
        detIDs_npy='../C60-I_d/detIDs.npy',
        newIDF='./SEQUOIA_Definition.xml',
        dmin=2,
        dmax=11,
        dd=0.01,
        dvalues=None,
        tmin=0,
        tmax=2000):
    orig_ws = msa.LoadEventNexus(Filename=nxspath,
                                 FilterByTimeStart=tmin,
                                 FilterByTimeStop=tmax)

    ws = orig_ws
    instrument = ws.getInstrument()
    packnameandtype = pack
    packname, packtype = pack.split('/')
    pack = instrument.getComponentByName(packnameandtype)
    firstpixel = pack[0][0].getID()
    lasttube = pack[pack.nelements() - 1]
    lastpixel = lasttube[lasttube.nelements() - 1]
    lastpixel = lastpixel.getID()
    print "first and last pixel IDs:", firstpixel, lastpixel
    #
    #
    detIDs = list(np.load(detIDs_npy))
    startindex = detIDs.index(firstpixel)
    endindex = detIDs.index(lastpixel)
    print "first and last pixel indexes:", startindex, endindex
    del ws

    # # Old I(d)
    daxis = "%s,%s,%s" % (dmin, dd, dmax)
    I_d_0 = msa.ConvertUnits(InputWorkspace=orig_ws,
                             Target='dSpacing',
                             EMode='Elastic')
    I_d_0 = msa.Rebin(InputWorkspace=I_d_0, Params=daxis)
    pack_I_d_0 = msa.SumSpectra(InputWorkspace=I_d_0,
                                StartWorkspaceIndex=startindex,
                                EndWorkspaceIndex=endindex)
    xbb0 = pack_I_d_0.readX(0)
    y0 = pack_I_d_0.readY(0).copy()
    x0 = (xbb0[1:] + xbb0[:-1]) / 2
    msa.DeleteWorkspace(I_d_0)
    msa.DeleteWorkspace(pack_I_d_0)

    # # New I(d)
    msa.LoadInstrument(orig_ws, Filename=newIDF, RewriteSpectraMap=False)
    I_d_1 = msa.ConvertUnits(InputWorkspace=orig_ws,
                             Target='dSpacing',
                             EMode='Elastic')
    I_d_1 = msa.Rebin(InputWorkspace=I_d_1, Params=daxis)
    pack_I_d_1 = msa.SumSpectra(InputWorkspace=I_d_1,
                                StartWorkspaceIndex=startindex,
                                EndWorkspaceIndex=endindex)
    xbb1 = pack_I_d_1.readX(0)
    y1 = pack_I_d_1.readY(0).copy()
    x1 = (xbb1[1:] + xbb1[:-1]) / 2
    msa.DeleteWorkspace(I_d_1)
    msa.DeleteWorkspace(pack_I_d_1)
    msa.DeleteWorkspace(orig_ws)

    data = [x0, y0, x1, y1]
    np.save("%s-I_d.npy" % packname, data)
    plt.figure(figsize=(7, 4))
    plt.title("Pack %s" % packname)
    plt.plot(x0, y0, label='original')
    plt.plot(x1, y1, label='after loading new xml')
    for d in (dvalues or []):
        plt.axvline(x=d, linewidth=1, color='k')
    # plt.xlim(3,3.3)
    plt.legend(loc='upper left')
    outpng = '%s-I_d.png' % packname
    plt.savefig(outpng)
    return
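An illustrative call under the defaults above; the d-values, which just mark vertical lines on the comparison plot, are made up for the sketch:

# hypothetical Bragg d-spacings (Angstrom) to mark on the plot
compare(pack="C25B/eightpack-bottom", dvalues=[3.6, 5.0, 8.7], tmin=0, tmax=2000)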
Example #8
    def run_bilby_reduction(self):
        # Read input csv file and define / create a folder for the output data
        csv_files_to_reduce_list = mantid_api.FileFinder.getFullPath(
            self.current_reduction_settings[0]["csv_file_name"])
        reduced_files_path = self.setup_save_out_path(csv_files_to_reduce_list)

        # Wavelength binning
        binning_wavelength_ini_str = self.retrieve_reduction_settings(
            "binning_wavelength_ini",
            raise_exception=True,
            message="binning_wavelength_ini cannot be empty")

        binning_wavelength_ini = BilbyCustomFunctions_Reduction.read_convert_to_float(
            binning_wavelength_ini_str)
        binning_wavelength_ini_original = binning_wavelength_ini

        # WAVELENGTH RANGE FOR TRANSMISSION: the aim is to fit the transmission over the whole range and use only part of it for the data reduction;
        # it must be equal to or wider than binning_wavelength_ini
        binning_wavelength_transmission_str = self.current_reduction_settings[
            0]["binning_wavelength_transmission"]
        binning_wavelength_transmission = BilbyCustomFunctions_Reduction.read_convert_to_float(
            binning_wavelength_transmission_str)
        binning_wavelength_transmission_original = binning_wavelength_transmission

        # Check the wavelength ranges: the transmission binning range must be equal to or wider than the binning range for data reduction
        if (binning_wavelength_ini[0] < binning_wavelength_transmission[0]
            ) or (binning_wavelength_ini[2] >
                  binning_wavelength_transmission[2]):
            raise ValueError(
                "Range for transmission binning shall be equal or wider than the range for the"
                " sample wavelength binning (refer to line 94)")

        # Binning for Q
        binning_q_str = self.current_reduction_settings[0]["binning_q"]
        binning_q = BilbyCustomFunctions_Reduction.read_convert_to_float(
            binning_q_str)

        RadiusCut = self.retrieve_reduction_settings("RadiusCut", default=0.0)
        WaveCut = self.retrieve_reduction_settings("WaveCut", default=0.0)

        # Transmission fit parameters
        transmission_fit_ini = self.current_reduction_settings[0][
            "transmission_fit"]
        if (transmission_fit_ini != "Linear") and (
                transmission_fit_ini != "Log") and (transmission_fit_ini !=
                                                    "Polynomial"):
            raise ValueError("Check value of transmission_fit; it can be only"
                             " \"Linear\", \"Log\" or \"Polynomial\","
                             " first letter is mandatory capital")

        PolynomialOrder = self.current_reduction_settings[0]["PolynomialOrder"]

        # Wavelength interval: if reduction on wavelength intervals is needed
        wavelength_interval_input = self.current_reduction_settings[0][
            "wavelength_intervals"].lower()
        wavelength_intervals = BilbyCustomFunctions_Reduction.string_boolean(
            wavelength_interval_input)
        wavelength_intervals_original = wavelength_intervals
        wav_delta = 0.0  # set the value, needed for the "wavelengh_slices" function

        if self.reduce_2D:
            print(
                "Performing 2D reduction. The Q interval and number of points are taken into account;"
                " Q-binning intervals are ignored.")
            number_data_points_2D = float(
                self.retrieve_reduction_settings(
                    "2D_number_data_points",
                    raise_exception=True,
                    message="Number of points shall be given"))

            plot_2D = self.current_reduction_settings[0]["plot_2D"].lower()
            plot_2D = BilbyCustomFunctions_Reduction.string_boolean(plot_2D)
            binning_q[1] = (
                binning_q[0] + binning_q[2]
            ) / number_data_points_2D  # To replace deltaQ from the input file
        else:
            plot_2D = None

        ######################################
        # Calling function to read given csv file
        parameters = BilbyCustomFunctions_Reduction.files_list_reduce(
            csv_files_to_reduce_list)
        files_to_reduce = BilbyCustomFunctions_Reduction.files_to_reduce(
            parameters, self.index_files_to_reduce)
        if len(files_to_reduce) == 0:
            raise ValueError(
                'Please check index_files_to_reduce; chosen one does not exist'
            )

        # reduce requested files one by one
        for current_file in files_to_reduce:
            sam_file = current_file["Sample"] + '.tar'

            ws_sam, time_range = self.setup_time_range(current_file, sam_file)

            # To read the mode value: True - ToF; False - NVS; this will define some steps inside SANSDataProcessor
            try:
                external_mode = (ws_sam.run().getProperty("is_tof").value)
            except Exception:
                external_mode = True  # This is needed for old files, where the ToF/mono mode value has not been recorded

            # Internal frame source has been used during data collection; it is not always NVS only,
            # one can have both, NVS and choppers running for this mode
            if not external_mode:
                print(
                    "Internal frame source. Binning range is taken from the sample scattering data."
                )
                binning_wavelength_ini = (ws_sam.readX(0)[0],
                                          ws_sam.readX(0)[ws_sam.blocksize()] -
                                          ws_sam.readX(0)[0],
                                          ws_sam.readX(0)[ws_sam.blocksize()])
                binning_wavelength_transmission = binning_wavelength_ini
                if wavelength_intervals:
                    wavelength_intervals = False
                    print("NVS: monochromatic mode")
                    print(
                        "There is no point reducing monochromatic data over multiple wavelength intervals;"
                        " \"wavelength_intervals\" value changed to False.")
            else:
                # important for the case when NVS data is being analysed first,
                # ie to be able to come back to the whole range & wavelength slices, if needed
                binning_wavelength_ini = binning_wavelength_ini_original
                binning_wavelength_transmission = binning_wavelength_transmission_original
                wavelength_intervals = wavelength_intervals_original
                if wavelength_intervals:
                    wav_delta = float(
                        self.current_reduction_settings[0]["wav_delta"]
                    )  # no need to read if the previous is false

            # empty beam scattering in transmission mode
            ws_emp_file = current_file["T_EmptyBeam"] + '.tar'
            mantid_api.LoadBBY(
                ws_emp_file, OutputWorkspace='ws_emp'
            )  # Note that this is of course a transmission measurement - shall be long

            # transmission workspaces and masks
            transm_file = current_file["T_Sample"] + '.tar'
            ws_tranSam = mantid_api.LoadBBY(transm_file)
            ws_tranEmp = mantid_api.LoadBBY(
                ws_emp_file)  # empty beam for transmission
            transm_mask = current_file["mask_transmission"] + '.xml'
            ws_tranMsk = mantid_api.LoadMask('Bilby', transm_mask)

            sam_mask_file = current_file["mask"] + '.xml'
            ws_samMsk = mantid_api.LoadMask('Bilby', sam_mask_file)

            # scaling: attenuation
            att_pos = float(ws_tranSam.run().getProperty("att_pos").value)

            scale = BilbyCustomFunctions_Reduction.attenuation_correction(
                att_pos, self.data_before_May_2016)
            print("scale, aka attenuation factor {}".format(scale))

            thickness = current_file["thickness [cm]"]

            # Cd / Al masks shift
            if self.correct_tubes_shift:
                BilbyCustomFunctions_Reduction.correction_tubes_shift(
                    ws_sam, self.path_tube_shift_correction)

            if self.data_before_2016:
                BilbyCustomFunctions_Reduction.det_shift_before_2016(ws_sam)

                # Blocked beam
            if self.blocked_beam:
                ws_blocked_beam = current_file["BlockedBeam"] + '.tar'
                ws_blk = mantid_api.LoadBBY(ws_blocked_beam)
                if self.correct_tubes_shift:
                    BilbyCustomFunctions_Reduction.correction_tubes_shift(
                        ws_blk, self.path_tube_shift_correction)
            else:
                ws_blk = None

            # Detector sensitivity
            ws_sen = None

            # empty beam normalisation
            mantid_api.MaskDetectors(
                "ws_emp", MaskedWorkspace=ws_tranMsk
            )  # does not have to be ws_tranMsk, can be a specific mask
            mantid_api.ConvertUnits("ws_emp",
                                    Target="Wavelength",
                                    OutputWorkspace='ws_emp')

            # wavelength intervals: building the binning_wavelength list
            binning_wavelength, n = BilbyCustomFunctions_Reduction.wavelengh_slices(
                wavelength_intervals, binning_wavelength_ini, wav_delta)

            # By now we know how many wavelengths bins we have, so shall run Q1D n times
            # -- Processing --
            suffix = '_' + current_file[
                "suffix"]  # is the same for all wavelength intervals
            suffix_2 = current_file["additional_description"]
            if suffix_2 != '':
                suffix += '_' + suffix_2

            plot1Dgraph = None

            for i in range(n):
                ws_emp_partial = mantid_api.Rebin("ws_emp",
                                                  Params=binning_wavelength[i])
                ws_emp_partial = mantid_api.SumSpectra(ws_emp_partial,
                                                       IncludeMonitors=False)

                base_output_name = self.get_base_output_name(
                    i, sam_file, binning_wavelength, time_range, suffix)

                # needed here, otherwise SANSDataProcessor replaces it with the "transmission_fit" string
                transmission_fit = transmission_fit_ini

                output_workspace, transmission_fit = mantid_api.BilbySANSDataProcessor(
                    InputWorkspace=ws_sam,
                    InputMaskingWorkspace=ws_samMsk,
                    BlockedBeamWorkspace=ws_blk,
                    EmptyBeamSpectrumShapeWorkspace=ws_emp_partial,
                    SensitivityCorrectionMatrix=ws_sen,
                    TransmissionWorkspace=ws_tranSam,
                    TransmissionEmptyBeamWorkspace=ws_tranEmp,
                    TransmissionMaskingWorkspace=ws_tranMsk,
                    ScalingFactor=scale,
                    SampleThickness=thickness,
                    FitMethod=transmission_fit,
                    PolynomialOrder=PolynomialOrder,
                    BinningWavelength=binning_wavelength[i],
                    BinningWavelengthTransm=binning_wavelength_transmission,
                    BinningQ=binning_q,
                    TimeMode=external_mode,
                    AccountForGravity=self.account_for_gravity,
                    SolidAngleWeighting=self.solid_angle_weighting,
                    RadiusCut=RadiusCut,
                    WaveCut=WaveCut,
                    WideAngleCorrection=self.wide_angle_correction,
                    Reduce2D=self.reduce_2D,
                    OutputWorkspace=base_output_name)

                if not self.reduce_2D:
                    BilbyCustomFunctions_Reduction.strip_NaNs(
                        output_workspace, base_output_name)

                self.plot_graphs(i, reduced_files_path, base_output_name,
                                 output_workspace, plot_2D, plot1Dgraph)
                self.save_out_files(reduced_files_path, base_output_name,
                                    output_workspace)

        return output_workspace, transmission_fit
Example #9
def generate_plots(run_number, workspace, options=None):
    """
        Generate diagnostics plots
    """
    n_x = int(
        workspace.getInstrument().getNumberParameter("number-of-x-pixels")[0])
    n_y = int(
        workspace.getInstrument().getNumberParameter("number-of-y-pixels")[0])

    # X-TOF plot
    tof_min = workspace.getTofMin()
    tof_max = workspace.getTofMax()
    workspace = api.Rebin(workspace, params="%s, 50, %s" % (tof_min, tof_max))

    direct_summed = api.RefRoi(InputWorkspace=workspace,
                               IntegrateY=True,
                               NXPixel=n_x,
                               NYPixel=n_y,
                               ConvertToQ=False,
                               YPixelMin=0,
                               YPixelMax=n_y,
                               OutputWorkspace="direct_summed")
    signal = np.log10(direct_summed.extractY())
    tof_axis = direct_summed.extractX()[0] / 1000.0

    x_tof_plot = _plot2d(z=signal,
                         y=np.arange(signal.shape[0]),
                         x=tof_axis,
                         x_label="TOF (ms)",
                         y_label="X pixel",
                         title="r%s" % run_number)

    # X-Y plot
    _workspace = api.Integration(workspace)
    signal = np.log10(_workspace.extractY())
    z = np.reshape(signal, (n_x, n_y))
    xy_plot = _plot2d(z=z.T,
                      x=np.arange(n_x),
                      y=np.arange(n_y),
                      title="r%s" % run_number)

    # Count per X pixel
    integrated = api.Integration(direct_summed)
    integrated = api.Transpose(integrated)
    signal_y = integrated.readY(0)
    signal_x = np.arange(len(signal_y))
    peak_pixels = _plot1d(signal_x,
                          signal_y,
                          x_label="X pixel",
                          y_label="Counts",
                          title="r%s" % run_number)

    # TOF distribution
    workspace = api.SumSpectra(workspace)
    signal_x = workspace.readX(0) / 1000.0
    signal_y = workspace.readY(0)
    tof_dist = _plot1d(signal_x,
                       signal_y,
                       x_range=None,
                       x_label="TOF (ms)",
                       y_label="Counts",
                       title="r%s" % run_number)

    return [xy_plot, x_tof_plot, peak_pixels, tof_dist]
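A hedged driver for the plotting helper, assuming 'api' is mantid.simpleapi and that the _plot1d/_plot2d helpers are defined alongside; the file name and run number are illustrative:

ws = api.LoadEventNexus("REF_L_123456.nxs.h5")  # hypothetical event file
plots = generate_plots(123456, ws)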
Example #10
#!/usr/bin/env python

from mantid import simpleapi as msa
import os
from matplotlib import pyplot as plt

orig_I_d = msa.Load(os.path.expanduser("~/tmp/130273-I_d.nxs"))
I_d = msa.Load(
    os.path.expanduser("~/tmp/130273-newxml-using-both-difc-and-L2-I_d.nxs"))

start_index = 2048

orig_I_d_s = msa.SumSpectra(InputWorkspace=orig_I_d,
                            StartWorkspaceIndex=start_index,
                            EndWorkspaceIndex=start_index + 1023)
I_d_s = msa.SumSpectra(InputWorkspace=I_d,
                       StartWorkspaceIndex=start_index,
                       EndWorkspaceIndex=start_index + 1023)

orig_d_bb = orig_I_d_s.readX(0)
orig_I = orig_I_d_s.readY(0)
d_bb = I_d_s.readX(0)
I = I_d_s.readY(0)
plt.figure(figsize=(7, 4))
plt.plot(orig_d_bb[:-1], orig_I, label='original')
plt.plot(d_bb[:-1], I, label='calibrated')
# plt.xlim(3,3.3)
plt.legend(loc='upper left')
plt.show()
Example #11
def get_I_d(nxs_files,
            init_IDF,
            outdir,
            packs,
            dt=1000.,
            d_axis=(2., 11., 0.02),
            Npixels_per_pack=1024):
    """nxs_files: paths of calibration nxs files
    init_IDF: initial IDF path
    outdir: output directory
    packs: list of pack names, e.g. C26B/eightpack-bottom
    dt: time step for loading files. too large will need too much memory
    d_axis: dmin, dmax, delta_d. e.g. 2., 11., 0.02
    Npixels_per_pack: number of pixels per pack

    Output files:
    * difc-nominal.npy
    * detIDs.npy
    * I_d-xbb.npy
    * I_d-y-PACKNAME.npy
    * pack-PACKNAME.yaml

    NOTE:
    * Assumed that the difc array from CalculateDIFC is ordered according to the "spectrum list" in
      the mantid workspace. See function getDetIDs
    * Different combinations of nxs_files, init_IDF, d_axis should use different outdirs
    """
    if not os.path.exists(outdir): os.makedirs(outdir)
    # ## Compute nominal difc using first file in the list
    nxspath = nxs_files[0]
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0,
                            FilterByTimeStop=1)  # load just one second
    #
    msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
    import shutil
    shutil.copyfile(init_IDF, os.path.join(outdir, 'init_IDF.xml'))
    #
    difc = msa.CalculateDIFC(InputWorkspace=ws)
    difc = difc.extractY().flatten().copy()
    msa.DeleteWorkspace('difc')
    np.save(os.path.join(outdir, 'difc-nominal.npy'), difc)
    # IDs of all pixels
    detIDs = getDetIDs(ws)
    np.save(os.path.join(outdir, 'detIDs.npy'), detIDs)
    #
    # map pack name to (start_pixelID, stop_pixelID)
    pack2pixelID_start_stop = dict()
    for name in packs:
        pack2pixelID_start_stop[name] = getFirstLastPixelIDs(ws, name)
        continue
    # clean up
    msa.DeleteWorkspace('ws')

    runtimes = dict()
    for f in nxs_files:
        runtimes[f] = getRunTime(f)
    print "* run times:", runtimes

    dmin, dmax, delta_d = d_axis
    Nd = int((dmax - dmin) / delta_d)
    print "* Number of d bins:", Nd

    #
    Npacks = len(packs)

    y_matrix = np.zeros((Npacks, Npixels_per_pack, Nd))
    xbb_saved = None
    for nxsfile in nxs_files:
        print "* Working on", nxsfile
        t_total = runtimes[nxsfile]
        for tstart in np.arange(0, t_total - dt, dt):
            print "* tstart", tstart
            tend = min(t_total - 1, tstart + dt)
            ws = msa.LoadEventNexus(nxsfile,
                                    FilterByTimeStart=tstart,
                                    FilterByTimeStop=tend)
            msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
            I_d = msa.ConvertUnits(InputWorkspace=ws,
                                   Target='dSpacing',
                                   EMode='Elastic')
            I_d = msa.Rebin(InputWorkspace=I_d,
                            Params='%s,%s,%s' % (dmin, delta_d, dmax))

            # loop over packs
            for ipack, packname in enumerate(packs):
                firstpixel, lastpixel = pack2pixelID_start_stop[packname]
                startindex = detIDs.index(firstpixel)
                endindex = detIDs.index(lastpixel)
                print "array indexes of first and last pixel", startindex, endindex

                y_pack = y_matrix[ipack]
                # loop over pixels in the pack
                for i, pixelindex in enumerate(range(startindex,
                                                     endindex + 1)):
                    I_d_pixel = msa.SumSpectra(InputWorkspace=I_d,
                                               StartWorkspaceIndex=pixelindex,
                                               EndWorkspaceIndex=pixelindex)
                    xbb = I_d_pixel.readX(0)
                    if xbb_saved is None: xbb_saved = np.array(xbb, copy=True)
                    y = I_d_pixel.readY(0)
                    y_pack[i] += y
                    msa.DeleteWorkspace('I_d_pixel')
                    continue
                continue

            msa.DeleteWorkspaces(['ws', 'I_d'])
            continue
        continue

    xbb = np.arange(dmin, dmax + delta_d / 2., delta_d)
    np.save(os.path.join(outdir, "I_d-xbb.npy"), xbb)
    # for debugging
    np.save(os.path.join(outdir, "I_d-y_matrix.npy"), y_matrix)

    for ipack, packname in enumerate(packs):
        y_pack = y_matrix[ipack]
        packname1 = packname.split('/')[0]  # "C25T"
        # save y values of I(d) for the pack
        np.save(os.path.join(outdir, "I_d-y-%s.npy" % packname1), y_pack)
        # save pack info
        first, last = pack2pixelID_start_stop[packname]
        pixelIDs = dict(first=first, last=last)
        pack_info = dict(pixelIDs=pixelIDs)
        dumpYaml(pack_info, os.path.join(outdir, 'pack-%s.yaml' % packname1))
        continue
    return
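A sketch of a driver for get_I_d, reusing the nexus path and IDF that appear in example #7; the output directory is hypothetical, and getDetIDs, getFirstLastPixelIDs, getRunTime and dumpYaml are assumed to be defined in the same module:

get_I_d(nxs_files=["/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5"],
        init_IDF="./SEQUOIA_Definition.xml",
        outdir="./calib-I_d",
        packs=["C25B/eightpack-bottom"],
        dt=1000.,
        d_axis=(2., 11., 0.02))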
Example #13
    def _plotTimeCounts(self, wksp):
        """ Plot time/counts
        """
        import datetime
        # Rebin events by pulse time
        try:
            # Get run start
            if wksp.getRun().hasProperty("run_start"):
                runstart = wksp.getRun().getProperty("run_start").value
            elif wksp.getRun().hasProperty("proton_charge"):
                runstart = wksp.getRun().getProperty("proton_charge").times[0]
            else:
                runstart = wksp.getRun().getProperty("start_time").value

            # get run stop
            if wksp.getRun().hasProperty("proton_charge"):
                runstop = wksp.getRun().getProperty("proton_charge").times[-1]
                runstop = str(runstop).split(".")[0].strip()
                tf = datetime.datetime.strptime(runstop, "%Y-%m-%dT%H:%M:%S")
            else:
                last_pulse = wksp.getPulseTimeMax().toISO8601String()
                tf = datetime.datetime.strptime(last_pulse[:19],
                                                "%Y-%m-%dT%H:%M:%S")
                tf += datetime.timedelta(0, wksp.getTofMax() / 1000000)

            runstart = str(runstart).split(".")[0].strip()

            t0 = datetime.datetime.strptime(runstart, "%Y-%m-%dT%H:%M:%S")

            # Calculate
            dt = tf - t0
            timeduration = dt.days * 3600 * 24 + dt.seconds
            timeres = float(timeduration) / MAXTIMEBINSIZE
            if timeres < 1.0:
                timeres = 1.0

            sumwsname = '_Summed_{}'.format(wksp)
            if not AnalysisDataService.doesExist(sumwsname):
                sumws = api.SumSpectra(InputWorkspace=wksp,
                                       OutputWorkspace=sumwsname)
                sumws = api.RebinByPulseTimes(InputWorkspace=sumws,
                                              OutputWorkspace=sumwsname,
                                              Params='{}'.format(timeres))
                sumws = api.ConvertToPointData(InputWorkspace=sumws,
                                               OutputWorkspace=sumwsname)
            else:
                sumws = AnalysisDataService.retrieve(sumwsname)
        except RuntimeError as e:
            return str(e)

        vecx = sumws.readX(0)
        vecy = sumws.readY(0)

        # if there is only one x bin in the summed workspace, that means we have an event file without pulse
        # information, and in this case we use the original workspace time limits
        if len(vecx) == 1:
            xmin = min(wksp.readX(0)) / 1000000
            xmax = max(wksp.readX(0)) / 1000000
        else:
            xmin = min(vecx)
            xmax = max(vecx)

        ymin = min(vecy)
        ymax = max(vecy)

        # Reset graph
        self.ui.mainplot.set_xlim(xmin, xmax)
        self.ui.mainplot.set_ylim(ymin, ymax)

        self.ui.mainplot.set_xlabel('Time (seconds)', fontsize=13)
        self.ui.mainplot.set_ylabel('Counts', fontsize=13)

        # Set up main line
        setp(self.mainline, xdata=vecx, ydata=vecy)

        # Reset slide
        newslidery = [min(vecy), max(vecy)]

        newleftx = xmin + (xmax - xmin) * self._leftSlideValue * 0.01
        setp(self.leftslideline, xdata=[newleftx, newleftx], ydata=newslidery)

        newrightx = xmin + (xmax - xmin) * self._rightSlideValue * 0.01
        setp(self.rightslideline,
             xdata=[newrightx, newrightx],
             ydata=newslidery)
        self.canvas.draw()
Example #14
def get_I_tof(nxs_files,
              outdir,
              packs,
              dt=1000.,
              tofaxis=None,
              Npixels_per_pack=1024):
    """nxs_files: paths of calibration nxs files
    outdir: output directory
    packs: list of pack names, e.g. C26B/eightpack-bottom
    dt: time step for loading files. too large will need too much memory
    Npixels_per_pack: number of pixels per pack
    tofaxis: tofmin, tofmax, dtof

    Output files:
    * detIDs.npy
    * I_tof-xbb.npy
    * I_tof-y-PACKNAME.npy
    * pack-PACKNAME.yaml

    NOTE:
    * Different combinations of nxs_files and tofaxis should use different outdirs
    """
    tofmin, tofmax, dtof = tofaxis
    if not os.path.exists(outdir): os.makedirs(outdir)
    # ## Use the first file in the list to get detector IDs and the TOF axis
    nxspath = nxs_files[0]
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0,
                            FilterByTimeStop=1)  # load just one second
    #
    # IDs of all pixels
    detIDs = getDetIDs(ws)
    np.save(os.path.join(outdir, 'detIDs.npy'), detIDs)
    #
    # map pack name to (start_pixelID, stop_pixelID)
    pack2pixelID_start_stop = dict()
    for name in packs:
        pack2pixelID_start_stop[name] = getFirstLastPixelIDs(ws, name)
        continue
    # get tof axis
    I_tof = msa.Rebin(InputWorkspace=ws,
                      Params='%s,%s,%s' % (tofmin, dtof, tofmax))
    I_tof = msa.SumSpectra(InputWorkspace=I_tof)
    xbb = np.array(I_tof.readX(0), copy=True)
    print(xbb[0], xbb[-1], len(xbb))
    # clean up
    msa.DeleteWorkspaces(['ws', 'I_tof'])

    runtimes = dict()
    for f in nxs_files:
        runtimes[f] = getRunTime(f)
    print "* run times:", runtimes

    Ntof = len(xbb) - 1
    print "* Number of TOF bins:", Ntof

    #
    Npacks = len(packs)

    y_matrix = np.zeros((Npacks, Npixels_per_pack, Ntof))
    for nxsfile in nxs_files:
        print "* Working on", nxsfile
        t_total = runtimes[nxsfile]
        for tstart in np.arange(0, t_total - dt, dt):
            print "* tstart", tstart
            tend = min(t_total - 1, tstart + dt)
            ws = msa.LoadEventNexus(nxsfile,
                                    FilterByTimeStart=tstart,
                                    FilterByTimeStop=tend)
            I_tof = msa.Rebin(InputWorkspace=ws,
                              Params='%s,%s,%s' % (tofmin, dtof, tofmax))

            # loop over packs
            for ipack, packname in enumerate(packs):
                firstpixel, lastpixel = pack2pixelID_start_stop[packname]
                startindex = detIDs.index(firstpixel)
                endindex = detIDs.index(lastpixel)
                print "array indexes of first and last pixel", startindex, endindex

                y_pack = y_matrix[ipack]
                # loop over pixels in the pack
                for i, pixelindex in enumerate(range(startindex,
                                                     endindex + 1)):
                    I_tof_pixel = msa.SumSpectra(
                        InputWorkspace=I_tof,
                        StartWorkspaceIndex=pixelindex,
                        EndWorkspaceIndex=pixelindex)
                    y = I_tof_pixel.readY(0)
                    y_pack[i] += y
                    msa.DeleteWorkspace('I_tof_pixel')
                    continue
                continue

            msa.DeleteWorkspaces(['ws', 'I_tof'])
            continue
        continue

    #xbb = np.arange(tofmin, tofmax+dtof/2., dtof)
    # print xbb
    np.save(os.path.join(outdir, "I_tof-xbb.npy"), xbb)
    # for debugging
    np.save(os.path.join(outdir, "I_tof-y_matrix.npy"), y_matrix)

    for ipack, packname in enumerate(packs):
        y_pack = y_matrix[ipack]
        packname1 = packname.split('/')[0]  # "C25T"
        # save y values of I(tof) for the pack
        np.save(os.path.join(outdir, "I_tof-y-%s.npy" % packname1), y_pack)
        # save pack info
        first, last = pack2pixelID_start_stop[packname]
        pixelIDs = dict(first=first, last=last)
        pack_info = dict(pixelIDs=pixelIDs)
        dumpYaml(pack_info, os.path.join(outdir, 'pack-%s.yaml' % packname1))
        continue
    return
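A matching sketch for the TOF variant; the TOF axis (in microseconds) and output directory are illustrative, and the helper functions are assumed as in example #11:

get_I_tof(nxs_files=["/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5"],
          outdir="./calib-I_tof",
          packs=["C25B/eightpack-bottom"],
          dt=1000.,
          tofaxis=(0., 16000., 10.))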