Example #1
 def __init__(self, filename=None, output_wks=None, metadata_only=False):
     self.filename = filename
     self.output_wks = output_wks
     self.metadata_only = metadata_only
     self.workspace = api.LoadEventNexus(Filename=self.filename,
                                         OutputWorkspace=self.output_wks,
                                         MetaDataOnly=self.metadata_only)
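
A minimal usage sketch for the constructor above, assuming it belongs to a loader class; the class name EventNexusLoader and the file name are hypothetical:

loader = EventNexusLoader(filename='CNCS_51936_event.nxs',
                          output_wks='cncs_meta',
                          metadata_only=True)
run = loader.workspace.getRun()  # logs are available even without event data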
Example #2
    def _sum_runs(self, run_set, sam_ws, mon_ws, extra_ext=None):
        """
        Aggregate the set of runs
        @param run_set: list of run numbers
        @param sam_ws:  name of aggregate workspace for the sample
        @param mon_ws:  name of aggregate workspace for the monitors
        @param extra_ext: string to be added to the temporary workspaces
        """
        for run in run_set:
            ws_name = self._make_run_name(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + '_monitors'
            run_file = self._make_run_file(run)

            sapi.LoadEventNexus(Filename=run_file,
                                OutputWorkspace=ws_name,
                                BankName=self._reflection['banks'])
            if str(run) + ':' in self.getProperty('ExcludeTimeSegment').value:
                self._filterEvents(str(run), ws_name)

            if self._MonNorm:
                sapi.LoadNexusMonitors(Filename=run_file,
                                       OutputWorkspace=mon_ws_name)

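            # the first run initializes sam_ws itself; subsequent runs are
            # added onto the aggregate and their temporary workspaces deleted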
            if sam_ws != ws_name:
                sapi.Plus(LHSWorkspace=sam_ws,
                          RHSWorkspace=ws_name,
                          OutputWorkspace=sam_ws)
                sapi.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and self._MonNorm:
                sapi.Plus(LHSWorkspace=mon_ws,
                          RHSWorkspace=mon_ws_name,
                          OutputWorkspace=mon_ws)
                sapi.DeleteWorkspace(mon_ws_name)
Example #3
    def load_single_run(self, run, name):
        r"""
        Find and load events.

        Applies event filtering if necessary.

        Parameters
        ----------
        run: str
            Run number
        name: str
            Name of the output EventWorkspace

        Returns
        -------
        EventWorkspace
        """

        kwargs = dict(Filename=self._make_run_file(run),
                      BankName=self._reflection['banks'],
                      OutputWorkspace=name)
        if str(run) + ':' in self.getProperty('RetainTimeSegment').value:
            kwargs.update(self._retainEvents(run))
        sapi.LoadEventNexus(**kwargs)
        if str(run) + ':' in self.getProperty('ExcludeTimeSegment').value:
            self._filterEvents(run, name)
        # the docstring promises the workspace back; fetch it from the ADS in
        # case event filtering replaced it under the same name
        return sapi.mtd[name]
Example #4
File: mantid_test.py  Project: yxqd/scipp
 def test_Workspace2D(self):
     # This is from the Mantid system-test data
     filename = 'CNCS_51936_event.nxs'
     eventWS = mantid.LoadEventNexus(filename)
     ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)
     d = mantidcompat.to_dataset(ws)
     print(d)
Example #5
    def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
        for run in run_set:
            ws_name = self._makeRunName(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + "_monitors"
            run_file = self._makeRunFile(run)
            # Reflection 311 is restricted to bank with name "bank2"
            api.LoadEventNexus(Filename=run_file,
                               BankName="bank2",
                               OutputWorkspace=ws_name)

            if not self._noMonNorm:
                api.LoadNexusMonitors(Filename=run_file,
                                      OutputWorkspace=mon_ws_name)
            if sam_ws != ws_name:
                api.Plus(LHSWorkspace=sam_ws,
                         RHSWorkspace=ws_name,
                         OutputWorkspace=sam_ws)
                api.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and not self._noMonNorm:
                api.Plus(LHSWorkspace=mon_ws,
                         RHSWorkspace=mon_ws_name,
                         OutputWorkspace=mon_ws)
                api.DeleteWorkspace(mon_ws_name)
Example #6
 def load_data(self, file_path):
     r"""
     # type: (unicode) -> WorkspaceGroup
     @brief Load one or more data sets according to the needs ot the instrument.
     @details This function assumes that when loading more than one data file, the files are congruent and their
     events will be added together.
     @param file_path: absolute path to one or more data files. If more than one, paths should be concatenated
     with the plus symbol '+'.
     @returns WorkspaceGroup with any number of cross-sections
     """
     fp_instance = FilePath(file_path)
     xs_list = list()
     temp_workspace_root_name = ''.join(
         random.sample(string.ascii_letters, 12))  # random string of 12 characters
     workspace_root_name = fp_instance.run_numbers(
         string_representation='short')
     for path in fp_instance.single_paths:
         is_legacy = path.endswith(".nxs")
         if is_legacy or not USE_SLOW_FLIPPER_LOG:
             _path_xs_list = api.MRFilterCrossSections(
                 Filename=path,
                 PolState=self.pol_state,
                 AnaState=self.ana_state,
                 PolVeto=self.pol_veto,
                 AnaVeto=self.ana_veto,
                 CrossSectionWorkspaces="%s_entry" % temp_workspace_root_name)
             # Only keep good workspaces, and get rid of the rejected events
             path_xs_list = [
                 ws for ws in _path_xs_list
                 if ws.getRun()['cross_section_id'].value != 'unfiltered'
             ]
         else:
             ws = api.LoadEventNexus(Filename=path,
                                     OutputWorkspace="raw_events")
             path_xs_list = self.dummy_filter_cross_sections(
                 ws, name_prefix=temp_workspace_root_name)
         if len(xs_list) == 0:  # initialize xs_list with the cross sections of the first data file
             xs_list = path_xs_list
             for ws in xs_list:  # replace the temporary names with the run number(s)
                 name_new = str(ws).replace(temp_workspace_root_name,
                                            workspace_root_name)
                 api.RenameWorkspace(str(ws), name_new)
         else:
             for i, ws in enumerate(xs_list):
                 api.Plus(LHSWorkspace=str(ws),
                          RHSWorkspace=str(path_xs_list[i]),
                          OutputWorkspace=str(ws))
     # Insert a log indicating which run numbers contributed to this cross-section
     for ws in xs_list:
         api.AddSampleLog(
             Workspace=str(ws),
             LogName='run_numbers',
             LogText=fp_instance.run_numbers(string_representation='short'),
             LogType='String')
     return xs_list
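
A hedged usage sketch of the '+' convention described in the docstring; the loader class and file paths below are placeholders:

loader = SomeInstrument()  # stands in for whatever class defines load_data
xs_list = loader.load_data('/SNS/REF_M/IPTS-1234/nexus/REF_M_29160.nxs.h5'
                           '+/SNS/REF_M/IPTS-1234/nexus/REF_M_29161.nxs.h5')
for ws in xs_list:
    print(ws.getRun()['run_numbers'].value)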
Example #7
def getRunTime(p):
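    # a zero-length time filter loads (almost) no events; only the run logs
    # are needed to compute the duration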
    ws = msa.LoadEventNexus(Filename=p,
                            FilterByTimeStart=0,
                            FilterByTimeStop=0)
    run = ws.getRun()
    t = (run.endTime() - run.startTime()).total_seconds()
    msa.DeleteWorkspace('ws')
    return t
Example #8
def get_nominal_difc(nxspath, idfpath, outpath=None):
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0, FilterByTimeStop=1)
    msa.LoadInstrument(ws, Filename=idfpath, RewriteSpectraMap=False)
    difc = msa.CalculateDIFC(InputWorkspace=ws)
    difc = difc.extractY().flatten().copy()
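    # 'difc' in the ADS still names the workspace created by CalculateDIFC,
    # even though the Python variable now holds a numpy array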
    msa.DeleteWorkspace('difc')
    if outpath:
        np.save(outpath, difc)
    return difc
Example #9
 def test_reduce_with_dirst(self):
     """
          This will excercise a different path in looking for direct beams.
     """
     ws=api.LoadEventNexus(Filename="REF_M_29160")
     finder = DirectBeamFinder(ws)
     finder.data_dir = os.getcwd()
     finder.ar_dir = os.getcwd()
     finder.db_dir = os.getcwd()
     finder.search()
Example #10
 def setUpClass(cls):
     import mantid.simpleapi as mantid
     # This is from the Mantid system-test data
     filename = "CNCS_51936_event.nxs"
     # This needs OutputWorkspace specified, as it doesn't
     # pick up the name from the class variable name
     cls.base_event_ws = mantid.LoadEventNexus(
         scn.data.get_path(filename),
         OutputWorkspace="test_ws{}".format(__file__),
         SpectrumMax=200,
         StoreInADS=False)
Example #11
 def test_peaks(self):
     """
         REF_M_24949_event.nxs.md5: 214df921d4fa70ff5a33c4eb6f8284ad
         http://198.74.56.37/ftp/external-data/md5/%(hash)
     """
     ws = api.LoadEventNexus(Filename='REF_M_29160', OutputWorkspace='REF_M_29160')
     fitter = Fitter(ws, prepare_plot_data=True)
     x, y = fitter.fit_2d_peak()
     api.logger.notice("Found: %s %s" % (str(x), str(y)))
     center_x = np.sum(x)/2.0
     self.assertGreater(center_x, 120)
     self.assertLess(center_x, 174)
Example #12
File: mantid_test.py  Project: yxqd/scipp
    def test_EventWorkspace(self):
        # This is from the Mantid system-test data
        filename = 'CNCS_51936_event.nxs'
        eventWS = mantid.LoadEventNexus(filename)
        ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)

        binned_mantid = mantidcompat.to_dataset(ws)

        tof = sp.Variable(binned_mantid[sp.Coord.Tof])
        d = mantidcompat.to_dataset(eventWS)
        binned = sp.histogram(d, tof)

        delta = sp.sum(binned_mantid - binned, sp.Dim.Position)
        print(delta)
Example #13
def get_nominal_difc(nxspath, init_IDF, outdir):
    if not os.path.exists(outdir): os.makedirs(outdir)
    # ## Compute nominal difc
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0,
                            FilterByTimeStop=1)  # load just one second
    #
    msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
    import shutil
    shutil.copyfile(init_IDF, os.path.join(outdir, 'init_IDF.xml'))
    #
    difc = msa.CalculateDIFC(InputWorkspace=ws)
    difc = difc.extractY().flatten().copy()
    msa.DeleteWorkspace('difc')
    np.save(os.path.join(outdir, 'difc-nominal.npy'), difc)
    return
Example #14
 def load_meta_data(cls, file_path, outputWorkspace):
     try:
         if IN_MANTIDPLOT:
             script = "LoadEventNexus(Filename='%s', OutputWorkspace='%s', MetaDataOnly=True)" % (
                 file_path, outputWorkspace)
             mantidplot.runPythonScript(script, True)
             if not AnalysisDataService.doesExist(outputWorkspace):
                 return False
         else:
             api.LoadEventNexus(Filename=file_path,
                                OutputWorkspace=outputWorkspace,
                                MetaDataOnly=True)
         return True
     except:
         return False
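
A hedged call sketch; the class name and file path are placeholders for whatever defines load_meta_data:

ok = SomeReader.load_meta_data('/SNS/REF_L/IPTS-1234/nexus/REF_L_123456.nxs.h5',
                               outputWorkspace='meta_ws')
if not ok:
    print('could not load metadata')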
Example #15
def absorption_correction(filename,
                          lambda_binning=(0.7, 10.35, 5615),
                          **mantid_args):
    """
    This method is a straightforward wrapper exposing CylinderAbsorption
    through scipp

    CylinderAbsorption calculates an approximation of the
    attenuation due to absorption and single scattering in a 'cylindrical'
    shape.

    Requirements:
    - The instrument associated with the workspace must be fully defined
      (this WISH-centric implementation relies on the predefined instrument
      file).

    Parameters
    ----------
    filename: Path to the file with data

    lambda_binning: min, max and number of steps for binning in wavelength

    mantid_args: additional arguments to be passed to Mantid's
                 CylinderAbsorption method.

    Returns
    -------
    Scipp dataset containing absorption correction in Wavelength units.

    """

    # Create empty workspace with proper dimensions.
    workspace = simpleapi.LoadEventNexus(filename,
                                         MetaDataOnly=True,
                                         LoadMonitors=False,
                                         LoadLogs=False)
    workspace.getAxis(0).setUnit('Wavelength')

    # Rebin the resulting correction based on default WISH binning
    lambda_min, lambda_max, number_bins = lambda_binning
    bin_width = (lambda_max - lambda_min) / number_bins
    workspace = simpleapi.Rebin(workspace,
                                params=[lambda_min, bin_width, lambda_max],
                                FullBinsOnly=True)

    correction = simpleapi.CylinderAbsorption(workspace, **mantid_args)

    return scn.from_mantid(correction)
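
A hedged usage sketch; the file name and cylinder geometry are illustrative, and the keyword arguments are forwarded to Mantid's CylinderAbsorption:

correction = absorption_correction('WISH00043525.nxs',
                                   lambda_binning=(0.7, 10.35, 5615),
                                   CylinderSampleHeight=4.0,
                                   CylinderSampleRadius=0.4,
                                   AttenuationXSection=5.08,
                                   ScatteringXSection=5.1,
                                   SampleNumberDensity=0.07)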
Example #16
 def loadNXSData(self):
     self.list_NXSData = []
     self.big_table = []
     _list_runs = self.list_runs
     for _runs in _list_runs:
         _full_file_name = api.FileFinder.findRuns("%s_%d" % (INSTRUMENT_SHORT_NAME, int(_runs)))[0]
         if _full_file_name != '':
             workspace = api.LoadEventNexus(Filename=_full_file_name, OutputWorkspace="__data_file_%s" % _runs, MetaDataOnly=False)
             _data = LRData(workspace, read_options=self.read_options)
             if _data is not None:
                 self.list_NXSData.append(_data)
                 self.loaded_list_runs.append(_runs)
                 self.sortNXSData()
                 self.fillTable()
                 self.sf_gui.update_table(self, False)
                 QtGui.QApplication.processEvents()
Example #17
 def __init__(self, data_file, workspace_name=None):
     self.errors = []
     if HAS_MANTID:
         try:
             if workspace_name is None:
                 self.data_ws = "__raw_data_file"
             else:
                 self.data_ws = str(workspace_name)
             try:
                 # use self.data_ws so the default name applies when workspace_name is None
                 api.LoadEventNexus(Filename=data_file, OutputWorkspace=self.data_ws)
             except:
                 self.errors.append("Error loading data file as Nexus event file:\n%s" % sys.exc_info()[1])
                 api.Load(Filename=data_file, OutputWorkspace=self.data_ws)
                 self.errors = []
         except:
             self.data_ws = None
             self.errors.append("Error loading data file:\n%s" % sys.exc_info()[1])
Example #18
    def _sumRuns(self, run_set, sam_ws, mon_ws, extra_ext=None):
        """
        Aggregate the set of runs
        @param run_set: list of run numbers
        @param sam_ws:  name of aggregate workspace for the sample
        @param mon_ws:  name of aggregate workspace for the monitors
        @param extra_ext: string to be added to the temporary workspaces
        """
        for run in run_set:
            ws_name = self._makeRunName(run)
            if extra_ext is not None:
                ws_name += extra_ext
            mon_ws_name = ws_name + "_monitors"
            run_file = self._makeRunFile(run)

            # Faster loading for the 311 reflection
            if self._reflection["name"] == "silicon311":
                kwargs = {"BankName": "bank2"}  # 311 analyzers only in bank2
            else:
                kwargs = {}

            sapi.LoadEventNexus(Filename=run_file,
                                OutputWorkspace=ws_name,
                                **kwargs)
            if str(run) + ':' in self.getProperty("ExcludeTimeSegment").value:
                self._filterEvents(str(run), ws_name)

            if self._MonNorm:
                sapi.LoadNexusMonitors(Filename=run_file,
                                       OutputWorkspace=mon_ws_name)

            if sam_ws != ws_name:
                sapi.Plus(LHSWorkspace=sam_ws,
                          RHSWorkspace=ws_name,
                          OutputWorkspace=sam_ws)
                sapi.DeleteWorkspace(ws_name)
            if mon_ws != mon_ws_name and self._MonNorm:
                sapi.Plus(LHSWorkspace=mon_ws,
                          RHSWorkspace=mon_ws_name,
                          OutputWorkspace=mon_ws)
                sapi.DeleteWorkspace(mon_ws_name)
Example #19
File: mantid_test.py  Project: yxqd/scipp
    def test_unit_conversion(self):
        # This is from the Mantid system-test data
        filename = 'CNCS_51936_event.nxs'
        eventWS = mantid.LoadEventNexus(filename)
        ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)
        tmp = mantidcompat.to_dataset(ws)
        tof = sp.Variable(tmp[sp.Coord.Tof])
        ws = mantid.ConvertUnits(InputWorkspace=ws, Target='DeltaE',
                                 EMode='Direct', EFixed=3.3056)

        converted_mantid = mantidcompat.to_dataset(ws)
        converted_mantid[sp.Coord.Ei] = ([], 3.3059)

        d = mantidcompat.to_dataset(eventWS, drop_pulse_times=True)
        d[sp.Coord.Ei] = ([], 3.3059)
        d.merge(sp.histogram(d, tof))
        del d[sp.Data.Events]
        converted = sp.convert(d, sp.Dim.Tof, sp.Dim.DeltaE)

        delta = sp.sum(converted_mantid - converted, sp.Dim.Position)
        print(delta)
Example #20
def extract_meta_data(file_path=None,
                      cross_section_data=None,
                      configuration=None):
    """
        Get mid Q-value from meta data
        :param str file_path: name of the file to read
    """
    meta_data = NexusMetaData()

    if cross_section_data is not None:
        meta_data.mid_q = Instrument.mid_q_value(
            cross_section_data.event_workspace)
        meta_data.is_direct_beam = cross_section_data.is_direct_beam
        return meta_data
    elif file_path is None:
        raise RuntimeError(
            "Either a file path or a data object must be supplied")

    with h5py.File(file_path, mode='r') as nxs:
        keys = sorted(nxs.keys())

    if len(keys) == 0:
        logging.error("No entry in data file %s", file_path)
        return meta_data

    try:
        ws = api.LoadEventNexus(str(file_path),
                                MetaDataOnly=True,
                                NXentryName=str(keys[0]))
        meta_data.mid_q = Instrument.mid_q_value(ws)
        meta_data.is_direct_beam = Instrument.check_direct_beam(ws)
    except:
        logging.error(sys.exc_info()[1])
        raise RuntimeError("Could not load file %s [%s]" %
                           (file_path, keys[0]))

    return meta_data
Example #21
    def test_simple_load(self):
        """
            REF_M_29160.nxs.h5: 58d6698e1d6bf98e0315687cb980d333
        """
        ws = api.LoadEventNexus(Filename="REF_M_29160")
        _, ratio1, ratio2, asym1, _ = calculate_ratios(ws, delta_wl=0.05,
                                                       roi=[156, 210, 49, 170],
                                                       slow_filter=True)

        y1 = ratio1.readY(0)
        ref = np.loadtxt("test/r1_29160.txt").T
        diff = (y1-ref[1])**2/ref[2]**2
        self.assertTrue(np.sum(diff)/(len(y1)+1.0) < 0.5)

        y1 = ratio2.readY(0)
        ref = np.loadtxt("test/r2_29160.txt").T
        diff = (y1-ref[1])**2/ref[2]**2
        self.assertTrue(np.sum(diff)/(len(y1)+1.0) < 0.5)

        y1 = asym1.readY(0)
        ref = np.loadtxt("test/a2_29160.txt").T
        diff = (y1-ref[1])**2/ref[2]**2
        self.assertTrue(np.sum(diff)/(len(y1)+1.0) < 0.5)
Example #22
def _filter_cross_sections(file_path, events=True, histo=False):
    """
        Filter events according to an aggregated state log.
        :param str file_path: file to read

        BL4A:SF:ICP:getDI

        015 (0000 1111): SF1=OFF, SF2=OFF, SF1Veto=OFF, SF2Veto=OFF
        047 (0010 1111): SF1=ON, SF2=OFF, SF1Veto=OFF, SF2Veto=OFF
        031 (0001 1111): SF1=OFF, SF2=ON, SF1Veto=OFF, SF2Veto=OFF
        063 (0011 1111): SF1=ON, SF2=ON, SF1Veto=OFF, SF2Veto=OFF
    """
    state_log = "BL4A:SF:ICP:getDI"
    states = {'Off_Off': 15, 'On_Off': 47, 'Off_On': 31, 'On_On': 63}
    cross_sections = {}
    workspace = api.LoadEventNexus(Filename=file_path,
                                   OutputWorkspace="raw_events")

    for pol_state in states:
        try:
            _ws = api.FilterByLogValue(InputWorkspace=workspace,
                                       LogName=state_log,
                                       TimeTolerance=0.1,
                                       MinimumValue=states[pol_state],
                                       MaximumValue=states[pol_state],
                                       LogBoundary='Left')

            events_file = "/tmp/filtered_%s_%s.nxs" % (pol_state, "events")
            api.SaveNexus(InputWorkspace=_ws,
                          Filename=events_file,
                          Title='entry_%s' % pol_state)
            cross_sections['entry-%s' % pol_state] = events_file
        except:
            logging.error("Could not filter %s: %s", pol_state,
                          sys.exc_info()[1])

    return cross_sections, None
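
As a sanity check on the state table in the docstring, a small sketch decoding the getDI values; the bit positions for SF1 and SF2 are inferred from the documented values, not from beamline documentation:

states = {'Off_Off': 15, 'On_Off': 47, 'Off_On': 31, 'On_On': 63}
for name, value in states.items():
    sf1 = 'ON' if value & 0b100000 else 'OFF'  # bit 5, inferred
    sf2 = 'ON' if value & 0b010000 else 'OFF'  # bit 4, inferred
    print('%03d (%s): %s -> SF1=%s, SF2=%s'
          % (value, format(value, '08b'), name, sf1, sf2))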
Example #23
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI,
#     NScD Oak Ridge National Laboratory, European Spallation Source
#     & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# pylint: disable=invalid-name
# Basic parameters for Triphylite Crystal
# Name of the workspaces to create
import mantid.simpleapi as mantid

ws_name = "TOPAZ_3132"
filename = ws_name + "_event.nxs"
ws = mantid.LoadEventNexus(Filename=filename,
                           FilterByTofMin=3000,
                           FilterByTofMax=16000)

# ------------------------------------------------------------------------------------------------------------------------------------------
# Part 1. Basic Reduction

# Spherical Absorption and Lorentz Corrections
ws = mantid.AnvredCorrection(InputWorkspace=ws,
                             LinearScatteringCoef=0.451,
                             LinearAbsorptionCoef=0.993,
                             Radius=0.14)

# Convert to Q space
LabQ = mantid.ConvertToDiffractionMDWorkspace(InputWorkspace=ws,
                                              LorentzCorrection='0',
                                              OutputDimensions='Q (lab frame)',
                                              SplitInto=2)
Example #24
def compare(
        pack="C25B/eightpack-bottom",
        nxspath="/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5",  #C60
        detIDs_npy='../C60-I_d/detIDs.npy',
        newIDF='./SEQUOIA_Definition.xml',
        dmin=2,
        dmax=11,
        dd=0.01,
        dvalues=None,
        tmin=0,
        tmax=2000):
    orig_ws = msa.LoadEventNexus(Filename=nxspath,
                                 FilterByTimeStart=tmin,
                                 FilterByTimeStop=tmax)

    ws = orig_ws
    instrument = ws.getInstrument()
    packnameandtype = pack
    packname, packtype = pack.split('/')
    pack = instrument.getComponentByName(packnameandtype)
    firstpixel = pack[0][0].getID()
    lasttube = pack[pack.nelements() - 1]
    lastpixel = lasttube[lasttube.nelements() - 1]
    lastpixel = lastpixel.getID()
    print "first and last pixel IDs:", firstpixel, lastpixel
    #
    #
    detIDs = list(np.load(detIDs_npy))
    startindex = detIDs.index(firstpixel)
    endindex = detIDs.index(lastpixel)
    print "first and last pixel indexes:", startindex, endindex
    del ws

    # # Old I(d)
    daxis = "%s,%s,%s" % (dmin, dd, dmax)
    I_d_0 = msa.ConvertUnits(InputWorkspace=orig_ws,
                             Target='dSpacing',
                             EMode='Elastic')
    I_d_0 = msa.Rebin(InputWorkspace=I_d_0, Params=daxis)
    pack_I_d_0 = msa.SumSpectra(InputWorkspace=I_d_0,
                                StartWorkspaceIndex=startindex,
                                EndWorkspaceIndex=endindex)
    xbb0 = pack_I_d_0.readX(0)
    y0 = pack_I_d_0.readY(0).copy()
    x0 = (xbb0[1:] + xbb0[:-1]) / 2
    msa.DeleteWorkspace(I_d_0)
    msa.DeleteWorkspace(pack_I_d_0)

    # # New I(d)
    msa.LoadInstrument(orig_ws, Filename=newIDF, RewriteSpectraMap=False)
    I_d_1 = msa.ConvertUnits(InputWorkspace=orig_ws,
                             Target='dSpacing',
                             EMode='Elastic')
    I_d_1 = msa.Rebin(InputWorkspace=I_d_1, Params=daxis)
    pack_I_d_1 = msa.SumSpectra(InputWorkspace=I_d_1,
                                StartWorkspaceIndex=startindex,
                                EndWorkspaceIndex=endindex)
    xbb1 = pack_I_d_1.readX(0)
    y1 = pack_I_d_1.readY(0).copy()
    x1 = (xbb1[1:] + xbb1[:-1]) / 2
    msa.DeleteWorkspace(I_d_1)
    msa.DeleteWorkspace(pack_I_d_1)
    msa.DeleteWorkspace(orig_ws)

    data = [x0, y0, x1, y1]
    np.save("%s-I_d.npy" % packname, data)
    plt.figure(figsize=(7, 4))
    plt.title("Pack %s" % packname)
    plt.plot(x0, y0, label='original')
    plt.plot(x1, y1, label='after loading new xml')
    for d in dvalues or []:  # dvalues defaults to None; skip the markers then
        plt.axvline(x=d, linewidth=1, color='k')
    # plt.xlim(3,3.3)
    plt.legend(loc='upper left')
    outpng = '%s-I_d.png' % packname
    plt.savefig(outpng)
    return
Example #25
def get_I_tof(nxs_files,
              outdir,
              packs,
              dt=1000.,
              tofaxis=None,
              Npixels_per_pack=1024):
    """nxs_files: paths of calibration nxs files
    outdir: output directory
    packs: list of pack names, e.g. C26B/eightpack-bottom
    dt: time step for loading files; too large a value needs too much memory
    Npixels_per_pack: number of pixels per pack
    tofaxis: tofmin, tofmax, dtof

    Output files:
    * detIDs.npy
    * I_tof-xbb.npy
    * I_tof-y-PACKNAME.npy
    * pack-PACKNAME.yaml

    NOTE:
    * Different combinations of nxs_files, init_IDF, d_axis should use different outdirs
    """
    tofmin, tofmax, dtof = tofaxis
    if not os.path.exists(outdir): os.makedirs(outdir)
    # ## Compute nominal difc using first file in the list
    nxspath = nxs_files[0]
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0,
                            FilterByTimeStop=1)  # load just one second
    #
    # IDs of all pixels
    detIDs = getDetIDs(ws)
    np.save(os.path.join(outdir, 'detIDs.npy'), detIDs)
    #
    # map pack name to (start_pixelID, stop_pixelID)
    pack2pixelID_start_stop = dict()
    for name in packs:
        pack2pixelID_start_stop[name] = getFirstLastPixelIDs(ws, name)
        continue
    # get tof axis
    I_tof = msa.Rebin(InputWorkspace=ws,
                      Params='%s,%s,%s' % (tofmin, dtof, tofmax))
    I_tof = msa.SumSpectra(InputWorkspace=I_tof)
    xbb = np.array(I_tof.readX(0), copy=True)
    print(xbb[0], xbb[-1], len(xbb))
    # clean up
    msa.DeleteWorkspaces(['ws', 'I_tof'])

    runtimes = dict()
    for f in nxs_files:
        runtimes[f] = getRunTime(f)
    print "* run times:", runtimes

    Ntof = len(xbb) - 1
    print "* Number of TOF bins:", Ntof

    #
    Npacks = len(packs)

    y_matrix = np.zeros((Npacks, Npixels_per_pack, Ntof))
    for nxsfile in nxs_files:
        print "* Working on", nxsfile
        t_total = runtimes[nxsfile]
        for tstart in np.arange(0, t_total - dt, dt):
            print "* tstart", tstart
            tend = min(t_total - 1, tstart + dt)
            ws = msa.LoadEventNexus(nxsfile,
                                    FilterByTimeStart=tstart,
                                    FilterByTimeStop=tend)
            I_tof = msa.Rebin(InputWorkspace=ws,
                              Params='%s,%s,%s' % (tofmin, dtof, tofmax))

            # loop over packs
            for ipack, packname in enumerate(packs):
                firstpixel, lastpixel = pack2pixelID_start_stop[packname]
                startindex = detIDs.index(firstpixel)
                endindex = detIDs.index(lastpixel)
                print "array indexes of first and last pixel", startindex, endindex

                y_pack = y_matrix[ipack]
                # loop over pixels in the pack
                for i, pixelindex in enumerate(range(startindex,
                                                     endindex + 1)):
                    I_tof_pixel = msa.SumSpectra(
                        InputWorkspace=I_tof,
                        StartWorkspaceIndex=pixelindex,
                        EndWorkspaceIndex=pixelindex)
                    y = I_tof_pixel.readY(0)
                    y_pack[i] += y
                    msa.DeleteWorkspace('I_tof_pixel')
                    continue
                continue

            msa.DeleteWorkspaces(['ws', 'I_tof'])
            continue
        continue

    #xbb = np.arange(tofmin, tofmax+dtof/2., dtof)
    # print xbb
    np.save(os.path.join(outdir, "I_tof-xbb.npy"), xbb)
    # for debugging
    np.save(os.path.join(outdir, "I_tof-y_matrix.npy"), y_matrix)

    for ipack, packname in enumerate(packs):
        y_pack = y_matrix[ipack]
        packname1 = packname.split('/')[0]  # "C25T"
        # save y values of I(d) for the pack
        np.save(os.path.join(outdir, "I_tof-y-%s.npy" % packname1), y_pack)
        # save pack info
        first, last = pack2pixelID_start_stop[packname]
        pixelIDs = dict(first=first, last=last)
        pack_info = dict(pixelIDs=pixelIDs)
        dumpYaml(pack_info, os.path.join(outdir, 'pack-%s.yaml' % packname1))
        continue
    return
Example #26
# coding: utf-8

import os, numpy as np
from mantid import simpleapi as msa, mtd

workdir = "/SNS/users/lj7/dv/sns-chops/detcalib/SEQ"
os.chdir(workdir)

# ## Compute nominal difc
nxspath = '/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5'
ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0, FilterByTimeStop=1)
msa.LoadInstrument(ws,
                   Filename='./SEQUOIA_Definition_guessshortpacks.xml',
                   RewriteSpectraMap=False)
difc = msa.CalculateDIFC(InputWorkspace=ws)
difc = difc.extractY().flatten().copy()
msa.DeleteWorkspace('difc')

# get det ID list
detIDs = []
for i in range(ws.getNumberHistograms()):
    sp = ws.getSpectrum(i)
    dets = list(sp.getDetectorIDs())
    assert len(dets) == 1
    detIDs.append(dets[0])
    continue
for i in range(len(detIDs) - 1):
    assert detIDs[i] < detIDs[i + 1]


# # Get pack index
Example #27
 def read(self, number, panel, extension):
     if type(number) is int:
         filename = self.datafile
         logger.notice("will be reading filename...{}".format(filename))
         spectra_min, spectra_max = self.return_panel_van.get(panel) if self.is_vanadium else \
             self.return_panel.get(panel)
         if panel != 0:
             output = "w{0}-{1}".format(number, panel)
         else:
             output = "w{}".format(number)
         shared_load_files(extension, filename, output, spectra_max,
                           spectra_min, False)
         if extension == "nxs_event":
             simple.LoadEventNexus(Filename=filename,
                                   OutputWorkspace=output,
                                   LoadMonitors='1')
             self.read_event_nexus(number, output, panel)
         if extension[:10] == "nxs_event_":
             label, tmin, tmax = split_string_event(extension)
             output = output + "_" + label
             if tmax == "end":
                 simple.LoadEventNexus(Filename=filename,
                                       OutputWorkspace=output,
                                       FilterByTimeStart=tmin,
                                       LoadMonitors='1',
                                       MonitorsAsEvents='1',
                                       FilterMonByTimeStart=tmin)
             else:
                 simple.LoadEventNexus(Filename=filename,
                                       OutputWorkspace=output,
                                       FilterByTimeStart=tmin,
                                       FilterByTimeStop=tmax,
                                       LoadMonitors='1',
                                       MonitorsAsEvents='1',
                                       FilterMonByTimeStart=tmin,
                                       FilterMonByTimeStop=tmax)
             self.read_event_nexus(number, output, panel)
     else:
         num_1, num_2 = split_run_string(number)
         output = "w{0}_{1}-{2}".format(num_1, num_2, panel)
         output1 = self.load_multi_run_part(extension, num_1, panel)
         output2 = self.load_multi_run_part(extension, num_2, panel)
         simple.MergeRuns(output1 + "," + output2, output)
         simple.DeleteWorkspace(output1)
         simple.DeleteWorkspace(output2)
     simple.ConvertUnits(InputWorkspace=output,
                         OutputWorkspace=output,
                         Target="Wavelength",
                         Emode="Elastic")
     lmin, lmax = Wish.LAMBDA_RANGE
     simple.CropWorkspace(InputWorkspace=output,
                          OutputWorkspace=output,
                          XMin=lmin,
                          XMax=lmax)
     monitor_run = "monitor{}".format(number)
     if monitor_run not in simple.mtd:
         monitor = self.process_incidentmon(number,
                                            extension,
                                            spline_terms=70)
     else:
         monitor = simple.mtd[monitor_run]
     simple.NormaliseToMonitor(InputWorkspace=output,
                               OutputWorkspace=output + "norm1",
                               MonitorWorkspace=monitor)
     simple.NormaliseToMonitor(InputWorkspace=output + "norm1",
                               OutputWorkspace=output + "norm2",
                               MonitorWorkspace=monitor,
                               IntegrationRangeMin=0.7,
                               IntegrationRangeMax=10.35)
     simple.DeleteWorkspace(output)
     simple.DeleteWorkspace(output + "norm1")
     simple.RenameWorkspace(InputWorkspace=output + "norm2",
                            OutputWorkspace=output)
     simple.ConvertUnits(InputWorkspace=output,
                         OutputWorkspace=output,
                         Target="TOF",
                         EMode="Elastic")
     simple.ReplaceSpecialValues(InputWorkspace=output,
                                 OutputWorkspace=output,
                                 NaNValue=0.0,
                                 NaNError=0.0,
                                 InfinityValue=0.0,
                                 InfinityError=0.0)
     return output
Example #28
def get_I_d(nxs_files,
            init_IDF,
            outdir,
            packs,
            dt=1000.,
            d_axis=(2., 11., 0.02),
            Npixels_per_pack=1024):
    """nxs_files: paths of calibration nxs files
    init_IDF: initial IDF path
    outdir: output directory
    packs: list of pack names, e.g. C26B/eightpack-bottom
    dt: time step for loading files; too large a value needs too much memory
    d_axis: dmin, dmax, delta_d. e.g. 2., 11., 0.02
    Npixels_per_pack: number of pixels per pack

    Output files:
    * difc-nominal.npy
    * detIDs.npy
    * I_d-xbb.npy
    * I_d-y-PACKNAME.npy
    * pack-PACKNAME.yaml

    NOTE:
    * Assumed that the difc array from CalculateDIFC is ordered according to the "spectrum list" in
      the mantid workspace. See function getDetIDs
    * Different combinations of nxs_files, init_IDF, d_axis should use different outdirs
    """
    if not os.path.exists(outdir): os.makedirs(outdir)
    # ## Compute nominal difc using first file in the list
    nxspath = nxs_files[0]
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0,
                            FilterByTimeStop=1)  # load just one second
    #
    msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
    import shutil
    shutil.copyfile(init_IDF, os.path.join(outdir, 'init_IDF.xml'))
    #
    difc = msa.CalculateDIFC(InputWorkspace=ws)
    difc = difc.extractY().flatten().copy()
    msa.DeleteWorkspace('difc')
    np.save(os.path.join(outdir, 'difc-nominal.npy'), difc)
    # IDs of all pixels
    detIDs = getDetIDs(ws)
    np.save(os.path.join(outdir, 'detIDs.npy'), detIDs)
    #
    # map pack name to (start_pixelID, stop_pixelID)
    pack2pixelID_start_stop = dict()
    for name in packs:
        pack2pixelID_start_stop[name] = getFirstLastPixelIDs(ws, name)
        continue
    # clean up
    msa.DeleteWorkspace('ws')

    runtimes = dict()
    for f in nxs_files:
        runtimes[f] = getRunTime(f)
    print "* run times:", runtimes

    dmin, dmax, delta_d = d_axis
    Nd = int((dmax - dmin) / delta_d)
    print "* Number of d bins:", Nd

    #
    Npacks = len(packs)

    y_matrix = np.zeros((Npacks, Npixels_per_pack, Nd))
    xbb_saved = None
    for nxsfile in nxs_files:
        print "* Working on", nxsfile
        t_total = runtimes[nxsfile]
        for tstart in np.arange(0, t_total - dt, dt):
            print "* tstart", tstart
            tend = min(t_total - 1, tstart + dt)
            ws = msa.LoadEventNexus(nxsfile,
                                    FilterByTimeStart=tstart,
                                    FilterByTimeStop=tend)
            msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
            I_d = msa.ConvertUnits(InputWorkspace=ws,
                                   Target='dSpacing',
                                   EMode='Elastic')
            I_d = msa.Rebin(InputWorkspace=I_d,
                            Params='%s,%s,%s' % (dmin, delta_d, dmax))

            # loop over packs
            for ipack, packname in enumerate(packs):
                firstpixel, lastpixel = pack2pixelID_start_stop[packname]
                startindex = detIDs.index(firstpixel)
                endindex = detIDs.index(lastpixel)
                print "array indexes of first and last pixel", startindex, endindex

                y_pack = y_matrix[ipack]
                # loop over pixels in the pack
                for i, pixelindex in enumerate(range(startindex,
                                                     endindex + 1)):
                    I_d_pixel = msa.SumSpectra(InputWorkspace=I_d,
                                               StartWorkspaceIndex=pixelindex,
                                               EndWorkspaceIndex=pixelindex)
                    xbb = I_d_pixel.readX(0)
                    if xbb_saved is None: xbb_saved = np.array(xbb, copy=True)
                    y = I_d_pixel.readY(0)
                    y_pack[i] += y
                    msa.DeleteWorkspace('I_d_pixel')
                    continue
                continue

            msa.DeleteWorkspaces(['ws', 'I_d'])
            continue
        continue

    xbb = np.arange(dmin, dmax + delta_d / 2., delta_d)
    np.save(os.path.join(outdir, "I_d-xbb.npy"), xbb)
    # for debugging
    np.save(os.path.join(outdir, "I_d-y_matrix.npy"), y_matrix)

    for ipack, packname in enumerate(packs):
        y_pack = y_matrix[ipack]
        packname1 = packname.split('/')[0]  # "C25T"
        # save y values of I(d) for the pack
        np.save(os.path.join(outdir, "I_d-y-%s.npy" % packname1), y_pack)
        # save pack info
        first, last = pack2pixelID_start_stop[packname]
        pixelIDs = dict(first=first, last=last)
        pack_info = dict(pixelIDs=pixelIDs)
        dumpYaml(pack_info, os.path.join(outdir, 'pack-%s.yaml' % packname1))
        continue
    return
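
A hedged usage sketch for get_I_d; the paths reuse names that appear elsewhere on this page and the pack list is illustrative:

nxs_files = ['/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5']
get_I_d(nxs_files,
        init_IDF='./SEQUOIA_Definition_guessshortpacks.xml',
        outdir='./out-C60-I_d',
        packs=['C25B/eightpack-bottom', 'C26B/eightpack-bottom'],
        dt=1000.,
        d_axis=(2., 11., 0.02))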
Example #29
# packtype = 'eightpack-bottom'
packtype = 'eightpack'
x_path = 'C60-C26T-I_d-x.npy'
y_path = 'C60-I_d-y-B24.npy'

d_spacing_max_mismatch = 0.2  # maximum fractional mismatch of d spacing values allowed.
d_spacing_peak_width = 0.1  # fractional width of d spacing peak.
maxchisq = 3.  # if chisq>maxchisq, mask this pixel
min_counts = 2000  # if total counts of the peak < min_counts, don't count this peak

# Outputs
difc_outpath = "C60-difc-2-B24.npy"
difc_mask_outpath = 'C60-difc-2-B24-mask.npy'
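
# NOTE: nxspath, initial_idf and packname are assumed to be defined earlier in
# the full script; this excerpt begins after those definitions.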

# ## Compute nominal difc
ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0, FilterByTimeStop=1)
msa.LoadInstrument(ws, Filename=initial_idf, RewriteSpectraMap=False)
difc = msa.CalculateDIFC(InputWorkspace=ws)
difc = difc.extractY().flatten().copy()
msa.DeleteWorkspace('difc')

# # Get pack pixel IDs
instrument = ws.getInstrument()
pack = instrument.getComponentByName("%s/%s" % (packname, packtype))
firstpixel = pack[0][0].getID()
lasttube = pack[pack.nelements() - 1]
lastpixel = lasttube[lasttube.nelements() - 1]
lastpixel = lastpixel.getID()

# Get detID list
detIDs = []