Example #1
File: steps.py  Project: usnistgov/reductus
def LoadDCS(filelist=None, check_timestamps=True):
    """
    loads a data file into a SansData obj and returns that.
    Checks to see if data being loaded is 2D; if not, quits

    **Inputs**

    filelist (fileinfo[]): Files to open.
    
    check_timestamps (bool): verify that timestamps on file match request

    **Returns**

    output (raw[]): all the entries loaded.

    2018-04-25 Brian Maranville
    """
    from dataflow.fetch import url_get
    from .dcsdata import readDCS
    if filelist is None:
        filelist = []
    data = []
    for fileinfo in filelist:
        path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
        name = basename(path)
        fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
        entry = readDCS(name, fid)
        data.append(entry)

    return data
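
All of the loaders in this listing repeat the same fetch-and-parse loop: take each fileinfo dict, resolve the file name from its 'path', download the bytes with url_get (optionally checking the timestamp), wrap them in a BytesIO, and hand them to an instrument-specific reader (readDCS, readVSANSNexuz, readUSANSNexus, readSANSNexuz). The snippets assume the usual module-level imports of basename (os.path), BytesIO (io), and numpy as np. A minimal sketch of that shared pattern, with a hypothetical read_entries callback standing in for the concrete reader (this helper is illustrative only, not part of reductus), looks like this:

from io import BytesIO
from os.path import basename

from dataflow.fetch import url_get

def load_filelist(filelist, read_entries, check_timestamps=True):
    # Illustrative helper only; not part of reductus. read_entries(name, fid)
    # stands in for the instrument-specific reader and returns a list of entries.
    data = []
    for fileinfo in (filelist or []):
        name = basename(fileinfo['path'])
        fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
        data.extend(read_entries(name, fid))
    return data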
Example #2
File: steps.py  Project: usnistgov/reductus
def _LoadVSANS(filelist=None, check_timestamps=True):
    """
    loads a data file into a VSansData obj and returns that.

    **Inputs**

    filelist (fileinfo[]): Files to open.
    
    check_timestamps (bool): verify that timestamps on file match request

    **Returns**

    output (raw[]): all the entries loaded.

    2018-04-29 Brian Maranville
    """
    from dataflow.fetch import url_get
    from .loader import readVSANSNexuz
    if filelist is None:
        filelist = []
    data = []
    for fileinfo in filelist:
        path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
        name = basename(path)
        fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
        entries = readVSANSNexuz(name, fid)
        if fileinfo['path'].endswith("DIV.h5"):
            print('div file...')
            for entry in entries:
                entry.metadata['analysis.filepurpose'] = "Sensitivity"
                entry.metadata['analysis.intent'] = "DIV"
                entry.metadata['sample.description'] = entry.metadata['run.filename']
        data.extend(entries)

    return data
Example #3
File: steps.py  Project: usnistgov/reductus
def LoadVSANSHe3(filelist=None, check_timestamps=True):
    """
    loads a data file into a VSansData obj and returns that.

    **Inputs**

    filelist (fileinfo[]): Files to open.
    
    check_timestamps (bool): verify that timestamps on file match request

    **Returns**

    output (raw[]): all the entries loaded.

    2018-04-29 Brian Maranville
    """
    from dataflow.fetch import url_get
    from .loader import readVSANSNexuz, he3_metadata_lookup
    if filelist is None:
        filelist = []
    data = []
    for fileinfo in filelist:
        path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
        name = basename(path)
        fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
        entries = readVSANSNexuz(name, fid, metadata_lookup=he3_metadata_lookup)
        data.extend(entries)

    return data
Example #4
File: steps.py  Project: usnistgov/reductus
def LoadVSANSDIV(filelist=None, check_timestamps=True):
    """
    loads a DIV file into a VSansData obj and returns that.

    **Inputs**

    filelist (fileinfo[]): Files to open.
    
    check_timestamps (bool): verify that timestamps on file match request

    **Returns**

    output (realspace[]): all the entries loaded.

    2019-10-30 Brian Maranville
    """
    from dataflow.fetch import url_get
    from .loader import readVSANSNexuz
    

    if filelist is None:
        filelist = []

    data = []
    for fileinfo in filelist:
        path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
        name = basename(path)
        fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
        entries = readVSANSNexuz(name, fid) # metadata_lookup=div_metadata_lookup)
        for entry in entries:
            div_entries = _loadDivData(entry)
            data.extend(div_entries)

    return data
Example #5
def LoadMAGIKPSD(fileinfo=None, collapse=True, collapse_axis='y', auto_PolState=False, PolState='', flip=True, transpose=True):
    """
    loads a data file into a MetaArray and returns that.
    Checks to see if data being loaded is 2D; if not, quits

    Need to rebin and regrid if the detector is moving...

    **Inputs**

    fileinfo (fileinfo): File to open.

    collapse {Collapse along one of the axes} (bool): sum over axis of detector

    collapse_axis {number index of axis to collapse along} (opt:x|y): axis to sum over

    auto_PolState {Automatic polarization identify} (bool): automatically determine the polarization state from entry name

    PolState (str): polarization state if not automatically detected

    flip (bool): flip the data up and down

    transpose (bool): transpose the data

    **Returns**

    output (ospec2d[]): all the entries loaded.

    2016-04-02 Brian Maranville
    """

    path, mtime, entries = fileinfo['path'], fileinfo['mtime'], fileinfo['entries']
    name = basename(path)
    fid = BytesIO(url_get(fileinfo))
    file_obj = h5_open_zip(name, fid)
    return loadMAGIKPSD_helper(file_obj, name, path, collapse=collapse, collapse_axis=collapse_axis, auto_PolState=auto_PolState, PolState=PolState, flip=flip, transpose=transpose)
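
The reshaping itself happens inside loadMAGIKPSD_helper, which is not shown in this listing. As a rough illustration of what the collapse, flip, and transpose options imply for a 2-D detector array, a plain-numpy sketch follows; the order of operations and the mapping of 'x'/'y' onto array axes are assumptions, not the reductus implementation:

import numpy as np

def shape_detector(counts, collapse=True, collapse_axis='y', flip=True, transpose=True):
    # Illustrative only; order of operations and the 'x'/'y'-to-axis mapping are assumptions.
    if transpose:
        counts = counts.T
    if flip:
        counts = np.flipud(counts)      # reverse the vertical (row) axis
    if collapse:
        axis = 0 if collapse_axis == 'x' else 1
        counts = counts.sum(axis=axis)  # sum over the chosen detector axis
    return counts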
Example #6
def url_load(fileinfo):
    path, mtime, entries = fileinfo['path'], fileinfo['mtime'], fileinfo['entries']
    name = basename(path)
    fid = BytesIO(url_get(fileinfo))
    nx_entries = LoadMAGIKPSD.load_entries(name, fid, entries=entries)
    fid.close()
    return nx_entries
Example #7
def LoadMAGIKPSD(fileinfo=None, collapse=True, collapse_axis='y', auto_PolState=False, PolState='', flip=True, transpose=True):
    """
    loads a data file into a MetaArray and returns that.
    Checks to see if data being loaded is 2D; if not, quits

    Need to rebin and regrid if the detector is moving...

    **Inputs**

    fileinfo (fileinfo): File to open.

    collapse {Collapse along one of the axes} (bool): sum over axis of detector

    collapse_axis {number index of axis to collapse along} (opt:x|y): axis to sum over

    auto_PolState {Automatic polarization identify} (bool): automatically determine the polarization state from entry name

    PolState (str): polarization state if not automatically detected

    flip (bool): flip the data up and down

    transpose (bool): transpose the data

    **Returns**

    output (ospec2d[]): all the entries loaded.

    2016-04-04 Brian Maranville
    """

    path, mtime, entries = fileinfo['path'], fileinfo['mtime'], fileinfo['entries']
    name = basename(path)
    fid = BytesIO(url_get(fileinfo))
    file_obj = h5_open_zip(name, fid)
    return loadMAGIKPSD_helper(file_obj, name, path, collapse=collapse, collapse_axis=collapse_axis, auto_PolState=auto_PolState, PolState=PolState, flip=flip, transpose=transpose)
Example #8
def url_load(fileinfo, check_timestamps=True):
    path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
    filename = basename(path)
    content = url_get(fileinfo, mtime_check=check_timestamps)
    if filename.endswith('.raw') or filename.endswith('.ras'):
        return xrawref.load_from_string(filename, content, entries=entries)
    else:
        return nexusref.load_from_string(filename, content, entries=entries)
Example #9
File: load.py  Project: e-rus/reductus
def url_load(fileinfo):
    path, mtime, entries = fileinfo['path'], fileinfo['mtime'], fileinfo[
        'entries']
    filename = basename(path)
    content = url_get(fileinfo)
    if filename.endswith('.raw') or filename.endswith('.ras'):
        return xrawref.load_from_string(filename, content, entries=entries)
    else:
        return nexusref.load_from_string(filename, content, entries=entries)
Example #10
File: load.py  Project: usnistgov/reductus
def url_load(fileinfo, check_timestamps=True, loader=None):
    path, entries = fileinfo['path'], fileinfo.get('entries', None)
    filename = basename(path)
    content = url_get(fileinfo, mtime_check=check_timestamps)
    if loader is not None:
        return load_from_string(filename, content, entries=entries,
                                loader=loader)
    elif filename.endswith('.raw') or filename.endswith('.ras'):
        from . import xrawref
        return load_from_string(filename, content, entries=entries,
                                loader=xrawref.load_entries)
    elif filename.endswith('.nxs.cdr'):
        from . import candor
        return load_from_string(filename, content, entries=entries,
                                loader=candor.load_entries)
    else:
        from . import nexusref
        return load_from_string(filename, content, entries=entries,
                                loader=nexusref.load_entries)
Example #11
def LoadRawUSANS(filelist=None,
                 check_timestamps=True,
                 det_deadtime=7e-6,
                 trans_deadtime=1.26e-5):
    """
    loads a data file into a RawSansData obj and returns that.

    **Inputs**

    filelist (fileinfo[]): Files to open.
    
    check_timestamps (bool): verify that timestamps on file match request

    det_deadtime {main deadtime (s)} (float): main detector deadtime, in seconds

    trans_deadtime {trans deadtime (s)} (float): transmission detector deadtime, in seconds 

    **Returns**

    output (data[]): all the entries loaded.

    2020-01-29 Brian Maranville
    """
    from dataflow.fetch import url_get
    from .loader import readUSANSNexus
    from .usansdata import USansData
    if filelist is None:
        filelist = []
    data = []
    for fileinfo in filelist:
        path, mtime, entries = fileinfo['path'], fileinfo.get(
            'mtime', None), fileinfo.get('entries', None)
        name = basename(path)
        fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
        entries = readUSANSNexus(name,
                                 fid,
                                 det_deadtime=det_deadtime,
                                 trans_deadtime=trans_deadtime)

        data.extend(entries)

    return data
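
The deadtime values are simply forwarded to readUSANSNexus, whose internals are not shown here. For reference, the standard non-paralyzable deadtime correction that such readers typically apply is sketched below; this is an assumption about the correction model, not a statement of what readUSANSNexus actually does:

def deadtime_correct(measured_rate, tau):
    # Non-paralyzable detector model: true_rate = measured_rate / (1 - measured_rate * tau),
    # with tau the detector deadtime in seconds and rates in counts per second.
    return measured_rate / (1.0 - measured_rate * tau)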
Example #12
def LoadSANS(filelist=None, flip=False, transpose=False, check_timestamps=True):
    """
    loads a data file into a SansData obj and returns that.
    Checks to see if data being loaded is 2D; if not, quits

    **Inputs**

    filelist (fileinfo[]): Files to open.

    flip (bool): flip the data up and down

    transpose (bool): transpose the data
    
    check_timestamps (bool): verify that timestamps on file match request

    **Returns**

    output (sans2d[]): all the entries loaded.

    2018-04-20 Brian Maranville
    """
    from dataflow.fetch import url_get
    from .loader import readSANSNexuz
    if filelist is None:
        filelist = []
    data = []
    for fileinfo in filelist:
        path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
        name = basename(path)
        fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
        entries = readSANSNexuz(name, fid)
        for entry in entries:
            if flip:
                entry.data.x = np.fliplr(entry.data.x)
            if transpose:
                entry.data.x = entry.data.x.T
        data.extend(entries)

    return data
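
For reference, flip and transpose are plain numpy operations on the detector array; note that np.fliplr reverses the columns (the second array axis):

import numpy as np

a = np.array([[1, 2, 3],
              [4, 5, 6]])
np.fliplr(a)   # [[3, 2, 1], [6, 5, 4]]  (columns reversed)
a.T            # [[1, 4], [2, 5], [3, 6]] (axes swapped)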
Example #13
def LoadSANS(filelist=None, flip=False, transpose=False):
    """
    loads a data file into a SansData obj and returns that.
    Checks to see if data being loaded is 2D; if not, quits

    **Inputs**

    filelist (fileinfo[]): Files to open.

    flip (bool): flip the data up and down

    transpose (bool): transpose the data

    **Returns**

    output (sans2d[]): all the entries loaded.

    2017-04-17 Brian Maranville
    """
    from dataflow.fetch import url_get
    from .loader import readSANSNexuz
    if filelist is None:
        filelist = []
    data = []
    for fileinfo in filelist:
        path, mtime, entries = fileinfo['path'], fileinfo['mtime'], fileinfo[
            'entries']
        name = basename(path)
        fid = BytesIO(url_get(fileinfo))
        entries = readSANSNexuz(name, fid)
        for entry in entries:
            if flip:
                entry.data.x = np.fliplr(entry.data.x)
            if transpose:
                entry.data.x = entry.data.x.T
        data.extend(entries)

    return data