Example #1
def test_list():
    f = tempfile.NamedTemporaryFile()
    d = {}
    d["hello"] = ["a", "b", "c", "d"]
    d["blaise"] = [0, 1, 2, 3, 4, 5, 6, -124]
    d["mixture"] = ["blaise", "wisconsin", "echo", "wisconsin"]
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #2
def test_array_nans():
    f = tempfile.NamedTemporaryFile()
    d = {}
    d["nan"] = np.array([np.nan])
    d["inf"] = np.array([np.inf])
    d["-inf"] = np.array([-np.inf])
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #3
def test_array():
    f = tempfile.NamedTemporaryFile()
    d = {}
    d["5"] = np.random.random(5)
    d["12, 45"] = np.random.random((12, 45))
    d["7, 15, 2"] = np.random.random((7, 15, 2))
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #4
def test_list():
    f = tempfile.NamedTemporaryFile()
    d = {}
    d['hello'] = ['a', 'b', 'c', 'd']
    d['blaise'] = [0, 1, 2, 3, 4, 5, 6, -124]
    d['mixture'] = ['blaise', 'wisconsin', 'echo', 'wisconsin']
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #5
def test_str():
    f = tempfile.NamedTemporaryFile()
    d = {
        "blaise": "thompson",
        "red": "tomato",
        "love": "hate",
        "madison": "wisconsin"
    }
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #6
def test_str():
    f = tempfile.NamedTemporaryFile()
    d = {
        'blaise': 'thompson',
        'red': 'tomato',
        'love': 'hate',
        'madison': 'wisconsin'
    }
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #7
def test_float():
    f = tempfile.NamedTemporaryFile()
    d = {
        "zero": 0.0,
        "four": 4.0,
        "large": 124.0,
        "negative": -12.0,
        "fraction": 0.5
    }
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #8
def test_float():
    f = tempfile.NamedTemporaryFile()
    d = {
        'zero': 0.,
        'four': 4.,
        'large': 124.,
        'negative': -12.,
        'fraction': 0.5
    }
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #9
def from_PyCMDS(filepath, name=None, parent=None, verbose=True):
    """Create a data object from a single PyCMDS output file.

    Parameters
    ----------
    filepath : str
        The file to load. Can accept .data, .fit, or .shots files.
    name : str or None (optional)
        The name to be applied to the new data object. If None, name is read
        from file.
    parent : WrightTools.Collection (optional)
        Collection to place new data object within. Default is None.
    verbose : bool (optional)
        Toggle talkback. Default is True.

    Returns
    -------
    data
        A Data instance.
    """
    # header
    headers = tidy_headers.read(filepath)
    # name
    if name is None:  # name not given in method arguments
        data_name = headers['data name']
    else:
        data_name = name
    if data_name == '':  # name not given in PyCMDS
        data_name = headers['data origin']
    # create data object
    kwargs = {'name': data_name, 'kind': 'PyCMDS', 'source': filepath,
              'created': headers['file created'],
              }
    if parent is not None:
        data = parent.create_data(**kwargs)
    else:
        data = Data(**kwargs)
    # array
    arr = np.genfromtxt(filepath).T
    # get axes and scanned variables
    axes = []
    for name, identity, units in zip(headers['axis names'],
                                     headers['axis identities'],
                                     headers['axis units']):
        # points and centers
        points = np.array(headers[name + ' points'])
        if name + ' centers' in headers.keys():
            centers = headers[name + ' centers']
        else:
            centers = None
        # create
        axis = {'points': points, 'units': units, 'name': name, 'identity': identity,
                'centers': centers}
        axes.append(axis)
    shape = tuple([a['points'].size for a in axes])
    for i, ax in enumerate(axes):
        sh = [1] * len(shape)
        sh[i] = len(ax['points'])
        data.create_variable(name=ax['name'] + '_points',
                             values=np.array(ax['points']).reshape(sh))
        if ax['centers'] is not None:
            sh = [1] * len(shape)
            sh[i - 1] = len(axes[i - 1]['points'])
            data.create_variable(name=ax['name'] + '_centers',
                                 values=np.array(ax['centers']).reshape(sh))
    # get assorted remaining things
    # variables and channels
    for index, kind, name in zip(range(len(arr)), headers['kind'], headers['name']):
        values = np.full(np.prod(shape), np.nan)
        values[:len(arr[index])] = arr[index]
        values.shape = shape
        if name == 'time':
            data.create_variable(name='labtime', values=values)
        if kind == 'hardware':
            # sadly, recorded tolerances are not reliable
            # so a bit of hard-coded hacking is needed
            # if this ends up being too fragile, we might have to use the points arrays
            # ---Blaise 2018-01-09
            units = headers['units'][index]
            label = headers['label'][index]
            if 'w' in name and name.startswith(tuple(data.variable_names)):
                inherited_shape = data[name.split('_')[0]].shape
                for i, s in enumerate(inherited_shape):
                    if s == 1:
                        values = np.mean(values, axis=i)
                        values = np.expand_dims(values, i)
            else:
                tolerance = headers['tolerance'][index]
                for i in range(len(shape)):
                    if tolerance is None:
                        break
                    if 'd' in name:
                        tolerance = 3.
                    if 'zero' in name:
                        tolerance = 1e-10
                    try:
                        assert i == headers['axis names'].index(name)
                        tolerance = 0
                    except (ValueError, AssertionError):
                        if (name in headers['axis names'] and
                                '%s_centers' % name not in data.variable_names):
                            tolerance = np.inf
                    mean = np.nanmean(values, axis=i)
                    mean = np.expand_dims(mean, i)
                    values, meanexp = wt_kit.share_nans(values, mean)
                    if np.allclose(meanexp, values, atol=tolerance, equal_nan=True):
                        values = mean
            if name in headers['axis names']:
                points = np.array(headers[name + ' points'])
                pointsshape = [1, ] * len(values.shape)
                for i, ax in enumerate(axes):
                    if ax['name'] == name:
                        pointsshape[i] = len(points)
                        break
                points.shape = pointsshape
                for i in range(points.ndim):
                    if points.shape[i] == 1:
                        points = np.repeat(points, values.shape[i], axis=i)
                values[np.isnan(values)] = points[np.isnan(values)]
            data.create_variable(name, values=values, units=units, label=label)
        if kind == 'channel':
            data.create_channel(name=name, values=values, shape=values.shape)
    # axes
    for a in axes:
        expression = a['identity']
        if expression.startswith('D'):
            expression = expression[1:]
        expression = expression.replace('=D', '=')  # assign the result; str.replace returns a new string
        a['expression'] = expression
    data.transform(*[a['expression'] for a in axes])
    # return
    if verbose:
        print('data created at {0}'.format(data.fullpath))
        print('  axes: {0}'.format(data.axis_names))
        print('  shape: {0}'.format(data.shape))
    return data
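The docstring above spells out the interface; here is a minimal, hedged usage sketch (the file name "scan.data" is hypothetical, and only attributes already printed by the function's own talkback are shown):

# usage sketch, assuming a PyCMDS output file named "scan.data" exists
data = from_PyCMDS("scan.data", name="example_scan", verbose=False)
print(data.axis_names)  # axis expressions set by data.transform above
print(data.shape)       # shape implied by the recorded axis points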
Example #10
import os
import time
import collections

import numpy as np

import tidy_headers

# --- define -------------------------------------------------------------------------------------

here = os.path.abspath(os.path.dirname(__file__))

# --- workspace ----------------------------------------------------------------------------------

filepath = os.path.join(here, "minimal.txt")

# create metadata
meta = collections.OrderedDict()
meta["time"] = time.time()
meta["name"] = "Blaise Thompson"
meta["colors"] = ["red", "blue", "green", "white"]
meta["number"] = 42
meta["array"] = np.random.random((5, 3))

# write metadata
tidy_headers.write(filepath, meta)

# read metadata
print(tidy_headers.read(filepath))
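
Reading the file back should reproduce the metadata written above; a small, hedged round-trip check on the same minimal.txt (the keys are the ones defined in meta):

# round-trip check: values read back should match what was written
read_back = tidy_headers.read(filepath)
assert read_back["name"] == meta["name"]
assert read_back["colors"] == meta["colors"]
assert np.allclose(read_back["array"], meta["array"])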
Example #11
def test_int():
    f = tempfile.NamedTemporaryFile()
    d = {"zero": 0, "four": 4, "large": 1200000, "negative": -1}
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()
Example #12
def from_PyCMDS(filepath,
                name=None,
                parent=None,
                verbose=True,
                *,
                collapse=True) -> Data:
    """Create a data object from a single PyCMDS output file.

    Parameters
    ----------
    filepath : path-like
        Path to the .data file
        Can be either a local or remote file (http/ftp).
        Can be compressed with gz/bz2, decompression based on file name.
    name : str or None (optional)
        The name to be applied to the new data object. If None, name is read
        from file.
    parent : WrightTools.Collection (optional)
        Collection to place new data object within. Default is None.
    verbose : bool (optional)
        Toggle talkback. Default is True.
    collapse : bool (optional, keyword-only)
        Toggle collapsing of recorded values that do not vary along an axis.
        Default is True.

    Returns
    -------
    data
        A Data instance.
    """
    filestr = os.fspath(filepath)

    # header
    ds = np.DataSource(None)
    file_ = ds.open(filestr, "rt")
    headers = tidy_headers.read(file_)
    file_.seek(0)
    # name
    if name is None:  # name not given in method arguments
        data_name = headers["data name"]
    else:
        data_name = name
    if data_name == "":  # name not given in PyCMDS
        data_name = headers["data origin"]
    # create data object
    kwargs = {
        "name": data_name,
        "kind": "PyCMDS",
        "source": filestr,
        "created": headers["file created"],
    }
    if parent is not None:
        data = parent.create_data(**kwargs)
    else:
        data = Data(**kwargs)
    if collapse:
        # array
        arr = np.genfromtxt(file_).T
    # get axes and scanned variables
    axes = []
    for name, identity, units in zip(headers["axis names"],
                                     headers["axis identities"],
                                     headers["axis units"]):
        # points and centers
        points = np.array(headers[name + " points"])
        if name + " centers" in headers.keys():
            centers = headers[name + " centers"]
        else:
            centers = None
        # create
        axis = {
            "points": points,
            "units": units,
            "name": name,
            "identity": identity,
            "centers": centers,
        }
        axes.append(axis)
    shape = tuple([a["points"].size for a in axes])
    for i, ax in enumerate(axes):
        sh = [1] * len(shape)
        sh[i] = len(ax["points"])
        data.create_variable(name=ax["name"] + "_points",
                             values=np.array(ax["points"]).reshape(sh))
        if ax["centers"] is not None:
            centers = np.array(ax["centers"])
            sh = list(shape)
            sh[i] = 1
            for j, s in enumerate(sh):
                if centers.size % s:
                    sh[j] = 1
            data.create_variable(name=ax["name"] + "_centers",
                                 values=np.array(centers.reshape(sh)))
    # get assorted remaining things
    # variables and channels
    try:
        signed = iter(headers["channel signed"])
    except KeyError:
        signed = itertools.repeat(False)
    for index, (kind, name) in enumerate(zip(headers["kind"],
                                             headers["name"])):
        if collapse:
            _collapse_read_in(data, headers, axes, arr, signed, index, kind,
                              name, shape)
        else:
            _no_collapse_create(data, headers, signed, index, kind, name,
                                shape)
    if not collapse:
        _no_collapse_fill(data, headers, file_, shape, verbose)
    file_.close()
    # axes
    for a in axes:
        expression = a["identity"]
        if expression.startswith("D"):
            expression = expression[1:]
        expression.replace("=D", "=")
        a["expression"] = expression
    data.transform(*[a["expression"] for a in axes])
    for a, u in zip(data.axes, headers["axis units"]):
        if u is not None:
            a.convert(u)
    if (headers["system name"] == "fs"
            and int(headers["PyCMDS version"].split(".")[0]) == 0
            and int(headers["PyCMDS version"].split(".")[1]) < 10):
        # in versions of PyCMDS up to (and including) 0.9.0
        # there was an incorrect hard-coded conversion factor between mm and fs
        # this ONLY applied to Newport MFA stages
        # we apply this correction knowing that Newport MFAs were only used on the "fs" system
        # and knowing that the Newport MFAs were always assigned as "d1", "d2" and "d3"
        # ---Blaise 2019-04-09
        for delay in ("d1", "d2", "d3", "d1_points", "d2_points", "d3_points"):
            if delay not in data.variable_names:
                continue
            data[delay][:] *= 6000.671281903963041 / 6671.281903963041
            if verbose:
                print(f"Correction factor applied to {delay}")
    # return
    if verbose:
        print("data created at {0}".format(data.fullpath))
        print("  axes: {0}".format(data.axis_names))
        print("  shape: {0}".format(data.shape))
    return data
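A hedged sketch of the newer call signature; the gzipped file name is hypothetical, and collapse is the keyword-only flag documented above:

# usage sketch: np.DataSource lets from_PyCMDS open local, remote, or gz/bz2-compressed files
data = from_PyCMDS("scan.data.gz", collapse=False, verbose=False)
print(data.axis_names)
print(data.shape)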
Example #13
def test_int():
    f = tempfile.NamedTemporaryFile()
    d = {'zero': 0, 'four': 4, 'large': 1200000, 'negative': -1}
    tidy_headers.write(f.name, d)
    assert_dict_equal(d, tidy_headers.read(f.name))
    f.close()