Example #1
def test_recarray():
    # check roundtrip of structured array
    dt = [('f1', 'f8'),
          ('f2', 'S10')]
    arr = np.zeros((2,), dtype=dt)
    arr[0]['f1'] = 0.5
    arr[0]['f2'] = 'python'
    arr[1]['f1'] = 99
    arr[1]['f2'] = 'not perl'
    stream = BytesIO()
    savemat(stream, {'arr': arr})
    d = loadmat(stream, struct_as_record=False)
    a20 = d['arr'][0,0]
    yield assert_equal, a20.f1, 0.5
    yield assert_equal, a20.f2, 'python'
    d = loadmat(stream, struct_as_record=True)
    a20 = d['arr'][0,0]
    yield assert_equal, a20['f1'], 0.5
    yield assert_equal, a20['f2'], 'python'
    # structs always come back as object types
    yield assert_equal, a20.dtype, np.dtype([('f1', 'O'),
                                             ('f2', 'O')])
    a21 = d['arr'].flat[1]
    yield assert_equal, a21['f1'], 99
    yield assert_equal, a21['f2'], 'not perl'
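The roundtrip above leaves its imports implicit; a minimal, self-contained sketch of the same save/load cycle (with the default struct_as_record=True) might look like this:

from io import BytesIO
import numpy as np
from scipy.io import savemat, loadmat

arr = np.zeros((2,), dtype=[('f1', 'f8'), ('f2', 'S10')])
arr[0] = (0.5, b'python')
arr[1] = (99, b'not perl')

stream = BytesIO()
savemat(stream, {'arr': arr})
stream.seek(0)
d = loadmat(stream)          # struct_as_record=True is the default
print(d['arr'][0, 0]['f1'])  # expected: [[0.5]]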
Example #2
def create_Q_matrix(kappa, folder, ridge=1e-6):
    GiCG = loadmat(folder + "GiCG.mat")["GiCG"]
    G = loadmat(folder + "G.mat")["G"]
    C0 = loadmat(folder + "C0.mat")["C0"]
    
    Q = GiCG + 2 * (kappa ** 2) * G + (kappa ** 4) * C0
    return Q + eye(Q.shape[0], Q.shape[1]) * ridge
Example #3
def test_compression():
    arr = np.zeros(100).reshape((5,20))
    arr[2,10] = 1
    stream = BytesIO()
    savemat(stream, {'arr':arr})
    raw_len = len(stream.getvalue())
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr'], arr
    stream = BytesIO()
    savemat(stream, {'arr':arr}, do_compression=True)
    compressed_len = len(stream.getvalue())
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr'], arr
    yield assert_, raw_len > compressed_len
    # Concatenate, test later
    arr2 = arr.copy()
    arr2[0,0] = 1
    stream = BytesIO()
    savemat(stream, {'arr':arr, 'arr2':arr2}, do_compression=False)
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr2'], arr2
    stream = BytesIO()
    savemat(stream, {'arr':arr, 'arr2':arr2}, do_compression=True)
    vals = loadmat(stream)
    yield assert_array_equal, vals['arr2'], arr2
Example #4
def test_str_round():
    # from report by Angus McMorland on mailing list 3 May 2010
    stream = BytesIO()
    in_arr = np.array(['Hello', 'Foob'])
    out_arr = np.array(['Hello', 'Foob '])
    savemat(stream, dict(a=in_arr))
    res = loadmat(stream)
    # resulted in ['HloolFoa', 'elWrdobr']
    assert_array_equal(res['a'], out_arr)
    stream.truncate(0)
    stream.seek(0)
    # Make Fortran ordered version of string
    in_str = in_arr.tobytes(order='F')
    in_from_str = np.ndarray(shape=in_arr.shape,
                             dtype=in_arr.dtype,
                             order='F',
                             buffer=in_str)
    savemat(stream, dict(a=in_from_str))
    res = loadmat(stream)
    assert_array_equal(res['a'], out_arr)
    # unicode save did lead to buffer too small error
    stream.truncate(0)
    stream.seek(0)
    in_arr_u = in_arr.astype('U')
    out_arr_u = out_arr.astype('U')
    savemat(stream, {'a': in_arr_u})
    res = loadmat(stream)
    assert_array_equal(res['a'], out_arr_u)
Example #5
def test_multiple_open():
    # Ticket #1039, on Windows: check that files are not left open
    tmpdir = mkdtemp()
    try:
        x = dict(x=np.zeros((2, 2)))

        fname = pjoin(tmpdir, "a.mat")

        # Check that file is not left open
        savemat(fname, x)
        os.unlink(fname)
        savemat(fname, x)
        loadmat(fname)
        os.unlink(fname)

        # Check that stream is left open
        f = open(fname, 'wb')
        savemat(f, x)
        f.seek(0)
        f.close()

        f = open(fname, 'rb')
        loadmat(f)
        f.seek(0)
        f.close()
    finally:
        shutil.rmtree(tmpdir)
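Example #5 passes open file objects straight to savemat and loadmat, which leave the caller's stream open; the same idea written with context managers (the path is illustrative) could be sketched as:

import numpy as np
from scipy.io import savemat, loadmat

x = dict(x=np.zeros((2, 2)))
with open('a.mat', 'wb') as f:   # illustrative path
    savemat(f, x)
with open('a.mat', 'rb') as f:
    d = loadmat(f)               # f stays open until the with-block exits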
Example #6
def plot_gaps(name, axis):
    """Takes the file name and coordinate axis and plots the marker gaps"""
    runDict = {}
    # load the matlab data file into a dictionary
    mio.loadmat(name, mdict=runDict)
    # name the rider depending on the filename
    if name[0] == '1':
        rider = 'Jodi'
        runDict['gearing']=runDict['gear']
    elif name[0] == '2':
        rider = 'Victor'
    else:
        rider = 'Jason'
    speed = np.average(runDict['V'])
    # only look at one marker dimension (i.e. 'xori')
    v = np.isnan(runDict[axis])
    # set the figure size
    fig = figure(1, figsize=(15., 9.))
    matshow(v.T, fignum=1, aspect='auto', cmap=cm.gray)
    yticks(range(np.shape(v)[1]), labels)
    xlabel('Sample Number')
    ylabel('Marker gaps in the {axis} coordinate'.format(axis=axis[0]))
    title(('File: {name}, {rider} riding the {bike} at {speed} km/h while' +
            ' {condition} in gear {gear}').format(name=name, rider=rider, bike=runDict['bike'][0],
                speed=str(speed), condition=runDict['condition'][0],
                gear=runDict['gearing'][0][0]))
    fig.savefig('gapImages/'+name[0:-4]+axis[0]+'.png')
    show()
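Example #6 assumes a pylab-style namespace and a module-level labels list; the loadmat call itself illustrates the mdict keyword, which inserts the variables into a caller-supplied dictionary (and also returns it). A small sketch with a made-up variable:

from io import BytesIO
import numpy as np
from scipy.io import savemat, loadmat

stream = BytesIO()
savemat(stream, {'V': np.array([[1.0, 2.0, 3.0]])})
stream.seek(0)

run_dict = {}
loadmat(stream, mdict=run_dict)   # variables are inserted into run_dict
print(run_dict['V'])              # expected: [[1. 2. 3.]]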
Example #7
def test_1d_shape():
    # Current 5 behavior is 1D -> column vector
    arr = np.arange(5)
    stream = BytesIO()
    warn_ctx = WarningManager()
    warn_ctx.__enter__()
    try:
        # silence warnings for tests
        warnings.simplefilter('ignore')
        savemat(stream, {'oned':arr}, format='5')
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (5,1))
        # Current 4 behavior is 1D -> row vector
        stream = BytesIO()
        savemat(stream, {'oned':arr}, format='4')
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (1, 5))
        for format in ('4', '5'):
            # can be explicitly 'column' for oned_as
            stream = BytesIO()
            savemat(stream, {'oned':arr},
                    format=format,
                    oned_as='column')
            vals = loadmat(stream)
            assert_equal(vals['oned'].shape, (5,1))
            # but different from 'row'
            stream = BytesIO()
            savemat(stream, {'oned':arr},
                    format=format,
                    oned_as='row')
            vals = loadmat(stream)
            assert_equal(vals['oned'].shape, (1,5))
    finally:
        warn_ctx.__exit__()
Example #8
def test_1d_shape():
    # Current 5 behavior is 1D -> column vector
    arr = np.arange(5)
    stream = StringIO()
    savemat(stream, {'oned':arr}, format='5')
    vals = loadmat(stream)
    yield assert_equal, vals['oned'].shape, (5,1)
    # Current 4 behavior is 1D -> row vector
    arr = np.arange(5)
    stream = StringIO()
    savemat(stream, {'oned':arr}, format='4')
    vals = loadmat(stream)
    yield assert_equal, vals['oned'].shape, (1, 5)
    for format in ('4', '5'):
        # can be explicitly 'column' for oned_as
        stream = StringIO()
        savemat(stream, {'oned':arr}, 
                format=format,
                oned_as='column')
        vals = loadmat(stream)
        yield assert_equal, vals['oned'].shape, (5,1)
        # but different from 'row'
        stream = StringIO()
        savemat(stream, {'oned':arr}, 
                format=format,
                oned_as='row')
        vals = loadmat(stream)
        yield assert_equal, vals['oned'].shape, (1,5)
Example #9
def test_mat_struct_squeeze():
    stream = BytesIO()
    in_d = {"st": {"one": 1, "two": 2}}
    savemat_future(stream, in_d)
    # no error without squeeze
    out_d = loadmat(stream, struct_as_record=False)
    # previous error was with squeeze, with mat_struct
    out_d = loadmat(stream, struct_as_record=False, squeeze_me=True)
Example #10
def load_ozone_data(folder):
    # actual observations
    y = loadmat(folder + "y.mat")["y"][:, 0]
    assert(y.ndim == 1)
    
    # triangulation of globe
    A = loadmat(folder + "A.mat")["A"]
    
    return y, A
Example #11
 def create_Q_matrix(self, kappa):
     folder = OzonePosterior.get_data_folder()
     
     GiCG = loadmat(folder + "GiCG.mat")["GiCG"]
     G = loadmat(folder + "G.mat")["G"]
     C0 = loadmat(folder + "C0.mat")["C0"]
     
     Q = GiCG + 2 * (kappa ** 2) * G + (kappa ** 4) * C0
     return Q + eye(Q.shape[0], Q.shape[1]) * OzonePosterior.ridge
Example #12
 def load_ozone_data():
     folder = OzonePosterior.get_data_folder()
     
     y = loadmat(folder + "y.mat")["y"][:, 0]
     assert(len(shape(y)) == 1)
     
     A = loadmat(folder + "A.mat")["A"]
     
     return y, A
Example #13
def test_miutf8_for_miint8_compromise():
    # Check reader accepts ascii as miUTF8 for array names
    filename = pjoin(test_data_path, 'miutf8_array_name.mat')
    res = loadmat(filename)
    assert_equal(res['array_name'], [[1]])
    # mat file with non-ascii utf8 name raises error
    filename = pjoin(test_data_path, 'bad_miutf8_array_name.mat')
    with assert_raises(ValueError):
        loadmat(filename)
Example #14
def test_warnings():
    fname = pjoin(test_data_path, 'testdouble_7.1_GLNX86.mat')
    warnings.simplefilter('error')
    # This should not generate a warning
    mres = loadmat(fname, struct_as_record=True)
    # This neither
    mres = loadmat(fname, struct_as_record=False)
    # This should - because of deprecated system path search
    yield assert_raises, DeprecationWarning, find_mat_file, fname
    warnings.resetwarnings()
Example #15
def test_loadmat_varnames():
    # Test that we can get just one variable from a mat file using loadmat
    eg_file = pjoin(test_data_path, "testmulti_7.4_GLNX86.mat")
    sys_v_names = ["__globals__", "__header__", "__version__"]
    vars = loadmat(eg_file)
    assert_equal(set(vars.keys()), set(["a", "theta"] + sys_v_names))
    vars = loadmat(eg_file, variable_names=["a"])
    assert_equal(set(vars.keys()), set(["a"] + sys_v_names))
    vars = loadmat(eg_file, variable_names=["theta"])
    assert_equal(set(vars.keys()), set(["theta"] + sys_v_names))
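The variable_names keyword used in Example #15 also works on in-memory streams; a brief sketch with hypothetical variable names:

from io import BytesIO
import numpy as np
from scipy.io import savemat, loadmat

stream = BytesIO()
savemat(stream, {'a': np.eye(2), 'theta': np.arange(3.0)})
stream.seek(0)
only_a = loadmat(stream, variable_names=['a'])
print([k for k in only_a if not k.startswith('__')])   # expected: ['a']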
Example #16
def test_miuint32_compromise():
    # Reader should accept miUINT32 for miINT32, but check signs
    # mat file with miUINT32 for miINT32, but OK values
    filename = pjoin(test_data_path, 'miuint32_for_miint32.mat')
    res = loadmat(filename)
    assert_equal(res['an_array'], np.arange(10)[None, :])
    # mat file with miUINT32 for miINT32, with negative value
    filename = pjoin(test_data_path, 'bad_miuint32.mat')
    with assert_raises(ValueError):
        loadmat(filename)
Example #17
def test_mat_struct_squeeze():
    stream = BytesIO()
    in_d = {'st':{'one':1, 'two':2}}
    savemat(stream, in_d)
    # no error without squeeze
    out_d = loadmat(stream, struct_as_record=False)
    # previous error was with squeeze, with mat_struct
    out_d = loadmat(stream,
                    struct_as_record=False,
                    squeeze_me=True,
                    )
Example #18
def test_warnings():
    # This test is an echo of the previous behavior, which was to raise a
    # warning if the user triggered a search for mat files on the Python system
    # path.  We can remove the test in the next version after upcoming (0.13)
    fname = pjoin(test_data_path, 'testdouble_7.1_GLNX86.mat')
    with warnings.catch_warnings():
        warnings.simplefilter('error')
        # This should not generate a warning
        mres = loadmat(fname, struct_as_record=True)
        # This neither
        mres = loadmat(fname, struct_as_record=False)
Example #19
def test_warnings():
    fname = pjoin(test_data_path, 'testdouble_7.1_GLNX86.mat')
    warn_ctx = WarningManager()
    warn_ctx.__enter__()
    try:
        warnings.simplefilter('error')
        # This should not generate a warning
        mres = loadmat(fname, struct_as_record=True)
        # This neither
        mres = loadmat(fname, struct_as_record=False)
        # This should - because of deprecated system path search
        assert_raises(DeprecationWarning, find_mat_file, fname)
    finally:
        warn_ctx.__exit__()
Example #20
def test_save_object():
    class C(object): pass
    c = C()
    c.field1 = 1
    c.field2 = 'a string'
    stream = BytesIO()
    savemat_future(stream, {'c': c})
    d = loadmat(stream, struct_as_record=False)
    c2 = d['c'][0,0]
    assert_equal(c2.field1, 1)
    assert_equal(c2.field2, 'a string')
    d = loadmat(stream, struct_as_record=True)
    c2 = d['c'][0,0]
    assert_equal(c2['field1'], 1)
    assert_equal(c2['field2'], 'a string')
Example #21
def test_save_object():
    class C(object): pass
    c = C()
    c.field1 = 1
    c.field2 = 'a string'
    stream = StringIO()
    savemat(stream, {'c': c})
    d = loadmat(stream, struct_as_record=False)
    c2 = d['c'][0,0]
    yield assert_equal, c2.field1, 1
    yield assert_equal, c2.field2, 'a string'
    d = loadmat(stream, struct_as_record=True)
    c2 = d['c'][0,0]
    yield assert_equal, c2['field1'], 1
    yield assert_equal, c2['field2'], 'a string'
Example #22
def test_gzip_simple():
    xdense = np.zeros((20,20))
    xdense[2,3] = 2.3
    xdense[4,5] = 4.5
    x = SP.csc_matrix(xdense)

    name = 'gzip_test'
    expected = {'x':x}
    format = '4'

    tmpdir = mkdtemp()
    try:
        fname = pjoin(tmpdir,name)
        mat_stream = gzip.open(fname,mode='wb')
        savemat(mat_stream, expected, format=format)
        mat_stream.close()

        mat_stream = gzip.open(fname,mode='rb')
        actual = loadmat(mat_stream, struct_as_record=True)
        mat_stream.close()
    finally:
        shutil.rmtree(tmpdir)

    assert_array_almost_equal(actual['x'].todense(),
                              expected['x'].todense(),
                              err_msg=repr(actual))
Example #23
def test_skip_variable():
    # Test skipping over the first of two variables in a MAT file
    # using mat_reader_factory and put_variables to read them in.
    #
    # This is a regression test of a problem that's caused by
    # using the compressed file reader seek instead of the raw file
    # I/O seek when skipping over a compressed chunk.
    #
    # The problem arises when the chunk is large: this file has
    # a 256x256 array of random (uncompressible) doubles.
    #
    filename = pjoin(test_data_path,'test_skip_variable.mat')
    #
    # Prove that it loads with loadmat
    #
    d = loadmat(filename, struct_as_record=True)
    yield assert_, 'first' in d
    yield assert_, 'second' in d
    #
    # Make the factory
    #
    factory = mat_reader_factory(filename, struct_as_record=True)
    #
    # This is where the factory breaks with an error in MatMatrixGetter.to_next
    #
    d = factory.get_variables('second')
    yield assert_, 'second' in d
    factory.mat_stream.close()
Example #24
def test_unicode_mat4():
    # Mat4 should save unicode as latin1
    bio = BytesIO()
    var = {'second_cat': u('Schrödinger')}
    savemat(bio, var, format='4')
    var_back = loadmat(bio)
    assert_equal(var_back['second_cat'], var['second_cat'])
Example #25
def test_fieldnames():
    # Check that field names are as expected
    stream = BytesIO()
    savemat(stream, {'a': {'a':1, 'b':2}})
    res = loadmat(stream)
    field_names = res['a'].dtype.names
    assert_equal(set(field_names), set(('a', 'b')))
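Example #25 checks only the field names; reading the field values back out means indexing into the 1x1 struct array first. A sketch under the same default struct_as_record=True:

from io import BytesIO
from scipy.io import savemat, loadmat

stream = BytesIO()
savemat(stream, {'a': {'a': 1, 'b': 2}})
stream.seek(0)
res = loadmat(stream)
elem = res['a'][0, 0]                      # the single struct element
print(elem['a'][0, 0], elem['b'][0, 0])    # expected: 1 2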
Example #26
def test_save_dict():
    # Test that dict can be saved (as recarray), loaded as matstruct
    d = {'a':1, 'b':2}
    stream = StringIO()
    savemat(stream, {'dict':d})
    stream.seek(0)
    vals = loadmat(stream)
Example #27
def test_save_dict():
    # Test that dict can be saved (as recarray), loaded as matstruct
    dict_types = ((dict, False),)
    try:
        from collections import OrderedDict
    except ImportError:
        pass
    else:
        dict_types += ((OrderedDict, True),)
    ab_exp = np.array([[(1, 2)]], dtype=[('a', object), ('b', object)])
    ba_exp = np.array([[(2, 1)]], dtype=[('b', object), ('a', object)])
    for dict_type, is_ordered in dict_types:
        # Initialize with tuples to keep order for OrderedDict
        d = dict_type([('a', 1), ('b', 2)])
        stream = BytesIO()
        savemat(stream, {'dict': d})
        stream.seek(0)
        vals = loadmat(stream)['dict']
        assert_equal(set(vals.dtype.names), set(['a', 'b']))
        if is_ordered:  # Input was ordered, output in ab order
            assert_array_equal(vals, ab_exp)
        else:  # Not ordered input, either order output
            if vals.dtype.names[0] == 'a':
                assert_array_equal(vals, ab_exp)
            else:
                assert_array_equal(vals, ba_exp)
Example #28
def test_multiple_fieldnames():
    # Example provided by Dharhas Pothina
    # Extracted using mio5.varmats_from_mat
    multi_fname = pjoin(TEST_DATA_PATH, "nasty_duplicate_fieldnames.mat")
    vars = loadmat(multi_fname)
    funny_names = vars["Summary"].dtype.names
    assert_(set(["_1_Station_Q", "_2_Station_Q", "_3_Station_Q"]).issubset(funny_names))
Example #29
def test_warnings():
    fname = join(test_data_path, 'testdouble_7.1_GLNX86.mat')
    warnings.simplefilter('error')
    # This should not generate a warning
    mres = loadmat(fname, struct_as_record=True)
    # This neither
    mres = loadmat(fname, struct_as_record=False)
    # This should
    yield assert_raises, FutureWarning, loadmat, fname
    # This too
    yield assert_raises, FutureWarning, find_mat_file, fname
    # we need kwargs for this one
    yield (lambda a, k: assert_raises(*a, **k),
          (DeprecationWarning, loadmat, fname),
          {'struct_as_record':True, 'basename':'raw'})
    warnings.resetwarnings()
Example #30
 def readSamples(self, fileName, key,recalc=False,samples=None):
     fn = fileName + ".pre"
     try:
         if recalc: raise IOError()
         with open(fn): pass
         print "precalculated file present"
         self.mu, self.cov = hsplit(mat(fromfile(fn).reshape((3,-1))),[1])
     except IOError:
         if samples != None:
             self._samples = samples
             print "got samples: " , self._samples
         else:
             print "no file present, calculating..."
             smpls = loadmat(fileName)[key]
             print "loaded from mat file"
             self._samples = mat(smpls)
             print "reshaped into samples"
         self.mu = sum(self._samples, axis=1) / self._samples.shape[1]
         print "mu=", str(self.mu)
         sampdiffmu = self._samples - self.mu
         self.cov = sampdiffmu*sampdiffmu.T / self._samples.shape[1]
         print "cov=", str(self.cov)
         mat(hstack((self.mu,self.cov))).tofile(fn)
     self._invCov = self.cov.I
     self._detCov = det(self.cov)
     self._multConst = 1 / sqrt((2 * pi) ** 3 * self._detCov)
Example #31
def test_empty_struct():
    # ticket 885
    filename = pjoin(test_data_path, 'test_empty_struct.mat')
    # before ticket fix, this would crash with ValueError, empty data
    # type
    d = loadmat(filename, struct_as_record=True)
    a = d['a']
    assert_equal(a.shape, (1, 1))
    assert_equal(a.dtype, np.dtype(object))
    assert_(a[0, 0] is None)
    stream = BytesIO()
    arr = np.array((), dtype='U')
    # before ticket fix, this used to give data type not understood
    savemat(stream, {'arr': arr})
    d = loadmat(stream)
    a2 = d['arr']
    assert_array_equal(a2, arr)
Example #32
def test_scalar_squeeze():
    stream = BytesIO()
    in_d = {'scalar': [[0.1]], 'string': 'my name', 'st': {'one': 1, 'two': 2}}
    savemat(stream, in_d)
    out_d = loadmat(stream, squeeze_me=True)
    assert_(isinstance(out_d['scalar'], float))
    assert_(isinstance(out_d['string'], string_types))
    assert_(isinstance(out_d['st'], np.ndarray))
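Example #32 relies on squeeze_me=True to unwrap the 1x1 MATLAB containers; without it the same scalar comes back as a (1, 1) array. A minimal sketch:

from io import BytesIO
from scipy.io import savemat, loadmat

stream = BytesIO()
savemat(stream, {'scalar': 0.1})
stream.seek(0)
print(loadmat(stream)['scalar'].shape)             # expected: (1, 1)
stream.seek(0)
print(loadmat(stream, squeeze_me=True)['scalar'])  # expected: 0.1 as a plain scalar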
Example #33
def test_save_object():
    class C(object):
        pass

    c = C()
    c.field1 = 1
    c.field2 = 'a string'
    stream = BytesIO()
    savemat(stream, {'c': c})
    d = loadmat(stream, struct_as_record=False)
    c2 = d['c'][0, 0]
    assert_equal(c2.field1, 1)
    assert_equal(c2.field2, 'a string')
    d = loadmat(stream, struct_as_record=True)
    c2 = d['c'][0, 0]
    assert_equal(c2['field1'], 1)
    assert_equal(c2['field2'], 'a string')
Example #34
def test_sparse_in_struct():
    # reproduces bug found by DC where Cython code was insisting on
    # ndarray return type, but getting sparse matrix
    st = {'sparsefield': SP.coo_matrix(np.eye(4))}
    stream = BytesIO()
    savemat(stream, {'a': st})
    d = loadmat(stream, struct_as_record=True)
    yield assert_array_equal, d['a'][0, 0]['sparsefield'].todense(), np.eye(4)
Example #35
def _load_check_case(name, files, case):
    for file_name in files:
        matdict = loadmat(file_name, struct_as_record=True)
        label = "test %s; file %s" % (name, file_name)
        for k, expected in case.items():
            k_label = "%s, variable %s" % (label, k)
            assert_(k in matdict, "Missing key at %s" % k_label)
            _check_level(k_label, expected, matdict[k])
Example #36
def test_save_empty_dict():
    # saving empty dict also gives empty struct
    stream = BytesIO()
    savemat(stream, {'arr': {}})
    d = loadmat(stream)
    a = d['arr']
    assert_equal(a.shape, (1, 1))
    assert_equal(a.dtype, np.dtype(object))
    assert_(a[0, 0] is None)
Example #37
def test_miutf8_for_miint8_compromise():
    # Check reader accepts ascii as miUTF8 for array names
    filename = pjoin(test_data_path, 'miutf8_array_name.mat')
    res = loadmat(filename)
    assert_equal(res['array_name'], [[1]])
    # mat file with non-ascii utf8 name raises error
    filename = pjoin(test_data_path, 'bad_miutf8_array_name.mat')
    with warnings.catch_warnings(record=True):  # Py3k ResourceWarning
        assert_raises(ValueError, loadmat, filename)
Example #38
 def load_markers(matfile, label_file_path, fs):
     mrk_in_samples = matfile['mrk']['pos'][0, 0].squeeze()
     mrk_in_ms = mrk_in_samples * 1000.0 / fs
     mrk_code = matfile['mrk']['y'][0, 0].squeeze()
     labels = loadmat(label_file_path)
     true_y = labels['true_y'].squeeze()
     assert len(true_y) == len(mrk_code)
     mrk_code = true_y
     return mrk_in_ms, mrk_code
Example #39
def test_miuint32_compromise():
    # Reader should accept miUINT32 for miINT32, but check signs
    # mat file with miUINT32 for miINT32, but OK values
    filename = pjoin(test_data_path, 'miuint32_for_miint32.mat')
    res = loadmat(filename)
    assert_equal(res['an_array'], np.arange(10)[None, :])
    # mat file with miUINT32 for miINT32, with negative value
    filename = pjoin(test_data_path, 'bad_miuint32.mat')
    assert_raises(ValueError, loadmat, filename)
Example #40
def test_warnings():
    fname = join(test_data_path, 'testdouble_7.1_GLNX86.mat')
    warnings.simplefilter('error')
    # This should not generate a warning
    mres = loadmat(fname, struct_as_record=True)
    # This neither
    mres = loadmat(fname, struct_as_record=False)
    # This should
    yield assert_raises, FutureWarning, loadmat, fname
    # This too
    yield assert_raises, FutureWarning, find_mat_file, fname
    # we need kwargs for this one
    yield (lambda a, k: assert_raises(*a, **k), (DeprecationWarning, loadmat,
                                                 fname), {
                                                     'struct_as_record': True,
                                                     'basename': 'raw'
                                                 })
    warnings.resetwarnings()
Example #41
def test_multiple_fieldnames():
    # Example provided by Dharhas Pothina
    # Extracted using mio5.varmats_from_mat
    multi_fname = pjoin(TEST_DATA_PATH, 'nasty_duplicate_fieldnames.mat')
    vars = loadmat(multi_fname)
    funny_names = vars['Summary'].dtype.names
    assert_(
        set(['_1_Station_Q', '_2_Station_Q',
             '_3_Station_Q']).issubset(funny_names))
Example #42
def test_1d_shape():
    # New 5 behavior is 1D -> row vector
    arr = np.arange(5)
    for format in ('4', '5'):
        # Column is the default
        stream = BytesIO()
        savemat(stream, {'oned': arr}, format=format)
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (1, 5))
        # can be explicitly 'column' for oned_as
        stream = BytesIO()
        savemat(stream, {'oned': arr}, format=format, oned_as='column')
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (5, 1))
        # but different from 'row'
        stream = BytesIO()
        savemat(stream, {'oned': arr}, format=format, oned_as='row')
        vals = loadmat(stream)
        assert_equal(vals['oned'].shape, (1, 5))
Example #43
def test_regression_653():
    # Saving a dictionary with only invalid keys used to raise an error. Now we
    # save this as an empty struct in matlab space.
    sio = BytesIO()
    savemat(sio, {'d': {1: 2}}, format='5')
    back = loadmat(sio)['d']
    # Check we got an empty struct equivalent
    assert_equal(back.shape, (1, 1))
    assert_equal(back.dtype, np.dtype(object))
    assert_(back[0, 0] is None)
Example #44
def test_miutf8_for_miint8_compromise():
    # Check reader accepts ascii as miUTF8 for array names
    filename = pjoin(test_data_path, 'miutf8_array_name.mat')
    res = loadmat(filename)
    assert_equal(res['array_name'], [[1]])
    # mat file with non-ascii utf8 name raises error
    filename = pjoin(test_data_path, 'bad_miutf8_array_name.mat')
    with suppress_warnings() as sup:
        sup.filter(message="unclosed file")  # Py3k ResourceWarning
        assert_raises(ValueError, loadmat, filename)
Example #45
def test_empty_sparse():
    # Can we read empty sparse matrices?
    sio = BytesIO()
    import scipy.sparse
    empty_sparse = scipy.sparse.csr_matrix([[0, 0], [0, 0]])
    savemat(sio, dict(x=empty_sparse))
    sio.seek(0)
    res = loadmat(sio)
    assert_array_equal(res['x'].shape, empty_sparse.shape)
    assert_array_equal(res['x'].todense(), 0)
Example #46
def test_miuint32_compromise():
    # Reader should accept miUINT32 for miINT32, but check signs
    # mat file with miUINT32 for miINT32, but OK values
    filename = pjoin(test_data_path, 'miuint32_for_miint32.mat')
    res = loadmat(filename)
    assert_equal(res['an_array'], np.arange(10)[None, :])
    # mat file with miUINT32 for miINT32, with negative value
    filename = pjoin(test_data_path, 'bad_miuint32.mat')
    with suppress_warnings() as sup:
        sup.filter(message="unclosed file")  # Py3k ResourceWarning
        assert_raises(ValueError, loadmat, filename)
Example #47
def test_round_types():
    # Check that saving, loading preserves dtype in most cases
    arr = np.arange(10)
    stream = BytesIO()
    for dts in ('f8','f4','i8','i4','i2','i1',
                'u8','u4','u2','u1','c16','c8'):
        stream.truncate(0)
        stream.seek(0) # needed for BytesIO in python 3
        savemat_future(stream, {'arr': arr.astype(dts)})
        vars = loadmat(stream)
        assert_equal(np.dtype(dts), vars['arr'].dtype)
Example #48
    def load(self):
        matfile = loadmat(self.filename)
        cnt, fs = self.load_signal(matfile)

        # load markers
        mrk_in_ms, mrk_codes = self.load_markers(matfile, self.label_file_path,
                                                 fs, len(cnt.data))

        cnt.markers = zip(mrk_in_ms, mrk_codes)
        assert fs == 512
        cnt.fs = fs
        return cnt
Example #49
    def test_input_simulation(self):
        """
        This tests that input simulation works.
        """
        self.m_SENS = JMUModel('QuadTankSens.jmu')
        self.SENS = JMIDAESens(self.m_SENS)
        
        path_result = os.path.join(get_files_path(), 'Results', 
                                'qt_par_est_data.mat')
        
        data = loadmat(path_result,appendmat=False)

        # Extract data series  
        t_meas = data['t'][6000::100,0]-60  
        u1 = data['u1_d'][6000::100,0]
        u2 = data['u2_d'][6000::100,0]
                
        # Build input trajectory matrix for use in simulation
        u_data = N.transpose(N.vstack((t_meas,u1,u2)))

        u_traj = TrajectoryLinearInterpolation(u_data[:,0], 
                            u_data[:,1:])

        input_object = (['u1','u2'], u_traj)
        
        qt_mod = JMIDAESens(self.m_SENS, input_object)

        qt_sim = IDA(qt_mod)

        #Store data continuously during the simulation, important when solving a
        #problem with sensitivities.
        qt_sim.report_continuously = True 
            
        #Value used when IDA estimates the tolerances on the parameters
        qt_sim.pbar = qt_mod.p0 
            
        #Let Sundials find consistent initial conditions by use of 'IDA_YA_YDP_INIT'
        qt_sim.make_consistent('IDA_YA_YDP_INIT')
            
        #Simulate
        qt_sim.simulate(60) #Simulate for 60 seconds

        #write_data(qt_sim)

        res = ResultDymolaTextual('QuadTankSens_result.txt')
    
        dx1da1 = res.get_variable_data('dx1/da1')
        dx1da2 = res.get_variable_data('dx1/da2')
        dx4da1 = res.get_variable_data('dx4/da1')
        
        nose.tools.assert_almost_equal(dx1da2.x[0], 0.000000, 4)
        nose.tools.assert_almost_equal(dx1da2.x[-1], 0.00000, 4)
Example #50
def test_save_dict():
    # Test that both dict and OrderedDict can be saved (as recarray),
    # loaded as matstruct, and preserve order
    ab_exp = np.array([[(1, 2)]], dtype=[('a', object), ('b', object)])
    for dict_type in (dict, OrderedDict):
        # Initialize with tuples to keep order
        d = dict_type([('a', 1), ('b', 2)])
        stream = BytesIO()
        savemat(stream, {'dict': d})
        stream.seek(0)
        vals = loadmat(stream)['dict']
        assert_equal(vals.dtype.names, ('a', 'b'))
        assert_array_equal(vals, ab_exp)
Example #51
def test_mat4_3d():
    # test behavior when writing 3D arrays to matlab 4 files
    stream = BytesIO()
    arr = np.arange(24).reshape((2, 3, 4))
    warnings.simplefilter('error')
    assert_raises(DeprecationWarning, savemat_future, stream, {'a': arr}, True,
                  '4')
    warnings.resetwarnings()
    # For now, we save a 3D array as 2D
    warnings.simplefilter('ignore')
    savemat_future(stream, {'a': arr}, format='4')
    warnings.resetwarnings()
    d = loadmat(stream)
    assert_array_equal(d['a'], arr.reshape((6, 4)))
Example #52
def test_recarray():
    # check roundtrip of structured array
    dt = [('f1', 'f8'), ('f2', 'S10')]
    arr = np.zeros((2, ), dtype=dt)
    arr[0]['f1'] = 0.5
    arr[0]['f2'] = 'python'
    arr[1]['f1'] = 99
    arr[1]['f2'] = 'not perl'
    stream = BytesIO()
    savemat(stream, {'arr': arr})
    d = loadmat(stream, struct_as_record=False)
    a20 = d['arr'][0, 0]
    assert_equal(a20.f1, 0.5)
    assert_equal(a20.f2, 'python')
    d = loadmat(stream, struct_as_record=True)
    a20 = d['arr'][0, 0]
    assert_equal(a20['f1'], 0.5)
    assert_equal(a20['f2'], 'python')
    # structs always come back as object types
    assert_equal(a20.dtype, np.dtype([('f1', 'O'), ('f2', 'O')]))
    a21 = d['arr'].flat[1]
    assert_equal(a21['f1'], 99)
    assert_equal(a21['f2'], 'not perl')
Example #53
def test_1d_shape():
    # Current 5 behavior is 1D -> column vector
    arr = np.arange(5)
    stream = StringIO()
    savemat(stream, {'oned': arr}, format='5')
    vals = loadmat(stream)
    yield assert_equal, vals['oned'].shape, (5, 1)
    # Current 4 behavior is 1D -> row vector
    arr = np.arange(5)
    stream = StringIO()
    savemat(stream, {'oned': arr}, format='4')
    vals = loadmat(stream)
    yield assert_equal, vals['oned'].shape, (1, 5)
    for format in ('4', '5'):
        # can be explicitly 'column' for oned_as
        stream = StringIO()
        savemat(stream, {'oned': arr}, format=format, oned_as='column')
        vals = loadmat(stream)
        yield assert_equal, vals['oned'].shape, (5, 1)
        # but different from 'row'
        stream = StringIO()
        savemat(stream, {'oned': arr}, format=format, oned_as='row')
        vals = loadmat(stream)
        yield assert_equal, vals['oned'].shape, (1, 5)
Example #54
def load_matlab(filename, keys):
    mat = loadmat(filename)
    dat = []
    if type(keys) == str:
        keys = [keys]
    for key in keys:
        if key not in mat:
            print('%s not present in %s, skipping.' % (key, filename))
            continue
        dat.append(mat[key])

    if len(dat) == 1:
        return dat[0]

    return dat
Example #55
def run_demo(with_plots=True):
    # Load measurement data from file
    data = loadmat(os.path.join(curr_dir, 'files', 'FurutaData.mat'), appendmat=False)

    # Extract data series
    t_meas = data['time'][:,0]
    phi_meas = data['phi'][:,0]
    theta_meas = data['theta'][:,0]
    data = N.array([t_meas, phi_meas, theta_meas]).transpose()

    #Compile the model
    name = compile_fmu("Furuta", os.path.join(curr_dir, 'files', 'Furuta.mo'))

    model = load_fmu(name)

    res_opt = model.estimate(parameters=["armFriction", "pendulumFriction"],
                         measurements = (['armJoint.phi', 'pendulumJoint.phi'], data))

    # Set optimal parameter values into the model
    model.set('armFriction', res_opt["armFriction"])
    model.set('pendulumFriction', res_opt["pendulumFriction"])

    opts = model.simulate_options()
    opts['filter'] = ['armJoint.phi', 'pendulumJoint.phi']

    # Simulate model response with optimal parameter values
    res = model.simulate(start_time=0., final_time=40)

    # Load optimal simulation result
    phi_opt = res['armJoint.phi']
    theta_opt = res['pendulumJoint.phi']
    t_opt  = res['time']

    assert N.abs(res.final('armJoint.phi') + 0.313)      < 3e-3
    assert N.abs(res.final('pendulumJoint.phi') - 3.130) < 3e-3
    assert N.abs(res.final('time') - 40.0)               < 1e-3

    if with_plots:
        plt.figure(1)
        plt.subplot(2,1,1)
        plt.plot(t_opt, theta_opt, linewidth=1, label='Simulation optimal parameters')
        plt.plot(t_meas, theta_meas, linewidth=1, label='Physical data')
        plt.legend()
        plt.subplot(2,1,2)
        plt.plot(t_opt, phi_opt, linewidth=1, label='Simulation optimal parameters')
        plt.plot(t_meas, phi_meas, linewidth=1, label='Physical data')
        plt.legend()
        plt.show()
Example #56
    def load(self):
        matfile = loadmat(self.filename)
        cnt, fs = self.load_signal(matfile)

        # load markers
        if self.label_file_path is None:
            mrk_in_ms, mrk_codes = self.load_markers_from_signal_file(
                matfile, fs)
        else:
            mrk_in_ms, mrk_codes = self.load_markers_from_label_file(
                self.label_file_path, fs)

        cnt.markers = zip(mrk_in_ms, mrk_codes)
        assert fs == 1000
        cnt.fs = fs
        return cnt
Example #57
def get_results(**kwargs):
    """
    Gets the labels from precomputed clustering
    """
    configuration = {
        'path': '/media/robbis/DATA/fmri/movie_viviana/',
        'band': 'alpha',
        'filetype': 'masked',
        'fname': 'mat_corr_sub_%s.mat',
        'conditions': ['movie', 'scramble', 'rest'],
        'state_res_fname': "clustering_labels_%s_maxk_%s_%s_%s.pyobj",
        'max_k': 15,
        'method': 'variance+mean',
        'filter': 'none'
    }

    configuration.update(kwargs)

    path = os.path.join(configuration['path'], configuration['band'])
    filetype = configuration['filetype']
    method = configuration['method']
    max_k = configuration['max_k']
    conditions = configuration['conditions']
    filter_type = configuration['filter']

    clustering = dict()
    X = dict()

    for condition in conditions:

        path_cluster = "%s/%s/%s/%s" % (filetype, method, filter_type,
                                        condition)
        path_cluster = os.path.join(path, path_cluster)
        fname = configuration['state_res_fname'] % (condition, str(max_k),
                                                    method, filetype)

        path_file = os.path.join(path_cluster, fname)
        logger.info("Reading results from " + path_file)
        clustering_ = pickle.load(open(path_file, 'rb'))
        clustering[condition] = clustering_

        X[condition] = loadmat(os.path.join(
            path_cluster, "filtered_data.mat"))['filtered_data']

    return X, clustering
Example #58
def test_empty_sparse():
    # Can we read empty sparse matrices?
    sio = BytesIO()
    import scipy.sparse
    empty_sparse = scipy.sparse.csr_matrix([[0, 0], [0, 0]])
    savemat(sio, dict(x=empty_sparse))
    sio.seek(0)
    res = loadmat(sio)
    assert_array_equal(res['x'].shape, empty_sparse.shape)
    assert_array_equal(res['x'].todense(), 0)
    # Do empty sparse matrices get written with max nnz 1?
    # See https://github.com/scipy/scipy/issues/4208
    sio.seek(0)
    reader = MatFile5Reader(sio)
    reader.initialize_read()
    reader.read_file_header()
    hdr, _ = reader.read_var_header()
    assert_equal(hdr.nzmax, 1)
Example #59
def test_varmats_from_mat():
    # Make a mat file with several variables, write it, read it back
    names_vars = (('arr', mlarr(np.arange(10))),
                  ('mystr', mlarr('a string')),
                  ('mynum', mlarr(10)))
    # Dict like thing to give variables in defined order
    class C(object):
        def items(self): return names_vars
    stream = BytesIO()
    savemat_future(stream, C())
    varmats = varmats_from_mat(stream)
    assert_equal(len(varmats), 3)
    for i in range(3):
        name, var_stream = varmats[i]
        exp_name, exp_res = names_vars[i]
        assert_equal(name, exp_name)
        res = loadmat(var_stream)
        assert_array_equal(res[name], exp_res)
Example #60
def test_logical_sparse():
    # Test we can read logical sparse stored in mat file as bytes.
    # See https://github.com/scipy/scipy/issues/3539.
    # In some files saved by MATLAB, the sparse data elements (Real Part
    # Subelement in MATLAB speak) are stored with apparent type double
    # (miDOUBLE) but are in fact single bytes.
    filename = pjoin(test_data_path, 'logical_sparse.mat')
    # Before fix, this would crash with:
    # ValueError: indices and data should have the same size
    d = loadmat(filename, struct_as_record=True)
    log_sp = d['sp_log_5_4']
    assert_(isinstance(log_sp, SP.csc_matrix))
    assert_equal(log_sp.dtype.type, np.bool_)
    assert_array_equal(
        log_sp.toarray(),
        [[True, True, True, False], [False, False, True, False],
         [False, False, True, False], [False, False, False, False],
         [False, False, False, False]])