Example #1
File: test_hdf5.py Project: arokem/PyMVPA
def test_directaccess():
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, "test")
    assert_equal(h5load(f.name), "test")
    f.close()
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, datasets["uni4medium"])
    assert_array_equal(h5load(f.name).samples, datasets["uni4medium"].samples)
Example #2
def test_directaccess():
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, 'test')
    assert_equal(h5load(f.name), 'test')
    f.close()
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, datasets['uni4medium'])
    assert_array_equal(h5load(f.name).samples, datasets['uni4medium'].samples)
Example #3
def test_recursion():
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, obj)
    lobj = h5load(f.name)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #4
File: test_hdf5.py Project: B-Rich/PyMVPA
def test_recursion():
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, obj)
    lobj = h5load(f.name)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #5
File: test_hdf5.py Project: geeragh/PyMVPA
def test_function_ptrs():
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, ds)
    ds_loaded = h5load(f.name)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh),
                        ds.samples)
Example #6
def test_function_ptrs():
    if not externals.exists('nifti') and not externals.exists('nibabel'):
        raise SkipTest
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, ds)
    ds_loaded = h5load(f.name)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh), ds.samples)
Example #7
def test_h5py_clfs(lrn):
    # lets simply clone it so we could make its all states on
    lrn = lrn.clone()
    # Lets enable all the states
    lrn.ca.enable('all')

    f = tempfile.NamedTemporaryFile()

    # Store/reload untrained learner
    try:
        h5save(f.name, lrn)
    except Exception, e:
        raise AssertionError, \
              "Failed to store due to %r" % (e,)
Example #8
File: test_hdf5_clf.py Project: esc/PyMVPA
def test_h5py_clfs(lrn):
    # lets simply clone it so we could make its all states on
    lrn = lrn.clone()
    # Lets enable all the states
    lrn.ca.enable('all')

    f = tempfile.NamedTemporaryFile()

    # Store/reload untrained learner
    try:
        h5save(f.name, lrn)
    except Exception, e:
        raise AssertionError, \
              "Failed to store due to %r" % (e,)
Example #9
File: test_hdf5.py Project: geeragh/PyMVPA
def _test_h5py_clfs():
    # YOH: For now just to see which ones work (could be stored/loaded)
    #      Later on to become a proper valid test
    from mvpa.clfs.warehouse import clfswh, regrswh

    for lrn in clfswh[:] + regrswh[:]:
        print lrn
        f = tempfile.NamedTemporaryFile()
        try:
            h5save(f.name, lrn)
            lrn_ = h5load(f.name)
            print "ok: %s" % lrn_
        except Exception, e:
            #raise AssertionError,
            print "Failed to store %s due to %r" % (lrn, e)
Example #10
def test_h5py_dataset_typecheck():
    ds = datasets['uni2small']

    _, fpath = tempfile.mkstemp('mvpa', 'test')

    h5save(fpath, [[1, 2, 3]])
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath)
    # this one just catches if there is such a group
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name='bogus')

    hdf = h5py.File(fpath, 'w')
    ds = AttrDataset([1, 2, 3])
    obj2hdf(hdf, ds, name='non-bogus')
    obj2hdf(hdf, [1, 2, 3], name='bogus')
    hdf.close()

    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name='bogus')
    ds_loaded = AttrDataset.from_hdf5(fpath, name='non-bogus')
    assert_array_equal(ds, ds_loaded)  # just to do smth useful with ds ;)

    # cleanup and ignore stupidity
    os.remove(fpath)
Example #11
File: test_hdf5.py Project: geeragh/PyMVPA
def test_h5py_dataset_typecheck():
    ds = datasets['uni2small']

    _, fpath = tempfile.mkstemp('mvpa', 'test')

    h5save(fpath, [[1, 2, 3]])
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath)
    # this one just catches if there is such a group
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name='bogus')

    hdf = h5py.File(fpath, 'w')
    ds = AttrDataset([1, 2, 3])
    obj2hdf(hdf, ds, name='non-bogus')
    obj2hdf(hdf, [1, 2, 3], name='bogus')
    hdf.close()

    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name='bogus')
    ds_loaded = AttrDataset.from_hdf5(fpath, name='non-bogus')
    assert_array_equal(ds, ds_loaded)   # just to do smth useful with ds ;)

    # cleanup and ignore stupidity
    os.remove(fpath)
Example #12
    te = TransferError(lrn, errorfx)
    te_ = TransferError(lrn_, errorfx)

    error = te(ds_test, ds_train)
    error_ = te_(ds_test, ds_train)

    ok_(error == error_)

    if len(set(['swig', 'rpy2']).intersection(lrn.__tags__)):
        raise SkipTest("Trained swigged and R-interfaced classifiers can't "
                       "be stored/reloaded yet")

    # now lets store/reload the trained one
    try:
        h5save(f.name, lrn_)
    except Exception, e:
        raise AssertionError, \
              "Failed to store trained lrn due to %r" % (e,)

    # This lrn__ is doubly stored/loaded ;-)
    try:
        lrn__ = h5load(f.name)
    except Exception, e:
        raise AssertionError, \
              "Failed to load trained lrn due to %r" % (e,)

    # Verify that we have the same ca enabled
    # TODO
    #ok_(set(lrn.ca.enabled) == set(lrn__.ca.enabled))
    # and having the same values?
Example #13
File: test_hdf5_clf.py Project: esc/PyMVPA
    postproc = BinaryFxNode(errorfx, 'targets')
    te = TransferMeasure(lrn, splitter, postproc=postproc)
    te_ = TransferMeasure(lrn_, splitter, postproc=postproc)

    error = te(ds)
    error_ = te_(ds)

    assert_array_equal(error, error_)

    if len(set(['swig', 'rpy2']).intersection(lrn.__tags__)):
        raise SkipTest("Trained swigged and R-interfaced classifiers can't "
                       "be stored/reloaded yet")

    # now lets store/reload the trained one
    try:
        h5save(f.name, lrn_)
    except Exception, e:
        raise AssertionError, \
              "Failed to store trained lrn due to %r" % (e,)

    # This lrn__ is doubly stored/loaded ;-)
    try:
        lrn__ = h5load(f.name)
    except Exception, e:
        raise AssertionError, \
              "Failed to load trained lrn due to %r" % (e,)

    # Verify that we have the same ca enabled
    # TODO
    #ok_(set(lrn.ca.enabled) == set(lrn__.ca.enabled))
    # and having the same values?
Example #14
def test_0d_object_ndarray():
    f = tempfile.NamedTemporaryFile()
    a = np.array(0, dtype=object)
    h5save(f.name, a)
    a_ = h5load(f.name)
    ok_(a == a_)
Example #15
File: test_hdf5.py Project: B-Rich/PyMVPA
def test_0d_object_ndarray():
    f = tempfile.NamedTemporaryFile()
    a = np.array(0, dtype=object)
    h5save(f.name, a)
    a_ = h5load(f.name)
    ok_(a == a_)
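
All of the examples above exercise the same round-trip pattern: write an arbitrary Python object to an HDF5 file with h5save and reconstruct it with h5load. Below is a minimal, self-contained sketch of that pattern; the mvpa.base.hdf5 import path is an assumption inferred from the mvpa.* imports in example #9 (newer PyMVPA releases expose the same helpers as mvpa2.base.hdf5).

# Minimal sketch of the h5save/h5load round trip used throughout the examples.
# NOTE: the import path below is an assumption; these snippets date from the
# Python 2 era "mvpa" package, while current PyMVPA ships mvpa2.base.hdf5.
import tempfile

import numpy as np
from mvpa.base.hdf5 import h5save, h5load  # assumed module path

def roundtrip(obj):
    """Serialize ``obj`` to a temporary HDF5 file and load it back."""
    with tempfile.NamedTemporaryFile(suffix='.hdf5') as f:
        h5save(f.name, obj)    # write the object into the HDF5 file
        return h5load(f.name)  # reconstruct it from the file

assert roundtrip('test') == 'test'           # plain string, as in example #1
arr = np.arange(6).reshape(2, 3)
assert np.array_equal(roundtrip(arr), arr)   # ndarray round trip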