Example #1
def test_save_load_object_dtype_ds(obj=None):
    """Test saving of custom object ndarray (GH #84)
    """
    aobjf = np.asanyarray(obj).flatten()

    if not aobjf.size and externals.versions['hdf5'] < '1.8.7':
        raise SkipTest(
            "Versions of hdf5 before 1.8.7 have problems with empty arrays")

    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    # and compare
    # neh -- not versatile enough
    #assert_objectarray_equal(np.asanyarray(obj), np.asanyarray(obj_))

    assert_array_equal(obj.shape, obj_.shape)
    assert_equal(type(obj), type(obj_))
    # so we could test both ds and arrays
    aobjf_ = np.asanyarray(obj_).flatten()
    # checks if having just array above
    if aobjf.size:
        assert_equal(type(aobjf[0]), type(aobjf_[0]))
        assert_array_equal(aobjf[0]['d'], aobjf_[0]['d'])
Example #2
def prepare_subject_for_hyperalignment(subject_label, bold_fname, mask_fname, out_dir):
    print('Loading data %s with mask %s' % (bold_fname, mask_fname))
    ds = fmri_dataset(samples=bold_fname, mask=mask_fname)
    zscore(ds, chunks_attr=None)
    out_fname = os.path.join(out_dir, 'sub-%s_data.hdf5' % subject_label)
    print('Saving to %s' % out_fname)
    h5save(out_fname, ds)
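For completeness, a minimal sketch of reading one of these files back in; the path is hypothetical, and h5load comes from the same mvpa2.base.hdf5 module as h5save:

from mvpa2.base.hdf5 import h5load

ds = h5load('/out/sub-01_data.hdf5')  # hypothetical file written by the function above
print('Loaded dataset with shape %s' % (ds.shape,))  # (volumes, voxels in mask)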
Example #3
def test_h5py_dataset_typecheck():
    ds = datasets["uni2small"]

    fd, fpath = tempfile.mkstemp("mvpa", "test")
    os.close(fd)
    fd, fpath2 = tempfile.mkstemp("mvpa", "test")
    os.close(fd)

    h5save(fpath2, [[1, 2, 3]])
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2)
    # this one just catches if there is such a group
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2, name="bogus")

    hdf = h5py.File(fpath, "w")
    ds = AttrDataset([1, 2, 3])
    obj2hdf(hdf, ds, name="non-bogus")
    obj2hdf(hdf, [1, 2, 3], name="bogus")
    hdf.close()

    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name="bogus")
    ds_loaded = AttrDataset.from_hdf5(fpath, name="non-bogus")
    assert_array_equal(ds, ds_loaded)  # just to do smth useful with ds ;)

    # cleanup and ignore stupidity
    os.remove(fpath)
    os.remove(fpath2)
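The test above exercises the named-group layout of PyMVPA's HDF5 files. A minimal sketch of that pattern, assuming the obj2hdf/hdf2obj pair from mvpa2.base.hdf5 and a hypothetical filename:

import h5py
from mvpa2.base.hdf5 import obj2hdf, hdf2obj

# write two independent objects into one file, each under its own group
with h5py.File('multi.hdf5', 'w') as hdf:
    obj2hdf(hdf, [1, 2, 3], name='listobj')
    obj2hdf(hdf, {'a': 1}, name='dictobj')

# pull a single object back out by group name
with h5py.File('multi.hdf5', 'r') as hdf:
    assert hdf2obj(hdf['dictobj']) == {'a': 1}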
Example #4
def test_save_load_object_dtype_ds(obj=None):
    """Test saving of custom object ndarray (GH #84)
    """
    aobjf = np.asanyarray(obj).flatten()

    if not aobjf.size and externals.versions['hdf5'] < '1.8.7':
        raise SkipTest("Versions of hdf5 before 1.8.7 have problems with empty arrays")

    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    # and compare
    # neh -- not versatile enough
    #assert_objectarray_equal(np.asanyarray(obj), np.asanyarray(obj_))

    assert_array_equal(obj.shape, obj_.shape)
    assert_equal(type(obj), type(obj_))
    # so we could test both ds and arrays
    aobjf_ = np.asanyarray(obj_).flatten()
    # checks if having just array above
    if aobjf.size:
        assert_equal(type(aobjf[0]), type(aobjf_[0]))
        assert_array_equal(aobjf[0]['d'], aobjf_[0]['d'])
Example #5
def generate_testing_fmri_dataset(filename=None):
    """Helper to generate a dataset for regression testing of mvpa2/nibabel

    Parameters
    ----------
    filename : str
       Filename of a dataset file to store.  If not provided, it is composed
       using :func:`get_testing_fmri_dataset_filename`

    Returns
    -------
    Dataset, string
       Generated dataset, filename to the HDF5 where it was stored
    """
    import mvpa2
    from mvpa2.base.hdf5 import h5save
    from mvpa2.datasets.sources import load_example_fmri_dataset
    # Load our sample dataset
    ds_full = load_example_fmri_dataset(name='1slice', literal=False)
    # Subselect a small "ROI"
    ds = ds_full[20:23, 10:14]
    # collect all versions/dependencies for possible need to troubleshoot later
    ds.a['wtf'] = mvpa2.wtf()
    ds.a['versions'] = mvpa2.externals.versions
    # save to a file identified by version of PyMVPA and nibabel and hash of
    # all other versions
    out_filename = filename or get_testing_fmri_dataset_filename()
    h5save(out_filename, ds, compression=9)
    # ATM it produces an .hdf5 file of >700kB, which is this large mostly
    # because of ds.a.mapper: the Flatten and StaticFeatureSelection mappers
    # occupy more than 190kB each, and ds.a.mapper as a whole generates a
    # 570kB file.  Among those, .ca seems to occupy a notable share, e.g.
    # 130kB for the FlattenMapper, even though no heavy storage is really
    # needed for any available value -- primarily it is all meta-information
    # embedded into hdf5 to describe our objects.
    return ds, out_filename
Example #6
def test_store_metaclass_types(fname):
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(fname, allowedtype)
    lkrn = h5load(fname)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #7
def generate_testing_fmri_dataset(filename=None):
    """Helper to generate a dataset for regression testing of mvpa2/nibabel

    Parameters
    ----------
    filename : str
       Filename of a dataset file to store.  If not provided, it is composed
       using :func:`get_testing_fmri_dataset_filename`

    Returns
    -------
    Dataset, string
       Generated dataset, filename to the HDF5 where it was stored
    """
    import mvpa2
    from mvpa2.base.hdf5 import h5save
    from mvpa2.datasets.sources import load_example_fmri_dataset
    # Load our sample dataset
    ds_full = load_example_fmri_dataset(name='1slice', literal=False)
    # Subselect a small "ROI"
    ds = ds_full[20:23, 10:14]
    # collect all versions/dependencies for possible need to troubleshoot later
    ds.a['wtf'] = mvpa2.wtf()
    ds.a['versions'] = mvpa2.externals.versions
    # save to a file identified by version of PyMVPA and nibabel and hash of
    # all other versions
    out_filename = filename or get_testing_fmri_dataset_filename()
    h5save(out_filename, ds, compression=9)
    # ATM it produces an .hdf5 file of >700kB, which is this large mostly
    # because of ds.a.mapper: the Flatten and StaticFeatureSelection mappers
    # occupy more than 190kB each, and ds.a.mapper as a whole generates a
    # 570kB file.  Among those, .ca seems to occupy a notable share, e.g.
    # 130kB for the FlattenMapper, even though no heavy storage is really
    # needed for any available value -- primarily it is all meta-information
    # embedded into hdf5 to describe our objects.
    return ds, out_filename
Example #8
def test_save_load_python_objs(fname, obj):
    """Test saving objects of various types
    """
    # try:
    #     print type(obj), " ",
    #     print obj # , obj.shape
    # except Exception as e:
    #     print e
    # save/reload
    try:
        h5save(fname, obj)
    except Exception as e:
        raise AssertionError("Failed to h5save %s: %s" % (safe_str(obj), e))
    try:
        obj_ = h5load(fname)
    except Exception as e:
        raise AssertionError("Failed to h5load %s: %s" % (safe_str(obj), e))

    assert_equal(type(obj), type(obj_))

    if isinstance(obj, np.ndarray):
        assert_equal(obj.dtype, obj_.dtype)
        assert_array_equal(obj, obj_)
    else:
        assert_equal(obj, obj_)
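A hypothetical driver for the test above -- not how PyMVPA parametrizes it, just a plain loop over representative object types:

import os
import tempfile
import numpy as np

for obj in [None, True, 42, 3.14, 'string', [1, 2, 3], (4, 5), {'a': 1},
            np.arange(6).reshape(2, 3)]:
    fd, fname = tempfile.mkstemp('.hdf5', 'h5save_test')
    os.close(fd)
    try:
        test_save_load_python_objs(fname, obj)
    finally:
        os.remove(fname)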
Example #9
def run(args):
    """Run it"""
    verbose(1, "Loading %d result files" % len(args.data))

    filetype_in = guess_backend(args.data[0])

    if filetype_in == 'nifti':
        dss = [fmri_dataset(f) for f in args.data]
    elif filetype_in == 'hdf5':
        dss = [h5load(f) for f in args.data]
    data = np.asarray([d.samples[args.isample] for d in dss])

    if args.mask:
        filetype_mask = guess_backend(args.mask)
        if filetype_mask == 'nifti':
            mask = nib.load(args.mask).get_data()
        elif filetype_mask == 'hdf5':
            mask = h5load(args.mask).samples
        out_of_mask = mask == 0
    else:
        # just take where no voxel had a value
        out_of_mask = np.sum(data != 0, axis=0) == 0

    t, p = ttest_1samp(data,
                       popmean=args.chance_level,
                       axis=0,
                       alternative=args.alternative)

    if args.stat == 'z':
        if args.alternative == 'two-sided':
            s = stats.norm.isf(p / 2)
        else:
            s = stats.norm.isf(p)
        # take the sign of the original t
        s = np.abs(s) * np.sign(t)
    elif args.stat == 'p':
        s = p
    elif args.stat == 't':
        s = t
    else:
        raise ValueError('WTF you gave me? have no clue about %r' %
                         (args.stat, ))

    if s.shape != out_of_mask.shape:
        try:
            out_of_mask = out_of_mask.reshape(s.shape)
        except ValueError:
            raise ValueError('Cannot use mask of shape {0} with '
                             'data of shape {1}'.format(
                                 out_of_mask.shape, s.shape))
    s[out_of_mask] = 0

    verbose(1, "Saving to %s" % args.output)
    filetype_out = guess_backend(args.output)
    if filetype_out == 'nifti':
        map2nifti(dss[0], data=s).to_filename(args.output)
    else:  # filetype_out is hdf5
        s = Dataset(np.atleast_2d(s), fa=dss[0].fa, a=dss[0].a)
        h5save(args.output, s)
    return s
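A hypothetical invocation of run() without the command-line wrapper; the Namespace below sets only the attributes the function actually reads, and the file names are placeholders:

from argparse import Namespace

args = Namespace(
    data=['sub1_acc.hdf5', 'sub2_acc.hdf5'],  # per-subject result maps
    isample=0,                 # which sample row to test in each dataset
    mask=None,                 # fall back to voxels with any nonzero value
    chance_level=0.5,          # H0 mean for the one-sample t-test
    alternative='greater',     # one-sided test
    stat='z',                  # convert p-values to signed z-scores
    output='group_z.hdf5',     # hdf5 output, stored via h5save
)
s = run(args)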
Example #10
def test_store_metaclass_types(fname):
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(fname, allowedtype)
    lkrn = h5load(fname)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #11
def test_product_flatten():
    nsamples = 17
    product_name_values = [('chan', ['C1', 'C2']),
                           ('freq', np.arange(4, 20, 6)),
                           ('time', np.arange(-200, 800, 200))]

    shape = (nsamples, ) + tuple(len(v) for _, v in product_name_values)

    sample_names = ['samp%d' % i for i in xrange(nsamples)]

    # generate random data in four dimensions
    data = np.random.normal(size=shape)
    ds = Dataset(data, sa=dict(sample_names=sample_names))

    # apply flattening to ds
    flattener = ProductFlattenMapper(product_name_values)

    # test I/O (only if h5py is available)
    if externals.exists('h5py'):
        from mvpa2.base.hdf5 import h5save, h5load
        import tempfile
        import os

        fd, testfn = tempfile.mkstemp('mapper.h5py', 'test_product')
        os.close(fd)
        h5save(testfn, flattener)
        flattener = h5load(testfn)
        os.unlink(testfn)

    mds = flattener(ds)

    prod = lambda x: reduce(operator.mul, x)

    # ensure the size is ok
    assert_equal(mds.shape, (nsamples, ) + (prod(shape[1:]), ))

    ndim = len(product_name_values)

    idxs = [range(len(v)) for _, v in product_name_values]
    for si in xrange(nsamples):
        for fi, p in enumerate(itertools.product(*idxs)):
            data_tup = (si, ) + p

            x = mds[si, fi]

            # value should match
            assert_equal(data[data_tup], x.samples[0, 0])

            # indices should match as well
            all_idxs = tuple(x.fa['chan_freq_time_indices'].value.ravel())
            assert_equal(p, all_idxs)

            # values and indices in each dimension should match
            for i, (name, value) in enumerate(product_name_values):
                assert_equal(x.fa[name].value, value[p[i]])
                assert_equal(x.fa[name + '_indices'].value, p[i])

    product_name_values += [('foo', [1, 2, 3])]
    flattener = ProductFlattenMapper(product_name_values)
    assert_raises(ValueError, flattener, ds)
Example #12
def test_h5py_dataset_typecheck():
    ds = datasets['uni2small']

    fd, fpath = tempfile.mkstemp('mvpa', 'test')
    os.close(fd)
    fd, fpath2 = tempfile.mkstemp('mvpa', 'test')
    os.close(fd)

    h5save(fpath2, [[1, 2, 3]])
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2)
    # this one just catches if there is such a group
    assert_raises(ValueError, AttrDataset.from_hdf5, fpath2, name='bogus')

    hdf = h5py.File(fpath, 'w')
    ds = AttrDataset([1, 2, 3])
    obj2hdf(hdf, ds, name='non-bogus')
    obj2hdf(hdf, [1, 2, 3], name='bogus')
    hdf.close()

    assert_raises(ValueError, AttrDataset.from_hdf5, fpath, name='bogus')
    ds_loaded = AttrDataset.from_hdf5(fpath, name='non-bogus')
    assert_array_equal(ds, ds_loaded)  # just to do smth useful with ds ;)

    # cleanup and ignore stupidity
    os.remove(fpath)
    os.remove(fpath2)
Example #13
def test_product_flatten():
    nsamples = 17
    product_name_values = [('chan', ['C1', 'C2']),
                           ('freq', np.arange(4, 20, 6)),
                           ('time', np.arange(-200, 800, 200))]

    shape = (nsamples,) + tuple(len(v) for _, v in product_name_values)

    sample_names = ['samp%d' % i for i in xrange(nsamples)]

    # generate random data in four dimensions
    data = np.random.normal(size=shape)
    ds = Dataset(data, sa=dict(sample_names=sample_names))

    # apply flattening to ds
    flattener = ProductFlattenMapper(product_name_values)

    # test I/O (only if h5py is available)
    if externals.exists('h5py'):
        from mvpa2.base.hdf5 import h5save, h5load
        import tempfile
        import os

        fd, testfn = tempfile.mkstemp('mapper.h5py', 'test_product')
        os.close(fd)
        h5save(testfn, flattener)
        flattener = h5load(testfn)
        os.unlink(testfn)

    mds = flattener(ds)

    prod = lambda x: reduce(operator.mul, x)

    # ensure the size is ok
    assert_equal(mds.shape, (nsamples,) + (prod(shape[1:]),))

    ndim = len(product_name_values)

    idxs = [range(len(v)) for _, v in product_name_values]
    for si in xrange(nsamples):
        for fi, p in enumerate(itertools.product(*idxs)):
            data_tup = (si,) + p

            x = mds[si, fi]

            # value should match
            assert_equal(data[data_tup], x.samples[0, 0])

            # indices should match as well
            all_idxs = tuple(x.fa['chan_freq_time_indices'].value.ravel())
            assert_equal(p, all_idxs)

            # values and indices in each dimension should match
            for i, (name, value) in enumerate(product_name_values):
                assert_equal(x.fa[name].value, value[p[i]])
                assert_equal(x.fa[name + '_indices'].value, p[i])

    product_name_values += [('foo', [1, 2, 3])]
    flattener = ProductFlattenMapper(product_name_values)
    assert_raises(ValueError, flattener, ds)
Example #14
def test_state_cycle_with_custom_reduce(fname):
    # BoxcarMapper has a custom __reduce__ implementation. The 'space'
    # setting will only survive a save/load cycle if the state is correctly
    # handled for custom reduce implementations.
    bm = BoxcarMapper([0], 1, space='boxy')
    h5save(fname, bm)
    bm_rl = h5load(fname)
    assert_equal(bm_rl.get_space(), 'boxy')
Example #15
def test_state_cycle_with_custom_reduce(fname):
    # BoxcarMapper has a custom __reduce__ implementation. The 'space'
    # setting will only survive a save/load cycle if the state is correctly
    # handled for custom reduce implementations.
    bm = BoxcarMapper([0], 1, space='boxy')
    h5save(fname, bm)
    bm_rl = h5load(fname)
    assert_equal(bm_rl.get_space(), 'boxy')
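The property being tested is the standard pickle protocol: __reduce__ may return (reconstructor, args, state), and the state only survives if the serializer applies it on load. A minimal sketch with a hypothetical class, demonstrated with plain pickle rather than PyMVPA internals:

import pickle

class Tagged(object):
    # hypothetical stand-in for a mapper with a custom __reduce__
    def __init__(self, values, space=None):
        self.values = values
        self.space = space

    def __reduce__(self):
        # (reconstructor, ctor args, state): 'space' travels in the state
        # dict and is restored via __setstate__ on load
        return (Tagged, (self.values,), {'space': self.space})

    def __setstate__(self, state):
        self.space = state['space']

t = pickle.loads(pickle.dumps(Tagged([0], space='boxy')))
assert t.space == 'boxy'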
Example #16
def prepare_subject_for_hyperalignment(subject_label, bold_fname, mask_fname,
                                       out_dir):
    print('Loading data %s with mask %s' % (bold_fname, mask_fname))
    ds = fmri_dataset(samples=bold_fname, mask=mask_fname)
    zscore(ds, chunks_attr=None)
    out_fname = os.path.join(out_dir, 'sub-%s_data.hdf5' % subject_label)
    print('Saving to %s' % out_fname)
    h5save(out_fname, ds)
Example #17
def test_store_metaclass_types():
    f = tempfile.NamedTemporaryFile()
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(f.name, allowedtype)
    lkrn = h5load(f.name)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #18
def test_directaccess():
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, "test")
    assert_equal(h5load(f.name), "test")
    f.close()
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, datasets["uni4medium"])
    assert_array_equal(h5load(f.name).samples, datasets["uni4medium"].samples)
Example #19
def test_directaccess():
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, 'test')
    assert_equal(h5load(f.name), 'test')
    f.close()
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, datasets['uni4medium'])
    assert_array_equal(h5load(f.name).samples, datasets['uni4medium'].samples)
Example #20
def test_store_metaclass_types():
    f = tempfile.NamedTemporaryFile()
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(f.name, allowedtype)
    lkrn = h5load(f.name)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #21
def _reduce_mapper(node2volume_attributes,
                   attribute_mapper,
                   src_trg_indices,
                   eta_step=1,
                   proc_id=None,
                   results_backend='native',
                   tmp_prefix='tmpvoxsel'):
    '''Applies voxel selection to a list of src_trg_indices.

    Results are added to node2volume_attributes.
    '''

    if not src_trg_indices:
        return None

    if results_backend not in ('native', 'hdf5'):
        raise ValueError('Illegal results backend %r' % results_backend)

    def _pat(index, xs=src_trg_indices, f=max):
        try:
            if not xs:
                y = 1
            else:
                y = f(x[index] for x in xs)
            if y < 1:
                y = 1
            p = '%%%dd' % math.ceil(math.log10(y))
        except:
            p = '%s'
        return p

    progresspat = 'node %s -> %s [%%3d%%%%]' % (_pat(0), _pat(1))

    # start the clock
    tstart = time.time()
    n = len(src_trg_indices)

    for i, (src, trg) in enumerate(src_trg_indices):
        idxs, misc_attrs = attribute_mapper(trg)

        if idxs is not None:
            node2volume_attributes.add(int(src), idxs, misc_attrs)

        if _debug() and eta_step and (i % eta_step == 0 or i == n - 1):
            msg = _eta(tstart,
                       float(i + 1) / n,
                       progresspat % (src, trg, 100. * (i + 1) / n),
                       show=False)
            if proc_id is not None:
                msg += ' (#%s)' % proc_id
            debug('SVS', msg, cr=True)

    if results_backend == 'hdf5':
        tmp_postfix = ('__tmp__%d_%s.h5py' % (hash(time.time()), proc_id))
        tmp_fn = tmp_prefix + tmp_postfix
        h5save(tmp_fn, node2volume_attributes)
        return tmp_fn
    else:
        return node2volume_attributes
Example #22
def _reduce_mapper(
    node2volume_attributes,
    attribute_mapper,
    src_trg_indices,
    eta_step=1,
    proc_id=None,
    results_backend="native",
    tmp_prefix="tmpvoxsel",
):
    """applies voxel selection to a list of src_trg_indices
    results are added to node2volume_attributes.
    """

    if not src_trg_indices:
        return None

    if not results_backend in ("native", "hdf5"):
        raise ValueError("Illegal results backend %r" % results_backend)

    def _pat(index, xs=src_trg_indices, f=max):
        try:
            if not xs:
                y = 1
            else:
                y = f(x[index] for x in xs)
            if y < 1:
                y = 1
            p = "%%%dd" % math.ceil(math.log10(y))
        except:
            p = "%s"
        return p

    progresspat = "node %s -> %s [%%3d%%%%]" % (_pat(0), _pat(1))

    # start the clock
    tstart = time.time()
    n = len(src_trg_indices)

    for i, (src, trg) in enumerate(src_trg_indices):
        idxs, misc_attrs = attribute_mapper(trg)

        if idxs is not None:
            node2volume_attributes.add(int(src), idxs, misc_attrs)

        if _debug() and eta_step and (i % eta_step == 0 or i == n - 1):
            msg = _eta(tstart, float(i + 1) / n, progresspat % (src, trg, 100.0 * (i + 1) / n), show=False)
            if proc_id is not None:
                msg += " (#%s)" % proc_id
            debug("SVS", msg, cr=True)

    if results_backend == "hdf5":
        tmp_postfix = "__tmp__%d_%s.h5py" % (hash(time.time()), proc_id)
        tmp_fn = tmp_prefix + tmp_postfix
        h5save(tmp_fn, node2volume_attributes)
        return tmp_fn
    else:
        return node2volume_attributes
Example #23
def run(args):
    from mvpa2.base.hdf5 import h5save
    ds = None
    if args.txt_data is not None:
        verbose(1, "Load data from TXT file '%s'" % args.txt_data)
        samples = _load_from_txt(args.txt_data)
        ds = Dataset(samples)
    elif args.npy_data is not None:
        verbose(1, "Load data from NPY file '%s'" % args.npy_data)
        samples = _load_from_npy(args.npy_data)
        ds = Dataset(samples)
    elif args.mri_data is not None:
        verbose(1, "Load data from MRI image(s) %s" % args.mri_data)
        from mvpa2.datasets.mri import fmri_dataset
        vol_attr = dict()
        if args.add_vol_attr is not None:
            # XXX add a way to use the mapper of an existing dataset to
            # add a volume attribute without having to load the entire
            # mri data again
            vol_attr = dict(args.add_vol_attr)
            if len(args.add_vol_attr) != len(vol_attr):
                warning("--vol-attr option with duplicate attribute name: "
                        "check arguments!")
            verbose(2, "Add volumetric feature attributes: %s" % vol_attr)
        ds = fmri_dataset(args.mri_data, mask=args.mask, add_fa=vol_attr)

    if ds is None:
        if args.data is None:
            raise RuntimeError('no data source specified')
        else:
            ds = hdf2ds(args.data)[0]
    else:
        if args.data is not None:
            verbose(
                1,
                'ignoring dataset input in favor of other data source -- remove either one to disambiguate'
            )

    # act on all attribute options
    ds = process_common_dsattr_opts(ds, args)

    if args.add_fsl_mcpar is not None:
        from mvpa2.misc.fsl.base import McFlirtParams
        mc_par = McFlirtParams(args.add_fsl_mcpar)
        for param in mc_par:
            verbose(
                2, "Add motion regressor as sample attribute '%s'" %
                ('mc_' + param))
            ds.sa['mc_' + param] = mc_par[param]

    verbose(3, "Dataset summary %s" % (ds.summary()))
    # and store
    outfilename = args.output
    if not outfilename.endswith('.hdf5'):
        outfilename += '.hdf5'
    verbose(1, "Save dataset to '%s'" % outfilename)
    h5save(outfilename, ds, mkdir=True, compression=args.hdf5_compression)
Example #24
def run(args):
    """Run it"""
    verbose(1, "Loading %d result files" % len(args.data))

    filetype_in = guess_backend(args.data[0])

    if filetype_in == 'nifti':
        dss = [fmri_dataset(f) for f in args.data]
    elif filetype_in == 'hdf5':
        dss = [h5load(f) for f in args.data]
    data = np.asarray([d.samples[args.isample] for d in dss])

    if args.mask:
        filetype_mask = guess_backend(args.mask)
        if filetype_mask == 'nifti':
            mask = nib.load(args.mask).get_data()
        elif filetype_mask == 'hdf5':
            mask = h5load(args.mask).samples
        out_of_mask = mask == 0
    else:
        # just take where no voxel had a value
        out_of_mask = np.sum(data != 0, axis=0) == 0

    t, p = ttest_1samp(data, popmean=args.chance_level, axis=0,
                       alternative=args.alternative)

    if args.stat == 'z':
        if args.alternative == 'two-sided':
            s = stats.norm.isf(p/2)
        else:
            s = stats.norm.isf(p)
        # take the sign of the original t
        s = np.abs(s) * np.sign(t)
    elif args.stat == 'p':
        s = p
    elif args.stat == 't':
        s = t
    else:
        raise ValueError('WTF you gave me? have no clue about %r' % (args.stat,))

    if s.shape != out_of_mask.shape:
        try:
            out_of_mask = out_of_mask.reshape(s.shape)
        except ValueError:
            raise ValueError('Cannot use mask of shape {0} with '
                             'data of shape {1}'.format(out_of_mask.shape, s.shape))
    s[out_of_mask] = 0

    verbose(1, "Saving to %s" % args.output)
    filetype_out = guess_backend(args.output)
    if filetype_out == 'nifti':
        map2nifti(dss[0], data=s).to_filename(args.output)
    else:  # filetype_out is hdf5
        s = Dataset(np.atleast_2d(s), fa=dss[0].fa, a=dss[0].a)
        h5save(args.output, s)
    return s
Example #25
def test_various_special_cases(fname):
    # 0d object ndarray
    a = np.array(0, dtype=object)
    h5save(fname, a)
    a_ = h5load(fname)
    ok_(a == a_)
    # slice
    h5save(fname, slice(2, 5, 3))
    sl = h5load(fname)
    ok_(sl == slice(2, 5, 3))
Example #26
def test_recursion(fname):
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    h5save(fname, obj)
    lobj = h5load(fname)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #27
def test_state_setter_getter(fname):
    # make sure the presence of custom __setstate__, __getstate__ methods
    # is honored -- numpy's RNGs have it
    from numpy.random.mtrand import RandomState
    r = RandomState()
    h5save(fname, r)
    rl = h5load(fname)
    rl_state = rl.get_state()
    for i, v in enumerate(r.get_state()):
        assert_array_equal(v, rl_state[i])
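A standalone corollary sketch: because the reloaded RandomState carries the exact state of the original, both should continue the same random stream (this assumes, as the test above asserts, that h5save round-trips a RandomState):

import os
import tempfile
import numpy as np
from numpy.random.mtrand import RandomState
from mvpa2.base.hdf5 import h5save, h5load

r = RandomState(42)
fd, fname = tempfile.mkstemp('.hdf5')
os.close(fd)
h5save(fname, r)
rl = h5load(fname)
np.testing.assert_array_equal(r.random_sample(5), rl.random_sample(5))
os.remove(fname)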
Example #28
def test_recursion(fname):
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    h5save(fname, obj)
    lobj = h5load(fname)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #29
def save_data(data, save_path):
    """Save data to save_path via h5save, warning and overwriting if the
    file already exists.
    """
    import warnings
    if os.path.exists(save_path):
        warnings.warn("File {} already exists. Overwriting.".format(save_path))
    h5save(save_path, data)
Example #30
def test_state_setter_getter(fname):
    # make sure the presence of custom __setstate__, __getstate__ methods
    # is honored -- numpy's RNGs have it
    from numpy.random.mtrand import RandomState
    r = RandomState()
    h5save(fname, r)
    rl = h5load(fname)
    rl_state = rl.get_state()
    for i, v in enumerate(r.get_state()):
        assert_array_equal(v, rl_state[i])
Example #31
def test_various_special_cases(fname):
    # 0d object ndarray
    a = np.array(0, dtype=object)
    h5save(fname, a)
    a_ = h5load(fname)
    ok_(a == a_)
    # slice
    h5save(fname, slice(2, 5, 3))
    sl = h5load(fname)
    ok_(sl == slice(2, 5, 3))
Example #32
def test_function_ptrs(fname):
    skip_if_no_external('nibabel')
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    h5save(fname, ds)
    ds_loaded = h5load(fname)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh), ds.samples)
Example #33
def test_various_special_cases():
    # 0d object ndarray
    f = tempfile.NamedTemporaryFile()
    a = np.array(0, dtype=object)
    h5save(f.name, a)
    a_ = h5load(f.name)
    ok_(a == a_)
    # slice
    h5save(f.name, slice(2, 5, 3))
    sl = h5load(f.name)
    ok_(sl == slice(2, 5, 3))
Example #34
def test_various_special_cases():
    # 0d object ndarray
    f = tempfile.NamedTemporaryFile()
    a = np.array(0, dtype=object)
    h5save(f.name, a)
    a_ = h5load(f.name)
    ok_(a == a_)
    # slice
    h5save(f.name, slice(2, 5, 3))
    sl = h5load(f.name)
    ok_(sl == slice(2, 5, 3))
Example #35
def test_recursion():
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, obj)
    lobj = h5load(f.name)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #36
def test_recursion():
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, obj)
    lobj = h5load(f.name)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #37
def test_save_load_python_objs(obj):
    """Test saving objects of various types
    """
    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    try:
        h5save(f.name, obj)
    except Exception as e:
        raise AssertionError("Failed to h5save %s: %s" % (obj, e))
Example #38
def test_function_ptrs(fname):
    skip_if_no_external('nibabel')
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    h5save(fname, ds)
    ds_loaded = h5load(fname)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh),
                       ds.samples)
Example #39
def test_h5py_clfs(fname, lrn):
    # lets simply clone it so we could make its all states on
    lrn = lrn.clone()
    # Lets enable all the states
    lrn.ca.enable('all')

    # Store/reload untrained learner
    try:
        h5save(fname, lrn)
    except Exception as e:
        raise AssertionError("Failed to store due to %r" % (e,))
Example #40
def _reduce_mapper(node2volume_attributes, attribute_mapper,
                   src_trg_indices, eta_step=1, proc_id=None,
                   results_backend='native', tmp_prefix='tmpvoxsel'):
    '''Applies voxel selection to a list of src_trg_indices.

    Results are added to node2volume_attributes.
    '''

    if not src_trg_indices:
        return None

    if results_backend not in ('native', 'hdf5'):
        raise ValueError('Illegal results backend %r' % results_backend)


    def _pat(index, xs=src_trg_indices, f=max):
        try:
            if not xs:
                y = 1
            else:
                y = f(x[index] for x in xs)
            if y < 1:
                y = 1
            p = '%%%dd' % math.ceil(math.log10(y))
        except:
            p = '%s'
        return p

    progresspat = '(node %s -> %s)' % (_pat(0), _pat(1))

    # start the clock
    bar = ProgressBar()
    n = len(src_trg_indices)

    for i, (src, trg) in enumerate(src_trg_indices):
        idxs, misc_attrs = attribute_mapper(trg)

        if idxs is not None:
            node2volume_attributes.add(int(src), idxs, misc_attrs)

        if _debug() and eta_step and (i % eta_step == 0 or i == n - 1):
            msg = bar(float(i + 1) / n, progresspat % (src, trg))
            if proc_id is not None:
                msg += ' (#%s)' % proc_id
            debug('SVS', msg, cr=True)

    if results_backend == 'hdf5':
        tmp_postfix = ('__tmp__%d_%s.h5py' %
                       (hash(time.time()), proc_id))
        tmp_fn = tmp_prefix + tmp_postfix
        h5save(tmp_fn, node2volume_attributes)
        return tmp_fn
    else:
        return node2volume_attributes
Example #41
def test_gifti_dataset_h5py(fn, include_nodes):
    if not externals.exists('h5py'):
        raise SkipTest

    from mvpa2.base.hdf5 import h5save, h5load

    ds = _get_test_dataset(include_nodes)

    h5save(fn, ds)
    ds2 = h5load(fn)

    assert_datasets_equal(ds, ds2)
Example #42
def test_save_load_python_objs(obj):
    """Test saving objects of various types
    """
    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    assert_equal(type(obj), type(obj_))
    assert_equal(obj, obj_)
Example #43
def test_save_load_python_objs(obj):
    """Test saving objects of various types
    """
    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    assert_equal(type(obj), type(obj_))
    assert_equal(obj, obj_)
Example #44
def run(args):
    from mvpa2.base.hdf5 import h5save
    ds = None
    if args.txt_data is not None:
        verbose(1, "Load data from TXT file '%s'" % args.txt_data)
        samples = _load_from_txt(args.txt_data)
        ds = Dataset(samples)
    elif args.npy_data is not None:
        verbose(1, "Load data from NPY file '%s'" % args.npy_data)
        samples = _load_from_npy(args.npy_data)
        ds = Dataset(samples)
    elif args.mri_data is not None:
        verbose(1, "Load data from MRI image(s) %s" % args.mri_data)
        from mvpa2.datasets.mri import fmri_dataset
        vol_attr = dict()
        if args.add_vol_attr is not None:
            # XXX add a way to use the mapper of an existing dataset to
            # add a volume attribute without having to load the entire
            # mri data again
            vol_attr = dict(args.add_vol_attr)
            if len(args.add_vol_attr) != len(vol_attr):
                warning("--vol-attr option with duplicate attribute name: "
                        "check arguments!")
            verbose(2, "Add volumetric feature attributes: %s" % vol_attr)
        ds = fmri_dataset(args.mri_data, mask=args.mask, add_fa=vol_attr)

    if ds is None:
        if args.data is None:
            raise RuntimeError('no data source specified')
        else:
            ds = hdf2ds(args.data)[0]
    else:
        if args.data is not None:
            verbose(1, 'ignoring dataset input in favor of other data source -- remove either one to disambiguate')

    # act on all attribute options
    ds = process_common_dsattr_opts(ds, args)

    if args.add_fsl_mcpar is not None:
        from mvpa2.misc.fsl.base import McFlirtParams
        mc_par = McFlirtParams(args.add_fsl_mcpar)
        for param in mc_par:
            verbose(2, "Add motion regressor as sample attribute '%s'"
                       % ('mc_' + param))
            ds.sa['mc_' + param] = mc_par[param]

    verbose(3, "Dataset summary %s" % (ds.summary()))
    # and store
    outfilename = args.output
    if not outfilename.endswith('.hdf5'):
        outfilename += '.hdf5'
    verbose(1, "Save dataset to '%s'" % outfilename)
    h5save(outfilename, ds, mkdir=True, compression=args.hdf5_compression)
Example #45
def test_gifti_dataset_h5py(fn, include_nodes):
    if not externals.exists('h5py'):
        raise SkipTest

    from mvpa2.base.hdf5 import h5save, h5load

    ds = _get_test_dataset(include_nodes)

    h5save(fn, ds)
    ds2 = h5load(fn)

    assert_datasets_equal(ds, ds2)
Example #46
    def test_surface_dset_h5py_io_with_unicode(self, fn):
        skip_if_no_external('h5py')
        from mvpa2.base.hdf5 import h5save, h5load

        ds = dataset_wizard(np.arange(20).reshape((4, 5)), targets=1, chunks=1)
        ds.sa['unicode'] = ['u1', 'uu2', 'uuu3', 'uuuu4']
        ds.sa['str'] = ['s1', 'ss2', 'sss3', 'ssss4']
        ds.fa['node_indices'] = np.arange(5)

        # test h5py I/O
        h5save(fn, ds)
        ds2 = h5load(fn)
        assert_datasets_equal(ds, ds2)
Example #47
def test_function_ptrs():
    if not externals.exists('nibabel'):
        raise SkipTest
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, ds)
    ds_loaded = h5load(f.name)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh), ds.samples)
Example #48
    def test_surface_dset_h5py_io_with_unicode(self, fn):
        skip_if_no_external('h5py')
        from mvpa2.base.hdf5 import h5save, h5load

        ds = dataset_wizard(np.arange(20).reshape((4, 5)), targets=1, chunks=1)
        ds.sa['unicode'] = [u'u1', u'uu2', u'uuu3', u'uuuu4']
        ds.sa['str'] = ['s1', 'ss2', 'sss3', 'ssss4']
        ds.fa['node_indices'] = np.arange(5)

        # test h5py I/O
        h5save(fn, ds)
        ds2 = h5load(fn)
        assert_datasets_equal(ds, ds2)
Example #49
    def test_surface_outside_volume_voxel_selection(self, fn):
        skip_if_no_external('h5py')
        from mvpa2.base.hdf5 import h5save, h5load

        vol_shape = (10, 10, 10, 1)
        vol_affine = np.identity(4)
        vg = volgeom.VolGeom(vol_shape, vol_affine)

        # make surfaces that are far away from all voxels
        # in the volume
        sphere_density = 4
        far = 10000.
        outer = surf.generate_sphere(sphere_density) * 10 + far
        inner = surf.generate_sphere(sphere_density) * 5 + far

        vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)
        radii = [10., 10]  # fixed and variable radii

        outside_node_margins = [0, far, True]
        for outside_node_margin in outside_node_margins:
            for radius in radii:
                selector = lambda: surf_voxel_selection.voxel_selection(vs,
                                                                        radius,
                                                                        outside_node_margin=outside_node_margin)

                if type(radius) is int and outside_node_margin is True:
                    assert_raises(ValueError, selector)
                else:
                    sel = selector()
                    if outside_node_margin is True:
                        # it should have all the keys, but they should
                        # all be empty
                        assert_array_equal(sel.keys(), range(inner.nvertices))
                        for k, v in sel.iteritems():
                            assert_equal(v, [])
                    else:
                        assert_array_equal(sel.keys(), [])

                    if outside_node_margin is True and \
                                    externals.versions['hdf5'] < '1.8.7':
                        raise SkipTest("Versions of hdf5 before 1.8.7 have "
                                       "problems with empty arrays")

                    h5save(fn, sel)
                    sel_copy = h5load(fn)

                    assert_array_equal(sel.keys(), sel_copy.keys())
                    for k in sel.keys():
                        assert_equal(sel[k], sel_copy[k])

                    assert_equal(sel, sel_copy)
Example #50
    def test_surface_outside_volume_voxel_selection(self, fn):
        skip_if_no_external('h5py')
        from mvpa2.base.hdf5 import h5save, h5load

        vol_shape = (10, 10, 10, 1)
        vol_affine = np.identity(4)
        vg = volgeom.VolGeom(vol_shape, vol_affine)

        # make surfaces that are far away from all voxels
        # in the volume
        sphere_density = 4
        far = 10000.
        outer = surf.generate_sphere(sphere_density) * 10 + far
        inner = surf.generate_sphere(sphere_density) * 5 + far

        vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)
        radii = [10., 10]  # fixed and variable radii

        outside_node_margins = [0, far, True]
        for outside_node_margin in outside_node_margins:
            for radius in radii:
                selector = lambda: surf_voxel_selection.voxel_selection(vs,
                                                                        radius,
                                                                        outside_node_margin=outside_node_margin)

                if type(radius) is int and outside_node_margin is True:
                    assert_raises(ValueError, selector)
                else:
                    sel = selector()
                    if outside_node_margin is True:
                        # it should have all the keys, but they should
                        # all be empty
                        assert_array_equal(sel.keys(), range(inner.nvertices))
                        for k, v in sel.iteritems():
                            assert_equal(v, [])
                    else:
                        assert_array_equal(sel.keys(), [])

                    if outside_node_margin is True and \
                                    externals.versions['hdf5'] < '1.8.7':
                        raise SkipTest("Versions of hdf5 before 1.8.7 have "
                                       "problems with empty arrays")

                    h5save(fn, sel)
                    sel_copy = h5load(fn)

                    assert_array_equal(sel.keys(), sel_copy.keys())
                    for k in sel.keys():
                        assert_equal(sel[k], sel_copy[k])

                    assert_equal(sel, sel_copy)
Example #51
def test_function_ptrs():
    if not externals.exists('nibabel'):
        raise SkipTest
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, ds)
    ds_loaded = h5load(f.name)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh),
                        ds.samples)
Example #52
def test_h5py_clfs(lrn):
    # lets simply clone it so we could make its all states on
    lrn = lrn.clone()
    # Lets enable all the states
    lrn.ca.enable('all')

    f = tempfile.NamedTemporaryFile()

    # Store/reload untrained learner
    try:
        h5save(f.name, lrn)
    except Exception as e:
        raise AssertionError("Failed to store due to %r" % (e,))
Example #53
    def setUp(self):
        self.tmpdir = mkdtemp()

        data_ = fmri_dataset(datafn)
        datafn_hdf5 = pjoin(self.tmpdir, 'datain.hdf5')
        h5save(datafn_hdf5, data_)

        mask_ = fmri_dataset(maskfn)
        maskfn_hdf5 = pjoin(self.tmpdir, 'maskfn.hdf5')
        h5save(maskfn_hdf5, mask_)

        self.datafn = [datafn, datafn_hdf5]
        self.outfn = [pjoin(self.tmpdir, 'output') + ext
                      for ext in ['.nii.gz', '.nii', '.hdf5', '.h5']]
        self.maskfn = ['', maskfn, maskfn_hdf5]
Example #54
def saveload(obj, f, backend='hdf5'):
    """Helper to save/load using some of tested backends
    """
    if backend == 'hdf5':
        h5save(f, obj)
        #import os; os.system('h5dump %s' % f)
        obj_ = h5load(f)
    else:
        # check pickle -- does it work correctly
        import cPickle
        with open(f, 'w') as f_:
            cPickle.dump(obj, f_)
        with open(f) as f_:
            obj_ = cPickle.load(f_)
    return obj_
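Hypothetical usage of the helper above, exercising both backends on the same object:

import os
import tempfile

obj = {'alpha': [1, 2, 3]}
for backend, suffix in (('hdf5', '.hdf5'), ('pickle', '.pkl')):
    fd, fname = tempfile.mkstemp(suffix)
    os.close(fd)
    assert saveload(obj, fname, backend=backend) == obj
    os.remove(fname)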
Example #55
def saveload(obj, f, backend='hdf5'):
    """Helper to save/load using some of tested backends
    """
    if backend == 'hdf5':
        h5save(f, obj)
        #import os; os.system('h5dump %s' % f)
        obj_ = h5load(f)
    else:
        # check pickle -- does it work correctly
        import cPickle
        with open(f, 'w') as f_:
            cPickle.dump(obj, f_)
        with open(f) as f_:
            obj_ = cPickle.load(f_)
    return obj_
Example #56
    def test_surface_minimal_lowres_voxel_selection(self, fn):
        vol_shape = (4, 10, 10, 1)
        vol_affine = np.identity(4)
        vg = volgeom.VolGeom(vol_shape, vol_affine)

        # make surfaces that are far away from all voxels
        # in the volume
        sphere_density = 10
        radius = 10

        outer = surf.generate_plane((0, 0, 4), (0, .4, 0), (0, 0, .4), 14, 14)
        inner = outer + 2

        source = surf.generate_plane((0, 0, 4), (0, .8, 0),
                                     (0, 0, .8), 7, 7) + 1

        for i, nvm in enumerate(('minimal', 'minimal_lowres')):
            qe = disc_surface_queryengine(radius,
                                          vg,
                                          inner,
                                          outer,
                                          source,
                                          node_voxel_mapping=nvm)

            voxsel = qe.voxsel
            if i == 0:
                voxsel0 = voxsel
            else:
                assert_equal(voxsel.keys(), voxsel0.keys())
                for k in voxsel.keys():
                    p = voxsel[k]
                    q = voxsel0[k]

                    # require substantial agreement: the symmetric
                    # difference must stay below 80% of the combined size
                    delta = set.symmetric_difference(set(p), set(q))
                    assert_true(len(delta) < .8 * (len(p) + len(q)))

            if externals.exists('h5py'):
                from mvpa2.base.hdf5 import h5save, h5load

                h5save(fn, voxsel)
                voxsel_copy = h5load(fn)
                assert_equal(voxsel.keys(), voxsel_copy.keys())

                for id in qe.ids:
                    assert_array_equal(voxsel.get(id), voxsel_copy.get(id))
Example #57
    def test_surface_minimal_lowres_voxel_selection(self, fn):
        vol_shape = (4, 10, 10, 1)
        vol_affine = np.identity(4)
        vg = volgeom.VolGeom(vol_shape, vol_affine)


        # make surfaces that are far away from all voxels
        # in the volume
        sphere_density = 10
        radius = 10

        outer = surf.generate_plane((0, 0, 4), (0, .4, 0),
                                    (0, 0, .4), 14, 14)
        inner = outer + 2

        source = surf.generate_plane((0, 0, 4), (0, .8, 0),
                                     (0, 0, .8), 7, 7) + 1

        for i, nvm in enumerate(('minimal', 'minimal_lowres')):
            qe = disc_surface_queryengine(radius, vg, inner,
                                          outer, source,
                                          node_voxel_mapping=nvm)

            voxsel = qe.voxsel
            if i == 0:
                voxsel0 = voxsel
            else:
                assert_equal(voxsel.keys(), voxsel0.keys())
                for k in voxsel.keys():
                    p = voxsel[k]
                    q = voxsel0[k]

                    # require substantial agreement: the symmetric
                    # difference must stay below 80% of the combined size
                    delta = set.symmetric_difference(set(p), set(q))
                    assert_true(len(delta) < .8 * (len(p) + len(q)))

            if externals.exists('h5py'):
                from mvpa2.base.hdf5 import h5save, h5load

                h5save(fn, voxsel)
                voxsel_copy = h5load(fn)
                assert_equal(voxsel.keys(), voxsel_copy.keys())

                for id in qe.ids:
                    assert_array_equal(voxsel.get(id),
                                       voxsel_copy.get(id))
Example #58
    def setUp(self):
        self.tmpdir = mkdtemp()

        data_ = fmri_dataset(datafn)
        datafn_hdf5 = pjoin(self.tmpdir, 'datain.hdf5')
        h5save(datafn_hdf5, data_)

        mask_ = fmri_dataset(maskfn)
        maskfn_hdf5 = pjoin(self.tmpdir, 'maskfn.hdf5')
        h5save(maskfn_hdf5, mask_)

        self.datafn = [datafn, datafn_hdf5]
        self.outfn = [
            pjoin(self.tmpdir, 'output') + ext
            for ext in ['.nii.gz', '.nii', '.hdf5', '.h5']
        ]
        self.maskfn = ['', maskfn, maskfn_hdf5]
Example #59
    def test_surface_voxel_query_engine(self):
        vol_shape = (10, 10, 10, 1)
        vol_affine = np.identity(4)
        vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5
        vg = volgeom.VolGeom(vol_shape, vol_affine)

        # make the surfaces
        sphere_density = 10

        outer = surf.generate_sphere(sphere_density) * 25. + 15
        inner = surf.generate_sphere(sphere_density) * 20. + 15

        vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)

        radius = 10

        for fallback, expected_nfeatures in ((True, 1000), (False, 183)):
            voxsel = surf_voxel_selection.voxel_selection(vs, radius)
            qe = SurfaceVoxelsQueryEngine(voxsel,
                                          fallback_euclidean_distance=fallback)

            # test i/o and ensure that the loaded instance is trained
            if externals.exists('h5py'):
                fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
                os.close(fd)
                h5save(qefn, qe)
                qe = h5load(qefn)
                os.remove(qefn)

            m = _Voxel_Count_Measure()

            sl = Searchlight(m, queryengine=qe)

            data = np.random.normal(size=vol_shape)
            img = nb.Nifti1Image(data, vol_affine)
            ds = fmri_dataset(img)

            sl_map = sl(ds)

            counts = sl_map.samples

            assert_true(np.all(np.logical_and(5 <= counts, counts <= 18)))
            assert_equal(sl_map.nfeatures, expected_nfeatures)
Example #60
def test_h5save_mkdir(dirname):
    # create deeper directory name
    filename = os.path.join(dirname, 'a', 'b', 'c', 'test_file.hdf5')
    assert_raises(IOError, h5save, filename, {}, mkdir=False)

    # And create by default
    h5save(filename, {})
    ok_(os.path.exists(filename))
    d = h5load(filename)
    assert_equal(d, {})

    # And that we can still just save into a file in current directory
    # Let's be safe and assure going back to the original directory
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        h5save("TEST.hdf5", [1, 2, 3])
    finally:
        os.chdir(cwd)
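All of the examples above reduce to the same small round-trip pattern. A closing sketch using only calls that already appear in this listing (h5save with the compression keyword, h5load to read back):

import os
import tempfile
import numpy as np
from mvpa2.base.hdf5 import h5save, h5load

fd, fname = tempfile.mkstemp('.hdf5')
os.close(fd)
data = np.arange(12).reshape(3, 4)
h5save(fname, data, compression=9)  # gzip-compress the stored array
assert np.all(h5load(fname) == data)
os.remove(fname)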