def run(args):
    """Run it"""
    verbose(1, "Loading %d result files" % len(args.data))

    filetype_in = guess_backend(args.data[0])

    if filetype_in == 'nifti':
        dss = [fmri_dataset(f) for f in args.data]
    elif filetype_in == 'hdf5':
        dss = [h5load(f) for f in args.data]
    data = np.asarray([d.samples[args.isample] for d in dss])

    if args.mask:
        filetype_mask = guess_backend(args.mask)
        if filetype_mask == 'nifti':
            mask = nib.load(args.mask).get_data()
        elif filetype_mask == 'hdf5':
            mask = h5load(args.mask).samples
        out_of_mask = mask == 0
    else:
        # just take where no voxel had a value
        out_of_mask = np.sum(data != 0, axis=0) == 0

    t, p = ttest_1samp(data,
                       popmean=args.chance_level,
                       axis=0,
                       alternative=args.alternative)

    if args.stat == 'z':
        if args.alternative == 'two-sided':
            s = stats.norm.isf(p / 2)
        else:
            s = stats.norm.isf(p)
        # take the sign of the original t
        s = np.abs(s) * np.sign(t)
    elif args.stat == 'p':
        s = p
    elif args.stat == 't':
        s = t
    else:
        raise ValueError('Unknown stat %r; expected one of: z, p, t' %
                         (args.stat, ))

    if s.shape != out_of_mask.shape:
        try:
            out_of_mask = out_of_mask.reshape(s.shape)
        except ValueError:
            raise ValueError('Cannot use mask of shape {0} with '
                             'data of shape {1}'.format(
                                 out_of_mask.shape, s.shape))
    s[out_of_mask] = 0

    verbose(1, "Saving to %s" % args.output)
    filetype_out = guess_backend(args.output)
    if filetype_out == 'nifti':
        map2nifti(dss[0], data=s).to_filename(args.output)
    else:  # filetype_out is hdf5
        s = Dataset(np.atleast_2d(s), fa=dss[0].fa, a=dss[0].a)
        h5save(args.output, s)
    return s
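For context, here is a minimal sketch of the argument namespace this run() function consumes. The attribute names are taken directly from the code above; the file names and chance level are hypothetical:

from argparse import Namespace

# hypothetical invocation: group-level t-test of per-subject accuracy maps
args = Namespace(
    data=['sub01_acc.nii.gz', 'sub02_acc.nii.gz', 'sub03_acc.nii.gz'],
    isample=0,                # which sample (row) to take from each dataset
    mask=None,                # or a NIfTI/HDF5 mask file
    chance_level=0.5,         # popmean for the one-sample t-test
    alternative='two-sided',  # or 'greater' / 'less'
    stat='z',                 # 'z', 'p', or 't'
    output='group_z.nii.gz',  # output backend guessed from the extension
)
s = run(args)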
Example #2
def test_directaccess():
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, 'test')
    assert_equal(h5load(f.name), 'test')
    f.close()
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, datasets['uni4medium'])
    assert_array_equal(h5load(f.name).samples, datasets['uni4medium'].samples)
Example #3
def test_directaccess():
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, "test")
    assert_equal(h5load(f.name), "test")
    f.close()
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, datasets["uni4medium"])
    assert_array_equal(h5load(f.name).samples, datasets["uni4medium"].samples)
Example #4
def run(args):
    """Run it"""
    verbose(1, "Loading %d result files" % len(args.data))

    filetype_in = guess_backend(args.data[0])

    if filetype_in == 'nifti':
        dss = [fmri_dataset(f) for f in args.data]
    elif filetype_in == 'hdf5':
        dss = [h5load(f) for f in args.data]
    data = np.asarray([d.samples[args.isample] for d in dss])

    if args.mask:
        filetype_mask = guess_backend(args.mask)
        if filetype_mask == 'nifti':
            mask = nib.load(args.mask).get_data()
        elif filetype_mask == 'hdf5':
            mask = h5load(args.mask).samples
        out_of_mask = mask == 0
    else:
        # just take where no voxel had a value
        out_of_mask = np.sum(data != 0, axis=0) == 0

    t, p = ttest_1samp(data, popmean=args.chance_level, axis=0,
                       alternative=args.alternative)

    if args.stat == 'z':
        if args.alternative == 'two-sided':
            s = stats.norm.isf(p/2)
        else:
            s = stats.norm.isf(p)
        # take the sign of the original t
        s = np.abs(s) * np.sign(t)
    elif args.stat == 'p':
        s = p
    elif args.stat == 't':
        s = t
    else:
        raise ValueError('Unknown stat %r; expected one of: z, p, t' % (args.stat,))

    if s.shape != out_of_mask.shape:
        try:
            out_of_mask = out_of_mask.reshape(s.shape)
        except ValueError:
            raise ValueError('Cannot use mask of shape {0} with '
                             'data of shape {1}'.format(out_of_mask.shape, s.shape))
    s[out_of_mask] = 0

    verbose(1, "Saving to %s" % args.output)
    filetype_out = guess_backend(args.output)
    if filetype_out == 'nifti':
        map2nifti(dss[0], data=s).to_filename(args.output)
    else:  # filetype_out is hdf5
        s = Dataset(np.atleast_2d(s), fa=dss[0].fa, a=dss[0].a)
        h5save(args.output, s)
    return s
Example #5
def test_various_special_cases(fname):
    # 0d object ndarray
    a = np.array(0, dtype=object)
    h5save(fname, a)
    a_ = h5load(fname)
    ok_(a == a_)
    # slice
    h5save(fname, slice(2, 5, 3))
    sl = h5load(fname)
    ok_(sl == slice(2, 5, 3))
Example #6
def test_various_special_cases(fname):
    # 0d object ndarray
    a = np.array(0, dtype=object)
    h5save(fname, a)
    a_ = h5load(fname)
    ok_(a == a_)
    # slice
    h5save(fname, slice(2, 5, 3))
    sl = h5load(fname)
    ok_(sl == slice(2, 5, 3))
Example #7
def test_various_special_cases():
    # 0d object ndarray
    f = tempfile.NamedTemporaryFile()
    a = np.array(0, dtype=object)
    h5save(f.name, a)
    a_ = h5load(f.name)
    ok_(a == a_)
    # slice
    h5save(f.name, slice(2, 5, 3))
    sl = h5load(f.name)
    ok_(sl == slice(2, 5, 3))
Example #8
def test_various_special_cases():
    # 0d object ndarray
    f = tempfile.NamedTemporaryFile()
    a = np.array(0, dtype=object)
    h5save(f.name, a)
    a_ = h5load(f.name)
    ok_(a == a_)
    # slice
    h5save(f.name, slice(2, 5, 3))
    sl = h5load(f.name)
    ok_(sl == slice(2, 5, 3))
Example #9
def test_save_load_python_objs(fname, obj):
    """Test saving objects of various types
    """
    # try:
    #     print type(obj), " ",
    #     print obj # , obj.shape
    # except Exception as e:
    #     print e
    # save/reload
    try:
        h5save(fname, obj)
    except Exception as e:
        raise AssertionError("Failed to h5save %s: %s" % (safe_str(obj), e))
    try:
        obj_ = h5load(fname)
    except Exception as e:
        raise AssertionError("Failed to h5load %s: %s" % (safe_str(obj), e))

    assert_equal(type(obj), type(obj_))

    if isinstance(obj, np.ndarray):
        assert_equal(obj.dtype, obj_.dtype)
        assert_array_equal(obj, obj_)
    else:
        assert_equal(obj, obj_)
Example #10
def load_data():
    data = h5load(os.path.join(pymvpa_datadbroot, 'mnist', "mnist.hdf5"))
    traindata = data['train'].samples
    trainlabels = data['train'].sa.labels
    testdata = data['test'].samples
    testlabels = data['test'].sa.labels
    return traindata, trainlabels, testdata, testlabels
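Assuming the pyMVPA data DB is available under pymvpa_datadbroot with the 'mnist' dataset installed, usage is a plain unpacking call (the shape in the comment reflects the standard MNIST split and is an assumption):

traindata, trainlabels, testdata, testlabels = load_data()
print(traindata.shape)  # e.g. (60000, 784)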
Example #11
def hdf2ds(fnames):
    """Load dataset(s) from an HDF5 file

    Parameters
    ----------
    fnames : list(str)
      Names of the input HDF5 files

    Returns
    -------
    list(Dataset)
      All dataset-like elements in all given HDF5 files (in order of
      appearance). If a given HDF5 file contains non-Dataset elements,
      they are silently ignored. If no given HDF5 file contains any
      dataset, an empty list is returned.
    """
    from mvpa2.base.hdf5 import h5load
    dss = []
    for fname in fnames:
        content = h5load(fname)
        if is_datasetlike(content):
            dss.append(content)
        else:
            for c in content:
                if is_datasetlike(c):
                    dss.append(c)
    return dss
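A usage sketch with hypothetical file names; a single HDF5 file may hold either one Dataset or a sequence of them, and hdf2ds flattens both cases into one list:

dss = hdf2ds(['subj01.hdf5', 'group_results.hdf5'])
for ds in dss:
    print(ds.shape)  # each element is a Dataset-like object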
Example #12
def test_save_load_object_dtype_ds(obj=None):
    """Test saving of custom object ndarray (GH #84)
    """
    aobjf = np.asanyarray(obj).flatten()

    if not aobjf.size and externals.versions['hdf5'] < '1.8.7':
        raise SkipTest("Versions of hdf5 before 1.8.7 have problems with empty arrays")

    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    # and compare
    # neh -- not versatile enough
    #assert_objectarray_equal(np.asanyarray(obj), np.asanyarray(obj_))

    assert_array_equal(obj.shape, obj_.shape)
    assert_equal(type(obj), type(obj_))
    # so we could test both ds and arrays
    aobjf_ = np.asanyarray(obj_).flatten()
    # checks if having just array above
    if aobjf.size:
        assert_equal(type(aobjf[0]), type(aobjf_[0]))
        assert_array_equal(aobjf[0]['d'], aobjf_[0]['d'])
Example #13
    def fetch(self,
              n_subjects=10,
              resume=True,
              force=False,
              check=True,
              verbose=1):
        """data_types is a list, can contain: anat, diff, func, rest, psyc, bgnd
        """
        if n_subjects > self.MAX_SUBJECTS:
            raise ValueError('Max # subjects == %d' % self.MAX_SUBJECTS)

        processed_files = [
            'S%02d_func_mni.nii.gz' % subj_id
            for subj_id in range(1, 1 + n_subjects)
        ]
        processed_files.append('stims.csv')
        processed_files = [
            os.path.join(self.data_dir, f) for f in processed_files
        ]

        raw_files = (
            'http://data.pymvpa.org/datasets/hyperalignment_tutorial_data/hyperalignment_tutorial_data_2.4.hdf5.gz',
        )
        raw_files = self.fetcher.fetch(raw_files,
                                       resume=resume,
                                       force=force,
                                       check=check,
                                       verbose=verbose)

        if force or np.any([not os.path.exists(f) for f in processed_files]):
            # Import local version of pymvpa
            cur_dir = os.path.dirname(os.path.abspath(__file__))
            mvpa2_path = os.path.abspath(
                os.path.join(cur_dir, '..', '..', 'core', '_external',
                             'pymvpa'))
            sys.path = [mvpa2_path] + sys.path
            from mvpa2.base.hdf5 import h5load

            # Load the file and manipulate into expected form.
            ds_all = h5load(raw_files[0])
            for si, func_filename in enumerate(processed_files[:-1]):
                if not os.path.exists(os.path.dirname(func_filename)):
                    os.makedirs(os.path.dirname(func_filename))

                # Construct and save the image
                func_data = np.transpose(ds_all[si].O, [1, 2, 3, 0])
                func_affine = ds_all[si].a['imgaffine'].value
                func_hdr = ds_all[si].a['imghdr'].value
                img = nib.Nifti1Image(func_data,
                                      affine=func_affine)  #, header=func_hdr)
                nib.save(img, func_filename)

            # Construct and save the stimuli
            value_arr = np.asarray([ds_all[0].T, ds_all[0].sa['chunks']])
            csv_cols = np.vstack([['stim', 'chunk'], value_arr.T])
            np.savetxt(processed_files[-1], csv_cols, delimiter=',', fmt='%s')

        return Bunch(raw_data=raw_files[0],
                     func=processed_files[:-1],
                     stim=processed_files[-1])
Example #14
def test_store_metaclass_types(fname):
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(fname, allowedtype)
    lkrn = h5load(fname)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #15
def test_store_metaclass_types(fname):
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(fname, allowedtype)
    lkrn = h5load(fname)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #16
def test_product_flatten():
    nsamples = 17
    product_name_values = [('chan', ['C1', 'C2']),
                           ('freq', np.arange(4, 20, 6)),
                           ('time', np.arange(-200, 800, 200))]

    shape = (nsamples, ) + tuple(len(v) for _, v in product_name_values)

    sample_names = ['samp%d' % i for i in xrange(nsamples)]

    # generate random data in four dimensions
    data = np.random.normal(size=shape)
    ds = Dataset(data, sa=dict(sample_names=sample_names))

    # apply flattening to ds
    flattener = ProductFlattenMapper(product_name_values)

    # test I/O (only if h5py is available)
    if externals.exists('h5py'):
        from mvpa2.base.hdf5 import h5save, h5load
        import tempfile
        import os

        fd, testfn = tempfile.mkstemp('mapper.h5py', 'test_product')
        os.close(fd)
        h5save(testfn, flattener)
        flattener = h5load(testfn)
        os.unlink(testfn)

    mds = flattener(ds)

    prod = lambda x: reduce(operator.mul, x)

    # ensure the size is ok
    assert_equal(mds.shape, (nsamples, ) + (prod(shape[1:]), ))

    ndim = len(product_name_values)

    idxs = [range(len(v)) for _, v in product_name_values]
    for si in xrange(nsamples):
        for fi, p in enumerate(itertools.product(*idxs)):
            data_tup = (si, ) + p

            x = mds[si, fi]

            # value should match
            assert_equal(data[data_tup], x.samples[0, 0])

            # indices should match as well
            all_idxs = tuple(x.fa['chan_freq_time_indices'].value.ravel())
            assert_equal(p, all_idxs)

            # values and indices in each dimension should match
            for i, (name, value) in enumerate(product_name_values):
                assert_equal(x.fa[name].value, value[p[i]])
                assert_equal(x.fa[name + '_indices'].value, p[i])

    product_name_values += [('foo', [1, 2, 3])]
    flattener = ProductFlattenMapper(product_name_values)
    assert_raises(ValueError, flattener, ds)
Example #17
def hdf2ds(fnames):
    """Load dataset(s) from an HDF5 file

    Parameters
    ----------
    fnames : list(str)
      Names of the input HDF5 files

    Returns
    -------
    list(Dataset)
      All dataset-like elements in all given HDF5 files (in order of
      appearance). If a given HDF5 file contains non-Dataset elements,
      they are silently ignored. If no given HDF5 file contains any
      dataset, an empty list is returned.
    """
    from mvpa2.base.hdf5 import h5load
    dss = []
    for fname in fnames:
        content = h5load(fname)
        if is_datasetlike(content):
            dss.append(content)
        else:
            for c in content:
                if is_datasetlike(c):
                    dss.append(c)
    return dss
Example #18
def test_dataset_without_chunks(fname):
    #  ValueError: All chunk dimensions must be positive (Invalid arguments to routine: Out of range)
    # MH: This is not about Dataset chunks, but about an empty samples array
    ds = AttrDataset([8], a=dict(custom=1))
    save(ds, fname, compression='gzip')
    ds_loaded = h5load(fname)
    ok_(ds_loaded.a.custom == ds.a.custom)
Example #19
def load_data(data_path, runs=None):
    """

    Parameters
    ----------
    data_path: Path to data.
    runs: If not None, which chunks/runs to keep.

    Returns
    -------
    List of datasets.
    """
    if not os.path.exists(data_path):
        raise ValueError("Data path doesn't exist: {0}".format(data_path))

    dss = h5load(data_path)
    if not isinstance(dss, (list, tuple, np.ndarray)):
        raise TypeError("Input datasets should be a sequence "
                        "(of type list, tuple, or ndarray) of datasets.")

    if runs is None:
        return dss
    else:
        dss = [sd.select(sadict={'chunks': runs}) for sd in dss]
        return dss
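A usage sketch, assuming the HDF5 file stores a sequence of datasets whose sample attributes include 'chunks' (path and run numbers are hypothetical):

# keep only runs 0 and 1 from every subject's dataset
dss = load_data('/data/study/all_subjects.hdf5', runs=[0, 1])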
Example #20
def test_save_load_object_dtype_ds(obj=None):
    """Test saving of custom object ndarray (GH #84)
    """
    aobjf = np.asanyarray(obj).flatten()

    if not aobjf.size and externals.versions['hdf5'] < '1.8.7':
        raise SkipTest(
            "Versions of hdf5 before 1.8.7 have problems with empty arrays")

    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    # and compare
    # neh -- not versatile enough
    #assert_objectarray_equal(np.asanyarray(obj), np.asanyarray(obj_))

    assert_array_equal(obj.shape, obj_.shape)
    assert_equal(type(obj), type(obj_))
    # so we could test both ds and arrays
    aobjf_ = np.asanyarray(obj_).flatten()
    # checks if having just array above
    if aobjf.size:
        assert_equal(type(aobjf[0]), type(aobjf_[0]))
        assert_array_equal(aobjf[0]['d'], aobjf_[0]['d'])
Example #21
def test_dataset_without_chunks(fname):
    #  ValueError: All chunk dimensions must be positive (Invalid arguments to routine: Out of range)
    # MH: This is not about Dataset chunks, but about an empty samples array
    ds = AttrDataset([8], a=dict(custom=1))
    save(ds, fname, compression='gzip')
    ds_loaded = h5load(fname)
    ok_(ds_loaded.a.custom == ds.a.custom)
Example #22
def load_data():
    data = h5load(os.path.join(pymvpa_datadbroot, 'mnist', "mnist.hdf5"))
    traindata = data['train'].samples
    trainlabels = data['train'].sa.labels
    testdata = data['test'].samples
    testlabels = data['test'].sa.labels
    return traindata, trainlabels, testdata, testlabels
Example #23
def test_product_flatten():
    nsamples = 17
    product_name_values = [('chan', ['C1', 'C2']),
                           ('freq', np.arange(4, 20, 6)),
                           ('time', np.arange(-200, 800, 200))]

    shape = (nsamples,) + tuple(len(v) for _, v in product_name_values)

    sample_names = ['samp%d' % i for i in xrange(nsamples)]

    # generate random data in four dimensions
    data = np.random.normal(size=shape)
    ds = Dataset(data, sa=dict(sample_names=sample_names))

    # apply flattening to ds
    flattener = ProductFlattenMapper(product_name_values)

    # test I/O (only if h5py is available)
    if externals.exists('h5py'):
        from mvpa2.base.hdf5 import h5save, h5load
        import tempfile
        import os

        _, testfn = tempfile.mkstemp('mapper.h5py', 'test_product')
        h5save(testfn, flattener)
        flattener = h5load(testfn)
        os.unlink(testfn)

    mds = flattener(ds)

    prod = lambda x: reduce(operator.mul, x)

    # ensure the size is ok
    assert_equal(mds.shape, (nsamples,) + (prod(shape[1:]),))

    ndim = len(product_name_values)

    idxs = [range(len(v)) for _, v in product_name_values]
    for si in xrange(nsamples):
        for fi, p in enumerate(itertools.product(*idxs)):
            data_tup = (si,) + p

            x = mds[si, fi]

            # value should match
            assert_equal(data[data_tup], x.samples[0, 0])

            # indices should match as well
            all_idxs = tuple(x.fa['chan_freq_time_indices'].value.ravel())
            assert_equal(p, all_idxs)

            # values and indices in each dimension should match
            for i, (name, value) in enumerate(product_name_values):
                assert_equal(x.fa[name].value, value[p[i]])
                assert_equal(x.fa[name + '_indices'].value, p[i])

    product_name_values += [('foo', [1, 2, 3])]
    flattener = ProductFlattenMapper(product_name_values)
    assert_raises(ValueError, flattener, ds)
Example #24
def test_state_cycle_with_custom_reduce(fname):
    # BoxcarMapper has a custom __reduce__ implementation. The 'space'
    # setting will only survive a save/load cycle if the state is correctly
    # handled for custom reduce implementations.
    bm = BoxcarMapper([0], 1, space='boxy')
    h5save(fname, bm)
    bm_rl = h5load(fname)
    assert_equal(bm_rl.get_space(), 'boxy')
Example #25
def test_state_cycle_with_custom_reduce(fname):
    # BoxcarMapper has a custom __reduce__ implementation. The 'space'
    # setting will only survive a save/load cycle if the state is correctly
    # handled for custom reduce implementations.
    bm = BoxcarMapper([0], 1, space='boxy')
    h5save(fname, bm)
    bm_rl = h5load(fname)
    assert_equal(bm_rl.get_space(), 'boxy')
Example #26
def test_store_metaclass_types():
    f = tempfile.NamedTemporaryFile()
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(f.name, allowedtype)
    lkrn = h5load(f.name)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #27
def test_store_metaclass_types():
    f = tempfile.NamedTemporaryFile()
    from mvpa2.kernels.base import Kernel
    allowedtype = Kernel
    h5save(f.name, allowedtype)
    lkrn = h5load(f.name)
    assert_equal(lkrn, Kernel)
    assert_equal(lkrn.__metaclass__, Kernel.__metaclass__)
Example #28
def load_results(path, name, task):

    folder = '0_results'
    
    print 'Opening ' + os.path.join(path, folder, name+'_'+task+'_120618_map.hdf5')
    map = h5load(os.path.join(path, folder, name+'_'+task+'_120618_map.hdf5'))
    
    mapper = pickle.load(open(os.path.join(path, folder, name+'_'+task+'_120618_mapper.pyobj'), 'r'))
    
    rev_map = mapper.reverse(map.samples)

    fileName = [elem for elem in os.listdir(os.path.join(path, name, 'rest')) if elem.find('.nii.gz') != -1][0]
    
    niftiimg = ni.load(os.path.join(path, name, 'rest', fileName))

    ni.save(ni.Nifti1Image(rev_map.squeeze(), niftiimg.get_affine()), os.path.join(path, name, name + '_120618_nifti_map.nii.gz'))

    imgIn = os.path.join(path, name, name + '_120618_nifti_map.nii.gz')
    refIn = '/usr/share/fsl/4.1/data/standard/MNI152_T1_2mm_brain.nii.gz'

    mat = [elem for elem in os.listdir(os.path.join(path, name, 'rest')) if elem.find('.mat') != -1 and elem.find('mni') != -1][0]
    
    command = 'flirt ' + \
              ' -in ' + imgIn + \
              ' -ref ' + refIn + \
              ' -init ' + os.path.join(path, name, 'rest', mat) + \
              ' -applyxfm' + \
              ' -out ' + os.path.join(path, name, name + '_nifti_map.nii.gz')[:-7] + '_120618_mni.nii.gz'

    print command
    os.system(command)

    results = pickle.load(open(os.path.join(path, folder, name + '_' + task + '_120618_res.pyobj'), 'r'))

    print '**************** ' + name + ' **********************'
    print results.stats

    mni_img = ni.load(os.path.join(path, name, name + '_nifti_map.nii.gz')[:-7] + '_120618_mni.nii.gz')
    mni_mask = ni.load('/usr/share/data/fsl-mni152-templates/MNI152lin_T1_2mm_brain_mask.nii.gz')
    brain = ni.load('/usr/share/fsl/4.1/data/standard/MNI152_T1_2mm.nii.gz')

    res_masked = mni_img.get_data() * mni_mask.get_data()
    res_masked = (res_masked - np.mean(res_masked)) / np.std(res_masked)

    ni.save(ni.Nifti1Image(res_masked, mni_mask.get_affine(), header=mni_mask.get_header()),
            os.path.join(path, name, name + '_nifti_map.nii.gz')[:-7] + '_120618_res_masked_norm_mni.nii.gz')

    ni.save(ni.Nifti1Image(brain.get_data(), mni_mask.get_affine(), header=mni_mask.get_header()),
            os.path.join(path, '_MNI152_T1_2mm.nii.gz'))

    return [results, res_masked]
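A hypothetical invocation; note that the function hard-codes an on-disk layout (a '0_results' folder and '*_120618_*' file names) and shells out to a local FSL installation for the flirt call:

results, res_masked = load_results('/data/study', 'subj01', 'task')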
Example #29
def test_state_setter_getter(fname):
    # make sure the presence of custom __setstate__, __getstate__ methods
    # is honored -- numpy's RNGs have it
    from numpy.random.mtrand import RandomState
    r = RandomState()
    h5save(fname, r)
    rl = h5load(fname)
    rl_state = rl.get_state()
    for i, v in enumerate(r.get_state()):
        assert_array_equal(v, rl_state[i])
Example #30
def test_state_setter_getter(fname):
    # make sure the presence of custom __setstate__, __getstate__ methods
    # is honored -- numpy's RNGs have it
    from numpy.random.mtrand import RandomState
    r = RandomState()
    h5save(fname, r)
    rl = h5load(fname)
    rl_state = rl.get_state()
    for i, v in enumerate(r.get_state()):
        assert_array_equal(v, rl_state[i])
Example #31
def test_cosmo_io_h5py(fn):
    skip_if_no_external('h5py')
    from mvpa2.base.hdf5 import h5save, h5load

    # Dataset from cosmo
    ds = cosmo.from_any(_create_small_mat_dataset_dict())
    h5save(fn, ds)
    ds_loaded = h5load(fn)

    _assert_ds_equal(ds, ds_loaded)

    # Queryengine
    qe = cosmo.from_any(_create_small_mat_nbrhood_dict())
    h5save(fn, qe)
    qe_loaded = h5load(fn)

    assert_array_equal(qe.ids, qe_loaded.ids)
    _assert_array_collectable_equal(qe.a, qe_loaded.a)
    _assert_array_collectable_equal(qe.fa, qe_loaded.fa)
Example #32
def run_hyperalignment(subjects_to_analyze, out_dir):
    # Load subject data
    ds_all = []
    for subject_label in subjects_to_analyze:
        ds_all.append(h5load('%s/sub-%s_data.hdf5' % (out_dir, subject_label)))
    # Initialize searchlight hyperalignment
    slhyper = SearchlightHyperalignment(radius=2, nblocks=10, sparse_radius=5,
                                        dtype='float16')
    hmappers = slhyper(ds_all)
    return hmappers
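The returned hmappers hold one projection mapper per input dataset; a common follow-up (sketched here as an assumption, not part of the snippet above) is to forward-map each subject's data into the common space:

hmappers = run_hyperalignment(['01', '02', '03'], '/path/to/out')
# hypothetical: apply each subject's mapper to that subject's dataset
# ds_common = [m.forward(ds) for m, ds in zip(hmappers, ds_all)]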
Example #33
def test_recursion(fname):
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    h5save(fname, obj)
    lobj = h5load(fname)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #34
def test_recursion(fname):
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    h5save(fname, obj)
    lobj = h5load(fname)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #35
def test_cosmo_io_h5py(fn):
    skip_if_no_external('h5py')
    from mvpa2.base.hdf5 import h5save, h5load

    # Dataset from cosmo
    ds = cosmo.from_any(_create_small_mat_dataset_dict())
    h5save(fn, ds)
    ds_loaded = h5load(fn)

    _assert_ds_equal(ds, ds_loaded)

    # Queryengine
    qe = cosmo.from_any(_create_small_mat_nbrhood_dict())
    h5save(fn, qe)
    qe_loaded = h5load(fn)

    assert_array_equal(qe.ids, qe_loaded.ids)
    _assert_array_collectable_equal(qe.a, qe_loaded.a)
    _assert_array_collectable_equal(qe.fa, qe_loaded.fa)
Example #36
def test_recursion():
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, obj)
    lobj = h5load(f.name)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #37
def test_recursion():
    obj = range(2)
    obj.append(HDFDemo())
    obj.append(obj)
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, obj)
    lobj = h5load(f.name)
    assert_equal(obj[:2], lobj[:2])
    assert_equal(type(obj[2]), type(lobj[2]))
    ok_(obj[3] is obj)
    ok_(lobj[3] is lobj)
Example #38
def test_function_ptrs(fname):
    skip_if_no_external('nibabel')
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    h5save(fname, ds)
    ds_loaded = h5load(fname)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh), ds.samples)
Example #39
def test_function_ptrs(fname):
    skip_if_no_external('nibabel')
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    h5save(fname, ds)
    ds_loaded = h5load(fname)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh),
                       ds.samples)
Example #40
    def fetch(self, n_subjects=10, resume=True, force=False, check=True,
              verbose=1):
        """data_types is a list, can contain: anat, diff, func, rest, psyc, bgnd
        """
        if n_subjects > self.MAX_SUBJECTS:
            raise ValueError('Max # subjects == %d' % self.MAX_SUBJECTS)

        processed_files = ['S%02d_func_mni.nii.gz' % subj_id
                           for subj_id in range(1, 1 + n_subjects)]
        processed_files.append('stims.csv')
        processed_files = [op.join(self.data_dir, f)
                           for f in processed_files]

        raw_files = ('http://data.pymvpa.org/datasets/'
                     'hyperalignment_tutorial_data/'
                     'hyperalignment_tutorial_data_2.4.hdf5.gz',)
        raw_files = self.fetcher.fetch(raw_files, resume=resume, force=force,
                                       check=check, verbose=verbose)

        if force or np.any([not op.exists(f) for f in processed_files]):
            # Import local version of pymvpa
            if sys.version_info[0] > 2:
                raise NotImplementedError("pymvpa only works in Python 2,"
                                          "to convert this file.")
            cur_dir = op.dirname(op.abspath(__file__))
            mvpa2_path = op.abspath(op.join(cur_dir, '..', '..', 'core',
                                            '_external', 'pymvpa'))
            sys.path = [mvpa2_path] + sys.path
            from mvpa2.base.hdf5 import h5load

            # Load the file and manipulate into expected form.
            ds_all = h5load(raw_files[0])
            for si, func_filename in enumerate(processed_files[:-1]):
                if not op.exists(op.dirname(func_filename)):
                    os.makedirs(op.dirname(func_filename))

                # Construct and save the image
                func_data = np.transpose(ds_all[si].O, [1, 2, 3, 0])
                func_affine = ds_all[si].a['imgaffine'].value
                func_hdr = ds_all[si].a['imghdr'].value
                img = nib.Nifti1Image(func_data, affine=func_affine,
                                      extra=func_hdr)
                nib.save(img, func_filename)

            # Construct and save the stimuli
            value_arr = np.asarray([ds_all[0].T, ds_all[0].sa['chunks']])
            csv_cols = np.vstack([['stim', 'chunk'], value_arr.T])
            np.savetxt(processed_files[-1], csv_cols, delimiter=',', fmt='%s')

        return dict(
            raw_data=raw_files[0],
            func=processed_files[:-1],
            stim=processed_files[-1])
Example #41
def load_ds(subnr, mask=None, zscore_ds=True):
    ds = h5load(fns.betafn(subnr))
    if mask is not None:
        ds = ds[:, mask]
    ds = ds[ds.sa.condition != 'self']
    #if zscore_ds:
    #    zscore(ds, chunks_attr='chunks')
    # add familiarity
    ds.sa['familiarity'] = [
        'familiar' if l.startswith('f') else 'control' for l in ds.sa.condition
    ]
    return ds
Example #42
def test_gifti_dataset_h5py(fn, include_nodes):
    if not externals.exists('h5py'):
        raise SkipTest

    from mvpa2.base.hdf5 import h5save, h5load

    ds = _get_test_dataset(include_nodes)

    h5save(fn, ds)
    ds2 = h5load(fn)

    assert_datasets_equal(ds, ds2)
Example #43
def run_hyperalignment(subjects_to_analyze, out_dir):
    # Load subject data
    ds_all = []
    for subject_label in subjects_to_analyze:
        ds_all.append(h5load('%s/sub-%s_data.hdf5' % (out_dir, subject_label)))
    # Initialize searchlight hyperalignment
    slhyper = SearchlightHyperalignment(radius=2,
                                        nblocks=10,
                                        sparse_radius=5,
                                        dtype='float16')
    hmappers = slhyper(ds_all)
    return hmappers
Example #44
def test_generate_testing_fmri_dataset(tempfile):
    skip_if_no_external('nibabel')
    skip_if_no_external('h5py')

    from mvpa2.base.hdf5 import h5load
    from mvpa2.testing.regress import generate_testing_fmri_dataset

    ds, filename = generate_testing_fmri_dataset(tempfile)
    assert_equal(tempfile, filename)
    assert_true(exists(tempfile))
    ds_reloaded = h5load(tempfile)
    assert_datasets_equal(ds, ds_reloaded)
Example #45
def test_generate_testing_fmri_dataset(tempfile):
    skip_if_no_external('nibabel')
    skip_if_no_external('h5py')

    from mvpa2.base.hdf5 import h5load
    from mvpa2.testing.regress import generate_testing_fmri_dataset

    ds, filename = generate_testing_fmri_dataset(tempfile)
    assert_equal(tempfile, filename)
    assert_true(exists(tempfile))
    ds_reloaded = h5load(tempfile)
    assert_datasets_equal(ds, ds_reloaded, ignore_a={'wtf'})
Example #46
def test_gifti_dataset_h5py(fn, include_nodes):
    if not externals.exists('h5py'):
        raise SkipTest

    from mvpa2.base.hdf5 import h5save, h5load

    ds = _get_test_dataset(include_nodes)

    h5save(fn, ds)
    ds2 = h5load(fn)

    assert_datasets_equal(ds, ds2)
Example #47
def test_save_load_python_objs(obj):
    """Test saving objects of various types
    """
    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    assert_equal(type(obj), type(obj_))
    assert_equal(obj, obj_)
Example #48
def test_save_load_python_objs(obj):
    """Test saving objects of various types
    """
    # print obj, obj.shape
    f = tempfile.NamedTemporaryFile()

    # save/reload
    h5save(f.name, obj)
    obj_ = h5load(f.name)

    assert_equal(type(obj), type(obj_))
    assert_equal(obj, obj_)
Example #49
    def test_surface_dset_h5py_io_with_unicode(self, fn):
        skip_if_no_external('h5py')
        from mvpa2.base.hdf5 import h5save, h5load

        ds = dataset_wizard(np.arange(20).reshape((4, 5)), targets=1, chunks=1)
        ds.sa['unicode'] = ['u1', 'uu2', 'uuu3', 'uuuu4']
        ds.sa['str'] = ['s1', 'ss2', 'sss3', 'ssss4']
        ds.fa['node_indices'] = np.arange(5)

        # test h5py I/O
        h5save(fn, ds)
        ds2 = h5load(fn)
        assert_datasets_equal(ds, ds2)
Example #50
def _get_data(f):
    """Adapter to load data from various formats
    """
    if f.endswith('.hdf5'):
        from mvpa2.base.hdf5 import h5load
        data = h5load(f).samples
    else:  #if f.endswith('.nii.gz') or f.endswith('.img') or f.endswith('.hdr'):
        n = nb.load(f)
        data = n.get_data()
        # strip rudimentary 4th dimension
        if len(data.shape) == 4 and data.shape[-1] == 1:
            data = data[:, :, :, 0]
    return data
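A usage sketch with hypothetical file names; the adapter returns a bare array either way, so callers need not care about the input format:

vol = _get_data('zstat1.nii.gz')   # 3D array, singleton 4th dimension stripped
res = _get_data('results.hdf5')    # the .samples array of the stored dataset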
Example #51
def test_function_ptrs():
    if not externals.exists('nibabel'):
        raise SkipTest
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, ds)
    ds_loaded = h5load(f.name)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh), ds.samples)
Example #52
def _get_data(f):
    """Adapter to load data from various formats
    """
    if f.endswith('.hdf5'):
        from mvpa2.base.hdf5 import h5load
        data = h5load(f).samples
    else: #if f.endswith('.nii.gz') or f.endswith('.img') or f.endswith('.hdr'):
        n = nb.load(f)
        data = n.get_data()
        # strip rudimentary 4th dimension
        if len(data.shape) == 4 and data.shape[-1] == 1:
            data = data[:, :, :, 0]
    return data
Example #53
    def test_surface_dset_h5py_io_with_unicode(self, fn):
        skip_if_no_external('h5py')
        from mvpa2.base.hdf5 import h5save, h5load

        ds = dataset_wizard(np.arange(20).reshape((4, 5)), targets=1, chunks=1)
        ds.sa['unicode'] = [u'u1', u'uu2', u'uuu3', u'uuuu4']
        ds.sa['str'] = ['s1', 'ss2', 'sss3', 'ssss4']
        ds.fa['node_indices'] = np.arange(5)

        # test h5py I/O
        h5save(fn, ds)
        ds2 = h5load(fn)
        assert_datasets_equal(ds, ds2)
Example #54
    def test_surface_outside_volume_voxel_selection(self, fn):
        skip_if_no_external('h5py')
        from mvpa2.base.hdf5 import h5save, h5load

        vol_shape = (10, 10, 10, 1)
        vol_affine = np.identity(4)
        vg = volgeom.VolGeom(vol_shape, vol_affine)

        # make surfaces that are far away from all voxels
        # in the volume
        sphere_density = 4
        far = 10000.
        outer = surf.generate_sphere(sphere_density) * 10 + far
        inner = surf.generate_sphere(sphere_density) * 5 + far

        vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)
        radii = [10., 10]  # fixed and variable radii

        outside_node_margins = [0, far, True]
        for outside_node_margin in outside_node_margins:
            for radius in radii:
                selector = lambda: surf_voxel_selection.voxel_selection(vs,
                                                                        radius,
                                                                        outside_node_margin=outside_node_margin)

                if type(radius) is int and outside_node_margin is True:
                    assert_raises(ValueError, selector)
                else:
                    sel = selector()
                    if outside_node_margin is True:
                        # it should have all the keys, but they should
                        # all be empty
                        assert_array_equal(sel.keys(), range(inner.nvertices))
                        for k, v in sel.iteritems():
                            assert_equal(v, [])
                    else:
                        assert_array_equal(sel.keys(), [])

                    if outside_node_margin is True and \
                                    externals.versions['hdf5'] < '1.8.7':
                        raise SkipTest("Versions of hdf5 before 1.8.7 have "
                                       "problems with empty arrays")

                    h5save(fn, sel)
                    sel_copy = h5load(fn)

                    assert_array_equal(sel.keys(), sel_copy.keys())
                    for k in sel.keys():
                        assert_equal(sel[k], sel_copy[k])

                    assert_equal(sel, sel_copy)
Example #56
 def __handle_results(self, results):
     if self.results_backend == 'hdf5':
         # 'results' must be just a filename
         assert(isinstance(results, str))
         if __debug__:
             debug('SLC', "Loading results from %s" % results)
         results_data = h5load(results)
         os.unlink(results)
         if __debug__:
             debug('SLC_', "Loaded results of len=%d from"
                   % len(results_data))
         return results_data
     else:
         return results
Example #57
 def __handle_results(self, results):
     if self.params.results_backend == 'hdf5':
         # 'results' must be just a filename
         assert (isinstance(results, str))
         if __debug__:
             debug('SLC', "Loading results from %s" % results)
         results_data = h5load(results)
         os.unlink(results)
         if __debug__:
             debug('SLC_',
                   "Loaded results of len=%d" % len(results_data))
         for isub, res in enumerate(results_data):
             self.projections[isub] = self.projections[isub] + res
         return
Example #58
def test_function_ptrs():
    if not externals.exists('nibabel'):
        raise SkipTest
    ds = load_example_fmri_dataset()
    # add a mapper with a function ptr inside
    ds = ds.get_mapped(mean_sample())
    f = tempfile.NamedTemporaryFile()
    h5save(f.name, ds)
    ds_loaded = h5load(f.name)
    fresh = load_example_fmri_dataset().O
    # check that the reconstruction function pointer in the FxMapper points
    # to the right one
    assert_array_equal(ds_loaded.a.mapper.forward(fresh),
                       ds.samples)
Example #59
def load_results_map (path, namelist, datetime, task, mask='none'):
    """
        @param datetime: Indicates the moment of the analysis in a string formatted as AAAAMMGG_HHMM, could be used only a portion of that
        @param mask: Optional param, is only setted when we want to store the result of a particular brain mask used during analysis
    """    
    
    resFolder = '0_results'
    
    fileList = os.listdir(os.path.join(path, resFolder))
    
    if mask == 'none':
        mask = ''
    
    fileList = [f for f in fileList if f.find(task)     != -1 
                                   and f.find(datetime) != -1 
                                   and f.find('map')    != -1 
                                   and f.find(mask)     != -1
                                   ]
    fileList.sort()
    
    if len(fileList) == 0:
        print 'Results not found!'
        return
    
    fileName = [elem for elem in os.listdir(os.path.join(path, 'andant','rest')) if elem.find('.nii.gz') != -1][0]
    niftiimg = ni.load(os.path.join(path, 'andant', 'rest', fileName))
    affine_tr = niftiimg.get_affine()
    
    for i in range(0, len(fileList), 2):
        
        mapFile = fileList[i]
        mapperFile = fileList[i+1]
        
        parts = mapFile.split('_')
        name = parts[2]     
        
        print 'Opening: ' + os.path.join(path, resFolder, mapFile)
        map = h5load(os.path.join(path, resFolder, mapFile))
        
        print 'Opening: ' + os.path.join(path, resFolder, mapperFile)
        mapper = pickle.load(open(os.path.join(path, resFolder, mapperFile), 'r'))
    
        rev_map = mapper.reverse(map.samples)
        
        fileName = [elem for elem in os.listdir(os.path.join(path, name,'rest')) if elem.find('.nii.gz') != -1][0]
    
        niftiimg = ni.load(os.path.join(path, name,'rest',fileName))
    
        print 'Saving results at: ' + os.path.join(path, name, mapFile.split('.')[0] + '.nii.gz')
        ni.save(ni.Nifti1Image(rev_map.squeeze(), affine_tr), os.path.join(path, name, mapFile.split('.')[0] + '.nii.gz'))