Example #1
def test_tutorialdata_loader_masking():
    if not externals.exists('nibabel'):
        raise SkipTest

    ds_brain = load_tutorial_data(flavor='25mm')
    ds_nomask = load_tutorial_data(roi=None, flavor='25mm')
    assert_greater(ds_nomask.nfeatures, ds_brain.nfeatures)
Example #2
def test_roi_combo():
    skip_if_no_external('nibabel')

    ds1 = load_tutorial_data(roi=1, flavor='25mm')
    ds4 = load_tutorial_data(roi=4, flavor='25mm')
    ds_combo = load_tutorial_data(roi=(1, 4), flavor='25mm')
    assert_equal(ds1.nfeatures + ds4.nfeatures, ds_combo.nfeatures)
Example #3
def test_roi_combo():
    if not externals.exists('nibabel'):
        raise SkipTest

    ds1 = load_tutorial_data(roi=1, flavor='25mm')
    ds4 = load_tutorial_data(roi=4, flavor='25mm')
    ds_combo = load_tutorial_data(roi=(1, 4), flavor='25mm')
    assert_equal(ds1.nfeatures + ds4.nfeatures, ds_combo.nfeatures)
Example #4
def test_tutorialdata_rois(roi):
    if not externals.exists('nibabel'):
        raise SkipTest

    # just checking that we have the files
    ds = load_tutorial_data(roi=roi, flavor='25mm')
    assert_equal(len(ds), 1452)
Example #5
def test_hoc_rois(roi):
    skip_if_no_external('nibabel')

    # just checking which harvard-oxford rois we can rely on in the downsampled
    # data
    ds = load_tutorial_data(roi=roi, flavor='25mm')
    assert_equal(len(ds), 1452)
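These ROI-parameterized tests take the ROI id as an argument, so some driver must supply it. A minimal sketch of a hypothetical nose-style generator that could drive such a check (the ROI id range is an assumption, not taken from the source):

def test_hoc_rois_all():
    # hypothetical driver: yield one check per Harvard-Oxford ROI id
    for roi in range(1, 49):
        yield test_hoc_rois, roi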
Example #6
def test_hoc_rois(roi):
    if not externals.exists('nibabel'):
        raise SkipTest

    # just checking which harvard-oxford rois we can rely on in the downsampled
    # data
    ds = load_tutorial_data(roi=roi, flavor='25mm')
    assert_equal(len(ds), 1452)
Example #7
def test_example_data():
    skip_if_no_external('nibabel')

    # both expected flavors are present
    ds1 = load_example_fmri_dataset()
    ds25 = load_example_fmri_dataset(name='25mm', literal=True)
    assert_equal(len(ds1), len(ds25))
    # no 25mm dataset with numerical labels
    assert_raises(ValueError, load_example_fmri_dataset, name='25mm')
    # the 25mm example is the same as the coarse tutorial data
    ds25tut = load_tutorial_data(flavor='25mm')
    assert_array_equal(ds25.samples, ds25tut.samples)
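Here literal=True requests the original string condition labels; the ValueError check confirms that the 25mm flavor ships only with literal labels. A minimal usage sketch (a hypothetical session, assuming the tutorial data is installed):

ds25 = load_example_fmri_dataset(name='25mm', literal=True)
print(ds25.sa.targets[:3])  # string labels such as 'rest', not numeric codes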
Example #8
def _test_gnb_overflow_haxby():  # pragma: no cover
    # example from https://github.com/PyMVPA/PyMVPA/issues/581
    # a heavier version of the above test
    import os
    import numpy as np

    from mvpa2.datasets.sources.native import load_tutorial_data
    from mvpa2.clfs.gnb import GNB
    from mvpa2.measures.base import CrossValidation
    from mvpa2.generators.partition import HalfPartitioner
    from mvpa2.mappers.zscore import zscore
    from mvpa2.mappers.detrend import poly_detrend
    from mvpa2.datasets.miscfx import remove_invariant_features
    from mvpa2.testing.datasets import *

    datapath = '/usr/share/data/pymvpa2-tutorial/'
    haxby = load_tutorial_data(datapath,
                               roi='vt',
                               add_fa={
                                   'vt_thr_glm':
                                   os.path.join(datapath, 'haxby2001',
                                                'sub001', 'masks', 'orig',
                                                'vt.nii.gz')
                               })
    # poly_detrend(haxby, polyord=1, chunks_attr='chunks')
    haxby = haxby[np.array(
        [
            l in ['rest', 'scrambled']  # also: 'house', 'face'
            for l in haxby.targets
        ],
        dtype='bool')]
    #zscore(haxby, chunks_attr='chunks', param_est=('targets', ['rest']),
    #       dtype='float32')
    # haxby = haxby[haxby.sa.targets != 'rest']
    haxby = remove_invariant_features(haxby)

    clf = GNB(enable_ca='estimates', logprob=True, normalize=True)

    #clf.train(haxby)
    #clf.predict(haxby)
    # estimates a bit "overfit" to judge in the train/predict on the same data

    cv = CrossValidation(clf,
                         HalfPartitioner(attr='chunks'),
                         postproc=None,
                         enable_ca=['stats'])

    cv_results = cv(haxby)
    res1_est = clf.ca.estimates
    print "Estimates:\n", res1_est
    print "Exp(estimates):\n", np.round(np.exp(res1_est), 3)
    assert np.all(np.isfinite(res1_est))
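The final assertion guards against the log-probability overflow reported in issue #581: naively exponentiating very negative log-probabilities underflows to zero, and normalization then divides by zero. A minimal sketch of the standard log-sum-exp shift that avoids this (illustrative values, not taken from the test, and not necessarily how GNB implements normalize):

import numpy as np

logps = np.array([-1200.0, -1210.0])  # np.exp() of these underflows to 0.0
shifted = logps - logps.max()         # shift so the largest log-probability is 0
probs = np.exp(shifted) / np.exp(shifted).sum()
print(probs)                          # finite and sums to 1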
Example #9
def test_tutorialdata_rois(roi):
    skip_if_no_external('nibabel')

    # just checking that we have the files
    ds = load_tutorial_data(roi=roi, flavor='25mm')
    assert_equal(len(ds), 1452)
Example #10
def test_tutorialdata_loader_masking():
    skip_if_no_external('nibabel')

    ds_brain = load_tutorial_data(flavor='25mm')
    ds_nomask = load_tutorial_data(roi=None, flavor='25mm')
    assert_greater(ds_nomask.nfeatures, ds_brain.nfeatures)
Example #11
File: rsa_fmri.py  Project: hanke/PyMVPA
import numpy as np
import pylab as pl
from os.path import join as pjoin
from mvpa2 import cfg

"""
In this example we use a dataset from :ref:`Haxby et al. (2001) <HGF+01>` where
participants watched pictures of eight different visual objects while fMRI was
recorded. The following snippet loads a portion of this dataset (single subject)
from regions on the ventral and occipital surface of the brain.
"""

# load dataset -- ventral and occipital ROIs
from mvpa2.datasets.sources.native import load_tutorial_data
datapath = pjoin(cfg.get('location', 'tutorial data'), 'haxby2001')
ds = load_tutorial_data(roi=(15, 16, 23, 24, 36, 38, 39, 40, 48))

"""
We only do minimal pre-processing: linear trend removal and Z-scoring all voxel
time-series with respect to the mean and standard deviation of the "rest"
condition.
"""

# only minimal detrending
from mvpa2.mappers.detrend import poly_detrend
poly_detrend(ds, polyord=1, chunks_attr='chunks')
# z-scoring with respect to the 'rest' condition
from mvpa2.mappers.zscore import zscore
zscore(ds, chunks_attr='chunks', param_est=('targets', 'rest'))
# now remove 'rest' samples
ds = ds[ds.sa.targets != 'rest']
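The upstream rsa_fmri.py example continues from here by averaging all samples of each condition and computing a representational dissimilarity matrix. A minimal sketch of that next step, mirroring the structure of the upstream example (mean_group_sample and PDist are mvpa2 APIs):

# average all samples of each condition into a single pattern per target
from mvpa2.mappers.fx import mean_group_sample
mtgs = mean_group_sample(['targets'])
mtds = mtgs(ds)
# compute the dissimilarity matrix (correlation distance by default)
from mvpa2.measures import rsa
dsm = rsa.PDist(square=True)
res = dsm(mtds)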
Example #12
import numpy as np
import pylab as pl
import os
from os.path import join as pjoin
from mvpa2 import cfg

"""
In this example we use a dataset from Haxby et al. (2001) where participants watched pictures of eight different visual objects while fMRI was recorded. The following snippet loads a portion of this dataset (single subject) from regions on the ventral and occipital surface of the brain.
"""

# load dataset -- ventral and occipital ROIs
from mvpa2.datasets.sources.native import load_tutorial_data
datapath = pjoin(cfg.get('location', 'tutorial data'), 'haxby2001')
ds = load_tutorial_data(path='/usr/lib/python2.7/dist-packages/mvpa2/data',
                        roi=(15, 16, 23, 24, 36, 38, 39, 40, 48))

"""
We only do minimal pre-processing: linear trend removal and Z-scoring all voxel time-series with respect to the mean and standard deviation of the "rest" condition.
"""

# only minimal detrending
from mvpa2.mappers.detrend import poly_detrend
poly_detrend(ds, polyord=1, chunks_attr='chunks')
# z-scoring with respect to the 'rest' condition
from mvpa2.mappers.zscore import zscore
zscore(ds, chunks_attr='chunks', param_est=('targets', 'rest'))
# now remove 'rest' samples
ds = ds[ds.sa.targets != 'rest']

"""
Example #13
# Take example from http://www.pymvpa.org/examples/rsa_fmri.html

import numpy as np
import pylab as pl
from os.path import join as pjoin
from mvpa2 import cfg

#----- fix this
# load dataset -- ventral and occipital ROIs
from mvpa2.datasets.sources.native import load_tutorial_data
#datapath = pjoin(cfg.get('location', 'tutorial data'), 'haxby2001')
datapath = '/Users/drordotan/anaconda/lib/python2.7/site-packages/mvpa2/data'
#ds = load_tutorial_data(roi=(15, 16, 23, 24, 36, 38, 39, 40, 48))
ds = load_tutorial_data(path=datapath,
                        flavor='25mm',
                        roi=(15, 16, 23, 24, 36, 38, 39, 40, 48))

# We only do minimal pre-processing: linear trend removal and Z-scoring all voxel time-series with
# respect to the mean and standard deviation of the "rest" condition.
from mvpa2.mappers.detrend import poly_detrend

poly_detrend(ds, polyord=1, chunks_attr='chunks')
# z-scoring with respect to the 'rest' condition
from mvpa2.mappers.zscore import zscore

zscore(ds, chunks_attr='chunks', param_est=('targets', 'rest'))
# now remove 'rest' samples
ds = ds[ds.sa.targets != 'rest']
