Example #1
def test_fetch_haxby_simple():
    local_url = "file://" + os.path.join(datadir, "pymvpa-exampledata.tar.bz2")
    haxby = datasets.fetch_haxby_simple(data_dir=tmpdir,
                                        url=local_url,
                                        verbose=0)
    datasetdir = os.path.join(tmpdir, 'haxby2001_simple', 'pymvpa-exampledata')
    for key, file in [('session_target', 'attributes.txt'),
                      ('func', 'bold.nii.gz'), ('mask', 'mask.nii.gz'),
                      ('conditions_target', 'attributes_literal.txt')]:
        assert_equal(haxby[key], os.path.join(datasetdir, file))
        assert_true(os.path.exists(os.path.join(datasetdir, file)))
Example #2
def test_fetch_haxby_simple():
    local_url = "file://" + os.path.join(datadir, "pymvpa-exampledata.tar.bz2")
    haxby = datasets.fetch_haxby_simple(data_dir=tmpdir, url=local_url,
                                        verbose=0)
    datasetdir = os.path.join(tmpdir, 'haxby2001_simple', 'pymvpa-exampledata')
    for key, file in [
            ('session_target', 'attributes.txt'),
            ('func', 'bold.nii.gz'),
            ('mask', 'mask.nii.gz'),
            ('conditions_target', 'attributes_literal.txt')]:
        assert_equal(haxby[key], os.path.join(datasetdir, file))
        assert_true(os.path.exists(os.path.join(datasetdir, file)))
Example #3
"""
The Haxby dataset: face vs house in object recognition
=======================================================

This example performs a simple but efficient decoding on the Haxby dataset:
feature selection followed by an SVM.

"""

#############################################################################
# Retrieve the files of the Haxby dataset
from nilearn import datasets

haxby_dataset = datasets.fetch_haxby_simple()

# print basic information on the dataset
print('Mask nifti image (3D) is located at: %s' % haxby_dataset.mask)
print('Functional nifti image (4D) is located at: %s' % haxby_dataset.func[0])

#############################################################################
# Load the behavioral data
import numpy as np
y, session = np.loadtxt(haxby_dataset.session_target[0]).astype("int").T
conditions = np.recfromtxt(haxby_dataset.conditions_target[0])['f0']

# Restrict to faces and houses
condition_mask = np.logical_or(conditions == b'face', conditions == b'house')
y = y[condition_mask]
conditions = conditions[condition_mask]

# We have 2 conditions
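
The snippet above is cut off before the decoding itself. As a rough sketch of the
feature-selection-plus-SVM step the docstring describes, continuing from
haxby_dataset, y and condition_mask defined above (the k=500 value, the mask_img
keyword and the variable names are illustrative assumptions, not the original
example's settings):

from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC
from sklearn.model_selection import cross_val_score
from nilearn.input_data import NiftiMasker

# Turn the 4D functional image into a (samples x voxels) matrix and keep
# only the face/house volumes selected above
masker = NiftiMasker(mask_img=haxby_dataset.mask, standardize=True)
X = masker.fit_transform(haxby_dataset.func[0])[condition_mask]

# Univariate (ANOVA) feature selection followed by a linear SVM
anova_svc = Pipeline([
    ('anova', SelectKBest(f_classif, k=500)),
    ('svc', SVC(kernel='linear')),
])
print("Decoding accuracy: %.2f" % cross_val_score(anova_svc, X, y, cv=5).mean())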
Example #4
"""
The example shows the small differences that exist between
Bonferroni-corrected p-values and family-wise corrected p-values obtained
from a permutation test combined with a max-type procedure.
Bonferroni correction is a bit conservative, as revealed by the presence of
a few false negatives.

"""
# Author: Virgile Fritsch, <*****@*****.**>, Feb. 2014
import numpy as np
import nibabel
from nilearn import datasets
from nilearn.input_data import NiftiMasker
from nilearn.mass_univariate import permuted_ols

### Load Haxby dataset ########################################################
dataset_files = datasets.fetch_haxby_simple()

### Mask data #################################################################
mask_img = nibabel.load(dataset_files.mask)
nifti_masker = NiftiMasker(mask=dataset_files.mask,
                           memory='nilearn_cache',
                           memory_level=1)  # cache options
fmri_masked = nifti_masker.fit_transform(dataset_files.func)

### Restrict to faces and houses ##############################################
conditions_encoded, _ = np.loadtxt(
    dataset_files.session_target).astype("int").T
conditions = np.recfromtxt(dataset_files.conditions_target)['f0']
condition_mask = np.logical_or(conditions == 'face', conditions == 'house')
conditions_encoded = conditions_encoded[condition_mask]
fmri_masked = fmri_masked[condition_mask]
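
The code stops before the statistical test itself. A hedged sketch of the
comparison the docstring describes, continuing from fmri_masked and
conditions_encoded above (the number of permutations and the parametric
Bonferroni step are illustrative, not the original example's settings):

from scipy import stats

# Max-type permutation test: returns family-wise corrected -log10 p-values,
# the original scores and the permutation distribution of the max statistic
neg_log_pvals_permuted, scores_orig, _ = permuted_ols(
    conditions_encoded.reshape(-1, 1), fmri_masked,
    model_intercept=True, n_perm=1000, n_jobs=1)

# Parametric p-values with a Bonferroni correction over all voxels
n_samples, n_voxels = fmri_masked.shape
pvals = 2 * stats.t.sf(np.abs(scores_orig), n_samples - 2)  # dof: slope + intercept
neg_log_pvals_bonferroni = -np.log10(np.minimum(1.0, pvals * n_voxels))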
Example #5
"""
The Haxby dataset: different multi-class strategies
=======================================================

We compare the one-vs-all and one-vs-one multi-class strategies, looking at the
overall cross-validated accuracy and at the confusion matrix.

"""
# Import matplotlib for plotting
from matplotlib import pyplot as plt

### Load Haxby dataset ########################################################
from nilearn import datasets
import numpy as np
dataset_files = datasets.fetch_haxby_simple()

# Load the behavioral (target) data
y, session = np.loadtxt(dataset_files.session_target).astype("int").T
conditions = np.recfromtxt(dataset_files.conditions_target)['f0']

# Remove the 'rest' condition, as it is not very interesting
non_rest = conditions != 'rest'
conditions = conditions[non_rest]
y = y[non_rest]
session = session[non_rest]

# Get the labels of the numerical conditions represented by the vector y
unique_conditions, order = np.unique(conditions, return_index=True)
# Sort the conditions by the order of appearance
unique_conditions = unique_conditions[np.argsort(order)]
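
The visible part of the example ends before the classifiers are built. A minimal
sketch of the two strategies the docstring compares, continuing from
dataset_files, conditions and non_rest above (the mask_img keyword, cv=5 and the
use of a linear SVC are illustrative assumptions, not the original example's
choices):

from sklearn.svm import SVC
from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier
from sklearn.model_selection import cross_val_score
from nilearn.input_data import NiftiMasker

# Mask the functional data and drop the 'rest' volumes selected above
masker = NiftiMasker(mask_img=dataset_files.mask, standardize=True)
X = masker.fit_transform(dataset_files.func)[non_rest]

svc_ovo = OneVsOneClassifier(SVC(kernel='linear'))   # one classifier per pair of classes
svc_ova = OneVsRestClassifier(SVC(kernel='linear'))  # one classifier per class

print("OvO accuracy: %.2f" % cross_val_score(svc_ovo, X, conditions, cv=5).mean())
print("OvA accuracy: %.2f" % cross_val_score(svc_ova, X, conditions, cv=5).mean())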
Example #6
"""
[2] Anderson, M. J. & Robinson, J. (2001).
    Permutation tests for linear models.
    Australian & New Zealand Journal of Statistics, 43(1), 75-88.
    (http://avesbiodiv.mncn.csic.es/estadistica/permut2.pdf)

"""
# Author: Virgile Fritsch, <*****@*****.**>, Feb. 2014
import numpy as np
from scipy import linalg
from nilearn import datasets
from nilearn.input_data import NiftiMasker
from nilearn.mass_univariate import permuted_ols

### Load Haxby dataset ########################################################
haxby_dataset = datasets.fetch_haxby_simple()

# print basic information on the dataset
print('Mask nifti image (3D) is located at: %s' % haxby_dataset.mask)
print('Functional nifti image (4D) is located at: %s' % haxby_dataset.func[0])

### Mask data #################################################################
mask_filename = haxby_dataset.mask
nifti_masker = NiftiMasker(
    mask_img=mask_filename,
    memory='nilearn_cache', memory_level=1)  # cache options
func_filename = haxby_dataset.func[0]
fmri_masked = nifti_masker.fit_transform(func_filename)

### Restrict to faces and houses ##############################################
conditions_encoded, sessions = np.loadtxt(