Example #1
import nibabel as nib
import numpy as np
from nilearn import datasets


def get_template(space='mni152_1mm', mask=None):
    if space == 'mni152_1mm':
        if mask is None:
            img = nib.load(datasets.fetch_icbm152_2009()['t1'])
        elif mask == 'brain':
            img = nib.load(datasets.fetch_icbm152_2009()['mask'])
        elif mask == 'gm':
            img = datasets.fetch_icbm152_brain_gm_mask(threshold=0.2)
        else:
            raise ValueError('Mask {0} not supported'.format(mask))
    elif space == 'mni152_2mm':
        if mask is None:
            img = datasets.load_mni152_template()
        elif mask == 'brain':
            img = datasets.load_mni152_brain_mask()
        elif mask == 'gm':
            # this approach seems to approximate the 0.2 thresholded
            # GM mask pretty well
            temp_img = datasets.load_mni152_template()
            data = temp_img.get_data()
            data = data * -1
            data[data != 0] += np.abs(np.min(data))
            data = (data > 1200).astype(int)
            img = nib.Nifti1Image(data, temp_img.affine)
        else:
            raise ValueError('Mask {0} not supported'.format(mask))
    else:
        raise ValueError('Space {0} not supported'.format(space))
    return img
Example #2
import os.path as op

import nibabel as nib
import numpy as np
from nilearn import datasets

# NOTE: get_resource_path() is assumed to be a helper from the surrounding
# package; it is not part of nilearn or nibabel.


def get_template(space='mni152_1mm', mask=None):
    """
    Load template file.

    Parameters
    ----------
    space : {'mni152_1mm', 'mni152_2mm', 'ale_2mm'}, optional
        Template to load. Default is 'mni152_1mm'.
    mask : {None, 'brain', 'gm'}, optional
        Whether to return the raw template (None), a brain mask ('brain'), or
        a gray-matter mask ('gm'). Default is None.

    Returns
    -------
    img : :obj:`nibabel.nifti1.Nifti1Image`
        Template image object.
    """
    if space == 'mni152_1mm':
        if mask is None:
            img = nib.load(datasets.fetch_icbm152_2009()['t1'])
        elif mask == 'brain':
            img = nib.load(datasets.fetch_icbm152_2009()['mask'])
        elif mask == 'gm':
            img = datasets.fetch_icbm152_brain_gm_mask(threshold=0.2)
        else:
            raise ValueError('Mask {0} not supported'.format(mask))
    elif space == 'mni152_2mm':
        if mask is None:
            img = datasets.load_mni152_template()
        elif mask == 'brain':
            img = datasets.load_mni152_brain_mask()
        elif mask == 'gm':
            # this approach seems to approximate the 0.2 thresholded
            # GM mask pretty well
            temp_img = datasets.load_mni152_template()
            data = temp_img.get_data()
            data = data * -1
            data[data != 0] += np.abs(np.min(data))
            data = (data > 1200).astype(int)
            img = nib.Nifti1Image(data, temp_img.affine)
        else:
            raise ValueError('Mask {0} not supported'.format(mask))
    elif space == 'ale_2mm':
        if mask is None:
            img = datasets.load_mni152_template()
        else:
            # Not the same as the nilearn brain mask, but should correspond to
            # the default "more conservative" MNI152 mask in GingerALE.
            img = nib.load(
                op.join(get_resource_path(),
                        'templates/MNI152_2x2x2_brainmask.nii.gz'))
    else:
        raise ValueError('Space {0} not supported'.format(space))
    return img
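A short usage sketch for the get_template() function shown in Examples #1 and #2 (illustrative only; the argument values come from the function's own branches and docstring, and the variable names are chosen here for illustration):

# Hypothetical calls to the function defined above.
t1_img = get_template(space='mni152_1mm', mask=None)   # raw 1 mm T1 template
gm_img = get_template(space='mni152_2mm', mask='gm')   # 2 mm grey-matter mask
# The 'ale_2mm' branch additionally needs the package's bundled
# MNI152_2x2x2_brainmask.nii.gz resource (via get_resource_path()).
ale_img = get_template(space='ale_2mm', mask='brain')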
Example #3
age = oasis_dataset.ext_vars['age'].astype(float)

###############################################################################
# Sex is encoded as 'M' or 'F'. Make it a binary variable.
sex = oasis_dataset.ext_vars['mf'] == b'F'

###############################################################################
# Print basic information on the dataset
print('First gray-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.gray_matter_maps[0])  # 3D data
print('First white-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.white_matter_maps[0])  # 3D data

###############################################################################
# Get a mask image: a mask of the cortex of the ICBM template
gm_mask = datasets.fetch_icbm152_brain_gm_mask()

###############################################################################
# Resample the images, since this mask has a different resolution
from nilearn.image import resample_to_img
mask_img = resample_to_img(
    gm_mask, gray_matter_map_filenames[0], interpolation='nearest')

#############################################################################
# Analyse data
# ------------
#
# First create an adequate design matrix with three columns: 'age',
# 'sex', 'intercept'.
import pandas as pd
import numpy as np
Example #4
age = oasis_dataset.ext_vars['age'].astype(float)

###############################################################################
# Sex is encoded as 'M' or 'F'. Hence, we make it a binary variable.
sex = oasis_dataset.ext_vars['mf'] == 'F'

###############################################################################
# Print basic information on the dataset.
print('First gray-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.gray_matter_maps[0])  # 3D data
print('First white-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.white_matter_maps[0])  # 3D data

###############################################################################
# Get a mask image: a mask of the cortex of the ICBM template.
gm_mask = datasets.fetch_icbm152_brain_gm_mask()

###############################################################################
# Resample the images, since this mask has a different resolution.
from nilearn.image import resample_to_img
mask_img = resample_to_img(
    gm_mask, gray_matter_map_filenames[0], interpolation='nearest')

#############################################################################
# Analyse data
# ------------
#
# First, we create an adequate design matrix with three columns: 'age',
# 'sex', 'intercept'.
import pandas as pd
import numpy as np
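Examples #3 and #4 both stop right after these imports. A minimal sketch of how the announced three-column design matrix could be assembled, assuming `age` and `sex` are the per-subject arrays loaded above (variable names below are illustrative):

# Sketch: stack age, sex and an intercept into a subjects x regressors
# design matrix with the three columns named in the comment above.
n_subjects = len(age)
intercept = np.ones(n_subjects)
design_matrix = pd.DataFrame(
    np.vstack((age, sex, intercept)).T,
    columns=['age', 'sex', 'intercept'])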
Example #5
motion_confounds = data_store[name].motion_param
connectome_regress_confounds = None

from utils import data_info

shape, affine, _ = data_info(func_imgs[0])

###########################################################################
# Masker
# ------
# Masking the data

from nilearn import datasets

# Fetch the grey-matter mask shipped with nilearn's ICBM152 templates
gm_mask = datasets.fetch_icbm152_brain_gm_mask(threshold=0.2)

from nilearn.input_data import MultiNiftiMasker

masker = MultiNiftiMasker(mask_img=gm_mask, target_shape=shape,
                          target_affine=affine, smoothing_fwhm=6.,
                          standardize=True, detrend=True, mask_strategy='epi',
                          memory=mem, memory_level=2, n_jobs=2,
                          verbose=10)
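The masker is constructed but never applied in this fragment. A minimal usage sketch, assuming `func_imgs` is the list of functional images referenced earlier in the snippet (the variable name below is illustrative):

# Sketch: fit the masker and extract one (time points x voxels) array
# per subject from the functional images.
masked_time_series = masker.fit_transform(func_imgs)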

##############################################################################
# Cross Validator
# ---------------

from sklearn.model_selection import StratifiedShuffleSplit
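The example is cut off right after this import. A minimal sketch of how the cross-validator might be set up; the split count, test size, and the `X`/`labels` placeholders are illustrative assumptions, not from the original:

# Sketch: stratified shuffle splits over class labels.
# `X` and `labels` stand in for the feature matrix and labels built
# elsewhere in the original script.
cv = StratifiedShuffleSplit(n_splits=20, test_size=0.25, random_state=0)
# for train_index, test_index in cv.split(X, labels):
#     ...  # fit and score a classifier per split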
Example #6
#
# Masking consists of selecting the region of the image on which the
# model is run: it is useless to run it outside of the brain.
#
# The approach taken by FirstLevelModel is to estimate the mask from the
# fMRI data themselves when none is explicitly provided. Since the data
# have been resampled into MNI space, we can instead use a mask of the
# grey matter in MNI space. The benefit is that it makes voxel-level
# comparisons easier across subjects and datasets, and removes
# non-grey-matter regions, in which no BOLD signal is expected. The
# downside is that the mask may not fit these particular data very well.

data_mask = first_level_model.masker_.mask_img_
from nilearn.datasets import fetch_icbm152_brain_gm_mask
icbm_mask = fetch_icbm152_brain_gm_mask()

from nilearn.plotting import plot_roi
plt.figure(figsize=(16, 4))
ax = plt.subplot(121)
plot_roi(icbm_mask, title='ICBM mask', axes=ax)
ax = plt.subplot(122)
plot_roi(data_mask, title='Data-driven mask', axes=ax)
plt.show()

#########################################################################
# To save time, we resample icbm_mask to our data.
# For this we call Nilearn's resample_to_img routine, with
# interpolation='nearest' to keep the mask a binary image.
from nilearn.image import resample_to_img
resampled_icbm_mask = resample_to_img(
    icbm_mask, data_mask, interpolation='nearest')
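A natural follow-up is to pass the resampled grey-matter mask back into the GLM. A hedged sketch assuming the current nilearn.glm API (older nistats releases use a different module path and parameter name), with an illustrative variable name:

# Sketch: build a first-level model constrained to the resampled
# grey-matter mask instead of the data-driven EPI mask.
from nilearn.glm.first_level import FirstLevelModel

model_with_gm_mask = FirstLevelModel(mask_img=resampled_icbm_mask)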
Example #7
# First we load the ADHD200 data
from nilearn import datasets
import scipy as sp
from fmri_methods_sipi import rot_sub_data, hotelling_t2
import matplotlib.pyplot as plt
from nilearn import input_data
from nilearn.plotting import plot_roi, show, plot_stat_map
from nilearn.image.image import mean_img
from nilearn.image import index_img
from sklearn.decomposition import PCA
from scipy.stats import levene
from statsmodels.sandbox.stats.multicomp import multipletests
from nilearn.image import resample_to_img

std_msk = datasets.load_mni152_brain_mask()
gm_msk = datasets.fetch_icbm152_brain_gm_mask(threshold=0.3)
gm_msk.set_data_dtype(sp.float32)
affine_tar = 1.0 * sp.eye(4)
affine_tar[3, 3] = .3

adhd_dataset = datasets.fetch_adhd()

func_filenames = adhd_dataset.func  # list of 4D nifti files for each subject

mean_func_img = mean_img(func_filenames[0])

gm_msk = resample_to_img(source_img=gm_msk,
                         target_img=mean_func_img,
                         interpolation='nearest')
#affine_tar,
#interpolation='nearest', target_shape=sp.array([61,73,61]))
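The snippet ends with some commented-out alternative resampling arguments. A minimal sketch of how the resampled grey-matter mask might then be applied to the functional data; the NiftiMasker usage and variable names here are assumptions, not part of the original:

# Sketch: extract grey-matter voxel time series for the first subject
# using the resampled mask.
gm_masker = input_data.NiftiMasker(mask_img=gm_msk, standardize=True)
first_subject_ts = gm_masker.fit_transform(func_filenames[0])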
Example #8
#
# Masking consists of selecting the region of the image on which the
# model is run: it is useless to run it outside of the brain.
#
# The approach taken by FirstLevelModel is to estimate the mask from the
# fMRI data themselves when none is explicitly provided. Since the data
# have been resampled into MNI space, we can instead use a mask of the
# grey matter in MNI space. The benefit is that it makes voxel-level
# comparisons easier across subjects and datasets, and removes
# non-grey-matter regions, in which no BOLD signal is expected. The
# downside is that the mask may not fit these particular data very well.

data_mask = first_level_model.masker_.mask_img_
from nilearn.datasets import fetch_icbm152_brain_gm_mask
icbm_mask = fetch_icbm152_brain_gm_mask()

from nilearn.plotting import plot_roi 
plt.figure(figsize=(16, 4))
ax = plt.subplot(121)
plot_roi(icbm_mask, title='ICBM mask', axes=ax)
ax = plt.subplot(122)
plot_roi(data_mask, title='Data-driven mask', axes=ax)
plt.show()

#########################################################################
# To save time, we resample icbm_mask to our data.
# For this we call Nilearn's resample_to_img routine, with
# interpolation='nearest' to keep the mask a binary image.
from nilearn.image import resample_to_img
resampled_icbm_mask = resample_to_img(icbm_mask, data_mask,
                                      interpolation='nearest')