Example 1
import numpy as np
from nilearn import datasets
from nilearn.masking import compute_epi_mask
# extract_time_series comes from the module under test
# (project-specific import not shown).

def test_extract_time_series_empty_rois():
    data = datasets.fetch_development_fmri(1)
    fmri_file = data.func[0]
    mask_img = compute_epi_mask(fmri_file)
    masker_pars = {
        "mask_img": mask_img,
        "detrend": True,
        "standardize": True,
        "smoothing_fwhm": 6,
        "radius": 5,
        "allow_overlap": True
    }
    atlas = datasets.fetch_coords_seitzman_2018()
    coords = np.vstack((atlas.rois['x'], atlas.rois['y'], atlas.rois['z'])).T
    coords = coords[:4]
    coords[0] = [-90, -90, -90]
    coords[1] = [-90, -90, -90]
    time_series, excluded_rois = extract_time_series(masker_pars, fmri_file,
                                                     None, coords)

    assert excluded_rois == {0: [-90., -90., -90.], 1: [-90., -90., -90.]}
    assert (time_series[:, 0] == 0).all()
    assert (time_series[:, 1] == 0).all()
    assert not (time_series[0] == 0).all()
    assert not (time_series[1] == 0).all()
Example 2
def test_extract_time_series():
    data = datasets.fetch_development_fmri(1)
    fmri_file = data.func[0]
    mask_img = compute_epi_mask(fmri_file)
    masker_pars = {
        "mask_img": mask_img,
        "detrend": True,
        "standardize": True,
        "smoothing_fwhm": 6,
        "radius": 5,
        "allow_overlap": True
    }
    atlas = datasets.fetch_coords_seitzman_2018()
    coords = np.vstack((atlas.rois['x'], atlas.rois['y'], atlas.rois['z'])).T
    coords = coords[:4]
    time_series, excluded_rois = extract_time_series(masker_pars, fmri_file,
                                                     None, coords)
    time_series1, excluded_rois1 = extract_time_series_empty(
        masker_pars, fmri_file, None, coords)

    assert excluded_rois == {}
    assert excluded_rois1 == {}
    assert time_series.shape == (168, 4)
    assert time_series1.shape == (168, 4)
    assert np.allclose(time_series, time_series1)
Example 3
        plotting.plot_matrix(matrix,
                             vmin=-vmax,
                             vmax=vmax,
                             cmap='RdBu_r',
                             title=title,
                             figure=fig,
                             colorbar=False)


###############################################################################
# Load brain development fMRI dataset and MSDL atlas
# -------------------------------------------------------------------
# We study only 30 subjects from the dataset, to save computation time.
from nilearn import datasets

rest_data = datasets.fetch_development_fmri(n_subjects=30)

###############################################################################
# We use probabilistic regions of interest (ROIs) from the MSDL atlas.
msdl_data = datasets.fetch_atlas_msdl()
msdl_coords = msdl_data.region_coords
n_regions = len(msdl_coords)
print('MSDL has {0} ROIs, part of the following networks :\n{1}.'.format(
    n_regions, msdl_data.networks))

###############################################################################
# Region signals extraction
# -------------------------
# To extract the region time series, we instantiate a
# :class:`nilearn.input_data.NiftiMapsMasker` object and pass it the atlas
# filename, as well as the filtering band-width and the detrending option.
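# A minimal sketch of that step; the masker parameters below are illustrative
# choices, not values taken from the original example.
from nilearn.input_data import NiftiMapsMasker

masker = NiftiMapsMasker(maps_img=msdl_data.maps, standardize=True,
                         detrend=True, low_pass=0.1, high_pass=0.01, t_r=2,
                         memory='nilearn_cache', memory_level=1, verbose=1)

# One time series matrix of shape (n_timepoints, n_regions) per subject,
# with the matching confounds regressed out.
pooled_subjects = [masker.fit_transform(func, confounds=confound)
                   for func, confound in
                   zip(rest_data.func, rest_data.confounds)]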
Example 4
     ISBI 2016, Lecture Notes in Computer Science

.. note::

    The attribute `components_img_` of decomposition estimators is available
    from version 0.4.1 onwards.
    For older versions, get the components image by unmasking the deprecated
    attribute `components_` with the `masker_` attribute embedded in the
    estimator.
    See the :ref:`section Inverse transform: unmasking data <unmasking_step>`.
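
    A minimal sketch of that fallback, assuming `estimator` is a fitted
    decomposition estimator::

        components_img = estimator.masker_.inverse_transform(
            estimator.components_)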
"""
###############################################################################
# Load brain development fmri dataset
# -----------------------------------
from nilearn import datasets

rest_dataset = datasets.fetch_development_fmri(n_subjects=30)
func_filenames = rest_dataset.func  # list of 4D nifti files for each subject

# print basic information on the dataset
print('First functional nifti image (4D) is at: %s' %
      rest_dataset.func[0])  # 4D data

###############################################################################
# Create two decomposition estimators
# ------------------------------------
from nilearn.decomposition import DictLearning, CanICA

n_components = 40

###############################################################################
# Dictionary learning
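# A minimal sketch of how the two estimators could be set up and fitted;
# the parameter values are illustrative, not taken from the original example.
dict_learning = DictLearning(n_components=n_components, random_state=0,
                             memory='nilearn_cache', memory_level=2,
                             verbose=1)
canica = CanICA(n_components=n_components, random_state=0,
                memory='nilearn_cache', memory_level=2, verbose=1)

# Fit both estimators on the functional images and read the component maps
# through the `components_img_` attribute mentioned above.
for estimator in (dict_learning, canica):
    estimator.fit(func_filenames)
    components_img = estimator.components_img_
    print(type(estimator).__name__, components_img.shape)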
"""
Smoothing an image
===================

Here we smooth a mean EPI image and plot the result.

As we vary the smoothing FWHM, note how we decrease the amount of noise,
but also lose spatial details. In general, the best amount of smoothing
for a given analysis depends on the spatial extent of the effects that
are expected.

"""

from nilearn import datasets, plotting, image

data = datasets.fetch_development_fmri(n_subjects=1)

# Print basic information on the dataset
print('First subject functional nifti image (4D) is located at: %s' %
      data.func[0])

first_epi_file = data.func[0]

# First we compute the mean image from the 4D series of images
mean_func = image.mean_img(first_epi_file)

# Then we smooth with a varying amount of smoothing, from none to 20mm,
# in increments of 5mm
for smoothing in range(0, 25, 5):
    smoothed_img = image.smooth_img(mean_func, smoothing)
    plotting.plot_epi(smoothed_img,
                      title="Smoothing %imm" % smoothing)
Example 6
########################################################################
# Download a brain development fmri dataset and turn it into a data matrix
# -----------------------------------------------------------------------
#
# We download one subject of the movie-watching dataset from the Internet.

import time

import matplotlib.pyplot as plt
from matplotlib import patches, ticker
import numpy as np

from nilearn import datasets, plotting
from nilearn.image import get_data, index_img, mean_img
from nilearn.regions import Parcellations

dataset = datasets.fetch_development_fmri(n_subjects=1)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      dataset.func[0])  # 4D data


#########################################################################
# Brain parcellations with Ward Clustering
# ----------------------------------------
#
# Transforming a list of images into a data matrix and building brain
# parcellations can all be done at once using the `Parcellations` object.


# Computing ward for the first time will be long... This can be seen by
# measuring the elapsed time.
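# A minimal sketch of that step; the parameter values are illustrative and
# not taken from the original example.
start = time.time()
ward = Parcellations(method='ward', n_parcels=1000,
                     standardize=False, smoothing_fwhm=2.,
                     memory='nilearn_cache', memory_level=1, verbose=1)
# Fit on the functional images of the single subject downloaded above.
ward.fit(dataset.func)
print("Ward agglomeration into 1000 clusters: %.2fs" % (time.time() - start))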
Example 7
"""

##############################################################################
# Retrieve the atlas and the data
# -------------------------------
from nilearn import datasets

dataset = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')
atlas_filename = dataset.maps
labels = dataset.labels

print('Atlas ROIs are located in nifti image (3D) at: %s' %
      atlas_filename)  # 3D label image

# One subject of brain development fmri data
data = datasets.fetch_development_fmri(n_subjects=1, reduce_confounds=True)
fmri_filenames = data.func[0]
reduced_confounds = data.confounds[0]  # This is a preselected set of confounds

##############################################################################
# Extract signals on a parcellation defined by labels
# ---------------------------------------------------
# Using the NiftiLabelsMasker
from nilearn.maskers import NiftiLabelsMasker
masker = NiftiLabelsMasker(labels_img=atlas_filename,
                           standardize=True,
                           memory='nilearn_cache',
                           verbose=5)

# Here we go from nifti files to the signal time series in a numpy
# array. Note how we give confounds to be regressed out during signal
# extraction.
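# A minimal sketch of that call, using only the names defined above:
time_series = masker.fit_transform(fmri_filenames,
                                    confounds=reduced_confounds)
print(time_series.shape)  # (n_timepoints, n_regions)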
Example 8
from nilearn.datasets import fetch_development_fmri


def nilearn_data(testpath):
    # Download a single adult subject of the development dataset into the
    # test directory.
    return fetch_development_fmri(n_subjects=1,
                                  age_group="adult",
                                  data_dir=str(testpath))