def fetch_atlas(atlas_name, rois=False):
    """Returns the selected atlas path"""
    if atlas_name == 'msdl':
        atlas = fetch_msdl_atlas()['maps']
    elif atlas_name == 'harvard_oxford':
        atlas = os.path.join(CACHE_DIR, 'atlas',
                             'HarvardOxford-cortl-maxprob-thr0-2mm.nii.gz')
    elif atlas_name == 'juelich':
        atlas = os.path.join(CACHE_DIR, 'atlas',
                             'Juelich-maxprob-thr0-2mm.nii.gz')
    elif atlas_name == 'mayo':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_68_rois.nii.gz')
    elif atlas_name == 'canica':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_canica_61_rois.nii.gz')
    elif atlas_name == 'canica141':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_canica_141_rois.nii.gz')
    elif atlas_name == 'tvmsdl':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_tv_msdl.nii.gz')

    dmn = None
    if (atlas_name in ['msdl', 'mayo', 'canica']) and rois:
        dmn = fetch_dmn_atlas(atlas_name, atlas)
        atlas_img = index_img(atlas, dmn['rois'])
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_dmn.nii.gz')
        atlas_img.to_filename(atlas)
    return atlas, dmn
def fetch_atlas(atlas_name):
    """Returns the selected atlas path"""
    if atlas_name == 'msdl':
        atlas = fetch_msdl_atlas()['maps']
    elif atlas_name == 'harvard_oxford':
        # atlas = os.path.join(CACHE_DIR, 'atlas',
        #                      'HarvardOxford-cortl-prob-2mm.nii.gz')
        atlas = os.path.join(CACHE_DIR, 'atlas',
                             'HarvardOxford-cortl-maxprob-thr0-2mm.nii.gz')
    elif atlas_name == 'juelich':
        # atlas = os.path.join(CACHE_DIR, 'atlas',
        #                      'Juelich-prob-2mm.nii.gz')
        atlas = os.path.join(CACHE_DIR, 'atlas',
                             'Juelich-maxprob-thr0-2mm.nii.gz')
    elif atlas_name == 'mayo':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_68_rois.nii.gz')
    elif atlas_name == 'canica':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_canica_61_rois.nii.gz')
    elif atlas_name == 'canica141':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_canica_141_rois.nii.gz')
    elif atlas_name == 'tvmsdl':
        atlas = os.path.join(CACHE_DIR, 'atlas', 'atlas_tv_msdl.nii.gz')
    return atlas
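# Hypothetical usage sketch (not in the original files). The first call matches
# the variant above that takes a `rois` flag, the second the path-only variant;
# only one of them can be defined at a time since they share a name. CACHE_DIR
# and the cached atlas files are assumed to be set up elsewhere in the project.
atlas_path, dmn = fetch_atlas('msdl', rois=True)   # restrict MSDL maps to DMN ROIs
plain_path = fetch_atlas('harvard_oxford')         # returns just the cached path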
def load_msdl_names_and_coords():
    """Returns msdl atlas ROIs"""
    atlas = fetch_msdl_atlas()
    roi_coords = np.loadtxt(atlas['labels'], dtype=np.float, delimiter=',',
                            skiprows=1, usecols=(0, 1, 2))
    roi_names = np.loadtxt(atlas['labels'], dtype=np.str, delimiter=',',
                           skiprows=1, usecols=(3,))
    for i in range(len(roi_names)):
        roi_names[i] = roi_names[i].strip()
    roi_names[-1] = roi_names[-1][:10]
    roi_names[-2] = roi_names[-2][:10]
    return roi_names, roi_coords
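# Hedged usage sketch (not part of the original code): the coordinates returned
# by load_msdl_names_and_coords can serve directly as node positions for
# nilearn's glass-brain plotting; an all-zero adjacency matrix draws the nodes
# without any edges.
import numpy as np
from nilearn import plotting

roi_names, roi_coords = load_msdl_names_and_coords()
print('%d MSDL regions, first one: %s at %s'
      % (len(roi_names), roi_names[0], roi_coords[0]))

plotting.plot_connectome(np.zeros((len(roi_names), len(roi_names))),
                         roi_coords, title='MSDL regions')
plotting.show()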
plt.colorbar()
plt.title("%s / covariance" % title)

# Display precision matrix
plt.figure()
plt.imshow(prec, interpolation="nearest",
           vmin=-span, vmax=span,
           cmap=plotting.cm.bwr)
plt.colorbar()
plt.title("%s / precision" % title)


# Fetching datasets ###########################################################
print("-- Fetching datasets ...")
from nilearn import datasets

msdl_atlas_dataset = datasets.fetch_msdl_atlas()
adhd_dataset = datasets.fetch_adhd(n_subjects=1)

# Extracting region signals ###################################################
import nilearn.image
import nilearn.input_data

from sklearn.externals.joblib import Memory
mem = Memory('nilearn_cache')

masker = nilearn.input_data.NiftiMapsMasker(
    msdl_atlas_dataset.maps, resampling_target="maps", detrend=True,
    low_pass=None, high_pass=0.01, t_r=2.5, standardize=True,
    memory=mem, memory_level=1, verbose=2)
masker.fit()
# Display precision matrix
plt.figure()
plt.imshow(prec, interpolation="nearest",
           vmin=-span, vmax=span,
           cmap=plt.cm.get_cmap("bwr"))
plt.colorbar()
plt.title("%s / precision" % title)

###############################################################################
###############################################################################
CACHE_DIR = os.path.join('/', 'disk4t', 'mehdi', 'data', 'tmp')

dataset = fetch_adni_rs_fmri()
atlas = fetch_msdl_atlas()

func_files = dataset['func']
dx_group = np.array(dataset['dx_group'])
idx = {}
for g in ['AD', 'LMCI', 'EMCI', 'Normal']:
    idx[g] = np.where(dx_group == g)

atlas4d = nib.load(atlas['maps'])
atlas4d_data = atlas4d.get_data()
atlas3d_data = np.sum(atlas4d_data, axis=3)
atlas3d = nib.Nifti1Image(atlas3d_data, atlas4d.get_affine())

n_subjects = len(func_files)
subjects = []
cov_feat = []
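# Side note (a sketch, not part of the original script): the 4D-to-3D collapse
# done manually above can also be written with nilearn's math_img helper, which
# exposes numpy as `np` inside the formula and returns a Nifti1Image carrying
# the same affine as the input.
from nilearn.image import math_img

atlas3d_alt = math_img("np.sum(img, axis=-1)", img=atlas['maps'])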
of functional regions at rest, and the
:class:`nilearn.input_data.NiftiMapsMasker` to extract time series.

Note that the inverse covariance (or precision) contains values that can be
linked to *negated* partial correlations, so we negate it for display.

As the MSDL atlas comes with (x, y, z) MNI coordinates for the different
regions, we can visualize the matrix as a graph of interaction in a brain. To
avoid having too dense a graph, we represent only the 20% of edges with the
highest values.
"""
from nilearn import datasets
atlas = datasets.fetch_msdl_atlas()
atlas_filename = atlas['maps']

# Load the labels
import numpy as np
csv_filename = atlas['labels']

# The recfromcsv function can load a csv file
labels = np.recfromcsv(csv_filename)
names = labels['name']

from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)
<https://team.inria.fr/parietal/research/spatial_patterns/spatial-patterns-in-resting-state/>`_
of functional regions at rest.

The key to extracting signals is the
:class:`nilearn.input_data.NiftiMapsMasker`, which can transform nifti
objects into time series using a probabilistic atlas.

As the MSDL atlas comes with (x, y, z) MNI coordinates for the different
regions, we can visualize the matrix as a graph of interaction in a brain. To
avoid having too dense a graph, we represent only the 20% of edges with the
highest values.
"""
from nilearn import datasets
atlas = datasets.fetch_msdl_atlas()
atlas_filename = atlas['maps']

# Load the labels
import numpy as np
csv_filename = atlas['labels']

# The recfromcsv function can load a csv file
labels = np.recfromcsv(csv_filename)
names = labels['name']

from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)

data = datasets.fetch_adhd(n_subjects=1)
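# Hedged sketch of the remaining steps the docstring above describes: extract
# one subject's time series with the masker, estimate a sparse inverse
# covariance, and keep only the strongest 20% of edges on a glass brain.
# GraphicalLassoCV (scikit-learn >= 0.20) is assumed here; older versions of
# this example used GraphLassoCV.
from sklearn.covariance import GraphicalLassoCV
from nilearn import plotting

time_series = masker.fit_transform(data.func[0], confounds=data.confounds)

estimator = GraphicalLassoCV()
estimator.fit(time_series)

# MNI coordinates of the MSDL regions, read from the same labels CSV
coords = np.vstack((labels['x'], labels['y'], labels['z'])).T

# The precision matrix holds negated partial correlations, hence the minus
# sign; edge_threshold="80%" keeps only the top 20% of edges.
plotting.plot_connectome(-estimator.precision_, coords,
                         edge_threshold="80%",
                         title='MSDL (sparse inverse covariance)')
plotting.show()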
# Display precision matrix
plt.figure()
plt.imshow(prec, interpolation="nearest",
           vmin=-span, vmax=span,
           cmap=plotting.cm.bwr)
plt.colorbar()
plt.title("%s / precision" % title)


# Fetching datasets ###########################################################
print("-- Fetching datasets ...")
from nilearn import datasets

msdl_atlas_dataset = datasets.fetch_msdl_atlas()
adhd_dataset = datasets.fetch_adhd(n_subjects=n_subjects)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      adhd_dataset.func[0])  # 4D data

# Extracting region signals ###################################################
from nilearn import image
from nilearn import input_data

from sklearn.externals.joblib import Memory
mem = Memory('nilearn_cache')

masker = input_data.NiftiMapsMasker(msdl_atlas_dataset.maps,
                                    resampling_target="maps",
def test_fetch_msdl_atlas():
    dataset = datasets.fetch_msdl_atlas(data_dir=tmpdir, verbose=0)
    assert_true(isinstance(dataset.labels, _basestring))
    assert_true(isinstance(dataset.maps, _basestring))
    assert_equal(len(url_request.urls), 1)
# -*- coding: utf-8 -*-
"""
Created on Wed Apr  1 10:11:24 2015

@author: mr243268
"""
import os, time
import numpy as np
from loader import load_dynacomp
from nilearn.datasets import fetch_msdl_atlas
from nilearn.input_data import NiftiMapsMasker

dataset = load_dynacomp(preprocessing_folder='pipeline_2',
                        prefix='resampled_wr')
atlas = fetch_msdl_atlas()

# add mask, smoothing, filtering and detrending
masker = NiftiMapsMasker(maps_img=atlas['maps'],
                         mask_img=dataset.mask,
                         low_pass=.1,
                         high_pass=.01,
                         t_r=1.05,
                         smoothing_fwhm=6.,
                         detrend=True,
                         standardize=False,
                         resampling_target='data',
                         memory_level=0,
                         verbose=5)

for i in range(len(dataset.subjects)):
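    # Hypothetical loop body (the original snippet is truncated here): extract
    # each subject's time series with the masker and save them to disk. The
    # `func` attribute of the dynacomp dataset object is an assumption.
    time_series = masker.fit_transform(dataset.func[i])
    np.save('msdl_ts_' + dataset.subjects[i], time_series)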