import glob
import itertools
from os.path import join as pjoin

import numpy as np
from nilearn._utils import check_niimg
from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker
from nilearn.input_data.base_masker import BaseMasker

# NOTE: ``DERIV_DIR`` and ``REGEX`` are assumed to be module-level constants
# defined elsewhere (the derivatives directory and a compiled regular
# expression for parsing BIDS entities from filenames).


def get_data(subj, mask, sessions=None, dtype='jacobian'):
    """
    Parameters
    ----------
    subj : str
        Subject identifier, used to locate images in ``DERIV_DIR``
    mask : Niimg_like or BaseMasker
    sessions : list of str, optional
    dtype : str, optional
        Suffix of the image files to load

    Returns
    -------
    data : (N x M) np.ndarray
        Data extracted from the subject's images, where ``M`` is the number
        of parcels in ``mask``
    demo : np.ndarray
        Subject / session information parsed from the image filenames
    """
    # check mask is correct
    if not isinstance(mask, BaseMasker):
        if not isinstance(mask, str):
            raise ValueError('Mask must be a mask object or filepath.')
        if 'probabilistic' in mask:
            mask = NiftiMapsMasker(mask, resampling_target='maps')
        else:
            mask = NiftiLabelsMasker(mask, resampling_target='labels')

    # only fit mask if it hasn't been fitted, to save time
    if not hasattr(mask, 'maps_img_') and not hasattr(mask, 'labels_img_'):
        mask = mask.fit()

    # get images for supplied sessions (or all images)
    subj_dir = pjoin(DERIV_DIR, subj)
    if sessions is not None:
        imgs = list(itertools.chain.from_iterable(
            [glob.glob(pjoin(subj_dir, f'*_ses-{ses}_*_{dtype}.nii.gz'))
             for ses in sorted(sessions)]
        ))
    else:
        imgs = sorted(glob.glob(pjoin(subj_dir, f'*{dtype}.nii.gz')))

    # extract subject / session information from data (BIDS format)
    demo = np.row_stack([REGEX.findall(i) for i in imgs])

    # fit mask to data and stack across sessions
    data = np.row_stack([mask.transform(check_niimg(img, atleast_4d=True))
                         for img in imgs])

    return data, demo
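# A hypothetical usage sketch (not part of the original module): the subject
# ID, atlas filename, and session labels below are illustrative placeholders.
# A filepath containing 'probabilistic' routes to NiftiMapsMasker, per the
# logic in get_data above.
data, demo = get_data('sub-01', 'probabilistic_atlas.nii.gz',
                      sessions=['01', '02'], dtype='jacobian')
print(data.shape)  # (n_images, n_parcels)
print(demo)        # BIDS entities parsed from each filename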
# dump network projections
###############################################################################
# retrieve network projections
import numpy as np
from nilearn import datasets as ds
from nilearn.input_data import NiftiMapsMasker

# NOTE: ``mask_file``, ``all_sub_rs_maps`` and ``sub_id`` are assumed to be
# defined earlier in the script (a brain mask, the subject's 4D resting-state
# image, and an integer subject identifier, respectively).

smith_pkg = ds.fetch_atlas_smith_2009()
icas_path = smith_pkg['rsn20']

nmm = NiftiMapsMasker(
    mask_img=mask_file, maps_img=icas_path, resampling_target='mask',
    standardize=True, detrend=True)
nmm.fit()
nmm.maps_img_.to_filename('dbg_ica_maps.nii.gz')

FS_netproj = nmm.transform(all_sub_rs_maps)
np.save('%i_nets_timeseries' % sub_id, FS_netproj)

# compute network sparse inverse covariance
from sklearn.covariance import GraphLassoCV  # renamed GraphicalLassoCV in sklearn >= 0.22
from nilearn.image import index_img
from nilearn import plotting

try:
    gsc_nets = GraphLassoCV(verbose=2, alphas=20)
    gsc_nets.fit(FS_netproj)

    np.save('%i_nets_cov' % sub_id, gsc_nets.covariance_)
    np.save('%i_nets_prec' % sub_id, gsc_nets.precision_)
except Exception:
    pass
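# An optional follow-up sketch (not in the original script): visualize the
# estimated sparse precision matrix, assuming the GraphLassoCV fit above
# succeeded.
plotting.plot_matrix(gsc_nets.precision_,
                     title='sparse inverse covariance (network space)',
                     colorbar=True)
plotting.show()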
# NOTE: ``atlas`` and ``atlas_filename`` are assumed to come from an earlier
# atlas fetch, e.g. ``atlas = datasets.fetch_atlas_msdl()`` with
# ``atlas_filename = atlas['maps']``.
from nilearn import datasets

labels = atlas['labels']

# Load the functional datasets
data = datasets.fetch_development_fmri(n_subjects=1)

print('First subject resting-state nifti image (4D) is located at: %s' %
      data.func[0])

############################################################################
# Extract the time series
# ------------------------
from nilearn.input_data import NiftiMapsMasker

masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)
masker.fit(data.func[0])
time_series = masker.transform(data.func[0], confounds=data.confounds)

############################################################################
# We can generate an HTML report and visualize the components of the
# :class:`~nilearn.input_data.NiftiMapsMasker`.
# You can pass the indices of the spatial maps you want to include in the
# report in the order you want them to appear.
# Here, we only include maps 2, 6, 7, 16, and 21 in the report:
report = masker.generate_report(displayed_maps=[2, 6, 7, 16, 21])
report  # displays inline in a notebook / rendered example

############################################################################
# `time_series` is now a 2D matrix, of shape (number of time points x
# number of regions)
print(time_series.shape)
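# Optional extra (not in the original example): the report can also be saved
# to a standalone HTML file for later inspection.
report.save_as_html('masker_report.html')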
# We study only 60 subjects from the dataset, to save computation time.
from nilearn import datasets

development_dataset = datasets.fetch_development_fmri(n_subjects=60)

###############################################################################
# We use probabilistic regions of interest (ROIs) from the MSDL atlas.
from nilearn.input_data import NiftiMapsMasker

msdl_data = datasets.fetch_atlas_msdl()
msdl_coords = msdl_data.region_coords

masker = NiftiMapsMasker(
    msdl_data.maps, resampling_target="data", t_r=2, detrend=True,
    low_pass=.1, high_pass=.01, memory='nilearn_cache', memory_level=1).fit()
masked_data = [masker.transform(func, confounds) for
               (func, confounds) in zip(
                   development_dataset.func, development_dataset.confounds)]

###############################################################################
# What kind of connectivity is most powerful for classification?
# --------------------------------------------------------------
# We will use connectivity matrices as features to distinguish children from
# adults. We use cross-validation and measure classification accuracy to
# compare the different kinds of connectivity matrices.

# prepare the classification pipeline
from sklearn.pipeline import Pipeline
from nilearn.connectome import ConnectivityMeasure
from sklearn.svm import LinearSVC
from sklearn.dummy import DummyClassifier
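# A minimal sketch of how the pipeline above might be assembled (the original
# snippet is truncated at this point). The class labels are read from the
# dataset's phenotypic information; the list of connectivity kinds is an
# illustrative assumption, not part of the original.
import numpy as np
from sklearn.model_selection import cross_val_score

classes = np.asarray(development_dataset.phenotypic['Child_Adult'])
for kind in ['correlation', 'partial correlation', 'tangent']:
    pipe = Pipeline([
        ('connectivity', ConnectivityMeasure(kind=kind, vectorize=True)),
        ('classifier', LinearSVC())])
    scores = cross_val_score(pipe, masked_data, classes, cv=5)
    print('%s accuracy: %.2f' % (kind, np.mean(scores)))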
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 1 10:21:42 2015

@author: mr243268
"""
from embedding import CovEmbedding, vec_to_sym
from nilearn.datasets import fetch_nyu_rest, fetch_msdl_atlas
from nilearn.input_data import NiftiMapsMasker

dataset = fetch_nyu_rest(n_subjects=1)
atlas = fetch_msdl_atlas()

masker = NiftiMapsMasker(atlas['maps'], detrend=True, standardize=True)
masker.fit()
ts = masker.transform(dataset.func[0])

cov_embed = CovEmbedding(kind='tangent')
output = cov_embed.fit_transform([ts])
m = vec_to_sym(output)
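# A small follow-on sketch (not in the original script): display the
# reconstructed tangent-space matrix. Assumes ``vec_to_sym`` returns one
# symmetric matrix per subject, stacked along the first axis.
import matplotlib.pyplot as plt

plt.imshow(m[0], interpolation='nearest', cmap='RdBu_r')
plt.colorbar()
plt.title('tangent-space connectivity')
plt.show()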
from nilearn.datasets import fetch_atlas_msdl, fetch_cobre
from nilearn.input_data import NiftiMapsMasker
from nilearn.connectome import vec_to_sym_matrix
from nilearn.plotting import plot_matrix
from posce import PopulationShrunkCovariance

# fetch atlas
msdl = fetch_atlas_msdl()

# fetch rfMRI scans from cobre dataset
cobre = fetch_cobre(n_subjects=20)

# extract timeseries
masker = NiftiMapsMasker(msdl.maps, detrend=True, standardize=True,
                         verbose=1, memory=".")
masker.fit()
ts = [masker.transform(f) for f in cobre.func]

# compute PoSCE on the same dataset
posce = PopulationShrunkCovariance(shrinkage=1e-2)
posce.fit(ts)
connectivities = posce.transform(ts)

# plot the first shrunk covariance
cov = vec_to_sym_matrix(connectivities[0])
plot_matrix(cov)
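# Optional extra (not in the original snippet): view the same matrix as a
# connectome on a glass brain, using the MSDL region coordinates.
from nilearn.plotting import plot_connectome, show

plot_connectome(cov, msdl.region_coords, edge_threshold='95%')
show()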
import numpy as np
from nilearn import datasets, plotting
from nilearn.input_data import NiftiMapsMasker

# NOTE: ``rest_data`` is assumed to be fetched earlier in the script (a
# resting-state dataset with ``.func`` and ``.confounds`` attributes).

msdl = datasets.fetch_atlas_msdl()
print('number of regions in MSDL atlas:', len(msdl.labels))

masker = NiftiMapsMasker(
    msdl.maps, resampling_target="data", t_r=2, detrend=True,
    low_pass=.1, high_pass=.01, memory='nilearn_cache', memory_level=3).fit()
masked_data = [
    masker.transform(func, confounds)
    for (func, confounds) in zip(rest_data.func, rest_data.confounds)
]
masked_data = np.asarray(masked_data)
print('masked data shape:', masked_data[0].shape)

###############################################################################
# Compute and plot connectivity matrix
from nilearn.connectome import ConnectivityMeasure

correlation_measure = ConnectivityMeasure(kind='correlation').fit(masked_data)
plotting.plot_matrix(correlation_measure.mean_, tri='lower')

###############################################################################
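# A short follow-up sketch (not from the original snippet): the fitted
# ConnectivityMeasure can also return one correlation matrix per subject.
correlation_matrices = correlation_measure.transform(masked_data)
print('per-subject matrices shape:', correlation_matrices.shape)
plotting.show()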