def demo_nyu_rest(data_dir="/tmp/nyu_data", n_subjects=1,
                  output_dir="/tmp/nyu_rest_output"):
    """Demo for the NYU resting-state dataset.

    Parameters
    ----------
    data_dir: string, optional
        where the data is located on your disk, where it will be downloaded
        to
    n_subjects: int, optional
        number of subjects to fetch and preprocess
    output_dir: string, optional
        where output will be written to
    """
    # fetch data
    nyu_data = fetch_nyu_rest(data_dir=data_dir, n_subjects=n_subjects)

    # generator of one SubjectData per subject found in the given session
    def subject_factory(session=1):
        session_func = [x for x in nyu_data.func
                        if "session%i" % session in x]

        # the subject id is the grand-parent directory of the func image
        for subject_id in set([os.path.basename(
                os.path.dirname(os.path.dirname(x))) for x in session_func]):
            # each subject has exactly one func image per session
            func = [x for x in session_func if subject_id in x]
            assert len(func) == 1
            func = func[0]

            yield SubjectData(subject_id=subject_id, func=func,
                              output_dir=os.path.join(
                                  output_dir, "session%i" % session,
                                  subject_id))

    # invoke demo runner on the subjects
    _fmri_demo_runner(subject_factory(), "NYU Resting State")
def _nyu_rest_factory(session=1):
    """Generate preprocessing-ready subject data for the NYU resting-state
    dataset, restricted to one session.

    Parameters
    ----------
    session: int, optional
        1-based session number whose images are produced

    Yields
    ------
    tuple (subject_id, func, anat) per subject of the requested session
    """
    from pypreprocess.nipype_preproc_spm_utils import SubjectData

    # fetch only the requested session, for 7 subjects
    nyu_data = fetch_nyu_rest(sessions=[session], n_subjects=7)

    # restrict func and (skull-on) anat images to the requested session
    session_func = [x for x in nyu_data.func if "session%i" % session in x]
    session_anat = [
        x for x in nyu_data.anat_skull if "session%i" % session in x]

    # the subject id is the grand-parent directory of each func image
    for subject_id in set([os.path.basename(os.path.dirname
                                            (os.path.dirname(x)))
                           for x in session_func]):
        # instantiate subject_data object
        subject_data = SubjectData()
        subject_data.subject_id = subject_id
        subject_data.session_id = session

        # set func (exactly one func image per subject and session)
        subject_data.func = [x for x in session_func if subject_id in x]
        assert len(subject_data.func) == 1
        subject_data.func = subject_data.func[0]

        # set anat (exactly one anat image per subject and session)
        subject_data.anat = [x for x in session_anat if subject_id in x]
        assert len(subject_data.anat) == 1
        subject_data.anat = subject_data.anat[0]

        # set subject output directory
        subject_data.output_dir = "/tmp/%s" % subject_id

        # fix image orientation/format in place
        subject_data.sanitize(deleteorient=True, niigz2nii=False)

        # NOTE(review): subject_data.func was set to a single path string
        # above, yet func[0] is yielded here — presumably sanitize() wraps
        # the path back into a list; verify against SubjectData.sanitize.
        yield (subject_data.subject_id, subject_data.func[0],
               subject_data.anat)
def demo_nyu_rest(output_dir):
    """Demo for the NYU resting-state dataset.

    Parameters
    ----------
    output_dir: string
        where output will be written to
    """
    output_dir = os.path.join(output_dir, 'nyu_mrimc_output')

    # fetch data
    nyu_data = fetch_nyu_rest()

    # build one SubjectData per subject of session 1
    subjects = []
    session = 1
    session_func = [x for x in nyu_data.func if "session%i" % session in x]

    # the subject id is the grand-parent directory of the func image
    for subject_id in set([
            os.path.basename(os.path.dirname(os.path.dirname(x)))
            for x in session_func]):
        # each subject has exactly one func image per session
        func = [x for x in session_func if subject_id in x]
        assert len(func) == 1
        func = func[0]

        subjects.append(
            SubjectData(subject_id=subject_id, func=func,
                        output_dir=os.path.join(output_dir,
                                                "session%i" % session,
                                                subject_id)))

    # invoke demo runner on the subjects
    _demo_runner(subjects, "NYU resting state")
def demo_nyu_rest(output_dir="/tmp/nyu_mrimc_output"):
    """Demo for the NYU resting-state dataset.

    Parameters
    ----------
    output_dir: string, optional
        where output will be written to
    """
    # fetch data
    nyu_data = fetch_nyu_rest()

    # build one SubjectData per subject of session 1
    subjects = []
    session = 1
    session_func = [x for x in nyu_data.func if "session%i" % session in x]

    # the subject id is the grand-parent directory of the func image
    for subject_id in set([os.path.basename(os.path.dirname(
            os.path.dirname(x))) for x in session_func]):
        # each subject has exactly one func image per session
        func = [x for x in session_func if subject_id in x]
        assert len(func) == 1
        func = func[0]

        subjects.append(
            SubjectData(
                subject_id=subject_id,
                func=func,
                output_dir=os.path.join(output_dir,
                                        "session%i" % session,
                                        subject_id)))

    # invoke demo runner on the subjects
    _demo_runner(subjects, "NYU resting state")
def test_fetch_nyu_rest():
    # Checks fetch_nyu_rest's URL usage and returned bundle sizes.
    # NOTE(review): relies on `tmpdir`, `url_request`, `datasets`, `np` and
    # nose-style assert helpers defined elsewhere in the test module.

    # First session, all subjects: 25 func/anat images, all from session 1
    nyu = datasets.fetch_nyu_rest(data_dir=tmpdir, verbose=0)
    assert_equal(len(url_request.urls), 2)
    assert_equal(len(nyu.func), 25)
    assert_equal(len(nyu.anat_anon), 25)
    assert_equal(len(nyu.anat_skull), 25)
    assert_true(np.all(np.asarray(nyu.session) == 1))

    # All sessions, 12 subjects
    url_request.reset()
    nyu = datasets.fetch_nyu_rest(data_dir=tmpdir, sessions=[1, 2, 3],
                                  n_subjects=12, verbose=0)
    # Session 1 has already been downloaded, so only 2 more URLs are hit
    assert_equal(len(url_request.urls), 2)
    assert_equal(len(nyu.func), 36)
    assert_equal(len(nyu.anat_anon), 36)
    assert_equal(len(nyu.anat_skull), 36)
    # session labels come back grouped: 12 per session, in order 1, 2, 3
    s = np.asarray(nyu.session)
    assert_true(np.all(s[:12] == 1))
    assert_true(np.all(s[12:24] == 2))
    assert_true(np.all(s[24:] == 3))
def get_dataset(dataset, max_images=np.inf, **kwargs):
    """Retrieve & normalize dataset from nilearn"""
    # Normalize every source to the same (images, term_scores) pair;
    # only neurovault provides term scores.
    term_scores = None

    if dataset == 'neurovault':
        images, term_scores = fetch_neurovault(max_images=max_images,
                                               **kwargs)
    elif dataset == 'abide':
        # ABIDE caps out at 94 subjects
        fetched = datasets.fetch_abide_pcp(n_subjects=min(94, max_images),
                                           **kwargs)
        images = [{'absolute_path': p} for p in fetched['func_preproc']]
    elif dataset == 'nyu':
        # NYU rest caps out at 25 subjects
        fetched = datasets.fetch_nyu_rest(n_subjects=min(25, max_images),
                                          **kwargs)
        images = [{'absolute_path': p} for p in fetched['func']]
    else:
        raise ValueError("Unknown dataset: %s" % dataset)

    return images, term_scores
""" Simple example of NiftiMasker use ================================== Here is a simple example of automatic mask computation using the nifti masker. The mask is computed and visualized. """ ### Load nyu_rest dataset ##################################################### from nilearn import datasets from nilearn.input_data import NiftiMasker nyu_dataset = datasets.fetch_nyu_rest(n_subjects=1) # print basic information on the dataset print('First anatomical nifti image (3D) is at: %s' % nyu_dataset.anat_anon[0]) print('First functional nifti image (4D) is at: %s' % nyu_dataset.func[0]) # 4D data ### Compute the mask ########################################################## # As this is raw resting-state EPI, the background is noisy and we cannot # rely on the 'background' masking strategy. We need to use the 'epi' one nifti_masker = NiftiMasker(standardize=False, mask_strategy='epi', memory="nilearn_cache", memory_level=2) func_filename = nyu_dataset.func[0] nifti_masker.fit(func_filename) mask_img = nifti_masker.mask_img_
""" Independent component analysis of resting-state fMRI ===================================================== An example applying ICA to resting-state data. """ ### Load nyu_rest dataset ##################################################### from nilearn import datasets # Here we use only 3 subjects to get faster-running code. For better # results, simply increase this number # XXX: must get the code to run for more than 1 subject nyu_dataset = datasets.fetch_nyu_rest(n_subjects=1) func_filename = nyu_dataset.func[0] # print basic information on the dataset print('First subject anatomical nifti image (3D) is at: %s' % nyu_dataset.anat_anon[0]) print('First subject functional nifti image (4D) is at: %s' % nyu_dataset.func[0]) # 4D data ### Preprocess ################################################################ from nilearn.input_data import NiftiMasker # This is resting-state data: the background has not been removed yet, # thus we need to use mask_strategy='epi' to compute the mask from the # EPI images masker = NiftiMasker(smoothing_fwhm=8, memory='nilearn_cache', memory_level=1, mask_strategy='epi', standardize=False) data_masked = masker.fit_transform(func_filename)
def get_nyu_data(data_dir='./datas/brain', n_subjects=1):
    """Fetch the NYU resting-state dataset and return its functional images.

    Parameters
    ----------
    data_dir: string, optional
        directory where the data is (or will be) stored
    n_subjects: int, optional
        number of subjects to fetch

    Returns
    -------
    list of paths to the subjects' functional (4D) images
    """
    nyu = datasets.fetch_nyu_rest(n_subjects=n_subjects, data_dir=data_dir)
    return nyu.func
# indices of the strict lower triangle of the ROI-by-ROI matrix
ind = np.tril_indices(len(roi_names), k=-1)

# load per-subject dynamic functional connectivity (lower-triangle values)
dyn_fc = []
for subject in dyn_dataset.subjects:
    dyn_fc.append(
        loader.load_dynacomp_fc(subject_id=subject, session='func1',
                                metric='pc', msdl=False,
                                preprocessing_folder='pipeline_1')[ind])
dyn_fc = np.asarray(dyn_fc)

##############################################################################
# NYU rs-fMRI
##############################################################################
nyu_func = fetch_nyu_rest()['func']

# band-pass filtered, smoothed ROI-map masker over the dynacomp ROIs
# NOTE(review): t_r=2. assumed to match the NYU acquisition — confirm.
masker = NiftiMapsMasker(maps_img=roi_imgs, low_pass=.1, high_pass=.01,
                         t_r=2., smoothing_fwhm=6., detrend=True,
                         standardize=False, resampling_target='maps',
                         memory_level=0, verbose=5)
masker.fit()


def mask_and_covariance(f):
    # NOTE(review): body appears truncated in this chunk (no return
    # statement visible) — confirm against the complete file.
    x = masker.transform(f)
# -*- coding: utf-8 -*- """ Created on Mon Jun 1 10:21:42 2015 @author: mr243268 """ from embedding import CovEmbedding, vec_to_sym from nilearn.datasets import fetch_nyu_rest, fetch_msdl_atlas from nilearn.input_data import NiftiMapsMasker dataset = fetch_nyu_rest(n_subjects=1) atlas = fetch_msdl_atlas() masker = NiftiMapsMasker(atlas['maps'], detrend=True, standardize=True) masker.fit() ts = masker.transform(dataset.func[0]) cov_embed = CovEmbedding(kind='tangent') output = cov_embed.fit_transform([ts]) m = vec_to_sym(output)
# load the dynacomp ROI maps and their names/coordinates
roi_imgs = loader.dict_to_list(loader.load_dynacomp_rois()[0])
roi_names, roi_coords = loader.load_roi_names_and_coords(
    dyn_dataset.subjects[0])

# indices of the strict lower triangle of the ROI-by-ROI matrix
ind = np.tril_indices(len(roi_names), k=-1)

# load per-subject dynamic functional connectivity (lower-triangle values)
dyn_fc = []
for subject in dyn_dataset.subjects:
    dyn_fc.append(loader.load_dynacomp_fc(subject_id=subject,
                                          session='func1',
                                          metric='pc', msdl=False,
                                          preprocessing_folder='pipeline_1')[ind])
dyn_fc = np.asarray(dyn_fc)

##############################################################################
# NYU rs-fMRI
##############################################################################
nyu_func = fetch_nyu_rest()['func']

# band-pass filtered, smoothed ROI-map masker over the dynacomp ROIs
# NOTE(review): t_r=2. assumed to match the NYU acquisition — confirm.
masker = NiftiMapsMasker(maps_img=roi_imgs, low_pass=.1, high_pass=.01,
                         t_r=2., smoothing_fwhm=6., detrend=True,
                         standardize=False, resampling_target='maps',
                         memory_level=0, verbose=5)
masker.fit()


def mask_and_covariance(f):
    # extract ROI time series and return lower-triangle correlations
    x = masker.transform(f)
    return np.corrcoef(x.T)[ind]
    # regularized alternative kept for reference:
#    gl = GraphLassoCV(verbose=2)
#    gl.fit(x)
#    return gl.covariance_[ind]
""" :Author: DOHMATOB Elvis Dopgima :Synopsis: Preprocessing of NYU rest data. """ import sys import os # import API for preprocessing business from pypreprocess.nipype_preproc_spm_utils import do_subjects_preproc # input data-grabber for SPM Auditory (single-subject) data from nilearn.datasets import fetch_nyu_rest # file containing configuration for preprocessing the data jobfile = os.path.join(os.path.dirname(sys.argv[0]), "nyu_rest_preproc.ini") # fetch spm auditory data sd = fetch_nyu_rest() # preprocess the data dataset_dir = os.path.dirname( os.path.dirname(os.path.dirname(os.path.dirname(sd.anat_skull[0])))) results = do_subjects_preproc(jobfile, dataset_dir=dataset_dir)
""" :Author: DOHMATOB Elvis Dopgima :Synopsis: Preprocessing of NYU rest data. """ import sys import os # import API for preprocessing business from pypreprocess.nipype_preproc_spm_utils import do_subjects_preproc # input data-grabber for SPM Auditory (single-subject) data from nilearn.datasets import fetch_nyu_rest # file containing configuration for preprocessing the data jobfile = os.path.join(os.path.dirname(sys.argv[0]), "nyu_rest_preproc.ini") # fetch spm auditory data sd = fetch_nyu_rest() # preprocess the data dataset_dir = os.path.dirname( os.path.dirname(os.path.dirname(os.path.dirname( sd.anat_skull[0])))) results = do_subjects_preproc(jobfile, dataset_dir=dataset_dir)