Code example #1
File: mixture.py Project: mengsenz/dyneusr
def spatial_correlations(mm, atlas=None):
    if atlas is None:
        from nilearn.datasets import fetch_atlas_msdl
        atlas = fetch_atlas_msdl()
    from nilearn.input_data import NiftiMapsMasker
    masker = NiftiMapsMasker(maps_img=atlas['maps'])
    rsns_masked = masker.fit_transform(atlas['maps'])
    mm_masked = masker.fit_transform([mm])
    cc = np.corrcoef(mm_masked, rsns_masked)
    return cc
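A minimal usage sketch for the function above (hypothetical: 'my_map.nii.gz' is a placeholder for a 3D statistical map in MNI space, and np is assumed to be NumPy at module level in mixture.py):

import nibabel as nib

mm = nib.load('my_map.nii.gz')  # placeholder input map
cc = spatial_correlations(mm)   # falls back to the MSDL atlas
print(cc.shape)                 # (1 + n_maps, 1 + n_maps) correlation matrix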
Code example #2
File: utils.py Project: MartinPerez/unicog
def get_data_in_rois_method1(ROIs, subjects, contrasts, condir):
    """ returns the average contrast value in each ROI, for each subject """
    masker = NiftiMapsMasker(ROIs)
    print(ROIs)

    values = np.zeros((len(subjects), len(contrasts), len(ROIs)))
    for isub, sub in enumerate(subjects):
        conlist = [op.join(sub, condir, x) for x in contrasts]
        print(conlist)
        # a single fit_transform per subject is enough
        values[isub, :] = masker.fit_transform(conlist)
    return values
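A hypothetical call (all names are placeholders; op is assumed to be os.path, as elsewhere in utils.py):

ROIs = ['roi_a.nii.gz', 'roi_b.nii.gz']
subjects = ['sub-01', 'sub-02']
contrasts = ['con_0001.nii', 'con_0002.nii']
values = get_data_in_rois_method1(ROIs, subjects, contrasts, 'con_images')
print(values.shape)  # (n_subjects, n_contrasts, n_ROIs)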
Code example #3
File: utils.py Project: bgauthie/unicog
def get_data_in_rois_method1(ROIs, subjects, contrasts, condir):
    """ returns the average contrast value in each ROI, for each subject """
    masker = NiftiMapsMasker(ROIs)
    print(ROIs)

    values = np.zeros((len(subjects), len(contrasts), len(ROIs)))
    for isub, sub in enumerate(subjects):
        conlist = [op.join(sub, condir, x) for x in contrasts]
        print(conlist)
        # a single fit_transform per subject is enough
        values[isub, :] = masker.fit_transform(conlist)
    return values
Code example #4
def extract_rois_signals(preprocessing_folder='pipeline_2', prefix='resampled_wr'):
    dataset = load_dynacomp(preprocessing_folder=preprocessing_folder, prefix=prefix)
    for idx, func in enumerate([dataset.func1, dataset.func2]):
        for i in range(len(dataset.subjects)):
            tic = time.perf_counter()
            print(func[i])
            output_path, _ = os.path.split(func[i])
            print(dataset.subjects[i])
            maps_img = dict_to_list(dataset.rois[i])
            # add mask, smoothing, filter and detrending
            print('Nifti')
            masker = NiftiMapsMasker(maps_img=maps_img,
                                     mask_img=dataset.mask,
                                     low_pass=.1,
                                     high_pass=.01,
                                     smoothing_fwhm=6.,
                                     t_r=1.05,
                                     detrend=True,
                                     standardize=False,
                                     resampling_target='data',
                                     memory_level=0,
                                     verbose=5)

            # extract signal to x
            print('masker')
            x = masker.fit_transform(func[i])
            print(x)
            np.save(os.path.join(PATH_TO_SAVE_DATA,
                                 'output' + str(i + 1) + '_rois_filter'), x)

        print(time.perf_counter() - tic)  # timing of the last subject
        # returns after func1 only; func2 is never reached
        return x
Code example #5
def extract_data(images, masks):
    """ given a set of brain images and a set of masks,  extract the average signal inside each mask for each brain image.
        Returns a dataframe with 3 columns: image, mask, value.
    """

    masker = NiftiMapsMasker(masks)
    values = masker.fit_transform(images)
    nimgs, nmasks = values.shape

    cp = op.commonpath(images)
    labelsimages = [i.replace(cp, '') for i in images]
    print(cp)
    print(labelsimages)

    cpmask = op.commonprefix(masks)
    labelsrois = [i.replace(cpmask, '').replace('.nii.gz', '') for i in masks]
    print(cpmask)
    print(labelsrois)

    df = pd.DataFrame(columns=['image', 'mask', 'value'])
    row = 0
    for iimg in range(nimgs):
        for iroi in range(nmasks):
            df.loc[row] = pd.Series({
                'image': labelsimages[iimg],
                'mask': labelsrois[iroi],
                'value': values[iimg, iroi]
            })
            row = row + 1
    return df
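A hypothetical call (paths are placeholders); the long-format dataframe it returns lends itself to group-by summaries:

images = ['/data/con_sub-01.nii.gz', '/data/con_sub-02.nii.gz']
masks = ['/rois/roi_a.nii.gz', '/rois/roi_b.nii.gz']
df = extract_data(images, masks)
print(df)  # columns: image, mask, value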
Code example #6
def extract_from_masker(dataset_like, dataset_mask, funci, timer=True):
    tic = time.perf_counter()

    maps_img = dict_to_list(dataset_like)
    # add mask, smoothing, filter and detrending
    print('Nifti')
    masker = NiftiMapsMasker(maps_img=maps_img,
                             mask_img=dataset_mask,
                             low_pass=.1,
                             high_pass=.01,
                             smoothing_fwhm=6.,
                             t_r=1.05,
                             detrend=True,
                             standardize=False,
                             resampling_target='data',
                             memory_level=0,
                             verbose=5)

    # extract signal to x
    print('masker')
    x = masker.fit_transform(funci)
    if timer:
        print(time.perf_counter() - tic)
    return x
Code example #7
def extract_one_signal(dataset):
    for idx, func in enumerate([dataset.func1, dataset.func2]):
        for i in range(len(dataset.subjects)):
            tic = time.perf_counter()

            # add mask, smoothing, filter and detrending
            maps_img = dict_to_list(dataset.rois[i])
            masker = NiftiMapsMasker(maps_img=maps_img,
                                     mask_img=dataset.mask,
                                     low_pass=.1,
                                     high_pass=.01,
                                     smoothing_fwhm=6.,
                                     t_r=1.05,
                                     detrend=True,
                                     standardize=False,
                                     resampling_target='data',
                                     memory_level=0,
                                     verbose=5)

            # extract signal to x
            x = masker.fit_transform(func[i])

            print("loading time : " + str(time.perf_counter() - tic))
            # returns after the first subject of func1, hence "one signal"
            return x, maps_img
Code example #8
def get_data_in_roi(path_roi, data_file):
    """Extract data in the ROI with a NiftiMapsMasker."""
    masker = NiftiMapsMasker([path_roi])
    array_datas = np.zeros((len(data_file), 177))  # 177 values per image assumed
    for e, d in enumerate(data_file):
        nifti_obj = nibabel.load(d)
        data = masker.fit_transform(nifti_obj)
        array_datas[e] = data.ravel()  # fill row e for image e
    return array_datas
Code example #9
File: utils.py Project: MartinPerez/unicog
def get_data_in_roi(path_roi, data_file):
    """Extract data in the ROI with a NiftiMapsMasker."""
    masker = NiftiMapsMasker([path_roi])
    array_datas = np.zeros((len(data_file), 177))  # 177 values per image assumed
    for e, d in enumerate(data_file):
        nifti_obj = nibabel.load(d)
        data = masker.fit_transform(nifti_obj)
        array_datas[e] = data.ravel()  # fill row e for image e
    return array_datas
Code example #10
File: utils.py Project: lizhouxin/LeiCA_LIFE
def extract_parcellation_time_series(in_data, parcellation_name,
                                     parcellations_dict, bp_freqs, tr):
    '''
    Depending on parcellation['is_probabilistic'] this function chooses either NiftiLabelsMasker or NiftiMapsMasker
    to extract the time series of each parcel
    if bp_freq: data is band passfiltered at (hp, lp), if (None,None): no filter, if (None, .1) only lp...
    tr in ms (e.g. from freesurfer ImageInfo())
    returns np.array with parcellation time series and saves this array also to parcellation_time_series_file, and
    path to pickled masker object
    '''
    from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker, NiftiSpheresMasker
    import os, pickle
    import numpy as np

    if parcellations_dict[parcellation_name][
            'is_probabilistic'] == True:  # use probab. nilearn
        masker = NiftiMapsMasker(
            maps_img=parcellations_dict[parcellation_name]['nii_path'],
            standardize=True)

    elif parcellations_dict[parcellation_name]['is_probabilistic'] == 'sphere':
        atlas = pickle.load(
            open(parcellations_dict[parcellation_name]['nii_path'], 'rb'))
        coords = atlas.rois
        masker = NiftiSpheresMasker(coords,
                                    radius=5,
                                    allow_overlap=True,
                                    standardize=True)

    else:  # 0/1 labels
        masker = NiftiLabelsMasker(
            labels_img=parcellations_dict[parcellation_name]['nii_path'],
            standardize=True)

    # add bandpass filter (only applied if freq is not None)
    hp, lp = bp_freqs
    masker.low_pass = lp
    masker.high_pass = hp
    if tr is not None:
        # NOTE: the docstring says tr is in ms, but the value is assigned to
        # t_r unchanged; nilearn expects seconds (example #18 below divides
        # by 1000)
        masker.t_r = tr
    else:
        masker.t_r = None

    masker.standardize = True

    masker_file = os.path.join(os.getcwd(), 'masker.pkl')
    with open(masker_file, 'wb') as f:  # binary mode for pickle
        pickle.dump(masker, f)

    parcellation_time_series = masker.fit_transform(in_data)

    parcellation_time_series_file = os.path.join(
        os.getcwd(), 'parcellation_time_series.npy')
    np.save(parcellation_time_series_file, parcellation_time_series)

    return parcellation_time_series, parcellation_time_series_file, masker_file
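A hypothetical call for the variant above (paths and dict entries are placeholders; note that this variant assigns tr to t_r unchanged, so despite the docstring it appears to expect seconds):

parcellations = {'msdl': {'is_probabilistic': True,
                          'nii_path': 'msdl_maps.nii.gz'}}  # placeholder
ts, ts_file, masker_file = extract_parcellation_time_series(
    'func.nii.gz', 'msdl', parcellations, bp_freqs=(None, 0.1), tr=2.0)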
Code example #11
File: connectome.py Project: dangom/nilearn-cli
def extract_timeseries_probabilistic(filename, maps, confounds=None):
    """Extract time series from a probabilistic (maps-based) parcellation."""
    maps_masker = NiftiMapsMasker(maps,
                                  resampling_target="data",
                                  standardize=True)

    time_series = maps_masker.fit_transform(filename, confounds=confounds)
    return time_series
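For instance, combined with the MSDL probabilistic atlas (a sketch; 'func.nii.gz' and 'confounds.csv' are placeholders):

from nilearn import datasets

atlas = datasets.fetch_atlas_msdl()
ts = extract_timeseries_probabilistic('func.nii.gz', atlas['maps'],
                                      confounds='confounds.csv')
print(ts.shape)  # (n_timepoints, n_maps)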
Code example #12
def run_mini_pipeline():
    atlas = datasets.fetch_atlas_msdl()
    atlas_img = atlas['maps']
    labels = pd.read_csv(atlas['labels'])['name']

    masker = NiftiMapsMasker(maps_img=atlas_img, standardize=True,
                             memory='/tmp/nilearn', verbose=0)

    # number_subjects is assumed to be defined elsewhere in the module
    data = datasets.fetch_adhd(number_subjects)

    figures_folder = '../figures/'
    count = 0
    for func_file, confound_file in zip(data.func, data.confounds):
        
        # fit the data to the atlas mask, regress out confounds
        time_series = masker.fit_transform(func_file, confounds=confound_file)

        correlation = np.corrcoef(time_series.T)

        #plotting starts here
        plt.figure(figsize=(10, 10))
        plt.imshow(correlation, interpolation="nearest")
        x_ticks = plt.xticks(range(len(labels)), labels, rotation=90)
        y_ticks = plt.yticks(range(len(labels)), labels)
        corr_file = figures_folder+'subject_number_' + str(count) + '_correlation.pdf'
        plt.savefig(corr_file)

        atlas_region_coords = [plotting.find_xyz_cut_coords(img) for img in image.iter_img(atlas_img)]
        threshold = 0.6
        plotting.plot_connectome(correlation, atlas_region_coords, edge_threshold=threshold)
        connectome_file = figures_folder+'subject_number_' + str(count) + '_connectome.pdf'
        plt.savefig(connectome_file)


        # graph setup

        # binarize the correlation matrix
        correlation[correlation < threshold] = 0
        correlation[correlation != 0] = 1

        graph = nx.from_numpy_matrix(correlation)

        partition = louvain.best_partition(graph)

        values = [partition.get(node) for node in graph.nodes()]

        plt.figure()
        nx.draw_spring(graph, cmap=plt.get_cmap('jet'), node_color=values,
                       node_size=30, with_labels=True)
        graph_file = figures_folder+'subject_number_' + str(count) + '_community.pdf'
        plt.savefig(graph_file)

        count += 1

        plt.close('all')
Code example #13
File: utils.py Project: MartinPerez/unicog
def get_data_in_rois_method2(ROIs, subjects, contrasts, condir, localizerf, threshold):
    """ returns, for individual subjects, the average contrast values in ROIs
    masked by individual localizers, thresholded at a fixed threshold """
    values = np.zeros((len(subjects), len(contrasts), len(ROIs)))
    for isub, sub in enumerate(subjects):
        conlist = [op.join(sub, condir, x) for x in contrasts]
        localizer_img = nibabel.load(op.join(sub, localizerf))
        locmask = binarize_img(localizer_img, threshold)
        # keyword needed: in nilearn, NiftiMapsMasker's second positional
        # argument is allow_overlap, not mask_img
        masker = NiftiMapsMasker(ROIs, mask_img=locmask)
        values[isub, :] = masker.fit_transform(conlist)
    return values
Code example #14
File: utils.py Project: bgauthie/unicog
def get_data_in_rois_method2(ROIs, subjects, contrasts, condir, localizerf,
                             threshold):
    """ returns, for individual subjects, the average contrast values in ROIs
    masked by individual localizers, thresholded at a fixed threshold """
    values = np.zeros((len(subjects), len(contrasts), len(ROIs)))
    for isub, sub in enumerate(subjects):
        conlist = [op.join(sub, condir, x) for x in contrasts]
        localizer_img = nibabel.load(op.join(sub, localizerf))
        locmask = binarize_img(localizer_img, threshold)
        # keyword needed: in nilearn, NiftiMapsMasker's second positional
        # argument is allow_overlap, not mask_img
        masker = NiftiMapsMasker(ROIs, mask_img=locmask)
        values[isub, :] = masker.fit_transform(conlist)
    return values
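A hypothetical call for this localizer-masked variant (placeholders; ROIs, subjects and contrasts as in example #2, with the threshold in the localizer map's units):

values = get_data_in_rois_method2(ROIs, subjects, contrasts, 'con_images',
                                  'loc/localizer_tmap.nii', threshold=3.1)
print(values.shape)  # (n_subjects, n_contrasts, n_ROIs)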
Code example #15
def getConnectome(imgPath=None,
                  atlasPath=None,
                  viewInBrowser=False,
                  displayCovMatrix=False):
    """
    Gets the connectome of a functional MRI scan
    imgPath -> absolute or relative path to the .nii file
    atlasPath -> download path for the reference MSDL atlas
    viewInBrowser (optional, default=False) -> if True, opens up an interactive viewer in the browser
    displayCovMatrix (optional, default=False) -> display the inverse covariance matrix
    Returns a tuple of shape (estimator, atlas)
    """
    # Download the reference atlas
    atlas = datasets.fetch_atlas_msdl(data_dir=atlasPath)
    # Loading atlas image stored in 'maps'
    atlasFilename = atlas['maps']
    # Get the time series for the fMRI scan
    masker = NiftiMapsMasker(maps_img=atlasFilename,
                             standardize=True,
                             memory='nilearn_cache',
                             verbose=5)
    timeSeries = masker.fit_transform(imgPath)
    # Compute the connectome using sparse inverse covariance
    estimator = GraphicalLassoCV()
    estimator.fit(timeSeries)
    if displayCovMatrix:
        labels = atlas['labels']
        plotting.plot_matrix(estimator.covariance_,
                             labels=labels,
                             figure=(9, 7),
                             vmax=1,
                             vmin=-1,
                             title='Covariance')
        plotting.plot_matrix(estimator.precision_,
                             labels=labels,
                             figure=(9, 7),
                             vmax=1,
                             vmin=-1,
                             title='Inverse covariance (Precision)')
        #covPlot.get_figure().savefig('Covariance.png')
        # precPlot.get_figure().savefig('Inverse Covariance.png')
    if viewInBrowser:
        coords = atlas.region_coords
        view = plotting.view_connectome(-estimator.precision_, coords, '60.0%')
        #view.save_as_html(file_name='Connectome Test.html')
        view.open_in_browser()
    return (estimator, atlas)
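A hypothetical invocation (the file path is a placeholder; GraphicalLassoCV comes from sklearn.covariance, and the atlas download needs network access):

estimator, atlas = getConnectome(imgPath='rest.nii.gz', viewInBrowser=True)
print(estimator.precision_.shape)  # (n_regions, n_regions)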
Code example #16
File: utils.py Project: fliem/LeiCA_LIFE
def extract_parcellation_time_series(in_data, parcellation_name, parcellations_dict, bp_freqs, tr):
    '''
    Depending on parcellation['is_probabilistic'] this function chooses either NiftiLabelsMasker or NiftiMapsMasker
    to extract the time series of each parcel
    if bp_freq: data is band passfiltered at (hp, lp), if (None,None): no filter, if (None, .1) only lp...
    tr in ms (e.g. from freesurfer ImageInfo())
    returns np.array with parcellation time series and saves this array also to parcellation_time_series_file, and
    path to pickled masker object
    '''
    from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker, NiftiSpheresMasker
    import os, pickle
    import numpy as np

    if parcellations_dict[parcellation_name]['is_probabilistic'] == True:  # use probab. nilearn
        masker = NiftiMapsMasker(maps_img=parcellations_dict[parcellation_name]['nii_path'], standardize=True)

    elif parcellations_dict[parcellation_name]['is_probabilistic'] == 'sphere':
        atlas = pickle.load(open(parcellations_dict[parcellation_name]['nii_path'], 'rb'))
        coords = atlas.rois
        masker = NiftiSpheresMasker(coords, radius=5, allow_overlap=True, standardize=True)

    else:  # 0/1 labels
        masker = NiftiLabelsMasker(labels_img=parcellations_dict[parcellation_name]['nii_path'],
                                   standardize=True)

    # add bandpass filter (only applied if freq is not None)
    hp, lp = bp_freqs
    masker.low_pass = lp
    masker.high_pass = hp
    if tr is not None:
        # NOTE: the docstring says tr is in ms, but nilearn expects t_r in
        # seconds (cf. example #18, which divides by 1000)
        masker.t_r = tr
    else:
        masker.t_r = None

    masker.standardize = True

    masker_file = os.path.join(os.getcwd(), 'masker.pkl')
    with open(masker_file, 'wb') as f:  # binary mode for pickle
        pickle.dump(masker, f)

    parcellation_time_series = masker.fit_transform(in_data)

    parcellation_time_series_file = os.path.join(os.getcwd(), 'parcellation_time_series.npy')
    np.save(parcellation_time_series_file, parcellation_time_series)

    return parcellation_time_series, parcellation_time_series_file, masker_file
Code example #17
def _fmri_roi_extract_image(data, atlas_path, atlas_type, radius, overlap_ok, mask=None):
    if 'label' in atlas_type:
        logging.debug('Labels Extract')
        label_masker = NiftiLabelsMasker(atlas_path, mask_img=mask)
        timeseries = label_masker.fit_transform(data)
    if 'sphere' in atlas_type:
        atlas_path = np.loadtxt(atlas_path)
        logging.debug('Sphere Extract')
        spheres_masker = NiftiSpheresMasker(atlas_path, float(radius),
                                            mask_img=mask, allow_overlap=overlap_ok)
        timeseries = spheres_masker.fit_transform(data)
    if 'maps' in atlas_type:
        logging.debug('Maps Extract')
        maps_masker = NiftiMapsMasker(atlas_path, mask_img=mask,
                                      allow_overlap=overlap_ok)
        timeseries = maps_masker.fit_transform(data)
    timeseries[timeseries == 0.0] = np.nan

    return timeseries
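A sketch of calling the helper above for two of its atlas types (paths are placeholders; radius is only used by the 'sphere' branch):

ts_maps = _fmri_roi_extract_image('func.nii.gz', 'msdl_maps.nii.gz',
                                  'maps', radius=None, overlap_ok=True)
ts_spheres = _fmri_roi_extract_image('func.nii.gz', 'power_coords.txt',
                                     'sphere', radius=5, overlap_ok=True)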
Code example #18
File: calc_con_mats.py Project: Yaqiongxiao/LeiCA
def extract_parcellation_time_series(in_data, parcellation_name, parcellations_dict, bp_freqs, tr):
    """
    Depending on parcellation['is_probabilistic'] this function chooses either NiftiLabelsMasker or NiftiMapsMasker
    to extract the time series of each parcel
    if bp_freq: data is band passfiltered at (hp, lp), if (None,None): no filter, if (None, .1) only lp...
    tr in ms (e.g. from freesurfer ImageInfo())
    returns np.array with parcellation time series and saves this array also to parcellation_time_series_file, and
    path to pickled masker object
    """
    from nilearn.input_data import NiftiLabelsMasker, NiftiMapsMasker
    import os, pickle
    import numpy as np

    if parcellations_dict[parcellation_name]["is_probabilistic"]:  # use probab. nilearn
        masker = NiftiMapsMasker(maps_img=parcellations_dict[parcellation_name]["nii_path"])
    else:  # 0/1 labels
        masker = NiftiLabelsMasker(labels_img=parcellations_dict[parcellation_name]["nii_path"])

    # add bandpass filter (only applied if freq is not None)
    hp, lp = bp_freqs
    masker.low_pass = lp
    masker.high_pass = hp
    if tr is not None:
        masker.t_r = float(tr) / 1000.0  # tr comes in ms, nilearn wants seconds
    else:
        masker.t_r = None

    masker.standardize = True

    masker_file = os.path.join(os.getcwd(), "masker.pkl")
    with open(masker_file, "wb") as f:  # binary mode for pickle
        pickle.dump(masker, f)

    parcellation_time_series = masker.fit_transform(in_data)

    parcellation_time_series_file = os.path.join(os.getcwd(), "parcellation_time_series.npy")
    np.save(parcellation_time_series_file, parcellation_time_series)

    return parcellation_time_series, parcellation_time_series_file, masker_file
Code example #19
dataset = load_dynacomp(preprocessing_folder='pipeline_2',
                        prefix='resampled_wr')

# func1, func2
for idx, func in enumerate([dataset.func1, dataset.func2]):
    # all the subjects
    for i in range(len(dataset.subjects)):
        tic = time.perf_counter()
        output_path, _ = os.path.split(func[i])
        print(dataset.subjects[i])
        maps_img = dict_to_list(dataset.rois[i])
        # add mask, smoothing, filtering and detrending
        masker = NiftiMapsMasker(maps_img=maps_img,
                                 mask_img=dataset.mask,
                                 low_pass=.1,
                                 high_pass=.01,
                                 smoothing_fwhm=6.,
                                 t_r=1.05,
                                 detrend=True,
                                 standardize=False,
                                 resampling_target='data',
                                 memory_level=0,
                                 verbose=5)
        # extract the signal to x
        x = masker.fit_transform(func[i])
        np.save(
            os.path.join(output_path, 'func' + str(idx + 1) + '_rois_filter'),
            x)
        toc = time.perf_counter()
        print(toc - tic)
Code example #20
File: utils.py Project: MrFDA/unicog
def get_data_in_roi(path_roi, data_file):
    """Extract data in the ROI with a NiftiMapsMasker."""
    masker = NiftiMapsMasker([path_roi])
    nifti_obj = nibabel.load(data_file)
    data = masker.fit_transform(nifti_obj)
    return data
Code example #21
dataset = load_dynacomp(preprocessing_folder='pipeline_2',
                        prefix='resampled_wr')
atlas = fetch_msdl_atlas()

# add mask, smoothing, filtering and detrending
masker = NiftiMapsMasker(maps_img=atlas['maps'],
                         mask_img=dataset.mask,
                         low_pass=.1,
                         high_pass=.01,
                         t_r=1.05,
                         smoothing_fwhm=6.,
                         detrend=True,
                         standardize=False,
                         resampling_target='data',
                         memory_level=0,
                         verbose=5)

for i in range(len(dataset.subjects)):
    tic = time.perf_counter()
    output_path, _ = os.path.split(dataset.func1[i])
    if not os.path.isfile(os.path.join(output_path, 'func1_msdl_filter.npy')):
        print(i, dataset.subjects[i])
        x = masker.fit_transform(dataset.func1[i])
        np.save(os.path.join(output_path, 'func1_msdl_filter'), x)
        x = masker.fit_transform(dataset.func2[i])
        np.save(os.path.join(output_path, 'func2_msdl_filter'), x)
    toc = time.perf_counter()
    print('time: ', toc - tic)
Code example #22
prepdir = "/oak/stanford/groups/russpold/data/ds000030/1.0.3/derivatives/fmriprep_0.4.4"
subs = [
    x for x in os.listdir(prepdir)
    if x.startswith('sub-') and not x.endswith(".html")
]

atlas = datasets.fetch_atlas_msdl()
atlas_filename = atlas['maps']

outdir = os.path.join(os.environ.get("SCRATCH"), "CNP_ts")
if not os.path.exists(outdir):
    os.mkdir(outdir)

for sub in subs:
    subnum = float(sub.split("-")[1])
    outfile = os.path.join(outdir, "%s_MSDL.csv" % sub)
    if os.path.exists(outfile):
        continue
    preprocd = os.path.join(
        prepdir, sub, 'func',
        '%s_task-rest_bold_space-MNI152NLin2009cAsym_preproc.nii.gz' % sub)
    if not os.path.exists(preprocd):
        continue
    masker = NiftiMapsMasker(maps_img=atlas_filename,
                             standardize=True,
                             memory='nilearn_cache',
                             verbose=5)
    time_series = masker.fit_transform(preprocd)
    ts = pd.DataFrame(time_series)
    ts.to_csv(outfile, index=False, header=False)
Code example #23
File: extract_ts.py Project: jokedurnez/RandomNotes
from nilearn.input_data import NiftiMapsMasker
from nilearn import datasets
import pandas as pd
import os

prepdir = "/oak/stanford/groups/russpold/data/ds000030/1.0.3/derivatives/fmriprep_0.4.4"
subs = [x for x in os.listdir(prepdir) if x.startswith('sub-') and not x.endswith(".html")]

atlas = datasets.fetch_atlas_msdl()
atlas_filename = atlas['maps']

outdir = os.path.join(os.environ.get("SCRATCH"),"CNP_ts")
if not os.path.exists(outdir):
    os.mkdir(outdir)

for sub in subs:
    subnum = float(sub.split("-")[1])
    outfile = os.path.join(outdir,"%s_MSDL.csv"%sub)
    if os.path.exists(outfile):
        continue
    preprocd = os.path.join(prepdir,sub,'func','%s_task-rest_bold_space-MNI152NLin2009cAsym_preproc.nii.gz'%sub)
    if not os.path.exists(preprocd):
        continue
    masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,memory='nilearn_cache', verbose=5)
    time_series = masker.fit_transform(preprocd)
    ts = pd.DataFrame(time_series)
    ts.to_csv(outfile,index=False,header=False)
Code example #24
    ], dtype=float),
    'DMN-LECN': np.empty([
        len(subject_folders),
    ], dtype=float)
})

for subject_folder in subject_folders:
    img = image.load_img(
        glob(
            op.join(
                subject_folder,
                'sub-*_task-rest_run-01_space-MNI152NLin2009cAsym_desc-preproc_bold-clean.nii*'
            )))

    #average timeseries extracted from each network
    network_time_series = network_masker.fit_transform(img)

    #calculate connectivity matrix from each subject
    network_conmat = correlation_measure.fit_transform([network_time_series])[0]

    #grab connectivity values from locations in matrix
    df.at[subject_folder, 'SN-DMN'] = network_conmat[0, 1]
    df.at[subject_folder, 'SN-RECN'] = network_conmat[0, 2]
    df.at[subject_folder, 'SN-LECN'] = network_conmat[0, 3]
    df.at[subject_folder, 'DMN-RECN'] = network_conmat[1, 2]
    df.at[subject_folder, 'DMN-LECN'] = network_conmat[1, 3]

    #Add to the list of vectorized connectivity matrices
    print(subject_folder)
    print(network_conmat)
Code example #25
atlas_filename = atlas['maps']
# Loading atlas data stored in 'labels'
labels = atlas['labels']

# Apply atlas to my data
from nilearn.image import resample_to_img
Atlas = resample_to_img(atlas_filename, mask, interpolation='continuous')

# Gain the TimeSeries
from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=Atlas,
                         standardize=True,
                         memory='nilearn_cache',
                         verbose=5)

time_series = masker.fit_transform(fMRIData)

############################################################################
# Build and display a correlation matrix
from nilearn.connectome import ConnectivityMeasure
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]

# Display the correlation matrix
import numpy as np
from matplotlib import pyplot as plt
plt.figure(figsize=(10, 10))
# Mask out the major diagonal
np.fill_diagonal(correlation_matrix, 0)
plt.imshow(correlation_matrix,
           interpolation="nearest",
Code example #26
at_check.append(plt.gcf())
plt.close()

# select matching regressor files
for f_name in func_imgs:
    f = func_imgs.index(f_name)
    if len(reg_dirs) > func_index:
        reg_file = glob.glob(reg_dirs[func_index] + '/' + reg_prefix +
                             f_name[len(root + func_type) + common + 1:len(f_name) - 4] + '*.txt')
    else:
        reg_file = []
        print('could not find matching regressor directory')

    # extracting time series according to atlas
    if func_imgs[f]:

        time_series.append(masker_r.fit_transform(func_imgs[f]))
        if reg_file:
            time_serie_r = masker_r.fit_transform(func_imgs[f], confounds=reg_file)

            regressors.append(np.loadtxt(reg_file[0]))
        else:
            time_serie_r = masker_r.fit_transform(func_imgs[f])
            print('no confounds removed')

        time_series_r.append(time_serie_r)
        progress = 100 * ((float(f) + 1.) / len(func_imgs))
        print(str(progress) + '% done in computing time series for ' + func_type)

# update dictionary containing all data
Code example #27
labels = np.recfromcsv(csv_filename)
names = labels['name']

from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename,
                         standardize=True,
                         memory='nilearn_cache',
                         verbose=5)

data = datasets.fetch_adhd(n_subjects=1)

# print basic information on the dataset
print('First subject functional nifti images (4D) are at: %s' %
      data.func[0])  # 4D data

time_series = masker.fit_transform(data.func[0], confounds=data.confounds)

# Compute the sparse inverse covariance
from sklearn.covariance import GraphLassoCV  # renamed GraphicalLassoCV in scikit-learn >= 0.22
estimator = GraphLassoCV()

estimator.fit(time_series)

# Display the connectome matrix
from matplotlib import pyplot as plt

from nilearn import plotting
coords = np.vstack((labels['x'], labels['y'], labels['z'])).T

# Display the covariance
plt.figure(figsize=(10, 10))
Code example #28
File: difumo_script.py Project: xdjwolf/kpe_task
mask_params = {
    'mask_img': group_mask,
    'detrend': True,
    'standardize': True,
    'high_pass': 0.01,
    'low_pass': 0.1,
    't_r': 1,
    'smoothing_fwhm': 6.,
    'verbose': 5
}

masker = NiftiMapsMasker(maps_img=maps_img, **mask_params)

# %% Generate npy files of timeseries for each subject per session
# we will use it later on, stratify to scripts etc.
# build a specific folder
os.makedirs(output_dir, exist_ok=True)

subject_ts = []
for sub in subject_list:
    print(f' Analysing subject {sub}')
    subject = sub.split('KPE')[1]
    func = func_template.format(sub=subject, session=ses)
    confound = confound_template.format(sub=subject, session=ses)
    signals = masker.fit_transform(imgs=func, confounds=removeVars(confound))
    np.save(output_dir + 'sub-' + subject + '_ses-' + ses, signals)
    subject_ts.append(signals)
Code example #29
dataset = load_dynacomp(preprocessing_folder='pipeline_2',
                        prefix='resampled_wr')
atlas = fetch_msdl_atlas()

# add mask, smoothing, filtering and detrending
masker = NiftiMapsMasker(maps_img=atlas['maps'],
                         mask_img=dataset.mask,
                         low_pass=.1,
                         high_pass=.01,
                         t_r=1.05,
                         smoothing_fwhm=6.,
                         detrend=True,
                         standardize=False,
                         resampling_target='data',
                         memory_level=0,
                         verbose=5)

for i in range(len(dataset.subjects)):
    tic = time.perf_counter()
    output_path, _ = os.path.split(dataset.func1[i])
    if not os.path.isfile(os.path.join(output_path, 'func1_msdl_filter.npy')):
        print(i, dataset.subjects[i])
        x = masker.fit_transform(dataset.func1[i])
        np.save(os.path.join(output_path, 'func1_msdl_filter'), x)
        x = masker.fit_transform(dataset.func2[i])
        np.save(os.path.join(output_path, 'func2_msdl_filter'), x)
    toc = time.perf_counter()
    print('time: ', toc - tic)
Code example #30
mask_params = {
    'mask_img': gm_mask,
    'detrend': True,
    'standardize': True,
    'high_pass': 0.01,
    'low_pass': 0.1,
    't_r': 2.53,
    'smoothing_fwhm': 6.,
    'verbose': 1
}

masker = NiftiMapsMasker(maps_img=maps_img, **mask_params)
subjects_timeseries = []
dx_groups = []
for label, func_img in zip(phenotypes['DX_GROUP'], func_imgs):
    confounds = extract_confounds(func_img, mask_img=gm_mask, n_confounds=10)
    signals = masker.fit_transform(func_img, confounds=confounds)
    subjects_timeseries.append(signals)
    dx_groups.append(label)

##############################################################################
# Functional Connectomes
# ----------------------
connectome_measure = connectome.ConnectivityMeasure(
    cov_estimator=LedoitWolf(assume_centered=True),
    kind='tangent',
    vectorize=True)

# Vectorized connectomes across subject-specific timeseries
vec = connectome_measure.fit_transform(subjects_timeseries)

##############################################################################
Code example #31
# 6. Weighting based on collections
# 7. Distribution of #images per collection
# 8. Average over all images?
# 9. Nearest neighbor to go from images to labels
# 10. Get encoder maps for the most frequently occurring words
# 11. Normalize by frequency of different terms in baseline
# 12. Precompute baseline

# get data / list of files
nv_data = datasets.fetch_neurovault(max_images=None, mode='offline',
                                    data_dir=data_dir)

images = nv_data['images']
images_meta = nv_data['images_meta']
collections = nv_data['collections_meta']

# export metadata to pandas
metadata = pd.DataFrame(images_meta)

# read Arthur Mensch's parcellations
maps = nib.load('components_512.nii.gz')

# reduce dimensionality
imgs = []
for ii, image in enumerate(images):
    print('Resampling image %d' % ii)
    imgs.append(read_resampled_img(image))

masker = NiftiMapsMasker(maps)
X = masker.fit_transform(imgs)
Code example #32

masker = NiftiMapsMasker(components_img, smoothing_fwhm=6,
                         standardize=True, detrend=True,
                         t_r=2.5, low_pass=0.1,
                         high_pass=0.01)

subjects_timeseries = {}
for subject_func in func:
    key = get_key(subject_func.split('\\')[-1])
    subjects_timeseries[key] = masker.fit_transform(subject_func)

# Visualizing extracted timeseries signals. We import matplotlib.pyplot
import matplotlib.pyplot as plt
Code example #33
                                   mask_img=mask,
                                   smoothing_fwhm=None,
                                   standardize=stdz,
                                   detrend=detr,
                                   low_pass=None,
                                   high_pass=None,
                                   t_r=TR,
                                   resampling_target='data',
                                   memory=mem_dir,
                                   memory_level=5,
                                   verbose=0)

        # extracting time series according to atlas
        if func_imgs[f]:

            time_series.append(masker_r.fit_transform(func_imgs[f]))
            if reg_file:
                time_serie_r = masker_r.fit_transform(func_imgs[f],
                                                      confounds=reg_file)

                regressors.append(np.loadtxt(reg_file[0]))
            else:
                time_serie_r = masker_r.fit_transform(func_imgs[f])
                print('no confounds removed')

            time_series_r.append(time_serie_r)
            progress = np.round(100 * ((float(f) + 1.) / len(func_imgs)))
            print(
                str(progress) + '% done in computing time series for ' +
                func_type)
Code example #34
dataset = load_dynacomp(preprocessing_folder='pipeline_2',
                        prefix='resampled_wr')

# func1, func2
for idx, func in enumerate([dataset.func1, dataset.func2]):
    # all the subjects
    for i in range(len(dataset.subjects)):
        tic = time.perf_counter()
        output_path, _ = os.path.split(func[i])
        print(dataset.subjects[i])
        maps_img = dict_to_list(dataset.rois[i])
        # add mask, smoothing, filtering and detrending
        masker = NiftiMapsMasker(maps_img=maps_img,
                                 mask_img=dataset.mask,
                                 low_pass=.1,
                                 high_pass=.01,
                                 smoothing_fwhm=6.,
                                 t_r=1.05,
                                 detrend=True,
                                 standardize=False,
                                 resampling_target='data',
                                 memory_level=0,
                                 verbose=5)
        # extract the signal to x
        x = masker.fit_transform(func[i])
        np.save(os.path.join(output_path,
                             'func' + str(idx + 1) + '_rois_filter'), x)
        toc = time.perf_counter()
        print(toc - tic)
Code example #35
                tproject.inputs.automask = True
            else:
                tproject.inputs.automask = False
                tproject.inputs.mask = curmask
            tproject.inputs.bandpass = tuple(bandpass)
            if NoiseReg.shape[1] > 0:
                tproject.inputs.ort = noise_fn
            #tproject.inputs.censor  = curcache + "/SpikeReg.txt"
            #tproject.inputs.cenmode = 'NTRP'
            tproject.inputs.out_file = errts_fn
            tproject.run()

            # get time-series
            print('Regressed ' + str(NoiseReg.shape[1] + nAROMAComps) +
                  ' parameters from ROI time-series...')
            roits = masker.fit_transform(errts_fn)
            np.savetxt(outfile, roits, delimiter='\t')
            elapsed = time.time() - t
            print('Elapsed time (s) for ' + pipelines[jj].outid + ': ' +
                  str(np.round(elapsed, 1)))

        #store info into dataframe w/
        idlist[ii, jj] = os.path.basename(curfunc).split('_')[0]
        atlaslist[ii, jj] = atlas
        ses[ii, jj] = curfunc.split('ses-')[1].split('/')[0]
        task[ii, jj] = curfunc.split('task-')[1].split('_')[0]
        run[ii, jj] = curfunc.split('run-')[1].split('_')[0]
        ntr[ii, jj] = float(timepoints)
        fdthr[ii, jj] = float(pipelines[jj].fdthr)
        dvthr[ii, jj] = float(pipelines[jj].dvrthr)
        ntrabovethr[ii, jj] = float(np.sum(SpikeReg == 1)) - n_init2drop
Code example #36
"""
Script to test functions

Created on Thu Mar 26 15:02:11 2015

@author: [email protected]
"""

from loader import load_dynacomp, list_of_dicts_to_key_list, dict_to_list
from nilearn.input_data import NiftiMapsMasker
import time

# Load Dynacomp dataset
dataset = load_dynacomp()

# Dataset keys
print('keys\n', dataset.keys())

# Dataset functional 1
print('func1\n', dataset.func1)

# Dataset behavior data: prePerf
print('prePerf\n', list_of_dicts_to_key_list(dataset.behavior, 'prePerf'))

# Generate seed-masker for subject 1
maps_img = dict_to_list(dataset.rois[1])

tic = time.perf_counter()
masker = NiftiMapsMasker(maps_img, verbose=5)
x = masker.fit_transform(dataset.func1[1])
toc = time.perf_counter()
print(toc - tic)
Code example #37
def extract_time_series(fmris,
                        subjects_list,
                        atlas,
                        confounds=None,
                        standardize=True,
                        verbose=5):
    """Extracting time series from a list of fmris
    
    Arguments:
        fmris {list} -- List of loaded fMRIs
        subjects_list {list} -- List of subjects' IDs
        atlas {str} -- Path to atlas
    
    Keyword Arguments:
        confounds {list<String>} -- List of confound's path (default: {None})
        standardize {bool} -- Standardize time series (default: {True})
        verbose {int} -- Verbosity (default: {5})
    
    Returns:
        {tuple} -- Returns time series and processed subjects's IDs
    """
    subjects_time_series = []
    processed_subjects = []

    for i, fmri in enumerate(fmris):
        subject_id = [s for s in subjects_list if str(s) in str(fmri)]
        if confounds:
            assert str(subject_id[0]) in confounds[i]

        if not subject_id:
            print(
                f'{bcolors.WARNING}Found fmri without corresponding ID. Skipping \'{str(fmri)}\'{bcolors.ENDC}'
            )
            continue

        confounds_message = ''
        if confounds:
            confounds_message = f'Using confounds {confounds[i]}'
        else:
            confounds_message = 'Using no confounds'
        print(
            f'{bcolors.OKBLUE}Loading {subject_id[0]}; {confounds_message}{bcolors.ENDC}'
        )
        processed_subjects.append(subject_id[0])

        img = load_img(fmri.as_posix())

        if 'basc' not in atlas:
            masker = NiftiMapsMasker(maps_img=atlas,
                                     standardize=standardize,
                                     verbose=verbose)
        else:
            masker = NiftiLabelsMasker(labels_img=atlas,
                                       standardize=standardize,
                                       verbose=verbose)

        time_series = masker.fit_transform(
            img, confounds=confounds[i] if confounds else None)

        subjects_time_series.append(time_series)

    return (subjects_time_series, processed_subjects)
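A sketch (paths and IDs are placeholders; fmris entries should be pathlib.Path objects, given the .as_posix() call, and module-level names such as load_img, NiftiMapsMasker and bcolors are assumed available):

from pathlib import Path

fmris = [Path('/data/sub-01_bold.nii.gz'), Path('/data/sub-02_bold.nii.gz')]
ts_list, done = extract_time_series(fmris,
                                    subjects_list=['sub-01', 'sub-02'],
                                    atlas='msdl_maps.nii.gz')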
Code example #38
def main(argv):
    ## this is input parsing
    try:
        opts, args = getopt.getopt(argv, "hi:", ["ifile="])
    except getopt.GetoptError:
        print('single_subject_hitting_time3.py -i <sub_data>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('single_subject_hitting_time3.py -i <sub_data>')
            sys.exit()
        elif opt in ("-i", "--ifile"):
            sub_data = arg
    print('sub_data is "', sub_data)

    ## atlas 3
    atlas = "Schaefer200Yeo17Pauli"  # msdl or haox or mmp
    schaefer_atlas = datasets.fetch_atlas_schaefer_2018(
        n_rois=200,
        yeo_networks=17,
        resolution_mm=1,
        data_dir=None,
        base_url=None,
        resume=True,
        verbose=1)  #atlas_filename = "MMP1_rois.nii" #Glasser et al., 2016
    schaefer_filename = schaefer_atlas.maps
    schaefer_labels = schaefer_atlas.labels
    schaefer_masker = NiftiLabelsMasker(labels_img=schaefer_filename,
                                        standardize=True,
                                        memory='nilearn_cache',
                                        verbose=5)
    pauli_atlas = datasets.fetch_atlas_pauli_2017()
    pauli_filename = pauli_atlas.maps
    pauli_labels = pauli_atlas.labels
    pauli_masker = NiftiMapsMasker(maps_img=pauli_filename,
                                   standardize=True,
                                   verbose=5)

    all_labels = np.hstack([schaefer_labels, pauli_labels])
    print(all_labels)

    correlation_measure = ConnectivityMeasure(kind='correlation')

    #n_rois=len(schaefer_labels) + len(pauli_labels)
    #p_corr_all = np.zeros([n_rois, n_rois,len(sub_data)])
    #H_all = np.zeros([n_rois, n_rois,len(sub_data)])

    # generate subject number and position from sub_data
    subnum = sub_data.split(os.sep)[-4]
    #subnum_fmt = "{:06}".format(int(subnum))

    out_base = os.sep.join(sub_data.split(os.sep)[:-3])
    out_dir = out_base + os.sep + "deriv" + os.sep + "snag"
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    #extract time series from a atlas(es)

    file_base = "_".join(sub_data.split(os.sep)[-1].split("_")[:-2])

    adj_out_file = out_dir + os.sep + file_base + "_timeseries-corr_" + atlas + "_data_filt"
    if not pathlib.Path(adj_out_file).exists():
        #func_dir_in = sub_data #+ os.sep + 'restEPI'  #directory with func images
        #funcs_filenames = glob.glob(func_dir_in + os.sep + '*.nii') #find all funcs in this directory
        confounds = high_variance_confounds(sub_data)
        #schafer cortical atlas
        schaefer_time_series = schaefer_masker.fit_transform(
            sub_data, confounds=confounds)  #cortical segments
        print("schaefer ts shape: ")
        print(schaefer_time_series.shape)
        #subcortical atlas
        pauli_time_series = pauli_masker.fit_transform(
            sub_data, confounds=confounds)  #subccortical segments
        print("pauli ts shape: ")
        print(pauli_time_series.shape)

        #stack time series and determine adjacency matrix from the resulting set of time series
        full_ts_set = np.hstack((schaefer_time_series, pauli_time_series))
        print("concatenated ts shape: ")
        print(full_ts_set.shape)
        correlation_matrix = correlation_measure.fit_transform([full_ts_set])[0]
        np.savetxt(adj_out_file, correlation_matrix, delimiter=",")
        print(correlation_matrix.shape[0], correlation_matrix.shape[1])
    else:
        correlation_matrix = genfromtxt(
            adj_out_file, delimiter=','
        )  #load the file if the correlation matrix was pre-computed
    correlation_matrix = abs(
        correlation_matrix
    )  #absolute value to make all transition probabilities positive
    np.fill_diagonal(correlation_matrix, 0)  #set self connections to zero

    #p_corr_all[:,:,sub_ind] = correlation_matrix.copy()  #stack correlation matrices for later analysis

    #build hitting time matrix
    H_out_file = out_dir + os.sep + file_base + "_normedH_" + atlas + "_corr"  #file where hitting-time matrix will be saved
    print(H_out_file)
    # compute the hitting-time matrix if it isn't already saved
    if not pathlib.Path(H_out_file).exists():
        H = hitting_matrix(correlation_matrix)
        #H_all[:,:,sub_ind] = H
        np.savetxt(H_out_file, H, delimiter=",")
        print("saved " + H_out_file)
Code example #39
File: utils.py Project: bgauthie/unicog
def get_data_in_roi(path_roi, data_file):
    """Extract data in the ROI with a NiftiMapsMasker."""
    masker = NiftiMapsMasker([path_roi])
    nifti_obj = nibabel.load(data_file)
    data = masker.fit_transform(nifti_obj)
    return data
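A hypothetical call (paths are placeholders; with a single ROI map the result has shape (n_volumes, 1)):

data = get_data_in_roi('roi_a.nii.gz', 'sub-01_con_0001.nii.gz')
print(data.shape)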
Code example #40
# Loading atlas data stored in 'labels'
labels = atlas['labels']

# Load the functional datasets
data = datasets.fetch_adhd(n_subjects=1)

print('First subject resting-state nifti image (4D) is located at: %s' %
      data.func[0])

############################################################################
# Extract the time series
from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)

time_series = masker.fit_transform(data.func[0],
                                   confounds=data.confounds)

############################################################################
# `time_series` is now a 2D matrix, of shape (number of time points x
# number of regions)
print(time_series.shape)

############################################################################
# Build and display a correlation matrix
from nilearn.connectome import ConnectivityMeasure
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]

# Display the correlation matrix
import numpy as np
from matplotlib import pyplot as plt
Code example #41
File: CJR ICA.py Project: coreyjr2/Python-Tutorials
msdl_maps = msdl.maps
msdl_labels = msdl.labels
msdl_networks = msdl.networks
msdl_coordinates = msdl.region_coords

'''Now let's build our masker'''
from nilearn.input_data import NiftiMapsMasker


'''create masker to extract functional data within atlas parcels'''
masker = NiftiMapsMasker(maps_img=msdl['maps'], standardize=True,
                         memory='nilearn_cache')

'''calculate the correlation matrix for each of the four subjects'''
#Control
rest_cn = np.array(masker.fit_transform(rest_img_cn_sub_10228))
cn_matrix = calc_correlation_matrix(rest_cn)
#Schizophrenia
rest_sz = np.array(masker.fit_transform(rest_img_sz_sub_50006))
sz_matrix = calc_correlation_matrix(rest_sz)
#ADHD
rest_adhd = np.array(masker.fit_transform(rest_img_adhd_sub_70001))
adhd_matrix = calc_correlation_matrix(rest_adhd)
#Bipolar
rest_bp = np.array(masker.fit_transform(rest_img_bp_sub_60001))
bp_matrix = calc_correlation_matrix(rest_bp)


'''calculate the correlation matrix for the test subjects'''
test_rest_cn = np.array(masker.fit_transform(rest_img_cn_sub_10249))
test_cn_matrix = calc_correlation_matrix(test_rest_cn)
Code example #42
atlas4d_data = atlas4d.get_data()
atlas3d_data = np.sum(atlas4d_data, axis=3)
atlas3d = nib.Nifti1Image(atlas3d_data, atlas4d.get_affine())

n_subjects = len(func_files)
subjects = []
cov_feat = []

for subject_n in range(n_subjects):
    filename = func_files[subject_n]
    print("Processing file %s" % filename)
    print("-- Computing region signals ...")
    masker = NiftiMapsMasker(atlas["maps"],
                             resampling_target="maps", standardize=False,
                             memory=CACHE_DIR, memory_level=1, verbose=0)
    region_ts = masker.fit_transform(filename)
    subjects.append(region_ts)
    print("-- Computing covariances")
    cov_matrix = np.cov(region_ts.T)
    cov_feat.append(cov_matrix[np.tril_indices(len(cov_matrix))])

from sklearn.svm import SVC
from sklearn.cross_validation import StratifiedShuffleSplit

cov_feat = np.array(cov_feat)

nb_iter = 100
pg_counter = 0
groups = [['AD', 'Normal'], ['AD', 'EMCI'], ['AD', 'LMCI'],
          ['EMCI', 'LMCI'], ['EMCI', 'Normal'], ['LMCI', 'Normal']]
score = np.zeros((nb_iter, len(groups)))
Code example #43
    images = sorted(glob(filter))
    if not images:
        print('Empty list :' + filter)
        sys.exit(3)
        
    labels = basenames(images)
    u = [x.split('_') for x in labels]
    subj = [x[1] for x in u]
    con = [x[0] for x in u]
    
    ROIs = sorted(glob(op.join(mask_dir, '*.nii')))
    roi_names = basenames(ROIs)
    
    # extract data 
    masker = NiftiMapsMasker(ROIs)
    values = masker.fit_transform(images)

    # save it into a pandas DataFrame
    df = pd.DataFrame(columns=['subject', 'con', 'ROI', 'beta'])

    n1, n2 = values.shape
    k = 0
    for i1 in range(n1):
        for i2 in range(n2):
            df.loc[k] = pd.Series({'subject': subj[i1],
                                   'con': con[i1],
                                   'ROI': roi_names[i2],
                                   'beta': values[i1, i2]})
            k = k + 1
    df.to_csv(output, index=False)