Example #1
def _fetch_msdl():
    """ The AAL atlas does not contain a background label.
    To make the API consistent we fix it here.
    """
    msdl = datasets.fetch_atlas_msdl()
    msdl['labels'] = ['Background'] + msdl['labels']
    return msdl
def load_atlas(atlas_location=None, download_path=DEFAULT_DOWNLOAD_PATH):
    """Loading a provided atlas
    
    Keyword Arguments:
        atlas_location {str} -- path or url to the atlas (default: {None})
        download_path {[type]} -- download path for the atlas(default: {'./downloaded_atlas.nii.gz'})
    
    Returns:
        {Nibabel Image} -- Atlas's path
    """
    print(f'{bcolors.OKBLUE}Loading atlas{bcolors.ENDC}')
    atlas_filename = ''

    if not atlas_location:
        atlas = datasets.fetch_atlas_msdl()
        atlas_filename = atlas['maps']

    else:
        if is_url(atlas_location):
            print(
                f'{bcolors.OKBLUE}Beginning atlas download with urllib...{bcolors.ENDC}'
            )
            urllib.request.urlretrieve(atlas_location, download_path)

            atlas_filename = download_path
        elif 'MIST' in atlas_location:
            print(f'{bcolors.OKBLUE}Getting MIST atlases...{bcolors.ENDC}')

            atlas = datasets.fetch_atlas_basc_multiscale_2015(version='sym')
            atlas_filename = atlas['scale{}'.format(
                atlas_location.split('_')[1].zfill(3))]
        else:
            atlas_filename = atlas_location

    return atlas_filename
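A brief usage sketch for load_atlas; the atlas name, URL, and download path below are placeholders, and bcolors, DEFAULT_DOWNLOAD_PATH, and is_url are assumed to be defined in the same module.

# Hypothetical calls; paths and names are illustrative only.
msdl_path = load_atlas()                          # defaults to the MSDL probabilistic atlas
mist_path = load_atlas('MIST_64')                 # resolves to the BASC 2015 'scale064' map
remote_path = load_atlas('https://example.org/my_atlas.nii.gz',
                         download_path='./downloaded_atlas.nii.gz')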
Example #3
def get_nilearn_adhd_data(n_subjects, nilearn_download_dir):

    # Load the functional datasets
    datasets.get_data_dirs(data_dir=nilearn_download_dir)
    adhd_data = datasets.fetch_adhd(n_subjects=n_subjects,
                                    data_dir=nilearn_download_dir)
    msdl_data = datasets.fetch_atlas_msdl(data_dir=nilearn_download_dir)
    masker = input_data.NiftiMapsMasker(msdl_data.maps,
                                        resampling_target="data",
                                        t_r=2.5,
                                        detrend=True,
                                        low_pass=.1,
                                        high_pass=.01,
                                        memory='nilearn_cache',
                                        memory_level=1)

    pooled_subjects = []
    adhd_labels = []  # 1 if ADHD, 0 if control
    age = []
    for func_file, confound_file, phenotypic in zip(adhd_data.func,
                                                    adhd_data.confounds,
                                                    adhd_data.phenotypic):
        time_series = masker.fit_transform(func_file, confounds=confound_file)
        pooled_subjects.append(time_series)
        adhd_labels.append(phenotypic['adhd'])
        age.append(phenotypic['age'])
    correlation_measure = ConnectivityMeasure(kind='correlation')
    corr_mat = correlation_measure.fit_transform(pooled_subjects)
    print('Correlations are stacked in an array of shape {0}'.format(
        corr_mat.shape))
    beh = np.zeros((n_subjects, 2))
    beh[:, 0] = adhd_labels
    beh[:, 1] = age

    return corr_mat, beh
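A minimal usage sketch for get_nilearn_adhd_data, assuming a writable nilearn download directory; the subject count is illustrative.

corr_mat, beh = get_nilearn_adhd_data(n_subjects=20,
                                      nilearn_download_dir='./nilearn_data')
print(corr_mat.shape)  # (20, 39, 39): one 39x39 MSDL correlation matrix per subject
print(beh[:, 0])       # ADHD labels (1 = ADHD, 0 = control); beh[:, 1] holds age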
def test_extract_timeseries():
    current_dir = split(__file__)[0]
    filename_func = join(current_dir, 'data', 'func.nii.gz')
    filename_confounds = join(current_dir, 'data', 'confounds.txt')

    time_serie = extract_timeseries(filename_func,
                                    atlas=fetch_atlas_msdl().maps)
    assert_equal(time_serie.shape, (5, 39))
    assert_almost_equal(mean(time_serie), -1.05343771823e-13)
    assert_almost_equal(std(time_serie), 5.253714931447222)

    time_serie = extract_timeseries(filename_func,
                                    atlas=fetch_atlas_msdl().maps,
                                    confounds=filename_confounds)
    assert_equal(time_serie.shape, (5, 39))
    assert_almost_equal(mean(time_serie), -1.05567147082e-13)
    assert_almost_equal(std(time_serie), 1.8468688363637491e-13)
def test_make_masker_from_atlas():
    atlas = fetch_atlas_basc_multiscale_2015().scale007
    masker = make_masker_from_atlas(atlas)
    assert_true(isinstance(masker, NiftiLabelsMasker))
    assert_equal(masker.labels_img.shape, (53, 64, 52))

    atlas = fetch_atlas_msdl().maps
    masker = make_masker_from_atlas(atlas)
    assert_true(isinstance(masker, NiftiMapsMasker))
    assert_equal(masker.maps_img.shape, (40, 48, 35, 39))
Example #6
File: mixture.py Project: mengsenz/dyneusr
def spatial_correlations(mm, atlas=None):
    if atlas is None:
        from nilearn.datasets import fetch_atlas_msdl
        atlas = fetch_atlas_msdl()
    from nilearn.input_data import NiftiMapsMasker
    masker = NiftiMapsMasker(maps_img=atlas['maps'])
    rsns_masked = masker.fit_transform(atlas['maps'])
    mm_masked = masker.fit_transform([mm])
    cc = np.corrcoef(mm_masked, rsns_masked)
    return cc
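A possible call to spatial_correlations, assuming numpy is imported as np and mm is a statistical map in MNI space (a path or a Nifti image); the file name is a placeholder.

cc = spatial_correlations('my_component_map.nii.gz')
# cc is a (1 + 39) x (1 + 39) correlation matrix; cc[0, 1:] gives the spatial
# correlation of the input map with each of the 39 MSDL network maps.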
def run_mini_pipeline():
    atlas = datasets.fetch_atlas_msdl()
    atlas_img = atlas['maps']
    labels = pd.read_csv(atlas['labels'])['name']

    masker = NiftiMapsMasker(maps_img=atlas_img, standardize=True,
                               memory='/tmp/nilearn', verbose=0)

    data = datasets.fetch_adhd(number_subjects)

    figures_folder = '../figures/'
    count = 0
    for func_file, confound_file in zip(data.func, data.confounds):
        
        # fit the data to the atlas mask, regress out confounds
        time_series = masker.fit_transform(func_file, confounds=confound_file)

        correlation = np.corrcoef(time_series.T)

        #plotting starts here
        plt.figure(figsize=(10, 10))
        plt.imshow(correlation, interpolation="nearest")
        x_ticks = plt.xticks(range(len(labels)), labels, rotation=90)
        y_ticks = plt.yticks(range(len(labels)), labels)
        corr_file = figures_folder+'subject_number_' + str(count) + '_correlation.pdf'
        plt.savefig(corr_file)

        atlas_region_coords = [plotting.find_xyz_cut_coords(img) for img in image.iter_img(atlas_img)]
        threshold = 0.6
        plotting.plot_connectome(correlation, atlas_region_coords, edge_threshold=threshold)
        connectome_file = figures_folder+'subject_number_' + str(count) + '_connectome.pdf'
        plt.savefig(connectome_file)


        #graph setup

        #binarize correlation matrix
        correlation[correlation<threshold] = 0
        correlation[correlation != 0] = 1

        graph = nx.from_numpy_matrix(correlation)

        partition = louvain.best_partition(graph)

        values = [partition.get(node) for node in graph.nodes()]

        plt.figure()
        nx.draw_spring(graph, cmap = plt.get_cmap('jet'), node_color = values, node_size=30, with_labels=True)
        graph_file = figures_folder+'subject_number_' + str(count) + '_community.pdf'
        plt.savefig(graph_file)

        count += 1

        plt.close('all')
Example #8
def get_single_timeseries(subject_id, atlas='msdl'):
    atlas = datasets.fetch_atlas_msdl()
    atlas_file = atlas['maps']
    labels = atlas['labels']

    masker = input_data.NiftiMapsMasker(maps_img=atlas_file,
                                        standardize=True,
                                        memory='nilearn_cache',
                                        verbose=5)
    subject = get_filepaths([subject_id])[0]

    timeseries = masker.fit_transform(subject)

    return timeseries
Example #9
def viz_connectome(regressor, imagename: str):
    '''
    The nilearn plotting.plot_connectome tool visualizes the nodes and connections among
    brain regions based on the selected regressor (for this project we use the mean),
    whose values define the strength of the connectivity.
    '''


    # List of colors 
    colors_df=pd.read_excel(open('colors_8.xlsx','rb'))
    colors_df['lower_color'] = colors_df['color'].str.lower()

    # Plot the tangent matrix
    # The labels of the MSDL Atlas that we are using 
    # Data from the atlas used (in the given example MSDL)
    atlas = datasets.fetch_atlas_msdl()
    # Loading atlas data stored in 'labels'
    labels = atlas['labels']
    # Loading atlas coordinates
    coords = atlas.region_coords

    # Plot of the connectome based on the nilearn plotting.plot_connectome package
    fig = plt.figure(figsize=(6,7))
    display=plotting.plot_connectome(regressor,coords,node_size =40,
                             edge_threshold="99.5%", display_mode="ortho",  title="Tangent-ASD", alpha=1,
                                     colorbar=True, annotate=False)

    values =list(colors_df["color"])
    keys = labels
    colors_labels = dict(zip(keys, values))

    patchList = []
    fontP = FontProperties()
    fontP.set_size('small')

    for key in colors_labels:

            data_key = mpatches.Patch(color=colors_labels[key], label=key)
            patchList.append(data_key)
   

    plt.legend(handles=patchList,prop=fontP, loc='upper center', bbox_to_anchor=(0.5, -0.05),
              ncol=8, fancybox=True, shadow=True)

    # output file name depends on the data being visualized
    plt.savefig(imagename + ".png", dpi=300)

    plt.show()
    plt.close(fig)
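A hypothetical call, assuming colors_8.xlsx (with a 'color' column) sits next to the script and tangent_matrix is a 39 x 39 connectivity matrix such as the group mean of ConnectivityMeasure(kind='tangent') matrices.

viz_connectome(tangent_matrix, 'tangent_asd_connectome')
# writes tangent_asd_connectome.png with the MSDL nodes, a legend of region colors,
# and only the 0.5% strongest edges (edge_threshold="99.5%")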
Example #10
def get_timeseries(subject_ids, atlas='msdl'):
    atlas = datasets.fetch_atlas_msdl()
    atlas_file = atlas['maps']
    labels = atlas['labels']

    masker = input_data.NiftiMapsMasker(maps_img=atlas_file,
                                        standardize=True,
                                        memory='nilearn_cache',
                                        verbose=5)
    timeseries = []
    subjects = get_filepaths(subject_ids)

    for subject in subjects:
        timeseries.append(masker.fit_transform(subject))

    return timeseries
def getConnectome(imgPath=None,
                  atlasPath=None,
                  viewInBrowser=False,
                  displayCovMatrix=False):
    """
    Gets the connectome of a functional MRI scan
    imgPath -> absolute or relative path to the .nii file
    atlasPath -> download path for the reference MSDL atlas
    viewInBrowser (optional, default=False) -> if True, opens up an interactive viewer in the browser
    displayCovMatrix (optional, default=False) -> display the inverse covariance matrix
    Returns a tuple of shape (estimator, atlas)
    """
    # Download the reference atlas
    atlas = datasets.fetch_atlas_msdl(data_dir=atlasPath)
    # Loading atlas image stored in 'maps'
    atlasFilename = atlas['maps']
    # Get the time series for the fMRI scan
    masker = NiftiMapsMasker(maps_img=atlasFilename,
                             standardize=True,
                             memory='nilearn_cache',
                             verbose=5)
    timeSeries = masker.fit_transform(imgPath)
    # Compute the connectome using sparse inverse covariance
    estimator = GraphicalLassoCV()
    estimator.fit(timeSeries)
    if (displayCovMatrix):
        labels = atlas['labels']
        plotting.plot_matrix(estimator.covariance_,
                             labels=labels,
                             figure=(9, 7),
                             vmax=1,
                             vmin=-1,
                             title='Covariance')
        plotting.plot_matrix(estimator.precision_,
                             labels=labels,
                             figure=(9, 7),
                             vmax=1,
                             vmin=-1,
                             title='Inverse covariance (Precision)')
        #covPlot.get_figure().savefig('Covariance.png')
        # precPlot.get_figure().savefig('Inverse Covariance.png')
    if (viewInBrowser):
        coords = atlas.region_coords
        view = plotting.view_connectome(-estimator.precision_, coords, '60.0%')
        #view.save_as_html(file_name='Connectome Test.html')
        view.open_in_browser()
    return (estimator, atlas)
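A short usage sketch; the scan path and atlas directory are placeholders.

estimator, atlas = getConnectome(imgPath='subject01_rest.nii.gz',
                                 atlasPath='./nilearn_data',
                                 viewInBrowser=True)
# the sparse inverse covariance (precision) matrix estimated by GraphicalLassoCV
precision = estimator.precision_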
Example #12
def plot_connectivity_matrix(matrix_data, matrix_name: str):
    '''
    Visualize a connectivity matrix for the selected atlas, using nilearn's matrix and
    interactive connectome plotting utilities.
    '''
    # Plot the tangent matrix
    # The labels of the MSDL Atlas that we are using 
    # Data from the atlas used (in the given example MSDL)
    atlas = datasets.fetch_atlas_msdl()
    # Loading atlas data stored in 'labels'
    labels = atlas['labels']
    # Loading atlas coordinates
    coords = atlas.region_coords
    
    font = {'family': 'serif',
            'color':  'black',
            'weight': 'bold',
            'size': 12}
    
    tt = plt.figure(1,figsize=(7,6))

 
    np.fill_diagonal(matrix_data, 0)
    plt.imshow(matrix_data, interpolation='None', cmap='RdYlBu_r', vmax=.000002, vmin=-.000002)
    plt.yticks(range(len(atlas.labels)),labels, fontsize=10, weight='bold');
    plt.xticks( range(len(atlas.labels)), labels, rotation=90, fontsize=10, weight='bold');
    plt.title(str(matrix_name)+'_msdl',fontdict=font)
    plt.colorbar(shrink=0.8)
    
    tt2 = plt.figure(2)   
    view=plotting.view_connectome(matrix_data,coords, node_size=5.0, edge_threshold='99.5%')    
    

    plt.show()    
   
    return view
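A hypothetical call, assuming tangent_matrix is a square 39 x 39 connectivity matrix in MSDL space.

view = plot_connectivity_matrix(tangent_matrix, 'tangent')
view.open_in_browser()  # the returned object is nilearn's interactive connectome view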
Example #13
from nilearn.datasets import fetch_atlas_msdl, fetch_cobre
from nilearn.input_data import NiftiMapsMasker
from nilearn.connectome import vec_to_sym_matrix
from nilearn.plotting import plot_matrix
from posce import PopulationShrunkCovariance

# fetch atlas
msdl = fetch_atlas_msdl()

# fetch rfMRI scans from cobre dataset
cobre = fetch_cobre(n_subjects=20)

# extract timeseries
masker = NiftiMapsMasker(msdl.maps,
                         detrend=True,
                         standardize=True,
                         verbose=1,
                         memory=".")
masker.fit()
ts = [masker.transform(f) for f in cobre.func]

# compute PoSCE on the same dataset
posce = PopulationShrunkCovariance(shrinkage=1e-2)
posce.fit(ts)
connectivities = posce.transform(ts)

# plot the first shrunk covariance
cov = vec_to_sym_matrix(connectivities[0])
plot_matrix(cov)
def run_mini_pipeline():
    atlas = datasets.fetch_atlas_msdl()
    atlas_img = atlas['maps']
    labels = pd.read_csv(atlas['labels'])['name']

    masker = NiftiMapsMasker(maps_img=atlas_img,
                             standardize=True,
                             memory='/tmp/nilearn',
                             verbose=0)

    data = datasets.fetch_adhd(number_subjects)

    figures_folder = '../figures/'
    count = 0
    for func_file, confound_file in zip(data.func, data.confounds):

        # fit the data to the atlas mask, regress out confounds
        time_series = masker.fit_transform(func_file, confounds=confound_file)

        correlation = np.corrcoef(time_series.T)

        #plotting starts here
        plt.figure(figsize=(10, 10))
        plt.imshow(correlation, interpolation="nearest")
        x_ticks = plt.xticks(range(len(labels)), labels, rotation=90)
        y_ticks = plt.yticks(range(len(labels)), labels)
        corr_file = figures_folder + 'subject_number_' + str(
            count) + '_correlation.pdf'
        plt.savefig(corr_file)

        atlas_region_coords = [
            plotting.find_xyz_cut_coords(img)
            for img in image.iter_img(atlas_img)
        ]
        threshold = 0.6
        plotting.plot_connectome(correlation,
                                 atlas_region_coords,
                                 edge_threshold=threshold)
        connectome_file = figures_folder + 'subject_number_' + str(
            count) + '_connectome.pdf'
        plt.savefig(connectome_file)

        #graph setup

        #binarize correlation matrix
        correlation[correlation < threshold] = 0
        correlation[correlation != 0] = 1

        graph = nx.from_numpy_matrix(correlation)

        partition = louvain.best_partition(graph)

        values = [partition.get(node) for node in graph.nodes()]

        plt.figure()
        nx.draw_spring(graph,
                       cmap=plt.get_cmap('jet'),
                       node_color=values,
                       node_size=30,
                       with_labels=True)
        graph_file = figures_folder + 'subject_number_' + str(
            count) + '_community.pdf'
        plt.savefig(graph_file)

        count += 1

        plt.close('all')
elif atlas_name == 'msdl_lang':
    atlas_filename='/media/vd239549/LaCie/victor/nilearn_data/msdl_atlas/msdl_lang/msdl_lang.nii'
    labels = np.recfromcsv('/media/vd239549/LaCie/victor/nilearn_data/msdl_atlas/msdl_lang/msdl_lang_rois_labels.csv')    
    #rois = np.vstack((labels['name'],labels['netname'])).T
    #coords = np.vstack((labels['x'], labels['y'], labels['z'])).T
    rois = labels['name'].T
    n_r = len(rois)
    l = 360. / n_r  # ROI label size in figures
    visu = atlas_filename
    
    all_ntwks = range(n_r)          
    networks = {'All ROIs':all_ntwks}
else:
    print('unknown atlas, msdl used as default atlas')
    atlas_name = 'msdl'
    atlas = datasets.fetch_atlas_msdl(data_dir=atlas_dir)
    atlas_filename, labels = atlas.maps, atlas.labels
    labels = np.recfromcsv(atlas.labels)    
    #rois = np.vstack((labels['name'],labels['netname'])).T
    #coords = np.vstack((labels['x'], labels['y'], labels['z'])).T
    rois = labels['name'].T
    n_r = len(rois)
    l = 360. / n_r  # ROI label size in figures
    visu = atlas_filename
    all_ntwks = range(n_r)          
    networks = {'Auditory': [0,1],'striate' : [2],'DMN': [3,4,5,6],'Occ post' :[7],
                'Motor': [8],'Attentional' : [9,10,11,12,14,15,16,17,18],
                'Basal' : [13],'Visual secondary' : [19,20,21], 'Salience':[22,23,24],
                'Temporal(STS)':[25,26],'Langage':[27,28,29,30,31],'Cereb':[32],
                'Dors PCC': [33],'cing ins' :[34,35,36],'Ant IPS': [37,38],'All ROIs':all_ntwks}
Example #16
File: conmats.py Project: fliem/sea_zrh_rs
def _get_roi_info(parc):
    if parc == "msdl":
        atlas = datasets.fetch_atlas_msdl()
        roi_file = atlas['maps']
        df_labels = pd.DataFrame({"roi_labels": atlas['labels']})
        if isinstance(df_labels["roi_labels"][0], bytes):
            df_labels["roi_labels"] = df_labels.roi_labels.apply(bytes.decode)
        roi_names = df_labels["roi_labels"].values
        roi_type = "maps"

    elif parc == "gordon":
        atlas_dir = "/parcs/Gordon/Parcels"
        roi_file = os.path.join(atlas_dir, "Parcels_MNI_111.nii")
        labs_df = pd.read_excel(os.path.join(atlas_dir, "Parcels.xlsx"))
        roi_names = labs_df.ParcelID.values
        roi_type = "labels"

    elif parc == "basc197":
        atlas = datasets.fetch_atlas_basc_multiscale_2015(version='sym')
        roi_file = atlas['scale197']
        roi_names = np.arange(1, 198).astype(int)
        roi_type = "labels"

    elif parc == "basc444":
        atlas = datasets.fetch_atlas_basc_multiscale_2015(version='sym')
        roi_file = atlas['scale444']
        roi_names = np.arange(1, 445).astype(int)
        roi_type = "labels"

    elif parc == "schaefer200":
        atlas_dir = "/parcs/Schaefer"
        schaefer_cols = "roi community c1 c2 c3 c4".split(" ")
        roi_file = os.path.join(atlas_dir, "Schaefer2018_200Parcels_17Networks_order_FSLMNI152_1mm.nii.gz")
        labs_df = pd.read_csv(os.path.join(atlas_dir, "Schaefer2018_200Parcels_17Networks_order.txt"), sep="\t",
                              names=schaefer_cols)
        roi_names = labs_df.roi
        roi_type = "labels"

    elif parc == "schaefer400":
        atlas_dir = "/parcs/Schaefer"
        schaefer_cols = "roi community c1 c2 c3 c4".split(" ")
        roi_file = os.path.join(atlas_dir, "Schaefer2018_400Parcels_17Networks_order_FSLMNI152_1mm.nii.gz")
        labs_df = pd.read_csv(os.path.join(atlas_dir, "Schaefer2018_400Parcels_17Networks_order.txt"), sep="\t",
                              names=schaefer_cols)
        roi_names = labs_df.roi
        roi_type = "labels"

    elif parc == "yeo17":
        atlas = datasets.fetch_atlas_yeo_2011()
        roi_file = atlas['thick_17']
        yeo_cols = "roi roi_labels c1 c2 c3 c4".split(" ")
        df_labels = pd.read_csv(atlas["colors_17"], sep=r"\s*", engine="python", names=yeo_cols, skiprows=1)
        roi_names = df_labels["roi_labels"].values
        roi_type = "labels"

    elif parc == "yeo17thin":
        atlas = datasets.fetch_atlas_yeo_2011()
        roi_file = atlas['thin_17']
        yeo_cols = "roi roi_labels c1 c2 c3 c4".split(" ")
        df_labels = pd.read_csv(atlas["colors_17"], sep=r"\s*", engine="python", names=yeo_cols, skiprows=1)
        roi_names = df_labels["roi_labels"].values
        roi_type = "labels"

    elif parc == "yeo17split":
        atlas_dir = "/parcs/Yeo_splithemi"
        roi_file = os.path.join(atlas_dir, "yeo_2011_thick17_splithemi.nii.gz")
        labs_df = pd.read_csv(os.path.join(atlas_dir, "yeo_2011_thick17_splithemi.tsv"), sep="\t")
        roi_names = labs_df.full_roi_name.values
        roi_type = "labels"

    elif parc == "yeo7":
        atlas = datasets.fetch_atlas_yeo_2011()
        roi_file = atlas['thick_7']
        yeo_cols = "roi roi_labels c1 c2 c3 c4".split(" ")
        df_labels = pd.read_csv(atlas["colors_7"], sep=r"\s*", engine="python", names=yeo_cols, skiprows=1)
        roi_names = df_labels["roi_labels"].values
        roi_type = "labels"
    else:
        raise Exception("Parcellation not known {}".format(parc))
    return roi_file, roi_names, roi_type
Example #17
def get_atlas_info(atlas, res=None):
    """
    Gather all information from a specified atlas, including the path to the atlas maps, as well as labels
    and their indexes.

    Parameters
    ----------
    atlas : str
        Atlas dataset to be downloaded through nilearn's datasets.fetch_atlas_* functionality.
    res : str
        Specific version of the atlas to be downloaded. Only necessary for Harvard-Oxford and Talairach.
        Please check nilearn's respective documentation at
        https://nilearn.github.io/modules/generated/nilearn.datasets.fetch_atlas_harvard_oxford.html or
        https://nilearn.github.io/modules/generated/nilearn.datasets.fetch_atlas_talairach.html

    Returns
    -------
    atlas_info_df : pandas dataframe
        A pandas dataframe containing information about the ROIs and their indexes included in a given atlas.
    atl_ds.maps : str
        Path to the atlas maps.

    Examples
    --------
    >>> get_atlas_info('aal')
    atlas_info_df
    atl_ds.maps
    """

    if atlas == 'aal':
        atl_ds = datasets.fetch_atlas_aal()

    elif atlas == 'harvard_oxford':
        if res is None:
            print(
                'Please provide the specific version of the Harvard-Oxford atlas you would like to use.'
            )
        else:
            atl_ds = datasets.fetch_atlas_harvard_oxford(res)

    elif atlas == 'destriuex':
        atl_ds = datasets.fetch_atlas_destrieux_2009()

    elif atlas == 'msdl':
        atl_ds = datasets.fetch_atlas_msdl()

    elif atlas == 'talairach':
        if res is None:
            print(
                'Please provide the level of the Talairach atlas you would like to use.'
            )
        else:
            atl_ds = datasets.fetch_atlas_talairach(level_name=res)

    elif atlas == 'pauli_2017':
        atl_ds = datasets.fetch_atlas_pauli_2017()

    index = []
    labels = []

    for ind, label in enumerate(atl_ds.labels):
        index.append(ind)
        if atlas == 'destriuex':
            labels.append(label[1])
        else:
            labels.append(label)

    atlas_info_df = pd.DataFrame({'index': index, 'label': labels})

    return atlas_info_df, atl_ds.maps
Example #18
def get_atlas_rois(atlas, roi_idx, hemisphere, res=None, path=None):
    """
    Extract ROIs from a given atlas.

    Parameters
    ----------
    atlas : str
        Atlas dataset to be downloaded through nilearn's datasets.fetch_atlas_* functionality.
    roi_idx : list
        List of int indices of the ROI(s) you want to extract from the atlas. If unsure, use get_atlas_info.
    hemisphere : list
        List of str, the hemispheres of the ROI(s) you want to extract. Can be ['left'], ['right'] or ['left', 'right'].
    res : str
        Specific version of the atlas to be downloaded. Only necessary for Harvard-Oxford and Talairach.
        Please check nilearn's respective documentation at
        https://nilearn.github.io/modules/generated/nilearn.datasets.fetch_atlas_harvard_oxford.html or
        https://nilearn.github.io/modules/generated/nilearn.datasets.fetch_atlas_talairach.html
    path: str
        Path to where the extracted ROI(s) will be saved to. If None, ROI(s) will be saved in the current
        working directory.

    Returns
    -------
    list_rois: list
        A list of the extracted ROIs.

    Examples
    --------
    >>> get_atlas_rois('aal', [1, 2, 3], ['left', 'right'], path='/home/urial/Desktop')
    list_rois
    """

    if atlas == 'aal':
        atl_ds = datasets.fetch_atlas_aal()

    elif atlas == 'harvard_oxford':
        if res is None:
            print(
                'Please provide the specific version of the Harvard-Oxford atlas you would like to use.'
            )
        else:
            atl_ds = datasets.fetch_atlas_harvard_oxford(res)

    elif atlas == 'destriuex':
        atl_ds = datasets.fetch_atlas_destrieux_2009()

    elif atlas == 'msdl':
        atl_ds = datasets.fetch_atlas_msdl()

    elif atlas == 'talairach':
        if res is None:
            print(
                'Please provide the level of the Talairach atlas you would like to use.'
            )
        else:
            atl_ds = datasets.fetch_atlas_talairach(level_name=res)

    elif atlas == 'pauli_2017':
        atl_ds = datasets.fetch_atlas_pauli_2017()

    if roi_idx is None:
        print('Please provide the indices of the ROIs you want to extract.')
    elif hemisphere is None:
        print(
            'Please provide the hemisphere(s) from which you want to extract ROIs.'
        )

    for label in roi_idx:
        for hemi in hemisphere:
            roi_ex = Node(PickAtlas(), name='roi_ex')
            roi_ex.inputs.atlas = atl_ds.maps
            roi_ex.inputs.labels = label
            roi_ex.inputs.hemi = hemi
            if path is None:
                roi_ex.inputs.output_file = '%s_%s_%s.nii.gz' % (
                    atlas, str(label), hemi)
                roi_ex.run()
                list_rois = glob('%s_*.nii.gz' % atlas)
            elif path:
                roi_ex.inputs.output_file = opj(
                    path, '%s_%s_%s.nii.gz' % (atlas, str(label), hemi))
                roi_ex.run()
                list_rois = glob(opj(path, '%s_*.nii.gz' % atlas))

    print('The following ROIs were extracted: ')
    print('\n'.join(map(str, list_rois)))

    return list_rois
Example #19
 def __init__(self, atlas_name=''):
     from nilearn import datasets
     msdl = datasets.fetch_atlas_msdl()
     # Note: `smith` is assumed to be defined elsewhere in the original class
     # (e.g. a fetched Smith 2009 atlas bunch); `msdl` is fetched but left unused here.
     self.dataset = getattr(smith, atlas_name)
Example #20
def DownloadAAL3(PATH):
    import os
    from nilearn import datasets

    if os.path.isfile(PATH + '/AAL3_for_SPM12.tar.gz'):
        print('The atlas AAL3 has already been downloaded')
    else:
        os.system('wget https://www.oxcns.org/AAL3_for_SPM12.tar.gz -P ' +
                  PATH)

    if os.path.exists(PATH + '/AAL3'):
        print('The atlas AAL3 has already been unzipped')
    else:
        os.system('tar -zxvf ' + PATH + '/AAL3_for_SPM12.tar.gz -C ' + PATH)

    if os.path.exists(PATH + '/AAL3/AAL3.mat'):
        print('The atlas labels AAL have already been downloaded')
    else:
        os.system(
            'wget https://www.dropbox.com/s/eeullhxfv8tk6fg/AAL3.mat?dl=1 -P '
            + PATH + '/AAL3')
        os.system('mv ' + PATH + '/AAL3/AAL3.mat?dl=1 ' + PATH +
                  '/AAL3/AAL3.mat')

    ###################
    A_HOx = '/home/lxmera/nilearn_data/fsl/data/atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr25-2mm.nii.gz'
    if os.path.isfile(A_HOx):
        print('The atlas Harvard-Oxford has already been downloaded')
    else:
        ###############################
        if os.path.exists('/home/lxmera'):
            datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')
        else:
            print('####################################')
            print('#                                  #')
            print('#         CHANGE THE USER          #')
            print('#                                  #')
            print('####################################')

    mat2 = A_HOx[:A_HOx.rfind('/')] + '/labelsHof.mat'
    if os.path.isfile(mat2):
        print('The atlas labels Harvard-Oxford have already been downloaded')
    else:
        os.system(
            'wget https://www.dropbox.com/s/t0keqsapcbdl10b/labelsHof.mat?dl=1 -P '
            + A_HOx[:A_HOx.rfind('/')])
        os.system('mv ' + A_HOx[:A_HOx.rfind('/')] + '/labelsHof.mat?dl=1 ' +
                  A_HOx[:A_HOx.rfind('/')] + '/labelsHof.mat')

    A_MSDL = '/home/lxmera/nilearn_data/msdl_atlas/MSDL_rois/msdl_rois.nii'
    if os.path.isfile(A_MSDL):
        print('The atlas MSDL has already been downloaded')
    else:
        ###############################
        if os.path.exists('/home/lxmera'):
            datasets.fetch_atlas_msdl()
        else:
            print('####################################')
            print('#                                  #')
            print('#         CHANGE THE USER          #')
            print('#                                  #')
            print('####################################')

    mat3 = A_MSDL[:A_MSDL.rfind('/')] + '/labelsMSDL.mat'
    if os.path.isfile(mat3):
        print('The atlas labels MSDL have already been downloaded')
    else:
        os.system(
            'wget https://www.dropbox.com/s/j18tleliudcx2yn/labelsMSDL.mat?dl=1 -P '
            + A_MSDL[:A_MSDL.rfind('/')])
        os.system('mv ' + A_MSDL[:A_MSDL.rfind('/')] +
                  '/labelsMSDL.mat?dl=1 ' + A_MSDL[:A_MSDL.rfind('/')] +
                  '/labelsMSDL.mat')

    atlas = PATH + '/AAL3/AAL3.nii.gz'
    mat = PATH + '/AAL3/AAL3.mat'
    return atlas, mat, A_HOx, mat2, A_MSDL, mat3
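A usage sketch for DownloadAAL3; the target directory is a placeholder and must be writable.

atlas, mat, A_HOx, mat2, A_MSDL, mat3 = DownloadAAL3('/tmp/atlases')
# atlas / mat   -> AAL3 image and label .mat file
# A_HOx / mat2  -> Harvard-Oxford image and label .mat file
# A_MSDL / mat3 -> MSDL image and label .mat file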
from camcan.preprocessing import extract_timeseries

from nilearn.datasets import (fetch_atlas_basc_multiscale_2015,
                              fetch_atlas_msdl)

import joblib
from joblib import Parallel, delayed, Memory

# path to the Cam-CAN data set
CAMCAN_PREPROCESSED = '/home/mehdi/data/camcan/camcan_preproc'
CAMCAN_PATIENTS_EXCLUDED = '/home/mehdi/data/camcan/camcan_preproc/'\
                           'excluded_subjects.csv'
CAMCAN_TIMESERIES = '/home/mehdi/data/camcan/camcan_timeseries'
# path to the atlases
ATLASES = [
    fetch_atlas_msdl().maps,
    fetch_atlas_basc_multiscale_2015().scale064,
    fetch_atlas_basc_multiscale_2015().scale122,
    fetch_atlas_basc_multiscale_2015().scale197
]
ATLASES_DESCR = ['msdl', 'basc064', 'basc122', 'basc197']
# path for the caching
CACHE_TIMESERIES = '/home/mehdi/data/camcan/cache/timeseries'
if not os.path.exists(CACHE_TIMESERIES):
    os.makedirs(CACHE_TIMESERIES)
MEMORY = Memory(CACHE_TIMESERIES)

N_JOBS = 20

dataset = load_camcan_rest(data_dir=CAMCAN_PREPROCESSED,
                           patients_excluded=CAMCAN_PATIENTS_EXCLUDED)
Example #22
PATH_TO_DATA = ['/home/lemaitre/Documents/data/INST/*']
SUBJECTS_EXCLUDED = ('/home/lemaitre/Documents/data/'
                     'inst_excluded_subjects.csv')
PATH_OUTPUT = '/home/lemaitre/Documents/data/INST_time_series'

subjects_path = []
for pdata in PATH_TO_DATA:
    subjects_path += glob.glob(pdata)
subjects_path = sorted(subjects_path)
subjects_path = [sp for sp in subjects_path if isdir(sp)]

PATH_TO_RESTING_STATE = 'session_1/rest_1/rest_res2standard.nii.gz'
PATH_TO_MOTION_CORRECTION = 'session_1/rest_1/rest_mc.1D'

# path to the atlases
ATLASES = [fetch_atlas_msdl().maps,
           fetch_atlas_basc_multiscale_2015().scale064,
           fetch_atlas_basc_multiscale_2015().scale122,
           fetch_atlas_basc_multiscale_2015().scale197,
           fetch_atlas_harvard_oxford(atlas_name='cort-prob-2mm').maps,
           fetch_atlas_craddock_2012().scorr_mean,
           fetch_coords_power_2011()]
ATLASES_DESCR = ['msdl', 'basc064', 'basc122', 'basc197',
                 'harvard_oxford_cort_prob_2mm', 'craddock_scorr_mean',
                 'power_2011']

# load the list of patient to exclude
excluded_subjects = pd.read_csv(
    SUBJECTS_EXCLUDED,
    dtype={'subject_id': object})['subject_id'].tolist()
Example #23
# using connectome measure object
mean_correlation_matrix = connectome_measure.mean_

# grab center coordinates for atlas labels
coordinates = plotting.find_parcellation_cut_coords(labels_img=yeo['thick_17'])

# plot connectome with 80% edge strength in the connectivity
plotting.plot_connectome(mean_correlation_matrix, coordinates,
                         edge_threshold="80%",
                         title='Yeo Atlas 17 thick (func)')

##########################################################################
# Load probabilistic atlases - extracting coordinates on brain maps
# -----------------------------------------------------------------

msdl = datasets.fetch_atlas_msdl()

##########################################################################
# Iterate over fetched atlases to extract coordinates - probabilistic
# -------------------------------------------------------------------
from nilearn.input_data import NiftiMapsMasker

# create masker to extract functional data within atlas parcels
masker = NiftiMapsMasker(maps_img=msdl['maps'], standardize=True,
                         memory='nilearn_cache')

# extract time series from all subjects and concatenate them
time_series = []
for func, confounds in zip(data.func, data.confounds):
    time_series.append(masker.fit_transform(func, confounds=confounds))
    # Display covariance matrix
    plotting.plot_matrix(cov, cmap=plotting.cm.bwr,
                         vmin=-1, vmax=1, title="%s / covariance" % title,
                         labels=labels)
    # Display precision matrix
    plotting.plot_matrix(prec, cmap=plotting.cm.bwr,
                         vmin=-span, vmax=span, title="%s / precision" % title,
                         labels=labels)


##############################################################################
# Fetching datasets
# ------------------
from nilearn import datasets
msdl_atlas_dataset = datasets.fetch_atlas_msdl()
adhd_dataset = datasets.fetch_adhd(n_subjects=n_subjects)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      adhd_dataset.func[0])  # 4D data


##############################################################################
# Extracting region signals
# --------------------------
from nilearn import image
from nilearn import input_data

# A "memory" to avoid recomputation
from sklearn.externals.joblib import Memory
Example #25
source = config['source']

if source == 'craddock':
    components = fetch_craddock_parcellation().parcellate400
    data = np.ones_like(check_niimg(components).get_data())
    mask = new_img_like(components, data)
    label_masker = NiftiLabelsMasker(labels_img=components,
                                     smoothing_fwhm=0,
                                     mask_img=mask).fit()
    maps_img = label_masker.inverse_transform(maps)
else:
    mask = fetch_mask()
    masker = MultiNiftiMasker(mask_img=mask).fit()

    if source == 'msdl':
        components = fetch_atlas_msdl()['maps']
        components = masker.transform(components)
    elif source in ['hcp_rs', 'hcp_rs_concat', 'hcp_rs_positive']:
        data = fetch_atlas_modl()
        if source == 'hcp_rs':
            components_imgs = [data.nips2017_components64]
        elif source == 'hcp_rs_concat':
            components_imgs = [
                data.nips2017_components16, data.nips2017_components64,
                data.nips2017_components256
            ]
        else:
            components_imgs = [
                data.positive_components16, data.positive_components64,
                data.positive_components512
            ]
Example #26
def test_fetch_atlas_msdl():
    dataset = datasets.fetch_atlas_msdl(data_dir=tmpdir, verbose=0)
    assert_true(isinstance(dataset.labels, _basestring))
    assert_true(isinstance(dataset.maps, _basestring))
    assert_equal(len(url_request.urls), 1)
Example #27
of functional regions in rest.

The key to extract signals is to use the
:class:`nilearn.input_data.NiftiMapsMasker` that can transform nifti
objects to time series using a probabilistic atlas.

As the MSDL atlas comes with (x, y, z) MNI coordinates for the different
regions, we can visualize the matrix as a graph of interaction in a
brain. To avoid having too dense a graph, we represent only the 20% edges
with the highest values.

"""
############################################################################
# Retrieve the atlas and the data
from nilearn import datasets
atlas = datasets.fetch_atlas_msdl()
# Loading atlas image stored in 'maps'
atlas_filename = atlas['maps']
# Loading atlas data stored in 'labels'
labels = atlas['labels']

# Load the functional datasets
data = datasets.fetch_adhd(n_subjects=1)

print('First subject resting-state nifti image (4D) is located at: %s' %
      data.func[0])

############################################################################
# Extract the time series
from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
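The snippet above is cut off mid-call. Below is a sketch of how such a script typically continues, following the steps described in the docstring (the confound handling and the 80% edge threshold mirror that description; the variable names are assumptions).

from nilearn.connectome import ConnectivityMeasure
from nilearn import plotting

masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)
time_series = masker.fit_transform(data.func[0],
                                   confounds=data.confounds[0])

# correlation matrix over the 39 MSDL regions
correlation_matrix = ConnectivityMeasure(kind='correlation').fit_transform(
    [time_series])[0]

# keep only the 20% strongest edges, as announced in the docstring
plotting.plot_connectome(correlation_matrix, atlas.region_coords,
                         edge_threshold="80%")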
Example #29
def main(
    workdir,
    outdir,
    atlas,
    kernel,
    sparsity,
    affinity,
    approach,
    gradients,
    subcort,
    neurosynth,
    neurosynth_file,
    sleuth_file,
    nimare_dataset,
    roi_mask,
    term,
    topic,
):
    workdir = op.join(workdir, "tmp")
    if op.isdir(workdir):
        shutil.rmtree(workdir)
    os.makedirs(workdir)

    atlas_name = "atlas-{0}".format(atlas)
    kernel_name = "kernel-{0}".format(kernel)
    sparsity_name = "sparsity-{0}".format(sparsity)
    affinity_name = "affinity-{0}".format(affinity)
    approach_name = "approach-{0}".format(approach)
    gradients_name = "gradients-{0}".format(gradients)
    dset = None

    # handle neurosynth dataset, if called
    if neurosynth:
        if neurosynth_file is None:

            ns_data_dir = op.join(workdir, "neurosynth")
            dataset_file = op.join(ns_data_dir, "neurosynth_dataset.pkl.gz")
            # download neurosynth dataset if necessary
            if not op.isfile(dataset_file):
                neurosynth_download(ns_data_dir)

        else:
            dataset_file = neurosynth_file

        dset = Dataset.load(dataset_file)
        dataset_name = "dataset-neurosynth"

    # handle sleuth text file, if called
    if sleuth_file is not None:
        dset = convert_sleuth_to_dataset(sleuth_file, target="mni152_2mm")
        dataset_name = "dataset-{0}".format(op.basename(sleuth_file).split(".")[0])

    if nimare_dataset is not None:
        dset = Dataset.load(nimare_dataset)
        dataset_name = "dataset-{0}".format(op.basename(nimare_dataset).split(".")[0])

    if dset:
        # slice studies, if needed
        if roi_mask is not None:
            roi_ids = dset.get_studies_by_mask(roi_mask)
            print(
                "{}/{} studies report at least one coordinate in the "
                "ROI".format(len(roi_ids), len(dset.ids))
            )
            dset_sel = dset.slice(roi_ids)
            dset = dset_sel
            dataset_name = "dataset-neurosynth_mask-{0}".format(
                op.basename(roi_mask).split(".")[0]
            )

        if term is not None:
            labels = ["Neurosynth_TFIDF__{label}".format(label=label) for label in [term]]
            term_ids = dset.get_studies_by_label(labels=labels, label_threshold=0.1)
            print(
                "{}/{} studies report association "
                "with the term {}".format(len(term_ids), len(dset.ids), term)
            )
            dset_sel = dset.slice(term_ids)
            dset = dset_sel
            # img_inds = np.nonzero(dset.masker.mask_img.get_fdata())  # unused
            # vox_locs = np.unravel_index(img_inds, dset.masker.mask_img.shape)  # unused
            dataset_name = "dataset-neurosynth_term-{0}".format(term)

        if topic is not None:
            topics = [
                "Neurosynth_{version}__{topic}".format(version=topic[0], topic=t)
                for t in topic[1:]
            ]
            topics_ids = []
            for topic in topics:
                topic_ids = dset.annotations.id[np.where(dset.annotations[topic])[0]].tolist()
                topics_ids.extend(topic_ids)
                print(
                    "{}/{} studies report association "
                    "with the term {}".format(len(topic_ids), len(dset.ids), topic)
                )
            topics_ids_unique = np.unique(topics_ids)
            print("{} unique ids".format(len(topics_ids_unique)))
            dset_sel = dset.slice(topics_ids_unique)
            dset = dset_sel
            # img_inds = np.nonzero(dset.masker.mask_img.get_fdata())  # unused
            # vox_locs = np.unravel_index(img_inds, dset.masker.mask_img.shape)  # unused
            dataset_name = "dataset-neurosynth_topic-{0}".format("_".join(topic[1:]))

        if (
            neurosynth
            or (sleuth_file is not None)
            or (nimare_dataset is not None)
        ):
            # set kernel for MA smoothing
            if kernel == "peaks2maps":
                print("Running peak2maps")
                k = Peaks2MapsKernel(resample_to_mask=True)
            elif kernel == "alekernel":
                print("Running alekernel")
                k = ALEKernel(fwhm=15)

            if atlas is not None:
                if atlas == "harvard-oxford":
                    print("Parcellating using the Harvard Oxford Atlas")
                    # atlas_labels = atlas.labels[1:]  # unused
                    atlas_shape = atlas.maps.shape
                    atlas_affine = atlas.maps.affine
                    atlas_data = atlas.maps.get_fdata()
                elif atlas == "aal":
                    print("Parcellating using the AAL Atlas")
                    atlas = datasets.fetch_atlas_aal()
                    # atlas_labels = atlas.labels  # unused
                    atlas_shape = nib.load(atlas.maps).shape
                    atlas_affine = nib.load(atlas.maps).affine
                    atlas_data = nib.load(atlas.maps).get_fdata()
                elif atlas == "craddock-2012":
                    print("Parcellating using the Craddock-2012 Atlas")
                    atlas = datasets.fetch_atlas_craddock_2012()
                elif atlas == "destrieux-2009":
                    print("Parcellating using the Destrieux-2009 Atlas")
                    atlas = datasets.fetch_atlas_destrieux_2009(lateralized=True)
                    # atlas_labels = atlas.labels[3:]  # unused
                    atlas_shape = nib.load(atlas.maps).shape
                    atlas_affine = nib.load(atlas.maps).affine
                    atlas_data = nib.load(atlas.maps).get_fdata()
                elif atlas == "msdl":
                    print("Parcellating using the MSDL Atlas")
                    atlas = datasets.fetch_atlas_msdl()
                elif atlas == "surface":
                    print("Generating surface vertices")

                if atlas != "fsaverage5" and atlas != "hcp":
                    imgs = k.transform(dset, return_type="image")

                    masker = NiftiLabelsMasker(
                        labels_img=atlas.maps, standardize=True, memory="nilearn_cache"
                    )
                    time_series = masker.fit_transform(imgs)

                else:
                    # change to array for other approach
                    imgs = k.transform(dset, return_type="image")
                    print(np.shape(imgs))

                    if atlas == "fsaverage5":
                        fsaverage = fetch_surf_fsaverage(mesh="fsaverage5")
                        pial_left = fsaverage.pial_left
                        pial_right = fsaverage.pial_right
                        medial_wall_inds_left = surface.load_surf_data(
                            "./templates/lh.Medial_wall.label"
                        )
                        print(np.shape(medial_wall_inds_left))
                        medial_wall_inds_right = surface.load_surf_data(
                            "./templates/rh.Medial_wall.label"
                        )
                        print(np.shape(medial_wall_inds_right))
                        sulc_left = fsaverage.sulc_left
                        sulc_right = fsaverage.sulc_right

                    elif atlas == "hcp":
                        pial_left = "./templates/S1200.L.pial_MSMAll.32k_fs_LR.surf.gii"
                        pial_right = "./templates/S1200.R.pial_MSMAll.32k_fs_LR.surf.gii"
                        medial_wall_inds_left = np.where(
                            nib.load("./templates/hcp.tmp.lh.dscalar.nii").get_fdata()[0] == 0
                        )[0]
                        medial_wall_inds_right = np.where(
                            nib.load("./templates/hcp.tmp.rh.dscalar.nii").get_fdata()[0] == 0
                        )[0]
                        left_verts = 32492 - len(medial_wall_inds_left)
                        sulc_left = nib.load(
                            "./templates/S1200.sulc_MSMAll.32k_fs_LR.dscalar.nii"
                        ).get_fdata()[0][0:left_verts]
                        sulc_left = np.insert(
                            sulc_left,
                            np.subtract(
                                medial_wall_inds_left, np.arange(len(medial_wall_inds_left))
                            ),
                            0,
                        )
                        sulc_right = nib.load(
                            "./templates/S1200.sulc_MSMAll.32k_fs_LR.dscalar.nii"
                        ).get_fdata()[0][left_verts:]
                        sulc_right = np.insert(
                            sulc_right,
                            np.subtract(
                                medial_wall_inds_right, np.arange(len(medial_wall_inds_right))
                            ),
                            0,
                        )

                    surf_lh = surface.vol_to_surf(
                        imgs,
                        pial_left,
                        radius=6.0,
                        interpolation="nearest",
                        kind="ball",
                        n_samples=None,
                        mask_img=dset.masker.mask_img,
                    )
                    surf_rh = surface.vol_to_surf(
                        imgs,
                        pial_right,
                        radius=6.0,
                        interpolation="nearest",
                        kind="ball",
                        n_samples=None,
                        mask_img=dset.masker.mask_img,
                    )
                    surfs = np.transpose(np.vstack((surf_lh, surf_rh)))
                    del surf_lh, surf_rh

                    # handle cortex first
                    coords_left = surface.load_surf_data(pial_left)[0]
                    coords_left = np.delete(coords_left, medial_wall_inds_left, axis=0)
                    coords_right = surface.load_surf_data(pial_right)[0]
                    coords_right = np.delete(coords_right, medial_wall_inds_right, axis=0)

                    print("Left Hemipshere Vertices")
                    surface_macms_lh, inds_discard_lh = build_macms(dset, surfs, coords_left)
                    print(np.shape(surface_macms_lh))
                    print(inds_discard_lh)

                    print("Right Hemipshere Vertices")
                    surface_macms_rh, inds_discard_rh = build_macms(dset, surfs, coords_right)
                    print(np.shape(surface_macms_rh))
                    print(len(inds_discard_rh))

                    lh_vertices_total = np.shape(surface_macms_lh)[0]
                    rh_vertices_total = np.shape(surface_macms_rh)[0]
                    time_series = np.transpose(np.vstack((surface_macms_lh, surface_macms_rh)))
                    print(np.shape(time_series))
                    del surface_macms_lh, surface_macms_rh

                    if subcort:
                        subcort_img = nib.load("templates/rois-subcortical_mni152_mask.nii.gz")
                        subcort_vox = np.asarray(np.where(subcort_img.get_fdata()))
                        subcort_mm = vox2mm(subcort_vox.T, subcort_img.affine)

                        print("Subcortical Voxels")
                        subcort_macm, inds_discard_subcort = build_macms(dset, surfs, subcort_mm)

                        num_subcort_vox = np.shape(subcort_macm)[0]
                        print(inds_discard_subcort)

                        time_series = np.hstack((time_series, np.asarray(subcort_macm).T))
                        print(np.shape(time_series))

                time_series = time_series.astype("float32")

                print("calculating correlation matrix")
                correlation = ConnectivityMeasure(kind="correlation")
                time_series = correlation.fit_transform([time_series])[0]
                print(np.shape(time_series))

                if affinity == "cosine":
                    time_series = calculate_affinity(time_series, 10 * sparsity)

            else:
                time_series = np.transpose(k.transform(dset, return_type="array"))

    print("Performing gradient analysis")

    gradients, statistics = embed.compute_diffusion_map(
        time_series, alpha=0.5, return_result=True, overwrite=True
    )
    pickle.dump(statistics, open(op.join(workdir, "statistics.p"), "wb"))

    # if subcortical included in gradient decomposition, remove gradient scores
    if subcort:
        subcort_grads = gradients[np.shape(gradients)[0] - num_subcort_vox :, :]
        subcort_grads = insert(subcort_grads, inds_discard_subcort)
        gradients = gradients[0 : np.shape(gradients)[0] - num_subcort_vox, :]

    # get left hemisphere gradient scores, and insert 0's where medial wall is
    gradients_lh = gradients[0:lh_vertices_total, :]
    if len(inds_discard_lh) > 0:
        gradients_lh = insert(gradients_lh, inds_discard_lh)
    gradients_lh = insert(gradients_lh, medial_wall_inds_left)

    # get right hemisphere gradient scores and insert 0's where medial wall is
    gradients_rh = gradients[-rh_vertices_total:, :]
    if len(inds_discard_rh) > 0:
        gradients_rh = insert(gradients_rh, inds_discard_rh)
    gradients_rh = insert(gradients_rh, medial_wall_inds_right)

    grad_dict = {
        "grads_lh": gradients_lh,
        "grads_rh": gradients_rh,
        "pial_left": pial_left,
        "sulc_left": sulc_left,
        "pial_right": pial_right,
        "sulc_right": sulc_right,
    }
    if subcort:
        grad_dict["subcort_grads"] = subcort_grads
    pickle.dump(grad_dict, open(op.join(workdir, "gradients.p"), "wb"))

    # map the gradient to the parcels
    for i in range(np.shape(gradients)[1]):
        if atlas is not None:
            if atlas == "fsaverage5" or atlas == "hcp":

                plot_surfaces(grad_dict, i, workdir)

                if subcort:
                    tmpimg = masking.unmask(subcort_grads[:, i], subcort_img)
                    nib.save(tmpimg, op.join(workdir, "gradient-{0}.nii.gz".format(i)))
            else:
                tmpimg = np.zeros(atlas_shape)
                for j, n in enumerate(np.unique(atlas_data)[1:]):
                    inds = atlas_data == n
                    tmpimg[inds] = gradients[j, i]
                    nib.save(
                        nib.Nifti1Image(tmpimg, atlas_affine),
                        op.join(workdir, "gradient-{0}.nii.gz".format(i)),
                    )
        else:
            tmpimg = np.zeros(np.prod(dset.masker.mask_img.shape))
            inds = np.ravel_multi_index(
                np.nonzero(dset.masker.mask_img.get_fdata()), dset.masker.mask_img.shape
            )
            tmpimg[inds] = gradients[:, i]
            nib.save(
                nib.Nifti1Image(
                    np.reshape(tmpimg, dset.masker.mask_img.shape), dset.masker.mask_img.affine
                ),
                op.join(workdir, "gradient-{0}.nii.gz".format(i)),
            )

            os.system(
                "python3 /Users/miriedel/Desktop/GitHub/surflay/make_figures.py "
                "-f {grad_image} --colormap jet".format(
                    grad_image=op.join(workdir, "gradient-{0}.nii.gz".format(i))
                )
            )

    output_dir = op.join(
        outdir,
        (
            f"{dataset_name}_{atlas_name}_{kernel_name}_{sparsity_name}_{gradients_name}_"
            f"{affinity_name}_{approach_name}"
        )
    )

    shutil.copytree(workdir, output_dir)

    shutil.rmtree(workdir)
Example #30
                             title=title,
                             figure=fig,
                             colorbar=False)


###############################################################################
# Load brain development fMRI dataset and MSDL atlas
# -------------------------------------------------------------------
# We study only 30 subjects from the dataset, to save computation time.
from nilearn import datasets

rest_data = datasets.fetch_development_fmri(n_subjects=30)

###############################################################################
# We use probabilistic regions of interest (ROIs) from the MSDL atlas.
msdl_data = datasets.fetch_atlas_msdl()
msdl_coords = msdl_data.region_coords
n_regions = len(msdl_coords)
print('MSDL has {0} ROIs, part of the following networks :\n{1}.'.format(
    n_regions, msdl_data.networks))

###############################################################################
# Region signals extraction
# -------------------------
# To extract region time series, we instantiate a
# :class:`nilearn.input_data.NiftiMapsMasker` object and pass it the atlas
# file name, as well as the filtering band-width and detrending options.
from nilearn import input_data

masker = input_data.NiftiMapsMasker(msdl_data.maps,
                                    resampling_target="data",
Example #31
def reduce(dataset, output_dir=None, direct=False, source='hcp_rs_concat'):
    """Create a reduced version of a given dataset.
        Unmask must be called beforehand"""
    memory = Memory(cachedir=get_cache_dirs()[0], verbose=2)
    print('Fetch data')
    this_dataset_dir = join(get_output_dir(output_dir), 'unmasked', dataset)
    masker, X = get_raw_contrast_data(this_dataset_dir)
    print('Retrieve components')
    if source == 'craddock':
        components = fetch_craddock_parcellation().parcellate400
        niimgs = masker.inverse_transform(X.values)
        label_masker = NiftiLabelsMasker(labels_img=components,
                                         smoothing_fwhm=0,
                                         mask_img=masker.mask_img_).fit()
        # components = label_masker.inverse_transform(np.eye(400))
        print('Transform and fit data')
        Xt = label_masker.transform(niimgs)
    else:
        if source == 'msdl':
            components = [fetch_atlas_msdl()['maps']]
        else:
            data = fetch_atlas_modl()
            if source == 'hcp_rs':
                components_imgs = [data.nips2017_components256]
            elif source == 'hcp_rs_concat':
                components_imgs = [
                    data.nips2017_components16, data.nips2017_components64,
                    data.nips2017_components256
                ]
            elif source == 'hcp_336':
                components_imgs = [data.nips2017_components336]
            elif source == 'hcp_new':
                components_imgs = [
                    data.positive_new_components16,
                    data.positive_new_components64,
                    data.positive_new_components128
                ]
            elif source == 'hcp_new_big':
                components_imgs = [
                    data.positive_new_components16,
                    data.positive_new_components64,
                    data.positive_new_components512
                ]
            elif source == 'hcp_rs_positive_concat':
                components_imgs = [
                    data.positive_components16, data.positive_components64,
                    data.positive_components512
                ]
            elif source == 'hcp_new_208':
                components_imgs = [data.positive_new_components208]

            components = masker.transform(components_imgs)
        print('Transform and fit data')
        proj, proj_inv, _ = memory.cache(make_projection_matrix)(
            components, scale_bases=True)
        if direct:
            proj = proj_inv.T
        Xt = X.dot(proj)
    Xt = pd.DataFrame(data=Xt, index=X.index)
    this_source = source
    if direct:
        this_source += '_direct'
    this_output_dir = join(get_output_dir(output_dir), 'reduced', this_source,
                           dataset)
    if not os.path.exists(this_output_dir):
        os.makedirs(this_output_dir)
    print(join(this_output_dir, 'Xt.pkl'))
    Xt.to_pickle(join(this_output_dir, 'Xt.pkl'))
    dump(masker, join(this_output_dir, 'masker.pkl'))
    np.save(join(output_dir, 'components'), components)