Example #1
def test_view_surf():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_right'])
    surf_map = mesh[0][:, 0]
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], '90%')
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'],
                                  surf_map,
                                  fsaverage['sulc_right'],
                                  .3,
                                  title="SOME_TITLE")
    check_html(html)
    assert "SOME_TITLE" in html.html
    html = html_surface.view_surf(fsaverage['pial_right'])
    check_html(html)
    destrieux = datasets.fetch_atlas_surf_destrieux()['map_left']
    html = html_surface.view_surf(fsaverage['pial_left'],
                                  destrieux,
                                  symmetric_cmap=False)
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'],
                                  fsaverage['sulc_right'],
                                  threshold=None,
                                  cmap='Greys')
    check_html(html)
    assert_raises(ValueError, html_surface.view_surf, mesh, mesh[0][::2, 0])
    assert_raises(ValueError,
                  html_surface.view_surf,
                  mesh,
                  mesh[0][:, 0],
                  bg_map=mesh[0][::2, 0])
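# Outside a test suite, the same functionality is reached through the public
# nilearn.plotting.view_surf entry point, and the returned view is saved or
# opened directly. A minimal sketch (the output file name and toy map are
# illustrative, not part of the test above):
from nilearn import datasets, plotting, surface

fsaverage = datasets.fetch_surf_fsaverage()
mesh = surface.load_surf_mesh(fsaverage['pial_right'])
dummy_map = mesh[0][:, 0]  # x-coordinate of each vertex, used as a toy map

view = plotting.view_surf(fsaverage['pial_right'], dummy_map,
                          bg_map=fsaverage['sulc_right'],
                          threshold='90%', title='Toy surface map')
view.save_as_html('surf_view.html')  # standalone interactive HTML file
# view.open_in_browser()             # or open it directly in a browser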
Example #2
    def extract_correlation_hemi(self,
                                 nifti_image,
                                 output_file,
                                 mesh,
                                 hemi='map_left'):
        """
        Input params:
            - hemi (hemisphere) = 'map_left' or 'map_right'
            - mesh : 'fsaverage.infl_left'
                    'fsaverage.infl_right'
                    'fsaverage.pial_left'
                    'fsaverage.pial_right'
                    'fsaverage.sulc_left'
                    'fsaverage.sulc_right'
        Output param:
            - correlation matrix and its zFisher values
            - Save the correlation matrix to csv file
        """
        # extract surface data from nifti image ###################
        surface_data = surface.vol_to_surf(nifti_image, surf_mesh=mesh)
        timeseries = surface.load_surf_data(surface_data)
        # fill NaN values with 0 and infinities with large finite numbers
        timeseries = np.nan_to_num(timeseries)
        # get destrieux atlas ######################################
        destrieux_atlas = datasets.fetch_atlas_surf_destrieux()
        labels = destrieux_atlas['labels']  # get labels
        parcellation = destrieux_atlas[hemi]  # get parcellation

        # convert timeseries surface to 2D matrix where each column is a ROI
        rois = []
        for i in range(len(labels)):
            pcc_labels = np.where(parcellation == i)[0]
            # each parcellation to 1D matrix
            seed_timeseries = np.mean(timeseries[pcc_labels], axis=0)
            rois.append(np.array(seed_timeseries))
        rois = np.array(rois).T
        rois = np.nan_to_num(rois)

        # extract correlation matrix
        correlation_measure = ConnectivityMeasure(kind='correlation')
        corr_rois = correlation_measure.fit_transform([rois])[0]
        corr_rois_z = np.arctanh(corr_rois)  # Fisher z-transform (unit diagonal maps to inf)

        # save the correlation to csv
        df = pd.DataFrame(corr_rois_z)
        df.to_csv(output_file, index=False, header=None)

        return corr_rois, corr_rois_z
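# A hypothetical call sketch for the method above; 'analysis' stands in for an
# instance of the enclosing class, and the file paths are placeholders only.
from nilearn import datasets

fsaverage = datasets.fetch_surf_fsaverage()

corr, corr_z = analysis.extract_correlation_hemi(
    nifti_image='sub-01_task-rest_bold_preproc.nii.gz',  # placeholder path
    output_file='sub-01_destrieux_corr_left.csv',        # placeholder path
    mesh=fsaverage['pial_left'],
    hemi='map_left',
)
print(corr.shape)  # (n_regions, n_regions) correlation matrix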
Example #3
def test_view_surf():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_right'])
    surf_map = mesh[0][:, 0]
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], '90%')
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], .3)
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'])
    check_html(html)
    destrieux = datasets.fetch_atlas_surf_destrieux()['map_left']
    html = html_surface.view_surf(
        fsaverage['pial_left'], destrieux, symmetric_cmap=False)
    check_html(html)
    assert_raises(ValueError, html_surface.view_surf, mesh, mesh[0][::2, 0])
    assert_raises(ValueError, html_surface.view_surf, mesh, mesh[0][:, 0],
                  bg_map=mesh[0][::2, 0])
Example #4
# Retrieving the data
# -------------------

# NKI resting state data from nilearn
from nilearn import datasets

nki_dataset = datasets.fetch_surf_nki_enhanced(n_subjects=1)

# The nki dictionary contains file names for the data
# of all downloaded subjects.
print(('Resting state data of the first subject on the '
       'fsaverage5 surface left hemisphere is at: %s' %
       nki_dataset['func_left'][0]))

# Destrieux parcellation for left hemisphere in fsaverage5 space
destrieux_atlas = datasets.fetch_atlas_surf_destrieux()
parcellation = destrieux_atlas['map_left']
labels = destrieux_atlas['labels']

# Fsaverage5 surface template
fsaverage = datasets.fetch_surf_fsaverage()

# The fsaverage dataset contains file names pointing to
# the file locations
print('Fsaverage5 pial surface of left hemisphere is at: %s' %
      fsaverage['pial_left'])
print('Fsaverage5 inflated surface of left hemisphere is at: %s' %
      fsaverage['infl_left'])
print('Fsaverage5 sulcal depth map of left hemisphere is at: %s' %
      fsaverage['sulc_left'])
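# A possible continuation (not part of the original snippet): project the
# Destrieux labels onto the inflated surface for a quick visual check.
from nilearn import plotting

plotting.plot_surf_roi(fsaverage['infl_left'], roi_map=parcellation,
                       hemi='left', view='lateral',
                       bg_map=fsaverage['sulc_left'], bg_on_data=True,
                       title='Destrieux parcellation (left hemisphere)')
plotting.show()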
Example #5
"""
For genetic decoding we use the Allen Human Brain Atlas through the abagen
toolbox. Note that abagen only accepts parcellated data. Here is a minimal
example of how we use abagen to get the genetic expression of the regions of the
Destrieux atlas. Please note that downloading the dataset and running this
analysis can take several minutes. As such, we will not run the analysis here.
"""

from brainstat.context.genetics import surface_genetic_expression
from nilearn import datasets
import numpy as np

run_analysis = False  # Too resource intensive to run on ReadTheDocs

destrieux = datasets.fetch_atlas_surf_destrieux()
labels = np.hstack((destrieux["map_left"], destrieux["map_right"]))
fsaverage = datasets.fetch_surf_fsaverage()
surfaces_pial = [fsaverage["pial_left"], fsaverage["pial_right"]]

if run_analysis:
    expression = surface_genetic_expression(labels, surfaces_pial, space="fsaverage")

########################################################################
# Expression is a pandas DataFrame which shows the genetic expression of genes
# within each region of the atlas. By default, the values will fall in the range
# [0, 1] where higher values represent higher expression. However, if you change
# the normalization function then this may change. Some regions may return NaN
# values for all genes. This occurs when there are no samples within this region
# across all donors.
#
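# One possible way to inspect the result, if the analysis were run (the exact
# columns depend on the abagen download, so this is only a sketch):

if run_analysis:
    print(expression.shape)   # (n_regions, n_genes)
    print(expression.head())  # expression per region, scaled to [0, 1] by default
    # Regions with no Allen samples across donors come back as all-NaN rows.
    n_empty = expression.isna().all(axis=1).sum()
    print("%d regions have no expression data" % n_empty)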
Example #6
def plot_destrieux_surface(
    stats: Union[pd.Series, pd.DataFrame],
    hemisphere: str = "Left",
    measurement: str = None,
    average: bool = False,
    std: bool = False,
    standardize: bool = False,
    title: str = None,
    symmetric_cmap: bool = False,
    cmap: str = None,
    vmin: float = None,
    vmax: float = None,
    factor: int = 1,
):  # returns an interactive nilearn surface view, not a Matplotlib figure
    if title is None and measurement is None:
        title = f"{hemisphere} Hemisphere Values"
    elif title is None:
        title = f"{measurement} ({hemisphere})"
    if (
        isinstance(stats, pd.DataFrame) or stats.index.nlevels > 2
    ) and measurement is None:
        measurement = "Surface Area"
    destrieux_atlas = datasets.fetch_atlas_surf_destrieux()
    destrieux_labels = [
        parse_destrieux_label(label) for label in destrieux_atlas["labels"][1:]
    ]
    fsaverage = datasets.fetch_surf_fsaverage()
    if isinstance(stats, pd.DataFrame):
        data = stats.xs("Destrieux", level="Atlas").copy()
    else:
        data = stats.copy()
    if average:
        data = data.mean(level=["Hemisphere", "Region Name"])
        title = f"Average {title}"
    if std:
        data = data.std(level=["Hemisphere", "Region Name"])
        title = f"{measurement} Standard Deviation ({hemisphere})"
        vmin = 0
    if standardize:
        if isinstance(data, pd.DataFrame):
            data.loc[:, :] = StandardScaler().fit_transform(data)
        else:
            data.loc[:] = StandardScaler().fit_transform(data)
        title = f"Standardized {title}"
        symmetric_cmap = True
        cmap = cmap if cmap is not None else "coolwarm"
    cmap = cmap if cmap is not None else "Reds"
    hemi_stats = data.xs(hemisphere, level="Hemisphere")
    destrieux_projection = destrieux_atlas[f"map_{hemisphere.lower()}"].copy()
    region_ids = sorted(set(destrieux_projection))
    for i, region_id in enumerate(region_ids):
        label = destrieux_labels[i]
        if label == MEDIAL_WALL:
            value = 0
        else:
            if isinstance(data, pd.DataFrame):
                value = hemi_stats.loc[label, measurement]
            else:
                if hemi_stats.index.nlevels == 2:
                    value = hemi_stats.loc[(measurement, label)]
                else:
                    value = hemi_stats.loc[label]
        region_mask = destrieux_projection == region_id
        destrieux_projection[region_mask] = value * factor
    surface = plotting.view_surf(
        fsaverage[f"infl_{hemisphere.lower()}"],
        destrieux_projection,
        bg_map=fsaverage[f"sulc_{hemisphere.lower()}"],
        cmap=cmap,
        title=title,
        symmetric_cmap=symmetric_cmap,
        vmin=vmin,
        vmax=vmax,
    )
    surface.resize(900, 600)
    return surface
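# A hypothetical usage sketch. Judging from the function body, `stats` is
# expected to be indexed by "Atlas", "Hemisphere" and "Region Name" (with a
# column such as "Surface Area" when a DataFrame is passed); the pickle file
# name below is a placeholder, not part of the original code.
import pandas as pd

stats = pd.read_pickle("anatomical_stats.pkl")  # placeholder input table

view = plot_destrieux_surface(stats, hemisphere="Left",
                              measurement="Surface Area")
view.save_as_html("destrieux_surface_area_left.html")  # interactive HTML view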