# Histological decoding
# ---------------------
# For histological decoding we use microstructural profile covariance gradients
# computed from the BigBrain dataset. (TODO: Add more background). First, let's
# download the MPC data and compute its gradients. As the computations for this
# aren't very intensive, we can actually run this on ReadTheDocs!

from brainstat.context.histology import (
    read_histology_profile,
    compute_mpc,
    compute_histology_gradients,
)
from brainspace.datasets import load_parcellation

# Load the Schaefer 400 atlas
# (join=True presumably returns a single array covering both hemispheres
# rather than a per-hemisphere tuple — verify against the BrainSpace docs).
schaefer_400 = load_parcellation("schaefer", scale=400, join=True)

# Run the analysis:
# 1) read BigBrain microstructural profiles sampled on the fs_LR 64k surface,
# 2) build the microstructural profile covariance (MPC) matrix within the
#    Schaefer-400 parcels,
# 3) compute gradients of the MPC matrix.
histology_profiles = read_histology_profile(template="fs_LR_64k")
mpc = compute_mpc(histology_profiles, labels=schaefer_400)
gradient_map = compute_histology_gradients(mpc)

########################################################################
# Let's plot the first gradient of histology to see what it looks like.
# We will use BrainSpace to create our plots. For full details on how
# BrainSpace's plotting functionality works, please consult the BrainSpace
# ReadTheDocs. (NOTE: Temporarily disabled due to build errors)

from brainspace.plotting.surface_plotting import plot_hemispheres
from brainspace.utils.parcellation import map_to_labels
from brainspace.datasets import load_conte69
###############################################################################
# Example 2
"""
In this tutorial you'll learn about the methods of the
GradientMaps class. The flexible usage of this class allows for the
customization of gradient computation with different kernels and dimensionality
reductions, as well as aligning gradients from different datasets. This
tutorial will only show you how to apply these techniques.
"""

###############################################################################
# As before, we’ll start by loading the sample data.

import warnings
# Suppress all warnings so the rendered tutorial output stays clean.
# NOTE(review): a blanket 'ignore' also hides genuinely useful warnings;
# consider restricting this to specific warning categories.
warnings.simplefilter('ignore')

from brainspace.datasets import load_group_fc, load_parcellation, load_conte69

# First load mean connectivity matrix and Schaefer parcellation
# (scale=400 selects the 400-parcel variant; join=True presumably merges
# both hemispheres into one label array — verify against the BrainSpace docs).
conn_matrix = load_group_fc('schaefer', scale=400)
labeling = load_parcellation('schaefer', scale=400, join=True)

# Boolean mask of labeled vertices; label 0 presumably marks background /
# unassigned vertices (e.g. the medial wall) — confirm with the atlas docs.
mask = labeling != 0

# and load the conte69 hemisphere surfaces
surf_lh, surf_rh = load_conte69()

###############################################################################
# The GradientMaps object allows for many different kernels and dimensionality
# reduction techniques. Let’s have a look at three different kernels.

import numpy as np

from brainspace.gradient import GradientMaps
from brainspace.plotting import plot_hemispheres
from brainspace.utils.parcellation import map_to_labels
###############################################################################
# Example 3
"""
In this tutorial you'll learn about the methods of the
GradientMaps class. The flexible usage of this class allows for the
customization of gradient computation with different kernels and dimensionality
reductions, as well as aligning gradients from different datasets. This
tutorial will only show you how to apply these techniques.
"""

###############################################################################
# Customizing gradient computation
# +++++++++++++++++++++++++++++++++
# As before, we’ll start by loading the sample data.

from brainspace.datasets import load_group_fc, load_parcellation, load_conte69

# First load mean connectivity matrix and Schaefer parcellation
# (scale=400 selects the 400-parcel variant; join=True presumably merges
# both hemispheres into one label array — verify against the BrainSpace docs).
conn_matrix = load_group_fc('schaefer', scale=400)
labeling = load_parcellation('schaefer', scale=400, join=True)

# Boolean mask of labeled vertices; label 0 presumably marks background /
# unassigned vertices (e.g. the medial wall) — confirm with the atlas docs.
mask = labeling != 0

# and load the conte69 hemisphere surfaces
surf_lh, surf_rh = load_conte69()

###############################################################################
# The GradientMaps object allows for many different kernels and dimensionality
# reduction techniques. Let’s have a look at three different kernels.

import numpy as np

from brainspace.gradient import GradientMaps
from brainspace.plotting import plot_hemispheres
from brainspace.utils.parcellation import map_to_labels