def local_plot_hemispheres(values, label_text, color_range, cmap="viridis"):
    # Plot cortical surfaces with values as the data, label_text as
    # the labels, and color_range as the limits of the color bar.
    return plot_hemispheres(
        pial_left,
        pial_right,
        values,
        color_bar=True,
        color_range=color_range,
        label_text=label_text,
        cmap=cmap,
        embed_nb=True,
        size=(1400, 200),
        zoom=1.45,
        nan_color=(0.7, 0.7, 0.7, 1),
        cb__labelTextProperty={"fontSize": 12},
        interactive=False,
    )
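# Hypothetical usage of the helper above (a sketch: assumes `pial_left` and
# `pial_right` are already loaded and numpy is imported as np; the random
# per-vertex array is fabricated purely for illustration):
n_vertices = pial_left.n_points + pial_right.n_points
example_values = np.random.default_rng(0).uniform(0.5, 1.5, n_vertices)
local_plot_hemispheres(example_values, ["example"], (0.5, 1.5), cmap="inferno")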
n_permutations = 1000

sp = SpinPermutations(n_rep=n_permutations, random_state=0)
sp.fit(sphere_lh, points_rh=sphere_rh)

t1wt2w_rotated = np.hstack(sp.randomize(t1wt2w_lh, t1wt2w_rh))
thickness_rotated = np.hstack(sp.randomize(thickness_lh, thickness_rh))

###############################################################################
# As an illustration of the rotation, let's plot the original t1w/t2w data

# Plot original data
plot_hemispheres(surf_lh, surf_rh, array_name=t1wt2w, size=(1200, 300),
                 cmap='viridis', nan_color=(0.5, 0.5, 0.5, 1), color_bar=True)

###############################################################################
# as well as a few rotated versions.

# sphinx_gallery_thumbnail_number = 2
# Plot some rotations
plot_hemispheres(surf_lh, surf_rh, array_name=t1wt2w_rotated[:3], size=(1200, 800),
                 cmap='viridis', nan_color=(0.5, 0.5, 0.5, 1), color_bar=True)
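###############################################################################
# A minimal sketch of how the rotated maps are typically used: build a null
# distribution of correlations between the two maps and compare it with the
# empirical correlation. This step is an illustrative addition, not part of
# the excerpt above; it requires scipy, and assumes a concatenated `thickness`
# map alongside the `t1wt2w` map already used in the plots.

from scipy.stats import spearmanr

r_obs, _ = spearmanr(t1wt2w, thickness, nan_policy='omit')

r_null = np.empty(n_permutations)
for i in range(n_permutations):
    r_null[i], _ = spearmanr(t1wt2w_rotated[i], thickness, nan_policy='omit')

# Spin-test p-value: fraction of null correlations at least as extreme
p_spin = np.mean(np.abs(r_null) >= np.abs(r_obs))
print('p_spin =', p_spin)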
gradients_kernel = [None] * len(kernels)
for i, k in enumerate(kernels):
    gm = GradientMaps(kernel=k, approach='dm', random_state=0)
    gm.fit(conn_matrix)

    # Map the first gradient of each kernel to the parcels, so the kernels
    # are compared on the same component.
    gradients_kernel[i] = map_to_labels(gm.gradients_[:, 0], labeling, mask=mask,
                                        fill=np.nan)

label_text = ['Pearson', 'Spearman', 'Normalized\nAngle']
plot_hemispheres(surf_lh, surf_rh, array_name=gradients_kernel, size=(1200, 800),
                 cmap='viridis_r', color_bar=True, label_text=label_text)

###############################################################################
# The gradients produced by these kernels are quite similar, although their
# scaling differs. Note that gradients are expressed in arbitrary units, so
# smaller or larger ranges across kernels carry no meaning.
# Similar to using different kernels, we can also use different dimensionality
# reduction techniques.

# PCA, Laplacian eigenmaps and diffusion mapping
embeddings = ['pca', 'le', 'dm']
gradients_embedding = [None] * len(embeddings)
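###############################################################################
# A sketch of the analogous loop for the three embedding approaches declared
# above (an assumption mirroring the kernel loop; the same `labeling` and
# `mask` are reused, and the default kernel is left unchanged):

for i, emb in enumerate(embeddings):
    gm = GradientMaps(approach=emb, random_state=0)
    gm.fit(conn_matrix)
    gradients_embedding[i] = map_to_labels(gm.gradients_[:, 0], labeling,
                                           mask=mask, fill=np.nan)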
# Visualization
from brainspace.datasets import load_group_fc, load_parcellation, load_conte69
from brainspace.gradient import GradientMaps
from brainspace.plotting import plot_hemispheres
from brainspace.utils.parcellation import map_to_labels

atlas = np.load("Z:\\hschoi\\backup\\hschoi\\template\\MMP\\MMP.10k_fs_LR.npy")

# Select the reference principal component to display
pc_num = 2
ref_PCs = ref_PC[:, pc_num]

X = ref_PCs
labeling = atlas
conn_matrix = X
mask = labeling != 0

grad = map_to_labels(conn_matrix, labeling, mask=mask, fill=np.nan)

plot_hemispheres(surf_lh, surf_rh, array_name=grad, size=(1300, 200),
                 color_bar=True, cmap='jet', zoom=1.85)
# Other colormap options: 'viridis_r', 'Blues', 'seismic'
# Optional: color_range=(-0.1, 0.16)
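# Optional sanity check (an illustrative addition, not part of the original
# script): map_to_labels expects one value per unique parcel label inside the
# mask, so a shape mismatch is caught early here.
n_parcels = np.unique(labeling[mask]).size
assert conn_matrix.shape[0] == n_parcels, (conn_matrix.shape, n_parcels)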
from brainspace.gradient import GradientMaps
from brainspace.plotting import plot_hemispheres
from brainspace.utils.parcellation import map_to_labels
import numpy as np
import nibabel as nib
from nilearn import surface
import pandas as pd

surf_lh, surf_rh = hcp360()
labeling = load_hcp_parcellation('hcp', scale=360, join=True)
print(np.shape(labeling))

P = plot_hemispheres(surf_lh, surf_rh, array_name=labeling, size=(1200, 200),
                     cmap='tab20', zoom=1.85)
P.screenshot('test.png')

"""hcp_grad = pd.read_csv("gradients.csv")
mask = labeling != 0

grad = [None] * 4
for i in range(4):
    L_grad = hcp_grad['L_grad_' + str(i + 1)].tolist()
    R_grad = hcp_grad['R_grad_' + str(i + 1)].tolist()
    grad_LR = L_grad + R_grad

    # map the gradient to the parcels
    grad[i] = map_to_labels(np.array(grad_LR), labeling, mask=mask, fill=np.nan)
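# If the disabled block above is re-enabled, the four mapped gradients could
# be displayed in one figure. A commented sketch (label names are
# illustrative only):
# plot_hemispheres(surf_lh, surf_rh, array_name=grad, size=(1200, 800),
#                  cmap='viridis_r', color_bar=True,
#                  label_text=['G1', 'G2', 'G3', 'G4'])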
# proc_freesurfer
try:
    # Freesurfer native thickness
    th = np.concatenate((nb.freesurfer.read_morph_data(dir_fS + subBIDS + '/surf/lh.thickness'),
                         nb.freesurfer.read_morph_data(dir_fS + subBIDS + '/surf/rh.thickness')),
                        axis=0)
    plot_hemispheres(surf_lh, surf_rh, array_name=th, size=(900, 250),
                     color_bar='bottom', zoom=1.25, embed_nb=True, interactive=False,
                     share='both', nan_color=(0, 0, 0, 1), color_range=(1.5, 4),
                     cmap="inferno", transparent_bg=False, screenshot=True,
                     filename=dir_QC_png + subBIDS + '_space-fsnative_desc-surf_thickness.png')

    # Freesurfer native curvature
    cv = np.concatenate((nb.freesurfer.read_morph_data(dir_fS + subBIDS + '/surf/lh.curv'),
                         nb.freesurfer.read_morph_data(dir_fS + subBIDS + '/surf/rh.curv')),
                        axis=0)
    plot_hemispheres(wm_lh, wm_rh,
gm = GradientMaps(n_components=2, random_state=0)
gm.fit(correlation_matrix)

###############################################################################
# Visualize results
from brainspace.datasets import load_fsa5
from brainspace.plotting import plot_hemispheres
from brainspace.utils.parcellation import map_to_labels

# Map gradients to original parcels
grad = [None] * 2
for i, g in enumerate(gm.gradients_.T):
    grad[i] = map_to_labels(g, labeling, mask=mask, fill=np.nan)

# Load fsaverage5 surfaces
surf_lh, surf_rh = load_fsa5()

# sphinx_gallery_thumbnail_number = 2
plot_hemispheres(surf_lh, surf_rh, array_name=grad, size=(1200, 400),
                 cmap='viridis_r', color_bar=True, label_text=['Grad1', 'Grad2'],
                 zoom=1.5)

###############################################################################
# This concludes the setup tutorial. The following tutorials can be run using
# either the output generated here or the example data.
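###############################################################################
# A hedged sketch of persisting the fitted gradients so the later tutorials
# can reuse them (the file name is illustrative, not prescribed above):

import numpy as np

np.save('gradients.npy', gm.gradients_)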
# ## Functional gradients to fsaverage5 surface

# In[152]:

# Mask of the medial wall on fsaverage5
mask_fs5 = labels_fs5 != 0

# Map gradients to original parcels
grad = [None] * 3
for i, g in enumerate(gm.gradients_.T[0:3, :]):
    grad[i] = map_to_labels(g, labels_fs5, fill=np.nan, mask=mask_fs5)

# Plot the first three gradients
plot_hemispheres(fs5_lh, fs5_rh, array_name=grad, size=(1000, 600), cmap='coolwarm',
                 embed_nb=True, label_text={'left': ['Grad1', 'Grad2', 'Grad3']},
                 color_bar='left', zoom=1.25, nan_color=(1, 1, 1, 1))

# ## Functional gradients to conte69 surface

# In[153]:

# Mask of the medial wall
mask_c69 = labels_c69 != 0

# Map gradients to original parcels
grad = [None] * 3
for i, g in enumerate(gm.gradients_.T[0:3, :]):
    grad[i] = map_to_labels(g, labels_c69, fill=np.nan, mask=mask_c69)
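# Likely continuation (a sketch mirroring the fsaverage5 plot above; the
# conte69 surface variable names c69_lh/c69_rh are assumptions):
plot_hemispheres(c69_lh, c69_rh, array_name=grad, size=(1000, 600), cmap='coolwarm',
                 embed_nb=True, label_text={'left': ['Grad1', 'Grad2', 'Grad3']},
                 color_bar='left', zoom=1.25, nan_color=(1, 1, 1, 1))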
def test_plot_hemispheres():
    s1 = to_data(vtk.vtkSphereSource())
    s2 = to_data(vtk.vtkSphereSource())
    plot_hemispheres(s1, s2, offscreen=True)
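# A hedged companion test sketch (an addition, not from the original suite):
# plot a per-vertex array on the two spheres, still offscreen. Assumes numpy
# is imported as np alongside the vtk/to_data imports used above.
def test_plot_hemispheres_with_array():
    s1 = to_data(vtk.vtkSphereSource())
    s2 = to_data(vtk.vtkSphereSource())
    vals = np.ones(s1.n_points + s2.n_points)
    plot_hemispheres(s1, s2, array_name=vals, offscreen=True)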
from brainspace.datasets import load_group_fc, load_parcellation, load_conte69

# First load mean connectivity matrix and Schaefer parcellation
conn_matrix = load_group_fc('schaefer', scale=400)
labeling = load_parcellation('schaefer', scale=400, join=True)

# and load the conte69 hemisphere surfaces
surf_lh, surf_rh = load_conte69()

###############################################################################
# Let's first look at the parcellation scheme we're using.

from brainspace.plotting import plot_hemispheres

plot_hemispheres(surf_lh, surf_rh, array_name=labeling, size=(1200, 300),
                 cmap='tab20')

###############################################################################
# and let's construct our gradients.

from brainspace.gradient import GradientMaps

# Ask for 10 gradients (default)
gm = GradientMaps(n_components=10, random_state=0)
gm.fit(conn_matrix)

###############################################################################
# Note that the default parameters are normalized angle kernel, diffusion
# embedding approach, 10 components. Once you have your gradients, a good first
# step is to simply inspect what they look like.
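###############################################################################
# A common first inspection (a sketch, assuming matplotlib is available): the
# eigenvalues stored in `lambdas_` on the fitted model show how strongly each
# gradient contributes.

import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(5, 4))
ax.scatter(range(gm.lambdas_.size), gm.lambdas_)
ax.set_xlabel('Component')
ax.set_ylabel('Eigenvalue')
plt.show()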
# Load the data
th_lh = dir_morph + subjectID + '_space-fsnative_desc-lh_thickness.mgh'
th_rh = dir_morph + subjectID + '_space-fsnative_desc-rh_thickness.mgh'
th_nat = np.hstack(np.concatenate((np.array(load(th_lh).get_fdata()),
                                   np.array(load(th_rh).get_fdata())), axis=0))

# Plot the surface
plot_hemispheres(inf_lh, inf_rh, array_name=th_nat, size=(900, 250),
                 color_bar='bottom', zoom=1.25, embed_nb=True, interactive=False,
                 share='both', nan_color=(0, 0, 0, 1), color_range=(1.5, 4),
                 cmap="inferno", transparent_bg=False)

# ### Thickness: Inflated fsaverage5

# In[6]:

# Load the data
th_lh_fs5 = dir_morph + subjectID + '_space-fsaverage5_desc-lh_thickness.mgh'
th_rh_fs5 = dir_morph + subjectID + '_space-fsaverage5_desc-rh_thickness.mgh'
th_fs5 = np.hstack(
gradients_kernel = [None] * len(kernels)
for i, k in enumerate(kernels):
    gm = GradientMaps(kernel=k, approach='dm', random_state=0)
    gm.fit(conn_matrix)

    # Map the first gradient of each kernel to the parcels, so the kernels
    # are compared on the same component.
    gradients_kernel[i] = map_to_labels(gm.gradients_[:, 0], labeling, mask=mask,
                                        fill=np.nan)

label_text = ['Pearson', 'Spearman', 'Normalized\nAngle']
plot_hemispheres(surf_lh, surf_rh, array_name=gradients_kernel, size=(1200, 600),
                 cmap='viridis_r', color_bar=True, label_text=label_text, zoom=1.45)

###############################################################################
# The gradients produced by these kernels are quite similar, although their
# scaling differs. Note that gradients are expressed in arbitrary units, so
# smaller or larger ranges across kernels carry no meaning.
# Similar to using different kernels, we can also use different dimensionality
# reduction techniques.

# PCA, Laplacian eigenmaps and diffusion mapping
embeddings = ['pca', 'le', 'dm']
gradients_embedding = [None] * len(embeddings)