def surface_to_volume(
    pial_mesh,
    wm_mesh,
    labels,
    volume_template,
    volume_save,
    interpolation="nearest",
):
    """Projects surface labels to the cortical ribbon.

    Parameters
    ----------
    pial_mesh : str, BSPolyData
        Filename of a pial mesh or a BSPolyData object of the same.
    wm_mesh : str, BSPolyData
        Filename of a white matter mesh or a BSPolyData object of the same.
    labels : str, numpy.ndarray
        Filename of a .label.gii or .shape.gii file, or a numpy array
        containing the labels.
    volume_template : str, nibabel.nifti1.Nifti1Image
        Filename of a nifti image in the same space as the mesh files or a
        NIfTI image loaded with nibabel.
    volume_save : str
        Filename to which the label image will be saved.
    interpolation : str
        Either 'nearest' for nearest neighbor interpolation, or 'linear'
        for trilinear interpolation, defaults to 'nearest'.
    """

    if not isinstance(pial_mesh, BSPolyData):
        pial_mesh = read_surface(pial_mesh)
    if not isinstance(wm_mesh, BSPolyData):
        wm_mesh = read_surface(wm_mesh)
    if not isinstance(volume_template, nib.nifti1.Nifti1Image):
        volume_template = nib.load(volume_template)

    logging.debug("Computing voxels inside the cortical ribbon.")
    ribbon_points = cortical_ribbon(pial_mesh, wm_mesh, volume_template)

    logging.debug("Computing labels for cortical ribbon voxels.")
    ribbon_labels = ribbon_interpolation(
        pial_mesh,
        wm_mesh,
        labels,
        volume_template,
        ribbon_points,
        interpolation=interpolation,
    )

    logging.debug("Constructing new nifti image.")
    new_data = np.zeros(volume_template.shape)
    # Round ribbon coordinates to integer voxel indices before assignment.
    ribbon_points = np.rint(ribbon_points).astype(int)
    for i in range(ribbon_labels.shape[0]):
        new_data[
            ribbon_points[i, 0], ribbon_points[i, 1], ribbon_points[i, 2]
        ] = ribbon_labels[i]

    new_nii = nib.Nifti1Image(new_data, volume_template.affine)
    nib.save(new_nii, volume_save)
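
# Example usage (a minimal sketch, not part of the original module): project a
# surface label file onto the cortical ribbon of a template volume. All file
# paths below are hypothetical, and surface_to_volume is assumed to be in
# scope as defined above.
surface_to_volume(
    "lh.pial",                   # pial surface mesh (hypothetical path)
    "lh.white",                  # white matter surface mesh (hypothetical path)
    "lh.aparc.label.gii",        # surface labels to project (hypothetical path)
    "T1w.nii.gz",                # volume defining the output grid and affine
    "lh_labels_ribbon.nii.gz",   # output filename
    interpolation="nearest",
)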
def load_surface(lh, rh, with_normals=True, join=False):
    """Loads surfaces.

    Parameters
    ----------
    lh : str
        Filename of the left hemisphere surface.
    rh : str
        Filename of the right hemisphere surface.
    with_normals : bool, optional
        Whether to compute surface normals. Default is True.
    join : bool, optional
        If False, return one surface for left and right hemispheres.
        Otherwise, return a single surface as a combination of both left and
        right surfaces. Default is False.

    Returns
    -------
    surf : tuple of BSPolyData or BSPolyData
        Surfaces for left and right hemispheres. If ``join == True``, one
        surface with both hemispheres.
    """

    surfs = [None] * 2
    for i, side in enumerate([lh, rh]):
        surfs[i] = read_surface(side)
        if with_normals:
            nf = wrap_vtk(vtkPolyDataNormals, splitting=False, featureAngle=0.1)
            surfs[i] = serial_connect(surfs[i], nf)

    if join:
        return combine_surfaces(*surfs)
    return surfs[0], surfs[1]
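
# Example usage (a sketch with hypothetical paths): load both hemispheres with
# surface normals, either as two BSPolyData objects or joined into one.
surf_lh, surf_rh = load_surface("lh.midthickness.surf.gii",
                                "rh.midthickness.surf.gii",
                                with_normals=True, join=False)
surf_both = load_surface("lh.midthickness.surf.gii",
                         "rh.midthickness.surf.gii",
                         with_normals=True, join=True)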
def mesh_average(filenames, fun=np.add, output_surfstat=False):
    """Average, minimum, or maximum of surfaces.

    Args:
        filenames (2D numpy array): Numpy array of filenames of surfaces or
            BSPolyData objects.
        fun: Function handle to apply to two surfaces, e.g. np.add (default)
            will give the average of the surfaces, np.fmin or np.fmax will
            give the min or max, respectively.
        output_surfstat (boolean): If True, outputs the surface in SurfStat
            format. If False, outputs the surface as BSPolyData.
            Default is False.

    Returns:
        surface [BSPolyData, dict]: The output surface.
    """

    if filenames.ndim != 2:
        raise ValueError("Filenames must be a 2-dimensional array.")

    for i in range(0, filenames.shape[0]):
        surfaces = np.empty(filenames.shape[1], dtype=object)
        for j in range(0, filenames.shape[1]):

            # Check whether input is BSPolyData or a filename.
            if isinstance(filenames[i, j], BSPolyData):
                surfaces[j] = filenames[i, j]
            else:
                surfaces[j] = read_surface(filenames[i, j])

            # Concatenate second dimension of filenames.
            if j == 0:
                tri = get_cells(surfaces[j])
                coord = get_points(surfaces[j])
            else:
                tri = np.concatenate(
                    (tri, get_cells(surfaces[j]) + coord.shape[0]), axis=0
                )
                coord = np.concatenate((coord, get_points(surfaces[j])), axis=0)

        if i == 0:
            m = 1
            coord_all = coord
        else:
            coord_all = fun(coord_all, coord)
            m = fun(m, 1)

    coord_all = coord_all / m

    if output_surfstat:
        surface = {"tri": np.array(tri) + 1, "coord": np.array(coord_all).T}
    else:
        surface = build_polydata(coord_all, tri)

    return surface
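
# Example usage (a sketch with hypothetical filenames): average the joined
# left/right surfaces of two subjects. Each row holds one subject's
# hemispheres; with fun=np.add the rows are summed and divided by the number
# of rows, i.e. a vertex-wise mean.
filenames = np.array(
    [["sub-01_lh.pial", "sub-01_rh.pial"],
     ["sub-02_lh.pial", "sub-02_rh.pial"]]
)
avg_surface = mesh_average(filenames, fun=np.add, output_surfstat=False)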
def fetch_template_surface(
    template: str,
    join: bool = True,
    layer: Optional[str] = None,
    data_dir: Optional[Union[str, Path]] = None,
) -> Union[BSPolyData, Tuple[BSPolyData, BSPolyData]]:
    """Loads surface templates.

    Parameters
    ----------
    template : str
        Name of the surface template. Valid values are "fslr32k",
        "fsaverage", "fsaverage3", "fsaverage4", "fsaverage5", "fsaverage6",
        "civet41k", "civet164k".
    join : bool, optional
        If true, returns surfaces as a single object, if false, returns an
        object per hemisphere, by default True.
    layer : str, optional
        Name of the cortical surface of interest. Valid values are "white",
        "smoothwm", "pial", "inflated", "sphere" for fsaverage surfaces;
        "midthickness", "inflated", "vinflated" for "fslr32k"; "mid", "white"
        for CIVET surfaces; and "sphere" for "civet41k". If None, defaults to
        "pial" or "midthickness", by default None.
    data_dir : str, Path, optional
        Directory to save the data, by default
        $HOME_DIR/brainstat_data/surface_data.

    Returns
    -------
    BSPolyData or tuple of BSPolyData
        Output surface(s). If a tuple, then the first element is the left
        hemisphere.
    """

    data_dir = Path(data_dir) if data_dir else data_directories["SURFACE_DATA_DIR"]
    surface_files = _fetch_template_surface_files(template, data_dir, layer)

    if template[:9] == "fsaverage":
        surfaces_fs = [read_geometry(file) for file in surface_files]
        surfaces = [build_polydata(surface[0], surface[1]) for surface in surfaces_fs]
    elif template == "fslr32k":
        surfaces = [read_surface(file) for file in surface_files]
    else:
        surfaces_obj = [read_civet(file) for file in surface_files]
        surfaces = [build_polydata(surface[0], surface[1]) for surface in surfaces_obj]

    if join:
        return combine_surfaces(surfaces[0], surfaces[1])
    else:
        return surfaces[0], surfaces[1]
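
# Example usage (a sketch, assuming the helper functions and data directories
# the function relies on are available): fetch the joined fsaverage5 pial
# surface, and the fslr32k midthickness surfaces as separate hemispheres.
fsaverage5_pial = fetch_template_surface("fsaverage5", join=True, layer="pial")
fslr32k_lh, fslr32k_rh = fetch_template_surface("fslr32k", join=False)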
def test_io_nb():
    s = _generate_sphere()

    root_pth = os.path.dirname(__file__)
    io_pth = os.path.join(root_pth, 'test_sphere_io.gii')

    mio.write_surface(s, io_pth)
    s2 = mio.read_surface(io_pth)

    assert np.allclose(s.Points, s2.Points)
    assert np.all(s.GetCells2D() == s2.GetCells2D())

    os.remove(io_pth)
def read_surface_gz(filename):
    """Extension of brainspace's read_surface to include .gz files.

    Parameters
    ----------
    filename : str
        Filename of file to open.

    Returns
    -------
    BSPolyData
        Surface mesh.
    """
    if filename.endswith(".gz"):
        # Decompress to a temporary file with the inner extension so that
        # read_surface can infer the file type.
        extension = os.path.splitext(filename[:-3])[-1]
        with tempfile.NamedTemporaryFile(suffix=extension) as f_tmp:
            with gzip.open(filename, "rb") as f_gz:
                shutil.copyfileobj(f_gz, f_tmp)
            return read_surface(f_tmp.name)
    else:
        return read_surface(filename)
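
# Example usage (a sketch with hypothetical paths): gzipped and plain surface
# files are handled transparently.
surf_gz = read_surface_gz("rh.midthickness.surf.gii.gz")
surf_plain = read_surface_gz("rh.midthickness.surf.gii")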
def test_io(ext):
    s = _generate_sphere()

    root_pth = os.path.dirname(__file__)
    io_pth = os.path.join(root_pth, 'test_sphere_io.{ext}').format(ext=ext)

    mio.write_surface(s, io_pth)
    s2 = mio.read_surface(io_pth)

    assert np.allclose(s.Points, s2.Points)
    assert np.all(s.GetCells2D() == s2.GetCells2D())

    os.remove(io_pth)
import numpy as np
from os.path import join

import vtk
from vtk import vtkPolyDataNormals

from brainspace.mesh.mesh_io import read_surface
from brainspace.mesh.mesh_operations import combine_surfaces
from brainspace.utils.parcellation import reduce_by_labels
from brainspace.vtk_interface import wrap_vtk, serial_connect

template_path = "Z:/hschoi/backup/hschoi/template/MMP"
# Alternatives: S900.L.midthickness_MSMAll.10k_fs_LR.surf.gii
#               L.very_inflated_MSMAll.10k_fs_LR.surf.gii
template_L = "S900.L.midthickness_MSMAll.10k_fs_LR.surf.gii"
# Alternatives: S900.R.midthickness_MSMAll.10k_fs_LR.surf.gii
#               R.very_inflated_MSMAll.10k_fs_LR.surf.gii
template_R = "S900.R.midthickness_MSMAll.10k_fs_LR.surf.gii"

# Load both hemisphere templates and compute surface normals.
surfs = [None] * 2

surfs[0] = read_surface(join(template_path, template_L))
nf = wrap_vtk(vtkPolyDataNormals, splitting=False, featureAngle=0.1)
surf_lh = serial_connect(surfs[0], nf)

surfs[1] = read_surface(join(template_path, template_R))
nf = wrap_vtk(vtkPolyDataNormals, splitting=False, featureAngle=0.1)
surf_rh = serial_connect(surfs[1], nf)

# Visualization
from brainspace.datasets import load_group_fc, load_parcellation, load_conte69
from brainspace.gradient import GradientMaps
from brainspace.plotting import plot_hemispheres
from brainspace.utils.parcellation import map_to_labels

atlas = np.load("Z:\\hschoi\\backup\\hschoi\\template\\MMP\\MMP.10k_fs_LR.npy")
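
# A possible continuation (a sketch, not part of the original script): map a
# per-parcel vector onto the 10k_fs_LR vertices using the MMP atlas loaded
# above and display it on both hemispheres. `values_per_parcel` is placeholder
# data; surf_lh, surf_rh, atlas, map_to_labels and plot_hemispheres come from
# the script above.
values_per_parcel = np.random.rand(int(atlas.max()))
vertex_values = map_to_labels(values_per_parcel, atlas, mask=atlas != 0,
                              fill=np.nan)
plot_hemispheres(surf_lh, surf_rh, array_name=vertex_values,
                 size=(800, 200), cmap='viridis', color_bar=True)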
def __freesurfer_to_surfgii(freesurfer_file: str, gifti_file: str) -> None:
    """Convert a FreeSurfer surface file to a GIFTI surface file."""
    surf = read_surface(freesurfer_file, itype="fs")
    write_surface(surf, gifti_file, otype="gii")
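
# Example usage (a sketch with hypothetical paths): convert a FreeSurfer pial
# surface to GIFTI.
__freesurfer_to_surfgii("lh.pial", "lh.pial.surf.gii")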
# Curvature colormap (green-to-red).
ColCurv = plt.colors.ListedColormap([
    '#A2CD5A', '#A0CA5B', '#9FC85C', '#9EC55D', '#9DC35E', '#9CC05F', '#9BBE61', '#9ABB62',
    '#99B963', '#98B664', '#96B465', '#95B166', '#94AF68', '#93AC69', '#92AA6A', '#91A76B',
    '#90A56C', '#8FA26D', '#8EA06F', '#8C9D70', '#8B9B71', '#8A9972', '#899673', '#889475',
    '#879176', '#868F77', '#858C78', '#848A79', '#82877A', '#81857C', '#80827D', '#7F807E',
    '#807D7D', '#827A7A', '#857777', '#877575', '#8A7272', '#8C6F6F', '#8F6C6C', '#916969',
    '#946666', '#966464', '#996161', '#9B5E5E', '#9D5B5B', '#A05858', '#A25656', '#A55353',
    '#A75050', '#AA4D4D', '#AC4A4A', '#AF4747', '#B14545', '#B44242', '#B63F3F', '#B93C3C',
    '#BB3939', '#BE3636', '#C03434', '#C33131', '#C52E2E', '#C82B2B', '#CA2828', '#CD2626'
])

# Load fsaverage5 inflated
fs5I_lh = read_surface(dir_fS + 'fsaverage5/surf/lh.inflated', itype='fs')
fs5I_rh = read_surface(dir_fS + 'fsaverage5/surf/rh.inflated', itype='fs')

# Load conte69
c69_lh, c69_rh = load_conte69()

# Load native mid surface
mid_lh, mid_rh = load_surface(dir_fS + subBIDS + '/surf/lh.midthickness.surf.gii',
                              dir_fS + subBIDS + '/surf/rh.midthickness.surf.gii',
                              with_normals=True, join=False)

# Load native surface
surf_lh = read_surface(dir_fS + subBIDS + '/surf/lh.pial', itype='fs')
surf_rh = read_surface(dir_fS + subBIDS + '/surf/rh.pial', itype='fs')
subjectDir = 'micapipe/sub-HC001/ses-01'  # <<<<<<<<<<<< CHANGE THIS SUBJECT's DIRECTORY

# Here we define the atlas
atlas = 'schaefer-400'  # <<<<<<<<<<<< CHANGE THIS ATLAS

# ## Load the surfaces

# In[79]:

# Load conte69
c69_lh, c69_rh = load_conte69()

# Load fsaverage5
fs5_lh = read_surface('freesurfer/fsaverage5/surf/lh.pial', itype='fs')
fs5_rh = read_surface('freesurfer/fsaverage5/surf/rh.pial', itype='fs')

# Load annotation files in fsaverage5
annot_lh_fs5 = nb.freesurfer.read_annot(micapipe + '/parcellations/lh.schaefer-400_mics.annot')
annot_rh_fs5 = nb.freesurfer.read_annot(micapipe + '/parcellations/rh.schaefer-400_mics.annot')[0] + 200

# Set the right-hemisphere medial wall labels to 0
annot_rh_fs5 = np.where(annot_rh_fs5 == 200, 0, annot_rh_fs5)

# fsaverage5 labels
labels_fs5 = np.concatenate((annot_lh_fs5[0], annot_rh_fs5), axis=0)

# Read labels for conte69
labels_c69 = np.loadtxt(open(micapipe + '/parcellations/schaefer-400_conte69.csv'), dtype=int)
# This variable will be different for each subject
subjectID = 'sub-HC001_ses-01'            # <<<<<<<<<<<< CHANGE THIS SUBJECT's ID
subjectDir = 'micapipe/sub-HC001/ses-01'  # <<<<<<<<<<<< CHANGE THIS SUBJECT's DIRECTORY

# Set paths and variables
dir_FS = 'freesurfer/' + subjectID
dir_conte = subjectDir + '/anat/surfaces/conte69/'
dir_morph = subjectDir + '/anat/surfaces/morphology/'
dir_mpc = subjectDir + '/anat/surfaces/micro_profiles/'

# ## Load all the surfaces

# In[4]:

# Load native pial surface
pial_lh = read_surface(dir_FS + '/surf/lh.pial', itype='fs')
pial_rh = read_surface(dir_FS + '/surf/rh.pial', itype='fs')

# Load native mid surface
mid_lh = read_surface(dir_FS + '/surf/lh.midthickness.surf.gii', itype='gii')
mid_rh = read_surface(dir_FS + '/surf/rh.midthickness.surf.gii', itype='gii')

# Load native white matter surface
wm_lh = read_surface(dir_FS + '/surf/lh.white', itype='fs')
wm_rh = read_surface(dir_FS + '/surf/rh.white', itype='fs')

# Load native inflated surface
inf_lh = read_surface(dir_FS + '/surf/lh.inflated', itype='fs')
inf_rh = read_surface(dir_FS + '/surf/rh.inflated', itype='fs')

# Load fsaverage5