def test_load_surf_mesh_file_freesurfer():
    mesh = generate_surf()
    for suff in ['.pial', '.inflated', '.white', '.orig', 'sphere']:
        filename_fs_mesh = tempfile.mktemp(suffix=suff)
        nb.freesurfer.write_geometry(filename_fs_mesh, mesh[0], mesh[1])
        assert len(load_surf_mesh(filename_fs_mesh)) == 2
        assert_array_almost_equal(load_surf_mesh(filename_fs_mesh)[0],
                                  mesh[0])
        assert_array_almost_equal(load_surf_mesh(filename_fs_mesh)[1],
                                  mesh[1])
        os.remove(filename_fs_mesh)
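# Several tests below call a generate_surf() helper that is not included in
# this collection. A minimal stand-in (an assumption, not the original
# helper) could be: random vertex coordinates plus random triangle indices.
import numpy as np


def generate_surf():
    rng = np.random.RandomState(42)
    coords = rng.rand(20, 3)  # 20 vertices in 3D
    faces = rng.randint(coords.shape[0], size=(30, 3))  # 30 triangles
    return [coords, faces]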
def test_load_surf_mesh_file_error():
    # test if files with unexpected suffixes raise errors
    mesh = generate_surf()
    wrong_suff = ['.vtk', '.obj', '.mnc', '.txt']
    for suff in wrong_suff:
        filename_wrong = tempfile.mktemp(suffix=suff)
        nb.freesurfer.write_geometry(filename_wrong, mesh[0], mesh[1])
        with pytest.raises(ValueError,
                           match='input type is not recognized'):
            load_surf_mesh(filename_wrong)
        os.remove(filename_wrong)
def test_load_surf_mesh_file_freesurfer():
    # Older nibabel versions do not support 'write_geometry'
    if LooseVersion(nb.__version__) <= LooseVersion('1.2.0'):
        raise SkipTest
    mesh = _generate_surf()
    for suff in ['.pial', '.inflated', '.white', '.orig', 'sphere']:
        filename_fs_mesh = tempfile.mktemp(suffix=suff)
        nb.freesurfer.write_geometry(filename_fs_mesh, mesh[0], mesh[1])
        assert_equal(len(load_surf_mesh(filename_fs_mesh)), 2)
        assert_array_almost_equal(load_surf_mesh(filename_fs_mesh)[0],
                                  mesh[0])
        assert_array_almost_equal(load_surf_mesh(filename_fs_mesh)[1],
                                  mesh[1])
        os.remove(filename_fs_mesh)
def test_load_surf_mesh_file_freesurfer():
    # Older nibabel versions do not support 'write_geometry'
    if LooseVersion(nb.__version__) <= LooseVersion('1.2.0'):
        raise SkipTest
    mesh = generate_surf()
    for suff in ['.pial', '.inflated', '.white', '.orig', 'sphere']:
        filename_fs_mesh = tempfile.mktemp(suffix=suff)
        nb.freesurfer.write_geometry(filename_fs_mesh, mesh[0], mesh[1])
        assert_equal(len(load_surf_mesh(filename_fs_mesh)), 2)
        assert_array_almost_equal(load_surf_mesh(filename_fs_mesh)[0],
                                  mesh[0])
        assert_array_almost_equal(load_surf_mesh(filename_fs_mesh)[1],
                                  mesh[1])
        os.remove(filename_fs_mesh)
def test_load_surf_mesh_file_gii_gz():
    # Test the loader `load_surf_mesh` with gzipped fsaverage5 files
    fsaverage = datasets.fetch_surf_fsaverage().pial_left
    coords, faces = load_surf_mesh(fsaverage)
    assert_true(isinstance(coords, np.ndarray))
    assert_true(isinstance(faces, np.ndarray))
def test_load_surf_mesh_file_gii_gz():
    # Test the loader `load_surf_mesh` with gzipped fsaverage5 files
    fsaverage = datasets.fetch_surf_fsaverage().pial_left
    coords, faces = load_surf_mesh(fsaverage)
    assert isinstance(coords, np.ndarray)
    assert isinstance(faces, np.ndarray)
def test_view_surf():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_right'])
    surf_map = mesh[0][:, 0]
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], '90%')
    check_html(html, title="Surface plot")
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], .3,
                                  title="SOME_TITLE")
    check_html(html, title="SOME_TITLE")
    assert "SOME_TITLE" in html.html
    html = html_surface.view_surf(fsaverage['pial_right'])
    check_html(html)
    atlas = np.random.RandomState(0).randint(0, 10, size=len(mesh[0]))
    html = html_surface.view_surf(fsaverage['pial_left'], atlas,
                                  symmetric_cmap=False)
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'],
                                  fsaverage['sulc_right'],
                                  threshold=None, cmap='Greys')
    check_html(html)
    with pytest.raises(ValueError):
        html_surface.view_surf(mesh, mesh[0][::2, 0])
    with pytest.raises(ValueError):
        html_surface.view_surf(mesh, mesh[0][:, 0],
                               bg_map=mesh[0][::2, 0])
def test_view_surf():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_right'])
    surf_map = mesh[0][:, 0]
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], '90%')
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], .3,
                                  title="SOME_TITLE")
    check_html(html)
    assert "SOME_TITLE" in html.html
    html = html_surface.view_surf(fsaverage['pial_right'])
    check_html(html)
    destrieux = datasets.fetch_atlas_surf_destrieux()['map_left']
    html = html_surface.view_surf(fsaverage['pial_left'], destrieux,
                                  symmetric_cmap=False)
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'],
                                  fsaverage['sulc_right'],
                                  threshold=None, cmap='Greys')
    check_html(html)
    assert_raises(ValueError, html_surface.view_surf, mesh,
                  mesh[0][::2, 0])
    assert_raises(ValueError, html_surface.view_surf, mesh,
                  mesh[0][:, 0], bg_map=mesh[0][::2, 0])
def test_check_mesh():
    mesh = html_surface._check_mesh('fsaverage5')
    assert mesh is html_surface._check_mesh(mesh)
    assert_raises(ValueError, html_surface._check_mesh, 'fsaverage3')
    mesh.pop('pial_left')
    assert_raises(ValueError, html_surface._check_mesh, mesh)
    assert_raises(TypeError, html_surface._check_mesh,
                  surface.load_surf_mesh(mesh['pial_right']))
def test_load_surf_mesh_list():
    # test if correct list is returned
    mesh = generate_surf()
    assert_equal(len(load_surf_mesh(mesh)), 2)
    assert_array_equal(load_surf_mesh(mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(mesh)[1], mesh[1])
    # test if incorrect list, array or dict raises error
    assert_raises_regex(ValueError, 'it must have two elements',
                        load_surf_mesh, [])
    assert_raises_regex(ValueError, 'it must have two elements',
                        load_surf_mesh, [mesh[0]])
    assert_raises_regex(ValueError, 'it must have two elements',
                        load_surf_mesh, [mesh[0], mesh[1], mesh[1]])
    assert_raises_regex(ValueError, 'input type is not recognized',
                        load_surf_mesh, mesh[0])
    assert_raises_regex(ValueError, 'input type is not recognized',
                        load_surf_mesh, dict())
    del mesh
def test_mesh_to_plotly(hemi):
    """Tests for function mesh_to_plotly."""
    fsaverage = fetch_surf_fsaverage()
    coord, triangles = load_surf_mesh(fsaverage[f'pial_{hemi}'])
    plotly = mesh_to_plotly(fsaverage[f'pial_{hemi}'])
    for i, key in enumerate(['_x', '_y', '_z']):
        assert np.allclose(decode(plotly[key], '<f4'), coord[:, i])
    for i, key in enumerate(['_i', '_j', '_k']):
        assert np.allclose(decode(plotly[key], '<i4'), triangles[:, i])
def test_load_surf_mesh_list():
    # test if correct list is returned
    mesh = _generate_surf()
    assert_equal(len(load_surf_mesh(mesh)), 2)
    assert_array_equal(load_surf_mesh(mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(mesh)[1], mesh[1])
    # test if incorrect list, array or dict raises error
    assert_raises_regex(ValueError, 'it must have two elements',
                        load_surf_mesh, [])
    assert_raises_regex(ValueError, 'it must have two elements',
                        load_surf_mesh, [mesh[0]])
    assert_raises_regex(ValueError, 'it must have two elements',
                        load_surf_mesh, [mesh[0], mesh[1], mesh[1]])
    assert_raises_regex(ValueError, 'input type is not recognized',
                        load_surf_mesh, mesh[0])
    assert_raises_regex(ValueError, 'input type is not recognized',
                        load_surf_mesh, dict())
    del mesh
def plot_labelmap(label_map):
    cols = ["#FCF9F5",
            "#C06A45",  # CER
            "#5B5BFF",  # DMN
            "#D73E68",  # FP
            "#8D18AB",  # LIM
            "#0AFE47",  # MOT
            "#FF9C42",  # VAT_SAL_SUB
            "#FFFFAA"   # VIS
            ]
    import numpy as np
    import numpy.linalg as npl
    import nibabel as nb
    import seaborn as sns
    from matplotlib.colors import ListedColormap
    from nilearn import surface
    from nilearn import plotting, datasets
    fsaverage = datasets.fetch_surf_fsaverage()

    from PAINTeR import utils
    s = utils.load_surface_obj('/Users/tspisak/tmp/wm_gm_simp2.obj')
    s2 = surface.load_surf_mesh(fsaverage['pial_left'])

    from nibabel.affines import apply_affine
    img = nb.load(label_map)
    data = img.get_data()

    import pandas as pd
    from PAINTeR import global_vars
    l = pd.read_csv(global_vars._ATLAS_LABELS_, sep="\t")
    modules = l['modules'].values
    lut = pd.factorize(modules)[0] + 1
    lut = np.array([0] + lut.tolist())
    data = lut[np.array(data, dtype=int)]

    # Assign each surface vertex the label of the voxel it falls into
    parcellation = np.repeat(0, len(s[0]))
    for i in range(len(s[0])):
        coord = np.round(apply_affine(npl.inv(img.affine),
                                      s[0][i])).astype(int)
        if coord[0] - 1 >= data.shape[0] or coord[1] - 1 >= data.shape[1] \
                or coord[2] - 1 >= data.shape[2]:
            parcellation[i] = 0
        else:
            parcellation[i] = data[coord[0] - 1, coord[1] - 1, coord[2] - 1]

    import matplotlib.cm as cm
    view = plotting.view_surf(
        s,
        surf_map=parcellation,
        cmap=ListedColormap(sns.color_palette(cols)),
        # ListedColormap(cm.get_cmap('tab20').colors)
        threshold=0,
        symmetric_cmap=False)
    view.open_in_browser()
def test_mesh_to_plotly():
    fsaverage = fetch_surf_fsaverage()
    coord, triangles = surface.load_surf_mesh(fsaverage['pial_left'])
    plotly = js_plotting_utils.mesh_to_plotly(fsaverage['pial_left'])
    for i, key in enumerate(['_x', '_y', '_z']):
        assert np.allclose(
            js_plotting_utils.decode(plotly[key], '<f4'), coord[:, i])
    for i, key in enumerate(['_i', '_j', '_k']):
        assert np.allclose(
            js_plotting_utils.decode(plotly[key], '<i4'), triangles[:, i])
def test_check_mesh():
    mesh = html_surface._check_mesh('fsaverage5')
    assert mesh is html_surface._check_mesh(mesh)
    with pytest.raises(ValueError):
        html_surface._check_mesh('fsaverage3')
    mesh.pop('pial_left')
    with pytest.raises(ValueError):
        html_surface._check_mesh(mesh)
    with pytest.raises(TypeError):
        html_surface._check_mesh(surface.load_surf_mesh(mesh['pial_right']))
def test_load_surf_mesh_file_gii():
    # Test the loader `load_surf_mesh`

    # If nibabel is of older version we skip tests as nibabel does not
    # support intent argument and intent codes are not handled properly with
    # older versions
    if not LooseVersion(nb.__version__) >= LooseVersion('2.1.0'):
        raise SkipTest

    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    filename_gii_mesh = tempfile.mktemp(suffix='.gii')
    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])
    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if incorrect gii raises error
    filename_gii_mesh_no_point = tempfile.mktemp(suffix='.gii')
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    assert_raises_regex(ValueError, 'NIFTI_INTENT_POINTSET',
                        load_surf_mesh, filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    filename_gii_mesh_no_face = tempfile.mktemp(suffix='.gii')
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    assert_raises_regex(ValueError, 'NIFTI_INTENT_TRIANGLE',
                        load_surf_mesh, filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
def test_load_surf_mesh():
    coords, faces = generate_surf()
    mesh = Mesh(coords, faces)
    assert_array_equal(mesh.coordinates, coords)
    assert_array_equal(mesh.faces, faces)
    # Call load_surf_mesh with a Mesh as argument
    loaded_mesh = load_surf_mesh(mesh)
    assert isinstance(loaded_mesh, Mesh)
    assert_array_equal(mesh.coordinates, loaded_mesh.coordinates)
    assert_array_equal(mesh.faces, loaded_mesh.faces)

    mesh_like = MeshLikeObject(coords, faces)
    assert_array_equal(mesh_like.coordinates, coords)
    assert_array_equal(mesh_like.faces, faces)
    # Call load_surf_mesh with an object having
    # coordinates and faces attributes
    loaded_mesh = load_surf_mesh(mesh_like)
    assert isinstance(loaded_mesh, Mesh)
    assert_array_equal(mesh_like.coordinates, loaded_mesh.coordinates)
    assert_array_equal(mesh_like.faces, loaded_mesh.faces)
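# MeshLikeObject is another helper that is not shown in this collection; any
# object exposing `coordinates` and `faces` attributes would satisfy the test.
# A minimal stand-in (an assumption) could be a named tuple:
import collections

MeshLikeObject = collections.namedtuple('MeshLikeObject',
                                         ['coordinates', 'faces'])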
def test_load_surf_mesh_file_glob():
    mesh = generate_surf()
    fname1 = tempfile.mktemp(suffix='.pial')
    nb.freesurfer.write_geometry(fname1, mesh[0], mesh[1])
    fname2 = tempfile.mktemp(suffix='.pial')
    nb.freesurfer.write_geometry(fname2, mesh[0], mesh[1])

    assert_raises_regex(ValueError, 'More than one file matching path',
                        load_surf_mesh,
                        os.path.join(os.path.dirname(fname1), "*.pial"))
    assert_raises_regex(ValueError, 'No files matching path',
                        load_surf_mesh,
                        os.path.join(os.path.dirname(fname1),
                                     "*.unlikelysuffix"))
    assert_equal(len(load_surf_mesh(fname1)), 2)
    assert_array_almost_equal(load_surf_mesh(fname1)[0], mesh[0])
    assert_array_almost_equal(load_surf_mesh(fname1)[1], mesh[1])
    os.remove(fname1)
    os.remove(fname2)
def test_get_vertexcolor():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_left'])
    surf_map = np.arange(len(mesh[0]))
    colors = html_surface.colorscale('jet', surf_map, 10)
    vertexcolors = html_surface._get_vertexcolor(
        surf_map, colors['cmap'], colors['norm'],
        colors['abs_threshold'], fsaverage['sulc_left'])
    assert len(vertexcolors) == len(mesh[0])
    vertexcolors = html_surface._get_vertexcolor(
        surf_map, colors['cmap'], colors['norm'],
        colors['abs_threshold'])
    assert len(vertexcolors) == len(mesh[0])
def to_plotly(mesh):
    mesh = surface.load_surf_mesh(mesh)
    x, y, z = map(_encode, np.asarray(mesh[0].T, dtype='<f4'))
    i, j, k = map(_encode, np.asarray(mesh[1].T, dtype='<i4'))
    info = {
        "_x": x,
        "_y": y,
        "_z": z,
        "_i": i,
        "_j": j,
        "_k": k,
    }
    return info
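# _encode is not defined in this snippet; it presumably base64-encodes the raw
# bytes of each coordinate or index array so the mesh can be embedded in HTML.
# A plausible sketch (an assumption, not the original helper):
import base64
import numpy as np


def _encode(a):
    """Base64-encode the raw buffer of a numpy array as a UTF-8 string."""
    return base64.b64encode(np.asarray(a).tobytes()).decode('utf-8')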
def test_fill_html_template():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_right'])
    surf_map = mesh[0][:, 0]
    img = _get_img()
    info = html_surface.one_mesh_info(
        surf_map, fsaverage['pial_right'], '90%', black_bg=True,
        bg_map=fsaverage['sulc_right'])
    html = html_surface._fill_html_template(info, embed_js=False)
    check_html(html)
    assert "jquery.min.js" in html.html
    info = html_surface.full_brain_info(img)
    html = html_surface._fill_html_template(info)
    check_html(html)
    assert "* plotly.js (gl3d - minified) v1.38.3" in html.html
def plot_on_surf(data, sides=['left', 'right'], selected=False,
                 threshold=None, inflate=1.001, **kwargs):
    """ Plot a numpy array of data on the corresponding fsaverage surface.

        The kwargs are passed to mlab.triangular_mesh
    """
    actors = dict()
    for side in sides:
        actors[side] = list()
        mesh = surface.load_surf_mesh(fsaverage['infl_%s' % side])
        shift = -42 if side == 'left' else 42
        surf = mlab.triangular_mesh(inflate * mesh[0][:, 0] + shift,
                                    inflate * mesh[0][:, 1],
                                    inflate * mesh[0][:, 2],
                                    mesh[1],
                                    scalars=data,
                                    **kwargs,
                                    )
        surf.module_manager.source.filter.splitting = False
        # Avoid openGL bugs with backfaces
        surf.actor.property.backface_culling = True
        actors[side].append(surf)

        if threshold is not None:
            surf.enable_contours = True
            surf.contour.auto_contours = False
            surf.contour.contours = [threshold, data.max()]
            surf.contour.filled_contours = True
            # A trick exploiting a bug in Z-ordering to highlight the
            # contour
            surf.actor.property.opacity = .5
            # Add a second surface to fill the contours
            if selected:
                dark_color = tuple(.3 * c for c in kwargs['color'])
            else:
                dark_color = tuple(.5 * c for c in kwargs['color'])
            surf2 = mlab.pipeline.contour_surface(
                surf, color=dark_color,
                line_width=8 if selected else 2,
            )
            # opacity=.5)
            # surf2.enable_contours = True
            surf2.contour.auto_contours = False
            surf2.contour.contours = [threshold, data.max()]
            actors[side].append(surf2)
    return actors
def plot_surf(mesh, data, view="right", threshold="85%", output_file=None): coords, triangles = surface.load_surf_mesh(mesh) x, y, z = coords.T i, j, k = triangles.T colors = colorscale(cold_hot, data, threshold) vertexcolor = _get_vertexcolor( data, colors["cmap"], colors["norm"], colors["abs_threshold"], surface.load_surf_data(fsaverage["sulc_right"]), ) mesh_3d = go.Mesh3d(x=x, y=y, z=z, i=i, j=j, k=k, vertexcolor=vertexcolor) fig = go.Figure(data=[mesh_3d]) fig.update_layout(scene_camera=CAMERAS[view], **LAYOUT) if output_file is not None: fig.write_image(output_file) return fig
def test_view_surf():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_right'])
    surf_map = mesh[0][:, 0]
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], '90%')
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'], surf_map,
                                  fsaverage['sulc_right'], .3)
    check_html(html)
    html = html_surface.view_surf(fsaverage['pial_right'])
    check_html(html)
    destrieux = datasets.fetch_atlas_surf_destrieux()['map_left']
    html = html_surface.view_surf(
        fsaverage['pial_left'], destrieux, symmetric_cmap=False)
    check_html(html)
    assert_raises(ValueError, html_surface.view_surf, mesh,
                  mesh[0][::2, 0])
    assert_raises(ValueError, html_surface.view_surf, mesh,
                  mesh[0][:, 0], bg_map=mesh[0][::2, 0])
def test_one_mesh_info():
    fsaverage = fetch_surf_fsaverage()
    mesh = surface.load_surf_mesh(fsaverage['pial_right'])
    surf_map = mesh[0][:, 0]
    info = html_surface.one_mesh_info(
        surf_map, fsaverage['pial_right'], '90%', black_bg=True,
        bg_map=fsaverage['sulc_right'])
    assert {'_x', '_y', '_z', '_i', '_j', '_k'}.issubset(
        info['inflated_left'].keys())
    assert len(decode(info['inflated_left']['_x'], '<f4')) == len(surf_map)
    assert len(info['vertexcolor_left']) == len(surf_map)
    cmax = np.max(np.abs(surf_map))
    assert (info['cmin'], info['cmax']) == (-cmax, cmax)
    assert type(info['cmax']) == float
    json.dumps(info)
    assert info['black_bg']
    assert not info['full_brain_mesh']
    check_colors(info['colorscale'])
def test_plot_surf_avg_method():
    mesh = generate_surf()
    rng = np.random.RandomState(42)
    # Plot with avg_method
    ## Test all built-in methods and check
    mapp = rng.standard_normal(size=mesh[0].shape[0])
    mesh_ = load_surf_mesh(mesh)
    coords, faces = mesh_[0], mesh_[1]

    for method in ['mean', 'median', 'min', 'max']:
        display = plot_surf(mesh, surf_map=mapp,
                            avg_method=method,
                            engine='matplotlib')
        if method == 'mean':
            agg_faces = np.mean(mapp[faces], axis=1)
        elif method == 'median':
            agg_faces = np.median(mapp[faces], axis=1)
        elif method == 'min':
            agg_faces = np.min(mapp[faces], axis=1)
        elif method == 'max':
            agg_faces = np.max(mapp[faces], axis=1)
        vmin = np.min(agg_faces)
        vmax = np.max(agg_faces)
        agg_faces -= vmin
        agg_faces /= (vmax - vmin)
        cmap = plt.cm.get_cmap(plt.rcParamsDefault['image.cmap'])
        assert_array_equal(
            cmap(agg_faces),
            display._axstack.as_list()[0].collections[0]._facecolors)

    ## Try custom avg_method
    def custom_avg_function(vertices):
        return vertices[0] * vertices[1] * vertices[2]

    plot_surf(
        mesh,
        surf_map=rng.standard_normal(size=mesh[0].shape[0]),
        avg_method=custom_avg_function,
        engine='matplotlib',
    )
    # Save execution time and memory
    plt.close()
def test_full_brain_info():
    surfaces = datasets.fetch_surf_fsaverage()
    img = _get_img()
    info = html_surface.full_brain_info(img, surfaces)
    check_colors(info['colorscale'])
    assert {'pial_left', 'pial_right', 'inflated_left', 'inflated_right',
            'vertexcolor_left', 'vertexcolor_right'}.issubset(info.keys())
    assert info['cmin'] == - info['cmax']
    assert info['full_brain_mesh']
    assert not info['black_bg']
    assert type(info['cmax']) == float
    json.dumps(info)
    for hemi in ['left', 'right']:
        mesh = surface.load_surf_mesh(surfaces['pial_{}'.format(hemi)])
        assert len(info['vertexcolor_{}'.format(hemi)]) == len(mesh[0])
        assert len(decode(
            info['inflated_{}'.format(hemi)]['_z'], '<f4')) == len(mesh[0])
        assert len(decode(
            info['pial_{}'.format(hemi)]['_j'], '<i4')) == len(mesh[1])
def clean_surface_map(maps, hemi, cluster_size):
    """Clean surface maps by removing small connected components"""
    from nilearn.surface import load_surf_mesh
    if os.path.exists('/neurospin/ibc'):
        dir_ = '/neurospin/ibc/derivatives/sub-01/ses-00/anat/fsaverage/surf'
    else:
        dir_ = '/storage/store/data/ibc/derivatives/sub-01/ses-00/anat/' + \
               'fsaverage/surf'
    if hemi == 'right':
        mesh = os.path.join(dir_, 'rh.inflated')
    else:
        mesh = os.path.join(dir_, 'lh.inflated')
    _, faces = load_surf_mesh(mesh)
    connectivity = faces_2_connectivity(faces)
    for i in range(maps.shape[1]):
        maps[:, i] = connected_components_cleaning(
            connectivity, maps[:, i], cluster_size=cluster_size)
    return maps
def test_full_brain_info():
    fsaverage = fetch_surf_fsaverage()
    img = _get_img()
    info = html_surface.full_brain_info(img)
    check_colors(info['colorscale'])
    assert {'pial_left', 'pial_right', 'inflated_left', 'inflated_right',
            'vertexcolor_left', 'vertexcolor_right'}.issubset(info.keys())
    assert info['cmin'] == - info['cmax']
    assert info['full_brain_mesh']
    assert not info['black_bg']
    assert type(info['cmax']) == float
    json.dumps(info)
    for hemi in ['left', 'right']:
        mesh = surface.load_surf_mesh(fsaverage['pial_{}'.format(hemi)])
        assert len(info['vertexcolor_{}'.format(hemi)]) == len(mesh[0])
        assert len(decode(
            info['inflated_{}'.format(hemi)]['_z'], '<f4')) == len(mesh[0])
        assert len(decode(
            info['pial_{}'.format(hemi)]['_j'], '<i4')) == len(mesh[1])
def test_load_surf_mesh_file_gii(tmp_path):
    # Test the loader `load_surf_mesh`

    # If nibabel is of older version we skip tests as nibabel does not
    # support intent argument and intent codes are not handled properly with
    # older versions
    if not LooseVersion(nb.__version__) >= LooseVersion('2.1.0'):
        raise pytest.skip('Nibabel version too old to handle intent codes')

    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    fd_mesh, filename_gii_mesh = tempfile.mkstemp(suffix='.gii',
                                                  dir=str(tmp_path))
    os.close(fd_mesh)
    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])
    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if incorrect gii raises error
    fd_no, filename_gii_mesh_no_point = tempfile.mkstemp(suffix='.gii',
                                                         dir=str(tmp_path))
    os.close(fd_no)
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    with pytest.raises(ValueError, match='NIFTI_INTENT_POINTSET'):
        load_surf_mesh(filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    fd_face, filename_gii_mesh_no_face = tempfile.mkstemp(suffix='.gii',
                                                          dir=str(tmp_path))
    os.close(fd_face)
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    with pytest.raises(ValueError, match='NIFTI_INTENT_TRIANGLE'):
        load_surf_mesh(filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
def surface():
    """ Returns 3D surface coordinates for plotting.

    Returns
    -------
    fsaverage : Dictionary-like (from the Nilearn documentation)
        The interest attributes are :
        - 'pial_left': Gifti file, left hemisphere pial surface mesh
        - 'pial_right': Gifti file, right hemisphere pial surface mesh
        - 'infl_left': Gifti file, left hemisphere inflated pial
          surface mesh
        - 'infl_right': Gifti file, right hemisphere inflated pial
          surface mesh
        - 'sulc_left': Gifti file, left hemisphere sulcal depth data
        - 'sulc_right': Gifti file, right hemisphere sulcal depth data
    surf : Dictionary-like
        Object containing the x, y, z coordinates as well as the
        i, j, k triangulation coordinates
    """
    fsaverage = fetch_surf_fsaverage('fsaverage')
    surf = {}
    for key in [t + '_' + h for t in ['pial', 'infl']
                for h in ['left', 'right']]:
        # Keep the loaded mesh in its own variable so the `surf` output
        # dict is not overwritten inside the loop.
        mesh = load_surf_mesh(fsaverage[key])
        x, y, z = np.asarray(mesh[0].T, dtype='<f4')
        i, j, k = np.asarray(mesh[1].T, dtype='<i4')
        surf[key] = dict(x=x, y=y, z=z, i=i, j=j, k=k)
    return fsaverage, surf
def to_three(mesh, stat_map, sample_mesh=None):
    if sample_mesh is None:
        sample_mesh = mesh
    mesh = surface.load_surf_mesh(mesh)
    coords = mesh[0][mesh[1].ravel()]
    surf_stat_map = surface.vol_to_surf(stat_map, sample_mesh)
    surf_stat_map -= surf_stat_map.min()
    surf_stat_map /= surf_stat_map.max()
    colors = cm.cold_hot(surf_stat_map[mesh[1].ravel()])[:, :3]
    center = list(map(float, mesh[0].mean(axis=0)))
    center = {'x': center[0], 'y': center[1], 'z': center[2]}
    vertices = np.asarray(coords.ravel(), dtype='<f4')
    # vertices = list(map(float, coords.ravel()))
    col = list(map(float, colors.ravel()))
    return {
        'INSERT_VERTICES_HERE': base64.b64encode(
            vertices.tobytes()).decode('utf-8'),  # json.dumps(vertices),
        'INSERT_COLORS_HERE': json.dumps(col),
        'INSERT_CENTER_POSITION_HERE': json.dumps(center)
    }
def test_load_surf_mesh_file_glob(tmp_path):
    mesh = generate_surf()
    fd1, fname1 = tempfile.mkstemp(suffix='.pial', dir=str(tmp_path))
    os.close(fd1)
    nb.freesurfer.write_geometry(fname1, mesh[0], mesh[1])
    fd2, fname2 = tempfile.mkstemp(suffix='.pial', dir=str(tmp_path))
    os.close(fd2)
    nb.freesurfer.write_geometry(fname2, mesh[0], mesh[1])

    with pytest.raises(ValueError,
                       match='More than one file matching path'):
        load_surf_mesh(os.path.join(os.path.dirname(fname1), "*.pial"))
    with pytest.raises(ValueError, match='No files matching path'):
        load_surf_mesh(
            os.path.join(os.path.dirname(fname1), "*.unlikelysuffix"))
    assert len(load_surf_mesh(fname1)) == 2
    assert_array_almost_equal(load_surf_mesh(fname1)[0], mesh[0])
    assert_array_almost_equal(load_surf_mesh(fname1)[1], mesh[1])
    os.remove(fname1)
    os.remove(fname2)
def test_load_surf_mesh_file_gii(tmp_path):
    # Test the loader `load_surf_mesh`
    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    fd_mesh, filename_gii_mesh = tempfile.mkstemp(suffix='.gii',
                                                  dir=str(tmp_path))
    os.close(fd_mesh)
    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])
    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if incorrect gii raises error
    fd_no, filename_gii_mesh_no_point = tempfile.mkstemp(suffix='.gii',
                                                         dir=str(tmp_path))
    os.close(fd_no)
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    with pytest.raises(ValueError, match='NIFTI_INTENT_POINTSET'):
        load_surf_mesh(filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    fd_face, filename_gii_mesh_no_face = tempfile.mkstemp(suffix='.gii',
                                                          dir=str(tmp_path))
    os.close(fd_face)
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    with pytest.raises(ValueError, match='NIFTI_INTENT_TRIANGLE'):
        load_surf_mesh(filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
# Display connectome from surface parcellation
#
# The following code extracts 3D coordinates of surface parcels (a.k.a. labels
# in the Freesurfer naming convention). To do so we load the pial surface
# of fsaverage subject, get the vertices contained in each parcel and compute
# the mean location to obtain the coordinates.
import numpy as np
from nilearn import surface

atlas = destrieux_atlas
coordinates = []
labels = destrieux_atlas['labels']
for hemi in ['left', 'right']:
    vert = destrieux_atlas['map_%s' % hemi]
    rr, _ = surface.load_surf_mesh(fsaverage['pial_%s' % hemi])
    for k, label in enumerate(labels):
        if "Unknown" not in str(label):  # Omit the Unknown label.
            # Compute mean location of vertices in label of index k
            coordinates.append(np.mean(rr[vert == k], axis=0))

coordinates = np.array(coordinates)  # 3D coordinates of parcels

# We now make a synthetic connectivity matrix that connects labels
# between left and right hemispheres.
n_parcels = len(coordinates)
corr = np.zeros((n_parcels, n_parcels))
n_parcels_hemi = n_parcels // 2
corr[np.arange(n_parcels_hemi),
     np.arange(n_parcels_hemi) + n_parcels_hemi] = 1
corr = corr + corr.T
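# One possible way to display the synthetic connectome built above (not part
# of the original snippet): nilearn's view_connectome takes an adjacency
# matrix and node coordinates. The edge_threshold value is only illustrative.
from nilearn import plotting

view = plotting.view_connectome(corr, coordinates, edge_threshold="90%")
view.open_in_browser()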
def test_load_surf_mesh_list():
    # test if correct list is returned
    mesh = generate_surf()
    assert len(load_surf_mesh(mesh)) == 2
    assert_array_equal(load_surf_mesh(mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(mesh)[1], mesh[1])
    # test if incorrect list, array or dict raises error
    with pytest.raises(ValueError, match='it must have two elements'):
        load_surf_mesh([])
    with pytest.raises(ValueError, match='it must have two elements'):
        load_surf_mesh([mesh[0]])
    with pytest.raises(ValueError, match='it must have two elements'):
        load_surf_mesh([mesh[0], mesh[1], mesh[1]])
    with pytest.raises(ValueError, match='input type is not recognized'):
        load_surf_mesh(mesh[0])
    with pytest.raises(ValueError, match='input type is not recognized'):
        load_surf_mesh(dict())
    del mesh