def add_noise_to_surface(s, noise_level=.05):
    """Return a copy of surface *s* with zero-mean uniform noise on vertices.

    Parameters
    ----------
    s : Surface
        Input surface; not modified.
    noise_level : float
        Total width of the uniform noise interval; each coordinate is
        perturbed by a draw from [-.5 * noise_level, +.5 * noise_level).

    Returns
    -------
    Surface
        New surface with perturbed vertices and the original faces
        (constructed with check=False, as the original did).
    """
    vertices = s.vertices
    # BUG FIX: the original called np.random.uniform with
    # low == high == -.5 * noise_level (a degenerate interval), then
    # discarded that array and instead added uniform [0, noise_level)
    # noise, which shifts the whole surface. Draw symmetric, zero-mean
    # noise and actually use it.
    noise = np.random.uniform(size=vertices.shape,
                              low=-.5 * noise_level,
                              high=.5 * noise_level)
    vertices_noisy = vertices + noise
    return Surface(vertices_noisy, s.faces, check=False)
def get_freesurfer_surface():
    """Load and merge both hemispheres' mid-thickness surfaces.

    For each hemisphere the white and pial meshes are averaged
    vertex-wise (their faces are identical), approximating a
    mid-gray surface; the two hemisphere surfaces are then merged.
    """
    hemi_surfaces = []
    for hemi in 'lr':
        white_fn = os.path.join(surface_path, '{lr}h.white'.format(lr=hemi))
        pial_fn = os.path.join(surface_path, '{lr}h.pial'.format(lr=hemi))
        white_coords, white_faces = nib.freesurfer.read_geometry(white_fn)
        pial_coords, pial_faces = nib.freesurfer.read_geometry(pial_fn)
        # take the average of these two. Faces are identical.
        hemi_surfaces.append(
            Surface((white_coords + pial_coords) * 0.5, white_faces))
    left, right = hemi_surfaces
    return left.merge(right)
def test_surface_flatten(self, dim):
    """Test flattening of a noisy plane surface built along axis *dim*.

    Generates a plane spanned by unit vectors along axes ``dim`` and
    ``(dim + 1) % 3``, perturbs its vertices with uniform noise, marks a
    fraction of vertices as NaN (as might be the case for flat
    surfaces), then checks that (1) the NaN-aware mean face normal
    matches the average of the finite face normals, and (2) flattening
    to 2D preserves pairwise coordinates up to rotation within the
    injected noise level.
    """
    def unit_vec3(dim, scale):
        # unit vector along axis `dim`, scaled by `scale`
        v = [0, 0, 0]
        v[dim] = float(scale)
        return tuple(v)

    origin = (0, 0, 0)
    plane_size = 10
    scale = 1.
    vec1 = unit_vec3(dim, scale=scale)
    vec2 = unit_vec3((dim + 1) % 3, scale=scale)

    plane = generate_plane(origin, vec1, vec2, plane_size, plane_size)

    noise_level = .05
    nan_vertices_ratio = .05

    # add some noise to spatial coordinates
    vertices = plane.vertices
    noise = np.random.uniform(size=vertices.shape,
                              low=-.5,
                              high=.5) * noise_level * scale
    vertices_noisy = vertices + noise

    # make some vertices NaN (as might be the case for flat surfaces)
    # BUG FIX: np.int and np.random.random_integers were removed from
    # NumPy; use plain int() and randint, which samples directly from
    # [0, nvertices) (the old code sampled [1, nvertices] and
    # subtracted 1).
    nan_count = int(np.ceil(plane.nvertices * nan_vertices_ratio))
    nan_vertices = np.random.randint(plane.nvertices, size=(nan_count,))
    vertices_noisy[nan_vertices, dim] = np.nan
    plane_noisy = Surface(vertices_noisy, plane.faces)

    # compute normals
    f_normal = plane_noisy.face_normals

    # find average normal over faces with all-finite normals
    non_nan_f_normal = np.logical_not(np.any(np.isnan(f_normal), axis=1))
    f_normal_avg = np.mean(f_normal[non_nan_f_normal], axis=0)

    # test average normal
    assert_array_almost_equal(plane.nanmean_face_normal, f_normal_avg,
                              decimal=2)

    # the output has only x and y coordinates; with z-coordinates set
    # to zero, the coordinates must be at similar pairwise distances
    max_deformation = .1
    x, y = flat_surface2xy(plane_noisy, max_deformation)
    n_vertices = plane.nvertices
    z = np.zeros((n_vertices,))
    flat_xyz = np.asarray((x, y, z))

    # nodes are rotated must have same pairwise distance as
    # the original surface
    max_difference = 3 * noise_level
    SurfingSurfaceTests.assert_coordinates_almost_equal_modulo_rotation(
        flat_xyz.T, plane.vertices, max_difference)
def set_nan_to_surface_vertices(s, nan_ratio=.05):
    """Return a copy of surface *s* with a random subset of NaN vertices.

    Parameters
    ----------
    s : Surface
        Input surface; not modified.
    nan_ratio : float
        Fraction of vertices (rounded up) whose coordinates are set to
        NaN, as might be the case for flat surfaces.

    Returns
    -------
    Surface
        New surface with NaN-ed vertices and the original faces
        (constructed with check=False, as the original did).
    """
    nan_count = int(np.ceil(s.nvertices * nan_ratio))
    # BUG FIX: np.random.random_integers was removed from NumPy; use
    # randint, which samples directly from [0, nvertices) (the old code
    # sampled [1, nvertices] and subtracted 1).
    nan_vertices_ids = np.random.randint(s.nvertices, size=(nan_count,))
    # adding 0. forces a float copy so the NaN assignment cannot
    # mutate s.vertices (or fail on an integer array)
    vertices_noisy = s.vertices + 0.
    vertices_noisy[nan_vertices_ids, :] = np.nan
    return Surface(vertices_noisy, s.faces, check=False)
def test_average_node_edge_length_tiny(self):
    """Average node edge length on a single right triangle.

    The triangle has legs of random lengths *a* and *b* along two axes
    and hypotenuse *c*; each node's expected value is the mean length
    of the two edges meeting at that node.
    """
    leg_a = np.random.uniform(low=2, high=5)
    leg_b = np.random.uniform(low=2, high=5)
    hyp = (leg_a ** 2 + leg_b ** 2) ** .5

    triangle = Surface([(0, 0, 0), (0, 0, leg_a), (0, leg_b, 0)],
                       [(0, 1, 2)])

    # node 0 touches both legs; node 1 touches leg a and the
    # hypotenuse; node 2 touches leg b and the hypotenuse
    expected_avg = [(leg_a + leg_b) / 2,
                    (leg_a + hyp) / 2,
                    (leg_b + hyp) / 2]
    assert_almost_equal(triangle.average_node_edge_length, expected_avg)
def load_surface(lr):
    """Load the fsaverage mid-thickness surface for one hemisphere.

    Parameters
    ----------
    lr : str
        Hemisphere prefix, 'l' or 'r'.

    Returns
    -------
    Surface
        Surface whose coordinates are the vertex-wise mean of the
        white and pial meshes (faces taken from the last mesh read;
        the two are expected to share topology).
    """
    all_coords = []
    for surf_type in ['white', 'pial']:
        # BUG FIX: the hemisphere argument was ignored and 'lh' was
        # hard-coded, so load_surface('r') silently returned the left
        # hemisphere.
        coords, faces = read_geometry(
            '/data_dir/freesurfer/'
            'subjects/fsaverage/surf/{lr}h.{surf_type}'.format(
                lr=lr, surf_type=surf_type))
        all_coords.append(coords)
    # np.float was removed from NumPy; builtin float is equivalent here
    coords = np.array(all_coords).astype(float).mean(axis=0)
    surf = Surface(coords, faces)
    return surf
def test_gifti_dataset_with_anatomical_surface(fn, format_, include_nodes):
    """map2gifti with a surface: check darray order, dtypes and round-trip.

    Expected darray layout: optional node-index array, one float32
    array per sample, then float32 vertices and int32 faces. Reading
    the image back as a dataset must ignore the anatomical arrays.
    """
    ds = _get_test_dataset(include_nodes)
    nsamples, nfeatures = ds.shape

    coords = np.random.normal(size=(nfeatures, 3))
    tri = np.asarray([offset + np.arange(3)
                      for offset in range(2 * nfeatures)]) % nfeatures
    img = map2gifti(ds, surface=Surface(coords, tri))

    idx = 0
    if include_nodes:
        # node-index array comes first when node attributes are present
        node_arr = img.darrays[idx]
        assert_equal(node_arr.intent,
                     intent_codes.code['NIFTI_INTENT_NODE_INDEX'])
        assert_equal(node_arr.coordsys, None)
        assert_equal(node_arr.data.dtype, np.int32)
        assert_equal(node_arr.datatype, data_type_codes['int32'])
        idx += 1

    # one float32 data array per sample, in sample order
    for sample in ds.samples:
        sample_arr = img.darrays[idx]
        data = sample_arr.data
        assert_almost_equal(data, sample)
        assert_equal(sample_arr.coordsys, None)
        assert_equal(sample_arr.data.dtype, np.float32)
        assert_equal(sample_arr.datatype, data_type_codes['float32'])
        idx += 1

    # vertex coordinates follow the sample arrays
    vertex_arr = img.darrays[idx]
    assert_almost_equal(vertex_arr.data, coords)
    assert_equal(vertex_arr.data.dtype, np.float32)
    assert_equal(vertex_arr.datatype, data_type_codes['float32'])

    # faces come last
    idx += 1
    face_arr = img.darrays[idx]
    assert_almost_equal(face_arr.data, tri)
    assert_equal(face_arr.data.dtype, np.int32)
    assert_equal(face_arr.datatype, data_type_codes['int32'])

    # getting the functional data should ignore faces and vertices
    ds_again = gifti_dataset(img)
    assert_datasets_almost_equal(ds, ds_again)
def get_freesurfer_surfaces(hemi):
    """
    Parameters:
    -----------
    hemi: hemisphere (can be 'r','l','b')

    Returns:
    surf: a freesurfer surface created with the .white and .pial files.
    """
    import nibabel as nib
    from mvpa2.support.nibabel.surf import Surface

    # 'b' (both): load each hemisphere recursively and merge them
    if hemi == 'b':
        left = get_freesurfer_surfaces('l')
        right = get_freesurfer_surfaces('r')
        return left.merge(right)

    white_fn = os.path.join(utils.basedir, '{lr}h.white'.format(lr=hemi))
    pial_fn = os.path.join(utils.basedir, '{lr}h.pial'.format(lr=hemi))
    white_coords, white_faces = nib.freesurfer.read_geometry(white_fn)
    pial_coords, pial_faces = nib.freesurfer.read_geometry(pial_fn)

    # both meshes must share the same topology before averaging
    np.testing.assert_array_equal(white_faces, pial_faces)
    return Surface((white_coords + pial_coords) * 0.5, white_faces)
def get_testdata(self):
    """Build synthetic per-subject train/test datasets on 4 triangles.

    Returns ``(dss_train, dss_test, surface)``: for each of 10
    "subjects", z-scored first/second halves of a dataset sharing a
    common coarse- plus fine-scale signal, where each subject's data
    within each triangle is transformed by an independent random 3D
    rotation.
    """
    # rs = np.random.RandomState(0)
    rs = np.random.RandomState()
    nt = 200
    n_triangles = 4
    ns = 10
    nv = n_triangles * 3

    # 4 separated triangles
    vertices = np.zeros((nv, 3))
    faces = []
    for tri in range(n_triangles):
        base = tri * 3
        vertices[base] = [tri * 2, 0, 0]
        vertices[base + 1] = [tri * 2 + 1, 1 / np.sqrt(3), 0]
        vertices[base + 2] = [tri * 2 + 1, -1 / np.sqrt(3), 0]
        faces.append([base, base + 1, base + 2])
    faces = np.array(faces)
    surface = Surface(vertices, faces)

    ds_orig = np.zeros((nt, nv))
    # add coarse-scale information (shared within each triangle)
    for tri in range(n_triangles):
        ds_orig[:, tri * 3:(tri + 1) * 3] += rs.normal(size=(nt, 1))
    # add fine-scale information (per node)
    ds_orig += rs.normal(size=(nt, nv))

    dss_train, dss_test = [], []
    for subject in range(ns):
        data = np.zeros_like(ds_orig)
        for tri in range(n_triangles):
            cols = slice(tri * 3, (tri + 1) * 3)
            data[:, cols] = np.dot(ds_orig[:, cols],
                                   get_random_rotation(3))
            # special_ortho_group.rvs(3, random_state=rs))
        ds = Dataset(data)
        ds.fa['node_indices'] = np.arange(nv)
        ds_train, ds_test = ds[:nt // 2, :], ds[nt // 2:, :]
        zscore(ds_train, chunks_attr=None)
        zscore(ds_test, chunks_attr=None)
        dss_train.append(ds_train)
        dss_test.append(ds_test)
    return dss_train, dss_test, surface