def test_surface_face_normal(self, do_deterioriate_surface):
    vec1 = np.random.normal(size=(3,))
    vec2 = np.random.normal(size=(3,))
    vec_normal = -np.cross(vec1, vec2)

    plane = generate_plane((0, 0, 0), vec1, vec2, 10, 10)
    if do_deterioriate_surface:
        plane = SurfingSurfaceTests.deterioriate_surface(plane)

    plane_face_normals = plane.face_normals

    has_non_nan = False
    for f_n in plane_face_normals:
        if np.any(np.isnan(f_n)):
            continue
        assert_vector_direction_almost_equal(f_n, vec_normal,
                                             decimal=0)
        assert_almost_equal(f_n,
                            surf.normalized(plane.nanmean_face_normal),
                            decimal=0)
        has_non_nan = True

    if not has_non_nan:
        assert False, "Test should include faces with non-NaN normals"
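# A minimal standalone sketch (not part of the test suite) of the geometric
# fact the test above relies on: every triangle lying in the plane spanned
# by vec1 and vec2 has a face normal parallel to cross(vec1, vec2), so the
# normals can differ only in sign and scale.
def _sketch_face_normal_identity():
    import numpy as np

    vec1 = np.array([1., 0., 0.])
    vec2 = np.array([0., 1., 1.])
    plane_normal = np.cross(vec1, vec2)

    # a triangle in the plane: both edges are combinations of vec1 and vec2
    a = np.zeros(3)
    b = a + 2 * vec1
    c = a + vec1 + 3 * vec2
    face_normal = np.cross(b - a, c - a)

    # parallel vectors have a vanishing cross product
    assert np.allclose(np.cross(face_normal, plane_normal), 0)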
def test_average_node_edge_length(self):
    for side in xrange(1, 5):
        s_flat = surf.generate_plane((0, 0, 0), (0, 0, 1), (0, 1, 0), 6, 6)
        # noise is currently disabled (scaled by zero), so the surface
        # stays flat; the reference computation below works either way
        rnd_xyz = 0 * np.random.normal(size=s_flat.vertices.shape)
        s = surf.Surface(s_flat.vertices + rnd_xyz, s_flat.faces)

        nvertices = s.nvertices
        sd = np.zeros((nvertices,))
        c = np.zeros((nvertices,))

        def d(src, trg, vertices=s.vertices):
            delta = vertices[src, :] - vertices[trg, :]
            return np.sum(delta ** 2) ** .5

        # accumulate the length of each edge at both of its endpoints
        for i_face in s.faces:
            for i in xrange(3):
                src = i_face[i]
                trg = i_face[(i + 1) % 3]
                sd[src] += d(src, trg)
                sd[trg] += d(src, trg)
                c[src] += 1
                c[trg] += 1

        assert_array_almost_equal(sd / c, s.average_node_edge_length)
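# A minimal worked example (not part of the test suite) of the accumulation
# performed by the test above, reduced to a single triangle: every edge
# contributes its length to both endpoints, and dividing the accumulated
# sum by the count yields the per-node mean incident-edge length.
def _sketch_average_node_edge_length():
    import numpy as np

    vertices = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])
    faces = np.array([[0, 1, 2]])

    sums = np.zeros(len(vertices))
    counts = np.zeros(len(vertices))
    for face in faces:
        for i in range(3):
            src, trg = face[i], face[(i + 1) % 3]
            length = np.linalg.norm(vertices[src] - vertices[trg])
            sums[[src, trg]] += length
            counts[[src, trg]] += 1

    # node 0 touches two unit edges; nodes 1 and 2 each touch a unit
    # edge and the hypotenuse of length sqrt(2)
    expected = [1., (1 + np.sqrt(2)) / 2, (1 + np.sqrt(2)) / 2]
    assert np.allclose(sums / counts, expected)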
def test_volsurf_projections(self):
    white = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 10, 10)
    pial = white + np.asarray([[1, 0, 0]])
    above = pial + np.asarray([[3, 0, 0]])
    vg = volgeom.VolGeom((10, 10, 10), np.eye(4))
    vs = volsurf.VolSurfMaximalMapping(vg, white, pial)

    dx = pial.vertices - white.vertices

    for s, w in ((white, 0), (pial, 1), (above, 4)):
        xyz = s.vertices
        ws = vs.surf_project_weights(True, xyz)
        delta = vs.surf_unproject_weights_nodewise(ws) - xyz
        assert_array_equal(delta, np.zeros((100, 3)))
        assert_true(np.all(w == ws))

    vs = volsurf.VolSurfMaximalMapping(vg, white, pial, nsteps=2)
    n2vs = vs.get_node2voxels_mapping()
    assert_equal(n2vs, dict((i, {i: 0., i + 100: 1.})
                            for i in xrange(100)))

    nd = 17
    ds_mm_expected = np.sum((above.vertices - pial.vertices[nd, :]) ** 2,
                            1) ** .5
    ds_mm = vs.coordinates_to_grey_distance_mm(nd, above.vertices)
    assert_array_almost_equal(ds_mm_expected, ds_mm)

    ds_mm_nodewise = vs.coordinates_to_grey_distance_mm(True,
                                                        above.vertices)
    assert_array_equal(ds_mm_nodewise, np.ones((100,)) * 3)
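# A minimal sketch (not part of the test suite) of the projection-weight
# convention the test above verifies: a point at weight w lies at
# white + w * (pial - white), so the white surface projects to w=0, the
# pial surface to w=1, and a point 3 units beyond a unit-thick grey
# matter ribbon to w=4.
def _sketch_projection_weights():
    import numpy as np

    white = np.zeros(3)
    pial = white + np.array([1., 0., 0.])
    above = pial + np.array([3., 0., 0.])

    dx = pial - white
    w = np.dot(above - white, dx) / np.dot(dx, dx)
    assert np.isclose(w, 4.)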
def test_surface_minimal_lowres_voxel_selection(self, fn):
    vol_shape = (4, 10, 10, 1)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # make surfaces that are far away from all voxels
    # in the volume
    sphere_density = 10
    radius = 10

    outer = surf.generate_plane((0, 0, 4), (0, .4, 0), (0, 0, .4),
                                14, 14)
    inner = outer + 2
    source = surf.generate_plane((0, 0, 4), (0, .8, 0), (0, 0, .8),
                                 7, 7) + 1

    for i, nvm in enumerate(('minimal', 'minimal_lowres')):
        qe = disc_surface_queryengine(radius, vg, inner, outer, source,
                                      node_voxel_mapping=nvm)
        voxsel = qe.voxsel

        if i == 0:
            voxsel0 = voxsel
        else:
            assert_equal(voxsel.keys(), voxsel0.keys())
            for k in voxsel.keys():
                p = voxsel[k]
                q = voxsel0[k]

                # require substantial agreement: the symmetric
                # difference must stay below 80% of the combined sizes
                delta = set.symmetric_difference(set(p), set(q))
                assert_true(len(delta) < .8 * (len(p) + len(q)))

        if externals.exists('h5py'):
            from mvpa2.base.hdf5 import h5save, h5load
            h5save(fn, voxsel)
            voxsel_copy = h5load(fn)
            assert_equal(voxsel.keys(), voxsel_copy.keys())

            for id in qe.ids:
                assert_array_equal(voxsel.get(id), voxsel_copy.get(id))
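# A small sketch (not part of the test suite) of the agreement measure
# used above: the symmetric difference contains the elements selected by
# exactly one of the two mappings, so a small difference relative to the
# combined sizes means the two node-voxel mappings mostly agree.
def _sketch_symmetric_difference_agreement():
    p = set([1, 2, 3, 4])
    q = set([2, 3, 4, 5])
    delta = set.symmetric_difference(p, q)
    assert delta == set([1, 5])
    # same criterion as in the test: disagreement below 80% of total size
    assert len(delta) < .8 * (len(p) + len(q))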
def test_surface_flatten(self, dim):
    def unit_vec3(dim, scale):
        v = [0, 0, 0]
        v[dim] = float(scale)
        return tuple(v)

    origin = (0, 0, 0)
    plane_size = 10
    scale = 1.
    vec1 = unit_vec3(dim, scale=scale)
    vec2 = unit_vec3((dim + 1) % 3, scale=scale)

    plane = generate_plane(origin, vec1, vec2, plane_size, plane_size)

    noise_level = .05
    nan_vertices_ratio = .05

    # add some noise to spatial coordinates
    vertices = plane.vertices
    noise = np.random.uniform(size=vertices.shape,
                              low=-.5, high=.5) * noise_level * scale
    vertices_noisy = vertices + noise

    # make some vertices NaN (as might be the case for flat surfaces)
    nan_count_float = plane.nvertices * nan_vertices_ratio
    nan_count = np.ceil(nan_count_float).astype(np.int)
    nan_vertices = np.random.random_integers(plane.nvertices,
                                             size=(nan_count,)) - 1
    vertices_noisy[nan_vertices, dim] = np.nan
    plane_noisy = Surface(vertices_noisy, plane.faces)

    # compute normals
    f_normal = plane_noisy.face_normals

    # find average normal
    non_nan_f_normal = np.logical_not(np.any(np.isnan(f_normal), axis=1))
    f_normal_avg = np.mean(f_normal[non_nan_f_normal], axis=0)

    # test average normal
    assert_array_almost_equal(plane.nanmean_face_normal, f_normal_avg,
                              decimal=2)

    # the output has only x and y coordinates; with z-coordinates set
    # to zero, the coordinates must be at similar pairwise distances
    max_deformation = .1
    x, y = flat_surface2xy(plane_noisy, max_deformation)
    n_vertices = plane.nvertices
    z = np.zeros((n_vertices,))
    flat_xyz = np.asarray((x, y, z))

    # the flattened nodes are rotated, so they must have the same
    # pairwise distances as the original surface
    max_difference = 3 * noise_level
    SurfingSurfaceTests.assert_coordinates_almost_equal_modulo_rotation(
        flat_xyz.T, plane.vertices, max_difference)
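# A minimal sketch (not part of the test suite) of the invariant behind
# assert_coordinates_almost_equal_modulo_rotation: a rigid rotation
# preserves all pairwise distances, so flattened coordinates can be
# compared to the originals through their distance matrices rather than
# their raw values.
def _sketch_rotation_preserves_pairwise_distances():
    import numpy as np

    theta = .3
    rot = np.array([[np.cos(theta), -np.sin(theta), 0.],
                    [np.sin(theta), np.cos(theta), 0.],
                    [0., 0., 1.]])
    xyz = np.random.normal(size=(5, 3))
    xyz_rot = xyz.dot(rot.T)

    def pairwise_distances(a):
        diff = a[:, np.newaxis, :] - a[np.newaxis, :, :]
        return np.sqrt((diff ** 2).sum(axis=-1))

    assert np.allclose(pairwise_distances(xyz),
                       pairwise_distances(xyz_rot))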
def test_surf_ring_queryengine(self):
    s = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 4, 5)

    # add a second layer
    s2 = surf.merge(s, (s + (.01, 0, 0)))

    ds = Dataset(samples=np.arange(20)[np.newaxis],
                 fa=dict(node_indices=np.arange(39, 0, -2)))

    # add more features (with shared node indices)
    ds3 = hstack((ds, ds, ds))

    radius = 2.5
    inner_radius = 1.0

    # make sure an error is raised if inner_radius >= radius
    assert_raises(ValueError,
                  lambda: queryengine.SurfaceRingQueryEngine(
                      surface=s2, inner_radius=2.5, radius=radius))

    distance_metrics = ('euclidean', 'dijkstra', 'euclidean', 'dijkstra')
    for distance_metric, include_center in zip(distance_metrics,
                                               [True, False] * 2):
        qe = queryengine.SurfaceRingQueryEngine(
            surface=s2, radius=radius, inner_radius=inner_radius,
            distance_metric=distance_metric,
            include_center=include_center)

        # untrained qe should give errors
        assert_raises(ValueError, lambda: qe.ids)
        assert_raises(ValueError, lambda: qe.query_byid(0))

        # node index out of bounds should give error
        ds_ = ds.copy()
        ds_.fa.node_indices[0] = 100
        assert_raises(ValueError, lambda: qe.train(ds_))

        # lack of node indices should give error
        ds_.fa.pop('node_indices')
        assert_raises(ValueError, lambda: qe.train(ds_))

        # train the qe
        qe.train(ds3)

        for node in np.arange(-1, s2.nvertices + 1):
            if node < 0 or node >= s2.nvertices:
                assert_raises(KeyError, lambda: qe.query_byid(node))
                continue

            feature_ids = np.asarray(qe.query_byid(node))

            # node indices relative to ds
            base_ids = feature_ids[feature_ids < 20]

            # should have multiples of 20
            assert_equal(set(feature_ids),
                         set((base_ids[np.newaxis].T +
                              [0, 20, 40]).ravel()))

            node_indices = s2.circlearound_n2d(node, radius,
                                               distance_metric
                                               or 'dijkstra')
            fa_indices = [fa_index for fa_index, inode
                          in enumerate(ds3.fa.node_indices)
                          if inode in node_indices
                          and node_indices[inode] > inner_radius]
            if include_center and node in ds3.fa.node_indices:
                fa_indices += np.where(
                    ds3.fa.node_indices == node)[0].tolist()

            assert_equal(set(feature_ids), set(fa_indices))
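# A minimal sketch (not part of the test suite) of the selection rule
# that the ring query engine test verifies, assuming a node -> distance
# mapping of the kind returned by circlearound_n2d: keep the nodes
# strictly beyond the inner radius, and re-add the center node (at
# distance zero) only when requested.
def _sketch_ring_selection():
    def ring_select(node2dist, inner_radius, center,
                    include_center=False):
        selected = set(n for n, d in node2dist.items()
                       if d > inner_radius)
        if include_center:
            selected.add(center)
        return selected

    node2dist = {0: 0., 1: .8, 2: 1.7, 3: 2.4}
    assert ring_select(node2dist, 1., 0) == set([2, 3])
    assert ring_select(node2dist, 1., 0,
                       include_center=True) == set([0, 2, 3])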
def test_surf_pairs(self):
    o, x, y = map(np.asarray, [(0, 0, 0), (0, 1, 0), (1, 0, 0)])
    d = np.asarray((0, 0, .1))
    n = 10
    s1 = surf.generate_plane(o, x, y, n, n)
    s2 = surf.generate_plane(o + d, x, y, n, n)
    s = surf.merge(s1, s2)

    # try for small surface: each node is paired only with its copy
    # in the other layer, at distance .1
    eps = .0000001
    pw = s.pairwise_near_nodes(.5)
    for i in xrange(n ** 2):
        d = pw.pop((i, i + 100))
        assert_array_almost_equal(d, .1)
    assert_true(len(pw) == 0)

    # querying again must give the same result
    pw = s.pairwise_near_nodes(.5)
    for i in xrange(n ** 2):
        d = pw.pop((i, i + 100))
        assert_array_almost_equal(d, .1)
    assert_true(len(pw) == 0)

    # bigger radius: in-plane neighbors in the other layer are
    # included as well
    pw = s.pairwise_near_nodes(1.4)
    for i in xrange(n ** 2):
        p, q = i // n, i % n
        offsets = sum(([] if q == 0 else [-1],
                       [] if q == n - 1 else [+1],
                       [] if p == 0 else [-n],
                       [] if p == n - 1 else [n],
                       [0]), [])
        for offset in offsets:
            ii = i + offset + n ** 2
            d = pw.pop((i, ii))
            # only the vertically opposite node (offset 0) is closer
            # than .5; all other pairs are further away
            assert_true((d < .5) ^ (offset != 0))
    assert_true(len(pw) == 0)
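# A small sketch (not part of the test suite) of the offset bookkeeping
# used above: for an n-by-n plane stored row-major, node i sits at
# (row, col) = (i // n, i % n) and its in-plane neighbors are at offsets
# -1/+1 (same row) and -n/+n (adjacent rows), with border nodes losing
# the offsets that would leave the grid.
def _sketch_grid_neighbor_offsets():
    n = 10

    def neighbor_offsets(i):
        p, q = divmod(i, n)
        offsets = []
        if q > 0:
            offsets.append(-1)
        if q < n - 1:
            offsets.append(+1)
        if p > 0:
            offsets.append(-n)
        if p < n - 1:
            offsets.append(+n)
        return offsets

    assert neighbor_offsets(0) == [1, n]        # corner: two neighbors
    assert len(neighbor_offsets(n + 1)) == 4    # interior: four neighbors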
def test_surf_border(self):
    s = surf.generate_sphere(3)
    assert_array_equal(s.nodes_on_border(), [False] * 11)

    s = surf.generate_plane((0, 0, 0), (0, 1, 0), (1, 0, 0), 10, 10)
    b = s.nodes_on_border()
    v = s.vertices
    vb = reduce(np.logical_or, [v[:, 0] == 0,
                                v[:, 1] == 0,
                                v[:, 0] == 9,
                                v[:, 1] == 9])
    assert_array_equal(b, vb)
    assert_true(s.nodes_on_border(0))
def test_flat_surface_plotting(self):
    side = 10
    step = 1 / float(side)
    plane = surf.generate_plane((0, 0, 0), (step, 0, 0), (0, step, 0),
                                side, side)

    # generate data with simple gradient
    data = plane.vertices[:, 0] - plane.vertices[:, 1]
    data = (data - np.min(data)) / (np.max(data) - np.min(data))

    color_map = None
    img_side = 50
    fsp = FlatSurfacePlotter(plane, min_nsteps=img_side,
                             color_map=color_map)
    img_arr = fsp(data)

    # verify shape
    ##assert_equal(img_arr.shape, (img_side, img_side, 4))

    # get colors
    cmap = plt.get_cmap(color_map)
    expected_img_arr = cmap(data)

    # map vertex coordinates to indices in img_arr
    # (using nearest neighbor interpolation)
    xs = (plane.vertices[:, 0] * img_side).astype(np.int)
    ys = (plane.vertices[:, 1] * img_side).astype(np.int)

    # allocate space for rgb values
    img_rgb = np.zeros((plane.nvertices, 3))
    expected_img_rgb = np.zeros((plane.nvertices, 3))

    # store expected and found RGB values
    for i, (x, y) in enumerate(zip(xs, ys)):
        img_rgb[i] = img_arr[y, x, :3]
        expected_img_rgb[i] = expected_img_arr[i, :3]

    # RGB values should match
    c = np.corrcoef(img_rgb.T, expected_img_rgb.T)[:3, 3:6]
    assert (np.all(np.diag(c) > .9))
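# A minimal sketch (not part of the test suite) of the color mapping the
# test above compares against: passing scalar data in [0, 1] through a
# matplotlib colormap produces one RGBA row per value, and a name of
# None selects the default colormap.
def _sketch_colormap_to_rgba():
    import numpy as np
    import matplotlib.pyplot as plt

    data = np.linspace(0., 1., 5)
    cmap = plt.get_cmap(None)
    rgba = cmap(data)
    assert rgba.shape == (5, 4)
    assert np.all((rgba >= 0) & (rgba <= 1))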
def test_surfing_nodes_on_border_paths_surface_with_hole(self):
    s = surf.generate_plane((0, 0, 0), (0, 0, 1), (0, 1, 0), 6, 6)
    faces_to_remove = [1, 3, 7, 8, 3, 12, 13, 14, 22]
    faces_to_keep = np.setdiff1d(np.arange(s.nfaces), faces_to_remove)
    faces_to_add = [(0, 3, 10), (0, 4, 7), (0, 6, 4)]
    faces_hole = np.vstack((s.faces[faces_to_keep], faces_to_add))

    s_hole = surf.Surface(s.vertices, faces_hole)

    pths = s_hole.nodes_on_border_paths()

    expected_pths = [[1, 6, 4, 7, 0],
                     [3, 4, 9, 8, 2],
                     [11, 17, 23, 29, 35, 34, 33, 32, 31,
                      30, 24, 18, 12, 6, 7, 13, 19, 14, 9, 10, 5]]

    def as_sorted_sets(xs):
        return sorted(map(set, xs), key=min)

    assert_equal(as_sorted_sets(pths), as_sorted_sets(expected_pths))
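# A small sketch (not part of the test suite) of the order-insensitive
# comparison used above: border paths may start at any node and run in
# either direction, so each path is reduced to a set of node ids and the
# sets are sorted by their smallest element before comparison.
def _sketch_path_comparison():
    def as_sorted_sets(xs):
        return sorted(map(set, xs), key=min)

    a = [[2, 3, 1], [5, 4]]
    b = [[4, 5], [1, 2, 3]]
    assert as_sorted_sets(a) == as_sorted_sets(b)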
def test_surf_queryengine(self, qefn):
    s = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 4, 5)

    # add a second layer
    s2 = surf.merge(s, (s + (.01, 0, 0)))

    ds = Dataset(samples=np.arange(20)[np.newaxis],
                 fa=dict(node_indices=np.arange(39, 0, -2)))

    # add more features (with shared node indices)
    ds3 = hstack((ds, ds, ds))

    radius = 2.5

    # Note: sweepargs is not used to avoid re-generating the same
    # surface and dataset multiple times.
    for distance_metric in ('euclidean', 'dijkstra', '<illegal>', None):
        builder = lambda: queryengine.SurfaceQueryEngine(s2, radius,
                                                         distance_metric)
        if distance_metric in ('<illegal>', None):
            assert_raises(ValueError, builder)
            continue

        qe = builder()

        # test i/o and ensure that the untrained instance is not trained
        if externals.exists('h5py'):
            fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
            os.close(fd)
            h5save(qefn, qe)
            qe = h5load(qefn)
            os.remove(qefn)

        # untrained qe should give errors
        assert_raises(ValueError, lambda: qe.ids)
        assert_raises(ValueError, lambda: qe.query_byid(0))

        # node index out of bounds should give error
        ds_ = ds.copy()
        ds_.fa.node_indices[0] = 100
        assert_raises(ValueError, lambda: qe.train(ds_))

        # lack of node indices should give error
        ds_.fa.pop('node_indices')
        assert_raises(ValueError, lambda: qe.train(ds_))

        # train the qe
        qe.train(ds3)

        # test i/o and ensure that the loaded instance is trained
        if externals.exists('h5py'):
            h5save(qefn, qe)
            qe = h5load(qefn)

        for node in np.arange(-1, s2.nvertices + 1):
            if node < 0 or node >= s2.nvertices:
                assert_raises(KeyError, lambda: qe.query_byid(node))
                continue

            feature_ids = np.asarray(qe.query_byid(node))

            # node indices relative to ds
            base_ids = feature_ids[feature_ids < 20]

            # should have multiples of 20
            assert_equal(set(feature_ids),
                         set((base_ids[np.newaxis].T +
                              [0, 20, 40]).ravel()))

            node_indices = list(s2.circlearound_n2d(node, radius,
                                                    distance_metric
                                                    or 'dijkstra'))
            fa_indices = [fa_index for fa_index, inode
                          in enumerate(ds3.fa.node_indices)
                          if inode in node_indices]

            assert_equal(set(feature_ids), set(fa_indices))

    # smoke tests
    assert_true('SurfaceQueryEngine' in '%s' % qe)
    assert_true('SurfaceQueryEngine' in '%r' % qe)
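# A minimal sketch (not part of the test suite) of the feature-id layout
# the query engine tests rely on: stacking three copies of a 20-feature
# dataset makes feature i reappear at i + 20 and i + 40, so every
# matched base id must come with its two shifted copies.
def _sketch_stacked_feature_ids():
    import numpy as np

    base_ids = np.array([3, 7])
    all_ids = (base_ids[np.newaxis].T + [0, 20, 40]).ravel()
    assert set(all_ids) == set([3, 23, 43, 7, 27, 47])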
def test_surf(self, temp_fn):
    """Some simple testing with surfaces
    """

    s = surf.generate_sphere(10)

    assert_true(s.nvertices == 102)
    assert_true(s.nfaces == 200)

    v = s.vertices
    f = s.faces

    assert_true(v.shape == (102, 3))
    assert_true(f.shape == (200, 3))

    # another surface
    t = s * 10 + 2
    assert_true(t.same_topology(s))
    assert_array_equal(f, t.faces)

    assert_array_equal(v * 10 + 2, t.vertices)

    # allow updating, but should not affect original array
    # CHECKME: maybe we want to throw an exception instead
    assert_true((v * 10 + 2 == t.vertices).all().all())
    assert_true((s.vertices * 10 + 2 == t.vertices).all().all())

    # a few checks on vertices and nodes
    v_check = {40: (0.86511144, -0.28109175, -0.41541501),
               10: (0.08706015, -0.26794358, -0.95949297)}
    f_check = {10: (7, 8, 1), 40: (30, 31, 21)}

    vf_checks = [(v_check, lambda x: x.vertices),
                 (f_check, lambda x: x.faces)]

    eps = .0001
    for cmap, f in vf_checks:
        for k, v in cmap.iteritems():
            surfval = f(s)[k, :]
            assert_true((abs(surfval - v) < eps).all())

    # make sure same_topology fails with different topology
    u = surf.generate_cube()
    assert_false(u.same_topology(s))

    # check that neighbours are computed correctly
    # even if we nuke the topology afterwards
    for _ in [0, 1]:
        nbrs = s.neighbors
        n_check = [(0, 96, 0.284629),
                   (40, 39, 0.56218349),
                   (100, 99, 0.1741202)]
        for i, j, k in n_check:
            assert_true(abs(nbrs[i][j] - k) < eps)

    def assign_zero(x):
        x.faces[:, :] = 0
        return None

    assert_raises((ValueError, RuntimeError), assign_zero, s)

    # see if mapping to high res works
    h = surf.generate_sphere(40)

    low2high = s.map_to_high_resolution_surf(h, .1)
    partmap = {7: 141, 8: 144, 9: 148, 10: 153, 11: 157, 12: 281}
    for k, v in partmap.iteritems():
        assert_true(low2high[k] == v)

    # ensure that slow implementation gives same results as fast one
    low2high_slow = s.map_to_high_resolution_surf(h, .1)
    for k, v in low2high.iteritems():
        assert_true(low2high_slow[k] == v)

    # should fail if epsilon is too small
    assert_raises(ValueError,
                  lambda x: x.map_to_high_resolution_surf(h, .01), s)

    n2f = s.node2faces
    for i in xrange(s.nvertices):
        nf = [10] if i < 2 else [5, 6]  # number of faces expected
        assert_true(len(n2f[i]) in nf)

    # test dijkstra distances
    ds2 = s.dijkstra_distance(2)
    some_ds = {0: 3.613173280799, 1: 0.2846296765, 2: 0.,
               52: 1.87458018, 53: 2.0487004817, 54: 2.222820777,
               99: 3.32854360, 100: 3.328543604, 101: 3.3285436042}

    eps = np.finfo('f').eps
    for k, v in some_ds.iteritems():
        assert_true(abs(v - ds2[k]) < eps)

    # test I/O (through ascii files)
    surf.write(temp_fn, s, overwrite=True)
    s2 = surf.read(temp_fn)

    # test i/o through HDF5, if available
    if externals.exists('h5py'):
        h5save(temp_fn, s2)
        s2 = h5load(temp_fn)

    assert_array_almost_equal(s.vertices, s2.vertices, 4)
    assert_array_almost_equal(s.faces, s2.faces, 4)

    # test plane (new feature end of August 2012)
    s3 = surf.generate_plane((0, 0, 0), (2, 0, 0), (0, 1, 0), 10, 20)
    assert_equal(s3.nvertices, 200)
    assert_equal(s3.nfaces, 342)

    assert_array_almost_equal(s3.vertices[-1, :], np.array([18., 19, 0.]))
    assert_array_almost_equal(s3.faces[-1, :], np.array([199, 198, 179]))

    # test bar
    p, q = (0, 0, 0), (100, 0, 0)
    s4 = surf.generate_bar(p, q, 10, 12)
    assert_equal(s4.nvertices, 26)
    assert_equal(s4.nfaces, 48)
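# A minimal sketch (not part of the test suite) of Dijkstra's algorithm
# over the same structure as Surface.neighbors, a node -> {neighbor:
# distance} mapping; it illustrates the idea behind dijkstra_distance,
# not PyMVPA's implementation.
def _sketch_dijkstra():
    import heapq

    def dijkstra(nbrs, source):
        dist = {source: 0.}
        heap = [(0., source)]
        while heap:
            d, node = heapq.heappop(heap)
            if d > dist.get(node, float('inf')):
                continue  # stale heap entry
            for nbr, weight in nbrs[node].items():
                nd = d + weight
                if nd < dist.get(nbr, float('inf')):
                    dist[nbr] = nd
                    heapq.heappush(heap, (nd, nbr))
        return dist

    nbrs = {0: {1: 1.}, 1: {0: 1., 2: 2.}, 2: {1: 2.}}
    assert dijkstra(nbrs, 0) == {0: 0., 1: 1., 2: 3.}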
def test_afni_suma_spec(self, temp_dir):
    # XXX this function generates quite a few temporary files,
    # which are removed at the end.
    # the decorator @with_tempfile seems unsuitable as it only
    # supports a single temporary file

    # make temporary directory
    os.mkdir(temp_dir)

    # generate surfaces
    inflated_surf = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1),
                                        10, 10)
    white_surf = inflated_surf + 1.

    # helper function
    _tmp = lambda x: pathjoin(temp_dir, x)

    # filenames for surfaces and spec file
    inflated_fn = _tmp('_lh_inflated.asc')
    white_fn = _tmp('_lh_white.asc')
    spec_fn = _tmp('lh.spec')

    spec_dir = os.path.split(spec_fn)[0]

    # generate SUMA-like spec dictionary
    white = dict(SurfaceFormat='ASCII', EmbedDimension='3',
                 SurfaceType='FreeSurfer', SurfaceName=white_fn,
                 Anatomical='Y', LocalCurvatureParent='SAME',
                 LocalDomainParent='SAME', SurfaceState='smoothwm')

    inflated = dict(SurfaceFormat='ASCII', EmbedDimension='3',
                    SurfaceType='FreeSurfer', SurfaceName=inflated_fn,
                    Anatomical='N', LocalCurvatureParent=white_fn,
                    LocalDomainParent=white_fn, SurfaceState='inflated')

    # make SurfaceSpec object
    spec = afni_suma_spec.SurfaceSpec([white], directory=spec_dir)
    spec.add_surface(inflated)

    # test __str__ and __repr__
    assert_true('SurfaceSpec instance with 2 surfaces'
                ', 2 states ' in '%s' % spec)
    assert_true(('%r' % spec).startswith('SurfaceSpec'))

    # test finding surfaces
    inflated_ = spec.find_surface_from_state('inflated')
    assert_equal([(1, inflated)], inflated_)

    empty = spec.find_surface_from_state('unknown')
    assert_equal(empty, [])

    # test .same_states
    minimal = afni_suma_spec.SurfaceSpec(
        [dict(SurfaceState=s) for s in ('smoothwm', 'inflated')])
    assert_true(spec.same_states(minimal))
    assert_false(spec.same_states(afni_suma_spec.SurfaceSpec(dict())))

    # test 'smart' surface file matching
    assert_equal(spec.get_surface_file('smo'), white_fn)
    assert_equal(spec.get_surface_file('inflated'), inflated_fn)
    assert_equal(spec.get_surface_file('this should be None'), None)

    # test i/o
    spec.write(spec_fn)
    spec_ = afni_suma_spec.from_any(spec_fn)

    # prepare for another (right-hemisphere) spec file
    lh_spec = spec
    rh_spec_fn = spec_fn.replace('lh', 'rh')

    rh_inflated_fn = _tmp(os.path.split(inflated_fn)[1].replace('_lh',
                                                                '_rh'))
    rh_white_fn = _tmp(os.path.split(white_fn)[1].replace('_lh', '_rh'))
    rh_spec_fn = _tmp('rh.spec')

    rh_white = dict(SurfaceFormat='ASCII', EmbedDimension='3',
                    SurfaceType='FreeSurfer', SurfaceName=rh_white_fn,
                    Anatomical='Y', LocalCurvatureParent='SAME',
                    LocalDomainParent='SAME', SurfaceState='smoothwm')

    rh_inflated = dict(SurfaceFormat='ASCII', EmbedDimension='3',
                       SurfaceType='FreeSurfer',
                       SurfaceName=rh_inflated_fn,
                       Anatomical='N', LocalCurvatureParent=rh_white_fn,
                       LocalDomainParent=rh_white_fn,
                       SurfaceState='inflated')

    rh_spec = afni_suma_spec.SurfaceSpec([rh_white], directory=spec_dir)
    rh_spec.add_surface(rh_inflated)

    # write files
    all_temp_fns = [spec_fn, rh_spec_fn]
    for fn, s in [(rh_inflated_fn, inflated_surf),
                  (rh_white_fn, white_surf),
                  (inflated_fn, inflated_surf),
                  (white_fn, white_surf)]:
        surf.write(fn, s)
        all_temp_fns.append(fn)

    # test adding views
    added_specs = afni_suma_spec.hemi_pairs_add_views((lh_spec, rh_spec),
                                                      'inflated', '.asc')
    for hemi, added_spec in zip(('l', 'r'), added_specs):
        states = ['smoothwm', 'inflated'] + \
                 ['CoM%sinflated' % i for i in 'msiap']
        assert_equal(states, [s['SurfaceState']
                              for s in added_spec.surfaces])
        all_temp_fns.extend([s['SurfaceName']
                             for s in added_spec.surfaces])

    # test combining specs (bh=both hemispheres)
    bh_spec = afni_suma_spec.combine_left_right(added_specs)

    # test merging specs (mh=merged hemispheres)
    mh_spec, mh_surfs = afni_suma_spec.merge_left_right(bh_spec)

    assert_equal([s['SurfaceState'] for s in mh_spec.surfaces],
                 ['smoothwm'] + ['CoM%sinflated' % i for i in 'msiap'])
def test_voxel_selection(self):
    '''Compare surface and volume based searchlight'''
    '''
    Tests to see whether results are identical for surface-based
    searchlight (just one plane; Euclidean distance) and volume-based
    searchlight.

    Note that the current value is a float; if it were int, it would
    specify the number of voxels in each searchlight'''
    radius = 10.

    '''Define input filenames'''
    epi_fn = pathjoin(pymvpa_dataroot, 'bold.nii.gz')
    maskfn = pathjoin(pymvpa_dataroot, 'mask.nii.gz')

    '''
    Use the EPI datafile to define a surface.
    The surface has as many nodes as there are voxels
    and is parallel to the volume 'slice'
    '''
    vg = volgeom.from_any(maskfn, mask_volume=True)

    aff = vg.affine
    nx, ny, nz = vg.shape[:3]

    '''Plane goes in x and y direction, so we take these vectors
    from the affine transformation matrix of the volume'''
    plane = surf.generate_plane(aff[:3, 3], aff[:3, 0], aff[:3, 1],
                                nx, ny)

    '''
    Simulate pial and white matter as just above and below
    the central plane
    '''
    normal_vec = aff[:3, 2]
    outer = plane + normal_vec
    inner = plane + -normal_vec

    '''
    Combine volume and surface information
    '''
    vsm = volsurf.VolSurfMaximalMapping(vg, outer, inner)

    '''
    Run voxel selection with specified radius (in mm), using
    Euclidean distance measure
    '''
    surf_voxsel = surf_voxel_selection.voxel_selection(
        vsm, radius, distance_metric='e')

    '''Define the measure'''

    # run_slow=True would give an actual cross-validation with meaningful
    # accuracies. Because this is a unit-test only the number of voxels
    # in each searchlight is tested.
    run_slow = False

    if run_slow:
        meas = CrossValidation(GNB(), OddEvenPartitioner(),
                               errorfx=lambda p, t: np.mean(p == t))
        postproc = mean_sample
    else:
        meas = _Voxel_Count_Measure()
        postproc = lambda x: x

    '''
    Surface analysis: define the query engine, cross validation,
    and searchlight
    '''
    surf_qe = SurfaceVerticesQueryEngine(surf_voxsel)
    surf_sl = Searchlight(meas, queryengine=surf_qe, postproc=postproc)

    '''
    new (Sep 2012): also test 'simple' queryengine wrapper function
    '''
    surf_qe2 = disc_surface_queryengine(radius, maskfn, inner, outer,
                                        plane, volume_mask=True,
                                        distance_metric='euclidean')
    surf_sl2 = Searchlight(meas, queryengine=surf_qe2, postproc=postproc)

    '''
    Same for the volume analysis
    '''
    element_sizes = tuple(map(abs, (aff[0, 0], aff[1, 1], aff[2, 2])))
    sph = Sphere(radius, element_sizes=element_sizes)
    kwa = {'voxel_indices': sph}

    vol_qe = IndexQueryEngine(**kwa)
    vol_sl = Searchlight(meas, queryengine=vol_qe, postproc=postproc)

    '''The following steps are similar to start_easy.py'''
    attr = SampleAttributes(pathjoin(pymvpa_dataroot,
                                     'attributes_literal.txt'))

    mask = surf_voxsel.get_mask()

    dataset = fmri_dataset(samples=pathjoin(pymvpa_dataroot,
                                            'bold.nii.gz'),
                           targets=attr.targets, chunks=attr.chunks,
                           mask=mask)

    if run_slow:
        # do chunkswise linear detrending on dataset
        poly_detrend(dataset, polyord=1, chunks_attr='chunks')

        # zscore dataset relative to baseline ('rest') mean
        zscore(dataset, chunks_attr='chunks',
               param_est=('targets', ['rest']))

    # select class face and house for this demo analysis
    # would work with full datasets (just a little slower)
    dataset = dataset[np.array([l in ['face', 'house']
                                for l in dataset.sa.targets],
                               dtype='bool')]

    '''Apply searchlight to datasets'''
    surf_dset = surf_sl(dataset)
    surf_dset2 = surf_sl2(dataset)
    vol_dset = vol_sl(dataset)

    surf_data = surf_dset.samples
    surf_data2 = surf_dset2.samples
    vol_data = vol_dset.samples

    assert_array_equal(surf_data, surf_data2)
    assert_array_equal(surf_data, vol_data)
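# A small sketch (not part of the test suite) of why Sphere needs
# element_sizes above: the radius is in mm, so each voxel index offset
# must be scaled by the physical voxel size before testing membership.
# Assumes 2x2x2 mm voxels purely for illustration.
def _sketch_sphere_membership():
    import numpy as np

    radius = 10.
    element_sizes = np.array([2., 2., 2.])

    def in_sphere(offset):
        mm = np.asarray(offset) * element_sizes
        return np.sum(mm ** 2) ** .5 <= radius

    assert in_sphere((5, 0, 0))        # exactly 10 mm away: included
    assert not in_sphere((5, 1, 0))    # ~10.2 mm away: excluded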
def test_flat_surface_plotting_exception_wrong_size(self):
    s = surf.generate_plane((0, 0, 0), (0, 0, 1), (0, 1, 0), 6, 6)
    for offset in (-1, 0, 1):
        nfeatures = s.nvertices + offset
        ds = AttrDataset(samples=np.random.normal(size=(1, nfeatures)))

        fsp = FlatSurfacePlotter(s)
        if offset == 0:
            # matching number of features and vertices: must not raise
            fsp(ds.samples.ravel())
        else:
            # mismatching sizes are expected to raise a ValueError
            assert_raises(ValueError, fsp, ds.samples.ravel())