def test_queryengine_io(self, fn):
    """Query engine must refuse lookups before training and accept them after.

    ``fn`` is a temporary filename supplied by the test harness; the
    h5py-based i/o helpers are imported here (presumably for a save/load
    round-trip later in the full test -- TODO confirm against upstream).
    """
    skip_if_no_external('h5py')
    from mvpa2.base.hdf5 import h5save, h5load

    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # generate some surfaces,
    # and add some noise to them
    sphere_density = 10
    outer = surf.generate_sphere(sphere_density) * 5 + 8
    inner = surf.generate_sphere(sphere_density) * 3 + 8

    radius = 5.
    add_fa = ['center_distances', 'grey_matter_position']
    qe = disc_surface_queryengine(radius, vg, inner, outer, add_fa=add_fa)
    ds = fmri_dataset(vg.get_masked_nifti_image())

    # the following is not really a strong requirement. XXX remove?
    assert_raises(ValueError, lambda: qe[qe.ids[0]])

    # check that after training it behaves well
    qe.train(ds)
    i = qe.ids[0]
    try:
        m = qe[i]
    # FIX: use the Python 2.6+/3.x-compatible 'as' form; the legacy
    # 'except ValueError, e' comma syntax is a SyntaxError on Python 3
    except ValueError as e:
        raise AssertionError(
            'Failed to query %r from %r after training on %r. Exception was: %r'
            % (i, qe, ds, e))
def test_queryengine_io(self, fn):
    """Query engine must refuse lookups before training and accept them after.

    ``fn`` is a temporary filename supplied by the test harness; the
    h5py-based i/o helpers are imported here (presumably for a save/load
    round-trip later in the full test -- TODO confirm against upstream).
    """
    skip_if_no_external("h5py")
    from mvpa2.base.hdf5 import h5save, h5load

    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # generate some surfaces,
    # and add some noise to them
    sphere_density = 10
    outer = surf.generate_sphere(sphere_density) * 5 + 8
    inner = surf.generate_sphere(sphere_density) * 3 + 8

    radius = 5.0
    add_fa = ["center_distances", "grey_matter_position"]
    qe = disc_surface_queryengine(radius, vg, inner, outer, add_fa=add_fa)
    ds = fmri_dataset(vg.get_masked_nifti_image())

    # the following is not really a strong requirement. XXX remove?
    assert_raises(ValueError, lambda: qe[qe.ids[0]])

    # check that after training it behaves well
    qe.train(ds)
    i = qe.ids[0]
    try:
        m = qe[i]
    # FIX: use the Python 2.6+/3.x-compatible 'as' form; the legacy
    # 'except ValueError, e' comma syntax is a SyntaxError on Python 3
    except ValueError as e:
        raise AssertionError(
            "Failed to query %r from %r after training on %r. "
            "Exception was: %r" % (i, qe, ds, e)
        )
def test_h5support(self):
    """Voxel selection must give identical results for every results backend."""
    shape = (20, 20, 20)
    mask = np.zeros(shape)
    # mark every other slice along the first axis as in-mask
    mask[::2, :, :] = 1

    geometry = volgeom.VolGeom(shape, np.identity(4), mask=mask)

    density = 20
    pial = surf.generate_sphere(density) * 10. + 5
    white = surf.generate_sphere(density) * 5. + 5
    midsurface = pial * .5 + white * .5
    xyz = midsurface.vertices

    radius = 50

    reference = None
    for backend in ['native', 'hdf5']:
        # hdf5 backend only makes sense when h5py is available
        if backend == 'hdf5' and not externals.exists('h5py'):
            continue

        result = surf_voxel_selection.run_voxel_selection(
            radius, geometry, white, pial, results_backend=backend)

        if reference is None:
            reference = result
        else:
            # later backends must reproduce the first backend's selection
            assert_equal(reference, result)
def test_surface_voxel_query_engine(self):
    """Searchlight over a surface-based voxel query engine.

    Checks that per-center voxel counts fall in a plausible band and that
    the total number of searchlight centers matches the fallback setting.
    NOTE(review): random data plus hard-coded expected feature counts make
    this test sensitive to the exact sequence of calls.
    """
    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    # 5-unit isotropic voxels
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5

    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # make the surfaces
    sphere_density = 10

    outer = surf.generate_sphere(sphere_density) * 25. + 15
    inner = surf.generate_sphere(sphere_density) * 20. + 15

    vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)

    radius = 10

    # expected_nfeatures: 1000 (= 10*10*10, the whole volume) with the
    # euclidean fallback, 183 without it
    for fallback, expected_nfeatures in ((True, 1000), (False, 183)):
        voxsel = surf_voxel_selection.voxel_selection(vs, radius)
        # NOTE(review): keyword is spelled 'euclidian' here but
        # 'euclidean' in a sibling version of this test -- confirm which
        # spelling the installed SurfaceVoxelsQueryEngine accepts
        qe = SurfaceVoxelsQueryEngine(voxsel, fallback_euclidian_distance=fallback)

        m = _Voxel_Count_Measure()
        sl = Searchlight(m, queryengine=qe)

        data = np.random.normal(size=vol_shape)
        img = nb.Nifti1Image(data, vol_affine)
        ds = fmri_dataset(img)

        sl_map = sl(ds)

        counts = sl_map.samples

        # each searchlight disc should contain between 5 and 18 voxels
        assert_true(np.all(np.logical_and(5 <= counts, counts <= 18)))
        assert_equal(sl_map.nfeatures, expected_nfeatures)
def test_h5support(self):
    """Selections computed via 'native' and 'hdf5' backends must be equal."""
    vol_sh = (20, 20, 20)
    vol_msk = np.zeros(vol_sh)
    # every even slice along axis 0 is in the mask
    for slice_idx in xrange(0, vol_sh[0], 2):
        vol_msk[slice_idx, :, :] = 1

    geom = volgeom.VolGeom(vol_sh, np.identity(4), mask=vol_msk)

    dens = 20
    srf_outer = surf.generate_sphere(dens) * 10. + 5
    srf_inner = surf.generate_sphere(dens) * 5. + 5
    srf_mid = srf_outer * .5 + srf_inner * .5
    xyz = srf_mid.vertices

    rad = 50

    results = []
    for backend in ['native', 'hdf5']:
        if backend == 'hdf5' and not externals.exists('h5py'):
            # cannot exercise the hdf5 backend without h5py
            continue

        results.append(surf_voxel_selection.run_voxel_selection(
            rad, geom, srf_inner, srf_outer, results_backend=backend))

    # all computed selections must match the first one
    for later in results[1:]:
        assert_equal(results[0], later)
def test_surface_minimal_voxel_selection(self):
    """Compare 'minimal' against 'maximal' node-to-voxel mapping.

    NOTE(review): uses random noise on the surfaces, so the comparison
    tolerances below are deliberately loose.
    """
    # Tests 'minimal' voxel selection.
    # It assumes that 'maximal' voxel selection works (which is tested
    # in other unit tests)
    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # generate some surfaces,
    # and add some noise to them
    sphere_density = 10
    nvertices = sphere_density**2 + 2
    noise = np.random.uniform(size=(nvertices, 3))
    outer = surf.generate_sphere(sphere_density) * 5 + 8 + noise
    inner = surf.generate_sphere(sphere_density) * 3 + 8 + noise

    radii = [5., 20., 10]  # note: no fixed radii at the moment

    # Note: a little outside margin is necessary
    # as otherwise there are nodes in the minimal case
    # that have no voxels associated with them

    for radius in radii:
        for output_modality in ('surface', 'volume'):
            # i == 0 is 'minimal', i == 1 is 'maximal'
            for i, nvm in enumerate(('minimal', 'maximal')):
                qe = disc_surface_queryengine(
                    radius, vg, inner, outer,
                    node_voxel_mapping=nvm,
                    output_modality=output_modality)

                voxsel = qe.voxsel

                if i == 0:
                    # remember the 'minimal' result for comparison
                    keys_ = voxsel.keys()
                    voxsel_ = voxsel
                else:
                    keys = voxsel.keys()

                    # minimal one has a subset
                    assert_equal(keys, keys_)

                    # and the subset is quite overlapping
                    assert_true(len(keys) * .90 < len(keys_))

                    for k in keys_:
                        x = set(voxsel_[k])
                        y = set(voxsel[k])
                        d = set.symmetric_difference(x, y)
                        # normalized disagreement between the two selections
                        r = float(len(d)) / 2 / len(x)

                        if type(radius) is float:
                            # with a float (variable) radius the minimal
                            # selection must be contained in the maximal one
                            assert_equal(x - y, set())

                        # decent agreement in any case
                        # between the two sets
                        assert_true(r < .6)
def test_niml_dset_voxsel(self, fn):
    """Round-trip searchlight results through a NIML dataset file ``fn``."""
    if not externals.exists('nibabel'):
        return

    # This is actually a bit of an integration test.
    # It tests storing and retrieving searchlight results.
    # Imports are inline here so that it does not mess up the header
    # and makes the other unit tests more modular
    # XXX put this in a separate file?
    from mvpa2.misc.surfing import volgeom, surf_voxel_selection, queryengine
    from mvpa2.measures.searchlight import Searchlight
    from mvpa2.support.nibabel import surf
    from mvpa2.measures.base import Measure
    from mvpa2.datasets.mri import fmri_dataset

    class _Voxel_Count_Measure(Measure):
        # used to check voxel selection results
        is_trained = True

        def __init__(self, dtype, **kwargs):
            Measure.__init__(self, **kwargs)
            # dtype applied to the per-center feature count (int or float)
            self.dtype = dtype

        def _call(self, dset):
            return self.dtype(dset.nfeatures)

    sh = (20, 20, 20)
    vg = volgeom.VolGeom(sh, np.identity(4))

    density = 20

    outer = surf.generate_sphere(density) * 10. + 5
    inner = surf.generate_sphere(density) * 5. + 5
    intermediate = outer * .5 + inner * .5
    xyz = intermediate.vertices

    radius = 50

    sel = surf_voxel_selection.run_voxel_selection(radius, vg, inner, outer)
    qe = queryengine.SurfaceVerticesQueryEngine(sel)

    # exercise both integer and float samples through niml i/o
    for dtype in (int, float):
        sl = Searchlight(_Voxel_Count_Measure(dtype), queryengine=qe)

        ds = fmri_dataset(vg.get_empty_nifti_image(1))
        r = sl(ds)

        # write, re-read and verify the samples survived the round trip
        niml.write(fn, r)
        rr = niml.read(fn)
        os.remove(fn)

        assert_array_equal(r.samples, rr.samples)
def test_niml_dset_voxsel(self):
    """Round-trip searchlight results through a NIML dataset temp file."""
    if not externals.exists('nibabel'):
        return

    # This is actually a bit of an integration test.
    # It tests storing and retrieving searchlight results.
    # Imports are inline here so that it does not mess up the header
    # and makes the other unit tests more modular
    # XXX put this in a separate file?
    from mvpa2.misc.surfing import volgeom, surf_voxel_selection, queryengine
    from mvpa2.measures.searchlight import Searchlight
    from mvpa2.support.nibabel import surf
    from mvpa2.measures.base import Measure
    from mvpa2.datasets.mri import fmri_dataset

    class _Voxel_Count_Measure(Measure):
        # used to check voxel selection results
        is_trained = True

        def __init__(self, dtype, **kwargs):
            Measure.__init__(self, **kwargs)
            # dtype applied to the per-center feature count (int or float)
            self.dtype = dtype

        def _call(self, dset):
            return self.dtype(dset.nfeatures)

    sh = (20, 20, 20)
    vg = volgeom.VolGeom(sh, np.identity(4))

    density = 20

    outer = surf.generate_sphere(density) * 10. + 5
    inner = surf.generate_sphere(density) * 5. + 5
    intermediate = outer * .5 + inner * .5
    xyz = intermediate.vertices

    radius = 50

    sel = surf_voxel_selection.run_voxel_selection(radius, vg, inner, outer)
    qe = queryengine.SurfaceVerticesQueryEngine(sel)

    for dtype in (int, float):
        sl = Searchlight(_Voxel_Count_Measure(dtype), queryengine=qe)

        ds = fmri_dataset(vg.get_empty_nifti_image(1))
        r = sl(ds)

        # FIX: mkstemp returns an *open* OS-level file descriptor; the
        # original discarded it ('_, fn = ...'), leaking one descriptor
        # per loop iteration -- close it explicitly
        fd, fn = tempfile.mkstemp('.niml.dset', 'dset')
        os.close(fd)
        niml_dset.write(fn, r)
        rr = niml_dset.read(fn)
        os.remove(fn)

        assert_array_equal(r.samples, rr.samples)
def test_surface_minimal_voxel_selection(self):
    """Compare 'minimal' against 'maximal' node-to-voxel mapping.

    NOTE(review): uses random noise on the surfaces, so the comparison
    tolerances below are deliberately loose.
    """
    # Tests 'minimal' voxel selection.
    # It assumes that 'maximal' voxel selection works (which is tested
    # in other unit tests)
    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # generate some surfaces,
    # and add some noise to them
    sphere_density = 10
    nvertices = sphere_density ** 2 + 2
    noise = np.random.uniform(size=(nvertices, 3))
    outer = surf.generate_sphere(sphere_density) * 5 + 8 + noise
    inner = surf.generate_sphere(sphere_density) * 3 + 8 + noise

    radii = [5.0, 20.0, 10]  # note: no fixed radii at the moment

    # Note: a little outside margin is necessary
    # as otherwise there are nodes in the minimal case
    # that have no voxels associated with them

    for radius in radii:
        for output_modality in ("surface", "volume"):
            # i == 0 is 'minimal', i == 1 is 'maximal'
            for i, nvm in enumerate(("minimal", "maximal")):
                qe = disc_surface_queryengine(
                    radius, vg, inner, outer,
                    node_voxel_mapping=nvm,
                    output_modality=output_modality
                )

                voxsel = qe.voxsel

                if i == 0:
                    # remember the 'minimal' result for comparison
                    keys_ = voxsel.keys()
                    voxsel_ = voxsel
                else:
                    keys = voxsel.keys()

                    # minimal one has a subset
                    assert_equal(keys, keys_)

                    # and the subset is quite overlapping
                    assert_true(len(keys) * 0.90 < len(keys_))

                    for k in keys_:
                        x = set(voxsel_[k])
                        y = set(voxsel[k])
                        d = set.symmetric_difference(x, y)
                        # normalized disagreement between the two selections
                        r = float(len(d)) / 2 / len(x)

                        if type(radius) is float:
                            # with a float (variable) radius the minimal
                            # selection must be contained in the maximal one
                            assert_equal(x - y, set())

                        # decent agreement in any case
                        # between the two sets
                        assert_true(r < 0.6)
def test_surface_outside_volume_voxel_selection(self, fn):
    """Voxel selection for surfaces entirely outside the volume.

    ``fn`` is a temporary filename for the h5save/h5load round trip.
    With ``outside_node_margin=True`` every node must be kept (with an
    empty voxel list); with a finite margin no node survives, since the
    surfaces are placed far from all voxels.
    """
    skip_if_no_external('h5py')
    from mvpa2.base.hdf5 import h5save, h5load

    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)

    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # make surfaces that are far away from all voxels
    # in the volume
    sphere_density = 4
    far = 10000.
    outer = surf.generate_sphere(sphere_density) * 10 + far
    inner = surf.generate_sphere(sphere_density) * 5 + far

    vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)
    radii = [10., 10]  # fixed and variable radii

    outside_node_margins = [0, far, True]
    for outside_node_margin in outside_node_margins:
        for radius in radii:
            # bound the call so assert_raises can invoke it; the lambda is
            # called within this same iteration, so late binding is safe
            selector = lambda: surf_voxel_selection.voxel_selection(
                vs, radius, outside_node_margin=outside_node_margin)

            if type(radius) is int and outside_node_margin is True:
                # int (fixed-count) radius is incompatible with
                # outside_node_margin=True
                assert_raises(ValueError, selector)
            else:
                sel = selector()
                if outside_node_margin is True:
                    # it should have all the keys, but they should
                    # all be empty
                    assert_array_equal(sel.keys(), range(inner.nvertices))
                    for k, v in sel.iteritems():
                        assert_equal(v, [])
                else:
                    assert_array_equal(sel.keys(), [])

                if outside_node_margin is True and \
                        externals.versions['hdf5'] < '1.8.7':
                    raise SkipTest("Versions of hdf5 before 1.8.7 have "
                                   "problems with empty arrays")

                # selection must survive an hdf5 round trip unchanged
                h5save(fn, sel)
                sel_copy = h5load(fn)

                assert_array_equal(sel.keys(), sel_copy.keys())
                for k in sel.keys():
                    assert_equal(sel[k], sel_copy[k])

                assert_equal(sel, sel_copy)
def test_volsurf(self):
    """Node-to-voxel mapping must grow as the grey-matter band widens.

    NOTE(review): the expected counts below are hard-coded for exactly
    these surfaces and parameters.
    """
    vg = volgeom.VolGeom((50, 50, 50), np.identity(4))

    density = 40

    outer = surf.generate_sphere(density) * 25. + 25
    inner = surf.generate_sphere(density) * 20. + 25

    # increasingly select more voxels in 'grey matter'
    steps_start_stop = [(1, .5, .5), (5, .5, .5), (3, .3, .7),
                        (5, .3, .7), (5, 0., 1.), (10, 0., 1.)]

    mp = None
    # a generated sphere of this density has density**2 + 2 nodes
    expected_keys = set(range(density**2 + 2))
    selection_counter = []
    voxel_counter = []
    for sp, sa, so in steps_start_stop:
        vs = volsurf.VolSurfMaximalMapping(vg, outer, inner,
                                           (outer + inner) * .5,
                                           sp, sa, so)

        n2v = vs.get_node2voxels_mapping()

        if mp is None:
            mp = n2v

        # every node must be a key in the mapping
        assert_equal(expected_keys, set(n2v.keys()))

        counter = 0
        for k, v2pos in n2v.iteritems():
            for v, pos in v2pos.iteritems():
                # should be close to grey matter
                assert_true(-1. <= pos <= 2.)
                counter += 1

        selection_counter.append(counter)
        img = vs.voxel_count_nifti_image()

        voxel_counter.append(np.sum(img.get_data() > 0))

    # hard coded number of expected voxels
    selection_expected = [1602, 1602, 4618, 5298, 7867, 10801]
    assert_equal(selection_counter, selection_expected)

    voxel_expected = [1498, 1498, 4322, 4986, 7391, 10141]
    assert_equal(voxel_counter, voxel_expected)

    # check that string building works
    assert_true(len('%s%r' % (vs, vs)) > 0)
def test_volsurf(self):
    """Node-to-voxel mapping must grow as the grey-matter band widens.

    NOTE(review): the expected counts below are hard-coded for exactly
    these surfaces and parameters.
    """
    vg = volgeom.VolGeom((50, 50, 50), np.identity(4))

    density = 40

    outer = surf.generate_sphere(density) * 25. + 25
    inner = surf.generate_sphere(density) * 20. + 25

    # increasingly select more voxels in 'grey matter'
    steps_start_stop = [(1, .5, .5), (5, .5, .5), (3, .3, .7),
                        (5, .3, .7), (5, 0., 1.), (10, 0., 1.)]

    mp = None
    # a generated sphere of this density has density**2 + 2 nodes
    expected_keys = set(range(density ** 2 + 2))
    selection_counter = []
    voxel_counter = []
    for sp, sa, so in steps_start_stop:
        vs = volsurf.VolSurfMaximalMapping(vg, outer, inner,
                                           (outer + inner) * .5,
                                           sp, sa, so)

        n2v = vs.get_node2voxels_mapping()

        if mp is None:
            mp = n2v

        # every node must be a key in the mapping
        assert_equal(expected_keys, set(n2v.keys()))

        counter = 0
        for k, v2pos in n2v.iteritems():
            for v, pos in v2pos.iteritems():
                # should be close to grey matter
                # FIX: idiomatic chained comparison instead of
                # '-1. <= pos and pos <= 2.' (also matches the sibling
                # version of this test)
                assert_true(-1. <= pos <= 2.)
                counter += 1

        selection_counter.append(counter)
        img = vs.voxel_count_nifti_image()

        voxel_counter.append(np.sum(img.get_data() > 0))

    # hard coded number of expected voxels
    selection_expected = [1602, 1602, 4618, 5298, 7867, 10801]
    assert_equal(selection_counter, selection_expected)

    voxel_expected = [1498, 1498, 4322, 4986, 7391, 10141]
    assert_equal(voxel_counter, voxel_expected)

    # check that string building works
    assert_true(len('%s%r' % (vs, vs)) > 0)
def test_surf_fs_asc(self, temp_fn):
    """FreeSurfer ASCII surface i/o plus left/right hemisphere utilities.

    ``temp_fn`` is a temporary filename supplied by the test harness.
    """
    s = surf.generate_sphere(5) * 100

    surf_fs_asc.write(temp_fn, s, overwrite=True)

    t = surf_fs_asc.read(temp_fn)

    # round-trip must preserve vertex coordinates
    # (FIX: the identical assertion was duplicated verbatim in the
    # original; the redundant copy is removed)
    assert_array_almost_equal(s.vertices, t.vertices)

    theta = np.asarray([0, 0., 180.])

    r = s.rotate(theta, unit='deg')

    l2r = surf.get_sphere_left_right_mapping(s, r)
    l2r_expected = [
        0, 1, 2, 6, 5, 4, 3, 11, 10, 9, 8, 7, 15, 14, 13, 12, 16, 19, 18,
        17, 21, 20, 23, 22, 26, 25, 24
    ]

    assert_array_equal(l2r, np.asarray(l2r_expected))

    sides_facing = 'apism'
    for side_facing in sides_facing:
        l, r = surf.reposition_hemisphere_pairs(s + 10., t + (-10.),
                                                side_facing)

        m = surf.merge(l, r)

        # not sure at the moment why medial rotation
        # messes up - but leave for now
        eps = 666 if side_facing == 'm' else .001
        assert_true((abs(m.center_of_mass) < eps).all())
def test_surf_fs_asc(self, temp_fn):
    """FreeSurfer ASCII surface i/o plus left/right hemisphere utilities.

    ``temp_fn`` is a temporary filename supplied by the test harness.
    """
    s = surf.generate_sphere(5) * 100

    surf_fs_asc.write(temp_fn, s, overwrite=True)

    t = surf_fs_asc.read(temp_fn)

    # round-trip must preserve vertex coordinates
    # (FIX: the identical assertion was duplicated verbatim in the
    # original; the redundant copy is removed)
    assert_array_almost_equal(s.vertices, t.vertices)

    theta = np.asarray([0, 0., 180.])

    r = s.rotate(theta, unit='deg')

    l2r = surf.get_sphere_left_right_mapping(s, r)
    l2r_expected = [0, 1, 2, 6, 5, 4, 3, 11, 10, 9, 8, 7, 15, 14, 13,
                    12, 16, 19, 18, 17, 21, 20, 23, 22, 26, 25, 24]

    assert_array_equal(l2r, np.asarray(l2r_expected))

    sides_facing = 'apism'
    for side_facing in sides_facing:
        l, r = surf.reposition_hemisphere_pairs(s + 10., t + (-10.),
                                                side_facing)

        m = surf.merge(l, r)

        # not sure at the moment why medial rotation
        # messes up - but leave for now
        eps = 666 if side_facing == 'm' else .001
        assert_true((abs(m.center_of_mass) < eps).all())
def test_surface_voxel_query_engine(self):
    """Searchlight over a surface-based voxel query engine, with h5 i/o.

    Checks that per-center voxel counts fall in a plausible band, that the
    number of searchlight centers matches the fallback setting, and that a
    query engine survives an hdf5 round trip still trained.
    """
    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    # 5-unit isotropic voxels
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # make the surfaces
    sphere_density = 10

    outer = surf.generate_sphere(sphere_density) * 25. + 15
    inner = surf.generate_sphere(sphere_density) * 20. + 15

    vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)

    radius = 10

    for fallback, expected_nfeatures in ((True, 1000), (False, 183)):
        voxsel = surf_voxel_selection.voxel_selection(vs, radius)
        qe = SurfaceVoxelsQueryEngine(voxsel,
                                      fallback_euclidean_distance=fallback)

        # test i/o and ensure that the loaded instance is trained
        if externals.exists('h5py'):
            # FIX: statements split onto separate lines (PEP 8 discourages
            # semicolon-joined compound statements; matches the sibling
            # version of this test)
            fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
            os.close(fd)
            h5save(qefn, qe)
            qe = h5load(qefn)
            os.remove(qefn)

        m = _Voxel_Count_Measure()
        sl = Searchlight(m, queryengine=qe)

        data = np.random.normal(size=vol_shape)
        img = nb.Nifti1Image(data, vol_affine)
        ds = fmri_dataset(img)

        sl_map = sl(ds)

        counts = sl_map.samples

        # each searchlight disc should contain between 5 and 18 voxels
        assert_true(np.all(np.logical_and(5 <= counts, counts <= 18)))
        assert_equal(sl_map.nfeatures, expected_nfeatures)
def test_surface_voxel_query_engine(self):
    """Searchlight over a surface-based voxel query engine, with h5 i/o.

    Checks that per-center voxel counts fall in a plausible band, that the
    number of searchlight centers matches the fallback setting, and that a
    query engine survives an hdf5 round trip still trained.
    NOTE(review): random data plus hard-coded expected feature counts make
    this test sensitive to the exact sequence of calls.
    """
    vol_shape = (10, 10, 10, 1)
    vol_affine = np.identity(4)
    # 5-unit isotropic voxels
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    # make the surfaces
    sphere_density = 10

    outer = surf.generate_sphere(sphere_density) * 25. + 15
    inner = surf.generate_sphere(sphere_density) * 20. + 15

    vs = volsurf.VolSurfMaximalMapping(vg, inner, outer)

    radius = 10

    # expected_nfeatures: 1000 (= 10*10*10, the whole volume) with the
    # euclidean fallback, 183 without it
    for fallback, expected_nfeatures in ((True, 1000), (False, 183)):
        voxsel = surf_voxel_selection.voxel_selection(vs, radius)
        qe = SurfaceVoxelsQueryEngine(voxsel,
                                      fallback_euclidean_distance=fallback)

        # test i/o and ensure that the loaded instance is trained
        if externals.exists('h5py'):
            fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
            os.close(fd)
            h5save(qefn, qe)
            qe = h5load(qefn)
            os.remove(qefn)

        m = _Voxel_Count_Measure()
        sl = Searchlight(m, queryengine=qe)

        data = np.random.normal(size=vol_shape)
        img = nb.Nifti1Image(data, vol_affine)
        ds = fmri_dataset(img)

        sl_map = sl(ds)

        counts = sl_map.samples

        # each searchlight disc should contain between 5 and 18 voxels
        assert_true(np.all(np.logical_and(5 <= counts, counts <= 18)))
        assert_equal(sl_map.nfeatures, expected_nfeatures)
def test_minimal_dataset(self):
    """Consistency of get_dataset_feature_mask and get_minimal_dataset."""
    vol_shape = (10, 10, 10, 3)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    data = np.random.normal(size=vol_shape)
    msk = np.ones(vol_shape[:3])
    # knock out every other interior slice along the second axis
    msk[:, 1:-1:2, :] = 0

    ni_data = nb.Nifti1Image(data, vol_affine)
    ni_msk = nb.Nifti1Image(msk, vol_affine)

    ds = fmri_dataset(ni_data, mask=ni_msk)

    sphere_density = 20

    outer = surf.generate_sphere(sphere_density) * 10. + 5
    inner = surf.generate_sphere(sphere_density) * 7. + 5

    radius = 10
    sel = surf_voxel_selection.run_voxel_selection(radius, ds, inner, outer)

    # linear ids of all voxels selected for any node
    sel_fids = set.union(*(set(sel[k]) for k in sel.keys()))

    ds_vox = map(tuple, ds.fa.voxel_indices)

    vg = sel.volgeom
    # FIX: use a set -- membership is tested once per dataset voxel in the
    # comprehension below, and a list made that accidentally quadratic
    sel_vox = set(map(tuple, vg.lin2ijk(np.asarray(list(sel_fids)))))

    fid_mask = np.asarray([v in sel_vox for v in ds_vox])
    assert_array_equal(fid_mask, sel.get_dataset_feature_mask(ds))

    # check if it raises errors
    ni_neg_msk = nb.Nifti1Image(1 - msk, vol_affine)
    neg_ds = fmri_dataset(ni_data, mask=ni_neg_msk)  # inverted mask

    assert_raises(ValueError, sel.get_dataset_feature_mask, neg_ds)

    min_ds = sel.get_minimal_dataset(ds)
    assert_array_equal(min_ds.samples, ds[:, fid_mask].samples)
def test_surf_border(self):
    """Border detection: none on a closed sphere, edge nodes on a plane."""
    sphere = surf.generate_sphere(3)
    # a closed surface has no border nodes at all (density 3 -> 11 nodes)
    assert_array_equal(sphere.nodes_on_border(), [False] * 11)

    plane = surf.generate_plane((0, 0, 0), (0, 1, 0), (1, 0, 0), 10, 10)
    on_border = plane.nodes_on_border()

    coords = plane.vertices
    # a node lies on the border exactly when it sits on one of the
    # four edges of the 10x10 grid
    expected = (coords[:, 0] == 0) | (coords[:, 1] == 0) | \
               (coords[:, 0] == 9) | (coords[:, 1] == 9)

    assert_array_equal(on_border, expected)
    # single-node query: the corner node is on the border
    assert_true(plane.nodes_on_border(0))
def test_mask_with_keys(self):
    """Masked nifti images from query engines must honor id subsets."""
    vol_shape = (10, 10, 10, 3)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    data = np.random.normal(size=vol_shape)
    msk = np.ones(vol_shape[:3])
    # knock out every other interior slice along the second axis
    msk[:, 1:-1:2, :] = 0

    ni_data = nb.Nifti1Image(data, vol_affine)
    ni_msk = nb.Nifti1Image(msk, vol_affine)

    ds = fmri_dataset(ni_data, mask=ni_msk)

    sphere_density = 20

    outer = surf.generate_sphere(sphere_density) * 10.0 + 5
    inner = surf.generate_sphere(sphere_density) * 7.0 + 5

    radius = 10
    sel = surf_voxel_selection.run_voxel_selection(radius, ds, inner, outer)

    # in the mapping below:
    # (tup: None) means that tup as input should raise a KeyError
    # (tup: i) with i an int means that tup as input should return i
    # elements
    qe_ids2nvoxels = {
        SurfaceVoxelsQueryEngine: {(1, 2, 3): 13,
                                   tuple(np.arange(0, 200, 2)): 82,
                                   (601,): None,
                                   None: 126},
        SurfaceVerticesQueryEngine: {(1, 2, 3): None,
                                     (205, 209, 210, 214): 36,
                                     None: 126},
    }

    for constructor, ids2nfeatures in qe_ids2nvoxels.iteritems():
        qe = constructor(sel)
        qe.train(ds)

        # no ids means all ids
        img = qe.get_masked_nifti_image()
        assert_array_equal(img.get_data(),
                           qe.get_masked_nifti_image(qe.ids).get_data())

        img_getter = qe.get_masked_nifti_image

        for ids, nfeatures in ids2nfeatures.iteritems():
            ids_list = ids if ids is None else list(ids)
            if nfeatures is None and ids is not None:
                assert_raises(KeyError, img_getter, ids_list)
            else:
                img = img_getter(ids_list)
                nfeatures_found = np.sum(img.get_data())
                assert_equal(nfeatures, nfeatures_found)

                if constructor is SurfaceVerticesQueryEngine:
                    expected_image = qe.get_masked_nifti_image(ids_list)
                    expected_mask = expected_image.get_data()

                    def check_mask_func(x):
                        assert_array_equal(expected_mask, x)

                    def check_image_func(x):
                        # FIX: the original chained these two checks with
                        # 'and' in a lambda; assert_array_equal returns
                        # None (falsy), so the affine comparison was never
                        # executed -- run both checks unconditionally
                        check_mask_func(x.get_data())
                        assert_array_equal(x.get_affine(),
                                           expected_image.get_affine())

                    check_mask_func(sel.get_mask(ids_list))
                    check_image_func(sel.get_nifti_image_mask(ids_list))

                    tups = sel.get_voxel_indices(ids_list)
                    tups_mask = np.zeros(expected_mask.shape)
                    for tup in tups:
                        tups_mask[tup] += 1
                    # same voxels set in both, ignoring multiplicity
                    assert_array_equal(expected_mask != 0, tups_mask != 0)
def test_surf_voxel_selection(self):
    """End-to-end surface voxel selection, i/o, and searchlight integration.

    NOTE(review): depends on hard-coded expected voxel counts and on the
    exact sequence of calls; statement order matters.
    """
    vol_shape = (10, 10, 10)
    vol_affine = np.identity(4)
    # 5-unit isotropic voxels
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5

    vg = volgeom.VolGeom(vol_shape, vol_affine)

    density = 10

    outer = surf.generate_sphere(density) * 25. + 15
    inner = surf.generate_sphere(density) * 20. + 15

    vs = volsurf.VolSurfMaximalMapping(vg, outer, inner)

    nv = outer.nvertices

    # select under variety of parameters
    # parameters are distance metric (dijkstra or euclidean),
    # radius, and number of searchlight  centers
    params = [('d', 1., 10), ('d', 1., 50), ('d', 1., 100),
              ('d', 2., 100), ('e', 2., 100), ('d', 2., 100),
              ('d', 20, 100), ('euclidean', 5, None),
              ('dijkstra', 10, None)]

    # function that indicates for which parameters the full test is run
    test_full = lambda x: len(x[0]) > 1 or x[2] == 100

    expected_labs = ['grey_matter_position', 'center_distances']

    voxcount = []
    tested_double_features = False

    for param in params:
        distance_metric, radius, ncenters = param
        # ncenters=None means all nodes are centers
        srcs = range(0, nv, nv // (ncenters or nv))
        sel = surf_voxel_selection.voxel_selection(
            vs, radius, source_surf_nodes=srcs,
            distance_metric=distance_metric)

        # see how many voxels were selected
        vg = sel.volgeom
        datalin = np.zeros((vg.nvoxels, 1))

        mp = sel
        for k, idxs in mp.iteritems():
            if idxs is not None:
                datalin[idxs] = 1

        voxcount.append(np.sum(datalin))

        if test_full(param):
            assert_equal(np.sum(datalin), np.sum(sel.get_mask()))

            assert_true(len('%s%r' % (sel, sel)) > 0)

            # see if voxels containing inner and outer
            # nodes were selected
            for sf in [inner, outer]:
                for k, idxs in mp.iteritems():
                    xyz = np.reshape(sf.vertices[k, :], (1, 3))
                    linidx = vg.xyz2lin(xyz)

                    # only required if xyz is actually within the volume
                    assert_equal(linidx in idxs, vg.contains_lin(linidx))

            # check that it has all the attributes
            labs = sel.aux_keys()

            assert_true(all([lab in labs for lab in expected_labs]))

            if externals.exists('h5py'):
                # some I/O testing
                fd, fn = tempfile.mkstemp('.h5py', 'test')
                os.close(fd)
                h5save(fn, sel)

                sel2 = h5load(fn)
                os.remove(fn)

                assert_equal(sel, sel2)
            else:
                sel2 = sel

            # check that mask is OK even after I/O
            assert_array_equal(sel.get_mask(), sel2.get_mask())

            # test I/O with surfaces
            # XXX the @tempfile decorator only supports a single filename
            #     hence this method does not use it
            fd, outerfn = tempfile.mkstemp('outer.asc', 'test')
            os.close(fd)
            fd, innerfn = tempfile.mkstemp('inner.asc', 'test')
            os.close(fd)
            fd, volfn = tempfile.mkstemp('vol.nii', 'test')
            os.close(fd)

            surf.write(outerfn, outer, overwrite=True)
            surf.write(innerfn, inner, overwrite=True)

            img = sel.volgeom.get_empty_nifti_image()
            img.to_filename(volfn)

            sel3 = surf_voxel_selection.run_voxel_selection(
                radius, volfn, innerfn, outerfn,
                source_surf_nodes=srcs,
                distance_metric=distance_metric)

            outer4 = surf.read(outerfn)
            inner4 = surf.read(innerfn)
            vsm4 = vs = volsurf.VolSurfMaximalMapping(vg, inner4, outer4)

            # check that two ways of voxel selection match
            sel4 = surf_voxel_selection.voxel_selection(
                vsm4, radius, source_surf_nodes=srcs,
                distance_metric=distance_metric)

            assert_equal(sel3, sel4)

            os.remove(outerfn)
            os.remove(innerfn)
            os.remove(volfn)

            # compare sel3 with other selection results
            # NOTE: which voxels are precisely selected by sel can be quite
            #       off from those in sel3, as writing the surfaces imposes
            #       rounding errors and the sphere is very symmetric, which
            #       means that different neighboring nodes are selected
            #       to select a certain number of voxels.
            sel3cmp_difference_ratio = [(sel, .2), (sel4, 0.)]
            for selcmp, ratio in sel3cmp_difference_ratio:
                nunion = ndiff = 0

                for k in selcmp.keys():
                    p = set(sel3.get(k))
                    q = set(selcmp.get(k))
                    nunion += len(p.union(q))
                    ndiff += len(p.symmetric_difference(q))

                assert_true(float(ndiff) / float(nunion) <= ratio)

            # check searchlight call
            # as of late Aug 2012, this is with the fancy query engine
            # as implemented by Yarik

            mask = sel.get_mask()
            keys = None if ncenters is None else sel.keys()

            dset_data = np.reshape(np.arange(vg.nvoxels), vg.shape)
            dset_img = nb.Nifti1Image(dset_data, vg.affine)
            dset = fmri_dataset(samples=dset_img, mask=mask)

            qe = queryengine.SurfaceVerticesQueryEngine(
                sel,
                # you can optionally add additional
                # information about each near-disk-voxels
                add_fa=['center_distances', 'grey_matter_position'])

            # test i/o ensuring that when loading it is still trained
            if externals.exists('h5py'):
                fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
                os.close(fd)
                h5save(qefn, qe)
                qe = h5load(qefn)
                os.remove(qefn)

            assert_false('ERROR' in repr(qe))  # to check if repr works
            voxelcounter = _Voxel_Count_Measure()
            searchlight = Searchlight(
                voxelcounter, queryengine=qe, roi_ids=keys, nproc=1,
                enable_ca=['roi_feature_ids', 'roi_center_ids'])
            sl_dset = searchlight(dset)

            selected_count = sl_dset.samples[0, :]
            mp = sel
            for i, k in enumerate(sel.keys()):
                # check that number of selected voxels matches
                assert_equal(selected_count[i], len(mp[k]))

            assert_equal(searchlight.ca.roi_center_ids, sel.keys())

            assert_array_equal(sl_dset.fa['center_ids'], qe.ids)

            # check nearest node is *really* the nearest node

            allvx = sel.get_targets()
            intermediate = outer * .5 + inner * .5

            for vx in allvx:
                nearest = sel.target2nearest_source(vx)

                xyz = intermediate.vertices[nearest, :]
                sqsum = np.sum((xyz - intermediate.vertices)**2, 1)

                idx = np.argmin(sqsum)
                assert_equal(idx, nearest)

            if not tested_double_features:  # test only once
                # see if we have multiple features for the same voxel, we
                # would get them all
                dset1 = dset.copy()
                dset1.fa['dset'] = [1]
                dset2 = dset.copy()
                dset2.fa['dset'] = [2]
                dset_ = hstack((dset1, dset2), 'drop_nonunique')
                dset_.sa = dset1.sa
                # dset_.a.imghdr = dset1.a.imghdr
                assert_true('imghdr' in dset_.a.keys())
                assert_equal(dset_.a['imghdr'].value,
                             dset1.a['imghdr'].value)
                roi_feature_ids = searchlight.ca.roi_feature_ids
                sl_dset_ = searchlight(dset_)
                # and we should get twice the counts
                assert_array_equal(sl_dset_.samples, sl_dset.samples * 2)

                # compare old and new roi_feature_ids
                assert (len(roi_feature_ids) ==
                        len(searchlight.ca.roi_feature_ids))
                nfeatures = dset.nfeatures
                for old, new in zip(roi_feature_ids,
                                    searchlight.ca.roi_feature_ids):
                    # each new ids should comprise of old ones + (old + nfeatures)
                    # since we hstack'ed two datasets
                    assert_array_equal(
                        np.hstack([(x, x + nfeatures) for x in old]), new)
                tested_double_features = True

    # check whether number of voxels were selected is as expected
    expected_voxcount = [22, 93, 183, 183, 183, 183, 183, 183, 183]

    assert_equal(voxcount, expected_voxcount)
def test_surf(self, temp_fn):
    """Some simple testing with surfaces

    ``temp_fn`` is a temporary filename supplied by the test harness.
    Exercises sphere/cube/plane/bar generation, topology checks, nearest
    neighbors, dijkstra distances, and ascii/hdf5 i/o round trips.
    """
    s = surf.generate_sphere(10)

    # density-10 sphere: 10**2 + 2 vertices, 200 faces
    assert_true(s.nvertices == 102)
    assert_true(s.nfaces == 200)

    v = s.vertices
    f = s.faces

    assert_true(v.shape == (102, 3))
    assert_true(f.shape == (200, 3))

    # another surface
    t = s * 10 + 2
    assert_true(t.same_topology(s))
    assert_array_equal(f, t.faces)

    assert_array_equal(v * 10 + 2, t.vertices)

    # allow updating, but should not affect original array
    # CHECKME: maybe we want to throw an exception instead
    assert_true((v * 10 + 2 == t.vertices).all().all())
    assert_true((s.vertices * 10 + 2 == t.vertices).all().all())

    # a few checks on vertices and nodes
    v_check = {40: (0.86511144, -0.28109175, -0.41541501),
               10: (0.08706015, -0.26794358, -0.95949297)}
    f_check = {10: (7, 8, 1), 40: (30, 31, 21)}

    vf_checks = [(v_check, lambda x: x.vertices),
                 (f_check, lambda x: x.faces)]

    eps = .0001
    for cmap, f in vf_checks:
        for k, v in cmap.iteritems():
            surfval = f(s)[k, :]
            assert_true((abs(surfval - v) < eps).all())

    # make sure same topology fails with different topology
    u = surf.generate_cube()
    assert_false(u.same_topology(s))

    # check that neighbours are computed correctly
    # even if we nuke the topology afterwards
    for _ in [0, 1]:
        nbrs = s.neighbors
        n_check = [(0, 96, 0.284629),
                   (40, 39, 0.56218349),
                   (100, 99, 0.1741202)]
        for i, j, k in n_check:
            assert_true(abs(nbrs[i][j] - k) < eps)

    def assign_zero(x):
        # helper: attempt an in-place topology mutation
        x.faces[:, :] = 0
        return None

    # mutating the faces array must be rejected
    assert_raises((ValueError, RuntimeError), assign_zero, s)

    # see if mapping to high res works
    h = surf.generate_sphere(40)

    low2high = s.map_to_high_resolution_surf(h, .1)
    partmap = {7: 141, 8: 144, 9: 148, 10: 153, 11: 157, 12: 281}
    for k, v in partmap.iteritems():
        assert_true(low2high[k] == v)

    # ensure that slow implementation gives same results as fast one
    low2high_slow = s.map_to_high_resolution_surf(h, .1)
    for k, v in low2high.iteritems():
        assert_true(low2high_slow[k] == v)

    # should fail if epsilon is too small
    assert_raises(ValueError,
                  lambda x: x.map_to_high_resolution_surf(h, .01), s)

    n2f = s.node2faces
    for i in xrange(s.nvertices):
        nf = [10] if i < 2 else [5, 6]  # number of faces expected
        assert_true(len(n2f[i]) in nf)

    # test dijkstra distances
    ds2 = s.dijkstra_distance(2)
    some_ds = {0: 3.613173280799, 1: 0.2846296765,
               2: 0., 52: 1.87458018,
               53: 2.0487004817, 54: 2.222820777,
               99: 3.32854360, 100: 3.328543604,
               101: 3.3285436042}

    eps = np.finfo('f').eps
    for k, v in some_ds.iteritems():
        assert_true(abs(v - ds2[k]) < eps)

    # test I/O (through ascii files)
    surf.write(temp_fn, s, overwrite=True)
    s2 = surf.read(temp_fn)

    # test i/o and ensure that the loaded instance is trained
    if externals.exists('h5py'):
        h5save(temp_fn, s2)
        s2 = h5load(temp_fn)

    assert_array_almost_equal(s.vertices, s2.vertices, 4)
    assert_array_almost_equal(s.faces, s2.faces, 4)

    # test plane (new feature end of August 2012)
    s3 = surf.generate_plane((0, 0, 0), (2, 0, 0), (0, 1, 0), 10, 20)
    assert_equal(s3.nvertices, 200)
    assert_equal(s3.nfaces, 342)
    assert_array_almost_equal(s3.vertices[-1, :], np.array([18., 19, 0.]))
    assert_array_almost_equal(s3.faces[-1, :], np.array([199, 198, 179]))

    # test bar
    p, q = (0, 0, 0), (100, 0, 0)
    s4 = surf.generate_bar(p, q, 10, 12)
    assert_equal(s4.nvertices, 26)
    assert_equal(s4.nfaces, 48)
def test_volume_mask_dict(self):
    """Nearest-source lookups of a volume mask dictionary.

    NOTE(review): the final assert_false combines several conditions; it
    looks intended to reject only the case where everything holds yet the
    distance disagrees -- confirm against upstream before refactoring.
    """
    # also tests the outside_node_margin feature
    sh = (10, 10, 10)
    msk = np.zeros(sh)
    # every other slice along the first axis is in-mask
    for i in xrange(0, sh[0], 2):
        msk[i, :, :] = 1

    vol_affine = np.identity(4)
    # 2-unit isotropic voxels
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 2

    vg = volgeom.VolGeom(sh, vol_affine, mask=msk)

    density = 10

    outer = surf.generate_sphere(density) * 10. + 5
    inner = surf.generate_sphere(density) * 5. + 5
    intermediate = outer * .5 + inner * .5
    xyz = intermediate.vertices

    radius = 50

    outside_node_margins = [None, 0, 100., np.inf, True]
    # with margins 100./inf/True every node is kept; otherwise 87
    expected_center_count = [87] * 2 + [intermediate.nvertices] * 3
    for k, outside_node_margin in enumerate(outside_node_margins):

        sel = surf_voxel_selection.run_voxel_selection(
            radius, vg, inner, outer,
            outside_node_margin=outside_node_margin)

        assert_equal(intermediate, sel.source)
        assert_equal(len(sel.keys()), expected_center_count[k])
        assert_true(set(sel.aux_keys()).issubset(
            set(['center_distances', 'grey_matter_position'])))

        msk_lin = msk.ravel()
        sel_msk_lin = sel.get_mask().ravel()
        for i in xrange(vg.nvoxels):
            if msk_lin[i]:
                src = sel.target2nearest_source(i)
                # a masked voxel has a nearest source iff it is in the
                # selection mask
                assert_false((src is None) ^ (sel_msk_lin[i] == 0))

                if src is None:
                    continue

                # index of node nearest to voxel i
                src_anywhere = sel.target2nearest_source(
                    i, fallback_euclidean_distance=True)

                # coordinates of node nearest to voxel i
                xyz_src = xyz[src_anywhere]

                # coordinates of voxel i
                xyz_trg = vg.lin2xyz(np.asarray([i]))

                # distance between node nearest to voxel i, and voxel i
                # this should be the smallest distancer
                d = volgeom.distance(np.reshape(xyz_src, (1, 3)),
                                     xyz_trg)

                # distances between all nodes and voxel i
                ds = volgeom.distance(xyz, xyz_trg)

                # order of the distances
                is_ds = np.argsort(ds.ravel())

                # go over all the nodes
                # require that the node is in the volume
                # mask

                # index of node nearest to voxel i
                ii = np.argmin(ds)

                xyz_min = xyz[ii]
                lin_min = vg.xyz2lin([xyz_min])

                # linear index of voxel that contains xyz_src
                lin_src = vg.xyz2lin(np.reshape(xyz_src, (1, 3)))

                # when using multi-core support,
                # pickling and unpickling can reduce the precision
                # a little bit, causing rounding errors
                eps = 1e-14

                delta = np.abs(ds[ii] - d)
                assert_false(delta > eps and ii in sel and
                             i in sel[ii] and
                             vg.contains_lin(lin_min))
def test_mask_with_keys(self):
    """Test masked-nifti-image generation for voxel/vertex query engines.

    Builds a dataset with a striped volume mask, runs voxel selection,
    and checks that querying masked images by feature ids returns the
    expected number of voxels (or raises ``KeyError`` for unknown ids).
    """
    vol_shape = (10, 10, 10, 3)
    vol_affine = np.identity(4)
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    data = np.random.normal(size=vol_shape)
    # zero out every second inner slice along the second axis
    msk = np.ones(vol_shape[:3])
    msk[:, 1:-1:2, :] = 0

    ni_data = nb.Nifti1Image(data, vol_affine)
    ni_msk = nb.Nifti1Image(msk, vol_affine)

    ds = fmri_dataset(ni_data, mask=ni_msk)

    sphere_density = 20
    outer = surf.generate_sphere(sphere_density) * 10. + 5
    inner = surf.generate_sphere(sphere_density) * 7. + 5

    radius = 10
    sel = surf_voxel_selection.run_voxel_selection(radius, ds,
                                                   inner, outer)

    # in the mapping below:
    # (tup: None) means that tup as input should raise a KeyError
    # (tup: i) with i an int means that tup as input should return i
    # elements
    qe_ids2nvoxels = {SurfaceVoxelsQueryEngine:
                      {(1, 2, 3): 13,
                       tuple(np.arange(0, 200, 2)): 82,
                       (601,): None,
                       None: 126},
                      SurfaceVerticesQueryEngine:
                      {(1, 2, 3): None,
                       (205, 209, 210, 214): 36,
                       None: 126}}

    for constructor, ids2nfeatures in qe_ids2nvoxels.iteritems():
        qe = constructor(sel)
        qe.train(ds)

        # passing all ids must reproduce the default masked image
        img = qe.get_masked_nifti_image()
        assert_array_equal(img.get_data(),
                           qe.get_masked_nifti_image(qe.ids).get_data())

        img_getter = qe.get_masked_nifti_image

        for ids, nfeatures in ids2nfeatures.iteritems():
            ids_list = ids if ids is None else list(ids)
            if nfeatures is None and ids is not None:
                # unknown ids must raise
                assert_raises(KeyError, img_getter, ids_list)
            else:
                img = img_getter(ids_list)
                nfeatures_found = np.sum(img.get_data())
                assert_equal(nfeatures, nfeatures_found)

                if constructor is SurfaceVerticesQueryEngine:
                    expected_image = qe.get_masked_nifti_image(ids_list)
                    expected_mask = expected_image.get_data()

                    def check_mask_func(x):
                        assert_array_equal(expected_mask, x)

                    def check_image_func(x):
                        # BUG FIX: the original chained the two checks as
                        # 'check_mask_func(...) and assert_array_equal(...)';
                        # assert_array_equal returns None (falsy), so the
                        # affine comparison was never evaluated. Run both
                        # assertions unconditionally instead.
                        check_mask_func(x.get_data())
                        assert_array_equal(x.affine, expected_image.affine)

                    check_mask_func(sel.get_mask(ids_list))
                    check_image_func(sel.get_nifti_image_mask(ids_list))

                    # voxel index tuples must cover exactly the masked voxels
                    tups = sel.get_voxel_indices(ids_list)
                    tups_mask = np.zeros(expected_mask.shape)
                    for tup in tups:
                        tups_mask[tup] += 1
                    assert_array_equal(expected_mask != 0, tups_mask != 0)
def test_voxel_selection_alternative_calls(self):
    """Check that equivalent voxel-selection call styles agree.

    Tests a multitude of different searchlight calls that all should
    yield exactly the same results.  Calls differ by whether the
    arguments are filenames or data objects, whether values are
    specified explicitly or set to the default implicitly (using None),
    and by different calls to run the voxel selection.

    This method does not test for mask functionality.

    NOTE(review): a second definition of this method appears later in
    the file; in Python the later one silently shadows this copy.
    """
    # define the volume
    vol_shape = (10, 10, 10, 3)
    vol_affine = np.identity(4)
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5

    # four versions: array, nifti image, file name, fmri dataset
    volarr = np.ones(vol_shape)
    volimg = nb.Nifti1Image(volarr, vol_affine)

    # There is a detected problem with elderly NumPy's (e.g. 1.6.1
    # on precise on travis) leading to segfaults while operating
    # on memmapped volumes being forwarded to pprocess.
    # Thus just making it compressed volume for those cases
    if externals.exists('pprocess') and \
            externals.versions['numpy'] < '1.6.2':
        suf = '.gz'
    else:
        suf = ''

    fd, volfn = tempfile.mkstemp('vol.nii' + suf, 'test')
    os.close(fd)
    volimg.to_filename(volfn)
    volds = fmri_dataset(volfn)

    fd, volfngz = tempfile.mkstemp('vol.nii.gz', 'test')
    os.close(fd)
    volimg.to_filename(volfngz)
    voldsgz = fmri_dataset(volfngz)

    # make the surfaces
    sphere_density = 10

    # two versions: Surface and file name
    outer = surf.generate_sphere(sphere_density) * 25. + 15
    inner = surf.generate_sphere(sphere_density) * 20. + 15
    intermediate = inner * .5 + outer * .5
    nv = outer.nvertices

    fd, outerfn = tempfile.mkstemp('outer.asc', 'test')
    os.close(fd)
    fd, innerfn = tempfile.mkstemp('inner.asc', 'test')
    os.close(fd)
    fd, intermediatefn = tempfile.mkstemp('intermediate.asc', 'test')
    os.close(fd)

    for s, fn in zip([outer, inner, intermediate],
                     [outerfn, innerfn, intermediatefn]):
        surf.write(fn, s, overwrite=True)

    # searchlight radius (in mm)
    radius = 10.

    # dataset used to run searchlight on
    ds = fmri_dataset(volfn)

    # simple voxel counter (run for each searchlight position)
    m = _Voxel_Count_Measure()

    # number of voxels expected in each searchlight
    r_expected = np.array([[18, 9, 10, 9, 9, 9, 9, 10, 9, 9,
                            9, 9, 11, 11, 11, 11, 10, 10, 10, 9,
                            10, 11, 9, 10, 10, 8, 7, 8, 8, 8,
                            9, 10, 12, 12, 11, 7, 7, 8, 5, 9,
                            11, 11, 12, 12, 9, 5, 8, 7, 7, 12,
                            12, 13, 12, 12, 7, 7, 8, 5, 9, 12,
                            12, 13, 11, 9, 5, 8, 7, 7, 11, 12,
                            12, 11, 12, 10, 10, 11, 9, 11, 12, 12,
                            12, 12, 16, 13, 16, 16, 16, 17, 15, 17,
                            17, 17, 16, 16, 16, 18, 16, 16, 16, 16,
                            18, 16]])

    params = dict(intermediate_=(intermediate, intermediatefn, None),
                  center_nodes_=(None, range(nv)),
                  volume_=(volimg, volfn, volds, volfngz, voldsgz),
                  surf_src_=('filename', 'surf'),
                  volume_mask_=(None, True, 0, 2),
                  call_method_=('qe', 'rvs', 'gam'))

    combis = _cartprod(params)  # compute all possible combinations

    combistep = 17  # 173
    # some fine prime number to speed things up
    # if this value becomes too big then not all
    # cases are covered
    # the unit test tests itself whether all values
    # occur at least once

    tested_params = dict()

    def val2str(x):
        return '%r:%r' % (type(x), x)

    for i in xrange(0, len(combis), combistep):
        combi = combis[i]

        intermediate_ = combi['intermediate_']
        center_nodes_ = combi['center_nodes_']
        volume_ = combi['volume_']
        surf_src_ = combi['surf_src_']
        volume_mask_ = combi['volume_mask_']
        call_method_ = combi['call_method_']

        # keep track of which values were used -
        # so that this unit test tests itself
        for k in combi.keys():
            if not k in tested_params:
                tested_params[k] = set()
            tested_params[k].add(val2str(combi[k]))

        # NOTE(review): the labels look swapped ('filename' selects the
        # Surface objects, 'surf' the file names), but both variants are
        # exercised and accepted by the callees; preserved as-is to keep
        # the sampled combinations identical -- confirm intent upstream.
        if surf_src_ == 'filename':
            s_i, s_m, s_o = inner, intermediate, outer
        elif surf_src_ == 'surf':
            s_i, s_m, s_o = innerfn, intermediatefn, outerfn
        else:
            raise ValueError('this should not happen')

        if call_method_ == 'qe':
            # use the fancy query engine wrapper
            qe = disc_surface_queryengine(
                radius, volume_, s_i, s_o, s_m,
                source_surf_nodes=center_nodes_,
                volume_mask=volume_mask_)
            sl = Searchlight(m, queryengine=qe)
            r = sl(ds).samples

        elif call_method_ == 'rvs':
            # use query-engine but build the
            # ingredients by hand
            vg = volgeom.from_any(volume_, volume_mask_)
            vs = volsurf.VolSurfMaximalMapping(vg, s_i, s_o)
            sel = surf_voxel_selection.voxel_selection(
                vs, radius, source_surf=s_m,
                source_surf_nodes=center_nodes_)
            qe = SurfaceVerticesQueryEngine(sel)
            sl = Searchlight(m, queryengine=qe)
            r = sl(ds).samples

        elif call_method_ == 'gam':
            # build everything from the ground up
            vg = volgeom.from_any(volume_, volume_mask_)
            vs = volsurf.VolSurfMaximalMapping(vg, s_i, s_o)
            sel = surf_voxel_selection.voxel_selection(
                vs, radius, source_surf=s_m,
                source_surf_nodes=center_nodes_)
            mp = sel

            ks = sel.keys()
            nk = len(ks)
            r = np.zeros((1, nk))
            # use 'j' rather than re-using the outer loop index 'i'
            for j, k in enumerate(ks):
                r[0, j] = len(mp[k])

        # check if result is as expected
        assert_array_equal(r_expected, r)

    # clean up; explicit loop instead of map() so this also works
    # under Python 3, where map() is lazy and would remove nothing
    all_fns = [volfn, volfngz, outerfn, innerfn, intermediatefn]
    for fn in all_fns:
        os.remove(fn)

    for k, vs in params.iteritems():
        if not k in tested_params:
            raise ValueError("Missing key: %r" % k)
        for v in vs:
            vstr = val2str(v)
            if not vstr in tested_params[k]:
                # BUG FIX: the original interpolated tested_params[k]
                # (the values that WERE seen) instead of the value that
                # is actually missing
                raise ValueError("Missing value %r for %s" % (vstr, k))
def test_surf_voxel_selection(self):
    """Exercise surface-based voxel selection across parameter settings.

    Runs voxel selection for several (distance metric, radius, number of
    centers) combinations, checks voxel counts, selection I/O through
    HDF5 and ASCII surfaces, and searchlight integration via
    SurfaceVerticesQueryEngine.
    """
    vol_shape = (10, 10, 10)
    vol_affine = np.identity(4)
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5
    vg = volgeom.VolGeom(vol_shape, vol_affine)

    density = 10

    # concentric spheres as outer/inner grey-matter boundaries
    outer = surf.generate_sphere(density) * 25. + 15
    inner = surf.generate_sphere(density) * 20. + 15

    vs = volsurf.VolSurfMaximalMapping(vg, outer, inner)

    nv = outer.nvertices

    # select under variety of parameters
    # parameters are distance metric (dijkstra or euclidean),
    # radius, and number of searchlight centers
    params = [('d', 1., 10), ('d', 1., 50), ('d', 1., 100),
              ('d', 2., 100), ('e', 2., 100), ('d', 2., 100),
              ('d', 20, 100), ('euclidean', 5, None),
              ('dijkstra', 10, None)]

    # function that indicates for which parameters the full test is run
    test_full = lambda x: len(x[0]) > 1 or x[2] == 100

    expected_labs = ['grey_matter_position',
                     'center_distances']

    voxcount = []
    tested_double_features = False
    for param in params:
        distance_metric, radius, ncenters = param
        # evenly spaced center nodes (all nodes when ncenters is None)
        srcs = range(0, nv, nv // (ncenters or nv))
        sel = surf_voxel_selection.voxel_selection(
            vs, radius,
            source_surf_nodes=srcs,
            distance_metric=distance_metric)

        # see how many voxels were selected
        vg = sel.volgeom
        datalin = np.zeros((vg.nvoxels, 1))

        mp = sel
        for k, idxs in mp.iteritems():
            if idxs is not None:
                datalin[idxs] = 1

        voxcount.append(np.sum(datalin))

        if test_full(param):
            assert_equal(np.sum(datalin), np.sum(sel.get_mask()))

            # str/repr must not fail
            assert_true(len('%s%r' % (sel, sel)) > 0)

            # see if voxels containing inner and outer
            # nodes were selected
            for sf in [inner, outer]:
                for k, idxs in mp.iteritems():
                    xyz = np.reshape(sf.vertices[k, :], (1, 3))
                    linidx = vg.xyz2lin(xyz)

                    # only required if xyz is actually within the volume
                    assert_equal(linidx in idxs, vg.contains_lin(linidx))

            # check that it has all the attributes
            labs = sel.aux_keys()

            assert_true(all([lab in labs for lab in expected_labs]))

            if externals.exists('h5py'):
                # some I/O testing
                fd, fn = tempfile.mkstemp('.h5py', 'test')
                os.close(fd)
                h5save(fn, sel)

                sel2 = h5load(fn)
                os.remove(fn)

                assert_equal(sel, sel2)
            else:
                sel2 = sel

            # check that mask is OK even after I/O
            assert_array_equal(sel.get_mask(), sel2.get_mask())

            # test I/O with surfaces
            # XXX the @tempfile decorator only supports a single filename
            #     hence this method does not use it
            fd, outerfn = tempfile.mkstemp('outer.asc', 'test')
            os.close(fd)
            fd, innerfn = tempfile.mkstemp('inner.asc', 'test')
            os.close(fd)
            fd, volfn = tempfile.mkstemp('vol.nii', 'test')
            os.close(fd)

            surf.write(outerfn, outer, overwrite=True)
            surf.write(innerfn, inner, overwrite=True)

            img = sel.volgeom.get_empty_nifti_image()
            img.to_filename(volfn)

            sel3 = surf_voxel_selection.run_voxel_selection(
                radius, volfn, innerfn, outerfn,
                source_surf_nodes=srcs,
                distance_metric=distance_metric)

            outer4 = surf.read(outerfn)
            inner4 = surf.read(innerfn)
            # NOTE(review): this also rebinds 'vs' used by subsequent
            # loop iterations -- presumably intentional, but confirm
            vsm4 = vs = volsurf.VolSurfMaximalMapping(vg, inner4, outer4)

            # check that two ways of voxel selection match
            sel4 = surf_voxel_selection.voxel_selection(
                vsm4, radius,
                source_surf_nodes=srcs,
                distance_metric=distance_metric)

            assert_equal(sel3, sel4)

            os.remove(outerfn)
            os.remove(innerfn)
            os.remove(volfn)

            # compare sel3 with other selection results
            # NOTE: which voxels are precisely selected by sel can be quite
            #       off from those in sel3, as writing the surfaces imposes
            #       rounding errors and the sphere is very symmetric, which
            #       means that different neighboring nodes are selected
            #       to select a certain number of voxels.
            sel3cmp_difference_ratio = [(sel, .2), (sel4, 0.)]
            for selcmp, ratio in sel3cmp_difference_ratio:
                nunion = ndiff = 0

                for k in selcmp.keys():
                    p = set(sel3.get(k))
                    q = set(selcmp.get(k))
                    nunion += len(p.union(q))
                    ndiff += len(p.symmetric_difference(q))

                assert_true(float(ndiff) / float(nunion) <= ratio)

            # check searchlight call
            # as of late Aug 2012, this is with the fancy query engine
            # as implemented by Yarik

            mask = sel.get_mask()
            keys = None if ncenters is None else sel.keys()

            dset_data = np.reshape(np.arange(vg.nvoxels), vg.shape)
            dset_img = nb.Nifti1Image(dset_data, vg.affine)
            dset = fmri_dataset(samples=dset_img, mask=mask)

            qe = queryengine.SurfaceVerticesQueryEngine(
                sel,
                # you can optionally add additional
                # information about each near-disk-voxels
                add_fa=['center_distances',
                        'grey_matter_position'])

            # test i/o ensuring that when loading it is still trained
            if externals.exists('h5py'):
                fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
                os.close(fd)
                h5save(qefn, qe)
                qe = h5load(qefn)
                os.remove(qefn)

            assert_false('ERROR' in repr(qe))  # to check if repr works
            voxelcounter = _Voxel_Count_Measure()
            searchlight = Searchlight(
                voxelcounter, queryengine=qe, roi_ids=keys, nproc=1,
                enable_ca=['roi_feature_ids', 'roi_center_ids'])
            sl_dset = searchlight(dset)

            selected_count = sl_dset.samples[0, :]
            mp = sel
            for i, k in enumerate(sel.keys()):
                # check that number of selected voxels matches
                assert_equal(selected_count[i], len(mp[k]))

            assert_equal(searchlight.ca.roi_center_ids, sel.keys())

            assert_array_equal(sl_dset.fa['center_ids'], qe.ids)

            # check nearest node is *really* the nearest node
            allvx = sel.get_targets()
            intermediate = outer * .5 + inner * .5

            for vx in allvx:
                nearest = sel.target2nearest_source(vx)

                xyz = intermediate.vertices[nearest, :]
                sqsum = np.sum((xyz - intermediate.vertices) ** 2, 1)

                idx = np.argmin(sqsum)
                assert_equal(idx, nearest)

            if not tested_double_features:  # test only once
                # see if we have multiple features for the same voxel,
                # we would get them all
                dset1 = dset.copy()
                dset1.fa['dset'] = [1]
                dset2 = dset.copy()
                dset2.fa['dset'] = [2]
                dset_ = hstack((dset1, dset2), 'drop_nonunique')
                dset_.sa = dset1.sa
                # dset_.a.imghdr = dset1.a.imghdr
                assert_true('imghdr' in dset_.a.keys())
                assert_equal(dset_.a['imghdr'].value,
                             dset1.a['imghdr'].value)
                roi_feature_ids = searchlight.ca.roi_feature_ids
                sl_dset_ = searchlight(dset_)
                # and we should get twice the counts
                assert_array_equal(sl_dset_.samples,
                                   sl_dset.samples * 2)

                # compare old and new roi_feature_ids
                assert(len(roi_feature_ids) ==
                       len(searchlight.ca.roi_feature_ids))
                nfeatures = dset.nfeatures
                for old, new in zip(roi_feature_ids,
                                    searchlight.ca.roi_feature_ids):
                    # each new ids should comprise of old ones
                    # + (old + nfeatures)
                    # since we hstack'ed two datasets
                    assert_array_equal(
                        np.hstack([(x, x + nfeatures) for x in old]),
                        new)
                tested_double_features = True

    # check whether number of voxels were selected is as expected
    expected_voxcount = [22, 93, 183, 183, 183, 183, 183, 183, 183]

    assert_equal(voxcount, expected_voxcount)
def test_surf(self, temp_fn):
    """Some simple testing with surfaces

    Covers sphere/cube/plane/bar generation, topology comparison,
    neighbor and Dijkstra distance computation, high-resolution mapping,
    and ASCII/HDF5 I/O round-trips via ``temp_fn``.
    """
    s = surf.generate_sphere(10)

    # a density-10 sphere has 102 vertices and 200 faces
    assert_true(s.nvertices == 102)
    assert_true(s.nfaces == 200)

    v = s.vertices
    f = s.faces

    assert_true(v.shape == (102, 3))
    assert_true(f.shape == (200, 3))

    # another surface
    t = s * 10 + 2
    assert_true(t.same_topology(s))
    assert_array_equal(f, t.faces)

    assert_array_equal(v * 10 + 2, t.vertices)

    # allow updating, but should not affect original array
    # CHECKME: maybe we want to throw an exception instead
    assert_true((v * 10 + 2 == t.vertices).all().all())
    assert_true((s.vertices * 10 + 2 == t.vertices).all().all())

    # a few checks on vertices and nodes
    v_check = {40: (0.86511144, -0.28109175, -0.41541501),
               10: (0.08706015, -0.26794358, -0.95949297)}
    f_check = {10: (7, 8, 1), 40: (30, 31, 21)}

    vf_checks = [(v_check, lambda x: x.vertices),
                 (f_check, lambda x: x.faces)]

    eps = .0001
    for cmap, f in vf_checks:
        for k, v in cmap.iteritems():
            surfval = f(s)[k, :]
            assert_true((abs(surfval - v) < eps).all())

    # make sure same topology fails with different topology
    u = surf.generate_cube()
    assert_false(u.same_topology(s))

    # check that neighbours are computed correctly
    # even if we nuke the topology afterwards
    for _ in [0, 1]:
        nbrs = s.neighbors
        n_check = [(0, 96, 0.284629),
                   (40, 39, 0.56218349),
                   (100, 99, 0.1741202)]

        for i, j, k in n_check:
            assert_true(abs(nbrs[i][j] - k) < eps)

    def assign_zero(x):
        # attempting to overwrite faces in place must be rejected
        x.faces[:, :] = 0
        return None

    assert_raises((ValueError, RuntimeError), assign_zero, s)

    # see if mapping to high res works
    h = surf.generate_sphere(40)

    low2high = s.map_to_high_resolution_surf(h, .1)
    partmap = {7: 141, 8: 144, 9: 148, 10: 153, 11: 157, 12: 281}
    for k, v in partmap.iteritems():
        assert_true(low2high[k] == v)

    # ensure that slow implementation gives same results as fast one
    # NOTE(review): both calls are identical here -- presumably the
    # slow/fast dispatch happens inside the method; confirm
    low2high_slow = s.map_to_high_resolution_surf(h, .1)
    for k, v in low2high.iteritems():
        assert_true(low2high_slow[k] == v)

    # should fail if epsilon is too small
    assert_raises(ValueError,
                  lambda x: x.map_to_high_resolution_surf(h, .01), s)

    n2f = s.node2faces
    for i in xrange(s.nvertices):
        nf = [10] if i < 2 else [5, 6]  # number of faces expected
        assert_true(len(n2f[i]) in nf)

    # test dijkstra distances
    ds2 = s.dijkstra_distance(2)
    some_ds = {0: 3.613173280799, 1: 0.2846296765,
               2: 0., 52: 1.87458018,
               53: 2.0487004817, 54: 2.222820777,
               99: 3.32854360, 100: 3.328543604,
               101: 3.3285436042}

    eps = np.finfo('f').eps
    for k, v in some_ds.iteritems():
        assert_true(abs(v - ds2[k]) < eps)

    # test I/O (through ascii files)
    surf.write(temp_fn, s, overwrite=True)
    s2 = surf.read(temp_fn)

    # test i/o and ensure that the loaded instance is trained
    if externals.exists('h5py'):
        h5save(temp_fn, s2)
        s2 = h5load(temp_fn)

    assert_array_almost_equal(s.vertices, s2.vertices, 4)
    assert_array_almost_equal(s.faces, s2.faces, 4)

    # test plane (new feature end of August 2012)
    s3 = surf.generate_plane((0, 0, 0), (2, 0, 0), (0, 1, 0), 10, 20)
    assert_equal(s3.nvertices, 200)
    assert_equal(s3.nfaces, 342)
    assert_array_almost_equal(s3.vertices[-1, :],
                              np.array([18., 19, 0.]))
    assert_array_almost_equal(s3.faces[-1, :],
                              np.array([199, 198, 179]))

    # test bar
    p, q = (0, 0, 0), (100, 0, 0)
    s4 = surf.generate_bar(p, q, 10, 12)
    assert_equal(s4.nvertices, 26)
    assert_equal(s4.nfaces, 48)
def test_volume_mask_dict(self):
    """Test voxel selection on a striped volume mask.

    Also tests the ``outside_node_margin`` feature: nodes outside the
    volume should only become searchlight centers when the margin
    permits it.

    NOTE(review): an identical definition of this method also appears
    earlier in the file; this later copy is the one Python keeps.
    """
    # also tests the outside_node_margin feature
    sh = (10, 10, 10)
    # mask selects every second slice along the first axis
    msk = np.zeros(sh)
    for i in xrange(0, sh[0], 2):
        msk[i, :, :] = 1

    # isotropic 2mm voxels
    vol_affine = np.identity(4)
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 2

    vg = volgeom.VolGeom(sh, vol_affine, mask=msk)

    density = 10

    # concentric spheres as outer/inner grey-matter boundaries
    outer = surf.generate_sphere(density) * 10. + 5
    inner = surf.generate_sphere(density) * 5. + 5
    intermediate = outer * .5 + inner * .5
    xyz = intermediate.vertices

    radius = 50

    # None/0 restrict centers to in-volume nodes (87 of them);
    # large/True margins admit every node
    outside_node_margins = [None, 0, 100., np.inf, True]
    expected_center_count = [87] * 2 + [intermediate.nvertices] * 3
    for k, outside_node_margin in enumerate(outside_node_margins):
        sel = surf_voxel_selection.run_voxel_selection(
            radius, vg, inner, outer,
            outside_node_margin=outside_node_margin)

        assert_equal(intermediate, sel.source)
        assert_equal(len(sel.keys()), expected_center_count[k])
        assert_true(set(sel.aux_keys()).issubset(
            set(['center_distances', 'grey_matter_position'])))

        msk_lin = msk.ravel()
        sel_msk_lin = sel.get_mask().ravel()
        for i in xrange(vg.nvoxels):
            if msk_lin[i]:
                src = sel.target2nearest_source(i)
                # a masked voxel has a nearest source iff it is in the
                # selection mask (XOR must be false)
                assert_false((src is None) ^ (sel_msk_lin[i] == 0))

                if src is None:
                    continue

                # index of node nearest to voxel i
                src_anywhere = sel.target2nearest_source(
                    i, fallback_euclidean_distance=True)

                # coordinates of node nearest to voxel i
                xyz_src = xyz[src_anywhere]

                # coordinates of voxel i
                xyz_trg = vg.lin2xyz(np.asarray([i]))

                # distance between node nearest to voxel i, and voxel i
                # this should be the smallest distancer
                d = volgeom.distance(np.reshape(xyz_src, (1, 3)),
                                     xyz_trg)

                # distances between all nodes and voxel i
                ds = volgeom.distance(xyz, xyz_trg)

                # order of the distances
                is_ds = np.argsort(ds.ravel())

                # go over all the nodes
                # require that the node is in the volume
                # mask

                # index of node nearest to voxel i
                ii = np.argmin(ds)

                xyz_min = xyz[ii]
                lin_min = vg.xyz2lin([xyz_min])

                # linear index of voxel that contains xyz_src
                lin_src = vg.xyz2lin(np.reshape(xyz_src, (1, 3)))

                # when using multi-core support,
                # pickling and unpickling can reduce the precision
                # a little bit, causing rounding errors
                eps = 1e-14

                delta = np.abs(ds[ii] - d)
                # NOTE(review): this asserts the *conjunction* is false;
                # confirm it is not meant to be assert_false(delta > eps)
                # guarded by the remaining conditions
                assert_false(delta > eps and
                             ii in sel and
                             i in sel[ii] and
                             vg.contains_lin(lin_min))
def test_voxel_selection_alternative_calls(self):
    """Check that equivalent voxel-selection call styles agree.

    Tests a multitude of different searchlight calls that all should
    yield exactly the same results.  Calls differ by whether the
    arguments are filenames or data objects, whether values are
    specified explicitly or set to the default implicitly (using None),
    and by different calls to run the voxel selection.

    This method does not test for mask functionality.

    NOTE(review): an identical definition of this method also appears
    earlier in the file; this later copy is the one Python keeps.
    """
    # define the volume
    vol_shape = (10, 10, 10, 3)
    vol_affine = np.identity(4)
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5

    # four versions: array, nifti image, file name, fmri dataset
    volarr = np.ones(vol_shape)
    volimg = nb.Nifti1Image(volarr, vol_affine)

    # There is a detected problem with elderly NumPy's (e.g. 1.6.1
    # on precise on travis) leading to segfaults while operating
    # on memmapped volumes being forwarded to pprocess.
    # Thus just making it compressed volume for those cases
    if externals.exists('pprocess') and \
            externals.versions['numpy'] < '1.6.2':
        suf = '.gz'
    else:
        suf = ''

    fd, volfn = tempfile.mkstemp('vol.nii' + suf, 'test')
    os.close(fd)
    volimg.to_filename(volfn)
    volds = fmri_dataset(volfn)

    fd, volfngz = tempfile.mkstemp('vol.nii.gz', 'test')
    os.close(fd)
    volimg.to_filename(volfngz)
    voldsgz = fmri_dataset(volfngz)

    # make the surfaces
    sphere_density = 10

    # two versions: Surface and file name
    outer = surf.generate_sphere(sphere_density) * 25. + 15
    inner = surf.generate_sphere(sphere_density) * 20. + 15
    intermediate = inner * .5 + outer * .5
    nv = outer.nvertices

    fd, outerfn = tempfile.mkstemp('outer.asc', 'test')
    os.close(fd)
    fd, innerfn = tempfile.mkstemp('inner.asc', 'test')
    os.close(fd)
    fd, intermediatefn = tempfile.mkstemp('intermediate.asc', 'test')
    os.close(fd)

    for s, fn in zip([outer, inner, intermediate],
                     [outerfn, innerfn, intermediatefn]):
        surf.write(fn, s, overwrite=True)

    # searchlight radius (in mm)
    radius = 10.

    # dataset used to run searchlight on
    ds = fmri_dataset(volfn)

    # simple voxel counter (run for each searchlight position)
    m = _Voxel_Count_Measure()

    # number of voxels expected in each searchlight
    r_expected = np.array([[18, 9, 10, 9, 9, 9, 9, 10, 9, 9,
                            9, 9, 11, 11, 11, 11, 10, 10, 10, 9,
                            10, 11, 9, 10, 10, 8, 7, 8, 8, 8,
                            9, 10, 12, 12, 11, 7, 7, 8, 5, 9,
                            11, 11, 12, 12, 9, 5, 8, 7, 7, 12,
                            12, 13, 12, 12, 7, 7, 8, 5, 9, 12,
                            12, 13, 11, 9, 5, 8, 7, 7, 11, 12,
                            12, 11, 12, 10, 10, 11, 9, 11, 12, 12,
                            12, 12, 16, 13, 16, 16, 16, 17, 15, 17,
                            17, 17, 16, 16, 16, 18, 16, 16, 16, 16,
                            18, 16]])

    params = dict(intermediate_=(intermediate, intermediatefn, None),
                  center_nodes_=(None, range(nv)),
                  volume_=(volimg, volfn, volds, volfngz, voldsgz),
                  surf_src_=('filename', 'surf'),
                  volume_mask_=(None, True, 0, 2),
                  call_method_=('qe', 'rvs', 'gam'))

    combis = _cartprod(params)  # compute all possible combinations

    combistep = 17  # 173
    # some fine prime number to speed things up
    # if this value becomes too big then not all
    # cases are covered
    # the unit test tests itself whether all values
    # occur at least once

    tested_params = dict()

    def val2str(x):
        return '%r:%r' % (type(x), x)

    for i in xrange(0, len(combis), combistep):
        combi = combis[i]

        intermediate_ = combi['intermediate_']
        center_nodes_ = combi['center_nodes_']
        volume_ = combi['volume_']
        surf_src_ = combi['surf_src_']
        volume_mask_ = combi['volume_mask_']
        call_method_ = combi['call_method_']

        # keep track of which values were used -
        # so that this unit test tests itself
        for k in combi.keys():
            if not k in tested_params:
                tested_params[k] = set()
            tested_params[k].add(val2str(combi[k]))

        # NOTE(review): the labels look swapped ('filename' selects the
        # Surface objects, 'surf' the file names), but both variants are
        # exercised and accepted by the callees; preserved as-is to keep
        # the sampled combinations identical -- confirm intent upstream.
        if surf_src_ == 'filename':
            s_i, s_m, s_o = inner, intermediate, outer
        elif surf_src_ == 'surf':
            s_i, s_m, s_o = innerfn, intermediatefn, outerfn
        else:
            raise ValueError('this should not happen')

        if call_method_ == 'qe':
            # use the fancy query engine wrapper
            qe = disc_surface_queryengine(
                radius, volume_, s_i, s_o, s_m,
                source_surf_nodes=center_nodes_,
                volume_mask=volume_mask_)
            sl = Searchlight(m, queryengine=qe)
            r = sl(ds).samples

        elif call_method_ == 'rvs':
            # use query-engine but build the
            # ingredients by hand
            vg = volgeom.from_any(volume_, volume_mask_)
            vs = volsurf.VolSurfMaximalMapping(vg, s_i, s_o)
            sel = surf_voxel_selection.voxel_selection(
                vs, radius, source_surf=s_m,
                source_surf_nodes=center_nodes_)
            qe = SurfaceVerticesQueryEngine(sel)
            sl = Searchlight(m, queryengine=qe)
            r = sl(ds).samples

        elif call_method_ == 'gam':
            # build everything from the ground up
            vg = volgeom.from_any(volume_, volume_mask_)
            vs = volsurf.VolSurfMaximalMapping(vg, s_i, s_o)
            sel = surf_voxel_selection.voxel_selection(
                vs, radius, source_surf=s_m,
                source_surf_nodes=center_nodes_)
            mp = sel

            ks = sel.keys()
            nk = len(ks)
            r = np.zeros((1, nk))
            # use 'j' rather than re-using the outer loop index 'i'
            for j, k in enumerate(ks):
                r[0, j] = len(mp[k])

        # check if result is as expected
        assert_array_equal(r_expected, r)

    # clean up; explicit loop instead of map() so this also works
    # under Python 3, where map() is lazy and would remove nothing
    all_fns = [volfn, volfngz, outerfn, innerfn, intermediatefn]
    for fn in all_fns:
        os.remove(fn)

    for k, vs in params.iteritems():
        if not k in tested_params:
            raise ValueError("Missing key: %r" % k)
        for v in vs:
            vstr = val2str(v)
            if not vstr in tested_params[k]:
                # BUG FIX: the original interpolated tested_params[k]
                # (the values that WERE seen) instead of the value that
                # is actually missing
                raise ValueError("Missing value %r for %s" % (vstr, k))