def __init__(self, vg, white, pial, intermediate=None):
    '''
    Parameters
    ----------
    vg: volgeom.VolGeom
        Volume geometry
    white: surf.Surface
        Surface representing the white-grey matter boundary
    pial: surf.Surface
        Surface representing the pial surface (the outer grey matter
        boundary)
    intermediate: surf.Surface (default: None).
        Surface representing an intermediate surface. If omitted
        it is the node-wise average of white and pial. This parameter
        is usually ignored, except when used in a
        VolSurfMinimalLowresMapping.

    Notes
    -----
    'pial' and 'white' should have the same topology.
    '''
    self._volgeom = volgeom.from_any(vg)
    self._pial = surf.from_any(pial)
    self._white = surf.from_any(white)

    if not self._pial.same_topology(self._white):
        raise ValueError("'white' and 'pial' do not have the same topology")

    #if intermediate is None:
    #    intermediate = (self.pial_surface * .5) + (self.white_surface * .5)
    self._intermediate = surf.from_any(intermediate)
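# Usage sketch (illustrative, not part of the original module): construct a
# mapping from a toy volume and two concentric spheres standing in for the
# white and pial surfaces. The 10x10x10 geometry and the sphere parameters
# are made up for this example; VolSurfMaximalMapping and
# surf.generate_sphere are used the same way in the tests further below.
def _example_volsurf_mapping():
    import numpy as np
    vg = volgeom.VolGeom((10, 10, 10), np.identity(4))
    white = surf.generate_sphere(10) * 3. + 5  # inner boundary
    pial = surf.generate_sphere(10) * 4. + 5   # outer boundary
    # both spheres have the same density, hence the same topology
    return VolSurfMaximalMapping(vg, white=white, pial=pial)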
def test_volgeom_masking(self):
    maskstep = 5
    vg = volgeom.VolGeom((2 * maskstep, 2 * maskstep, 2 * maskstep),
                         np.identity(4))

    mask = vg.get_empty_array()
    sh = vg.shape

    # mask a subset of the voxels
    rng = range(0, sh[0], maskstep)
    for i in rng:
        for j in rng:
            for k in rng:
                mask[i, j, k] = 1

    # make a new volgeom instance
    vg = volgeom.VolGeom(vg.shape, vg.affine, mask)

    data = vg.get_masked_nifti_image(nt=1)
    msk = vg.get_masked_nifti_image()
    dset = fmri_dataset(data, mask=msk)
    vg_dset = volgeom.from_any(dset)

    # ensure that the mask is set properly
    assert_equal(vg.nvoxels, vg.nvoxels_mask * maskstep ** 3)
    assert_equal(vg_dset, vg)

    dilates = range(0, 8, 2)
    nvoxels_masks = []  # keep track of number of voxels for each size
    for dilate in dilates:
        covers_full_volume = dilate * 2 >= maskstep * 3 ** .5 + 1

        # dilater gets the values Sphere(0), None, Sphere(2), 2, ...
        for i, constr in enumerate([Sphere, lambda x: x if x else None]):
            dilater = constr(dilate)

            img_dilated = vg.get_masked_nifti_image(dilate=dilater)
            data = img_dilated.get_data()

            assert_array_equal(data, vg.get_masked_array(dilate=dilater))

            n = np.sum(data)
            # number of voxels in mask is increasing
            assert_true(all(n >= p for p in nvoxels_masks))

            if i == 0:
                # - first call with this value of dilate: has to be more
                #   voxels than for every previous dilation value, unless
                #   the full volume is covered - then it can be equal too
                # - every next call: ensure size matches
                cmp = lambda x, y: (x >= y if covers_full_volume else x > y)
                assert_true(all(cmp(n, p) for p in nvoxels_masks))
                nvoxels_masks.append(n)
            else:
                # results should be identical irrespective of constr:
                # same size as previous call
                assert_equal(n, nvoxels_masks[-1])

        # if the dilation radius is big enough to cover the full volume,
        # then all voxels should have been selected
        assert_equal(np.sum(data) == vg.nvoxels, covers_full_volume)
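# Minimal sketch of the equivalence exercised above (inferred from this
# test's logic, not a documented guarantee): an integer radius and a
# Sphere of the same radius should dilate the mask identically.
def _example_dilate_equivalence(vg):
    img_int = vg.get_masked_nifti_image(dilate=2)
    img_sph = vg.get_masked_nifti_image(dilate=Sphere(2))
    assert_array_equal(img_int.get_data(), img_sph.get_data())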
def __init__(self, vg, source, meta=None, src2nbr=None, src2aux=None):
    """Initialize a VolumeMaskDictionary

    Parameters
    ----------
    vg: volgeom.VolGeom or fmri_dataset-like or str
        data structure that contains volume geometry information.
    source: Surface.surf or numpy.ndarray or None
        structure that contains the geometric information of
        (the centers of) each mask. In the case of surface-searchlights
        this should be a surface used as the center for searchlights.
    meta: dict or None
        Optional meta data stored with this instance (such as searchlight
        radius and volumetric information). A use case is storing an
        instance and loading it later, and then checking whether the meta
        information is correct when it is used to run a searchlight
        analysis.
    src2nbr: dict or None
        In a typical use case it contains a mapping from node center
        indices to lists of voxel indices.
    src2aux: dict or None
        In a typical use case it can contain auxiliary information such as
        the distance of each voxel to each center.
    """
    self._volgeom = volgeom.from_any(vg)
    self._source = source

    self._src2nbr = dict() if src2nbr is None else src2nbr
    self._src2aux = dict() if src2aux is None else src2aux

    self._meta = meta

    # this attribute is initially set to None;
    # upon the first call that requires an inverse mapping
    # it is generated
    self._lazy_nbr2src = None
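# Usage sketch (assumptions noted in comments): an instance maps each
# center node to the linear indices of its associated voxels; keys() and
# item access are used the same way in the tests further below.
def _example_mask_dictionary_sizes(voxsel):
    # voxsel is assumed to be a populated VolumeMaskDictionary, e.g. the
    # result of run_voxel_selection
    return dict((k, len(voxsel[k])) for k in voxsel.keys())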
def __init__(self, vg):
    '''
    Parameters
    ----------
    vg: volgeom.VolGeom or str or NiftiImage
        volume to be used as a surface
    '''
    self._vg = volgeom.from_any(vg)

    n = self._vg.nvoxels
    vertices = self._vg.lin2xyz(np.arange(n))
    faces = np.zeros((0, 3), dtype=np.int)

    # call the parent class constructor
    super(VolumeBasedSurface, self).__init__(vertices, faces, check=False)
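# Sketch of the invariant established above: a VolumeBasedSurface has one
# node per voxel and an empty topology (no faces).
def _example_volume_based_surface(vg):
    s = VolumeBasedSurface(vg)
    assert s.nvertices == vg.nvoxels  # one node per voxel, by construction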
def from_volume(v):
    '''Makes a pseudo-surface from a volume.

    Each voxel corresponds to a node; there is no topology.
    A use case is mimicking traditional volume-based searchlights.

    Parameters
    ----------
    v: str or NiftiImage
        input volume

    Returns
    -------
    vsm: VolSurfMaximalMapping
        Mapping based on a surface with as many nodes as there are voxels
        in the input volume. The associated topology is empty.
    '''
    vg = volgeom.from_any(v)
    vs = VolumeBasedSurface(vg)

    return VolSurfMaximalMapping(vg, vs, vs, vs)
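# Usage sketch ('vol.nii' is a placeholder filename, and the availability
# of surf_voxel_selection in this scope is assumed): a volume-based
# searchlight can be mimicked by selecting voxels around every voxel-node.
def _example_from_volume():
    vsm = from_volume('vol.nii')
    sel = surf_voxel_selection.voxel_selection(vsm, radius=10.)
    return sel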
def train(self, dataset):
    '''Train the query engine on a dataset'''
    vg = self.voxsel.volgeom

    # We are creating a map from big unmasked indices of voxels
    # known to voxsel into the dataset's feature indices.
    # We verify that the current dataset has the necessary
    # features (i.e. they are not masked out) and that the volume
    # geometry matches that of the original voxel selection
    try:
        vg_ds = volgeom.from_any(dataset)
    except Exception:
        vg_ds = None

    if vg_ds:
        eps = .0001
        if np.max(np.abs(vg_ds.affine - vg.affine)) > eps:
            raise ValueError("Mismatch in affine matrix: %r != %r"
                             % (vg_ds.affine, vg.affine))
        if not vg_ds.same_shape(vg):
            raise ValueError("Mismatch in shape: %s != %s"
                             % (vg_ds.shape[:3], vg.shape[:3]))
    else:
        warning("Could not find dataset volume geometry for %r" % dataset)

    self._map_voxel_coord = map_voxel_coord = {}
    long_is = vg.ijk2lin(dataset.fa[self.space].value)
    long_is_invol = vg.contains_lin(long_is)
    for i, long_i in enumerate(long_is):
        if not long_is_invol[i]:
            raise ValueError('Feature id %d (with voxel id %d)'
                             ' is not in the (possibly masked) '
                             'volume geometry %r' % (i, long_i, vg))
        if long_i in map_voxel_coord:
            map_voxel_coord[long_i].append(i)
        else:
            map_voxel_coord[long_i] = [i]
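# Minimal sketch of the inverted-index pattern built above, independent of
# the surrounding class: several features may share one voxel, so each
# linear voxel id maps to a list of feature positions.
def _example_voxel_to_feature_map():
    long_is = [7, 3, 7]  # linear voxel ids; voxel 7 backs two features
    map_voxel_coord = {}
    for i, long_i in enumerate(long_is):
        map_voxel_coord.setdefault(long_i, []).append(i)
    assert map_voxel_coord == {7: [0, 2], 3: [1]}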
def run_voxel_selection(radius, volume, white_surf, pial_surf,
                        source_surf=None, source_surf_nodes=None,
                        volume_mask=None, distance_metric='dijkstra',
                        start_mm=0, stop_mm=0, start_fr=0., stop_fr=1.,
                        nsteps=10, eta_step=1, nproc=None,
                        outside_node_margin=None,
                        results_backend=None, tmp_prefix='tmpvoxsel',
                        node_voxel_mapping='maximal'):
    """
    Voxel selection wrapper for multiple center nodes on the surface

    Parameters
    ----------
    radius: int or float
        Size of searchlight. If an integer, it indicates the number of
        voxels. If a float, it indicates the radius of the disc in mm.
    volume: Dataset or NiftiImage or volgeom.VolGeom
        Volume in which voxels are selected.
    white_surf: str or surf.Surface
        Surface of the white-matter to grey-matter boundary, or filename
        of a file containing such a surface.
    pial_surf: str or surf.Surface
        Surface of the grey-matter to pial-matter boundary, or filename
        of a file containing such a surface.
    source_surf: surf.Surface or None
        Surface used to compute the distance between nodes. If omitted,
        it is the node-wise average of the white and pial surfaces.
    source_surf_nodes: list of int or numpy array or None
        Indices of nodes in source_surf that serve as searchlight centers.
        By default every node serves as a searchlight center.
    volume_mask: None (default) or False or int
        Mask from volume to apply to voxel selection results. By default
        no mask is applied. If volume_mask is an integer k, then the k-th
        volume from volume is used to mask the data. If volume is a
        Dataset and has a property volume.fa.voxel_indices, then these
        indices are used to mask the data, unless volume_mask is False or
        an integer.
    distance_metric: str
        Distance metric between nodes: 'euclidean' or 'dijkstra' (default).
    start_fr: float (default: 0)
        Relative start position of the line in grey matter: 0.=white
        surface, 1.=pial surface.
    stop_fr: float (default: 1)
        Relative stop position of the line (see start_fr).
    start_mm: float (default: 0)
        Absolute start position offset in mm (added to the position
        determined by start_fr).
    stop_mm: float (default: 0)
        Absolute stop position offset in mm (added to the position
        determined by stop_fr).
    nsteps: int (default: 10)
        Number of steps from the white to the pial surface.
    eta_step: int (default: 1)
        After how many searchlights an estimate should be printed of the
        remaining time until completion of all searchlights.
    nproc: int or None
        Number of parallel threads. None means as many threads as the
        system supports. The pprocess package is required for parallel
        threads; if it cannot be used, a single thread is used.
    outside_node_margin: float or None (default)
        By default nodes outside the volume are skipped; using this
        parameter allows for a margin. If this value is a float (possibly
        np.inf), then all nodes within outside_node_margin Dijkstra
        distance from any node within the volume are still assigned
        associated voxels. If outside_node_margin is True, then a node is
        always assigned voxels regardless of its position in the volume.
    results_backend: 'native' or 'hdf5' or None (default)
        Specifies the way results are provided back from a processing
        block in case of nproc > 1. 'native' is pickling/unpickling of
        results by pprocess, while 'hdf5' uses h5save/h5load
        functionality. 'hdf5' might be more time and memory efficient in
        some cases. If None, then 'hdf5' is used if available, else
        'native'.
    tmp_prefix: str, optional
        If specified, serves as a prefix for temporary file storage if
        results_backend == 'hdf5'. It can thus specify the directory to
        use (a trailing file path separator is not added automagically).
    node_voxel_mapping: 'minimal' or 'maximal' or 'minimal_lowres'
        If 'minimal', each voxel is associated with at most one node.
        If 'maximal' (the default), a voxel is associated with every node
        whose searchlight contains that voxel. If 'minimal_lowres', each
        voxel is associated with at most one node, and each node that is
        mapped onto has a corresponding node (at the same spatial
        location) in source_surf.

    Returns
    -------
    sel: volume_mask_dict.VolumeMaskDictionary
        Voxel selection results that associate, with each node, the
        indices of the surrounding voxels.
    """

    vg = volgeom.from_any(volume, volume_mask)

    mapper_dict = dict(maximal=volsurf.VolSurfMaximalMapping,
                       minimal=volsurf.VolSurfMinimalMapping,
                       minimal_lowres=volsurf.VolSurfMinimalLowresMapping)
    mapper = mapper_dict[node_voxel_mapping]

    vsm = mapper(vg, white=white_surf, pial=pial_surf,
                 intermediate=source_surf,
                 nsteps=nsteps, start_fr=start_fr, stop_fr=stop_fr,
                 start_mm=start_mm, stop_mm=stop_mm)

    sel = voxel_selection(vol_surf_mapping=vsm, radius=radius,
                          source_surf=source_surf,
                          source_surf_nodes=source_surf_nodes,
                          distance_metric=distance_metric,
                          eta_step=eta_step, nproc=nproc,
                          outside_node_margin=outside_node_margin,
                          results_backend=results_backend,
                          tmp_prefix=tmp_prefix)

    return sel
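# Usage sketch (all filenames are placeholders): run a 10 mm voxel
# selection and inspect the number of voxels per searchlight center.
def _example_run_voxel_selection():
    sel = run_voxel_selection(10., 'bold.nii.gz', 'white.asc', 'pial.asc')
    return [len(sel[k]) for k in sel.keys()]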
def test_volgeom(self, temp_fn):
    sz = (17, 71, 37, 73)  # size of 4-D 'brain volume'
    d = 2.  # voxel size
    xo, yo, zo = -6., -12., -20.  # origin
    mx = np.identity(4, np.float) * d  # affine transformation matrix
    mx[3, 3] = 1
    mx[0, 3] = xo
    mx[1, 3] = yo
    mx[2, 3] = zo

    vg = volgeom.VolGeom(sz, mx)  # initialize volgeom

    eq_shape_nvoxels = {(17, 71, 37): (True, True),
                        (71, 17, 37, 1): (False, True),
                        (17, 71, 37, 2): (True, True),
                        (17, 71, 37, 73): (True, True),
                        (2, 2, 2): (False, False)}

    for other_sz, (eq_shape, eq_nvoxels) in eq_shape_nvoxels.iteritems():
        other_vg = volgeom.VolGeom(other_sz, mx)
        assert_equal(other_vg.same_shape(vg), eq_shape)
        assert_equal(other_vg.nvoxels_mask == vg.nvoxels_mask, eq_nvoxels)

    nv = sz[0] * sz[1] * sz[2]  # number of voxels
    nt = sz[3]  # number of time points
    assert_equal(vg.nvoxels, nv)

    # a couple of hard-coded test cases;
    # the last two are outside the volume
    linidxs = [0, 1, sz[2], sz[1] * sz[2], nv - 1, -1, nv]
    subidxs = ([(0, 0, 0), (0, 0, 1), (0, 1, 0), (1, 0, 0),
                (sz[0] - 1, sz[1] - 1, sz[2] - 1)] +
               [(sz[0], sz[1], sz[2])] * 2)

    xyzs = ([(xo, yo, zo), (xo, yo, zo + d), (xo, yo + d, zo),
             (xo + d, yo, zo),
             (xo + d * (sz[0] - 1), yo + d * (sz[1] - 1),
              zo + d * (sz[2] - 1))] +
            [(np.nan, np.nan, np.nan)] * 2)

    for i, linidx in enumerate(linidxs):
        lin = np.asarray([linidx])
        ijk = vg.lin2ijk(lin)

        ijk_expected = np.reshape(np.asarray(subidxs[i]), (1, 3))
        assert_array_almost_equal(ijk, ijk_expected)

        xyz = vg.lin2xyz(lin)

        xyz_expected = np.reshape(np.asarray(xyzs[i]), (1, 3))
        assert_array_almost_equal(xyz, xyz_expected)

    # check that some identities hold
    ab, bc, ac = vg.lin2ijk, vg.ijk2xyz, vg.lin2xyz
    ba, cb, ca = vg.ijk2lin, vg.xyz2ijk, vg.xyz2lin

    identities = [lambda x: ab(ba(x)),
                  lambda x: bc(cb(x)),
                  lambda x: ac(ca(x)),
                  lambda x: ba(ab(x)),
                  lambda x: cb(bc(x)),
                  lambda x: ca(ac(x)),
                  lambda x: bc(ab(ca(x))),
                  lambda x: ba(cb(ac(x)))]

    # 0=lin, 1=ijk, 2=xyz
    identities_input = [1, 2, 2, 0, 1, 0, 2, 0]

    # voxel indices to test
    linrange = [0, 1, sz[2], sz[1] * sz[2]] + range(0, nv, nv // 100)

    lin = np.reshape(np.asarray(linrange), (-1,))
    ijk = vg.lin2ijk(lin)
    xyz = vg.ijk2xyz(ijk)

    for j, identity in enumerate(identities):
        inp = identities_input[j]
        x = {0: lin, 1: ijk, 2: xyz}[inp]
        assert_array_equal(x, identity(x))

    # check that masking works
    assert_true(vg.contains_lin(lin).all())
    assert_false(vg.contains_lin(-lin - 1).any())

    assert_true(vg.contains_ijk(ijk).all())
    assert_false(vg.contains_ijk(-ijk - 1).any())

    # ensure that we have no rounding issues
    deltas = [-.51, -.49, 0., .49, .51]
    should_raise = [True, False, False, False, True]

    for delta, r in zip(deltas, should_raise):
        xyz_d = xyz + delta * d
        lin_d = vg.xyz2lin(xyz_d)

        if r:
            assert_raises(AssertionError,
                          assert_array_almost_equal, lin_d, lin)
        else:
            assert_array_almost_equal(lin_d, lin)

    # some I/O testing
    img = vg.get_empty_nifti_image()
    img.to_filename(temp_fn)

    assert_true(os.path.exists(temp_fn))

    vg2 = volgeom.from_any(img)
    vg3 = volgeom.from_any(temp_fn)

    assert_array_equal(vg.affine, vg2.affine)
    assert_array_equal(vg.affine, vg3.affine)

    assert_equal(vg.shape[:3], vg2.shape[:3])
    assert_equal(vg.shape[:3], vg3.shape[:3])

    assert_true(len('%s%r' % (vg, vg)) > 0)
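# Worked sketch of the coordinate convention verified above: ijk2xyz is the
# affine applied to homogeneous voxel indices, so with voxel size d and
# origin (xo, yo, zo), voxel (i, j, k) sits at (xo + d*i, yo + d*j,
# zo + d*k). The constants mirror the test; the voxel index is arbitrary.
def _example_affine_roundtrip():
    import numpy as np
    d, origin = 2., np.asarray([-6., -12., -20.])
    mx = np.identity(4) * d
    mx[3, 3] = 1
    mx[:3, 3] = origin
    ijk1 = np.asarray([3., 1., 4., 1.])  # homogeneous voxel index
    xyz = np.dot(mx, ijk1)[:3]
    assert np.allclose(xyz, origin + d * ijk1[:3])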
def test_voxel_selection_alternative_calls(self):
    # Tests a multitude of different searchlight calls
    # that all should yield exactly the same results.
    #
    # Calls differ by whether the arguments are filenames
    # or data objects, whether values are specified explicitly
    # or set to the default implicitly (using None),
    # and by different calls to run the voxel selection.
    #
    # This method does not test for mask functionality.

    # define the volume
    vol_shape = (10, 10, 10, 3)
    vol_affine = np.identity(4)
    vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5

    # four versions: array, nifti image, file name, fmri dataset
    volarr = np.ones(vol_shape)
    volimg = nb.Nifti1Image(volarr, vol_affine)

    # There is a detected problem with elderly NumPy's (e.g. 1.6.1
    # on precise on travis) leading to segfaults while operating
    # on memmapped volumes being forwarded to pprocess.
    # Thus just making it a compressed volume for those cases
    suf = '.gz' \
        if externals.exists('pprocess') \
            and externals.versions['numpy'] < '1.6.2' \
        else ''
    fd, volfn = tempfile.mkstemp('vol.nii' + suf, 'test')
    os.close(fd)
    volimg.to_filename(volfn)
    volds = fmri_dataset(volfn)

    fd, volfngz = tempfile.mkstemp('vol.nii.gz', 'test')
    os.close(fd)
    volimg.to_filename(volfngz)
    voldsgz = fmri_dataset(volfngz)

    # make the surfaces
    sphere_density = 10

    # two versions: Surface and file name
    outer = surf.generate_sphere(sphere_density) * 25. + 15
    inner = surf.generate_sphere(sphere_density) * 20. + 15
    intermediate = inner * .5 + outer * .5
    nv = outer.nvertices

    fd, outerfn = tempfile.mkstemp('outer.asc', 'test')
    os.close(fd)
    fd, innerfn = tempfile.mkstemp('inner.asc', 'test')
    os.close(fd)
    fd, intermediatefn = tempfile.mkstemp('intermediate.asc', 'test')
    os.close(fd)

    for s, fn in zip([outer, inner, intermediate],
                     [outerfn, innerfn, intermediatefn]):
        surf.write(fn, s, overwrite=True)

    # searchlight radius (in mm)
    radius = 10.

    # dataset used to run searchlight on
    ds = fmri_dataset(volfn)

    # simple voxel counter (run for each searchlight position)
    m = _Voxel_Count_Measure()

    # number of voxels expected in each searchlight
    r_expected = np.array([[18, 9, 10, 9, 9, 9, 9, 10, 9, 9,
                            9, 9, 11, 11, 11, 11, 10, 10, 10, 9,
                            10, 11, 9, 10, 10, 8, 7, 8, 8, 8,
                            9, 10, 12, 12, 11, 7, 7, 8, 5, 9,
                            11, 11, 12, 12, 9, 5, 8, 7, 7, 12,
                            12, 13, 12, 12, 7, 7, 8, 5, 9, 12,
                            12, 13, 11, 9, 5, 8, 7, 7, 11, 12,
                            12, 11, 12, 10, 10, 11, 9, 11, 12, 12,
                            12, 12, 16, 13, 16, 16, 16, 17, 15, 17,
                            17, 17, 16, 16, 16, 18, 16, 16, 16, 16,
                            18, 16]])

    params = dict(intermediate_=(intermediate, intermediatefn, None),
                  center_nodes_=(None, range(nv)),
                  volume_=(volimg, volfn, volds, volfngz, voldsgz),
                  surf_src_=('filename', 'surf'),
                  volume_mask_=(None, True, 0, 2),
                  call_method_=("qe", "rvs", "gam"))

    combis = _cartprod(params)  # compute all possible combinations

    combistep = 17  # some fine prime number to speed things up
    # if this value becomes too big then not all
    # cases are covered
    # the unit test tests itself whether all values
    # occur at least once

    tested_params = dict()

    def val2str(x):
        return '%r:%r' % (type(x), x)

    for i in xrange(0, len(combis), combistep):
        combi = combis[i]

        intermediate_ = combi['intermediate_']
        center_nodes_ = combi['center_nodes_']
        volume_ = combi['volume_']
        surf_src_ = combi['surf_src_']
        volume_mask_ = combi['volume_mask_']
        call_method_ = combi['call_method_']

        # keep track of which values were used -
        # so that this unit test tests itself
        for k in combi.keys():
            if not k in tested_params:
                tested_params[k] = set()
            tested_params[k].add(val2str(combi[k]))

        if surf_src_ == 'filename':
            s_i, s_m, s_o = innerfn, intermediatefn, outerfn
        elif surf_src_ == 'surf':
            s_i, s_m, s_o = inner, intermediate, outer
        else:
            raise ValueError('this should not happen')

        if call_method_ == "qe":
            # use the fancy query engine wrapper
            qe = disc_surface_queryengine(radius,
                                          volume_, s_i, s_o, s_m,
                                          source_surf_nodes=center_nodes_,
                                          volume_mask=volume_mask_)
            sl = Searchlight(m, queryengine=qe)
            r = sl(ds).samples

        elif call_method_ == 'rvs':
            # use the query engine but build the
            # ingredients by hand
            vg = volgeom.from_any(volume_, volume_mask_)
            vs = volsurf.VolSurfMaximalMapping(vg, s_i, s_o)
            sel = surf_voxel_selection.voxel_selection(
                vs, radius, source_surf=s_m,
                source_surf_nodes=center_nodes_)
            qe = SurfaceVerticesQueryEngine(sel)
            sl = Searchlight(m, queryengine=qe)
            r = sl(ds).samples

        elif call_method_ == 'gam':
            # build everything from the ground up
            vg = volgeom.from_any(volume_, volume_mask_)
            vs = volsurf.VolSurfMaximalMapping(vg, s_i, s_o)
            sel = surf_voxel_selection.voxel_selection(
                vs, radius, source_surf=s_m,
                source_surf_nodes=center_nodes_)

            mp = sel

            ks = sel.keys()
            nk = len(ks)
            r = np.zeros((1, nk))
            for j, k in enumerate(ks):
                r[0, j] = len(mp[k])

        # check if result is as expected
        assert_array_equal(r_expected, r)

    # clean up
    all_fns = [volfn, volfngz, outerfn, innerfn, intermediatefn]
    map(os.remove, all_fns)

    for k, vs in params.iteritems():
        if not k in tested_params:
            raise ValueError("Missing key: %r" % k)
        for v in vs:
            vstr = val2str(v)
            if not vstr in tested_params[k]:
                raise ValueError("Missing value %r for %s" % (v, k))
def test_voxel_selection(self):
    '''Compare surface and volume based searchlight'''
    '''
    Tests to see whether results are identical for surface-based
    searchlight (just one plane; Euclidean distance) and volume-based
    searchlight.

    Note that the current value is a float; if it were int, it would
    specify the number of voxels in each searchlight'''

    radius = 10.

    '''Define input filenames'''
    epi_fn = os.path.join(pymvpa_dataroot, 'bold.nii.gz')
    maskfn = os.path.join(pymvpa_dataroot, 'mask.nii.gz')

    '''
    Use the EPI datafile to define a surface.
    The surface has as many nodes as there are voxels
    and is parallel to the volume 'slice'
    '''
    vg = volgeom.from_any(maskfn, mask_volume=True)

    aff = vg.affine
    nx, ny, nz = vg.shape[:3]

    '''Plane goes in x and y direction, so we take these vectors
    from the affine transformation matrix of the volume'''
    plane = surf.generate_plane(aff[:3, 3], aff[:3, 0], aff[:3, 1],
                                nx, ny)

    '''
    Simulate pial and white matter as just above and below
    the central plane
    '''
    normal_vec = aff[:3, 2]
    outer = plane + normal_vec
    inner = plane + -normal_vec

    '''
    Combine volume and surface information
    '''
    vsm = volsurf.VolSurfMaximalMapping(vg, outer, inner)

    '''
    Run voxel selection with the specified radius (in mm), using
    the Euclidean distance measure
    '''
    surf_voxsel = surf_voxel_selection.voxel_selection(vsm, radius,
                                                       distance_metric='e')

    '''Define the measure'''

    # run_slow=True would give an actual cross-validation with meaningful
    # accuracies. Because this is a unit-test only the number of voxels
    # in each searchlight is tested.
    run_slow = False

    if run_slow:
        meas = CrossValidation(GNB(), OddEvenPartitioner(),
                               errorfx=lambda p, t: np.mean(p == t))
        postproc = mean_sample
    else:
        meas = _Voxel_Count_Measure()
        postproc = lambda x: x

    '''
    Surface analysis: define the query engine, cross validation,
    and searchlight
    '''
    surf_qe = SurfaceVerticesQueryEngine(surf_voxsel)
    surf_sl = Searchlight(meas, queryengine=surf_qe, postproc=postproc)

    '''
    new (Sep 2012): also test 'simple' queryengine wrapper function
    '''
    surf_qe2 = disc_surface_queryengine(radius, maskfn, inner, outer,
                                        plane, volume_mask=True,
                                        distance_metric='euclidean')
    surf_sl2 = Searchlight(meas, queryengine=surf_qe2, postproc=postproc)

    '''
    Same for the volume analysis
    '''
    element_sizes = tuple(map(abs, (aff[0, 0], aff[1, 1], aff[2, 2])))
    sph = Sphere(radius, element_sizes=element_sizes)
    kwa = {'voxel_indices': sph}

    vol_qe = IndexQueryEngine(**kwa)
    vol_sl = Searchlight(meas, queryengine=vol_qe, postproc=postproc)

    '''The following steps are similar to start_easy.py'''
    attr = SampleAttributes(os.path.join(pymvpa_dataroot,
                                         'attributes_literal.txt'))

    mask = surf_voxsel.get_mask()

    dataset = fmri_dataset(samples=epi_fn,
                           targets=attr.targets, chunks=attr.chunks,
                           mask=mask)

    if run_slow:
        # do chunkswise linear detrending on dataset
        poly_detrend(dataset, polyord=1, chunks_attr='chunks')

        # zscore dataset relative to baseline ('rest') mean
        zscore(dataset, chunks_attr='chunks',
               param_est=('targets', ['rest']))

    # select class face and house for this demo analysis
    # would work with full datasets (just a little slower)
    dataset = dataset[np.array([l in ['face', 'house']
                                for l in dataset.sa.targets],
                               dtype='bool')]

    '''Apply searchlight to datasets'''
    surf_dset = surf_sl(dataset)
    surf_dset2 = surf_sl2(dataset)
    vol_dset = vol_sl(dataset)

    surf_data = surf_dset.samples
    surf_data2 = surf_dset2.samples
    vol_data = vol_dset.samples

    assert_array_equal(surf_data, surf_data2)
    assert_array_equal(surf_data, vol_data)