Example #1
    def __init__(self, vg, white, pial, intermediate=None):
        '''
        Parameters
        ----------
        volgeom: volgeom.VolGeom
            Volume geometry
        white: surf.Surface
            Surface representing white-grey matter boundary
        pial: surf.Surface
            Surface representing pial-grey matter boundary
        intermediate: surf.Surface (default: None).
            Surface representing intermediate surface. If omitted
            it is the node-wise average of white and pial.
            This parameter is usually ignored, except when used
            in a VolSurfMinimalLowresMapping.

        Notes
        -----
        'pial' and 'white' should have the same topology.
        '''
        self._volgeom = volgeom.from_any(vg)
        self._pial = surf.from_any(pial)
        self._white = surf.from_any(white)

        if not self._pial.same_topology(self._white):
            raise Exception("Not same topology for white and pial")

        #if intermediate is None:
        #    intermediate = (self.pial_surface * .5) + (self.white_surface * .5)
        self._intermediate = surf.from_any(intermediate)
Example #2
    def __init__(self, vg, white, pial, intermediate=None):
        '''
        Parameters
        ----------
        volgeom: volgeom.VolGeom
            Volume geometry
        white: surf.Surface
            Surface representing white-grey matter boundary
        pial: surf.Surface
            Surface representing pial-grey matter boundary
        intermediate: surf.Surface (default: None).
            Surface representing intermediate surface. If omitted
            it is the node-wise average of white and pial.
            This parameter is usually ignored, except when used
            in a VolSurfMinimalLowresMapping.

        Notes
        -----
        'pial' and 'white' should have the same topology.
        '''
        self._volgeom = volgeom.from_any(vg)
        self._pial = surf.from_any(pial)
        self._white = surf.from_any(white)

        if not self._pial.same_topology(self._white):
            raise Exception("Not same topology for white and pial")

        #if intermediate is None:
        #    intermediate = (self.pial_surface * .5) + (self.white_surface * .5)
        self._intermediate = surf.from_any(intermediate)
Example #3
def flat_surface2xy(surface):
    '''Returns a tuple with x and y coordinates of a flat surface
    
    Parameters
    ----------
    surface: Surface
        flat surface
    
    Returns
    -------
    x: np.ndarray
        x coordinates
    y: np.ndarray
        y coordinates
    
    Notes
    -----
    If the surface is not flat (any z coordinate is non-zero), an exception
    is raised.
    '''

    s = surf.from_any(surface)
    v = s.vertices
    if any(v[:, 2] != 0):
        raise ValueError("Expected a flat surface with z=0 for all nodes")

    x = v[:, 0]
    y = v[:, 1]

    return x, y
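
A minimal usage sketch for the function above, assuming the surf module exposes a Surface(vertices, faces) constructor (as in PyMVPA's mvpa2.support.nibabel.surf); the two-triangle square is hypothetical test data:

import numpy as np

# hypothetical flat test surface: a unit square in the z=0 plane,
# split into two triangles
vertices = np.array([[0., 0., 0.],
                     [1., 0., 0.],
                     [1., 1., 0.],
                     [0., 1., 0.]])
faces = np.array([[0, 1, 2],
                  [0, 2, 3]])
flat = surf.Surface(vertices, faces)   # assumed constructor
x, y = flat_surface2xy(flat)           # first two coordinate columns
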
Example #4
def build_connectivity_matrix(source_surface_fn, nodes=None):
    """
    Given the brain surface (intermediate) saved in `source_surface_fn` and
    a subset of nodes, returns a sparse adjacency matrix to be used
    by the Ward algorithm as implemented in scikit-learn.
    """
    surface = surf.from_any(source_surface_fn)
    triangles = surface.faces
    nvertices = surface.nvertices

    e = triangles2edges(triangles)
    conn = np.zeros((nvertices, nvertices), dtype=bool)
    # set adjacent nodes to 1
    for idx in e:
        conn[idx[0], idx[1]] = 1
        conn[idx[1], idx[0]] = 1
    # set diagonal to 1 -- useless but for completeness
    conn[np.diag_indices_from(conn)] = 1
    # get only nodes we have
    if nodes is not None:
        connectivity2 = conn[nodes, :][:, nodes]
    else:
        connectivity2 = conn
    # check it's really symmetric
    for x in range(len(connectivity2)):
        assert np.array_equal(connectivity2[x, :], connectivity2[:, x])
    # make it a sparse matrix for ward
    connectivity2_sparse = sparse.csr_matrix(connectivity2)
    return connectivity2_sparse
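
The dense nvertices x nvertices boolean matrix above can get large for high-resolution meshes. A sketch of an alternative that builds the adjacency directly in sparse form with scipy.sparse.coo_matrix, assuming triangles2edges returns (i, j) node pairs as in the example above:

import numpy as np
from scipy import sparse

def build_connectivity_matrix_sparse(triangles, nvertices, nodes=None):
    # stack both directions of every edge plus the diagonal
    e = np.asarray(triangles2edges(triangles))
    diag = np.arange(nvertices)
    rows = np.concatenate((e[:, 0], e[:, 1], diag))
    cols = np.concatenate((e[:, 1], e[:, 0], diag))
    data = np.ones(len(rows), dtype=bool)
    conn = sparse.coo_matrix((data, (rows, cols)),
                             shape=(nvertices, nvertices)).tocsr()
    if nodes is not None:
        conn = conn[nodes, :][:, nodes]
    return conn
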
Example #5
def flat_surface2xy(surface):
    '''Returns a tuple with x and y coordinates of a flat surface
    
    Parameters
    ----------
    surface: Surface
        flat surface
    
    Returns
    -------
    x: np.ndarray
        x coordinates
    y: np.ndarray
        y coordinates
    
    Notes
    -----
    If the surface is not flat (any z coordinate is non-zero), an exception
    is raised.
    '''

    s = surf.from_any(surface)
    v = s.vertices
    if any(v[:, 2] != 0):
        raise ValueError("Expected a flat surface with z=0 for all nodes")

    x = v[:, 0]
    y = v[:, 1]

    return x, y
Example #6
def build_connectivity_matrix(source_surface_fn, nodes=None):
    """
    Given the brain surface (intermediate) saved in `source_surface_fn` and
    a subset of nodes, returns a sparse adjacency matrix to be used
    by the Ward algorithm as implemented in scikit-learn.
    """
    surface = surf.from_any(source_surface_fn)
    triangles = surface.faces
    nvertices = surface.nvertices

    e = triangles2edges(triangles)
    conn = np.zeros((nvertices, nvertices), dtype=bool)
    # set adjacent nodes to 1
    for idx in e:
        conn[idx[0], idx[1]] = 1
        conn[idx[1], idx[0]] = 1
    # set diagonal to 1 -- useless but for completeness
    conn[np.diag_indices_from(conn)] = 1
    # get only nodes we have
    if nodes is not None:
        connectivity2 = conn[nodes, :][:, nodes]
    else:
        connectivity2 = conn
    # check it's really symmetric
    for x in range(len(connectivity2)):
        assert np.array_equal(connectivity2[x, :], connectivity2[:, x])
    # make it a sparse matrix for ward
    connectivity2_sparse = sparse.csr_matrix(connectivity2)
    return connectivity2_sparse
Example #7
    def __init__(self, surface, curvature=None, min_nsteps=500,
                 range_='2_98%', threshold=None, color_map=None,
                 max_deformation=.5):
        '''
        Parameters
        ----------
        surface: surf.Surface
            a flat surface
        curvature: str or np.ndarray
            (Filename of) data representing curvature at each node. 
        min_nsteps: int
            Minimal side of output plots in pixel
        range_: str or float or tuple
            If a tuple (a,b), then this tuple is returned.
            If a float a, then (-a,a) is returned.
            "R(a)", where R(a) denotes the string representation
            of float a, is equivalent to range_=a.
            "R(a)_R(b)" is equivalent to range_=(a,b).
            "R(a)_R(b)%" indicates that the a-th and b-th 
            percentile of xs is taken to define the range.
            "R(a)%" is equivalent to "R(a)_R(100-a)%"
        threshold: str or float or tuple
            Indicates which values will be shown. Syntax as in range_
        color_map: str
            colormap to use
        max_deformation: float
            maximum deformation to make a non-flat surface flat.
            The normals of each face must have a dot product with the average
            face normal that is not less than (1-max_deformation); otherwise
            an exception is raised. The rationale for this option is that
            certain surfaces may be almost flat, and projecting the vertices
            on a truly flat surface should be fine. On the other hand, surfaces
            that are definitely not flat (such as full cortical surface models)
            should raise an error when an attempt is made to flatten
            them.
        '''

        self._surface = surf.from_any(surface)

        if curvature is None:
            self._curvature = None
        else:
            self._curvature = curvature_from_any(curvature)
            if self._surface.nvertices != len(self._curvature):
                raise ValueError("Surface has %d vertices, but curvature "
                                 "has %d values" %
                                 (self._surface.nvertices,
                                  self._curvature.size))

        self._min_nsteps = min_nsteps
        self._range_ = range_
        self._threshold = threshold
        self._color_map = color_map
        self._max_deformation = max_deformation

        self._grid_def = None
        self._underlay = None
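
The range_ mini-syntax documented above can be illustrated with a small, hypothetical parser; this only mirrors the documented behaviour and is not the class's actual implementation (threshold uses the same syntax):

import numpy as np

def parse_range(range_, xs):
    '''Illustration of the range_ syntax: a tuple is returned as-is, a float a
    becomes (-a, a), "a_b" becomes (a, b), "a_b%" takes percentiles of xs,
    and "a%" is shorthand for "a_(100-a)%".'''
    if isinstance(range_, tuple):
        return range_
    if isinstance(range_, (int, float)):
        return (-range_, range_)
    s = str(range_)
    use_percentile = s.endswith('%')
    if use_percentile:
        s = s[:-1]
    parts = [float(p) for p in s.split('_')]
    if len(parts) == 1:
        a = parts[0]
        parts = [a, 100. - a] if use_percentile else [-a, a]
    if use_percentile:
        return tuple(np.percentile(xs, parts))
    return tuple(parts)

# e.g. parse_range('2_98%', data) -> (2nd percentile, 98th percentile) of data
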
Example #8
def flat_surface2xy(surface, max_deformation):
    '''Returns a tuple with x and y coordinates of a flat surface
    
    Parameters
    ----------
    surface: Surface
        flat surface
    max_deformation: float
        maximum deformation to make a non-flat surface flat.
        The normals of each face must have a dot product with the average
        face normal that is not less than (1-max_deformation); otherwise
        an exception is raised. The rationale for this option is that certain
        surfaces may be almost flat, and those can be made flat without
        problem; but surfaces that are not flat, such as inflated surfaces,
        should not be flattable.
    
    Returns
    -------
    x: np.ndarray
        x coordinates
    y: np.ndarray
        y coordinates
    
    Notes
    -----
    If the surface is not sufficiently flat (as controlled by max_deformation),
    an exception is raised.
    '''

    s = surf.from_any(surface)
    face_normals = s.face_normals

    msk = np.all(np.logical_not(np.isnan(face_normals)), 1)

    avg_face_normal = s.nanmean_face_normal
    deformations = abs(
        1 - abs(np.dot(avg_face_normal[np.newaxis], face_normals[msk].T)))
    too_deformed = np.nonzero(deformations > max_deformation)[0]
    if len(too_deformed) > 0:
        raise ValueError('Surface is not sufficiently flat with '
                         'max_deformation=%.3f' % max_deformation)

    # find rotation so that surface is more or less orthogonal to
    # the unit vector (0,0,1)
    v = s.vertices
    z_axis = np.asarray([0, 0, 1.])
    r = vector_alignment_find_rotation(avg_face_normal, z_axis)

    # apply rotation
    v_rotated = r.dot(v.T)

    # discard z-coordinate
    x = v_rotated[0]
    y = v_rotated[1]

    return x, y
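
The helper vector_alignment_find_rotation used above is not shown here; a minimal sketch of what such a rotation could look like, using Rodrigues' formula to map one unit vector onto another (an assumption for illustration, not the helper's actual source):

import numpy as np

def rotation_aligning_vectors(a, b):
    '''Return a 3x3 rotation matrix R such that R.dot(a) is parallel to b.'''
    a = np.asarray(a, dtype=float) / np.linalg.norm(a)
    b = np.asarray(b, dtype=float) / np.linalg.norm(b)
    v = np.cross(a, b)                  # rotation axis (unnormalized)
    c = np.dot(a, b)                    # cosine of the rotation angle
    if np.isclose(c, -1.):
        raise ValueError('vectors are opposite; rotation axis is ambiguous')
    vx = np.array([[0., -v[2], v[1]],
                   [v[2], 0., -v[0]],
                   [-v[1], v[0], 0.]])  # skew-symmetric cross-product matrix
    return np.eye(3) + vx + vx.dot(vx) / (1. + c)
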
Example #9
def flat_surface2xy(surface, max_deformation):
    '''Returns a tuple with x and y coordinates of a flat surface
    
    Parameters
    ----------
    surface: Surface
        flat surface
    max_deformation: float
        maximum deformation to make a non-flat surface flat.
        The normals of each face must have a dot product with the average
        face normal that is not less than (1-max_deformation); otherwise
        an exception is raised. The rationale for this option is that certain
        surfaces may be almost flat, and those can be made flat without
        problem; but surfaces that are not flat, such as inflated surfaces,
        should not be flattable.
    
    Returns
    -------
    x: np.ndarray
        x coordinates
    y: np.ndarray
        y coordinates
    
    Notes
    -----
    If the surface is not sufficiently flat (as controlled by max_deformation),
    an exception is raised.
    '''

    s = surf.from_any(surface)
    face_normals = s.face_normals

    msk = np.all(np.logical_not(np.isnan(face_normals)), 1)

    avg_face_normal = s.nanmean_face_normal
    deformations = abs(
        1 - abs(np.dot(avg_face_normal[np.newaxis], face_normals[msk].T)))
    too_deformed = np.nonzero(deformations > max_deformation)[0]
    if len(too_deformed) > 0:
        raise ValueError('Surface is not sufficiently flat with '
                         'max_deformation=%.3f' % max_deformation)

    # find rotation so that surface is more or less orthogonal to
    # the unit vector (0,0,1)
    v = s.vertices
    z_axis = np.asarray([0, 0, 1.])
    r = vector_alignment_find_rotation(avg_face_normal, z_axis)

    # apply rotation
    v_rotated = r.dot(v.T)

    # discard z-coordinate
    x = v_rotated[0]
    y = v_rotated[1]

    return x, y
Example #10
    def get_node2voxels_mapping(self):
        n2v = super(VolSurfMinimalLowresMapping, self).\
                                get_node2voxels_mapping()

        # set low and high res intermediate surfaces
        lowres = surf.from_any(self._intermediate)
        highres = (self.pial_surface * .5) + \
                                (self.white_surface * .5)

        high2high_in_low = lowres.vonoroi_map_to_high_resolution_surf(highres)

        n_in_low2v = dict()
        ds = []

        for n, v2pos in n2v.items():
            (n_in_low, d) = high2high_in_low[n]
            if v2pos is None:
                continue

            ds.append(d)


            if n_in_low not in n_in_low2v:
                # not there - just set the dictionary
                n_in_low2v[n_in_low] = v2pos
            else:
                # is there - see if it is none
                cur = n_in_low2v[n_in_low]
                if cur is None and v2pos is not None:
                    # also overwrite (v2pos can also be None, that's fine)
                    n_in_low2v[n_in_low] = v2pos
                elif v2pos is not None:
                    # update
                    for v, pos in v2pos.items():
                        # minimal mapping, so voxel should not be there already
                        assert v not in n_in_low2v[n_in_low]
                        cur[v] = pos

        if __debug__ and 'SVS' in debug.active:
            ds = np.asarray(ds)
            mu = np.mean(ds)
            n = len(ds)
            s = np.std(ds)

            debug('SVS', 'Reassigned %d nodes by moving %.2f +/- %.2f to low-res',
                        (n, mu, s))



        return n_in_low2v
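
Stripped of the surface handling, the bookkeeping above reassigns each high-resolution node's voxel dictionary to its nearest low-resolution node and merges the results. A standalone sketch of just that merge step, with hypothetical inputs (n2v maps nodes to {voxel: position} dicts or None, high2low maps each high-res node to a (low-res node, distance) pair):

def merge_node2voxels(n2v, high2low):
    merged = {}
    for node, v2pos in n2v.items():
        if v2pos is None:
            continue
        low_node, _ = high2low[node]
        if low_node not in merged:
            merged[low_node] = dict(v2pos)
        else:
            for voxel, pos in v2pos.items():
                # with a minimal mapping each voxel appears under one node only
                assert voxel not in merged[low_node]
                merged[low_node][voxel] = pos
    return merged
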
Example #11
    def get_node2voxels_mapping(self):
        n2v = super(VolSurfMinimalLowresMapping, self).\
                                get_node2voxels_mapping()

        # set low and high res intermediate surfaces
        lowres = surf.from_any(self._intermediate)
        highres = (self.pial_surface * .5) + \
                                (self.white_surface * .5)

        high2high_in_low = lowres.vonoroi_map_to_high_resolution_surf(highres)

        n_in_low2v = dict()
        ds = []

        for n, v2pos in n2v.items():
            (n_in_low, d) = high2high_in_low[n]
            if v2pos is None:
                continue

            ds.append(d)


            if n_in_low not in n_in_low2v:
                # not there - just set the dictionary
                n_in_low2v[n_in_low] = v2pos
            else:
                # is there - see if it is none
                cur = n_in_low2v[n_in_low]
                if cur is None and v2pos is not None:
                    # also overwrite (v2pos can also be None, that's fine)
                    n_in_low2v[n_in_low] = v2pos
                elif v2pos is not None:
                    # update
                    for v, pos in v2pos.items():
                        # minimal mapping, so voxel should not be there already
                        assert v not in n_in_low2v[n_in_low]
                        cur[v] = pos

        if __debug__ and 'SVS' in debug.active:
            ds = np.asarray(ds)
            mu = np.mean(ds)
            n = len(ds)
            s = np.std(ds)

            debug('SVS', 'Reassigned %d nodes by moving %.2f +/- %.2f to low-res',
                        (n, mu, s))



        return n_in_low2v
Example #12
    def __init__(self,
                 surface,
                 curvature=None,
                 min_nsteps=500,
                 range_='2_98%',
                 threshold=None,
                 color_map=None):
        '''
        Parameters
        ----------
        surface: surf.Surface
            a flat surface
        curvature: str or np.ndarray
            (Filename of) data representing curvature at each node. 
        min_nsteps: int
            Minimal side of output plots in pixel
        range_: str or float or tuple
            If a tuple (a,b), then this tuple is returned.
            If a float a, then (-a,a) is returned.
            "R(a)", where R(a) denotes the string representation
            of float a, is equivalent to range_=a.
            "R(a)_R(b)" is equivalent to range_=(a,b).
            "R(a)_R(b)%" indicates that the a-th and b-th 
            percentile of xs is taken to define the range.
            "R(a)%" is equivalent to "R(a)_R(100-a)%"
        threshold: str or float or tuple
            Indicates which values will be shown. Syntax as in range_
        color_map: str
            colormap to use
        '''

        self._surface = surf.from_any(surface)

        if curvature is None:
            self._curvature = None
        else:
            self._curvature = curvature_from_any(curvature)
            if self._surface.nvertices != self._curvature.size:
                raise ValueError(
                    "Surface has %d vertices, but curvature %d" %
                    (self._surface.nvertices, self._curvature.size))

        self._min_nsteps = min_nsteps
        self._range_ = range_
        self._threshold = threshold
        self._color_map = color_map

        self._grid_def = None
        self._underlay = None
Example #13
    def get_surface(self, *args):
        '''
        Wizard-like function to get a surface

        Parameters
        ----------
        *args: list of str
            parts of the surface file name or description, such as
            'pial' (for pial surface), 'wm' (for white matter), or
            'lh' (for left hemisphere).

        Returns
        -------
        surf: surf.Surface

        '''
        return surf.from_any(self.get_surface_file(*args))
Example #14
    def get_surface(self, *args):
        '''
        Wizard-like function to get a surface

        Parameters
        ----------
        *args: list of str
            parts of the surface file name or description, such as
            'pial' (for pial surface), 'wm' (for white matter), or
            'lh' (for left hemisphere).

        Returns
        -------
        surf: surf.Surface

        '''
        return surf.from_any(self.get_surface_file(*args))
Example #15
    def __init__(self, surface, curvature=None, min_nsteps=500,
                        range_='2_98%', threshold=None, color_map=None):
        '''
        Parameters
        ----------
        surface: surf.Surface
            a flat surface
        curvature: str or np.ndarray
            (Filename of) data representing curvature at each node. 
        min_nsteps: int
            Minimal side of output plots in pixel
        range_: str or float or tuple
            If a tuple (a,b), then this tuple is returned.
            If a float a, then (-a,a) is returned.
            "R(a)", where R(a) denotes the string representation
            of float a, is equivalent to range_=a.
            "R(a)_R(b)" is equivalent to range_=(a,b).
            "R(a)_R(b)%" indicates that the a-th and b-th 
            percentile of xs is taken to define the range.
            "R(a)%" is equivalent to "R(a)_R(100-a)%"
        threshold: str or float or tuple
            Indicates which values will be shown. Syntax as in range_
        color_map: str
            colormap to use
        '''

        self._surface = surf.from_any(surface)

        if curvature is None:
            self._curvature = None
        else:
            self._curvature = curvature_from_any(curvature)
            if self._surface.nvertices != self._curvature.size:
                raise ValueError("Surface has %d vertices, but curvature %d" %
                                  (self._surface.nvertices, self._curvature.size))

        self._min_nsteps = min_nsteps
        self._range_ = range_
        self._threshold = threshold
        self._color_map = color_map

        self._grid_def = None
        self._underlay = None
Example #16
def voxel_selection(vol_surf_mapping,
                    radius,
                    source_surf=None,
                    source_surf_nodes=None,
                    distance_metric='dijkstra',
                    eta_step=10,
                    nproc=None,
                    outside_node_margin=None,
                    results_backend=None,
                    tmp_prefix='tmpvoxsel'):
    """
    Voxel selection for multiple center nodes on the surface

    Parameters
    ----------
    vol_surf_mapping: volsurf.VolSurfMapping
        Contains gray and white matter surface, and volume geometry
    radius: int or float
        Size of searchlight. If an integer, then it indicates the number of
        voxels. If a float, then it indicates the radius of the disc
    source_surf: surf.Surface or None
        Surface used to compute distance between nodes. If omitted, it is
        the average of the gray and white surfaces.
    source_surf_nodes: list of int or numpy array or None
        Indices of nodes in source_surf that serve as searchlight center.
        By default every node serves as a searchlight center.
    distance_metric: str
        Distance metric between nodes. 'euclidean' or 'dijkstra' (default)
    eta_step: int
        Report progress every eta_step (default: 10).
    nproc: int or None
        Number of parallel threads. None means as many threads as the
        system supports. The pprocess module is required for parallel threads;
        if it cannot be used, then a single thread is used.
    outside_node_margin: float or True or None (default)
        By default nodes outside the volume are skipped; using this
        parameter allows for a margin. If this value is a float (possibly
        np.inf), then all nodes within outside_node_margin Dijkstra
        distance from any node within the volume are still assigned
        associated voxels. If outside_node_margin is True, then a node is
        always assigned voxels regardless of its position in the volume.
    results_backend : 'native' or 'hdf5' or None (default).
        Specifies the way results are provided back from a processing block
        in case of nproc > 1. 'native' is pickling/unpickling of results by
        pprocess, while 'hdf5' would use h5save/h5load functionality.
        'hdf5' might be more time and memory efficient in some cases.
        If None, then 'hdf5' is used if available, else 'native'.
    tmp_prefix : str, optional
        If specified, serves as a prefix for temporary file storage
        if results_backend == 'hdf5'. It can thus specify the directory to use
        (a trailing file path separator is not added automagically).

    Returns
    -------
    sel: volume_mask_dict.VolumeMaskDictionary
        Voxel selection results that associate, with each node, the indices
        of the surrounding voxels.
    """

    # construct the intermediate surface, which is used
    # to measure distances
    intermediate_surf = (vol_surf_mapping.pial_surface * .5) + \
                        (vol_surf_mapping.white_surface * .5)

    if source_surf is None:
        source_surf = intermediate_surf
    else:
        source_surf = surf.from_any(source_surf)

    if _debug():
        debug(
            'SVS', "Generated high-res intermediate surface: "
            "%d nodes, %d faces" %
            (intermediate_surf.nvertices, intermediate_surf.nfaces))
        debug(
            'SVS', "Mapping source to high-res surface:"
            " %d nodes, %d faces" %
            (source_surf.nvertices, source_surf.nfaces))

    if distance_metric[0].lower() == 'e' and outside_node_margin:
        # euclidean distance: identity mapping
        # this is *slow*
        n = source_surf.nvertices
        xyz = source_surf.vertices
        src2intermediate = dict((i, tuple(xyz[i])) for i in range(n))
    else:
        # find a mapping from nodes in source_surf to those in
        # intermediate surface
        src2intermediate = source_surf.map_to_high_resolution_surf(\
                                                        intermediate_surf)

    # if no sources are given, then visit all nodes
    if source_surf_nodes is None:
        source_surf_nodes = np.arange(source_surf.nvertices)

    n = len(source_surf_nodes)

    if _debug():
        debug('SVS', "Performing surface-based voxel selection"
              " for %d centers" % n)

    # visit in random order, for a better ETA estimate
    visitorder = list(np.random.permutation(len(source_surf_nodes)))

    # construct mapping from nodes to enclosing voxels
    n2v = vol_surf_mapping.get_node2voxels_mapping()

    if __debug__:
        debug('SVS', "Generated mapping from nodes" " to intersecting voxels")

    # build voxel selector
    voxel_selector = VoxelSelector(radius,
                                   intermediate_surf,
                                   n2v,
                                   distance_metric,
                                   outside_node_margin=outside_node_margin)

    if _debug():
        debug('SVS', "Instantiated voxel selector (radius %r)" % radius)

    # structure to keep output data. Initialize with None, then
    # make a sparse_attributes instance when we know what the attributes are
    node2volume_attributes = None

    attribute_mapper = voxel_selector.disc_voxel_indices_and_attributes

    srcs_order = [source_surf_nodes[node] for node in visitorder]
    src_trg_nodes = [(src, src2intermediate[src]) for src in srcs_order]

    if nproc is not None and nproc > 1 and not externals.exists('pprocess'):
        raise RuntimeError("The 'pprocess' module is required for "
                           "multiprocess searchlights. Please either "
                           "install python-pprocess, or reduce `nproc` "
                           "to 1 (got nproc=%i) or set to default None" %
                           nproc)

    if nproc is None:
        if externals.exists('pprocess'):
            try:
                import pprocess
                nproc = pprocess.get_number_of_cores() or 1
                if _debug():
                    debug("SVS", 'Using pprocess with %d cores' % nproc)
            except:
                if _debug():
                    debug("SVS", 'pprocess not available')

        if nproc is None:
            # importing pprocess failed - so use a single core
            nproc = 1
            debug("SVS", 'Using %d cores - pprocess not available' % nproc)

    # get the voxel selection parameters
    parameter_dict = vol_surf_mapping.get_parameter_dict()
    parameter_dict.update(dict(radius=radius,
                               outside_node_margin=outside_node_margin,
                               distance_metric=distance_metric),
                          source_nvertices=source_surf.nvertices)

    init_output = lambda: volume_mask_dict.VolumeMaskDictionary(
        vol_surf_mapping.volgeom, intermediate_surf, meta=parameter_dict)

    if nproc > 1:
        if results_backend == 'hdf5':
            externals.exists('h5py', raise_=True)
        elif results_backend is None:
            if externals.exists(
                    'h5py') and externals.versions['hdf5'] >= '1.8.7':
                results_backend = 'hdf5'
            else:
                results_backend = 'native'
        if _debug():
            debug('SVS', "Using '%s' backend" % (results_backend, ))

        if results_backend not in ('native', 'hdf5'):
            raise ValueError('Illegal results backend %r' % results_backend)

        import pprocess
        n_srcs = len(src_trg_nodes)
        blocks = np.array_split(np.arange(n_srcs), nproc)

        results = pprocess.Map(limit=nproc)
        reducer = results.manage(pprocess.MakeParallel(_reduce_mapper))

        if __debug__:
            debug('SVS', "Starting %d child processes", (len(blocks), ))

        for i, block in enumerate(blocks):
            empty_dict = init_output()

            src_trg = []
            for idx in block:
                src_trg.append(src_trg_nodes[idx])

            if _debug():
                debug('SVS',
                      "  starting block %d/%d: %d centers" %
                      (i + 1, nproc, len(src_trg)),
                      cr=True)

            reducer(empty_dict,
                    attribute_mapper,
                    src_trg,
                    eta_step=eta_step,
                    proc_id='%d' % (i + 1, ),
                    results_backend=results_backend,
                    tmp_prefix=tmp_prefix)
        if _debug():
            debug('SVS', '')
            debug('SVS', 'Started all %d child processes' % (len(blocks)))
            tstart = time.time()

        node2volume_attributes = None
        for i, result in enumerate(results):
            if result is None:
                continue

            if results_backend == 'hdf5':
                result_fn = result
                result = h5load(result_fn)
                os.remove(result_fn)

            if node2volume_attributes is None:
                # first time we have actual results.
                # Use as a starting point
                node2volume_attributes = result
                if _debug():
                    debug('SVS', '')
                    debug(
                        'SVS', "Merging results from %d child "
                        "processes using '%s' backend" %
                        (len(blocks), results_backend))
            else:
                # merge new with current data
                node2volume_attributes.merge(result)
            if _debug():
                debug('SVS',
                      "  merged result block %d/%d" % (i + 1, nproc),
                      cr=True)

        if _debug():
            telapsed = time.time() - tstart
            debug('SVS', "")
            debug(
                'SVS', 'Merged results from %d child processes - '
                'took %s' % (len(blocks), seconds2prettystring(telapsed)))

    else:
        empty_dict = init_output()
        node2volume_attributes = _reduce_mapper(empty_dict,
                                                attribute_mapper,
                                                src_trg_nodes,
                                                eta_step=eta_step)
        debug('SVS', "")

    if _debug():
        if node2volume_attributes is None:
            msgs = [
                "Voxel selection completed: none of %d nodes have "
                "voxels associated" % len(visitorder)
            ]
        else:
            nvox_selected = np.sum(node2volume_attributes.get_mask() != 0)
            vg = vol_surf_mapping.volgeom

            msgs = [
                "Voxel selection completed: %d / %d nodes have "
                "voxels associated" %
                (len(node2volume_attributes.keys()), len(visitorder)),
                "Selected %d / %d  voxels (%.0f%%) in the mask at least once" %
                (nvox_selected, vg.nvoxels_mask,
                 100. * nvox_selected / vg.nvoxels_mask)
            ]

        for msg in msgs:
            debug("SVS", msg)

    if node2volume_attributes is None:
        warning('No voxels associated with any of %d nodes' % len(visitorder))
    return node2volume_attributes
Example #17
def voxel_selection(vol_surf_mapping, radius, source_surf=None, source_surf_nodes=None,
                    distance_metric='dijkstra',
                    eta_step=10, nproc=None,
                    outside_node_margin=None,
                    results_backend=None, tmp_prefix='tmpvoxsel'):

    """
    Voxel selection for multiple center nodes on the surface

    Parameters
    ----------
    vol_surf_mapping: volsurf.VolSurfMapping
        Contains gray and white matter surface, and volume geometry
    radius: int or float
        Size of searchlight. If an integer, then it indicates the number of
        voxels. If a float, then it indicates the radius of the disc
    source_surf: surf.Surface or None
        Surface used to compute distance between nodes. If omitted, it is
        the average of the gray and white surfaces.
    source_surf_nodes: list of int or numpy array or None
        Indices of nodes in source_surf that serve as searchlight center.
        By default every node serves as a searchlight center.
    distance_metric: str
        Distance metric between nodes. 'euclidean' or 'dijkstra' (default)
    eta_step: int
        Report progress every eta_step (default: 10).
    nproc: int or None
        Number of parallel threads. None means as many threads as the
        system supports. The pprocess module is required for parallel threads;
        if it cannot be used, then a single thread is used.
    outside_node_margin: float or True or None (default)
        By default nodes outside the volume are skipped; using this
        parameter allows for a margin. If this value is a float (possibly
        np.inf), then all nodes within outside_node_margin Dijkstra
        distance from any node within the volume are still assigned
        associated voxels. If outside_node_margin is True, then a node is
        always assigned voxels regardless of its position in the volume.
    results_backend : 'native' or 'hdf5' or None (default).
        Specifies the way results are provided back from a processing block
        in case of nproc > 1. 'native' is pickling/unpickling of results by
        pprocess, while 'hdf5' would use h5save/h5load functionality.
        'hdf5' might be more time and memory efficient in some cases.
        If None, then 'hdf5' is used if available, else 'native'.
    tmp_prefix : str, optional
        If specified, serves as a prefix for temporary file storage
        if results_backend == 'hdf5'. It can thus specify the directory to use
        (a trailing file path separator is not added automagically).

    Returns
    -------
    sel: volume_mask_dict.VolumeMaskDictionary
        Voxel selection results that associate, with each node, the indices
        of the surrounding voxels.
    """

    # construct the intermediate surface, which is used
    # to measure distances
    intermediate_surf = (vol_surf_mapping.pial_surface * .5) + \
                        (vol_surf_mapping.white_surface * .5)

    if source_surf is None:
        source_surf = intermediate_surf
    else:
        source_surf = surf.from_any(source_surf)

    if _debug():
        debug('SVS', "Generated high-res intermediate surface: "
              "%d nodes, %d faces" %
              (intermediate_surf.nvertices, intermediate_surf.nfaces))
        debug('SVS', "Mapping source to high-res surface:"
              " %d nodes, %d faces" %
              (source_surf.nvertices, source_surf.nfaces))


    if distance_metric[0].lower() == 'e' and outside_node_margin:
        # euclidean distance: identity mapping
        # this is *slow*
        n = source_surf.nvertices
        xyz = source_surf.vertices
        src2intermediate = dict((i, tuple(xyz[i])) for i in range(n))
    else:
        # find a mapping from nodes in source_surf to those in
        # intermediate surface
        src2intermediate = source_surf.map_to_high_resolution_surf(\
                                                        intermediate_surf)

    # if no sources are given, then visit all nodes
    if source_surf_nodes is None:
        source_surf_nodes = np.arange(source_surf.nvertices)

    n = len(source_surf_nodes)

    if _debug():
        debug('SVS',
              "Performing surface-based voxel selection"
              " for %d centers" % n)

    # visit in random order, for a better ETA estimate
    visitorder = list(np.random.permutation(len(source_surf_nodes)))

    # construct mapping from nodes to enclosing voxels
    n2v = vol_surf_mapping.get_node2voxels_mapping()

    if __debug__:
        debug('SVS', "Generated mapping from nodes"
              " to intersecting voxels")

    # build voxel selector
    voxel_selector = VoxelSelector(radius, intermediate_surf, n2v,
                                   distance_metric,
                                   outside_node_margin=outside_node_margin)

    if _debug():
        debug('SVS', "Instantiated voxel selector (radius %r)" % radius)


    # structure to keep output data. Initialize with None, then
    # make a sparse_attributes instance when we know what the attributes are
    node2volume_attributes = None

    attribute_mapper = voxel_selector.disc_voxel_indices_and_attributes

    srcs_order = [source_surf_nodes[node] for node in visitorder]
    src_trg_nodes = [(src, src2intermediate[src]) for src in srcs_order]

    if nproc is not None and nproc > 1 and not externals.exists('pprocess'):
        raise RuntimeError("The 'pprocess' module is required for "
                           "multiprocess searchlights. Please either "
                           "install python-pprocess, or reduce `nproc` "
                           "to 1 (got nproc=%i) or set to default None"
                           % nproc)

    if nproc is None:
        if externals.exists('pprocess'):
            try:
                import pprocess
                nproc = pprocess.get_number_of_cores() or 1
                if _debug():
                    debug("SVS", 'Using pprocess with %d cores' % nproc)
            except:
                if _debug():
                    debug("SVS", 'pprocess not available')

        if nproc is None:
            # importing pprocess failed - so use a single core
            nproc = 1
            debug("SVS", 'Using %d cores - pprocess not available' % nproc)

    # get the voxel selection parameters
    parameter_dict = vol_surf_mapping.get_parameter_dict()
    parameter_dict.update(dict(radius=radius,
                               outside_node_margin=outside_node_margin,
                               distance_metric=distance_metric),
                               source_nvertices=source_surf.nvertices)


    init_output = lambda: volume_mask_dict.VolumeMaskDictionary(
                                    vol_surf_mapping.volgeom,
                                    intermediate_surf,
                                    meta=parameter_dict)

    if nproc > 1:
        if results_backend == 'hdf5':
            externals.exists('h5py', raise_=True)
        elif results_backend is None:
            if externals.exists('h5py') and externals.versions['hdf5'] >= '1.8.7':
                results_backend = 'hdf5'
            else:
                results_backend = 'native'
        if _debug():
            debug('SVS', "Using '%s' backend" % (results_backend,))

        if results_backend not in ('native', 'hdf5'):
            raise ValueError('Illegal results backend %r' % results_backend)

        import pprocess
        n_srcs = len(src_trg_nodes)
        blocks = np.array_split(np.arange(n_srcs), nproc)

        results = pprocess.Map(limit=nproc)
        reducer = results.manage(pprocess.MakeParallel(_reduce_mapper))

        if __debug__:
            debug('SVS', "Starting %d child processes", (len(blocks),))

        for i, block in enumerate(blocks):
            empty_dict = init_output()

            src_trg = []
            for idx in block:
                src_trg.append(src_trg_nodes[idx])

            if _debug():
                debug('SVS', "  starting block %d/%d: %d centers" %
                            (i + 1, nproc, len(src_trg)), cr=True)

            reducer(empty_dict, attribute_mapper, src_trg,
                    eta_step=eta_step, proc_id='%d' % (i + 1,),
                    results_backend=results_backend, tmp_prefix=tmp_prefix)
        if _debug():
            debug('SVS', '')
            debug('SVS', 'Started all %d child processes' % (len(blocks)))
            tstart = time.time()

        node2volume_attributes = None
        for i, result in enumerate(results):
            if result is None:
                continue

            if results_backend == 'hdf5':
                result_fn = result
                result = h5load(result_fn)
                os.remove(result_fn)

            if node2volume_attributes is None:
                # first time we have actual results.
                # Use as a starting point
                node2volume_attributes = result
                if _debug():
                    debug('SVS', '')
                    debug('SVS', "Merging results from %d child "
                                 "processes using '%s' backend" %
                                 (len(blocks), results_backend))
            else:
                # merge new with current data
                node2volume_attributes.merge(result)
            if _debug():
                debug('SVS', "  merged result block %d/%d" % (i + 1, nproc),
                                cr=True)

        if _debug():
            telapsed = time.time() - tstart
            debug('SVS', "")
            debug('SVS', 'Merged results from %d child processes - '
                         'took %s' %
                         (len(blocks), seconds2prettystring(telapsed)))

    else:
        empty_dict = init_output()
        node2volume_attributes = _reduce_mapper(empty_dict,
                                                attribute_mapper,
                                                src_trg_nodes,
                                                eta_step=eta_step)
        debug('SVS', "")

    if _debug():
        if node2volume_attributes is None:
            msgs = ["Voxel selection completed: none of %d nodes have "
                    "voxels associated" % len(visitorder)]
        else:
            nvox_selected = np.sum(node2volume_attributes.get_mask() != 0)
            vg = vol_surf_mapping.volgeom

            msgs = ["Voxel selection completed: %d / %d nodes have "
                    "voxels associated" %
                    (len(node2volume_attributes.keys()), len(visitorder)),
                    "Selected %d / %d  voxels (%.0f%%) in the mask at least once" %
                    (nvox_selected, vg.nvoxels_mask,
                     100. * nvox_selected / vg.nvoxels_mask)]

        for msg in msgs:
            debug("SVS", msg)


    if node2volume_attributes is None:
        warning('No voxels associated with any of %d nodes' %
                        len(visitorder))
    return node2volume_attributes
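
Apart from the pprocess plumbing, the parallel branch above follows a plain split/apply/merge pattern. A minimal sketch with hypothetical process_block and merge callbacks standing in for _reduce_mapper and VolumeMaskDictionary.merge:

import numpy as np

def process_in_blocks(centers, process_block, merge, nproc=4):
    # split the center nodes into nproc roughly equal blocks
    blocks = np.array_split(np.asarray(centers), nproc)
    merged = None
    for block in blocks:
        result = process_block(block)
        if result is None:
            continue
        merged = result if merged is None else merge(merged, result)
    return merged
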
Example #18
def flat_surface2grid_mask(surface, min_nsteps, max_deformation):
    '''Computes a mask and corresponding coordinates from a flat surface 
    
    Parameters
    ----------
    surface: Surface
        flat surface
    min_nsteps: int
        minimum number of pixels in x and y direction
    max_deformation: float
        maximum deformation to make a non-flat surface flat.
        The normals of each face must have a dot product with the average
        face normal that is not less than (1-max_deformation); otherwise
        an exception is raised. The rationale for this option is that
        certain surfaces may be almost flat, and projecting the vertices
        on a truly flat surface should be fine. On the other hand, surfaces
        that are definitely not flat (such as full cortical surface models)
        should raise an error when an attempt is made to flatten
        them.
        
    Returns
    -------
    x: np.ndarray
        x coordinates of surface
    y: np.ndarray
        y coordinates of surface
    m: np.ndarray
        mask array of size PxQ, with min(P,Q)==min_nsteps.
        m[i,j]==True iff the position at (i,j) is 'inside' the flat surface
    xi: np.ndarray
        vector of length Q with interpolated x coordinates
    yi: np.ndarray
        vector of length P with interpolated y coordinates
    
    Notes
    -----
    The output of this function can be used with scipy.interpolate.griddata
    '''

    surface = surf.from_any(surface)
    x, y = flat_surface2xy(surface, max_deformation)
    xmin = np.min(x)

    xi, yi = unstructured_xy2grid_xy_vectors(x, y, min_nsteps)
    delta = xi[1] - xi[0]
    vi2xi = (x - xmin) / delta

    # compute paths of nodes on the border
    pths = surface.nodes_on_border_paths()

    # map x index to segments that cross the x coordinate
    # (a segment is a pair (i,j) where nodes i and j share a triangle
    #  and are on the border)
    xidx2segments = dict()

    for pth in pths:
        # make a tour across pairs (i,j)
        j = pth[-1]
        for i in pth:
            pq = vi2xi[i], vi2xi[j]
            p, q = min(pq), max(pq)
            # always go left (p) to right (q)
            for pqs in np.arange(np.ceil(p), np.ceil(q)):
                # take each point in between
                ipqs = int(pqs)

                # add to xidx2segments
                if ipqs not in xidx2segments:
                    xidx2segments[ipqs] = list()
                xidx2segments[ipqs].append((i, j))

            # take end point from last iteration as starting point
            # in next iteration
            j = i


    # space for the mask
    yxshape = len(yi), len(xi)
    msk = np.zeros(yxshape, dtype=np.bool_)

    # see which nodes are *inside* a surface 
    # (there can be multiple surfaces)
    for ii, xpos in enumerate(xi):
        if ii not in xidx2segments:
            continue
        segments = xidx2segments[ii]
        for jj, ypos in enumerate(yi):
            # based on PNPOLY (W. Randolph Franklin)
            # http://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html
            # retrieved Apr 2013
            c = False
            for i, j in segments:
                if ypos < (y[j] - y[i]) * (xpos - x[i]) / (x[j] - x[i]) + y[i]:
                    c = not c
            msk[jj, ii] = bool(c)

    return x, y, msk, xi, yi
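
The inner loop above is a crossing-number test in the spirit of PNPOLY (W. Randolph Franklin), adapted to per-column border segments. For reference, a self-contained sketch of the classic test for a single closed polygon:

def point_in_polygon(xpos, ypos, px, py):
    '''Crossing-number test: True if (xpos, ypos) lies inside the polygon
    whose vertices have coordinates px, py (sequences of equal length).'''
    inside = False
    j = len(px) - 1
    for i in range(len(px)):
        crosses = (py[i] > ypos) != (py[j] > ypos)
        if crosses and xpos < ((px[j] - px[i]) * (ypos - py[i])
                               / (py[j] - py[i]) + px[i]):
            inside = not inside
        j = i
    return inside
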
Example #19
    def __init__(self,
                 surface,
                 curvature=None,
                 min_nsteps=500,
                 range_='2_98%',
                 threshold=None,
                 color_map=None,
                 max_deformation=.5):
        '''
        Parameters
        ----------
        surface: surf.Surface
            a flat surface
        curvature: str or np.ndarray
            (Filename of) data representing curvature at each node. 
        min_nsteps: int
            Minimal side of output plots in pixel
        range_: str or float or tuple
            If a tuple (a,b), then this tuple is returned.
            If a float a, then (-a,a) is returned.
            "R(a)", where R(a) denotes the string representation
            of float a, is equivalent to range_=a.
            "R(a)_R(b)" is equivalent to range_=(a,b).
            "R(a)_R(b)%" indicates that the a-th and b-th 
            percentile of xs is taken to define the range.
            "R(a)%" is equivalent to "R(a)_R(100-a)%"
        threshold: str or float or tuple
            Indicates which values will be shown. Syntax as in range_
        color_map: str
            colormap to use
        max_deformation: float
            maximum deformation to make a non-flat surface flat.
            The normals of each face must have a dot product with the average
            face normal that is not less than (1-max_deformation); otherwise
            an exception is raised. The rationale for this option is that
            certain surfaces may be almost flat, and projecting the vertices
            on a truly flat surface should be fine. On the other hand, surfaces
            that are definitely not flat (such as full cortical surface models)
            should raise an error when an attempt is made to flatten
            them.
        '''

        self._surface = surf.from_any(surface)

        if curvature is None:
            self._curvature = None
        else:
            self._curvature = curvature_from_any(curvature)
            if self._surface.nvertices != len(self._curvature):
                raise ValueError(
                    "Surface has %d vertices, but curvature "
                    "has %d values" %
                    (self._surface.nvertices, self._curvature.size))

        self._min_nsteps = min_nsteps
        self._range_ = range_
        self._threshold = threshold
        self._color_map = color_map
        self._max_deformation = max_deformation

        self._grid_def = None
        self._underlay = None
Example #20
def flat_surface2grid_mask(surface, min_nsteps, max_deformation):
    '''Computes a mask and corresponding coordinates from a flat surface 
    
    Parameters
    ----------
    surface: Surface
        flat surface
    min_nsteps: int
        minimum number of pixels in x and y direction
    max_deformation: float
        maximum deformation to make a non-flat surface flat.
        The normals of each face must have a dot product with the average
        face normal that is not less than (1-max_deformation); otherwise
        an exception is raised. The rationale for this option is that
        certain surfaces may be almost flat, and projecting the vertices
        on a truly flat surface should be fine. On the other hand, surfaces
        that are definitely not flat (such as full cortical surface models)
        should raise an error when an attempt is made to flatten
        them.
        
    Returns
    -------
    x: np.ndarray
        x coordinates of surface
    y: np.ndarray
        y coordinates of surface
    m: np.ndarray
        mask array of size PxQ, with min(P,Q)==min_nsteps.
        m[i,j]==True iff the position at (i,j) is 'inside' the flat surface
    xi: np.ndarray
        vector of length Q with interpolated x coordinates
    yi: np.ndarray
        vector of length P with interpolated y coordinates
    
    Notes
    -----
    The output of this function can be used with scipy.interpolate.griddata
    '''

    surface = surf.from_any(surface)
    x, y = flat_surface2xy(surface, max_deformation)
    xmin = np.min(x)

    xi, yi = unstructured_xy2grid_xy_vectors(x, y, min_nsteps)
    delta = xi[1] - xi[0]
    vi2xi = (x - xmin) / delta

    # compute paths of nodes on the border
    pths = surface.nodes_on_border_paths()

    # map x index to segments that cross the x coordinate
    # (a segment is a pair (i,j) where nodes i and j share a triangle
    #  and are on the border)
    xidx2segments = dict()

    for pth in pths:
        # make a tour across pairs (i,j)
        j = pth[-1]
        for i in pth:
            pq = vi2xi[i], vi2xi[j]
            p, q = min(pq), max(pq)
            # always go left (p) to right (q)
            for pqs in np.arange(np.ceil(p), np.ceil(q)):
                # take each point in between
                ipqs = int(pqs)

                # add to xidx2segments
                if ipqs not in xidx2segments:
                    xidx2segments[ipqs] = list()
                xidx2segments[ipqs].append((i, j))

            # take end point from last iteration as starting point
            # in next iteration
            j = i

    # space for the mask
    yxshape = len(yi), len(xi)
    msk = np.zeros(yxshape, dtype=np.bool_)

    # see which nodes are *inside* a surface
    # (there can be multiple surfaces)
    for ii, xpos in enumerate(xi):
        if ii not in xidx2segments:
            continue
        segments = xidx2segments[ii]
        for jj, ypos in enumerate(yi):
            # based on PNPOLY (W. Randolph Franklin)
            # http://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html
            # retrieved Apr 2013
            c = False
            for i, j in segments:
                if ypos < (y[j] - y[i]) * (xpos - x[i]) / (x[j] - x[i]) + y[i]:
                    c = not c
            msk[jj, ii] = bool(c)

    return x, y, msk, xi, yi
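
As the Notes above state, the returned values are meant for scipy.interpolate.griddata. A minimal usage sketch, where surface is a flat surface and node_data is a hypothetical array of per-node float values for that surface:

import numpy as np
from scipy.interpolate import griddata

x, y, msk, xi, yi = flat_surface2grid_mask(surface, min_nsteps=500,
                                           max_deformation=.5)
# interpolate the per-node values onto the regular (yi, xi) grid
grid_x, grid_y = np.meshgrid(xi, yi)
img = griddata((x, y), node_data, (grid_x, grid_y), method='linear')
img[~msk] = np.nan   # blank out grid points outside the flat surface
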