Code example #1
def spectrum_of_largest(points,
                        faces,
                        spectrum_size=10,
                        exclude_labels=[-1],
                        normalization="areaindex",
                        areas=None,
                        verbose=False):
    """
    Compute Laplace-Beltrami spectrum on largest connected segment.

    In case a surface patch is fragmented, we select the largest fragment,
    remove extraneous triangular faces, and reindex indices.

    Parameters
    ----------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        background values to exclude
    normalization : string
        the method used to normalize eigenvalues
        if None, no normalization is used
        if "area", use area of the 2D structure as in Reuter et al. 2006
        if "index", divide eigenvalue by index to account for linear trend
        if "areaindex", do both (default)
    areas : numpy array or list of floats (or None)
        surface area scalar values for all vertices
    verbose : bool
        print statements?

    Returns
    -------
    spectrum : list
        first spectrum_size eigenvalues for Laplace-Beltrami spectrum

    Examples
    --------
    >>> # Spectrum for left postcentral + pars triangularis pial surfaces:
    >>> import numpy as np
    >>> from mindboggle.mio.vtks import read_scalars, read_vtk, write_vtk
    >>> from mindboggle.guts.mesh import keep_faces, reindex_faces_points
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_of_largest
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> label_file = fetch_data(urls['left_freesurfer_labels'], '', '.vtk')
    >>> area_file = fetch_data(urls['left_area'], '', '.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [-1]
    >>> normalization = None
    >>> points, indices, lines, faces, labels, f1, npoints, f2 = read_vtk(label_file,
    ...     return_first=True, return_array=True)
    >>> I20 = [i for i,x in enumerate(labels) if x==1020] # pars triangularis
    >>> I22 = [i for i,x in enumerate(labels) if x==1022] # postcentral
    >>> I22.extend(I20)
    >>> faces = keep_faces(faces, I22)
    >>> faces, points, o1 = reindex_faces_points(faces, points)
    >>> areas, u1 = read_scalars(area_file, True, True)
    >>> verbose = False
    >>> spectrum = spectrum_of_largest(points, faces, spectrum_size,
    ...     exclude_labels, normalization, areas, verbose)
    >>> [float("{0:.{1}f}".format(x, 5)) for x in spectrum[1::]]
    [0.00057, 0.00189, 0.00432, 0.00691, 0.00775]

    View both segments (skip test):

    >>> from mindboggle.mio.plots import plot_surfaces
    >>> scalars = np.zeros(np.shape(labels))
    >>> scalars[I22] = 1
    >>> vtk_file = 'test_two_labels.vtk'
    >>> write_vtk(vtk_file, points, indices, lines, faces,
    ...           scalars, scalar_names='scalars', scalar_type='int')
    >>> plot_surfaces(vtk_file) # doctest: +SKIP
    """
    import numpy as np
    #from scipy.sparse.linalg import eigsh, lobpcg

    from mindboggle.guts.segment import select_largest
    from mindboggle.shapes.laplace_beltrami import fem_laplacian

    if isinstance(areas, list):
        areas = np.array(areas)

    # Check to see if there are enough points:
    min_points_faces = spectrum_size
    npoints = len(points)
    if npoints < min_points_faces or len(faces) < min_points_faces:
        raise IOError("The input size {0} ({1} faces) should be much larger "
                      "than spectrum_size ({2})".format(
                          npoints, len(faces), spectrum_size))
    else:

        # --------------------------------------------------------------------
        # Select the largest segment (connected set of indices):
        # --------------------------------------------------------------------
        points, faces = select_largest(points,
                                       faces,
                                       exclude_labels,
                                       areas,
                                       reindex=True)

        # Alert if the number of indices is small:
        if len(points) < min_points_faces:
            raise IOError("The input size {0} is too small.".format(
                len(points)))
        elif faces:

            # ----------------------------------------------------------------
            # Compute spectrum:
            # ----------------------------------------------------------------
            spectrum = fem_laplacian(points, faces, spectrum_size,
                                     normalization, verbose)
            return spectrum
        else:
            return None
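The docstring above lists three eigenvalue normalization modes ("area", "index", and "areaindex"), but fem_laplacian, which applies them, is not part of this listing. The sketch below shows one plausible reading of those options, assuming that area normalization multiplies eigenvalues by the total surface area (as in Reuter et al. 2006) and that index normalization divides the i-th nonzero eigenvalue by i; the function name and formulas are illustrative assumptions, not mindboggle's implementation.

# A minimal sketch of the normalization modes described above; the exact
# behavior lives inside fem_laplacian, so this is an assumption based on the
# parameter description, not the actual mindboggle code.
def normalize_spectrum_sketch(spectrum, total_area, normalization=None):
    """Return a normalized copy of a raw Laplace-Beltrami spectrum."""
    normalized = list(spectrum)
    if normalization in ("area", "areaindex"):
        # Scale by surface area so differently sized but similarly shaped
        # surfaces become comparable (Reuter et al. 2006):
        normalized = [x * total_area for x in normalized]
    if normalization in ("index", "areaindex"):
        # Divide the i-th eigenvalue by i to remove the roughly linear
        # growth of eigenvalues with their index (keep the first, ~zero one):
        normalized = [normalized[0]] + [x / i for i, x
                                        in enumerate(normalized[1:], start=1)]
    return normalized

# Example with made-up numbers:
# normalize_spectrum_sketch([0.0, 0.0005, 0.0017], total_area=1.0e5,
#                           normalization="areaindex")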
Code example #2
def spectrum_per_label(vtk_file,
                       spectrum_size=10,
                       exclude_labels=[-1],
                       normalization='areaindex',
                       area_file='',
                       largest_segment=True,
                       verbose=False):
    """
    Compute Laplace-Beltrami spectrum per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues
        if None, no normalization is used
        if "area", use area of the 2D structure as in Reuter et al. 2006
        if "index", divide eigenvalue by index to account for linear trend
        if "areaindex", do both (default)
    area_file :  string (optional)
        name of VTK file with surface area scalar values
    largest_segment :  bool
        compute spectrum only for largest segment with a given label?
    verbose : bool
        print statements?

    Returns
    -------
    spectrum_lists : list of lists
        first eigenvalues for each label's Laplace-Beltrami spectrum
    label_list : list of integers
        list of unique labels for which spectra are obtained

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Spectrum for Twins-2-1 left postcentral (22) pial surface:
    >>> import numpy as np
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> vtk_file = fetch_data(urls['left_freesurfer_labels'], '', '.vtk')
    >>> area_file = fetch_data(urls['left_area'], '', '.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [0]  #[-1]
    >>> largest_segment = True
    >>> verbose = False
    >>> spectrum_lists, label_list = spectrum_per_label(vtk_file,
    ...     spectrum_size, exclude_labels, None, area_file, largest_segment,
    ...     verbose)
    >>> [float("{0:.{1}f}".format(x, 5)) for x in spectrum_lists[0]]
    [0.0, 0.00054, 0.00244, 0.00291, 0.00456, 0.00575]
    >>> label_list[0:10]
    [1029, 1005, 1011, 1021, 1008, 1025, 999, 1013, 1007, 1022]
    """
    from mindboggle.mio.vtks import read_vtk, read_scalars
    from mindboggle.guts.mesh import keep_faces, reindex_faces_points
    from mindboggle.shapes.laplace_beltrami import fem_laplacian,\
        spectrum_of_largest

    # Read VTK surface mesh file:
    points, indices, lines, faces, labels, scalar_names, npoints, \
        input_vtk = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    # Collect unique labels in order of first appearance, skipping excluded:
    ulabels = []
    for x in labels:
        if x not in ulabels and x not in exclude_labels:
            ulabels.append(int(x))
    label_list = []
    spectrum_lists = []
    for label in ulabels:
        #if label == 22:
        #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        # Determine the indices per label:
        Ilabel = [i for i, x in enumerate(labels) if x == label]
        if verbose:
            print('{0} vertices for label {1}'.format(len(Ilabel), label))

        # Remove background faces:
        pick_faces = keep_faces(faces, Ilabel)
        pick_faces, pick_points, o1 = reindex_faces_points(pick_faces, points)

        # Compute Laplace-Beltrami spectrum for the label:
        if largest_segment:
            exclude_labels_inner = [-1]
            spectrum = spectrum_of_largest(pick_points, pick_faces,
                                           spectrum_size, exclude_labels_inner,
                                           normalization, areas, verbose)
        else:
            spectrum = fem_laplacian(pick_points, pick_faces, spectrum_size,
                                     normalization, verbose)

        # Append to a list of lists of spectra:
        spectrum_lists.append(spectrum)
        label_list.append(label)

    return spectrum_lists, label_list
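spectrum_per_label returns two parallel lists, so a natural follow-up is to pair each label with its spectrum. The snippet below is a small usage sketch (not part of the original file) and assumes the spectrum_lists and label_list variables from the docstring example above.

# Usage sketch: pair each label with its spectrum.
spectra_by_label = dict(zip(label_list, spectrum_lists))

# For instance, the first few eigenvalues for FreeSurfer label 1022
# (left postcentral), if that label was processed:
if 1022 in spectra_by_label:
    print(spectra_by_label[1022][:3])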
Code example #3
def spectrum_of_largest(points, faces, spectrum_size=10, exclude_labels=[-1],
                        normalization=None, areas=None):
    """
    Compute Laplace-Beltrami spectrum on largest connected segment.

    In case a surface patch is fragmented, we select the largest fragment,
    remove extraneous triangular faces, and reindex indices.

    Parameters
    ----------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        background values to exclude
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    areas : numpy array or list of floats (or None)
        surface area scalar values for all vertices

    Returns
    -------
    spectrum : list
        first spectrum_size eigenvalues for Laplace-Beltrami spectrum

    Examples
    --------
    >>> # Spectrum for left postcentral + pars triangularis pial surfaces:
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.mio.vtks import read_scalars, read_vtk, write_vtk
    >>> from mindboggle.guts.mesh import remove_faces, reindex_faces_points
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_of_largest
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> label_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [-1]
    >>> normalization = None
    >>> faces, lines, indices, points, u1, labels, u2,u3 = read_vtk(label_file,
    ...      return_first=True, return_array=True)
    >>> I20 = [i for i,x in enumerate(labels) if x==20] # pars triangularis
    >>> I22 = [i for i,x in enumerate(labels) if x==22] # postcentral
    >>> I22.extend(I20)
    >>> faces = remove_faces(faces, I22)
    >>> faces, points, o1 = reindex_faces_points(faces, points)
    >>> areas, u1 = read_scalars(area_file, True, True)
    >>> #
    >>> spectrum_of_largest(points, faces, spectrum_size, exclude_labels,
    ...                     normalization, areas)
    [6.3469513010430304e-18,
     0.0005178862383467463,
     0.0017434911095630772,
     0.003667561767487686,
     0.005429017880363784,
     0.006309346984678924]
    >>> # View both segments:
    >>> from mindboggle.mio.plots import plot_surfaces
    >>> scalars = np.zeros(np.shape(labels))
    >>> scalars[I22] = 1
    >>> vtk_file = 'test_two_labels.vtk'
    >>> write_vtk(vtk_file, points, indices, lines, faces,
    ...           scalars, scalar_names='scalars', scalar_type='int')
    >>> plot_surfaces(vtk_file)

    """
    from scipy.sparse.linalg import eigsh, lobpcg
    import numpy as np

    from mindboggle.guts.segment import select_largest
    from mindboggle.shapes.laplace_beltrami import fem_laplacian

    if isinstance(areas, list):
        areas = np.array(areas)

    # Check to see if there are enough points:
    min_points_faces = spectrum_size
    npoints = len(points) 
    if npoints < min_points_faces or len(faces) < min_points_faces:
        print("The input size {0} ({1} faces) should be much larger "
              "than spectrum_size ({2})".
              format(npoints, len(faces), spectrum_size))
        return None
    else:

        #---------------------------------------------------------------------
        # Select the largest segment (connected set of indices):
        #---------------------------------------------------------------------
        points, faces = select_largest(points, faces, exclude_labels, areas,
                                       reindex=True)

        # Alert if the number of indices is small:
        if len(points) < min_points_faces:
            print("The input size {0} is too small.".format(len(points)))
            return None
        elif faces:

            #-----------------------------------------------------------------
            # Compute spectrum:
            #-----------------------------------------------------------------
            spectrum = fem_laplacian(points, faces, spectrum_size,
                                     normalization)
            return spectrum
        else:
            return None
Code example #4
def spectrum_per_label(vtk_file, spectrum_size=10, exclude_labels=[-1],
                       normalization='area', area_file='',
                       largest_segment=True):
    """
    Compute Laplace-Beltrami spectrum per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    area_file :  string (optional)
        name of VTK file with surface area scalar values
    largest_segment :  Boolean
        compute spectrum only for largest segment with a given label?

    Returns
    -------
    spectrum_lists : list of lists
        first eigenvalues for each label's Laplace-Beltrami spectrum
    label_list : list of integers
        list of unique labels for which spectra are obtained

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Spectrum for Twins-2-1 left postcentral (22) pial surface:
    >>> import os
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [0]  #[-1]
    >>> largest_segment = True
    >>> spectrum_per_label(vtk_file, spectrum_size, exclude_labels, None,
    ...                    area_file, largest_segment)
    ([[6.3469513010430304e-18,
       0.0005178862383467463,
       0.0017434911095630772,
       0.003667561767487686,
       0.005429017880363784,
       0.006309346984678924]],
     [22])

    """
    from mindboggle.mio.vtks import read_vtk, read_scalars
    from mindboggle.guts.mesh import remove_faces, reindex_faces_points
    from mindboggle.shapes.laplace_beltrami import fem_laplacian,\
        spectrum_of_largest

    # Read VTK surface mesh file:
    faces, u1, u2, points, u4, labels, u5, u6 = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    ulabels = []
    [ulabels.append(int(x)) for x in labels if x not in ulabels
     if x not in exclude_labels]
    label_list = []
    spectrum_lists = []
    for label in ulabels:
      #if label == 22:
      #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        # Determine the indices per label:
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        print('{0} vertices for label {1}'.format(len(Ilabel), label))

        # Remove background faces:
        pick_faces = remove_faces(faces, Ilabel)
        pick_faces, pick_points, o1 = reindex_faces_points(pick_faces, points)

        # Compute Laplace-Beltrami spectrum for the label:
        if largest_segment:
            exclude_labels_inner = [-1]
            spectrum = spectrum_of_largest(pick_points, pick_faces,
                                           spectrum_size,
                                           exclude_labels_inner,
                                           normalization, areas)
        else:
            spectrum = fem_laplacian(pick_points, pick_faces,
                                     spectrum_size, normalization)

        # Append to a list of lists of spectra:
        spectrum_lists.append(spectrum)
        label_list.append(label)

    return spectrum_lists, label_list
Code example #5
File: laplace_beltrami.py  Project: liob/mindboggle
def spectrum_per_label(vtk_file, spectrum_size=10, exclude_labels=[-1],
                       normalization='area', area_file='',
                       largest_segment=True, verbose=False):
    """
    Compute Laplace-Beltrami spectrum per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    area_file :  string (optional)
        name of VTK file with surface area scalar values
    largest_segment :  bool
        compute spectrum only for largest segment with a given label?
    verbose : bool
        print statements?

    Returns
    -------
    spectrum_lists : list of lists
        first eigenvalues for each label's Laplace-Beltrami spectrum
    label_list : list of integers
        list of unique labels for which spectra are obtained

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Spectrum for Twins-2-1 left postcentral (22) pial surface:
    >>> import numpy as np
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> vtk_file = fetch_data(urls['left_freesurfer_labels'], '', '.vtk')
    >>> area_file = fetch_data(urls['left_area'], '', '.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [0]  #[-1]
    >>> largest_segment = True
    >>> verbose = False
    >>> spectrum_lists, label_list = spectrum_per_label(vtk_file,
    ...     spectrum_size, exclude_labels, None, area_file, largest_segment,
    ...     verbose)
    >>> print(np.array_str(np.array(spectrum_lists[0][1::]),
    ...                    precision=5, suppress_small=True))
    [ 0.00054  0.00244  0.00291  0.00456  0.00575]
    >>> label_list[0:10]
    [1029, 1005, 1011, 1021, 1008, 1025, 999, 1013, 1007, 1022]

    """
    from mindboggle.mio.vtks import read_vtk, read_scalars
    from mindboggle.guts.mesh import keep_faces, reindex_faces_points
    from mindboggle.shapes.laplace_beltrami import fem_laplacian,\
        spectrum_of_largest

    # Read VTK surface mesh file:
    points, indices, lines, faces, labels, scalar_names, npoints, \
        input_vtk = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    ulabels = []
    [ulabels.append(int(x)) for x in labels if x not in ulabels
     if x not in exclude_labels]
    label_list = []
    spectrum_lists = []
    for label in ulabels:
      #if label == 22:
      #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        # Determine the indices per label:
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        if verbose:
          print('{0} vertices for label {1}'.format(len(Ilabel), label))

        # Remove background faces:
        pick_faces = keep_faces(faces, Ilabel)
        pick_faces, pick_points, o1 = reindex_faces_points(pick_faces, points)

        # Compute Laplace-Beltrami spectrum for the label:
        if largest_segment:
            exclude_labels_inner = [-1]
            spectrum = spectrum_of_largest(pick_points, pick_faces,
                                           spectrum_size,
                                           exclude_labels_inner,
                                           normalization, areas, verbose)
        else:
            spectrum = fem_laplacian(pick_points, pick_faces, spectrum_size,
                                     normalization, verbose)

        # Append to a list of lists of spectra:
        spectrum_lists.append(spectrum)
        label_list.append(label)

    return spectrum_lists, label_list
Code example #6
File: laplace_beltrami.py  Project: liob/mindboggle
def spectrum_of_largest(points, faces, spectrum_size=10, exclude_labels=[-1],
                        normalization=None, areas=None, verbose=False):
    """
    Compute Laplace-Beltrami spectrum on largest connected segment.

    In case a surface patch is fragmented, we select the largest fragment,
    remove extraneous triangular faces, and reindex indices.

    Parameters
    ----------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        background values to exclude
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    areas : numpy array or list of floats (or None)
        surface area scalar values for all vertices
    verbose : bool
        print statements?

    Returns
    -------
    spectrum : list
        first spectrum_size eigenvalues for Laplace-Beltrami spectrum

    Examples
    --------
    >>> # Spectrum for left postcentral + pars triangularis pial surfaces:
    >>> import numpy as np
    >>> from mindboggle.mio.vtks import read_scalars, read_vtk, write_vtk
    >>> from mindboggle.guts.mesh import keep_faces, reindex_faces_points
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_of_largest
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> label_file = fetch_data(urls['left_freesurfer_labels'], '', '.vtk')
    >>> area_file = fetch_data(urls['left_area'], '', '.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [-1]
    >>> normalization = None
    >>> points, indices, lines, faces, labels, f1, npoints, f2 = read_vtk(label_file,
    ...     return_first=True, return_array=True)
    >>> I20 = [i for i,x in enumerate(labels) if x==1020] # pars triangularis
    >>> I22 = [i for i,x in enumerate(labels) if x==1022] # postcentral
    >>> I22.extend(I20)
    >>> faces = keep_faces(faces, I22)
    >>> faces, points, o1 = reindex_faces_points(faces, points)
    >>> areas, u1 = read_scalars(area_file, True, True)
    >>> verbose = False
    >>> spectrum = spectrum_of_largest(points, faces, spectrum_size,
    ...     exclude_labels, normalization, areas, verbose)
    >>> print(np.array_str(np.array(spectrum[1::]),
    ...                    precision=5, suppress_small=True))
    [ 0.00057  0.00189  0.00432  0.00691  0.00775]

    View both segments (skip test):

    >>> from mindboggle.mio.plots import plot_surfaces
    >>> scalars = np.zeros(np.shape(labels))
    >>> scalars[I22] = 1
    >>> vtk_file = 'test_two_labels.vtk'
    >>> write_vtk(vtk_file, points, indices, lines, faces,
    ...           scalars, scalar_names='scalars', scalar_type='int')
    >>> plot_surfaces(vtk_file) # doctest: +SKIP

    """
    import numpy as np
    #from scipy.sparse.linalg import eigsh, lobpcg

    from mindboggle.guts.segment import select_largest
    from mindboggle.shapes.laplace_beltrami import fem_laplacian

    if isinstance(areas, list):
        areas = np.array(areas)

    # Check to see if there are enough points:
    min_points_faces = spectrum_size
    npoints = len(points) 
    if npoints < min_points_faces or len(faces) < min_points_faces:
        raise IOError("The input size {0} ({1} faces) should be much larger "
                      "than spectrum_size ({2})".
                      format(npoints, len(faces), spectrum_size))
    else:

        # --------------------------------------------------------------------
        # Select the largest segment (connected set of indices):
        # --------------------------------------------------------------------
        points, faces = select_largest(points, faces, exclude_labels, areas,
                                       reindex=True)

        # Alert if the number of indices is small:
        if len(points) < min_points_faces:
            raise IOError("The input size {0} is too small.".
                          format(len(points)))
        elif faces:

            # ----------------------------------------------------------------
            # Compute spectrum:
            # ----------------------------------------------------------------
            spectrum = fem_laplacian(points, faces, spectrum_size,
                                     normalization, verbose)
            return spectrum
        else:
            return None
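The keep_faces (or older remove_faces) and reindex_faces_points helpers used throughout these examples are not included in this listing. The toy sketch below illustrates the general restrict-and-reindex idea under simple assumptions (keep only triangles whose three vertices all belong to the kept set, then renumber vertices contiguously); it is not mindboggle's actual implementation.

# Toy illustration (not mindboggle's code) of restricting faces to a vertex
# subset and reindexing: keep triangles whose vertices are all in the kept
# set, then renumber vertices 0..len(kept)-1 and slice the point list.
def keep_and_reindex_sketch(faces, points, keep_indices):
    keep = set(keep_indices)
    kept_faces = [f for f in faces if all(v in keep for v in f)]
    # Map old vertex indices to new, contiguous ones:
    old_to_new = {old: new for new, old in enumerate(sorted(keep))}
    new_faces = [[old_to_new[v] for v in f] for f in kept_faces]
    new_points = [points[old] for old in sorted(keep)]
    return new_faces, new_points

faces = [[0, 1, 2], [1, 2, 3], [2, 3, 4]]
points = [[float(i), 0.0, 0.0] for i in range(5)]
print(keep_and_reindex_sketch(faces, points, [1, 2, 3]))
# ([[0, 1, 2]], [[1.0, 0.0, 0.0], [2.0, 0.0, 0.0], [3.0, 0.0, 0.0]])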
Code example #7
File: laplace_beltrami.py  Project: jsalva/mindboggle
def spectrum_per_label(vtk_file, spectrum_size=10, exclude_labels=[-1],
                       normalization='area', area_file='',
                       largest_segment=True):
    """
    Compute Laplace-Beltrami spectrum per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    area_file :  string
        name of VTK file with surface area scalar values
    largest_segment :  Boolean
        compute spectrum only for largest segment with a given label?

    Returns
    -------
    spectrum_lists : list of lists
        first eigenvalues for each label's Laplace-Beltrami spectrum
    label_list : list of integers
        list of unique labels for which spectra are obtained

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Spectrum for Twins-2-1 left postcentral (22) pial surface:
    >>> import os
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [0]  #[-1]
    >>> largest_segment = True
    >>> spectrum_per_label(vtk_file, spectrum_size, exclude_labels, None,
    ...                    area_file, largest_segment)
    ([[6.3469513010430304e-18,
       0.0005178862383467463,
       0.0017434911095630772,
       0.003667561767487686,
       0.005429017880363784,
       0.006309346984678924]],
     [22])

    """
    from mindboggle.utils.io_vtk import read_vtk, read_scalars
    from mindboggle.utils.mesh import remove_faces, reindex_faces_points
    from mindboggle.shapes.laplace_beltrami import fem_laplacian,\
        spectrum_of_largest

    # Read VTK surface mesh file:
    faces, u1, u2, points, u4, labels, u5, u6 = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    ulabels = []
    [ulabels.append(int(x)) for x in labels if x not in ulabels
     if x not in exclude_labels]
    label_list = []
    spectrum_lists = []
    for label in ulabels:
      #if label == 22:
      #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        # Determine the indices per label:
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        print('{0} vertices for label {1}'.format(len(Ilabel), label))

        # Remove background faces:
        pick_faces = remove_faces(faces, Ilabel)
        pick_faces, pick_points, o1 = reindex_faces_points(pick_faces, points)

        # Compute Laplace-Beltrami spectrum for the label:
        if largest_segment:
            exclude_labels_inner = [-1]
            spectrum = spectrum_of_largest(pick_points, pick_faces,
                                           spectrum_size,
                                           exclude_labels_inner,
                                           normalization, areas)
        else:
            spectrum = fem_laplacian(pick_points, pick_faces,
                                     spectrum_size, normalization)

        # Append to a list of lists of spectra:
        spectrum_lists.append(spectrum)
        label_list.append(label)

    return spectrum_lists, label_list
Code example #8
File: laplace_beltrami.py  Project: jsalva/mindboggle
def spectrum_of_largest(points, faces, spectrum_size=10, exclude_labels=[-1],
                        normalization=None, areas=None):
    """
    Compute Laplace-Beltrami spectrum on largest connected segment.

    In case a surface patch is fragmented, we select the largest fragment,
    remove extraneous triangular faces, and reindex indices.

    Parameters
    ----------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        background values to exclude
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    areas : numpy array or list of floats (or None)
        surface area scalar values for all vertices

    Returns
    -------
    spectrum : list
        first spectrum_size eigenvalues for Laplace-Beltrami spectrum

    Examples
    --------
    >>> # Spectrum for left postcentral + pars triangularis pial surfaces:
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.utils.io_vtk import read_scalars, read_vtk, write_vtk
    >>> from mindboggle.utils.mesh import remove_faces, reindex_faces_points
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_of_largest
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> label_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [-1]
    >>> normalization = None
    >>> faces, lines, indices, points, u1, labels, u2,u3 = read_vtk(label_file,
    ...      return_first=True, return_array=True)
    >>> I20 = [i for i,x in enumerate(labels) if x==20] # pars triangularis
    >>> I22 = [i for i,x in enumerate(labels) if x==22] # postcentral
    >>> I22.extend(I20)
    >>> faces = remove_faces(faces, I22)
    >>> faces, points, o1 = reindex_faces_points(faces, points)
    >>> areas, u1 = read_scalars(area_file, True, True)
    >>> #
    >>> spectrum_of_largest(points, faces, spectrum_size, exclude_labels,
    ...                     normalization, areas)
    [6.3469513010430304e-18,
     0.0005178862383467463,
     0.0017434911095630772,
     0.003667561767487686,
     0.005429017880363784,
     0.006309346984678924]
    >>> # View both segments:
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> scalars = np.zeros(np.shape(labels))
    >>> scalars[I22] = 1
    >>> vtk_file = 'test_two_labels.vtk'
    >>> write_vtk(vtk_file, points, indices, lines, faces,
    ...           scalars, scalar_names='scalars', scalar_type='int')
    >>> plot_surfaces(vtk_file)

    """
    from scipy.sparse.linalg import eigsh, lobpcg
    import numpy as np

    from mindboggle.utils.segment import select_largest
    from mindboggle.shapes.laplace_beltrami import fem_laplacian

    if isinstance(areas, list):
        areas = np.array(areas)

    # Check to see if there are enough points:
    min_points_faces = spectrum_size
    npoints = len(points) 
    if npoints < min_points_faces or len(faces) < min_points_faces:
        print("The input size {0} ({1} faces) should be much larger "
              "than spectrum_size ({2})".
              format(npoints, len(faces), spectrum_size))
        return None
    else:

        #---------------------------------------------------------------------
        # Select the largest segment (connected set of indices):
        #---------------------------------------------------------------------
        points, faces = select_largest(points, faces, exclude_labels, areas,
                                       reindex=True)

        # Alert if the number of indices is small:
        if len(points) < min_points_faces:
            print("The input size {0} is too small.".format(len(points)))
            return None
        elif faces:

            #-----------------------------------------------------------------
            # Compute spectrum:
            #-----------------------------------------------------------------
            spectrum = fem_laplacian(points, faces, spectrum_size,
                                     normalization)
            return spectrum
        else:
            return None
Code example #9
def spectrum_of_largest(points, faces, n_eigenvalues=6, exclude_labels=[-1],
                        normalization=None, areas=None):
    """
    Compute Laplace-Beltrami spectrum on largest connected segment.

    In case a surface patch is fragmented, we select the largest fragment,
    remove extraneous triangular faces, and reindex indices.

    Parameters
    ----------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh
    n_eigenvalues : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        background values to exclude
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    areas : numpy array or list of floats (or None)
        surface area scalar values for all vertices

    Returns
    -------
    spectrum : list
        first n_eigenvalues eigenvalues for Laplace-Beltrami spectrum

    Examples
    --------
    >>> # Spectrum for one label (artificial composite), two fragments:
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.utils.io_vtk import read_scalars, read_vtk, write_vtk
    >>> from mindboggle.utils.mesh import remove_faces
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_of_largest
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> label_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> n_eigenvalues = 6
    >>> exclude_labels = [0]  #[-1]
    >>> normalization = None
    >>> faces, lines, indices, points, foo1, labels, foo2, foo3 = read_vtk(label_file,
    ...      return_first=True, return_array=True)
    >>> I2 = [i for i,x in enumerate(labels) if x==2] # cingulate
    >>> I22 = [i for i,x in enumerate(labels) if x==22] # postcentral
    >>> I2.extend(I22)
    >>> faces = remove_faces(faces, I2)
    >>> areas, u1 = read_scalars(area_file, True, True)
    >>> #
    >>> spectrum_of_largest(points, faces, n_eigenvalues, exclude_labels,
    ...                     normalization, areas)
    >>> #
    >>> # View:
    >>> from mindboggle.utils.plots import plot_vtk
    >>> scalars = np.zeros(np.shape(labels))
    >>> scalars[I2] = 1
    >>> vtk_file = 'test_two_labels.vtk'
    >>> write_vtk(vtk_file, points, indices, lines, faces,
    ...           scalars, scalar_names='scalars')
    >>> plot_vtk(vtk_file)
        Load "Labels" scalars from lh.labels.DKT25.manual.vtk
        Reduced 290134 to 29728 triangular faces
        Load "scalars" scalars from lh.pial.area.vtk
        2 segments
        Reduced 29728 to 14498 triangular faces
        Compute linear FEM Laplace-Beltrami spectrum
        [-8.764053090852845e-18,
         0.00028121452203987146,
         0.0010941205613292243,
         0.0017301461686759188,
         0.0034244633555606295,
         0.004280982704174599]

    """
    from scipy.sparse.linalg import eigsh, lobpcg
    import numpy as np

    from mindboggle.utils.mesh import find_neighbors, remove_faces, \
        reindex_faces_points
    from mindboggle.utils.segment import segment
    from mindboggle.shapes.laplace_beltrami import fem_laplacian

    # Areas:
    use_area = False
    if isinstance(areas, np.ndarray) and np.shape(areas):
        use_area = True
    elif isinstance(areas, list) and len(areas):
        areas = np.array(areas)
        use_area = True

    # Check to see if there are enough points:
    min_npoints = n_eigenvalues
    npoints = len(points) 
    if npoints < min_npoints or len(faces) < min_npoints:
        print("The input size {0} ({1} faces) should be much larger "
              "than n_eigenvalues {2}".
              format(npoints, len(faces), n_eigenvalues))
        return None
    else:

        #---------------------------------------------------------------------
        # Segment the indices into connected sets of indices:
        #---------------------------------------------------------------------
        # Construct neighbor lists:
        neighbor_lists = find_neighbors(faces, npoints)

        # Determine the indices:
        indices = [x for sublst in faces for x in sublst]

        # Segment:
        segments = segment(indices, neighbor_lists, min_region_size=1,
            seed_lists=[], keep_seeding=False, spread_within_labels=False,
            labels=[], label_lists=[], values=[], max_steps='', verbose=False)

        #---------------------------------------------------------------------
        # Select the largest segment (connected set of indices):
        #---------------------------------------------------------------------
        unique_segments = [x for x in np.unique(segments)
                           if x not in exclude_labels]
        if len(unique_segments) > 1:
            select_indices = []
            max_segment_area = 0
            for segment_number in unique_segments:
                segment_indices = [i for i,x in enumerate(segments)
                                   if x == segment_number]
                if use_area:
                    segment_area = np.sum(areas[segment_indices])
                else:
                    segment_area = len(segment_indices)
                if segment_area > max_segment_area:
                    select_indices = segment_indices
                    max_segment_area = segment_area
            print('Maximum size of {0} segments: {1} vertices'.
                  format(len(unique_segments), len(select_indices)))

            #-----------------------------------------------------------------
            # Extract points and renumber faces for the selected indices:
            #-----------------------------------------------------------------
            faces = remove_faces(faces, select_indices)
        else:
            select_indices = indices

        # Alert if the number of indices is small:
        if len(select_indices) < min_npoints:
            print("The input size {0} is too small.".format(len(select_indices)))
            return None
        elif faces:

            #-----------------------------------------------------------------
            # Reindex indices in faces:
            #-----------------------------------------------------------------
            faces, points = reindex_faces_points(faces, points)

            #-----------------------------------------------------------------
            # Compute spectrum:
            #-----------------------------------------------------------------
            spectrum = fem_laplacian(points, faces, n_eigenvalues,
                                     normalization)

            return spectrum

        else:
            return None
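All of the versions above delegate the eigenvalue computation itself to fem_laplacian, and several import (or comment out) scipy's eigsh and lobpcg solvers without using them here. The sketch below shows how the smallest eigenvalues of a generalized problem A x = lambda B x could be obtained with eigsh in shift-invert mode, assuming A (stiffness) and B (mass) are the sparse FEM matrices that fem_laplacian assembles; the assembly step and the helper name are assumptions for illustration only.

# Minimal sketch of the eigenvalue step behind fem_laplacian. A (stiffness)
# and B (mass) are assumed to be scipy.sparse matrices built from the mesh;
# only the call to scipy.sparse.linalg.eigsh is illustrated here.
import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.linalg import eigsh

def smallest_eigenvalues_sketch(A, B, spectrum_size=6):
    """Return the smallest eigenvalues of A x = lambda B x, ascending."""
    # Shift-invert around a small negative sigma targets the eigenvalues
    # closest to zero, i.e. the low end of a Laplace-Beltrami spectrum:
    eigenvalues = eigsh(A, k=spectrum_size, M=B, sigma=-0.01,
                        which='LM', return_eigenvectors=False)
    return sorted(eigenvalues.tolist())

# Tiny self-contained check with a 3-node path-graph Laplacian as A and the
# identity as B (a stand-in, not a real surface mesh):
A = csr_matrix(np.array([[ 1., -1.,  0.],
                         [-1.,  2., -1.],
                         [ 0., -1.,  1.]]))
B = csr_matrix(np.eye(3))
print(smallest_eigenvalues_sketch(A, B, spectrum_size=2))  # approx. [0.0, 1.0]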