Example #1
def zernike_moments_per_label(vtk_file, order=10, exclude_labels=[-1],
                              scale_input=True,
                              decimate_fraction=0, decimate_smooth=25):
    """
    Compute the Zernike moments per labeled region in a file.

    Optionally decimate the input mesh.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    order : integer
        number of moments to compute
    exclude_labels : list of integers
        labels to be excluded
    scale_input : Boolean
        translate and scale each object so it is bounded by a unit sphere?
        (this is the expected input to zernike_moments())
    decimate_fraction : float
        fraction of mesh faces to remove for decimation (0 for no decimation)
    decimate_smooth : integer
        number of smoothing steps for decimation

    Returns
    -------
    descriptors_lists : list of lists of floats
        Zernike descriptors per label
    label_list : list of integers
        list of unique labels for which moments are computed

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Twins-2-1 left postcentral (22) pial surface:
    >>> import os
    >>> from mindboggle.shapes.zernike.zernike import zernike_moments_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> order = 3
    >>> exclude_labels = [0]
    >>> scale_input = True
    >>> zernike_moments_per_label(vtk_file, order, exclude_labels, scale_input)
    ([[0.00528486237819844,
       0.009571754617699853,
       0.0033489494903015944,
       0.00875603468268444,
       0.0015879536633349918,
       0.0008080165707033097]],
     [22])

    """
    import numpy as np
    from mindboggle.utils.io_vtk import read_vtk
    from mindboggle.utils.mesh import remove_faces
    from mindboggle.shapes.zernike.zernike import zernike_moments

    min_points_faces = 4

    #-------------------------------------------------------------------------
    # Read VTK surface mesh file:
    #-------------------------------------------------------------------------
    faces, u1, u2, points, u3, labels, u4, u5 = read_vtk(vtk_file)

    #-------------------------------------------------------------------------
    # Loop through labeled regions:
    #-------------------------------------------------------------------------
    ulabels = [x for x in np.unique(labels) if x not in exclude_labels]
    label_list = []
    descriptors_lists = []
    for label in ulabels:
      #if label == 22:
      #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        #---------------------------------------------------------------------
        # Determine the indices per label:
        #---------------------------------------------------------------------
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        print('  {0} vertices for label {1}'.format(len(Ilabel), label))
        if len(Ilabel) > min_points_faces:

            #-----------------------------------------------------------------
            # Remove background faces:
            #-----------------------------------------------------------------
            pick_faces = remove_faces(faces, Ilabel)
            if len(pick_faces) > min_points_faces:

                #-------------------------------------------------------------
                # Compute Zernike moments for the label:
                #-------------------------------------------------------------
                descriptors = zernike_moments(points, pick_faces,
                                              order, scale_input,
                                              decimate_fraction,
                                              decimate_smooth)

                #-------------------------------------------------------------
                # Append to a list of lists of spectra:
                #-------------------------------------------------------------
                descriptors_lists.append(descriptors)
                label_list.append(label)

    return descriptors_lists, label_list
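
A minimal sketch of the face-filtering convention used above (the comments in these examples describe remove_faces as dropping any face whose three vertices are not all among the given indices); keep_label_faces is a hypothetical stand-in, not the mindboggle function:

def keep_label_faces(faces, label_indices):
    """Keep only faces whose three vertices are all in label_indices."""
    index_set = set(label_indices)  # O(1) membership tests
    return [face for face in faces if all(v in index_set for v in face)]

print(keep_label_faces([[0, 1, 2], [2, 3, 4]], [0, 1, 2, 3]))
# [[0, 1, 2]] -- the face touching vertex 4 is dropped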
Example #2
def spectrum_per_label(vtk_file, spectrum_size=10, exclude_labels=[-1],
                       normalization='area', area_file='',
                       largest_segment=True):
    """
    Compute Laplace-Beltrami spectrum per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    spectrum_size : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    area_file :  string
        name of VTK file with surface area scalar values
    largest_segment :  Boolean
        compute spectrum only for largest segment with a given label?

    Returns
    -------
    spectrum_lists : list of lists
        first eigenvalues for each label's Laplace-Beltrami spectrum
    label_list : list of integers
        list of unique labels for which spectra are obtained

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Spectrum for Twins-2-1 left postcentral (22) pial surface:
    >>> import os
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> spectrum_size = 6
    >>> exclude_labels = [0]  #[-1]
    >>> largest_segment = True
    >>> spectrum_per_label(vtk_file, spectrum_size, exclude_labels, None,
    >>>                    area_file, largest_segment)
    ([[6.3469513010430304e-18,
       0.0005178862383467463,
       0.0017434911095630772,
       0.003667561767487686,
       0.005429017880363784,
       0.006309346984678924]],
     [22])

    """
    from mindboggle.utils.io_vtk import read_vtk, read_scalars
    from mindboggle.utils.mesh import remove_faces, reindex_faces_points
    from mindboggle.shapes.laplace_beltrami import fem_laplacian,\
        spectrum_of_largest

    # Read VTK surface mesh file:
    faces, u1, u2, points, u4, labels, u5, u6 = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    ulabels = []
    [ulabels.append(int(x)) for x in labels if x not in ulabels
     if x not in exclude_labels]
    label_list = []
    spectrum_lists = []
    for label in ulabels:
      #if label == 22:
      #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        # Determine the indices per label:
        Ilabel = [i for i,x in enumerate(labels) if x == label]
        print('{0} vertices for label {1}'.format(len(Ilabel), label))

        # Remove background faces:
        pick_faces = remove_faces(faces, Ilabel)
        pick_faces, pick_points, o1 = reindex_faces_points(pick_faces, points)

        # Compute Laplace-Beltrami spectrum for the label:
        if largest_segment:
            exclude_labels_inner = [-1]
            spectrum = spectrum_of_largest(pick_points, pick_faces,
                                           spectrum_size,
                                           exclude_labels_inner,
                                           normalization, areas)
        else:
            spectrum = fem_laplacian(pick_points, pick_faces,
                                     spectrum_size, normalization)

        # Append to a list of lists of spectra:
        spectrum_lists.append(spectrum)
        label_list.append(label)

    return spectrum_lists, label_list
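
The side-effect list comprehension that builds ulabels above is compact but easy to misread; here is an order-preserving sketch of the same idea in plain Python:

def unique_labels(labels, exclude_labels):
    """Unique integer labels in order of first appearance, minus exclusions."""
    seen = set(exclude_labels)
    ordered = []
    for x in labels:
        if int(x) not in seen:
            seen.add(int(x))
            ordered.append(int(x))
    return ordered

print(unique_labels([3, 3, 5, 0, 2], [0]))  # [3, 5, 2]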
Example #3
def close_surface_pair(faces, points1, points2, scalars, background_value=-1):
    """
    Close a surface patch by connecting its border vertices with
    corresponding vertices in a second surface file.

    Assumes no lines or indices when reading VTK files in.

    Note ::

        Scalar values different than background define the surface patch.
        The two sets of points have a 1-to-1 mapping; they are from
        two surfaces whose corresponding vertices are shifted in position.
        For pial vs. gray-white matter, the two surfaces are not parallel,
        so connecting the vertices leads to intersecting faces.

    Parameters
    ----------
    faces : list of lists of integers
        each sublist contains 3 indices of vertices that form a face
        on a surface mesh
    points1 : list of lists of floats
        each sublist contains 3-D coordinates of a vertex on a surface mesh
    points2 : list of lists of floats
        points from second surface with 1-to-1 correspondence with points1
    scalars : numpy array of integers
        labels used to find foreground vertices
    background_value : integer
        scalar value for background vertices

    Returns
    -------
    closed_faces : list of lists of integers
        indices of vertices that form a face on the closed surface mesh
    closed_points : list of lists of floats
        3-D coordinates from points1 and points2
    closed_scalars : list of integers
        scalar values for points1 and points2

    Examples
    --------
    >>> # Example 1: build a cube by closing two parallel planes:
    >>> import os
    >>> from mindboggle.utils.morph import close_surface_pair
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> from mindboggle.utils.io_vtk import write_vtk
    >>> # Build plane:
    >>> background_value = -1
    >>> n = 10  # plane edge length
    >>> points1 = []
    >>> for x in range(n):
    >>>     for y in range(n):
    >>>         points1.append([x,y,0])
    >>> points2 = [[x[0],x[1],1] for x in points1]
    >>> scalars = [background_value for x in range(len(points1))]
    >>> p = n*(n-1)//2 - 1
    >>> for i in [p, p+1, p+n, p+n+1]:
    >>>     scalars[i] = 1
    >>> faces = []
    >>> for x in range(n-1):
    >>>     for y in range(n-1):
    >>>         faces.append([x+y*n,x+n+y*n,x+n+1+y*n])
    >>>         faces.append([x+y*n,x+1+y*n,x+n+1+y*n])
    >>> #write_vtk('plane.vtk', points1, [], [], faces, scalars)
    >>> #plot_surfaces('plane.vtk') # doctest: +SKIP
    >>> closed_faces, closed_points, closed_scalars = close_surface_pair(faces, points1, points2, scalars, background_value)
    >>> # View:
    >>> write_vtk('cube.vtk', closed_points, [], [], closed_faces, closed_scalars, 'int')
    >>> plot_surfaces('cube.vtk') # doctest: +SKIP
    >>> #
    >>> # Example 2: Gray and white cortical brain surfaces:
    >>> import os
    >>> from mindboggle.utils.morph import close_surface_pair
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> from mindboggle.utils.io_vtk import read_scalars, read_vtk, read_points, write_vtk
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> patch_surface1 = 'fold.pial.vtk'
    >>> whole_surface2 = 'fold.white.vtk'
    >>> # Select a single fold:
    >>> folds_file = os.path.join(path, 'arno', 'features', 'folds.vtk')
    >>> points1 = read_points(folds_file)
    >>> scalars, name = read_scalars(folds_file, True, True)
    >>> fold_number = 11
    >>> scalars[scalars != fold_number] = -1
    >>> white_surface = os.path.join(path, 'arno', 'freesurfer', 'lh.white.vtk')
    >>> faces, u1, u2, points2, N, u3, u4, u5 = read_vtk(white_surface)
    >>> background_value = -1
    >>> closed_faces, closed_points, closed_scalars = close_surface_pair(faces, points1, points2, scalars, background_value)
    >>> # View:
    >>> write_vtk('closed.vtk', closed_points, [], [], closed_faces, closed_scalars, name, 'int')
    >>> plot_surfaces('closed.vtk') # doctest: +SKIP

    """
    import sys
    import numpy as np

    from mindboggle.utils.mesh import find_neighbors, remove_faces
    from mindboggle.utils.segment import extract_borders

    if isinstance(scalars, list):
        scalars = np.array(scalars)

    N = len(points1)
    closed_points = points1 + points2

    # Find all vertex neighbors and surface patch border vertices:
    neighbor_lists = find_neighbors(faces, N)
    I = np.where(scalars != background_value)[0]
    scalars[scalars == background_value] = background_value + 1
    scalars[I] = background_value + 2
    scalars = scalars.tolist()
    borders, u1, u2 = extract_borders(range(N), scalars, neighbor_lists)
    if not len(borders):
        sys.exit('There are no border vertices!')
    borders = [x for x in borders if x in I]

    # Reindex copy of faces and combine with original (both zero-index):
    indices = range(N)
    indices2 = range(N, 2 * N)
    reindex = dict([(index, indices2[i]) for i, index in enumerate(indices)])
    faces = remove_faces(faces, I)
    faces2 = [[reindex[i] for i in face] for face in faces]
    closed_faces = faces + faces2

    # Connect border vertices between surface patches and add new faces:
    add_faces = []
    taken_already = []
    for index in borders:
        if index not in taken_already:
            neighbors = list(set(neighbor_lists[index]).intersection(borders))
            taken_already.append(index)
            #taken_already.extend([index] + neighbors)
            for neighbor in neighbors:
                add_faces.append([index, index + N, neighbor])
                add_faces.append([index + N, neighbor, neighbor + N])
    closed_faces = closed_faces + add_faces

    closed_scalars = scalars * 2

    return closed_faces, closed_points, closed_scalars
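
The two add_faces.append calls above are the whole stitching rule: every border edge (index, neighbor) is bridged by two triangles, where a vertex's counterpart in points2 sits N positions later in the combined point list. The rule in isolation:

def stitch_border_edge(i, j, n_points):
    """Two triangles closing border edge (i, j) across two point sets."""
    return [[i, i + n_points, j], [i + n_points, j, j + n_points]]

print(stitch_border_edge(0, 1, 100))  # [[0, 100, 1], [100, 1, 101]]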
Example #4
def write_average_face_values_per_label(
    input_indices_vtk, input_values_vtk="", area_file="", output_stem="", exclude_values=[-1], background_value=-1
):
    """
    Write out a separate table of average face values for each integer
    (>-1) in (the first) scalar list of an input VTK file.
    Optionally average values drawn from a second VTK file.

    Parameters
    ----------
    input_indices_vtk : string
        path of the input VTK file that contains indices as scalars
    input_values_vtk : string
        path of the input VTK file that contains values as scalars
    area_file : string
        path of a VTK file with surface area scalar values;
        if given, vertex values are divided by vertex areas before averaging
    output_stem : string
        path and stem of each output table file
    exclude_values : list or array
        values to exclude
    background_value : integer or float
        background value in output files

    Examples
    --------
    >>> import os
    >>> from mindboggle.utils.io_table import write_average_face_values_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> input_indices_vtk = os.path.join(path, 'allen', 'labels', 'lh.DKTatlas100.gcs.vtk')
    >>> input_values_vtk = os.path.join(path, 'allen', 'shapes', 'lh.thickness.vtk')
    >>> area_file = os.path.join(path, 'allen', 'shapes', 'lh.pial.area.vtk')
    >>> output_stem = 'labels_thickness'
    >>> exclude_values = [-1]
    >>> background_value = -1
    >>> #
    >>> write_average_face_values_per_label(input_indices_vtk,
    >>>     input_values_vtk, area_file, output_stem, exclude_values, background_value)
    >>> #
    >>> # View:
    >>> #example_vtk = os.path.join(os.getcwd(), output_stem + '0.vtk')
    >>> #from mindboggle.utils.plots import plot_vtk
    >>> #plot_vtk(example_vtk)

    """
    import os
    import numpy as np
    from mindboggle.utils.io_vtk import read_scalars, read_vtk, write_vtk
    from mindboggle.utils.io_table import write_columns
    from mindboggle.utils.mesh import remove_faces

    # Load VTK file:
    faces, lines, indices, points, npoints, scalars, scalar_names, foo1 = read_vtk(input_indices_vtk, True, True)
    if area_file:
        area_scalars, name = read_scalars(area_file, True, True)
    print("Explode the scalar list in {0}".format(os.path.basename(input_indices_vtk)))
    if input_values_vtk != input_indices_vtk:
        values, name = read_scalars(input_values_vtk, True, True)
        print(
            "Explode the scalar list of values in {0} "
            "with the scalar list of indices in {1}".format(
                os.path.basename(input_values_vtk), os.path.basename(input_indices_vtk)
            )
        )
    else:
        values = np.copy(scalars)

    # Loop through unique (non-excluded) scalar values:
    unique_scalars = [int(x) for x in np.unique(scalars) if x not in exclude_values]
    for scalar in unique_scalars:

        # Create array and indices for scalar value:
        select_scalars = np.copy(scalars)
        select_scalars[scalars != scalar] = background_value
        scalar_indices = [i for i, x in enumerate(select_scalars) if x == scalar]
        print("  Scalar {0}: {1} vertices".format(scalar, len(scalar_indices)))

        # Keep only faces whose three vertices all have this scalar value:
        new_faces = remove_faces(faces, scalar_indices)

        # ---------------------------------------------------------------------
        # For each face, average vertex values:
        # ---------------------------------------------------------------------
        output_table = os.path.join(os.getcwd(), output_stem + str(scalar) + ".csv")
        columns = []
        for face in new_faces:
            face_values = []
            for index in face:
                if area_file:
                    face_values.append(values[index] / area_scalars[index])
                else:
                    face_values.append(values[index])
            columns.append(np.mean(face_values))

        # -----------------------------------------------------------------
        # Write to table:
        # -----------------------------------------------------------------
        write_columns(columns, "", output_table, delimiter=",", quote=False)
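
The averaging step above, isolated from the file I/O: per-vertex values (divided by per-vertex areas when an area file is given) are averaged over each face's three vertices. A self-contained sketch:

import numpy as np

def face_means(faces, values, areas=None):
    """Mean vertex value per face, optionally normalized by vertex area."""
    if areas is not None:
        values = [v / a for v, a in zip(values, areas)]
    return [float(np.mean([values[i] for i in face])) for face in faces]

print(face_means([[0, 1, 2]], [3.0, 6.0, 9.0]))  # [6.0]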
Example #5
        return W, Path, Degree, TreeNbr

# Example use of the minimum spanning tree algorithm
if __name__ == "__main__" :
    import os
    import numpy as np
    import networkx as nx
    from mindboggle.utils.io_vtk import read_vtk, rewrite_scalars
    from mindboggle.utils.mesh import find_neighbors, remove_faces
    from mindboggle.utils.mesh import min_span_tree
    from mindboggle.utils.plots import plot_vtk
    data_path = os.environ['MINDBOGGLE_DATA']
    sulci_file = os.path.join(data_path, 'arno', 'features', 'sulci.vtk')
    faces, lines, indices, points, npoints, sulci, name, input_vtk = read_vtk(sulci_file)
    sulcus_ID = 1
    sulcus_indices = [i for i,x in enumerate(sulci) if x == sulcus_ID]
    sulcus_faces = remove_faces(faces, sulcus_indices)
    sulcus_neighbor_lists = find_neighbors(sulcus_faces, len(points))
    G=nx.Graph()
    G.add_nodes_from(sulcus_indices)
    for i, sulcus_neighbor_list in enumerate(sulcus_neighbor_lists):
        G.add_edges_from([[i,x] for x in sulcus_neighbor_list])
    adjacency_matrix = nx.adjacency_matrix(G, nodelist=None, weight='weight')
    indices_to_connect = [0, len(sulcus_indices)-1]
    adjacency_matrix2, W, Path, Degree, TreeNbr = min_span_tree(adjacency_matrix,
                                                                indices_to_connect)

    # Write results to vtk file and view:
    MST = np.zeros(len(points))
    MST[W] = 1
    rewrite_scalars(sulci_file, 'test_min_span_tree.vtk', MST, 'MST', MST)
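
find_neighbors, used above to build the graph edges, returns one neighbor list per vertex; assuming the usual convention (two vertices are neighbors when they share a face), a self-contained sketch:

def neighbors_from_faces(faces, npoints):
    """Per-vertex neighbor lists from triangle faces."""
    neighbor_sets = [set() for _ in range(npoints)]
    for a, b, c in faces:
        neighbor_sets[a].update((b, c))
        neighbor_sets[b].update((a, c))
        neighbor_sets[c].update((a, b))
    return [sorted(s) for s in neighbor_sets]

print(neighbors_from_faces([[0, 1, 2], [1, 2, 3]], 4))
# [[1, 2], [0, 2, 3], [0, 1, 3], [1, 2]]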
Example #6
def zernike_moments_per_label(vtk_file, order=20, exclude_labels=[-1], area_file="", largest_segment=True):
    """
    Compute the Zernike moments per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    order : integer
        number of moments to compute
    exclude_labels : list of integers
        labels to be excluded
    area_file :  string
        name of VTK file with surface area scalar values
    largest_segment :  Boolean
        compute moments only for largest segment with a given label?

    Returns
    -------
    descriptors_lists : list of lists of floats
        Zernike descriptors per label
    label_list : list of integers
        list of unique labels for which moments are computed

    Examples
    --------
    >>> # Moments for label 22 (postcentral) in Twins-2-1:
    >>> import os
    >>> from mindboggle.shapes.zernike.zernike import zernike_moments_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> order = 3
    >>> exclude_labels = [0]
    >>> largest_segment = True
    >>> zernike_moments_per_label(vtk_file, order, exclude_labels, area_file,
    >>>                           largest_segment)
    ([[7562.751480397972,
       143262239.5171249,
       1107670.7893994227,
       28487908892.820065,
       112922387.17238183,
       10250734140.30357]],
     [22])
    >>> order = 10
    >>> zernike_moments_per_label(vtk_file, order, exclude_labels, area_file,
    >>>                           largest_segment)
    ([[7562.751480397972,
       143262239.5171249,
       3308874674202.293,
       8.485211965384958e+16,
       2.3330162566631947e+21,
       6.743205749389719e+25,
       1107670.7893994227,
       28487908892.820065,
       750581458956752.5,
       2.08268406178679e+19,
       6.041241636463012e+23,
       112922387.17238183,
       3771094165018.0186,
       1.1436534456761454e+17,
       3.475222918728238e+21,
       1.0745294340540639e+26,
       10250734140.30357,
       429344737184365.75,
       1.4944306620454633e+19,
       4.98685998888202e+23,
       889109957039.494,
       4.5419095219797416e+16,
       1.798809048329269e+21,
       6.5720455808877056e+25,
       76646448525991.2,
       4.648745223427816e+18,
       2.067942924550439e+23,
       6705825311489244.0,
       4.701251187236028e+20,
       2.3147665646780795e+25,
       5.969381989053711e+17,
       4.728007168783364e+22,
       5.360784767352255e+19,
       4.7214146910478664e+24,
       4.813773883638603e+21,
       4.3049570618844856e+23]],
     [22])

    """
    from mindboggle.utils.io_vtk import read_vtk, read_scalars
    from mindboggle.utils.mesh import remove_faces
    from mindboggle.shapes.zernike.zernike import zernike_moments, zernike_moments_of_largest

    # Read VTK surface mesh file:
    faces, u1, u2, points, u4, labels, u5, u6 = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    ulabels = []
    [ulabels.append(int(x)) for x in labels if x not in ulabels if x not in exclude_labels]
    label_list = []
    descriptors_lists = []
    for label in ulabels:
        # if label==22:
        #  print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        # Determine the indices per label:
        label_indices = [i for i, x in enumerate(labels) if x == label]
        print("{0} vertices for label {1}".format(len(label_indices), label))

        # Remove background faces:
        select_faces = remove_faces(faces, label_indices)

        # Compute Zernike moments for the label:
        if largest_segment:
            exclude_labels_inner = [-1]
            descriptors = zernike_moments_of_largest(points, select_faces, order, exclude_labels_inner, areas)
        else:
            descriptors = zernike_moments(points, select_faces, order)

        # Append to a list of lists of spectra:
        descriptors_lists.append(descriptors)
        label_list.append(label)

    return descriptors_lists, label_list
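
The docstring above shows 6 descriptors at order 3 and 36 at order 10. Assuming the standard (n, l) indexing of 3D Zernike moments (l <= n <= order, with n - l even), those counts can be reproduced directly:

def num_zernike_descriptors(order):
    """Count (n, l) pairs with l <= n <= order and n - l even."""
    return sum(1 for n in range(order + 1)
               for l in range(n + 1) if (n - l) % 2 == 0)

print(num_zernike_descriptors(3), num_zernike_descriptors(10))  # 6 36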
Example #7
def spectrum_per_label(vtk_file, n_eigenvalues=3, exclude_labels=[-1],
                       normalization='area', area_file=''):
    """
    Compute Laplace-Beltrami spectrum per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    n_eigenvalues : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        labels to be excluded
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    area_file :  string
        name of VTK file with surface area scalar values

    Returns
    -------
    spectrum_lists : list of lists
        first eigenvalues for each label's Laplace-Beltrami spectrum
    label_list : list of integers
        list of unique labels for which spectra are obtained

    Examples
    --------
    >>> # Spectrum for label 22 (postcentral) in Twins-2-1:
    >>> import os
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> n_eigenvalues = 6
    >>> exclude_labels = [0]  #[-1]
    >>> spectrum_per_label(vtk_file, n_eigenvalues, exclude_labels,
    >>>                    normalization=None, area_file=area_file)
        Load "Labels" scalars from lh.labels.DKT25.manual.vtk
        Load "scalars" scalars from lh.pial.area.vtk
        7819 vertices for label 22
        Reduced 290134 to 15230 triangular faces
        Linear FEM Laplace-Beltrami spectrum:
        ([[6.3469513010430304e-18,
           0.0005178862383467463,
           0.0017434911095630772,
           0.003667561767487686,
           0.005429017880363784,
           0.006309346984678924]],
         [22])

    """
    from mindboggle.utils.io_vtk import read_vtk, read_scalars
    from mindboggle.utils.mesh import remove_faces
    from mindboggle.shapes.laplace_beltrami import spectrum_of_largest

    # Read VTK surface mesh file:
    faces, u1, u2, points, u4, labels, u5, u6 = read_vtk(vtk_file)

    # Area file:
    if area_file:
        areas, u1 = read_scalars(area_file)
    else:
        areas = None

    # Loop through labeled regions:
    ulabels = []
    [ulabels.append(int(x)) for x in labels if x not in ulabels
     if x not in exclude_labels]
    label_list = []
    spectrum_lists = []
    for label in ulabels:
      #if label==22:

        # Determine the indices per label:
        label_indices = [i for i,x in enumerate(labels) if x == label]
        print('{0} vertices for label {1}'.format(len(label_indices), label))

        # Remove background faces:
        select_faces = remove_faces(faces, label_indices)

        # Compute Laplace-Beltrami spectrum for the label:
        spectrum = spectrum_of_largest(points, select_faces, n_eigenvalues,
                                       exclude_labels, normalization, areas)

        # Append to a list of lists of spectra:
        spectrum_lists.append(spectrum)
        label_list.append(label)

    return spectrum_lists, label_list
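
For the 'area' normalization cited above (Reuter et al. 2006), Laplace-Beltrami eigenvalues scale inversely with surface area, so multiplying a spectrum by the structure's total area makes spectra comparable across sizes. A hedged sketch, assuming that convention:

import numpy as np

def area_normalize(spectrum, total_area):
    """Scale eigenvalues by total surface area (size-invariant spectrum)."""
    return (np.asarray(spectrum) * total_area).tolist()

print(area_normalize([0.0, 0.002, 0.005], 100.0))  # [0.0, 0.2, 0.5]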
Example #8
def propagate(points, faces, region, seeds, labels,
              max_iters=500, tol=0.001, sigma=10):
    """
    Propagate labels to segment surface into contiguous regions,
    starting from seed vertices.

    Imports : mindboggle.labels.rebound

    Parameters
    ----------
    points : array (or list) of lists of three integers
        coordinates for all vertices
    faces : list of lists of three integers
        indices to three vertices per face (indices start from zero)
    region : list (or array) of integers
        values > -1 indicate inclusion in a region for all vertices
    seeds : numpy array of integers
        seed numbers for all vertices (default -1 for not a seed)
    labels : numpy array of integers
        label numbers for all vertices, with -1s for unlabeled vertices
    max_iters : integer
        maximum number of iterations to run graph-based learning algorithm
    tol: float
        threshold to assess convergence of the algorithm
    sigma: float
        gaussian kernel parameter

    Returns
    -------
    segments : numpy array of integers
        region numbers for all vertices (default -1)

    Examples
    --------
    >>> # Propagate labels between label boundary segments in a single fold:
    >>> import os
    >>> import numpy as np
    >>> import mindboggle.labels.rebound as rb
    >>> from mindboggle.utils.mesh import find_neighbors
    >>> from mindboggle.labels.labels import extract_borders
    >>> from mindboggle.utils.segment import propagate
    >>> from mindboggle.utils.io_vtk import read_scalars, read_vtk, rewrite_scalars
    >>> from mindboggle.labels.protocol import dkt_protocol
    >>> protocol = 'DKT25'
    >>> sulcus_names, sulcus_label_pair_lists, unique_sulcus_label_pairs, \
    ...     label_names, label_numbers, cortex_names, cortex_numbers, \
    ...     noncortex_names, noncortex_numbers = dkt_protocol(protocol)
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> folds_file = os.path.join(path, 'arno', 'features', 'folds.vtk')
    >>> labels_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> folds, name = read_scalars(folds_file, return_first=True, return_array=True)
    >>> faces, lines, indices, points, npoints, labels, name, input_vtk = read_vtk(labels_file,
    >>>     return_first=True, return_array=True)
    >>> neighbor_lists = find_neighbors(faces, npoints)
    >>> indices_borders, label_pairs, foo = extract_borders(range(npoints),
    >>>     labels, neighbor_lists)
    >>> # Select a single fold
    >>> fold_ID = 2
    >>> indices_fold = [i for i,x in enumerate(folds) if x == fold_ID]
    >>> fold_array = -1 * np.ones(npoints)
    >>> fold_array[indices_fold] = 1
    >>> # Extract the boundary for this fold
    >>> indices_borders, label_pairs, foo = extract_borders(indices_fold,
    >>>     labels, neighbor_lists)
    >>> # Select boundary segments in the sulcus labeling protocol
    >>> seeds = -1 * np.ones(npoints)
    >>> for ilist,label_pair_list in enumerate(sulcus_label_pair_lists):
    >>>     I = [x for i,x in enumerate(indices_borders)
    >>>          if np.sort(label_pairs[i]).tolist() in label_pair_list]
    >>>     seeds[I] = ilist
    >>> #
    >>> segments = propagate(points, faces, fold_array, seeds, labels)
    >>> #
    >>> # Write results to vtk file and view:
    >>> rewrite_scalars(labels_file, 'propagate.vtk',
    >>>                 segments, 'segments', segments)
    >>> from mindboggle.utils.plots import plot_vtk
    >>> plot_vtk('propagate.vtk')

    """
    import numpy as np
    from mindboggle.utils.mesh import remove_faces
    import mindboggle.utils.kernels as kernels
    import mindboggle.labels.rebound as rb

    # Make sure arguments are numpy arrays
    if not isinstance(seeds, np.ndarray):
        seeds = np.array(seeds)
    if not isinstance(labels, np.ndarray):
        labels = np.array(labels)
    if not isinstance(points, np.ndarray):
        points = np.array(points)

    indices_region = [i for i,x in enumerate(region) if x > -1]
    if indices_region and faces:
        local_indices_region = -1 * np.ones(labels.shape)
        local_indices_region[indices_region] = range(len(indices_region))

        if points.size:

            n_sets = len(np.unique([x for x in seeds if x > -1]))
            if n_sets == 1:
                print('Segment {0} vertices from 1 set of seed vertices'.
                      format(len(indices_region)))
            else:
                print('Segment {0} vertices from {1} sets of seed vertices'.
                      format(len(indices_region), n_sets))

            # Remove faces whose three vertices are not among specified indices:
            refaces = remove_faces(faces, indices_region)

            # Set up rebound Bounds class instance
            B = rb.Bounds()
            if refaces:
                B.Faces = np.array(refaces)
                B.Indices = local_indices_region
                B.Points = points[indices_region]
                B.Labels = labels[indices_region]
                B.seed_labels = seeds[indices_region]
                B.num_points = len(B.Points)

                # Propagate seed IDs from seeds
                B.graph_based_learning(method='propagate_labels', realign=False,
                                       kernel=kernels.rbf_kernel, sigma=sigma,
                                       max_iters=max_iters, tol=tol, vis=False)
                # Assign maximum probability seed IDs to each point of region
                max_prob_labels = B.assign_max_prob_label()

                # Return segment IDs in original vertex array
                segments = -1 * np.ones(len(points))
                segments[indices_region] = max_prob_labels
            else:
                print("  No faces")
                segments = -1 * np.ones(len(points))
        else:
            segments = -1 * np.ones(len(points))
    else:
        segments = -1 * np.ones(len(points))

    return segments
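
propagate hands kernels.rbf_kernel and sigma to the graph-based learner; below is a sketch of the standard Gaussian (RBF) weighting that name suggests (an assumption about mindboggle's implementation, shown only to make the sigma parameter concrete):

import numpy as np

def rbf_weight(x1, x2, sigma):
    """Gaussian similarity between two vertex positions."""
    x1, x2 = np.asarray(x1, float), np.asarray(x2, float)
    return float(np.exp(-np.sum((x1 - x2) ** 2) / (2.0 * sigma ** 2)))

print(round(rbf_weight([0, 0, 0], [3, 4, 0], sigma=10), 4))  # 0.8825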
Example #9
def spectrum_of_largest(points, faces, n_eigenvalues=6, exclude_labels=[-1],
                        normalization=None, areas=None):
    """
    Compute Laplace-Beltrami spectrum on largest connected segment.

    In case a surface patch is fragmented, we select the largest fragment,
    remove extraneous triangular faces, and reindex indices.

    Parameters
    ----------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh
    n_eigenvalues : integer
        number of eigenvalues to be computed (the length of the spectrum)
    exclude_labels : list of integers
        background values to exclude
    normalization : string
        the method used to normalize eigenvalues ('area' or None)
        if "area", use area of the 2D structure as in Reuter et al. 2006
    areas : numpy array or list of floats (or None)
        surface area scalar values for all vertices

    Returns
    -------
    spectrum : list
        first n_eigenvalues eigenvalues for Laplace-Beltrami spectrum

    Examples
    --------
    >>> # Spectrum for one label (artificial composite), two fragments:
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.utils.io_vtk import read_scalars, read_vtk, write_vtk
    >>> from mindboggle.utils.mesh import remove_faces
    >>> from mindboggle.shapes.laplace_beltrami import spectrum_of_largest
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> label_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> n_eigenvalues = 6
    >>> exclude_labels = [0]  #[-1]
    >>> normalization = None
    >>> faces, lines, indices, points, foo1, labels, foo2, foo3 = read_vtk(label_file,
    >>>      return_first=True, return_array=True)
    >>> I2 = [i for i,x in enumerate(labels) if x==2] # cingulate
    >>> I22 = [i for i,x in enumerate(labels) if x==22] # postcentral
    >>> I2.extend(I22)
    >>> faces = remove_faces(faces, I2)
    >>> areas, u1 = read_scalars(area_file, True, True)
    >>> #
    >>> spectrum_of_largest(points, faces, n_eigenvalues, exclude_labels,
    >>>                     normalization, areas)
    >>> #
    >>> # View:
    >>> from mindboggle.utils.plots import plot_vtk
    >>> scalars = np.zeros(np.shape(labels))
    >>> scalars[I2] = 1
    >>> vtk_file = 'test_two_labels.vtk'
    >>> write_vtk(vtk_file, points, indices, lines, faces,
    >>>           scalars, scalar_names='scalars')
    >>> plot_vtk(vtk_file)
        Load "Labels" scalars from lh.labels.DKT25.manual.vtk
        Reduced 290134 to 29728 triangular faces
        Load "scalars" scalars from lh.pial.area.vtk
        2 segments
        Reduced 29728 to 14498 triangular faces
        Compute linear FEM Laplace-Beltrami spectrum
        [-8.764053090852845e-18,
         0.00028121452203987146,
         0.0010941205613292243,
         0.0017301461686759188,
         0.0034244633555606295,
         0.004280982704174599]

    """
    import numpy as np

    from mindboggle.utils.mesh import find_neighbors, remove_faces, \
        reindex_faces_points
    from mindboggle.utils.segment import segment
    from mindboggle.shapes.laplace_beltrami import fem_laplacian

    # Areas:
    use_area = False
    if isinstance(areas, np.ndarray) and np.shape(areas):
        use_area = True
    elif isinstance(areas, list) and len(areas):
        areas = np.array(areas)
        use_area = True

    # Check to see if there are enough points:
    min_npoints = n_eigenvalues
    npoints = len(points) 
    if npoints < min_npoints or len(faces) < min_npoints:
        print("The input size {0} ({1} faces) should be much larger "
              "than n_eigenvalues {2}".
              format(npoints, len(faces), n_eigenvalues))
        return None
    else:

        #---------------------------------------------------------------------
        # Segment the indices into connected sets of indices:
        #---------------------------------------------------------------------
        # Construct neighbor lists:
        neighbor_lists = find_neighbors(faces, npoints)

        # Determine the indices:
        indices = [x for sublst in faces for x in sublst]

        # Segment:
        segments = segment(indices, neighbor_lists, min_region_size=1,
            seed_lists=[], keep_seeding=False, spread_within_labels=False,
            labels=[], label_lists=[], values=[], max_steps='', verbose=False)

        #---------------------------------------------------------------------
        # Select the largest segment (connected set of indices):
        #---------------------------------------------------------------------
        unique_segments = [x for x in np.unique(segments)
                           if x not in exclude_labels]
        if len(unique_segments) > 1:
            select_indices = []
            max_segment_area = 0
            for segment_number in unique_segments:
                segment_indices = [i for i,x in enumerate(segments)
                                   if x == segment_number]
                if use_area:
                    segment_area = np.sum(areas[segment_indices])
                else:
                    segment_area = len(segment_indices)
                if segment_area > max_segment_area:
                    select_indices = segment_indices
                    max_segment_area = segment_area
            print('Maximum size of {0} segments: {1} vertices'.
                  format(len(unique_segments), len(select_indices)))

            #-----------------------------------------------------------------
            # Extract points and renumber faces for the selected indices:
            #-----------------------------------------------------------------
            faces = remove_faces(faces, select_indices)
        else:
            select_indices = indices

        # Alert if the number of indices is small:
        if len(select_indices) < min_npoints:
            print("The input size {0} is too small.".format(len(select_indices)))
            return None
        elif faces:

            #-----------------------------------------------------------------
            # Reindex indices in faces:
            #-----------------------------------------------------------------
            faces, points, o1 = reindex_faces_points(faces, points)

            #-----------------------------------------------------------------
            # Compute spectrum:
            #-----------------------------------------------------------------
            spectrum = fem_laplacian(points, faces, n_eigenvalues,
                                     normalization)

            return spectrum

        else:
            return None
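
The selection loop above implements one rule: prefer the segment with the greatest total vertex area when areas are available, otherwise the one with the most vertices. The rule in isolation:

import numpy as np

def largest_segment_indices(segments, exclude_labels, areas=None):
    """Indices of the largest segment by area (or by vertex count)."""
    best_indices, best_size = [], 0.0
    for seg in [x for x in np.unique(segments) if x not in exclude_labels]:
        indices = [i for i, x in enumerate(segments) if x == seg]
        if areas is not None:
            size = float(np.sum(np.asarray(areas)[indices]))
        else:
            size = float(len(indices))
        if size > best_size:
            best_indices, best_size = indices, size
    return best_indices

print(largest_segment_indices([1, 1, 2, 2, 2, -1], [-1]))  # [2, 3, 4]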
Example #10
def rewrite_scalars(input_vtk, output_vtk, new_scalars,
                    new_scalar_names=['scalars'], filter_scalars=[]):
    """
    Load VTK format file and save a subset of scalars into a new file.

    Parameters
    ----------
    input_vtk : string
        input VTK file name
    output_vtk : string
        output VTK file name
    new_scalars : list of lists of floats (or single list or array of floats)
        each list (lookup table) contains new values to assign to the vertices
    new_scalar_names : string or list of strings
        each element is the new name for a lookup table
    filter_scalars : list or numpy array (optional)
        scalar values used to filter faces (values > -1 retained)

    Returns
    -------
    output_vtk : string
        output VTK file name

    Examples
    --------
    >>> # Write vtk file with curvature values on sulci
    >>> import os
    >>> from mindboggle.utils.io_vtk import read_scalars, rewrite_scalars
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> curv_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.mean_curvature.vtk')
    >>> curvs, name = read_scalars(curv_file, True,True)
    >>> sulci_file = os.path.join(path, 'arno', 'features', 'sulci.vtk')
    >>> sulci, name = read_scalars(sulci_file)
    >>> #
    >>> rewrite_scalars(curv_file, 'rewrite_scalars.vtk',
    >>>                 [curvs, sulci], ['curvs', 'sulci'], sulci)
    >>> #
    >>> # View:
    >>> from mindboggle.utils.plots import plot_vtk
    >>> plot_vtk('rewrite_scalars.vtk')

    """
    import os
    import sys
    import numpy as np

    from mindboggle.utils.mesh import remove_faces
    from mindboggle.utils.io_vtk import write_header, write_points, \
         write_vertices, write_faces, write_scalars, read_vtk, \
         scalars_checker

    # Convert numpy arrays to lists
    if isinstance(new_scalars, np.ndarray):
        new_scalars = new_scalars.tolist()
    if isinstance(filter_scalars, np.ndarray):
        filter_scalars = filter_scalars.tolist()

    # Output VTK file to current working directory
    output_vtk = os.path.join(os.getcwd(), output_vtk)

    # Load VTK file
    faces, lines, indices, points, npoints, scalars, name, input_vtk = read_vtk(input_vtk)

    # Find indices to nonzero values
    indices = range(npoints)
    if filter_scalars:
        indices_filter = [i for i,x in enumerate(filter_scalars) if x > -1]
        indices_remove = [i for i,x in enumerate(filter_scalars) if x == -1]
        # Remove surface mesh faces whose three vertices are not all in indices
        faces = remove_faces(faces, indices_filter)

    # Write VTK file
    Fp = open(output_vtk,'w')
    write_header(Fp)
    write_points(Fp, points)
    if indices:
        write_vertices(Fp, indices)
    if faces:
        write_faces(Fp, faces)
    if new_scalars:
        new_scalars, new_scalar_names = scalars_checker(new_scalars, new_scalar_names)

        for i, new_scalar_list in enumerate(new_scalars):
            if filter_scalars:
                for iremove in indices_remove:
                    new_scalar_list[iremove] = -1
            if i == 0:
                new_scalar_name = new_scalar_names[0]
                write_scalars(Fp, new_scalar_list, new_scalar_name)
            else:
                if len(new_scalar_names) < i + 1:
                    new_scalar_name = new_scalar_names[0]
                else:
                    new_scalar_name  = new_scalar_names[i]
                write_scalars(Fp, new_scalar_list, new_scalar_name, begin_scalars=False)
    else:
        print('Error: new_scalars is empty')
        sys.exit()

    Fp.close()

    return output_vtk
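
The filtering convention in rewrite_scalars: any vertex whose filter value is -1 is masked to -1 in every output scalar list (its faces having already been removed). The masking step as a sketch:

def mask_filtered(scalar_list, filter_scalars, background=-1):
    """Set scalars to background wherever the filter is background."""
    return [background if f == background else v
            for v, f in zip(scalar_list, filter_scalars)]

print(mask_filtered([0.5, 0.7, 0.9], [1, -1, 3]))  # [0.5, -1, 0.9]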
Example #11
def evaluate_deep_features(features_file, labels_file, sulci_file='', hemi='',
                           excludeIDs=[-1], output_vtk_name='', verbose=True):
    """
    Evaluate deep surface features by computing the minimum distance from each
    label boundary vertex to all of the feature vertices in the same sulcus,
    and from each feature vertex to all of the label boundary vertices in the
    same sulcus.  The label boundaries run along the deepest parts of sulci
    and correspond to fundi in the DKT cortical labeling protocol.

    Parameters
    ----------
    features_file : string
        VTK surface file with feature numbers for vertex scalars
    labels_file : string
        VTK surface file with label numbers for vertex scalars
    sulci_file : string
        VTK surface file with sulcus numbers for vertex scalars
    hemi : string
        hemisphere ('lh' or 'rh') used to select sulcus label pair lists
    excludeIDs : list of integers
        feature/sulcus/label IDs to exclude (background set to -1)
    output_vtk_name : string
        if not empty, output a VTK file beginning with output_vtk_name that
        contains a surface with mean distances as scalars
    verbose : Boolean
        print mean distances to standard output?

    Returns
    -------
    feature_to_fundus_mean_distances : numpy array [number of features x 1]
        mean distance from each feature to sulcus label boundary ("fundus")
    feature_to_fundus_sd_distances : numpy array [number of features x 1]
        standard deviations of feature-to-fundus distances
    feature_to_fundus_mean_distances_vtk : string
        VTK surface file containing feature_to_fundus_mean_distances
    fundus_to_feature_mean_distances : numpy array [number of features x 1]
        mean distances from each sulcus label boundary ("fundus") to feature
    fundus_to_feature_sd_distances : numpy array [number of features x 1]
        standard deviations of fundus-to-feature distances
    fundus_to_feature_mean_distances_vtk : string
        VTK surface file containing fundus_to_feature_mean_distances

    """
    import os
    import sys
    import numpy as np
    from mindboggle.utils.io_vtk import read_vtk, read_scalars, write_vtk
    from mindboggle.utils.mesh import find_neighbors, remove_faces
    from mindboggle.utils.segment import extract_borders
    from mindboggle.utils.compute import source_to_target_distances
    from mindboggle.LABELS import DKTprotocol

    dkt = DKTprotocol()
    #-------------------------------------------------------------------------
    # Load labels, features, and sulci:
    #-------------------------------------------------------------------------
    faces, lines, indices, points, npoints, labels, scalar_names, \
        input_vtk = read_vtk(labels_file, True, True)
    features, name = read_scalars(features_file, True, True)
    if sulci_file:
        sulci, name = read_scalars(sulci_file, True, True)
        # List of indices to sulcus vertices:
        sulcus_indices = [i for i,x in enumerate(sulci) if x != -1]
        segmentIDs = sulci
        sulcus_faces = remove_faces(faces, sulcus_indices)
    else:
        sulcus_indices = range(len(labels))
        segmentIDs = []
        sulcus_faces = faces

    #-------------------------------------------------------------------------
    # Prepare neighbors, label pairs, fundus IDs, and outputs:
    #-------------------------------------------------------------------------
    # Calculate neighbor lists for all points:
    print('Find neighbors to all vertices...')
    neighbor_lists = find_neighbors(faces, npoints)

    # Find label boundary points in any of the sulci:
    print('Find label boundary points in any of the sulci...')
    border_indices, border_label_tuples, unique_border_label_tuples = \
        extract_borders(sulcus_indices, labels, neighbor_lists,
                        ignore_values=[], return_label_pairs=True)
    if not len(border_indices):
        sys.exit('There are no label boundary points!')

    # Initialize an array of label boundaries fundus IDs
    # (label boundary vertices that define sulci in the labeling protocol):
    print('Build an array of label boundary fundus IDs...')
    label_boundary_fundi = -1 * np.ones(npoints)

    if hemi == 'lh':
        nsulcus_lists = len(dkt.left_sulcus_label_pair_lists)
    else:
        nsulcus_lists = len(dkt.right_sulcus_label_pair_lists)
    feature_to_fundus_mean_distances = -1 * np.ones(nsulcus_lists)
    feature_to_fundus_sd_distances = -1 * np.ones(nsulcus_lists)
    fundus_to_feature_mean_distances = -1 * np.ones(nsulcus_lists)
    fundus_to_feature_sd_distances = -1 * np.ones(nsulcus_lists)
    feature_to_fundus_mean_distances_vtk = ''
    fundus_to_feature_mean_distances_vtk = ''

    #-------------------------------------------------------------------------
    # Loop through sulci:
    #-------------------------------------------------------------------------
    # For each list of sorted label pairs (corresponding to a sulcus):
    for isulcus, label_pairs in enumerate(dkt.sulcus_label_pair_lists):

        # Keep the boundary points with label pair labels:
        fundus_indices = [x for i,x in enumerate(border_indices)
                          if np.unique(border_label_tuples[i]).tolist()
                          in label_pairs]

        # Store the points as sulcus IDs in the fundus IDs array:
        if fundus_indices:
            label_boundary_fundi[fundus_indices] = isulcus

    if len(np.unique(label_boundary_fundi)) > 1:

        #---------------------------------------------------------------------
        # Construct a feature-to-fundus distance matrix and VTK file:
        #---------------------------------------------------------------------
        # Construct a distance matrix:
        print('Construct a feature-to-fundus distance matrix...')
        sourceIDs = features
        targetIDs = label_boundary_fundi
        distances, distance_matrix = source_to_target_distances(
            sourceIDs, targetIDs, points, segmentIDs, excludeIDs)

        # Compute mean distances for each feature:
        nfeatures = min(np.shape(distance_matrix)[1], nsulcus_lists)
        for ifeature in range(nfeatures):
            feature_distances = [x for x in distance_matrix[:, ifeature]
                                 if x != -1]
            feature_to_fundus_mean_distances[ifeature] = \
                np.mean(feature_distances)
            feature_to_fundus_sd_distances[ifeature] = \
                np.std(feature_distances)

        if verbose:
            print('Feature-to-fundus mean distances:')
            print(feature_to_fundus_mean_distances)
            print('Feature-to-fundus standard deviations of distances:')
            print(feature_to_fundus_sd_distances)

        # Write resulting feature-label boundary distances to VTK file:
        if output_vtk_name:
            feature_to_fundus_mean_distances_vtk = os.path.join(os.getcwd(),
                output_vtk_name + '_feature_to_fundus_mean_distances.vtk')
            print('Write feature-to-fundus distances to {0}...'.
                  format(feature_to_fundus_mean_distances_vtk))
            write_vtk(feature_to_fundus_mean_distances_vtk, points,
                      [], [], sulcus_faces, [distances],
                      ['feature-to-fundus_distances'], 'float')

        #---------------------------------------------------------------------
        # Construct a fundus-to-feature distance matrix and VTK file:
        #---------------------------------------------------------------------
        # Construct a distance matrix:
        print('Construct a fundus-to-feature distance matrix...')
        sourceIDs = label_boundary_fundi
        targetIDs = features
        distances, distance_matrix = source_to_target_distances(
            sourceIDs, targetIDs, points, segmentIDs, excludeIDs)

        # Compute mean distances for each feature:
        nfeatures = min(np.shape(distance_matrix)[1], nsulcus_lists)
        for ifeature in range(nfeatures):
            fundus_distances = [x for x in distance_matrix[:, ifeature]
                                if x != -1]
            fundus_to_feature_mean_distances[ifeature] = \
                np.mean(fundus_distances)
            fundus_to_feature_sd_distances[ifeature] = \
                np.std(fundus_distances)

        if verbose:
            print('Fundus-to-feature mean distances:')
            print(fundus_to_feature_mean_distances)
            print('Fundus-to-feature standard deviations of distances:')
            print(fundus_to_feature_sd_distances)

        # Write resulting feature-label boundary distances to VTK file:
        if output_vtk_name:
            fundus_to_feature_mean_distances_vtk = os.path.join(os.getcwd(),
                output_vtk_name + '_fundus_to_feature_mean_distances.vtk')
            print('Write fundus-to-feature distances to {0}...'.
                  format(fundus_to_feature_mean_distances_vtk))
            write_vtk(fundus_to_feature_mean_distances_vtk, points,
                      [], [], sulcus_faces, [distances],
                      ['fundus-to-feature_distances'], 'float')

    #-------------------------------------------------------------------------
    # Return outputs:
    #-------------------------------------------------------------------------
    return feature_to_fundus_mean_distances, feature_to_fundus_sd_distances,\
           feature_to_fundus_mean_distances_vtk,\
           fundus_to_feature_mean_distances, fundus_to_feature_sd_distances,\
           fundus_to_feature_mean_distances_vtk
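
The mean/standard-deviation loops above skip the -1 sentinels that source_to_target_distances() uses for excluded entries. A minimal sketch of the same per-column statistics using numpy masked arrays (toy data, not mindboggle output):

import numpy as np

# Toy distance matrix; -1 marks excluded (no-distance) entries:
distance_matrix = np.array([[0.5, -1.0],
                            [1.5,  2.0],
                            [-1.0, 4.0]])

# Mask the sentinels, then take per-feature (per-column) statistics:
masked = np.ma.masked_equal(distance_matrix, -1)
mean_distances = masked.mean(axis=0).filled(-1)  # [1.0, 3.0]
sd_distances = masked.std(axis=0).filled(-1)     # [0.5, 1.0]
print(mean_distances, sd_distances)
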
Example No. 13
def zernike_moments_per_label(vtk_file, n_moments, exclude_labels=[-1]):
    """
    Compute the Zernike moments per labeled region in a file.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    n_moments : integer
        number of moments to compute
    exclude_labels : list of integers
        labels to be excluded

    Returns
    -------
    moments : numpy matrix
        Zernike moments
    label_list : list of integers
        list of unique labels for which moments are obtained

    Examples
    --------
    >>> import os
    >>> from mindboggle.shapes.zernike.zernike import zernike_moments_per_label
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> n_moments = 20
    >>> zernike_moments_per_label(vtk_file, n_moments, exclude_labels=[-1])

    """
    from mindboggle.utils.io_vtk import read_vtk
    from mindboggle.utils.mesh import remove_faces
    from mindboggle.shapes.zernike.zernike import zernike_moments

    # Read VTK surface mesh file:
    faces, u1, u2, points, u3, labels, u4, u5 = read_vtk(vtk_file)

    # Loop through labeled regions:
    ulabels = []
    for x in labels:
        if x not in exclude_labels and x not in ulabels:
            ulabels.append(int(x))
    label_list = []
    moments_lists = []
    for label in ulabels:

        # Determine the indices per label:
        label_indices = [i for i,x in enumerate(labels) if x == label]
        print('{0} vertices for label {1}'.format(len(label_indices), label))

        # Remove background faces:
        select_faces = remove_faces(faces, label_indices)

        # Compute Zernike moments for the label:
        moments = zernike_moments(points, select_faces, n_moments)

        # Append to a list of lists of spectra:
        moments_lists.append(moments)
        label_list.append(label)

    return moments_lists, label_list
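
remove_faces() from mindboggle.utils.mesh is used throughout these examples. As a rough stand-in, assuming it keeps only faces whose three vertices are all in the given index list (a sketch, not the library code):

def remove_faces_sketch(faces, indices):
    """Keep only faces whose three vertices are all in `indices`."""
    keep = set(indices)
    return [face for face in faces if all(v in keep for v in face)]

faces = [[0, 1, 2], [2, 3, 4]]
print(remove_faces_sketch(faces, [0, 1, 2, 3]))  # [[0, 1, 2]]
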
Example No. 14
def close_surface_pair(faces, points1, points2, scalars, background_value=-1):
    """
    Close a surface patch by connecting its border vertices with
    corresponding vertices in a second surface file.

    Assumes no lines or indices when reading VTK files in.

    Note ::

        Scalar values different than background define the surface patch.
        The two sets of points have a 1-to-1 mapping; they are from
        two surfaces whose corresponding vertices are shifted in position.
        For pial vs. gray-white matter, the two surfaces are not parallel,
        so connecting the vertices leads to intersecting faces.

    Parameters
    ----------
    faces : list of lists of integers
        each sublist contains 3 indices of vertices that form a face
        on a surface mesh
    points1 : list of lists of floats
        each sublist contains 3-D coordinates of a vertex on a surface mesh
    points2 : list of lists of floats
        points from second surface with 1-to-1 correspondence with points1
    scalars : numpy array of integers
        labels used to find foreground vertices
    background_value : integer
        scalar value for background vertices

    Returns
    -------
    closed_faces : list of lists of integers
        indices of vertices that form a face on the closed surface mesh
    closed_points : list of lists of floats
        3-D coordinates from points1 and points2
    closed_scalars : list of integers
        scalar values for points1 and points2

    Examples
    --------
    >>> # Example 1: build a cube by closing two parallel planes:
    >>> import os
    >>> from mindboggle.utils.morph import close_surface_pair
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> from mindboggle.utils.io_vtk import write_vtk
    >>> # Build plane:
    >>> background_value = -1
    >>> n = 10  # plane edge length
    >>> points1 = []
    >>> for x in range(n):
    >>>     for y in range(n):
    >>>         points1.append([x,y,0])
    >>> points2 = [[x[0],x[1],1] for x in points1]
    >>> scalars = [background_value for x in range(len(points1))]
    >>> p = n*(n-1)//2 - 1
    >>> for i in [p, p+1, p+n, p+n+1]:
    >>>     scalars[i] = 1
    >>> faces = []
    >>> for x in range(n-1):
    >>>     for y in range(n-1):
    >>>         faces.append([x+y*n,x+n+y*n,x+n+1+y*n])
    >>>         faces.append([x+y*n,x+1+y*n,x+n+1+y*n])
    >>> #write_vtk('plane.vtk', points1, [], [], faces, scalars)
    >>> #plot_surfaces('plane.vtk') # doctest: +SKIP
    >>> closed_faces, closed_points, closed_scalars = close_surface_pair(faces, points1, points2, scalars, background_value)
    >>> # View:
    >>> write_vtk('cube.vtk', closed_points, [], [], closed_faces, closed_scalars, 'int')
    >>> plot_surfaces('cube.vtk') # doctest: +SKIP
    >>> #
    >>> # Example 2: Gray and white cortical brain surfaces:
    >>> import os
    >>> from mindboggle.utils.morph import close_surface_pair
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> from mindboggle.utils.io_vtk import read_scalars, read_vtk, read_points, write_vtk
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> patch_surface1 = 'fold.pial.vtk'
    >>> whole_surface2 = 'fold.white.vtk'
    >>> # Select a single fold:
    >>> folds_file = os.path.join(path, 'arno', 'features', 'folds.vtk')
    >>> points1 = read_points(folds_file)
    >>> scalars, name = read_scalars(folds_file, True, True)
    >>> fold_number = 11
    >>> scalars[scalars != fold_number] = -1
    >>> white_surface = os.path.join(path, 'arno', 'freesurfer', 'lh.white.vtk')
    >>> faces, u1, u2, points2, N, u3, u4, u5 = read_vtk(white_surface)
    >>> background_value = -1
    >>> closed_faces, closed_points, closed_scalars = close_surface_pair(faces, points1, points2, scalars, background_value)
    >>> # View:
    >>> write_vtk('closed.vtk', closed_points, [], [], closed_faces, closed_scalars, name, 'int')
    >>> plot_surfaces('closed.vtk') # doctest: +SKIP

    """
    import sys
    import numpy as np

    from mindboggle.utils.mesh import find_neighbors, remove_faces
    from mindboggle.utils.segment import extract_borders

    if isinstance(scalars, list):
        scalars = np.array(scalars)

    N = len(points1)
    closed_points = points1 + points2

    # Find all vertex neighbors and surface patch border vertices:
    neighbor_lists = find_neighbors(faces, N)
    I = np.where(scalars != background_value)[0]
    scalars[scalars == background_value] = background_value + 1
    scalars[I] = background_value + 2
    scalars = scalars.tolist()
    borders, u1, u2 = extract_borders(range(N), scalars, neighbor_lists)
    if not len(borders):
        sys.exit('There are no border vertices!')
    borders = [x for x in borders if x in I]

    # Reindex copy of faces and combine with original (both zero-index):
    indices = range(N)
    indices2 = range(N, 2 * N)
    reindex = dict([(index, indices2[i]) for i, index in enumerate(indices)])
    faces = remove_faces(faces, I)
    faces2 = [[reindex[i] for i in face] for face in faces]
    closed_faces = faces + faces2

    # Connect border vertices between surface patches and add new faces:
    add_faces = []
    taken_already = []
    for index in borders:
        if index not in taken_already:
            neighbors = list(set(neighbor_lists[index]).intersection(borders))
            taken_already.append(index)
            #taken_already.extend([index] + neighbors)
            for neighbor in neighbors:
                add_faces.append([index, index + N, neighbor])
                add_faces.append([index + N, neighbor, neighbor + N])
    closed_faces = closed_faces + add_faces

    closed_scalars = scalars * 2

    return closed_faces, closed_points, closed_scalars
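
The add_faces loop above stitches each border edge (index, neighbor) on surface 1 to its shifted copy on surface 2 with two triangles, closing the gap between the two patches. A minimal standalone sketch of that quad-splitting step:

def stitch_borders(border_edges, N):
    """Join each border edge (i, j) to its offset copy (i+N, j+N)
    with two triangles, as in close_surface_pair() above."""
    side_faces = []
    for i, j in border_edges:
        side_faces.append([i, i + N, j])
        side_faces.append([i + N, j, j + N])
    return side_faces

# Two border edges on a 4-point patch:
print(stitch_borders([(0, 1), (1, 2)], N=4))
# [[0, 4, 1], [4, 1, 5], [1, 5, 2], [5, 2, 6]]
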
Example No. 15
def plot_mask_surface(vtk_file, mask_file='', nonmask_value=-1,
                      masked_output='', remove_nonmask=False,
                      program='vtkviewer',
                      use_colormap=False, colormap_file=''):
    """
    Use vtkviewer or mayavi2 to visualize VTK surface mesh data.

    If a mask_file is provided, a temporary masked file is saved,
    and it is this file that is viewed.

    If using vtkviewer, you can optionally provide a colormap file
    or set the $COLORMAP environment variable.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file
    mask_file : string
        name of VTK surface mesh file to mask vtk_file vertices
    nonmask_value : integer
        nonmask (usually background) value
    masked_output : string
        temporary masked output file name
    remove_nonmask : Boolean
        remove vertices that are not in mask? (otherwise assign nonmask_value)
    program : string {'vtkviewer', 'mayavi2'}
        program to visualize VTK file
    use_colormap : Boolean
        use Paraview-style XML colormap file set by $COLORMAP env variable?
    colormap_file : string
        colormap file to use if use_colormap==True; if empty and
        use_colormap==True, use the file set by the $COLORMAP environment
        variable

    Examples
    --------
    >>> import os
    >>> from mindboggle.utils.plots import plot_mask_surface
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> vtk_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> mask_file = os.path.join(path, 'test_one_label.vtk')
    >>> nonmask_value = 0 #-1
    >>> masked_output = ''
    >>> remove_nonmask = True
    >>> program = 'vtkviewer'
    >>> use_colormap = True
    >>> colormap_file = '' #'/software/mindboggle_tools/colormap.xml'
    >>> plot_mask_surface(vtk_file, mask_file, nonmask_value, masked_output, remove_nonmask, program, use_colormap, colormap_file)

    """
    import os
    import numpy as np

    from mindboggle.utils.mesh import remove_faces, reindex_faces_points
    from mindboggle.utils.utils import execute
    from mindboggle.utils.plots import plot_surfaces
    from mindboggle.utils.io_vtk import read_scalars, rewrite_scalars, \
                                        read_vtk, write_vtk

    #-------------------------------------------------------------------------
    # Filter mesh with non-background values from a second (same-size) mesh:
    #-------------------------------------------------------------------------
    if mask_file:
        mask, name = read_scalars(mask_file, True, True)
        if not masked_output:
            masked_output = os.path.join(os.getcwd(), 'temp.vtk')
        file_to_plot = masked_output

        #---------------------------------------------------------------------
        # Remove nonmask-valued vertices:
        #---------------------------------------------------------------------
        if remove_nonmask:
            #-----------------------------------------------------------------
            # Load VTK files:
            #-----------------------------------------------------------------
            faces, lines, indices, points, npoints, scalars, scalar_names, \
            o1 = read_vtk(vtk_file, True, True)
            #-----------------------------------------------------------------
            # Find mask indices, remove nonmask faces, and reindex:
            #-----------------------------------------------------------------
            Imask = [i for i,x in enumerate(mask) if x != nonmask_value]
            mask_faces = remove_faces(faces, Imask)
            mask_faces, points, \
            original_indices = reindex_faces_points(mask_faces, points)
            #-----------------------------------------------------------------
            # Write VTK file with scalar values:
            #-----------------------------------------------------------------
            if np.ndim(scalars) == 1:
                scalar_type = type(scalars[0]).__name__
            elif np.ndim(scalars) == 2:
                scalar_type = type(scalars[0][0]).__name__
            else:
                raise ValueError("Undefined scalar type!")
            write_vtk(file_to_plot, points, [], [], mask_faces,
                      scalars[original_indices].tolist(), scalar_names,
                      scalar_type=scalar_type)
        else:
            scalars, name = read_scalars(vtk_file, True, True)
            scalars[mask == nonmask_value] = nonmask_value
            rewrite_scalars(vtk_file, file_to_plot, scalars)
    else:
        file_to_plot = vtk_file

    #-------------------------------------------------------------------------
    # Display with vtkviewer.py:
    #-------------------------------------------------------------------------
    if program == 'vtkviewer':
        plot_surfaces(file_to_plot, use_colormap=use_colormap,
                      colormap_file=colormap_file)
    #-------------------------------------------------------------------------
    # Display with mayavi2:
    #-------------------------------------------------------------------------
    elif program == 'mayavi2':
        cmd = ["mayavi2", "-d", file_to_plot, "-m", "Surface", "&"]
        execute(cmd, 'os')
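
When remove_nonmask is False, the function only overwrites scalars outside the mask rather than removing vertices. A toy sketch of that masking step (assuming scalars and mask are same-length numpy arrays, as read_scalars() returns with return_array=True):

import numpy as np

scalars = np.array([3.0, 5.0, 7.0, 9.0])
mask = np.array([1, -1, 1, -1])
nonmask_value = -1

# Vertices outside the mask keep their position but get the nonmask value:
scalars[mask == nonmask_value] = nonmask_value
print(scalars)  # [ 3. -1.  7. -1.]
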
Example No. 16
def select_largest(points, faces, exclude_labels=[-1], areas=None,
                   reindex=True):
    """
    Select the largest segment (connected set of indices) in a mesh.

    In case a surface patch is fragmented, we select the largest fragment,
    remove extraneous triangular faces, and reindex indices.

    Parameters
    ----------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh
    exclude_labels : list of integers
        background values to exclude
    areas : numpy array or list of floats (or None)
        surface area scalar values for all vertices
    reindex : Boolean
        reindex indices in faces?

    Returns
    -------
    points : list of lists of 3 floats
        x,y,z coordinates for each vertex of the structure
    faces : list of lists of 3 integers
        3 indices to vertices that form a triangle on the mesh

    Examples
    --------
    >>> # Spectrum for one label (artificial composite), two fragments:
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.utils.io_vtk import read_scalars, read_vtk, write_vtk
    >>> from mindboggle.utils.mesh import remove_faces
    >>> from mindboggle.utils.segment import select_largest
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> label_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT31.manual.vtk')
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> exclude_labels = [-1]
    >>> faces, lines, indices, points, u1, labels, u2,u3 = read_vtk(label_file,
    >>>      return_first=True, return_array=True)
    >>> I19 = [i for i,x in enumerate(labels) if x==19] # pars orbitalis
    >>> I22 = [i for i,x in enumerate(labels) if x==22] # postcentral
    >>> I19.extend(I22)
    >>> faces = remove_faces(faces, I19)
    >>> areas, u1 = read_scalars(area_file, True, True)
    >>> reindex = True
    >>> #
    >>> points, faces = select_largest(points, faces, exclude_labels, areas,
    >>>                                reindex)
    >>> # View:
    >>> from mindboggle.utils.plots import plot_vtk
    >>> scalars = np.zeros(np.shape(labels))
    >>> scalars[I19] = 1
    >>> vtk_file = 'test_two_labels.vtk'
    >>> write_vtk(vtk_file, points, indices, lines, faces,
    >>>           scalars, scalar_names='scalars')
    >>> plot_vtk(vtk_file)

    """
    import numpy as np

    from mindboggle.utils.mesh import find_neighbors, remove_faces, \
        reindex_faces_points
    from mindboggle.utils.segment import segment

    # Areas:
    use_area = False
    if isinstance(areas, np.ndarray) and np.shape(areas):
        use_area = True
    elif isinstance(areas, list) and len(areas):
        areas = np.array(areas)
        use_area = True

    # Check to see if there are enough points:
    min_npoints = 4
    npoints = len(points)
    if npoints < min_npoints or len(faces) < min_npoints:
        print("The input size {0} ({1} faces) should be much larger "
              "than {2}". format(npoints, len(faces), min_npoints))
        return None
    else:

        #---------------------------------------------------------------------
        # Segment the indices into connected sets of indices:
        #---------------------------------------------------------------------
        # Construct neighbor lists:
        neighbor_lists = find_neighbors(faces, npoints)

        # Determine the indices:
        indices = [x for sublst in faces for x in sublst]

        # Segment:
        segments = segment(indices, neighbor_lists, min_region_size=1,
            seed_lists=[], keep_seeding=False, spread_within_labels=False,
            labels=[], label_lists=[], values=[], max_steps='', verbose=False)

        #---------------------------------------------------------------------
        # Select the largest segment (connected set of indices):
        #---------------------------------------------------------------------
        unique_segments = [x for x in np.unique(segments)
                           if x not in exclude_labels]
        if len(unique_segments) > 1:
            select_indices = []
            max_segment_area = 0
            for segment_number in unique_segments:
                segment_indices = [i for i,x in enumerate(segments)
                                   if x == segment_number]
                if use_area:
                    segment_area = np.sum(areas[segment_indices])
                else:
                    segment_area = len(segment_indices)
                if segment_area > max_segment_area:
                    select_indices = segment_indices
                    max_segment_area = segment_area
            print('Maximum size of {0} segments: {1} vertices'.
                  format(len(unique_segments), len(select_indices)))

            #-----------------------------------------------------------------
            # Extract points and renumber faces for the selected indices:
            #-----------------------------------------------------------------
            faces = remove_faces(faces, select_indices)
        else:
            select_indices = indices

        # Alert if the number of indices is small:
        if len(select_indices) < min_npoints:
            print("The input size {0} is too small.".format(len(select_indices)))
            return None
        elif faces:

            #-----------------------------------------------------------------
            # Reindex indices in faces:
            #-----------------------------------------------------------------
            if reindex:
                faces, points = reindex_faces_points(faces, points)
                return points, faces
            else:
                points = np.array(points)
                points = points[select_indices].tolist()
                return points, faces
        else:
            return None
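
The selection loop above compares segments either by summed vertex area or by vertex count. A compact sketch of that decision on toy data (not the library implementation):

import numpy as np

def largest_segment(segments, areas=None):
    """Return the segment ID with the largest total area
    (or vertex count if no areas are given)."""
    best_id, best_size = None, 0
    for seg in np.unique(segments):
        idx = np.where(segments == seg)[0]
        size = areas[idx].sum() if areas is not None else len(idx)
        if size > best_size:
            best_id, best_size = seg, size
    return best_id

segments = np.array([0, 0, 1, 1, 1])
print(largest_segment(segments))  # 1 (three vertices vs. two)
print(largest_segment(segments, np.array([5., 5., 1., 1., 1.])))  # 0 (area 10 vs. 3)
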
Example No. 17
def rewrite_scalars(input_vtk, output_vtk, new_scalars,
                    new_scalar_names=['scalars'], filter_scalars=[],
                    background_value=-1):
    """
    Load VTK format file and save a subset of scalars into a new file.

    Parameters
    ----------
    input_vtk : string
        input VTK file name
    output_vtk : string
        output VTK file name
    new_scalars : list of lists of floats (or single list or array of floats)
        each list (lookup table) contains new values to assign to the vertices
    new_scalar_names : string or list of strings
        each element is the new name for a lookup table
    filter_scalars : list or numpy array (optional)
        scalar values used to filter faces (foreground values retained)
    background_value : integer
        background value

    Returns
    -------
    output_vtk : string
        output VTK file name

    Examples
    --------
    >>> # Write vtk file with curvature values on sulci
    >>> import os
    >>> from mindboggle.utils.io_vtk import read_scalars, rewrite_scalars
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> input_vtk = os.path.join(path, 'arno', 'shapes', 'lh.pial.mean_curvature.vtk')
    >>> sulci_file = os.path.join(path, 'arno', 'features', 'sulci.vtk')
    >>> output_vtk = 'rewrite_scalars.vtk'
    >>> curvs, name = read_scalars(input_vtk, True,True)
    >>> sulci, name = read_scalars(sulci_file)
    >>> new_scalars = [curvs, sulci]
    >>> new_scalar_names = ['curvs', 'sulci']
    >>> filter_scalars = sulci
    >>> background_value = -1
    >>> #
    >>> rewrite_scalars(input_vtk, output_vtk, new_scalars, new_scalar_names, filter_scalars, background_value)
    >>> #
    >>> # View:
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> plot_surfaces('rewrite_scalars.vtk')

    """
    import os
    import numpy as np

    from mindboggle.utils.mesh import remove_faces
    from mindboggle.utils.io_vtk import write_header, write_points, \
        write_vertices, write_faces, write_scalars, read_vtk, scalars_checker

    # Convert numpy arrays to lists
    if isinstance(new_scalars, np.ndarray):
        new_scalars = new_scalars.tolist()
    if isinstance(filter_scalars, np.ndarray):
        filter_scalars = filter_scalars.tolist()

    # Output VTK file to current working directory
    output_vtk = os.path.join(os.getcwd(), output_vtk)

    # Load VTK file
    faces, lines, indices, points, npoints, scalars, name, \
        input_vtk = read_vtk(input_vtk)

    # Find indices to foreground values
    if filter_scalars:
        indices_keep = [i for i,x in enumerate(filter_scalars)
                        if x != background_value]
        indices_remove = [i for i,x in enumerate(filter_scalars)
                          if x == background_value]
        # Remove surface faces whose three vertices are not all in indices
        faces = remove_faces(faces, indices_keep)

    # Write VTK file
    Fp = open(output_vtk,'w')
    write_header(Fp)
    write_points(Fp, points)
    if indices:
        write_vertices(Fp, indices)
    if faces:
        write_faces(Fp, faces)
    if new_scalars:
        new_scalars, new_scalar_names = scalars_checker(new_scalars,
                                                        new_scalar_names)

        # scalars_checker() returns a list of lists for scalars:
        for i, new_scalar_list in enumerate(new_scalars):
            if filter_scalars:
                for iremove in indices_remove:
                    new_scalar_list[iremove] = background_value
            if np.ndim(new_scalar_list) == 1:
                scalar_type = type(new_scalar_list[0]).__name__
            elif np.ndim(new_scalar_list) == 2:
                scalar_type = type(new_scalar_list[0][0]).__name__
            else:
                raise ValueError("Undefined scalar type!")
            if i == 0:
                new_scalar_name = new_scalar_names[0]
                write_scalars(Fp, new_scalar_list, new_scalar_name,
                              begin_scalars=True,
                              scalar_type=scalar_type)
            else:
                if len(new_scalar_names) < i + 1:
                    new_scalar_name = new_scalar_names[0]
                else:
                    new_scalar_name = new_scalar_names[i]
                write_scalars(Fp, new_scalar_list, new_scalar_name,
                              begin_scalars=False,
                              scalar_type=scalar_type)
    else:
        raise ValueError('new_scalars is empty')

    Fp.close()

    if not os.path.exists(output_vtk):
        raise IOError(output_vtk + " not found")

    return output_vtk
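
The filter_scalars logic above keeps only faces whose vertices are all foreground, and resets background vertices in each new scalar list. A small sketch of the background-reset step on toy lists:

background_value = -1
filter_scalars = [2, -1, 2, -1]    # foreground where != background_value
new_scalar_list = [0.1, 0.2, 0.3, 0.4]

# Overwrite values at background positions, as rewrite_scalars() does:
for i, x in enumerate(filter_scalars):
    if x == background_value:
        new_scalar_list[i] = background_value
print(new_scalar_list)  # [0.1, -1, 0.3, -1]
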
Example No. 19
def explode_scalars(input_indices_vtk, input_values_vtk='', output_stem='',
                    exclude_values=[-1], background_value=-1,
                    output_scalar_name='scalars',
                    remove_background_faces=True, reindex=True):
    """
    Write out a separate VTK file for each integer (not in exclude_values)
    in (the first) scalar list of an input VTK file.
    Optionally write the values drawn from a second VTK file,
    remove background values, and reindex indices.

    Parameters
    ----------
    input_indices_vtk : string
        path of the input VTK file that contains indices as scalars
        (assumes that the scalars are a list of floats or integers)
    input_values_vtk : string
        path of the input VTK file that contains values as scalars
    output_stem : string
        path and stem of the output VTK file
    exclude_values : list or array
        values to exclude
    background_value : integer or float
        background value in output VTK files
    output_scalar_name : string
        name for the output scalars
    remove_background_faces : Boolean
        remove all faces whose three vertices do not all have the given
        scalar value?
    reindex : Boolean
        reindex all indices in faces?

    Examples
    --------
    >>> # Example 1:  explode sulci with thickness values
    >>> import os
    >>> from mindboggle.utils.io_vtk import explode_scalars
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> input_indices_vtk = os.path.join(path, 'arno', 'features', 'sulci.vtk')
    >>> input_values_vtk = os.path.join(path, 'arno', 'shapes', 'lh.pial.travel_depth.vtk')
    >>> output_stem = 'sulci_depth'
    >>> #
    >>> explode_scalars(input_indices_vtk, input_values_vtk, output_stem)
    >>> #
    >>> # View:
    >>> example_vtk = os.path.join(os.getcwd(), output_stem + '0.vtk')
    >>> plot_surfaces(example_vtk)
    >>> #
    >>> # Example 2:  explode labels
    >>> import os
    >>> from mindboggle.utils.io_vtk import explode_scalars
    >>> from mindboggle.utils.plots import plot_surfaces
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> input_values_vtk = os.path.join(path, 'arno', 'labels',
    >>>                                 'lh.labels.DKT25.manual.vtk')
    >>> input_indices_vtk = input_values_vtk
    >>> output_stem = 'label'
    >>> exclude_values = [-1]
    >>> background_value = -1
    >>> output_scalar_name = 'scalars'
    >>> remove_background_faces = True
    >>> reindex = True
    >>> #
    >>> explode_scalars(input_indices_vtk, input_values_vtk, output_stem,
    >>>                 exclude_values, background_value,
    >>>                 output_scalar_name, remove_background_faces, reindex)
    >>> # View:
    >>> example_vtk = os.path.join(os.getcwd(), output_stem + '2.vtk')
    >>> plot_surfaces(example_vtk)

    """
    import os
    import numpy as np
    from mindboggle.utils.io_vtk import read_scalars, read_vtk, write_vtk
    from mindboggle.utils.mesh import reindex_faces_points, remove_faces

    # Load VTK file:
    faces, lines, indices, points, npoints, scalars, scalar_names, \
        foo1 = read_vtk(input_indices_vtk, True, True)
    print("Explode the scalar list in {0}".
          format(os.path.basename(input_indices_vtk)))
    if input_values_vtk and input_values_vtk != input_indices_vtk:
        values, name = read_scalars(input_values_vtk, True, True)
        print("Explode the scalar list of values in {0} "
              "with the scalar list of indices in {1}".
              format(os.path.basename(input_values_vtk),
                     os.path.basename(input_indices_vtk)))
    else:
        values = np.copy(scalars)

    # Loop through unique (non-excluded) scalar values:
    unique_scalars = np.unique(scalars)
    if all(unique_scalars==np.round(unique_scalars)):
        unique_scalars = [int(x) for x in unique_scalars
                          if x not in exclude_values]
    else:
        unique_scalars = [x for x in unique_scalars
                          if x not in exclude_values]

    for scalar in unique_scalars:

        # Remove background (keep only faces with the scalar):
        if remove_background_faces:
            scalar_indices = [i for i,x in enumerate(scalars) if x == scalar]
            scalar_faces = remove_faces(faces, scalar_indices)
        else:
            scalar_faces = faces

        # Reindex:
        if reindex:
            scalar_faces, select_points, \
            o1 = reindex_faces_points(scalar_faces, points)
        else:
            select_points = points

        # Create array and indices for scalar value:
        if reindex:
            len_indices = len(select_points)
            select_values = scalar * np.ones(len_indices)
        else:
            select_values = np.copy(values)
            select_values[scalars != scalar] = background_value
            len_indices = len([i for i,x in enumerate(select_values)
                               if x != background_value])

        print("  Scalar {0}: {1} vertices".format(scalar, len_indices))

        # Write VTK file with scalar values (list of values):
        if np.ndim(select_values) == 1:
            scalar_type = type(select_values[0]).__name__
        elif np.ndim(select_values) == 2:
            scalar_type = type(select_values[0][0]).__name__
        else:
            raise ValueError("Undefined scalar type!")
        output_vtk = os.path.join(os.getcwd(),
                                  output_stem + str(scalar) + '.vtk')
        write_vtk(output_vtk, select_points, indices, lines, scalar_faces,
                  select_values.tolist(), output_scalar_name,
                  scalar_type=scalar_type)
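
explode_scalars() writes one file per unique (non-excluded) integer scalar, named <output_stem><value>.vtk in the working directory. A sketch of just that naming scheme (a hypothetical helper, not part of mindboggle):

import os

def exploded_output_names(scalars, output_stem, exclude_values=(-1,)):
    """List the VTK file names explode_scalars() would write."""
    unique = sorted(set(int(x) for x in scalars if x not in exclude_values))
    return [os.path.join(os.getcwd(), '{0}{1}.vtk'.format(output_stem, s))
            for s in unique]

print(exploded_output_names([-1, 2, 2, 5], 'label'))
# e.g. ['<cwd>/label2.vtk', '<cwd>/label5.vtk']
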
Example No. 20
def zernike_moments_per_label(vtk_file,
                              order=10,
                              exclude_labels=[-1],
                              scale_input=True,
                              decimate_fraction=0,
                              decimate_smooth=25):
    """
    Compute the Zernike moments per labeled region in a file.

    Optionally decimate the input mesh.

    Parameters
    ----------
    vtk_file : string
        name of VTK surface mesh file containing index scalars (labels)
    order : integer
        number of moments to compute
    exclude_labels : list of integers
        labels to be excluded
    scale_input : Boolean
        translate and scale each object so it is bounded by a unit sphere?
        (this is the expected input to zernike_moments())
    decimate_fraction : float
        fraction of mesh faces to remove for decimation (1 for no decimation)
    decimate_smooth : integer
        number of smoothing steps for decimation

    Returns
    -------
    descriptors_lists : list of lists of floats
        Zernike descriptors per label
    label_list : list of integers
        list of unique labels for which moments are computed

    Examples
    --------
    >>> # Uncomment "if label==22:" below to run example:
    >>> # Twins-2-1 left postcentral (22) pial surface:
    >>> import os
    >>> from mindboggle.shapes.zernike.zernike import zernike_moments_per_label
    >>> path = os.path.join(os.environ['HOME'], 'mindboggled', 'OASIS-TRT-20-1')
    >>> vtk_file = os.path.join(path, 'labels', 'left_surface', 'relabeled_classifier.vtk')
    >>> order = 3
    >>> exclude_labels = [-1, 0]
    >>> scale_input = True
    >>> zernike_moments_per_label(vtk_file, order, exclude_labels, scale_input)
    ([[0.00528486237819844,
       0.009571754617699853,
       0.0033489494903015944,
       0.00875603468268444,
       0.0015879536633349918,
       0.0008080165707033097]],
     [22])


    Output for label 14:
    ([[0.0018758013185778298,
       0.001757973693050823,
       0.002352403177686726,
       0.0032281044369938286,
       0.002215900343702539,
       0.0019646380916703856]],
     [14.0])

    For comparison, Arthur Mikhno's result (values scaled by 1.0e+07):
    0.0000, 0.0179, 0.0008, 4.2547, 0.0534, 4.4043

    """
    import numpy as np
    from mindboggle.utils.io_vtk import read_vtk
    from mindboggle.utils.mesh import remove_faces
    from mindboggle.shapes.zernike.zernike import zernike_moments

    min_points_faces = 4

    #-------------------------------------------------------------------------
    # Read VTK surface mesh file:
    #-------------------------------------------------------------------------
    faces, u1, u2, points, u3, labels, u4, u5 = read_vtk(vtk_file)

    #-------------------------------------------------------------------------
    # Loop through labeled regions:
    #-------------------------------------------------------------------------
    ulabels = [x for x in np.unique(labels) if x not in exclude_labels]
    label_list = []
    descriptors_lists = []
    for label in ulabels:
        #if label == 1022:  # 22:
        #    print("DEBUG: COMPUTE FOR ONLY ONE LABEL")

        #---------------------------------------------------------------------
        # Determine the indices per label:
        #---------------------------------------------------------------------
        Ilabel = [i for i, x in enumerate(labels) if x == label]
        print('  {0} vertices for label {1}'.format(len(Ilabel), label))
        if len(Ilabel) > min_points_faces:

            #-----------------------------------------------------------------
            # Remove background faces:
            #-----------------------------------------------------------------
            pick_faces = remove_faces(faces, Ilabel)
            if len(pick_faces) > min_points_faces:

                #-------------------------------------------------------------
                # Compute Zernike moments for the label:
                #-------------------------------------------------------------
                descriptors = zernike_moments(points, pick_faces, order,
                                              scale_input, decimate_fraction,
                                              decimate_smooth)

                #-------------------------------------------------------------
                # Append to a list of lists of spectra:
                #-------------------------------------------------------------
                descriptors_lists.append(descriptors)
                label_list.append(label)

    return descriptors_lists, label_list
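
scale_input asks zernike_moments() to translate and scale each object so it is bounded by a unit sphere. A sketch of one way to do that preprocessing (the actual implementation lives inside zernike_moments()):

import numpy as np

def scale_to_unit_sphere(points):
    """Center points on their centroid and scale so the farthest
    point lies on the unit sphere."""
    points = np.asarray(points, dtype=float)
    points = points - points.mean(axis=0)
    radius = np.linalg.norm(points, axis=1).max()
    return points / radius if radius > 0 else points

print(scale_to_unit_sphere([[0., 0., 0.], [2., 0., 0.], [0., 2., 0.]]))
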
Example No. 21
def evaluate_deep_features(features_file,
                           labels_file,
                           sulci_file='',
                           hemi='',
                           excludeIDs=[-1],
                           output_vtk_name='',
                           verbose=True):
    """
    Evaluate deep surface features by computing the minimum distance from each
    label border vertex to all of the feature vertices in the same sulcus,
    and from each feature vertex to all of the label border vertices in the
    same sulcus.  The label borders run along the deepest parts of sulci
    and correspond to fundi in the DKT cortical labeling protocol.

    Parameters
    ----------
    features_file : string
        VTK surface file with feature numbers for vertex scalars
    labels_file : string
        VTK surface file with label numbers for vertex scalars
    sulci_file : string
        VTK surface file with sulcus numbers for vertex scalars
    hemi : string
        hemisphere ('lh' or 'rh')
    excludeIDs : list of integers
        feature/sulcus/label IDs to exclude (background set to -1)
    output_vtk_name : string
        if not empty, output a VTK file beginning with output_vtk_name that
        contains a surface with mean distances as scalars
    verbose : Boolean
        print mean distances to standard output?

    Returns
    -------
    feature_to_border_mean_distances : numpy array [number of features x 1]
        mean distance from each feature to sulcus label border
    feature_to_border_sd_distances : numpy array [number of features x 1]
        standard deviations of feature-to-border distances
    feature_to_border_distances_vtk : string
        VTK surface file containing feature-to-border distances
    border_to_feature_mean_distances : numpy array [number of features x 1]
        mean distances from each sulcus label border to feature
    border_to_feature_sd_distances : numpy array [number of features x 1]
        standard deviations of border-to-feature distances
    border_to_feature_distances_vtk : string
        VTK surface file containing border-to-feature distances

    """
    import os
    import sys
    import numpy as np
    from mindboggle.utils.io_vtk import read_vtk, read_scalars, write_vtk
    from mindboggle.utils.mesh import find_neighbors, remove_faces
    from mindboggle.utils.segment import extract_borders
    from mindboggle.utils.compute import source_to_target_distances
    from mindboggle.LABELS import DKTprotocol

    dkt = DKTprotocol()
    #-------------------------------------------------------------------------
    # Load labels, features, and sulci:
    #-------------------------------------------------------------------------
    faces, lines, indices, points, npoints, labels, scalar_names, \
        input_vtk = read_vtk(labels_file, True, True)
    features, name = read_scalars(features_file, True, True)
    if sulci_file:
        sulci, name = read_scalars(sulci_file, True, True)
        # List of indices to sulcus vertices:
        sulcus_indices = [i for i, x in enumerate(sulci) if x != -1]
        segmentIDs = sulci
        sulcus_faces = remove_faces(faces, sulcus_indices)
    else:
        sulcus_indices = range(len(labels))
        segmentIDs = []
        sulcus_faces = faces

    #-------------------------------------------------------------------------
    # Prepare neighbors, label pairs, border IDs, and outputs:
    #-------------------------------------------------------------------------
    # Calculate neighbor lists for all points:
    print('Find neighbors for all vertices...')
    neighbor_lists = find_neighbors(faces, npoints)

    # Find label border points in any of the sulci:
    print('Find label border points in any of the sulci...')
    border_indices, border_label_tuples, unique_border_label_tuples = \
        extract_borders(sulcus_indices, labels, neighbor_lists,
                        ignore_values=[], return_label_pairs=True)
    if not len(border_indices):
        sys.exit('There are no label border points!')

    # Initialize an array of label border IDs
    # (label border vertices that define sulci in the labeling protocol):
    print('Build an array of label border IDs...')
    label_borders = -1 * np.ones(npoints)

    if hemi == 'lh':
        nsulcus_lists = len(dkt.left_sulcus_label_pair_lists)
    else:
        nsulcus_lists = len(dkt.right_sulcus_label_pair_lists)
    feature_to_border_mean_distances = -1 * np.ones(nsulcus_lists)
    feature_to_border_sd_distances = -1 * np.ones(nsulcus_lists)
    border_to_feature_mean_distances = -1 * np.ones(nsulcus_lists)
    border_to_feature_sd_distances = -1 * np.ones(nsulcus_lists)
    feature_to_border_distances_vtk = ''
    border_to_feature_distances_vtk = ''

    #-------------------------------------------------------------------------
    # Loop through sulci:
    #-------------------------------------------------------------------------
    # For each list of sorted label pairs (corresponding to a sulcus):
    for isulcus, label_pairs in enumerate(dkt.sulcus_label_pair_lists):

        # Keep the border points with label pair labels:
        label_pair_border_indices = [
            x for i, x in enumerate(border_indices)
            if np.unique(border_label_tuples[i]).tolist() in label_pairs
        ]

        # Store the points as sulcus IDs in the border IDs array:
        if label_pair_border_indices:
            label_borders[label_pair_border_indices] = isulcus

    if len(np.unique(label_borders)) > 1:

        #---------------------------------------------------------------------
        # Construct a feature-to-border distance matrix and VTK file:
        #---------------------------------------------------------------------
        # Construct a distance matrix:
        print('Construct a feature-to-border distance matrix...')
        sourceIDs = features
        targetIDs = label_borders
        distances, distance_matrix = source_to_target_distances(
            sourceIDs, targetIDs, points, segmentIDs, excludeIDs)

        # Compute mean distances for each feature:
        nfeatures = min(np.shape(distance_matrix)[1], nsulcus_lists)
        for ifeature in range(nfeatures):
            feature_distances = [
                x for x in distance_matrix[:, ifeature] if x != -1
            ]
            feature_to_border_mean_distances[ifeature] = \
                np.mean(feature_distances)
            feature_to_border_sd_distances[ifeature] = \
                np.std(feature_distances)

        if verbose:
            print('Feature-to-border mean distances:')
            print(feature_to_border_mean_distances)
            print('Feature-to-border standard deviations of distances:')
            print(feature_to_border_sd_distances)

        # Write resulting feature-label border distances to VTK file:
        if output_vtk_name:
            feature_to_border_distances_vtk = os.path.join(
                os.getcwd(),
                output_vtk_name + '_feature_to_border_mean_distances.vtk')
            print('Write feature-to-border distances to {0}...'.format(
                feature_to_border_distances_vtk))
            write_vtk(feature_to_border_distances_vtk, points, [], [],
                      sulcus_faces, [distances],
                      ['feature-to-border_distances'], 'float')

        #---------------------------------------------------------------------
        # Construct a border-to-feature distance matrix and VTK file:
        #---------------------------------------------------------------------
        # Construct a distance matrix:
        print('Construct a border-to-feature distance matrix...')
        sourceIDs = label_borders
        targetIDs = features
        distances, distance_matrix = source_to_target_distances(
            sourceIDs, targetIDs, points, segmentIDs, excludeIDs)

        # Compute mean distances for each feature:
        nfeatures = min(np.shape(distance_matrix)[1], nsulcus_lists)
        for ifeature in range(nfeatures):
            border_distances = [
                x for x in distance_matrix[:, ifeature] if x != -1
            ]
            border_to_feature_mean_distances[ifeature] = \
                np.mean(border_distances)
            border_to_feature_sd_distances[ifeature] = \
                np.std(border_distances)

        if verbose:
            print('Border-to-feature mean distances:')
            print(border_to_feature_mean_distances)
            print('Border-to-feature standard deviations of distances:')
            print(border_to_feature_sd_distances)

        # Write resulting feature-label border distances to VTK file:
        if output_vtk_name:
            border_to_feature_distances_vtk = os.path.join(
                os.getcwd(),
                output_vtk_name + '_border_to_feature_mean_distances.vtk')
            print('Write border-to-feature distances to {0}...'.format(
                border_to_feature_distances_vtk))
            write_vtk(border_to_feature_distances_vtk, points, [], [],
                      sulcus_faces, [distances],
                      ['border-to-feature_distances'], 'float')

    #-------------------------------------------------------------------------
    # Return outputs:
    #-------------------------------------------------------------------------
    return feature_to_border_mean_distances, feature_to_border_sd_distances,\
           feature_to_border_distances_vtk,\
           border_to_feature_mean_distances, border_to_feature_sd_distances,\
           border_to_feature_distances_vtk
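
The sulcus loop above tags border vertices whose label pair belongs to a sulcus's label-pair list with that sulcus index. A toy sketch of that assignment (the pair lists here are hypothetical, not the DKT protocol):

import numpy as np

npoints = 6
label_borders = -1 * np.ones(npoints)
border_indices = [1, 3, 4]
border_label_tuples = [(2, 28), (3, 18), (2, 28)]
sulcus_label_pair_lists = [[[2, 28]], [[3, 18]]]  # hypothetical

for isulcus, label_pairs in enumerate(sulcus_label_pair_lists):
    hits = [x for i, x in enumerate(border_indices)
            if sorted(border_label_tuples[i]) in label_pairs]
    if hits:
        label_borders[hits] = isulcus
print(label_borders)  # [-1.  0. -1.  1.  0. -1.]
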