Example #1
def write_shape_stats(
    labels_or_file,
    sulci=[],
    fundi=[],
    affine_transform_file="",
    transform_format="itk",
    area_file="",
    mean_curvature_file="",
    travel_depth_file="",
    geodesic_depth_file="",
    convexity_file="",
    thickness_file="",
    labels_spectra=[],
    labels_spectra_IDs=[],
    sulci_spectra=[],
    sulci_spectra_IDs=[],
    exclude_labels=[-1],
    delimiter=",",
):
    """
    Make tables of shape statistics per label, fundus, and/or sulcus.

    Parameters
    ----------
    labels_or_file : list or string
        label number for each vertex or name of VTK file with index scalars
    sulci :  list of integers
        indices to sulci, one per vertex, with -1 indicating no sulcus
    fundi :  list of integers
        indices to fundi, one per vertex, with -1 indicating no fundus
    affine_transform_file : string
        affine transform file to standard space
    transform_format : string
        format for transform file
        Ex: 'txt' for text, 'itk' for ITK, and 'mat' for Matlab format
    area_file :  string
        name of VTK file with surface area scalar values
    mean_curvature_file :  string
        name of VTK file with mean curvature scalar values
    travel_depth_file :  string
        name of VTK file with travel depth scalar values
    geodesic_depth_file :  string
        name of VTK file with geodesic depth scalar values
    convexity_file :  string
        name of VTK file with convexity scalar values
    thickness_file :  string
        name of VTK file with thickness scalar values
    labels_spectra : list of lists of floats
        Laplace-Beltrami spectra for labeled regions
    labels_spectra_IDs : list of integers
        unique ID numbers (labels) for labels_spectra
    sulci_spectra : list of lists of floats
        Laplace-Beltrami spectra for sulci
    sulci_spectra_IDs : list of integers
        unique ID numbers (labels) for sulci_spectra
    exclude_labels : list of integers
        indices to be excluded (in addition to -1)
    delimiter : string
        delimiter between columns, such as ','

    Returns
    -------
    label_table :  string
        output table filename for label shapes
    sulcus_table :  string
        output table filename for sulcus shapes
    fundus_table :  string
        output table filename for fundus shapes

    Examples
    --------
    >>> import os
    >>> from mindboggle.utils.io_vtk import read_scalars
    >>> from mindboggle.utils.io_table import write_shape_stats
    >>> path = os.environ['MINDBOGGLE_DATA']
    >>> labels_or_file = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
    >>> sulci_file = os.path.join(path, 'arno', 'features', 'sulci.vtk')
    >>> fundi_file = os.path.join(path, 'arno', 'features', 'fundi.vtk')
    >>> sulci, name = read_scalars(sulci_file)
    >>> fundi, name = read_scalars(fundi_file)
    >>> affine_transform_file = os.path.join(path, 'arno', 'mri',
    >>>     't1weighted_brain.MNI152Affine.txt')
    >>> #affine_transform_file = os.path.join(path, 'arno', 'mri',
    >>> #    'affine_to_template.mat')
    >>> #transform_format = 'mat'
    >>> transform_format = 'itk'
    >>> area_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
    >>> mean_curvature_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.mean_curvature.vtk')
    >>> travel_depth_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.travel_depth.vtk')
    >>> geodesic_depth_file = os.path.join(path, 'arno', 'shapes', 'lh.pial.geodesic_depth.vtk')
    >>> convexity_file = ''
    >>> thickness_file = ''
    >>> delimiter = ','
    >>> #
    >>> import numpy as np
    >>> labels, name = read_scalars(labels_or_file)
    >>> labels_spectra = [[1,2,3] for x in labels]
    >>> labels_spectra_IDs = np.unique(labels).tolist()
    >>> sulci_spectra = [[1,2,3] for x in sulci]
    >>> sulci_spectra_IDs = np.unique(sulci).tolist()
    >>> exclude_labels = [-1]
    >>> #
    >>> write_shape_stats(labels_or_file, sulci, fundi,
    >>>     affine_transform_file, transform_format, area_file,
    >>>     mean_curvature_file, travel_depth_file, geodesic_depth_file,
    >>>     convexity_file, thickness_file, labels_spectra,
    >>>     labels_spectra_IDs, sulci_spectra,
    >>>     sulci_spectra_IDs, exclude_labels, delimiter)

    """
    import os
    import numpy as np
    from mindboggle.shapes.measure import means_per_label, stats_per_label, sum_per_label
    from mindboggle.utils.io_vtk import read_scalars, read_vtk, apply_affine_transform
    from mindboggle.utils.io_table import write_columns

    # Make sure inputs are lists:
    if isinstance(labels_or_file, np.ndarray):
        labels = labels_or_file.tolist()
    elif isinstance(labels_or_file, list):
        labels = labels_or_file
    elif isinstance(labels_or_file, str):
        labels, name = read_scalars(labels_or_file)
    if isinstance(sulci, np.ndarray):
        sulci = sulci.tolist()
    if isinstance(fundi, np.ndarray):
        fundi = fundi.tolist()

    # -------------------------------------------------------------------------
    # Feature lists, shape names, and shape files:
    # -------------------------------------------------------------------------
    # Feature lists:
    feature_lists = [labels, sulci, fundi]
    feature_names = ["label", "sulcus", "fundus"]
    spectra_lists = [labels_spectra, sulci_spectra]
    spectra_ID_lists = [labels_spectra_IDs, sulci_spectra_IDs]
    spectra_names = ["label spectrum", "sulcus spectrum"]
    table_names = ["label_shapes.csv", "sulcus_shapes.csv", "fundus_shapes.csv"]

    # Shape names corresponding to shape files below:
    shape_names = ["area", "mean curvature", "travel depth", "geodesic depth", "convexity", "thickness"]

    # Load shape files as a list of numpy arrays of per-vertex shape values:
    shape_files = [
        area_file,
        mean_curvature_file,
        travel_depth_file,
        geodesic_depth_file,
        convexity_file,
        thickness_file,
    ]
    shape_arrays = []
    column_names = []
    first_pass = True
    area_array = []
    for ishape, shape_file in enumerate(shape_files):
        if os.path.exists(shape_file):
            if first_pass:
                faces, lines, indices, points, npoints, scalars_array, name, input_vtk = read_vtk(
                    shape_file, True, True
                )
                points = np.array(points)
                first_pass = False
                if affine_transform_file:
                    affine_points, foo1 = apply_affine_transform(
                        affine_transform_file, points, transform_format, save_file=False
                    )
                    affine_points = np.array(affine_points)
            else:
                scalars_array, name = read_scalars(shape_file, True, True)
            if scalars_array.size:
                shape_arrays.append(scalars_array)

                # Store area array:
                if ishape == 0:
                    area_array = scalars_array.copy()

    # Initialize table file names:
    sulcus_table = None
    fundus_table = None

    # Loop through features / tables:
    for itable, feature_list in enumerate(feature_lists):
        table_column_names = []

        # ---------------------------------------------------------------------
        # For each feature, construct a table of average shape values:
        # ---------------------------------------------------------------------
        table_file = os.path.join(os.getcwd(), table_names[itable])
        if feature_list:
            feature_name = feature_names[itable]
            columns = []

            # -----------------------------------------------------------------
            # Mean positions in the original space:
            # -----------------------------------------------------------------
            # Compute mean position per feature:
            positions, sdevs, label_list, foo = means_per_label(points, feature_list, exclude_labels, area_array)

            # Append mean position per feature to columns:
            table_column_names.append("mean position")
            columns.append(positions)

            # -----------------------------------------------------------------
            # Mean positions in standard space:
            # -----------------------------------------------------------------
            if affine_transform_file:
                # Compute standard space mean position per feature:
                standard_positions, sdevs, label_list, foo = means_per_label(
                    affine_points, feature_list, exclude_labels, area_array
                )

                # Append standard space mean position per feature to columns:
                table_column_names.append("mean position in standard space")
                columns.append(standard_positions)

            # -----------------------------------------------------------------
            # Loop through shape measures:
            # -----------------------------------------------------------------
            table_column_names.extend(column_names[:])
            for ishape, shape_array in enumerate(shape_arrays):
                shape_name = shape_names[ishape]
                print("  Compute statistics on {0} {1}".format(feature_name, shape_name))

                # Append shape names and values per feature to columns:
                pr = feature_name + ": " + shape_name + ": "
                if np.size(area_array):
                    po = " (weighted)"
                else:
                    po = ""
                # -------------------------------------------------------------
                # Append total feature areas to columns:
                # -------------------------------------------------------------
                if ishape == 0 and np.size(area_array):
                    sums, label_list = sum_per_label(shape_array, feature_list, exclude_labels)
                    table_column_names.append(pr + "total")
                    columns.append(sums)
                # -------------------------------------------------------------
                # Append feature shape statistics to columns:
                # -------------------------------------------------------------
                else:
                    medians, mads, means, sdevs, skews, kurts, lower_quarts, upper_quarts, label_list = stats_per_label(
                        shape_array, feature_list, exclude_labels, area_array, precision=1
                    )

                    table_column_names.append(pr + "median" + po)
                    table_column_names.append(pr + "median absolute deviation" + po)
                    table_column_names.append(pr + "mean" + po)
                    table_column_names.append(pr + "standard deviation" + po)
                    table_column_names.append(pr + "skew" + po)
                    table_column_names.append(pr + "kurtosis" + po)
                    table_column_names.append(pr + "lower quartile" + po)
                    table_column_names.append(pr + "upper quartile" + po)
                    columns.append(medians)
                    columns.append(mads)
                    columns.append(means)
                    columns.append(sdevs)
                    columns.append(skews)
                    columns.append(kurts)
                    columns.append(lower_quarts)
                    columns.append(upper_quarts)

            # -----------------------------------------------------------------
            # Laplace-Beltrami spectra:
            # -----------------------------------------------------------------
            if itable in [0, 1]:
                spectra = spectra_lists[itable]
                spectra_name = spectra_names[itable]
                spectra_IDs = spectra_ID_lists[itable]

                # Order spectra into a list:
                spectrum_list = []
                for label in label_list:
                    if label in spectra_IDs:
                        spectrum = spectra[spectra_IDs.index(label)]
                        spectrum_list.append(spectrum)
                    else:
                        spectrum_list.append("")

                # Append spectral shape name and values to relevant columns:
                columns.append(spectrum_list)
                table_column_names.append(spectra_name)

            # -----------------------------------------------------------------
            # Write labels/IDs and values to table:
            # -----------------------------------------------------------------
            # Write labels/IDs to table:
            write_columns(label_list, feature_name, table_file, delimiter)

            # Append columns of shape values to table:
            if columns:
                write_columns(columns, table_column_names, table_file, delimiter, quote=True, input_table=table_file)
        else:
            # Write something to table:
            write_columns([], "", table_file, delimiter)

        # ---------------------------------------------------------------------
        # Return correct table file name:
        # ---------------------------------------------------------------------
        if itable == 0:
            label_table = table_file
        elif itable == 1:
            sulcus_table = table_file
        elif itable == 2:
            fundus_table = table_file

    return label_table, sulcus_table, fundus_table
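
A minimal usage sketch (not part of the Mindboggle source): it assumes the same MINDBOGGLE_DATA layout as the docstring example above, passes only a label file plus two shape files, and then prints the header and first data row of the returned label table.

# Usage sketch with placeholder inputs; paths follow the docstring example.
import os
import csv
from mindboggle.utils.io_table import write_shape_stats

path = os.environ['MINDBOGGLE_DATA']
labels_vtk = os.path.join(path, 'arno', 'labels', 'lh.labels.DKT25.manual.vtk')
area_vtk = os.path.join(path, 'arno', 'shapes', 'lh.pial.area.vtk')
curv_vtk = os.path.join(path, 'arno', 'shapes', 'lh.pial.mean_curvature.vtk')

# Tables are written to the current working directory as CSV files:
label_table, sulcus_table, fundus_table = write_shape_stats(
    labels_vtk, area_file=area_vtk, mean_curvature_file=curv_vtk)

with open(label_table) as f:
    rows = list(csv.reader(f))
print(rows[0])  # column names: "label", "mean position", shape statistics, ...
print(rows[1])  # statistics for the first label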
Example #2
        if area_file and os.path.exists(area_file):
            area_array, name = read_scalars(area_file, True, True)

        #-----------------------------------------------------------------
        # Loop through shape measures:
        #-----------------------------------------------------------------
        table_column_names.extend(column_names[:])
        print('  Compute statistics on {0} {1}'.format(label_name, shape_name))

        #-------------------------------------------------------------
        # Mean shapes:
        #-------------------------------------------------------------
        medians, mads, means, sdevs, skews, kurts, \
        lower_quarts, upper_quarts, \
        label_list = stats_per_label(shape_array,
            label_array, exclude_labels, area_array, precision=1)
    
        # Append shape names and values to columns:
        pr = label_name + ": " + shape_name + ": "
        if np.size(area_array):
            po = " (weighted)"
        else:
            po = ""
        table_column_names.append(pr + 'median' + po)
        """
        table_column_names.append(pr + 'median absolute deviation' + po)
        table_column_names.append(pr + 'mean' + po)
        table_column_names.append(pr + 'standard deviation' + po)
        table_column_names.append(pr + 'skew' + po)
        table_column_names.append(pr + 'kurtosis' + po)
        table_column_names.append(pr + 'lower quartile' + po)
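
As a side note (not part of the original code), the repeated append calls above, and the matching block in Example #1, could be collapsed into one loop over the statistic names in the order stats_per_label returns them; a sketch that reuses pr, po, columns, and table_column_names from the surrounding code:

        # Sketch: build per-shape column headers and value columns in one loop.
        stat_names = ['median', 'median absolute deviation', 'mean',
                      'standard deviation', 'skew', 'kurtosis',
                      'lower quartile', 'upper quartile']
        stat_values = [medians, mads, means, sdevs, skews, kurts,
                       lower_quarts, upper_quarts]
        for stat_name, values in zip(stat_names, stat_values):
            table_column_names.append(pr + stat_name + po)
            columns.append(values)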