def get_skeleton_points(self, obj):
     '''Get points by skeletonizing the objects and decimating'''
     total_skel = np.zeros(obj.shape, bool)
     for labels, indexes in obj.get_labels():
         colors = morph.color_labels(labels)
         for color in range(1, np.max(colors) + 1):
             labels_mask = colors == color
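             #
             # The ordering argument steers which pixels survive thinning:
             # the distance transform grows toward the object interior and
             # the Poisson-equation solution is largest away from the edge,
             # so their product favors a skeleton that stays well centered.
             #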
             skel = morph.skeletonize(
                 labels_mask,
                 ordering=distance_transform_edt(labels_mask) *
                 poisson_equation(labels_mask))
             total_skel = total_skel | skel
     n_pts = np.sum(total_skel)
     if n_pts == 0:
         return np.zeros(0, np.int32), np.zeros(0, np.int32)
     i, j = np.where(total_skel)
     if n_pts > self.max_points.value:
         #
         # Decimate the skeleton by finding the branchpoints in the
         # skeleton and propagating from those.
         #
         markers = np.zeros(total_skel.shape, np.int32)
         branchpoints = \
             morph.branchpoints(total_skel) | morph.endpoints(total_skel)
         markers[branchpoints] = np.arange(np.sum(branchpoints)) + 1
         #
         # We compute the propagation distance to that point, then impose
         # a slightly arbitrary order to get an unambiguous ordering
         # which should number the pixels in a skeleton branch monotonically
         #
         ts_labels, distances = propagate(np.zeros(markers.shape), markers,
                                          total_skel, 1)
         order = np.lexsort((j, i, distances[i, j], ts_labels[i, j]))
         #
         # Get a linear space of self.max_points elements with bounds at
         # 0 and len(order)-1 and use that to select the points.
         #
         order = order[np.linspace(0,
                                   len(order) - 1,
                                   self.max_points.value).astype(int)]
         return i[order], j[order]
     return i, j
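
# A minimal, self-contained sketch of the decimation step above. The names
# `order` and `max_points` stand in for the lexsort result and
# self.max_points.value: np.linspace picks evenly spaced positions along the
# ordered skeleton, always keeping the first and last points.
import numpy as np

order = np.arange(137)            # hypothetical ordered skeleton point indices
max_points = 20                   # hypothetical cap on the number of points
keep = np.linspace(0, len(order) - 1, max_points).astype(int)
decimated = order[keep]           # 20 roughly evenly spaced indices
assert decimated[0] == order[0] and decimated[-1] == order[-1]
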
    def do_measurements(self, workspace, image_name, object_name,
                        center_object_name, center_choice,
                        bin_count_settings, dd):
        '''Perform the radial measurements on the image set

        workspace - workspace that holds images / objects
        image_name - make measurements on this image
        object_name - make measurements on these objects
        center_object_name - use the centers of these related objects as
                      the centers for radial measurements. None to use the
                      objects themselves.
        center_choice - the user's center choice for this object:
                      C_SELF, C_CENTERS_OF_OTHER or C_EDGES_OF_OTHER.
        bin_count_settings - the bin count settings group
        dd - a dictionary for saving reusable partial results

        returns one statistics tuple per ring.
        '''
        assert isinstance(workspace, cpw.Workspace)
        assert isinstance(workspace.object_set, cpo.ObjectSet)
        bin_count = bin_count_settings.bin_count.value
        wants_scaled = bin_count_settings.wants_scaled.value
        maximum_radius = bin_count_settings.maximum_radius.value

        image = workspace.image_set.get_image(image_name,
                                              must_be_grayscale=True)
        objects = workspace.object_set.get_objects(object_name)
        labels, pixel_data = cpo.crop_labels_and_image(objects.segmented,
                                                       image.pixel_data)
        nobjects = np.max(objects.segmented)
        measurements = workspace.measurements
        assert isinstance(measurements, cpmeas.Measurements)
        heatmaps = {}
        for heatmap in self.heatmaps:
            if heatmap.object_name.get_objects_name() == object_name and \
                            image_name == heatmap.image_name.get_image_name() and \
                            heatmap.get_number_of_bins() == bin_count:
                dd[id(heatmap)] = \
                    heatmaps[MEASUREMENT_ALIASES[heatmap.measurement.value]] = \
                    np.zeros(labels.shape)
        if nobjects == 0:
            for bin in range(1, bin_count + 1):
                for feature in (F_FRAC_AT_D, F_MEAN_FRAC, F_RADIAL_CV):
                    feature_name = (
                        (feature + FF_GENERIC) % (image_name, bin, bin_count))
                    measurements.add_measurement(
                            object_name, "_".join([M_CATEGORY, feature_name]),
                            np.zeros(0))
                    if not wants_scaled:
                        measurement_name = "_".join([M_CATEGORY, feature,
                                                     image_name, FF_OVERFLOW])
                        measurements.add_measurement(
                                object_name, measurement_name, np.zeros(0))
            return [(image_name, object_name, "no objects", "-", "-", "-", "-")]
        name = (object_name if center_object_name is None
                else "%s_%s" % (object_name, center_object_name))
        if name in dd:
            normalized_distance, i_center, j_center, good_mask = dd[name]
        else:
            d_to_edge = distance_to_edge(labels)
            if center_object_name is not None:
                #
                # Use the center of the centering objects to assign a center
                # to each labeled pixel using propagation
                #
                center_objects = workspace.object_set.get_objects(center_object_name)
                center_labels, cmask = cpo.size_similarly(
                        labels, center_objects.segmented)
                pixel_counts = fix(scind.sum(
                        np.ones(center_labels.shape),
                        center_labels,
                        np.arange(1, np.max(center_labels) + 1, dtype=np.int32)))
                good = pixel_counts > 0
                i, j = (centers_of_labels(center_labels) + .5).astype(int)
                ig = i[good]
                jg = j[good]
                lg = np.arange(1, len(i) + 1)[good]
                if center_choice == C_CENTERS_OF_OTHER:
                    #
                    # Reduce the propagation labels to the centers of
                    # the centering objects
                    #
                    center_labels = np.zeros(center_labels.shape, int)
                    center_labels[ig, jg] = lg
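                #
                # propagate labels every pixel in the mask with the nearest
                # seed's label (cl) and records the propagation distance to
                # that seed (d_from_center).
                #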
                cl, d_from_center = propagate(np.zeros(center_labels.shape),
                                              center_labels,
                                              labels != 0, 1)
                #
                # Erase the centers that fall outside of labels
                #
                cl[labels == 0] = 0
                #
                # If objects are hollow or crescent-shaped, there may be
                # objects without center labels. As a backup, find the
                # center that is the closest to the center of mass.
                #
                missing_mask = (labels != 0) & (cl == 0)
                missing_labels = np.unique(labels[missing_mask])
                if len(missing_labels):
                    all_centers = centers_of_labels(labels)
                    missing_i_centers, missing_j_centers = \
                        all_centers[:, missing_labels - 1]
                    di = missing_i_centers[:, np.newaxis] - ig[np.newaxis, :]
                    dj = missing_j_centers[:, np.newaxis] - jg[np.newaxis, :]
                    missing_best = lg[np.argmin(di * di + dj * dj, axis=1)]
                    best = np.zeros(np.max(labels) + 1, int)
                    best[missing_labels] = missing_best
                    cl[missing_mask] = best[labels[missing_mask]]
                    #
                    # Now compute the crow-flies distance to the centers
                    # of these pixels from whatever center was assigned to
                    # the object.
                    #
                    iii, jjj = np.mgrid[0:labels.shape[0], 0:labels.shape[1]]
                    di = iii[missing_mask] - i[cl[missing_mask] - 1]
                    dj = jjj[missing_mask] - j[cl[missing_mask] - 1]
                    d_from_center[missing_mask] = np.sqrt(di * di + dj * dj)
            else:
                # Find the point in each object farthest away from the edge.
                # This does better than the centroid:
                # * The center is within the object
                # * The center tends to be an interesting point, like the
                #   center of the nucleus or the center of one or the other
                #   of two touching cells.
                #
                i, j = maximum_position_of_labels(d_to_edge, labels, objects.indices)
                center_labels = np.zeros(labels.shape, int)
                center_labels[i, j] = labels[i, j]
                #
                # Use the coloring trick here to process touching objects
                # in separate operations
                #
                colors = color_labels(labels)
                ncolors = np.max(colors)
                d_from_center = np.zeros(labels.shape)
                cl = np.zeros(labels.shape, int)
                for color in range(1, ncolors + 1):
                    mask = colors == color
                    l, d = propagate(np.zeros(center_labels.shape),
                                     center_labels,
                                     mask, 1)
                    d_from_center[mask] = d[mask]
                    cl[mask] = l[mask]
            good_mask = cl > 0
            if center_choice == C_EDGES_OF_OTHER:
                # Exclude pixels within the centering objects
                # when performing calculations from the centers
                good_mask = good_mask & (center_labels == 0)
            i_center = np.zeros(cl.shape)
            i_center[good_mask] = i[cl[good_mask] - 1]
            j_center = np.zeros(cl.shape)
            j_center[good_mask] = j[cl[good_mask] - 1]

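            #
            # Normalized distance per pixel: the fraction of the center-to-
            # edge distance covered (scaled case, always < 1, with a small
            # epsilon guarding against division by zero) or the raw distance
            # in units of maximum_radius (unscaled case, which may exceed 1
            # and fall into the overflow bin).
            #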
            normalized_distance = np.zeros(labels.shape)
            if wants_scaled:
                total_distance = d_from_center + d_to_edge
                normalized_distance[good_mask] = (d_from_center[good_mask] /
                                                  (total_distance[good_mask] + .001))
            else:
                normalized_distance[good_mask] = \
                    d_from_center[good_mask] / maximum_radius
            dd[name] = [normalized_distance, i_center, j_center, good_mask]
        ngood_pixels = np.sum(good_mask)
        good_labels = labels[good_mask]
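        #
        # Convert normalized distance into a ring index; anything beyond the
        # last ring (possible only with unscaled distances) is clipped to the
        # overflow bin at index bin_count.
        #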
        bin_indexes = (normalized_distance * bin_count).astype(int)
        bin_indexes[bin_indexes > bin_count] = bin_count
        labels_and_bins = (good_labels - 1, bin_indexes[good_mask])
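        #
        # coo_matrix sums duplicate (row, col) entries when densified, so this
        # accumulates the total intensity per (object, ring) in one step.
        #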
        histogram = coo_matrix((pixel_data[good_mask], labels_and_bins),
                               (nobjects, bin_count + 1)).toarray()
        sum_by_object = np.sum(histogram, 1)
        sum_by_object_per_bin = np.dstack([sum_by_object] * (bin_count + 1))[0]
        fraction_at_distance = histogram / sum_by_object_per_bin
        number_at_distance = coo_matrix((np.ones(ngood_pixels), labels_and_bins),
                                        (nobjects, bin_count + 1)).toarray()
        object_mask = number_at_distance > 0
        sum_by_object = np.sum(number_at_distance, 1)
        sum_by_object_per_bin = np.dstack([sum_by_object] * (bin_count + 1))[0]
        fraction_at_bin = number_at_distance / sum_by_object_per_bin
        mean_pixel_fraction = fraction_at_distance / (fraction_at_bin +
                                                      np.finfo(float).eps)
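        #
        # mean_pixel_fraction divides each ring's intensity fraction by its
        # pixel fraction, giving the mean fractional intensity per pixel in
        # the ring; eps keeps empty rings from dividing by zero.
        #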
        masked_fraction_at_distance = masked_array(fraction_at_distance,
                                                   ~object_mask)
        masked_mean_pixel_fraction = masked_array(mean_pixel_fraction,
                                                  ~object_mask)
        # Anisotropy calculation.  Split each cell into eight wedges, then
        # compute coefficient of variation of the wedges' mean intensities
        # in each ring.
        #
        # Compute each pixel's delta from the center object's centroid
        i, j = np.mgrid[0:labels.shape[0], 0:labels.shape[1]]
        imask = i[good_mask] > i_center[good_mask]
        jmask = j[good_mask] > j_center[good_mask]
        absmask = (abs(i[good_mask] - i_center[good_mask]) >
                   abs(j[good_mask] - j_center[good_mask]))
        radial_index = (imask.astype(int) + jmask.astype(int) * 2 +
                        absmask.astype(int) * 4)
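        #
        # radial_index packs three binary tests (row side of the center,
        # column side of the center, and whether the row or column offset
        # dominates) into a wedge number from 0 to 7.
        #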
        statistics = []

        for bin in range(bin_count + (0 if wants_scaled else 1)):
            bin_mask = (good_mask & (bin_indexes == bin))
            bin_pixels = np.sum(bin_mask)
            bin_labels = labels[bin_mask]
            bin_radial_index = radial_index[bin_indexes[good_mask] == bin]
            labels_and_radii = (bin_labels - 1, bin_radial_index)
            radial_values = coo_matrix((pixel_data[bin_mask],
                                        labels_and_radii),
                                       (nobjects, 8)).toarray()
            pixel_count = coo_matrix((np.ones(bin_pixels), labels_and_radii),
                                     (nobjects, 8)).toarray()
            mask = pixel_count == 0
            radial_means = masked_array(radial_values / pixel_count, mask)
            radial_cv = np.std(radial_means, 1) / np.mean(radial_means, 1)
            radial_cv[np.sum(~mask, 1) == 0] = 0
            for measurement, feature, overflow_feature in (
                    (fraction_at_distance[:, bin], MF_FRAC_AT_D, OF_FRAC_AT_D),
                    (mean_pixel_fraction[:, bin], MF_MEAN_FRAC, OF_MEAN_FRAC),
                    (np.array(radial_cv), MF_RADIAL_CV, OF_RADIAL_CV)):

                if bin == bin_count:
                    measurement_name = overflow_feature % image_name
                else:
                    measurement_name = feature % (image_name, bin + 1, bin_count)
                measurements.add_measurement(object_name,
                                             measurement_name,
                                             measurement)
                if feature in heatmaps:
                    heatmaps[feature][bin_mask] = measurement[bin_labels - 1]
            radial_cv.mask = np.sum(~mask, 1) == 0
            bin_name = str(bin + 1) if bin < bin_count else "Overflow"
            statistics += [(image_name, object_name, bin_name, str(bin_count),
                            round(np.mean(masked_fraction_at_distance[:, bin]), 4),
                            round(np.mean(masked_mean_pixel_fraction[:, bin]), 4),
                            round(np.mean(radial_cv), 4))]
        return statistics
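
# A small, self-contained sketch of the coo_matrix trick used above, with toy
# data (3 assumed objects, 4 assumed rings): duplicate (object, ring)
# coordinates are summed when the sparse matrix is densified, which builds a
# per-object, per-ring histogram without an explicit Python loop.
import numpy as np
from scipy.sparse import coo_matrix

nobjects, nbins = 3, 4
object_of_pixel = np.array([0, 0, 1, 1, 1, 2, 2])   # object index per pixel
ring_of_pixel = np.array([0, 1, 1, 1, 3, 0, 2])     # ring index per pixel
intensity = np.array([0.2, 0.4, 0.1, 0.3, 0.5, 0.9, 0.6])

# Total intensity per (object, ring); duplicates are summed on toarray().
histogram = coo_matrix((intensity, (object_of_pixel, ring_of_pixel)),
                       (nobjects, nbins)).toarray()
# Pixel count per (object, ring); same trick with ones as the data.
counts = coo_matrix((np.ones(len(intensity)),
                     (object_of_pixel, ring_of_pixel)),
                    (nobjects, nbins)).toarray()
print(histogram)
print(counts)
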
    def run(self, workspace):
        #
        # Get some things we need from the workspace
        #
        measurements = workspace.measurements
        object_set = workspace.object_set
        #
        # Get the objects
        #
        objects_name = self.objects_name.value
        objects = object_set.get_objects(objects_name)
        #
        # First, I do it the (1) way to show how that code should look.
        # Later, I do it the (3) way and that will work even if objects.has_ijv
        # is False.
        if self.method == SUPPORT_BASIC:
            labels = objects.segmented
            #
            # The indices are the integer values representing each of the objects
            # in the labels matrix. scipy.ndimage functions often take an optional
            # argument that tells them which objects should be analyzed.
            # For instance, scipy.ndimage.mean takes an input image, a labels matrix
            # and the indices. If you don't supply the indices, it will just take
            # the mean of all labeled pixels, returning a single number.
            #
            indices = objects.indices
            #
            # Find the labeled pixels using labels != 0
            #
            foreground = labels != 0
            #
            # use scipy.ndimage.distance_transform_edt to find the distance of
            # every foreground pixel from the object edge
            #
            distance = scipy.ndimage.distance_transform_edt(foreground)
            #
            # call scipy.ndimage.mean(distance, labels, indices) to find the
            # mean distance in each object from its edge
            #
            values = scipy.ndimage.mean(distance, labels, indices)
            #
            # record the measurement using measurements.add_measurement
            # with an object name of "objects_name" and a measurement name
            # of M_MEAN_DISTANCE
            #
            measurements.add_measurement(objects_name, M_MEAN_DISTANCE, values)
        elif self.method == SUPPORT_OVERLAPPING:
            #
            # I'll use objects.get_labels to get labels matrices. This involves
            # a little extra work collating the values, but not so bad.
            #
            # First of all, labels indices start at 1, but arrays start at
            # zero, so for "values", I'm going to cheat and waste values[0].
            # Later, I'll only use values[1:]
            #
            values = np.zeros(objects.count + 1)
            #
            # Now for the loop
            #
            for labels, indices in objects.get_labels():
                foreground = labels != 0
                distance = scipy.ndimage.distance_transform_edt(foreground)
                v1 = scipy.ndimage.mean(distance, labels, indices)
                #
                # We copy the values above into the appropriate slots
                #
                values[indices] = v1
            measurements.add_measurement(objects_name, M_MEAN_DISTANCE,
                                         values[1:])
        else:
            #
            # It's just a little expensive finding out which labels are
            # touching others. The trick here is to use a function from
            # cpmorphology called "color_labels". This is akin to the
            # four color theorem - you want to color objects so that no
            # two adjacent ones have the same color.
            #
            # After we've done that, we process each of the colors in turn,
            # knowing that each object is colored only once and none of its
            # neighbors have the same color.
            #
            # This is a good demo of why Python and Numpy are good choices
            # for image processing. We're handling some pretty abstract
            # concepts in just a few lines of code and the result, I hope,
            # is clear and readable.
            #
            from centrosome.cpmorphology import color_labels

            values = np.zeros(objects.count + 1)
            for labels, indices in objects.get_labels():
                clabels = color_labels(labels)
                #
                # np.unique returns the unique #s in an array.
                #
                colors = np.unique(clabels)
                for color in colors:
                    # 0 = background, so ignore it.
                    if color == 0:
                        continue
                    #
                    # Ok, here's a trick. clabels == color gets converted
                    # to either 1 (is the current color) or 0 (is not) and
                    # we can use that to mask only the labels for the current
                    # color by multiplying (0 * anything = 0)
                    #
                    foreground = clabels == color
                    mini_labels = labels * foreground
                    distance = scipy.ndimage.distance_transform_edt(foreground)
                    #
                    # And here's another trick - scipy.ndimage.mean returns
                    # NaN for any index that doesn't appear because the
                    # mean isn't computable. How lucky!
                    #
                    v1 = scipy.ndimage.mean(distance, mini_labels, indices)
                    good_v1 = ~np.isnan(v1)
                    values[indices[good_v1]] = v1[good_v1]
            measurements.add_measurement(objects_name, M_MEAN_DISTANCE,
                                         values[1:])
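
# A minimal, runnable sketch of the SUPPORT_BASIC pattern above, using a toy
# 4x6 label matrix with two assumed objects: distance_transform_edt gives each
# foreground pixel's distance from the background, and scipy.ndimage.mean with
# a labels matrix plus explicit indices returns one mean distance per object.
import numpy as np
import scipy.ndimage

labels = np.array([[1, 1, 1, 0, 2, 2],
                   [1, 1, 1, 0, 2, 2],
                   [1, 1, 1, 0, 2, 2],
                   [0, 0, 0, 0, 0, 0]])
indices = np.unique(labels[labels != 0])     # -> array([1, 2])
foreground = labels != 0
distance = scipy.ndimage.distance_transform_edt(foreground)
mean_distance = scipy.ndimage.mean(distance, labels, indices)
print(mean_distance)                         # one mean edge distance per object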