Example #1
0
 def run_on_image_setting(self, workspace, image):
     """Measure the granular spectrum of one image and its objects.

     Subtracts a tophat-style background estimate, then repeatedly
     erodes and grey-reconstructs the image; the per-scale loss in mean
     intensity (as a percentage of the starting mean) is the granular
     spectrum.  One image measurement is recorded per spectrum element,
     plus one per-object measurement for every object group attached to
     this image setting.

     Parameters:
         workspace - a cpw.Workspace holding the image set, object set
                     and measurements for the current cycle.
         image     - the settings group naming the image, its sampling
                     factors, structuring-element size, spectrum length
                     and associated objects.

     Returns a list of display statistics: the image name followed by
     one "%.2f"-formatted granularity value per spectrum element.
     """
     assert isinstance(workspace, cpw.Workspace)
     image_set = workspace.image_set
     measurements = workspace.measurements
     im = image_set.get_image(image.image_name.value,
                              must_be_grayscale=True)
     #
     # Downsample the image and mask
     #
     new_shape = np.array(im.pixel_data.shape)
     if image.subsample_size.value < 1:
         new_shape = new_shape * image.subsample_size.value
         i, j = (np.mgrid[0:new_shape[0], 0:new_shape[1]].astype(float) /
                 image.subsample_size.value)
         # map_coordinates returns a fresh array, so in-place math on
         # "pixels" below is safe in this branch.
         pixels = scind.map_coordinates(im.pixel_data, (i, j), order=1)
         mask = scind.map_coordinates(im.mask.astype(float), (i, j)) > .9
     else:
         # Copy: the background subtraction below is in-place and must
         # not clobber the cached image data shared with other modules.
         pixels = im.pixel_data.copy()
         mask = im.mask
     #
     # Remove background pixels using a greyscale tophat filter
     #
     if image.image_sample_size.value < 1:
         back_shape = new_shape * image.image_sample_size.value
         i, j = (np.mgrid[0:back_shape[0], 0:back_shape[1]].astype(float) /
                 image.image_sample_size.value)
         back_pixels = scind.map_coordinates(pixels, (i, j), order=1)
         back_mask = scind.map_coordinates(mask.astype(float), (i, j)) > .9
     else:
         back_shape = new_shape
         back_pixels = pixels
         back_mask = mask
     radius = image.element_size.value
     # Erosion followed by dilation = greyscale opening: an estimate of
     # the background at the chosen structuring-element radius.
     back_pixels = morph.grey_erosion(back_pixels, radius, back_mask)
     back_pixels = morph.grey_dilation(back_pixels, radius, back_mask)
     if image.image_sample_size.value < 1:
         i, j = np.mgrid[0:new_shape[0], 0:new_shape[1]].astype(float)
         #
         # Make sure the mapping only references the index range of
         # back_pixels.
         #
         i *= float(back_shape[0] - 1) / float(new_shape[0] - 1)
         j *= float(back_shape[1] - 1) / float(new_shape[1] - 1)
         back_pixels = scind.map_coordinates(back_pixels, (i, j), order=1)
     pixels -= back_pixels
     pixels[pixels < 0] = 0
     #
     # For each object, build a little record
     #
     class ObjectRecord(object):
         """Per-object-set bookkeeping: labels restricted to the image
         mask plus the running mean intensity used for the per-object
         granularity measurements."""
         def __init__(self, name):
             self.name = name
             self.labels = workspace.object_set.get_objects(name).segmented
             self.nobjects = np.max(self.labels)
             if self.nobjects != 0:
                 self.range = np.arange(1, np.max(self.labels) + 1)
                 # Zero out labels outside the image mask so masked
                 # pixels never contribute to the object means.
                 self.labels = self.labels.copy()
                 self.labels[~ im.mask] = 0
                 self.current_mean = fix(
                     scind.mean(im.pixel_data,
                                self.labels,
                                self.range))
                 # Avoid a zero divisor for all-dark objects.
                 self.start_mean = np.maximum(
                     self.current_mean, np.finfo(float).eps)
     object_records = [ObjectRecord(ob.objects_name.value)
                       for ob in image.objects]
     #
     # Transcribed from the Matlab module: granspectr function
     #
     # CALCULATES GRANULAR SPECTRUM, ALSO KNOWN AS SIZE DISTRIBUTION,
     # GRANULOMETRY, AND PATTERN SPECTRUM, SEE REF.:
     # J.Serra, Image Analysis and Mathematical Morphology, Vol. 1. Academic Press, London, 1989
     # Maragos,P. "Pattern spectrum and multiscale shape representation", IEEE Transactions on Pattern Analysis and Machine Intelligence, 11, N 7, pp. 701-716, 1989
     # L.Vincent "Granulometries and Opening Trees", Fundamenta Informaticae, 41, No. 1-2, pp. 57-90, IOS Press, 2000.
     # L.Vincent "Morphological Area Opening and Closing for Grayscale Images", Proc. NATO Shape in Picture Workshop, Driebergen, The Netherlands, pp. 197-208, 1992.
     # I.Ravkin, V.Temov "Bit representation techniques and image processing", Applied Informatics, v.14, pp. 41-90, Finances and Statistics, Moskow, 1988 (in Russian)
     # THIS IMPLEMENTATION INSTEAD OF OPENING USES EROSION FOLLOWED BY RECONSTRUCTION
     #
     ng = image.granular_spectrum_length.value
     startmean = np.mean(pixels[mask])
     ero = pixels.copy()
     # Mask the test image so that masked pixels will have no effect
     # during reconstruction
     #
     ero[~mask] = 0
     currentmean = startmean
     # Guard against division by zero for an all-dark image.
     startmean = max(startmean, np.finfo(float).eps)

     footprint = np.array([[False, True, False],
                           [True, True, True],
                           [False, True, False]])
     statistics = [image.image_name.value]
     #
     # The mapping that restores a (possibly downsampled) image to the
     # original shape is loop-invariant: compute it once, not per scale.
     # Scale so the mapping only references the index range of the
     # downsampled image.
     #
     orig_shape = im.pixel_data.shape
     ii, jj = np.mgrid[0:orig_shape[0], 0:orig_shape[1]].astype(float)
     ii *= float(new_shape[0] - 1) / float(orig_shape[0] - 1)
     jj *= float(new_shape[1] - 1) / float(orig_shape[1] - 1)
     for i in range(1, ng + 1):
         prevmean = currentmean
         ero = morph.grey_erosion(ero, mask=mask, footprint=footprint)
         rec = morph.grey_reconstruction(ero, pixels, footprint)
         currentmean = np.mean(rec[mask])
         # Fraction of the starting intensity removed at this scale.
         gs = (prevmean - currentmean) * 100 / startmean
         statistics += ["%.2f" % gs]
         feature = image.granularity_feature(i)
         measurements.add_image_measurement(feature, gs)
         #
         # Restore the reconstructed image to the shape of the
         # original image so we can match against object labels
         #
         rec = scind.map_coordinates(rec, (ii, jj), order=1)
         #
         # Calculate the means for the objects
         #
         for object_record in object_records:
             assert isinstance(object_record, ObjectRecord)
             if object_record.nobjects > 0:
                 new_mean = fix(scind.mean(rec, object_record.labels,
                                           object_record.range))
                 gss = ((object_record.current_mean - new_mean) * 100 /
                        object_record.start_mean)
                 object_record.current_mean = new_mean
             else:
                 # No objects: record an empty measurement vector.
                 gss = np.zeros((0,))
             measurements.add_measurement(object_record.name, feature, gss)
     return statistics
Example #2
0
    def run_on_image_setting(self, workspace, image):
        """Measure the granular spectrum of one image and its objects.

        Subtracts a tophat-style background estimate, then repeatedly
        erodes and grey-reconstructs the image; the per-scale loss in
        mean intensity (as a percentage of the starting mean) is the
        granular spectrum.  One image measurement is recorded per
        spectrum element, plus one per-object measurement for every
        object group attached to this image setting.

        Parameters:
            workspace - a cpw.Workspace holding the image set, object
                        set and measurements for the current cycle.
            image     - the settings group naming the image, its
                        sampling factors, structuring-element size,
                        spectrum length and associated objects.

        Returns a list of display statistics: the image name followed
        by one "%.2f"-formatted granularity value per spectrum element.
        """
        assert isinstance(workspace, cpw.Workspace)
        image_set = workspace.image_set
        measurements = workspace.measurements
        im = image_set.get_image(image.image_name.value,
                                 must_be_grayscale=True)
        #
        # Downsample the image and mask
        #
        new_shape = np.array(im.pixel_data.shape)
        if image.subsample_size.value < 1:
            new_shape = new_shape * image.subsample_size.value
            i, j = (np.mgrid[0:new_shape[0], 0:new_shape[1]].astype(float) /
                    image.subsample_size.value)
            # map_coordinates returns a fresh array, so in-place math on
            # "pixels" below is safe in this branch.
            pixels = scind.map_coordinates(im.pixel_data, (i, j), order=1)
            mask = scind.map_coordinates(im.mask.astype(float), (i, j)) > .9
        else:
            # Copy: the background subtraction below is in-place and
            # must not clobber the cached image data shared with other
            # modules.
            pixels = im.pixel_data.copy()
            mask = im.mask
        #
        # Remove background pixels using a greyscale tophat filter
        #
        if image.image_sample_size.value < 1:
            back_shape = new_shape * image.image_sample_size.value
            i, j = (np.mgrid[0:back_shape[0], 0:back_shape[1]].astype(float) /
                    image.image_sample_size.value)
            back_pixels = scind.map_coordinates(pixels, (i, j), order=1)
            back_mask = scind.map_coordinates(mask.astype(float), (i, j)) > .9
        else:
            back_shape = new_shape
            back_pixels = pixels
            back_mask = mask
        radius = image.element_size.value
        # Erosion followed by dilation = greyscale opening: an estimate
        # of the background at the chosen structuring-element radius.
        back_pixels = morph.grey_erosion(back_pixels, radius, back_mask)
        back_pixels = morph.grey_dilation(back_pixels, radius, back_mask)
        if image.image_sample_size.value < 1:
            i, j = np.mgrid[0:new_shape[0], 0:new_shape[1]].astype(float)
            #
            # Make sure the mapping only references the index range of
            # back_pixels.
            #
            i *= float(back_shape[0] - 1) / float(new_shape[0] - 1)
            j *= float(back_shape[1] - 1) / float(new_shape[1] - 1)
            back_pixels = scind.map_coordinates(back_pixels, (i, j), order=1)
        pixels -= back_pixels
        pixels[pixels < 0] = 0

        #
        # For each object, build a little record
        #
        class ObjectRecord(object):
            """Per-object-set bookkeeping: labels restricted to the
            image mask plus the running mean intensity used for the
            per-object granularity measurements."""
            def __init__(self, name):
                self.name = name
                self.labels = workspace.object_set.get_objects(name).segmented
                self.nobjects = np.max(self.labels)
                if self.nobjects != 0:
                    self.range = np.arange(1, np.max(self.labels) + 1)
                    # Zero out labels outside the image mask so masked
                    # pixels never contribute to the object means.
                    self.labels = self.labels.copy()
                    self.labels[~im.mask] = 0
                    self.current_mean = fix(
                        scind.mean(im.pixel_data, self.labels, self.range))
                    # Avoid a zero divisor for all-dark objects.
                    self.start_mean = np.maximum(self.current_mean,
                                                 np.finfo(float).eps)

        object_records = [
            ObjectRecord(ob.objects_name.value) for ob in image.objects
        ]
        #
        # Transcribed from the Matlab module: granspectr function
        #
        # CALCULATES GRANULAR SPECTRUM, ALSO KNOWN AS SIZE DISTRIBUTION,
        # GRANULOMETRY, AND PATTERN SPECTRUM, SEE REF.:
        # J.Serra, Image Analysis and Mathematical Morphology, Vol. 1. Academic Press, London, 1989
        # Maragos,P. "Pattern spectrum and multiscale shape representation", IEEE Transactions on Pattern Analysis and Machine Intelligence, 11, N 7, pp. 701-716, 1989
        # L.Vincent "Granulometries and Opening Trees", Fundamenta Informaticae, 41, No. 1-2, pp. 57-90, IOS Press, 2000.
        # L.Vincent "Morphological Area Opening and Closing for Grayscale Images", Proc. NATO Shape in Picture Workshop, Driebergen, The Netherlands, pp. 197-208, 1992.
        # I.Ravkin, V.Temov "Bit representation techniques and image processing", Applied Informatics, v.14, pp. 41-90, Finances and Statistics, Moskow, 1988 (in Russian)
        # THIS IMPLEMENTATION INSTEAD OF OPENING USES EROSION FOLLOWED BY RECONSTRUCTION
        #
        ng = image.granular_spectrum_length.value
        startmean = np.mean(pixels[mask])
        ero = pixels.copy()
        # Mask the test image so that masked pixels will have no effect
        # during reconstruction
        #
        ero[~mask] = 0
        currentmean = startmean
        # Guard against division by zero for an all-dark image.
        startmean = max(startmean, np.finfo(float).eps)

        footprint = np.array([[False, True, False], [True, True, True],
                              [False, True, False]])
        statistics = [image.image_name.value]
        #
        # The mapping that restores a (possibly downsampled) image to
        # the original shape is loop-invariant: compute it once, not
        # per scale.  Scale so the mapping only references the index
        # range of the downsampled image.
        #
        orig_shape = im.pixel_data.shape
        ii, jj = np.mgrid[0:orig_shape[0], 0:orig_shape[1]].astype(float)
        ii *= float(new_shape[0] - 1) / float(orig_shape[0] - 1)
        jj *= float(new_shape[1] - 1) / float(orig_shape[1] - 1)
        for i in range(1, ng + 1):
            prevmean = currentmean
            ero = morph.grey_erosion(ero, mask=mask, footprint=footprint)
            rec = morph.grey_reconstruction(ero, pixels, footprint)
            currentmean = np.mean(rec[mask])
            # Fraction of the starting intensity removed at this scale.
            gs = (prevmean - currentmean) * 100 / startmean
            statistics += ["%.2f" % gs]
            feature = image.granularity_feature(i)
            measurements.add_image_measurement(feature, gs)
            #
            # Restore the reconstructed image to the shape of the
            # original image so we can match against object labels
            #
            rec = scind.map_coordinates(rec, (ii, jj), order=1)

            #
            # Calculate the means for the objects
            #
            for object_record in object_records:
                assert isinstance(object_record, ObjectRecord)
                if object_record.nobjects > 0:
                    new_mean = fix(
                        scind.mean(rec, object_record.labels,
                                   object_record.range))
                    gss = ((object_record.current_mean - new_mean) * 100 /
                           object_record.start_mean)
                    object_record.current_mean = new_mean
                else:
                    # No objects: record an empty measurement vector.
                    gss = np.zeros((0, ))
                measurements.add_measurement(object_record.name, feature, gss)
        return statistics