Example 1
 def __call__(self, img, bgsource = None, methods = ['subtract'], **kwargs):
     if bgsource is None:
         logger.error(("No background image given to "
                 "RemoveBackgroundPreprocessor, doing nothing!"))
         return img
     if bgsource not in self.bgs:
         logger.debug("Loading background image {}".format(bgsource))
         # Load background image
         try:
             self.bgs[bgsource] = self.manager.loadimage(bgsource)
         except (IOError, fire.errors.ImageCannotBeLoadedError):
             logger.error(("Unable to load background image '{}', doing "
                          "nothing!").format(bgsource))
             return img
         # If preprocessing steps are not explicitly given, use the preprocessing
         # chain used for all images, up to the point where background is
         # removed
         if 'preprocsteps' not in kwargs:
             preprocsteps = self.get_previous_preprocsteps()
             if preprocsteps is None:
                 return img
         else:
             preprocsteps = kwargs['preprocsteps']
         self.bgs[bgsource] = self.manager.preprocess(self.bgs[bgsource],
                                                      preprocsteps)
         self.bgs_nonzero[bgsource] = self.bgs[bgsource].copy()
         self.bgs_nonzero[bgsource][self.bgs[bgsource] == 0.] = (0.01 *
                                                 self.bgs[bgsource].max())
         logger.debug("Background image {} loaded".format(bgsource))
Example 2
def showimg(img, cbar = False, cmap = plt.cm.Greys_r):
    logger.debug("Plotting image, dimensions: {}".format(img.shape))
    plt.figure()
    plt.imshow(img, interpolation = 'none', cmap = cmap)
    if cbar:
        plt.colorbar()
    plt.show()
Example 3
def saveimg(filename, img, normalise = True):
    """Save img to filename using colormap cmap."""
    logger.debug("Saving image to {}".format(filename))
    if normalise:
        # Rescale to 0-255 without modifying the caller's array (in-place
        # division would also fail for integer input)
        img = img - img.min()
        img = img / img.max()
        img = np.uint8(img * 255)
    from PIL import Image
    im = Image.fromarray(img)
    im.save(filename)
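
An illustrative call (values made up) showing what the normalisation does: the minimum of the image maps to 0 and the maximum to 255 before the 8-bit image is written:

import numpy as np

img = np.array([[0.2, 0.5],
                [0.8, 1.0]])
saveimg("out.png", img)   # stored pixel values: 0, 95, 191, 255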
Example 4
 def export(self, infile, detected, exportsteps):
     try:
         os.makedirs(os.path.dirname(fire.exporters.outfilebase(infile)))
     except OSError as e:
         if e.errno != 17:  # directory exists
             raise
     for ename, eargs in exportsteps:
         logger.debug("Calling exporter '{}' with arguments: {}".format(
                      ename, str(eargs)))
         self.exporters[ename](infile, detected, **eargs)
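
The magic number 17 in the guard above is errno.EEXIST; two equivalent, more explicit ways to tolerate an already existing output directory (paths are placeholders):

import errno
import os

try:
    os.makedirs("output/dir")
except OSError as e:
    if e.errno != errno.EEXIST:   # same check as `!= 17`, but self-documenting
        raise

# Or, on Python >= 3.2:
os.makedirs("output/dir", exist_ok=True)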
Example 5
    def __call__(
        self,
        img,
        prev_detected,
        seed_threshold=None,
        erode_factor=1,
        minsize=0,
        maxhole=0.5,
        checkQsize=0,
        minQ=0.5,
        **kwargs
    ):
        # Guess threshold if not given
        if seed_threshold is None:
            # Otsu needs int, convert to range 0-255
            normimg = (img - img.min()) / (img.max() - img.min())
            intimg = np.rint(normimg * 255).astype(np.uint8)
            intimg_threshold = self.mh.thresholding.otsu(intimg)
            # Convert received threshold back to image range
            seed_threshold = intimg_threshold / 255.0 * (img.max() - img.min()) + img.min()
            logger.debug("Guessed threshold (Otsu): {}".format(seed_threshold))
        # Find seeds
        binary = img > seed_threshold
        for i in range(erode_factor):
            binary = self.mh.morph.erode(binary)
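        # Presumably a debug aid: display the eroded seed mask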
        from fire.debug import showimg

        showimg(binary)
        seeds, nr_of_seeds = self.mh.label(binary)
        logger.debug("Number of watershed seeds: {}".format(nr_of_seeds))
        # Do watershed transformation
        labeled = self.mh.cwatershed(img.max() - img, seeds)
        logger.debug("Number of areas: {}".format(len(labeled)))
        # Extract sizes of area and filter for minsize
        sizes = np.bincount(labeled.flat)
        bigareas = np.argwhere(sizes > maxhole * minsize)
        logger.debug("With minimum size (holes not filled) {}: {}".format(maxhole * minsize, len(bigareas)))
        # Create detection info
        detected = NonOverlappingFreeAreasInfo(
            np.zeros(img.shape, dtype=NonOverlappingFreeAreasInfo.smallest_uint_type(len(bigareas)))
        )
        for bigarea in bigareas:
            filled = self.fill_holes(labeled == bigarea)
            size = np.count_nonzero(filled)
            if size < minsize:
                continue
            elif size < checkQsize and self.isoperimetric_quotient(filled) < minQ:
                continue
            else:
                detected.addarea(filled)
        logger.debug("Found {} areas".format(np.max(detected.labeledareas)))
        return [detected]
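
The minQ filter above relies on the isoperimetric quotient Q = 4*pi*A / P**2, which is 1 for a perfect disk and drops towards 0 for elongated or ragged regions. The project's isoperimetric_quotient helper is not shown in these examples; a rough sketch of the idea (perimeter estimated by counting boundary pixels with mahotas):

import numpy as np
import mahotas as mh

def isoperimetric_quotient_sketch(binary_area):
    """Roundness measure: 1.0 for a perfect disk, smaller for other shapes."""
    area = np.count_nonzero(binary_area)
    perimeter = mh.bwperim(binary_area).sum()   # crude pixel-count perimeter
    return 4 * np.pi * area / perimeter ** 2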
Example 6
 def processchain(self, img, procsteps):
     for p in self.processors.values():
         p.reinit()
     detected = []
     for pname, pargs in procsteps:
         logger.debug("Calling processor '{}' with arguments: {}".format(
                      pname, str(pargs)))
         detected.append(
             self.processors[pname](img, detected, **pargs)
             )
     return detected
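
Each processing step is a (name, kwargs) pair, matching the step lists built by loadconfig() further below. An illustrative chain (the keyword arguments are made up; 'threshold' and 'watershed' are keys from the default processors mapping, and `manager` and `img` are assumed to exist):

procsteps = [
    ('threshold', {'minsize': 50}),
    ('watershed', {}),
]
detected = manager.processchain(img, procsteps)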
Example 7
 def __call__(
     self,
     img,
     prev_detected,
     threshold=None,
     minsize=0,
     maxsize=None,
     steps_prelabel=[],
     steps_postlabel=[],
     **kwargs
 ):
     if threshold is None:
         threshold = self.otsu(img)
         logger.debug("Guessed threshold (Otsu): {}".format(threshold))
     binary = img > threshold
     for pdetected in prev_detected:
         for d in pdetected:
             binary[d.pixels()] = False
     self.backgroundinfos = []
     binary = self.do_steps(img, binary, steps_prelabel)
     # Label
     labeled, nr_labels = self.mh.label(binary)
     logger.debug("Number of areas: {}".format(nr_labels))
     # Filter for size requirements
     sizes = np.bincount(labeled.flat)
     considered_sizes = sizes >= minsize
     if maxsize is not None:
         considered_sizes = np.logical_and(considered_sizes, sizes <= maxsize)
     considered_labels = np.argwhere(considered_sizes)
     logger.debug("Meeting size requirements: {}".format(len(considered_labels)))
     # Create detection info
     detected = NonOverlappingFreeAreasInfo(
         np.zeros(img.shape, dtype=NonOverlappingFreeAreasInfo.smallest_uint_type(len(considered_labels)))
     )
     for area_label in considered_labels:
         if area_label == 0:
             continue  # ignore background
         thisarea = labeled == area_label
         try:
             thisarea = self.do_steps(img, thisarea, steps_postlabel)
         except self.RejectArea:
             continue
         detected.addarea(thisarea)
     logger.debug("Found {} areas".format(np.max(detected.labeledareas)))
     return self.backgroundinfos + [detected]
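
Both area processors size their label image with NonOverlappingFreeAreasInfo.smallest_uint_type. That helper is not part of these examples; presumably it picks the narrowest unsigned integer dtype that can hold the expected number of labels, roughly like this sketch:

import numpy as np

def smallest_uint_type_sketch(n_labels):
    """Pick the narrowest unsigned dtype able to represent n_labels labels."""
    for dtype in (np.uint8, np.uint16, np.uint32, np.uint64):
        if n_labels <= np.iinfo(dtype).max:
            return dtype
    raise ValueError("too many labels for a 64-bit label image")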
Example 8
    def __call__(self, img, prev_detected, **params):
        """Get list of circles in image using FCD.

        Args:
            img: Image to process (as nparray)
            gaussian: standard deviation for Gaussian kernel
            sobel: boolean value specifying whether to apply sobel
                filter
            minnorm: Gradient norm cutoff
            alpha: FCD gradient angle difference tolerance
            beta: FCD connecting line angle difference tolerance
            gamma: FCD relative gradient norm difference tolerance
            minr: Lower radius cutoff
            maxr: Upper radius cutoff
            radiusscaler: DBSCAN preprocessing radius coefficient
            minmembers: Minimum number of cluster members PER UNIT
                RADIUS for a cluster to be considered a detected circle
            epsilon: DBSCAN neighbourhood size
            minsamples: DBSCAN minimum cluster density in neighbourhood
            maxangspread: TODO DOCUMENT THIS

        Returns:
            A list of DiskInfos
        """
        # Copy given parameters and check for completeness
        self.p.update(params)
        if not self.all_params_set(params):
            logger.critical("First FCD parameter set must be complete")
            return []
        # Figure out if we can use some previous calculations
        startat = self.where_to_start(params)
        logger.debug("Starting at {}".format(startat))
        if self.img is None:
            self.img = np.copy(img)
        if self.neighbourhood_max_img is None and self.p["mincenterlevel"] is not None:
            self.neighbourhood_max_img = self.generic_filter(self.img, np.max, 3)
        if self.prev_detected is None:
            self.prev_detected = list(prev_detected)  # copy
        # Process
        firstnewcircle = len(self.allcircles)
        if startat <= self.PREPROCESS:
            logger.debug("Preprocessing image...")
            self.img_preproc = self.preprocess(self.img, self.p["gaussian"], self.p["sobel"])
        if startat <= self.GRADIENT:
            logger.debug("Computing gradient...")
            self.grad = self.gradient(self.img_preproc, prev_detected)
        if startat <= self.FINDCANDIDATES:
            logger.debug("Finding circle candidates...")
            self.candidates = self.findcandidates(
                self.grad,
                self.p["alpha"],
                self.p["beta"],
                self.p["gamma"],
                self.p["minnorm"],
                self.p["maxr"],
                self.p["mincenterlevel"],
            )
            logger.debug("Number of candidates: {}".format(len(self.candidates)))
        if startat <= self.CLUSTER:
            logger.debug("Clustering...")
            circles = self.cluster(
                self.candidates,
                self.p["minr"],
                self.p["maxr"],
                self.p["radiusscaler"],
                self.p["minmembers"],
                self.p["epsilon"],
                self.p["minsamples"],
                self.p["maxangspread"],
            )
            logger.debug("Number of detected circles: {}".format(len(circles)))
            # Shift circles by current offset and append index of this
            # parameter set to all circles
            if len(circles) > 0:
                newcircles = np.hstack((circles, self.paramindex * np.ones((circles.shape[0], 1))))
                self.allcircles = np.append(self.allcircles, newcircles, axis=0)
            # OLD:
            #   All detected circles, EXCEPT THE ONES JUST DETECTED IN THIS
            #   RUN, should be removed for future calls to findcandidates
            # NOW:
            #   All detected circles are removed from the gradient map for
            #   future calls to findcandidates
            # return allcircles
            logger.debug("Cleaning circle list...")
            self.allcircles = self.cleancirclelist(self.allcircles)
            logger.debug("Number of new circles: {}".format(len(self.allcircles) - firstnewcircle))
            self.paramindex += 1
        return [DiskInfo(c[0], c[1], c[2]) for c in self.allcircles[firstnewcircle:]]
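
Illustrative call pattern for the processor above (all parameter values are invented): the first call must provide the complete FCD parameter set, while later calls may change only a subset, so that where_to_start() can presumably reuse the cached preprocessing, gradient and candidate stages:

fcd = FCDProcessor(manager=manager)   # constructed as in loadconfig() below
circles_a = fcd(img, [], gaussian=2.0, sobel=True, minnorm=0.1,
                alpha=0.2, beta=0.2, gamma=0.2, minr=5, maxr=50,
                radiusscaler=1.0, minmembers=4, epsilon=1.5,
                minsamples=3, maxangspread=0.5, mincenterlevel=None)
# Only clustering parameters change, so the earlier stages are not recomputed:
circles_b = fcd(img, [], epsilon=2.0, minsamples=4)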
Example 9
 def __call__(self, img, prev_detected, **params):
     if "tiles" in params:
         params["tiles_v"] = params["tiles"]
         params["tiles_h"] = params["tiles"]
     elif not ("tiles_v" in params and "tiles_h" in params):
         logger.error(
             (
                 "Invalid tiling information given to "
                 "TiledFCDProcessor. Provide either 'tiles' or both 'tiles_h' "
                 "and 'tiles_v'."
             )
         )
         return []
     # Copy given parameters and check for completeness
     self.p.update(params)
     if not self.all_params_set(params):
         logger.critical("First FCD parameter set must be complete")
         return []
     if self.img is None:
         self.img = np.copy(img)
     if self.neighbourhood_max_img is None and self.p["mincenterlevel"] is not None:
         self.neighbourhood_max_img = self.generic_filter(self.img, np.max, 3)
     if self.prev_detected is None:
         self.prev_detected = list(prev_detected)  # copy
     # Process
     firstnewcircle = len(self.allcircles)
     logger.debug("Preprocessing image...")
     img_preproc = self.preprocess(self.img, self.p["gaussian"], self.p["sobel"])
     logger.debug("Computing gradient...")
     grads = self.tiled_gradients(
         img_preproc, prev_detected, self.p["tiles_v"], self.p["tiles_h"], self.p["maxr"] + 3
     )
     gradcircles = []
     for i, grad in enumerate(grads):
         logger.debug("Tile {} / {}".format(i + 1, len(grads)))
         logger.debug("Finding circle candidates...")
         candidates = self.findcandidates(
             grad,
             self.p["alpha"],
             self.p["beta"],
             self.p["gamma"],
             self.p["minnorm"],
             self.p["maxr"],
             self.p["mincenterlevel"],
         )
         logger.debug("Number of candidates: {}".format(len(candidates)))
         logger.debug("Clustering...")
         circles = self.cluster(
             candidates,
             self.p["minr"],
             self.p["maxr"],
             self.p["radiusscaler"],
             self.p["minmembers"],
             self.p["epsilon"],
             self.p["minsamples"],
             self.p["maxangspread"],
         )
         logger.debug("Number of detected circles: {}".format(len(circles)))
         # Attach index of this tile to circles
         circles = np.hstack((circles, i * np.ones((circles.shape[0], 1))))
         gradcircles.append(circles)
     logger.debug("Cleaning circle list...")
     self.totmergers = 0
     self.totoverl = 0
     for i in range(self.p["tiles_v"] - 1):
         for j in range(self.p["tiles_h"] - 1):
             logger.debug(
                 "Circle group {} / {}".format(
                     i * (self.p["tiles_h"] - 1) + j + 1, (self.p["tiles_v"] - 1) * (self.p["tiles_h"] - 1)
                 )
             )
             tileindices = (
                 i * self.p["tiles_h"] + j,
                 i * self.p["tiles_h"] + j + 1,
                 (i + 1) * self.p["tiles_h"] + j,
                 (i + 1) * self.p["tiles_h"] + j + 1,
             )
             # Put circles of four-tile-groups into same list and clean
             groupcircles = np.concatenate(tuple(gradcircles[ti] for ti in tileindices))
             groupcircles = self.cleancirclelist(groupcircles)
             # Split returned list back into corresponding gradcircles
             for x in tileindices:
                 gradcircles[x] = groupcircles[groupcircles[:, -1] == x]
     logger.debug("Total mergers: {}".format(self.totmergers))
     logger.debug("Total overlaps: {}".format(self.totoverl))
     newcircles = np.concatenate(gradcircles)
     logger.debug("Number of new circles: {}".format(len(newcircles)))
     self.allcircles = np.concatenate((self.allcircles, newcircles))
     return [DiskInfo(c[0], c[1], c[2]) for c in self.allcircles[firstnewcircle:]]
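
A worked example of the four-tile stitching groups above: assuming tiled_gradients numbers tiles row by row (which the index arithmetic implies), every group of four mutually adjacent tiles is cleaned together so circles near internal tile borders get merged and deduplicated:

# Tile layout for tiles_v = 2, tiles_h = 3 (row-major numbering assumed):
#   0 1 2
#   3 4 5
tiles_v, tiles_h = 2, 3
for i in range(tiles_v - 1):
    for j in range(tiles_h - 1):
        group = (i * tiles_h + j, i * tiles_h + j + 1,
                 (i + 1) * tiles_h + j, (i + 1) * tiles_h + j + 1)
        print(group)   # (0, 1, 3, 4), then (1, 2, 4, 5)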
Example 10
 def cleancirclelist(self, circles):
     """Return list containing no overlapping circles."""
     # Sorting circles by radius (largest to smallest) is currently disabled:
     # circles = circles[circles[:,2].argsort()[::-1]]
     removelist = []
     # TODO: Might want to rewrite below using pdist (sparse distance matrix)
     # https://stackoverflow.com/questions/5323818/condensed-matrix-function-to-find-pairs/14839010#14839010
     dists = self.cdist(circles[:, :2], circles[:, :2])
     # Get boolean matrix of circle pairs whose centre distance is smaller
     # than 10% of the radius of either circle (made symmetric by the
     # mirroring below)
     mergecandidates = dists < 0.1 * circles[:, 2]
     # Mirror along diagonal with logical or
     mergecandidates = np.logical_or(mergecandidates, mergecandidates.T)
     # Erase lower triangle and diagonal
     mergecandidates = np.triu(mergecandidates, 1)
     # Convert to indices
     mergecandidates = np.transpose(np.where(mergecandidates))
     # Get these where radii are roughly (< 10% difference) the same
     mergecandidates = mergecandidates[
         np.where(circles[mergecandidates[:, 0], 2] / circles[mergecandidates[:, 1], 2] > 0.90)
     ]
     mergecandidates = mergecandidates[
         np.where(circles[mergecandidates[:, 0], 2] / circles[mergecandidates[:, 1], 2] < 1.11)
     ]
     logger.debug("{} mergers".format(len(mergecandidates)))
     self.totmergers += len(mergecandidates)
     # Merge remaining candidates
     for c1_index, c2_index in mergecandidates:
         c1, c2 = circles[c1_index], circles[c2_index]
         c1[:3] = (c1[:3] * c1[3] + c2[:3] * c2[3]) / (c1[3] + c2[3])
         if dists[c1_index, c2_index] > 0.5:
             c1[3] += c2[3]
         else:
             # Probably duplicates from stitching our tiles
             c1[3] = max(c1[3], c2[3])
         c1[4] = min(c1[4], c2[4])
         removelist.append(c2_index)
     circles = np.delete(circles, removelist, axis=0)
     dists = np.delete(dists, removelist, axis=0)
     dists = np.delete(dists, removelist, axis=1)
     removelist = []
     # Similar to above, except now get all circles that overlap
     overlaplist = dists - circles[:, 2] - circles[:, 2][:, np.newaxis] < -1
     overlaplist = np.triu(overlaplist, 1)
     overlaplist = np.transpose(np.where(overlaplist))
     logger.debug("{} overlaps".format(len(overlaplist)))
     self.totoverl += len(overlaplist)
     # Count number of occurrences for each circle (no matter if c1 or c2)
     counts = np.bincount(overlaplist.flat)
     # Remove all circles that have two or more overlaps
     while len(counts) > 0 and counts.max() > 1:
         badcircle = counts.argmax()
         removelist.append(badcircle)
         # Remove all rows from overlaplist that contain badcircle
         # http://stackoverflow.com/questions/11453141/
         overlaplist = overlaplist[~(overlaplist == badcircle).any(axis=1)]
         counts = np.bincount(overlaplist.flat)
     # All remaining pairs are overlaps of only two circles
     for c1_index, c2_index in overlaplist:
         c1, c2 = circles[c1_index], circles[c2_index]
         # Remove circle with fewer members
         if c1[3] > c2[3]:
             removelist.append(c2_index)
         else:
             removelist.append(c1_index)
     return np.delete(circles, removelist, axis=0)
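
A worked example of the merge step: from the indexing above, columns 0-1 appear to hold the circle centre, column 2 the radius, column 3 the number of cluster members and column 4 some per-circle score (the last two are inferred, not documented). Merging weights centre and radius by member count:

import numpy as np

c1 = np.array([10.0, 10.0, 5.0, 30.0, 0.8])   # x, y, r, members, score (assumed layout)
c2 = np.array([10.4, 10.2, 5.1, 10.0, 0.6])
c1[:3] = (c1[:3] * c1[3] + c2[:3] * c2[3]) / (c1[3] + c2[3])
print(c1[:3])   # approximately [10.1, 10.05, 5.025]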
Example 11
 def loadconfig(self, configfile):
     logger.debug("Loading configuration from '{}'".format(configfile))
     # Lazy load yaml so we don't require it
     if configfile.endswith('.yml') or configfile.endswith('.yaml'):
         import yaml
         with open(configfile, 'r') as f:
             cfg = yaml.safe_load(f)
     else:
         # FIXME: Assume JSON?
         logger.critical("Unknown config file type")
         raise NotImplementedError
     # Set default modules if none given:
     if 'imgloaders' not in cfg:
         cfg['imgloaders'] = [
             'fire.imageloaders.MahotasImageLoader',
             ]
     if 'preprocessors' not in cfg:
         cfg['preprocessors'] = {
             'crop': 'fire.preprocessors.CropPreprocessor',
             'makefloat': 'fire.preprocessors.MakeFloatPreprocessor',
             'greyscale': 'fire.preprocessors.GreyscalePreprocessor',
             'removebg': 'fire.preprocessors.RemoveBackgroundPreprocessor',
             }
     if 'processors' not in cfg:
         cfg['processors'] = {
             'threshold': 'fire.processors.ThresholdProcessor',
             'watershed': 'fire.processors.WatershedProcessor',
             'fcd': 'fire.processors.FCDProcessor',
             'tiledfcd': 'fire.processors.TiledFCDProcessor',
             'diskextend': 'fire.processors.DiskExtendProcessor',
             'removeouter': 'fire.processors.RemoveOuterProcessor',
             'dirtyremoveedges': 'fire.processors.DirtyRemoveEdgesProcessor',
             }
     if 'exporters' not in cfg:
         cfg['exporters'] = {
             'string': 'fire.exporters.StringExporter',
             'save': 'fire.exporters.SaveExporter',
             }
     # Load and set config
     for what in ('imgloaders', ):
         instances = []
         for modclass in cfg[what]:
             modulename, classname = modclass.rsplit('.', 1)
             mod = import_module(modulename)
             instances.append(getattr(mod, classname)(manager = self))
         setattr(self, what, instances)
     for what in ('preprocessors', 'processors', 'exporters'):
         instances = {}
         for modname, modclass in cfg[what].items():
             modulename, objectname = modclass.rsplit('.', 1)
             mod = import_module(modulename)
             obj = getattr(mod, objectname)
             if isinstance(obj, type):
                 # obj is a class and should be initialised
                 instances[modname] = obj(manager = self)
             else:
                 # Assume that obj is a function
                 instances[modname] = obj
         setattr(self, what, instances)
     for what in ('preprocsteps', 'procsteps', 'exportsteps'):
         steplist = []
         for rawstep in cfg[what]:
             if isinstance(rawstep, dict):
                 stepname = next(iter(rawstep))
                 steplist.append((stepname, {} if rawstep[stepname] is None
                                  else rawstep[stepname]))
             else:
                 steplist.append((rawstep, {}))
         setattr(self, what, steplist)
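
For reference, a minimal configuration matching the parsing above, written as the dict a YAML file would load to (module paths are the defaults from the code; step names and arguments are illustrative):

cfg = {
    'imgloaders': ['fire.imageloaders.MahotasImageLoader'],
    'preprocessors': {'makefloat': 'fire.preprocessors.MakeFloatPreprocessor'},
    'processors': {'threshold': 'fire.processors.ThresholdProcessor'},
    'exporters': {'string': 'fire.exporters.StringExporter'},
    # Step lists: each entry is either a bare step name or a one-key mapping
    # of step name to its keyword arguments (None means no arguments).
    'preprocsteps': ['makefloat'],
    'procsteps': [{'threshold': {'minsize': 50}}],
    'exportsteps': ['string'],
}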
Example 12
 def preprocess(self, img, preprocsteps):
     for ppname, ppargs in preprocsteps:
         logger.debug("Calling preprocessor '{}' with arguments: {}".format(
                      ppname, str(ppargs)))
         img = self.preprocessors[ppname](img, **ppargs)
     return img
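
Illustrative use of the preprocessing chain, in the same (name, kwargs) format built by loadconfig(); step names must be keys of self.preprocessors, and the crop arguments below are hypothetical:

preprocsteps = [
    ('makefloat', {}),
    ('crop', {'left': 10, 'top': 10}),   # hypothetical CropPreprocessor arguments
]
img = manager.preprocess(img, preprocsteps)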