Example #1
def learn_flat(self, gts, feature_map, ws_is_gt, *args, **kwargs):
    """Collect training data from every edge of the current (flat) RAG."""
    if type(gts) != list:
        gts = [gts]  # allow using single ground truth as input
    # one contingency table per ground truth; each segment is assigned to
    # the ground-truth body it overlaps most
    ctables = [contingency_table(self.get_segmentation(), gt) for gt in gts]
    assignments = [(ct == ct.max(axis=1)[:, newaxis]) for ct in ctables]
    return map(array, zip(*[
            self.learn_edge(e, ctables, assignments, feature_map, ws_is_gt)
            for e in self.real_edges()]))
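For context: contingency_table(seg, gt) tallies, for every (segment, ground-truth body) pair, how much volume the two share, and the row-wise max above then assigns each segment to the ground-truth body it overlaps most. A minimal dense numpy sketch of that idea (gala's own contingency_table is sparse and may normalize the counts):

import numpy as np

def dense_contingency_table(seg, gt):
    # Count the voxels shared by every (segment, gt body) pair.
    # Dense sketch only; gala's version is sparse and may normalize.
    table = np.zeros((seg.max() + 1, gt.max() + 1))
    for s, g in zip(seg.ravel(), gt.ravel()):
        table[s, g] += 1
    return table

seg = np.array([[1, 1, 2],
                [1, 2, 2]])
gt = np.array([[1, 1, 1],
               [2, 2, 2]])
ct = dense_contingency_table(seg, gt)
# True where a segment overlaps a gt body at least as much as any other
assignment = ct == ct.max(axis=1)[:, np.newaxis]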
Example #2
def best_possible_segmentation(ws, gt):
    """Build the best possible segmentation given a superpixel map."""
    cnt = contingency_table(ws, gt)
    assignment = cnt == cnt.max(axis=1)[:,newaxis]
    # "hard" assignments: superpixels whose best overlap is tied between
    # two or more ground-truth bodies; these are currently ignored
    hard_assignment = where(assignment.sum(axis=1) > 1)[0]
    assignment[hard_assignment, :] = 0
    ws = Rag(ws)
    for gt_node in range(1,cnt.shape[1]):
        ws.merge_subgraph(where(assignment[:,gt_node])[0])
    return ws.get_segmentation()
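A hypothetical call on a toy over-segmentation (the import path is an assumption; in gala, this function lives in the same module as Rag):

import numpy as np
from gala import agglo  # assumed module path

# toy watershed: four superpixels over a 4x4 image
ws = np.array([[1, 1, 2, 2],
               [1, 1, 2, 2],
               [3, 3, 4, 4],
               [3, 3, 4, 4]])
# ground truth: the top and bottom halves are single bodies
gt = np.array([[1, 1, 1, 1],
               [1, 1, 1, 1],
               [2, 2, 2, 2],
               [2, 2, 2, 2]])
best = agglo.best_possible_segmentation(ws, gt)
# expect superpixels 1 and 2 to end up in one body, 3 and 4 in another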
Example #3
def set_ground_truth(self, gt=None):
    if gt is not None:
        # labels to ignore in the contingency table: 0 (if present) and
        # the new label used to pad the ground truth below
        gtm = gt.max() + 1
        gt_ignore = [0, gtm] if (gt == 0).any() else [gtm]
        seg_ignore = [0, self.boundary_body] if \
                    (self.segmentation == 0).any() else [self.boundary_body]
        self.gt = morpho.pad(gt, gt_ignore)
        self.rig = contingency_table(self.segmentation, self.gt)
        self.rig[:, gt_ignore] = 0
        self.rig[seg_ignore, :] = 0
    else:
        self.gt = None
        # null pattern to transparently allow merging of nodes.
        # Bonus feature: counts how many sp's went into a single node.
        try:
            self.rig = ones(self.watershed.max() + 1)
        except ValueError:
            self.rig = ones(self.number_of_nodes() + 1)
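A hypothetical use of this method, assuming a Rag built directly from a toy watershed; after the call, self.rig holds the contingency table with the ignored labels zeroed out:

import numpy as np
from gala import agglo  # assumed module path

ws = np.array([[1, 1, 2, 2],
               [3, 3, 4, 4]])   # toy superpixel map
gt = np.array([[1, 1, 1, 1],
               [2, 2, 2, 2]])   # toy ground truth
rag = agglo.Rag(ws)
rag.set_ground_truth(gt)
# rag.rig[i, j] now holds the overlap between segment i and gt body j,
# with the ignored (background / padding) rows and columns zeroed out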
Example #4
def learn_agglomerate(self, gts, feature_map, min_num_samples=1,
                      *args, **kwargs):
    """Agglomerate while comparing to ground truth & classifying merges."""
    learn_flat = kwargs.get('learn_flat', True)
    learning_mode = kwargs.get('learning_mode', 'strict').lower()
    labeling_mode = kwargs.get('labeling_mode', 'assignment').lower()
    priority_mode = kwargs.get('priority_mode', 'random').lower()
    memory = kwargs.get('memory', True)
    unique = kwargs.get('unique', True)
    max_numepochs = kwargs.get('max_numepochs', 10)
    if priority_mode == 'mean' and unique:
        max_numepochs = 2 if learn_flat else 1
    if priority_mode in ['random', 'mean'] and not memory:
        max_numepochs = 1
    # columns of the label array; labeling_mode picks one of them below
    label_type_keys = {'assignment': 0, 'vi-sign': 1,
                       'rand-sign': 2, 'boundary': 3}
    if type(gts) != list:
        gts = [gts]  # allow using single ground truth as input
    master_ctables = [contingency_table(self.get_segmentation(), gt)
                      for gt in gts]
    # Match the watershed to the ground truths (majority vote across gts)
    ws_is_gt = zeros_like(self.watershed).astype(float)
    for gt in gts:
        ws_is_gt += self.assign_gt_to_ws(gt)
    ws_is_gt /= float(len(gts))
    ws_is_gt = ws_is_gt > 0.5
    alldata = []
    # data[0] is used below as the feature matrix, data[1] as the labels
    data = [[], [], [], []]
    for numepochs in range(max_numepochs):
        ctables = deepcopy(master_ctables)
        if len(data[0]) > min_num_samples:
            break
        if learn_flat and numepochs == 0:
            alldata.append(self.learn_flat(gts, feature_map, ws_is_gt))
            data = unique_learning_data_elements(alldata) if unique else \
                   alldata[-1]
            continue
        g = self.copy()
        if priority_mode == 'mean':
            g.merge_priority_function = boundary_mean
        elif (numepochs > 0 and priority_mode == 'active') or \
             (numepochs % 2 == 1 and priority_mode == 'mixed'):
            cl = kwargs.get('classifier', RandomForest())
            cl = cl.fit(data[0], data[1][:, label_type_keys[labeling_mode]])
            if type(cl) == RandomForest:
                logging.info('classifier oob error: %.2f' % cl.oob)
            g.merge_priority_function = \
                classifier_probability(feature_map, cl)
        elif priority_mode == 'random' or \
             (priority_mode == 'active' and numepochs == 0):
            g.merge_priority_function = random_priority
        elif priority_mode == 'custom':
            g.merge_priority_function = kwargs.get('mpf', boundary_mean)
        # bug in MergeQueue usage causes progressbar crash
        g.show_progress = False
        g.rebuild_merge_queue()
        alldata.append(g._learn_agglomerate(ctables, feature_map, ws_is_gt,
                                            learning_mode, labeling_mode))
        if memory:
            if unique:
                data = unique_learning_data_elements(alldata)
            else:
                data = concatenate_data_elements(alldata)
        else:
            data = alldata[-1]
        logging.debug('data size %d at epoch %d' % (len(data[0]), numepochs))
    return data, alldata
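A hypothetical end-to-end training sketch built around this method. The module layout, the feature-manager setup, and the input file names are assumptions, and scikit-learn's RandomForestClassifier stands in for the RandomForest referenced above:

import numpy as np
from sklearn.ensemble import RandomForestClassifier  # stand-in classifier
from gala import agglo, features                     # assumed module layout

# hypothetical inputs: watershed oversegmentation, boundary-probability
# map, and ground-truth segmentation
ws = np.load('watershed.npy')
pr = np.load('membrane_prob.npy')
gt = np.load('ground_truth.npy')

fm = features.moments.Manager()   # assumed feature manager, used as feature_map
g = agglo.Rag(ws, pr, feature_manager=fm)
data, alldata = g.learn_agglomerate(gt, fm)
X = data[0]                       # edge feature vectors
y = data[1][:, 0]                 # 'assignment' labels (see label_type_keys)
rf = RandomForestClassifier().fit(X, y)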