def _updateHypothesesGraph(self, arcFlowMap):
    """
    After running merger resolving, insert new nodes, remove de-merged nodes and
    also update the links in the hypotheses graph. This also stores the new
    solution (`value` property) in the new nodes and links.
    """
    # update nodes
    for n in self.unresolvedGraph.nodes_iter():
        # skip non-mergers
        if 'newIds' not in self.unresolvedGraph.node[n] or len(self.unresolvedGraph.node[n]['newIds']) < 2:
            continue

        # for this merger, insert all new nodes into the HG
        assert len(self.unresolvedGraph.node[n]['newIds']) == self.unresolvedGraph.node[n]['count']
        for newId, fit in zip(self.unresolvedGraph.node[n]['newIds'], self.unresolvedGraph.node[n]['fits']):
            traxel = Traxel()
            traxel.Id = newId
            traxel.Timestep = n[0]
            traxel.Features = self._fitToRegionCenter(fit)
            self.hypothesesGraph.addNodeFromTraxel(traxel, value=1)

        # remove merger from HG, which also removes all edges that would otherwise be dangling
        self.hypothesesGraph._graph.remove_node(n)

    # add new links
    for edge in self.resolvedGraph.edges_iter():
        srcId = self.resolvedGraph.node[edge[0]]['id']
        destId = self.resolvedGraph.node[edge[1]]['id']
        value = arcFlowMap[(srcId, destId)]
        self.hypothesesGraph._graph.add_edge(edge[0], edge[1], value=value)
def test_insertEnergies():
    skipLinkBias = 20
    h = hg.HypothesesGraph()
    h._graph.add_path([(0, 1), (1, 1), (2, 1), (3, 1)])
    for uuid, i in enumerate([(0, 1), (1, 1), (2, 1), (3, 1)]):
        t = Traxel()
        t.Timestep = i[0]
        t.Id = i[1]
        # fill in detProb, divProb, and center of mass
        t.Features['detProb'] = [0.2, 0.8]
        t.Features['divProb'] = [0.2, 0.8]
        t.Features['com'] = [float(i[0]), 0.0]
        h._graph.node[i]['traxel'] = t
        h._graph.node[i]['id'] = uuid

    # set up some dummy functions to compute probabilities from a traxel
    def detProbFunc(traxel):
        return traxel.Features['detProb']

    def divProbFunc(traxel):
        return traxel.Features['divProb']

    def boundaryCostFunc(traxel, forAppearance):
        return 1.0

    def transProbFunc(traxelA, traxelB):
        dist = np.linalg.norm(np.array(traxelA.Features['com']) - np.array(traxelB.Features['com']))
        return [1.0 - np.exp(-dist), np.exp(-dist)]

    h.insertEnergies(1, detProbFunc, transProbFunc, boundaryCostFunc, divProbFunc, skipLinkBias)

    for n in h.nodeIterator():
        assert 'features' in h._graph.node[n]
        assert h._graph.node[n]['features'] == [[1.6094379124341003], [0.22314355131420971]]
        assert 'divisionFeatures' in h._graph.node[n]
        assert h._graph.node[n]['divisionFeatures'] == [[1.6094379124341003], [0.22314355131420971]]
        assert 'appearanceFeatures' in h._graph.node[n]
        assert h._graph.node[n]['appearanceFeatures'] == [[0.0], [1.0]]
        assert 'disappearanceFeatures' in h._graph.node[n]
        assert h._graph.node[n]['disappearanceFeatures'] == [[0.0], [1.0]]

    for a in h.arcIterator():
        assert 'features' in h._graph.edge[a[0]][a[1]]
        srcTraxel = h._graph.node[h.source(a)]['traxel']
        destTraxel = h._graph.node[h.target(a)]['traxel']
        frame_gap = destTraxel.Timestep - srcTraxel.Timestep
        assert h._graph.edge[a[0]][a[1]]['features'] == [[0.45867514538708193],
                                                         [1.0 + skipLinkBias * (frame_gap - 1)]]
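# Context for the constants asserted above (an illustrative sketch, not part of the
# original test): the expected feature values appear to be negative log-probabilities
# of the dummy probability functions. The node features match -ln(0.2) and -ln(0.8)
# from the detProb/divProb of [0.2, 0.8], and the transition feature matches
# -ln(1 - exp(-1)) for the unit distance between consecutive centers of mass.
# The standalone check below reproduces those numbers.
import numpy as np

assert np.isclose(-np.log(0.2), 1.6094379124341003)
assert np.isclose(-np.log(0.8), 0.22314355131420971)
dist = 1.0  # distance between the 'com' values of consecutive frames in the test above
assert np.isclose(-np.log(1.0 - np.exp(-dist)), 0.45867514538708193)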
def test_trackletgraph():
    h = hg.HypothesesGraph()
    h._graph.add_path([(0, 1), (1, 1), (2, 1), (3, 1)])
    for i in [(0, 1), (1, 1), (2, 1), (3, 1)]:
        t = Traxel()
        t.Timestep = i[0]
        t.Id = i[1]
        h._graph.node[i]['traxel'] = t

    t = h.generateTrackletGraph()
    assert t.countArcs() == 1
    assert t.countNodes() == 2
    assert 'tracklet' in t._graph.node[(0, 1)]
def _updateHypothesesGraph(self, arcFlowMap):
    """
    After running merger resolving, insert new nodes, remove de-merged nodes and
    also update the links in the hypotheses graph. This also stores the new
    solution (`value` property) in the new nodes and links.
    """
    # update nodes
    for n in self.unresolvedGraph.nodes_iter():
        # skip non-mergers
        if 'newIds' not in self.unresolvedGraph.node[n] or len(self.unresolvedGraph.node[n]['newIds']) < 2:
            continue

        # for this merger, insert all new nodes into the HG
        assert len(self.unresolvedGraph.node[n]['newIds']) == self.unresolvedGraph.node[n]['count']
        for newId, fit in zip(self.unresolvedGraph.node[n]['newIds'], self.unresolvedGraph.node[n]['fits']):
            traxel = Traxel()
            traxel.Id = newId
            traxel.Timestep = n[0]
            traxel.Features = {'com': self._fitToRegionCenter(fit)}
            self.hypothesesGraph.addNodeFromTraxel(traxel, value=1, mergerValue=n[1], divisionValue=False)

        # remove merger from HG, which also removes all edges that would otherwise be dangling
        self.hypothesesGraph._graph.remove_node(n)

    # add new links only for merger nodes
    for edge in self.resolvedGraph.edges_iter():
        # add new edges that are connected to new merger nodes
        if 'mergerValue' in self.hypothesesGraph._graph.node[edge[0]] or 'mergerValue' in self.hypothesesGraph._graph.node[edge[1]]:
            srcId = self.resolvedGraph.node[edge[0]]['id']
            destId = self.resolvedGraph.node[edge[1]]['id']
            edgeValue = arcFlowMap[(srcId, destId)]

            # edges connected to mergers are set to "not used" in order to prevent multiple active
            # outgoing edges from single nodes. The correct edges will be added later.
            if edgeValue > 0 and not self.resolvedGraph.node[edge[0]]['division']:
                for outEdge in self.hypothesesGraph._graph.out_edges(edge[0]):
                    self.hypothesesGraph._graph.edge[outEdge[0]][outEdge[1]]['value'] = 0
                for inEdge in self.hypothesesGraph._graph.in_edges(edge[1]):
                    self.hypothesesGraph._graph.edge[inEdge[0]][inEdge[1]]['value'] = 0

            # add new edge connected to merger node
            self.hypothesesGraph._graph.add_edge(edge[0], edge[1], value=edgeValue)
def _updateHypothesesGraph(self, arcFlowMap):
    """
    After running merger resolving, insert new nodes, remove de-merged nodes and
    also update the links in the hypotheses graph. This also stores the new
    solution (`value` property) in the new nodes and links.
    """
    # update nodes
    for n in self.unresolvedGraph.nodes_iter():
        # skip non-mergers
        if 'newIds' not in self.unresolvedGraph.node[n] or len(self.unresolvedGraph.node[n]['newIds']) < 2:
            continue

        # for this merger, insert all new nodes into the HG
        assert len(self.unresolvedGraph.node[n]['newIds']) == self.unresolvedGraph.node[n]['count']
        for newId, fit in zip(self.unresolvedGraph.node[n]['newIds'], self.unresolvedGraph.node[n]['fits']):
            traxel = Traxel()
            traxel.Id = newId
            traxel.Timestep = n[0]
            traxel.Features = {'com': self._fitToRegionCenter(fit)}
            self.hypothesesGraph.addNodeFromTraxel(traxel, value=1, mergerValue=True, divisionValue=False)

        # remove merger from HG, which also removes all edges that would otherwise be dangling
        self.hypothesesGraph._graph.remove_node(n)

    # add new links only for merger nodes
    for edge in self.resolvedGraph.edges_iter():
        # add new edges that are connected to new merger nodes
        if 'mergerValue' in self.hypothesesGraph._graph.node[edge[0]] or 'mergerValue' in self.hypothesesGraph._graph.node[edge[1]]:
            srcId = self.resolvedGraph.node[edge[0]]['id']
            destId = self.resolvedGraph.node[edge[1]]['id']
            edgeValue = arcFlowMap[(srcId, destId)]

            # edges connected to mergers are set to "not used" in order to prevent multiple active
            # outgoing edges from single nodes. The correct edges will be added later.
            if edgeValue > 0 and not self.resolvedGraph.node[edge[0]]['division']:
                for outEdge in self.hypothesesGraph._graph.out_edges(edge[0]):
                    self.hypothesesGraph._graph.edge[outEdge[0]][outEdge[1]]['value'] = 0
                for inEdge in self.hypothesesGraph._graph.in_edges(edge[1]):
                    self.hypothesesGraph._graph.edge[inEdge[0]][inEdge[1]]['value'] = 0

            # add new edge connected to merger node
            self.hypothesesGraph._graph.add_edge(edge[0], edge[1], value=edgeValue)
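# Illustrative sketch (not part of the original module; hypothetical node IDs and
# values, and it assumes the same networkx 1.x-style API used above): `arcFlowMap`
# is keyed by (srcId, destId) pairs of node UUIDs (the per-node 'id' attribute) and
# holds the flow assigned to each link by the merger-resolving solver. That flow is
# what ends up in the 'value' attribute of the re-inserted edges.
import networkx as nx

resolvedGraph = nx.DiGraph()
resolvedGraph.add_node((0, 1), id=3)   # merger at timestep 0
resolvedGraph.add_node((1, 4), id=7)   # de-merged object a at timestep 1
resolvedGraph.add_node((1, 5), id=8)   # de-merged object b at timestep 1
resolvedGraph.add_edge((0, 1), (1, 4))
resolvedGraph.add_edge((0, 1), (1, 5))

arcFlowMap = {(3, 7): 1, (3, 8): 0}    # hypothetical solver output per UUID pair

hypothesesGraph = nx.DiGraph()
for src, dest in resolvedGraph.edges_iter():
    srcId = resolvedGraph.node[src]['id']
    destId = resolvedGraph.node[dest]['id']
    hypothesesGraph.add_edge(src, dest, value=arcFlowMap[(srcId, destId)])

assert hypothesesGraph.edge[(0, 1)][(1, 4)]['value'] == 1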
def toHypothesesGraph(self):
    '''
    From a json graph representation (and possibly a json result), set up a
    hypotheses graph with the respective links.

    WARNING: only builds the structure of the graph at the moment,
             features/probabilities are not inserted!
    WARNING: builds the trackletgraph, not the full graph!
    '''
    from hytra.core.hypothesesgraph import HypothesesGraph
    from hytra.core.probabilitygenerator import Traxel

    # set up graph
    hypothesesGraph = HypothesesGraph()
    for s in self.model['segmentationHypotheses']:
        tracklet = self.uuidToTraxelMap[s['id']]
        assert len(tracklet) > 0
        traxel = Traxel()
        traxel.Timestep = tracklet[0][0]
        traxel.Id = tracklet[0][1]
        hypothesesGraph.addNodeFromTraxel(traxel, tracklet=tracklet)
        # adding nodes automatically assigns UUIDs, we replace them by the loaded one
        hypothesesGraph._graph.node[(traxel.Timestep, traxel.Id)]['id'] = s['id']

    # insert edges
    for l in self.model['linkingHypotheses']:
        try:
            srcTracklet = self.uuidToTraxelMap[l['src']]
            destTracklet = self.uuidToTraxelMap[l['dest']]
        except KeyError:
            getLogger().warning(
                "Failed finding {} from JSON['linkingHypotheses'] in uuidToTraxelMap".format((l['src'], l['dest'])))
            continue  # skip links whose endpoints could not be resolved
        hypothesesGraph._graph.add_edge((srcTracklet[0][0], srcTracklet[0][1]),
                                        (destTracklet[0][0], destTracklet[0][1]))

    # insert result
    if self.result is not None:
        hypothesesGraph.insertSolution(self.result)

    return hypothesesGraph
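# Illustrative sketch of the inputs toHypothesesGraph relies on (assumed minimal
# shapes, not taken verbatim from a real model file): 'segmentationHypotheses'
# entries carry an 'id', 'linkingHypotheses' entries carry 'src'/'dest' UUIDs, and
# uuidToTraxelMap resolves each UUID to the list of (timestep, objectId) pairs of
# its tracklet. Real model files typically contain further fields (features,
# settings) that this method ignores.
exampleModel = {
    'segmentationHypotheses': [{'id': 0}, {'id': 1}],
    'linkingHypotheses': [{'src': 0, 'dest': 1}],
}
exampleUuidToTraxelMap = {
    0: [(0, 1)],  # UUID 0 is the tracklet containing object 1 at timestep 0
    1: [(1, 1)],  # UUID 1 is the tracklet containing object 1 at timestep 1
}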
def _generate_traxelstore(self, time_range, x_range, y_range, z_range, size_range,
                          x_scale=1.0, y_scale=1.0, z_scale=1.0,
                          with_div=False, with_local_centers=False, with_classifier_prior=False):
    logger.info("generating traxels")
    self.progressVisitor.showState("Object features")
    self.progressVisitor.showProgress(0)

    traxelstore = ProbabilityGenerator()

    logger.info("fetching region features and division probabilities")
    feats = self.ObjectFeatures(time_range).wait()

    if with_div:
        if not self.DivisionProbabilities.ready() or len(self.DivisionProbabilities([0]).wait()[0]) == 0:
            msgStr = "\nDivision classifier has not been trained! " + \
                     "Uncheck divisible objects if your objects don't divide or " + \
                     "go back to the Division Detection applet and train it."
            raise DatasetConstraintError("Tracking", msgStr)
        self.progressVisitor.showState("Division probabilities")
        self.progressVisitor.showProgress(0)
        divProbs = self.DivisionProbabilities(time_range).wait()

    if with_local_centers:
        localCenters = self.RegionLocalCenters(time_range).wait()

    if with_classifier_prior:
        if not self.DetectionProbabilities.ready() or len(self.DetectionProbabilities([0]).wait()[0]) == 0:
            msgStr = "\nObject count classifier has not been trained! " + \
                     "Go back to the Object Count Classification applet and train it."
            raise DatasetConstraintError("Tracking", msgStr)
        self.progressVisitor.showState("Detection probabilities")
        self.progressVisitor.showProgress(0)
        detProbs = self.DetectionProbabilities(time_range).wait()

    logger.info("filling traxelstore")

    filtered_labels = {}
    total_count = 0
    empty_frame = False
    numTimeStep = len(list(feats.keys()))
    countT = 0

    stepStr = "Creating traxel store"
    self.progressVisitor.showState(stepStr + " ")

    for t in list(feats.keys()):
        countT += 1
        self.progressVisitor.showProgress(old_div(countT, float(numTimeStep)))

        rc = feats[t][default_features_key]['RegionCenter']
        lower = feats[t][default_features_key]['Coord<Minimum>']
        upper = feats[t][default_features_key]['Coord<Maximum>']
        if rc.size:
            rc = rc[1:, ...]
            lower = lower[1:, ...]
            upper = upper[1:, ...]

        ct = feats[t][default_features_key]['Count']
        if ct.size:
            ct = ct[1:, ...]

        logger.debug("at timestep {}, {} traxels found".format(t, rc.shape[0]))
        count = 0
        filtered_labels_at = []
        for idx in range(rc.shape[0]):
            traxel = Traxel()

            # for 2d data, set z-coordinate to 0:
            if len(rc[idx]) == 2:
                x, y = rc[idx]
                z = 0
                x_lower, y_lower = lower[idx]
                x_upper, y_upper = upper[idx]
                z_lower = 0
                z_upper = 0
            elif len(rc[idx]) == 3:
                x, y, z = rc[idx]
                x_lower, y_lower, z_lower = lower[idx]
                x_upper, y_upper, z_upper = upper[idx]
            else:
                raise DatasetConstraintError(
                    "Tracking", "The RegionCenter feature must have dimensionality 2 or 3.")

            size = ct[idx]

            if (x_upper < x_range[0] or x_lower >= x_range[1] or
                    y_upper < y_range[0] or y_lower >= y_range[1] or
                    z_upper < z_range[0] or z_lower >= z_range[1] or
                    size < size_range[0] or size >= size_range[1]):
                filtered_labels_at.append(int(idx + 1))
                continue
            else:
                count += 1

            traxel.Id = int(idx + 1)
            traxel.Timestep = int(t)
            traxel.set_x_scale(x_scale)
            traxel.set_y_scale(y_scale)
            traxel.set_z_scale(z_scale)

            # Expects always 3 coordinates, z=0 for 2d data
            traxel.add_feature_array("com", 3)
            for i, v in enumerate([x, y, z]):
                traxel.set_feature_value('com', i, float(v))

            traxel.add_feature_array("CoordMinimum", 3)
            for i, v in enumerate(lower[idx]):
                traxel.set_feature_value("CoordMinimum", i, float(v))

            traxel.add_feature_array("CoordMaximum", 3)
            for i, v in enumerate(upper[idx]):
                traxel.set_feature_value("CoordMaximum", i, float(v))

            if with_div:
                traxel.add_feature_array("divProb", 2)
                # idx+1 because rc and ct start from 1, divProbs starts from 0
                prob = float(divProbs[t][idx + 1][1])
                if prob < 0.0000001:
                    prob = 0.0000001
                if prob > 0.99999999:
                    prob = 0.99999999
                traxel.set_feature_value("divProb", 0, 1.0 - prob)
                traxel.set_feature_value("divProb", 1, prob)

            if with_classifier_prior:
                traxel.add_feature_array("detProb", len(detProbs[t][idx + 1]))
                for i, v in enumerate(detProbs[t][idx + 1]):
                    val = float(v)
                    if val < 0.0000001:
                        val = 0.0000001
                    if val > 0.99999999:
                        val = 0.99999999
                    traxel.set_feature_value("detProb", i, float(val))

            # FIXME: check whether it is 2d or 3d data!
            if with_local_centers:
                traxel.add_feature_array("localCentersX", len(localCenters[t][idx + 1]))
                traxel.add_feature_array("localCentersY", len(localCenters[t][idx + 1]))
                traxel.add_feature_array("localCentersZ", len(localCenters[t][idx + 1]))

                for i, v in enumerate(localCenters[t][idx + 1]):
                    traxel.set_feature_value("localCentersX", i, float(v[0]))
                    traxel.set_feature_value("localCentersY", i, float(v[1]))
                    traxel.set_feature_value("localCentersZ", i, float(v[2]))

            traxel.add_feature_array("count", 1)
            traxel.set_feature_value("count", 0, float(size))

            if (x_upper < x_range[0] or x_lower >= x_range[1] or
                    y_upper < y_range[0] or y_lower >= y_range[1] or
                    z_upper < z_range[0] or z_lower >= z_range[1] or
                    size < size_range[0] or size >= size_range[1]):
                logger.info("Omitting traxel with ID: {} {}".format(traxel.Id, t))
                print("Omitting traxel with ID: {} {}".format(traxel.Id, t))
            else:
                logger.debug("Adding traxel with ID: {} {}".format(traxel.Id, t))
                traxelstore.TraxelsPerFrame.setdefault(int(t), {})[int(idx + 1)] = traxel

        if len(filtered_labels_at) > 0:
            filtered_labels[str(int(t) - time_range[0])] = filtered_labels_at

        logger.debug("at timestep {}, {} traxels passed filter".format(t, count))

        if count == 0:
            empty_frame = True
            logger.info('Found empty frames for time {}'.format(t))

        total_count += count

    self.parent.parent.trackingApplet.progressSignal(100)
    self.FilteredLabels.setValue(filtered_labels, check_changed=True)

    return traxelstore
def _generate_traxelstore(self, time_range, x_range, y_range, z_range, size_range,
                          x_scale=1.0, y_scale=1.0, z_scale=1.0,
                          with_div=False, with_local_centers=False, with_classifier_prior=False):
    logger.info("generating traxels")

    traxelstore = ProbabilityGenerator()

    logger.info("fetching region features and division probabilities")
    feats = self.ObjectFeatures(time_range).wait()

    if with_div:
        if not self.DivisionProbabilities.ready() or len(self.DivisionProbabilities([0]).wait()[0]) == 0:
            msgStr = "\nDivision classifier has not been trained! " + \
                     "Uncheck divisible objects if your objects don't divide or " + \
                     "go back to the Division Detection applet and train it."
            raise DatasetConstraintError("Tracking", msgStr)
        divProbs = self.DivisionProbabilities(time_range).wait()

    if with_local_centers:
        localCenters = self.RegionLocalCenters(time_range).wait()

    if with_classifier_prior:
        if not self.DetectionProbabilities.ready() or len(self.DetectionProbabilities([0]).wait()[0]) == 0:
            msgStr = "\nObject count classifier has not been trained! " + \
                     "Go back to the Object Count Classification applet and train it."
            raise DatasetConstraintError("Tracking", msgStr)
        detProbs = self.DetectionProbabilities(time_range).wait()

    logger.info("filling traxelstore")

    filtered_labels = {}
    total_count = 0
    empty_frame = False

    for t in feats.keys():
        rc = feats[t][default_features_key]['RegionCenter']
        lower = feats[t][default_features_key]['Coord<Minimum>']
        upper = feats[t][default_features_key]['Coord<Maximum>']
        if rc.size:
            rc = rc[1:, ...]
            lower = lower[1:, ...]
            upper = upper[1:, ...]

        ct = feats[t][default_features_key]['Count']
        if ct.size:
            ct = ct[1:, ...]

        logger.debug("at timestep {}, {} traxels found".format(t, rc.shape[0]))
        count = 0
        filtered_labels_at = []
        for idx in range(rc.shape[0]):
            traxel = Traxel()

            # for 2d data, set z-coordinate to 0:
            if len(rc[idx]) == 2:
                x, y = rc[idx]
                z = 0
            elif len(rc[idx]) == 3:
                x, y, z = rc[idx]
            else:
                raise DatasetConstraintError(
                    "Tracking", "The RegionCenter feature must have dimensionality 2 or 3.")

            size = ct[idx]

            if (x < x_range[0] or x >= x_range[1] or
                    y < y_range[0] or y >= y_range[1] or
                    z < z_range[0] or z >= z_range[1] or
                    size < size_range[0] or size >= size_range[1]):
                filtered_labels_at.append(int(idx + 1))
                continue
            else:
                count += 1

            traxel.Id = int(idx + 1)
            traxel.Timestep = int(t)
            traxel.set_x_scale(x_scale)
            traxel.set_y_scale(y_scale)
            traxel.set_z_scale(z_scale)

            # Expects always 3 coordinates, z=0 for 2d data
            traxel.add_feature_array("com", 3)
            for i, v in enumerate([x, y, z]):
                traxel.set_feature_value('com', i, float(v))

            traxel.add_feature_array("CoordMinimum", 3)
            for i, v in enumerate(lower[idx]):
                traxel.set_feature_value("CoordMinimum", i, float(v))

            traxel.add_feature_array("CoordMaximum", 3)
            for i, v in enumerate(upper[idx]):
                traxel.set_feature_value("CoordMaximum", i, float(v))

            if with_div:
                traxel.add_feature_array("divProb", 2)
                # idx+1 because rc and ct start from 1, divProbs starts from 0
                prob = float(divProbs[t][idx + 1][1])
                if prob < 0.0000001:
                    prob = 0.0000001
                if prob > 0.99999999:
                    prob = 0.99999999
                traxel.set_feature_value("divProb", 0, 1.0 - prob)
                traxel.set_feature_value("divProb", 1, prob)

            if with_classifier_prior:
                traxel.add_feature_array("detProb", len(detProbs[t][idx + 1]))
                for i, v in enumerate(detProbs[t][idx + 1]):
                    val = float(v)
                    if val < 0.0000001:
                        val = 0.0000001
                    if val > 0.99999999:
                        val = 0.99999999
                    traxel.set_feature_value("detProb", i, float(val))

            # FIXME: check whether it is 2d or 3d data!
            if with_local_centers:
                traxel.add_feature_array("localCentersX", len(localCenters[t][idx + 1]))
                traxel.add_feature_array("localCentersY", len(localCenters[t][idx + 1]))
                traxel.add_feature_array("localCentersZ", len(localCenters[t][idx + 1]))

                for i, v in enumerate(localCenters[t][idx + 1]):
                    traxel.set_feature_value("localCentersX", i, float(v[0]))
                    traxel.set_feature_value("localCentersY", i, float(v[1]))
                    traxel.set_feature_value("localCentersZ", i, float(v[2]))

            traxel.add_feature_array("count", 1)
            traxel.set_feature_value("count", 0, float(size))

            # Add traxel to traxelstore after checking position, time, and size ranges
            if (x < x_range[0] or x >= x_range[1] or
                    y < y_range[0] or y > y_range[1] or
                    z < z_range[0] or z > z_range[1] or
                    size < size_range[0] or size > size_range[1]):
                logger.info("Omitting traxel with ID: {}".format(traxel.Id))
            else:
                traxelstore.TraxelsPerFrame.setdefault(int(t), {})[int(idx + 1)] = traxel

        if len(filtered_labels_at) > 0:
            filtered_labels[str(int(t) - time_range[0])] = filtered_labels_at

        logger.debug("at timestep {}, {} traxels passed filter".format(t, count))

        if count == 0:
            empty_frame = True
            logger.info('Found empty frames')

        total_count += count

    self.FilteredLabels.setValue(filtered_labels, check_changed=True)

    return traxelstore
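# Note (illustrative, not part of the original module): the divProb/detProb values
# above are clamped to [1e-7, 0.99999999] because the tracking energies are later
# derived as negative log-probabilities (see the insertEnergies test), where a
# probability of exactly 0 or 1 would produce an infinite or zero cost. A minimal
# standalone sketch of that clamping, with a hypothetical helper name:
import numpy as np

def _clampProbability(p, eps=0.0000001):
    """Keep p strictly inside (0, 1) so that -log(p) and -log(1 - p) stay finite."""
    return min(max(float(p), eps), 0.99999999)

assert np.isfinite(-np.log(_clampProbability(0.0)))
assert np.isfinite(-np.log(1.0 - _clampProbability(1.0)))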