def __init__(self):
    """Create the per-session state containers.

    The detection store is created before the graph because the graph
    reads detection sets from it when building tracking hypotheses.
    """
    # raw pipeline detections, keyed by timestamp
    self.dset_store = ds.DetectionSetStore()
    # manually curated paths (ground-truth tracks)
    self.path_manager = ds.PathManager()
    # candidate track hypotheses and the claims they make on detections
    self.hypothesis_manager = ds.HypothesisManager()
    self.claim_manager = ds.ClaimManager()
    # tracking graph looking 3 frames ahead over the detection store
    self.graph = ds.Graph(self.dset_store, future_depth=3)
def __init__(self, app, parent=None):
    """Build the main window: a stacked widget holding the loader tab
    (shown first) and the editor tab.

    :param app: the running QApplication (passed through to the tabs)
    :param parent: optional parent widget
    """
    super(MainWindow, self).__init__(parent)
    self.setWindowTitle('BeesBook Filtering Editor')
    self.resize(1000, 600)

    # one stacked widget fills the window; tabs are swapped in and out of it
    stack = QtGui.QStackedWidget(self)
    self.central_widget = stack
    self.setCentralWidget(stack)

    # shared data stores the tabs operate on
    self.dset_store = ds.DetectionSetStore()
    self.path_manager = ds.PathManager()

    self.loader_tab = LoaderTab(self, app)
    self.editor_tab = EditorTab(self, app)
    for tab in (self.loader_tab, self.editor_tab):
        stack.addWidget(tab)

    # start on the loader until data has been opened
    stack.setCurrentWidget(self.loader_tab)
def __init__(self):
    """Set up the minimal session state: detections, paths and claims."""
    # raw pipeline detections, keyed by timestamp
    self.dset_store = ds.DetectionSetStore()
    # manually curated paths (ground-truth tracks)
    self.path_manager = ds.PathManager()
    # claims paths make on individual detections
    self.claim_manager = ds.ClaimManager()
def load_tracks(self): if self.dset_store is None: print 'Error: no data folder loaded' return self.block_inputs(True) self.dset_store.delete_path_associations() self.path_manager = ds.PathManager(config.PATHS_FILE) if os.path.isfile(config.PATHS_FILE): try: with open(config.PATHS_FILE, 'rb') as paths_file: input = pickle.load(paths_file) if self.dset_store.source != input['source']: print 'Warning: data source for detections and paths do not match' paths_input = input['paths'] self.paths_load_progress.setMaximum(len(paths_input)) self.app.processEvents() for i, tag_id in enumerate(paths_input.keys()): self.path_manager.paths[tag_id] = {} for path_id in paths_input[tag_id].keys(): path = ds.Path(tag_id) self.path_manager.paths[tag_id][path_id] = path for frame, detection_data in paths_input[tag_id][ path_id].items(): timestamp = self.dset_store.get_timestamp(frame) if timestamp is not None: detection_id, pos_x, pos_y, readability = detection_data # data point is associated with a detection from the pipeline output if detection_id is not None: dset = self.dset_store.get(timestamp) if detection_id in dset.detections: detection = dset.detections[ detection_id] else: print 'Warning: detection_id not found, your truth file does not match your pipeline data. Please rematch!' 
continue # if two paths claim the same detection only the first one gets it if detection.path is None: detection.readability = readability path.add_detection(detection) # insert empty detection for every following path else: detection = ds.EmptyDetection( timestamp) detection.position = np.array( [pos_x, pos_y]) detection.readability = readability path.add_detection(detection) # data point is an empty detection else: detection = ds.EmptyDetection(timestamp) detection.position = np.array( [pos_x, pos_y]) detection.readability = readability path.add_detection(detection) self.paths_load_progress.setValue(i + 1) self.app.processEvents() self.paths_load_label.setText( str(len(paths_input)) + ' paths loaded') self.app.processEvents() except: pass else: self.paths_load_progress.setMaximum(1) self.paths_load_progress.setValue(1) self.paths_load_label.setText('will write to new file') self.app.processEvents() self.block_inputs(False)