def saveTest(path="save_test.fish"):
    """Test driver: run detection and tracking on the test file and save
    the results whenever the fish manager reports updated contents.

    Parameters:
        path: destination file for SaveManager.saveFile. The original code
            referenced an undefined free variable `path` (NameError when the
            signal fired); it is now a parameter with a placeholder default —
            TODO confirm the expected file extension/format.
    """

    def startDetector():
        # Background model first, then detections, then tracks.
        detector.initMOG()
        detector.computeAll()
        tracker.trackAll(detector.detections)

    app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    playback_manager = PlaybackManager(app, main_window)
    detector = Detector(playback_manager)
    tracker = Tracker(detector)
    fish_manager = FishManager(playback_manager, tracker)
    save_manager = SaveManager(playback_manager, detector, tracker, fish_manager)

    # Save every time the fish manager contents change.
    fish_manager.updateContentsSignal.connect(
        lambda: save_manager.saveFile(path, True))

    playback_manager.openTestFile()
    detector._show_detections = True
    playback_manager.mapping_done.connect(startDetector)

    main_window.show()
    sys.exit(app.exec_())
def defaultTest():
    """Test driver: open the test file at 1 fps, run the detector via the
    module-level startDetector/handleFrame helpers, and show the results
    in a DetectionList widget."""
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()

    # Slow playback so each frame can be inspected.
    playback = PlaybackManager(app, window)
    playback.fps = 1
    playback.openTestFile()

    detector = Detector(playback)
    detector.mog_parameters.nof_bg_frames = 100

    # Start detection once the polar mapping is done and feed every
    # available frame to the detector.
    playback.mapping_done.connect(
        lambda: startDetector(playback, detector))
    playback.frame_available.connect(
        lambda t: handleFrame(detector, t))

    detection_model = DetectionDataModel(detector)
    window.setCentralWidget(DetectionList(detection_model))
    window.show()
    sys.exit(app.exec_())
def playbackTest():
    """Test code to assure tracker works with detector.

    NOTE(review): a second `playbackTest` is defined later in this module
    and shadows this one; consider renaming one of them.
    """

    def forwardImage(frame_data):
        # Renamed from `tuple`, which shadowed the builtin; the signal
        # delivers the argument positionally, so behavior is unchanged.
        ind, frame = frame_data
        # NOTE(review): `detections` is fetched but never used here —
        # tracker.visualize presumably draws the tracks itself; verify.
        detections = detector.getDetection(ind)
        image = cv2.applyColorMap(frame, cv2.COLORMAP_OCEAN)
        image = tracker.visualize(image, ind)
        figure.displayImage((ind, image))

    def startDetector():
        # Background model first, then detections, tracks, then playback.
        detector.initMOG()
        detector.computeAll()
        tracker.trackAll(detector.detections)
        playback_manager.play()

    app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    playback_manager = PlaybackManager(app, main_window)
    detector = Detector(playback_manager)
    tracker = Tracker(detector)

    playback_manager.fps = 10
    playback_manager.openTestFile()
    playback_manager.frame_available.connect(forwardImage)
    detector.mog_parameters.nof_bg_frames = 500
    detector._show_detections = True
    playback_manager.mapping_done.connect(startDetector)

    figure = TestFigure(playback_manager.togglePlay)
    main_window.setCentralWidget(figure)

    LogObject().print(detector.parameters)
    LogObject().print(detector.parameters.mog_parameters)
    LogObject().print(tracker.parameters)

    main_window.show()
    sys.exit(app.exec_())
def dataTest():
    """Test driver: run detection and tracking on the test file and show
    the resulting fish in a FishList widget inside a Window."""

    def run_pipeline():
        # Background model first, then detections, then tracks.
        detector.initMOG()
        detector.computeAll()
        tracker.trackAll(detector.detections)

    app = QtWidgets.QApplication(sys.argv)
    window = Window()

    playback = PlaybackManager(app, window)
    detector = Detector(playback)
    tracker = Tracker(detector)
    fish_manager = FishManager(playback, tracker)
    fish_list = FishList(fish_manager, playback)

    playback.openTestFile()
    detector.mog_parameters.nof_bg_frames = 500
    detector._show_detections = True
    playback.mapping_done.connect(run_pipeline)

    window.setCentralWidget(fish_list)
    window.show()
    sys.exit(app.exec_())
def loadTest():
    """Test driver: load previously saved detections from a user-selected
    file, log the non-empty ones, and show them in a DetectionList."""
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()

    playback = PlaybackManager(app, window)
    playback.fps = 1

    detector = Detector(playback)
    detector.mog_parameters.nof_bg_frames = 100

    # Ask for the detections file before opening the test recording.
    load_path = playback.selectLoadFile()
    playback.openTestFile()
    playback.mapping_done.connect(
        lambda: startDetector(playback, detector))

    detector.loadDetectionsFromFile(load_path)
    non_empty = [d for d in detector.detections if d is not None]
    LogObject().print(non_empty)

    window.setCentralWidget(DetectionList(DetectionDataModel(detector)))
    window.show()
    sys.exit(app.exec_())
def playbackTest():
    """Test driver: compute detections during playback and display them
    overlaid on a color-mapped frame image.

    NOTE(review): another playbackTest is defined earlier in this module;
    this later definition shadows it.
    """

    def show_frame(payload):
        frame_index, frame = payload
        current = detector.getCurrentDetection()
        colored = cv2.applyColorMap(frame, cv2.COLORMAP_OCEAN)
        figure.displayImage(
            (frame_index, detector.overlayDetections(colored, current)))

    def begin_playback():
        # Initialize the background model, then start playing.
        detector.initMOG()
        playback.play()

    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()

    playback = PlaybackManager(app, window)
    playback.fps = 10
    playback.openTestFile()
    playback.frame_available.connect(show_frame)

    detector = Detector(playback)
    detector.mog_parameters.nof_bg_frames = 100
    detector._show_detections = True
    detector._show_echogram_detections = True

    playback.mapping_done.connect(begin_playback)
    # The detector consumes the early frame event directly.
    playback.frame_available_early.connect(detector.compute_from_event)

    figure = TestFigure(playback.togglePlay)
    window.setCentralWidget(figure)
    window.show()
    sys.exit(app.exec_())
        # Tail of a method whose `def` line lies above this chunk (not
        # visible here; presumably part of a TestDetector class — confirm).
        # For each frame's detection list, collect center[0] of every
        # detection whose center is set; frames whose list is None map to [].
        self.vertical_detections = [
            [d.center[0] for d in dets if d.center is not None]
            if dets is not None else []
            for dets in self.detections]

    def parametersDirty(self):
        # Test stub: parameters are never considered dirty.
        return False


class TestFishManager(QtCore.QAbstractTableModel):
    """Minimal table model used as the fish manager in this test; exposes
    only the show flags and an update signal."""

    # Contents-changed signal (never emitted by this stub).
    updateContentsSignal = QtCore.pyqtSignal()

    def __init__(self):
        super().__init__()
        self.show_fish = True
        self.show_echogram_fish = True


# Script: open the test file and display it in an EchogramViewer backed by
# the stub detector and fish manager defined above.
app = QtWidgets.QApplication(sys.argv)
main_window = QtWidgets.QMainWindow()
playback_manager = PlaybackManager(app, main_window)
playback_manager.openTestFile()
detector = TestDetector(playback_manager.getFrameCount(),
                        playback_manager.sonar.samplesPerBeam)
fish_manager = TestFishManager()
echogram = EchogramViewer(playback_manager, detector, fish_manager)
echogram.onFileOpen(playback_manager.sonar)
main_window.setCentralWidget(echogram)
main_window.show()
main_window.resize(900, 300)
sys.exit(app.exec_())
class TrackProcess(QtCore.QObject):
    """
    TrackProcess launches individual PlaybackManager, Detector and Tracker,
    separate from the ones associated with the UI. These are used for the
    tracking process of the file provided in the track method. Each file is
    intended to be processed with its own TrackProcess instance.
    """

    # Reserved for signaling that the process should exit.
    exit_signal = QtCore.pyqtSignal()

    def __init__(self, app, display, file, save_directory, connection=None,
                 testFile=False):
        """
        Parameters:
            app: QApplication driving the event loop.
            display: if True, progress is visualized in a main window.
            file: path of the file to be tracked.
            save_directory: directory where results are written.
            connection: optional Connection-like object (poll/recv/send)
                used for log forwarding and remote termination.
            testFile: if True, the test file is opened instead of `file`.
        """
        super().__init__()
        self.app = app
        self.display = display
        self.figure = None
        self.file = file
        self.save_directory = os.path.abspath(save_directory)
        self.connection = connection
        self.testFile = testFile
        self.alive = True

        # Both result types are saved by default.
        self.save_detections = True
        self.save_tracks = True

        self.main_window = QtWidgets.QMainWindow() if display else None

        self.playback_manager = PlaybackManager(self.app, self.main_window)
        self.detector = Detector(self.playback_manager)
        self.tracker = Tracker(self.detector)
        self.fish_manager = FishManager(self.playback_manager, self.tracker)

        self.playback_manager.fps = 100
        # Poll the connection for control messages in a worker thread.
        self.playback_manager.runInThread(self.listenConnection)

        # Redirect all log output through the connection.
        log = LogObject()
        log.disconnectDefault()
        log.connect(self.writeToConnection)
        log.print("Process created for file: ", self.file)

    def writeToConnection(self, value):
        """Forward a log value through the connection, if one exists."""
        if self.connection:
            self.connection.send(value)

    def forwardImage(self, frame_data):
        """
        Default function for forwarding the image, does not visualize
        the result. (Parameter renamed from `tuple`, which shadowed the
        builtin; the signal passes it positionally, so callers are
        unaffected.)
        """
        ind, frame = frame_data
        detections = self.detector.getDetection(ind)

    def forwardImageDisplay(self, frame_data):
        """
        If the progress is visualized, this is used to forward the image.
        """
        ind, frame = frame_data
        detections = self.detector.getDetection(ind)
        image = cv2.applyColorMap(frame, cv2.COLORMAP_OCEAN)
        image = self.tracker.visualize(image, ind)
        self.figure.displayImage((ind, image))

    def startTrackingProcess(self):
        """
        Initiates detecting and tracking. Called from an event
        (mapping_done) when the playback_manager is ready to feed frames.
        """
        self.detector.initMOG()
        self.detector.computeAll()
        self.tracker.trackAll(self.detector.detections)
        if self.display:
            self.playback_manager.play()

    def track(self):
        """
        Handles the tracking process. Opens file and connects detection
        and tracking calls to the appropriate signals, so that they can
        be started when the file has been loaded.
        """
        if self.testFile:
            self.playback_manager.openTestFile()
        else:
            self.playback_manager.loadFile(self.file)

        LogObject().print("Frame count:",
                          self.playback_manager.getFrameCount())

        if self.display:
            self.playback_manager.frame_available.connect(
                self.forwardImageDisplay)
        else:
            self.playback_manager.frame_available.connect(self.forwardImage)

        self.detector.mog_parameters.nof_bg_frames = 500
        self.detector._show_detections = True
        self.playback_manager.mapping_done.connect(self.startTrackingProcess)
        self.tracker.all_computed_signal.connect(self.onAllComputed)

        if self.display:
            self.figure = TestFigure(self.playback_manager.togglePlay)
            self.main_window.setCentralWidget(self.figure)

        LogObject().print(self.detector.parameters)
        LogObject().print(self.detector.parameters.mog_parameters)
        LogObject().print(self.tracker.parameters)

        if self.display:
            self.main_window.show()

    def listenConnection(self):
        """
        Listens the connection for messages. Currently, only terminate
        message (-1) is supported, but others should be easy to add when
        needed.
        """
        # Guard: connection defaults to None and the original crashed on
        # None.poll() here (writeToConnection already guards consistently).
        if self.connection is None:
            return
        while self.alive:
            if self.connection.poll():
                msg_id, msg = self.connection.recv()
                if msg_id == -1:
                    self.connection.send((-1, "Terminating"))
                    self.quit()
            else:
                # Avoid busy-waiting between polls.
                time.sleep(0.5)

    def getSaveFilePath(self, end_string):
        """
        Formats the save file path. Detections and tracks are separated
        based on end_string.
        """
        base_name = os.path.basename(self.file)
        file_name = os.path.splitext(base_name)[0]
        return os.path.join(self.save_directory,
                            "{}_{}".format(file_name, end_string))

    def saveResults(self):
        """
        Saves both detections and tracks to the directory provided earlier.
        (An unused `file_name` local from the original was removed.)
        """
        if self.save_detections:
            det_path = self.getSaveFilePath("dets.txt")
            self.detector.saveDetectionsToFile(det_path)
        if self.save_tracks:
            track_path = self.getSaveFilePath("tracks.txt")
            self.fish_manager.saveToFile(track_path)

    def onAllComputed(self):
        """Saves and quits the process."""
        self.saveResults()
        self.quit()

    def quit(self):
        """Stop the listener loop and terminate the application."""
        self.alive = False
        self.app.quit()