def trackFrame(self, record=False, requestedOutput='raw'):
    """Reimplementation of Tracker.trackFrame for the GUI.

    Reads one frame from the stream and dispatches it depending on where it
    falls in the recording timeline: pre-background junk, background-building
    frames, pre-tracking junk, or tracked frames.

    :param bool record: whether to save the (possibly annotated) frame to the stream
    :param str requestedOutput: output mode forwarded to self._trackFrame
    :return: (frame-to-display, position, arena-distances-or-default) for tracked
        frames; (frame, defaultPos, defaultPos) for skipped frames; None on
        VideoStreamFrameException (the handler falls through).
    :raises EOFError: when the end of the stream is reached or the user stops recording
    """
    # Seed fid before the try so the VideoStreamFrameException handler below
    # can always report a frame index, even if self._stream.read() itself raises
    # (previously fid could be unbound there, raising NameError instead).
    fid = self._stream.currentFrameIdx
    try:
        frame = self._stream.read()
        if self.cameraCalibration is not None:
            frame = Frame(self.cameraCalibration.remap(frame))
        fid = self._stream.currentFrameIdx
        if self.trackTo and (fid > self.trackTo):
            raise KeyboardInterrupt  # stop recording
        if fid < self._stream.bgStartFrame:
            return frame.color(), self.defaultPos, self.defaultPos  # Skip junk frames
        elif self._stream.isBgFrame():
            self._buildBg(frame)
            if record:
                self._stream._save(frame)
            return frame.color(), self.defaultPos, self.defaultPos
        elif self._stream.bgEndFrame < fid < self.trackFrom:
            if record:
                self._stream._save(frame)
            return frame.color(), self.defaultPos, self.defaultPos  # Skip junk frames
        else:  # Tracked frame
            if fid == self.trackFrom:
                self._finaliseBg()
            sil = self._trackFrame(frame, 'b', requestedOutput=requestedOutput)
            if sil is None:
                if record:
                    self._stream._save(frame)
                return None, self.defaultPos, self.defaultPos  # Skip if no contour found
            else:
                self.silhouette = sil.copy()
            if self.roi is not None:
                self._checkMouseInRoi()
            self.paint(self.silhouette, 'c')
            self.silhouette.paint(curve=self.positions)
            if record:
                self._stream._save(self.silhouette)
            result = [self.silhouette, self.positions[-1]]
            if self.extractArena:
                distances = (self._getDistanceFromArenaCenter(),
                             self._getDistanceFromArenaBorder())
                self.distancesFromArena.append(distances)
                result.append(self.distancesFromArena[-1])
            else:
                result.append(self.defaultPos)
            return result
    except VideoStreamFrameException as e:
        print('Error with video_stream at frame {}: \n{}'.format(fid, e))
    except (KeyboardInterrupt, EOFError) as e:
        # isinstance instead of type(e) == ... : correct for subclasses and idiomatic.
        msg = "Recording stopped by user" if isinstance(e, KeyboardInterrupt) else str(e)
        self._stream.stopRecording(msg)
        raise EOFError
def read(self):
    """Capture the next camera frame and update the frame count.

    :return: the captured video frame
    :rtype: video_frame.Frame
    """
    buf = self.stream
    self._cam.quickCapture(buf)
    img = buf.array  # buf.array holds the image data in BGR order after capture
    buf.truncate(0)  # reset the capture buffer for the next call
    self.currentFrameIdx += 1
    return Frame(img.astype(np.float32))
def read(self):
    """Read the next frame from the USB stream and update the count.

    :return: frame
    :rtype: video_frame.Frame
    :raises VideoStreamFrameException: when no frame can be read
    """
    _, img = self.stream.read()
    if img is None:  # guard clause: fail fast when the device yields nothing
        raise VideoStreamFrameException("UsbVideoStream frame not found")
    self.currentFrameIdx += 1
    return Frame(img.astype(np.float32))
def read(self):
    """Return the frame at the current index, then advance the count.

    :return: frame
    :rtype: video_frame.Frame
    :raises EOFError: when end of recording is reached
    """
    # NOTE(review): the -2 means the final image in self.imgs is never
    # returned — confirm this is intentional (e.g. a sentinel last frame).
    lastReadableIdx = len(self.imgs) - 2
    if self.currentFrameIdx > lastReadableIdx:
        raise EOFError("End of recording reached")
    result = Frame(self.imgs[self.currentFrameIdx])
    self.currentFrameIdx += 1
    return result
def read(self, idx=None):
    """Return the next frame (or the frame at *idx*) after updating the count.

    :param idx: optional explicit frame index to seek to; None means "next frame"
    :return: frame
    :rtype: video_frame.Frame
    :raises EOFError: when end of recording is reached
    """
    self.currentFrameIdx = (self.currentFrameIdx + 1) if idx is None else idx
    # NOTE(review): bound check uses '>', so currentFrameIdx == nFrames passes —
    # verify nFrames is the max valid index rather than the frame count.
    if self.currentFrameIdx > self.nFrames:
        raise EOFError("End of recording reached")
    return Frame(self.frames[self.currentFrameIdx])
def read(self):
    """Advance the frame count and read the next frame from the stream.

    :return: frame
    :rtype: video_frame.Frame
    :raises EOFError: when end of recording is reached
    :raises VideoStreamFrameException: when the underlying stream fails to
        deliver a frame at the current index
    """
    self.currentFrameIdx += 1
    if self.currentFrameIdx > self.nFrames:
        raise EOFError("End of recording reached")
    ok, img = self.stream.read()
    if not ok:  # guard clause instead of if/else
        raise VideoStreamFrameException("Could not get frame at index {}".format(self.currentFrameIdx))
    return Frame(img.astype(np.float32))
def _buildBg(self, frame):
    """Initialise the background if empty, expand it otherwise.

    Also (re)extracts the arena ROI when that option is selected.

    :param frame: the video frame to use as (part of) the background
    :type frame: video_frame.Frame
    """
    if __debug__:
        print("Building background")
    processed = frame.denoise().blur().gray()
    # First background frame is stored as-is; later ones are stacked depth-wise.
    self.bg = processed if self.bg is None else Frame(np.dstack((self.bg, processed)))
    if self.extractArena:
        self.arena = self._extractArena()
def _getSilhouette(self, frame):
    """Extract the mouse's binary mask (8 bits) from the thresholded
    difference between the frame and the background.

    :param frame: the current frame to analyse
    :type frame: video_frame.Frame
    :returns: (silhouette binary mask, background difference image)
    :rtype: tuple
    """
    source = frame.normalise(self.bgAvgAvg) if self.normalise else frame
    diff = Frame(cv2.absdiff(source, self.bg))
    if self.bgStd is not None:
        # Adaptive threshold: nSds standard deviations above background noise.
        silhouette = (diff > (self.bgStd * self.nSds)).astype(np.uint8) * 255
    else:
        diff = diff.astype(np.uint8)  # fixed threshold operates on uint8 diff
        silhouette = diff.threshold(self.threshold)
    if self.clearBorders:
        silhouette.clearBorders()
    return silhouette, diff