def __init__(self):
    """Initialize pygame, create the window/assets, and run the main loop.

    Blocks until the user quits (window close or ESC), then shuts pygame down.
    Relies on FPS and FRAME_RATE defined elsewhere in this file; FRAME_RATE is
    presumably the per-frame budget in milliseconds — TODO confirm.
    """
    pygame.init()
    self.screen = pygame.display.set_mode([640, 480])
    self.sysfont = pygame.font.SysFont(None, 60)
    self.life_image = pygame.image.load("image/heart.png")
    self.start()

    self.running = True
    fps = FPS()
    while self.running:
        fps.start()
        # Quit on window close or on releasing ESC.
        for event in pygame.event.get():
            if event.type == pygame.QUIT or (
                event.type == pygame.KEYUP and event.key == pygame.K_ESCAPE
            ):
                self.running = False
        self.update()
        pygame.display.update()
        fps.end()
        # Sleep for whatever remains of the frame budget (never negative).
        delay = max(0, int(FRAME_RATE - fps.get_time() * 1000))
        pygame.time.delay(delay)  # 60 FPS
    pygame.quit()
# NOTE(review): this chunk began mid-call; the leading `ap.add_argument("-n",`
# is reconstructed from the visible argparse pattern — confirm against the
# full file.
ap.add_argument("-n", "--num-frames", type=int, default=100,
                help="# of frames to loop over for FPS test")
ap.add_argument("-d", "--display", type=int, default=-1,
                help="Whether or not frames should be displayed")
args = vars(ap.parse_args())

# Baseline
# grab a pointer to the video stream and initialize the FPS counter
print("[INFO] sampling frames from webcam...")
stream = cv2.VideoCapture(1)
fps = FPS().start()

# loop over some frames
while fps._numFrames < args["num_frames"]:
    # grab the frame from the stream
    (grabbed, frame) = stream.read()
    # r = 150.0 / frame.shape[1]
    # dim = (150, int(frame.shape[0] * (150.0 / frame.shape[1])))
    # frame = cv2.resize(frame, dim, interpolation=cv2.INTER_AREA)

    # check to see if the frame should be displayed to our screen
    if args["display"] > 0:
        cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF

    # update the FPS counter
    fps.update()

# stop the timer and display FPS information
fps.stop()
def __init__(self, vid_path, src=0, src2=1, *args, **kwargs):
    """Build the dual-webcam recorder window.

    Opens two capture streams (indices `src` and `src2`), creates an XVID
    writer for each at the camera's native fps/size, then lays out the Qt
    GUI (two preview labels, start/stop buttons) and starts a 1 s timer.

    Parameters:
        vid_path: directory/prefix the two output .avi files are written under.
        src, src2: OpenCV capture indices for the two cameras.
    """
    super(MainWindow, self).__init__(*args, **kwargs)

    ###########################################################
    # Camera params
    ###########################################################
    self.counter = 0
    self.fps = FPS()
    self.sp = vid_path
    self.stream = cv2.VideoCapture(src)
    self.stream2 = cv2.VideoCapture(src2)
    self.default_fps = self.stream.get(cv2.CAP_PROP_FPS)
    self.default_fps2 = self.stream2.get(cv2.CAP_PROP_FPS)
    self.cam_size = (int(self.stream.get(cv2.CAP_PROP_FRAME_WIDTH)),
                     int(self.stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
    self.cam_size2 = (int(self.stream2.get(cv2.CAP_PROP_FRAME_WIDTH)),
                      int(self.stream2.get(cv2.CAP_PROP_FRAME_HEIGHT)))
    # One writer per camera, matching each camera's own fps and frame size.
    self.writer = cv2.VideoWriter(self.sp + 'outputVid.avi',
                                  cv2.VideoWriter_fourcc(*'XVID'),
                                  self.default_fps, self.cam_size)
    self.writer2 = cv2.VideoWriter(self.sp + 'outputVid2.avi',
                                   cv2.VideoWriter_fourcc(*'XVID'),
                                   self.default_fps2, self.cam_size2)
    self.cam_ind = src
    self.cam_ind2 = src2
    self.stopped = False

    ###########################################################
    # GUI configuration
    ###########################################################
    # NOTE(review): "disply_width" is a typo but is kept — methods outside
    # this view may reference the attribute by this exact name.
    self.disply_width = 500
    self.display_height = 350
    self.setWindowTitle("Qt live label demo")
    self.layout_base = QVBoxLayout()
    self.layout_h = QHBoxLayout()
    self.camera1 = QLabel("webcam1")
    self.camera2 = QLabel("webcam2")
    start_b = QPushButton("Start Video")
    stop_b = QPushButton("Stop Video")
    start_b.pressed.connect(self.start_record)
    stop_b.pressed.connect(self.stop_record)
    self.layout_h.addWidget(self.camera1)
    self.layout_h.addWidget(self.camera2)
    self.layout_base.addWidget(start_b)
    self.layout_base.addWidget(stop_b)
    self.layout_base.addLayout(self.layout_h)
    self.layout_h.setSpacing(15)
    self.widget = QWidget()
    self.widget.setLayout(self.layout_base)
    self.setCentralWidget(self.widget)
    self.camera1.resize(self.disply_width, self.display_height)
    self.camera2.resize(self.disply_width, self.display_height)
    self.show()

    # Thread pool for the capture worker(s); timer fires once per second.
    self.threadpool = QThreadPool()
    print("Multithreading with maximum %d threads" % self.threadpool.maxThreadCount())
    print("Cam 1 fps&size:", self.default_fps, self.cam_size)
    print("Cam 2 fps&size:", self.default_fps2, self.cam_size2)
    self.worker = Worker()
    self.timer = QTimer()
    self.timer.setInterval(1000)
    self.timer.timeout.connect(self.recurring_timer)
    self.timer.start()
# do a bit of cleanup cv2.destroyAllWindows() stream.close() rawCapture.close() camera.close() """ # created a *threaded *video stream, allow the camera sensor to warmup, # and start the FPS counter print("[INFO] sampling THREADED frames from `picamera` module...") vs = PiVideoStream().start() time.sleep(2.0) fps = FPS.FPS().start() # loop over some frames...this time using the threaded stream while fps._numFrames < args["num_frames"]: # grab the frame from the threaded video stream and resize it # to have a maximum width of 400 pixels frame = vs.read() #frame = imutils.resize(frame, width=400) # check to see if the frame should be displayed to our screen if args["display"] > 0: cv2.imshow("Frame", frame) key = cv2.waitKey(1) & 0xFF # update the FPS counter fps.update()
arduino.write(command.encode())

# start the capture (on camera channel 0) thread
cap = WebcamVideoStream(src=0).start()

# wait one second for everything to settle before reading first frame
time.sleep(1)

# create a video of the tracking
timestamp = "{:%Y_%m_%d_%H_%M}".format(datetime.now())
fourcc = cv2.VideoWriter_fourcc('m', 'p', '4', 'v')
out = cv2.VideoWriter('recordings/video' + timestamp + '.mov', fourcc, 30,
                      (612, 425), True)

# start FPS counter
fps = FPS()
fps.start()

# record time
timerStart = datetime.now()

while True:
    # capture a frame
    frame = cap.read()

    # undistort and crop the frame
    # cv2.undistort() is slow so we use a remapping
    # undistorted = cv2.undistort(frame, mtx, dist, None, newcameramtx)
    undistorted = cv2.remap(frame, map1, map2, cv2.INTER_LINEAR)
    x, y, w, h = roi
    cropped = undistorted[y:y + h, x:x + w]
    # NOTE(review): the loop body continues beyond this chunk of the file.