Example #1
# import the necessary packages (FPS and WebcamVideoStream come from imutils.video)
from imutils.video import WebcamVideoStream
from imutils.video import FPS
import argparse
import cv2

# construct the argument parser; the "num_frames" and "display" keys used below
# map to these two switches
ap = argparse.ArgumentParser()
ap.add_argument("-n", "--num-frames", type=int, default=100,
                help="# of frames to loop over for FPS test")
ap.add_argument("-d", "--display", type=int, default=-1,
                help="whether or not frames should be displayed")
args = vars(ap.parse_args())

# grab a pointer to the (blocking) video stream and start the FPS counter
# (camera index 0 is assumed here)
stream = cv2.VideoCapture(0)
fps = FPS().start()
# loop over some frames
while fps._numFrames < args["num_frames"]:
    # grab the frame from the stream
    (grabbed, frame) = stream.read()
    # (optional) resize the frame to a width of 150 pixels
    # r = 150.0 / frame.shape[1]
    # dim = (150, int(frame.shape[0] * r))
    # frame = cv2.resize(frame, dim, interpolation=cv2.INTER_AREA)
    # check to see if the frame should be displayed to our screen
    if args["display"] > 0:
        cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF
    # update the FPS counter
    fps.update()
# stop the timer and display FPS information
fps.stop()
print("[INFO] elasped time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
# do a bit of cleanup
stream.release()
cv2.destroyAllWindows()


def help():
    print("Help!!")


# create a *threaded* video stream, allow the camera sensor to warm up,
# and start the FPS counter
print("[INFO] sampling THREADED frames from webcam...")
vs = WebcamVideoStream(src=1).start()
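
The listing stops right after the threaded stream is created. As a rough sketch (reusing the same `args` switches, and assuming the imutils `WebcamVideoStream` API, where `vs.read()` returns the latest polled frame and `vs.stop()` ends the background thread), the matching measurement loop would mirror the blocking one above:

fps = FPS().start()

# loop over some frames, this time reading from the threaded stream
while fps._numFrames < args["num_frames"]:
    # grab the most recent frame polled by the background thread
    frame = vs.read()

    # check to see if the frame should be displayed to our screen
    if args["display"] > 0:
        cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF

    # update the FPS counter
    fps.update()

# stop the timer and display FPS information
fps.stop()
print("[INFO] elapsed time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))

# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()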
Example #2
# import the necessary packages; Worker is this project's own QRunnable helper
# (a reconstructed sketch of it follows the class below)
import cv2
from imutils.video import FPS
from PyQt5.QtCore import Qt, QThreadPool, QTimer, pyqtSlot
from PyQt5.QtGui import QImage, QPixmap
from PyQt5.QtWidgets import (QMainWindow, QWidget, QLabel, QPushButton,
                             QVBoxLayout, QHBoxLayout)


class MainWindow(QMainWindow):
    # Constructor
    def __init__(self, vid_path, src=0, src2=1, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)

        # Camera parameters
        self.counter = 0
        self.fps = FPS()
        self.sp = vid_path
        self.stream = cv2.VideoCapture(src)
        self.stream2 = cv2.VideoCapture(src2)
        # CAP_PROP_FPS can be 0 on some webcams; fall back to 30 so the writers stay valid
        self.default_fps = self.stream.get(cv2.CAP_PROP_FPS) or 30.0
        self.default_fps2 = self.stream2.get(cv2.CAP_PROP_FPS) or 30.0
        self.cam_size = (int(self.stream.get(cv2.CAP_PROP_FRAME_WIDTH)),
                         int(self.stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
        self.cam_size2 = (int(self.stream2.get(cv2.CAP_PROP_FRAME_WIDTH)),
                          int(self.stream2.get(cv2.CAP_PROP_FRAME_HEIGHT)))
        self.writer = cv2.VideoWriter(self.sp + 'outputVid.avi',
                                      cv2.VideoWriter_fourcc(*'XVID'),
                                      self.default_fps, self.cam_size)
        self.writer2 = cv2.VideoWriter(self.sp + 'outputVid2.avi',
                                       cv2.VideoWriter_fourcc(*'XVID'),
                                       self.default_fps2, self.cam_size2)
        self.cam_ind = src
        self.cam_ind2 = src2
        self.stopped = False

        # GUI configuration
        self.display_width = 500
        self.display_height = 350
        self.setWindowTitle("Qt live label demo")

        self.layout_base = QVBoxLayout()
        self.layout_h = QHBoxLayout()
        self.camera1 = QLabel("webcam1")
        self.camera2 = QLabel("webcam2")

        start_b = QPushButton("Start Video")
        stop_b = QPushButton("Stop Video")
        start_b.pressed.connect(self.start_record)
        stop_b.pressed.connect(self.stop_record)

        self.layout_h.addWidget(self.camera1)
        self.layout_h.addWidget(self.camera2)

        self.layout_base.addWidget(start_b)
        self.layout_base.addWidget(stop_b)
        self.layout_base.addLayout(self.layout_h)
        self.layout_h.setSpacing(15)

        self.widget = QWidget()
        self.widget.setLayout(self.layout_base)
        self.setCentralWidget(self.widget)

        self.camera1.resize(self.display_width, self.display_height)
        self.camera2.resize(self.display_width, self.display_height)

        self.show()

        self.threadpool = QThreadPool()
        print("Multithreading with maximum %d threads" %
              self.threadpool.maxThreadCount())
        print("Cam 1 fps&size:", self.default_fps, self.cam_size)
        print("Cam 2 fps&size:", self.default_fps2, self.cam_size2)
        self.worker = Worker()

        self.timer = QTimer()
        self.timer.setInterval(1000)
        self.timer.timeout.connect(self.recurring_timer)
        self.timer.start()

    def progress_fn(self, n):
        print("%d%% done" % n)

    def thread_fc(self):
        while True:
            # if the thread indicator variable is set, stop the thread
            if self.stopped:
                print("[Record-Info] elapsed time: {:.2f}".format(
                    self.fps.elapsed()))
                print("[Record-Info] approx. FPS: {:.2f}".format(
                    self.fps.fps()))
                self.stream.release()
                self.stream2.release()
                self.writer.release()
                self.writer2.release()
                cv2.destroyAllWindows()
                return "Done"
            # otherwise, read the next frame from the stream
            (self.success, self.frame) = self.stream.read()
            (self.success2, self.frame2) = self.stream2.read()
            self.fps.update()
            if self.success:
                self.writer.write(self.frame)
            if self.success2:
                self.writer2.write(self.frame2)
            # only push frames to the GUI when both cameras returned a valid image
            if self.success and self.success2:
                self.worker.signals.change_pixmap_signal.emit(
                    [self.frame, self.frame2])

    def print_output(self, s):
        print("output", s)

    def thread_complete(self):
        print("Thread complete")

    def convert_cv_qt(self, cv_img):
        """Convert from an opencv image to QPixmap"""
        rgb_image = cv2.cvtColor(cv_img, cv2.COLOR_BGR2RGB)
        h, w, ch = rgb_image.shape
        bytes_per_line = ch * w
        convert_to_Qt_format = QImage(rgb_image.data, w, h, bytes_per_line,
                                      QImage.Format_RGB888)
        p = convert_to_Qt_format.scaled(self.display_width, self.display_height,
                                        Qt.KeepAspectRatio)
        return QPixmap.fromImage(p)

    @pyqtSlot(list)
    def update_image(self, cv_imgs):
        """Update both camera labels with new OpenCV images"""
        qt_img = self.convert_cv_qt(cv_imgs[0])
        qt_img2 = self.convert_cv_qt(cv_imgs[1])
        self.camera1.setPixmap(qt_img)
        self.camera2.setPixmap(qt_img2)

    def execute_thread(self):
        # Pass the function to execute
        self.worker.begin_job(
            self.thread_fc
        )  # Any other args, kwargs are passed to the run function
        self.worker.signals.change_pixmap_signal.connect(self.update_image)
        self.worker.signals.result.connect(self.print_output)
        self.worker.signals.finished.connect(self.thread_complete)
        self.worker.signals.progress.connect(self.progress_fn)
        # Execute
        self.threadpool.start(self.worker)
        return self

    def start_record(self):
        print('start recording!', self.stopped)
        self.fps.start()
        if self.stopped:
            self.worker = Worker()
            self.stream = cv2.VideoCapture(self.cam_ind)
            self.stream2 = cv2.VideoCapture(self.cam_ind2)
            self.writer = cv2.VideoWriter(self.sp + 'outputVid.avi',
                                          cv2.VideoWriter_fourcc(*'XVID'),
                                          self.default_fps, self.cam_size)
            self.writer2 = cv2.VideoWriter(self.sp + 'outputVid2.avi',
                                           cv2.VideoWriter_fourcc(*'XVID'),
                                           self.default_fps2, self.cam_size2)
            self.stopped = False
            self.worker.open_record_loop()
        self.execute_thread()

    def stop_record(self):
        # indicate that the thread should be stopped
        print("Stop recording!")
        self.stopped = True
        self.worker.close_record_loop()
        self.fps.stop()
        self.execute_thread()

    def recurring_timer(self):
        self.counter += 1
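
The Worker class used by MainWindow is not part of this listing. The sketch below is reconstructed purely from how it is used above (begin_job(), open_record_loop(), close_record_loop(), and a signals object carrying change_pixmap_signal, result, finished, and progress), following the usual QRunnable-plus-pyqtSignal pattern. Treat it as an illustrative assumption rather than the original implementation, and the same goes for the minimal entry point at the end.

import sys
from PyQt5.QtCore import QObject, QRunnable, pyqtSignal
from PyQt5.QtWidgets import QApplication


class WorkerSignals(QObject):
    # signals must live on a QObject; QRunnable cannot define them itself
    finished = pyqtSignal()
    result = pyqtSignal(object)
    progress = pyqtSignal(int)  # declared because MainWindow connects to it
    change_pixmap_signal = pyqtSignal(list)  # carries [frame, frame2]


class Worker(QRunnable):
    def __init__(self):
        super().__init__()
        self.signals = WorkerSignals()
        self.fn = None
        self.recording = False
        # execute_thread() restarts the same instance, so keep it alive
        self.setAutoDelete(False)

    def begin_job(self, fn):
        # store the callable (MainWindow.thread_fc) that run() should execute
        self.fn = fn

    def open_record_loop(self):
        self.recording = True

    def close_record_loop(self):
        self.recording = False

    def run(self):
        if self.fn is None:
            return
        result = self.fn()  # blocks on the pool thread until thread_fc returns
        self.signals.result.emit(result)
        self.signals.finished.emit()


if __name__ == "__main__":
    app = QApplication(sys.argv)
    # "./" is just a placeholder output directory for the two .avi files
    window = MainWindow(vid_path="./", src=0, src2=1)
    sys.exit(app.exec_())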