Example #1
    def frames():
        """Generator that yields JPEG-encoded frames from an OpenCV capture."""
        camera = cv2.VideoCapture(Camera.video_source)

        # request a 320x240 capture size
        camera.set(cv2.CAP_PROP_FRAME_WIDTH, 320)
        camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 240)

        width = int(camera.get(cv2.CAP_PROP_FRAME_WIDTH))
        height = int(camera.get(cv2.CAP_PROP_FRAME_HEIGHT))

        # rotation matrix to flip the image 180 degrees around its centre
        M = cv2.getRotationMatrix2D((width / 2, height / 2), 180, 1)

        # keep the driver buffer small so frames stay fresh
        camera.set(cv2.CAP_PROP_BUFFERSIZE, 1)

        if not camera.isOpened():
            raise RuntimeError('Could not start camera.')

        cv2.setUseOptimized(True)

        while True:
            # read the current frame, rotate it, and run the detectors
            _, img = camera.read()
            img = cv2.warpAffine(img, M, (320, 240))
            img = Vilib.human_detect_func(img)
            img = Vilib.color_detect_func(img)

            # encode as a JPEG image and return it
            yield cv2.imencode('.jpg', img)[1].tobytes()
    def frames():
        # Alternative implementation using picamera; if both definitions are
        # kept, this one replaces the OpenCV version above.
        with picamera.PiCamera() as camera:
            camera.resolution = (320, 240)
            camera.framerate = 32
            camera.rotation = 180

            rawCapture = PiRGBArray(camera, size=(320, 240))

            # enable the vilib colour and human detectors
            Vilib.cdf_flag = True
            Vilib.hdf_flag = True
            Vilib.color_change('blue')

            cv2.setUseOptimized(True)

            for frame in camera.capture_continuous(rawCapture,
                                                   format="bgr",
                                                   use_video_port=True):
                img = frame.array

                # time the detection step and print the elapsed seconds
                t1 = cv2.getTickCount()
                img = Vilib.color_detect_func(img)
                img = Vilib.human_detect_func(img)
                t2 = cv2.getTickCount()
                print(round((t2 - t1) / cv2.getTickFrequency(), 3))

                # encode as a JPEG image and return it
                yield cv2.imencode('.jpg', img)[1].tobytes()

                # clear the stream in preparation for the next frame
                rawCapture.truncate(0)
# Standalone test script. The camera set-up below mirrors the picamera
# frames() implementation above; the imports (time, cv2, picamera,
# picamera.array.PiRGBArray, Vilib) are assumed to be in place.
camera = picamera.PiCamera()
camera.resolution = (320, 240)
camera.framerate = 32
camera.rotation = 180
rawCapture = PiRGBArray(camera, size=(320, 240))

# allow the camera to warm up
time.sleep(0.1)

# enable the vilib colour and human detectors
Vilib.cdf_flag = True
Vilib.hdf_flag = True
Vilib.color_change('blue')

cv2.setUseOptimized(True)

# capture frames from the camera
for frame in camera.capture_continuous(rawCapture,
                                       format="bgr",
                                       use_video_port=True):
    # grab the raw NumPy array representing the image
    img = frame.array

    # time the detection step and print the elapsed seconds
    t1 = cv2.getTickCount()
    img = Vilib.color_detect_func(img)
    img = Vilib.human_detect_func(img)
    t2 = cv2.getTickCount()
    print(round((t2 - t1) / cv2.getTickFrequency(), 3))

    # clear the stream in preparation for the next frame
    rawCapture.truncate(0)
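
Both frames() variants yield raw JPEG bytes, which is the form typically consumed by an MJPEG (multipart/x-mixed-replace) HTTP stream. The sketch below is only a minimal illustration of that consumption side, assuming Flask is available and that a Camera class exposing one of the frames() generators above can be imported; the route name, port, and import path are assumptions, not part of the original example.

# Minimal sketch, assuming Flask and a Camera class whose frames()
# generator yields JPEG bytes, as in the examples above.
from flask import Flask, Response

# from camera import Camera  # hypothetical import; adjust to the project layout

app = Flask(__name__)

def mjpeg(frame_source):
    # Wrap each JPEG frame in a multipart boundary so the browser keeps
    # replacing the displayed image as new frames arrive.
    for jpeg_bytes in frame_source:
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + jpeg_bytes + b'\r\n')

@app.route('/video_feed')
def video_feed():
    # Camera.frames() stands in for either generator defined above.
    return Response(mjpeg(Camera.frames()),
                    mimetype='multipart/x-mixed-replace; boundary=frame')

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=9000)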