Example #1
    def __init__(self):
        super().__init__()
        sys.path.append('./python_glview')
        import pyglview

        self._glviewer = pyglview.Viewer(
            keyboard_listener=self._proxy_kb_listener)
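Example #1 shows only a fragment of an __init__ method. Below is a minimal sketch of an enclosing wrapper class, assuming a hypothetical class name and a listener that simply logs whatever pyglview passes it:

import sys


class GLViewerWrapper:
    # Hypothetical wrapper; the original class and its base are not shown.
    def __init__(self):
        super().__init__()
        sys.path.append('./python_glview')
        import pyglview

        self._glviewer = pyglview.Viewer(
            keyboard_listener=self._proxy_kb_listener)

    def _proxy_kb_listener(self, *args):
        # The exact callback signature is an assumption; just log the event.
        print("key event:", args)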
Example #2
import cv2
import acapture
import sys
import numpy as np
import colorsys

sys.path.append('./python_glview') 

import pyglview
from OpenGL.GLUT import *
from logging import warning

bg = cv2.imread('bg.jpg', cv2.IMREAD_COLOR)
bg = cv2.resize(bg, dsize=(1280, 720), interpolation=cv2.INTER_CUBIC)

viewer = pyglview.Viewer()
cap = acapture.open(0) # Camera 0,  /dev/video0


got_sample = False
sample_range = None
sample_size = (10, 10)
threshold = 20

kernel = np.ones((10,10), np.uint8)

def restrict(color):
    # https://docs.opencv.org/trunk/df/d9d/tutorial_py_colorspaces.html
    # Max and min values of H, S, V in OpenCV
    lo = (0, 0, 0)
    hi = (179, 255, 255)
    # Assumed completion (the source snippet is truncated here):
    # clamp each HSV channel into OpenCV's valid range.
    return tuple(int(np.clip(c, l, h)) for c, l, h in zip(color, lo, hi))
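Nothing in this excerpt sets got_sample or sample_range; judging from Example #5, a sampling callback registered via viewer.set_sample_cb presumably builds the HSV range around a sampled patch. A hedged sketch of such a callback (the name sample_cb and its (frame, x, y) signature are assumptions):

def sample_cb(frame, x, y):
    # Hypothetical: average a small HSV patch around (x, y) and widen it
    # by `threshold` to build the (lower, upper) pair used by cv2.inRange.
    global got_sample, sample_range
    hsv = cv2.cvtColor(frame, cv2.COLOR_RGB2HSV)
    h, w = sample_size
    patch = hsv[y:y + h, x:x + w].reshape(-1, 3).mean(axis=0)
    lower = restrict(patch - threshold)
    upper = restrict(patch + threshold)
    sample_range = (np.array(lower, np.uint8), np.array(upper, np.uint8))
    got_sample = True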
Example #3
#!/usr/bin/env python3
import os
import pyglview
import cv2

cap = cv2.VideoCapture(os.path.join(os.path.expanduser('~'), "test.mp4"))
viewer = pyglview.Viewer(window_width=512,
                         window_height=512,
                         fullscreen=False,
                         opengl_direct=True)
# viewer = pyglview.Viewer(window_width=512,window_height=512,fullscreen=False,opengl_direct=False)


def loop():
    check, frame = cap.read()
    if check:
        frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        viewer.set_image(frame)
    pass


viewer.set_loop(loop)
viewer.start()
print("Main thread ended")
Example #4
def image2image(HOST, PORT, fd, quality):
    cap = aimage.open(fd)

    class ProtocolStack(bridge.client.StreamClientFactory):
        def on_connected(self):
            s = self.protocol_instance
            s.add_output_protocol(
                bridge.protocol.ImageEncoder(quality=quality))
            s.add_output_protocol(bridge.protocol.LengthSplitOut())

            s.add_input_protocol(bridge.protocol.LengthSplitIn())
            s.add_input_protocol(bridge.protocol.ImageDecoder())

        def on_disconnected(self):
            pass

    client_socket = bridge.client.EaterBridgeClient(
        host=HOST, port=PORT, protocol_stack=ProtocolStack)
    client_socket.start()

    def terminate(a, b):
        global G_stop_signal
        G_stop_signal = True
        client_socket.destroy()

    signal.signal(signal.SIGINT, terminate)
    signal.signal(signal.SIGTERM, terminate)

    req_queue_count = 0
    req_fps_cache = 0
    req_fps_count = 0
    v_fps_count = 0
    v_fps_cache = 0
    time_cache = 0

    vargs = edict()
    vargs.keyboard_listener = cap.keyboard_listener
    vargs.cpu = False
    vargs.double_buffer = True
    vargs.fullscreen = False
    vargs.fps = cap.fps
    vargs.quality = quality
    vargs.window_x = 50
    vargs.window_y = 50
    vargs.window_width = int(cap.width)
    vargs.window_height = int(cap.height)
    frame_times = np.zeros((vargs.window_width, 4), dtype=np.float32)
    frame_time_queue = []
    frame_index = 0
    f_fps_cache = 0
    f_fps_count = 0
    previous_time = 0
    fps_limit = 1.0 / vargs.fps

    def tob(b):
        return 'T' if b else 'F'

    display_info = f'CPU:{tob(vargs.cpu)} DBf:{tob(vargs.double_buffer)} FS:{tob(vargs.fullscreen)} {int(vargs.window_width)}x{int(vargs.window_height)} sFPS:{vargs.fps}'

    view = pyglview.Viewer(**vargs)
    message = ""
    previous_img = None

    def loop():
        nonlocal previous_img, client_socket, fps_limit, previous_time, time_cache, cap, view, f_fps_count, f_fps_cache, v_fps_count, v_fps_cache, req_fps_cache, req_fps_count, req_queue_count, message, frame_times, frame_time_queue, frame_index
        if G_stop_signal:
            raise Exception("Stop")
        now = time.time()
        if now - time_cache > 1.0:
            mem = psutil.virtual_memory()
            cpu_res = f'CPU:{str(psutil.cpu_percent()).rjust(4)}% '
            mresources = '{}{}MB({}%)'.format(
                cpu_res,
                str(round(mem.used / 1024 / 1024, 2)).rjust(8),
                str(round(mem.percent, 1)).rjust(4))
            v_fps_cache = v_fps_count
            req_fps_cache = req_fps_count
            f_fps_cache = f_fps_count
            message = f"CFPS:{str(f_fps_cache).rjust(3)} GLFPS:{str(v_fps_cache).rjust(3)} REQ:{str(req_fps_cache).rjust(3)} {mresources}"

            print("\033[0K", end="", flush=True)
            print(message, flush=True)
            print("\033[1A", end="", flush=True)

            time_cache = now
            v_fps_count = 0
            req_fps_count = 0
            f_fps_count = 0

        LIMIT_REQ = 8
        LIMIT_HF_REQ = 5
        if req_fps_cache < 20:
            LIMIT_REQ = 4
            LIMIT_HF_REQ = 3
        elif req_fps_cache < 30:
            LIMIT_REQ = 5
            LIMIT_HF_REQ = 4
        elif req_fps_cache < 40:
            LIMIT_REQ = 6
            LIMIT_HF_REQ = 4
        elif req_fps_cache < 50:
            LIMIT_REQ = 7
            LIMIT_HF_REQ = 5

        if req_queue_count <= LIMIT_REQ and now - previous_time >= (
                1.0 / (req_fps_cache + 1) * 0.90):
            previous_time = now
            push = True
            if req_queue_count >= LIMIT_HF_REQ:
                push = False
                previous_time = previous_time - (1.0 / (req_fps_cache + 1) *
                                                 0.90) * 0.9
            if push:
                check, img = cap.read()
                if previous_img is not None:
                    if img is previous_img:
                        check = False
                    else:
                        pass
                previous_img = img
                if check:
                    client_socket.write([img])
                    req_queue_count += 1
                    frame_time_queue.append(now)
        img = None
        blocks = client_socket.read()
        if blocks is not None:
            if isinstance(blocks, list):
                for data in blocks:
                    if img is None: img = np.array(data)
                    sub = now - frame_time_queue.pop(0)
                    sub *= 1000
                    req_queue_count -= 1
                    req_fps_count += 1
        if img is not None:
            aimage.draw_title(
                img=img,
                message=display_info +
                f' {str(int(sub)).rjust(4)}ms B:{str(req_queue_count).rjust(2)}'
            )
            aimage.draw_footer(img=img, message=message)
            view.set_image(img)
            frame_index += 1
            v_fps_count += 1
        f_fps_count += 1

    view.set_loop(loop)
    view.start()
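image2image() is an excerpt from a larger module; below is a hedged guess at the module-level imports and the stop flag it assumes (the easydict alias for edict and the bridge import paths are assumptions, not confirmed by the source):

import signal
import time

import numpy as np
import psutil

import aimage
import bridge.client    # assumed import path for the EaterBridge client/protocol
import bridge.protocol
import pyglview
from easydict import EasyDict as edict  # assumed source of `edict`

G_stop_signal = False  # module-level flag checked inside loop() and set by terminate()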
Example #5
def loop():
    check, frame = cap.read()  # non-blocking

    if check:
        if got_sample:
            image_copy = cv2.cvtColor(frame, cv2.COLOR_RGB2HSV)

            mask = cv2.inRange(image_copy, *sample_range)

            mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)

            # make a copy of the resized BG to apply mask
            bg_masked = np.copy(resize_bg(image_copy.shape[:2]))

            bg_masked[mask==0] = [0, 0, 0]

            frame[mask != 0] = [0, 0, 0]

            frame = frame + bg_masked

        # Add text
        frame = text(frame)

        # Set the frame on the OpenGL window
        viewer.set_image(frame)

viewer = pyglview.Viewer(keyboard_listener=keyboard_listener)
viewer.set_sample_size(sample_size)
viewer.set_sample_cb(sample_cb)
viewer.set_loop(loop)
viewer.start()
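The excerpt calls several helpers defined elsewhere in the file (resize_bg, text, sample_cb, keyboard_listener). Hypothetical minimal versions of the first two, just to show the expected shapes (the names match the calls above; the bodies are guesses):

def resize_bg(shape):
    # Hypothetical: fit the global background image to the frame's (h, w).
    h, w = shape
    return cv2.resize(bg, dsize=(w, h), interpolation=cv2.INTER_CUBIC)


def text(frame):
    # Hypothetical: overlay a short status line on the frame.
    cv2.putText(frame, "chroma-key demo", (10, 30),
                cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 255, 255), 2)
    return frame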
Example #6
#!/usr/bin/env python3
import os
import sys

import acapture
import numpy as np
import pyglview

import evaluator

basepath = os.getcwd()
sys.path.append(basepath)

if len(sys.argv) > 1:
    f = sys.argv[1]
else:
    f = 0  # assumed fallback: open the default camera when no file is given
cap = acapture.open(f)
view = pyglview.Viewer(keyboard_listener=cap.keyboard_listener)
model = evaluator.Evaluator()


def loop():
    try:
        check, frame = cap.read()
        if check:
            frame = model.render(frame)
            view.set_image(np.array(frame))
    except Exception as e:
        print(e)
        exit(9)


view.set_loop(loop)
view.start()
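evaluator is the user's own module, not a published package; the only interface the example relies on is a render(frame) method, so a hypothetical stand-in looks like this:

# evaluator.py (hypothetical stub)
class Evaluator:
    def render(self, frame):
        # A real model would draw its predictions onto the frame here.
        return frame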
Example #7
import cv2
import numpy as np


def gamma(img, g):
    # Assumed signature, inferred from the gamma(frame, 0.6) call below:
    # gamma-correct `img` with exponent `g` via a 256-entry lookup table.
    lookUpTable = np.empty((1, 256), np.uint8)
    for i in range(256):
        lookUpTable[0, i] = np.clip(pow(i / 255.0, g) * 255.0, 0, 255)
    img = cv2.LUT(img, lookUpTable)
    return img


if __name__ == '__main__':
    import acapture
    import pyglview
    import sys
    import os
    import traceback

    cap = acapture.open(sys.argv[1] if len(sys.argv) > 1 else os.path.
                        join(os.path.expanduser('~'), "test.mp4"))

    view = pyglview.Viewer(keyboard_listener=cap.keyboard_listener,
                           fullscreen=False)

    def loop():
        try:
            check, frame = cap.read()
            if check:
                view.set_image(frame)
                # view.set_image(gamma(frame, 0.6))
        except:
            traceback.print_exc()
            exit(9)
        pass

    view.set_loop(loop)
    view.start()
    print("Main thread ended")