Example #1
    def openValkka(self):
        self.thread = QValkkaThread() # the thread that's watching the mvision_processes
        self.thread.start()
        
        self.mvision_process.start()
        self.thread.addProcess(self.mvision_process)
        
        # """
        self.livethread = LiveThread(         # starts live stream services (using live555)
            name="live_thread",
            verbose=False
        )

        self.filethread = FileThread(
            name="file_thread",
            verbose=False
        )

        self.openglthread = OpenGLThread(     # starts frame presenting services
            name="mythread",
            n_720p=10,
            n_1080p=10,
            n_1440p=10,
            n_4K=10,
            verbose=False,
            msbuftime=100,
            affinity=-1
        )

        # this filterchain creates a shared memory server
        self.chain = ShmemFilterchain1(       # decoding and branching the stream happens here
            openglthread = self.openglthread,
            slot = 1,
            shmem_name = self.shmem_name,
            shmem_image_dimensions = self.shmem_image_dimensions,
            shmem_image_interval = self.shmem_image_interval,
            shmem_ringbuffer_size = self.shmem_ringbuffer_size
        )

        shmem_name, n_buffer, shmem_image_dimensions = self.chain.getShmemPars()
        self.video = QtWidgets.QWidget(self.video_area)
        self.win_id = int(self.video.winId())

        self.video_lay.addWidget(self.video, 0, 0)
        self.token = self.openglthread.connect(slot = 1, window_id = self.win_id)

        self.chain.decodingOn()  # tell the decoding thread to start its job

        self.mvision_process.activate(
            n_buffer                = self.shmem_ringbuffer_size, 
            image_dimensions        = self.shmem_image_dimensions, 
            shmem_name              = self.shmem_name  
        )
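For symmetry, a minimal teardown sketch for the objects opened above, mirroring the closeValkka methods shown in the later examples on this page; the filethread.close() call is an assumption based on the API used elsewhere here.

    def closeValkka(self):
        # minimal teardown sketch, mirroring closeValkka in Example #7
        self.livethread.close()
        self.filethread.close()               # assumption: FileThread exposes close() like LiveThread
        self.chain.close()                    # closes the filterchain / shared memory server
        self.chain = None
        self.openglthread.close()
        self.mvision_process.requestStop()    # as in Example #7's closeValkka
        self.mvision_process.waitStop()
        self.thread.stop()                    # stop the QThread watching the mvision_processes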
Example #2
  def __init__(self, pardic):
    self.pardic=pardic
    self.true_screens =[]
    self.openglthreads=[]
    self.findXScreens()
    
    # self.true_screens=[self.true_screens[0]]
    
    for n_gpu, screen in enumerate(self.true_screens):
    
      x_connection=":0."+str(n_gpu)
      # x_connection=":0.1"
      # x_connection=":1.0" # nopes
    
      print(pre,"GPUHandler: starting OpenGLThread with",x_connection)
    
      openglthread=OpenGLThread(     # starts frame presenting services
        name    ="gpu_"+str(n_gpu),
        n_720p   =self.pardic["n_720p"],   # reserve stacks of YUV video frames for various resolutions
        n_1080p  =self.pardic["n_1080p"],
        n_1440p  =self.pardic["n_1440p"],
        n_4K     =self.pardic["n_4K"],
        verbose =False,
        msbuftime=self.pardic["msbuftime"],
        affinity=self.pardic["gl affinity"],
        x_connection =x_connection
        )
      
      print(pre,"GPUHandler: OpenGLThread started")

      self.openglthreads.append(openglthread)
      
    if (self.openglthreads[0].hadVsync()):
      w=QtWidgets.QMessageBox.warning(None,"VBLANK WARNING","Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'")
    def __init__(self, **kwargs):
        self.pre = self.__class__.__name__ + " : "  # auxiliary string for debugging output
        parameterInitCheck(
            GPUHandler.parameter_defs, kwargs, self
        )  # check kwargs agains parameter_defs, attach ok'd parameters to this object as attributes
        self.kwargs = kwargs
        self.true_screens = []  # list of QtCore.QScreen
        self.openglthreads = []  # list of OpenGLThread instances
        self.findXScreens()

        # self.true_screens=[self.true_screens[0]]

        for n_gpu, screen in enumerate(self.true_screens):

            x_connection = ":0." + str(n_gpu)
            # x_connection=":0.1"
            # x_connection=":1.0" # nopes

            print(pre, "GPUHandler: starting OpenGLThread with", x_connection)

            affinity = -1
            if self.cpu_scheme:
                affinity = self.cpu_scheme.getOpenGL()

            openglthread = OpenGLThread(
                name="gpu_" + str(n_gpu),
                # reserve stacks of YUV video frames for various resolutions
                n_720p=self.n_720p,
                n_1080p=self.n_1080p,
                n_1440p=self.n_1440p,
                n_4K=self.n_4K,
                verbose=False,
                msbuftime=self.msbuftime,
                affinity=affinity,
                x_connection=x_connection)

            print(pre, "GPUHandler: OpenGLThread started")

            self.openglthreads.append(openglthread)
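The handler above keeps one OpenGLThread per X-screen in self.openglthreads. A hedged usage sketch follows, reusing only the createWindow and connect calls that appear elsewhere in these examples; the GPUHandler constructor values are placeholders.

# sketch: show stream slot 1 in one window per GPU / X-screen
gpu_handler = GPUHandler(n_720p=10, n_1080p=10, n_1440p=10, n_4K=10,
                         msbuftime=100, cpu_scheme=None)     # placeholder parameters
tokens = []
for openglthread in gpu_handler.openglthreads:
    win_id = openglthread.createWindow(show=True)            # let Valkka create an x-window on that screen
    tokens.append(openglthread.connect(slot=1, window_id=win_id))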
  def openValkka(self):
    self.livethread=LiveThread(         # starts live stream services (using live555)
      name   ="live_thread",
      verbose=False
    )

    self.filethread=FileThread(
      name  ="file_thread",
      verbose=False
    )

    self.openglthread=OpenGLThread(     # starts frame presenting services
      name    ="mythread",
      n_720p   =10,
      n_1080p  =10,
      n_1440p  =10,
      n_4K     =10,
      verbose =False,
      msbuftime=100,
      affinity=-1
      )
    
    if (self.openglthread.hadVsync()):
      w=QtWidgets.QMessageBox.warning(self,"VBLANK WARNING","Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'")
    
    cc=1
    
    self.chain=ShmemFilterchain1(       # decoding and branching the stream happens here
      openglthread=self.openglthread,
      slot        =cc,
      # this filterchain creates a shared memory server
      shmem_name             ="test_studio_file_"+str(cc),
      shmem_image_dimensions =(1920//4,1080//4),  # Images passed over shmem are quarter of the full-hd reso
      shmem_image_interval   =1000,               # YUV => RGB interpolation to the small size is done each 1000 milliseconds and passed on to the shmem ringbuffer
      shmem_ringbuffer_size  =10                  # Size of the shmem ringbuffer
      )
    
    shmem_name, n_buffer, shmem_image_dimensions =self.chain.getShmemPars()    
    # print(pre,"shmem_name, n_buffer, n_bytes",shmem_name,n_buffer,n_bytes)
    
    self.process=QValkkaMovementDetectorProcess("process_"+str(cc),shmem_name=shmem_name, n_buffer=n_buffer, image_dimensions=shmem_image_dimensions)
    
    self.process.signals.start_move.connect(self.set_moving_slot)
    self.process.signals.stop_move.connect(self.set_still_slot)
    
    if (valkka_xwin):
      # (1) Let OpenGLThread create the window
      self.win_id      =self.openglthread.createWindow(show=False)
      self.widget_pair =WidgetPair(self.video_area,self.win_id,TestWidget0)
      self.video       =self.widget_pair.getWidget()
    else:
      # (2) Let Qt create the window
      self.video     =QtWidgets.QWidget(self.video_area)
      self.win_id    =int(self.video.winId())
    
    self.video_lay.addWidget(self.video,0,0)
    self.token =self.openglthread.connect(slot=cc,window_id=self.win_id)
    
    self.chain.decodingOn() # tell the decoding thread to start its job
    
    # finally, give the multiprocesses to a qthread that's reading their message pipe
    self.thread =QValkkaThread(processes=[self.process])
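Constructing the QValkkaThread does not start anything by itself; a short sketch of the remaining start-up calls, following the startProcesses pattern and the createClient call used in the MyGui example below:

    # sketch: start the message-watching QThread and the analyzer multiprocess
    self.thread.start()
    self.process.start()
    self.process.createClient()   # creates the shared memory client at the multiprocess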
class MyGui(QtWidgets.QMainWindow):
    class Frame:
        """Create a frame with text (indicating movement) and a video frame.  The video frame is created from a "foreign" window (created by Valkka)
        """
        def __init__(self, parent, win_id):
            self.widget = QtWidgets.QWidget(parent)
            self.lay = QtWidgets.QVBoxLayout(self.widget)

            self.text = QtWidgets.QLabel("", self.widget)
            self.text_stylesheet = self.text.styleSheet()

            # create the foreign widget / normal widget pair
            # normal widget of class TestWidget0
            self.widget_pair = WidgetPair(self.widget, win_id, TestWidget0)
            self.video = self.widget_pair.getWidget()

            self.lay.addWidget(self.text)
            self.lay.addWidget(self.video)
            self.text.setSizePolicy(QtWidgets.QSizePolicy.Minimum,
                                    QtWidgets.QSizePolicy.Minimum)
            self.video.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                     QtWidgets.QSizePolicy.Expanding)
            self.set_still()

        def setText(self, txt):
            self.text.setText(txt)

        def set_still(self):
            self.setText("still")
            self.widget.setStyleSheet(self.text_stylesheet)

        def set_moving(self):
            self.setText("MOVING")
            self.widget.setStyleSheet(
                "QLabel {border: 2px; border-style:solid; border-color: red; margin:0 px; padding:0 px; border-radius:8px;}"
            )

    class NativeFrame:
        """Create a frame with text (indicating movement) and a video frame.  The video frame is created by Qt.
        """
        def __init__(self, parent):
            self.widget = QtWidgets.QWidget(parent)
            self.lay = QtWidgets.QVBoxLayout(self.widget)

            self.text = QtWidgets.QLabel("", self.widget)
            self.text_stylesheet = self.text.styleSheet()

            self.video = QtWidgets.QWidget(self.widget)

            self.lay.addWidget(self.text)
            self.lay.addWidget(self.video)
            self.text.setSizePolicy(QtWidgets.QSizePolicy.Minimum,
                                    QtWidgets.QSizePolicy.Minimum)
            self.video.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                     QtWidgets.QSizePolicy.Expanding)
            self.set_still()

        def getWindowId(self):
            return int(self.video.winId())

        def setText(self, txt):
            self.text.setText(txt)

        def set_still(self):
            self.setText("still")
            self.widget.setStyleSheet(self.text_stylesheet)

        def set_moving(self):
            self.setText("MOVING")
            self.widget.setStyleSheet(
                "QLabel {border: 2px; border-style:solid; border-color: red; margin:0 px; padding:0 px; border-radius:8px;}"
            )

    debug = False

    # debug=True

    def __init__(self, pardic, parent=None):
        super(MyGui, self).__init__()
        self.pardic = pardic
        self.initVars()
        self.setupUi()
        if (self.debug):
            return
        self.openValkka()

    def initVars(self):
        pass

    def setupUi(self):
        self.setGeometry(QtCore.QRect(100, 100, 800, 800))
        self.w = QtWidgets.QWidget(self)
        self.setCentralWidget(self.w)
        self.lay = QtWidgets.QGridLayout(self.w)
        self.frames = []  # frames with movement detector alert and video
        self.addresses = self.pardic["cams"]

    def openValkka(self):
        # some constant values
        # Images passed over shmem are quarter of the full-hd reso
        shmem_image_dimensions = (1920 // 4, 1080 // 4)
        # YUV => RGB interpolation to the small size is done each 1000
        # milliseconds and passed on to the shmem ringbuffer
        shmem_image_interval = 1000
        shmem_ringbuffer_size = 10

        # the very first thing: create & start multiprocesses
        cs = 1
        self.processes = []
        for address in self.addresses:
            shmem_name = "test_studio_" + str(cs)
            process = QValkkaMovementDetectorProcess(
                "process_" + str(cs),
                shmem_name=shmem_name,
                n_buffer=shmem_ringbuffer_size,
                image_dimensions=shmem_image_dimensions)
            self.processes.append(process)

        print(self.processes)

        # Give the multiprocesses to a qthread that's reading their message
        # pipe
        self.thread = QValkkaThread(processes=self.processes)

        # starts the multiprocesses
        self.startProcesses()
        # ..so, forks have been done.  Now we can spawn threads

        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            verbose=False,
            affinity=self.pardic["live affinity"])

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=self.pardic["n_720p"],
            n_1080p=self.pardic["n_1080p"],
            n_1440p=self.pardic["n_1440p"],
            n_4K=self.pardic["n_4K"],
            # naudio  =self.pardic["naudio"], # obsolete
            verbose=False,
            msbuftime=self.pardic["msbuftime"],
            affinity=self.pardic["gl affinity"])

        if (self.openglthread.hadVsync()):
            w = QtWidgets.QMessageBox.warning(
                self, "VBLANK WARNING",
                "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'"
            )

        tokens = []
        self.chains = []
        self.frames = []
        cs = 1
        cc = 0
        a = self.pardic["dec affinity start"]

        for address in self.addresses:
            # now livethread and openglthread are running
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]
            print(pre, "openValkka: setting decoder thread on processor", a)

            # this filterchain creates a shared memory server
            # the name identifying the shared memory buffer must be the same
            # as in the multiprocess
            chain = ShmemFilterchain(  # decoding and branching the stream happens here
                livethread=self.livethread,
                openglthread=self.openglthread,
                address=address,
                slot=cs,
                affinity=a,
                shmem_name="test_studio_" + str(cs),
                shmem_image_dimensions=shmem_image_dimensions,
                shmem_image_interval=shmem_image_interval,
                shmem_ringbuffer_size=shmem_ringbuffer_size,
                msreconnect=10000
                # time_correction   =TimeCorrectionType_smart # this is the default, no need to specify
            )
            self.chains.append(chain)

            if (valkka_xwin):
                win_id = self.openglthread.createWindow(show=False)
                frame = self.Frame(self.w, win_id)
            else:
                frame = self.NativeFrame(self.w)
                win_id = frame.getWindowId()

            # print(pre,"setupUi: layout index, address : ",cc//4,cc%4,address)
            # self.lay.addWidget(frame.widget,cc//4,cc%4)

            nrow = self.pardic["videos per row"]
            print(pre, "setupUi: layout index, address : ", cc // nrow,
                  cc % nrow, address)
            self.lay.addWidget(frame.widget, cc // nrow, cc % nrow)

            self.frames.append(frame)

            token = self.openglthread.connect(slot=cs, window_id=win_id)
            tokens.append(token)

            # take corresponding analyzer multiprocess
            process = self.processes[cc]
            process.createClient()  # creates the shared memory client at the multiprocess
            # connect signals to the nested widget
            process.signals.start_move.connect(frame.set_moving)
            process.signals.stop_move.connect(frame.set_still)

            chain.decodingOn()  # tell the decoding thread to start its job
            cs += 1  # TODO: crash when repeating the same slot number ..?
            a += 1
            cc += 1

    def startProcesses(self):
        self.thread.start()
        for p in self.processes:
            p.start()

    def stopProcesses(self):
        for p in self.processes:
            p.stop()
        print(pre, "stopping QThread")
        self.thread.stop()
        print(pre, "QThread stopped")

    def closeValkka(self):
        self.livethread.close()

        for chain in self.chains:
            chain.close()

        self.chains = []
        self.widget_pairs = []
        self.videoframes = []
        self.openglthread.close()

    def closeEvent(self, e):
        print(pre, "closeEvent!")
        self.stopProcesses()
        self.closeValkka()
        super().closeEvent(e)
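A hedged launcher sketch for the MyGui class above; the pardic keys are the ones the class reads, while the values and the camera address are placeholders.

if __name__ == "__main__":
    pardic = {
        "cams"               : ["rtsp://user:password@192.168.1.10"],   # placeholder address
        "n_720p"             : 10, "n_1080p" : 10, "n_1440p" : 10, "n_4K" : 10,
        "msbuftime"          : 100,
        "live affinity"      : -1,
        "gl affinity"        : -1,
        "dec affinity start" : -1,
        "dec affinity stop"  : -1,
        "videos per row"     : 2
    }
    app = QtWidgets.QApplication(["test_studio"])
    gui = MyGui(pardic)
    gui.show()
    app.exec_()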
Example #7
class FileGUI(QtWidgets.QMainWindow):
    """Test your machine vision mvision_process and its widget with video files
    
    :param mvision_process:          QValkkaMultimvision_process-derived class
    :param shmem_image_interval:     How often the image is interpolated into rgb and passed to the mvision_process (milliseconds)
    """
    def __init__(self,
                 mvision_process,
                 mvision_master_process,
                 shmem_image_interval=1000,
                 shmem_ringbuffer_size=10,
                 shmem_image_dimensions=(1920 // 2, 1080 // 2),
                 shmem_name="test",
                 init_filename=None):

        super().__init__()
        assert (issubclass(mvision_process.__class__, QShmemProcess))

        self.mvision_process = mvision_process
        self.mvision_master_process = mvision_master_process

        # self.mvision_class          = mvision_class,
        self.shmem_image_interval = shmem_image_interval
        self.shmem_ringbuffer_size = shmem_ringbuffer_size
        self.shmem_image_dimensions = shmem_image_dimensions
        self.shmem_name = shmem_name

        self.init_filename = init_filename

        self.initVars()
        self.setupUi()

        self.mvision_widget = self.mvision_process.getWidget()
        # self.mvision_widget = QtWidgets.QWidget()
        self.mvision_widget.setParent(self.widget)
        self.widget_lay.addWidget(self.mvision_widget)

        self.mvision_widget.setSizePolicy(QtWidgets.QSizePolicy.Minimum,
                                          QtWidgets.QSizePolicy.Minimum)

        self.openValkka()

        if len(sys.argv) > 2:
            self.open_file_button_slot(fname_=sys.argv[2])

    def initVars(self):
        self.mode = "file"
        self.slot_reserved = False

    def setupUi(self):

        rec = QtWidgets.QApplication.desktop().screenGeometry()
        height = rec.height()
        width = rec.width()

        self.setGeometry(QtCore.QRect(0, 0, width, height // 2))
        self.w = QtWidgets.QWidget(self)
        self.setCentralWidget(self.w)
        self.lay = QtWidgets.QVBoxLayout(self.w)

        # return

        # divide window into three parts
        self.upper = QtWidgets.QWidget(self.w)
        self.middle = QtWidgets.QWidget(self.w)
        self.lower = QtWidgets.QWidget(self.w)
        self.lowest = QtWidgets.QWidget(self.w)
        self.lay.addWidget(self.upper)
        self.lay.addWidget(self.middle)
        self.lay.addWidget(self.lower)
        self.lay.addWidget(self.lowest)

        # upper part: detectors widget and the video itself
        self.upperlay = QtWidgets.QHBoxLayout(self.upper)

        # self.widget  =QtWidgets.QTextEdit(self.upper)
        self.widget = QtWidgets.QWidget(self.upper)
        self.widget_lay = QtWidgets.QVBoxLayout(self.widget)

        # self.widget = self.mvision_process.getWidget()
        # self.widget.setParent(self.upper)

        self.video_area = QtWidgets.QWidget(self.upper)
        self.video_lay = QtWidgets.QGridLayout(self.video_area)

        self.upperlay.addWidget(self.widget)
        self.upperlay.addWidget(self.video_area)
        self.widget.setSizePolicy(QtWidgets.QSizePolicy.Minimum,
                                  QtWidgets.QSizePolicy.Minimum)
        self.video_area.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                      QtWidgets.QSizePolicy.Expanding)
        """
        [------|--------------------------------------]
        [Open File] [Close Live] [Play] [Stop] [Rewind]
        """

        self.middlelay = QtWidgets.QHBoxLayout(self.middle)
        self.slider = QtWidgets.QSlider(QtCore.Qt.Orientation.Horizontal,
                                        self.middle)
        self.middlelay.addWidget(self.slider)
        self.slider.setTracking(False)

        self.lowerlay = QtWidgets.QHBoxLayout(self.lower)
        self.open_file_button = QtWidgets.QPushButton("Open File", self.lower)
        self.close_file_button = QtWidgets.QPushButton("Close File",
                                                       self.lower)
        self.play_button = QtWidgets.QPushButton("Play", self.lower)
        self.stop_button = QtWidgets.QPushButton("Stop", self.lower)
        self.rewind_button = QtWidgets.QPushButton("<<", self.lower)
        self.seek_label = QtWidgets.QLabel("<<", self.lower)

        self.lowerlay.addWidget(self.open_file_button)
        self.lowerlay.addWidget(self.close_file_button)
        self.lowerlay.addWidget(self.play_button)
        self.lowerlay.addWidget(self.stop_button)
        self.lowerlay.addWidget(self.rewind_button)
        self.lowerlay.addWidget(self.seek_label)

        self.open_file_button.clicked.connect(self.open_file_button_slot)
        self.close_file_button.clicked.connect(self.close_file_button_slot)
        self.play_button.clicked.connect(self.play_button_slot)
        self.stop_button.clicked.connect(self.stop_button_slot)
        self.rewind_button.clicked.connect(self.rewind_button_slot)
        self.slider.valueChanged.connect(self.slider_slot)

        # lowest part: some text
        self.lowestlay = QtWidgets.QVBoxLayout(self.lowest)
        self.infotext = QtWidgets.QLabel("info text", self.lowest)
        self.lowestlay.addWidget(self.infotext)

    def openValkka(self):
        self.mvision_process.go()

        if self.mvision_master_process is not None:
            assert (issubclass(self.mvision_master_process.__class__,
                               QShmemProcess))
            self.mvision_master_process.go()

        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            verbose=False)

        self.filethread = FileThread(name="file_thread", verbose=False)

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            n_720p=10,
            n_1080p=10,
            n_1440p=10,
            n_4K=10,
            verbose=False,
            msbuftime=100,
            affinity=-1)

        # this filterchain creates a shared memory server
        self.chain = ShmemFilterchain1(  # decoding and branching the stream happens here
            openglthread=self.openglthread,
            slot=1,
            shmem_name=self.shmem_name,
            shmem_image_dimensions=self.shmem_image_dimensions,
            shmem_image_interval=self.shmem_image_interval,
            shmem_ringbuffer_size=self.shmem_ringbuffer_size)

        shmem_name, n_buffer, shmem_image_dimensions = self.chain.getShmemPars()

        self.video = QtWidgets.QWidget(self.video_area)

        if hasattr(self.mvision_process, "analyzer_video_widget_class"):
            # the machine vision class may declare what video widget it wants to use to define the machine vision parameters (line crossing, zone intrusion, etc.)
            self.analyzer_widget = AnalyzerWidget(
                parent=self.video_area,
                analyzer_video_widget_class=self.mvision_process.analyzer_video_widget_class)
        else:
            self.analyzer_widget = AnalyzerWidget(parent=self.video_area)

        self.mvision_process.connectAnalyzerWidget(self.analyzer_widget)
        self.analyzer_widget.activate()

        self.win_id = int(self.video.winId())

        self.video_lay.addWidget(self.video, 0, 0)
        self.video_lay.addWidget(self.analyzer_widget, 0, 1)
        self.token = self.openglthread.connect(slot=1, window_id=self.win_id)

        self.chain.decodingOn()  # tell the decoding thread to start its job

        self.mvision_process.activate(
            n_buffer=self.shmem_ringbuffer_size,
            image_dimensions=self.shmem_image_dimensions,
            shmem_name=self.shmem_name)

        if self.mvision_master_process:
            self.mvision_process.setMasterProcess(self.mvision_master_process)

    def closeValkka(self):
        if self.mvision_master_process:
            self.mvision_process.unsetMasterProcess()
        self.mvision_process.disconnectAnalyzerWidget(self.analyzer_widget)
        self.livethread.close()
        self.chain.close()
        self.chain = None
        self.openglthread.close()

        self.mvision_process.requestStop()
        self.mvision_process.waitStop()

        if self.mvision_master_process:
            self.mvision_master_process.requestStop()
            self.mvision_master_process.waitStop()

    def showEvent(self, e):
        if self.init_filename is not None:
            self.open_file_button_slot(fname_=self.init_filename)
        e.accept()

    def closeEvent(self, e):
        print(pre, "closeEvent!")
        self.closeValkka()
        self.analyzer_widget.close()  # wtf do we need this!
        # super().closeEvent(e)
        e.accept()

    # *** slot ****

    def open_file_button_slot(self, fname_=None):
        if (self.slot_reserved):
            self.infotext.setText("Close the current file first")
            return
        if not fname_:
            fname = QtWidgets.QFileDialog.getOpenFileName(filter="*.mkv")[0]
        else:
            fname = fname_
        if (len(fname) > 0):
            print(pre, "open_file_button_slot: got filename", fname)
            self.chain.setFileContext(fname)
            self.filethread.openStream(self.chain.file_ctx)
            self.slot_reserved = True
            if (self.chain.fileStatusOk()):
                self.infotext.setText("Opened file " + fname)
                print("Duration:", self.chain.file_ctx.duration)
                self.slider.setMinimum(0)
                self.slider.setMaximum(self.chain.file_ctx.duration)
            else:
                self.infotext.setText("Can't play file " + fname)
        else:
            self.infotext.setText("No file opened")

    def close_file_button_slot(self):
        if (not self.slot_reserved):
            self.infotext.setText("Open a file first")
            return
        self.filethread.closeStream(self.chain.file_ctx)
        self.slot_reserved = False
        self.infotext.setText("Closed file")

    def open_live_button_slot(self):
        pass

    def play_button_slot(self):
        if (self.mode == "file"):
            if (not self.slot_reserved):
                self.infotext.setText("Open a file first")
                return
            self.filethread.playStream(self.chain.file_ctx)
        else:
            pass

    def rewind_button_slot(self):
        if (self.mode == "file"):
            if (not self.slot_reserved):
                self.infotext.setText("Open a file first")
                return
            self.chain.file_ctx.seektime_ = 0
            self.filethread.seekStream(self.chain.file_ctx)
        else:
            pass

    def stop_button_slot(self):
        if (self.mode == "file"):
            if (not self.slot_reserved):
                self.infotext.setText("Open a file first")
                return
            self.filethread.stopStream(self.chain.file_ctx)
        else:
            pass

    def slider_slot(self, v):
        print(">", v)
        self.chain.file_ctx.seektime_ = v
        # TODO: reset analyzer state
        self.seek_label.setText(str(v))
        self.mvision_process.resetAnalyzerState()
        self.filethread.seekStream(self.chain.file_ctx)

    def set_bounding_boxes_slot(self, bbox_list):
        self.openglthread.core.clearObjectsCall(self.token)
        for bbox in bbox_list:
            self.openglthread.core.addRectangleCall(
                self.token, bbox[0], bbox[1], bbox[2],
                bbox[3])  # left, right, top, bottom
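The rectangles above are drawn as overlays on the OpenGL window. A small usage sketch, assuming the boxes arrive as (left, right, top, bottom) tuples in relative 0..1 coordinates (the coordinate convention and the calling context are assumptions):

# sketch: draw a single centered box through the slot above; "gui" is a FileGUI instance created elsewhere
gui.set_bounding_boxes_slot([(0.25, 0.75, 0.25, 0.75)])   # left, right, top, bottom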
Example #8
    def openValkka(self):
        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            # verbose=True,
            verbose=False,
            affinity=self.pardic["live affinity"])

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            n_720p=self.pardic[
                "n_720p"],  # reserve stacks of YUV video frames for various resolutions
            n_1080p=self.pardic["n_1080p"],
            n_1440p=self.pardic["n_1440p"],
            n_4K=self.pardic["n_4K"],
            # naudio  =self.pardic["naudio"], # obsolete
            # verbose =True,
            verbose=False,
            msbuftime=self.pardic["msbuftime"],
            affinity=self.pardic["gl affinity"],
            x_connection=":0.0"
            # x_connection =":0.1" # works .. video appears on the other xscreen
        )
        """ # this results in a segfault
    print("> starting second OpenGLThread")
    # testing: start another OpenGLThread
    self.openglthread2=OpenGLThread(     # starts frame presenting services
      name    ="mythread2",
      n_720p   =self.pardic["n_720p"],   # reserve stacks of YUV video frames for various resolutions
      n_1080p  =self.pardic["n_1080p"],
      n_1440p  =self.pardic["n_1440p"],
      n_4K     =self.pardic["n_4K"],
      # naudio  =self.pardic["naudio"], # obsolete
      # verbose =True,
      verbose =False,
      msbuftime=self.pardic["msbuftime"],
      affinity=self.pardic["gl affinity"],
      x_connection =":0.1" # works .. video appears on the other xscreen
      )
    print("> second OpenGLThread started")
    """

        if (self.openglthread.hadVsync()):
            w = QtWidgets.QMessageBox.warning(
                self, "VBLANK WARNING",
                "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'"
            )

        tokens = []
        self.chains = []

        a = self.pardic["dec affinity start"]
        cw = 0  # widget / window index
        cs = 1  # slot / stream count

        ntotal = len(self.addresses) * self.pardic["replicate"]
        nrow = self.pardic["videos per row"]
        ncol = max((ntotal // self.pardic["videos per row"]) + 1, 2)

        for address in self.addresses:
            # now livethread and openglthread are running
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]
            print(pre, "openValkka: setting decoder thread on processor", a)

            chain = BasicFilterchain(  # decoding and branching the stream happens here
                livethread=self.livethread,
                openglthread=self.openglthread,
                address=address,
                slot=cs,
                affinity=a,
                # verbose     =True
                verbose=False,
                msreconnect=10000,

                # flush_when_full =True
                flush_when_full=False,

                # time_correction   =TimeCorrectionType_dummy,  # Timestamp correction type: TimeCorrectionType_none, TimeCorrectionType_dummy, or TimeCorrectionType_smart (default)
                time_correction=TimeCorrectionType_smart,
                recv_buffer_size=0,    # Operating system socket ringbuffer size in bytes # 0 means default
                # recv_buffer_size  =1024*800,   # 800 KB
                reordering_mstime=0    # Reordering buffer time for Live555 packets in MILLIseconds # 0 means default
                # reordering_mstime =300
            )

            self.chains.append(
                chain
            )  # important .. otherwise chain will go out of context and get garbage collected ..

            for cc in range(0, self.pardic["replicate"]):
                if ("no_qt" in self.pardic):
                    # create our own x-windowses
                    win_id = self.openglthread.createWindow(show=True)
                else:

                    # *** Choose one of the following sections ***

                    # (1) Let Valkka create the windows/widget # use this: we get a window with correct parametrization
                    # win_id =self.openglthread.createWindow(show=False)
                    # fr     =getForeignWidget(self.w, win_id)

                    if (valkka_xwin == False):
                        # (2) Let Qt create the widget
                        fr = TestWidget0(None)
                        win_id = int(fr.winId())
                    else:
                        # """
                        # (3) Again, let Valkka create the window, but put on top a translucent widget (that catches mouse gestures)
                        win_id = self.openglthread.createWindow(show=False)
                        widget_pair = WidgetPair(None, win_id, TestWidget0)
                        fr = widget_pair.getWidget()
                        self.widget_pairs.append(widget_pair)
                        # """

                    print(pre, "setupUi: layout index, address : ", cw // nrow,
                          cw % nrow, address)
                    # self.lay.addWidget(fr,cw//nrow,cw%nrow) # floating windows instead

                    container = VideoContainer(None, fr, n=0)
                    container.getWidget().setGeometry(
                        self.desktop_handler.getGeometry(
                            nrow, ncol, cw % nrow, cw // nrow))
                    container.getWidget().show()

                    self.videoframes.append(container)

                token = self.openglthread.connect(
                    slot=cs, window_id=win_id
                )  # present frames with slot number cs at window win_id
                tokens.append(token)
                cw += 1

            cs += 1  # TODO: crash when repeating the same slot number ..?

            chain.decodingOn()  # tell the decoding thread to start its job
            a += 1
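The placement of the replicated windows above is plain integer arithmetic: window index cw is mapped to layout index (cw // nrow, cw % nrow), with ncol derived from the total window count as above. A self-contained sketch with placeholder counts:

# sketch: layout indices for 2 addresses replicated 3 times, 3 videos per row
ntotal, nrow = 2 * 3, 3
ncol = max((ntotal // nrow) + 1, 2)
print("ncol =", ncol)
for cw in range(ntotal):
    print("window", cw, "-> layout index", cw // nrow, cw % nrow)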
class MyGui(QtWidgets.QMainWindow):

    debug = False

    # debug=True

    def __init__(self, pardic, valkkafs, parent=None):
        super(MyGui, self).__init__()
        self.pardic = pardic
        self.valkkafs = valkkafs
        self.initVars()
        self.setupUi()
        if (self.debug):
            return
        self.openValkka()
        self.start_streams()

    def initVars(self):
        pass

    def setupUi(self):
        self.setGeometry(QtCore.QRect(100, 100, 800, 800))
        self.w = QtWidgets.QWidget(self)
        self.setCentralWidget(self.w)
        self.lay = QtWidgets.QGridLayout(self.w)

        self.videoframes = []
        self.widget_pairs = []
        self.addresses = self.pardic["cams"]

        # self.rec_window = QtWidgets.QMainWindow(self)
        # self.rec_window = QtWidgets.QTabWidget(None)
        self.rec_window = MyTabWidget(None)
        self.rec_window.setGeometry(QtCore.QRect(50, 50, 800, 800))
        self.rec_window.show()

        self.rec_video_tab = QtWidgets.QWidget(None)
        self.rec_video_lay = QtWidgets.QVBoxLayout(self.rec_video_tab)

        self.rec_calendar_tab = QtWidgets.QWidget(None)
        self.rec_calendar_lay = QtWidgets.QVBoxLayout(self.rec_calendar_tab)

        self.rec_window.addTab(self.rec_video_tab, "Video")
        self.rec_window.addTab(self.rec_calendar_tab, "Calendar")

        self.rec_video_area = QtWidgets.QWidget(self.rec_video_tab)
        self.rec_video_area.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                          QtWidgets.QSizePolicy.Expanding)

        self.rec_video_area_lay = QtWidgets.QGridLayout(self.rec_video_area)
        self.rec_video_lay.addWidget(self.rec_video_area)

        # timeline
        self.timelinewidget = TimeLineWidget(datetime.date.today(),
                                             parent=self.rec_video_area)
        # self.timelinewidget.setLogLevel(logging.DEBUG)
        self.rec_video_lay.addWidget(self.timelinewidget)

        # buttons
        self.buttons = QtWidgets.QWidget(self.rec_video_area)
        self.buttons_lay = QtWidgets.QHBoxLayout(self.buttons)
        self.play_button = QtWidgets.QPushButton("play", self.buttons)
        self.stop_button = QtWidgets.QPushButton("stop", self.buttons)
        self.zoom_to_fs_button = QtWidgets.QPushButton("limits", self.buttons)
        self.buttons_lay.addWidget(self.play_button)
        self.buttons_lay.addWidget(self.stop_button)
        self.buttons_lay.addWidget(self.zoom_to_fs_button)
        self.rec_video_lay.addWidget(self.buttons)

        # calendar
        self.calendarwidget = CalendarWidget(datetime.date.today(),
                                             parent=self.rec_calendar_tab)
        self.rec_calendar_lay.addWidget(self.calendarwidget)

    def openValkka(self):
        self.valkkafsmanager = ValkkaFSManager(
            self.valkkafs,
            # read = False,   # debugging
            # cache = False,  # debugging
            # write = False   # debugging
        )

        self.playback_controller = PlaybackController(
            calendar_widget=self.calendarwidget,
            timeline_widget=self.timelinewidget,
            valkkafs_manager=self.valkkafsmanager,
            play_button=self.play_button,
            stop_button=self.stop_button,
            zoom_to_fs_button=self.zoom_to_fs_button)

        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            # verbose=True,
            verbose=False,
            affinity=self.pardic["live affinity"])

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=self.pardic["n_720p"],
            n_1080p=self.pardic["n_1080p"],
            n_1440p=self.pardic["n_1440p"],
            n_4K=self.pardic["n_4K"],
            # naudio  =self.pardic["naudio"], # obsolete
            verbose=True,
            # verbose=False,
            msbuftime=self.pardic["msbuftime"],
            affinity=self.pardic["gl affinity"])

        if (self.openglthread.hadVsync()):
            w = QtWidgets.QMessageBox.warning(
                self, "VBLANK WARNING",
                "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'"
            )

        tokens = []
        self.chains = []

        a = self.pardic["dec affinity start"]
        cw = 0  # widget / window index
        cs = 1  # slot / stream count

        for address in self.addresses:
            # now livethread and openglthread are running
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]
            print(pre, "openValkka: setting decoder thread on processor", a)

            if use_live:
                chain_live = ValkkaFSLiveFilterchain(  # decoding and branching the stream happens here
                    valkkafsmanager=self.valkkafsmanager,
                    id_rec=cs,  # identifies the stream in ValkkaFS
                    livethread=self.livethread,
                    address=address,
                    slot=cs,
                    affinity=a,
                    # verbose     =True
                    verbose=False,
                    msreconnect=10000,
                    # Reordering buffer time for Live555 packets in MILLIseconds # 0 means default
                    reordering_mstime=0
                    # reordering_mstime =300
                )

            rec_slot = cs + 100  # live and rec slot numbers must be kept separated ..

            chain_rec = ValkkaFSFileFilterchain(  # decoding and branching the stream happens here
                valkkafsmanager=self.valkkafsmanager,
                id_rec=cs,  # identifies the stream in ValkkaFS
                slot=rec_slot,
                affinity=a,
                # verbose     =True
                verbose=False)

            # send yuv to OpenGLThread
            if use_live:
                chain_live.connect_to_yuv("yuv_to_opengl_" + str(cs),
                                          self.openglthread.getInput())
            chain_rec.connect_to_yuv("yuv_to_opengl_" + str(cs),
                                     self.openglthread.getInput())

            # important .. otherwise chain will go out of context and get
            # garbage collected ..
            if use_live: self.chains.append(chain_live)
            self.chains.append(chain_rec)

            if ("no_qt" in self.pardic):
                # create our own x-windowses
                win_id = self.openglthread.createWindow(show=True)
                win_id_rec = self.openglthread.createWindow(show=True)

            else:

                # *** Choose one of the following sections ***

                # (1) Let Valkka create the windows/widget # use this: we get a window with correct parametrization
                # win_id =self.openglthread.createWindow(show=False)
                # fr     =getForeignWidget(self.w, win_id)

                if (valkka_xwin == False):
                    # (2) Let Qt create the widget
                    fr = TestWidget0(self.w)
                    win_id = int(fr.winId())

                    fr_rec = TestWidget0(self.rec_video_area)
                    win_id_rec = int(fr_rec.winId())

                else:
                    # """
                    # (3) Again, let Valkka create the window, but put on top a translucent widget (that catches mouse gestures)
                    win_id = self.openglthread.createWindow(show=False)
                    widget_pair = WidgetPair(self.w, win_id, TestWidget0)
                    fr = widget_pair.getWidget()
                    self.widget_pairs.append(widget_pair)

                    win_id_rec = self.openglthread.createWindow(show=False)
                    widget_pair = WidgetPair(self.rec_video_area, win_id_rec,
                                             TestWidget0)
                    fr_rec = widget_pair.getWidget()
                    self.widget_pairs.append(widget_pair)
                    # """

                nrow = self.pardic["videos per row"]
                print(pre, "setupUi: layout index, address : ", cw // nrow,
                      cw % nrow, address)

                self.lay.addWidget(fr, cw // nrow, cw % nrow)
                self.rec_video_area_lay.addWidget(fr_rec, cw // nrow,
                                                  cw % nrow)

                self.videoframes.append(fr)
                self.videoframes.append(fr_rec)

            # present frames with slot number cs at window win_id

            # rec_slot = cs # debug

            print(pre, "setupUi: live:", cs, win_id)
            print(pre, "setupUi: rec :", rec_slot, win_id_rec)

            token = self.openglthread.connect(slot=cs, window_id=win_id)
            tokens.append(token)
            token = self.openglthread.connect(slot=rec_slot,
                                              window_id=win_id_rec)
            tokens.append(token)

            cw += 1
            cs += 1

            if use_live:
                chain_live.decodingOn()  # tell the decoding thread to start its job
            chain_rec.decodingOn()
            a += 1

    def closeValkka(self):
        self.livethread.close()

        self.valkkafsmanager.close()

        for chain in self.chains:
            chain.close()

        self.chains = []
        self.widget_pairs = []
        self.videoframes = []

        self.openglthread.close()

        # time.sleep(5)

    def start_streams(self):
        pass

    def stop_streams(self):
        pass

    def closeEvent(self, e):
        print("\n", pre, "closeEvent!\n")
        self.stop_streams()
        self.closeValkka()
        self.rec_window.forceClose()
        # self.rec_window.close()
        e.accept()
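A hedged launcher sketch for the ValkkaFS-based MyGui above; the ValkkaFS instance is created elsewhere (its construction is not part of this example), the pardic keys mirror those read by the class, and all values are placeholders.

if __name__ == "__main__":
    pardic = {
        "cams"               : ["rtsp://user:password@192.168.1.10"],   # placeholder address
        "n_720p"             : 10, "n_1080p" : 10, "n_1440p" : 10, "n_4K" : 10,
        "msbuftime"          : 100,
        "live affinity"      : -1,
        "gl affinity"        : -1,
        "dec affinity start" : -1,
        "dec affinity stop"  : -1,
        "videos per row"     : 2
    }
    valkkafs = ...   # a ValkkaFS instance, created elsewhere
    app = QtWidgets.QApplication(["test_studio_5"])
    gui = MyGui(pardic, valkkafs)
    gui.show()
    app.exec_()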
Example #11
    def openValkka(self):

        # RGB Shared memory
        shmem_image_dimensions = (1920 // 4, 1080 // 4)
        shmem_image_interval = 1000
        shmem_rignbuffer_size = 10

        # Frag MP4 Shared memory
        shmem_buffers = 10
        shmem_name = "FragMP4Shmem"
        cellsize = 1024 * 1024 * 3
        timeout = 1000

        cs = 1
        cc = 1
        self.processes = []
        for address in self.addresses:
            shmem_name = "camera" + str(cs)
            # print("shmem name is {} for process number {} ".format(shmem_name, cc))
            process = QValkkaFireDetectorProcess(
                "process" + str(cs),
                shmem_name=shmem_name,
                n_buffer=shmem_rignbuffer_size,
                image_dimensions=shmem_image_dimensions)
            self.processes.append(process)
            cs += 1
        print(self.processes)

        # Give the multiprocesses to a QThread that's reading their message pipe

        self.thread = QValkkaThread(processes=self.processes)

        # start the multiprocesses
        self.startProcesses()

        # Now that we have successfully forked our multiprocesses, let's spawn threads

        self.livethread = LiveThread(name="live",
                                     verbose=False,
                                     affinity=self.pardic["live_affinity"])
        self.openglthread = OpenGLThread(
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=50,
            n_1080p=50,
            n_1440p=50,
            n_4K=50,
            verbose=False,
            msbuftime=100,
            affinity=-1)
        # if (self.openglthread.hadVsync()):
        #     q = QtWidgets.QMessageBox.warning(self,
        #                                       "VBLANK WARNING",
        #                                       "Syncing to vertical refresh enabled \n THIS WILL DESTROY YOUR FRAMERATE\n disable it using 'export vblank_mode=0'")

        tokens = []
        self.chains = []
        self.frames = []

        cs = 1
        cc = 0

        x = 0
        y = 0
        cam_count = 0
        a = self.pardic["dec affinity start"]
        for address in self.addresses:

            # Livethread/openglthread are running
            print('address :', address)
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]

            chain = VisionAlarmFilterChain(
                # decoding and branching happens here
                livethread=self.livethread,
                openglthread=self.openglthread,
                address=address,
                slot=cs,
                affinity=a,
                shmem_name="camera" + str(cs),
                shmem_image_dimensions=shmem_image_dimensions,
                shmem_image_interval=shmem_image_interval,
                shmem_ringbuffer_size=shmem_rignbuffer_size,
                msreconnect=1000,
                frag_shmem_buffers=shmem_buffers,
                frag_shmem_name=shmem_name,
                frag_shmem_cellsize=cellsize,
                frag_shmem_timeout=timeout,
            )
            self.chains.append(chain)

            win_id = self.openglthread.createWindow(show=False)
            frame = self.QtFrame(self.w, win_id)

            # print('setting up layout')
            if y > 1:
                x = 1
                y = 0
            self.wlay.addWidget(frame.widget, x, y)
            y += 1
            token = self.openglthread.connect(slot=cs, window_id=win_id)
            tokens.append(token)

            # take corresponding multiprocess
            process = self.processes[cc]
            process.createClient()  # creates the shared memory client at the multiprocess
            # connect signals to the nested widget

            process.signals.Fire_detected.connect(self.addAlert)

            chain.decodingOn()  # start the decoding thread
            cs += 1
            a += 1
            cc += 1
            # FragMP4 shmem client
            client = FragMP4ShmemClient(name=shmem_name,
                                        n_ringbuffer=shmem_buffers,
                                        n_size=cellsize,
                                        mstimeout=timeout,
                                        verbose=False)
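The two shared-memory channels above are dimensioned by the constants at the top of the method. A small arithmetic sketch of the per-camera footprint, assuming 3 bytes per pixel for the RGB images:

# sketch: approximate per-camera shared-memory footprint, from the constants above
w, h = 1920 // 4, 1080 // 4                     # shmem_image_dimensions
rgb_ring_bytes = 10 * w * h * 3                 # ringbuffer of 10 RGB frames
frag_mp4_ring_bytes = 10 * 1024 * 1024 * 3      # 10 cells of cellsize bytes
print("RGB ring     : %.1f MB" % (rgb_ring_bytes / 1024**2))       # ~3.7 MB
print("FragMP4 ring : %.1f MB" % (frag_mp4_ring_bytes / 1024**2))  # 30.0 MB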
Example #12
class MyGui(QtWidgets.QMainWindow):
    class QtFrame:
        def __init__(self, parent, win_id):
            self.widget = QtWidgets.QWidget(parent)
            self.lay = QtWidgets.QVBoxLayout(self.widget)

            self.widget_pair = WidgetPair(self.widget, win_id, TestWidget0)
            self.video = self.widget_pair.getWidget()
            self.lay.addWidget(self.video)

        def getWindowId(self):
            return int(self.widget.winId())

    debug = False

    def __init__(self, pardic, parent=None):
        super(MyGui, self).__init__()
        self.pardic = pardic
        self.initVars()
        self.setupUI()

        if self.debug: return
        self.openValkka()

    def initVars(self):
        pass

    def setupUI(self):
        self.setWindowTitle('Vision alarm system')
        self.resize(1200, 800)

        self.menuBar().addMenu('Add Camera')
        self.menuBar().addMenu('Remove camera')

        self.main = QtWidgets.QWidget(self)
        self.setCentralWidget(self.main)
        self.w = QtWidgets.QWidget(self)
        # self.setCentralWidget(self.w)

        self.mainlay = QtWidgets.QVBoxLayout(self.main)
        self.mainlay.setSpacing(0)
        self.mainlay.setContentsMargins(0, 0, 0, 0)

        self.wlay = QtWidgets.QGridLayout(self.w)
        self.alert = QtWidgets.QTextEdit()

        self.mainlay.addWidget(self.w, 75)
        self.mainlay.addWidget(self.alert, 25)

        self.frames = []  # QtFrame wrappers created in openValkka
        self.addresses = self.pardic["cams"]
        print(self.addresses)

    def openValkka(self):

        # RGB Shared memory
        shmem_image_dimensions = (1920 // 4, 1080 // 4)
        shmem_image_interval = 1000
        shmem_ringbuffer_size = 10

        # Frag MP4 Shared memory
        shmem_buffers = 10
        shmem_name = "FragMP4Shmem"
        cellsize = 1024 * 1024 * 3
        timeout = 1000

        cs = 1
        self.processes = []
        for address in self.addresses:
            # per-camera RGB shmem name; kept in its own variable so the
            # FragMP4 shmem name defined above is not clobbered
            rgb_shmem_name = "camera" + str(cs)
            # print("shmem name is {} for process number {}".format(rgb_shmem_name, cs))
            process = QValkkaFireDetectorProcess(
                "process" + str(cs),
                shmem_name=rgb_shmem_name,
                n_buffer=shmem_ringbuffer_size,
                image_dimensions=shmem_image_dimensions)
            self.processes.append(process)
            cs += 1
        print(self.processes)

        # Give the multiprocesses to a QThread that reads their message pipes (the thread listens to the processes)

        self.thread = QValkkaThread(processes=self.processes)

        # start the multiprocesses
        self.startProcesses()

        # Now that we have successfully forked our multiprocesses, let's spawn the threads

        self.livethread = LiveThread(name="live",
                                     verbose=False,
                                     affinity=self.pardic["live_affinity"])
        self.openglthread = OpenGLThread(
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=50,
            n_1080p=50,
            n_1440p=50,
            n_4K=50,
            verbose=False,
            msbuftime=100,
            affinity=-1)
        # if (self.openglthread.hadVsync()):
        #     q = QtWidgets.QMessageBox.warning(self,
        #                                       "VBLANK WARNING",
        #                                       "Syncing to vertical refresh enabled \n THIS WILL DESTROY YOUR FRAMERATE\n disable it using 'export vblank_mode=0'")

        tokens = []
        self.chains = []
        self.frames = []

        cs = 1
        cc = 0

        x = 0
        y = 0
        cam_count = 0
        a = self.pardic["dec affinity start"]
        for address in self.addresses:

            # Livethread/openglthread are running
            print('address :', address)
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]

            chain = VisionAlarmFilterChain(
                # decoding and branching happens here
                livethread=self.livethread,
                openglthread=self.openglthread,
                address=address,
                slot=cs,
                affinity=a,
                shmem_name="camera" + str(cs),
                shmem_image_dimensions=shmem_image_dimensions,
                shmem_image_interval=shmem_image_interval,
                shmem_ringbuffer_size=shmem_ringbuffer_size,
                msreconnect=1000,
                frag_shmem_buffers=shmem_buffers,
                frag_shmem_name=shmem_name,
                frag_shmem_cellsize=cellsize,
                frag_shmem_timeout=timeout,
            )
            self.chains.append(chain)

            win_id = self.openglthread.createWindow(show=False)
            frame = self.QtFrame(self.w, win_id)
            self.frames.append(frame)  # keep a reference so the wrapper is not garbage collected

            # 2x2 grid layout: fill row 0 (columns 0..1), then row 1;
            # with more than four cameras the positions start to repeat
            if y > 1:
                x = 1
                y = 0
            self.wlay.addWidget(frame.widget, x, y)
            y += 1
            token = self.openglthread.connect(slot=cs, window_id=win_id)
            tokens.append(token)

            # take corresponding multiprocess
            process = self.processes[cc]
            process.createClient()  # creates the shared memory client at the multiprocess
            # connect the detector's signal to the alert slot

            process.signals.Fire_detected.connect(self.addAlert)

            chain.decodingOn()  # start the decoding thread
            cs += 1
            a += 1
            cc += 1
            # FragMP4 shmem client
            client = FragMP4ShmemClient(name=shmem_name,
                                        n_ringbuffer=shmem_buffers,
                                        n_size=cellsize,
                                        mstimeout=timeout,
                                        verbose=False)
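            # NOTE: this client is rebound on every loop iteration and never read
            # in this example; to actually pull MP4 fragments it would have to be
            # kept (e.g. appended to a list on self) and polled elsewhere. With
            # several cameras one would also want a distinct frag_shmem_name per
            # chain, with a matching client for each.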

    def startProcesses(self):
        self.thread.start()
        for p in self.processes:
            p.start()

    def stopProcesses(self):
        for p in self.processes:
            p.stop()
        print("stopping QThread")
        self.thread.stop()
        print("QThread stopped")

    def closeValkka(self):
        self.livethread.close()
        for chain in self.chains:
            chain.close()

        self.chains = []
        self.widget_pairs = []
        self.videoframes = []
        self.openglthread.close()

    def closeEvent(self, e):
        print("closeEvent!")
        self.stopProcesses()
        self.closeValkka()
        super().closeEvent(e)

    # Slot
    def addAlert(self):
        print('inside addAlert')
        # NOTE: the camera index is not passed with the signal,
        # so the alert always reports camera number 1
        self.alert.append('Fire Detected on camera number 1')
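# Sketch (not from this example's source): addAlert above cannot tell which camera
# fired, because Fire_detected is connected without any camera-specific payload.
# A common Qt pattern is to bind the camera index at connect time with
# functools.partial, e.g. (hypothetical wiring inside the loop above):
#
#     process.signals.Fire_detected.connect(partial(self.addAlert, cs))
#
# A minimal, self-contained illustration of the binding itself:
from functools import partial

def add_alert(camera_number):
    # stand-in for MyGui.addAlert; the GUI version would append to self.alert
    print("Fire detected on camera number %i" % camera_number)

# one bound callback per camera, as could be done per detector process above
callbacks = [partial(add_alert, n) for n in (1, 2, 3)]
callbacks[1]()  # -> Fire detected on camera number 2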
Example #13
class FileGUI(QtWidgets.QMainWindow):
    """Test your machine vision mvision_process and its widget with video files
    
    :param mvision_process:          QValkkaMultimvision_process-derived class
    :param shmem_image_interval:     How often the image is interpolated into rgb and passed to the mvision_process (milliseconds)
    """

    def __init__(self, 
                 mvision_process, 
                 shmem_image_interval = 1000, 
                 shmem_ringbuffer_size = 10, 
                 shmem_image_dimensions = (1920 // 2, 1080 // 2),
                 shmem_name="test"):
        
        super().__init__()
        assert(issubclass(mvision_process.__class__, QValkkaShmemProcess2))
        
        self.mvision_process        = mvision_process
        self.shmem_image_interval   = shmem_image_interval
        self.shmem_ringbuffer_size  = shmem_ringbuffer_size
        self.shmem_image_dimensions = shmem_image_dimensions
        self.shmem_name             = shmem_name
        
        self.initVars()
        self.setupUi()
        
        self.mvision_widget = self.mvision_process.getWidget()
        # self.mvision_widget = QtWidgets.QWidget()
        self.mvision_widget.setParent(self.widget)
        self.widget_lay.addWidget(self.mvision_widget)
        
        self.openValkka()

    def initVars(self):
        self.mode = "file"
        self.slot_reserved = False

    def setupUi(self):
        self.setGeometry(QtCore.QRect(100, 100, 800, 800))
        self.w = QtWidgets.QWidget(self)
        self.setCentralWidget(self.w)
        self.lay = QtWidgets.QVBoxLayout(self.w)

        # return

        # divide window into three parts
        self.upper = QtWidgets.QWidget(self.w)
        self.lower = QtWidgets.QWidget(self.w)
        self.lowest = QtWidgets.QWidget(self.w)
        self.lay.addWidget(self.upper)
        self.lay.addWidget(self.lower)
        self.lay.addWidget(self.lowest)

        # upper part: detectors widget and the video itself
        self.upperlay = QtWidgets.QHBoxLayout(self.upper)

        # self.widget  =QtWidgets.QTextEdit(self.upper)
        self.widget  =QtWidgets.QWidget(self.upper)
        self.widget_lay = QtWidgets.QVBoxLayout(self.widget)

        # self.widget = self.mvision_process.getWidget()
        # self.widget.setParent(self.upper)

        self.video_area = QtWidgets.QWidget(self.upper)
        self.video_lay = QtWidgets.QGridLayout(self.video_area)

        self.upperlay.addWidget(self.widget)
        self.upperlay.addWidget(self.video_area)
        self.widget.setSizePolicy(
            QtWidgets.QSizePolicy.Minimum,
            QtWidgets.QSizePolicy.Minimum)
        self.video_area.setSizePolicy(
            QtWidgets.QSizePolicy.Expanding,
            QtWidgets.QSizePolicy.Expanding)

        # lower part: [Open File] [Close Live] [Play] [Stop] [Rewind]
        self.lowerlay = QtWidgets.QHBoxLayout(self.lower)
        self.open_file_button = QtWidgets.QPushButton("Open File", self.lower)
        self.close_file_button = QtWidgets.QPushButton(
            "Close File", self.lower)
        self.play_button = QtWidgets.QPushButton("Play", self.lower)
        self.stop_button = QtWidgets.QPushButton("Stop", self.lower)
        self.rewind_button = QtWidgets.QPushButton("<<", self.lower)

        self.lowerlay.addWidget(self.open_file_button)
        self.lowerlay.addWidget(self.close_file_button)
        self.lowerlay.addWidget(self.play_button)
        self.lowerlay.addWidget(self.stop_button)
        self.lowerlay.addWidget(self.rewind_button)

        self.open_file_button.clicked. connect(self.open_file_button_slot)
        self.close_file_button.clicked.connect(self.close_file_button_slot)
        self.play_button.clicked.      connect(self.play_button_slot)
        self.stop_button.clicked.      connect(self.stop_button_slot)
        self.rewind_button.clicked.    connect(self.rewind_button_slot)

        # lowest part: some text
        self.lowestlay = QtWidgets.QVBoxLayout(self.lowest)
        self.infotext = QtWidgets.QLabel("info text", self.lowest)
        self.lowestlay.addWidget(self.infotext)


    def openValkka(self):
        self.thread = QValkkaThread() # the thread that's watching the mvision_processes
        self.thread.start()
        
        self.mvision_process.start()
        self.thread.addProcess(self.mvision_process)
        
        # """
        self.livethread = LiveThread(         # starts live stream services (using live555)
            name="live_thread",
            verbose=False
        )

        self.filethread = FileThread(
            name="file_thread",
            verbose=False
        )

        self.openglthread = OpenGLThread(     # starts frame presenting services
            name="mythread",
            n_720p=10,
            n_1080p=10,
            n_1440p=10,
            n_4K=10,
            verbose=False,
            msbuftime=100,
            affinity=-1
        )

        # this filterchain creates a shared memory server
        self.chain = ShmemFilterchain1(       # decoding and branching the stream happens here
            openglthread = self.openglthread,
            slot = 1,
            shmem_name = self.shmem_name,
            shmem_image_dimensions = self.shmem_image_dimensions,
            shmem_image_interval = self.shmem_image_interval,
            shmem_ringbuffer_size = self.shmem_ringbuffer_size
        )

        shmem_name, n_buffer, shmem_image_dimensions = self.chain.getShmemPars()
        self.video = QtWidgets.QWidget(self.video_area)
        self.win_id = int(self.video.winId())

        self.video_lay.addWidget(self.video, 0, 0)
        self.token = self.openglthread.connect(slot = 1, window_id = self.win_id)

        self.chain.decodingOn()  # tell the decoding thread to start its job

        self.mvision_process.activate(
            n_buffer                = self.shmem_ringbuffer_size, 
            image_dimensions        = self.shmem_image_dimensions, 
            shmem_name              = self.shmem_name  
        )
        
        
    def closeValkka(self):
        # """
        self.livethread.close()
        self.chain.close()
        self.chain = None
        self.openglthread.close()
        # """
        print(self.mvision_process)
        self.thread.stop()
        

    def closeEvent(self, e):
        print(pre, "closeEvent!")
        self.closeValkka()
        super().closeEvent(e)

    # *** slot ****

    def open_file_button_slot(self):
        if (self.slot_reserved):
            self.infotext.setText("Close the current file first")
            return
        fname = QtWidgets.QFileDialog.getOpenFileName(filter="*.mkv")[0]
        if (len(fname) > 0):
            print(pre, "open_file_button_slot: got filename", fname)
            self.chain.setFileContext(fname)
            self.filethread.openStream(self.chain.file_ctx)
            self.slot_reserved = True
            if (self.chain.fileStatusOk()):
                self.infotext.setText("Opened file " + fname)
            else:
                self.infotext.setText("Can't play file " + fname)
        else:
            self.infotext.setText("No file opened")

    def close_file_button_slot(self):
        if (not self.slot_reserved):
            self.infotext.setText("Open a file first")
            return
        self.filethread.closeStream(self.chain.file_ctx)
        self.slot_reserved = False
        self.infotext.setText("Closed file")

    def open_live_button_slot(self):
        pass

    def play_button_slot(self):
        if (self.mode == "file"):
            if (not self.slot_reserved):
                self.infotext.setText("Open a file first")
                return
            self.filethread.playStream(self.chain.file_ctx)
        else:
            pass

    def rewind_button_slot(self):
        if (self.mode == "file"):
            if (not self.slot_reserved):
                self.infotext.setText("Open a file first")
                return
            self.chain.file_ctx.seektime_ = 0
            self.filethread.seekStream(self.chain.file_ctx)
        else:
            pass

    def stop_button_slot(self):
        if (self.mode == "file"):
            if (not self.slot_reserved):
                self.infotext.setText("Open a file first")
                return
            self.filethread.stopStream(self.chain.file_ctx)
        else:
            pass

    def set_bounding_boxes_slot(self, bbox_list):
        self.openglthread.core.clearObjectsCall(self.token)
        for bbox in bbox_list:
            self.openglthread.core.addRectangleCall(self.token, bbox[0], bbox[1], bbox[2], bbox[3]) # left, right, top, bottom
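# Usage sketch (not from the original file): per the docstring and the assert in
# __init__, FileGUI expects a QValkkaShmemProcess2-derived machine vision process.
# "MyDetectorProcess" below is a hypothetical stand-in for such a class:
#
#     import sys
#     from PyQt5 import QtWidgets   # assumed Qt binding
#
#     app = QtWidgets.QApplication(sys.argv)
#     detector = MyDetectorProcess("detector_1")   # hypothetical detector process
#     gui = FileGUI(detector,
#                   shmem_image_interval=1000,     # ms between RGB frames to the detector
#                   shmem_ringbuffer_size=10,
#                   shmem_image_dimensions=(1920 // 2, 1080 // 2),
#                   shmem_name="test")
#     gui.show()
#     app.exec_()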
Example #14
class MyGui(QtWidgets.QMainWindow):

  debug=False
  # debug=True

  def __init__(self):
    super(MyGui, self).__init__()
    # self.pardic=pardic
    self.initVars()
    self.setupUi()
    if (self.debug): 
      return
    self.openValkka()
    self.startProcesses()
    
    
  def initVars(self):
    self.messages=[]
    self.mode="file"
    self.slot_reserved=False


  def setupUi(self):
    self.setGeometry(QtCore.QRect(100,100,800,800))
    self.w=QtWidgets.QWidget(self)
    self.setCentralWidget(self.w)
    self.lay=QtWidgets.QVBoxLayout(self.w)
    
    # divide window into three parts
    self.upper  =QtWidgets.QWidget(self.w)
    self.lower  =QtWidgets.QWidget(self.w)
    self.lowest =QtWidgets.QWidget(self.w)
    self.lay.addWidget(self.upper)
    self.lay.addWidget(self.lower)
    self.lay.addWidget(self.lowest)
    
    # upper part: license plate list and the video
    self.upperlay   =QtWidgets.QHBoxLayout(self.upper)
    self.msg_list  =QtWidgets.QTextEdit(self.upper)
    
    self.video_area =QtWidgets.QWidget(self.upper)
    self.video_lay  =QtWidgets.QGridLayout(self.video_area)
    
    self.upperlay.addWidget(self.msg_list)
    self.upperlay.addWidget(self.video_area)
    self.msg_list.setSizePolicy(QtWidgets.QSizePolicy.Minimum,  QtWidgets.QSizePolicy.Minimum)
    self.video_area.setSizePolicy(QtWidgets.QSizePolicy.Expanding,QtWidgets.QSizePolicy.Expanding)
    
    # lower part: [Open File] [Close Live] [Play] [Stop] [Rewind]
    self.lowerlay  =QtWidgets.QHBoxLayout(self.lower)
    self.open_file_button =QtWidgets.QPushButton("Open File", self.lower)
    self.close_file_button=QtWidgets.QPushButton("Close File",self.lower)
    self.play_button      =QtWidgets.QPushButton("Play",self.lower)
    self.stop_button      =QtWidgets.QPushButton("Stop",self.lower)
    self.rewind_button    =QtWidgets.QPushButton("<<",  self.lower)
    
    self.lowerlay.addWidget(self.open_file_button)
    self.lowerlay.addWidget(self.close_file_button)
    self.lowerlay.addWidget(self.play_button)
    self.lowerlay.addWidget(self.stop_button)
    self.lowerlay.addWidget(self.rewind_button)
    
    self.open_file_button.clicked. connect(self.open_file_button_slot)
    self.close_file_button.clicked.connect(self.close_file_button_slot)
    self.play_button.clicked.      connect(self.play_button_slot)
    self.stop_button.clicked.      connect(self.stop_button_slot)
    self.rewind_button.clicked.    connect(self.rewind_button_slot)
    
    # lowest part: some text
    self.lowestlay=QtWidgets.QVBoxLayout(self.lowest)
    self.infotext =QtWidgets.QLabel("info text",self.lowest)
    self.lowestlay.addWidget(self.infotext)
    
    
  def openValkka(self):
    self.livethread=LiveThread(         # starts live stream services (using live555)
      name   ="live_thread",
      verbose=False
    )

    self.filethread=FileThread(
      name  ="file_thread",
      verbose=False
    )

    self.openglthread=OpenGLThread(     # starts frame presenting services
      name    ="mythread",
      n_720p   =10,
      n_1080p  =10,
      n_1440p  =10,
      n_4K     =10,
      verbose =False,
      msbuftime=100,
      affinity=-1
      )
    
    if (self.openglthread.hadVsync()):
      w=QtWidgets.QMessageBox.warning(self,"VBLANK WARNING","Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'")
    
    cc=1
    
    self.chain=ShmemFilterchain1(       # decoding and branching the stream happens here
      openglthread=self.openglthread,
      slot        =cc,
      # this filterchain creates a shared memory server
      shmem_name             ="test_studio_file_"+str(cc),
      shmem_image_dimensions =(1920//4,1080//4),  # Images passed over shmem are quarter of the full-hd reso
      shmem_image_interval   =1000,               # YUV => RGB interpolation to the small size is done each 1000 milliseconds and passed on to the shmem ringbuffer
      shmem_ringbuffer_size  =10                  # Size of the shmem ringbuffer
      )
    
    shmem_name, n_buffer, shmem_image_dimensions =self.chain.getShmemPars()    
    # print(pre,"shmem_name, n_buffer, n_bytes",shmem_name,n_buffer,n_bytes)
    
    self.process=QValkkaMovementDetectorProcess("process_"+str(cc),shmem_name=shmem_name, n_buffer=n_buffer, image_dimensions=shmem_image_dimensions)
    
    self.process.signals.start_move.connect(self.set_moving_slot)
    self.process.signals.stop_move. connect(self.set_still_slot)
    
    if (valkka_xwin):
      # (1) Let OpenGLThread create the window
      self.win_id      =self.openglthread.createWindow(show=False)
      self.widget_pair =WidgetPair(self.video_area,self.win_id,TestWidget0)
      self.video       =self.widget_pair.getWidget()
    else:
      # (2) Let Qt create the window
      self.video     =QtWidgets.QWidget(self.video_area)
      self.win_id    =int(self.video.winId())
    
    self.video_lay.addWidget(self.video,0,0)
    self.token =self.openglthread.connect(slot=cc,window_id=self.win_id)
    
    self.chain.decodingOn() # tell the decoding thread to start its job
    
    # finally, give the multiprocesses to a qthread that's reading their message pipe
    self.thread =QValkkaThread(processes=[self.process])
    
  
  def startProcesses(self):
    self.process.start()
    self.thread.start()
  
  
  def stopProcesses(self):
    print(pre,"stopProcesses :",self.process)
    self.process.stop()
    self.thread.stop()
    print(pre,"QThread stopped")
    
    
  def closeValkka(self):
    self.livethread.close()
    self.chain.close()
    self.chain =None
    self.openglthread.close()
    
    
  def closeEvent(self,e):
    print(pre,"closeEvent!")
    self.stopProcesses()
    self.closeValkka()
    super().closeEvent(e)
    
    
  # *** slot ****
  def open_file_button_slot(self):
    if (self.slot_reserved):
      self.infotext.setText("Close the current file first")
      return
    fname=QtWidgets.QFileDialog.getOpenFileName(filter="*.mkv")[0]
    if (len(fname)>0):
      print(pre,"open_file_button_slot: got filename",fname)
      self.chain.setFileContext(fname)
      self.filethread.openStream(self.chain.file_ctx)
      self.slot_reserved=True
      if (self.chain.fileStatusOk()):
        self.infotext.setText("Opened file "+fname)
      else:
        self.infotext.setText("Can't play file "+fname)
    else:
      self.infotext.setText("No file opened")
    
  
  def close_file_button_slot(self):
    if (not self.slot_reserved):
      self.infotext.setText("Open a file first")
      return
    self.filethread.closeStream(self.chain.file_ctx)
    self.slot_reserved=False
    self.infotext.setText("Closed file")
  
  
  def open_live_button_slot(self):
    pass
  
  
  def play_button_slot(self):
    if (self.mode=="file"):
      if (not self.slot_reserved):
        self.infotext.setText("Open a file first")
        return
      self.filethread.playStream(self.chain.file_ctx)
    else:
      pass
    
    
  def rewind_button_slot(self):
    if (self.mode=="file"):
      if (not self.slot_reserved):
        self.infotext.setText("Open a file first")
        return
      self.chain.file_ctx.seektime_=0
      self.filethread.seekStream(self.chain.file_ctx)
    else:
      pass
    
    
  def stop_button_slot(self):
    if (self.mode=="file"):
      if (not self.slot_reserved):
        self.infotext.setText("Open a file first")
        return
      self.filethread.stopStream(self.chain.file_ctx)
    else:
      pass
    
  
  def set_still_slot(self):
    self.infotext.setText("still")
    self.messages.append("Movement stopped at ")
    if (len(self.messages)>10): self.messages.pop(0)
    st=""
    for message in self.messages:
      st+=message+"\n"
    self.msg_list.setText(st)
    
      
  def set_moving_slot(self):
    self.infotext.setText("MOVING")
    self.messages.append("Movement started at ")
    if (len(self.messages)>10): self.messages.pop(0)
    st=""
    for message in self.messages:
      st+=message+"\n"
    self.msg_list.setText(st)
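# Sketch (not from the original): the message list above is capped by hand
# (append, then pop(0) once it exceeds 10 entries). collections.deque with
# maxlen keeps only the last N messages with the same effect:
from collections import deque

messages = deque(maxlen=10)            # oldest entries are dropped automatically
for n in range(15):
    messages.append("Movement started at %i" % n)
print("\n".join(messages))             # only the last 10 remain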
Example #15
    def openValkka(self):
        self.valkkafsmanager = ValkkaFSManager(
            self.valkkafs,
            # read = False,   # debugging
            # cache = False,  # debugging
            # write = False   # debugging
        )

        self.playback_controller = PlaybackController(
            calendar_widget=self.calendarwidget,
            timeline_widget=self.timelinewidget,
            valkkafs_manager=self.valkkafsmanager,
            play_button=self.play_button,
            stop_button=self.stop_button,
            zoom_to_fs_button=self.zoom_to_fs_button)

        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            # verbose=True,
            verbose=False,
            affinity=self.pardic["live affinity"])

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=self.pardic["n_720p"],
            n_1080p=self.pardic["n_1080p"],
            n_1440p=self.pardic["n_1440p"],
            n_4K=self.pardic["n_4K"],
            # naudio  =self.pardic["naudio"], # obsolete
            verbose=True,
            # verbose=False,
            msbuftime=self.pardic["msbuftime"],
            affinity=self.pardic["gl affinity"])

        if (self.openglthread.hadVsync()):
            w = QtWidgets.QMessageBox.warning(
                self, "VBLANK WARNING",
                "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'"
            )

        tokens = []
        self.chains = []

        a = self.pardic["dec affinity start"]
        cw = 0  # widget / window index
        cs = 1  # slot / stream count

        for address in self.addresses:
            # now livethread and openglthread are running
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]
            print(pre, "openValkka: setting decoder thread on processor", a)

            if use_live:
                chain_live = ValkkaFSLiveFilterchain(  # decoding and branching the stream happens here
                    valkkafsmanager=self.valkkafsmanager,
                    id_rec=cs,  # identifies the stream in ValkkaFS
                    livethread=self.livethread,
                    address=address,
                    slot=cs,
                    affinity=a,
                    # verbose     =True
                    verbose=False,
                    msreconnect=10000,
                    # Reordering buffer time for Live555 packets in MILLIseconds # 0 means default
                    reordering_mstime=0
                    # reordering_mstime =300
                )

            rec_slot = cs + 100  # live and rec slot numbers must be kept separated ..

            chain_rec = ValkkaFSFileFilterchain(  # decoding and branching the stream happens here
                valkkafsmanager=self.valkkafsmanager,
                id_rec=cs,  # identifies the stream in ValkkaFS
                slot=rec_slot,
                affinity=a,
                # verbose     =True
                verbose=False)

            # send yuv to OpenGLThread
            if use_live:
                chain_live.connect_to_yuv("yuv_to_opengl_" + str(cs),
                                          self.openglthread.getInput())
            chain_rec.connect_to_yuv("yuv_to_opengl_" + str(cs),
                                     self.openglthread.getInput())

            # important .. otherwise chain will go out of context and get
            # garbage collected ..
            if use_live: self.chains.append(chain_live)
            self.chains.append(chain_rec)

            if ("no_qt" in self.pardic):
                # create our own x-windowses
                win_id = self.openglthread.createWindow(show=True)
                win_id_rec = self.openglthread.createWindow(show=True)

            else:

                # *** Choose one of the following sections ***

                # (1) Let Valkka create the windows/widget # use this: we get a window with correct parametrization
                # win_id =self.openglthread.createWindow(show=False)
                # fr     =getForeignWidget(self.w, win_id)

                if (valkka_xwin == False):
                    # (2) Let Qt create the widget
                    fr = TestWidget0(self.w)
                    win_id = int(fr.winId())

                    fr_rec = TestWidget0(self.rec_video_area)
                    win_id_rec = int(fr_rec.winId())

                else:
                    # """
                    # (3) Again, let Valkka create the window, but put on top a translucent widget (that catches mouse gestures)
                    win_id = self.openglthread.createWindow(show=False)
                    widget_pair = WidgetPair(self.w, win_id, TestWidget0)
                    fr = widget_pair.getWidget()
                    self.widget_pairs.append(widget_pair)

                    win_id_rec = self.openglthread.createWindow(show=False)
                    widget_pair = WidgetPair(self.rec_video_area, win_id_rec,
                                             TestWidget0)
                    fr_rec = widget_pair.getWidget()
                    self.widget_pairs.append(widget_pair)
                    # """

                nrow = self.pardic["videos per row"]
                print(pre, "setupUi: layout index, address : ", cw // nrow,
                      cw % nrow, address)

                self.lay.addWidget(fr, cw // nrow, cw % nrow)
                self.rec_video_area_lay.addWidget(fr_rec, cw // nrow,
                                                  cw % nrow)

                self.videoframes.append(fr)
                self.videoframes.append(fr_rec)

            # present frames with slot number cs at window win_id

            # rec_slot = cs # debug

            print(pre, "setupUi: live:", cs, win_id)
            print(pre, "setupUi: rec :", rec_slot, win_id_rec)

            token = self.openglthread.connect(slot=cs, window_id=win_id)
            tokens.append(token)
            token = self.openglthread.connect(slot=rec_slot,
                                              window_id=win_id_rec)
            tokens.append(token)

            cw += 1
            cs += 1

            if use_live:
                chain_live.decodingOn()  # tell the decoding thread to start its job
            chain_rec.decodingOn()
            a += 1
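# Sketch (not from the original): how the decoder affinity counter above cycles
# between pardic["dec affinity start"] and pardic["dec affinity stop"],
# assigning one core per camera and wrapping around at the stop value:
def affinity_cycle(n_cameras, start, stop):
    """Yield the processor core assigned to each camera's decoder thread."""
    a = start
    for _ in range(n_cameras):
        if a > stop:
            a = start
        yield a
        a += 1

print(list(affinity_cycle(6, 1, 3)))   # -> [1, 2, 3, 1, 2, 3]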
Example #16
class MyGui(QtWidgets.QMainWindow):

    debug = False

    # debug=True

    def __init__(self, pardic, parent=None):
        super(MyGui, self).__init__()
        self.pardic = pardic
        self.initVars()
        self.setupUi()
        if (self.debug):
            return
        self.openValkka()
        self.start_streams()

    def initVars(self):
        pass

    def setupUi(self):
        self.setGeometry(QtCore.QRect(100, 100, 800, 800))
        self.w = QtWidgets.QWidget(self)
        self.setCentralWidget(self.w)
        self.lay = QtWidgets.QGridLayout(self.w)

        self.videoframes = []
        self.widget_pairs = []
        self.addresses = self.pardic["cams"]

    def openValkka(self):
        # setValkkaLogLevel(loglevel_debug)
        core.setLiveOutPacketBuffermaxSize(95000)  # whoa
        # check this out:
        # http://lists.live555.com/pipermail/live-devel/2013-April/016803.html

        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            # verbose=True,
            verbose=False,
            affinity=self.pardic["live affinity"])

        self.livethread2 = LiveThread(  # second live thread for sending multicast streams
            name="live_thread2",
            # verbose=True,
            verbose=False,
            affinity=self.pardic["live2 affinity"])

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=self.pardic["n_720p"],
            n_1080p=self.pardic["n_1080p"],
            n_1440p=self.pardic["n_1440p"],
            n_4K=self.pardic["n_4K"],
            # naudio  =self.pardic["naudio"], # obsolete
            # verbose =True,
            verbose=False,
            msbuftime=self.pardic["msbuftime"],
            affinity=self.pardic["gl affinity"])

        if (self.openglthread.hadVsync()):
            w = QtWidgets.QMessageBox.warning(
                self, "VBLANK WARNING",
                "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'"
            )

        tokens = []
        self.chains = []

        a = self.pardic["dec affinity start"]
        mport = self.pardic["mcast_start_port"]

        cw = 0  # widget / window index
        cs = 1  # slot / stream count

        for address in self.addresses:
            # now livethread and openglthread are running
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]
            print(pre, "openValkka: setting decoder thread on processor", a)

            chain = MulticastFilterchain(  # decoding and branching the stream happens here
                incoming_livethread=self.livethread,
                outgoing_livethread=self.livethread2,
                openglthread=self.openglthread,
                address=address,
                multicast_address=mcast_address,
                multicast_port=mport,
                slot=cs,
                affinity=a,
                # verbose     =True
                verbose=False,
                msreconnect=10000)

            # important .. otherwise chain will go out of context and get garbage collected ..
            self.chains.append(chain)

            # replicate=self.pardic["replicate"]
            replicate = 1

            for cc in range(0, replicate):
                if ("no_qt" in self.pardic):
                    # create our own x-windowses
                    win_id = self.openglthread.createWindow(show=True)
                else:

                    # *** Choose one of the following sections ***

                    # (1) Let Valkka create the windows/widget # use this: we get a window with correct parametrization
                    # win_id =self.openglthread.createWindow(show=False)
                    # fr     =getForeignWidget(self.w, win_id)

                    if (valkka_xwin == False):
                        # (2) Let Qt create the widget
                        fr = TestWidget0(self.w)
                        win_id = int(fr.winId())
                    else:
                        # """
                        # (3) Again, let Valkka create the window, but put on top a translucent widget (that catches mouse gestures)
                        win_id = self.openglthread.createWindow(show=False)
                        widget_pair = WidgetPair(self.w, win_id, TestWidget0)
                        fr = widget_pair.getWidget()
                        self.widget_pairs.append(widget_pair)
                        # """

                    nrow = self.pardic["videos per row"]
                    print(pre, "setupUi: layout index, address : ", cw // nrow,
                          cw % nrow, address)
                    self.lay.addWidget(fr, cw // nrow, cw % nrow)

                    # print(pre,"setupUi: layout index, address : ",cw//4,cw%4,address)
                    # self.lay.addWidget(fr,cw//4,cw%4)

                    self.videoframes.append(fr)

                token = self.openglthread.connect(
                    slot=cs, window_id=win_id
                )  # present frames with slot number cs at window win_id
                tokens.append(token)
                cw += 1

            cs += 1  # TODO: crash when repeating the same slot number ..?

            chain.decodingOn()  # tell the decoding thread to start its job
            a += 1
            mport += 4

    def closeValkka(self):
        self.livethread.close()

        for chain in self.chains:
            chain.close()

        self.widget_pairs = []
        self.videoframes = []
        self.openglthread.close()

        self.livethread2.close()

    def start_streams(self):
        pass

    def stop_streams(self):
        pass

    def closeEvent(self, e):
        print(pre, "closeEvent!")
        self.stop_streams()
        self.closeValkka()
        e.accept()
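# Sketch (not from the original): the port counter above starts at
# pardic["mcast_start_port"] and advances by 4 per stream (mport += 4),
# presumably to leave room for each stream's RTP/RTCP port pairs:
def mcast_ports(start_port, n_streams, step=4):
    """One outgoing multicast port per stream, spaced like the loop above."""
    return [start_port + step * n for n in range(n_streams)]

print(mcast_ports(50000, 3))   # -> [50000, 50004, 50008]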
Example #17
"""
TODO: FileCacheThread should send initialization frame
- FileCacheThread .. per slot, two substream SetupFrame(s) .. or what?
- Video jerks a bit .. is this because the play edge is too close to the block edge and it runs empty before new frames arrive?
"""



setValkkaLogLevel(loglevel_debug)

def cb(mstime):
    print("mstime callback", mstime)

# create OpenGLThread (for drawing video) and AVThread (for decoding)
glthread = OpenGLThread(name="gl_thread")
ctx = core.FrameFifoContext()
avthread = core.AVThread(
    "avthread",
    glthread.getInput(),
    ctx)
av_in_filter = avthread.getFrameFilter()

avthread.startCall()
avthread.decodingOnCall()

# create an X-window
window_id = glthread.createWindow()

# map frames with slot 1 to that window
glthread.newRenderGroup(window_id)
Example #18
class MyGui(QtWidgets.QMainWindow):

  debug=False
  # debug=True

  def __init__(self,pardic,parent=None):
    super(MyGui, self).__init__()
    self.pardic=pardic
    self.initVars()
    self.setupUi()
    if (self.debug): 
      return
    self.openValkka()
    self.start_streams()
    
    
  def initVars(self):
    pass


  def setupUi(self):
    self.setGeometry(QtCore.QRect(100,100,800,800))
    self.w=QtWidgets.QWidget(self)
    self.setCentralWidget(self.w)
    self.lay=QtWidgets.QGridLayout(self.w)
    
    self.videoframes =[]
    self.addresses=self.pardic["cams"]
    
  
  def openValkka(self):
    self.livethread=LiveThread(         # starts live stream services (using live555)
      name   ="live_thread",
      # verbose=True,
      verbose=False,
      affinity=self.pardic["live affinity"]
    )
    
    # create widgets before starting OpenGLThread and reserving frames
    cw=0 # widget / window index
    self.win_ids=[]
    for address in self.addresses:
      for cc in range(0,self.pardic["replicate"]):
        fr =TestWidget0(self.w)
        win_id =int(fr.winId())
        nrow=self.pardic["videos per row"]
        print(pre,"setupUi: layout index, address : ",cw//nrow,cw%nrow,address)
        self.lay.addWidget(fr,cw//nrow,cw%nrow)
        self.videoframes.append(fr)
        self.win_ids.append(win_id)
        cw+=1
         
    win_iter =iter(self.win_ids)
            
    self.openglthread=OpenGLThread(     # starts frame presenting services
      name    ="mythread",
      n_720p   =self.pardic["n_720p"],   # reserve stacks of YUV video frames for various resolutions
      n_1080p  =self.pardic["n_1080p"],
      n_1440p  =self.pardic["n_1440p"],
      n_4K     =self.pardic["n_4K"],
      # naudio  =self.pardic["naudio"], # obsolete
      # verbose =True,
      verbose =False,
      msbuftime=self.pardic["msbuftime"],
      affinity=self.pardic["gl affinity"]
      )

    
    if (self.openglthread.hadVsync()):
      w=QtWidgets.QMessageBox.warning(self,"VBLANK WARNING","Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'")

    tokens     =[]
    self.chains=[]
    
    a =self.pardic["dec affinity start"]
    cw=0 # widget / window index
    cs=1 # slot / stream count
    
    for address in self.addresses:
      # now livethread and openglthread are running
      if (a>self.pardic["dec affinity stop"]): a=self.pardic["dec affinity start"]
      print(pre,"openValkka: setting decoder thread on processor",a)

      chain=BasicFilterchain(       # decoding and branching the stream happens here
        livethread  =self.livethread, 
        openglthread=self.openglthread,
        address     =address,
        slot        =cs,
        affinity    =a,
        # verbose     =True
        verbose     =False,
        msreconnect =10000,
        
        # flush_when_full =True
        flush_when_full =False,
        
        # time_correction   =TimeCorrectionType_dummy,  # Timestamp correction type: TimeCorrectionType_none, TimeCorrectionType_dummy, or TimeCorrectionType_smart (default)
        # time_correction   =TimeCorrectionType_smart,
        # # by default, no need to specify
        
        recv_buffer_size  =0,                        # Operating system socket ringbuffer size in bytes # 0 means default
        # recv_buffer_size  =1024*800,   # 800 KB
        
        reordering_mstime =0                           # Reordering buffer time for Live555 packets in MILLIseconds # 0 means default
        # reordering_mstime =300                         
        )
  
      self.chains.append(chain) # important .. otherwise chain will go out of context and get garbage collected ..
      
      for cc in range(0,self.pardic["replicate"]):
        token  =self.openglthread.connect(slot=cs,window_id=next(win_iter)) # present frames with slot number cs at window win_id
        tokens.append(token)
        cw+=1
      
      cs+=1
      chain.decodingOn() # tell the decoding thread to start its job
      a+=1
      
  
  def closeValkka(self):
    self.livethread.close()
    
    for chain in self.chains:
      chain.close()
    
    self.chains       =[]
    self.widget_pairs =[]
    self.videoframes  =[]
    self.openglthread.close()
    
    
  def start_streams(self):
    pass
    
    
  def stop_streams(self):
    pass

    
  def closeEvent(self,e):
    print(pre,"closeEvent!")
    self.stop_streams()
    self.closeValkka()
    e.accept()
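# Sketch (not from the original): window ids are created first and then consumed
# with an iterator, one per (camera x replicate) connect() call in the loop above:
win_ids = [101, 102, 103, 104]      # pretend window ids: 2 cameras, replicate=2
win_iter = iter(win_ids)
for slot in (1, 2):                 # one slot per camera
    for _ in range(2):              # "replicate": show the same stream twice
        print("slot", slot, "-> window id", next(win_iter))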
Example #19
from valkka.api2 import BasicFilterchain
"""<rtf>
Instantiating the API level 2 LiveThread starts running the underlying cpp thread:
<rtf>"""
livethread = LiveThread(  # starts live stream services (using live555)
    name="live_thread",
    verbose=False,
    affinity=-1)
"""<rtf>
Same goes for OpenGLThread:
<rtf>"""
openglthread = OpenGLThread(
    name="glthread",
    n_720p=20,  # reserve stacks of YUV video frames for various resolutions
    n_1080p=20,
    n_1440p=0,
    n_4K=0,
    verbose=False,
    msbuftime=100,
    affinity=-1)
"""<rtf>
The filterchain and decoder (AVThread) are encapsulated into a single class.  Instantiating starts the AVThread (decoding is off by default):
<rtf>"""
chain = BasicFilterchain(  # decoding and branching the stream happens here
    livethread=livethread,
    openglthread=openglthread,
    address="rtsp://*****:*****@192.168.1.41",
    slot=1,
    affinity=-1,
    verbose=False,
    msreconnect=10000  # if no frames in ten seconds, try to reconnect