def openValkka(self):
    self.livethread = LiveThread(  # starts live stream services (using live555)
        name="live_thread",
        # verbose=True,
        verbose=False,
        affinity=self.pardic["live affinity"]
    )
    self.gpu_handler = GPUHandler(self.pardic)

    a = self.pardic["dec affinity start"]
    cs = 1  # slot / stream count

    for address in self.addresses:
        # now livethread and openglthread are running
        if (a > self.pardic["dec affinity stop"]):
            a = self.pardic["dec affinity start"]
        print(pre, "openValkka: setting decoder thread on processor", a)

        chain = ManagedFilterchain(  # decoding and branching the stream happens here
            livethread=self.livethread,
            openglthreads=self.gpu_handler.openglthreads,
            address=address,
            slot=cs,
            affinity=a,
            msreconnect=10000,
            verbose=True
        )
        # important .. otherwise chain will go out of context and get garbage collected ..
        self.chains.append(chain)
        cs += 1
        a += 1
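# A minimal teardown sketch for the variant above -- an illustration of the
# pattern, not code from the original file. It mirrors the closeValkka methods
# of the test_studio examples further below (order: live => chain => opengl):
def closeValkka(self):
    self.livethread.close()
    for chain in self.chains:
        chain.close()
    self.chains = []
    self.gpu_handler.close()  # GPUHandler.close() is used the same way in the valkka.live code below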
def openValkka(self):
    self.cpu_scheme = CPUScheme()

    # self.dm.camera_collection
    try:
        memory_config = next(
            self.dm.config_collection.get(
                {"classname": DataModel.MemoryConfigRow.__name__}))
    except StopIteration:
        print(pre, "Using default mem config")
        memory_config = default.memory_config

    # accumulated frames per buffering time = n_frames
    n_frames = round(memory_config["msbuftime"] * default.fps / 1000.)

    if (memory_config["bind"]):
        self.cpu_scheme = CPUScheme()
    else:
        self.cpu_scheme = CPUScheme(n_cores=-1)

    self.gpu_handler = GPUHandler(
        n_720p=memory_config["n_720p"] * n_frames,  # n_cameras * n_frames
        n_1080p=memory_config["n_1080p"] * n_frames,
        n_1440p=memory_config["n_1440p"] * n_frames,
        n_4K=memory_config["n_4K"] * n_frames,
        msbuftime=memory_config["msbuftime"],
        verbose=False,
        cpu_scheme=self.cpu_scheme)

    self.livethread = LiveThread(
        name="live_thread",
        verbose=False,
        affinity=self.cpu_scheme.getLive())

    self.usbthread = USBDeviceThread(
        name="usb_thread",
        verbose=False,
        affinity=self.cpu_scheme.getUSB())

    self.filterchain_group = FilterChainGroup(
        datamodel=self.dm,
        livethread=self.livethread,
        usbthread=self.usbthread,
        gpu_handler=self.gpu_handler,
        cpu_scheme=self.cpu_scheme)
    self.filterchain_group.read()
    # self.filterchain_group.update() # TODO: use this once fixed

    try:
        from valkka.mvision import multiprocess
    except ImportError:
        pass
    else:
        if self.mvision:
            self.thread = multiprocess.QValkkaThread()
            self.thread.start()
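# Worked example of the frame-stack sizing above (values illustrative; only the
# formula comes from the code): assuming default.fps = 25, msbuftime = 300 ms
# and two 1080p cameras configured,
#
#   n_frames = round(300 * 25 / 1000.)   # -> 8 frames buffered per stream
#   n_1080p  = 2 * n_frames              # -> 16 pre-reserved 1080p frames in GPUHandler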
def openValkka(self):
    # the thread that's watching the mvision_processes
    self.thread = QValkkaThread()
    self.thread.start()

    self.mvision_process.start()
    self.thread.addProcess(self.mvision_process)

    self.livethread = LiveThread(  # starts live stream services (using live555)
        name="live_thread",
        verbose=False
    )

    self.filethread = FileThread(
        name="file_thread",
        verbose=False
    )

    self.openglthread = OpenGLThread(  # starts frame presenting services
        name="mythread",
        n_720p=10,
        n_1080p=10,
        n_1440p=10,
        n_4K=10,
        verbose=False,
        msbuftime=100,
        affinity=-1
    )

    # this filterchain creates a shared memory server
    self.chain = ShmemFilterchain1(  # decoding and branching the stream happens here
        openglthread=self.openglthread,
        slot=1,
        shmem_name=self.shmem_name,
        shmem_image_dimensions=self.shmem_image_dimensions,
        shmem_image_interval=self.shmem_image_interval,
        shmem_ringbuffer_size=self.shmem_ringbuffer_size
    )

    shmem_name, n_buffer, shmem_image_dimensions = self.chain.getShmemPars()

    self.video = QtWidgets.QWidget(self.video_area)
    self.win_id = int(self.video.winId())
    self.video_lay.addWidget(self.video, 0, 0)

    self.token = self.openglthread.connect(slot=1, window_id=self.win_id)
    self.chain.decodingOn()  # tell the decoding thread to start its job

    self.mvision_process.activate(
        n_buffer=self.shmem_ringbuffer_size,
        image_dimensions=self.shmem_image_dimensions,
        shmem_name=self.shmem_name
    )
def openValkka(self):
    self.mvision_process.go()

    if self.mvision_master_process is not None:
        assert(issubclass(self.mvision_master_process.__class__, QShmemProcess))
        self.mvision_master_process.go()

    self.livethread = LiveThread(  # starts live stream services (using live555)
        name="live_thread",
        verbose=False)

    self.filethread = FileThread(
        name="file_thread",
        verbose=False)

    self.openglthread = OpenGLThread(  # starts frame presenting services
        name="mythread",
        n_720p=10,
        n_1080p=10,
        n_1440p=10,
        n_4K=10,
        verbose=False,
        msbuftime=100,
        affinity=-1)

    # this filterchain creates a shared memory server
    self.chain = ShmemFilterchain1(  # decoding and branching the stream happens here
        openglthread=self.openglthread,
        slot=1,
        shmem_name=self.shmem_name,
        shmem_image_dimensions=self.shmem_image_dimensions,
        shmem_image_interval=self.shmem_image_interval,
        shmem_ringbuffer_size=self.shmem_ringbuffer_size)

    shmem_name, n_buffer, shmem_image_dimensions = self.chain.getShmemPars()

    self.video = QtWidgets.QWidget(self.video_area)

    if hasattr(self.mvision_process, "analyzer_video_widget_class"):
        # the machine vision class may declare what video widget it wants to use to define
        # the machine vision parameters (line crossing, zone intrusion, etc.)
        self.analyzer_widget = AnalyzerWidget(
            parent=self.video_area,
            analyzer_video_widget_class=self.mvision_process.analyzer_video_widget_class)
    else:
        self.analyzer_widget = AnalyzerWidget(parent=self.video_area)

    self.mvision_process.connectAnalyzerWidget(self.analyzer_widget)
    self.analyzer_widget.activate()

    self.win_id = int(self.video.winId())
    self.video_lay.addWidget(self.video, 0, 0)
    self.video_lay.addWidget(self.analyzer_widget, 0, 1)

    self.token = self.openglthread.connect(slot=1, window_id=self.win_id)
    self.chain.decodingOn()  # tell the decoding thread to start its job

    self.mvision_process.activate(
        n_buffer=self.shmem_ringbuffer_size,
        image_dimensions=self.shmem_image_dimensions,
        shmem_name=self.shmem_name)

    if self.mvision_master_process:
        self.mvision_process.setMasterProcess(self.mvision_master_process)
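# The shared-memory parameters given to ShmemFilterchain1 above must match what
# the machine-vision process is activated with: the filterchain acts as the
# shmem server, the process as the client. A sketch of keeping them in one
# place (names and values illustrative, kwargs as used in these listings):
shmem_pars = dict(
    shmem_name="test_mvision_1",
    shmem_ringbuffer_size=10,
    shmem_image_dimensions=(1920 // 4, 1080 // 4),  # quarter of full-hd, as in the examples below
    shmem_image_interval=1000,                      # ms between RGB frames pushed to shmem
)
# server side: ShmemFilterchain1(openglthread=..., slot=1, **shmem_pars)
# client side: mvision_process.activate(n_buffer=shmem_pars["shmem_ringbuffer_size"],
#                                        image_dimensions=shmem_pars["shmem_image_dimensions"],
#                                        shmem_name=shmem_pars["shmem_name"])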
class MyGui(QtWidgets.QMainWindow): #config_dir = setValkkaLocalDir("live", varname = "config_dir") #valkkafs_dir = setValkkaLocalDir("live","fs", varname = "valkkafs_dir") def __init__(self, parent=None): """ctor """ super(MyGui, self).__init__() self.initDirs() self.initVars() self.initConfigFiles() self.readDB() self.generateMethods() self.setupUi() self.startProcesses() self.openValkka() self.makeLogic() self.post() def getMargins(self): # https://doc.qt.io/qt-5/application-windows.html#x11-peculiarities if singleton.dx > 0: return singleton.dy = self.geometry().y() - self.y() # y() : with frame, geometry().y() : without frame singleton.dx = self.geometry().x() - self.x() singleton.dw = self.frameGeometry().width() - self.width() singleton.dh = self.frameGeometry().height() - self.height() print("getMargins: dy, dx, dw, dh", singleton.dy, singleton.dx, singleton.dw, singleton.dh) # dy, dx, dw, dh 29 4 8 33 # WARNING! Must move main window before this starts to give any values other than zero ..! # *** redefined Qt member functions *** def closeEvent(self, e): """Triggered when the main qt program exits """ print("gui : closeEvent!") self.closeContainers() # self.manage_cameras_win.unSetPropagate() # don't send signals .. if you don't do this: close => closeEvent => will trigger self.reOpen # self.manage_cameras_win.close() self.camera_list_win.unSetPropagate() self.camera_list_win.close() self.config_win.unSetPropagate() self.config_win.close() self.closeValkka() singleton.data_model.close() self.closeProcesses() e.accept() def initDirs(self): self.config_dir = singleton.config_dir self.valkkafs_dir = singleton.valkkafs_dir def initVars(self): """Define files & variables """ self.version_file = self.config_dir.getFile("version") self.layout_file = self.config_dir.getFile("layout") # singleton.thread = None # a QThread that reads multiprocessing pipes self.containers_grid = [] # list of instances of valkka.live.container.grid.VideoContainerNxM self.containers_playback = [] self.mvision_classes, self.mvision_client_classes, self.mvision_master_classes =\ tools.scanMVisionClasses( singleton.mvision_package_names ) if (len(self.mvision_classes) > 0 or len(self.mvision_client_classes) > 0): self.mvision = True else: self.mvision = False self.valkkafs = None self.config_modified = False self.valkkafs_modified = False def initConfigFiles(self): self.first_start = True ver = self.readVersionNumber() if ver is not None: # this indicates that the program has been started earlier print("valkka.live : loading config file for version number", ver) if ver: if (ver[0] == version.VERSION_MAJOR and ver[1] == version.VERSION_MINOR): self.first_start = False else: # incorrect version number print("valkka.live : clearing config") pass # .. 
or handle migration somehow if self.first_start: # first time program start # TODO: eula could be shown here print(pre, "initConfigFiles : first start") self.config_dir.reMake() self.saveVersionNumber() # self.saveConfigFile() # self.saveWindowLayout() # clears window layout self.first_start = True def readDB(self): """Datamodel includes the following files: config.dat, devices.dat """ singleton.data_model = DataModel(directory = self.config_dir.get()) # singleton.data_model = DataModel(directory = tools.getConfigDir()) if (self.first_start): print(pre, "readDB : first start") singleton.data_model.clearAll() singleton.data_model.saveAll() # If camera collection is corrupt if not singleton.data_model.checkCameraCollection(): singleton.data_model.clearCameraCollection() def saveVersionNumber(self): with open(self.version_file, "w") as f: f.write(version.get()) def readVersionNumber(self): try: with open(self.version_file, "r") as f: st = f.read() vs = [] for s in st.split("."): vs.append(int(s)) except: print("valkka.live : could not read version number") return None else: return vs def saveWindowLayout(self): self.serializeContainers() def loadWindowLayout(self): self.closeContainers() self.deSerializeContainers() # *** Generate Qt structures *** def generateMethods(self): """Autogenerate some member functions - Generates slot functions for launching containers """ for i in range(1, 5): # adds member function grid_ixi_slot(self) self.makeGridSlot(i, i) self.makePlaybackGridSlot(i, i) for cl in self.mvision_classes: self.makeMvisionSlot(cl) for cl in self.mvision_client_classes: self.makeMvisionClientSlot(cl) def setupUi(self): self.setStyleSheet(style.main_gui) self.setWindowTitle(singleton.program_name) self.setGeometry(QtCore.QRect(100, 100, 500, 500)) self.w = QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.filemenu = FileMenu(parent=self) self.viewmenu = ViewMenu(parent=self) # grids up to 4x4 self.configmenu = ConfigMenu(parent=self) if self.mvision: mvision_elements = [] for cl in self.mvision_classes + self.mvision_client_classes: if cl.auto_menu: el = QuickMenuElement(title = cl.name, method_name = cl.name) mvision_elements.append(el) class MVisionMenu(QuickMenu): title = "Machine Vision" elements = mvision_elements self.mvisionmenu = MVisionMenu(parent = self) self.aboutmenu = AboutMenu(parent=self) # create container and their windows self.manage_cameras_container = singleton.data_model.getDeviceListAndForm(None) self.manage_memory_container = singleton.data_model.getConfigForm() self.manage_valkkafs_container = singleton.data_model.getValkkaFSForm() self.manage_memory_container.signals.save.connect(self.config_modified_slot) self.manage_cameras_container.getForm().signals.save_record.connect(self.config_modified_slot) self.manage_valkkafs_container.signals.save.connect(self.valkkafs_modified_slot) self.config_win = QTabCapsulate( "Configuration", [ (self.manage_cameras_container. widget, "Camera Configuration"), (self.manage_memory_container. widget, "Memory Configuration"), (self.manage_valkkafs_container.widget, "Recording Configuration") ] ) self.config_win.signals.close.connect(self.config_dialog_close_slot) # when the configuration dialog is reopened, inform the camera configuration form .. 
this way it can re-check if usb cams are available self.config_win.signals.show.connect(self.manage_cameras_container.getForm().show_slot) self.config_win.signals.show.connect(self.manage_cameras_container.choose_first_slot) # so that we have at least one device chosen self.makeCameraTree() self.camera_list_win = QCapsulate(self.treelist, "Camera List") self.wait_label = QtWidgets.QLabel("Restarting Valkka, please wait ..") self.wait_window = QCapsulate(self.wait_label, "Wait", nude = True) def makeCameraTree(self): self.root = HeaderListItem() self.treelist = BasicView(parent = None, root = self.root) self.updateCameraTree() def updateCameraTree(self): self.treelist.reset_() self.server = ServerListItem( name = "Localhost", ip = "127.0.0.1", parent = self.root) """ self.server1 = ServerListItem( name="First Server", ip="192.168.1.20", parent=self.root) """ """ self.camera1 = RTSPCameraListItem(camera=RTSPCameraDevice( ip="192.168.1.4", username="******", password="******"), parent=self.server1) self.camera2 = RTSPCameraListItem(camera=RTSPCameraDevice( ip="192.168.1.4", username="******", password="******"), parent=self.server1) """ devices = [] for row in singleton.data_model.camera_collection.get(): # print(pre, "makeCameraTree : row", row) if (row["classname"] == RTSPCameraRow.__name__): row.pop("classname") devices.append( RTSPCameraListItem( camera = RTSPCameraDevice(**row), parent = self.server ) ) elif (row["classname"] == USBCameraRow.__name__): row.pop("classname") devices.append( USBCameraListItem( camera = USBCameraDevice(**row), parent = self.server ) ) self.treelist.update() self.treelist.expandAll() def makeLogic(self): # *** When camera list has been closed, re-create the cameralist tree and update filterchains *** # self.manage_cameras_win.signals.close.connect(self.updateCameraTree) # now put into save_camera_config_slot # self.manage_cameras_win.signals.close.connect(self.filterchain_group.update) # TODO: use this once fixed # self.manage_cameras_win.signals.close.connect(self.filterchain_group.read) # TODO: eh.. lets be sure of this .. (are we releasing slots in the LiveThread etc.) # self.manage_cameras_win.signals.close.connect(self.save_camera_config_slot) # self.manage_memory_container.signals.save.connect(self.save_memory_conf_slot) # *** Menu bar connections *** # the self.filemenu.exit attribute was autogenerated self.filemenu.exit. triggered.connect(self.exit_slot) self.filemenu.save_window_layout. triggered.connect(self.save_window_layout_slot) self.filemenu.load_window_layout. triggered.connect(self.load_window_layout_slot) """ self.configmenu.manage_cameras. triggered.connect( self.manage_cameras_slot) self.configmenu.memory_usage. triggered.connect( self.memory_usage_slot) """ self.configmenu.configuration_dialog.triggered.connect(self.config_dialog_slot) self.viewmenu.camera_list. triggered.connect(self.camera_list_slot) self.aboutmenu.about_program. 
triggered.connect(self.about_slot) # *** Connect autogenerated menu calls into autogenerated slot functions *** for i in range(1, 5): # gets member function grid_ixi_slot slot_func = getattr(self, "grid_%ix%i_slot" % (i, i)) # gets member function grid_ixi from self.viewmenu.video_grid menu_func = getattr(self.viewmenu.video_grid, "grid_%ix%i" % (i, i)) menu_func.triggered.connect(slot_func) # i.e., like this : self.viewmenu.video_grid.grid_1x1.triggered.connect(slot_func) for i in range(1, 5): # gets member function grid_ixi_slot slot_func = getattr(self, "playback_grid_%ix%i_slot" % (i, i)) # gets member function grid_ixi from self.viewmenu.video_grid menu_func = getattr(self.viewmenu.playback_video_grid, "grid_%ix%i" % (i, i)) menu_func.triggered.connect(slot_func) # i.e., like this : self.viewmenu.video_grid.grid_1x1.triggered.connect(slot_func) # *** autogenerated machine vision menu and slots *** for cl in self.mvision_classes + self.mvision_client_classes: slot_func_name = cl.name+"_slot" if hasattr(self, slot_func_name): getattr(self.mvisionmenu,cl.name).triggered.connect(getattr(self,slot_func_name)) def post(self): pass # *** Container handling *** def serializeContainers(self): """Serializes the current view of open video grids (i.e. the view) returns a dictionary where the keys are complete classnames each value corresponds to a list of containers of the class described by the key each serialized container looks like this: :: dic={ "kwargs" : {}, # parameters that we're used to instantiate this class } A concrete example: :: {'valkka.live.container.grid.VideoContainerNxM': [ { # individual serialized container 'child_class': <class 'valkka.live.container.video.VideoContainer'>, 'child_pars': [{'device_id': -1}], 'geom': (604, 0, 300, 300), 'm_dim': 1, 'n_dim': 1, 'n_xscreen': 0, 'title': 'Video Grid' }, ... ] } - TODO: this stuff should be moved to the db .. ? Or just keep using files..? - Different row types: VideoContainerNxM : columns: child_class, child_pars, geom, etc.., LAYOUT_ID PlayVideoContainerNxM : .., LAYOUT_ID CameraListWindow : .., LAYOUT_ID - LAYOUT_ID identifies to which layout they belong """ """ container_list = [] # list of instances of classes in valkka.live.container, e.g. valkka.live.container.grid.VideoContainerNxM, etc. for container in self.containers_grid: # these are of the type valkka.live.container.grid.VideoContainerNxM print("gui: serialize containers : container=", pformat(container)) container_list.append(container.serialize()) # TODO: serialize self.containers_playback # classnames compatible with local namespace return { "valkka.live.container.grid.VideoContainerNxM" : container_list } """ singleton.data_model.layout_collection.clear() container_list = [] for container in self.containers_grid: ser = container.serialize() # print(ser) # {'title': 'Video Grid', 'n_xscreen': 0, 'child_class': <class 'valkka.live.container.video.VideoContainer'>, # 'child_pars': [{'device_id': -1}, {'device_id': -1}, {'device_id': -1}, {'device_id': -1}], 'geom': (604, 0, 300, 300), 'n_dim': 2, 'm_dim': 2} # singleton.data_model.layout_collection.new(VideoContainerNxMRow, ser) # nopes .. 
ser.update({"type":"VideoContainerNxM"}) container_list.append(ser) for container in self.containers_playback: ser = container.serialize() ser.update({"type":"PlayVideoContainerNxM"}) container_list.append(ser) ser = {"type": "QMainWindow", "geom": getCorrectedGeom(self)} container_list.append(ser) if self.camera_list_win.isVisible(): ser = {"type": "CameraListWindow", "geom": getCorrectedGeom(self.camera_list_win)} container_list.append(ser) singleton.data_model.layout_collection.new(LayoutContainerRow, {"layout" : container_list}) print(singleton.data_model.layout_collection) singleton.data_model.layout_collection.save() def deSerializeContainers(self): """Re-creates containers, based on the list saved into layout_collection This is the inverse of self.serializeContainers Containers must be closed & self.contiainers etc. list must be cleared before calling this """ # glo = globals() # print("glo>",glo) singleton.reCacheDevicesById() # singleton.devices_by_id will be used by the containers try: row = next(singleton.data_model.layout_collection.get()) except StopIteration: return container_list = row["layout"] # print(">", container_list) for container_dic in container_list: t = container_dic.pop("type") # get the type & remove it from the dict if t == "VideoContainerNxM": container_dic["child_class"] = nameToClass(container_dic.pop("child_class")) # swap from class name to class instance container_dic["geom"] = tuple(container_dic["geom"]) # woops.. tuple does not json-serialize, but is changed to list .. so change it back to tuplee # non-serializable parameters: dic = { "parent" : None, "gpu_handler" : self.gpu_handler, # RootContainers(s) pass this downstream to child containers "filterchain_group" : self.filterchain_group # RootContainers(s) pass this downstream to child containers } container_dic.update(dic) # now container has the parameters to instantiate the object print(">", container_dic) cont = container.VideoContainerNxM(**container_dic) # instantiate container cont.signals.closing.connect(self.rem_grid_container_slot) self.containers_grid.append(cont) if t == "PlayVideoContainerNxM": container_dic["child_class"] = nameToClass(container_dic.pop("child_class")) # swap from class name to class instance container_dic["geom"] = tuple(container_dic["geom"]) # woops.. tuple does not json-serialize, but is changed to list .. 
so change it back to tuplee # non-serializable parameters: dic = { "parent" : None, "gpu_handler" : self.gpu_handler, # RootContainers(s) pass this downstream to child containers "filterchain_group" : self.filterchain_group_play, "valkkafsmanager" : self.valkkafsmanager, "playback_controller" : self.playback_controller } container_dic.update(dic) # now container has the parameters to instantiate the object print(">", container_dic) cont = container.PlayVideoContainerNxM(**container_dic) # instantiate container cont.signals.closing.connect(self.rem_playback_grid_container_slot) self.containers_playback.append(cont) elif t == "QMainWindow": geom = container_dic["geom"] self.setGeometry(geom[0], geom[1], geom[2], geom[3]) elif t == "CameraListWindow": geom = container_dic["geom"] self.camera_list_win.setVisible(True) self.camera_list_win.setGeometry(geom[0], geom[1], geom[2], geom[3]) def closeContainers(self): print("gui: closeContainers: containers_grid =", self.containers_grid) for container in self.containers_grid: container.close() self.containers_grid = [] for container in self.containers_playback: container.close() self.containers_playback = [] # *** Multiprocess handling *** def startProcesses(self): """Create and start python multiprocesses Starting a multiprocess creates a process fork. In theory, there should be no problem in first starting the multithreading environment and after that perform forks (only the thread requestin the fork is copied), but in practice, all kinds of weird behaviour arises. Read all about it in here : http://www.linuxprogrammingblog.com/threads-and-fork-think-twice-before-using-them """ singleton.process_map = {} # each key is a list of started multiprocesses # self.process_avail = {} # count instances singleton.client_process_map = {} singleton.master_process_map = {} def span(mvision_classes: list, process_map: dict): for mvision_class in mvision_classes: name = mvision_class.name tag = mvision_class.tag num = mvision_class.max_instances if (tag not in process_map): process_map[tag] = [] # self.process_avail[tag] = num for n in range(0, num): print("startProcesses: spanning", tag, n) # verbose = True verbose = singleton.mvision_verbose p = mvision_class(verbose = verbose) # p.start() p.go() process_map[tag].append(p) span(self.mvision_classes, singleton.process_map) span(self.mvision_client_classes, singleton.client_process_map) span(self.mvision_master_classes, singleton.master_process_map) def closeProcesses(self): def stop(process_map): for key in process_map: for p in process_map[key]: # p.stop() p.requestStop() def wait(process_map): for key in process_map: for p in process_map[key]: p.waitStop() stop(singleton.process_map) stop(singleton.client_process_map) stop(singleton.master_process_map) wait(singleton.process_map) wait(singleton.client_process_map) wait(singleton.master_process_map) # *** Valkka *** def openValkka(self): self.cpu_scheme = CPUScheme() # singleton.data_model.camera_collection try: memory_config = next(singleton.data_model.config_collection.get({"classname" : MemoryConfigRow.__name__})) except StopIteration: print(pre, "Using default mem config") singleton.data_model.writeDefaultMemoryConfig() memory_config = default.get_memory_config() try: valkkafs_config = next(singleton.data_model.valkkafs_collection.get({"classname" : ValkkaFSConfigRow.__name__})) except StopIteration: print(pre, "Using default valkkafs config") singleton.data_model.writeDefaultValkkaFSConfig() valkkafs_config = default.get_valkkafs_config() n_frames = 
round(memory_config["msbuftime"] * default.fps / 1000.) # accumulated frames per buffering time = n_frames if (memory_config["bind"]): self.cpu_scheme = CPUScheme() else: self.cpu_scheme = CPUScheme(n_cores = -1) self.gpu_handler = GPUHandler( n_720p = memory_config["n_720p"] * n_frames, # n_cameras * n_frames n_1080p = memory_config["n_1080p"] * n_frames, n_1440p = memory_config["n_1440p"] * n_frames, n_4K = memory_config["n_4K"] * n_frames, msbuftime = memory_config["msbuftime"], verbose = False, cpu_scheme = self.cpu_scheme ) self.livethread = LiveThread( name = "live_thread", verbose = False, affinity = self.cpu_scheme.getLive() ) self.usbthread = USBDeviceThread( name = "usb_thread", verbose = False, affinity = self.cpu_scheme.getUSB() ) # see datamodel.row.ValkkaFSConfigRow blocksize = valkkafs_config["blocksize"] n_blocks = valkkafs_config["n_blocks"] fs_flavor = valkkafs_config["fs_flavor"] record = valkkafs_config["record"] # TODO: activate this if ValkkaFS changed in config! if fs_flavor == "file": partition_uuid = None else: partition_uuid = valkkafs_config["partition_uuid"] create_new_fs = False if self.valkkafs is None: # first time create_new_fs = False # try to load initially from disk else: print("openValkka: checking ValkkaFS") create_new_fs = not self.valkkafs.is_same( # has changed, so must recreate partition_uuid = partition_uuid, # None or a string blocksize = blocksize * 1024*1024, n_blocks = n_blocks ) if create_new_fs: print("openValkka: ValkkaFS changed!") if not create_new_fs: # let's try to load it print("openValkka: trying to load ValkkaFS") try: self.valkkafs = ValkkaFS.loadFromDirectory( dirname = singleton.valkkafs_dir.get() ) except ValkkaFSLoadError as e: print("openValkka: loading ValkkaFS failed with", e) create_new_fs = True # no luck, must recreate if create_new_fs: print("openValkka: (re)create ValkkaFS") self.valkkafs = ValkkaFS.newFromDirectory( dirname = singleton.valkkafs_dir.get(), blocksize = valkkafs_config["blocksize"] * 1024*1024, # MB n_blocks = valkkafs_config["n_blocks"], partition_uuid = partition_uuid, verbose = True ) # to keep things consistent.. 
singleton.data_model.valkkafs_collection.new( ValkkaFSConfigRow, { # "dirname" : default.valkkafs_config["dirname"], # not written to db for the moment "n_blocks" : default.get_valkkafs_config()["n_blocks"], "blocksize" : valkkafs_config["blocksize"], "fs_flavor" : valkkafs_config["fs_flavor"], "record" : record, "partition_uuid" : partition_uuid }) """ else: self.valkkafs = None """ # if no recording selected, set self.valkkafsmanager = None self.valkkafsmanager = ValkkaFSManager( self.valkkafs, write = record, # True or False read = record, cache = record ) self.playback_controller = PlaybackController(valkkafs_manager = self.valkkafsmanager) self.filterchain_group = LiveFilterChainGroup( datamodel = singleton.data_model, livethread = self.livethread, usbthread = self.usbthread, gpu_handler = self.gpu_handler, cpu_scheme = self.cpu_scheme) self.filterchain_group.read() if record: print("openValkka: ValkkaFS **RECORDING ACTIVATED**") self.filterchain_group.setRecording(RecordType.always, self.valkkafsmanager) # self.filterchain_group.update() # TODO: use this once fixed self.filterchain_group_play = PlaybackFilterChainGroup( datamodel = singleton.data_model, valkkafsmanager = self.valkkafsmanager, gpu_handler = self.gpu_handler, cpu_scheme = self.cpu_scheme) self.filterchain_group_play.read() try: from valkka.mvision import multiprocess except ImportError: pass """ else: if self.mvision: singleton.thread = multiprocess.QValkkaThread() singleton.thread.start() """ def closeValkka(self): # live => chain => opengl #self.livethread.close() # self.usbthread.close() print("Closing live & usb threads") self.livethread.requestClose() self.usbthread.requestClose() self.livethread.waitClose() self.usbthread.waitClose() print("Closing filterchains") self.filterchain_group.close() self.filterchain_group_play.close() print("Closing OpenGLThreads") self.gpu_handler.close() self.playback_controller.close() print("Closing ValkkaFS threads") self.valkkafsmanager.close() # print("Closing multiprocessing frontend") """ if singleton.thread: singleton.thread.stop() """ def reOpenValkka(self): print("gui: valkka reinit") self.wait_window.show() self.saveWindowLayout() self.closeContainers() self.closeValkka() self.openValkka() self.loadWindowLayout() self.wait_window.hide() # *** slot generators *** def makeGridSlot(self, n, m): """Create a n x m video grid, show it and add it to the list of video containers """ def slot_func(): cont = container.VideoContainerNxM( gpu_handler = self.gpu_handler, filterchain_group = self.filterchain_group, n_dim = n, m_dim = m ) cont.signals.closing.connect(self.rem_grid_container_slot) self.containers_grid.append(cont) self.getMargins() setattr(self, "grid_%ix%i_slot" % (n, m), slot_func) def makePlaybackGridSlot(self, n, m): """Create a n x m video grid, show it and add it to the list of video containers """ def slot_func(): cont = container.PlayVideoContainerNxM( gpu_handler = self.gpu_handler, filterchain_group = self.filterchain_group_play, n_dim = n, m_dim = m, valkkafsmanager = self.valkkafsmanager, playback_controller = self.playback_controller ) cont.signals.closing.connect(self.rem_playback_grid_container_slot) self.containers_playback.append(cont) setattr(self, "playback_grid_%ix%i_slot" % (n, m), slot_func) def makeMvisionSlot(self, cl): if cl.auto_menu == False: return def slot_func(): if ( (cl.tag in singleton.process_map) and (len(singleton.process_map[cl.tag])>0) ): cont = container.VideoContainerNxM( parent = None, gpu_handler = self.gpu_handler, 
filterchain_group = self.filterchain_group, title = cl.name, n_dim = 1, m_dim = 1, child_class = container.MVisionContainer, child_class_pars = { "mvision_class": cl, # "thread" : singleton.thread, # "process_map" : singleton.process_map }, ) cont.signals.closing.connect(self.rem_grid_container_slot) self.containers_grid.append(cont) else: QtWidgets.QMessageBox.about(self,"Enough!","Can't instantiate more detectors of this type (max number is "+str(cl.max_instances)+")") setattr(self, cl.name+"_slot", slot_func) def makeMvisionClientSlot(self, cl): if cl.auto_menu == False: return def slot_func(): if ( (cl.tag in singleton.client_process_map) and len(singleton.client_process_map[cl.tag]) > 0 ): master_tag = cl.master if singleton.get_avail_master_process(master_tag) is not None: cont = container.VideoContainerNxM( parent = None, gpu_handler = self.gpu_handler, filterchain_group = self.filterchain_group, title = cl.name, n_dim = 1, m_dim = 1, child_class = container.MVisionClientContainer, child_class_pars = { "mvision_class": cl, # "thread" : singleton.thread, # "process_map" : singleton.process_map }, ) cont.signals.closing.connect(self.rem_grid_container_slot) self.containers_grid.append(cont) else: QtWidgets.QMessageBox.about(self,"Enough!","Can't instantiate more master processes for this detector") else: QtWidgets.QMessageBox.about(self,"Enough!","Can't instantiate more detectors of this type (max number is "+str(cl.max_instances)+")") setattr(self, cl.name+"_slot", slot_func) # *** SLOTS *** # container related slots def rem_grid_container_slot(self, cont): print("gui: rem_grid_container_slot: removing container:",cont) print("gui: rem_grid_container_slot: containers:",self.containers_grid) try: self.containers_grid.remove(cont) except ValueError: print("gui: could not remove container",cont) print("gui: rem_grid_container_slot: containers now:", pformat(self.containers_grid)) def rem_playback_grid_container_slot(self, cont): print("gui: rem_playback_grid_container_slot: removing container:",cont) print("gui: rem_playback_grid_container_slot: containers:",self.containers_playback) try: self.containers_playback.remove(cont) except ValueError: print("gui: could not remove container",cont) print("gui: rem_playback_grid_container_slot: containers now:", pformat(self.containers_playback)) # explictly defined slot functions def exit_slot(self): self.close() def config_dialog_slot(self): self.config_modified = False self.valkkafs_modified = False self.config_win.show() self.manage_cameras_container.choose_first_slot() def config_modified_slot(self): self.config_modified = True def valkkafs_modified_slot(self): self.config_modified = True self.valkkafs_modified = True def camera_list_slot(self): self.camera_list_win.show() def config_dialog_close_slot(self): if (self.config_modified): self.updateCameraTree() self.reOpenValkka() def save_window_layout_slot(self): self.saveWindowLayout() def load_window_layout_slot(self): self.loadWindowLayout() def about_slot(self): QtWidgets.QMessageBox.about(self, "About", constant.program_info % (version.get(), version.getValkka()))
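# For reference, one serialized VideoContainerNxM entry, as written by
# serializeContainers() above, looks roughly like this (values illustrative,
# structure taken from the docstring and from deSerializeContainers()):
#
#   {
#       "type": "VideoContainerNxM",
#       "title": "Video Grid",
#       "n_xscreen": 0,
#       "child_class": "VideoContainer",   # stored by name, mapped back with nameToClass() on load
#       "child_pars": [{"device_id": -1}],
#       "geom": [604, 0, 300, 300],        # json turns the tuple into a list, hence tuple(geom) on load
#       "n_dim": 1,
#       "m_dim": 1,
#   }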
def openValkka(self): self.livethread = LiveThread( # starts live stream services (using live555) name="live_thread", # verbose=True, verbose=False, affinity=self.pardic["live affinity"]) self.openglthread = OpenGLThread( # starts frame presenting services name="mythread", n_720p=self.pardic[ "n_720p"], # reserve stacks of YUV video frames for various resolutions n_1080p=self.pardic["n_1080p"], n_1440p=self.pardic["n_1440p"], n_4K=self.pardic["n_4K"], # naudio =self.pardic["naudio"], # obsolete # verbose =True, verbose=False, msbuftime=self.pardic["msbuftime"], affinity=self.pardic["gl affinity"], x_connection=":0.0" # x_connection =":0.1" # works .. video appears on the other xscreen ) """ # this results in a segfault print("> starting second OpenGLThread") # testing: start another OpenGLThread self.openglthread2=OpenGLThread( # starts frame presenting services name ="mythread2", n_720p =self.pardic["n_720p"], # reserve stacks of YUV video frames for various resolutions n_1080p =self.pardic["n_1080p"], n_1440p =self.pardic["n_1440p"], n_4K =self.pardic["n_4K"], # naudio =self.pardic["naudio"], # obsolete # verbose =True, verbose =False, msbuftime=self.pardic["msbuftime"], affinity=self.pardic["gl affinity"], x_connection =":0.1" # works .. video appears on the other xscreen ) print("> second OpenGLThread started") """ if (self.openglthread.hadVsync()): w = QtWidgets.QMessageBox.warning( self, "VBLANK WARNING", "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'" ) tokens = [] self.chains = [] a = self.pardic["dec affinity start"] cw = 0 # widget / window index cs = 1 # slot / stream count ntotal = len(self.addresses) * self.pardic["replicate"] nrow = self.pardic["videos per row"] ncol = max((ntotal // self.pardic["videos per row"]) + 1, 2) for address in self.addresses: # now livethread and openglthread are running if (a > self.pardic["dec affinity stop"]): a = self.pardic["dec affinity start"] print(pre, "openValkka: setting decoder thread on processor", a) chain = BasicFilterchain( # decoding and branching the stream happens here livethread=self.livethread, openglthread=self.openglthread, address=address, slot=cs, affinity=a, # verbose =True verbose=False, msreconnect=10000, # flush_when_full =True flush_when_full=False, # time_correction =TimeCorrectionType_dummy, # Timestamp correction type: TimeCorrectionType_none, TimeCorrectionType_dummy, or TimeCorrectionType_smart (default) time_correction=TimeCorrectionType_smart, recv_buffer_size= 0, # Operating system socket ringbuffer size in bytes # 0 means default # recv_buffer_size =1024*800, # 800 KB reordering_mstime= 0 # Reordering buffer time for Live555 packets in MILLIseconds # 0 means default # reordering_mstime =300 ) self.chains.append( chain ) # important .. otherwise chain will go out of context and get garbage collected .. 
for cc in range(0, self.pardic["replicate"]): if ("no_qt" in self.pardic): # create our own x-windowses win_id = self.openglthread.createWindow(show=True) else: # *** Choose one of the following sections *** # (1) Let Valkka create the windows/widget # use this: we get a window with correct parametrization # win_id =self.openglthread.createWindow(show=False) # fr =getForeignWidget(self.w, win_id) if (valkka_xwin == False): # (2) Let Qt create the widget fr = TestWidget0(None) win_id = int(fr.winId()) else: # """ # (3) Again, let Valkka create the window, but put on top a translucent widget (that catches mouse gestures) win_id = self.openglthread.createWindow(show=False) widget_pair = WidgetPair(None, win_id, TestWidget0) fr = widget_pair.getWidget() self.widget_pairs.append(widget_pair) # """ print(pre, "setupUi: layout index, address : ", cw // nrow, cw % nrow, address) # self.lay.addWidget(fr,cw//nrow,cw%nrow) # floating windows instead container = VideoContainer(None, fr, n=0) container.getWidget().setGeometry( self.desktop_handler.getGeometry( nrow, ncol, cw % nrow, cw // nrow)) container.getWidget().show() self.videoframes.append(container) token = self.openglthread.connect( slot=cs, window_id=win_id ) # present frames with slot number cs at window win_id tokens.append(token) cw += 1 cs += 1 # TODO: crash when repeating the same slot number ..? chain.decodingOn() # tell the decoding thread to start its job a += 1
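# Quick check of the placement arithmetic used above: widget index cw maps to
# (row, column) = (cw // nrow, cw % nrow). With "videos per row" = 3
# (an illustrative value):
#
#   cw = 0 -> (0, 0)   cw = 1 -> (0, 1)   cw = 2 -> (0, 2)
#   cw = 3 -> (1, 0)   cw = 4 -> (1, 1)   ...
#
# The same indexing is reused by the other examples below when placing widgets
# into a QGridLayout.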
class MyGui(QtWidgets.QMainWindow): debug = False # debug=True def __init__(self, pardic, parent=None): super(MyGui, self).__init__() self.pardic = pardic self.initVars() self.setupUi() if (self.debug): return self.openValkka() self.start_streams() def initVars(self): pass def setupUi(self): self.setGeometry(QtCore.QRect(100, 100, 800, 800)) self.w = QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.lay = QtWidgets.QGridLayout(self.w) self.videoframes = [] self.widget_pairs = [] self.addresses = self.pardic["cams"] def openValkka(self): # setValkkaLogLevel(loglevel_debug) core.setLiveOutPacketBuffermaxSize(95000) # whoa # check this out: # http://lists.live555.com/pipermail/live-devel/2013-April/016803.html self.livethread = LiveThread( # starts live stream services (using live555) name="live_thread", # verbose=True, verbose=False, affinity=self.pardic["live affinity"]) self.livethread2 = LiveThread( # second live thread for sending multicast streams name="live_thread2", # verbose=True, verbose=False, affinity=self.pardic["live2 affinity"]) self.openglthread = OpenGLThread( # starts frame presenting services name="mythread", n_720p=self.pardic[ "n_720p"], # reserve stacks of YUV video frames for various resolutions n_1080p=self.pardic["n_1080p"], n_1440p=self.pardic["n_1440p"], n_4K=self.pardic["n_4K"], # naudio =self.pardic["naudio"], # obsolete # verbose =True, verbose=False, msbuftime=self.pardic["msbuftime"], affinity=self.pardic["gl affinity"]) if (self.openglthread.hadVsync()): w = QtWidgets.QMessageBox.warning( self, "VBLANK WARNING", "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'" ) tokens = [] self.chains = [] a = self.pardic["dec affinity start"] mport = self.pardic["mcast_start_port"] cw = 0 # widget / window index cs = 1 # slot / stream count for address in self.addresses: # now livethread and openglthread are running if (a > self.pardic["dec affinity stop"]): a = self.pardic["dec affinity start"] print(pre, "openValkka: setting decoder thread on processor", a) chain = MulticastFilterchain( # decoding and branching the stream happens here incoming_livethread=self.livethread, outgoing_livethread=self.livethread2, openglthread=self.openglthread, address=address, multicast_address=mcast_address, multicast_port=mport, slot=cs, affinity=a, # verbose =True verbose=False, msreconnect=10000) self.chains.append( chain ) # important .. otherwise chain will go out of context and get garbage collected .. 
# replicate=self.pardic["replicate"] replicate = 1 for cc in range(0, replicate): if ("no_qt" in self.pardic): # create our own x-windowses win_id = self.openglthread.createWindow(show=True) else: # *** Choose one of the following sections *** # (1) Let Valkka create the windows/widget # use this: we get a window with correct parametrization # win_id =self.openglthread.createWindow(show=False) # fr =getForeignWidget(self.w, win_id) if (valkka_xwin == False): # (2) Let Qt create the widget fr = TestWidget0(self.w) win_id = int(fr.winId()) else: # """ # (3) Again, let Valkka create the window, but put on top a translucent widget (that catches mouse gestures) win_id = self.openglthread.createWindow(show=False) widget_pair = WidgetPair(self.w, win_id, TestWidget0) fr = widget_pair.getWidget() self.widget_pairs.append(widget_pair) # """ nrow = self.pardic["videos per row"] print(pre, "setupUi: layout index, address : ", cw // nrow, cw % nrow, address) self.lay.addWidget(fr, cw // nrow, cw % nrow) # print(pre,"setupUi: layout index, address : ",cw//4,cw%4,address) # self.lay.addWidget(fr,cw//4,cw%4) self.videoframes.append(fr) token = self.openglthread.connect( slot=cs, window_id=win_id ) # present frames with slot number cs at window win_id tokens.append(token) cw += 1 cs += 1 # TODO: crash when repeating the same slot number ..? chain.decodingOn() # tell the decoding thread to start its job a += 1 mport += 4 def closeValkka(self): self.livethread.close() for chain in self.chains: chain.close() self.widget_pairs = [] self.videoframes = [] self.openglthread.close() self.livethread2.close() def start_streams(self): pass def stop_streams(self): pass def closeEvent(self, e): print(pre, "closeEvent!") self.stop_streams() self.closeValkka() e.accept()
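# The multicast example above expects a parameter dictionary roughly like the
# following. The keys are taken from the code above; the values are purely
# illustrative:
pardic_example = {
    "cams": ["rtsp://user:password@192.168.1.41", "rtsp://user:password@192.168.1.42"],
    "live affinity": 1,
    "live2 affinity": 2,
    "gl affinity": 3,
    "dec affinity start": 4,
    "dec affinity stop": 6,
    "n_720p": 10, "n_1080p": 10, "n_1440p": 0, "n_4K": 0,
    "msbuftime": 100,
    "videos per row": 2,
    "mcast_start_port": 5000,
}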
class MyGui(QtWidgets.QMainWindow): debug=False # debug=True def __init__(self): super(MyGui, self).__init__() # self.pardic=pardic self.initVars() self.setupUi() if (self.debug): return self.openValkka() self.startProcesses() def initVars(self): self.messages=[] self.mode="file" self.slot_reserved=False def setupUi(self): self.setGeometry(QtCore.QRect(100,100,800,800)) self.w=QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.lay=QtWidgets.QVBoxLayout(self.w) # divide window into three parts self.upper =QtWidgets.QWidget(self.w) self.lower =QtWidgets.QWidget(self.w) self.lowest =QtWidgets.QWidget(self.w) self.lay.addWidget(self.upper) self.lay.addWidget(self.lower) self.lay.addWidget(self.lowest) # upper part: license plate list and the video self.upperlay =QtWidgets.QHBoxLayout(self.upper) self.msg_list =QtWidgets.QTextEdit(self.upper) self.video_area =QtWidgets.QWidget(self.upper) self.video_lay =QtWidgets.QGridLayout(self.video_area) self.upperlay.addWidget(self.msg_list) self.upperlay.addWidget(self.video_area) self.msg_list.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.video_area.setSizePolicy(QtWidgets.QSizePolicy.Expanding,QtWidgets.QSizePolicy.Expanding) # lower part: [Open File] [Close Live] [Play] [Stop] [Rewind] self.lowerlay =QtWidgets.QHBoxLayout(self.lower) self.open_file_button =QtWidgets.QPushButton("Open File", self.lower) self.close_file_button=QtWidgets.QPushButton("Close File",self.lower) self.play_button =QtWidgets.QPushButton("Play",self.lower) self.stop_button =QtWidgets.QPushButton("Stop",self.lower) self.rewind_button =QtWidgets.QPushButton("<<", self.lower) self.lowerlay.addWidget(self.open_file_button) self.lowerlay.addWidget(self.close_file_button) self.lowerlay.addWidget(self.play_button) self.lowerlay.addWidget(self.stop_button) self.lowerlay.addWidget(self.rewind_button) self.open_file_button.clicked. connect(self.open_file_button_slot) self.close_file_button.clicked.connect(self.close_file_button_slot) self.play_button.clicked. connect(self.play_button_slot) self.stop_button.clicked. connect(self.stop_button_slot) self.rewind_button.clicked. 
connect(self.rewind_button_slot) # lowest part: some text self.lowestlay=QtWidgets.QVBoxLayout(self.lowest) self.infotext =QtWidgets.QLabel("info text",self.lowest) self.lowestlay.addWidget(self.infotext) def openValkka(self): self.livethread=LiveThread( # starts live stream services (using live555) name ="live_thread", verbose=False ) self.filethread=FileThread( name ="file_thread", verbose=False ) self.openglthread=OpenGLThread( # starts frame presenting services name ="mythread", n_720p =10, n_1080p =10, n_1440p =10, n_4K =10, verbose =False, msbuftime=100, affinity=-1 ) if (self.openglthread.hadVsync()): w=QtWidgets.QMessageBox.warning(self,"VBLANK WARNING","Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'") cc=1 self.chain=ShmemFilterchain1( # decoding and branching the stream happens here openglthread=self.openglthread, slot =cc, # this filterchain creates a shared memory server shmem_name ="test_studio_file_"+str(cc), shmem_image_dimensions =(1920//4,1080//4), # Images passed over shmem are quarter of the full-hd reso shmem_image_interval =1000, # YUV => RGB interpolation to the small size is done each 1000 milliseconds and passed on to the shmem ringbuffer shmem_ringbuffer_size =10 # Size of the shmem ringbuffer ) shmem_name, n_buffer, shmem_image_dimensions =self.chain.getShmemPars() # print(pre,"shmem_name, n_buffer, n_bytes",shmem_name,n_buffer,n_bytes) self.process=QValkkaMovementDetectorProcess("process_"+str(cc),shmem_name=shmem_name, n_buffer=n_buffer, image_dimensions=shmem_image_dimensions) self.process.signals.start_move.connect(self.set_moving_slot) self.process.signals.stop_move. connect(self.set_still_slot) if (valkka_xwin): # (1) Let OpenGLThread create the window self.win_id =self.openglthread.createWindow(show=False) self.widget_pair =WidgetPair(self.video_area,self.win_id,TestWidget0) self.video =self.widget_pair.getWidget() else: # (2) Let Qt create the window self.video =QtWidgets.QWidget(self.video_area) self.win_id =int(self.video.winId()) self.video_lay.addWidget(self.video,0,0) self.token =self.openglthread.connect(slot=cc,window_id=self.win_id) self.chain.decodingOn() # tell the decoding thread to start its job # finally, give the multiprocesses to a qthread that's reading their message pipe self.thread =QValkkaThread(processes=[self.process]) def startProcesses(self): self.process.start() self.thread.start() def stopProcesses(self): print(pre,"stopProcesses :",self.process) self.process.stop() self.thread.stop() print(pre,"QThread stopped") def closeValkka(self): self.livethread.close() self.chain.close() self.chain =None self.openglthread.close() def closeEvent(self,e): print(pre,"closeEvent!") self.stopProcesses() self.closeValkka() super().closeEvent(e) # *** slot **** def open_file_button_slot(self): if (self.slot_reserved): self.infotext.setText("Close the current file first") return fname=QtWidgets.QFileDialog.getOpenFileName(filter="*.mkv")[0] if (len(fname)>0): print(pre,"open_file_button_slot: got filename",fname) self.chain.setFileContext(fname) self.filethread.openStream(self.chain.file_ctx) self.slot_reserved=True if (self.chain.fileStatusOk()): self.infotext.setText("Opened file "+fname) else: self.infotext.setText("Can't play file "+fname) else: self.infotext.setText("No file opened") def close_file_button_slot(self): if (not self.slot_reserved): self.infotext.setText("Open a file first") return 
self.filethread.closeStream(self.chain.file_ctx) self.slot_reserved=False self.infotext.setText("Closed file") def open_live_button_slot(self): pass def play_button_slot(self): if (self.mode=="file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.playStream(self.chain.file_ctx) else: pass def rewind_button_slot(self): if (self.mode=="file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.chain.file_ctx.seektime_=0; self.filethread.seekStream(self.chain.file_ctx) else: pass def stop_button_slot(self): if (self.mode=="file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.stopStream(self.chain.file_ctx) else: pass def set_still_slot(self): self.infotext.setText("still") self.messages.append("Movement stopped at ") if (len(self.messages)>10): self.messages.pop(0) st="" for message in self.messages: st+=message+"\n" self.msg_list.setText(st) def set_moving_slot(self): self.infotext.setText("MOVING") self.messages.append("Movement started at ") if (len(self.messages)>10): self.messages.pop(0) st="" for message in self.messages: st+=message+"\n" self.msg_list.setText(st)
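# The file-playback buttons above reduce to this FileThread call sequence on a
# single file context (a condensed recap of the slot functions, not new API):
#
#   self.chain.setFileContext(fname)                  # bind the file to the filterchain's slot
#   self.filethread.openStream(self.chain.file_ctx)   # open the stream
#   self.filethread.playStream(self.chain.file_ctx)   # play
#   self.filethread.stopStream(self.chain.file_ctx)   # stop
#   self.chain.file_ctx.seektime_ = 0
#   self.filethread.seekStream(self.chain.file_ctx)   # rewind to the beginning
#   self.filethread.closeStream(self.chain.file_ctx)  # release the slot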
class MyGui(QtWidgets.QMainWindow): class Frame: """Create a frame with text (indicating movement) and a video frame. The video frame is created from a "foreign" window (created by Valkka) """ def __init__(self, parent, win_id): self.widget = QtWidgets.QWidget(parent) self.lay = QtWidgets.QVBoxLayout(self.widget) self.text = QtWidgets.QLabel("", self.widget) self.text_stylesheet = self.text.styleSheet() # create the foreign widget / normal widget pair # normal widget of class TestWidget0 self.widget_pair = WidgetPair(self.widget, win_id, TestWidget0) self.video = self.widget_pair.getWidget() self.lay.addWidget(self.text) self.lay.addWidget(self.video) self.text.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.video.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) self.set_still() def setText(self, txt): self.text.setText(txt) def set_still(self): self.setText("still") self.widget.setStyleSheet(self.text_stylesheet) def set_moving(self): self.setText("MOVING") self.widget.setStyleSheet( "QLabel {border: 2px; border-style:solid; border-color: red; margin:0 px; padding:0 px; border-radius:8px;}" ) class NativeFrame: """Create a frame with text (indicating movement) and a video frame. The video frame is created by Qt. """ def __init__(self, parent): self.widget = QtWidgets.QWidget(parent) self.lay = QtWidgets.QVBoxLayout(self.widget) self.text = QtWidgets.QLabel("", self.widget) self.text_stylesheet = self.text.styleSheet() self.video = QtWidgets.QWidget(self.widget) self.lay.addWidget(self.text) self.lay.addWidget(self.video) self.text.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.video.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) self.set_still() def getWindowId(self): return int(self.video.winId()) def setText(self, txt): self.text.setText(txt) def set_still(self): self.setText("still") self.widget.setStyleSheet(self.text_stylesheet) def set_moving(self): self.setText("MOVING") self.widget.setStyleSheet( "QLabel {border: 2px; border-style:solid; border-color: red; margin:0 px; padding:0 px; border-radius:8px;}" ) debug = False # debug=True def __init__(self, pardic, parent=None): super(MyGui, self).__init__() self.pardic = pardic self.initVars() self.setupUi() if (self.debug): return self.openValkka() def initVars(self): pass def setupUi(self): self.setGeometry(QtCore.QRect(100, 100, 800, 800)) self.w = QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.lay = QtWidgets.QGridLayout(self.w) self.frames = [] # frames with movement detector alert and video self.addresses = self.pardic["cams"] def openValkka(self): # some constant values # Images passed over shmem are quarter of the full-hd reso shmem_image_dimensions = (1920 // 4, 1080 // 4) # YUV => RGB interpolation to the small size is done each 1000 # milliseconds and passed on to the shmem ringbuffer shmem_image_interval = 1000 shmem_ringbuffer_size = 10 # the very first thing: create & start multiprocesses cs = 1 self.processes = [] for address in self.addresses: shmem_name = "test_studio_" + str(cs) process = QValkkaMovementDetectorProcess( "process_" + str(cs), shmem_name=shmem_name, n_buffer=shmem_ringbuffer_size, image_dimensions=shmem_image_dimensions) self.processes.append(process) print(self.processes) # Give the multiprocesses to a qthread that's reading their message # pipe self.thread = QValkkaThread(processes=self.processes) # starts the multiprocesses self.startProcesses() # ..so, forks 
have been done. Now we can spawn threads self.livethread = LiveThread( # starts live stream services (using live555) name="live_thread", verbose=False, affinity=self.pardic["live affinity"]) self.openglthread = OpenGLThread( # starts frame presenting services name="mythread", # reserve stacks of YUV video frames for various resolutions n_720p=self.pardic["n_720p"], n_1080p=self.pardic["n_1080p"], n_1440p=self.pardic["n_1440p"], n_4K=self.pardic["n_4K"], # naudio =self.pardic["naudio"], # obsolete verbose=False, msbuftime=self.pardic["msbuftime"], affinity=self.pardic["gl affinity"]) if (self.openglthread.hadVsync()): w = QtWidgets.QMessageBox.warning( self, "VBLANK WARNING", "Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'" ) tokens = [] self.chains = [] self.frames = [] cs = 1 cc = 0 a = self.pardic["dec affinity start"] for address in self.addresses: # now livethread and openglthread are running if (a > self.pardic["dec affinity stop"]): a = self.pardic["dec affinity start"] print(pre, "openValkka: setting decoder thread on processor", a) # this filterchain creates a shared memory server # identifies shared memory buffer must be same as in the # multiprocess chain = ShmemFilterchain( # decoding and branching the stream happens here livethread=self.livethread, openglthread=self.openglthread, address=address, slot=cs, affinity=a, shmem_name="test_studio_" + str(cs), shmem_image_dimensions=shmem_image_dimensions, shmem_image_interval=shmem_image_interval, shmem_ringbuffer_size=shmem_ringbuffer_size, msreconnect=10000 # time_correction =TimeCorrectionType_smart # this is the default, no need to specify ) self.chains.append(chain) if (valkka_xwin): win_id = self.openglthread.createWindow(show=False) frame = self.Frame(self.w, win_id) else: frame = self.NativeFrame(self.w) win_id = frame.getWindowId() # print(pre,"setupUi: layout index, address : ",cc//4,cc%4,address) # self.lay.addWidget(frame.widget,cc//4,cc%4) nrow = self.pardic["videos per row"] print(pre, "setupUi: layout index, address : ", cc // nrow, cc % nrow, address) self.lay.addWidget(frame.widget, cc // nrow, cc % nrow) self.frames.append(frame) token = self.openglthread.connect(slot=cs, window_id=win_id) tokens.append(token) # take corresponding analyzer multiprocess process = self.processes[cc] process.createClient( ) # creates the shared memory client at the multiprocess # connect signals to the nested widget process.signals.start_move.connect(frame.set_moving) process.signals.stop_move.connect(frame.set_still) chain.decodingOn() # tell the decoding thread to start its job cs += 1 # TODO: crash when repeating the same slot number ..? a += 1 cc += 1 def startProcesses(self): self.thread.start() for p in self.processes: p.start() def stopProcesses(self): for p in self.processes: p.stop() print(pre, "stopping QThread") self.thread.stop() print(pre, "QThread stopped") def closeValkka(self): self.livethread.close() for chain in self.chains: chain.close() self.chains = [] self.widget_pairs = [] self.videoframes = [] self.openglthread.close() def closeEvent(self, e): print(pre, "closeEvent!") self.stopProcesses() self.closeValkka() super().closeEvent(e)
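# Startup order in the detector example above is deliberate: the analyzer
# multiprocesses are forked before any Valkka threads exist (see the linked
# article on mixing fork and threads), and teardown runs in reverse. Condensed:
#
#   1. create the QValkkaMovementDetectorProcess instances and the QValkkaThread, start them (forks happen here)
#   2. only then create LiveThread / OpenGLThread and one ShmemFilterchain per camera
#   3. process.createClient() attaches each process to its shmem ringbuffer; connect its signals to the frame widget
#   4. on exit: stopProcesses() first, then closeValkka() (livethread, chains, openglthread)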
class MyGui(QtWidgets.QMainWindow): def __init__(self, parent=None): super(MyGui, self).__init__() self.initVars() self.initConfigFiles() self.readDB() self.generateMethods() self.setupUi() self.startProcesses() self.openValkka() self.makeLogic() self.post() def initVars(self): self.thread = None # a QThread that reads multiprocess pipes self.containers = [] self.mvision_containers = [] self.mvision_classes = tools.scanMVisionClasses() if (len(self.mvision_classes) > 0): self.mvision = True else: self.mvision = False def initConfigFiles(self): # self.config_file = tools.getConfigFile("config") self.version_file = tools.getConfigFile("version") self.first_start = True if (tools.hasConfigDir() ): # this indicates that the program has been started earlier ver = self.readVersionNumber() print("valkka.live : loading config file for version number") if ver: if (ver[0] == version.VERSION_MAJOR and ver[1] == version.VERSION_MINOR): self.first_start = False else: # incorrect version number print("valkka.live : clearing config") pass # .. or handle migration somehow if self.first_start: # first time program start # TODO: eula could be shown here print(pre, "initConfigFiles : first start") tools.makeConfigDir() self.saveVersionNumber() # self.saveConfigFile() self.save_window_layout() self.first_start = True def readDB(self): """Datamodel includes the following files: config.dat, devices.dat """ self.dm = DataModel(directory=tools.getConfigDir()) if (self.first_start): print(pre, "readDB : first start") self.dm.clearAll() self.dm.saveAll() # If camera collection is corrupt if not self.dm.checkCameraCollection(): self.dm.clearCameraCollection() def generateMethods(self): """Generate some member functions """ for i in range(1, 5): # adds member function grid_ixi_slot(self) self.make_grid_slot(i, i) for cl in self.mvision_classes: self.make_mvision_slot(cl) def QCapsulate(self, widget, name, blocking=False, nude=False): """Helper function that encapsulates QWidget into a QMainWindow """ class QuickWindow(QtWidgets.QMainWindow): class Signals(QtCore.QObject): close = QtCore.Signal() show = QtCore.Signal() def __init__(self, blocking=False, parent=None, nude=False): super().__init__(parent) self.propagate = True # send signals or not self.setStyleSheet(style.main_gui) if (blocking): self.setWindowModality(QtCore.Qt.ApplicationModal) if (nude): # http://doc.qt.io/qt-5/qt.html#WindowType-enum # TODO: create a widget for a proper splashscreen (omitting X11 and centering manually) # self.setWindowFlags(QtCore.Qt.Popup) # Qt 5.9+ : setFlags() # self.setWindowFlags(QtCore.Qt.SplashScreen | QtCore.Qt.WindowStaysOnTopHint) self.setWindowFlags(QtCore.Qt.Dialog) self.signals = self.Signals() def closeEvent(self, e): if (self.propagate): self.signals.close.emit() e.accept() def showEvent(self, e): if (self.propagate): self.signals.show.emit() e.accept() def setPropagate(self): self.propagate = True def unSetPropagate(self): self.propagate = False win = QuickWindow(blocking=blocking, nude=nude) win.setCentralWidget(widget) win.setLayout(QtWidgets.QHBoxLayout()) win.setWindowTitle(name) return win def QTabCapsulate(self, name, widget_list, blocking=False): """Helper function that encapsulates QWidget into a QMainWindow :param widget_list: List of tuples : [(widget,"name"), (widget,"name"), ..] 
""" class QuickWindow(QtWidgets.QMainWindow): class Signals(QtCore.QObject): close = QtCore.Signal() show = QtCore.Signal() def __init__(self, blocking=False, parent=None): super().__init__(parent) self.propagate = True # send signals or not self.setStyleSheet(style.main_gui) if (blocking): self.setWindowModality(QtCore.Qt.ApplicationModal) self.signals = self.Signals() self.tab = QtWidgets.QTabWidget() self.setCentralWidget(self.tab) self.setLayout(QtWidgets.QHBoxLayout()) def closeEvent(self, e): if (self.propagate): self.signals.close.emit() e.accept() def showEvent(self, e): if (self.propagate): self.signals.show.emit() e.accept() def setPropagate(self): self.propagate = True def unSetPropagate(self): self.propagate = False win = QuickWindow(blocking=blocking) win.setWindowTitle(name) for w in widget_list: win.tab.addTab(w[0], w[1]) return win def setupUi(self): self.setStyleSheet(style.main_gui) self.setWindowTitle("Valkka Live") self.setGeometry(QtCore.QRect(100, 100, 500, 500)) self.w = QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.filemenu = FileMenu(parent=self) self.viewmenu = ViewMenu(parent=self) # grids up to 4x4 self.configmenu = ConfigMenu(parent=self) if self.mvision: mvision_elements = [] for cl in self.mvision_classes: el = QuickMenuElement(title=cl.name, method_name=cl.name) mvision_elements.append(el) class MVisionMenu(QuickMenu): title = "Machine Vision" elements = mvision_elements self.mvisionmenu = MVisionMenu(parent=self) self.aboutmenu = AboutMenu(parent=self) # create container and their windows self.manage_cameras_container = self.dm.getDeviceListAndForm(None) #self.manage_cameras_win = self.QCapsulate( # self.manage_cameras_container.widget, "Camera Configuration", blocking = True) self.manage_memory_container = self.dm.getConfigForm() #self.manage_memory_win = self.QCapsulate( # self.manage_memory_container.widget, "Memory Configuration", blocking = True) # self.manage_memory_container.signals.save # self.manage_cameras_container.getForm().signals.save_record # ListAndForm : has a list and a formset (SlotFormSet). SlotFormSet has the signals self.manage_memory_container.signals.save.connect( self.config_modified_slot) self.manage_cameras_container.getForm().signals.save_record.connect( self.config_modified_slot) self.config_win = self.QTabCapsulate( "Configuration", [(self.manage_cameras_container.widget, "Camera Configuration"), (self.manage_memory_container.widget, "Memory Configuration")]) self.config_win.signals.close.connect(self.config_dialog_close_slot) # when the configuration dialog is reopened, inform the camera configuration form .. 
this way it can re-check if usb cams are available self.config_win.signals.show.connect( self.manage_cameras_container.getForm().show_slot) self.makeCameraTree() self.camera_list_win = self.QCapsulate(self.treelist, "Camera List") # self.camera_list_win.show() # self.treelist.show() # self.wait_widget = QtWidgets.QWidget() # self.wait_lay = QtWidgets.QHBoxLayout(self.wait_widget) # self.wait_label = QtWidgets.QLabel("Restarting Valkka, please wait ..", self.wait_widget) self.wait_label = QtWidgets.QLabel("Restarting Valkka, please wait ..") self.wait_window = self.QCapsulate(self.wait_label, "Wait", nude=True) # self.wait_window.show() # self.wait_window = QtWidgets.QMessageBox.information(None, "info","info") def makeCameraTree(self): self.root = HeaderListItem() self.treelist = BasicView(parent=None, root=self.root) self.updateCameraTree() def updateCameraTree(self): self.treelist.reset_() self.server = ServerListItem(name="Localhost", ip="127.0.0.1", parent=self.root) """ self.server1 = ServerListItem( name="First Server", ip="192.168.1.20", parent=self.root) """ """ self.camera1 = RTSPCameraListItem(camera=RTSPCameraDevice( ip="192.168.1.4", username="******", password="******"), parent=self.server1) self.camera2 = RTSPCameraListItem(camera=RTSPCameraDevice( ip="192.168.1.4", username="******", password="******"), parent=self.server1) """ devices = [] for row in self.dm.camera_collection.get(): # print(pre, "makeCameraTree : row", row) if (row["classname"] == DataModel.RTSPCameraRow.__name__): row.pop("classname") devices.append( RTSPCameraListItem( camera=DataModel.RTSPCameraDevice(**row), parent=self.server)) elif (row["classname"] == DataModel.USBCameraRow.__name__): row.pop("classname") devices.append( USBCameraListItem(camera=DataModel.USBCameraDevice(**row), parent=self.server)) self.treelist.update() self.treelist.expandAll() def makeLogic(self): # *** When camera list has been closed, re-create the cameralist tree and update filterchains *** # self.manage_cameras_win.signals.close.connect(self.updateCameraTree) # now put into save_camera_config_slot # self.manage_cameras_win.signals.close.connect(self.filterchain_group.update) # TODO: use this once fixed # self.manage_cameras_win.signals.close.connect(self.filterchain_group.read) # TODO: eh.. lets be sure of this .. (are we releasing slots in the LiveThread etc.) # self.manage_cameras_win.signals.close.connect(self.save_camera_config_slot) # self.manage_memory_container.signals.save.connect(self.save_memory_conf_slot) # *** Menu bar connections *** # the self.filemenu.exit attribute was autogenerated self.filemenu.exit.triggered.connect(self.exit_slot) self.filemenu.save_window_layout.triggered.connect( self.save_window_layout_slot) self.filemenu.load_window_layout.triggered.connect( self.load_window_layout_slot) """ self.configmenu.manage_cameras. triggered.connect( self.manage_cameras_slot) self.configmenu.memory_usage. 
triggered.connect( self.memory_usage_slot) """ self.configmenu.configuration_dialog.triggered.connect( self.config_dialog_slot) self.viewmenu.camera_list.triggered.connect(self.camera_list_slot) self.aboutmenu.about_valkka_live.triggered.connect(self.about_slot) # *** Connect autogenerated menu calls into autogenerated slot functions *** for i in range(1, 5): # gets member function grid_ixi_slot slot_func = getattr(self, "grid_%ix%i_slot" % (i, i)) # gets member function grid_ixi from self.viewmenu.video_grid menu_func = getattr(self.viewmenu.video_grid, "grid_%ix%i" % (i, i)) menu_func.triggered.connect(slot_func) # i.e., like this : self.viewmenu.video_grid.grid_1x1.triggered.connect(slot_func) # *** autogenerated machine vision menu and slots *** for cl in self.mvision_classes: getattr(self.mvisionmenu, cl.name).triggered.connect(getattr(self, cl.name + "_slot")) def post(self): """ self.mvision_container = container.VideoContainerNxM( parent = None, gpu_handler = self.gpu_handler, filterchain_group = self.filterchain_group, title = "MVision", n_dim = 1, m_dim = 1, child_class = container.MVisionContainer, child_class_pars = mvision ) """ def serializeContainers(self): """Serializes the current view of open video grids (i.e. the view) """ """ each serialized container looks like this: dic={# these are used when re-instantiating the view "classname" : self.__class__.__name__, "kwargs" : {}, # parameters that we're used to instantiate this class # these parameters are used by deserialize "x" : self.window.x(), "y" : self.window.y(), "width" : self.window.width(), "height" : self.window.height(), "streams" : streams } """ container_list = [] mvision_container_list = [] for container in self.containers: print("gui: serialize containers : container=", container) container_list.append(container.serialize()) for container in self.mvision_containers: mvision_container_list.append(container.serialize()) return { "container_list": container_list, "mvision_container_list": mvision_container_list } """ def saveConfigFile(self): configdump = json.dumps({ "containers": self.serializeContainers() }) f = open(self.config_file, "w") f.write(configdump) f.close() self.saveVersionNumber() def loadConfigFile(self): try: f = open(self.config_file, "r") except FileNotFoundError: config = constant.config_skeleton else: config = json.loads(f.read()) return config """ def saveVersionNumber(self): f = open(self.version_file, "w") f.write(version.get()) f.close() def readVersionNumber(self): try: f = open(self.version_file, "r") st = f.read() f.close() vs = [] for s in st.split("."): vs.append(int(s)) except: print("valkka.live : could not read version number") return None else: return vs def startProcesses(self): """Create and start python multiprocesses Starting a multiprocess creates a process fork. In theory, there should be no problem in first starting the multithreading environment and after that perform forks (only the thread requestin the fork is copied), but in practice, all kinds of weird behaviour arises. 
Read all about it in here : http://www.linuxprogrammingblog.com/threads-and-fork-think-twice-before-using-them """ self.process_map = {} # each key is a list of started multiprocesses # self.process_avail = {} # count instances for mvision_class in self.mvision_classes: name = mvision_class.name tag = mvision_class.tag num = mvision_class.max_instances if (tag not in self.process_map): self.process_map[tag] = [] # self.process_avail[tag] = num for n in range(0, num): p = mvision_class() p.start() self.process_map[tag].append(p) def closeProcesses(self): for key in self.process_map: for p in self.process_map[key]: p.stop() def openValkka(self): self.cpu_scheme = CPUScheme() # self.dm.camera_collection try: memory_config = next( self.dm.config_collection.get( {"classname": DataModel.MemoryConfigRow.__name__})) except StopIteration: print(pre, "Using default mem config") memory_config = default.memory_config n_frames = round( memory_config["msbuftime"] * default.fps / 1000.) # accumulated frames per buffering time = n_frames if (memory_config["bind"]): self.cpu_scheme = CPUScheme() else: self.cpu_scheme = CPUScheme(n_cores=-1) self.gpu_handler = GPUHandler( n_720p=memory_config["n_720p"] * n_frames, # n_cameras * n_frames n_1080p=memory_config["n_1080p"] * n_frames, n_1440p=memory_config["n_1440p"] * n_frames, n_4K=memory_config["n_4K"] * n_frames, msbuftime=memory_config["msbuftime"], verbose=False, cpu_scheme=self.cpu_scheme) self.livethread = LiveThread(name="live_thread", verbose=False, affinity=self.cpu_scheme.getLive()) self.usbthread = USBDeviceThread(name="usb_thread", verbose=False, affinity=self.cpu_scheme.getUSB()) self.filterchain_group = FilterChainGroup(datamodel=self.dm, livethread=self.livethread, usbthread=self.usbthread, gpu_handler=self.gpu_handler, cpu_scheme=self.cpu_scheme) self.filterchain_group.read() # self.filterchain_group.update() # TODO: use this once fixed try: from valkka.mvision import multiprocess except ImportError: pass else: if self.mvision: self.thread = multiprocess.QValkkaThread() self.thread.start() def closeValkka(self): # live => chain => opengl self.livethread.close() self.usbthread.close() self.filterchain_group.close() self.gpu_handler.close() if self.thread: self.thread.stop() def reOpenValkka(self): print("gui: valkka reinit") self.wait_window.show() self.save_window_layout("tmplayout") self.closeContainers() self.closeValkka() self.openValkka() self.load_window_layout("tmplayout") self.wait_window.hide() def closeContainers(self): print("gui: closeContainers: containers=", self.containers) print("gui: closeContainers: mvision containers=", self.mvision_containers) for container in self.containers: container.close() for container in self.mvision_containers: print("gui: closing mvision_container: ", container) container.close() self.containers = [] self.mvision_containers = [] def closeEvent(self, e): print("gui : closeEvent!") self.closeContainers() # self.manage_cameras_win.unSetPropagate() # don't send signals .. 
if you don't do this: close => closeEvent => will trigger self.reOpen # self.manage_cameras_win.close() self.camera_list_win.unSetPropagate() self.camera_list_win.close() self.config_win.unSetPropagate() self.config_win.close() self.closeValkka() self.dm.close() self.closeProcesses() e.accept() def rem_container_slot(self, cont): print("gui: rem_container_slot: removing container:", cont) print("gui: rem_container_slot: containers:", self.containers) try: self.containers.remove(cont) except ValueError: print("gui: could not remove container", cont) print("gui: rem_container_slot: containers now:", self.containers) def rem_mvision_container_slot(self, cont): print("gui: rem_mvision_container_slot: removing mvision container:", cont) print("gui: rem_mvision_container_slot: mvision containers:", self.mvision_containers) try: self.mvision_containers.remove(cont) except ValueError: print( "gui: rem_mvision_container_slot: could not remove container", cont) print("gui: rem_mvision_container_slot: mvision containers now:", self.mvision_containers) # slot function makers def make_grid_slot(self, n, m): """Create a n x m video grid, show it and add it to the list of video containers """ def slot_func(): cont = container.VideoContainerNxM( gpu_handler=self.gpu_handler, filterchain_group=self.filterchain_group, n_dim=n, m_dim=m) cont.signals.closing.connect(self.rem_container_slot) self.containers.append(cont) setattr(self, "grid_%ix%i_slot" % (n, m), slot_func) def make_mvision_slot(self, cl): def slot_func(): if ((cl.tag in self.process_map) and (len(self.process_map[cl.tag]) > 0)): cont = container.VideoContainerNxM( parent=None, gpu_handler=self.gpu_handler, filterchain_group=self.filterchain_group, title=cl.name, n_dim=1, m_dim=1, child_class=container.MVisionContainer, # serializable parameters (for re-creating this container): child_class_pars={"mvision_class": cl}, # non-seriazable parameters: child_class_pars_={ "thread": self.thread, "process_map": self.process_map }) cont.signals.closing.connect(self.rem_mvision_container_slot) self.mvision_containers.append(cont) else: QtWidgets.QMessageBox.about( self, "Enough!", "Can't instantiate more detectors of this type (max number is " + str(cl.max_instances) + ")") setattr(self, cl.name + "_slot", slot_func) def save_window_layout(self, filename="layout"): container_dic = self.serializeContainers() print(pre, "save_window_layout : container_dic =", container_dic) # f = open(tools.getConfigFile(filename), "w") # f.write(json.dumps(container_list)) f = open(tools.getConfigFile(filename), "wb") f.write(pickle.dumps(container_dic)) f.close() def load_window_layout(self, filename="layout"): self.closeContainers() # f = open(tools.getConfigFile(filename), "r") # container_list = json.loads(f.read()) f = open(tools.getConfigFile(filename), "rb") container_dic = pickle.loads(f.read()) f.close() print("load_window_layout: container_dic: ", container_dic) namespace = container.__dict__ # devices_by_id = self.dm.getDevicesById({"classname" : DataModel.RTSPCameraRow.__name__}) devices_by_id = self.dm.getDevicesById() for cont in container_dic["container_list"]: classname = cont["classname"] kwargs = cont["kwargs"] kwargs["gpu_handler"] = self.gpu_handler kwargs["filterchain_group"] = self.filterchain_group class_instance = namespace[classname] container_instance = class_instance(** kwargs) # create the container # move it to the right position container_instance.deSerialize(cont, devices_by_id) self.containers.append(container_instance) for cont in 
container_dic["mvision_container_list"]: print("\nload_window_layout: mvision:", cont) # explictly defined slot functions def exit_slot(self): self.close() """ def manage_cameras_slot(self): self.manage_cameras_win.show() def memory_usage_slot(self): self.manage_memory_win.show() """ def config_dialog_slot(self): self.config_modified = False self.config_win.show() self.manage_cameras_container.choose_first_slot() def config_modified_slot(self): self.config_modified = True def camera_list_slot(self): self.camera_list_win.show() """ def save_memory_conf_slot(self): self.manage_memory_win.close() self.reOpenValkka() def save_camera_config_slot(self): self.updateCameraTree() self.reOpenValkka() """ def config_dialog_close_slot(self): if (self.config_modified): self.updateCameraTree() self.reOpenValkka() def save_window_layout_slot(self): self.save_window_layout() def load_window_layout_slot(self): self.load_window_layout() def about_slot(self): QtWidgets.QMessageBox.about( self, "About", constant.program_info % (version.get(), version.getValkka()))
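# A minimal, runnable sketch of the window-layout persistence used in save_window_layout /
# load_window_layout above: the serialized container dictionary is pickled to a file.
# get_layout_file() below is a hypothetical stand-in for tools.getConfigFile().
import os
import pickle
import tempfile

def get_layout_file(name="layout"):
    # hypothetical location, for illustration only
    return os.path.join(tempfile.gettempdir(), "valkka_live_" + name)

def save_layout(container_dic, filename="layout"):
    with open(get_layout_file(filename), "wb") as f:
        f.write(pickle.dumps(container_dic))

def load_layout(filename="layout"):
    with open(get_layout_file(filename), "rb") as f:
        return pickle.loads(f.read())

if __name__ == "__main__":
    save_layout({"container_list": [], "mvision_container_list": []})
    print(load_layout())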
class MyGui(QtWidgets.QMainWindow): debug=False # debug=True def __init__(self,pardic,parent=None): super(MyGui, self).__init__() # print(pre,"Qapp=",QtCore.QCoreApplication.instance()) self.pardic=pardic self.initVars() self.setupUi() if (self.debug): return self.openValkka() self.start_streams() def initVars(self): self.videocontainers = [] self.chains = [] def makeMenus(self): class FileMenu(QuickMenu): title="File" elements=[ QuickMenuElement(title="Add New View"), QuickMenuElement(title="Exit") ] self.filemenu=FileMenu(self) self.filemenu.add_new_view.triggered.connect(self.add_new_view_slot) self.filemenu.exit.triggered.connect(self.exit_slot) def add_new_view_slot(self): print("add new view") self.videocontainers.append(VideoContainer(self.gpu_handler,self.chains)) def exit_slot(self): self.close() def setupUi(self): self.desktop_handler =DesktopHandler() print(self.desktop_handler) self.setGeometry(QtCore.QRect(100,100,800,800)) self.w=QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.lay=QtWidgets.QGridLayout(self.w) self.makeMenus() self.addresses=self.pardic["cams"] def openValkka(self): self.livethread=LiveThread( # starts live stream services (using live555) name ="live_thread", # verbose=True, verbose=False, affinity=self.pardic["live affinity"] ) self.gpu_handler=GPUHandler(self.pardic) a =self.pardic["dec affinity start"] cs=1 # slot / stream count for address in self.addresses: # now livethread and openglthread are running if (a>self.pardic["dec affinity stop"]): a=self.pardic["dec affinity start"] print(pre,"openValkka: setting decoder thread on processor",a) chain=ManagedFilterchain( # decoding and branching the stream happens here livethread =self.livethread, openglthreads =self.gpu_handler.openglthreads, address =address, slot =cs, affinity =a, msreconnect =10000, verbose=True ) self.chains.append(chain) # important .. otherwise chain will go out of context and get garbage collected .. cs+=1 a+=1 def closeValkka(self): self.livethread.close() for chain in self.chains: chain.close() self.chains =[] self.widget_pairs =[] self.videoframes =[] self.gpu_handler.close() def start_streams(self): pass def stop_streams(self): pass def closeEvent(self,e): print(pre,"closeEvent!") for vc in self.videocontainers: vc.close() self.stop_streams() self.closeValkka() e.accept()
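# A small, runnable sketch of the slot / decoder-affinity bookkeeping in openValkka above:
# slot numbers increase per stream, while the decoder core cycles between
# pardic["dec affinity start"] and pardic["dec affinity stop"].
def assign_slots_and_affinities(addresses, start, stop):
    plan = []
    a = start   # decoder core
    cs = 1      # slot / stream count
    for address in addresses:
        if a > stop:
            a = start
        plan.append((cs, address, a))
        cs += 1
        a += 1
    return plan

print(assign_slots_and_affinities(["rtsp://cam1", "rtsp://cam2", "rtsp://cam3"], 1, 2))
# [(1, 'rtsp://cam1', 1), (2, 'rtsp://cam2', 2), (3, 'rtsp://cam3', 1)]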
class FileGUI(QtWidgets.QMainWindow): """Test your machine vision mvision_process and its widget with video files :param mvision_process: QValkkaMultimvision_process-derived class :param shmem_image_interval: How often the image is interpolated into rgb and passed to the mvision_process (milliseconds) """ def __init__(self, mvision_process, shmem_image_interval = 1000, shmem_ringbuffer_size = 10, shmem_image_dimensions = (1920 // 2, 1080 // 2), shmem_name="test"): super().__init__() assert(issubclass(mvision_process.__class__, QValkkaShmemProcess2)) self.mvision_process = mvision_process self.shmem_image_interval = shmem_image_interval self.shmem_ringbuffer_size = shmem_ringbuffer_size self.shmem_image_dimensions = shmem_image_dimensions self.shmem_name = shmem_name self.initVars() self.setupUi() self.mvision_widget = self.mvision_process.getWidget() # self.mvision_widget = QtWidgets.QWidget() self.mvision_widget.setParent(self.widget) self.widget_lay.addWidget(self.mvision_widget) self.openValkka() def initVars(self): self.mode = "file" self.slot_reserved = False def setupUi(self): self.setGeometry(QtCore.QRect(100, 100, 800, 800)) self.w = QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.lay = QtWidgets.QVBoxLayout(self.w) # return # divide window into three parts self.upper = QtWidgets.QWidget(self.w) self.lower = QtWidgets.QWidget(self.w) self.lowest = QtWidgets.QWidget(self.w) self.lay.addWidget(self.upper) self.lay.addWidget(self.lower) self.lay.addWidget(self.lowest) # upper part: detectors widget and the video itself self.upperlay = QtWidgets.QHBoxLayout(self.upper) # self.widget =QtWidgets.QTextEdit(self.upper) self.widget =QtWidgets.QWidget(self.upper) self.widget_lay = QtWidgets.QVBoxLayout(self.widget) # self.widget = self.mvision_process.getWidget() # self.widget.setParent(self.upper) self.video_area = QtWidgets.QWidget(self.upper) self.video_lay = QtWidgets.QGridLayout(self.video_area) self.upperlay.addWidget(self.widget) self.upperlay.addWidget(self.video_area) self.widget.setSizePolicy( QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.video_area.setSizePolicy( QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) # lower part: [Open File] [Close Live] [Play] [Stop] [Rewind] self.lowerlay = QtWidgets.QHBoxLayout(self.lower) self.open_file_button = QtWidgets.QPushButton("Open File", self.lower) self.close_file_button = QtWidgets.QPushButton( "Close File", self.lower) self.play_button = QtWidgets.QPushButton("Play", self.lower) self.stop_button = QtWidgets.QPushButton("Stop", self.lower) self.rewind_button = QtWidgets.QPushButton("<<", self.lower) self.lowerlay.addWidget(self.open_file_button) self.lowerlay.addWidget(self.close_file_button) self.lowerlay.addWidget(self.play_button) self.lowerlay.addWidget(self.stop_button) self.lowerlay.addWidget(self.rewind_button) self.open_file_button.clicked. connect(self.open_file_button_slot) self.close_file_button.clicked.connect(self.close_file_button_slot) self.play_button.clicked. connect(self.play_button_slot) self.stop_button.clicked. connect(self.stop_button_slot) self.rewind_button.clicked. 
connect(self.rewind_button_slot) # lowest part: some text self.lowestlay = QtWidgets.QVBoxLayout(self.lowest) self.infotext = QtWidgets.QLabel("info text", self.lowest) self.lowestlay.addWidget(self.infotext) def openValkka(self): self.thread = QValkkaThread() # the thread that's watching the mvision_processes self.thread.start() self.mvision_process.start() self.thread.addProcess(self.mvision_process) # """ self.livethread = LiveThread( # starts live stream services (using live555) name="live_thread", verbose=False ) self.filethread = FileThread( name="file_thread", verbose=False ) self.openglthread = OpenGLThread( # starts frame presenting services name="mythread", n_720p=10, n_1080p=10, n_1440p=10, n_4K=10, verbose=False, msbuftime=100, affinity=-1 ) # this filterchain creates a shared memory server self.chain = ShmemFilterchain1( # decoding and branching the stream happens here openglthread = self.openglthread, slot = 1, shmem_name = self.shmem_name, shmem_image_dimensions = self.shmem_image_dimensions, shmem_image_interval = self.shmem_image_interval, shmem_ringbuffer_size = self.shmem_ringbuffer_size ) shmem_name, n_buffer, shmem_image_dimensions = self.chain.getShmemPars() self.video = QtWidgets.QWidget(self.video_area) self.win_id = int(self.video.winId()) self.video_lay.addWidget(self.video, 0, 0) self.token = self.openglthread.connect(slot = 1, window_id = self.win_id) self.chain.decodingOn() # tell the decoding thread to start its job self.mvision_process.activate( n_buffer = self.shmem_ringbuffer_size, image_dimensions = self.shmem_image_dimensions, shmem_name = self.shmem_name ) def closeValkka(self): # """ self.livethread.close() self.chain.close() self.chain = None self.openglthread.close() # """ print(self.mvision_process) self.thread.stop() def closeEvent(self, e): print(pre, "closeEvent!") self.closeValkka() super().closeEvent(e) # *** slot **** def open_file_button_slot(self): if (self.slot_reserved): self.infotext.setText("Close the current file first") return fname = QtWidgets.QFileDialog.getOpenFileName(filter="*.mkv")[0] if (len(fname) > 0): print(pre, "open_file_button_slot: got filename", fname) self.chain.setFileContext(fname) self.filethread.openStream(self.chain.file_ctx) self.slot_reserved = True if (self.chain.fileStatusOk()): self.infotext.setText("Opened file " + fname) else: self.infotext.setText("Can't play file " + fname) else: self.infotext.setText("No file opened") def close_file_button_slot(self): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.closeStream(self.chain.file_ctx) self.slot_reserved = False self.infotext.setText("Closed file") def open_live_button_slot(self): pass def play_button_slot(self): if (self.mode == "file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.playStream(self.chain.file_ctx) else: pass def rewind_button_slot(self): if (self.mode == "file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.chain.file_ctx.seektime_ = 0 self.filethread.seekStream(self.chain.file_ctx) else: pass def stop_button_slot(self): if (self.mode == "file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.stopStream(self.chain.file_ctx) else: pass def set_bounding_boxes_slot(self, bbox_list): self.openglthread.core.clearObjectsCall(self.token) for bbox in bbox_list: self.openglthread.core.addRectangleCall(self.token, bbox[0], bbox[1], bbox[2], bbox[3]) # left, right, top, 
# bottom
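# A condensed sketch of set_bounding_boxes_slot above, written as a free function so the
# call pattern is easier to see; only calls that already appear in the listing are used
# (clearObjectsCall / addRectangleCall), and the bbox order (left, right, top, bottom)
# is taken from the trailing comment above.
def draw_bounding_boxes(openglthread, token, bbox_list):
    openglthread.core.clearObjectsCall(token)   # drop the previous overlay
    for left, right, top, bottom in bbox_list:
        openglthread.core.addRectangleCall(token, left, right, top, bottom)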
def openValkka(self): self.cpu_scheme = CPUScheme() # singleton.data_model.camera_collection try: memory_config = next( singleton.data_model.config_collection.get( {"classname": MemoryConfigRow.__name__})) except StopIteration: print(pre, "Using default mem config") singleton.data_model.writeDefaultMemoryConfig() memory_config = default.get_memory_config() try: valkkafs_config = next( singleton.data_model.valkkafs_collection.get( {"classname": ValkkaFSConfigRow.__name__})) except StopIteration: print(pre, "Using default valkkafs config") singleton.data_model.writeDefaultValkkaFSConfig() valkkafs_config = default.get_valkkafs_config() n_frames = round( memory_config["msbuftime"] * default.fps / 1000.) # accumulated frames per buffering time = n_frames if (memory_config["bind"]): self.cpu_scheme = CPUScheme() else: self.cpu_scheme = CPUScheme(n_cores=-1) self.gpu_handler = GPUHandler( n_720p=memory_config["n_720p"] * n_frames, # n_cameras * n_frames n_1080p=memory_config["n_1080p"] * n_frames, n_1440p=memory_config["n_1440p"] * n_frames, n_4K=memory_config["n_4K"] * n_frames, msbuftime=memory_config["msbuftime"], verbose=False, cpu_scheme=self.cpu_scheme) self.livethread = LiveThread(name="live_thread", verbose=False, affinity=self.cpu_scheme.getLive()) self.usbthread = USBDeviceThread(name="usb_thread", verbose=False, affinity=self.cpu_scheme.getUSB()) # see datamodel.row.ValkkaFSConfigRow blocksize = valkkafs_config["blocksize"] n_blocks = valkkafs_config["n_blocks"] #fs_flavor = valkkafs_config["fs_flavor"] #record = valkkafs_config["record"] self.filterchain_group = LiveFilterChainGroup( datamodel=singleton.data_model, livethread=self.livethread, usbthread=self.usbthread, gpu_handler=self.gpu_handler, cpu_scheme=self.cpu_scheme) self.filterchain_group.read() # TODO: RecordType..? 
if singleton.use_playback: print("openValkka: ValkkaFS **PLAYBACK & RECORDING ACTIVATED**") # ValkkaSingleFSHandler: # directory handling and valkkafs <-> stream id association self.valkka_fs_handler = ValkkaSingleFSHandler( basedir=singleton.valkkafs_dir.get(), blocksize=blocksize * 1024 * 1024, # MB n_blocks=n_blocks) if self.valkkafs_modified: print("openValkka: removing all recorded streams") self.valkka_fs_handler.clear() self.valkka_fs_handler.wipe() for row in singleton.data_model.camera_collection.get(): _id = row["_id"] # get stream id slot = row["slot"] classname = row["classname"] if classname != "EmptyRow": # print(">", row) self.valkka_fs_handler.load(_id) # ..creates new valkka if doesn't exist self.valkkafsmanager = ValkkaFSManager( self.valkka_fs_handler.tolist()) self.valkkafsmanager.start() #self.filterchain_group.setRecording(RecordType.always, self.valkkafsmanager)# OLD # self.filterchain_group: source for live stream # self.filterchain_group_play: sink where the playback/saved stream should # be sent self.filterchain_group_play = PlaybackFilterChainGroup( datamodel=singleton.data_model, gpu_handler=self.gpu_handler, cpu_scheme=self.cpu_scheme) self.filterchain_group_play.read() # print("openValkka: self.filterchain_group_play: len=", len(self.filterchain_group_play)) # connect live & playback filterchains with the manager for valkkafs, inputfilter in self.valkkafsmanager.iterateFsInput(): _id = self.valkka_fs_handler.getId(valkkafs) if _id is None: print("WARNING: main: could not get id for", valkkafs) continue playback_fc = self.filterchain_group_play.get(_id=_id) if playback_fc is None: print("WARNING: main: could not find _id", _id,\ "in playback filterchain group") """ for chain in self.filterchain_group_play.chains: print(">>", chain) for key, getter in chain.iterateGetters(): print(">", key, getter()) """ continue live_fc = self.filterchain_group.get(_id=_id) if live_fc is None: print("WARNING: main: could not find _id", _id,\ "in live filterchain group") continue self.valkkafsmanager.map_( valkkafs=valkkafs, # read & cached stream is sent/output'd here: framefilter=playback_fc.getInputFilter(), write_slot=live_fc.slot, read_slot=playback_fc.slot, _id=_id) # frames coming from the live stream are sent to # valkkafsmanager's correct inputfilter # (there is one corresponding to each valkkafs) live_fc.connectRecTo(inputfilter, RecordType.always) # TODO: disconnect at exit..? # self.filterchain_group.update() # TODO: use this once fixed self.playback_controller = PlaybackController( valkkafs_manager=self.valkkafsmanager)
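# A condensed sketch of the recording / playback wiring done above, written as a helper
# that takes the objects created in openValkka as parameters; only calls that appear in
# the listing are used (iterateFsInput, getId, get, map_, getInputFilter, connectRecTo).
# record_type is RecordType.always in the code above.
def wire_recording(manager, fs_handler, live_group, play_group, record_type):
    for valkkafs, inputfilter in manager.iterateFsInput():
        _id = fs_handler.getId(valkkafs)         # which camera does this ValkkaFS belong to?
        if _id is None:
            continue
        playback_fc = play_group.get(_id=_id)    # sink for the cached / read stream
        live_fc = live_group.get(_id=_id)        # source of the live stream
        if playback_fc is None or live_fc is None:
            continue
        manager.map_(
            valkkafs=valkkafs,
            framefilter=playback_fc.getInputFilter(),  # read & cached stream is output here
            write_slot=live_fc.slot,
            read_slot=playback_fc.slot,
            _id=_id)
        # frames from the live stream are sent to the manager's per-ValkkaFS input filter
        live_fc.connectRecTo(inputfilter, record_type)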
class FileGUI(QtWidgets.QMainWindow): """Test your machine vision mvision_process and its widget with video files :param mvision_process: QValkkaMultimvision_process-derived class :param shmem_image_interval: How often the image is interpolated into rgb and passed to the mvision_process (milliseconds) """ def __init__(self, mvision_process, mvision_master_process, shmem_image_interval=1000, shmem_ringbuffer_size=10, shmem_image_dimensions=(1920 // 2, 1080 // 2), shmem_name="test", init_filename=None): super().__init__() assert (issubclass(mvision_process.__class__, QShmemProcess)) self.mvision_process = mvision_process self.mvision_master_process = mvision_master_process # self.mvision_class = mvision_class, self.shmem_image_interval = shmem_image_interval self.shmem_ringbuffer_size = shmem_ringbuffer_size self.shmem_image_dimensions = shmem_image_dimensions self.shmem_name = shmem_name self.init_filename = init_filename self.initVars() self.setupUi() self.mvision_widget = self.mvision_process.getWidget() # self.mvision_widget = QtWidgets.QWidget() self.mvision_widget.setParent(self.widget) self.widget_lay.addWidget(self.mvision_widget) self.mvision_widget.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.openValkka() if len(sys.argv) > 2: self.open_file_button_slot(fname_=sys.argv[2]) def initVars(self): self.mode = "file" self.slot_reserved = False def setupUi(self): rec = QtWidgets.QApplication.desktop().screenGeometry() height = rec.height() width = rec.width() self.setGeometry(QtCore.QRect(0, 0, width, height // 2)) self.w = QtWidgets.QWidget(self) self.setCentralWidget(self.w) self.lay = QtWidgets.QVBoxLayout(self.w) # return # divide window into three parts self.upper = QtWidgets.QWidget(self.w) self.middle = QtWidgets.QWidget(self.w) self.lower = QtWidgets.QWidget(self.w) self.lowest = QtWidgets.QWidget(self.w) self.lay.addWidget(self.upper) self.lay.addWidget(self.middle) self.lay.addWidget(self.lower) self.lay.addWidget(self.lowest) # upper part: detectors widget and the video itself self.upperlay = QtWidgets.QHBoxLayout(self.upper) # self.widget =QtWidgets.QTextEdit(self.upper) self.widget = QtWidgets.QWidget(self.upper) self.widget_lay = QtWidgets.QVBoxLayout(self.widget) # self.widget = self.mvision_process.getWidget() # self.widget.setParent(self.upper) self.video_area = QtWidgets.QWidget(self.upper) self.video_lay = QtWidgets.QGridLayout(self.video_area) self.upperlay.addWidget(self.widget) self.upperlay.addWidget(self.video_area) self.widget.setSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) self.video_area.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) """ [------|--------------------------------------] [Open File] [Close Live] [Play] [Stop] [Rewind] """ self.middlelay = QtWidgets.QHBoxLayout(self.middle) self.slider = QtWidgets.QSlider(QtCore.Qt.Orientation.Horizontal, self.middle) self.middlelay.addWidget(self.slider) self.slider.setTracking(False) self.lowerlay = QtWidgets.QHBoxLayout(self.lower) self.open_file_button = QtWidgets.QPushButton("Open File", self.lower) self.close_file_button = QtWidgets.QPushButton("Close File", self.lower) self.play_button = QtWidgets.QPushButton("Play", self.lower) self.stop_button = QtWidgets.QPushButton("Stop", self.lower) self.rewind_button = QtWidgets.QPushButton("<<", self.lower) self.seek_label = QtWidgets.QLabel("<<", self.lower) self.lowerlay.addWidget(self.open_file_button) self.lowerlay.addWidget(self.close_file_button) 
self.lowerlay.addWidget(self.play_button) self.lowerlay.addWidget(self.stop_button) self.lowerlay.addWidget(self.rewind_button) self.lowerlay.addWidget(self.seek_label) self.open_file_button.clicked.connect(self.open_file_button_slot) self.close_file_button.clicked.connect(self.close_file_button_slot) self.play_button.clicked.connect(self.play_button_slot) self.stop_button.clicked.connect(self.stop_button_slot) self.rewind_button.clicked.connect(self.rewind_button_slot) self.slider.valueChanged.connect(self.slider_slot) # lowest part: some text self.lowestlay = QtWidgets.QVBoxLayout(self.lowest) self.infotext = QtWidgets.QLabel("info text", self.lowest) self.lowestlay.addWidget(self.infotext) def openValkka(self): self.mvision_process.go() if self.mvision_master_process is not None: assert (issubclass(self.mvision_master_process.__class__, QShmemProcess)) self.mvision_master_process.go() self.livethread = LiveThread( # starts live stream services (using live555) name="live_thread", verbose=False) self.filethread = FileThread(name="file_thread", verbose=False) self.openglthread = OpenGLThread( # starts frame presenting services name="mythread", n_720p=10, n_1080p=10, n_1440p=10, n_4K=10, verbose=False, msbuftime=100, affinity=-1) # this filterchain creates a shared memory server self.chain = ShmemFilterchain1( # decoding and branching the stream happens here openglthread=self.openglthread, slot=1, shmem_name=self.shmem_name, shmem_image_dimensions=self.shmem_image_dimensions, shmem_image_interval=self.shmem_image_interval, shmem_ringbuffer_size=self.shmem_ringbuffer_size) shmem_name, n_buffer, shmem_image_dimensions = self.chain.getShmemPars( ) self.video = QtWidgets.QWidget(self.video_area) if hasattr(self.mvision_process, "analyzer_video_widget_class"): # the machine vision class may declare what video widget it wants to use to define the machine vision parameters (line crossing, zone intrusion, etc.) self.analyzer_widget = AnalyzerWidget( parent=self.video_area, analyzer_video_widget_class=self.mvision_process. analyzer_video_widget_class) else: self.analyzer_widget = AnalyzerWidget(parent=self.video_area) self.mvision_process.connectAnalyzerWidget(self.analyzer_widget) self.analyzer_widget.activate() self.win_id = int(self.video.winId()) self.video_lay.addWidget(self.video, 0, 0) self.video_lay.addWidget(self.analyzer_widget, 0, 1) self.token = self.openglthread.connect(slot=1, window_id=self.win_id) self.chain.decodingOn() # tell the decoding thread to start its job self.mvision_process.activate( n_buffer=self.shmem_ringbuffer_size, image_dimensions=self.shmem_image_dimensions, shmem_name=self.shmem_name) if self.mvision_master_process: self.mvision_process.setMasterProcess(self.mvision_master_process) def closeValkka(self): if self.mvision_master_process: self.mvision_process.unsetMasterProcess() self.mvision_process.disconnectAnalyzerWidget(self.analyzer_widget) self.livethread.close() self.chain.close() self.chain = None self.openglthread.close() self.mvision_process.requestStop() self.mvision_process.waitStop() if self.mvision_master_process: self.mvision_master_process.requestStop() self.mvision_master_process.waitStop() def showEvent(self, e): if self.init_filename is not None: self.open_file_button_slot(fname_=self.init_filename) e.accept() def closeEvent(self, e): print(pre, "closeEvent!") self.closeValkka() self.analyzer_widget.close() # wtf do we need this! 
# super().closeEvent(e) e.accept() # *** slot **** def open_file_button_slot(self, fname_=None): if (self.slot_reserved): self.infotext.setText("Close the current file first") return if not fname_: fname = QtWidgets.QFileDialog.getOpenFileName(filter="*.mkv")[0] else: fname = fname_ if (len(fname) > 0): print(pre, "open_file_button_slot: got filename", fname) self.chain.setFileContext(fname) self.filethread.openStream(self.chain.file_ctx) self.slot_reserved = True if (self.chain.fileStatusOk()): self.infotext.setText("Opened file " + fname) print("Duration:", self.chain.file_ctx.duration) self.slider.setMinimum(0) self.slider.setMaximum(self.chain.file_ctx.duration) else: self.infotext.setText("Can't play file " + fname) else: self.infotext.setText("No file opened") def close_file_button_slot(self): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.closeStream(self.chain.file_ctx) self.slot_reserved = False self.infotext.setText("Closed file") def open_live_button_slot(self): pass def play_button_slot(self): if (self.mode == "file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.playStream(self.chain.file_ctx) else: pass def rewind_button_slot(self): if (self.mode == "file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.chain.file_ctx.seektime_ = 0 self.filethread.seekStream(self.chain.file_ctx) else: pass def stop_button_slot(self): if (self.mode == "file"): if (not self.slot_reserved): self.infotext.setText("Open a file first") return self.filethread.stopStream(self.chain.file_ctx) else: pass def slider_slot(self, v): print(">", v) self.chain.file_ctx.seektime_ = v # TODO: reset analyzer state self.seek_label.setText(str(v)) self.mvision_process.resetAnalyzerState() self.filethread.seekStream(self.chain.file_ctx) def set_bounding_boxes_slot(self, bbox_list): self.openglthread.core.clearObjectsCall(self.token) for bbox in bbox_list: self.openglthread.core.addRectangleCall( self.token, bbox[0], bbox[1], bbox[2], bbox[3]) # left, right, top, bottom
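# A small sketch of the seek path driven by the slider above: the slider value (same unit
# as file_ctx.duration, which is used as the slider maximum) is written into the file
# context, the analyzer state is reset, and FileThread.seekStream() performs the seek.
def seek_to(filethread, mvision_process, file_ctx, value):
    file_ctx.seektime_ = value
    mvision_process.resetAnalyzerState()   # detections from before the seek are stale
    filethread.seekStream(file_ctx)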
class MyGui(QtWidgets.QMainWindow): class QtFrame: def __init__(self, parent, win_id): self.widget = QtWidgets.QWidget(parent) self.lay = QtWidgets.QVBoxLayout(self.widget) self.widget_pair = WidgetPair(self.widget, win_id, TestWidget0) self.video = self.widget_pair.getWidget() self.lay.addWidget(self.video) def getWindowId(self): return int(self.widget.winId()) debug = False def __init__(self, pardic, parent=None): super(MyGui, self).__init__() self.pardic = pardic self.initVars() self.setupUI() if self.debug: return self.openValkka() def initVars(self): pass def setupUI(self): self.setWindowTitle('Vision alarm system') self.resize(1200, 800) self.menuBar().addMenu('Add Camera') self.menuBar().addMenu('Remove camera') self.main = QtWidgets.QWidget(self) self.setCentralWidget(self.main) self.w = QtWidgets.QWidget(self) # self.setCentralWidget(self.w) self.mainlay = QtWidgets.QVBoxLayout(self.main) self.mainlay.setSpacing(0) self.mainlay.setContentsMargins(0, 0, 0, 0) self.wlay = QtWidgets.QGridLayout(self.w) self.alert = QtWidgets.QTextEdit() self.mainlay.addWidget(self.w, 75) self.mainlay.addWidget(self.alert, 25) self.frames = [] # i currently don't know what it is used for self.addresses = self.pardic["cams"] print(self.addresses) def openValkka(self): # RGB Shared memory shmem_image_dimensions = (1920 // 4, 1080 // 4) shmem_image_interval = 1000 shmem_rignbuffer_size = 10 # Frag MP4 Shared memory shmem_buffers = 10 shmem_name = "FragMP4Shmem" cellsize = 1024 * 1024 * 3 timeout = 1000 cs = 1 cc = 1 self.processes = [] for address in self.addresses: shmem_name = "camera" + str(cs) # print("shmem name is {} for process number {} ".format(shmem_name, cc)) process = QValkkaFireDetectorProcess( "process" + str(cs), shmem_name=shmem_name, n_buffer=shmem_rignbuffer_size, image_dimensions=shmem_image_dimensions) self.processes.append(process) cs += 1 print(self.processes) # Give the multiprocesses to a gthread that's reading their message / thread will be listening to the processes !? 
self.thread = QValkkaThread(processes=self.processes) # start the multiprocesses self.startProcesses() # Now that we successfully forked our multiprocesses lets spawn threads self.livethread = LiveThread(name="live", verbose=False, affinity=self.pardic["live_affinity"]) self.openglthread = OpenGLThread( name="mythread", # reserve stacks of YUV video frames for various resolutions n_720p=50, n_1080p=50, n_1440p=50, n_4K=50, verbose=False, msbuftime=100, affinity=-1) # if (self.openglthread.hadVsync()): # q = QtWidgets.QMessageBox.warning(self, # "VBLANK WARNING", # "Syncing to vertical refresh enabled \n THIS WILL DESTROY YOUR FRAMERATE\n disable it using 'export vblank_mode=0'") tokens = [] self.chains = [] self.frames = [] cs = 1 cc = 0 x = 0 y = 0 cam_count = 0 a = self.pardic["dec affinity start"] for address in self.addresses: # Livethread/openglthread are running print('address :', address) if (a > self.pardic["dec affinity stop"]): a = self.pardic["dec affinity start"] chain = VisionAlarmFilterChain( # decoding and branching happens here livethread=self.livethread, openglthread=self.openglthread, address=address, slot=cs, affinity=a, shmem_name="camera" + str(cs), shmem_image_dimensions=shmem_image_dimensions, shmem_image_interval=shmem_image_interval, shmem_ringbuffer_size=shmem_rignbuffer_size, msreconnect=1000, frag_shmem_buffers=shmem_buffers, frag_shmem_name=shmem_name, frag_shmem_cellsize=cellsize, frag_shmem_timeout=timeout, ) self.chains.append(chain) win_id = self.openglthread.createWindow(show=False) frame = self.QtFrame(self.w, win_id) # print('setting up layout') if y > 1: x = 1 y = 0 self.wlay.addWidget(frame.widget, x, y) y += 1 token = self.openglthread.connect(slot=cs, window_id=win_id) tokens.append(token) # take corresponding multiprocess process = self.processes[cc] process.createClient( ) # creates the shared memory client at the multiprocess # connect signals to the nested widget process.signals.Fire_detected.connect(self.addAlert) chain.decodingOn() # start the decoding thread cs += 1 a += 1 cc += 1 # FragMP4 shmem client client = FragMP4ShmemClient(name=shmem_name, n_ringbuffer=shmem_buffers, n_size=cellsize, mstimeout=timeout, verbose=False) def startProcesses(self): self.thread.start() for p in self.processes: p.start() def stopProcesses(self): for p in self.processes: p.stop() print("stopping QThread") self.thread.stop() print("QThread stopped") def closeValkka(self): self.livethread.close() for chain in self.chains: chain.close() self.chains = [] self.widget_pairs = [] self.videoframes = [] self.openglthread.close() def closeEvent(self, e): print("closeEvent!") self.stopProcesses() self.closeValkka() super().closeEvent() # Slot def addAlert(self): print('inside addAlert ') self.alert.append('Fire Detected on camera number 1') pass
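# A runnable sketch of a more general grid placement than the hard-wired 2 x 2 x/y counters
# used above; n_cols is an assumed parameter (the other examples in this file use
# pardic["videos per row"] for the same purpose).
def grid_position(index, n_cols=2):
    return index // n_cols, index % n_cols   # (row, column) for QGridLayout.addWidget

for i in range(5):
    print(i, grid_position(i))
# 0 (0, 0) / 1 (0, 1) / 2 (1, 0) / 3 (1, 1) / 4 (2, 0)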
def openValkka(self): self.livethread=LiveThread( # starts live stream services (using live555) name ="live_thread", verbose=False ) self.filethread=FileThread( name ="file_thread", verbose=False ) self.openglthread=OpenGLThread( # starts frame presenting services name ="mythread", n_720p =10, n_1080p =10, n_1440p =10, n_4K =10, verbose =False, msbuftime=100, affinity=-1 ) if (self.openglthread.hadVsync()): w=QtWidgets.QMessageBox.warning(self,"VBLANK WARNING","Syncing to vertical refresh enabled\n THIS WILL DESTROY YOUR FRAMERATE\n Disable it with 'export vblank_mode=0' for nvidia proprietary drivers, use 'export __GL_SYNC_TO_VBLANK=0'") cc=1 self.chain=ShmemFilterchain1( # decoding and branching the stream happens here openglthread=self.openglthread, slot =cc, # this filterchain creates a shared memory server shmem_name ="test_studio_file_"+str(cc), shmem_image_dimensions =(1920//4,1080//4), # Images passed over shmem are quarter of the full-hd reso shmem_image_interval =1000, # YUV => RGB interpolation to the small size is done each 1000 milliseconds and passed on to the shmem ringbuffer shmem_ringbuffer_size =10 # Size of the shmem ringbuffer ) shmem_name, n_buffer, shmem_image_dimensions =self.chain.getShmemPars() # print(pre,"shmem_name, n_buffer, n_bytes",shmem_name,n_buffer,n_bytes) self.process=QValkkaMovementDetectorProcess("process_"+str(cc),shmem_name=shmem_name, n_buffer=n_buffer, image_dimensions=shmem_image_dimensions) self.process.signals.start_move.connect(self.set_moving_slot) self.process.signals.stop_move. connect(self.set_still_slot) if (valkka_xwin): # (1) Let OpenGLThread create the window self.win_id =self.openglthread.createWindow(show=False) self.widget_pair =WidgetPair(self.video_area,self.win_id,TestWidget0) self.video =self.widget_pair.getWidget() else: # (2) Let Qt create the window self.video =QtWidgets.QWidget(self.video_area) self.win_id =int(self.video.winId()) self.video_lay.addWidget(self.video,0,0) self.token =self.openglthread.connect(slot=cc,window_id=self.win_id) self.chain.decodingOn() # tell the decoding thread to start its job # finally, give the multiprocesses to a qthread that's reading their message pipe self.thread =QValkkaThread(processes=[self.process])
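# A condensed sketch of the shared-memory handshake above: the filterchain acts as the
# shmem server and the detector process as the client, so both sides must be constructed
# with the same name, ring-buffer length and image dimensions; getShmemPars() is used in
# the listing to read the server-side values back. process_class is passed in to keep the
# sketch free of imports (QValkkaMovementDetectorProcess in the code above).
def make_detector(chain, process_class, index):
    shmem_name, n_buffer, image_dimensions = chain.getShmemPars()
    return process_class(
        "process_" + str(index),
        shmem_name=shmem_name,              # same name on both sides
        n_buffer=n_buffer,                  # same ring-buffer length
        image_dimensions=image_dimensions)  # same (width, height)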
class MyGui(QtWidgets.QMainWindow):

    debug = False
    # debug = True

    def __init__(self, pardic, valkkafs, parent=None):
        super(MyGui, self).__init__()
        self.pardic = pardic
        self.valkkafs = valkkafs
        self.initVars()
        self.setupUi()
        if (self.debug): return
        self.openValkka()
        self.start_streams()

    def initVars(self):
        pass

    def setupUi(self):
        self.setGeometry(QtCore.QRect(100, 100, 800, 800))
        self.w = QtWidgets.QWidget(self)
        self.setCentralWidget(self.w)
        self.lay = QtWidgets.QGridLayout(self.w)
        self.videoframes = []
        self.widget_pairs = []
        self.addresses = self.pardic["cams"]

        # self.rec_window = QtWidgets.QMainWindow(self)
        # self.rec_window = QtWidgets.QTabWidget(None)
        self.rec_window = MyTabWidget(None)
        self.rec_window.setGeometry(QtCore.QRect(50, 50, 800, 800))
        self.rec_window.show()

        self.rec_video_tab = QtWidgets.QWidget(None)
        self.rec_video_lay = QtWidgets.QVBoxLayout(self.rec_video_tab)
        self.rec_calendar_tab = QtWidgets.QWidget(None)
        self.rec_calendar_lay = QtWidgets.QVBoxLayout(self.rec_calendar_tab)
        self.rec_window.addTab(self.rec_video_tab, "Video")
        self.rec_window.addTab(self.rec_calendar_tab, "Calendar")

        self.rec_video_area = QtWidgets.QWidget(self.rec_video_tab)
        self.rec_video_area.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        self.rec_video_area_lay = QtWidgets.QGridLayout(self.rec_video_area)
        self.rec_video_lay.addWidget(self.rec_video_area)

        # timeline
        self.timelinewidget = TimeLineWidget(datetime.date.today(), parent=self.rec_video_area)
        # self.timelinewidget.setLogLevel(logging.DEBUG)
        self.rec_video_lay.addWidget(self.timelinewidget)

        # buttons
        self.buttons = QtWidgets.QWidget(self.rec_video_area)
        self.buttons_lay = QtWidgets.QHBoxLayout(self.buttons)
        self.play_button = QtWidgets.QPushButton("play", self.buttons)
        self.stop_button = QtWidgets.QPushButton("stop", self.buttons)
        self.zoom_to_fs_button = QtWidgets.QPushButton("limits", self.buttons)
        self.buttons_lay.addWidget(self.play_button)
        self.buttons_lay.addWidget(self.stop_button)
        self.buttons_lay.addWidget(self.zoom_to_fs_button)
        self.rec_video_lay.addWidget(self.buttons)

        # calendar
        self.calendarwidget = CalendarWidget(datetime.date.today(), parent=self.rec_calendar_tab)
        self.rec_calendar_lay.addWidget(self.calendarwidget)

    def openValkka(self):
        self.valkkafsmanager = ValkkaFSManager(
            self.valkkafs,
            # read = False,   # debugging
            # cache = False,  # debugging
            # write = False   # debugging
        )
        self.playback_controller = PlaybackController(
            calendar_widget=self.calendarwidget,
            timeline_widget=self.timelinewidget,
            valkkafs_manager=self.valkkafsmanager,
            play_button=self.play_button,
            stop_button=self.stop_button,
            zoom_to_fs_button=self.zoom_to_fs_button)

        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            # verbose=True,
            verbose=False,
            affinity=self.pardic["live affinity"])

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=self.pardic["n_720p"],
            n_1080p=self.pardic["n_1080p"],
            n_1440p=self.pardic["n_1440p"],
            n_4K=self.pardic["n_4K"],
            # naudio=self.pardic["naudio"],  # obsolete
            verbose=True,
            # verbose=False,
            msbuftime=self.pardic["msbuftime"],
            affinity=self.pardic["gl affinity"])

        if (self.openglthread.hadVsync()):
            w = QtWidgets.QMessageBox.warning(
                self,
                "VBLANK WARNING",
                "Syncing to vertical refresh is enabled\nTHIS WILL DESTROY YOUR FRAMERATE\n"
                "Disable it with 'export vblank_mode=0'; for nvidia proprietary drivers, "
                "use 'export __GL_SYNC_TO_VBLANK=0'"
            )

        tokens = []
        self.chains = []
        a = self.pardic["dec affinity start"]
        cw = 0  # widget / window index
        cs = 1  # slot / stream count

        for address in self.addresses:
            # now livethread and openglthread are running
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]
            print(pre, "openValkka: setting decoder thread on processor", a)

            if use_live:
                chain_live = ValkkaFSLiveFilterchain(  # decoding and branching the stream happens here
                    valkkafsmanager=self.valkkafsmanager,
                    id_rec=cs,  # identifies the stream in ValkkaFS
                    livethread=self.livethread,
                    address=address,
                    slot=cs,
                    affinity=a,
                    # verbose=True,
                    verbose=False,
                    msreconnect=10000,
                    # Reordering buffer time for Live555 packets in MILLIseconds
                    # 0 means default
                    reordering_mstime=0
                    # reordering_mstime=300
                )

            rec_slot = cs + 100  # live and rec slot numbers must be kept separated ..
            chain_rec = ValkkaFSFileFilterchain(  # decoding and branching the stream happens here
                valkkafsmanager=self.valkkafsmanager,
                id_rec=cs,  # identifies the stream in ValkkaFS
                slot=rec_slot,
                affinity=a,
                # verbose=True,
                verbose=False)

            # send yuv to OpenGLThread
            if use_live:
                chain_live.connect_to_yuv("yuv_to_opengl_" + str(cs), self.openglthread.getInput())
            chain_rec.connect_to_yuv("yuv_to_opengl_" + str(cs), self.openglthread.getInput())

            # important .. otherwise chain will go out of context and get garbage collected ..
            if use_live:
                self.chains.append(chain_live)
            self.chains.append(chain_rec)

            if ("no_qt" in self.pardic):
                # create our own X windows
                win_id = self.openglthread.createWindow(show=True)
                win_id_rec = self.openglthread.createWindow(show=True)
            else:
                # *** Choose one of the following sections ***

                # (1) Let Valkka create the window/widget
                # use this: we get a window with correct parametrization
                # win_id = self.openglthread.createWindow(show=False)
                # fr = getForeignWidget(self.w, win_id)

                if (valkka_xwin == False):
                    # (2) Let Qt create the widget
                    fr = TestWidget0(self.w)
                    win_id = int(fr.winId())
                    fr_rec = TestWidget0(self.rec_video_area)
                    win_id_rec = int(fr_rec.winId())
                else:
                    # (3) Again, let Valkka create the window, but put on top a
                    # translucent widget (that catches mouse gestures)
                    win_id = self.openglthread.createWindow(show=False)
                    widget_pair = WidgetPair(self.w, win_id, TestWidget0)
                    fr = widget_pair.getWidget()
                    self.widget_pairs.append(widget_pair)

                    win_id_rec = self.openglthread.createWindow(show=False)
                    widget_pair = WidgetPair(self.rec_video_area, win_id_rec, TestWidget0)
                    fr_rec = widget_pair.getWidget()
                    self.widget_pairs.append(widget_pair)

                nrow = self.pardic["videos per row"]
                print(pre, "setupUi: layout index, address : ", cw // nrow, cw % nrow, address)
                self.lay.addWidget(fr, cw // nrow, cw % nrow)
                self.rec_video_area_lay.addWidget(fr_rec, cw // nrow, cw % nrow)
                self.videoframes.append(fr)
                self.videoframes.append(fr_rec)

            # present frames with slot number cs at window win_id
            # rec_slot = cs  # debug
            print(pre, "setupUi: live:", cs, win_id)
            print(pre, "setupUi: rec :", rec_slot, win_id_rec)
            token = self.openglthread.connect(slot=cs, window_id=win_id)
            tokens.append(token)
            token = self.openglthread.connect(slot=rec_slot, window_id=win_id_rec)
            tokens.append(token)

            cw += 1
            cs += 1

            if use_live:
                chain_live.decodingOn()  # tell the decoding thread to start its job
            chain_rec.decodingOn()
            a += 1

    def closeValkka(self):
        self.livethread.close()
        self.valkkafsmanager.close()
        for chain in self.chains:
            chain.close()
        self.chains = []
        self.widget_pairs = []
        self.videoframes = []
        self.openglthread.close()
        # time.sleep(5)

    def start_streams(self):
        pass

    def stop_streams(self):
        pass

    def closeEvent(self, e):
        print("\n", pre, "closeEvent!\n")
        self.stop_streams()
        self.closeValkka()
        self.rec_window.forceClose()
        # self.rec_window.close()
        e.accept()
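For orientation, the sketch below shows how a class like the one above might be driven, assuming it lives in the same module. This is a minimal sketch, not part of the original listing: the pardic keys are the ones the class actually reads, but the values, the camera address, the use_live / valkka_xwin flags and the Qt binding are illustrative assumptions, and the construction of the ValkkaFS instance is deliberately left out (see Valkka's ValkkaFS documentation for that).

import sys
from PyQt5 import QtWidgets  # assuming PyQt5; adapt if you use another supported Qt binding

# module-level flags referenced in MyGui.openValkka (illustrative values)
use_live = True      # create live-view filterchains in addition to the recorded ones
valkka_xwin = True   # let Valkka create the X windows, with a translucent widget on top


def main(valkkafs):
    """Launch the GUI; 'valkkafs' is a ValkkaFS instance created beforehand."""
    pardic = {  # keys taken from the class above; values are examples only
        "cams": ["rtsp://user:password@192.168.0.24"],  # hypothetical camera address
        "live affinity": -1,
        "gl affinity": -1,
        "dec affinity start": -1,
        "dec affinity stop": -1,
        "n_720p": 20, "n_1080p": 20, "n_1440p": 0, "n_4K": 0,
        "msbuftime": 100,
        "videos per row": 2,
    }
    app = QtWidgets.QApplication(sys.argv)
    mg = MyGui(pardic, valkkafs)
    mg.show()
    app.exec_()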
def openValkka(self):
    self.livethread = LiveThread(  # starts live stream services (using live555)
        name="live_thread",
        # verbose=True,
        verbose=False,
        affinity=self.pardic["live affinity"])

    self.gpu_handler = GPUHandler(self.pardic)

    self.chains = []
    a = self.pardic["dec affinity start"]
    cw = 0  # widget / window index
    cs = 1  # slot / stream count

    ntotal = len(self.addresses) * self.pardic["replicate"]
    nrow = self.pardic["videos per row"]
    ncol = max((ntotal // self.pardic["videos per row"]) + 1, 2)

    for address in self.addresses:
        # now livethread and openglthread are running
        if (a > self.pardic["dec affinity stop"]):
            a = self.pardic["dec affinity start"]
        print(pre, "openValkka: setting decoder thread on processor", a)

        chain = OpenFilterchain(  # decoding and branching the stream happens here
            livethread=self.livethread,
            address=address,
            slot=cs,
            affinity=a,
            # verbose=True,
            verbose=False,
            msreconnect=10000,
            # flush_when_full=True,
            flush_when_full=False,
            # Timestamp correction type: TimeCorrectionType_none,
            # TimeCorrectionType_dummy, or TimeCorrectionType_smart (default)
            # time_correction=TimeCorrectionType_dummy,
            time_correction=TimeCorrectionType_smart,
            # Operating system socket ringbuffer size in bytes; 0 means default
            recv_buffer_size=0,
            # recv_buffer_size=1024*800,  # 800 KB
            # Reordering buffer time for Live555 packets in MILLIseconds; 0 means default
            reordering_mstime=0
            # reordering_mstime=300
        )

        # send stream from all OpenFilterchain to all GPUs
        for glthread in self.gpu_handler.openglthreads:
            # OpenGLThread.getInput() returns the input FrameFilter
            chain.connect(glthread.name, glthread.getInput())

        # important .. otherwise chain will go out of context and get garbage collected ..
        self.chains.append(chain)

        for cc in range(0, self.pardic["replicate"]):
            print(pre, "setupUi: layout index, address : ", cw // nrow, cw % nrow, address)
            # self.lay.addWidget(fr, cw // nrow, cw % nrow)  # floating windows instead
            container = VideoContainer(cs, self.gpu_handler)
            container.getWidget().setGeometry(
                self.desktop_handler.getGeometry(nrow, ncol, cw % nrow, cw // nrow))
            container.getWidget().show()
            self.videoframes.append(container)
            cw += 1

        cs += 1  # TODO: crash when repeating the same slot number ..?
        chain.decodingOn()  # tell the decoding thread to start its job
        a += 1
class MyGui(QtWidgets.QMainWindow):

    debug = False
    # debug = True

    def __init__(self, pardic, parent=None):
        super(MyGui, self).__init__()
        self.pardic = pardic
        self.initVars()
        self.setupUi()
        if (self.debug): return
        self.openValkka()
        self.start_streams()

    def initVars(self):
        pass

    def setupUi(self):
        self.setGeometry(QtCore.QRect(100, 100, 800, 800))
        self.w = QtWidgets.QWidget(self)
        self.setCentralWidget(self.w)
        self.lay = QtWidgets.QGridLayout(self.w)
        self.videoframes = []
        self.addresses = self.pardic["cams"]

    def openValkka(self):
        self.livethread = LiveThread(  # starts live stream services (using live555)
            name="live_thread",
            # verbose=True,
            verbose=False,
            affinity=self.pardic["live affinity"]
        )

        # create widgets before starting OpenGLThread and reserving frames
        cw = 0  # widget / window index
        self.win_ids = []
        for address in self.addresses:
            for cc in range(0, self.pardic["replicate"]):
                fr = TestWidget0(self.w)
                win_id = int(fr.winId())
                nrow = self.pardic["videos per row"]
                print(pre, "setupUi: layout index, address : ", cw // nrow, cw % nrow, address)
                self.lay.addWidget(fr, cw // nrow, cw % nrow)
                self.videoframes.append(fr)
                self.win_ids.append(win_id)
                cw += 1

        win_iter = iter(self.win_ids)

        self.openglthread = OpenGLThread(  # starts frame presenting services
            name="mythread",
            # reserve stacks of YUV video frames for various resolutions
            n_720p=self.pardic["n_720p"],
            n_1080p=self.pardic["n_1080p"],
            n_1440p=self.pardic["n_1440p"],
            n_4K=self.pardic["n_4K"],
            # naudio=self.pardic["naudio"],  # obsolete
            # verbose=True,
            verbose=False,
            msbuftime=self.pardic["msbuftime"],
            affinity=self.pardic["gl affinity"]
        )

        if (self.openglthread.hadVsync()):
            w = QtWidgets.QMessageBox.warning(
                self,
                "VBLANK WARNING",
                "Syncing to vertical refresh is enabled\nTHIS WILL DESTROY YOUR FRAMERATE\n"
                "Disable it with 'export vblank_mode=0'; for nvidia proprietary drivers, "
                "use 'export __GL_SYNC_TO_VBLANK=0'")

        tokens = []
        self.chains = []
        a = self.pardic["dec affinity start"]
        cw = 0  # widget / window index
        cs = 1  # slot / stream count

        for address in self.addresses:
            # now livethread and openglthread are running
            if (a > self.pardic["dec affinity stop"]):
                a = self.pardic["dec affinity start"]
            print(pre, "openValkka: setting decoder thread on processor", a)

            chain = BasicFilterchain(  # decoding and branching the stream happens here
                livethread=self.livethread,
                openglthread=self.openglthread,
                address=address,
                slot=cs,
                affinity=a,
                # verbose=True,
                verbose=False,
                msreconnect=10000,
                # flush_when_full=True,
                flush_when_full=False,
                # Timestamp correction type: TimeCorrectionType_none,
                # TimeCorrectionType_dummy, or TimeCorrectionType_smart (default)
                # time_correction=TimeCorrectionType_dummy,
                # time_correction=TimeCorrectionType_smart,  # by default, no need to specify
                # Operating system socket ringbuffer size in bytes; 0 means default
                recv_buffer_size=0,
                # recv_buffer_size=1024*800,  # 800 KB
                # Reordering buffer time for Live555 packets in MILLIseconds; 0 means default
                reordering_mstime=0
                # reordering_mstime=300
            )

            # important .. otherwise chain will go out of context and get garbage collected ..
            self.chains.append(chain)

            for cc in range(0, self.pardic["replicate"]):
                # present frames with slot number cs at window win_id
                token = self.openglthread.connect(slot=cs, window_id=next(win_iter))
                tokens.append(token)
                cw += 1

            cs += 1
            chain.decodingOn()  # tell the decoding thread to start its job
            a += 1

    def closeValkka(self):
        self.livethread.close()
        for chain in self.chains:
            chain.close()
        self.chains = []
        self.widget_pairs = []
        self.videoframes = []
        self.openglthread.close()

    def start_streams(self):
        pass

    def stop_streams(self):
        pass

    def closeEvent(self, e):
        print(pre, "closeEvent!")
        self.stop_streams()
        self.closeValkka()
        e.accept()
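The "replicate" key above is worth a note: each stream keeps a single slot number and is decoded only once; the inner loop simply calls openglthread.connect() repeatedly with that slot but a different window id, so the same decoded frames are presented in several windows. As a rough illustration (not from the original listing; all values are made up), the parameter dictionary this class expects could look like this:

pardic = {
    "cams": ["rtsp://user:password@192.168.0.24"],  # hypothetical camera address
    "replicate": 2,            # show each stream in two windows (still decoded only once)
    "videos per row": 2,
    "live affinity": -1,       # -1 = do not bind the thread to a particular core
    "gl affinity": -1,
    "dec affinity start": -1,
    "dec affinity stop": -1,
    "n_720p": 20, "n_1080p": 20, "n_1440p": 0, "n_4K": 0,  # YUV frame stacks to reserve
    "msbuftime": 100,          # buffering time in milliseconds
}
mg = MyGui(pardic)  # requires a running QApplication, as in any Qt program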
"""<rtf> First, import API level 2: <rtf>""" import time from valkka.api2 import LiveThread, OpenGLThread from valkka.api2 import BasicFilterchain """<rtf> Instantiating the API level 2 LiveThread starts running the underlying cpp thread: <rtf>""" livethread = LiveThread( # starts live stream services (using live555) name="live_thread", verbose=False, affinity=-1) """<rtf> Same goes for OpenGLThread: <rtf>""" openglthread = OpenGLThread( name="glthread", n_720p=20, # reserve stacks of YUV video frames for various resolutions n_1080p=20, n_1440p=0, n_4K=0, verbose=False, msbuftime=100, affinity=-1) """<rtf> The filterchain and decoder (AVThread) are encapsulated into a single class. Instantiating starts the AVThread (decoding is off by default): <rtf>""" chain = BasicFilterchain( # decoding and branching the stream happens here livethread=livethread, openglthread=openglthread,