def makeChain(self):
    """Create the filterchains for playing back a recorded stream."""
    # Fork filters: one for the incoming recorded stream, one for the
    # decoded YUV frames produced by AVThread.
    self.fork = core.ForkFrameFilterN("fork_" + str(self.slot))
    self.fork_yuv = core.ForkFrameFilterN("fork_yuv_" + str(self.slot))

    # Frame fifo sizing for the decoder thread.
    self.framefifo_ctx = core.FrameFifoContext()
    self.framefifo_ctx.n_basic = self.n_basic
    self.framefifo_ctx.n_setup = self.n_setup
    self.framefifo_ctx.n_signal = self.n_signal
    self.framefifo_ctx.flush_when_full = True

    # Decoder thread: writes decoded frames to self.fork_yuv.
    self.avthread = core.AVThread(
        "avthread_" + self.idst,
        self.fork_yuv,
        self.framefifo_ctx)
    self.avthread.setAffinity(self.affinity)

    self.info = core.InfoFrameFilter("debug")

    # Initial connections for the recorded stream:
    # self.fork --> AVThread (blocking input filter).
    self.connect_to_stream(
        "rec_decode_" + str(self.slot),
        self.avthread.getBlockingFrameFilter())
    # self.connect_to_stream("rec_decode_" + str(self.slot), self.info)  # debug

    # Recorded stream from ValkkaFS is routed into self.fork.
    # Signature: setOutput(_id, slot, framefilter)
    self.ctx = self.valkkafsmanager.setOutput(
        self.id_rec, self.slot, self.fork)
def makeChain(self):
    """Create the filter chain: main (decode) branch and swscale branch."""
    # *** main branch ***
    # FrameFilter chains can be attached to a ForkFrameFilterN after it
    # has been instantiated.
    self.fork_filter = core.ForkFrameFilterN(
        "av_fork_at_slot_" + str(self.slot))

    # Frame fifo sizing for the decoder thread.
    self.framefifo_ctx = core.FrameFifoContext()
    self.framefifo_ctx.n_basic = self.n_basic
    self.framefifo_ctx.n_setup = self.n_setup
    self.framefifo_ctx.n_signal = self.n_signal
    self.framefifo_ctx.flush_when_full = self.flush_when_full

    self.avthread = core.AVThread(
        "avthread_" + self.idst,
        self.fork_filter,
        self.framefifo_ctx)
    if self.verbose:
        print(self.pre, "binding AVThread to core", int(self.affinity))
    self.avthread.setAffinity(self.affinity)

    # Input FrameFilter of the decoder thread.
    self.av_in_filter = self.avthread.getFrameFilter()

    # *** swscale branch ***
    # Chain (built end-to-start): interval_filter -> sws_filter -> sws_fork_filter
    self.sws_fork_filter = core.ForkFrameFilterN(
        "sws_fork_at_slot_" + str(self.slot))
    self.sws_filter = core.SwScaleFrameFilter(
        "sws_filter", self.width, self.height, self.sws_fork_filter)
    self.interval_filter = core.TimeIntervalFrameFilter(
        "interval_filter", self.shmem_image_interval, self.sws_filter)
def makeChain(self):
    """Create the filterchains for a live stream: decoding and recording."""
    self.fork = core.ForkFrameFilterN("fork_" + str(self.slot))
    self.fork_yuv = core.ForkFrameFilterN("fork_yuv_" + str(self.slot))

    # Frame fifo sizing for the decoder thread.
    self.framefifo_ctx = core.FrameFifoContext()
    self.framefifo_ctx.n_basic = self.n_basic
    self.framefifo_ctx.n_setup = self.n_setup
    self.framefifo_ctx.n_signal = self.n_signal
    self.framefifo_ctx.flush_when_full = True

    # Decoder thread: writes decoded frames to self.fork_yuv.
    self.avthread = core.AVThread(
        "avthread_" + self.idst,
        self.fork_yuv,
        self.framefifo_ctx)
    self.avthread.setAffinity(self.affinity)

    # Initial connections for the live stream:
    self.createLiveContext()  # LiveThread writes to self.fork

    # self.fork --> AVThread (decode)
    self.connect_to_stream(
        "live_decode_" + str(self.slot),
        self.avthread.getFrameFilter())
    # self.fork --> ValkkaFSWriterThread (record)
    self.connect_to_stream(
        "recorder_" + str(self.slot),
        self.valkkafsmanager.getFrameFilter())
    self.valkkafsmanager.setInput(self.id_rec, self.slot)
def make_decode_branch(self):
    """Build the decoding branch and hook it into the main fork."""
    self.fork_filter_decode = core.ForkFrameFilterN(
        "fork_filter_decode_" + str(self.slot))

    # Frame fifo sizing for the decoder thread.
    self.framefifo_ctx = core.FrameFifoContext()
    self.framefifo_ctx.n_basic = self.n_basic
    self.framefifo_ctx.n_setup = self.n_setup
    self.framefifo_ctx.n_signal = self.n_signal
    self.framefifo_ctx.flush_when_full = self.flush_when_full

    self.avthread = core.AVThread(
        "avthread_" + str(self.slot),
        self.fork_filter_decode,
        self.framefifo_ctx)

    if self.affinity > -1:
        # an explicit core affinity overrides the thread-count setting
        self.avthread.setAffinity(self.affinity)
    elif self.number_of_threads > 1:
        self.avthread.setNumberOfThreads(
            self.number_of_threads)  # two by default

    # Input FrameFilter of the decoder thread.
    self.av_in_filter = self.avthread.getFrameFilter()

    # Connect to the main branch.
    self.fork_filter_main.connect(
        "decoding_" + str(self.slot), self.av_in_filter)
def make_filesystem_branch(self):
    """Build the filesystem branch: main fork --> gate --> file fork."""
    self.fork_filter_file = core.ForkFrameFilterN(
        "fork_filter_file_" + str(self.slot))
    self.fs_gate = core.GateFrameFilter(
        "fs_gate_" + str(self.slot), self.fork_filter_file)
    # Connect to the main branch.
    self.fork_filter_main.connect("fs_gate_" + str(self.slot), self.fs_gate)
    # unSet() presumably closes the gate so no frames flow to the
    # filesystem fork until it is opened — confirm against GateFrameFilter docs.
    self.fs_gate.unSet()
def makeChain(self):
    """Create the filter chain: fork filter feeding a decoder thread."""
    # FrameFilter chains can be attached to a ForkFrameFilterN after it
    # has been instantiated.
    self.fork_filter = core.ForkFrameFilterN(
        "av_fork_at_slot_" + str(self.slot))

    # Frame fifo sizing for the decoder thread.
    self.framefifo_ctx = core.FrameFifoContext()
    self.framefifo_ctx.n_basic = self.n_basic
    self.framefifo_ctx.n_setup = self.n_setup
    self.framefifo_ctx.n_signal = self.n_signal
    self.framefifo_ctx.flush_when_full = self.flush_when_full

    self.avthread = core.AVThread(
        "avthread_" + self.idst,
        self.fork_filter,
        self.framefifo_ctx)
    self.avthread.setAffinity(self.affinity)

    # Input FrameFilter of the decoder thread.
    self.av_in_filter = self.avthread.getFrameFilter()
def make_analysis_branch(self):
    """Connect only if a movement detector is required:

    - Recording on movement
    - Analysis on movement

    Chain (built end-to-start):
    movement_filter -> sws_gate -> sws_filter -> sws_fork_filter
    """
    self.sws_fork_filter = core.ForkFrameFilterN(
        "sws_fork_" + str(self.slot))
    self.sws_filter = core.SwScaleFrameFilter(
        "sws_scale_" + str(self.slot),
        self.width, self.height,
        self.sws_fork_filter)
    self.sws_gate = core.GateFrameFilter(
        "sws_gate_" + str(self.slot), self.sws_filter)
    self.movement_filter = core.MovementFrameFilter(
        "movement_" + str(self.slot),
        # self.movement_interval,
        self.shmem_image_interval,
        self.movement_treshold,
        self.movement_duration,
        self.sws_gate)
def make_qt_branch(self):
    """Connect only if bitmaps are needed at the Qt side.

    Chain (built end-to-start):
    qt_gate -> qt_interval -> qt_sws_filter -> qt_fork_filter
    """
    self.qt_fork_filter = core.ForkFrameFilterN("qt_fork_" + str(self.slot))
    self.qt_sws_filter = core.SwScaleFrameFilter(
        "qt_sws_scale_" + str(self.slot),
        self.width, self.height,
        self.qt_fork_filter)
    self.qt_interval = core.TimeIntervalFrameFilter(
        "qt_interval_" + str(self.slot),
        # self.shmem_image_interval,
        500,  # NOTE(review): hard-coded interval; shmem_image_interval was used before
        self.qt_sws_filter)
    self.qt_gate = core.GateFrameFilter(
        "qt_gate_" + str(self.slot), self.qt_interval)
    # Connect to the main decode fork.
    self.fork_filter_decode.connect(
        "qt_branch_" + str(self.slot), self.qt_gate)
def make_decode_branch(self):
    """Build the decoding branch and attach it to the main fork."""
    self.fork_filter_decode = core.ForkFrameFilterN(
        "fork_filter_decode_" + str(self.slot))

    # Frame fifo sizing for the decoder thread.
    self.framefifo_ctx = core.FrameFifoContext()
    self.framefifo_ctx.n_basic = self.n_basic
    self.framefifo_ctx.n_setup = self.n_setup
    self.framefifo_ctx.n_signal = self.n_signal
    self.framefifo_ctx.flush_when_full = self.flush_when_full

    self.avthread = core.AVThread(
        "avthread_" + str(self.slot),
        self.fork_filter_decode,
        self.framefifo_ctx)
    self.avthread.setAffinity(self.affinity)

    # Input FrameFilter of the decoder thread.
    self.av_in_filter = self.avthread.getFrameFilter()

    # Connect to the main branch.
    self.fork_filter_main.connect(
        "decoding_" + str(self.slot), self.av_in_filter)
def make_main_branch(self):
    """Create the main fork; the other branches connect to it."""
    self.fork_filter_main = core.ForkFrameFilterN(
        "fork_filter_main_" + str(self.slot))
def make_main_branch(self):
    """Create the main fork and route the ValkkaFS output stream into it."""
    self.fork_filter_main = core.ForkFrameFilterN(
        "fork_filter_main_" + str(self.slot))
    # ValkkaFS output (recorded stream) --> main fork.
    self.valkkafsmanager.setOutput(
        self._id, self.slot, self.fork_filter_main)
def __call__(self, livethread=None, openglthread=None):
    """Register running live & opengl threads, construct the filterchain
    and start streaming & decoding.

    :param livethread:    a running LiveThread (required)
    :param openglthread:  a running OpenGLThread, or None to skip display

    Fixes: removed a leftover debug ``print`` to stdout and blocks of
    commented-out dead code; behavior is otherwise unchanged.
    """
    assert (livethread is not None)
    self.livethread = livethread
    self.openglthread = openglthread

    # Construct the filter graph from end to beginning.

    # --- Main branch ---
    self.main_fork = core.ForkFrameFilterN("main_fork" + str(self.slot))

    # Connect livethread to the main branch: stream goes to main_fork.
    self.live_ctx = core.LiveConnectionContext(
        core.LiveConnectionType_rtsp,
        self.rtsp_address,
        self.slot,
        self.main_fork)
    # Additional parameters for the livethread streaming context:
    # 1: for NATs and streaming over the internet, use tcp streaming
    self.live_ctx.request_tcp = True
    # 2: if there isn't enough buffering or timestamps are wrong, use:
    #    self.live_ctx.time_correction = core.TimeCorrectionType_smart
    # 3: reconnect automatically every 10 seconds if the camera is offline
    self.live_ctx.mstimeout = 10000
    self.livethread.registerStreamCall(self.live_ctx)

    # --- Branch B: mux branch ---
    self.fmp4_shmem = core.FragMP4ShmemFrameFilter(
        self.fmp4_shmem_name,
        self.fmp4_shmem_buffers,
        self.fmp4_shmem_cellsize)
    self.fmp4_shmem.useFd(self.fmp4_sync_event)
    self.fmp4_muxer = core.FragMP4MuxFrameFilter("mp4_muxer", self.fmp4_shmem)
    # NOTE: the muxer is activated later (fmp4_muxer.activate()), but it
    # must be CONNECTED from the very beginning so that it receives the
    # setupframes, which are sent only at the start of the streaming process.
    self.main_fork.connect(
        "fragmp4_terminal" + str(self.slot), self.fmp4_muxer)

    # --- Branch A: decoding branch ---
    self.decode_fork = core.ForkFrameFilterN(
        "decode_fork_" + str(self.slot))
    self.avthread = core.AVThread(
        "avthread_" + str(self.slot),
        self.decode_fork)  # avthread feeds decode_fork
    # main branch --> avthread --> decode_fork
    self.avthread_in_filter = self.avthread.getFrameFilter()
    self.main_fork.connect(
        "decoder_" + str(self.slot), self.avthread_in_filter)

    # --- Branch A / sub-branch A.1: RGB shared memory ---
    # decode_fork --> interval_filter --> sws_filter --> rgb_shmem_filter
    self.rgb_shmem_filter = core.RGBShmemFrameFilter(
        self.rgb_shmem_name,
        self.rgb_shmem_buffers,
        self.width,
        self.height)
    self.rgb_shmem_filter.useFd(self.rgb_sync_event)
    self.sws_filter = core.SwScaleFrameFilter(
        "sws_filter", self.width, self.height, self.rgb_shmem_filter)
    self.interval_filter = core.TimeIntervalFrameFilter(
        "interval_filter", self.image_interval, self.sws_filter)
    self.decode_fork.connect(
        "rgb_shmem_terminal" + str(self.slot), self.interval_filter)

    # --- Branch A / sub-branch A.2: OpenGL display branch ---
    if self.openglthread is not None:
        # Route decoded frames into the OpenGL thread.
        self.opengl_input_filter = self.openglthread.getFrameFilter()
        self.decode_fork.connect(
            "gl_terminal" + str(self.slot), self.opengl_input_filter)
        # Create an X window and map this slot's stream into it.
        self.window_id = self.openglthread.createWindow()
        self.openglthread.newRenderGroupCall(self.window_id)
        self.context_id = self.openglthread.newRenderContextCall(
            self.slot, self.window_id, 0)

    # Start streaming and decoding.
    self.livethread.playStreamCall(self.live_ctx)
    self.avthread.startCall()
    self.avthread.decodingOnCall()