def __init__(self, verbose=False, clockrate=90000): ''' gst-launch -e appsrc ! rtph264depay ! h264parse ! mp4mux ! filesink location=file.mp4 ''' # TODO check if gst initiated Gst.init(None) Gst.debug_set_active(True) #Gst.debug_set_default_threshold(Gst.DebugLevel.MEMDUMP) Gst.debug_set_default_threshold(Gst.DebugLevel.LOG) Gst.debug_set_colored(False) # config params self.clockrate = clockrate self.verbose = verbose # GST modules self.rtpfeed = None self.mediafeed = None self.codecfeed = None self.mp4feed = None self.filefeed = None # Pipeline self.pipeline = None # Video Frames self.payloads = deque() self.feedcount = 0 self.payloadcount = 0
def main():
    """Run a simple nvarguscamerasrc -> nvoverlaysink preview pipeline."""
    # Standard GStreamer initialization
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(4)
    GObject.threads_init()  # no-op on modern PyGObject; kept for older versions
    Gst.init(None)

    pipeline = Gst.Pipeline()

    source = create_element_or_error("nvarguscamerasrc", "camera-source")
    sink = create_element_or_error("nvoverlaysink", "overlay")
    source.set_property('sensor-id', 0)

    pipeline.add(source)
    pipeline.add(sink)
    source.link(sink)

    loop = GObject.MainLoop()
    bus = pipeline.get_bus()
    bus.add_signal_watch()
    bus.connect("message", bus_call, loop)

    pipeline.set_state(Gst.State.PLAYING)
    try:
        loop.run()
    except KeyboardInterrupt:
        # Fix: was a bare `except: pass`, which also swallowed SystemExit
        # and hid real bugs; only Ctrl-C should end the loop quietly.
        pass

    # Cleanup
    pipeline.set_state(Gst.State.NULL)
def _check(self, song):
    """Try to decode *song*; warn (on platforms we ship deps for) on failure."""
    old_threshold = Gst.debug_get_default_threshold()
    Gst.debug_set_default_threshold(Gst.DebugLevel.NONE)

    launch_line = "uridecodebin uri=%s ! fakesink" % song("~uri")
    pipeline = Gst.parse_launch(launch_line)
    bus = pipeline.get_bus()
    pipeline.set_state(Gst.State.PLAYING)
    try:
        while True:
            message = bus.timed_pop(Gst.SECOND * 10)
            failed = not message or message.type == Gst.MessageType.ERROR
            if failed:
                debug = message.parse_error()[0].message if message else "timed out"
                # only print a warning for platforms where we control
                # the shipped dependencies.
                if sys.platform == "darwin" or os.name == "nt":
                    print_w("GStreamer: Decoding %r failed (%s)"
                            % (song("~format"), debug))
                break
            if message.type == Gst.MessageType.EOS:
                break
    finally:
        pipeline.set_state(Gst.State.NULL)
        Gst.debug_set_default_threshold(old_threshold)
def _check(self, song):
    """Decode *song* to completion; return an error string or None on success."""
    old_threshold = Gst.debug_get_default_threshold()
    Gst.debug_set_default_threshold(Gst.DebugLevel.NONE)
    pipeline = Gst.parse_launch(
        "uridecodebin uri=%s ! fakesink" % song("~uri"))
    bus = pipeline.get_bus()
    pipeline.set_state(Gst.State.PLAYING)
    error = None
    try:
        while True:
            message = bus.timed_pop(Gst.SECOND * 40)
            if message and message.type == Gst.MessageType.EOS:
                break
            if not message or message.type == Gst.MessageType.ERROR:
                error = message.parse_error()[0].message if message else "timed out"
                break
    finally:
        # Always restore pipeline state and the previous log threshold.
        pipeline.set_state(Gst.State.NULL)
        Gst.debug_set_default_threshold(old_threshold)
    return error
def main():
    """Switch a tcam camera into trigger mode and fire software triggers."""
    Gst.init(sys.argv)  # init gstreamer

    # Lower gstreamer's default logging level; see
    # https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)

    serial = None  # set to a serial string to open a specific device

    pipeline = Gst.parse_launch(
        "tcambin name=source ! videoconvert ! ximagesink")
    source = pipeline.get_by_name("source")
    if serial is not None:
        source.set_property("serial", serial)

    pipeline.set_state(Gst.State.PLAYING)

    # stream for 2 seconds before switching to trigger mode,
    # simply to show that the device is running
    time.sleep(2)

    # Depending on the camera, "Trigger Mode" is an enum or a boolean.
    trigger_mode_type = source.get_tcam_property_type("Trigger Mode")
    enum_trigger = trigger_mode_type == "enum"
    source.set_tcam_property("Trigger Mode", "On" if enum_trigger else True)

    while True:
        input_text = input(
            "Press 'Enter' to trigger an image.\n q + enter to stop the stream."
        )
        if input_text == "q":
            break
        if source.set_tcam_property("Software Trigger", True):
            print("=== Triggered image. ===\n")
        else:
            print("!!! Could not trigger. !!!\n")

    # deactivate trigger mode, to prevent confusion when the camera is
    # next started without wanting to trigger
    source.set_tcam_property("Trigger Mode", "Off" if enum_trigger else False)

    # this stops the pipeline and frees all resources
    pipeline.set_state(Gst.State.NULL)
def _check(self, song):
    """Decode *song* with a throwaway pipeline; return an error text or None."""
    old_threshold = Gst.debug_get_default_threshold()
    Gst.debug_set_default_threshold(Gst.DebugLevel.NONE)
    pipeline = Gst.parse_launch("uridecodebin uri=%s ! fakesink" % song("~uri"))
    bus = pipeline.get_bus()
    pipeline.set_state(Gst.State.PLAYING)
    error = None
    try:
        while error is None:
            message = bus.timed_pop(Gst.SECOND * 40)
            if message is None:
                error = "timed out"
            elif message.type == Gst.MessageType.ERROR:
                error = message.parse_error()[0].message
            elif message.type == Gst.MessageType.EOS:
                break
    finally:
        pipeline.set_state(Gst.State.NULL)
        Gst.debug_set_default_threshold(old_threshold)
    return error
def _build_gst_pipeline(self):
    """Parse the pipeline description and wire up element refs and callbacks."""
    log.debug("Building new gstreamer pipeline")
    pipeline_args = self._get_pipeline_args()
    log.debug("Initializing gstreamer pipeline")
    self.gst_pipeline = Gst.parse_launch(pipeline_args)
    self.gst_video_source = self.gst_pipeline.get_by_name('source')
    self.gst_video_source.props.uri = self.source.uri
    # 'autoplug-continue' lets on_autoplug_continue inspect/veto decodebin's
    # element selection for each discovered stream.
    self.gst_video_source_connect_id = self.gst_video_source.connect(
        'autoplug-continue', self.on_autoplug_continue)
    assert self.gst_video_source_connect_id
    # Keep references to the named elements of the parsed pipeline.
    self.gst_queue0 = self.gst_pipeline.get_by_name('queue0')
    self.gst_vconvert = self.gst_pipeline.get_by_name('vconvert')
    self.gst_queue1 = self.gst_pipeline.get_by_name('queue1')
    self.gst_appsink = self.gst_pipeline.get_by_name('appsink')
    log.debug("appsink: %s", str(self.gst_appsink))
    log.debug("appsink will emit signals: %s",
              self.gst_appsink.props.emit_signals)
    # register to receive new image sample events from gst
    self._gst_appsink_connect_id = self.gst_appsink.connect(
        'new-sample', self.on_new_sample)
    self.mainloop = GObject.MainLoop()

    if log.getEffectiveLevel() <= logging.DEBUG:
        # set Gst debug log level
        Gst.debug_set_active(True)
        Gst.debug_set_default_threshold(3)

    # Set up a pipeline bus watch to catch errors.
    self.gst_bus = self.gst_pipeline.get_bus()
    self.gst_bus.add_signal_watch()
    self.gst_bus.connect('message', self.on_bus_message, self.mainloop)
def __init__(self, uri, moviewindow):
    """Initialize the player for *uri*, rendering into *moviewindow*."""
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(2)

    # Default frame geometry and crop margins (pixels).
    self.video_width = 1920
    self.video_height = 1080
    self.crop_left = 900
    self.crop_right = 20
    self.crop_bottom = 20
    self.crop_top = 20

    # self.inFileLocation = "../../../media/webos.mp4"
    self.inFileLocation = "/home/kemal/Developer/vdwll/media/brbad.mp4"  # "/../../../media/pixar.mp4"
    # Sample URIs kept for reference:
    # self.uri = "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm"
    # self.uri = "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov"
    # self.uri = "rtsp://127.0.0.1:8554/test"
    # self.uri = "rtsp://192.168.1.32:5540/ch0"
    # self.uri = "http://192.168.1.32:8080/playlist.m3u"
    self.uri = uri
    # Fix: the moviewindow argument was accepted but never stored, so the
    # render target could not be used later by this instance.
    self.moviewindow = moviewindow

    self.constructPipeline()
    self.is_playing = False
    self.connectSignals()
def main():
    """Open a tcam source element and print its supported formats."""
    Gst.init(sys.argv)  # init gstreamer

    # Lower gstreamer's default logging level; see
    # https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)

    # The tcambin wraps the tcamsrc and offers additional
    # formats by implicitly converting.
    camera = Gst.ElementFactory.make("tcambin")

    serial = None  # set to a serial string to select a specific device
    if serial:
        camera.set_property("serial", serial)

    # READY is enough for the device to expose its format list.
    camera.set_state(Gst.State.READY)
    print_formats(camera)
    camera.set_state(Gst.State.NULL)
def __init__(self):
    """Start playing the sample network stream with playbin."""
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(2)

    self.data = CustomData()
    self.data.pipeline = Gst.parse_launch(
        'playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm'
    )

    bus = self.data.pipeline.get_bus()
    state_ret = self.data.pipeline.set_state(Gst.State.PLAYING)
    if state_ret == Gst.StateChangeReturn.FAILURE:
        print('ERROR: Unable to set the pipeline to the playing state.')
        sys.exit(-1)
    if state_ret == Gst.StateChangeReturn.NO_PREROLL:
        # A live source does not preroll.
        self.data.is_live = True

    self.data.main_loop = GLib.MainLoop.new(None, False)
    bus.add_signal_watch()
    bus.connect('message', self.cb_message, self.data)
    self.data.main_loop.run()
def gst_debug(level):
    """Sets Gstreamer debug output

    Args:
        level : Gstreamer debug level.
    """
    # A falsy level (0/None) leaves debugging untouched.
    if not level:
        return
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(level)
def main():
    """Show a live view and simultaneously record the stream to an AVI file."""
    Gst.init(sys.argv)  # init gstreamer

    # Lower gstreamer's default logging level; see
    # https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)

    serial = None  # set to a serial string to open a specific device

    pipeline_description = (
        "tcambin name=bin"
        " ! video/x-raw,format=BGRx,width=640,height=480,framerate=30/1"
        " ! tee name=t"
        " ! queue"
        " ! videoconvert"
        " ! ximagesink"
        " t."
        " ! queue"
        " ! videoconvert"
        " ! avimux"
        " ! filesink name=fsink")
    pipeline = Gst.parse_launch(pipeline_description)

    # to save a video without live view, drop the tee/ximagesink branch:
    # "tcambin name=bin ! video/x-raw,... ! videoconvert ! avimux ! filesink name=fsink"

    # serial is defined, thus make the source open that device
    if serial is not None:
        pipeline.get_by_name("bin").set_property("serial", serial)

    file_location = "/tmp/tiscamera-save-stream.avi"
    pipeline.get_by_name("fsink").set_property("location", file_location)

    pipeline.set_state(Gst.State.PLAYING)
    print("Press Ctrl-C to stop.")
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    finally:
        pipeline.set_state(Gst.State.NULL)
def init():
    """Initialize GStreamer with colored, warning-level debug output.

    Ensures GST_DEBUG_DUMP_DOT_DIR is set so pipeline graphs can be dumped.
    """
    # Fix: the original probed the env var with `assert` inside try/except;
    # asserts are stripped under `python -O`, silently skipping the setup.
    if not os.environ.get('GST_DEBUG_DUMP_DOT_DIR'):
        os.environ['GST_DEBUG_DUMP_DOT_DIR'] = os.getcwd()
    GObject.threads_init()
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_colored(True)
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)
def init():
    """Initialize GStreamer with colored, warning-level debug output.

    Ensures GST_DEBUG_DUMP_DOT_DIR is set so pipeline graphs can be dumped.
    """
    # Fix: the original probed the env var with `assert` inside try/except;
    # asserts are stripped under `python -O`, silently skipping the setup.
    if not os.environ.get("GST_DEBUG_DUMP_DOT_DIR"):
        os.environ["GST_DEBUG_DUMP_DOT_DIR"] = os.getcwd()
    GObject.threads_init()
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_colored(True)
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)
def __init__(self, rtsp_uri=None, moviewindow=None, video_width=1920,
             video_height=1080, crop_left=0, crop_right=0, crop_bottom=0,
             crop_top=0):
    """Build a playbin-based player for *rtsp_uri* rendered into *moviewindow*.

    The crop_* values are pixel margins; element attributes are created as
    None placeholders and filled in by construct_pipeline().
    """
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(2)

    self.data = CustomData()
    self.video_width = video_width
    self.video_height = video_height
    self.crop_left = crop_left
    self.crop_right = crop_right
    self.crop_bottom = crop_bottom
    self.crop_top = crop_top
    self.uri = rtsp_uri
    self.moviewindow = moviewindow
    self.player = None
    self.uridecodebin = None
    self.data.pipeline = Gst.ElementFactory.make("playbin", "playbin")

    # Initialize audio pipeline elements
    self.audioconvert = None
    self.queue2 = None
    self.audiosink = None

    # Initialize video pipeline elements
    self.autoconvert = None
    self.videosink = None
    self.capsfilter = None
    self.videoscale = None
    self.colorspace = None
    self.queue1 = None
    self.videobox = None

    self.construct_pipeline()
    self.is_playing = False
    self.connect_signals()

    # Watch the bus for async and sync messages (sync is needed to embed
    # the video sink into the window as soon as the element announces it).
    bus = self.data.pipeline.get_bus()
    bus.add_signal_watch()
    bus.enable_sync_message_emission()
    bus.connect('message', self.cb_message, self.data)
    bus.connect("sync-message::element", self.on_sync_message_playbin)
def main():
    """Stream from a tcam source into an appsink and handle frames in a callback."""
    Gst.init(sys.argv)  # init gstreamer

    # Lower gstreamer's default logging level; see
    # https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)

    serial = None  # set to a serial string to open a specific device

    pipeline = Gst.parse_launch("tcambin name=source"
                                " ! videoconvert"
                                " ! appsink name=sink")
    # test for error
    if not pipeline:
        print("Could not create pipeline.")
        sys.exit(1)

    # A serial was given, so make the source open that specific device.
    if serial is not None:
        pipeline.get_by_name("source").set_property("serial", serial)

    sink = pipeline.get_by_name("sink")
    # tell appsink to notify us when it receives an image
    sink.set_property("emit-signals", True)
    user_data = "This is our user data"
    # tell appsink what function to call when it notifies us
    sink.connect("new-sample", callback, user_data)

    pipeline.set_state(Gst.State.PLAYING)
    print("Press Ctrl-C to stop.")

    # Block this thread until Ctrl-C arrives, then tear the pipeline down.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    finally:
        pipeline.set_state(Gst.State.NULL)
def init(librarian):
    """Create a GStreamerPlayer, enabling error-level Gst logging by default."""
    if Gst.debug_get_default_threshold() == Gst.DebugLevel.NONE:
        Gst.debug_set_default_threshold(Gst.DebugLevel.ERROR)

    # Probe whether GStreamer can read local files at all.
    can_read_files = Gst.Element.make_from_uri(Gst.URIType.SRC,
                                               "file:///fake/path/for/gst", "")
    if not can_read_files:
        raise PlayerError(
            _("Unable to open input files"),
            _("GStreamer has no element to handle reading files. Check "
              "your GStreamer installation settings."))
    return GStreamerPlayer(librarian)
def init(librarian):
    """Return a GStreamerPlayer for *librarian*, or raise PlayerError."""
    # Enable error messages by default
    if Gst.debug_get_default_threshold() == Gst.DebugLevel.NONE:
        Gst.debug_set_default_threshold(Gst.DebugLevel.ERROR)

    file_element = Gst.Element.make_from_uri(
        Gst.URIType.SRC, "file:///fake/path/for/gst", "")
    if file_element:
        return GStreamerPlayer(librarian)
    raise PlayerError(
        _("Unable to open input files"),
        _("GStreamer has no element to handle reading files. Check "
          "your GStreamer installation settings."))
def main():
    """Demonstrate saving/restoring the device state via the 'state' property."""
    Gst.init(sys.argv)

    # Lower gstreamer's default logging level; see
    # https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)

    pipeline = Gst.parse_launch("tcambin name=source ! fakesink")
    if not pipeline:
        print("Unable to create pipeline")
        return 1

    # Set this to a serial string for a specific camera
    serial = None

    # Fix: the original created a second, free-standing tcambin with
    # Gst.ElementFactory.make() and configured/queried that disconnected
    # element instead of the "source" element actually in the pipeline.
    camera = pipeline.get_by_name("source")
    if serial:
        # This is gstreamer set_property
        camera.set_property("serial", serial)

    # in the READY state the camera will always be initialized
    # in the PLAYING state additional properties may appear from gstreamer elements
    pipeline.set_state(Gst.State.PLAYING)
    if not block_until_playing(pipeline):
        print("Unable to start pipeline")

    # Print properties for a before/after comparison
    state = camera.get_property("state")
    print("State of device is:\n{}".format(state))

    # Change JSON description here
    # not part of this example
    camera.set_property("state", state)

    # Print properties for a before/after comparison
    state = camera.get_property("state")
    print("State of device is:\n{}".format(state))

    # cleanup, reset state
    pipeline.set_state(Gst.State.NULL)
def _setup_gstreamer(self):
    """Initialize GStreamer and pick the audio stream for this machine."""
    Gst.init()

    # Debug mode is disabled; the threshold only applies if it gets enabled.
    Gst.debug_set_active(False)
    Gst.debug_set_default_threshold(3)

    import platform
    on_pc = platform.machine() == 'x86_64'
    if on_pc:
        self._init_audio_pub_stream()
    else:
        # Raspberry Pi
        self._init_audio_rec_stream()
def _setup_gstreamer(self):
    """Init GStreamer and choose the audio stream implementation by platform."""
    Gst.init()

    # Setup the Debug mode for GStreamer (off; threshold preset).
    Gst.debug_set_active(False)
    Gst.debug_set_default_threshold(3)

    import platform
    if platform.machine() != 'x86_64':
        # Raspberry Pi
        self._init_audio_rec_stream()
    else:
        # PC
        self._init_audio_pub_stream()
def main():
    """List tcam properties before and during streaming."""
    Gst.init(sys.argv)  # init gstreamer

    # Lower gstreamer's default logging level; see
    # https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)

    pipeline = Gst.parse_launch("tcambin name=source ! fakesink")
    if not pipeline:
        print("Unable to create pipeline")
        return 1

    serial = None  # set to a camera serial to override the default device

    # the tcambin is used to retrieve the property list
    source = pipeline.get_by_name("source")
    if serial is not None:
        source.set_property("serial", serial)

    print("Properties before state PLAYING:")
    list_properties(source)

    # in the READY state the camera will always be initialized;
    # in the PLAYING state additional properties may appear from gstreamer elements
    pipeline.set_state(Gst.State.PLAYING)

    # helper function to ensure we have the right state
    # alternatively wait for the first image
    if not block_until_playing(pipeline):
        print("Unable to start pipeline")

    print("Properties during state PLAYING:")
    list_properties(source)

    pipeline.set_state(Gst.State.NULL)
    return 0
def __init__(self, uri, moviewindow, width, height):
    """Play *uri* with playbin, rendering into *moviewindow* at width x height."""
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(2)

    self.width = 0  # width in pixels of the incoming source video
    self.height = 0  # height in pixels of the incoming source video
    self.data = CustomData()
    self.uri = uri
    self.movie_window = moviewindow
    self.data.pipeline = Gst.ElementFactory.make("playbin", "playbin")
    self.data.pipeline.set_property("uri", self.uri)
    self.construct_mod_queue(video_width=width, video_height=height)

    # With rtspsrc it would look like this instead:
    # self.data.pipeline = Gst.parse_launch(
    #     "rtspsrc location={} latency=500 timeout=18446744073709551 tcp-timeout=18446744073709551 ! decodebin ! autovideosink".format(self.uri))

    # Upstream used this to analyze the streams; it has no meaning here.
    self.streams_list = []

    bus = self.data.pipeline.get_bus()
    ret = self.data.pipeline.set_state(Gst.State.PLAYING)
    if ret == Gst.StateChangeReturn.FAILURE:
        print('ERROR: Unable to set the pipeline to the playing state.')
        sys.exit(-1)
    elif ret == Gst.StateChangeReturn.NO_PREROLL:
        # Live source: no buffering will happen.
        print("Buffer oluşturmayacağız data live data...")
        self.data.is_live = True

    bus.add_signal_watch()
    bus.enable_sync_message_emission()
    # sync-message is needed to embed the video output into the window.
    bus.connect('message', self.cb_message, self.data)
    bus.connect("sync-message::element", self.on_sync_message)

    # connect to interesting signals in playbin
    self.data.pipeline.connect("video-tags-changed", self.on_tags_changed)
    self.data.pipeline.connect("audio-tags-changed", self.on_tags_changed)
    self.data.pipeline.connect("text-tags-changed", self.on_tags_changed)
def main():
    """Start the video streamer D-Bus server application."""
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(3)

    glib_loop = GLib.MainLoop()
    dbus_loop = dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
    app = application.VideoStreamerServerApplication(sys.argv)

    session_bus = dbus.SessionBus()
    reply = session_bus.request_name(vstreamer_utils.DBUS_NAME)
    if reply != dbus.bus.REQUEST_NAME_REPLY_PRIMARY_OWNER:
        # Another instance already owns the bus name.
        app.logger.error("Application already running")
        return 1

    server_controller = server.ServerController(session_bus)
    server_controller.start()
    return app.exec_()
def __init__(self, rtsp_uri=None, moviewindow=None, video_width=1920,
             video_height=1080, crop_left=0, crop_right=0, crop_bottom=0,
             crop_top=0):
    """Build a player for *rtsp_uri*; crop/size parameters configure the video path."""
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(2)

    self.data = CustomData()
    self.video_width = video_width
    self.video_height = video_height
    self.crop_left = crop_left
    self.crop_right = crop_right
    self.crop_bottom = crop_bottom
    self.crop_top = crop_top
    # Fix: the original assigned `self.uri = uri`, but no `uri` name exists in
    # this scope (the parameter is `rtsp_uri`), raising NameError at runtime.
    self.uri = rtsp_uri
    self.player = None
    self.uridecodebin = None

    # Initialize audio pipeline elements
    self.audioconvert = None
    self.queue2 = None
    self.audiosink = None

    # Initialize video pipeline elements
    self.autoconvert = None
    self.videosink = None
    self.capsfilter = None
    self.videoscale = None
    self.colorspace = None
    self.queue1 = None
    self.videobox = None

    self.construct_pipeline()
    self.is_playing = False
    self.connect_signals()
def main():
    """Set camera properties (exposure/gain/brightness) on a tcambin."""
    Gst.init(sys.argv)

    # Lower gstreamer's default logging level; see
    # https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html
    Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING)

    serial = None  # set to a serial string for a specific camera

    camera = Gst.ElementFactory.make("tcambin")
    if serial:
        # This is gstreamer set_property
        camera.set_property("serial", serial)

    # in the READY state the camera will always be initialized
    camera.set_state(Gst.State.READY)

    # Print properties for a before/after comparison
    print_properties(camera)

    # Set properties
    camera.set_tcam_property("Exposure Auto", False)
    camera.set_tcam_property("Gain Auto", False)
    # Some cameras offer exposure and gain as doubles instead of integers;
    # the GValue type must then match, e.g.:
    # camera.set_tcam_property("Exposure", 3000.0)
    # ('Exposure' may also appear as 'Exposure Time (us)'.)
    camera.set_tcam_property("Brightness", 200)

    print_properties(camera)

    # cleanup, reset state
    camera.set_state(Gst.State.NULL)
def __init__(self, device, size, rotation, onFatalError, mainLoop, debugLevel):
    """Build the capture pipeline for *device* and start listening on its bus.

    Raises ImportError if GStreamer cannot be initialized.
    """
    if not Gst.init_check(None):
        raise ImportError
    if debugLevel > 0:
        Gst.debug_set_active(True)
        Gst.debug_set_default_threshold(debugLevel)

    self._onFatalError = onFatalError
    # NOTE(review): '_mainLop' looks like a typo of '_mainLoop'; kept as-is
    # since other methods may reference this attribute name.
    self._mainLop = mainLoop
    self._toreDownAlready = False

    #pipeline control
    self._currentPipelineState = None
    self._pipelineStateCondition = Condition()

    ## Make sure attach and detach operation wait for each other to complete ##
    self._photoBinAttachDetachLock = Lock()
    self._localVideoBinAttachDetachLock = Lock()
    ###########################################################################

    self._pipeline = Gst.Pipeline()
    self._videoSrcBin = self._getVideoSrcBin(self._pipeline, device, size, rotation)
    self._videoEncBin = self._getVideoEncBin(size, rotation)
    self._photoCaptureBin = PhotoCaptureBin(self._onNoMorePhotos)
    self._localVideoBin = ImgVideoEncBin(size, rotation, self._onStopPhotoSeq)
    self._pipeline.add(self._videoEncBin.bin)
    self._pipeline.add(self._photoCaptureBin.bin)
    self._pipeline.add(self._localVideoBin.bin)

    self._bus = self._pipeline.get_bus()
    self._bus.set_flushing(True)

    # Route interesting bus messages to their handlers.
    self._busListener = BusListener(self._bus)
    self._busListener.addListener(Gst.MessageType.ERROR, self._onBusError)
    self._busListener.addListener(Gst.MessageType.EOS, self._onBusEos)
    self._busListener.addListener(Gst.MessageType.STATE_CHANGED, self._onBusStateChanged)
    self._busListener.addListener(Gst.MessageType.REQUEST_STATE, self._onRequestState)
    self._busListener.start()
def main(): Gst.init(sys.argv) # this line sets the gstreamer default logging level # it can be removed in normal applications # gstreamer logging can contain verry useful information # when debugging your application # see https://gstreamer.freedesktop.org/documentation/tutorials/basic/debugging-tools.html # for further details Gst.debug_set_default_threshold(Gst.DebugLevel.WARNING) serial = None pipeline = Gst.parse_launch( "tcambin name=source ! videoconvert ! ximagesink") if not pipeline: print("Could not create pipeline") sys.exit(1) if serial: src = pipeline.get_by_name("source") src.set_property("serial", serial) src = None bus = pipeline.get_bus() bus.add_signal_watch() bus.enable_sync_message_emission() bus.connect('message', bus_callback, None) pipeline.set_state(Gst.State.PLAYING) print( "Disconnect your camera to trigger a device lost or press enter to stop the stream." ) # we work with a event loop to be automatically # be notified when new messages occur. loop.run() pipeline.set_state(Gst.State.NULL)
def run(self):
    """Service entry point: start zeroconf/janus helpers, then the pipeline."""
    self.logger.info("Starting maverick-visiond")

    if 'debug' in self.config.args and self.config.args.debug:
        Gst.debug_set_active(True)
        Gst.debug_set_default_threshold(self.config.args.debug)

    # Retry interval in seconds; defaults to 30 when unset/falsy.
    if 'retry' not in self.config.args or not self.config.args.retry:
        self.retry = 30
    else:
        self.retry = float(self.config.args.retry)

    # Start the zeroconf thread
    if self.config.args.zeroconf:
        self.zeroconf = StreamAdvert(self.config)
        self.zeroconf.start()
    else:
        self.zeroconf = None

    self.janus = JanusInterface(self.config, self.zeroconf)
    self.janus.start()

    # Start the pipeline. Trap any errors and wait for 30sec before trying again.
    while not self._should_shutdown:
        try:
            if 'pipeline_override' in self.config.args and self.config.args.pipeline_override:
                self.logger.info("pipeline_override set, constructing manual pipeline")
                self.manualconstruct()
            else:
                self.logger.info("pipeline_override is not set, auto-constructing pipeline")
                self.autoconstruct()
        except ValueError as e:
            self.logger.critical("Error constructing pipeline: {}, retrying in {} sec".format(repr(e), self.retry))
            # Inform systemd that start is complete
            #self.logger.info("Notifying systemd of startup failure")
            #self.notify.notify("ERRNO=1")
            #self.notify.notify("STATUS=Error constructing pipeline: {}".format(repr(e)))
            # NOTE(review): the indentation of the following three lines is
            # ambiguous in the source reviewed; they are placed inside the
            # except block as encountered — verify against the original file.
            # Note the READY/sys.exit(0) here makes the retry loop dead code.
            self.logger.info("Notifying systemd of startup completion")
            self.notify.notify("READY=1")
            self.notify.notify("STATUS=Manual Pipeline Initialisation Complete")
            sys.exit(0)
def main():
    """Launch the GUI and expose it for remote control over Pyro."""
    Gst.init(None)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(5)

    app = Application()

    # Publish a remote-control object for the GUI on a Pyro daemon.
    pyro_daemon = Pyro4.Daemon(host=IP)
    commander = RemoteCommander(app)
    nameserver = Pyro4.locateNS()
    object_uri = pyro_daemon.register(commander)
    nameserver.register(IP, object_uri)
    app.install_pyro_event_callback(pyro_daemon)
    print("Pyro object uri = {0}".format(object_uri))

    GObject.threads_init()
    sys.exit(app.run(sys.argv))
def __init__(self, device, size, rotation, onFatalError, mainLoop, debugLevel):
    """Build the capture pipeline for *device* and start the bus listener.

    Raises ImportError if GStreamer cannot be initialized.
    """
    if not Gst.init_check(None):
        raise ImportError
    if debugLevel > 0:
        Gst.debug_set_active(True)
        Gst.debug_set_default_threshold(debugLevel)

    self._onFatalError = onFatalError
    # NOTE(review): '_mainLop' looks like a typo of '_mainLoop'; kept as-is
    # since other methods may reference this attribute name.
    self._mainLop = mainLoop
    self._toreDownAlready = False

    #pipeline control
    self._currentPipelineState = None
    self._pipelineStateCondition = Condition()
    self._photoBinAttachDetachLock = Lock()  #Make sure attach and detach operation wait for each other to complete

    self._pipeline = Gst.Pipeline()
    self._videoSrcBin = self._getVideoSrcBin(self._pipeline, device, size, rotation)
    self._videoEncBin = self._getVideoEncBin(size, rotation)
    self._photoCaptureBin = PhotoCaptureBin(self._onNoMorePhotos)
    self._pipeline.add(self._videoEncBin.bin)
    self._pipeline.add(self._photoCaptureBin.bin)

    self._bus = self._pipeline.get_bus()
    self._bus.set_flushing(True)

    # Route interesting bus messages to their handlers.
    self._busListener = BusListener(self._bus)
    self._busListener.addListener(Gst.MessageType.ERROR, self._onBusError)
    self._busListener.addListener(Gst.MessageType.EOS, self._onBusEos)
    self._busListener.addListener(Gst.MessageType.STATE_CHANGED, self._onBusStateChanged)
    self._busListener.addListener(Gst.MessageType.REQUEST_STATE, self._onRequestState)
    self._busListener.start()
def remote_setGstDebug(self, debug):
    """
    Sets the GStreamer debugging levels based on the passed debug string.

    @since: 0.4.2
    """
    self.debug('Setting GStreamer debug level to %s' % debug)
    if not debug:
        return

    for part in debug.split(','):
        pieces = part.split(':')
        if len(pieces) == 1:
            # only a value given: apply it as the default threshold
            glob, value = None, int(pieces[0])
        elif len(pieces) == 2:
            glob, value = pieces[0], int(pieces[1])
        else:
            self.warning("Cannot parse GStreamer debug setting '%s'." % part)
            continue

        if glob:
            try:
                # value has to be an integer
                Gst.debug_set_threshold_for_name(glob, value)
            except TypeError:
                self.warning("Cannot set glob %s to value %s" % (glob, value))
        else:
            Gst.debug_set_default_threshold(value)

    self.comp.uiState.set('gst-debug', debug)
def ignore_gst_errors():
    """Context manager that silences GStreamer debug output within its scope.

    Saves the current default threshold, sets it to NONE, and restores it on
    exit — including when the managed block raises (the original skipped the
    restore on exceptions, leaking the NONE threshold).
    """
    old = Gst.debug_get_default_threshold()
    Gst.debug_set_default_threshold(Gst.DebugLevel.NONE)
    try:
        yield
    finally:
        Gst.debug_set_default_threshold(old)
def init(librarian):
    """Return a GStreamerPlayer, enabling error messages by default."""
    if Gst.debug_get_default_threshold() == Gst.DebugLevel.NONE:
        Gst.debug_set_default_threshold(Gst.DebugLevel.ERROR)
    player = GStreamerPlayer(librarian)
    return player
def __init__(self):
    """
Constructor __init__(Gstreamer)

:since: v0.2.00
    """
    Abstract.__init__(self)
    CallbackContextMixin.__init__(self)

    self.discovery_timeout = 10
    """ Processing may take some time. Wait for this amount of seconds. """
    self._instance_lock = InstanceLock()
    """ Thread safety lock """
    self.pipeline = None
    """ GStreamer pipeline in use """
    self._glib_mainloop = None
    """ GObject mainloop """
    self.local = local()
    """ Local data handle """
    self.log_handler = NamedLoader.get_singleton("dNG.data.logging.LogHandler", False)
    """ The LogHandler is called whenever debug messages should be logged or errors happened. """
    self.metadata = None
    """ Cached metadata instance """
    self.source_url = None
    """ GStreamer source URI """

    # Load framework-specific GStreamer settings before touching Gst state.
    Settings.read_file("{0}/settings/pas_gapi_gstreamer.json".format(Settings.get("path_data")))
    Settings.read_file("{0}/settings/pas_gapi_gstreamer_caps.json".format(Settings.get("path_data")))
    Settings.read_file("{0}/settings/pas_gapi_gstreamer_mimetypes.json".format(Settings.get("path_data")))

    with Gstreamer._lock:
        gst_debug_enabled = Settings.get("pas_gapi_gstreamer_debug_enabled", False)

        # Toggle the process-wide Gst log level only when the configured flag
        # differs from the class-level cached state.
        if (Gstreamer.debug_mode != gst_debug_enabled):
            Gst.debug_set_default_threshold(Gst.DebugLevel.DEBUG if (gst_debug_enabled) else Gst.DebugLevel.NONE)
            Gstreamer.debug_mode = gst_debug_enabled
        #
    #

    discovery_timeout = float(Settings.get("pas_gapi_gstreamer_discovery_timeout", 0))
    if (discovery_timeout > 0):
        self.discovery_timeout = discovery_timeout

    self.start()
# --gst-debug-help Print available debug categories and exit # --gst-debug-level=LEVEL Default debug level from 1 (only error) to 9 (anything) or 0 for no output # --gst-debug=LIST Comma-separated list of category_name:level pairs to set specificlevels for the individual categories. Example: GST_AUTOPLUG:5,GST_ELEMENT_*:3 # --gst-debug-no-color Disable colored debugging output # --gst-debug-color-mode Changes coloring mode of the debug log. Possible modes: off, on, disable, auto, unix # --gst-debug-disable Disable debugging # --gst-plugin-spew Enable verbose plugin loading diagnostics # --gst-plugin-path=PATHS Colon-separated paths containing plugins # --gst-plugin-load=PLUGINS Comma-separated list of plugins to preload in addition to the list stored in environment variable GST_PLUGIN_PATH # --gst-disable-segtrap Disable trapping of segmentation faults during plugin loading # --gst-disable-registry-update Disable updating the registry # --gst-disable-registry-fork Disable spawning a helper process while scanning the registry ########################################################################## Gst.init(None) Gst.debug_set_active(True) Gst.debug_set_default_threshold(1) except ImportError: print( textwrap.dedent(""" ERROR: A GObject Python package was not found. Mopidy requires GStreamer to work. GStreamer is a C library with a number of dependencies itself, and cannot be installed with the regular Python tools like pip. Please see http://docs.mopidy.com/en/latest/installation/ for instructions on how to install the required dependencies. """)) raise else: Gst.init([])
def __init__(self, s1, s2, s3, s4, outType):
    """Build a 2x2 four-source RTMP mixer pipeline.

    s1..s4 are RTMP source locations; outType selects the output muxing:
    'rtmp' (flvmux -> rtmpsink) or 'hls' (mpegtsmux -> hlssink).
    Relies on self.malm() to create/link element chains and bind named
    elements as attributes (e.g. self.m, self.ma, self.smux).
    """
    if s1 is not None:
        self.source1 = s1
    if s2 is not None:
        self.source2 = s2
    if s3 is not None:
        self.source3 = s3
    if s4 is not None:
        self.source4 = s4
    self.otype = outType
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(3)
    self.mainloop = GLib.MainLoop()
    self.pipeline = Gst.Pipeline()
    self.clock = self.pipeline.get_pipeline_clock()
    self.bus = self.pipeline.get_bus()
    self.bus.add_signal_watch()
    self.bus.connect('message::error', self.on_error)

    sources = [['rtmpsrc', self.source1], ['rtmpsrc', self.source2],
               ['rtmpsrc', self.source3], ['rtmpsrc', self.source4]]

    # Video input: one decode + scale + audio branch per source.
    index = 0
    for source in sources:
        self.malm([
            [source[0], None, {'location': source[1], 'do-timestamp': 1}],
            ['queue', None, {}],
            ['decodebin', 'decoder{}'.format(index), {'use-buffering': 1}]
        ])
        decoder = getattr(self, 'decoder{}'.format(index))
        prev = None
        # Per-source video branch, scaled to one quadrant (settings.x1 x settings.y1).
        self.malm([
            ['queue', 'video_in{}'.format(index), {}],
            ['videoconvert', None, {}],
            ['videoscale', None, {}],
            ['capsfilter', None, {'caps': 'video/x-raw, width={}, height={}'.format(settings.x1, settings.y1)}],
            ['queue', 'video_out{}'.format(index), {}]
        ])
        prev = None
        # Per-source audio branch.
        self.malm([
            ['queue', 'audio_in{}'.format(index), {}],
            ['audioconvert', None, {}],
            #['audioresample', None, {}],
            #['capsfilter', None, {'caps': 'audio/x-raw, rate=44100'}],
            ['queue', 'audio_out{}'.format(index), {}]
        ])
        # Each decodebin pad goes to its own pad-added handler.
        if index == 0:
            decoder.connect('pad-added', self.__on_decoded_pad)
        elif index == 1:
            decoder.connect('pad-added', self.__on_decoded_pad1)
        elif index == 2:
            decoder.connect('pad-added', self.__on_decoded_pad2)
        elif index == 3:
            decoder.connect('pad-added', self.__on_decoded_pad3)
        index += 1

    # video mixer: composite the four quadrants, rescale/rerate, encode h264
    prev = None
    self.malm([
        ['videomixer', 'm', {}],
        ['videoscale', None, {}],
        ['capsfilter', None, {'caps': 'video/x-raw, width={}, height={}'.format(settings.width, settings.height)}],
        ['videorate', None, {}],
        ['capsfilter', None, {'caps': 'video/x-raw, framerate=30000/1001'}],
        ['queue', None, {}],
        ['x264enc', None, {'tune': 'zerolatency'}],
        ['queue', 'vmix_out', {}]
    ])

    # Position the four mixer sink pads in a 2x2 grid.
    vmix_pads = []
    vmix_pads.append(self.m.get_request_pad('sink_%u'))
    vmix_pads.append(self.m.get_request_pad('sink_%u'))
    vmix_pads.append(self.m.get_request_pad('sink_%u'))
    vmix_pads.append(self.m.get_request_pad('sink_%u'))
    vmix_pads[1].set_property('xpos', settings.x1)
    vmix_pads[2].set_property('ypos', settings.y1)
    vmix_pads[3].set_property('xpos', settings.x1)
    vmix_pads[3].set_property('ypos', settings.y1)
    # NOTE(review): a 2s offset on pad 0 only — presumably to compensate
    # source latency; verify against the intended sync behaviour.
    vmix_pads[0].set_offset(2000000000)

    # audio mixer: mix all audio branches, AAC-encode, parse
    prev = None
    self.malm([
        ['audiomixer', 'ma', {}],
        ['audioconvert', None, {}],
        ['queue', None, {}],
        ['voaacenc', None, {'bitrate': 96000}],
        ['queue', None, {'max-size-bytes': 0, 'max-size-buffers': 0, 'max-size-time': 0}],
        ['aacparse', None, {}],
        ['queue', 'amix_out', {}]
    ])

    prev = None
    if self.otype == 'rtmp':
        #flvmux for streaming to RTMP
        self.malm([
            ['flvmux', 'smux', {'streamable': 1}],
            ['rtmpsink', None, {'sync': 0, 'location': settings.outRtmp}]
        ])
    elif self.otype == 'hls':
        #mpegtsmux for streaming to HLS
        self.malm([
            ['mpegtsmux', 'smux', {}],
            ['hlssink', None, {'playlist-root': settings.outHLS,
                               'playlist-location': settings.playListLocation,
                               'location': settings.fileLocation,
                               'max-files': 20,
                               'target-duration': 8}]
        ])
    else:
        print(self.otype + ' is not supported, Exiting...')
        return

    # Link every per-source output branch into the video and audio mixers.
    index = 0
    for source in sources:
        video_out = getattr(self, 'video_out{}'.format(index))
        audio_out = getattr(self, 'audio_out{}'.format(index))
        video_out.link(self.m)
        audio_out.link(self.ma)
        index += 1
    self.amix_out.link(self.smux)
    self.vmix_out.link(self.smux)
from gi.repository import Gio
import threading

# NOTE(review): this module is Python 2 code (print statements, the
# StringIO/ConfigParser module names); it will not run under Python 3
# without porting.

GObject.threads_init()
Gst.init(None)

# Report the GObject/Gst versions in use.
print '********************************************************'
print 'GObject: '
pp.pprint(GObject.pygobject_version)
print ''
print 'Gst: '
pp.pprint(Gst.version_string())
print '********************************************************'

Gst.debug_set_active(True)
Gst.debug_set_default_threshold(3)

import StringIO
import re
import ConfigParser
import signal

from IPython.core.debugger import Tracer
from IPython.core import ultratb

# Drop into a verbose IPython post-mortem debugger on any uncaught exception.
sys.excepthook = ultratb.FormattedTB(mode='Verbose', color_scheme='Linux', call_pdb=True, ostream=sys.__stdout__)