def __init__(self, vidSocket):
    """Build a GTK window and a GStreamer pipeline that renders an
    RTP/H.264 video stream arriving on *vidSocket*.

    Pipeline: udpsrc -> rtpjitterbuffer -> rtph264depay -> avdec_h264
              -> autovideosink

    Args:
        vidSocket: a connected Python UDP socket carrying the RTP stream.
    """
    # Keep a reference to the UDP socket carrying the video stream.
    self.vidSocket = vidSocket

    # Set up the window and drawing area the video will be rendered into.
    self.window = Gtk.Window()
    self.window.connect('destroy', self.quit)
    self.window.set_default_size(800, 450)
    self.drawingarea = Gtk.DrawingArea()
    self.window.add(self.drawingarea)

    # Create the GStreamer pipeline.
    self.pipeline = Gst.Pipeline()

    # Source: read RTP packets from the already-connected UDP socket.
    self.udpsrc = Gst.ElementFactory.make('udpsrc', 'udpsrc')
    self.pipeline.add(self.udpsrc)
    # BUGFIX: Gio.Socket.new_from_fd is a constructor — call it on the
    # class, not on a throwaway Gio.Socket() instance.
    self.udpsrc.set_property(
        'socket', Gio.Socket.new_from_fd(self.vidSocket.fileno()))

    # Reorder and smooth out incoming RTP packets.
    self.buffer = Gst.ElementFactory.make('rtpjitterbuffer', None)
    self.pipeline.add(self.buffer)

    # Strip RTP framing to recover the H.264 elementary stream.
    self.depay = Gst.ElementFactory.make('rtph264depay', None)
    self.pipeline.add(self.depay)

    # Software H.264 decoder.
    self.avdec_h264 = Gst.ElementFactory.make('avdec_h264', None)
    self.pipeline.add(self.avdec_h264)

    # Automatically chosen platform video sink.
    self.vidSink = Gst.ElementFactory.make('autovideosink', None)
    self.pipeline.add(self.vidSink)

    # Link all pipeline elements that can be linked without a connection.
    # The src->jitterbuffer link is caps-filtered so the raw UDP payload
    # is interpreted as RTP with dynamic payload type 96 (H.264).
    self.udpsrc.link_filtered(
        self.buffer,
        Gst.caps_from_string("application/x-rtp, payload=96"))
    self.buffer.link(self.depay)
    self.depay.link(self.avdec_h264)
    self.avdec_h264.link(self.vidSink)

    # Create bus to get events from the GStreamer pipeline.
    self.bus = self.pipeline.get_bus()
    self.bus.add_signal_watch()
    self.bus.connect('message::error', self.on_error)

    # This is needed to make the video output in the drawing area.
    self.bus.enable_sync_message_emission()
    self.bus.connect('sync-message::element', self.on_sync_message)
def __init__(self, vidSocket):
    """Build a GTK window and a GStreamer pipeline that decodes H.264
    video arriving on *vidSocket* and renders it via XVideo.

    Pipeline: udpsrc -> queue -> avdec_h264 -> videoconvert -> xvimagesink

    NOTE(review): there is no depayloader/demuxer between udpsrc and the
    decoder, so this only works if the socket delivers a bare H.264
    elementary stream — confirm against the sender.

    Args:
        vidSocket: a connected Python UDP socket carrying the video data.
    """
    # Keep a reference to the UDP socket carrying the video stream.
    self.vidSocket = vidSocket

    # Set up the window and drawing area the video will be rendered into.
    self.window = Gtk.Window()
    self.window.connect('destroy', self.quit)
    self.window.set_default_size(800, 450)
    self.drawingarea = Gtk.DrawingArea()
    self.window.add(self.drawingarea)

    # Create the GStreamer pipeline.
    self.pipeline = Gst.Pipeline()

    # Source: read datagrams from the already-connected UDP socket.
    self.udpsrc = Gst.ElementFactory.make('udpsrc', 'udpsrc')
    self.pipeline.add(self.udpsrc)
    # BUGFIX: Gio.Socket.new_from_fd is a constructor — call it on the
    # class, not on a throwaway Gio.Socket() instance.
    self.udpsrc.set_property(
        'socket', Gio.Socket.new_from_fd(self.vidSocket.fileno()))

    # Buffer incoming data between source and decoder.
    self.queue = Gst.ElementFactory.make('queue', 'queue')
    self.pipeline.add(self.queue)

    # Software H.264 decoder.
    self.avdec_h264 = Gst.ElementFactory.make('avdec_h264', None)
    self.pipeline.add(self.avdec_h264)

    # Convert decoder output to a format the sink accepts.
    self.videoconvert = Gst.ElementFactory.make('videoconvert', None)
    self.pipeline.add(self.videoconvert)

    # XVideo sink; named so on_sync_message can attach it to the window.
    self.xvimagesink = Gst.ElementFactory.make('xvimagesink', 'videosink')
    self.pipeline.add(self.xvimagesink)

    # Link all pipeline elements that can be linked without a connection.
    self.udpsrc.link(self.queue)
    self.queue.link(self.avdec_h264)
    self.avdec_h264.link(self.videoconvert)
    self.videoconvert.link(self.xvimagesink)

    # Create bus to get events from the GStreamer pipeline.
    self.bus = self.pipeline.get_bus()
    self.bus.add_signal_watch()
    self.bus.connect('message::error', self.on_error)

    # This is needed to make the video output in the drawing area.
    self.bus.enable_sync_message_emission()
    self.bus.connect('sync-message::element', self.on_sync_message)
# Create socket which will send a keep alive message for the live data stream data_socket = mksock(peer) td = threading.Timer(0, send_keepalive_msg, [data_socket, KA_DATA_MSG, peer]) td.start() # Create socket which will send a keep alive message for the live video stream video_socket = mksock(peer) tv = threading.Timer(0, send_keepalive_msg, [video_socket, KA_VIDEO_MSG, peer]) tv.start() # Create gstreamer pipeline and connect live video socket to it pipeline = Gst.Pipeline() udpsrc = Gst.ElementFactory.make('udpsrc', 'src') udpsrc.set_property('socket', Gio.Socket().new_from_fd(video_socket.fileno())) pipeline.add(udpsrc) tsparse = Gst.ElementFactory.make('tsparse', None) pipeline.add(tsparse) demux = Gst.ElementFactory.make('tsdemux', None) pipeline.add(demux) queue = Gst.ElementFactory.make('queue', None) pipeline.add(queue) h264 = Gst.ElementFactory.make('avdec_h264', None) pipeline.add(h264) videoConvert = Gst.ElementFactory.make('videoconvert', None)
def __init__(self, vidSocket):
    """Build a GTK window and a GStreamer pipeline that decodes an
    MPEG-TS/H.264 video stream arriving on *vidSocket*.

    Pipeline: udpsrc -> queue -> tsparse -> tsdemux -(pad-added)->
              h264parse -> avdec_h264 -> videoconvert -> xvimagesink

    tsdemux exposes its video pad dynamically, so the demux->h264parse
    link is presumably completed in self.demux_added — verify there.

    Args:
        vidSocket: a connected Python UDP socket carrying the TS stream.
    """
    # Keep a reference to the UDP socket carrying the video stream.
    self.vidSocket = vidSocket

    # Set up the window and drawing area the video will be rendered into.
    self.window = Gtk.Window()
    self.window.connect('destroy', self.quit)
    self.window.set_default_size(800, 450)
    self.drawingarea = Gtk.DrawingArea()
    self.window.add(self.drawingarea)

    # Create the GStreamer pipeline.
    self.pipeline = Gst.Pipeline()

    # Source: read datagrams from the already-connected UDP socket.
    self.udpsrc = Gst.ElementFactory.make('udpsrc', 'udpsrc')
    self.pipeline.add(self.udpsrc)
    # BUGFIX: Gio.Socket.new_from_fd is a constructor — call it on the
    # class, not on a throwaway Gio.Socket() instance.
    self.udpsrc.set_property(
        'socket', Gio.Socket.new_from_fd(self.vidSocket.fileno()))

    # Buffer incoming data; min-threshold-time is in nanoseconds.
    self.queue = Gst.ElementFactory.make('queue', 'queue')
    self.pipeline.add(self.queue)
    self.queue.set_property('min-threshold-time', 100000)

    # Parse the MPEG-2 transport stream into packets of set size.
    self.tsparse = Gst.ElementFactory.make('tsparse', None)
    self.pipeline.add(self.tsparse)

    # Demultiplex the TS; video pad appears dynamically via pad-added.
    self.demux = Gst.ElementFactory.make('tsdemux', None)
    self.demux.connect("pad-added", self.demux_added)
    self.pipeline.add(self.demux)

    # Re-frame the H.264 elementary stream for the decoder.
    self.h264parse = Gst.ElementFactory.make('h264parse', None)
    self.pipeline.add(self.h264parse)

    # Software H.264 decoder.
    self.avdec_h264 = Gst.ElementFactory.make('avdec_h264', None)
    self.pipeline.add(self.avdec_h264)

    # Convert decoder output to a format the sink accepts.
    self.videoconvert = Gst.ElementFactory.make('videoconvert', None)
    self.pipeline.add(self.videoconvert)

    # XVideo sink; named so on_sync_message can attach it to the window.
    self.xvimagesink = Gst.ElementFactory.make('xvimagesink', 'videosink')
    self.pipeline.add(self.xvimagesink)
    # BUGFIX: the property expects a gboolean; the original passed the
    # *string* 'FALSE', which PyGObject coerces truthy — i.e. sync=True,
    # the opposite of the intent. Pass the real boolean False to render
    # frames as fast as they arrive.
    self.xvimagesink.set_property('sync', False)

    # Link all pipeline elements that can be linked without a connection
    # (demux -> h264parse is deferred to the pad-added callback).
    self.udpsrc.link(self.queue)
    self.queue.link(self.tsparse)
    self.tsparse.link(self.demux)
    self.h264parse.link(self.avdec_h264)
    self.avdec_h264.link(self.videoconvert)
    self.videoconvert.link(self.xvimagesink)

    # Create bus to get events from the GStreamer pipeline.
    self.bus = self.pipeline.get_bus()
    self.bus.add_signal_watch()
    self.bus.connect('message::error', self.on_error)

    # This is needed to make the video output in the drawing area.
    self.bus.enable_sync_message_emission()
    self.bus.connect('sync-message::element', self.on_sync_message)