def graph_pipeline(self):
     Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                               "pipeline")
     try:
         os.system("dot -Tpng -o /tmp/pipeline.png /tmp/pipeline.dot")
     except Exception as e:
         print(e)
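Note on all of the examples below: Gst.debug_bin_to_dot_file() only writes a file if the GST_DEBUG_DUMP_DOT_DIR environment variable was set when GStreamer was initialized; otherwise the call is effectively a no-op and the os.system() conversion above finds no /tmp/pipeline.dot. A minimal, self-contained sketch (the pipeline description here is only an illustration, not taken from any example):

# Minimal sketch: set the dump directory before Gst.init(), then dump the graph.
import os
import gi

os.environ.setdefault("GST_DEBUG_DUMP_DOT_DIR", "/tmp")  # must be set before Gst.init()

gi.require_version("Gst", "1.0")
from gi.repository import Gst

Gst.init(None)
pipeline = Gst.parse_launch("videotestsrc ! autovideosink")
pipeline.set_state(Gst.State.PLAYING)

# Writes /tmp/pipeline.dot; GStreamer appends the ".dot" extension itself.
Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails.ALL, "pipeline")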
Example #2
    def __handle_error_message(self, message):
        # Error handling code is from quodlibet
        gerror, debug_info = message.parse_error()
        message_text = ""
        if gerror:
            message_text = gerror.message.rstrip(".")

        if message_text == "":
            # The most readable part is always the last..
            message_text = debug_info[debug_info.rfind(':') + 1 :]

            # .. unless there's nothing in it.
            if ' ' not in message_text:
                if debug_info.startswith('playsink'):
                    message_text += _(
                        ': Possible audio device error, is it plugged in?'
                    )

        self.logger.error("Playback error: %s", message_text)
        self.logger.debug("- Extra error info: %s", debug_info)

        envname = 'GST_DEBUG_DUMP_DOT_DIR'
        if envname not in os.environ:
            import xl.xdg

            os.environ[envname] = xl.xdg.get_logs_dir()

        Gst.debug_bin_to_dot_file(self.playbin, Gst.DebugGraphDetails.ALL, self.name)
        self.logger.debug(
            "- Pipeline debug info written to file '%s/%s.dot'",
            os.environ[envname],
            self.name,
        )

        self.engine._error_func(self, message_text)
Example #3
    def source_removed_cb (self, source):
        logging.debug('SOURCE REMOVED CB %s', source)
        if source in self.pipeline.children:
            self.pipeline.remove(source)
        logging.debug('SOURCE BIN REMOVED FROM PIPELINE OK')
        for coll in [self._to_remove, self.audio_avg, self.audio_peak]:
            try:
                coll.pop(source)
            except KeyError:
                pass
        logging.debug('SOURCE BIN REMOVED POP FROM COLL OK')

        for idx, sink in enumerate(self.preview_sinks):
            if sink in source:
                self.preview_sinks.pop(idx)
                break
        logging.debug('SOURCE BIN REMOVED SINK POP OK')

        self.pipeline.set_state(Gst.State.PLAYING)
        self.pipeline.recalculate_latency()

        self.emit('source-disconnected', source)

        logging.debug('SOURCE BIN REMOVED OK')
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.NON_DEFAULT_PARAMS | Gst.DebugGraphDetails.MEDIA_TYPE , 'debug_core_source_removed')
Example #4
    def bus_state_changed_cb(self, bus, msg, arg=None):
        if msg.src != self.pipeline:
            return True
        prev, new, pending = msg.parse_state_changed()
        curr_state = [prev, new, pending]
        if new != self._last_state[1]:
            self.emit('state-changed', prev, new, pending)

            name = {
                Gst.State.PLAYING: 'playing',
                Gst.State.PAUSED: 'paused',
                Gst.State.NULL: 'stopped',
            }.get(new, None)
            if name:
                self.emit(name)

            if new not in [Gst.State.PAUSED, Gst.State.PLAYING]:
                self.emit('position', 0)

        self.pipeline.get_by_name('vscale').set_property('add-borders', True)

        if new == Gst.State.PLAYING:
            Gst.debug_bin_to_dot_file(
                self.pipeline, Gst.DebugGraphDetails.NON_DEFAULT_PARAMS
                | Gst.DebugGraphDetails.MEDIA_TYPE
                | Gst.DebugGraphDetails.CAPS_DETAILS,
                'debug_interplayer_start')
        logging.debug('InterPlayer STATE CHANGE: %s', curr_state)
        self._last_state = curr_state

        return True
Example #5
    def __handle_error_message(self, message):
        # Error handling code is from quodlibet
        gerror, debug_info = message.parse_error()
        message_text = ""
        if gerror:
            message_text = gerror.message.rstrip(".")

        if message_text == "":
            # The most readable part is always the last..
            message_text = debug_info[debug_info.rfind(':') + 1:]

            # .. unless there's nothing in it.
            if ' ' not in message_text:
                if debug_info.startswith('playsink'):
                    message_text += _(
                        ': Possible audio device error, is it plugged in?')

        self.logger.error("Playback error: %s", message_text)
        self.logger.debug("- Extra error info: %s", debug_info)

        envname = 'GST_DEBUG_DUMP_DOT_DIR'
        if envname not in os.environ:
            import xl.xdg

            os.environ[envname] = xl.xdg.get_logs_dir()

        Gst.debug_bin_to_dot_file(self.playbin, Gst.DebugGraphDetails.ALL,
                                  self.name)
        self.logger.debug(
            "- Pipeline debug info written to file '%s/%s.dot'",
            os.environ[envname],
            self.name,
        )

        self.engine._error_func(self, message_text)
Example #6
    def build_pipeline(self, channels, sinkname, samplerate, srcname,
                       parse_element='wavparse'):
        self.channels = channels
        self.srcname = srcname
        self.sink = self.make_add_link(sinkname, None)
        self.classifier = self.make_add_link('classify', self.sink)
        self.capsfilter = self.make_add_link('capsfilter', self.classifier)
        self.interleave = self.make_add_link('interleave', self.capsfilter)
        self.sources = []
        for i in range(channels):
            ac = self.make_add_link('audioconvert', self.interleave)
            ar = self.make_add_link('audioresample', ac)
            if srcname == 'filesrc':
                wp = self.make_add_link(parse_element, ar)
                fs = self.make_add_link(srcname, wp)
            else:
                cf = self.make_add_link('capsfilter', ar)
                cf.set_property("caps", Gst.caps_from_string("audio/x-raw, "
                                                             "layout=(string)interleaved, "
                                                             "channel-mask=(bitmask)0x0, "
                                                             "rate=%d, channels=1"
                                                             % (samplerate,)))
                fs = self.make_add_link(srcname, cf)
            self.sources.append(fs)

        caps =  Gst.caps_from_string("audio/x-raw, "
                                     "layout=(string)interleaved, "
                                     "channel-mask=(bitmask)0x0, "
                                     "rate=%d, channels=%d"
                                     % (samplerate, channels))
        self.capsfilter.set_property("caps", caps)
        if 0:
            Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                      "pipeline.dot")
Example #8
    def stream_stop(self):
        if self.stream_state == 'stopped': return

        print('stopping stream')
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, 'slapdash')
        
        self.pipeline.send_event(Gst.Event.new_eos())
Example #9
 def _add_src():
     self.previews[source] = preview
     preview.set_source(source)
     self.app.add_input_source(source)
     Gst.debug_bin_to_dot_file(
         app.pipeline, Gst.DebugGraphDetails.NON_DEFAULT_PARAMS
         | Gst.DebugGraphDetails.MEDIA_TYPE
         | Gst.DebugGraphDetails.CAPS_DETAILS, 'source_added_cb')
Example #10
    def add_audio_insert(self, source):
        self.pipeline.add(source)
        self.audio_inserts.append(source)

        source.link_filtered(self.insert_mixer, AUDIO_CAPS)

        source.initialize()
        source.sync_state_with_parent()
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.NON_DEFAULT_PARAMS | Gst.DebugGraphDetails.MEDIA_TYPE , 'debug_add_insert')
    def init_request(self, id, caps_str):
        """
        Sets appsrc caps property for new request, changes filesink location property accordin to outdir, 
        starts transitioning pipeline to PLAYING state, and pushes empty buffer to appcrs.
        """
        self.request_id = id
        self.log.info("Initializing request: %s" % (self.request_id))

        # caps (capabilities) is media type (or content type)
        if caps_str and len(caps_str) > 0:
            self.appsrc.set_property('caps', Gst.caps_from_string(caps_str))
            self.log.info(
                "Set appsrc property: %s = %s" %
                ('caps', self.appsrc.get_property('caps').to_string()))
        else:
            self.appsrc.set_property('caps', None)

        # make sure decoder is not silent
        self.asr.set_property('silent', False)

        if self.outdir:  # change filesink location property /dev/null -> outdir
            self.pipeline.set_state(Gst.State.PAUSED)
            self.filesink.set_state(Gst.State.NULL)
            self.filesink.set_property(
                'location', '%s/%s.raw' % (self.outdir, self.request_id))
            self.filesink.set_state(Gst.State.PLAYING)

        ret = self.pipeline.set_state(Gst.State.PLAYING)
        if ret == Gst.StateChangeReturn.FAILURE:
            print("ERROR: Unable to set the pipeline to the PLAYING state",
                  file=sys.stderr)
            sys.exit(-1)
        else:
            self.log.info("Setting pipeline to PLAYING: %s" %
                          (Gst.Element.state_change_return_get_name(ret)))

        ret = self.filesink.set_state(Gst.State.PLAYING)
        if ret == Gst.StateChangeReturn.FAILURE:
            print("ERROR: Unable to set the filesink to the PLAYING state",
                  file=sys.stderr)
            sys.exit(-1)
        else:
            self.log.info("Setting filesink to PLAYING: %s" %
                          (Gst.Element.state_change_return_get_name(ret)))
        # Create a new empty buffer
        #buf = Gst.Buffer.new_allocate(None, 0, None)
        #if buf:
        #    self.log.info("Pushing empty buffer to pipeline")
        #    # Push empty buffer into the appsrc (to avoid hang on client disconnect)
        #    self.appsrc.emit('push-buffer', buf)

        self.finished = False

        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                  '%s_init' % self.request_id)
Example #12
 def __init_preview(self):
     self.preview = XVideoContainer(self.mixer_have_audio,self.mixer_have_video,
                                    self.host_mixer,self.port_mixer,
                                    self.__the_window.WidgetVistaPrevia.winId(),False)
     
     Gst.debug_bin_to_dot_file (self.preview.pipeline,Gst.DebugGraphDetails.ALL, "preview_mixer_creado")
     
     self.preview.play_pipeline()
     self.add_preview(self.mixer_have_audio,self.mixer_have_video,
                                    self.host_mixer,self.port_mixer,False)
 def on_debug_activate(self):
     dotfile = "/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug/generator-listener.dot"
     pngfile = "/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug/generator-listener-pipeline.png"  # NOQA
     if os.access(dotfile, os.F_OK):
         os.remove(dotfile)
     if os.access(pngfile, os.F_OK):
         os.remove(pngfile)
     Gst.debug_bin_to_dot_file(self.pipelines_stack[0],
                               Gst.DebugGraphDetails.ALL, "generator-listener")
     os.system('/usr/bin/dot' + " -Tpng -o " + pngfile + " " + dotfile)
Example #14
    def __init__(self, **kwargs):
        pipeline = Gst.Pipeline(name="wfdstream")

        wfdbin = Gst.Bin()
        self._build_pipeline(wfdbin)
        Gst.debug_bin_to_dot_file(wfdbin,
                                  Gst.DebugGraphDetails.MEDIA_TYPE,
                                  "wfdmedia-stream")
        pipeline.add(wfdbin)

        super().__init__(element=pipeline, **kwargs)
Example #15
    def __init__(self):
        self.pipeline = Gst.Pipeline()
        # building the pipeline from the sink back to the source, I think
        sink = self.make_add_link('xvimagesink')
        videoconvert = self.make_add_link('videoconvert', sink)
        theoradec = self.make_add_link('theoradec', videoconvert)
        oggdemux = self.make_add_link('oggdemux', theoradec)
        self.filesrc = self.make_add_link('filesrc', oggdemux, 'src')
        self.filesrc.set_property('location', 'video.ogv')

        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, 'pipeline.dot')
Example #16
    def run(self):
        self.pipeline.set_state(Gst.State.PLAYING)
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, 'tx-graph')
        
        while self.caps == None:
            caps = self.transport.get_by_name('udpsink').get_static_pad('sink').get_property('caps')

            if caps == None:
                print('Waiting for audio interface/caps')
                time.sleep(0.1)
            else:
                self.caps = caps.to_string()
Example #17
 def on_debug_activate(self, name):
     print('do debug image')
     dotfile = "/tmp/" + name + ".dot"
     pdffile = "/tmp/" + name + ".pdf"
     if os.access(dotfile, os.F_OK):
         os.remove(dotfile)
     if os.access(pdffile, os.F_OK):
         os.remove(pdffile)
     Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, name)
     try:
         os.system("dot -Tpdf -o " + pdffile + " " + dotfile)
     except os.error:
         print("The debug feature requires graphviz (dot) to be installed.")
Example #18
 def dump_dot_file(self, basename='pipeline'):
     directory = os.environ.get('GST_DEBUG_DUMP_DOT_DIR', os.getcwd())
     if directory:
         dotfile = os.path.join(directory, '{0}.dot'.format(basename))
         if os.path.isfile(dotfile):
             logger.debug('Removing existing dotfile {0}'.format(dotfile))
             os.remove(dotfile)
         logger.debug('Dumping graph to {0}'.format(dotfile))
         Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, basename)
         return dotfile
     else:
          logger.error('You need to define the GST_DEBUG_DUMP_DOT_DIR env var to dump a .dot graph of the running pipeline')
         return None
    def dump_graph(self, output_dir="."):
        """
        Produces .dot and .pdf file of pipeline graph.

        :return: None
        """
        dotfile = os.path.join(output_dir, self.name + ".dot")
        if os.access(dotfile, os.F_OK):
            os.remove(dotfile)
        pdffile = os.path.join(output_dir, self.name + ".pdf")
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                  self.name)
        os.system("dot -Tpdf " + dotfile + " -o " + pdffile)
Example #21
    def __init__(self, loop, pipeline_desc):
        desc = pipeline_desc.format(fd=1)
        logging.info('pipeline description: %s' % desc)
        self.pipeline = Gst.Pipeline()
        self.bin = Gst.parse_bin_from_description(desc, False)
        self.pipeline.add(self.bin)
        self.stream_sink = MultiFdSink(
            self.pipeline.get_by_name('stream_sink'), name='stream')
        self.tee = self.pipeline.get_by_name('t1')
        self.mjpeg_bin = None
        loop.create_task(watch_bus(self.pipeline.get_bus()))

        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                  "graph.dot")
Example #22
 def on_debug_activate(self):
     fn = 'pipeline-debug-graph'
     fn_dot = "%s/%s.dot" % (os.environ.get("GST_DEBUG_DUMP_DOT_DIR"), fn)
     fn_png = "%s/%s.png" % (os.environ.get("GST_DEBUG_DUMP_DOT_DIR"), fn)
     Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, fn)
     try:
         os.system("dot -Tpng %s > %s" % (fn_dot, fn_png))
         print('Pipeline graph written to %s' % fn_png)
         #Gtk.show_uri(None, "file://%s" % fn_png, 0)
     except Exception:
         print('Failure!')
         # check if graphviz is installed with a simple test
         if os.system('which dot'):
             print("Graphviz does not seem to be installed.")
 def _on_state_changed(self, bus, msg):
     states = msg.parse_state_changed()
     # To state is PLAYING
     if msg.src.get_name() == "pipeline" and states[1] == 4:
         dotfile = "/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug/generator-player.dot"
         pngfile = "/home/pi/dev/bossjones-github/scarlett-dbus-poc/_debug/generator-player-pipeline.png"  # NOQA
         if os.access(dotfile, os.F_OK):
             os.remove(dotfile)
         if os.access(pngfile, os.F_OK):
             os.remove(pngfile)
         Gst.debug_bin_to_dot_file(msg.src,
                                   Gst.DebugGraphDetails.ALL, "generator-player")
         os.system('/usr/bin/dot' + " -Tpng -o " + pngfile + " " + dotfile)
         print("pipeline dot file created in " +
               os.getenv("GST_DEBUG_DUMP_DOT_DIR"))
Example #24
 def on_debug_activate(self):
         n = self.n_peers
         fn = 'pipeline-bug-graph' 
         fn_dot = "%s/%s.dot" % (os.environ.get("GST_DEBUG_DUMP_DOT_DIR"), fn)
         fn_pdf = "%s/%s%s.pdf" % (os.environ.get("GST_DEBUG_DUMP_DOT_DIR"), fn,str(n))
         Gst.debug_bin_to_dot_file(self.pipe, Gst.DebugGraphDetails.ALL, fn)
         try:
             os.system("dot -Tpdf %s > %s" % (fn_dot, fn_pdf))
              print('Pipeline graph written to %s' % fn_pdf)
          except Exception:
             print('Failure!')
             # check if graphviz is installed with a simple test
             if os.system('which dot'):
                 print("Graphviz does not seem to be installed.")   
         self.n_peers += 1
Example #25
def bin_to_pdf(
    bin_: Gst.Bin,
    details: Gst.DebugGraphDetails,
    filename: str,
) -> Optional[str]:
    """
    Dump a Gst.Bin to pdf using 
    `Gst.debug_bin_to_dot_file <https://lazka.github.io/pgi-docs/Gst-1.0/functions.html#Gst.debug_bin_to_dot_file>`_
    and graphviz.
    Will launch the 'dot' subprocess in the background with Popen.
    Does not check whether the process completes, but a .dot is
    created in any case. Has the same arguments as 
    `Gst.debug_bin_to_dot_file <https://lazka.github.io/pgi-docs/Gst-1.0/functions.html#Gst.debug_bin_to_dot_file>`_

    :returns: the path to the created file (.dot or .pdf) or None if
              GST_DEBUG_DUMP_DOT_DIR not found in os.environ

    :arg bin_: the bin to make a .pdf visualization of
    :arg details: a Gst.DebugGraphDetails choice (see gstreamer docs)
    :arg filename: a base filename to use (not full path, with no extension)
         usually this is the name of the bin you can get with some_bin.name
    """
    if 'GST_DEBUG_DUMP_DOT_DIR' in os.environ:
        dot_dir = os.environ['GST_DEBUG_DUMP_DOT_DIR']
        dot_file = os.path.join(dot_dir, f'{filename}.dot')
        pdf_file = os.path.join(dot_dir, f'{filename}.pdf')
        logger.debug(f"writing {bin_.name} to {dot_file}")
        Gst.debug_bin_to_dot_file(bin_, details, filename)
        dot_exe = shutil.which('dot')
        if dot_exe:
            logger.debug(f"converting {os.path.basename(dot_file)} to "
                         f"{os.path.basename(pdf_file)} in background")
            command = ('nohup', dot_exe, '-Tpdf', dot_file, f'-o{pdf_file}')
            logger.debug(f"running: {' '.join(command)}")
            subprocess.Popen(
                command,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                preexec_fn=os.setpgrp,
            )
        else:
            logger.warning(
                f'graphviz does not appear to be installed, so cannot convert '
                f'{dot_file} to pdf. You can install graphviz with '
                f'"sudo apt install graphviz" on Linux for Tegra or Ubuntu.')
            return dot_file
        return pdf_file
    return None
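The docstring above already describes the calling convention; a hypothetical usage sketch follows (the pipeline built here is an assumption for illustration, and GST_DEBUG_DUMP_DOT_DIR must be set before Gst.init() for anything to be written):

# Hypothetical use of bin_to_pdf() from the example above.
pipeline = Gst.parse_launch("audiotestsrc ! fakesink")
pipeline.set_state(Gst.State.PLAYING)
out_path = bin_to_pdf(pipeline, Gst.DebugGraphDetails.ALL, pipeline.get_name())
if out_path is None:
    print("GST_DEBUG_DUMP_DOT_DIR is not set; nothing was dumped")
else:
    print("graph written to %s (pdf conversion may still be running)" % out_path)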
    def pad_added_handler(self, element, pad):
        """
        'pad-added' signal callback
        """
        self.log.info("Got 'pad-added' signal from %s" % (element.get_name()))
        if element is self.decodebin:
            if self.use_cutter:
                # Link decodebin's src pad to cutter's sink
                cutter_pad = self.cutter.get_static_pad('sink')
                if not cutter_pad.is_linked():
                    if pad.link(cutter_pad) is not Gst.PadLinkReturn.OK:
                        print(
                            "ERROR: 'decodebin' and 'cutter' could not be linked",
                            file=sys.stderr)
                        self.pipeline.set_state(Gst.State.NULL)
                        sys.exit(-1)
                    else:
                        self.log.info("Linked 'decodebin' to 'cutter'")
                else:
                    self.log.warning("cutter's sink pad is already linked")
            else:
                # Link decodebin's src pad to audioconvert's sink
                audioconvert_pad = self.audioconvert.get_static_pad('sink')
                if not audioconvert_pad.is_linked():
                    if pad.link(audioconvert_pad) is not Gst.PadLinkReturn.OK:
                        print(
                            "ERROR: 'decoder' and 'audioconvert' could not be linked",
                            file=sys.stderr)
                        self.pipeline.set_state(Gst.State.NULL)
                        sys.exit(-1)
                    else:
                        self.log.info("Linked 'decodebin' to 'audioconvert'")
                else:
                    self.log.warning(
                        "audioconvert's sink pad is already linked")

            ret = self.decodebin.set_state(Gst.State.PLAYING)
            if ret == Gst.StateChangeReturn.FAILURE:
                print("ERROR: Unable to set decodebin to the PLAYING state",
                      file=sys.stderr)
                self.pipeline.set_state(Gst.State.NULL)
                sys.exit(-1)
            else:
                self.log.info("Setting decodebin to PLAYING: %s" %
                              (Gst.Element.state_change_return_get_name(ret)))

            Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                      '%s_decodebin' % self.request_id)
Example #27
    def _start_record_ok(self, sink, *data):
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.NON_DEFAULT_PARAMS | Gst.DebugGraphDetails.MEDIA_TYPE , 'record_started%d' % len(self._rec_ok))
        logging.debug('got ready-to-record from %s', sink)
        logging.debug('got ready-to-record, ok count: %d', len(self._rec_ok))

        ok = True
        for el in self._rec_ok:
            if not el.ready_to_record:
                ok = False

        if ok:
            logging.debug('RECORD , GOING TO PLAYING: %s', self.pipeline.set_state(Gst.State.PLAYING))
            for el in self._rec_ok:
                logging.debug('RECORD, SETTING INPUT %s TO PLAYING: %s', el, el.set_state(Gst.State.PLAYING))
            self.pipeline.recalculate_latency()
            self.emit('record-started')
Example #28
    def add_preview(self,have_audio,have_video,host,port,sync=True):    

        widget = DockWidget_Fuente()
        source_preview = XVideoContainer(have_audio,have_video,host,port,
                                        widget.the_widget.WidgetFuenteVideo.winId(),sync)
        
        self.__the_window.LayoutFuentes.addWidget(widget,0,self.preview_number)
        
              
        source_preview.play_pipeline()
        self.previews.append(source_preview)
        
        Gst.debug_bin_to_dot_file (source_preview.pipeline,Gst.DebugGraphDetails.ALL, 
                                   "preview-"+str(self.preview_number)+"_creado")
        Gst.debug_bin_to_dot_file (self.mixer.pipeline,Gst.DebugGraphDetails.ALL, 
                                   "mixer-preview-"+str(self.preview_number)+"_creado")
        self.preview_number = self.preview_number + 1
def gen_pipe_dot(pipeline, name):
    """Generate dot graph"""

    date_time = datetime.now().strftime("%Y%m%d_%H%M%S")
    dot_filename = "{}_pipeline_{}".format(name, date_time)

    # Print debug info
    print("GST_DEBUG_DUMP_DOT_DIR=" + os.environ["GST_DEBUG_DUMP_DOT_DIR"])

    print(
        "dot {} graph file: {}/{}.dot".format(
            name, os.environ["GST_DEBUG_DUMP_DOT_DIR"], dot_filename
        )
    )

    # dot graph generation
    Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails.ALL, dot_filename)

    return
Example #30
    def do_setup(self, self_display_sink, other_sink):
        if app.settings.get('video_see_self'):
            tee = ('! tee name=split ! queue name=self-display-queue split. ! '
                   'queue name=network-queue')
        else:
            tee = ''

        self.sink = other_sink
        self.pipeline.add(self.sink)

        self.src_bin = self.make_bin_from_config('video_input_device',
                                                 '%%s %s' % tee,
                                                 _('video input'))

        self.pipeline.add(self.src_bin)
        if app.settings.get('video_see_self'):
            self.pipeline.add(self_display_sink)
            self_display_queue = self.src_bin.get_by_name('self-display-queue')
            self_display_queue.get_static_pad('src').link_maybe_ghosting(
                self_display_sink.get_static_pad('sink'))

        self.src_bin.get_static_pad('src').link(
            self.p2psession.get_property('sink-pad'))

        # The following is needed for farstream to process ICE requests:
        self.pipeline.set_state(Gst.State.PLAYING)

        if log.getEffectiveLevel() == logging.DEBUG:
            # Use 'export GST_DEBUG_DUMP_DOT_DIR=/tmp/' before starting Gajim
            timestamp = datetime.now().strftime('%m-%d-%Y-%H-%M-%S')
            name = f'video-graph-{timestamp}'
            debug_dir = os.environ.get('GST_DEBUG_DUMP_DOT_DIR')
            name_dot = f'{debug_dir}/{name}.dot'
            name_png = f'{debug_dir}/{name}.png'
            Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                      name)
            if debug_dir:
                try:
                    os.system(f'dot -Tpng {name_dot} > {name_png}')
                except Exception:
                    log.debug('Could not save pipeline graph. Make sure '
                              'graphviz is installed.')
Example #31
def mixer_check_cb(*user_data):

    global new_client

    if len(new_client) > 0 and len(clients) >= 2:

        print("\nnew client(s), checking mixer links... ", end="")

        # only try to link things when all clients have all stream decoders in place
        for c in clients:
            if not clients[c].ready():
                print("not all decoders in place yet, waiting.")
                return True  #GLib.SOURCE_CONTINUE

        print("all client decoders ready.")

        create_frontmixer_queue()

        ssrc = new_client.pop(0)
        print("  setting up mixers for new client " + ssrc)

        # create surface/audio mixers for _all_ clients that don't have one yet
        # needs to loop through all clients for the case where 2 or more clients
        # appear simultaneously, otherwise there are no mixers to link to
        for c in clients:
            clients[c].create_mixers()

        # add missing frontmixer links
        clients[ssrc].link_to_front()

        # add missing surface/audio mixer links
        clients[ssrc].link_all_streams()

        # write out debug dot file (needs envvar GST_DEBUG_DUMP_DOT_DIR set)
        Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails(15), "debug")

    # re-schedule the callback if there are more new clients to process
    if len(new_client) > 0:
        GLib.timeout_add(1000, mixer_check_cb, None)

    return False  # GLib.SOURCE_REMOVE
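For readability: Gst.DebugGraphDetails(15) used above is the bitwise OR of the four basic detail flags, which is also what Gst.DebugGraphDetails.ALL resolves to, so an equivalent, more explicit call would be:

# Equivalent to Gst.DebugGraphDetails(15) / Gst.DebugGraphDetails.ALL
details = (Gst.DebugGraphDetails.MEDIA_TYPE
           | Gst.DebugGraphDetails.CAPS_DETAILS
           | Gst.DebugGraphDetails.NON_DEFAULT_PARAMS
           | Gst.DebugGraphDetails.STATES)
Gst.debug_bin_to_dot_file(pipeline, details, "debug")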
def push_buffer(src, ml):
    global fnum
    global pipeline
    fname = "H264/%05d" % fnum
    try:
        with open(fname, "rb") as f:
            buf = Gst.Buffer.new_wrapped(list(f.read()))
    except FileNotFoundError:
        ml.quit()
        return

    fnum += 1
    print("Pushed %s: %s" %
          (fname,
           src.push_sample(
               Gst.Sample.new(buf, Gst.Caps("video/x-h264"), None, None))))

    Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails.ALL,
                              os.path.basename(fname))

    return True
    def on_message(self, bus, message):
        mtype = message.type
        # print(mtype)
        if mtype == Gst.MessageType.ERROR:
            print("we got an error, life is shit")
            err, debug = message.parse_error()
            print(err)
            print(debug)
            Gst.debug_bin_to_dot_file (self.pipeline, \
            Gst.DebugGraphDetails.ALL, 'transmageddon-debug-graph')
            #self.emit('got-error', err.message)
        elif mtype == Gst.MessageType.ELEMENT:
            if GstPbutils.is_missing_plugin_message(message):
                print("missing something")
                if self.missingplugin == False:  #don't think this is correct if more than one plugin installed
                    self.missingplugin = message
                    GstPbutils.missing_plugin_message_get_description(message)
                    GstPbutils.missing_plugin_message_get_installer_detail(
                        message)
                    self.uridecoder.set_state(Gst.State.NULL)
                    self.emit('missing-plugin')

        elif mtype == Gst.MessageType.ASYNC_DONE:
            self.emit('ready-for-querying')
        elif mtype == Gst.MessageType.EOS:
            self.usedstreamids = []
            #removing multipass cache file when done
            if (self.streamdata['multipass'] !=
                    0) and (self.streamdata['passcounter'] !=
                            self.streamdata['multipass']):
                if os.access(self.cachefile, os.F_OK):
                    os.remove(self.cachefile)
                    os.remove(self.cachefile + '.mbtree')
            # print(self.streamdata['passcounter'])
            self.emit('got-eos')
            self.pipeline.set_state(Gst.State.NULL)
        elif mtype == Gst.MessageType.APPLICATION:
            self.pipeline.set_state(Gst.State.NULL)
            self.pipeline.remove(self.uridecoder)
        return True
Example #34
    def build_pipeline(self,
                       channels,
                       sinkname,
                       samplerate,
                       srcname,
                       parse_element='wavparse'):
        self.channels = channels
        self.srcname = srcname
        self.sink = self.make_add_link(sinkname, None)
        self.classifier = self.make_add_link('classify', self.sink)
        self.capsfilter = self.make_add_link('capsfilter', self.classifier)
        self.interleave = self.make_add_link('interleave', self.capsfilter)
        self.sources = []
        for i in range(channels):
            ac = self.make_add_link('audioconvert', self.interleave)
            ar = self.make_add_link('audioresample', ac)
            if srcname == 'filesrc':
                wp = self.make_add_link(parse_element, ar)
                fs = self.make_add_link(srcname, wp)
            else:
                cf = self.make_add_link('capsfilter', ar)
                cf.set_property(
                    "caps",
                    Gst.caps_from_string("audio/x-raw, "
                                         "layout=(string)interleaved, "
                                         "channel-mask=(bitmask)0x0, "
                                         "rate=%d, channels=1" %
                                         (samplerate, )))
                fs = self.make_add_link(srcname, cf)
            self.sources.append(fs)

        caps = Gst.caps_from_string("audio/x-raw, "
                                    "layout=(string)interleaved, "
                                    "channel-mask=(bitmask)0x0, "
                                    "rate=%d, channels=%d" %
                                    (samplerate, channels))
        self.capsfilter.set_property("caps", caps)
        if 0:
            Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                      "pipeline.dot")
Example #35
    async def add_mjpeg_sink(self, fd):
        if self.mjpeg_bin is None:
            desc = 'videorate name=in ! video/x-raw,framerate={rate}/1 ! videoscale ! video/x-raw,width={width},height={height} ! queue ! jpegenc ! multifdsink name=sink'.format(
                rate=args.mjpeg_framerate,
                width=args.mjpeg_width,
                height=args.mjpeg_height)
            self.mjpeg_bin = Gst.parse_bin_from_description(desc, False)
            self.mjpeg_sink = MultiFdSink(self.mjpeg_bin.get_by_name('sink'),
                                          name='mjpeg')
            self.pipeline.add(self.mjpeg_bin)
            self.tee.link(self.mjpeg_bin.get_by_name('in'))
            self.mjpeg_bin.set_state(Gst.State.PLAYING)
            Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                      "graph.dot")

        await self.mjpeg_sink.add_fd(fd)

        if self.mjpeg_sink.active_clients() == 0:
            self.tee.unlink(self.mjpeg_bin)
            self.mjpeg_bin.set_state(Gst.State.NULL)
            self.pipeline.remove(self.mjpeg_bin)
            self.mjpeg_bin = None
Example #36
    async def grab_frame(self):
        logging.debug('Grabbing frame')
        bin = Gst.parse_bin_from_description('jpegenc ! appsink name=sink',
                                             True)
        queue = asyncio.Queue()

        def on_frame(sink):
            logging.debug('Have frame')
            sample = sink.emit("pull-sample")
            sink.set_emit_signals(False)
            buf = sample.get_buffer()
            (result, map_info) = buf.map(Gst.MapFlags.READ)
            try:
                assert result
                queue.put_nowait(map_info.data)
            except asyncio.QueueFull as e:
                pass
            finally:
                buf.unmap(map_info)
            return Gst.FlowReturn.OK

        sink = bin.get_by_name('sink')
        sink.set_emit_signals(True)
        sink.connect('new-sample', on_frame)
        bin.set_state(Gst.State.PLAYING)
        self.bin.add(bin)
        pad = self.tee.get_compatible_pad(bin.pads[0], None)
        self.tee.link(bin)
        frame = await queue.get()
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                                  "graph.dot")
        self.tee.unlink(bin)
        self.tee.release_request_pad(pad)
        bin.set_state(Gst.State.NULL)
        self.bin.remove(bin)
        del bin
        return frame
    def on_message(self, bus, message):
        mtype = message.type
        # print(mtype)
        if mtype == Gst.MessageType.ERROR:
            print("we got an error, life is shit")
            err, debug = message.parse_error()
            print(err)
            print(debug)
            Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, "transmageddon-debug-graph")
            # self.emit('got-error', err.message)
        elif mtype == Gst.MessageType.ELEMENT:
            if GstPbutils.is_missing_plugin_message(message):
                print("missing something")
                if self.missingplugin == False:  # don't think this is correct if more than one plugin installed
                    self.missingplugin = message
                    GstPbutils.missing_plugin_message_get_description(message)
                    GstPbutils.missing_plugin_message_get_installer_detail(message)
                    self.uridecoder.set_state(Gst.State.NULL)
                    self.emit("missing-plugin")

        elif mtype == Gst.MessageType.ASYNC_DONE:
            self.emit("ready-for-querying")
        elif mtype == Gst.MessageType.EOS:
            self.usedstreamids = []
            # removing multipass cache file when done
            if (self.streamdata["multipass"] != 0) and (self.streamdata["passcounter"] != self.streamdata["multipass"]):
                if os.access(self.cachefile, os.F_OK):
                    os.remove(self.cachefile)
                    os.remove(self.cachefile + ".mbtree")
            # print(self.streamdata['passcounter'])
            self.emit("got-eos")
            self.pipeline.set_state(Gst.State.NULL)
        elif mtype == Gst.MessageType.APPLICATION:
            self.pipeline.set_state(Gst.State.NULL)
            self.pipeline.remove(self.uridecoder)
        return True
    def close(self):
        if self.n_branch > 0:
            if not os.path.exists(self.saving_path):
                os.mkdir(self.saving_path)
            elif not os.path.isdir(self.saving_path):
                raise Exception("Output directory %s already exists as a file" % self.saving_path)
            for stream in self.list_of_streams:
                stream.set_state(Gst.State.NULL)


            # Gst.debug_bin_to_dot_file(self.pipeline_ON_AIR, Gst.DebugGraphDetails.ALL, "pipeline_ON_AIR")
            # os.system("dot -Tpng -o " + self.saving_path + "pipeline_ON_AIR.png " + self.saving_path+ "pipeline_ON_AIR.dot" )



            dotfile = self.saving_path + "pipeline_ON_AIR.dot"
            pngfile = self.saving_path + "pipeline_ON_AIR.png"
            if os.access(dotfile, os.F_OK):
                os.remove(dotfile)
            if os.access(pngfile, os.F_OK):
                os.remove(pngfile)
            Gst.debug_bin_to_dot_file(self.pipeline_ON_AIR, Gst.DebugGraphDetails.ALL, "pipeline_ON_AIR")
            # # check if graphviz is installed with a simple test
            # try:
            #     dot = which.which("dot")
            os.system("dot" + " -Tpng -o " + pngfile + " " + dotfile)
            #     Gtk.show_uri(None, "file://"+pngfile, 0)
            # except which.WhichError:
            #     print "The debug feature requires graphviz (dot) to be installed."
            #     print "Transmageddon can not find the (dot) binary."




            time.sleep(.10)
            self.pipeline_ON_AIR.send_event(Gst.Event.new_eos())
Example #39
 def quit(self, window):
     Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, 'pipeline')
     self.pipeline.set_state(Gst.State.NULL)
     Gtk.main_quit()
Example #40
        logging.error('Cannot load theme: %s', theme_name)
        return

    screen = Gdk.Screen.get_default()
    context = Gtk.StyleContext()
    context.add_provider_for_screen(screen, provider,
                                    Gtk.STYLE_PROVIDER_PRIORITY_USER)


if __name__ == "__main__":

    logging.basicConfig(level=logging.DEBUG)

    theme = config.get('theme', None)
    if theme:
        load_theme(theme)

    app = TetraApp()

    w2 = MainWindow(app)

    app.start()

    Gst.debug_bin_to_dot_file(
        app.pipeline, Gst.DebugGraphDetails.NON_DEFAULT_PARAMS
        | Gst.DebugGraphDetails.MEDIA_TYPE
        | Gst.DebugGraphDetails.CAPS_DETAILS, 'debug_start')

    Gtk.main()
    sys.exit(0)
Example #41
    def __init__(self):
        self.window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
        self.window.connect("destroy", self.on_quit)
        self.window.set_size_request(640, 360)

        self.da = Gtk.DrawingArea()
        self.da.modify_bg(Gtk.StateType.NORMAL, Gdk.Color(0, 0, 0))
        self.window.add(self.da)

        self.pipe = Gst.Pipeline.new("record_test")
        tcpsrc = Gst.ElementFactory.make("tcpclientsrc", "tcpsrc")
        tcpsrc.set_property("host", "192.168.0.79")
        # tcpsrc.set_property('host', "songsul.iptime.org")
        tcpsrc.set_property("port", 5001)
        self.pipe.add(tcpsrc)

        # q = Gst.ElementFactory.make('queue', None)
        # self.pipe.add(q)

        gdpdepay = Gst.ElementFactory.make("gdpdepay", None)
        self.pipe.add(gdpdepay)
        rtpdepay = Gst.ElementFactory.make("rtph264depay", None)
        self.pipe.add(rtpdepay)
        parser = Gst.ElementFactory.make("h264parse", None)
        self.pipe.add(parser)
        tee = Gst.ElementFactory.make("tee", None)
        self.pipe.add(tee)

        monitor_q = Gst.ElementFactory.make("queue", None)
        self.pipe.add(monitor_q)
        avdec = Gst.ElementFactory.make("avdec_h264", None)
        self.pipe.add(avdec)
        vidconv = Gst.ElementFactory.make("videoconvert", None)
        self.pipe.add(vidconv)
        # monitor_sink = Gst.ElementFactory.make('autovideosink', None)
        monitor_sink = Gst.ElementFactory.make("xvimagesink", None)
        monitor_sink.set_property("sync", False)
        self.pipe.add(monitor_sink)

        record_q = Gst.ElementFactory.make("queue", None)
        # record_q.set_property('leaky', 2)
        record_q.set_property("max-size-time", 30 * Gst.SECOND)
        self.pipe.add(record_q)
        caps = Gst.caps_from_string("video/x-h264, alignment=au, stream-format=avc")
        capsfilter = Gst.ElementFactory.make("capsfilter", None)
        self.pipe.add(capsfilter)
        capsfilter.set_property("caps", caps)
        record_sink = Gst.ElementFactory.make("appsink", None)
        record_sink.set_property("emit-signals", True)
        record_sink.connect("new-sample", self.on_new_sample_recsink)
        self.pipe.add(record_sink)

        tcpsrc.link(gdpdepay)
        gdpdepay.link(rtpdepay)
        rtpdepay.link(parser)
        parser.link(tee)

        monitor_q.link(avdec)
        avdec.link(vidconv)
        vidconv.link(monitor_sink)

        record_q.link(capsfilter)
        capsfilter.link(record_sink)

        t_pad = tee.get_request_pad("src_%u")
        q_pad = monitor_q.get_static_pad("sink")
        t_pad.link(q_pad)

        t_pad = tee.get_request_pad("src_%u")
        q_pad = record_q.get_static_pad("sink")
        t_pad.link(q_pad)

        bus = self.pipe.get_bus()
        bus.add_signal_watch()
        bus.enable_sync_message_emission()
        bus.connect("message", self.on_message_cb)
        bus.connect("sync-message::element", self.on_sync_message_cb)
        bus.unref()

        """ Video Recording Element Initialize """
        self.rec_pipe = Gst.ElementFactory.make("pipeline", "record_pipeline")

        self.rec_src = Gst.ElementFactory.make("appsrc", "rec_src")
        # self.rec_src.set_property('do-timestamp', True)
        self.rec_pipe.add(self.rec_src)

        mp4mux = Gst.ElementFactory.make("mp4mux", None)
        # mp4mux.set_property('streamable', True)
        self.rec_pipe.add(mp4mux)

        self.filesink = Gst.ElementFactory.make("filesink", None)
        self.filesink.set_property("async", False)
        self.rec_pipe.add(self.filesink)

        self.rec_src.link(mp4mux)
        mp4mux.link(self.filesink)

        rec_bus = self.rec_pipe.get_bus()
        rec_bus.add_signal_watch()
        rec_bus.connect("message", self.on_rec_message_cb)
        rec_bus.unref()

        self.rec_timer_id = 0
        self.rec_lock = threading.Lock()
        self.is_playing = False

        self.window.show_all()

        Gst.debug_bin_to_dot_file(self.pipe, Gst.DebugGraphDetails.ALL, "app_recording_test")
        Gst.debug_bin_to_dot_file(self.rec_pipe, Gst.DebugGraphDetails.ALL, "record_pipe_test")

        self.pipe.set_state(Gst.State.PLAYING)
Example #42
 def dot(self):
     Gst.debug_bin_to_dot_file (self.mixer.pipeline,Gst.DebugGraphDetails.ALL, 
                                "Mixer-"+str(self.mixer_count))
     
     self.mixer_count = self.mixer_count + 1
Example #43
def video(items, location):
    mainloop = GObject.MainLoop()
    
    pipeline = Gst.parse_launch(
        'appsrc name=src block=true caps="image/jpeg,framerate=30/1,width=640,height=480" ! '
        'jpegdec ! '
        'matroskamux ! filesink name=sink  '
    )
    
    pipeline.get_by_name("sink").set_property("location", location)

    current_i = 0
    total_time = 0
    
    def src_need_data(src, need_bytes):
        nonlocal current_i, total_time
        
        filename, duration = items[current_i]
        #logging.debug(filename)
        if os.path.exists(filename):
            with open(filename, 'rb') as f:
                data = f.read()
            #help(Gst.Buffer)
            buf = Gst.Buffer.new_wrapped(data)
            
            duration = duration * Gst.SECOND
            total_time += duration
            
            buf.pts = total_time
            buf.duration = duration
            src.emit("push-buffer", buf)
        
            current_i += 1
            if current_i % 10 == 0:
                logging.info(current_i)
            if len(items) - 1 <= current_i :
                logging.info('Done')
                src.emit("end-of-stream")
        else:
            logging.info('No more files - Done')
            src.emit("end-of-stream")
    
    pipeline.get_by_name("src").connect("need-data", src_need_data)
    
    
    def bus_message(bus, message):
        t = message.type
    
        if t == Gst.MessageType.EOS:
            pipeline.set_state(Gst.State.NULL)
            logging.info('Done')
            mainloop.quit()
    
        if t == Gst.MessageType.ERROR:
            pipeline.set_state(Gst.State.NULL)
            error = message.parse_error()
            logging.error('{}\n{}'.format(*error))
    
    bus = pipeline.get_bus()
    bus.add_signal_watch()
    bus.connect("message", bus_message)
    
    pipeline.set_state(Gst.State.PLAYING)
    Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails.ALL, 'debug')
    mainloop.run()
Example #44
 def select(self, url):
     index = self.url_to_index[url]
     Gst.debug_bin_to_dot_file(self.pipe, Gst.DebugGraphDetails.CAPS_DETAILS, "pipe")
     selected_pad = self.selector.get_static_pad("sink_{}".format(index))
     self.selector.set_property('active-pad', selected_pad)
 def on_state_changed(self,bus,msg):
     states=msg.parse_state_changed()
     if msg.src.get_name() == "recorder" and states[1]==4 and os.getenv("GST_DEBUG_DUMP_DOT_DIR"): #To state is PLAYING
         Gst.debug_bin_to_dot_file (msg.src, Gst.DebugGraphDetails.ALL, "recorder-ts")
         logger.debug("pipeline dot file created in "+os.getenv("GST_DEBUG_DUMP_DOT_DIR"))
Example #46
 def run(self):
     self.pipeline.set_state(Gst.State.PLAYING)
     Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, 'pipeline')
     Gtk.main()
pipeline = Gst.Pipeline()
bus = pipeline.get_bus()

bin = Gst.Bin.new("bin1")

vsink = Gst.ElementFactory.make("xvimagesink", None)
bin.add(vsink)

decodebin = Gst.ElementFactory.make("uridecodebin", None)
bin.add(decodebin)
decodebin.connect("pad-added", on_new_decoded_pad)
# decodebin.connect("no-more-pads", self.on_no_more_pads)


pipeline.add(bin)
pipeline.set_state(Gst.State.PLAYING)

cmd = ""
while cmd != "exit":
    cmd = input(">>> ")

    if cmd == "graph":
        Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails.ALL, "pipeline")
        os.system("dot -Tpng -o /tmp/pipeline.png /tmp/pipeline.dot")

    if cmd == "go":
        # uri = 'file:///home/lsimons/workspace/open-playout/media/sintel_trailer-480p.mp4'
        uri = "file:///home/lsimons/workspace/open-playout/media/3wanderings.mpg"
        decodebin.set_property("uri", uri)
Example #48
 def __init_preview(self):
     self.preview = XVideoContainer(False,True,
                                    self.host_mixer,self.port_mixer,
                                    self.__the_widget.FramePreview.winId(),False)
     
     Gst.debug_bin_to_dot_file (self.preview.pipeline,Gst.DebugGraphDetails.ALL, "dockwidget_mixer-preview")
 def dot(self):
     Gst.debug_bin_to_dot_file (self.rtmp_stream.pipeline,Gst.DebugGraphDetails.ALL, "rtmp-"+str(self.dot_count))
     self.dot_count = self.dot_count + 1
Example #50
    def __init__(self):
        self.window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
        self.window.connect('destroy', self.on_quit)
        self.window.set_size_request(640, 360)
        
        self.da = Gtk.DrawingArea()
        self.da.modify_bg(Gtk.StateType.NORMAL, Gdk.Color(0, 0, 0))
        self.window.add(self.da)
        
        self.pipe = Gst.Pipeline.new('record_test')
        tcpsrc = Gst.ElementFactory.make('tcpclientsrc', 'tcpsrc')
        tcpsrc.set_property('host', "192.168.0.79")
        #tcpsrc.set_property('host', "songsul.iptime.org")
        tcpsrc.set_property('port', 5001)
        self.pipe.add(tcpsrc)
        
        #q = Gst.ElementFactory.make('queue', None)
        #self.pipe.add(q)
        
        gdpdepay = Gst.ElementFactory.make('gdpdepay', None)
        self.pipe.add(gdpdepay)
        rtpdepay = Gst.ElementFactory.make('rtph264depay', None)
        self.pipe.add(rtpdepay)
        avdec = Gst.ElementFactory.make('avdec_h264', None)
        self.pipe.add(avdec)
        tee = Gst.ElementFactory.make('tee', None)
        self.pipe.add(tee)
        
        monitor_q = Gst.ElementFactory.make('queue', None)
        self.pipe.add(monitor_q)
        vidconv = Gst.ElementFactory.make('videoconvert', None)
        self.pipe.add(vidconv)
        #monitor_sink = Gst.ElementFactory.make('autovideosink', None)
        monitor_sink = Gst.ElementFactory.make('xvimagesink', None)
        monitor_sink.set_property('sync', False)
        self.pipe.add(monitor_sink)

        record_q = Gst.ElementFactory.make('queue', None)
        #record_q.set_property('leaky', 2)
        record_q.set_property('max-size-time', 30 * Gst.SECOND)
        self.pipe.add(record_q)
        conv = Gst.ElementFactory.make('videoconvert', None)
        self.pipe.add(conv)
        x264enc = Gst.ElementFactory.make('x264enc', 'iframe')
        x264enc.set_property('tune', 0x00000004)
        self.pipe.add(x264enc)
        caps = Gst.caps_from_string("video/x-h264, profile=baseline, width=1280, height=720")
        filter = Gst.ElementFactory.make('capsfilter', None)
        self.pipe.add(filter)
        filter.set_property('caps', caps)
        record_sink = Gst.ElementFactory.make('appsink', None)
        record_sink.set_property('emit-signals', True)
        record_sink.connect('new-sample', self.on_new_sample_recsink)
        self.pipe.add(record_sink)
        
        tcpsrc.link(gdpdepay)
        gdpdepay.link(rtpdepay)
        rtpdepay.link(avdec)
        avdec.link(tee)
        
        monitor_q.link(vidconv)
        vidconv.link(monitor_sink)
        
        record_q.link(conv)
        conv.link(x264enc)
        x264enc.link(filter)
        filter.link(record_sink)
        
        t_pad = tee.get_request_pad('src_%u')
        q_pad = monitor_q.get_static_pad('sink')
        t_pad.link(q_pad)
        
        t_pad = tee.get_request_pad('src_%u')
        q_pad = record_q.get_static_pad('sink')
        t_pad.link(q_pad)
         
        bus = self.pipe.get_bus()
        bus.add_signal_watch()
        bus.enable_sync_message_emission()
        bus.connect("message", self.on_message_cb)
        bus.connect("sync-message::element", self.on_sync_message_cb)
        bus.unref()
        
        """ Video Recording Element Initialize """
        self.rec_pipe = Gst.ElementFactory.make('pipeline', 'record_pipeline')
        
        self.rec_src = Gst.ElementFactory.make('appsrc', 'rec_src')
        self.rec_pipe.add(self.rec_src)
        
        parse = Gst.ElementFactory.make('h264parse', None)
        self.rec_pipe.add(parse)
        
        mp4mux = Gst.ElementFactory.make('mp4mux', None)
        #mp4mux.set_property('streamable', True)
        self.rec_pipe.add(mp4mux)
        
        self.filesink = Gst.ElementFactory.make('filesink', None)
        self.filesink.set_property('async', False)
        self.rec_pipe.add(self.filesink)
        
        self.rec_src.link(parse)
        parse.link(mp4mux)
        mp4mux.link(self.filesink)
        
        rec_bus = self.rec_pipe.get_bus()
        rec_bus.add_signal_watch()
        rec_bus.connect('message', self.on_rec_message_cb)
        rec_bus.unref()
        
        self.rec_timer_id = 0
        self.rec_lock = threading.Lock()

        self.window.show_all()

        Gst.debug_bin_to_dot_file(self.pipe, Gst.DebugGraphDetails.ALL, 'app_recording_test')
        Gst.debug_bin_to_dot_file(self.rec_pipe, Gst.DebugGraphDetails.ALL, 'record_pipe_test')
                
        self.pipe.set_state(Gst.State.PLAYING)
        print("레코딩 시작")
        self.start_recording()
Example #51
 def run(self):
     self.pipeline.set_state(Gst.State.PLAYING)
     Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, 'rx-graph')
     print('Listening for stream on %s:%i' % (self.link_config.receiver_host, self.link_config.port))
    def __init__(self, STREAMDATA, AUDIODATA, VIDEODATA):
        GObject.GObject.__init__(self)

        # Choose plugin based on Container name
        self.audiodata = AUDIODATA
        self.videodata = VIDEODATA
        self.streamdata = STREAMDATA

        # set preset directory
        Gst.preset_set_app_dir("/usr/share/transmageddon/presets/")

        # Choose plugin based on Codec Name
        # or switch to remuxing mode if any of the values are set to 'pastr'
        self.stoptoggle = False

        self.doaudio = False
        self.preset = self.streamdata["devicename"]
        self.blackborderflag = False
        self.missingplugin = False
        self.probestreamid = False
        self.sinkpad = None
        self.usedstreamids = []

        # switching width and height around for rotation choices where it makes sense
        if int(self.videodata[0]["rotationvalue"]) == 1 or int(self.videodata[0]["rotationvalue"]) == 3:
            nwidth = self.videodata[0]["videoheight"]
            nheight = self.videodata[0]["videowidth"]
            self.videodata[0]["videoheight"] = nheight
            self.videodata[0]["videowidth"] = nwidth

        # if needed create a variable to store the filename of the multipass \
        # statistics file
        if self.streamdata["multipass"] != 0:
            videoencoderplugin = codecfinder.get_video_encoder_element(self.videodata[0]["outputvideocaps"])
            videoencoder = Gst.ElementFactory.make(videoencoderplugin, "videoencoder")
            properties = videoencoder.get_property_names()
            if "multipass-cache-file" in properties:
                self.cachefile = str(GLib.get_user_cache_dir()) + "/transmageddon/" + "multipass-cache-file" + ".log"
            else:
                self.streamdata["multipass"] = 0

        # gather preset data if relevant
        if self.preset != "nopreset":
            self.provide_presets()

        # Create transcoding pipeline
        self.pipeline = Gst.Pipeline()
        self.pipeline.set_state(Gst.State.PAUSED)

        # first check if we have a container format, if not set up output
        # for possible outputs should not be hardcoded

        if self.streamdata["container"] == False:
            x = 0
            while x < len(self.audiodata):
                if self.audiodata[x]["outputaudiocaps"] != False:
                    if not (
                        self.audiodata[x]["outputaudiocaps"].intersect(
                            Gst.caps_from_string("audio/mpeg, mpegversion=1, layer=3")
                        )
                    ).is_empty():
                        self.streamdata["container"] = Gst.caps_from_string("application/x-id3")
                x = x + 1
        else:
            self.encodebinprofile = GstPbutils.EncodingContainerProfile.new(
                "containerformat", None, self.streamdata["container"], None
            )

            # What to do if we are not doing video passthrough (we only support video inside a
            # container format)
            if self.videodata[0]["outputvideocaps"] != False:
                if (self.videodata[0]["dopassthrough"] == False) and (self.streamdata["passcounter"] == int(0)):
                    self.videoflipper = Gst.ElementFactory.make("videoflip", None)
                    self.videoflipper.set_property("method", int(self.videodata[0]["rotationvalue"]))
                    self.pipeline.add(self.videoflipper)

                    self.colorspaceconverter = Gst.ElementFactory.make("videoconvert", None)
                    self.pipeline.add(self.colorspaceconverter)

                    self.deinterlacer = Gst.ElementFactory.make("avdeinterlace", None)
                    self.pipeline.add(self.deinterlacer)

                    self.deinterlacer.link(self.colorspaceconverter)
                    self.colorspaceconverter.link(self.videoflipper)
                    self.deinterlacer.set_state(Gst.State.PAUSED)
                    self.colorspaceconverter.set_state(Gst.State.PAUSED)
                    self.videoflipper.set_state(Gst.State.PAUSED)
            # this part of the pipeline is used for both passthrough and re-encoding
            if self.videodata[0]["outputvideocaps"] != False:
                videopreset = None
                self.videoprofile = GstPbutils.EncodingVideoProfile.new(
                    self.videodata[0]["outputvideocaps"], videopreset, Gst.Caps.new_any(), 0
                )
                self.encodebinprofile.add_profile(self.videoprofile)

        # We do not need to do anything special for passthrough for audio, since we are not
        # including any extra elements between uridecodebin and encodebin
        x = 0
        while x < len(self.audiodata):
            # print(self.audiodata[x]['outputaudiocaps'])
            if self.audiodata[x]["outputaudiocaps"] != False:
                audiopreset = None
                if self.streamdata["container"] == False:
                    self.encodebinprofile = GstPbutils.EncodingAudioProfile.new(
                        self.audiodata[x]["outputaudiocaps"], audiopreset, Gst.Caps.new_any(), 0
                    )
                else:
                    audioprofile = GstPbutils.EncodingAudioProfile.new(
                        self.audiodata[x]["outputaudiocaps"], audiopreset, Gst.Caps.new_any(), 0
                    )
                    audioprofile.set_name("audioprofilename" + str(x))
                    self.encodebinprofile.add_profile(audioprofile)
            x = x + 1

        # Dealing with Video multipass encoding
        if self.streamdata["passcounter"] != int(0) and self.streamdata["multipass"] != int(0):
            videoencoderplugin = codecfinder.get_video_encoder_element(self.videodata[0]["outputvideocaps"])
            self.videoencoder = Gst.ElementFactory.make(videoencoderplugin, "videoencoder")
            self.pipeline.add(self.videoencoder)
            GstPresetType = GObject.type_from_name("GstPreset")
            if GstPresetType in GObject.type_interfaces(self.videoencoder):
                self.videoencoder.load_preset("Pass " + str(self.streamdata["passcounter"]))
                properties = self.videoencoder.get_property_names()
                if "multipass-cache-file" in properties:
                    self.videoencoder.set_property("multipass-cache-file", self.cachefile)
                else:
                    self.streamdata["multipass"] = 0
            self.multipassfakesink = Gst.ElementFactory.make("fakesink", "multipassfakesink")
            self.pipeline.add(self.multipassfakesink)
            self.videoencoder.set_state(Gst.State.PAUSED)
            self.multipassfakesink.set_state(Gst.State.PAUSED)

        else:
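            # otherwise hand the encoding profile to encodebin, which creates the
            # required encoders and muxer (or passes matching streams through,
            # since avoid-reencoding is enabled below)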
            self.encodebin = Gst.ElementFactory.make("encodebin", None)
            self.encodebin.connect("element-added", self.OnEncodebinElementAdd)
            self.encodebin.set_property("profile", self.encodebinprofile)
            self.encodebin.set_property("avoid-reencoding", True)
            self.pipeline.add(self.encodebin)
            self.encodebin.set_state(Gst.State.PAUSED)
            self.audiopads = {}
            x = 0
            while x < len(self.audiodata):
                if self.audiodata[x]["outputaudiocaps"] != False:
                    if self.streamdata["container"] != False:
                        self.audiopads[x] = self.encodebin.emit("request-profile-pad", "audioprofilename" + str(x))
                x = x + 1

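        # uridecodebin discovers and decodes the input; its source pads are linked
        # up dynamically via the pad-added handler (OnDynamicPad)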
        self.uridecoder = Gst.ElementFactory.make("uridecodebin", "uridecoder")
        self.uridecoder.set_property("uri", self.streamdata["filechoice"])
        self.uridecoder.connect("autoplug-continue", self.on_autoplug_continue)
        self.uridecoder.connect("pad-added", self.OnDynamicPad)
        self.uridecoder.connect("source-setup", self.dvdreadproperties)

        self.uridecoder.set_state(Gst.State.PAUSED)
        self.pipeline.add(self.uridecoder)

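        # on an analysis pass the video encoder feeds a fakesink; otherwise
        # encodebin's output is written to the chosen file via filesink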
        if self.streamdata["passcounter"] != int(0):
            self.videoencoder.link(self.multipassfakesink)
        else:
            self.transcodefileoutput = Gst.ElementFactory.make("filesink", "transcodefileoutput")
            self.transcodefileoutput.set_property(
                "location", (self.streamdata["outputdirectory"] + "/" + self.streamdata["outputfilename"])
            )
            self.pipeline.add(self.transcodefileoutput)
            self.encodebin.link(self.transcodefileoutput)
            self.transcodefileoutput.set_state(Gst.State.PAUSED)
        self.uridecoder.set_state(Gst.State.PAUSED)
        self.BusMessages = self.BusWatcher()
        Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL, "transmageddon-debug-graph")
        # we need to wait on this one before going further
        self.uridecoder.connect("no-more-pads", self.noMorePads)
Exemple #53
0
 def stop(self):
     print('Exiting...')
     Gst.debug_bin_to_dot_file(self.pipeline, Gst.DebugGraphDetails.ALL,
                               'stream')
     self.pipeline.set_state(Gst.State.NULL)
     self.mainloop.quit()
Exemple #54
0
 def on_message(self, bus, message):
     '''
         This is called on the main thread
     '''
     
     if message.type == Gst.MessageType.BUFFERING:
         percent = message.parse_buffering()
          if percent >= 100:
             self.logger.info('Buffering complete')
         if percent % 5 == 0:
             event.log_event('playback_buffering', self.engine.player, percent)
     
     elif message.type == Gst.MessageType.TAG:
         """ Update track length and optionally metadata from gstreamer's parser.
             Useful for streams and files mutagen doesn't understand. """
         
         current = self.current_track
         
         if not current.is_local():
             gst_utils.parse_stream_tags(current, message.parse_tag())
         
         if current and not current.get_tag_raw('__length'):
             res, raw_duration = self.playbin.query_duration(Gst.Format.TIME)
             if not res:
                 self.logger.error("Couldn't query duration")
                 raw_duration = 0
             duration = float(raw_duration)/Gst.SECOND
             if duration > 0:
                 current.set_tag_raw('__length', duration)
     
     elif message.type == Gst.MessageType.EOS and \
         not self.get_gst_state() == Gst.State.PAUSED:
         self.engine._eos_func(self)
     
     elif message.type == Gst.MessageType.STREAM_START and \
             message.src == self.playbin and \
             self.buffered_track is not None:
         
         # This handles starting the next track during gapless transition
         buffered_track = self.buffered_track
         self.buffered_track = None
         play_args = self.engine.player.engine_autoadvance_notify_next(buffered_track) + (True, True)
         self.engine._next_track(*play_args)
     
     elif message.type == Gst.MessageType.STATE_CHANGED:
         
         # This idea from quodlibet: pulsesink will not notify us when
         # volume changes if the stream is paused, so do it when the
         # state changes.
         if message.src == self.audio_sink:
             self.playbin.notify("volume")
     
     elif message.type == Gst.MessageType.ERROR:
         
         # Error handling code is from quodlibet
         gerror, debug_info = message.parse_error()
         message_text = ""
         if gerror:
             message_text = gerror.message.rstrip(".")
              
         if message_text == "":
             # The most readable part is always the last..
             message_text = debug_info[debug_info.rfind(':') + 1:]
             
             # .. unless there's nothing in it.
             if ' ' not in message_text:
                 if debug_info.startswith('playsink'):
                     message_text += _(': Possible audio device error, is it plugged in?')
         
         self.logger.error("Playback error: %s", message_text)
         self.logger.debug("- Extra error info: %s", debug_info)
         
         envname = 'GST_DEBUG_DUMP_DOT_DIR'
         if envname not in os.environ:
             import xl.xdg
             os.environ[envname] = xl.xdg.get_logs_dir()
          
         Gst.debug_bin_to_dot_file(self.playbin, Gst.DebugGraphDetails.ALL, self.name)
         self.logger.debug("- Pipeline debug info written to file '%s/%s.dot'",
                           os.environ[envname], self.name)
         
         self.engine._error_func(self, message_text)
     
     # TODO: Missing plugin error handling from quod libet
     # -- http://cgit.freedesktop.org/gstreamer/gstreamer/tree/docs/design/part-missing-plugins.txt
     
     return True
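
# A minimal, standalone sketch (names and pipeline are illustrative) of how a bus
# handler like on_message above is typically wired up: enable signal emission on
# the pipeline's bus and connect a callback, which then runs in the GLib main loop.
import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst, GLib

Gst.init(None)
pipeline = Gst.parse_launch("audiotestsrc num-buffers=100 ! fakesink")
loop = GLib.MainLoop()

def on_message(bus, message):
    # EOS and ERROR are the two message types almost every application handles.
    if message.type == Gst.MessageType.EOS:
        loop.quit()
    elif message.type == Gst.MessageType.ERROR:
        gerror, debug_info = message.parse_error()
        print("Playback error:", gerror.message)
        loop.quit()

bus = pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message", on_message)

pipeline.set_state(Gst.State.PLAYING)
loop.run()
pipeline.set_state(Gst.State.NULL)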
Exemple #55
0
    window.show_all()
    window.realize()
    gstPipeline.get_by_name(GST_VIEW_NAME).set_window_handle(gDrawAreaSink.get_window().get_xid())


    # Initialize the MVNC device

    dev = fx.Device(ncs_names[0])
    dev.OpenDevice()
    gGraph = dev.AllocateGraph(get_graph_from_disk())

    # Initialize input and output threads to pass images to the
    # MVNC device and to read results from the inferences made on those images.

    gCallback = put_output
    start_thread()

    if gstPipeline.set_state(Gst.State.PLAYING) == Gst.StateChangeReturn.FAILURE:
        gstPipeline.set_state(Gst.State.NULL)
    else:
        Gst.debug_bin_to_dot_file(gstPipeline, Gst.DebugGraphDetails.ALL, 'playing-pipeline')  # requires e.g. export GST_DEBUG_DUMP_DOT_DIR=/tmp/
        Gtk.main()
        Gst.debug_bin_to_dot_file(gstPipeline, Gst.DebugGraphDetails.ALL, 'shutting-down-pipeline')
        gstPipeline.set_state(Gst.State.NULL)
        print("exiting main loop")
        gGraph.DeallocateGraph()
        dev.CloseDevice()
        print("mvnc device closed")
        stop_thread()
Exemple #56
0
 def closeEvent(self, event):
     Gst.debug_bin_to_dot_file(self.mixer.pipeline, Gst.DebugGraphDetails.ALL, "tcp_mixer_play")
     for preview in self.previews:
         preview.stop_pipeline()