Example No. 1
    def _detect_camera(self):
        v4l2src = gst.element_factory_make('v4l2src')
        if v4l2src.props.device_name is None:
            return

        self._has_camera = True

        # Figure out if we can place a framerate limit on the v4l2 element,
        # which in theory will make it all the way down to the hardware.
        # Ideally, we should be able to do this by checking caps. However, I
        # can't find a way to do this (at this time, XO-1 cafe camera driver
        # doesn't support framerate changes, but gstreamer caps suggest
        # otherwise)
        pipeline = gst.Pipeline()
        caps = gst.Caps('video/x-raw-yuv,framerate=10/1')
        fsink = gst.element_factory_make('fakesink')
        pipeline.add(v4l2src, fsink)
        v4l2src.link(fsink, caps)
        self._can_limit_framerate = pipeline.set_state(gst.STATE_PAUSED) != gst.STATE_CHANGE_FAILURE
        pipeline.set_state(gst.STATE_NULL)
Example No. 2
    def constructPipeline(self):
        """
        Add and link elements in a GStreamer pipeline.
        """
        # Create the pipeline instance
        self.player = gst.Pipeline()

        # Define pipeline elements
        self.filesrc = gst.element_factory_make("filesrc")
        self.filesrc.set_property("location", self.inFileLocation)
        self.decodebin = gst.element_factory_make("decodebin")

        # Add elements to the pipeline
        self.player.add(self.filesrc, self.decodebin)

        # Link elements in the pipeline.
        gst.element_link_many(self.filesrc, self.decodebin)

        self.constructAudioPipeline()
        self.constructVideoPipeline()
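
The constructAudioPipeline() and constructVideoPipeline() methods are not shown in this example. A minimal sketch of what the audio branch might look like, assuming an audioconvert feeding an autoaudiosink that decodebin's dynamically created pads get linked to later (the element choices and attribute names here are assumptions, not the original code):

    def constructAudioPipeline(self):
        """
        Hypothetical sketch: build the audio branch of the pipeline.
        """
        self.audioconvert = gst.element_factory_make("audioconvert")
        self.audiosink = gst.element_factory_make("autoaudiosink")
        self.player.add(self.audioconvert, self.audiosink)
        gst.element_link_many(self.audioconvert, self.audiosink)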
Example No. 3
 def __init__(self, elementname="fsrtpconference"):
     self.pipeline = gst.Pipeline()
     signal.signal(signal.SIGINT, self.int_handler)
     notifier = farsight.ElementAddedNotifier()
     notifier.connect("element-added", self.element_added_cb)
     notifier.add(self.pipeline)
     self.pipeline.get_bus().set_sync_handler(self.sync_handler)
     self.pipeline.get_bus().add_watch(self.async_handler)
     self.conf = gst.element_factory_make(elementname)
     # Set our own cname
     self.conf.set_property("sdes-cname", mycname)
     self.pipeline.add(self.conf)
     if VIDEO:
         self.videosource = FsUIVideoSource(self.pipeline)
         self.videosession = FsUISession(self.conf, self.videosource)
     if AUDIO:
         self.audiosource = FsUIAudioSource(self.pipeline)
         self.audiosession = FsUISession(self.conf, self.audiosource)
         self.adder = None
     self.pipeline.set_state(gst.STATE_PLAYING)
Example No. 4
    def start(self, pos):
        songs = self.list_of_songs()
        if (self.STATUS != 0):
            self.stop()

        self.__pipeline = gst.Pipeline("player")
        source = gst.element_factory_make("gnomevfssrc", "gvfs-source")
        decoder = gst.element_factory_make("mad", "mp3-decoder")
        sink = gst.element_factory_make("alsasink", "alsa-output")

        self.__pipeline.add(source, decoder, sink)
        gst.element_link_many(source, decoder, sink)
        bus = self.__pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.bus_call)
        self.__pipeline.get_by_name("gvfs-source").set_property(
            "location", songs[pos])
        print songs[pos]
        self.__pipeline.set_state(gst.STATE_PLAYING)
        self.STATUS = 1
Example No. 5
 def _setup_pipe(self):
     # We have to fix the caps because gst can't deal with having them change.
     # TODO: make this a preference and/or autodetect the optimal rate based on
     #   the output device - if it's a 48000Hz-native chip we don't want to send
     #   it 44100Hz audio all the time.
     #   Or better yet, fix gst to handle changing caps :D
     self.caps = gst.Caps("audio/x-raw-int, "
                          "endianness=(int)1234, "
                          "signed=(boolean)true, "
                          "width=(int)16, "
                          "depth=(int)16, "
                          "rate=(int)44100, "
                          "channels=(int)2")
     self._pipe = gst.Pipeline()
     self.adder = gst.element_factory_make("adder")
     self.audio_queue = gst.element_factory_make("queue")
     self._load_queue_values()
     self._pipe.add(self.adder, self.audio_queue, self._mainbin)
     self.adder.link(self.audio_queue)
     self.audio_queue.link(self._mainbin)
Example No. 6
def __init__(self):
    self.converter = gst.Pipeline("converter")
    source = gst.element_factory_make("filesrc", "file-source")
    decode = gst.element_factory_make("decodebin", "decode")
    self.decode = gst.element_factory_make("wavenc", "wave-enc")
    audioconv = gst.element_factory_make("audioconvert", "converter")
    filesink = gst.element_factory_make("filesink", "file-output")

    self.converter.add(source, decode, self.decode, audioconv, filesink)
    gst.element_link_many(source, decode)
    gst.element_link_many(self.decode, audioconv, filesink)

    filepath = self.entry.get_text()
    if os.path.isfile(filepath):
        self.converter.get_by_name("file-source").set_property(
            "location", filepath)
        self.converter.set_state(gst.STATE_PLAYING)
    else:
        self.converter.set_state(gst.STATE_NULL)
        self.button.set_label("Start")
Example No. 7
    def __init__(self, location):
        # The pipeline
        self.pipeline = gst.Pipeline()

        # Create bus and connect several handlers
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect('message::eos', self.on_eos)
        self.bus.connect('message::tag', self.on_tag)
        self.bus.connect('message::error', self.on_error)

        # Create elements
        self.src = gst.element_factory_make('filesrc')
        self.dec = gst.element_factory_make('decodebin')
        self.conv = gst.element_factory_make('audioconvert')
        self.rsmpl = gst.element_factory_make('audioresample')
        self.sink = gst.element_factory_make('alsasink')

        # Set 'location' property on filesrc
        self.src.set_property('location', location)

        # Connect handler for 'new-decoded-pad' signal
        self.dec.connect('new-decoded-pad', self.on_new_decoded_pad)

        # Add elements to pipeline
        self.pipeline.add(self.src, self.dec, self.conv, self.rsmpl, self.sink)

        # Link *some* elements
        # This is completed in self.on_new_decoded_pad()
        self.src.link(self.dec)
        gst.element_link_many(self.conv, self.rsmpl, self.sink)

        # Reference used in self.on_new_decoded_pad()
        self.apad = self.conv.get_pad('sink')

        # The MainLoop
        self.mainloop = gobject.MainLoop()

        # And off we go!
        self.pipeline.set_state(gst.STATE_PLAYING)
        self.mainloop.run()
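
The dynamic part of the link is completed in on_new_decoded_pad(), which is referenced above but not shown. A minimal sketch, assuming the pygst 0.10 decodebin callback signature (element, pad, is_last) and the self.apad reference created above:

    def on_new_decoded_pad(self, element, pad, is_last):
        # Link decodebin's newly created audio pad to audioconvert's sink pad
        caps = pad.get_caps()
        if caps[0].get_name().startswith('audio/') and not self.apad.is_linked():
            pad.link(self.apad)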
Example No. 8
    def createPipeline(self, w):
        """Given a window, creates a pipeline and connects it to the window"""

        # this code makes the video sink render into the specified window
        def set_xid(window):
            gtk.gdk.threads_enter()
            videosink.set_xwindow_id(window.window.xid)
            videosink.expose()
            gtk.gdk.threads_leave()

        # this code receives messages from the pipeline; if we need to
        # set the X11 window id, it calls set_xid
        def bus_handler(unused_bus, message):
            if message.type == gst.MESSAGE_ELEMENT:
                if message.structure.get_name() == 'prepare-xwindow-id':
                    set_xid(w)
            return gst.BUS_PASS

        # create our pipeline, and connect our bus_handler
        self.pipeline = gst.Pipeline()
        bus = self.pipeline.get_bus()
        bus.set_sync_handler(bus_handler)

        videosink = gst.element_factory_make("xvimagesink", "sink")
        videosink.set_property("force-aspect-ratio", True)
        videosink.set_property("handle-expose", True)
        #videosink = gst.element_factory_make("v4l2sink", "sink")
        #videosink.set_property("device", "/dev/video7")
        scale = gst.element_factory_make("videoscale", "scale")
        cspace = gst.element_factory_make("ffmpegcolorspace", "cspace")

        audiosink = gst.element_factory_make("jackaudiosink")
        audioconvert = gst.element_factory_make("audioconvert")

        # pipeline looks like: ... ! cspace ! scale ! sink
        #                      ... ! audioconvert ! jackaudiosink
        self.pipeline.add(cspace, scale, videosink, audiosink, audioconvert)
        scale.link(videosink)
        cspace.link(scale)
        audioconvert.link(audiosink)
        return (self.pipeline, (cspace, audioconvert))
Example No. 9
def convert(flacfile, mp3file):
    print "Converting %s... " % flacfile
    src = gst.element_factory_make("filesrc", "src")
    src.set_property("location", flacfile)
    #src_pad = src.get_pad("src")

    flac = gst.element_factory_make("flacdec", "decoder")

    mp3 = gst.element_factory_make("lame", "encoder")
    mp3.set_property("bitrate", 192)
    #mp3.set_property("quality", 2)
    #mp3.set_property("vbr", 4)
    #mp3.set_property("vbr-quality", 2)

    id3 = gst.element_factory_make("id3v2mux", "tagger")

    #xing = gst.element_factory_make("xingmux", "vbrfixer")

    sink = gst.element_factory_make("filesink", "sink")
    sink.set_property("location", mp3file)

    bin = gst.Pipeline()
    bin.add(src, flac, mp3, id3, sink)
    gst.element_link_many(src, flac, mp3, id3, sink)
    #bin.connect("error", error_cb)

    bin.set_state(gst.STATE_PLAYING)

    print "GO"
    bus = bin.get_bus()
    while 1:
        msg = bus.poll(gst.MESSAGE_EOS | gst.MESSAGE_ERROR, gst.SECOND)
        if msg:
            print msg
            print "DUN"
            break

    print "STOP"
    bin.set_state(gst.STATE_NULL)

    print "Done.\n"
Example No. 10
    def save_file(self, uri):
        pipeline = gst.Pipeline()

        playbin = gst.element_factory_make("playbin")
        pipeline.add(playbin)
        playbin.set_property("uri", self.playbin.get_property("uri"))

        bin = gst.Bin("speed-bin")

        speedchanger = gst.element_factory_make("pitch")
        speedchanger.set_property("tempo",
                                  self.speedchanger.get_property("tempo"))
        speedchanger.set_property("pitch",
                                  self.speedchanger.get_property("pitch"))
        bin.add(speedchanger)

        audioconvert = gst.element_factory_make("audioconvert")
        bin.add(audioconvert)

        encoder = gst.element_factory_make("wavenc")
        bin.add(encoder)

        filesink = gst.element_factory_make("filesink")
        bin.add(filesink)
        filesink.set_property("location", uri)

        gst.element_link_many(speedchanger, audioconvert)
        gst.element_link_many(audioconvert, encoder)
        gst.element_link_many(encoder, filesink)

        sink_pad = gst.GhostPad("sink", speedchanger.get_pad("sink"))
        bin.add_pad(sink_pad)
        playbin.set_property("audio-sink", bin)

        bus = playbin.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)

        pipeline.set_state(gst.STATE_PLAYING)

        return (pipeline, playbin)
Example No. 11
    def __init__(self):
        window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        window.set_title("Intercepting web-cam")
        window.set_default_size(500, 400)
        window.connect("destroy", gtk.main_quit, "WM destroy")
        vbox = gtk.VBox()
        window.add(vbox)
        self.movie_window = gtk.DrawingArea()
        vbox.add(self.movie_window)
        hbox = gtk.HBox()
        vbox.pack_start(hbox, False)
        hbox.set_border_width(10)
        hbox.pack_start(gtk.Label())
        self.button = gtk.Button("Start")
        self.button.connect("clicked", self.start_stop)
        hbox.pack_start(self.button, False)
        self.button2 = gtk.Button("Quit")
        self.button2.connect("clicked", self.exit)
        hbox.pack_start(self.button2, False)
        hbox.add(gtk.Label())
        window.show_all()

        # Set up the gstreamer pipeline
        self.player = gst.Pipeline("player")
        src = gst.element_factory_make("v4l2src")
        cf = gst.element_factory_make("capsfilter")
        WIDTH, HEIGHT, FRAMERATE = 640, 480, 15
        caps = gst.caps_from_string(
            "video/x-raw-yuv,format=(fourcc)YUY2,width=%d,height=%d,framerate=%d/1"
            % (WIDTH, HEIGHT, FRAMERATE))
        cf.set_property("caps", caps)
        filter = NewElement()
        sink = gst.element_factory_make('autovideosink')
        self.player.add(src, cf, filter, sink)
        gst.element_link_many(src, cf, filter, sink)

        bus = self.player.get_bus()
        bus.add_signal_watch()
        bus.enable_sync_message_emission()
        bus.connect("message", self.on_message)
        bus.connect("sync-message::element", self.on_sync_message)
Example No. 12
    def search(self, db, entry, is_playing, on_search_completed, *args):

        # only search if we're not already playing this entry
        if is_playing:
            print "not checking for embedded cover art in playing entry"
            on_search_completed(self, entry, None, *args)
            return

        # only search local files
        uri = db.entry_get(entry, rhythmdb.PROP_LOCATION)
        if not uri.startswith("file://"):
            print "not checking for embedded cover art in non-local entry %s" % uri
            on_search_completed(self, entry, None, *args)
            return

        self.entry = entry
        self.callback = on_search_completed
        self.args = args
        self.got_pixbuf = False

        # set up pipeline and bus callbacks
        self.pipeline = gst.Pipeline()
        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message::tag", self._tag_cb)
        bus.connect("message::state-changed", self._state_changed_cb)
        bus.connect("message::error", self._error_cb)

        # create elements
        self.src = gst.element_make_from_uri(gst.URI_SRC, uri)
        self.decodebin = gst.element_factory_make("decodebin2")
        self.sink = gst.element_factory_make("fakesink")
        self.decodebin.connect('new-decoded-pad', self._decoded_pad_cb)

        self.pipeline.add(self.src, self.decodebin, self.sink)
        self.src.link(self.decodebin)

        self.sinkpad = self.sink.get_pad('sink')

        self.pipeline.set_state(gst.STATE_PAUSED)
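
The _tag_cb handler connected above is where embedded cover art would be pulled out of the tag message. A rough sketch, assuming the image tag value arrives as a gst.Buffer and that gtk is available for decoding it (both assumptions, not shown in the original):

    def _tag_cb(self, bus, message):
        taglist = message.parse_tag()
        for tag in taglist.keys():
            if tag in ("image", "preview-image"):
                # The tag value is assumed to be a gst.Buffer holding the picture
                loader = gtk.gdk.PixbufLoader()
                loader.write(taglist[tag].data)
                loader.close()
                self.got_pixbuf = True
                self.callback(self, self.entry, loader.get_pixbuf(), *self.args)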
Example No. 13
        def __init__(self, tw):
            ''' Set up the stream. We save to a raw .wav file and then
            convert the sound to .ogg for saving. '''
            datapath = get_path(tw.parent, 'instance')
            self.capture_file = os.path.join(datapath, 'output.wav')
            self.save_file = os.path.join(datapath, 'output.ogg')
            self._eos_cb = None

            self._can_limit_framerate = False
            self._recording = False

            self._audio_transcode_handler = None
            self._transcode_id = None

            self._pipeline = gst.Pipeline("Record")
            self._create_audiobin()
            self._pipeline.add(self._audiobin)

            bus = self._pipeline.get_bus()
            bus.add_signal_watch()
            bus.connect('message', self._bus_message_handler)
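
The _create_audiobin() method is not shown above. A sketch of what it might do, given that the docstring says audio is first captured to a raw .wav file (the element choices are assumptions):

        def _create_audiobin(self):
            ''' Hypothetical sketch: capture from ALSA and write a raw
            .wav file to self.capture_file. '''
            src = gst.element_factory_make('alsasrc', 'audio-source')
            conv = gst.element_factory_make('audioconvert', 'audio-convert')
            enc = gst.element_factory_make('wavenc', 'wav-encoder')
            sink = gst.element_factory_make('filesink', 'audio-sink')
            sink.set_property('location', self.capture_file)

            self._audiobin = gst.Bin('audiobin')
            self._audiobin.add(src, conv, enc, sink)
            gst.element_link_many(src, conv, enc, sink)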
Example No. 14
    def __init__(self):
        self.player = gst.Pipeline("player")
        source = gst.element_factory_make("filesrc", "file-source")
        decoder = gst.element_factory_make("mad", "mp3-decoder")
        conv = gst.element_factory_make("audioconvert", "converter")
        sink = gst.element_factory_make("alsasink", "alsa-output")

        self.player.add(source, decoder, conv, sink)
        gst.element_link_many(source, decoder, conv, sink)

        self.state = 'stop'

        bus = self.player.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)

        self.play_list = Player_list([
            '/home/root/simple-todo-read-only/mp3/ay.mp3',
            '/home/root/simple-todo-read-only/mp3/hx.mp3',
            '/home/root/simple-todo-read-only/mp3/kxbsn.mp3'
        ])
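
The on_message handler connected above is not included in the snippet. A minimal sketch that stops the pipeline on end-of-stream or error (the original's exact behaviour, e.g. advancing through self.play_list, is unknown):

    def on_message(self, bus, message):
        t = message.type
        if t == gst.MESSAGE_EOS:
            # End of stream: stop the pipeline
            self.player.set_state(gst.STATE_NULL)
            self.state = 'stop'
        elif t == gst.MESSAGE_ERROR:
            err, debug = message.parse_error()
            print "Error: %s" % err, debug
            self.player.set_state(gst.STATE_NULL)
            self.state = 'stop'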
Example No. 15
 def __init__(self):
     window = gtk.Window(gtk.WINDOW_TOPLEVEL)
     window.set_title("Videotestsrc-Player")
     window.set_default_size(300, -1)
     window.connect("destroy", gtk.main_quit, "WM destroy")
     vbox = gtk.VBox()
     window.add(vbox)
     self.button = gtk.Button("Start")
     self.button.connect("clicked", self.start_stop)
     vbox.add(self.button)
     window.show_all()
     
     self.player = gst.Pipeline("player")
     source = gst.element_factory_make("videotestsrc", "video-source")
     sink = gst.element_factory_make("xvimagesink", "video-output")
     caps = gst.Caps("video/x-raw-yuv, width=320, height=230")
     filter = gst.element_factory_make("capsfilter", "filter")
     filter.set_property("caps", caps)
     
     self.player.add(source, filter, sink)
     gst.element_link_many(source, filter, sink)
Example No. 16
    def __init__(self, ip='192.168.5.52'):

        gobject.GObject.__init__(self)

        self.pipeline = gst.Pipeline()

        desktop = ximagesrc_bin()
        video_out = Out_lan_smokeenc_bin(ip)

        self.pipeline.add(desktop)
        self.pipeline.add(video_out)

        desktop.link(video_out)

        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect('message', self.__on_mensaje)
        self.bus.enable_sync_message_emission()
        self.bus.connect('sync-message', self.__sync_message)

        self.pipeline.set_state(gst.STATE_PLAYING)
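
ximagesrc_bin() and Out_lan_smokeenc_bin() are project-specific helpers that are not shown. As an illustration only, a desktop-capture bin like ximagesrc_bin might be built along these lines (a guess at the structure, not the original code):

def ximagesrc_bin():
    # Hypothetical sketch: capture the desktop with ximagesrc, convert the
    # colorspace, and expose the result through a ghost "src" pad.
    capture_bin = gst.Bin('ximagesrc_bin')
    src = gst.element_factory_make('ximagesrc')
    conv = gst.element_factory_make('ffmpegcolorspace')
    capture_bin.add(src, conv)
    src.link(conv)
    capture_bin.add_pad(gst.GhostPad('src', conv.get_pad('src')))
    return capture_bin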
Example No. 17
    def __init__(self):
        self.pipeline = gst.Pipeline()
        #buttons
        self.play_button = gtk.Button(stock=gtk.STOCK_MEDIA_PLAY)
        self.play_button.connect("clicked", self.play_onclick)
        self.stop_button = gtk.Button(stock=gtk.STOCK_MEDIA_STOP)
        self.stop_button.connect("clicked", self.stop_onclick)
        self.quit_button = gtk.Button(stock=gtk.STOCK_QUIT)
        self.quit_button.connect("clicked", self.quit_onclick)

        self.buttons = gtk.HButtonBox()
        self.buttons.add(self.play_button)
        self.buttons.add(self.stop_button)
        self.buttons.add(self.quit_button)

        #pack window
        self.window = gtk.Window()
        self.vbox = gtk.VBox()   # vertical box
        self.vbox.pack_start(self.buttons)
        self.window.add(self.vbox)
        self.window.connect("destroy", self.destroy)
        self.window.show_all()
Example No. 18
    def __init__(self):
        pipeline = gst.Pipeline("time lapse")
        source = gst.element_factory_make("v4lsrc", "source")
        text = gst.element_factory_make("cairotextoverlay")
        tee = gst.element_factory_make("tee")

        self.caps = gst.Caps("video/x-raw-yuv, width=640,height=480")
        filter = gst.element_factory_make("capsfilter", "filter")
        filter.set_property("caps", self.caps)

        text.set_property("halign", "left")
        text.set_property("valign", "bottom")

        pipeline.add(source, filter, text, tee)

        #link all elements
        gst.element_link_many(source, filter, text, tee)

        self.pipeline = pipeline
        self.text = text
        self.tee = tee
        self.outputs = []
Example No. 19
    def _create_pipeline_v0(self):
        self.sample_rate = None

        self.pipeline = gst.Pipeline(self._instanceName)

        # Create an AppSrc to push data in; the format will be determined later
        self.source = gst.element_factory_make('appsrc', 'source')
        self.pipeline.add(self.source)

        # Create sample rate and format converters to help with connecting
        converter = gst.element_factory_make('audioconvert', 'converter')
        resampler = gst.element_factory_make('audioresample', 'resampler')
        self.pipeline.add(converter)
        self.pipeline.add(resampler)

        # Create an ALSA sink for output
        sink = gst.element_factory_make("alsasink", "sink")
        self.pipeline.add(sink)

        self.source.link(converter)
        converter.link(resampler)
        resampler.link(sink)
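
The comment above notes that the appsrc format "will be determined later". A sketch of how the caps might be fixed once the sample rate is known (the method name and caps values are assumptions):

    def _set_stream_format(self, sample_rate):
        # Hypothetical sketch: pin down the appsrc caps at runtime.
        self.sample_rate = sample_rate
        caps = gst.Caps('audio/x-raw-int, endianness=(int)1234, '
                        'signed=(boolean)true, width=(int)16, depth=(int)16, '
                        'rate=(int)%d, channels=(int)1' % sample_rate)
        self.source.set_property('caps', caps)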
Example No. 20
def ripTrack(device, trackNo, filename, callbackProgress, callbackComplete):
    cdp = gst.element_factory_make("cdparanoia", "ripper")
    cdp.set_property("device", device)
    cdp.set_property("paranoia-mode", 4)
    cdp.set_property("abort-on-skip", True)
    track_format = gst.format_get_by_nick("track")
    src_pad = cdp.get_pad("src")

    flac = gst.element_factory_make("flacenc", "encoder")

    sink = gst.element_factory_make("filesink", "sink")
    sink.set_property("location", filename)

    bin = gst.Pipeline()
    bin.add_many(cdp, flac, sink)
    gst.element_link_many(cdp, flac, sink)

    bin.set_state(gst.STATE_PAUSED)

    seek = gst.event_new_segment_seek(
        track_format | gst.SEEK_METHOD_SET | gst.SEEK_FLAG_FLUSH, trackNo - 1,
        trackNo)
    src_pad.send_event(seek)

    res = bin.set_state(gst.STATE_PLAYING)

    lastsecs = -1
    while bin.iterate():
        nanos = src_pad.query(gst.QUERY_POSITION, gst.FORMAT_TIME)
        length = src_pad.query(gst.QUERY_TOTAL, gst.FORMAT_TIME)
        secs = nanos / gst.SECOND
        lensecs = length / gst.SECOND
        if secs != lastsecs and secs > 0:
            #print "secs %d, lensecs %d, rate %f" % (secs,lensecs, rate)
            callbackProgress(trackNo, secs, lensecs)
            lastsecs = secs

    res = bin.set_state(gst.STATE_NULL)
    callbackComplete(trackNo)
Example No. 21
  def _GetAudioPipe(self, host, port, password):
    pipe = gst.Pipeline('olpc-audio')
    caps = gst.Caps('audio/x-raw-int,rate=8000,channels=1,depth=8')
    elems = []

    def Add(name, properties=None):
      elem = gst.element_factory_make(name, name)
      properties = properties or {}
      for property, value in properties.iteritems():
        elem.set_property(property, value)
      pipe.add(elem)
      elems.append(elem)

    Add('alsasrc')
    Add('capsfilter', {'caps': caps})
    Add('audioconvert')
    Add('vorbisenc')
    Add('shout2send', {'ip': host, 'port': port, 'password': password,
                       'mount': '/olpc.ogg'})

    gst.element_link_many(*elems)
    return pipe
Example No. 22
    def constructPipeline(self):
        """
        Create the pipeline, add and link elements.
        """
        self.pipeline = gst.Pipeline()
        self.gnlfilesrc = gst.element_factory_make("gnlfilesource")

        # Set properties of the gnlfilesource element
        # Note: the gnlfilesource signal will be connected
        # in self.connect_signals()
        self.gnlfilesrc.set_property("uri", "file:///" + self.inFileLocation)

        self.colorSpace = gst.element_factory_make("ffmpegcolorspace")

        self.encoder = gst.element_factory_make("ffenc_png")

        self.filesink = gst.element_factory_make("filesink")

        self.pipeline.add(self.gnlfilesrc, self.colorSpace, self.encoder,
                          self.filesink)
        gst.element_link_many(self.colorSpace, self.encoder, self.filesink)
Example No. 23
    def create_pipeline(self):
        p = gst.Pipeline()

        width, height = self.framesize.split("x")
        width, height = int(width), int(height)

        src = gst.element_factory_make("videotestsrc")
        src.props.num_buffers = self.num_buffers
        bitrate = self.bitrate
        scaler = gst.element_factory_make("videoscale")

        enc = gst.element_factory_make(self.element, "encoder")

        if self.mode is not None:
            enc.props.mode = self.mode

        if self.intra_refresh is not None:
            enc.props.intra_refresh = self.intra_refresh

        enc.props.bitrate = bitrate
        ident = gst.element_factory_make("identity")

        sink = gst.element_factory_make("fakesink")

        s = gst.Structure("video/x-raw-yuv")
        s["format"] = gst.Fourcc(self.format)
        s["width"] = width
        s["height"] = height
        s["framerate"] = gst.Fraction(self.framerate, 1)

        caps = gst.element_factory_make("capsfilter", "capsf")
        caps.props.caps = gst.Caps(s)

        p.add(src, scaler, caps, enc, ident, sink)
        gst.element_link_many(src, scaler, caps, enc, ident, sink)

        ident.connect("handoff", self.handoff)
        ident.set_property("signal-handoffs", True)
        return p
Example No. 24
 def init_audio(self):
     self.unlink_gst()
     if (self.enable):
         self.chain = []
         self.pipeline = gst.Pipeline("mypipeline")
         self.source = self.make_and_chain("appsrc")
         rs = SignalGen.sample_rates[self.sample_rate]
         self.rate = float(rs)
         self.interval = 1.0 / self.rate
         caps = gst.Caps('audio/x-raw-int,'
                         'endianness=(int)1234,'
                         'channels=(int)2,'
                         'width=(int)32,'
                         'depth=(int)32,'
                         'signed=(boolean)true,'
                         'rate=(int)%s' % rs)
         self.source.set_property('caps', caps)
         self.sink = self.make_and_chain("autoaudiosink")
         self.pipeline.add(*self.chain)
         gst.element_link_many(*self.chain)
         self.source.connect('need-data', self.need_data)
         self.pipeline.set_state(gst.STATE_PLAYING)
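
The need_data callback that feeds the appsrc is not shown. A sketch of one way to push stereo samples matching the 32-bit caps above (the self.t phase accumulator is an assumption, not part of the original):

 def need_data(self, src, length):
     # Sketch only: push a block of little-endian signed 32-bit stereo
     # samples of a 440 Hz sine tone, matching the caps set above.
     import math, struct
     samples = []
     for i in range(1024):
         v = int(0.4 * 2147483647 * math.sin(2 * math.pi * 440.0 * self.t))
         self.t += self.interval  # self.t assumed initialised to 0.0
         samples.append(struct.pack('<ii', v, v))
     src.emit('push-buffer', gst.Buffer(''.join(samples)))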
Example No. 25
    def constructPipeline(self):
        """
        Create an instance of gst.Pipeline, create, add element objects
        to this pipeline. Create appropriate connections between the elements.
        """
        self.pipeline = gst.Pipeline("pipeline")

        self.filesrc = gst.element_factory_make("filesrc")
        self.filesrc.set_property("location", self.inFileLocation)

        self.filesink = gst.element_factory_make("filesink")
        self.filesink.set_property("location", self.outFileLocation)

        self.decodebin = gst.element_factory_make("decodebin")
        self.audioconvert = gst.element_factory_make("audioconvert")

        audio_encoder = self.audioEncoders[self.outputFormat]
        muxer_str = self.muxers[self.outputFormat]
        video_encoder = self.videoEncoders[self.outputFormat]

        self.audio_encoder = gst.element_factory_make(audio_encoder)
        self.muxer = gst.element_factory_make(muxer_str)
        self.video_encoder = gst.element_factory_make(video_encoder)

        self.queue1 = gst.element_factory_make("queue")
        self.queue2 = gst.element_factory_make("queue")
        self.queue3 = gst.element_factory_make("queue")

        self.pipeline.add(self.filesrc, self.decodebin, self.video_encoder,
                          self.muxer, self.queue1, self.queue2, self.queue3,
                          self.audioconvert, self.audio_encoder, self.filesink)

        gst.element_link_many(self.filesrc, self.decodebin)

        gst.element_link_many(self.queue1, self.video_encoder, self.muxer,
                              self.filesink)

        gst.element_link_many(self.queue2, self.audioconvert,
                              self.audio_encoder, self.queue3, self.muxer)
Example No. 26
	def __init__(self, channel):
		#defines a pipeline, and a player. Adds player to pipeline
		self.pipeline = gst.Pipeline("player")
		self.player = gst.element_factory_make("playbin", "player")
		self.pipeline.add(self.player)

		#alsa sink for audio and fakesink for video
		pulse = gst.element_factory_make("alsasink", "alsa")
		fakesink = gst.element_factory_make("fakesink", "fakesink")

		#video properties are retained to "trick" the Pi into thinking there is a video channel; not necessary
		self.player.set_property('uri', channel)
		self.player.set_property("audio-sink", pulse)
		self.player.set_property("video-sink", fakesink)

		#sets the bus and signal watch to check for changes to the bus
		self.bus = self.player.get_bus()
		self.bus.add_signal_watch()
		self.bus.enable_sync_message_emission()
		
		#emits message
		self.bus.connect("message", self.on_message)
Example No. 27
    def convertir(self, ruta, comienzo, pos):
        gobject.threads_init()
        global pipeline, adder, PAD_MUTEX, bus
        pipeline = gst.Pipeline("mypipeline")
        adder = gst.element_factory_make("adder")

        PAD_MUTEX = threading.Lock()

        bus = pipeline.get_bus()
        bus.enable_sync_message_emission()
        bus.connect("sync-message", self.handle_message)

        many = []
        comp = self.get_comp(ruta, comienzo)
        many.append(comp)

        convert = gst.element_factory_make("audioconvert")
        resample = gst.element_factory_make("audioresample")

        caps = gst.Caps("audio/x-raw-int, channels=1, width=16, rate=8000")
        filt = gst.element_factory_make("capsfilter")
        filt.set_property("caps", caps)

        enc = gst.element_factory_make("wavenc")

        #sink = gst.element_factory_make("alsasink")

        sink = gst.element_factory_make("filesink")
        outloc = "salida_" + str(
            pos) + ".wav"  #os.path.join(bundle.dir, "punch.wav")
        #print pos
        sink.set_property("location", outloc)

        pipeline.add(*many)
        pipeline.add(adder, resample, convert, filt, enc, sink)
        gst.element_link_many(adder, resample, convert, filt, enc, sink)

        pipeline.set_state(gst.STATE_PLAYING)
        gtk.main()
Example No. 28
def main():
    type = 'async'
    loop = gobject.MainLoop()

    pipeline = gst.Pipeline("cutter")
    src = gst.element_factory_make("sinesrc", "src")
    cutter = gst.element_factory_make("cutter")
    cutter.set_property('threshold', 0.5)
    sink = gst.element_factory_make("fakesink", "sink")
    pipeline.add(src, cutter, sink)
    src.link(cutter)
    cutter.link(sink)

    control = gst.Controller(src, "volume")
    control.set_interpolation_mode("volume", gst.INTERPOLATE_LINEAR)

    control.set("volume", 0, 0.0)
    control.set("volume", 2 * gst.SECOND, 1.0)
    control.set("volume", 4 * gst.SECOND, 0.0)
    control.set("volume", 6 * gst.SECOND, 1.0)
    control.set("volume", 8 * gst.SECOND, 0.0)
    control.set("volume", 10 * gst.SECOND, 1.0)

    bus = pipeline.get_bus()

    if type == 'async':
        bus.add_signal_watch()
        bus.connect('message::element', on_message_application, loop)
    else:
        # FIXME: needs wrapping in gst-python
        bus.set_sync_handler(bus.sync_signal_handler)
        bus.connect('sync-message::element', on_message_application, loop)

    pipeline.set_state(gst.STATE_PLAYING)

    loop.run()

    pipeline.set_state(gst.STATE_NULL)
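
The on_message_application callback used above is not defined in the snippet. A sketch, assuming the cutter element posts element messages named "cutter" with a boolean "above" field:

def on_message_application(bus, message, loop):
    structure = message.structure
    if structure is None or structure.get_name() != 'cutter':
        return
    # React to the signal crossing the configured threshold
    print "cutter: above=%s" % structure['above']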
Example No. 29
    def create_pipeline(self):
        p = gst.Pipeline()

        width, height = self.framesize.split("x")
        width, height = int(width), int(height)

        if self.location:
            src = gst.element_factory_make("filesrc")
            src.props.location = self.location
            if self.format == "I420":
                bpp = 1.5
            elif self.format == "UYVY":
                bpp = 2
            src.props.blocksize = int(width * height * bpp)
        else:
            src = gst.element_factory_make("videotestsrc")
            src.props.num_buffers = self.num_buffers

        enc = gst.element_factory_make(self.element)
        enc.props.bitrate = self.bitrate

        sink = gst.element_factory_make("filesink")
        sink.props.location = self.tmp_filename

        s = gst.Structure("video/x-raw-yuv")
        s["format"] = gst.Fourcc(self.format)
        s["width"] = width
        s["height"] = height
        s["framerate"] = gst.Fraction(self.framerate, 1)

        capf = gst.element_factory_make("capsfilter")
        capf.props.caps = gst.Caps(s)
        p.add(src, capf, enc, sink)

        if not gst.element_link_many(src, capf, enc, sink):
            print " pipeline creation error !!"

        return p
Example No. 30
    def __init__(self,
                 EndOfSongCallback=None,
                 PlayerReadyCallback=None,
                 ErrorCallback=None):
        self.EndOfSongCallback = EndOfSongCallback
        self.PlayerReadyCallback = PlayerReadyCallback
        self.ErrorCallback = ErrorCallback

        self.thread = GobjectThread()

        self.pipeline = gst.Pipeline('pipeline')

        # Concrete players must create these elements, add them to the pipeline
        # and link them (see the commented code in AddPipelineElements for an example)
        self.volume = None
        self.equalizer = None

        self.AddPipelineElements(self.pipeline)

        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.enable_sync_message_emission()
        self.bus.connect('message', self._OnMessage)
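
AddPipelineElements() is the extension point described in the comment above; the base class only prepares self.volume and self.equalizer as None. A hypothetical concrete implementation might look like this (element choices and attribute names are assumptions):

    def AddPipelineElements(self, pipeline):
        # Hypothetical sketch of a concrete player's pipeline
        self.source = gst.element_factory_make('filesrc')
        self.decoder = gst.element_factory_make('decodebin')
        self.convert = gst.element_factory_make('audioconvert')
        self.volume = gst.element_factory_make('volume')
        self.equalizer = gst.element_factory_make('equalizer-10bands')
        self.sink = gst.element_factory_make('autoaudiosink')

        pipeline.add(self.source, self.decoder, self.convert,
                     self.volume, self.equalizer, self.sink)
        gst.element_link_many(self.source, self.decoder)
        gst.element_link_many(self.convert, self.volume,
                              self.equalizer, self.sink)

        # decodebin's audio pad only appears at runtime; link it then
        def on_pad(dbin, pad, is_last):
            pad.link(self.convert.get_pad('sink'))
        self.decoder.connect('new-decoded-pad', on_pad)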