Code example #1
    def _CreateGesElements(self):
        self.timeline = GES.Timeline.new()
        self.timeline.add_track(GES.VideoTrack.new())
        self.timeline.add_track(GES.AudioTrack.new())
        self.timeline.set_auto_transition(0)

        self._GesUpdateRestrictionCaps()

        self.pipeline = GES.Pipeline()
        self.pipeline.set_timeline(self.timeline)

        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        # Workaround: connecting to all messages via the generic "message" signal
        # results in "Python int too large to convert to C long"
        bus.connect("message::error", self._GstOnMessage)
        bus.connect("message::eos", self._GstOnMessage)
        bus.connect("message::state-changed", self._GstOnMessage)
        bus.connect("message::warning", self._GstOnMessage)

        self.layer_video = GES.Layer()
        self.layer_vertical_effect = GES.Layer()
        self.layer_vertical_effect.set_priority(1)
        self.layer_audio = GES.Layer()
        self.layer_audio.set_priority(2)
        self.timeline.add_layer(self.layer_video)
        self.timeline.add_layer(self.layer_vertical_effect)
        self.timeline.add_layer(self.layer_audio)

        self.project = self.timeline.get_asset()
        self.project.connect("asset-added", self._GesOnAssetAdded)
        self.project.connect("error-loading-asset",
                             self._GesOnErrorLoadingAsset)
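The `_GstOnMessage` handler connected above is not part of this example. A minimal sketch of what such a handler could look like, assuming `Gst` is imported from `gi.repository` elsewhere in the module (the printed output is illustrative only):

    def _GstOnMessage(self, bus, message):
        # Branch on the message type; with the per-type signal connections
        # above, only these four types ever reach this handler.
        if message.type == Gst.MessageType.ERROR:
            error, debug = message.parse_error()
            print("error:", error.message, debug)
        elif message.type == Gst.MessageType.EOS:
            self.pipeline.set_state(Gst.State.NULL)
        elif message.type == Gst.MessageType.STATE_CHANGED:
            old, new, pending = message.parse_state_changed()
        elif message.type == Gst.MessageType.WARNING:
            warning, debug = message.parse_warning()
            print("warning:", warning.message, debug)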
Code example #2
    def _CreateGesElements(self):
        self.timeline = GES.Timeline.new()
        self.timeline.add_track(GES.VideoTrack.new())
        self.timeline.add_track(GES.AudioTrack.new())
        self.timeline.set_auto_transition(0)

        self._GesUpdateRestrictionCaps()

        self.pipeline = GES.Pipeline()
        self.pipeline.set_timeline(self.timeline)

        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self._GstOnMessage)

        self.layer_video = GES.Layer()
        self.layer_vertical_effect = GES.Layer()
        self.layer_vertical_effect.set_priority(1)
        self.layer_audio = GES.Layer()
        self.layer_audio.set_priority(2)
        self.timeline.add_layer(self.layer_video)
        self.timeline.add_layer(self.layer_vertical_effect)
        self.timeline.add_layer(self.layer_audio)

        self.project = self.timeline.get_asset()
        self.project.connect("asset-added", self._GesOnAssetAdded)
        self.project.connect("error-loading-asset", self._GesOnErrorLoadingAsset)
Code example #3
def play_timeline(timeline):
    pipeline = GES.Pipeline()
    pipeline.set_timeline(timeline)
    bus = pipeline.get_bus()
    bus.add_signal_watch()
    loop = GLib.MainLoop()
    bus.connect("message", bus_message_cb, loop, pipeline)
    pipeline.set_state(Gst.State.PLAYING)

    loop.run()
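The `bus_message_cb` used here is left undefined. A plausible sketch, assuming the callback should stop the pipeline and quit the main loop on end-of-stream or error; the extra arguments passed to connect() arrive as user data after the message:

import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst

def bus_message_cb(bus, message, loop, pipeline):
    # Quit the main loop once playback finishes or fails.
    if message.type == Gst.MessageType.EOS:
        pipeline.set_state(Gst.State.NULL)
        loop.quit()
    elif message.type == Gst.MessageType.ERROR:
        error, debug = message.parse_error()
        print("error:", error.message, debug)
        pipeline.set_state(Gst.State.NULL)
        loop.quit()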
Code example #4
    def __init__(self, uri):
        timeline = GES.Timeline.new_audio_video()
        layer = timeline.append_layer()
        layer.add_clip(GES.UriClip.new(uri))
        self.pipeline = pipeline = GES.Pipeline()
        pipeline.set_timeline(timeline)
        pipeline.set_state(Gst.State.PLAYING)
        bus = pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.bus_message_cb)

        self.loop = GLib.MainLoop()
Code example #5
if __name__ == "__main__":
    Gst.init(None)
    GES.init()

    timeline = GES.Timeline.new_audio_video()
    layer = GES.Layer()
    timeline.add_layer(layer)
    asset = GES.Asset.request(GES.TestClip, None)

    layer.add_asset(asset, 0 * Gst.SECOND, 0, 10 * Gst.SECOND,
                    GES.TrackType.UNKNOWN)

    timeline.commit()

    pipeline = GES.Pipeline()
    pipeline.set_timeline(timeline)

    format = [
        "video/quicktime,variant=iso", "video/x-h264",
        "audio/mpeg,mpegversion=1,layer=3", "mov"
    ]

    container_profile = encoderProfile(format[0], format[1], format[2])

    pipeline.set_render_settings(outputFile + "." + format[3],
                                 container_profile)
    pipeline.set_mode(GES.PipelineFlags.RENDER)
    pipeline.set_state(Gst.State.PLAYING)

    bus = pipeline.get_bus()
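The `encoderProfile` helper and the `outputFile` variable used above are not defined in this snippet. A hypothetical sketch of such a helper, assuming it builds the same kind of GstPbutils container profile as code example #7 below:

import gi
gi.require_version("Gst", "1.0")
gi.require_version("GstPbutils", "1.0")
from gi.repository import Gst, GstPbutils

def encoderProfile(container_caps, video_caps, audio_caps):
    # Build a container profile holding one video and one audio profile,
    # mirroring the GstPbutils usage shown in code example #7.
    container = GstPbutils.EncodingContainerProfile.new(
        "render-profile", None, Gst.Caps(container_caps), None)
    container.add_profile(GstPbutils.EncodingVideoProfile.new(
        Gst.Caps(video_caps), None, Gst.Caps("video/x-raw"), 0))
    container.add_profile(GstPbutils.EncodingAudioProfile.new(
        Gst.Caps(audio_caps), None, Gst.Caps("audio/x-raw"), 0))
    return container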
Code example #6
    def _create_pipeline(self, timeline):
        self.pipeline = GES.Pipeline()
        self.pipeline.set_timeline(timeline)
        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.bus_message_cb)
Code example #7
    def render(self, filename, progress_callback=None):
        # Works if source is a type
        self.progress_cb = progress_callback

        # FIXME: considering single-video for the moment
        media_uri = self.controller.get_default_media()
        media_uri = helper.path2uri(media_uri)

        logger.warning("Extracting clips from %s", media_uri)
        asset = GES.UriClipAsset.request_sync(media_uri)

        timeline = GES.Timeline.new_audio_video()
        layer = timeline.append_layer()

        start_on_timeline = 0

        self.total_duration = sum(a.fragment.duration * Gst.MSECOND
                                  for a in self.elements)
        clips = []
        for a in self.elements:
            start_position_asset = a.fragment.begin * Gst.MSECOND
            duration = a.fragment.duration * Gst.MSECOND
            # GES.TrackType.UNKNOWN => add every kind of stream to the timeline
            clips.append(
                layer.add_asset(asset, start_on_timeline, start_position_asset,
                                duration, GES.TrackType.UNKNOWN))
            start_on_timeline += duration

        timeline.commit()

        # Build the encoding pipeline
        pipeline = GES.Pipeline()
        pipeline.set_timeline(timeline)

        container_profile = \
            GstPbutils.EncodingContainerProfile.new("montage-profile",
                                                    "Pitivi encoding profile",
                                                    Gst.Caps("video/webm"),
                                                    None)
        video_profile = GstPbutils.EncodingVideoProfile.new(
            Gst.Caps("video/x-vp8"), None, Gst.Caps("video/x-raw"), 0)

        container_profile.add_profile(video_profile)

        audio_profile = GstPbutils.EncodingAudioProfile.new(
            Gst.Caps("audio/x-vorbis"), None, Gst.Caps("audio/x-raw"), 0)

        container_profile.add_profile(audio_profile)

        pipeline.set_render_settings(helper.path2uri(filename),
                                     container_profile)
        pipeline.set_mode(GES.PipelineFlags.RENDER)

        self.pipeline = pipeline
        logger.warning("Starting encoding")
        self.pipeline.set_state(Gst.State.PLAYING)

        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.bus_message_cb)
        GLib.timeout_add(300, self.duration_querier)
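The `duration_querier` scheduled with GLib.timeout_add is not shown. A minimal sketch, assuming it reports rendering progress through the `progress_cb` stored above (method shown outside its class for brevity; `Gst` is assumed to be imported as in the rest of the module):

    def duration_querier(self):
        # Poll the current position and report it as a fraction of the
        # precomputed total duration; returning True keeps the timeout alive.
        ok, position = self.pipeline.query_position(Gst.Format.TIME)
        if ok and self.total_duration and self.progress_cb is not None:
            self.progress_cb(position / self.total_duration)
        return True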
Code example #8
    def __init__(self, world, socketIO):
        # member initialization
        self.world = world
        self.socketIO = socketIO
        self.active_dialogs = []
        self.users = []
        self.label_queue = Queue()
        self.curr_label = None
        self.fullscreen = False
        self.girl = None

        #GES stuff
        self.timeline = GES.Timeline.new_audio_video()
        self.layer = GES.Layer()
        self.timeline.add_layer(self.layer)
        self.open_file(self.world.video_path)

        self.pipeline = GES.Pipeline()
        self.pipeline.set_timeline(self.timeline)
        self.pipeline.set_state(Gst.State.PAUSED)

        # GES bins
        sinkbin = Gst.Bin.new("sinkbin")
        convert1 = Gst.ElementFactory.make("videoconvert")
        sinkbin.add(convert1)
        pad = convert1.get_static_pad("sink")
        ghostpad = Gst.GhostPad.new("sink", pad)
        sinkbin.add_pad(ghostpad)
        cairooverlay = Gst.ElementFactory.make("cairooverlay")
        sinkbin.add(cairooverlay)
        cairooverlay.connect('draw', self.on_draw)
        convert1.link(cairooverlay)
        convert2 = Gst.ElementFactory.make("videoconvert")
        sinkbin.add(convert2)
        cairooverlay.link(convert2)
        videosink = Gst.ElementFactory.make("xvimagesink")
        sinkbin.add(videosink)
        convert2.link(videosink)
        self.pipeline.preview_set_video_sink(sinkbin)

        # GTK window stuff
        self.window = Gtk.Window()
        self.window.set_title("Averoid Adventures")

        accel = Gtk.AccelGroup()
        accel.connect(Gdk.keyval_from_name('Q'), 0, 0, self.on_q_pressed)
        accel.connect(Gdk.keyval_from_name('F'), 0, 0, self.on_f_pressed)
        self.window.add_accel_group(accel)

        self.window.connect("delete-event", self.window_closed)

        self.window.show_all()
        self.window.realize()
        xid = self.window.get_window().get_xid()
        videosink.set_window_handle(xid)

        #state machine stuff
        self.state = STATE_IDLE
        self.state_funcs = {
            STATE_IDLE: [self.enter_idle_cb, self.idle_cb, self.leave_idle_cb],
            STATE_CHOICE:
            [self.enter_choice_cb, self.choice_cb, self.leave_choice_cb],
            STATE_JUMP: [self.enter_jump_cb, self.jump_cb, self.leave_jump_cb],
            STATE_SPORTSBALL: [
                self.enter_sportsball_cb, self.sportsball_cb,
                self.leave_sportsball_cb
            ],
            STATE_POEM: [self.enter_poem_cb, self.poem_cb, self.leave_poem_cb]
        }
        self.end_label_time = -1  #TODO: better solution

        time.sleep(1)
        self.jump_label(self.world.current_label)
        self.pipeline.set_state(Gst.State.PLAYING)
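The `on_draw` callback connected to the cairooverlay element is not included here. A hypothetical sketch following cairooverlay's "draw" signal signature (overlay, cairo context, timestamp, duration); the rendered text is illustrative only:

    def on_draw(self, overlay, context, timestamp, duration):
        # Paint a simple caption onto each frame using the cairo context
        # supplied by cairooverlay.
        context.set_source_rgb(1.0, 1.0, 1.0)
        context.set_font_size(24)
        context.move_to(20, 40)
        context.show_text("Averoid Adventures")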