def configure_pipeline(self, pipeline, properties):
    """Configure the disker component from its pipeline and properties.

    Reads the recording-related properties (symlinks, filename template,
    iCal schedule, index writing, stream-marker handling, TDT syncing)
    and wires up the multifdsink signal handlers and the data probe
    accordingly.

    Raises errors.ComponentSetupHandledError when an iCal schedule is
    requested but the required python modules are missing.
    """
    self.debug('configure_pipeline for disker')
    self._clock = pipeline.get_clock()
    # Optional symlinks pointing at the last finished recording and the
    # recording currently being written.
    self._symlinkToLastRecording = \
        properties.get('symlink-to-last-recording', None)
    self._symlinkToCurrentRecording = \
        properties.get('symlink-to-current-recording', None)
    self._recordAtStart = properties.get('start-recording', True)
    # Default filename template: "<component name>.<strftime pattern>"
    # (the %% escapes survive the first % formatting pass).
    self._defaultFilenameTemplate = properties.get(
        'filename', '%s.%%Y%%m%%d-%%H%%M%%S' % self.getName())
    self._startFilenameTemplate = self._defaultFilenameTemplate
    icalfn = properties.get('ical-schedule')
    if self._can_schedule and icalfn:
        self.scheduleRecordings(open(icalfn, 'r'))
    elif icalfn:
        # ical schedule is set, but self._can_schedule is False

        def missingModule(moduleName):
            # Post an Error message telling the admin which python
            # module is missing for iCal scheduling.
            m = messages.Error(T_(
                N_("An iCal file has been specified for scheduling, "
                   "but the '%s' module is not installed.\n"),
                moduleName),
                mid='error-python-%s' % moduleName)
            documentation.messageAddPythonInstall(m, moduleName)
            self.debug(m)
            self.addMessage(m)

        if not eventcalendar.HAS_ICALENDAR:
            missingModule('icalendar')
        if not eventcalendar.HAS_DATEUTIL:
            missingModule('dateutil')
        # self._can_schedule is False, so one of the above surely happened
        raise errors.ComponentSetupHandledError()

    self.writeIndex = properties.get('write-index', False)
    self.reactToMarks = properties.get('react-to-stream-markers', False)
    self.syncOnTdt = properties.get('sync-on-tdt', False)
    self.timeOverlap = properties.get('time-overlap', 0)

    sink = self.get_element('fdsink')
    # Older multifdsink versions lack resend-streamheader; fall back to
    # resending the streamheader from the caps notification handler.
    if gstreamer.element_factory_has_property('multifdsink',
                                              'resend-streamheader'):
        sink.set_property('resend-streamheader', False)
    else:
        self.debug("resend-streamheader property not available, "
                   "resending streamheader when it changes in the caps")
        sink.get_pad('sink').connect('notify::caps', self._notify_caps_cb)
    # connect to client-removed so we can detect errors in file writing
    sink.connect('client-removed', self._client_removed_cb)

    if self.writeIndex:
        sink.connect('client-added', self._client_added_cb)

    if self.reactToMarks:
        pfx = properties.get('stream-marker-filename-prefix', '%03d.')
        self._markerPrefix = pfx

    # The data probe is only needed when at least one of the features
    # that inspect the stream is enabled.
    if self.reactToMarks or self.writeIndex or self.syncOnTdt:
        sink.get_pad("sink").add_data_probe(self._src_pad_probe)
def configure_pipeline(self, pipeline, properties):
    """Configure the disker component from its pipeline and properties.

    Reads symlink, filename-template, scheduling, index, stream-marker
    and TDT-sync properties, then connects the multifdsink signal
    handlers and data probe needed by the enabled features.

    Raises errors.ComponentSetupHandledError when an iCal schedule is
    configured but the python modules it needs are not installed.
    """
    self.debug('configure_pipeline for disker')
    self._clock = pipeline.get_clock()
    # Optional symlinks to the last finished and the current recording.
    self._symlinkToLastRecording = \
        properties.get('symlink-to-last-recording', None)
    self._symlinkToCurrentRecording = \
        properties.get('symlink-to-current-recording', None)
    self._recordAtStart = properties.get('start-recording', True)
    # "<component name>.<strftime pattern>" unless overridden by the
    # 'filename' property.
    self._defaultFilenameTemplate = properties.get(
        'filename', '%s.%%Y%%m%%d-%%H%%M%%S' % self.getName())
    self._startFilenameTemplate = self._defaultFilenameTemplate
    icalfn = properties.get('ical-schedule')
    if self._can_schedule and icalfn:
        self.scheduleRecordings(open(icalfn, 'r'))
    elif icalfn:
        # ical schedule is set, but self._can_schedule is False

        def missingModule(moduleName):
            # Report the missing python module to the admin.
            m = messages.Error(T_(N_(
                "An iCal file has been specified for scheduling, "
                "but the '%s' module is not installed.\n"),
                moduleName),
                mid='error-python-%s' % moduleName)
            documentation.messageAddPythonInstall(m, moduleName)
            self.debug(m)
            self.addMessage(m)

        if not eventcalendar.HAS_ICALENDAR:
            missingModule('icalendar')
        if not eventcalendar.HAS_DATEUTIL:
            missingModule('dateutil')
        # self._can_schedule is False, so one of the above surely happened
        raise errors.ComponentSetupHandledError()

    self.writeIndex = properties.get('write-index', False)
    self.reactToMarks = properties.get('react-to-stream-markers', False)
    self.syncOnTdt = properties.get('sync-on-tdt', False)
    self.timeOverlap = properties.get('time-overlap', 0)

    sink = self.get_element('fdsink')
    # Older multifdsink versions lack resend-streamheader; fall back to
    # resending the streamheader when the caps change.
    if gstreamer.element_factory_has_property('multifdsink',
                                              'resend-streamheader'):
        sink.set_property('resend-streamheader', False)
    else:
        self.debug("resend-streamheader property not available, "
                   "resending streamheader when it changes in the caps")
        sink.get_pad('sink').connect('notify::caps', self._notify_caps_cb)
    # connect to client-removed so we can detect errors in file writing
    sink.connect('client-removed', self._client_removed_cb)

    if self.writeIndex:
        sink.connect('client-added', self._client_added_cb)

    if self.reactToMarks:
        pfx = properties.get('stream-marker-filename-prefix', '%03d.')
        self._markerPrefix = pfx

    # Only install the data probe when some feature actually needs it.
    if self.reactToMarks or self.writeIndex or self.syncOnTdt:
        sink.get_pad("sink").add_data_probe(self._src_pad_probe)
def setup_burst_mode(self, sink):
    """Configure multifdsink's burst-on-connect behaviour on *sink*.

    Depending on configuration, newly connecting clients either get a
    time-based burst, a byte-based burst, or a simple start from the
    latest keyframe; without burst-on-connect the default sync method
    is used.
    """
    if self.burst_on_connect:
        if self.burst_time and gstreamer.element_factory_has_property(
                "multifdsink", "units-max"):
            self.debug("Configuring burst mode for %f second burst",
                       self.burst_time)
            # Set a burst for configurable minimum time, plus extra to
            # start from a keyframe if needed.
            sink.set_property("sync-method", 4)  # burst-keyframe
            sink.set_property("burst-unit", 2)  # time
            sink.set_property("burst-value",
                              long(self.burst_time * Gst.SECOND))
            # We also want to ensure that we have sufficient data available
            # to satisfy this burst; and an appropriate maximum, all
            # specified in units of time.
            sink.set_property("time-min",
                              long((self.burst_time + 5) * Gst.SECOND))
            sink.set_property("unit-type", 2)  # time
            sink.set_property("units-soft-max",
                              long((self.burst_time + 8) * Gst.SECOND))
            sink.set_property("units-max",
                              long((self.burst_time + 10) * Gst.SECOND))
        elif self.burst_size:
            self.debug("Configuring burst mode for %d kB burst",
                       self.burst_size)
            # If we have a burst-size set, use modern
            # needs-recent-multifdsink behaviour to have complex bursting.
            # In this mode, we burst a configurable minimum, plus extra
            # so we start from a keyframe (or less if we don't have a
            # keyframe available)
            sink.set_property("sync-method", "burst-keyframe")
            sink.set_property("burst-unit", "bytes")
            sink.set_property("burst-value", self.burst_size * 1024)
            # To use burst-on-connect, we need to ensure that multifdsink
            # has a minimum amount of data available - assume 512 kB beyond
            # the burst amount so that we should have a keyframe available
            sink.set_property("bytes-min", (self.burst_size + 512) * 1024)
            # And then we need a maximum still further above that - the
            # exact value doesn't matter too much, but we want it
            # reasonably small to limit memory usage. multifdsink doesn't
            # give us much control here, we can only specify the max
            # values in buffers. We assume each buffer is close enough
            # to 4kB - true for asf and ogg, at least
            sink.set_property("buffers-soft-max",
                              (self.burst_size + 1024) / 4)
            sink.set_property("buffers-max", (self.burst_size + 2048) / 4)
        else:
            # Old behaviour; simple burst-from-latest-keyframe
            self.debug("simple burst-on-connect, setting sync-method 2")
            sink.set_property("sync-method", 2)
            sink.set_property("buffers-soft-max", 250)
            sink.set_property("buffers-max", 500)
    else:
        self.debug("no burst-on-connect, setting sync-method 0")
        sink.set_property("sync-method", self.defaultSyncMethod)
        sink.set_property("buffers-soft-max", 250)
        sink.set_property("buffers-max", 500)
def init(self):
    """Validate required GStreamer elements and build the eater template.

    Raises errors.MissingElementError when the coreelements plugin
    (which provides 'identity') is not installed.  Disables the
    imperfect-timestamp/offset checks when the installed identity
    element is too old to support them, and appends the corresponding
    identity options to self.EATER_TMPL.
    """
    if not gstreamer.get_plugin_version('coreelements'):
        raise errors.MissingElementError('identity')

    # The imperfection checks need an identity element recent enough to
    # expose check-imperfect-timestamp (GStreamer >= 0.10.13).
    has_checks = gstreamer.element_factory_has_property(
        'identity', 'check-imperfect-timestamp')
    if not has_checks:
        self.checkTimestamp = False
        self.checkOffset = False
        self.addMessage(
            messages.Info(T_(N_(
                "You will get more debugging information "
                "if you upgrade to GStreamer 0.10.13 or later."))))

    # Assemble the eater pipeline template from its parts.
    parts = [self.FDSRC_TMPL, ' %(queue)s ', self.DEPAY_TMPL]
    if self.checkTimestamp or self.checkOffset:
        parts.append(" ! identity name=%(name)s-identity silent=TRUE")
        if self.checkTimestamp:
            parts.append(" check-imperfect-timestamp=1")
        if self.checkOffset:
            parts.append(" check-imperfect-offset=1")
    self.EATER_TMPL = ''.join(parts)
def init(self):
    """Check for required GStreamer elements and build EATER_TMPL.

    Raises errors.MissingElementError if the coreelements plugin
    (provider of the 'identity' element) is absent; disables the
    imperfect-timestamp/offset checks when identity is too old.
    """
    if not gstreamer.get_plugin_version('coreelements'):
        raise errors.MissingElementError('identity')
    # identity only gained check-imperfect-timestamp in GStreamer
    # 0.10.13; without it we cannot run the imperfection checks.
    if not gstreamer.element_factory_has_property(
            'identity', 'check-imperfect-timestamp'):
        self.checkTimestamp = False
        self.checkOffset = False
        self.addMessage(
            messages.Info(
                T_(
                    N_("You will get more debugging information "
                       "if you upgrade to GStreamer 0.10.13 or later."))))
    # Base template: fdsrc ! queue ! depayloader.
    self.EATER_TMPL = self.FDSRC_TMPL + ' %(queue)s ' + self.DEPAY_TMPL
    # Append an identity element carrying whichever checks are enabled.
    if self.checkTimestamp or self.checkOffset:
        self.EATER_TMPL += " ! identity name=%(name)s-identity silent=TRUE"
    if self.checkTimestamp:
        self.EATER_TMPL += " check-imperfect-timestamp=1"
    if self.checkOffset:
        self.EATER_TMPL += " check-imperfect-offset=1"
def _configure_sink(self, sink):
    """Configure the multifdsink: burst mode, timeout, signal handlers."""
    self.setup_burst_mode(sink)

    # Older multifdsink versions do not have resend-streamheader; the
    # debug message notes the fallback of resending the streamheader
    # when it changes in the caps.
    if gstreamer.element_factory_has_property("multifdsink",
                                              "resend-streamheader"):
        sink.set_property("resend-streamheader", False)
    else:
        self.debug(
            "resend-streamheader property not available, "
            "resending streamheader when it changes in the caps"
        )

    sink.set_property("timeout", self.timeout)

    handler_table = (
        ("deep-notify::caps", self._notify_caps_cb),
        # made threadsafe using idle_add in the handler
        ("client-added", self._client_added_handler),
        # We now require a sufficiently recent multifdsink anyway that
        # we can use the new client-fd-removed signal
        ("client-fd-removed", self._client_fd_removed_cb),
        ("client-removed", self._client_removed_cb),
    )
    for signal_name, callback in handler_table:
        sink.connect(signal_name, callback)

    # Reset the caps attribute; presumably refreshed by the caps
    # notification handler — TODO confirm against _notify_caps_cb.
    sink.caps = None
def _configure_sink(self, sink):
    """Configure the streamer's multifdsink element.

    Sets up burst mode, the client timeout and the signal handlers
    used for tracking clients and caps changes.
    """
    self.setup_burst_mode(sink)
    # Older multifdsink versions lack resend-streamheader; the debug
    # message notes the fallback behaviour.
    if gstreamer.element_factory_has_property('multifdsink',
                                              'resend-streamheader'):
        sink.set_property('resend-streamheader', False)
    else:
        self.debug("resend-streamheader property not available, "
                   "resending streamheader when it changes in the caps")
    sink.set_property('timeout', self.timeout)
    sink.connect('deep-notify::caps', self._notify_caps_cb)
    # these are made threadsafe using idle_add in the handler
    sink.connect('client-added', self._client_added_handler)
    # We now require a sufficiently recent multifdsink anyway that we can
    # use the new client-fd-removed signal
    sink.connect('client-fd-removed', self._client_fd_removed_cb)
    sink.connect('client-removed', self._client_removed_cb)
    # Reset caps; presumably updated later by _notify_caps_cb — TODO
    # confirm.
    sink.caps = None
def _use_audiorate(self):
    """Report whether the installed audiorate element has 'skip-to-first'."""
    has_skip_to_first = gstreamer.element_factory_has_property(
        'audiorate', 'skip-to-first')
    return has_skip_to_first
def testHasProperty(self):
    """fakesrc exposes 'num-buffers' but not a made-up property name."""
    exists = gstreamer.element_factory_has_property('fakesrc',
                                                    'num-buffers')
    self.failUnless(exists)
    missing = gstreamer.element_factory_has_property('fakesrc',
                                                     'i-do-not-exist')
    self.failIf(missing)
def testFakeSrc(self):
    """fakesrc has a 'sync' property; the truncated name 'ync' does not match."""
    self.failUnless(
        gstreamer.element_factory_has_property('fakesrc', 'sync'))
    self.failIf(
        gstreamer.element_factory_has_property('fakesrc', 'ync'))
from twisted.python import failure from twisted.internet import defer, reactor, interfaces, gtk3reactor from twisted.web import client, error from flumotion.common import testsuite from flumotion.common import log, errors from flumotion.common.planet import moods from flumotion.component.converters.video import video from flumotion.common import gstreamer from flumotion.test import comptest attr = testsuite.attr if not gstreamer.element_factory_exists('deinterlace')\ or not gstreamer.element_factory_has_property('deinterlace', 'method'): skip="GStreamer element 'deinterlace' is too old or doesn't exists" class TestVideoConverter(comptest.CompTestTestCase, log.Loggable): def setUp(self): self.tp = comptest.ComponentTestHelper() prod = ('videotestsrc is-live=true ! ' 'video/x-raw-rgb,framerate=(fraction)1/2,width=320,height=240,' 'pixel-aspect-ratio=1/2,interlaced=true') self.s = 'flumotion.component.converters.video.video.Converter' self.prod = comptest.pipeline_src(prod) def tearDown(self):
def _use_audiorate(self):
    """Return True if the installed audiorate element has 'skip-to-first'."""
    return gstreamer.element_factory_has_property("audiorate", "skip-to-first")
def setup_burst_mode(self, sink):
    """Configure multifdsink's burst-on-connect behaviour on *sink*.

    New clients either burst a configured minimum of time, a configured
    minimum of bytes, or simply start from the latest keyframe; when
    burst-on-connect is off the default sync method is applied.
    """
    if self.burst_on_connect:
        if self.burst_time and \
                gstreamer.element_factory_has_property('multifdsink',
                                                       'units-max'):
            self.debug("Configuring burst mode for %f second burst",
                       self.burst_time)
            # Set a burst for configurable minimum time, plus extra to
            # start from a keyframe if needed.
            sink.set_property('sync-method', 4)  # burst-keyframe
            sink.set_property('burst-unit', 2)  # time
            sink.set_property('burst-value',
                              long(self.burst_time * gst.SECOND))
            # We also want to ensure that we have sufficient data available
            # to satisfy this burst; and an appropriate maximum, all
            # specified in units of time.
            sink.set_property('time-min',
                              long((self.burst_time + 5) * gst.SECOND))
            sink.set_property('unit-type', 2)  # time
            sink.set_property('units-soft-max',
                              long((self.burst_time + 8) * gst.SECOND))
            sink.set_property('units-max',
                              long((self.burst_time + 10) * gst.SECOND))
        elif self.burst_size:
            self.debug("Configuring burst mode for %d kB burst",
                       self.burst_size)
            # If we have a burst-size set, use modern
            # needs-recent-multifdsink behaviour to have complex bursting.
            # In this mode, we burst a configurable minimum, plus extra
            # so we start from a keyframe (or less if we don't have a
            # keyframe available)
            sink.set_property('sync-method', 'burst-keyframe')
            sink.set_property('burst-unit', 'bytes')
            sink.set_property('burst-value', self.burst_size * 1024)
            # To use burst-on-connect, we need to ensure that multifdsink
            # has a minimum amount of data available - assume 512 kB beyond
            # the burst amount so that we should have a keyframe available
            sink.set_property('bytes-min', (self.burst_size + 512) * 1024)
            # And then we need a maximum still further above that - the
            # exact value doesn't matter too much, but we want it
            # reasonably small to limit memory usage. multifdsink doesn't
            # give us much control here, we can only specify the max
            # values in buffers. We assume each buffer is close enough
            # to 4kB - true for asf and ogg, at least
            sink.set_property('buffers-soft-max',
                              (self.burst_size + 1024) / 4)
            sink.set_property('buffers-max', (self.burst_size + 2048) / 4)
        else:
            # Old behaviour; simple burst-from-latest-keyframe
            self.debug("simple burst-on-connect, setting sync-method 2")
            sink.set_property('sync-method', 2)
            sink.set_property('buffers-soft-max', 250)
            sink.set_property('buffers-max', 500)
    else:
        self.debug("no burst-on-connect, setting sync-method 0")
        sink.set_property('sync-method', self.defaultSyncMethod)
        sink.set_property('buffers-soft-max', 250)
        sink.set_property('buffers-max', 500)
from twisted.python import failure
from twisted.internet import defer, reactor, interfaces, gtk2reactor
from twisted.web import client, error

from flumotion.common import testsuite
from flumotion.common import log, errors
from flumotion.common.planet import moods
from flumotion.component.converters.video import video
from flumotion.common import gstreamer
from flumotion.test import comptest

attr = testsuite.attr

# Skip this whole test module when the installed deinterlace element is
# missing, or too old to expose the 'method' property.
if not gstreamer.element_factory_exists('deinterlace')\
        or not gstreamer.element_factory_has_property('deinterlace',
                                                      'method'):
    skip = "GStreamer element 'deinterlace' is too old or doesn't exists"


class TestVideoConverter(comptest.CompTestTestCase, log.Loggable):
    """Component tests for the video Converter."""

    def setUp(self):
        self.tp = comptest.ComponentTestHelper()
        # Live interlaced test video pipeline used as the producer.
        prod = ('videotestsrc is-live=true ! '
                'video/x-raw-rgb,framerate=(fraction)1/2,width=320,height=240,'
                'pixel-aspect-ratio=1/2,interlaced=true')
        # Fully qualified name of the converter component under test.
        self.s = 'flumotion.component.converters.video.video.Converter'
        self.prod = comptest.pipeline_src(prod)

    def tearDown(self):
        comptest.cleanup_reactor()