def test_connectWithIsDeprecated(self):
    """
    L{PosixReactorBase} implements the deprecated L{IReactorArbitrary},
    and L{PosixReactorBase.connectWith} is a part of that interface. To
    avoid unnecessary deprecation warnings when importing posixbase, the
    L{twisted.internet.interfaces._IReactorArbitrary} alias that doesn't
    have the deprecation warning is imported, and instead
    L{PosixReactorBase.connectWith} generates its own deprecation
    warning.
    """
    class DummyConnector:
        """Stand-in connector: accepts any arguments, connect() no-op."""

        def __init__(self, *args, **kw):
            pass

        def connect(self):
            pass

    trivial = TrivialReactor()
    trivial.connectWith(DummyConnector)

    emitted = self.flushWarnings([self.test_connectWithIsDeprecated])
    # Exactly one DeprecationWarning with the documented message.
    self.assertEqual(1, len(emitted))
    self.assertEqual(DeprecationWarning, emitted[0]['category'])
    self.assertEqual(
        "connectWith is deprecated since Twisted 10.1. "
        "See IReactorFDSet.",
        emitted[0]['message'])
def startConnecting(self, host, port, authenticator, timeout=30,
                    bindAddress=None):
    """Optional helper method to connect to a remote feed server.

    This method starts a client factory connecting via a
    L{PassableClientConnector}. It offers the possibility of cancelling
    an in-progress connection via the stopConnecting() method.

    @param host: the remote host name
    @type  host: str
    @param port: the tcp port on which to connect
    @type  port: int
    @param authenticator: the authenticator, normally provided by the
                          worker
    @type  authenticator: L{flumotion.twisted.pb.Authenticator}

    @returns: a deferred that will fire with the remote reference, once
              we have authenticated.
    """
    # Only one outgoing connection at a time is supported.
    assert self._factory is None
    self._factory = FeedClientFactory(self)
    # connectWith lets us plug in the custom PassableClientConnector
    # instead of a plain TCP connector.
    reactor.connectWith(PassableClientConnector, host, port,
                        self._factory, timeout, bindAddress)
    return self._factory.login(authenticator)
def updatePorterDetails(self, path, username, password):
    """Provide a new set of porter login information, for when we're
    in slave mode and the porter changes.

    If we're currently connected, this won't disconnect - it'll just
    change the information so that next time we try and connect we'll
    use the new ones
    """
    # Porter details only make sense when we are slaved to a porter.
    if self.type != 'slave':
        raise errors.WrongStateError(
            "Can't specify porter details in master mode")

    self._porterUsername = username
    self._porterPassword = password
    creds = credentials.UsernamePassword(self._porterUsername,
                                         self._porterPassword)
    self._pbclient.startLogin(creds, self._pbclient.medium)

    if path == self._porterPath:
        # Same socket path: nothing further to do.
        return

    # If we've changed paths, we must do some extra work: abandon the
    # old connector and reconnect on the new socket path.
    self.debug("Changing porter login to use \"%s\"", path)
    self._porterPath = path
    self._pbclient.stopTrying()
    self._pbclient.resetDelay()
    reactor.connectWith(fdserver.FDConnector, self._porterPath,
                        self._pbclient, 10, checkPID=False)
def test_connectWithIsDeprecated(self):
    """
    L{PosixReactorBase} implements the deprecated L{IReactorArbitrary},
    and L{PosixReactorBase.connectWith} is a part of that interface. To
    avoid unnecessary deprecation warnings when importing posixbase, the
    L{twisted.internet.interfaces._IReactorArbitrary} alias that doesn't
    have the deprecation warning is imported, and instead
    L{PosixReactorBase.connectWith} generates its own deprecation
    warning.
    """
    class fakeConnector:
        # Minimal IConnector stand-in: accepts any constructor
        # arguments and has a no-op connect().
        def __init__(self, *args, **kw):
            pass

        def connect(self):
            pass

    reactor = TrivialReactor()
    reactor.connectWith(fakeConnector)

    warnings = self.flushWarnings([self.test_connectWithIsDeprecated])
    # assertEquals is a deprecated alias of assertEqual; use the
    # canonical spelling (consistent with the sibling test).
    self.assertEqual(len(warnings), 1)
    self.assertEqual(warnings[0]["category"], DeprecationWarning)
    self.assertEqual(
        "connectWith is deprecated since Twisted 10.1. "
        "See IReactorFDSet.",
        warnings[0]["message"])
def updatePorterDetails(self, path, username, password):
    """Provide a new set of porter login information, for when we're
    in slave mode and the porter changes.

    If we're currently connected, this won't disconnect - it'll just
    change the information so that next time we try and connect we'll
    use the new ones
    """
    if self.type == 'slave':
        self._porterUsername = username
        self._porterPassword = password
        newCreds = credentials.UsernamePassword(self._porterUsername,
                                                self._porterPassword)
        self._pbclient.startLogin(newCreds, self._pbclient.medium)

        pathChanged = path != self._porterPath
        if pathChanged:
            # A different socket path makes the current (re)connection
            # attempts useless: drop them and dial the new path.
            self.debug("Changing porter login to use \"%s\"", path)
            self._porterPath = path
            self._pbclient.stopTrying()
            self._pbclient.resetDelay()
            reactor.connectWith(fdserver.FDConnector, self._porterPath,
                                self._pbclient, 10, checkPID=False)
    else:
        raise errors.WrongStateError(
            "Can't specify porter details in master mode")
def do_setup(self):
    """Set up the HTTP site and start serving.

    In 'slave' mode, log in to the porter over its socket and register
    our mountpoint(s); otherwise listen on our own TCP port directly.

    @returns: None on success, or a failed deferred
              (ComponentSetupHandledError) when the TCP port is taken.
    """
    root = self._get_root()
    self._site = self.siteClass(resource=root)
    if self.type == 'slave':
        # Streamer is slaved to a porter.
        # We have two things we want to do in parallel:
        #  - ParseLaunchComponent.do_start()
        #  - log in to the porter, then register our mountpoint with
        #    the porter.
        # So, we return a DeferredList with a deferred for each of
        # these tasks. The second one's a bit tricky: we pass a dummy
        # deferred to our PorterClientFactory that gets fired once
        # we've done all of the tasks the first time (it's an
        # automatically-reconnecting client factory, and we only fire
        # this deferred the first time)
        self._porterDeferred = d = defer.Deferred()
        mountpoints = [self.mountPoint]
        if self.multi_files:
            # Multi-file mode: register the mountpoint as a prefix
            # rather than as an exact mountpoint.
            self._pbclient = porterclient.HTTPPorterClientFactory(
                self._site, [], d, prefixes=mountpoints)
        else:
            self._pbclient = porterclient.HTTPPorterClientFactory(
                self._site, mountpoints, d)
        creds = credentials.UsernamePassword(self._porterUsername,
                                             self._porterPassword)
        self._pbclient.startLogin(creds, self._pbclient.medium)
        self.info("Starting porter login at \"%s\"", self._porterPath)
        # This will eventually cause d to fire
        reactor.connectWith(fdserver.FDConnector, self._porterPath,
                            self._pbclient, 10, checkPID=False)
    else:
        # Streamer is standalone.
        try:
            iface = self.iface or ""
            self.info('Listening on port %d, interface=%r',
                      self.port, iface)
            self._tport = reactor.listenTCP(self.port, self._site,
                                            interface=iface)
        except error.CannotListenError:
            # Port already taken: report the error and go sad.
            t = 'Port %d is not available.' % self.port
            self.warning(t)
            m = messages.Error(
                T_(N_("Network error: TCP port %d is not available."),
                   self.port))
            self.addMessage(m)
            self.setMood(moods.sad)
            return defer.fail(errors.ComponentSetupHandledError(t))
def _updatePath(self, path):
    """Switch the porter connection over to a new socket path.

    No-op when the path is unchanged; otherwise the old reconnecting
    connector is stopped and a fresh connection to the new path is
    started.
    """
    if path == self._porterPath:
        # Same path: keep the existing connection attempts.
        return

    self._porterPath = path
    client = self._pbclient
    # Stop trying to connect with the old connector before dialing the
    # new socket.
    client.stopTrying()
    client.resetDelay()
    reactor.connectWith(fdserver.FDConnector, path, client, 10,
                        checkPID=False)
def do_setup(self):
    """Set up the HTTP site and start serving.

    In 'slave' mode, log in to the porter over its socket and register
    our mountpoint(s); otherwise listen on our own TCP port directly.

    @returns: None on success, or a failed deferred
              (ComponentSetupHandledError) when the TCP port is taken.
    """
    root = self._get_root()
    self._site = self.siteClass(resource=root)
    if self.type == 'slave':
        # Streamer is slaved to a porter.
        # We have two things we want to do in parallel:
        #  - ParseLaunchComponent.do_start()
        #  - log in to the porter, then register our mountpoint with
        #    the porter.
        # So, we return a DeferredList with a deferred for each of
        # these tasks. The second one's a bit tricky: we pass a dummy
        # deferred to our PorterClientFactory that gets fired once
        # we've done all of the tasks the first time (it's an
        # automatically-reconnecting client factory, and we only fire
        # this deferred the first time)
        self._porterDeferred = d = defer.Deferred()
        mountpoints = [self.mountPoint]
        if self.multi_files:
            # Multi-file mode: register the mountpoint as a prefix
            # rather than as an exact mountpoint.
            self._pbclient = porterclient.HTTPPorterClientFactory(
                self._site, [], d, prefixes=mountpoints)
        else:
            self._pbclient = porterclient.HTTPPorterClientFactory(
                self._site, mountpoints, d)
        creds = credentials.UsernamePassword(self._porterUsername,
                                             self._porterPassword)
        self._pbclient.startLogin(creds, self._pbclient.medium)
        self.info("Starting porter login at \"%s\"", self._porterPath)
        # This will eventually cause d to fire
        reactor.connectWith(
            fdserver.FDConnector, self._porterPath,
            self._pbclient, 10, checkPID=False)
    else:
        # Streamer is standalone.
        try:
            iface = self.iface or ""
            self.info('Listening on port %d, interface=%r',
                      self.port, iface)
            self._tport = reactor.listenTCP(
                self.port, self._site, interface=iface)
        except error.CannotListenError:
            # Port already taken: report the error and go sad.
            t = 'Port %d is not available.' % self.port
            self.warning(t)
            m = messages.Error(T_(N_(
                "Network error: TCP port %d is not available."),
                self.port))
            self.addMessage(m)
            self.setMood(moods.sad)
            return defer.fail(errors.ComponentSetupHandledError(t))
def main(args):
    """Entry point for the flumotion-job process.

    Parses the command line, connects to the worker over the given
    socket, runs the reactor until shutdown, and returns 0.
    """
    parser = options.OptionParser(domain="flumotion-job")
    log.debug('job', 'Parsing arguments (%r)' % ', '.join(args))
    opts, args = parser.parse_args(args)

    # check if a config file was specified; if so, parse config and copy over
    if len(args) != 3:
        parser.error(
            "must pass an avatarId and a path to the socket: %r" % args)
    jobId, socketPath = args[1], args[2]

    # log our standardized starting marker
    log.info('job', "Starting job '%s'" % jobId)

    # register all package paths (FIXME: this should go away when
    # components and all deps come from manager)
    # this is still necessary so that code from other projects can be
    # imported
    from flumotion.common import setup
    setup.setupPackagePath()

    log.info('job', 'Connecting to worker on socket %s' % (socketPath))
    clientFactory = job.JobClientFactory(jobId)
    reactor.connectWith(fdserver.FDConnector, socketPath, clientFactory,
                        10, checkPID=False)
    reactor.addSystemEventTrigger('before', 'shutdown',
                                  clientFactory.medium.shutdownHandler)

    # log our standardized started marker
    log.info('job', "Started job '%s'" % jobId)

    reactor.run()

    # log our standardized stopping marker
    log.info('job', "Stopping job '%s'" % jobId)
    # log our standardized stopped marker
    log.info('job', "Stopped job '%s'" % jobId)
    return 0
def connectWith(self, connectorType, *args, **kw):
    """
    Start an instance of the given C{connectorType} connecting.

    @type connectorType: type which implements C{IConnector}
    """
    # Record the request; extraConnectors is replayed on startup.
    self.extraConnectors.append((connectorType, args, kw))
    if not self.running:
        return
    from twisted.internet import reactor
    return reactor.connectWith(connectorType, *args, **kw)
def main(args):
    """Entry point for the flumotion-job process (Python 2 variant with
    optional statprof profiling).

    Parses the command line and connects to the worker over the given
    socket; when FLU_PROFILE is set, starts statprof and arranges for
    the profile to be displayed at reactor shutdown.
    """
    # NOTE(review): unlike the sibling main() variants in this file,
    # this one ends after the profiling setup (no reactor.run() /
    # return visible) — confirm the remainder lives past this chunk.
    parser = OptionParser(domain="flumotion-job")
    log.debug('job', 'Parsing arguments (%r)' % ', '.join(args))
    options, args = parser.parse_args(args)

    # check if a config file was specified; if so, parse config and copy over
    if len(args) != 3:
        parser.error("must pass an avatarId and a path to the socket: %r"
                     % args)
    avatarId = args[1]
    socket = args[2]

    # log our standardized starting marker
    log.info('job', "Starting job '%s'" % avatarId)

    # register all package paths (FIXME: this should go away when
    # components and all deps come from manager)
    # this is still necessary so that code from other projects can be imported
    from flumotion.common import setup
    setup.setupPackagePath()

    log.info('job', 'Connecting to worker on socket %s' % (socket))
    job_factory = job.JobClientFactory(avatarId)
    reactor.connectWith(fdserver.FDConnector, socket, job_factory, 10,
                        checkPID=False)

    # should probably move this to boot
    if 'FLU_PROFILE' in os.environ:
        try:
            import statprof
            statprof.start()
            print 'Profiling started.'

            # Dump the collected profile when the reactor shuts down.
            def stop_profiling():
                statprof.stop()
                statprof.display()

            reactor.addSystemEventTrigger('before', 'shutdown',
                                          stop_profiling)
        except ImportError, e:
            print ('Profiling requested, but statprof is not available (%s)'
                   % e)
def main(args):
    """Run a flumotion job process.

    Connects to the worker on the supplied socket path, runs the
    reactor until shutdown, and returns 0.
    """
    parser = OptionParser(domain="flumotion-job")
    log.debug('job', 'Parsing arguments (%r)' % ', '.join(args))
    options, args = parser.parse_args(args)

    # check if a config file was specified; if so, parse config and copy over
    if len(args) != 3:
        parser.error("must pass an avatarId and a path to the socket: %r"
                     % args)
    avatarName = args[1]
    socketName = args[2]

    # log our standardized starting marker
    log.info('job', "Starting job '%s'" % avatarName)

    # register all package paths (FIXME: this should go away when
    # components and all deps come from manager)
    # this is still necessary so that code from other projects can be
    # imported
    from flumotion.common import setup
    setup.setupPackagePath()

    log.info('job', 'Connecting to worker on socket %s' % (socketName))
    factory = job.JobClientFactory(avatarName)
    reactor.connectWith(fdserver.FDConnector, socketName, factory, 10,
                        checkPID=False)
    reactor.addSystemEventTrigger('before', 'shutdown',
                                  factory.medium.shutdownHandler)

    # log our standardized started marker
    log.info('job', "Started job '%s'" % avatarName)
    reactor.run()

    # log our standardized stopping marker
    log.info('job', "Stopping job '%s'" % avatarName)
    # log our standardized stopped marker
    log.info('job', "Stopped job '%s'" % avatarName)
    return 0
# or a list in a dictionary key. self._extraListeners[(portType, args, tuple( kw.items()))] = (reactor.listenWith(portType, *args, **kw)) for host, port, factory, ctxFactory, timeout, bindAddress in self.sslConnectors: reactor.connectSSL(host, port, factory, ctxFactory, timeout, bindAddress) for host, port, factory, timeout, bindAddress in self.tcpConnectors: reactor.connectTCP(host, port, factory, timeout, bindAddress) for rhost, rport, protocol, lport, interface, size in self.udpConnectors: reactor.connectUDP(rhost, rport, protocol, lport, interface, size) for address, factory, timeout in self.unixConnectors: reactor.connectUNIX(address, factory, timeout) for connectorType, args, kw in self.extraConnectors: reactor.connectWith(connectorType, *args, **kw) for service in self.services.values(): service.startService() self.running = 1 def run(self, save=1, installSignalHandlers=1): """run(save=1, installSignalHandlers=1) Run this application, running the main loop if necessary. If 'save' is true, then when this Application is shut down, it will be persisted to a pickle. 'installSignalHandlers' is passed through to reactor.run(), the function that starts the mainloop. """ from twisted.internet import reactor if not self._boundPorts:
# NOTE(review): fragment — begins inside a try/except of an enclosing
# method not visible in this chunk (Python 2 except syntax), and run()
# is truncated at the end; indentation below is reconstructed.
except error.CannotListenError, msg:
    # Could not bind the SSL port: log and abort binding.
    log.msg('error on SSL port %s: %s' % (port, msg))
    return
for portType, args, kw in self.extraPorts:
    self._extraListeners[(portType, args, tuple(kw.items()))] = (
        reactor.listenWith(portType, *args, **kw))

# Replay every recorded connector request now that we are starting.
for host, port, factory, ctxFactory, timeout, bindAddress in self.sslConnectors:
    reactor.connectSSL(host, port, factory, ctxFactory, timeout, bindAddress)
for host, port, factory, timeout, bindAddress in self.tcpConnectors:
    reactor.connectTCP(host, port, factory, timeout, bindAddress)
for rhost, rport, protocol, lport, interface, size in self.udpConnectors:
    reactor.connectUDP(rhost, rport, protocol, lport, interface, size)
for address, factory, timeout in self.unixConnectors:
    reactor.connectUNIX(address, factory, timeout)
for connectorType, args, kw in self.extraConnectors:
    reactor.connectWith(connectorType, *args, **kw)

# Finally start all registered services and mark ourselves running.
for service in self.services.values():
    service.startService()
self.running = 1

def run(self, save=1, installSignalHandlers=1):
    """run(save=1, installSignalHandlers=1)

    Run this application, running the main loop if necessary.
    If 'save' is true, then when this Application is shut down, it
    will be persisted to a pickle. 'installSignalHandlers' is
    passed through to reactor.run(), the function that starts the
    mainloop.
    """
    from twisted.internet import reactor
    if not self._boundPorts:
        self.bindPorts()
    self._save = save
def do_setup(self):
    """Configure authentication and the WMS source connection.

    Sets up digest authentication (realm, optional bouncer, replay
    protection), installs a pad monitor on the identity element, then —
    depending on self.type — pulls from a remote encoder, registers
    with a porter (slave), or listens on its own TCP port.

    @returns: None on success, or a failed deferred
              (ComponentStartHandledError) when the TCP port is taken.
    """
    props = self.config['properties']
    self._authenticator = DigestAuth(self)
    realm = props.get('realm', "Flumotion Windows Media Server Component")
    self._authenticator.setRealm(realm)
    if 'bouncer' in props:
        bouncerName = props['bouncer']
        self._authenticator.setBouncerName(bouncerName)
    if not props.get('secure', True):
        # 'secure' off: permit replayed digest responses.
        self._authenticator.enableReplayAttacks()

    # Watch for data flow through identity to turn hungry/happy as
    # appropriate
    self._inactivatedByPadMonitor = False
    identity = self.pipeline.get_by_name("identity")
    self.debug("Adding pad monitor")
    self._padMonitor = PadMonitor(self, identity.get_pad('src'),
                                  'identity-source')

    if self.type == 'pull':
        # Pull mode: we connect out to the encoder.
        host = self.config['properties'].get('host', 'localhost')
        port = self.config['properties'].get('port', 80)
        wmsPath = self.config['properties'].get('wms-path', None)
        if wmsPath:
            factory = WMSServerPullFactory(self._srcelement, wmsPath)
        else:
            factory = WMSPullFactory(self._srcelement)
        self.info("Pulling from %s:%d", host, port)
        reactor.connectTCP(host, port, factory)
    elif self.type == 'slave':
        # Slaved to a porter...
        factory = WMSPushFactory(self._authenticator, self._srcelement)
        self._porterDeferred = d = defer.Deferred()
        mountpoints = [self.mountPoint]
        self._pbclient = porterclient.HTTPPorterClientFactory(
            factory, mountpoints, d)
        creds = credentials.UsernamePassword(self._porterUsername,
                                             self._porterPassword)
        self._pbclient.startLogin(creds, self._pbclient.medium)
        self.info('Starting porter login at "%s"', self._porterPath)
        # This will eventually cause d to fire
        reactor.connectWith(
            fdserver.FDConnector, self._porterPath,
            self._pbclient, 10, checkPID=False)
    else:
        # Streamer is standalone.
        factory = WMSPushFactory(self._authenticator, self._srcelement)
        try:
            self.info('Listening on tcp port %d', self.port)
            reactor.listenTCP(self.port, factory)
        except error.CannotListenError:
            # Port already taken: report the error and go sad.
            t = 'Port %d is not available.' % self.port
            self.warning(t)
            m = Error(T_(N_(
                "Network error: TCP port %d is not available."),
                self.port))
            self.addMessage(m)
            self.setMood(moods.sad)
            return defer.fail(errors.ComponentStartHandledError(t))
def do_setup(self):
    """Set up the file-streamer HTTP site and start serving.

    Builds the resource tree, arranges request timeouts and statistics
    updates, then either logs in to the porter (slave mode) or listens
    on our own TCP port. Returns a deferred that fires once the
    component can be marked happy.

    @returns: a deferred firing when setup completed, or a failed
              deferred (ComponentSetupHandledError) when the TCP port
              is taken.
    """
    self.have_properties(self.config['properties'])

    root = self._rootResource
    if root is None:
        root = self._getDefaultRootResource()
    if root is None:
        raise errors.WrongStateError(
            "a resource or path property must be set")
    site = Site(root, self)

    # Periodically reap requests that exceeded REQUEST_TIMEOUT.
    self._timeoutRequestsCallLater = reactor.callLater(
        self.REQUEST_TIMEOUT, self._timeoutRequests)

    # Create statistics handler and start updating ui state
    self.stats = serverstats.ServerStatistics()
    updater = StatisticsUpdater(self.uiState, "request-statistics")
    self.stats.startUpdates(updater)
    updater = StatisticsUpdater(self.uiState, "provider-statistics")
    self._fileProviderPlug.startStatsUpdates(updater)
    self._updateUptime()

    d = defer.Deferred()
    if self.type == 'slave':
        # Streamer is slaved to a porter.
        if self._singleFile:
            self._pbclient = porterclient.HTTPPorterClientFactory(
                site, [self.mountPoint], d)
        else:
            # Register the mountpoint as a prefix so all paths below
            # it are routed to us.
            self._pbclient = porterclient.HTTPPorterClientFactory(
                site, [], d, prefixes=[self.mountPoint])
        creds = credentials.UsernamePassword(self._porterUsername,
                                             self._porterPassword)
        self._pbclient.startLogin(creds, self._pbclient.medium)
        self.info("Logging to porter on socketPath %s", self._porterPath)
        # This will eventually cause d to fire
        reactor.connectWith(fdserver.FDConnector, self._porterPath,
                            self._pbclient, 10, checkPID=False)
    else:
        # File Streamer is standalone.
        try:
            self.debug('Going to listen on port %d' % self.port)
            iface = ""
            # we could be listening on port 0, in which case we need
            # to figure out the actual port we listen on
            self._twistedPort = reactor.listenTCP(self.port, site,
                                                  interface=iface)
            self.port = self._twistedPort.getHost().port
            self.info('Listening on interface %r on port %d',
                      iface, self.port)
        except error.CannotListenError:
            # Port already taken: report the error and go sad.
            t = 'Port %d is not available.' % self.port
            self.warning(t)
            m = messages.Error(T_(N_(
                "Network error: TCP port %d is not available."),
                self.port))
            self.addMessage(m)
            self.setMood(moods.sad)
            return defer.fail(errors.ComponentSetupHandledError(t))
        # fire callback so component gets happy
        d.callback(None)

    # we are responsible for setting component happy

    def setComponentHappy(result):
        # Runs once login/listen completed: start auth keepalives and
        # flip the component mood to happy.
        self.httpauth.scheduleKeepAlive()
        self.setMood(moods.happy)
        return result
    d.addCallback(setComponentHappy)
    return d
def do_setup(self):
    """Set up the file-streamer HTTP site and start serving.

    Builds the resource tree, arranges request timeouts and statistics
    updates, then either logs in to the porter (slave mode) or listens
    on our own TCP port. Returns a deferred that fires once the
    component can be marked happy.

    @returns: a deferred firing when setup completed, or a failed
              deferred (ComponentSetupHandledError) when the TCP port
              is taken.
    """
    self.have_properties(self.config['properties'])

    root = self._rootResource
    if root is None:
        root = self._getDefaultRootResource()
    if root is None:
        raise errors.WrongStateError(
            "a resource or path property must be set")
    site = Site(root, self)

    # Periodically reap requests that exceeded REQUEST_TIMEOUT.
    self._timeoutRequestsCallLater = reactor.callLater(
        self.REQUEST_TIMEOUT, self._timeoutRequests)

    # Create statistics handler and start updating ui state
    self.stats = serverstats.ServerStatistics()
    updater = StatisticsUpdater(self.uiState, "request-statistics")
    self.stats.startUpdates(updater)
    updater = StatisticsUpdater(self.uiState, "provider-statistics")
    self._fileProviderPlug.startStatsUpdates(updater)
    self._updateUptime()

    d = defer.Deferred()
    if self.type == 'slave':
        # Streamer is slaved to a porter.
        if self._singleFile:
            self._pbclient = porterclient.HTTPPorterClientFactory(
                site, [self.mountPoint], d)
        else:
            # Register the mountpoint as a prefix so all paths below
            # it are routed to us.
            self._pbclient = porterclient.HTTPPorterClientFactory(
                site, [], d, prefixes=[self.mountPoint])
        creds = credentials.UsernamePassword(self._porterUsername,
                                             self._porterPassword)
        self._pbclient.startLogin(creds, self._pbclient.medium)
        self.info("Logging to porter on socketPath %s", self._porterPath)
        # This will eventually cause d to fire
        reactor.connectWith(fdserver.FDConnector, self._porterPath,
                            self._pbclient, 10, checkPID=False)
    else:
        # File Streamer is standalone.
        try:
            self.debug('Going to listen on port %d' % self.port)
            iface = ""
            # we could be listening on port 0, in which case we need
            # to figure out the actual port we listen on
            self._twistedPort = reactor.listenTCP(self.port, site,
                                                  interface=iface)
            self.port = self._twistedPort.getHost().port
            self.info('Listening on interface %r on port %d',
                      iface, self.port)
        except error.CannotListenError:
            # Port already taken: report the error and go sad.
            t = 'Port %d is not available.' % self.port
            self.warning(t)
            m = messages.Error(
                T_(N_("Network error: TCP port %d is not available."),
                   self.port))
            self.addMessage(m)
            self.setMood(moods.sad)
            return defer.fail(errors.ComponentSetupHandledError(t))
        # fire callback so component gets happy
        d.callback(None)

    # we are responsible for setting component happy

    def setComponentHappy(result):
        # Runs once login/listen completed: start auth keepalives and
        # flip the component mood to happy.
        self.httpauth.scheduleKeepAlive()
        self.setMood(moods.happy)
        return result
    d.addCallback(setComponentHappy)
    return d