class StubbedTest(unittest.TestCase):
    def openLegacyStore(self):
        """
        Extract the Store tarball associated with this test, open it, and
        return it.
        """
        temp = self.mktemp()
        f = sys.modules[self.__module__].__file__
        dfn = os.path.join(
            os.path.dirname(f),
            os.path.basename(f).split("test_")[1].split('.py')[0] + '.axiom')
        arcname = dfn + '.tbz2'
        tarball = tarfile.open(arcname, 'r:bz2')
        for member in tarball.getnames():
            tarball.extract(member, temp)
        return Store(os.path.join(temp, os.path.basename(dfn)))

    def setUp(self):
        """
        Prepare to test a stub by opening and then fully upgrading the
        legacy store.
        """
        self.store = self.openLegacyStore()
        self.service = IService(self.store)
        self.service.startService()
        return self.store.whenFullyUpgraded()

    def tearDown(self):
        return self.service.stopService()

def testStoreServicePowerup(self):
    s = Store()
    ss = SillyService(store=s)
    s.powerUp(ss, IService)
    IService(s).startService()
    IService(s).stopService()
    self.assertEqual(ss.started, 1)
    self.assertEqual(ss.stopped, 1)
    self.assertEqual(ss.running, 0)

def test_schedulerStartsWhenServiceStarts(self):
    """
    Test that IScheduler(store).startService() gets called whenever
    IService(store).startService() is called.
    """
    service = IService(self.store)
    service.startService()
    scheduler = service.getServiceNamed(SITE_SCHEDULER)
    self.assertTrue(scheduler.running)

def testItemServicePowerup(self):
    s = Store()
    sm = Summer(store=s)
    ss = SillyService(store=s)
    sm.powerUp(ss, IService)
    IService(sm).startService()
    IService(sm).stopService()
    self.assertEqual(ss.started, 1)
    self.assertEqual(ss.stopped, 1)
    self.assertEqual(ss.running, 0)

def main(*config_file_paths):
    # Setup logging
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.INFO)
    root_logger.addHandler(logging.handlers.SysLogHandler('/dev/log'))

    application = make_application(*config_file_paths)
    service = IService(application)
    service.startService()
    reactor.addSystemEventTrigger('before', 'shutdown', service.stopService)
    reactor.run()

def makeService(self, options):
    store = Store(options['dbdir'])
    siteService = IService(store)
    site = Site(
        RootResource(store=store, trackingID=options['tracking-id']))
    site.displayTracebacks = not options['notracebacks']
    siteService.addService(
        strports.service(options['port'], site, reactor=reactor))
    return siteService

def setUp(self):
    temp = self.mktemp()
    f = sys.modules[self.__module__].__file__
    dfn = os.path.join(
        os.path.dirname(f),
        os.path.basename(f).split("test_")[1].split('.py')[0] + '.axiom')
    arcname = dfn + '.tbz2'
    tarball = tarfile.open(arcname, 'r:bz2')
    for member in tarball.getnames():
        tarball.extract(member, temp)
    self.store = Store(os.path.join(temp, os.path.basename(dfn)))
    self.service = IService(self.store)
    self.service.startService()
    return self.store.whenFullyUpgraded()

def closeStore(self):
    """
    Close C{self.currentStore} and discard the reference.  If there is a
    store service running, stop it first.
    """
    service = IService(self.currentStore)
    if service.running:
        result = service.stopService()
    else:
        result = succeed(None)

    def close(ignored):
        self.currentStore.close()
        self.currentStore = None
    result.addCallback(close)
    return result

def test_as_app(self):
    """The agent class can be accessed as an application."""
    app = BaseAgent().as_app()
    multi_service = IService(app, None)
    self.assertTrue(IServiceCollection.providedBy(multi_service))
    services = list(multi_service)
    self.assertEqual(len(services), 1)

def test_interface(self):
    """
    L{Scheduler} provides L{IScheduler} (which it proxies) and L{IService}
    (which is a no-op).
    """
    self.assertTrue(IScheduler.providedBy(self.oldScheduler))
    self.assertTrue(IService.providedBy(self.oldScheduler))

def setUp(self):
    self.clock = Clock()
    scheduler = Scheduler(store=self.siteStore)
    self.stubTime(scheduler)
    installOn(scheduler, self.siteStore)
    IService(self.siteStore).startService()

class SubSchedTest(SchedTest):
    def setUp(self):
        self.storePath = self.mktemp()
        self.store = Store(self.storePath)
        Scheduler(store=self.store).installOn(self.store)
        self.svc = IService(self.store)
        self.svc.startService()

    def tearDown(self):
        return self.svc.stopService()

    def testSubScheduler(self):
        substoreItem = SubStore.createNew(self.store, ['scheduler_test'])
        substore = substoreItem.open()
        SubScheduler(store=substore).installOn(substore)
        return self._doTestScheduler(substore)

def test_setServiceParent(self):
    """
    Test that the C{self.portType.setServiceParent} method adds the
    C{self.portType} to the Axiom Store Service as a child.
    """
    port = self.port(store=self.store)
    port.setServiceParent(self.store)
    self.failUnlessIn(port, list(IService(self.store)))

def _startService(self):
    """
    Start the service and make sure we know it's started so tearDown can
    shut it down.
    """
    assert not self.serviceStarted
    self.serviceStarted = True
    return IService(self.topdb).startService()

def closeStore(self):
    """
    Close C{self.currentTopStore} and C{self.currentSubStore}.  If there
    is a store service running in C{self.currentTopStore}, stop it first.
    """
    service = IService(self.currentTopStore)
    if service.running:
        result = service.stopService()
    else:
        result = succeed(None)

    def stopped(ignored):
        self.currentSubStore.close()
        self.currentTopStore.close()
        self.currentSubStore = None
        self.currentTopStore = None
    result.addCallback(stopped)
    return result

def test_namedService(self):
    """
    The site store IScheduler implementation can be retrieved as a named
    service from the store's IServiceCollection powerup.
    """
    self.assertIdentical(
        IService(self.store).getServiceNamed(SITE_SCHEDULER),
        IScheduler(self.store))

def testNoResults(self):
    """
    Test that the string 'no results' appears in the flattened HTML
    response to a search on an empty index
    """
    service = IService(self.indexer.store.parent)
    service.startService()

    def gotSearchResult((fragment,)):
        deferred = renderLivePage(ThemedFragmentWrapper(fragment))
        def rendered(res):
            self.assertIn('no results', res.lower())
            return service.stopService()
        return deferred.addCallback(rendered)

    s = self.indexer.store
    deferred = ixmantissa.ISearchAggregator(s).search(u'hi', {}, None, None)
    return deferred.addCallback(gotSearchResult)

def test_disownServiceParent(self):
    """
    Test that the C{self.portType.disownServiceParent} method removes the
    C{self.portType} from the Axiom Store Service.
    """
    port = self.port(store=self.store)
    port.setServiceParent(self.store)
    port.disownServiceParent()
    self.failIfIn(port, list(IService(self.store)))

def makeService(cls, options):
    """
    Create an L{IService} for the database specified by the given
    configuration.
    """
    from axiom.store import Store
    store = Store(options['dbdir'], debug=options['debug'])
    service = IService(store)
    _CheckSystemVersion(store).setServiceParent(service)
    return service

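A minimal usage sketch for a maker like the one above, wiring the returned store service into a .tac-style Application so twistd starts and stops it. The `DatabaseServiceMaker` name and the option values are assumptions for illustration, not part of the snippet above.

# Hypothetical .tac wiring; DatabaseServiceMaker and the option values
# are placeholders, not defined by the snippet above.
from twisted.application.service import Application

options = {'dbdir': 'data.axiom', 'debug': False}
application = Application('axiom-database')

# makeService returns the store's IService powerup with the version
# check attached as a child service.
storeService = DatabaseServiceMaker.makeService(options)
storeService.setServiceParent(application)
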
def test_applicationComponents(self):
    """
    Check L{twisted.application.service.Application} instantiation.
    """
    app = Application("app-name")
    self.assertTrue(verifyObject(IService, IService(app)))
    self.assertTrue(verifyObject(IServiceCollection, IServiceCollection(app)))
    self.assertTrue(verifyObject(IProcess, IProcess(app)))
    self.assertTrue(verifyObject(IPersistable, IPersistable(app)))

def makeService(self, options):
    service = MultiService()
    store = Store(options['db'])
    store.querySQL('PRAGMA journal_mode=WAL;')
    store.querySQL('PRAGMA synchronous=NORMAL;')
    IService(store).setServiceParent(service)
    site = Site(IndexRouter(store=store).router.resource())
    webService = strports.service(options['port'], site, reactor=reactor)
    webService.setServiceParent(service)
    return service

def test_defaultService(self):
    """
    Test the value of a Slave service in its simplest configuration.
    """
    service = CalDAVServiceMaker().makeService(self.options)
    self.failUnless(
        IService(service),
        "%s does not provide IService" % (service,))
    self.failUnless(service.services, "No services configured")
    self.failUnless(
        isinstance(service, CalDAVService),
        "%s is not a CalDAVService" % (service,))

def makeService(cls, options):
    """
    Create an L{IService} for the database specified by the given
    configuration.
    """
    from axiom.store import Store
    jm = options['journal-mode']
    if jm is not None:
        jm = jm.decode('ascii')
    store = Store(options['dbdir'], debug=options['debug'], journalMode=jm)
    service = IService(store)
    _CheckSystemVersion(store).setServiceParent(service)
    return service

def setUp(self):
    self.clock = Clock()
    self.dbdir = filepath.FilePath(self.mktemp())
    self.store = Store(self.dbdir)
    self.substoreItem = SubStore.createNew(self.store, ['sub'])
    self.substore = self.substoreItem.open()
    self.scheduler = IScheduler(self.store)
    self.subscheduler = IScheduler(self.substore)
    self.scheduler.callLater = self.clock.callLater
    self.scheduler.now = lambda: Time.fromPOSIXTimestamp(self.clock.seconds())
    self.subscheduler.now = lambda: Time.fromPOSIXTimestamp(self.clock.seconds())
    IService(self.store).startService()

def Application(name, uid=None, gid=None):
    """
    Return a compound class.

    Return an object supporting the L{IService}, L{IPausable},
    L{IReloadable}, L{IServiceCollection}, L{IProcess} and
    L{sob.IPersistable} interfaces, with the given parameters.  Always
    access the return value by explicit casting to one of the interfaces.
    """
    ret = components.Componentized()
    availableComponents = [
        TopLevelService(), Process(uid, gid), sob.Persistent(ret, name)]
    for comp in availableComponents:
        ret.addComponent(comp, ignoreClass=1)
    IService(ret).setName(name)
    return ret

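A brief sketch of using the Application factory above the way its docstring recommends: always reach the compound object through adaptation to the interface you need. The Echo protocol and the port number are illustrative assumptions.

# Sketch of a .tac-style file built on the Application factory above;
# the Echo protocol and the port number are illustrative assumptions.
from twisted.application import internet, service
from twisted.internet.protocol import Factory, Protocol

class Echo(Protocol):
    def dataReceived(self, data):
        self.transport.write(data)

factory = Factory()
factory.protocol = Echo

application = Application('echo-app')

# Go through the interfaces rather than poking at the Componentized
# object directly, as the docstring advises.
collection = service.IServiceCollection(application)
internet.TCPServer(8007, factory).setServiceParent(collection)

# twistd itself performs IService(application).startService() at startup
# and stopService() at shutdown.
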
def setUp(self):
    self.config = {
        'MEMCACHE_SERVERS': {
            'c0': {'host': 'localhost'},
            'c1': {'host': 'localhost'},
            'c2': {'host': 'localhost'},
        },
    }
    self.config_id = conf.settings.add_config(self.config)
    self.app = Application(__name__)
    self.memcache = app.build_memcache(self.app)
    IService(self.app).startService()
    yield timed.sleep(1)

def _main_async(reactor, argv=None, _abort_for_test=False):
    if argv is None:
        argv = sys.argv

    if not _abort_for_test:
        # Some log messages would be discarded if we did not set up things early.
        configure_logging()

    # Option parsing is done before importing the main modules so as to avoid
    # the cost of initializing gnuradio if we are aborting early.
    # TODO: Make that happen for createConfig too.
    argParser = argparse.ArgumentParser(prog=argv[0])
    argParser.add_argument('config_path', metavar='CONFIG',
        help='path of configuration directory or file')
    argParser.add_argument('--create', dest='createConfig', action='store_true',
        help='write template configuration file to CONFIG and exit')
    argParser.add_argument('-g, --go', dest='openBrowser', action='store_true',
        help='open the UI in a web browser')
    argParser.add_argument('--force-run', dest='force_run', action='store_true',
        help='Run DSP even if no client is connected (for debugging).')
    args = argParser.parse_args(args=argv[1:])

    # Verify we can actually run.
    # Note that this must be done before we actually load core modules, because
    # we might get an import error then.
    version_report = yield _check_versions()
    if version_report:
        print(version_report, file=sys.stderr)
        sys.exit(1)

    # Write config file and exit if asked ...
    if args.createConfig:
        write_default_config(args.config_path)
        _log.info('Created default configuration at: {config_path}',
            config_path=args.config_path)
        sys.exit(0)  # TODO: Consider using a return value or something instead

    # ... else read config file
    config_obj = Config(reactor=reactor, log=_log)
    execute_config(config_obj, args.config_path)
    yield config_obj._wait_and_validate()

    _log.info('Constructing...')
    app = config_obj._create_app()

    reactor.addSystemEventTrigger('during', 'shutdown', app.close_all_devices)

    _log.info('Restoring state...')
    pfg = PersistenceFileGlue(
        reactor=reactor,
        root_object=app,
        filename=config_obj._state_filename,
        get_defaults=_app_defaults)

    _log.info('Starting web server...')
    services = MultiService()
    for maker in config_obj._service_makers:
        IService(maker(app)).setServiceParent(services)
    services.startService()

    _log.info('ShinySDR is ready.')

    for service in services:
        # TODO: should have an interface (currently no proper module to put it in)
        service.announce(args.openBrowser)

    if args.force_run:
        _log.debug('force_run')
        # TODO kludge, make this less digging into guts
        app.get_receive_flowgraph().get_monitor().state()['fft'].subscribe2(
            lambda v: None, the_subscription_context)

    if _abort_for_test:
        services.stopService()
        yield pfg.sync()
        defer.returnValue(app)
    else:
        yield defer.Deferred()  # never fires

def stop(self):
    self.poller.reset_all_tasks()
    d = IService(self.app).stopService()
    d.addCallback(self.all_services_stoped)

pass

if start:
    try:
        port = Settings.HIVEPort
    except:
        port = 54322
    thebe = internet.SSLServer(port, thebeProto, xmlrpc.ServerContextFactory())
    thebe.setServiceParent(application)

tums.setServiceParent(application)
tumsSSL.setServiceParent(application)

infoserv = internet.TCPServer(9681, InfoServ.deploy())
infoserv.setServiceParent(application)

flowDb = Database.AggregatorDatabase()
axiomBatch = IService(flowDb.store)
axiomBatch.setServiceParent(application)

# Update the Firewall
Shorewall.upgradeRules()

## TwistD bootstrap code
nodaemon = 0
log = '/var/log/tums.log'

if len(sys.argv) > 1:
    if sys.argv[1] == "-n":
        nodaemon = 1
        log = None

if __name__ == '__main__':

class IndexerAPISearchTestsMixin(IndexerTestsMixin):
    """
    Test ISearchProvider search API on indexer objects
    """
    def setUp(self):
        """
        Make a store, an account/substore, an indexer, and call
        startService() on the superstore's IService so the batch process
        interactions that happen in fulltext.py work
        """
        self.dbdir = self.mktemp()
        self.path = u'index'
        superstore = store.Store(self.dbdir)

        loginSystem = LoginSystem(store=superstore)
        installOn(loginSystem, superstore)

        account = loginSystem.addAccount(u'testuser', u'example.com', None)
        substore = account.avatars.open()

        self.store = substore
        self.indexer = self.createIndexer()

        self.svc = IService(superstore)
        self.svc.startService()

        # Make sure the indexer is actually available
        writer = self.openWriteIndex()
        writer.close()

    def tearDown(self):
        """
        Stop the service we started in C{setUp}
        """
        return self.svc.stopService()

    def _indexSomeItems(self):
        writer = self.openWriteIndex()
        for i in xrange(5):
            writer.add(IndexableThing(
                _documentType=u'thing',
                _uniqueIdentifier=str(i),
                _textParts=[u'text'],
                _keywordParts={}))
        writer.close()

    def testIndexerSearching(self):
        """
        Test calling search() on the indexer item directly
        """
        def gotResult(res):
            self.assertEquals(identifiersFrom(res), range(5))
        self._indexSomeItems()
        return self.indexer.search(u'text').addCallback(gotResult)

    def testIndexerSearchingCount(self):
        """
        Test calling search() on the indexer item directly, with a count arg
        """
        def gotResult(res):
            self.assertEquals(identifiersFrom(res), [0])
        self._indexSomeItems()
        return self.indexer.search(u'text', count=1).addCallback(gotResult)

    def testIndexerSearchingOffset(self):
        """
        Test calling search() on the indexer item directly, with an offset arg
        """
        def gotResult(res):
            self.assertEquals(identifiersFrom(res), [1, 2, 3, 4])
        self._indexSomeItems()
        return self.indexer.search(u'text', offset=1).addCallback(gotResult)

    def testIndexerSearchingCountOffset(self):
        """
        Test calling search() on the indexer item directly, with count &
        offset args
        """
        def gotResult(res):
            self.assertEquals(identifiersFrom(res), [1, 2, 3])
        self._indexSomeItems()
        # Attach the callback so the assertion actually runs.
        return self.indexer.search(u'text', count=3, offset=1).addCallback(gotResult)

    def test_DifficultTokens(self):
        """
        Test searching for fragments of phone numbers, email addresses,
        and urls.
        """
        writer = self.openWriteIndex()
        specimens = [u"trevor 718-555-1212",
                     u"bob [email protected]",
                     u"atop http://divmod.org/projects/atop"]
        for i, txt in enumerate(specimens):
            writer.add(IndexableThing(
                _documentType=u'thing',
                _uniqueIdentifier=str(i),
                _textParts=[txt],
                _keywordParts={}))
        writer.close()

        def gotResult(res):
            return identifiersFrom(res)

        def testResults(results):
            self.assertEqual(results, [[0], [1], [2], [0], [1], [2]])

        return gatherResults(
            [self.indexer.search(u'718').addCallback(gotResult),
             self.indexer.search(u'moddiv').addCallback(gotResult),
             self.indexer.search(u'divmod').addCallback(gotResult),
             self.indexer.search(u'718-555').addCallback(gotResult),
             self.indexer.search(u'rjones@moddiv').addCallback(gotResult),
             self.indexer.search(u'divmod.org').addCallback(gotResult),
             ]).addCallback(testResults)

    def test_unicodeSearch(self):
        return self.indexer.search(u'\N{WHITE SMILING FACE}')

def tearDown(self):
    return IService(self.store).stopService()

def startStoreService(self):
    svc = IService(self.currentTopStore)
    svc.getServiceNamed("Batch Processing Controller").disownServiceParent()
    svc.startService()

def stopStoreService(self):
    service = IService(self.store)
    if service.running:
        return service.stopService()

def startStoreService(self):
    """
    Start the Store Service.
    """
    service = IService(self.store)
    service.startService()