def test_updateIndexFromCallableWithNone(self):
    """Re-indexing an object whose callable value turned None unindexes it.

    None values are never indexed, so once the author becomes None the
    catalog must stop returning the document for any query.
    """
    intids = IntIdsStub()
    provideUtility(intids, IIntIds)
    catalog = Catalog()
    catalog['author'] = FieldIndex('getAuthor', None, field_callable=True)
    obj = stoopidCallable(author="joe")
    docid = intids.register(obj)
    catalog.index_doc(docid, obj)
    found = sorted(hit.author
                   for hit in catalog.searchResults(author=('joe', 'joe')))
    self.assertEqual(len(found), 1)
    self.assertEqual(found, ['joe'])
    # Re-index with a None value: the entry must vanish from the index.
    obj.author = None
    catalog.index_doc(docid, obj)
    self.assertEqual(len(catalog.searchResults(author=(None, None))), 0)
def setUp(self):
    """Register registry-backed purge settings plus stub purge components."""
    provideAdapter(persistentFieldAdapter)
    self.registry = Registry()
    self.registry.registerInterface(ICachePurgingSettings)
    provideUtility(self.registry, IRegistry)
    self.settings = self.registry.forInterface(ICachePurgingSettings)
    self.settings.enabled = True
    self.settings.cachingProxies = ('http://localhost:1234',)

    @implementer(IPurgePaths)
    @adapter(FauxContext)
    class StubPurgePaths(object):
        """Always reports two relative paths and no absolute ones."""

        def __init__(self, context):
            self.context = context

        def getRelativePaths(self):
            return ['/foo', '/bar']

        def getAbsolutePaths(self):
            return []

    provideAdapter(StubPurgePaths, name="test1")

    @implementer(IPurger)
    class StubPurger(object):
        """Synchronous purger whose purge always reports success."""

        def purgeSync(self, url, httpVerb='PURGE'):
            return "200 OK", "cached", None

    provideUtility(StubPurger())
def main(argv=None):
    """Run as a cron job: execute all time-based workflow transitions."""
    engine = create_engine('postgres://localhost/bungeni', echo=False)
    component.provideUtility(engine, IDatabaseEngine, 'bungeni-db')
    model.metadata.bind = engine
    session = Session()
    # Wire up the question workflow machinery before touching any content.
    component.provideAdapter(
        bungeni.core.workflows.states.WorkflowState,
        (bungeni.core.interfaces.IBungeniContent,))
    component.provideAdapter(
        bungeni.core.workflows.question.QuestionWorkflowAdapter,
        (domain.Question,))
    component.provideAdapter(
        bungeni.core.workflows.states.StateWorkflowInfo,
        (domain.Question,))
    component.provideHandler(
        bungeni.core.workflows.question.workflowTransitionEventDispatcher)
    deferAdmissibleQuestions()
    session.flush()
    session.commit()
def test_IndexRaisingValueGetter(self):
    """Indexes whose values come from callables must not swallow errors.

    Raising an exception in the value getter should not be silently
    ignored -- that would cause index corruption (the index would be
    out of sync with the objects).
    """
    uidutil = IntIdsStub()
    provideUtility(uidutil, IIntIds)
    catalog = Catalog()
    index = FieldIndex('getAuthor', None, field_callable=True)
    catalog['author'] = index
    ob1 = stoopidCallable(author="joe")
    ob1id = uidutil.register(ob1)
    catalog.index_doc(ob1id, ob1)
    res = catalog.searchResults(author=('joe', 'joe'))
    names = sorted(x.author for x in res)
    self.assertEqual(len(names), 1)
    self.assertEqual(names, ['joe'])
    # No author attribute here: the getter must raise AttributeError.
    ob2 = stoopidCallable()
    ob2id = uidutil.register(ob2)
    # We WANT the exception: assertRaises replaces the manual
    # try/fail/except boilerplate and fails cleanly if nothing raises.
    self.assertRaises(AttributeError, catalog.index_doc, ob2id, ob2)
def test_basic_tile_purge_cache(self): provideHandler(queuePurge) request = self.request alsoProvides(request, IAttributeAnnotatable) setRequest(request) registry = queryUtility(IRegistry) registry.registerInterface(ICachePurgingSettings) provideUtility(registry, IRegistry) settings = registry.forInterface(ICachePurgingSettings) settings.enabled = True settings.cachingProxies = ('http://*****:*****@@collective.cover.basic/test', '/c1/@@collective.cover.basic/test/@@images/image', '/c1/@@collective.cover.basic/test/@@images/icon', '/c1/@@collective.cover.basic/test/@@images/mini', '/c1/@@collective.cover.basic/test/@@images/large', '/c1/@@collective.cover.basic/test/@@images/listing', '/c1/@@collective.cover.basic/test/@@images/thumb', '/c1/@@collective.cover.basic/test/@@images/preview', '/c1/@@collective.cover.basic/test/@@images/tile']), IAnnotations(request)['plone.cachepurging.urls'])
def setup_db():
    """Create the test engine and rebuild all tables and sequences."""
    engine = create_engine('postgres://localhost/bungeni-test', echo=False)
    component.provideUtility(engine, IDatabaseEngine, 'bungeni-db')
    schema.metadata.bind = engine
    # !+DROP_ALL(ah,sep-2011)
    # drop_all(engine)
    schema.metadata.drop_all()
    schema.metadata.create_all()
    schema.metadata.reflect()
    # Sequences are not covered by create_all; create them explicitly,
    # in the same order as before.
    for sequence in (schema.QuestionSequence,
                     schema.MotionSequence,
                     schema.registrySequence,
                     schema.AgendaItemRegistrySequence,
                     schema.QuestionRegistrySequence,
                     schema.MotionRegistrySequence,
                     schema.BillRegistrySequence,
                     schema.TabledDocumentRegistrySequence,
                     schema.ReportRegistrySequence,
                     schema.tabled_documentSequence):
        sequence.create(engine)
    security.metadata.bind = engine
    security.metadata.drop_all()
    security.metadata.create_all()
    return engine
def xsltSetUp(test):
    """Register an XSLT sample source and stub out the actual transform."""
    sectionsSetUp(test)

    class XSLTSource(SampleSource):
        classProvides(ISectionBlueprint)
        implements(ISection)

        def __init__(self, *args, **kw):
            super(XSLTSource, self).__init__(*args, **kw)
            manifest = {'data': 'xml', 'name': 'manifest.xml'}
            marshall = {'data': 'xml', 'name': 'marshall.xml'}
            self.sample = (
                {},
                {'_type': 'Weblog'},
                {'_old_type': 'Blog'},
                {'_old_type': 'Blog', '_type': 'Weblog',
                 '_files': {'manifest': manifest}},
                {'_old_type': 'Blog', '_type': 'Weblog',
                 '_files': {'marshall': marshall}},
            )

    provideUtility(XSLTSource,
                   name=u'quintagroup.transmogrifier.tests.xsltsource')
    from quintagroup.transmogrifier.xslt import XSLTSection, stylesheet_registry
    # Tests never run a real XSLT processor.
    XSLTSection.applyTransformations = \
        lambda self, xml, xslt: 'transformed xml'
    test.globs['stylesheet_registry'] = stylesheet_registry
def test_request_not_annotatable(self):
    """Purge notification must not blow up on a non-annotatable request."""
    context = FauxContext()
    request = FauxRequest()
    setRequest(request)
    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)
    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurgePaths(object):
        implements(IPurgePaths)
        adapts(FauxContext)

        def __init__(self, context):
            self.context = context

        def getRelativePaths(self):
            return ['/foo', '/bar']

        def getAbsolutePaths(self):
            return []

    provideAdapter(FauxPurgePaths, name="test1")
    # A bare ``except:`` would also swallow SystemExit/KeyboardInterrupt;
    # catch Exception only, and say why the test failed.
    try:
        notify(Purge(context))
    except Exception:
        self.fail("notify(Purge(...)) raised on a non-annotatable request")
def setUp(self):
    """Wire up an active Solr connection manager and a Search around it.

    Exposes ``self.mngr``, ``self.conn`` and ``self.search`` for the
    tests in this case.
    """
    provideUtility(SolrConnectionConfig(), ISolrConnectionConfig)
    self.mngr = SolrConnectionManager()
    # Activate the (test) host so getConnection() yields a usable stub.
    self.mngr.setHost(active=True)
    self.conn = self.mngr.getConnection()
    self.search = Search()
    self.search.manager = self.mngr
def test_request_not_annotatable(self):
    """No URLs are purged when the request cannot carry annotations."""
    request = FauxRequest()
    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)
    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurger(object):
        implements(IPurger)

        def __init__(self):
            self.purged = []

        def purgeAsync(self, url, httpVerb='PURGE'):
            self.purged.append(url)

    purger = FauxPurger()
    provideUtility(purger)
    notify(EndRequestEvent(None, request))
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual([], purger.purged)
def test_purge(self):
    """Queued paths are purged against every configured caching proxy."""
    request = FauxRequest()
    alsoProvides(request, IAttributeAnnotatable)
    IAnnotations(request)['zojax.cachepurging.urls'] = set(['/foo', '/bar'])
    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)
    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurger(object):
        implements(IPurger)

        def __init__(self):
            self.purged = []

        def purgeAsync(self, url, httpVerb='PURGE'):
            self.purged.append(url)

    purger = FauxPurger()
    provideUtility(purger)
    notify(EndRequestEvent(None, request))
    # The queued URLs come from iterating a set, so their order is not
    # guaranteed -- the original exact-order assertion was flaky.
    # Compare order-insensitively instead (assertEquals is deprecated).
    self.assertEqual(
        sorted(['http://localhost:1234/foo', 'http://localhost:1234/bar']),
        sorted(purger.purged))
def setUp(self):
    """Provide a dummy catalog-updater utility wired to a dummy logger."""
    super(CatalogUpdaterXMLAdapterTest, self).setUp()
    self.logger = DummyLogger('CatalogUpdaterLogger', [])
    updater = DummyCatalogUpdaterUtility()
    updater._logger = self.logger
    provideUtility(updater, ICatalogUpdater, name="catalog_updater")
def test_enabled(self):
    """With purging enabled, Purge events queue the relative paths."""
    context = FauxContext()
    request = FauxRequest()
    alsoProvides(request, IAttributeAnnotatable)
    setRequest(request)
    configlet = CachePurgingConfiglet()
    provideUtility(configlet, ICachePurgingConfiglet)
    settings = getUtility(ICachePurgingConfiglet)
    settings.enabled = True
    settings.cachingProxies = ('http://localhost:1234',)

    class FauxPurgePaths(object):
        implements(IPurgePaths)
        adapts(FauxContext)

        def __init__(self, context):
            self.context = context

        def getRelativePaths(self):
            return ['/foo', '/bar']

        def getAbsolutePaths(self):
            return []

    provideAdapter(FauxPurgePaths, name="test1")
    notify(Purge(context))
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual({'zojax.cachepurging.urls': set(['/foo', '/bar'])},
                     dict(IAnnotations(request)))
def test_getNextUtility(self):
    """getNextUtility walks up: sub-local -> local -> global registration."""
    # local site vs. global site
    global_dummy = DummyUtility()
    provideUtility(global_dummy, IDummyUtility)
    local_dummy = DummyUtility()
    sm = zapi.getSiteManager()
    # NOTE(review): (interface, component) is the legacy Five argument
    # order for registerUtility -- kept as-is; confirm against the Five
    # version in use before changing.
    sm.registerUtility(IDummyUtility, local_dummy)
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(zapi.getUtility(IDummyUtility), local_dummy)
    self.assertEqual(getNextUtility(self.folder.site, IDummyUtility),
                     global_dummy)
    # local site vs. nested local site
    manage_addDummySite(self.folder.site, 'subsite')
    enableLocalSiteHook(self.folder.site.subsite)
    setSite(self.folder.site.subsite)
    sublocal_dummy = DummyUtility()
    sm = zapi.getSiteManager()
    sm.registerUtility(IDummyUtility, sublocal_dummy)
    self.assertEqual(zapi.getUtility(IDummyUtility), sublocal_dummy)
    self.assertEqual(
        getNextUtility(self.folder.site.subsite, IDummyUtility),
        local_dummy)
    self.assertEqual(getNextUtility(self.folder.site, IDummyUtility),
                     global_dummy)
def test_applyTransform_streamiterator(self):
    """A transformer may return a filestream_iterator as the new body."""
    # mkstemp() returns an *open* OS-level file descriptor as well as
    # the path; the original discarded the fd (``mkstemp()[1]``),
    # leaking it for the rest of the test run. Wrap it instead.
    fd, tmp = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'w') as out:
            out.write('foo')

        @implementer(ITransformer)
        class FauxTransformer(object):
            def __call__(self, request, result, encoding):
                return filestream_iterator(tmp)

        transformer = FauxTransformer()
        provideUtility(transformer)
        published = FauxPublished()
        request = FauxRequest(published)
        applyTransformOnSuccess(FauxPubEvent(request))
        self.assertTrue(
            isinstance(
                request.response.getBody(),
                filestream_iterator
            )
        )
    finally:
        os.unlink(tmp)
def setupGenericImage(site):
    """Add the generic fallback image to portal_depiction if missing."""
    tool = queryUtility(IDepictionTool, context=site).__of__(site)
    if 'generic' in tool.objectIds():
        return
    resource = site.restrictedTraverse(
        '++resource++eea.depiction.images/generic.jpg')
    data = resource.GET()
    # Needed for tests: the string storage may not be registered yet.
    if queryUtility(IStorage, name="__builtin__.str") is None:
        from plone.namedfile.storages import StringStorable
        provideUtility(StringStorable(), IStorage, name="__builtin__.str")
    image = NamedBlobImage(data=data, contentType="image/jpeg",
                           filename=u"generic.jpg")
    new_id = tool.invokeFactory('Image', id='generic', title='Generic')
    obj = tool._getOb(new_id)
    if IBaseObject.providedBy(obj):
        obj.edit(image=image)
    else:
        obj.image = image
def testFilterQuerySubstitution(self):
    """Parameters listed in ``filter_queries`` are moved from the main
    query into ``fq`` filter-query parameters, including combined keys."""

    def optimize(**params):
        # Fresh query each call; optimizeQueryParameters mutates both
        # the query dict and the params dict in place.
        query = dict(a="a:23", b="b:42", c="c:(23 42)")
        optimizeQueryParameters(query, params)
        return query, params

    # first test without the configuration utility: nothing is moved
    self.assertEqual(optimize(),
        (dict(a="a:23", b="b:42", c="c:(23 42)"), dict()))
    # now with a utility registered, but still unconfigured...
    config = SolrConnectionConfig()
    provideUtility(config, ISolrConnectionConfig)
    self.assertEqual(optimize(),
        (dict(a="a:23", b="b:42", c="c:(23 42)"), dict()))
    # a single configured key is moved into fq
    config.filter_queries = ["a"]
    self.assertEqual(optimize(),
        (dict(b="b:42", c="c:(23 42)"), dict(fq=["a:23"])))
    # pre-existing fq values (string or list) are preserved, moved
    # values are appended after them
    self.assertEqual(optimize(fq="x:13"),
        (dict(b="b:42", c="c:(23 42)"), dict(fq=["x:13", "a:23"])))
    self.assertEqual(
        optimize(fq=["x:13", "y:17"]),
        (dict(b="b:42", c="c:(23 42)"), dict(fq=["x:13", "y:17", "a:23"]))
    )
    config.filter_queries = ["a", "c"]
    self.assertEqual(optimize(),
        (dict(b="b:42"), dict(fq=["a:23", "c:(23 42)"])))
    self.assertEqual(optimize(fq="x:13"),
        (dict(b="b:42"), dict(fq=["x:13", "a:23", "c:(23 42)"])))
    self.assertEqual(
        optimize(fq=["x:13", "y:17"]),
        (dict(b="b:42"), dict(fq=["x:13", "y:17", "a:23", "c:(23 42)"]))
    )
    # also test substitution of combined filter queries ("a c" only
    # matches when both keys are present, and joins their values)
    config.filter_queries = ["a c"]
    self.assertEqual(optimize(),
        (dict(b="b:42"), dict(fq=["a:23 c:(23 42)"])))
    # when the whole query is consumed, a catch-all query remains
    config.filter_queries = ["a c", "b"]
    self.assertEqual(optimize(),
        ({"*": "*:*"}, dict(fq=["a:23 c:(23 42)", "b:42"])))
    # for multiple matches the first takes precedence
    config.filter_queries = ["a", "a c", "b"]
    self.assertEqual(optimize(),
        (dict(c="c:(23 42)"), dict(fq=["a:23", "b:42"])))
    # parameters not contained in the query must not be converted
    config.filter_queries = ["a nonexisting", "b"]
    self.assertEqual(optimize(),
        (dict(a="a:23", c="c:(23 42)"), dict(fq=["b:42"])))
def test_match_abort(self):
    """interceptResponse may set headers and abort the normal response."""
    provideAdapter(DefaultRulesetLookup)
    provideUtility(Registry(), IRegistry)
    registry = getUtility(IRegistry)
    registry.registerInterface(ICacheSettings)
    settings = registry.forInterface(ICacheSettings)
    settings.enabled = True
    z3c.caching.registry.register(DummyView, 'testrule')
    settings.operationMapping = {'testrule': 'op1'}

    @implementer(ICachingOperation)
    @adapter(Interface, Interface)
    class DummyOperation(object):
        def __init__(self, published, request):
            self.published = published
            self.request = request

        def interceptResponse(self, rulename, response):
            response.addHeader('X-Cache-Foo', 'test')
            return None

        def modifyResponse(self, rulename, response):
            pass

    provideAdapter(DummyOperation, name='op1')
    view = DummyView()
    request = DummyRequest(view, DummyResponse())
    intercept(DummyEvent(request))
    self.assertEqual({'PUBLISHED': view}, dict(request))
    self.assertEqual({'X-Cache-Rule': ['testrule'],
                      'X-Cache-Foo': ['test']},
                     dict(request.response))
def setUp(self):
    """Register the id normalizer and a fresh permission/action mapping."""
    super(TestGeneratorIntegration, self).setUp()
    import plone.i18n.normalizer
    provideUtility(plone.i18n.normalizer.idnormalizer,
                   plone.i18n.normalizer.IIDNormalizer)
    # Start from an empty permission mapping registry; the original
    # mapping is stashed so tearDown can restore it.
    registry = getUtility(IActionGroupRegistry)
    self._ori_permissions = registry._permissions
    registry._permissions = {}
    self.register_permissions(**{
        'cmf.ModifyPortalContent': 'Modify portal content',
        'zope2.View': 'View',
        'zope2.AccessContentsInformation': 'Access contents information',
        'zope2.DeleteObjects': 'Delete objects',
        'cmf.AddPortalContent': 'Add portal content',
        'cmf.AccessFuturePortalContent': 'Access future portal content',
        'ATContentTypes: Add Image': 'ATContentTypes: Add Image',
    })
    self.map_permissions(['View', 'Access contents information'], 'view')
    self.map_permissions(['Modify portal content'], 'edit')
    self.map_permissions(['Delete objects'], 'delete')
    self.map_permissions(
        ['Add portal content', 'ATContentTypes: Add Image'], 'add')
    self.map_permissions(
        ['Add portal content', 'ATContentTypes: Add Folder'],
        'add folder', move=False)
    self.map_permissions(['Access future portal content'], 'view future')
    self.map_permissions(['ATContentTypes: Add Image'], 'edit',
                         workflow_name='my_custom_workflow')
def test_swallow_other_error(self):
    """Ordinary exceptions from ruleset lookup are swallowed (and logged)."""

    @implementer(IRulesetLookup)
    @adapter(Interface, Interface)
    class FailingRulesetLookup(object):
        def __init__(self, published, request):
            self.published = published
            self.request = request

        def __call__(self):
            raise AttributeError('Should be swallowed and logged')

    provideAdapter(FailingRulesetLookup)
    provideUtility(Registry(), IRegistry)
    registry = getUtility(IRegistry)
    registry.registerInterface(ICacheSettings)
    settings = registry.forInterface(ICacheSettings)
    settings.enabled = True
    settings.operationMapping = {'foo': 'bar'}
    request = DummyRequest(DummyView(), DummyResponse())
    try:
        intercept(DummyEvent(request))
    except Exception:
        self.fail('Intercept should not raise')
def test_off_switch(self):
    """With caching disabled the response is never mutated."""
    provideAdapter(DefaultRulesetLookup)
    provideUtility(Registry(), IRegistry)
    registry = getUtility(IRegistry)
    registry.registerInterface(ICacheSettings)
    settings = registry.forInterface(ICacheSettings)
    settings.enabled = False
    z3c.caching.registry.register(DummyView, 'testrule')
    settings.operationMapping = {'testrule': 'op1'}

    @implementer(ICachingOperation)
    @adapter(Interface, Interface)
    class DummyOperation(object):
        def __init__(self, published, request):
            self.published = published
            self.request = request

        def interceptResponse(self, rulename, response):
            return None

        def modifyResponse(self, rulename, response):
            response['X-Mutated'] = rulename

    provideAdapter(DummyOperation, name='op1')
    view = DummyView()
    request = DummyRequest(view, DummyResponse())
    MutatorTransform(view, request).transformUnicode(u'', 'utf-8')
    self.assertEqual({'PUBLISHED': view}, dict(request))
    # The operation was mapped but disabled, so no header was added.
    self.assertEqual({}, dict(request.response))
def testInsertExtra(self):
    """A section that yields extra items interleaves correctly on split."""

    class Inserter(object):
        implements(ISection)

        def __init__(self, transmogrifier, name, options, previous):
            self.previous = previous

        def __iter__(self):
            # After every passed-through item, emit one extra item
            # numbered by the position of the item it follows.
            for index, item in enumerate(self.previous):
                item['pipeline'] = 1
                yield item
                yield dict(id='extra-%02d' % index)

    provideUtility(Inserter, ISectionBlueprint,
                   name=u'collective.transmogrifier.tests.inserter')
    splitter = self._makeOne(
        dict(inserter=dict(
            blueprint='collective.transmogrifier.tests.inserter')),
        {'pipeline-1': 'inserter', 'pipeline-2': ''},
        (dict(id='item-%02d' % i) for i in range(3)))
    self.assertEqual(list(splitter), [
        dict(id='item-00', pipeline=1),  # p1 advanced, look at p2
        dict(id='item-00'),              # p2 advanced, look at p1
        dict(id='extra-00'),             # p1 did not advance
        dict(id='item-01', pipeline=1),  # p1 advanced, look at p2
        dict(id='item-01'),              # p2 advanced, look at p1
        dict(id='extra-01'),             # p1 did not advance
        dict(id='item-02', pipeline=1),  # p1 advanced, condition isDone
        dict(id='extra-02'),             # last in p1 after isDone, l.a. p2
        dict(id='item-02'),              # p2 advanced
    ])                                   # p2 is done
def testSkipItems(self): class Skip(object): implements(ISection) def __init__(self, transmogrifier, name, options, previous): self.previous = previous def __iter__(self): count = 0 for item in self.previous: if count % 2: item["pipeline"] = 1 yield item count += 1 provideUtility(Skip, ISectionBlueprint, name=u"collective.transmogrifier.tests.skip") splitter = self._makeOne( dict(skip=dict(blueprint="collective.transmogrifier.tests.skip")), {"pipeline-1": "skip", "pipeline-2": ""}, (dict(id="item-%02d" % i) for i in range(4)), ) self.assertEqual( list(splitter), [ dict(id="item-01", pipeline=1), # p1 is ahead dict(id="item-00"), # p2 advanced, p1 is skipped dict(id="item-01"), # p2 advanced, p1 no longer ahead dict(id="item-03", pipeline=1), # p1 is ahead again dict(id="item-02"), # p2 advanced, p1 is skipped dict(id="item-03"), # p2 advanced, p1 no longer ahead ], ) # p1 is done, p2 is done
def setUp(self):
    """Register the type-prefix VDEX vocabulary and mock the registry so
    the archiver sees a client id of 'SKA ARCH'."""
    super(TestArchiver, self).setUp()
    grok('opengever.dossier.archive')
    grok('opengever.dossier.behaviors.filing')
    file_path = os.path.join(
        os.path.dirname(opengever.dossier.__file__),
        'vdexvocabs', 'type_prefixes.vdex')
    vocabulary_registry = getVocabularyRegistry()
    # Register the vocabulary only if it isn't registered yet; get()
    # raises VocabularyRegistryError when it is missing.
    try:
        vocabulary_registry.get(None, 'opengever.dossier.type_prefixes')
    except VocabularyRegistryError:
        vocabulary_registry.register(
            'opengever.dossier.type_prefixes',
            VdexVocabulary(file_path))
    # mocker record phase: any number of reads of ``proxy.client_id``
    # return 'SKA ARCH' (count(0, None) = zero or more calls).
    proxy = self.mocker.mock()
    proxy.client_id
    self.mocker.result('SKA ARCH')
    self.mocker.count(0, None)
    # Mocked IRegistry whose forInterface(IBaseClientID) yields the proxy.
    registry = self.mocker.mock()
    provideUtility(provides=IRegistry, component=registry)
    registry.forInterface(IBaseClientID)
    self.mocker.result(proxy)
    self.mocker.count(0, None)
def testInsertExtra(self): class Inserter(object): implements(ISection) def __init__(self, transmogrifier, name, options, previous): self.previous = previous def __iter__(self): count = 0 for item in self.previous: item["pipeline"] = 1 yield item yield dict(id="extra-%02d" % count) count += 1 provideUtility(Inserter, ISectionBlueprint, name=u"collective.transmogrifier.tests.inserter") splitter = self._makeOne( dict(inserter=dict(blueprint="collective.transmogrifier.tests.inserter")), {"pipeline-1": "inserter", "pipeline-2": ""}, (dict(id="item-%02d" % i) for i in range(3)), ) self.assertEqual( list(splitter), [ dict(id="item-00", pipeline=1), # p1 advanced, look at p2 dict(id="item-00"), # p2 advanced, look at p1 dict(id="extra-00"), # p1 did not advance dict(id="item-01", pipeline=1), # p1 advanced, look at p2 dict(id="item-01"), # p2 advanced, look at p1 dict(id="extra-01"), # p1 did not advance dict(id="item-02", pipeline=1), # p1 advanced, condition isDone dict(id="extra-02"), # last in p1 after isDone, l.a. p2 dict(id="item-02"), # p2 advanced ], ) # p2 is done
def testPortalContentLanguage(self):
    """Portal content is created in the language of the target folder."""
    from zope.component import provideUtility
    from zope.i18n.interfaces import ITranslationDomain
    from zope.i18n.simpletranslationdomain import SimpleTranslationDomain
    # Fake the news title translations.
    catalog = {
        ('de', u'news-title'): u'Foo',
        ('pt_BR', u'news-title'): u'Bar',
    }
    provideUtility(SimpleTranslationDomain('plonefrontpage', catalog),
                   ITranslationDomain, name='plonefrontpage')
    # Placeholder folders, one per language under test.
    self.folder.invokeFactory('Folder', 'brazilian')
    self.folder.invokeFactory('Folder', 'german')
    # Content created inside a German folder...
    self.folder.german.setLanguage('de')
    self.loginAsPortalOwner()
    setuphandlers.setupPortalContent(self.folder.german)
    # self.assertEqual(self.folder.german.news.Title(), 'Foo')
    # ...and inside a folder with a composite language code (pt-br).
    self.folder.brazilian.setLanguage('pt-br')
    setuphandlers.setupPortalContent(self.folder.brazilian)
def setUp(test):
    """Placeless setup plus registration of all mirror adapters/utilities."""
    placelesssetup.setUp()
    # Initialize mappers only if the class is not already mapped.
    try:
        orm.class_mapper(schema.Content)
    except orm.exc.UnmappedClassError:
        schema.initialize_mapper()
    # Field transforms, registered in the original order.
    for transform_factory in (transform.StringTransform,
                              transform.IntegerTransform,
                              transform.FloatTransform,
                              transform.DateTimeTransform,
                              transform.LinesTransform,
                              transform.BooleanTransform,
                              transform.FileTransform,
                              transform.PhotoTransform,
                              transform.ReferenceTransform):
        component.provideAdapter(transform_factory)
    component.provideUtility(peer.PeerRegistry())
    component.provideAdapter(
        peer.PeerFactory,
        (interfaces.IMirrored, interfaces.ISchemaTransformer))
    component.provideAdapter(
        transform.SchemaTransformer,
        (interfaces.IMirrored, interfaces.IMetaData))
    component.provideAdapter(serializer.Serializer, (interfaces.IMirrored,))
    component.provideAdapter(
        operation.OperationFactory, (interfaces.IMirrored,))
    component.provideUtility(operation.OperationBufferFactory())
def testSkipItems(self):
    """A section that drops every other item still merges correctly."""

    class Skip(object):
        implements(ISection)

        def __init__(self, transmogrifier, name, options, previous):
            self.previous = previous

        def __iter__(self):
            # Only pass through the odd-indexed items, tagging them.
            for index, item in enumerate(self.previous):
                if index % 2:
                    item['pipeline'] = 1
                    yield item

    provideUtility(Skip, ISectionBlueprint,
                   name=u'collective.transmogrifier.tests.skip')
    splitter = self._makeOne(
        dict(skip=dict(
            blueprint='collective.transmogrifier.tests.skip')),
        {'pipeline-1': 'skip', 'pipeline-2': ''},
        (dict(id='item-%02d' % i) for i in range(4)))
    self.assertEqual(list(splitter), [
        dict(id='item-01', pipeline=1),  # p1 is ahead
        dict(id='item-00'),              # p2 advanced, p1 is skipped
        dict(id='item-01'),              # p2 advanced, p1 no longer ahead
        dict(id='item-03', pipeline=1),  # p1 is ahead again
        dict(id='item-02'),              # p2 advanced, p1 is skipped
        dict(id='item-03')               # p2 advanced, p1 no longer ahead
    ])                                   # p1 is done, p2 is done
def setUp(self):
    """Bring up a minimal component registry and a ZopeLite app root."""
    import transaction
    from zope.component.testing import setUp as componentSetUp
    from zope.component import provideUtility
    from zope.component import provideAdapter
    from zope.traversing.adapters import DefaultTraversable
    from zope.publisher.http import HTTPCharsets
    from Testing.ZopeTestCase import ZopeLite
    from Testing.makerequest import makerequest
    from Products.PageTemplates.interfaces import (
        IUnicodeEncodingConflictResolver)
    from Products.PageTemplates.unicodeconflictresolver import (
        PreferredCharsetResolver)

    componentSetUp()
    provideAdapter(DefaultTraversable, (None,))
    provideUtility(PreferredCharsetResolver,
                   IUnicodeEncodingConflictResolver)
    provideAdapter(HTTPCharsets)
    transaction.begin()
    self.root = makerequest(ZopeLite.app())
    # Set the request charset to enable conversions to utf-8.
    self.root.REQUEST['HTTP_ACCEPT_CHARSET'] = '*'
def test_dont_swallow_conflict_error(self):
    """ConflictError raised during ruleset lookup must propagate."""

    @implementer(IRulesetLookup)
    @adapter(Interface, Interface)
    class ConflictingRulesetLookup(object):
        def __init__(self, published, request):
            self.published = published
            self.request = request

        def __call__(self):
            raise ConflictError()

    provideAdapter(ConflictingRulesetLookup)
    provideUtility(Registry(), IRegistry)
    registry = getUtility(IRegistry)
    registry.registerInterface(ICacheSettings)
    settings = registry.forInterface(ICacheSettings)
    settings.enabled = True
    settings.operationMapping = {'foo': 'bar'}
    request = DummyRequest(DummyView(), DummyResponse())
    self.assertRaises(ConflictError, intercept, DummyEvent(request))
def make_app(config_file=None, settings=None):
    """Build and wire the aiohttp application.

    Loads configuration from *config_file* (JSON) or an explicit
    *settings* dict (exactly one must be provided), executes the ZCML
    configure actions, then sets up content negotiation, databases,
    static files, the root user, RSA keys and async utilities.

    Returns the ready-to-serve ``web.Application``.
    """
    app_settings.update(_delayed_default_settings)
    # Initialize aiohttp app
    app = web.Application(router=TraversalRouter())
    # Create root Application
    root = ApplicationRoot(config_file)
    root.app = app
    provideUtility(root, IApplication, 'root')
    # Initialize global (threadlocal) ZCA configuration
    app.config = ConfigurationMachine()
    registerCommonDirectives(app.config)
    # Configuration file wins over a passed-in settings dict.
    if config_file is not None:
        with open(config_file, 'r') as config:
            settings = json.load(config)
    elif settings is None:
        raise Exception('Neither configuration or settings')
    import plone.server
    configure.include("zope.component")
    configure.include("zope.annotation")
    configure.include("plone.server", "meta.zcml")  # bbb
    configure.scan('plone.server.translation')
    configure.scan('plone.server.renderers')
    configure.scan('plone.server.api')
    configure.scan('plone.server.content')
    configure.scan('plone.server.security')
    configure.scan('plone.server.json')
    configure.scan('plone.server.behaviors')
    configure.scan('plone.server.languages')
    configure.scan('plone.server.permissions')
    configure.scan('plone.server.migrate.migrations')
    configure.scan('plone.server.auth.participation')
    configure.scan('plone.server.auth.principalrole')
    configure.scan('plone.server.catalog.index')
    configure.scan('plone.server.catalog.catalog')
    configure.scan('plone.server.framing')
    configure.scan('plone.server.file')
    configure.scan('plone.server.types')
    load_application(plone.server, root, settings)
    for ep in iter_entry_points('plone.server'):
        # auto-include applications
        # What an "app" include consists of...
        # 1. load zcml if present
        # 2. load "includeme" module function if present
        # 3. load app_settings dict if present in the module
        if ep.module_name not in settings.get('applications', []):
            continue
        load_application(ep.load(), root, settings)
    try:
        app.config.execute_actions()
    except ConfigurationConflictError as e:
        logger.error(str(e._conflicts))
        raise e
    # XXX we clear now to save some memory
    # it's unclear to me if this is necesary or not but it seems to me that
    # we don't need things registered in both components AND here.
    configure.clear()
    # update *after* plugins loaded
    update_app_settings(settings)
    # Content negotiation utilities for Accept / Accept-Language.
    content_type = ContentNegotiatorUtility(
        'content_type', app_settings['renderers'].keys())
    language = ContentNegotiatorUtility(
        'language', app_settings['languages'].keys())
    provideUtility(content_type, IContentNegotiation, 'content_type')
    provideUtility(language, IContentNegotiation, 'language')
    # Mount configured databases and static files on the root.
    for database in app_settings['databases']:
        for key, dbconfig in database.items():
            factory = getUtility(
                IDatabaseConfigurationFactory, name=dbconfig['storage'])
            root[key] = factory(key, dbconfig)
    for static in app_settings['static']:
        for key, file_path in static.items():
            root[key] = StaticFile(file_path)
    root.set_root_user(app_settings['root_user'])
    # Generate an RSA key pair unless one was configured (or pycrypto
    # is unavailable, in which case RSA is None).
    if RSA is not None and not app_settings.get('rsa'):
        key = RSA.generate(2048)
        pub_jwk = {'k': key.publickey().exportKey('PEM')}
        priv_jwk = {'k': key.exportKey('PEM')}
        app_settings['rsa'] = {
            'pub': pub_jwk,
            'priv': priv_jwk
        }
    # Set router root
    app.router.set_root(root)
    for utility in getAllUtilitiesRegisteredFor(IAsyncUtility):
        # In case there is Utilties that are registered from zcml
        ident = asyncio.ensure_future(
            utility.initialize(app=app), loop=app.loop)
        root.add_async_utility(ident, {})
    app.on_cleanup.append(close_utilities)
    for util in app_settings['utilities']:
        root.add_async_utility(util)
    # Load cached Schemas
    load_cached_schema()
    return app
# NOTE(review): this excerpt starts mid-class (``configuration`` is a
# method of an EngineFactory subclass whose header is not visible) and
# ends on a truncated class header -- indentation reconstructed.
def configuration(self):
    """Return (args, kwargs) for the engine: the DSN stored on the
    portal under 'spdo', falling back to DEFAULT_DSN."""
    urltool = getToolByName(getSite(), 'portal_url')
    portal = urltool.getPortalObject()
    try:
        saconnect = ISQLAlchemyConnectionStrings(portal)
        dsn = saconnect['spdo']
        logger(u"Utilizando configuração local: " + unicode(dsn, 'utf-8'))
    except (ComponentLookupError, KeyError), e:
        dsn = DEFAULT_DSN
        logger(u"Utilizando configuração padrão: " + unicode(dsn, 'utf-8'))
    return (dsn, ), {}

# NOTE(review): registers a plain EngineFactory with the default DSN;
# confirm the subclass above should not be used here instead.
SPDOEngineGlobalUtility = EngineFactory(DEFAULT_DSN)
provideUtility(SPDOEngineGlobalUtility, provides=IEngineFactory,
               name=u'spdo_engine')

## GloballyScopedSession - one database per instance
#SPDOGloballyScopedSession = GloballyScopedSession(u'spdo_engine', extension=ZopeVersionedExtension())
#provideUtility(SPDOGloballyScopedSession, provides=IScopedSession, name=u'spdo_session')

def ScopeID():
    """Scope id: the portal's physical path joined with '-'."""
    urltool = getToolByName(getSite(), 'portal_url')
    obj = urltool.getPortalObject()
    return '-'.join(obj.getPhysicalPath()[1:])

# SiteScopedSession - one database per site
# NOTE(review): the body of this class is missing from the excerpt.
class SPDOSiteScopedSession(SiteScopedSession):
def register_foo_utility(portal_setup):
    """Setup step: globally register the ``foo`` utility for IFoo.

    ``portal_setup`` is required by the import-step signature but unused.
    """
    provideUtility(foo, provides=IFoo)
# NOTE(review): this excerpt begins mid-class (``is_debug_mode`` belongs
# to an enclosing class not visible here) -- indentation reconstructed.
def is_debug_mode(self):
    """Return True when Zope runs in debug mode (via plone.api)."""
    return api.env.debug_mode()


@implementer(ICatalogFactory)
class UserPropertiesSoupCatalogFactory(object):
    """Soup catalog factory with field indexes on 'path' and 'uuid'."""

    def __call__(self, context):
        catalog = Catalog()
        path = NodeAttributeIndexer('path')
        catalog['path'] = CatalogFieldIndex(path)
        uuid = NodeAttributeIndexer('uuid')
        catalog['uuid'] = CatalogFieldIndex(uuid)
        return catalog


provideUtility(UserPropertiesSoupCatalogFactory(), name='uuid_preserver')


class preserveUUIDs(grok.View):
    """View on the site root that builds UID/path records for every
    catalogued object, for the 'uuid_preserver' soup.

    NOTE(review): this excerpt appears truncated -- the records built in
    the loop are never added to the soup here and render() returns
    nothing; confirm against the full source.
    """
    grok.context(IPloneSiteRoot)

    def render(self):
        portal = api.portal.get()
        soup = get_soup('uuid_preserver', portal)
        pc = api.portal.get_tool('portal_catalog')
        results = pc.searchResults()
        for result in results:
            record = Record()
            record.attrs['uuid'] = result.UID
            record.attrs['path'] = result.getPath()
def afterSetUp(self):
    """Select the engine and register traversal/charset components."""
    self.select_engine()
    zope.component.provideAdapter(DefaultTraversable, (None, ))
    zope.component.provideAdapter(HTTPCharsets, (None, ))
    provideUtility(PreferredCharsetResolver,
                   IUnicodeEncodingConflictResolver)
        # NOTE(review): this excerpt starts mid-method (the enclosing
        # ``def`` is outside this view); indentation reconstructed.
        # Render each datagrid row through a temporary child document so
        # listed fields show their rendered (not raw) values.
        for row in fieldValue:
            row['Form'] = child_form_id
            row['Plomino_Parent_Document'] = doc.id
            tmp = TemporaryDocument(
                db, child_form, row, real_doc=doc)
            tmp = tmp.__of__(db)
            for f in fields:
                if f in fields_to_render:
                    row[f] = tmp.getRenderedItem(f)
            rendered_values.append(row)
        fieldValue = rendered_values
    # Project only the mapped columns, preserving row order.
    if mapped_fields and child_form_id:
        mapped = []
        for row in fieldValue:
            mapped.append([row[c] for c in mapped_fields])
        fieldValue = mapped
    return {'rawdata': rawValue, 'rendered': fieldValue}


component.provideUtility(DatagridField, IPlominoField, 'DATAGRID')

# Expose every IDatagridField schema field as a 'parameters' dict property.
for f in getFields(IDatagridField).values():
    setattr(DatagridField, f.getName(), DictionaryProperty(f, 'parameters'))


class SettingForm(BaseForm):
    """ """
    form_fields = form.Fields(IDatagridField)
def provideUtility(provided, component):
    """Compatibility shim: register *component* as a global utility for
    *provided*, delegating to ``ztapi.provideUtility`` (which takes the
    arguments in the opposite order).
    """
    ztapi.provideUtility(component, provided)
class AlchemyFormEngineFactory(EngineFactory):
    """Engine factory reading the 'alchemyform' DSN from the portal's
    SQLAlchemy connection strings, falling back to DEFAULT_DSN."""

    def configuration(self):
        # Returns (args, kwargs) for the SQLAlchemy engine constructor.
        urltool = getToolByName(getSite(), 'portal_url')
        portal = urltool.getPortalObject()
        try:
            saconnect = ISQLAlchemyConnectionStrings(portal)
            dsn = saconnect['alchemyform']
        except (ComponentLookupError, KeyError), e:
            dsn = DEFAULT_DSN
        return (dsn, ), {}


# NOTE(review): registers a plain EngineFactory, not the subclass above;
# confirm AlchemyFormEngineFactory should not be used here instead.
AlchemyFormEngineGlobalUtility = EngineFactory(DEFAULT_DSN)
provideUtility(AlchemyFormEngineGlobalUtility, provides=IEngineFactory,
               name=u'alchemyform_engine')


def ScopeID():
    """Scope id: the portal's physical path joined with '-'."""
    urltool = getToolByName(getSite(), 'portal_url')
    obj = urltool.getPortalObject()
    return '-'.join(obj.getPhysicalPath()[1:])


# SiteScopedSession - one database per site
class AlchemyFormSiteScopedSession(SiteScopedSession):

    def siteScopeFunc(self):
        # One session scope per portal (see ScopeID above).
        return ScopeID()
def tearDownZope(self, app):
    # Restore the previously saved purger utility (the original comment said
    # "Store" but re-registering self.oldPurger puts the old one back).
    provideUtility(self.oldPurger, IPurger)
def _utility(ob):
    # Decorator-style closure: registers *ob* as a utility and returns it
    # unchanged.  `provides` and `name` are captured from the enclosing
    # scope (not visible in this chunk).
    component.provideUtility(ob, provides, name)
    return ob
async def register_utilities(self):
    """Register the cloud client and fetch helper as global utilities."""
    # Instantiate and register in the original order.
    for factory, iface in ((CloudClient, ICloudctl), (Fetch, IFetch)):
        component.provideUtility(factory(), iface)
from zope import interface, component
import unittest
from Student import Student
from IStudents import IStudents, Students

# Module-level registration kept for importers that rely on `stud`; each
# test re-registers a fresh utility in setUp so tests stay independent.
component.provideUtility(Students())
stud = component.getUtility(IStudents)


class TestStudent(unittest.TestCase):
    """Tests for the Student record and the IStudents registry utility."""

    def setUp(self):
        # Register a fresh Students() per test so every test starts empty.
        # Previously test_Delete silently depended on test_Add having run
        # first (alphabetical order) to put "Dima" into the shared registry.
        component.provideUtility(Students())
        self.stud = component.getUtility(IStudents)
        self.St1 = Student("Dima", "IMEI", 1)
        self.St2 = Student("Vasya", "Physics", 3)

    def test_init(self):
        self.assertEqual((self.St1.name, self.St1.faculty, self.St1.course),
                         ("Dima", "IMEI", 1))
        self.assertEqual((self.St2.name, self.St2.faculty, self.St2.course),
                         ("Vasya", "Physics", 3))

    def test_Add(self):
        self.assertTrue(self.stud.add("Dima", "IMEI", 1))
        self.assertEqual(self.stud.slist[0].name, "Dima")

    def test_Delete(self):
        # Arrange our own fixture instead of relying on test_Add's leftovers.
        self.stud.add("Dima", "IMEI", 1)
        self.assertTrue(self.stud.delete("Dima"))
        self.assertEqual(self.stud.slist, [])


if __name__ == '__main__':
    unittest.main()
def setUp(self):
    """Register the stock unicode-conflict resolver as the active utility."""
    # Imports are function-local — presumably to avoid importing Zope at
    # module load time; confirm before moving them to the top of the file.
    from zope.component import provideUtility
    from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver
    from Products.PageTemplates.unicodeconflictresolver import DefaultUnicodeEncodingConflictResolver
    provideUtility(DefaultUnicodeEncodingConflictResolver,
                   IUnicodeEncodingConflictResolver)
from zope import component

from ctrl.core.extension import CtrlExtension
from ctrl.core.interfaces import (ICommandRunner, ICtrlExtension,
                                  ISubcommand)

from .command import ConfigSubcommand


class CtrlConfigExtension(CtrlExtension):
    """Ctrl extension wiring the 'config' subcommand into the registry."""

    def register_adapters(self):
        # Expose ConfigSubcommand as the named ISubcommand adapter
        # available to command runners.
        component.provideAdapter(factory=ConfigSubcommand,
                                 adapts=[ICommandRunner],
                                 provides=ISubcommand,
                                 name='config')

    async def register_utilities(self):
        # No global utilities yet; config utility registration is disabled.
        pass
        # component.provideUtility(
        #     config,
        #     provides=ICtrlConfig)


# register the extension
component.provideUtility(CtrlConfigExtension(), ICtrlExtension, 'config')
def setUp(self):
    """Install an exchange with fixed AAA/BBB rates as the IExchange
    utility for the conversion tests."""
    backend = SimpleBackend('XXX')
    self.exchange = Exchange(backend=backend)
    # Seed the two currencies the tests convert between.
    for code, rate in (('AAA', '2'), ('BBB', '8')):
        self.exchange.setrate(code, Decimal(rate))
    provideUtility(self.exchange, IExchange)
from .client import CloudClient
from .command import CloudSubcommand
from .settings import CloudSettings
from .utils import Fetch


class CtrlCloudExtension(CtrlExtension):
    """Ctrl extension wiring the cloud subcommand, settings adapter and
    the cloud client/fetch utilities into the component registry."""

    @property
    def requires(self):
        # Other extensions this one declares a dependency on.
        return ['config', 'command']

    def register_adapters(self):
        # Named subcommand adapter for command runners.
        component.provideAdapter(factory=CloudSubcommand,
                                 adapts=[ICommandRunner],
                                 provides=ISubcommand,
                                 name='cloud')
        # Cloud-specific view over the generic settings object.
        component.provideAdapter(factory=CloudSettings,
                                 adapts=[ISettings],
                                 provides=ICloudSettings)

    async def register_utilities(self):
        component.provideUtility(CloudClient(), ICloudctl)
        component.provideUtility(Fetch(), IFetch)


# register the extension
component.provideUtility(CtrlCloudExtension(), ICtrlExtension, 'cloud')
def test_unavailable_backend_conversion_error(self):
    """Converting via an exchange with no backend must raise
    ExchangeBackendNotInstalled."""
    provideUtility(Exchange(), IExchange)
    with self.assertRaises(ExchangeBackendNotInstalled):
        self.PriceClass('2', 'AAA').to('BBB')
def tearDown(self):
    # Re-register the renderer captured before the test replaced it,
    # so later tests see the original ICitationRenderer utility.
    provideUtility(self.orig_renderer, ICitationRenderer)
def setUpPloneSite(self, portal): applyProfile(portal, 'plone.app.intid:default') # Install into Plone site using portal_setup setRoles(portal, TEST_USER_ID, ['Member', 'Contributor', 'Manager']) # portal workaround self.portal = portal # test fti generation fti = DexterityFTI('TransmogrifyDexterityFTI') fti.schema = 'transmogrify.dexterity.testing.ITestSchema' fti.klass = 'plone.dexterity.content.Container' fti.behaviors = ('plone.app.dexterity.behaviors.metadata.IBasic',) self.portal.portal_types._setObject('TransmogrifyDexterityFTI', fti) register(fti) # create test schema source and provide it @implementer(ISection) class SchemaSource(SampleSource): provider(ISectionBlueprint) def __init__(self, transmogrifier, name, options, previous): super( SchemaSource, self).__init__( transmogrifier, name, options, previous) sourcecontent = options.get('source-content', 'full') if sourcecontent == 'full': self.sample = ( dict(_path='/spam', foo='one value', _type='TransmogrifyDexterityFTI', title='Spam', description='Lorem Ipsum bla bla!', test_file={ 'data': zptlogo, 'filename': 'zptlogo.gif'}, test_date='2010-10-12', test_datetime='2010-10-12 17:59:59', fieldnotchanged='nochange', ), dict(_path='/two', foo='Bla', _type='TransmogrifyDexterityFTI', title='My Second Object', # description=None, # None is not valid for this # field. test_file=zptlogo, _filename="testlogo.gif", test_date=date(2010, 0o1, 0o1, ), test_datetime=datetime(2010, 0o1, 0o1, 17, 59, 59), fieldnotchanged='nochange', ), ) elif sourcecontent == 'onlytitle': self.sample = ( dict(_path='/spam', _type='TransmogrifyDexterityFTI', title='Spammety spam'), dict(_path='/two', _type='TransmogrifyDexterityFTI', title='My Awesome Second Object'), ) provideUtility(SchemaSource, name=u'transmogrify.dexterity.testsource')
def test_unavailable_rate_conversion_error(self):
    """A backend that lacks the target currency's rate must raise
    ExchangeRateNotFound on conversion."""
    provideUtility(Exchange(backend=SimpleBackend('AAA')), IExchange)
    with self.assertRaises(ExchangeRateNotFound):
        self.PriceClass('2', 'AAA').to('BBB')
def test_no_settings(self):
    """With the interface registered but no settings records stored,
    cache purging must report as disabled."""
    registry = Registry()
    registry.registerInterface(ICachePurgingSettings)
    provideUtility(registry, IRegistry)
    enabled = utils.isCachePurgingEnabled()
    self.assertEqual(False, enabled)
os.close(1) os.close(2) # redirect stdin, stdout, and stderr to /dev/null os.dup2(os.open('/dev/null', os.O_RDWR), 0) os.dup2(0, 1) os.dup2(0, 2) # chdir to the filesystem root so we don't prevent unmounting os.chdir('/') # write PID to the pidfile f = open(self.pidfile, 'w') f.write("%i\n" % os.getpid()) f.close() atexit.register(self._removePid) # register the scheduler scheduler = Scheduler() provideUtility(scheduler, IScheduler) scheduler.setServiceParent(self) # configure the statistics manager stats.setServiceParent(self) stats.configure(self.settings) # configure the plugin manager plugins = PluginManager() plugins.setServiceParent(self) plugins.configure(self.settings) # configure the auth manager auth = AuthManager() auth.setServiceParent(self) auth.configure(self.settings) # configure the bier manager events = EventManager(plugins) events.setServiceParent(self)
def test_hit(self):
    """Lookup succeeds (no exception) when a matching named IPermission
    utility is registered."""
    from zope.component import provideUtility
    from zope.security.interfaces import IPermission
    marker = object()
    provideUtility(marker, IPermission, 'testing')
    self._callFUT(None, 'testing')  # must not raise
def __init__(self):
    """Wire up ZenHub: configuration, event publishing, RPC servers,
    invalidation processing, metric reporting and signal handling."""
    self.shutdown = False
    super(ZenHub, self).__init__()
    load_config("hub.zcml", ZENHUB_MODULE)
    notify(HubWillBeCreatedEvent(self))
    if self.options.profiling:
        self.profiler = ContinuousProfiler('zenhub', log=self.log)
        self.profiler.start()
    self.zem = self.dmd.ZenEventManager

    # responsible for sending messages to the queues
    load_config_override('twistedpublisher.zcml', QUEUEMESSAGING_MODULE)
    notify(HubCreatedEvent(self))
    self.sendEvent(eventClass=App_Start,
                   summary="%s started" % self.name,
                   severity=0)

    # Initialize ZenHub's RPC servers
    self._monitor = StatsMonitor()
    self._status_reporter = ZenHubStatusReporter(self._monitor)
    self._pools = make_pools()
    self._service_manager = make_service_manager(self._pools)
    authenticators = getCredentialCheckers(self.options.passwordfile)
    self._server_factory = make_server_factory(
        self._pools, self._service_manager, authenticators,
    )
    self._xmlrpc_manager = XmlRpcManager(self.dmd, authenticators[0])
    register_legacy_worklist_metrics()

    # Invalidation Processing
    self._invalidation_manager = InvalidationManager(
        self.dmd,
        self.log,
        self.async_syncdb,
        self.storage.poll_invalidations,
        self.sendEvent,
        poll_interval=self.options.invalidation_poll_interval,
    )

    # Setup Metric Reporting
    self._metric_manager = MetricManager(
        daemon_tags={
            'zenoss_daemon': 'zenhub',
            'zenoss_monitor': self.options.monitor,
            'internal': True,
        })
    # Registered unnamed; zope.component uses the interface the
    # MetricManager instance itself declares it provides.
    provideUtility(self._metric_manager)
    self._metric_writer = self._metric_manager.metric_writer
    self.rrdStats = self._metric_manager.get_rrd_stats(
        self._getConf(), self.zem.sendEvent,
    )

    # set up SIGUSR2 handling
    try:
        signal.signal(signal.SIGUSR2, self.sighandler_USR2)
    except ValueError as ex:
        log.warn("Exception registering USR2 signal handler: %s", ex)
        # If we get called multiple times, this will generate an exception:
        # ValueError: signal only works in main thread
        # Ignore it as we've already set up the signal handler.
pass # ZEN-26671 Wait at least this duration in secs # before signaling a worker process self.SIGUSR_TIMEOUT = 5
def _utility(factory):
    # Closure variant for factories: instantiate, register the instance as
    # a utility, and return the *instance* (replacing the decorated factory).
    # `args`, `kwargs`, `provides` and `name` are captured from the
    # enclosing scope (not visible in this chunk).
    ob = factory(*args, **kwargs)
    component.provideUtility(ob, provides, name)
    return ob
def setUp(self):
    """Register a mock registry as the unnamed IRegistry utility."""
    mock_registry = CollectiveSolrMockRegistry()
    provideUtility(component=mock_registry,
                   provides=IRegistry,
                   name=u'')
:return combined static and perfomance templates ''' keys = key.split(':') subject = keys.pop() asmt_type = keys[1] path = os.path.sep.join(keys) custom_template = super()._load_template(subject, path, pattern='*.xml') custom_template_content = custom_template.get_asmt_metadata_template() if custom_template else \ self.default_meta_template_mgr.get_template(subject, path=asmt_type) if not custom_template_content: raise MetadataException('Unable to load metadata for key {0}'.format(key)) standard_template = self.meta_template_mgr.get_template(subject) return MetadataTemplate(deep_merge(standard_template, custom_template_content)) def _get_performance_configured_path(self): return conf.get('smarter_score_batcher.metadata.performance', '../../resources/meta/performance') def _get_static_configured_path(self): return conf.get('smarter_score_batcher.metadata.static', '../../resources/meta/static') def _get_default_configured_path(self): return conf.get('smarter_score_batcher.metadata.default', '../../resources/meta/default') def _get_conversion_func(self): return generate_performance_metadata component.provideUtility(PerfMetadataTemplateManager(), IMetadataTemplateManager)
def setUpIntIds():
    """Register the int-id machinery (key references, add-event subscriber,
    IIntIds utility) plus the timetable components used by these tests."""
    provideAdapter(KeyReferenceStub)
    # Assign an int id whenever a located object is added.
    provideHandler(addIntIdSubscriber, [ILocation, IObjectAddedEvent])
    provideUtility(IntIds(), IIntIds)
    testing_registry.setupTimetablesComponents()
def search(self, **kwargs):
    """Run a catalog query and return the hits as a list.

    HACK: re-registers self.ids on every call; we should be able to set
    up a persistent utility instead.
    """
    provideUtility(self.ids, IIntIds)
    hits = self.catalog.searchResults(**kwargs)
    return [hit for hit in hits]
def captchaSetUp(test):
    """Test setup hook (the *test* argument is unused): install a dummy
    key manager as the IKeyManager utility."""
    dummy = DummyKeyManager()
    provideUtility(dummy, IKeyManager)