def add_to_repo(obj, event):
    """
    Archive a newly created object in the version repository.

    Intended use is as an IObjectAddedEvent subscriber.
    """
    # Content under the intranets (/offices) tree is deliberately excluded.
    if find_interface(obj, IIntranets):
        return
    repo = find_repo(obj)
    if repo is None:
        return
    if not repo.history(obj.docid, True):
        # The object has no history yet, so archive its first version.
        version = queryAdapter(obj, IObjectVersion)
        if version is not None:
            if version.comment is None:
                version.comment = 'Content created.'
            repo.archive(version)
    container_version = queryAdapter(event.parent, IContainerVersion)
    if container_version is not None:
        user = authenticated_userid(get_current_request())
        repo.archive_container(container_version, user)
    # Only the root of an added subtree generates an event, so walk the
    # children ourselves with synthetic events.
    if IFolder.providedBy(obj):
        for _name, child in obj.items():
            child_event = FakeEvent()
            child_event.parent = obj
            add_to_repo(child, child_event)
def graph(self):
    """Build (once) and return the relation graph, recording broken relations."""
    if self._graph is not None:
        return self._graph
    self._graph = PyGraph()
    context = self.context
    source_id = context.getField('from').getAccessor(context)()
    source = self.tool.get(source_id)
    if source:
        make_node = queryAdapter(source, INode)
        self._graph.add_node(make_node())
    else:
        # Source content is missing: remember it and flag the relation.
        self._bad_content.add(source_id)
        self._bad_relations.add(context.Title())
    target_id = context.getField('to').getAccessor(context)()
    target = self.tool.get(target_id)
    if target:
        # A self-relation would add the same node twice; skip it.
        if source_id != target_id:
            make_node = queryAdapter(target, INode)
            self._graph.add_node(make_node())
    else:
        self._bad_content.add(target_id)
        self._bad_relations.add(context.Title())
    edge = queryAdapter(context, IEdge)()
    if edge:
        self._graph.add_edge(edge)
    return self._graph
def graph(self):
    """Build (once) and return the full relations graph.

    Adds one node per content type and one edge per relation; relations
    that cannot be adapted to IEdge are recorded in ``self._bad_relations``,
    and relation endpoints that are not known type ids in
    ``self._bad_content``.
    """
    if self._graph is not None:
        return self._graph
    self._graph = PyGraph()
    xtool = queryAdapter(self.context, IToolAccessor)
    typesIds = set()
    for ctype in xtool.types(proxy=False):
        typesIds.add(ctype.getId())
        node = queryAdapter(ctype, INode)
        self._graph.add_node(node())
    for relation in xtool.relations(proxy=False):
        # BUG FIX: the 'from' / 'to' accessors were swapped, so the
        # endpoints were stored under the wrong local names.
        field = relation.getField('from')
        value_from = field.getAccessor(relation)()
        field = relation.getField('to')
        value_to = field.getAccessor(relation)()
        edge = queryAdapter(relation, IEdge)()
        if edge:
            self._graph.add_edge(edge)
        else:
            self._bad_relations.add(relation.Title())
        if value_from not in typesIds:
            self._bad_content.add(value_from)
        if value_to not in typesIds:
            self._bad_content.add(value_to)
    return self._graph
def install_profile_upgrades(self, *profileids):
    """Run all pending upgrade steps for the given GenericSetup profile ids."""
    gatherer = queryAdapter(self.portal_setup, IUpgradeInformationGatherer)
    upgrade_info = []
    for profile in gatherer.get_upgrades():
        if profile['id'] not in profileids:
            continue
        step_ids = [upgrade['id'] for upgrade in profile['upgrades']]
        upgrade_info.append((profile['id'], step_ids))
    executioner = queryAdapter(self.portal_setup, IExecutioner)
    executioner.install(upgrade_info)
def test_adapters_update_on_LicenceSchedule_modification(self):
    """
    When an other content type is selected on the field 'scheduled_contenttype'
    of a ScheduleConfig, we have to unregister all the IToTaskConfig adapter
    of each TaskConfig of this ScheduleConfig and register them back for the
    new portal_type.
    """
    schedule_config = self.schedule_config
    task_config = self.task_config
    folder = self.portal.config
    document = api.content.create(type='Document', id='doc', container=self.portal)

    # the adapter should be registered for IATFolder
    adapter = getAdapter(folder, IToTaskConfig, task_config.UID())
    msg = "an adapter providing IToTaskConfig should have been registered for IATFolder"
    self.assertTrue(adapter is not None, msg)

    # ... but not for IATDocument
    adapter = queryAdapter(document, IToTaskConfig, task_config.UID())
    # BUG FIX: message read "not adapter should ..." (garbled)
    msg = "no adapter should have been registered for IATDocument yet..."
    self.assertTrue(adapter is None, msg)

    # modify 'scheduled_contenttype' then manually trigger the modification event
    schedule_config.scheduled_contenttype = ('Document', interface_to_tuple(IATDocument))
    notify(ObjectModifiedEvent(schedule_config))

    # old IToTaskConfig adapter should be unregistered for IATFolder
    adapter = queryAdapter(folder, IToTaskConfig, task_config.UID())
    msg = "the adapter should have been unregistered when modifying the ScheduleConfig"
    self.assertTrue(adapter is None, msg)

    # new IToTaskConfig adapter should be registered for IATDocument
    adapter = queryAdapter(document, IToTaskConfig, task_config.UID())
    msg = "an adapter providing IToTaskConfig should have been registered for IATDocument"
    self.assertTrue(adapter is not None, msg)
def _back_references(source_object, attribute_name, translation=None):
    """Return viewable objects that reference *source_object* via
    *attribute_name*, preferring the translation in the relevant language.

    The language comes from *translation* when given, otherwise from
    *source_object*.
    """
    catalog = getUtility(ICatalog)
    intids = getUtility(IIntIds)
    lang = queryAdapter(source_object, ILanguage).get_language()
    if translation:
        lang = queryAdapter(translation, ILanguage).get_language()
    gsm = getSecurityManager()
    result = []
    for rel in catalog.findRelations({
            'to_id': intids.getId(aq_inner(source_object)),
            'from_attribute': attribute_name}):
        obj = intids.queryObject(rel.from_id)
        if obj is not None and checkPermission('zope2.View', obj):
            if ITranslatable.providedBy(obj):
                trans_manager = ITranslationManager(aq_inner(obj))
                try:
                    trans_obj = trans_manager.get_translation(lang)
                except Unauthorized:
                    continue
                if trans_obj:
                    result.append(trans_obj)
                    continue
            if gsm.checkPermission('zope2.View', obj):
                result.append(obj)
    # BUG FIX: `result` was built but never returned (the sibling
    # _at_back_references does return it).
    return result
def init_repo(repo, context): if context.__name__ == 'TEMP': return if IFolder.providedBy(context): for child in context.values(): init_repo(repo, child) try: repo.history(context, True) return except: # Not in repo pass version = queryAdapter(context, IObjectVersion) if version is not None: print "Updating version for %s" % model_path(context) repo.archive(version) container = queryAdapter(context, IContainerVersion) if container is not None: print "Updating container version for %s" % model_path(context) user = getattr(context, 'creator', None) if user is None: user = get_setting(context, 'system_user', 'admin') repo.archive_container(container, user) context._p_deactivate() # try not to run out of memory
def __iter__(self):
    """Yield IBibliographicReference adapters found on self.context.

    Handles three shapes of context: a single bibliographic item, a
    collection/topic (anything with ``queryCatalog``), and a plain
    folderish container (anything with ``objectValues``).
    """
    # Strip acquisition wrappers so the interface/attribute checks see
    # the real object, not something acquired from a parent.
    checkme = aq_base(self.context)
    if IBibliographicItem.providedBy(checkme):
        biblio = queryAdapter(self.context, IBibliographicReference)
        if biblio is not None:
            yield biblio
    # common collection/topic API
    elif hasattr(checkme, 'queryCatalog'):
        for brain in self.context.queryCatalog():
            obj = brain.getObject()
            biblio = queryAdapter(obj, IBibliographicReference)
            if biblio is not None:
                yield biblio
    # folderish thingies
    elif hasattr(checkme, 'objectValues'):
        for obj in self.context.objectValues():
            if IBibliographicItem.providedBy(obj):
                try:
                    biblio = queryAdapter(obj, IBibliographicReference)
                except AttributeError, e:
                    # this can happen if a bibliographic item does
                    # not have one of the required attributes for the
                    # adapter. Log the error
                    msg = "Adaptation of {} to IBibliographicReference "
                    msg += "failed due to a missing attribute: {}"
                    logger.warn(msg.format(obj.id, str(e)))
                    continue
                if biblio is not None:
                    yield biblio
def test_bug_22_at_plone_org(self):
    """If ICanonicalLink adapter is not found for the context object -
    page rendering should not break, but only canonical link should
    disappear.
    """
    # Regex matching the <link rel="canonical" href="..."> tag in the
    # rendered HTML; the href is captured in group 1.
    curl = re.compile('<link\srel\s*=\s*"canonical"\s+' \
                      '[^>]*href\s*=\s*\"([^\"]*)\"[^>]*>', re.S|re.M)
    # When adapter registered for the object - canoncal link present on the page
    self.assertNotEqual(
        queryAdapter(self.my_doc, ICanonicalLink), None)
    res = self.publish(path=self.mydoc_path, basic=self.basic_auth)
    self.assertNotEqual(curl.search(res.getBody()), None)
    # Now remove adapter from the registry -> this should :
    # - not break page on rendering;
    # - canonical link will be absent on the page
    gsm = getGlobalSiteManager()
    gsm.unregisterAdapter(DefaultCanonicalLinkAdapter, [ITraversable,],
                          ICanonicalLink)
    self.assertEqual(
        queryAdapter(self.my_doc, ICanonicalLink), None)
    res = self.publish(path=self.mydoc_path, basic=self.basic_auth)
    self.assertEqual(curl.search(res.getBody()), None)
    # register adapter back in the global site manager
    gsm.registerAdapter(DefaultCanonicalLinkAdapter, [ITraversable,],
                        ICanonicalLink)
def _handleReferences(self, baseline, wc, mode, wc_ref):
    """Apply *mode* to all forward and backward references of *baseline*.

    For each relationship a relationship-named ICheckinCheckoutReference
    adapter wins; otherwise the unnamed adapter for the baseline is used.
    Backward references use the "<mode>BackReferences" method instead.
    """
    annotations = IAnnotations(wc_ref)
    default_adapter = interfaces.ICheckinCheckoutReference(baseline)

    def lookup(relationship):
        # Named relation adapter first, falling back to the default one.
        named = component.queryAdapter(
            baseline, interfaces.ICheckinCheckoutReference, relationship)
        return default_adapter if named is None else named

    # handle forward references
    for relationship in baseline.getRelationships():
        adapter = lookup(relationship)
        references = baseline.getReferenceImpl(relationship)
        getattr(adapter, mode)(baseline, wc, references, annotations)

    # handle backward references
    mode = mode + "BackReferences"
    for relationship in baseline.getBRelationships():
        adapter = lookup(relationship)
        references = baseline.getBackReferenceImpl(relationship)
        getattr(adapter, mode)(baseline, wc, references, annotations)
def testI18nResource(self):
    """Register an i18n-resource via ZCML and verify language lookup.

    Also verifies that omitting a translation for the declared default
    language raises a ConfigurationError.
    """
    # No adapter is registered before the ZCML below is executed.
    self.assertEqual(component.queryAdapter(request, name='test'), None)
    path1 = os.path.join(tests_path, 'testfiles', 'test.pt')
    path2 = os.path.join(tests_path, 'testfiles', 'test2.pt')
    xmlconfig(StringIO(template % (
        '''
        <browser:i18n-resource name="test" defaultLanguage="fr">
          <browser:translation language="en" file="%s" />
          <browser:translation language="fr" file="%s" />
        </browser:i18n-resource>
        ''' % (path1, path2)
        )))
    v = component.getAdapter(request, name='test')
    self.assertEqual(
        component.queryAdapter(request, name='test').__class__,
        I18nFileResource)
    # Each registered language serves the contents of its own file.
    self.assertEqual(v._testData('en'), open(path1, 'rb').read())
    self.assertEqual(v._testData('fr'), open(path2, 'rb').read())

    # translation must be provided for the default language
    config = StringIO(template % (
        '''
        <browser:i18n-resource name="test" defaultLanguage="fr">
          <browser:translation language="en" file="%s" />
          <browser:translation language="lt" file="%s" />
        </browser:i18n-resource>
        ''' % (path1, path2)
        ))
    self.assertRaises(ConfigurationError, xmlconfig, config)
def __call__(self, **kwargs):
    """Unarchive the context (POST only), optionally recursing into its
    contents, then redirect back to the context with a status message."""
    PostOnly(self.request)
    recurse = self.request.form.get('workflow_unarchive_recurse', False)
    context = self.context
    # When the context is a folder's default page, act on the folder itself.
    ploneview = getMultiAdapter((context, self.request), name='plone')
    if ploneview.isDefaultPageInFolder():
        context = self.context.getParentNode()
    if recurse:
        catalog = getToolByName(context, 'portal_catalog')
        path_query = {'path': '/'.join(context.getPhysicalPath())}
        for brain in catalog.searchResults(path_query):
            candidate = brain.getObject()
            if IObjectArchived.providedBy(candidate):
                queryAdapter(candidate, IObjectArchivator).unarchive(candidate)
        msg = "Object and contents have been unarchived"
    else:
        queryAdapter(context, IObjectArchivator).unarchive(context)
        msg = "Object has been unarchived"
    IStatusMessage(context.REQUEST).add(msg, 'info')
    return self.request.response.redirect(context.absolute_url())
def _at_back_references(source_object, relationship, translation=None):
    """Return viewable Archetypes back references for *relationship*,
    mapped to their translation in the relevant language where possible."""
    lang = queryAdapter(source_object, ILanguage).get_language()
    if translation:
        lang = queryAdapter(translation, ILanguage).get_language()
    back_refs = IReferenceable(source_object).getBRefs(relationship=relationship)
    security = getSecurityManager()
    result = []
    for obj in back_refs:
        if ITranslatable.providedBy(obj):
            manager = ITranslationManager(aq_inner(obj))
            try:
                translated = manager.get_translation(lang)
            except Unauthorized:
                # Skip objects whose translation we may not see.
                continue
            if translated:
                result.append(translated)
                continue
        if security.checkPermission('zope2.View', obj):
            result.append(obj)
    return result
def add_to_repo(obj, event):
    """
    Add a newly created object to the version repository.

    Intended use is as an IObjectAddedEvent subscriber.
    """
    repo = find_repo(obj)
    if repo is None:
        return
    try:
        # If we're undeleting an object, it might already be in the repo
        repo.history(obj.docid)
    except Exception:
        # It is not in the repo, so add it.  (Narrowed from a bare
        # ``except:``, which also swallowed KeyboardInterrupt/SystemExit.)
        adapter = queryAdapter(obj, IObjectVersion)
        if adapter is not None:
            if adapter.comment is None:
                adapter.comment = 'Content created.'
            repo.archive(adapter)
    container = event.parent
    adapter = queryAdapter(container, IContainerVersion)
    if adapter is not None:
        request = get_current_request()
        user = authenticated_userid(request)
        repo.archive_container(adapter, user)
    # Recurse into children if adding a subtree
    if IFolder.providedBy(obj):
        for name, child in obj.items():
            fake_event = FakeEvent()
            fake_event.parent = obj
            add_to_repo(child, fake_event)
def test_indexes(self):
    """Test Indexer indexes.

    Anchor entries should be empty until the document is published,
    after which the title and both anchors are exposed.
    """
    with Transaction():
        factory = self.root.manage_addProduct['silva.app.document']
        factory.manage_addDocument('document', 'Test Document')
        version = self.root.document.get_editable()
        version.body.save(version, TestRequest(), """
<p>
  <h1>Test Document</h1>
  <a class="anchor" name="first" title="First anchor">First anchor</a>
  Some text.
  <a class="anchor" name="second" title="Second anchor">First anchor</a>
</p>
""")
    # There are no entries by default, and not published.
    indexes = queryAdapter(self.root.document, IIndexEntries)
    self.assertTrue(verifyObject(IIndexEntries, indexes))
    self.assertEqual(indexes.get_title(), '')
    self.assertEqual(indexes.get_entries(), [])
    # Published, we see the title and the entries
    IPublicationWorkflow(self.root.document).publish()
    indexes = queryAdapter(self.root.document, IIndexEntries)
    self.assertTrue(verifyObject(IIndexEntries, indexes))
    self.assertEqual(indexes.get_title(), 'Test Document')
    self.assertEqual(indexes.get_entries(),
                     [('first', 'First anchor'), ('second', 'Second anchor')])
def _getFieldObjects(self, objTypes=None, includeFSMarkers=False):
    """ return list of enclosed fields """
    # This function currently checks to see if
    # an object is a form field by looking to see
    # if it has an fgField attribute.
    # Make sure we look through fieldsets
    if objTypes is not None:
        objTypes = list(objTypes)[:]
        objTypes.append('FieldsetFolder')
    myObjs = []
    for obj in self.objectValues(objTypes):
        # use shasattr to make sure we're not aquiring
        # fgField by acquisition
        # TODO: If I stick with this scheme for enable overrides,
        # I'm probably going to want to find a way to cache the result
        # in the request. _getFieldObjects potentially gets called
        # several times in a request.
        # first, see if the field enable override is set
        if shasattr(obj, 'fgTEnabled') and obj.getRawFgTEnabled():
            # process the override enabled TALES expression
            # create a context for expression evaluation
            context = getExprContext(self, obj)
            # call the tales expression, passing our custom context
            enabled = obj.getFgTEnabled(expression_context=context)
        else:
            enabled = True
        if enabled:
            if shasattr(obj, 'fgField'):
                myObjs.append(obj)
            if shasattr(obj, 'fieldsetFields'):
                if queryAdapter(obj, interface=ISchemaExtender,
                                name=config.PROJECT_NAME + FieldsetFolderExtender.__name__):
                    # NOTE(review): extender IS registered here, so refresh
                    # title/description through it.  (The original comment
                    # said "Product is not installed", which was misleading
                    # for this positive branch.)
                    obj.setTitle(obj.Title())
                    obj.setDescription(obj.Description())
                # recurse into the fieldset's own fields
                myObjs += obj.fieldsetFields(objTypes, includeFSMarkers)
    # Re-apply stored values through the (possibly extended) mutators so
    # extended schemata pick them up.
    for field in myObjs:
        if not queryAdapter(field, interface=ISchemaExtender,
                            name=config.PROJECT_NAME + BaseFormFieldExtender.__name__):
            # Product is not installed --> nothing to patch
            continue
        field.setTitle(field.Title())
        field.setDescription(field.Description())
        if hasattr(field,'setFgDefault'):
            field.setFgDefault(field.getFgDefault())
        if isinstance(field.fgField, (StringVocabularyField, LinesVocabularyField,)):
            field.fgVocabulary = field.getFgVocabulary()
        if isinstance(field.fgField, LikertField):
            field.setLikertAnswers(field.getLikertAnswers())
            field.setLikertQuestions(field.getLikertQuestions())
    return myObjs
def test_filedetails(self):
    """Testing file details adapters for:

    * generic content type
    * image
    * file
    """
    base_labels = [u'Title', u'Description', u'Type']

    # Generic content falls back to the base details adapter.
    content = Content()
    adapter = queryAdapter(content, IContentDetails)
    self.assertTrue(isinstance(adapter, BaseContentDetails))
    self.assertEqual([i['label'] for i in adapter.get_data()], base_labels)

    # Content marked with IATImage gets the image details adapter.
    image = Content()
    alsoProvides(image, IATImage)
    img_adapter = queryAdapter(image, IContentDetails)
    self.assertTrue(isinstance(img_adapter, ImageDetails))
    self.assertEqual([i['label'] for i in img_adapter.get_data()],
                     base_labels + [u'Pixel (w, h)', u'Preview'])

    # Content marked with IATFile gets the file details adapter.
    file_ = Content()
    alsoProvides(file_, IATFile)
    file_adapter = queryAdapter(file_, IContentDetails)
    self.assertTrue(isinstance(file_adapter, FileDetails))
    self.assertEqual([i['label'] for i in file_adapter.get_data()],
                     base_labels + [u'Size', u'Content type'])
def json(self, **kwargs): """ Implement this method in order to provide a valid exhibit JSON """ res = {'items': [], 'properties': {}} # Get data adapter = queryAdapter(self.context, IVisualizationData) if not (adapter or adapter.data): return simplejson.dumps(res) # Update JSON with existing annotations properties accessor = queryAdapter(self.context, IVisualizationConfig) my_json = {'items': [], 'properties': {}} my_json = getattr(accessor, 'json', {}) column_types = kwargs.get('column_types', None) or self.column_types(my_json) annotations = kwargs.get('annotations', None) or self.annotations(my_json) # Convert to JSON datafile = StringIO(adapter.data) converter = queryUtility(ITable2JsonConverter) try: _cols, res = converter(datafile, column_types=column_types, annotations=annotations) except Exception, err: logger.debug(err) return simplejson.dumps(res)
def _gotSession(sess): log.msg( 'bit.bot.http.socket: BotSocketProtocol.dataReceived._gotSession', sess) if sess: getUtility(ISockets).add('bot', sessionid, token, self) request = queryAdapter(self, ISocketRequest, name=data['request']) if data['request'] == 'message': message = data['message'] if message.startswith('>'): request = getAdapter(self, ISocketRequest, name="command") message = message[1:] data['command'] = message.strip().split(' ')[0] data['args'] = ' '.join(message.strip().split(' ')[1:]) elif message.startswith('~'): request = getAdapter( self, ISocketRequest, name="subscribe") data['subscribe'] = message[1:] else: request = getAdapter( self, ISocketRequest, name="message") else: request = queryAdapter( self, ISocketRequest, name=data['request']) if request: request.load(sessionid, sess, data) else: print 'NO REQUEST ADAPTER FOR: %s' % data['request']
def json(self, column_types=None): """ Implement this method in order to provide a valid exhibit JSON """ res = {'items': [], 'properties': {}} # Get data adapter = queryAdapter(self.context, IVisualizationData) if not (adapter or adapter.data): return simplejson.dumps(res) # Update JSON with existing annotations properties accessor = queryAdapter(self.context, IVisualizationConfig) my_json = {'items': [], 'properties': {}} if accessor: my_json = accessor.json if not column_types: column_types = dict( (key, value.get('columnType', value.get('valueType', 'text')) if isinstance(value, dict) else value) for key, value in my_json.get('properties', {}).items() ) # Convert to JSON datafile = StringIO(adapter.data) converter = queryUtility(ITable2JsonConverter) try: _cols, res = converter(datafile, column_types) except Exception, err: logger.debug(err) return simplejson.dumps(res)
def create_booking(self, order, cart_data, uid, count, comment):
    """Create and return one booking node for a cart item.

    Returns None if the buyable no longer exists; raises CheckoutError if
    the requested count is no longer available.  NOTE: decrements the
    item's stock as a side effect.
    """
    brain = get_catalog_brain(self.context, uid)
    # brain could be None if uid for item in cookie which no longer exists.
    if not brain:
        return
    buyable = brain.getObject()
    item_state = get_item_state(buyable, self.request)
    if not item_state.validate_count(count):
        msg = u'Item no longer available {0}'.format(buyable.id)
        logger.warning(msg)
        raise CheckoutError(msg)
    item_stock = get_item_stock(buyable)
    # stock not applied, state new
    if item_stock is None:
        available = None
        state = ifaces.STATE_NEW
    # calculate state from stock
    else:
        if item_stock.available is not None:
            item_stock.available -= float(count)
        available = item_stock.available
        # Negative remaining stock means the booking is only reserved.
        state = ifaces.STATE_NEW if available is None or available >= 0.0\
            else ifaces.STATE_RESERVED
    item_data = get_item_data_provider(buyable)
    vendor = acquire_vendor_or_shop_root(buyable)
    booking = OOBTNode()
    # Identity / linkage attributes.
    booking.attrs['email'] = order.attrs['personal_data.email']
    booking.attrs['uid'] = uuid.uuid4()
    booking.attrs['buyable_uid'] = uid
    booking.attrs['buyable_count'] = count
    booking.attrs['buyable_comment'] = comment
    booking.attrs['order_uid'] = order.attrs['uid']
    booking.attrs['vendor_uid'] = uuid.UUID(IUUID(vendor))
    booking.attrs['creator'] = order.attrs['creator']
    booking.attrs['created'] = order.attrs['created']
    booking.attrs['exported'] = False
    booking.attrs['title'] = brain and brain.Title or 'unknown'
    # Pricing attributes.
    booking.attrs['net'] = item_data.net
    booking.attrs['vat'] = item_data.vat
    booking.attrs['discount_net'] = item_data.discount_net(count)
    booking.attrs['currency'] = cart_data.currency
    booking.attrs['quantity_unit'] = item_data.quantity_unit
    # Stock / workflow attributes.
    booking.attrs['remaining_stock_available'] = available
    booking.attrs['state'] = state
    booking.attrs['salaried'] = ifaces.SALARIED_NO
    booking.attrs['tid'] = 'none'
    # Shipping and trading info are optional per buyable.
    shipping_info = queryAdapter(buyable, IShippingItem)
    if shipping_info:
        booking.attrs['shippable'] = shipping_info.shippable
    else:
        booking.attrs['shippable'] = False
    trading_info = queryAdapter(buyable, ifaces.ITrading)
    if trading_info:
        booking.attrs['item_number'] = trading_info.item_number
        booking.attrs['gtin'] = trading_info.gtin
    else:
        booking.attrs['item_number'] = None
        booking.attrs['gtin'] = None
    return booking
def test_SEOCanonicalAdapterRegistration(self):
    """An ISEOCanonicalPath adapter must be registered for both the portal
    root and a document."""
    for obj in (self.portal, self.mydoc):
        adapter = queryAdapter(obj, interface=ISEOCanonicalPath)
        self.assertTrue(adapter is not None,
                        "Not registered ISEOCanonicalPath adapter")
def test_canonicalAdapterRegistration(self):
    """An ICanonicalPath adapter must be registered for both the portal
    root and a document."""
    adapter_for_portal = queryAdapter(self.portal, interface=ICanonicalPath)
    msg = "Not registered ICanonicalPath adapter for portal root"
    self.assertTrue(adapter_for_portal is not None, msg)

    adapter_for_doc = queryAdapter(self.mydoc, interface=ICanonicalPath)
    msg = "Not registered ICanonicalPath adapter for the documnent"
    self.assertTrue(adapter_for_doc is not None, msg)
def isAvailable(self):
    """Available only when the context has a 'forum' (or legacy 'forums')
    workspace factory that is enabled."""
    factory = None
    for name in ('forum', 'forums'):
        factory = queryAdapter(self.context, IWorkspaceFactory, name)
        if factory is not None:
            break
    if factory is None or not self.context.isEnabled(factory):
        return
    return super(ForumPermission, self).isAvailable()
def get_microdata(self, brain):
    """Return the IMicrodataVocabulary adapter for *brain*, preferring the
    adapter named after its microdata itemtype."""
    # look for a type-specific adapter, if any
    specific = queryAdapter(brain, interface=IMicrodataVocabulary,
                            name=brain.microdata_itemtype)
    if specific:
        return specific
    # fallback to basic Thing adapter
    return queryAdapter(brain, interface=IMicrodataVocabulary, name=u'')
def related_events(self):
    """Return the referenced IATEvent items that share the context's language."""
    current_lang = queryAdapter(self.context, ILanguage).get_language()
    return [item for item in self.refs
            if IATEvent.providedBy(item)
            and queryAdapter(item, ILanguage).get_language() == current_lang]
def sync_criterion(self, translation, source, target):
    """Synchronise one criterion onto its translated counterpart.

    A field-specific ICriterionSyncer (named after the criterion's field)
    wins; otherwise the generic criterion-type syncer is used, if any.
    """
    syncer = queryAdapter(source, ICriterionSyncer, name=source.Field())
    if syncer is None:
        syncer = queryAdapter(source, ICriterionSyncer)
    if syncer is not None:
        syncer.sync(translation, target)
def _verifyImport(self, obj):
    """Verify every component registered by the import profile: adapters,
    a named adapter, subscribers, a handler, named/unnamed utilities, and
    utilities backed by existing tools."""
    # Unnamed IAnotherDummy2 adapter.
    adapted = queryAdapter(object(), IAnotherDummy2)
    self.failUnless(IAnotherDummy2.providedBy(adapted))
    self.failUnless(adapted.verify())

    # Named (u'foo') IAnotherDummy2 adapter.
    adapted = queryAdapter(object(), IAnotherDummy2, name=u'foo')
    self.failUnless(IAnotherDummy2.providedBy(adapted))
    self.failUnless(adapted.verify())

    # Subscription adapters.
    dummy = DummyObject()
    results = [adap.verify() for adap in subscribers([dummy], IAnotherDummy2)]
    self.assertEquals(results, [True])

    # Event handler.
    dummy = DummyObject()
    handle(dummy)
    self.assertEquals(dummy.handled, 1)

    # Named utility, stored in the site under its dotted-name id.
    util = queryUtility(IDummyInterface2, name=u'foo')
    self.failUnless(IDummyInterface.providedBy(util))
    self.failUnless(util.verify())
    self.failUnless(util.__parent__ == obj)
    name = ('Products.GenericSetup.tests.test_components.'
            'IDummyInterface2-foo')
    self.assertEquals(util.__name__, name)
    self.failUnless(name in obj.objectIds())

    # Unnamed utility.
    util = queryUtility(IDummyInterface)
    self.failUnless(IDummyInterface.providedBy(util))
    self.failUnless(util.verify())
    self.failUnless(util.__parent__ == obj)
    name = 'dummy_utility'
    self.assertEquals(util.__name__, name)
    self.failUnless(name in obj.objectIds())

    # Utility registered for an existing tool.
    util = queryUtility(IDummyInterface, name='dummy tool name')
    self.failUnless(IDummyInterface.providedBy(util))
    self.failUnless(util.verify())
    self.assertEqual(util.meta_type, 'dummy tool')
    # make sure we can get the tool by normal means
    tool = getattr(obj.aq_parent, 'dummy_tool')
    self.assertEqual(tool.meta_type, 'dummy tool')
    self.assertEquals(repr(aq_base(util)), repr(aq_base(tool)))

    # Second tool-backed utility.
    util = queryUtility(IDummyInterface2, name='dummy tool name2')
    self.failUnless(IDummyInterface2.providedBy(util))
    self.failUnless(util.verify())
    self.assertEqual(util.meta_type, 'dummy tool2')
    # make sure we can get the tool by normal means
    tool = getattr(obj.aq_parent, 'dummy_tool2')
    self.assertEqual(tool.meta_type, 'dummy tool2')
    self.assertEquals(repr(aq_base(util)), repr(aq_base(tool)))
def load(self, sessionid, sess, data):
    """Dispatch a chat command to its named ICommand adapter.

    Unknown commands fall back to the default adapter with a
    "help <command>" message.
    """
    log.msg("bit.bot.http.request: CommandRequest.load: ",
            sessionid, data["message"])
    self.session_id = sessionid
    msg = data["command"]
    command_name = msg.strip().split(" ")[0]
    command = queryAdapter(self, ICommand, command_name)
    # (Removed a duplicate ``self.session_id = sessionid`` assignment.)
    if not command:
        command = queryAdapter(self, ICommand)
        msg = "help %s" % command_name
    return command.load(sessionid, msg).addCallback(self.response)
def model_deleted(model, event):
    """Publish a 'remove' event to the parent container's stream and a
    'delete' event to the model's own stream, where streams exist."""
    from opennode.oms.model.traversal import canonical_path
    timestamp = int(time.time() * 1000)

    def has_stream(obj):
        # Either the object provides IStream directly or is adaptable to it.
        return IStream.providedBy(obj) or queryAdapter(obj, IStream)

    parent = event.container
    if has_stream(parent):
        IStream(parent).add((timestamp, dict(event="remove",
                                             name=model.__name__,
                                             url=canonical_path(parent))))
    if has_stream(model):
        IStream(model).add((timestamp, dict(event="delete",
                                            name=model.__name__,
                                            url=canonical_path(model))))
def callObject(self, request, ob):
    """Publish *ob*: answer CORS preflight (OPTIONS), render REST nodes
    directly, or look up a named IRESTNode adapter for anything else."""
    if request.method == "OPTIONS":
        # Our request is an OPTIONS request.
        # To satisfy the CORS requirements, we answer.
        # In order to keep this pluggable, we adapt the result
        # of the traversing instead of catching the request before
        # traversal.
        cors = ICORS(ob, None)
        if cors is None:
            # we don't have a CORS handler.
            # it means we don't even have a generic adapter.
            # Raise Unauthorized : CORS is not allowed.
            raise Unauthorized
        return cors.OPTIONS(request)
    elif IRESTNode.providedBy(ob):
        # The returned object is already a REST node
        return ob(request).encode('utf-8')
    else:
        # We make sure the REST node name requested via the
        # X-UVCSITE-REST header resolves to a named IRESTNode adapter.
        name = request.environment["HTTP_X_UVCSITE_REST"].lower()
        restnode = queryAdapter(ob, IRESTNode, name=name)
        if restnode is None:
            raise NotImplementedError("No REST node %s" % name)
        return restnode(request).encode('utf-8')
def onSpreadSheetChanged(obj, evt):
    """ Handle spreadsheet """
    # Event subscriber: runs when a visualization's spreadsheet changes.
    if not evt.spreadsheet:
        return
    request = getattr(obj, 'REQUEST', None)
    if not request:
        return
    mutator = queryAdapter(obj, IVisualizationConfig)
    if not mutator:
        return
    # Start a fresh payload, carrying over previously saved properties.
    new_json = {'items': [], 'properties': {}}
    new_json['properties'].update(mutator.json.get('properties', {}))
    datafile = StringIO(evt.spreadsheet)
    converter = queryUtility(ITable2JsonConverter)
    try:
        columns, data = converter(datafile)
    except Exception, err:
        logger.exception(err)
        return
    # NOTE(review): ``columns``/``data``/``new_json`` are unused past this
    # point in the visible chunk — the rest of the handler may lie outside
    # this view; confirm before refactoring.
def migrate_rowfilters(context):
    """ Migrate dashboard image charts

    Converts each chart's ``row_filters`` from the old format (a bare list
    of values per row) to the new dict format with explicit 'values' and
    'type' keys.
    """
    ctool = getToolByName(context, 'portal_catalog')
    brains = ctool.unrestrictedSearchResults(portal_type='DavizVisualization')
    logger.info('Migrating %s Visualizations ...', len(brains))
    for brain in brains:
        logger.info('Migrating %s', brain.getURL())
        visualization = brain.getObject()
        mutator = queryAdapter(visualization, IVisualizationConfig)
        for view in mutator.views:
            if not view.get('chartsconfig'):
                continue
            config = view.get('chartsconfig')
            for chart in config.get('charts', []):
                if not chart.get('row_filters'):
                    continue
                row_filters = json.loads(chart.get('row_filters'))
                migrated_rf = {}
                for row, filters in row_filters.items():
                    # Old format stored a plain list of values.
                    # (isinstance instead of ``type(filters) == list``.)
                    if isinstance(filters, list):
                        migrated_rf[row] = {'values': filters,
                                            'type': 'hidden'}
                    else:
                        migrated_rf[row] = filters
                chart['row_filters'] = json.dumps(migrated_rf)
            data = {'chartsconfig': config}
            mutator.edit_view('googlechart.googlecharts', **data)
    logger.info('Migrating Visualizations ... DONE')
def languages(self):
    """Return the selector's language entries, each with a
    @@multilingual-selector URL built for the context's translation group."""
    base_infos = super(LanguageSelectorViewlet, self).languages()
    translation_group = queryAdapter(self.context, ITG)
    if translation_group is None:
        translation_group = NOTG
    entries = []
    for lang_info in base_infos:
        # Avoid to modify the original language dict
        entry = lang_info.copy()
        entry['translated'] = True
        query_extras = {'set_language': entry['code']}
        post_path = getPostPath(self.context, self.request)
        if post_path:
            query_extras['post_path'] = post_path
        site = getSite()
        entry['url'] = addQuery(
            self.request,
            site.absolute_url().rstrip("/") +
            "/@@multilingual-selector/%s/%s" % (translation_group,
                                                lang_info['code']),
            **query_extras)
        entries.append(entry)
    return entries
def get_properties(self, recipient_data=tuple()):
    """Collect doc-properties from the document's context chain (document,
    dossier, repository folder, repository, site, member, proposal) and
    from each recipient; later sources override earlier keys."""
    document = self.context
    dossier = document.get_parent_dossier()
    repofolder = self.get_repofolder(dossier)
    repo = self.get_repo(dossier)
    site = self.get_site(dossier)
    member = self.get_member(self.request)
    proposal = document.get_proposal()

    properties = {}
    sources = [document, dossier, repofolder, repo, site, member, proposal]
    for obj in sources:
        provider = queryAdapter(obj, IDocPropertyProvider)
        if provider is not None:
            properties.update(provider.get_properties())

    for recipient in recipient_data:
        provider = recipient.get_doc_property_provider(prefix='recipient')
        properties.update(provider.get_properties())

    return properties
def contextual_arg_parser(self, args, partial=False):
    """If command offers a contextual parser, use it, otherwise fall back
    to a normal parser.

    NOTE: this is a Twisted inlineCallbacks-style generator — ``yield``
    waits on deferreds and ``defer.returnValue`` produces the result.
    """
    parser = yield self.arg_parser(partial=partial)
    contextual = queryAdapter(self, IContextualCmdArgumentsSyntax)
    if contextual:
        try:
            # We have to use a partial parser for this, because:
            # a) help printing is inhibited
            # b) it won't print errors
            # c) it will ignore mandatory arguments (e.g. if the context is not the only mandatory arg).
            partial_parser = yield self.arg_parser(partial=True)
            parsed, rest = partial_parser.parse_known_args(args)
        except ArgumentParsingError:
            # Fall back to uncontextualied parsed in case of parsing errors.
            # This happens when the "context defining" argument is declared as mandatory
            # but it's not yet present on the command line.
            defer.returnValue(parser)
        contextual_parser = yield contextual.arguments(
            parser, parsed, rest)
        defer.returnValue(contextual_parser)
    defer.returnValue(parser)
def init_container(docid, path, repo, site): try: repo.container_contents(docid) # Already in repo return except NoResultFound: # Not in repo pass context = find_resource(site, path) if context.__name__ == 'TEMP': return if find_interface(context, IIntranets): return container = queryAdapter(context, IContainerVersion) if container is not None: print "Updating container version for %s" % resource_path(context) user = getattr(context, 'creator', None) if user is None: user = get_setting(context, 'system_user', 'admin') repo.archive_container(container, user) context._p_deactivate()
def registerInterface(self, interface, omit=(), prefix=None):
    """Create a registry record for every writable field of *interface*.

    Record names are ``<prefix>.<fieldName>`` (prefix defaults to the
    interface's dotted name).  Existing record values are kept when they
    still validate against the field; otherwise the field default is used.
    Raises TypeError when a field has no IPersistentField equivalent.
    """
    if prefix is None:
        prefix = interface.__identifier__
    if not prefix.endswith("."):
        prefix += '.'
    for name, field in getFieldsInOrder(interface):
        if name in omit or field.readonly:
            continue
        record_name = prefix + name
        persistent_field = queryAdapter(field, IPersistentField)
        if persistent_field is None:
            raise TypeError(
                "There is no persistent field equivalent for the field "
                "`{0}` of type `{1}`.".format(name, field.__class__.__name__))
        persistent_field.interfaceName = interface.__identifier__
        persistent_field.fieldName = name
        value = persistent_field.default
        # Attempt to retain the existing value
        if record_name in self.records:
            existing_record = self.records[record_name]
            value = existing_record.value
            bound_field = persistent_field.bind(existing_record)
            try:
                bound_field.validate(value)
            except Exception:
                # Narrowed from a bare ``except:``; any validation failure
                # means the stored value no longer fits the (possibly
                # changed) field, so fall back to the default.
                value = persistent_field.default
        self.records[record_name] = Record(persistent_field, value,
                                           _validate=False)
def apply_security_to_subscription(subscription, event):
    """Apply default permissions to a newly added *subscription*.

    Intended as an object-added event subscriber. When the subscription
    has no *owner_id*, no permissions are applied at all. Otherwise the
    security setter is resolved as an adapter on the subscription
    (``IWebhookSubscriptionSecuritySetter``), defaulting to the built-in
    setter that grants the owner ``zope.View`` and ``nti.actions.delete``.
    Registering a site-local adapter therefore replaces the default
    declarations entirely; registering an extra subscriber adds to them.

    :param subscription: the subscription being secured.
    :param event: the triggering lifecycle event (unused).
    """
    owner = subscription.owner_id
    if not owner:
        # No owner means nobody gets default permissions.
        return
    setter = component.queryAdapter(
        subscription,
        IWebhookSubscriptionSecuritySetter,
        default=_default_security_setter,
    )
    setter(subscription)
def validate(self, value):
    """Validate that *value* is non-negative and not below the closest
    ancestor's lifecycle value.

    :param value: the candidate value (coercible to int).
    :raises schema.interfaces.TooSmall: if the value is negative.
    :raises schema.interfaces.TooBig: if an ancestor lifecycle value
        exists and the value is smaller than it.
    """
    super(IntGreaterEqualThanParentValidator, self).validate(value)
    # should not be negative
    if int(value) < 0:
        raise schema.interfaces.TooSmall()
    # Determine the object whose ancestors carry the reference value:
    # on an add form ('++add++' in the URL) the context itself is the
    # parent; otherwise start from the acquisition parent.
    # FIX: the fallback default was `object()`, and `'++add++' in object()`
    # raises TypeError when PATH_INFO is missing; use '' so membership
    # testing is always valid.
    #XXX CHANGED FROM PATH_TRANSLATED TO PATH_INFO because the test
    # don't work
    if '++add++' in self.request.get('PATH_INFO', ''):
        obj = self.context
    else:
        obj = self.context.aq_inner.aq_parent
    parent_value = -1
    # Walk up the acquisition chain until a lifecycle value is found or
    # the site root is reached.
    while parent_value < 0 and not ISiteRoot.providedBy(obj):
        cf_obj = queryAdapter(obj, ILifeCycle)
        if cf_obj:
            try:
                parent_value = int(self.field.get(cf_obj))
            except AttributeError:
                pass
            except TypeError:
                # Field present but value not numeric: treat as 0 so the
                # walk stops here.
                parent_value = 0
        try:
            obj = obj.aq_inner.aq_parent
        except AttributeError:
            # Ran out of parents; nothing to compare against.
            return
    # should not be smaller than parent
    # NOTE(review): raising TooBig for a too-small value looks inverted,
    # but is kept as-is -- callers may catch this exact type. Confirm.
    if parent_value > -1 and int(value) < parent_value:
        raise schema.interfaces.TooBig()
def __call__(self, **kwargs):
    """Serve JSON data for the requested dataset type.

    The 'type' form/keyword parameter selects which IJsonProvider method
    answers the request (defaulting to full search); a truthy 'print'
    parameter returns pretty-printed Python instead of JSON.
    """
    if self.request:
        kwargs.update(self.request.form)
    service = queryAdapter(self.context, IJsonProvider)
    requested = kwargs.pop('type', 'search')
    # Dispatch table replaces the original if/elif chain; unknown types
    # fall back to the generic search.
    handlers = {
        'groups': service.groups,
        'biogroups': service.biogroups,
        'countries': service.countries,
        'nuts': service.nuts,
        'cities': service.cities,
        'natural': service.natural_features,
    }
    handler = handlers.get(requested, service.search)
    res = handler(**kwargs)
    if kwargs.get('print', None):
        return pformat(res)
    return simplejson.dumps(res)
def _save_audio_metadata(self):
    """Write the audio metadata fields of this object as metadata on the
    raw file data.

    The primary field content is dumped to a temp file, the mime-specific
    IAudioDataAccessor stores the metadata into it, and the tagged bytes
    are read back into the ZODB file object.
    """
    mime_type = self.context.get_content_type()
    accessor = component.queryAdapter(self.context,
                                      interfaces.IAudioDataAccessor,
                                      unicode(mime_type))
    if accessor is None:
        # No accessor registered for this mime type -- nothing to tag.
        return
    field = self.context.getPrimaryField()
    filename = fileutils.write_ofsfile_to_tempfile(
        field.getEditAccessor(self.context)())
    # FIX: the original leaked both the open file handle and the temp
    # file on any exception; try/finally guarantees cleanup.
    try:
        accessor.store(filename)
        zodb_file = field.getEditAccessor(self.context)()
        fin = open(filename, 'rb')
        try:
            # very inefficient, loading whole file in memory upon upload
            # TODO: fix in-memory loading
            data, size = zodb_file._read_data(fin)
            zodb_file.update_data(data, mime_type, size)
        finally:
            fin.close()
    finally:
        os.remove(filename)
def __init__(self, context, request, name=None):
    """
    Construct composed form given (default) schema and a tuple of
    ordered additional schema key/value pairs of (string) component
    name keys to schema values.

    :param context: content object the form is composed for.
    :param request: the current request.
    :param name: optional named-adapter lookup for the form definition;
        when None the unnamed IFormDefinition adapter is used.
    """
    self.context = context
    self.request = request
    # form definition will either be context, or adaptation of context.
    # see uu.formlibrary.definition.form_definition for adapter example.
    if name is None:
        self.definition = IFormDefinition(self.context)
    else:
        self.definition = queryAdapter(
            self.context,
            IFormDefinition,
            name=name,
        )
    self._schema = self.definition.schema
    self.groups = []  # modified by updateFieldsFromSchemata()
    self.components = IFormComponents(self.definition)
    self.group_schemas = self._group_schemas()
    self.group_titles = self._group_titles()
    # mapping: schema to names:
    self.schema_names = dict(invert(self.group_schemas))
    # ordered list of additional schema for AutoExtensibleForm:
    self._additionalSchemata = tuple(
        [t[1] for t in self.group_schemas if t[0]])
    # Deliberately bypass super() (which would mis-pass self twice) and
    # call the base form constructor directly:
    form.Form.__init__(self, context, request)
    self.saved = False  # initial value: no duplication of save...
    self.save_attempt = False  # flag for save attempt, success or not
    self._status = IStatusMessage(self.request)
def getFields(self):
    """Return the tuple of viewlet fields enabled for the current
    portal type, according to the progress-tool settings.

    An empty tuple is returned when no settings adapter is available.
    """
    ptool = queryUtility(IProgressTool)
    settings = queryAdapter(ptool, ISettings)
    ctype = getattr(self.context, 'portal_type', '')
    fields = ()
    if not settings:
        return fields
    # Each settings list controls visibility of one field group.
    visibility = (
        (settings.viewletVisibleFor, self.fieldsProgress),
        (settings.trailViewletVisibleFor, self.fieldsTrail),
        (settings.metadataViewletVisibleFor, self.fieldsMetadata),
    )
    for allowed, group in visibility:
        if ctype in (allowed or []):
            fields += group
    return fields
def fix_default_layout(context):
    """ In eea.facetednavigation < 4.0 the default layout was
    folder_summary_view. As in Plone 4 folder_summary_view doesn't wrap
    the listing in a macro, the default layout for
    eea.facetednavigation > 4.0 is folder_listing. Still, we need to keep
    backward compatibility, at least when using with EEA site. Therefore
    this upgrade step is available only in EEA context, as
    folder_summary_view was customized in eea.design in order to define
    the 'content-core' macro.

    :param context: portal tool context used to locate the catalog.
    """
    ctool = getToolByName(context, 'portal_catalog')
    iface = interfaceToName(context, IFacetedNavigable)
    brains = ctool.unrestrictedSearchResults(object_provides=iface)
    for brain in brains:
        doc = brain.getObject()
        anno = queryAdapter(doc, IAnnotations)
        if anno is None:
            # FIX: queryAdapter may return None for objects without
            # annotation support; previously this crashed the whole
            # upgrade step with an AttributeError on anno.get().
            continue
        if anno.get(ANNO_FACETED_LAYOUT, ''):
            # Not using the default one, skipping
            continue
        logger.info('Updating faceted layout to folder_summary_view for: %s',
                    doc.absolute_url())
        anno[ANNO_FACETED_LAYOUT] = 'folder_summary_view'
def render_extension(econtext, name):
    """TALES extension renderer

    See :ref:`tales` for complete description.

    The requested extension can be called with our without arguments, like
    in ${structure:tales:my_expression} or
    ${structure:tales:my_expression(arg1, arg2)}. In the second form,
    arguments will be passed to the "render" method; arguments can be
    static (like strings or integers), or can be variables defined into
    current template context; other Python expressions including
    computations or functions calls are actually not supported, but dotted
    syntax is supported to access inner attributes of variables.

    :param econtext: the TALES evaluation context (provides 'context',
        'request' and 'view' bindings).
    :param name: the extension expression, optionally with an argument
        list in parentheses.
    :return: the extension's rendered output, or '' if no matching
        ITALESExtension adapter is found.
    """

    def get_value(econtext, arg):
        """Extract argument value from context

        Extension expression language is quite simple. Values can be given
        as positioned strings, integers or named arguments of the same
        types. A named argument is returned as a one-entry dict so the
        caller can merge it into kwargs.
        """
        arg = arg.strip()
        if arg.startswith('"') or arg.startswith("'"):
            # may be a quoted string... strip the surrounding quotes.
            return arg[1:-1]
        if '=' in arg:
            # Named argument: resolve the value recursively.
            key, value = arg.split('=', 1)
            value = get_value(econtext, value)
            return {key.strip(): value}
        try:
            arg = int(arg)  # check integer value
        except ValueError:
            # Not an integer: treat as a (possibly dotted) context
            # variable reference and walk the attribute path.
            args = arg.split('.')
            result = econtext.get(args.pop(0))
            for arg in args:  # pylint: disable=redefined-argument-from-local
                result = getattr(result, arg)
            return result
        else:
            return arg

    name = name.strip()
    context = econtext.get('context')
    request = econtext.get('request')
    view = econtext.get('view')
    args, kwargs = [], {}
    # Split "name(arg1, arg2=x)" into the extension name and its args.
    func_match = FUNCTION_EXPRESSION.match(name)
    if func_match:
        name, arguments = func_match.groups()
        for arg in map(lambda x: get_value(econtext, x),
                       ARGUMENTS_EXPRESSION.findall(arguments)):
            if isinstance(arg, dict):
                kwargs.update(arg)
            else:
                args.append(arg)
    # Adapter lookup, most specific first: (context, request, view),
    # then (context, request), then context alone.
    extension = queryMultiAdapter((context, request, view),
                                  ITALESExtension, name=name)
    if extension is None:
        extension = queryMultiAdapter((context, request),
                                      ITALESExtension, name=name)
    if extension is None:
        extension = queryAdapter(context, ITALESExtension, name=name)
    # return an empty string if the extension was not found.
    if extension is None:
        return ''
    # Insert the data gotten from the context
    addTALNamespaceData(extension, econtext)
    return extension.render(*args, **kwargs)
def browserDefault(self, obj):
    """Sets default so we can return whatever we want instead of
    index_html.

    This method is complex, and interacts with mechanisms such as
    IBrowserDefault (implemented in CMFDynamicViewFTI), LinguaPlone and
    various mechanisms for setting the default page.

    The method returns a tuple (obj, [path]) where path is a path to a
    template or other object to be acquired and displayed on the object.
    The path is determined as follows:

    0. If we're coming from WebDAV, make sure we don't return a contained
       object "default page" ever
    1. If there is an index_html attribute (either a contained object or
       an explicit attribute) on the object, return that as the "default
       page". Note that this may be used by things like File and Image to
       return the contents of the file, for example, not just
       content-space objects created by the user.
    2. If the object implements IBrowserDefault, query this for the
       default page.
    3. If the object has a property default_page set and this gives a
       list of, or single, object id, and that object is found in the
       folder or is the name of a skin template, return that id
    4. If the property default_page is set in site_properties and that
       property contains a list of ids of which one id is found in the
       folder, return that id
    5. If the object implements IBrowserDefault, try to get the selected
       layout.
    6. If the type has a 'folderlisting' action and no default page is
       set, use this action. This permits folders to have the default
       'view' action be 'string:${object_url}/' and hence default to a
       default page when clicking the 'view' tab, whilst allowing the
       fallback action to be specified TTW in portal_types (this action
       is typically hidden)
    7. If nothing else is found, fall back on the object's 'view' action.
    8. If this is not found, raise an AttributeError
    """
    # WebDAV in Zope is odd it takes the incoming verb eg: PROPFIND
    # and then requests that object, for example for: /, with verb PROPFIND
    # means acquire PROPFIND from the folder and call it
    # its all very odd and WebDAV'y
    request = getattr(self, 'REQUEST', None)
    if request is not None and 'REQUEST_METHOD' in request:
        if request['REQUEST_METHOD'] not in ['GET', 'POST']:
            # Non-browser verb: let WebDAV machinery acquire the method.
            return obj, [request['REQUEST_METHOD']]
    # Now back to normal
    #
    # 1. Get an attribute or contained object index_html
    #
    # Note: The base PloneFolder, as well as ATCT's ATCTOrderedFolder
    # defines a method index_html() which returns a ReplaceableWrapper.
    # This is needed for WebDAV to work properly, and to avoid implicit
    # acquisition of index_html's, which are generally on-object only.
    # For the purposes of determining a default page, we don't want to
    # use this index_html(), nor the ComputedAttribute which defines it.
    if not isinstance(getattr(obj, 'index_html', None), ReplaceableWrapper):
        index_obj = getattr(aq_base(obj), 'index_html', None)
        if index_obj is not None \
                and not isinstance(index_obj, ComputedAttribute):
            return obj, ['index_html']
    #
    # 2. Look for a default_page managed by an IBrowserDefault-implementing
    #    object
    #
    # 3. Look for a default_page property on the object
    #
    # 4. Try the default sitewide default_page setting
    #
    if obj.isPrincipiaFolderish:
        defaultPage = self.getDefaultPage(obj)
        if defaultPage is not None:
            if defaultPage in obj:
                return obj, [defaultPage]
            # Avoid infinite recursion in the case that the page id == the
            # object id
            elif (defaultPage != obj.getId() and
                  defaultPage != '/'.join(obj.getPhysicalPath())):
                # For the default_page property, we may get things in the
                # skin layers or with an explicit path - split this path
                # to comply with the __browser_default__() spec
                return obj, defaultPage.split('/')
    # 5. If there is no default page, try IBrowserDefault.getLayout()
    if IBrowserDefault.providedBy(obj):
        browserDefault = obj
    else:
        browserDefault = queryAdapter(obj, IBrowserDefault)
    if browserDefault is not None:
        layout = browserDefault.getLayout()
        if layout is None:
            raise AttributeError(
                "%s has no assigned layout, perhaps it needs an FTI" % obj)
        else:
            return obj, [layout]
    #
    # 6. If the object has a 'folderlisting' action, use this
    #
    # This allows folders to determine in a flexible manner how they are
    # displayed when there is no default page, whilst still using
    # browserDefault() to show contained objects by default on the 'view'
    # action (this applies to old-style folders only, IBrowserDefault is
    # managed explicitly above)
    if base_hasattr(obj, 'getTypeInfo'):
        try:
            # XXX: This isn't quite right since it assumes the action
            # starts with ${object_url}. Should we raise an error if
            # it doesn't?
            act = obj.getTypeInfo().getActionInfo(
                'folder/folderlisting')['url'].split('/')[-1]
            return obj, [act]
        except ValueError:
            pass
    #
    # 7. Fall back on the 'view' action
    #
    try:
        # XXX: This isn't quite right since it assumes the action
        # starts with ${object_url}. Should we raise an error if
        # it doesn't?
        act = obj.getTypeInfo().getActionInfo(
            'object/view')['url'].split('/')[-1]
        return obj, [act]
    except ValueError:
        pass
    #
    # 8. If we can't find this either, raise an exception
    #
    raise AttributeError(
        "Failed to get a default page or view_action for %s" % (
            obj.absolute_url(), ))
def queryModelDescriptor(ob):
    """Look up the IModelDescriptor adapter named after *ob*'s model
    interface.

    :param ob: either an interface, or a class/instance; for a
        non-interface, the first implemented interface that provides
        IIModelInterface is used.
    :return: the named IModelDescriptor adapter, or None if unregistered.
    """
    if not IInterface.providedBy(ob):
        # FIX: the original used filter(...)[0], which only works on
        # Python 2 (py3 filter objects are not subscriptable). The
        # comprehension is equivalent on both; IndexError still signals
        # "no model interface", as before.
        candidates = [iface for iface in interface.implementedBy(ob)
                      if IIModelInterface.providedBy(iface)]
        ob = candidates[0]
    name = "%s.%s" % (ob.__module__, ob.__name__)
    return component.queryAdapter(ob, IModelDescriptor, name)
def get_all_translations(self, content):
    """Return all translations excluding the just modified content.

    :param content: translated content object.
    :return: list of language codes for the other translations.
    """
    content_lang = queryAdapter(content, ILanguage).get_language()
    translations = ITranslationManager(content).get_translated_languages()
    # FIX: remove() raises if the content's own language is not among the
    # registered translations (e.g. an inconsistent translation map);
    # guard the membership instead of crashing the event handler.
    if content_lang in translations:
        translations.remove(content_lang)
    return translations
def get_vocabulary(self):
    """ Look up named vocabulary and check permissions.

    Unlike p.a.content.browser.vocabulary.VocabularyView this resolves a
    IPersonalizedVocabularyFactory and calls it with both context and
    request to enable personalization.

    :raises VocabLookupException: when no factory name was given, the
        lookup is not permitted, or no factory with that name exists.
    :return: the resolved vocabulary.
    """
    # --- only slightly changed from upstream ---
    # Look up named vocabulary and check permission.
    context = self.context
    factory_name = self.request.get('name', None)
    field_name = self.request.get('field', None)
    if not factory_name:
        raise VocabLookupException('No factory provided.')
    if factory_name in base_perms:
        # don't mess with upstream vocabulary handling
        return super(PersonalizedVocabularyView, self).get_vocabulary()
    authorized = None
    sm = getSecurityManager()
    if (factory_name not in _permissions or
            not INavigationRoot.providedBy(context)):
        # Check field specific permission
        if field_name:
            permission_checker = queryAdapter(context,
                                              IFieldPermissionChecker)
            if permission_checker is not None:
                authorized = permission_checker.validate(
                    field_name, factory_name)
        # No field-level authorization obtained -> deny.
        if not authorized:
            # zope admin misses workspace access, go figure
            logger.error("Vocabulary %s lookup (%s) not allowed",
                         factory_name, field_name)
            raise VocabLookupException('Vocabulary lookup not allowed')
    # Short circuit if we are on the site root and permission is
    # in global registry
    elif not sm.checkPermission(_permissions[factory_name], context):
        raise VocabLookupException('Vocabulary lookup not allowed')
    factory = queryUtility(IVocabularyFactory, factory_name)
    if not factory:
        raise VocabLookupException(
            'No factory with name "%s" exists.' % factory_name)
    # This part is for backwards-compatibility with the first
    # generation of vocabularies created for plone.app.widgets,
    # which take the (unparsed) query as a parameter of the vocab
    # factory rather than as a separate search method.
    if type(factory) is FunctionType:
        factory_spec = inspect.getargspec(factory)
    else:
        factory_spec = inspect.getargspec(factory.__call__)
    query = _parseJSON(self.request.get('query', ''))
    if query and 'query' in factory_spec.args:
        vocabulary = factory(context, query=query)
    # This is what is reached for non-legacy vocabularies.
    elif IPersonalizedVocabularyFactory.providedBy(factory):
        # patternslib select2 queries for "q" instead of "query"
        if not query and self.request.get('q', False):
            query = _parseJSON(self.request.get('q'))
        # this is the key customization: feed in the request
        vocabulary = factory(context, self.request, query=query)
    else:
        # default fallback
        vocabulary = factory(context)
    return vocabulary
def query(self, batch=True, sort=False, **kwargs):
    """ Search using given criteria.

    :param batch: when True (and no brains filters apply), return a
        Batch; when False, return raw brains.
    :param sort: forwarded to self.criteria().
    :param kwargs: extra query parameters; merged with the request form.
    :return: a Batch of catalog brains (or raw brains when batch=False);
        an empty Batch on catalog errors or empty results.
    """
    if self.request:
        kwargs.update(self.request.form)
        kwargs.pop('sort[]', None)
        kwargs.pop('sort', None)
        # jQuery >= 1.4 adds type to params keys
        # $.param({ a: [2,3,4] }) // "a[]=2&a[]=3&a[]=4"
        # Let's fix this
        kwargs = dict((key.replace('[]', ''), val)
                      for key, val in kwargs.items())
    query = self.criteria(sort=sort, **kwargs)
    # We don't want to do an unnecessary sort for a counter query
    counter_query = kwargs.pop('counter_query', False)
    if counter_query:
        query.pop('sort_on', None)
        query.pop('sort_order', None)
    catalog = getUtility(IFacetedCatalog)
    num_per_page = 20
    criteria = ICriteria(self.context)
    brains_filters = []
    # Collect per-widget post-query brain filters and the page size.
    for cid, criterion in criteria.items():
        widgetclass = criteria.widget(cid=cid)
        widget = widgetclass(self.context, self.request, criterion)
        if widget.widget_type == 'resultsperpage':
            num_per_page = widget.results_per_page(kwargs)
        brains_filter = queryAdapter(widget, IWidgetFilterBrains)
        if brains_filter:
            brains_filters.append(brains_filter)
    b_start = safeToInt(kwargs.get('b_start', 0))
    # make sure orphans is an integer, // is used so in Python3 we have an
    # integer division as by default, a division result is a float
    orphans = num_per_page * 20 // 100  # orphans = 20% of items per page
    if batch and not brains_filters:
        # add b_start and b_size to query to use better sort algorithm
        query['b_start'] = b_start
        query['b_size'] = num_per_page + orphans
    try:
        brains = catalog(self.context, **query)
    except Exception as err:
        logger.exception(err)
        return Batch([], 20, 0)
    if not brains:
        return Batch([], 20, 0)
    # Apply after query (filter) on brains
    start = time.time()
    for brains_filter in brains_filters:
        brains = brains_filter(brains, kwargs)
    if not batch:
        return brains
    # Batch cannot handle generators; materialize filtered results.
    if isinstance(brains, GeneratorType):
        brains = [brain for brain in brains]
    delta = time.time() - start
    if delta > 30:
        logger.warn("Very slow IWidgetFilterBrains adapters: %s at %s",
                    brains_filters, self.context.absolute_url())
    return Batch(brains, num_per_page, b_start, orphan=orphans)
def publish(request, handle_errors=True):
    """Publish a request: traverse, call the object, handle errors and
    retries.

    :param request: the request to publish; may be replaced by a retried
        copy, so the (possibly new) request is returned.
    :param handle_errors: when False, exceptions propagate to the caller
        unless an IReRaiseException adapter says otherwise.
    :return: the request object actually used (may differ from the input
        after a Retry).
    """
    try:  # finally to clean up to_raise and close request
        to_raise = None
        while True:
            publication = request.publication
            try:
                try:
                    obj = None
                    try:
                        try:
                            # Normal publishing pipeline.
                            request.processInputs()
                            publication.beforeTraversal(request)
                            obj = publication.getApplication(request)
                            obj = request.traverse(obj)
                            publication.afterTraversal(request, obj)
                            result = publication.callObject(request, obj)
                            response = request.response
                            if result is not response:
                                response.setResult(result)
                            publication.afterCall(request, obj)
                        except:
                            # Let the publication render the error page
                            # first, then decide whether to re-raise.
                            exc_info = sys.exc_info()
                            publication.handleException(
                                obj, request, exc_info, True)
                            if not handle_errors:
                                # Reraise only if there is no adapter
                                # indicating that we shouldn't
                                reraise = component.queryAdapter(
                                    exc_info[1], IReRaiseException,
                                    default=None)
                                if reraise is None or reraise():
                                    raise
                    finally:
                        publication.endRequest(request, obj)
                    break  # Successful.
                except Retry as retryException:
                    if request.supportsRetry():
                        # Create a copy of the request and use it.
                        newrequest = request.retry()
                        request.close()
                        request = newrequest
                    elif handle_errors:
                        # Output the original exception.
                        publication = request.publication
                        publication.handleException(
                            obj, request,
                            retryException.getOriginalException(), False)
                        break
                    else:
                        to_raise = retryException.getOriginalException()
                        if to_raise is None:
                            # There is no original exception inside
                            # the Retry, so just reraise it.
                            raise
                        break
            except:
                # Bad exception handler or retry method.
                # Re-raise after outputting the response.
                if handle_errors:
                    request.response.internalError()
                    to_raise = sys.exc_info()
                    break
                else:
                    raise
        response = request.response
        if to_raise is not None:
            six.reraise(to_raise[0], to_raise[1], to_raise[2])
    finally:
        to_raise = None  # Avoid circ. ref.
        request.close()  # Close database connections, etc.
    # Return the request, since it might be a different object than the one
    # that was passed in.
    return request
def options(self, section=u'', margin=True):
    """Return the IPDFOptionsMaker adapter registered under *section*.

    :param section: adapter name; the empty string selects the default.
    :param margin: unused here -- NOTE(review): presumably part of the
        options-maker call contract; confirm before removing.
    """
    adapter_name = section
    return queryAdapter(self.context, IPDFOptionsMaker, name=adapter_name)
def import_(self, import_context, subdir, root=False):
    """ See IFilesystemImporter.

    Imports .properties, prunes objects not listed in .preserve, then
    (re)creates and recursively imports the objects listed in .objects.
    """
    context = self.context
    if not root:
        subdir = '%s/%s' % (subdir, context.getId())
    prop_adapter = IINIAware(context, None)
    if prop_adapter is not None:
        prop_text = import_context.readDataFile(
            '.properties',
            subdir=subdir,
        )
        if prop_text is not None:
            prop_adapter.put_ini(prop_text)
    preserve = import_context.readDataFile('.preserve', subdir)
    must_preserve = self._mustPreserve()
    prior = context.objectIds()
    if not preserve:
        preserve = []
    else:
        preserve = _globtest(preserve, prior)
    preserve.extend([x[0] for x in must_preserve])
    # FIX: renamed loop variable `id` (shadowed the builtin).
    for obj_id in prior:
        if obj_id not in preserve:
            context._delObject(obj_id)
    objects = import_context.readDataFile('.objects', subdir)
    if objects is None:
        return
    dialect = 'excel'
    stream = StringIO(objects)
    rowiter = reader(stream, dialect)
    # FIX: was filter(None, tuple(rowiter)) -- a py2-only list result;
    # the comprehension is equivalent and also py3-safe.
    rows = [row for row in rowiter if row]
    existing = context.objectIds()
    for object_id, type_name in rows:
        if object_id not in existing:
            # FIX: renamed local `object` (shadowed the builtin).
            instance = self._makeInstance(object_id, type_name,
                                          subdir, import_context)
            if instance is None:
                logger = import_context.getLogger('SFWA')
                logger.warning("Couldn't make instance: %s/%s" %
                               (subdir, object_id))
                continue
        # Recurse into the (possibly pre-existing) child importer.
        wrapped = context._getOb(object_id)
        adapted = queryAdapter(wrapped, IFilesystemImporter)
        if adapted is not None:
            adapted.import_(import_context, subdir)
def test_default_merge_templates_registration(self):
    """The default ITemplatesToMerge adapter is registered for pod
    templates."""
    adapter = queryAdapter(self.test_podtemplate, ITemplatesToMerge)
    self.assertIsInstance(adapter, TemplatesToMergeForPODTemplate)
def test_can_adapt(self):
    """Documents can be adapted to ICollaborativelyEditable."""
    adapter = queryAdapter(self.doc, ICollaborativelyEditable)
    self.assertIsNotNone(adapter)
def __init__(self, context, request):
    """Initialize the view and bind the static resource provider that is
    registered under the package's dotted name."""
    super(View, self).__init__(context, request)
    resource_name = self.module_info.package_dotted_name
    self.static = component.queryAdapter(
        self.request, interface.Interface, name=resource_name)
def test_CustomNamedTemplate(self):
    """A named template adapter is registered for the popup grid view."""
    view = BrowserView(self.portal, self.req)
    template = queryAdapter(view, INamedTemplate,
                            name="datagridref_popup")
    self.assertNotEqual(template, None)
class DavizSupport(DavizPublicSupport):
    """ Enable/Disable visualization

    Browser support view that toggles the IVisualizationEnabled marker on
    the context and converts its attached table data to JSON on enable.
    """

    def _redirect(self, msg='', to='/daviz-edit.html'):
        """ Return or redirect.

        When a request is present, queue *msg* as a status message and
        redirect to *to* (or to the default view when *to* is empty);
        always return *msg*.
        """
        if self.request:
            if msg:
                status = queryAdapter(self.request, IStatusMessage)
                if status:
                    status.addStatusMessage(str(msg), type='info')
            if to:
                self.request.response.redirect(
                    self.context.absolute_url() + to)
            else:
                self.request.response.redirect(
                    self.context.absolute_url() + "/view")
        return msg

    @property
    def can_enable(self):
        """ See IVisualizationSubtyper """
        return not self.is_visualization

    @property
    def can_disable(self):
        """ See IVisualizationSubtyper """
        return self.is_visualization

    @property
    def can_edit(self):
        """ Can edit visualization """
        if not self.is_visualization:
            return False
        # Is locked by another user? Then editing is not allowed.
        locked = queryMultiAdapter((self.context, self.request),
                                   name=u'plone_lock_info')
        locked = getattr(locked, 'is_locked_for_current_user',
                         lambda: False)
        if locked():
            return False
        return True

    @property
    def is_visualization(self):
        """ Is visualization enabled? """
        return IVisualizationEnabled.providedBy(self.context)

    def enable(self):
        """ Enable visualization.

        Converts the attached data to JSON, marks the context with
        IVisualizationEnabled, stores the JSON in the visualization
        config and fires VisualizationEnabledEvent. Redirects back with
        an error message if the conversion fails.
        """
        visualization = queryAdapter(self.context, IVisualizationData)
        datafile = StringIO(visualization.data)
        converter = queryUtility(ITable2JsonConverter)
        try:
            columns, json = converter(datafile)
        except Exception, err:
            logger.exception(err)
            return self._redirect(('An error occured while trying to convert '
                                   'attached file. Please ensure you provided '
                                   'a valid CSV file'), '/view')
        if not IVisualizationEnabled.providedBy(self.context):
            alsoProvides(self.context, IVisualizationEnabled)
        # Update annotations
        mutator = queryAdapter(self.context, IVisualizationConfig)
        mutator.json = json
        notify(VisualizationEnabledEvent(self.context, columns=columns))
        return self._redirect('Visualization enabled')
def getPortlets(self):
    """Work out which portlets to display, returning a list of dicts
    describing assignments to render.

    Each dict has keys 'category', 'key', 'name' and 'assignment'.
    Returns [] when the context yields no IPortletContext.
    """
    if IPortletContext.providedBy(self.context):
        pcontext = self.context
    else:
        pcontext = queryAdapter(self.context, IPortletContext)
    if pcontext is None:
        return []
    # Holds a list of (category, key, assignment).
    categories = []
    # Keeps track of the blacklisting status for global categories
    # (user, group, content type). The status is either True (blocked)
    # or False (not blocked).
    blacklisted = {}
    # This is the name of the manager (column) we're rendering
    manager = self.storage.__name__
    # 1. Fetch blacklisting status for each global category
    # First, find out which categories we will need to determine
    # blacklist status for
    for category, key in pcontext.globalPortletCategories(False):
        blacklisted[category] = None
    # Then walk the content hierarchy to find out what blacklist status
    # was assigned. Note that the blacklist is tri-state; if it's None it
    # means no assertion has been made (i.e. the category has neither been
    # whitelisted or blacklisted by this object or any parent). The first
    # item to give either a blacklisted (True) or whitelisted (False)
    # value for a given item will set the appropriate value. Parents of
    # this item that also set a black- or white-list value will then be
    # ignored.
    # Whilst walking the hierarchy, we also collect parent portlets,
    # until we hit the first block.
    current = self.context
    currentpc = pcontext
    blacklistFetched = set()
    parentsBlocked = False
    while current is not None and currentpc is not None:
        if ILocalPortletAssignable.providedBy(current):
            assignable = current
        else:
            assignable = queryAdapter(current, ILocalPortletAssignable)
        if assignable is not None:
            if IAnnotations.providedBy(assignable):
                annotations = assignable
            else:
                annotations = queryAdapter(assignable, IAnnotations)
            if not parentsBlocked:
                # Collect this level's context-category assignments.
                local = annotations.get(CONTEXT_ASSIGNMENT_KEY, None)
                if local is not None:
                    localManager = local.get(manager, None)
                    if localManager is not None:
                        categories.extend(
                            [(CONTEXT_CATEGORY, currentpc.uid, a)
                             for a in localManager.values()])
            lpam = getMultiAdapter((assignable, self.storage),
                                   ILocalPortletAssignmentManager)
            if lpam.getBlacklistStatus(CONTEXT_CATEGORY):
                # First block found: stop collecting parent portlets.
                parentsBlocked = True
            # Record the first (closest) explicit status per category.
            for cat, cat_status in blacklisted.items():
                local_status = lpam.getBlacklistStatus(cat)
                if local_status is not None:
                    blacklistFetched.add(cat)
                    if cat_status is None:
                        blacklisted[cat] = local_status
        # We can abort if parents are blocked and we've fetched all
        # blacklist statuses
        if parentsBlocked and len(blacklistFetched) == len(blacklisted):
            break
        # Check the parent - if there is no parent, we will stop
        current = currentpc.getParent()
        if current is not None:
            if IPortletContext.providedBy(current):
                currentpc = current
            else:
                currentpc = queryAdapter(current, IPortletContext)
    # Get all global mappings for non-blacklisted categories
    for category, key in pcontext.globalPortletCategories(False):
        if not blacklisted[category]:
            mapping = self.storage.get(category, None)
            if mapping is not None:
                for a in mapping.get(key, {}).values():
                    categories.append((category, key, a, ))
    assignments = []
    for category, key, assignment in categories:
        try:
            settings = IPortletAssignmentSettings(assignment)
            if not settings.get('visible', True):
                continue
        except TypeError:
            # Portlet does not exist any longer
            continue
        assignments.append({'category': category,
                            'key': key,
                            'name': str(assignment.__name__),
                            'assignment': assignment
                            })
    return assignments