def _factory(self, container, entry):
    """Create a wcc.activity.activity in ``container`` from a migration entry.

    ``entry`` supplies 'title', 'description', 'bodytext', 'orig_url',
    'lang_urls' and optionally 'id_url'.
    """
    logger.info("Creating Activity : %s" % entry['title'])
    oid = self._create_obj_for_title(
        container, 'wcc.activity.activity', entry['title'])
    obj = container._getOb(oid)
    IDublinCore(obj).description = entry['description']
    IBodyText(obj).text = entry['bodytext']
    # record migration provenance for later lookups
    metadata = IAnnotations(obj).setdefault('wcc.metadata', PersistentDict())
    metadata['original_url'] = entry['orig_url']
    metadata['lang_urls'] = entry['lang_urls']
    metadata['id_url'] = entry.get('id_url', None)
    obj.reindexObject()
    logger.info("Created %s" % obj.absolute_url())
def __init__(self, context):
    """Bind to ``context`` and lazily create the four OOSet buckets."""
    self.context = context
    annotations = IAnnotations(context)
    buckets = (
        ('approved', APPROVED_KEY),
        ('disapproved', DISAPPROVED_KEY),
        ('favorite', FAVORITE_KEY),
        ('followed', FOLLOWED_KEY),
    )
    for attr, key in buckets:
        setattr(self, attr, annotations.setdefault(key, OOSet()))
def create_gallery(self, container, entry): logger.info("Creating Gallery : %s" % entry['title']) # create folder oid = self._create_obj_for_title(container, 'Folder', entry['title']) obj = container._getOb(oid) # create images inside folder for imgdata in entry['images']: imgoid = self._create_obj_for_title(obj, 'Image', imgdata['caption']) imgobj = obj._getOb(imgoid) imgobj.getField('image').set(imgobj, b64decode(imgdata['image'])) imgobj.reindexObject() # create default view of folder pageoid = self._create_obj_for_title(obj, 'Document', 'index') pageobj = obj._getOb(pageoid) pageobj.setTitle(entry['title']) pageobj.getField('text').set(pageobj, entry['bodytext']) # obj.setDefaultPage(pageoid) obj.setLayout('atct_album_view') anno = IAnnotations(obj) anno.setdefault('wcc.metadata', PersistentDict()) anno['wcc.metadata']['original_url'] = entry['orig_url'] anno['wcc.metadata']['lang_urls'] = entry['lang_urls'] anno['wcc.metadata']['id_url'] = entry.get('id_url', None) pageobj.reindexObject() obj.reindexObject() logger.info("Created %s" % obj.absolute_url())
class SectionFeedback:
    """Adapter for storing general feedback about transmogrifier progress."""

    implements(ISectionFeedback)

    def __init__(self, transmogrifier):
        # per-transmogrifier storage, keyed by section name
        self.storage = IAnnotations(transmogrifier).setdefault(MYKEY, {})
        self.storage.setdefault('sections', {})

    def success(self, section, msg):
        """Record a successfully processed item for ``section``."""
        sdata = self.storage.setdefault(section, {})
        # ensure the counter exists before incrementing (the original
        # bound setdefault's return to an unused local)
        sdata.setdefault('success_total', 0)
        sdata['success_total'] += 1
        sdata.setdefault('msgs', []).append((True, msg))

    def ignored(self, section, msg):
        """Record an ignored item for ``section``."""
        sdata = self.storage.setdefault(section, {})
        sdata.setdefault('ignore_total', 0)
        sdata['ignore_total'] += 1
        sdata.setdefault('msgs', []).append((False, msg))

    def getTotals(self, section):
        """Return ``(success_total, ignore_total)`` for ``section``."""
        sdata = self.storage.setdefault(section, {})
        return (sdata.get('success_total', 0), sdata.get('ignore_total', 0))
def getUserRelatableProxy(user):
    """Return the memberdata object serving as a relatable proxy for ``user``.

    Registers the memberdata with the intid utility and stamps the user
    id into its annotations under USER_ANNOTATION.
    """
    membership = getToolByName(user, 'portal_membership')
    memberdata = membership.getMemberById(user.getId())
    getUtility(IIntIds).register(memberdata)
    IAnnotations(memberdata).setdefault(USER_ANNOTATION, user.getId())
    return memberdata
def test_annotations(self):
    """Annotations on a DraftProxy overlay the draft over the target."""
    self.folder.invokeFactory('Document', 'd1')
    target = self.folder['d1']
    targetAnnotations = IAnnotations(target)
    targetAnnotations[u'test.key'] = 123
    targetAnnotations[u'other.key'] = 456
    draft = Draft()
    draftAnnotations = IAnnotations(draft)
    draftAnnotations[u'some.key'] = 234
    proxy = DraftProxy(draft, target)
    proxyAnnotations = IAnnotations(proxy)
    # reads fall through: draft values shadow target values
    self.assertEqual(123, proxyAnnotations[u'test.key'])
    self.assertEqual(234, proxyAnnotations[u'some.key'])
    # writes go to the draft only; the target stays untouched
    proxyAnnotations[u'test.key'] = 789
    self.assertEqual(789, proxyAnnotations[u'test.key'])
    self.assertEqual(123, targetAnnotations[u'test.key'])
    # Annotations API
    self.assertEqual(789, proxyAnnotations.get(u'test.key'))
    keys = proxyAnnotations.keys()
    self.assertTrue(u'test.key' in keys)
    self.assertTrue(u'some.key' in keys)
    self.assertTrue(u'other.key' in keys)
    self.assertEqual(789, proxyAnnotations.setdefault(u'test.key', -1))
    self.assertEqual(234, proxyAnnotations.setdefault(u'some.key', -1))
    self.assertEqual(456, proxyAnnotations.setdefault(u'other.key', -1))
    self.assertEqual(-1, proxyAnnotations.setdefault(u'new.key', -1))
    # deletion masks the target value and is recorded on the draft
    del proxyAnnotations[u'test.key']
    self.assertFalse(u'test.key' in proxyAnnotations)
    self.assertFalse(u'test.key' in draftAnnotations)
    self.assertTrue(u'test.key' in targetAnnotations)
    self.assertTrue(u'test.key' in draft._proxyAnnotationsDeleted)
    del proxyAnnotations[u'some.key']
    self.assertFalse(u'some.key' in proxyAnnotations)
    self.assertFalse(u'some.key' in draftAnnotations)
    self.assertFalse(u'some.key' in targetAnnotations)
    self.assertTrue(u'some.key' in draft._proxyAnnotationsDeleted)
    # this key was never in the proxy/draft
    del proxyAnnotations[u'other.key']
    self.assertFalse(u'other.key' in proxyAnnotations)
    self.assertFalse(u'other.key' in draftAnnotations)
    self.assertTrue(u'other.key' in targetAnnotations)
    self.assertTrue(u'other.key' in draft._proxyAnnotationsDeleted)
def _get_change_data(self):
    """Return the request's ``change_data`` dict, or a fresh dummy dict.

    Guarantees the 'note' and 'date_active' keys exist so callers can
    read them unconditionally.
    """
    cd = IAnnotations(common.get_request()).get("change_data")
    if cd is None:
        cd = {}
    # setdefault with a constant is equivalent to (and simpler than) the
    # original cd.setdefault(key, cd.get(key, default)) round-trip.
    cd.setdefault("note", "")
    cd.setdefault("date_active", None)
    return cd
def cache(self):
    """Return the cached stats dict, refreshing it when forced or empty."""
    if self.request.get('force_cache_update', None):
        self.update_cache()
    anno = IAnnotations(self.context)
    cache = anno.setdefault(self.anno_key, PersistentDict())
    # dict.has_key() is removed in Python 3; the `in` operator is
    # equivalent and works on both Python versions.
    if 'total' not in cache:
        self.update_cache()
    return cache
def _get_change_data(self):
    """Return the request's ``change_data`` dict, or a fresh dummy dict.

    Guarantees the 'note' and 'date_active' keys exist so callers can
    read them unconditionally.
    """
    cd = IAnnotations(common.get_request()).get("change_data")
    if cd is None:
        cd = {}
    # setdefault with a constant is equivalent to (and simpler than) the
    # original cd.setdefault(key, cd.get(key, default)) round-trip.
    cd.setdefault("note", "")
    cd.setdefault("date_active", None)
    return cd
def test_annotations(self):
    """Annotations on a DraftProxy overlay the draft over the target."""
    self.folder.invokeFactory('Document', 'd1')
    target = self.folder['d1']
    targetAnnotations = IAnnotations(target)
    targetAnnotations[u"test.key"] = 123
    targetAnnotations[u"other.key"] = 456
    draft = Draft()
    draftAnnotations = IAnnotations(draft)
    draftAnnotations[u"some.key"] = 234
    proxy = DraftProxy(draft, target)
    proxyAnnotations = IAnnotations(proxy)
    # assertEquals/failUnless/failIf are deprecated unittest aliases;
    # use the modern spellings for consistency with the sibling test.
    self.assertEqual(123, proxyAnnotations[u"test.key"])
    self.assertEqual(234, proxyAnnotations[u"some.key"])
    proxyAnnotations[u"test.key"] = 789
    self.assertEqual(789, proxyAnnotations[u"test.key"])
    self.assertEqual(123, targetAnnotations[u"test.key"])
    # Annotations API
    self.assertEqual(789, proxyAnnotations.get(u"test.key"))
    keys = proxyAnnotations.keys()
    self.assertTrue(u"test.key" in keys)
    self.assertTrue(u"some.key" in keys)
    self.assertTrue(u"other.key" in keys)
    self.assertEqual(789, proxyAnnotations.setdefault(u"test.key", -1))
    self.assertEqual(234, proxyAnnotations.setdefault(u"some.key", -1))
    self.assertEqual(456, proxyAnnotations.setdefault(u"other.key", -1))
    self.assertEqual(-1, proxyAnnotations.setdefault(u"new.key", -1))
    del proxyAnnotations[u"test.key"]
    self.assertFalse(u"test.key" in proxyAnnotations)
    self.assertFalse(u"test.key" in draftAnnotations)
    self.assertTrue(u"test.key" in targetAnnotations)
    self.assertTrue(u"test.key" in draft._proxyAnnotationsDeleted)
    del proxyAnnotations[u"some.key"]
    self.assertFalse(u"some.key" in proxyAnnotations)
    self.assertFalse(u"some.key" in draftAnnotations)
    self.assertFalse(u"some.key" in targetAnnotations)
    self.assertTrue(u"some.key" in draft._proxyAnnotationsDeleted)
    del proxyAnnotations[u"other.key"]  # this key was never in the proxy/draft
    self.assertFalse(u"other.key" in proxyAnnotations)
    self.assertFalse(u"other.key" in draftAnnotations)
    self.assertTrue(u"other.key" in targetAnnotations)
    self.assertTrue(u"other.key" in draft._proxyAnnotationsDeleted)
def cache(self):
    """Return the cached portlet stats, refreshing when forced or empty."""
    if (self.request.get('force_statsportlet_update', None)
            or self.data.always_update):
        self.update_cache()
    anno = IAnnotations(self.data)
    cache = anno.setdefault(self.anno_key, PersistentDict())
    # dict.has_key() is removed in Python 3; use the `in` operator.
    if 'total' not in cache:
        self.update_cache()
    return cache
def _get_author_id_map(self, container):
    """Map legacy author ids to author objects found in the catalog."""
    mapping = {}
    brains = container.portal_catalog(portal_type='wcc.books.author')
    for brain in brains:
        obj = brain.getObject()
        anno = IAnnotations(obj)
        anno.setdefault('wcc.metadata', PersistentDict())
        authordata = anno['wcc.metadata'].get('authordata', None)
        if authordata:
            mapping[authordata['id']] = obj
    return mapping
def cache(self):
    """Return the cached portlet stats, refreshing when forced or empty."""
    if (self.request.get('force_statsportlet_update', None)
            or self.data.always_update):
        self.update_cache()
    anno = IAnnotations(self.data)
    cache = anno.setdefault(self.anno_key, PersistentDict())
    # dict.has_key() is removed in Python 3; use the `in` operator.
    if 'total' not in cache:
        self.update_cache()
    return cache
def _factory(self, container, entry):
    """Create an EWNSWW item (folder + default Document page) from ``entry``.

    The folder id is derived from the original URL path (or from the
    title for tx_wecdiscussion items); body text, image, caption and
    video URL are copied onto the page, and migration provenance is
    stored in the page's 'wcc.metadata' annotation.
    """
    logger.info("Creating EWNSWW Item : %s" % entry['title'])
    logger.info("orig_url: %s" % entry['orig_url'])
    # find container
    parent = self._find_parent(container, entry['orig_url'])
    # create container folder
    oid = os.path.basename(
        urlparse.urlparse(entry['orig_url']).path.replace('.html', '')
    )
    if 'tx_wecdiscussion' in entry['orig_url']:
        oid = entry['title']
    oid = self._create_obj_for_title(parent, 'Folder', oid)
    obj = parent._getOb(oid)
    obj.setTitle(entry['title'])
    # create page
    pageoid = self._create_obj_for_title(obj, 'Document', oid)
    page = obj._getOb(pageoid)
    page.setTitle(entry['title'])
    # set bodytext
    page.getField('text').set(page, entry['bodytext'])
    # set image; best effort -- a broken payload must not abort migration
    if entry.get('image', None):
        try:
            page.getField('image').set(page, b64decode(entry['image']))
        except Exception:
            # was a bare `except:`; narrowed so SystemExit and
            # KeyboardInterrupt still propagate
            logger.info(
                "Unable to import image for : %s" % obj.absolute_url()
            )
    if entry.get('imageCaption', None):
        page.getField('imageCaption').set(page, entry['imageCaption'])
    if entry.get('video_url', None):
        page.getField('video_url').set(page, entry['video_url'])
    obj.setDefaultPage(pageoid)
    # remember original url
    anno = IAnnotations(page)
    anno.setdefault('wcc.metadata', PersistentDict())
    anno['wcc.metadata']['original_url'] = entry['orig_url']
    anno['wcc.metadata']['lang_urls'] = entry['lang_urls']
    anno['wcc.metadata']['id_url'] = entry.get('id_url', None)
    pprint.pprint(entry['lang_urls'])
    page.reindexObject()
    obj.reindexObject()
    # checkpoint so a later failure does not roll back this item
    transaction.savepoint(optimistic=True)
    logger.info("Created %s" % obj.absolute_url())
def update_cache(self):
    """Store the freshly computed statistics on the context annotations."""
    anno = IAnnotations(self.context)
    cache = anno.setdefault(self.anno_key, PersistentDict())
    values = {
        'total': self._total,
        'international_count': self._international_count,
        'domestic_count': self._domestic_count,
        'themes': self._themes,
        'themes_international': self._themes_international,
        'themes_domestic': self._themes_domestic,
        'domestic': self._domestic,
        'international': self._international,
    }
    for key, value in values.items():
        cache[key] = value
def _get_change_data(self):
    """Return the request's ``change_data`` dict, or a fresh dummy dict.

    Guarantees the 'note' and 'date_active' keys exist so callers can
    read them unconditionally.
    """
    try:
        cd = IAnnotations(common.get_request()).get("change_data")
        assert cd is not None, "change_data dict is None."
    except (TypeError, AssertionError):
        # Could not adapt... under testing, the "request" is a
        # participation that has no IAnnotations.
        cd = {}
    # setdefault with a constant is equivalent to the original
    # cd.setdefault(key, cd.get(key, default)) round-trip.
    cd.setdefault("note", "")
    cd.setdefault("date_active", None)
    return cd
def _get_change_data(self):
    """Return the request's ``change_data`` dict, or a fresh dummy dict.

    Guarantees the 'note' and 'date_active' keys exist so callers can
    read them unconditionally.
    """
    try:
        cd = IAnnotations(common.get_request()).get("change_data")
        assert cd is not None, "change_data dict is None."
    except (TypeError, AssertionError):
        # Could not adapt... under testing, the "request" is a
        # participation that has no IAnnotations.
        cd = {}
    # setdefault with a constant is equivalent to the original
    # cd.setdefault(key, cd.get(key, default)) round-trip.
    cd.setdefault("note", "")
    cd.setdefault("date_active", None)
    return cd
def _update_content(self, obj, entry):
    """Update an existing IDocument ``obj`` from a migration ``entry``.

    Copies title, Dublin Core metadata, body text, document fields,
    related links and the (base64-encoded) file payload, then records
    the original URLs in the 'wcc.metadata' annotation and reindexes.
    """
    if not IDocument.providedBy(obj):
        logger.error("%s is not document" % obj.absolute_url())
        return
    logger.info("Updating content %s" % obj.absolute_url())
    obj.title = entry['title']
    # set description
    dcobj = IDublinCore(obj)
    dcobj.description = entry['description']
    # set effectiveDate
    edate = dateutil.parser.parse(entry['effectiveDate'])
    dcobj.effective = edate
    # set descriptors
    dcobj.subjects = entry['related_descriptors']
    # set bodytext
    IBodyText(obj).text = entry.get('bodytext', '')
    # set main fields
    obj.document_owner = entry['owner'].strip()
    obj.document_type = entry['document_type']
    obj.document_status = entry['status']
    related_links = [{
        'url': i['url'],
        'label': i['title'],
        'description': i['description']
    } for i in entry['related_links']]
    obj.related_links = related_links
    if entry['file']:
        # file payload is base64-encoded in the entry
        f = NamedBlobFile(
            data=b64decode(entry['file']['data']),
            filename=entry['file']['name']
        )
        obj.file = f
    # remember original url
    anno = IAnnotations(obj)
    anno.setdefault('wcc.metadata', PersistentDict())
    anno['wcc.metadata']['original_url'] = entry['orig_url']
    anno['wcc.metadata']['lang_urls'] = entry['lang_urls']
    anno['wcc.metadata']['id_url'] = entry.get('id_url', None)
    obj.reindexObject()
def update_cache(self):
    """Recompute every statistic and persist it on the data annotations."""
    anno = IAnnotations(self.data)
    cache = anno.setdefault(self.anno_key, PersistentDict())
    stats = (
        ('total', self._total),
        ('international_count', self._international_count),
        ('domestic_count', self._domestic_count),
        ('themes', self._themes),
        ('themes_international', self._themes_international),
        ('themes_domestic', self._themes_domestic),
        ('domestic', self._domestic),
        ('international', self._international),
        ('percent_missions_with_reports',
         self._percent_missions_with_reports),
    )
    for key, compute in stats:
        cache[key] = compute()
def _folder_factory(self, container, entry):
    """Create a Folder for ``entry`` in ``container`` and return it."""
    logger.info("Creating Folder : %s" % entry['title'])
    oid = self._create_obj_for_title(container, 'Folder', entry['title'])
    obj = container._getOb(oid)
    # stash migration provenance on the new folder
    metadata = IAnnotations(obj).setdefault('wcc.metadata', PersistentDict())
    metadata['original_url'] = entry['orig_url']
    metadata['lang_urls'] = entry['lang_urls']
    metadata['id_url'] = entry.get('id_url', None)
    obj.reindexObject()
    return obj
def update_cache(self):
    """Recompute every statistic and persist it on the data annotations."""
    anno = IAnnotations(self.data)
    cache = anno.setdefault(self.anno_key, PersistentDict())
    stats = (
        ('total', self._total),
        ('international_count', self._international_count),
        ('domestic_count', self._domestic_count),
        ('themes', self._themes),
        ('themes_international', self._themes_international),
        ('themes_domestic', self._themes_domestic),
        ('domestic', self._domestic),
        ('international', self._international),
        ('percent_missions_with_reports',
         self._percent_missions_with_reports),
    )
    for key, compute in stats:
        cache[key] = compute()
def __init__(self, context):
    """Bind to ``context`` and prepare the listen annotation sub-trees."""
    self.context = context
    listen_annot = IAnnotations(self.context).setdefault(
        PROJECTNAME, OOBTree())
    self.listen_annot = listen_annot
    self.emails = listen_annot.setdefault('emails', OOBTree())
    self.members = listen_annot.setdefault('members', OOBTree())
def __init__(self, context):
    """Bind to ``context`` and ensure the pending-list bucket exists."""
    self.trust_caller = False
    self.context = context
    project_annot = IAnnotations(context).setdefault(PROJECTNAME, OOBTree())
    self.pend = project_annot.setdefault(pendinglist_annotation, OOBTree())
def queuePurge(event, force=False):
    """Queue purge paths even though no caching proxies are configured.

    plone.cachepurging only updates paths when caching proxies are
    defined; with cloudflare we deliberately define none, so the paths
    are queued here instead (unless proxies *are* defined and ``force``
    is false).
    """
    request = getRequest()
    if request is None:
        return
    annotations = IAnnotations(request, None)
    if annotations is None:
        return
    registry = queryUtility(IRegistry)
    if registry is None:
        return
    settings = registry.forInterface(ICachePurgingSettings, check=False)
    if not settings.enabled:
        return
    # only take over when no caching proxies are configured (or forced)
    if settings.cachingProxies and not force:
        return
    pending = annotations.setdefault(KEY, set())
    pending.update(getPathsToPurge(event.object, request))
def getContent(self):
    """Get the annotations with the local MLS config."""
    annotations = IAnnotations(self.context)
    # setdefault both initializes the key on first access and returns
    # the stored value; the original wrapped it in .get(), whose default
    # argument was evaluated eagerly anyway, making the get redundant.
    return annotations.setdefault(CONFIGURATION_KEY, {})
def pending_status(self, user):
    """Return a human-readable summary of the user's pending requests."""
    annot = IAnnotations(self.context)
    # ensure the listen annotation bucket exists (side effect only; the
    # original bound the return value to an unused local)
    annot.setdefault(PROJECTNAME, OOBTree())
    subscribe_pending_list = getAdapter(
        self.context, IMembershipPendingList, 'pending_sub_email')
    unsubscribe_pending_list = getAdapter(
        self.context, IMembershipPendingList, 'pending_unsub_email')
    sub_mod_pending_list = getAdapter(
        self.context, IMembershipPendingList, 'pending_sub_mod_email')
    email_address = is_email(user) and user or lookup_email(
        user, self.context)

    def status(msg, lst):
        # message followed by how long the request has been pending
        return msg + lst.get_pending_time(email_address)

    status_msg = ''
    if subscribe_pending_list.is_pending(email_address):
        status_msg += status(
            _(u'subscription pending user confirmation: '),
            subscribe_pending_list)
    if unsubscribe_pending_list.is_pending(email_address):
        status_msg += status(
            _(u'unsubscription pending user confirmation: '),
            unsubscribe_pending_list)
    if sub_mod_pending_list.is_pending(email_address):
        status_msg += status(
            _(u'subscription pending manager moderation: '),
            sub_mod_pending_list)
    return status_msg
def queuePurge(event, force=False):
    """Queue purge paths even though no caching proxies are configured.

    plone.cachepurging only updates paths when caching proxies are
    defined; with cloudflare we deliberately define none, so the paths
    are queued here instead (unless proxies *are* defined and ``force``
    is false).
    """
    request = getRequest()
    if request is None:
        return
    annotations = IAnnotations(request, None)
    if annotations is None:
        return
    registry = queryUtility(IRegistry)
    if registry is None:
        return
    settings = registry.forInterface(ICachePurgingSettings, check=False)
    if not settings.enabled:
        return
    # only take over when no caching proxies are configured (or forced)
    if settings.cachingProxies and not force:
        return
    pending = annotations.setdefault(KEY, set())
    pending.update(getPathsToPurge(event.object, request))
def __init__(self, context, manager):
    """Bind the column setting stored under a per-manager annotation key."""
    self.context = context
    self.manager = manager
    self.key_id = COL_KEY + manager.__name__
    self._col = IAnnotations(context).setdefault(self.key_id, '')
def getContent(self):
    """Get the annotations with the local MLS config."""
    annotations = IAnnotations(self.context)
    # setdefault both initializes the key on first access and returns
    # the stored value; the original wrapped it in .get(), whose default
    # argument was evaluated eagerly anyway, making the get redundant.
    return annotations.setdefault(CONFIGURATION_KEY, {})
def _getNamesDict(self):
    """Return the persistent ignored-symptoms mapping for the context.

    Falls back to a transient empty dict when the context cannot be
    adapted to IAnnotations.
    """
    try:
        store = IAnnotations(self.context)
    except ComponentLookupError:
        return {}
    return store.setdefault(JEKYLL_IGNORED_SYMPTOMS, PersistentDict())
def _getNamesDict(self):
    """Return the persistent ignored-symptoms mapping for the context.

    Falls back to a transient empty dict when the context cannot be
    adapted to IAnnotations.
    """
    try:
        store = IAnnotations(self.context)
    except ComponentLookupError:
        return {}
    return store.setdefault(JEKYLL_IGNORED_SYMPTOMS, PersistentDict())
def _update_AgentInfoPortlet_ProfilePage(self, folders, data):
    """Override Annotation for plone.mls.listing AgentInfo inside
    AgentProfilePages, stamping the agency's and this agent's details."""
    # get agents portrait/avatar url
    avatar_url = self.membershiptool.getPersonalPortrait(
        id=self.userid).absolute_url()
    # get AgencyInfo
    agency = self.__AgencyInfo
    for folder in folders:
        if not (IAgentFolder.providedBy(folder)
                and ILocalAgencyInfo.providedBy(folder)):
            continue
        # setdefault both initializes the annotation on first use and
        # returns the stored mapping, replacing the original
        # get / setdefault / re-adapt dance (which adapted the folder
        # three times to reach the same dict).
        mls_ano = IAnnotations(folder).setdefault(
            "plone.mls.listing.localagencyinfo", {})
        # set global Agency Info
        mls_ano['agency_name'] = agency.get('agency_name',
                                            u'Krain Real Estate')
        mls_ano['agency_logo_url'] = agency.get('agency_logo_url', u'')
        mls_ano['agency_office_phone'] = agency.get(
            'agency_office_phone', u'')
        mls_ano['agency_website'] = agency.get('agency_website', u'')
        # Agent Info
        mls_ano['agent_name'] = data.get('fullname', u'')
        mls_ano['agent_office_phone'] = data.get('office_phone', u'')
        mls_ano['agent_cell_phone'] = data.get('cell_phone', u'')
        mls_ano['agent_email'] = data.get('email', u'')
        mls_ano['agent_avatar_url'] = avatar_url
        # force overriding of any other agent
        mls_ano['force'] = 'selected'
def queuePurge(event, force=False):
    """Queue purge paths even though no caching proxies are configured.

    THIS IS THE OVERRIDE: the original event only goes forward when
    cache proxies are defined, so we check that they are NOT defined and
    only then force purging (with cloudflare no proxies are configured).
    """
    request = getRequest()
    if request is None:
        return
    annotations = IAnnotations(request, None)
    if annotations is None:
        return
    registry = queryUtility(IRegistry)
    if registry is None:
        return
    settings = registry.forInterface(ICachePurgingSettings, check=False)
    if not settings.enabled:
        return
    # only take over when no caching proxies are configured (or forced)
    if settings.cachingProxies and not force:
        return
    pending = annotations.setdefault(KEY, set())
    pending.update(getPathsToPurge(event.object, request))
def _storage(self):
    """Return (creating if needed) the persistent tag mapping for context."""
    annotations = IAnnotations(self.context)
    return annotations.setdefault(ANNOTATIONS_KEY, PersistentDict())
def __init__(self, context, manager):
    """Bind the span setting stored under a per-manager annotation key."""
    self.context = context
    self.manager = manager
    self.key_id = SPAN_KEY + manager.__name__
    self._span = IAnnotations(context).setdefault(self.key_id, '')
def handleUpdate(self, action):
    """Form action: (re)load the Software77 geo database from a URL.

    Stores the database records in a 'geoportlet' annotation on the
    context and reports the record count via a status message.
    """
    data, errors = self.extractData()
    if errors:
        self.status = self.formErrorsMessage
        return
    annotations = IAnnotations(self.context)
    storage = annotations.setdefault('geoportlet', PersistentMapping())
    database = Software77GeoDatabase(storage)
    url = data['url']
    # empty URL falls back to None (the database's default source)
    url = url and url.encode('utf-8') or None
    try:
        count = database.update(url)
    except IOError as exc:
        IStatusMessage(self.request).addStatusMessage(
            _(u"An error occurred: ${error}.",
              mapping={'error': exc}), "info")
    else:
        # add a thousands separator for display; tolerate formats that
        # reject the value
        try:
            count = '{0:,}'.format(count)
        except ValueError:
            pass
        IStatusMessage(self.request).addStatusMessage(
            _(u"Database updated (${count} records read).",
              mapping={'count': count}), "info")
def _storage(self):
    """Return (creating if needed) the persistent tag mapping for context."""
    annotations = IAnnotations(self.context)
    return annotations.setdefault(ANNOTATIONS_KEY, PersistentDict())
def __init__(self, transmogrifier, name, options, previous):
    """Blueprint section init: wire pipeline plumbing and key matchers.

    Reads 'path-key', 'type-key' and 'exclude-type' from the section
    options and seeds an ITEMS_IN registry on the request annotations.
    """
    # read additional config in cfg file, and apply to default
    self.debug_infos = {}
    self.transmogrifier = transmogrifier
    self.name = name
    self.options = options
    self.previous = previous
    self.context = transmogrifier.context
    if "path-key" in options:
        pathkeys = options["path-key"].splitlines()
    else:
        pathkeys = defaultKeys(options["blueprint"], name, "path")
    self.pathkey = Matcher(*pathkeys)
    self.typekey = defaultMatcher(options, "type-key", name, "type",
                                  ("portal_type", "Type"))
    # NOTE(review): self.exclude_type is only assigned when the option
    # is present -- presumably readers guard with getattr/hasattr;
    # confirm before relying on the attribute always existing.
    if options.get("exclude-type", None):
        self.exclude_type = ast.literal_eval(
            options.get("exclude-type", None))
    annotations = IAnnotations(self.context.REQUEST)
    self.items_in = annotations.setdefault(ITEMS_IN, {})
def initializeAnnotations(obj, event):
    """Ensure that we don't delegate certain annotations by setting them
    from the beginning (ordering, content rules, context assignments)."""
    annotations = IAnnotations(obj)
    defaults = (
        (DefaultOrdering.ORDER_KEY, PersistentList()),
        (DefaultOrdering.POS_KEY, OIBTree()),
        (CONTENTRULES_KEY, None),
        (CONTEXT_ASSIGNMENT_KEY, OOBTree()),
    )
    for key, default in defaults:
        annotations.setdefault(key, default)
def initializeAnnotations(obj, event):
    """Ensure that we don't delegate certain annotations by setting them
    from the beginning (ordering, content rules, context assignments)."""
    annotations = IAnnotations(obj)
    defaults = (
        (DefaultOrdering.ORDER_KEY, PersistentList()),
        (DefaultOrdering.POS_KEY, OIBTree()),
        (CONTENTRULES_KEY, None),
        (CONTEXT_ASSIGNMENT_KEY, OOBTree()),
    )
    for key, default in defaults:
        annotations.setdefault(key, default)
def __init__(self, context, request):
    """Set up the membership list, listen annotations and pending lists."""
    super(ModerationView, self).__init__(context, request)
    self.mem_list = IWriteMembershipList(context)
    self.listen_annot = IAnnotations(self.context).setdefault(
        PROJECTNAME, OOBTree())
    self.mod_post_pending_list = getAdapter(
        context, IPostPendingList, 'pending_mod_post')
    self.pmod_post_pending_list = getAdapter(
        context, IPostPendingList, 'pending_pmod_post')
    self.sub_pending_list = getAdapter(
        context, IMembershipPendingList, 'pending_sub_mod_email')
def __init__(self, context):
    """Bind to ``context`` and ensure a persistent hmac-key storage."""
    self.context = context
    storage = IAnnotations(context).setdefault(ANNOT_KEY, PersistentMapping())
    # the context's string representation seeds the hmac key exactly once
    storage.setdefault('hmac_key', str(context))
    self.storage = storage
def __init__(self, context):
    """Bind to ``context`` and ensure a persistent hmac-key storage."""
    self.context = context
    storage = IAnnotations(context).setdefault(ANNOT_KEY, PersistentMapping())
    # the context's string representation seeds the hmac key exactly once
    storage.setdefault('hmac_key', str(context))
    self.storage = storage
def delay(request, name, fn):
    """Register a function that will be called at the end of the request."""
    ann = IAnnotations(request, None)
    if ann is None:
        # request does not support annotations; silently skip
        return
    delayed = ann.setdefault('plone.app.debugtoolbar.delayed', {})
    delayed[name] = fn
def _factory(self, container, entry):
    """Create a wcc.books.author object from a spreadsheet ``entry``."""
    title = '%s' % (entry.get('By (author)', 'Unknown'))
    logger.info("Creating Author : %s" % title)
    oid = self._create_obj_for_title(container, 'wcc.books.author', title)
    obj = container._getOb(oid)
    obj.setTitle(title)
    obj.description = entry.get('Author Information', '')
    # keep the raw source row around for later id mapping
    metadata = IAnnotations(obj).setdefault('wcc.metadata', PersistentDict())
    metadata['authordata'] = entry
    logger.info("Created %s" % obj.absolute_url())
def update(self):
    """Rebuild the top-items cache, overall and per year since 2010."""
    anno = IAnnotations(self.context)
    cache = anno.setdefault(self.anno_key, PersistentDict())
    indexes = (
        ('office', 'office'),
        ('theme', 'theme'),
        ('mission_location', 'mission_location'),
        ('creator', 'Creator'),
    )
    for key, index in indexes:
        cache[key] = self._top_items_by_index(index)
    cache['years'] = {}
    for year in range(2010, datetime.now().year + 1):
        yearcache = {}
        for key, index in indexes:
            yearcache[key] = self._top_items_by_index(index, year)
        cache['years'][year] = yearcache
def __init__(self, context):
    """Initialize bcrypt salt storage; no-op when bcrypt is unavailable."""
    self.context = context
    if self.bcrypt is None:
        # bcrypt module missing: leave the adapter unconfigured
        return
    storage = IAnnotations(context).setdefault(ANNOT_KEY, PersistentMapping())
    # generate the salt only on first use; reuse it afterwards
    storage.setdefault('bcrypt_salt', self.bcrypt.gensalt())
    self.storage = storage
def test_available(self):
    """Viewlet is only available on faceted collections whose layout
    annotation is 'faceted-map-view'."""
    context = self.portal
    viewlet = self.get_viewlet(context, 'plone.belowcontentbody',
                               'mapviewlet')
    self.assertFalse(viewlet.available())
    coll = api.content.create(type='Collection', container=self.portal,
                              id='collection')
    viewlet = self.get_viewlet(coll, 'plone.belowcontentbody', 'mapviewlet')
    subtyper = getMultiAdapter((coll, self.request),
                               name=u'faceted_subtyper')
    subtyper.enable()
    # enabling faceted navigation alone is not enough ...
    self.assertFalse(viewlet.available())
    # ... the map layout annotation must be set too
    annotations = IAnnotations(coll)
    annotations.setdefault(ANNO_FACETED_LAYOUT, 'faceted-map-view')
    self.assertTrue(viewlet.available())
def _annotateHash(self, node):
    """Use the hash type specified in the xml file, and annotate mbtool.

    If not specified, use a default hash, iff not already annotated.
    Raises ValueError for a hash type not listed in HASHERS.
    """
    for child in node.childNodes:
        if child.nodeName != 'hash-type':
            continue
        htype = str(child.getAttribute('name'))
        if htype not in HASHERS:
            raise ValueError(
                'Unknown hash type: %s - Specify one of %s'
                % (htype, HASHERS))
        mbtool = getToolByName(self.context, 'membrane_tool')
        storage = IAnnotations(mbtool).setdefault(
            ANNOT_KEY, PersistentMapping())
        storage['hash_type'] = htype
        self._logger.info("Remember hash-type imported: %s" % htype)
        break
def cart(self):
    """Return the current member's cart, initializing it if necessary.

    :return: UUIDs of all the items in cart
    :rtype: CartSet (stored in the member's zope.annotations)
    """
    member = api.user.get_current()
    return IAnnotations(member).setdefault('cart', CartSet())
def _locks(self, create=True):
    """Return the persistent lock mapping, caching it on the instance.

    When ``create`` is false and no mapping has been stored yet, return
    a transient empty dict without touching the annotations.
    """
    if self.__locks is not None:
        return self.__locks
    annotations = IAnnotations(self.context)
    locks = annotations.get(ANNOTATION_KEY, None)
    if locks is None and create:
        locks = annotations.setdefault(ANNOTATION_KEY, PersistentDict())
    if locks is None:
        return {}
    self.__locks = locks
    return self.__locks
def userLoggedIn(user, event):
    """Append a (timestamp, ip) record of this login to the member's history."""
    userip = get_ip(user.REQUEST)
    logintime = datetime.now()
    mtool = getToolByName(getSite(), 'portal_membership')
    if not mtool:
        return
    member = mtool.getMemberById(user.getId())
    if not member:
        return
    history = IAnnotations(member).setdefault(
        'login_history', PersistentList())
    history.append({'date': logintime, 'ip': userip})
def queuePurge(event):
    """Find URLs to purge and queue them for later."""
    request = getRequest()
    if request is None:
        return
    annotations = IAnnotations(request, None)
    if annotations is None:
        return
    if not isCachePurgingEnabled():
        return
    pending = annotations.setdefault(KEY, set())
    pending.update(getPathsToPurge(event.object, request))
def add(self, data): """Add documents to be indexed containing binary data. This uses Apache Tika `ExtractingRequestHandler` to upload binary data, and extract the textual representation of the binary data for indexing. :seealso: https://cwiki.apache.org/confluence/display/solr\ /Uploading+Data+with+Solr+Cell+using+Apache+Tika :param data: The key/value data to index in Solr :type data: collections.Mapping :returns: """ # limit async optimizations to per-thread avoids inconsistencies key = 'ploneintranet.search.indexers.SearchableText:{}'.format( threading.current_thread().ident) annotations = IAnnotations(self.context) scheduled = annotations.setdefault(key, False) # our mutex maxdelay = datetime.timedelta(minutes=5) # failsafe mutex expire # final re-entry where an async dispatched job is handled sync if self.context.REQUEST.get('attributes') == 'SearchableText': logger.info("Handle reindex of SearchableText <%s>", threading.current_thread().ident) # remove mutex for current object on current thread annotations[key] = False data = self._add_handler(data) # on repeat reindex(), do not dispatch multiple async jobs # 'maxdelay' failsafe auto-expires the 'scheduled' mutex elif scheduled and datetime.datetime.now() - scheduled < maxdelay: logger.info("SearchableText reindex already in progress <%s>", threading.current_thread().ident) # initial entry point, dispatch async reindex for handling above elif 'SearchableText' in data: logger.info("Dispatch reindex of SearchableText async <%s>", threading.current_thread().ident) # mutex limits open async index jobs to 1 per object per thread annotations[key] = datetime.datetime.now() # Dispatch an async job to reindex the blob ReindexObject(self.context, self.context.REQUEST)( data=dict(attributes=["SearchableText"]), countdown=10) super(BinaryAdder, self).add(data)
def _locks(self, create=True):
    """Return the persistent lock mapping for the context, caching it.

    When ``create`` is false and no mapping has been stored yet, a
    transient empty dict is returned without writing anything.
    """
    if self.__locks is not None:
        return self.__locks
    annotations = IAnnotations(self.context)
    locks = annotations.get(ANNOTATION_KEY, None)
    if locks is None and create:
        locks = annotations.setdefault(ANNOTATION_KEY, PersistentDict())
        # mark the annotations container as intentionally written even
        # on an otherwise read-only request; some annotation objects
        # lack __annotations__, hence the AttributeError guard
        try:
            safeWrite(annotations.obj.__annotations__)
        except AttributeError:
            pass
    if locks is not None:
        self.__locks = locks
        return self.__locks
    else:
        return {}