import time

from openid.store import nonce

from MoinMoin import caching


def _cleanup_nonces(request):
    cachelist = caching.get_cache_list(request, 'openid-nonce', 'farm')
    # really openid should have a method to check this...
    texpired = time.time() - nonce.SKEW
    for name in cachelist:
        entry = caching.CacheEntry(request, 'openid-nonce', name,
                                   scope='farm', use_pickle=False)
        try:
            timestamp = int(entry.content())
            if timestamp < texpired:
                entry.remove()
        except caching.CacheError:
            pass
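# The expiry test above can be exercised in isolation. A minimal sketch,
# not MoinMoin or python-openid API: `SKEW_SECONDS` is an assumed stand-in
# for nonce.SKEW, and `is_nonce_expired` is a hypothetical helper.
import time

SKEW_SECONDS = 60 * 60 * 5  # assumed stand-in for nonce.SKEW (seconds)

def is_nonce_expired(timestamp, now=None):
    # A nonce is stale once its timestamp falls behind the allowed skew,
    # the same comparison _cleanup_nonces() applies to each cache entry.
    if now is None:
        now = time.time()
    return timestamp < now - SKEW_SECONDS

# Example: a nonce stamped six hours ago is past a five-hour skew.
assert is_nonce_expired(time.time() - 6 * 60 * 60)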
def mainloop(self):
    self.init_request()
    request = self.request

    # clean page scope cache entries
    keys = ['text_html', 'pagelinks', 'hitcounts']
    pages = request.rootpage.getPageList(user='')
    for pagename in pages:
        arena = Page(request, pagename)
        for key in keys:
            caching.CacheEntry(request, arena, key, scope='item').remove()

    # clean wiki scope cache entries
    arena_key_list = [
        ('charts', 'hitcounts'),
        ('charts', 'pagehits'),
        ('charts', 'useragents'),
    ]
    for arena, key in arena_key_list:
        caching.CacheEntry(request, arena, key, scope='wiki').remove()

    # clean dict and groups related cache
    arena_scope_list = [
        ('pagedicts', 'wiki'),
        ('pagegroups', 'wiki'),
        ('users', 'userdir'),
    ]
    for arena, scope in arena_scope_list:
        for key in caching.get_cache_list(request, arena, scope):
            caching.CacheEntry(request, arena, key, scope=scope).remove()

    # clean drafts of users
    uids = user.getUserList(request)
    for key in uids:
        caching.CacheEntry(request, 'drafts', key, scope='wiki').remove()

    # clean language cache files
    caching.CacheEntry(request, 'i18n', 'meta', scope='wiki').remove()
    wiki_languages = i18n.wikiLanguages().keys()
    for key in wiki_languages:
        caching.CacheEntry(request, 'i18n', key, scope='wiki').remove()
def mainloop(self):
    self.init_request()
    request = self.request

    # clean page scope cache entries
    keys = ['text_html', 'pagelinks', 'hitcounts']
    pages = request.rootpage.getPageList(user='')
    for pagename in pages:
        arena = Page(request, pagename)
        for key in keys:
            caching.CacheEntry(request, arena, key, scope='item').remove()

    # clean wiki scope cache entries
    arena_key_list = [
        ('charts', 'hitcounts'),
        ('charts', 'pagehits'),
        ('charts', 'useragents'),
        ('user', 'name2id'),
    ]
    for arena, key in arena_key_list:
        caching.CacheEntry(request, arena, key, scope='wiki').remove()

    # clean dict and groups related cache
    arena_scope_list = [
        ('pagedicts', 'wiki'),
        ('pagegroups', 'wiki'),
    ]
    for arena, scope in arena_scope_list:
        for key in caching.get_cache_list(request, arena, scope):
            caching.CacheEntry(request, arena, key, scope=scope).remove()

    # clean drafts of users
    uids = user.getUserList(request)
    for key in uids:
        caching.CacheEntry(request, 'drafts', key, scope='wiki').remove()

    # clean language cache files
    caching.CacheEntry(request, 'i18n', 'meta', scope='wiki').remove()
    wiki_languages = i18n.wikiLanguages().keys()
    for key in wiki_languages:
        caching.CacheEntry(request, 'i18n', key, scope='wiki').remove()
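# For context: this mainloop belongs to MoinMoin's `maint cleancache`
# maintenance script, typically run from the command line (the paths and
# URL below are placeholders):
#
#   moin --config-dir=/path/to/config --wiki-url=http://wiki.example.org/ maint cleancache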
from MoinMoin import caching
from MoinMoin.Page import Page


def list_pagecachefiles(request, pagename):
    page = Page(request, pagename)
    return caching.get_cache_list(request, page, 'item')
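# A usage sketch: `purge_pagecachefiles` is a hypothetical helper, not
# MoinMoin API. It combines list_pagecachefiles() with CacheEntry.remove()
# to drop every cache entry of one page, mirroring the per-page loop in
# mainloop() above.
def purge_pagecachefiles(request, pagename):
    page = Page(request, pagename)
    for key in list_pagecachefiles(request, pagename):
        caching.CacheEntry(request, page, key, scope='item').remove()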