示例#1
0
文件: moinoid.py 项目: aahlad/soar
def _cleanup_nonces(request):
    """Remove expired OpenID nonce entries from the farm-scope cache.

    Scans every 'openid-nonce' cache entry; each entry stores a unix
    timestamp as plain text. Entries older than the OpenID clock skew
    window are deleted. Cleanup is best-effort: unreadable or corrupted
    entries are skipped rather than aborting the whole pass.

    @param request: current request (provides cache access)
    """
    cachelist = caching.get_cache_list(request, 'openid-nonce', 'farm')
    # really openid should have a method to check this...
    texpired = time.time() - nonce.SKEW
    for name in cachelist:
        entry = caching.CacheEntry(request, 'openid-nonce', name,
                                   scope='farm', use_pickle=False)
        try:
            timestamp = int(entry.content())
            if timestamp < texpired:
                entry.remove()
        except (caching.CacheError, ValueError):
            # ValueError: entry content is not an integer timestamp
            # (corrupted cache file) — skip it, stay best-effort.
            pass
示例#2
0
def _cleanup_nonces(request):
    """Remove expired OpenID nonce entries from the farm-scope cache.

    Scans every 'openid-nonce' cache entry; each entry stores a unix
    timestamp as plain text. Entries older than the OpenID clock skew
    window are deleted. Cleanup is best-effort: unreadable or corrupted
    entries are skipped rather than aborting the whole pass.

    @param request: current request (provides cache access)
    """
    cachelist = caching.get_cache_list(request, 'openid-nonce', 'farm')
    # really openid should have a method to check this...
    texpired = time.time() - nonce.SKEW
    for name in cachelist:
        entry = caching.CacheEntry(request, 'openid-nonce', name,
                                   scope='farm', use_pickle=False)
        try:
            timestamp = int(entry.content())
            if timestamp < texpired:
                entry.remove()
        except (caching.CacheError, ValueError):
            # ValueError: entry content is not an integer timestamp
            # (corrupted cache file) — skip it, stay best-effort.
            pass
示例#3
0
    def mainloop(self):
        """Wipe all known wiki caches.

        Removes per-page render caches, wiki-scope chart caches,
        dict/group and user-directory caches, per-user drafts, and
        the i18n language caches.
        """
        self.init_request()
        request = self.request

        # per-page ('item' scope) render caches
        page_keys = ('text_html', 'pagelinks', 'hitcounts')
        for pagename in request.rootpage.getPageList(user=''):
            page_arena = Page(request, pagename)
            for page_key in page_keys:
                caching.CacheEntry(request, page_arena, page_key,
                                   scope='item').remove()

        # fixed wiki-scope chart caches
        for chart_arena, chart_key in (('charts', 'hitcounts'),
                                       ('charts', 'pagehits'),
                                       ('charts', 'useragents')):
            caching.CacheEntry(request, chart_arena, chart_key,
                               scope='wiki').remove()

        # dict/group caches and the user directory cache — these arenas
        # hold a variable set of keys, so enumerate them first
        for dyn_arena, dyn_scope in (('pagedicts', 'wiki'),
                                     ('pagegroups', 'wiki'),
                                     ('users', 'userdir')):
            for dyn_key in caching.get_cache_list(request, dyn_arena,
                                                  dyn_scope):
                caching.CacheEntry(request, dyn_arena, dyn_key,
                                   scope=dyn_scope).remove()

        # one draft cache entry per user id
        for uid in user.getUserList(request):
            caching.CacheEntry(request, 'drafts', uid,
                               scope='wiki').remove()

        # i18n meta cache plus one entry per configured language
        caching.CacheEntry(request, 'i18n', 'meta', scope='wiki').remove()
        for lang in i18n.wikiLanguages().keys():
            caching.CacheEntry(request, 'i18n', lang,
                               scope='wiki').remove()
示例#4
0
    def mainloop(self):
        """Wipe all known wiki caches.

        Removes per-page render caches, wiki-scope chart and name2id
        caches, dict/group caches, per-user drafts, and the i18n
        language caches.
        """
        self.init_request()
        request = self.request

        # per-page ('item' scope) render caches
        page_keys = ('text_html', 'pagelinks', 'hitcounts')
        for pagename in request.rootpage.getPageList(user=''):
            page_arena = Page(request, pagename)
            for page_key in page_keys:
                caching.CacheEntry(request, page_arena, page_key,
                                   scope='item').remove()

        # fixed wiki-scope cache entries
        for fixed_arena, fixed_key in (('charts', 'hitcounts'),
                                       ('charts', 'pagehits'),
                                       ('charts', 'useragents'),
                                       ('user', 'name2id')):
            caching.CacheEntry(request, fixed_arena, fixed_key,
                               scope='wiki').remove()

        # dict/group caches hold a variable set of keys — enumerate first
        for dyn_arena, dyn_scope in (('pagedicts', 'wiki'),
                                     ('pagegroups', 'wiki')):
            for dyn_key in caching.get_cache_list(request, dyn_arena,
                                                  dyn_scope):
                caching.CacheEntry(request, dyn_arena, dyn_key,
                                   scope=dyn_scope).remove()

        # one draft cache entry per user id
        for uid in user.getUserList(request):
            caching.CacheEntry(request, 'drafts', uid,
                               scope='wiki').remove()

        # i18n meta cache plus one entry per configured language
        caching.CacheEntry(request, 'i18n', 'meta', scope='wiki').remove()
        for lang in i18n.wikiLanguages().keys():
            caching.CacheEntry(request, 'i18n', lang,
                               scope='wiki').remove()
示例#5
0
    def mainloop(self):
        """Wipe all known wiki caches.

        Removes per-page render caches, wiki-scope chart and name2id
        caches, dict/group caches, per-user drafts, and the i18n
        language caches.
        """
        self.init_request()
        request = self.request

        # per-page ("item" scope) render caches
        page_keys = ("text_html", "pagelinks", "hitcounts")
        for pagename in request.rootpage.getPageList(user=""):
            page_arena = Page(request, pagename)
            for page_key in page_keys:
                caching.CacheEntry(request, page_arena, page_key,
                                   scope="item").remove()

        # fixed wiki-scope cache entries
        for fixed_arena, fixed_key in (("charts", "hitcounts"),
                                       ("charts", "pagehits"),
                                       ("charts", "useragents"),
                                       ("user", "name2id")):
            caching.CacheEntry(request, fixed_arena, fixed_key,
                               scope="wiki").remove()

        # dict/group caches hold a variable set of keys — enumerate first
        for dyn_arena, dyn_scope in (("pagedicts", "wiki"),
                                     ("pagegroups", "wiki")):
            for dyn_key in caching.get_cache_list(request, dyn_arena,
                                                  dyn_scope):
                caching.CacheEntry(request, dyn_arena, dyn_key,
                                   scope=dyn_scope).remove()

        # one draft cache entry per user id
        for uid in user.getUserList(request):
            caching.CacheEntry(request, "drafts", uid,
                               scope="wiki").remove()

        # i18n meta cache plus one entry per configured language
        caching.CacheEntry(request, "i18n", "meta", scope="wiki").remove()
        for lang in i18n.wikiLanguages().keys():
            caching.CacheEntry(request, "i18n", lang,
                               scope="wiki").remove()
示例#6
0
def list_pagecachefiles(request, pagename):
    """Return the names of all 'item'-scope cache entries for *pagename*."""
    return caching.get_cache_list(request, Page(request, pagename), 'item')
示例#7
0
def list_pagecachefiles(request, pagename):
    """Return the names of all 'item'-scope cache entries for *pagename*."""
    return caching.get_cache_list(request, Page(request, pagename), 'item')