def test_put_cache_file_like_data(self):
    """Test if put_cache() works when we give it a file like object for the content"""
    request = self.request
    key = 'nooneknowsit'
    filename = "test.png"
    data = "dontcareatall"
    data_file = StringIO.StringIO(data)
    cache.put(request, key, data_file)
    url = cache.url(request, key)
    assert key in url
    meta_cache = caching.CacheEntry(request,
                                    arena=cache.cache_arena,
                                    scope=cache.cache_scope,
                                    key=key + '.meta', use_pickle=True)
    meta = meta_cache.content()
    # only a very rough check, it has used cache mtime as last_modified
    assert meta['httpdate_last_modified'].endswith(' GMT')
    assert ("Content-Type", "application/octet-stream") in meta['headers']
    assert ("Content-Length", len(data)) in meta['headers']
    data_cache = caching.CacheEntry(request,
                                    arena=cache.cache_arena,
                                    scope=cache.cache_scope,
                                    key=key + '.data')
    cached = data_cache.content()
    assert data == cached
def exists(request, key, strict=False):
    """ Check if a cached object for this key exists.

        @param request: the request object
        @param key: non-guessable key into cache (str)
        @param strict: if True, also check the data cache, not only meta (bool, default: False)
        @return: is object cached? (bool)
    """
    if strict:
        data_cache = caching.CacheEntry(request, cache_arena, key + '.data', cache_scope, do_locking=do_locking)
        data_cached = data_cache.exists()
    else:
        data_cached = True  # we assume data will be there if meta is there
    meta_cache = caching.CacheEntry(request, cache_arena, key + '.meta', cache_scope, do_locking=do_locking, use_pickle=True)
    meta_cached = meta_cache.exists()
    return meta_cached and data_cached
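# Hedged usage sketch (not part of the original module): how the sendcache
# helpers in this section (put, exists, url, remove) fit together. It assumes
# a live `request` object and that this module is imported as `cache`, just
# as the tests above do; the `_example_` name is hypothetical.
def _example_sendcache_roundtrip(request):
    key = 'some-hard-to-guess-key'                   # non-guessable cache key
    cache.put(request, key, 'rendered data')         # writes key.meta and key.data
    assert cache.exists(request, key, strict=True)   # strict: check meta AND data
    url = cache.url(request, key)                    # URL clients can fetch it from
    cache.remove(request, key)                       # drops both cache entries
    return url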
def test_put_cache_complex(self):
    """Test if put_cache() works for a more complex, practical scenario:

       As 'source' we just use some random integer as count value.
       The 'rendered representation' of it is just the word "spam" repeated
       count times, which we cache.
       The cache key calculation (for the 'non-guessable' keys) is also
       rather simple.

       In the real world, the source would likely be some big image, and its
       rendered representation a thumbnail / preview of it. Or some LaTeX
       source and its rendered representation as a png image.
       Key calculation could be some MAC or some other hard to guess and
       unique string.
    """
    import random
    request = self.request
    render = lambda data: "spam" * data
    secret = 4223
    keycalc = lambda data: str(data * secret)

    source = random.randint(1, 100)
    rendered1 = render(source)
    key1 = keycalc(source)

    cache.put(request, key1, rendered1)
    url1 = cache.url(request, key1)
    assert 'key=%s' % key1 in url1

    data_cache = caching.CacheEntry(request,
                                    arena=cache.cache_arena,
                                    scope=cache.cache_scope,
                                    key=key1 + '.data')
    cached1 = data_cache.content()
    assert render(source) == cached1
    # if that succeeds, we have stored the rendered representation of source in the cache under key1

    # now we use some different source, render it and store it in the cache
    source = source * 2
    rendered2 = render(source)
    key2 = keycalc(source)

    cache.put(request, key2, rendered2)
    url2 = cache.url(request, key2)
    assert 'key=%s' % key2 in url2

    data_cache = caching.CacheEntry(request,
                                    arena=cache.cache_arena,
                                    scope=cache.cache_scope,
                                    key=key2 + '.data')
    cached2 = data_cache.content()
    assert render(source) == cached2
    # if that succeeds, we have stored the rendered representation of updated source in the cache under key2

    assert url2 != url1  # URLs must be different for different source (implies different keys)
def save(self):
    """ Save user account data to user account file on disk.

        This saves all member variables, except "id" and "valid" and
        those starting with an underscore.
    """
    if not self.id:
        return

    user_dir = self._cfg.user_dir
    if not os.path.exists(user_dir):
        os.makedirs(user_dir)

    self.last_saved = str(time.time())

    # !!! should write to a temp file here to avoid race conditions,
    # or even better, use locking
    data = codecs.open(self.__filename(), "w", config.charset)
    data.write("# Data saved '%s' for id '%s'\n" % (
        time.strftime(self._cfg.datetime_fmt, time.localtime(time.time())),
        self.id))
    attrs = self.persistent_items()
    attrs.sort()
    for key, value in attrs:
        # Encode list values
        if isinstance(value, list):
            key += '[]'
            value = encodeList(value)
        # Encode dict values
        elif isinstance(value, dict):
            key += '{}'
            value = encodeDict(value)
        line = u"%s=%s" % (key, unicode(value))
        line = line.replace('\n', ' ').replace('\r', ' ')  # no lineseps
        data.write(line + '\n')
    data.close()

    # invalidate the name/openid -> id lookup caches (disk and in-memory)
    arena = 'user'
    key = 'name2id'
    caching.CacheEntry(self._request, arena, key, scope='wiki').remove()
    try:
        del self._request.cfg.cache.name2id
    except AttributeError:
        pass
    key = 'openid2id'
    caching.CacheEntry(self._request, arena, key, scope='wiki').remove()
    try:
        del self._request.cfg.cache.openid2id
    except AttributeError:
        pass

    if not self.disabled:
        self.valid = 1

    if not self._stored:
        self._stored = True
        event = events.UserCreatedEvent(self._request, self)
        events.send_event(event)
def _cleanStats(self):
    # cleans all involved cache and log files
    nuke_eventlog(self.request)
    # hits is based on hitcounts which reads the cache
    caching.CacheEntry(self.request, 'charts', 'hitcounts', scope='wiki').remove()
    arena = Page(self.request, self.pagename)
    caching.CacheEntry(self.request, arena, 'hitcounts', scope='item').remove()
def test_persistence_simple(self):
    """ test if cache persists (on disk) """
    test_data = '12345abcde'
    cache = caching.CacheEntry(self.request, 'test_arena', 'test_key', 'wiki')
    cache.update(test_data)
    del cache
    cache = caching.CacheEntry(self.request, 'test_arena', 'test_key', 'wiki')
    assert test_data == cache.content()
def setup_class(self):
    request = self.request
    become_trusted(request)
    self.page = create_page(request, self.pagename, u"Foo!")
    # for that test the eventlog needs to be empty
    nuke_eventlog(self.request)
    # hits is based on hitcounts which reads the cache
    caching.CacheEntry(request, 'charts', 'pagehits', scope='wiki').remove()
    caching.CacheEntry(request, 'charts', 'hitcounts', scope='wiki').remove()
def execute(pagename, request):
    """ Handle refresh action """
    # Without arguments, the refresh action will refresh the page's text_html cache.
    arena = request.values.get('arena', 'Page.py')
    if arena == 'Page.py':
        arena = Page(request, pagename)
    key = request.values.get('key', 'text_html')

    # Remove the cache entry (if it exists), and send the page
    from MoinMoin import caching
    caching.CacheEntry(request, arena, key, scope='item').remove()
    caching.CacheEntry(request, arena, "pagelinks", scope='item').remove()
    request.page.send_page()
def _getUserIdByKey(request, key, search):
    """ Get the user ID for a specified key/value pair.

        This method must only be called for keys that are guaranteed to be unique.

        @param request: the request object
        @param key: the key to look in
        @param search: the value to look for
        @return: the corresponding user ID or None
    """
    if not search or not key:
        return None
    cfg = request.cfg
    cachekey = '%s2id' % key
    try:
        _key2id = getattr(cfg.cache, cachekey)
    except AttributeError:
        # in-memory cache missing: load it from the disk cache (or start empty)
        arena = 'user'
        cache = caching.CacheEntry(request, arena, cachekey, scope='wiki', use_pickle=True)
        try:
            _key2id = cache.content()
        except caching.CacheError:
            _key2id = {}
        setattr(cfg.cache, cachekey, _key2id)
    uid = _key2id.get(search, None)
    if uid is None:
        # cache miss: rebuild the mapping by scanning all user profiles
        for userid in getUserList(request):
            u = User(request, id=userid)
            if hasattr(u, key):
                value = getattr(u, key)
                if isinstance(value, list):
                    for val in value:
                        _key2id[val] = userid
                else:
                    _key2id[value] = userid
        arena = 'user'
        cache = caching.CacheEntry(request, arena, cachekey, scope='wiki', use_pickle=True)
        try:
            cache.update(_key2id)
        except caching.CacheError:
            pass
        uid = _key2id.get(search, None)
    return uid
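# For illustration (hedged, not necessarily the module's exact code): the
# public lookup helpers are presumably thin wrappers around _getUserIdByKey
# for attributes guaranteed to be unique; getUserId is what nuke_user later
# in this section calls.
def getUserId(request, searchName):
    """ Get the user ID for a specific user name. """
    return _getUserIdByKey(request, 'name', searchName)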
def remove(request, key):
    """ delete headers/data cache for key """
    meta_cache = caching.CacheEntry(request, cache_arena, key + '.meta', cache_scope, do_locking=do_locking, use_pickle=True)
    meta_cache.remove()
    data_cache = caching.CacheEntry(request, cache_arena, key + '.data', cache_scope, do_locking=do_locking)
    data_cache.remove()
def get_cache(self, locking):
    return caching.CacheEntry(self.request, self.xapian_dir, self.queuename,
                              scope='dir', use_pickle=True, do_locking=locking)
def updatePageSubCache(self):
    """ When a user profile is saved, we update the page subscribers' cache """
    scope, arena, key = 'userdir', 'users', 'pagesubscriptions'
    cache = caching.CacheEntry(self._request, arena=arena, key=key, scope=scope,
                               use_pickle=True, do_locking=False)
    if not cache.exists():
        return  # if no cache file exists, just don't do anything

    cache.lock('w')
    page_sub = cache.content()

    # we only store entries for valid users with some page subscriptions
    if self.valid and self.subscribed_pages:
        page_sub[self.id] = {
            'name': self.name,
            'email': self.email,
            'subscribed_pages': self.subscribed_pages,
        }
    elif page_sub.get(self.id):
        del page_sub[self.id]

    cache.update(page_sub)
    cache.unlock()
def test_remove(self):
    """ test if cache file removal works """
    cache = caching.CacheEntry(self.request, 'test_arena', 'test_key', 'wiki')
    assert cache.exists()
    cache.remove()
    assert not cache.exists()
def _load_group(self):
    request = self.request
    group_name = self.name

    page = Page(request, group_name)
    if page.exists():
        arena = 'pagegroups'
        key = wikiutil.quoteWikinameFS(group_name)
        cache = caching.CacheEntry(request, arena, key, scope='wiki', use_pickle=True)
        try:
            cache_mtime = cache.mtime()
            page_mtime = wikiutil.version2timestamp(page.mtime_usecs())
            # TODO: fix up-to-date check mtime granularity problems.
            #
            # cache_mtime is a float while page_mtime is an integer.
            # The comparison needs to be done on the lower of the two types.
            if int(cache_mtime) > int(page_mtime):
                # cache is up to date
                return cache.content()
            else:
                raise caching.CacheError
        except caching.CacheError:
            # either cache does not exist, is erroneous or not up to date: recreate it
            members, member_groups = super(WikiGroup, self)._load_group()
            cache.update((members, member_groups))
            return members, member_groups
    else:
        raise GroupDoesNotExistError(group_name)
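# _load_group above is an instance of a generic "serve from cache if it is
# newer than the source, else rebuild and refill" pattern. A minimal, hedged
# sketch of that pattern (the function and parameter names are hypothetical,
# not MoinMoin API; only the CacheEntry methods are from the source):
def _cached_or_rebuilt(cache, source_mtime, rebuild):
    try:
        # compare at whole-second granularity, as _load_group does
        if int(cache.mtime()) > int(source_mtime):
            return cache.content()
        raise caching.CacheError  # treat a stale cache like a missing one
    except caching.CacheError:
        result = rebuild()
        cache.update(result)
        return result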
def rebuildLookupCaches(request):
    """complete attrs -> userid lookup cache rebuild"""
    # as there may be thousands of users and reading all profiles is
    # expensive, we just have 1 lookup cache for all interesting user attrs,
    # so we only need to read all profiles ONCE to build the cache.
    scope, arena, key = 'userdir', 'users', 'lookup'
    diskcache = caching.CacheEntry(request, arena, key, scope=scope,
                                   use_pickle=True, do_locking=False)
    diskcache.lock('w')
    cache = {}
    for attrname in CACHED_USER_ATTRS:
        cache[attrname] = {}
    for userid in getUserList(request):
        u = User(request, id=userid)
        if u.valid:
            for attrname in CACHED_USER_ATTRS:
                if hasattr(u, attrname):
                    attr2id = cache[attrname]
                    value = getattr(u, attrname)
                    if isinstance(value, list):
                        for val in value:
                            attr2id[val] = userid
                    else:
                        attr2id[value] = userid
    cache_with_lowercase = addLowerCaseKeys(cache)
    setMemoryLookupCaches(request, cache_with_lowercase)
    diskcache.update(cache)
    diskcache.unlock()
    return cache
def teardown_method(self, method):
    """ Run after each test

    Remove user and reset user listing cache.
    """
    # Remove user file and user
    if self.user is not None:
        try:
            path = self.user._User__filename()
            os.remove(path)
        except OSError:
            pass
        del self.user

    # Restore original user
    self.request.cookies = self.saved_cookie
    self.request.user = self.saved_user

    # Remove user name to id cache, or the next test will fail
    caching.CacheEntry(self.request, 'user', 'name2id', scope='wiki').remove()
    try:
        del self.request.cfg.cache.name2id
    except AttributeError:
        pass
def init_wiki_tree(self):
    """ Init the wiki tree structure and wiki tree info (or load them from cache).

    See build_wiki_tree for the wiki_tree and wiki_tree_info data structure.
    """
    request = self.request
    self.wiki_tree = {}

    # Init wiki tree cache
    cache = caching.CacheEntry(
        request, 'wiki_tree',
        "%s@%s" % (wikiutil.url_quote(self.root), request.user.id))
    if self.is_moin_1_5:
        refresh = request.form.get('action', ['show'])[0] == 'refresh'
    else:
        refresh = request.action == 'refresh'

    # Check if there's a cached wiki tree and no refresh action was invoked
    if cache.exists() and not refresh:
        version, data = cPickle.loads(cache.content())
        if version == self.release:
            # Only use cached data if it corresponds to the theme version.
            # This avoids errors when the data structure has changed.
            self.wiki_tree, self.wiki_info = data

    if not self.wiki_tree:
        self.build_wiki_tree()
        # Cache the wiki tree
        cache.update(cPickle.dumps([self.release, [self.wiki_tree, self.wiki_info]]))
    return
def check_pagecachefile(request, pagename, cfname):
    page = Page(request, pagename)
    data_cache = caching.CacheEntry(request, page, cfname, scope='item', do_locking=True)
    return data_cache, data_cache.exists()
def test_mtime(self):
    """ test if cache mtime yields correct values """
    test_data = '12345abcde'
    now = time.time()
    cache = caching.CacheEntry(self.request, 'test_arena', 'test_key', 'wiki')
    cache.update(test_data)
    assert now - 2 <= cache.mtime() <= now + 2
def __init__(self, macro):
    self.macro = macro
    self.request = macro.request
    self.cache = caching.CacheEntry(self.request, 'charts', 'pagehits',
                                    scope='wiki', use_pickle=True)
def storeAssociation(self, server_url, association):
    ce = caching.CacheEntry(self.request, 'openid', self.key(server_url),
                            scope='wiki', use_pickle=True)
    if ce.exists():
        assocs = ce.content()
    else:
        assocs = []
    assocs += [association.serialize()]
    ce.update(assocs)
def _get_datafile(request, key):
    """ get an open data file for the data cached for key """
    data_cache = caching.CacheEntry(request, cache_arena, key + '.data', cache_scope, do_locking=do_locking)
    data_cache.open(mode='r')
    return data_cache
def test_persistence_encode(self):
    """ test if cache persists (on disk), use encoded string """
    test_data = u"üöäÜÖÄß"
    cache = caching.CacheEntry(self.request, 'test_arena', 'test_key', 'wiki', use_encode=True)
    cache.update(test_data)
    del cache
    cache = caching.CacheEntry(self.request, 'test_arena', 'test_key', 'wiki', use_encode=True)
    cache_data = cache.content()
    assert type(cache_data) == type(test_data)
    assert cache_data == test_data
def mainloop(self):
    self.init_request()
    request = self.request

    # clean page scope cache entries
    keys = ['text_html', 'pagelinks', 'hitcounts', ]
    pages = request.rootpage.getPageList(user='')
    for pagename in pages:
        arena = Page(request, pagename)
        for key in keys:
            caching.CacheEntry(request, arena, key, scope='item').remove()

    # clean wiki scope cache entries
    arena_key_list = [
        ('charts', 'hitcounts'),
        ('charts', 'pagehits'),
        ('charts', 'useragents'),
        ('user', 'name2id'),
    ]
    for arena, key in arena_key_list:
        caching.CacheEntry(request, arena, key, scope='wiki').remove()

    # clean dict and group related caches
    arena_scope_list = [
        ('pagedicts', 'wiki'),
        ('pagegroups', 'wiki'),
    ]
    for arena, scope in arena_scope_list:
        for key in caching.get_cache_list(request, arena, scope):
            caching.CacheEntry(request, arena, key, scope=scope).remove()

    # clean drafts of users
    uids = user.getUserList(request)
    for key in uids:
        caching.CacheEntry(request, 'drafts', key, scope='wiki').remove()

    # clean language cache files
    wiki_languages = i18n.wikiLanguages().keys()
    for key in wiki_languages:
        caching.CacheEntry(request, 'i18n', key, scope='wiki').remove()
def _get_headers(request, key):
    """ get last_modified and headers cached for key """
    meta_cache = caching.CacheEntry(request, cache_arena, key + '.meta', cache_scope, do_locking=do_locking, use_pickle=True)
    meta = meta_cache.content()
    return meta['last_modified'], meta['headers']
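# Hedged sketch (not the module's actual send code): how _get_headers and
# _get_datafile (earlier in this section) could be combined to serve a cached
# object. The emit_header/emit_body callables are hypothetical stand-ins for
# the real response machinery, and reading the open CacheEntry like a file is
# an assumption based on its open(mode='r') call above.
def _example_send_cached(request, key, emit_header, emit_body):
    last_modified, headers = _get_headers(request, key)
    for name, value in headers:  # e.g. ("Content-Type", "application/octet-stream")
        emit_header(name, value)
    data_file = _get_datafile(request, key)  # CacheEntry opened with mode='r'
    try:
        emit_body(data_file.read())
    finally:
        data_file.close()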
def nuke_user(request, username):
    """ completely delete a user """
    user_dir = request.cfg.user_dir
    user_id = user.getUserId(request, username)
    # really get rid of the user
    fpath = os.path.join(user_dir, user_id)
    os.remove(fpath)
    # delete cache
    arena = 'user'
    key = 'name2id'
    caching.CacheEntry(request, arena, key, scope='wiki').remove()
def useNonce(self, server_url, timestamp, salt):
    val = ''.join([str(server_url), str(timestamp), str(salt)])
    csum = hash_new('sha1', val).hexdigest()
    ce = caching.CacheEntry(self.request, 'openid-nonce', csum,
                            scope='farm', use_pickle=False)
    if ce.exists():
        # nonce already used!
        return False
    ce.update(str(timestamp))
    if randint(0, 999) == 0:
        # clean up expired nonces once in a while (on ~0.1% of requests)
        self.request.add_finisher(_cleanup_nonces)
    return True
def loadLookupCaches(request):
    """load lookup cache contents into memory: cfg.cache.XXX2id"""
    scope, arena, cachekey = 'userdir', 'users', 'lookup'
    diskcache = caching.CacheEntry(request, arena, cachekey, scope=scope, use_pickle=True)
    try:
        cache = diskcache.content()
    except caching.CacheError:
        cache = {}
        for attrname in CACHED_USER_ATTRS:
            cache[attrname] = {}
    cache_with_lowercase = addLowerCaseKeys(cache)
    setMemoryLookupCaches(request, cache_with_lowercase)
def _cleanup_nonces(request):
    cachelist = caching.get_cache_list(request, 'openid-nonce', 'farm')
    # really, openid should have a method to check this...
    texpired = time.time() - nonce.SKEW
    for name in cachelist:
        entry = caching.CacheEntry(request, 'openid-nonce', name,
                                   scope='farm', use_pickle=False)
        try:
            timestamp = int(entry.content())
            if timestamp < texpired:
                entry.remove()
        except caching.CacheError:
            pass
def test_update_needed(self):
    """ test the update check """
    test_data1 = u'does not matter'
    test_data2 = u'something else'
    page_name = u'Caching_TestPage'
    page = PageEditor(self.request, page_name)
    page._write_file(test_data1)
    cache = caching.CacheEntry(self.request, page, 'test_key', 'item')
    cache.update(test_data1)
    assert not cache.needsUpdate(page._text_filename())
    time.sleep(3)  # XXX fails without sleeping, due to mtime granularity
    page = PageEditor(self.request, page_name)
    page._write_file(test_data2)
    assert cache.needsUpdate(page._text_filename())