def entity_search(self):
    """Run an entity search for ``self.q`` and populate ``self.r``.

    The result is served from ``self.cache`` when present, unless the
    caller explicitly passed ``use_cached=False`` in ``self.opts``.
    On a cache miss the database is queried and the fresh result is
    written back to the cache.

    Returns True on success, False when the database reported errors
    (in which case ``self.r["status"]``/``self.r["message"]`` are set).
    """
    # Key covers the query plus the "details" option, since the two
    # produce different response payloads.
    cache_key = cache.make_key([self.q, self.opts.get("details")])

    # Consult the cache unless the caller explicitly opted out
    # (only a literal False disables it; absent/None means "use cache").
    cache_result = None
    if self.cache and self.opts.get("use_cached") is not False:
        cache_result = self.cache.read(cache_key)

    if cache_result:
        self.r["response"] = cache_result["value"]["response"]
        self.r["cursor"] = cache_result["value"]["cursor"]
        self.r["time_cached"] = cache_result["timestamp"]
        self.r["cached"] = True
    else:
        resp = self.db.entity_search(
            self.q,
            include_internal_objects=False,
            include_details=self.opts.get("details", False),
            get_history=self.opts.get("get_history", False),
        )
        self.r["response"] = resp['entities']
        self.r["cursor"] = resp['cursor']
        if self.db.errors:
            # Surface database errors to the caller and clear them so
            # they are not re-reported on a later request.
            self.r["status"] = "error"
            self.r["message"] = self.db.errors
            self.db.errors = []
            return False
        # Only successful, error-free responses are cached.
        if self.cache:
            self.cache.write_pair(
                cache_key,
                {"response": self.r["response"], "cursor": self.r["cursor"]},
            )
        self.r["cached"] = False
    return True
def apply_filters(hunk, filters, type, cache=None, **kwargs):
    """Apply the given list of filters to the hunk, returning a new
    ``MemoryHunk`` object.

    ``kwargs`` are options that should be passed along to the filters.
    If ``hunk`` is a file hunk, a ``source_path`` key will automatically
    be added to ``kwargs``.
    """
    assert type in ('input', 'output')

    # Short-circuit
    # TODO: This can actually be improved by looking at "type" and
    # whether any of the existing filters handles this type.
    if not filters:
        return hunk

    if cache:
        key = make_key(hunk.key(), filters, type)
        content = cache.get(key)
        # cache.get() may signal a miss with either False or None.
        if content not in (False, None):
            return MemoryHunk(content)

    kwargs = kwargs.copy()
    if hasattr(hunk, 'filename'):
        kwargs.setdefault('source_path', hunk.filename)

    data = StringIO.StringIO(hunk.data())
    # "flt" rather than "filter" so the builtin is not shadowed.
    for flt in filters:
        func = getattr(flt, type, False)
        if func:
            out = StringIO.StringIO()
            func(data, out, **kwargs)
            data = out
            data.seek(0)

    # Note that the key used to cache this hunk is different from the key
    # the hunk will expose to subsequent merges, i.e. hunk.key() is always
    # based on the actual content, and does not match the cache key. The
    # latter also includes information about for example the filters used.
    #
    # It wouldn't have to be this way. Hunks could subsequently expose their
    # cache key through hunk.key(). This would work as well, but would be
    # an inferior solution: Imagine a source file which receives
    # non-substantial changes, in the sense that they do not affect the
    # filter output, for example whitespace. If a hunk's key is the cache
    # key, such a change would invalidate the caches for all subsequent
    # operations on this hunk as well, even though it didn't actually change
    # after all.
    content = data.getvalue()
    if cache:
        cache.set(key, content)
    return MemoryHunk(content)
def apply_filters(hunk, filters, type, cache=None, **kwargs):
    """Apply the given list of filters to the hunk, returning a new
    ``MemoryHunk`` object.

    ``kwargs`` are options that should be passed along to the filters.
    If ``hunk`` is a file hunk, a ``source_path`` key will automatically
    be added to ``kwargs``.
    """
    assert type in ('input', 'output')

    # Short-circuit
    # TODO: This can actually be improved by looking at "type" and
    # whether any of the existing filters handles this type.
    if not filters:
        return hunk

    if cache:
        key = make_key(hunk.key(), filters, type)
        content = cache.get(key)
        # Both False and None denote a cache miss.
        if content not in (False, None):
            return MemoryHunk(content)

    kwargs = kwargs.copy()
    if hasattr(hunk, 'filename'):
        kwargs.setdefault('source_path', hunk.filename)

    data = StringIO.StringIO(hunk.data())
    # Loop variable renamed from "filter" to avoid shadowing the builtin.
    for flt in filters:
        func = getattr(flt, type, False)
        if func:
            out = StringIO.StringIO()
            func(data, out, **kwargs)
            data = out
            data.seek(0)

    # Note that the key used to cache this hunk is different from the key
    # the hunk will expose to subsequent merges, i.e. hunk.key() is always
    # based on the actual content, and does not match the cache key. The
    # latter also includes information about for example the filters used.
    #
    # It wouldn't have to be this way. Hunks could subsequently expose their
    # cache key through hunk.key(). This would work as well, but would be
    # an inferior solution: Imagine a source file which receives
    # non-substantial changes, in the sense that they do not affect the
    # filter output, for example whitespace. If a hunk's key is the cache
    # key, such a change would invalidate the caches for all subsequent
    # operations on this hunk as well, even though it didn't actually change
    # after all.
    content = data.getvalue()
    if cache:
        cache.set(key, content)
    return MemoryHunk(content)
def key(self):
    """Return the cache key for this hunk, derived from its content.

    Equal data always yields an equal key, so the key tracks the
    actual content rather than the hunk's identity.
    """
    content = self.data()
    return make_key(content)
def cache(self):
    """Store this instance's files and discovery state in the cache,
    keyed by its URL."""
    payload = {
        'files': self.files,
        'discovered': self.discovered,
    }
    cache.set(cache.make_key(self.url), payload)