def test_disable():
    """A cache constructed with enabled=False never stores or serves values."""
    store = LocalCache()
    disabled_cache = Cache(store, enabled=False)
    counter = disabled_cache("counter")(CallCounter())
    assert counter() == 1, 'called the first time'
    assert counter() == 2, 'called the second time too'
    assert counter.cached() == 3, 'called even when you get the cached val'
    # nothing should ever have been written to the backend
    assert not store.get("counter")
def test_bust():
    """bust=True recomputes on every call but still writes through to the backend."""
    store = LocalCache()
    busting_cache = Cache(store, bust=True)
    counter = busting_cache("counter")(CallCounter())
    assert counter() == 1
    assert counter() == 2
    assert counter.cached() == 2, 'not called if you just get the cached val'
    # the value is persisted even though calls always recompute
    assert store.get("counter")
def test_basic():
    """An enabled cache computes once, serves the cached value, and refreshes on demand."""
    store = LocalCache()
    enabled_cache = Cache(store, enabled=True)
    counter = enabled_cache("counter")(CallCounter())
    assert counter() == 1, 'called the first time'
    assert counter() == 1, 'not called the second time'
    # refresh() forces a recomputation and returns the new value
    assert counter.refresh() == 2
    assert store.get("counter")
def main():
    """Score each author by where their own book ranks among findBest results.

    For every (author, book) pair, findBest ranks all test books against the
    author's book; the author's position in that ranking is the score (lower
    is better).  Results are cached per-author via LocalCache.
    """
    testBooks = {
        'Agata_Christie': 'Tajemnica_Wawrzynow.txt',
        'Janusz_A_Zajdel': 'Awaria.txt',
        'Paulo_Coelho': 'Alchemik.txt',
        'George_Orwell': 'Orwell_George_-_Rok_1984.txt',
        'Sapkowski_Andrzej': 'Pani_Jeziora.txt',
        'Andre_Norton': 'Andre_Norton_-_Prekursorka.txt',
        'Dick_Philip_K': 'Dick_Philip_K_-_Kolonia.txt',
        'Gordon_R_Dickson': 'Gordon_R_Dickson_-_Nekromanta.txt',
        'Lem_Stanislaw': 'Lem_Stanislaw_-_Bajki_robotow.txt',
        'Terry_Pratchett': 'Terry_Pratchett_-_Ruchome_Obrazki.txt',
    }
    firstN = 10
    scores = []
    for name, testBook in testBooks.items():
        pFindBest = partial(findBest, tuple(testBooks.values()), testBook)
        # cache the (expensive) ranking result per author
        result = LocalCache.load(f'{name}.scoreResult', pFindBest)
        score = list(OrderedDict(result).keys()).index(name)
        scores.append(score)
        print(f"Author: {name} has score: {score} for book: {testBook}")
        print(
            f"First {firstN} best results: \n{pprint.pformat(result[:firstN])}\n\n"
        )
    # The original used ``for/else`` here; with no ``break`` in the loop the
    # else-clause always runs, so plain post-loop statements are equivalent
    # and far less misleading.
    print(f"scores: {scores}")
    print(
        f"Average score: {sum(scores)/len(testBooks)}, std: {np.std(scores)}"
    )
def make_app(stores_path='/tmp', cache_enabled=True, cache_type='memory',
             cache_size=10000, cache_host='localhost', cache_port=6379,
             cache_ttl=86400, gc_interval=86400):
    """Configure and return the global bottle app.

    Sets up the query cache ('memory' or 'redis' backend) and, when
    gc_interval > 0, starts a daemon thread running the garbage collector.

    Returns the module-level ``app`` (also mutates the globals ``app`` and
    ``cache`` as a side effect).
    """
    global app
    global cache
    # monkey patch bottle to increase BaseRequest.MEMFILE_MAX
    BaseRequest.MEMFILE_MAX = 1024000
    setup_logging()
    app.config['gitstores_path'] = stores_path
    app.config['gc_interval'] = gc_interval
    cache_backend = None
    if cache_type == 'memory':
        cache_backend = LocalCache(cache_size)
    elif cache_type == 'redis':
        try:
            import redis
            cache_backend = RedisCache(redis.Redis(cache_host, cache_port),
                                       cache_ttl)
        except ImportError:
            # deliberate best-effort: fall back to no backend when the
            # redis package is unavailable
            pass
    cache = QueryCache(backend=cache_backend, enabled=cache_enabled)
    if gc_interval > 0:
        t = threading.Thread(target=run_gc)
        # Thread.setDaemon() is deprecated (removed in newer Pythons);
        # assigning the attribute is the supported equivalent.
        t.daemon = True
        t.start()
    return app
def getGraph(degree) -> GraphModel:
    """Return the (cached) GraphModel built for the given degree."""
    def _build():
        model = GraphModel(getLoaderPrimaryForm(), degree)
        model.processGraphs(slice(51555))
        return model

    return LocalCache.load(f'graph{degree}', _build)
def getTfIdfModel() -> TfidfModel:
    """Return the (cached) TF-IDF model over the primary-form corpus."""
    def _build():
        model = TfidfModel(getLoaderPrimaryForm())
        model.calcVector()
        return model

    return LocalCache.load("tfidf", _build)
class Chanko(object):
    """Top-level object of the chanko"""

    def __init__(self):
        self.base = os.getcwd()
        self.config = os.path.join(self.base, 'config')
        self.architecture = getoutput("dpkg --print-architecture")
        self.trustedkeys = os.path.join(self.config, 'trustedkeys.gpg')
        self.sources_list = os.path.join(self.config, 'sources.list')

        # both control files must exist before any further setup
        for required in (self.sources_list, self.trustedkeys):
            if not os.path.exists(required):
                raise Error("required file not found: " + required)

        settings = ChankoConfig(os.path.join(self.config, 'chanko.conf'))
        os.environ['CCURL_CACHE'] = settings.ccurl_cache

        self.archives = os.path.join(self.base, 'archives')
        makedirs(os.path.join(self.archives, 'partial'))

        # split the plan_cpp option string into separate "-"-prefixed tokens
        cpp_tokens = settings.plan_cpp.replace("-", " -").strip()
        cpp_tokens = cpp_tokens.split(" ") if cpp_tokens else []
        self.plan = Plan(os.path.join(self.base, 'plan'),
                         self.architecture, cpp_tokens)

        self.local_cache = LocalCache(self)
        self.remote_cache = RemoteCache(self)

    def get_package_candidates(self, packages, nodeps=False):
        """Resolve download URIs for *packages*, refreshing list data first if needed."""
        if not self.remote_cache.has_lists:
            self.remote_cache.refresh()
        return get_uris(self, self.remote_cache, packages, nodeps)

    def get_packages(self, candidates=None, packages=None, nodeps=False):
        """Download candidates (or resolve *packages* first); False when nothing to do."""
        if packages:
            candidates = self.get_package_candidates(packages, nodeps)
        if not candidates:
            return False
        result = download_uris(candidates)
        if result:
            self.local_cache.refresh()
        return result
def test_arguments():
    """Positional and keyword arguments reach the wrapped function unchanged."""
    cache = Cache(LocalCache())

    @cache("mykey")
    def echo(*args, **kwargs):
        return (args, kwargs)

    matched = (echo(1, two=2) == ((1,), dict(two=2)))
    assert matched, 'arguments are passed in to the calculated function'
def __init__(self):
    """Locate the chanko working tree, validate its config files, and wire up
    the plan plus the local/remote caches."""
    self.base = os.getcwd()
    self.config = os.path.join(self.base, 'config')
    self.architecture = getoutput("dpkg --print-architecture")
    self.trustedkeys = os.path.join(self.config, 'trustedkeys.gpg')
    self.sources_list = os.path.join(self.config, 'sources.list')

    # fail fast when either control file is missing
    for path in (self.sources_list, self.trustedkeys):
        if not os.path.exists(path):
            raise Error("required file not found: " + path)

    cfg = ChankoConfig(os.path.join(self.config, 'chanko.conf'))
    os.environ['CCURL_CACHE'] = cfg.ccurl_cache

    self.archives = os.path.join(self.base, 'archives')
    makedirs(os.path.join(self.archives, 'partial'))

    # split the plan_cpp option string into separate "-"-prefixed tokens
    tokens = cfg.plan_cpp.replace("-", " -").strip()
    tokens = tokens.split(" ") if tokens else []
    self.plan = Plan(os.path.join(self.base, 'plan'), self.architecture, tokens)

    self.local_cache = LocalCache(self)
    self.remote_cache = RemoteCache(self)
def test_hash_arguments():
    """Equal argument tuples share one cache key; different ones get another."""
    backend = LocalCache()
    cache = Cache(backend)

    @cache("mykey")
    def expensive(*a, **kw):
        pass

    expensive(1, foo=2)
    expensive(1, foo=2)  # same arguments -> same key, no new entry
    expensive(2, foo=3)  # different arguments -> a second key

    # dict.keys() returns a non-indexable view on Python 3; materialize it
    # so keys[0] works on both Python 2 and 3.
    keys = list(backend._cache.keys())
    assert len(keys) == 2, "only two keys are set"
    assert "mykey/args:" in keys[0]
def test_locache(self):
    # Exercise LocalCache expiry and bounded retention.
    # NOTE(review): uses `xrange` and `assertEquals`, so this targets
    # Python 2 / legacy unittest; the assertions below depend on real sleeps.
    acache = LocalCache(timeout=5)
    self.assertEquals(None, acache.get("key"))
    acache.set("key", "value")
    self.assertEquals("value", acache.get("key"))
    time.sleep(5)
    # cache-wide default timeout elapsed -> entry gone
    self.assertEquals(None, acache.get("key"))
    acache.set("akey", "avalue", timeout=2)
    self.assertEquals("avalue", acache.get("akey"))
    time.sleep(2)
    # per-entry timeout overrides the cache-wide default
    self.assertEquals(None, acache.get("akey"))
    acache = LocalCache(timeout=300)
    for i in xrange(500):
        acache.set("%03d" % i, "%03d")
    end = start = -1
    # scan down for the highest key still present...
    for i in xrange(500, 0, -1):
        if acache.get("%03d" % i):
            end = i
            break
    # ...and up for the lowest key still present
    for i in xrange(500):
        if acache.get("%03d" % i):
            start = i
            break
    # presumably the cache evicts old entries so that fewer than 300 of the
    # 500 inserts survive -- TODO confirm against LocalCache's capacity logic
    self.assertTrue((end-start) < 300)
def getLoaderPrimaryForm() -> Loader:
    """Return the (cached) Loader built on the primary-form corpus."""
    def _build():
        return Loader(primaryForm=getPrimaryForm())

    return LocalCache.load('loaderPrimaryForm', _build)
def getPrimaryForm() -> PrimaryForm:
    """Return the (cached) PrimaryForm loaded from primaryFormPath."""
    def _build():
        return PrimaryForm(primaryFormPath)

    return LocalCache.load('primaryForm', _build)
def run(self, kwargs):
    """Fetch this dependency's entry, downloading from S3 on a cache miss.

    Returns a tuple: (list of absolute paths of the entry's files, excluding
    the ".umpire" marker file, EntryState indicating cache hit vs download).
    Raises EntryError when the remote entry is missing or the cache re-read
    fails; re-raises any downloader exception.

    NOTE(review): this block mixes print() calls with one Python 2 print
    statement, so it only parses under Python 2.
    """
    # Verify argument validity
    self.__verify_arguments__()
    # Get cache name from remote url
    cache_name = self.get_cache_name()
    # Get cache path
    cache_path = os.path.join(self.cache_root, cache_name)
    # Get cache object (will raise an exception if it doesn't exist)
    cache = LocalCache(cache_path, host_id=default_host_id)
    cache.DEBUG = self.DEBUG
    # Lock this (platform, name, version) slot for the duration of the fetch
    cache.lock(os.path.join(cache_path, self.dependency_platform,
                            self.dependency_name, self.dependency_version))
    # Try to get entry from cache
    entry = cache.get(self.dependency_platform, self.dependency_name,
                      self.dependency_version)
    # Record whether this run served from cache or had to download
    if entry is None:
        state = EntryState.DOWNLOADED
    else:
        state = EntryState.CACHE
    # We need to download the file, it wasn't in the cache
    if entry is None:
        print (self.format_entry_name() + ": Not in cache")
        full_url = s3.join_s3_url(
            self.dependency_repo,
            self.dependency_platform,
            self.dependency_name,
            self.dependency_version
        )
        print (self.format_entry_name() + ": Downloading " + full_url)
        # Get Downloader
        downloader = s3.AsyncS3Downloader(None)
        # Set Downloader arguments
        downloader.source_url = full_url
        downloader.destination_path = os.path.join(self.cache_root,
                                                   "downloading") + os.sep
        downloader.start()
        # Poll until the async download finishes
        # TODO: Do something with the reported progress
        while downloader.status != module.DONE:
            time.sleep(0.5)
        # Check for an exception, if so bubble it up
        if downloader.exception is not None:
            raise downloader.exception
        print self.format_entry_name() + ": Download complete"
        if downloader.result is None or len(downloader.result) == 0:
            raise EntryError(self.format_entry_name()
                             + ": Unable to find remote entry '" + full_url + "'")
        # Iterate over the result (downloaded files), verifying each checksum
        for item, checksum in downloader.result:
            local_file_checksum = mfile.md5_checksum(item)
            if checksum != local_file_checksum:
                # Mismatch is only warned about, not fatal
                print (
                    self.format_entry_name()
                    + ": WARNING: Downloaded file does not match the checksum on the server"
                )
                print (self.format_entry_name()
                       + ": WARNING: local:\t" + str(local_file_checksum))
                print (self.format_entry_name()
                       + ": WARNING: server:\t" + str(checksum))
            if self.dependency_unpack:
                print (self.format_entry_name() + ": Unpacking...")
            # Put will unlock
            cache.put(
                item,
                self.dependency_platform,
                self.dependency_name,
                self.dependency_version,
                unpack=self.dependency_unpack,
            )
        # Re-read the freshly stored entry
        entry = cache.get(self.dependency_platform, self.dependency_name,
                          self.dependency_version)
        if entry is None:
            raise EntryError(self.format_entry_name()
                             + ": Error retrieving entry from cache.")
    cache.unlock(os.path.join(cache_path, self.dependency_platform,
                              self.dependency_name, self.dependency_version))
    # Entry is not None, return all the files listed in the entry that
    # aren't the configuration files
    return [os.path.abspath(os.path.join(entry.path, f))
            for f in os.listdir(entry.path) if f != ".umpire"], state
def run(self,kwargs):
    """Fetch this dependency's entry, downloading from S3 on a cache miss.

    Returns (list of absolute file paths in the entry excluding ".umpire",
    EntryState). Raises EntryError on a missing remote entry or failed cache
    re-read; re-raises any downloader exception.

    NOTE(review): mixes print() calls with one Python 2 print statement, so
    this block only parses under Python 2.
    """
    # Verify argument validity
    self.__verify_arguments__()
    # Get cache name from remote url
    cache_name = self.get_cache_name()
    # Get cache path
    cache_path = os.path.join(self.cache_root, cache_name)
    # Get cache object (will raise an exception if it doesn't exist)
    cache = LocalCache(cache_path, host_id=default_host_id)
    cache.DEBUG = self.DEBUG
    # Lock this (platform, name, version) slot for the duration of the fetch
    cache.lock(os.path.join(cache_path,self.dependency_platform, self.dependency_name, self.dependency_version))
    # Try to get entry from cache
    entry = cache.get(self.dependency_platform, self.dependency_name, self.dependency_version)
    # Record whether this run served from cache or had to download
    if entry is None:
        state = EntryState.DOWNLOADED
    else:
        state = EntryState.CACHE
    # We need to download the file, it wasn't in the cache
    if entry is None:
        print (self.format_entry_name() + ": Not in cache")
        full_url = s3.join_s3_url(self.dependency_repo, self.dependency_platform, self.dependency_name, self.dependency_version)
        print (self.format_entry_name() + ": Downloading " + full_url)
        # Get Downloader
        downloader = s3.AsyncS3Downloader(None)
        # Set Downloader arguments
        downloader.source_url = full_url
        downloader.destination_path = os.path.join(self.cache_root, "downloading") + os.sep
        downloader.start()
        # Poll until the async download finishes
        # TODO: Do something with the reported progress
        while downloader.status != module.DONE:
            time.sleep(0.5)
        # Check for an exception, if so bubble it up
        if downloader.exception is not None:
            raise downloader.exception
        print self.format_entry_name() + ": Download complete"
        if downloader.result is None or len(downloader.result) == 0:
            raise EntryError(self.format_entry_name() + ": Unable to find remote entry '" + full_url + "'")
        # Iterate over the result (downloaded files), verifying each checksum
        for item, checksum in downloader.result:
            local_file_checksum = mfile.md5_checksum(item)
            if checksum != local_file_checksum:
                # Mismatch is only warned about, not fatal
                print(self.format_entry_name() + ": WARNING: Downloaded file does not match the checksum on the server")
                print(self.format_entry_name() + ": WARNING: local:\t" + str(local_file_checksum))
                print(self.format_entry_name() + ": WARNING: server:\t" + str(checksum))
            if self.dependency_unpack:
                print (self.format_entry_name() + ": Unpacking...")
            # Put will unlock
            cache.put(item,self.dependency_platform, self.dependency_name, self.dependency_version, unpack=self.dependency_unpack)
        # Re-read the freshly stored entry
        entry = cache.get(self.dependency_platform, self.dependency_name, self.dependency_version)
        if entry is None:
            raise EntryError(self.format_entry_name() + ": Error retrieving entry from cache.")
    cache.unlock(os.path.join(cache_path,self.dependency_platform, self.dependency_name, self.dependency_version))
    # Entry is not None, return all the files listed in the entry that aren't the configuration files
    return [os.path.abspath(os.path.join(entry.path,f)) for f in os.listdir(entry.path) if f != ".umpire"], state