def make_app(stores_path='/tmp', cache_enabled=True, cache_type='memory',
             cache_size=10000, cache_host='localhost', cache_port=6379,
             cache_ttl=86400, gc_interval=86400):
    global app
    global cache

    # monkey patch bottle to increase BaseRequest.MEMFILE_MAX
    BaseRequest.MEMFILE_MAX = 1024000

    setup_logging()

    app.config['gitstores_path'] = stores_path
    app.config['gc_interval'] = gc_interval

    cache_backend = None
    if cache_type == 'memory':
        cache_backend = LocalCache(cache_size)
    elif cache_type == 'redis':
        try:
            import redis
            cache_backend = RedisCache(redis.Redis(cache_host, cache_port), cache_ttl)
        except ImportError:
            pass
    cache = QueryCache(backend=cache_backend, enabled=cache_enabled)

    if gc_interval > 0:
        t = threading.Thread(target=run_gc)
        t.daemon = True
        t.start()

    return app
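# Usage sketch (assumption): make_app returns the module-level bottle app,
# so it can be served with bottle's built-in run(). The stores path and the
# Redis settings below are hypothetical example values, not from the source.
import bottle

application = make_app(
    stores_path='/var/lib/gitstores',  # hypothetical store location
    cache_type='redis',
    cache_host='127.0.0.1',
    cache_port=6379,
)
bottle.run(application, host='0.0.0.0', port=8080)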
def test_arguments():
    cache = Cache(LocalCache())

    @cache("mykey")
    def return_arguments(*a, **kw):
        return (a, kw)

    result = (return_arguments(1, two=2) == ((1,), dict(two=2)))
    assert result, 'arguments are passed through to the decorated function'
def test_bust():
    backend = LocalCache()
    cache = Cache(backend, bust=True)
    c = cache("counter")(CallCounter())
    assert c() == 1
    assert c() == 2
    assert c.cached() == 2, 'not called if you just get the cached val'
    assert backend.get("counter")
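# The counter tests in this section pass a CallCounter instance to the cache
# decorator, but its definition is not shown here. A minimal sketch consistent
# with the assertions (an assumption, not the project's actual helper): each
# call increments and returns a running count, so the first call yields 1,
# the second 2, and so on.
class CallCounter:
    def __init__(self):
        self.count = 0

    def __call__(self):
        self.count += 1
        return self.count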
def test_disable():
    backend = LocalCache()
    cache = Cache(backend, enabled=False)
    c = cache("counter")(CallCounter())
    assert c() == 1, 'called the first time'
    assert c() == 2, 'called the second time too'
    assert c.cached() == 3, 'called even when you get the cached val'
    assert not backend.get("counter")
def test_basic():
    backend = LocalCache()
    cache = Cache(backend, enabled=True)
    c = cache("counter")(CallCounter())
    assert c() == 1, 'called the first time'
    assert c() == 1, 'not called the second time'
    assert c.refresh() == 2
    assert backend.get("counter")
def test_hash_arguments():
    backend = LocalCache()
    cache = Cache(backend)

    @cache("mykey")
    def expensive(*a, **kw):
        pass

    expensive(1, foo=2)
    expensive(1, foo=2)
    expensive(2, foo=3)

    keys = list(backend._cache.keys())  # list() so the dict view is indexable
    assert len(keys) == 2, "only two keys are set"
    assert "mykey/args:" in keys[0]
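# The "mykey/args:" assertion implies a key scheme of base key plus a digest
# of the call arguments, so that identical calls share one cache entry. A
# hypothetical sketch of such a derivation (the serialization and digest used
# by the real Cache are assumptions here):
import hashlib
import pickle

def derive_key(base_key, args, kwargs):
    blob = pickle.dumps((args, sorted(kwargs.items())))
    return "%s/args:%s" % (base_key, hashlib.md5(blob).hexdigest())

derive_key("mykey", (1,), {"foo": 2})  # e.g. 'mykey/args:<hex digest>'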
def __init__(self):
    self.base = os.getcwd()
    self.config = os.path.join(self.base, 'config')
    self.architecture = getoutput("dpkg --print-architecture")
    self.trustedkeys = os.path.join(self.config, 'trustedkeys.gpg')
    self.sources_list = os.path.join(self.config, 'sources.list')

    for f in (self.sources_list, self.trustedkeys):
        if not os.path.exists(f):
            raise Error("required file not found: " + f)

    conf = ChankoConfig(os.path.join(self.config, 'chanko.conf'))
    os.environ['CCURL_CACHE'] = conf.ccurl_cache

    self.archives = os.path.join(self.base, 'archives')
    makedirs(os.path.join(self.archives, 'partial'))

    plan_path = os.path.join(self.base, 'plan')
    plan_cpp = conf.plan_cpp.replace("-", " -").strip()
    plan_cpp = plan_cpp.split(" ") if plan_cpp else []

    self.plan = Plan(plan_path, self.architecture, plan_cpp)
    self.local_cache = LocalCache(self)
    self.remote_cache = RemoteCache(self)
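# Usage sketch, assuming this __init__ belongs to a Chanko class: all paths
# are resolved from the current working directory, so instantiate from inside
# an initialized chanko base containing config/sources.list and
# config/trustedkeys.gpg. The directory below is a hypothetical example.
import os

os.chdir('/path/to/chanko-base')  # hypothetical initialized base dir
chanko = Chanko()
print(chanko.architecture)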
def run(self, kwargs):
    # Verify argument validity
    self.__verify_arguments__()

    # Get cache name from remote url
    cache_name = self.get_cache_name()

    # Get cache path
    cache_path = os.path.join(self.cache_root, cache_name)

    # Get cache object (will raise an exception if it doesn't exist)
    cache = LocalCache(cache_path, host_id=default_host_id)
    cache.DEBUG = self.DEBUG
    cache.lock(os.path.join(cache_path, self.dependency_platform,
                            self.dependency_name, self.dependency_version))

    # Try to get entry from cache
    entry = cache.get(self.dependency_platform, self.dependency_name,
                      self.dependency_version)

    # Set state
    if entry is None:
        state = EntryState.DOWNLOADED
    else:
        state = EntryState.CACHE

    # Verify remote MD5.
    # We need to download the file; it wasn't in the cache.
    if entry is None:
        print(self.format_entry_name() + ": Not in cache")
        full_url = s3.join_s3_url(self.dependency_repo, self.dependency_platform,
                                  self.dependency_name, self.dependency_version)
        print(self.format_entry_name() + ": Downloading " + full_url)

        # Get downloader
        downloader = s3.AsyncS3Downloader(None)

        # Set downloader arguments
        downloader.source_url = full_url
        downloader.destination_path = os.path.join(self.cache_root, "downloading") + os.sep
        downloader.start()

        # Wait for downloader to finish
        # TODO: Do something with the reported progress
        while downloader.status != module.DONE:
            time.sleep(0.5)

        # Check for an exception; if there is one, bubble it up
        if downloader.exception is not None:
            raise downloader.exception

        print(self.format_entry_name() + ": Download complete")

        if downloader.result is None or len(downloader.result) == 0:
            raise EntryError(self.format_entry_name() +
                             ": Unable to find remote entry '" + full_url + "'")

        # Iterate over the result (downloaded files)
        for item, checksum in downloader.result:
            local_file_checksum = mfile.md5_checksum(item)
            if checksum != local_file_checksum:
                print(self.format_entry_name() + ": WARNING: Downloaded file does not match the checksum on the server")
                print(self.format_entry_name() + ": WARNING: local:\t" + str(local_file_checksum))
                print(self.format_entry_name() + ": WARNING: server:\t" + str(checksum))
            if self.dependency_unpack:
                print(self.format_entry_name() + ": Unpacking...")
            # Put will unlock
            cache.put(item, self.dependency_platform, self.dependency_name,
                      self.dependency_version, unpack=self.dependency_unpack)

        entry = cache.get(self.dependency_platform, self.dependency_name,
                          self.dependency_version)
        if entry is None:
            raise EntryError(self.format_entry_name() + ": Error retrieving entry from cache.")

    cache.unlock(os.path.join(cache_path, self.dependency_platform,
                              self.dependency_name, self.dependency_version))

    # Entry is not None: return all the files listed in the entry that aren't
    # the configuration file, along with the entry state
    return [os.path.abspath(os.path.join(entry.path, f))
            for f in os.listdir(entry.path) if f != ".umpire"], state
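# run() reports whether the entry came from a fresh download or was already
# cached via EntryState, which is referenced but not defined in this section.
# A minimal sketch consistent with that usage (the real definition may differ):
class EntryState:
    DOWNLOADED = "downloaded"
    CACHE = "cache"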