def test_locache(self):
    acache = LocalCache(timeout=5)
    self.assertEqual(None, acache.get("key"))
    acache.set("key", "value")
    self.assertEqual("value", acache.get("key"))
    time.sleep(5)
    self.assertEqual(None, acache.get("key"))

    # A per-key timeout overrides the cache-wide default
    acache.set("akey", "avalue", timeout=2)
    self.assertEqual("avalue", acache.get("akey"))
    time.sleep(2)
    self.assertEqual(None, acache.get("akey"))

    # Insert 500 entries and check that fewer than 300 consecutive entries survive
    acache = LocalCache(timeout=300)
    for i in range(500):
        acache.set("%03d" % i, "%03d" % i)
    end = start = -1
    for i in range(500, 0, -1):
        if acache.get("%03d" % i):
            end = i
            break
    for i in range(500):
        if acache.get("%03d" % i):
            start = i
            break
    self.assertTrue((end - start) < 300)
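# The test above pins down the LocalCache interface it relies on: a cache-wide default
# timeout, an optional per-key timeout on set(), None for a miss or an expired key, and
# a bounded number of retained entries. The sketch below is only an illustration under
# those assumptions; TimeoutCacheSketch and its max_entries parameter are hypothetical
# names, not the real LocalCache implementation.
import time
from collections import OrderedDict


class TimeoutCacheSketch(object):
    """Illustrative in-memory cache: per-entry expiry plus a size bound."""

    def __init__(self, timeout=60, max_entries=300):
        self.timeout = timeout
        self.max_entries = max_entries
        self._data = OrderedDict()  # key -> (expires_at, value), oldest first

    def set(self, key, value, timeout=None):
        ttl = timeout if timeout is not None else self.timeout
        self._data.pop(key, None)
        self._data[key] = (time.time() + ttl, value)
        # Evict the oldest entries once the size bound is exceeded
        while len(self._data) > self.max_entries:
            self._data.popitem(last=False)

    def get(self, key):
        item = self._data.get(key)
        if item is None:
            return None
        expires_at, value = item
        if time.time() >= expires_at:
            # Expired: drop the entry and report a miss
            del self._data[key]
            return None
        return value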
def test_bust():
    # bust=True: every direct call recomputes, but cached() still reads the stored value
    backend = LocalCache()
    cache = Cache(backend, bust=True)
    c = cache("counter")(CallCounter())
    assert c() == 1
    assert c() == 2
    assert c.cached() == 2, 'not called if you just get the cached val'
    assert backend.get("counter")
def test_basic():
    # enabled=True: the first call is computed and stored, later calls hit the cache
    backend = LocalCache()
    cache = Cache(backend, enabled=True)
    c = cache("counter")(CallCounter())
    assert c() == 1, 'called the first time'
    assert c() == 1, 'not called the second time'
    assert c.refresh() == 2, 'refresh forces the wrapped callable to run again'
    assert backend.get("counter")
def test_disable():
    # enabled=False: every call goes straight through and nothing is stored
    backend = LocalCache()
    cache = Cache(backend, enabled=False)
    c = cache("counter")(CallCounter())
    assert c() == 1, 'called the first time'
    assert c() == 2, 'called the second time too'
    assert c.cached() == 3, 'called even when you get the cached val'
    assert not backend.get("counter")
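# CallCounter and Cache are not defined in these snippets, but their behavior is pinned
# down by the assertions in test_basic, test_bust and test_disable above. The sketch
# below reproduces exactly those semantics under that assumption; CacheSketch and
# _CachedCallable are hypothetical names, not the project's own classes, and the real
# decorator may do considerably more.
class CallCounter(object):
    """Minimal helper consistent with the asserts: returns 1, 2, 3, ... per call."""

    def __init__(self):
        self.calls = 0

    def __call__(self):
        self.calls += 1
        return self.calls


class CacheSketch(object):
    """Decorator factory: CacheSketch(backend)("key")(func) wraps func with caching."""

    def __init__(self, backend, enabled=True, bust=False):
        self.backend = backend
        self.enabled = enabled
        self.bust = bust

    def __call__(self, key):
        def decorator(func):
            return _CachedCallable(self.backend, key, func, self.enabled, self.bust)
        return decorator


class _CachedCallable(object):
    def __init__(self, backend, key, func, enabled, bust):
        self.backend = backend
        self.key = key
        self.func = func
        self.enabled = enabled
        self.bust = bust

    def refresh(self):
        # Always call the wrapped function; store the result when caching is on
        value = self.func()
        if self.enabled:
            self.backend.set(self.key, value)
        return value

    def cached(self):
        # Return the stored value; fall through to the function when disabled
        if not self.enabled:
            return self.func()
        return self.backend.get(self.key)

    def __call__(self):
        if not self.enabled:
            return self.func()
        if self.bust:
            # bust: every direct call recomputes and refreshes the stored value
            return self.refresh()
        value = self.backend.get(self.key)
        if value is None:
            value = self.refresh()
        return value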
def run(self, kwargs):
    # Verify argument validity
    self.__verify_arguments__()

    # Derive the cache name from the remote URL and build the cache path
    cache_name = self.get_cache_name()
    cache_path = os.path.join(self.cache_root, cache_name)

    # Get the cache object (will raise an exception if it doesn't exist)
    cache = LocalCache(cache_path, host_id=default_host_id)
    cache.DEBUG = self.DEBUG
    cache.lock(os.path.join(cache_path, self.dependency_platform,
                            self.dependency_name, self.dependency_version))

    # Try to get the entry from the cache
    entry = cache.get(self.dependency_platform, self.dependency_name,
                      self.dependency_version)

    # Record whether the entry came from the cache or had to be downloaded
    if entry is None:
        state = EntryState.DOWNLOADED
    else:
        state = EntryState.CACHE

    # The entry wasn't in the cache, so download it and verify the remote MD5
    if entry is None:
        print(self.format_entry_name() + ": Not in cache")
        full_url = s3.join_s3_url(self.dependency_repo, self.dependency_platform,
                                  self.dependency_name, self.dependency_version)
        print(self.format_entry_name() + ": Downloading " + full_url)

        # Configure and start the asynchronous downloader
        downloader = s3.AsyncS3Downloader(None)
        downloader.source_url = full_url
        downloader.destination_path = os.path.join(self.cache_root, "downloading") + os.sep
        downloader.start()

        # Wait for the downloader to finish
        # TODO: Do something with the reported progress
        while downloader.status != module.DONE:
            time.sleep(0.5)

        # Check for an exception; if there is one, bubble it up
        if downloader.exception is not None:
            raise downloader.exception

        print(self.format_entry_name() + ": Download complete")

        if downloader.result is None or len(downloader.result) == 0:
            raise EntryError(self.format_entry_name() +
                             ": Unable to find remote entry '" + full_url + "'")

        # Iterate over the result (downloaded files) and verify each checksum
        for item, checksum in downloader.result:
            local_file_checksum = mfile.md5_checksum(item)
            if checksum != local_file_checksum:
                print(self.format_entry_name() +
                      ": WARNING: Downloaded file does not match the checksum on the server")
                print(self.format_entry_name() + ": WARNING: local:\t" + str(local_file_checksum))
                print(self.format_entry_name() + ": WARNING: server:\t" + str(checksum))
            if self.dependency_unpack:
                print(self.format_entry_name() + ": Unpacking...")
            # put() will unlock the cache
            cache.put(item, self.dependency_platform, self.dependency_name,
                      self.dependency_version, unpack=self.dependency_unpack)

        entry = cache.get(self.dependency_platform, self.dependency_name,
                          self.dependency_version)
        if entry is None:
            raise EntryError(self.format_entry_name() + ": Error retrieving entry from cache.")

    cache.unlock(os.path.join(cache_path, self.dependency_platform,
                              self.dependency_name, self.dependency_version))

    # Entry is not None; return all the files listed in the entry that aren't the configuration files
    return [os.path.abspath(os.path.join(entry.path, f))
            for f in os.listdir(entry.path) if f != ".umpire"], state
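# The per-file verification in run() goes through mfile.md5_checksum. Assuming that
# helper is a thin wrapper around hashlib that hashes the file in chunks (an assumption;
# the real helper is not shown in these snippets), it would look roughly like this:
import hashlib


def md5_checksum_sketch(path, chunk_size=65536):
    """Hypothetical stand-in for mfile.md5_checksum: hex MD5 digest of a file,
    read in fixed-size chunks so large downloads never have to fit in memory."""
    digest = hashlib.md5()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()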
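# One caveat about the flow above: if the downloader raises, or the post-put() lookup
# fails, run() exits while the cache entry is still locked. The sketch below shows one
# way to guarantee an unlock on the error paths using the same lock()/unlock() calls.
# It assumes unlock() is harmless when put() has already released the lock, which the
# real cache may not guarantee; entry_lock is a hypothetical helper, not part of the
# project's API.
import contextlib


@contextlib.contextmanager
def entry_lock(cache, entry_key):
    """Hold a cache entry lock for the duration of a block and release it if the
    body raises, e.g. on a failed download or a failed post-put lookup."""
    cache.lock(entry_key)
    try:
        yield
    except Exception:
        cache.unlock(entry_key)
        raise

# Usage inside run() would be roughly:
#     with entry_lock(cache, entry_key):
#         ...the existing fetch-or-download logic, which still unlocks on success...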