def main(max_size, new_entries):
    """Benchmark `StatCache`: fill to capacity, read back, then overflow.

    `max_size` is the cache capacity; `new_entries` is the number of
    additional entries inserted once the cache is full, implicitly
    forcing evictions of old entries.
    """
    cache = ftp_stat_cache.StatCache()
    cache.resize(max_size)
    # Populate cache until it's full.  Insert keys in random order so
    # the timing isn't biased by a purely sequential access pattern.
    t1 = time.time()
    keys = range(max_size)
    random.shuffle(keys)
    for rand in keys:
        # The cache checks if entries start with "/".
        cache["/%d" % rand] = rand
    t2 = time.time()
    print_statistics("Filling cache with %d entries" % max_size, t1, t2)
    # Read every entry back once.
    for i in xrange(max_size):
        cache["/%d" % i]
    t3 = time.time()
    print_statistics("Reading the cache", t2, t3)
    # Now that the cache is full, add `new_entries` more entries,
    # implicitly replacing old entries.
    keys = range(new_entries)
    random.shuffle(keys)
    for rand in keys:
        # Keys at or above `max_size` guarantee these are genuinely new
        # entries, not replacements of existing keys.
        cache["/%d" % (max_size + rand)] = rand
    t4 = time.time()
    print_statistics("Replacing %d entries" % new_entries, t3, t4)
def __init__(self, host):
    """Remember `host` and set up parsing and caching machinery."""
    self._host = host
    self._path = host.path
    # Cache only lstat results; `stat` works locally on `lstat` results.
    self._lstat_cache = ftp_stat_cache.StatCache()
    # Start out with the Unix directory parser, and grant exactly one
    # chance to switch to a different parser should the default fail.
    self._parser = UnixParser()
    self._allow_parser_switching = True
def setUp(self):
    # Give every test a fresh, empty stat cache to work with.
    self.cache = ftp_stat_cache.StatCache()