def shrink(self, limit=None):
    """Shrink the cache until its total size fits within *limit*.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    entries = []
    overage = -limit  # bytes above the limit once all files are tallied
    for path in self.get_files():
        info = safe_stat(path)
        if info is None:
            continue  # file vanished or is unreadable; ignore it
        padded = byte_align(info.st_size, self._fragment_size)
        overage += padded
        entries.append((info.st_atime, padded, path))

    # Sorting by access time puts the least recently used files first,
    # so those are evicted before anything touched more recently.
    for _, padded, path in sorted(entries):
        if overage <= 0:
            break
        overage -= padded
        safe_remove(path)
def shrink(self, limit=None):
    """Reduce the cache's total size until it fits within *limit*.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    candidates = []
    for name in self.get_files():
        full_path = os.path.join(self.cache_dir, name)
        st = safe_stat(full_path)
        if st is None:
            continue  # skip files that disappeared or cannot be statted
        candidates.append((st.st_atime, st.st_size, full_path))

    # Evict in access-time order: least recently used first.
    candidates.sort()
    to_free = self.get_size_in_bytes() - limit
    for _, nbytes, full_path in candidates:
        if to_free <= 0:
            break
        to_free -= nbytes
        safe_remove(full_path)

    # A decoder file may now lack its solver_info companion (or vice
    # versa), so clean up any such orphans.
    self.remove_orphans()
def shrink(self, limit=None):  # noqa: C901
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    # Guard clauses: a readonly cache must not be modified, and the file
    # index is only available inside a `with cache` block.
    if self.readonly:
        logger.info("Tried to shrink a readonly cache.")
        return
    if self._index is None:
        warnings.warn("Cannot shrink outside of a `with cache` block.")
        return

    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    self._close_fd()

    records = []
    overage = -limit  # how many bytes over the limit the cache currently is
    for path in self.get_files():
        st = safe_stat(path)
        if st is None:
            continue  # file went away between listing and stat; skip it
        padded = byte_align(st.st_size, self._fragment_size)
        overage += padded
        records.append((st.st_atime, padded, path))

    # Least recently accessed entries are removed first.
    for _, padded, path in sorted(records):
        if overage <= 0:
            break
        overage -= padded
        self._index.remove_file_entry(path)
        safe_remove(path)

    self._index.sync()
def shrink(self, limit=None):  # noqa: C901
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if self.readonly:
        # Nothing to do; a readonly cache must never be modified.
        logger.info("Tried to shrink a readonly cache.")
        return

    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    self._close_fd()

    surplus = -limit  # bytes that must be freed to get under the limit
    records = []
    for path in self.get_files():
        st = safe_stat(path)
        if st is None:
            continue  # file disappeared or is unreadable; skip it
        rounded = byte_align(st.st_size, self._fragment_size)
        surplus += rounded
        records.append((st.st_atime, rounded, path))
    records.sort()  # least recently accessed files come first

    try:
        # Holding the index lock while evicting keeps index and files
        # consistent with concurrent processes.
        with self._index:
            for _, rounded, path in records:
                if surplus <= 0:
                    break
                surplus -= rounded
                self.remove_file(path)
    except TimeoutError:
        logger.debug("Not shrinking cache. Lock could not be acquired.")
def shrink(self, limit=None):  # noqa: C901
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if self.readonly:
        logger.info("Tried to shrink a readonly cache.")
        return

    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if isinstance(limit, str):
        limit = human2bytes(limit)

    self._close_fd()

    # Collect (atime, fragment-aligned size, path) for every cache file;
    # track how far over the limit the cache currently is.
    deficit = -limit
    listing = []
    for cache_path in self.get_files():
        meta = safe_stat(cache_path)
        if meta is None:
            continue  # file vanished between listing and stat
        occupied = byte_align(meta.st_size, self._fragment_size)
        deficit += occupied
        listing.append((meta.st_atime, occupied, cache_path))

    try:
        with self._index:
            # Oldest access time first: evict least recently used files.
            for _, occupied, cache_path in sorted(listing):
                if deficit <= 0:
                    break
                deficit -= occupied
                self.remove_file(cache_path)
    except TimeoutError:
        logger.debug("Not shrinking cache. Lock could not be acquired.")
def shrink(self, limit=None):
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    filelist = []
    for filename in os.listdir(self.cache_dir):
        key, ext = os.path.splitext(filename)
        if ext == self._SOLVER_INFO_EXT:
            continue
        path = os.path.join(self.cache_dir, filename)
        # Another process may remove cache files between listdir() and
        # stat(); skip entries that vanished instead of crashing.
        try:
            stat = os.stat(path)
        except OSError:
            continue
        filelist.append((stat.st_atime, key))
    # Remove the least recently accessed first.
    filelist.sort()

    excess = self.get_size_in_bytes() - limit
    for _, key in filelist:
        if excess <= 0:
            break
        decoder_path = os.path.join(
            self.cache_dir, key + self._DECODER_EXT)
        solver_info_path = os.path.join(
            self.cache_dir, key + self._SOLVER_INFO_EXT)
        # Each file is stat'ed and removed best-effort: a concurrent
        # shrink (or manual deletion) may have already removed it.
        try:
            excess -= os.stat(decoder_path).st_size
            os.remove(decoder_path)
        except OSError:
            pass
        if os.path.exists(solver_info_path):
            try:
                excess -= os.stat(solver_info_path).st_size
                os.remove(solver_info_path)
            except OSError:
                pass
def test_human2bytes():
    """Spot-check human2bytes on binary-unit size strings."""
    expected = {
        '1 MB': 1048576,
        '1.5 GB': 1610612736,
        '14 B': 14,
    }
    for text, nbytes in expected.items():
        assert human2bytes(text) == nbytes
def test_human2bytes():
    """Spot-check human2bytes, including inputs without a space
    before the unit."""
    expected = {
        "1 MB": 1048576,
        "1.5 GB": 1610612736,
        "14 B": 14,
        "1B": 1,
        "1 B": 1,
    }
    for text, nbytes in expected.items():
        assert human2bytes(text) == nbytes