def shrink(self, limit=None):
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    # Collect (atime, aligned size, path) for each cache file while
    # accumulating how far the total is over the limit.
    entries = []
    over_by = -limit
    for filename in self.get_files():
        info = safe_stat(filename)
        if info is None:
            continue
        size_on_disk = byte_align(info.st_size, self._fragment_size)
        over_by += size_on_disk
        entries.append((info.st_atime, size_on_disk, filename))

    # Oldest access times sort first, so eviction is least-recently-used.
    for _, size_on_disk, filename in sorted(entries):
        if over_by <= 0:
            break
        over_by -= size_on_disk
        safe_remove(filename)
def shrink(self, limit=None):
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    # (atime, aligned size, path) for every file that could be stat'ed.
    stats = ((path, safe_stat(path)) for path in self.get_files())
    fileinfo = [
        (st.st_atime, byte_align(st.st_size, self._fragment_size), path)
        for path, st in stats
        if st is not None
    ]

    # How many bytes we are over the limit right now.
    excess = sum(size for _, size, _ in fileinfo) - limit

    # Remove the least recently accessed first.
    fileinfo.sort()
    for _, size, path in fileinfo:
        if excess <= 0:
            break
        excess -= size
        safe_remove(path)
def get_size_in_bytes(self):
    """Returns the size of the cache in bytes as an int.

    Returns
    -------
    int
    """
    total = 0
    for filename in self.get_files():
        st = safe_stat(filename)
        # Files that vanished (or otherwise fail to stat) contribute nothing.
        if st is not None:
            total += byte_align(st.st_size, self._fragment_size)
    return total
def get_size_in_bytes(self):
    """Returns the size of the cache in bytes as an int.

    Returns
    -------
    int
    """
    # Each file's size is rounded up to the fragment size; unreadable
    # files (stat returned None) are skipped.
    aligned_sizes = (
        byte_align(st.st_size, self._fragment_size)
        for st in map(safe_stat, self.get_files())
        if st is not None
    )
    return sum(aligned_sizes)
def shrink(self, limit=None):  # noqa: C901
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    # Guard clauses: never mutate a readonly cache, and the index must
    # be open (entered via the cache context manager) before shrinking.
    if self.readonly:
        logger.info("Tried to shrink a readonly cache.")
        return
    if self._index is None:
        warnings.warn("Cannot shrink outside of a `with cache` block.")
        return

    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    self._close_fd()

    # Gather access time, aligned size, and path per file; `surplus`
    # tracks how many bytes must be freed to get back under the limit.
    entries = []
    surplus = -limit
    for filename in self.get_files():
        info = safe_stat(filename)
        if info is None:
            continue
        aligned = byte_align(info.st_size, self._fragment_size)
        surplus += aligned
        entries.append((info.st_atime, aligned, filename))

    entries.sort()  # least recently accessed first
    for _, aligned, filename in entries:
        if surplus <= 0:
            break
        surplus -= aligned
        self._index.remove_file_entry(filename)
        safe_remove(filename)

    self._index.sync()
def shrink(self, limit=None):  # noqa: C901
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if self.readonly:
        logger.info("Tried to shrink a readonly cache.")
        return

    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if is_string(limit):
        limit = human2bytes(limit)

    self._close_fd()

    # Build the eviction candidate list and compute how far over the
    # limit the cache currently is.
    excess = -limit
    candidates = []
    for path in self.get_files():
        st = safe_stat(path)
        if st is not None:
            aligned = byte_align(st.st_size, self._fragment_size)
            excess += aligned
            candidates.append((st.st_atime, aligned, path))

    # Least recently accessed entries come first after sorting.
    candidates.sort()

    # Removal requires the index lock; if it cannot be acquired in time
    # we simply skip shrinking rather than block.
    try:
        with self._index:
            for _, aligned, path in candidates:
                if excess <= 0:
                    break
                excess -= aligned
                self.remove_file(path)
    except TimeoutError:
        logger.debug("Not shrinking cache. Lock could not be acquired.")
def shrink(self, limit=None):  # noqa: C901
    """Reduces the size of the cache to meet a limit.

    Parameters
    ----------
    limit : int, optional
        Maximum size of the cache in bytes.
    """
    if self.readonly:
        logger.info("Tried to shrink a readonly cache.")
        return

    if limit is None:
        limit = rc.get('decoder_cache', 'size')
    if isinstance(limit, str):
        # Human-readable sizes like "512 MB" are converted to bytes.
        limit = human2bytes(limit)

    self._close_fd()

    # One pass over the cache files: record (atime, aligned size, path)
    # and tally the number of bytes above the limit.
    excess, fileinfo = -limit, []
    for path in self.get_files():
        st = safe_stat(path)
        if st is None:
            continue
        aligned = byte_align(st.st_size, self._fragment_size)
        excess += aligned
        fileinfo.append((st.st_atime, aligned, path))

    # Sorting by access time puts the least recently used entries first.
    fileinfo.sort()

    try:
        with self._index:
            for _, aligned, path in fileinfo:
                if excess <= 0:
                    break
                excess -= aligned
                self.remove_file(path)
    except TimeoutError:
        # Another process holds the lock; skip shrinking this time.
        logger.debug("Not shrinking cache. Lock could not be acquired.")
def test_byte_align():
    """byte_align rounds sizes up to the next multiple of the alignment."""
    cases = [
        ((5, 16), 16),   # rounds up to one fragment
        ((23, 8), 24),   # rounds up to next multiple
        ((13, 1), 13),   # alignment of 1 is a no-op
        ((0, 16), 0),    # zero stays zero
        ((32, 8), 32),   # exact multiple unchanged
    ]
    for (size, alignment), expected in cases:
        assert byte_align(size, alignment) == expected
def test_byte_align():
    """Check rounding-up behavior of byte_align for representative inputs."""
    expectations = {
        (5, 16): 16,
        (23, 8): 24,
        (13, 1): 13,
        (0, 16): 0,
        (32, 8): 32,
    }
    for args, want in expectations.items():
        assert byte_align(*args) == want