def test_simple_clear(self):
    """Add one file to the trash, purge it, and verify counters,
    the reported delta lists, the surviving directory contents and
    that no stored versions remain."""
    directory = os.path.join(self.files_folder, "e")
    path = os.path.join(directory, "f.txt")
    with self.trash.lock():
        # Move the file into the trash and check the reported delta.
        count, size, delta_files = self.trash.add(path)
        self.assertEquals(count, 1)
        self.assertEquals(size, 10)
        self.assertEquals(unify(delta_files, directory), ["f.txt"])
        # Permanently purge it; remove() reports (path, date) pairs.
        count, size, delta_files = self.trash.remove(path)
        self.assertEquals(count, 1)
        self.assertEquals(size, 10)
        removed_names = unify([pair[0] for pair in delta_files], directory)
        self.assertEquals(removed_names, ["f.txt"])
        # Only the untouched entries must survive on disk.
        leftovers = [os.path.relpath(f, directory)
                     for f in utils.search(directory, "*", "*")]
        leftovers.sort()
        self.assertEquals(leftovers, ["g.txt", "h.png", "j", "k"])
        # The trash must hold no versions of the purged file.
        versions = stamp.get_versions_list(self.trash.to_internal(path))
        self.assertTrue(len(versions) == 0)
def autoclean_by_date(self):
    """Purge trash entries older than ``self.days``.

    Scans every (path, deletion_time) pair in the trash and, under the
    trash lock, removes one version (the oldest, i.e. the last index)
    for each entry whose age in days exceeds the threshold.
    """
    clear_days = self.days
    file_time_list = self.trash.get_file_time_list()
    now = datetime.datetime.utcnow()
    # Lazy filter: only entries strictly older than the threshold.
    old_files = ((f, t) for f, t in file_time_list
                 if (now - t).days > clear_days)
    with self.trash.lock():
        for path, dtime in old_files:
            # BUG FIX: log message typo "becouse" -> "because".
            debug_fmt = ("Removing {path}(removed time: {dtime}) "
                         "because it's too old.")
            debug_line = debug_fmt.format(path=path, dtime=dtime)
            logging.debug(debug_line)
            path_int = self.trash.to_internal(path)
            # Versions are 0-indexed; the last index is the oldest one.
            last_version = len(stamp.get_versions_list(path_int)) - 1
            self.trash.remove(path, last_version)
def autoclean_by_same_count(self):
    """Purge surplus versions of identical files.

    For every trashed file holding more than ``self.same_count``
    versions, removes the oldest surplus versions so that at most
    ``self.same_count`` remain.
    """
    clean_same_count = self.same_count
    file_time = self.trash.get_file_time_list()
    dct = stamp.get_file_list_dict(file_time)
    with self.trash.lock():
        for path, versions in dct.iteritems():
            if len(versions) <= clean_same_count:
                continue
            # BUG FIX: the slice started at clean_same_count - 1, which
            # removed one version too many (left only same_count - 1).
            for dtime in reversed(versions[clean_same_count:]):
                # BUG FIX: typo "becouse" -> "because".
                debug_fmt = ("Removing {path} (removed time: {dtime}) "
                             "because there are a lot of same file")
                debug_line = debug_fmt.format(path=path, dtime=dtime)
                logging.debug(debug_line)
                path_int = self.trash.to_internal(path)
                # BUG FIX: versions are 0-indexed, so the oldest index is
                # len(...) - 1 (the bare len(...) was out of range); the
                # trailing dead "last_version -= 1" is dropped since the
                # value is recomputed every iteration anyway.
                last_version = len(stamp.get_versions_list(path_int)) - 1
                self.trash.remove(path, last_version)
def test_multi_remove(self):
    """Add four versions of one file, remove a middle version, restore
    it, then purge the rest and verify counters, sizes and that no
    versions remain."""
    directory = self.files_folder
    path = os.path.join(directory, "a.txt")
    with self.trash.lock():
        # Create four successive versions of the same file.
        self.trash.add(path)
        with open(path, "w") as f:
            f.write("1th\n")
        self.trash.add(path)
        with open(path, "w") as f:
            f.write("2th\n")
        self.trash.add(path)
        with open(path, "w") as f:
            f.write("3th\n")
        self.trash.add(path)
        # Remove only version index 1 and check the reported delta.
        count, size, delta_files = self.trash.remove(path, how_old=1)
        self.assertEquals(count, 1)
        self.assertEquals(size, 4)
        delta_files = [f_d[0] for f_d in delta_files]
        delta_files = unify(delta_files, directory)
        self.assertEquals(delta_files, ["a.txt"])
        # Restore the (new) version index 1 and check its contents.
        self.trash.restore(path, how_old=1)
        # BUG FIX: the file object was opened without ever being
        # closed (leaked descriptor); use a context manager.
        with open(path, "r") as f:
            line = f.read()
        self.assertEquals(line, "1th\n")
        # Purge every remaining version at once.
        count, size, delta_files = self.trash.remove(path)
        self.assertEquals(count, 2)
        self.assertEquals(size, 14)
        delta_files = [f_d[0] for f_d in delta_files]
        delta_files = unify(delta_files, directory)
        self.assertEquals(delta_files, ["a.txt", "a.txt"])
        path = self.trash.to_internal(path)
        self.assertTrue(len(stamp.get_versions_list(path)) == 0)
def autoclean_by_files_count(self):
    """Purge the oldest trash entries while the trash still holds
    ``self.count`` files or more."""
    limit = self.count
    entries = self.trash.get_file_time_list()
    with self.trash.lock():
        pos = 0
        while self.trash.get_count() >= limit:
            victim, removed_at = entries[pos]
            pos += 1
            surplus = self.trash.get_count() - limit + 1
            logging.debug(
                "Removing {path}(removed time: {dtime}) "
                "to free bukkit({excess} files excess)".format(
                    path=victim, dtime=removed_at, excess=surplus))
            internal = self.trash.to_internal(victim)
            # The oldest version sits at the last (0-indexed) position.
            oldest = len(stamp.get_versions_list(internal)) - 1
            self.trash.remove(victim, oldest)
def autoclean_by_trash_size(self):
    """Purge the oldest trash entries while the trash still occupies
    ``self.size`` bytes or more."""
    size_limit = self.size
    entries = self.trash.get_file_time_list()
    with self.trash.lock():
        pos = 0
        while self.trash.get_size() >= size_limit:
            victim, removed_at = entries[pos]
            pos += 1
            overshoot = self.trash.get_size() - size_limit
            # NOTE(review): the original message really has no space
            # before "to free" -- kept byte-for-byte.
            logging.debug(
                "Removing {path} (removed time: {dtime})"
                "to free bukkit({excess} bytes excess)".format(
                    path=victim, dtime=removed_at, excess=overshoot))
            internal = self.trash.to_internal(victim)
            # The oldest version sits at the last (0-indexed) position.
            oldest = len(stamp.get_versions_list(internal)) - 1
            self.trash.remove(victim, oldest)
def remove(self, path, how_old=-1):
    """Permanently delete an element from the trash.

    Returns a 3-tuple: the number of purged files, their total size,
    and the list of purged objects as (external_path, date) pairs.
    Must only be called while the trash is locked.

    Positional arguments:
    path -- path to the element inside the trash

    Keyword arguments:
    how_old -- version index, ordered by deletion date.
               Default: -1 (every version).

    Incrementally updates the cached trash size and file count.
    """
    purged = []
    path = self.to_internal(path)
    delta_count = 0
    delta_size = 0
    if os.path.isdir(path):
        # Directory: walk bottom-up so each directory is emptied
        # before being removed itself.
        for dirpath, _, filenames in os.walk(path, topdown=False):
            purged.append(dirpath)
            for name in filenames:
                full_path = os.path.join(dirpath, name)
                delta_count += 1
                delta_size += utils.get_files_size(full_path)
                purged.append(full_path)
                if not self.dryrun:
                    os.remove(full_path)
            if not self.dryrun:
                os.rmdir(dirpath)
    else:
        # Single file: either one specific version, or all of them.
        if how_old >= 0:
            targets = [stamp.get_version(path, how_old)]
        else:
            targets = [stamp.add_stamp(path, vers)
                       for vers in stamp.get_versions_list(path)]
        for full_path in targets:
            delta_count += 1
            delta_size += utils.get_files_size(full_path)
            purged.append(full_path)
            if not self.dryrun:
                os.remove(full_path)
    # Keep cached counters consistent, but only for real removals.
    if self.is_locked() and not self.dryrun:
        self._size -= delta_size
        self._count -= delta_count
    split_pairs = [stamp.split_stamp(f) for f in purged]
    external_pairs = [(self.to_external(f), d) for f, d in split_pairs]
    return delta_count, delta_size, external_pairs
def test_versions_list(self):
    """Versions must come back ordered newest-first."""
    expected = [self.dtime3, self.dtime2, self.dtime1]
    self.assertEqual(stamp.get_versions_list(self.path), expected)