def test_copy_file(self):
    path = self.temp_path("a/b/c.txt")
    fsutil.create_file(path, content="hello world")
    dest = self.temp_path("x/y/z.txt")
    fsutil.copy_file(path, dest)
    self.assertTrue(fsutil.is_file(path))
    self.assertTrue(fsutil.is_file(dest))
    self.assertEqual(fsutil.get_file_hash(path), fsutil.get_file_hash(dest))
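# The test above relies on copy_file creating the intermediate "x/y/"
# directories on its own. A standalone sketch of the same flow (the paths
# are illustrative, and the overwrite keyword is an assumption about the
# fsutil version in use):
import fsutil

fsutil.create_file("/tmp/demo/a/b/c.txt", content="hello world")
fsutil.copy_file("/tmp/demo/a/b/c.txt", "/tmp/demo/x/y/z.txt")
# copying onto an existing destination requires opting in:
fsutil.copy_file("/tmp/demo/a/b/c.txt", "/tmp/demo/x/y/z.txt", overwrite=True)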
def list_dir(request):
    path = request.GET.get('path', '')
    # query params arrive as strings; cast so that '?files=0' is falsy
    with_dirs = int(request.GET.get('dirs', 1))
    with_files = int(request.GET.get('files', 0))
    node = Node.parse(relative=path)
    result = []
    if not node.is_exist or node.is_file:
        raise Http404
    if with_dirs:
        try:
            folders = fsutil.list_dirs(node.absolute)
        except FileNotFoundError:
            raise Http404
        for i, folder in enumerate(folders):
            relative_path = folder.replace(base_files_dir, '')
            id_path = '/'.join(fsutil.split_path(relative_path))
            folders[i] = {
                'id': hashlib.sha1(folder.encode('utf-8')).hexdigest(),
                'load': reverse('bloom:file_manager:list_dir', params={'path': id_path}),
                'class': 'folder',
                'children': len(fsutil.list_dirs(folder)) > 0,
                'text': fsutil.get_filename(folder),
                'url': static(relative_path),
                'list': reverse('bloom:file_manager:list_dir', params={
                    'path': id_path,
                    'files': 1,
                }),
            }
        result.extend(folders)
    if with_files:
        try:
            files = fsutil.list_files(node.absolute)
        except FileNotFoundError:
            raise Http404
        for i, file in enumerate(files):
            files[i] = {
                'id': fsutil.get_file_hash(file, 'sha1'),
                'class': 'file ex-' + fsutil.get_file_extension(file),
                'text': fsutil.get_filename(file),
                'url': static(file.replace(static_dir, '')),
            }
        result.extend(files)
    if not path and not with_files:
        result = {
            'id': 'root',
            'text': 'Root Folder',
            'children': result,
            'state': {'opened': True, 'selected': True},
            'list': reverse('bloom:file_manager:list_dir', params={'files': 1}),
        }
    return JsonResponse(result, safe=False)
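# A minimal sketch of how this view could be wired up. The URL name and the
# 'bloom:file_manager' namespacing are assumptions inferred from the
# reverse() calls above, not confirmed project structure (reverse() here also
# appears to be a project helper that accepts a 'params' querystring dict,
# unlike django.urls.reverse).
from django.urls import path as url_path

app_name = 'file_manager'

urlpatterns = [
    # GET /list/?path=<relative-path>&dirs=1&files=0
    url_path('list/', list_dir, name='list_dir'),
]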
def test_get_file_hash(self):
    path = self.temp_path("a/b/c.txt")
    fsutil.create_file(path, content="Hello World")
    file_hash = fsutil.get_file_hash(path)  # defaults to md5
    self.assertEqual(file_hash, "b10a8db164e0754105b7a99be72e3fe5")
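# get_file_hash defaults to md5; an explicit algorithm name can be passed as
# the second argument, as the list_dir view above does with 'sha1'. A quick
# sketch (the path is illustrative):
import fsutil

fsutil.create_file("/tmp/demo.txt", content="Hello World")
md5_hash = fsutil.get_file_hash("/tmp/demo.txt")           # md5 by default
sha1_hash = fsutil.get_file_hash("/tmp/demo.txt", "sha1")  # explicit sha1
print(md5_hash, sha1_hash)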
import json
import os

import fsutil

# DateTimeEncoder is assumed to be a project-level json.JSONEncoder subclass
# that serializes the datetime objects returned by get_file_creation_date.


def list_files(startpath, filter=None, catfunc=None, wbufferfilename=".data.json"):
    """
    List all files (subdirectories included) under 'startpath',
    filter them by the extensions given in 'filter' - e.g. ['jpg', 'png'] -
    categorize each file with 'catfunc' (default category: "*"),
    then write a json index file (wbufferfilename) into each scanned directory.
    """
    for root, dirs, files in os.walk(startpath):
        # skip hidden files and directories
        files = [f for f in files if not f[0] == "."]
        dirs[:] = [d for d in dirs if not d[0] == "."]
        # tree indentation for the cosmetic prints below
        level = root.replace(startpath, "").count(os.sep)
        indent = " " * 1 * level
        output_string = "{}{}/".format(indent, os.path.basename(root))
        subindent = " " * 1 * (level + 1)
        # init dict for json
        wbuffer = dict()
        wbuffer["_root"] = {
            "root": root,
            "startpath": root.replace(startpath, ""),
            "level": level,
        }
        # cosmetic print
        print(f"{output_string} {root} {filter}")
        # scan files
        for f in files:
            path = f"{root}/{f}"
            ext = os.path.splitext(f)[1][1:]
            file_noext = os.path.splitext(f)[0]
            # skip if ext not in filter
            if filter and ext not in filter:
                continue
            # get file information
            f_date = fsutil.get_file_creation_date(path)
            f_size = fsutil.get_file_size(path)
            f_hash = fsutil.get_file_hash(path)
            # get file category by function and write dict for json
            if catfunc:
                cat = catfunc(file_noext, ext)
            else:
                cat = "*"
            _tmp = {
                "file": f,
                "ext": ext,
                "date": f_date,
                "size": f_size,
                "hash": f_hash,
            }
            if cat in wbuffer:
                wbuffer[cat].append(_tmp)
            else:
                wbuffer[cat] = [_tmp]
            # cosmetic
            print(f"{subindent} {ext} > {f} > {f_date} > {f_size} bytes > {f_hash}")
        # write json
        wbuffer_file = f"{root}/{wbufferfilename}"
        with open(wbuffer_file, "w+", encoding="utf-8") as fp:
            json.dump(wbuffer, fp, cls=DateTimeEncoder)
        # cosmetic
        print(f"file {wbuffer_file} saved")
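# Example invocation, a sketch under assumptions: the directory layout and the
# categorization rule are made up for illustration. catfunc receives the
# extensionless filename and the extension, as list_files calls it above;
# here files are grouped by the first dash-separated token of their name
# (e.g. "holiday-001.jpg" -> category "holiday"), everything else under "*".
def category_by_prefix(name, ext):
    return name.split("-")[0] if "-" in name else "*"

list_files("/data/photos", filter=["jpg", "png"], catfunc=category_by_prefix)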