def calc_dir_sizes(filestat_file, dirstat_file):
    """Aggregate per-file sizes into per-directory totals.

    Reads "path: size" lines from *filestat_file* (via
    FileUtil.parse_path_sizes), adds each file's size to every ancestor
    directory, and writes the sorted "path: size" lines to *dirstat_file*.

    Fix vs. original: the parent walk previously mixed os.path.dirname()
    (first step) with str.rpartition("/") (later steps).  Those disagree for
    absolute paths -- os.path.dirname("/a") is "/" while
    "/a".rpartition("/")[0] is "" -- so the root directory was credited
    inconsistently.  The walk now uses os.path.dirname() throughout.
    Behavior for relative paths (as used by the sibling unit test) is
    unchanged.
    """
    lines = FileUtil.read_all(filestat_file).splitlines()
    dir2size = {}
    for filepath, size in FileUtil.parse_path_sizes(lines):
        dir2size[filepath] = size
        dirpath = os.path.dirname(filepath)
        while dirpath:
            dir2size[dirpath] = dir2size.get(dirpath, 0) + size
            parent = os.path.dirname(dirpath)
            if parent == dirpath:
                # dirname of the root ("/") is itself -- stop after counting it.
                break
            dirpath = parent
    iter_lines = FileUtil.combine_path_sizes(sorted(dir2size.items()))
    FileUtil.write_all(dirstat_file, iter_lines)
def test_01_file_util(self):
    """Round-trip (path, size) pairs through FileUtil.

    Verifies that combine_path_sizes -> write_all -> read_all ->
    parse_path_sizes reproduces the original data.

    Fix vs. original: the temporary data file is now removed in a
    try/finally, so the test no longer leaks an artifact next to the test
    module; dict([...]) is replaced by a dict comprehension.
    """
    import os
    from fileutil import FileUtil

    test_file = __file__ + ".test_01.data"
    path_sizes = [
        ("path1", 1),
        ("path2", 2),
        ("path3", 3),
    ]
    try:
        iter_lines = FileUtil.combine_path_sizes(path_sizes)
        FileUtil.write_all(test_file, iter_lines)
        data = FileUtil.read_all(test_file)
    finally:
        # Clean up even if write_all/read_all raise.
        if os.path.exists(test_file):
            os.remove(test_file)
    self.assertEqual("""
path1: 1
path2: 2
path3: 3
""".strip(), data.strip())
    path2size = {path: size
                 for path, size in FileUtil.parse_path_sizes(data.splitlines())}
    self.assertEqual(path2size['path2'], 2)
def scan_file_sizes(from_path, stat_file):
    """Walk *from_path*, collecting (path, size) pairs, and write them as
    formatted lines to *stat_file*."""
    path_sizes = FileUtil.walk_path_sizes(from_path)
    FileUtil.write_all(stat_file, FileUtil.combine_path_sizes(path_sizes))
def write_d3_treemap_json(json_file, node):
    """Serialize *node*'s children (node.name2childs values) through
    to_json() and write the indented JSON document to *json_file*."""
    children = node.name2childs.values()
    FileUtil.write_all(json_file, json.dumps(to_json(children), indent=4))
def write_d3_rect_json(json_file, rects):
    """Write *rects* to *json_file* as pretty-printed (indent=4) JSON."""
    payload = json.dumps(rects, indent=4)
    FileUtil.write_all(json_file, payload)