def materialise_structure(
    structure: Folder,
    path: Path,
    dry_run=True,
    replacements=None,
    ignore_existing_folders=False,
):
    """Materialise (or dry run) the folder structure rooted at *structure*.

    Recursively creates folders and writes file contents under *path*,
    applying content replacements when both the caller and the file supply
    them.

    Args:
        structure: Folder tree to write to disk.
        path: Base directory the tree is created under.
        dry_run: When True, only log what would be created.
        replacements: Optional replacement values applied to file contents
            via replace_replacements.
        ignore_existing_folders: When True, reuse folders that already
            exist instead of aborting.
    """
    logger.debug("Replacements: %s", replacements)
    location = path / Path(structure.as_posix())
    logger.info("🗃 %s", location)
    if not dry_run:
        try:
            location.mkdir()
        except FileExistsError:
            if ignore_existing_folders:
                logger.warning(
                    "Folder %s already exists and will not be recreated", location)
            else:
                logger.error(
                    "The destination folder %s already exists. Either delete it, use a new path or pass the --ignore-existing-folders flag",
                    location,
                )
                sys.exit(-1)
    for item in structure.iterdir():
        if item.is_dir():
            # BUG FIX: previously the recursive call dropped
            # ignore_existing_folders, so nested pre-existing folders
            # aborted the run even when the flag was set.
            materialise_structure(
                item,
                path,
                dry_run=dry_run,
                replacements=replacements,
                ignore_existing_folders=ignore_existing_folders,
            )
        else:
            location = path / Path(item.as_posix())
            logger.info(
                "📁 %s (%s characters)",
                location,
                len("".join(item.contents)),
            )
            if replacements is not None and item.replacements is not None:
                new_contents = [
                    replace_replacements(item.replacements, replacements, line)
                    for line in item.contents
                ]
                item.set_contents("\n".join(new_contents))
                logger.info("Replacements applied to %s", item.name)
            if not dry_run:
                if location.exists():
                    logger.error(
                        "File %s already exists at that location. Delete it first",
                        location,
                    )
                    sys.exit(-1)
                with location.open("w") as destination:
                    for line in item.contents:
                        destination.write(line)
                        destination.write("\n")
def build_file_markdown(current: Folder, base="", max_length=15):
    """Generate markdown out of the folder structure"""
    lines: List[str] = []
    for entry in current.iterdir():
        if entry.is_dir():
            # Recurse into sub-folders, extending the path prefix.
            lines += build_file_markdown(
                entry, base=base + "/" + entry.basename, max_length=max_length)
            continue
        # Strip a single leading slash the first time a file is emitted.
        if base.startswith("/"):
            base = base[1:]
        basename = f"{entry.name}" if base == "" else f"{base}/{entry.name}"
        lines += ["", f"# `{basename}`"]
        if entry.contents is not None:
            lines += ["", f"```{language(entry.suffix)}"]
            if entry.suffix != "md":
                if len(entry.contents) > max_length >= 0:
                    # Truncate long files and mark the cut with an ellipsis.
                    lines += entry.contents[0:max_length - 1]
                    lines += ["", ellipsis(entry.suffix)]
                else:
                    lines += entry.contents
            else:
                lines += [
                    "Content from Markdown files is ignored, since the output would break parsing"
                ]
            lines += ["```"]
    return lines
def test_randomised_tree_descent(execution_number, depth, width):
    """Round-trip a random Folder tree through tree() and TreeParser.

    Fixes: list() instead of a copy comprehension, removed the duplicated
    print of the randomised tree, and print labels now precede the values
    they describe.
    """
    # Pool of unique single-character node names, consumed via pop().
    sl = list(ascii_uppercase * RANGE * RANGE * RANGE * RANGE)
    random.shuffle(sl)

    def random_tree(depth, width):
        # Leaves are plain Files; at inner levels a random non-empty
        # subset of slots is replaced by sub-Folders.
        if depth == 0:
            return [File(sl.pop()) for _ in range(width)]
        folder_indexes = random.sample(range(width),
                                       random.randint(0, width - 1) + 1)
        results = [File(sl.pop()) for _ in range(width)]
        for index in folder_indexes:
            results[index] = Folder(sl.pop(),
                                    contents=random_tree(depth - 1, width))
        return results

    randomised = Folder("", contents=random_tree(1 + depth, 1 + width))
    all_lines = list(tree(randomised))
    parsed = TreeParser(all_lines)()
    print("Randomised:")
    print(randomised)
    print("\n".join(all_lines))
    print("Parsed:")
    print(parsed)
    print("\n".join(tree(parsed)))
    assert parsed == randomised
def test_randomised_idempotent(execution_number, depth, width):
    """Markdown round-trip: structure -> markdown -> structure -> markdown.

    Fixes: removed the dead, unused ``all_lines`` computation, used
    list() instead of a copy comprehension, and actually asserts the
    idempotence the test is named for — ``new_markdown`` was computed
    but never checked.
    """
    sl = list(ascii_uppercase * 2 * RANGE * RANGE * RANGE * RANGE)
    random.shuffle(sl)

    def random_tree(depth, width):
        """More complex random tree. We need to make sure there are no
        duplicate files or folders, and that they have content"""

        def purge_repeated_files(results):
            def rename_if_needed(fil, nrf):
                # Double the name until it no longer collides.
                if fil.name in nrf:
                    fil._rename(fil.name + fil.name)
                    rename_if_needed(fil, nrf)

            non_repeated_filenames = []
            cleaned_results = []
            for fil in results:
                rename_if_needed(fil, non_repeated_filenames)
                non_repeated_filenames += [fil.name]
                cleaned_results += [fil]
            return cleaned_results

        if depth == 0:
            results = [
                File(sl.pop()).set_contents("contents=" + sl.pop())
                for _ in range(width)
            ]
            return purge_repeated_files(results)
        folder_indexes = random.sample(range(width),
                                       random.randint(0, width - 1) + 1)
        results = [
            File(sl.pop()).set_contents("contents=" + sl.pop())
            for _ in range(width)
        ]
        cleaned_results = purge_repeated_files(results)
        for index in folder_indexes:
            cleaned_results[index] = Folder(sl.pop(),
                                            contents=random_tree(
                                                depth - 1, width))
        return purge_repeated_files(cleaned_results)

    randomised = Folder("", contents=random_tree(1 + depth, 1 + width))
    markdown = "\n".join(
        build_markdown(build_tree(randomised,
                                  ignore_globs=None,
                                  include_globs=None),
                       max_length=15))
    structure = _process_markdown(markdown, replacements=None)
    new_markdown = "\n".join(
        build_markdown(build_tree(structure,
                                  ignore_globs=None,
                                  include_globs=None),
                       max_length=15))
    assert structure == randomised
    # BUG FIX: check idempotence — a second render must reproduce the
    # first markdown exactly.
    assert new_markdown == markdown
def test_traverse_path():
    """Traversing a hand-built hierarchy must return an equal hierarchy."""
    # The File/Folder hierarchy in path_traverser emulates pathlib's Path,
    # so this needs to match
    empty = File.EMPTY_CONTENTS.format(mode="r")
    inner = Folder(
        "bar",
        [File("baz").set_contents(empty),
         File("foobar").set_contents(empty)],
    )
    mbp = Folder("base", [File("foo").set_contents(empty), inner])
    assert Traverser()(mbp) == mbp
def _handle_folder(self):
    """Append a Folder node derived from the previously seen line."""
    logger.debug("Adding a folder for Prev: %s", self._previous_line)
    # The new folder inherits the basename of the latest result, if any.
    basename = self._result[-1].basename if self._result else ""
    self._result += [Folder(nodename(self._previous_line), basename=basename)]
def test_tree_descent():
    """tree() output parsed by TreeParser reproduces the original folder."""
    original = Folder(
        "",
        [
            File("foo"),
            Folder("bar", [File("baz"), File("foobar")]),
            File("zzz"),
            Folder("meh", [File("buzz")]),
        ],
    )
    rendered = list(tree(original))
    print("Original")
    print("\n".join(rendered))
    parsed = TreeParser(rendered)()
    print(parsed)
    rendered = list(tree(parsed))
    print("parsed back")
    print("\n".join(rendered))
    assert parsed == original
def random_tree(depth, width):
    """Build a random File/Folder tree of the given depth and width.

    Consumes node names from the surrounding-scope pool ``sl`` via pop().
    """
    if depth == 0:
        return [File(sl.pop()) for _ in range(width)]
    # Pick a non-empty random subset of slots to become sub-folders.
    # NOTE: randint is evaluated before sample, matching the original
    # argument-evaluation order so random sequences are identical.
    branch_count = random.randint(0, width - 1) + 1
    folder_slots = random.sample(range(width), branch_count)
    nodes = [File(sl.pop()) for _ in range(width)]
    for slot in folder_slots:
        nodes[slot] = Folder(sl.pop(), contents=random_tree(depth - 1, width))
    return nodes
def _traverser(self, base_path, depth=0):
    """Recursively walk *base_path* and build a Folder tree of its entries."""
    children = []
    entries = list(base_path.iterdir())
    logger.debug("Here is the first level: %s", entries)
    for entry in entries:
        # Entries matching include_globs are always kept; otherwise skip
        # those matching ignore_globs or hidden (dot-prefixed) names.
        # Short-circuit order matches the original nested checks.
        if not matches_glob(entry, self.include_globs) and (
                matches_glob(entry, self.ignore_globs)
                or entry.name.startswith(".")):
            continue
        if entry.is_dir():
            children += [self._handle_dir(entry, depth + 1)]
        else:
            children += [self._handle_file(entry, base_path)]
    return Folder(base_path.name, children, depth)
def random_tree(depth, width):
    """More complex random tree. We need to make sure there are no
    duplicate files or folders, and that they have content"""

    def purge_repeated_files(nodes):
        # Rename collisions in order of appearance; a colliding name is
        # doubled repeatedly until it no longer clashes (same effect as
        # the original recursive rename).
        seen = []
        unique_nodes = []
        for node in nodes:
            while node.name in seen:
                node._rename(node.name + node.name)
            seen += [node.name]
            unique_nodes += [node]
        return unique_nodes

    def fresh_files():
        # Each File gets a popped name and non-empty popped contents.
        return [
            File(sl.pop()).set_contents("contents=" + sl.pop())
            for _ in range(width)
        ]

    if depth == 0:
        return purge_repeated_files(fresh_files())
    # randint is evaluated before sample, as in the original, so the
    # random sequence (and hence the generated tree) is unchanged.
    folder_slots = random.sample(range(width),
                                 random.randint(0, width - 1) + 1)
    nodes = purge_repeated_files(fresh_files())
    for slot in folder_slots:
        nodes[slot] = Folder(sl.pop(), contents=random_tree(depth - 1, width))
    return purge_repeated_files(nodes)