def test_nobranch(self):
    tree = CleanTree(WorkingTree(self._root_dir))
    self.assertWalkEqual(
        tree.walk("."),
        [
            (".", ["data_dir"], ["bar", "тест", "code.py", "foo"]),
            (join("data_dir"), ["data_sub_dir"], ["data"]),
            (join("data_dir", "data_sub_dir"), [], ["data_sub"]),
        ],
    )
    self.assertWalkEqual(
        tree.walk(join("data_dir", "data_sub_dir")),
        [(join("data_dir", "data_sub_dir"), [], ["data_sub"])],
    )
def __init__(self, root_dir=None):
    from dvc.state import State
    from dvc.lock import make_lock
    from dvc.scm import SCM
    from dvc.cache import Cache
    from dvc.data_cloud import DataCloud
    from dvc.repo.metrics import Metrics
    from dvc.scm.tree import WorkingTree
    from dvc.repo.tag import Tag
    from dvc.utils import makedirs

    root_dir = self.find_root(root_dir)

    self.root_dir = os.path.abspath(os.path.realpath(root_dir))
    self.dvc_dir = os.path.join(self.root_dir, self.DVC_DIR)

    self.config = Config(self.dvc_dir)

    self.scm = SCM(self.root_dir)

    self.tree = CleanTree(WorkingTree(self.root_dir))

    self.tmp_dir = os.path.join(self.dvc_dir, "tmp")
    makedirs(self.tmp_dir, exist_ok=True)

    hardlink_lock = self.config.config["core"].get("hardlink_lock", False)
    self.lock = make_lock(
        os.path.join(self.dvc_dir, "lock"),
        tmp_dir=os.path.join(self.dvc_dir, "tmp"),
        hardlink_lock=hardlink_lock,
        friendly=True,
    )

    # NOTE: storing state and link_state in the repository itself to avoid
    # any possible state corruption in 'shared cache dir' scenario.
    self.state = State(self, self.config.config)

    core = self.config.config[Config.SECTION_CORE]

    level = core.get(Config.SECTION_CORE_LOGLEVEL)
    if level:
        logger.setLevel(level.upper())

    self.cache = Cache(self)
    self.cloud = DataCloud(self)

    self.metrics = Metrics(self)
    self.tag = Tag(self)

    self._ignore()
def test_nobranch(self):
    tree = CleanTree(LocalRemoteTree(None, {"url": self._root_dir}))
    self.assertWalkEqual(
        tree.walk("."),
        [
            (".", ["data_dir"], ["bar", "тест", "code.py", "foo"]),
            (join("data_dir"), ["data_sub_dir"], ["data"]),
            (join("data_dir", "data_sub_dir"), [], ["data_sub"]),
        ],
    )
    self.assertWalkEqual(
        tree.walk(join("data_dir", "data_sub_dir")),
        [(join("data_dir", "data_sub_dir"), [], ["data_sub"])],
    )
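# A minimal sketch of the .dvcignore filtering that CleanTree layers on top
# of the raw tree (the repo path and ignore pattern below are illustrative,
# not taken from the tests above): with a .dvcignore at the root listing
# "data_dir", walk() would no longer yield that directory or its contents.
root = "/path/to/repo"  # hypothetical checkout containing a .dvcignore
tree = CleanTree(LocalRemoteTree(None, {"url": root}))
for dirpath, dirnames, filenames in tree.walk(root):
    print(dirpath, dirnames, filenames)  # ignored entries are filtered out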
def test_path_object_and_str_are_valid_types_get_mtime_and_size(
    path, repo_dir
):
    tree = CleanTree(WorkingTree(repo_dir.root_dir))

    time, size = get_mtime_and_size(path, tree)
    object_time, object_size = get_mtime_and_size(PathInfo(path), tree)

    assert time == object_time
    assert size == object_size
def tree(self, tree):
    if is_working_tree(tree) or tree.tree_root == self.root_dir:
        root = None
    else:
        root = self.root_dir
    self._tree = tree if isinstance(tree, CleanTree) else CleanTree(tree, root)
    # Our graph cache is no longer valid, as it was based on the previous
    # tree.
    self._reset()
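# Usage sketch for the setter above (assumes a DVC repo in the cwd; the
# branch name is illustrative). Assignment goes through the setter: a
# non-working tree rooted outside the repo is wrapped as
# CleanTree(tree, self.root_dir) so .dvcignore lookups stay anchored to the
# repo root, and _reset() drops the stages/graph cached for the old tree.
repo = Repo(".")
repo.tree = repo.scm.get_tree("master")
assert isinstance(repo.tree, CleanTree)
stages = repo.stages  # recomputed against the newly assigned tree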
def _ls_files_repo(path_info, recursive=None):
    from dvc.compat import fspath
    from dvc.ignore import CleanTree
    from dvc.path_info import PathInfo
    from dvc.scm.tree import WorkingTree

    if not os.path.exists(fspath(path_info)):
        return []

    files = []
    tree = CleanTree(WorkingTree(path_info))
    try:
        for dirpath, dirnames, filenames in tree.walk(path_info):
            files.extend(PathInfo(dirpath, f) for f in filenames)
            if not recursive:
                files.extend(PathInfo(dirpath, d) for d in dirnames)
                break
    except NotADirectoryError:
        if os.path.isfile(fspath(path_info)):
            files = [path_info]

    return [_get_fs_node(f) for f in files]
def _ls_files_repo(target_path_info, recursive=None):
    from dvc.compat import fspath
    from dvc.ignore import CleanTree
    from dvc.path_info import PathInfo
    from dvc.scm.tree import WorkingTree

    if not os.path.exists(fspath(target_path_info)):
        return []

    files = []
    tree = CleanTree(WorkingTree(target_path_info))
    try:
        for dirpath, dirnames, filenames in tree.walk(target_path_info):
            files.extend(PathInfo(dirpath, f) for f in filenames)
            if not recursive:
                files.extend(PathInfo(dirpath, d) for d in dirnames)
                break
    except NotADirectoryError:
        if os.path.isfile(fspath(target_path_info)):
            return [target_path_info]

    return files
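# Usage sketch for the listing helpers above (the path is illustrative):
# a non-recursive call returns PathInfo entries for the directory's
# immediate files and subdirectories, thanks to the early break; pointing
# it at a plain file returns just that path.
entries = _ls_files_repo(PathInfo("/path/to/repo/data_dir"), recursive=False)
for entry in entries:
    print(entry)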
def test_path_object_and_str_are_valid_types_get_mtime_and_size(tmp_dir):
    tmp_dir.gen(
        {"dir": {"dir_file": "dir file content"}, "file": "file_content"}
    )
    tree = CleanTree(WorkingTree(tmp_dir))

    time, size = get_mtime_and_size("dir", tree)
    object_time, object_size = get_mtime_and_size(PathInfo("dir"), tree)

    assert time == object_time
    assert size == object_size

    time, size = get_mtime_and_size("file", tree)
    object_time, object_size = get_mtime_and_size(PathInfo("file"), tree)

    assert time == object_time
    assert size == object_size
def test_ignore_on_branch(tmp_dir, scm, dvc):
    tmp_dir.scm_gen({"foo": "foo", "bar": "bar"}, commit="add files")

    scm.checkout("branch", create_new=True)
    tmp_dir.scm_gen(DvcIgnore.DVCIGNORE_FILE, "foo", commit="add ignore")

    scm.checkout("master")
    assert _files_set(".", dvc.tree) == {"./foo", "./bar"}

    tree = CleanTree(scm.get_tree("branch"))
    assert _files_set(".", tree) == {
        to_posixpath(os.path.join(tree.tree_root, DvcIgnore.DVCIGNORE_FILE)),
        to_posixpath(os.path.join(tree.tree_root, "bar")),
    }
def test(self):
    tree = CleanTree(LocalRemoteTree(None, {"url": self.root_dir}))
    file_time, file_size = get_mtime_and_size(self.DATA, tree)
    dir_time, dir_size = get_mtime_and_size(self.DATA_DIR, tree)

    actual_file_size = os.path.getsize(self.DATA)
    actual_dir_size = os.path.getsize(self.DATA) + os.path.getsize(
        self.DATA_SUB
    )

    self.assertIs(type(file_time), str)
    self.assertIs(type(file_size), str)
    self.assertEqual(file_size, str(actual_file_size))
    self.assertIs(type(dir_time), str)
    self.assertIs(type(dir_size), str)
    self.assertEqual(dir_size, str(actual_dir_size))
def test(self):
    tree = CleanTree(WorkingTree(self.root_dir))
    file_time, file_size = get_mtime_and_size(self.DATA, tree)
    dir_time, dir_size = get_mtime_and_size(self.DATA_DIR, tree)

    actual_file_size = os.path.getsize(self.DATA)
    actual_dir_size = os.path.getsize(self.DATA) + os.path.getsize(
        self.DATA_SUB
    )

    self.assertIs(type(file_time), str)
    self.assertIs(type(file_size), str)
    self.assertEqual(file_size, str(actual_file_size))
    self.assertIs(type(dir_time), str)
    self.assertIs(type(dir_size), str)
    self.assertEqual(dir_size, str(actual_dir_size))
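# Direct usage sketch for get_mtime_and_size (the path is illustrative):
# both values come back as strings, and for a directory the size aggregates
# every file the tree can see, so .dvcignore'd files are excluded.
tree = CleanTree(WorkingTree("/path/to/repo"))  # hypothetical repo root
mtime, size = get_mtime_and_size("/path/to/repo/data_dir", tree)
assert isinstance(mtime, str) and isinstance(size, str)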
def test_path_object_and_str_are_valid_types_get_mtime_and_size(tmp_dir):
    tmp_dir.gen(
        {"dir": {"dir_file": "dir file content"}, "file": "file_content"}
    )
    tree = CleanTree(LocalRemoteTree(None, {"url": os.fspath(tmp_dir)}))

    time, size = get_mtime_and_size("dir", tree)
    object_time, object_size = get_mtime_and_size(PathInfo("dir"), tree)

    assert time == object_time
    assert size == object_size

    time, size = get_mtime_and_size("file", tree)
    object_time, object_size = get_mtime_and_size(PathInfo("file"), tree)

    assert time == object_time
    assert size == object_size
def _ls(repo, path_info, recursive=None, dvc=False):
    from dvc.ignore import CleanTree
    from dvc.repo.tree import DvcTree
    from dvc.scm.tree import WorkingTree

    if dvc:
        tree = DvcTree(repo)
    else:
        tree = CleanTree(WorkingTree(repo.root_dir))

    ret = {}
    try:
        for root, dirs, files in tree.walk(path_info.fspath):
            for fname in files:
                info = PathInfo(root) / fname
                path = str(info.relative_to(path_info))
                ret[path] = {
                    "isout": dvc,
                    "isdir": False,
                    "isexec": False if dvc else tree.isexec(info.fspath),
                }

            if not recursive:
                for dname in dirs:
                    info = PathInfo(root) / dname
                    path = str(info.relative_to(path_info))
                    ret[path] = {
                        "isout": tree.isdvc(info.fspath) if dvc else False,
                        "isdir": True,
                        "isexec": False if dvc else tree.isexec(info.fspath),
                    }
                break
    except NotADirectoryError:
        return {
            path_info.name: {
                "isout": dvc,
                "isdir": False,
                "isexec": False if dvc else tree.isexec(path_info.fspath),
            }
        }
    except FileNotFoundError:
        return {}

    return ret
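# A sketch of the mapping _ls builds (repo and contents hypothetical): keys
# are paths relative to the queried path_info, values describe each entry.
repo = Repo(".")
listing = _ls(repo, PathInfo(repo.root_dir), recursive=False, dvc=False)
# e.g. {"foo": {"isout": False, "isdir": False, "isexec": False}, ...}
for path, info in listing.items():
    print(path, info)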
class Repo(object):
    DVC_DIR = ".dvc"

    from dvc.repo.destroy import destroy
    from dvc.repo.install import install
    from dvc.repo.add import add
    from dvc.repo.remove import remove
    from dvc.repo.lock import lock as lock_stage
    from dvc.repo.move import move
    from dvc.repo.run import run
    from dvc.repo.imp import imp
    from dvc.repo.imp_url import imp_url
    from dvc.repo.reproduce import reproduce
    from dvc.repo.checkout import _checkout
    from dvc.repo.push import push
    from dvc.repo.fetch import _fetch
    from dvc.repo.pull import pull
    from dvc.repo.status import status
    from dvc.repo.gc import gc
    from dvc.repo.commit import commit
    from dvc.repo.diff import diff
    from dvc.repo.brancher import brancher
    from dvc.repo.get import get
    from dvc.repo.get_url import get_url
    from dvc.repo.update import update

    def __init__(self, root_dir=None):
        from dvc.state import State
        from dvc.lock import make_lock
        from dvc.scm import SCM
        from dvc.cache import Cache
        from dvc.data_cloud import DataCloud
        from dvc.repo.metrics import Metrics
        from dvc.scm.tree import WorkingTree
        from dvc.repo.tag import Tag
        from dvc.utils import makedirs

        root_dir = self.find_root(root_dir)

        self.root_dir = os.path.abspath(os.path.realpath(root_dir))
        self.dvc_dir = os.path.join(self.root_dir, self.DVC_DIR)

        self.config = Config(self.dvc_dir)

        self.scm = SCM(self.root_dir)

        self.tree = CleanTree(WorkingTree(self.root_dir))

        self.tmp_dir = os.path.join(self.dvc_dir, "tmp")
        makedirs(self.tmp_dir, exist_ok=True)

        hardlink_lock = self.config.config["core"].get("hardlink_lock", False)
        self.lock = make_lock(
            os.path.join(self.dvc_dir, "lock"),
            tmp_dir=os.path.join(self.dvc_dir, "tmp"),
            hardlink_lock=hardlink_lock,
            friendly=True,
        )

        # NOTE: storing state and link_state in the repository itself to avoid
        # any possible state corruption in 'shared cache dir' scenario.
        self.state = State(self, self.config.config)

        core = self.config.config[Config.SECTION_CORE]

        level = core.get(Config.SECTION_CORE_LOGLEVEL)
        if level:
            logger.setLevel(level.upper())

        self.cache = Cache(self)
        self.cloud = DataCloud(self)

        self.metrics = Metrics(self)
        self.tag = Tag(self)

        self._ignore()

    @property
    def tree(self):
        return self._tree

    @tree.setter
    def tree(self, tree):
        self._tree = tree
        # Our graph cache is no longer valid, as it was based on the previous
        # tree.
        self._reset()

    def __repr__(self):
        return "Repo: '{root_dir}'".format(root_dir=self.root_dir)

    @classmethod
    def find_root(cls, root=None):
        if root is None:
            root = os.getcwd()
        else:
            root = os.path.abspath(os.path.realpath(root))

        while True:
            dvc_dir = os.path.join(root, cls.DVC_DIR)
            if os.path.isdir(dvc_dir):
                return root
            if os.path.ismount(root):
                break
            root = os.path.dirname(root)

        raise NotDvcRepoError(root)

    @classmethod
    def find_dvc_dir(cls, root=None):
        root_dir = cls.find_root(root)
        return os.path.join(root_dir, cls.DVC_DIR)

    @staticmethod
    def init(root_dir=os.curdir, no_scm=False, force=False):
        from dvc.repo.init import init

        init(root_dir=root_dir, no_scm=no_scm, force=force)
        return Repo(root_dir)

    def unprotect(self, target):
        return self.cache.local.unprotect(PathInfo(target))

    def _ignore(self):
        from dvc.updater import Updater

        updater = Updater(self.dvc_dir)

        flist = (
            [self.config.config_local_file, updater.updater_file]
            + [self.lock.lockfile, updater.lock.lockfile, self.tmp_dir]
            + self.state.files
        )

        if path_isin(self.cache.local.cache_dir, self.root_dir):
            flist += [self.cache.local.cache_dir]

        self.scm.ignore_list(flist)

    def check_modified_graph(self, new_stages):
        """Generate graph including the new stage to check for errors"""
        self._collect_graph(self.stages + new_stages)

    def collect(self, target, with_deps=False, recursive=False, graph=None):
        import networkx as nx
        from dvc.stage import Stage

        G = graph or self.graph

        if not target:
            return list(G)

        target = os.path.abspath(target)

        if recursive and os.path.isdir(target):
            stages = nx.dfs_postorder_nodes(G)
            return [stage for stage in stages if path_isin(stage.path, target)]

        stage = Stage.load(self, target)
        if not with_deps:
            return [stage]

        pipeline = get_pipeline(get_pipelines(G), stage)
        return list(nx.dfs_postorder_nodes(pipeline, stage))

    def collect_granular(self, target, *args, **kwargs):
        if not target:
            return [(stage, None) for stage in self.stages]

        try:
            out, = self.find_outs_by_path(target, strict=False)
            filter_info = PathInfo(os.path.abspath(target))
            return [(out.stage, filter_info)]
        except OutputNotFoundError:
            stages = self.collect(target, *args, **kwargs)
            return [(stage, None) for stage in stages]

    def used_cache(
        self,
        targets=None,
        all_branches=False,
        with_deps=False,
        all_tags=False,
        all_commits=False,
        remote=None,
        force=False,
        jobs=None,
        recursive=False,
    ):
        """Get the stages related to the given target and collect
        the `info` of its outputs.

        This is useful to know what files from the cache are _in use_
        (namely, a file described as an output on a stage).

        The scope is, by default, the working directory, but you can use
        `all_branches` or `all_tags` to expand scope.

        Returns:
            A dictionary with Schemes (representing output's location) as
            keys, and a list with the outputs' `dumpd` as values.
        """
        from funcy.py2 import icat
        from dvc.cache import NamedCache

        cache = NamedCache()

        for branch in self.brancher(
            all_branches=all_branches,
            all_tags=all_tags,
            all_commits=all_commits,
        ):
            targets = targets or [None]

            pairs = icat(
                self.collect_granular(
                    target, recursive=recursive, with_deps=with_deps
                )
                for target in targets
            )

            suffix = "({})".format(branch) if branch else ""
            for stage, filter_info in pairs:
                used_cache = stage.get_used_cache(
                    remote=remote,
                    force=force,
                    jobs=jobs,
                    filter_info=filter_info,
                )
                cache.update(used_cache, suffix=suffix)

        return cache

    def _collect_graph(self, stages=None):
        """Generate a graph by using the given stages on the given
        directory

        The nodes of the graph are the stage's path relative to the root.
        Edges are created when the output of one stage is used as a
        dependency in other stage.

        The direction of the edges goes from the stage to its dependency:

        For example, running the following:

            $ dvc run -o A "echo A > A"
            $ dvc run -d A -o B "echo B > B"
            $ dvc run -d B -o C "echo C > C"

        Will create the following graph:

               ancestors <--
                           |
                C.dvc -> B.dvc -> A.dvc
                |          |
                |          --> descendants
                |
                ------- pipeline ------>
                           |
                           v
              (weakly connected components)

        Args:
            stages (list): used to build a graph, if None given, collect stages
                in the repository.

        Raises:
            OutputDuplicationError: two outputs with the same path
            StagePathAsOutputError: stage inside an output directory
            OverlappingOutputPathsError: output inside output directory
            CyclicGraphError: resulting graph has cycles
        """
        import networkx as nx
        from dvc.exceptions import (
            OutputDuplicationError,
            StagePathAsOutputError,
            OverlappingOutputPathsError,
        )

        G = nx.DiGraph()
        stages = stages or self.stages
        stages = [stage for stage in stages if stage]
        outs = {}

        for stage in stages:
            for out in stage.outs:
                if out.path_info in outs:
                    dup_stages = [stage, outs[out.path_info].stage]
                    raise OutputDuplicationError(str(out), dup_stages)
                outs[out.path_info] = out

        for stage in stages:
            for out in stage.outs:
                for p in out.path_info.parents:
                    if p in outs:
                        raise OverlappingOutputPathsError(outs[p], out)

        for stage in stages:
            stage_path_info = PathInfo(stage.path)
            for p in chain([stage_path_info], stage_path_info.parents):
                if p in outs:
                    raise StagePathAsOutputError(stage, str(outs[p]))

        for stage in stages:
            G.add_node(stage)

            for dep in stage.deps:
                if dep.path_info is None:
                    continue

                for out_path_info, out in outs.items():
                    if out_path_info.overlaps(dep.path_info):
                        G.add_node(out.stage)
                        G.add_edge(stage, out.stage)

        check_acyclic(G)

        return G

    @cached_property
    def graph(self):
        return self._collect_graph()

    @cached_property
    def pipelines(self):
        return get_pipelines(self.graph)

    @staticmethod
    def _filter_out_dirs(dirs, outs, root_dir):
        def filter_dirs(dname):
            path = os.path.join(root_dir, dname)
            for out in outs:
                if path == os.path.normpath(out):
                    return False
            return True

        return list(filter(filter_dirs, dirs))

    @cached_property
    def stages(self):
        """
        Walks down the root directory looking for Dvcfiles,
        skipping the directories that are related with
        any SCM (e.g. `.git`), DVC itself (`.dvc`), or directories
        tracked by DVC (e.g. `dvc add data` would skip `data/`)

        NOTE: For large repos, this could be an expensive
              operation. Consider using some memoization.
""" from dvc.stage import Stage stages = [] outs = [] for root, dirs, files in self.tree.walk(self.root_dir): for fname in files: path = os.path.join(root, fname) if not Stage.is_valid_filename(path): continue stage = Stage.load(self, path) for out in stage.outs: if out.scheme == "local": outs.append(out.fspath + out.sep) stages.append(stage) dirs[:] = self._filter_out_dirs(dirs, outs, root) return stages def find_outs_by_path(self, path, outs=None, recursive=False, strict=True): if not outs: outs = [out for stage in self.stages for out in stage.outs] abs_path = os.path.abspath(path) path_info = PathInfo(abs_path) is_dir = self.tree.isdir(abs_path) match = path_info.__eq__ if strict else path_info.isin_or_eq def func(out): if out.scheme == "local" and match(out.path_info): return True if is_dir and recursive and out.path_info.isin(path_info): return True return False matched = list(filter(func, outs)) if not matched: raise OutputNotFoundError(path, self) return matched def find_out_by_relpath(self, relpath): path = os.path.join(self.root_dir, relpath) out, = self.find_outs_by_path(path) return out def is_dvc_internal(self, path): path_parts = os.path.normpath(path).split(os.path.sep) return self.DVC_DIR in path_parts @contextmanager def open(self, path, remote=None, mode="r", encoding=None): """Opens a specified resource as a file descriptor""" cause = None try: out, = self.find_outs_by_path(path) except OutputNotFoundError as e: out = None cause = e if out and out.use_cache: try: with self._open_cached(out, remote, mode, encoding) as fd: yield fd return except FileNotFoundError as e: raise FileMissingError(relpath(path, self.root_dir), cause=e) if self.tree.exists(path): with self.tree.open(path, mode, encoding) as fd: yield fd return raise FileMissingError(relpath(path, self.root_dir), cause=cause) def _open_cached(self, out, remote=None, mode="r", encoding=None): if out.isdir(): raise ValueError("Can't open a dir") cache_file = self.cache.local.checksum_to_path_info(out.checksum) cache_file = fspath_py35(cache_file) if os.path.exists(cache_file): return open(cache_file, mode=mode, encoding=encoding) try: remote_obj = self.cloud.get_remote(remote) remote_info = remote_obj.checksum_to_path_info(out.checksum) return remote_obj.open(remote_info, mode=mode, encoding=encoding) except RemoteActionNotImplemented: with self.state: cache_info = out.get_used_cache(remote=remote) self.cloud.pull(cache_info, remote=remote) return open(cache_file, mode=mode, encoding=encoding) def close(self): self.scm.close() @locked def checkout(self, *args, **kwargs): return self._checkout(*args, **kwargs) @locked def fetch(self, *args, **kwargs): return self._fetch(*args, **kwargs) def _reset(self): self.__dict__.pop("graph", None) self.__dict__.pop("stages", None) self.__dict__.pop("pipelines", None) self.__dict__.pop("dvcignore", None)
def tree(self, tree):
    self._tree = tree if isinstance(tree, CleanTree) else CleanTree(tree)
    # Our graph cache is no longer valid, as it was based on the previous
    # tree.
    self._reset()