def download(self, to, jobs=None):
    """Download this repo dependency into *to*.

    Stages the referenced path from the external repo into the local
    object DB (after a best-effort fetch of its remote data) and checks
    the staged object out at the destination.

    Args:
        to: output whose ``path_info``/``fs`` describe the destination.
        jobs: parallelism for fetch/stage/save; ``None`` means default.
    """
    from dvc.checkout import checkout
    from dvc.config import NoRemoteError
    from dvc.exceptions import NoOutputOrStageError
    from dvc.objects import save
    from dvc.objects.stage import stage

    odb = self.repo.odb.local
    with self._make_repo(cache_dir=odb.cache_dir) as repo:
        # Pin the dependency to the concrete revision on first download.
        if self.def_repo.get(self.PARAM_REV_LOCK) is None:
            self.def_repo[self.PARAM_REV_LOCK] = repo.get_rev()
        path_info = PathInfo(repo.root_dir) / self.def_path
        try:
            repo.fetch([path_info.fspath], jobs=jobs, recursive=True)
        except (NoOutputOrStageError, NoRemoteError):
            # Best effort: the path may not be dvc-tracked, or the repo
            # may have no remote configured — stage() below still reads
            # whatever is available through repo_fs.
            pass
        obj = stage(
            odb,
            path_info,
            repo.repo_fs,
            jobs=jobs,
            follow_subrepos=False,
        )
        save(odb, obj, jobs=jobs)
    # NOTE(review): checkout only needs `obj`/`odb`, so it is placed
    # outside the temporary-repo context — confirm against upstream.
    checkout(to.path_info, to.fs, obj, odb)
def commit(self, filter_info=None):
    """Save this output's content into the cache and relink it.

    Stages the workspace content into the ODB (or commits just the
    requested sub-path of a directory output) and checks the cached
    object back out so the workspace copy is linked to the cache.

    Raises:
        DoesNotExistError: if the output is missing from the workspace.
    """
    if not self.exists:
        raise self.DoesNotExistError(self)

    assert self.hash_info

    if not self.use_cache:
        return

    target = filter_info or self.path_info
    # A granular commit applies only when a sub-path inside a directory
    # output was requested.
    is_granular = (
        self.is_dir_checksum and filter_info and filter_info != self.path_info
    )
    if is_granular:
        obj = self._commit_granular_dir(filter_info)
    else:
        obj = ostage(
            self.odb,
            target,
            self.fs,
            self.odb.fs.PARAM_CHECKSUM,
            dvcignore=self.dvcignore,
        )
        osave(self.odb, obj)

    checkout(
        target,
        self.fs,
        obj,
        self.odb,
        relink=True,
        dvcignore=self.dvcignore,
        state=self.repo.state,
    )
    self.set_exec()
def commit(self, filter_info=None):
    """Stage, save, and relink this output's content in the cache.

    Raises:
        DoesNotExistError: if the output is missing from the workspace.
    """
    if not self.exists:
        raise self.DoesNotExistError(self)

    assert self.hash_info

    if not self.use_cache:
        return

    target = filter_info or self.path_info
    staged = ostage(
        self.odb,
        target,
        self.fs,
        self.odb.fs.PARAM_CHECKSUM,
        dvcignore=self.dvcignore,
    )
    objects.save(self.odb, staged)
    checkout(
        target,
        self.fs,
        staged,
        self.odb,
        relink=True,
        dvcignore=self.dvcignore,
        state=self.repo.state,
    )
    self.set_exec()
def download(self, to, jobs=None):
    """Pull every used object from its remote, then check out at *to*.

    Args:
        to: output whose ``path_info``/``fs`` describe the destination.
        jobs: parallelism for the pull; ``None`` means default.
    """
    from dvc.checkout import checkout

    used = self.get_used_objs()
    for odb, objs in used.items():
        self.repo.cloud.pull(objs, jobs=jobs, odb=odb)

    checkout(
        to.path_info,
        to.fs,
        self.get_obj(),
        self.repo.odb.local,
        dvcignore=None,
        state=self.repo.state,
    )
def checkout(
    self,
    force=False,
    progress_callback=None,
    relink=False,
    filter_info=None,
    allow_missing=False,
    **kwargs,
):
    """Link the cached content for this output into the workspace.

    Returns:
        The result of the underlying ``checkout`` call, or ``None`` when
        the output is not cached, or when the cache entry is missing and
        that is tolerated (``allow_missing`` or a checkpoint output).

    Raises:
        CheckoutError: when the cache entry is missing and not tolerated.
    """
    if not self.use_cache:
        # Nothing to materialize from cache — just report progress for
        # the files already present in the workspace.
        if progress_callback:
            progress_callback(
                str(self.path_info), self.get_files_number(filter_info)
            )
        return None

    try:
        result = checkout(
            self.path_info,
            self.tree,
            self.hash_info,
            self.cache,
            force=force,
            progress_callback=progress_callback,
            relink=relink,
            filter_info=filter_info,
            **kwargs,
        )
    except CheckoutError:
        # Missing cache data is acceptable for checkpoint outputs and
        # when the caller explicitly allows it.
        if allow_missing or self.checkpoint:
            return None
        raise

    self.set_exec()
    return result
def commit(self, filter_info=None):
    """Save this output into the cache and relink the workspace copy.

    Raises:
        DoesNotExistError: if the output is missing from the workspace.
    """
    if not self.exists:
        raise self.DoesNotExistError(self)

    assert self.hash_info

    if not self.use_cache:
        return

    target = filter_info or self.path_info
    staged = ostage(self.odb, target, self.fs)
    objects.save(self.odb, staged)
    checkout(
        target,
        self.fs,
        staged,
        self.odb,
        relink=True,
    )
    self.set_exec()
def download(self, to, jobs=None):
    """Stage the referenced repo path into the local ODB and check it out.

    Args:
        to: output whose ``path_info``/``fs`` describe the destination.
        jobs: parallelism for stage/save; ``None`` means default.
    """
    from dvc.checkout import checkout
    from dvc.objects import save
    from dvc.objects.stage import stage

    odb = self.repo.odb.local
    with self._make_repo(cache_dir=odb.cache_dir) as repo:
        # Pin the dependency to the concrete revision on first download.
        if self.def_repo.get(self.PARAM_REV_LOCK) is None:
            self.def_repo[self.PARAM_REV_LOCK] = repo.get_rev()

        target = PathInfo(repo.root_dir) / self.def_path
        obj = stage(
            odb,
            target,
            repo.repo_fs,
            jobs=jobs,
            follow_subrepos=False,
        )
        save(odb, obj, jobs=jobs)

    checkout(to.path_info, to.fs, obj, odb)
def download(self, to, jobs=None):
    """Save the referenced repo path into the local cache and check it out.

    Args:
        to: output whose ``path_info``/``tree`` describe the destination.
        jobs: parallelism for the save; ``None`` means default.
    """
    from dvc.checkout import checkout

    cache = self.repo.cache.local
    with self._make_repo(cache_dir=cache.cache_dir) as repo:
        # Pin the dependency to the concrete revision on first download.
        if self.def_repo.get(self.PARAM_REV_LOCK) is None:
            self.def_repo[self.PARAM_REV_LOCK] = repo.get_rev()

        src = PathInfo(repo.root_dir) / self.def_path
        hash_info = cache.save(
            src,
            repo.repo_tree,
            None,
            jobs=jobs,
            follow_subrepos=False,
        )

    checkout(to.path_info, to.tree, hash_info, cache)
def download(self, to, jobs=None):
    """Fetch this dependency's used objects and check them out at *to*.

    Args:
        to: output whose ``path_info``/``fs`` describe the destination.
        jobs: parallelism for fetch/save; ``None`` means default.
    """
    from dvc.checkout import checkout
    from dvc.objects import save
    from dvc.objects.db.git import GitObjectDB
    from dvc.repo.fetch import fetch_from_odb

    for odb, objs in self.get_used_objs().items():
        # GitObjectDB entries are presumably already available locally
        # (tracked by git), so remote fetch is skipped for them — TODO
        # confirm against the fetch implementation.
        if not isinstance(odb, GitObjectDB):
            fetch_from_odb(self.repo, odb, objs, jobs=jobs)
    obj = self.get_obj()
    save(self.repo.odb.local, obj, jobs=jobs)
    checkout(
        to.path_info,
        to.fs,
        obj,
        self.repo.odb.local,
        dvcignore=None,
        state=self.repo.state,
    )
def commit(self, filter_info=None):
    """Save this output to the cache and relink the workspace copy.

    Raises:
        DoesNotExistError: if the output is missing from the workspace.
    """
    if not self.exists:
        raise self.DoesNotExistError(self)

    assert self.hash_info

    if not self.use_cache:
        return

    self.cache.save(
        self.path_info,
        self.tree,
        self.hash_info,
        filter_info=filter_info,
    )
    checkout(
        self.path_info,
        self.tree,
        self.hash_info,
        self.cache,
        relink=True,
        filter_info=filter_info,
    )
    self.set_exec()
def checkout(
    self,
    force=False,
    progress_callback=None,
    relink=False,
    filter_info=None,
    allow_missing=False,
    checkpoint_reset=False,
    **kwargs,
):
    """Materialize this output's cached content into the workspace.

    Returns:
        ``None`` when nothing was checked out (uncached output, missing
        object for a filtered sub-path, checkpoint reset, or a tolerated
        missing cache entry); otherwise a 2-tuple
        ``(added, modified)`` where ``added`` is True when the path did
        not exist before the checkout and ``modified`` is False for an
        added path, else the underlying checkout's modified flag.

    Raises:
        CheckoutError: when the cache entry is missing and not tolerated
            (``allow_missing`` is False and this is not a checkpoint).
    """
    if not self.use_cache:
        # Not cached: only report progress for the existing files.
        if progress_callback:
            progress_callback(
                str(self.path_info), self.get_files_number(filter_info)
            )
        return None

    obj = self.get_obj(filter_info=filter_info)
    if not obj and (filter_info and filter_info != self.path_info):
        # backward compatibility
        return None

    # A checkpoint reset discards the current workspace copy instead of
    # checking out cached content.
    if self.checkpoint and checkpoint_reset:
        if self.exists:
            self.remove()
        return None

    # Record existence before checkout to distinguish "added" from
    # "modified" in the return value.
    added = not self.exists

    try:
        modified = checkout(
            filter_info or self.path_info,
            self.fs,
            obj,
            self.odb,
            force=force,
            progress_callback=progress_callback,
            relink=relink,
            state=self.repo.state,
            **kwargs,
        )
    except CheckoutError:
        # Missing cache data is acceptable for checkpoint outputs and
        # when the caller explicitly allows it.
        if allow_missing or self.checkpoint:
            return None
        raise

    self.set_exec()
    # Note: parses as (added, (False if added else modified)).
    return added, False if added else modified