from concurrent.futures import ThreadPoolExecutor
from functools import partial

from dvc.exceptions import DownloadError, UploadError
from dvc.progress import Tqdm


def _process(dest_tree, src_tree, collected_files, download=False):
    from dvc.remote.base import _log_exceptions

    # Build parallel lists of source paths, destination paths, and display
    # names for every collected file.
    from_infos = []
    to_infos = []
    names = []
    for from_info in collected_files:
        from_infos.append(from_info)
        fname = from_info.relative_to(src_tree.path_info)
        names.append(str(fname))
        to_infos.append(dest_tree.path_info / fname)
    total = len(from_infos)

    if download:
        func = partial(
            _log_exceptions(src_tree.download, "download"),
            dir_mode=dest_tree.dir_mode,
            file_mode=dest_tree.file_mode,
        )
        desc = "Downloading"
    else:
        func = partial(_log_exceptions(dest_tree.upload, "upload"))
        desc = "Uploading"

    with Tqdm(total=total, unit="file", desc=desc) as pbar:
        func = pbar.wrap_fn(func)
        # TODO: parallelize this, currently --jobs for repro applies to
        # number of repro executors not download threads
        with ThreadPoolExecutor(max_workers=1) as dl_executor:
            fails = sum(dl_executor.map(func, from_infos, to_infos, names))

    if fails:
        if download:
            raise DownloadError(fails)
        raise UploadError(fails)
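# The _log_exceptions helper imported from dvc.remote.base is not shown in
# this excerpt. Below is a minimal sketch of what _process assumes about it,
# based only on how it is used above: each mapped call must return a failure
# count that can be summed and passed to DownloadError/UploadError. The
# wrapper structure and logging details are assumptions, not the actual DVC
# implementation.
import logging
from functools import wraps

logger = logging.getLogger(__name__)


def _log_exceptions(func, operation):
    @wraps(func)
    def wrapper(from_info, to_info, *args, **kwargs):
        try:
            func(from_info, to_info, *args, **kwargs)
            return 0  # success adds nothing to the failure count
        except Exception:  # pylint: disable=broad-except
            logger.exception(
                "failed to %s '%s' to '%s'", operation, from_info, to_info
            )
            return 1  # one failed transfer
    return wrapper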
def pull(self, remote):
    # Variant using the tree-based cache API (remote.tree / repo.cache).
    from dvc.remote.base import _log_exceptions

    remote = self.repo.cloud.get_remote(remote)
    return self._transfer(
        _log_exceptions(remote.tree.download, "download"),
        remote.tree,
        self.repo.cache.local.tree,
    )
def pull(self, remote):
    # Variant using the fs-based object database API (remote.fs / repo.odb).
    from dvc.remote.base import _log_exceptions

    remote = self.repo.cloud.get_remote(remote)
    return self._transfer(
        _log_exceptions(remote.fs.download, "download"),
        remote.fs,
        self.repo.odb.local.fs,
    )
def push(self, remote):
    from dvc.remote.base import _log_exceptions

    remote = self.repo.cloud.get_remote(remote)
    return self._transfer(
        _log_exceptions(remote.tree.upload, "upload"),
        self.repo.cache.local.tree,
        remote.tree,
    )
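# The _transfer method that pull and push delegate to is not part of this
# excerpt. Based only on the call sites above (a wrapped download/upload
# callable, a source tree/fs, and a destination tree/fs), a hypothetical
# sketch follows; the method name comes from the source, but the body, the
# walk_files usage, and the return value are assumptions, and the real
# implementation may filter entries or track results differently.
def _transfer(self, func, from_tree, to_tree):
    # Walk every file under the source root and copy it into the destination,
    # preserving the relative layout; func is the exception-logging wrapper
    # built by the caller.
    transferred = []
    for from_info in from_tree.walk_files(from_tree.path_info):
        rel = from_info.relative_to(from_tree.path_info)
        to_info = to_tree.path_info / rel
        func(from_info, to_info)
        transferred.append(rel)
    return transferred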