def test_checkout_for_external_outputs(tmp_dir, dvc):
    dvc.cache.s3 = CloudCache(S3RemoteTree(dvc, {"url": S3.get_url()}))

    remote = Remote(S3RemoteTree(dvc, {"url": S3.get_url()}))
    file_path = remote.path_info / "foo"
    remote.tree.s3.put_object(
        Bucket=remote.path_info.bucket, Key=file_path.path, Body="foo"
    )

    dvc.add(str(remote.path_info / "foo"), external=True)
    remote.tree.remove(file_path)
    stats = dvc.checkout(force=True)
    assert stats == {**empty_checkout, "added": [str(file_path)]}
    assert remote.tree.exists(file_path)

    remote.tree.s3.put_object(
        Bucket=remote.path_info.bucket, Key=file_path.path, Body="foo\nfoo"
    )
    stats = dvc.checkout(force=True)
    assert stats == {**empty_checkout, "modified": [str(file_path)]}
def test_checkout_recursive(tmp_dir, dvc):
    tmp_dir.gen({"dir": {"foo": "foo", "bar": "bar"}})
    dvc.add("dir", recursive=True)

    (tmp_dir / "dir" / "foo").unlink()
    (tmp_dir / "dir" / "bar").unlink()

    stats = dvc.checkout(["dir"], recursive=True)

    assert set(stats["added"]) == {
        os.path.join("dir", "foo"),
        os.path.join("dir", "bar"),
    }


@pytest.mark.skipif(
    not S3.should_test(), reason="Only run with S3 credentials"
)
def test_checkout_for_external_outputs(tmp_dir, dvc):
    dvc.cache.s3 = CloudCache(S3RemoteTree(dvc, {"url": S3.get_url()}))

    remote = Remote(S3RemoteTree(dvc, {"url": S3.get_url()}))
    file_path = remote.path_info / "foo"
    remote.tree.s3.put_object(
        Bucket=remote.path_info.bucket, Key=file_path.path, Body="foo"
    )

    dvc.add(str(remote.path_info / "foo"), external=True)
    remote.tree.remove(file_path)
    stats = dvc.checkout(force=True)
    assert stats == {**empty_checkout, "added": [str(file_path)]}
def test_checkout_recursive(tmp_dir, dvc):
    tmp_dir.gen({"dir": {"foo": "foo", "bar": "bar"}})
    dvc.add("dir", recursive=True)

    (tmp_dir / "dir" / "foo").unlink()
    (tmp_dir / "dir" / "bar").unlink()

    stats = dvc.checkout(["dir"], recursive=True)

    assert set(stats["added"]) == {
        os.path.join("dir", "foo"),
        os.path.join("dir", "bar"),
    }


@pytest.mark.skipif(not S3.should_test(), reason="Only run with S3 credentials")
def test_checkout_for_external_outputs(tmp_dir, dvc):
    dvc.cache.s3 = CloudCache(S3Tree(dvc, {"url": S3.get_url()}))

    remote = Remote(S3Tree(dvc, {"url": S3.get_url()}))
    file_path = remote.tree.path_info / "foo"
    remote.tree.s3.meta.client.put_object(
        Bucket=remote.tree.path_info.bucket, Key=file_path.path, Body="foo"
    )

    dvc.add(str(remote.tree.path_info / "foo"), external=True)
    remote.tree.remove(file_path)
    stats = dvc.checkout(force=True)
    assert stats == {**empty_checkout, "added": [str(file_path)]}
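# Note (assumption, not shown in these snippets): the asserts above spread an
# `empty_checkout` baseline into the expected stats. A minimal sketch of what
# such a module-level baseline would look like, given that dvc.checkout()
# reports per-category lists:
empty_checkout = {"added": [], "deleted": [], "modified": []}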
def _get_src_dst():
    base_info = RemoteS3.path_cls(S3.get_url())
    return base_info / "from", base_info / "to"
def _get_src_dst():
    base_info = S3RemoteTree.PATH_CLS(S3.get_url())
    return base_info / "from", base_info / "to"
def _test(self):
    url = S3.get_url()
    self.main(["remote", "add", TEST_REMOTE, url])

    self._test_cloud(TEST_REMOTE)
def _should_test(self):
    return S3.should_test()
def _get_url(self):
    return S3.get_url()
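# Hypothetical sketch (not taken from the repo): helpers like _should_test()
# and _get_url() above are typically combined so that a cloud test is skipped
# when no S3 credentials are configured. Class and test names here are
# illustrative only.
import pytest


class TestS3Remote:
    def _should_test(self):
        return S3.should_test()

    def _get_url(self):
        return S3.get_url()

    def test_remote(self):
        if not self._should_test():
            pytest.skip("no S3 credentials configured")
        url = self._get_url()
        # ... configure a remote at `url` and exercise it, as in _test() above ...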
def _get_src_dst():
    base_info = S3FileSystem.PATH_CLS(S3.get_url())
    return base_info / "from", base_info / "to"
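# Minimal usage sketch (assumed, not from the repo): the two path_info objects
# returned by _get_src_dst() share the bucket encoded in S3.get_url() and
# differ only in their keys, which is what copy/move tests rely on.
src, dst = _get_src_dst()
assert src.bucket == dst.bucket
assert src.path.endswith("from")
assert dst.path.endswith("to")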