def _run_twice_with_same_outputs(self):
    """Run a stage writing FOO, then force a second run that appends to it."""
    first_cmd = [
        "run",
        "--single-stage",
        "--outs",
        self.FOO,
        f"echo {self.FOO_CONTENTS} > {self.FOO}",
    ]
    self.assertEqual(0, main(first_cmd))

    # After the first run only FOO_CONTENTS should be present.
    self.assertEqual([self.FOO_CONTENTS], self._read_content_only(self.FOO))

    second_cmd = [
        "run",
        self._outs_command,
        self.FOO,
        "--force",
        "--single-stage",
        f"echo {self.BAR_CONTENTS} >> {self.FOO}",
    ]
    self.assertEqual(0, main(second_cmd))
def test_config_show_origin_single(tmp_dir, dvc, capsys):
    """--show-origin prints the config file a value came from."""
    (tmp_dir / ".dvc" / "config").write_text(
        "['remote \"myremote\"']\n"
        "    url = s3://bucket/path\n"
        "    region = myregion\n"
    )
    config_path = os.path.join(".dvc", "config")

    assert (
        main(["config", "--show-origin", "--project", "remote.myremote.url"])
        == 0
    )
    out, _ = capsys.readouterr()
    assert f"{config_path}\ts3://bucket/path\n" in out

    # The value lives at the project level, so asking for the local level
    # must fail (251 is the generic config error exit code).
    assert (
        main(["config", "--show-origin", "--local", "remote.myremote.url"])
        == 251
    )

    assert main(["config", "--list", "--project", "--show-origin"]) == 0
    out, _ = capsys.readouterr()
    assert f"{config_path}\tremote.myremote.url=s3://bucket/path\n" in out
def test_shared_cache_dir(tmp_dir):
    """Two repos pointed at the same external cache dir must share it and
    must not create their own repo-local caches."""
    cache_dir = os.path.abspath(os.path.join(os.curdir, "cache"))
    for d in ["dir1", "dir2"]:
        os.mkdir(d)
        with (tmp_dir / d).chdir():
            ret = main(["init", "--no-scm"])
            assert ret == 0

            ret = main(["config", "cache.dir", cache_dir])
            assert ret == 0

            # Redirecting cache.dir must prevent a repo-local cache.
            assert not os.path.exists(os.path.join(".dvc", "cache"))

            (tmp_dir / d).gen({"common": "common", "unique": d})

            ret = main(["add", "common", "unique"])
            assert ret == 0

    assert not os.path.exists(os.path.join("dir1", ".dvc", "cache"))
    assert not os.path.exists(os.path.join("dir2", ".dvc", "cache"))

    # Idiom fix: filter(lambda ...) replaced with a comprehension, and the
    # three indexed assertions replaced with a loop over the subdirs.
    # 3 distinct hashes expected: "common" (deduped across both repos),
    # plus one unique file per repo.
    subdirs = [
        name
        for name in os.listdir(cache_dir)
        if os.path.isdir(os.path.join(cache_dir, name))
    ]
    assert len(subdirs) == 3
    for subdir in subdirs:
        assert len(os.listdir(os.path.join(cache_dir, subdir))) == 1
def test(self):
    # A valid local source imports cleanly and leaves a .dvc file behind.
    self.assertEqual(main(["import-url", self.FOO, "import"]), 0)
    self.assertTrue(os.path.exists("import.dvc"))

    # A missing source must be reported as a failure.
    self.assertNotEqual(main(["import-url", "non-existing-file", "import"]), 0)
def test(self):
    """Each branch's .gitignore only lists the data file committed on it."""
    master_file = "file_in_a_master"
    master_branch = "master"
    branch_file = "file_in_a_branch"
    other_branch = "b1"

    self.dvc.scm.add(self.dvc.scm.untracked_files())
    self.dvc.scm.commit("add all files")
    self.commit_data_file(master_file)

    # Create a fresh branch and commit a branch-only data file there.
    self.dvc.scm.checkout(other_branch, True)
    self.assertEqual(main(["checkout", "--force"]), 0)
    self.commit_data_file(branch_file)

    # Back on master, only the master file is ignored.
    self.dvc.scm.checkout(master_branch)
    self.assertEqual(main(["checkout", "--force"]), 0)
    ignored = self.read_ignored()
    self.assertEqual(len(ignored), 1)
    self.assertIn("/" + master_file, ignored)

    # On the branch, the branch file is ignored.
    self.dvc.scm.checkout(other_branch)
    self.assertEqual(main(["checkout", "--force"]), 0)
    self.assertIn("/" + branch_file, self.read_ignored())
def test_config_set(tmp_dir, dvc):
    """Setting, overwriting and unsetting a core option only ever touches
    the project config file, never the local one."""
    config_file = tmp_dir / ".dvc" / "config"
    local_file = tmp_dir / ".dvc" / "config.local"

    steps = [
        (
            ["config", "core.analytics", "false"],
            "[core]\n    no_scm = True\n    analytics = false\n",
        ),
        (
            ["config", "core.analytics", "true"],
            "[core]\n    no_scm = True\n    analytics = true\n",
        ),
        (
            ["config", "core.analytics", "--unset"],
            "[core]\n    no_scm = True\n",
        ),
    ]
    for args, expected in steps:
        assert main(args) == 0
        assert config_file.read_text() == expected
        assert not local_file.exists()
def test(self):
    """'remote://' paths for local outs are kept verbatim in the .dvc
    file, while plain absolute paths are stored relative."""
    cwd = os.getcwd()
    remote = "myremote"

    self.assertEqual(main(["remote", "add", remote, cwd]), 0)
    self.dvc.config.load()

    # Out addressed through the remote scheme: path stays as written.
    foo = f"remote://{remote}/{self.FOO}"
    self.assertEqual(main(["add", foo]), 0)
    d = load_yaml("foo.dvc")
    self.assertEqual(d["outs"][0]["path"], foo)

    # Out addressed by absolute path: stored relative to the workspace.
    bar = os.path.join(cwd, self.BAR)
    self.assertEqual(main(["add", bar]), 0)
    d = load_yaml("bar.dvc")
    self.assertEqual(d["outs"][0]["path"], self.BAR)
def test_implied_cloud(dvc, mocker):
    # Passing --remote should route `status` through the cloud status path.
    cloud_status = mocker.patch(
        "dvc.repo.status._cloud_status", return_value=True
    )
    main(["status", "--remote", "something"])
    assert cloud_status.called
def test_force_import(self):
    """Forced repro of an import stage re-downloads exactly once and does
    not fall back to a cache checkout."""
    remove(self.BAR)
    ret = main(["import-url", self.FOO, self.BAR])
    self.assertEqual(ret, 0)
    # Wrap the real implementations so call counts can be inspected while
    # behavior is unchanged (side_effect delegates to the original).
    patch_get = patch.object(
        LocalFileSystem,
        "get",
        side_effect=LocalFileSystem.get,
        autospec=True,
    )
    patch_checkout = patch.object(
        Output,
        "checkout",
        side_effect=Output.checkout,
        autospec=True,
    )
    with patch_get as mock_download:
        with patch_checkout as mock_checkout:
            # The import stage must be unfrozen before repro can touch it.
            assert main(["unfreeze", "bar.dvc"]) == 0
            ret = main(["repro", "--force", "bar.dvc"])
            self.assertEqual(ret, 0)
            # Forced repro downloads once; checkout path is bypassed.
            self.assertEqual(mock_download.call_count, 1)
            self.assertEqual(mock_checkout.call_count, 0)
def test(self):
    """Files restored from a copy-type cache must be fresh copies (new
    inode), while untouched outs keep their original inode."""
    # Use copy to test for changes in the inodes
    ret = main(["config", "cache.type", "copy"])
    self.assertEqual(ret, 0)
    ret = main(["add", self.DATA_DIR])
    self.assertEqual(0, ret)
    stage_path = self.DATA_DIR + DVC_FILE_SUFFIX
    stage = load_yaml(stage_path)
    staged_files = self.outs_info(stage)
    # move instead of remove, to lock inode assigned to stage_files[0].path
    # if we were to use remove, we might end up with same inode assigned to
    # newly checked out file
    shutil.move(staged_files[0].path, "random_name")
    ret = main(["checkout", "--force", stage_path])
    self.assertEqual(ret, 0)
    checkedout_files = self.outs_info(stage)
    # Same set of paths comes back after checkout...
    self.assertEqual(len(staged_files), len(checkedout_files))
    self.assertEqual(staged_files[0].path, checkedout_files[0].path)
    # ...the restored file is a new copy (different inode), while the
    # untouched second out keeps its inode.
    self.assertNotEqual(staged_files[0].inode, checkedout_files[0].inode)
    self.assertEqual(staged_files[1].inode, checkedout_files[1].inode)
def test_overwrite(self):
    """Re-adding an existing remote fails unless -f (force) is given."""
    name, url = "a", "s3://bucket/name"
    self.assertEqual(main(["remote", "add", name, url]), 0)
    # A duplicate name is rejected (251 is the config-error exit code).
    self.assertEqual(main(["remote", "add", name, url]), 251)
    # Forcing overwrites the existing entry.
    self.assertEqual(main(["remote", "add", "-f", name, url]), 0)
def test(self):
    """--file rejects multiple targets and -R, but works for a directory
    or a single file, and the chosen name is respected."""
    # Multiple targets cannot share one .dvc filename.
    self.assertNotEqual(
        0, main(["add", self.FOO, self.BAR, "--file", "error.dvc"])
    )

    # Recursive add cannot take an explicit filename either.
    self.assertNotEqual(
        0, main(["add", "-R", self.DATA_DIR, "--file", "error.dvc"])
    )
    with self.assertRaises(RecursiveAddingWhileUsingFilename):
        self.dvc.add(self.DATA_DIR, recursive=True, fname="error.dvc")

    # A directory target accepts a custom filename.
    self.assertEqual(0, main(["add", self.DATA_DIR, "--file", "data_directory.dvc"]))
    self.assertTrue(os.path.exists("data_directory.dvc"))

    # A single file gets the requested name instead of the default foo.dvc;
    # the second iteration re-adds after removing the .dvc file.
    for attempt in range(2):
        if attempt:
            os.remove("bar.dvc")
        self.assertEqual(0, main(["add", self.FOO, "--file", "bar.dvc"]))
        self.assertTrue(os.path.exists("bar.dvc"))
        self.assertFalse(os.path.exists("foo.dvc"))
def test_machine_modify_susccess(tmp_dir, dvc, machine_config, slot, value):
    # NOTE(review): "susccess" typo kept — renaming would change the
    # collected test id.
    config_file = tmp_dir / ".dvc" / "config"

    # Modifying a slot appends it to the machine section of the config.
    assert main(["machine", "modify", "foo", slot, value]) == 0
    assert config_file.read_text() == machine_config + f"    {slot} = {value}\n"

    # Unsetting restores the original config verbatim.
    assert main(["machine", "modify", "--unset", "foo", slot]) == 0
    assert config_file.read_text() == machine_config
def test(self):
    """`dvc unprotect` makes a hardlinked out writable again while the
    cache entry itself must end up read-only."""
    ret = main(["config", "cache.type", "hardlink"])
    self.assertEqual(ret, 0)
    ret = main(["add", self.FOO])
    self.assertEqual(ret, 0)
    # md5("foo") == acbd18db4cc2f85cedef654fccc4a4d8, split into the
    # cache's <2-char-prefix>/<rest> layout.
    cache = os.path.join(
        ".dvc", "cache", "ac", "bd18db4cc2f85cedef654fccc4a4d8"
    )
    # After add, the hardlinked out and its cache entry are read-only.
    self.assertFalse(os.access(self.FOO, os.W_OK))
    self.assertFalse(os.access(cache, os.W_OK))
    ret = main(["unprotect", self.FOO])
    self.assertEqual(ret, 0)
    self.assertTrue(os.access(self.FOO, os.W_OK))
    if os.name == "nt":
        # NOTE: cache is now unprotected, because NTFS doesn't allow
        # deleting read-only files, so we have to try to set write perms
        # on files that we try to delete, which propagates to the cache
        # file. But it should be restored after the next cache check, hence
        # why we call `dvc status` here.
        self.assertTrue(os.access(cache, os.W_OK))
        ret = main(["status"])
        self.assertEqual(ret, 0)
    # On every platform the cache entry must be read-only by now.
    self.assertFalse(os.access(cache, os.W_OK))
def test_machine_modify_startup_script(tmp_dir, dvc, machine_config):
    slot, value = "startup_script", "start.sh"
    config_file = tmp_dir / ".dvc" / "config"

    # The script path is stored relative to the config file's directory,
    # hence the "../" prefix.
    assert main(["machine", "modify", "foo", slot, value]) == 0
    assert config_file.read_text() == machine_config + f"    {slot} = ../{value}\n"

    # Unsetting restores the original config verbatim.
    assert main(["machine", "modify", "--unset", "foo", slot]) == 0
    assert config_file.read_text() == machine_config
def test(self):
    # Force-checkout each stage file, then verify the workspace contents.
    for stage in (self.foo_stage, self.data_dir_stage):
        self.assertEqual(main(["checkout", "--force", stage.path]), 0)
    self._test_checkout()
def test_fetch_stats(tmp_dir, dvc, fs, msg, capsys, local_remote):
    """Fetch after clearing the cache must print the expected stats line."""
    tmp_dir.dvc_gen(fs)
    dvc.push()
    clean(list(fs.keys()), dvc)
    # FIX: the exit status of `dvc fetch` was previously ignored, so a
    # failing fetch could still pass if `msg` happened to appear in output.
    assert main(["fetch"]) == 0
    out, _ = capsys.readouterr()
    assert msg in out
def test(self):
    """Outs produced with cache.type=symlink are symlinks with the right
    content and permissions, both on the first run and after a forced,
    uncached re-run."""
    with open(self.CODE, "w+", encoding="utf-8") as fobj:
        fobj.write("import sys\n")
        fobj.write("import os\n")
        fobj.write("with open(sys.argv[1], 'a+') as fobj:\n")
        fobj.write("    fobj.write('foo')\n")

    ret = main(["config", "cache.type", "symlink"])
    self.assertEqual(ret, 0)
    # FIX: a duplicated `self.assertEqual(ret, 0)` was removed here.

    def _check_out():
        # Verify the out after each run: identical checks were duplicated
        # verbatim in the original, now factored into this helper.
        if os.name == "nt":
            # NOTE: Windows symlink perms don't propagate to the target
            self.assertTrue(os.access(self.FOO, os.W_OK))
        else:
            self.assertFalse(os.access(self.FOO, os.W_OK))
        self.assertTrue(system.is_symlink(self.FOO))
        with open(self.FOO, encoding="utf-8") as fd:
            self.assertEqual(fd.read(), "foo")

    ret = main([
        "run",
        "-d",
        self.CODE,
        "-o",
        self.FOO,
        "--single-stage",
        "python",
        self.CODE,
        self.FOO,
    ])
    self.assertEqual(ret, 0)
    _check_out()

    ret = main([
        "run",
        "--force",
        "--no-run-cache",
        "--single-stage",
        "-d",
        self.CODE,
        "-o",
        self.FOO,
        "python",
        self.CODE,
        self.FOO,
    ])
    self.assertEqual(ret, 0)
    _check_out()
def test_check_ignore_non_matching(tmp_dir, dvc, non_matching, caplog, capsys):
    tmp_dir.gen(DvcIgnore.DVCIGNORE_FILE, "other")

    # Exit code 1 means "not ignored" in both modes.
    if non_matching:
        args = ["check-ignore", "-d", "-n", "file"]
    else:
        args = ["check-ignore", "-d", "file"]
    assert main(args) == 1

    # The "::<TAB>file" detail line only appears with -n (non-matching).
    out, _ = capsys.readouterr()
    assert ("::\tfile\n" in out) is non_matching
def test_remote_references(self):
    """cache.ssh may point to a remote that itself references another."""
    ssh_url = "ssh://user@localhost:23"
    for args in (
        ["remote", "add", "storage", ssh_url],
        ["remote", "add", "cache", "remote://storage/tmp"],
        ["config", "cache.ssh", "cache"],
    ):
        assert main(args) == 0

    # Re-initialize so the chained remote config is resolved afresh.
    self.dvc.__init__()
    assert self.dvc.odb.ssh.fs_path == "/tmp"
def test(self):
    self.assertEqual(0, main(["add", self.DATA_DIR, self.FOO, self.BAR]))
    # Recursive checkout of the data dir must leave sibling outs in place.
    self.assertEqual(0, main(["checkout", "-R", self.DATA_DIR]))
    for path in (self.FOO, self.BAR):
        self.assertTrue(os.path.exists(path))
def test_upper_case_remote(tmp_dir, dvc, local_cloud):
    # An all-caps remote name must be accepted by both add and push.
    name = "UPPERCASEREMOTE"
    tmp_dir.gen("foo", "foo")
    assert main(["remote", "add", name, local_cloud.url]) == 0
    assert main(["push", "-r", name]) == 0
def test_remote_modify_validation(dvc):
    """Unknown remote options are rejected and never written to disk."""
    name = "drive"
    bad_option = "unsupported_config"
    assert main(["remote", "add", "-d", name, "gdrive://test/test"]) == 0
    # 251 is the generic config-error exit code.
    assert (
        main(["remote", "modify", name, bad_option, "something"]) == 251
    )
    config = configobj.ConfigObj(dvc.config.files["repo"])
    assert bad_option not in config[f'remote "{name}"']
def test(self):
    # --no-commit tracks the file without populating the cache...
    self.assertEqual(main(["add", self.FOO, "--no-commit"]), 0)
    self.assertTrue(os.path.isfile(self.FOO))
    self.assertFalse(os.path.exists(self.dvc.odb.local.cache_dir))

    # ...and a later explicit `dvc commit` fills the cache in.
    self.assertEqual(main(["commit", self.FOO + ".dvc"]), 0)
    self.assertTrue(os.path.isfile(self.FOO))
    self.assertEqual(len(os.listdir(self.dvc.odb.local.cache_dir)), 1)
def test(self):
    """Removing a remote works regardless of unrelated core settings."""
    self.assertEqual(main(["config", "core.checksum_jobs", "1"]), 0)
    remote = "mys3"
    self.assertEqual(main(["remote", "add", remote, "s3://bucket/name"]), 0)
    self.assertEqual(main(["remote", "remove", remote]), 0)
def test(self):
    stages = self.dvc.add(self.FOO)
    self.assertEqual(len(stages), 1)
    self.assertTrue(stages[0] is not None)

    # Moving a tracked file succeeds; moving a missing one must fail.
    self.assertEqual(main(["move", self.FOO, self.FOO + "1"]), 0)
    self.assertNotEqual(main(["move", "non-existing-file", "dst"]), 0)
def test_run_metrics(tmp_dir, scm, dvc, exp_stage, mocker):
    """`exp run -m` must trigger exactly one metrics `show` afterwards."""
    from dvc.cli import main

    mocker.patch.object(
        dvc.experiments, "run", return_value={"abc123": "abc123"}
    )
    show_mock = mocker.patch.object(dvc.metrics, "show", return_value={})
    main(["exp", "run", "-m"])
    # FIX: `assert show_mock.called_once()` was a no-op — `called_once` is
    # an auto-created Mock attribute whose call returns a truthy Mock, so
    # the assertion could never fail. Use the real Mock assertion instead.
    show_mock.assert_called_once()
def test_cmd_remove_gitignore_multistage(tmp_dir, scm, dvc, run_copy):
    (stage,) = tmp_dir.dvc_gen("foo", "foo")
    stage1 = run_copy("foo", "foo1", single_stage=True)
    stage2 = run_copy("foo1", "foo2", name="copy-foo1-foo2")

    gitignore = tmp_dir / ".gitignore"
    assert gitignore.exists()

    # Removing every stage, newest first, must empty out .gitignore too.
    for target in (stage2, stage1, stage):
        assert main(["remove", target.addressing]) == 0
    assert not gitignore.exists()
def test(self):
    self.swap_foo_with_bar()

    # status and repro of the real stage succeed after the swap...
    self.assertEqual(main(["status"]), 0)
    self.assertEqual(main(["repro", self._get_stage_target(self.stage)]), 0)
    # ...while reproducing an unknown target must fail.
    self.assertNotEqual(main(["repro", "non-existing-file"]), 0)
def test(self):
    # --file accepts a plain filename...
    self.assertEqual(
        0, main(["import-url", "--file", "bar.dvc", self.external_source])
    )
    self.assertTrue(os.path.exists("bar.dvc"))
    os.remove("bar.dvc")

    # ...and a path inside a subdirectory, together with an explicit out.
    os.mkdir("sub")
    dvcfile = os.path.join("sub", "bar.dvc")
    self.assertEqual(
        0, main(["import-url", "--file", dvcfile, self.external_source, "out"])
    )
    self.assertTrue(os.path.exists(dvcfile))