def test_show_running_workspace(tmp_dir, scm, dvc, exp_stage, capsys):
    """A pidfile for the workspace should mark the workspace baseline as running."""
    # Drop an executor pidfile into the experiments PID directory so that
    # `exp show` treats the workspace as an actively running experiment.
    pidfile_dir = os.path.join(dvc.tmp_dir, dvc.experiments.EXEC_PID_DIR)
    makedirs(pidfile_dir, True)
    exec_info = ExecutorInfo(None, None, None, BaseExecutor.DEFAULT_LOCATION)
    pidfile_path = os.path.join(
        pidfile_dir, f"workspace{BaseExecutor.PIDFILE_EXT}"
    )
    (tmp_dir / pidfile_path).dump(exec_info.to_dict())

    expected_workspace = {
        "baseline": {
            "data": {
                "metrics": {"metrics.yaml": {"data": {"foo": 1}}},
                "params": {"params.yaml": {"data": {"foo": 1}}},
                "queued": False,
                "running": True,
                "executor": exec_info.location,
                "timestamp": None,
            }
        }
    }
    assert dvc.experiments.show()["workspace"] == expected_workspace

    # Clear anything captured so far, then check the CLI table output.
    capsys.readouterr()
    assert main(["exp", "show", "--no-pager"]) == 0
    captured = capsys.readouterr()
    assert "Running" in captured.out
    assert exec_info.location in captured.out
def test_show_running_executor(tmp_dir, scm, dvc, exp_stage):
    """A pidfile for a queued exp should mark that exp (not the workspace) as running."""
    baseline_rev = scm.get_rev()
    dvc.experiments.run(exp_stage.addressing, params=["foo=2"], queue=True)
    exp_rev = dvc.experiments.scm.resolve_rev(f"{EXPS_STASH}@{{0}}")

    # Write a pidfile keyed by the stashed experiment rev.
    pidfile_dir = os.path.join(dvc.tmp_dir, dvc.experiments.EXEC_PID_DIR)
    makedirs(pidfile_dir, True)
    exec_info = ExecutorInfo(None, None, None, BaseExecutor.DEFAULT_LOCATION)
    pidfile_path = os.path.join(
        pidfile_dir, f"{exp_rev}{BaseExecutor.PIDFILE_EXT}"
    )
    (tmp_dir / pidfile_path).dump(exec_info.to_dict())

    results = dvc.experiments.show()
    exp_data = get_in(results, [baseline_rev, exp_rev, "data"])
    # The experiment itself is running (no longer just queued)...
    assert not exp_data["queued"]
    assert exp_data["running"]
    assert exp_data["executor"] == exec_info.location
    # ...while the workspace baseline is not.
    assert not results["workspace"]["baseline"]["data"]["running"]
def test_show_running_checkpoint(
    tmp_dir, scm, dvc, checkpoint_stage, workspace, mocker
):
    """A running checkpoint exp should be reported as running at its checkpoint rev.

    Parametrized by ``workspace``: when true the exp "runs" in the workspace
    (pidfile keyed by "workspace", EXEC_BRANCH points at the exp ref);
    otherwise it runs in a tempdir executor keyed by the stash rev.
    """
    from dvc.repo.experiments.base import EXEC_BRANCH
    from dvc.repo.experiments.executor.local import TempDirExecutor

    baseline_rev = scm.get_rev()
    dvc.experiments.run(
        checkpoint_stage.addressing, params=["foo=2"], queue=True
    )
    stash_rev = dvc.experiments.scm.resolve_rev(f"{EXPS_STASH}@{{0}}")
    run_results = dvc.experiments.run(run_all=True)
    checkpoint_rev = first(run_results)
    exp_ref = first(exp_refs_by_rev(scm, checkpoint_rev))

    pidfile_dir = os.path.join(dvc.tmp_dir, dvc.experiments.EXEC_PID_DIR)
    makedirs(pidfile_dir, True)
    if workspace:
        location = BaseExecutor.DEFAULT_LOCATION
        pidfile_rev = "workspace"
    else:
        location = TempDirExecutor.DEFAULT_LOCATION
        pidfile_rev = stash_rev
    exec_info = ExecutorInfo(123, "foo.git", baseline_rev, location)
    pidfile_path = os.path.join(
        pidfile_dir, f"{pidfile_rev}{BaseExecutor.PIDFILE_EXT}"
    )
    (tmp_dir / pidfile_path).dump(exec_info.to_dict())

    mocker.patch.object(
        BaseExecutor, "fetch_exps", return_value=[str(exp_ref)]
    )
    if workspace:
        scm.set_ref(EXEC_BRANCH, str(exp_ref), symbolic=True)

    results = dvc.experiments.show()
    checkpoint_res = get_in(results, [baseline_rev, checkpoint_rev, "data"])
    assert checkpoint_res["running"]
    assert checkpoint_res["executor"] == exec_info.location
    assert not results["workspace"]["baseline"]["data"]["running"]
def make_executor_info(**kwargs):
    """Build an ExecutorInfo, filling any missing required field with ""."""
    required = ("git_url", "baseline_rev", "location", "root_dir", "dvc_dir")
    for field in required:
        # setdefault leaves caller-supplied values untouched
        kwargs.setdefault(field, "")
    return ExecutorInfo(**kwargs)
def run(self):
    """Load executor info from the CLI-supplied infofile and reproduce the exp.

    Returns 0 on completion (CLI exit code).
    """
    from dvc.repo.experiments.executor.base import (
        BaseExecutor,
        ExecutorInfo,
    )
    from dvc.utils.serialize import load_json

    infofile = self.args.infofile
    exec_info = ExecutorInfo.from_dict(load_json(infofile))
    BaseExecutor.reproduce(
        info=exec_info,
        rev="",
        queue=None,
        log_level=logger.getEffectiveLevel(),
        infofile=infofile,
    )
    return 0
def test_reproduce(tmp_dir, scm, dvc, cloud, exp_stage, mocker):
    """SSHExecutor.reproduce should shell out exactly one `dvc exp exec-run` call.

    Bug fix: the original used ``assert mock_execute.called_once()`` —
    ``called_once`` is not a Mock assertion method, so attribute access
    auto-creates a child mock and calling it returns a (truthy) Mock;
    the assertion could never fail. Use ``assert_called_once()`` instead.
    """
    from sshfs import SSHFileSystem as _sshfs

    rev = scm.get_rev()
    root_url = cloud / SSHExecutor.gen_dirname()
    # Pretend the remote root already exists; capture the remote command.
    mocker.patch.object(SSHFileSystem, "exists", return_value=True)
    mock_execute = mocker.patch.object(_sshfs, "execute")

    info = ExecutorInfo(
        str(root_url),
        rev,
        "machine-foo",
        str(root_url.path),
        ".dvc",
    )
    infofile = str((root_url / "foo.run").path)
    SSHExecutor.reproduce(
        info,
        rev,
        infofile=infofile,
        fs_factory=partial(_ssh_factory, cloud),
    )

    mock_execute.assert_called_once()
    _name, args, _kwargs = mock_execute.mock_calls[0]
    assert f"dvc exp exec-run --infofile {infofile}" in args[0]