def test(self):
    """Verify metrics_show collects the cached metrics file from every branch."""
    self.dvc.scm.commit('init')
    for branch in ('one', 'two'):
        self.dvc.scm.branch(branch)

    for branch in ('master', 'one', 'two'):
        self._do_write(branch)

    # Re-open the project so it sees the freshly committed branches.
    self.dvc = Project('.')

    shown = self.dvc.metrics_show('metrics.json',
                                  all_branches=True,
                                  typ='json',
                                  xpath='metrics')
    expected = {
        branch: {'metrics.json': [branch]}
        for branch in ('master', 'one', 'two')
    }
    self.assertEqual(shown, expected)
def test(self):
    """End-to-end external-remote flow: import, run, reproduce, gc, checkout.

    Skipped (early return) unless the subclass enables should_test().
    Fix: the second ``imp`` call's return value was bound to an unused
    local (``import_remote_stage``); the binding is dropped while the
    side-effecting call itself is kept.
    """
    if not self.should_test():
        return

    # Point the project cache for this scheme at a fresh bucket key.
    cache = self.scheme + self.scheme_sep + self.bucket + self.sep + str(uuid.uuid4())

    ret = main(['config', 'cache.' + self.cache_scheme, 'myrepo'])
    self.assertEqual(ret, 0)
    ret = main(['remote', 'add', 'myrepo', cache])
    self.assertEqual(ret, 0)

    # Separate remote that holds the dependency/output data.
    remote_name = 'myremote'
    remote_key = str(uuid.uuid4())
    remote = self.scheme + self.scheme_sep + self.bucket + self.sep + remote_key
    ret = main(['remote', 'add', remote_name, remote])
    self.assertEqual(ret, 0)

    self.dvc = Project('.')

    foo_key = remote_key + self.sep + self.FOO
    bar_key = remote_key + self.sep + self.BAR
    foo_path = self.scheme + self.scheme_sep + self.bucket + self.sep + foo_key
    bar_path = self.scheme + self.scheme_sep + self.bucket + self.sep + bar_key

    # Using both plain and remote notation
    out_foo_path = 'remote://' + remote_name + '/' + self.FOO
    out_bar_path = bar_path

    self.write(self.bucket, foo_key, self.FOO_CONTENTS)
    # sleep() presumably lets remote timestamps/consistency settle -- TODO confirm
    sleep()

    import_stage = self.dvc.imp(out_foo_path, 'import')
    self.assertTrue(os.path.exists('import'))
    self.assertTrue(filecmp.cmp('import', self.FOO))

    # Import the same remote path a second time under a derived name.
    # (Return value intentionally unused; only the created stage matters.)
    self.dvc.imp(out_foo_path, out_foo_path + '_imported')

    cmd_stage = self.dvc.run(outs=[out_bar_path],
                             deps=[out_foo_path],
                             cmd=self.cmd(foo_path, bar_path))

    # Change the remote dependency: both stages must now be reproduced.
    self.write(self.bucket, foo_key, self.BAR_CONTENTS)
    sleep()

    stages = self.dvc.reproduce(import_stage.path)
    self.assertEqual(len(stages), 1)
    self.assertTrue(os.path.exists('import'))
    self.assertTrue(filecmp.cmp('import', self.BAR))

    stages = self.dvc.reproduce(cmd_stage.path)
    self.assertEqual(len(stages), 1)

    self.dvc.gc()

    self.dvc.remove(cmd_stage.path, outs_only=True)
    self.dvc.checkout(cmd_stage.path)
def _test_metrics(self, func):
    """Run ``func`` once per branch and check metrics_show output per branch."""
    branches = ("master", "one", "two")

    self.dvc.scm.commit("init")
    self.dvc.scm.branch("one")
    self.dvc.scm.branch("two")

    for name in branches:
        func(name)

    # Re-open the project to pick up the committed branch state.
    self.dvc = Project(".")

    result = self.dvc.metrics_show(
        "metrics.json", all_branches=True, typ="json", xpath="metrics"
    )
    self.assertEqual(result, {b: {"metrics.json": [b]} for b in branches})
def test(self):
    """Reproduce a stage whose dependency and output live on an external remote."""
    if not self.should_test():
        return

    def url(key):
        # Build '<scheme>://<bucket>/<key>' for this backend.
        return self.scheme + '://' + self.bucket + '/' + key

    cache = url(str(uuid.uuid4()))
    self.assertEqual(main(['config', 'cache.' + self.scheme, 'myrepo']), 0)
    self.assertEqual(main(['remote', 'add', 'myrepo', cache]), 0)

    self.dvc = Project('.')

    foo_key = str(uuid.uuid4()) + '/' + self.FOO
    bar_key = str(uuid.uuid4()) + '/' + self.BAR
    foo_path = url(foo_key)
    bar_path = url(bar_key)

    self.write(self.bucket, foo_key, 'foo')
    stage = self.dvc.run(outs=[bar_path],
                         deps=[foo_path],
                         cmd='{} {} {}'.format(self.cmd, foo_path, bar_path))

    # Changing the dependency's content must trigger exactly one repro.
    self.write(self.bucket, foo_key, 'bar')
    reproduced = self.dvc.reproduce(stage.path)
    self.assertEqual(len(reproduced), 1)
def test(self):
    """Checkout must remove a cached file whose content no longer matches its md5."""
    time.sleep(1)

    # NOTE: using 'copy' so that cache and link don't have same inode
    self.assertEqual(main(['config', 'cache.type', 'copy']), 0)
    self.dvc = Project('.')

    added = self.dvc.add(self.DATA_DIR)
    self.assertEqual(len(added), 1)
    self.assertEqual(len(added[0].outs), 1)
    directory_out = added[0].outs[0]

    # NOTE: modifying cache file for one of the files inside the directory
    # to check if dvc will detect that the cache is corrupted.
    local_cache = self.dvc.cache.local
    first_entry = local_cache.load_dir_cache(directory_out.md5)[0]
    corrupted = local_cache.get(first_entry[local_cache.PARAM_MD5])
    with open(corrupted, 'w+') as fobj:
        fobj.write('1')

    self.dvc.checkout(force=True)
    self.assertFalse(os.path.exists(corrupted))
def run_cmd(self):
    """Initialize a DVC project in the cwd; return 0 on success, 1 on failure."""
    try:
        Project.init('.', no_scm=self.args.no_scm)
        return 0
    except InitError as exc:
        Logger.error('Failed to initiate dvc', exc)
        return 1
def test(self):
    """Moving a non-data-source output must fail via both the API and the CLI."""
    from dvc.project import Project

    self.dvc = Project(self._root_dir)

    # API path raises a dedicated exception...
    with self.assertRaises(MoveNotDataSourceError):
        self.dvc.move(self.file1, "dst")

    # ...and the CLI reports a non-zero exit code.
    self.assertNotEqual(main(["move", self.file1, "dst"]), 0)
def run_cmd(self):
    """Initialize a DVC project (optionally forced); 0 on success, 1 on failure."""
    from dvc.project import Project, InitError

    try:
        Project.init('.', no_scm=self.args.no_scm, force=self.args.force)
        return 0
    except InitError as exc:
        Logger.error('Failed to initiate dvc', exc)
        return 1
def test(self):
    """HTTP remote end-to-end: import (plain and Content-MD5 server),
    run with an HTTP dependency, then pull from an HTTP cache remote."""
    ret1 = main(["remote", "add", "mycache", self.external_cache])
    ret2 = main(["remote", "add", "myremote", self.remote])
    self.assertEqual(ret1, 0)
    self.assertEqual(ret2, 0)

    self.dvc = Project(".")

    # Import
    with StaticFileServer():
        import_url = urljoin(self.remote, self.FOO)
        import_output = "imported_file"
        import_stage = self.dvc.imp(import_url, import_output)

    self.assertTrue(os.path.exists(import_output))
    self.assertTrue(filecmp.cmp(import_output, self.FOO, shallow=False))

    self.dvc.remove("imported_file.dvc")

    # Same import, but against a server that sends a Content-MD5 header.
    with StaticFileServer(handler="Content-MD5"):
        import_url = urljoin(self.remote, self.FOO)
        import_output = "imported_file"
        import_stage = self.dvc.imp(import_url, import_output)

    self.assertTrue(os.path.exists(import_output))
    self.assertTrue(filecmp.cmp(import_output, self.FOO, shallow=False))

    # Run --deps
    with StaticFileServer():
        run_dependency = urljoin(self.remote, self.BAR)
        run_output = "remote_file"
        cmd = 'open("{}", "w+")'.format(run_output)

        with open("create-output.py", "w") as fd:
            fd.write(cmd)

        # Server must be up here: the HTTP dep is checksummed during run().
        run_stage = self.dvc.run(
            deps=[run_dependency],
            outs=[run_output],
            cmd="python create-output.py",
        )
        self.assertTrue(run_stage is not None)

    self.assertTrue(os.path.exists(run_output))

    # Pull
    self.dvc.remove(import_stage.path, outs_only=True)
    self.assertFalse(os.path.exists(import_output))

    # Move the local cache away so pull has to fetch from the HTTP cache.
    shutil.move(self.local_cache, self.external_cache_id)
    self.assertFalse(os.path.exists(self.local_cache))

    with StaticFileServer():
        self.dvc.pull(import_stage.path, remote="mycache")

    self.assertTrue(os.path.exists(import_output))
class TestMoveNotDataSource(TestRepro):
    """dvc move must refuse outputs that are not data sources."""

    def test(self):
        from dvc.project import Project

        self.dvc = Project(self._root_dir)

        # The API raises, and the CLI exits non-zero, for the same move.
        with self.assertRaises(MoveNotDataSourceError):
            self.dvc.move(self.file1, 'dst')
        self.assertNotEqual(main(['move', self.file1, 'dst']), 0)
def setUp(self):
    """Create a second project whose local config points at this project's cache."""
    super(TestGCMultipleProjects, self).setUp()

    self.additional_path = TestDir.mkdtemp()
    self.additional_git = Repo.init(self.additional_path)
    self.additional_dvc = Project.init(self.additional_path)

    # Point the extra project's local config at the main project's cache dir.
    shared_cache = os.path.join(self._root_dir, '.dvc', 'cache')
    local_config = configobj.ConfigObj()
    local_config.filename = os.path.join(self.additional_path, '.dvc',
                                         'config.local')
    local_config['cache'] = {'dir': shared_cache}
    local_config.write()

    # Re-open so the project picks up the new cache location.
    self.additional_dvc = Project(self.additional_path)
def setUp(self):
    """Spin up a second DVC project configured to share this project's cache."""
    super(TestGCMultipleProjects, self).setUp()

    self.additional_path = TestDir.mkdtemp()
    self.additional_git = Repo.init(self.additional_path)
    self.additional_dvc = Project.init(self.additional_path)

    # Write a local config entry pointing at the primary project's cache.
    cfg = configobj.ConfigObj()
    cfg.filename = os.path.join(self.additional_path, ".dvc", "config.local")
    cfg["cache"] = {"dir": os.path.join(self._root_dir, ".dvc", "cache")}
    cfg.write()

    # Reload the project so it uses the shared cache.
    self.additional_dvc = Project(self.additional_path)
def test(self):
    """'remote://' notation and absolute paths must both work for local outs."""
    cwd = os.getcwd()
    remote = "myremote"
    self.assertEqual(main(["remote", "add", remote, cwd]), 0)

    self.dvc = Project()

    def recorded_path(dvcfile):
        # Read back the out path that 'dvc add' wrote to the stage file.
        with open(dvcfile, "r") as fobj:
            return yaml.safe_load(fobj)["outs"][0]["path"]

    foo = "remote://{}/{}".format(remote, self.FOO)
    self.assertEqual(main(["add", foo]), 0)
    self.assertEqual(recorded_path("foo.dvc"), foo)

    bar = os.path.join(cwd, self.BAR)
    self.assertEqual(main(["add", bar]), 0)
    self.assertEqual(recorded_path("bar.dvc"), bar)
def test(self):
    """
    Making sure that 'remote' syntax is handled properly for local outs.
    """
    cwd = os.getcwd()
    remote_name = 'myremote'
    self.assertEqual(main(['remote', 'add', remote_name, cwd]), 0)

    self.dvc = Project()

    # 'remote://' notation must round-trip through the generated .dvc file.
    foo = 'remote://{}/{}'.format(remote_name, self.FOO)
    self.assertEqual(main(['add', foo]), 0)
    with open('foo.dvc', 'r') as fobj:
        foo_stage = yaml.safe_load(fobj)
    self.assertEqual(foo_stage['outs'][0]['path'], foo)

    # Plain absolute path must round-trip as well.
    bar = os.path.join(cwd, self.BAR)
    self.assertEqual(main(['add', bar]), 0)
    with open('bar.dvc', 'r') as fobj:
        bar_stage = yaml.safe_load(fobj)
    self.assertEqual(bar_stage['outs'][0]['path'], bar)
def _is_enabled(cmd=None):
    """Return whether analytics collection is enabled for this invocation."""
    from dvc.config import Config
    from dvc.project import Project
    from dvc.exceptions import NotDvcProjectError
    from dvc.command.daemon import CmdDaemonBase

    # Never collect analytics from the test suite or from the daemon itself.
    if os.getenv("DVC_TEST") or isinstance(cmd, CmdDaemonBase):
        return False

    if cmd is not None and hasattr(cmd, "config"):
        config = cmd.config
        assert config is not None
    else:
        try:
            config = Config(Project.find_dvc_dir())
            assert config is not None
        except NotDvcProjectError:
            # Outside of a project, fall back to global/system config only.
            config = Config(validate=False)
            assert config is not None

    core = config.config.get(Config.SECTION_CORE, {})
    enabled = core.get(Config.SECTION_CORE_ANALYTICS, True)
    logger.debug("Analytics is {}abled.".format("en" if enabled else "dis"))
    return enabled
def __init__(self, args):
    """Select the config level (system/global/local/project) requested by args.

    Levels stored inside the repo require a DVC directory; requesting them
    outside of a project re-raises the original lookup error.
    """
    from dvc.project import Project, NotDvcProjectError

    self.args = args

    saved_exc = None
    try:
        dvc_dir = os.path.join(Project.find_root(), Project.DVC_DIR)
    except NotDvcProjectError as exc:
        dvc_dir = None
        saved_exc = exc

    self.config = Config(dvc_dir, validate=False)

    if args.system:
        self.configobj = self.config._system_config
    elif args.glob:
        self.configobj = self.config._global_config
    else:
        # Both the 'local' and default 'project' levels live inside the repo.
        if dvc_dir is None:
            raise saved_exc
        if args.local:
            self.configobj = self.config._local_config
        else:
            self.configobj = self.config._project_config
def __init__(self, args):
    """Bind this command to the current project and configure log level."""
    from dvc.project import Project

    self.args = args
    self.project = Project()
    self.config = self.project.config
    self._set_loglevel(args)
def __init__(self, args):
    """Attach the project and tune logger verbosity from the CLI flags."""
    self.project = Project(self._find_root())
    self.args = args

    # Only flip verbosity when exactly one of the two flags is given.
    if args.quiet and not args.verbose:
        self.project.logger.be_quiet()
    elif args.verbose and not args.quiet:
        self.project.logger.be_verbose()
class TestReproExternalBase(TestDvc):
    """Base for per-backend external-repro tests; subclasses opt in."""

    def should_test(self):
        # Subclasses override this to enable the test for their backend.
        return False

    def test(self):
        """Reproduce a stage with external dep/out on the subclass backend."""
        if not self.should_test():
            return

        def url(key):
            # Build '<scheme>://<bucket>/<key>' for this backend.
            return self.scheme + '://' + self.bucket + '/' + key

        cache = url(str(uuid.uuid4()))
        self.assertEqual(main(['config', 'cache.' + self.scheme, 'myrepo']), 0)
        self.assertEqual(main(['remote', 'add', 'myrepo', cache]), 0)

        remote_name = 'myremote'
        remote_key = str(uuid.uuid4())
        self.assertEqual(main(['remote', 'add', remote_name, url(remote_key)]),
                         0)

        self.dvc = Project('.')

        foo_key = remote_key + '/' + self.FOO
        bar_key = remote_key + '/' + self.BAR
        foo_path = url(foo_key)
        bar_path = url(bar_key)

        # Using both plain and remote notation
        out_foo_path = 'remote://' + remote_name + '/' + self.FOO
        out_bar_path = bar_path

        self.write(self.bucket, foo_key, 'foo')
        stage = self.dvc.run(outs=[out_bar_path],
                             deps=[out_foo_path],
                             cmd='{} {} {}'.format(self.cmd, foo_path,
                                                   bar_path))

        # A changed dependency must trigger exactly one repro.
        self.write(self.bucket, foo_key, 'bar')
        self.assertEqual(len(self.dvc.reproduce(stage.path)), 1)
def test(self):
    """HTTP remote: import a file, run with an HTTP dep, then pull from cache."""
    ret1 = main(['remote', 'add', 'mycache', self.external_cache])
    ret2 = main(['remote', 'add', 'myremote', self.remote])
    self.assertEqual(ret1, 0)
    self.assertEqual(ret2, 0)

    self.dvc = Project('.')

    # Import
    with StaticFileServer():
        import_url = urljoin(self.remote, self.FOO)
        import_output = 'imported_file'
        import_stage = self.dvc.imp(import_url, import_output)

    self.assertTrue(os.path.exists(import_output))
    self.assertTrue(filecmp.cmp(import_output, self.FOO, shallow=False))

    # Run --deps
    with StaticFileServer():
        run_dependency = urljoin(self.remote, self.BAR)
        run_output = 'remote_file'
        cmd = 'open("{}", "w+")'.format(run_output)

        with open('create-output.py', 'w') as fd:
            fd.write(cmd)

        # Server must be up here: the HTTP dep is checksummed during run().
        run_stage = self.dvc.run(deps=[run_dependency],
                                 outs=[run_output],
                                 cmd='python create-output.py')
        self.assertTrue(run_stage is not None)

    self.assertTrue(os.path.exists(run_output))

    # Pull
    self.dvc.remove(import_stage.path, outs_only=True)
    self.assertFalse(os.path.exists(import_output))

    # Relocate the local cache so pull has to hit the HTTP cache remote.
    shutil.move(self.local_cache, self.external_cache_id)
    self.assertFalse(os.path.exists(self.local_cache))

    with StaticFileServer():
        self.dvc.pull(import_stage.path, remote='mycache')

    self.assertTrue(os.path.exists(import_output))
def run(self):
    """Synchronously fetch updater info for the current project; return 0."""
    import os
    from dvc.project import Project
    from dvc.updater import Updater

    dvc_dir = os.path.join(Project.find_root(), Project.DVC_DIR)
    # detach=False: fetch in-process instead of spawning a background daemon.
    Updater(dvc_dir).fetch(detach=False)
    return 0
def run_cmd(self):
    """Initialize DVC in the cwd and bind project/config; 0 ok, 1 on failure."""
    from dvc.project import Project, InitError

    try:
        self.project = Project.init('.', no_scm=self.args.no_scm,
                                    force=self.args.force)
    except InitError:
        logger.error('failed to initiate dvc')
        return 1

    self.config = self.project.config
    return 0
def test(self):
    """Checkout must remove a cached file whose content fails its checksum."""
    # NOTE: using 'copy' so that cache and link don't have same inode
    self.assertEqual(main(["config", "cache.type", "copy"]), 0)
    self.dvc = Project(".")

    added = self.dvc.add(self.DATA_DIR)
    self.assertEqual(len(added), 1)
    self.assertEqual(len(added[0].outs), 1)
    directory_out = added[0].outs[0]

    # NOTE: modifying cache file for one of the files inside the directory
    # to check if dvc will detect that the cache is corrupted.
    local = self.dvc.cache.local
    first_entry = local.load_dir_cache(directory_out.checksum)[0]
    corrupted = local.get(first_entry[local.PARAM_CHECKSUM])
    with open(corrupted, "w+") as fobj:
        fobj.write("1")

    self.dvc.checkout(force=True)
    self.assertFalse(os.path.exists(corrupted))
def collect(self):
    """Fill self.info with version, binary flag, user id, system and SCM info."""
    from dvc.scm import SCM
    from dvc.utils import is_binary
    from dvc.project import Project
    from dvc.exceptions import NotDvcProjectError

    self.info[self.PARAM_DVC_VERSION] = VERSION
    self.info[self.PARAM_IS_BINARY] = is_binary()
    self.info[self.PARAM_USER_ID] = self._get_user_id()
    self.info[self.PARAM_SYSTEM_INFO] = self._collect_system_info()

    # The SCM class is only known when we are inside a project; outside,
    # silently skip this field.
    try:
        scm = SCM(root_dir=Project._find_root())
    except NotDvcProjectError:
        return
    self.info[self.PARAM_SCM_CLASS] = type(scm).__name__
class TestCachedMetrics(TestDvc):
    """metrics_show must read the metrics file from cache on every branch."""

    def _do_write(self, branch):
        """Commit a metrics.json naming ``branch`` on that branch."""
        self.dvc.scm.checkout(branch)
        self.dvc.checkout(force=True)

        with open('metrics.json', 'w+') as fd:
            json.dump({'metrics': branch}, fd)

        stages = self.dvc.add('metrics.json')
        self.assertEqual(len(stages), 1)
        self.assertNotEqual(stages[0], None)

        self.dvc.scm.add(['.gitignore', 'metrics.json.dvc'])
        self.dvc.scm.commit(branch)

    def test(self):
        self.dvc.scm.commit('init')
        self.dvc.scm.branch('one')
        self.dvc.scm.branch('two')

        for branch in ('master', 'one', 'two'):
            self._do_write(branch)

        # Re-open so the project sees the committed branches.
        self.dvc = Project('.')

        res = self.dvc.metrics_show('metrics.json',
                                    all_branches=True,
                                    typ='json',
                                    xpath='metrics')
        self.assertEqual(
            res,
            {b: {'metrics.json': [b]} for b in ('master', 'one', 'two')})
class TestCachedMetrics(TestDvc):
    """metrics_show with all_branches must pull metrics out of the cache."""

    def _do_write(self, branch):
        """Check out ``branch``, commit a metrics.json that names it."""
        self.dvc.scm.checkout(branch)
        self.dvc.checkout(force=True)

        with open("metrics.json", "w+") as fd:
            json.dump({"metrics": branch}, fd)

        stages = self.dvc.add("metrics.json")
        self.assertEqual(len(stages), 1)
        self.assertNotEqual(stages[0], None)

        self.dvc.scm.add([".gitignore", "metrics.json.dvc"])
        self.dvc.scm.commit(branch)

    def test(self):
        branches = ("master", "one", "two")

        self.dvc.scm.commit("init")
        self.dvc.scm.branch("one")
        self.dvc.scm.branch("two")

        for branch in branches:
            self._do_write(branch)

        # Reload the project so it reflects the committed branch state.
        self.dvc = Project(".")

        res = self.dvc.metrics_show(
            "metrics.json", all_branches=True, typ="json", xpath="metrics"
        )
        self.assertEqual(res, {b: {"metrics.json": [b]} for b in branches})
def setUp(self):
    """Create a fresh DVC project in the test root and make it verbose."""
    super(TestDvc, self).setUp()
    self.dvc = Project.init(self._root_dir)
    self.dvc.logger.be_verbose()
def test_api(self):
    """Initialize via the API without SCM, then run the shared init checks."""
    Project.init(no_scm=True)
    self._test_init()
def test_api(self):
    """Project.init must raise InitError under this test class's setup."""
    with self.assertRaises(InitError):
        Project.init()
def test_api(self):
    """Initialize via the API, then run the shared init checks."""
    Project.init()
    self._test_init()