def test_info_outside_of_repo(tmp_dir, caplog):
    """Outside a repo, info shows version/platform/remotes but no repo data."""
    out = get_dvc_info()

    # Version, platform and supported-remote lines must always be present.
    for pattern in (
        r"DVC version: \d+\.\d+\.\d+.*",
        f"Platform: {PYTHON_VERSION_REGEX} on .*",
    ):
        assert re.search(pattern, out)
    assert find_supported_remotes(out)

    # Repo-specific sections must be absent when there is no repo.
    assert not re.search(r"Cache types: .*", out)
    assert "Repo:" not in out
def test_plugin_versions(tmp_dir, dvc):
    """Each reported remote lists exactly the dependencies its fs requires."""
    from dvc.fs import FS_MAP

    parsed = find_supported_remotes(get_dvc_info())
    for fs_name, deps in parsed.items():
        assert deps.keys() == FS_MAP[fs_name].REQUIRES.keys()
def test_info_outside_of_repo(tmp_dir, caplog):
    """Outside a repo, info shows version/platform/supports but no repo data."""
    out = get_dvc_info()

    # Lines that must be present regardless of repo state.
    expected = (
        r"DVC version: \d+\.\d+\.\d+.*",
        r"Platform: Python \d\.\d+\.\d+ on .*",
        r"Supports: .*",
    )
    for pattern in expected:
        assert re.search(pattern, out)

    # Repo-specific sections must be absent when there is no repo.
    assert not re.search(r"Cache types: .*", out)
    assert "Repo:" not in out
def _log_unknown_exceptions() -> None:
    """Log an unexpected error with traceback, plus developer info at DEBUG.

    Must be called from inside an ``except`` block so that
    ``logger.exception`` has an active exception to report.
    """
    from dvc.info import get_dvc_info
    from dvc.logger import FOOTER

    logger.exception("unexpected error")

    if logger.isEnabledFor(logging.DEBUG):
        # Collecting version info is comparatively expensive, so only do it
        # when the debug record would actually be emitted.
        logger.debug("Version info for developers:\n%s", get_dvc_info())

    logger.info(FOOTER)
    # Fix: dropped the redundant trailing `return None` — the function is
    # annotated `-> None` and Python returns None implicitly.
def run(self):
    """Print DVC environment info, then check for available updates."""
    from dvc.info import get_dvc_info
    from dvc.updater import notify_updates

    # force=True so the info is written even when output is quieted.
    ui.write(get_dvc_info(), force=True)
    notify_updates()
    return 0
def test_remotes(tmp_dir, dvc, caplog):
    """Remote schemes are reported, resolving `remote://` through its base."""
    remotes = (
        ("server", "ssh://localhost"),
        ("r1", "azure://example.com/path"),
        ("r2", "remote://server/path"),
    )
    for name, url in remotes:
        tmp_dir.add_remote(name=name, url=url, default=False)

    out = get_dvc_info()
    # Scheme order is runtime dependent, so accept either ordering.
    assert re.search("Remotes: (ssh, azure|azure, ssh)", out)
def test_caches(tmp_dir, dvc, caplog):
    """Configuring a remote-backed cache makes its scheme show up in info."""
    tmp_dir.add_remote(
        name="sshcache", url="ssh://example.com/path", default=False
    )
    with tmp_dir.dvc.config.edit() as config:
        config["cache"]["ssh"] = "sshcache"

    out = get_dvc_info()
    # Order of cache types is runtime dependent
    assert re.search("Caches: (local, ssh|ssh, local)", out)
def test_info_in_subdir(tmp_dir, scm, caplog):
    """A DVC repo nested under the git root is reported as `dvc (subdir), git`."""
    dvc_subdir = tmp_dir / "subdir"
    dvc_subdir.mkdir()

    with dvc_subdir.chdir():
        # Init DVC without its own scm, then drop `no_scm` so the outer git
        # repo is picked up.
        dvc_subdir.init(scm=False, dvc=True)
        with dvc_subdir.dvc.config.edit() as conf:
            del conf["core"]["no_scm"]

        out = get_dvc_info()

    assert "Repo: dvc (subdir), git" in out
def test_info_in_repo(scm_init, tmp_dir):
    """Inside a repo, info includes cache types and the repo/scm summary."""
    tmp_dir.init(scm=scm_init, dvc=True)
    # Create `.dvc/cache`, that is needed to check supported link types.
    os.mkdir(tmp_dir.dvc.odb.local.cache_dir)

    out = get_dvc_info()

    for pattern in (
        r"DVC version: \d+\.\d+\.\d+.*",
        f"Platform: {PYTHON_VERSION_REGEX} on .*",
        r"Cache types: .*",
    ):
        assert re.search(pattern, out)
    assert find_supported_remotes(out)

    expected_repo = "Repo: dvc, git" if scm_init else "Repo: dvc (no_scm)"
    assert expected_repo in out
def test_info_in_repo(scm_init, tmp_dir):
    """Inside a repo, info includes cache types and the repo/scm summary."""
    tmp_dir.init(scm=scm_init, dvc=True)
    # Create `.dvc/cache`, that is needed to check supported link types.
    os.mkdir(tmp_dir.dvc.cache.local.cache_dir)

    out = get_dvc_info()

    for pattern in (
        r"DVC version: \d+\.\d+\.\d+.*",
        r"Platform: Python \d\.\d+\.\d+ on .*",
        r"Supports: .*",
        r"Cache types: .*",
    ):
        assert re.search(pattern, out)

    expected_repo = "Repo: dvc, git" if scm_init else "Repo: dvc (no_scm)"
    assert expected_repo in out
def test_fs_info_in_repo(tmp_dir, dvc, caplog):
    """Cache and workspace filesystem info is reported inside a repo."""
    os.mkdir(dvc.odb.local.cache_dir)
    out = get_dvc_info()

    for pattern in (
        r"Cache directory: .* on .*",
        r"Workspace directory: .* on .*",
    ):
        assert re.search(pattern, out)
def main(argv=None):  # noqa: C901
    """Main entry point for dvc CLI.

    Args:
        argv: optional list of arguments to parse. sys.argv is used by default.

    Returns:
        int: command's return code.
    """
    from dvc._debug import debugtools
    from dvc.config import ConfigError
    from dvc.exceptions import DvcException, NotDvcRepoError
    from dvc.logger import FOOTER, disable_other_loggers

    # NOTE: stderr/stdout may be closed if we are running from dvc.daemon.
    # On Linux we directly call cli.main after double forking and closing
    # the copied parent's standard file descriptors. If we make any logging
    # calls in this state it will cause an exception due to writing to a closed
    # file descriptor.
    if sys.stderr.closed:  # pylint: disable=using-constant-test
        logging.disable()
    elif sys.stdout.closed:  # pylint: disable=using-constant-test
        logging.disable(logging.INFO)

    # args stays None if parse_args itself raises; the analytics call below
    # tolerates that.
    args = None
    disable_other_loggers()

    # Remember the log level so it can be restored in the finally block.
    outerLogLevel = logger.level
    try:
        args = parse_args(argv)

        # Map CLI verbosity flags to a log level; -v => DEBUG, -vv+ => TRACE.
        level = None
        if args.quiet:
            level = logging.CRITICAL
        elif args.verbose == 1:
            level = logging.DEBUG
        elif args.verbose > 1:
            level = logging.TRACE

        if level is not None:
            logger.setLevel(level)

        logger.trace(args)

        if not sys.stdout.closed and not args.quiet:
            from dvc.ui import ui

            ui.enable()

        with debugtools(args):
            cmd = args.func(args)
            ret = cmd.do_run()
    # NOTE(review): return codes 251-255 look like a stable CLI contract —
    # confirm before renumbering any of them.
    except ConfigError:
        logger.exception("configuration error")
        ret = 251
    except KeyboardInterrupt:
        logger.exception("interrupted by the user")
        ret = 252
    except NotDvcRepoError:
        logger.exception("")
        ret = 253
    except DvcException:
        ret = 255
        logger.exception("")
    except DvcParserError:
        ret = 254
    except Exception as exc:  # noqa, pylint: disable=broad-except
        # pylint: disable=no-member
        import errno

        if isinstance(exc, OSError) and exc.errno == errno.EMFILE:
            from dvc.utils import error_link

            logger.exception(
                "too many open files, please visit "
                "{} to see how to handle this "
                "problem".format(error_link("many-files")),
                extra={"tb_only": True},
            )
        else:
            from dvc.info import get_dvc_info

            logger.exception("unexpected error")

            # Extra environment detail for bug reports, only visible with -v.
            dvc_info = get_dvc_info()
            logger.debug("Version info for developers:\n%s", dvc_info)

            logger.info(FOOTER)
        ret = 255
    try:
        from dvc import analytics

        if analytics.is_enabled():
            analytics.collect_and_send_report(args, ret)

        return ret
    finally:
        # Restore the caller's log level even if analytics raised.
        logger.setLevel(outerLogLevel)

        from dvc.external_repo import clean_repos

        # Remove cached repos in the end of the call, these are anonymous
        # so won't be reused by any other subsequent run anyway.
        clean_repos()
def main(argv=None):  # noqa: C901
    """Run dvc CLI command.

    Args:
        argv: optional list of arguments to parse. sys.argv is used by default.

    Returns:
        int: command's return code.
    """
    # args stays None if parse_args itself raises; the analytics call below
    # tolerates that.
    args = None
    disable_other_loggers()

    # Remember the log level so it can be restored in the finally block.
    outerLogLevel = logger.level
    try:
        args = parse_args(argv)

        # Map CLI verbosity flags to a log level; -v => DEBUG, -vv+ => TRACE.
        level = None
        if args.quiet:
            level = logging.CRITICAL
        elif args.verbose == 1:
            level = logging.DEBUG
        elif args.verbose > 1:
            level = logging.TRACE

        if level is not None:
            logger.setLevel(level)

        logger.trace(args)

        if not args.quiet:
            from dvc.ui import ui

            ui.enable()

        with debugtools(args):
            cmd = args.func(args)
            ret = cmd.run()
    # NOTE(review): return codes 251-255 look like a stable CLI contract —
    # confirm before renumbering any of them.
    except ConfigError:
        logger.exception("configuration error")
        ret = 251
    except KeyboardInterrupt:
        logger.exception("interrupted by the user")
        ret = 252
    except NotDvcRepoError:
        logger.exception("")
        ret = 253
    except DvcParserError:
        ret = 254
    except DvcException:
        ret = 255
        logger.exception("")
    except Exception as exc:  # noqa, pylint: disable=broad-except
        # pylint: disable=no-member
        import errno

        if isinstance(exc, OSError) and exc.errno == errno.EMFILE:
            from dvc.utils import error_link

            logger.exception(
                "too many open files, please visit "
                "{} to see how to handle this "
                "problem".format(error_link("many-files")),
                extra={"tb_only": True},
            )
        else:
            from dvc.info import get_dvc_info

            logger.exception("unexpected error")

            # Extra environment detail for bug reports, only visible with -v.
            dvc_info = get_dvc_info()
            logger.debug("Version info for developers:\n%s", dvc_info)

            logger.info(FOOTER)
        ret = 255
    try:
        from dvc import analytics

        if analytics.is_enabled():
            analytics.collect_and_send_report(args, ret)

        return ret
    finally:
        # Restore the caller's log level even if analytics raised.
        logger.setLevel(outerLogLevel)

        from dvc.fs.pool import close_pools

        # Closing pools by-hand to prevent weird messages when closing SSH
        # connections. See https://github.com/iterative/dvc/issues/3248 for
        # more info.
        close_pools()

        from dvc.external_repo import clean_repos

        # Remove cached repos in the end of the call, these are anonymous
        # so won't be reused by any other subsequent run anyway.
        clean_repos()
def test_fs_info_outside_of_repo(tmp_dir, caplog):
    """Version, platform and remote support are reported even without a repo."""
    out = get_dvc_info()

    for pattern in (
        rf"DVC version: {DVC_VERSION_REGEX}",
        f"Platform: {PYTHON_VERSION_REGEX} on .*",
    ):
        assert re.search(pattern, out)
    assert find_supported_remotes(out)
def run(self):
    """Log DVC environment info and report success."""
    logger.info(get_dvc_info())
    return 0
def run(self):
    """Write DVC environment info to the UI and report success."""
    from dvc.info import get_dvc_info

    # force=True so the info is written even when output is quieted.
    ui.write(get_dvc_info(), force=True)
    return 0
def test_info_in_broken_git_repo(tmp_dir, dvc, scm, caplog):
    """Removing the .git dir makes info flag the git repo as broken."""
    shutil.rmtree(dvc.scm.dir)
    assert "Repo: dvc, git (broken)" in get_dvc_info()
def test_fs_info_outside_of_repo(tmp_dir, caplog):
    """Version, platform and supports lines are present even without a repo."""
    out = get_dvc_info()

    for pattern in (
        r"DVC version: \d+\.\d+\.\d+.*",
        f"Platform: {PYTHON_VERSION_REGEX} on .*",
        r"Supports: .*",
    ):
        assert re.search(pattern, out)
def test_remotes_empty(tmp_dir, dvc, caplog):
    """With no remotes configured, info reports `Remotes: None`."""
    # No remotes are configured
    assert "Remotes: None" in get_dvc_info()
def run(self):
    """Log DVC environment info and report success."""
    from dvc.info import get_dvc_info

    logger.info(get_dvc_info())
    return 0