def do_conda_install(conda: PathLike, prefix: str, name: str, file: str) -> None:
    """Create a conda environment from a lockfile, then install any pip deps.

    ``file`` is treated as an environment YAML when it ends in ``.yml``;
    otherwise it is an explicit lockfile, whose pip dependencies are carried
    as ``# pip <spec>`` comment lines.
    """
    run_conda = partial(_invoke_conda, conda, prefix, name, check_call=True)
    is_env_yaml = file.endswith(".yml")
    kind = "env" if is_env_yaml else "explicit"

    pip_requirements = []
    if kind == "explicit":
        # Collect the pip specs embedded as comments in the explicit lockfile.
        with open(file) as explicit_env:
            for line in explicit_env:
                if line.startswith("# pip "):
                    pip_requirements.append(line.split("# pip ")[1])

    create_args = []
    if kind == "env" and not is_micromamba(conda):
        # micromamba handles env files without the `env` subcommand.
        create_args.append("env")
    create_args += ["create", "--file", file]
    if kind != "env":
        create_args.append("--yes")
    run_conda(create_args)

    if not pip_requirements:
        return
    # Write the pip specs to a temp requirements file and install them
    # inside the freshly created environment.
    with tempfile.NamedTemporaryFile() as tf:
        write_file("\n".join(pip_requirements), tf.name)
        run_conda(["run"], ["pip", "install", "--no-deps", "-r", tf.name])
def test_run_lock_with_pip_environment_different_names_same_deps(
    monkeypatch, pip_environment_different_names_same_deps, conda_exe
):
    """Locking succeeds when sources reference the same pip deps under different names."""
    env_file = pip_environment_different_names_same_deps
    with filelock.FileLock(str(env_file.parent / "filelock")):
        monkeypatch.chdir(env_file.parent)
        if is_micromamba(conda_exe):
            monkeypatch.setenv("CONDA_FLAGS", "-v")
        run_lock([env_file], conda_exe=conda_exe)
def test_run_lock_with_local_package(monkeypatch, pip_local_package_environment, conda_exe):
    """An editable/local pip package must not appear in the generated lock spec."""
    env_file = pip_local_package_environment
    with filelock.FileLock(str(env_file.parent / "filelock")):
        monkeypatch.chdir(env_file.parent)
        if is_micromamba(conda_exe):
            monkeypatch.setenv("CONDA_FLAGS", "-v")
        repo = default_virtual_package_repodata()
        with repo:
            lock_spec = make_lock_spec(
                src_files=[env_file],
                virtual_package_repo=repo,
            )
            has_pip_dep = any(dep.manager == "pip" for dep in lock_spec.dependencies)
            assert not has_pip_dep, "conda-lock ignores editable pip deps"
def test_run_lock_with_update(monkeypatch, update_environment, conda_exe):
    """`update=["pydantic"]` bumps pydantic while leaving python's pin untouched."""
    monkeypatch.chdir(update_environment.parent)
    if is_micromamba(conda_exe):
        monkeypatch.setenv("CONDA_FLAGS", "-v")
    lockfile_path = update_environment.parent / DEFAULT_LOCKFILE_NAME

    def _packages_by_name():
        # Snapshot the lockfile's packages keyed by name.
        return {pkg.name: pkg for pkg in parse_conda_lock_file(lockfile_path).package}

    pre_lock = _packages_by_name()
    run_lock([update_environment], conda_exe=conda_exe, update=["pydantic"])
    post_lock = _packages_by_name()
    assert post_lock["pydantic"].version == "1.8.2"
    assert post_lock["python"].version == pre_lock["python"].version
def test_fake_conda_env(conda_exe, conda_lock_yaml):
    """A faked prefix built from a lockfile lists exactly the locked conda packages."""
    lockfile_content = parse_conda_lock_file(conda_lock_yaml)
    with fake_conda_environment(lockfile_content.package, platform="linux-64") as prefix:
        list_cmd = [conda_exe, "list", "--debug", "-p", prefix, "--json"]
        # First invocation surfaces the debug output; second captures the JSON.
        subprocess.call(list_cmd)
        packages = json.loads(subprocess.check_output(list_cmd))
        locked = {
            pkg.name: pkg
            for pkg in lockfile_content.package
            if pkg.manager == "conda" and pkg.platform == "linux-64"
        }
        assert len(packages) == len(locked)
        for env_package in packages:
            locked_package = locked[env_package["name"]]
            platform = env_package["platform"]
            path = pathlib.PurePosixPath(
                urlsplit(urldefrag(locked_package.url)[0]).path
            )
            # micromamba reports channel URLs with the platform subdir appended.
            if is_micromamba(conda_exe):
                expected_base = f"https://conda.anaconda.org/conda-forge/{platform}"
                expected_channel = f"conda-forge/{platform}"
            else:
                expected_base = "https://conda.anaconda.org/conda-forge"
                expected_channel = "conda-forge"
            assert env_package["base_url"] == expected_base
            assert env_package["channel"] == expected_channel
            # dist_name is the archive basename minus the ".tar.bz2" suffix (8 chars).
            assert env_package["dist_name"] == path.name[:-8]
            assert platform == path.parent.name
def test_run_lock_with_input_hash_check(
    monkeypatch, input_hash_zlib_environment: pathlib.Path, conda_exe, capsys
):
    """A second lock with an unchanged spec hash must not rewrite the lockfile."""
    with filelock.FileLock(str(input_hash_zlib_environment.parent / "filelock")):
        monkeypatch.chdir(input_hash_zlib_environment.parent)
        if is_micromamba(conda_exe):
            monkeypatch.setenv("CONDA_FLAGS", "-v")
        lockfile = input_hash_zlib_environment.parent / "conda-linux-64.lock"
        if lockfile.exists():
            lockfile.unlink()

        def _lock_once():
            run_lock(
                [input_hash_zlib_environment],
                platforms=["linux-64"],
                conda_exe=conda_exe,
                check_input_hash=True,
            )

        _lock_once()
        created = lockfile.stat().st_mtime_ns
        with open(lockfile) as f:
            previous_hash = extract_input_hash(f.read())
        assert previous_hash is not None
        assert len(previous_hash) == 64  # sha256 hex digest
        capsys.readouterr()  # discard output from the first run
        _lock_once()
        # mtime unchanged => the lockfile was not rewritten.
        assert lockfile.stat().st_mtime_ns == created
        output = capsys.readouterr()
        assert "Spec hash already locked for" in output.err
def test_virtual_packages(conda_exe, monkeypatch, kind, capsys):
    """Locking with default virtual packages succeeds; an old-glibc override must fail."""
    test_dir = TEST_DIR.joinpath("test-cuda")
    monkeypatch.chdir(test_dir)
    if is_micromamba(conda_exe):
        monkeypatch.setenv("CONDA_FLAGS", "-v")
    if kind == "env" and not conda_supports_env(conda_exe):
        pytest.skip(
            f"Standalone conda @ '{conda_exe}' does not support materializing from environment files."
        )
    platform = "linux-64"
    from click.testing import CliRunner, Result

    def _remove_lockfiles():
        # Start each invocation from a clean slate.
        for lockfile in glob(f"conda-{platform}.*"):
            os.unlink(lockfile)

    _remove_lockfiles()
    with capsys.disabled():
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(
            main,
            ["lock", "--conda", conda_exe, "-p", platform, "-k", kind],
        )
        print(result.stdout, file=sys.stdout)
        print(result.stderr, file=sys.stderr)
    if result.exception:
        raise result.exception
    assert result.exit_code == 0

    _remove_lockfiles()
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        main,
        [
            "lock",
            "--conda",
            conda_exe,
            "-p",
            platform,
            "-k",
            kind,
            "--virtual-package-spec",
            test_dir / "virtual-packages-old-glibc.yaml",
        ],
    )
    # micromamba doesn't respect the CONDA_OVERRIDE_XXX="" env vars appropriately so it will include the
    # system virtual packages regardless of whether they should be present. Skip this check in that case
    if not is_micromamba(conda_exe):
        assert result.exit_code != 0
def test_install(request, kind, tmp_path, conda_exe, zlib_environment, monkeypatch, capsys):
    """End-to-end: lock a zlib environment, then `conda-lock install` it into a prefix.

    On non-Linux hosts the install step is expected to raise
    ``PlatformValidationError``, since the lockfile targets linux-64 and
    platform validation is on by default.
    """
    if is_micromamba(conda_exe):
        monkeypatch.setenv("CONDA_FLAGS", "-v")
    if kind == "env" and not conda_supports_env(conda_exe):
        pytest.skip(
            f"Standalone conda @ '{conda_exe}' does not support materializing from environment files."
        )

    package = "zlib"
    platform = "linux-64"

    lock_filename_template = (
        request.node.name + "conda-{platform}-{dev-dependencies}.lock"
    )
    # The rendered filename for linux-64 with dev-dependencies=true.
    lock_filename = (
        request.node.name
        + "conda-linux-64-true.lock"
        + (".yml" if kind == "env" else "")
    )
    try:
        os.remove(lock_filename)
    except OSError:
        pass

    from click.testing import CliRunner

    with capsys.disabled():
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(
            main,
            [
                "lock",
                "--conda",
                conda_exe,
                "-p",
                platform,
                "-f",
                zlib_environment,
                "-k",
                kind,
                "--filename-template",
                lock_filename_template,
            ],
            catch_exceptions=False,
        )
        print(result.stdout, file=sys.stdout)
        print(result.stderr, file=sys.stderr)
    assert result.exit_code == 0

    env_name = "test_env"

    def invoke_install(*extra_args):
        # Run `conda-lock install` against the lockfile produced above.
        with capsys.disabled():
            return runner.invoke(
                main,
                [
                    "install",
                    "--conda",
                    conda_exe,
                    "--prefix",
                    tmp_path / env_name,
                    *extra_args,
                    lock_filename,
                ],
                catch_exceptions=False,
            )

    if sys.platform.lower().startswith("linux"):
        context = contextlib.nullcontext()
    else:
        # since by default we do platform validation we would expect this to fail
        context = pytest.raises(PlatformValidationError)
    with context:
        result = invoke_install()
    print(result.stdout, file=sys.stdout)
    print(result.stderr, file=sys.stderr)
    # BUG FIX: the original wrote `.exists` (the bound method object, which is
    # always truthy) instead of calling `.exists()`, so the debug log attempted
    # to read the file even when it was absent.
    if pathlib.Path(lock_filename).exists():
        logging.debug(
            "lockfile contents: \n\n=======\n%s\n\n==========",
            pathlib.Path(lock_filename).read_text(),
        )
    if sys.platform.lower().startswith("linux"):
        assert _check_package_installed(
            package=package,
            prefix=str(tmp_path / env_name),
        ), f"Package {package} does not exist in {tmp_path} environment"
def test_run_lock_with_pip(monkeypatch, pip_environment, conda_exe):
    """Locking an environment that contains pip dependencies succeeds."""
    monkeypatch.chdir(pip_environment.parent)
    if is_micromamba(conda_exe):
        monkeypatch.setenv("CONDA_FLAGS", "-v")
    run_lock([pip_environment], conda_exe=conda_exe)
def test_run_lock(monkeypatch, zlib_environment, conda_exe):
    """Smoke test: locking a simple zlib environment completes without error."""
    monkeypatch.chdir(zlib_environment.parent)
    if is_micromamba(conda_exe):
        monkeypatch.setenv("CONDA_FLAGS", "-v")
    run_lock([zlib_environment], conda_exe=conda_exe)
def update_specs_for_arch(
    conda: PathLike,
    specs: List[str],
    locked: Dict[str, LockedDependency],
    update: List[str],
    platform: str,
    channels: Sequence[str],
) -> DryRunInstall:
    """
    Update a previous solution for the given platform

    Parameters
    ----------
    conda :
        Path to conda, mamba, or micromamba
    specs :
        Conda package specifications
    locked :
        Previous solution for the given platform (conda packages only)
    update :
        Named of packages to update to the latest version compatible with specs
    platform :
        Target platform
    channels :
        Channels to query

    Returns
    -------
    DryRunInstall
        Dry-run install plan whose LINK/FETCH actions cover every previously
        locked package, with the requested packages updated.
    """
    # Materialize the previous solution as a fake prefix so the solver treats
    # it as the current state of the environment.
    with fake_conda_environment(locked.values(), platform=platform) as prefix:
        installed: Dict[str, LinkAction] = {
            entry["name"]: entry
            for entry in json.loads(
                subprocess.check_output(
                    [str(conda), "list", "-p", prefix, "--json"],
                    env=conda_env_override(platform),
                )
            )
        }
        # Map bare package names to their full spec strings (strip "[...]" build qualifiers).
        spec_for_name = {v.split("[")[0]: v for v in specs}
        # Only update packages that are both requested and actually installed.
        to_update = [
            spec_for_name[name] for name in set(installed).intersection(update)
        ]
        if to_update:
            # NB: [micro]mamba and mainline conda have different semantics for `install` and `update`
            # - conda:
            #   * update -> apply all nonmajor updates unconditionally (unless pinned)
            #   * install -> install or update target to latest version compatible with constraint
            # - micromamba:
            #   * update -> update target to latest version compatible with constraint
            #   * install -> update target if current version incompatible with constraint, otherwise _do nothing_
            # - mamba:
            #   * update -> apply all nonmajor updates unconditionally (unless pinned)
            #   * install -> update target if current version incompatible with constraint, otherwise _do nothing_
            # Our `update` should always update the target to the latest version compatible with the constraint,
            # while updating as few other packages as possible. With mamba this can only be done with pinning.
            if pathlib.Path(conda).name.startswith("mamba"):
                # pin non-updated packages to prevent _any_ movement
                pinned_filename = pathlib.Path(prefix) / "conda-meta" / "pinned"
                assert not pinned_filename.exists()
                with open(pinned_filename, "w") as pinned:
                    for name in set(installed.keys()).difference(update):
                        pinned.write(f'{name} =={installed[name]["version"]}\n')
                args = [
                    str(conda),
                    "update",
                    *_get_conda_flags(channels=channels, platform=platform),
                ]
                print(
                    "Warning: mamba cannot update single packages without resorting to pinning. "
                    "If the update fails to solve, try with conda or micromamba instead.",
                    file=sys.stderr,
                )
            else:
                # conda uses `install`, micromamba uses `update` (see semantics above).
                args = [
                    str(conda),
                    "update" if is_micromamba(conda) else "install",
                    *_get_conda_flags(channels=channels, platform=platform),
                ]
            proc = subprocess.run(
                args + ["-p", prefix, "--json", "--dry-run", *to_update],
                env=conda_env_override(platform),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                encoding="utf8",
            )
            try:
                proc.check_returncode()
            except subprocess.CalledProcessError as exc:
                # Solver failures still emit JSON on stdout; surface its message.
                err_json = json.loads(proc.stdout)
                raise RuntimeError(
                    f"Could not lock the environment for platform {platform}: {err_json.get('message')}"
                ) from exc
            dryrun_install: DryRunInstall = json.loads(proc.stdout)
        else:
            # Nothing to update: start from an empty plan.
            dryrun_install = {"actions": {"LINK": [], "FETCH": []}}
        if "actions" not in dryrun_install:
            # A no-op dry run may omit "actions" entirely.
            dryrun_install["actions"] = {"LINK": [], "FETCH": []}
        # Packages untouched by the dry run must be re-added to the plan,
        # synthesized from the fake environment's listing plus the previous lock.
        updated = {entry["name"]: entry for entry in dryrun_install["actions"]["LINK"]}
        for package in set(installed).difference(updated):
            entry = installed[package]
            fn = f'{entry["dist_name"]}.tar.bz2'
            # micromamba's base_url already includes the platform subdir.
            if is_micromamba(conda):
                channel = f'{entry["base_url"]}'
            else:
                channel = f'{entry["base_url"]}/{entry["platform"]}'
            url = f"{channel}/{fn}"
            md5 = locked[package].hash.md5
            if md5 is None:
                raise RuntimeError("Conda packages require non-null md5 hashes")
            sha256 = locked[package].hash.sha256
            dryrun_install["actions"]["FETCH"].append(
                {
                    "name": entry["name"],
                    "channel": channel,
                    "url": url,
                    "fn": fn,
                    "md5": md5,
                    "sha256": sha256,
                    "version": entry["version"],
                    "depends": [
                        f"{k} {v}".strip()
                        for k, v in locked[entry["name"]].dependencies.items()
                    ],
                    "constrains": [],
                    "subdir": entry["platform"],
                    "timestamp": 0,
                }
            )
            dryrun_install["actions"]["LINK"].append(entry)
        return _reconstruct_fetch_actions(conda, platform, dryrun_install)
def _reconstruct_fetch_actions(
    conda: PathLike, platform: str, dry_run_install: DryRunInstall
) -> DryRunInstall:
    """
    Conda may choose to link a previously downloaded distribution from pkgs_dirs rather
    than downloading a fresh one. Find the repodata record in existing distributions
    that have only a LINK action, and use it to synthesize a corresponding FETCH action
    with the metadata we need to extract for the package plan.

    Returns the same ``dry_run_install`` dict, mutated in place so that every
    LINK action has a matching FETCH action.
    """
    # Normalize the plan: both action lists must exist before we diff them.
    if "LINK" not in dry_run_install["actions"]:
        dry_run_install["actions"]["LINK"] = []
    if "FETCH" not in dry_run_install["actions"]:
        dry_run_install["actions"]["FETCH"] = []
    link_actions = {p["name"]: p for p in dry_run_install["actions"]["LINK"]}
    fetch_actions = {p["name"]: p for p in dry_run_install["actions"]["FETCH"]}
    # Packages that will be linked from the local cache without a download.
    link_only_names = set(link_actions.keys()).difference(fetch_actions.keys())
    # NB: micromamba does not support info --json, nor does it appear to honor pkgs_dirs from .condarc
    if not is_micromamba(conda):
        if link_only_names:
            # Ask conda where its package caches live, only if we need them.
            pkgs_dirs = [
                pathlib.Path(d)
                for d in json.loads(
                    subprocess.check_output(
                        [str(conda), "info", "--json"], env=conda_env_override(platform)
                    )
                )["pkgs_dirs"]
            ]
        else:
            pkgs_dirs = []
        for link_pkg_name in link_only_names:
            link_action = link_actions[link_pkg_name]
            # Search each cache dir for the extracted distribution's repodata record.
            for pkgs_dir in pkgs_dirs:
                record = (
                    pkgs_dir / link_action["dist_name"] / "info" / "repodata_record.json"
                )
                if record.exists():
                    with open(record) as f:
                        repodata: FetchAction = json.load(f)
                    break
            else:
                # NOTE(review): FileExistsError is raised for a file that was NOT
                # found; FileNotFoundError seems more accurate — confirm no caller
                # catches FileExistsError before changing.
                raise FileExistsError(
                    f'Distribution \'{link_action["dist_name"]}\' not found in pkgs_dirs {pkgs_dirs}'
                )
            dry_run_install["actions"]["FETCH"].append(repodata)
    else:
        # NB: micromamba LINK actions contain the same metadata as FETCH
        # actions, and so can be used to fill out the FETCH section.
        # Explicitly copy key-by-key to make missing keys obvious, should
        # this change in the future.
        for link_pkg_name in link_only_names:
            item = cast(Dict[str, Any], link_actions[link_pkg_name])
            repodata = {
                "channel": item["channel"],
                "constrains": item.get("constrains"),
                "depends": item.get("depends"),
                "fn": item["fn"],
                "md5": item["md5"],
                "name": item["name"],
                "subdir": item["subdir"],
                "timestamp": item["timestamp"],
                "url": item["url"],
                "version": item["version"],
                "sha256": item.get("sha256"),
            }
            dry_run_install["actions"]["FETCH"].append(repodata)
    return dry_run_install