def run_lock(
    environment_files: List[pathlib.Path],
    conda_exe: Optional[str],
    platforms: Optional[List[str]] = None,
    mamba: bool = False,
    micromamba: bool = False,
    include_dev_dependencies: bool = True,
    channel_overrides: Optional[Sequence[str]] = None,
    filename_template: Optional[str] = None,
    kinds: Optional[List[str]] = None,
    lockfile_path: pathlib.Path = pathlib.Path(DEFAULT_LOCKFILE_NAME),
    check_input_hash: bool = False,
    extras: Optional[AbstractSet[str]] = None,
    virtual_package_spec: Optional[pathlib.Path] = None,
    update: Optional[List[str]] = None,
) -> None:
    """Resolve the given environment files and write the requested lockfile kinds.

    When the caller did not override the default source files, the sources
    recorded in an existing conda-lock.yml are preferred, provided they all
    still exist on disk.
    """
    if environment_files == DEFAULT_FILES:
        if lockfile_path.exists():
            lock_content = parse_conda_lock_file(lockfile_path)
            # Sources are stored as POSIX paths; rebuild native paths
            # relative to the lockfile's directory.
            base = pathlib.PurePosixPath(lockfile_path).parent
            restored = [
                pathlib.Path(base / pathlib.PurePosixPath(src))
                for src in lock_content.metadata.sources
            ]
            absent = [src for src in restored if not src.exists()]
            if not absent:
                environment_files = restored
            else:
                # Some recorded sources vanished; warn and keep the defaults.
                print(
                    f"{lockfile_path} was created from {[str(p) for p in restored]},"
                    f" but some files ({[str(p) for p in absent]}) do not exist. Falling back to"
                    f" {[str(p) for p in environment_files]}.",
                    file=sys.stderr,
                )
        else:
            # Accept the long "environment.yaml" spelling when the default
            # file is missing but the long-extension variant exists.
            long_ext_file = pathlib.Path("environment.yaml")
            if long_ext_file.exists() and not environment_files[0].exists():
                environment_files = [long_ext_file]

    _conda_exe = determine_conda_executable(
        conda_exe, mamba=mamba, micromamba=micromamba
    )
    make_lock_files(
        conda=_conda_exe,
        src_files=environment_files,
        platform_overrides=platforms,
        channel_overrides=channel_overrides,
        virtual_package_spec=virtual_package_spec,
        update=update,
        kinds=kinds or DEFAULT_KINDS,
        lockfile_path=lockfile_path,
        filename_template=filename_template,
        include_dev_dependencies=include_dev_dependencies,
        extras=extras,
        check_input_hash=check_input_hash,
    )
def render(
    dev_dependencies,
    kind,
    filename_template,
    extras,
    log_level,
    lock_file,
    pdb,
):
    """Render multi-platform lockfile into single-platform env or explicit file"""
    logging.basicConfig(level=log_level)

    if pdb:
        # On any uncaught exception, drop into the post-mortem debugger
        # instead of printing a traceback.
        def handle_exception(exc_type, exc_value, exc_traceback):
            import pdb

            pdb.post_mortem(exc_traceback)

        sys.excepthook = handle_exception

    lockfile = parse_conda_lock_file(pathlib.Path(lock_file))

    do_render(
        lockfile,
        filename_template=filename_template,
        kinds=kind,
        include_dev_dependencies=dev_dependencies,
        extras=extras,
    )
def test_run_lock_with_update(monkeypatch, update_environment, conda_exe):
    """Updating a single dependency bumps it without re-solving the rest."""
    monkeypatch.chdir(update_environment.parent)
    if is_micromamba(conda_exe):
        monkeypatch.setenv("CONDA_FLAGS", "-v")

    def packages_by_name():
        # Snapshot the current lockfile as a name -> package mapping.
        lockfile = update_environment.parent / DEFAULT_LOCKFILE_NAME
        return {pkg.name: pkg for pkg in parse_conda_lock_file(lockfile).package}

    pre_lock = packages_by_name()
    run_lock([update_environment], conda_exe=conda_exe, update=["pydantic"])
    post_lock = packages_by_name()

    assert post_lock["pydantic"].version == "1.8.2"
    # Untouched dependencies must keep their previously locked version.
    assert post_lock["python"].version == pre_lock["python"].version
def test_fake_conda_env(conda_exe, conda_lock_yaml):
    """A fake prefix built from the lockfile matches `conda list` output."""
    lockfile_content = parse_conda_lock_file(conda_lock_yaml)
    with fake_conda_environment(
        lockfile_content.package, platform="linux-64"
    ) as prefix:
        list_cmd = [conda_exe, "list", "--debug", "-p", prefix, "--json"]
        # First invocation runs purely for its debug output on stdout.
        subprocess.call(list_cmd)
        packages = json.loads(subprocess.check_output(list_cmd))

        locked = {
            pkg.name: pkg
            for pkg in lockfile_content.package
            if pkg.manager == "conda" and pkg.platform == "linux-64"
        }
        assert len(packages) == len(locked)

        for entry in packages:
            expected = locked[entry["name"]]
            platform = entry["platform"]
            path = pathlib.PurePosixPath(
                urlsplit(urldefrag(expected.url)[0]).path
            )
            if is_micromamba(conda_exe):
                # micromamba reports platform-qualified channels.
                assert (
                    entry["base_url"]
                    == f"https://conda.anaconda.org/conda-forge/{platform}"
                )
                assert entry["channel"] == f"conda-forge/{platform}"
            else:
                assert entry["base_url"] == "https://conda.anaconda.org/conda-forge"
                assert entry["channel"] == "conda-forge"
            # Strip the 8-character archive suffix to recover the dist name.
            assert entry["dist_name"] == path.name[:-8]
            assert platform == path.parent.name
def _render_lockfile_for_install(
    filename: str,
    include_dev_dependencies: bool = True,
    extras: Optional[AbstractSet[str]] = None,
):
    """
    Render lock content into a temporary, explicit lockfile for the current platform

    Parameters
    ----------
    filename :
        Path to conda-lock.yml
    include_dev_dependencies :
        Include development dependencies in output
    extras :
        Optional dependency groups to include in output
    """
    # Anything that is not a conda-lock.yml is assumed to already be
    # installable as-is and is passed through untouched.
    if not filename.endswith(DEFAULT_LOCKFILE_NAME):
        yield filename
        return

    from ensureconda.resolve import platform_subdir

    lock_content = parse_conda_lock_file(pathlib.Path(filename))

    current_platform = platform_subdir()
    if current_platform not in lock_content.metadata.platforms:
        raise PlatformValidationError(
            f"Dependencies are not locked for the current platform ({current_platform})"
        )

    with tempfile.NamedTemporaryFile(mode="w") as tf:
        rendered = render_lockfile_for_platform(
            lockfile=lock_content,
            kind="explicit",
            platform=current_platform,
            include_dev_dependencies=include_dev_dependencies,
            extras=extras,
        )
        tf.write("\n".join(rendered) + "\n")
        # Flush so the consumer sees the complete file while we hold it open.
        tf.flush()
        yield tf.name
def make_lock_files(
    conda: PathLike,
    src_files: List[pathlib.Path],
    kinds: List[str],
    lockfile_path: pathlib.Path = pathlib.Path(DEFAULT_LOCKFILE_NAME),
    platform_overrides: Optional[Sequence[str]] = None,
    channel_overrides: Optional[Sequence[str]] = None,
    virtual_package_spec: Optional[pathlib.Path] = None,
    update: Optional[List[str]] = None,
    include_dev_dependencies: bool = True,
    filename_template: Optional[str] = None,
    extras: Optional[AbstractSet[str]] = None,
    check_input_hash: bool = False,
):
    """
    Generate a lock file from the src files provided

    Parameters
    ----------
    conda :
        Path to conda, mamba, or micromamba
    src_files :
        Files to parse requirements from
    kinds :
        Lockfile formats to output
    lockfile_path :
        Path to a conda-lock.yml to create or update
    platform_overrides :
        Platforms to solve for. Takes precedence over platforms found in src_files.
    channel_overrides :
        Channels to use. Takes precedence over channels found in src_files.
    virtual_package_spec :
        Path to a virtual package repository that defines each platform.
    update :
        Names of dependencies to update to their latest versions, regardless
        of whether the constraint in src_files has changed.
    include_dev_dependencies :
        Include development dependencies in explicit or env output
    filename_template :
        Format for names of rendered explicit or env files. Must include {platform}.
    extras :
        Include the given extras in explicit or env output
    check_input_hash :
        Do not re-solve for each target platform for which specifications are unchanged
    """
    # Set up the fake repodata that supplies each platform's virtual packages.
    if virtual_package_spec and virtual_package_spec.exists():
        virtual_package_repo = virtual_package_repo_from_specification(
            virtual_package_spec
        )
    else:
        virtual_package_repo = default_virtual_package_repodata()

    with virtual_package_repo:
        lock_spec = make_lock_spec(
            src_files=src_files,
            channel_overrides=channel_overrides,
            platform_overrides=platform_overrides,
            virtual_package_repo=virtual_package_repo,
        )

        lock_content: Optional[Lockfile] = None
        platforms_to_lock: List[str] = []
        platforms_already_locked: List[str] = []

        if lockfile_path.exists():
            import yaml

            try:
                lock_content = parse_conda_lock_file(lockfile_path)
            except (yaml.error.YAMLError, FileNotFoundError):
                # An unreadable lockfile is treated the same as a missing one.
                logger.warning(
                    "Failed to parse existing lock. Regenerating from scratch"
                )

        if lock_content is None:
            # No usable existing lock: every requested platform needs a solve.
            platforms_to_lock = lock_spec.platforms
            update_spec = UpdateSpecification()
        else:
            platforms_already_locked = list(lock_content.metadata.platforms)
            update_spec = UpdateSpecification(
                locked=lock_content.package, update=update
            )
            for platform in lock_spec.platforms:
                # A platform is re-solved when an update was requested, it was
                # never locked, hash checking is disabled, or its spec hash
                # no longer matches the recorded one.
                stale = (
                    update
                    or platform not in lock_content.metadata.platforms
                    or not check_input_hash
                    or lock_spec.content_hash_for_platform(platform)
                    != lock_content.metadata.content_hash[platform]
                )
                if stale:
                    platforms_to_lock.append(platform)
                    if platform in platforms_already_locked:
                        platforms_already_locked.remove(platform)

        if platforms_already_locked:
            print(
                f"Spec hash already locked for {sorted(platforms_already_locked)}. Skipping solve.",
                file=sys.stderr,
            )
        platforms_to_lock = sorted(set(platforms_to_lock))

        if platforms_to_lock:
            print(
                f"Locking dependencies for {platforms_to_lock}...",
                file=sys.stderr,
            )
            # Merge freshly solved platforms into the existing lock content.
            lock_content = lock_content | create_lockfile_from_spec(
                conda=conda,
                spec=lock_spec,
                platforms=platforms_to_lock,
                lockfile_path=lockfile_path,
                update_spec=update_spec,
            )

            if "lock" in kinds:
                write_conda_lock_file(lock_content, lockfile_path)
                print(
                    " - Install lock using:",
                    KIND_USE_TEXT["lock"].format(lockfile=str(lockfile_path)),
                    file=sys.stderr,
                )

        assert lock_content is not None

        do_render(
            lock_content,
            kinds=[k for k in kinds if k != "lock"],
            include_dev_dependencies=include_dev_dependencies,
            filename_template=filename_template,
            extras=extras,
            check_input_hash=check_input_hash,
        )