def test_activate_activates_different_virtualenv_with_envs_file(
    tmp_dir, manager, poetry, config, mocker
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.1"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.6.6")),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv)

    env = manager.activate("python3.6", NullIO())

    m.assert_called_with(
        Path(tmp_dir) / "{}-py3.6".format(venv_name),
        executable="python3.6",
        flags={"always-copy": False, "system-site-packages": False},
        with_pip=True,
        with_setuptools=True,
        with_wheel=True,
    )

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.6"
    assert envs[venv_name]["patch"] == "3.6.6"

    assert env.path == Path(tmp_dir) / "{}-py3.6".format(venv_name)
    assert env.base == Path("/prefix")
def test_activate_does_not_recreate_when_switching_minor(
    tmp_dir: str,
    manager: EnvManager,
    poetry: Poetry,
    config: Config,
    mocker: MockerFixture,
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.0"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))
    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.6"))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.6.6")),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
    )
    build_venv_m = mocker.patch(
        "poetry.utils.env.EnvManager.build_venv", side_effect=build_venv
    )
    remove_venv_m = mocker.patch(
        "poetry.utils.env.EnvManager.remove_venv", side_effect=EnvManager.remove_venv
    )

    env = manager.activate("python3.6", NullIO())

    build_venv_m.assert_not_called()
    remove_venv_m.assert_not_called()

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.6"
    assert envs[venv_name]["patch"] == "3.6.6"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.6"
    assert env.base == Path("/prefix")
    assert (Path(tmp_dir) / f"{venv_name}-py3.6").exists()
def test_activate_activates_non_existing_virtualenv_no_envs_file(
    tmp_dir: str,
    manager: EnvManager,
    poetry: Poetry,
    config: Config,
    mocker: MockerFixture,
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv)

    env = manager.activate("python3.7", NullIO())
    venv_name = EnvManager.generate_env_name("simple-project", str(poetry.file.parent))

    m.assert_called_with(
        Path(tmp_dir) / f"{venv_name}-py3.7",
        executable="/usr/bin/python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False,
            "no-pip": False,
            "no-setuptools": False,
        },
        prompt="simple-project-py3.7",
    )

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
    assert env.base == Path("/prefix")
def test_activate_activates_same_virtualenv_with_envs_file(
    tmp_dir: str,
    manager: EnvManager,
    poetry: "Poetry",
    config: "Config",
    mocker: "MockerFixture",
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.1"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.create_venv")

    env = manager.activate("python3.7", NullIO())

    m.assert_not_called()

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
    assert env.base == Path("/prefix")
def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir(
    manager: EnvManager,
    poetry: Poetry,
    config: Config,
    tmp_dir: str,
    mocker: MockerFixture,
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    config.merge(
        {
            "virtualenvs": {
                "path": str(Path(tmp_dir) / "virtualenvs"),
                "in-project": True,
            }
        }
    )

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv")

    manager.activate("python3.7", NullIO())

    m.assert_called_with(
        poetry.file.parent / ".venv",
        executable="/usr/bin/python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False,
            "no-pip": False,
            "no-setuptools": False,
        },
        prompt="simple-project-py3.7",
    )

    envs_file = TOMLFile(Path(tmp_dir) / "virtualenvs" / "envs.toml")
    assert not envs_file.exists()
def test_activate_activates_non_existing_virtualenv_no_envs_file(
    mocker: MockerFixture,
    tester: CommandTester,
    venv_cache: Path,
    venv_name: str,
    venvs_in_cache_config: None,
):
    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    mock_build_env = mocker.patch(
        "poetry.utils.env.EnvManager.build_venv", side_effect=build_venv
    )

    tester.execute("3.7")

    venv_py37 = venv_cache / f"{venv_name}-py3.7"
    mock_build_env.assert_called_with(
        venv_py37,
        executable="/usr/bin/python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False,
            "no-pip": False,
            "no-setuptools": False,
        },
        prompt="simple-project-py3.7",
    )

    envs_file = TOMLFile(venv_cache / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    expected = f"""\
Creating virtualenv {venv_py37.name} in {venv_py37.parent}
Using virtualenv: {venv_py37}
"""

    assert tester.io.fetch_output() == expected
def test_activate_activates_non_existing_virtualenv_no_envs_file(
    mocker, tester, venv_cache, venv_name, venvs_in_cache_config
):
    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    mock_build_env = mocker.patch(
        "poetry.utils.env.EnvManager.build_venv", side_effect=build_venv
    )

    tester.execute("3.7")

    venv_py37 = venv_cache / "{}-py3.7".format(venv_name)
    mock_build_env.assert_called_with(
        venv_py37,
        executable="python3.7",
        flags={"always-copy": False, "system-site-packages": False},
        with_pip=True,
        with_setuptools=True,
        with_wheel=True,
    )

    envs_file = TOMLFile(venv_cache / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    expected = """\
Creating virtualenv {} in {}
Using virtualenv: {}
""".format(
        venv_py37.name,
        venv_py37.parent,
        venv_py37,
    )

    assert expected == tester.io.fetch_output()
def deactivate(self, io: IO) -> None:
    venv_path = self._poetry.config.get("virtualenvs.path")
    if venv_path is None:
        venv_path = Path(CACHE_DIR) / "virtualenvs"
    else:
        venv_path = Path(venv_path)

    name = self._poetry.package.name
    name = self.generate_env_name(name, str(self._poetry.file.parent))

    envs_file = TOMLFile(venv_path / self.ENVS_FILE)
    if envs_file.exists():
        envs = envs_file.read()
        env = envs.get(name)
        if env is not None:
            io.write_line(
                "Deactivating virtualenv: <comment>{}</comment>".format(
                    venv_path / (name + "-py{}".format(env["minor"]))
                )
            )
            del envs[name]

            envs_file.write(envs)
def test_activate_activates_non_existing_virtualenv_no_envs_file(
    tmp_dir, manager, poetry, config, mocker
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "poetry.utils._compat.subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "poetry.utils._compat.subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv)

    env = manager.activate("python3.7", NullIO())
    venv_name = EnvManager.generate_env_name("simple-project", str(poetry.file.parent))

    m.assert_called_with(
        Path(tmp_dir) / "{}-py3.7".format(venv_name),
        executable="python3.7",
        flags={"always-copy": False},
    )

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
    assert env.base == Path("/prefix")
def remove(self, python: str) -> "Env":
    venv_path = self._poetry.config.get("virtualenvs.path")
    if venv_path is None:
        venv_path = Path(CACHE_DIR) / "virtualenvs"
    else:
        venv_path = Path(venv_path)

    cwd = self._poetry.file.parent
    envs_file = TOMLFile(venv_path / self.ENVS_FILE)
    base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))

    if python.startswith(base_env_name):
        venvs = self.list()
        for venv in venvs:
            if venv.path.name == python:
                # Exact virtualenv name
                if not envs_file.exists():
                    self.remove_venv(venv.path)

                    return venv

                venv_minor = ".".join(str(v) for v in venv.version_info[:2])
                base_env_name = self.generate_env_name(cwd.name, str(cwd))
                envs = envs_file.read()

                current_env = envs.get(base_env_name)
                if not current_env:
                    self.remove_venv(venv.path)

                    return venv

                if current_env["minor"] == venv_minor:
                    del envs[base_env_name]
                    envs_file.write(envs)

                self.remove_venv(venv.path)

                return venv

        raise ValueError(
            '<warning>Environment "{}" does not exist.</warning>'.format(python)
        )

    try:
        python_version = Version.parse(python)
        python = "python{}".format(python_version.major)
        if python_version.precision > 1:
            python += ".{}".format(python_version.minor)
    except ValueError:
        # Executable in PATH or full executable path
        pass

    try:
        python_version = decode(
            subprocess.check_output(
                list_to_shell_command(
                    [
                        python,
                        "-c",
                        "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
                    ]
                ),
                shell=True,
            )
        )
    except CalledProcessError as e:
        raise EnvCommandError(e)

    python_version = Version.parse(python_version.strip())
    minor = "{}.{}".format(python_version.major, python_version.minor)

    name = "{}-py{}".format(base_env_name, minor)
    venv = venv_path / name

    if not venv.exists():
        raise ValueError(
            '<warning>Environment "{}" does not exist.</warning>'.format(name)
        )

    if envs_file.exists():
        envs = envs_file.read()
        current_env = envs.get(base_env_name)
        if current_env is not None:
            current_minor = current_env["minor"]

            if current_minor == minor:
                del envs[base_env_name]
                envs_file.write(envs)

    self.remove_venv(venv)

    return VirtualEnv(venv)
def get(self, reload: bool = False) -> Union["VirtualEnv", "SystemEnv"]:
    if self._env is not None and not reload:
        return self._env

    python_minor = ".".join([str(v) for v in sys.version_info[:2]])

    venv_path = self._poetry.config.get("virtualenvs.path")
    if venv_path is None:
        venv_path = Path(CACHE_DIR) / "virtualenvs"
    else:
        venv_path = Path(venv_path)

    cwd = self._poetry.file.parent
    envs_file = TOMLFile(venv_path / self.ENVS_FILE)
    env = None
    base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
    if envs_file.exists():
        envs = envs_file.read()
        env = envs.get(base_env_name)
        if env:
            python_minor = env["minor"]

    # Check if we are inside a virtualenv or not
    # Conda sets CONDA_PREFIX in its envs, see
    # https://github.com/conda/conda/issues/2764
    env_prefix = os.environ.get("VIRTUAL_ENV", os.environ.get("CONDA_PREFIX"))
    conda_env_name = os.environ.get("CONDA_DEFAULT_ENV")
    # It's probably not a good idea to pollute Conda's global "base" env, since
    # most users have it activated all the time.
    in_venv = env_prefix is not None and conda_env_name != "base"

    if not in_venv or env is not None:
        # Checking if a local virtualenv exists
        if self._poetry.config.get("virtualenvs.in-project") is not False:
            if (cwd / ".venv").exists() and (cwd / ".venv").is_dir():
                venv = cwd / ".venv"

                return VirtualEnv(venv)

        create_venv = self._poetry.config.get("virtualenvs.create", True)

        if not create_venv:
            return SystemEnv(Path(sys.prefix))

        venv_path = self._poetry.config.get("virtualenvs.path")
        if venv_path is None:
            venv_path = Path(CACHE_DIR) / "virtualenvs"
        else:
            venv_path = Path(venv_path)

        name = "{}-py{}".format(base_env_name, python_minor.strip())

        venv = venv_path / name

        if not venv.exists():
            return SystemEnv(Path(sys.prefix))

        return VirtualEnv(venv)

    if env_prefix is not None:
        prefix = Path(env_prefix)
        base_prefix = None
    else:
        prefix = Path(sys.prefix)
        base_prefix = self.get_base_prefix()

    return VirtualEnv(prefix, base_prefix)
def activate(self, python: str, io: IO) -> "Env":
    venv_path = self._poetry.config.get("virtualenvs.path")
    if venv_path is None:
        venv_path = Path(CACHE_DIR) / "virtualenvs"
    else:
        venv_path = Path(venv_path)

    cwd = self._poetry.file.parent
    envs_file = TOMLFile(venv_path / self.ENVS_FILE)

    try:
        python_version = Version.parse(python)
        python = "python{}".format(python_version.major)
        if python_version.precision > 1:
            python += ".{}".format(python_version.minor)
    except ValueError:
        # Executable in PATH or full executable path
        pass

    try:
        python_version = decode(
            subprocess.check_output(
                list_to_shell_command(
                    [
                        python,
                        "-c",
                        "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
                    ]
                ),
                shell=True,
            )
        )
    except CalledProcessError as e:
        raise EnvCommandError(e)

    python_version = Version.parse(python_version.strip())
    minor = "{}.{}".format(python_version.major, python_version.minor)
    patch = python_version.text

    create = False
    is_root_venv = self._poetry.config.get("virtualenvs.in-project")
    # If we are required to create the virtual environment in the root folder,
    # create or recreate it if needed
    if is_root_venv:
        create = False
        venv = self._poetry.file.parent / ".venv"
        if venv.exists():
            # We need to check if the patch version is correct
            _venv = VirtualEnv(venv)
            current_patch = ".".join(str(v) for v in _venv.version_info[:3])

            if patch != current_patch:
                create = True

        self.create_venv(io, executable=python, force=create)

        return self.get(reload=True)

    envs = tomlkit.document()
    base_env_name = self.generate_env_name(self._poetry.package.name, str(cwd))
    if envs_file.exists():
        envs = envs_file.read()
        current_env = envs.get(base_env_name)
        if current_env is not None:
            current_minor = current_env["minor"]
            current_patch = current_env["patch"]

            if current_minor == minor and current_patch != patch:
                # We need to recreate
                create = True

    name = "{}-py{}".format(base_env_name, minor)
    venv = venv_path / name

    # Create if needed
    if not venv.exists() or venv.exists() and create:
        in_venv = os.environ.get("VIRTUAL_ENV") is not None
        if in_venv or not venv.exists():
            create = True

        if venv.exists():
            # We need to check if the patch version is correct
            _venv = VirtualEnv(venv)
            current_patch = ".".join(str(v) for v in _venv.version_info[:3])

            if patch != current_patch:
                create = True

        self.create_venv(io, executable=python, force=create)

    # Activate
    envs[base_env_name] = {"minor": minor, "patch": patch}
    envs_file.write(envs)

    return self.get(reload=True)
def create_poetry(
    self, cwd=None, io=None
):  # type: (Optional[Path], Optional[IO]) -> Poetry
    if io is None:
        io = NullIO()

    base_poetry = super(Factory, self).create_poetry(cwd)

    locker = Locker(base_poetry.file.parent / "poetry.lock", base_poetry.local_config)

    # Loading global configuration
    config = self.create_config(io)

    # Loading local configuration
    local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
    if local_config_file.exists():
        if io.is_debug():
            io.write_line(
                "Loading configuration file {}".format(local_config_file.path)
            )

        config.merge(local_config_file.read())

    # Load local sources
    repositories = {}
    existing_repositories = config.get("repositories", {})
    for source in base_poetry.pyproject.poetry_config.get("source", []):
        name = source.get("name")
        url = source.get("url")
        if name and url:
            if name not in existing_repositories:
                repositories[name] = {"url": url}

    config.merge({"repositories": repositories})

    poetry = Poetry(
        base_poetry.file.path,
        base_poetry.local_config,
        base_poetry.package,
        locker,
        config,
    )

    # Configuring sources
    sources = poetry.local_config.get("source", [])
    for source in sources:
        repository = self.create_legacy_repository(source, config)
        is_default = source.get("default", False)
        is_secondary = source.get("secondary", False)
        if io.is_debug():
            message = "Adding repository {} ({})".format(
                repository.name, repository.url
            )
            if is_default:
                message += " and setting it as the default one"
            elif is_secondary:
                message += " and setting it as secondary"

            io.write_line(message)

        poetry.pool.add_repository(repository, is_default, secondary=is_secondary)

    # Always put PyPI last to prefer private repositories
    # but only if we have no other default source
    if not poetry.pool.has_default():
        has_sources = bool(sources)
        poetry.pool.add_repository(PyPiRepository(), not has_sources, has_sources)
    else:
        if io.is_debug():
            io.write_line("Deactivating the PyPI repository")

    return poetry
class Locker:
    _VERSION = "1.1"

    _legacy_keys = ["dependencies", "source", "extras", "dev-dependencies"]
    _relevant_keys = [*_legacy_keys, "group"]

    def __init__(self, lock: str | Path, local_config: dict[str, Any]) -> None:
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data: TOMLDocument | None = None
        self._content_hash = self._get_content_hash()

    @property
    def lock(self) -> TOMLFile:
        return self._lock

    @property
    def lock_data(self) -> TOMLDocument:
        if self._lock_data is None:
            self._lock_data = self._get_lock_data()

        return self._lock_data

    def is_locked(self) -> bool:
        """
        Checks whether the locker has been locked (lockfile found).
        """
        if not self._lock.exists():
            return False

        return "package" in self.lock_data

    def is_fresh(self) -> bool:
        """
        Checks whether the lock file is still up to date with the current hash.
        """
        lock = self._lock.read()
        metadata = lock.get("metadata", {})

        if "content-hash" in metadata:
            fresh: bool = self._content_hash == metadata["content-hash"]
            return fresh

        return False

    def locked_repository(self) -> Repository:
        """
        Searches and returns a repository of locked packages.
        """
        from poetry.factory import Factory
        from poetry.repositories import Repository

        if not self.is_locked():
            return Repository("poetry-locked")

        lock_data = self.lock_data
        packages = Repository("poetry-locked")
        locked_packages = cast("list[dict[str, Any]]", lock_data["package"])

        if not locked_packages:
            return packages

        for info in locked_packages:
            source = info.get("source", {})
            source_type = source.get("type")
            url = source.get("url")
            if source_type in ["directory", "file"]:
                url = self._lock.path.parent.joinpath(url).resolve().as_posix()

            package = Package(
                info["name"],
                info["version"],
                info["version"],
                source_type=source_type,
                source_url=url,
                source_reference=source.get("reference"),
                source_resolved_reference=source.get("resolved_reference"),
            )
            package.description = info.get("description", "")
            package.category = info.get("category", "main")
            package.optional = info["optional"]
            metadata = cast("dict[str, Any]", lock_data["metadata"])
            name = info["name"]
            if "hashes" in metadata:
                # Old lock so we create dummy files from the hashes
                hashes = cast("dict[str, Any]", metadata["hashes"])
                package.files = [{"name": h, "hash": h} for h in hashes[name]]
            else:
                files = metadata["files"][name]
                package.files = files

            package.python_versions = info["python-versions"]
            extras = info.get("extras", {})
            if extras:
                for name, deps in extras.items():
                    package.extras[name] = []

                    for dep in deps:
                        try:
                            dependency = Dependency.create_from_pep_508(dep)
                        except InvalidRequirement:
                            # handle lock files with invalid PEP 508
                            m = re.match(r"^(.+?)(?:\[(.+?)])?(?:\s+\((.+)\))?$", dep)
                            if not m:
                                raise
                            dep_name = m.group(1)
                            extras = m.group(2) or ""
                            constraint = m.group(3) or "*"
                            dependency = Dependency(
                                dep_name, constraint, extras=extras.split(",")
                            )
                        package.extras[name].append(dependency)

            if "marker" in info:
                package.marker = parse_marker(info["marker"])
            else:
                # Compatibility for old locks
                if "requirements" in info:
                    dep = Dependency("foo", "0.0.0")
                    for name, value in info["requirements"].items():
                        if name == "python":
                            dep.python_versions = value
                        elif name == "platform":
                            dep.platform = value

                    split_dep = dep.to_pep_508(False).split(";")
                    if len(split_dep) > 1:
                        package.marker = parse_marker(split_dep[1].strip())

            for dep_name, constraint in info.get("dependencies", {}).items():
                root_dir = self._lock.path.parent
                if package.source_type == "directory":
                    # root dir should be the source of the package relative to the lock
                    # path
                    assert package.source_url is not None
                    root_dir = Path(package.source_url)

                if isinstance(constraint, list):
                    for c in constraint:
                        package.add_dependency(
                            Factory.create_dependency(dep_name, c, root_dir=root_dir)
                        )

                    continue

                package.add_dependency(
                    Factory.create_dependency(dep_name, constraint, root_dir=root_dir)
                )

            if "develop" in info:
                package.develop = info["develop"]

            packages.add_package(package)

        return packages

    @staticmethod
    def __get_locked_package(
        dependency: Dependency,
        packages_by_name: dict[str, list[Package]],
        decided: dict[Package, Dependency] | None = None,
    ) -> Package | None:
        """
        Internal helper to identify corresponding locked package using dependency
        version constraints.
        """
        decided = decided or {}

        # Get the packages that are consistent with this dependency.
        packages = [
            package
            for package in packages_by_name.get(dependency.name, [])
            if package.python_constraint.allows_all(dependency.python_constraint)
            and dependency.constraint.allows(package.version)
        ]

        # If we've previously made a choice that is compatible with the current
        # requirement, stick with it.
        for package in packages:
            old_decision = decided.get(package)
            if (
                old_decision is not None
                and not old_decision.marker.intersect(dependency.marker).is_empty()
            ):
                return package

        return next(iter(packages), None)

    @classmethod
    def __walk_dependencies(
        cls,
        dependencies: list[Dependency],
        packages_by_name: dict[str, list[Package]],
    ) -> dict[Package, Dependency]:
        nested_dependencies: dict[Package, Dependency] = {}

        visited: set[tuple[Dependency, BaseMarker]] = set()
        while dependencies:
            requirement = dependencies.pop(0)
            if (requirement, requirement.marker) in visited:
                continue
            visited.add((requirement, requirement.marker))

            locked_package = cls.__get_locked_package(
                requirement, packages_by_name, nested_dependencies
            )

            if not locked_package:
                raise RuntimeError(f"Dependency walk failed at {requirement}")

            if requirement.extras:
                locked_package = locked_package.with_features(requirement.extras)

            # create dependency from locked package to retain dependency metadata
            # if this is not done, we can end-up with incorrect nested dependencies
            constraint = requirement.constraint
            marker = requirement.marker
            requirement = locked_package.to_dependency()
            requirement.marker = requirement.marker.intersect(marker)

            requirement.constraint = constraint

            for require in locked_package.requires:
                if require.is_optional() and not any(
                    require in locked_package.extras[feature]
                    for feature in locked_package.features
                ):
                    continue

                require = deepcopy(require)
                require.marker = require.marker.intersect(
                    requirement.marker.without_extras()
                )
                if not require.marker.is_empty():
                    dependencies.append(require)

            key = locked_package
            if key not in nested_dependencies:
                nested_dependencies[key] = requirement
            else:
                nested_dependencies[key].marker = nested_dependencies[key].marker.union(
                    requirement.marker
                )

        return nested_dependencies

    @classmethod
    def get_project_dependencies(
        cls,
        project_requires: list[Dependency],
        locked_packages: list[Package],
    ) -> Iterable[tuple[Package, Dependency]]:
        # group packages entries by name, this is required because requirement might use
        # different constraints.
        packages_by_name: dict[str, list[Package]] = {}
        for pkg in locked_packages:
            if pkg.name not in packages_by_name:
                packages_by_name[pkg.name] = []
            packages_by_name[pkg.name].append(pkg)

        # Put higher versions first so that we prefer them.
        for packages in packages_by_name.values():
            packages.sort(
                key=lambda package: package.version,
                reverse=True,
            )

        nested_dependencies = cls.__walk_dependencies(
            dependencies=project_requires,
            packages_by_name=packages_by_name,
        )

        return nested_dependencies.items()

    def get_project_dependency_packages(
        self,
        project_requires: list[Dependency],
        project_python_marker: BaseMarker | None = None,
        extras: bool | Sequence[str] | None = None,
    ) -> Iterator[DependencyPackage]:
        # Apply the project python marker to all requirements.
        if project_python_marker is not None:
            marked_requires: list[Dependency] = []
            for require in project_requires:
                require = deepcopy(require)
                require.marker = require.marker.intersect(project_python_marker)
                marked_requires.append(require)
            project_requires = marked_requires

        repository = self.locked_repository()

        # Build a set of all packages required by our selected extras
        extra_package_names: set[str] | None = None

        if extras is not True:
            extra_package_names = set(
                get_extra_package_names(
                    repository.packages,
                    self.lock_data.get("extras", {}),
                    extras or (),
                )
            )

        # If a package is optional and we haven't opted in to it, do not select
        selected = []
        for dependency in project_requires:
            try:
                package = repository.find_packages(dependency=dependency)[0]
            except IndexError:
                continue

            if extra_package_names is not None and (
                package.optional and package.name not in extra_package_names
            ):
                # a package is locked as optional, but is not activated via extras
                continue

            selected.append(dependency)

        for package, dependency in self.get_project_dependencies(
            project_requires=selected,
            locked_packages=repository.packages,
        ):
            yield DependencyPackage(dependency=dependency, package=package)

    def set_lock_data(self, root: Package, packages: list[Package]) -> bool:
        files: dict[str, Any] = table()
        package_specs = self._lock_packages(packages)
        # Retrieving hashes
        for package in package_specs:
            if package["name"] not in files:
                files[package["name"]] = []

            for f in package["files"]:
                file_metadata = inline_table()
                for k, v in sorted(f.items()):
                    file_metadata[k] = v

                files[package["name"]].append(file_metadata)

            if files[package["name"]]:
                package_files = item(files[package["name"]])
                assert isinstance(package_files, Array)
                files[package["name"]] = package_files.multiline(True)

            del package["files"]

        lock = document()
        lock["package"] = package_specs

        if root.extras:
            lock["extras"] = {
                extra: [dep.pretty_name for dep in deps]
                for extra, deps in sorted(root.extras.items())
            }

        lock["metadata"] = {
            "lock-version": self._VERSION,
            "python-versions": root.python_versions,
            "content-hash": self._content_hash,
            "files": files,
        }

        if not self.is_locked() or lock != self.lock_data:
            self._write_lock_data(lock)

            return True

        return False

    def _write_lock_data(self, data: TOMLDocument) -> None:
        self.lock.write(data)

        # Checking lock file data consistency
        if data != self.lock.read():
            raise RuntimeError("Inconsistent lock file data.")

        self._lock_data = None

    def _get_content_hash(self) -> str:
        """
        Returns the sha256 hash of the sorted content of the pyproject file.
        """
        content = self._local_config

        relevant_content = {}
        for key in self._relevant_keys:
            data = content.get(key)

            if data is None and key not in self._legacy_keys:
                continue

            relevant_content[key] = data

        return sha256(json.dumps(relevant_content, sort_keys=True).encode()).hexdigest()

    def _get_lock_data(self) -> TOMLDocument:
        if not self._lock.exists():
            raise RuntimeError("No lockfile found. Unable to read locked packages")

        try:
            lock_data: TOMLDocument = self._lock.read()
        except TOMLKitError as e:
            raise RuntimeError(f"Unable to read the lock file ({e}).")

        metadata = cast("Table", lock_data["metadata"])
        lock_version = Version.parse(metadata.get("lock-version", "1.0"))
        current_version = Version.parse(self._VERSION)
        # We expect the locker to be able to read lock files
        # from the same semantic versioning range
        accepted_versions = parse_constraint(
            f"^{Version.from_parts(current_version.major, 0)}"
        )
        lock_version_allowed = accepted_versions.allows(lock_version)
        if lock_version_allowed and current_version < lock_version:
            logger.warning(
                "The lock file might not be compatible with the current version of"
                " Poetry.\nUpgrade Poetry to ensure the lock file is read properly or,"
                " alternatively, regenerate the lock file with the `poetry lock`"
                " command."
            )
        elif not lock_version_allowed:
            raise RuntimeError(
                "The lock file is not compatible with the current version of Poetry.\n"
                "Upgrade Poetry to be able to read the lock file or, alternatively, "
                "regenerate the lock file with the `poetry lock` command."
            )

        return lock_data

    def _lock_packages(self, packages: list[Package]) -> list[dict[str, Any]]:
        locked = []

        for package in sorted(
            packages,
            key=lambda x: (
                x.name,
                x.version,
                x.source_type or "",
                x.source_url or "",
                x.source_subdirectory or "",
                x.source_reference or "",
                x.source_resolved_reference or "",
            ),
        ):
            spec = self._dump_package(package)

            locked.append(spec)

        return locked

    def _dump_package(self, package: Package) -> dict[str, Any]:
        dependencies: dict[str, list[Any]] = {}
        for dependency in sorted(
            package.requires,
            key=lambda d: d.name,
        ):
            if dependency.pretty_name not in dependencies:
                dependencies[dependency.pretty_name] = []

            constraint = inline_table()

            if dependency.is_directory():
                dependency = cast("DirectoryDependency", dependency)
                constraint["path"] = dependency.path.as_posix()

                if dependency.develop:
                    constraint["develop"] = True

            elif dependency.is_file():
                dependency = cast("FileDependency", dependency)
                constraint["path"] = dependency.path.as_posix()

            elif dependency.is_url():
                dependency = cast("URLDependency", dependency)
                constraint["url"] = dependency.url

            elif dependency.is_vcs():
                dependency = cast("VCSDependency", dependency)
                constraint[dependency.vcs] = dependency.source

                if dependency.branch:
                    constraint["branch"] = dependency.branch
                elif dependency.tag:
                    constraint["tag"] = dependency.tag
                elif dependency.rev:
                    constraint["rev"] = dependency.rev
            else:
                constraint["version"] = str(dependency.pretty_constraint)

            if dependency.extras:
                constraint["extras"] = sorted(dependency.extras)

            if dependency.is_optional():
                constraint["optional"] = True

            if not dependency.marker.is_any():
                constraint["markers"] = str(dependency.marker)

            dependencies[dependency.pretty_name].append(constraint)

        # All the constraints should have the same type,
        # but we want to simplify them if it's possible
        for dependency_name, constraints in dependencies.items():
            if all(
                len(constraint) == 1 and "version" in constraint
                for constraint in constraints
            ):
                dependencies[dependency_name] = [
                    constraint["version"] for constraint in constraints
                ]

        data: dict[str, Any] = {
            "name": package.pretty_name,
            "version": package.pretty_version,
            "description": package.description or "",
            "category": package.category,
            "optional": package.optional,
            "python-versions": package.python_versions,
            "files": sorted(
                package.files,
                key=lambda x: x["file"],  # type: ignore[no-any-return]
            ),
        }

        if dependencies:
            data["dependencies"] = table()
            for k, constraints in dependencies.items():
                if len(constraints) == 1:
                    data["dependencies"][k] = constraints[0]
                else:
                    data["dependencies"][k] = array().multiline(True)

                    for constraint in constraints:
                        data["dependencies"][k].append(constraint)

        if package.extras:
            extras = {}
            for name, deps in package.extras.items():
                # TODO: This should use dep.to_pep_508() once this is fixed
                # https://github.com/python-poetry/poetry-core/pull/102
                extras[name] = [
                    dep.base_pep_508_name if not dep.constraint.is_any() else dep.name
                    for dep in deps
                ]

            data["extras"] = extras

        if package.source_url:
            url = package.source_url
            if package.source_type in ["file", "directory"]:
                # The lock file should only store paths relative to the root project
                url = Path(
                    os.path.relpath(
                        Path(url).as_posix(), self._lock.path.parent.as_posix()
                    )
                ).as_posix()

            data["source"] = {}

            if package.source_type:
                data["source"]["type"] = package.source_type

            data["source"]["url"] = url

            if package.source_reference:
                data["source"]["reference"] = package.source_reference

            if package.source_resolved_reference:
                data["source"]["resolved_reference"] = package.source_resolved_reference

            if package.source_type in ["directory", "git"]:
                data["develop"] = package.develop

        return data
def create_poetry(
    self,
    cwd: Path | None = None,
    io: IO | None = None,
    disable_plugins: bool = False,
    disable_cache: bool = False,
) -> Poetry:
    if io is None:
        io = NullIO()

    base_poetry = super().create_poetry(cwd)

    locker = Locker(base_poetry.file.parent / "poetry.lock", base_poetry.local_config)

    # Loading global configuration
    with warnings.catch_warnings():
        # this is preserved to ensure export plugin tests pass in ci;
        # once the poetry-plugin-export version in use no longer calls
        # Factory.create_config(), this can be safely removed.
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        config = self.create_config()

    # Loading local configuration
    local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
    if local_config_file.exists():
        if io.is_debug():
            io.write_line(f"Loading configuration file {local_config_file.path}")

        config.merge(local_config_file.read())

    # Load local sources
    repositories = {}
    existing_repositories = config.get("repositories", {})
    for source in base_poetry.pyproject.poetry_config.get("source", []):
        name = source.get("name")
        url = source.get("url")
        if name and url and name not in existing_repositories:
            repositories[name] = {"url": url}

    config.merge({"repositories": repositories})

    poetry = Poetry(
        base_poetry.file.path,
        base_poetry.local_config,
        base_poetry.package,
        locker,
        config,
    )

    # Configuring sources
    self.configure_sources(
        poetry,
        poetry.local_config.get("source", []),
        config,
        io,
        disable_cache=disable_cache,
    )

    plugin_manager = PluginManager(Plugin.group, disable_plugins=disable_plugins)
    plugin_manager.load_plugins()
    poetry.set_plugin_manager(plugin_manager)
    plugin_manager.activate(poetry, io)

    return poetry
class Locker(object):
    _VERSION = "1.1"

    _relevant_keys = ["dependencies", "dev-dependencies", "source", "extras"]

    def __init__(self, lock, local_config):  # type: (Path, dict) -> None
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data = None
        self._content_hash = self._get_content_hash()

    @property
    def lock(self):  # type: () -> TOMLFile
        return self._lock

    @property
    def lock_data(self):
        if self._lock_data is None:
            self._lock_data = self._get_lock_data()

        return self._lock_data

    def is_locked(self):  # type: () -> bool
        """
        Checks whether the locker has been locked (lockfile found).
        """
        if not self._lock.exists():
            return False

        return "package" in self.lock_data

    def is_fresh(self):  # type: () -> bool
        """
        Checks whether the lock file is still up to date with the current hash.
        """
        lock = self._lock.read()
        metadata = lock.get("metadata", {})

        if "content-hash" in metadata:
            return self._content_hash == lock["metadata"]["content-hash"]

        return False

    def locked_repository(
        self, with_dev_reqs=False
    ):  # type: (bool) -> poetry.repositories.Repository
        """
        Searches and returns a repository of locked packages.
        """
        from poetry.factory import Factory

        if not self.is_locked():
            return poetry.repositories.Repository()

        lock_data = self.lock_data
        packages = poetry.repositories.Repository()

        if with_dev_reqs:
            locked_packages = lock_data["package"]
        else:
            locked_packages = [
                p for p in lock_data["package"] if p["category"] == "main"
            ]

        if not locked_packages:
            return packages

        for info in locked_packages:
            source = info.get("source", {})
            source_type = source.get("type")
            url = source.get("url")
            if source_type in ["directory", "file"]:
                url = self._lock.path.parent.joinpath(url).resolve().as_posix()

            package = Package(
                info["name"],
                info["version"],
                info["version"],
                source_type=source_type,
                source_url=url,
                source_reference=source.get("reference"),
                source_resolved_reference=source.get("resolved_reference"),
            )
            package.description = info.get("description", "")
            package.category = info["category"]
            package.optional = info["optional"]
            if "hashes" in lock_data["metadata"]:
                # Old lock so we create dummy files from the hashes
                package.files = [
                    {"name": h, "hash": h}
                    for h in lock_data["metadata"]["hashes"][info["name"]]
                ]
            else:
                package.files = lock_data["metadata"]["files"][info["name"]]

            package.python_versions = info["python-versions"]
            extras = info.get("extras", {})
            if extras:
                for name, deps in extras.items():
                    package.extras[name] = []

                    for dep in deps:
                        m = re.match(r"^(.+?)(?:\s+\((.+)\))?$", dep)
                        dep_name = m.group(1)
                        constraint = m.group(2) or "*"

                        package.extras[name].append(Dependency(dep_name, constraint))

            if "marker" in info:
                package.marker = parse_marker(info["marker"])
            else:
                # Compatibility for old locks
                if "requirements" in info:
                    dep = Dependency("foo", "0.0.0")
                    for name, value in info["requirements"].items():
                        if name == "python":
                            dep.python_versions = value
                        elif name == "platform":
                            dep.platform = value

                    split_dep = dep.to_pep_508(False).split(";")
                    if len(split_dep) > 1:
                        package.marker = parse_marker(split_dep[1].strip())

            for dep_name, constraint in info.get("dependencies", {}).items():
                if isinstance(constraint, list):
                    for c in constraint:
                        package.add_dependency(
                            Factory.create_dependency(
                                dep_name, c, root_dir=self._lock.path.parent
                            )
                        )

                    continue

                package.add_dependency(
                    Factory.create_dependency(
                        dep_name, constraint, root_dir=self._lock.path.parent
                    )
                )

            if "develop" in info:
                package.develop = info["develop"]

            packages.add_package(package)

        return packages

    def get_project_dependencies(
        self, project_requires, pinned_versions=False, with_nested=False
    ):  # type: (List[Dependency], bool, bool) -> Any
        packages = self.locked_repository().packages

        # group packages entries by name, this is required because requirement might use
        # different constraints
        packages_by_name = {}
        for pkg in packages:
            if pkg.name not in packages_by_name:
                packages_by_name[pkg.name] = []
            packages_by_name[pkg.name].append(pkg)

        def __get_locked_package(
            _dependency,
        ):  # type: (Dependency) -> Optional[Package]
            """
            Internal helper to identify corresponding locked package using dependency
            version constraints.
            """
            for _package in packages_by_name.get(_dependency.name, []):
                if _dependency.constraint.allows(_package.version):
                    return _package
            return None

        project_level_dependencies = set()
        dependencies = []

        for dependency in project_requires:
            dependency = deepcopy(dependency)
            if pinned_versions:
                locked_package = __get_locked_package(dependency)
                if locked_package:
                    dependency.set_constraint(
                        locked_package.to_dependency().constraint
                    )
            project_level_dependencies.add(dependency.name)
            dependencies.append(dependency)

        if not with_nested:
            # return only with project level dependencies
            return dependencies

        nested_dependencies = list()

        for pkg in packages:  # type: Package
            for requirement in pkg.requires:  # type: Dependency
                if requirement.name in project_level_dependencies:
                    # project level dependencies take precedence
                    continue

                if pinned_versions:
                    requirement.set_constraint(
                        __get_locked_package(requirement).to_dependency().constraint
                    )

                # dependencies use extra to indicate that it was activated via parent
                # package's extras
                marker = requirement.marker.without_extras()
                for project_requirement in project_requires:
                    if (
                        pkg.name == project_requirement.name
                        and project_requirement.constraint.allows(pkg.version)
                    ):
                        requirement.marker = marker.intersect(
                            project_requirement.marker
                        )
                        break
                else:
                    # this dependency was not from a project requirement
                    requirement.marker = marker.intersect(pkg.marker)

                if requirement not in nested_dependencies:
                    nested_dependencies.append(requirement)

        return sorted(
            itertools.chain(dependencies, nested_dependencies),
            key=lambda x: x.name.lower(),
        )

    def set_lock_data(self, root, packages):  # type: (...) -> bool
        files = table()
        packages = self._lock_packages(packages)
        # Retrieving hashes
        for package in packages:
            if package["name"] not in files:
                files[package["name"]] = []

            for f in package["files"]:
                file_metadata = inline_table()
                for k, v in sorted(f.items()):
                    file_metadata[k] = v

                files[package["name"]].append(file_metadata)

            if files[package["name"]]:
                files[package["name"]] = item(files[package["name"]]).multiline(True)

            del package["files"]

        lock = document()
        lock["package"] = packages

        if root.extras:
            lock["extras"] = {
                extra: [dep.pretty_name for dep in deps]
                for extra, deps in sorted(root.extras.items())
            }

        lock["metadata"] = OrderedDict(
            [
                ("lock-version", self._VERSION),
                ("python-versions", root.python_versions),
                ("content-hash", self._content_hash),
                ("files", files),
            ]
        )

        if not self.is_locked() or lock != self.lock_data:
            self._write_lock_data(lock)

            return True

        return False

    def _write_lock_data(self, data):
        self.lock.write(data)

        # Checking lock file data consistency
        if data != self.lock.read():
            raise RuntimeError("Inconsistent lock file data.")

        self._lock_data = None

    def _get_content_hash(self):  # type: () -> str
        """
        Returns the sha256 hash of the sorted content of the pyproject file.
        """
        content = self._local_config

        relevant_content = {}
        for key in self._relevant_keys:
            relevant_content[key] = content.get(key)

        content_hash = sha256(
            json.dumps(relevant_content, sort_keys=True).encode()
        ).hexdigest()

        return content_hash

    def _get_lock_data(self):  # type: () -> dict
        if not self._lock.exists():
            raise RuntimeError("No lockfile found. Unable to read locked packages")

        try:
            lock_data = self._lock.read()
        except TOMLKitError as e:
            raise RuntimeError("Unable to read the lock file ({}).".format(e))

        lock_version = Version.parse(lock_data["metadata"].get("lock-version", "1.0"))
        current_version = Version.parse(self._VERSION)
        # We expect the locker to be able to read lock files
        # from the same semantic versioning range
        accepted_versions = parse_constraint(
            "^{}".format(Version(current_version.major, 0))
        )
        lock_version_allowed = accepted_versions.allows(lock_version)
        if lock_version_allowed and current_version < lock_version:
            logger.warning(
                "The lock file might not be compatible with the current version of Poetry.\n"
                "Upgrade Poetry to ensure the lock file is read properly or, alternatively, "
                "regenerate the lock file with the `poetry lock` command."
            )
        elif not lock_version_allowed:
            raise RuntimeError(
                "The lock file is not compatible with the current version of Poetry.\n"
                "Upgrade Poetry to be able to read the lock file or, alternatively, "
                "regenerate the lock file with the `poetry lock` command."
            )

        return lock_data

    def _lock_packages(
        self, packages
    ):  # type: (List['poetry.packages.Package']) -> list
        locked = []

        for package in sorted(packages, key=lambda x: x.name):
            spec = self._dump_package(package)

            locked.append(spec)

        return locked

    def _dump_package(self, package):  # type: (Package) -> dict
        dependencies = {}
        for dependency in sorted(package.requires, key=lambda d: d.name):
            if dependency.pretty_name not in dependencies:
                dependencies[dependency.pretty_name] = []

            constraint = inline_table()
            constraint["version"] = str(dependency.pretty_constraint)

            if dependency.extras:
                constraint["extras"] = sorted(dependency.extras)

            if dependency.is_optional():
                constraint["optional"] = True

            if not dependency.marker.is_any():
                constraint["markers"] = str(dependency.marker)

            dependencies[dependency.pretty_name].append(constraint)

        # All the constraints should have the same type,
        # but we want to simplify them if it's possible
        for dependency, constraints in tuple(dependencies.items()):
            if all(len(constraint) == 1 for constraint in constraints):
                dependencies[dependency] = [
                    constraint["version"] for constraint in constraints
                ]

        data = OrderedDict(
            [
                ("name", package.pretty_name),
                ("version", package.pretty_version),
                ("description", package.description or ""),
                ("category", package.category),
                ("optional", package.optional),
                ("python-versions", package.python_versions),
                ("files", sorted(package.files, key=lambda x: x["file"])),
            ]
        )

        if dependencies:
            data["dependencies"] = table()
            for k, constraints in dependencies.items():
                if len(constraints) == 1:
                    data["dependencies"][k] = constraints[0]
                else:
                    data["dependencies"][k] = array().multiline(True)

                    for constraint in constraints:
                        data["dependencies"][k].append(constraint)

        if package.extras:
            extras = {}
            for name, deps in package.extras.items():
                extras[name] = [
                    str(dep) if not dep.constraint.is_any() else dep.name
                    for dep in deps
                ]

            data["extras"] = extras

        if package.source_url:
            url = package.source_url
            if package.source_type in ["file", "directory"]:
                # The lock file should only store paths relative to the root project
                url = Path(
                    os.path.relpath(
                        Path(url).as_posix(), self._lock.path.parent.as_posix()
                    )
                ).as_posix()

            data["source"] = OrderedDict()

            if package.source_type:
                data["source"]["type"] = package.source_type

            data["source"]["url"] = url

            if package.source_reference:
                data["source"]["reference"] = package.source_reference

            if package.source_resolved_reference:
                data["source"]["resolved_reference"] = package.source_resolved_reference

            if package.source_type == "directory":
                data["develop"] = package.develop

        return data
def handle(self) -> int | None:
    from pathlib import Path

    from poetry.core.pyproject.exceptions import PyProjectException
    from poetry.core.toml.file import TOMLFile

    from poetry.config.file_config_source import FileConfigSource
    from poetry.factory import Factory
    from poetry.locations import CONFIG_DIR

    config = Factory.create_config(self.io)
    config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")

    try:
        local_config_file = TOMLFile(self.poetry.file.parent / "poetry.toml")
        if local_config_file.exists():
            config.merge(local_config_file.read())
    except (RuntimeError, PyProjectException):
        local_config_file = TOMLFile(Path.cwd() / "poetry.toml")

    if self.option("local"):
        config.set_config_source(FileConfigSource(local_config_file))

    if not config_file.exists():
        config_file.path.parent.mkdir(parents=True, exist_ok=True)
        config_file.touch(mode=0o0600)

    if self.option("list"):
        self._list_configuration(config.all(), config.raw())

        return 0

    setting_key = self.argument("key")
    if not setting_key:
        return 0

    if self.argument("value") and self.option("unset"):
        raise RuntimeError("You can not combine a setting value with --unset")

    # show the value if no value is provided
    if not self.argument("value") and not self.option("unset"):
        m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key"))
        value: str | dict[str, Any]
        if m:
            if not m.group(1):
                value = {}
                if config.get("repositories") is not None:
                    value = config.get("repositories")
            else:
                repo = config.get(f"repositories.{m.group(1)}")
                if repo is None:
                    raise ValueError(f"There is no {m.group(1)} repository defined")

                value = repo

            self.line(str(value))
        else:
            if setting_key not in self.unique_config_values:
                raise ValueError(f"There is no {setting_key} setting.")

            value = config.get(setting_key)

            if not isinstance(value, str):
                value = json.dumps(value)

            self.line(value)

        return 0

    values: list[str] = self.argument("value")

    unique_config_values = self.unique_config_values
    if setting_key in unique_config_values:
        if self.option("unset"):
            config.config_source.remove_property(setting_key)
            return None

        return self._handle_single_value(
            config.config_source,
            setting_key,
            unique_config_values[setting_key],
            values,
        )

    # handle repositories
    m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key"))
    if m:
        if not m.group(1):
            raise ValueError("You cannot remove the [repositories] section")

        if self.option("unset"):
            repo = config.get(f"repositories.{m.group(1)}")
            if repo is None:
                raise ValueError(f"There is no {m.group(1)} repository defined")

            config.config_source.remove_property(f"repositories.{m.group(1)}")

            return 0

        if len(values) == 1:
            url = values[0]

            config.config_source.add_property(f"repositories.{m.group(1)}.url", url)

            return 0

        raise ValueError(
            "You must pass the url. "
            "Example: poetry config repositories.foo https://bar.com"
        )

    # handle auth
    m = re.match(r"^(http-basic|pypi-token)\.(.+)", self.argument("key"))
    if m:
        from poetry.utils.password_manager import PasswordManager

        password_manager = PasswordManager(config)
        if self.option("unset"):
            if m.group(1) == "http-basic":
                password_manager.delete_http_password(m.group(2))
            elif m.group(1) == "pypi-token":
                password_manager.delete_pypi_token(m.group(2))

            return 0

        if m.group(1) == "http-basic":
            if len(values) == 1:
                username = values[0]
                # Only username, so we prompt for password
                password = self.secret("Password:")
            elif len(values) != 2:
                raise ValueError(
                    "Expected one or two arguments "
                    f"(username, password), got {len(values)}"
                )
            else:
                username = values[0]
                password = values[1]

            password_manager.set_http_password(m.group(2), username, password)
        elif m.group(1) == "pypi-token":
            if len(values) != 1:
                raise ValueError(
                    f"Expected only one argument (token), got {len(values)}"
                )

            token = values[0]

            password_manager.set_pypi_token(m.group(2), token)

        return 0

    # handle certs
    m = re.match(
        r"(?:certificates)\.([^.]+)\.(cert|client-cert)", self.argument("key")
    )
    if m:
        if self.option("unset"):
            config.auth_config_source.remove_property(
                f"certificates.{m.group(1)}.{m.group(2)}"
            )

            return 0

        if len(values) == 1:
            config.auth_config_source.add_property(
                f"certificates.{m.group(1)}.{m.group(2)}", values[0]
            )
        else:
            raise ValueError("You must pass exactly 1 value")

        return 0

    raise ValueError(f"Setting {self.argument('key')} does not exist")
class Locker:
    _VERSION = "1.1"

    _relevant_keys = ["dependencies", "group", "source", "extras"]

    def __init__(self, lock: Union[str, Path], local_config: dict) -> None:
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data = None
        self._content_hash = self._get_content_hash()

    @property
    def lock(self) -> TOMLFile:
        return self._lock

    @property
    def lock_data(self) -> "TOMLDocument":
        if self._lock_data is None:
            self._lock_data = self._get_lock_data()

        return self._lock_data

    def is_locked(self) -> bool:
        """
        Checks whether the locker has been locked (lockfile found).
        """
        if not self._lock.exists():
            return False

        return "package" in self.lock_data

    def is_fresh(self) -> bool:
        """
        Checks whether the lock file is still up to date with the current hash.
        """
        lock = self._lock.read()
        metadata = lock.get("metadata", {})

        if "content-hash" in metadata:
            return self._content_hash == lock["metadata"]["content-hash"]

        return False

    def locked_repository(
        self, with_dev_reqs: bool = False
    ) -> poetry.repositories.Repository:
        """
        Searches and returns a repository of locked packages.
        """
        from poetry.factory import Factory

        if not self.is_locked():
            return poetry.repositories.Repository()

        lock_data = self.lock_data
        packages = poetry.repositories.Repository()

        if with_dev_reqs:
            locked_packages = lock_data["package"]
        else:
            locked_packages = [
                p for p in lock_data["package"] if p["category"] == "main"
            ]

        if not locked_packages:
            return packages

        for info in locked_packages:
            source = info.get("source", {})
            source_type = source.get("type")
            url = source.get("url")
            if source_type in ["directory", "file"]:
                url = self._lock.path.parent.joinpath(url).resolve().as_posix()

            package = Package(
                info["name"],
                info["version"],
                info["version"],
                source_type=source_type,
                source_url=url,
                source_reference=source.get("reference"),
                source_resolved_reference=source.get("resolved_reference"),
            )
            package.description = info.get("description", "")
            package.category = info.get("category", "main")
            package.groups = info.get("groups", ["default"])
            package.optional = info["optional"]
            if "hashes" in lock_data["metadata"]:
                # Old lock so we create dummy files from the hashes
                package.files = [
                    {"name": h, "hash": h}
                    for h in lock_data["metadata"]["hashes"][info["name"]]
                ]
            else:
                package.files = lock_data["metadata"]["files"][info["name"]]

            package.python_versions = info["python-versions"]
            extras = info.get("extras", {})
            if extras:
                for name, deps in extras.items():
                    package.extras[name] = []

                    for dep in deps:
                        try:
                            dependency = Dependency.create_from_pep_508(dep)
                        except InvalidRequirement:
                            # handle lock files with invalid PEP 508
                            m = re.match(r"^(.+?)(?:\[(.+?)])?(?:\s+\((.+)\))?$", dep)
                            dep_name = m.group(1)
                            extras = m.group(2) or ""
                            constraint = m.group(3) or "*"
                            dependency = Dependency(
                                dep_name, constraint, extras=extras.split(",")
                            )
                        package.extras[name].append(dependency)

            if "marker" in info:
                package.marker = parse_marker(info["marker"])
            else:
                # Compatibility for old locks
                if "requirements" in info:
                    dep = Dependency("foo", "0.0.0")
                    for name, value in info["requirements"].items():
                        if name == "python":
                            dep.python_versions = value
                        elif name == "platform":
                            dep.platform = value

                    split_dep = dep.to_pep_508(False).split(";")
                    if len(split_dep) > 1:
                        package.marker = parse_marker(split_dep[1].strip())

            for dep_name, constraint in info.get("dependencies", {}).items():
                root_dir = self._lock.path.parent
                if package.source_type == "directory":
                    # root dir should be the source of the package relative to the
                    # lock path
                    root_dir = Path(package.source_url)

                if isinstance(constraint, list):
                    for c in constraint:
                        package.add_dependency(
                            Factory.create_dependency(dep_name, c, root_dir=root_dir)
                        )

                    continue

                package.add_dependency(
                    Factory.create_dependency(dep_name, constraint, root_dir=root_dir)
                )

            if "develop" in info:
                package.develop = info["develop"]

            packages.add_package(package)

        return packages

    @staticmethod
    def __get_locked_package(
        _dependency: Dependency, packages_by_name: Dict[str, List[Package]]
    ) -> Optional[Package]:
        """
        Internal helper to identify corresponding locked package using dependency
        version constraints.
        """
        for _package in packages_by_name.get(_dependency.name, []):
            if _dependency.constraint.allows(_package.version):
                return _package
        return None

    @classmethod
    def __walk_dependency_level(
        cls,
        dependencies: List[Dependency],
        level: int,
        pinned_versions: bool,
        packages_by_name: Dict[str, List[Package]],
        project_level_dependencies: Set[str],
        nested_dependencies: Dict[Tuple[str, str], Dependency],
    ) -> Dict[Tuple[str, str], Dependency]:
        if not dependencies:
            return nested_dependencies

        next_level_dependencies = []

        for requirement in dependencies:
            key = (requirement.name, requirement.pretty_constraint)
            locked_package = cls.__get_locked_package(requirement, packages_by_name)

            if locked_package:
                # create dependency from locked package to retain dependency metadata
                # if this is not done, we can end-up with incorrect nested dependencies
                marker = requirement.marker
                requirement = locked_package.to_dependency()
                requirement.marker = requirement.marker.intersect(marker)

                key = (requirement.name, requirement.pretty_constraint)

                if pinned_versions:
                    requirement.set_constraint(
                        locked_package.to_dependency().constraint
                    )

                if key not in nested_dependencies:
                    for require in locked_package.requires:
                        if require.marker.is_empty():
                            require.marker = requirement.marker
                        else:
                            require.marker = require.marker.intersect(
                                requirement.marker
                            )

                        require.marker = require.marker.intersect(locked_package.marker)
                        next_level_dependencies.append(require)

            if requirement.name in project_level_dependencies and level == 0:
                # project level dependencies take precedence
                continue

            if not locked_package:
                # we make a copy to avoid any side-effects
                requirement = deepcopy(requirement)

            if key not in nested_dependencies:
                nested_dependencies[key] = requirement
            else:
                nested_dependencies[key].marker = nested_dependencies[key].marker.union(
                    requirement.marker
                )

        return cls.__walk_dependency_level(
            dependencies=next_level_dependencies,
            level=level + 1,
            pinned_versions=pinned_versions,
            packages_by_name=packages_by_name,
            project_level_dependencies=project_level_dependencies,
            nested_dependencies=nested_dependencies,
        )

    @classmethod
    def get_project_dependencies(
        cls,
        project_requires: List[Dependency],
        locked_packages: List[Package],
        pinned_versions: bool = False,
        with_nested: bool = False,
    ) -> Iterable[Dependency]:
        # group packages entries by name, this is required because requirement might
        # use different constraints
        packages_by_name = {}
        for pkg in locked_packages:
            if pkg.name not in packages_by_name:
                packages_by_name[pkg.name] = []
            packages_by_name[pkg.name].append(pkg)

        project_level_dependencies = set()
        dependencies = []

        for dependency in project_requires:
            dependency = deepcopy(dependency)
            locked_package = cls.__get_locked_package(dependency, packages_by_name)
            if locked_package:
                locked_dependency = locked_package.to_dependency()
                locked_dependency.marker = dependency.marker.intersect(
                    locked_package.marker
                )

                if not pinned_versions:
                    locked_dependency.set_constraint(dependency.constraint)

                dependency = locked_dependency

            project_level_dependencies.add(dependency.name)
            dependencies.append(dependency)

        if not with_nested:
            # return only with project level dependencies
            return dependencies

        nested_dependencies = cls.__walk_dependency_level(
            dependencies=dependencies,
            level=0,
            pinned_versions=pinned_versions,
            packages_by_name=packages_by_name,
            project_level_dependencies=project_level_dependencies,
            nested_dependencies=dict(),
        )

        # Merge same dependencies using marker union
        for requirement in dependencies:
            key = (requirement.name, requirement.pretty_constraint)
            if key not in nested_dependencies:
                nested_dependencies[key] = requirement
            else:
                nested_dependencies[key].marker = nested_dependencies[key].marker.union(
                    requirement.marker
                )

        return sorted(nested_dependencies.values(), key=lambda x: x.name.lower())

    def get_project_dependency_packages(
        self,
        project_requires: List[Dependency],
        dev: bool = False,
        extras: Optional[Union[bool, Sequence[str]]] = None,
    ) -> Iterator[DependencyPackage]:
        repository = self.locked_repository(with_dev_reqs=dev)

        # Build a set of all packages required by our selected extras
        extra_package_names = (
            None if (isinstance(extras, bool) and extras is True) else ()
        )

        if extra_package_names is not None:
            extra_package_names = set(
                get_extra_package_names(
                    repository.packages,
                    self.lock_data.get("extras", {}),
                    extras or (),
                )
            )

        # If a package is optional and we haven't opted in to it, do not select
        selected = []
        for dependency in project_requires:
            try:
                package = repository.find_packages(dependency=dependency)[0]
            except IndexError:
                continue

            if extra_package_names is not None and (
                package.optional and package.name not in extra_package_names
            ):
                # a package is locked as optional, but is not activated via extras
                continue

            selected.append(dependency)

        for dependency in self.get_project_dependencies(
            project_requires=selected,
            locked_packages=repository.packages,
            with_nested=True,
        ):
            try:
                package = repository.find_packages(dependency=dependency)[0]
            except IndexError:
                continue

            for extra in dependency.extras:
                package.requires_extras.append(extra)

            yield DependencyPackage(dependency=dependency, package=package)

    def set_lock_data(self, root: Package, packages: List[Package]) -> bool:
        files = table()
        packages = self._lock_packages(packages)
        # Retrieving hashes
        for package in packages:
            if package["name"] not in files:
                files[package["name"]] = []

            for f in package["files"]:
                file_metadata = inline_table()
                for k, v in sorted(f.items()):
                    file_metadata[k] = v

                files[package["name"]].append(file_metadata)

            if files[package["name"]]:
                files[package["name"]] = item(files[package["name"]]).multiline(True)

            del package["files"]

        lock = document()
        lock["package"] = packages

        if root.extras:
            lock["extras"] = {
                extra: [dep.pretty_name for dep in deps]
                for extra, deps in sorted(root.extras.items())
            }

        lock["metadata"] = dict(
            [
                ("lock-version", self._VERSION),
                ("python-versions", root.python_versions),
                ("content-hash", self._content_hash),
                ("files", files),
            ]
        )

        if not self.is_locked() or lock != self.lock_data:
            self._write_lock_data(lock)

            return True

        return False

    def _write_lock_data(self, data: "TOMLDocument") -> None:
        self.lock.write(data)

        # Checking lock file data consistency
        if data != self.lock.read():
            raise RuntimeError("Inconsistent lock file data.")

        self._lock_data = None

    def _get_content_hash(self) -> str:
        """
        Returns the sha256 hash of the sorted content of the pyproject file.
        """
        content = self._local_config

        relevant_content = {}
        for key in self._relevant_keys:
            relevant_content[key] = content.get(key)

        content_hash = sha256(
            json.dumps(relevant_content, sort_keys=True).encode()
        ).hexdigest()

        return content_hash

    def _get_lock_data(self) -> "TOMLDocument":
        if not self._lock.exists():
            raise RuntimeError("No lockfile found. Unable to read locked packages")

        try:
            lock_data = self._lock.read()
        except TOMLKitError as e:
            raise RuntimeError(f"Unable to read the lock file ({e}).")

        lock_version = Version.parse(lock_data["metadata"].get("lock-version", "1.0"))
        current_version = Version.parse(self._VERSION)
        # We expect the locker to be able to read lock files
        # from the same semantic versioning range
        accepted_versions = parse_constraint(
            "^{}".format(Version.from_parts(current_version.major, 0))
        )
        lock_version_allowed = accepted_versions.allows(lock_version)
        if lock_version_allowed and current_version < lock_version:
            logger.warning(
                "The lock file might not be compatible with the current version of Poetry.\n"
                "Upgrade Poetry to ensure the lock file is read properly or, alternatively, "
                "regenerate the lock file with the `poetry lock` command."
            )
        elif not lock_version_allowed:
            raise RuntimeError(
                "The lock file is not compatible with the current version of Poetry.\n"
                "Upgrade Poetry to be able to read the lock file or, alternatively, "
                "regenerate the lock file with the `poetry lock` command."
            )

        return lock_data

    def _lock_packages(self, packages: List[Package]) -> list:
        locked = []

        for package in sorted(packages, key=lambda x: x.name):
            spec = self._dump_package(package)

            locked.append(spec)

        return locked

    def _dump_package(self, package: Package) -> dict:
        dependencies = {}
        for dependency in sorted(package.requires, key=lambda d: d.name):
            if dependency.pretty_name not in dependencies:
                dependencies[dependency.pretty_name] = []

            constraint = inline_table()

            if dependency.is_directory() or dependency.is_file():
                constraint["path"] = dependency.path.as_posix()

                if dependency.is_directory() and dependency.develop:
                    constraint["develop"] = True
            elif dependency.is_url():
                constraint["url"] = dependency.url
            elif dependency.is_vcs():
                constraint[dependency.vcs] = dependency.source

                if dependency.branch:
                    constraint["branch"] = dependency.branch
                elif dependency.tag:
                    constraint["tag"] = dependency.tag
                elif dependency.rev:
                    constraint["rev"] = dependency.rev
            else:
                constraint["version"] = str(dependency.pretty_constraint)

            if dependency.extras:
                constraint["extras"] = sorted(dependency.extras)

            if dependency.is_optional():
                constraint["optional"] = True

            if not dependency.marker.is_any():
                constraint["markers"] = str(dependency.marker)

            dependencies[dependency.pretty_name].append(constraint)

        # All the constraints should have the same type,
        # but we want to simplify them if it's possible
        for dependency, constraints in tuple(dependencies.items()):
            if all(
                len(constraint) == 1 and "version" in constraint
                for constraint in constraints
            ):
                dependencies[dependency] = [
                    constraint["version"] for constraint in constraints
                ]

        data = dict(
            [
                ("name", package.pretty_name),
                ("version", package.pretty_version),
                ("description", package.description or ""),
                ("category", package.category),
                ("optional", package.optional),
                ("python-versions", package.python_versions),
                ("files", sorted(package.files, key=lambda x: x["file"])),
            ]
        )

        if dependencies:
            data["dependencies"] = table()
            for k, constraints in dependencies.items():
                if len(constraints) == 1:
                    data["dependencies"][k] = constraints[0]
                else:
                    data["dependencies"][k] = array().multiline(True)

                    for constraint in constraints:
                        data["dependencies"][k].append(constraint)
array().multiline(True) for constraint in constraints: data["dependencies"][k].append(constraint) if package.extras: extras = {} for name, deps in package.extras.items(): # TODO: This should use dep.to_pep_508() once this is fixed # https://github.com/python-poetry/poetry-core/pull/102 extras[name] = [ dep.base_pep_508_name if not dep.constraint.is_any() else dep.name for dep in deps ] data["extras"] = extras if package.source_url: url = package.source_url if package.source_type in ["file", "directory"]: # The lock file should only store paths relative to the root project url = Path( os.path.relpath( Path(url).as_posix(), self._lock.path.parent.as_posix())).as_posix() data["source"] = dict() if package.source_type: data["source"]["type"] = package.source_type data["source"]["url"] = url if package.source_reference: data["source"]["reference"] = package.source_reference if package.source_resolved_reference: data["source"][ "resolved_reference"] = package.source_resolved_reference if package.source_type in ["directory", "git"]: data["develop"] = package.develop return data
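
# A minimal, self-contained sketch of the content-hash scheme implemented by
# Locker._get_content_hash above: only the sections listed in _relevant_keys
# feed the digest, so cosmetic pyproject.toml edits (name, description, etc.)
# do not mark the lock file as stale. The local_config dict below is
# illustrative sample data, not taken from a real project.
import json
from hashlib import sha256

local_config = {
    "name": "simple-project",  # ignored: not in _relevant_keys
    "dependencies": {"python": "^3.7", "requests": "^2.25"},
}

# missing relevant keys still contribute None, exactly as content.get(key) does
relevant_content = {
    key: local_config.get(key)
    for key in ["dependencies", "group", "source", "extras"]
}

# sort_keys=True makes the digest independent of key ordering in pyproject.toml
content_hash = sha256(
    json.dumps(relevant_content, sort_keys=True).encode()
).hexdigest()
print(content_hash)  # comparable to metadata.content-hash in poetry.lock
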
def test_activate_activates_recreates_for_different_patch(
    tmp_dir: str,
    manager: EnvManager,
    poetry: "Poetry",
    config: "Config",
    mocker: "MockerFixture",
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.0"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[
            ("/prefix", None),
            ('{"version_info": [3, 7, 0]}', None),
            ("/prefix", None),
            ("/prefix", None),
            ("/prefix", None),
        ],
    )
    build_venv_m = mocker.patch(
        "poetry.utils.env.EnvManager.build_venv", side_effect=build_venv
    )
    remove_venv_m = mocker.patch(
        "poetry.utils.env.EnvManager.remove_venv", side_effect=EnvManager.remove_venv
    )

    env = manager.activate("python3.7", NullIO())

    build_venv_m.assert_called_with(
        Path(tmp_dir) / f"{venv_name}-py3.7",
        executable="/usr/bin/python3.7",
        flags={"always-copy": False, "system-site-packages": False},
        with_pip=True,
        with_setuptools=True,
        with_wheel=True,
    )
    remove_venv_m.assert_called_with(Path(tmp_dir) / f"{venv_name}-py3.7")

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
    assert env.base == Path("/prefix")
    assert (Path(tmp_dir) / f"{venv_name}-py3.7").exists()
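
# A minimal sketch of the envs.toml bookkeeping the activation tests above
# exercise: EnvManager keeps one record per generated env name, and activate()
# rewrites the minor/patch fields when switching or recreating interpreters,
# while the virtualenv directories themselves are named f"{venv_name}-py{minor}".
# The env name below is a hypothetical stand-in for generate_env_name output.
import tomlkit

doc = tomlkit.document()
doc["simple-project-W7qMQexF"] = {"minor": "3.7", "patch": "3.7.0"}

# after activating a python3.6 interpreter, the record would be rewritten to:
doc["simple-project-W7qMQexF"] = {"minor": "3.6", "patch": "3.6.6"}
print(tomlkit.dumps(doc))
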