def test_executor_should_use_cached_link_and_hash(
    tmp_venv: VirtualEnv,
    pool: Pool,
    config: Config,
    io: BufferedIO,
    mocker: MockerFixture,
    fixture_dir: FixtureDirGetter,
):
    """The executor must hand back the Chef's cached archive link when one exists."""
    # A file:/// URI pointing at a real wheel is a valid Link.
    wheel = fixture_dir("distributions").joinpath(
        "demo-0.1.0-py2.py3-none-any.whl"
    )
    cached = Link(wheel.as_uri())
    mocker.patch(
        "poetry.installation.chef.Chef.get_cached_archive_for_link",
        return_value=cached,
    )

    demo = Package("demo", "0.1.0")
    # Setting files forces the executor down the hash-verification path.
    demo.files = [
        {
            "file": "demo-0.1.0-py2.py3-none-any.whl",
            "hash": "sha256:70e704135718fffbcbf61ed1fc45933cfd86951a744b681000eaaa75da31f17a",  # noqa: E501
        }
    ]

    executor = Executor(tmp_venv, pool, config, io)
    result = executor._download_link(
        Install(demo),
        Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl"),
    )

    assert result == cached
def test_package_clone(f: Factory) -> None:
    # TODO(nic): any Package attribute added later but left at its default by
    # this setup could make two packages that should differ compare equal.  A
    # factory producing a Package with every field randomized would be the
    # rigorous fix, but that's likely overkill.
    original = Package(
        "lol_wut",
        "3.141.5926535",
        pretty_version="③.⑭.⑮",
        source_type="git",
        source_url="http://some.url",
        source_reference="fe4d2adabf3feb5d32b70ab5c105285fa713b10c",
        source_resolved_reference="fe4d2adabf3feb5d32b70ab5c105285fa713b10c",
        features=["abc", "def"],
        develop=random.choice((True, False)),
    )
    original.add_dependency(Factory.create_dependency("foo", "^1.2.3"))
    original.add_dependency(
        Factory.create_dependency("foo", "^1.2.3", groups=["dev"])
    )
    original.files = (["file1", "file2", "file3"],)  # type: ignore[assignment]
    original.homepage = "https://some.other.url"
    original.repository_url = "http://bug.farm"
    original.documentation_url = "http://lorem.ipsum/dolor/sit.amet"

    duplicate = original.clone()

    # The clone must be equal attribute-for-attribute...
    assert original == duplicate
    assert original.__dict__ == duplicate.__dict__
    # ...and carry over both the default-group and dev-group dependencies.
    assert len(duplicate.requires) == 1
    assert len(duplicate.all_requires) == 2
def test_chooser_chooses_distributions_that_match_the_package_hashes(
    env,
    mock_pypi,
    mock_legacy,
    source_type,
    pool,
):
    """A distribution whose digest matches the locked hash is selected."""
    chooser = Chooser(pool, env)

    files = [
        {
            "hash": "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8",
            "filename": "isort-4.3.4.tar.gz",
        }
    ]

    package = Package("isort", "4.3.4")
    if source_type == "legacy":
        # Rebuild the package as one coming from a legacy (simple) repository.
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )
    package.files = files

    chosen = chooser.choose_for(package)
    assert chosen.filename == "isort-4.3.4.tar.gz"
def test_chooser_throws_an_error_if_package_hashes_do_not_match(
    env,
    mock_pypi,
    mock_legacy,
    source_type,
    pool,
):
    """When no candidate distribution matches the locked hash, the chooser
    must raise a RuntimeError whose message names the expected hash."""
    chooser = Chooser(pool, env)
    package = Package("isort", "4.3.4")
    files = [
        {
            # Deliberately bogus digest: no real distribution can match it.
            "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000000",
            "filename": "isort-4.3.4.tar.gz",
        }
    ]
    if source_type == "legacy":
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )
    package.files = files

    with pytest.raises(RuntimeError) as e:
        chooser.choose_for(package)

    # Bug fix: assert against the raised exception's message.  `str(e)`
    # stringifies pytest's ExceptionInfo wrapper (file/line summary), not the
    # error itself, so the previous check could pass or fail for the wrong
    # reason; `e.value` is the actual exception instance.
    assert files[0]["hash"] in str(e.value)
def test_executor_should_use_cached_link_and_hash(
    config, io, pool, mocker, fixture_dir, tmp_dir
):
    # A file:/// URI pointing at a real wheel is a valid Link.
    cached_link = Link(
        fixture_dir("distributions")
        .joinpath("demo-0.1.0-py2.py3-none-any.whl")
        .as_uri()
    )
    mocker.patch.object(
        Chef, "get_cached_archive_for_link", side_effect=lambda _: cached_link
    )

    executor = Executor(MockEnv(path=Path(tmp_dir)), pool, config, io)

    pkg = Package("demo", "0.1.0")
    pkg.files = [
        {
            "file": "demo-0.1.0-py2.py3-none-any.whl",
            "hash": "md5:15507846fd4299596661d0197bfb4f90",
        }
    ]

    downloaded = executor._download_link(
        Install(pkg), Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl")
    )

    assert downloaded == cached_link
def test_executor_should_check_every_possible_hash_types(
    config, io, pool, mocker, fixture_dir, tmp_dir
):
    wheel = fixture_dir("distributions").joinpath(
        "demo-0.1.0-py2.py3-none-any.whl"
    )
    # No cache hit: the Chef hands the link straight back...
    mocker.patch.object(
        Chef,
        "get_cached_archive_for_link",
        side_effect=lambda link: link,
    )
    # ...so the executor "downloads" our fixture wheel instead.
    mocker.patch.object(Executor, "_download_archive", return_value=wheel)

    executor = Executor(MockEnv(path=Path(tmp_dir)), pool, config, io)

    pkg = Package("demo", "0.1.0")
    # Only an md5 digest is locked; the download must still verify against it.
    pkg.files = [
        {
            "file": "demo-0.1.0-py2.py3-none-any.whl",
            "hash": "md5:15507846fd4299596661d0197bfb4f90",
        }
    ]

    archive = executor._download_link(
        Install(pkg), Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl")
    )

    assert archive == wheel
def test_requirement(installer: PipInstaller):
    pkg = Package("ipython", "7.5.0")
    pkg.files = [
        {"file": "foo-0.1.0.tar.gz", "hash": "md5:dbdc53e3918f28fa335a173432402a00"},
        {
            # Bare digest: no "algo:" prefix.  Per `expected` below it must be
            # emitted with an explicit sha256 prefix.
            "file": "foo.0.1.0.whl",
            "hash": "e840810029224b56cd0d9e7719dc3b39cf84d577f8ac686547c8ba7a06eeab26",
        },
    ]

    expected = (
        "ipython==7.5.0 "
        "--hash md5:dbdc53e3918f28fa335a173432402a00 "
        "--hash sha256:e840810029224b56cd0d9e7719dc3b39cf84d577f8ac686547c8ba7a06eeab26"
        "\n"
    )

    assert installer.requirement(pkg, formatted=True) == expected
def test_executor_should_check_every_possible_hash_types_before_failing(
    config, io, pool, mocker, fixture_dir, tmp_dir
):
    wheel = fixture_dir("distributions").joinpath(
        "demo-0.1.0-py2.py3-none-any.whl"
    )
    # No cache hit; the "download" yields our fixture wheel.
    mocker.patch.object(
        Chef,
        "get_cached_archive_for_link",
        side_effect=lambda link: link,
    )
    mocker.patch.object(Executor, "_download_archive", return_value=wheel)

    executor = Executor(MockEnv(path=Path(tmp_dir)), pool, config, io)

    pkg = Package("demo", "0.1.0")
    # Neither locked digest matches the archive, so the download must fail
    # only after both algorithms have been tried.
    pkg.files = [
        {"file": "demo-0.1.0-py2.py3-none-any.whl", "hash": "md5:123456"},
        {"file": "demo-0.1.0-py2.py3-none-any.whl", "hash": "sha256:123456"},
    ]

    expected_message = (
        "Invalid hashes "
        "("
        "md5:15507846fd4299596661d0197bfb4f90, "
        "sha256:70e704135718fffbcbf61ed1fc45933cfd86951a744b681000eaaa75da31f17a"
        ") "
        "for demo (0.1.0) using archive demo-0.1.0-py2.py3-none-any.whl. "
        "Expected one of md5:123456, sha256:123456."
    )

    with pytest.raises(RuntimeError, match=re.escape(expected_message)):
        executor._download_link(
            Install(pkg),
            Link("https://example.com/demo-0.1.0-py2.py3-none-any.whl"),
        )
def locked_repository(
    self, with_dev_reqs: bool = False
) -> poetry.repositories.Repository:
    """
    Searches and returns a repository of locked packages.

    :param with_dev_reqs: when True include every locked package; otherwise
        only those whose category is "main".
    """
    from poetry.factory import Factory

    if not self.is_locked():
        # No lock file: return an empty repository.
        return poetry.repositories.Repository()

    lock_data = self.lock_data
    packages = poetry.repositories.Repository()

    if with_dev_reqs:
        locked_packages = lock_data["package"]
    else:
        locked_packages = [
            p for p in lock_data["package"] if p["category"] == "main"
        ]

    if not locked_packages:
        return packages

    for info in locked_packages:
        source = info.get("source", {})
        source_type = source.get("type")
        url = source.get("url")
        if source_type in ["directory", "file"]:
            # Local sources are stored relative to the lock file's directory.
            url = self._lock.path.parent.joinpath(url).resolve().as_posix()

        package = Package(
            info["name"],
            info["version"],
            info["version"],
            source_type=source_type,
            source_url=url,
            source_reference=source.get("reference"),
            source_resolved_reference=source.get("resolved_reference"),
        )
        package.description = info.get("description", "")
        package.category = info.get("category", "main")
        package.groups = info.get("groups", ["default"])
        package.optional = info["optional"]
        if "hashes" in lock_data["metadata"]:
            # Old lock so we create dummy files from the hashes
            package.files = [
                {"name": h, "hash": h}
                for h in lock_data["metadata"]["hashes"][info["name"]]
            ]
        else:
            package.files = lock_data["metadata"]["files"][info["name"]]

        package.python_versions = info["python-versions"]

        extras = info.get("extras", {})
        if extras:
            for name, deps in extras.items():
                package.extras[name] = []

                for dep in deps:
                    try:
                        dependency = Dependency.create_from_pep_508(dep)
                    except InvalidRequirement:
                        # handle lock files with invalid PEP 508
                        m = re.match(
                            r"^(.+?)(?:\[(.+?)])?(?:\s+\((.+)\))?$", dep
                        )
                        dep_name = m.group(1)
                        # Bug fix: this parse result used to be bound to the
                        # name `extras`, shadowing the extras dict iterated by
                        # the enclosing loop.
                        dep_extras = m.group(2) or ""
                        constraint = m.group(3) or "*"
                        dependency = Dependency(
                            dep_name, constraint, extras=dep_extras.split(",")
                        )

                    package.extras[name].append(dependency)

        if "marker" in info:
            package.marker = parse_marker(info["marker"])
        else:
            # Compatibility for old locks: reconstruct a marker from the
            # legacy "requirements" mapping, if any.
            if "requirements" in info:
                dep = Dependency("foo", "0.0.0")
                for name, value in info["requirements"].items():
                    if name == "python":
                        dep.python_versions = value
                    elif name == "platform":
                        dep.platform = value

                split_dep = dep.to_pep_508(False).split(";")
                if len(split_dep) > 1:
                    package.marker = parse_marker(split_dep[1].strip())

        for dep_name, constraint in info.get("dependencies", {}).items():
            root_dir = self._lock.path.parent
            if package.source_type == "directory":
                # root dir should be the source of the package relative to the
                # lock path
                root_dir = Path(package.source_url)

            if isinstance(constraint, list):
                for c in constraint:
                    package.add_dependency(
                        Factory.create_dependency(dep_name, c, root_dir=root_dir)
                    )

                continue

            package.add_dependency(
                Factory.create_dependency(dep_name, constraint, root_dir=root_dir)
            )

        if "develop" in info:
            package.develop = info["develop"]

        packages.add_package(package)

    return packages
def locked_repository(self, with_dev_reqs=False
                      ):  # type: (bool) -> poetry.repositories.Repository
    """
    Searches and returns a repository of locked packages.

    :param with_dev_reqs: when True include every locked package; otherwise
        only those whose category is "main".
    """
    if not self.is_locked():
        # No lock file yet: nothing to return.
        return poetry.repositories.Repository()

    lock_data = self.lock_data
    packages = poetry.repositories.Repository()

    if with_dev_reqs:
        locked_packages = lock_data["package"]
    else:
        locked_packages = [
            p for p in lock_data["package"] if p["category"] == "main"
        ]

    if not locked_packages:
        return packages

    for info in locked_packages:
        package = Package(info["name"], info["version"], info["version"])
        package.description = info.get("description", "")
        package.category = info["category"]
        package.optional = info["optional"]
        if "hashes" in lock_data["metadata"]:
            # Old lock so we create dummy files from the hashes
            package.files = [
                {"name": h, "hash": h}
                for h in lock_data["metadata"]["hashes"][info["name"]]
            ]
        else:
            package.files = lock_data["metadata"]["files"][info["name"]]

        package.python_versions = info["python-versions"]
        extras = info.get("extras", {})
        if extras:
            for name, deps in extras.items():
                package.extras[name] = []

                for dep in deps:
                    # Entries look like "name (constraint)"; the constraint
                    # part is optional and defaults to "*".
                    m = re.match(r"^(.+?)(?:\s+\((.+)\))?$", dep)
                    dep_name = m.group(1)
                    constraint = m.group(2) or "*"
                    package.extras[name].append(
                        Dependency(dep_name, constraint))

        if "marker" in info:
            package.marker = parse_marker(info["marker"])
        else:
            # Compatibility for old locks: rebuild a marker from the legacy
            # "requirements" mapping via a throwaway Dependency.
            if "requirements" in info:
                dep = Dependency("foo", "0.0.0")
                for name, value in info["requirements"].items():
                    if name == "python":
                        dep.python_versions = value
                    elif name == "platform":
                        dep.platform = value

                split_dep = dep.to_pep_508(False).split(";")
                if len(split_dep) > 1:
                    package.marker = parse_marker(split_dep[1].strip())

        for dep_name, constraint in info.get("dependencies", {}).items():
            if isinstance(constraint, list):
                # Multiple constraints for the same name (e.g. per-marker).
                for c in constraint:
                    package.add_dependency(dep_name, c)

                continue

            package.add_dependency(dep_name, constraint)

        if "develop" in info:
            package.develop = info["develop"]

        if "source" in info:
            package.source_type = info["source"].get("type", "")
            package.source_url = info["source"]["url"]
            package.source_reference = info["source"]["reference"]

        packages.add_package(package)

    return packages
def to_package(
    self,
    name: Optional[str] = None,
    extras: Optional[List[str]] = None,
    root_dir: Optional[Path] = None,
) -> Package:
    """
    Create a new `poetry.core.packages.package.Package` instance using metadata from
    this instance.

    :param name: Name to use for the package, if not specified name from this
        instance is used.
    :param extras: Extras to activate for this package.
    :param root_dir: Optional root directory to use for the package. If set,
        dependency strings will be parsed relative to this directory.
    """
    # NOTE(review): the ``extras`` parameter is not referenced in this body —
    # confirm whether it is intentionally unused.
    name = name or self.name

    if not self.version:
        # The version could not be determined, so we raise an error since it is
        # mandatory.
        raise RuntimeError(
            "Unable to retrieve the package version for {}".format(name))

    package = Package(
        name=name,
        version=self.version,
        source_type=self._source_type,
        source_url=self._source_url,
        source_reference=self._source_reference,
    )
    package.description = self.summary
    package.root_dir = root_dir
    package.python_versions = self.requires_python or "*"
    package.files = self.files

    if root_dir or (self._source_type in {"directory"} and self._source_url):
        # this is a local poetry project, this means we can extract "richer"
        # requirement information, eg: development requirements etc.
        poetry_package = self._get_poetry_package(
            path=root_dir or self._source_url)
        if poetry_package:
            package.extras = poetry_package.extras
            package.requires = poetry_package.requires

            return package

    seen_requirements = set()

    for req in self.requires_dist or []:
        try:
            # Attempt to parse the PEP-508 requirement string
            dependency = Dependency.create_from_pep_508(
                req, relative_to=root_dir)
        except InvalidMarker:
            # Invalid marker, We strip the markers hoping for the best
            req = req.split(";")[0]
            dependency = Dependency.create_from_pep_508(
                req, relative_to=root_dir)
        except ValueError:
            # Likely unable to parse constraint so we skip it
            self._log(
                "Invalid constraint ({}) found in {}-{} dependencies, "
                "skipping".format(req, package.name, package.version),
                level="warning",
            )
            continue

        if dependency.in_extras:
            # this dependency is required by an extra package
            for extra in dependency.in_extras:
                if extra not in package.extras:
                    # this is the first time we encounter this extra for this
                    # package
                    package.extras[extra] = []

                package.extras[extra].append(dependency)

        # Deduplicate on the normalized PEP-508 string so the same requirement
        # is only recorded once.
        req = dependency.to_pep_508(with_extras=True)

        if req not in seen_requirements:
            package.requires.append(dependency)
            seen_requirements.add(req)

    return package
def to_package(
    self,
    name: str | None = None,
    extras: list[str] | None = None,
    root_dir: Path | None = None,
) -> Package:
    """
    Create a new `poetry.core.packages.package.Package` instance using metadata from
    this instance.

    :param name: Name to use for the package, if not specified name from this
        instance is used.
    :param extras: Extras to activate for this package.
    :param root_dir: Optional root directory to use for the package. If set,
        dependency strings will be parsed relative to this directory.

    :raises RuntimeError: if neither a name nor a version is available.
    """
    # NOTE(review): the ``extras`` parameter is not referenced in this body —
    # confirm whether it is intentionally unused.
    name = name or self.name

    if not name:
        raise RuntimeError("Unable to create package with no name")

    if not self.version:
        # The version could not be determined, so we raise an error since it is
        # mandatory.
        raise RuntimeError(
            f"Unable to retrieve the package version for {name}")

    package = Package(
        name=name,
        version=self.version,
        source_type=self._source_type,
        source_url=self._source_url,
        source_reference=self._source_reference,
    )
    if self.summary is not None:
        package.description = self.summary
    package.root_dir = root_dir
    package.python_versions = self.requires_python or "*"
    package.files = self.files

    # If this is a local poetry project, we can extract "richer" requirement
    # information, eg: development requirements etc.
    if root_dir is not None:
        path = root_dir
    elif self._source_type == "directory" and self._source_url is not None:
        path = Path(self._source_url)
    else:
        path = None

    if path is not None:
        poetry_package = self._get_poetry_package(path=path)
        if poetry_package:
            package.extras = poetry_package.extras

            for dependency in poetry_package.requires:
                package.add_dependency(dependency)

            return package

    seen_requirements = set()

    for req in self.requires_dist or []:
        try:
            # Attempt to parse the PEP-508 requirement string
            dependency = Dependency.create_from_pep_508(
                req, relative_to=root_dir)
        except InvalidMarker:
            # Invalid marker, We strip the markers hoping for the best
            req = req.split(";")[0]
            dependency = Dependency.create_from_pep_508(
                req, relative_to=root_dir)
        except ValueError:
            # Likely unable to parse constraint so we skip it
            logger.debug(
                f"Invalid constraint ({req}) found in"
                f" {package.name}-{package.version} dependencies, skipping",
            )
            continue

        if dependency.in_extras:
            # this dependency is required by an extra package
            for extra in dependency.in_extras:
                if extra not in package.extras:
                    # this is the first time we encounter this extra for this
                    # package
                    package.extras[extra] = []

                package.extras[extra].append(dependency)

        # Deduplicate on the normalized PEP-508 string so the same requirement
        # is only recorded once.
        req = dependency.to_pep_508(with_extras=True)

        if req not in seen_requirements:
            package.add_dependency(dependency)
            seen_requirements.add(req)

    return package