def test_pip_install_link(tmp_dir, tmp_venv, fixture_dir):
    """pip_install accepts a Link pointing at a local wheel and installs it."""
    wheel = fixture_dir("distributions/demo-0.1.0-py2.py3-none-any.whl")
    link = Link(path_to_url(wheel))

    output = pip_install(link, tmp_venv)

    assert "Successfully installed demo-0.1.0" in output
def base_pep_508_name(self):  # type: () -> str
    """Return the base PEP-508 requirement string for this dependency.

    Produces ``name[extra1,extra2] @ <path-or-url>``; the path is converted
    to a ``file://`` URL only when it is absolute, otherwise it is kept as-is.
    """
    requirement = self.pretty_name

    if self.extras:
        # Sort extras so the rendered requirement is deterministic
        # (set iteration order is arbitrary).
        requirement += "[{}]".format(",".join(sorted(self.extras)))

    path = path_to_url(self.path) if self.path.is_absolute() else self.path
    requirement += " @ {}".format(path)

    return requirement
def base_pep_508_name(self) -> str:
    """Build the base PEP-508 requirement string for this dependency.

    Extras are rendered in sorted order; an absolute path is turned into a
    ``file://`` URL while a relative path is embedded verbatim.
    """
    parts = [self.pretty_name]

    if self.extras:
        joined = ",".join(sorted(self.extras))
        parts.append(f"[{joined}]")

    if self.path.is_absolute():
        location = path_to_url(self.path)
    else:
        location = self.path
    parts.append(f" @ {location}")

    return "".join(parts)
def __init__(
    self,
    url: str,
    comes_from: Any | None = None,
    requires_python: str | None = None,
    metadata: str | bool | None = None,
) -> None:
    """
    Object representing a parsed link from https://pypi.python.org/simple/*

    url:
        url of the resource pointed to (href of the link)
    comes_from:
        instance of HTMLPage where the link was found, or string.
    requires_python:
        String containing the `Requires-Python` metadata field, specified
        in PEP 345. This may be specified by a data-requires-python
        attribute in the HTML link tag, as described in PEP 503.
    metadata:
        String of the syntax `<hashname>=<hashvalue>` representing the hash
        of the Core Metadata file. This may be specified by a
        data-dist-info-metadata attribute in the HTML link tag, as described
        in PEP 658.
    """
    # A Windows UNC share (``\\host\share``) is not a URL yet — convert it.
    if url.startswith("\\\\"):
        url = path_to_url(url)

    self.url = url
    self.comes_from = comes_from
    # Normalize an empty requires-python string to None.
    self.requires_python = requires_python or None

    if isinstance(metadata, str):
        # PEP 658 boolean-ish attribute values map to True/False; any other
        # string (a hash expression) is kept verbatim.
        normalized = metadata.strip().lower()
        if normalized == "true":
            metadata = True
        elif normalized in ("", "false"):
            metadata = False

    self._metadata = metadata
def _export_requirements_txt(
    self,
    cwd: "Path",
    output: Union["IO", str],
    with_hashes: bool = True,
    dev: bool = False,
    extras: Optional[Union[bool, Sequence[str]]] = None,
    with_credentials: bool = False,
    with_urls: bool = True,
) -> None:
    """Render the locked dependency set as ``requirements.txt`` content.

    :param cwd: base directory handed to ``self._output`` for resolving the target.
    :param output: file-like object or file name to write the content to.
    :param with_hashes: append ``--hash=`` entries for each package file.
    :param dev: include development dependencies.
    :param extras: extras passed through to the locker when selecting packages.
    :param with_credentials: use authenticated repository URLs in the header.
    :param with_urls: emit ``--index-url``/``--extra-index-url`` header lines.
    """
    indexes = set()
    content = ""
    dependency_lines = set()

    for dependency_package in self._poetry.locker.get_project_dependency_packages(
        project_requires=self._poetry.package.all_requires, dev=dev, extras=extras
    ):
        line = ""

        dependency = dependency_package.dependency
        package = dependency_package.package

        # Editable installs get the pip "-e " prefix.
        if package.develop:
            line += "-e "

        requirement = dependency.to_pep_508(with_extras=False)
        is_direct_local_reference = (
            dependency.is_file() or dependency.is_directory()
        )
        is_direct_remote_reference = dependency.is_vcs() or dependency.is_url()

        if is_direct_remote_reference:
            # VCS/URL dependencies already render as a full PEP-508 line.
            line = requirement
        elif is_direct_local_reference:
            dependency_uri = path_to_url(dependency.source_url)
            line = f"{dependency.name} @ {dependency_uri}"
        else:
            line = f"{package.name}=={package.version}"

        # Re-attach environment markers (the part after ";") for pinned
        # and local-path lines; remote-reference lines already carry them.
        if not is_direct_remote_reference and ";" in requirement:
            markers = requirement.split(";", 1)[1].strip()
            if markers:
                line += f" ; {markers}"

        # Only registry-sourced packages contribute to the index header.
        if (
            not is_direct_remote_reference
            and not is_direct_local_reference
            and package.source_url
        ):
            indexes.add(package.source_url)

        if package.files and with_hashes:
            hashes = []
            for f in package.files:
                h = f["hash"]
                # Hashes without an explicit "algo:" prefix are assumed sha256.
                algorithm = "sha256"
                if ":" in h:
                    algorithm, h = h.split(":")
                    # Skip algorithms pip would reject.
                    if algorithm not in self.ALLOWED_HASH_ALGORITHMS:
                        continue
                hashes.append(f"{algorithm}:{h}")

            if hashes:
                sep = " \\\n"
                line += sep + sep.join(f" --hash={h}" for h in hashes)

        dependency_lines.add(line)

    # Sort for a stable, diff-friendly output.
    content += "\n".join(sorted(dependency_lines))
    content += "\n"

    if indexes and with_urls:
        # If we have extra indexes, we add them to the beginning of the output
        indexes_header = ""
        for index in sorted(indexes):
            repositories = [
                r
                for r in self._poetry.pool.repositories
                if r.url == index.rstrip("/")
            ]
            if not repositories:
                continue
            repository = repositories[0]
            if (
                self._poetry.pool.has_default()
                and repository is self._poetry.pool.repositories[0]
            ):
                # The pool's first (default) repository becomes --index-url.
                url = (
                    repository.authenticated_url
                    if with_credentials
                    else repository.url
                )
                indexes_header = f"--index-url {url}\n"
                continue

            url = (
                repository.authenticated_url if with_credentials else repository.url
            )
            parsed_url = urllib.parse.urlsplit(url)
            # Plain-http indexes need --trusted-host for pip to accept them.
            if parsed_url.scheme == "http":
                indexes_header += f"--trusted-host {parsed_url.netloc}\n"
            indexes_header += f"--extra-index-url {url}\n"

        content = indexes_header + "\n" + content

    self._output(content, cwd, output)
def create_from_pep_508(cls, name: str, relative_to: Path | None = None) -> Dependency:
    """
    Resolve a PEP-508 requirement string to a `Dependency` instance.

    If a `relative_to` path is specified, this is used as the base directory
    if the identified dependency is of file or directory type.
    """
    from poetry.core.packages.url_dependency import URLDependency
    from poetry.core.packages.utils.link import Link
    from poetry.core.packages.utils.utils import is_archive_file
    from poetry.core.packages.utils.utils import is_installable_dir
    from poetry.core.packages.utils.utils import is_url
    from poetry.core.packages.utils.utils import path_to_url
    from poetry.core.packages.utils.utils import strip_extras
    from poetry.core.packages.utils.utils import url_to_path
    from poetry.core.packages.vcs_dependency import VCSDependency
    from poetry.core.utils.patterns import wheel_file_re
    from poetry.core.vcs.git import ParsedUrl
    from poetry.core.version.requirements import Requirement

    # Removing comments
    parts = name.split(" #", 1)
    name = parts[0].strip()
    if len(parts) > 1:
        rest = parts[1]
        # Preserve an environment-marker section that followed the comment.
        if " ;" in rest:
            name += " ;" + rest.split(" ;", 1)[1]

    req = Requirement(name)
    name = req.name

    # Classify the requirement: bare URL, "name @ url", or a filesystem path.
    link = None
    if is_url(name):
        link = Link(name)
    elif req.url:
        link = Link(req.url)
    else:
        path_str = os.path.normpath(os.path.abspath(name))
        p, extras = strip_extras(path_str)
        # Only treat it as a directory dependency when the string looks like
        # a path (contains a separator or starts with ".").
        if os.path.isdir(p) and (os.path.sep in name or name.startswith(".")):
            if not is_installable_dir(p):
                raise ValueError(
                    f"Directory {name!r} is not installable. File 'setup.py' "
                    "not found."
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        is_file_uri = link.scheme == "file"
        is_relative_uri = is_file_uri and re.search(r"\.\./", link.url)

        # Handle relative file URLs
        if is_file_uri and is_relative_uri:
            path = Path(link.path)
            if relative_to:
                path = relative_to / path
            link = Link(path_to_url(path))

        # wheel file
        version = None
        if link.is_wheel:
            m = wheel_file_re.match(link.filename)
            if not m:
                raise ValueError(f"Invalid wheel name: {link.filename}")
            # The wheel filename is authoritative for name and version.
            name = m.group("name")
            version = m.group("ver")

        dep: Dependency | None = None

        if link.scheme.startswith("git+"):
            url = ParsedUrl.parse(link.url)
            dep = VCSDependency(
                name,
                "git",
                url.url,
                rev=url.rev,
                directory=url.subdirectory,
                extras=req.extras,
            )
        elif link.scheme == "git":
            dep = VCSDependency(
                name, "git", link.url_without_fragment, extras=req.extras
            )
        elif link.scheme in ["http", "https"]:
            dep = URLDependency(name, link.url, extras=req.extras)
        elif is_file_uri:
            # handle RFC 8089 references
            path = url_to_path(req.url)
            dep = _make_file_or_dir_dep(
                name=name, path=path, base=relative_to, extras=req.extras
            )
        else:
            with suppress(ValueError):
                # this is a local path not using the file URI scheme
                dep = _make_file_or_dir_dep(
                    name=name,
                    path=Path(req.url),
                    base=relative_to,
                    extras=req.extras,
                )

        if dep is None:
            dep = Dependency(name, version or "*", extras=req.extras)

        if version:
            # Pin the dependency to the exact version from the wheel name.
            dep._constraint = parse_constraint(version)
    else:
        # Plain "name constraint" requirement (no link involved).
        constraint: VersionConstraint | str
        if req.pretty_constraint:
            constraint = req.constraint
        else:
            constraint = "*"

        dep = Dependency(name, constraint, extras=req.extras)

    if req.marker:
        dep.marker = req.marker

    return dep
def test_url_to_path_path_to_url_symmetry_win():
    """Round-tripping Windows drive and UNC paths through the URL helpers is lossless."""
    for raw in (r"C:\tmp\file", r"\\unc\share\path"):
        assert url_to_path(path_to_url(raw)) == Path(raw)
def test_path_to_url_win():
    """Windows paths (drive letters, UNC shares, relative names) map to file URLs."""
    expected_drive_url = "file:///c:/tmp/file"
    assert path_to_url("c:/tmp/file") == expected_drive_url
    assert path_to_url("c:\\tmp\\file") == expected_drive_url

    assert path_to_url(r"\\unc\as\path") == "file://unc/as/path"

    # A relative name is resolved against the current directory.
    absolute = (Path(".") / "file").absolute().as_posix()
    assert path_to_url("file") == "file:///" + absolute
def test_path_to_url_unix():
    """POSIX absolute and relative paths map to file URLs."""
    assert path_to_url("/tmp/file") == "file:///tmp/file"

    # A relative name is resolved against the current directory.
    absolute = (Path(".") / "file").absolute().as_posix()
    assert path_to_url("file") == "file://" + absolute
def _export_requirements_txt(
    self,
    cwd,
    output,
    with_hashes=True,
    dev=False,
    extras=None,
    with_credentials=False,
):  # type: (Path, Union[IO, str], bool, bool, Optional[Union[bool, Sequence[str]]], bool) -> None
    """Render the locked dependency set as ``requirements.txt`` content.

    ``cwd`` is the base directory handed to ``self._output``; ``output`` is a
    file-like object or file name. ``with_hashes`` appends ``--hash=`` entries,
    ``dev`` includes development dependencies, ``extras`` is forwarded to the
    locker, and ``with_credentials`` uses authenticated repository URLs in the
    index header.
    """
    indexes = set()
    content = ""
    dependency_lines = set()

    for dependency_package in self._poetry.locker.get_project_dependency_packages(
        project_requires=self._poetry.package.all_requires, dev=dev, extras=extras
    ):
        line = ""

        dependency = dependency_package.dependency
        package = dependency_package.package

        # Editable installs get the pip "-e " prefix.
        if package.develop:
            line += "-e "

        requirement = dependency.to_pep_508(with_extras=False)
        is_direct_local_reference = (
            dependency.is_file() or dependency.is_directory()
        )
        is_direct_remote_reference = dependency.is_vcs() or dependency.is_url()

        if is_direct_remote_reference:
            # VCS/URL dependencies already render as a full PEP-508 line.
            line = requirement
        elif is_direct_local_reference:
            dependency_uri = path_to_url(dependency.source_url)
            line = "{} @ {}".format(dependency.name, dependency_uri)
        else:
            line = "{}=={}".format(package.name, package.version)

        # Re-attach environment markers (the part after ";") for pinned
        # and local-path lines; remote-reference lines already carry them.
        if not is_direct_remote_reference:
            if ";" in requirement:
                markers = requirement.split(";", 1)[1].strip()
                if markers:
                    line += "; {}".format(markers)

        # Only registry-sourced packages contribute to the index header.
        if (
            not is_direct_remote_reference
            and not is_direct_local_reference
            and package.source_url
        ):
            indexes.add(package.source_url)

        if package.files and with_hashes:
            hashes = []
            for f in package.files:
                h = f["hash"]
                # Hashes without an explicit "algo:" prefix are assumed sha256.
                algorithm = "sha256"
                if ":" in h:
                    algorithm, h = h.split(":")
                    # Skip algorithms pip would reject.
                    if algorithm not in self.ALLOWED_HASH_ALGORITHMS:
                        continue
                hashes.append("{}:{}".format(algorithm, h))

            if hashes:
                line += " \\\n"
                for i, h in enumerate(hashes):
                    # Continuation backslash on every hash line except the last.
                    line += " --hash={}{}".format(
                        h, " \\\n" if i < len(hashes) - 1 else ""
                    )

        dependency_lines.add(line)

    # Sort for a stable, diff-friendly output.
    content += "\n".join(sorted(dependency_lines))
    content += "\n"

    if indexes:
        # If we have extra indexes, we add them to the beginning of the output
        indexes_header = ""
        for index in sorted(indexes):
            repositories = [
                r
                for r in self._poetry.pool.repositories
                if r.url == index.rstrip("/")
            ]
            if not repositories:
                continue
            repository = repositories[0]
            if (
                self._poetry.pool.has_default()
                and repository is self._poetry.pool.repositories[0]
            ):
                # The pool's first (default) repository becomes --index-url.
                url = (
                    repository.authenticated_url
                    if with_credentials
                    else repository.url
                )
                indexes_header = "--index-url {}\n".format(url)
                continue

            url = (
                repository.authenticated_url if with_credentials else repository.url
            )
            parsed_url = urlparse.urlsplit(url)
            # Plain-http indexes need --trusted-host for pip to accept them.
            if parsed_url.scheme == "http":
                indexes_header += "--trusted-host {}\n".format(parsed_url.netloc)
            indexes_header += "--extra-index-url {}\n".format(url)

        content = indexes_header + "\n" + content

    self._output(content, cwd, output)