def __init__(
    self,
    project: str,
    version: str = "0.1.0",
    description: str = "",
    readme_format: str = "md",
    author: Optional[str] = None,
    license: Optional[str] = None,  # noqa
    python: str = "*",
    dependencies: Optional[Dict[str, str]] = None,
    dev_dependencies: Optional[Dict[str, str]] = None,
):
    """Capture the metadata for a new project scaffold.

    Raises ValueError when ``readme_format`` is not one of
    ``ACCEPTED_README_FORMATS``.
    """
    canonical = canonicalize_name(project)
    self._project = canonical.replace(".", "-")
    # A dotted project name maps to a nested package directory layout.
    self._package_path_relative = Path(
        *(module_name(part) for part in canonical.split("."))
    )
    self._package_name = ".".join(self._package_path_relative.parts)

    self._version = version
    self._description = description

    self._readme_format = readme_format.lower()
    if self._readme_format not in self.ACCEPTED_README_FORMATS:
        raise ValueError(
            "Invalid readme format '{}', use one of {}.".format(
                readme_format, ", ".join(self.ACCEPTED_README_FORMATS)
            )
        )

    self._license = license
    self._python = python
    self._dependencies = dependencies or {}
    self._dev_dependencies = dev_dependencies or {}

    # Fall back to a placeholder author when none was supplied.
    self._author = author or "Your Name <*****@*****.**>"
def __init__(
    self,
    project: str,
    version: str = "0.1.0",
    description: str = "",
    readme_format: str = "md",
    author: str | None = None,
    license: str | None = None,
    python: str = "*",
    dependencies: dict[str, str | Mapping[str, Any]] | None = None,
    dev_dependencies: dict[str, str | Mapping[str, Any]] | None = None,
) -> None:
    """Store the normalized project identity and scaffolding metadata."""
    canonical = canonicalize_name(project)
    self._project = canonical.replace(".", "-")
    # Each dotted segment of the project name becomes one package directory.
    self._package_path_relative = Path(
        *(module_name(part) for part in canonical.split("."))
    )
    self._package_name = ".".join(self._package_path_relative.parts)

    self._version = version
    self._description = description
    self._readme_format = readme_format.lower()

    self._license = license
    self._python = python
    self._dependencies = dependencies or {}
    self._dev_dependencies = dev_dependencies or {}

    # Placeholder author when the caller provided none.
    self._author = author or "Your Name <*****@*****.**>"
def get_existing_packages_from_input(
    self, packages: list[str], section: dict[str, Any]
) -> list[str]:
    """Return the entries of *packages* already present in *section*.

    Names are compared after canonicalization, so ``Foo-Bar`` matches
    ``foo_bar``. One entry is produced per matching section key.
    """
    return [
        name
        for name in packages
        for key in section
        if canonicalize_name(key) == canonicalize_name(name)
    ]
def _remove_packages(self, packages: list[str], section: dict[str, Any],
                     group_name: str) -> list[str]:
    """Remove *packages* from *section* and from dependency group *group_name*.

    Matching is done on canonicalized names. Returns the list of input
    names that were actually removed.

    Improvement: ``canonicalize_name(package)`` was re-evaluated for every
    section key; it is loop-invariant and is now hoisted out of the inner
    loop.
    """
    removed = []
    group = self.poetry.package.dependency_group(group_name)

    for package in packages:
        canonical = canonicalize_name(package)
        # Snapshot the keys so deletion during iteration is safe.
        for existing_package in list(section.keys()):
            if canonicalize_name(existing_package) == canonical:
                del section[existing_package]
                removed.append(package)
                group.remove_dependency(package)

    return removed
def __init__(
    self,
    name,  # type: str
    constraint,  # type: str
    optional=False,  # type: bool
    category="main",  # type: str
    allows_prereleases=False,  # type: bool
):
    """Create a dependency on *name* restricted by *constraint*."""
    self._name = canonicalize_name(name)
    self._pretty_name = name

    if isinstance(constraint, VersionConstraint):
        self._constraint = constraint
    else:
        try:
            self._constraint = parse_constraint(constraint)
        except ValueError:
            # An unparsable constraint degrades to "any version".
            self._constraint = parse_constraint("*")

    self._pretty_constraint = str(constraint)
    self._optional = optional
    self._category = category
    self._allows_prereleases = allows_prereleases

    # Python/platform restrictions default to unrestricted.
    self._python_versions = "*"
    self._python_constraint = parse_constraint("*")
    self._platform = "*"
    self._platform_constraint = EmptyConstraint()

    self._extras = []
    self._in_extras = []

    # Optional dependencies start deactivated.
    self._activated = not self._optional

    self.is_root = False
def __init__(self, name,  # type: str
             constraint,  # type: str
             optional=False,  # type: bool
             category='main',  # type: str
             allows_prereleases=False  # type: bool
             ):
    """Create a dependency on *name* restricted by *constraint*."""
    self._name = canonicalize_name(name)
    self._pretty_name = name
    self._parser = VersionParser()

    if isinstance(constraint, BaseConstraint):
        self._constraint = constraint
    else:
        try:
            self._constraint = self._parser.parse_constraints(constraint)
        except ValueError:
            # An unparsable constraint degrades to "any version".
            self._constraint = self._parser.parse_constraints('*')

    # NOTE: the raw constraint (not its string form) is kept for display.
    self._pretty_constraint = constraint
    self._optional = optional
    self._category = category
    self._allows_prereleases = allows_prereleases

    # Python/platform restrictions default to unrestricted.
    self._python_versions = '*'
    self._python_constraint = self._parser.parse_constraints('*')
    self._platform = '*'
    self._platform_constraint = EmptyConstraint()

    self._extras = []
    self._in_extras = []
def link_package_data(cls, link: Link) -> Package | None:
    """Parse a distribution *link* into a Package, or None if unusable.

    Name/version are taken from the wheel/sdist filename regexes first,
    then from ``VERSION_REGEX`` as a fallback.

    Bug fix: on a failed ``Version.parse`` the debug log previously
    interpolated ``version`` (still ``None`` at that point) instead of the
    offending ``version_string``.
    """
    name, version_string, version = None, None, None

    m = wheel_file_re.match(link.filename) or sdist_file_re.match(link.filename)
    if m:
        name = canonicalize_name(m.group("name"))
        version_string = m.group("ver")
    else:
        info, ext = link.splitext()
        match = cls.VERSION_REGEX.match(info)
        if match:
            name = match.group(1)
            version_string = match.group(2)

    if version_string:
        try:
            version = Version.parse(version_string)
        except ValueError:
            logger.debug(
                "Skipping url (%s) due to invalid version (%s)",
                link.url,
                version_string,
            )
            return None

    pkg = None
    if name and version:
        pkg = Package(name, version, source_url=link.url)
    return pkg
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect summary, dependencies and digests for *name*==*version*.

    Raises ValueError when the index page is missing or exposes no
    distribution links for the requested version.

    Bug fix: when falling back to the default link, ``wheel_file_re`` may
    fail to match an ``is_wheel`` filename; ``m.group(...)`` then raised
    AttributeError. A None-guard now skips the malformed filename instead.
    """
    page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise ValueError('No package named "{}"'.format(name))

    data = {
        "name": name,
        "version": version,
        "summary": "",
        "requires_dist": [],
        "requires_python": [],
        "digests": [],
    }

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise ValueError(
            'No valid distribution links found for package: "{}" version: "{}"'
            .format(name, version))

    urls = {}
    hashes = []
    default_link = links[0]
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"] = link.url
        elif link.filename.endswith(".tar.gz"):
            urls["sdist"] = link.url
        elif link.filename.endswith((".zip", ".bz2")) and "sdist" not in urls:
            urls["sdist"] = link.url

        hash = link.hash
        if link.hash_name == "sha256":
            hashes.append(hash)

    data["digests"] = hashes

    if not urls:
        if default_link.is_wheel:
            m = wheel_file_re.match(default_link.filename)
            if m is not None:
                python = m.group("pyver")
                platform = m.group("plat")
                # Only a universal wheel is a safe metadata source.
                if python == "py2.py3" and platform == "any":
                    urls["bdist_wheel"] = default_link.url
        elif default_link.filename.endswith(".tar.gz"):
            urls["sdist"] = default_link.url
        elif (default_link.filename.endswith((".zip", ".bz2"))
              and "sdist" not in urls):
            urls["sdist"] = default_link.url
        else:
            return data

    info = self._get_info_from_urls(urls)

    data["summary"] = info["summary"]
    data["requires_dist"] = info["requires_dist"]
    data["requires_python"] = info["requires_python"]

    return data
def find_packages(self, name, constraint=None, extras=None,
                  allow_prereleases=False):
    """Return Package objects from this legacy index matching *constraint*.

    Matching versions are cached for 5 minutes under a name(:constraint)
    key. Returns an empty list when the index has no page for *name*.
    """
    if constraint is None:
        constraint = "*"
    if not isinstance(constraint, VersionConstraint):
        constraint = parse_constraint(constraint)

    # A pre-release bound implicitly opts in to pre-release versions.
    if isinstance(constraint, VersionRange):
        has_prerelease_bound = (
            (constraint.max is not None and constraint.max.is_prerelease())
            or (constraint.min is not None and constraint.min.is_prerelease())
        )
        if has_prerelease_bound:
            allow_prereleases = True

    key = name if constraint.is_any() else "{}:{}".format(name, str(constraint))

    if self._cache.store("matches").has(key):
        versions = self._cache.store("matches").get(key)
    else:
        page = self._get("/{}/".format(
            canonicalize_name(name).replace(".", "-")))
        if page is None:
            return []

        versions = [
            version
            for version in page.versions
            if (allow_prereleases or not version.is_prerelease())
            and constraint.allows(version)
        ]
        self._cache.store("matches").put(key, versions, 5)

    packages = []
    for version in versions:
        package = Package(name, version)
        package.source_type = "legacy"
        package.source_reference = self.name
        package.source_url = self._url
        if extras is not None:
            package.requires_extras = extras
        packages.append(package)

    self._log(
        "{} packages found for {} {}".format(len(packages), name,
                                             str(constraint)),
        level="debug",
    )

    return packages
def links_for_version(self, name: str, version: Version) -> Iterator[Link]:
    """Yield each link on this page whose package matches *name*/*version*."""
    canonical = canonicalize_name(name)
    for candidate in self.links:
        parsed = self.link_package_data(candidate)
        if parsed and parsed.name == canonical and parsed.version == version:
            yield candidate
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect summary, dependencies and digests for *name*==*version*.

    Bug fix: an empty link list previously crashed with IndexError on
    ``links[0]``; it now raises a descriptive ValueError (matching the
    behaviour of the newer variant of this method).
    """
    page = self._get('/{}'.format(
        canonicalize_name(name).replace('.', '-')))
    if page is None:
        raise ValueError('No package named "{}"'.format(name))

    data = {
        'name': name,
        'version': version,
        'summary': '',
        'requires_dist': [],
        'requires_python': [],
        'digests': []
    }

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise ValueError(
            'No valid distribution links found for package: "{}" version: "{}"'
            .format(name, version))

    urls = {}
    hashes = []
    default_link = links[0]
    for link in links:
        if link.is_wheel:
            urls['bdist_wheel'] = link.url
        elif link.filename.endswith('.tar.gz'):
            urls['sdist'] = link.url
        elif link.filename.endswith(
                ('.zip', '.bz2')) and 'sdist' not in urls:
            urls['sdist'] = link.url

        hash = link.hash
        if link.hash_name == 'sha256':
            hashes.append(hash)

    data['digests'] = hashes

    if not urls:
        if default_link.is_wheel:
            m = wheel_file_re.match(default_link.filename)
            python = m.group('pyver')
            platform = m.group('plat')
            # Only a universal wheel is a safe metadata source.
            if python == 'py2.py3' and platform == 'any':
                urls['bdist_wheel'] = default_link.url
        elif default_link.filename.endswith('.tar.gz'):
            urls['sdist'] = default_link.url
        elif default_link.filename.endswith(
                ('.zip', '.bz2')) and 'sdist' not in urls:
            urls['sdist'] = default_link.url
        else:
            return data

    info = self._get_info_from_urls(urls)

    data['summary'] = info['summary']
    data['requires_dist'] = info['requires_dist']
    data['requires_python'] = info['requires_python']

    return data
def versions(self, name: str) -> Iterator[Version]:
    """Yield the distinct versions of *name* found among this page's links."""
    canonical = canonicalize_name(name)
    emitted: set[Version] = set()
    for link in self.links:
        parsed = self.link_package_data(link)
        if not parsed or parsed.name != canonical:
            continue
        if parsed.version in emitted:
            continue
        emitted.add(parsed.version)
        yield parsed.version
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect summary, dependencies and digests for *name*==*version*.

    Bug fix: an empty link list previously crashed with IndexError on
    ``links[0]``; it now raises a descriptive ValueError instead.
    """
    page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise ValueError('No package named "{}"'.format(name))

    data = {
        "name": name,
        "version": version,
        "summary": "",
        "requires_dist": [],
        "requires_python": [],
        "digests": [],
    }

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise ValueError(
            'No valid distribution links found for package: "{}" version: "{}"'.format(
                name, version
            )
        )

    urls = {}
    hashes = []
    default_link = links[0]
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"] = link.url
        elif link.filename.endswith(".tar.gz"):
            urls["sdist"] = link.url
        elif link.filename.endswith((".zip", ".bz2")) and "sdist" not in urls:
            urls["sdist"] = link.url

        hash = link.hash
        if link.hash_name == "sha256":
            hashes.append(hash)

    data["digests"] = hashes

    if not urls:
        if default_link.is_wheel:
            m = wheel_file_re.match(default_link.filename)
            python = m.group("pyver")
            platform = m.group("plat")
            # Only a universal wheel is a safe metadata source.
            if python == "py2.py3" and platform == "any":
                urls["bdist_wheel"] = default_link.url
        elif default_link.filename.endswith(".tar.gz"):
            urls["sdist"] = default_link.url
        elif (
            default_link.filename.endswith((".zip", ".bz2")) and "sdist" not in urls
        ):
            urls["sdist"] = default_link.url
        else:
            return data

    info = self._get_info_from_urls(urls)

    data["summary"] = info["summary"]
    data["requires_dist"] = info["requires_dist"]
    data["requires_python"] = info["requires_python"]

    return data
def __init__(self, name, version, pretty_version=None):
    """
    Creates a new in memory package.
    """
    self._pretty_name = name
    self._name = canonicalize_name(name)

    # Accept either a Version instance or a parseable version string.
    if isinstance(version, Version):
        self._version = version
        self._pretty_version = pretty_version or self._version.text
    else:
        self._version = Version.parse(version)
        self._pretty_version = pretty_version or version

    self.description = ""
    self._authors = []
    self.homepage = None
    self.repository_url = None
    self.keywords = []
    self._license = None
    self.readme = None

    # Source provenance (e.g. legacy index, git, directory).
    self.source_type = ""
    self.source_reference = ""
    self.source_url = ""

    self.requires = []
    self.dev_requires = []
    self.extras = {}
    self.requires_extras = []

    self.category = "main"
    self.hashes = []
    self.optional = False

    # Requirements for making it mandatory
    self.requirements = {}

    self.build = None
    self.include = []
    self.exclude = []
    self.classifiers = []

    # Unrestricted python/platform until told otherwise.
    self._python_versions = "*"
    self._python_constraint = parse_constraint("*")
    self._platform = "*"
    self._platform_constraint = EmptyConstraint()

    self.root_dir = None
    self.develop = False
def test_generate_choice_list(
    tester: CommandTester,
    package_name: str,
    _generate_choice_list_packages: list[Package],
):
    """The exact-match package should be offered as the first choice."""
    choices = tester.command._generate_choice_list(
        _generate_choice_list_packages, canonicalize_name(package_name)
    )
    assert choices[0] == "Flask"
def get_extra_package_names(
    packages: Sequence["Package"],
    extras: Mapping[str, List[str]],
    extra_names: Sequence[str],
) -> Iterator[str]:
    """
    Returns all package names required by the given extras.

    :param packages: A collection of packages, such as from Repository.packages
    :param extras: A mapping of `extras` names to lists of package names, as defined
        in the `extras` section of `poetry.lock`.
    :param extra_names: A list of strings specifying names of extra groups to
        resolve.
    """
    from poetry.core.packages.package import Package  # noqa
    from poetry.utils.helpers import canonicalize_name

    if not extra_names:
        return []

    # lookup for packages by name, faster than looping over packages repeatedly
    packages_by_name = {package.name: package for package in packages}

    # get and flatten names of packages we've opted into as extras
    extra_package_names = [
        canonicalize_name(extra_package_name)
        for extra_name in extra_names
        for extra_package_name in extras.get(extra_name, ())
    ]

    # keep record of packages seen during recursion in order to avoid recursion error
    seen_package_names = set()

    def _extra_packages(package_names: Iterable[str]) -> Iterator[str]:
        """Recursively find dependencies for packages names"""
        # for each extra package name
        for package_name in package_names:
            # Find the actual Package object. A missing key indicates an implicit
            # dependency (like setuptools), which should be ignored
            package = packages_by_name.get(canonicalize_name(package_name))
            if package:
                if package.name not in seen_package_names:
                    seen_package_names.add(package.name)
                    yield package.name
                    # Recurse for dependencies.
                    # NOTE(review): the inner generator expression is evaluated
                    # lazily, so its seen-set filter observes names added during
                    # the recursive call — this ordering is load-bearing.
                    for dependency_package_name in _extra_packages(
                        dependency.name
                        for dependency in package.requires
                        if dependency.name not in seen_package_names
                    ):
                        seen_package_names.add(dependency_package_name)
                        yield dependency_package_name

    return _extra_packages(extra_package_names)
def __init__(self, name, version, pretty_version=None):
    """
    Creates a new in memory package.
    """
    self._pretty_name = name
    self._name = canonicalize_name(name)

    # Normalize the version string and remember its stability channel.
    self._version = str(parse_version(version))
    self._pretty_version = pretty_version or version
    self._stability = parse_stability(version)
    self._dev = self._stability == 'dev'

    self.description = ''
    self._authors = []
    self.homepage = None
    self.repository_url = None
    self.keywords = []
    self._license = None
    self.readme = None

    # Source provenance (e.g. legacy index, git, directory).
    self.source_type = ''
    self.source_reference = ''
    self.source_url = ''

    self.requires = []
    self.dev_requires = []
    self.extras = {}
    self.requires_extras = []

    self._parser = VersionParser()

    self.category = 'main'
    self.hashes = []
    self.optional = False

    # Requirements for making it mandatory
    self.requirements = {}

    self.build = None
    self.include = []
    self.exclude = []
    self.classifiers = []

    # Unrestricted python/platform until told otherwise.
    self._python_versions = '*'
    self._python_constraint = self._parser.parse_constraints('*')
    self._platform = '*'
    self._platform_constraint = EmptyConstraint()

    self.cwd = None
def __init__(self, name, version, pretty_version=None):
    """
    Creates a new in memory package.
    """
    self._pretty_name = name
    self._name = canonicalize_name(name)

    # Accept either a Version instance or a parseable version string.
    if isinstance(version, Version):
        self._version = version
        self._pretty_version = pretty_version or self._version.text
    else:
        self._version = Version.parse(version)
        self._pretty_version = pretty_version or version

    self.description = ""
    self._authors = []
    self._maintainers = []
    self.homepage = None
    self.repository_url = None
    self.documentation_url = None
    self.keywords = []
    self._license = None
    self.readme = None

    # Source provenance (e.g. legacy index, git, directory).
    self.source_name = ""
    self.source_type = ""
    self.source_reference = ""
    self.source_url = ""

    self.requires = []
    self.dev_requires = []
    self.extras = {}
    self.requires_extras = []

    self.category = "main"
    self.hashes = []
    self.optional = False

    self.classifiers = []

    # Unrestricted python/marker until told otherwise.
    self._python_versions = "*"
    self._python_constraint = parse_constraint("*")
    self._python_marker = AnyMarker()

    self.platform = None
    self.marker = AnyMarker()

    self.root_dir = None

    self.develop = True
def _extra_packages(package_names, _seen=None):
    """Recursively find dependencies for packages names.

    Bug fix: a dependency cycle between packages previously caused
    unbounded recursion (RecursionError). The ``_seen`` set (new,
    backward-compatible parameter) records names already yielded and
    stops the recursion; it also de-duplicates the output, matching the
    behaviour of the later seen-set variant of this helper.
    """
    seen = _seen if _seen is not None else set()
    # for each extra package name
    for package_name in package_names:
        # Find the actual Package object. A missing key indicates an implicit
        # dependency (like setuptools), which should be ignored
        package = packages_by_name.get(canonicalize_name(package_name))
        if package and package.name not in seen:
            seen.add(package.name)
            yield package.name
            # Recurse for dependencies, sharing the seen-set.
            for dependency_package_name in _extra_packages(
                (dependency.name for dependency in package.requires), seen
            ):
                yield dependency_package_name
def __init__(self, name, version, pretty_version=None):
    """
    Creates a new in memory package.
    """
    self._pretty_name = name
    self._name = canonicalize_name(name)

    # Accept either a Version instance or a parseable version string.
    if isinstance(version, Version):
        self._version = version
        self._pretty_version = pretty_version or self._version.text
    else:
        self._version = Version.parse(version)
        self._pretty_version = pretty_version or version

    self.description = ""
    self._authors = []
    self.homepage = None
    self.repository_url = None
    self.keywords = []
    self._license = None
    self.readme = None

    # Source provenance (e.g. legacy index, git, directory).
    self.source_type = ""
    self.source_reference = ""
    self.source_url = ""

    self.requires = []
    self.dev_requires = []
    self.extras = {}
    self.requires_extras = []

    self.category = "main"
    self.hashes = []
    self.optional = False

    # Requirements for making it mandatory
    self.requirements = {}

    self.classifiers = []

    # Unrestricted python/platform until told otherwise.
    self._python_versions = "*"
    self._python_constraint = parse_constraint("*")
    self._platform = "*"
    self._platform_constraint = EmptyConstraint()

    self.root_dir = None
    self.develop = False
def from_package(cls, package):  # type: (...) -> Metadata
    """Build a core-metadata record from *package*."""
    meta = cls()

    meta.name = canonicalize_name(package.name)
    meta.version = normalize_version(package.version.text)
    meta.summary = package.description
    if package.readme:
        with package.readme.open(encoding="utf-8") as f:
            meta.description = f.read()

    meta.keywords = ",".join(package.keywords)
    meta.home_page = package.homepage or package.repository_url
    meta.author = package.author_name
    meta.author_email = package.author_email

    if package.license:
        meta.license = package.license.id

    meta.classifiers = package.all_classifiers

    # Version 1.2 fields: maintainer defaults to the author.
    meta.maintainer = meta.author
    meta.maintainer_email = meta.author_email

    # Only emit Requires-Python when the package restricts it.
    if package.python_versions != "*":
        meta.requires_python = format_python_constraint(package.python_constraint)

    meta.requires_dist = [d.to_pep_508() for d in package.requires]

    # Version 2.1 fields: content type derived from the readme suffix.
    if package.readme:
        suffix = package.readme.suffix
        if suffix == ".rst":
            meta.description_content_type = "text/x-rst"
        elif suffix in (".md", ".markdown"):
            meta.description_content_type = "text/markdown"
        else:
            meta.description_content_type = "text/plain"

    meta.provides_extra = list(package.extras)

    if package.urls:
        for name, url in package.urls.items():
            # The homepage already appears in Home-page; skip the duplicate.
            if name == "Homepage" and meta.home_page == url:
                continue
            meta.project_urls += ("{}, {}".format(name, url),)

    return meta
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect metadata, file list and requires-python data for a release.

    Raises PackageNotFound when the page or its distribution links are
    missing.
    """
    page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise PackageNotFound('No package named "{}"'.format(name))

    data = {
        "name": name,
        "version": version,
        "summary": "",
        "requires_dist": [],
        "requires_python": None,
        "files": [],
        "_cache_version": str(self.CACHE_VERSION),
    }

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise PackageNotFound(
            'No valid distribution links found for package: "{}" version: "{}"'.format(
                name, version
            )
        )

    urls = defaultdict(list)
    url_requires_python_dict = {}
    files = []
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"].append(link.url)
        elif link.filename.endswith(
            (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
        ):
            urls["sdist"].append(link.url)

        url_requires_python_dict[link.url] = link.requires_python

        file_hash = link.hash
        if file_hash:
            # Prefix the digest with its algorithm, e.g. "sha256:...".
            file_hash = link.hash_name + ":" + link.hash
        files.append({"file": link.filename, "hash": file_hash})

    data["files"] = files

    info = self._get_info_from_urls(urls, url_requires_python_dict)

    data["summary"] = info["summary"]
    data["requires_dist"] = info["requires_dist"]
    data["requires_python"] = info["requires_python"]

    return data
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect release metadata as a plain dict built via PackageInfo.

    Raises PackageNotFound when the page or its distribution links are
    missing.
    """
    page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise PackageNotFound('No package named "{}"'.format(name))

    data = PackageInfo(
        name=name,
        version=version,
        summary="",
        platform=None,
        requires_dist=[],
        requires_python=None,
        files=[],
        cache_version=str(self.CACHE_VERSION),
    )

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise PackageNotFound(
            'No valid distribution links found for package: "{}" version: "{}"'.format(
                name, version
            )
        )

    urls = defaultdict(list)
    files = []
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"].append(link.url)
        elif link.filename.endswith(
            (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
        ):
            urls["sdist"].append(link.url)

        file_hash = link.hash
        if file_hash:
            # Prefix the digest with its algorithm, e.g. "sha256:...".
            file_hash = link.hash_name + ":" + link.hash
        files.append({"file": link.filename, "hash": file_hash})

    data.files = files

    info = self._get_info_from_urls(urls)

    data.summary = info.summary
    data.requires_dist = info.requires_dist
    data.requires_python = info.requires_python

    return data.asdict()
def __init__(
    self,
    name,  # type: str
    constraint,  # type: str
    optional=False,  # type: bool
    category="main",  # type: str
    allows_prereleases=False,  # type: bool
    source_name=None,  # type: Optional[str]
    global_options=None,  # type: Optional[list[str]]
):
    """Create a dependency on *name* restricted by *constraint*.

    Bug fix: ``global_options`` previously used a mutable default
    argument (``[]``); a ``None`` sentinel is used instead, which is
    backward compatible for all callers.
    """
    self._name = canonicalize_name(name)
    self._pretty_name = name

    try:
        if not isinstance(constraint, VersionConstraint):
            self._constraint = parse_constraint(constraint)
        else:
            self._constraint = constraint
    except ValueError:
        # An unparsable constraint degrades to "any version".
        self._constraint = parse_constraint("*")

    self._pretty_constraint = str(constraint)
    self._optional = optional
    self._category = category

    # A pre-release lower bound implicitly opts in to pre-releases.
    if isinstance(self._constraint, VersionRange) and self._constraint.min:
        allows_prereleases = (allows_prereleases
                              or self._constraint.min.is_prerelease())

    self._allows_prereleases = allows_prereleases
    self._source_name = source_name

    self._python_versions = "*"
    self._python_constraint = parse_constraint("*")
    self._transitive_python_versions = None
    self._transitive_python_constraint = None
    self._transitive_marker = None

    self._extras = []
    self._in_extras = []

    # Copy defensively so later mutation of the caller's list is isolated.
    self.global_opts = list(global_options) if global_options else []

    self._activated = not self._optional

    self.is_root = False
    self.marker = AnyMarker()
def _extra_packages(package_names: Iterable[str]) -> Iterator[str]: """Recursively find dependencies for packages names""" # for each extra package name for package_name in package_names: # Find the actual Package object. A missing key indicates an implicit # dependency (like setuptools), which should be ignored package = packages_by_name.get(canonicalize_name(package_name)) if package: if package.name not in seen_package_names: seen_package_names.add(package.name) yield package.name # Recurse for dependencies for dependency_package_name in _extra_packages( dependency.name for dependency in package.requires if dependency.name not in seen_package_names): seen_package_names.add(dependency_package_name) yield dependency_package_name
def from_package(cls, package):  # type: (...) -> Metadata
    """Build a core-metadata record from *package*.

    Fixes: the first assignment of ``meta.requires_python`` (the raw
    constraint object) was dead code — it was unconditionally overwritten
    by the formatted constraint below — and has been removed. The readme
    is now read as UTF-8 explicitly rather than with the platform default
    encoding.
    """
    meta = cls()
    meta.name = canonicalize_name(package.name)
    meta.version = package.version
    meta.summary = package.description
    if package.readme:
        with package.readme.open(encoding='utf-8') as f:
            meta.description = f.read()

    meta.keywords = ','.join(package.keywords)
    meta.home_page = package.homepage or package.repository_url
    meta.author = package.author_name
    meta.author_email = package.author_email

    if package.license:
        meta.license = package.license.id

    meta.classifiers = package.all_classifiers

    # Version 1.2
    meta.maintainer = meta.author
    meta.maintainer_email = meta.author_email

    meta.requires_dist = [d.to_pep_508() for d in package.requires]

    # Requires python
    meta.requires_python = format_python_constraint(
        package.python_constraint)

    # Version 2.1
    if package.readme:
        if package.readme.suffix == '.rst':
            meta.description_content_type = 'text/x-rst'
        elif package.readme.suffix in ['.md', '.markdown']:
            meta.description_content_type = 'text/markdown'
        else:
            meta.description_content_type = 'text/plain'

    meta.provides_extra = [e for e in package.extras]

    return meta
def from_package(cls, package):  # type: (...) -> Metadata
    """Build a core-metadata record from *package*.

    Fix: the readme is now read as UTF-8 explicitly (matching the newer
    variant of this method) instead of the platform default encoding,
    which broke non-ASCII readmes on some systems.
    """
    meta = cls()
    meta.name = canonicalize_name(package.name)
    meta.version = normalize_version(package.version.text)
    meta.summary = package.description
    if package.readme:
        with package.readme.open(encoding="utf-8") as f:
            meta.description = f.read()

    meta.keywords = ",".join(package.keywords)
    meta.home_page = package.homepage or package.repository_url
    meta.author = package.author_name
    meta.author_email = package.author_email

    if package.license:
        meta.license = package.license.id

    meta.classifiers = package.all_classifiers

    # Version 1.2
    meta.maintainer = meta.author
    meta.maintainer_email = meta.author_email

    # Requires python
    if not package.python_constraint.is_any():
        meta.requires_python = format_python_constraint(package.python_constraint)

    meta.requires_dist = [d.to_pep_508() for d in package.requires]

    # Version 2.1
    if package.readme:
        if package.readme.suffix == ".rst":
            meta.description_content_type = "text/x-rst"
        elif package.readme.suffix in [".md", ".markdown"]:
            meta.description_content_type = "text/markdown"
        else:
            meta.description_content_type = "text/plain"

    meta.provides_extra = [e for e in package.extras]

    return meta
def find_packages(
    self, name, constraint=None, extras=None, allow_prereleases=False
):
    """Return Package objects from this legacy index matching *constraint*.

    Matching versions are cached for 5 minutes; an empty list is returned
    when the index has no page for *name*.
    """
    if constraint is not None and not isinstance(constraint, VersionConstraint):
        constraint = parse_constraint(constraint)

    key = "{}:{}".format(name, str(constraint)) if constraint else name

    if self._cache.store("matches").has(key):
        versions = self._cache.store("matches").get(key)
    else:
        page = self._get("/{}/".format(canonicalize_name(name).replace(".", "-")))
        if page is None:
            return []

        versions = [
            version
            for version in page.versions
            if not constraint or constraint.allows(version)
        ]
        self._cache.store("matches").put(key, versions, 5)

    packages = []
    for version in versions:
        package = Package(name, version)
        package.source_type = "legacy"
        package.source_url = self._url
        if extras is not None:
            package.requires_extras = extras
        packages.append(package)

    self._log(
        "{} packages found for {} {}".format(len(packages), name, str(constraint)),
        level="debug",
    )

    return packages
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    """Collect summary, dependencies and sha256 digests for a release.

    Raises PackageNotFound when the page or its distribution links are
    missing.
    """
    page = self._get("/{}/".format(
        canonicalize_name(name).replace(".", "-")))
    if page is None:
        raise PackageNotFound('No package named "{}"'.format(name))

    data = {
        "name": name,
        "version": version,
        "summary": "",
        "requires_dist": [],
        "requires_python": None,
        "digests": [],
    }

    links = list(page.links_for_version(Version.parse(version)))
    if not links:
        raise PackageNotFound(
            'No valid distribution links found for package: "{}" version: "{}"'
            .format(name, version))

    urls = defaultdict(list)
    hashes = []
    for link in links:
        if link.is_wheel:
            urls["bdist_wheel"].append(link.url)
        elif link.filename.endswith(
            (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
        ):
            urls["sdist"].append(link.url)

        # Only sha256 digests are recorded.
        hash = link.hash
        if link.hash_name == "sha256":
            hashes.append(hash)

    data["digests"] = hashes

    info = self._get_info_from_urls(urls)

    data["summary"] = info["summary"]
    data["requires_dist"] = info["requires_dist"]
    data["requires_python"] = info["requires_python"]

    return data
def find_packages(self, name, constraint=None, extras=None,
                  allow_prereleases=False):
    """Return Package objects from this legacy index matching *constraint*.

    Matching versions are cached for 5 minutes; a missing index page
    raises ValueError.
    """
    if constraint is not None and not isinstance(constraint, VersionConstraint):
        constraint = parse_constraint(constraint)

    key = "{}:{}".format(name, str(constraint)) if constraint else name

    if self._cache.store("matches").has(key):
        versions = self._cache.store("matches").get(key)
    else:
        page = self._get("/{}".format(
            canonicalize_name(name).replace(".", "-")))
        if page is None:
            raise ValueError('No package named "{}"'.format(name))

        versions = [
            version
            for version in page.versions
            if not constraint or constraint.allows(version)
        ]
        self._cache.store("matches").put(key, versions, 5)

    packages = []
    for version in versions:
        package = Package(name, version)
        package.source_type = "legacy"
        package.source_url = self._url
        if extras is not None:
            package.requires_extras = extras
        packages.append(package)

    return packages
def __init__(
    self,
    name,  # type: str
    constraint,  # type: str
    optional=False,  # type: bool
    category="main",  # type: str
    allows_prereleases=False,  # type: bool
):
    """Create a dependency on *name* restricted by *constraint*."""
    self._name = canonicalize_name(name)
    self._pretty_name = name

    if isinstance(constraint, VersionConstraint):
        self._constraint = constraint
    else:
        try:
            self._constraint = parse_constraint(constraint)
        except ValueError:
            # An unparsable constraint degrades to "any version".
            self._constraint = parse_constraint("*")

    self._pretty_constraint = str(constraint)
    self._optional = optional
    self._category = category

    # A pre-release lower bound implicitly opts in to pre-releases.
    if isinstance(self._constraint, VersionRange) and self._constraint.min:
        allows_prereleases = (
            allows_prereleases or self._constraint.min.is_prerelease()
        )

    self._allows_prereleases = allows_prereleases

    # Python/platform restrictions default to unrestricted.
    self._python_versions = "*"
    self._python_constraint = parse_constraint("*")
    self._platform = "*"
    self._platform_constraint = EmptyConstraint()

    self._extras = []
    self._in_extras = []

    # Optional dependencies start deactivated.
    self._activated = not self._optional

    self.is_root = False
def get_extra_package_names(
    packages,  # type: Sequence[Package]
    extras,  # type: Mapping[str, List[str]]
    extra_names,  # type: Sequence[str]
):  # type: (...) -> Iterator[str]
    """
    Returns all package names required by the given extras.

    :param packages: A collection of packages, such as from Repository.packages
    :param extras: A mapping of `extras` names to lists of package names, as
        defined in the `extras` section of `poetry.lock`.
    :param extra_names: A list of strings specifying names of extra groups to
        resolve.

    Bug fix: dependency cycles between packages previously caused
    unbounded recursion (RecursionError) in ``_extra_packages``. A
    seen-set now guards the recursion and de-duplicates yielded names,
    matching the later variants of this function.
    """
    if not extra_names:
        return []

    # lookup for packages by name, faster than looping over packages repeatedly
    packages_by_name = {package.name: package for package in packages}

    # get and flatten names of packages we've opted into as extras
    extra_package_names = [
        canonicalize_name(extra_package_name)
        for extra_name in extra_names
        for extra_package_name in extras.get(extra_name, ())
    ]

    # record of packages seen during recursion, to avoid recursion errors
    seen_package_names = set()

    def _extra_packages(package_names):
        """Recursively find dependencies for packages names"""
        # for each extra package name
        for package_name in package_names:
            # Find the actual Package object. A missing key indicates an implicit
            # dependency (like setuptools), which should be ignored
            package = packages_by_name.get(canonicalize_name(package_name))
            if package and package.name not in seen_package_names:
                seen_package_names.add(package.name)
                yield package.name
                # Recurse for dependencies; the lazily-evaluated filter
                # observes names added during the recursive call.
                for dependency_package_name in _extra_packages(
                    dependency.name
                    for dependency in package.requires
                    if dependency.name not in seen_package_names
                ):
                    seen_package_names.add(dependency_package_name)
                    yield dependency_package_name

    return _extra_packages(extra_package_names)
def handle(self) -> int:
    """Render each installed Poetry plugin package with the entry points it
    provides and its dependencies. Always returns 0."""
    from poetry.plugins.application_plugin import ApplicationPlugin
    from poetry.plugins.plugin import Plugin
    from poetry.plugins.plugin_manager import PluginManager
    from poetry.repositories.installed_repository import InstalledRepository
    from poetry.utils.env import EnvManager
    from poetry.utils.helpers import canonicalize_name
    from poetry.utils.helpers import pluralize

    # One entry per distribution: its installed package plus the plugin
    # entry points it provides, grouped by kind.
    plugins: dict[str, dict[str, Any]] = defaultdict(lambda: {
        "package": None,
        "plugins": [],
        "application_plugins": [],
    })

    entry_points = (
        PluginManager(ApplicationPlugin.group).get_plugin_entry_points()
        + PluginManager(Plugin.group).get_plugin_entry_points())

    # Plugins live in Poetry's own (system) environment, not the project env.
    system_env = EnvManager.get_system_env(naive=True)
    installed_repository = InstalledRepository.load(system_env,
                                                    with_dependencies=True)

    packages_by_name = {
        pkg.name: pkg for pkg in installed_repository.packages
    }

    for entry_point in entry_points:
        plugin = entry_point.load()

        category = "plugins"
        if issubclass(plugin, ApplicationPlugin):
            category = "application_plugins"

        # NOTE(review): assumes every plugin's distribution is present in the
        # installed repository — a missing key would raise KeyError here.
        package = packages_by_name[canonicalize_name(
            entry_point.distro.name)]
        plugins[package.pretty_name]["package"] = package
        plugins[package.pretty_name][category].append(entry_point)

    for name, info in plugins.items():
        package = info["package"]
        description = " " + package.description if package.description else ""
        self.line("")
        self.line(
            f" • <c1>{name}</c1> (<c2>{package.version}</c2>){description}"
        )
        provide_line = " "

        if info["plugins"]:
            count = len(info["plugins"])
            provide_line += f" <info>{count}</info> plugin{pluralize(count)}"

        if info["application_plugins"]:
            if info["plugins"]:
                provide_line += " and"
            count = len(info["application_plugins"])
            provide_line += (
                f" <info>{count}</info> application plugin{pluralize(count)}"
            )

        self.line(provide_line)

        if package.requires:
            self.line("")
            self.line(" <info>Dependencies</info>")
            for dependency in package.requires:
                self.line(f" - {dependency.pretty_name}"
                          f" (<c2>{dependency.pretty_constraint}</c2>)")

    return 0
def whitelist(self, packages):  # type: (dict) -> Installer
    """Restrict the install to *packages* (names are canonicalized)."""
    self._whitelist = list(map(canonicalize_name, packages))

    return self
def develop(self, packages):  # type: (dict) -> Installer
    """Mark *packages* for develop-mode installation (names canonicalized)."""
    self._develop = list(map(canonicalize_name, packages))

    return self
def whitelist(self, packages: Iterable[str]) -> Installer:
    """Restrict the install to *packages* (names are canonicalized)."""
    self._whitelist = [canonicalize_name(pkg) for pkg in packages]

    return self
def whitelist(self, packages):  # type: (dict) -> Installer
    """Restrict the install to *packages* (names are canonicalized)."""
    self._whitelist = [canonicalize_name(entry) for entry in packages]

    return self