class ProjectFile(object):
    """A single on-disk file belonging to the Pipfile project.

    Remembers where the file lives, the parsed model object, and the
    newline convention the file used, so that writes round-trip the
    original line endings.
    """

    #: Filesystem path of the file
    location = attr.ib()
    #: Newline sequence detected when the file was read
    line_ending = attr.ib()
    #: Parsed model object (``None`` when the file could not be parsed)
    model = attr.ib()

    @classmethod
    def read(cls, location, model_cls, invalid_ok=False):
        """Load *location* via ``model_cls.load`` and detect its newlines.

        :param location: Path of the file to read.
        :param model_cls: Class providing a ``load(fileobj)`` constructor.
        :param bool invalid_ok: When true, a missing or unparsable file
            produces an instance with ``model=None`` instead of raising.
        :raises FileNotFoundError: If the file is absent and ``invalid_ok``
            is false.
        """
        if not (invalid_ok or os.path.exists(location)):
            raise FileNotFoundError(location)
        try:
            with io.open(location, encoding="utf-8") as fh:
                model = model_cls.load(fh)
                line_ending = preferred_newlines(fh)
        except Exception:
            if not invalid_ok:
                raise
            # Best-effort mode: no model, default newline convention.
            model, line_ending = None, DEFAULT_NEWLINES
        return cls(location=location, line_ending=line_ending, model=model)

    def write(self):
        """Serialize the model back to ``self.location`` in place,
        preserving the newline style recorded at read time."""
        open_kwargs = {"encoding": "utf-8", "newline": self.line_ending}
        with io.open(self.location, "w", **open_kwargs) as fh:
            self.model.dump(fh)

    def dumps(self):
        """Return the serialized model as a text string."""
        buffer = io.StringIO()
        self.model.dump(buffer)
        return buffer.getvalue()
class VersionPath(SystemPath):
    """A :class:`SystemPath` rooted at a single python version directory."""

    #: Base directory of the version (its ``bin`` directory)
    base = attr.ib(default=None, validator=optional_instance_of(Path))  # type: Path
    #: Display name of the version
    name = attr.ib(default=None)  # type: str

    @classmethod
    def create(cls, path, only_python=True, pythons=None, name=None):
        """Accepts a path to a base python version directory.

        Generates the version listings for it.

        :param path: Path to the version directory (or its ``bin`` subdir).
        :param bool only_python: Restrict discovery to python executables.
        :param pythons: Pre-discovered python entries to seed with.
        :param name: Override for the version name; defaults to the
            directory name.
        """
        from .path import PathEntry

        path = ensure_path(path)
        path_entries = defaultdict(PathEntry)
        bin_ = "{base}/bin"
        # Normalize: if the caller handed us the bin dir itself, step up one.
        if path.as_posix().endswith(Path(bin_).name):
            path = path.parent
        bin_dir = ensure_path(bin_.format(base=path.as_posix()))
        if not name:
            name = path.name
        # BUG FIX: ``only_python`` was previously hard-coded to True here,
        # silently ignoring the caller's argument; the default is unchanged.
        current_entry = PathEntry.create(
            bin_dir, is_root=True, only_python=only_python, pythons=pythons, name=name
        )
        path_entries[bin_dir.as_posix()] = current_entry
        return cls(name=name, base=bin_dir, paths=path_entries)
class ParsedTag(object):
    """A compatibility tag decomposed into its marker, python version,
    platform, and ABI components.  All fields are optional strings."""

    #: The marker string corresponding to the tag
    marker_string = attr.ib(default=None)  # type: Optional[str]
    #: The python version represented by the tag
    python_version = attr.ib(default=None)  # type: Optional[str]
    #: The platform represented by the tag
    platform_system = attr.ib(default=None)  # type: Optional[str]
    #: The ABI represented by the tag
    abi = attr.ib(default=None)  # type: Optional[str]
class VersionMap(object):
    """Accumulates :class:`PathEntry` objects keyed by python version tuple,
    de-duplicating entries by their filesystem path."""

    # BUG FIX: ``factory=defaultdict`` produced a defaultdict with NO
    # default_factory, which behaves like a plain dict and raises KeyError
    # on missing keys -- so indexing a new version tuple (as add_entry does)
    # blew up instead of creating the list.  ``defaultdict(list)`` matches
    # the declared List[PathEntry] value type.
    versions = attr.ib(
        factory=lambda: defaultdict(list)
    )  # type: DefaultDict[Tuple[int, Optional[int], Optional[int], bool, bool, bool], List[PathEntry]]

    def add_entry(self, entry):
        # type: (...) -> None
        """Add *entry* under its version tuple unless an entry with the
        same path is already recorded."""
        version = entry.as_python  # type: PythonVersion
        if version:
            # Indexing creates the list for a previously-unseen version.
            entries = self.versions[version.version_tuple]
            known_paths = {p.path for p in entries}
            if entry.path not in known_paths:
                entries.append(entry)

    def merge(self, target):
        # type: (VersionMap) -> None
        """Fold every version list from *target* into this map,
        de-duplicating entries by path."""
        for version, entries in target.versions.items():
            if version not in self.versions:
                self.versions[version] = entries
            else:
                current_entries = {p.path for p in self.versions[version]}
                new_entries = {p.path for p in entries}
                new_entries -= current_entries
                self.versions[version].extend(
                    [e for e in entries if e.path in new_entries]
                )
class ExtrasCollection(object):
    """A named group of extra dependencies belonging to a parent dependency."""

    #: The name of the extras collection (e.g. 'security')
    name = attr.ib(type=str)
    #: The dependency the collection belongs to
    parent = attr.ib(type="Dependency")
    #: The members of the collection
    dependencies = attr.ib(factory=set)  # type: Set["Dependency"]

    def add_dependency(self, dependency):
        # type: ("Dependency") -> "ExtrasCollection"
        """Return a new collection equal to this one plus *dependency*.

        :raises TypeError: If *dependency* is not a ``Dependency`` instance.
        """
        if not isinstance(dependency, Dependency):
            raise TypeError(
                "Expected a Dependency instance, received {0!r}".format(dependency)
            )
        # Copy-on-write: the original collection is left untouched.
        updated = self.dependencies.copy()
        updated.add(dependency)
        return attr.evolve(self, dependencies=updated)
class Digest(object):
    """A single hash digest, rendered as ``algorithm:value``."""

    #: The algorithm declared for the digest, e.g. 'sha256'
    algorithm = attr.ib(
        type=str, validator=attr.validators.in_(VALID_ALGORITHMS.keys()), eq=True
    )
    #: The digest value
    value = attr.ib(type=str, validator=validate_digest, eq=True)

    def __str__(self):
        # type: () -> str
        return "{0}:{1}".format(self.algorithm, self.value)

    @classmethod
    def create(cls, algorithm, value):
        # type: (str, str) -> "Digest"
        """Build a :class:`Digest` from its two components."""
        return cls(algorithm=algorithm, value=value)

    @classmethod
    def collection_from_dict(cls, digest_dict):
        # type: (TDigestDict) -> List["Digest"]
        """Turn an ``{algorithm: value}`` mapping into a list of digests."""
        return [
            cls.create(algorithm, digest)
            for algorithm, digest in digest_dict.items()
        ]
class Version:
    """An ``X.Y[.Z]`` version as used in pyenv installation names."""

    major = attr.ib()
    minor = attr.ib()
    patch = attr.ib()

    def __str__(self):
        components = [self.major, self.minor]
        if self.patch is not None:
            components.append(self.patch)
        return '.'.join(str(component) for component in components)

    @classmethod
    def parse(cls, name):
        """Parse an X.Y.Z or X.Y string into a version tuple.

        :raises ValueError: If *name* is not in ``X.Y`` or ``X.Y.Z`` form.
        """
        matched = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?$', name)
        if not matched:
            raise ValueError(f'invalid version name {name!r}')
        major_part, minor_part, patch_part = matched.groups()
        return cls(
            int(major_part),
            int(minor_part),
            int(patch_part) if patch_part is not None else None,
        )

    @property
    def cmpkey(self):
        """Make the version a comparable tuple.

        Some old Python versions does not have a patch part, e.g. 2.7.0 is
        named "2.7" in pyenv. Fix that, otherwise `None` will fail to
        compare with int.
        """
        return (self.major, self.minor, self.patch or 0)

    def matches_minor(self, other):
        """Check whether this version matches the other in (major, minor)."""
        return (self.major, self.minor) == (other.major, other.minor)
class PythonFinder(BaseFinder, BasePath):
    """Finder for python installations laid out as version directories under
    a common root (the layout used by pyenv and asdf).  Discovery results
    are cached lazily in ``_versions`` and ``_pythons``."""

    #: Root directory containing the version subdirectories
    root = attr.ib(default=None, validator=optional_instance_of(Path), type=Path)
    # should come before versions, because its value is used in versions's default initializer.
    #: Whether to ignore any paths which raise exceptions and are not actually python
    ignore_unsupported = attr.ib(default=True, type=bool)
    #: Glob path for python versions off of the root directory
    version_glob_path = attr.ib(default="versions/*", type=str)
    #: The function to use to sort version order when returning an ordered verion set
    sort_function = attr.ib(default=None)  # type: Callable
    #: The root locations used for discovery
    roots = attr.ib(default=attr.Factory(defaultdict), type=defaultdict)
    #: List of paths discovered during search
    paths = attr.ib(type=list)
    #: shim directory
    shim_dir = attr.ib(default="shims", type=str)
    #: Versions discovered in the specified paths
    _versions = attr.ib(default=attr.Factory(defaultdict), type=defaultdict)
    #: Cache of discovered python entries keyed by posix path
    _pythons = attr.ib(default=attr.Factory(defaultdict), type=defaultdict)

    def __del__(self):
        # type: () -> None
        # Drop the caches on teardown, presumably to break reference cycles
        # with PathEntry objects -- NOTE(review): __del__ ordering is not
        # guaranteed at interpreter shutdown; confirm this is still needed.
        self._versions = defaultdict()
        self._pythons = defaultdict()
        self.roots = defaultdict()
        self.paths = []

    @property
    def expanded_paths(self):
        # type: () -> Generator
        # Flatten the cached version entries into a single generator,
        # skipping empty slots.
        return (
            path
            for path in unnest(p for p in self.versions.values())
            if path is not None
        )

    @property
    def is_pyenv(self):
        # type: () -> bool
        # True when our root lives inside the pyenv root directory.
        return is_in_path(str(self.root), PYENV_ROOT)

    @property
    def is_asdf(self):
        # type: () -> bool
        # True when our root lives inside the asdf data directory.
        return is_in_path(str(self.root), ASDF_DATA_DIR)

    def get_version_order(self):
        # type: () -> List[Path]
        """Return the version directories under ``root``, ordered by the
        tool's own preference file (pyenv/asdf) when one applies, with any
        unlisted versions appended afterwards.  ``envs`` directories are
        excluded."""
        version_paths = [
            p
            for p in self.root.glob(self.version_glob_path)
            if not (p.parent.name == "envs" or p.name == "envs")
        ]
        versions = {v.name: v for v in version_paths}
        version_order = []  # type: List[Path]
        if self.is_pyenv:
            version_order = [
                versions[v] for v in parse_pyenv_version_order() if v in versions
            ]
        elif self.is_asdf:
            version_order = [
                versions[v] for v in parse_asdf_version_order() if v in versions
            ]
        # Remove explicitly-ordered versions from the glob results so they
        # are not yielded twice.
        for version in version_order:
            if version in version_paths:
                version_paths.remove(version)
        if version_order:
            version_order += version_paths
        else:
            version_order = version_paths
        return version_order

    def get_bin_dir(self, base):
        # type: (Union[Path, str]) -> Path
        """Return the executable directory for a version directory: the
        directory itself on Windows, ``<base>/bin`` elsewhere."""
        if isinstance(base, six.string_types):
            base = Path(base)
        if os.name == "nt":
            return base
        return base / "bin"

    @classmethod
    def version_from_bin_dir(cls, entry):
        # type: (PathEntry) -> Optional[PathEntry]
        """Return the first python found under *entry*, or ``None``."""
        py_version = None
        py_version = next(iter(entry.find_all_python_versions()), None)
        return py_version

    def _iter_version_bases(self):
        # type: () -> Iterator[Tuple[Path, PathEntry]]
        """Yield ``(version_dir, PathEntry)`` pairs for every version whose
        bin directory exists, recording each entry in ``self.roots``."""
        from .path import PathEntry

        for p in self.get_version_order():
            bin_dir = self.get_bin_dir(p)
            if bin_dir.exists() and bin_dir.is_dir():
                entry = PathEntry.create(
                    path=bin_dir.absolute(), only_python=False, name=p.name, is_root=True
                )
                self.roots[p] = entry
                yield (p, entry)

    def _iter_versions(self):
        # type: () -> Iterator[Tuple[Path, PathEntry, Tuple]]
        """Yield ``(version_dir, entry, version_tuple)`` for each discovered
        version, parsing the version from the directory name when possible."""
        for base_path, entry in self._iter_version_bases():
            version = None
            version_entry = None
            try:
                version = PythonVersion.parse(entry.name)
            except (ValueError, InvalidPythonVersion):
                # Directory name is not a plain version (e.g. 'anaconda3-5.3.0');
                # fall back to probing the contained interpreters.
                version_entry = next(iter(entry.find_all_python_versions()), None)
                if version is None:
                    # NOTE(review): ``version`` is necessarily None here
                    # (parse() raised before assigning), so this branch always
                    # continues and the ``version_entry`` fallback below is
                    # unreachable -- likely a latent bug; confirm intent.
                    if not self.ignore_unsupported:
                        raise
                    continue
                if version_entry is not None:
                    version = version_entry.py_version.as_dict()
            except Exception:
                if not self.ignore_unsupported:
                    raise
                logger.warning(
                    "Unsupported Python version %r, ignoring...",
                    base_path.name,
                    exc_info=True,
                )
                continue
            if version is not None:
                version_tuple = (
                    version.get("major"),
                    version.get("minor"),
                    version.get("patch"),
                    version.get("is_prerelease"),
                    version.get("is_devrelease"),
                    version.get("is_debug"),
                )
                yield (base_path, entry, version_tuple)

    @property
    def versions(self):
        # type: () -> DefaultDict[Tuple, PathEntry]
        """Lazily-built map of version tuple -> PathEntry."""
        if not self._versions:
            for _, entry, version_tuple in self._iter_versions():
                self._versions[version_tuple] = entry
        return self._versions

    def _iter_pythons(self):
        # type: () -> Iterator
        """Yield python entries, preferring cached ones and falling back to
        a full search of entries whose version tuple is unknown."""
        for path, entry, version_tuple in self._iter_versions():
            if path.as_posix() in self._pythons:
                yield self._pythons[path.as_posix()]
            elif version_tuple not in self.versions:
                for python in entry.find_all_python_versions():
                    yield python
            else:
                yield self.versions[version_tuple]

    @paths.default
    def get_paths(self):
        # type: () -> List[PathEntry]
        # attrs default initializer for ``paths``: the discovered bin dirs.
        _paths = [base for _, base in self._iter_version_bases()]
        return _paths

    @property
    def pythons(self):
        # type: () -> DefaultDict[str, PathEntry]
        """Lazily-built map of posix path -> python PathEntry."""
        if not self._pythons:
            from .path import PathEntry

            self._pythons = defaultdict(PathEntry)  # type: DefaultDict[str, PathEntry]
            for python in self._iter_pythons():
                python_path = python.path.as_posix()  # type: ignore
                self._pythons[python_path] = python
        return self._pythons

    @pythons.setter
    def pythons(self, value):
        # type: (DefaultDict[str, PathEntry]) -> None
        self._pythons = value

    def get_pythons(self):
        # type: () -> DefaultDict[str, PathEntry]
        return self.pythons

    # NOTE(review): if ``overload`` here is ``typing.overload``, decorating
    # the actual implementation makes calling ``PythonFinder.create`` raise
    # at runtime -- confirm this name is a local no-op shim.
    @overload
    @classmethod
    def create(cls, root, sort_function, version_glob_path=None, ignore_unsupported=True):
        # type: (str, Callable, Optional[str], bool) -> PythonFinder
        root = ensure_path(root)
        if not version_glob_path:
            version_glob_path = "versions/*"
        return cls(
            root=root,
            path=root,
            ignore_unsupported=ignore_unsupported,  # type: ignore
            sort_function=sort_function,
            version_glob_path=version_glob_path,
        )

    def find_all_python_versions(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
    ):
        # type: (...) -> List[PathEntry]
        """Search for a specific python version on the path. Return all copies

        :param major: Major python version to search for.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested.
        :rtype: List[:class:`~pythonfinder.models.PathEntry`]
        """
        call_method = "find_all_python_versions" if self.is_dir else "find_python_version"
        sub_finder = operator.methodcaller(
            call_method, major, minor, patch, pre, dev, arch, name
        )
        if not any([major, minor, patch, name]):
            # No filter requested: take the first python from each version dir.
            pythons = [
                next(iter(py for py in base.find_all_python_versions()), None)
                for _, base in self._iter_version_bases()
            ]
        else:
            pythons = [sub_finder(path) for path in self.paths]
        pythons = expand_paths(pythons, True)
        version_sort = operator.attrgetter("as_python.version_sort")
        paths = [
            p for p in sorted(pythons, key=version_sort, reverse=True) if p is not None
        ]
        return paths

    def find_python_version(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
    ):
        # type: (...) -> Optional[PathEntry]
        """Search or self for the specified Python version and return the first match.

        :param major: Major version number.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested.
        """
        sub_finder = operator.methodcaller(
            "find_python_version", major, minor, patch, pre, dev, arch, name
        )
        version_sort = operator.attrgetter("as_python.version_sort")
        unnested = [sub_finder(self.roots[path]) for path in self.roots]
        unnested = [
            p
            for p in unnested
            if p is not None and p.is_python and p.as_python is not None
        ]
        paths = sorted(list(unnested), key=version_sort, reverse=True)
        return next(iter(p for p in paths if p is not None), None)

    def which(self, name):
        # type: (str) -> Optional[PathEntry]
        """Search in this path for an executable.

        :param executable: The name of an executable to search for.
        :type executable: str
        :returns: :class:`~pythonfinder.models.PathEntry` instance.
        """
        matches = (p.which(name) for p in self.paths)
        non_empty_match = next(iter(m for m in matches if m is not None), None)
        return non_empty_match
class Dependency(object):
    """A single resolved or declared package dependency, wrapping a
    packaging ``Requirement`` with provenance metadata (parent, extras
    origin, python-version constraints, markers)."""

    #: The name of the dependency
    name = attr.ib(type=str)
    #: A requirement instance
    requirement = attr.ib(type=PackagingRequirement, eq=False)
    #: The specifier defined in the dependency definition
    specifier = attr.ib(type=SpecifierSet, converter=create_specifierset, eq=False)
    #: Any extras this dependency declares
    extras = attr.ib(factory=tuple, validator=validate_extras)  # type: Tuple[str, ...]
    #: The name of the extra meta-dependency this one came from (e.g. 'security')
    from_extras = attr.ib(default=None, eq=False)  # type: Optional[str]
    #: The declared specifier set of allowable python versions for this dependency
    python_version = attr.ib(
        default="", type=SpecifierSet, converter=create_specifierset, eq=False
    )
    #: The parent of this dependency (i.e. where it came from)
    parent = attr.ib(default=None)  # type: Optional[Dependency]
    #: The markers for this dependency
    markers = attr.ib(default=None, eq=False)  # type: Optional[Marker]
    #: String forms cached at construction time (excluded from equality by
    #: virtue of the leading underscore naming).
    _specset_str = attr.ib(default="", type=str)
    _python_version_str = attr.ib(default="", type=str)
    _marker_str = attr.ib(default="", type=str)

    def __str__(self):
        # type: () -> str
        return str(self.requirement)

    def as_line(self):
        # type: () -> str
        """Render this dependency as a PEP 508-style requirement line,
        merging any python-version constraint into the marker section."""
        line_str = "{0}".format(self.name)
        if self.extras:
            line_str = "{0}[{1}]".format(line_str, ",".join(self.extras))
        if self.specifier:
            line_str = "{0}{1!s}".format(line_str, self.specifier)
        py_version_part = ""
        if self.python_version:
            specifiers = normalize_specifier_set(self.python_version)
            markers = []
            if specifiers is not None:
                markers = [marker_from_specifier(str(s)) for s in specifiers]
            # BUG FIX: ``reduce`` over an empty sequence with no initializer
            # raises TypeError; only merge when at least one marker exists.
            if markers:
                py_version_part = reduce(merge_markers, markers)
        if self.markers:
            line_str = "{0}; {1}".format(line_str, str(self.markers))
            if py_version_part:
                line_str = "{0} and {1}".format(line_str, py_version_part)
        elif py_version_part and not self.markers:
            line_str = "{0}; {1}".format(line_str, py_version_part)
        return line_str

    def pin(self):
        # type: () -> "Package"
        """Pin to the newest non-yanked release satisfying ``self.specifier``,
        retrying with prereleases allowed before giving up.

        :raises RuntimeError: If no release satisfies the specifier.
        """
        base_package = get_package(self.name)
        sorted_releases = sorted(
            base_package.releases.non_yanked_releases,
            key=operator.attrgetter("parsed_version"),
            reverse=True,
        )
        version = next(
            iter(self.specifier.filter((r.version for r in sorted_releases))), None
        )
        if not version:
            # No stable release matched; fall back to prereleases.
            version = next(
                iter(
                    self.specifier.filter(
                        (r.version for r in sorted_releases), prereleases=True
                    )
                ),
                None,
            )
        if not version:
            raise RuntimeError(
                "Failed to resolve {0} ({1!s})".format(self.name, self.specifier)
            )
        match = get_package_version(self.name, str(version))
        return match

    @classmethod
    def from_requirement(cls, req, parent=None):
        # type: (PackagingRequirement, Optional["Dependency"]) -> "Dependency"
        """Build a :class:`Dependency` from a packaging requirement,
        splitting extras and python-version constraints out of its marker."""
        from_extras, marker, python_version = None, None, None
        specset_str, py_version_str, marker_str = "", "", ""
        if req.marker:
            marker = Marker(str(req.marker))
            from_extras = next(iter(list(get_contained_extras(marker))), None)
            python_version = get_contained_pyversions(marker)
            # Strip the extra/python_version clauses we just extracted.
            marker = get_without_extra(get_without_pyversion(marker))
            if not str(marker) or not marker or not marker._markers:
                marker = None
            req.marker = marker
            if marker is not None:
                marker_str = str(marker)
        if req.specifier:
            specset_str = str(req.specifier)
        if python_version:
            py_version_str = str(python_version)
        return cls(
            name=req.name,
            specifier=req.specifier,
            extras=tuple(sorted(set(req.extras)))
            if req.extras is not None
            else req.extras,
            requirement=req,
            from_extras=from_extras,
            python_version=python_version,
            markers=marker,
            parent=parent,
            specset_str=specset_str,
            python_version_str=py_version_str,
            marker_str=marker_str,
        )

    @classmethod
    def from_info(cls, info):
        # type: ("PackageInfo") -> "Dependency"
        """Build a pinned :class:`Dependency` from package index metadata."""
        marker_str = ""
        specset_str, py_version_str = "", ""
        if info.requires_python:
            # XXX: Some markers are improperly formatted -- we already handle most cases
            # XXX: but learned about new broken formats, such as
            # XXX: python_version in "2.6 2.7 3.2 3.3" (note the lack of commas)
            # XXX: as a marker on a dependency of a library called 'pickleshare'
            # XXX: Some packages also have invalid markers with stray characters,
            # XXX: such as 'algoliasearch'
            try:
                marker = marker_from_specifier(info.requires_python)
            except Exception:
                marker_str = ""
            else:
                if not marker or not marker._markers:
                    marker_str = ""
                else:
                    marker_str = "{0!s}".format(marker)
        req_str = "{0}=={1}".format(info.name, info.version)
        if marker_str:
            req_str = "{0}; {1}".format(req_str, marker_str)
        req = PackagingRequirement(req_str)
        requires_python_str = (
            info.requires_python if info.requires_python is not None else ""
        )
        if req.specifier:
            specset_str = str(req.specifier)
        if requires_python_str:
            py_version_str = requires_python_str
        return cls(
            name=info.name,
            specifier=req.specifier,
            extras=tuple(sorted(set(req.extras)))
            if req.extras is not None
            else req.extras,
            requirement=req,
            from_extras=None,
            python_version=SpecifierSet(requires_python_str),
            markers=None,
            parent=None,
            specset_str=specset_str,
            python_version_str=py_version_str,
            marker_str=marker_str,
        )

    @classmethod
    def from_str(cls, depstr, parent=None):
        # type: (str, Optional["Dependency"]) -> "Dependency"
        """Parse a requirement string into a :class:`Dependency`.

        Parsing errors from ``PackagingRequirement`` propagate unchanged
        (the previous ``try/except: raise`` was a no-op and was removed).
        """
        req = PackagingRequirement(depstr)
        return cls.from_requirement(req, parent=parent)

    def add_parent(self, parent):
        # type: ("Dependency") -> "Dependency"
        """Return a copy of this dependency with *parent* recorded."""
        return attr.evolve(self, parent=parent)
class Lockfile(object):
    """Model wrapping an on-disk ``Pipfile.lock``: couples the parsed
    plette lockfile with its :class:`ProjectFile` location and exposes
    requirement listings for the default and develop sections."""

    #: Path to the lockfile on disk
    path = attr.ib(validator=optional_instance_of(Path), type=Path)
    _requirements = attr.ib(default=attr.Factory(list), type=list)
    _dev_requirements = attr.ib(default=attr.Factory(list), type=list)
    #: The project file wrapper for reading/writing
    projectfile = attr.ib(validator=is_projectfile, type=ProjectFile)
    _lockfile = attr.ib(validator=is_lockfile, type=plette.lockfiles.Lockfile)
    #: Newline convention used when writing back
    newlines = attr.ib(default=DEFAULT_NEWLINES, type=str)

    @path.default
    def _get_path(self):
        # Default to ./Pipfile.lock in the current directory.
        return Path(os.curdir).joinpath("Pipfile.lock").absolute()

    @projectfile.default
    def _get_projectfile(self):
        return self.load_projectfile(self.path)

    @_lockfile.default
    def _get_lockfile(self):
        return self.projectfile.model

    @property
    def lockfile(self):
        return self._lockfile

    @property
    def section_keys(self):
        return ["default", "develop"]

    @property
    def extended_keys(self):
        # Section keys crossed with the virtual "vcs"/"editable" suffixes.
        return [
            k for k in itertools.product(self.section_keys, ["", "vcs", "editable"])
        ]

    def get(self, k):
        return self.__getitem__(k)

    def __contains__(self, k):
        check_lockfile = k in self.extended_keys or self.lockfile.__contains__(k)
        if check_lockfile:
            return True
        return super(Lockfile, self).__contains__(k)

    def __setitem__(self, k, v):
        lockfile = self._lockfile
        lockfile.__setitem__(k, v)

    def __getitem__(self, k, *args, **kwargs):
        """Index into the underlying lockfile; synthesized keys such as
        ``default-vcs`` or ``develop-editable`` filter the named section."""
        retval = None
        lockfile = self._lockfile
        section = None
        pkg_type = None
        try:
            retval = lockfile[k]
        except KeyError:
            if "-" in k:
                section, _, pkg_type = k.rpartition("-")
                vals = getattr(lockfile.get(section, {}), "_data", {})
                if pkg_type == "vcs":
                    retval = {k: v for k, v in vals.items() if is_vcs(v)}
                elif pkg_type == "editable":
                    retval = {k: v for k, v in vals.items() if is_editable(v)}
            if retval is None:
                raise
        else:
            retval = getattr(retval, "_data", retval)
        return retval

    def __getattr__(self, k, *args, **kwargs):
        # Delegate unknown attributes to the underlying plette lockfile.
        retval = None
        lockfile = super(Lockfile, self).__getattribute__("_lockfile")
        try:
            return super(Lockfile, self).__getattribute__(k)
        except AttributeError:
            retval = getattr(lockfile, k, None)
        if retval is not None:
            return retval
        # NOTE(review): object.__getattribute__ accepts no extra args; this
        # final call would raise TypeError if args/kwargs were supplied.
        return super(Lockfile, self).__getattribute__(k, *args, **kwargs)

    def get_deps(self, dev=False, only=True):
        """Return the dependency mapping; develop deps when ``dev`` is true,
        merged with default deps unless ``only`` is also true."""
        deps = {}
        if dev:
            deps.update(self.develop._data)
            if only:
                return deps
        deps = merge_items([deps, self.default._data])
        return deps

    @classmethod
    def read_projectfile(cls, path):
        """Read the specified project file and provide an interface for
        writing/updating.

        :param str path: Path to the target file.
        :return: A project file with the model and location for interaction
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """
        pf = ProjectFile.read(path, plette.lockfiles.Lockfile, invalid_ok=True)
        return pf

    @classmethod
    def lockfile_from_pipfile(cls, pipfile_path):
        """Build a fresh plette lockfile with metadata derived from the
        Pipfile at *pipfile_path*.

        :raises PipfileNotFound: If *pipfile_path* is not a file.
        """
        from .pipfile import Pipfile

        if os.path.isfile(pipfile_path):
            if not os.path.isabs(pipfile_path):
                pipfile_path = os.path.abspath(pipfile_path)
            pipfile = Pipfile.load(os.path.dirname(pipfile_path))
            return plette.lockfiles.Lockfile.with_meta_from(pipfile._pipfile)
        raise PipfileNotFound(pipfile_path)

    @classmethod
    def load_projectfile(cls, path, create=True, data=None):
        """Given a path, load or create the necessary lockfile.

        :param str path: Path to the project root or lockfile
        :param bool create: Whether to create the lockfile if not found, defaults to True
        :raises OSError: Thrown if the project root directory doesn't exist
        :raises FileNotFoundError: Thrown if the lockfile doesn't exist and ``create=False``
        :return: A project file instance for the supplied project
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """
        if not path:
            path = os.curdir
        path = Path(path).absolute()
        project_path = path if path.is_dir() else path.parent
        lockfile_path = path if path.is_file() else project_path / "Pipfile.lock"
        if not project_path.exists():
            raise OSError("Project does not exist: %s" % project_path.as_posix())
        elif not lockfile_path.exists() and not create:
            raise FileNotFoundError(
                "Lockfile does not exist: %s" % lockfile_path.as_posix()
            )
        projectfile = cls.read_projectfile(lockfile_path.as_posix())
        if not lockfile_path.exists():
            if not data:
                # Derive the Pipfile location from the lockfile name.
                path_str = lockfile_path.as_posix()
                if path_str[-5:] == ".lock":
                    pipfile = Path(path_str[:-5])
                else:
                    pipfile = project_path.joinpath("Pipfile")
                lf = cls.lockfile_from_pipfile(pipfile)
            else:
                lf = plette.lockfiles.Lockfile(data)
            projectfile.model = lf
        return projectfile

    @classmethod
    def from_data(cls, path, data, meta_from_project=True):
        """Create a new lockfile instance from a dictionary.

        :param str path: Path to the project root.
        :param dict data: Data to load into the lockfile.
        :param bool meta_from_project: Attempt to populate the meta section
            from the project root, default True.
        """
        if path is None:
            raise MissingParameter("path")
        if data is None:
            raise MissingParameter("data")
        if not isinstance(data, dict):
            raise TypeError("Expecting a dictionary for parameter 'data'")
        path = os.path.abspath(str(path))
        # NOTE(review): if neither *path* nor its parent is a directory,
        # ``project_path`` stays unbound and a NameError follows -- callers
        # appear to always pass an existing location; confirm.
        if os.path.isdir(path):
            project_path = path
        elif not os.path.isdir(path) and os.path.isdir(os.path.dirname(path)):
            project_path = os.path.dirname(path)
        pipfile_path = os.path.join(project_path, "Pipfile")
        lockfile_path = os.path.join(project_path, "Pipfile.lock")
        if meta_from_project:
            lockfile = cls.lockfile_from_pipfile(pipfile_path)
            lockfile.update(data)
        else:
            lockfile = plette.lockfiles.Lockfile(data)
        projectfile = ProjectFile(
            line_ending=DEFAULT_NEWLINES, location=lockfile_path, model=lockfile
        )
        return cls(
            projectfile=projectfile,
            lockfile=lockfile,
            newlines=projectfile.line_ending,
            path=Path(projectfile.location),
        )

    @classmethod
    def load(cls, path, create=True):
        """Create a new lockfile instance.

        :param project_path: Path to project root or lockfile
        :type project_path: str or :class:`pathlib.Path`
        :param bool create: Whether to create the lockfile if missing
        :returns: A new lockfile representing the supplied project paths
        :rtype: :class:`~requirementslib.models.lockfile.Lockfile`
        """
        try:
            projectfile = cls.load_projectfile(path, create=create)
        except JSONDecodeError:
            # Corrupt lockfile: move it aside and regenerate from the Pipfile.
            path = os.path.abspath(path)
            path = Path(
                os.path.join(path, "Pipfile.lock") if os.path.isdir(path) else path
            )
            formatted_path = path.as_posix()
            backup_path = "%s.bak" % formatted_path
            LockfileCorruptException.show(formatted_path, backup_path=backup_path)
            path.rename(backup_path)
            # BUG FIX: the recursive result was previously discarded, after
            # which execution fell through to an unbound ``projectfile`` and
            # raised NameError.  Return the freshly-created lockfile instead.
            return cls.load(formatted_path, create=True)
        lockfile_path = Path(projectfile.location)
        creation_args = {
            "projectfile": projectfile,
            "lockfile": projectfile.model,
            "newlines": projectfile.line_ending,
            "path": lockfile_path,
        }
        return cls(**creation_args)

    @classmethod
    def create(cls, path, create=True):
        """Alias for :meth:`load`."""
        return cls.load(path, create=create)

    @property
    def develop(self):
        return self._lockfile.develop

    @property
    def default(self):
        return self._lockfile.default

    def get_requirements(self, dev=True, only=False):
        """Produces a generator which generates requirements from the
        desired section.

        :param bool dev: Indicates whether to use dev requirements, defaults to True
        :return: Requirements from the relevant pipfile
        :rtype: :class:`~requirementslib.models.requirements.Requirement`
        """
        deps = self.get_deps(dev=dev, only=only)
        for k, v in deps.items():
            yield Requirement.from_pipfile(k, v)

    @property
    def dev_requirements(self):
        if not self._dev_requirements:
            self._dev_requirements = list(self.get_requirements(dev=True, only=True))
        return self._dev_requirements

    @property
    def requirements(self):
        if not self._requirements:
            self._requirements = list(self.get_requirements(dev=False, only=True))
        return self._requirements

    @property
    def dev_requirements_list(self):
        return [{name: entry._data} for name, entry in self._lockfile.develop.items()]

    @property
    def requirements_list(self):
        return [{name: entry._data} for name, entry in self._lockfile.default.items()]

    def write(self):
        """Flush the in-memory lockfile back to disk via the project file."""
        self.projectfile.model = copy.deepcopy(self._lockfile)
        self.projectfile.write()

    def as_requirements(self, include_hashes=False, dev=False):
        """Returns a list of requirements in pip-style format."""
        lines = []
        section = self.dev_requirements if dev else self.requirements
        for req in section:
            kwargs = {"include_hashes": include_hashes}
            if req.editable:
                # Markers are unreliable for editable installs; omit them.
                kwargs["include_markers"] = False
            r = req.as_line(**kwargs)
            lines.append(r.strip())
        return lines
class PythonVersion(object):
    """Parsed metadata for one discovered python interpreter version.

    Instances normally come from :meth:`from_path` or
    :meth:`from_windows_launcher`.  ``minor``/``patch`` are lazily
    re-parsed from the interpreter executable on first read while still
    ``None`` (see ``__getattribute__``).
    """

    #: Major version number; 0 when unknown.
    major = attr.ib(default=0, type=int)
    minor = attr.ib(default=None)  # type: Optional[int]
    patch = attr.ib(default=None)  # type: Optional[int]
    is_prerelease = attr.ib(default=False, type=bool)
    is_postrelease = attr.ib(default=False, type=bool)
    is_devrelease = attr.ib(default=False, type=bool)
    is_debug = attr.ib(default=False, type=bool)
    #: Full parsed version object.
    version = attr.ib(default=None)  # type: Version
    #: e.g. "64bit"; filled lazily by :meth:`get_architecture`.
    architecture = attr.ib(default=None)  # type: Optional[str]
    #: The PathEntry this version was discovered from, if any.
    comes_from = attr.ib(default=None)  # type: Optional[PathEntry]
    executable = attr.ib(default=None)  # type: Optional[str]
    #: Distributor, e.g. "PythonCore" for python.org builds.
    company = attr.ib(default=None)  # type: Optional[str]
    name = attr.ib(default=None, type=str)

    def __getattribute__(self, key):
        # Intercepts *every* attribute read.  When ``minor`` or ``patch``
        # is read while still None and an executable path is known, run
        # ``parse_executable`` and backfill the parsed fields onto the
        # instance (a side effect of reading), then return the parsed value.
        result = super(PythonVersion, self).__getattribute__(key)
        if key in ["minor", "patch"] and result is None:
            executable = None  # type: Optional[str]
            if self.executable:
                executable = self.executable
            elif self.comes_from:
                executable = self.comes_from.path.as_posix()
            if executable is not None:
                if not isinstance(executable, six.string_types):
                    # Tolerate a Path-like slipping in for ``executable``.
                    executable = executable.as_posix()
                instance_dict = self.parse_executable(executable)
                for k in instance_dict.keys():
                    try:
                        # Only copy keys that already exist as attributes.
                        super(PythonVersion, self).__getattribute__(k)
                    except AttributeError:
                        continue
                    else:
                        setattr(self, k, instance_dict[k])
                result = instance_dict.get(key)
        return result

    @property
    def version_sort(self):
        # type: () -> Tuple[int, int, Optional[int], int, int]
        """
        A tuple for sorting against other instances of the same class.

        Returns a tuple of the python version but includes points for core
        python, non-dev,  and non-prerelease versions.  So released versions
        will have 2 points for this value.  E.g. ``(1, 3, 6, 6, 2)`` is a
        release, ``(1, 3, 6, 6, 1)`` is a prerelease, ``(1, 3, 6, 6, 0)`` is a
        dev release, and ``(1, 3, 6, 6, 3)`` is a postrelease.
        ``(0, 3, 7, 3, 2)`` represents a non-core python release, e.g. by a
        repackager of python like Continuum.
        """
        # PythonCore builds sort above third-party distributions.
        company_sort = 1 if (self.company and self.company == "PythonCore") else 0
        release_sort = 2
        if self.is_postrelease:
            release_sort = 3
        elif self.is_prerelease:
            release_sort = 1
        elif self.is_devrelease:
            release_sort = 0
        elif self.is_debug:
            # Debug builds rank with prereleases.
            release_sort = 1
        return (
            company_sort,
            self.major,
            self.minor,
            self.patch if self.patch else 0,
            release_sort,
        )

    @property
    def version_tuple(self):
        # type: () -> Tuple[int, Optional[int], Optional[int], bool, bool, bool]
        """
        Provides a version tuple for using as a dictionary key.

        :return: A tuple describing the python version metadata contained.
        :rtype: tuple
        """
        return (
            self.major,
            self.minor,
            self.patch,
            self.is_prerelease,
            self.is_devrelease,
            self.is_debug,
        )

    def matches(
        self,
        major=None,  # type: Optional[int]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=False,  # type: bool
        dev=False,  # type: bool
        arch=None,  # type: Optional[str]
        debug=False,  # type: bool
        python_name=None,  # type: Optional[str]
    ):
        # type: (...) -> bool
        """Return True when this version satisfies every given criterion.

        A criterion passed as ``None`` acts as a wildcard and is ignored.
        """
        result = False
        if arch:
            own_arch = self.get_architecture()
            if arch.isdigit():
                # Allow "64" as shorthand for "64bit".
                arch = "{0}bit".format(arch)
        if (
            (major is None or self.major == major)
            and (minor is None or self.minor == minor)
            and (patch is None or self.patch == patch)
            and (pre is None or self.is_prerelease == pre)
            and (dev is None or self.is_devrelease == dev)
            and (arch is None or own_arch == arch)
            and (debug is None or self.is_debug == debug)
            and (
                python_name is None
                or (python_name and self.name)
                # Prefix match allows e.g. "python" to match "python3".
                and (self.name == python_name or self.name.startswith(python_name))
            )
        ):
            result = True
        return result

    def as_major(self):
        # type: () -> PythonVersion
        """Return a copy of this version with minor/patch cleared."""
        self_dict = attr.asdict(self, recurse=False, filter=_filter_none).copy()
        self_dict.update({"minor": None, "patch": None})
        return self.create(**self_dict)

    def as_minor(self):
        # type: () -> PythonVersion
        """Return a copy of this version with only the patch cleared."""
        self_dict = attr.asdict(self, recurse=False, filter=_filter_none).copy()
        self_dict.update({"patch": None})
        return self.create(**self_dict)

    def as_dict(self):
        # type: () -> Dict[str, Union[int, bool, Version, None]]
        """Return the version fields as a plain dictionary."""
        return {
            "major": self.major,
            "minor": self.minor,
            "patch": self.patch,
            "is_prerelease": self.is_prerelease,
            "is_postrelease": self.is_postrelease,
            "is_devrelease": self.is_devrelease,
            "is_debug": self.is_debug,
            "version": self.version,
            "company": self.company,
        }

    def update_metadata(self, metadata):
        # type: (Dict[str, Union[str, int, Version]]) -> None
        """
        Update the metadata on the current :class:`pythonfinder.models.python.PythonVersion`

        Given a parsed version dictionary from :func:`pythonfinder.utils.parse_python_version`,
        update the instance variables of the current version instance to reflect
        the newly supplied values.
        """
        for key in metadata:
            try:
                # Skip keys that are not existing attributes.
                _ = getattr(self, key)
            except AttributeError:
                continue
            else:
                setattr(self, key, metadata[key])

    @classmethod
    @lru_cache(maxsize=1024)
    def parse(cls, version):
        # type: (str) -> Dict[str, Union[str, int, Version]]
        """
        Parse a valid version string into a dictionary

        Raises:
            ValueError -- Unable to parse version string
            ValueError -- Not a valid python version
            TypeError -- NoneType or unparseable type passed in

        :param str version: A valid version string
        :return: A dictionary with metadata about the specified python version.
        :rtype: dict
        """
        if version is None:
            raise TypeError("Must pass a value to parse!")
        version_dict = parse_python_version(str(version))
        if not version_dict:
            raise ValueError("Not a valid python version: %r" % version)
        return version_dict

    def get_architecture(self):
        # type: () -> str
        """Return (and cache) this interpreter's architecture string."""
        if self.architecture:
            return self.architecture
        arch = None
        if self.comes_from is not None:
            arch, _ = platform.architecture(self.comes_from.path.as_posix())
        elif self.executable is not None:
            arch, _ = platform.architecture(self.executable)
        if arch is None:
            # Fall back to the currently running interpreter's architecture.
            arch, _ = platform.architecture(sys.executable)
        self.architecture = arch
        return self.architecture

    @classmethod
    def from_path(cls, path, name=None, ignore_unsupported=True, company=None):
        # type: (Union[str, PathEntry], Optional[str], bool, Optional[str]) -> PythonVersion
        """
        Parses a python version from a system path.

        Raises:
            ValueError -- Not a valid python path

        :param path: A string or :class:`~pythonfinder.models.path.PathEntry`
        :type path: str or :class:`~pythonfinder.models.path.PathEntry` instance
        :param str name: Name of the python distribution in question
        :param bool ignore_unsupported: Whether to ignore or error on unsupported paths.
        :param Optional[str] company: The company or vendor packaging the distribution.
        :return: An instance of a PythonVersion.
        :rtype: :class:`~pythonfinder.models.python.PythonVersion`
        """
        from .path import PathEntry

        if not isinstance(path, PathEntry):
            path = PathEntry.create(path, is_root=False, only_python=True, name=name)
        from ..environment import IGNORE_UNSUPPORTED

        # Environment flag can force lenient handling regardless of arg.
        ignore_unsupported = ignore_unsupported or IGNORE_UNSUPPORTED
        path_name = getattr(path, "name", path.path.name)  # str
        if not path.is_python:
            if not (ignore_unsupported or IGNORE_UNSUPPORTED):
                raise ValueError("Not a valid python path: %s" % path.path)
        try:
            # First try to parse a version straight out of the file name.
            instance_dict = cls.parse(path_name)
        except Exception:
            instance_dict = cls.parse_executable(path.path.absolute().as_posix())
        else:
            # Name parsing succeeded but may be incomplete; fall back to
            # running the executable to fill in the missing components.
            if instance_dict.get("minor") is None and looks_like_python(path.path.name):
                instance_dict = cls.parse_executable(path.path.absolute().as_posix())

        if (
            not isinstance(instance_dict.get("version"), Version)
            and not ignore_unsupported
        ):
            raise ValueError("Not a valid python path: %s" % path)
        if instance_dict.get("patch") is None:
            instance_dict = cls.parse_executable(path.path.absolute().as_posix())
        if name is None:
            name = path_name
        if company is None:
            company = guess_company(path.path.as_posix())
        instance_dict.update(
            {"comes_from": path, "name": name, "executable": path.path.as_posix()}
        )
        return cls(**instance_dict)  # type: ignore

    @classmethod
    @lru_cache(maxsize=1024)
    def parse_executable(cls, path):
        # type: (str) -> Dict[str, Optional[Union[str, int, Version]]]
        """Run the interpreter at *path* and parse the version it reports.

        Raises:
            TypeError -- ``None`` was passed.
            ValueError -- The executable did not yield a parseable version.
        """
        result_dict = {}  # type: Dict[str, Optional[Union[str, int, Version]]]
        result_version = None  # type: Optional[str]
        if path is None:
            raise TypeError("Must pass a valid path to parse.")
        if not isinstance(path, six.string_types):
            path = path.as_posix()
        # if not looks_like_python(path):
        #     raise ValueError("Path %r does not look like a valid python path" % path)
        try:
            result_version = get_python_version(path)
        except Exception:
            # Normalize any subprocess/parse failure into a ValueError.
            raise ValueError("Not a valid python path: %r" % path)
        if result_version is None:
            raise ValueError("Not a valid python path: %s" % path)
        result_dict = cls.parse(result_version.strip())
        return result_dict

    @classmethod
    def from_windows_launcher(cls, launcher_entry, name=None, company=None):
        # type: (Environment, Optional[str], Optional[str]) -> PythonVersion
        """Create a new PythonVersion instance from a Windows Launcher Entry

        :param launcher_entry: A python launcher environment object.
        :param Optional[str] name: The name of the distribution.
        :param Optional[str] company: The name of the distributing company.
        :return: An instance of a PythonVersion.
        :rtype: :class:`~pythonfinder.models.python.PythonVersion`
        """
        from .path import PathEntry

        creation_dict = cls.parse(launcher_entry.info.version)
        # The launcher stores the install path under an empty attribute name.
        base_path = ensure_path(launcher_entry.info.install_path.__getattr__(""))
        default_path = base_path / "python.exe"
        if not default_path.exists():
            default_path = base_path / "Scripts" / "python.exe"
        exe_path = ensure_path(
            getattr(launcher_entry.info.install_path, "executable_path", default_path)
        )
        company = getattr(launcher_entry, "company", guess_company(exe_path.as_posix()))
        creation_dict.update(
            {
                "architecture": getattr(
                    launcher_entry.info, "sys_architecture", SYSTEM_ARCH
                ),
                "executable": exe_path,
                "name": name,
                "company": company,
            }
        )
        py_version = cls.create(**creation_dict)
        comes_from = PathEntry.create(exe_path, only_python=True, name=name)
        py_version.comes_from = comes_from
        py_version.name = comes_from.name
        return py_version

    @classmethod
    def create(cls, **kwargs):
        # type: (...) -> PythonVersion
        """Construct an instance, normalizing ``architecture`` ("64" -> "64bit")."""
        if "architecture" in kwargs:
            if kwargs["architecture"].isdigit():
                kwargs["architecture"] = "{0}bit".format(kwargs["architecture"])
        return cls(**kwargs)
class Project(object):
    """A Pipfile project rooted at a directory.

    Wraps the project's ``Pipfile`` and ``Pipfile.lock`` as
    :class:`ProjectFile` instances (``_p`` and ``_l`` respectively); the
    lock file is allowed to be missing or invalid at load time.
    """

    root = attr.ib()
    _p = attr.ib(init=False)  # type: ProjectFile  # the Pipfile
    _l = attr.ib(init=False)  # type: ProjectFile  # the Pipfile.lock

    def __attrs_post_init__(self):
        self.root = root = os.path.abspath(self.root)
        self._p = ProjectFile.read(os.path.join(root, "Pipfile"), plette.Pipfile)
        self._l = ProjectFile.read(os.path.join(root, "Pipfile.lock"),
                                   plette.Lockfile, invalid_ok=True)

    @property
    def pipfile(self):
        """The parsed Pipfile model."""
        return self._p.model

    @property
    def pipfile_location(self):
        return self._p.location

    @property
    def lockfile(self):
        """The parsed lock file model; may be None when absent/invalid."""
        return self._l.model

    @property
    def lockfile_location(self):
        return self._l.location

    @lockfile.setter
    def lockfile(self, new):
        self._l.model = new

    def is_synced(self):
        """Whether the lock file exists and matches the current Pipfile hash."""
        return self.lockfile and self.lockfile.is_up_to_date(self.pipfile)

    def _get_pipfile_section(self, develop, insert=True):
        """Fetch (and optionally create) the Pipfile section for packages.

        :param bool develop: Use ``dev-packages`` instead of ``packages``.
        :param bool insert: When True, attach a newly created empty section
            to the Pipfile so later writes persist it.
        """
        name = "dev-packages" if develop else "packages"
        try:
            section = self.pipfile[name]
        except KeyError:
            section = plette.models.PackageCollection(tomlkit.table())
            if insert:
                self.pipfile[name] = section
        return section

    def contains_key_in_pipfile(self, key):
        """True when *key* names a package in either Pipfile section
        (compared by canonicalized name)."""
        sections = [
            self._get_pipfile_section(develop=False, insert=False),
            self._get_pipfile_section(develop=True, insert=False),
        ]
        return any((packaging.utils.canonicalize_name(name) ==
                    packaging.utils.canonicalize_name(key))
                   for section in sections for name in section)

    def add_line_to_pipfile(self, line, develop):
        """Parse a requirement *line* and insert it into the Pipfile.

        :param str line: A pip-style requirement line.
        :param bool develop: Target the dev-packages section when True.
        """
        from pipenv.vendor.requirementslib import Requirement
        requirement = Requirement.from_line(line)
        section = self._get_pipfile_section(develop=develop)
        key = requirement.normalized_name
        entry = next(iter(requirement.as_pipfile().values()))
        if isinstance(entry, dict):
            # HACK: TOMLKit prefers to expand tables by default, but we
            # always want inline tables here. Also tomlkit.inline_table
            # does not have `update()`.
            table = tomlkit.inline_table()
            for k, v in entry.items():
                table[k] = v
            entry = table
        section[key] = entry

    def remove_keys_from_pipfile(self, keys, default, develop):
        """Remove packages named in *keys* from the selected Pipfile sections.

        :param keys: Package names; matched by canonicalized name.
        :param bool default: Remove from the ``packages`` section.
        :param bool develop: Remove from the ``dev-packages`` section.
        """
        keys = {packaging.utils.canonicalize_name(key) for key in keys}
        sections = []
        if default:
            sections.append(
                self._get_pipfile_section(develop=False, insert=False))
        if develop:
            sections.append(
                self._get_pipfile_section(develop=True, insert=False))
        for section in sections:
            removals = set()
            for name in section:
                if packaging.utils.canonicalize_name(name) in keys:
                    removals.add(name)
            for key in removals:
                # Reaches into the underlying mapping; plette sections do
                # not expose deletion directly.
                del section._data[key]

    def remove_keys_from_lockfile(self, keys):
        """Remove packages named in *keys* from both lock file sections,
        invalidating the lock hash if anything was removed."""
        keys = {packaging.utils.canonicalize_name(key) for key in keys}
        removed = False
        for section_name in ("default", "develop"):
            try:
                section = self.lockfile[section_name]
            except KeyError:
                continue
            removals = set()
            for name in section:
                if packaging.utils.canonicalize_name(name) in keys:
                    removals.add(name)
            removed = removed or bool(removals)
            for key in removals:
                del section._data[key]

        if removed:
            # HACK: The lock file no longer represents the Pipfile at this
            # point. Set the hash to an arbitrary invalid value.
            self.lockfile.meta.hash = plette.models.Hash({"__invalid__": ""})

    def difference_lockfile(self, lockfile):
        """Generate a difference between the current and given lockfiles.

        Returns a 2-tuple containing differences in default and develop
        sections.

        Each element is a 2-tuple of dicts. The first, `inthis`, contains
        entries only present in the current lockfile; the second, `inthat`,
        contains entries only present in the given one. If a key exists in
        both this and that, but the values differ, the key is present in both
        dicts, pointing to values from each file.
        """
        diff_data = {
            "default": SectionDifference({}, {}),
            "develop": SectionDifference({}, {}),
        }
        for section_name, section_diff in diff_data.items():
            # TypeError covers a None (missing/invalid) lockfile model.
            try:
                this = self.lockfile[section_name]._data
            except (KeyError, TypeError):
                this = {}
            try:
                that = lockfile[section_name]._data
            except (KeyError, TypeError):
                that = {}
            for key, this_value in this.items():
                try:
                    that_value = that[key]
                except KeyError:
                    section_diff.inthis[key] = this_value
                    continue
                if not _are_pipfile_entries_equal(this_value, that_value):
                    section_diff.inthis[key] = this_value
                    section_diff.inthat[key] = that_value
            for key, that_value in that.items():
                if key not in this:
                    section_diff.inthat[key] = that_value
        return FileDifference(**diff_data)
class AbstractDependency(object):
    """A requirement together with the full set of candidate versions that
    could satisfy it, used during dependency resolution to intersect
    compatible version sets across requirements."""

    name = attr.ib()  # type: STRING_TYPE
    #: Specifier set from the underlying requirement ("" when editable).
    specifiers = attr.ib()
    markers = attr.ib()
    #: Candidate InstallRequirements that may satisfy this dependency.
    candidates = attr.ib()
    #: The originating Requirement object.
    requirement = attr.ib()
    #: Parent dependency, if this was discovered transitively.
    parent = attr.ib()
    finder = attr.ib()
    #: Cache mapping a candidate line to its abstract sub-dependencies.
    dep_dict = attr.ib(default=attr.Factory(dict))

    @property
    def version_set(self):
        """Return the set of versions for the candidates in this abstract
        dependency.

        :return: A set of matching versions
        :rtype: set(str)
        """
        # A single candidate means the requirement is pinned/editable;
        # an empty set signals "no set to intersect against".
        if len(self.candidates) == 1:
            return set()
        return set(
            packaging.version.parse(version_from_ireq(c))
            for c in self.candidates)

    def compatible_versions(self, other):
        """Find compatible version numbers between this abstract
        dependency and another one.

        :param other: An abstract dependency to compare with.
        :type other: :class:`~requirementslib.models.dependency.AbstractDependency`
        :return: A set of compatible version strings
        :rtype: set(str)
        """
        # Editable single-candidate dependencies win outright: the whole
        # AbstractDependency is returned as a sentinel instead of a set.
        if len(self.candidates) == 1 and next(iter(self.candidates)).editable:
            return self
        elif len(other.candidates) == 1 and next(iter(
                other.candidates)).editable:
            return other
        return self.version_set & other.version_set

    def compatible_abstract_dep(self, other):
        """Merge this abstract dependency with another one.

        Return the result of the merge as a new abstract dependency.

        :param other: An abstract dependency to merge with
        :type other: :class:`~requirementslib.models.dependency.AbstractDependency`
        :return: A new, combined abstract dependency
        :rtype: :class:`~requirementslib.models.dependency.AbstractDependency`
        """
        from .requirements import Requirement

        # Editable single-candidate dependencies short-circuit the merge.
        if len(self.candidates) == 1 and next(iter(self.candidates)).editable:
            return self
        elif len(other.candidates) == 1 and next(iter(
                other.candidates)).editable:
            return other
        new_specifiers = self.specifiers & other.specifiers
        markers = set(self.markers) if self.markers else set()
        if other.markers:
            markers.add(other.markers)
        new_markers = None
        if markers:
            new_markers = packaging.markers.Marker(" or ".join(
                str(m) for m in sorted(markers)))
        new_ireq = copy.deepcopy(self.requirement.ireq)
        new_ireq.req.specifier = new_specifiers
        new_ireq.req.marker = new_markers
        new_requirement = Requirement.from_line(format_requirement(new_ireq))
        compatible_versions = self.compatible_versions(other)
        # An AbstractDependency here is the editable sentinel from above.
        if isinstance(compatible_versions, AbstractDependency):
            return compatible_versions
        candidates = [
            c for c in self.candidates if packaging.version.parse(
                version_from_ireq(c)) in compatible_versions
        ]
        # Carry over cached sub-dependency lookups for surviving candidates.
        dep_dict = {}
        candidate_strings = [format_requirement(c) for c in candidates]
        for c in candidate_strings:
            if c in self.dep_dict:
                dep_dict[c] = self.dep_dict.get(c)
        return AbstractDependency(
            name=self.name,
            specifiers=new_specifiers,
            markers=new_markers,
            candidates=candidates,
            requirement=new_requirement,
            parent=self.parent,
            dep_dict=dep_dict,
            finder=self.finder,
        )

    def get_deps(self, candidate):
        """Get the dependencies of the supplied candidate.

        :param candidate: An installrequirement
        :type candidate: :class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement`
        :return: A list of abstract dependencies
        :rtype: list[:class:`~requirementslib.models.dependency.AbstractDependency`]
        """
        key = format_requirement(candidate)
        # Memoized per candidate line in ``dep_dict``.
        if key not in self.dep_dict:
            from .requirements import Requirement

            req = Requirement.from_line(key)
            req = req.merge_markers(self.markers)
            self.dep_dict[key] = req.get_abstract_dependencies()
        return self.dep_dict[key]

    @classmethod
    def from_requirement(cls, requirement, parent=None):
        """Creates a new
        :class:`~requirementslib.models.dependency.AbstractDependency`
        from a :class:`~requirementslib.models.requirements.Requirement`
        object.

        This class is used to find all candidates matching a given set of
        specifiers and a given requirement.

        :param requirement: A requirement for resolution
        :type requirement: :class:`~requirementslib.models.requirements.Requirement` object.
        """
        name = requirement.normalized_name
        specifiers = requirement.ireq.specifier if not requirement.editable else ""
        markers = requirement.ireq.markers
        extras = requirement.ireq.extras
        is_pinned = is_pinned_requirement(requirement.ireq)
        # Transitive requirements (those with a parent) become constraints.
        is_constraint = bool(parent)
        _, finder = get_finder(sources=None)
        candidates = []
        if not is_pinned and not requirement.editable:
            # Expand to every matching version on the index.
            for r in requirement.find_all_matches(finder=finder):
                req = make_install_requirement(
                    name,
                    r.version,
                    extras=extras,
                    markers=markers,
                    constraint=is_constraint,
                )
                req.req.link = getattr(r, "location", getattr(r, "link", None))
                req.parent = parent
                candidates.append(req)
            candidates = sorted(
                set(candidates),
                key=lambda k: packaging.version.parse(version_from_ireq(k)),
            )
        else:
            # Pinned or editable: the single ireq is the only candidate.
            candidates = [requirement.ireq]
        return cls(
            name=name,
            specifiers=specifiers,
            markers=markers,
            candidates=candidates,
            requirement=requirement,
            parent=parent,
            finder=finder,
        )

    @classmethod
    def from_string(cls, line, parent=None):
        """Build an AbstractDependency from a pip-style requirement line."""
        from .requirements import Requirement

        req = Requirement.from_line(line)
        abstract_dep = cls.from_requirement(req, parent=parent)
        return abstract_dep
class ReleaseUrlCollection(Sequence):
    """A read-only sequence of release URLs for a single package."""

    #: A list of release URLs
    urls = attr.ib(converter=create_release_urls_from_list)
    #: the name of the package
    name = attr.ib(default=None)  # type: Optional[str]

    @classmethod
    def create(cls, urls, name=None):
        # type: (TReleasesList, Optional[str]) -> "ReleaseUrlCollection"
        """Alternate constructor mirroring the attrs signature."""
        return cls(urls=urls, name=name)

    @property
    def wheels(self):
        # type: () -> Iterator[ReleaseUrl]
        """Iterate over only the wheel URLs in this collection."""
        return (url for url in self.urls if url.is_wheel)

    @property
    def sdists(self):
        # type: () -> Iterator[ReleaseUrl]
        """Iterate over only the sdist URLs in this collection."""
        return (url for url in self.urls if url.is_sdist)

    def __iter__(self):
        # type: () -> Iterator[ReleaseUrl]
        return iter(self.urls)

    def __getitem__(self, key):
        # type: (int) -> ReleaseUrl
        return self.urls[key]

    def __len__(self):
        # type: () -> int
        return len(self.urls)

    @property
    def latest(self):
        # type: () -> Optional[ReleaseUrl]
        """The most recently uploaded URL, or None when the collection is empty."""
        if not self.urls:
            return None
        return max(self.urls, key=operator.attrgetter("upload_time"))

    @property
    def latest_timestamp(self):
        # type: () -> Optional[datetime.datetime]
        """Upload time of :attr:`latest`, or None when there are no URLs."""
        newest = self.latest
        return newest.upload_time if newest is not None else None

    def find_package_type(self, type_):
        # type: (str) -> Optional[ReleaseUrl]
        """Given a package type (e.g. sdist, bdist_wheel), find the matching release.

        :param str type_: A package type from :const:`~PACKAGE_TYPES`
        :return: The package from this collection matching that type, if available
        :rtype: Optional[ReleaseUrl]
        """
        if type_ not in PACKAGE_TYPES:
            raise ValueError(
                "Invalid package type: {0}. Expected one of {1}".format(
                    type_, " ".join(PACKAGE_TYPES)
                )
            )
        matches = (url for url in self.urls if url.packagetype == type_)
        return next(iter(matches), None)
class ReleaseCollection(object):
    """A collection of :class:`Release` objects for one package, ordered
    on demand by each release's latest upload timestamp."""

    releases = attr.ib(
        factory=list,
        converter=instance_check_converter(list, get_releases_from_package),  # type: ignore
    )  # type: List[Release]

    def __iter__(self):
        # type: () -> Iterator[Release]
        return iter(self.releases)

    def __getitem__(self, key):
        # type: (str) -> Release
        """Look a release up by its version string; raises KeyError when absent."""
        result = next(iter(r for r in self.releases if r.version == key), None)
        if result is None:
            raise KeyError(key)
        return result

    def __len__(self):
        # type: () -> int
        return len(self.releases)

    def get_latest_lockfile(self):
        # type: () -> Dict[str, Union[str, List[str]]]
        """Lockfile entry for the newest non-yanked release.

        NOTE(review): raises AttributeError when :attr:`latest` is None
        (no usable releases) — callers should guard for empty collections.
        """
        return self.latest.to_lockfile()

    def wheels(self):
        # type: () -> Iterator[ReleaseUrl]
        """Yield wheel URLs from all non-yanked releases, newest release first."""
        for release in self.sort_releases():
            for wheel in release.wheels:
                yield wheel

    def sdists(self):
        # type: () -> Iterator[ReleaseUrl]
        """Yield sdist URLs from all non-yanked releases, newest release first."""
        for release in self.sort_releases():
            for sdist in release.sdists:
                yield sdist

    @property
    def non_yanked_releases(self):
        # type: () -> List[Release]
        return [r for r in self.releases if not r.yanked]

    def sort_releases(self):
        # type: () -> List[Release]
        """Non-yanked releases, newest upload first."""
        return sorted(
            self.non_yanked_releases,
            key=operator.attrgetter("latest_timestamp"),
            reverse=True,
        )

    @property
    def latest(self):
        # type: () -> Optional[Release]
        """The newest non-yanked release, or None when there is none.

        Fixed: previously used ``next(iter(...))`` without a default, which
        raised StopIteration (contradicting the Optional[Release] contract)
        when the collection was empty or every release was yanked.
        """
        return next(iter(r for r in self.sort_releases() if not r.yanked), None)

    @classmethod
    def load(cls, releases, name=None):
        # type: (Union[TReleasesDict, List[Release]], Optional[str]) -> "ReleaseCollection"
        """Build a collection from either a releases mapping or a prepared list."""
        if not isinstance(releases, list):
            releases = get_releases_from_package(releases, name=name)
        return cls(releases)
class BasePath(object):
    """Base class for filesystem path entries that may be (or contain)
    python interpreters.

    Caches directory/executable/python-ness checks and the parsed
    :class:`PythonVersion` in underscore-prefixed attrs fields, exposing
    them via properties with setters and deleters for cache invalidation.
    """

    path = attr.ib(default=None)  # type: Path
    #: Cached child entries keyed by posix path string.
    _children = attr.ib(default=attr.Factory(dict), order=False)  # type: Dict[str, PathEntry]
    only_python = attr.ib(default=False)  # type: bool
    name = attr.ib(type=str)
    _py_version = attr.ib(default=None, order=False)  # type: Optional[PythonVersion]
    _pythons = attr.ib(default=attr.Factory(defaultdict), order=False)  # type: DefaultDict[str, PathEntry]
    # Tri-state caches: None means "not computed yet".
    _is_dir = attr.ib(default=None, order=False)  # type: Optional[bool]
    _is_executable = attr.ib(default=None, order=False)  # type: Optional[bool]
    _is_python = attr.ib(default=None, order=False)  # type: Optional[bool]

    def __str__(self):
        # type: () -> str
        return fs_str("{0}".format(self.path.as_posix()))

    def __lt__(self, other):
        # type: ("BasePath") -> bool
        # Entries order lexicographically by posix path.
        return self.path.as_posix() < other.path.as_posix()

    def __lte__(self, other):
        # type: ("BasePath") -> bool
        # NOTE(review): ``__lte__`` is not a recognized dunder — Python's
        # ``<=`` operator calls ``__le__``; this method is never invoked
        # by the operator. Likely a misspelling; confirm before renaming.
        return self.path.as_posix() <= other.path.as_posix()

    def __gt__(self, other):
        # type: ("BasePath") -> bool
        return self.path.as_posix() > other.path.as_posix()

    def __gte__(self, other):
        # type: ("BasePath") -> bool
        # NOTE(review): same as ``__lte__`` — ``>=`` calls ``__ge__``,
        # so this is dead as an operator hook.
        return self.path.as_posix() >= other.path.as_posix()

    def which(self, name):
        # type: (str) -> Optional[PathEntry]
        """Search in this path for an executable.

        :param executable: The name of an executable to search for.
        :type executable: str
        :returns: :class:`~pythonfinder.models.PathEntry` instance.
        """
        # Try the bare name plus each known platform extension (e.g. .exe).
        valid_names = [name] + [
            "{0}.{1}".format(name, ext).lower() if ext else "{0}".format(name).lower()
            for ext in KNOWN_EXTS
        ]
        children = self.children
        found = None
        if self.path is not None:
            found = next(
                (
                    children[(self.path / child).as_posix()]
                    for child in valid_names
                    if (self.path / child).as_posix() in children
                ),
                None,
            )
        return found

    def __del__(self):
        # Explicitly drop cached state on teardown to break reference
        # cycles between entries and their children/py_versions.
        for key in ["_is_dir", "_is_python", "_is_executable", "_py_version"]:
            if getattr(self, key, None):
                try:
                    delattr(self, key)
                except Exception:
                    # NOTE(review): prints instead of logging; presumably
                    # debug leftovers — confirm before changing.
                    print("failed deleting key: {0}".format(key))
        self._children = {}
        for key in list(self._pythons.keys()):
            del self._pythons[key]
        self._pythons = None
        self._py_version = None
        self.path = None

    @property
    def children(self):
        # type: () -> Dict[str, PathEntry]
        """Cached child entries; empty for non-directories."""
        if not self.is_dir:
            return {}
        return self._children

    @property
    def as_python(self):
        # type: () -> PythonVersion
        """Parse (and cache) this entry as a :class:`PythonVersion`.

        Returns None when the entry is a directory, is not python, or
        cannot be parsed as a valid python version.
        """
        py_version = None
        if self.py_version:
            return self.py_version
        if not self.is_dir and self.is_python:
            try:
                from .python import PythonVersion

                py_version = PythonVersion.from_path(  # type: ignore
                    path=self, name=self.name)
            except (ValueError, InvalidPythonVersion):
                pass
        if py_version is None:
            # No-op branch; kept as-is (result is cached below either way).
            pass
        self.py_version = py_version
        return py_version  # type: ignore

    @name.default
    def get_name(self):
        # type: () -> Optional[str]
        # attrs default: derive the name from the path's final component.
        if self.path:
            return self.path.name
        return None

    @property
    def is_dir(self):
        # type: () -> bool
        """Whether this path is a directory (cached; OSError -> False)."""
        if self._is_dir is None:
            if not self.path:
                ret_val = False
            try:
                ret_val = self.path.is_dir()
            except OSError:
                ret_val = False
            self._is_dir = ret_val
        return self._is_dir

    @is_dir.setter
    def is_dir(self, val):
        # type: (bool) -> None
        self._is_dir = val

    @is_dir.deleter
    def is_dir(self):
        # type: () -> None
        self._is_dir = None

    @property
    def is_executable(self):
        # type: () -> bool
        """Whether the path is a known executable (cached)."""
        if self._is_executable is None:
            if not self.path:
                self._is_executable = False
            else:
                self._is_executable = path_is_known_executable(self.path)
        return self._is_executable

    @is_executable.setter
    def is_executable(self, val):
        # type: (bool) -> None
        self._is_executable = val

    @is_executable.deleter
    def is_executable(self):
        # type: () -> None
        self._is_executable = None

    @property
    def is_python(self):
        # type: () -> bool
        """Whether this looks like a python executable (cached)."""
        if self._is_python is None:
            if not self.path:
                self._is_python = False
            else:
                self._is_python = self.is_executable and (looks_like_python(
                    self.path.name))
        return self._is_python

    @is_python.setter
    def is_python(self, val):
        # type: (bool) -> None
        self._is_python = val

    @is_python.deleter
    def is_python(self):
        # type: () -> None
        self._is_python = None

    def get_py_version(self):
        # type: () -> Optional[PythonVersion]
        """Parse this entry's python version, honoring the
        IGNORE_UNSUPPORTED environment setting for unexpected errors."""
        from ..environment import IGNORE_UNSUPPORTED

        if self.is_dir:
            return None
        if self.is_python:
            py_version = None
            from .python import PythonVersion

            try:
                py_version = PythonVersion.from_path(  # type: ignore
                    path=self, name=self.name)
            except (InvalidPythonVersion, ValueError):
                py_version = None
            except Exception:
                # Unknown failure: only swallow when configured to.
                if not IGNORE_UNSUPPORTED:
                    raise
            return py_version
        return None

    @property
    def py_version(self):
        # type: () -> Optional[PythonVersion]
        """Cached python version; computed lazily via get_py_version()."""
        if not self._py_version:
            py_version = self.get_py_version()
            self._py_version = py_version
        else:
            py_version = self._py_version
        return py_version

    @py_version.setter
    def py_version(self, val):
        # type: (Optional[PythonVersion]) -> None
        self._py_version = val

    @py_version.deleter
    def py_version(self):
        # type: () -> None
        self._py_version = None

    def _iter_pythons(self):
        # type: () -> Iterator
        # Depth-first walk yielding every entry that parses as python.
        if self.is_dir:
            for entry in self.children.values():
                if entry is None:
                    continue
                elif entry.is_dir:
                    for python in entry._iter_pythons():
                        yield python
                elif entry.is_python and entry.as_python is not None:
                    yield entry
        elif self.is_python and self.as_python is not None:
            yield self  # type: ignore

    @property
    def pythons(self):
        # type: () -> DefaultDict[Union[str, Path], PathEntry]
        """Mapping of posix path -> PathEntry for every python found below
        this entry (cached)."""
        if not self._pythons:
            from .path import PathEntry

            self._pythons = defaultdict(PathEntry)
            for python in self._iter_pythons():
                python_path = python.path.as_posix()  # type: ignore
                self._pythons[python_path] = python
        return self._pythons

    def __iter__(self):
        # type: () -> Iterator
        for entry in self.children.values():
            yield entry

    def __next__(self):
        # type: () -> Generator
        return next(iter(self))

    def next(self):
        # type: () -> Generator
        # Python 2 iterator protocol shim.
        return self.__next__()

    def find_all_python_versions(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
    ):
        # type: (...) -> List[PathEntry]
        """Search for a specific python version on the path. Return all copies

        :param major: Major python version to search for.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested.
        :rtype: List[:class:`~pythonfinder.models.PathEntry`]
        """
        # Recurse into sub-paths for directories; a file entry delegates
        # straight to find_python_version on itself.
        call_method = "find_all_python_versions" if self.is_dir else "find_python_version"
        sub_finder = operator.methodcaller(call_method, major, minor, patch,
                                           pre, dev, arch, name)
        if not self.is_dir:
            return sub_finder(self)
        unnested = [sub_finder(path) for path in expand_paths(self)]
        version_sort = operator.attrgetter("as_python.version_sort")
        unnested = [
            p for p in unnested if p is not None and p.as_python is not None
        ]
        paths = sorted(unnested, key=version_sort, reverse=True)
        return list(paths)

    def find_python_version(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
    ):
        # type: (...) -> Optional[PathEntry]
        """Search or self for the specified Python version and return the first match.

        :param major: Major version number.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested.
        """
        version_matcher = operator.methodcaller("matches", major, minor,
                                                patch, pre, dev, arch,
                                                python_name=name)
        if not self.is_dir:
            if self.is_python and self.as_python and version_matcher(
                    self.py_version):
                return self  # type: ignore

        # Pair each matching entry with its sort key; ties break on the
        # entry itself (lexicographic path order via __lt__/__gt__).
        matching_pythons = [
            [entry, entry.as_python.version_sort]
            for entry in self._iter_pythons()
            if (entry is not None and entry.as_python is not None
                and version_matcher(entry.py_version))
        ]
        results = sorted(matching_pythons,
                         key=operator.itemgetter(1, 0),
                         reverse=True)
        return next(iter(r[0] for r in results if r is not None), None)
class SystemPath(object):
    """Model of the search path: an ordered set of directories plus optional
    plugin "finders" (pyenv, asdf, Windows registry) that can locate Python
    interpreters and other executables.

    Instances are attrs-based and treated as mostly immutable: mutating
    operations generally return a new instance via ``attr.evolve``.
    """

    #: Whether to include the process environment ``PATH`` in the search.
    global_search = attr.ib(default=True)
    #: Mapping of path string -> PathEntry (or a finder standing in for one).
    paths = attr.ib(
        default=attr.Factory(defaultdict)
    )  # type: DefaultDict[str, Union[PythonFinder, PathEntry]]
    _executables = attr.ib(default=attr.Factory(list))  # type: List[PathEntry]
    _python_executables = attr.ib(
        default=attr.Factory(dict)
    )  # type: Dict[str, PathEntry]
    #: Ordered list of path strings; earlier entries take precedence.
    path_order = attr.ib(default=attr.Factory(list))  # type: List[str]
    python_version_dict = attr.ib()  # type: DefaultDict[Tuple, List[PythonVersion]]
    only_python = attr.ib(default=False, type=bool)
    pyenv_finder = attr.ib(default=None)  # type: Optional[PythonFinder]
    asdf_finder = attr.ib(default=None)  # type: Optional[PythonFinder]
    windows_finder = attr.ib(default=None)  # type: Optional[WindowsFinder]
    #: Whether to prefer the currently running interpreter's bin directory.
    system = attr.ib(default=False, type=bool)
    _version_dict = attr.ib(
        default=attr.Factory(defaultdict)
    )  # type: DefaultDict[Tuple, List[PathEntry]]
    ignore_unsupported = attr.ib(default=False, type=bool)
    #: Registered finder plugins keyed by name ("pyenv", "asdf", "windows").
    __finders = attr.ib(
        default=attr.Factory(dict)
    )  # type: Dict[str, Union[WindowsFinder, PythonFinder]]

    def _register_finder(self, finder_name, finder):
        # type: (str, Union[WindowsFinder, PythonFinder]) -> "SystemPath"
        # First registration wins; re-registering under the same name is a no-op.
        if finder_name not in self.__finders:
            self.__finders[finder_name] = finder
        return self

    def clear_caches(self):
        """Drop every cached_property value and registered finder, returning a
        fresh evolved copy with all derived state reset."""
        # cached_property stores its result in the instance __dict__, so
        # deleting the key forces recomputation on next access.
        for key in ["executables", "python_executables", "version_dict", "path_entries"]:
            if key in self.__dict__:
                del self.__dict__[key]
        for finder in list(self.__finders.keys()):
            del self.__finders[finder]
        self.__finders = {}
        return attr.evolve(
            self,
            executables=[],
            python_executables={},
            python_version_dict=defaultdict(list),
            version_dict=defaultdict(list),
            pyenv_finder=None,
            windows_finder=None,
            asdf_finder=None,
            path_order=[],
            paths=defaultdict(PathEntry),
        )

    def __del__(self):
        # Best-effort teardown mirroring clear_caches(), breaking reference
        # cycles between this instance, its finders, and their PathEntry trees.
        for key in ["executables", "python_executables", "version_dict", "path_entries"]:
            try:
                del self.__dict__[key]
            except KeyError:
                pass
        for finder in list(self.__finders.keys()):
            del self.__finders[finder]
        self.__finders = {}
        self._python_executables = {}
        self._executables = []
        self.python_version_dict = defaultdict(list)
        self._version_dict = defaultdict(list)
        self.path_order = []
        self.pyenv_finder = None
        self.asdf_finder = None
        self.paths = defaultdict(PathEntry)
        self.__finders = {}

    @property
    def finders(self):
        # type: () -> List[str]
        #: Names of the currently registered finder plugins.
        return [k for k in self.__finders.keys()]

    @staticmethod
    def check_for_pyenv():
        return PYENV_INSTALLED or os.path.exists(normalize_path(PYENV_ROOT))

    @staticmethod
    def check_for_asdf():
        return ASDF_INSTALLED or os.path.exists(normalize_path(ASDF_DATA_DIR))

    @python_version_dict.default
    def create_python_version_dict(self):
        # type: () -> DefaultDict[Tuple, List[PythonVersion]]
        return defaultdict(list)

    @cached_property
    def executables(self):
        # type: () -> List[PathEntry]
        """Every executable found under any known path entry (computed once)."""
        # Assigning to self.executables replaces the cached_property descriptor
        # lookup with the concrete list on this instance.
        self.executables = [
            p
            for p in chain(*(child.children.values() for child in self.paths.values()))
            if p.is_executable
        ]
        return self.executables

    @cached_property
    def python_executables(self):
        # type: () -> Dict[str, PathEntry]
        """Mapping of python executable path -> entry, merged from path entries
        and all registered finders (computed once)."""
        python_executables = {}
        for child in self.paths.values():
            if child.pythons:
                python_executables.update(dict(child.pythons))
        for finder_name, finder in self.__finders.items():
            if finder.pythons:
                python_executables.update(dict(finder.pythons))
        self._python_executables = python_executables
        return self._python_executables

    @cached_property
    def version_dict(self):
        # type: () -> DefaultDict[Tuple, List[PathEntry]]
        """Version-tuple -> entries mapping, merged from finders and from the
        discovered python executables (computed once)."""
        self._version_dict = defaultdict(
            list
        )  # type: DefaultDict[Tuple, List[PathEntry]]
        for finder_name, finder in self.__finders.items():
            for version, entry in finder.versions.items():
                if finder_name == "windows":
                    # Windows registry entries are trusted without the
                    # is_python check applied to other finders.
                    if entry not in self._version_dict[version]:
                        self._version_dict[version].append(entry)
                    continue
                if entry not in self._version_dict[version] and entry.is_python:
                    self._version_dict[version].append(entry)
        for p, entry in self.python_executables.items():
            version = entry.as_python  # type: PythonVersion
            if not version:
                continue
            if not isinstance(version, tuple):
                version = version.version_tuple
            if version and entry not in self._version_dict[version]:
                self._version_dict[version].append(entry)
        return self._version_dict

    def _run_setup(self):
        # type: () -> "SystemPath"
        """Populate path entries from the environment, splice in the pyenv /
        asdf / windows finders, and honor VIRTUAL_ENV and system-python
        precedence.  Returns the fully initialized (evolved) instance."""
        # Subclasses are expected to do their own setup.
        if not self.__class__ == SystemPath:
            return self
        new_instance = self
        path_order = new_instance.path_order[:]
        path_entries = self.paths.copy()
        if self.global_search and "PATH" in os.environ:
            path_order = path_order + os.environ["PATH"].split(os.pathsep)
        path_order = list(dedup(path_order))
        # Shim directories (pyenv/asdf shims) are excluded: the real version
        # directories are sliced in by the dedicated finders below.
        path_instances = [
            ensure_path(p.strip('"'))
            for p in path_order
            if not any(
                is_in_path(normalize_path(str(p)), normalize_path(shim))
                for shim in SHIM_PATHS
            )
        ]
        path_entries.update(
            {
                p.as_posix(): PathEntry.create(
                    path=p.absolute(), is_root=True, only_python=self.only_python
                )
                for p in path_instances
                if p.exists()
            }
        )
        new_instance = attr.evolve(
            new_instance,
            path_order=[p.as_posix() for p in path_instances if p.exists()],
            paths=path_entries,
        )
        if os.name == "nt" and "windows" not in self.finders:
            new_instance = new_instance._setup_windows()
        #: slice in pyenv
        if self.check_for_pyenv() and "pyenv" not in self.finders:
            new_instance = new_instance._setup_pyenv()
        #: slice in asdf
        if self.check_for_asdf() and "asdf" not in self.finders:
            new_instance = new_instance._setup_asdf()
        venv = os.environ.get("VIRTUAL_ENV")
        if os.name == "nt":
            bin_dir = "Scripts"
        else:
            bin_dir = "bin"
        if venv and (new_instance.system or new_instance.global_search):
            # An active virtualenv's bin dir is searched first.
            p = ensure_path(venv)
            path_order = [(p / bin_dir).as_posix()] + new_instance.path_order
            new_instance = attr.evolve(new_instance, path_order=path_order)
            paths = new_instance.paths.copy()
            # NOTE(review): this keys paths by the Path object `p` while every
            # other insertion uses a posix *string* key — confirm lookups that
            # expect string keys still find this entry.
            paths[p] = new_instance.get_path(p.joinpath(bin_dir))
            new_instance = attr.evolve(new_instance, paths=paths)
        if new_instance.system:
            # Prefer the running interpreter's bin directory above all else.
            syspath = Path(sys.executable)
            syspath_bin = syspath.parent
            if syspath_bin.name != bin_dir and syspath_bin.joinpath(bin_dir).exists():
                syspath_bin = syspath_bin / bin_dir
            path_order = [syspath_bin.as_posix()] + new_instance.path_order
            paths = new_instance.paths.copy()
            paths[syspath_bin] = PathEntry.create(
                path=syspath_bin, is_root=True, only_python=False
            )
            new_instance = attr.evolve(new_instance, path_order=path_order, paths=paths)
        return new_instance

    def _get_last_instance(self, path):
        # type: (str) -> int
        """Return the index in path_order of the last entry containing *path*.

        :raises ValueError: when no entry on the path contains the target.
        """
        reversed_paths = reversed(self.path_order)
        paths = [normalize_path(p) for p in reversed_paths]
        normalized_target = normalize_path(path)
        last_instance = next(iter(p for p in paths if normalized_target in p), None)
        if last_instance is None:
            raise ValueError("No instance found on path for target: {0!s}".format(path))
        # NOTE(review): `last_instance` is a *normalized* path, while
        # path_order holds the original strings — .index() assumes they are
        # identical; confirm normalize_path is a no-op for stored entries.
        path_index = self.path_order.index(last_instance)
        return path_index

    def _slice_in_paths(self, start_idx, paths):
        # type: (int, List[Path]) -> "SystemPath"
        """Insert *paths* into path_order after position *start_idx*
        (0 = prepend, -1 = append) and return the evolved instance."""
        before_path = []  # type: List[str]
        after_path = []  # type: List[str]
        if start_idx == 0:
            after_path = self.path_order[:]
        elif start_idx == -1:
            before_path = self.path_order[:]
        else:
            before_path = self.path_order[: start_idx + 1]
            after_path = self.path_order[start_idx + 2 :]
        path_order = before_path + [p.as_posix() for p in paths] + after_path
        if path_order == self.path_order:
            return self
        return attr.evolve(self, path_order=path_order)

    def _remove_path(self, path):
        # type: (str) -> "SystemPath"
        """Return an evolved copy with *path* removed from both path_order and
        the paths mapping (comparison done on normalized paths)."""
        path_copy = [p for p in reversed(self.path_order[:])]
        new_order = []
        target = normalize_path(path)
        path_map = {normalize_path(pth): pth for pth in self.paths.keys()}
        new_paths = self.paths.copy()
        if target in path_map:
            del new_paths[path_map[target]]
        for current_path in path_copy:
            normalized = normalize_path(current_path)
            if normalized != target:
                new_order.append(normalized)
        new_order = [ensure_path(p).as_posix() for p in reversed(new_order)]
        return attr.evolve(self, path_order=new_order, paths=new_paths)

    def _setup_asdf(self):
        # type: () -> "SystemPath"
        """Create and register an asdf finder, slicing its version directories
        into the search order in place of the asdf shims."""
        if "asdf" in self.finders and self.asdf_finder is not None:
            return self

        from .python import PythonFinder

        os_path = os.environ["PATH"].split(os.pathsep)
        asdf_finder = PythonFinder.create(
            root=ASDF_DATA_DIR,
            ignore_unsupported=True,
            sort_function=parse_asdf_version_order,
            version_glob_path="installs/python/*",
        )
        asdf_index = None
        try:
            asdf_index = self._get_last_instance(ASDF_DATA_DIR)
        except ValueError:
            # Not on the search path yet: prepend if the first PATH entry is
            # inside asdf, otherwise append.
            asdf_index = 0 if is_in_path(next(iter(os_path), ""), ASDF_DATA_DIR) else -1
        if asdf_index is None:
            # we are in a virtualenv without global pyenv on the path, so we should
            # not write pyenv to the path here
            return self
        # * These are the root paths for the finder
        _ = [p for p in asdf_finder.roots]
        new_instance = self._slice_in_paths(asdf_index, [asdf_finder.root])
        # NOTE(review): copies self.paths here, whereas _setup_pyenv copies
        # new_instance.paths — if _slice_in_paths ever changed paths this
        # would drop that change; confirm the asymmetry is intentional.
        paths = self.paths.copy()
        paths[asdf_finder.root] = asdf_finder
        paths.update(asdf_finder.roots)
        return (
            attr.evolve(new_instance, paths=paths, asdf_finder=asdf_finder)
            ._remove_path(normalize_path(os.path.join(ASDF_DATA_DIR, "shims")))
            ._register_finder("asdf", asdf_finder)
        )

    def reload_finder(self, finder_name):
        # type: (str) -> "SystemPath"
        """Throw away and rebuild the named finder ("pyenv", "asdf", ...).

        :raises TypeError: when *finder_name* is None.
        :raises ValueError: when the name matches no finder attribute or no
            ``_setup_<name>`` method.
        """
        if finder_name is None:
            raise TypeError("Must pass a string as the name of the target finder")
        finder_attr = "{0}_finder".format(finder_name)
        setup_attr = "_setup_{0}".format(finder_name)
        try:
            current_finder = getattr(self, finder_attr)  # type: Any
        except AttributeError:
            raise ValueError("Must pass a valid finder to reload.")
        try:
            setup_fn = getattr(self, setup_attr)
        except AttributeError:
            raise ValueError("Finder has no valid setup function: %s" % finder_name)
        if current_finder is None:
            # TODO: This is called 'reload', should we load a new finder for the first
            # time here? lets just skip that for now to avoid unallowed finders
            pass
        if (finder_name == "pyenv" and not PYENV_INSTALLED) or (
            finder_name == "asdf" and not ASDF_INSTALLED
        ):
            # Don't allow loading of finders that aren't explicitly 'installed' as it were
            return self
        setattr(self, finder_attr, None)
        if finder_name in self.__finders:
            del self.__finders[finder_name]
        return setup_fn()

    def _setup_pyenv(self):
        # type: () -> "SystemPath"
        """Create and register a pyenv finder, slicing its version directories
        into the search order in place of the pyenv shims."""
        if "pyenv" in self.finders and self.pyenv_finder is not None:
            return self

        from .python import PythonFinder

        os_path = os.environ["PATH"].split(os.pathsep)
        pyenv_finder = PythonFinder.create(
            root=PYENV_ROOT,
            sort_function=parse_pyenv_version_order,
            version_glob_path="versions/*",
            ignore_unsupported=self.ignore_unsupported,
        )
        pyenv_index = None
        try:
            pyenv_index = self._get_last_instance(PYENV_ROOT)
        except ValueError:
            pyenv_index = 0 if is_in_path(next(iter(os_path), ""), PYENV_ROOT) else -1
        if pyenv_index is None:
            # we are in a virtualenv without global pyenv on the path, so we should
            # not write pyenv to the path here
            return self
        # * These are the root paths for the finder
        _ = [p for p in pyenv_finder.roots]
        new_instance = self._slice_in_paths(pyenv_index, [pyenv_finder.root])
        paths = new_instance.paths.copy()
        paths[pyenv_finder.root] = pyenv_finder
        paths.update(pyenv_finder.roots)
        return (
            attr.evolve(new_instance, paths=paths, pyenv_finder=pyenv_finder)
            ._remove_path(os.path.join(PYENV_ROOT, "shims"))
            ._register_finder("pyenv", pyenv_finder)
        )

    def _setup_windows(self):
        # type: () -> "SystemPath"
        """Create and register a Windows registry finder, appending its root
        paths to the search order."""
        if "windows" in self.finders and self.windows_finder is not None:
            return self

        from .windows import WindowsFinder

        windows_finder = WindowsFinder.create()
        root_paths = (p for p in windows_finder.paths if p.is_root)
        path_addition = [p.path.as_posix() for p in root_paths]
        new_path_order = self.path_order[:] + path_addition
        new_paths = self.paths.copy()
        # NOTE(review): root_paths is a generator already exhausted by the
        # path_addition comprehension above, so this update() adds nothing —
        # confirm whether new_paths is meant to receive the root entries.
        new_paths.update({p.path: p for p in root_paths})
        return attr.evolve(
            self,
            windows_finder=windows_finder,
            path_order=new_path_order,
            paths=new_paths,
        )._register_finder("windows", windows_finder)

    def get_path(self, path):
        # type: (Union[str, Path]) -> PathType
        """Return the PathEntry (or finder) for *path*, creating one lazily
        for paths that are in path_order and exist on disk.

        :raises TypeError: when *path* is None.
        :raises ValueError: when no entry exists and none can be created.
        """
        if path is None:
            raise TypeError("A path must be provided in order to generate a path entry.")
        path = ensure_path(path)
        _path = self.paths.get(path)
        if not _path:
            _path = self.paths.get(path.as_posix())
        if not _path and path.as_posix() in self.path_order and path.exists():
            _path = PathEntry.create(
                path=path.absolute(), is_root=True, only_python=self.only_python
            )
            self.paths[path.as_posix()] = _path
        if not _path:
            raise ValueError("Path not found or generated: {0!r}".format(path))
        return _path

    def _get_paths(self):
        # type: () -> Generator[Union[PathType, WindowsFinder], None, None]
        # Yield entries in search order, silently skipping unresolvable paths.
        for path in self.path_order:
            try:
                entry = self.get_path(path)
            except ValueError:
                continue
            else:
                yield entry

    @cached_property
    def path_entries(self):
        # type: () -> List[Union[PathType, WindowsFinder]]
        paths = list(self._get_paths())
        return paths

    def find_all(self, executable):
        # type: (str) -> List[Union[PathEntry, FinderType]]
        """
        Search the path for an executable. Return all copies.

        :param executable: Name of the executable
        :type executable: str
        :returns: List[PathEntry]
        """
        sub_which = operator.methodcaller("which", executable)
        filtered = (sub_which(self.get_path(k)) for k in self.path_order)
        return list(filtered)

    def which(self, executable):
        # type: (str) -> Union[PathEntry, None]
        """
        Search for an executable on the path.

        :param executable: Name of the executable to be located.
        :type executable: str
        :returns: :class:`~pythonfinder.models.PathEntry` object.
        """
        sub_which = operator.methodcaller("which", executable)
        filtered = (sub_which(self.get_path(k)) for k in self.path_order)
        return next(iter(f for f in filtered if f is not None), None)

    def _filter_paths(self, finder):
        # type: (Callable) -> Iterator
        # Apply *finder* to every path entry and flatten the non-None results.
        for path in self._get_paths():
            if path is None:
                continue
            python_versions = finder(path)
            if python_versions is not None:
                for python in python_versions:
                    if python is not None:
                        yield python

    def _get_all_pythons(self, finder):
        # type: (Callable) -> Iterator
        for python in self._filter_paths(finder):
            if python is not None and python.is_python:
                yield python

    def get_pythons(self, finder):
        # type: (Callable) -> Iterator
        """Yield every python entry produced by *finder*, newest version first."""
        sort_key = operator.attrgetter("as_python.version_sort")
        pythons = [entry for entry in self._get_all_pythons(finder)]
        for python in sorted(pythons, key=sort_key, reverse=True):
            if python is not None:
                yield python

    def find_all_python_versions(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
    ):
        # type: (...) -> List[PathEntry]
        """Search for a specific python version on the path. Return all copies

        :param major: Major python version to search for.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested.
        :rtype: List[:class:`~pythonfinder.models.PathEntry`]
        """
        sub_finder = operator.methodcaller(
            "find_all_python_versions", major, minor, patch, pre, dev, arch, name
        )
        alternate_sub_finder = None
        if major and not (minor or patch or pre or dev or arch or name):
            # A bare "major" may actually be a version *name* (e.g.
            # "anaconda3-5.3.0"); fall back to a by-name search if the
            # numeric search finds nothing.
            alternate_sub_finder = operator.methodcaller(
                "find_all_python_versions", None, None, None, None, None, None, major
            )
        if os.name == "nt" and self.windows_finder:
            windows_finder_version = sub_finder(self.windows_finder)
            if windows_finder_version:
                return windows_finder_version
        values = list(self.get_pythons(sub_finder))
        if not values and alternate_sub_finder is not None:
            values = list(self.get_pythons(alternate_sub_finder))
        return values

    def find_python_version(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[Union[str, int]]
        patch=None,  # type: Optional[Union[str, int]]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
        sort_by_path=False,  # type: bool
    ):
        # type: (...) -> PathEntry
        """Search for a specific python version on the path.

        :param major: Major python version to search for.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :param bool sort_by_path: Whether to sort by path -- default sort is by version(default: False)
        :return: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested.
        :rtype: :class:`~pythonfinder.models.PathEntry`
        """
        # Allow callers to pass e.g. major="3.7.2" or major="anaconda3-5.3.0".
        major, minor, patch, name = split_version_and_name(major, minor, patch, name)
        sub_finder = operator.methodcaller(
            "find_python_version", major, minor, patch, pre, dev, arch, name
        )
        alternate_sub_finder = None
        if name and not (minor or patch or pre or dev or arch or major):
            alternate_sub_finder = operator.methodcaller(
                "find_all_python_versions", None, None, None, None, None, None, name
            )
        if major and minor and patch:
            # NOTE(review): version_tuple / version_tuple_pre are computed but
            # never read below — dead code left from an earlier revision?
            _tuple_pre = pre if pre is not None else False
            _tuple_dev = dev if dev is not None else False
            version_tuple = (major, minor, patch, _tuple_pre, _tuple_dev)
            version_tuple_pre = (major, minor, patch, True, False)
        if os.name == "nt" and self.windows_finder:
            windows_finder_version = sub_finder(self.windows_finder)
            if windows_finder_version:
                return windows_finder_version
        if sort_by_path:
            # Path order beats version order: return the first match walking
            # the search path front to back.
            paths = [self.get_path(k) for k in self.path_order]
            for path in paths:
                found_version = sub_finder(path)
                if found_version:
                    return found_version
            if alternate_sub_finder:
                for path in paths:
                    found_version = alternate_sub_finder(path)
                    if found_version:
                        return found_version
        ver = next(iter(self.get_pythons(sub_finder)), None)
        if not ver and alternate_sub_finder is not None:
            ver = next(iter(self.get_pythons(alternate_sub_finder)), None)
        if ver:
            # Record the hit so repeated lookups can share discovered versions.
            if ver.as_python.version_tuple[:5] in self.python_version_dict:
                self.python_version_dict[ver.as_python.version_tuple[:5]].append(ver)
            else:
                self.python_version_dict[ver.as_python.version_tuple[:5]] = [ver]
        return ver

    @classmethod
    def create(
        cls,
        path=None,  # type: str
        system=False,  # type: bool
        only_python=False,  # type: bool
        global_search=True,  # type: bool
        ignore_unsupported=True,  # type: bool
    ):
        # type: (...) -> SystemPath
        """Create a new :class:`pythonfinder.models.SystemPath` instance.

        :param path: Search path to prepend when searching, defaults to None
        :param path: str, optional
        :param bool system: Whether to use the running python by default instead of searching, defaults to False
        :param bool only_python: Whether to search only for python executables, defaults to False
        :param bool ignore_unsupported: Whether to ignore unsupported python versions, if False, an error is raised, defaults to True
        :return: A new :class:`pythonfinder.models.SystemPath` instance.
        :rtype: :class:`pythonfinder.models.SystemPath`
        """
        path_entries = defaultdict(
            PathEntry
        )  # type: DefaultDict[str, Union[PythonFinder, PathEntry]]
        paths = []  # type: List[str]
        if ignore_unsupported:
            # Downstream version parsing consults this environment flag.
            os.environ["PYTHONFINDER_IGNORE_UNSUPPORTED"] = fs_str("1")
        if global_search:
            if "PATH" in os.environ:
                paths = os.environ["PATH"].split(os.pathsep)
        path_order = []  # type: List[str]
        if path:
            # An explicitly supplied path is searched before everything else.
            path_order = [path]
            path_instance = ensure_path(path)
            path_entries.update(
                {
                    path_instance.as_posix(): PathEntry.create(
                        path=path_instance.absolute(),
                        is_root=True,
                        only_python=only_python,
                    )
                }
            )
            paths = [path] + paths
        paths = [p for p in paths if not any(is_in_path(p, shim) for shim in SHIM_PATHS)]
        _path_objects = [ensure_path(p.strip('"')) for p in paths]
        path_entries.update(
            {
                p.as_posix(): PathEntry.create(
                    path=p.absolute(), is_root=True, only_python=only_python
                )
                for p in _path_objects
                if p.exists()
            }
        )
        instance = cls(
            paths=path_entries,
            path_order=path_order,
            only_python=only_python,
            system=system,
            global_search=global_search,
            ignore_unsupported=ignore_unsupported,
        )
        instance = instance._run_setup()
        return instance
class Pipfile(object):
    """High-level interface to a project's ``Pipfile`` (and optionally its
    ``pyproject.toml``), exposing its packages as Requirement objects and
    supporting dict-style access to its sections."""

    #: Absolute path to the Pipfile (or project directory at construction).
    path = attr.ib(validator=is_path, type=Path)
    #: The project file wrapper holding the parsed model and its location.
    projectfile = attr.ib(validator=is_projectfile, type=ProjectFile)
    #: The parsed pipfile model backing this instance.
    _pipfile = attr.ib(type=PipfileLoader)
    #: Parsed pyproject.toml document (empty until _read_pyproject runs).
    _pyproject = attr.ib(
        default=attr.Factory(tomlkit.document), type=tomlkit.toml_document.TOMLDocument
    )
    #: The [build-system] table (or a synthesized setuptools fallback).
    build_system = attr.ib(default=attr.Factory(dict), type=dict)
    #: Lazily built Requirement lists (see `requirements` / `dev_requirements`).
    _requirements = attr.ib(default=attr.Factory(list), type=list)
    _dev_requirements = attr.ib(default=attr.Factory(list), type=list)

    @path.default
    def _get_path(self):
        # type: () -> Path
        return Path(os.curdir).absolute()

    @projectfile.default
    def _get_projectfile(self):
        # type: () -> ProjectFile
        return self.load_projectfile(os.curdir, create=False)

    @_pipfile.default
    def _get_pipfile(self):
        # type: () -> Union[plette.pipfiles.Pipfile, PipfileLoader]
        return self.projectfile.model

    @property
    def root(self):
        """The project directory containing the Pipfile."""
        return self.path.parent

    @property
    def extended_keys(self):
        """All pseudo-section keys supported by __getitem__, e.g.
        ("packages", "vcs") for the derived "packages-vcs" view."""
        return [
            k
            for k in itertools.product(
                ("packages", "dev-packages"), ("", "vcs", "editable")
            )
        ]

    @property
    def pipfile(self):
        # type: () -> Union[PipfileLoader, plette.pipfiles.Pipfile]
        return self._pipfile

    def get_deps(self, dev=False, only=True):
        # type: (bool, bool) -> Dict[Text, Dict[Text, Union[List[Text], Text]]]
        """Return dependency entries from the pipfile.

        :param bool dev: Include the dev-packages section, defaults to False
        :param bool only: With ``dev=True``, return *only* dev-packages,
            defaults to True
        :return: A mapping of package name to its pipfile entry.
        """
        deps = {}  # type: Dict[Text, Dict[Text, Union[List[Text], Text]]]
        if dev:
            deps.update(dict(self.pipfile._data.get("dev-packages", {})))
            if only:
                return deps
        return tomlkit_value_to_python(
            merge_items([deps, dict(self.pipfile._data.get("packages", {}))])
        )

    def get(self, k):
        # type: (Text) -> Any
        return self.__getitem__(k)

    def __contains__(self, k):
        # type: (Text) -> bool
        check_pipfile = k in self.extended_keys or self.pipfile.__contains__(k)
        if check_pipfile:
            return True
        return False

    def __getitem__(self, k, *args, **kwargs):
        # type: ignore
        """Index into the pipfile; keys like ``packages-vcs`` or
        ``dev-packages-editable`` return a filtered view of the base section.

        :raises KeyError: when the key matches no section or derived view.
        """
        retval = None
        pipfile = self._pipfile
        section = None
        pkg_type = None
        try:
            retval = pipfile[k]
        except KeyError:
            if "-" in k:
                # Derived view: "<section>-<vcs|editable>".
                section, _, pkg_type = k.rpartition("-")
                vals = getattr(pipfile.get(section, {}), "_data", {})
                vals = tomlkit_value_to_python(vals)
                if pkg_type == "vcs":
                    retval = {k: v for k, v in vals.items() if is_vcs(v)}
                elif pkg_type == "editable":
                    retval = {k: v for k, v in vals.items() if is_editable(v)}
            if retval is None:
                raise
        else:
            # Unwrap tomlkit containers to their raw data when possible.
            retval = getattr(retval, "_data", retval)
        return retval

    def __getattr__(self, k, *args, **kwargs):
        # type: ignore
        # Fall back to the underlying pipfile model for unknown attributes.
        retval = None
        pipfile = super(Pipfile, self).__getattribute__("_pipfile")
        try:
            retval = super(Pipfile, self).__getattribute__(k)
        except AttributeError:
            retval = getattr(pipfile, k, None)
        if retval is not None:
            return retval
        return super(Pipfile, self).__getattribute__(k, *args, **kwargs)

    @property
    def requires_python(self):
        # type: () -> Optional[Text]
        """The declared python_version (or python_full_version) requirement,
        or None when the pipfile declares neither."""
        return getattr(
            self._pipfile.requires,
            "python_version",
            getattr(self._pipfile.requires, "python_full_version", None),
        )

    @property
    def allow_prereleases(self):
        # type: () -> bool
        return self._pipfile.get("pipenv", {}).get("allow_prereleases", False)

    @classmethod
    def read_projectfile(cls, path):
        # type: (Text) -> ProjectFile
        """Read the specified project file and provide an interface for writing/updating.

        :param Text path: Path to the target file.
        :return: A project file with the model and location for interaction
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """
        pf = ProjectFile.read(path, PipfileLoader, invalid_ok=True)
        return pf

    @classmethod
    def load_projectfile(cls, path, create=False):
        # type: (Text, bool) -> ProjectFile
        """Given a path, load or create the necessary pipfile.

        :param Text path: Path to the project root or pipfile
        :param bool create: Whether to create the pipfile if not found, defaults to True
        :raises OSError: Thrown if the project root directory doesn't exist
        :raises FileNotFoundError: Thrown if the pipfile doesn't exist and ``create=False``
        :return: A project file instance for the supplied project
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """
        if not path:
            raise RuntimeError("Must pass a path to classmethod 'Pipfile.load'")
        if not isinstance(path, Path):
            path = Path(path).absolute()
        pipfile_path = path if path.is_file() else path.joinpath("Pipfile")
        project_path = pipfile_path.parent
        if not project_path.exists():
            raise FileNotFoundError("%s is not a valid project path!" % path)
        elif not pipfile_path.exists() or not pipfile_path.is_file():
            if not create:
                raise RequirementError("%s is not a valid Pipfile" % pipfile_path)
        # read_projectfile tolerates a missing file (invalid_ok=True), which
        # covers the create=True case.
        return cls.read_projectfile(pipfile_path.as_posix())

    @classmethod
    def load(cls, path, create=False):
        # type: (Text, bool) -> Pipfile
        """Given a path, load or create the necessary pipfile.

        :param Text path: Path to the project root or pipfile
        :param bool create: Whether to create the pipfile if not found, defaults to True
        :raises OSError: Thrown if the project root directory doesn't exist
        :raises FileNotFoundError: Thrown if the pipfile doesn't exist and ``create=False``
        :return: A pipfile instance pointing at the supplied project
        :rtype:: class:`~requirementslib.models.pipfile.Pipfile`
        """
        projectfile = cls.load_projectfile(path, create=create)
        pipfile = projectfile.model
        creation_args = {
            "projectfile": projectfile,
            "pipfile": pipfile,
            "path": Path(projectfile.location),
        }
        return cls(**creation_args)

    def write(self):
        # type: () -> None
        """Persist the current pipfile model back to disk."""
        self.projectfile.model = copy.deepcopy(self._pipfile)
        self.projectfile.write()

    @property
    def dev_packages(self):
        # type: () -> List[Requirement]
        return self.dev_requirements

    @property
    def packages(self):
        # type: () -> List[Requirement]
        return self.requirements

    @property
    def dev_requirements(self):
        # type: () -> List[Requirement]
        """Requirement objects for the [dev-packages] section (cached)."""
        if not self._dev_requirements:
            packages = tomlkit_value_to_python(self.pipfile.get("dev-packages", {}))
            self._dev_requirements = [
                Requirement.from_pipfile(k, v)
                for k, v in packages.items()
                if v is not None
            ]
        return self._dev_requirements

    @property
    def requirements(self):
        # type: () -> List[Requirement]
        """Requirement objects for the [packages] section (cached)."""
        if not self._requirements:
            packages = tomlkit_value_to_python(self.pipfile.get("packages", {}))
            self._requirements = [
                Requirement.from_pipfile(k, v)
                for k, v in packages.items()
                if v is not None
            ]
        return self._requirements

    def _read_pyproject(self):
        # type: () -> None
        """Parse pyproject.toml (when present) and populate
        **self.build_system**, synthesizing the setuptools legacy backend
        when the project declares no usable [build-system] table."""
        pyproject = self.path.parent.joinpath("pyproject.toml")
        if pyproject.exists():
            self._pyproject = tomlkit.loads(pyproject.read_text())
            build_system = self._pyproject.get("build-system", None)
            # BUGFIX: the pyproject key is spelled "build-backend" (PEP 518);
            # checking "build_backend" here always came up empty and clobbered
            # any user-specified backend with the setuptools legacy fallback.
            if build_system and not build_system.get("build-backend"):
                build_system["build-backend"] = "setuptools.build_meta:__legacy__"
            elif not build_system or not build_system.get("requires"):
                build_system = {
                    "requires": ["setuptools>=40.8", "wheel"],
                    "build-backend": "setuptools.build_meta:__legacy__",
                }
            self.build_system = build_system

    @property
    def build_requires(self):
        # type: () -> List[Text]
        if not self.build_system:
            self._read_pyproject()
        return self.build_system.get("requires", [])

    @property
    def build_backend(self):
        # type: () -> Text
        if not self.build_system:
            self._read_pyproject()
        return self.build_system.get("build-backend", None)
class DependencyResolver(object):
    """A backtracking dependency resolver: repeatedly pins candidates for the
    current set of abstract dependencies until two rounds agree."""

    #: Concrete pinned candidates, keyed by dependency name
    pinned_deps = attr.ib(default=attr.Factory(dict))
    #: A dictionary of abstract dependencies by name
    dep_dict = attr.ib(default=attr.Factory(dict))
    #: A dictionary of sets of version numbers that are valid for a candidate currently
    candidate_dict = attr.ib(default=attr.Factory(dict))
    #: A historical record of pins
    pin_history = attr.ib(default=attr.Factory(dict))
    #: Whether to allow prerelease dependencies
    allow_prereleases = attr.ib(default=False)
    #: Stores hashes for each dependency
    hashes = attr.ib(default=attr.Factory(dict))
    #: A hash cache
    hash_cache = attr.ib(default=attr.Factory(HashCache))
    #: A finder for searching the index
    finder = attr.ib(default=None)
    #: Whether to include hashes even from incompatible wheels
    include_incompatible_hashes = attr.ib(default=True)
    #: A cache for storing available candidates when using all wheels
    _available_candidates_cache = attr.ib(default=attr.Factory(dict))

    @classmethod
    def create(cls, finder=None, allow_prereleases=False, get_all_hashes=True):
        """Build a resolver, constructing a default package finder when none
        is supplied (with ``--pre`` when prereleases are allowed)."""
        if not finder:
            from .dependencies import get_finder

            finder_args = []
            if allow_prereleases:
                finder_args.append("--pre")
            finder = get_finder(*finder_args)
        creation_kwargs = {
            "allow_prereleases": allow_prereleases,
            "include_incompatible_hashes": get_all_hashes,
            "finder": finder,
            "hash_cache": HashCache(),
        }
        resolver = cls(**creation_kwargs)
        return resolver

    @property
    def dependencies(self):
        """The current abstract dependencies."""
        return list(self.dep_dict.values())

    @property
    def resolution(self):
        """The currently pinned concrete candidates."""
        return list(self.pinned_deps.values())

    def add_abstract_dep(self, dep):
        """Add an abstract dependency by either creating a new entry or
        merging with an old one.

        :param dep: An abstract dependency to add
        :type dep: :class:`~requirementslib.models.dependency.AbstractDependency`
        :raises ResolutionError: Raised when the given dependency is not compatible with
                an existing abstract dependency.
        """
        if dep.name in self.dep_dict:
            # Merge: narrow the candidate set to the intersection with the
            # already-known abstract dependency of the same name.
            compatible_versions = self.dep_dict[dep.name].compatible_versions(dep)
            if compatible_versions:
                self.candidate_dict[dep.name] = compatible_versions
                self.dep_dict[dep.name] = self.dep_dict[dep.name].compatible_abstract_dep(
                    dep
                )
            else:
                raise ResolutionError
        else:
            self.candidate_dict[dep.name] = dep.version_set
            self.dep_dict[dep.name] = dep

    def pin_deps(self):
        """Pins the current abstract dependencies and adds them to the history dict.

        Adds any new dependencies to the abstract dependencies already present by
        merging them together to form new, compatible abstract dependencies.
        """
        for name in list(self.dep_dict.keys()):
            candidates = self.dep_dict[name].candidates[:]
            abs_dep = self.dep_dict[name]
            while candidates:
                pin = candidates.pop()
                # Move on from existing pins if the new pin isn't compatible
                if name in self.pinned_deps:
                    if self.pinned_deps[name].editable:
                        # Editable pins are kept as-is.
                        continue
                    old_version = version_from_ireq(self.pinned_deps[name])
                    if not pin.editable:
                        new_version = version_from_ireq(pin)
                        if (
                            new_version != old_version
                            and new_version not in self.candidate_dict[name]
                        ):
                            continue
                pin.parent = abs_dep.parent
                pin_subdeps = self.dep_dict[name].get_deps(pin)
                # Snapshot resolver state so an incompatible sub-dependency
                # can be rolled back and the next candidate tried.
                backup = self.dep_dict.copy(), self.candidate_dict.copy()
                try:
                    for pin_dep in pin_subdeps:
                        self.add_abstract_dep(pin_dep)
                except ResolutionError:
                    self.dep_dict, self.candidate_dict = backup
                    continue
                else:
                    self.pinned_deps[name] = pin
                    break

    def resolve(self, root_nodes, max_rounds=20):
        """Resolves dependencies using a backtracking resolver and multiple endpoints.

        Note: this resolver caches aggressively.

        Runs for *max_rounds* or until any two pinning rounds yield the same outcome.

        :param root_nodes: A list of the root requirements.
        :type root_nodes: list[:class:`~requirementslib.models.requirements.Requirement`]
        :param max_rounds: The max number of resolution rounds, defaults to 20
        :param max_rounds: int, optional
        :raises RuntimeError: Raised when max rounds is exceeded without a resolution.
        """
        if self.dep_dict:
            raise RuntimeError("Do not use the same resolver more than once")

        if not self.hash_cache:
            self.hash_cache = HashCache()

        # Coerce input into AbstractDependency instances.
        # We accept str, Requirement, and AbstractDependency as input.
        from ..utils import log
        from .dependencies import AbstractDependency

        for dep in root_nodes:
            if isinstance(dep, str):
                dep = AbstractDependency.from_string(dep)
            elif not isinstance(dep, AbstractDependency):
                dep = AbstractDependency.from_requirement(dep)
            self.add_abstract_dep(dep)
        for round_ in range(max_rounds):
            self.pin_deps()
            self.pin_history[round_] = self.pinned_deps.copy()

            if round_ > 0:
                # New pins this round relative to the previous round.
                previous_round = set(self.pin_history[round_ - 1].values())
                current_values = set(self.pin_history[round_].values())
                difference = current_values - previous_round
            else:
                difference = set(self.pin_history[round_].values())

            log.debug("\n")
            log.debug("{:=^30}".format(" Round {0} ".format(round_)))
            log.debug("\n")
            if difference:
                log.debug("New Packages: ")
                for d in difference:
                    log.debug("{:>30}".format(format_requirement(d)))
            elif round_ >= 3:
                # Stable: no changes for a round after a minimum of 3 rounds
                # (current_values is bound because round_ >= 3 implies > 0).
                log.debug("Stable Pins: ")
                for d in current_values:
                    log.debug("{:>30}".format(format_requirement(d)))
                return
            else:
                log.debug("No New Packages.")
        # TODO: Raise a better error.
        raise RuntimeError("cannot resolve after {} rounds".format(max_rounds))

    def get_hashes(self):
        """Return a copy of the name -> hash-set mapping for all pinned
        dependencies, computing and caching any missing entries."""
        for dep in self.pinned_deps.values():
            if dep.name not in self.hashes:
                self.hashes[dep.name] = self.get_hashes_for_one(dep)
        return self.hashes.copy()

    def get_hashes_for_one(self, ireq):
        """Collect artifact hashes for a single pinned requirement across all
        matching candidates (all platforms/ABIs via allow_all_wheels).

        :raises TypeError: when *ireq* is not an exactly pinned requirement.
        """
        if not self.finder:
            from .dependencies import get_finder

            finder_args = []
            if self.allow_prereleases:
                finder_args.append("--pre")
            self.finder = get_finder(*finder_args)

        if ireq.editable:
            # Editable installs have no stable artifact to hash.
            return set()

        from pipenv.vendor.pip_shims import VcsSupport

        vcs = VcsSupport()
        if (
            ireq.link
            and ireq.link.scheme in vcs.all_schemes
            and "ssh" in ireq.link.scheme
        ):
            # ssh-based VCS links cannot be fetched for hashing.
            return set()

        if not is_pinned_requirement(ireq):
            raise TypeError("Expected pinned requirement, got {}".format(ireq))

        matching_candidates = set()
        with self.allow_all_wheels():
            from .dependencies import find_all_matches

            matching_candidates = find_all_matches(
                self.finder, ireq, pre=self.allow_prereleases
            )

        return {
            self.hash_cache.get_hash(
                getattr(candidate, "location", getattr(candidate, "link", None))
            )
            for candidate in matching_candidates
        }

    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.

        This also saves the candidate cache and set a new one, or else the results from
        the previous non-patched calls will interfere.
        """

        def _wheel_supported(self, tags=None):
            # Ignore current platform. Support everything.
            return True

        def _wheel_support_index_min(self, tags=None):
            # All wheels are equal priority for sorting.
            return 0

        original_wheel_supported = Wheel.supported
        original_support_index_min = Wheel.support_index_min

        Wheel.supported = _wheel_supported
        Wheel.support_index_min = _wheel_support_index_min

        try:
            yield
        finally:
            # Always restore the real implementations, even on error.
            Wheel.supported = original_wheel_supported
            Wheel.support_index_min = original_support_index_min
class PackageInfo(object):
    """Package metadata as returned by the ``info`` section of the PyPI JSON API."""

    name = attr.ib(type=str)
    version = attr.ib(type=str)
    package_url = attr.ib(type=str)
    summary = attr.ib(type=str, default=None)  # type: Optional[str]
    author = attr.ib(type=str, default=None)  # type: Optional[str]
    keywords = attr.ib(factory=list, converter=split_keywords)  # type: List[str]
    description = attr.ib(type=str, default="")
    download_url = attr.ib(type=str, default="")
    home_page = attr.ib(type=str, default="")
    license = attr.ib(type=str, default="")
    maintainer = attr.ib(type=str, default="")
    maintainer_email = attr.ib(type=str, default="")
    downloads = attr.ib(factory=dict)  # type: Dict[str, int]
    docs_url = attr.ib(default=None)  # type: Optional[str]
    platform = attr.ib(type=str, default="")
    project_url = attr.ib(type=str, default="")
    project_urls = attr.ib(factory=dict)  # type: Dict[str, str]
    requires_python = attr.ib(default=None)  # type: Optional[str]
    requires_dist = attr.ib(factory=list)  # type: List[Dependency]
    release_url = attr.ib(default=None)  # type: Optional[str]
    description_content_type = attr.ib(type=str, default="text/md")
    bugtrack_url = attr.ib(default=None)  # type: Optional[str]
    classifiers = attr.ib(factory=list)  # type: List[str]
    author_email = attr.ib(default=None)  # type: Optional[str]
    markers = attr.ib(default=None)  # type: Optional[str]
    dependencies = attr.ib(default=None)  # type: Optional[Tuple[Dependency]]

    @classmethod
    def from_json(cls, info_json):
        # type: (TPackageInfo) -> "PackageInfo"
        """Build a :class:`PackageInfo` from a raw ``info`` JSON mapping.

        Unknown keys are dropped by :func:`filter_dict` before construction.
        """
        return cls(**filter_dict(info_json))  # type: ignore

    def to_dependency(self):
        # type: () -> Dependency
        """Convert this package's metadata into its :class:`Dependency` form."""
        return Dependency.from_info(self)

    def create_dependencies(self, force=False):
        # type: (bool) -> "PackageInfo"
        """Create values for **self.dependencies**.

        :param bool force: Sets **self.dependencies** to an empty tuple if it would be
            None, defaults to False
        :return: An updated instance of the current object with **self.dependencies**
            updated accordingly.
        :rtype: :class:`PackageInfo`
        """
        if not self.dependencies and not self.requires_dist:
            # Nothing to derive from; optionally normalize ``dependencies`` to ().
            if force:
                return attr.evolve(self, dependencies=tuple())
            return self
        parent = self.to_dependency()
        # Re-parent any pre-existing dependencies, skipping null entries.
        existing = tuple() if not self.dependencies else self.dependencies
        collected = {dep.add_parent(parent) for dep in existing if dep is not None}
        # Fold in dependencies parsed from ``requires_dist``.
        generated = create_dependencies(self.requires_dist, parent=parent)
        if generated is not None:
            collected.update(dep for dep in generated if dep is not None)
        return attr.evolve(self, dependencies=tuple(sorted(collected)))
class WindowsFinder(BaseFinder):
    """Finder that discovers python installs from the Windows registry (PEP 514)."""

    #: Root :class:`PathEntry` instances discovered during the registry scan
    paths = attr.ib(default=attr.Factory(list), type=list)
    #: Every :class:`PythonVersion` parsed out of the registry entries
    version_list = attr.ib(default=attr.Factory(list), type=list)
    #: Mapping of version tuple -> root entry, populated lazily by ``get_versions``
    _versions = attr.ib()  # type: DefaultDict[Tuple, PathEntry]
    #: Mapping of executable path -> owning entry, populated by ``get_pythons``
    _pythons = attr.ib()  # type: DefaultDict[str, PathEntry]

    def find_all_python_versions(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
    ):
        # type: (...) -> List[PathEntry]
        """Return entries for every discovered python matching the criteria.

        ``None`` criteria act as wildcards (delegated to ``PythonVersion.matches``).
        Results are sorted newest-first by ``version_sort``.
        """
        version_matcher = operator.methodcaller("matches", major, minor, patch, pre, dev, arch, python_name=name)
        pythons = [py for py in self.version_list if version_matcher(py)]
        version_sort = operator.attrgetter("version_sort")
        # Keep only versions that know which PathEntry they came from.
        return [
            c.comes_from for c in sorted(pythons, key=version_sort, reverse=True)
            if c.comes_from
        ]

    def find_python_version(
        self,
        major=None,  # type: Optional[Union[str, int]]
        minor=None,  # type: Optional[int]
        patch=None,  # type: Optional[int]
        pre=None,  # type: Optional[bool]
        dev=None,  # type: Optional[bool]
        arch=None,  # type: Optional[str]
        name=None,  # type: Optional[str]
    ):
        # type: (...) -> Optional[PathEntry]
        """Return the best (newest) matching python entry, or ``None``."""
        return next(
            iter(v for v in self.find_all_python_versions(
                major=major,
                minor=minor,
                patch=patch,
                pre=pre,
                dev=dev,
                arch=arch,
                name=name,
            )),
            None,
        )

    @_versions.default
    def get_versions(self):
        # type: () -> DefaultDict[Tuple, PathEntry]
        """Scan the PEP 514 registry entries and build the versions mapping.

        Also populates ``self.version_list`` and ``self.paths`` as a side effect.
        """
        versions = defaultdict(
            PathEntry)  # type: DefaultDict[Tuple, PathEntry]
        from pipenv.vendor.pythonfinder._vendor.pep514tools import environment as pep514env
        env_versions = pep514env.findall()
        path = None
        for version_object in env_versions:
            install_path = getattr(version_object.info, "install_path", None)
            name = getattr(version_object, "tag", None)
            company = getattr(version_object, "company", None)
            if install_path is None:
                continue
            try:
                # ``__getattr__("")`` reads the registry key's default (unnamed)
                # value, i.e. the install directory itself.
                path = ensure_path(install_path.__getattr__(""))
            except AttributeError:
                continue
            if not path.exists():
                continue
            try:
                py_version = PythonVersion.from_windows_launcher(
                    version_object, name=name, company=company)
            except (InvalidPythonVersion, AttributeError):
                # Skip registry entries that do not describe a usable python.
                continue
            if py_version is None:
                continue
            self.version_list.append(py_version)
            python_path = (py_version.comes_from.path
                           if py_version.comes_from else py_version.executable)
            python_kwargs = {
                python_path: py_version
            } if python_path is not None else {}
            base_dir = PathEntry.create(path,
                                        is_root=True,
                                        only_python=True,
                                        pythons=python_kwargs)
            versions[py_version.version_tuple[:5]] = base_dir
            self.paths.append(base_dir)
        return versions

    @property
    def versions(self):
        # type: () -> DefaultDict[Tuple, PathEntry]
        # Lazily (re)build if the attrs default produced an empty mapping.
        if not self._versions:
            self._versions = self.get_versions()
        return self._versions

    @_pythons.default
    def get_pythons(self):
        # type: () -> DefaultDict[str, PathEntry]
        """Index discovered interpreters by their (posix-style) executable path."""
        pythons = defaultdict()  # type: DefaultDict[str, PathEntry]
        for version in self.version_list:
            _path = ensure_path(version.comes_from.path)
            pythons[_path.as_posix()] = version.comes_from
        return pythons

    @property
    def pythons(self):
        # type: () -> DefaultDict[str, PathEntry]
        return self._pythons

    @pythons.setter
    def pythons(self, value):
        # type: (DefaultDict[str, PathEntry]) -> None
        self._pythons = value

    @classmethod
    def create(cls, *args, **kwargs):
        # type: (Type[FinderType], Any, Any) -> FinderType
        """Factory hook; extra arguments are accepted for interface parity and ignored."""
        return cls()
class ReleaseUrl(object):
    """A single downloadable artifact (wheel or sdist) of a package release."""

    #: The MD5 digest of the given release
    md5_digest = attr.ib(type=Digest)
    #: The package type of the url
    packagetype = attr.ib(type=str, validator=attr.validators.in_(PACKAGE_TYPES))
    #: The upload timestamp from the package
    upload_time = attr.ib(
        type=datetime.datetime,
        converter=instance_check_converter(datetime.datetime, dateutil.parser.parse),  # type: ignore
    )
    #: The ISO8601 formatted upload timestamp of the package
    upload_time_iso_8601 = attr.ib(
        type=datetime.datetime,
        converter=instance_check_converter(datetime.datetime, dateutil.parser.parse),  # type: ignore
    )
    #: The size in bytes of the package
    size = attr.ib(type=int)
    #: The URL of the package
    url = attr.ib(type=str)
    #: The digests of the package
    digests = attr.ib(
        converter=instance_check_converter(list, create_digest_collection)  # type: ignore
    )  # type: List[Digest]
    #: The name of the package
    name = attr.ib(type=str, default=None)  # type: Optional[str]
    #: The available comments of the given upload
    comment_text = attr.ib(type=str, default="")
    #: Whether the url has been yanked from the server
    yanked = attr.ib(type=bool, default=False)
    #: The number of downloads (deprecated)
    downloads = attr.ib(type=int, default=-1)
    #: The filename of the current upload
    filename = attr.ib(type=str, default="")
    #: Whether the upload has a signature
    has_sig = attr.ib(type=bool, default=False)
    #: The python_version attribute of the upload (e.g. 'source', 'py27', etc)
    python_version = attr.ib(type=str, default="source")
    #: The 'requires_python' restriction on the package
    requires_python = attr.ib(type=str, default=None)  # type: Optional[str]
    #: A list of valid parsed tags from the upload
    tags = attr.ib(factory=list)  # type: List[ParsedTag]

    @property
    def is_wheel(self):
        # type: () -> bool
        """Whether this artifact is a wheel, judged by its file extension."""
        return os.path.splitext(self.filename)[-1].lower() == ".whl"

    @property
    def is_sdist(self):
        # type: () -> bool
        """Whether this artifact is a source distribution."""
        return self.python_version == "source"

    @property
    def markers(self):
        # type: () -> Optional[str]
        """Marker string derived from ``requires_python``, or ``None``."""
        # TODO: Compare dependencies in parent and add markers for python version
        # TODO: Compare dependencies in parent and add markers for platform
        # XXX: We can't use wheel-based markers until we do it via derived markers by
        # XXX: comparing in the parent (i.e. 'Release' instance or so) and merging
        # XXX: down to the common / minimal set of markers otherwise we wind up
        # XXX: with an unmanageable set and combinatorial explosion
        # if self.is_wheel:
        #     return self.get_markers_from_wheel()
        if self.requires_python:
            return marker_from_specifier(self.requires_python)
        return None

    @property
    def pep508_url(self):
        # type: () -> str
        """A PEP 508-style direct URL requirement string for this artifact."""
        markers = self.markers
        req_str = "{0} @ {1}#egg={0}".format(self.name, self.url)
        if markers:
            req_str = "{0}; {1}".format(req_str, markers)
        return req_str

    def get_markers_from_wheel(self):
        # type: () -> str
        """Derive an ``or``-joined marker string from this wheel's parsed tags.

        Returns ``""`` when no tags carry markers, or when the wheel is
        universal (supports both python 2 and 3 with no platform restriction).
        """
        supported_platforms = []  # type: List[str]
        supported_pyversions = []
        supported_abis = []
        markers = []
        for parsed_tag in self.tags:
            if parsed_tag.marker_string:
                markers.append(Marker(parsed_tag.marker_string))
            if parsed_tag.python_version:
                supported_pyversions.append(parsed_tag.python_version)
            if parsed_tag.abi:
                supported_abis.append(parsed_tag.abi)
        if not (markers or supported_platforms):
            return ""
        if (
            all(pyversion in supported_pyversions for pyversion in ["2", "3"])
            and not supported_platforms
        ):
            marker_line = ""
        else:
            marker_line = " or ".join(["{}".format(str(marker)) for marker in markers])
        return marker_line

    def get_dependencies(self):
        # type: () -> Tuple["ReleaseUrl", Dict[str, Union[List[str], str]]]
        """Fetch remote metadata and return ``(evolved_self, results)``.

        ``results`` contains ``requires_dist`` (a list of requirement strings)
        and ``requires_python``. For wheels the metadata is read remotely from
        the wheel itself; for sdists it is built from the PEP 508 URL, with
        failures swallowed into an empty dependency list (best-effort).
        """
        results = {"requires_python": None}
        requires_dist = []  # type: List[str]
        if self.is_wheel:
            metadata = get_remote_wheel_metadata(self.url)
            if metadata is not None:
                requires_dist = metadata.run_requires
                if not self.requires_python:
                    results["requires_python"] = metadata._legacy.get("Requires-Python")
        else:
            try:
                metadata = get_remote_sdist_metadata(self.pep508_url)
            except Exception:
                # Deliberate best-effort: sdist builds can fail for many reasons.
                requires_dist = []
            else:
                requires_dist = [str(v) for v in metadata.requires.values()]
        results["requires_dist"] = requires_dist
        # NOTE(review): ``requires_python`` is always present on attrs instances,
        # so this getattr default (the freshly fetched value) is never used and
        # the evolve is effectively a no-op for that field — confirm intended.
        requires_python = getattr(self, "requires_python", results["requires_python"])
        return attr.evolve(self, requires_python=requires_python), results

    @property
    def sha256(self):
        # type: () -> str
        # Raises StopIteration if no sha256 digest exists; callers filter on
        # ``url.sha256 is not None`` so presumably one always does — verify.
        return next(
            iter(digest for digest in self.digests if digest.algorithm == "sha256")
        ).value

    @classmethod
    def create(cls, release_dict, name=None):
        # type: (TReleaseUrlDict, Optional[str]) -> "ReleaseUrl"
        """Build a :class:`ReleaseUrl` from a raw PyPI release-url mapping.

        ``<algo>_digest`` keys are converted into :class:`Digest` instances;
        for wheels, the filename's tags are parsed into ``tags``.

        :raises TypeError: if a digest value is not a string.
        """
        valid_digest_keys = set("{0}_digest".format(k) for k in VALID_ALGORITHMS.keys())
        digest_keys = set(release_dict.keys()) & valid_digest_keys
        creation_kwargs = (
            {}
        )  # type: Dict[str, Union[bool, int, str, Digest, TDigestDict]]
        creation_kwargs = {k: v for k, v in release_dict.items() if k not in digest_keys}
        if name is not None:
            creation_kwargs["name"] = name
        for k in digest_keys:
            digest = release_dict[k]
            if not isinstance(digest, str):
                raise TypeError("Digests must be strings, got {!r}".format(digest))
            creation_kwargs[k] = Digest.create(k.replace("_digest", ""), digest)
        release_url = cls(**filter_dict(creation_kwargs))  # type: ignore
        if release_url.is_wheel:
            supported_tags = [
                parse_tag(Tag(*tag)) for tag in distlib.wheel.Wheel(release_url.url).tags
            ]
            release_url = attr.evolve(release_url, tags=supported_tags)
        return release_url
class VCSRepository(object):
    """A checked-out VCS repository, backed by pip's VCS support classes."""

    #: Cached run_command defaults installed by :meth:`monkeypatch_pip`
    DEFAULT_RUN_ARGS = None

    url = attr.ib()  # type: str
    name = attr.ib()  # type: str
    checkout_directory = attr.ib()  # type: str
    vcs_type = attr.ib()  # type: str
    parsed_url = attr.ib()  # type: URI
    subdirectory = attr.ib(default=None)  # type: Optional[str]
    commit_sha = attr.ib(default=None)  # type: Optional[str]
    ref = attr.ib(default=None)  # type: Optional[str]
    repo_backend = attr.ib()  # type: Any
    clone_log = attr.ib(default=None)  # type: Optional[str]

    @parsed_url.default
    def get_parsed_url(self):
        # type: () -> URI
        return URI.parse(self.url)

    @repo_backend.default
    def get_repo_backend(self):
        """Resolve the pip VCS backend for ``vcs_type``, patching run defaults."""
        if self.DEFAULT_RUN_ARGS is None:
            default_run_args = self.monkeypatch_pip()
        else:
            default_run_args = self.DEFAULT_RUN_ARGS
        from pip_shims.shims import VcsSupport
        VCS_SUPPORT = VcsSupport()
        backend = VCS_SUPPORT.get_backend(self.vcs_type)
        # repo = backend(url=self.url)
        # Ensure the backend's run_command uses the patched defaults as well.
        if backend.run_command.__func__.__defaults__ != default_run_args:
            backend.run_command.__func__.__defaults__ = default_run_args
        return backend

    @property
    def is_local(self):
        # type: () -> bool
        """Whether the (possibly ``vcs+``-prefixed) url points at a local file."""
        url = self.url
        if "+" in url:
            url = url.split("+")[1]
        return url.startswith("file")

    def obtain(self):
        # type: () -> None
        """Clone/unpack the repository into ``checkout_directory`` if needed.

        Handles the pip < 19.2 API difference (backend instantiated with the
        url; ``obtain`` takes no parsed url). Records ``commit_sha`` afterwards.
        """
        lt_pip_19_2 = (pip_shims.parsed_pip_version.parsed_version <
                       pip_shims.parse_version("19.2"))
        if lt_pip_19_2:
            self.repo_backend = self.repo_backend(self.url)
        if os.path.exists(self.checkout_directory
                          ) and not self.repo_backend.is_repository_directory(
                              self.checkout_directory):
            # Directory exists but is not a repo checkout: unpack over it.
            self.repo_backend.unpack(self.checkout_directory)
        elif not os.path.exists(self.checkout_directory):
            if lt_pip_19_2:
                self.repo_backend.obtain(self.checkout_directory)
            else:
                self.repo_backend.obtain(self.checkout_directory, self.parsed_url)
        else:
            if self.ref:
                self.checkout_ref(self.ref)
        if not self.commit_sha:
            self.commit_sha = self.get_commit_hash()

    def checkout_ref(self, ref):
        # type: (str) -> None
        """Check out *ref* unless the repo is local or already at that commit."""
        rev_opts = self.repo_backend.make_rev_options(ref)
        if not any([
                self.repo_backend.is_commit_id_equal(self.checkout_directory, ref),
                self.repo_backend.is_commit_id_equal(self.checkout_directory, rev_opts),
                self.is_local,
        ]):
            self.update(ref)

    def update(self, ref):
        # type: (str) -> None
        """Update the checkout to *ref*, handling the pip > 18.0 API change."""
        target_ref = self.repo_backend.make_rev_options(ref)
        if pip_shims.parse_version(
                pip_shims.pip_version) > pip_shims.parse_version("18.0"):
            self.repo_backend.update(self.checkout_directory, self.url, target_ref)
        else:
            self.repo_backend.update(self.checkout_directory, target_ref)
        self.commit_sha = self.get_commit_hash()

    def get_commit_hash(self, ref=None):
        # type: (Optional[str]) -> str
        # NOTE(review): ``ref`` is accepted but unused; the backend returns the
        # revision of the current checkout — confirm callers expect that.
        with pip_shims.shims.global_tempdir_manager():
            return self.repo_backend.get_revision(self.checkout_directory)

    @classmethod
    def monkeypatch_pip(cls):
        # type: () -> Tuple[Any, ...]
        """Patch pip's ``VersionControl.run_command`` defaults to be quiet.

        Forces the first default (``show_stdout``, where present) to False so
        VCS subprocess output is not written to stdout. Caches and returns the
        patched defaults tuple in ``DEFAULT_RUN_ARGS``.
        """
        from pip_shims.compat import get_allowed_args
        target_module = pip_shims.shims.VcsSupport.__module__
        pip_vcs = importlib.import_module(target_module)
        args, kwargs = get_allowed_args(pip_vcs.VersionControl.run_command)
        run_command_defaults = pip_vcs.VersionControl.run_command.__defaults__
        if "show_stdout" not in args and "show_stdout" not in kwargs:
            new_defaults = run_command_defaults
        else:
            # set the default to not write stdout, the first option sets this value
            new_defaults = [False] + list(run_command_defaults)[1:]
            new_defaults = tuple(new_defaults)
        if six.PY3:
            try:
                pip_vcs.VersionControl.run_command.__defaults__ = new_defaults
            except AttributeError:
                pip_vcs.VersionControl.run_command.__func__.__defaults__ = new_defaults
        else:
            pip_vcs.VersionControl.run_command.__func__.__defaults__ = new_defaults
        sys.modules[target_module] = pip_vcs
        cls.DEFAULT_RUN_ARGS = new_defaults
        return new_defaults
class Release(Sequence):
    """A released version of a package plus the sequence of its artifact URLs."""

    #: The version of the release
    version = attr.ib(type=str)
    #: The URL collection for the release
    urls = attr.ib(
        converter=instance_check_converter(  # type: ignore
            ReleaseUrlCollection, convert_release_urls_to_collection
        ),
        type=ReleaseUrlCollection,
    )
    #: the name of the package
    name = attr.ib(default=None)  # type: Optional[str]

    def __iter__(self):
        # type: () -> Iterator[ReleaseUrlCollection]
        return iter(self.urls)

    def __getitem__(self, key):
        return self.urls[key]

    def __len__(self):
        # type: () -> int
        return len(self.urls)

    @property
    def yanked(self):
        # type: () -> bool
        # A release that has no artifact URLs left has been yanked.
        return not self.urls

    @property
    def parsed_version(self):
        # type: () -> packaging.version._BaseVersion
        """The version string parsed into a comparable version object."""
        return packaging.version.parse(self.version)

    @property
    def wheels(self):
        # type: () -> Iterator[ReleaseUrl]
        """Iterator over the wheel artifacts of this release."""
        return self.urls.wheels

    @property
    def sdists(self):
        # type: () -> Iterator[ReleaseUrl]
        """Iterator over the sdist artifacts of this release."""
        return self.urls.sdists

    @property
    def latest(self):
        # type: () -> ReleaseUrl
        """The most recently uploaded artifact."""
        return self.urls.latest

    @property
    def latest_timestamp(self):
        # type: () -> datetime.datetime
        """Upload time of the most recent artifact."""
        return self.urls.latest_timestamp

    def to_lockfile(self):
        # type: () -> Dict[str, Union[List[str], str]]
        """Render this release as a lockfile fragment (hashes plus exact pin)."""
        hash_list = [str(url.sha256) for url in self.urls if url.sha256 is not None]
        return {
            "hashes": hash_list,
            "version": "=={0}".format(self.version),
        }
class PipenvMarkers(object):
    """System-level requirements - see PEP508 for more detail"""

    os_name = attr.ib(default=None, validator=attr.validators.optional(validate_markers))
    sys_platform = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_machine = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_python_implementation = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_release = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_system = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    python_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    python_full_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    implementation_name = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    implementation_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )

    @property
    def line_part(self):
        """Render the populated fields as an ``and``-joined marker string."""
        populated = attr.asdict(self, filter=filter_none).items()
        return " and ".join("{0} {1}".format(key, value) for key, value in populated)

    @property
    def pipfile_part(self):
        """Pipfile-shaped representation of these markers."""
        # NOTE(review): ``as_line`` is not defined in this class body; it is
        # presumably provided elsewhere -- confirm before relying on this.
        return {"markers": self.as_line}

    @classmethod
    def make_marker(cls, marker_string):
        """Parse *marker_string*, normalizing parse failures to RequirementError."""
        try:
            return Marker(marker_string)
        except InvalidMarker:
            raise RequirementError(
                "Invalid requirement: Invalid marker %r" % marker_string
            )

    @classmethod
    def from_line(cls, line):
        """Build a marker from a requirement line, using the text after ``;``."""
        if ";" in line:
            line = line.rsplit(";", 1)[1].strip()
        return cls.make_marker(line)

    @classmethod
    def from_pipfile(cls, name, pipfile):
        """Merge marker-relevant pipfile keys (plus any explicit ``markers``
        entry) into a single combined marker, or ``None`` when invalid."""
        field_names = {field.name for field in attr.fields(cls)}
        marker_strings = [
            "{0} {1}".format(key, pipfile[key])
            for key in pipfile.keys()
            if key in field_names
        ]
        if pipfile.get("markers"):
            marker_strings.append(pipfile.get("markers"))
        try:
            return cls.make_marker(" and ".join(sorted(set(marker_strings))))
        except RequirementError:
            return None
class URI(object):
    """A parsed URL/URI including auth, VCS ref, egg fragment and extras.

    Instances behave immutably: the ``_parse_*`` helpers return updated copies
    via :func:`attr.evolve` rather than mutating in place.
    """

    #: The target hostname, e.g. `amazon.com`
    host = attr.ib(type=str)
    #: The URI Scheme, e.g. `salesforce`
    scheme = attr.ib(default="https", type=str)
    #: The numeric port of the url if specified
    port = attr.ib(default=None, type=int)
    #: The url path, e.g. `/path/to/endpoint`
    path = attr.ib(default="", type=str)
    #: Query parameters, e.g. `?variable=value...`
    query = attr.ib(default="", type=str)
    #: URL Fragments, e.g. `#fragment=value`
    fragment = attr.ib(default="", type=str)
    #: Subdirectory fragment, e.g. `&subdirectory=blah...`
    subdirectory = attr.ib(default="", type=str)
    #: VCS ref this URI points at, if available
    ref = attr.ib(default="", type=str)
    #: The username if provided, parsed from `user:password@hostname`
    username = attr.ib(default="", type=str)
    #: Password parsed from `user:password@hostname`
    password = attr.ib(default="", type=str, repr=False)
    #: An orderedmultidict representing query fragments
    query_dict = attr.ib(factory=omdict, type=omdict)
    #: The name of the specified package in case it is a VCS URI with an egg fragment
    name = attr.ib(default="", type=str)
    #: Any extras requested from the requirement
    extras = attr.ib(factory=tuple, type=tuple)
    #: Whether the url was parsed as a direct pep508-style URL
    is_direct_url = attr.ib(default=False, type=bool)
    #: Whether the url was an implicit `git+ssh` url (passed as `git+git@`)
    is_implicit_ssh = attr.ib(default=False, type=bool)
    #: Raw `user:password` auth string, if any (consumed by ``_parse_auth``)
    _auth = attr.ib(default=None, type=str, repr=False)
    #: Parsed fragment key/value pairs
    _fragment_dict = attr.ib(factory=dict, type=dict)
    #: Whether the username contained url-quoted characters
    _username_is_quoted = attr.ib(type=bool, default=False)
    #: Whether the password contained url-quoted characters
    _password_is_quoted = attr.ib(type=bool, default=False)

    def _parse_query(self):
        # type: () -> URI
        """Split the query string into ``query_dict``, extracting any
        ``subdirectory=`` parameter into its own field."""
        query = self.query if self.query is not None else ""
        query_dict = omdict()
        queries = query.split("&")
        query_items = []
        subdirectory = self.subdirectory if self.subdirectory else None
        for q in queries:
            key, _, val = q.partition("=")
            val = unquote_plus(val)
            if key == "subdirectory" and not subdirectory:
                subdirectory = val
            else:
                query_items.append((key, val))
        query_dict.load(query_items)
        return attr.evolve(self, query_dict=query_dict, subdirectory=subdirectory, query=query)

    def _parse_fragment(self):
        # type: () -> URI
        """Parse ``#egg=...&subdirectory=...`` fragments into name, extras and
        subdirectory fields; no-op when there is no fragment."""
        subdirectory = self.subdirectory if self.subdirectory else ""
        fragment = self.fragment if self.fragment else ""
        if self.fragment is None:
            return self
        fragments = self.fragment.split("&")
        fragment_items = {}
        name = self.name if self.name else ""
        extras = self.extras
        for q in fragments:
            key, _, val = q.partition("=")
            val = unquote_plus(val)
            fragment_items[key] = val
            if key == "egg":
                from .utils import parse_extras
                # Split "pkg[extra1,extra2]" into name and extras tuple.
                name, stripped_extras = pip_shims.shims._strip_extras(val)
                if stripped_extras:
                    extras = tuple(parse_extras(stripped_extras))
            elif key == "subdirectory":
                subdirectory = val
        return attr.evolve(
            self,
            fragment_dict=fragment_items,
            subdirectory=subdirectory,
            fragment=fragment,
            extras=extras,
            name=name,
        )

    def _parse_auth(self):
        # type: () -> URI
        """Split the raw ``user:password`` auth string into url-quoted username
        and password fields, tracking whether quoting changed either value."""
        if self._auth:
            username, _, password = self._auth.partition(":")
            username_is_quoted, password_is_quoted = False, False
            quoted_username, quoted_password = "", ""
            if password:
                quoted_password = quote(password)
                password_is_quoted = quoted_password != password
            if username:
                quoted_username = quote(username)
                username_is_quoted = quoted_username != username
            return attr.evolve(
                self,
                username=quoted_username,
                password=quoted_password,
                username_is_quoted=username_is_quoted,
                password_is_quoted=password_is_quoted,
            )
        return self

    def get_password(self, unquote=False, include_token=True):
        # type: (bool, bool) -> str
        """Return the password, optionally unquoting url-escapes.

        ``include_token`` is currently accepted but unused — presumably kept
        for interface compatibility; confirm against callers.
        """
        password = self.password if self.password else ""
        if password and unquote and self._password_is_quoted:
            password = url_unquote(password)
        return password

    def get_username(self, unquote=False):
        # type: (bool) -> str
        """Return the username, optionally unquoting url-escapes."""
        username = self.username if self.username else ""
        if username and unquote and self._username_is_quoted:
            username = url_unquote(username)
        return username

    @staticmethod
    def parse_subdirectory(url_part):
        # type: (str) -> Tuple[str, Optional[str]]
        """Split a trailing ``&subdirectory=...`` off *url_part*, re-prefixing
        it as a fragment (``#``) when no egg fragment is present."""
        subdir = None
        if "&subdirectory" in url_part:
            url_part, _, subdir = url_part.rpartition("&")
            if "#egg=" not in url_part:
                subdir = "#{0}".format(subdir.strip())
            else:
                subdir = "&{0}".format(subdir.strip())
        return url_part.strip(), subdir

    @classmethod
    def get_parsed_url(cls, url):
        """Parse *url*, working around auth sections that contain ``#``."""
        # if there is a "#" in the auth section, this could break url parsing
        parsed_url = _get_parsed_url(url)
        if "@" in url and "#" in url:
            scheme = "{0}://".format(parsed_url.scheme)
            if parsed_url.scheme == "file":
                scheme = "{0}/".format(scheme)
            url_without_scheme = url.replace(scheme, "")
            maybe_auth, _, maybe_url = url_without_scheme.partition("@")
            if "#" in maybe_auth and (not parsed_url.host or "." not in parsed_url.host):
                # Re-parse without the auth portion, then splice it back in.
                new_parsed_url = _get_parsed_url("{0}{1}".format(
                    scheme, maybe_url))
                new_parsed_url = new_parsed_url._replace(auth=maybe_auth)
                return new_parsed_url
        return parsed_url

    @classmethod
    def parse(cls, url):
        # type: (S) -> URI
        """Parse *url* into a fully populated :class:`URI`.

        Handles implicit ``git+git@`` ssh urls, PEP 508 direct urls
        (``name @ url``), VCS refs (``@ref``), and bare ``file:/`` paths.

        :raises ValueError: if the url has neither a host nor a file path.
        """
        from .utils import DIRECT_URL_RE, split_ref_from_uri
        is_direct_url = False
        name_with_extras = None
        is_implicit_ssh = url.strip().startswith("git+git@")
        if is_implicit_ssh:
            from ..utils import add_ssh_scheme_to_git_uri
            url = add_ssh_scheme_to_git_uri(url)
        direct_match = DIRECT_URL_RE.match(url)
        if direct_match is not None:
            is_direct_url = True
            name_with_extras, _, url = url.partition("@")
            name_with_extras = name_with_extras.strip()
        url, ref = split_ref_from_uri(url.strip())
        if "file:/" in url and "file:///" not in url:
            # Normalize single-slash file urls to the canonical triple-slash form.
            url = url.replace("file:/", "file:///")
        parsed = cls.get_parsed_url(url)
        # if there is a "#" in the auth section, this could break url parsing
        if not (parsed.scheme and parsed.host):
            # check if this is a file uri
            if not (parsed.scheme and parsed.path
                    and (parsed.scheme == "file" or parsed.scheme.endswith("+file"))):
                raise ValueError(
                    "Failed parsing URL {0!r} - Not a valid url".format(url))
        parsed_dict = dict(parsed._asdict()).copy()
        parsed_dict["is_direct_url"] = is_direct_url
        parsed_dict["is_implicit_ssh"] = is_implicit_ssh
        parsed_dict.update(**update_url_name_and_fragment(
            name_with_extras, ref, parsed_dict))  # type: ignore
        return cls(
            **parsed_dict)._parse_auth()._parse_query()._parse_fragment()

    def to_string(
        self,
        escape_password=True,  # type: bool
        unquote=True,  # type: bool
        direct=None,  # type: Optional[bool]
        strip_ssh=False,  # type: bool
        strip_ref=False,  # type: bool
        strip_name=False,  # type: bool
        strip_subdir=False,  # type: bool
    ):
        # type: (...) -> str
        """Converts the current URI to a string, unquoting or escaping the
        password as needed.

        :param bool escape_password: Whether to mask the password with ``******``,
            default True
        :param bool unquote: Whether to unquote url-escapes in the password,
            default True
        :param bool direct: Whether to format as a direct URL
        :param bool strip_ssh: Whether to strip the SSH scheme from the url (git only)
        :param bool strip_ref: Whether to drop the VCS ref (if present)
        :param bool strip_name: Whether to drop the name and extras (if present)
        :param bool strip_subdir: Whether to drop the subdirectory (if present)
        :return: The reconstructed string representing the URI
        :rtype: str
        """
        if direct is None:
            direct = self.is_direct_url
        if escape_password:
            password = "******" if self.password else ""
            if password:
                username = self.get_username(unquote=unquote)
            elif self.username:
                # No password: mask the username instead.
                username = "******"
            else:
                username = ""
        else:
            password = self.get_password(unquote=unquote)
            username = self.get_username(unquote=unquote)
        auth = ""
        if username:
            if password:
                auth = "{username}:{password}@".format(password=password,
                                                       username=username)
            else:
                auth = "{username}@".format(username=username)
        query = ""
        if self.query:
            query = "{query}?{self.query}".format(query=query, self=self)
        # Fragments (egg name / extras) only apply to non-direct urls; the
        # subdirectory joins with "&" when an egg fragment already opened "#".
        subdir_prefix = "#"
        if not direct:
            if self.name and not strip_name:
                fragment = "#egg={self.name_with_extras}".format(self=self)
                subdir_prefix = "&"
            elif not strip_name and (self.extras and self.scheme and
                                     self.scheme.startswith("file")):
                from .utils import extras_to_string
                fragment = extras_to_string(self.extras)
            else:
                fragment = ""
            query = "{query}{fragment}".format(query=query, fragment=fragment)
        if self.subdirectory and not strip_subdir:
            query = "{query}{subdir_prefix}subdirectory={self.subdirectory}".format(
                query=query, subdir_prefix=subdir_prefix, self=self)
        host_port_path = self.get_host_port_path(strip_ref=strip_ref)
        url = "{self.scheme}://{auth}{host_port_path}{query}".format(
            self=self, auth=auth, host_port_path=host_port_path, query=query)
        if strip_ssh:
            from ..utils import strip_ssh_from_git_uri
            url = strip_ssh_from_git_uri(url)
        if self.name and direct and not strip_name:
            return "{self.name_with_extras}@ {url}".format(self=self, url=url)
        return url

    def get_host_port_path(self, strip_ref=False):
        # type: (bool) -> str
        """Return ``host[:port]path[@ref]`` as a single string."""
        host = self.host if self.host else ""
        if self.port is not None:
            host = "{host}:{self.port!s}".format(host=host, self=self)
        path = "{self.path}".format(self=self) if self.path else ""
        if self.ref and not strip_ref:
            path = "{path}@{self.ref}".format(path=path, self=self)
        return "{host}{path}".format(host=host, path=path)

    @property
    def hidden_auth(self):
        # type: () -> str
        """Auth portion with the password (or lone username) masked."""
        auth = ""
        if self.username and self.password:
            password = "******"
            username = self.get_username(unquote=True)
            auth = "{username}:{password}".format(username=username,
                                                  password=password)
        elif self.username and not self.password:
            auth = "****"
        return auth

    @property
    def name_with_extras(self):
        # type: () -> str
        """Package name with its extras suffix, e.g. ``requests[security]``."""
        from .utils import extras_to_string
        if not self.name:
            return ""
        extras = extras_to_string(self.extras)
        return "{self.name}{extras}".format(self=self, extras=extras)

    @property
    def as_link(self):
        # type: () -> Link
        """This URI as a pip ``Link`` (password preserved, non-direct form)."""
        link = pip_shims.shims.Link(
            self.to_string(escape_password=False, strip_ssh=False, direct=False))
        return link

    @property
    def bare_url(self):
        # type: () -> str
        """The url with name, ref and subdirectory all stripped."""
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_name=True,
            strip_ref=True,
            strip_subdir=True,
        )

    @property
    def url_without_fragment_or_ref(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_name=True,
            strip_ref=True,
        )

    @property
    def url_without_fragment(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_name=True,
        )

    @property
    def url_without_ref(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_ref=True,
        )

    @property
    def base_url(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            unquote=False,
        )

    @property
    def full_url(self):
        # type: () -> str
        """The complete url including real password, ssh scheme and fragments."""
        return self.to_string(escape_password=False, strip_ssh=False, direct=False)

    @property
    def secret(self):
        # type: () -> str
        # Alias used by callers that want to signal the value is sensitive.
        return self.full_url

    @property
    def safe_string(self):
        # type: () -> str
        """String form with the password masked — safe for logging."""
        return self.to_string(escape_password=True, unquote=True)

    @property
    def unsafe_string(self):
        # type: () -> str
        """String form including the real password."""
        return self.to_string(escape_password=False, unquote=True)

    @property
    def uri_escape(self):
        # type: () -> str
        """String form with url-quoting preserved in the credentials."""
        return self.to_string(escape_password=False, unquote=False)

    @property
    def is_installable(self):
        # type: () -> bool
        """Whether this is a ``file:`` url pointing at an installable artifact."""
        return self.is_file_url and is_installable_file(self.bare_url)

    @property
    def is_vcs(self):
        # type: () -> bool
        """Whether the scheme is one of the known VCS schemes."""
        from ..utils import VCS_SCHEMES
        return self.scheme in VCS_SCHEMES

    @property
    def is_file_url(self):
        # type: () -> bool
        return all([self.scheme, self.scheme == "file"])

    def __str__(self):
        # type: () -> str
        # Default string form never leaks the password.
        return self.to_string(escape_password=True, unquote=True)
class PathEntry(BasePath):
    """A single path on the interpreter search path, with lazily generated
    children (sub-paths / python executables) and path-based ordering."""

    #: Whether this entry came directly from the environment PATH (a "root")
    is_root = attr.ib(default=True, type=bool, order=False)

    def __lt__(self, other):
        # type: (BasePath) -> bool
        return self.path.as_posix() < other.path.as_posix()

    def __lte__(self, other):
        # type: (BasePath) -> bool
        # Kept under its historical (misspelled) name for backward
        # compatibility; ``__le__`` below is the hook Python actually calls.
        return self.path.as_posix() <= other.path.as_posix()

    def __gt__(self, other):
        # type: (BasePath) -> bool
        return self.path.as_posix() > other.path.as_posix()

    def __gte__(self, other):
        # type: (BasePath) -> bool
        # See ``__lte__`` — aliased to the real dunder below.
        return self.path.as_posix() >= other.path.as_posix()

    # FIX: ``__lte__``/``__gte__`` are not rich-comparison hooks, so ``<=`` and
    # ``>=`` never dispatched to them. Alias the proper dunder names while
    # keeping the old methods callable.
    __le__ = __lte__
    __ge__ = __gte__

    def __del__(self):
        # FIX: use a getattr default so a partially-initialized instance (or
        # interpreter teardown) cannot raise AttributeError from __del__.
        if getattr(self, "_children", None):
            del self._children
        BasePath.__del__(self)

    def _filter_children(self):
        # type: () -> Iterator[Path]
        """Yield this directory's children, restricted to pythons if requested."""
        if self.only_python:
            children = filter_pythons(self.path)
        else:
            children = self.path.iterdir()
        return children

    def _gen_children(self):
        # type: () -> Iterator
        """Yield ``(posix_path, PathEntry)`` pairs for this entry's children.

        Non-directories yield themselves; shim paths (e.g. pyenv/asdf shims)
        are skipped; children that fail to parse as python versions are dropped.
        """
        shim_paths = get_shim_paths()
        pass_name = self.name != self.path.name
        pass_args = {"is_root": False, "only_python": self.only_python}
        if pass_name:
            if self.name is not None and isinstance(self.name, six.string_types):
                pass_args["name"] = self.name  # type: ignore
            elif self.path is not None and isinstance(self.path.name, six.string_types):
                pass_args["name"] = self.path.name  # type: ignore
        if not self.is_dir:
            yield (self.path.as_posix(), self)
        elif self.is_root:
            for child in self._filter_children():
                if any(is_in_path(str(child), shim) for shim in shim_paths):
                    continue
                # NOTE: the original code had identical try/except branches for
                # only_python True/False; collapsed into one (same behavior).
                try:
                    entry = PathEntry.create(path=child, **pass_args)  # type: ignore
                except (InvalidPythonVersion, ValueError):
                    continue
                yield (child.as_posix(), entry)
        return

    @property
    def children(self):
        # type: () -> Dict[str, PathEntry]
        """Mapping of posix path -> child entry, generated on first access."""
        children = getattr(self, "_children", {})  # type: Dict[str, PathEntry]
        if not children:
            for child_key, child_val in self._gen_children():
                children[child_key] = child_val
            self.children = children
        return self._children

    @children.setter
    def children(self, val):
        # type: (Dict[str, PathEntry]) -> None
        self._children = val

    @children.deleter
    def children(self):
        # type: () -> None
        del self._children

    @classmethod
    def create(cls, path, is_root=False, only_python=False, pythons=None, name=None):
        # type: (Union[str, Path], bool, bool, Dict[str, PythonVersion], Optional[str]) -> PathEntry
        """Helper method for creating new :class:`pythonfinder.models.PathEntry` instances.

        :param str path: Path to the specified location.
        :param bool is_root: Whether this is a root from the environment PATH variable,
            defaults to False
        :param bool only_python: Whether to search only for python executables,
            defaults to False
        :param dict pythons: A dictionary of existing python objects (usually from a
            finder), defaults to None
        :param str name: Name of the python version, e.g. ``anaconda3-5.3.0``
        :return: A new instance of the class.
        :rtype: :class:`pythonfinder.models.PathEntry`
        """
        target = ensure_path(path)
        guessed_name = False
        if not name:
            guessed_name = True
            name = target.name
        creation_args = {
            "path": target,
            "is_root": is_root,
            "only_python": only_python,
            "name": name,
        }
        if pythons:
            creation_args["pythons"] = pythons
        _new = cls(**creation_args)
        if pythons and only_python:
            # Pre-populate children from the supplied interpreter mapping
            # instead of scanning the filesystem.
            children = {}
            child_creation_args = {"is_root": False, "only_python": only_python}
            if not guessed_name:
                # Only propagate an explicitly supplied name to children.
                child_creation_args["name"] = _new.name  # type: ignore
            for pth, python in pythons.items():
                if any(shim in normalize_path(str(pth)) for shim in SHIM_PATHS):
                    continue
                pth = ensure_path(pth)
                children[pth.as_posix()] = PathEntry(  # type: ignore
                    py_version=python, path=pth, **child_creation_args
                )
            _new._children = children
        return _new
class Package(object):
    """A PyPI package record: its metadata plus release and download-URL
    collections, with helpers to resolve and lock its dependency tree."""

    info = attr.ib(type=PackageInfo, converter=convert_package_info)
    last_serial = attr.ib(type=int)
    releases = attr.ib(
        type=ReleaseCollection,
        converter=instance_check_converter(  # type: ignore
            ReleaseCollection, convert_releases_to_collection
        ),
    )
    # XXX: Note: sometimes releases have no urls at the top level (e.g. pyrouge)
    urls = attr.ib(
        type=ReleaseUrlCollection,
        converter=instance_check_converter(  # type: ignore
            ReleaseUrlCollection, convert_release_urls_to_collection
        ),
    )

    @urls.default
    def _get_urls_collection(self):
        return functools.partial(
            convert_release_urls_to_collection, urls=[], name=self.name
        )

    @property
    def name(self):
        # type: () -> str
        return self.info.name

    @property
    def version(self):
        # type: () -> str
        return self.info.version

    @property
    def requirement(self):
        # type: () -> PackagingRequirement
        return self.info.to_dependency().requirement

    @property
    def latest_sdist(self):
        # type: () -> ReleaseUrl
        return next(iter(self.urls.sdists))

    @property
    def latest_wheels(self):
        # type: () -> Iterator[ReleaseUrl]
        for wheel in self.urls.wheels:
            yield wheel

    @property
    def dependencies(self):
        # type: () -> List[Dependency]
        # When the JSON metadata lacks dependency info, fall back to
        # resolving it from the release artifacts.
        if self.info.dependencies is None and list(self.urls):
            rval = self.get_dependencies()
            return rval.dependencies
        return list(self.info.dependencies)

    def get_dependencies(self):
        # type: () -> "Package"
        """Return a copy of this package with its dependency metadata
        resolved from the release URLs when it was missing."""
        urls = []  # type: List[ReleaseUrl]
        deps = set()  # type: Set[str]
        info = self.info
        if info.dependencies is None:
            for url in self.urls:
                try:
                    url, dep_dict = url.get_dependencies()
                except (RuntimeError, TypeError):
                    # This happens if we are parsing `setup.py` and we fail
                    if url.is_sdist:
                        continue
                    else:
                        raise
                markers = url.markers
                dep_list = dep_dict.get("requires_dist", [])
                for dep in dep_list:
                    # XXX: We need to parse these as requirements and "and" the markers
                    # XXX: together because they may contain "extra" markers which we
                    # XXX: will need to parse and remove
                    deps.add(add_markers_to_dep(dep, markers))
                urls.append(url)
            if None in deps:
                deps.remove(None)
            info = attr.evolve(
                self.info, requires_dist=tuple(sorted(deps))
            ).create_dependencies(force=True)
        return attr.evolve(self, info=info, urls=urls)

    @classmethod
    def from_json(cls, package_json):
        # type: (Dict[str, Any]) -> "Package"
        """Build a :class:`Package` from a PyPI JSON API response dict."""
        info = convert_package_info(package_json["info"]).create_dependencies()
        releases = convert_releases_to_collection(
            package_json["releases"], name=info.name
        )
        urls = convert_release_urls_to_collection(package_json["urls"], name=info.name)
        return cls(
            info=info,
            releases=releases,
            urls=urls,
            last_serial=package_json["last_serial"],
        )

    def pin_dependencies(self, include_extras=None):
        # type: (Optional[List[str]]) -> Tuple[List["Package"], Dict[str, List[SpecifierSet]]]
        """Pin each dependency to a concrete package while collecting the
        specifier constraints seen along the way.

        :param include_extras: Extras whose dependencies should also be pinned.
        :return: ``(pinned_packages, {name: [specifier, ...]})``
        """
        deps = []
        if include_extras:
            include_extras = sorted(set(include_extras))
        else:
            include_extras = []
        constraints = defaultdict(list)
        for dep in self.dependencies:
            if dep.from_extras and dep.from_extras not in include_extras:
                continue
            if dep.specifier:
                constraints[dep.name].append(dep.specifier)
            try:
                pinned = dep.pin()
            except requests.exceptions.HTTPError:
                # Best-effort: skip dependencies we fail to fetch.
                continue
            deps.append(pinned)
        return deps, constraints

    def get_latest_lockfile(self):
        # type: () -> Dict[str, Dict[str, Union[List[str], str]]]
        """Produce a lockfile mapping for this package and its (transitive)
        dependency closure at the latest versions matching all constraints."""
        lockfile = {}
        constraints = {dep.name: dep.specifier for dep in self.dependencies}
        deps, _ = self.pin_dependencies()
        for dep in deps:
            dep = dep.get_dependencies()
            for sub_dep in dep.dependencies:
                if sub_dep.name not in constraints:
                    logger.info(
                        "Adding {0} (from {1}) {2!s}".format(
                            sub_dep.name, dep.name, sub_dep.specifier
                        )
                    )
                    constraints[sub_dep.name] = sub_dep.specifier
                else:
                    existing = "{0} (from {1}): {2!s} + ".format(
                        sub_dep.name, dep.name, constraints[sub_dep.name]
                    )
                    new_specifier = sub_dep.specifier
                    # Intersect with the constraint accumulated so far.
                    merged = constraints[sub_dep.name] & new_specifier
                    logger.info(
                        "Updating: {0}{1!s} = {2!s}".format(
                            existing, new_specifier, merged
                        )
                    )
                    constraints[sub_dep.name] = merged
            lockfile.update({dep.info.name: dep.releases.get_latest_lockfile()})
        for sub_dep_name, specset in constraints.items():
            try:
                sub_dep_pkg = get_package(sub_dep_name)
            except requests.exceptions.HTTPError:
                continue
            # BUGFIX: the log calls in this loop referenced ``sub_dep``, a
            # stale variable leaked from the inner loop above (NameError when
            # that loop never ran, wrong name logged otherwise).
            logger.info("Getting package: {0} ({1!s})".format(sub_dep_name, specset))
            sorted_releases = sorted(
                sub_dep_pkg.releases,
                key=operator.attrgetter("parsed_version"),
                reverse=True,
            )
            try:
                version = next(
                    iter(specset.filter(r.version for r in sorted_releases))
                )
            except StopIteration:
                logger.info(
                    "No version of {0} matches specifier: {1}".format(
                        sub_dep_name, specset
                    )
                )
                logger.info(
                    "Available versions: {0}".format(
                        " ".join(r.version for r in sorted_releases)
                    )
                )
                raise
            sub_dep_instance = get_package_version(sub_dep_name, version=str(version))
            if sub_dep_instance is None:
                continue
            lockfile.update(
                {
                    sub_dep_instance.info.name: sub_dep_instance.releases.get_latest_lockfile()
                }
            )
        lockfile.update({self.info.name: self.releases.get_latest_lockfile()})
        return lockfile

    def as_dict(self):
        # type: () -> Dict[str, Any]
        """Return this package serialized to a plain dict (via JSON)."""
        return json.loads(self.serialize())

    def serialize(self):
        # type: () -> str
        """Return this package as an indented JSON string."""
        return json.dumps(attr.asdict(self), cls=PackageEncoder, indent=4)