Пример #1
0
    def incompatibilities_for(
            self, package: DependencyPackage) -> list[Incompatibility]:
        """
        Returns incompatibilities that encapsulate a given package's dependencies,
        or that it can't be safely selected.

        If multiple subsequent versions of this package have the same
        dependencies, this will return incompatibilities that reflect that. It
        won't return incompatibilities that have already been returned by a
        previous call to _incompatibilities_for().
        """
        if package.is_root():
            # The root package contributes all of its requirements.
            dependencies = package.all_requires
        else:
            dependencies = package.requires

            # If this package does not support every Python version the
            # project requires, decide whether to reject it outright with a
            # PythonCause incompatibility.
            if not package.python_constraint.allows_all(
                    self._python_constraint):
                transitive_python_constraint = get_python_constraint_from_marker(
                    package.dependency.transitive_marker)
                intersection = package.python_constraint.intersect(
                    transitive_python_constraint)
                difference = transitive_python_constraint.difference(
                    intersection)

                # The difference is only relevant if it intersects
                # the root package python constraint
                difference = difference.intersect(self._python_constraint)
                if (transitive_python_constraint.is_any()
                        or self._python_constraint.intersect(
                            package.dependency.python_constraint).is_empty()
                        or intersection.is_empty()
                        or not difference.is_empty()):
                    return [
                        Incompatibility(
                            [Term(package.to_dependency(), True)],
                            PythonCause(package.python_versions,
                                        str(self._python_constraint)),
                        )
                    ]

        # Keep only dependencies that are resolvable here: not flagged as
        # unsafe, compatible with the project's Python constraint, and (when
        # an environment is active) valid for that environment's markers.
        _dependencies = [
            dep for dep in dependencies if dep.name not in self.UNSAFE_PACKAGES
            and self._python_constraint.allows_any(dep.python_constraint) and (
                not self._env or dep.marker.validate(self._env.marker_env))
        ]
        dependencies = self._get_dependencies_with_overrides(
            _dependencies, package)

        # One incompatibility per dependency: selecting this package implies
        # satisfying each of its dependencies.
        return [
            Incompatibility(
                [Term(package.to_dependency(), True),
                 Term(dep, False)],
                DependencyCause(),
            ) for dep in dependencies
        ]
Пример #2
0
    def solve_in_compatibility_mode(self, overrides, use_latest=None):
        """Re-run dependency resolution once per override set and merge results.

        For each entry in ``overrides``, the provider is reconfigured and
        ``_solve()`` is executed again; the per-run results are merged so each
        package appears once, with the maximum depth seen across runs and the
        union of its requirements.

        :param overrides: iterable of override mappings to apply in turn.
        :param use_latest: optional list of package names forwarded to _solve().
        :return: tuple (packages, depths) of the merged resolution.
        """
        # NOTE: the original built a `locked` mapping here that was never
        # used; it has been removed as dead code.
        packages = []
        depths = []
        for override in overrides:
            self._provider.debug(
                "<comment>Retrying dependency resolution "
                "with the following overrides ({}).</comment>".format(
                    override))
            self._provider.set_overrides(override)
            _packages, _depths = self._solve(use_latest=use_latest)
            for index, package in enumerate(_packages):
                if package not in packages:
                    packages.append(package)
                    depths.append(_depths[index])
                else:
                    # Seen in a previous run: keep the deepest depth and fold
                    # any new requirements into the first occurrence.
                    idx = packages.index(package)
                    pkg = packages[idx]
                    depths[idx] = max(depths[idx], _depths[index])

                    for dep in package.requires:
                        if dep not in pkg.requires:
                            pkg.requires.append(dep)

        return packages, depths
Пример #3
0
def check_solver_result(
    root, provider, result=None, error=None, tries=None, locked=None, use_latest=None
):
    """Run the version solver and verify either its solution or its failure.

    On success, asserts the solved package/version mapping equals ``result``
    (and, optionally, the number of attempted solutions). On SolveFailure,
    asserts the failure message equals ``error`` when one is expected,
    otherwise re-raises.
    """
    if locked is not None:
        locked = {
            name: DependencyPackage(pkg.to_dependency(), pkg)
            for name, pkg in locked.items()
        }

    solver = VersionSolver(root, provider, locked=locked, use_latest=use_latest)

    try:
        solution = solver.solve()
    except SolveFailure as e:
        if not error:
            raise
        assert str(e) == error
        if tries is not None:
            assert solver.solution.attempted_solutions == tries
        return

    packages = {package.name: str(package.version) for package in solution.packages}

    assert result == packages

    if tries is not None:
        assert solution.attempted_solutions == tries
Пример #4
0
    def solve_in_compatibility_mode(self, constraints, use_latest=None):
        """Re-run dependency resolution once per Python constraint and merge.

        Each constraint is intersected with the project's own Python
        constraint, the project is temporarily pinned to that intersection and
        ``_solve()`` is executed again. The per-run results are merged so each
        package appears once, with the maximum depth across runs, the union
        of its markers, and the union of its requirements.

        :param constraints: iterable of Python constraint strings.
        :param use_latest: optional list of package names forwarded to _solve().
        :return: tuple (packages, depths) of the merged resolution.
        """
        # NOTE: the original built a `locked` mapping here that was never
        # used; it has been removed as dead code.
        packages = []
        depths = []
        for constraint in constraints:
            constraint = parse_constraint(constraint)
            intersection = constraint.intersect(
                self._package.python_constraint)

            self._provider.debug(
                "<comment>Retrying dependency resolution "
                "for Python ({}).</comment>".format(intersection))
            with self._package.with_python_versions(str(intersection)):
                _packages, _depths = self._solve(use_latest=use_latest)
                for index, package in enumerate(_packages):
                    if package not in packages:
                        packages.append(package)
                        depths.append(_depths[index])
                    else:
                        # Seen under a previous constraint: keep the deepest
                        # depth, union the markers (the package is valid under
                        # several Python ranges) and fold in new requirements.
                        idx = packages.index(package)
                        pkg = packages[idx]
                        depths[idx] = max(depths[idx], _depths[index])
                        pkg.marker = pkg.marker.union(package.marker)

                        for dep in package.requires:
                            if dep not in pkg.requires:
                                pkg.requires.append(dep)

        return packages, depths
Пример #5
0
    def _solve(self, use_latest=None):
        """Run one resolution pass and compute each package's graph depth.

        Delegates to solve_in_compatibility_mode() when the provider signals
        that overrides are needed; wraps solver failures in
        SolverProblemError. Returns (packages, depths) with packages in their
        original resolution order.
        """
        if self._provider._overrides:
            self._overrides.append(self._provider._overrides)

        locked = {
            pkg.name: DependencyPackage(pkg.to_dependency(), pkg)
            for pkg in self._locked.packages
        }

        try:
            result = resolve_version(self._package,
                                     self._provider,
                                     locked=locked,
                                     use_latest=use_latest)
            packages = result.packages
        except OverrideNeeded as e:
            return self.solve_in_compatibility_mode(e.overrides,
                                                    use_latest=use_latest)
        except SolveFailure as e:
            raise SolverProblemError(e)

        # Depth of every package in the dependency graph, keyed by package.
        results = dict(
            depth_first_search(PackageNode(self._package, packages),
                               aggregate_package_nodes))

        # Return the packages in their original order with associated depths
        depths = [results[pkg] for pkg in packages]
        return packages, depths
Пример #6
0
    def _solve(
        self,
        use_latest: list[str] | None = None
    ) -> tuple[list[Package], list[int]]:
        """Run one resolution pass and merge feature packages into bases.

        Returns a tuple ``(packages, depths)`` where ``depths[i]`` is the
        dependency-graph depth of ``packages[i]``. Feature ("extras")
        packages are folded into their base package and excluded from the
        result. Delegates to solve_in_compatibility_mode() when the provider
        signals that overrides are needed; wraps solver failures in
        SolverProblemError.
        """
        if self._provider._overrides:
            self._overrides.append(self._provider._overrides)

        # Locked packages grouped by name and sorted newest-first, so lookups
        # prefer the most recent locked version.
        locked: dict[str, list[DependencyPackage]] = defaultdict(list)
        for package in self._locked.packages:
            locked[package.name].append(
                DependencyPackage(package.to_dependency(), package))
        for dependency_packages in locked.values():
            dependency_packages.sort(
                key=lambda p: p.package.version,
                reverse=True,
            )

        try:
            result = resolve_version(self._package,
                                     self._provider,
                                     locked=locked,
                                     use_latest=use_latest)

            packages = result.packages
        except OverrideNeeded as e:
            return self.solve_in_compatibility_mode(e.overrides,
                                                    use_latest=use_latest)
        except SolveFailure as e:
            raise SolverProblemError(e)

        # Depth of every package in the dependency graph, keyed by package.
        combined_nodes = depth_first_search(
            PackageNode(self._package, packages))
        results = dict(
            aggregate_package_nodes(nodes) for nodes in combined_nodes)

        # Merging feature packages with base packages
        final_packages = []
        depths = []
        for package in packages:
            if package.features:
                # Fold this feature package's requirements into the matching
                # base package (same name/version, no features).
                for _package in packages:
                    if (not _package.features and _package.name == package.name
                            and _package.version == package.version):
                        for dep in package.requires:
                            # Prevent adding base package as a dependency to itself
                            if _package.name == dep.name:
                                continue

                            if dep not in _package.requires:
                                _package.add_dependency(dep)
            else:
                final_packages.append(package)
                depths.append(results[package])

        # Return the packages in their original order with associated depths
        return final_packages, depths
Пример #7
0
    def _solve(
            self,
            use_latest: List[str] = None) -> Tuple[List["Package"], List[int]]:
        """Run one resolution pass and merge feature packages into bases.

        Returns ``(packages, depths)`` where ``depths[i]`` is the
        dependency-graph depth of ``packages[i]``. Feature ("extras")
        packages are folded into their base package and excluded from the
        result. Delegates to solve_in_compatibility_mode() on OverrideNeeded;
        wraps solver failures in SolverProblemError.
        """
        if self._provider._overrides:
            self._overrides.append(self._provider._overrides)

        locked = {
            package.name: DependencyPackage(package.to_dependency(), package)
            for package in self._locked.packages
        }

        try:
            result = resolve_version(self._package,
                                     self._provider,
                                     locked=locked,
                                     use_latest=use_latest)

            packages = result.packages
        except OverrideNeeded as e:
            return self.solve_in_compatibility_mode(e.overrides,
                                                    use_latest=use_latest)
        except SolveFailure as e:
            raise SolverProblemError(e)

        # NOTE passing explicit empty array for seen to reset between invocations during
        # update + install cycle
        results = dict(
            depth_first_search(PackageNode(self._package, packages, seen=[]),
                               aggregate_package_nodes))

        # Merging feature packages with base packages
        final_packages = []
        depths = []
        for package in packages:
            if package.features:
                # Copy the feature package's requirements onto the matching
                # base package (same name/version but a distinct package).
                for _package in packages:
                    if (_package.name == package.name
                            and not _package.is_same_package_as(package)
                            and _package.version == package.version):
                        for dep in package.requires:
                            # Never make the base package depend on itself.
                            if dep.is_same_package_as(_package):
                                continue

                            if dep not in _package.requires:
                                _package.add_dependency(dep)

                continue

            final_packages.append(package)
            depths.append(results[package])

        # Return the packages in their original order with associated depths
        return final_packages, depths
Пример #8
0
    def get_project_dependency_packages(
        self,
        project_requires: list[Dependency],
        project_python_marker: BaseMarker | None = None,
        extras: bool | Sequence[str] | None = None,
    ) -> Iterator[DependencyPackage]:
        """Yield the locked DependencyPackage for every selected requirement.

        Requirements are first narrowed by ``project_python_marker`` (when
        given); optional locked packages that are not activated by the
        selected ``extras`` are skipped.
        """
        # Apply the project python marker to all requirements.
        if project_python_marker is not None:
            constrained: list[Dependency] = []
            for original in project_requires:
                clone = deepcopy(original)
                clone.marker = clone.marker.intersect(project_python_marker)
                constrained.append(clone)
            project_requires = constrained

        repository = self.locked_repository()

        # Names of packages pulled in by the selected extras;
        # None means "all extras" and disables the optional-package filter.
        extra_package_names: set[str] | None = None
        if extras is not True:
            extra_package_names = set(
                get_extra_package_names(
                    repository.packages,
                    self.lock_data.get("extras", {}),
                    extras or (),
                ))

        # If a package is optional and we haven't opted in to it, do not select
        selected = []
        for dependency in project_requires:
            candidates = repository.find_packages(dependency=dependency)
            if not candidates:
                continue
            package = candidates[0]

            if extra_package_names is not None and (
                    package.optional
                    and package.name not in extra_package_names):
                # a package is locked as optional, but is not activated via extras
                continue

            selected.append(dependency)

        for package, dependency in self.get_project_dependencies(
                project_requires=selected,
                locked_packages=repository.packages,
        ):
            for extra in dependency.extras:
                package.requires_extras.append(extra)

            yield DependencyPackage(dependency=dependency, package=package)
Пример #9
0
    def get_project_dependency_packages(
        self,
        project_requires: List[Dependency],
        dev: bool = False,
        extras: Optional[Union[bool, Sequence[str]]] = None,
    ) -> Iterator[DependencyPackage]:
        """Yield a DependencyPackage for every selected project requirement.

        :param project_requires: the project's declared dependencies.
        :param dev: include dev requirements from the lock file.
        :param extras: True selects all extras; a sequence selects specific
            extras; a falsy value selects none.

        Optional locked packages that are not activated via the selected
        extras are skipped.
        """
        repository = self.locked_repository(with_dev_reqs=dev)

        # Build a set of all packages required by our selected extras.
        # None means "all extras selected" and disables the filter below.
        # (Simplified from the previous two-step sentinel: `extras is True`
        # already implies extras is a bool, so the isinstance check was
        # redundant.)
        extra_package_names = None
        if extras is not True:
            extra_package_names = set(
                get_extra_package_names(
                    repository.packages,
                    self.lock_data.get("extras", {}),
                    extras or (),
                )
            )

        # If a package is optional and we haven't opted in to it, do not select
        selected = []
        for dependency in project_requires:
            try:
                package = repository.find_packages(dependency=dependency)[0]
            except IndexError:
                continue

            if extra_package_names is not None and (
                package.optional and package.name not in extra_package_names
            ):
                # a package is locked as optional, but is not activated via extras
                continue

            selected.append(dependency)

        for dependency in self.get_project_dependencies(
            project_requires=selected,
            locked_packages=repository.packages,
            with_nested=True,
        ):
            try:
                package = repository.find_packages(dependency=dependency)[0]
            except IndexError:
                continue

            for extra in dependency.extras:
                package.requires_extras.append(extra)

            yield DependencyPackage(dependency=dependency, package=package)
Пример #10
0
    def _solve(
            self,
            use_latest: List[str] = None) -> Tuple[List[Package], List[int]]:
        """Run one resolution pass, merging feature packages into bases.

        Returns (packages, depths) with feature ("extras") packages folded
        into their matching base package and excluded from the result.
        Delegates to solve_in_compatibility_mode() on OverrideNeeded; wraps
        solver failures in SolverProblemError.
        """
        if self._provider._overrides:
            self._overrides.append(self._provider._overrides)

        locked = {
            pkg.name: DependencyPackage(pkg.to_dependency(), pkg)
            for pkg in self._locked.packages
        }

        try:
            result = resolve_version(self._package,
                                     self._provider,
                                     locked=locked,
                                     use_latest=use_latest)
            packages = result.packages
        except OverrideNeeded as e:
            return self.solve_in_compatibility_mode(e.overrides,
                                                    use_latest=use_latest)
        except SolveFailure as e:
            raise SolverProblemError(e)

        # Depth of every package in the dependency graph, keyed by package.
        results = dict(
            depth_first_search(PackageNode(self._package, packages),
                               aggregate_package_nodes))

        # Merging feature packages with base packages
        final_packages = []
        depths = []
        for package in packages:
            if not package.features:
                final_packages.append(package)
                depths.append(results[package])
                continue

            # Fold this feature package's requirements into the matching
            # base package (same name/version but a distinct package).
            for base in packages:
                if (base.name == package.name
                        and not base.is_same_package_as(package)
                        and base.version == package.version):
                    for dep in package.requires:
                        # Never make the base package depend on itself.
                        if dep.is_same_package_as(base):
                            continue
                        if dep not in base.requires:
                            base.requires.append(dep)

        # Return the packages in their original order with associated depths
        return final_packages, depths
Пример #11
0
    def _get_locked(
        self, dependency: Dependency, *, allow_similar: bool = False
    ) -> DependencyPackage | None:
        """Return the locked package satisfying *dependency*, if any.

        Packages whose latest version was explicitly requested are never
        served from the lock. A locked prerelease also matches when the
        constraint allows its next patch version.
        """
        if dependency.name in self._use_latest:
            return None

        for package in self._locked.get(dependency.name, []):
            if not (allow_similar
                    or dependency.is_same_package_as(package.package)):
                continue
            constraint = dependency.constraint
            if constraint.allows(package.version) or (
                package.is_prerelease()
                and constraint.allows(package.version.next_patch())
            ):
                return DependencyPackage(dependency, package.package)
        return None
Пример #12
0
def test_complete_package_preserves_source_type_with_subdirectories(
        provider: Provider, root: ProjectPackage) -> None:
    """Dependencies on distinct subdirectories of one git repository must
    all be preserved on the completed root package."""
    dependency_one = Factory.create_dependency(
        "one",
        {
            "git": "https://github.com/demo/subdirectories.git",
            "subdirectory": "one",
        },
    )
    dependency_one_copy = Factory.create_dependency(
        "one",
        {
            "git": "https://github.com/demo/subdirectories.git",
            "subdirectory": "one-copy",
        },
    )
    dependency_two = Factory.create_dependency(
        "two",
        {
            "git": "https://github.com/demo/subdirectories.git",
            "subdirectory": "two"
        },
    )

    # Reuse dependency_one instead of re-creating an identical dependency
    # inline (the previous code duplicated the exact same specification).
    root.add_dependency(dependency_one)
    root.add_dependency(dependency_one_copy)
    root.add_dependency(dependency_two)

    complete_package = provider.complete_package(
        DependencyPackage(root.to_dependency(), root))

    requires = complete_package.package.all_requires
    assert len(requires) == 3
    assert {r.to_pep_508()
            for r in requires} == {
                dependency_one.to_pep_508(),
                dependency_one_copy.to_pep_508(),
                dependency_two.to_pep_508(),
            }
Пример #13
0
def test_complete_package_preserves_source_type(provider: Provider,
                                                root: ProjectPackage) -> None:
    """Conditional path dependencies on two folders must both survive
    package completion with their source URLs intact."""
    fixtures = Path(__file__).parent.parent / "fixtures"
    project_dir = fixtures / "with_conditional_path_deps"
    for folder in ("demo_one", "demo_two"):
        root.add_dependency(
            Factory.create_dependency(
                "demo", {"path": (project_dir / folder).as_posix()}))

    complete_package = provider.complete_package(
        DependencyPackage(root.to_dependency(), root))

    requires = complete_package.package.all_requires
    assert len(requires) == 2
    expected = {
        (project_dir / "demo_one").as_posix(),
        (project_dir / "demo_two").as_posix(),
    }
    assert {dep.source_url for dep in requires} == expected
Пример #14
0
def check_solver_result(
    root: "ProjectPackage",
    provider: "Provider",
    result: Optional[Dict[str, str]] = None,
    error: Optional[str] = None,
    tries: Optional[int] = None,
    locked: Optional[Dict[str, Package]] = None,
    use_latest: Optional[List[str]] = None,
) -> None:
    """Run the version solver and verify either its solution or its failure.

    On success, asserts the solved package/version mapping equals ``result``
    (and, optionally, the number of attempted solutions). When a
    SolveFailure or AssertionError is raised and an ``error`` message is
    expected, asserts the message matches; otherwise re-raises.
    """
    if locked is not None:
        locked = {
            name: DependencyPackage(pkg.to_dependency(), pkg)
            for name, pkg in locked.items()
        }

    solver = VersionSolver(root,
                           provider,
                           locked=locked,
                           use_latest=use_latest)
    try:
        solution = solver.solve()
    except SolveFailure as e:
        if not error:
            raise
        assert str(e) == error
        if tries is not None:
            assert solver.solution.attempted_solutions == tries
        return
    except AssertionError as e:
        if not error:
            raise
        assert str(e) == error
        return

    packages = {pkg.name: str(pkg.version) for pkg in solution.packages}

    assert result == packages

    if tries is not None:
        assert solution.attempted_solutions == tries
Пример #15
0
    def _solve(self, use_latest=None):
        """Resolve versions for the current Python-version branch.

        Records the branch, runs the resolver against the locked packages,
        then tags every resolved package with its category, optionality and
        marker. Returns (final_packages, depths); packages whose marker is
        empty are dropped. Delegates to solve_in_compatibility_mode() on
        CompatibilityError; wraps solver failures in SolverProblemError.
        """
        # Remember which python_versions branch this resolution ran under.
        self._branches.append(self._package.python_versions)

        locked = {}
        for package in self._locked.packages:
            locked[package.name] = DependencyPackage(package.to_dependency(),
                                                     package)

        try:
            result = resolve_version(self._package,
                                     self._provider,
                                     locked=locked,
                                     use_latest=use_latest)

            packages = result.packages
        except CompatibilityError as e:
            # Incompatible Python constraints: retry once per constraint.
            return self.solve_in_compatibility_mode(e.constraints,
                                                    use_latest=use_latest)
        except SolveFailure as e:
            raise SolverProblemError(e)

        graph = self._build_graph(self._package, packages)

        depths = []
        final_packages = []
        for package in packages:
            category, optional, marker, depth = self._get_tags_for_package(
                package, graph)

            # A missing marker means the package applies everywhere.
            if marker is None:
                marker = AnyMarker()
            # An empty marker can never match any environment: drop it.
            if marker.is_empty():
                continue

            package.category = category
            package.optional = optional
            package.marker = marker

            depths.append(depth)
            final_packages.append(package)

        return final_packages, depths
Пример #16
0
    def _solve(self, use_latest=None):
        """Run one resolution pass and tag packages via the dependency graph.

        Every resolved package gets its category and optionality from the
        graph. Delegates to solve_in_compatibility_mode() on OverrideNeeded;
        wraps solver failures in SolverProblemError. Returns
        (final_packages, depths).
        """
        if self._provider._overrides:
            self._overrides.append(self._provider._overrides)

        locked = {
            pkg.name: DependencyPackage(pkg.to_dependency(), pkg)
            for pkg in self._locked.packages
        }

        try:
            result = resolve_version(self._package,
                                     self._provider,
                                     locked=locked,
                                     use_latest=use_latest)
            packages = result.packages
        except OverrideNeeded as e:
            return self.solve_in_compatibility_mode(e.overrides,
                                                    use_latest=use_latest)
        except SolveFailure as e:
            raise SolverProblemError(e)

        graph = self._build_graph(self._package, packages)

        final_packages = []
        depths = []
        for package in packages:
            category, optional, depth = self._get_tags_for_package(
                package, graph)

            package.category = category
            package.optional = optional

            final_packages.append(package)
            depths.append(depth)

        return final_packages, depths
Пример #17
0
    def complete_package(
        self, package
    ):  # type: (DependencyPackage) -> DependencyPackage

        if package.is_root():
            package = package.clone()
            requires = package.all_requires
        elif not package.is_root() and package.source_type not in {
            "directory",
            "file",
            "url",
            "git",
        }:
            package = DependencyPackage(
                package.dependency,
                self._pool.package(
                    package.name,
                    package.version.text,
                    extras=list(package.dependency.extras),
                    repository=package.dependency.source_name,
                ),
            )
            requires = package.requires
        else:
            requires = package.requires

        if self._load_deferred:
            # Retrieving constraints for deferred dependencies
            for r in requires:
                if r.is_directory():
                    self.search_for_directory(r)
                elif r.is_file():
                    self.search_for_file(r)
                elif r.is_vcs():
                    self.search_for_vcs(r)
                elif r.is_url():
                    self.search_for_url(r)

        optional_dependencies = []
        _dependencies = []

        # If some extras/features were required, we need to
        # add a special dependency representing the base package
        # to the current package
        if package.dependency.extras:
            for extra in package.dependency.extras:
                if extra not in package.extras:
                    continue

                optional_dependencies += [d.name for d in package.extras[extra]]

            package = package.with_features(list(package.dependency.extras))
            _dependencies.append(package.without_features().to_dependency())

        for dep in requires:
            if not self._python_constraint.allows_any(dep.python_constraint):
                continue

            if dep.name in self.UNSAFE_PACKAGES:
                continue

            if self._env and not dep.marker.validate(self._env.marker_env):
                continue

            if not package.is_root():
                if (dep.is_optional() and dep.name not in optional_dependencies) or (
                    dep.in_extras
                    and not set(dep.in_extras).intersection(package.dependency.extras)
                ):
                    continue

            _dependencies.append(dep)

        overrides = self._overrides.get(package, {})
        dependencies = []
        overridden = []
        for dep in _dependencies:
            if dep.name in overrides:
                if dep.name in overridden:
                    continue

                dependencies.append(overrides[dep.name])
                overridden.append(dep.name)

                continue

            dependencies.append(dep)

        # Searching for duplicate dependencies
        #
        # If the duplicate dependencies have the same constraint,
        # the requirements will be merged.
        #
        # For instance:
        #   - enum34; python_version=="2.7"
        #   - enum34; python_version=="3.3"
        #
        # will become:
        #   - enum34; python_version=="2.7" or python_version=="3.3"
        #
        # If the duplicate dependencies have different constraints
        # we have to split the dependency graph.
        #
        # An example of this is:
        #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
        #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
        duplicates = dict()
        for dep in dependencies:
            if dep.name not in duplicates:
                duplicates[dep.name] = []

            duplicates[dep.name].append(dep)

        dependencies = []
        for dep_name, deps in duplicates.items():
            if len(deps) == 1:
                dependencies.append(deps[0])
                continue

            self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))

            # Regrouping by constraint
            by_constraint = dict()
            for dep in deps:
                if dep.constraint not in by_constraint:
                    by_constraint[dep.constraint] = []

                by_constraint[dep.constraint].append(dep)

            # We merge by constraint
            for constraint, _deps in by_constraint.items():
                new_markers = []
                for dep in _deps:
                    marker = dep.marker.without_extras()
                    if marker.is_any():
                        # No marker or only extras
                        continue

                    new_markers.append(marker)

                if not new_markers:
                    continue

                dep = _deps[0]
                dep.marker = dep.marker.union(MarkerUnion(*new_markers))
                by_constraint[constraint] = [dep]

                continue

            if len(by_constraint) == 1:
                self.debug(
                    "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
                )
                dependencies.append(list(by_constraint.values())[0][0])
                continue

            # We leave dependencies as-is if they have the same
            # python/platform constraints.
            # That way the resolver will pickup the conflict
            # and display a proper error.
            _deps = [value[0] for value in by_constraint.values()]
            seen = set()
            for _dep in _deps:
                pep_508_dep = _dep.to_pep_508(False)
                if ";" not in pep_508_dep:
                    _requirements = ""
                else:
                    _requirements = pep_508_dep.split(";")[1].strip()

                if _requirements not in seen:
                    seen.add(_requirements)

            if len(_deps) != len(seen):
                for _dep in _deps:
                    dependencies.append(_dep)

                continue

            # At this point, we raise an exception that will
            # tell the solver to make new resolutions with specific overrides.
            #
            # For instance, if the foo (1.2.3) package has the following dependencies:
            #   - bar (>=2.0) ; python_version >= "3.6"
            #   - bar (<2.0) ; python_version < "3.6"
            #
            # then the solver will need to make two new resolutions
            # with the following overrides:
            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (>=2.0)>}
            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (<2.0)>}
            markers = []
            for constraint, _deps in by_constraint.items():
                markers.append(_deps[0].marker)

            _deps = [_dep[0] for _dep in by_constraint.values()]
            self.debug(
                "<warning>Different requirements found for {}.</warning>".format(
                    ", ".join(
                        "<c1>{}</c1> <fg=default>(<c2>{}</c2>)</> with markers <b>{}</b>".format(
                            d.name,
                            d.pretty_constraint,
                            d.marker if not d.marker.is_any() else "*",
                        )
                        for d in _deps[:-1]
                    )
                    + " and "
                    + "<c1>{}</c1> <fg=default>(<c2>{}</c2>)</> with markers <b>{}</b>".format(
                        _deps[-1].name,
                        _deps[-1].pretty_constraint,
                        _deps[-1].marker if not _deps[-1].marker.is_any() else "*",
                    )
                )
            )

            # We need to check if one of the duplicate dependencies
            # has no markers. If there is one, we need to change its
            # environment markers to the inverse of the union of the
            # other dependencies markers.
            # For instance, if we have the following dependencies:
            #   - ipython
            #   - ipython (1.2.4) ; implementation_name == "pypy"
            #
            # the marker for `ipython` will become `implementation_name != "pypy"`.
            any_markers_dependencies = [d for d in _deps if d.marker.is_any()]
            other_markers_dependencies = [d for d in _deps if not d.marker.is_any()]

            if any_markers_dependencies:
                marker = other_markers_dependencies[0].marker
                for other_dep in other_markers_dependencies[1:]:
                    marker = marker.union(other_dep.marker)

                for i, d in enumerate(_deps):
                    if d.marker.is_any():
                        _deps[i].marker = marker.invert()

            overrides = []
            for _dep in _deps:
                current_overrides = self._overrides.copy()
                package_overrides = current_overrides.get(package, {}).copy()
                package_overrides.update({_dep.name: _dep})
                current_overrides.update({package: package_overrides})
                overrides.append(current_overrides)

            raise OverrideNeeded(*overrides)

        # Modifying dependencies as needed
        clean_dependencies = []
        for dep in dependencies:
            if not package.dependency.transitive_marker.without_extras().is_any():
                marker_intersection = (
                    package.dependency.transitive_marker.without_extras().intersect(
                        dep.marker.without_extras()
                    )
                )
                if marker_intersection.is_empty():
                    # The dependency is not needed, since the markers specified
                    # for the current package selection are not compatible with
                    # the markers for the current dependency, so we skip it
                    continue

                dep.transitive_marker = marker_intersection

            if not package.dependency.python_constraint.is_any():
                python_constraint_intersection = dep.python_constraint.intersect(
                    package.dependency.python_constraint
                )
                if python_constraint_intersection.is_empty():
                    # This dependency is not needed under current python constraint.
                    continue
                dep.transitive_python_versions = str(python_constraint_intersection)

            clean_dependencies.append(dep)

        package.requires = clean_dependencies

        return package
Пример #18
0
    def complete_package(self,
                         package: DependencyPackage) -> DependencyPackage:
        """
        Return a copy of *package* with its dependency list fully resolved.

        Steps performed:
          * fetch full metadata from the pool for regular (non path/url/vcs)
            packages,
          * load constraints for deferred directory/file/vcs/url requirements,
          * expand required extras into concrete dependencies,
          * drop dependencies incompatible with the root Python constraint,
            the active environment markers, or unselected extras,
          * merge duplicate requirements on the same package, or — when they
            cannot be merged — raise :class:`OverrideNeeded` so the solver
            retries with explicit per-package dependency overrides.

        :raises OverrideNeeded: when duplicate dependencies with different
            constraints require the solver to branch into separate
            resolutions with overrides.
        """
        if package.is_root():
            package = package.clone()
            requires = package.all_requires
        # NOTE(review): the original also re-tested `not package.is_root()`
        # here, which is always true after the branch above; dropped.
        elif package.source_type not in {
                "directory",
                "file",
                "url",
                "git",
        }:
            # Regular repository package: re-fetch from the pool so that the
            # complete metadata (extras, requirements) is available.
            package = DependencyPackage(
                package.dependency,
                self._pool.package(
                    package.name,
                    package.version.text,
                    extras=list(package.dependency.extras),
                    repository=package.dependency.source_name,
                ),
            )
            requires = package.requires
        else:
            # directory/file/url/git packages already carry their metadata.
            requires = package.requires

        if self._load_deferred:
            # Retrieving constraints for deferred dependencies
            for r in requires:
                if r.is_directory():
                    self.search_for_directory(r)
                elif r.is_file():
                    self.search_for_file(r)
                elif r.is_vcs():
                    self.search_for_vcs(r)
                elif r.is_url():
                    self.search_for_url(r)

        optional_dependencies = []
        _dependencies = []

        # If some extras/features were required, we need to
        # add a special dependency representing the base package
        # to the current package
        if package.dependency.extras:
            for extra in package.dependency.extras:
                if extra not in package.extras:
                    continue

                optional_dependencies += [
                    d.name for d in package.extras[extra]
                ]

            package = package.with_features(list(package.dependency.extras))
            _dependencies.append(package.without_features().to_dependency())

        for dep in requires:
            # Skip dependencies that can never apply: incompatible Python
            # range, explicitly unsafe packages, markers failing against the
            # active environment, or optional/extra deps that were not asked for.
            if not self._python_constraint.allows_any(dep.python_constraint):
                continue

            if dep.name in self.UNSAFE_PACKAGES:
                continue

            if self._env and not dep.marker.validate(self._env.marker_env):
                continue

            if not package.is_root() and (
                (dep.is_optional() and dep.name not in optional_dependencies)
                    or (dep.in_extras and not set(dep.in_extras).intersection(
                        package.dependency.extras))):
                continue

            _dependencies.append(dep)

        dependencies = self._get_dependencies_with_overrides(
            _dependencies, package)

        # Searching for duplicate dependencies
        #
        # If the duplicate dependencies have the same constraint,
        # the requirements will be merged.
        #
        # For instance:
        #   - enum34; python_version=="2.7"
        #   - enum34; python_version=="3.3"
        #
        # will become:
        #   - enum34; python_version=="2.7" or python_version=="3.3"
        #
        # If the duplicate dependencies have different constraints
        # we have to split the dependency graph.
        #
        # An example of this is:
        #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
        #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
        duplicates: dict[str, list[Dependency]] = {}
        for dep in dependencies:
            if dep.complete_name not in duplicates:
                duplicates[dep.complete_name] = []

            duplicates[dep.complete_name].append(dep)

        dependencies = []
        for dep_name, deps in duplicates.items():
            if len(deps) == 1:
                dependencies.append(deps[0])
                continue

            self.debug(f"<debug>Duplicate dependencies for {dep_name}</debug>")

            deps = self._merge_dependencies_by_marker(deps)
            deps = self._merge_dependencies_by_constraint(deps)

            if len(deps) == 1:
                self.debug(
                    f"<debug>Merging requirements for {deps[0]!s}</debug>")
                dependencies.append(deps[0])
                continue

            # We leave dependencies as-is if they have the same
            # python/platform constraints.
            # That way the resolver will pickup the conflict
            # and display a proper error.
            seen = set()
            for dep in deps:
                pep_508_dep = dep.to_pep_508(False)
                if ";" not in pep_508_dep:
                    _requirements = ""
                else:
                    _requirements = pep_508_dep.split(";")[1].strip()

                if _requirements not in seen:
                    seen.add(_requirements)

            if len(deps) != len(seen):
                for dep in deps:
                    dependencies.append(dep)

                continue

            # At this point, we raise an exception that will
            # tell the solver to make new resolutions with specific overrides.
            #
            # For instance, if the foo (1.2.3) package has the following dependencies:
            #   - bar (>=2.0) ; python_version >= "3.6"
            #   - bar (<2.0) ; python_version < "3.6"
            #
            # then the solver will need to make two new resolutions
            # with the following overrides:
            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (>=2.0)>}
            #   - {<Package foo (1.2.3): {"bar": <Dependency bar (<2.0)>}

            def fmt_warning(d: Dependency) -> str:
                marker = d.marker if not d.marker.is_any() else "*"
                return (
                    f"<c1>{d.name}</c1> <fg=default>(<c2>{d.pretty_constraint}</c2>)</>"
                    f" with markers <b>{marker}</b>")

            warnings = ", ".join(fmt_warning(d) for d in deps[:-1])
            warnings += f" and {fmt_warning(deps[-1])}"
            self.debug(
                f"<warning>Different requirements found for {warnings}.</warning>"
            )

            # We need to check if one of the duplicate dependencies
            # has no markers. If there is one, we need to change its
            # environment markers to the inverse of the union of the
            # other dependencies markers.
            # For instance, if we have the following dependencies:
            #   - ipython
            #   - ipython (1.2.4) ; implementation_name == "pypy"
            #
            # the marker for `ipython` will become `implementation_name != "pypy"`.
            #
            # Further, we have to merge the constraints of the requirements
            # without markers into the constraints of the requirements with markers.
            # for instance, if we have the following dependencies:
            #   - foo (>= 1.2)
            #   - foo (!= 1.2.1) ; python == 3.10
            #
            # the constraint for the second entry will become (!= 1.2.1, >= 1.2)
            any_markers_dependencies = [d for d in deps if d.marker.is_any()]
            other_markers_dependencies = [
                d for d in deps if not d.marker.is_any()
            ]

            marker = other_markers_dependencies[0].marker
            for other_dep in other_markers_dependencies[1:]:
                marker = marker.union(other_dep.marker)
            inverted_marker = marker.invert()

            if any_markers_dependencies:
                for dep_any in any_markers_dependencies:
                    dep_any.marker = inverted_marker
                    for dep_other in other_markers_dependencies:
                        dep_other.set_constraint(
                            dep_other.constraint.intersect(dep_any.constraint))
            elif not inverted_marker.is_empty(
            ) and self._python_constraint.allows_any(
                    get_python_constraint_from_marker(inverted_marker)):
                # if there is no any marker dependency
                # and the inverted marker is not empty,
                # a dependency with the inverted union of all markers is required
                # in order to not miss other dependencies later, for instance:
                #   - foo (1.0) ; python == 3.7
                #   - foo (2.0) ; python == 3.8
                #   - bar (2.0) ; python == 3.8
                #   - bar (3.0) ; python == 3.9
                #
                # the last dependency would be missed without this,
                # because the intersection with both foo dependencies is empty
                inverted_marker_dep = deps[0].with_constraint(
                    EmptyConstraint())
                inverted_marker_dep.marker = inverted_marker
                deps.append(inverted_marker_dep)

            # Build one override set per surviving duplicate; drop those whose
            # marker cannot co-exist with the markers of already-active overrides.
            overrides = []
            overrides_marker_intersection: BaseMarker = AnyMarker()
            for dep_overrides in self._overrides.values():
                for dep in dep_overrides.values():
                    overrides_marker_intersection = (
                        overrides_marker_intersection.intersect(dep.marker))
            for dep in deps:
                if not overrides_marker_intersection.intersect(
                        dep.marker).is_empty():
                    current_overrides = self._overrides.copy()
                    package_overrides = current_overrides.get(package,
                                                              {}).copy()
                    package_overrides.update({dep.name: dep})
                    current_overrides.update({package: package_overrides})
                    overrides.append(current_overrides)

            if overrides:
                raise OverrideNeeded(*overrides)

        # Modifying dependencies as needed
        clean_dependencies = []
        for dep in dependencies:
            if not package.dependency.transitive_marker.without_extras(
            ).is_any():
                marker_intersection = (package.dependency.transitive_marker.
                                       without_extras().intersect(
                                           dep.marker.without_extras()))
                if marker_intersection.is_empty():
                    # The dependency is not needed, since the markers specified
                    # for the current package selection are not compatible with
                    # the markers for the current dependency, so we skip it
                    continue

                dep.transitive_marker = marker_intersection

            if not package.dependency.python_constraint.is_any():
                python_constraint_intersection = dep.python_constraint.intersect(
                    package.dependency.python_constraint)
                if python_constraint_intersection.is_empty():
                    # This dependency is not needed under current python constraint.
                    continue
                dep.transitive_python_versions = str(
                    python_constraint_intersection)

            clean_dependencies.append(dep)

        # Rebuild the package with only the surviving dependencies attached.
        package = DependencyPackage(
            package.dependency, package.with_dependency_groups([], only=True))

        for dep in clean_dependencies:
            package.add_dependency(dep)

        return package
Пример #19
0
    def complete_package(
            self, package):  # type: (DependencyPackage) -> DependencyPackage
        """
        Return *package* with its dependency list completed.

        Fetches full metadata from the pool for regular packages, drops
        dependencies whose Python constraint cannot intersect the root
        package's, merges duplicate requirements on the same package when
        their constraints agree, and raises :class:`CompatibilityError` when
        duplicates with different constraints force the solver to resolve
        separate Python-version subsets.

        :raises CompatibilityError: when duplicate dependencies require
            per-Python-range resolutions.
        """
        if package.is_root():
            package = package.clone()

        if not package.is_root() and package.source_type not in {
                "directory",
                "file",
                "url",
                "git",
        }:
            # Regular repository package: re-fetch from the pool so that its
            # full metadata (extras, requirements) is available.
            package = DependencyPackage(
                package.dependency,
                self._pool.package(
                    package.name,
                    package.version.text,
                    extras=package.requires_extras,
                    repository=package.dependency.source_name,
                ),
            )

        # Keep only dependencies whose Python range can apply to the root.
        dependencies = [
            r for r in package.requires
            if self._package.python_constraint.allows_any(r.python_constraint)
        ]

        # Searching for duplicate dependencies
        #
        # If the duplicate dependencies have the same constraint,
        # the requirements will be merged.
        #
        # For instance:
        #   - enum34; python_version=="2.7"
        #   - enum34; python_version=="3.3"
        #
        # will become:
        #   - enum34; python_version=="2.7" or python_version=="3.3"
        #
        # If the duplicate dependencies have different constraints
        # we have to split the dependency graph.
        #
        # An example of this is:
        #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
        #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
        duplicates = OrderedDict()
        for dep in dependencies:
            if dep.name not in duplicates:
                duplicates[dep.name] = []

            duplicates[dep.name].append(dep)

        dependencies = []
        for dep_name, deps in duplicates.items():
            if len(deps) == 1:
                dependencies.append(deps[0])
                continue

            self.debug("<debug>Duplicate dependencies for {}</debug>".format(
                dep_name))

            # Regrouping by constraint
            by_constraint = OrderedDict()
            for dep in deps:
                if dep.constraint not in by_constraint:
                    by_constraint[dep.constraint] = []

                by_constraint[dep.constraint].append(dep)

            # We merge by constraint: duplicates sharing a constraint collapse
            # into a single dependency whose marker is the union of theirs.
            for constraint, _deps in by_constraint.items():
                new_markers = []
                for dep in _deps:
                    marker = dep.marker.without_extras()
                    if marker.is_empty():
                        # No marker or only extras
                        continue

                    new_markers.append(marker)

                if not new_markers:
                    continue

                dep = _deps[0]
                dep.marker = dep.marker.union(MarkerUnion(*new_markers))
                by_constraint[constraint] = [dep]
                # NOTE(review): a trailing no-op `continue` that used to end
                # this loop body has been removed.

            if len(by_constraint) == 1:
                self.debug("<debug>Merging requirements for {}</debug>".format(
                    str(deps[0])))
                dependencies.append(list(by_constraint.values())[0][0])
                continue

            # We leave dependencies as-is if they have the same
            # python/platform constraints.
            # That way the resolver will pickup the conflict
            # and display a proper error.
            _deps = [value[0] for value in by_constraint.values()]
            seen = set()
            for _dep in _deps:
                pep_508_dep = _dep.to_pep_508(False)
                if ";" not in pep_508_dep:
                    _requirements = ""
                else:
                    _requirements = pep_508_dep.split(";")[1].strip()

                if _requirements not in seen:
                    seen.add(_requirements)

            if len(_deps) != len(seen):
                for _dep in _deps:
                    dependencies.append(_dep)

                continue

            # At this point, we raise an exception that will
            # tell the solver to enter compatibility mode
            # which means it will resolve for subsets
            # Python constraints
            #
            # For instance, if our root package requires Python ~2.7 || ^3.6
            # And we have one dependency that requires Python <3.6
            # and the other Python >=3.6 than the solver will solve
            # dependencies for Python >=2.7,<2.8 || >=3.4,<3.6
            # and Python >=3.6,<4.0
            python_constraints = []
            for constraint, _deps in by_constraint.items():
                python_constraints.append(_deps[0].python_versions)

            _deps = [str(_dep[0]) for _dep in by_constraint.values()]
            self.debug(
                "<warning>Different requirements found for {}.</warning>".
                format(", ".join(_deps[:-1]) + " and " + _deps[-1]))
            raise CompatibilityError(*python_constraints)

        # Modifying dependencies as needed
        for dep in dependencies:
            if not package.dependency.python_constraint.is_any():
                # Narrow the dependency's transitive Python range to the
                # intersection with the parent dependency's range.
                dep.transitive_python_versions = str(
                    dep.python_constraint.intersect(
                        package.dependency.python_constraint))

            if (package.dependency.is_directory()
                    or package.dependency.is_file()) and (dep.is_directory()
                                                          or dep.is_file()):
                # Rebase path dependencies of path packages relative to the
                # parent package's source directory.
                if dep.path.as_posix().startswith(package.source_url):
                    relative = (Path(package.source_url) /
                                dep.path).relative_to(package.source_url)
                else:
                    relative = Path(package.source_url) / dep.path

                # TODO: Improve the way we set the correct relative path for dependencies
                dep._path = relative

        package.requires = dependencies

        return package