def complete_package(
    self, package
):  # type: (DependencyPackage) -> DependencyPackage
    if package.is_root():
        package = package.clone()

    if not package.is_root() and package.source_type not in {
        "directory",
        "file",
        "url",
        "git",
    }:
        package = DependencyPackage(
            package.dependency,
            self._pool.package(
                package.name,
                package.version.text,
                extras=package.requires_extras,
                repository=package.dependency.source_name,
            ),
        )

    dependencies = [
        r
        for r in package.requires
        if self._package.python_constraint.allows_any(r.python_constraint)
    ]

    # Searching for duplicate dependencies
    #
    # If the duplicate dependencies have the same constraint,
    # the requirements will be merged.
    #
    # For instance:
    #   - enum34; python_version=="2.7"
    #   - enum34; python_version=="3.3"
    #
    # will become:
    #   - enum34; python_version=="2.7" or python_version=="3.3"
    #
    # If the duplicate dependencies have different constraints
    # we have to split the dependency graph.
    #
    # An example of this is:
    #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
    #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
    duplicates = OrderedDict()
    for dep in dependencies:
        if dep.name not in duplicates:
            duplicates[dep.name] = []

        duplicates[dep.name].append(dep)

    dependencies = []
    for dep_name, deps in duplicates.items():
        if len(deps) == 1:
            dependencies.append(deps[0])
            continue

        self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))

        # Regrouping by constraint
        by_constraint = OrderedDict()
        for dep in deps:
            if dep.constraint not in by_constraint:
                by_constraint[dep.constraint] = []

            by_constraint[dep.constraint].append(dep)

        # We merge by constraint
        for constraint, _deps in by_constraint.items():
            new_markers = []
            for dep in _deps:
                marker = dep.marker.without_extras()
                if marker.is_empty():
                    # No marker or only extras
                    continue

                new_markers.append(marker)

            if not new_markers:
                continue

            dep = _deps[0]
            dep.marker = dep.marker.union(MarkerUnion(*new_markers))
            by_constraint[constraint] = [dep]

        if len(by_constraint) == 1:
            self.debug(
                "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
            )
            dependencies.append(list(by_constraint.values())[0][0])
            continue

        # We leave dependencies as-is if they have the same
        # python/platform constraints.
        # That way the resolver will pick up the conflict
        # and display a proper error.
        _deps = [value[0] for value in by_constraint.values()]
        seen = set()
        for _dep in _deps:
            pep_508_dep = _dep.to_pep_508(False)
            if ";" not in pep_508_dep:
                _requirements = ""
            else:
                _requirements = pep_508_dep.split(";")[1].strip()

            if _requirements not in seen:
                seen.add(_requirements)

        if len(_deps) != len(seen):
            for _dep in _deps:
                dependencies.append(_dep)

            continue

        # At this point, we raise an exception that will
        # tell the solver to enter compatibility mode,
        # which means it will resolve for subsets of the
        # Python constraints.
        #
        # For instance, if our root package requires Python ~2.7 || ^3.6
        # and we have one dependency that requires Python <3.6
        # and the other Python >=3.6, then the solver will solve
        # dependencies for Python >=2.7,<2.8 || >=3.4,<3.6
        # and Python >=3.6,<4.0
        python_constraints = []
        for constraint, _deps in by_constraint.items():
            python_constraints.append(_deps[0].python_versions)

        _deps = [str(_dep[0]) for _dep in by_constraint.values()]
        self.debug(
            "<warning>Different requirements found for {}.</warning>".format(
                ", ".join(_deps[:-1]) + " and " + _deps[-1]
            )
        )

        raise CompatibilityError(*python_constraints)

    # Modifying dependencies as needed
    for dep in dependencies:
        if not package.dependency.python_constraint.is_any():
            dep.transitive_python_versions = str(
                dep.python_constraint.intersect(package.dependency.python_constraint)
            )

        if (package.dependency.is_directory() or package.dependency.is_file()) and (
            dep.is_directory() or dep.is_file()
        ):
            if dep.path.as_posix().startswith(package.source_url):
                relative = (Path(package.source_url) / dep.path).relative_to(
                    package.source_url
                )
            else:
                relative = Path(package.source_url) / dep.path

            # TODO: Improve the way we set the correct relative path
            # for dependencies
            dep._path = relative

    package.requires = dependencies

    return package
def complete_package(
    self, package
):  # type: (DependencyPackage) -> DependencyPackage
    if package.is_root():
        package = package.clone()
        requires = package.all_requires
    elif not package.is_root() and package.source_type not in {
        "directory",
        "file",
        "url",
        "git",
    }:
        package = DependencyPackage(
            package.dependency,
            self._pool.package(
                package.name,
                package.version.text,
                extras=package.dependency.extras,
                repository=package.dependency.source_name,
            ),
        )
        requires = package.requires
    else:
        requires = package.requires

    if self._load_deferred:
        # Retrieving constraints for deferred dependencies
        for r in requires:
            if r.is_directory():
                self.search_for_directory(r)
            elif r.is_file():
                self.search_for_file(r)
            elif r.is_vcs():
                self.search_for_vcs(r)
            elif r.is_url():
                self.search_for_url(r)

    optional_dependencies = []
    activated_extras = []
    for extra in package.dependency.extras:
        if extra not in package.extras:
            continue

        activated_extras.append(extra)
        optional_dependencies += [d.name for d in package.extras[extra]]

    _dependencies = []

    # If some extras/features were required, we need to
    # add a special dependency representing the base package
    # to the current package
    if package.dependency.extras:
        if activated_extras:
            package = package.with_features(activated_extras)

        _dependencies.append(package.without_features().to_dependency())

    for dep in requires:
        if not self._python_constraint.allows_any(dep.python_constraint):
            continue

        if dep.name in self.UNSAFE_PACKAGES:
            continue

        if self._env and not dep.marker.validate(self._env.marker_env):
            continue

        if (
            dep.is_optional()
            and dep.name not in optional_dependencies
            and not package.is_root()
        ):
            continue

        _dependencies.append(dep)

    overrides = self._overrides.get(package, {})
    dependencies = []
    overridden = []
    for dep in _dependencies:
        if dep.name in overrides:
            if dep.name in overridden:
                continue

            dependencies.append(overrides[dep.name])
            overridden.append(dep.name)

            continue

        dependencies.append(dep)

    # Searching for duplicate dependencies
    #
    # If the duplicate dependencies have the same constraint,
    # the requirements will be merged.
    #
    # For instance:
    #   - enum34; python_version=="2.7"
    #   - enum34; python_version=="3.3"
    #
    # will become:
    #   - enum34; python_version=="2.7" or python_version=="3.3"
    #
    # If the duplicate dependencies have different constraints
    # we have to split the dependency graph.
    #
    # An example of this is:
    #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
    #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
    duplicates = OrderedDict()
    for dep in dependencies:
        if dep.name not in duplicates:
            duplicates[dep.name] = []

        duplicates[dep.name].append(dep)

    dependencies = []
    for dep_name, deps in duplicates.items():
        if len(deps) == 1:
            dependencies.append(deps[0])
            continue

        self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))

        # Regrouping by constraint
        by_constraint = OrderedDict()
        for dep in deps:
            if dep.constraint not in by_constraint:
                by_constraint[dep.constraint] = []

            by_constraint[dep.constraint].append(dep)

        # We merge by constraint
        for constraint, _deps in by_constraint.items():
            new_markers = []
            for dep in _deps:
                marker = dep.marker.without_extras()
                if marker.is_any():
                    # No marker or only extras
                    continue

                new_markers.append(marker)

            if not new_markers:
                continue

            dep = _deps[0]
            dep.marker = dep.marker.union(MarkerUnion(*new_markers))
            by_constraint[constraint] = [dep]

        if len(by_constraint) == 1:
            self.debug(
                "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
            )
            dependencies.append(list(by_constraint.values())[0][0])
            continue

        # We leave dependencies as-is if they have the same
        # python/platform constraints.
        # That way the resolver will pick up the conflict
        # and display a proper error.
        _deps = [value[0] for value in by_constraint.values()]
        seen = set()
        for _dep in _deps:
            pep_508_dep = _dep.to_pep_508(False)
            if ";" not in pep_508_dep:
                _requirements = ""
            else:
                _requirements = pep_508_dep.split(";")[1].strip()

            if _requirements not in seen:
                seen.add(_requirements)

        if len(_deps) != len(seen):
            for _dep in _deps:
                dependencies.append(_dep)

            continue

        # At this point, we raise an exception that will
        # tell the solver to make new resolutions with specific overrides.
        #
        # For instance, if the foo (1.2.3) package has the following dependencies:
        #   - bar (>=2.0) ; python_version >= "3.6"
        #   - bar (<2.0) ; python_version < "3.6"
        #
        # then the solver will need to make two new resolutions
        # with the following overrides:
        #   - {<Package foo (1.2.3)>: {"bar": <Dependency bar (>=2.0)>}}
        #   - {<Package foo (1.2.3)>: {"bar": <Dependency bar (<2.0)>}}
        markers = []
        for constraint, _deps in by_constraint.items():
            markers.append(_deps[0].marker)

        _deps = [_dep[0] for _dep in by_constraint.values()]
        self.debug(
            "<warning>Different requirements found for {}.</warning>".format(
                ", ".join(
                    "<c1>{}</c1> <fg=default>(<c2>{}</c2>)</> with markers <b>{}</b>".format(
                        d.name,
                        d.pretty_constraint,
                        d.marker if not d.marker.is_any() else "*",
                    )
                    for d in _deps[:-1]
                )
                + " and "
                + "<c1>{}</c1> <fg=default>(<c2>{}</c2>)</> with markers <b>{}</b>".format(
                    _deps[-1].name,
                    _deps[-1].pretty_constraint,
                    _deps[-1].marker if not _deps[-1].marker.is_any() else "*",
                )
            )
        )

        # We need to check if one of the duplicate dependencies
        # has no markers. If there is one, we need to change its
        # environment markers to the inverse of the union of the
        # other dependencies' markers.
        # For instance, if we have the following dependencies:
        #   - ipython
        #   - ipython (1.2.4) ; implementation_name == "pypy"
        #
        # the marker for `ipython` will become `implementation_name != "pypy"`.
        any_markers_dependencies = [d for d in _deps if d.marker.is_any()]
        other_markers_dependencies = [d for d in _deps if not d.marker.is_any()]

        if any_markers_dependencies:
            marker = other_markers_dependencies[0].marker
            for other_dep in other_markers_dependencies[1:]:
                marker = marker.union(other_dep.marker)

            for i, d in enumerate(_deps):
                if d.marker.is_any():
                    _deps[i].marker = marker.invert()

        overrides = []
        for _dep in _deps:
            current_overrides = self._overrides.copy()
            package_overrides = current_overrides.get(package, {}).copy()
            package_overrides.update({_dep.name: _dep})
            current_overrides.update({package: package_overrides})
            overrides.append(current_overrides)

        raise OverrideNeeded(*overrides)

    # Modifying dependencies as needed
    clean_dependencies = []
    for dep in dependencies:
        if not package.dependency.transitive_marker.without_extras().is_any():
            marker_intersection = (
                package.dependency.transitive_marker.without_extras().intersect(
                    dep.marker.without_extras()
                )
            )
            if marker_intersection.is_empty():
                # The dependency is not needed, since the markers specified
                # for the current package selection are not compatible with
                # the markers for the current dependency, so we skip it
                continue

            dep.transitive_marker = marker_intersection

        if not package.dependency.python_constraint.is_any():
            python_constraint_intersection = dep.python_constraint.intersect(
                package.dependency.python_constraint
            )
            if python_constraint_intersection.is_empty():
                # This dependency is not needed under the current Python constraint.
                continue

            dep.transitive_python_versions = str(python_constraint_intersection)

        clean_dependencies.append(dep)

    package.requires = clean_dependencies

    return package
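The marker-inversion step near the end of the second listing is easier to see with the ipython example from its comment. The snippet below is a minimal sketch under the assumption that markers are plain strings and only a single "==" comparison needs to be negated; `invert_marker` is a hypothetical helper, whereas Poetry negates real marker objects (`marker.invert()`) and handles arbitrary expressions.

def invert_marker(marker):
    # Deliberately naive negation for this illustration only: flip a single
    # "==" comparison. Poetry's marker objects negate arbitrary expressions.
    return marker.replace("==", "!=", 1)


duplicates = {
    "ipython": None,  # no environment marker
    "ipython (1.2.4)": 'implementation_name == "pypy"',
}

# Union of the markers carried by the marked duplicates; only one here.
marked = [m for m in duplicates.values() if m is not None]
union = marked[0]

# The duplicate without a marker receives the inverse of that union.
resolved = {
    name: marker if marker is not None else invert_marker(union)
    for name, marker in duplicates.items()
}
print(resolved["ipython"])  # implementation_name != "pypy"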
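The OverrideNeeded branch itself only builds one override mapping per conflicting duplicate and hands them all to the solver, which then re-resolves once per mapping. Here is a minimal sketch of that bookkeeping for the foo/bar example in the comments, using plain dictionaries and strings in place of Poetry's Package and Dependency objects; `build_overrides` is a hypothetical helper, not a Poetry API.

def build_overrides(existing_overrides, package, conflicting_deps):
    # One override set per conflicting duplicate: each set pins `package`
    # to exactly one of the mutually exclusive requirements, and the solver
    # is re-run once per set (this is what raise OverrideNeeded(*overrides)
    # asks the caller to do).
    overrides = []
    for dep_name, dep in conflicting_deps:
        current = dict(existing_overrides)
        package_overrides = dict(current.get(package, {}))
        package_overrides[dep_name] = dep
        current[package] = package_overrides
        overrides.append(current)
    return overrides


overrides = build_overrides(
    existing_overrides={},
    package="foo (1.2.3)",
    conflicting_deps=[
        ("bar", 'bar (>=2.0) ; python_version >= "3.6"'),
        ("bar", 'bar (<2.0) ; python_version < "3.6"'),
    ],
)
for override in overrides:
    print(override)
# {'foo (1.2.3)': {'bar': 'bar (>=2.0) ; python_version >= "3.6"'}}
# {'foo (1.2.3)': {'bar': 'bar (<2.0) ; python_version < "3.6"'}}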