def tweak_piptools_depcache_filename(version_info, platform, *args, **kwargs):
    depcache = DependencyCache(*args, **kwargs)
    # pylint: disable=protected-access
    cache_file = os.path.join(
        os.path.dirname(depcache._cache_file),
        "depcache-{}-ptc{}-py{}.{}-mocked-py{}.{}.json".format(
            platform, __version__, *sys.version_info[:2], *version_info[:2]
        ),
    )
    log.info("Tweaking the pip-tools depcache file to: %s", cache_file)
    depcache._cache_file = cache_file
    # pylint: enable=protected-access

    if os.environ["PIP_TOOLS_COMPILE_CLEAN_CACHE"] == "1":
        if os.path.exists(cache_file):
            os.unlink(cache_file)

    return depcache
def test_reverse_dependencies(from_line, tmpdir):
    # Since this is a test, make a temporary directory. Converting to str from py.path.
    tmp_dir_path = str(tmpdir)

    # Create a cache object. The keys are packages, and the values are lists
    # of packages on which the keys depend.
    cache = DependencyCache(cache_dir=tmp_dir_path)
    cache[from_line("top==1.2")] = ["middle>=0.3", "bottom>=5.1.2"]
    cache[from_line("top[xtra]==1.2")] = ["middle>=0.3", "bottom>=5.1.2", "bonus==0.4"]
    cache[from_line("middle==0.4")] = ["bottom<6"]
    cache[from_line("bottom==5.3.5")] = []
    cache[from_line("bonus==0.4")] = []

    # In this case, we're using top 1.2 without an extra, so the "bonus" package
    # is not depended upon.
    reversed_no_extra = cache.reverse_dependencies(
        [
            from_line("top==1.2"),
            from_line("middle==0.4"),
            from_line("bottom==5.3.5"),
            from_line("bonus==0.4"),
        ]
    )
    assert reversed_no_extra == {"middle": {"top"}, "bottom": {"middle", "top"}}

    # Now we're using top 1.2 with the "xtra" extra, so it depends
    # on the "bonus" package.
    reversed_extra = cache.reverse_dependencies(
        [
            from_line("top[xtra]==1.2"),
            from_line("middle==0.4"),
            from_line("bottom==5.3.5"),
            from_line("bonus==0.4"),
        ]
    )
    assert reversed_extra == {
        "middle": {"top"},
        "bottom": {"middle", "top"},
        "bonus": {"top"},
    }

    # Clean up our temp directory
    rmtree(tmp_dir_path)
def __init__(self, click_context):
    from piptools.cache import DependencyCache

    self.dependency_cache = DependencyCache()

    # NOTE: Debugging
    # Show some debugging information.
    from piptools.logging import log

    log.verbose = True

    self.__click_context = click_context
def _prepare_resolver(self, requirements: TRequirements) -> Resolver:
    if isinstance(requirements, Path):
        constraints = parse_requirements(
            str(requirements),
            self._repository.session,
            self._repository.finder,
            options=self._repository.options,
        )
    else:
        constraints = {
            InstallRequirement(PipRequirement(req), comes_from="line")
            for req in requirements
        }

    cache = DependencyCache(self.CACHE_DIR)
    resolver = Resolver(constraints, self._repository, cache)
    resolver.resolve = partial(resolver.resolve, max_rounds=100)
    return resolver
def _get_release_info(self, name, version):  # type: (str, str) -> dict
    from pip.req import InstallRequirement
    from pip.exceptions import InstallationError

    ireq = InstallRequirement.from_line('{}=={}'.format(name, version))
    resolver = Resolver(
        [ireq], self._repository, cache=DependencyCache(self._cache_dir.as_posix())
    )
    try:
        requirements = list(resolver._iter_dependencies(ireq))
    except (InstallationError, RequirementParseError):
        # setup.py egg-info error most likely
        # So we assume no dependencies
        requirements = []

    requires = []
    for dep in requirements:
        constraint = str(dep.req.specifier)
        require = dep.name
        if constraint:
            require += ' ({})'.format(constraint)

        requires.append(require)

    try:
        hashes = resolver.resolve_hashes([ireq])[ireq]
    except IndexError:
        # Sometimes pip-tools fails when getting indices
        hashes = []

    hashes = [h.split(':')[1] for h in hashes]

    data = {
        'name': name,
        'version': version,
        'summary': '',
        'requires_dist': requires,
        'digests': hashes
    }

    resolver.repository.freshen_build_caches()

    return data
def _get_release_info(self, name: str, version: str) -> dict:
    from pip.req import InstallRequirement
    from pip.exceptions import InstallationError

    ireq = InstallRequirement.from_line(f'{name}=={version}')
    resolver = Resolver(
        [ireq], self._repository, cache=DependencyCache(self._cache_dir.as_posix())
    )
    try:
        requirements = list(resolver._iter_dependencies(ireq))
    except (InstallationError, RequirementParseError):
        # setup.py egg-info error most likely
        # So we assume no dependencies
        requirements = []

    requires = []
    for dep in requirements:
        constraint = str(dep.req.specifier)
        require = f'{dep.name}'
        if constraint:
            require += f' ({constraint})'

        requires.append(require)

    hashes = resolver.resolve_hashes([ireq])[ireq]
    hashes = [h.split(':')[1] for h in hashes]

    data = {
        'name': name,
        'version': version,
        'summary': '',
        'requires_dist': requires,
        'digests': hashes
    }

    resolver.repository.freshen_build_caches()

    return data
def test_read_cache_permission_error(tmpdir):
    cache = DependencyCache(cache_dir=tmpdir)
    with open(cache._cache_file, "w") as fp:
        os.fchmod(fp.fileno(), 0o000)
    with pytest.raises(IOError, match="Permission denied"):
        cache.cache
def test_read_cache_does_not_exist(tmpdir):
    cache = DependencyCache(cache_dir=tmpdir)
    assert cache.cache == {}
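The two tests above exercise the basic DependencyCache surface: construction with a cache_dir, the lazily loaded cache mapping, and the on-disk _cache_file path. A minimal standalone sketch of that same surface, assuming a pip-tools version where DependencyCache lives in piptools.cache and accepts a cache_dir keyword (the throwaway temporary directory is purely illustrative):

import tempfile

from piptools.cache import DependencyCache

# Use a throwaway directory so nothing touches the real pip-tools cache.
cache_dir = tempfile.mkdtemp()
cache = DependencyCache(cache_dir=cache_dir)

# A fresh cache with no JSON file on disk reads back as an empty mapping,
# mirroring test_read_cache_does_not_exist above.
assert cache.cache == {}

# Path of the JSON file that backs this cache once entries are written.
print(cache._cache_file)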
def depcache(tmpdir):
    return DependencyCache(str(tmpdir / "dep-cache"))
def _resolve(self, deps):
    # Checking if we should activate prereleases
    prereleases = False
    for dep in deps:
        if dep.accepts_prereleases():
            prereleases = True
            break

    constraints = [dep.as_requirement() for dep in deps]
    command = get_pip_command()
    opts, _ = command.parse_args([])

    resolver = Resolver(
        constraints,
        PyPIRepository(opts, command._build_session(opts)),
        cache=DependencyCache(CACHE_DIR),
        prereleases=prereleases
    )
    matches = resolver.resolve()
    pinned = [m for m in matches if not m.editable and is_pinned_requirement(m)]
    unpinned = [m for m in matches if m.editable or not is_pinned_requirement(m)]
    reversed_dependencies = resolver.reverse_dependencies(matches)

    # Complete reversed dependencies with cache
    cache = resolver.dependency_cache.cache
    for m in unpinned:
        name = key_from_req(m.req)
        if name not in cache:
            continue

        dependencies = cache[name][list(cache[name].keys())[0]]
        for dep in dependencies:
            dep = canonicalize_name(dep)
            if dep not in reversed_dependencies:
                reversed_dependencies[dep] = set()

            reversed_dependencies[dep].add(canonicalize_name(name))

    hashes = resolver.resolve_hashes(pinned)
    packages = []
    for m in matches:
        name = key_from_req(m.req)
        if name in self.UNSAFE:
            continue

        version = str(m.req.specifier)
        if m in unpinned:
            url, specifier = m.link.url.split('@')
            rev, _ = specifier.split('#')

            version = self._get_vcs_version(url, rev)
            checksum = 'sha1:{}'.format(version['rev'])
        else:
            version = version.replace('==', '')
            checksum = list(hashes[m])

        # Figuring out category and optionality
        category = None
        optional = False

        # Checking if it's a main dependency
        for dep in deps:
            if dep.name == name:
                category = dep.category
                optional = dep.optional
                break

        if not category:
            def _category(child):
                opt = False
                cat = None
                parents = reversed_dependencies.get(child, set())
                for parent in parents:
                    for dep in deps:
                        if dep.name != parent:
                            continue

                        opt = dep.optional

                        if dep.category == 'main':
                            # Dependency is given by at least one main package
                            # We flag it as main
                            return 'main', opt

                        return 'dev', opt

                    cat, op = _category(parent)

                    if cat is not None:
                        return cat, opt

                return cat, opt

            category, optional = _category(name)

        # If category is still None at this point
        # the dependency must have come from a VCS
        # dependency. To avoid missing packages
        # we assume "main" category and not optional.
        if category is None:
            category = 'main'
            optional = False

        if not isinstance(checksum, list):
            checksum = [checksum]

        # Retrieving Python restriction if any
        python = self._get_pythons_for_package(
            name, reversed_dependencies, deps
        )
        python = list(python)

        if '*' in python:
            # If at least one parent gave a wildcard
            # then it should be installed for any Python version.
            python = ['*']

        package = {
            'name': name,
            'version': version,
            'checksum': checksum,
            'category': category,
            'optional': optional,
            'python': python
        }

        packages.append(package)

    return sorted(packages, key=lambda p: p['name'].lower())
def depcache(tmpdir):
    return DependencyCache(str(tmpdir))
def on_end(self, event):
    # Our config object
    python_config = event.config["python"]

    # Pip / PyPI
    repository = PyPIRepository([], cache_dir=CACHE_DIR)

    # We only need to construct this structure if use_uniform_requirements == True
    requirements_by_name = {}
    if python_config.use_uniform_requirements:
        tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
        for extra in itertools.chain((None,), python_config.get_extras()):
            tmpfile.write("\n".join(python_config.get_requirements(extra=extra)) + "\n")
        tmpfile.flush()
        constraints = list(
            parse_requirements(
                tmpfile.name,
                finder=repository.finder,
                session=repository.session,
                options=repository.options,
            )
        )

        # This resolver is able to evaluate ALL the dependencies across the extras
        resolver = Resolver(
            constraints,
            repository,
            cache=DependencyCache(CACHE_DIR),
            # cache=DependencyCache(tempfile.tempdir),
            prereleases=False,
            clear_caches=False,
            allow_unsafe=False,
        )
        for req in resolver.resolve(max_rounds=10):
            requirements_by_name[parse_requirement(str(req.req)).name] = SimpleNamespace(
                requirement=format_requirement(req).strip().replace(" ", ""), url=req.link
            )

        python_config.check_duplicate_dependencies_uniform(requirements_by_name)

    # Now iterate over the extras and look up each requirement and its dependencies,
    # using the structure created above to select the unified versions (unless the
    # flag indicates otherwise).
    for extra in itertools.chain((None,), python_config.get_extras()):
        requirements_file = "requirements{}.txt".format("-" + extra if extra else "")

        if python_config.override_requirements or not os.path.exists(requirements_file):
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            tmpfile.write("\n".join(python_config.get_requirements(extra=extra)) + "\n")
            tmpfile.flush()
            constraints = list(
                parse_requirements(
                    tmpfile.name,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                )
            )
            resolver = Resolver(
                constraints,
                repository,
                cache=DependencyCache(CACHE_DIR),
                prereleases=False,
                clear_caches=False,
                allow_unsafe=False,
            )

            if not python_config.use_uniform_requirements:
                python_config.check_duplicate_dependencies_nonuniform(extra, resolver)

            requirements_list = []
            for req in resolver.resolve(max_rounds=10):
                if req.name != python_config.get("name"):
                    requirement = python_config.get_requirement_info_by_name(req, requirements_by_name)
                    if requirement:
                        requirements_list.append(requirement)

            self.render_file_inline(
                requirements_file,
                "\n".join(
                    (
                        "-e .{}".format("[" + extra + "]" if extra else ""),
                        *(("-r requirements.txt",) if extra else ()),
                        *python_config.get_vendors(extra=extra),
                        *sorted(requirements_list),
                    )
                ),
                override=python_config.override_requirements,
            )

    # Updates setup file
    setup = python_config.get_setup()
    context = {
        "url": setup.pop("url", ""),
        "download_url": setup.pop("download_url", ""),
    }

    for k, v in context.items():
        context[k] = context[k].format(name=setup["name"], user=getuser(), version="{version}")

    context.update(
        {
            "entry_points": setup.pop("entry_points", {}),
            "extras_require": python_config.get("extras_require"),
            "install_requires": python_config.get("install_requires"),
            "python": python_config,
            "setup": setup,
            "banner": get_override_warning_banner(),
        }
    )

    # Render (with overwriting) the almighty setup.py
    self.render_file("setup.py", "python/setup.py.j2", context, override=True)