def resolve_deps(deps, sources=None, verbose=False, hashes=False):
    """Resolve a list of dependency strings with pip-tools.

    Returns a list of dicts ``{'name': ..., 'version': ...}``, each with a
    ``'hashes'`` key when ``hashes`` is true.  Hashes come from the PyPI
    warehouse JSON API plus the pip-tools resolver's own hash pass.

    Fixes over the previous revision: the resolver was bound to ``r`` and
    then shadowed by the HTTP response (also named ``r``) inside the loop,
    and ``resolve()`` ran twice — once for the hash pass and once for
    iteration.  We now resolve once and reuse the result.
    """
    constraints = []
    for dep in deps:
        if dep.startswith('-e '):
            # Editable requirement: "-e <path-or-url>".
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)

    pip_command = get_pip_command()
    pip_args = []
    if sources:
        # Only the first source is honored as the index URL.
        pip_args.extend(['-i', sources[0]['url']])
    pip_options, _ = pip_command.parse_args(pip_args)
    pypi = PyPIRepository(pip_options=pip_options, session=requests)
    if verbose:
        logging.log.verbose = True

    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []

    # Resolve once; reuse for both the hash pass and the result loop.
    resolved_tree = resolver.resolve()
    _hashes = resolver.resolve_hashes(resolved_tree)

    # Convert to a dictionary indexed by package names instead of
    # install-requirement objects.
    resolved_hashes = {}
    for req, _hash in _hashes.items():
        resolved_hashes[pep423_name(req.name)] = {
            'version': clean_pkg_version(req.specifier),
            'hashes': list(_hash)
        }

    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)
        if hashes:
            try:
                collected_hashes = []
                # Distinct name for the HTTP response: do not shadow the resolver.
                response = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
                for release in response.json()['releases'][version]:
                    collected_hashes.append(release['digests']['sha256'])
                collected_hashes = ['sha256:' + s for s in collected_hashes]
                # Add pypi resolved hashes
                if name in resolved_hashes and resolved_hashes[name]['version'] == version:
                    collected_hashes.extend(resolved_hashes[name]['hashes'])
                results.append({'name': name, 'version': version, 'hashes': collected_hashes})
            except ValueError:
                # Bad / non-JSON API response: record the pin without hashes.
                results.append({'name': name, 'version': version})
        else:
            results.append({'name': name, 'version': version})
    return results
def _get_release_info(self, name, version):
    # type: (str, str) -> dict
    """Collect metadata (requirements and hash digests) for one release."""
    from pip.req import InstallRequirement
    from pip.exceptions import InstallationError

    ireq = InstallRequirement.from_line('{}=={}'.format(name, version))
    resolver = Resolver(
        [ireq], self._repository,
        cache=DependencyCache(self._cache_dir.as_posix())
    )

    try:
        found_deps = list(resolver._iter_dependencies(ireq))
    except (InstallationError, RequirementParseError):
        # setup.py egg-info error most likely,
        # so we assume no dependencies.
        found_deps = []

    # Render each sub-dependency as "name" or "name (specifier)".
    requires = []
    for sub_dep in found_deps:
        spec = str(sub_dep.req.specifier)
        if spec:
            requires.append('{} ({})'.format(sub_dep.name, spec))
        else:
            requires.append(sub_dep.name)

    try:
        digests = resolver.resolve_hashes([ireq])[ireq]
    except IndexError:
        # Sometimes pip-tools fails when getting indices.
        digests = []

    data = {
        'name': name,
        'version': version,
        'summary': '',
        'requires_dist': requires,
        # Strip the "sha256:"-style algorithm prefix from each digest.
        'digests': [entry.split(':')[1] for entry in digests]
    }

    resolver.repository.freshen_build_caches()

    return data
def _get_release_info(self, name: str, version: str) -> dict:
    """Collect metadata (requirements and hash digests) for one release.

    Returns a dict with ``name``, ``version``, ``summary``,
    ``requires_dist`` (list of "name (specifier)" strings) and
    ``digests`` (hash values with the algorithm prefix stripped).
    """
    from pip.req import InstallRequirement
    from pip.exceptions import InstallationError

    ireq = InstallRequirement.from_line(f'{name}=={version}')
    resolver = Resolver([ireq], self._repository, cache=DependencyCache(self._cache_dir.as_posix()))
    try:
        requirements = list(resolver._iter_dependencies(ireq))
    except (InstallationError, RequirementParseError):
        # setup.py egg-info error most likely
        # So we assume no dependencies
        requirements = []

    requires = []
    for dep in requirements:
        constraint = str(dep.req.specifier)
        require = f'{dep.name}'
        if constraint:
            require += f' ({constraint})'
        requires.append(require)

    try:
        hashes = resolver.resolve_hashes([ireq])[ireq]
    except IndexError:
        # Sometimes pip-tools fails when getting indices; fall back to an
        # empty hash list instead of aborting the whole metadata lookup.
        hashes = []
    # Strip the "sha256:"-style algorithm prefix from each digest.
    hashes = [h.split(':')[1] for h in hashes]

    data = {
        'name': name,
        'version': version,
        'summary': '',
        'requires_dist': requires,
        'digests': hashes
    }

    resolver.repository.freshen_build_caches()

    return data
def resolve_deps(deps, which, which_pip, project, sources=None, verbose=False, python=False, clear=False, pre=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.

    Fix: ``tempfile.mkstemp`` returns ``(fd, path)`` and the previous
    revision discarded the fd without closing it, leaking one file
    descriptor per dependency; we now use ``NamedTemporaryFile`` which
    closes its handle on ``with``-exit (``delete=False`` so pip can
    reopen the file by name afterwards).
    """
    index_lookup = {}
    markers_lookup = {}
    python_path = which('python')
    with HackedPythonVersion(python_version=python, python_path=python_path):

        class PipCommand(pip.basecommand.Command):
            """Needed for pip-tools."""
            name = 'PipCommand'

        constraints = []
        for dep in deps:
            # Write the requirement to a temp file so pip can parse it.
            with tempfile.NamedTemporaryFile(mode='w', prefix='pipenv-',
                                             suffix='-requirement.txt',
                                             delete=False) as f:
                f.write(dep)
            t = f.name
            if dep.startswith('-e '):
                constraint = pip.req.InstallRequirement.from_editable(
                    dep[len('-e '):])
            else:
                constraint = [
                    c for c in pip.req.parse_requirements(
                        t, session=pip._vendor.requests)
                ][0]
            if ' -i ' in dep:
                # Remember which index this dependency was pinned to.
                index_lookup[constraint.name] = project.get_source(
                    url=dep.split(' -i ')[1]).get('name')
            if constraint.markers:
                markers_lookup[constraint.name] = str(
                    constraint.markers).replace('"', "'")
            constraints.append(constraint)

        pip_command = get_pip_command()
        pip_args = []
        if sources:
            pip_args = prepare_pip_source_args(sources, pip_args)
        if verbose:
            print('Using pip: {0}'.format(' '.join(pip_args)))
        pip_options, _ = pip_command.parse_args(pip_args)
        session = pip_command._build_session(pip_options)
        pypi = PyPIRepository(pip_options=pip_options, session=session)
        if verbose:
            logging.log.verbose = True

        results = []
        resolved_tree = set()
        resolver = Resolver(constraints=constraints, repository=pypi,
                            clear_caches=clear, prereleases=pre)
        # Pre-resolve instead of iterating to avoid asking pypi for hashes
        # of editable packages.
        try:
            resolved_tree.update(
                resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
        except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
            click.echo(
                '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n '
                'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
                ''.format(crayons.red('Warning', bold=True),
                          crayons.red('$ pipenv install --skip-lock'),
                          crayons.red('$ pipenv graph')),
                err=True)
            click.echo(crayons.blue(e))
            sys.exit(1)

        for result in resolved_tree:
            if not result.editable:
                name = pep423_name(result.name)
                version = clean_pkg_version(result.specifier)
                index = index_lookup.get(result.name)
                # Prefer an explicitly recorded marker; otherwise derive one
                # from the resolved requirement (skipping 'extra' markers).
                if not markers_lookup.get(result.name):
                    markers = str(
                        result.markers) if result.markers and 'extra' not in str(
                            result.markers) else None
                else:
                    markers = markers_lookup.get(result.name)
                collected_hashes = []
                # Guard against sources=None before joining URLs.
                if 'python.org' in '|'.join([source['url'] for source in (sources or [])]):
                    try:
                        # Grab the hashes from the new warehouse API.
                        r = requests.get(
                            'https://pypi.org/pypi/{0}/json'.format(name),
                            timeout=10)
                        api_releases = r.json()['releases']
                        cleaned_releases = {}
                        for api_version, api_info in api_releases.items():
                            cleaned_releases[clean_pkg_version(
                                api_version)] = api_info
                        for release in cleaned_releases[version]:
                            collected_hashes.append(release['digests']['sha256'])
                        collected_hashes = [
                            'sha256:' + s for s in collected_hashes
                        ]
                        # Collect un-collectable hashes.
                        if not collected_hashes:
                            collected_hashes = list(
                                list(resolver.resolve_hashes([result
                                                              ]).items())[0][1])
                    except (ValueError, KeyError):
                        if verbose:
                            print('Error fetching {}'.format(name))
                d = {'name': name, 'version': version, 'hashes': collected_hashes}
                if index:
                    d.update({'index': index})
                if markers:
                    d.update({'markers': markers.replace('"', "'")})
                results.append(d)
    return results
def _resolve(self, deps):
    """Resolve *deps* with pip-tools and return a sorted list of package
    dicts (name, version, checksum, category, optional, python).

    NOTE(review): SOURCE arrived with all indentation stripped; the block
    structure below is a reconstruction — confirm against history.
    """
    # Checking if we should activate prereleases: any single dependency
    # accepting them turns them on for the whole resolution.
    prereleases = False
    for dep in deps:
        if dep.accepts_prereleases():
            prereleases = True
            break

    constraints = [dep.as_requirement() for dep in deps]
    command = get_pip_command()
    opts, _ = command.parse_args([])

    resolver = Resolver(
        constraints, PyPIRepository(opts, command._build_session(opts)),
        cache=DependencyCache(CACHE_DIR),
        prereleases=prereleases
    )
    matches = resolver.resolve()
    # Pinned == exact "==" requirements; everything editable or loose is
    # handled separately (hashes only make sense for pins).
    pinned = [m for m in matches if not m.editable and is_pinned_requirement(m)]
    unpinned = [m for m in matches if m.editable or not is_pinned_requirement(m)]
    reversed_dependencies = resolver.reverse_dependencies(matches)

    # Complete reversed dependencies with cache
    cache = resolver.dependency_cache.cache
    for m in unpinned:
        name = key_from_req(m.req)
        if name not in cache:
            continue

        # Take the dependency list of the first cached version for this
        # name — presumably only one version is cached per unpinned
        # requirement; TODO confirm.
        dependencies = cache[name][list(cache[name].keys())[0]]
        for dep in dependencies:
            dep = canonicalize_name(dep)
            if dep not in reversed_dependencies:
                reversed_dependencies[dep] = set()
            reversed_dependencies[dep].add(canonicalize_name(name))

    hashes = resolver.resolve_hashes(pinned)
    packages = []
    for m in matches:
        name = key_from_req(m.req)
        if name in self.UNSAFE:
            continue

        version = str(m.req.specifier)
        if m in unpinned:
            # VCS requirement: split "url@ref#egg=..." into parts.
            # NOTE(review): assumes exactly one '@' and one '#' in the
            # link URL — an '@' in the path would break this; verify.
            url, specifier = m.link.url.split('@')
            rev, _ = specifier.split('#')
            version = self._get_vcs_version(url, rev)
            checksum = 'sha1:{}'.format(version['rev'])
        else:
            version = version.replace('==', '')
            checksum = list(hashes[m])

        # Figuring out category and optionality
        category = None
        optional = False

        # Checking if it's a main dependency
        for dep in deps:
            if dep.name == name:
                category = dep.category
                optional = dep.optional
                break

        if not category:
            def _category(child):
                # Walk up the reverse-dependency tree until a declared
                # (main/dev) parent is found.
                opt = False
                cat = None
                parents = reversed_dependencies.get(child, set())
                for parent in parents:
                    for dep in deps:
                        if dep.name != parent:
                            continue
                        opt = dep.optional
                        if dep.category == 'main':
                            # Dependency is given by at least one main package
                            # We flag it as main
                            return 'main', opt
                        return 'dev', opt

                    # Parent not declared directly: recurse.
                    # NOTE(review): result is bound to `op` but `opt` is
                    # returned — looks like a typo; confirm intent.
                    cat, op = _category(parent)
                    if cat is not None:
                        return cat, opt

                return cat, opt

            category, optional = _category(name)

        # If category is still None at this point
        # The dependency must have come from a VCS
        # dependency. To avoid missing packages
        # we assume "main" category and not optional
        if category is None:
            category = 'main'
            optional = False

        if not isinstance(checksum, list):
            checksum = [checksum]

        # Retrieving Python restriction if any
        python = self._get_pythons_for_package(
            name, reversed_dependencies, deps
        )
        python = list(python)

        if '*' in python:
            # If at least one parent gave a wildcard
            # Then it should be installed for any Python version
            python = ['*']

        package = {
            'name': name,
            'version': version,
            'checksum': checksum,
            'category': category,
            'optional': optional,
            'python': python
        }

        packages.append(package)

    return sorted(packages, key=lambda p: p['name'].lower())
def resolve_deps(deps, which, which_pip, project, sources=None, verbose=False, python=False, clear=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.

    Fix: ``tempfile.mkstemp`` returns ``(fd, path)`` and the previous
    revision discarded the fd without closing it, leaking one file
    descriptor per dependency; we now use ``NamedTemporaryFile`` which
    closes its handle on ``with``-exit (``delete=False`` so pip can
    reopen the file by name afterwards).
    """
    with HackedPythonVersion(python):

        class PipCommand(pip.basecommand.Command):
            """Needed for pip-tools."""
            name = 'PipCommand'

        constraints = []
        extra_constraints = []
        for dep in deps:
            # Write the requirement to a temp file so pip can parse it.
            with tempfile.NamedTemporaryFile(mode='w', prefix='pipenv-',
                                             suffix='-requirement.txt',
                                             delete=False) as f:
                f.write(dep)
            t = f.name
            if dep.startswith('-e '):
                constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
                # Resolve extra constraints from -e packages (that rely on setuptools.)
                extra_constraints = best_matches_from(dep[len('-e '):], which=which, which_pip=which_pip, project=project)
                extra_constraints = [pip.req.InstallRequirement.from_line(c) for c in extra_constraints]
            else:
                constraint = [c for c in pip.req.parse_requirements(t, session=pip._vendor.requests)][0]
                extra_constraints = []
            constraints.append(constraint)
            constraints.extend(extra_constraints)

        pip_command = get_pip_command()
        pip_args = []
        if sources:
            pip_args.extend(['-i', sources[0]['url']])
        pip_options, _ = pip_command.parse_args(pip_args)
        pypi = PyPIRepository(pip_options=pip_options, session=requests)
        if verbose:
            logging.log.verbose = True

        resolver = Resolver(constraints=constraints, repository=pypi, clear_caches=clear)
        results = []
        # Pre-resolve instead of iterating to avoid asking pypi for hashes
        # of editable packages.
        resolved_tree = resolver.resolve()
        for result in resolved_tree:
            name = pep423_name(result.name)
            version = clean_pkg_version(result.specifier)
            collected_hashes = []
            try:
                # Grab the hashes from the new warehouse API.
                r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
                api_releases = r.json()['releases']
                cleaned_releases = {}
                for api_version, api_info in api_releases.items():
                    cleaned_releases[clean_pkg_version(api_version)] = api_info
                for release in cleaned_releases[version]:
                    collected_hashes.append(release['digests']['sha256'])
                collected_hashes = ['sha256:' + s for s in collected_hashes]
                # Collect un-collectable hashes.
                if not collected_hashes:
                    collected_hashes = list(list(resolver.resolve_hashes([result]).items())[0][1])
            except (ValueError, KeyError):
                # Best-effort hash collection: a bad API response or a
                # missing version key simply yields no hashes.
                pass
            results.append({'name': name, 'version': version, 'hashes': collected_hashes})
    return results
def resolve_deps(deps, sources=None, verbose=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.
    """
    # Editable requirements ("-e ...") get a dedicated constructor.
    constraints = [
        pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        if dep.startswith('-e ')
        else pip.req.InstallRequirement.from_line(dep)
        for dep in deps
    ]

    pip_command = get_pip_command()
    pip_args = ['-i', sources[0]['url']] if sources else []
    pip_options, _ = pip_command.parse_args(pip_args)
    pypi = PyPIRepository(pip_options=pip_options, session=requests)
    if verbose:
        logging.log.verbose = True

    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []
    # Pre-resolve instead of iterating to avoid asking pypi for hashes of
    # editable packages.
    for result in resolver.resolve():
        pkg_name = pep423_name(result.name)
        pkg_version = clean_pkg_version(result.specifier)
        collected_hashes = []
        try:
            # Grab the hashes from the new warehouse API.
            response = requests.get('https://pypi.org/pypi/{0}/json'.format(pkg_name))
            cleaned_releases = {
                clean_pkg_version(api_version): api_info
                for api_version, api_info in response.json()['releases'].items()
            }
            collected_hashes = [
                'sha256:' + release['digests']['sha256']
                for release in cleaned_releases[pkg_version]
            ]
            # Collect un-collectable hashes via the resolver as a fallback.
            if not collected_hashes:
                collected_hashes = list(
                    next(iter(resolver.resolve_hashes([result]).items()))[1])
        except (ValueError, KeyError):
            pass
        results.append({
            'name': pkg_name,
            'version': pkg_version,
            'hashes': collected_hashes
        })
    return results
def resolve_deps(deps, sources=None, verbose=False, hashes=False):
    """Resolve a list of dependency strings with pip-tools.

    Returns a list of dicts ``{'name': ..., 'version': ...}``, with a
    ``'hashes'`` key when ``hashes`` is true.

    Fixes over the previous revision: ``resolve_hashes(...).items()[0]``
    subscripted a dict view, which raises ``TypeError`` on Python 3
    (materialize with ``list(...)`` first); and a version missing from the
    warehouse releases raised an unhandled ``KeyError`` — now caught
    alongside ``ValueError`` so the pin is still recorded without hashes.
    """
    constraints = []
    for dep in deps:
        if dep.startswith('-e '):
            # Editable requirement: "-e <path-or-url>".
            constraint = pip.req.InstallRequirement.from_editable(
                dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)

    pip_command = get_pip_command()
    pip_args = []
    if sources:
        # Only the first source is honored as the index URL.
        pip_args.extend(['-i', sources[0]['url']])
    pip_options, _ = pip_command.parse_args(pip_args)
    pypi = PyPIRepository(pip_options=pip_options, session=requests)
    if verbose:
        logging.log.verbose = True

    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []
    # Pre-resolve instead of iterating to avoid asking pypi for hashes of
    # editable packages.
    resolved_tree = resolver.resolve()
    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)
        if hashes:
            try:
                collected_hashes = []
                r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
                for release in r.json()['releases'][version]:
                    collected_hashes.append(release['digests']['sha256'])
                collected_hashes = ['sha256:' + s for s in collected_hashes]
                # Collect un-collectable hashes.
                if not collected_hashes:
                    # .items() is a view on Python 3 — materialize before indexing.
                    collected_hashes = list(
                        list(resolver.resolve_hashes([result]).items())[0][1])
                results.append({
                    'name': name,
                    'version': version,
                    'hashes': collected_hashes
                })
            except (ValueError, KeyError):
                results.append({'name': name, 'version': version})
        else:
            results.append({'name': name, 'version': version})
    return results