def resolve_deps(deps, sources=None):
    """Resolve a list of pip-style requirement strings with pip-tools.

    :param deps: iterable of requirement strings; entries starting with
        '-e ' are treated as editable requirements.
    :param sources: optional list of index dicts; only the first entry's
        'url' is used as the index URL.
    :return: list of {'name': ..., 'version': ...} dicts for the resolved tree.
    """
    constraints = []
    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args.extend(['-i', sources[0]['url']])
    pip_options, _ = pip_command.parse_args(pip_args)
    # Bug fix: the original passed the `requests` *module* as the session.
    # PyPIRepository expects a pip session object (retries, auth, caching),
    # so build a real one from the parsed options -- as every other helper
    # in this file does.
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, session=session)
    r = Resolver(constraints=constraints, repository=pypi)
    results = []
    for result in r.resolve():
        results.append({
            'name': pep423_name(result.name),
            # Strip the '==' pin so only the bare version remains.
            'version': six.u(str(result.specifier)).replace('==', '')
        })
    return results
def _get_repository():
    """Build a MockedPyPIRepository backed by a default pip session."""
    command = get_pip_command()
    options, _ = command.parse_args([])
    return MockedPyPIRepository(options, command._build_session(options))
def __init__(self, name, url):
    """Create a named package repository backed by pip and a file cache.

    :param name: repository name; 'pypi' is reserved and rejected.
    :param url: index URL of the repository.
    """
    if name == 'pypi':
        raise ValueError('The name [pypi] is reserved for repositories')

    self._packages = []
    self._name = name
    self._url = url

    # Build a pip session and pip-tools repository with default options.
    command = get_pip_command()
    opts, _ = command.parse_args([])
    self._session = command._build_session(opts)
    self._repository = PyPIRepository(opts, self._session)

    # Release metadata is persisted on disk; packages/matches stay in memory.
    self._cache_dir = Path(CACHE_DIR) / 'cache' / 'repositories' / name
    stores = {
        'releases': {'driver': 'file', 'path': str(self._cache_dir)},
        'packages': {'driver': 'dict'},
        'matches': {'driver': 'dict'},
    }
    self._cache = CacheManager({
        'default': 'releases',
        'serializer': 'json',
        'stores': stores,
    })
def test_generate_hashes_without_interfering_with_each_other(from_line):
    """Hashing one package must not corrupt hash lookup for the next."""
    command = get_pip_command()
    options, _ = command.parse_args([])
    repo = PyPIRepository(options, command._build_session(options))
    for spec in ('cffi==1.9.1', 'matplotlib==2.0.2'):
        repo.get_hashes(from_line(spec))
def test_get_hashes_editable_empty_set(from_editable):
    """An editable (VCS) requirement has no artifact hashes: expect empty set."""
    command = get_pip_command()
    options, _ = command.parse_args([])
    repo = PyPIRepository(options, command._build_session(options))
    ireq = from_editable('git+https://github.com/django/django.git#egg=django')
    assert repo.get_hashes(ireq) == set()
def resolve_deps(deps, sources=None, verbose=False, hashes=False):
    """Resolve dependencies with pip-tools, optionally collecting hashes.

    :param deps: iterable of requirement strings ('-e ...' means editable).
    :param sources: optional index list; the first entry's 'url' is used.
    :param verbose: enable pip-tools verbose logging.
    :param hashes: also collect sha256 hashes via the PyPI JSON API.
    :return: list of dicts with 'name', 'version' and (optionally) 'hashes'.
    """
    constraints = []
    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args.extend(['-i', sources[0]['url']])
    pip_options, _ = pip_command.parse_args(pip_args)
    # Bug fix: a real pip session is required here; the original passed the
    # `requests` module itself, which only works by accident.
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, session=session)
    if verbose:
        logging.log.verbose = True
    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []
    # Bug fix: resolve once and reuse the tree -- the original called
    # resolver.resolve() twice (once for hashes, once for iteration),
    # doing the whole resolution work twice.
    resolved_tree = resolver.resolve()
    # Index pip-tools' own resolved hashes by normalized package name.
    resolved_hashes = {}
    for req, _hash in resolver.resolve_hashes(resolved_tree).items():
        resolved_hashes[pep423_name(req.name)] = {
            'version': clean_pkg_version(req.specifier),
            'hashes': list(_hash),
        }
    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)
        if not hashes:
            results.append({'name': name, 'version': version})
            continue
        try:
            collected_hashes = []
            # Bug fix: do not shadow the resolver variable; use a distinct
            # name for the HTTP response object.
            response = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
            for release in response.json()['releases'][version]:
                collected_hashes.append(release['digests']['sha256'])
            collected_hashes = ['sha256:' + s for s in collected_hashes]
            # Add pypi resolved hashes when pip-tools agrees on the version.
            if name in resolved_hashes and resolved_hashes[name]['version'] == version:
                collected_hashes.extend(resolved_hashes[name]['hashes'])
            results.append({'name': name, 'version': version, 'hashes': collected_hashes})
        except (ValueError, KeyError):
            # KeyError added: 'releases' may lack this exact version string;
            # fall back to a hash-less entry (consistent with later variants
            # of this helper in the codebase).
            results.append({'name': name, 'version': version})
    return results
def get_pypi_repository():
    """
    Get a PyPIRepository object for the tests.

    :rtype: PyPIRepository
    """
    command = get_pip_command()
    options = command.parse_args([])[0]
    return PyPIRepository(options, command._build_session(options))
def test_get_hashes_local_repository_cache_miss(from_line):
    """On a cache miss (no existing pins), the local repo defers to PyPI."""
    command = get_pip_command()
    options, _ = command.parse_args([])
    session = command._build_session(options)
    pypi = PyPIRepository(options, session)
    local = LocalRequirementsRepository({}, pypi)
    with pypi.allow_all_wheels():
        assert local.get_hashes(from_line('cffi==1.9.1')) == EXPECTED
def __call__(self, requirement):
    """Return the newest available base version for *requirement*.

    :param requirement: object with a ``name`` attribute to look up.
    :raises PackageNotFound: when the index has no candidate at all.
    """
    pip_command = get_pip_command()
    pip_options, _ = pip_command.parse_args(self._pip_args)
    session = pip_command._build_session(pip_options)
    repo = PyPIRepository(pip_options, session)
    candidates = repo.find_all_candidates(requirement.name)
    # Idiom: truthiness instead of len(...) == 0.
    if not candidates:
        raise PackageNotFound(
            f"could not find a package named {requirement.name}")
    # max() picks the newest candidate without sorting the whole list.
    best = max(candidates, key=lambda c: c.version)
    return best.version.base_version
def test_generate_hashes_all_platforms(from_line):
    """All published artifact hashes for cffi==1.9.1 must be returned when
    wheels for every platform are considered (not just the current one)."""
    # Expected sha256 hashes of every cffi==1.9.1 artifact on PyPI
    # (all platform wheels plus the sdist).
    expected = {
        'sha256:04b133ef629ae2bc05f83d0b079a964494a9cd17914943e690c57209b44aae20',
        'sha256:0f1b3193c17b93c75e73eeac92f22eec4c98a021d9969b1c347d1944fae0d26b',
        'sha256:1fb1cf40c315656f98f4d3acfb1bd031a14a9a69d155e9a180d5f9b52eaf745a',
        'sha256:20af85d8e154b50f540bc8d517a0dbf6b1c20b5d06e572afda919d5dafd1d06b',
        'sha256:2570f93b42c61013ab4b26e23aa25b640faf5b093ad7dd3504c3a8eadd69bc24',
        'sha256:2f4e2872833ee3764dfc168dea566b7dd83b01ac61b377490beba53b5ece57f7',
        'sha256:31776a37a67424e7821324b9e03a05aa6378bbc2bccc58fa56402547f82803c6',
        'sha256:353421c76545f1d440cacc137abc865f07eab9df0dd3510c0851a2ca04199e90',
        'sha256:36d06de7b09b1eba54b1f5f76e2221afef7489cc61294508c5a7308a925a50c6',
        'sha256:3f1908d0bcd654f8b7b73204f24336af9f020b707fb8af937e3e2279817cbcd6',
        'sha256:5268de3a18f031e9787c919c1b9137ff681ea696e76740b1c6c336a26baaa58a',
        'sha256:563e0bd53fda03c151573217b3a49b3abad8813de9dd0632e10090f6190fdaf8',
        'sha256:5e1368d13f1774852f9e435260be19ad726bbfb501b80472f61c2dc768a0692a',
        'sha256:60881c79eb72cb75bd0a4be5e31c9e431739146c4184a2618cabea3938418984',
        'sha256:6120b62a642a40e47eb6c9ff00c02be69158fc7f7c5ff78e42a2c739d1c57cd6',
        'sha256:65c223e77f87cb463191ace3398e0a6d84ce4ac575d42eb412a220b099f593d6',
        'sha256:6fbf8db55710959344502b58ab937424173ad8b5eb514610bcf56b119caa350a',
        'sha256:74aadea668c94eef4ceb09be3d0eae6619e28b4f1ced4e29cd43a05bb2cfd7a4',
        'sha256:7be1efa623e1ed91b15b1e62e04c536def1d75785eb930a0b8179ca6b65ed16d',
        'sha256:83266cdede210393889471b0c2631e78da9d4692fcca875af7e958ad39b897ee',
        'sha256:86c68a3f8246495962446c6f96f6a27f182b91208187b68f1e87ec3dfd29fa32',
        'sha256:9163f7743cf9991edaddf9cf886708e288fab38e1b9fec9c41c15c85c8f7f147',
        'sha256:97d9f338f91b7927893ea6500b953e4b4b7e47c6272222992bb76221e17056ff',
        'sha256:a7930e73a4359b52323d09de6d6860840314aa09346cbcf4def8875e1b07ebc7',
        'sha256:ada8a42c493e4934a1a8875c2bc9efcb1b88c09883f70375bfa053ab32d6a118',
        'sha256:b0bc2d83cc0ba0e8f0d9eca2ffe07f72f33bec7d84547071e7e875d4cca8272d',
        'sha256:b5412a65605c642adf3e1544b59b8537daf5696dedadd2b3cbebc42e24da45ed',
        'sha256:ba6b5205fced1625b6d9d55f9ef422f9667c5d95f18f07c0611eb964a3355331',
        'sha256:bcaf3d86385daaab0ae51c9c53ebe70a6c1c5dfcb9e311b13517e04773ddf6b6',
        'sha256:cfa15570ecec1ea6bee089e86fd4deae6208c96a811344ce246de5e5c9ac824a',
        'sha256:d3e3063af1fa6b59e255da9a812891cdaf24b90fbaf653c02797871069b7c4c9',
        'sha256:d9cfe26ecea2fec320cd0cac400c9c2435328994d23596ee6df63945fe7292b0',
        'sha256:e5ef800ef8ef9ee05ae9a5b7d7d9cf7d6c936b32e312e54823faca3034ee16ab',
        'sha256:f1366150acf611d09d37ffefb3559ed3ffeb1713643d3cd10716d6c5da3f83fb',
        'sha256:f4eb9747a37120b35f59c8e96265e87b0c432ff010d32fc0772992aa14659502',
        'sha256:f8264463cc08cd696ad17e4bf3c80f3344628c04c11ffdc545ddf0798bc17316',
        'sha256:f8ba54848dfe280b1be0d6e699544cee4ba10d566f92464538063d9e645aed3e',
        'sha256:f93d1edcaea7b6a7a8fbf936f4492a9a0ee0b4cb281efebd5e1dd73e5e432c71',
        'sha256:fc8865c7e0ac25ddd71036c2b9a799418b32d9acb40400d345b8791b6e1058cb',
        'sha256:fce6b0cb9ade1546178c031393633b09c4793834176496c99a94de0bfa471b27',
        'sha256:fde17c52d7ce7d55a9fb263b57ccb5da6439915b5c7105617eb21f636bb1bd9c',
    }
    pip_command = get_pip_command()
    # Point explicitly at the default index so the lookup is deterministic.
    pip_options, _ = pip_command.parse_args(
        ['--index-url', PyPIRepository.DEFAULT_INDEX_URL])
    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)
    ireq = from_line('cffi==1.9.1')
    # allow_all_wheels makes foreign-platform wheels visible, so every
    # artifact hash is collected, not just the ones installable here.
    with repository.allow_all_wheels():
        assert repository.get_hashes(ireq) == expected
def test_get_hashes_local_repository_cache_miss(from_line):
    """With no existing pins, hash lookup falls through to the real index."""
    command = get_pip_command()
    options, _ = command.parse_args(
        ['--index-url', PyPIRepository.DEFAULT_INDEX_URL])
    session = command._build_session(options)
    pypi = PyPIRepository(options, session)
    local = LocalRequirementsRepository({}, pypi)
    with pypi.allow_all_wheels():
        assert local.get_hashes(from_line('cffi==1.9.1')) == EXPECTED
def test_generate_hashes_all_platforms(from_line):
    """All published artifact hashes for cffi==1.9.1 must be returned when
    wheels for every platform are considered, using default pip options."""
    # Expected sha256 hashes of every cffi==1.9.1 artifact on PyPI
    # (all platform wheels plus the sdist).
    expected = {
        'sha256:04b133ef629ae2bc05f83d0b079a964494a9cd17914943e690c57209b44aae20',
        'sha256:0f1b3193c17b93c75e73eeac92f22eec4c98a021d9969b1c347d1944fae0d26b',
        'sha256:1fb1cf40c315656f98f4d3acfb1bd031a14a9a69d155e9a180d5f9b52eaf745a',
        'sha256:20af85d8e154b50f540bc8d517a0dbf6b1c20b5d06e572afda919d5dafd1d06b',
        'sha256:2570f93b42c61013ab4b26e23aa25b640faf5b093ad7dd3504c3a8eadd69bc24',
        'sha256:2f4e2872833ee3764dfc168dea566b7dd83b01ac61b377490beba53b5ece57f7',
        'sha256:31776a37a67424e7821324b9e03a05aa6378bbc2bccc58fa56402547f82803c6',
        'sha256:353421c76545f1d440cacc137abc865f07eab9df0dd3510c0851a2ca04199e90',
        'sha256:36d06de7b09b1eba54b1f5f76e2221afef7489cc61294508c5a7308a925a50c6',
        'sha256:3f1908d0bcd654f8b7b73204f24336af9f020b707fb8af937e3e2279817cbcd6',
        'sha256:5268de3a18f031e9787c919c1b9137ff681ea696e76740b1c6c336a26baaa58a',
        'sha256:563e0bd53fda03c151573217b3a49b3abad8813de9dd0632e10090f6190fdaf8',
        'sha256:5e1368d13f1774852f9e435260be19ad726bbfb501b80472f61c2dc768a0692a',
        'sha256:60881c79eb72cb75bd0a4be5e31c9e431739146c4184a2618cabea3938418984',
        'sha256:6120b62a642a40e47eb6c9ff00c02be69158fc7f7c5ff78e42a2c739d1c57cd6',
        'sha256:65c223e77f87cb463191ace3398e0a6d84ce4ac575d42eb412a220b099f593d6',
        'sha256:6fbf8db55710959344502b58ab937424173ad8b5eb514610bcf56b119caa350a',
        'sha256:74aadea668c94eef4ceb09be3d0eae6619e28b4f1ced4e29cd43a05bb2cfd7a4',
        'sha256:7be1efa623e1ed91b15b1e62e04c536def1d75785eb930a0b8179ca6b65ed16d',
        'sha256:83266cdede210393889471b0c2631e78da9d4692fcca875af7e958ad39b897ee',
        'sha256:86c68a3f8246495962446c6f96f6a27f182b91208187b68f1e87ec3dfd29fa32',
        'sha256:9163f7743cf9991edaddf9cf886708e288fab38e1b9fec9c41c15c85c8f7f147',
        'sha256:97d9f338f91b7927893ea6500b953e4b4b7e47c6272222992bb76221e17056ff',
        'sha256:a7930e73a4359b52323d09de6d6860840314aa09346cbcf4def8875e1b07ebc7',
        'sha256:ada8a42c493e4934a1a8875c2bc9efcb1b88c09883f70375bfa053ab32d6a118',
        'sha256:b0bc2d83cc0ba0e8f0d9eca2ffe07f72f33bec7d84547071e7e875d4cca8272d',
        'sha256:b5412a65605c642adf3e1544b59b8537daf5696dedadd2b3cbebc42e24da45ed',
        'sha256:ba6b5205fced1625b6d9d55f9ef422f9667c5d95f18f07c0611eb964a3355331',
        'sha256:bcaf3d86385daaab0ae51c9c53ebe70a6c1c5dfcb9e311b13517e04773ddf6b6',
        'sha256:cfa15570ecec1ea6bee089e86fd4deae6208c96a811344ce246de5e5c9ac824a',
        'sha256:d3e3063af1fa6b59e255da9a812891cdaf24b90fbaf653c02797871069b7c4c9',
        'sha256:d9cfe26ecea2fec320cd0cac400c9c2435328994d23596ee6df63945fe7292b0',
        'sha256:e5ef800ef8ef9ee05ae9a5b7d7d9cf7d6c936b32e312e54823faca3034ee16ab',
        'sha256:f1366150acf611d09d37ffefb3559ed3ffeb1713643d3cd10716d6c5da3f83fb',
        'sha256:f4eb9747a37120b35f59c8e96265e87b0c432ff010d32fc0772992aa14659502',
        'sha256:f8264463cc08cd696ad17e4bf3c80f3344628c04c11ffdc545ddf0798bc17316',
        'sha256:f8ba54848dfe280b1be0d6e699544cee4ba10d566f92464538063d9e645aed3e',
        'sha256:f93d1edcaea7b6a7a8fbf936f4492a9a0ee0b4cb281efebd5e1dd73e5e432c71',
        'sha256:fc8865c7e0ac25ddd71036c2b9a799418b32d9acb40400d345b8791b6e1058cb',
        'sha256:fce6b0cb9ade1546178c031393633b09c4793834176496c99a94de0bfa471b27',
        'sha256:fde17c52d7ce7d55a9fb263b57ccb5da6439915b5c7105617eb21f636bb1bd9c',
    }
    pip_command = get_pip_command()
    pip_options, _ = pip_command.parse_args([])
    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)
    ireq = from_line('cffi==1.9.1')
    # allow_all_wheels makes foreign-platform wheels visible, so every
    # artifact hash is collected, not just the ones installable here.
    with repository.allow_all_wheels():
        assert repository.get_hashes(ireq) == expected
def resolve_deps(deps, sources=None, verbose=False, hashes=False): constraints = [] for dep in deps: if dep.startswith('-e '): constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):]) else: constraint = pip.req.InstallRequirement.from_line(dep) constraints.append(constraint) pip_command = get_pip_command() pip_args = [] if sources: pip_args.extend(['-i', sources[0]['url']]) pip_options, _ = pip_command.parse_args(pip_args) pypi = PyPIRepository(pip_options=pip_options, session=requests) if verbose: logging.log.verbose = True resolver = Resolver(constraints=constraints, repository=pypi) results = [] # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages resolved_tree = resolver.resolve() for result in resolved_tree: name = pep423_name(result.name) version = clean_pkg_version(result.specifier) if hashes: try: collected_hashes = [] r = requests.get('https://pypi.org/pypi/{0}/json'.format(name)) api_releases = r.json()['releases'] cleaned_releases = {} for api_version, api_info in api_releases.items(): cleaned_releases[clean_pkg_version(api_version)] = api_info for release in cleaned_releases[version]: collected_hashes.append(release['digests']['sha256']) collected_hashes = ['sha256:' + s for s in collected_hashes] # Collect un-collectable hashes. if not collected_hashes:
def on_end(self, event):
    """Render one pinned ``requirements*.txt`` per extra when the event ends.

    Reads unpinned requirements from the python config, resolves them with
    pip-tools against PyPI, and writes the pinned result through
    ``self.render_file_inline``.
    """
    # Our config object
    python_config = event.config["python"]
    # Pip / PyPI
    pip_command = get_pip_command()
    pip_options, _ = pip_command.parse_args([])
    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)
    # One pass for the base requirements (extra=None), then one per extra.
    for extra in itertools.chain((None, ), python_config.get_extras()):
        requirements_file = "requirements{}.txt".format(
            "-" + extra if extra else "")
        # Only (re)write when overriding is allowed or the file is absent.
        if python_config.override_requirements or not os.path.exists(
                requirements_file):
            # Dump the unpinned requirements to a temp file so pip's
            # parse_requirements can read them back.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            if extra:
                tmpfile.write("\n".join(
                    python_config.get_requirements(extra=extra)))
            else:
                tmpfile.write("\n".join(python_config.get_requirements()))
            tmpfile.flush()
            constraints = list(
                parse_requirements(tmpfile.name,
                                   finder=repository.finder,
                                   session=repository.session,
                                   options=pip_options))
            resolver = Resolver(constraints,
                                repository,
                                prereleases=False,
                                clear_caches=False,
                                allow_unsafe=False)
            # Emit: '-e .[extra]', '-r requirements.txt' for extras, vendored
            # entries, then the sorted pinned resolution (minus the project
            # itself, which is covered by the '-e .' line).
            self.render_file_inline(
                requirements_file,
                "\n".join((
                    "-e .{}".format("[" + extra + "]" if extra else ""),
                    *(("-r requirements.txt", ) if extra else ()),
                    *python_config.get_vendors(extra=extra),
                    *sorted(
                        format_requirement(req)
                        for req in resolver.resolve(max_rounds=10)
                        if req.name != python_config.get("name")),
                )),
                override=python_config.override_requirements,
            )
def on_end(self, event):
    """Render one pinned ``requirements*.txt`` per extra when the event ends.

    Same contract as the sibling variant: requirements come from the python
    config, are resolved with pip-tools, and written via render_file_inline.
    """
    # Our config object
    python_config = event.config["python"]
    # Pip / PyPI
    pip_command = get_pip_command()
    pip_options, _ = pip_command.parse_args([])
    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)
    # Base requirements first (extra=None), then one file per extra.
    for extra in itertools.chain((None,), python_config.get_extras()):
        requirements_file = "requirements{}.txt".format("-" + extra if extra else "")
        # Skip files that exist unless overriding is enabled.
        if python_config.override_requirements or not os.path.exists(requirements_file):
            # Unpinned requirements go to a temp file for pip's parser.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            if extra:
                tmpfile.write("\n".join(python_config.get_requirements(extra=extra)))
            else:
                tmpfile.write("\n".join(python_config.get_requirements()))
            tmpfile.flush()
            constraints = list(
                parse_requirements(
                    tmpfile.name, finder=repository.finder, session=repository.session, options=pip_options
                )
            )
            resolver = Resolver(constraints, repository, prereleases=False, clear_caches=False, allow_unsafe=False)
            # '-e .[extra]' first, '-r requirements.txt' for extras, vendors,
            # then the sorted pinned resolution (excluding the project itself).
            self.render_file_inline(
                requirements_file,
                "\n".join(
                    (
                        "-e .{}".format("[" + extra + "]" if extra else ""),
                        *(("-r requirements.txt",) if extra else ()),
                        *python_config.get_vendors(extra=extra),
                        *sorted(
                            format_requirement(req)
                            for req in resolver.resolve(max_rounds=10)
                            if req.name != python_config.get("name")
                        ),
                    )
                ),
                override=python_config.override_requirements,
            )
def on_end(self, event):
    """Resolve configured requirements per extra and render requirements*.txt.

    Older variant: always re-renders (no existence check) and does not
    include vendored entries.
    """
    # Our config object
    python_config = event.config['python']
    # Pip / PyPI
    pip_command = get_pip_command()
    pip_options, _ = pip_command.parse_args([])
    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session)
    # Base requirements first (extra=None), then one file per extra.
    for extra in itertools.chain((None, ), python_config.get_extras()):
        # Unpinned requirements go to a temp file for pip's parser.
        tmpfile = tempfile.NamedTemporaryFile(mode='wt', delete=False)
        if extra:
            tmpfile.write('\n'.join(
                python_config.get_requirements(extra=extra)))
        else:
            tmpfile.write('\n'.join(python_config.get_requirements()))
        tmpfile.flush()
        constraints = list(
            parse_requirements(tmpfile.name,
                               finder=repository.finder,
                               session=repository.session,
                               options=pip_options))
        resolver = Resolver(constraints,
                            repository,
                            prereleases=False,
                            clear_caches=False,
                            allow_unsafe=False)
        # '-e .[extra]' first, '-r requirements.txt' for extras, then the
        # sorted pinned resolution (excluding the project itself).
        self.render_file_inline(
            'requirements{}.txt'.format('-' + extra if extra else ''),
            '\n'.join((
                '-e .{}'.format('[' + extra + ']' if extra else ''),
                *(('-r requirements.txt', ) if extra else ()),
                *sorted(
                    format_requirement(req)
                    for req in resolver.resolve(max_rounds=10)
                    if req.name != python_config.get('name')),
            )))
def resolve_deps(deps, which, which_pip, project, sources=None, verbose=False,
                 python=False, clear=False, pre=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.
    """
    index_lookup = {}
    markers_lookup = {}
    python_path = which('python')
    # Temporarily pretend to be the requested Python version for resolution.
    with HackedPythonVersion(python_version=python, python_path=python_path):

        class PipCommand(pip.basecommand.Command):
            """Needed for pip-tools."""
            name = 'PipCommand'

        constraints = []
        for dep in deps:
            # Round-trip each requirement through a temp file so pip's own
            # parser handles non-editable lines.
            t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')[1]
            with open(t, 'w') as f:
                f.write(dep)
            if dep.startswith('-e '):
                constraint = pip.req.InstallRequirement.from_editable(
                    dep[len('-e '):])
            else:
                constraint = [
                    c for c in pip.req.parse_requirements(
                        t, session=pip._vendor.requests)
                ][0]
            # extra_constraints = []
            # Remember per-package index and marker overrides for later.
            if ' -i ' in dep:
                index_lookup[constraint.name] = project.get_source(
                    url=dep.split(' -i ')[1]).get('name')
            if constraint.markers:
                markers_lookup[constraint.name] = str(
                    constraint.markers).replace('"', "'")
            constraints.append(constraint)
        pip_command = get_pip_command()
        pip_args = []
        if sources:
            pip_args = prepare_pip_source_args(sources, pip_args)
        if verbose:
            print('Using pip: {0}'.format(' '.join(pip_args)))
        pip_options, _ = pip_command.parse_args(pip_args)
        session = pip_command._build_session(pip_options)
        pypi = PyPIRepository(pip_options=pip_options, session=session)
        if verbose:
            logging.log.verbose = True
        results = []
        resolved_tree = set()
        resolver = Resolver(constraints=constraints, repository=pypi,
                            clear_caches=clear, prereleases=pre)
        # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
        try:
            resolved_tree.update(
                resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
        except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
            click.echo(
                '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n '
                'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
                ''.format(crayons.red('Warning', bold=True),
                          crayons.red('$ pipenv install --skip-lock'),
                          crayons.red('$ pipenv graph')),
                err=True)
            click.echo(crayons.blue(e))
            # Resolution failure is fatal.
            sys.exit(1)
        for result in resolved_tree:
            # Editable requirements are excluded from the lockfile output.
            if not result.editable:
                name = pep423_name(result.name)
                version = clean_pkg_version(result.specifier)
                index = index_lookup.get(result.name)
                # Markers containing 'extra' are pip-internal; drop them.
                if not markers_lookup.get(result.name):
                    markers = str(
                        result.markers) if result.markers and 'extra' not in str(
                            result.markers) else None
                else:
                    markers = markers_lookup.get(result.name)
                collected_hashes = []
                # Only query the warehouse API when an official index is used.
                if 'python.org' in '|'.join(
                        [source['url'] for source in sources]):
                    try:
                        # Grab the hashes from the new warehouse API.
                        r = requests.get(
                            'https://pypi.org/pypi/{0}/json'.format(name),
                            timeout=10)
                        api_releases = r.json()['releases']
                        # Normalize API version keys before lookup.
                        cleaned_releases = {}
                        for api_version, api_info in api_releases.items():
                            cleaned_releases[clean_pkg_version(
                                api_version)] = api_info
                        for release in cleaned_releases[version]:
                            collected_hashes.append(
                                release['digests']['sha256'])
                        collected_hashes = [
                            'sha256:' + s for s in collected_hashes
                        ]
                        # Collect un-collectable hashes.
                        if not collected_hashes:
                            collected_hashes = list(
                                list(resolver.resolve_hashes([result
                                                              ]).items())[0][1])
                    except (ValueError, KeyError):
                        if verbose:
                            print('Error fetching {}'.format(name))
                d = {'name': name, 'version': version,
                     'hashes': collected_hashes}
                if index:
                    d.update({'index': index})
                if markers:
                    d.update({'markers': markers.replace('"', "'")})
                results.append(d)
        return results
def actually_resolve_reps(deps, index_lookup, markers_lookup, project, sources,
                          verbose, clear, pre):
    """Resolve *deps* with pip-tools and return the resolved requirement set.

    Side effects: fills ``index_lookup`` and ``markers_lookup`` with
    per-package index names and environment markers found in the input.
    Raises RuntimeError when resolution fails.
    """

    class PipCommand(pip.basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []
    for dep in deps:
        # Round-trip each requirement through a temp file so pip's own
        # parser handles non-editable lines.
        t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')[1]
        with open(t, 'w') as f:
            f.write(dep)
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = [c for c in pip.req.parse_requirements(t, session=pip._vendor.requests)][0]
        # extra_constraints = []
        # Remember per-package index and marker overrides for the caller.
        if ' -i ' in dep:
            index_lookup[constraint.name] = project.get_source(url=dep.split(' -i ')[1]).get('name')
        if constraint.markers:
            markers_lookup[constraint.name] = str(constraint.markers).replace('"', "'")
        constraints.append(constraint)
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))
    pip_options, _ = pip_command.parse_args(pip_args)
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, session=session)
    if verbose:
        logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(constraints=constraints, repository=pypi,
                        clear_caches=clear, prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    try:
        resolved_tree.update(resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click.echo(
            '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n '
            'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph')
            ),
            err=True)
        click.echo(crayons.blue(e))
        # Give a targeted hint for bad version specifiers.
        if 'no version found at all' in str(e):
            click.echo(crayons.blue('Please check your version specifier and version number. See PEP440 for more information.'))
        raise RuntimeError
    return resolved_tree
def _resolve(self, deps):
    """Resolve *deps* into a sorted list of lockable package dicts.

    Each entry carries name, pinned version, checksum(s), category
    ('main'/'dev'), optional flag, and Python version restrictions.
    """
    # Checking if we should active prereleases
    prereleases = False
    for dep in deps:
        if dep.accepts_prereleases():
            prereleases = True
            break
    constraints = [dep.as_requirement() for dep in deps]
    command = get_pip_command()
    opts, _ = command.parse_args([])
    resolver = Resolver(
        constraints, PyPIRepository(opts, command._build_session(opts)),
        cache=DependencyCache(CACHE_DIR),
        prereleases=prereleases
    )
    matches = resolver.resolve()
    # Pinned requirements can be hashed; editable/unpinned ones cannot.
    pinned = [m for m in matches if not m.editable and is_pinned_requirement(m)]
    unpinned = [m for m in matches if m.editable or not is_pinned_requirement(m)]
    reversed_dependencies = resolver.reverse_dependencies(matches)
    # Complete reversed dependencies with cache
    cache = resolver.dependency_cache.cache
    for m in unpinned:
        name = key_from_req(m.req)
        if name not in cache:
            continue
        dependencies = cache[name][list(cache[name].keys())[0]]
        for dep in dependencies:
            dep = canonicalize_name(dep)
            if dep not in reversed_dependencies:
                reversed_dependencies[dep] = set()
            reversed_dependencies[dep].add(canonicalize_name(name))
    hashes = resolver.resolve_hashes(pinned)
    packages = []
    for m in matches:
        name = key_from_req(m.req)
        if name in self.UNSAFE:
            continue
        version = str(m.req.specifier)
        if m in unpinned:
            # VCS requirement: derive the version from the repo revision.
            url, specifier = m.link.url.split('@')
            rev, _ = specifier.split('#')
            version = self._get_vcs_version(url, rev)
            checksum = 'sha1:{}'.format(version['rev'])
        else:
            version = version.replace('==', '')
            checksum = list(hashes[m])
        # Figuring out category and optionality
        category = None
        optional = False
        # Checking if it's a main dependency
        for dep in deps:
            if dep.name == name:
                category = dep.category
                optional = dep.optional
                break
        if not category:
            # Walk the reverse-dependency tree to find which top-level
            # dependency pulled this package in.
            def _category(child):
                opt = False
                cat = None
                parents = reversed_dependencies.get(child, set())
                for parent in parents:
                    for dep in deps:
                        if dep.name != parent:
                            continue
                        opt = dep.optional
                        if dep.category == 'main':
                            # Dependency is given by at least one main package
                            # We flag it as main
                            return 'main', opt
                        return 'dev', opt
                    # Parent is not itself top-level: recurse upwards.
                    cat, op = _category(parent)
                    if cat is not None:
                        return cat, opt
                return cat, opt

            category, optional = _category(name)
        # If category is still None at this point
        # The dependency must have come from a VCS
        # dependency. To avoid missing packages
        # we assume "main" category and not optional
        if category is None:
            category = 'main'
            optional = False
        if not isinstance(checksum, list):
            checksum = [checksum]
        # Retrieving Python restriction if any
        python = self._get_pythons_for_package(
            name, reversed_dependencies, deps
        )
        python = list(python)
        if '*' in python:
            # If at least one parent gave a wildcard
            # Then it should be installed for any Python version
            python = ['*']
        package = {
            'name': name,
            'version': version,
            'checksum': checksum,
            'category': category,
            'optional': optional,
            'python': python
        }
        packages.append(package)
    return sorted(packages, key=lambda p: p['name'].lower())
def resolve_deps(deps, sources=None, verbose=False, hashes=False):
    """Resolve requirement strings with pip-tools.

    NOTE(review): the result loop below returns on its first iteration with
    ``results`` still empty -- the loop body appears to have been lost in a
    merge; confirm intent before relying on this function.
    """
    constraints = []
    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args.extend(['-i', sources[0]['url']])
    pip_options, _ = pip_command.parse_args(pip_args)
    # NOTE(review): `session=requests` passes the requests *module*, not a
    # pip session object -- works only by accident; verify.
    pypi = PyPIRepository(pip_options=pip_options, session=requests)
    if verbose:
        logging.log.verbose = True
    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    resolved_tree = resolver.resolve()
    for result in resolved_tree:
        return results


def format_toml(data):
    """Pretty-formats a given toml string."""
    data = data.split('\n')
    for i, line in enumerate(data):
        if i > 0:
            # Blank line before each new [section] (except the first line).
            if line.startswith('['):
                data[i] = '\n{0}'.format(line)
    return '\n'.join(data)


def multi_split(s, split):
    """Splits on multiple given separators."""
    for r in split:
        s = s.replace(r, '|')
    return [i for i in s.split('|') if len(i) > 0]


def convert_deps_from_pip(dep):
    """Converts a pip-formatted dependency to a Pipfile-formatted one."""
    dependency = {}
    import requirements
    req = [r for r in requirements.parse(dep)][0]
    # VCS Installs.
    if req.vcs:
        if req.name is None:
            raise ValueError('pipenv requires an #egg fragment for version controlled '
                             'dependencies. Please install remote dependency '
                             'in the form {0}#egg=<package-name>.'.format(req.uri))
        # Crop off the git+, etc part.
        dependency[req.name] = {req.vcs: req.uri[len(req.vcs) + 1:]}
        # Add --editable, if it's there.
        if req.editable:
            dependency[req.name].update({'editable': True})
        # Add the specifier, if it was provided.
        if req.revision:
            dependency[req.name].update({'ref': req.revision})
    elif req.specs or req.extras:
        specs = None
        # Comparison operators: e.g. Django>1.10
        if req.specs:
            r = multi_split(dep, '=<>')
            specs = dep[len(r[0]):]
            dependency[req.name] = specs
        # Extras: e.g. requests[socks]
        if req.extras:
            dependency[req.name] = {'extras': req.extras}
            if specs:
                dependency[req.name].update({'version': specs})
    # Bare dependencies: e.g. requests
    else:
        dependency[dep] = '*'
    return dependency


def convert_deps_to_pip(deps, r=True):
    """Converts a Pipfile-formatted dependency to a pip-formatted one.

    Returns the list of pip requirement lines, or (when ``r`` is true, the
    default) the path of a temp requirements file containing them.
    """
    dependencies = []
    for dep in deps.keys():
        # Default (e.g. '>1.10').
        extra = deps[dep] if isinstance(deps[dep], six.string_types) else ''
        version = ''
        # Get rid of '*'.
        if deps[dep] == '*' or str(extra) == '{}':
            extra = ''
        hash = ''
        # Support for single hash (spec 1).
        if 'hash' in deps[dep]:
            hash = ' --hash={0}'.format(deps[dep]['hash'])
        # Support for multiple hashes (spec 2).
        if 'hashes' in deps[dep]:
            hash = '{0} '.format(''.join([' --hash={0} '.format(h) for h in deps[dep]['hashes']]))
        # Support for extras (e.g. requests[socks])
        if 'extras' in deps[dep]:
            extra = '[{0}]'.format(deps[dep]['extras'][0])
        if 'version' in deps[dep]:
            version = deps[dep]['version']
        # Support for version control
        maybe_vcs = [vcs for vcs in VCS_LIST if vcs in deps[dep]]
        vcs = maybe_vcs[0] if maybe_vcs else None
        if vcs:
            extra = '{0}+{1}'.format(vcs, deps[dep][vcs])
            # Support for @refs.
            if 'ref' in deps[dep]:
                extra += '@{0}'.format(deps[dep]['ref'])
            extra += '#egg={0}'.format(dep)
            # Support for editable.
            if 'editable' in deps[dep]:
                # Support for --egg.
                dep = '-e '
            else:
                dep = ''
        dependencies.append('{0}{1}{2}{3}'.format(dep, extra, version, hash))
    if not r:
        return dependencies
    # Write requirements.txt to tmp directory.
    f = tempfile.NamedTemporaryFile(suffix='-requirements.txt', delete=False)
    f.write('\n'.join(dependencies).encode('utf-8'))
    return f.name


def mkdir_p(newdir):
    """works the way a good mkdir should :)

    - already exists, silently complete
    - regular file in the way, raise an exception
    - parent directory(ies) does not exist, make them as well
    From: http://code.activestate.com/recipes/82465-a-friendly-mkdir/
    """
    if os.path.isdir(newdir):
        pass
    elif os.path.isfile(newdir):
        raise OSError("a file with the same name as the desired dir, '{0}', already exists.".format(newdir))
    else:
        head, tail = os.path.split(newdir)
        if head and not os.path.isdir(head):
            mkdir_p(head)
        if tail:
            os.mkdir(newdir)


def is_required_version(version, specified_version):
    """Check to see if there's a hard requirement for version
    number provided in the Pipfile.
    """
    # Certain packages may be defined with multiple values.
    if isinstance(specified_version, dict):
        specified_version = specified_version.get('version', '')
    if specified_version.startswith('=='):
        return version.strip() == specified_version.split('==')[1].strip()
    return True


def is_vcs(pipfile_entry):
    """Determine if dictionary entry from Pipfile is for a vcs dependency."""
    if isinstance(pipfile_entry, dict):
        return any(key for key in pipfile_entry.keys() if key in VCS_LIST)
    return False


def pep440_version(version):
    """Normalize a version string via pip's PEP 440 parser."""
    # use pip built in version parser
    return str(pip.index.parse_version(version))


def pep423_name(name):
    """Normalize package name to PEP 423 style standard."""
    return name.lower().replace('_', '-')


def proper_case(package_name):
    """Properly case project name from pypi.org"""
    # Hit the simple API.
    r = requests.get('https://pypi.org/pypi/{0}/json'.format(package_name),
                     timeout=0.3, stream=True)
    if not r.ok:
        raise IOError('Unable to find package {0} in PyPI repository.'.format(package_name))
    # The canonical casing is recovered from the redirect target URL.
    r = parse.parse('https://pypi.org/pypi/{name}/json', r.url)
    good_name = r['name']
    return good_name


def split_vcs(split_file):
    """Split VCS dependencies out from file."""
    if 'packages' in split_file or 'dev-packages' in split_file:
        sections = ('packages', 'dev-packages')
    elif 'default' in split_file or 'develop' in split_file:
        sections = ('default', 'develop')
    # For each vcs entry in a given section, move it to section-vcs.
    for section in sections:
        entries = split_file.get(section, {})
        vcs_dict = dict((k, entries.pop(k)) for k in list(entries.keys()) if is_vcs(entries[k]))
        split_file[section + '-vcs'] = vcs_dict
    return split_file


def recase_file(file_dict):
    """Recase file before writing to output."""
    if 'packages' in file_dict or 'dev-packages' in file_dict:
        sections = ('packages', 'dev-packages')
    elif 'default' in file_dict or 'develop' in file_dict:
        sections = ('default', 'develop')
    for section in sections:
        file_section = file_dict.get(section, {})
        # Try to properly case each key if we can.
        for key in list(file_section.keys()):
            try:
                cased_key = proper_case(key)
            except IOError:
                # Offline / unknown package: keep the original casing.
                cased_key = key
            file_section[cased_key] = file_section.pop(key)
    return file_dict


def walk_up(bottom):
    """mimic os.walk, but walk 'up' instead of down the directory tree.

    From: https://gist.github.com/zdavkeos/1098474
    """
    bottom = os.path.realpath(bottom)
    # get files in current dir
    try:
        names = os.listdir(bottom)
    except Exception:
        return
    dirs, nondirs = [], []
    for name in names:
        if os.path.isdir(os.path.join(bottom, name)):
            dirs.append(name)
        else:
            nondirs.append(name)
    yield bottom, dirs, nondirs
    new_path = os.path.realpath(os.path.join(bottom, '..'))
    # see if we are at the top
    if new_path == bottom:
        return
    for x in walk_up(new_path):
        yield x


def find_requirements(max_depth=3):
    """Returns the path of a Pipfile in parent directories."""
    i = 0
    for c, d, f in walk_up(os.getcwd()):
        i += 1
        if i < max_depth:
            # NOTE(review): `if 'requirements.txt':` is a non-empty string
            # literal and therefore always true -- probably meant to test
            # membership in `f`; the isfile() check below masks the bug.
            if 'requirements.txt':
                r = os.path.join(c, 'requirements.txt')
                if os.path.isfile(r):
                    return r
    raise RuntimeError('No requirements.txt found!')
def resolve_deps(deps, sources=None, verbose=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.

    :param deps: list of requirement strings ('-e ' prefix marks editables).
    :param sources: optional list of index dicts; the first one's 'url' is
        passed to pip as the index URL.
    :param verbose: enable pip-tools verbose logging.
    :return: list of dicts with 'name', 'version' and 'hashes' keys.
    """
    constraints = []
    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args.extend(['-i', sources[0]['url']])
    pip_options, _ = pip_command.parse_args(pip_args)
    pypi = PyPIRepository(pip_options=pip_options, session=requests)
    if verbose:
        logging.log.verbose = True
    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    resolved_tree = resolver.resolve()
    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)
        collected_hashes = []
        try:
            # Grab the hashes from the new warehouse API.
            r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
            api_releases = r.json()['releases']
            # Normalize API version keys so they match our cleaned version.
            cleaned_releases = {}
            for api_version, api_info in api_releases.items():
                cleaned_releases[clean_pkg_version(api_version)] = api_info
            for release in cleaned_releases[version]:
                collected_hashes.append(release['digests']['sha256'])
            collected_hashes = ['sha256:' + s for s in collected_hashes]
        except (ValueError, KeyError):
            # Warehouse lookup failed (bad JSON / unknown version) -- fall
            # through to the pip-tools fallback below.
            pass
        # Fix: this fallback previously lived *inside* the try block, after
        # the line that raises KeyError, so any failed warehouse lookup
        # skipped it entirely and silently produced an empty hash list.
        if not collected_hashes:
            try:
                collected_hashes = list(
                    list(resolver.resolve_hashes([result]).items())[0][1])
            except (ValueError, KeyError):
                pass
        results.append({
            'name': name,
            'version': version,
            'hashes': collected_hashes
        })
    return results
def resolve_deps(deps, which, which_pip, project, sources=None, verbose=False, python=False, clear=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.

    :param deps: list of requirement strings ('-e ' prefix marks editables).
    :param which: / :param which_pip: / :param project: passed through to
        ``best_matches_from`` for resolving editable packages' constraints.
    :param sources: optional list of index dicts; the first one's 'url' is
        passed to pip as the index URL.
    :param verbose: enable pip-tools verbose logging.
    :param python: python version to hack in for resolution.
    :param clear: clear pip-tools' resolver caches before resolving.
    :return: list of dicts with 'name', 'version' and 'hashes' keys.
    """
    with HackedPythonVersion(python):

        class PipCommand(pip.basecommand.Command):
            """Needed for pip-tools."""
            name = 'PipCommand'

        constraints = []
        extra_constraints = []
        for dep in deps:
            # Write the requirement to a temp file so pip's own requirements
            # parser can be reused for non-editable dependencies.
            # Fix: mkstemp() returns an *open* OS-level fd which the original
            # code discarded (fd leak), and the temp file was never deleted.
            # Reuse the fd for the write and remove the file after parsing.
            fd, t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')
            with os.fdopen(fd, 'w') as f:
                f.write(dep)
            try:
                if dep.startswith('-e '):
                    constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
                    # Resolve extra constraints from -e packages (that rely on setuptools.)
                    extra_constraints = best_matches_from(dep[len('-e '):], which=which, which_pip=which_pip, project=project)
                    extra_constraints = [pip.req.InstallRequirement.from_line(c) for c in extra_constraints]
                else:
                    constraint = [c for c in pip.req.parse_requirements(t, session=pip._vendor.requests)][0]
                    extra_constraints = []
            finally:
                os.remove(t)
            constraints.append(constraint)
            constraints.extend(extra_constraints)
        pip_command = get_pip_command()
        pip_args = []
        if sources:
            pip_args.extend(['-i', sources[0]['url']])
        pip_options, _ = pip_command.parse_args(pip_args)
        pypi = PyPIRepository(pip_options=pip_options, session=requests)
        if verbose:
            logging.log.verbose = True
        resolver = Resolver(constraints=constraints, repository=pypi, clear_caches=clear)
        results = []
        # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
        resolved_tree = resolver.resolve()
        for result in resolved_tree:
            name = pep423_name(result.name)
            version = clean_pkg_version(result.specifier)
            collected_hashes = []
            try:
                # Grab the hashes from the new warehouse API.
                r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
                api_releases = r.json()['releases']
                # Normalize API version keys so they match our cleaned version.
                cleaned_releases = {}
                for api_version, api_info in api_releases.items():
                    cleaned_releases[clean_pkg_version(api_version)] = api_info
                for release in cleaned_releases[version]:
                    collected_hashes.append(release['digests']['sha256'])
                collected_hashes = ['sha256:' + s for s in collected_hashes]
            except (ValueError, KeyError):
                # Warehouse lookup failed (bad JSON / unknown version) -- fall
                # through to the pip-tools fallback below.
                pass
            # Fix: this fallback previously lived *inside* the try block,
            # after the line that raises KeyError, so any failed warehouse
            # lookup skipped it and silently produced an empty hash list.
            if not collected_hashes:
                try:
                    collected_hashes = list(list(resolver.resolve_hashes([result]).items())[0][1])
                except (ValueError, KeyError):
                    pass
            results.append({'name': name, 'version': version, 'hashes': collected_hashes})
        return results