Example #1
def run_command(cmd, *args, is_verbose=False, **kwargs):
    """
    Take an input command and run it, handling exceptions and error codes and returning
    its stdout and stderr.

    :param cmd: The list of command and arguments.
    :type cmd: list
    :returns: The completed process object, exposing the captured stdout and stderr
    :rtype: subprocess.CompletedProcess
    :raises: exceptions.PipenvCmdError
    """

    from pipenv._compat import decode_for_output
    from pipenv.cmdparse import Script

    catch_exceptions = kwargs.pop("catch_exceptions", True)
    if isinstance(cmd, (str, list, tuple)):
        cmd = Script.parse(cmd)
    if not isinstance(cmd, Script):
        raise TypeError("Command input must be a string, list or tuple")
    if "env" not in kwargs:
        kwargs["env"] = os.environ.copy()
    kwargs["env"]["PYTHONIOENCODING"] = "UTF-8"
    command = [cmd.command, *cmd.args]
    if is_verbose:
        click_echo(f"Running command: $ {cmd.cmdify()}")
    c = subprocess_run(command, *args, **kwargs)
    if is_verbose:
        click_echo(
            "Command output: {}".format(crayons.cyan(decode_for_output(c.stdout))),
            err=True,
        )
    if c.returncode and catch_exceptions:
        raise PipenvCmdError(cmd.cmdify(), c.stdout, c.stderr, c.returncode)
    return c
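
A minimal usage sketch for the helper above (the command shown is illustrative; it assumes run_command and PipenvCmdError are importable from this module):

# Illustrative call: run a command verbosely and inspect the result.
# run_command raises PipenvCmdError on a non-zero exit code unless
# catch_exceptions=False is passed.
c = run_command(["git", "--version"], is_verbose=True)
print(c.returncode)   # 0 on success
print(c.stdout)       # captured standard output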
Example #2
    def _get_hashes_from_pypi(self, ireq):
        from pipenv.vendor.pip_shims import shims

        pkg_url = f"https://pypi.org/pypi/{ireq.name}/json"
        session = _get_requests_session(self.project.s.PIPENV_MAX_RETRIES)
        try:
            collected_hashes = set()
            # Grab the hashes from the new warehouse API.
            r = session.get(pkg_url, timeout=10)
            api_releases = r.json()["releases"]
            cleaned_releases = {}
            for api_version, api_info in api_releases.items():
                api_version = clean_pkg_version(api_version)
                cleaned_releases[api_version] = api_info
            version = ""
            if ireq.specifier:
                spec = next(iter(s for s in ireq.specifier), None)
                if spec:
                    version = spec.version
            for release in cleaned_releases[version]:
                collected_hashes.add(release["digests"][shims.FAVORITE_HASH])
            return self.prepend_hash_types(collected_hashes,
                                           shims.FAVORITE_HASH)
        except (ValueError, KeyError, ConnectionError):
            if self.project.s.is_verbose():
                click_echo(
                    "{}: Error generating hash for {}".format(
                        crayons.red("Warning", bold=True), ireq.name),
                    err=True,
                )
            return None
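
For reference, a sketch of the warehouse JSON shape the lookup above depends on (field names only; the version and digest values are placeholders):

# https://pypi.org/pypi/<name>/json returns, among other fields:
# {
#   "releases": {
#     "2.19.1": [
#       {"digests": {"md5": "...", "sha256": "..."}, "filename": "...", ...},
#       ...
#     ],
#     ...
#   }
# }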
Example #3
    def pip_options(self):
        if self._pip_options is None:
            pip_options, _ = self.pip_command.parser.parse_args(self.pip_args)
            pip_options.cache_dir = environments.PIPENV_CACHE_DIR
            self._pip_options = pip_options
        if environments.is_verbose():
            click_echo(
                crayons.blue("Using pip: {0}".format(" ".join(self.pip_args))),
                err=True,
            )
        return self._pip_options
Example #4
def venv_resolve_deps(
    deps, which, project, pre=False, verbose=False, clear=False, allow_global=False
):
    import delegator
    from . import resolver
    import json

    resolver = escape_grouped_arguments(resolver.__file__.rstrip('co'))
    cmd = '{0} {1} {2} {3} {4} {5}'.format(
        escape_grouped_arguments(which('python')),
        resolver,
        '--pre' if pre else '',
        '--verbose' if verbose else '',
        '--clear' if clear else '',
        '--system' if allow_global else '',
    )
    os.environ['PIPENV_PACKAGES'] = '\n'.join(deps)
    c = delegator.run(cmd, block=True)
    del os.environ['PIPENV_PACKAGES']
    try:
        assert c.return_code == 0
    except AssertionError:
        if verbose:
            click_echo(c.out, err=True)
            click_echo(c.err, err=True)
        else:
            click_echo(c.err[int(len(c.err) / 2) - 1:], err=True)
        sys.exit(c.return_code)
    if verbose:
        click_echo(c.out.split('RESULTS:')[0], err=True)
    try:
        return json.loads(c.out.split('RESULTS:')[1].strip())

    except IndexError:
        raise RuntimeError('There was a problem with locking.')
Example #5
def resolve(cmd, sp):
    from .vendor import delegator
    from .cmdparse import Script
    from .vendor.pexpect.exceptions import EOF, TIMEOUT
    from .vendor.vistir.compat import to_native_string
    EOF.__module__ = "pexpect.exceptions"
    from ._compat import decode_output
    c = delegator.run(Script.parse(cmd).cmdify(),
                      block=False,
                      env=os.environ.copy())
    _out = decode_output("")
    result = None
    out = to_native_string("")
    while True:
        try:
            result = c.expect(u"\n", timeout=environments.PIPENV_TIMEOUT)
        except (EOF, TIMEOUT):
            pass
        if result is None:
            break
        _out = c.subprocess.before
        if _out is not None:
            _out = decode_output("{0}".format(_out))
            out += _out
            sp.text = to_native_string("{0}".format(_out[:100]))
        if environments.is_verbose():
            if _out is not None:
                sp._hide_cursor()
                sp.write(_out.rstrip())
                sp._show_cursor()
    c.block()
    if c.return_code != 0:
        sp.red.fail(
            environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!"))
        click_echo(c.out.strip(), err=True)
        click_echo(c.err.strip(), err=True)
        sys.exit(c.return_code)
    return c
Example #6
def venv_resolve_deps(
    deps,
    which,
    project,
    pre=False,
    clear=False,
    allow_global=False,
    pypi_mirror=None,
):
    from .vendor.vistir.misc import fs_str
    from .vendor import delegator
    from . import resolver
    import json

    if not deps:
        return []
    resolver = escape_grouped_arguments(resolver.__file__.rstrip("co"))
    cmd = "{0} {1} {2} {3} {4}".format(
        escape_grouped_arguments(which("python", allow_global=allow_global)),
        resolver,
        "--pre" if pre else "",
        "--clear" if clear else "",
        "--system" if allow_global else "",
    )
    with temp_environ():
        os.environ = {fs_str(k): fs_str(val) for k, val in os.environ.items()}
        os.environ["PIPENV_PACKAGES"] = str("\n".join(deps))
        if pypi_mirror:
            os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror)
        os.environ["PIPENV_VERBOSITY"] = str(environments.PIPENV_VERBOSITY)
        c = delegator.run(cmd, block=True)
    try:
        assert c.return_code == 0
    except AssertionError:
        if environments.is_verbose():
            click_echo(c.out, err=True)
            click_echo(c.err, err=True)
        else:
            click_echo(c.err[(int(len(c.err) / 2) - 1):], err=True)
        sys.exit(c.return_code)
    if environments.is_verbose():
        click_echo(c.out.split("RESULTS:")[0], err=True)
    try:
        return json.loads(c.out.split("RESULTS:")[1].strip())

    except IndexError:
        raise RuntimeError("There was a problem with locking.")
Example #7
def venv_resolve_deps(
    deps,
    which,
    project,
    pre=False,
    verbose=False,
    clear=False,
    allow_global=False,
    pypi_mirror=None,
):
    from .vendor import delegator
    from . import resolver
    import json

    if not deps:
        return []
    resolver = escape_grouped_arguments(resolver.__file__.rstrip("co"))
    cmd = "{0} {1} {2} {3} {4} {5}".format(
        escape_grouped_arguments(which("python", allow_global=allow_global)),
        resolver,
        "--pre" if pre else "",
        "--verbose" if verbose else "",
        "--clear" if clear else "",
        "--system" if allow_global else "",
    )
    with temp_environ():
        os.environ["PIPENV_PACKAGES"] = "\n".join(deps)
        if pypi_mirror:
            os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror)
        c = delegator.run(cmd, block=True)
    try:
        assert c.return_code == 0
    except AssertionError:
        if verbose:
            click_echo(c.out, err=True)
            click_echo(c.err, err=True)
        else:
            click_echo(c.err[int(len(c.err) / 2) - 1 :], err=True)
        sys.exit(c.return_code)
    if verbose:
        click_echo(c.out.split("RESULTS:")[0], err=True)
    try:
        return json.loads(c.out.split("RESULTS:")[1].strip())

    except IndexError:
        raise RuntimeError("There was a problem with locking.")
Example #8
def echo(message=None, file=None, nl=True, err=False, color=None, carriage_return=False):
    """
    Patched click echo function.
    """
    message = message or ''
    if carriage_return and nl:
        click_echo(message + '\r\n', file, False, err, color)
    elif carriage_return and not nl:
        click_echo(message + '\r', file, False, err, color)
    else:
        click_echo(message, file, nl, err, color)
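
Illustrative calls for the patched echo (assuming it is in scope); the carriage_return flag only changes the line ending that gets appended:

echo("plain line")                                      # behaves like click.echo
echo("progress 50%", carriage_return=True)              # appends '\r\n'
echo("progress 50%", nl=False, carriage_return=True)    # appends '\r' only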
Example #9
def actually_resolve_deps(deps,
                          index_lookup,
                          markers_lookup,
                          project,
                          sources,
                          verbose,
                          clear,
                          pre,
                          req_dir=None):
    from .vendor.packaging.markers import default_environment
    from .patched.notpip._internal import basecommand
    from .patched.notpip._internal.cmdoptions import no_binary, only_binary
    from .patched.notpip._internal.req import parse_requirements
    from .patched.notpip._internal.exceptions import DistributionNotFound
    from .patched.notpip._vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound
    from .vendor.requirementslib import Requirement
    from ._compat import TemporaryDirectory, NamedTemporaryFile

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []
    cleanup_req_dir = False
    if not req_dir:
        req_dir = TemporaryDirectory(suffix='-requirements', prefix='pipenv-')
        cleanup_req_dir = True
    for dep in deps:
        if not dep:
            continue
        url = None
        if ' -i ' in dep:
            dep, url = dep.split(' -i ')
        req = Requirement.from_line(dep)

        # extra_constraints = []

        if url:
            index_lookup[req.name] = project.get_source(url=url).get('name')
        # strip the marker and re-add it later after resolution
        # but we will need a fallback in case resolution fails
        # eg pypiwin32
        if req.markers:
            markers_lookup[req.name] = req.markers.replace('"', "'")
        constraints.append(req.constraint_line)

    pip_command = get_pip_command()
    constraints_file = None
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))
    with NamedTemporaryFile(mode='w',
                            prefix='pipenv-',
                            suffix='-constraints.txt',
                            dir=req_dir.name,
                            delete=False) as f:
        if sources:
            requirementstxt_sources = ' '.join(pip_args) if pip_args else ''
            requirementstxt_sources = requirementstxt_sources.replace(
                ' --', '\n--')
            f.write(u'{0}\n'.format(requirementstxt_sources))
        f.write(u'\n'.join([_constraint for _constraint in constraints]))
        constraints_file = f.name
    pip_options, _ = pip_command.parser.parse_args(pip_args)
    pip_options.cache_dir = PIPENV_CACHE_DIR
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options,
                          use_json=False,
                          session=session)
    constraints = parse_requirements(constraints_file,
                                     finder=pypi.finder,
                                     session=pypi.session,
                                     options=pip_options)
    constraints = [c for c in constraints]
    if verbose:
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(constraints=constraints,
                        repository=pypi,
                        clear_caches=clear,
                        prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    hashes = None
    try:
        results = resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS)
        hashes = resolver.resolve_hashes(results)
        resolved_tree.update(results)
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            '{0}: Your dependencies could not be resolved. You likely have a '
            'mismatch in your sub-dependencies.\n  '
            'You can use {1} to bypass this mechanism, then run {2} to inspect '
            'the situation.\n  '
            'Hint: try {3} if it is a pre-release dependency.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph'),
                crayons.red('$ pipenv lock --pre'),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if 'no version found at all' in str(e):
            click_echo(
                crayons.blue(
                    'Please check your version specifier and version number. See PEP440 for more information.'
                ))
        if cleanup_req_dir:
            req_dir.cleanup()
        raise RuntimeError
    if cleanup_req_dir:
        req_dir.cleanup()
    return (resolved_tree, hashes, markers_lookup, resolver)
Example #10
def venv_resolve_deps(
    deps,
    which,
    project,
    pre=False,
    clear=False,
    allow_global=False,
    pypi_mirror=None,
    dev=False,
    pipfile=None,
    lockfile=None,
    keep_outdated=False,
):
    """
    Resolve dependencies for a pipenv project, acts as a portal to the target environment.

    Regardless of whether a virtual environment is present or not, this will spawn
    a subprocess which is isolated to the target environment and which will perform
    dependency resolution.  This function reads the output of that call and mutates
    the provided lockfile accordingly, returning nothing.

    :param List[:class:`~requirementslib.Requirement`] deps: A list of dependencies to resolve.
    :param Callable which: A callable used to locate executables (e.g. the target python) in the environment
    :param project: The pipenv Project instance to use during resolution
    :param Optional[bool] pre: Whether to resolve pre-release candidates, defaults to False
    :param Optional[bool] clear: Whether to clear the cache during resolution, defaults to False
    :param Optional[bool] allow_global: Whether to use *sys.executable* as the python binary, defaults to False
    :param Optional[str] pypi_mirror: A URL to substitute any time *pypi.org* is encountered, defaults to None
    :param Optional[bool] dev: Whether to target *dev-packages* or not, defaults to False
    :param pipfile: A Pipfile section to operate on, defaults to None
    :type pipfile: Optional[Dict[str, Union[str, Dict[str, Union[str, bool, List[str]]]]]]
    :param Dict[str, Any] lockfile: A project lockfile to mutate, defaults to None
    :param bool keep_outdated: Whether to retain outdated dependencies and resolve with them in mind, defaults to False
    :raises RuntimeError: Raised on resolution failure
    :return: Nothing
    :rtype: None
    """

    import json
    import tempfile

    from pipenv import resolver
    from pipenv._compat import decode_for_output
    from pipenv.vendor.vistir.compat import Path

    results = []
    pipfile_section = "dev-packages" if dev else "packages"
    lockfile_section = "develop" if dev else "default"
    if not deps:
        if not project.pipfile_exists:
            return None
        deps = project.parsed_pipfile.get(pipfile_section, {})
    if not deps:
        return None

    if not pipfile:
        pipfile = getattr(project, pipfile_section, {})
    if not lockfile:
        lockfile = project._lockfile
    req_dir = create_tracked_tempdir(prefix="pipenv", suffix="requirements")
    cmd = [
        which("python", allow_global=allow_global),
        Path(resolver.__file__.rstrip("co")).as_posix(),
    ]
    if pre:
        cmd.append("--pre")
    if clear:
        cmd.append("--clear")
    if allow_global:
        cmd.append("--system")
    if dev:
        cmd.append("--dev")
    target_file = tempfile.NamedTemporaryFile(prefix="resolver",
                                              suffix=".json",
                                              delete=False)
    target_file.close()
    cmd.extend(["--write", make_posix(target_file.name)])
    with temp_environ():
        os.environ.update({k: str(val) for k, val in os.environ.items()})
        if pypi_mirror:
            os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror)
        os.environ["PIPENV_VERBOSITY"] = str(project.s.PIPENV_VERBOSITY)
        os.environ["PIPENV_REQ_DIR"] = req_dir
        os.environ["PIP_NO_INPUT"] = "1"
        pipenv_site_dir = get_pipenv_sitedir()
        if pipenv_site_dir is not None:
            os.environ["PIPENV_SITE_DIR"] = pipenv_site_dir
        else:
            os.environ.pop("PIPENV_SITE_DIR", None)
        if keep_outdated:
            os.environ["PIPENV_KEEP_OUTDATED"] = "1"
        with create_spinner(text=decode_for_output("Locking..."),
                            setting=project.s) as sp:
            # This conversion is somewhat slow on local and file-type requirements since
            # we now download those requirements / make temporary folders to perform
            # dependency resolution on them, so we are including this step inside the
            # spinner context manager for the UX improvement
            sp.write(decode_for_output("Building requirements..."))
            deps = convert_deps_to_pip(deps,
                                       project,
                                       r=False,
                                       include_index=True)
            constraints = set(deps)
            os.environ["PIPENV_PACKAGES"] = str("\n".join(constraints))
            sp.write(decode_for_output("Resolving dependencies..."))
            c = resolve(cmd, sp, project=project)
            results = c.stdout.strip()
            if c.returncode == 0:
                sp.green.ok(
                    environments.PIPENV_SPINNER_OK_TEXT.format("Success!"))
                if not project.s.is_verbose() and c.stderr.strip():
                    click_echo(crayons.yellow(f"Warning: {c.stderr.strip()}"),
                               err=True)
            else:
                sp.red.fail(
                    environments.PIPENV_SPINNER_FAIL_TEXT.format(
                        "Locking Failed!"))
                click_echo(f"Output: {c.stdout.strip()}", err=True)
                click_echo(f"Error: {c.stderr.strip()}", err=True)
    try:
        with open(target_file.name) as fh:
            results = json.load(fh)
    except (IndexError, json.JSONDecodeError):
        click_echo(c.stdout.strip(), err=True)
        click_echo(c.stderr.strip(), err=True)
        if os.path.exists(target_file.name):
            os.unlink(target_file.name)
        raise RuntimeError("There was a problem with locking.")
    if os.path.exists(target_file.name):
        os.unlink(target_file.name)
    if lockfile_section not in lockfile:
        lockfile[lockfile_section] = {}
    prepare_lockfile(results, pipfile, lockfile[lockfile_section])
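
A hedged call sketch based on the docstring above; `project` and `which` stand in for the caller's pipenv Project instance and executable finder, so the names are assumptions about the call site rather than part of this example:

# Hypothetical caller context:
# `project` is a pipenv Project, `which` locates binaries in the target env.
lockfile = project._lockfile
venv_resolve_deps(
    deps=project.parsed_pipfile.get("packages", {}),
    which=which,
    project=project,
    lockfile=lockfile,
)
# Returns None; on success lockfile["default"] has been mutated in place.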
Example #11
def resolve_deps(
    deps,
    which,
    project,
    sources=None,
    verbose=False,
    python=False,
    clear=False,
    pre=False,
    allow_global=False,
):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip9.
    """
    from pip9._vendor.requests.exceptions import ConnectionError

    index_lookup = {}
    markers_lookup = {}
    python_path = which('python', allow_global=allow_global)
    backup_python_path = sys.executable
    results = []
    # First (proper) attempt:
    with HackedPythonVersion(python_version=python, python_path=python_path):
        try:
            resolved_tree, resolver = actually_resolve_reps(
                deps,
                index_lookup,
                markers_lookup,
                project,
                sources,
                verbose,
                clear,
                pre,
            )
        except RuntimeError:
            # Don't exit here, like usual.
            resolved_tree = None
    # Second (last-resort) attempt:
    if resolved_tree is None:
        with HackedPythonVersion(
                python_version='.'.join([str(s)
                                         for s in sys.version_info[:3]]),
                python_path=backup_python_path,
        ):
            try:
                # Attempt to resolve again, with different Python version information,
                # particularly for particularly particular packages.
                resolved_tree, resolver = actually_resolve_reps(
                    deps,
                    index_lookup,
                    markers_lookup,
                    project,
                    sources,
                    verbose,
                    clear,
                    pre,
                )
            except RuntimeError:
                sys.exit(1)
    for result in resolved_tree:
        if not result.editable:
            name = pep423_name(result.name)
            version = clean_pkg_version(result.specifier)
            index = index_lookup.get(result.name)
            if not markers_lookup.get(result.name):
                markers = str(
                    result.markers) if result.markers and 'extra' not in str(
                        result.markers) else None
            else:
                markers = markers_lookup.get(result.name)
            collected_hashes = []
            if any('python.org' in source['url'] or 'pypi.org' in source['url']
                   for source in sources):
                try:
                    # Grab the hashes from the new warehouse API.
                    r = _get_requests_session().get(
                        'https://pypi.org/pypi/{0}/json'.format(name),
                        timeout=10,
                    )
                    api_releases = r.json()['releases']
                    cleaned_releases = {}
                    for api_version, api_info in api_releases.items():
                        cleaned_releases[clean_pkg_version(
                            api_version)] = api_info
                    for release in cleaned_releases[version]:
                        collected_hashes.append(release['digests']['sha256'])
                    collected_hashes = [
                        'sha256:' + s for s in collected_hashes
                    ]
                except (ValueError, KeyError, ConnectionError):
                    if verbose:
                        click_echo('{0}: Error generating hash for {1}'.format(
                            crayons.red('Warning', bold=True), name))
            # Collect un-collectable hashes (should work with devpi).
            try:
                collected_hashes = collected_hashes + list(
                    list(resolver.resolve_hashes([result]).items())[0][1])
            except (ValueError, KeyError, ConnectionError, IndexError):
                if verbose:
                    print('Error generating hash for {}'.format(name))
            collected_hashes = sorted(set(collected_hashes))
            d = {'name': name, 'version': version, 'hashes': collected_hashes}
            if index:
                d.update({'index': index})
            if markers:
                d.update({'markers': markers.replace('"', "'")})
            results.append(d)
    return results
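
Each element appended to `results` above follows this shape (a sketch with placeholder values; `index` and `markers` appear only when applicable):

# {
#     "name": "requests",
#     "version": "2.19.1",
#     "hashes": ["sha256:<digest>", ...],
#     "index": "pypi",                        # only when an index was looked up
#     "markers": "python_version >= '2.7'",   # only when markers were collected
# }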
Example #12
def actually_resolve_reps(deps, index_lookup, markers_lookup, project, sources,
                          verbose, clear, pre):
    from pip9 import basecommand, req
    from pip9._vendor import requests as pip_requests
    from pip9.exceptions import DistributionNotFound
    from pip9._vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []
    req_dir = tempfile.mkdtemp(prefix='pipenv-', suffix='-requirements')
    for dep in deps:
        if dep:
            if dep.startswith('-e '):
                constraint = req.InstallRequirement.from_editable(
                    dep[len('-e '):])
            else:
                fd, t = tempfile.mkstemp(prefix='pipenv-',
                                         suffix='-requirement.txt',
                                         dir=req_dir)
                with os.fdopen(fd, 'w') as f:
                    f.write(dep)
                constraint = [
                    c for c in req.parse_requirements(t, session=pip_requests)
                ][0]
            # extra_constraints = []
            if ' -i ' in dep:
                index_lookup[constraint.name] = project.get_source(
                    url=dep.split(' -i ')[1]).get('name')
            if constraint.markers:
                markers_lookup[constraint.name] = str(
                    constraint.markers).replace('"', "'")
            constraints.append(constraint)
    rmtree(req_dir)
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))
    pip_options, _ = pip_command.parse_args(pip_args)
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options,
                          use_json=False,
                          session=session)
    if verbose:
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(
        constraints=constraints,
        repository=pypi,
        clear_caches=clear,
        prereleases=pre,
    )
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    try:
        resolved_tree.update(resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n  '
            'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph'),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if 'no version found at all' in str(e):
            click_echo(
                crayons.blue(
                    'Please check your version specifier and version number. See PEP440 for more information.'
                ))
        raise RuntimeError

    return resolved_tree, resolver
Example #13
def actually_resolve_deps(
    deps,
    index_lookup,
    markers_lookup,
    project,
    sources,
    clear,
    pre,
    req_dir=None,
):
    from .patched.notpip._internal import basecommand
    from .patched.notpip._internal.req import parse_requirements
    from .patched.notpip._internal.exceptions import DistributionNotFound
    from .patched.notpip._vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound
    from .vendor.requirementslib.models.requirements import Requirement
    from ._compat import TemporaryDirectory, NamedTemporaryFile

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""

        name = "PipCommand"

    constraints = []
    cleanup_req_dir = False
    if not req_dir:
        req_dir = TemporaryDirectory(suffix="-requirements", prefix="pipenv-")
        cleanup_req_dir = True
    for dep in deps:
        if not dep:
            continue
        url = None
        indexes, trusted_hosts, remainder = parse_indexes(dep)
        if indexes:
            url = indexes[0]
        dep = " ".join(remainder)
        req = Requirement.from_line(dep)

        # extra_constraints = []

        if url:
            index_lookup[req.name] = project.get_source(url=url).get("name")
        # strip the marker and re-add it later after resolution
        # but we will need a fallback in case resolution fails
        # eg pypiwin32
        if req.markers:
            markers_lookup[req.name] = req.markers.replace('"', "'")
        constraints.append(req.constraint_line)

    pip_command = get_pip_command()
    constraints_file = None
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if environments.is_verbose():
        print("Using pip: {0}".format(" ".join(pip_args)))
    with NamedTemporaryFile(
            mode="w",
            prefix="pipenv-",
            suffix="-constraints.txt",
            dir=req_dir.name,
            delete=False,
    ) as f:
        if sources:
            requirementstxt_sources = " ".join(pip_args) if pip_args else ""
            requirementstxt_sources = requirementstxt_sources.replace(
                " --", "\n--")
            f.write(u"{0}\n".format(requirementstxt_sources))
        f.write(u"\n".join([_constraint for _constraint in constraints]))
        constraints_file = f.name
    pip_options, _ = pip_command.parser.parse_args(pip_args)
    pip_options.cache_dir = environments.PIPENV_CACHE_DIR
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options,
                          use_json=False,
                          session=session)
    constraints = parse_requirements(constraints_file,
                                     finder=pypi.finder,
                                     session=pypi.session,
                                     options=pip_options)
    constraints = [c for c in constraints]
    if environments.is_verbose():
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(constraints=constraints,
                        repository=pypi,
                        clear_caches=clear,
                        prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    hashes = None
    try:
        results = resolver.resolve(max_rounds=environments.PIPENV_MAX_ROUNDS)
        hashes = resolver.resolve_hashes(results)
        resolved_tree.update(results)
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            "{0}: Your dependencies could not be resolved. You likely have a "
            "mismatch in your sub-dependencies.\n  "
            "First try clearing your dependency cache with {1}, then try the original command again.\n "
            "Alternatively, you can use {2} to bypass this mechanism, then run "
            "{3} to inspect the situation.\n  "
            "Hint: try {4} if it is a pre-release dependency."
            "".format(
                crayons.red("Warning", bold=True),
                crayons.red("$ pipenv lock --clear"),
                crayons.red("$ pipenv install --skip-lock"),
                crayons.red("$ pipenv graph"),
                crayons.red("$ pipenv lock --pre"),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if "no version found at all" in str(e):
            click_echo(
                crayons.blue(
                    "Please check your version specifier and version number. See PEP440 for more information."
                ))
        if cleanup_req_dir:
            req_dir.cleanup()
        raise RuntimeError
    if cleanup_req_dir:
        req_dir.cleanup()
    return (resolved_tree, hashes, markers_lookup, resolver)
Example #14
def hello(count, name):
    for _ in range(count):
        click_echo("Hello, %s!" % name)
Example #15
    def get_deps_from_req(cls, req, resolver=None, resolve_vcs=True):
        # type: (Requirement, Optional["Resolver"], bool) -> Tuple[Set[str], Dict[str, Dict[str, Union[str, bool, List[str]]]]]
        from pipenv.vendor.requirementslib.models.requirements import Requirement
        from pipenv.vendor.requirementslib.models.utils import (
            _requirement_to_str_lowercase_name, )
        from pipenv.vendor.requirementslib.utils import is_installable_dir

        # TODO: this is way too complex, refactor this
        constraints = set()  # type: Set[str]
        locked_deps = {
        }  # type: Dict[str, Dict[str, Union[str, bool, List[str]]]]
        if (req.is_file_or_url or req.is_vcs) and not req.is_wheel:
            # for local packages with setup.py files and potential direct url deps:
            if req.is_vcs:
                req_list, lockfile = get_vcs_deps(reqs=[req])
                req = next(iter(req for req in req_list if req is not None),
                           req_list)
                entry = lockfile[pep423_name(req.normalized_name)]
            else:
                _, entry = req.pipfile_entry
            parsed_line = req.req.parsed_line  # type: Line
            try:
                name = req.normalized_name
            except TypeError:
                raise RequirementError(req=req)
            setup_info = req.req.setup_info
            setup_info.get_info()
            locked_deps[pep423_name(name)] = entry
            requirements = []
            # Allow users to toggle resolution off for non-editable VCS packages
            # but leave it on for local, installable folders on the filesystem
            if resolve_vcs or (req.editable or parsed_line.is_wheel or
                               (req.is_file_or_url and parsed_line.is_local
                                and is_installable_dir(parsed_line.path))):
                requirements = [
                    v for v in getattr(setup_info, "requires", {}).values()
                ]
            for r in requirements:
                if getattr(r, "url",
                           None) and not getattr(r, "editable", False):
                    if r is not None:
                        if not r.url:
                            continue
                        line = _requirement_to_str_lowercase_name(r)
                        new_req, _, _ = cls.parse_line(line)
                        if r.marker and not r.marker.evaluate():
                            new_constraints = {}
                            _, new_entry = req.pipfile_entry
                            new_lock = {
                                pep423_name(new_req.normalized_name): new_entry
                            }
                        else:
                            new_constraints, new_lock = cls.get_deps_from_req(
                                new_req, resolver)
                        locked_deps.update(new_lock)
                        constraints |= new_constraints
                # if there is no marker or there is a valid marker, add the constraint line
                elif r and (not r.marker or
                            (r.marker and r.marker.evaluate())):
                    line = _requirement_to_str_lowercase_name(r)
                    constraints.add(line)
            # ensure the top level entry remains as provided
            # note that we shouldn't pin versions for editable vcs deps
            if not req.is_vcs:
                if req.specifiers:
                    locked_deps[name]["version"] = req.specifiers
                elif parsed_line.setup_info and parsed_line.setup_info.version:
                    locked_deps[name]["version"] = "=={}".format(
                        parsed_line.setup_info.version)
            # if not req.is_vcs:
            locked_deps.update({name: entry})
        else:
            # if the dependency isn't installable, don't add it to constraints
            # and instead add it directly to the lock
            if (req and req.requirement
                    and (req.requirement.marker
                         and not req.requirement.marker.evaluate())):
                pypi = resolver.finder if resolver else None
                ireq = req.ireq
                best_match = (pypi.find_best_candidate(
                    ireq.name, ireq.specifier).best_candidate
                              if pypi else None)
                if best_match:
                    ireq.req.specifier = ireq.specifier.__class__(
                        f"=={best_match.version}")
                    hashes = resolver.collect_hashes(ireq) if resolver else []
                    new_req = Requirement.from_ireq(ireq)
                    new_req = new_req.add_hashes(hashes)
                    name, entry = new_req.pipfile_entry
                    locked_deps[pep423_name(name)] = translate_markers(entry)
                    click_echo(
                        "{} doesn't match your environment, "
                        "its dependencies won't be resolved.".format(
                            req.as_line()),
                        err=True,
                    )
                else:
                    click_echo(
                        "Could not find a version of {} that matches your environment, "
                        "it will be skipped.".format(req.as_line()),
                        err=True,
                    )
                return constraints, locked_deps
            constraints.add(req.constraint_line)
            return constraints, locked_deps
        return constraints, locked_deps
Example #16
def venv_resolve_deps(deps,
                      which,
                      project,
                      pre=False,
                      clear=False,
                      allow_global=False,
                      pypi_mirror=None,
                      dev=False,
                      pipfile=None,
                      lockfile=None):
    from .vendor.vistir.misc import fs_str
    from .vendor.vistir.compat import Path, to_native_string, JSONDecodeError
    from .vendor.vistir.path import create_tracked_tempdir
    from . import resolver
    import json

    vcs_deps = []
    vcs_lockfile = {}
    results = []
    pipfile_section = "dev_packages" if dev else "packages"
    lockfile_section = "develop" if dev else "default"
    vcs_section = "vcs_{0}".format(pipfile_section)
    vcs_deps = getattr(project, vcs_section, [])
    if not deps and not vcs_deps:
        return {}

    if not pipfile:
        pipfile = getattr(project, pipfile_section, None)
    if not lockfile:
        lockfile = project._lockfile
    req_dir = create_tracked_tempdir(prefix="pipenv", suffix="requirements")
    if vcs_deps:
        with create_spinner(text=fs_str("Pinning VCS Packages...")) as sp:
            vcs_reqs, vcs_lockfile = get_vcs_deps(
                project,
                which=which,
                clear=clear,
                pre=pre,
                allow_global=allow_global,
                dev=dev,
            )
            vcs_deps = [req.as_line() for req in vcs_reqs if req.editable]
    cmd = [
        which("python", allow_global=allow_global),
        Path(resolver.__file__.rstrip("co")).as_posix()
    ]
    if pre:
        cmd.append("--pre")
    if clear:
        cmd.append("--clear")
    if allow_global:
        cmd.append("--system")
    with temp_environ():
        os.environ = {fs_str(k): fs_str(val) for k, val in os.environ.items()}
        os.environ["PIPENV_PACKAGES"] = str("\n".join(deps))
        if pypi_mirror:
            os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror)
        os.environ["PIPENV_VERBOSITY"] = str(environments.PIPENV_VERBOSITY)
        os.environ["PIPENV_REQ_DIR"] = fs_str(req_dir)
        os.environ["PIP_NO_INPUT"] = fs_str("1")
        with create_spinner(text=fs_str("Locking...")) as sp:
            c = resolve(cmd, sp)
            results = c.out
            if vcs_deps:
                with temp_environ():
                    os.environ["PIPENV_PACKAGES"] = str("\n".join(vcs_deps))
                    sp.text = to_native_string("Locking VCS Dependencies...")
                    vcs_c = resolve(cmd, sp)
                    vcs_results, vcs_err = vcs_c.out, vcs_c.err
            else:
                vcs_results, vcs_err = "", ""
            sp.green.ok(environments.PIPENV_SPINNER_OK_TEXT.format("Success!"))
    outputs = [results, vcs_results]
    if environments.is_verbose():
        for output in outputs:
            click_echo(output.split("RESULTS:")[0], err=True)
    try:
        results = json.loads(results.split("RESULTS:")[1].strip())
        if vcs_results:
            # For vcs dependencies, treat the initial pass at locking (i.e. checkout)
            # as the pipfile entry because it gets us an actual ref to use
            vcs_results = json.loads(vcs_results.split("RESULTS:")[1].strip())
            vcs_lockfile = prepare_lockfile(vcs_results, vcs_lockfile.copy(),
                                            vcs_lockfile)
        else:
            vcs_results = []

    except (IndexError, JSONDecodeError):
        for out, err in [(c.out, c.err), (vcs_results, vcs_err)]:
            click_echo(out.strip(), err=True)
            click_echo(err.strip(), err=True)
        raise RuntimeError("There was a problem with locking.")
    lockfile[lockfile_section] = prepare_lockfile(results, pipfile,
                                                  lockfile[lockfile_section])
    lockfile[lockfile_section].update(vcs_lockfile)
Example #17
def resolve_deps(
    deps,
    which,
    project,
    sources=None,
    verbose=False,
    python=False,
    clear=False,
    pre=False,
    allow_global=False,
):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.
    """
    from .patched.notpip._vendor.requests.exceptions import ConnectionError
    from ._compat import TemporaryDirectory

    index_lookup = {}
    markers_lookup = {}
    python_path = which("python", allow_global=allow_global)
    backup_python_path = sys.executable
    results = []
    if not deps:
        return results
    # First (proper) attempt:
    req_dir = TemporaryDirectory(prefix="pipenv-", suffix="-requirements")
    with HackedPythonVersion(python_version=python, python_path=python_path):
        try:
            resolved_tree, hashes, markers_lookup, resolver = actually_resolve_deps(
                deps,
                index_lookup,
                markers_lookup,
                project,
                sources,
                verbose,
                clear,
                pre,
                req_dir=req_dir,
            )
        except RuntimeError:
            # Don't exit here, like usual.
            resolved_tree = None
    # Second (last-resort) attempt:
    if resolved_tree is None:
        with HackedPythonVersion(
            python_version=".".join([str(s) for s in sys.version_info[:3]]),
            python_path=backup_python_path,
        ):
            try:
                # Attempt to resolve again, with different Python version information,
                # particularly for particularly particular packages.
                resolved_tree, hashes, markers_lookup, resolver = actually_resolve_deps(
                    deps,
                    index_lookup,
                    markers_lookup,
                    project,
                    sources,
                    verbose,
                    clear,
                    pre,
                    req_dir=req_dir,
                )
            except RuntimeError:
                req_dir.cleanup()
                sys.exit(1)
    for result in resolved_tree:
        if not result.editable:
            name = pep423_name(result.name)
            version = clean_pkg_version(result.specifier)
            index = index_lookup.get(result.name)
            if not markers_lookup.get(result.name):
                markers = (
                    str(result.markers)
                    if result.markers and "extra" not in str(result.markers)
                    else None
                )
            else:
                markers = markers_lookup.get(result.name)
            collected_hashes = []
            if result in hashes:
                collected_hashes = list(hashes.get(result))
            elif any(
                "python.org" in source["url"] or "pypi.org" in source["url"]
                for source in sources
            ):
                pkg_url = "https://pypi.org/pypi/{0}/json".format(name)
                session = _get_requests_session()
                try:
                    # Grab the hashes from the new warehouse API.
                    r = session.get(pkg_url, timeout=10)
                    api_releases = r.json()["releases"]
                    cleaned_releases = {}
                    for api_version, api_info in api_releases.items():
                        api_version = clean_pkg_version(api_version)
                        cleaned_releases[api_version] = api_info
                    for release in cleaned_releases[version]:
                        collected_hashes.append(release["digests"]["sha256"])
                    collected_hashes = ["sha256:" + s for s in collected_hashes]
                except (ValueError, KeyError, ConnectionError):
                    if verbose:
                        click_echo(
                            "{0}: Error generating hash for {1}".format(
                                crayons.red("Warning", bold=True), name
                            )
                        )
            # # Collect un-collectable hashes (should work with devpi).
            # try:
            #     collected_hashes = collected_hashes + list(
            #         list(resolver.resolve_hashes([result]).items())[0][1]
            #     )
            # except (ValueError, KeyError, ConnectionError, IndexError):
            #     if verbose:
            #         print('Error generating hash for {}'.format(name))
            collected_hashes = sorted(set(collected_hashes))
            d = {"name": name, "version": version, "hashes": collected_hashes}
            if index:
                d.update({"index": index})
            if markers:
                d.update({"markers": markers.replace('"', "'")})
            results.append(d)
    req_dir.cleanup()
    return results
Example #18
def actually_resolve_deps(
    deps,
    index_lookup,
    markers_lookup,
    project,
    sources,
    verbose,
    clear,
    pre,
    req_dir=None,
):
    from .vendor.packaging.markers import default_environment
    from .patched.notpip._internal import basecommand
    from .patched.notpip._internal.cmdoptions import no_binary, only_binary
    from .patched.notpip._internal.req import parse_requirements
    from .patched.notpip._internal.exceptions import DistributionNotFound
    from .patched.notpip._vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound
    from .vendor.requirementslib import Requirement
    from ._compat import TemporaryDirectory, NamedTemporaryFile

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""

        name = "PipCommand"

    constraints = []
    cleanup_req_dir = False
    if not req_dir:
        req_dir = TemporaryDirectory(suffix="-requirements", prefix="pipenv-")
        cleanup_req_dir = True
    for dep in deps:
        if not dep:
            continue
        url = None
        if " -i " in dep:
            dep, url = dep.split(" -i ")
        req = Requirement.from_line(dep)

        # extra_constraints = []

        if url:
            index_lookup[req.name] = project.get_source(url=url).get("name")
        # strip the marker and re-add it later after resolution
        # but we will need a fallback in case resolution fails
        # eg pypiwin32
        if req.markers:
            markers_lookup[req.name] = req.markers.replace('"', "'")
        constraints.append(req.constraint_line)

    pip_command = get_pip_command()
    constraints_file = None
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if verbose:
        print("Using pip: {0}".format(" ".join(pip_args)))
    with NamedTemporaryFile(
        mode="w",
        prefix="pipenv-",
        suffix="-constraints.txt",
        dir=req_dir.name,
        delete=False,
    ) as f:
        if sources:
            requirementstxt_sources = " ".join(pip_args) if pip_args else ""
            requirementstxt_sources = requirementstxt_sources.replace(" --", "\n--")
            f.write(u"{0}\n".format(requirementstxt_sources))
        f.write(u"\n".join([_constraint for _constraint in constraints]))
        constraints_file = f.name
    pip_options, _ = pip_command.parser.parse_args(pip_args)
    pip_options.cache_dir = PIPENV_CACHE_DIR
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, use_json=False, session=session)
    constraints = parse_requirements(
        constraints_file, finder=pypi.finder, session=pypi.session, options=pip_options
    )
    constraints = [c for c in constraints]
    if verbose:
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(
        constraints=constraints, repository=pypi, clear_caches=clear, prereleases=pre
    )
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    hashes = None
    try:
        results = resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS)
        hashes = resolver.resolve_hashes(results)
        resolved_tree.update(results)
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            "{0}: Your dependencies could not be resolved. You likely have a "
            "mismatch in your sub-dependencies.\n  "
            "You can use {1} to bypass this mechanism, then run {2} to inspect "
            "the situation.\n  "
            "Hint: try {3} if it is a pre-release dependency."
            "".format(
                crayons.red("Warning", bold=True),
                crayons.red("$ pipenv install --skip-lock"),
                crayons.red("$ pipenv graph"),
                crayons.red("$ pipenv lock --pre"),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if "no version found at all" in str(e):
            click_echo(
                crayons.blue(
                    "Please check your version specifier and version number. See PEP440 for more information."
                )
            )
        if cleanup_req_dir:
            req_dir.cleanup()
        raise RuntimeError
    if cleanup_req_dir:
        req_dir.cleanup()
    return (resolved_tree, hashes, markers_lookup, resolver)
Example #19
def venv_resolve_deps(
    deps,
    which,
    project,
    pre=False,
    clear=False,
    allow_global=False,
    pypi_mirror=None,
):
    from .vendor.vistir.misc import fs_str
    from .vendor.vistir.compat import Path, to_native_string, JSONDecodeError
    from .vendor.vistir.path import create_tracked_tempdir
    from .cmdparse import Script
    from .core import spinner
    from .vendor.pexpect.exceptions import EOF, TIMEOUT
    from .vendor import delegator
    from . import resolver
    from ._compat import decode_output
    import json

    if not deps:
        return []

    req_dir = create_tracked_tempdir(prefix="pipenv", suffix="requirements")
    cmd = [
        which("python", allow_global=allow_global),
        Path(resolver.__file__.rstrip("co")).as_posix()
    ]
    if pre:
        cmd.append("--pre")
    if clear:
        cmd.append("--clear")
    if allow_global:
        cmd.append("--system")
    with temp_environ():
        os.environ = {fs_str(k): fs_str(val) for k, val in os.environ.items()}
        os.environ["PIPENV_PACKAGES"] = str("\n".join(deps))
        if pypi_mirror:
            os.environ["PIPENV_PYPI_MIRROR"] = str(pypi_mirror)
        os.environ["PIPENV_VERBOSITY"] = str(environments.PIPENV_VERBOSITY)
        os.environ["PIPENV_REQ_DIR"] = fs_str(req_dir)
        os.environ["PIP_NO_INPUT"] = fs_str("1")
        out = to_native_string("")
        EOF.__module__ = "pexpect.exceptions"
        with spinner(text=fs_str("Locking..."), spinner_name=environments.PIPENV_SPINNER,
                nospin=environments.PIPENV_NOSPIN) as sp:
            c = delegator.run(Script.parse(cmd).cmdify(), block=False, env=os.environ.copy())
            _out = decode_output("")
            result = None
            while True:
                try:
                    result = c.expect(u"\n", timeout=environments.PIPENV_TIMEOUT)
                except (EOF, TIMEOUT):
                    pass
                if result is None:
                    break
                _out = c.subprocess.before
                if _out is not None:
                    _out = decode_output("{0}".format(_out))
                    out += _out
                    sp.text = to_native_string("{0}".format(_out[:100]))
                if environments.is_verbose():
                    if _out is not None:
                        sp._hide_cursor()
                        sp.write(_out.rstrip())
                        sp._show_cursor()
            c.block()
            if c.return_code != 0:
                sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format(
                    "Locking Failed!"
                ))
                click_echo(c.out.strip(), err=True)
                click_echo(c.err.strip(), err=True)
                sys.exit(c.return_code)
            else:
                sp.green.ok(environments.PIPENV_SPINNER_OK_TEXT.format("Success!"))
    if environments.is_verbose():
        click_echo(c.out.split("RESULTS:")[0], err=True)
    try:
        return json.loads(c.out.split("RESULTS:")[1].strip())

    except (IndexError, JSONDecodeError):
        click_echo(c.out.strip(), err=True)
        click_echo(c.err.strip(), err=True)
        raise RuntimeError("There was a problem with locking.")
Example #20
def actually_resolve_deps(
    deps,
    index_lookup,
    markers_lookup,
    project,
    sources,
    clear,
    pre,
    req_dir=None,
):
    from .vendor.pip_shims.shims import (
        Command, parse_requirements, DistributionNotFound
    )
    from .vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound
    from .vendor.requirementslib.models.requirements import Requirement
    from .vendor.vistir.path import (
        create_tracked_tempdir, create_tracked_tempfile, url_to_path,
    )
    from .vendor.vistir.compat import Path, to_native_string

    class PipCommand(Command):
        """Needed for pip-tools."""

        name = "PipCommand"

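    # Build pip-style constraint lines from each raw dependency, remembering any
    # custom index and environment markers so they can be re-attached after resolution.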
    constraints = []
    needs_hash = []
    if not req_dir:
        req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-")
    for dep in deps:
        if not dep:
            continue
        url = None
        indexes, trusted_hosts, remainder = parse_indexes(dep)
        if indexes:
            url = indexes[0]
        dep = " ".join(remainder)
        req = Requirement.from_line(dep)
        new_ireq = req.as_ireq()
        if getattr(new_ireq, "link", None) and new_ireq.link.is_wheel and new_ireq.link.scheme == 'file':
            needs_hash.append(new_ireq)

        # extra_constraints = []

        if url:
            index_lookup[req.name] = project.get_source(url=url).get("name")
        # strip the marker and re-add it later after resolution
        # but we will need a fallback in case resolution fails
        # eg pypiwin32
        if req.markers:
            markers_lookup[req.name] = req.markers.replace('"', "'")
        constraints.append(req.constraint_line)

    pip_command = get_pip_command()
    constraints_file = None
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if environments.is_verbose():
        click_echo(crayons.blue("Using pip: {0}".format(" ".join(pip_args))), err=True)
    constraints_file = create_tracked_tempfile(
        mode="w",
        prefix="pipenv-",
        suffix="-constraints.txt",
        dir=req_dir,
        delete=False,
    )
    requirementstxt_sources = ""
    if sources:
        requirementstxt_sources = " ".join(pip_args) if pip_args else ""
        requirementstxt_sources = requirementstxt_sources.replace(" --", "\n--")
    constraints_file.write(u"{0}\n".format(requirementstxt_sources))
    constraints_file.write(u"\n".join([_constraint for _constraint in constraints]))
    constraints_file.close()
    pip_options, _ = pip_command.parser.parse_args(pip_args)
    pip_options.cache_dir = environments.PIPENV_CACHE_DIR
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, use_json=False, session=session)
    constraints = parse_requirements(
        constraints_file.name, finder=pypi.finder, session=pypi.session, options=pip_options
    )
    constraints = list(constraints)  # realize the generator returned by parse_requirements
    if environments.is_verbose():
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(
        constraints=constraints, repository=pypi, clear_caches=clear, prereleases=pre
    )
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    hashes = {
        ireq: pypi._hash_cache.get_hash(ireq.link)
        for ireq in constraints
        # We can only hash artifacts, but we don't want normal pypi artifacts since the
        # normal resolver handles those
        if (getattr(ireq, "link", None) and ireq.link.is_artifact
            and not (is_pypi_url(ireq.link.url)
                     # We also don't want to try to hash directories as this will fail (editable deps)
                     or (ireq.link.scheme == "file"
                         and Path(to_native_string(url_to_path(ireq.link.url))).is_dir())))
    }
    try:
        results = resolver.resolve(max_rounds=environments.PIPENV_MAX_ROUNDS)
        resolved_tree.update(results)
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            "{0}: Your dependencies could not be resolved. You likely have a "
            "mismatch in your sub-dependencies.\n  "
            "First try clearing your dependency cache with {1}, then try the original command again.\n "
            "Alternatively, you can use {2} to bypass this mechanism, then run "
            "{3} to inspect the situation.\n  "
            "Hint: try {4} if it is a pre-release dependency."
            "".format(
                crayons.red("Warning", bold=True),
                crayons.red("$ pipenv lock --clear"),
                crayons.red("$ pipenv install --skip-lock"),
                crayons.red("$ pipenv graph"),
                crayons.red("$ pipenv lock --pre"),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if "no version found at all" in str(e):
            click_echo(
                crayons.blue(
                    "Please check your version specifier and version number. See PEP440 for more information."
                ), err=True
            )
        raise RuntimeError
    else:
        resolved_hashes = resolver.resolve_hashes(results)
        for ireq, ireq_hashes in resolved_hashes.items():
            if ireq not in hashes:
                hashes[ireq] = ireq_hashes
    return (resolved_tree, hashes, markers_lookup, resolver)
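A hypothetical call sketch (not taken from the pipenv sources): the lookup dicts start empty and are filled in during resolution, and `project` is assumed to be a pipenv Project instance:

index_lookup, markers_lookup = {}, {}
resolved_tree, hashes, markers_lookup, resolver = actually_resolve_deps(
    ["requests>=2.20"],
    index_lookup,
    markers_lookup,
    project,            # assumed pipenv Project instance
    project.sources,    # the configured package indexes
    clear=False,
    pre=False,
)
# resolved_tree is a set of pinned InstallRequirement objects; hashes maps
# resolved requirements to their sets of acceptable artifact hashes.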
Example No. 21
def resolve_deps(deps,
                 which,
                 project,
                 sources=None,
                 python=False,
                 clear=False,
                 pre=False,
                 allow_global=False,
                 req_dir=None):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.
    """
    from .vendor.requests.exceptions import ConnectionError
    from .vendor.requirementslib.models.requirements import Requirement

    index_lookup = {}
    markers_lookup = {}
    python_path = which("python", allow_global=allow_global)
    if not os.environ.get("PIP_SRC"):
        os.environ["PIP_SRC"] = project.virtualenv_src_location
    backup_python_path = sys.executable
    results = []
    if not deps:
        return results
    # First (proper) attempt:
    req_dir = req_dir if req_dir else os.environ.get("req_dir", None)
    if not req_dir:
        from .vendor.vistir.path import create_tracked_tempdir
        req_dir = create_tracked_tempdir(prefix="pipenv-",
                                         suffix="-requirements")
    with HackedPythonVersion(python_version=python, python_path=python_path):
        try:
            resolved_tree, hashes, markers_lookup, resolver = actually_resolve_deps(
                deps,
                index_lookup,
                markers_lookup,
                project,
                sources,
                clear,
                pre,
                req_dir=req_dir,
            )
        except RuntimeError:
            # Don't exit here, like usual.
            resolved_tree = None
    # Second (last-resort) attempt:
    if resolved_tree is None:
        with HackedPythonVersion(
            python_version=".".join(str(s) for s in sys.version_info[:3]),
            python_path=backup_python_path,
        ):
            try:
                # Attempt to resolve again, with different Python version information,
                # particularly for particularly particular packages.
                resolved_tree, hashes, markers_lookup, resolver = actually_resolve_deps(
                    deps,
                    index_lookup,
                    markers_lookup,
                    project,
                    sources,
                    clear,
                    pre,
                    req_dir=req_dir,
                )
            except RuntimeError:
                sys.exit(1)
    for result in resolved_tree:
        if not result.editable:
            req = Requirement.from_ireq(result)
            name = pep423_name(req.name)
            version = str(req.get_version())
            index = index_lookup.get(result.name)
            req.index = index
            collected_hashes = []
            if result in hashes:
                collected_hashes = list(hashes.get(result))
            elif any("python.org" in source["url"]
                     or "pypi.org" in source["url"] for source in sources):
                pkg_url = "https://pypi.org/pypi/{0}/json".format(name)
                session = _get_requests_session()
                try:
                    # Grab the hashes from the new warehouse API.
                    r = session.get(pkg_url, timeout=10)
                    api_releases = r.json()["releases"]
                    cleaned_releases = {}
                    for api_version, api_info in api_releases.items():
                        api_version = clean_pkg_version(api_version)
                        cleaned_releases[api_version] = api_info
                    for release in cleaned_releases[version]:
                        collected_hashes.append(release["digests"]["sha256"])
                    collected_hashes = [
                        "sha256:" + s for s in collected_hashes
                    ]
                except (ValueError, KeyError, ConnectionError):
                    if environments.is_verbose():
                        click_echo("{0}: Error generating hash for {1}".format(
                            crayons.red("Warning", bold=True), name),
                                   err=True)
            # # Collect un-collectable hashes (should work with devpi).
            # try:
            #     collected_hashes = collected_hashes + list(
            #         list(resolver.resolve_hashes([result]).items())[0][1]
            #     )
            # except (ValueError, KeyError, ConnectionError, IndexError):
            #     if verbose:
            #         print('Error generating hash for {}'.format(name))
            req.hashes = sorted(set(collected_hashes))
            name, _entry = req.pipfile_entry
            entry = {}
            if isinstance(_entry, six.string_types):
                entry["version"] = _entry.lstrip("=")
            else:
                entry.update(_entry)
                entry["version"] = version
            entry["name"] = name
            # if index:
            #     d.update({"index": index})
            if markers_lookup.get(result.name):
                entry.update({"markers": markers_lookup.get(result.name)})
            entry = translate_markers(entry)
            results.append(entry)
    return results
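A hypothetical usage sketch; `which` and `project` are assumed to come from pipenv's CLI layer rather than being defined here:

entries = resolve_deps(
    ["requests>=2.20", "flask"],
    which,                      # pipenv's executable lookup helper (assumed in scope)
    project,                    # pipenv Project instance (assumed in scope)
    sources=project.sources,
    python=False,
    clear=False,
    pre=False,
)
# Each entry is a plain dict with at least "name" and "version", plus
# "markers" when a marker applies to the package.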
Example No. 22
def resolve_deps(
    deps,
    which,
    project,
    sources=None,
    python=False,
    clear=False,
    pre=False,
    allow_global=False,
):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.
    """
    from .patched.notpip._vendor.requests.exceptions import ConnectionError
    from ._compat import TemporaryDirectory

    index_lookup = {}
    markers_lookup = {}
    python_path = which("python", allow_global=allow_global)
    backup_python_path = sys.executable
    results = []
    if not deps:
        return results
    # First (proper) attempt:
    req_dir = TemporaryDirectory(prefix="pipenv-", suffix="-requirements")
    with HackedPythonVersion(python_version=python, python_path=python_path):
        try:
            resolved_tree, hashes, markers_lookup, resolver = actually_resolve_deps(
                deps,
                index_lookup,
                markers_lookup,
                project,
                sources,
                clear,
                pre,
                req_dir=req_dir,
            )
        except RuntimeError:
            # Don't exit here, like usual.
            resolved_tree = None
    # Second (last-resort) attempt:
    if resolved_tree is None:
        with HackedPythonVersion(
            python_version=".".join([str(s) for s in sys.version_info[:3]]),
            python_path=backup_python_path,
        ):
            try:
                # Attempt to resolve again, with different Python version information,
                # particularly for particularly particular packages.
                resolved_tree, hashes, markers_lookup, resolver = actually_resolve_deps(
                    deps,
                    index_lookup,
                    markers_lookup,
                    project,
                    sources,
                    clear,
                    pre,
                    req_dir=req_dir,
                )
            except RuntimeError:
                req_dir.cleanup()
                sys.exit(1)
    for result in resolved_tree:
        if not result.editable:
            name = pep423_name(result.name)
            version = clean_pkg_version(result.specifier)
            index = index_lookup.get(result.name)
            if not markers_lookup.get(result.name):
                markers = (
                    str(result.markers)
                    if result.markers and "extra" not in str(result.markers)
                    else None
                )
            else:
                markers = markers_lookup.get(result.name)
            collected_hashes = []
            if result in hashes:
                collected_hashes = list(hashes.get(result))
            elif any(
                "python.org" in source["url"] or "pypi.org" in source["url"]
                for source in sources
            ):
                pkg_url = "https://pypi.org/pypi/{0}/json".format(name)
                session = _get_requests_session()
                try:
                    # Grab the hashes from the new warehouse API.
                    r = session.get(pkg_url, timeout=10)
                    api_releases = r.json()["releases"]
                    cleaned_releases = {}
                    for api_version, api_info in api_releases.items():
                        api_version = clean_pkg_version(api_version)
                        cleaned_releases[api_version] = api_info
                    for release in cleaned_releases[version]:
                        collected_hashes.append(release["digests"]["sha256"])
                    collected_hashes = ["sha256:" + s for s in collected_hashes]
                except (ValueError, KeyError, ConnectionError):
                    if environments.is_verbose():
                        click_echo(
                            "{0}: Error generating hash for {1}".format(
                                crayons.red("Warning", bold=True), name
                            )
                        )
            # # Collect un-collectable hashes (should work with devpi).
            # try:
            #     collected_hashes = collected_hashes + list(
            #         list(resolver.resolve_hashes([result]).items())[0][1]
            #     )
            # except (ValueError, KeyError, ConnectionError, IndexError):
            #     if verbose:
            #         print('Error generating hash for {}'.format(name))
            collected_hashes = sorted(set(collected_hashes))
            d = {"name": name, "version": version, "hashes": collected_hashes}
            if index:
                d.update({"index": index})
            if markers:
                d.update({"markers": markers.replace('"', "'")})
            results.append(d)
    req_dir.cleanup()
    return results
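For this older variant, a hypothetical call sketch together with the rough shape of a returned entry (all field values below are illustrative, not real output):

entries = resolve_deps(
    ["requests==2.19.1"],
    which,              # pipenv's executable lookup helper (assumed in scope)
    project,            # pipenv Project instance (assumed in scope)
    sources=project.sources,
)
# Roughly:
# [{"name": "requests", "version": "2.19.1",
#   "hashes": ["sha256:..."], "index": "pypi",
#   "markers": "python_version >= '2.7'"}]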