Esempio n. 1
0
def resolve_deps(deps, sources=None, verbose=False, hashes=False):
    """Resolve pip-style dependency lines with pip-tools.

    Returns a list of dicts with ``name`` and ``version`` keys; when
    *hashes* is True, successfully-resolved entries also carry a
    ``hashes`` list (warehouse sha256 digests plus pip-tools' own).
    """
    constraints = []

    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)

    pip_command = get_pip_command()

    pip_args = []

    # Only the first configured source is used as the index URL.
    if sources:
        pip_args.extend(['-i', sources[0]['url']])

    pip_options, _ = pip_command.parse_args(pip_args)

    pypi = PyPIRepository(pip_options=pip_options, session=requests)

    if verbose:
        logging.log.verbose = True

    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []

    # Resolve exactly once and reuse the tree: the original called
    # resolve() twice (once for hashes, once to iterate), doubling the work.
    resolved_tree = resolver.resolve()

    # Index pip-tools' resolved hashes by PEP 423 name so they can be merged
    # with the warehouse-provided hashes below.
    resolved_hashes = {}
    for req, req_hashes in resolver.resolve_hashes(resolved_tree).items():
        resolved_hashes[pep423_name(req.name)] = {
            'version': clean_pkg_version(req.specifier),
            'hashes': list(req_hashes),
        }

    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)

        if hashes:
            try:
                collected_hashes = []
                # Distinct name for the HTTP response: the original reused
                # `r`, clobbering the Resolver reference inside the loop.
                # A timeout keeps a slow PyPI from hanging resolution.
                response = requests.get(
                    'https://pypi.org/pypi/{0}/json'.format(name), timeout=10)
                for release in response.json()['releases'][version]:
                    collected_hashes.append(release['digests']['sha256'])

                collected_hashes = ['sha256:' + s for s in collected_hashes]
                # Add pip-tools resolved hashes for the same pinned version.
                if name in resolved_hashes and resolved_hashes[name]['version'] == version:
                    collected_hashes.extend(resolved_hashes[name]['hashes'])

                results.append({'name': name, 'version': version, 'hashes': collected_hashes})
            # KeyError: the resolved version is missing from the API's
            # releases mapping (the original let this propagate).
            except (ValueError, KeyError):
                results.append({'name': name, 'version': version})
        else:
            results.append({'name': name, 'version': version})

    return results
Esempio n. 2
0
def resolve_deps(deps, sources=None):
    """Resolve *deps* with pip-tools and return [{'name', 'version'}, ...]."""
    constraints = []
    for dep in deps:
        if dep.startswith('-e '):
            requirement = pip.req.InstallRequirement.from_editable(
                dep[len('-e '):])
        else:
            requirement = pip.req.InstallRequirement.from_line(dep)
        constraints.append(requirement)

    pip_command = get_pip_command()

    # Only the first configured source is used as the index URL.
    pip_args = ['-i', sources[0]['url']] if sources else []
    pip_options, _ = pip_command.parse_args(pip_args)

    pypi = PyPIRepository(pip_options=pip_options, session=requests)
    resolver = Resolver(constraints=constraints, repository=pypi)

    return [
        {
            'name': pep423_name(result.name),
            'version': six.u(str(result.specifier)).replace('==', ''),
        }
        for result in resolver.resolve()
    ]
Esempio n. 3
0
    def test_find_dependencies_simple(self):
        """A simple scenario for finding dependencies."""
        package_manager = FakePackageManager(simple)

        # Start from a single top-level requirement ("foo")...
        specs = SpecSet()
        specs.add_spec('foo')

        # ...and check the complete pinned set the resolver produces.
        expected = ['foo==0.1', 'bar==1.2', 'qux==0.1', 'simplejson==2.4.0']
        resolver = Resolver(specs, package_manager)
        self.assertItemsEqual(expected, map(str, resolver.resolve()))
Esempio n. 4
0
    def test_find_dependencies_simple(self):
        """A simple scenario for finding dependencies."""
        manager = FakePackageManager(simple)

        # Seed the spec set with just the top-level requirement "foo".
        specs = SpecSet()
        specs.add_spec('foo')

        # Resolution should pin foo plus its whole transitive closure.
        resolved = map(str, Resolver(specs, manager).resolve())
        self.assertItemsEqual(
            ['foo==0.1', 'bar==1.2', 'qux==0.1', 'simplejson==2.4.0'],
            resolved)
Esempio n. 5
0
def resolve_deps(deps, sources=None, verbose=False, hashes=False):
    """Resolve *deps* with pip-tools; optionally collect package hashes.

    NOTE(review): this block is truncated mid-function in the source --
    the trailing ``if not collected_hashes:`` has no body, so as captured
    it is not valid Python.  See the complete sibling implementations of
    resolve_deps elsewhere in this file for the intended continuation.
    """

    # Build pip InstallRequirement constraints from the raw dependency lines.
    constraints = []

    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)

    pip_command = get_pip_command()

    pip_args = []

    # Only the first configured source is used as the index URL.
    if sources:
        pip_args.extend(['-i', sources[0]['url']])

    pip_options, _ = pip_command.parse_args(pip_args)

    pypi = PyPIRepository(pip_options=pip_options, session=requests)

    if verbose:
        logging.log.verbose = True

    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []

    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    resolved_tree = resolver.resolve()

    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)

        if hashes:
            try:
                collected_hashes = []
                # NOTE(review): no timeout on this request -- a slow PyPI
                # would hang resolution; confirm and add one upstream.
                r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
                api_releases = r.json()['releases']
                # Normalise the API's version keys with clean_pkg_version so
                # they can be matched against our cleaned `version`.
                cleaned_releases = {}
                for api_version, api_info in api_releases.items():
                    cleaned_releases[clean_pkg_version(api_version)] = api_info
    
                for release in cleaned_releases[version]:
                    collected_hashes.append(release['digests']['sha256'])

                collected_hashes = ['sha256:' + s for s in collected_hashes]

                # Collect un-collectable hashes.
                if not collected_hashes:
Esempio n. 6
0
    def on_end(self, event):
        """Render requirements(-extra).txt files from the python config.

        For the base set (``None``) and each declared extra, pin the
        configured requirements with pip-tools and write the result via
        ``render_file_inline`` -- but only when overriding is enabled or
        the target file does not exist yet.
        """
        # Our config object
        python_config = event.config["python"]

        # Pip / PyPI
        pip_command = get_pip_command()
        pip_options, _ = pip_command.parse_args([])
        session = pip_command._build_session(pip_options)
        repository = PyPIRepository(pip_options, session)

        for extra in itertools.chain((None, ), python_config.get_extras()):
            requirements_file = "requirements{}.txt".format(
                "-" + extra if extra else "")

            if python_config.override_requirements or not os.path.exists(
                    requirements_file):
                tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
                try:
                    if extra:
                        tmpfile.write("\n".join(
                            python_config.get_requirements(extra=extra)))
                    else:
                        tmpfile.write("\n".join(
                            python_config.get_requirements()))
                    tmpfile.flush()
                    constraints = list(
                        parse_requirements(tmpfile.name,
                                           finder=repository.finder,
                                           session=repository.session,
                                           options=pip_options))
                finally:
                    # The original leaked one temp file per extra
                    # (delete=False, never closed or removed).
                    tmpfile.close()
                    os.unlink(tmpfile.name)
                resolver = Resolver(constraints,
                                    repository,
                                    prereleases=False,
                                    clear_caches=False,
                                    allow_unsafe=False)

                self.render_file_inline(
                    requirements_file,
                    "\n".join((
                        "-e .{}".format("[" + extra + "]" if extra else ""),
                        *(("-r requirements.txt", ) if extra else ()),
                        *python_config.get_vendors(extra=extra),
                        *sorted(
                            format_requirement(req)
                            for req in resolver.resolve(max_rounds=10)
                            if req.name != python_config.get("name")),
                    )),
                    override=python_config.override_requirements,
                )
Esempio n. 7
0
    def _prepare_resolver(self, requirements: TRequirements) -> Resolver:
        """Build a pip-tools Resolver for *requirements*.

        *requirements* is either a Path to a requirements file (parsed
        with pip-tools) or an iterable of requirement lines.
        """
        if isinstance(requirements, Path):
            constraints = parse_requirements(
                str(requirements),
                self._repository.session,
                self._repository.finder,
                options=self._repository.options,
            )
        else:
            constraints = {
                InstallRequirement(PipRequirement(line), comes_from="line")
                for line in requirements
            }

        resolver = Resolver(
            constraints, self._repository, DependencyCache(self.CACHE_DIR)
        )
        # Cap resolution rounds without changing resolve()'s call sites.
        resolver.resolve = partial(resolver.resolve, max_rounds=100)
        return resolver
Esempio n. 8
0
    def on_end(self, event):
        """Render requirements(-extra).txt files from the python config.

        For the base set (``None``) and each declared extra, pin the
        configured requirements with pip-tools and write the result via
        ``render_file_inline`` -- but only when overriding is enabled or
        the target file does not exist yet.
        """
        # Our config object
        python_config = event.config["python"]

        # Pip / PyPI
        pip_command = get_pip_command()
        pip_options, _ = pip_command.parse_args([])
        session = pip_command._build_session(pip_options)
        repository = PyPIRepository(pip_options, session)

        for extra in itertools.chain((None,), python_config.get_extras()):
            requirements_file = "requirements{}.txt".format("-" + extra if extra else "")

            if python_config.override_requirements or not os.path.exists(requirements_file):
                tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
                try:
                    if extra:
                        tmpfile.write("\n".join(python_config.get_requirements(extra=extra)))
                    else:
                        tmpfile.write("\n".join(python_config.get_requirements()))
                    tmpfile.flush()
                    constraints = list(
                        parse_requirements(
                            tmpfile.name, finder=repository.finder, session=repository.session, options=pip_options
                        )
                    )
                finally:
                    # The original leaked one temp file per extra
                    # (delete=False, never closed or removed).
                    tmpfile.close()
                    os.unlink(tmpfile.name)
                resolver = Resolver(constraints, repository, prereleases=False, clear_caches=False, allow_unsafe=False)

                self.render_file_inline(
                    requirements_file,
                    "\n".join(
                        (
                            "-e .{}".format("[" + extra + "]" if extra else ""),
                            *(("-r requirements.txt",) if extra else ()),
                            *python_config.get_vendors(extra=extra),
                            *sorted(
                                format_requirement(req)
                                for req in resolver.resolve(max_rounds=10)
                                if req.name != python_config.get("name")
                            ),
                        )
                    ),
                    override=python_config.override_requirements,
                )
Esempio n. 9
0
    def on_end(self, event):
        """Render requirements(-extra).txt files from the python config,
        pinning the configured requirements with pip-tools."""
        import os  # local: this variant of the file may not import os

        # Our config object
        python_config = event.config['python']

        # Pip / PyPI
        pip_command = get_pip_command()
        pip_options, _ = pip_command.parse_args([])
        session = pip_command._build_session(pip_options)
        repository = PyPIRepository(pip_options, session)

        for extra in itertools.chain((None, ), python_config.get_extras()):
            tmpfile = tempfile.NamedTemporaryFile(mode='wt', delete=False)
            try:
                if extra:
                    tmpfile.write('\n'.join(
                        python_config.get_requirements(extra=extra)))
                else:
                    tmpfile.write('\n'.join(python_config.get_requirements()))
                tmpfile.flush()
                constraints = list(
                    parse_requirements(tmpfile.name,
                                       finder=repository.finder,
                                       session=repository.session,
                                       options=pip_options))
            finally:
                # The original leaked one temp file per extra
                # (delete=False, never closed or removed).
                tmpfile.close()
                os.unlink(tmpfile.name)
            resolver = Resolver(constraints,
                                repository,
                                prereleases=False,
                                clear_caches=False,
                                allow_unsafe=False)

            self.render_file_inline(
                'requirements{}.txt'.format('-' + extra if extra else ''),
                '\n'.join((
                    '-e .{}'.format('[' + extra + ']' if extra else ''),
                    *(('-r requirements.txt', ) if extra else ()),
                    *sorted(
                        format_requirement(req)
                        for req in resolver.resolve(max_rounds=10)
                        if req.name != python_config.get('name')),
                )))
Esempio n. 10
0
def resolve_deps(deps,
                 which,
                 which_pip,
                 project,
                 sources=None,
                 verbose=False,
                 python=False,
                 clear=False,
                 pre=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.
    """
    import os  # local: used for fd/temp-file cleanup below

    index_lookup = {}
    markers_lookup = {}

    python_path = which('python')

    with HackedPythonVersion(python_version=python, python_path=python_path):

        class PipCommand(pip.basecommand.Command):
            """Needed for pip-tools."""
            name = 'PipCommand'

        constraints = []

        for dep in deps:
            # mkstemp() returns an *open* file descriptor; close it, or
            # every dependency leaks one (the original discarded it).
            fd, t = tempfile.mkstemp(prefix='pipenv-',
                                     suffix='-requirement.txt')
            os.close(fd)
            with open(t, 'w') as f:
                f.write(dep)

            if dep.startswith('-e '):
                constraint = pip.req.InstallRequirement.from_editable(
                    dep[len('-e '):])
            else:
                constraint = [
                    c for c in pip.req.parse_requirements(
                        t, session=pip._vendor.requests)
                ][0]

            # The temp requirements file has been consumed; remove it
            # (the original left one file behind per dependency).
            os.unlink(t)

            # Remember which index a '-i'-annotated dependency came from.
            if ' -i ' in dep:
                index_lookup[constraint.name] = project.get_source(
                    url=dep.split(' -i ')[1]).get('name')

            if constraint.markers:
                markers_lookup[constraint.name] = str(
                    constraint.markers).replace('"', "'")

            constraints.append(constraint)

        pip_command = get_pip_command()

        pip_args = []

        if sources:
            pip_args = prepare_pip_source_args(sources, pip_args)

        if verbose:
            print('Using pip: {0}'.format(' '.join(pip_args)))

        pip_options, _ = pip_command.parse_args(pip_args)

        session = pip_command._build_session(pip_options)
        pypi = PyPIRepository(pip_options=pip_options, session=session)

        if verbose:
            logging.log.verbose = True

        results = []
        resolved_tree = set()

        resolver = Resolver(constraints=constraints,
                            repository=pypi,
                            clear_caches=clear,
                            prereleases=pre)
        # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
        try:
            resolved_tree.update(
                resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
        except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
            click.echo(
                '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n  '
                'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
                ''.format(crayons.red('Warning', bold=True),
                          crayons.red('$ pipenv install --skip-lock'),
                          crayons.red('$ pipenv graph')),
                err=True)
            click.echo(crayons.blue(e))
            sys.exit(1)

    for result in resolved_tree:
        if not result.editable:
            name = pep423_name(result.name)
            version = clean_pkg_version(result.specifier)
            index = index_lookup.get(result.name)

            # Prefer the marker string captured from the original dep line;
            # otherwise fall back to the resolved requirement's markers.
            if not markers_lookup.get(result.name):
                markers = str(
                    result.markers) if result.markers and 'extra' not in str(
                        result.markers) else None
            else:
                markers = markers_lookup.get(result.name)

            collected_hashes = []
            # Guard against sources=None: the original crashed here with a
            # TypeError when no sources were supplied.
            if sources and 'python.org' in '|'.join(
                    [source['url'] for source in sources]):
                try:
                    # Grab the hashes from the new warehouse API.
                    r = requests.get(
                        'https://pypi.org/pypi/{0}/json'.format(name),
                        timeout=10)
                    api_releases = r.json()['releases']

                    # Normalise the API's version keys so they can be
                    # matched against our cleaned `version`.
                    cleaned_releases = {}
                    for api_version, api_info in api_releases.items():
                        cleaned_releases[clean_pkg_version(
                            api_version)] = api_info

                    for release in cleaned_releases[version]:
                        collected_hashes.append(release['digests']['sha256'])

                    collected_hashes = [
                        'sha256:' + s for s in collected_hashes
                    ]

                    # Collect un-collectable hashes.
                    if not collected_hashes:
                        collected_hashes = list(
                            list(resolver.resolve_hashes([result
                                                          ]).items())[0][1])

                except (ValueError, KeyError):
                    if verbose:
                        print('Error fetching {}'.format(name))

            d = {'name': name, 'version': version, 'hashes': collected_hashes}

            if index:
                d.update({'index': index})

            if markers:
                d.update({'markers': markers.replace('"', "'")})

            results.append(d)

    return results
Esempio n. 11
0
def actually_resolve_reps(deps, index_lookup, markers_lookup, project, sources, verbose, clear, pre):
    """Resolve *deps* with pip-tools and return the resolved tree (a set).

    Populates *index_lookup* and *markers_lookup* in place.  On resolution
    failure, prints guidance and raises RuntimeError.
    """
    import os  # local: used for fd/temp-file cleanup below

    class PipCommand(pip.basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []

    for dep in deps:
        # mkstemp() returns an *open* file descriptor; close it, or every
        # dependency leaks one (the original discarded it).
        fd, t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')
        os.close(fd)
        with open(t, 'w') as f:
            f.write(dep)

        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = [c for c in pip.req.parse_requirements(t, session=pip._vendor.requests)][0]

        # The temp requirements file has been consumed; remove it
        # (the original left one file behind per dependency).
        os.unlink(t)

        # Remember which index a '-i'-annotated dependency came from.
        if ' -i ' in dep:
            index_lookup[constraint.name] = project.get_source(url=dep.split(' -i ')[1]).get('name')

        if constraint.markers:
            markers_lookup[constraint.name] = str(constraint.markers).replace('"', "'")

        constraints.append(constraint)

    pip_command = get_pip_command()

    pip_args = []

    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)

    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))

    pip_options, _ = pip_command.parse_args(pip_args)

    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, session=session)

    if verbose:
        logging.log.verbose = True

    resolved_tree = set()

    resolver = Resolver(constraints=constraints, repository=pypi, clear_caches=clear, prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    try:
        resolved_tree.update(resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click.echo(
            '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n  '
            'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph')
            ),
            err=True)

        click.echo(crayons.blue(e))

        if 'no version found at all' in str(e):
            click.echo(crayons.blue('Please check your version specifier and version number. See PEP440 for more information.'))

        raise RuntimeError

    return resolved_tree
Esempio n. 12
0
def actually_resolve_reps(deps, index_lookup, markers_lookup, project, sources, verbose, clear, pre):
    """Resolve *deps* with pip-tools and return the resolved tree (a set).

    Populates *index_lookup* and *markers_lookup* in place.  On resolution
    failure, prints guidance and raises RuntimeError.
    """
    import os  # local: used for fd/temp-file cleanup below

    class PipCommand(pip.basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []

    for dep in deps:
        # mkstemp() returns an *open* file descriptor; close it, or every
        # dependency leaks one (the original discarded it).
        fd, t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')
        os.close(fd)
        with open(t, 'w') as f:
            f.write(dep)

        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = [c for c in pip.req.parse_requirements(t, session=pip._vendor.requests)][0]

        # The temp requirements file has been consumed; remove it
        # (the original left one file behind per dependency).
        os.unlink(t)

        # Remember which index a '-i'-annotated dependency came from.
        if ' -i ' in dep:
            index_lookup[constraint.name] = project.get_source(url=dep.split(' -i ')[1]).get('name')

        if constraint.markers:
            markers_lookup[constraint.name] = str(constraint.markers).replace('"', "'")

        constraints.append(constraint)

    pip_command = get_pip_command()

    pip_args = []

    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)

    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))

    pip_options, _ = pip_command.parse_args(pip_args)

    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, session=session)

    if verbose:
        logging.log.verbose = True

    resolved_tree = set()

    resolver = Resolver(constraints=constraints, repository=pypi, clear_caches=clear, prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    try:
        resolved_tree.update(resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click.echo(
            '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n  '
            'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph')
            ),
            err=True)

        click.echo(crayons.blue(e))

        if 'no version found at all' in str(e):
            click.echo(crayons.blue('Please check your version specifier and version number. See PEP440 for more information.'))

        raise RuntimeError

    return resolved_tree
Esempio n. 13
0
def resolve_deps(deps, sources=None, verbose=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.
    """

    constraints = []

    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(
                dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)

    pip_command = get_pip_command()

    pip_args = []

    # Only the first configured source is used as the index URL.
    if sources:
        pip_args.extend(['-i', sources[0]['url']])

    pip_options, _ = pip_command.parse_args(pip_args)

    pypi = PyPIRepository(pip_options=pip_options, session=requests)

    if verbose:
        logging.log.verbose = True

    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []

    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    resolved_tree = resolver.resolve()

    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)

        collected_hashes = []

        try:
            # Grab the hashes from the new warehouse API.  The timeout keeps
            # a slow PyPI from hanging resolution (the original request had
            # none; the sibling implementation in this file uses 10s).
            r = requests.get('https://pypi.org/pypi/{0}/json'.format(name),
                             timeout=10)
            api_releases = r.json()['releases']

            # Normalise the API's version keys so they match our cleaned one.
            cleaned_releases = {}
            for api_version, api_info in api_releases.items():
                cleaned_releases[clean_pkg_version(api_version)] = api_info

            for release in cleaned_releases[version]:
                collected_hashes.append(release['digests']['sha256'])

            collected_hashes = ['sha256:' + s for s in collected_hashes]

            # Collect un-collectable hashes.
            if not collected_hashes:
                collected_hashes = list(
                    list(resolver.resolve_hashes([result]).items())[0][1])

        except (ValueError, KeyError):
            pass

        results.append({
            'name': name,
            'version': version,
            'hashes': collected_hashes
        })

    return results
Esempio n. 14
0
    def _resolve(self, deps):
        """Resolve *deps* with pip-tools and build lockfile-style entries.

        Each entry carries name, version, checksum, category, optional and
        python keys.  Returns the entries sorted case-insensitively by name.
        """
        # Checking if we should active prereleases
        prereleases = False
        for dep in deps:
            if dep.accepts_prereleases():
                prereleases = True
                break

        constraints = [dep.as_requirement() for dep in deps]

        command = get_pip_command()
        opts, _ = command.parse_args([])

        resolver = Resolver(
            constraints, PyPIRepository(opts, command._build_session(opts)),
            cache=DependencyCache(CACHE_DIR),
            prereleases=prereleases
        )
        matches = resolver.resolve()
        # Pinned requirements can be hash-checked; editable/unpinned ones
        # (e.g. VCS requirements) cannot and are handled separately below.
        pinned = [m for m in matches if not m.editable and is_pinned_requirement(m)]
        unpinned = [m for m in matches if m.editable or not is_pinned_requirement(m)]
        reversed_dependencies = resolver.reverse_dependencies(matches)

        # Complete reversed dependencies with cache
        cache = resolver.dependency_cache.cache
        for m in unpinned:
            name = key_from_req(m.req)
            if name not in cache:
                continue

            # Uses whichever cached version happens to be listed first for
            # this package.
            dependencies = cache[name][list(cache[name].keys())[0]]
            for dep in dependencies:
                dep = canonicalize_name(dep)
                if dep not in reversed_dependencies:
                    reversed_dependencies[dep] = set()

                reversed_dependencies[dep].add(canonicalize_name(name))

        hashes = resolver.resolve_hashes(pinned)
        packages = []
        for m in matches:
            name = key_from_req(m.req)
            if name in self.UNSAFE:
                continue

            version = str(m.req.specifier)
            if m in unpinned:
                # VCS requirement: derive the version from the link's
                # url@rev#fragment form.
                url, specifier = m.link.url.split('@')
                rev, _ = specifier.split('#')

                version = self._get_vcs_version(url, rev)
                checksum = 'sha1:{}'.format(version['rev'])
            else:
                version = version.replace('==', '')
                checksum = list(hashes[m])

            # Figuring out category and optionality
            category = None
            optional = False

            # Checking if it's a main dependency
            for dep in deps:
                if dep.name == name:
                    category = dep.category
                    optional = dep.optional
                    break

            if not category:
                # Walk up the reverse-dependency tree until a declared
                # top-level dependency is found.
                def _category(child):
                    opt = False
                    cat = None
                    parents = reversed_dependencies.get(child, set())
                    for parent in parents:
                        for dep in deps:
                            if dep.name != parent:
                                continue

                            opt = dep.optional

                            if dep.category == 'main':
                                # Dependency is given by at least one main package
                                # We flag it as main
                                return 'main', opt

                            return 'dev', opt

                        # NOTE(review): the recursive call's optionality
                        # (`op`) is discarded and the enclosing `opt` is
                        # returned below -- possibly intentional, but it
                        # looks like a bug; confirm before relying on
                        # `optional` for transitive dependencies.
                        cat, op = _category(parent)

                        if cat is not None:
                            return cat, opt

                    return cat, opt

                category, optional = _category(name)

            # If category is still None at this point
            # The dependency must have come from a VCS
            # dependency. To avoid missing packages
            # we assume "main" category and not optional
            if category is None:
                category = 'main'
                optional = False

            if not isinstance(checksum, list):
                checksum = [checksum]

            # Retrieving Python restriction if any
            python = self._get_pythons_for_package(
                name, reversed_dependencies, deps
            )
            python = list(python)

            if '*' in python:
                # If at least one parent gave a wildcard
                # Then it should be installed for any Python version
                python = ['*']

            package = {
                'name': name,
                'version': version,
                'checksum': checksum,
                'category': category,
                'optional': optional,
                'python': python
            }

            packages.append(package)

        return sorted(packages, key=lambda p: p['name'].lower())
Esempio n. 15
0
def resolve_deps(deps, sources=None, verbose=False, hashes=False):
    """Resolve *deps* with pip-tools.

    Returns a list of dicts with ``name`` and ``version`` keys; when
    *hashes* is True, a ``hashes`` list of warehouse sha256 digests is
    included where available.
    """
    constraints = []

    for dep in deps:
        if dep.startswith('-e '):
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = pip.req.InstallRequirement.from_line(dep)
        constraints.append(constraint)

    pip_command = get_pip_command()

    pip_args = []

    if sources:
        pip_args.extend(['-i', sources[0]['url']])

    pip_options, _ = pip_command.parse_args(pip_args)

    pypi = PyPIRepository(pip_options=pip_options, session=requests)

    if verbose:
        logging.log.verbose = True

    resolver = Resolver(constraints=constraints, repository=pypi)
    results = []

    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    resolved_tree = resolver.resolve()

    # NOTE(review): the original loop had no body at all (a SyntaxError);
    # reconstructed from the sibling resolve_deps() implementations in
    # this file.
    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)

        if hashes:
            try:
                collected_hashes = []
                response = requests.get(
                    'https://pypi.org/pypi/{0}/json'.format(name), timeout=10)
                for release in response.json()['releases'][version]:
                    collected_hashes.append(release['digests']['sha256'])

                collected_hashes = ['sha256:' + s for s in collected_hashes]
                results.append({'name': name, 'version': version, 'hashes': collected_hashes})
            except (ValueError, KeyError):
                results.append({'name': name, 'version': version})
        else:
            results.append({'name': name, 'version': version})

    return results

def format_toml(data):
    """Pretty-formats a given toml string."""
    lines = data.split('\n')
    formatted = []
    for index, line in enumerate(lines):
        # Insert a blank line before every section header except the first line.
        if index > 0 and line.startswith('['):
            formatted.append('\n{0}'.format(line))
        else:
            formatted.append(line)
    return '\n'.join(formatted)


def multi_split(s, split):
    """Splits on multiple given separators."""
    # Normalise every separator to '|', then split once, dropping empties.
    normalized = s
    for sep in split:
        normalized = normalized.replace(sep, '|')
    return [part for part in normalized.split('|') if part]


def convert_deps_from_pip(dep):
    """Converts a pip-formatted dependency to a Pipfile-formatted one."""
    dependency = {}

    import requirements

    req = [r for r in requirements.parse(dep)][0]
    # VCS Installs.
    if req.vcs:
        if req.name is None:
            raise ValueError('pipenv requires an #egg fragment for version controlled '
                             'dependencies. Please install remote dependency '
                             'in the form {0}#egg=<package-name>.'.format(req.uri))

        # Crop off the git+, etc part.
        dependency[req.name] = {req.vcs: req.uri[len(req.vcs) + 1:]}

        # Add --editable, if it's there.
        if req.editable:
            dependency[req.name].update({'editable': True})

        # Add the specifier, if it was provided.
        if req.revision:
            dependency[req.name].update({'ref': req.revision})

    elif req.specs or req.extras:
        specs = None
        # Comparison operators: e.g. Django>1.10
        if req.specs:
            # Everything after the bare package name is the version spec.
            r = multi_split(dep, '=<>')
            specs = dep[len(r[0]):]
            dependency[req.name] = specs

        # Extras: e.g. requests[socks]
        if req.extras:
            dependency[req.name] = {'extras': req.extras}

            if specs:
                dependency[req.name].update({'version': specs})



    # Bare dependencies: e.g. requests
    else:
        dependency[dep] = '*'

    return dependency
def convert_deps_to_pip(deps, r=True):
    """Converts a Pipfile-formatted dependency to a pip-formatted one.

    When *r* is True (the default), writes the converted requirements to a
    temporary requirements.txt and returns its path; otherwise returns the
    list of requirement strings.
    """
    dependencies = []

    for dep in deps.keys():
        # Default (e.g. '>1.10'); dict-style entries are unpacked below.
        extra = deps[dep] if isinstance(deps[dep], six.string_types) else ''
        version = ''

        # Get rid of '*'.
        if deps[dep] == '*' or str(extra) == '{}':
            extra = ''

        # Renamed from 'hash' to avoid shadowing the builtin.
        hash_part = ''
        # Support for single hash (spec 1).  NOTE(review): when deps[dep] is a
        # plain string this is a substring test — presumably never true for
        # real specifiers, but worth confirming.
        if 'hash' in deps[dep]:
            hash_part = ' --hash={0}'.format(deps[dep]['hash'])

        # Support for multiple hashes (spec 2).
        if 'hashes' in deps[dep]:
            hash_part = '{0} '.format(''.join([' --hash={0} '.format(h) for h in deps[dep]['hashes']]))

        # Support for extras (e.g. requests[socks])
        if 'extras' in deps[dep]:
            extra = '[{0}]'.format(deps[dep]['extras'][0])

        if 'version' in deps[dep]:
            version = deps[dep]['version']

        # Support for version control
        maybe_vcs = [vcs for vcs in VCS_LIST if vcs in deps[dep]]

        vcs = maybe_vcs[0] if maybe_vcs else None

        if vcs:
            extra = '{0}+{1}'.format(vcs, deps[dep][vcs])

            # Support for @refs.
            if 'ref' in deps[dep]:
                extra += '@{0}'.format(deps[dep]['ref'])

            extra += '#egg={0}'.format(dep)
            # Support for editable.
            if 'editable' in deps[dep]:
                # Support for --egg.
                dep = '-e '
            else:
                dep = ''

        dependencies.append('{0}{1}{2}{3}'.format(dep, extra, version, hash_part))

    if not r:
        return dependencies

    # Write requirements.txt to tmp directory.
    f = tempfile.NamedTemporaryFile(suffix='-requirements.txt', delete=False)
    f.write('\n'.join(dependencies).encode('utf-8'))
    # Bug fix: close the file so its contents are flushed to disk before the
    # path is handed back to the caller.
    f.close()
    return f.name
def mkdir_p(newdir):
    """Recursively create *newdir*, the way a good mkdir should :)

        - already exists, silently complete
        - regular file in the way, raise an exception
        - parent directory(ies) does not exist, make them as well
        From: http://code.activestate.com/recipes/82465-a-friendly-mkdir/
    """
    if os.path.isdir(newdir):
        # Nothing to do.
        return
    if os.path.isfile(newdir):
        raise OSError("a file with the same name as the desired dir, '{0}', already exists.".format(newdir))
    parent, leaf = os.path.split(newdir)
    # Build missing ancestors first, then the directory itself.
    if parent and not os.path.isdir(parent):
        mkdir_p(parent)
    if leaf:
        os.mkdir(newdir)


def is_required_version(version, specified_version):
    """Check to see if there's a hard requirement for version
    number provided in the Pipfile.
    """
    # Certain packages may be defined with multiple values.
    if isinstance(specified_version, dict):
        specified_version = specified_version.get('version', '')
    # Only an exact '==' pin constitutes a hard requirement.
    if not specified_version.startswith('=='):
        return True
    pinned = specified_version.split('==')[1].strip()
    return version.strip() == pinned


def is_vcs(pipfile_entry):
    """Determine if dictionary entry from Pipfile is for a vcs dependency."""
    # Only dict-style entries can carry a VCS key; everything else is not VCS.
    if not isinstance(pipfile_entry, dict):
        return False
    return any(key in VCS_LIST for key in pipfile_entry)


def pep440_version(version):
    """Normalize *version* using pip's built-in PEP 440 version parser."""
    parsed = pip.index.parse_version(version)
    return str(parsed)


def pep423_name(name):
    """Normalize package name to PEP 423 style standard."""
    # Lowercase first, then swap underscores for hyphens.
    lowered = name.lower()
    return lowered.replace('_', '-')


def proper_case(package_name):
    """Properly case project name from pypi.org

    Raises IOError when the package cannot be found on PyPI.
    """
    # Hit the simple API; PyPI redirects to the canonically-cased URL.
    r = requests.get('https://pypi.org/pypi/{0}/json'.format(package_name), timeout=0.3, stream=True)
    if not r.ok:
        raise IOError('Unable to find package {0} in PyPI repository.'.format(package_name))

    # Pull the canonical name back out of the (possibly redirected) URL.
    match = parse.parse('https://pypi.org/pypi/{name}/json', r.url)
    return match['name']
def split_vcs(split_file):
    """Split VCS dependencies out from file.

    Moves every VCS entry found in the package sections into a parallel
    '<section>-vcs' section, mutating and returning *split_file*.
    """
    # Pipfile uses 'packages'/'dev-packages'; lockfiles use 'default'/'develop'.
    if 'packages' in split_file or 'dev-packages' in split_file:
        sections = ('packages', 'dev-packages')
    elif 'default' in split_file or 'develop' in split_file:
        sections = ('default', 'develop')
    else:
        # Bug fix: 'sections' was previously unbound here, raising NameError
        # for files with neither set of keys; treat them as having no sections.
        sections = ()

    # For each vcs entry in a given section, move it to section-vcs.
    for section in sections:
        entries = split_file.get(section, {})
        vcs_dict = dict((k, entries.pop(k)) for k in list(entries.keys()) if is_vcs(entries[k]))
        split_file[section+'-vcs'] = vcs_dict

    return split_file


def recase_file(file_dict):
    """Recase file before writing to output.

    Re-keys each package section using the properly-cased name from PyPI,
    falling back to the original key when the lookup fails.  Mutates and
    returns *file_dict*.
    """
    # Pipfile uses 'packages'/'dev-packages'; lockfiles use 'default'/'develop'.
    if 'packages' in file_dict or 'dev-packages' in file_dict:
        sections = ('packages', 'dev-packages')
    elif 'default' in file_dict or 'develop' in file_dict:
        sections = ('default', 'develop')
    else:
        # Bug fix: 'sections' was previously unbound here, raising NameError
        # for files with neither set of keys; treat them as having no sections.
        sections = ()

    for section in sections:
        file_section = file_dict.get(section, {})

        # Try to properly case each key if we can.
        for key in list(file_section.keys()):
            try:
                cased_key = proper_case(key)
            except IOError:
                # Unknown on PyPI — keep the key as written.
                cased_key = key
            file_section[cased_key] = file_section.pop(key)

    return file_dict


def walk_up(bottom):
    """mimic os.walk, but walk 'up' instead of down the directory tree.
    From: https://gist.github.com/zdavkeos/1098474
    """
    # Iterative rather than recursive: climb one level per pass.
    current = os.path.realpath(bottom)
    while True:
        try:
            entries = os.listdir(current)
        except Exception:
            # Unreadable directory: stop the walk silently.
            return

        subdirs, files = [], []
        for entry in entries:
            if os.path.isdir(os.path.join(current, entry)):
                subdirs.append(entry)
            else:
                files.append(entry)

        yield current, subdirs, files

        parent = os.path.realpath(os.path.join(current, '..'))
        # Reaching our own parent means we are at the filesystem root.
        if parent == current:
            return
        current = parent


def find_requirements(max_depth=3):
    """Returns the path of a requirements.txt in parent directories.

    Walks upward from the current working directory, checking at most
    max_depth - 1 levels.  Raises RuntimeError when none is found.
    """
    i = 0
    for c, d, f in walk_up(os.getcwd()):
        i += 1

        if i < max_depth:
            # Bug fix: the old code guarded this lookup with
            # `if 'requirements.txt':`, a constant-true string test that
            # did nothing; the dead condition is removed.
            r = os.path.join(c, 'requirements.txt')
            if os.path.isfile(r):
                return r
    raise RuntimeError('No requirements.txt found!')
# Esempio n. 16
# 0
def resolve_deps(deps, which, which_pip, project, sources=None, verbose=False, python=False, clear=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.

    Each returned entry is a dict with 'name', 'version' and 'hashes' keys.
    """

    with HackedPythonVersion(python):

        class PipCommand(pip.basecommand.Command):
            """Needed for pip-tools."""
            name = 'PipCommand'

        constraints = []
        extra_constraints = []

        for dep in deps:
            # Write the requirement to a temp file so pip can parse it.
            # Bug fix: mkstemp returns (fd, path); the old code discarded the
            # fd (leaking one descriptor per dependency) and never removed
            # the temporary file.
            fd, t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')
            with os.fdopen(fd, 'w') as f:
                f.write(dep)

            try:
                if dep.startswith('-e '):
                    constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
                    # Resolve extra constraints from -e packages (that rely on setuptools.)
                    extra_constraints = best_matches_from(dep[len('-e '):], which=which, which_pip=which_pip, project=project)
                    extra_constraints = [pip.req.InstallRequirement.from_line(c) for c in extra_constraints]
                else:
                    # The list comprehension consumes parse_requirements fully,
                    # so the temp file is no longer needed afterwards.
                    constraint = [c for c in pip.req.parse_requirements(t, session=pip._vendor.requests)][0]
                    extra_constraints = []
            finally:
                os.remove(t)

            constraints.append(constraint)
            constraints.extend(extra_constraints)

        pip_command = get_pip_command()

        pip_args = []

        # Honor the first configured source index, if any.
        if sources:
            pip_args.extend(['-i', sources[0]['url']])

        pip_options, _ = pip_command.parse_args(pip_args)

        pypi = PyPIRepository(pip_options=pip_options, session=requests)

        if verbose:
            logging.log.verbose = True

        resolver = Resolver(constraints=constraints, repository=pypi, clear_caches=clear)
        results = []

        # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
        resolved_tree = resolver.resolve()

    for result in resolved_tree:
        name = pep423_name(result.name)
        version = clean_pkg_version(result.specifier)

        collected_hashes = []

        try:
            # Grab the hashes from the new warehouse API.
            r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))
            api_releases = r.json()['releases']

            # Normalize the API's version keys so they compare equal to our
            # cleaned version string.
            cleaned_releases = {}
            for api_version, api_info in api_releases.items():
                cleaned_releases[clean_pkg_version(api_version)] = api_info

            for release in cleaned_releases[version]:
                collected_hashes.append(release['digests']['sha256'])

            collected_hashes = ['sha256:' + s for s in collected_hashes]

            # Collect un-collectable hashes by falling back to pip-tools.
            if not collected_hashes:
                collected_hashes = list(list(resolver.resolve_hashes([result]).items())[0][1])

        except (ValueError, KeyError):
            # Best-effort: hashes stay empty when the API response is
            # malformed or the version is unknown to the warehouse.
            pass

        results.append({'name': name, 'version': version, 'hashes': collected_hashes})

    return results
# Esempio n. 17
# 0
    def on_end(self, event):
        """Render requirements*.txt files and setup.py when the event fires.

        Resolves the project's dependencies with pip-tools (optionally
        computing one uniform pin shared across all extras), writes one
        requirements file per extra, then re-renders setup.py from the
        configured template.
        """

        # Our config object
        python_config = event.config["python"]

        # Pip / PyPI
        repository = PyPIRepository([], cache_dir=CACHE_DIR)

        # We just need to construct this structure if use_uniform_requirements == True
        requirements_by_name = {}

        if python_config.use_uniform_requirements:
            # Accumulate the base requirements plus every extra into a single
            # temp file so one resolver run sees the whole constraint set.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            for extra in itertools.chain((None,), python_config.get_extras()):
                tmpfile.write("\n".join(python_config.get_requirements(extra=extra)) + "\n")
                tmpfile.flush()

            constraints = list(
                parse_requirements(
                    tmpfile.name, finder=repository.finder, session=repository.session, options=repository.options
                )
            )

            # This resolver is able to evaluate ALL the dependencies along the extras
            resolver = Resolver(
                constraints,
                repository,
                cache=DependencyCache(CACHE_DIR),
                # cache=DependencyCache(tempfile.tempdir),
                prereleases=False,
                clear_caches=False,
                allow_unsafe=False,
            )

            # Index each resolved requirement by its parsed name so the
            # per-extra pass below can reuse the unified pin and link.
            for req in resolver.resolve(max_rounds=10):
                requirements_by_name[parse_requirement(str(req.req)).name] = SimpleNamespace(
                    requirement=format_requirement(req).strip().replace(" ", ""),
                    url=req.link
                )

            python_config.check_duplicate_dependencies_uniform(requirements_by_name)

        # Now it iterates along the versions in extras and looks for the requirements and its dependencies, using the
        # structure created above to select the unified versions (unless the flag indicates otherwise).
        for extra in itertools.chain((None,), python_config.get_extras()):
            requirements_file = "requirements{}.txt".format("-" + extra if extra else "")

            # Only (re)generate when overriding is allowed or the file is absent.
            if python_config.override_requirements or not os.path.exists(requirements_file):
                tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
                tmpfile.write("\n".join(python_config.get_requirements(extra=extra)) + "\n")
                tmpfile.flush()

                constraints = list(
                    parse_requirements(
                        tmpfile.name, finder=repository.finder, session=repository.session, options=repository.options
                    )
                )
                resolver = Resolver(
                    constraints,
                    repository,
                    cache=DependencyCache(CACHE_DIR),
                    prereleases=False,
                    clear_caches=False,
                    allow_unsafe=False,
                )

                if not python_config.use_uniform_requirements:
                    python_config.check_duplicate_dependencies_nonuniform(extra, resolver)

                # Keep everything that resolves except the project itself.
                requirements_list = []
                for req in resolver.resolve(max_rounds=10):
                    if req.name != python_config.get("name"):
                        requirement = python_config.get_requirement_info_by_name(req, requirements_by_name)
                        if requirement:
                            requirements_list.append(requirement)

                self.render_file_inline(
                    requirements_file,
                    "\n".join(
                        (
                            "-e .{}".format("[" + extra + "]" if extra else ""),
                            *(("-r requirements.txt",) if extra else ()),
                            *python_config.get_vendors(extra=extra),
                            *sorted(requirements_list),
                        )
                    ),
                    override=python_config.override_requirements,
                )

        # Updates setup file
        setup = python_config.get_setup()

        # url/download_url templates may reference the package name, the
        # current user, and a literal "{version}" placeholder kept as-is.
        context = {
            "url": setup.pop("url", ""),
            "download_url": setup.pop("download_url", ""),
        }

        for k, v in context.items():
            context[k] = context[k].format(name=setup["name"], user=getuser(), version="{version}")

        context.update(
            {
                "entry_points": setup.pop("entry_points", {}),
                "extras_require": python_config.get("extras_require"),
                "install_requires": python_config.get("install_requires"),
                "python": python_config,
                "setup": setup,
                "banner": get_override_warning_banner(),
            }
        )

        # Render (with overwriting) the allmighty setup.py
        self.render_file("setup.py", "python/setup.py.j2", context, override=True)