Example #1
def test_finder_priority_nonegg_over_eggfragments():
    """Test PackageFinder prefers non-egg links over "#egg=" links"""
    req = InstallRequirement.from_line('bar==1.0', None)
    links = ['http://foo/bar.py#egg=bar-1.0', 'http://foo/bar-1.0.tar.gz']

    finder = PackageFinder(links, [], session=PipSession())

    with patch.object(finder, "_get_pages", lambda x, y: []):
        all_versions = finder.find_all_candidates(req.name)
        assert all_versions[0].location.url.endswith('tar.gz')
        assert all_versions[1].location.url.endswith('#egg=bar-1.0')

        link = finder.find_requirement(req, False).location

    assert link.url.endswith('tar.gz')

    links.reverse()
    finder = PackageFinder(links, [], session=PipSession())

    with patch.object(finder, "_get_pages", lambda x, y: []):
        all_versions = finder.find_all_candidates(req.name)
        assert all_versions[0].location.url.endswith('tar.gz')
        assert all_versions[1].location.url.endswith('#egg=bar-1.0')
        link = finder.find_requirement(req, False).location

    assert link.url.endswith('tar.gz')
Example #2
def test_finder_priority_nonegg_over_eggfragments():
    """Test PackageFinder prefers non-egg links over "#egg=" links"""
    req = InstallRequirement.from_line('bar==1.0', None)
    links = ['http://foo/bar.py#egg=bar-1.0', 'http://foo/bar-1.0.tar.gz']

    finder = PackageFinder(links, [], session=PipSession())

    with patch.object(finder, "_get_pages", lambda x, y: []):
        all_versions = finder.find_all_candidates(req.name)
        assert all_versions[0].location.url.endswith('tar.gz')
        assert all_versions[1].location.url.endswith('#egg=bar-1.0')

        link = finder.find_requirement(req, False)

    assert link.url.endswith('tar.gz')

    links.reverse()
    finder = PackageFinder(links, [], session=PipSession())

    with patch.object(finder, "_get_pages", lambda x, y: []):
        all_versions = finder.find_all_candidates(req.name)
        assert all_versions[0].location.url.endswith('tar.gz')
        assert all_versions[1].location.url.endswith('#egg=bar-1.0')
        link = finder.find_requirement(req, False)

    assert link.url.endswith('tar.gz')
Example #3
File: index.py Project: jayvdb/grip
class Index:
    def __init__(self, url):
        self.session = PipSession(cache=os.path.join(USER_CACHE_DIR, 'http'))
        self.finder = PackageFinder([], [url], session=self.session)

    def candidates_for(self, dep, source=False):
        candidates = self.finder.find_all_candidates(dep.name)
        if source:
            candidates = [x for x in candidates if not x.location.is_wheel]
        return candidates

    def best_candidate_of(self, dep, candidates):
        if dep:
            compatible_versions = set(
                dep.req.specifier.filter([str(c.version) for c in candidates],
                                         prereleases=False))

            if len(compatible_versions) == 0:
                compatible_versions = set(
                    dep.req.specifier.filter(
                        [str(c.version) for c in candidates],
                        prereleases=True))

            applicable_candidates = [
                c for c in candidates if str(c.version) in compatible_versions
            ]
        else:
            applicable_candidates = candidates

        if not len(applicable_candidates):
            return None

        return max(applicable_candidates, key=self.finder._candidate_sort_key)
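A minimal usage sketch for the Index helper above, assuming pip's pre-10 internal layout (the same PackageFinder/PipSession imports used throughout these examples); the index URL and dependency name are only illustrative:

# Hypothetical usage of the Index class above (pip < 10 internals assumed).
from pip.req import InstallRequirement

index = Index('https://pypi.python.org/simple')
dep = InstallRequirement.from_line('requests>=2.0', None)
candidates = index.candidates_for(dep, source=True)   # source distributions only
best = index.best_candidate_of(dep, candidates)
if best is not None:
    print(best.version, best.location.url)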
Example #4
def test_finder_only_installs_data_require(data):
    """
    Test whether the PackageFinder understands data-python-requires

    This can optionally be exposed by a simple repository to tell which
    distributions are compatible with which version of Python by adding a
    data-python-requires attribute to the anchor links.

    See PEP 503 for more information.
    """

    # using a local index (that has pre & dev releases)
    finder = PackageFinder([],
                           [data.index_url("datarequire")],
                           session=PipSession())
    links = finder.find_all_candidates("fakepackage")

    expected = ['1.0.0', '9.9.9']
    if sys.version_info < (2, 7):
        expected.append('2.6.0')
    elif (2, 7) < sys.version_info < (3,):
        expected.append('2.7.0')
    elif sys.version_info > (3, 3):
        expected.append('3.3.0')

    assert set([str(v.version) for v in links]) == set(expected)
Example #5
def latest_version(req, session, include_prereleases=False):
    """Returns a Version instance with the latest version for the package.

    :param req:                 Instance of
                                pip.req.req_install.InstallRequirement.
    :param session:             Instance of pip.download.PipSession.
    :param include_prereleases: Include prereleased beta versions.
    """
    if not req:  # pragma: nocover
        return None

    index_urls = [PyPI.simple_url]
    finder = PackageFinder(session=session, find_links=[],
                           index_urls=index_urls)

    all_candidates = finder.find_all_candidates(req.name)

    if not include_prereleases:
        all_candidates = [candidate for candidate in all_candidates
                          if not candidate.version.is_prerelease]

    if not all_candidates:
        return None

    best_candidate = max(all_candidates,
                         key=finder._candidate_sort_key)
    remote_version = best_candidate.version
    return remote_version
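A short, hedged example of calling the helper above, assuming the legacy pip.download.PipSession and pip.req.InstallRequirement imports that its docstring refers to; the requirement name is arbitrary:

# Illustrative call only.
from pip.download import PipSession
from pip.req import InstallRequirement

session = PipSession()
req = InstallRequirement.from_line('requests', None)
version = latest_version(req, session, include_prereleases=False)
print(version)  # a packaging Version instance, or None if nothing was found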
Example #6
def test_finder_only_installs_data_require(data):
    """
    Test whether the PackageFinder understands data-python-requires

    This can optionally be exposed by a simple repository to tell which
    distributions are compatible with which version of Python by adding a
    data-python-requires attribute to the anchor links.

    See PEP 503 for more information.
    """

    # using a local index (that has pre & dev releases)
    finder = PackageFinder([],
                           [data.index_url("datarequire")],
                           session=PipSession())
    links = finder.find_all_candidates("fakepackage")

    expected = ['1.0.0', '9.9.9']
    if sys.version_info < (2, 7):
        expected.append('2.6.0')
    elif (2, 7) < sys.version_info < (3,):
        expected.append('2.7.0')
    elif sys.version_info > (3, 3):
        expected.append('3.3.0')

    assert set([str(v.version) for v in links]) == set(expected)
Example #7
def lookup_candidates(pkg, version):
    finder = PackageFinder(find_links=[],
                           format_control=FormatControl(set([":all:"]), set()),
                           index_urls=[_DEFAULT_INDEX],
                           session=PipSession())
    for candidate in finder.find_all_candidates(pkg):
        if str(candidate.version) == version:
            return candidate.location
    return None
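For orientation, a sketch of how this helper might be invoked; _DEFAULT_INDEX and FormatControl come from the snippet's own module (not shown here), so the call below is illustrative rather than definitive:

# Returns the Link of the matching source distribution, or None.
location = lookup_candidates('requests', '2.18.4')
if location is not None:
    print(location.url)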
Example #8
def test_finder_priority_file_over_page(data):
    """Test PackageFinder prefers file links over equivalent page links"""
    req = InstallRequirement.from_line('gmpy==1.15', None)
    finder = PackageFinder(
        [data.find_links],
        ["http://pypi.python.org/simple"],
        session=PipSession(),
    )
    all_versions = finder.find_all_candidates(req.name)
    # 1 file InstallationCandidate followed by all https ones
    assert all_versions[0].location.scheme == 'file'
    assert all(version.location.scheme == 'https'
               for version in all_versions[1:]), all_versions

    link = finder.find_requirement(req, False).location
    assert link.url.startswith("file://")
Example #9
def test_finder_priority_file_over_page(data):
    """Test PackageFinder prefers file links over equivalent page links"""
    req = InstallRequirement.from_line('gmpy==1.15', None)
    finder = PackageFinder(
        [data.find_links],
        ["http://pypi.python.org/simple"],
        session=PipSession(),
    )
    all_versions = finder.find_all_candidates(req.name)
    # 1 file InstallationCandidate followed by all https ones
    assert all_versions[0].location.scheme == 'file'
    assert all(version.location.scheme == 'https'
               for version in all_versions[1:]), all_versions

    link = finder.find_requirement(req, False)
    assert link.url.startswith("file://")
Example #10
def test_finder_priority_page_over_deplink():
    """
    Test PackageFinder prefers page links over equivalent dependency links
    """
    req = InstallRequirement.from_line('pip==1.5.6', None)
    finder = PackageFinder(
        [],
        ["https://pypi.python.org/simple"],
        process_dependency_links=True,
        session=PipSession(),
    )
    finder.add_dependency_links([
        'https://warehouse.python.org/packages/source/p/pip/pip-1.5.6.tar.gz'])
    all_versions = finder.find_all_candidates(req.name)
    # Check that the dependency_link is last
    assert all_versions[-1].location.url.startswith('https://warehouse')
    link = finder.find_requirement(req, False).location
    assert link.url.startswith("https://pypi"), link
Example #11
def test_finder_priority_page_over_deplink():
    """
    Test PackageFinder prefers page links over equivalent dependency links
    """
    req = InstallRequirement.from_line('pip==1.5.6', None)
    finder = PackageFinder(
        [],
        ["https://pypi.python.org/simple"],
        process_dependency_links=True,
        session=PipSession(),
    )
    finder.add_dependency_links([
        'https://warehouse.python.org/packages/source/p/pip/pip-1.5.6.tar.gz'])
    all_versions = finder.find_all_candidates(req.name)
    # Check that the dependency_link is last
    assert all_versions[-1].location.url.startswith('https://warehouse')
    link = finder.find_requirement(req, False)
    assert link.url.startswith("https://pypi"), link
Example #12
    def run(self, package_name):

        # PackageFinder requires session which requires options

        options, args = self.parse_args([])
        session = self._build_session(options=options)
        
        finder = PackageFinder(
            find_links=[],
            index_urls=['https://pypi.python.org/simple/'],
            session=session,
        )
       
        candidates = finder.find_all_candidates(package_name)
        
        # set() to remove repeated versions - e.g. matplotlib
        versions = sorted(set(c.version for c in candidates))
        
        print('\n'.join(map(str, versions)))
Example #13
    def run(self, package_name):

        # PackageFinder requires session which requires options

        options, args = self.parse_args([])
        session = self._build_session(options=options)

        finder = PackageFinder(
            find_links=[],
            index_urls=['https://pypi.python.org/simple/'],
            session=session,
        )

        candidates = finder.find_all_candidates(package_name)

        # set() to remove repeated versions - e.g. matplotlib
        versions = sorted(set(c.version for c in candidates))

        print('\n'.join(map(str, versions)))
Example #14
def pip_version_check(session, options):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(state.state["last_check"],
                                                    SELFCHECK_DATE_FMT)
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Let's use PackageFinder to see what the latest pip version is
            finder = PackageFinder(
                find_links=options.find_links,
                index_urls=[options.index_url] + options.extra_index_urls,
                allow_all_prereleases=False,  # Explicitly set to False
                trusted_hosts=options.trusted_hosts,
                process_dependency_links=options.process_dependency_links,
                session=session,
            )
            all_candidates = finder.find_all_candidates("pip")
            if not all_candidates:
                return
            pypi_version = str(
                max(all_candidates, key=lambda c: c.version).version)

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version
                and pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.", pip_version,
                pypi_version, pip_cmd)
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
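A rough sketch of how this self-check gets wired up; in pip itself `options` comes from the command's option parser, so the hand-built Namespace below only mirrors the attributes the function reads and is purely illustrative:

# Assumed attributes match the options accessed in pip_version_check above.
from argparse import Namespace
from pip.download import PipSession

options = Namespace(
    find_links=[],
    index_url='https://pypi.python.org/simple/',
    extra_index_urls=[],
    trusted_hosts=[],
    process_dependency_links=False,
)
pip_version_check(PipSession(), options)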
Example #15
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'

    """
    The PyPIRepository will use the provided Finder instance to look up
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    def __init__(self, pip_options, session):
        self.session = session

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = os.path.join(CACHE_DIR, 'pkgs')
        self._wheel_download_dir = os.path.join(CACHE_DIR, 'wheels')

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory('build')
        self._source_dir = TemporaryDirectory('source')

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            # pip 8 changed the internal API, making this a public method
            if pip_version_info >= (8, 0):
                candidates = self.finder.find_all_candidates(req_name)
            else:
                candidates = self.finder._find_all_versions(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True)
        matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates),
                                                  prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [candidates_by_version[ver] for ver in matching_versions]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates)
        best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(
            best_candidate.project, best_candidate.version, ireq.extras, constraint=ireq.constraint
        )

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq))

        if ireq not in self._dependencies_cache:

            if not os.path.isdir(self._download_dir):
                os.makedirs(self._download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            reqset = RequirementSet(self.build_dir,
                                    self.source_dir,
                                    download_dir=self._download_dir,
                                    wheel_download_dir=self._wheel_download_dir,
                                    session=self.session)
            self._dependencies_cache[ireq] = reqset._prepare_file(self.finder, ireq)
        return set(self._dependencies_cache[ireq])

    def get_hashes(self, ireq):
        """
        Given a pinned InstallRequirement, returns a set of hashes that represent
        all of the files for a given requirement. It is not acceptable for an
        editable or unpinned requirement to be passed to this function.
        """
        if ireq.editable or not is_pinned_requirement(ireq):
            raise TypeError(
                "Expected pinned requirement, not unpinned or editable, got {}".format(ireq))

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.
        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
        matching_versions = list(
            ireq.specifier.filter((candidate.version for candidate in all_candidates)))
        matching_candidates = candidates_by_version[matching_versions[0]]

        return {
            self._get_file_hash(candidate.location)
            for candidate in matching_candidates
        }

    def _get_file_hash(self, location):
        with TemporaryDirectory() as tmpdir:
            unpack_url(
                location, self.build_dir,
                download_dir=tmpdir, only_download=True, session=self.session
            )
            files = os.listdir(tmpdir)
            assert len(files) == 1
            filename = os.path.abspath(os.path.join(tmpdir, files[0]))

            h = hashlib.new(FAVORITE_HASH)
            with open(filename, "rb") as fp:
                for chunk in iter(lambda: fp.read(8096), b""):
                    h.update(chunk)

        return ":".join([FAVORITE_HASH, h.hexdigest()])
Example #16
def get_latest_release_version(finder: PackageFinder,
                               package: str) -> VersionInfo:
    results = finder.find_all_candidates(package)
    versions = sorted(set([p.version for p in results]), reverse=True)
    return semver.parse_version_info(str(versions[0]))
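A hedged usage example for the helper above; it assumes a PackageFinder already configured against some index (see the surrounding snippets for construction) and that the semver package is installed:

# 'pip' is just an example package name; finder construction is shown elsewhere.
info = get_latest_release_version(finder, 'pip')
print(info.major, info.minor, info.patch)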
Example #17
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'

    """
    The PyPIRepository will use the provided Finder instance to look up
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    def __init__(self, pip_options, session):
        self.session = session

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = os.path.join(CACHE_DIR, 'pkgs')
        self._wheel_download_dir = os.path.join(CACHE_DIR, 'wheels')

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory('build')
        self._source_dir = TemporaryDirectory('source')

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            # pip 8 changed the internal API, making this a public method
            if pip_version_info >= (8, 0):
                candidates = self.finder.find_all_candidates(req_name)
            else:
                candidates = self.finder._find_all_versions(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True)
        matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates),
                                                  prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [candidates_by_version[ver] for ver in matching_versions]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates)
        best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(
            best_candidate.project, best_candidate.version, ireq.extras
        )

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq))

        if not os.path.isdir(self._download_dir):
            os.makedirs(self._download_dir)
        if not os.path.isdir(self._wheel_download_dir):
            os.makedirs(self._wheel_download_dir)

        reqset = RequirementSet(self.build_dir,
                                self.source_dir,
                                download_dir=self._download_dir,
                                wheel_download_dir=self._wheel_download_dir,
                                session=self.session)
        dependencies = reqset._prepare_file(self.finder, ireq)
        return set(dependencies)
Example #18
def test_find_all_candidates_find_links_and_index(data):
    finder = PackageFinder(
        [data.find_links], [data.index_url('simple')], session=PipSession())
    versions = finder.find_all_candidates('simple')
    # first the find-links versions then the page versions
    assert [str(v.version) for v in versions] == ['3.0', '2.0', '1.0', '1.0']
Example #19
def test_find_all_candidates_index(data):
    finder = PackageFinder(
        [], [data.index_url('simple')], session=PipSession())
    versions = finder.find_all_candidates('simple')
    assert [str(v.version) for v in versions] == ['1.0']
Example #20
def test_find_all_candidates_find_links(data):
    finder = PackageFinder(
        [data.find_links], [], session=PipSession())
    versions = finder.find_all_candidates('simple')
    assert [str(v.version) for v in versions] == ['3.0', '2.0', '1.0']
Example #21
def test_find_all_candidates_nothing(data):
    """Find nothing without anything"""
    finder = PackageFinder([], [], session=PipSession())
    assert not finder.find_all_candidates('pip')
Example #22
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'

    """
    The PyPIRepository will use the provided Finder instance to look up
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    def __init__(self, pip_options, session):
        self.session = session

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs'))
        self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels'))

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory(fs_str('build'))
        self._source_dir = TemporaryDirectory(fs_str('source'))

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            candidates = self.finder.find_all_candidates(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True)
        matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates),
                                                  prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [candidates_by_version[ver] for ver in matching_versions]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates, self.finder)
        best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(
            best_candidate.project, best_candidate.version, ireq.extras, constraint=ireq.constraint
        )

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq))

        if ireq not in self._dependencies_cache:
            if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
                # No download_dir for locally available editable requirements.
                # If a download_dir is passed, pip will unnecessarily
                # archive the entire source directory
                download_dir = None
            elif ireq.link and not ireq.link.is_artifact:
                # No download_dir for VCS sources.  This also works around pip
                # using git-checkout-index, which gets rid of the .git dir.
                download_dir = None
            else:
                download_dir = self._download_dir
                if not os.path.isdir(download_dir):
                    os.makedirs(download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            reqset = RequirementSet(self.build_dir,
                                    self.source_dir,
                                    download_dir=download_dir,
                                    wheel_download_dir=self._wheel_download_dir,
                                    session=self.session)
            self._dependencies_cache[ireq] = reqset._prepare_file(self.finder, ireq)
        return set(self._dependencies_cache[ireq])

    def get_hashes(self, ireq):
        """
        Given an InstallRequirement, return a set of hashes that represent all
        of the files for a given requirement. Editable requirements return an
        empty set. Unpinned requirements raise a TypeError.
        """
        if ireq.editable:
            return set()

        if not is_pinned_requirement(ireq):
            raise TypeError(
                "Expected pinned requirement, got {}".format(ireq))

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.
        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
        matching_versions = list(
            ireq.specifier.filter((candidate.version for candidate in all_candidates)))
        matching_candidates = candidates_by_version[matching_versions[0]]

        return {
            self._get_file_hash(candidate.location)
            for candidate in matching_candidates
        }

    def _get_file_hash(self, location):
        h = hashlib.new(FAVORITE_HASH)
        with open_local_or_remote_file(location, self.session) as fp:
            for chunk in iter(lambda: fp.read(8096), b""):
                h.update(chunk)
        return ":".join([FAVORITE_HASH, h.hexdigest()])

    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.

        This also saves the candidate cache and sets a new one, or else the results from the
        previous non-patched calls will interfere.
        """
        def _wheel_supported(self, tags=None):
            # Ignore current platform. Support everything.
            return True

        def _wheel_support_index_min(self, tags=None):
            # All wheels are equal priority for sorting.
            return 0

        original_wheel_supported = Wheel.supported
        original_support_index_min = Wheel.support_index_min
        original_cache = self._available_candidates_cache

        Wheel.supported = _wheel_supported
        Wheel.support_index_min = _wheel_support_index_min
        self._available_candidates_cache = {}

        try:
            yield
        finally:
            Wheel.supported = original_wheel_supported
            Wheel.support_index_min = original_support_index_min
            self._available_candidates_cache = original_cache
Example #23
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'

    """
    The PyPIRepository will use the provided Finder instance to look up
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    def __init__(self, pip_options, session):
        self.session = session

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = os.path.join(CACHE_DIR, 'pkgs')
        self._wheel_download_dir = os.path.join(CACHE_DIR, 'wheels')

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory('build')
        self._source_dir = TemporaryDirectory('source')

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            # pip 8 changed the internal API, making this a public method
            if pip_version_info >= (8, 0):
                candidates = self.finder.find_all_candidates(req_name)
            else:
                candidates = self.finder._find_all_versions(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True)
        matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates),
                                                  prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [candidates_by_version[ver] for ver in matching_versions]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates)
        best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(
            best_candidate.project, best_candidate.version, ireq.extras, constraint=ireq.constraint
        )

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq))

        if ireq not in self._dependencies_cache:
            if ireq.link and not ireq.link.is_artifact:
                # No download_dir for VCS sources.  This also works around pip
                # using git-checkout-index, which gets rid of the .git dir.
                download_dir = None
            else:
                download_dir = self._download_dir
                if not os.path.isdir(download_dir):
                    os.makedirs(download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            reqset = RequirementSet(self.build_dir,
                                    self.source_dir,
                                    download_dir=download_dir,
                                    wheel_download_dir=self._wheel_download_dir,
                                    session=self.session)
            self._dependencies_cache[ireq] = reqset._prepare_file(self.finder, ireq)
        return set(self._dependencies_cache[ireq])

    def get_hashes(self, ireq):
        """
        Given a pinned InstallRequirement, returns a set of hashes that represent
        all of the files for a given requirement. It is not acceptable for an
        editable or unpinned requirement to be passed to this function.
        """
        if not is_pinned_requirement(ireq):
            raise TypeError(
                "Expected pinned requirement, not unpinned or editable, got {}".format(ireq))

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.
        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
        matching_versions = list(
            ireq.specifier.filter((candidate.version for candidate in all_candidates)))
        matching_candidates = candidates_by_version[matching_versions[0]]

        return {
            self._get_file_hash(candidate.location)
            for candidate in matching_candidates
        }

    def _get_file_hash(self, location):
        with TemporaryDirectory() as tmpdir:
            unpack_url(
                location, self.build_dir,
                download_dir=tmpdir, only_download=True, session=self.session
            )
            files = os.listdir(tmpdir)
            assert len(files) == 1
            filename = os.path.abspath(os.path.join(tmpdir, files[0]))

            h = hashlib.new(FAVORITE_HASH)
            with open(filename, "rb") as fp:
                for chunk in iter(lambda: fp.read(8096), b""):
                    h.update(chunk)

        return ":".join([FAVORITE_HASH, h.hexdigest()])
Example #24
def test_find_all_candidates_find_links_and_index(data):
    finder = PackageFinder(
        [data.find_links], [data.index_url('simple')], session=PipSession())
    versions = finder.find_all_candidates('simple')
    # first the find-links versions then the page versions
    assert [str(v.version) for v in versions] == ['3.0', '2.0', '1.0', '1.0']
Example #25
def test_find_all_candidates_index(data):
    finder = PackageFinder(
        [], [data.index_url('simple')], session=PipSession())
    versions = finder.find_all_candidates('simple')
    assert [str(v.version) for v in versions] == ['1.0']
Example #26
def pip_version_check(session, options):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Let's use PackageFinder to see what the latest pip version is
            finder = PackageFinder(
                find_links=options.find_links,
                index_urls=[options.index_url] + options.extra_index_urls,
                allow_all_prereleases=False,  # Explicitly set to False
                trusted_hosts=options.trusted_hosts,
                process_dependency_links=options.process_dependency_links,
                session=session,
            )
            all_candidates = finder.find_all_candidates("pip")
            if not all_candidates:
                return
            pypi_version = str(
                max(all_candidates, key=lambda c: c.version).version
            )

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
Example #27
def test_find_all_candidates_find_links(data):
    finder = PackageFinder(
        [data.find_links], [], session=PipSession())
    versions = finder.find_all_candidates('simple')
    assert [str(v.version) for v in versions] == ['3.0', '2.0', '1.0']
Example #28
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'
    """
    The PyPIRepository will use the provided Finder instance to look up
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    def __init__(self, pip_options, session):
        self.session = session

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs'))
        self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels'))

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory(fs_str('build'))
        self._source_dir = TemporaryDirectory(fs_str('source'))

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            # pip 8 changed the internal API, making this a public method
            if pip_version_info >= (8, 0):
                candidates = self.finder.find_all_candidates(req_name)
            else:
                candidates = self.finder._find_all_versions(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates,
                                             key=lambda c: c.version,
                                             unique=True)
        matching_versions = ireq.specifier.filter(
            (candidate.version for candidate in all_candidates),
            prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [
            candidates_by_version[ver] for ver in matching_versions
        ]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates)
        best_candidate = max(matching_candidates,
                             key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(best_candidate.project,
                                        best_candidate.version,
                                        ireq.extras,
                                        constraint=ireq.constraint)

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError(
                'Expected pinned or editable InstallRequirement, got {}'.
                format(ireq))

        if ireq not in self._dependencies_cache:
            if ireq.editable and (ireq.source_dir
                                  and os.path.exists(ireq.source_dir)):
                # No download_dir for locally available editable requirements.
                # If a download_dir is passed, pip will unnecessarily
                # archive the entire source directory
                download_dir = None
            elif ireq.link and not ireq.link.is_artifact:
                # No download_dir for VCS sources.  This also works around pip
                # using git-checkout-index, which gets rid of the .git dir.
                download_dir = None
            else:
                download_dir = self._download_dir
                if not os.path.isdir(download_dir):
                    os.makedirs(download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            reqset = RequirementSet(
                self.build_dir,
                self.source_dir,
                download_dir=download_dir,
                wheel_download_dir=self._wheel_download_dir,
                session=self.session)
            self._dependencies_cache[ireq] = reqset._prepare_file(
                self.finder, ireq)
        return set(self._dependencies_cache[ireq])

    def get_hashes(self, ireq):
        """
        Given a pinned InstallRequirement, returns a set of hashes that represent
        all of the files for a given requirement. It is not acceptable for an
        editable or unpinned requirement to be passed to this function.
        """
        if not is_pinned_requirement(ireq):
            raise TypeError(
                "Expected pinned requirement, not unpinned or editable, got {}"
                .format(ireq))

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.
        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates,
                                             key=lambda c: c.version)
        matching_versions = list(
            ireq.specifier.filter(
                (candidate.version for candidate in all_candidates)))
        matching_candidates = candidates_by_version[matching_versions[0]]

        return {
            self._get_file_hash(candidate.location)
            for candidate in matching_candidates
        }

    def _get_file_hash(self, location):
        h = hashlib.new(FAVORITE_HASH)
        with open_local_or_remote_file(location, self.session) as fp:
            for chunk in iter(lambda: fp.read(8096), b""):
                h.update(chunk)
        return ":".join([FAVORITE_HASH, h.hexdigest()])

    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.

        This also saves the candidate cache and sets a new one, or else the results from the
        previous non-patched calls will interfere.
        """
        def _wheel_supported(self, tags=None):
            # Ignore current platform. Support everything.
            return True

        def _wheel_support_index_min(self, tags=None):
            # All wheels are equal priority for sorting.
            return 0

        original_wheel_supported = Wheel.supported
        original_support_index_min = Wheel.support_index_min
        original_cache = self._available_candidates_cache

        Wheel.supported = _wheel_supported
        Wheel.support_index_min = _wheel_support_index_min
        self._available_candidates_cache = {}

        try:
            yield
        finally:
            Wheel.supported = original_wheel_supported
            Wheel.support_index_min = original_support_index_min
            self._available_candidates_cache = original_cache
Example #29
def test_find_all_candidates_nothing(data):
    """Find nothing without anything"""
    finder = PackageFinder([], [], session=PipSession())
    assert not finder.find_all_candidates('pip')
Example #30
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'
    """
    The PyPIRepository will use the provided Finder instance to look up
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    def __init__(self, pip_options, session):
        self.session = session

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = os.path.join(CACHE_DIR, 'pkgs')
        self._wheel_download_dir = os.path.join(CACHE_DIR, 'wheels')

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory('build')
        self._source_dir = TemporaryDirectory('source')

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            # pip 8 changed the internal API, making this a public method
            if pip_version_info >= (8, 0):
                candidates = self.finder.find_all_candidates(req_name)
            else:
                candidates = self.finder._find_all_versions(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns an InstallRequirement pinned to the best matching version for
        the given InstallRequirement, according to the external repository.
        Editable requirements are returned unchanged.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates,
                                             key=lambda c: c.version,
                                             unique=True)
        matching_versions = ireq.specifier.filter(
            (candidate.version for candidate in all_candidates),
            prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [
            candidates_by_version[ver] for ver in matching_versions
        ]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates)
        best_candidate = max(matching_candidates,
                             key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(best_candidate.project,
                                        best_candidate.version, ireq.extras)

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError(
                'Expected pinned or editable InstallRequirement, got {}'.
                format(ireq))

        if not os.path.isdir(self._download_dir):
            os.makedirs(self._download_dir)
        if not os.path.isdir(self._wheel_download_dir):
            os.makedirs(self._wheel_download_dir)

        reqset = RequirementSet(self.build_dir,
                                self.source_dir,
                                download_dir=self._download_dir,
                                wheel_download_dir=self._wheel_download_dir,
                                session=self.session)
        dependencies = reqset._prepare_file(self.finder, ireq)
        return set(dependencies)
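
# Hedged usage sketch for the repository above. Real callers pass pip's own parsed
# options object; here a minimal stand-in namespace carries just the attributes the
# constructor reads, which keeps the sketch self-contained (network access required).
from types import SimpleNamespace

pip_options = SimpleNamespace(
    index_url=PyPIRepository.DEFAULT_INDEX_URL,
    extra_index_urls=[],
    no_index=False,
    find_links=[],
    trusted_hosts=[],
    pre=False,
    process_dependency_links=False,
)
repository = PyPIRepository(pip_options, PipSession())
ireq = InstallRequirement.from_line('requests>=2.0')
pinned = repository.find_best_match(ireq)            # pinned InstallRequirement, e.g. requests==2.x.y
dependencies = repository.get_dependencies(pinned)   # set of secondary InstallRequirements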
Ejemplo n.º 31
0
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = 'https://pypi.python.org/simple'

    """
    The PyPIRepository will use the provided Finder instance to look up
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    def __init__(self, pip_options, session):
        self.session = session

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs'))
        self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels'))

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory(fs_str('build'))
        self._source_dir = TemporaryDirectory(fs_str('source'))

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            # pip 8 changed the internal API, making this a public method
            if pip_version_info >= (8, 0):
                candidates = self.finder.find_all_candidates(req_name)
            else:
                candidates = self.finder._find_all_versions(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns an InstallRequirement pinned to the best matching version for
        the given InstallRequirement, according to the external repository.
        Editable and VCS requirements are returned unchanged.
        """
        if ireq.editable or is_vcs_link(ireq):
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True)
        matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates),
                                                  prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [candidates_by_version[ver] for ver in matching_versions]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates, self.finder.index_urls)
        best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(
            best_candidate.project, best_candidate.version, ireq.extras, constraint=ireq.constraint
        )

    def _get_dependencies(self, ireq):
        """
        :type ireq: pip.req.InstallRequirement
        """
        deps = self._dependencies_cache.get(getattr(ireq.link, 'url', None))
        if not deps:
            if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
                # No download_dir for locally available editable requirements.
                # If a download_dir is passed, pip will unnecessarily
                # archive the entire source directory.
                download_dir = None
            elif ireq.link and not ireq.link.is_artifact:
                # No download_dir for VCS sources.  This also works around pip
                # using git-checkout-index, which gets rid of the .git dir.
                download_dir = None
            else:
                download_dir = self._download_dir
                if not os.path.isdir(download_dir):
                    os.makedirs(download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            reqset = RequirementSet(self.build_dir,
                                    self.source_dir,
                                    download_dir=download_dir,
                                    wheel_download_dir=self._wheel_download_dir,
                                    session=self.session,
                                    ignore_installed=True)
            deps = reqset._prepare_file(self.finder, ireq)
            if ireq.req and ireq._temp_build_dir and ireq._ideal_build_dir:
                # Move the temporary build directory under self.build_dir
                ireq.source_dir = None
                ireq._correct_build_location()
            assert ireq.link.url
            self._dependencies_cache[ireq.link.url] = deps
        return set(deps)

    def get_hashes(self, ireq):
        """
        Given an InstallRequirement, return a set of hashes that represent all
        of the files for a given requirement. Editable requirements return an
        empty set. Unpinned requirements raise a TypeError.
        """
        if ireq.editable:
            return set()

        check_is_hashable(ireq)

        if ireq.link and ireq.link.is_artifact:
            return {self._get_file_hash(ireq.link)}

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.
        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
        matching_versions = list(
            ireq.specifier.filter((candidate.version for candidate in all_candidates)))
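        # Per the docstring above, unpinned requirements are rejected earlier, so
        # the specifier matches exactly one version; take that version's candidates.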
        matching_candidates = candidates_by_version[matching_versions[0]]

        return {
            self._get_file_hash(candidate.location)
            for candidate in matching_candidates
        }

    def _get_file_hash(self, location):
        h = hashlib.new(FAVORITE_HASH)
        with open_local_or_remote_file(location, self.session) as fp:
            for chunk in iter(lambda: fp.read(8096), b""):
                h.update(chunk)
        return ":".join([FAVORITE_HASH, h.hexdigest()])