Ejemplo n.º 1
0
 def get_location(self, dist, dependency_links):
     """Return the dependency-link URL (minus fragment) matching *dist*.

     Scans *dependency_links* for a URL whose ``#egg=`` fragment names the
     same project as ``dist.key``; returns ``None`` when nothing matches.
     """
     for candidate in dependency_links:
         fragment = Link(candidate).egg_fragment
         if not fragment:
             continue
         ## FIXME: will this work when a package has - in the name?
         if '-' in fragment:
             # Drop the trailing version segment to recover the project key.
             name = '-'.join(fragment.split('-')[:-1]).lower()
         else:
             name = fragment
         if name == dist.key:
             return candidate.split('#', 1)[0]
     return None
Ejemplo n.º 2
0
def test_unpack_http_url_with_urllib_response_without_content_type():
    """
    It should download and unpack files even if no Content-Type header exists
    """
    def _get_response_from_url_mock(*args, **kw):
        # Delegate to the real implementation, then strip the header so the
        # code under test must cope with its absence.
        resp = _get_response_from_url_original(*args, **kw)
        del resp.info()['content-type']
        return resp

    # The patched object itself is never used, so no ``as`` binding is kept.
    with patch('pip.download._get_response_from_url',
               _get_response_from_url_mock):
        uri = path_to_url2(
            os.path.join(tests_data, 'packages', 'simple-1.0.tar.gz'))
        link = Link(uri)
        temp_dir = mkdtemp()
        try:
            unpack_http_url(link,
                            temp_dir,
                            download_cache=None,
                            download_dir=None)
            # The archive contents should have been unpacked into temp_dir.
            assert set(os.listdir(temp_dir)) == set([
                'PKG-INFO', 'setup.cfg', 'setup.py', 'simple',
                'simple.egg-info'
            ])
        finally:
            rmtree(temp_dir)
Ejemplo n.º 3
0
def test_unpack_http_url_bad_cache_checksum(mock_get_response,
                                            mock_unpack_file):
    """
    If cached download has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    payload = b('downloaded')
    digest = hashlib.new('sha1', payload)
    link = Link(base_url + '#sha1=' + digest.hexdigest())

    response = mock_get_response.return_value = MockResponse(payload)
    response.info = lambda: {'content-type': 'application/x-tar'}
    response.geturl = lambda: base_url

    cache_root = mkdtemp()
    try:
        # Seed the cache with contents that do NOT match the sha1 fragment.
        cached = os.path.join(cache_root, urllib.quote(base_url, ''))
        _write_file(cached, 'some contents')
        _write_file(cached + '.content-type', 'application/x-tar')

        unpack_http_url(link, 'location', download_cache=cache_root)

        # despite existence of cached file with bad hash, downloaded again
        mock_get_response.assert_called_once_with(base_url, link)
        # cached file is replaced with newly downloaded file
        with open(cached) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(cache_root)
Ejemplo n.º 4
0
 def _link(self):
     """Return the requirement's Link, or None when no URL is known.

     pip < 6.1.0 objects lack the ``link`` attribute; in that case fall
     back to rebuilding a Link from the deprecated ``url`` attribute.
     """
     try:
         return self._req.link
     except AttributeError:
         url = self._req.url
         return Link(url) if url else None
Ejemplo n.º 5
0
    def test_updates_package_in_nested_requirements(self):
        """Updating a requirements file also updates nested (-r) files.

        Copies the sample files into a temp dir, stubs the index lookup to
        report readtime 0.10.1, runs ``pur``, and compares both the
        top-level and the nested file against the expected results.
        """
        tempdir = tempfile.mkdtemp()
        requirements = os.path.join(tempdir,
                                    'requirements-with-nested-reqfile.txt')
        requirements_nested = os.path.join(tempdir, 'requirements-nested.txt')
        shutil.copy('tests/samples/requirements-with-nested-reqfile.txt',
                    requirements)
        shutil.copy('tests/samples/requirements-nested.txt',
                    requirements_nested)
        args = ['-r', requirements]

        with utils.mock.patch('pip.index.PackageFinder.find_all_candidates'
                              ) as mock_find_all_candidates:
            project = 'readtime'
            version = '0.10.1'
            link = Link('')
            candidate = InstallationCandidate(project, version, link)
            mock_find_all_candidates.return_value = [candidate]

            result = self.runner.invoke(pur, args)
            self.assertIsNone(result.exception)
            expected_output = "Updated readtime: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
            # assertEqual: assertEquals is a deprecated alias.
            self.assertEqual(u(result.output), u(expected_output))
            self.assertEqual(result.exit_code, 0)
            # Context managers close the file handles the original leaked.
            with open('tests/samples/results/'
                      'test_updates_package_in_nested_requirements') as fp:
                expected_requirements = fp.read()
            with open(requirements) as fp:
                self.assertEqual(fp.read(), expected_requirements)
            with open('tests/samples/results/'
                      'test_updates_package_in_nested_requirements_nested'
                      ) as fp:
                expected_requirements = fp.read()
            with open(requirements_nested) as fp:
                self.assertEqual(fp.read(), expected_requirements)
Ejemplo n.º 6
0
    def test_updates_from_alt_index_url(self):
        """pur must honor an --index-url inside the requirements file:
        the PackageFinder it builds has to carry the alternate index URLs
        and the matching secure origins.
        """
        requirements = 'tests/samples/requirements-with-alt-index-url.txt'
        tempdir = tempfile.mkdtemp()
        tmpfile = os.path.join(tempdir, 'requirements.txt')
        shutil.copy(requirements, tmpfile)
        args = ['-r', tmpfile]

        class PackageFinderSpy(PackageFinder):

            # Records the finder instance pur creates so the test can
            # inspect its constructor arguments afterwards.
            _spy = None

            def __init__(self, *args, **kwargs):
                super(PackageFinderSpy, self).__init__(*args, **kwargs)
                PackageFinderSpy._spy = self

        with utils.mock.patch('pur.PackageFinder',
                              wraps=PackageFinderSpy) as mock_finder:
            with utils.mock.patch('pip.index.PackageFinder.find_all_candidates'
                                  ) as mock_find_all_candidates:

                # Stub the index lookup so no network access happens.
                project = 'flask'
                version = '12.1'
                link = Link('')
                candidate = InstallationCandidate(project, version, link)
                mock_find_all_candidates.return_value = [candidate]

                self.runner.invoke(pur, args)

                self.assertTrue(mock_finder.called)

                # The finder must have been built with the index URLs from
                # the requirements file, not the default PyPI URL.
                self.assertEqual(
                    PackageFinderSpy._spy.index_urls,
                    ['http://pypi.example.com', 'https://pypi.example2.com'])
                self.assertEqual(PackageFinderSpy._spy.secure_origins,
                                 [('*', 'pypi.example.com', '*')])
Ejemplo n.º 7
0
    def from_editable(cls,
                      editable_req,
                      comes_from=None,
                      isolated=False,
                      options=None,
                      wheel_cache=None,
                      constraint=False):
        """Build an editable InstallRequirement from an ``-e`` specifier.

        Parses *editable_req* into a name, URL and extras, then constructs
        the requirement with ``editable=True``. ``file:`` URLs are treated
        as local checkouts and become the source directory.

        :raises InstallationError: if the parsed name is not a valid
            requirement specifier.
        """
        from pip.index import Link

        name, url, extras_override = parse_editable(editable_req)
        if url.startswith('file:'):
            source_dir = url_to_path(url)
        else:
            source_dir = None

        if name is not None:
            try:
                req = Requirement(name)
            except InvalidRequirement:
                # Report the offending *name*: ``req`` is unbound when
                # Requirement() raises, so formatting it was a NameError.
                raise InstallationError("Invalid requirement: '%s'" % name)
        else:
            req = None
        return cls(
            req,
            comes_from,
            source_dir=source_dir,
            editable=True,
            link=Link(url),
            constraint=constraint,
            isolated=isolated,
            options=options if options else {},
            wheel_cache=wheel_cache,
            extras=extras_override or (),
        )
Ejemplo n.º 8
0
    def from_editable(cls,
                      editable_req,
                      comes_from=None,
                      default_vcs=None,
                      isolated=False,
                      options=None,
                      wheel_cache=None,
                      constraint=False):
        """Create an InstallRequirement from an editable (``-e``) spec.

        Parses *editable_req* into a name, URL, extras and editable
        options, then builds the requirement with ``editable=True``.
        For ``file:`` URLs the local path becomes the source directory.
        """
        from pip.index import Link

        name, url, extras_override, editable_options = parse_editable(
            editable_req, default_vcs)
        if url.startswith('file:'):
            # Local editable install: work directly in the checkout.
            source_dir = url_to_path(url)
        else:
            source_dir = None

        res = cls(name,
                  comes_from,
                  source_dir=source_dir,
                  editable=True,
                  link=Link(url),
                  constraint=constraint,
                  editable_options=editable_options,
                  isolated=isolated,
                  options=options if options else {},
                  wheel_cache=wheel_cache)

        if extras_override is not None:
            # parse_editable may supply explicit extras (e.g. pkg[dev]).
            res.extras = extras_override

        return res
Ejemplo n.º 9
0
 def test_unpack_file_url_thats_a_dir(self, tmpdir, data):
     """A file:// URL pointing at a directory is unpacked by copying it."""
     self.prep(tmpdir, data)
     src = data.packages.join("FSPkg")
     unpack_file_url(Link(path_to_url(src)), self.build_dir,
                     download_dir=self.download_dir)
     unpacked = os.path.join(self.build_dir, 'fspkg')
     assert os.path.isdir(unpacked)
Ejemplo n.º 10
0
def test_unpack_http_url_with_urllib_response_without_content_type(data):
    """
    It should download and unpack files even if no Content-Type header exists
    """
    _real_session = PipSession()

    def _headerless_get(*args, **kwargs):
        # Fetch for real, then drop the header before handing it back.
        response = _real_session.get(*args, **kwargs)
        del response.headers["Content-Type"]
        return response

    session = Mock()
    session.get = _headerless_get

    link = Link(path_to_url(data.packages.join("simple-1.0.tar.gz")))
    temp_dir = mkdtemp()
    try:
        unpack_http_url(
            link,
            temp_dir,
            download_dir=None,
            session=session,
        )
        expected = set([
            'PKG-INFO', 'setup.cfg', 'setup.py', 'simple', 'simple.egg-info'
        ])
        assert set(os.listdir(temp_dir)) == expected
    finally:
        rmtree(temp_dir)
Ejemplo n.º 11
0
def test_check_hash_sha512_invalid():
    """_check_hash raises InstallationError on a sha512 mismatch."""
    archive = os.path.join(packages, "gmpy-1.15.tar.gz")
    link = Link("http://testserver/gmpy-1.15.tar.gz#sha512=deadbeef")
    computed = _get_hash_from_file(archive, link)
    assert_raises(InstallationError, _check_hash, computed, link)
Ejemplo n.º 12
0
def faster_find_requirement(self, req, upgrade):
    """Short-circuit find_requirement for ``==``-pinned requirements.

    see faster_pip_packagefinder
    """
    from pip.index import BestVersionAlreadyInstalled
    if req_is_absolute(req.req):
        # if the version is pinned-down by a ==
        # first try to use any installed package that satisfies the req
        if req.satisfied_by:
            if upgrade:
                # as a matter of api, find_requirement() only raises during upgrade -- shrug
                raise BestVersionAlreadyInstalled
            else:
                return None

        # then try an optimistic search for a .whl file:
        from os.path import join
        from glob import glob
        from pip.wheel import Wheel
        from pip.index import Link
        for findlink in self.find_links:
            # Only local file:// find-links can be globbed; skip the rest.
            if findlink.startswith('file://'):
                findlink = findlink[7:]
            else:
                continue
            # this matches the name-munging done in pip.wheel:
            reqname = req.name.replace('-', '_')
            for link in glob(join(findlink, reqname + '-*.whl')):
                link = Link('file://' + link)
                wheel = Wheel(link.filename)
                # Accept the wheel only when its version satisfies the pinned
                # specifier and it is installable on this platform.
                if wheel.version in req.req and wheel.supported():
                    return link

    # otherwise, do the full network search
    return self.unpatched['find_requirement'](self, req, upgrade)
Ejemplo n.º 13
0
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, update=True, pycompile=True, markers=None,
                 isolated=False, options=None, wheel_cache=None,
                 constraint=False, extras=()):
        """Set up the state of a single install requirement.

        :param req: a packaging ``Requirement``, or None for an as-yet
            unnamed requirement.
        :param comes_from: what this requirement was derived from (another
            requirement, a requirements file, the command line, ...).
        :param source_dir: directory holding the source checkout, if any.
        :param editable: True for ``-e`` (develop-mode) requirements.
        :param link: Link to the artifact; when absent it is derived from
            ``req.url`` if available.
        :param markers: environment markers; defaults to ``req.marker``.
        :param constraint: True when this came from a constraints file.
        :param extras: explicit extras overriding those parsed from *req*.
        """
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is not None:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        else:
            self.source_dir = None
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is not None:
            self.link = self.original_link = link
        else:
            from pip.index import Link
            # Fall back to the URL carried by the requirement itself, if any.
            self.link = self.original_link = req and req.url and Link(req.url)

        if extras:
            self.extras = extras
        elif req:
            # Normalize extras names the same way pkg_resources does.
            self.extras = set(
                pkg_resources.safe_extra(extra) for extra in req.extras
            )
        else:
            self.extras = set()
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = TempDirectory(kind="req-build")
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled_pathset = None
        self.use_user_site = False
        self.target_dir = None
        self.options = options if options else {}
        self.pycompile = pycompile
        # Set to True after successful preparation of this requirement
        self.prepared = False

        self.isolated = isolated
Ejemplo n.º 14
0
def test_check_hash_md5_valid(data):
    """A correct md5 fragment passes _check_hash without raising."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link("http://testserver/gmpy-1.15.tar.gz"
                "#md5=d41d8cd98f00b204e9800998ecf8427e")
    _check_hash(_get_hash_from_file(archive, link), link)
Ejemplo n.º 15
0
 def test_download_url(self, mockget):
     """download_url fetches the link and writes the package to disk."""
     mockget.return_value = Mock()
     mockget.return_value.content = self.dists['dp'].bytes()
     downloader = self.makeone()
     url = 'http://fakeurl.com/dummypackage.tar.gz#egg=dummypackage'
     pinfo, outfile = downloader.download_url(Link(url))
     assert outfile.exists()
     assert pinfo.name == 'dummypackage'
Ejemplo n.º 16
0
def test_check_hash_sha1_valid(data):
    """A correct sha1 fragment passes _check_hash without raising."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link("http://testserver/gmpy-1.15.tar.gz"
                "#sha1=da39a3ee5e6b4b0d3255bfef95601890afd80709")
    _check_hash(_get_hash_from_file(archive, link), link)
Ejemplo n.º 17
0
def test_check_hash_sha512_invalid(data):
    """A wrong sha512 fragment makes _check_hash raise InstallationError."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link("http://testserver/gmpy-1.15.tar.gz#sha512=deadbeef")
    computed = _get_hash_from_file(archive, link)
    with pytest.raises(InstallationError):
        _check_hash(computed, link)
Ejemplo n.º 18
0
def test_get_hash_from_file_unknown(data):
    """An unrecognized hash name in the URL fragment yields None."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link("http://testserver/gmpy-1.15.tar.gz"
                "#unknown_hash=d41d8cd98f00b204e9800998ecf8427e")
    assert _get_hash_from_file(archive, link) is None
Ejemplo n.º 19
0
def test_check_hash_sha512_valid(data):
    """A correct sha512 fragment passes _check_hash without raising."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link(
        "http://testserver/gmpy-1.15.tar.gz#sha512=cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
    )
    _check_hash(_get_hash_from_file(archive, link), link)
Ejemplo n.º 20
0
 def test_http_link(self):
     """An http(s) link with an md5 fragment renders as a fetchurl expr."""
     url = ('https://pypi.python.org/packages/source/p/pip/pip-7.0.3.tar.gz'
            '#md5=e972d691ff6779ffb4b594449bac3e43')
     expected = (
         'fetchurl {\n'
         '  url = "https://pypi.python.org/packages/source/p/pip/pip-7.0.3.tar.gz";\n'
         '  md5 = "e972d691ff6779ffb4b594449bac3e43";\n'
         '}')
     assert link_to_nix(Link(url)) == expected
Ejemplo n.º 21
0
def test_check_hash_sha224_valid(data):
    """A correct sha224 fragment passes _check_hash without raising.

    The digest is sha224 of empty input, consistent with the sibling md5
    (d41d8cd9...) and sha1 (da39a3ee...) tests. The fragment previously
    contained a stray trailing apostrophe, which corrupted the digest and
    made this 'valid' check fail.
    """
    file_path = data.packages.join("gmpy-1.15.tar.gz")
    file_link = Link(
        "http://testserver/gmpy-1.15.tar.gz#sha224=d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
    )

    download_hash = _get_hash_from_file(file_path, file_link)

    _check_hash(download_hash, file_link)
Ejemplo n.º 22
0
def test_check_hash_sha384_valid(data):
    """A correct sha384 fragment passes _check_hash without raising."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link(
        "http://testserver/gmpy-1.15.tar.gz#sha384=38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b"
    )
    _check_hash(_get_hash_from_file(archive, link), link)
Ejemplo n.º 23
0
def test_get_hash_from_file_md5(data):
    """The md5 fragment selects an md5 hasher fed with the file contents."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link("http://testserver/gmpy-1.15.tar.gz"
                "#md5=d41d8cd98f00b204e9800998ecf8427e")
    computed = _get_hash_from_file(archive, link)
    assert computed.digest_size == 16
    assert computed.hexdigest() == "d41d8cd98f00b204e9800998ecf8427e"
Ejemplo n.º 24
0
def test_check_hash_sha256_valid(data):
    """A correct sha256 fragment passes _check_hash without raising."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link(
        "http://testserver/gmpy-1.15.tar.gz#sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    )
    _check_hash(_get_hash_from_file(archive, link), link)
Ejemplo n.º 25
0
def download_url(url, download_dir, sha256=None):
    """Download *url* into *download_dir*, verifying an optional sha256.

    :param url: the URL of the artifact to fetch.
    :param download_dir: directory the file is downloaded (or cached) in.
    :param sha256: expected hex digest, or None to skip verification.
    :returns: path of the downloaded (or previously cached) file.
    :raises HashMismatch: when the downloaded file's sha256 digest does
        not match the expected one.
    """
    cmd = DownloadCommand()
    options, _ = cmd.parse_args([])
    link = Link(url)
    session = cmd._build_session(options)
    hashes = Hashes({"sha256": [sha256]}) if sha256 else None
    # Reuse a previously downloaded file when its hash still checks out.
    downloaded_path = _check_download_dir(link, download_dir, hashes)
    if downloaded_path:
        return downloaded_path
    try:
        # Reuse the Link built above instead of constructing a second one.
        downloaded_path, _ = _download_http_url(link, session,
                                                download_dir, hashes)
    except HashMismatch0 as e:
        # Translate pip's internal mismatch error into our own exception.
        expected = e.allowed["sha256"][0]
        actual = e.gots["sha256"].hexdigest()
        raise HashMismatch(url, expected, actual)
    else:
        return _ensure_expected_download_path(downloaded_path, link)
Ejemplo n.º 26
0
 def test_http_link(self):
     """An http(s) link renders as a fetchurl expr with a sha256 hash."""
     url = ('https://pypi.python.org/packages/source/p/pip/pip-7.0.3.tar.gz'
            '#md5=e972d691ff6779ffb4b594449bac3e43')
     expected = (
         'fetchurl {\n'
         '  url = "https://pypi.python.org/packages/source/p/pip/pip-7.0.3.tar.gz";\n'
         '  sha256 = "1zdgl0qsgsh71b397120y7vw3rkbisrgws2rqv5c4vbgba19iidl";\n'
         '}')
     assert link_to_nix(Link(url)) == expected
Ejemplo n.º 27
0
    def _link_hook(self, overrides, spec, link):
        """Apply a per-package ``src`` override to *link*, if configured.

        Returns ``(link, pinned)``: the possibly replaced link and the
        pinned version spec, or ``(link, None)`` when no override applies.
        """
        overrides = overrides or {}
        if not overrides.get("src"):
            return link, None

        logger.info(
            '===> Link override %s found for package %s',
            overrides, spec)

        # Derive the version from the link filename ("name-version").
        _, version = splitext(link.filename)[0].rsplit('-', 1)
        spec = Spec.from_pinned(name=spec.name, version=version)
        rendered = env.from_string(
            overrides.get("src")).render({"spec": spec})
        new_link = Link(rendered)

        # Hack to make pickle work
        new_link.comes_from = None

        return new_link, spec.pinned
Ejemplo n.º 28
0
def test_get_hash_from_file_sha1(data):
    """The sha1 fragment selects a sha1 hasher fed with the file contents."""
    archive = data.packages.join("gmpy-1.15.tar.gz")
    link = Link("http://testserver/gmpy-1.15.tar.gz"
                "#sha1=da39a3ee5e6b4b0d3255bfef95601890afd80709")
    computed = _get_hash_from_file(archive, link)
    assert computed.digest_size == 20
    assert computed.hexdigest() == (
        "da39a3ee5e6b4b0d3255bfef95601890afd80709")
Ejemplo n.º 29
0
    def _downloaded_filename(self):
        """Download the package's archive if necessary, and return its
        filename.

        --no-deps is implied, as we have reimplemented the bits that would
        ordinarily do dependency resolution.

        :raises UnsupportedRequirementError: for directory file:// URLs and
            for any scheme other than http, https or file.
        """
        # Peep doesn't support requirements that don't come down as a single
        # file, because it can't hash them. Thus, it doesn't support editable
        # requirements, because pip itself doesn't support editable
        # requirements except for "local projects or a VCS url". Nor does it
        # support VCS requirements yet, because we haven't yet come up with a
        # portable, deterministic way to hash them. In summary, all we support
        # is == requirements and tarballs/zips/etc.

        # TODO: Stop on reqs that are editable or aren't ==.

        finder = package_finder(self._argv)

        # If the requirement isn't already specified as a URL, get a URL
        # from an index:
        link = (finder.find_requirement(self._req, upgrade=False)
                if self._req.url is None else Link(self._req.url))

        if link:
            lower_scheme = link.scheme.lower(
            )  # pip lower()s it for some reason.
            if lower_scheme == 'http' or lower_scheme == 'https':
                # Remote archive: download into our temp dir and hash that.
                file_path = self._download(link)
                return basename(file_path)
            elif lower_scheme == 'file':
                # The following is inspired by pip's unpack_file_url():
                link_path = url_to_path(link.url_without_fragment)
                if isdir(link_path):
                    raise UnsupportedRequirementError(
                        "%s: %s is a directory. So that it can compute "
                        "a hash, peep supports only filesystem paths which "
                        "point to files" %
                        (self._req, link.url_without_fragment))
                else:
                    # Copy the local archive so hashing operates on our
                    # private temp copy.
                    copy(link_path, self._temp_path)
                    return basename(link_path)
            else:
                raise UnsupportedRequirementError(
                    "%s: The download link, %s, would not result in a file "
                    "that can be hashed. Peep supports only == requirements, "
                    "file:// URLs pointing to files (not folders), and "
                    "http:// and https:// URLs pointing to tarballs, zips, "
                    "etc." % (self._req, link.url))
        else:
            raise UnsupportedRequirementError(
                "%s: couldn't determine where to download this requirement from."
                % (self._req, ))
    def test_link_package_versions_match_wheel(self):
        """Test that 'pytest' archives match for 'pytest'"""
        # TODO: Uncomment these, when #1217 is fixed
        # link = Link('http:/yo/pytest-1.0.tar.gz')
        # result = self.finder._link_package_versions(link, self.search_name)
        # assert result == [(self.parsed_version, link, self.version)], result

        wheel_link = Link('http:/yo/pytest-1.0-py2.py3-none-any.whl')
        matches = self.finder._link_package_versions(wheel_link,
                                                     self.search_name)
        expected = [(self.parsed_version, wheel_link, self.version)]
        assert matches == expected, matches
 def test_link_sorting_raises_when_wheel_unsupported(self):
     """_sort_versions rejects a candidate list whose only wheel is not
     supported on this platform."""
     unsupported = Link('simple-1.0-py2.py3-none-TEST.whl')
     candidates = [(parse_version('1.0'), unsupported, '1.0')]
     finder = PackageFinder([], [], use_wheel=True, session=PipSession())
     with pytest.raises(InstallationError):
         finder._sort_versions(candidates)
Ejemplo n.º 32
0
    def from_line(
            cls, name, comes_from=None, isolated=False, options=None,
            wheel_cache=None, constraint=False):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        from pip.index import Link

        # Environment markers are separated with '; ' for URLs (a bare ';'
        # may legitimately occur inside a URL), with a plain ';' otherwise.
        if is_url(name):
            marker_sep = '; '
        else:
            marker_sep = ';'
        if marker_sep in name:
            name, markers = name.split(marker_sep, 1)
            markers = markers.strip()
            if not markers:
                markers = None
        else:
            markers = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None
        extras = None

        if is_url(name):
            link = Link(name)
        else:
            p, extras = _strip_extras(path)
            # A local directory only counts when it is addressed as a path
            # (contains a separator or starts with '.').
            if (os.path.isdir(p) and
                    (os.path.sep in name or name.startswith('.'))):

                if not is_installable_dir(p):
                    raise InstallationError(
                        "Directory %r is not installable. File 'setup.py' "
                        "not found." % name
                    )
                link = Link(path_to_url(p))
            elif is_archive_file(p):
                if not os.path.isfile(p):
                    logger.warning(
                        'Requirement %r looks like a filename, but the '
                        'file does not exist',
                        name
                    )
                link = Link(path_to_url(p))

        # it's a local file, dir, or url
        if link:
            # Normalize URLs
            link.normalize()
            # wheel file
            if link.is_wheel:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                req = "%s==%s" % (wheel.name, wheel.version)
            else:
                # set the req to the egg fragment.  when it's not there, this
                # will become an 'unnamed' requirement
                req = link.egg_fragment

        # a requirement specifier
        else:
            req = name

        options = options if options else {}
        res = cls(req, comes_from, link=link, markers=markers,
                  isolated=isolated, options=options,
                  wheel_cache=wheel_cache, constraint=constraint)

        if extras:
            # Re-parse via a placeholder requirement to normalize the extras.
            res.extras = Requirement('placeholder' + extras).extras

        return res