Example #1
def test_pip_wheel_fail_cause_of_previous_build_dir(script, data):
    """
    Test that 'pip wheel' fails when the package has a pre-existing build
    directory
    """

    # Given that I have a previous build dir of the `simple` package
    build = script.venv_path / 'build' / 'simple'
    os.makedirs(build)
    write_delete_marker_file(build)
    build.joinpath('setup.py').write_text('#')

    # When I call pip trying to install things again
    result = script.pip(
        'wheel',
        '--no-index',
        '--find-links=%s' % data.find_links,
        '--build',
        script.venv_path / 'build',
        'simple==3.0',
        expect_error=True,
        expect_temp=True,
    )

    # Then I see that the error code is the right one
    assert result.returncode == PREVIOUS_BUILD_DIR_ERROR, result
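
Outside of pip's test harness, the same failure can be checked with a plain subprocess call. The sketch below is illustrative only: the numeric value behind PREVIOUS_BUILD_DIR_ERROR, the package name `simple`, and the local paths are assumptions and placeholders, not part of the example above.

# Minimal sketch: reproduce the check without the `script` fixture.
# PREVIOUS_BUILD_DIR_ERROR = 3 is an assumed value of pip's exit-status
# constant and may differ between pip versions.
import subprocess

PREVIOUS_BUILD_DIR_ERROR = 3  # assumption

proc = subprocess.run(
    ['pip', 'wheel', '--no-index', '--find-links=./links',
     '--build', './build', 'simple==3.0'],
    capture_output=True,
    text=True,
)
if proc.returncode == PREVIOUS_BUILD_DIR_ERROR:
    print('pip refused to reuse a pre-existing build directory')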
Example #2
def unpack_url(
        link,  # type: Link
        location,  # type: str
        download_dir=None,  # type: Optional[str]
        only_download=False,  # type: bool
        session=None,  # type: Optional[PipSession]
        hashes=None,  # type: Optional[Hashes]
        progress_bar="on",  # type: str
):
    # type: (...) -> None
    """Unpack link.
       If link is a VCS link:
         if only_download, export into download_dir and ignore location
         else unpack into location
       For other types of link:
         - unpack into location
         - if download_dir, copy the file into download_dir
         - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar,
        )
    if only_download:
        write_delete_marker_file(location)
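
For context, a direct call to unpack_url might look like the sketch below. The internal import paths follow pip 19.x-era layout and are assumptions (they move between releases), and the URL and digest are placeholders.

# Illustrative sketch only: module paths and the digest value are
# assumptions, not a stable public API.
from pip._internal.models.link import Link
from pip._internal.download import PipSession, unpack_url  # location varies by pip version
from pip._internal.utils.hashes import Hashes

link = Link('https://example.com/packages/simple-3.0.tar.gz')
hashes = Hashes({'sha256': ['<expected hex digest>']})

# HTTP link, so the file is fetched via the session, verified against
# `hashes`, and unpacked into `location`. With only_download=False no
# delete marker is written for `location`.
unpack_url(
    link,
    location='/tmp/pip-build/simple',
    download_dir=None,
    only_download=False,
    session=PipSession(),
    hashes=hashes,
)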
Example #3
def test_cleanup_prevented_upon_build_dir_exception(script, data):
    """
    Test no cleanup occurs after a PreviousBuildDirError
    """
    build = script.venv_path / 'build'
    build_simple = build / 'simple'
    os.makedirs(build_simple)
    write_delete_marker_file(build_simple)
    build_simple.joinpath("setup.py").write_text("#")
    result = script.pip(
        'install', '-f', data.find_links, '--no-index', 'simple',
        '--build', build,
        expect_error=True, expect_temp=True,
    )

    assert result.returncode == PREVIOUS_BUILD_DIR_ERROR, str(result)
    assert "pip can't proceed" in result.stderr, str(result)
    assert exists(build_simple), str(result)
Example #4
    def ensure_build_location(self, build_dir):
        # type: (str) -> str
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(kind="req-build")

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
            write_delete_marker_file(build_dir)
        return os.path.join(build_dir, name)
Example #5
def _make_build_dir(build_dir):
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)
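
All of these examples revolve around write_delete_marker_file itself. Below is a minimal sketch of such a helper, assuming pip's pip-delete-this-directory.txt marker convention; the message text is an approximation rather than pip's verbatim wording.

# Minimal sketch of the helper these examples call. The marker filename
# follows pip's convention; the message body is an approximation.
import os

PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
DELETE_MARKER_MESSAGE = (
    'This file is placed here by pip to indicate the source was put here '
    'by pip.\n\nOnce this package is successfully installed this source '
    'code will be deleted (unless you remove this file).\n'
)


def write_delete_marker_file(directory):
    # type: (str) -> None
    """Write the delete marker file into ``directory``."""
    filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    with open(filepath, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)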
Example #6
    def prepare_linked_requirement(
            self,
            req,  # type: InstallRequirement
            session,  # type: PipSession
            finder,  # type: PackageFinder
            require_hashes,  # type: bool
    ):
        # type: (...) -> AbstractDistribution
        """Prepare a requirement that would be obtained from req.link
        """
        assert req.link
        link = req.link

        # TODO: Breakup into smaller functions
        if link.scheme == 'file':
            path = link.file_path
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req.req or req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non-deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. Version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again." %
                    (req, req.source_dir))

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if link.is_vcs:
                    raise VcsHashUnsupported()
                elif link.is_existing_dir():
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            download_dir = self.download_dir
            if link.is_wheel and self.wheel_download_dir:
                # When doing 'pip wheel' we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir

            try:
                unpack_url(link,
                           req.source_dir,
                           download_dir,
                           session=session,
                           hashes=hashes,
                           progress_bar=self.progress_bar)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' % (req, exc, link))

            if link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False
            else:
                # We always delete unpacked sdists after pip runs.
                autodelete_unpacked = True
            if autodelete_unpacked:
                write_delete_marker_file(req.source_dir)

            abstract_dist = _get_prepared_distribution(
                req,
                self.req_tracker,
                finder,
                self.build_isolation,
            )

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist
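
The marker written above is what later makes cleanup safe: a build directory is only removed if pip itself marked it as disposable. The helper below is a hypothetical illustration of that check, not pip's actual cleanup code.

# Hypothetical illustration of how the delete marker is typically consumed:
# remove a build directory only when it carries the marker pip wrote.
import os
import shutil

PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'


def cleanup_build_dir(source_dir):
    # type: (str) -> None
    """Remove ``source_dir`` only if it contains the delete marker."""
    marker = os.path.join(source_dir, PIP_DELETE_MARKER_FILENAME)
    if os.path.exists(marker):
        shutil.rmtree(source_dir)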