Example #1
def pip_download_link(resconfig, url: str, destdir: str) -> Dict:
    link = Link(url)
    tmpdir = tempfile.mkdtemp(prefix='.tmp-')
    # Redirect anything pip writes to stdout (e.g. progress output) to stderr.
    with redirect_stdout(sys.stderr):
        netloc = urlsplit(resconfig['source']['repository']['index_url'])[1]
        hostname = netloc.split(':')[0]
        # Treat the configured index host as trusted and attach its
        # credentials to the pip session.
        with PipSession(retries=RETRIES, insecure_hosts=[
                hostname,
        ]) as session:
            session.timeout = TIMEOUT
            session.auth.prompting = False
            session.auth.passwords[netloc] = (
                resconfig['source']['repository'].get('username', None),
                resconfig['source']['repository'].get('password', None))

            # Fetch the file referenced by `link` into `destdir` without
            # unpacking it (only_download=True).
            unpack_url(link,
                       tmpdir,
                       destdir,
                       only_download=True,
                       session=session)

    shutil.rmtree(tmpdir)
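
The snippet above depends on pip-internal names (Link, PipSession, unpack_url) and on module-level RETRIES and TIMEOUT constants that are not shown here. A minimal, hypothetical invocation sketch follows; the resconfig layout is inferred from the keys the function reads, and every URL, credential, and path is a placeholder:

# Hypothetical configuration; only the keys read by pip_download_link are set.
resconfig = {
    'source': {
        'repository': {
            'index_url': 'https://pypi.example.org/simple',
            'username': 'user',      # optional
            'password': 'secret',    # optional
        },
    },
}
# Download a single distribution file into a local directory (placeholder paths).
pip_download_link(
    resconfig,
    'https://pypi.example.org/packages/example_pkg-1.0.tar.gz',
    '/tmp/downloads',
)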
Example #2
    def prepare_linked_requirement(self, req, session, finder,
                                   upgrade_allowed, require_hashes):
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
        return abstract_dist
Example #3
    def build(
        self,
        requirements,  # type: Iterable[InstallRequirement]
        session,  # type: PipSession
        autobuilding=False  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        buildset = []
        format_control = self.finder.format_control
        # Whether a cache directory is available for autobuilding=True.
        cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)

        for req in requirements:
            ephem_cache = should_use_ephemeral_cache(
                req, format_control=format_control, autobuilding=autobuilding,
                cache_available=cache_available,
            )
            if ephem_cache is None:
                continue

            buildset.append((req, ephem_cache))

        if not buildset:
            return []

        # Is any wheel build not using the ephemeral cache?
        if any(not ephem_cache for _, ephem_cache in buildset):
            have_directory_for_build = self._wheel_dir or (
                autobuilding and self.wheel_cache.cache_dir
            )
            assert have_directory_for_build

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return a list of requirements that failed to build
        return build_failure
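
The selection loop at the top of this example relies on should_use_ephemeral_cache returning one of three values: None to skip the requirement, True to build into the ephemeral cache, and False to build into the persistent wheel cache (the same three-way split that Example #4 below spells out inline). A toy sketch of that contract, using a hypothetical stand-in selector over plain dictionaries rather than InstallRequirement objects:

# Hypothetical stand-in for should_use_ephemeral_cache (not pip code).
def select(req):
    if req.get('is_wheel'):
        return None      # already a wheel: nothing to build
    if req.get('vcs') or req.get('local_dir'):
        return True      # build only for this run (ephemeral cache)
    return False         # cacheable artifact (persistent wheel cache)

requirements = [
    {'name': 'a', 'is_wheel': True},
    {'name': 'b', 'vcs': True},
    {'name': 'c'},
]
buildset = []
for req in requirements:
    ephem_cache = select(req)
    if ephem_cache is None:
        continue
    buildset.append((req, ephem_cache))
# buildset == [({'name': 'b', 'vcs': True}, True), ({'name': 'c'}, False)]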
Example #4
    def build(self, requirements, session, autobuilding=False):
        """Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        from pip._internal import index

        building_is_possible = self._wheel_dir or (autobuilding and
                                                   self.wheel_cache.cache_dir)
        assert building_is_possible

        buildset = []
        for req in requirements:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.',
                        req.name,
                    )
            elif autobuilding and req.editable:
                pass
            elif autobuilding and not req.source_dir:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout. Build wheel just for this run.
                buildset.append((req, True))
            else:
                ephem_cache = False
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if index.egg_info_matches(base, None, link) is None:
                        # E.g. local directory. Build wheel just for this run.
                        ephem_cache = True
                    if "binary" not in index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.",
                            req.name,
                        )
                        continue
                buildset.append((req, ephem_cache))

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req,
                    output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(
                                os.path.join(req.source_dir,
                                             PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir)
                        # Update the link for this.
                        req.link = index.Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link,
                            req.source_dir,
                            None,
                            False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
Example #5
    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
        session,  # type: PipSession
        finder,  # type: PackageFinder
        require_hashes,  # type: bool
    ):
        # type: (...) -> AbstractDistribution
        """Prepare a requirement that would be obtained from req.link
        """
        assert req.link
        link = req.link

        # TODO: Breakup into smaller functions
        if link.scheme == "file":
            path = link.file_path
            logger.info("Processing %s", display_path(path))
        else:
            logger.info("Collecting %s", req.req or req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, "setup.py")):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if link.is_vcs:
                    raise VcsHashUnsupported()
                elif link.is_existing_dir():
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            download_dir = self.download_dir
            if link.is_wheel and self.wheel_download_dir:
                # when doing 'pip wheel` we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir

            try:
                unpack_url(
                    link,
                    req.source_dir,
                    download_dir,
                    session=session,
                    hashes=hashes,
                    progress_bar=self.progress_bar,
                )
            except requests.HTTPError as exc:
                logger.critical(
                    "Could not install requirement %s because of error %s", req, exc
                )
                raise InstallationError(
                    "Could not install requirement %s because of HTTP "
                    "error %s for URL %s" % (req, exc, link)
                )

            if link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False
            else:
                # We always delete unpacked sdists after pip runs.
                autodelete_unpacked = True
            if autodelete_unpacked:
                write_delete_marker_file(req.source_dir)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, finder, self.build_isolation
            )

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist
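
In this newer variant the autodelete decision is made after unpack_url, based only on whether the link is a wheel and whether a download directory is in use. Restated as a tiny stand-alone helper (a sketch of just that decision, not a pip API; "downloading" stands in for "a download_dir is set"):

def should_autodelete_unpacked(is_wheel, downloading):
    # Wheels are unpacked only to read metadata when downloading; when
    # installing, the unpacked wheel itself is used and must be kept.
    if is_wheel:
        return downloading
    # Unpacked sdists are always deleted after pip runs.
    return True

assert should_autodelete_unpacked(is_wheel=True, downloading=True) is True
assert should_autodelete_unpacked(is_wheel=True, downloading=False) is False
assert should_autodelete_unpacked(is_wheel=False, downloading=False) is True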
Example #6
    def prepare_linked_requirement(self, req, session, finder,
                                   upgrade_allowed, require_hashes):
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
        return abstract_dist
Example #7
File: wheel.py Project: qwer1234yy/SMART
    def build(self, requirements, session, autobuilding=False):
        """Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        from pip._internal import index

        building_is_possible = self._wheel_dir or (
                autobuilding and self.wheel_cache.cache_dir
        )
        assert building_is_possible

        buildset = []
        for req in requirements:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name,
                    )
            elif autobuilding and req.editable:
                pass
            elif autobuilding and not req.source_dir:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout. Build wheel just for this run.
                buildset.append((req, True))
            else:
                ephem_cache = False
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if index.egg_info_matches(base, None, link) is None:
                        # E.g. local directory. Build wheel just for this run.
                        ephem_cache = True
                    if "binary" not in index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name,
                        )
                        continue
                buildset.append((req, ephem_cache))

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = index.Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
Example #8
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
Example #9
    def build(
        self,
        requirements,  # type: Iterable[InstallRequirement]
        session,  # type: PipSession
        autobuilding=False  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        buildset = []
        format_control = self.finder.format_control
        # Whether a cache directory is available for autobuilding=True.
        cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)

        for req in requirements:
            ephem_cache = should_use_ephemeral_cache(
                req, format_control=format_control, autobuilding=autobuilding,
                cache_available=cache_available,
            )
            if ephem_cache is None:
                continue

            buildset.append((req, ephem_cache))

        if not buildset:
            return []

        # Is any wheel build not using the ephemeral cache?
        if any(not ephem_cache for _, ephem_cache in buildset):
            have_directory_for_build = self._wheel_dir or (
                autobuilding and self.wheel_cache.cache_dir
            )
            assert have_directory_for_build

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return a list of requirements that failed to build
        return build_failure
Example #10
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement {} because of HTTP '
                    'error {} for URL {}'.format(req, exc, link)
                )