Example No. 1
def unpack_http_url(
        link,  # type: Link
        location,  # type: str
        session,  # type: PipSession
        download_dir=None,  # type: Optional[str]
        hashes=None,  # type: Optional[Hashes]
        progress_bar="on"  # type: str
):
    # type: (...) -> None
    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(
                link, download_dir, hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(
                link, session, temp_dir.path, hashes, progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        if not already_downloaded_path:
            os.unlink(from_path)
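
A minimal sketch of how this helper might be invoked. The Link and PipSession import locations have moved between pip releases, so treat them, the URL, and the paths as illustrative placeholders rather than a verified invocation:

from pip._internal.models.link import Link            # location varies by pip version
from pip._internal.network.session import PipSession  # location varies by pip version

link = Link("https://files.example.org/pkg/pkg-1.0.tar.gz")  # placeholder URL
unpack_http_url(
    link,
    location="/tmp/build/pkg",   # build dir the archive is unpacked into
    session=PipSession(),
    download_dir=None,           # no copy of the archive is kept
    hashes=None,                 # hash verification skipped
)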
Example No. 2
def unpack_file_url(
        link,  # type: Link
        location,  # type: str
        download_dir=None,  # type: Optional[str]
        hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """

    link_path = url_to_path(link.url_without_fragment)
    # If it's a url to a local directory
    if is_dir_url(link):

        def ignore(d, names):
            # Pulling in those directories can potentially be very slow,
            # exclude the following directories if they appear in the top
            # level dir (and only it).
            # See discussion at https://github.com/pypa/pip/pull/6770
            return ['.tox', '.nox'] if d == link_path else []

        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True, ignore=ignore)

        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
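
The ignore callback above relies on a stdlib contract worth spelling out: shutil.copytree calls ignore(directory, names) for every directory it visits and skips whatever names the callable returns. A standalone sketch with hypothetical paths:

import shutil

src = "/path/to/project"  # hypothetical source tree
dst = "/path/to/copy"     # hypothetical destination; must not exist yet

def ignore(d, names):
    # Filter '.tox'/'.nox' only at the top level, mirroring the code above.
    return ['.tox', '.nox'] if d == src else []

shutil.copytree(src, dst, symlinks=True, ignore=ignore)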
Example No. 3
def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> str
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(
            link, downloader, temp_dir.path, hashes
        )

    # unpack the archive to the build dir location. even when only
    # downloading archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type)

    return from_path
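
Unlike Example No. 1, this variant allocates its TempDirectory with globally_managed=True instead of a with-block, so the downloaded file is not deleted when the function returns; that is what makes it safe to hand from_path back to the caller.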
Example No. 4
def unpack_url(
        link,  # type: Link
        location,  # type: str
        download,  # type: Downloader
        download_dir=None,  # type: Optional[str]
        hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[File]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # Once out-of-tree-builds are no longer supported, could potentially
    # replace the below condition with `assert not link.is_existing_dir`
    # - unpack_url does not need to be called for in-tree-builds.
    #
    # As further cleanup, _copy_source_tree and accompanying tests can
    # be removed.
    if link.is_existing_dir():
        deprecated(
            "A future pip version will change local packages to be built "
            "in-place without first copying to a temporary directory. "
            "We recommend you use --use-feature=in-tree-build to test "
            "your packages with this new behavior before it becomes the "
            "default.\n",
            replacement=None,
            gone_in="21.3",
            issue=7555)
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
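
A minimal sketch of the hash contract described in the docstring, assuming pip's internal Hashes type (pip._internal.utils.hashes); the digest and path are placeholders:

from pip._internal.utils.hashes import Hashes

# Maps hash name -> allowed hex digests; any single match satisfies the check.
hashes = Hashes({"sha256": ["<expected-hex-digest>"]})
hashes.check_against_path("/tmp/pkg-1.0.tar.gz")  # raises HashMismatch if nothing matches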
Example No. 5
def install_wheel(
        name,  # type: str
        wheel_path,  # type: str
        scheme,  # type: Scheme
        req_description,  # type: str
        pycompile=True,  # type: bool
        warn_script_location=True,  # type: bool
        _temp_dir_for_testing=None,  # type: Optional[str]
        direct_url=None,  # type: Optional[DirectUrl]
):
    # type: (...) -> None
    with TempDirectory(path=_temp_dir_for_testing,
                       kind="unpacked-wheel") as unpacked_dir, ZipFile(
                           wheel_path, allowZip64=True) as z:
        unpack_file(wheel_path, unpacked_dir.path)
        install_unpacked_wheel(
            name=name,
            wheeldir=unpacked_dir.path,
            wheel_zip=z,
            scheme=scheme,
            req_description=req_description,
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            direct_url=direct_url,
        )
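
A hypothetical wiring of this function, borrowing the Scheme fields shown in Example No. 12 below; the paths and wheel name are placeholders, not a verified pip invocation:

scheme = Scheme(
    purelib="/tmp/dest/lib",
    platlib="/tmp/dest/lib",
    headers="/tmp/dest/headers",
    scripts="/tmp/dest/bin",
    data="/tmp/dest/data",
)
install_wheel(
    name="sample",
    wheel_path="/tmp/sample-1.2.0-py2.py3-none-any.whl",  # placeholder path
    scheme=scheme,
    req_description="sample from sample-1.2.0-py2.py3-none-any.whl",
)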
Example No. 6
def test_wheel_version(tmpdir, data):
    future_wheel = 'futurewheel-1.9-py2.py3-none-any.whl'
    broken_wheel = 'brokenwheel-1.0-py2.py3-none-any.whl'
    future_version = (1, 9)

    unpack_file(data.packages.joinpath(future_wheel), tmpdir + 'future')
    unpack_file(data.packages.joinpath(broken_wheel), tmpdir + 'broken')

    assert wheel.wheel_version(tmpdir + 'future') == future_version
    assert not wheel.wheel_version(tmpdir + 'broken')
Example No. 7
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    # Once out-of-tree-builds are no longer supported, could potentially
    # replace the below condition with `assert not link.is_existing_dir`
    # - unpack_url does not need to be called for in-tree-builds.
    #
    # As further cleanup, _copy_source_tree and accompanying tests can
    # be removed.
    #
    # TODO when use-deprecated=out-of-tree-build is removed
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
Example No. 8
def unpack_file_url(
        link,  # type: Link
        location,  # type: str
        download_dir=None,  # type: Optional[str]
        hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> Optional[str]
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = link.file_path
    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link_path, location)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return None

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type)

    # a download dir is specified and not already downloaded
    if download_dir and not os.path.exists(
            os.path.join(download_dir, link.filename)):
        _copy_file(from_path, download_dir, link)

    return from_path
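
The guess_type(...)[0] idiom above keeps only the MIME type and discards the encoding half of the tuple; a quick stdlib illustration:

import mimetypes

mimetypes.guess_type("pkg-1.0.tar.gz")  # ('application/x-tar', 'gzip')
mimetypes.guess_type("pkg-1.0.zip")     # ('application/zip', None)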
Example No. 9
    def test_unpack_wheel_no_flatten(self):
        from tempfile import mkdtemp
        from shutil import rmtree

        filepath = os.path.join(DATA_DIR, 'packages',
                                'meta-1.0-py2.py3-none-any.whl')
        tmpdir = mkdtemp()
        try:
            unpack_file(filepath, tmpdir, 'application/zip', None)
            assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
        finally:
            rmtree(tmpdir)
Example No. 10
    def prep(self, data, tmpdir):
        self.name = 'sample'
        self.wheelpath = data.packages.joinpath(
            'sample-1.2.0-py2.py3-none-any.whl')
        self.req = Requirement('sample')
        self.src = os.path.join(tmpdir, 'src')
        self.dest = os.path.join(tmpdir, 'dest')
        unpack_file(self.wheelpath, self.src)
        self.scheme = {
            'scripts': os.path.join(self.dest, 'bin'),
            'purelib': os.path.join(self.dest, 'lib'),
            'data': os.path.join(self.dest, 'data'),
        }
        self.src_dist_info = os.path.join(self.src, 'sample-1.2.0.dist-info')
        self.dest_dist_info = os.path.join(self.scheme['purelib'],
                                           'sample-1.2.0.dist-info')
Example No. 11
def unpack_url(
        link,  # type: Link
        location,  # type: str
        downloader,  # type: Downloader
        download_dir=None,  # type: Optional[str]
        hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[File]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            downloader,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
Example No. 12
    def prep(self, data, tmpdir):
        self.name = 'sample'
        self.wheelpath = data.packages.joinpath(
            'sample-1.2.0-py2.py3-none-any.whl')
        self.req = Requirement('sample')
        self.src = os.path.join(tmpdir, 'src')
        self.dest = os.path.join(tmpdir, 'dest')
        unpack_file(self.wheelpath, self.src)
        self.scheme = Scheme(
            purelib=os.path.join(self.dest, 'lib'),
            platlib=os.path.join(self.dest, 'lib'),
            headers=os.path.join(self.dest, 'headers'),
            scripts=os.path.join(self.dest, 'bin'),
            data=os.path.join(self.dest, 'data'),
        )
        self.src_dist_info = os.path.join(self.src, 'sample-1.2.0.dist-info')
        self.dest_dist_info = os.path.join(self.scheme.purelib,
                                           'sample-1.2.0.dist-info')
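
This is the same fixture as Example No. 10, updated for the point where pip replaced the plain scheme dict with its typed Scheme model: platlib and headers become explicit fields, and attribute access (self.scheme.purelib) replaces the dict lookup.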
Example No. 13
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    assert not link.is_existing_dir()

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
Example No. 14
def decompress_egg_files(directory=None):
    try:
        EXTS = pip.utils.ARCHIVE_EXTENSIONS
    except AttributeError:
        try:
            EXTS = pip._internal.utils.misc.ARCHIVE_EXTENSIONS
        except AttributeError:
            EXTS = ('.zip', '.whl', '.tar', '.tar.gz', '.tar.bz2')
    try:
        unpack_file = pip.utils.unpack_file
    except AttributeError:
        # XXX a work-around for pip >= 10.0
        try:
            unpack_file = pip.util.unpack_file
        except AttributeError:
            try:
                unpack_file = pip._internal.utils.misc.unpack_file
            except AttributeError:
                from pip._internal.utils.unpacking import unpack_file
    pathname = "*"
    if directory is not None:
        pathname = os.path.join(directory, pathname)
    eggs = [f for ext in EXTS for f in glob(pathname + "%s" % ext)]
    if not eggs:
        files = glob(pathname)
        err = ('No egg files with a supported file extension were found. '
               'Files: %s' % ', '.join(files))
        raise NotFoundException(err)
    for egg in eggs:
        click.echo("Uncompressing: %s" % egg)
        egg_ext = EXTS[list(egg.endswith(ext) for ext in EXTS).index(True)]
        decompress_location = egg[:-len(egg_ext)]
        try:
            unpack_file(egg, decompress_location, None)
        except TypeError:
            unpack_file(egg, decompress_location, None, None)
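
The TypeError fallback at the end accommodates the changed signature of unpack_file across pip releases: older versions required a fourth link argument, newer ones take three. A hypothetical invocation, assuming the archives sit under dist/:

decompress_egg_files(directory="dist")  # unpacks e.g. dist/pkg-1.0.whl into dist/pkg-1.0/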
Example No. 15
    def build(
        self,
        requirements,  # type: Iterable[InstallRequirement]
        should_unpack=False  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """Build wheels.

        :param should_unpack: If True, after building the wheel, unpack it
            and replace the sdist with the unpacked version in preparation
            for installation.
        :return: The list of InstallRequirement that failed to build.
        """
        # pip install uses should_unpack=True.
        # pip install never provides a _wheel_dir.
        # pip wheel uses should_unpack=False.
        # pip wheel always provides a _wheel_dir (via the preparer).
        assert (
            (should_unpack and not self._wheel_dir) or
            (not should_unpack and self._wheel_dir)
        )

        buildset = []
        cache_available = bool(self.wheel_cache.cache_dir)

        for req in requirements:
            ephem_cache = should_use_ephemeral_cache(
                req,
                should_unpack=should_unpack,
                cache_available=cache_available,
                check_binary_allowed=self.check_binary_allowed,
            )
            if ephem_cache is None:
                continue

            # Determine where the wheel should go.
            if should_unpack:
                if ephem_cache:
                    output_dir = self.wheel_cache.get_ephem_path_for_link(
                        req.link
                    )
                else:
                    output_dir = self.wheel_cache.get_path_for_link(req.link)
            else:
                output_dir = self._wheel_dir

            buildset.append((req, output_dir))

        if not buildset:
            return []

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )

        python_tag = None
        if should_unpack:
            python_tag = pep425tags.implementation_tag

        with indent_log():
            build_success, build_failure = [], []
            for req, output_dir in buildset:
                try:
                    ensure_dir(output_dir)
                except OSError as e:
                    logger.warning(
                        "Building wheel for %s failed: %s",
                        req.name, e,
                    )
                    build_failure.append(req)
                    continue

                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    self.wheel_filenames.append(
                        os.path.relpath(wheel_file, output_dir)
                    )
                    if should_unpack:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if (
                            req.source_dir and
                            not has_delete_marker_file(req.source_dir)
                        ):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.ensure_build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_file(req.link.file_path, req.source_dir)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return a list of requirements that failed to build
        return build_failure
Example No. 16
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # Once out-of-tree-builds are no longer supported, could potentially
    # replace the below condition with `assert not link.is_existing_dir`
    # - unpack_url does not need to be called for in-tree-builds.
    #
    # As further cleanup, _copy_source_tree and accompanying tests can
    # be removed.
    if link.is_existing_dir():
        deprecated(
            reason=(
                "pip copied the source tree into a temporary directory "
                "before building it. This is changing so that packages "
                "are built in-place "
                'within the original source tree ("in-tree build").'
            ),
            replacement=None,
            gone_in="21.3",
            feature_flag="in-tree-build",
            issue=7555,
        )
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
Example No. 17
    def build(
            self,
            requirements,  # type: Iterable[InstallRequirement]
            should_unpack,  # type: bool
            wheel_cache,  # type: WheelCache
            build_options,  # type: List[str]
            global_options,  # type: List[str]
            check_binary_allowed=None,  # type: Optional[BinaryAllowedPredicate]
    ):
        # type: (...) -> BuildResult
        """Build wheels.

        :param should_unpack: If True, after building the wheel, unpack it
            and replace the sdist with the unpacked version in preparation
            for installation.
        :return: The list of InstallRequirement that built successfully and
            the list of InstallRequirement that failed to build.
        """
        if check_binary_allowed is None:
            # Binaries allowed by default.
            check_binary_allowed = _always_true

        buildset = _collect_buildset(
            requirements,
            wheel_cache=wheel_cache,
            check_binary_allowed=check_binary_allowed,
            need_wheel=not should_unpack,
        )
        if not buildset:
            return [], []

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )

        with indent_log():
            build_successes, build_failures = [], []
            for req, cache_dir in buildset:
                wheel_file = _build_one(req, cache_dir, build_options,
                                        global_options)
                if wheel_file:
                    # Update the link for this.
                    req.link = Link(path_to_url(wheel_file))
                    req.local_file_path = req.link.file_path
                    assert req.link.is_wheel
                    if should_unpack:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if (req.source_dir and
                                not has_delete_marker_file(req.source_dir)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.ensure_build_location(
                            self.preparer.build_dir)
                        # extract the wheel into the dir
                        unpack_file(req.link.file_path, req.source_dir)
                    build_successes.append(req)
                else:
                    build_failures.append(req)

        # notify success/failure
        if build_successes:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_successes]),
            )
        if build_failures:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failures]),
            )
        # Return a list of requirements that failed to build
        return build_successes, build_failures
Example No. 18
    def test_unpack_wheel_no_flatten(self, tmpdir):
        filepath = os.path.join(DATA_DIR, 'packages',
                                'meta-1.0-py2.py3-none-any.whl')
        unpack_file(filepath, tmpdir)
        assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
Example No. 19
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            downloader,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file


def _download_http_url(
    link,  # type: Link
    downloader,  # type: Downloader
    temp_dir,  # type: str
    hashes,  # type: Optional[Hashes]
):
Example No. 20
    def test_unpack_wheel_no_flatten(self, tmpdir: Path) -> None:
        filepath = os.path.join(DATA_DIR, "packages",
                                "meta-1.0-py2.py3-none-any.whl")
        unpack_file(filepath, tmpdir)
        assert os.path.isdir(os.path.join(tmpdir, "meta-1.0.dist-info"))
Example No. 21
def install_wheel(
        name,  # type: str
        wheel_path,  # type: str
        scheme,  # type: Scheme
        req_description,  # type: str
        pycompile=True,  # type: bool
        warn_script_location=True,  # type: bool
        _temp_dir_for_testing=None,  # type: Optional[str]
        direct_url=None,  # type: Optional[DirectUrl]
):
    # type: (...) -> None
    with TempDirectory(
        path=_temp_dir_for_testing, kind="unpacked-wheel"
    ) as unpacked_dir, ZipFile(wheel_path, allowZip64=True) as z:
        unpack_file(wheel_path, unpacked_dir.path)
        install_unpacked_wheel(
            name=name,
            wheeldir=unpacked_dir.path,
            wheel_zip=z,
            scheme=scheme,
            req_description=req_description,
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            direct_url=direct_url,
        )
Example No. 22
    def build(
            self,
            requirements,  # type: Iterable[InstallRequirement]
            should_unpack,  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """Build wheels.

        :param should_unpack: If True, after building the wheel, unpack it
            and replace the sdist with the unpacked version in preparation
            for installation.
        :return: The list of InstallRequirement that failed to build.
        """
        # pip install uses should_unpack=True.
        # pip install never provides a _wheel_dir.
        # pip wheel uses should_unpack=False.
        # pip wheel always provides a _wheel_dir (via the preparer).
        assert ((should_unpack and not self._wheel_dir)
                or (not should_unpack and self._wheel_dir))

        buildset = _collect_buildset(
            requirements,
            wheel_cache=self.wheel_cache,
            check_binary_allowed=self.check_binary_allowed,
            need_wheel=not should_unpack,
        )
        if not buildset:
            return []

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )

        with indent_log():
            build_success, build_failure = [], []
            for req, cache_dir in buildset:
                wheel_file = self._build_one(req, cache_dir)
                if wheel_file:
                    if should_unpack:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if (req.source_dir and
                                not has_delete_marker_file(req.source_dir)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.ensure_build_location(
                            self.preparer.build_dir)
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        req.local_file_path = req.link.file_path
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_file(req.link.file_path, req.source_dir)
                    else:
                        # copy from cache to target directory
                        try:
                            ensure_dir(self._wheel_dir)
                            shutil.copy(wheel_file, self._wheel_dir)
                        except OSError as e:
                            logger.warning(
                                "Building wheel for %s failed: %s",
                                req.name,
                                e,
                            )
                            build_failure.append(req)
                            continue
                    build_success.append(req)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return a list of requirements that failed to build
        return build_failure