Example #1
    def ensure_build_location(self, build_dir):
        # type: (str) -> str
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # For a requirement given as a path to a directory, the name of
            # the package is not available yet, so we create a temp directory.
            # Once run_egg_info has run, we'll be able to fix it via
            # move_to_correct_build_directory().
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(kind="req-build")
            self._ideal_build_dir = build_dir

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            _make_build_dir(build_dir)
        return os.path.join(build_dir, name)
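All of the examples in this collection lean on the same small TempDirectory surface: an eagerly created directory with .path and .cleanup(), a context-manager form, and a globally managed form. A minimal hedged sketch of those three idioms follows; the import path is an assumption (pip keeps this helper in an internal module), and the kind names are just taken from the snippets.

# Hedged sketch of the TempDirectory idioms recurring throughout these examples.
from pip._internal.utils.temp_dir import TempDirectory

# 1. Eagerly created and cleaned up by its owner (older examples call
#    .create() explicitly; newer TempDirectory creates the directory on init).
build_dir = TempDirectory(kind="req-build")
print(build_dir.path)       # real filesystem path of the temporary directory
build_dir.cleanup()         # removes the directory tree

# 2. Scoped to a block: the directory is removed when the block exits.
with TempDirectory(kind="unpack") as temp_dir:
    scratch = temp_dir.path

# 3. Handed to pip's global cleanup machinery (newer examples), so the caller
#    only keeps the path and never cleans up itself.
path = TempDirectory(kind="unpack", globally_managed=True).path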
Example #2
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = TempDirectory(kind="uninstall")
        self._moved_paths = []
Example #3
    def _get_file_stash(self, path):
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind='uninstall')
            save_dir.create()
            self._save_dirs[head] = save_dir

        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path
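The parent-directory walk above relies on Python's while/else: the else branch only runs when the loop finishes without hitting break. A standalone illustration of the same lookup, with hypothetical paths and a plain set of registered roots:

import os

def find_registered_root(path, roots):
    # Walk up the parents of `path` until a registered root is found or the
    # filesystem root is reached (dirname() becomes a fixed point there).
    head, old_head = os.path.dirname(path), None
    while head != old_head:
        if head in roots:
            return head                      # the "break" case in the original
        head, old_head = os.path.dirname(head), head
    return None                              # the "else" case: no suitable root

print(find_registered_root("/opt/app/lib/mod.py", {"/opt/app"}))   # /opt/app
print(find_registered_root("/srv/data/file.txt", {"/opt/app"}))    # None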
Example #4
    def __init__(self, format_control):
        # type: (FormatControl) -> None
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(self._temp_dir.path,
                                              format_control)
Example #5
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, update=True, markers=None,
                 isolated=False, options=None, wheel_cache=None,
                 constraint=False, extras=()):
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is not None:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        else:
            self.source_dir = None
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is not None:
            self.link = self.original_link = link
        else:
            from pipenv.patched.notpip._internal.index import Link
            self.link = self.original_link = req and req.url and Link(req.url)

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This holds the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = TempDirectory(kind="req-build")
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled_pathset = None
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        self.is_direct = False

        self.isolated = isolated
        self.build_env = BuildEnvironment(no_clean=True)
Example #6
    def move_to_correct_build_directory(self):
        # type: () -> None
        """Move self._temp_build_dir to "self._ideal_build_dir/self.req.name"

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called to "fix" the build directory after generating
        metadata.
        """
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir
        assert (
            self._ideal_build_dir is not None and
            self._ideal_build_dir.path  # type: ignore
        )
        old_location = self._temp_build_dir
        self._temp_build_dir = None  # checked inside ensure_build_location

        # Figure out the correct place to put the files.
        new_location = self.ensure_build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location)
            )

        # Move the files to the correct location.
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location.path), display_path(new_location),
        )
        shutil.move(old_location.path, new_location)

        # Update directory-tracking variables, to be in line with new_location
        self.source_dir = os.path.normpath(os.path.abspath(new_location))
        self._temp_build_dir = TempDirectory(
            path=new_location, kind="req-install",
        )

        # Correct the metadata directory, if it exists
        if self.metadata_directory:
            old_meta = self.metadata_directory
            rel = os.path.relpath(old_meta, start=old_location.path)
            new_meta = os.path.join(new_location, rel)
            new_meta = os.path.normpath(os.path.abspath(new_meta))
            self.metadata_directory = new_meta

        # Done with any "move built files" work, since we have moved the files
        # to the "ideal" build location. Setting this to None clearly flags
        # that no more moves are needed.
        self._ideal_build_dir = None
Example #7
    def __init__(self):
        # type: () -> None
        self._temp_dir = TempDirectory(kind="build-env")
        self._temp_dir.create()

        self._prefixes = OrderedDict(
            ((name, _Prefix(os.path.join(self._temp_dir.path, name)))
             for name in ('normal', 'overlay')))

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site)
            for site in (
                get_python_lib(plat_specific=False),
                get_python_lib(plat_specific=True),
            )
        }
        self._site_dir = os.path.join(self._temp_dir.path, 'site')
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(
                textwrap.dedent('''
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories.
                # ensuring .pth files are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                ''').format(system_sites=system_sites,
                            lib_dirs=self._lib_dirs))
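The sitecustomize.py above is produced by formatting a dedented template with !r, so the repr() of a Python set or list is embedded as a literal in the written file. A small hedged illustration of that trick with made-up directories:

import textwrap

# Hypothetical stand-ins for the system_sites / self._lib_dirs values above.
lib_dirs = ["/tmp/build-env/normal/lib/python3/site-packages",
            "/tmp/build-env/overlay/lib/python3/site-packages"]

# {lib_dirs!r} is replaced with repr(lib_dirs), i.e. a valid list literal,
# so the generated module can iterate over it at import time.
source = textwrap.dedent("""
    import site
    for path in {lib_dirs!r}:
        site.addsitedir(path)
    """).format(lib_dirs=lib_dirs)

print(source)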
Example #8
    def __init__(self):
        # type: () -> None
        self._root = os.environ.get('PIP_REQ_TRACKER')
        if self._root is None:
            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
            logger.debug('Created requirements tracker %r', self._root)
        else:
            self._temp_dir = None
            logger.debug('Re-using requirements tracker %r', self._root)
        self._entries = set()  # type: Set[InstallRequirement]
Example #9
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates it's own temporary cache directory
    """
    def __init__(self, format_control):
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(self._temp_dir.path,
                                              format_control)

    def cleanup(self):
        self._temp_dir.cleanup()
Example #10
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates it's own temporary cache directory
    """

    def __init__(self, format_control):
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        self._temp_dir.cleanup()
Example #11
def unpack_http_url(link,
                    location,
                    download_dir=None,
                    session=None,
                    hashes=None,
                    progress_bar="on"):
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'")

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(
                link, download_dir, hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(
                link, session, temp_dir.path, hashes, progress_bar)

        # Unpack the archive to the build dir location. Even when only
        # downloading archives, they have to be unpacked to parse dependencies.
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        if not already_downloaded_path:
            os.unlink(from_path)
Example #12
    def __init__(self, format_control):
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )
Example #13
def get_http_url(
    link,  # type: Link
    download,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> File
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = None
    else:
        # let's download to a tmp dir
        from_path, content_type = download(link, temp_dir.path)
        if hashes:
            hashes.check_against_path(from_path)

    return File(from_path, content_type)
Example #14
def generate_editable_metadata(
    build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 660.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel/editable, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message(
            "Preparing editable metadata (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
Example #15
    def _complete_partial_requirements(
        self,
        partially_downloaded_reqs: Iterable[InstallRequirement],
        parallel_builds: bool = False,
    ) -> None:
        """Download any requirements which were only fetched by metadata."""
        # Download to a temporary directory. These will be copied over as
        # needed for downstream 'download', 'wheel', and 'install' commands.
        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path

        # Map each link to the requirement that owns it. This allows us to set
        # `req.local_file_path` on the appropriate requirement after passing
        # all the links at once into BatchDownloader.
        links_to_fully_download: Dict[Link, InstallRequirement] = {}
        for req in partially_downloaded_reqs:
            assert req.link
            links_to_fully_download[req.link] = req

        batch_download = self._batch_download(
            links_to_fully_download.keys(),
            temp_dir,
        )
        for link, (filepath, _) in batch_download:
            logger.debug("Downloading link %s to %s", link, filepath)
            req = links_to_fully_download[link]
            req.local_file_path = filepath

        # This step is necessary to ensure all lazy wheels are processed
        # successfully by the 'download', 'wheel', and 'install' commands.
        for req in partially_downloaded_reqs:
            self._prepare_linked_requirement(req, parallel_builds)
Example #16
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = TempDirectory(kind="uninstall")
        self._moved_paths = []
Example #17
def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> str
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(
            link, downloader, temp_dir.path, hashes
        )

    # Unpack the archive to the build dir location. Even when only
    # downloading archives, they have to be unpacked to parse dependencies.
    unpack_file(from_path, location, content_type)

    return from_path
Example #18
    def __init__(self, format_control: FormatControl) -> None:
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )

        super().__init__(self._temp_dir.path, format_control)
Example #19
    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir.path
            )
Example #20
def _build_one_inside_env(
    req,  # type: InstallRequirement
    output_dir,  # type: str
    build_options,  # type: List[str]
    global_options,  # type: List[str]
):
    # type: (...) -> Optional[str]
    with TempDirectory(kind="wheel") as temp_dir:
        assert req.name
        if req.use_pep517:
            assert req.metadata_directory
            assert req.pep517_backend
            if global_options:
                logger.warning(
                    'Ignoring --global-option when building %s using PEP 517', req.name
                )
            if build_options:
                logger.warning(
                    'Ignoring --build-option when building %s using PEP 517', req.name
                )
            wheel_path = build_wheel_pep517(
                name=req.name,
                backend=req.pep517_backend,
                metadata_directory=req.metadata_directory,
                tempd=temp_dir.path,
            )
        else:
            wheel_path = build_wheel_legacy(
                name=req.name,
                setup_py_path=req.setup_py_path,
                source_dir=req.unpacked_source_directory,
                global_options=global_options,
                build_options=build_options,
                tempd=temp_dir.path,
            )

        if wheel_path is not None:
            wheel_name = os.path.basename(wheel_path)
            dest_path = os.path.join(output_dir, wheel_name)
            try:
                wheel_hash, length = hash_file(wheel_path)
                shutil.move(wheel_path, dest_path)
                logger.info('Created wheel for %s: '
                            'filename=%s size=%d sha256=%s',
                            req.name, wheel_name, length,
                            wheel_hash.hexdigest())
                logger.info('Stored in directory: %s', output_dir)
                return dest_path
            except Exception as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name, e,
                )
        # Ignore return, we can't do anything else useful.
        if not req.use_pep517:
            _clean_one_legacy(req, global_options)
        return None
Example #21
    def export(self, location, url):
        # type: (str, HiddenText) -> None
        """Export the Hg repository at the url to the destination location"""
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path, url=url)

            self.run_command(['archive', location],
                             show_stdout=False,
                             cwd=temp_dir.path)
Example #22
def get_requirement_tracker() -> Iterator["RequirementTracker"]:
    root = os.environ.get("PIP_REQ_TRACKER")
    with contextlib.ExitStack() as ctx:
        if root is None:
            root = ctx.enter_context(TempDirectory(kind="req-tracker")).path
            ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with RequirementTracker(root) as tracker:
            yield tracker
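The contextlib.ExitStack above lets the function enter contexts only on the branch that needs them (creating and exporting a fresh directory vs. reusing an existing root) while guaranteeing that everything entered is unwound together. A hedged standalone sketch of the same idiom, using an invented environment variable in place of PIP_REQ_TRACKER:

import contextlib
import os
import tempfile

@contextlib.contextmanager
def tracker_root():
    # MY_TRACKER_ROOT is a hypothetical variable standing in for
    # PIP_REQ_TRACKER in the snippet above.
    root = os.environ.get("MY_TRACKER_ROOT")
    with contextlib.ExitStack() as stack:
        if root is None:
            # Only this branch creates (and later removes) a directory.
            root = stack.enter_context(tempfile.TemporaryDirectory())
        yield root
    # Everything entered on the stack has been cleaned up at this point.

with tracker_root() as root:
    print("tracking builds under", root)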
Example #23
    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        if not location.endswith('/'):
            location = location + '/'

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False,
                cwd=temp_dir.path)
Example #24
    def __init__(self) -> None:
        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV,
                                 globally_managed=True)

        self._prefixes = OrderedDict(
            (name, _Prefix(os.path.join(temp_dir.path, name)))
            for name in ("normal", "overlay"))

        self._bin_dirs: List[str] = []
        self._lib_dirs: List[str] = []
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site)
            for site in (get_purelib(), get_platlib())
        }
        self._site_dir = os.path.join(temp_dir.path, "site")
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, "sitecustomize.py"),
                  "w",
                  encoding="utf-8") as fp:
            fp.write(
                textwrap.dedent("""
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories.
                # ensuring .pth files are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                """).format(system_sites=system_sites,
                            lib_dirs=self._lib_dirs))
Example #25
    def ensure_build_location(self, build_dir: str, autodelete: bool,
                              parallel_builds: bool) -> str:
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(kind=tempdir_kinds.REQ_BUILD,
                                                 globally_managed=True)

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the path
        # for the temporary directory. It is only needed for editables where
        # it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name: str = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = f"{dir_name}_{uuid.uuid4().hex}"

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug("Creating directory %s", build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path
Example #26
    def _get_directory_stash(self, path: str) -> str:
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            save_dir: TempDirectory = AdjacentTempDirectory(path)
        except OSError:
            save_dir = TempDirectory(kind="uninstall")
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path
Example #27
    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        with TempDirectory(kind="wheel") as temp_dir:
            if self.__build_one(req, temp_dir.path, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(temp_dir.path)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(temp_dir.path, wheel_name),
                                wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None
Example #28
    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['export', location],
                cwd=temp_dir.path,
                show_stdout=False,
            )
Example #29
    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        with TempDirectory(kind="wheel") as temp_dir:
            if req.use_pep517:
                builder = self._build_one_pep517
            else:
                builder = self._build_one_legacy
            wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
            if wheel_path is not None:
                wheel_name = os.path.basename(wheel_path)
                dest_path = os.path.join(output_dir, wheel_name)
                try:
                    shutil.move(wheel_path, dest_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return dest_path
                except Exception:
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None
Example #30
    def prepare_pep517_metadata(self):
        # type: () -> str
        assert self.pep517_backend is not None

        # NOTE: This needs to be refactored to stop using atexit
        metadata_tmpdir = TempDirectory(kind="modern-metadata")
        atexit.register(metadata_tmpdir.cleanup)

        metadata_dir = metadata_tmpdir.path

        with self.build_env:
            # Note that Pep517HookCaller implements a fallback for
            # prepare_metadata_for_build_wheel, so we don't have to
            # consider the possibility that this hook doesn't exist.
            runner = runner_with_spinner_message("Preparing wheel metadata")
            backend = self.pep517_backend
            with backend.subprocess_runner(runner):
                distinfo_dir = backend.prepare_metadata_for_build_wheel(
                    metadata_dir
                )

        return os.path.join(metadata_dir, distinfo_dir)
Example #31
def generate_metadata(
    build_env: BuildEnvironment,
    setup_py_path: str,
    source_dir: str,
    isolated: bool,
    details: str,
) -> str:
    """Generate metadata using setup.py-based defacto mechanisms.

    Returns the generated metadata directory.
    """
    logger.debug(
        "Running setup.py (path:%s) egg_info for package %s",
        setup_py_path,
        details,
    )

    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path

    args = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env:
        with open_spinner("Preparing metadata (setup.py)") as spinner:
            try:
                call_subprocess(
                    args,
                    cwd=source_dir,
                    command_desc="python setup.py egg_info",
                    spinner=spinner,
                )
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    # Return the .egg-info directory.
    return _find_egg_info(egg_info_dir)
Example #32
def _create_standalone_pip() -> Iterator[str]:
    """Create a "standalone pip" zip file.

    The zip file's content is identical to the currently-running pip.
    It will be used to install requirements into the build environment.
    """
    source = pathlib.Path(pip_location).resolve().parent

    # Return the current instance if `source` is not a directory. We can't build
    # a zip from this, and it likely means the instance is already standalone.
    if not source.is_dir():
        yield str(source)
        return

    with TempDirectory(kind="standalone-pip") as tmp_dir:
        pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip")
        kwargs = {}
        if sys.version_info >= (3, 8):
            kwargs["strict_timestamps"] = False
        with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf:
            for child in source.rglob("*"):
                zf.write(child, child.relative_to(source.parent).as_posix())
        yield os.path.join(pip_zip, "pip")
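The yielded value points inside the archive ("…__env_pip__.zip/pip"); such a path can be handed to the interpreter because zipimport resolves sys.path entries that point into zip files, which is presumably how the caller uses it to install build dependencies. A hedged sketch of that consumption, with a made-up path and package name:

import subprocess
import sys

# Hypothetical path of the zip produced above; "somepackage" is made up too.
standalone_pip = "/tmp/standalone-pip/__env_pip__.zip/pip"

subprocess.check_call([
    sys.executable, standalone_pip, "install",
    "--prefix", "/tmp/build-env/overlay",   # hypothetical target prefix
    "somepackage",
])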
Example #33
def generate_metadata(build_env, backend):
    # type: (BuildEnvironment, Pep517HookCaller) -> str
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(
        kind="modern-metadata", globally_managed=True
    )

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )

    return os.path.join(metadata_dir, distinfo_dir)
Example #34
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = TempDirectory(kind="uninstall")
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def _stash(self, path):
        return os.path.join(
            self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
        )

    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                self.save_dir.create()

                for path in sorted(compact(self.paths)):
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir.path is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self.save_dir.cleanup()
        self._moved_paths = []

    @classmethod
    def from_dist(cls, dist):
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of the uninstall cases matters: when two versions of the
        # same package are installed, pip needs to uninstall the currently
        # detected version.
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
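A worked illustration (hypothetical paths) of the _stash() mapping used by remove() and rollback() above: the drive is split off and the remaining absolute path is re-rooted under the uninstall save directory, so renames() preserves the original layout and can later be reversed:

import os

save_dir = "/tmp/pip-uninstall-ab12cd"                       # hypothetical
path = "/usr/lib/python3/dist-packages/foo/__init__.py"      # hypothetical

stashed = os.path.join(
    save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep)
)
print(stashed)
# -> /tmp/pip-uninstall-ab12cd/usr/lib/python3/dist-packages/foo/__init__.py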
Example #35
    def run(self, options, args):
        cmdoptions.check_install_build_global(options)

        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        target_temp_dir = TempDirectory(kind="target")
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory to use with the --target option
            target_temp_dir.create()
            install_options.append('--home=' + target_temp_dir.path)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="install"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                    )
                    resolver.resolve(requirement_set)

                    # If caching is disabled or wheel is not installed don't
                    # try to build wheels.
                    if wheel and options.cache_dir:
                        # build wheels before install.
                        wb = WheelBuilder(
                            finder, preparer, wheel_cache,
                            build_options=[], global_options=[],
                        )
                        # Ignore the result: a failed wheel will simply be
                        # installed from the sdist/vcs instead.
                        wb.build(
                            requirement_set.requirements.values(),
                            session=session, autobuilding=True
                        )

                    to_install = resolver.get_installation_order(
                        requirement_set
                    )

                    # Consistency Checking of the package set we're installing.
                    should_warn_about_conflicts = (
                        not options.ignore_dependencies and
                        options.warn_about_conflicts
                    )
                    if should_warn_about_conflicts:
                        self._warn_about_conflicts(to_install)

                    # Don't warn about script install locations if
                    # --target has been specified
                    warn_script_location = options.warn_script_location
                    if options.target_dir:
                        warn_script_location = False

                    installed = install_given_reqs(
                        to_install,
                        install_options,
                        global_options,
                        root=options.root_path,
                        home=target_temp_dir.path,
                        prefix=options.prefix_path,
                        pycompile=options.compile,
                        warn_script_location=warn_script_location,
                        use_user_site=options.use_user_site,
                    )

                    possible_lib_locations = get_lib_location_guesses(
                        user=options.use_user_site,
                        home=target_temp_dir.path,
                        root=options.root_path,
                        prefix=options.prefix_path,
                        isolated=options.isolated_mode,
                    )
                    reqs = sorted(installed, key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            installed_version = get_installed_version(
                                req.name, possible_lib_locations
                            )
                            if installed_version:
                                item += '-' + installed_version
                        except Exception:
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s', installed)
                except EnvironmentError as error:
                    show_traceback = (self.verbosity >= 1)

                    message = create_env_error_message(
                        error, show_traceback, options.use_user_site,
                    )
                    logger.error(message, exc_info=show_traceback)

                    return ERROR
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        return requirement_set
Example #36
class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self, no_clean):
        self._temp_dir = TempDirectory(kind="build-env")
        self._no_clean = no_clean

    @property
    def path(self):
        return self._temp_dir.path

    def __enter__(self):
        self._temp_dir.create()

        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)
        self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=0, prefix=self.path)
        platlib = get_python_lib(plat_specific=1, prefix=self.path)
        if purelib == platlib:
            lib_dirs = purelib
        else:
            lib_dirs = purelib + os.pathsep + platlib
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        os.environ['PYTHONNOUSERSITE'] = '1'

        return self.path

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self._no_clean:
            self._temp_dir.cleanup()

        def restore_var(varname, old_value):
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

        restore_var('PATH', self.save_path)
        restore_var('PYTHONPATH', self.save_pythonpath)
        restore_var('PYTHONNOUSERSITE', self.save_nousersite)

    def cleanup(self):
        self._temp_dir.cleanup()
Example #37
    def __init__(self, no_clean):
        self._temp_dir = TempDirectory(kind="build-env")
        self._no_clean = no_clean