Example #1
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
Example #2
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [os.environ.get('PIP_PYTHON_PATH', sys.executable)]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
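
Examples #1 and #2 are identical except that #2 lets a PIP_PYTHON_PATH environment variable override sys.executable when spawning the build; the leading "VERY SLOW ... shutil.copytree" comment appears to be left over from an earlier implementation, since the body shown here already builds an sdist and unpacks it. Below is a minimal standalone sketch of the same subprocess pattern, assuming a simplified SHIM template and a hypothetical build_sdist_into helper rather than pip's actual SETUPTOOLS_SHIM:

import os
import subprocess
import sys

# Simplified stand-in for pip's SETUPTOOLS_SHIM: run the project's setup.py
# through setuptools so that `sdist` works even for distutils-only projects.
SHIM = (
    "import setuptools, tokenize; __file__=%r; "
    "exec(compile(getattr(tokenize, 'open', open)(__file__).read()"
    ".replace('\\r\\n', '\\n'), __file__, 'exec'))"
)

def build_sdist_into(project_dir, dist_dir):
    # Hypothetical helper: build an sdist for project_dir into dist_dir and
    # return the path of the archive that was produced.
    python = os.environ.get('PIP_PYTHON_PATH', sys.executable)
    args = [python, '-c', SHIM % 'setup.py', 'sdist', '--dist-dir', dist_dir]
    subprocess.check_call(args, cwd=project_dir)
    return max(
        (os.path.join(dist_dir, name) for name in os.listdir(dist_dir)),
        key=os.path.getmtime,
    )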
Example #3
 def cleanup(self):
     # type: () -> None
     """Remove the temporary directory created and reset state"""
     self._deleted = True
     if not os.path.exists(self._path):
         return
     rmtree(self._path)
Example #4
 def _cleanup(cls, name, warn_message=None):
     try:
         rmtree(name)
     except OSError:
         pass
     else:
         if warn_message:
             warnings.warn(warn_message, ResourceWarning)
Example #5
 def unpack(self, location):
     """
     Clean up current location and download the url repository
     (and vcs infos) into location
     """
     if os.path.exists(location):
         rmtree(location)
     self.obtain(location)
Example #6
 def unpack(self, location):
     """
     Clean up current location and download the url repository
     (and vcs infos) into location
     """
     if os.path.exists(location):
         rmtree(location)
     self.obtain(location)
Example #7
 def remove_temporary_source(self):
     """Remove the source files from this requirement, if they are marked
     for deletion"""
     if self.source_dir and os.path.exists(
             os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
         logger.debug('Removing source in %s', self.source_dir)
         rmtree(self.source_dir)
     self.source_dir = None
     self._temp_build_dir.cleanup()
     self.build_env.cleanup()
Example #8
 def _cleanup(cls, name, warn_message=None):
     if not os.path.exists(name):
         return
     try:
         rmtree(name)
     except OSError:
         pass
     else:
         if warn_message:
             warnings.warn(warn_message, ResourceWarning)
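
Examples #4 and #8 mirror the tempfile.TemporaryDirectory pattern: _cleanup is a classmethod precisely so it can be registered with weakref.finalize without keeping the instance alive, and the ResourceWarning flags that cleanup happened implicitly at garbage collection rather than through an explicit call. The registration itself is not shown in these snippets, so the wiring below is an assumption; TempDir is an illustrative stand-in, not pip's TempDirectory:

import os
import shutil
import warnings
import weakref

class TempDir:
    def __init__(self, path):
        self.path = path
        # If the object is garbage-collected before cleanup() is called,
        # the finalizer removes the directory and emits a ResourceWarning.
        self._finalizer = weakref.finalize(
            self, self._cleanup, path,
            warn_message="Implicitly cleaning up {!r}".format(path),
        )

    @classmethod
    def _cleanup(cls, name, warn_message=None):
        if not os.path.exists(name):
            return
        try:
            shutil.rmtree(name)
        except OSError:
            pass
        else:
            if warn_message:
                warnings.warn(warn_message, ResourceWarning)

    def cleanup(self):
        # Explicit cleanup: detach the finalizer so it will not fire again.
        if self._finalizer.detach() and os.path.exists(self.path):
            shutil.rmtree(self.path)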
Example #9
    def unpack(self, location, url):
        # type: (str, HiddenText) -> None
        """
        Clean up current location and download the url repository
        (and vcs infos) into location

        :param url: the repository URL starting with a vcs prefix.
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location, url=url)
Example #10
 def cleanup(self):
     """Remove the temporary directory created and reset state
     """
     if getattr(self._finalizer, "detach",
                None) and self._finalizer.detach():
         if os.path.exists(self._path):
             self._deleted = True
             try:
                 rmtree(self._path)
             except OSError:
                 pass
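
The getattr guard in Example #10 tolerates objects whose _finalizer lacks detach(); when it is a weakref.finalize object, detach() returns the registered (obj, func, args, kwargs) tuple only if the callback has not yet run, and cancels it, so the directory is removed at most once, by either the explicit cleanup() or the finalizer but never both. A tiny illustration of that contract:

import weakref

class Box:
    pass

obj = Box()
fin = weakref.finalize(obj, print, "finalizer ran")

# First detach wins: the registration is returned and the callback is cancelled.
assert fin.detach() is not None
# After that the finalizer is dead: further detach() calls return None.
assert fin.detach() is None
assert not fin.alive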
Example #11
    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev_options = self.get_url_rev_options(self.url)

        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            cmd_args = ['export'] + rev_options.to_args() + [url, location]
            self.run_command(cmd_args, show_stdout=False)
Example #12
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    # Once out-of-tree-builds are no longer supported, could potentially
    # replace the below condition with `assert not link.is_existing_dir`
    # - unpack_url does not need to be called for in-tree-builds.
    #
    # As further cleanup, _copy_source_tree and accompanying tests can
    # be removed.
    #
    # TODO when use-deprecated=out-of-tree-build is removed
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
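
unpack_url dispatches on the kind of link: VCS URLs go to the version-control backend, an existing local directory is wiped and re-copied, local archives are used in place, and anything else is downloaded over HTTP before unpacking. A rough standalone sketch of that dispatch, using the 'git+'/'hg+'/'svn+'/'bzr+' scheme prefixes pip's VCS support recognizes; classify_link and its return labels are hypothetical:

import os
from urllib.parse import urlsplit
from urllib.request import url2pathname

VCS_PREFIXES = ("git+", "hg+", "svn+", "bzr+")

def classify_link(url):
    # Hypothetical helper: decide how a requirement URL should be unpacked.
    if url.startswith(VCS_PREFIXES):
        return "vcs"             # hand off to the VCS backend (clone/checkout)
    scheme, _, path, _, _ = urlsplit(url)
    if scheme == "file":
        local = url2pathname(path)
        if os.path.isdir(local):
            return "local-dir"   # copy the source tree into the build dir
        return "local-file"      # unpack the local archive
    return "remote"              # download first, then unpack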
Example #13
 def remove_temporary_source(self):
     # type: () -> None
     """Remove the source files from this requirement, if they are marked
     for deletion"""
     if self.source_dir and has_delete_marker_file(self.source_dir):
         logger.debug('Removing source in %s', self.source_dir)
         rmtree(self.source_dir)
     self.source_dir = None
     if self._temp_build_dir:
         self._temp_build_dir.cleanup()
         self._temp_build_dir = None
     self.build_env.cleanup()
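
remove_temporary_source only wipes source_dir when a delete marker is present, i.e. when pip itself created the tree and flagged it as disposable; has_delete_marker_file in Example #13 is the helper form of the explicit os.path.join check in Example #7. A minimal sketch of that marker pattern; the file name is believed to match PIP_DELETE_MARKER_FILENAME but treat it as an assumption:

import os
import shutil

DELETE_MARKER = "pip-delete-this-directory.txt"  # assumed marker file name

def mark_for_deletion(build_dir):
    # Drop a sentinel saying this tree was created by the tool and is
    # safe to remove later.
    with open(os.path.join(build_dir, DELETE_MARKER), "w") as f:
        f.write("This directory can be safely removed.\n")

def remove_if_marked(build_dir):
    # Remove the tree only if the sentinel is present.
    if build_dir and os.path.exists(os.path.join(build_dir, DELETE_MARKER)):
        shutil.rmtree(build_dir)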
Example #14
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
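
After deciding where the archive comes from, unpack_file_url guesses a MIME content type from the filename and hands it to pip's unpack_file. A rough stdlib analogue of that step; shutil.unpack_archive chooses the unpacker from the file extension rather than the content type, so this is an approximation, not pip's unpacker:

import mimetypes
import os
import shutil

def unpack_into(archive_path, location):
    # Illustrative: unpack a local sdist/archive into a fresh build directory.
    content_type = mimetypes.guess_type(archive_path)[0]
    print("Guessed content type:", content_type)  # e.g. 'application/x-tar'

    if os.path.isdir(location):
        shutil.rmtree(location)
    os.makedirs(location)

    # unpack_archive picks the format from the extension (.tar.gz, .zip, ...)
    shutil.unpack_archive(archive_path, location)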
Example #15
 def export(self, location):
     """Export the svn repository at the url to the destination location"""
     url, rev = self.get_url_rev()
     rev_options = get_rev_options(self, url, rev)
     url = self.remove_auth_from_url(url)
     logger.info('Exporting svn repository %s to %s', url, location)
     with indent_log():
         if os.path.exists(location):
             # Subversion doesn't like to check out over an existing
             # directory --force fixes this, but was only added in svn 1.5
             rmtree(location)
         cmd_args = ['export'] + rev_options.to_args() + [url, location]
         self.run_command(cmd_args, show_stdout=False)
Example #16
    def export(self, location, url):
        # type: (str, HiddenText) -> None
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)

        url, rev_options = self.get_url_rev_options(url)
        self.run_command(
            make_command('export', location, url, rev_options.to_args()),
            show_stdout=False,
        )
Example #17
    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['export', location],
                cwd=temp_dir.path, show_stdout=False,
            )
Example #18
    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['export', location],
                cwd=temp_dir.path,
                show_stdout=False,
            )
Example #19
def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
Example #20
    def rollback(self) -> None:
        """Undoes the uninstall by moving stashed files back."""
        for p in self._moves:
            logger.info("Moving to %s\n from %s", *p)

        for new_path, path in self._moves:
            try:
                logger.debug('Replacing %s from %s', new_path, path)
                if os.path.isfile(new_path) or os.path.islink(new_path):
                    os.unlink(new_path)
                elif os.path.isdir(new_path):
                    rmtree(new_path)
                renames(path, new_path)
            except OSError as ex:
                logger.error("Failed to restore %s", new_path)
                logger.debug("Exception: %s", ex)

        self.commit()
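
rollback replays the recorded (new_path, stashed_path) pairs: whatever now occupies the original location is removed (file, symlink, or directory), then the stashed copy is renamed back, and the stash is finally committed. A small illustrative sketch of the stash-and-restore idea (committing the stash is omitted); StashedFiles and its methods are hypothetical names, not pip's StashedUninstallPathSet:

import os
import shutil

class StashedFiles:
    def __init__(self):
        self._moves = []  # (original_path, stashed_path) pairs

    def stash(self, path, stash_dir):
        # Move `path` into `stash_dir`, remembering how to undo it.
        stashed = os.path.join(stash_dir, os.path.basename(path))
        shutil.move(path, stashed)
        self._moves.append((path, stashed))
        return stashed

    def rollback(self):
        # Undo: clear whatever reappeared at the original path, then restore.
        for original, stashed in self._moves:
            if os.path.isfile(original) or os.path.islink(original):
                os.unlink(original)
            elif os.path.isdir(original):
                shutil.rmtree(original)
            shutil.move(stashed, original)
        self._moves = []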
Example #21
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> Optional[str]
    """Unpack link into location.
    """
    link_path = link.file_path
    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link_path, location)
        return None

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type)

    return from_path
Example #22
    def export(self, location, url):
        # type: (str, HiddenText) -> None
        """Export the svn repository at the url to the destination location"""
        url, rev_options = self.get_url_rev_options(url)

        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            cmd_args = make_command(
                'export',
                self.get_remote_call_options(),
                rev_options.to_args(),
                url,
                location,
            )
            self.run_command(cmd_args, show_stdout=False)
Example #23
 def cleanup(self):
     """Remove the temporary directory created and reset state
     """
     if self.path is not None and os.path.exists(self.path):
         rmtree(self.path)
     self.path = None
Example #24
 def cleanup(self):
     """Remove the temporary directory created and reset state
     """
     if self.path is not None and os.path.exists(self.path):
         rmtree(self.path)
     self.path = None
Example #25
    def prepare_linked_requirement(self, req, session, finder, upgrade_allowed,
                                   require_hashes):
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                rmtree(req.source_dir)
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(req.link,
                           req.source_dir,
                           download_dir,
                           autodelete_unpacked,
                           session=session,
                           hashes=hashes,
                           progress_bar=self.progress_bar)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' % (req, exc, req.link))
            abstract_dist = make_abstract_dist(req)
            with self.req_tracker.track(req):
                abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
        return abstract_dist
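
The require-hashes branch hinges on hashes.check_against_path, which streams the file through every allowed algorithm and raises HashMismatch unless at least one digest matches; the MissingHashes facade deliberately matches nothing so the resulting error can show the user the digest that was computed. A rough stdlib sketch of that check with hypothetical names:

import hashlib

class HashMismatch(Exception):
    pass

def check_against_path(path, allowed):
    # `allowed` maps algorithm name -> set of acceptable hex digests.
    # An empty mapping behaves like MissingHashes: the check always fails,
    # but the computed sha256 is reported so the user can pin it.
    hashers = {name: hashlib.new(name) for name in allowed}
    if not hashers:
        hashers = {"sha256": hashlib.sha256()}
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            for h in hashers.values():
                h.update(chunk)
    computed = {name: h.hexdigest() for name, h in hashers.items()}
    if any(digest in allowed.get(name, ()) for name, digest in computed.items()):
        return
    raise HashMismatch(
        "expected one of {!r}, got {!r}".format(allowed, computed)
    )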
Example #26
    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare a requirement that would be obtained from req.link
        """
        assert req.link
        link = req.link

        # TODO: Breakup into smaller functions
        if link.scheme == 'file':
            path = link.file_path
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req.req or req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            # Since source_dir is only set for editable requirements.
            assert req.source_dir is None
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                rmtree(req.source_dir)

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if self.require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if link.is_vcs:
                    raise VcsHashUnsupported()
                elif link.is_existing_dir():
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not self.require_hashes)
            if self.require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            download_dir = self.download_dir
            if link.is_wheel and self.wheel_download_dir:
                # when doing 'pip wheel' we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir

            try:
                local_path = unpack_url(
                    link, req.source_dir, self.downloader, download_dir,
                    hashes=hashes,
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement {} because of HTTP '
                    'error {} for URL {}'.format(req, exc, link)
                )

            # For use in later processing, preserve the file path on the
            # requirement.
            if local_path:
                req.local_file_path = local_path

            if link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False
            else:
                # We always delete unpacked sdists after pip runs.
                autodelete_unpacked = True
            if autodelete_unpacked:
                write_delete_marker_file(req.source_dir)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if download_dir:
                if link.is_existing_dir():
                    logger.info('Link is a directory, ignoring download_dir')
                elif local_path and not os.path.exists(
                    os.path.join(download_dir, link.filename)
                ):
                    _copy_file(local_path, download_dir, link)

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist
Example #27
def unpack_url(
    link,  # type: Link
    location,  # type: str
    download,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[File]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # Once out-of-tree-builds are no longer supported, could potentially
    # replace the below condition with `assert not link.is_existing_dir`
    # - unpack_url does not need to be called for in-tree-builds.
    #
    # As further cleanup, _copy_source_tree and accompanying tests can
    # be removed.
    if link.is_existing_dir():
        deprecated(
            "A future pip version will change local packages to be built "
            "in-place without first copying to a temporary directory. "
            "We recommend you use --use-feature=in-tree-build to test "
            "your packages with this new behavior before it becomes the "
            "default.\n",
            replacement=None,
            gone_in="21.3",
            issue=7555
        )
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
Example #28
    def obtain(self, dest):
        # type: (str) -> None
        """
        Install or update in editable mode the package represented by this
        VersionControl object.

        Args:
          dest: the repository directory in which to install or update.
        """
        url, rev_options = self.get_url_rev_options(self.url)

        if not os.path.exists(dest):
            self.fetch_new(dest, url, rev_options)
            return

        rev_display = rev_options.to_display()
        if self.is_repository_directory(dest):
            existing_url = self.get_remote_url(dest)
            if self.compare_urls(existing_url, url):
                logger.debug(
                    '%s in %s exists, and has correct URL (%s)',
                    self.repo_name.title(),
                    display_path(dest),
                    url,
                )
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    logger.info(
                        'Updating %s %s%s',
                        display_path(dest),
                        self.repo_name,
                        rev_display,
                    )
                    self.update(dest, url, rev_options)
                else:
                    logger.info('Skipping because already up-to-date.')
                return

            logger.warning(
                '%s %s in %s exists with URL %s',
                self.name,
                self.repo_name,
                display_path(dest),
                existing_url,
            )
            prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w',
                                                                'b'))
        else:
            logger.warning(
                'Directory %s already exists, and is not a %s %s.',
                dest,
                self.name,
                self.repo_name,
            )
            # https://github.com/python/mypy/issues/1174
            prompt = (
                '(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
                ('i', 'w', 'b'))

        logger.warning(
            'The plan is to install the %s repository %s',
            self.name,
            url,
        )
        response = ask_path_exists('What to do?  %s' % prompt[0], prompt[1])

        if response == 'a':
            sys.exit(-1)

        if response == 'w':
            logger.warning('Deleting %s', display_path(dest))
            rmtree(dest)
            self.fetch_new(dest, url, rev_options)
            return

        if response == 'b':
            dest_dir = backup_dir(dest)
            logger.warning(
                'Backing up %s to %s',
                display_path(dest),
                dest_dir,
            )
            shutil.move(dest, dest_dir)
            self.fetch_new(dest, url, rev_options)
            return

        # Do nothing if the response is "i".
        if response == 's':
            logger.info(
                'Switching %s %s to %s%s',
                self.repo_name,
                display_path(dest),
                url,
                rev_display,
            )
            self.switch(dest, url, rev_options)
Example #29
    def check_destination(self, dest, url, rev_options):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.

        Args:
          rev_options: a RevOptions object.
        """
        checkout = True
        prompt = False
        rev_display = rev_options.to_display()
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.is_commit_id_equal(dest, rev_options.rev):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info('Skipping because already up-to-date.')
                else:
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do?  %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(dest),
                    dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                sys.exit(-1)
        return checkout
Example #30
    def check_destination(self, dest, url, rev_options):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.

        Args:
          rev_options: a RevOptions object.
        """
        checkout = True
        prompt = False
        rev_display = rev_options.to_display()
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.is_commit_id_equal(dest, rev_options.rev):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info(
                            'Skipping because already up-to-date.')
                else:
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do?  %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s', display_path(dest), dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                sys.exit(-1)
        return checkout