def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """
    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195
    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
def test_rmtree_retries_for_3sec(tmpdir, monkeypatch):
    """
    Test pip._internal.utils.rmtree will retry failures for no more than 3 sec
    """
    monkeypatch.setattr(shutil, 'rmtree', Failer(duration=5).call)
    with pytest.raises(OSError):
        rmtree('foo')
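# The retry tests in this collection (this one and test_rmtree_retries further
# down) monkeypatch shutil.rmtree with a `Failer` helper that is not shown
# here. A minimal sketch of such a helper, assuming all it needs to do is
# raise OSError until `duration` seconds have elapsed, could look like this:
import time


class Failer:
    def __init__(self, duration=1):
        # Calls made before this deadline will fail.
        self.succeed_after = time.time() + duration

    def call(self, *args, **kw):
        # Stand-in for shutil.rmtree: fail until the deadline has passed.
        if time.time() < self.succeed_after:
            raise OSError("Failed")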
def unpack(self, location):
    """
    Clean up current location and download the url repository
    (and vcs infos) into location
    """
    if os.path.exists(location):
        rmtree(location)
    self.obtain(location)
def remove_temporary_source(self):
    """Remove the source files from this requirement, if they are marked
    for deletion"""
    if self.source_dir and os.path.exists(
            os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
        logger.debug('Removing source in %s', self.source_dir)
        rmtree(self.source_dir)
    self.source_dir = None
    self._temp_build_dir.cleanup()
def create_venv(dest_dir):
    if os.path.exists(dest_dir):
        rmtree(dest_dir)
    print('Creating virtualenv in %s' % dest_dir)
    code = subprocess.check_call([
        'virtualenv', '--no-site-packages', dest_dir,
    ])
    assert not code, "virtualenv failed"
def unpack(self, location, url):
    # type: (str, str) -> None
    """
    Clean up current location and download the url repository
    (and vcs infos) into location

    :param url: the repository URL starting with a vcs prefix.
    """
    if os.path.exists(location):
        rmtree(location)
    self.obtain(location, url=url)
def export(self, location): """Export the svn repository at the url to the destination location""" url, rev_options = self.get_url_rev_options(self.url) logger.info('Exporting svn repository %s to %s', url, location) with indent_log(): if os.path.exists(location): # Subversion doesn't like to check out over an existing # directory --force fixes this, but was only added in svn 1.5 rmtree(location) cmd_args = ['export'] + rev_options.to_args() + [url, location] self.run_command(cmd_args, show_stdout=False)
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)
    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
def export(self, location): """ Export the Bazaar repository at the url to the destination location """ # Remove the location to make sure Bazaar can export it correctly if os.path.exists(location): rmtree(location) url, rev_options = self.get_url_rev_options(self.url) self.run_command( ['export', location, url] + rev_options.to_args(), show_stdout=False, )
def export(self, location, url): """Export the svn repository at the url to the destination location""" url, rev_options = self.get_url_rev_options(url) logger.info("Exporting svn repository %s to %s", url, location) with indent_log(): if os.path.exists(location): # Subversion doesn't like to check out over an existing # directory --force fixes this, but was only added in svn 1.5 rmtree(location) cmd_args = (["export"] + self.get_remote_call_options() + rev_options.to_args() + [url, location]) self.run_command(cmd_args, show_stdout=False)
def export(self, location, url): """ Export the Bazaar repository at the url to the destination location """ # Remove the location to make sure Bazaar can export it correctly if os.path.exists(location): rmtree(location) url, rev_options = self.get_url_rev_options(url) self.run_command( ['export', location, url] + rev_options.to_args(), show_stdout=False, )
def export(self, location): """Export the svn repository at the url to the destination location""" url, rev = self.get_url_rev() rev_options = get_rev_options(self, url, rev) url = remove_auth_from_url(url) logger.info('Exporting svn repository %s to %s', url, location) with indent_log(): if os.path.exists(location): # Subversion doesn't like to check out over an existing # directory --force fixes this, but was only added in svn 1.5 rmtree(location) cmd_args = ['export'] + rev_options.to_args() + [url, location] self.run_command(cmd_args, show_stdout=False)
def export(self, location, url):
    # type: (str, HiddenText) -> None
    """
    Export the Bazaar repository at the url to the destination location
    """
    # Remove the location to make sure Bazaar can export it correctly
    if os.path.exists(location):
        rmtree(location)

    url, rev_options = self.get_url_rev_options(url)
    self.run_command(
        make_command('export', location, url, rev_options.to_args())
    )
def _test_packages(output, pending_fn):
    package = get_last_item(pending_fn)
    print('Testing package %s' % package)
    dest_dir = os.path.join(output, package)
    print('Creating virtualenv in %s' % dest_dir)
    create_venv(dest_dir)
    print('Uninstalling actual pip')
    code = subprocess.check_call([
        os.path.join(dest_dir, bin_dir, 'pip'),
        'uninstall', '-y', 'pip',
    ])
    assert not code, 'pip uninstallation failed'
    print('Installing development pip')
    code = subprocess.check_call(
        [os.path.join(dest_dir, bin_dir, 'python'), 'setup.py', 'install'],
        cwd=src_folder,
    )
    assert not code, 'pip installation failed'
    print('Trying installation of %s' % dest_dir)
    # Use subprocess.call (not check_call) so a failing install is reported
    # via the return code instead of raising and skipping the failure
    # handling below.
    code = subprocess.call([
        os.path.join(dest_dir, bin_dir, 'pip'), 'install', package,
    ])
    if code:
        print('Installation of %s failed' % package)
        print('Now checking easy_install...')
        create_venv(dest_dir)
        code = subprocess.call([
            os.path.join(dest_dir, bin_dir, 'easy_install'), package,
        ])
        if code:
            print('easy_install also failed')
            add_package(os.path.join(output, 'easy-failure.txt'), package)
        else:
            print('easy_install succeeded')
            add_package(os.path.join(output, 'failure.txt'), package)
        pop_last_item(pending_fn, package)
    else:
        print('Installation of %s succeeded' % package)
        add_package(os.path.join(output, 'success.txt'), package)
        pop_last_item(pending_fn, package)
    rmtree(dest_dir)
def export(self, location): """Export the svn repository at the url to the destination location""" url, rev = self.get_url_rev() rev_options = get_rev_options(url, rev) url = self.remove_auth_from_url(url) logger.info('Exporting svn repository %s to %s', url, location) with indent_log(): if os.path.exists(location): # Subversion doesn't like to check out over an existing # directory --force fixes this, but was only added in svn 1.5 rmtree(location) self.run_command( ['export'] + rev_options + [url, location], show_stdout=False)
def export(self, location): """ Export the Bazaar repository at the url to the destination location """ # Remove the location to make sure Bazaar can export it correctly if os.path.exists(location): rmtree(location) with TempDirectory(kind="export") as temp_dir: self.unpack(temp_dir.path) self.run_command(["export", location], cwd=temp_dir.path, show_stdout=False)
def test_uninstall_editable_with_source_outside_venv(script, tmpdir):
    """
    Test uninstalling editable install from existing source outside the venv.
    """
    try:
        temp = mkdtemp()
        temp_pkg_dir = join(temp, 'pip-test-package')
        _test_uninstall_editable_with_source_outside_venv(
            script,
            tmpdir,
            temp_pkg_dir,
        )
    finally:
        rmtree(temp)
def test_install_curdir(script, data):
    """
    Test installing current directory ('.').
    """
    run_from = data.packages.join("FSPkg")

    # Python 2.4 Windows balks if this exists already
    egg_info = join(run_from, "FSPkg.egg-info")
    if os.path.isdir(egg_info):
        rmtree(egg_info)

    result = script.pip('install', curdir, cwd=run_from, expect_error=False)
    fspkg_folder = script.site_packages / 'fspkg'
    egg_info_folder = (
        script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
    )
    assert fspkg_folder in result.files_created, str(result.stdout)
    assert egg_info_folder in result.files_created, str(result)
def export(self, location): """ Export the Bazaar repository at the url to the destination location """ # Remove the location to make sure Bazaar can export it correctly if os.path.exists(location): rmtree(location) with TempDirectory(kind="export") as temp_dir: self.unpack(temp_dir.path) self.run_command( ['export', location], cwd=temp_dir.path, show_stdout=False, )
def export(self, location, url):
    # type: (str, HiddenText) -> None
    """Export the svn repository at the url to the destination location"""
    url, rev_options = self.get_url_rev_options(url)

    logger.info('Exporting svn repository %s to %s', url, location)
    with indent_log():
        if os.path.exists(location):
            # Subversion doesn't like to check out over an existing
            # directory --force fixes this, but was only added in svn 1.5
            rmtree(location)
        cmd_args = make_command(
            'export', self.get_remote_call_options(),
            rev_options.to_args(), url, location,
        )
def cleanup(self, alldir=False):
    """Cleanup temporary build directory.

    :param bool alldir: Remove all temporary directories. (default: False)
    :rtype: None
    """
    if alldir:
        for tempdir in glob("{0}/tmp*{1}".format(
                os.path.dirname(self.tempdir), SUFFIX)):
            rmtree(tempdir, ignore_errors=True)
    else:
        rmtree(self.tempdir, ignore_errors=True)
def cleanup(self): # type: () -> None """Remove the temporary directory created and reset state""" self._deleted = True if not os.path.exists(self._path): return # Make sure to pass unicode on Python 2 to make the contents also # use unicode, ensuring non-ASCII names and can be represented. # This is only done on Windows because POSIX platforms use bytes # natively for paths, and the bytes-text conversion omission avoids # errors caused by the environment configuring encodings incorrectly. if WINDOWS: rmtree(ensure_text(self._path)) else: rmtree(self._path)
def test_uninstall_editable_with_source_outside_venv(
    script: PipTestEnvironment, tmpdir: Path
) -> None:
    """
    Test uninstalling editable install from existing source outside the venv.
    """
    try:
        temp = mkdtemp()
        temp_pkg_dir = join(temp, "pip-test-package")
        _test_uninstall_editable_with_source_outside_venv(
            script,
            tmpdir,
            temp_pkg_dir,
        )
    finally:
        rmtree(temp)
def test_install_curdir(script, data):
    """
    Test installing current directory ('.').
    """
    run_from = data.packages.join("FSPkg")

    # Python 2.4 Windows balks if this exists already
    egg_info = join(run_from, "FSPkg.egg-info")
    if os.path.isdir(egg_info):
        rmtree(egg_info)

    result = script.pip('install', curdir, cwd=run_from, expect_error=False)
    fspkg_folder = script.site_packages / 'fspkg'
    egg_info_folder = (
        script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
    )
    assert fspkg_folder in result.files_created, str(result.stdout)
    assert egg_info_folder in result.files_created, str(result)
def unpack_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[File]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            downloader,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
def test_uninstall_editable_with_source_outside_venv(script, tmpdir):
    """
    Test uninstalling editable install from existing source outside the venv.
    """
    cache_dir = tmpdir.join("cache")
    try:
        temp = mkdtemp()
        tmpdir = join(temp, 'pip-test-package')
        _test_uninstall_editable_with_source_outside_venv(
            script,
            tmpdir,
            cache_dir,
        )
    finally:
        rmtree(temp)
def export(self, location, url):
    # type: (str, HiddenText) -> None
    """
    Export the Bazaar repository at the url to the destination location
    """
    # Remove the location to make sure Bazaar can export it correctly
    if os.path.exists(location):
        rmtree(location)

    url, rev_options = self.get_url_rev_options(url)
    self.run_command(
        make_command('export', location, url, rev_options.to_args())
    )
def rollback(self): """Rollback the changes previously made by remove().""" if not self._save_dirs: logger.error( "Can't roll back %s; was not uninstalled", self.dist.project_name, ) return False logger.info('Rolling back uninstall of %s', self.dist.project_name) for path, tmp_path in self._moved_paths: logger.debug('Replacing %s', path) if os.path.isdir(tmp_path) and os.path.isdir(path): rmtree(path) renames(tmp_path, path) for pth in self.pth.values(): pth.rollback() for save_dir in self._save_dirs: save_dir.cleanup()
def export(self, location, url):
    # type: (str, HiddenText) -> None
    """
    Export the Bazaar repository at the url to the destination location
    """
    # Remove the location to make sure Bazaar can export it correctly
    if os.path.exists(location):
        rmtree(location)

    url, rev_options = self.get_url_rev_options(url)
    self.run_command(
        make_command('export', location, url, rev_options.to_args()),
        show_stdout=False,
    )
def rollback(self): """Undoes the uninstall by moving stashed files back.""" for p in self._moves: logging.info("Moving to %s\n from %s", *p) for new_path, path in self._moves: try: logger.debug('Replacing %s from %s', new_path, path) if os.path.isfile(new_path): os.unlink(new_path) elif os.path.isdir(new_path): rmtree(new_path) renames(path, new_path) except OSError as ex: logger.error("Failed to restore %s", new_path) logger.debug("Exception: %s", ex) self.commit()
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> Optional[str]
    """Unpack link into location.
    """
    link_path = link.file_path
    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link_path, location)
        return None

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type)

    return from_path
def rollback(self):
    # type: () -> None
    """Undoes the uninstall by moving stashed files back."""
    for p in self._moves:
        logger.info("Moving to %s\n from %s", *p)

    for new_path, path in self._moves:
        try:
            logger.debug('Replacing %s from %s', new_path, path)
            if os.path.isfile(new_path):
                os.unlink(new_path)
            elif os.path.isdir(new_path):
                rmtree(new_path)
            renames(path, new_path)
        except OSError as ex:
            logger.error("Failed to restore %s", new_path)
            logger.debug("Exception: %s", ex)

    self.commit()
def unpack(self, location, url):
    # type: (str, HiddenText) -> None
    """
    Clean up current location and download the url repository
    (and vcs infos) into location

    :param url: the repository URL starting with a vcs prefix.
    """
    if os.path.exists(location):
        rmtree(location)
    self.obtain(location, url=url)
        )
        logger.debug("Created temporary directory: %s", path)
        return path

    def cleanup(self):
        # type: () -> None
        """Remove the temporary directory created and reset state
        """
        self._deleted = True
        if os.path.exists(self._path):
            # Make sure to pass unicode on Python 2 so the contents are
            # also unicode, ensuring non-ASCII names can be represented.
            rmtree(ensure_text(self._path))


class AdjacentTempDirectory(TempDirectory):
    """Helper class that creates a temporary directory adjacent to a real one.

    Attributes:
        original
            The original directory to create a temp directory for.
        path
            After calling create() or entering, contains the full
def test_rmtree_retries(tmpdir, monkeypatch):
    """
    Test pip._internal.utils.rmtree will retry failures
    """
    monkeypatch.setattr(shutil, 'rmtree', Failer(duration=1).call)
    rmtree('foo')
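# A sketch of the kind of wrapper the two retry tests exercise: a thin
# shutil.rmtree front-end that retries transient failures for roughly three
# seconds before giving up. Using the `retrying` package (vendored by pip as
# pip._vendor.retrying) and the 3000 ms / 500 ms values are illustrative
# assumptions, not a statement of the exact implementation.
import shutil

from retrying import retry


@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    # Retried for up to ~3 seconds; after that the last OSError propagates.
    shutil.rmtree(dir, ignore_errors=ignore_errors)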
def check_destination(self, dest, url, rev_options): """ Prepare a location to receive a checkout/clone. Return True if the location is ready for (and requires) a checkout/clone, False otherwise. Args: rev_options: a RevOptions object. """ checkout = True prompt = False rev_display = rev_options.to_display() if os.path.exists(dest): checkout = False if os.path.exists(os.path.join(dest, self.dirname)): existing_url = self.get_url(dest) if self.compare_urls(existing_url, url): logger.debug( '%s in %s exists, and has correct URL (%s)', self.repo_name.title(), display_path(dest), url, ) if not self.is_commit_id_equal(dest, rev_options.rev): logger.info( 'Updating %s %s%s', display_path(dest), self.repo_name, rev_display, ) self.update(dest, rev_options) else: logger.info( 'Skipping because already up-to-date.') else: logger.warning( '%s %s in %s exists with URL %s', self.name, self.repo_name, display_path(dest), existing_url, ) prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b')) else: logger.warning( 'Directory %s already exists, and is not a %s %s.', dest, self.name, self.repo_name, ) prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) if prompt: logger.warning( 'The plan is to install the %s repository %s', self.name, url, ) response = ask_path_exists('What to do? %s' % prompt[0], prompt[1]) if response == 's': logger.info( 'Switching %s %s to %s%s', self.repo_name, display_path(dest), url, rev_display, ) self.switch(dest, url, rev_options) elif response == 'i': # do nothing pass elif response == 'w': logger.warning('Deleting %s', display_path(dest)) rmtree(dest) checkout = True elif response == 'b': dest_dir = backup_dir(dest) logger.warning( 'Backing up %s to %s', display_path(dest), dest_dir, ) shutil.move(dest, dest_dir) checkout = True elif response == 'a': sys.exit(-1) return checkout
def obtain(self, dest, url):
    # type: (str, HiddenText) -> None
    """
    Install or update in editable mode the package represented by this
    VersionControl object.

    :param dest: the repository directory in which to install or update.
    :param url: the repository URL starting with a vcs prefix.
    """
    url, rev_options = self.get_url_rev_options(url)

    if not os.path.exists(dest):
        self.fetch_new(dest, url, rev_options)
        return

    rev_display = rev_options.to_display()
    if self.is_repository_directory(dest):
        existing_url = self.get_remote_url(dest)
        if self.compare_urls(existing_url, url.secret):
            logger.debug(
                "%s in %s exists, and has correct URL (%s)",
                self.repo_name.title(), display_path(dest), url,
            )
            if not self.is_commit_id_equal(dest, rev_options.rev):
                logger.info(
                    "Updating %s %s%s",
                    display_path(dest), self.repo_name, rev_display,
                )
                self.update(dest, url, rev_options)
            else:
                logger.info("Skipping because already up-to-date.")
            return

        logger.warning(
            "%s %s in %s exists with URL %s",
            self.name, self.repo_name, display_path(dest), existing_url,
        )
        prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ",
                  ("s", "i", "w", "b"))
    else:
        logger.warning(
            "Directory %s already exists, and is not a %s %s.",
            dest, self.name, self.repo_name,
        )
        # https://github.com/python/mypy/issues/1174
        prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b"))  # type: ignore

    logger.warning(
        "The plan is to install the %s repository %s",
        self.name, url,
    )
    response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1])

    if response == "a":
        sys.exit(-1)

    if response == "w":
        logger.warning("Deleting %s", display_path(dest))
        rmtree(dest)
        self.fetch_new(dest, url, rev_options)
        return

    if response == "b":
        dest_dir = backup_dir(dest)
        logger.warning(
            "Backing up %s to %s", display_path(dest), dest_dir,
        )
        shutil.move(dest, dest_dir)
        self.fetch_new(dest, url, rev_options)
        return

    # Do nothing if the response is "i".
    if response == "s":
        logger.info(
            "Switching %s %s to %s%s",
            self.repo_name, display_path(dest), url, rev_display,
        )
        self.switch(dest, url, rev_options)
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)
    # If it's a url to a local directory
    if is_dir_url(link):
        target_abspath = os.path.abspath(location)
        target_basename = os.path.basename(target_abspath)
        target_dirname = os.path.dirname(target_abspath)

        def ignore(d, names):
            # type: (str, List[str]) -> List[str]
            skipped = []  # type: List[str]
            if d == link_path:
                # Pulling in those directories can potentially be very slow,
                # exclude the following directories if they appear in the top
                # level dir (and only it).
                # See discussion at https://github.com/pypa/pip/pull/6770
                skipped += ['.tox', '.nox']
            if os.path.abspath(d) == target_dirname:
                # Prevent an infinite recursion if the target is in source.
                # This can happen when TMPDIR is set to ${PWD}/...
                # and we copy PWD to TMPDIR.
                skipped += [target_basename]
            return skipped

        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True, ignore=ignore)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
def cleanup(self): """Remove the temporary directory created and reset state """ if self.path is not None and os.path.exists(self.path): rmtree(self.path) self.path = None
def cleanup(self): # type: () -> None """Remove the temporary directory created and reset state""" self._deleted = True if os.path.exists(self._path): rmtree(self._path)
        ('i', 'w', 'b'))

    logger.warning(
        'The plan is to install the %s repository %s',
        self.name, url,
    )
    response = ask_path_exists('What to do? {}'.format(prompt[0]), prompt[1])

    if response == 'a':
        sys.exit(-1)

    if response == 'w':
        logger.warning('Deleting %s', display_path(dest))
        rmtree(dest)
        self.fetch_new(dest, url, rev_options)
        return

    if response == 'b':
        dest_dir = backup_dir(dest)
        logger.warning(
            'Backing up %s to %s', display_path(dest), dest_dir,
        )
        shutil.move(dest, dest_dir)
        self.fetch_new(dest, url, rev_options)
        return

    # Do nothing if the response is "i".
    if response == 's':
        logger.info(
def check_destination(self, dest, url, rev_options): """ Prepare a city to receive a checkout/clone. Return True if the city is ready for (and requires) a checkout/clone, False otherwise. Args: rev_options: a RevOptions object. """ checkout = True prompt = False rev_display = rev_options.to_display() if os.path.exists(dest): checkout = False if os.path.exists(os.path.join(dest, self.dirname)): existing_url = self.get_url(dest) if self.compare_urls(existing_url, url): logger.debug( '%s in %s exists, and has correct URL (%s)', self.repo_name.title(), display_path(dest), url, ) if not self.is_commit_id_equal(dest, rev_options.rev): logger.info( 'Updating %s %s%s', display_path(dest), self.repo_name, rev_display, ) self.update(dest, rev_options) else: logger.info('Skipping because already up-to-date.') else: logger.warning( '%s %s in %s exists with URL %s', self.name, self.repo_name, display_path(dest), existing_url, ) prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b')) else: logger.warning( 'Directory %s already exists, and is not a %s %s.', dest, self.name, self.repo_name, ) prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) if prompt: logger.warning( 'The plan is to install the %s repository %s', self.name, url, ) response = ask_path_exists('What to do? %s' % prompt[0], prompt[1]) if response == 's': logger.info( 'Switching %s %s to %s%s', self.repo_name, display_path(dest), url, rev_display, ) self.switch(dest, url, rev_options) elif response == 'i': # do nothing pass elif response == 'w': logger.warning('Deleting %s', display_path(dest)) rmtree(dest) checkout = True elif response == 'b': dest_dir = backup_dir(dest) logger.warning( 'Backing up %s to %s', display_path(dest), dest_dir, ) shutil.move(dest, dest_dir) checkout = True elif response == 'a': sys.exit(-1) return checkout
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # Once out-of-tree-builds are no longer supported, could potentially
    # replace the below condition with `assert not link.is_existing_dir`
    # - unpack_url does not need to be called for in-tree-builds.
    #
    # As further cleanup, _copy_source_tree and accompanying tests can
    # be removed.
    if link.is_existing_dir():
        deprecated(
            reason=(
                "pip copied the source tree into a temporary directory "
                "before building it. This is changing so that packages "
                "are built in-place "
                'within the original source tree ("in-tree build").'
            ),
            replacement=None,
            gone_in="21.3",
            feature_flag="in-tree-build",
            issue=7555,
        )
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
def obtain(self, dest):
    # type: (str) -> None
    """
    Install or update in editable mode the package represented by this
    VersionControl object.

    Args:
      dest: the repository directory in which to install or update.
    """
    url, rev_options = self.get_url_rev_options(self.url)

    if not os.path.exists(dest):
        self.fetch_new(dest, url, rev_options)
        return

    rev_display = rev_options.to_display()
    if self.is_repository_directory(dest):
        existing_url = self.get_remote_url(dest)
        if self.compare_urls(existing_url, url):
            logger.debug(
                '%s in %s exists, and has correct URL (%s)',
                self.repo_name.title(), display_path(dest), url,
            )
            if not self.is_commit_id_equal(dest, rev_options.rev):
                logger.info(
                    'Updating %s %s%s',
                    display_path(dest), self.repo_name, rev_display,
                )
                self.update(dest, url, rev_options)
            else:
                logger.info('Skipping because already up-to-date.')
            return

        logger.warning(
            '%s %s in %s exists with URL %s',
            self.name, self.repo_name, display_path(dest), existing_url,
        )
        prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                  ('s', 'i', 'w', 'b'))
    else:
        logger.warning(
            'Directory %s already exists, and is not a %s %s.',
            dest, self.name, self.repo_name,
        )
        # https://github.com/python/mypy/issues/1174
        prompt = (
            '(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
            ('i', 'w', 'b'))

    logger.warning(
        'The plan is to install the %s repository %s',
        self.name, url,
    )
    response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])

    if response == 'a':
        sys.exit(-1)

    if response == 'w':
        logger.warning('Deleting %s', display_path(dest))
        rmtree(dest)
        self.fetch_new(dest, url, rev_options)
        return

    if response == 'b':
        dest_dir = backup_dir(dest)
        logger.warning(
            'Backing up %s to %s', display_path(dest), dest_dir,
        )
        shutil.move(dest, dest_dir)
        self.fetch_new(dest, url, rev_options)
        return

    # Do nothing if the response is "i".
    if response == 's':
        logger.info(
            'Switching %s %s to %s%s',
            self.repo_name, display_path(dest), url, rev_display,
        )
        self.switch(dest, url, rev_options)
def obtain(self, dest): """ Install or update in editable mode the package represented by this VersionControl object. Args: dest: the repository directory in which to install or update. """ url, rev_options = self.get_url_rev_options(self.url) if not os.path.exists(dest): self.fetch_new(dest, url, rev_options) return rev_display = rev_options.to_display() if self.is_repository_directory(dest): existing_url = self.get_url(dest) if self.compare_urls(existing_url, url): logger.debug( '%s in %s exists, and has correct URL (%s)', self.repo_name.title(), display_path(dest), url, ) if not self.is_commit_id_equal(dest, rev_options.rev): logger.info( 'Updating %s %s%s', display_path(dest), self.repo_name, rev_display, ) self.update(dest, url, rev_options) else: logger.info('Skipping because already up-to-date.') return logger.warning( '%s %s in %s exists with URL %s', self.name, self.repo_name, display_path(dest), existing_url, ) prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b')) else: logger.warning( 'Directory %s already exists, and is not a %s %s.', dest, self.name, self.repo_name, ) prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) logger.warning( 'The plan is to install the %s repository %s', self.name, url, ) response = ask_path_exists('What to do? %s' % prompt[0], prompt[1]) if response == 'a': sys.exit(-1) if response == 'w': logger.warning('Deleting %s', display_path(dest)) rmtree(dest) self.fetch_new(dest, url, rev_options) return if response == 'b': dest_dir = backup_dir(dest) logger.warning( 'Backing up %s to %s', display_path(dest), dest_dir, ) shutil.move(dest, dest_dir) self.fetch_new(dest, url, rev_options) return # Do nothing if the response is "i". if response == 's': logger.info( 'Switching %s %s to %s%s', self.repo_name, display_path(dest), url, rev_display, ) self.switch(dest, url, rev_options)