def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from the working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from the given section
    :param sync: Whether to perform the syncing action
    :param packages: Package names to be removed
    :return: None
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    section = "dev" if dev else section or "default"
    if section not in list(project.iter_sections()):
        raise ProjectError(f"No {section} dependencies given in pyproject.toml.")

    deps = project.get_pyproject_dependencies(section)
    stream.echo(
        f"Removing packages from {section} dependencies: "
        + ", ".join(str(stream.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    stream.green(name, bold=True), section
                )
            )
        for i in matched_indexes:
            del deps[i]

    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
def do_import(
    project: Project,
    filename: str,
    format: str | None = None,
    options: Namespace | None = None,
) -> None:
    """Import project metadata from the given file.

    :param project: the project instance
    :param filename: the file name
    :param format: the file format, guessed if not given.
    :param options: other options passed to the CLI.
    """
    if not format:
        for key in FORMATS:
            if FORMATS[key].check_fingerprint(project, filename):
                break
        else:
            raise PdmUsageError(
                "Can't derive the file format automatically, "
                "please specify it via '-f/--format' option."
            )
    else:
        key = format
    if options is None:
        options = Namespace(dev=False, section=None)
    project_data, settings = FORMATS[key].convert(project, filename, options)
    pyproject = project.pyproject or atoml.document()

    if "tool" not in pyproject or "pdm" not in pyproject["tool"]:  # type: ignore
        pyproject.setdefault("tool", {})["pdm"] = atoml.table()

    if "project" not in pyproject:
        pyproject.add("project", atoml.table())  # type: ignore
        pyproject["project"].add(  # type: ignore
            atoml.comment("PEP 621 project metadata")
        )
        pyproject["project"].add(  # type: ignore
            atoml.comment("See https://www.python.org/dev/peps/pep-0621/")
        )

    merge_dictionary(pyproject["project"], project_data)  # type: ignore
    merge_dictionary(pyproject["tool"]["pdm"], settings)  # type: ignore
    pyproject["build-system"] = {
        "requires": ["pdm-pep517"],
        "build-backend": "pdm.pep517.api",
    }
    project.pyproject = cast(dict, pyproject)
    project.write_pyproject()
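# Usage sketch (hypothetical, not part of the source): importing metadata from
# an existing requirements file, letting the format be guessed from the file's
# fingerprint. Assumes `project` is an already-constructed Project instance;
# the format key shown is an assumption.
#
#   do_import(project, "requirements.txt")  # guesses format, updates pyproject.toml
#   do_import(project, "requirements.txt", format="requirements")  # force a format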
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Optional[Iterable[str]] = None,
    requirements: Optional[Dict[str, Dict[str, Requirement]]] = None,
) -> Dict[str, Candidate]:
    """Perform the locking process and update the lockfile.

    :param project: the project instance
    :param strategy: update strategy: reuse/eager/all
    :param tracked_names: required when using the eager strategy
    :param requirements: an optional dictionary of requirements, read from
        pyproject if not given.
    """
    check_project_file(project)
    # TODO: multiple dependency definitions for the same package.
    repository = project.get_repository()
    requirements = requirements or project.all_dependencies
    allow_prereleases = project.allow_prereleases
    requires_python = project.python_requires
    if strategy == "all":
        provider = BaseProvider(repository, requires_python, allow_prereleases)
    else:
        provider_class = (
            ReusePinProvider if strategy == "reuse" else EagerUpdateProvider
        )
        preferred_pins = project.get_locked_candidates("__all__")
        provider = provider_class(
            preferred_pins,
            tracked_names or (),
            repository,
            requires_python,
            allow_prereleases,
        )
    flat_reqs = list(
        itertools.chain(*[deps.values() for _, deps in requirements.items()])
    )
    # TODO: switch reporter at io level.
    with halo.Halo(text="Resolving dependencies", spinner="dots") as spin:
        reporter = SpinnerReporter(flat_reqs, spin)
        mapping, dependencies, summaries = resolve(
            provider, reporter, requirements, requires_python
        )
        data = format_lockfile(mapping, dependencies, summaries)
        spin.succeed("Resolution success")
    project.write_lockfile(data)

    return mapping
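# Usage sketch (hypothetical): `strategy` selects the provider above --
# "all" re-resolves everything, "reuse" keeps existing pins where possible,
# and "eager" also updates the dependencies of `tracked_names`.
#
#   mapping = do_lock(project, strategy="reuse")
#   mapping = do_lock(project, strategy="eager", tracked_names=["requests"])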
def do_use(project: Project, python: str = "", first: bool = False) -> None:
    """Use the specified python version and save in project config.
    The python can be a version string or interpreter path.
    """

    def version_matcher(py_version: PythonInfo) -> bool:
        return project.python_requires.contains(str(py_version.version))

    python = python.strip()

    found_interpreters = list(
        dict.fromkeys(filter(version_matcher, project.find_interpreters(python)))
    )
    if not found_interpreters:
        raise NoPythonVersion("Python interpreter is not found on the system.")
    if first or len(found_interpreters) == 1:
        selected_python = found_interpreters[0]
    else:
        project.core.ui.echo("Please enter the Python interpreter to use")
        for i, py_version in enumerate(found_interpreters):
            project.core.ui.echo(
                f"{i}. {termui.green(py_version.executable)} ({py_version.identifier})"
            )
        selection = click.prompt(
            "Please select:",
            type=click.Choice([str(i) for i in range(len(found_interpreters))]),
            default="0",
            show_choices=False,
        )
        selected_python = found_interpreters[int(selection)]

    old_path = project.config.get("python.path")
    new_path = selected_python.executable
    project.core.ui.echo(
        "Using Python interpreter: {} ({})".format(
            termui.green(str(new_path)),
            selected_python.identifier,
        )
    )
    project.python = selected_python
    if old_path and Path(old_path) != Path(new_path) and not project.is_global:
        project.core.ui.echo(termui.cyan("Updating executable scripts..."))
        project.environment.update_shebangs(new_path)
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from the working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from the given section
    :param sync: Whether to perform the syncing action
    :param packages: Package names to be removed
    :return: None
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    section = "dev" if dev else section or "default"
    toml_section = f"{section}-dependencies" if section != "default" else "dependencies"
    if toml_section not in project.tool_settings:
        raise ProjectError(
            f"No such section {stream.yellow(toml_section)} in pyproject.toml."
        )

    deps = project.tool_settings[toml_section]
    stream.echo(
        f"Removing packages from {section} dependencies: "
        + ", ".join(str(stream.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        matched_name = next(
            filter(
                lambda k: safe_name(k).lower() == safe_name(name).lower(),
                deps.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    stream.green(name, bold=True), section
                )
            )
        del deps[matched_name]

    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
def deprecate_global_option(value) -> Project:
    if value:
        stream.echo(
            stream.red(
                "DEPRECATION: -g/--global with argument is deprecated and will be "
                "removed in v1.5.0, please use '-gp <PROJECT_PATH>' instead."
            ),
            err=True,
        )
    return Project.create_global(value)
def do_remove(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    packages: Sequence[str] = (),
    no_editable: bool = False,
    no_self: bool = False,
) -> None:
    """Remove packages from the working set and pyproject.toml"""
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not group:
        group = "dev" if dev else "default"
    if group not in list(project.iter_groups()):
        raise ProjectError(f"Group {group} does not exist.")

    deps = project.get_pyproject_dependencies(group, dev)
    project.core.ui.echo(
        f"Removing packages from {group} {'dev-' if dev else ''}dependencies: "
        + ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), group
                )
            )
        for i in matched_indexes:
            del deps[i]

    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(
            project,
            groups=(group,),
            default=False,
            clean=True,
            no_editable=no_editable,
            no_self=no_self,
        )
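# Usage sketch (hypothetical): remove a package from the "dev" group without
# syncing the working set afterwards.
#
#   do_remove(project, group="dev", packages=["pytest"], sync=False)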
def handle(self, project: Project, options: argparse.Namespace) -> None:
    result = project.get_repository().search(options.query)
    terminal_width = None
    if sys.stdout.isatty():
        terminal_width = get_terminal_size()[0]
    print_results(
        project.core.ui,
        result,
        project.environment.get_working_set(),
        terminal_width,
    )
def do_init(
    project: Project,
    name: str = "",
    version: str = "",
    license: str = "MIT",
    author: str = "",
    email: str = "",
    python_requires: str = "",
) -> None:
    """Bootstrap the project and create a pyproject.toml"""
    data = {
        "project": {
            "name": name,
            "version": version,
            "description": "",
            "authors": array_of_inline_tables([{"name": author, "email": email}]),
            "license": make_inline_table({"text": license}),
            "urls": {"homepage": ""},
            "dependencies": make_array([], True),
            "dev-dependencies": make_array([], True),
            "requires-python": python_requires,
            "dynamic": ["classifiers"],
        },
        "build-system": {
            "requires": ["pdm-pep517"],
            "build-backend": "pdm.pep517.api",
        },
    }
    if python_requires and python_requires != "*":
        get_specifier(python_requires)
    if not project.pyproject:
        project._pyproject = data
    else:
        project._pyproject["project"] = data["project"]
        project._pyproject["build-system"] = data["build-system"]
    project.write_pyproject()
def do_init(
    project: Project,
    name: str = "",
    version: str = "",
    description: str = "",
    license: str = "MIT",
    author: str = "",
    email: str = "",
    python_requires: str = "",
) -> None:
    """Bootstrap the project and create a pyproject.toml"""
    data = {
        "project": {
            "name": name,
            "version": version,
            "description": description,
            "authors": array_of_inline_tables([{"name": author, "email": email}]),
            "license": make_inline_table({"text": license}),
            "dependencies": make_array([], True),
        },
        "build-system": {
            "requires": ["pdm-pep517>=0.12.0"],
            "build-backend": "pdm.pep517.api",
        },
    }
    if python_requires and python_requires != "*":
        data["project"]["requires-python"] = python_requires  # type: ignore
    if name and version:
        readme = next(project.root.glob("README*"), None)
        if readme is None:
            readme = project.root.joinpath("README.md")
            readme.write_text(f"# {name}\n\n{description}\n")
        data["project"]["readme"] = readme.name  # type: ignore
    get_specifier(python_requires)
    if not project.pyproject:
        project._pyproject = data
    else:
        project._pyproject["project"] = data["project"]  # type: ignore
        project._pyproject["build-system"] = data["build-system"]  # type: ignore
    project.write_pyproject()
    signals.post_init.send(project)
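# Usage sketch (hypothetical): bootstrap a new package; a README.md is created
# only when both a name and a version are given.
#
#   do_init(project, name="demo", version="0.1.0",
#           description="A demo package", python_requires=">=3.7")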
def do_import(project: Project, filename: str, format: Optional[str] = None) -> None:
    """Import project metadata from the given file.

    :param project: the project instance
    :param filename: the file name
    :param format: the file format, guessed if not given.
    """
    if not format:
        for key in FORMATS:
            if FORMATS[key].check_fingerprint(project, filename):
                break
        else:
            raise PdmUsageError(
                "Can't derive the file format automatically, "
                "please specify it via '-f/--format' option."
            )
    else:
        key = format
    project_data, settings = FORMATS[key].convert(project, filename)
    pyproject = project.pyproject or tomlkit.document()

    if "tool" not in pyproject or "pdm" not in pyproject["tool"]:
        setdefault(pyproject, "tool", {})["pdm"] = tomlkit.table()
    pyproject["tool"]["pdm"].update(settings)

    if "project" not in pyproject:
        pyproject.add("project", tomlkit.table())
        pyproject["project"].add(tomlkit.comment("PEP 621 project metadata"))
        pyproject["project"].add(
            tomlkit.comment("See https://www.python.org/dev/peps/pep-0621/")
        )
    pyproject["project"].update(project_data)

    pyproject["build-system"] = {
        "requires": ["pdm-pep517"],
        "build-backend": "pdm.pep517.api",
    }
    project.pyproject = pyproject
    project.write_pyproject()
def check_lockfile(project: Project, raise_not_exist: bool = True) -> str | None:
    """Check if the lock file exists and is up to date. Return the update strategy."""
    if not project.lockfile_file.exists():
        if raise_not_exist:
            raise ProjectError("Lock file does not exist, nothing to install")
        project.core.ui.echo("Lock file does not exist", fg="yellow", err=True)
        return "all"
    elif not project.is_lockfile_compatible():
        project.core.ui.echo(
            "Lock file version is not compatible with PDM, installation may fail",
            fg="yellow",
            err=True,
        )
        return "all"
    elif not project.is_lockfile_hash_match():
        project.core.ui.echo(
            "Lock file hash doesn't match pyproject.toml, packages may be outdated",
            fg="yellow",
            err=True,
        )
        return "reuse"
    return None
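# Usage sketch (hypothetical): the returned strategy feeds straight into
# do_lock -- None means the lockfile is usable as-is.
#
#   strategy = check_lockfile(project, raise_not_exist=False)
#   if strategy:
#       do_lock(project, strategy=strategy)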
def do_init(
    project: Project,
    name: str = "",
    version: str = "",
    license: str = "MIT",
    author: str = "",
    email: str = "",
    python_requires: str = "",
) -> None:
    """Bootstrap the project and create a pyproject.toml"""
    data = {
        "tool": {
            "pdm": {
                "name": name,
                "version": version,
                "description": "",
                "author": f"{author} <{email}>",
                "license": license,
                "homepage": "",
                "dependencies": tomlkit.table(),
                "dev-dependencies": tomlkit.table(),
            }
        },
        "build-system": {"requires": ["pdm"], "build-backend": "pdm.builders.api"},
    }
    if python_requires and python_requires != "*":
        get_specifier(python_requires)
        data["tool"]["pdm"]["python_requires"] = python_requires
    if not project.pyproject:
        project._pyproject = data
    else:
        project._pyproject.setdefault("tool", {})["pdm"] = data["tool"]["pdm"]
        project._pyproject["build-system"] = data["build-system"]
    project.write_pyproject()
    project.environment.write_site_py()
def handle(self, project: Project, options: argparse.Namespace) -> None:
    candidates = {}
    sections = list(options.sections)
    if options.pyproject:
        options.hashes = False
    sections = translate_sections(
        project,
        options.default,
        compatible_dev_flag(project, options.dev),
        options.sections or (),
    )
    for section in sections:
        if options.pyproject:
            candidates.update(project.get_dependencies(section))
        else:
            candidates.update(project.get_locked_candidates(section))
    candidates.pop(project.meta.name and project.meta.project_name, None)
    content = FORMATS[options.format].export(project, candidates.values(), options)
    if options.output:
        Path(options.output).write_text(content)
    else:
        project.core.ui.echo(content)
def handle(self, project: Project, options: argparse.Namespace) -> None:
    if not project.meta and click._compat.isatty(sys.stdout):
        actions.ask_for_import(project)
    if options.lock:
        if not (
            project.lockfile_file.exists() and project.is_lockfile_compatible()
        ):
            project.core.ui.echo(
                "Lock file does not exist or is incompatible, "
                "trying to generate one..."
            )
            actions.do_lock(project, strategy="all")
        elif not project.is_lockfile_hash_match():
            project.core.ui.echo(
                "Lock file hash doesn't match pyproject.toml, regenerating..."
            )
            actions.do_lock(project, strategy="reuse")
    actions.do_sync(
        project,
        sections=options.sections,
        dev=options.dev,
        default=options.default,
        no_editable=options.no_editable,
        no_self=options.no_self,
    )
def handle(self, project: Project, options: argparse.Namespace) -> None:
    if not project.meta and click._compat.isatty(sys.stdout):
        actions.ask_for_import(project)
    if options.lock:
        if not project.lockfile_file.exists():
            stream.echo("Lock file does not exist, trying to generate one...")
            actions.do_lock(project, strategy="all")
        elif not project.is_lockfile_hash_match():
            stream.echo(
                "Lock file hash doesn't match pyproject.toml, regenerating..."
            )
            actions.do_lock(project, strategy="reuse")
    actions.do_sync(
        project, options.sections, options.dev, options.default, False, False
    )
def remove_cache_files(project: Project, pattern: str) -> None:
    if not pattern:
        raise PdmUsageError("Please provide a pattern")

    if pattern == "*":
        files = list(find_files(project.cache_dir, pattern))
    else:
        # Only remove wheel files when a specific pattern is given
        files = list(find_files(project.cache("wheels"), pattern))

    if not files:
        raise PdmUsageError("No matching files found")

    for file in files:
        os.unlink(file)
        project.core.ui.echo(f"Removed {file}", verbosity=termui.DETAIL)
    project.core.ui.echo(f"{len(files)} file{'s' if len(files) > 1 else ''} removed")
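# Usage sketch (hypothetical): glob-style patterns match cached wheel files,
# while "*" wipes the whole cache directory instead.
#
#   remove_cache_files(project, "requests-*")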
def handle(self, project: Project, options: argparse.Namespace) -> None:
    if not options.type:
        cache_parent = project.cache_dir
    elif options.type not in self.CACHE_TYPES:
        raise PdmUsageError(
            f"Invalid cache type {options.type}, should be one of {self.CACHE_TYPES}"
        )
    else:
        cache_parent = project.cache(options.type)

    with project.core.ui.open_spinner(
        f"Clearing {options.type or 'all'} caches..."
    ) as spinner:
        files = list(find_files(cache_parent, "*"))
        for file in files:
            os.unlink(file)
        spinner.succeed(f"{len(files)} file{'s' if len(files) > 1 else ''} removed")
def handle(self, project: Project, options: argparse.Namespace) -> None:
    with project.core.ui.open_spinner("Calculating cache files"):
        output = [
            f"{termui.cyan('Cache Root')}: {project.cache_dir}, "
            f"Total size: {format_size(directory_size(project.cache_dir))}"
        ]
        for name, description in [
            ("hashes", "File Hash Cache"),
            ("http", "HTTP Cache"),
            ("wheels", "Wheels Cache"),
            ("metadata", "Metadata Cache"),
        ]:
            cache_location = project.cache(name)
            files = list(find_files(cache_location, "*"))
            size = directory_size(cache_location)
            output.append(f"  {termui.cyan(description)}: {cache_location}")
            output.append(f"    Files: {len(files)}, Size: {format_size(size)}")

    project.core.ui.echo("\n".join(output))
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution."""
    check_project_file(project)
    if not wheel and not sdist:
        context.io.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    ireq = project.make_self_candidate(False).ireq
    ireq.source_dir = "."
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    if sdist:
        with SdistBuilder(ireq) as builder:
            builder.build(dest)
    if wheel:
        with WheelBuilder(ireq) as builder:
            builder.build(dest)
def resolve_candidates_from_lockfile(
    project: Project, requirements: Iterable[Requirement]
) -> dict[str, Candidate]:
    ui = project.core.ui
    resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"])
    reqs = [
        req
        for req in requirements
        if not req.marker
        or req.marker.evaluate(project.environment.marker_environment)
    ]
    with ui.logging("install-resolve"):
        with ui.open_spinner("Resolving packages from lockfile..."):
            reporter = BaseReporter()
            provider = project.get_provider(for_install=True)
            resolver: Resolver = project.core.resolver_class(provider, reporter)
            mapping, *_ = resolve(
                resolver,
                reqs,
                project.environment.python_requires,
                resolve_max_rounds,
            )
    return mapping
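# Usage sketch (hypothetical): resolve the default group's requirements against
# the pins recorded in the lockfile.
#
#   reqs = list(project.get_dependencies("default").values())
#   candidates = resolve_candidates_from_lockfile(project, reqs)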
def handle(self, project: Project, options: argparse.Namespace) -> None:
    package = options.package
    if package:
        req = parse_requirement(package)
        repository = project.get_repository()
        # reverse the result so that the latest comes first.
        matches = repository.find_candidates(
            req, project.environment.python_requires, True
        )
        latest = next(iter(matches), None)
        if not latest:
            project.core.ui.echo(
                termui.yellow(f"No match found for the package {package!r}"),
                err=True,
            )
            return
        latest_stable = next(filter(filter_stable, matches), None)
        metadata = latest.metadata
    else:
        if not project.meta.name:
            raise PdmUsageError("This project is not a package")
        metadata = project.meta
        package = normalize_name(metadata.name)
        latest_stable = None
    assert metadata
    project_info = ProjectInfo(metadata)
    if any(getattr(options, key, None) for key in self.metadata_keys):
        for key in self.metadata_keys:
            if getattr(options, key, None):
                project.core.ui.echo(project_info[key])
        return
    installed = project.environment.get_working_set().get(package)
    if latest_stable:
        project_info.latest_stable_version = str(latest_stable.version)
    if installed:
        project_info.installed_version = str(installed.version)
    project.core.ui.display_columns(list(project_info.generate_rows()))
def do_sync(
    project: Project,
    *,
    groups: Collection[str] = (),
    dev: bool = True,
    default: bool = True,
    dry_run: bool = False,
    clean: bool = False,
    requirements: list[Requirement] | None = None,
    tracked_names: Collection[str] | None = None,
    no_editable: bool | Collection[str] = False,
    no_self: bool = False,
    reinstall: bool = False,
) -> None:
    """Synchronize project"""
    if requirements is None:
        groups = translate_groups(project, default, dev, groups or ())
        requirements = []
        for group in groups:
            requirements.extend(project.get_dependencies(group).values())
    candidates = resolve_candidates_from_lockfile(project, requirements)
    if tracked_names and dry_run:
        candidates = {
            name: c for name, c in candidates.items() if name in tracked_names
        }
    handler = project.core.synchronizer_class(
        candidates,
        project.environment,
        clean,
        dry_run,
        no_editable=no_editable,
        install_self=not no_self and "default" in groups and bool(project.name),
        use_install_cache=project.config["install.cache"],
        reinstall=reinstall,
    )
    signals.pre_install.send(project, candidates=candidates, dry_run=dry_run)
    handler.synchronize()
    signals.post_install.send(project, candidates=candidates, dry_run=dry_run)
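# Usage sketch (hypothetical): install only the default group, removing
# packages that are no longer required.
#
#   do_sync(project, groups=("default",), dev=False, clean=True)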
def get_latest_version(project: Project) -> str | None:
    """Get the latest version of PDM from PyPI, cached for 7 days"""
    from pdm.utils import get_finder

    cache_key = hashlib.sha224(sys.executable.encode()).hexdigest()
    cache_file = project.cache("self-check") / cache_key
    if cache_file.exists():
        state = json.loads(cache_file.read_text())
    else:
        state = {}
    current_time = datetime.datetime.utcnow().timestamp()
    if (
        state.get("last-check")
        and current_time - state["last-check"] < 60 * 60 * 24 * 7
    ):
        return cast(str, state["latest-version"])
    candidate = get_finder([], project.cache_dir.as_posix()).find_best_candidate("pdm")
    if not candidate.best_candidate:
        return None
    latest_version = str(candidate.best_candidate.version)
    state.update({"latest-version": latest_version, "last-check": current_time})
    cache_file.write_text(json.dumps(state))
    return latest_version
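# Usage sketch (hypothetical): None means no candidate could be found; the
# 7-day cache above makes repeated calls cheap.
#
#   latest = get_latest_version(project)
#   if latest is not None:
#       print(f"Latest PDM version on PyPI: {latest}")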
def handle(self, project: Project, options: argparse.Namespace) -> None:
    package = options.package
    req = parse_requirement(package)
    repository = project.get_repository()
    # reverse the result so that the latest comes first.
    matches = repository.find_candidates(
        req, project.environment.python_requires, True
    )
    latest = next(iter(matches))
    latest_stable = next(filter(filter_stable, matches), None)
    installed = project.environment.get_working_set().get(package)

    metadata = latest.get_metadata()
    if metadata._legacy:
        result = Metadata(dict(metadata._legacy.items()), True)
    else:
        result = Metadata(dict(metadata._data), False)
    if latest_stable:
        result.latest_stable_version = str(latest_stable.version)
    if installed:
        result.installed_version = str(installed.version)
    stream.display_columns(list(result.generate_rows()))
def handle(self, project: Project, options: argparse.Namespace) -> None:
    groups: list[str] = list(options.groups)
    if options.pyproject:
        options.hashes = False
    groups = translate_groups(
        project,
        options.default,
        options.dev,
        options.groups or (),
    )
    requirements: dict[str, Requirement] = {}
    packages: Iterable[Requirement] | Iterable[Candidate]
    for group in groups:
        requirements.update(project.get_dependencies(group))
    if options.pyproject:
        packages = requirements.values()
    else:
        project.core.ui.echo(
            "The exported requirements file is no longer cross-platform. "
            "Using it on other platforms may cause unexpected results.",
            fg="yellow",
            err=True,
        )
        candidates = resolve_candidates_from_lockfile(
            project, requirements.values()
        )
        # Remove candidates with [extras] because the bare candidates are
        # already included
        packages = (
            candidate
            for candidate in candidates.values()
            if not candidate.req.extras
        )

    content = FORMATS[options.format].export(project, packages, options)  # type: ignore
    if options.output:
        Path(options.output).write_text(content)
    else:
        project.core.ui.echo(content)
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution."""
    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    check_project_file(project)
    if not wheel and not sdist:
        stream.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    ireq = project.make_self_candidate(False).ireq
    ireq.source_dir = project.root.as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    if sdist:
        with SdistBuilder(ireq) as builder:
            builder.build(dest)
    if wheel:
        with WheelBuilder(ireq) as builder:
            builder.build(dest)
def handle(self, project: Project, options: argparse.Namespace) -> None:
    rows = []
    for file in find_files(project.cache("wheels"), options.pattern):
        rows.append((format_size(file_size(file)), os.path.basename(file)))
    project.core.ui.display_columns(rows, [">Size", "Filename"])
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: The project instance
    :param dev: whether to update dev dependencies
    :param sections: update specified sections
    :param default: whether to update default dependencies
    :param strategy: update strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: ignore version constraints
    :param packages: specified packages to update
    :return: None
    """
    check_project_file(project)
    if len(packages) > 0 and (len(sections) > 1 or not default):
        raise click.BadParameter(
            "packages argument can't be used together with multiple -s or "
            "--no-default."
        )
    if not packages:
        if unconstrained:
            raise click.BadArgumentUsage(
                "--unconstrained must be used with package names given."
            )
        # pdm update with no packages given, same as 'lock' + 'sync'
        do_lock(project)
        do_sync(project, sections, dev, default, clean=False)
        return
    section = sections[0] if sections else ("dev" if dev else "default")
    all_dependencies = project.all_dependencies
    dependencies = all_dependencies[section]
    updated_deps = {}
    tracked_names = set()
    for name in packages:
        matched_name = next(
            filter(
                lambda k: safe_name(strip_extras(k)[0]).lower()
                == safe_name(name).lower(),
                dependencies.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    context.io.green(name, bold=True), section
                )
            )
        if unconstrained:
            dependencies[matched_name].specifier = get_specifier("")
        tracked_names.add(matched_name)
        updated_deps[matched_name] = dependencies[matched_name]
    context.io.echo(
        "Updating packages: {}.".format(
            ", ".join(context.io.green(v, bold=True) for v in tracked_names)
        )
    )
    resolved = do_lock(project, strategy, tracked_names, all_dependencies)
    do_sync(project, sections=(section,), default=False, clean=False)
    if unconstrained:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps)
        lockfile = project.lockfile
        lockfile["root"]["content_hash"] = "md5:" + project.get_content_hash("md5")
        project.write_lockfile(lockfile, False)
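# Usage sketch (hypothetical): update one package, dropping its current version
# constraint and re-saving a compatible one afterwards.
#
#   do_update(project, packages=["requests"], unconstrained=True, save="compatible")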
def do_use(
    project: Project,
    python: str = "",
    first: bool = False,
    ignore_remembered: bool = False,
) -> None:
    """Use the specified python version and save in project config.
    The python can be a version string or interpreter path.
    """
    if python:
        python = python.strip()

    def version_matcher(py_version: PythonInfo) -> bool:
        return project.python_requires.contains(str(py_version.version), True)

    if not project.cache_dir.exists():
        project.cache_dir.mkdir(parents=True)
    use_cache: JSONFileCache[str, str] = JSONFileCache(
        project.cache_dir / "use_cache.json"
    )
    selected_python: PythonInfo | None = None
    if python and not ignore_remembered:
        if use_cache.has_key(python):
            path = use_cache.get(python)
            cached_python = PythonInfo.from_path(path)
            if not cached_python.valid:
                project.core.ui.echo(
                    f"The last selection is corrupted. {path!r}",
                    fg="red",
                    err=True,
                )
            elif version_matcher(cached_python):
                project.core.ui.echo(
                    "Using the last selection, add '-i' to ignore it.",
                    fg="yellow",
                    err=True,
                )
                selected_python = cached_python

    if selected_python is None:
        found_interpreters = list(dict.fromkeys(project.find_interpreters(python)))
        matching_interpreters = list(filter(version_matcher, found_interpreters))
        if not found_interpreters:
            raise NoPythonVersion("Python interpreter is not found on the system.")
        if not matching_interpreters:
            project.core.ui.echo("Interpreters found but not matching:", err=True)
            for py in found_interpreters:
                project.core.ui.echo(
                    f" - {py.executable} ({py.identifier})", err=True
                )
            raise NoPythonVersion(
                "No python is found meeting the requirement "
                f"{termui.green('python' + str(project.python_requires))}"
            )
        if first or len(matching_interpreters) == 1:
            selected_python = matching_interpreters[0]
        else:
            project.core.ui.echo("Please enter the Python interpreter to use")
            for i, py_version in enumerate(matching_interpreters):
                project.core.ui.echo(
                    f"{i}. {termui.green(str(py_version.executable))} "
                    f"({py_version.identifier})"
                )
            selection = click.prompt(
                "Please select:",
                type=click.Choice(
                    [str(i) for i in range(len(matching_interpreters))]
                ),
                default="0",
                show_choices=False,
            )
            selected_python = matching_interpreters[int(selection)]
        if python:
            use_cache.set(python, selected_python.path.as_posix())

    if not selected_python.valid:
        path = str(selected_python.executable)
        raise InvalidPyVersion(f"Invalid Python interpreter: {path}")
    old_python = project.python if "python.path" in project.config else None
    project.core.ui.echo(
        "Using Python interpreter: {} ({})".format(
            termui.green(str(selected_python.executable)),
            selected_python.identifier,
        )
    )
    project.python = selected_python
    if (
        old_python
        and old_python.path != selected_python.path
        and not project.environment.is_global
    ):
        project.core.ui.echo(termui.cyan("Updating executable scripts..."))
        project.environment.update_shebangs(selected_python.executable.as_posix())
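# Usage sketch (hypothetical): switch the project to the first Python 3.9
# interpreter found, bypassing the remembered selection.
#
#   do_use(project, python="3.9", first=True, ignore_remembered=True)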