def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Show metadata about a single package fetched from the repository.

    Looks up ``options.package`` and prints the project info together with
    the latest, latest-stable and locally installed versions.
    """
    package = options.package
    req = parse_requirement(package)
    repository = project.get_repository()
    # reverse the result so that latest is at first.
    matches = repository.find_candidates(
        req, project.environment.python_requires, True)
    latest = next(iter(matches), None)
    if not latest:
        stream.echo(
            stream.yellow(f"No match found for the package {package!r}"),
            err=True)
        return
    # First candidate passing filter_stable (presumably non-prerelease);
    # matches are newest-first per the comment above.
    latest_stable = next(filter(filter_stable, matches), None)
    installed = project.environment.get_working_set().get(package)
    metadata = latest.get_metadata()
    # NOTE(review): depends on private pip metadata attributes
    # (_legacy/_data); the bool flag tells ProjectInfo which layout it got.
    if metadata._legacy:
        result = ProjectInfo(dict(metadata._legacy.items()), True)
    else:
        result = ProjectInfo(dict(metadata._data), False)
    if latest_stable:
        result.latest_stable_version = str(latest_stable.version)
    if installed:
        result.installed_version = str(installed.version)
    stream.display_columns(list(result.generate_rows()))
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Interactively create or update pyproject.toml for the project.

    Prompts for the interpreter, optional library metadata (name/version),
    license, author info and Python requirement, then delegates to
    ``actions.do_init`` and offers to import from other formats.
    """
    if project.pyproject_file.exists():
        stream.echo("{}".format(
            stream.cyan("pyproject.toml already exists, update it now.")))
    else:
        stream.echo("{}".format(
            stream.cyan("Creating a pyproject.toml for PDM...")))
    python = click.prompt("Please enter the Python interpreter to use",
                          default="", show_default=False)
    actions.do_use(project, python)
    is_library = (click.prompt(
        "Is the project a library that will be upload to PyPI?(y/n)",
        default="n",
    ).lower() == "y")
    if is_library:
        name = click.prompt("Project name", default=project.root.name)
        version = click.prompt("Project version", default="0.1.0")
    else:
        # Applications don't need distribution metadata.
        name, version = "", ""
    license = click.prompt("License(SPDX name)", default="MIT")
    git_user, git_email = get_user_email_from_git()
    author = click.prompt("Author name", default=git_user)
    email = click.prompt("Author email", default=git_email)
    # Only major.minor is used for the default requires-python bound.
    python_version, _ = get_python_version(project.python_executable, True, 2)
    python_requires = click.prompt("Python requires('*' to allow any)",
                                   default=f">={python_version}")
    actions.do_init(project, name, version, license, author, email,
                    python_requires)
    actions.ask_for_import(project)
def _add_file(self, wheel, full_path, rel_path=None):
    """Add the file at *full_path* to the wheel zip under *rel_path*.

    Also appends ``(rel_path, sha256_digest, size)`` to ``self._records``
    for later RECORD generation.

    :param wheel: an open ``zipfile.ZipFile`` being written
    :param full_path: path of the file on disk
    :param rel_path: archive path; defaults to *full_path*
    """
    if not rel_path:
        rel_path = full_path
    if os.sep != "/":
        # We always want to have /-separated paths in the zip file and in RECORD
        rel_path = rel_path.replace(os.sep, "/")
    stream.echo(f" - Adding: {rel_path}", verbosity=stream.DETAIL)
    zinfo = zipfile.ZipInfo(rel_path)
    # NOTE(review): the original comment claimed permission bits were
    # normalized to 755/644, but no such normalization was performed; only
    # the directory flag is set here.
    st_mode = os.stat(full_path).st_mode
    if stat.S_ISDIR(st_mode):
        zinfo.external_attr |= 0x10  # MS-DOS directory flag
    # Read the file once and hash/write the same bytes, instead of the
    # original two passes (chunked hash loop, then seek(0) + full re-read).
    with open(full_path, "rb") as src:
        data = src.read()
    hashsum = hashlib.sha256(data)
    wheel.writestr(zinfo, data, compress_type=zipfile.ZIP_DEFLATED)
    hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=")
    # len(data) equals the on-disk size for regular files.
    self._records.append((rel_path, hash_digest, str(len(data))))
def set_env_in_reg(env_name: str, value: str) -> None:
    """Manipulate the WinReg, and add value to the environment variable if
    exists or create new.

    :param env_name: name of the environment variable, e.g. ``PATH``
    :param value: the path entry to append; skipped if already present
    """
    import winreg

    value = os.path.normcase(value)
    with winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) as root:
        with winreg.OpenKey(root, "Environment", 0,
                            winreg.KEY_ALL_ACCESS) as env_key:
            try:
                old_value, type_ = winreg.QueryValueEx(env_key, env_name)
                # Nothing to do if the entry is already on the variable.
                if value in [
                    os.path.normcase(item)
                    for item in old_value.split(os.pathsep)
                ]:
                    return
            except FileNotFoundError:
                # Variable doesn't exist yet: create it as an expandable string.
                old_value, type_ = "", winreg.REG_EXPAND_SZ
            # BUG FIX: str.join() takes a single iterable argument; the
            # original ``";".join(old_value, value)`` raised TypeError.
            new_value = ";".join([old_value, value]) if old_value else value
            try:
                winreg.SetValueEx(env_key, env_name, 0, type_, new_value)
            except PermissionError:
                stream.echo(
                    stream.red(
                        "Permission denied, please run the terminal as administrator."
                    ),
                    err=True,
                )
                sys.exit(1)
    stream.echo(
        stream.green("The environment variable has been saved, "
                     "please restart the session to take effect."))
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Export dependencies in the requested format.

    Candidates come from pyproject.toml (``--pyproject``) or from the lock
    file, optionally spanning default/dev/extra sections, and are written
    to ``--output`` or stdout.
    """
    candidates = []
    if options.pyproject:
        # Hashes only make sense for pinned (locked) candidates.
        options.hashes = False
    if options.default:
        # Don't include self candidate
        if options.pyproject:
            temp = project.dependencies
        else:
            temp = project.get_locked_candidates()
            temp.pop(project.meta.name, None)
        candidates.extend(temp.values())
    if options.dev:
        if options.pyproject:
            candidates.extend(project.dev_dependencies.values())
        else:
            candidates.extend(project.get_locked_candidates("dev").values())
    for section in options.sections:
        if options.pyproject:
            candidates.extend(project.get_dependencies(section).values())
        else:
            candidates.extend(project.get_locked_candidates(section).values())
    content = FORMATS[options.format].export(project, candidates, options)
    if options.output:
        Path(options.output).write_text(content)
    else:
        stream.echo(content)
def main(self, args=None, prog_name=None, obj=None, **extra):
    """The main entry function

    Parses CLI arguments, resolves the target project, then dispatches to
    the selected sub-command handler.  Unhandled exceptions are reported
    briefly unless verbosity is raised, in which case they are re-raised.
    """
    self.init_parser()
    self.load_plugins()
    self.parser.set_defaults(global_project=None)
    options = self.parser.parse_args(args or None)
    stream.set_verbosity(options.verbose)
    if obj is not None:
        # An explicitly supplied project overrides auto-discovery.
        options.project = obj
    if options.global_project:
        options.project = options.global_project
    if not getattr(options, "project", None):
        options.project = self.project_class()
    # Add reverse reference for core object
    options.project.core = self
    try:
        f = options.handler
    except AttributeError:
        # No sub-command given: print help and fail.
        self.parser.print_help()
        sys.exit(1)
    else:
        try:
            f(options.project, options)
        except Exception:
            # NOTE(review): this local shadows the ``traceback`` module
            # inside the handler.
            etype, err, traceback = sys.exc_info()
            if stream.verbosity > stream.NORMAL:
                raise err.with_traceback(traceback)
            stream.echo("[{}]: {}".format(etype.__name__, err), err=True)
            sys.exit(1)
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Interactively create or update pyproject.toml for the project.

    Earlier variant of the init handler: always prompts for name/version
    and derives the default requires-python from the chosen interpreter.
    """
    if project.pyproject_file.exists():
        stream.echo("{}".format(
            stream.cyan("pyproject.toml already exists, update it now.")))
    else:
        stream.echo("{}".format(
            stream.cyan("Creating a pyproject.toml for PDM...")))
    python = click.prompt("Please enter the Python interpreter to use",
                          default="", show_default=False)
    actions.do_use(project, python)
    name = click.prompt("Project name", default=project.root.name)
    version = click.prompt("Project version", default="0.0.0")
    license = click.prompt("License(SPDX name)", default="MIT")
    git_user, git_email = get_user_email_from_git()
    author = click.prompt("Author name", default=git_user)
    email = click.prompt("Author email", default=git_email)
    # major.minor of the selected interpreter, e.g. "3.8".
    python_version = ".".join(
        map(str,
            get_python_version(project.environment.python_executable)[:2]))
    python_requires = click.prompt("Python requires('*' to allow any)",
                                   default=f">={python_version}")
    actions.do_init(project, name, version, license, author, email,
                    python_requires)
    actions.ask_for_import(project)
def do_list(project: Project, graph: bool = False, reverse: bool = False) -> None:
    """Display a list of packages installed in the local packages directory.

    :param project: the project instance.
    :param graph: whether to display a graph.
    :param reverse: whether to display reverse graph.
    :raises PdmUsageError: if --reverse is given without --graph.
    """
    from pdm.cli.utils import (
        build_dependency_graph,
        format_dependency_graph,
        format_reverse_dependency_graph,
    )

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if reverse and not graph:
        raise PdmUsageError("--reverse must be used with --graph")
    if graph:
        # The environment must be activated so dependency metadata resolves
        # against the project's packages.
        with project.environment.activate():
            dep_graph = build_dependency_graph(working_set)
            if reverse:
                graph = format_reverse_dependency_graph(project, dep_graph)
            else:
                graph = format_dependency_graph(project, dep_graph)
        stream.echo(graph)
    else:
        # Flat two-column listing of the working set.
        rows = [(stream.green(k, bold=True), format_dist(v))
                for k, v in sorted(working_set.items())]
        stream.display_columns(rows, ["Package", "Version"])
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution.

    :param project: the project instance
    :param sdist: build a source distribution
    :param wheel: build a wheel
    :param dest: output directory, resolved against the project root if relative
    :param clean: remove the output directory first
    :raises ProjectError: when invoked on the global project
    """
    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    check_project_file(project)
    if not (sdist or wheel):
        stream.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    if not os.path.isabs(dest):
        dest = project.root.joinpath(dest).as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    with stream.logging("build"), EnvBuilder(project.root,
                                             project.environment) as builder:
        if sdist:
            stream.echo("Building sdist...")
            artifact = builder.build_sdist(dest)
            stream.echo(f"Built sdist at {artifact}")
        if wheel:
            stream.echo("Building wheel...")
            artifact = builder.build_wheel(dest)
            stream.echo(f"Built wheel at {artifact}")
def write_pyproject(self, show_message: bool = True) -> None:
    """Atomically persist the in-memory pyproject data to pyproject.toml.

    Invalidates the cached document so the next access re-reads the file.
    """
    content = tomlkit.dumps(self.pyproject)
    with atomic_open_for_write(self.pyproject_file.as_posix(),
                               encoding="utf-8") as fp:
        fp.write(content)
    if show_message:
        stream.echo("Changes are written to pyproject.toml.")
    self._pyproject = None
def do_add(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to dev dependencies section
    :param section: specify section to be add to
    :param sync: whether to install added packages
    :param save: save strategy
    :param strategy: update strategy
    :param editables: editable requirements
    :param packages: normal requirements
    :raises PdmUsageError: when neither packages nor editables are given
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError("Must specify at least one package or editable package.")
    section = "dev" if dev else section or "default"
    tracked_names = set()
    requirements = {}
    # Parse editables (second arg True) and normal packages alike, tagging
    # each requirement with its target section.
    for r in [parse_requirement(line, True) for line in editables] + [
        parse_requirement(line) for line in packages
    ]:
        key = r.identify()
        r.from_section = section
        tracked_names.add(key)
        requirements[key] = r
    stream.echo(
        f"Adding packages to {section} dependencies: "
        + ", ".join(stream.green(key or "", bold=True) for key in requirements)
    )
    all_dependencies = project.all_dependencies
    all_dependencies.setdefault(section, {}).update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)
    # Update dependency specifiers and lockfile hash.
    save_version_specifiers(requirements, resolved, save)
    project.add_dependencies(requirements)
    # Re-write the lockfile quietly so its hash matches the updated
    # pyproject.toml.
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)
    if sync:
        do_sync(
            project,
            sections=(section,),
            dev=False,
            default=False,
            dry_run=False,
            clean=False,
        )
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Optional[Iterable[str]] = None,
    requirements: Optional[List[Requirement]] = None,
) -> Dict[str, Candidate]:
    """Performs the locking process and update lockfile.

    :param project: the project instance
    :param strategy: update strategy: reuse/eager/all
    :param tracked_names: required when using eager strategy
    :param requirements: An optional dictionary of requirements, read from pyproject
        if not given.
    :returns: mapping of identifier to resolved candidate
    :raises ResolutionTooDeep: resolver exceeded the configured round limit
    :raises ResolutionImpossible: conflicting requirements
    """
    check_project_file(project)
    # TODO: multiple dependency definitions for the same package.
    provider = project.get_provider(strategy, tracked_names)
    if not requirements:
        requirements = [
            r for deps in project.all_dependencies.values() for r in deps.values()
        ]
    resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"])
    with stream.logging("lock"):
        # The context managers are nested to ensure the spinner is stopped before
        # any message is thrown to the output.
        with stream.open_spinner(title="Resolving dependencies",
                                 spinner="dots") as spin:
            reporter = project.get_reporter(requirements, tracked_names, spin)
            resolver = project.core.resolver_class(provider, reporter)
            try:
                mapping, dependencies, summaries = resolve(
                    resolver,
                    requirements,
                    project.environment.python_requires,
                    resolve_max_rounds,
                )
            except ResolutionTooDeep:
                spin.fail(f"{LOCK} Lock failed")
                stream.echo(
                    "The dependency resolution exceeds the maximum loop depth of "
                    f"{resolve_max_rounds}, there may be some circular dependencies "
                    "in your project. Try to solve them or increase the "
                    f"{stream.green('`strategy.resolve_max_rounds`')} config.",
                    err=True,
                )
                raise
            except ResolutionImpossible as err:
                spin.fail(f"{LOCK} Lock failed")
                stream.echo(format_resolution_impossible(err), err=True)
                raise
            else:
                data = format_lockfile(mapping, dependencies, summaries)
                spin.succeed(f"{LOCK} Lock successful")
    project.write_lockfile(data)
    return mapping
def deprecate_global_option(value) -> Project:
    """Argparse type callback for the legacy ``-g <PROJECT_PATH>`` form.

    Warns that the argument form is deprecated, then builds the global
    project from the given value.
    """
    if value:
        stream.echo(
            stream.red(
                "DEPRECATION: -g/--global with argument is deprecated and will be "
                "removed in v1.5.0, please use '-gp <PROJECT_PATH>' instead."),
            err=True,
        )
    # NOTE(review): presumably Project.create_global falls back to a default
    # path for a falsy value — confirm against its definition.
    return Project.create_global(value)
def write_lockfile(self, toml_data: Container, show_message: bool = True) -> None:
    """Atomically dump *toml_data* to pdm.lock, refreshing its metadata
    table first, and invalidate the cached lockfile document.
    """
    toml_data["metadata"].update(self.get_lock_metadata())
    text = tomlkit.dumps(toml_data)
    with atomic_open_for_write(self.lockfile_file) as out:
        out.write(text)
    if show_message:
        stream.echo(f"Changes are written to {stream.green('pdm.lock')}.")
    self._lockfile = None
def write_lockfile(self, toml_data: Container, show_message: bool = True) -> None:
    """Atomically dump *toml_data* to pdm.lock after attaching the project
    metadata under the ``root`` key; invalidates the cached document.
    """
    toml_data.update({"root": self.get_project_metadata()})
    text = tomlkit.dumps(toml_data)
    with atomic_open_for_write(self.lockfile_file) as out:
        out.write(text)
    if show_message:
        stream.echo("Changes are written to pdm.lock.")
    self._lockfile = None
def _show_headline(self, packages: Dict[str, List[str]]) -> None:
    """Print a one-line colored summary of pending add/update/remove
    operations, e.g. "2 to add, 1 to update, 0 to remove".
    """
    counts = {kind: len(packages[kind]) for kind in ("add", "update", "remove")}
    parts = [
        stream.bold("Synchronizing working set with lock file:"),
        f"{stream.green(str(counts['add']))} to add,",
        f"{stream.yellow(str(counts['update']))} to update,",
        f"{stream.red(str(counts['remove']))} to remove",
    ]
    stream.echo(" ".join(parts) + "\n")
def update_project_egg_info(self):
    """Rebuild the project's own ``.egg-info`` directory, but only when the
    project has a name and the directory already exists.
    """
    meta = self.environment.project.meta
    if not meta.name:
        return
    canonical_name = meta.project_name.lower().replace("-", "_")
    egg_info_dir = self.environment.project.root / f"{canonical_name}.egg-info"
    if not egg_info_dir.exists():
        return
    stream.echo("Updating the project's egg info...")
    with EnvBuilder(self.environment.project.root,
                    self.environment) as builder:
        builder.build_egg_info(str(builder.src_dir))
def python_executable(self) -> str:
    """Get the Python interpreter path.

    Resolution order: saved project config -> virtualenv -> pyenv shim ->
    ``python`` on PATH; if the found version fails requires-python, scan
    all interpreters, finally falling back to ``sys.executable``.
    :raises NoPythonVersion: when nothing satisfies requires-python.
    """
    config = self.config
    if self.project_config.get(
            "python.path") and not os.getenv("PDM_IGNORE_SAVED_PYTHON"):
        return self.project_config["python.path"]
    path = None
    if config["use_venv"]:
        path = get_venv_python(self.root)
        if path:
            stream.echo(
                f"Virtualenv interpreter {stream.green(path)} is detected.",
                err=True,
                verbosity=stream.DETAIL,
            )
    if not path and PYENV_INSTALLED and config.get("python.use_pyenv", True):
        path = Path(PYENV_ROOT, "shims", "python").as_posix()
    if not path:
        path = shutil.which("python")
    version = None
    if path:
        try:
            version, _ = get_python_version(path, True)
        except (FileNotFoundError, subprocess.CalledProcessError):
            # Broken or missing interpreter: fall through to the scan below.
            version = None
    if not version or not self.python_requires.contains(version):
        finder = Finder()
        for python in finder.find_all_python_versions():
            version, _ = get_python_version(python.path.as_posix(), True)
            if self.python_requires.contains(version):
                path = python.path.as_posix()
                break
        else:
            version = ".".join(map(str, sys.version_info[:3]))
            if self.python_requires.contains(version):
                path = sys.executable
    # NOTE(review): if the scan and sys.executable both fail the
    # requirement, ``path`` may still hold the earlier non-matching
    # interpreter and gets returned — confirm whether that is intended.
    if path:
        if os.path.normcase(path) == os.path.normcase(sys.executable):
            # Refer to the base interpreter to allow for venvs
            path = getattr(sys, "_base_executable", sys.executable)
        stream.echo(
            "Using Python interpreter: {} ({})".format(
                stream.green(path), version),
            err=True,
        )
        if not os.getenv("PDM_IGNORE_SAVED_PYTHON"):
            self.project_config["python.path"] = Path(path).as_posix()
        return path
    raise NoPythonVersion(
        "No Python that satisfies {} is found on the system.".format(
            self.python_requires))
def _run_command(
    project: Project,
    args: Union[List[str], str],
    shell: bool = False,
    env: Optional[Dict[str, str]] = None,
    env_file: Optional[str] = None,
) -> None:
    """Run a command inside the project environment.

    Sets up PYTHONPATH (PEP 582 loader), PATH, optional extra env vars and
    a .env file, then either runs *args* through the system shell or
    resolves the executable and hands over the process.  Does not return:
    it either ``sys.exit``s with the command's status or ``os.execv``s.
    """
    if "PYTHONPATH" in os.environ:
        pythonpath = os.pathsep.join(
            [PEP582_PATH, os.getenv("PYTHONPATH")])
    else:
        pythonpath = PEP582_PATH
    project_env = project.environment
    this_path = project_env.get_paths()["scripts"]
    python_root = os.path.dirname(project.python_executable)
    # Put interpreter dir and project scripts dir ahead of the current PATH.
    new_path = os.pathsep.join(
        [python_root, this_path, os.getenv("PATH", "")])
    os.environ.update({"PYTHONPATH": pythonpath, "PATH": new_path})
    if project_env.packages_path:
        os.environ.update(
            {"PEP582_PACKAGES": str(project_env.packages_path)})
    if env_file:
        import dotenv

        stream.echo(f"Loading .env file: {stream.green(env_file)}", err=True)
        dotenv.load_dotenv(project.root.joinpath(env_file).as_posix(),
                           override=True)
    if env:
        os.environ.update(env)
    if shell:
        # *args* is a single command string in shell mode.
        sys.exit(subprocess.call(os.path.expandvars(args), shell=True))
    command, *args = args
    expanded_command = project_env.which(command)
    if not expanded_command:
        raise PdmUsageError("Command {} is not found on your PATH.".format(
            stream.green(f"'{command}'")))
    expanded_command = os.path.expanduser(
        os.path.expandvars(expanded_command))
    expanded_args = [
        os.path.expandvars(arg) for arg in [expanded_command] + args
    ]
    if os.name == "nt" or "CI" in os.environ:
        # In order to make sure pytest is playing well,
        # don't hand over the process under a testing environment.
        sys.exit(subprocess.call(expanded_args))
    else:
        os.execv(expanded_command, expanded_args)
def __delitem__(self, key) -> None:
    """Delete a config key from memory and the config file (missing keys
    are ignored) and persist the change.

    Warns when an environment variable still shadows the removed value.
    """
    self._data.pop(key, None)
    try:
        del self._file_data[key]
    except KeyError:
        # Key was never written to the file — nothing to warn about.
        pass
    else:
        env_var = self._config_map[key].env_var
        if env_var is not None and env_var in os.environ:
            stream.echo(
                stream.yellow(
                    "WARNING: the config is shadowed by env var '{}', "
                    "set value won't take effect.".format(env_var)))
    self._save_config()
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Install project dependencies, (re)locking first when needed."""
    if not project.meta and click._compat.isatty(sys.stdout):
        # Interactive terminal without project metadata: offer to import
        # from other formats first.
        actions.ask_for_import(project)
    if options.lock:
        if not project.lockfile_file.exists():
            stream.echo(
                "Lock file does not exist, trying to generate one...")
            actions.do_lock(project, strategy="all")
        elif not project.is_lockfile_hash_match():
            # pyproject.toml changed since the last lock: re-lock reusing
            # existing pins where possible.
            stream.echo(
                "Lock file hash doesn't match pyproject.toml, regenerating..."
            )
            actions.do_lock(project, strategy="reuse")
    actions.do_sync(project, options.sections, options.dev, options.default,
                    False, False)
def _write_to_zip(self, wheel, rel_path):
    """Yield a text buffer; on exit, write its contents into the wheel at
    *rel_path* and record the entry for RECORD.

    NOTE(review): a generator used as a context manager — presumably
    decorated with ``@contextmanager`` at the definition site outside this
    view; confirm.
    """
    sio = StringIO()
    yield sio
    # The default is a fixed timestamp rather than the current time, so
    # that building a wheel twice on the same computer can automatically
    # give you the exact same result.
    date_time = (2016, 1, 1, 0, 0, 0)
    zi = zipfile.ZipInfo(rel_path, date_time)
    b = sio.getvalue().encode("utf-8")
    hashsum = hashlib.sha256(b)
    hash_digest = urlsafe_b64encode(hashsum.digest()).decode("ascii").rstrip("=")
    wheel.writestr(zi, b, compress_type=zipfile.ZIP_DEFLATED)
    stream.echo(f" - Adding: {rel_path}", verbosity=stream.DETAIL)
    self._records.append((rel_path, hash_digest, str(len(b))))
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether perform syncing action
    :param packages: Package names to be removed
    :return: None
    :raises PdmUsageError: when no packages are given
    :raises ProjectError: unknown section or package not in section
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    section = "dev" if dev else section or "default"
    if section not in list(project.iter_sections()):
        raise ProjectError(f"No {section} dependencies given in pyproject.toml.")
    deps = project.get_pyproject_dependencies(section)
    stream.echo(
        f"Removing packages from {section} dependencies: "
        + ", ".join(str(stream.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        # Delete from highest index first so earlier indexes stay valid.
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    stream.green(name, bold=True), section
                )
            )
        for i in matched_indexes:
            del deps[i]
    project.write_pyproject()
    # Re-lock reusing pins, then optionally uninstall removed packages.
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    Earlier variant operating on the TOML table keyed by package name.

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether perform syncing action
    :param packages: Package names to be removed
    :return: None
    :raises PdmUsageError: when no packages are given
    :raises ProjectError: unknown section or package not in section
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    section = "dev" if dev else section or "default"
    toml_section = f"{section}-dependencies" if section != "default" else "dependencies"
    if toml_section not in project.tool_settings:
        raise ProjectError(
            f"No such section {stream.yellow(toml_section)} in pyproject.toml."
        )
    deps = project.tool_settings[toml_section]
    stream.echo(f"Removing packages from {section} dependencies: " + ", ".join(
        str(stream.green(name, bold=True)) for name in packages))
    for name in packages:
        # Case-insensitive match on the normalized (safe) project name.
        matched_name = next(
            filter(
                lambda k: safe_name(k).lower() == safe_name(name).lower(),
                deps.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                stream.green(name, bold=True), section))
        del deps[matched_name]
    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section, ), default=False, clean=True)
def do_list(project: Project, graph: bool = False) -> None:
    """Display a list of packages installed in the local packages directory.

    :param project: the project instance.
    :param graph: whether to display a graph.
    """
    from pdm.cli.utils import build_dependency_graph, format_dependency_graph

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if not graph:
        # Flat two-column listing, sorted by package name.
        rows = [
            (stream.green(name, bold=True), format_dist(dist))
            for name, dist in sorted(working_set.items())
        ]
        stream.display_columns(rows, ["Package", "Version"])
        return
    with project.environment.activate():
        dep_graph = build_dependency_graph(working_set)
        stream.echo(format_dependency_graph(dep_graph))
def __setitem__(self, key: str, value: Any) -> None:
    """Coerce and store a config value in memory and in the config file.

    :raises NoConfigError: for an unknown key
    :raises ValueError: for a global-only key set on a project config
    """
    if key not in self._config_map:
        raise NoConfigError(key)
    config_item = self._config_map[key]
    if not self.is_global and config_item.global_only:
        raise ValueError(
            f"Config item '{key}' is not allowed to set in project config."
        )
    value = config_item.coerce(value)
    env_var = config_item.env_var
    if env_var is not None and env_var in os.environ:
        # The env var wins over file/in-memory config at read time.
        stream.echo(
            stream.yellow(
                "WARNING: the config is shadowed by env var '{}', "
                "the value set won't take effect.".format(env_var)))
    self._data[key] = value
    self._file_data[key] = value
    self._save_config()
def do_use(project: Project, python: str, first: bool = False) -> None:
    """Use the specified python version and save in project config.
    The python can be a version string or interpreter path.

    :param project: the project instance
    :param python: version string like ``3.8`` or an interpreter path/name
    :param first: pick the first match without prompting
    :raises NoPythonVersion: no matching interpreter, or it fails
        the project's requires-python
    """
    if python and not all(c.isdigit() for c in python.split(".")):
        # Argument looks like a path or command name.
        if Path(python).exists():
            python_path = Path(python).absolute().as_posix()
        else:
            python_path = shutil.which(python)
        if not python_path:
            raise NoPythonVersion(f"{python} is not a valid Python.")
        python_version = get_python_version(python_path, True)
    else:
        # Argument is a (possibly partial or empty) version number: search
        # the system for matching interpreters.
        finder = pythonfinder.Finder()
        pythons = []
        args = [int(v) for v in python.split(".") if v != ""]
        # Fix: the original used enumerate() here but never used the index.
        for entry in finder.find_all_python_versions(*args):
            python_version = get_python_version(entry.path.as_posix(), True)
            pythons.append((entry.path.as_posix(), python_version))
        if not pythons:
            raise NoPythonVersion(
                f"Python {python} is not available on the system.")
        if not first and len(pythons) > 1:
            for i, (path, python_version) in enumerate(pythons):
                stream.echo(f"{i}. {stream.green(path)} ({python_version})")
            selection = click.prompt(
                "Please select:",
                type=click.Choice([str(i) for i in range(len(pythons))]),
                default="0",
                show_choices=False,
            )
        else:
            selection = 0
        python_path, python_version = pythons[int(selection)]
    if not project.python_requires.contains(python_version):
        raise NoPythonVersion("The target Python version {} doesn't satisfy "
                              "the Python requirement: {}".format(
                                  python_version, project.python_requires))
    stream.echo("Using Python interpreter: {} ({})".format(
        stream.green(python_path), python_version))
    project.project_config["python.path"] = Path(python_path).as_posix()
def main(self, args=None, prog_name=None, obj=None, **extra):
    """The main entry function

    Newer variant: additionally handles ``--pep582``, migrates legacy
    pyproject layouts, and runs handlers inside pip's global tempdir
    manager.  Errors are shown briefly unless verbosity is raised.
    """
    from pdm.models.pip_shims import global_tempdir_manager

    self.init_parser()
    self.load_plugins()
    self.parser.set_defaults(global_project=None)
    options = self.parser.parse_args(args or None)
    stream.set_verbosity(options.verbose)
    if obj is not None:
        # Explicitly supplied project overrides discovery.
        options.project = obj
    if options.global_project:
        options.project = options.global_project
    if options.pep582:
        # Print shell integration snippet and exit.
        print_pep582_command(options.pep582)
        sys.exit(0)
    if not getattr(options, "project", None):
        options.project = self.project_class()
    # Add reverse reference for core object
    options.project.core = self
    migrate_pyproject(options.project)
    try:
        f = options.handler
    except AttributeError:
        self.parser.print_help()
        sys.exit(1)
    else:
        try:
            with global_tempdir_manager():
                f(options.project, options)
        except Exception:
            # NOTE(review): this local shadows the ``traceback`` module.
            etype, err, traceback = sys.exc_info()
            if stream.verbosity > stream.NORMAL:
                raise err.with_traceback(traceback)
            stream.echo(f"{stream.red('[' + etype.__name__ + ']')}: {err}",
                        err=True)
            stream.echo(
                stream.yellow("Add '-v' to see the detailed traceback"))
            sys.exit(1)
def search(self, query: str) -> SearchResult:
    """Search the configured index for *query* and parse the result page.

    Falls back to the default index's ``/search`` endpoint when the
    configured one responds 404.
    :raises requests.HTTPError: on a non-OK final response.
    """
    pypi_simple = self.sources[0]["url"].rstrip("/")
    results = []
    # Derive the search URL from the simple-index URL.
    if pypi_simple.endswith("/simple"):
        search_url = pypi_simple[:-6] + "search"
    else:
        search_url = pypi_simple + "/search"
    with self.environment.get_finder() as finder:
        session = finder.session
        resp = session.get(search_url, params={"q": query})
        if resp.status_code == 404:
            stream.echo(
                stream.yellow(
                    f"{pypi_simple!r} doesn't support '/search' endpoint, fallback "
                    f"to {self.DEFAULT_INDEX_URL!r} now.\n"
                    "This may take longer depending on your network condition."
                ),
                err=True,
            )
            resp = session.get(
                f"{self.DEFAULT_INDEX_URL}/search", params={"q": query}
            )
    resp.raise_for_status()
    # Scrape the PyPI-style HTML result page.
    content = parse(resp.content, namespaceHTMLElements=False)
    for result in content.findall(".//*[@class='package-snippet']"):
        name = result.find("h3/*[@class='package-snippet__name']").text
        version = result.find("h3/*[@class='package-snippet__version']").text
        if not name or not version:
            continue
        description = result.find("p[@class='package-snippet__description']").text
        if not description:
            description = ""
        result = Package(name, version, description)
        results.append(result)
    return results
def python_executable(self) -> str:
    """Get the Python interpreter path.

    Older variant: saved config -> pyenv shim -> activated virtualenv ->
    ``python`` on PATH; scans all interpreters when the found version fails
    requires-python, finally trying ``sys.executable``.
    :raises NoPythonVersion: when nothing satisfies requires-python.
    """
    config = self.project.config
    if config.get("python.path"):
        return config["python.path"]
    if PYENV_INSTALLED and config.get("python.use_pyenv", True):
        return os.path.join(PYENV_ROOT, "shims", "python")
    if "VIRTUAL_ENV" in os.environ:
        stream.echo(
            "An activated virtualenv is detected, reuse the interpreter now.",
            err=True,
            verbosity=stream.DETAIL,
        )
        return get_venv_python(self.project.root)
    # First try what `python` refers to.
    path = shutil.which("python")
    version = None
    if path:
        version, _ = get_python_version(path, True)
    if not version or not self.python_requires.contains(version):
        finder = Finder()
        for python in finder.find_all_python_versions():
            version, _ = get_python_version(python.path.as_posix(), True)
            if self.python_requires.contains(version):
                path = python.path.as_posix()
                break
        else:
            version = ".".join(map(str, sys.version_info[:3]))
            if self.python_requires.contains(version):
                path = sys.executable
    # NOTE(review): as in the newer variant, a non-matching ``path`` from
    # shutil.which may still be returned if every fallback fails — confirm.
    if path:
        stream.echo(
            "Using Python interpreter: {} ({})".format(stream.green(path), version)
        )
        self.project.project_config["python.path"] = Path(path).as_posix()
        return path
    raise NoPythonVersion(
        "No Python that satisfies {} is found on the system.".format(
            self.python_requires
        )
    )