def do_remove(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    packages: Collection[str] = (),
    no_editable: bool = False,
    no_self: bool = False,
    dry_run: bool = False,
) -> None:
    """Remove packages from the working set and pyproject.toml.

    :param project: the project instance
    :param dev: remove from the dev-dependencies group
    :param group: group to remove from; defaults to "dev" when ``dev`` is
        true, otherwise "default"
    :param sync: whether to synchronize the working set afterwards
    :param packages: package names to remove
    :param no_editable: pass-through to ``do_sync``
    :param no_self: pass-through to ``do_sync``
    :param dry_run: show changes without writing pyproject.toml
    :raises PdmUsageError: if no package is given
    :raises ProjectError: if the group or a package doesn't exist
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not group:
        group = "dev" if dev else "default"
    if group not in list(project.iter_groups()):
        # Fixed wording: previous message read "No-exist group {group}".
        raise ProjectError(f"Group {group} does not exist.")
    deps = project.get_pyproject_dependencies(group, dev)
    project.core.ui.echo(
        f"Removing packages from {group} {'dev-' if dev else ''}dependencies: "
        + ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        # Delete from highest index first so earlier indexes stay valid.
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), group
                )
            )
        for i in matched_indexes:
            del deps[i]

    if not dry_run:
        project.write_pyproject()
    # do_lock/do_sync handle dry_run themselves, so they run unconditionally.
    do_lock(project, "reuse", dry_run=dry_run)
    if sync:
        do_sync(
            project,
            groups=(group,),
            default=False,
            clean=True,
            no_editable=no_editable,
            no_self=no_self,
            dry_run=dry_run,
        )
def _legacy_generate_rows(self) -> Iterator[Tuple[str, str]]:
    """Yield (label, value) rows for display from legacy-style metadata.

    Reads ``self._data``, which appears to hold raw metadata headers keyed
    by their original header names (e.g. "Home-page") — TODO confirm
    against the code that populates ``_data``.  Row order is the display
    order, so do not reorder the yields.
    """
    yield termui.cyan("Name:"), self._data["Name"]
    yield termui.cyan("Latest version:"), self._data["Version"]
    if self.latest_stable_version:
        yield (termui.cyan("Latest stable version:"), self.latest_stable_version)
    if self.installed_version:
        yield (termui.green("Installed version:"), self.installed_version)
    yield termui.cyan("Summary:"), self._data.get("Summary", "")
    yield termui.cyan("Author:"), self._data.get("Author", "")
    yield termui.cyan("Author email:"), self._data.get("Author-email", "")
    yield termui.cyan("License:"), self._data.get("License", "")
    yield termui.cyan("Requires python:"), self._data.get(
        "Requires-Python", "")
    yield termui.cyan("Platform:"), ", ".join(
        self._data.get("Platform", []))
    yield termui.cyan("Keywords:"), ", ".join(
        self._data.get("Keywords", []))
    yield termui.cyan("Homepage:"), self._data.get("Home-page", "")
    if self._data.get("Project-URL"):
        lines = [
            ":".join(parts) for parts in self._data.get("Project-URL")
        ]
        # First URL shares the labelled row; the rest continue unlabelled.
        yield termui.cyan("Project URLs:"), lines[0]
        for line in lines[1:]:
            yield "", line
def check_project_file(project: Project) -> None:
    """Check the existence of the project file and throws an error on failure."""
    if project.meta:
        return
    hint = termui.green("'pdm init'")
    raise ProjectError(
        "The pyproject.toml has not been initialized yet. You can do this "
        "by running {}.".format(hint)
    )
def _run_command(
    project: Project,
    args: Union[Sequence[str], str],
    chdir: bool = False,
    shell: bool = False,
    env: Optional[Mapping[str, str]] = None,
    env_file: Optional[str] = None,
) -> None:
    """Replace the current process with the given command (does not return).

    Mutates ``os.environ`` in place (PYTHONPATH, PATH, PDM_PROJECT_ROOT,
    optional PEP582_PACKAGES and .env values), then either execs the command
    or exits with its return code.

    :param project: the project instance
    :param args: a command string (when ``shell``) or argv sequence
    :param chdir: change to the project root before running
    :param shell: run ``args`` through the system shell
    :param env: extra environment variables
    :param env_file: a dotenv file (relative to project root) to load
    :raises PdmUsageError: if the command is not found on PATH
    """
    # Prepend the PEP 582 loader path to any existing PYTHONPATH.
    if "PYTHONPATH" in os.environ:
        pythonpath = os.pathsep.join(
            [PEP582_PATH, os.getenv("PYTHONPATH", "")])
    else:
        pythonpath = PEP582_PATH
    project_env = project.environment
    this_path = project_env.get_paths()["scripts"]
    python_root = os.path.dirname(project.python.executable)
    # Interpreter dir and project scripts dir take precedence on PATH.
    new_path = os.pathsep.join(
        [python_root, this_path, os.getenv("PATH", "")])
    os.environ.update({
        "PYTHONPATH": pythonpath,
        "PATH": new_path,
        "PDM_PROJECT_ROOT": str(project.root),
    })
    if project_env.packages_path:
        os.environ.update(
            {"PEP582_PACKAGES": str(project_env.packages_path)})
    if env_file:
        import dotenv
        project.core.ui.echo(
            f"Loading .env file: {termui.green(env_file)}", err=True)
        dotenv.load_dotenv(project.root.joinpath(env_file).as_posix(),
                           override=True)
    if env:
        os.environ.update(env)
    if shell:
        assert isinstance(args, str)
        sys.exit(subprocess.call(os.path.expandvars(args), shell=True))
    assert isinstance(args, Sequence)
    command, *args = args
    # Resolve the command inside the project environment first.
    expanded_command = project_env.which(command)
    if not expanded_command:
        raise PdmUsageError("Command {} is not found on your PATH.".format(
            termui.green(f"'{command}'")))
    expanded_command = os.path.expanduser(
        os.path.expandvars(expanded_command))
    expanded_args = [
        os.path.expandvars(arg) for arg in [expanded_command] + args
    ]
    if os.name == "nt" or "CI" in os.environ:
        # In order to make sure pytest is playing well,
        # don't hand over the process under a testing environment.
        cwd = project.root if chdir else None
        sys.exit(subprocess.call(expanded_args, cwd=cwd))
    else:
        if chdir:
            os.chdir(project.root)
        # Replace the current process image entirely.
        os.execv(expanded_command, expanded_args)
def print_pep582_command(ui: termui.UI, shell: str = "AUTO"):
    """Print the export PYTHONPATH line to be evaluated by the shell.

    On Windows the variable is written to the registry instead of being
    printed.  ``shell`` may be "AUTO" (detect via shellingham) or an
    explicit shell name.

    :raises PdmUsageError: when the shell is not supported
    """
    import shellingham

    if os.name == "nt":
        try:
            set_env_in_reg("PYTHONPATH", PEP582_PATH)
        except PermissionError:
            ui.echo(
                termui.red(
                    "Permission denied, please run the terminal as administrator."
                ),
                err=True,
            )
        # NOTE(review): the success message below is printed even after a
        # PermissionError — confirm whether it should sit in an else branch.
        ui.echo(
            termui.green("The environment variable has been saved, "
                         "please restart the session to take effect."))
        return
    # Single quotes inside the path must be escaped for the shell snippet.
    lib_path = PEP582_PATH.replace("'", "\\'")
    if shell == "AUTO":
        shell = shellingham.detect_shell()[0]
    shell = shell.lower()
    if shell in ("zsh", "bash"):
        result = f"export PYTHONPATH='{lib_path}':$PYTHONPATH"
    elif shell == "fish":
        result = f"set -x PYTHONPATH '{lib_path}' $PYTHONPATH"
    elif shell in ("tcsh", "csh"):
        result = f"setenv PYTHONPATH '{lib_path}':$PYTHONPATH"
    else:
        raise PdmUsageError(
            f"Unsupported shell: {shell}, please specify another shell "
            "via `--pep582 <SHELL>`")
    ui.echo(result)
def generate_rows(self) -> Iterator[Tuple[str, str]]:
    """Yield (label, value) rows for display from JSON-style metadata.

    Falls back to :meth:`_legacy_generate_rows` when ``self.legacy`` is set.
    Row order is the display order.  ``self._data`` appears to follow the
    PEP 566 JSON metadata layout (lowercase keys, ``extensions`` block) —
    TODO confirm against the producer of ``_data``.
    """
    if self.legacy:
        yield from self._legacy_generate_rows()
        return
    yield termui.cyan("Name:"), self._data["name"]
    yield termui.cyan("Latest version:"), self._data["version"]
    if self.latest_stable_version:
        yield (termui.cyan("Latest stable version:"), self.latest_stable_version)
    if self.installed_version:
        yield (termui.green("Installed version:"), self.installed_version)
    yield termui.cyan("Summary:"), self._data.get("summary", "")
    contacts = (self._data.get("extensions",
                               {}).get("python.details",
                                       {}).get("contacts"))
    if contacts:
        # Only the first contact with role "author" is shown.
        author_contact = next(
            iter(c for c in contacts if c["role"] == "author"), {})
        yield termui.cyan("Author:"), author_contact.get("name", "")
        yield termui.cyan("Author email:"), author_contact.get("email", "")
    yield termui.cyan("License:"), self._data.get("license", "")
    yield termui.cyan("Homepage:"), self._data.get("extensions", {}).get(
        "python.details", {}).get("project_urls", {}).get("Home", "")
    yield termui.cyan("Project URLs:"), self._data.get("project_url", "")
    yield termui.cyan("Platform:"), self._data.get("platform", "")
    yield termui.cyan("Keywords:"), ", ".join(
        self._data.get("keywords", []))
def do_list(project: Project, graph: bool = False, reverse: bool = False) -> None:
    """Display a list of packages installed in the local packages directory.

    :param project: the project instance.
    :param graph: whether to display a graph.
    :param reverse: whether to display reverse graph.
    """
    from pdm.cli.utils import (
        build_dependency_graph,
        format_dependency_graph,
        format_reverse_dependency_graph,
    )

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if reverse and not graph:
        raise PdmUsageError("--reverse must be used with --graph")
    if graph:
        with project.environment.activate():
            dep_graph = build_dependency_graph(working_set)
        # NOTE: `graph` (the bool parameter) is rebound here to the
        # formatted string — intentional but easy to misread.
        if reverse:
            graph = format_reverse_dependency_graph(project, dep_graph)
        else:
            graph = format_dependency_graph(project, dep_graph)
        project.core.ui.echo(graph)
    else:
        rows = [(termui.green(k, bold=True), format_dist(v))
                for k, v in sorted(working_set.items())]
        project.core.ui.display_columns(rows, ["Package", "Version"])
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether perform syncing action
    :param packages: Package names to be removed
    :return: None
    :raises PdmUsageError: if no package is given
    :raises ProjectError: if the section or a package doesn't exist
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not section:
        section = "dev" if dev else "default"
    if section not in list(project.iter_sections()):
        # Fixed wording: previous message read "No-exist section {section}".
        raise ProjectError(f"Section {section} does not exist.")
    deps = project.get_pyproject_dependencies(section, dev)
    project.core.ui.echo(
        f"Removing packages from {section} {'dev-' if dev else ''}dependencies: "
        + ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        # Delete from highest index first so earlier indexes stay valid.
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), section
                )
            )
        for i in matched_indexes:
            del deps[i]

    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
def do_add(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to dev dependencies section
    :param section: specify section to be add to
    :param sync: whether to install added packages
    :param save: save strategy
    :param strategy: update strategy
    :param editables: editable requirements
    :param packages: normal requirements
    :raises PdmUsageError: if neither packages nor editables are given
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    section = "dev" if dev else section or "default"
    tracked_names = set()
    requirements = {}
    # Editables are parsed with the editable flag; both kinds are merged.
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        r.from_section = section
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(f"Adding packages to {section} dependencies: " +
                         ", ".join(
                             termui.green(key or "", bold=True)
                             for key in requirements))
    all_dependencies = project.all_dependencies
    all_dependencies.setdefault(section, {}).update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    save_version_specifiers(requirements, resolved, save)
    project.add_dependencies(requirements)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        do_sync(
            project,
            sections=(section, ),
            dev=False,
            default=False,
            dry_run=False,
            clean=False,
        )
def _show_list(self, project: Project) -> None:
    """Print a table of the project's scripts, skipping the "_" hook entry."""
    if not project.scripts:
        return
    rows = []
    for name, script in project.scripts.items():
        if name != "_":
            kind, value, options = self._normalize_script(script)
            rows.append(
                (termui.green(name), kind, value, options.get("help", ""))
            )
    project.core.ui.display_columns(
        rows, ["Name", "Type", "Script", "Description"]
    )
def do_add(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
    unconstrained: bool = False,
    no_editable: bool = False,
    no_self: bool = False,
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to the dev-dependencies group
    :param group: target group; defaults to "dev" when ``dev`` else "default"
    :param sync: install the resolved packages afterwards
    :param save: save strategy for version specifiers
    :param strategy: resolution/update strategy
    :param editables: editable requirements
    :param packages: normal requirements
    :param unconstrained: drop existing version constraints in the group
    :param no_editable: pass-through to ``do_sync``
    :param no_self: pass-through to ``do_sync``
    :raises PdmUsageError: if neither packages nor editables are given
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    if not group:
        group = "dev" if dev else "default"
    tracked_names: set[str] = set()
    requirements: dict[str, Requirement] = {}
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(
        f"Adding packages to {group} {'dev-' if dev else ''}dependencies: " +
        ", ".join(termui.green(key or "", bold=True) for key in requirements))
    all_dependencies = project.all_dependencies
    group_deps = all_dependencies.setdefault(group, {})
    if unconstrained:
        # Clear existing specifiers so the resolver is free to upgrade.
        for req in group_deps.values():
            req.specifier = get_specifier("")
    group_deps.update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    # With --unconstrained, every dep in the group gets a fresh specifier,
    # not just the newly-added ones.
    deps_to_update = group_deps if unconstrained else requirements
    save_version_specifiers({group: deps_to_update}, resolved, save)
    project.add_dependencies(deps_to_update, group, dev)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        do_sync(
            project,
            groups=(group, ),
            default=False,
            no_editable=no_editable,
            no_self=no_self,
        )
def migrate_pyproject(project: Project):
    """Migrate the legacy pyproject format to PEP 621

    Two migrations are handled:
    1. A PEP 621 file that still carries pdm build fields under [project]
       — those fields are moved to [tool.pdm].
    2. A fully legacy (pdm 0.x) file — delegated to the "legacy" importer.
    Both paths write the file and report via the UI on stderr.
    """
    if project.pyproject and "project" in project.pyproject:
        pyproject = project.pyproject
        settings = {}
        updated_fields = []
        # These build-related keys never belonged in [project] per PEP 621.
        for field in ("includes", "excludes", "build", "package-dir"):
            if field in pyproject["project"]:
                updated_fields.append(field)
                settings[field] = pyproject["project"][field]
                del pyproject["project"][field]
        if "dev-dependencies" in pyproject["project"]:
            if pyproject["project"]["dev-dependencies"]:
                # Old flat dev-dependencies become the "dev" group.
                settings["dev-dependencies"] = {
                    "dev": pyproject["project"]["dev-dependencies"]
                }
            del pyproject["project"]["dev-dependencies"]
            updated_fields.append("dev-dependencies")
        if updated_fields:
            if "tool" not in pyproject or "pdm" not in pyproject["tool"]:
                setdefault(pyproject, "tool", {})["pdm"] = tomlkit.table()
            pyproject["tool"]["pdm"].update(settings)
            project.pyproject = pyproject
            project.write_pyproject()
            project.core.ui.echo(
                f"{termui.yellow('[AUTO-MIGRATION]')} These fields are moved from "
                f"[project] to [tool.pdm] table: {updated_fields}",
                err=True,
            )
        return
    if not project.pyproject_file.exists() or not FORMATS["legacy"].check_fingerprint(
        project, project.pyproject_file
    ):
        return
    project.core.ui.echo(
        f"{termui.yellow('[AUTO-MIGRATION]')} Legacy pdm 0.x metadata detected, "
        "migrating to PEP 621...",
        err=True,
    )
    do_import(project, project.pyproject_file, "legacy")
    project.core.ui.echo(
        termui.green("pyproject.toml") + termui.yellow(
            " has been migrated to PEP 621 successfully. "
            "Now you can safely delete the legacy metadata under [tool.pdm] table."
        ),
        err=True,
    )
def do_use(project: Project,
           python: Optional[str] = "",
           first: Optional[bool] = False) -> None:
    """Use the specified python version and save in project config.
    The python can be a version string or interpreter path.

    :param project: the project instance
    :param python: version string or interpreter path; None/"" lists all
        matching interpreters
    :param first: pick the first match without prompting
    :raises NoPythonVersion: when no matching interpreter is found
    """

    def version_matcher(py_version):
        return project.python_requires.contains(str(py_version.version))

    # BUG FIX: the parameter is Optional[str], so calling .strip() on None
    # raised AttributeError.  Normalize None to "" first.
    python = python.strip() if python else ""
    # dict.fromkeys deduplicates while preserving discovery order.
    found_interpreters = list(
        dict.fromkeys(
            filter(version_matcher, project.find_interpreters(python))))
    if not found_interpreters:
        raise NoPythonVersion("Python interpreter is not found on the system.")
    if first or len(found_interpreters) == 1:
        selected_python = found_interpreters[0]
    else:
        project.core.ui.echo("Please enter the Python interpreter to use")
        for i, py_version in enumerate(found_interpreters):
            python_version = str(py_version.version)
            is_64bit = py_version.get_architecture() == "64bit"
            version_string = get_python_version_string(python_version, is_64bit)
            project.core.ui.echo(
                f"{i}. {termui.green(py_version.executable)} ({version_string})"
            )
        selection = click.prompt(
            "Please select:",
            type=click.Choice([str(i) for i in range(len(found_interpreters))]),
            default="0",
            show_choices=False,
        )
        selected_python = found_interpreters[int(selection)]

    old_path = project.config.get("python.path")
    new_path = selected_python.executable
    python_version = str(selected_python.version)
    is_64bit = selected_python.get_architecture() == "64bit"
    project.core.ui.echo("Using Python interpreter: {} ({})".format(
        termui.green(str(new_path)),
        get_python_version_string(python_version, is_64bit),
    ))
    project.python_executable = new_path
    # Rewrite script shebangs when the interpreter actually changed.
    if old_path and Path(old_path) != Path(new_path) and not project.is_global:
        project.core.ui.echo(termui.cyan("Updating executable scripts..."))
        project.environment.update_shebangs(new_path)
def show_list(self) -> None:
    """Display the project's named tasks in a table, excluding the "_" hook."""
    scripts = self.project.scripts
    if not scripts:
        return
    rows = []
    for name in scripts:
        if name == "_":
            continue
        task = self._get_task(name)
        assert task is not None
        row = (
            termui.green(name),
            task.kind,
            str(task.args),
            task.options.get("help", ""),
        )
        rows.append(row)
    header = ["Name", "Type", "Script", "Description"]
    self.project.core.ui.display_columns(rows, header)
def do_list(
    project: Project,
    graph: bool = False,
    reverse: bool = False,
    freeze: bool = False,
    json: bool = False,
) -> None:
    """Display a list of packages installed in the local packages directory.

    :param project: the project instance
    :param graph: render the dependency graph instead of a flat table
    :param reverse: reverse the graph (requires ``graph``)
    :param freeze: print requirements.txt-style lines (flat mode only)
    :param json: emit the graph as JSON (requires ``graph``)
    :raises PdmUsageError: when --reverse/--json are used without --graph
    """
    from pdm.cli.utils import build_dependency_graph, format_dependency_graph

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if graph:
        dep_graph = build_dependency_graph(
            working_set, project.environment.marker_environment
        )
        project.core.ui.echo(
            format_dependency_graph(project, dep_graph, reverse=reverse, json=json)
        )
    else:
        if reverse:
            raise PdmUsageError("--reverse must be used with --graph")
        if json:
            raise PdmUsageError("--json must be used with --graph")
        if freeze:
            # Emit one requirement line per dist, with the project-root
            # placeholder expanded, sorted case-insensitively.
            reqs = sorted(
                (
                    Requirement.from_dist(dist)
                    .as_line()
                    .replace(
                        "${PROJECT_ROOT}",
                        project.root.absolute().as_posix().lstrip("/"),
                    )
                    for dist in sorted(
                        working_set.values(), key=lambda d: d.metadata["Name"]
                    )
                ),
                key=lambda x: x.lower(),
            )
            project.core.ui.echo("\n".join(reqs))
            return
        rows = [
            (termui.green(k, bold=True), termui.yellow(v.version), get_dist_location(v))
            for k, v in sorted(working_set.items())
        ]
        project.core.ui.display_columns(rows, ["Package", "Version", "Location"])
def do_use(project: Project, python: str = "", first: bool = False) -> None: """Use the specified python version and save in project config. The python can be a version string or interpreter path. """ def version_matcher(py_version: PythonInfo) -> bool: return project.python_requires.contains(str(py_version.version)) if python: python = python.strip() found_interpreters = list( dict.fromkeys( filter(version_matcher, project.find_interpreters(python)))) if not found_interpreters: raise NoPythonVersion("Python interpreter is not found on the system.") if first or len(found_interpreters) == 1: selected_python = found_interpreters[0] else: project.core.ui.echo("Please enter the Python interpreter to use") for i, py_version in enumerate(found_interpreters): project.core.ui.echo( f"{i}. {termui.green(py_version.executable)} ({py_version.identifier})" ) selection = click.prompt( "Please select:", type=click.Choice([str(i) for i in range(len(found_interpreters))]), default="0", show_choices=False, ) selected_python = found_interpreters[int(selection)] old_path = project.python.executable if "python.path" in project.config else None new_path = selected_python.executable project.core.ui.echo("Using Python interpreter: {} ({})".format( termui.green(str(new_path)), selected_python.identifier, )) project.python = selected_python if (old_path and Path(old_path) != Path(new_path) and not project.environment.is_global): project.core.ui.echo(termui.cyan("Updating executable scripts...")) project.environment.update_shebangs(old_path, new_path)
def generate_rows(self) -> Iterator[tuple[str, str]]:
    """Yield (label, value) rows for display from the parsed metadata.

    Row order is the display order; simple string fields are driven by a
    (label, key) table to keep the sequence compact.
    """
    data = self._parsed
    yield termui.cyan("Name:"), data["name"]
    yield termui.cyan("Latest version:"), data["version"]
    if self.latest_stable_version:
        yield (termui.cyan("Latest stable version:"), self.latest_stable_version)
    if self.installed_version:
        yield (termui.green("Installed version:"), self.installed_version)
    for label, key in (
        ("Summary:", "summary"),
        ("Author:", "author"),
        ("Author email:", "email"),
        ("License:", "license"),
        ("Homepage:", "homepage"),
    ):
        yield termui.cyan(label), data.get(key, "")
    # The first URL shares the labelled row; any extras get a blank label.
    yield from itertools.zip_longest(
        (termui.cyan("Project URLs:"), ),
        data.get("project-urls", []),
        fillvalue="",
    )
    yield termui.cyan("Platform:"), data.get("platform", "")
    yield termui.cyan("Keywords:"), data.get("keywords", "")
def print_results(
    ui: termui.UI,
    hits: SearchResult,
    working_set: WorkingSet,
    terminal_width: Optional[int] = None,
) -> None:
    """Pretty-print search results, marking packages already installed.

    :param ui: the UI to write to
    :param hits: search hits with name/version/summary attributes
    :param working_set: installed distributions, keyed by normalized name
    :param terminal_width: used to wrap long summaries; None disables wrapping
    """
    if not hits:
        return
    # Widest "name (version)" cell plus padding, computed over all hits.
    name_column_width = (
        max([len(hit.name) + len(hit.version or "") for hit in hits]) + 4
    )

    for hit in hits:
        name = hit.name
        summary = hit.summary or ""
        latest = hit.version or ""
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ("\n" + " " * (name_column_width + 2)).join(summary)

        current_width = len(name) + len(latest) + 4
        spaces = " " * (name_column_width - current_width)
        line = "{name} ({latest}){spaces} - {summary}".format(
            name=termui.green(name, bold=True),
            latest=termui.yellow(latest),
            spaces=spaces,
            summary=summary,
        )
        try:
            ui.echo(line)
            if safe_name(name).lower() in working_set:
                dist = working_set[safe_name(name).lower()]
                if dist.version == latest:
                    ui.echo("  INSTALLED: %s (latest)" % dist.version)
                else:
                    ui.echo("  INSTALLED: %s" % dist.version)
                    ui.echo("  LATEST:    %s" % latest)
        except UnicodeEncodeError:
            # Best-effort output: skip rows the terminal encoding can't show.
            pass
def migrate_pyproject(project: Project):
    """Migrate the legacy pyproject format to PEP 621

    No-op unless the file exists, matches the legacy fingerprint, and has
    no [project] table yet.  Delegates the conversion to the "legacy"
    importer and reports on stderr.
    """
    if (not project.pyproject_file.exists()
            or not FORMATS["legacy"].check_fingerprint(project,
                                                       project.pyproject_file)
            or "project" in project.pyproject):
        return

    project.core.ui.echo(
        termui.yellow(
            "Legacy [tool.pdm] metadata detected, migrating to PEP 621..."),
        err=True,
    )
    do_import(project, project.pyproject_file, "legacy")
    project.core.ui.echo(
        termui.green("pyproject.toml") + termui.yellow(
            " has been migrated to PEP 621 successfully. "
            "Now you can safely delete the legacy metadata under [tool.pdm] table."
        ),
        err=True,
    )
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: The project instance
    :param dev: whether to update dev dependencies
    :param sections: update specified sections
    :param default: update default
    :param strategy: update strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: ignore version constraint
    :param packages: specified packages to update
    :return: None
    :raises PdmUsageError: on conflicting option combinations
    :raises ProjectError: if a named package is not a dependency
    """
    check_project_file(project)
    if len(packages) > 0 and (len(sections) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -s or --no-default."
        )
    if not packages:
        if unconstrained:
            raise PdmUsageError(
                "--unconstrained must be used with package names given.")
        # pdm update with no packages given, same as 'lock' + 'sync'
        do_lock(project)
        do_sync(project, sections, dev, default, clean=False)
        return
    section = sections[0] if sections else ("dev" if dev else "default")
    all_dependencies = project.all_dependencies
    dependencies = all_dependencies[section]
    updated_deps = {}
    tracked_names = set()
    for name in packages:
        # Match case-insensitively on the normalized name, extras stripped.
        matched_name = next(
            filter(
                lambda k: safe_name(strip_extras(k)[0]).lower() == safe_name(
                    name).lower(),
                dependencies.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                termui.green(name, bold=True), section))
        if unconstrained:
            dependencies[matched_name].specifier = get_specifier("")
        tracked_names.add(matched_name)
        updated_deps[matched_name] = dependencies[matched_name]
    project.core.ui.echo("Updating packages: {}.".format(", ".join(
        termui.green(v, bold=True) for v in tracked_names)))
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)
    do_sync(project, sections=(section, ), default=False, clean=False)
    if unconstrained:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
def do_use( project: Project, python: str = "", first: bool = False, ignore_remembered: bool = False, ) -> None: """Use the specified python version and save in project config. The python can be a version string or interpreter path. """ if python: python = python.strip() def version_matcher(py_version: PythonInfo) -> bool: return project.python_requires.contains(str(py_version.version), True) if not project.cache_dir.exists(): project.cache_dir.mkdir(parents=True) use_cache: JSONFileCache[str, str] = JSONFileCache( project.cache_dir / "use_cache.json" ) selected_python: PythonInfo | None = None if python and not ignore_remembered: if use_cache.has_key(python): path = use_cache.get(python) cached_python = PythonInfo.from_path(path) if not cached_python.valid: project.core.ui.echo( f"The last selection is corrupted. {path!r}", fg="red", err=True, ) elif version_matcher(cached_python): project.core.ui.echo( "Using the last selection, add '-i' to ignore it.", fg="yellow", err=True, ) selected_python = cached_python if selected_python is None: found_interpreters = list(dict.fromkeys(project.find_interpreters(python))) matching_interperters = list(filter(version_matcher, found_interpreters)) if not found_interpreters: raise NoPythonVersion("Python interpreter is not found on the system.") if not matching_interperters: project.core.ui.echo("Interpreters found but not matching:", err=True) for py in found_interpreters: project.core.ui.echo(f" - {py.executable} ({py.identifier})", err=True) raise NoPythonVersion( "No python is found meeting the requirement " f"{termui.green('python' + str(project.python_requires))}" ) if first or len(matching_interperters) == 1: selected_python = matching_interperters[0] else: project.core.ui.echo("Please enter the Python interpreter to use") for i, py_version in enumerate(matching_interperters): project.core.ui.echo( f"{i}. 
{termui.green(str(py_version.executable))} " f"({py_version.identifier})" ) selection = click.prompt( "Please select:", type=click.Choice([str(i) for i in range(len(matching_interperters))]), default="0", show_choices=False, ) selected_python = matching_interperters[int(selection)] if python: use_cache.set(python, selected_python.path.as_posix()) if not selected_python.valid: path = str(selected_python.executable) raise InvalidPyVersion(f"Invalid Python interpreter: {path}") old_python = project.python if "python.path" in project.config else None project.core.ui.echo( "Using Python interpreter: {} ({})".format( termui.green(str(selected_python.executable)), selected_python.identifier, ) ) project.python = selected_python if ( old_python and old_python.path != selected_python.path and not project.environment.is_global ): project.core.ui.echo(termui.cyan("Updating executable scripts...")) project.environment.update_shebangs(selected_python.executable.as_posix())
def _run_process(
    self,
    args: Sequence[str] | str,
    chdir: bool = False,
    shell: bool = False,
    site_packages: bool = False,
    env: Mapping[str, str] | None = None,
    env_file: str | None = None,
) -> int:
    """Run command in a subprocess and return the exit code.

    Builds a copy of the current environment (PEP 582 PYTHONPATH, adjusted
    PATH, project root, optional .env values), resolves the command inside
    the project environment, and forwards SIGINT to the child while it runs.

    :raises PdmUsageError: if the command is not found on PATH
    """
    project = self.project
    process_env = os.environ.copy()
    # Prepend the PEP 582 loader path to any existing PYTHONPATH.
    if "PYTHONPATH" in process_env:
        pythonpath = os.pathsep.join(
            [PEP582_PATH, os.getenv("PYTHONPATH", "")])
    else:
        pythonpath = PEP582_PATH
    project_env = project.environment
    this_path = project_env.get_paths()["scripts"]
    python_root = os.path.dirname(project.python.executable)
    new_path = os.pathsep.join(
        [this_path, os.getenv("PATH", ""), python_root])
    process_env.update({
        "PYTHONPATH": pythonpath,
        "PATH": new_path,
        "PDM_PROJECT_ROOT": str(project.root),
    })
    if project_env.packages_path:
        process_env.update(
            {"PEP582_PACKAGES": str(project_env.packages_path)})
    if env_file:
        import dotenv
        project.core.ui.echo(
            f"Loading .env file: {termui.green(env_file)}",
            err=True,
            verbosity=termui.DETAIL,
        )
        process_env.update(
            dotenv.dotenv_values(project.root / env_file, encoding="utf-8"))
    if env:
        process_env.update(env)
    if shell:
        assert isinstance(args, str)
        expanded_args: str | Sequence[str] = os.path.expandvars(args)
    else:
        assert isinstance(args, Sequence)
        command, *args = args
        expanded_command = project_env.which(command)
        if not expanded_command:
            raise PdmUsageError(
                "Command {} is not found on your PATH.".format(
                    termui.green(f"'{command}'")))
        expanded_command = os.path.expanduser(
            os.path.expandvars(expanded_command))
        expanded_args = [
            os.path.expandvars(arg) for arg in [expanded_command] + args
        ]
        if (not project_env.is_global and not site_packages
                and (command.startswith("python")
                     or is_path_relative_to(expanded_command, this_path))):
            # The executable belongs to the local packages directory.
            # Don't load system site-packages
            process_env["NO_SITE_PACKAGES"] = "1"
    cwd = project.root if chdir else None
    # NOTE(review): the handler closes over `process` before it is assigned;
    # late binding makes this work once Popen returns, but a SIGINT in the
    # window between these two lines would raise NameError — confirm.
    s = signal.signal(signal.SIGINT,
                      lambda signum, frame: process.send_signal(signum))
    process = subprocess.Popen(expanded_args,
                               cwd=cwd,
                               env=process_env,
                               shell=shell,
                               bufsize=0)
    process.wait()
    # Restore the previous SIGINT handler.
    signal.signal(signal.SIGINT, s)
    return process.returncode
def do_update(
    project: Project,
    *,
    dev: bool | None = None,
    groups: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Collection[str] = (),
    sync: bool = True,
    no_editable: bool = False,
    no_self: bool = False,
    prerelease: bool = False,
) -> None:
    """Update specified packages or all packages

    :param project: the project instance
    :param dev: include dev groups; None means "unspecified" (treated as True
        for installing, False when writing back)
    :param groups: groups to update
    :param default: include the default group
    :param strategy: resolution strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: drop version constraints of the updated deps
    :param top: only update top-level tracked names
    :param dry_run: resolve without writing anything
    :param packages: specific packages to update (else all in groups)
    :param sync: synchronize the working set after locking
    :param no_editable: pass-through to ``do_sync``
    :param no_self: pass-through to ``do_sync``
    :param prerelease: allow prereleases for the named packages
    :raises PdmUsageError: on conflicting option combinations
    :raises ProjectError: if a named package is not a dependency
    """
    check_project_file(project)
    if len(packages) > 0 and (top or len(groups) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -G or "
            "--no-default and --top."
        )
    all_dependencies = project.all_dependencies
    updated_deps: dict[str, dict[str, Requirement]] = defaultdict(dict)
    install_dev = True if dev is None else dev
    if not packages:
        if prerelease:
            raise PdmUsageError("--prerelease must be used with packages given")
        groups = translate_groups(project, default, install_dev, groups or ())
        for group in groups:
            updated_deps[group] = all_dependencies[group]
    else:
        group = groups[0] if groups else ("dev" if dev else "default")
        dependencies = all_dependencies[group]
        for name in packages:
            # Match on the normalized name, with extras stripped.
            matched_name = next(
                filter(
                    lambda k: normalize_name(strip_extras(k)[0])
                    == normalize_name(name),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), group, "dev-" if dev else ""
                    )
                )
            dependencies[matched_name].prerelease = prerelease
            updated_deps[group][matched_name] = dependencies[matched_name]
    project.core.ui.echo(
        "Updating packages: {}.".format(
            ", ".join(
                termui.green(v, bold=True)
                for v in chain.from_iterable(updated_deps.values())
            )
        )
    )
    if unconstrained:
        for deps in updated_deps.values():
            for dep in deps.values():
                dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy,
        chain.from_iterable(updated_deps.values()),
        reqs,
        dry_run=dry_run,
    )
    if sync or dry_run:
        do_sync(
            project,
            groups=groups,
            dev=install_dev,
            default=default,
            clean=False,
            dry_run=dry_run,
            requirements=[r for deps in updated_deps.values() for r in deps.values()],
            tracked_names=list(chain.from_iterable(updated_deps.values()))
            if top
            else None,
            no_editable=no_editable,
            no_self=no_self,
        )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        for group, deps in updated_deps.items():
            project.add_dependencies(deps, group, dev or False)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: the project instance
    :param dev: whether to update dev dependencies
    :param sections: sections to update
    :param default: include the default section
    :param strategy: resolution strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: drop version constraints of the updated deps
    :param top: only update top-level tracked names
    :param dry_run: resolve without writing anything
    :param packages: specific packages to update (else all in sections)
    :raises PdmUsageError: on conflicting option combinations
    :raises ProjectError: if a named package is not a dependency
    """
    check_project_file(project)
    if len(packages) > 0 and (top or len(sections) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -s or "
            "--no-default and --top.")
    all_dependencies = project.all_dependencies
    updated_deps = {}
    if not packages:
        sections = translate_sections(project, default, dev, sections or ())
        for section in sections:
            updated_deps.update(all_dependencies[section])
    else:
        section = sections[0] if sections else ("dev" if dev else "default")
        dependencies = all_dependencies[section]
        for name in packages:
            # Match case-insensitively on the normalized name, extras stripped.
            matched_name = next(
                filter(
                    lambda k: safe_name(strip_extras(k)[0]).lower() ==
                    safe_name(name).lower(),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), section,
                        "dev-" if dev else ""))
            updated_deps[matched_name] = dependencies[matched_name]
    project.core.ui.echo("Updating packages: {}.".format(", ".join(
        termui.green(v, bold=True) for v in updated_deps)))
    if unconstrained:
        for _, dep in updated_deps.items():
            dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    # A full re-lock ("all") when updating everything; otherwise reuse/eager.
    resolved = do_lock(
        project,
        strategy if top or packages else "all",
        updated_deps.keys(),
        reqs,
        dry_run=dry_run,
    )
    do_sync(
        project,
        sections=sections,
        dev=dev,
        default=default,
        clean=False,
        dry_run=dry_run,
        tracked_names=updated_deps.keys() if top else None,
    )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps, section, dev)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)