def _normalize_script( self, script: Any ) -> Tuple[str, Union[Sequence[str], str], MutableMapping[str, Any]]: if not getattr(script, "items", None): # Regard as the same as {cmd = ... } kind = "cmd" value = script options = {} else: script = dict( script) # to remove the effect of atoml's container type. for key in self.TYPES: if key in script: kind = key value = script.pop(key) break else: raise PdmUsageError( f"Script type must be one of ({', '.join(self.TYPES)})") options = script.copy() if any(key not in self.OPTIONS for key in options): raise PdmUsageError( f"pdm scripts only accept options: ({', '.join(self.OPTIONS)})" ) return kind, value, options
def _get_task(self, script_name: str) -> Task | None:
    """Look up *script_name* in the project's scripts table and build a Task.

    Returns ``None`` when the script is not defined.  A plain string or
    sequence is treated as a ``cmd`` task; a mapping must contain exactly
    one key from ``self.TYPES``, with the remaining keys limited to
    ``self.OPTIONS``.

    :raises PdmUsageError: if the mapping has no valid type key or
        carries unknown option keys.
    """
    if script_name not in self.project.scripts:
        return None
    script = cast("str | Sequence[str] | Mapping[str,Any]",
                  self.project.scripts[script_name])
    if not isinstance(script, Mapping):
        # Regard as the same as {cmd = ... }
        kind = "cmd"
        value = script
        options = {}
    else:
        script = dict(script)  # to remove the effect of tomlkit's container type.
        for key in self.TYPES:
            if key in script:
                kind = key
                value = cast("str | Sequence[str]", script.pop(key))
                break
        else:
            raise PdmUsageError(
                f"Script type must be one of ({', '.join(self.TYPES)})")
        # Whatever is left after popping the type key are the options.
        options = script.copy()
    unknown_options = set(options) - set(self.OPTIONS)
    if unknown_options:
        raise PdmUsageError(
            f"Unknown options for task {script_name}: {', '.join(unknown_options)}"
        )
    return Task(kind, script_name, value, cast(TaskOptions, options))
def _run_command(
    project: Project,
    args: Union[Sequence[str], str],
    chdir: bool = False,
    shell: bool = False,
    env: Optional[Mapping[str, str]] = None,
    env_file: Optional[str] = None,
) -> None:
    """Run a command inside the project environment and exit with its code.

    Mutates ``os.environ`` (PYTHONPATH, PATH, PDM_PROJECT_ROOT, optional
    PEP582_PACKAGES and .env contents) before dispatching.  On the POSIX
    non-CI path it never returns: the process is replaced via ``os.execv``.

    :param args: the full command; a string when ``shell`` is True,
        otherwise a sequence of arguments.
    :param chdir: change to the project root before running.
    :param shell: run the command through the system shell.
    :param env: extra environment variables to apply.
    :param env_file: a dotenv file (relative to the project root) to load.
    :raises PdmUsageError: if the executable is not found on PATH.
    """
    if "PYTHONPATH" in os.environ:
        pythonpath = os.pathsep.join([PEP582_PATH, os.getenv("PYTHONPATH", "")])
    else:
        pythonpath = PEP582_PATH
    project_env = project.environment
    this_path = project_env.get_paths()["scripts"]
    python_root = os.path.dirname(project.python.executable)
    # Put the interpreter dir and the env's scripts dir ahead of PATH.
    new_path = os.pathsep.join([python_root, this_path, os.getenv("PATH", "")])
    os.environ.update({
        "PYTHONPATH": pythonpath,
        "PATH": new_path,
        "PDM_PROJECT_ROOT": str(project.root),
    })
    if project_env.packages_path:
        os.environ.update({"PEP582_PACKAGES": str(project_env.packages_path)})
    if env_file:
        import dotenv

        project.core.ui.echo(
            f"Loading .env file: {termui.green(env_file)}", err=True)
        # override=True: values from the .env file win over existing ones.
        dotenv.load_dotenv(
            project.root.joinpath(env_file).as_posix(), override=True)
    if env:
        os.environ.update(env)
    if shell:
        assert isinstance(args, str)
        sys.exit(subprocess.call(os.path.expandvars(args), shell=True))
    assert isinstance(args, Sequence)
    command, *args = args
    expanded_command = project_env.which(command)
    if not expanded_command:
        raise PdmUsageError("Command {} is not found on your PATH.".format(
            termui.green(f"'{command}'")))
    expanded_command = os.path.expanduser(os.path.expandvars(expanded_command))
    expanded_args = [
        os.path.expandvars(arg) for arg in [expanded_command] + args
    ]
    if os.name == "nt" or "CI" in os.environ:
        # In order to make sure pytest is playing well,
        # don't hand over the process under a testing environment.
        cwd = project.root if chdir else None
        sys.exit(subprocess.call(expanded_args, cwd=cwd))
    else:
        if chdir:
            os.chdir(project.root)
        os.execv(expanded_command, expanded_args)
def translate_groups(project: Project, default: bool, dev: bool,
                     groups: Iterable[str]) -> list[str]:
    """Translate default, dev and groups containing ":all" into a list of groups"""
    optional = set(project.meta.optional_dependencies or [])
    dev_only = set(project.tool_settings.get("dev-dependencies", []))
    resolved = set(groups)
    include_dev = True if dev is None else dev
    if resolved & dev_only:
        # A dev group was requested explicitly; that conflicts with --prod.
        if not include_dev:
            raise PdmUsageError(
                "--prod is not allowed with dev groups and should be left")
    elif include_dev:
        resolved.update(dev_only)
    if ":all" in groups:
        resolved.discard(":all")
        resolved.update(optional)
    if default:
        resolved.add("default")
    # Sorts the result in ascending order instead of in random order
    # to make this function pure
    invalid_groups = resolved - set(project.iter_groups())
    if invalid_groups:
        project.core.ui.echo(
            f"Ignoring non-existing groups: {invalid_groups}",
            fg="yellow", err=True)
        resolved -= invalid_groups
    return sorted(resolved)
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Clear the selected cache type (or every cache) and report totals.

    :param project: the project instance.
    :param options: ``options.type`` selects one of ``CACHE_TYPES``;
        empty/None means all caches.
    :raises PdmUsageError: if the given cache type is unknown.
    """
    if not options.type:
        types: Iterable[str] = self.CACHE_TYPES
    elif options.type not in self.CACHE_TYPES:
        # FIX: error message previously read "should one of" (missing "be").
        raise PdmUsageError(
            f"Invalid cache type {options.type}, should be one of {self.CACHE_TYPES}"
        )
    else:
        types = (str(options.type), )
    packages = files = 0
    with project.core.ui.open_spinner(
            f"Clearing {options.type or 'all'} caches...") as spinner:
        for type_ in types:
            # The "packages" cache is counted in whole cached packages;
            # every other cache type is counted in files.
            if type_ == "packages":
                packages += self._clear_packages(project.cache(type_))
            else:
                files += self._clear_files(project.cache(type_))
        message = []
        if packages:
            message.append(
                f"{packages} package{'s' if packages > 1 else ''}")
        if files:
            message.append(f"{files} file{'s' if files > 1 else ''}")
        if not message:  # pragma: no cover
            text = "No files need to be removed"
        else:
            text = f"{' and '.join(message)} are removed"
        spinner.succeed(text)
def do_list(project: Project, graph: bool = False, reverse: bool = False) -> None:
    """Display a list of packages installed in the local packages directory.

    :param project: the project instance.
    :param graph: whether to display a graph.
    :param reverse: whether to display reverse graph.
    :raises PdmUsageError: if --reverse is given without --graph.
    """
    from pdm.cli.utils import (
        build_dependency_graph,
        format_dependency_graph,
        format_reverse_dependency_graph,
    )

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if reverse and not graph:
        raise PdmUsageError("--reverse must be used with --graph")
    if graph:
        with project.environment.activate():
            dep_graph = build_dependency_graph(working_set)
            # FIX: use a dedicated name instead of rebinding the boolean
            # `graph` parameter to the rendered text.
            if reverse:
                output = format_reverse_dependency_graph(project, dep_graph)
            else:
                output = format_dependency_graph(project, dep_graph)
        stream.echo(output)
    else:
        rows = [(stream.green(k, bold=True), format_dist(v))
                for k, v in sorted(working_set.items())]
        stream.display_columns(rows, ["Package", "Version"])
def get_dependencies(self, section: Optional[str] = None
                     ) -> Dict[str, Requirement]:
    """Return the parsed requirements of *section*, keyed by identifier.

    ``None`` or ``"default"`` reads the main ``dependencies`` list; any
    other name is looked up in optional-dependencies first, then
    dev-dependencies, with a warning when it exists in both.

    :raises PdmUsageError: if the section does not exist anywhere.
    """
    metadata = self.meta
    optional_dependencies = metadata.get("optional-dependencies", {})
    dev_dependencies = self.tool_settings.get("dev-dependencies", {})
    if section in (None, "default"):
        deps = metadata.get("dependencies", [])
    else:
        if section in optional_dependencies and section in dev_dependencies:
            self.core.ui.echo(
                f"The {section} section exists in both [optional-dependencies] "
                "and [dev-dependencies], the former is taken.",
                err=True,
                fg="yellow",
            )
        if section in optional_dependencies:
            deps = optional_dependencies[section]
        elif section in dev_dependencies:
            deps = dev_dependencies[section]
        else:
            raise PdmUsageError(f"Non-exist section {section}")
    result = {}
    with cd(self.root):
        for line in deps:
            if line.startswith("-e "):
                # "-e " prefix marks an editable requirement.
                req = parse_requirement(line[3:].strip(), True)
            else:
                req = parse_requirement(line)
            # make editable packages behind normal ones to override correctly.
            # NOTE(review): no reordering actually happens here — later
            # entries simply overwrite earlier ones with the same key;
            # confirm the comment above matches the intended behavior.
            result[req.identify()] = req
    return result
def print_pep582_command(ui: termui.UI, shell: str = "AUTO"):
    """Print the export PYTHONPATH line to be evaluated by the shell.

    On Windows the value is written to the registry instead of printed;
    on POSIX the shell-appropriate export line is echoed.
    """
    import shellingham

    if os.name == "nt":
        try:
            set_env_in_reg("PYTHONPATH", PEP582_PATH)
        except PermissionError:
            ui.echo(
                termui.red(
                    "Permission denied, please run the terminal as administrator."
                ),
                err=True,
            )
        # NOTE: the saved message is printed regardless of the outcome above.
        ui.echo(
            termui.green("The environment variable has been saved, "
                         "please restart the session to take effect."))
        return
    lib_path = PEP582_PATH.replace("'", "\\'")
    if shell == "AUTO":
        shell = shellingham.detect_shell()[0]
    shell = shell.lower()
    # One export template per supported shell family.
    templates = {
        "zsh": f"export PYTHONPATH='{lib_path}':$PYTHONPATH",
        "bash": f"export PYTHONPATH='{lib_path}':$PYTHONPATH",
        "fish": f"set -x PYTHONPATH '{lib_path}' $PYTHONPATH",
        "tcsh": f"setenv PYTHONPATH '{lib_path}':$PYTHONPATH",
        "csh": f"setenv PYTHONPATH '{lib_path}':$PYTHONPATH",
    }
    result = templates.get(shell)
    if result is None:
        raise PdmUsageError(
            f"Unsupported shell: {shell}, please specify another shell "
            "via `--pep582 <SHELL>`")
    ui.echo(result)
def do_sync(
    project: Project,
    sections: Sequence[str] = (),
    dev: bool = False,
    default: bool = True,
    dry_run: bool = False,
    clean: Optional[bool] = None,
) -> None:
    """Synchronize project

    :param project: The project instance.
    :param sections: A tuple of optional sections to be synced.
    :param dev: whether to include dev-dependencies.
    :param default: whether to include default dependencies.
    :param dry_run: Print actions without actually running them.
    :param clean: whether to remove unneeded packages.
    :raises ProjectError: if the lock file does not exist yet.
    :raises PdmUsageError: if a requested section is not defined.
    """
    if not project.lockfile_file.exists():
        raise ProjectError("Lock file does not exist, nothing to sync")
    # By default, clean only when the default group is part of the sync.
    clean = default if clean is None else clean
    candidates = {}
    for section in sections:
        if section not in list(project.iter_sections()):
            raise PdmUsageError(
                f"Section {termui.green(repr(section))} doesn't exist "
                "in the pyproject.toml")
        candidates.update(project.get_locked_candidates(section))
    if dev:
        candidates.update(project.get_locked_candidates("dev"))
    if default:
        candidates.update(project.get_locked_candidates())
    handler = project.core.synchronizer_class(candidates, project.environment)
    handler.synchronize(clean=clean, dry_run=dry_run)
def do_import(project: Project, filename: str, format: Optional[str] = None) -> None:
    """Import project metadata from given file.

    :param project: the project instance
    :param filename: the file name
    :param format: the file format, or guess if not given.
    :raises PdmUsageError: if the format can't be determined.
    """
    if not format:
        # Try each known format's fingerprint until one matches.
        for key in FORMATS:
            if FORMATS[key].check_fingerprint(project, filename):
                break
        else:
            raise PdmUsageError("Can't derive the file format automatically, "
                                "please specify it via '-f/--format' option.")
    else:
        key = format
    tool_settings = FORMATS[key].convert(project, filename)
    format_toml(tool_settings)
    if not project.pyproject_file.exists():
        project.pyproject = {"tool": {"pdm": {}}}
    project.tool_settings.update(tool_settings)
    project.pyproject["build-system"] = {
        "requires": ["pdm"],
        # BUG FIX: PEP 517 defines build-backend as a string naming the
        # backend object, not a list (was ["pdm.builders.api"]).
        "build-backend": "pdm.builders.api",
    }
    project.write_pyproject()
def do_sync(
    project: Project,
    sections: Sequence[str] = (),
    dev: bool = False,
    default: bool = True,
    dry_run: bool = False,
    clean: Optional[bool] = None,
    tracked_names: Optional[Sequence[str]] = None,
) -> None:
    """Synchronize project

    :param sections: optional sections to sync (translated/expanded).
    :param dev: include dev-dependencies.
    :param default: include default dependencies.
    :param dry_run: print actions without running them.
    :param clean: remove unneeded packages; defaults to ``default``.
    :param tracked_names: with ``dry_run``, restrict the candidate set
        to these names taken from the full locked set.
    :raises ProjectError: if the lock file does not exist yet.
    :raises PdmUsageError: if a requested section is not defined.
    """
    if not project.lockfile_file.exists():
        raise ProjectError("Lock file does not exist, nothing to sync")
    clean = default if clean is None else clean
    if tracked_names and dry_run:
        # Preview mode for a subset: pull candidates from the special
        # "__all__" section, filtered to the tracked names.
        candidates = {
            name: c
            for name, c in project.get_locked_candidates("__all__").items()
            if name in tracked_names
        }
    else:
        candidates = {}
        sections = translate_sections(project, default, dev, sections or ())
        valid_sections = list(project.iter_sections())
        for section in sections:
            if section not in valid_sections:
                raise PdmUsageError(
                    f"Section {termui.green(repr(section))} doesn't exist "
                    "in the pyproject.toml")
            candidates.update(project.get_locked_candidates(section))
    handler = project.core.synchronizer_class(candidates, project.environment,
                                              clean, dry_run)
    handler.synchronize()
def do_list(
    project: Project,
    graph: bool = False,
    reverse: bool = False,
    freeze: bool = False,
    json: bool = False,
) -> None:
    """Display a list of packages installed in the local packages directory.

    :param graph: render the dependency graph instead of a table.
    :param reverse: with ``graph``, show reverse dependencies.
    :param freeze: print requirements.txt-style lines instead of a table.
    :param json: with ``graph``, emit JSON output.
    :raises PdmUsageError: if --reverse or --json is used without --graph.
    """
    from pdm.cli.utils import build_dependency_graph, format_dependency_graph

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if graph:
        dep_graph = build_dependency_graph(
            working_set, project.environment.marker_environment
        )
        project.core.ui.echo(
            format_dependency_graph(project, dep_graph, reverse=reverse, json=json)
        )
    else:
        if reverse:
            raise PdmUsageError("--reverse must be used with --graph")
        if json:
            raise PdmUsageError("--json must be used with --graph")
        if freeze:
            # Emit requirement lines; ${PROJECT_ROOT} placeholders are
            # expanded to the absolute project root (leading "/" dropped).
            reqs = sorted(
                (
                    Requirement.from_dist(dist)
                    .as_line()
                    .replace(
                        "${PROJECT_ROOT}",
                        project.root.absolute().as_posix().lstrip("/"),
                    )
                    for dist in sorted(
                        working_set.values(), key=lambda d: d.metadata["Name"]
                    )
                ),
                key=lambda x: x.lower(),
            )
            project.core.ui.echo("\n".join(reqs))
            return
        rows = [
            (termui.green(k, bold=True), termui.yellow(v.version),
             get_dist_location(v))
            for k, v in sorted(working_set.items())
        ]
        project.core.ui.display_columns(rows, ["Package", "Version", "Location"])
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Resolve the command inside the project environment and run it,
    exiting with the subprocess's return code."""
    with project.environment.activate():
        resolved = project.environment.which(options.command)
        if not resolved:
            message = "Command {} is not found on your PATH.".format(
                stream.green(f"'{options.command}'"))
            raise PdmUsageError(message)
        argv = [resolved, *options.args]
        sys.exit(subprocess.call(argv))
def remove_cache_files(project: Project, pattern: str) -> None:
    """Delete cached files matching *pattern* and report how many went away.

    :raises PdmUsageError: if the pattern is empty or matches nothing.
    """
    if not pattern:
        raise PdmUsageError("Please provide a pattern")
    if pattern == "*":
        search_root = project.cache_dir
    else:
        # Only remove wheel files which specific pattern is given
        search_root = project.cache("wheels")
    matches = list(find_files(search_root, pattern))
    if not matches:
        raise PdmUsageError("No matching files found")
    for path in matches:
        os.unlink(path)
        project.core.ui.echo(f"Removed {path}", verbosity=termui.DETAIL)
    project.core.ui.echo(
        f"{len(matches)} file{'s' if len(matches) > 1 else ''} removed")
def do_add(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to dev dependencies section
    :param section: specify section to be add to
    :param sync: whether to install added packages
    :param save: save strategy
    :param strategy: update strategy
    :param editables: editable requirements
    :param packages: normal requirements
    :raises PdmUsageError: if neither packages nor editables are given.
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    # "dev" wins; otherwise the named section or the default one.
    section = "dev" if dev else section or "default"
    tracked_names = set()
    requirements = {}
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        r.from_section = section
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(f"Adding packages to {section} dependencies: " +
                         ", ".join(
                             termui.green(key or "", bold=True)
                             for key in requirements))
    all_dependencies = project.all_dependencies
    all_dependencies.setdefault(section, {}).update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    # Re-lock with the merged requirement set, tracking the new names.
    resolved = do_lock(project, strategy, tracked_names, reqs)
    # Update dependency specifiers and lockfile hash.
    save_version_specifiers(requirements, resolved, save)
    project.add_dependencies(requirements)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)
    if sync:
        do_sync(
            project,
            sections=(section, ),
            dev=False,
            default=False,
            dry_run=False,
            clean=False,
        )
def do_add(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
    unconstrained: bool = False,
    no_editable: bool = False,
    no_self: bool = False,
) -> None:
    """Add packages and install

    :param group: target dependency group; defaults to "dev" when *dev*
        is true, otherwise "default".
    :param unconstrained: drop existing version constraints in the group
        so the resolver is free to pick newer versions.
    :param no_editable: install packages non-editable.
    :param no_self: do not install the project itself.
    :raises PdmUsageError: if neither packages nor editables are given.
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    if not group:
        group = "dev" if dev else "default"
    tracked_names: set[str] = set()
    requirements: dict[str, Requirement] = {}
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(
        f"Adding packages to {group} {'dev-' if dev else ''}dependencies: " +
        ", ".join(termui.green(key or "", bold=True) for key in requirements))
    all_dependencies = project.all_dependencies
    group_deps = all_dependencies.setdefault(group, {})
    if unconstrained:
        # Reset every existing specifier so the resolver may upgrade.
        for req in group_deps.values():
            req.specifier = get_specifier("")
    group_deps.update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)
    # Update dependency specifiers and lockfile hash.
    deps_to_update = group_deps if unconstrained else requirements
    save_version_specifiers({group: deps_to_update}, resolved, save)
    project.add_dependencies(deps_to_update, group, dev)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)
    if sync:
        do_sync(
            project,
            groups=(group, ),
            default=False,
            no_editable=no_editable,
            no_self=no_self,
        )
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Emit the shell completion script for the detected or given shell."""
    import shellingham

    shell = options.shell or shellingham.detect_shell()[0]
    if shell not in self.SUPPORTED_SHELLS:
        raise PdmUsageError(f"Unsupported shell: {shell}")
    # PowerShell completion files use the .ps1 extension; every other
    # shell's script is named after the shell itself.
    if shell == "powershell":
        suffix = "ps1"
    else:
        suffix = shell
    script = importlib.resources.read_text(
        "pdm.cli.completions", f"pdm.{suffix}")
    rendered = script.replace("%{python_executable}", sys.executable)
    project.core.ui.echo(rendered)
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Dispatch `pdm run`: list scripts, run a named script, or fall
    back to running the command directly in the project environment."""
    if options.list:
        return self._show_list(project)
    scripts = project.scripts
    # The "_" entry holds options shared by every script.
    shared_options = scripts.get("_", {}) if scripts else {}
    if not options.command:
        raise PdmUsageError("No command given")
    if scripts and options.command in scripts:
        self._run_script(project, options.command, options.args, shared_options)
    else:
        self._run_command(
            project, [options.command, *options.args], **shared_options)
def do_remove(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    packages: Collection[str] = (),
    no_editable: bool = False,
    no_self: bool = False,
    dry_run: bool = False,
) -> None:
    """Remove packages from working set and pyproject.toml

    :param group: the group to remove from; defaults to "dev" when *dev*
        is true, otherwise "default".
    :param dry_run: show what would happen without writing files.
    :raises PdmUsageError: if no package is given.
    :raises ProjectError: if the group or a package does not exist.
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not group:
        group = "dev" if dev else "default"
    if group not in list(project.iter_groups()):
        raise ProjectError(f"No-exist group {group}")
    deps = project.get_pyproject_dependencies(group, dev)
    project.core.ui.echo(
        f"Removing packages from {group} {'dev-' if dev else ''}dependencies: " +
        ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        # Delete from the highest index down so earlier indexes stay valid.
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), group
                )
            )
        for i in matched_indexes:
            del deps[i]
    if not dry_run:
        project.write_pyproject()
    do_lock(project, "reuse", dry_run=dry_run)
    if sync:
        do_sync(
            project,
            groups=(group,),
            default=False,
            clean=True,
            no_editable=no_editable,
            no_self=no_self,
            dry_run=dry_run,
        )
def _normalize_script(self, script): if not getattr(script, "items", None): # Must be a string, regard as the same as {cmd = "..."} kind = "cmd" value = str(script) options = {} else: script = dict( script) # to remove the effect of tomlkit's container type. for key in self.TYPES: if key in script: kind = key value = script.pop(key) break else: raise PdmUsageError( f"Script type must be one of ({', '.join(self.TYPES)})") options = script.copy() if not all(key in self.OPTIONS for key in options): raise PdmUsageError( f"pdm scripts only accept options: ({', '.join(self.OPTIONS)})" ) return kind, value, options
def do_import(
    project: Project,
    filename: str,
    format: str | None = None,
    options: Namespace | None = None,
) -> None:
    """Import project metadata from given file.

    :param project: the project instance
    :param filename: the file name
    :param format: the file format, or guess if not given.
    :param options: other options parsed to the CLI.
    :raises PdmUsageError: if the format can't be determined.
    """
    if not format:
        for key in FORMATS:
            if FORMATS[key].check_fingerprint(project, filename):
                break
        else:
            raise PdmUsageError(
                "Can't derive the file format automatically, "
                "please specify it via '-f/--format' option."
            )
    else:
        key = format
    if options is None:
        # Defaults used when called programmatically rather than via CLI.
        options = Namespace(dev=False, group=None)
    project_data, settings = FORMATS[key].convert(project, filename, options)
    pyproject = project.pyproject or tomlkit.document()
    if "tool" not in pyproject or "pdm" not in pyproject["tool"]:  # type: ignore
        pyproject.setdefault("tool", {})["pdm"] = tomlkit.table()
    if "project" not in pyproject:
        pyproject.add("project", tomlkit.table())  # type: ignore
        pyproject["project"].add(  # type: ignore
            tomlkit.comment("PEP 621 project metadata")
        )
        pyproject["project"].add(  # type: ignore
            tomlkit.comment("See https://www.python.org/dev/peps/pep-0621/")
        )
    # Merge instead of replace so existing user content is preserved.
    merge_dictionary(pyproject["project"], project_data)  # type: ignore
    merge_dictionary(pyproject["tool"]["pdm"], settings)  # type: ignore
    pyproject["build-system"] = {
        "requires": ["pdm-pep517"],
        "build-backend": "pdm.pep517.api",
    }
    project.pyproject = cast(dict, pyproject)
    project.write_pyproject()
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Validate mutually exclusive flags and delegate to ``actions.do_add``."""
    if options.editables and options.no_editable:
        raise PdmUsageError("`--no-editable` cannot be used with `-e/--editable`")
    # Fall back to the configured strategies when not given on the CLI.
    save = options.save_strategy or project.config["strategy.save"]
    update = options.update_strategy or project.config["strategy.update"]
    actions.do_add(
        project,
        dev=options.dev,
        section=options.section,
        sync=options.sync,
        save=save,
        strategy=update,
        editables=options.editables,
        packages=options.packages,
        no_editable=options.no_editable,
        no_self=options.no_self,
    )
def _run_command(
    project: Project,
    args: Union[List[str], str],
    shell: bool = False,
    env: Optional[Dict[str, str]] = None,
    env_file: Optional[str] = None,
) -> None:
    """Run a command inside the project environment, exiting with its code.

    Mutates ``os.environ`` (PYTHONPATH and PATH) first; on POSIX outside
    CI the current process is replaced via ``os.execv``.

    :param args: a shell string when ``shell`` is True, otherwise a list
        of arguments.  NOTE(review): a plain string with ``shell=False``
        would be star-unpacked character by character below — confirm
        callers always pass a list in that case.
    :param env: extra environment variables to apply.
    :param env_file: dotenv file (relative to project root) to load.
    :raises PdmUsageError: if the executable is not found on PATH.
    """
    if "PYTHONPATH" in os.environ:
        pythonpath = os.pathsep.join([PEP582_PATH, os.getenv("PYTHONPATH")])
    else:
        pythonpath = PEP582_PATH
    project_env = project.environment
    this_path = project_env.get_paths()["scripts"]
    python_root = os.path.dirname(project_env.python_executable)
    # Put the interpreter dir and the env's scripts dir ahead of PATH.
    new_path = os.pathsep.join([python_root, this_path, os.getenv("PATH", "")])
    os.environ.update({"PYTHONPATH": pythonpath, "PATH": new_path})
    if env_file:
        import dotenv

        stream.echo(f"Loading .env file: {stream.green(env_file)}", err=True)
        # override=True: values from the .env file win over existing ones.
        dotenv.load_dotenv(
            project.root.joinpath(env_file).as_posix(), override=True)
    if env:
        os.environ.update(env)
    if shell:
        sys.exit(subprocess.call(os.path.expandvars(args), shell=True))
    command, *args = args
    expanded_command = project_env.which(command)
    if not expanded_command:
        raise PdmUsageError("Command {} is not found on your PATH.".format(
            stream.green(f"'{command}'")))
    expanded_command = os.path.expanduser(os.path.expandvars(expanded_command))
    expanded_args = [
        os.path.expandvars(arg) for arg in [expanded_command] + args
    ]
    if os.name == "nt" or "CI" in os.environ:
        # In order to make sure pytest is playing well,
        # don't hand over the process under a testing environment.
        sys.exit(subprocess.call(expanded_args))
    else:
        os.execv(expanded_command, expanded_args)
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether perform syncing action
    :param packages: Package names to be removed
    :return: None
    :raises PdmUsageError: if no package is given.
    :raises ProjectError: if the section or a package does not exist.
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not section:
        section = "dev" if dev else "default"
    if section not in list(project.iter_sections()):
        raise ProjectError(f"No-exist section {section}")
    deps = project.get_pyproject_dependencies(section, dev)
    project.core.ui.echo(
        f"Removing packages from {section} {'dev-' if dev else ''}dependencies: " +
        ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        # Delete from the highest index down so earlier indexes stay valid.
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), section
                )
            )
        for i in matched_indexes:
            del deps[i]
    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Remove all files under the chosen cache directory (or all caches).

    :param options: ``options.type`` selects one of ``CACHE_TYPES``;
        empty/None clears every cache.
    :raises PdmUsageError: if the given cache type is unknown.
    """
    if not options.type:
        cache_parent = project.cache_dir
    elif options.type not in self.CACHE_TYPES:
        # FIX: error message previously read "should one of" (missing "be").
        raise PdmUsageError(
            f"Invalid cache type {options.type}, should be one of {self.CACHE_TYPES}"
        )
    else:
        cache_parent = project.cache(options.type)
    with project.core.ui.open_spinner(
            f"Clearing {options.type or 'all'} caches...") as spinner:
        files = list(find_files(cache_parent, "*"))
        for file in files:
            os.unlink(file)
        spinner.succeed(
            f"{len(files)} file{'s' if len(files) > 1 else ''} removed")
def _run_task(self, task: Task, args: Sequence[str] = ()) -> int:
    """Execute a single task, merging its options with the global ones.

    :param task: the task to run (supports cmd/shell/call kinds).
    :param args: extra arguments appended to the task's own.
    :return: the exit code of the spawned process.
    :raises PdmUsageError: if a "call" value is not <module>:<callable>.
    """
    kind, _, value, options = task
    # "help" is display-only metadata, not an execution option.
    options.pop("help", None)
    shell = False
    if kind == "cmd":
        if not isinstance(value, list):
            value = shlex.split(str(value))
        args = value + list(args)
    elif kind == "shell":
        assert isinstance(value, str)
        args = " ".join([value] + list(args))  # type: ignore
        shell = True
    elif kind == "call":
        assert isinstance(value, str)
        module, _, func = value.partition(":")
        if not module or not func:
            raise PdmUsageError(
                "Python callable must be in the form <module_name>:<callable_name>"
            )
        short_name = "_1"
        if re.search(r"\(.*?\)", func) is None:
            # Bare callable name: append a no-argument call.
            func += "()"
        args = [
            "python",
            "-c",
            f"import sys, {module} as {short_name};"
            f"sys.exit({short_name}.{func})",
        ] + list(args)
    if "env" in self.global_options:
        # Task-level env entries win over the global ones.
        options["env"] = {
            **self.global_options["env"],
            **options.get("env", {})
        }
    options["env_file"] = options.get("env_file",
                                      self.global_options.get("env_file"))
    self.project.core.ui.echo(
        f"Running {task}: {termui.green(str(args))}",
        err=True,
        verbosity=termui.DETAIL,
    )
    return self._run_process(
        args, chdir=True, shell=shell, **options  # type: ignore
    )
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether perform syncing action
    :param packages: Package names to be removed
    :return: None
    :raises PdmUsageError: if no package is given.
    :raises ProjectError: if the section or a package does not exist.
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    section = "dev" if dev else section or "default"
    toml_section = f"{section}-dependencies" if section != "default" else "dependencies"
    if toml_section not in project.tool_settings:
        raise ProjectError(
            f"No such section {stream.yellow(toml_section)} in pyproject.toml."
        )
    deps = project.tool_settings[toml_section]
    stream.echo(f"Removing packages from {section} dependencies: " + ", ".join(
        str(stream.green(name, bold=True)) for name in packages))
    for name in packages:
        # Match case-insensitively against the normalized package name.
        matched_name = next(
            filter(
                lambda k: safe_name(k).lower() == safe_name(name).lower(),
                deps.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                stream.green(name, bold=True), section))
        del deps[matched_name]
    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section, ), default=False, clean=True)
def _run_script(
    self,
    project: Project,
    script_name: str,
    args: Sequence[str],
    global_env_options: Mapping[str, Union[str, Mapping[str, str]]],
) -> None:
    """Resolve a named pdm script and execute it via ``_run_command``.

    :param script_name: a key of ``project.scripts``.
    :param args: extra arguments appended to the script's own.
    :param global_env_options: options from the shared "_" script entry.
    :raises PdmUsageError: if a "call" value is not <module>:<callable>.
    """
    script = project.scripts[script_name]
    kind, value, options = self._normalize_script(script)
    # "help" is display-only metadata, not an execution option.
    options.pop("help", None)
    if kind == "cmd":
        if not isinstance(value, list):
            value = shlex.split(str(value))
        args = value + list(args)
    elif kind == "shell":
        assert isinstance(value, str)
        args = " ".join([value] + list(args))  # type: ignore
        options["shell"] = True
    elif kind == "call":
        assert isinstance(value, str)
        module, _, func = value.partition(":")
        if not module or not func:
            raise PdmUsageError(
                "Python callable must be in the form <module_name>:<callable_name>"
            )
        # Hash the module path into a collision-resistant alias name.
        short_name = "_" + hashlib.sha1(module.encode()).hexdigest()[:6]
        if re.search(r"\(.*?\)", func) is None:
            func += "()"
        args = [
            "python",
            "-c",
            f"import sys, {module} as {short_name};"
            f"sys.exit({short_name}.{func})",
        ] + list(args)
    if "env" in global_env_options:
        # Script-level env entries win over the global ones.
        options["env"] = {
            **cast(Mapping[str, str], global_env_options["env"]),
            **options.get("env", {}),
        }
    options["env_file"] = options.get("env_file",
                                      global_env_options.get("env_file"))
    project.core.ui.echo(
        f"Running {kind} script: {termui.green(str(args))}", err=True)
    return self._run_command(project, args, **options)
def do_import(project: Project, filename: str, format: Optional[str] = None) -> None:
    """Import project metadata from given file.

    :param project: the project instance
    :param filename: the file name
    :param format: the file format, or guess if not given.
    :raises PdmUsageError: if the format can't be determined.
    """
    if not format:
        for key in FORMATS:
            if FORMATS[key].check_fingerprint(project, filename):
                break
        else:
            raise PdmUsageError("Can't derive the file format automatically, "
                                "please specify it via '-f/--format' option.")
    else:
        key = format
    project_data, settings = FORMATS[key].convert(project, filename)
    pyproject = project.pyproject or tomlkit.document()
    if "tool" not in pyproject or "pdm" not in pyproject["tool"]:
        # NOTE(review): `setdefault` here looks like a module-level helper
        # taking (mapping, key, default), not dict.setdefault — confirm.
        setdefault(pyproject, "tool", {})["pdm"] = tomlkit.table()
    pyproject["tool"]["pdm"].update(settings)
    if "project" not in pyproject:
        pyproject.add("project", tomlkit.table())
        pyproject["project"].add(tomlkit.comment("PEP 621 project metadata"))
        pyproject["project"].add(
            tomlkit.comment("See https://www.python.org/dev/peps/pep-0621/"))
    pyproject["project"].update(project_data)
    pyproject["build-system"] = {
        "requires": ["pdm-pep517"],
        "build-backend": "pdm.pep517.api",
    }
    project.pyproject = pyproject
    project.write_pyproject()
def print_pep582_command(shell: str = "AUTO"):
    """Print the export PYTHONPATH line to be evaluated by the shell.

    On Windows the value is written to the user registry instead of
    printed.

    :param shell: target shell name, or "AUTO" to detect via shellingham.
    :raises PdmUsageError: if the shell is not supported.
    """
    import shellingham

    if os.name == "nt":
        # ROBUSTNESS FIX: writing HKCU environment keys can fail without
        # sufficient permissions; report it instead of crashing with a
        # traceback (consistent with the ui-aware variant of this function).
        try:
            set_env_in_reg("PYTHONPATH", PEP582_PATH)
        except PermissionError:
            stream.echo(
                "Permission denied, please run the terminal as administrator.",
                err=True,
            )
        return
    lib_path = PEP582_PATH.replace("'", "\\'")
    if shell == "AUTO":
        shell = shellingham.detect_shell()[0]
    shell = shell.lower()
    if shell in ("zsh", "bash"):
        result = f"export PYTHONPATH='{lib_path}':$PYTHONPATH"
    elif shell == "fish":
        result = f"set -x PYTHONPATH '{lib_path}' $PYTHONPATH"
    elif shell in ("tcsh", "csh"):
        result = f"setenv PYTHONPATH '{lib_path}':$PYTHONPATH"
    else:
        raise PdmUsageError(
            f"Unsupported shell: {shell}, please specify another shell "
            "via `--pep582 <SHELL>`")
    stream.echo(result)