def do_remove(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    packages: Collection[str] = (),
    no_editable: bool = False,
    no_self: bool = False,
    dry_run: bool = False,
) -> None:
    """Remove packages from working set and pyproject.toml

    :param project: the project instance
    :param dev: remove from the "dev" group when no group is given
    :param group: the dependency group to remove from
    :param sync: whether to synchronize the environment afterwards
    :param no_editable: passed through to do_sync
    :param no_self: passed through to do_sync
    :param dry_run: don't persist pyproject.toml changes
    :raises PdmUsageError: when no packages are given
    :raises ProjectError: when the group or a package doesn't exist
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not group:
        group = "dev" if dev else "default"
    if group not in list(project.iter_groups()):
        # BUG FIX: message previously read "No-exist group {group}".
        raise ProjectError(f"Group {group} does not exist.")
    deps = project.get_pyproject_dependencies(group, dev)
    project.core.ui.echo(
        f"Removing packages from {group} {'dev-' if dev else ''}dependencies: "
        + ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        # Collect matches in reverse order so deletion by index is safe.
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), group
                )
            )
        for i in matched_indexes:
            del deps[i]
    if not dry_run:
        project.write_pyproject()
    do_lock(project, "reuse", dry_run=dry_run)
    if sync:
        do_sync(
            project,
            groups=(group,),
            default=False,
            clean=True,
            no_editable=no_editable,
            no_self=no_self,
            dry_run=dry_run,
        )
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether perform syncing action
    :param packages: Package names to be removed
    :return: None
    """
    check_project_file(project)
    if not packages:
        raise click.BadParameter("Must specify at least one package to remove.")
    # --dev always targets the "dev" section regardless of --section.
    section = "dev" if dev else section or "default"
    toml_section = (
        "dependencies" if section == "default" else f"{section}-dependencies"
    )
    if toml_section not in project.tool_settings:
        raise ProjectError(
            f"No such section {context.io.yellow(toml_section)} in pyproject.toml."
        )
    deps = project.tool_settings[toml_section]
    shown = ", ".join(str(context.io.green(name, bold=True)) for name in packages)
    context.io.echo(f"Removing packages from {section} dependencies: " + shown)
    for name in packages:
        wanted = safe_name(name).lower()
        # Match case-insensitively against the normalized recorded key.
        matched_name = next(
            (key for key in deps.keys() if safe_name(key).lower() == wanted),
            None,
        )
        if not matched_name:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    context.io.green(name, bold=True), section
                )
            )
        del deps[matched_name]
    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
    config_settings: Mapping[str, str] | None = None,
) -> None:
    """Build artifacts for distribution."""
    from pdm.builders import SdistBuilder, WheelBuilder

    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    if not (wheel or sdist):
        project.core.ui.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    # Resolve a relative destination against the project root.
    if not os.path.isabs(dest):
        dest = project.root.joinpath(dest).as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    with project.core.ui.logging("build"):
        # sdist first, then wheel, matching the original ordering.
        for enabled, builder_cls, kind in (
            (sdist, SdistBuilder, "sdist"),
            (wheel, WheelBuilder, "wheel"),
        ):
            if not enabled:
                continue
            project.core.ui.echo(f"Building {kind}...")
            artifact = builder_cls(project.root, project.environment).build(
                dest, config_settings
            )
            project.core.ui.echo(f"Built {kind} at {artifact}")
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution."""
    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    check_project_file(project)
    if not (wheel or sdist):
        stream.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    # Resolve a relative destination against the project root.
    if not os.path.isabs(dest):
        dest = project.root.joinpath(dest).as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    with stream.logging("build"), EnvBuilder(
        project.root, project.environment
    ) as builder:
        # sdist first, then wheel, matching the original ordering.
        for enabled, build, kind in (
            (sdist, builder.build_sdist, "sdist"),
            (wheel, builder.build_wheel, "wheel"),
        ):
            if not enabled:
                continue
            stream.echo(f"Building {kind}...")
            artifact = build(dest)
            stream.echo(f"Built {kind} at {artifact}")
def lockfile(self) -> Container:
    """Return the lock file content, parsing it lazily on first access.

    :raises ProjectError: if the lock file is missing on disk.
    """
    if not self.lockfile_file.is_file():
        raise ProjectError("Lock file does not exist.")
    # Parse only once; subsequent accesses reuse the cached document.
    if not self._lockfile:
        text = self.lockfile_file.read_text("utf-8")
        self._lockfile = tomlkit.parse(text)
    return self._lockfile
def do_sync(
    project: Project,
    sections: Sequence[str] = (),
    dev: bool = False,
    default: bool = True,
    dry_run: bool = False,
    clean: Optional[bool] = None,
) -> None:
    """Synchronize project

    :param project: The project instance.
    :param sections: A tuple of optional sections to be synced.
    :param dev: whether to include dev-dependecies.
    :param default: whether to include default dependencies.
    :param dry_run: Print actions without actually running them.
    :param clean: whether to remove unneeded packages.
    """
    if not project.lockfile_file.exists():
        raise ProjectError("Lock file does not exist, nothing to sync.")
    # When unspecified, cleaning follows whether default deps are synced.
    if clean is None:
        clean = default
    candidates = {}
    for section in sections:
        candidates.update(project.get_locked_candidates(section))
    if dev:
        candidates.update(project.get_locked_candidates("dev"))
    if default:
        candidates.update(project.get_locked_candidates())
    Synchronizer(candidates, project.environment).synchronize(
        clean=clean, dry_run=dry_run
    )
def do_sync(
    project: Project,
    sections: Sequence[str] = (),
    dev: bool = False,
    default: bool = True,
    dry_run: bool = False,
    clean: Optional[bool] = None,
    tracked_names: Optional[Sequence[str]] = None,
) -> None:
    """Synchronize project"""
    if not project.lockfile_file.exists():
        raise ProjectError("Lock file does not exist, nothing to sync")
    # When unspecified, cleaning follows whether default deps are synced.
    if clean is None:
        clean = default
    if tracked_names and dry_run:
        # Dry-run on tracked packages: restrict to those names only.
        all_locked = project.get_locked_candidates("__all__")
        candidates = {
            name: can for name, can in all_locked.items() if name in tracked_names
        }
    else:
        candidates = {}
        known_sections = list(project.iter_sections())
        for section in translate_sections(project, default, dev, sections or ()):
            if section not in known_sections:
                raise PdmUsageError(
                    f"Section {termui.green(repr(section))} doesn't exist "
                    "in the pyproject.toml"
                )
            candidates.update(project.get_locked_candidates(section))
    syncer = project.core.synchronizer_class(
        candidates, project.environment, clean, dry_run
    )
    syncer.synchronize()
def check_project_file(project: Project) -> None:
    """Check the existence of the project file and throws an error on failure."""
    # An initialized project exposes non-empty metadata.
    if project.meta:
        return
    message = (
        "The pyproject.toml has not been initialized yet. You can do this "
        "by running {}.".format(termui.green("'pdm init'"))
    )
    raise ProjectError(message)
def convert_package_paths(self) -> Dict[str, Union[List, Dict]]:
    """Return a {package_dir, packages, package_data, exclude_package_data} dict
    derived from the project's includes/excludes settings, suitable for
    passing to setuptools.
    """
    package_dir = {}
    packages = []
    py_modules = []
    # Default: every package ships all of its files as data.
    package_data = {"": ["*"]}
    exclude_package_data = {}
    # All paths below are resolved relative to the project root.
    with vistir.cd(self.project.root.as_posix()):
        if not self.includes:
            # No explicit includes: auto-discover a src/ layout first,
            # falling back to a flat layout that skips tests.
            if os.path.isdir("src"):
                package_dir[""] = "src"
                packages = setuptools.find_packages("src")
            else:
                packages = setuptools.find_packages(exclude=["tests", "tests.*"])
            if not packages:
                # Nothing importable as a package: fall back to treating
                # top-level .py files as py_modules (strip ".py").
                py_modules = [path[:-3] for path in glob.glob("*.py")]
        else:
            packages_set = set()
            includes = self.includes
            # First pass: consume include entries that are plain package
            # directories (iterate a copy since we remove in place).
            for include in includes[:]:
                if include.replace("\\", "/").endswith("/*"):
                    include = include[:-2]
                if "*" not in include and os.path.isdir(include):
                    temp = setuptools.find_packages(include)
                    if os.path.exists(include + "/__init__.py"):
                        # The directory itself is a package.
                        temp.append(include)
                    elif temp:
                        # The directory only contains packages — treat it
                        # as the package root (src-style layout).
                        package_dir[""] = include
                    packages_set.update(temp)
                    includes.remove(include)
            packages[:] = list(packages_set)
            # Second pass: remaining includes are glob patterns for
            # top-level modules or package data files.
            for include in includes:
                for path in glob.glob(include):
                    if "/" not in path.lstrip("./") and path.endswith(".py"):
                        # Only include top level py modules
                        py_modules.append(path.lstrip("./")[:-3])
                if include.endswith(".py"):
                    continue
                # Attach non-.py patterns to the package they live under.
                for package in packages:
                    relpath = os.path.relpath(include, package)
                    if not relpath.startswith(".."):
                        package_data.setdefault(package, []).append(relpath)
            # Excludes are recorded per-package for setuptools.
            for exclude in self.excludes or []:
                for package in packages:
                    relpath = os.path.relpath(exclude, package)
                    if not relpath.startswith(".."):
                        exclude_package_data.setdefault(package, []).append(relpath)
    # setuptools rejects specifying both packages and py_modules.
    if packages and py_modules:
        raise ProjectError(
            "Can't specify packages and py_modules at the same time."
        )
    return {
        "package_dir": package_dir,
        "packages": packages,
        "py_modules": py_modules,
        "package_data": package_data,
        "exclude_package_data": exclude_package_data,
    }
def lockfile(self) -> dict:
    """Return the lock file content, reading it from disk on first access.

    :raises ProjectError: if no cached content exists and the file is missing.
    """
    # Existence is only checked when we actually need to read the file.
    if not self._lockfile:
        lock_path = self.lockfile_file
        if not lock_path.is_file():
            raise ProjectError("Lock file does not exist.")
        self._lockfile = atoml.parse(lock_path.read_text("utf-8"))
    return self._lockfile
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether perform syncing action
    :param packages: Package names to be removed
    :return: None
    :raises PdmUsageError: when no packages are given
    :raises ProjectError: when the section or a package doesn't exist
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not section:
        section = "dev" if dev else "default"
    if section not in list(project.iter_sections()):
        # BUG FIX: message previously read "No-exist section {section}".
        raise ProjectError(f"Section {section} does not exist.")
    deps = project.get_pyproject_dependencies(section, dev)
    project.core.ui.echo(
        f"Removing packages from {section} {'dev-' if dev else ''}dependencies: "
        + ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        # Collect matches in reverse order so deletion by index is safe.
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), section
                )
            )
        for i in matched_indexes:
            del deps[i]
    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
def do_sync(
    project: Project,
    *,
    groups: Sequence[str] = (),
    dev: bool = True,
    default: bool = True,
    dry_run: bool = False,
    clean: bool = False,
    requirements: list[Requirement] | None = None,
    tracked_names: Sequence[str] | None = None,
    no_editable: bool = False,
    no_self: bool = False,
    reinstall: bool = False,
) -> None:
    """Synchronize project"""
    if requirements is None:
        # No explicit requirements: derive them from the lock file groups.
        if not project.lockfile_file.exists():
            raise ProjectError("Lock file does not exist, nothing to sync")
        if not project.is_lockfile_compatible():
            project.core.ui.echo(
                "Lock file version is not compatible with PDM, "
                "install may fail, please regenerate the pdm.lock",
                err=True,
            )
        elif not project.is_lockfile_hash_match():
            project.core.ui.echo(
                "Lock file hash doesn't match pyproject.toml, packages may be outdated",
                err=True,
            )
        groups = translate_groups(project, default, dev, groups or ())
        requirements = [
            r for group in groups for r in project.get_dependencies(group).values()
        ]
    candidates = resolve_candidates_from_lockfile(project, requirements)
    if tracked_names and dry_run:
        # Dry-run on tracked packages: restrict to those names only.
        candidates = {
            name: can for name, can in candidates.items() if name in tracked_names
        }
    synchronizer = project.core.synchronizer_class(
        candidates,
        project.environment,
        clean,
        dry_run,
        no_editable=no_editable,
        install_self=not no_self and "default" in groups and bool(project.meta.name),
        use_install_cache=project.config["feature.install_cache"],
        reinstall=reinstall,
    )
    synchronizer.synchronize()
def check_lockfile(project: Project, raise_not_exist: bool = True) -> str | None:
    """Check if the lock file exists and is up to date.
    Return the update strategy ("all", "reuse" or None)."""
    ui = project.core.ui
    if not project.lockfile_file.exists():
        if raise_not_exist:
            raise ProjectError("Lock file does not exist, nothing to install")
        ui.echo("Lock file does not exist", fg="yellow", err=True)
        return "all"
    if not project.is_lockfile_compatible():
        ui.echo(
            "Lock file version is not compatible with PDM, installation may fail",
            fg="yellow",
            err=True,
        )
        # Incompatible format: everything must be re-locked.
        return "all"
    if not project.is_lockfile_hash_match():
        ui.echo(
            "Lock file hash doesn't match pyproject.toml, packages may be outdated",
            fg="yellow",
            err=True,
        )
        # Stale but usable: reuse pins where possible.
        return "reuse"
    return None
def ensure_setup_py(self, clean: bool = True) -> None:
    """Ensures the requirement has a setup.py ready.

    :param clean: remove the generated setup.py when the process exits.
    :raises ProjectError: when the project is not a PDM project.
    """
    # XXX: Currently only handle PDM project, and do nothing if not.
    if not self.ireq.source_dir or os.path.isfile(self.ireq.setup_py_path):
        return
    setup_py_path = self.ireq.setup_py_path
    if not self.project.is_pdm:
        # BUG FIX: corrected "proects" typo in the user-facing message.
        raise ProjectError(
            "General PEP 517 editable build is not supported "
            "except for PDM projects."
        )
    setup_py_content = self.format_setup_py()
    with open(setup_py_path, "w", encoding="utf-8") as fp:
        fp.write(setup_py_content)

    # Clean this temp file when process exits
    def cleanup() -> None:
        os.unlink(setup_py_path)

    if clean:
        atexit.register(cleanup)
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution."""
    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    check_project_file(project)
    if not (sdist or wheel):
        stream.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    # Build against the project itself as an install requirement.
    ireq = project.make_self_candidate(False).ireq
    ireq.source_dir = project.root.as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    # sdist first, then wheel, matching the original ordering.
    for enabled, builder_cls in ((sdist, SdistBuilder), (wheel, WheelBuilder)):
        if not enabled:
            continue
        with builder_cls(ireq) as builder:
            builder.build(dest)
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: The project instance
    :param dev: whether to update dev dependencies
    :param sections: update specified sections
    :param default: update default
    :param strategy: update strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: ignore version constraint
    :param packages: specified packages to update
    :return: None
    """
    check_project_file(project)
    # Named packages can only target a single section.
    if len(packages) > 0 and (len(sections) > 1 or not default):
        # BUG FIX: the message previously read "multple".
        raise click.BadParameter(
            "packages argument can't be used together with multiple -s or "
            "--no-default."
        )
    if not packages:
        if unconstrained:
            raise click.BadArgumentUsage(
                "--unconstrained must be used with package names given."
            )
        # pdm update with no packages given, same as 'lock' + 'sync'
        do_lock(project)
        do_sync(project, sections, dev, default, clean=False)
        return
    section = sections[0] if sections else ("dev" if dev else "default")
    all_dependencies = project.all_dependencies
    dependencies = all_dependencies[section]
    updated_deps = {}
    tracked_names = set()
    for name in packages:
        # Match case-insensitively on the normalized name, ignoring extras.
        matched_name = next(
            filter(
                lambda k: safe_name(strip_extras(k)[0]).lower()
                == safe_name(name).lower(),
                dependencies.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    context.io.green(name, bold=True), section
                )
            )
        if unconstrained:
            # Drop the version constraint so the resolver is free to pick.
            dependencies[matched_name].specifier = get_specifier("")
        tracked_names.add(matched_name)
        updated_deps[matched_name] = dependencies[matched_name]
    context.io.echo(
        "Updating packages: {}.".format(
            ", ".join(context.io.green(v, bold=True) for v in tracked_names)
        )
    )
    resolved = do_lock(project, strategy, tracked_names, all_dependencies)
    do_sync(project, sections=(section,), default=False, clean=False)
    if unconstrained:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps)
        lockfile = project.lockfile
        lockfile["root"]["content_hash"] = "md5:" + project.get_content_hash("md5")
        project.write_lockfile(lockfile, False)
def do_update(
    project: Project,
    *,
    dev: bool | None = None,
    groups: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Collection[str] = (),
    sync: bool = True,
    no_editable: bool = False,
    no_self: bool = False,
    prerelease: bool = False,
) -> None:
    """Update specified packages or all packages

    :param project: the project instance
    :param dev: update dev-dependencies (None means "both", per translate_groups)
    :param groups: the dependency groups to update
    :param strategy: resolution strategy passed to do_lock (reuse/eager)
    :param save: how to save new version specifiers (used with unconstrained)
    :param unconstrained: drop version constraints before resolving
    :param top: only track the named top-level packages when syncing
    :param packages: specific packages to update; empty means all
    :param sync: synchronize the environment after locking
    :param prerelease: allow prerelease versions (requires packages)
    """
    check_project_file(project)
    # Named packages can only target a single group and no --top.
    if len(packages) > 0 and (top or len(groups) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -G or "
            "--no-default and --top."
        )
    all_dependencies = project.all_dependencies
    updated_deps: dict[str, dict[str, Requirement]] = defaultdict(dict)
    install_dev = True if dev is None else dev
    if not packages:
        if prerelease:
            raise PdmUsageError("--prerelease must be used with packages given")
        # No packages named: update every dependency in the selected groups.
        groups = translate_groups(project, default, install_dev, groups or ())
        for group in groups:
            updated_deps[group] = all_dependencies[group]
    else:
        group = groups[0] if groups else ("dev" if dev else "default")
        dependencies = all_dependencies[group]
        for name in packages:
            # Match on the normalized name, ignoring any extras in the key.
            matched_name = next(
                filter(
                    lambda k: normalize_name(strip_extras(k)[0])
                    == normalize_name(name),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), group, "dev-" if dev else ""
                    )
                )
            dependencies[matched_name].prerelease = prerelease
            updated_deps[group][matched_name] = dependencies[matched_name]
        project.core.ui.echo(
            "Updating packages: {}.".format(
                ", ".join(
                    termui.green(v, bold=True)
                    for v in chain.from_iterable(updated_deps.values())
                )
            )
        )
    if unconstrained:
        # Drop version constraints so the resolver is free to pick.
        for deps in updated_deps.values():
            for dep in deps.values():
                dep.specifier = get_specifier("")
    # Lock against the full requirement set, tracking only the updated names.
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy,
        chain.from_iterable(updated_deps.values()),
        reqs,
        dry_run=dry_run,
    )
    if sync or dry_run:
        do_sync(
            project,
            groups=groups,
            dev=install_dev,
            default=default,
            clean=False,
            dry_run=dry_run,
            requirements=[r for deps in updated_deps.values() for r in deps.values()],
            tracked_names=list(chain.from_iterable(updated_deps.values()))
            if top
            else None,
            no_editable=no_editable,
            no_self=no_self,
        )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        for group, deps in updated_deps.items():
            project.add_dependencies(deps, group, dev or False)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: the project instance
    :param dev: update dev dependencies
    :param sections: the dependency sections to update
    :param strategy: resolution strategy passed to do_lock (reuse/eager)
    :param save: how to save new version specifiers (used with unconstrained)
    :param unconstrained: drop version constraints before resolving
    :param top: only track the named top-level packages when syncing
    :param packages: specific packages to update; empty means all
    """
    check_project_file(project)
    # Named packages can only target a single section and no --top.
    if len(packages) > 0 and (top or len(sections) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -s or "
            "--no-default and --top.")
    all_dependencies = project.all_dependencies
    updated_deps = {}
    if not packages:
        # No packages named: update everything in the selected sections.
        sections = translate_sections(project, default, dev, sections or ())
        for section in sections:
            updated_deps.update(all_dependencies[section])
    else:
        section = sections[0] if sections else ("dev" if dev else "default")
        dependencies = all_dependencies[section]
        for name in packages:
            # Match case-insensitively on the normalized name, ignoring extras.
            matched_name = next(
                filter(
                    lambda k: safe_name(strip_extras(k)[0]).lower()
                    == safe_name(name).lower(),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), section,
                        "dev-" if dev else ""))
            updated_deps[matched_name] = dependencies[matched_name]
        project.core.ui.echo("Updating packages: {}.".format(", ".join(
            termui.green(v, bold=True) for v in updated_deps)))
    if unconstrained:
        # Drop version constraints so the resolver is free to pick.
        for _, dep in updated_deps.items():
            dep.specifier = get_specifier("")
    # Lock against the full requirement set; a targeted update reuses pins,
    # otherwise everything is re-resolved ("all").
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy if top or packages else "all",
        updated_deps.keys(),
        reqs,
        dry_run=dry_run,
    )
    do_sync(
        project,
        sections=sections,
        dev=dev,
        default=default,
        clean=False,
        dry_run=dry_run,
        tracked_names=updated_deps.keys() if top else None,
    )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps, section, dev)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)