def do_add(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to the dev dependencies section
    :param section: specify the section to add to
    :param sync: whether to install the added packages
    :param save: save strategy
    :param strategy: update strategy
    :param editables: editable requirements
    :param packages: normal requirements
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError("Must specify at least one package or editable package.")
    section = "dev" if dev else section or "default"
    tracked_names = set()
    requirements = {}
    for r in [parse_requirement(line, True) for line in editables] + [
        parse_requirement(line) for line in packages
    ]:
        key = r.identify()
        r.from_section = section
        tracked_names.add(key)
        requirements[key] = r
    stream.echo(
        f"Adding packages to {section} dependencies: "
        + ", ".join(stream.green(key or "", bold=True) for key in requirements)
    )
    all_dependencies = project.all_dependencies
    all_dependencies.setdefault(section, {}).update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    save_version_specifiers(requirements, resolved, save)
    project.add_dependencies(requirements)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        do_sync(
            project,
            sections=(section,),
            dev=False,
            default=False,
            dry_run=False,
            clean=False,
        )
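# Usage sketch (not part of the original source): given an already-constructed
# Project instance, the do_add above resolves, updates pyproject/lockfile, and then
# installs the new packages unless sync=False. The package name is hypothetical.
def example_add(project: Project) -> None:
    do_add(
        project,
        dev=True,                     # goes into the "dev" section
        packages=["requests>=2.20"],
        sync=False,                   # write files without installing anything
    )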
def do_lock( project: Project, strategy: str = "all", tracked_names: Optional[Iterable[str]] = None, requirements: Optional[List[Requirement]] = None, ) -> Dict[str, Candidate]: """Performs the locking process and update lockfile. :param project: the project instance :param strategy: update stratege: reuse/eager/all :param tracked_names: required when using eager strategy :param requirements: An optional dictionary of requirements, read from pyproject if not given. """ check_project_file(project) # TODO: multiple dependency definitions for the same package. provider = project.get_provider(strategy, tracked_names) if not requirements: requirements = [ r for deps in project.all_dependencies.values() for r in deps.values() ] resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"]) with stream.logging("lock"): # The context managers are nested to ensure the spinner is stopped before # any message is thrown to the output. with stream.open_spinner(title="Resolving dependencies", spinner="dots") as spin: reporter = project.get_reporter(requirements, tracked_names, spin) resolver = project.core.resolver_class(provider, reporter) try: mapping, dependencies, summaries = resolve( resolver, requirements, project.environment.python_requires, resolve_max_rounds, ) except ResolutionTooDeep: spin.fail(f"{LOCK} Lock failed") stream.echo( "The dependency resolution exceeds the maximum loop depth of " f"{resolve_max_rounds}, there may be some circular dependencies " "in your project. Try to solve them or increase the " f"{stream.green('`strategy.resolve_max_rounds`')} config.", err=True, ) raise except ResolutionImpossible as err: spin.fail(f"{LOCK} Lock failed") stream.echo(format_resolution_impossible(err), err=True) raise else: data = format_lockfile(mapping, dependencies, summaries) spin.succeed(f"{LOCK} Lock successful") project.write_lockfile(data) return mapping
def do_lock( project: Project, strategy: str = "all", tracked_names: Iterable[str] | None = None, requirements: list[Requirement] | None = None, dry_run: bool = False, ) -> dict[str, Candidate]: """Performs the locking process and update lockfile.""" check_project_file(project) # TODO: multiple dependency definitions for the same package. provider = project.get_provider(strategy, tracked_names) if not requirements: requirements = [ r for deps in project.all_dependencies.values() for r in deps.values() ] resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"]) ui = project.core.ui with ui.logging("lock"): # The context managers are nested to ensure the spinner is stopped before # any message is thrown to the output. with ui.open_spinner(title="Resolving dependencies", spinner="dots") as spin: reporter = project.get_reporter(requirements, tracked_names, spin) resolver: Resolver = project.core.resolver_class( provider, reporter) try: mapping, dependencies, summaries = resolve( resolver, requirements, project.environment.python_requires, resolve_max_rounds, ) except ResolutionTooDeep: spin.fail(f"{termui.Emoji.LOCK} Lock failed") ui.echo( "The dependency resolution exceeds the maximum loop depth of " f"{resolve_max_rounds}, there may be some circular dependencies " "in your project. Try to solve them or increase the " f"{termui.green('`strategy.resolve_max_rounds`')} config.", err=True, ) raise except ResolutionImpossible as err: spin.fail(f"{termui.Emoji.LOCK} Lock failed") ui.echo(format_resolution_impossible(err), err=True) raise else: data = format_lockfile(mapping, dependencies, summaries) spin.succeed(f"{termui.Emoji.LOCK} Lock successful") project.write_lockfile(data, write=not dry_run) return mapping
def do_add(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
    unconstrained: bool = False,
    no_editable: bool = False,
    no_self: bool = False,
) -> None:
    """Add packages and install"""
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError("Must specify at least one package or editable package.")
    if not group:
        group = "dev" if dev else "default"
    tracked_names: set[str] = set()
    requirements: dict[str, Requirement] = {}
    for r in [parse_requirement(line, True) for line in editables] + [
        parse_requirement(line) for line in packages
    ]:
        key = r.identify()
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(
        f"Adding packages to {group} {'dev-' if dev else ''}dependencies: "
        + ", ".join(termui.green(key or "", bold=True) for key in requirements)
    )
    all_dependencies = project.all_dependencies
    group_deps = all_dependencies.setdefault(group, {})
    if unconstrained:
        for req in group_deps.values():
            req.specifier = get_specifier("")
    group_deps.update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    deps_to_update = group_deps if unconstrained else requirements
    save_version_specifiers({group: deps_to_update}, resolved, save)
    project.add_dependencies(deps_to_update, group, dev)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        do_sync(
            project,
            groups=(group,),
            default=False,
            no_editable=no_editable,
            no_self=no_self,
        )
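# Usage sketch (assumption): the group-based do_add above can also loosen the pins of
# the existing dependencies in the target group via unconstrained=True before
# re-locking. The group and package names are hypothetical.
def example_add_to_group(project: Project) -> None:
    do_add(
        project,
        group="test",
        packages=["pytest>=6"],
        unconstrained=True,  # reset specifiers of existing group deps before locking
        no_self=True,        # do not install the project itself during sync
    )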
def do_lock( project: Project, strategy: str = "all", tracked_names: Optional[Iterable[str]] = None, requirements: Optional[Dict[str, Dict[str, Requirement]]] = None, ) -> Dict[str, Candidate]: """Performs the locking process and update lockfile. :param project: the project instance :param strategy: update stratege: reuse/eager/all :param tracked_names: required when using eager strategy :param requirements: An optional dictionary of requirements, read from pyproject if not given. """ check_project_file(project) # TODO: multiple dependency definitions for the same package. repository = project.get_repository() requirements = requirements or project.all_dependencies allow_prereleases = project.allow_prereleases requires_python = project.python_requires if strategy == "all": provider = BaseProvider(repository, requires_python, allow_prereleases) else: provider_class = (ReusePinProvider if strategy == "reuse" else EagerUpdateProvider) preferred_pins = project.get_locked_candidates("__all__") provider = provider_class( preferred_pins, tracked_names or (), repository, requires_python, allow_prereleases, ) flat_reqs = list( itertools.chain(*[deps.values() for _, deps in requirements.items()])) # TODO: switch reporter at io level. with halo.Halo(text="Resolving dependencies", spinner="dots") as spin: reporter = SpinnerReporter(flat_reqs, spin) mapping, dependencies, summaries = resolve(provider, reporter, requirements, requires_python) data = format_lockfile(mapping, dependencies, summaries) spin.succeed("Resolution success") project.write_lockfile(data) return mapping
def do_update( project: Project, dev: bool = False, sections: Sequence[str] = (), default: bool = True, strategy: str = "reuse", save: str = "compatible", unconstrained: bool = False, packages: Sequence[str] = (), ) -> None: """Update specified packages or all packages :param project: The project instance :param dev: whether to update dev dependencies :param sections: update speicified sections :param default: update default :param strategy: update strategy (reuse/eager) :param save: save strategy (compatible/exact/wildcard) :param unconstrained: ignore version constraint :param packages: specified packages to update :return: None """ check_project_file(project) if len(packages) > 0 and (len(sections) > 1 or not default): raise click.BadParameter( "packages argument can't be used together with multple -s or --no-default." ) if not packages: if unconstrained: raise click.BadArgumentUsage( "--unconstrained must be used with package names given.") # pdm update with no packages given, same as 'lock' + 'sync' do_lock(project) do_sync(project, sections, dev, default, clean=False) return section = sections[0] if sections else ("dev" if dev else "default") all_dependencies = project.all_dependencies dependencies = all_dependencies[section] updated_deps = {} tracked_names = set() for name in packages: matched_name = next( filter( lambda k: safe_name(strip_extras(k)[0]).lower() == safe_name( name).lower(), dependencies.keys(), ), None, ) if not matched_name: raise ProjectError("{} does not exist in {} dependencies.".format( context.io.green(name, bold=True), section)) if unconstrained: dependencies[matched_name].specifier = get_specifier("") tracked_names.add(matched_name) updated_deps[matched_name] = dependencies[matched_name] context.io.echo("Updating packages: {}.".format(", ".join( context.io.green(v, bold=True) for v in tracked_names))) resolved = do_lock(project, strategy, tracked_names, all_dependencies) do_sync(project, sections=(section, ), default=False, clean=False) if unconstrained: # Need to update version constraints save_version_specifiers(updated_deps, resolved, save) project.add_dependencies(updated_deps) lockfile = project.lockfile lockfile["root"]["content_hash"] = "md5:" + project.get_content_hash( "md5") project.write_lockfile(lockfile, False)
def do_lock( project: Project, strategy: str = "all", tracked_names: Iterable[str] | None = None, requirements: list[Requirement] | None = None, dry_run: bool = False, refresh: bool = False, ) -> dict[str, Candidate]: """Performs the locking process and update lockfile.""" check_project_file(project) if refresh: locked_repo = project.locked_repository repo = project.get_repository() mapping: dict[str, Candidate] = {} dependencies: dict[str, list[Requirement]] = {} with project.core.ui.open_spinner("Re-calculating hashes..."): for key, candidate in locked_repo.packages.items(): reqs, python_requires, summary = locked_repo.candidate_info[key] candidate.hashes = repo.get_hashes(candidate) candidate.summary = summary candidate.requires_python = python_requires ident = cast(str, key[0]) mapping[ident] = candidate dependencies[ident] = list(map(parse_requirement, reqs)) lockfile = format_lockfile(project, mapping, dependencies) project.write_lockfile(lockfile) return mapping # TODO: multiple dependency definitions for the same package. provider = project.get_provider(strategy, tracked_names) if not requirements: requirements = [ r for deps in project.all_dependencies.values() for r in deps.values() ] resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"]) ui = project.core.ui with ui.logging("lock"): # The context managers are nested to ensure the spinner is stopped before # any message is thrown to the output. with ui.open_spinner(title="Resolving dependencies", spinner="dots") as spin: reporter = project.get_reporter(requirements, tracked_names, spin) resolver: Resolver = project.core.resolver_class(provider, reporter) signals.pre_lock.send(project, requirements=requirements, dry_run=dry_run) try: mapping, dependencies = resolve( resolver, requirements, project.environment.python_requires, resolve_max_rounds, ) except ResolutionTooDeep: spin.fail(f"{termui.Emoji.LOCK} Lock failed") ui.echo( "The dependency resolution exceeds the maximum loop depth of " f"{resolve_max_rounds}, there may be some circular dependencies " "in your project. Try to solve them or increase the " f"{termui.green('`strategy.resolve_max_rounds`')} config.", err=True, ) raise except ResolutionImpossible as err: spin.fail(f"{termui.Emoji.LOCK} Lock failed") ui.echo(format_resolution_impossible(err), err=True) raise ResolutionImpossible("Unable to find a resolution") from None else: data = format_lockfile(project, mapping, dependencies) spin.succeed(f"{termui.Emoji.LOCK} Lock successful") signals.post_lock.send(project, resolution=mapping, dry_run=dry_run) project.write_lockfile(data, write=not dry_run) return mapping
def do_update(
    project: Project,
    *,
    dev: bool | None = None,
    groups: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Collection[str] = (),
    sync: bool = True,
    no_editable: bool = False,
    no_self: bool = False,
    prerelease: bool = False,
) -> None:
    """Update specified packages or all packages"""
    check_project_file(project)
    if len(packages) > 0 and (top or len(groups) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -G or "
            "--no-default and --top."
        )
    all_dependencies = project.all_dependencies
    updated_deps: dict[str, dict[str, Requirement]] = defaultdict(dict)
    install_dev = True if dev is None else dev
    if not packages:
        if prerelease:
            raise PdmUsageError("--prerelease must be used with packages given")
        groups = translate_groups(project, default, install_dev, groups or ())
        for group in groups:
            updated_deps[group] = all_dependencies[group]
    else:
        group = groups[0] if groups else ("dev" if dev else "default")
        dependencies = all_dependencies[group]
        for name in packages:
            matched_name = next(
                filter(
                    lambda k: normalize_name(strip_extras(k)[0])
                    == normalize_name(name),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), group, "dev-" if dev else ""
                    )
                )
            dependencies[matched_name].prerelease = prerelease
            updated_deps[group][matched_name] = dependencies[matched_name]
        project.core.ui.echo(
            "Updating packages: {}.".format(
                ", ".join(
                    termui.green(v, bold=True)
                    for v in chain.from_iterable(updated_deps.values())
                )
            )
        )
    if unconstrained:
        for deps in updated_deps.values():
            for dep in deps.values():
                dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy,
        chain.from_iterable(updated_deps.values()),
        reqs,
        dry_run=dry_run,
    )
    if sync or dry_run:
        do_sync(
            project,
            groups=groups,
            dev=install_dev,
            default=default,
            clean=False,
            dry_run=dry_run,
            requirements=[r for deps in updated_deps.values() for r in deps.values()],
            tracked_names=list(chain.from_iterable(updated_deps.values()))
            if top
            else None,
            no_editable=no_editable,
            no_self=no_self,
        )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        for group, deps in updated_deps.items():
            project.add_dependencies(deps, group, dev or False)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
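# Usage sketch (assumption): updating a single named package in one group with the
# do_update above, allowing prereleases for it and previewing the result via
# dry_run=True. The group and package names are hypothetical.
def example_update_one(project: Project) -> None:
    do_update(
        project,
        groups=("dev",),
        packages=["pytest"],
        prerelease=True,  # allow prerelease versions for the named package
        dry_run=True,     # resolve and report, but do not persist the lockfile
    )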
def do_update( project: Project, dev: bool = False, sections: Sequence[str] = (), default: bool = True, strategy: str = "reuse", save: str = "compatible", unconstrained: bool = False, top: bool = False, dry_run: bool = False, packages: Sequence[str] = (), ) -> None: """Update specified packages or all packages""" check_project_file(project) if len(packages) > 0 and (top or len(sections) > 1 or not default): raise PdmUsageError( "packages argument can't be used together with multiple -s or " "--no-default and --top.") all_dependencies = project.all_dependencies updated_deps = {} if not packages: sections = translate_sections(project, default, dev, sections or ()) for section in sections: updated_deps.update(all_dependencies[section]) else: section = sections[0] if sections else ("dev" if dev else "default") dependencies = all_dependencies[section] for name in packages: matched_name = next( filter( lambda k: safe_name(strip_extras(k)[0]).lower() == safe_name(name).lower(), dependencies.keys(), ), None, ) if not matched_name: raise ProjectError( "{} does not exist in {} {}dependencies.".format( termui.green(name, bold=True), section, "dev-" if dev else "")) updated_deps[matched_name] = dependencies[matched_name] project.core.ui.echo("Updating packages: {}.".format(", ".join( termui.green(v, bold=True) for v in updated_deps))) if unconstrained: for _, dep in updated_deps.items(): dep.specifier = get_specifier("") reqs = [r for deps in all_dependencies.values() for r in deps.values()] resolved = do_lock( project, strategy if top or packages else "all", updated_deps.keys(), reqs, dry_run=dry_run, ) do_sync( project, sections=sections, dev=dev, default=default, clean=False, dry_run=dry_run, tracked_names=updated_deps.keys() if top else None, ) if unconstrained and not dry_run: # Need to update version constraints save_version_specifiers(updated_deps, resolved, save) project.add_dependencies(updated_deps, section, dev) lockfile = project.lockfile project.write_lockfile(lockfile, False)