Example #1
    def handle(self, project: Project, options: argparse.Namespace) -> None:
        check_project_file(project)
        interpreter = project.python
        if options.python:
            project.core.ui.echo(str(interpreter.executable))
        elif options.where:
            project.core.ui.echo(str(project.root))
        elif options.packages:
            project.core.ui.echo(str(project.environment.packages_path))
        elif options.env:
            project.core.ui.echo(
                json.dumps(project.environment.marker_environment, indent=2))
        else:

            rows = [
                (termui.cyan("PDM version:", bold=True), project.core.version),
                (
                    termui.cyan("Python Interpreter:", bold=True),
                    f"{interpreter.executable} ({interpreter.identifier})",
                ),
                (termui.cyan("Project Root:",
                             bold=True), project.root.as_posix()),
                (
                    termui.cyan("Project Packages:", bold=True),
                    str(project.environment.packages_path),
                ),
            ]
            project.core.ui.display_columns(rows)
Example #2
File: actions.py Project: ulwlu/pdm
def do_info(
    project: Project,
    python: bool = False,
    show_project: bool = False,
    env: bool = False,
) -> None:
    """Show project information."""
    check_project_file(project)
    python_path = project.python_executable
    python_version, is_64bit = get_python_version(python_path, True)
    if not python and not show_project and not env:
        rows = [
            (termui.cyan("PDM version:", bold=True), project.core.version),
            (
                termui.cyan("Python Interpreter:", bold=True),
                python_path +
                f" ({get_python_version_string(python_version, is_64bit)})",
            ),
            (termui.cyan("Project Root:", bold=True), project.root.as_posix()),
        ]
        project.core.ui.display_columns(rows)
        return

    if python:
        project.core.ui.echo(python_path)
    if show_project:
        project.core.ui.echo(project.root.as_posix())
    if env:
        project.core.ui.echo(
            json.dumps(project.environment.marker_environment, indent=2))
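
For context, a hedged calling sketch for the snippet above. It assumes `project` is an already-constructed pdm Project and that do_info is importable from pdm.cli.actions, as the file path above suggests; neither assumption is confirmed by the listing itself.

from pdm.cli.actions import do_info  # assumed import path

do_info(project, python=True)        # print only the interpreter path
do_info(project, show_project=True)  # print only the project root
do_info(project)                     # no flags: print the summary table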
Example #3
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Optional[Iterable[str]] = None,
    requirements: Optional[List[Requirement]] = None,
) -> Dict[str, Candidate]:
    """Perform the locking process and update the lockfile.

    :param project: the project instance
    :param strategy: update strategy: reuse/eager/all
    :param tracked_names: required when using eager strategy
    :param requirements: An optional list of requirements, read from pyproject
        if not given.
    """
    check_project_file(project)
    # TODO: multiple dependency definitions for the same package.
    provider = project.get_provider(strategy, tracked_names)
    if not requirements:
        requirements = [
            r for deps in project.all_dependencies.values()
            for r in deps.values()
        ]

    with stream.open_spinner(title="Resolving dependencies",
                             spinner="dots") as spin, stream.logging("lock"):
        reporter = project.get_reporter(requirements, tracked_names, spin)
        resolver = project.core.resolver_class(provider, reporter)
        mapping, dependencies, summaries = resolve(
            resolver, requirements, project.environment.python_requires)
        data = format_lockfile(mapping, dependencies, summaries)
        spin.succeed("Resolution success")
    project.write_lockfile(data)

    return mapping
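
A minimal usage sketch for this do_lock variant, under the same assumptions as above (a ready pdm Project and the pdm.cli.actions import path). The function returns the resolved candidate mapping, which callers such as do_add reuse.

from pdm.cli.actions import do_lock  # assumed import path

# Eager update of a single tracked package; "requests" is only an example name.
resolved = do_lock(project, strategy="eager", tracked_names={"requests"})
print(sorted(resolved))  # resolved candidates keyed by package identifier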
Example #4
def do_list(project: Project,
            graph: bool = False,
            reverse: bool = False) -> None:
    """Display a list of packages installed in the local packages directory.

    :param project: the project instance.
    :param graph: whether to display a graph.
    :param reverse: whether to display the reverse graph.
    """
    from pdm.cli.utils import (
        build_dependency_graph,
        format_dependency_graph,
        format_reverse_dependency_graph,
    )

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if reverse and not graph:
        raise PdmUsageError("--reverse must be used with --graph")
    if graph:
        with project.environment.activate():
            dep_graph = build_dependency_graph(working_set)
        if reverse:
            graph = format_reverse_dependency_graph(project, dep_graph)
        else:
            graph = format_dependency_graph(project, dep_graph)
        stream.echo(graph)
    else:
        rows = [(stream.green(k, bold=True), format_dist(v))
                for k, v in sorted(working_set.items())]
        stream.display_columns(rows, ["Package", "Version"])
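
A hedged sketch of calling this do_list variant, under the same assumptions about `project` and the import path: the flat table and the graph view are toggled by the boolean flags, and reverse is rejected unless graph is also set.

from pdm.cli.actions import do_list  # assumed import path

do_list(project)                            # flat table of installed packages
do_list(project, graph=True)                # dependency graph
do_list(project, graph=True, reverse=True)  # reverse dependency graph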
Example #5
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution."""
    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    check_project_file(project)
    if not wheel and not sdist:
        stream.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    if not os.path.isabs(dest):
        dest = project.root.joinpath(dest).as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    with stream.logging("build"), EnvBuilder(project.root,
                                             project.environment) as builder:
        if sdist:
            stream.echo("Building sdist...")
            loc = builder.build_sdist(dest)
            stream.echo(f"Built sdist at {loc}")
        if wheel:
            stream.echo("Building wheel...")
            loc = builder.build_wheel(dest)
            stream.echo(f"Built wheel at {loc}")
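
A minimal sketch of invoking this builder, under the same assumptions about `project` (here it must be non-global) and the import path; a relative dest is resolved against the project root and wiped first when clean is true.

from pdm.cli.actions import do_build  # assumed import path

# Build only a wheel into <project_root>/dist, removing previous artifacts first.
do_build(project, sdist=False, wheel=True, dest="dist", clean=True)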
Example #6
File: actions.py Project: ulwlu/pdm
def do_add(
        project: Project,
        dev: bool = False,
        section: Optional[str] = None,
        sync: bool = True,
        save: str = "compatible",
        strategy: str = "reuse",
        editables: Iterable[str] = (),
        packages: Iterable[str] = (),
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to dev dependencies section
    :param section: the section to add the packages to
    :param sync: whether to install added packages
    :param save: save strategy
    :param strategy: update strategy
    :param editables: editable requirements
    :param packages: normal requirements
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    section = "dev" if dev else section or "default"
    tracked_names = set()
    requirements = {}
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        r.from_section = section
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(f"Adding packages to {section} dependencies: " +
                         ", ".join(
                             termui.green(key or "", bold=True)
                             for key in requirements))
    all_dependencies = project.all_dependencies
    all_dependencies.setdefault(section, {}).update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    save_version_specifiers(requirements, resolved, save)
    project.add_dependencies(requirements)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        do_sync(
            project,
            sections=(section, ),
            dev=False,
            default=False,
            dry_run=False,
            clean=False,
        )
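
A hedged usage sketch for this do_add variant, under the same assumptions: one normal requirement and one editable requirement are added to the dev section, and the packages are installed afterwards because sync defaults to True.

from pdm.cli.actions import do_add  # assumed import path

do_add(
    project,
    dev=True,                      # write into the dev section
    packages=["requests>=2.25"],   # normal requirement (example name)
    editables=["./local-plugin"],  # editable requirement (example path)
)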
Example #7
File: actions.py Project: ulwlu/pdm
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Optional[Iterable[str]] = None,
    requirements: Optional[List[Requirement]] = None,
) -> Dict[str, Candidate]:
    """Perform the locking process and update the lockfile.

    :param project: the project instance
    :param strategy: update strategy: reuse/eager/all
    :param tracked_names: required when using eager strategy
    :param requirements: An optional list of requirements, read from pyproject
        if not given.
    """
    check_project_file(project)
    # TODO: multiple dependency definitions for the same package.
    provider = project.get_provider(strategy, tracked_names)
    if not requirements:
        requirements = [
            r for deps in project.all_dependencies.values()
            for r in deps.values()
        ]
    resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"])
    ui = project.core.ui
    with ui.logging("lock"):
        # The context managers are nested to ensure the spinner is stopped before
        # any message is thrown to the output.
        with ui.open_spinner(title="Resolving dependencies",
                             spinner="dots") as spin:
            reporter = project.get_reporter(requirements, tracked_names, spin)
            resolver = project.core.resolver_class(provider, reporter)
            try:
                mapping, dependencies, summaries = resolve(
                    resolver,
                    requirements,
                    project.environment.python_requires,
                    resolve_max_rounds,
                )
            except ResolutionTooDeep:
                spin.fail(f"{termui.Emoji.LOCK} Lock failed")
                ui.echo(
                    "The dependency resolution exceeds the maximum loop depth of "
                    f"{resolve_max_rounds}, there may be some circular dependencies "
                    "in your project. Try to solve them or increase the "
                    f"{termui.green('`strategy.resolve_max_rounds`')} config.",
                    err=True,
                )
                raise
            except ResolutionImpossible as err:
                spin.fail(f"{termui.Emoji.LOCK} Lock failed")
                ui.echo(format_resolution_impossible(err), err=True)
                raise
            else:
                data = format_lockfile(mapping, dependencies, summaries)
                spin.succeed(f"{termui.Emoji.LOCK} Lock successful")
    project.write_lockfile(data)

    return mapping
Example #8
def do_add(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
    unconstrained: bool = False,
    no_editable: bool = False,
    no_self: bool = False,
) -> None:
    """Add packages and install"""
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    if not group:
        group = "dev" if dev else "default"
    tracked_names: set[str] = set()
    requirements: dict[str, Requirement] = {}
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(
        f"Adding packages to {group} {'dev-' if dev else ''}dependencies: " +
        ", ".join(termui.green(key or "", bold=True) for key in requirements))
    all_dependencies = project.all_dependencies
    group_deps = all_dependencies.setdefault(group, {})
    if unconstrained:
        for req in group_deps.values():
            req.specifier = get_specifier("")
    group_deps.update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    deps_to_update = group_deps if unconstrained else requirements
    save_version_specifiers({group: deps_to_update}, resolved, save)
    project.add_dependencies(deps_to_update, group, dev)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        do_sync(
            project,
            groups=(group, ),
            default=False,
            no_editable=no_editable,
            no_self=no_self,
        )
Example #9
def do_remove(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    packages: Collection[str] = (),
    no_editable: bool = False,
    no_self: bool = False,
    dry_run: bool = False,
) -> None:
    """Remove packages from working set and pyproject.toml"""
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not group:
        group = "dev" if dev else "default"
    if group not in list(project.iter_groups()):
        raise ProjectError(f"Nonexistent group {group}")

    deps = project.get_pyproject_dependencies(group, dev)
    project.core.ui.echo(
        f"Removing packages from {group} {'dev-' if dev else ''}dependencies: "
        + ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), group
                )
            )
        for i in matched_indexes:
            del deps[i]

    if not dry_run:
        project.write_pyproject()
    do_lock(project, "reuse", dry_run=dry_run)
    if sync:
        do_sync(
            project,
            groups=(group,),
            default=False,
            clean=True,
            no_editable=no_editable,
            no_self=no_self,
            dry_run=dry_run,
        )
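
A matching removal sketch, same assumptions as above: the group falls back to "default" unless dev is set, and dry_run skips writing pyproject.toml.

from pdm.cli.actions import do_remove  # assumed import path

do_remove(project, group="default", packages=["requests"], dry_run=True)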
Example #10
 def handle(self, project: Project, options: argparse.Namespace) -> None:
     check_project_file(project)
     runner = TaskRunner(project)
     if options.list:
         return runner.show_list()
     if options.site_packages:
         runner.global_options.update(
             {"site_packages": options.site_packages})
     if not options.command:
         project.core.ui.echo(
             "No command is given, default to the Python REPL.",
             fg="yellow",
             err=True,
         )
         options.command = "python"
     sys.exit(runner.run(options.command, options.args))
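
This handler only reads a few attributes from the parsed options, so a hedged sketch of driving it outside the normal argparse wiring could look like the following; `run_command` is a stand-in for an instance of the command class the method belongs to, and the attribute set mirrors exactly what the code above accesses. Note that handle() ends with sys.exit().

import argparse

# Hypothetical direct invocation; in real use argparse populates these attributes.
options = argparse.Namespace(
    list=False,
    site_packages=False,
    command="pytest",
    args=["-x", "tests/"],
)
run_command.handle(project, options)  # exits the process with the command's status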
Example #11
def do_remove(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether to perform the sync action
    :param packages: Package names to be removed
    :return: None
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    if not section:
        section = "dev" if dev else "default"
    if section not in list(project.iter_sections()):
        raise ProjectError(f"Nonexistent section {section}")

    deps = project.get_pyproject_dependencies(section, dev)
    project.core.ui.echo(
        f"Removing packages from {section} {'dev-' if dev else ''}dependencies: "
        + ", ".join(str(termui.green(name, bold=True)) for name in packages)
    )
    for name in packages:
        req = parse_requirement(name)
        matched_indexes = sorted(
            (i for i, r in enumerate(deps) if req.matches(r, False)), reverse=True
        )
        if not matched_indexes:
            raise ProjectError(
                "{} does not exist in {} dependencies.".format(
                    termui.green(name, bold=True), section
                )
            )
        for i in matched_indexes:
            del deps[i]

    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section,), default=False, clean=True)
Example #12
def do_list(
    project: Project,
    graph: bool = False,
    reverse: bool = False,
    freeze: bool = False,
    json: bool = False,
) -> None:
    """Display a list of packages installed in the local packages directory."""
    from pdm.cli.utils import build_dependency_graph, format_dependency_graph

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if graph:
        dep_graph = build_dependency_graph(
            working_set, project.environment.marker_environment
        )
        project.core.ui.echo(
            format_dependency_graph(project, dep_graph, reverse=reverse, json=json)
        )
    else:
        if reverse:
            raise PdmUsageError("--reverse must be used with --graph")
        if json:
            raise PdmUsageError("--json must be used with --graph")
        if freeze:
            reqs = sorted(
                (
                    Requirement.from_dist(dist)
                    .as_line()
                    .replace(
                        "${PROJECT_ROOT}",
                        project.root.absolute().as_posix().lstrip("/"),
                    )
                    for dist in sorted(
                        working_set.values(), key=lambda d: d.metadata["Name"]
                    )
                ),
                key=lambda x: x.lower(),
            )
            project.core.ui.echo("\n".join(reqs))
            return
        rows = [
            (termui.green(k, bold=True), termui.yellow(v.version), get_dist_location(v))
            for k, v in sorted(working_set.items())
        ]
        project.core.ui.display_columns(rows, ["Package", "Version", "Location"])
Example #13
def do_remove(
        project: Project,
        dev: bool = False,
        section: Optional[str] = None,
        sync: bool = True,
        packages: Sequence[str] = (),
):
    """Remove packages from working set and pyproject.toml

    :param project: The project instance
    :param dev: Remove package from dev-dependencies
    :param section: Remove package from given section
    :param sync: Whether to perform the sync action
    :param packages: Package names to be removed
    :return: None
    """
    check_project_file(project)
    if not packages:
        raise PdmUsageError("Must specify at least one package to remove.")
    section = "dev" if dev else section or "default"
    toml_section = f"{section}-dependencies" if section != "default" else "dependencies"
    if toml_section not in project.tool_settings:
        raise ProjectError(
            f"No such section {stream.yellow(toml_section)} in pyproject.toml."
        )
    deps = project.tool_settings[toml_section]
    stream.echo(f"Removing packages from {section} dependencies: " + ", ".join(
        str(stream.green(name, bold=True)) for name in packages))
    for name in packages:
        matched_name = next(
            filter(
                lambda k: safe_name(k).lower() == safe_name(name).lower(),
                deps.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                stream.green(name, bold=True), section))
        del deps[matched_name]

    project.write_pyproject()
    do_lock(project, "reuse")
    if sync:
        do_sync(project, sections=(section, ), default=False, clean=True)
Example #14
def do_list(project: Project, graph: bool = False) -> None:
    """Display a list of packages installed in the local packages directory.

    :param project: the project instance.
    :param graph: whether to display a graph.
    """
    from pdm.cli.utils import build_dependency_graph, format_dependency_graph

    check_project_file(project)
    working_set = project.environment.get_working_set()
    if graph:
        with project.environment.activate():
            dep_graph = build_dependency_graph(working_set)
        stream.echo(format_dependency_graph(dep_graph))
    else:
        rows = [(stream.green(k, bold=True), format_dist(v))
                for k, v in sorted(working_set.items())]
        stream.display_columns(rows, ["Package", "Version"])
Example #15
File: run.py Project: leibowitz/pdm
 def handle(self, project: Project, options: argparse.Namespace) -> None:
     check_project_file(project)
     if options.list:
         return self._show_list(project)
     global_env_options = project.scripts.get(
         "_", {}) if project.scripts else {}
     assert isinstance(global_env_options, dict)
     if not options.command:
         raise PdmUsageError("No command given")
     if project.scripts and options.command in project.scripts:
         self._run_script(project, options.command, options.args,
                          global_env_options)
     else:
         self._run_command(
             project,
             [options.command] + options.args,
             **global_env_options,  # type: ignore
         )
Example #16
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution."""
    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    check_project_file(project)
    if not wheel and not sdist:
        stream.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    ireq = project.make_self_candidate(False).ireq
    ireq.source_dir = project.root.as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    if sdist:
        with SdistBuilder(ireq) as builder:
            builder.build(dest)
    if wheel:
        with WheelBuilder(ireq) as builder:
            builder.build(dest)
Example #17
def do_update(
        project: Project,
        dev: bool = False,
        sections: Sequence[str] = (),
        default: bool = True,
        strategy: str = "reuse",
        save: str = "compatible",
        unconstrained: bool = False,
        packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: The project instance
    :param dev: whether to update dev dependencies
    :param sections: update specified sections
    :param default: update default
    :param strategy: update strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: ignore version constraint
    :param packages: specified packages to update
    :return: None
    """
    check_project_file(project)
    if len(packages) > 0 and (len(sections) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -s or --no-default."
        )
    if not packages:
        if unconstrained:
            raise PdmUsageError(
                "--unconstrained must be used with package names given.")
        # pdm update with no packages given, same as 'lock' + 'sync'
        do_lock(project)
        do_sync(project, sections, dev, default, clean=False)
        return
    section = sections[0] if sections else ("dev" if dev else "default")
    all_dependencies = project.all_dependencies
    dependencies = all_dependencies[section]
    updated_deps = {}
    tracked_names = set()
    for name in packages:
        matched_name = next(
            filter(
                lambda k: safe_name(strip_extras(k)[0]).lower() == safe_name(
                    name).lower(),
                dependencies.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                stream.green(name, bold=True), section))
        if unconstrained:
            dependencies[matched_name].specifier = get_specifier("")
        tracked_names.add(matched_name)
        updated_deps[matched_name] = dependencies[matched_name]
    stream.echo("Updating packages: {}.".format(", ".join(
        stream.green(v, bold=True) for v in tracked_names)))
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)
    do_sync(project, sections=(section, ), default=False, clean=False)
    if unconstrained:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
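
A hedged calling sketch for this do_update variant, under the same assumptions about `project` and the import path: two named packages (example names) are updated eagerly in the default section, with their existing pins relaxed first; the function then runs the lock and sync steps itself.

from pdm.cli.actions import do_update  # assumed import path

do_update(
    project,
    strategy="eager",
    unconstrained=True,               # drop the current version constraints first
    packages=["requests", "click"],   # example package names
)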
Example #18
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Iterable[str] | None = None,
    requirements: list[Requirement] | None = None,
    dry_run: bool = False,
    refresh: bool = False,
) -> dict[str, Candidate]:
    """Perform the locking process and update the lockfile."""
    check_project_file(project)
    if refresh:
        locked_repo = project.locked_repository
        repo = project.get_repository()
        mapping: dict[str, Candidate] = {}
        dependencies: dict[str, list[Requirement]] = {}
        with project.core.ui.open_spinner("Re-calculating hashes..."):
            for key, candidate in locked_repo.packages.items():
                reqs, python_requires, summary = locked_repo.candidate_info[key]
                candidate.hashes = repo.get_hashes(candidate)
                candidate.summary = summary
                candidate.requires_python = python_requires
                ident = cast(str, key[0])
                mapping[ident] = candidate
                dependencies[ident] = list(map(parse_requirement, reqs))
            lockfile = format_lockfile(project, mapping, dependencies)
        project.write_lockfile(lockfile)
        return mapping
    # TODO: multiple dependency definitions for the same package.
    provider = project.get_provider(strategy, tracked_names)
    if not requirements:
        requirements = [
            r for deps in project.all_dependencies.values() for r in deps.values()
        ]
    resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"])
    ui = project.core.ui
    with ui.logging("lock"):
        # The context managers are nested to ensure the spinner is stopped before
        # any message is thrown to the output.
        with ui.open_spinner(title="Resolving dependencies", spinner="dots") as spin:
            reporter = project.get_reporter(requirements, tracked_names, spin)
            resolver: Resolver = project.core.resolver_class(provider, reporter)
            signals.pre_lock.send(project, requirements=requirements, dry_run=dry_run)
            try:
                mapping, dependencies = resolve(
                    resolver,
                    requirements,
                    project.environment.python_requires,
                    resolve_max_rounds,
                )
            except ResolutionTooDeep:
                spin.fail(f"{termui.Emoji.LOCK} Lock failed")
                ui.echo(
                    "The dependency resolution exceeds the maximum loop depth of "
                    f"{resolve_max_rounds}, there may be some circular dependencies "
                    "in your project. Try to solve them or increase the "
                    f"{termui.green('`strategy.resolve_max_rounds`')} config.",
                    err=True,
                )
                raise
            except ResolutionImpossible as err:
                spin.fail(f"{termui.Emoji.LOCK} Lock failed")
                ui.echo(format_resolution_impossible(err), err=True)
                raise ResolutionImpossible("Unable to find a resolution") from None
            else:
                data = format_lockfile(project, mapping, dependencies)
                spin.succeed(f"{termui.Emoji.LOCK} Lock successful")
            signals.post_lock.send(project, resolution=mapping, dry_run=dry_run)

    project.write_lockfile(data, write=not dry_run)

    return mapping
Example #19
def do_update(
    project: Project,
    *,
    dev: bool | None = None,
    groups: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Collection[str] = (),
    sync: bool = True,
    no_editable: bool = False,
    no_self: bool = False,
    prerelease: bool = False,
) -> None:
    """Update specified packages or all packages"""
    check_project_file(project)
    if len(packages) > 0 and (top or len(groups) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -G or "
            "--no-default and --top."
        )
    all_dependencies = project.all_dependencies
    updated_deps: dict[str, dict[str, Requirement]] = defaultdict(dict)
    install_dev = True if dev is None else dev
    if not packages:
        if prerelease:
            raise PdmUsageError("--prerelease must be used with packages given")
        groups = translate_groups(project, default, install_dev, groups or ())
        for group in groups:
            updated_deps[group] = all_dependencies[group]
    else:
        group = groups[0] if groups else ("dev" if dev else "default")
        dependencies = all_dependencies[group]
        for name in packages:
            matched_name = next(
                filter(
                    lambda k: normalize_name(strip_extras(k)[0])
                    == normalize_name(name),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), group, "dev-" if dev else ""
                    )
                )
            dependencies[matched_name].prerelease = prerelease
            updated_deps[group][matched_name] = dependencies[matched_name]
        project.core.ui.echo(
            "Updating packages: {}.".format(
                ", ".join(
                    termui.green(v, bold=True)
                    for v in chain.from_iterable(updated_deps.values())
                )
            )
        )
    if unconstrained:
        for deps in updated_deps.values():
            for dep in deps.values():
                dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy,
        chain.from_iterable(updated_deps.values()),
        reqs,
        dry_run=dry_run,
    )
    if sync or dry_run:
        do_sync(
            project,
            groups=groups,
            dev=install_dev,
            default=default,
            clean=False,
            dry_run=dry_run,
            requirements=[r for deps in updated_deps.values() for r in deps.values()],
            tracked_names=list(chain.from_iterable(updated_deps.values()))
            if top
            else None,
            no_editable=no_editable,
            no_self=no_self,
        )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        for group, deps in updated_deps.items():
            project.add_dependencies(deps, group, dev or False)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
Example #20
def do_update(
        project: Project,
        dev: bool = False,
        sections: Sequence[str] = (),
        default: bool = True,
        strategy: str = "reuse",
        save: str = "compatible",
        unconstrained: bool = False,
        top: bool = False,
        dry_run: bool = False,
        packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages"""
    check_project_file(project)
    if len(packages) > 0 and (top or len(sections) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -s or "
            "--no-default and --top.")
    all_dependencies = project.all_dependencies
    updated_deps = {}
    if not packages:
        sections = translate_sections(project, default, dev, sections or ())
        for section in sections:
            updated_deps.update(all_dependencies[section])
    else:
        section = sections[0] if sections else ("dev" if dev else "default")
        dependencies = all_dependencies[section]
        for name in packages:
            matched_name = next(
                filter(
                    lambda k: safe_name(strip_extras(k)[0]).lower() ==
                    safe_name(name).lower(),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), section,
                        "dev-" if dev else ""))
            updated_deps[matched_name] = dependencies[matched_name]
        project.core.ui.echo("Updating packages: {}.".format(", ".join(
            termui.green(v, bold=True) for v in updated_deps)))
    if unconstrained:
        for _, dep in updated_deps.items():
            dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy if top or packages else "all",
        updated_deps.keys(),
        reqs,
        dry_run=dry_run,
    )
    do_sync(
        project,
        sections=sections,
        dev=dev,
        default=default,
        clean=False,
        dry_run=dry_run,
        tracked_names=updated_deps.keys() if top else None,
    )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps, section, dev)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)