Example 1
def do_build(
    project: Project,
    sdist: bool = True,
    wheel: bool = True,
    dest: str = "dist",
    clean: bool = True,
):
    """Build artifacts for distribution."""
    if project.is_global:
        raise ProjectError("Not allowed to build based on the global project.")
    check_project_file(project)
    if not wheel and not sdist:
        stream.echo("All artifacts are disabled, nothing to do.", err=True)
        return
    if not os.path.isabs(dest):
        dest = project.root.joinpath(dest).as_posix()
    if clean:
        shutil.rmtree(dest, ignore_errors=True)
    with stream.logging("build"), EnvBuilder(project.root,
                                             project.environment) as builder:
        if sdist:
            stream.echo("Building sdist...")
            loc = builder.build_sdist(dest)
            stream.echo(f"Built sdist at {loc}")
        if wheel:
            stream.echo("Building wheel...")
            loc = builder.build_wheel(dest)
            stream.echo(f"Built wheel at {loc}")
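A minimal usage sketch, assuming a Project instance is already available (the real CLI constructs it from the working directory; the constructor call below is hypothetical):

# Hypothetical setup; Project construction details vary by PDM version.
project = Project(".")
# Build only the wheel into the default "dist" directory, wiping it first.
do_build(project, sdist=False, wheel=True, dest="dist", clean=True)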
Example 2
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Optional[Iterable[str]] = None,
    requirements: Optional[List[Requirement]] = None,
) -> Dict[str, Candidate]:
    """Performs the locking process and update lockfile.

    :param project: the project instance
    :param strategy: update stratege: reuse/eager/all
    :param tracked_names: required when using eager strategy
    :param requirements: An optional dictionary of requirements, read from pyproject
        if not given.
    """
    check_project_file(project)
    # TODO: multiple dependency definitions for the same package.
    provider = project.get_provider(strategy, tracked_names)
    if not requirements:
        requirements = [
            r for deps in project.all_dependencies.values()
            for r in deps.values()
        ]

    with stream.open_spinner(title="Resolving dependencies",
                             spinner="dots") as spin, stream.logging("lock"):
        reporter = project.get_reporter(requirements, tracked_names, spin)
        resolver = project.core.resolver_class(provider, reporter)
        mapping, dependencies, summaries = resolve(
            resolver, requirements, project.environment.python_requires)
        data = format_lockfile(mapping, dependencies, summaries)
        spin.succeed("Resolution success")
    project.write_lockfile(data)

    return mapping
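A hedged usage sketch: with the default "all" strategy, requirements are read from pyproject and the lockfile is rewritten; `project` is assumed to be an initialized Project, and the `candidate.version` attribute used below is an assumption:

# Hypothetical call; the mapping is keyed by package name.
mapping = do_lock(project, strategy="all")
for name, candidate in mapping.items():
    print(name, candidate.version)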
Example 3
def resolve_requirements(
    repository,
    lines,
    requires_python="",
    allow_prereleases=None,
    strategy="all",
    preferred_pins=None,
    tracked_names=None,
):
    requirements = []
    for line in lines:
        if line.startswith("-e "):
            requirements.append(parse_requirement(line[3:], True))
        else:
            requirements.append(parse_requirement(line))
    requires_python = PySpecSet(requires_python)
    if not preferred_pins:
        provider = BaseProvider(repository, requires_python, allow_prereleases)
    else:
        provider_class = (
            ReusePinProvider if strategy == "reuse" else EagerUpdateProvider
        )
        provider = provider_class(
            preferred_pins,
            tracked_names or (),
            repository,
            requires_python,
            allow_prereleases,
        )

    with stream.open_spinner("Resolving dependencies") as spin, stream.logging("lock"):
        reporter = SpinnerReporter(spin, requirements)
        resolver = Resolver(provider, reporter)
        mapping, *_ = resolve(resolver, requirements, requires_python)
        return mapping
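A short sketch of feeding requirement lines directly; `repository` is assumed to be a configured repository instance, and the "-e " prefix marks an editable requirement:

# Hypothetical inputs; the repository object comes from project configuration.
lines = ["requests>=2.20", "-e ./local-package"]
mapping = resolve_requirements(repository, lines, requires_python=">=3.7")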
Example 4
File: actions.py Project: pohlt/pdm
def do_lock(
    project: Project,
    strategy: str = "all",
    tracked_names: Optional[Iterable[str]] = None,
    requirements: Optional[List[Requirement]] = None,
) -> Dict[str, Candidate]:
    """Performs the locking process and update lockfile.

    :param project: the project instance
    :param strategy: update stratege: reuse/eager/all
    :param tracked_names: required when using eager strategy
    :param requirements: An optional dictionary of requirements, read from pyproject
        if not given.
    """
    check_project_file(project)
    # TODO: multiple dependency definitions for the same package.
    provider = project.get_provider(strategy, tracked_names)
    if not requirements:
        requirements = [
            r for deps in project.all_dependencies.values()
            for r in deps.values()
        ]
    resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"])
    with stream.logging("lock"):
        # The context managers are nested to ensure the spinner is stopped before
        # any message is thrown to the output.
        with stream.open_spinner(title="Resolving dependencies",
                                 spinner="dots") as spin:
            reporter = project.get_reporter(requirements, tracked_names, spin)
            resolver = project.core.resolver_class(provider, reporter)
            try:
                mapping, dependencies, summaries = resolve(
                    resolver,
                    requirements,
                    project.environment.python_requires,
                    resolve_max_rounds,
                )
            except ResolutionTooDeep:
                spin.fail(f"{LOCK} Lock failed")
                stream.echo(
                    "The dependency resolution exceeds the maximum loop depth of "
                    f"{resolve_max_rounds}, there may be some circular dependencies "
                    "in your project. Try to solve them or increase the "
                    f"{stream.green('`strategy.resolve_max_rounds`')} config.",
                    err=True,
                )
                raise
            except ResolutionImpossible as err:
                spin.fail(f"{LOCK} Lock failed")
                stream.echo(format_resolution_impossible(err), err=True)
                raise
            else:
                data = format_lockfile(mapping, dependencies, summaries)
                spin.succeed(f"{LOCK} Lock successful")
    project.write_lockfile(data)

    return mapping
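Because this variant re-raises after reporting, callers can still react to lock failures; a minimal sketch (ResolutionTooDeep and ResolutionImpossible come from resolvelib):

# Hypothetical caller; the failure messages were already echoed above.
try:
    mapping = do_lock(project, strategy="reuse")
except (ResolutionTooDeep, ResolutionImpossible):
    raise SystemExit(1)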
Example 5
def convert(project, filename):
    ireqs, finder = parse_requirement_file(str(filename))
    with stream.logging("build"):
        reqs = [ireq_as_line(ireq, project.environment) for ireq in ireqs]

    data = {"dependencies": make_array(reqs, True)}
    settings = {}
    if finder.index_urls:
        sources = [convert_url_to_source(finder.index_urls[0], "pypi")]
        sources.extend(convert_url_to_source(url) for url in finder.index_urls[1:])
        settings["source"] = sources

    return data, settings
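A minimal sketch of converting a pip requirements file, assuming `project` is available; `data` feeds the pyproject dependency table and `settings` carries any extra index sources:

# Hypothetical call with a plain requirements.txt path.
data, settings = convert(project, "requirements.txt")
print(data["dependencies"], settings.get("source", []))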
Example 6
def convert(project, filename, options):
    ireqs, finder = parse_requirement_file(str(filename))
    with stream.logging("build"):
        reqs = [ireq_as_line(ireq, project.environment) for ireq in ireqs]

    deps = make_array(reqs, True)
    data = {"dependencies": [], "dev-dependencies": []}
    if options.dev and options.section:
        raise PdmUsageError(
            "Can't specify --dev and --section at the same time")
    elif options.dev:
        data["dev-dependencies"] = deps
    elif options.section:
        data["optional-dependencies"] = {options.section: deps}
    else:
        data["dependencies"] = deps
    settings = {}
    if finder.index_urls:
        sources = [convert_url_to_source(finder.index_urls[0], "pypi")]
        sources.extend(
            convert_url_to_source(url) for url in finder.index_urls[1:])
        settings["source"] = sources

    return data, settings
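The options-aware variant routes the converted list by flag; a sketch using a hypothetical options namespace (the real CLI passes parsed command-line arguments):

from argparse import Namespace

# --section test maps the requirements into an optional-dependencies group.
options = Namespace(dev=False, section="test")
data, settings = convert(project, "requirements.txt", options)
# data["optional-dependencies"] == {"test": <converted array>}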
Example 7
    def synchronize(self, clean: bool = True, dry_run: bool = False) -> None:
        """Synchronize the working set with pinned candidates.

        :param clean: Whether to remove unneeded packages, defaults to True.
        :param dry_run: If set to True, only print actions without actually
            performing them.
        """
        to_add, to_update, to_remove = self.compare_with_working_set()
        if not clean:
            to_remove = []
        if not any([to_add, to_update, to_remove]):
            stream.echo(
                stream.yellow(
                    "All packages are synced to date, nothing to do."))
            if not dry_run:
                with stream.logging("install"):
                    self.update_project_egg_info()
            return
        to_do = {"remove": to_remove, "update": to_update, "add": to_add}
        self._show_headline(to_do)

        if dry_run:
            self._show_summary(to_do)
            return

        handlers = {
            "add": self.install_candidate,
            "update": self.update_candidate,
            "remove": self.remove_distribution,
        }

        sequential_jobs = []
        parallel_jobs = []
        # Self package will be installed after all other dependencies are installed.
        install_self = None
        for kind in to_do:
            for key in to_do[kind]:
                if (self.environment.project.meta.name and
                        key == self.environment.project.meta.project_name.lower()):
                    install_self = (kind, key)
                elif key in self.SEQUENTIAL_PACKAGES:
                    sequential_jobs.append((kind, key))
                elif key in self.candidates and self.candidates[key].req.editable:
                    # Editable packages are installed sequentially.
                    sequential_jobs.append((kind, key))
                else:
                    parallel_jobs.append((kind, key))

        errors: List[str] = []
        failed_jobs: List[Tuple[str, str]] = []

        def update_progress(future, kind, key):
            if future.exception():
                failed_jobs.append((kind, key))
                error = future.exception()
                errors.extend([f"{kind} {stream.green(key)} failed:\n"] +
                              traceback.format_exception(
                                  type(error), error, error.__traceback__))

        with stream.logging("install"), self.environment.activate():
            with stream.indent("  "):
                for job in sequential_jobs:
                    kind, key = job
                    handlers[kind](key)
                for i in range(self.retry_times + 1):
                    with self.create_executor() as executor:
                        for job in parallel_jobs:
                            kind, key = job
                            future = executor.submit(handlers[kind], key)
                            future.add_done_callback(
                                functools.partial(update_progress,
                                                  kind=kind,
                                                  key=key))
                    if not failed_jobs or i == self.retry_times:
                        break
                    parallel_jobs, failed_jobs = failed_jobs, []
                    errors.clear()
                    stream.echo("Retry failed jobs")

            if errors:
                stream.echo(stream.red("\nERRORS:"))
                stream.echo("".join(errors), err=True)
                raise InstallationError(
                    "Some package operations are not complete yet")

            if install_self:
                stream.echo("Installing the project as an editable package...")
                with stream.indent("  "):
                    handlers[install_self[0]](install_self[1])
            else:
                self.update_project_egg_info()
            stream.echo(f"\n{CELE} All complete!")
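A hedged usage sketch: preview the planned operations first, then apply them; `synchronizer` is assumed to be constructed elsewhere with candidates and an environment:

# Hypothetical instance; construction arguments depend on the PDM version.
synchronizer.synchronize(clean=True, dry_run=True)  # print planned actions only
synchronizer.synchronize(clean=True)  # actually install/update/remove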
Example 8
    def synchronize(self, clean: bool = True, dry_run: bool = False) -> None:
        """Synchronize the working set with pinned candidates.

        :param clean: Whether to remove unneeded packages, defaults to True.
        :param dry_run: If set to True, only print actions without actually
            performing them.
        """
        to_add, to_update, to_remove = self.compare_with_working_set()
        if not clean:
            to_remove = []
        lists_to_check = [to_add, to_update, to_remove]
        if not any(lists_to_check):
            if not dry_run:
                self.environment.write_site_py()
            stream.echo("All packages are synced to date, nothing to do.")
            return

        if dry_run:
            result = dict(
                add=[self.candidates[key] for key in to_add],
                update=[(self.working_set[key], self.candidates[key])
                        for key in to_update],
                remove=[self.working_set[key] for key in to_remove],
            )
            self.summarize(result, dry_run)
            return

        handlers = {
            "add": self.install_candidate,
            "update": self.update_candidate,
            "remove": self.remove_distribution,
        }

        result = defaultdict(list)
        failed = defaultdict(list)
        to_do = {"add": to_add, "update": to_update, "remove": to_remove}
        # Keep track of exceptions
        errors = []

        def update_progress(future, section, key, bar):
            if future.exception():
                failed[section].append(key)
                errors.append(future.exception())
            else:
                result[section].append(future.result())
            bar.update(1)

        with stream.logging("install"):
            with self.progressbar(
                    "Synchronizing:",
                    sum(len(lst) for lst in to_do.values()),
            ) as (bar, pool):
                # First update packages, then remove and add
                for section in sorted(to_do, reverse=True):
                    # setup toolkits are installed sequentially before other packages.
                    for key in sorted(
                            to_do[section],
                            key=lambda x: x not in self.SEQUENTIAL_PACKAGES):
                        future = pool.submit(handlers[section], key)
                        future.add_done_callback(
                            functools.partial(update_progress,
                                              section=section,
                                              key=key,
                                              bar=bar))
                        if key in self.SEQUENTIAL_PACKAGES:
                            future.result()

            # Retry for failed items
            for i in range(self.RETRY_TIMES):
                if not any(failed.values()):
                    break
                stream.echo(
                    stream.yellow(
                        "\nSome packages failed to install, retrying..."))
                to_do = failed
                failed = defaultdict(list)
                errors.clear()
                with self.progressbar(
                        f"Retrying ({i + 1}/{self.RETRY_TIMES}):",
                        sum(len(lst) for lst in to_do.values()),
                ) as (bar, pool):

                    for section in sorted(to_do, reverse=True):
                        for key in sorted(
                                to_do[section],
                                key=lambda x: x not in self.SEQUENTIAL_PACKAGES,
                        ):
                            future = pool.submit(handlers[section], key)
                            future.add_done_callback(
                                functools.partial(update_progress,
                                                  section=section,
                                                  key=key,
                                                  bar=bar))
                            if key in self.SEQUENTIAL_PACKAGES:
                                future.result()
            # End installation
            self.summarize(result)
            self.environment.write_site_py()
            if not any(failed.values()):
                return
            stream.echo("\n")
            error_msg = []
            if failed["add"] + failed["update"]:
                error_msg.append(
                    "Installation failed: "
                    f"{', '.join(failed['add'] + failed['update'])}")
            if failed["remove"]:
                error_msg.append(
                    f"Removal failed: {', '.join(failed['remove'])}")
            for error in errors:
                stream.echo(
                    "".join(
                        traceback.format_exception(type(error), error,
                                                   error.__traceback__)),
                    verbosity=stream.DEBUG,
                )
            raise InstallationError("\n" + "\n".join(error_msg))