Example 1
def test_metadata_header_round_trip() -> None:
    input_metadata = LockfileMetadata.new(
        InterpreterConstraints(
            ["CPython==2.7.*", "PyPy", "CPython>=3.6,<4,!=3.7.*"]),
        reqset("ansicolors==0.1.0"),
    )
    serialized_lockfile = input_metadata.add_header_to_lockfile(
        b"req1==1.0", regenerate_command="./pants lock")
    output_metadata = LockfileMetadata.from_lockfile(serialized_lockfile)
    assert input_metadata == output_metadata
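
The round-trip property exercised here — serialize metadata into a lockfile header, parse it back, get an equal value — can be sketched without Pants' machinery. The SimpleMetadata class below is a hypothetical stand-in for LockfileMetadata (the JSON keys are taken from the header shown in Example 2), not the real implementation:

from __future__ import annotations

import json
from dataclasses import dataclass


@dataclass(frozen=True)
class SimpleMetadata:
    """Hypothetical stand-in for LockfileMetadata; not Pants' real class."""

    interpreter_constraints: tuple[str, ...]
    requirements: tuple[str, ...]

    def to_json(self) -> str:
        # Serialize into the JSON payload embedded in the lockfile header.
        return json.dumps(
            {
                "valid_for_interpreter_constraints": list(self.interpreter_constraints),
                "generated_with_requirements": list(self.requirements),
            }
        )

    @classmethod
    def from_json(cls, raw: str) -> SimpleMetadata:
        data = json.loads(raw)
        return cls(
            tuple(data["valid_for_interpreter_constraints"]),
            tuple(data["generated_with_requirements"]),
        )


original = SimpleMetadata(("CPython>=3.7",), ("ansicolors==0.1.0",))
assert SimpleMetadata.from_json(original.to_json()) == original
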
Example 2
def test_add_header_to_lockfile() -> None:
    input_lockfile = b"""dave==3.1.4 \\
    --hash=sha256:cab0c0c0c0c0dadacafec0c0c0c0cafedadabeefc0c0c0c0feedbeeffeedbeef \\
    """

    expected = b"""
# This lockfile was autogenerated by Pants. To regenerate, run:
#
#    ./pants lock
#
# --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
# {
#   "version": 2,
#   "valid_for_interpreter_constraints": [
#     "CPython>=3.7"
#   ],
#   "generated_with_requirements": [
#     "ansicolors==0.1.0"
#   ]
# }
# --- END PANTS LOCKFILE METADATA ---
dave==3.1.4 \\
    --hash=sha256:cab0c0c0c0c0dadacafec0c0c0c0cafedadabeefc0c0c0c0feedbeeffeedbeef \\
    """

    def line_by_line(b: bytes) -> list[bytes]:
        return [i for i in (j.strip() for j in b.splitlines()) if i]

    metadata = LockfileMetadata.new(InterpreterConstraints([">=3.7"]),
                                    reqset("ansicolors==0.1.0"))
    result = metadata.add_header_to_lockfile(input_lockfile,
                                             regenerate_command="./pants lock")
    assert line_by_line(result) == line_by_line(expected)
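
Given the header layout in `expected`, recovering the metadata reduces to locating the BEGIN/END marker lines and stripping the `# ` comment prefix from everything between them. A minimal sketch of that idea (the marker strings are copied from the header above; the helper itself is illustrative, not Pants' actual parser):

import json

BEGIN = b"# --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---"
END = b"# --- END PANTS LOCKFILE METADATA ---"


def extract_metadata(lockfile: bytes) -> dict:
    lines = lockfile.splitlines()
    start = lines.index(BEGIN) + 1
    end = lines.index(END)
    # Every metadata line is a comment; drop the leading "# " before parsing.
    payload = b"\n".join(line[2:] for line in lines[start:end])
    return json.loads(payload)


# Applied to the `expected` bytes above, this recovers the embedded JSON:
# extract_metadata(expected)["version"] == 2
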
Example 3
def _validate_metadata(
    metadata: LockfileMetadata,
    request: PexRequest,
    requirements: Lockfile | LockfileContent,
    python_setup: PythonSetup,
) -> None:

    # TODO(#12314): Improve this message: `Requirement.parse` raises `InvalidRequirement`, which
    # doesn't have mypy stubs at the moment; it may be hard to catch this exception and typecheck.
    req_strings = (
        {PipRequirement.parse(i) for i in requirements.req_strings}
        if requirements.req_strings is not None
        else None
    )

    validation = metadata.is_valid_for(
        requirements.lockfile_hex_digest,
        request.interpreter_constraints,
        python_setup.interpreter_universe,
        req_strings,
    )

    if validation:
        return

    def tool_message_parts(
        requirements: ToolCustomLockfile | ToolDefaultLockfile,
    ) -> Iterator[str]:

        tool_name = requirements.options_scope_name
        uses_source_plugins = requirements.uses_source_plugins
        uses_project_interpreter_constraints = requirements.uses_project_interpreter_constraints

        yield "You are using "

        if isinstance(requirements, ToolDefaultLockfile):
            yield "the `<default>` lockfile provided by Pants "
        elif isinstance(requirements, ToolCustomLockfile):
            yield f"the lockfile at {requirements.file_path} "

        yield (
            f"to install the tool `{tool_name}`, but it is not compatible with your "
            "configuration: "
            "\n\n")

        if any(i == InvalidLockfileReason.INVALIDATION_DIGEST_MISMATCH
               or i == InvalidLockfileReason.REQUIREMENTS_MISMATCH
               for i in validation.failure_reasons):
            # TODO(#12314): Add message showing _which_ requirements diverged.

            yield (
                "- You have set different requirements than those used to generate the lockfile. "
                f"You can fix this by not setting `[{tool_name}].version`, ")

            if uses_source_plugins:
                yield f"`[{tool_name}].source_plugins`, "

            yield (
                f"and `[{tool_name}].extra_requirements`, or by using a new "
                "custom lockfile."
                "\n")

        if InvalidLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH in validation.failure_reasons:
            yield (
                f"- You have set interpreter constraints (`{request.interpreter_constraints}`) that "
                "are not compatible with those used to generate the lockfile "
                f"(`{metadata.valid_for_interpreter_constraints}`). ")
            if not uses_project_interpreter_constraints:
                yield (
                    f"You can fix this by not setting `[{tool_name}].interpreter_constraints`, "
                    "or by using a new custom lockfile. ")
            else:
                yield (
                    f"`{tool_name}` determines its interpreter constraints based on your code's own "
                    "constraints. To fix this error, you can either change your code's constraints "
                    f"(see {doc_url('python-interpreter-compatibility')}) or by generating a new "
                    "custom lockfile. ")
            yield "\n"

        yield "\n"

        if not isinstance(requirements, ToolCustomLockfile):
            yield (
                "To generate a custom lockfile based on your current configuration, set "
                f"`[{tool_name}].lockfile` to where you want to create the lockfile, then run "
                f"`./pants generate-lockfiles --resolve={tool_name}`. ")
        else:
            yield (
                "To regenerate your lockfile based on your current configuration, run "
                f"`./pants generate-lockfiles --resolve={tool_name}`. ")

    message: str
    if isinstance(requirements, (ToolCustomLockfile, ToolDefaultLockfile)):
        message = "".join(tool_message_parts(requirements)).strip()
    else:
        # TODO(#12314): Improve this message
        raise InvalidLockfileError(f"{validation.failure_reasons}")

    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.error:
        raise ValueError(message)
    else:
        logger.warning("%s", message)
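
The error text above is built with a generator-of-fragments pattern: each failure condition yields one or more string pieces, and the caller assembles them with `"".join(...)`. The same pattern in miniature, with illustrative names rather than Pants APIs:

from __future__ import annotations

from typing import Iterator


def message_parts(tool_name: str, reasons: list[str]) -> Iterator[str]:
    # Each condition contributes fragments; the caller joins them at the end.
    yield f"The lockfile for `{tool_name}` is no longer valid:\n"
    for reason in reasons:
        yield f"- {reason}\n"
    yield f"\nRun `./pants generate-lockfiles --resolve={tool_name}` to regenerate it."


message = "".join(message_parts("flake8", ["requirements changed"])).strip()
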
Example 4
async def generate_lockfile(
    req: PythonLockfileRequest,
    poetry_subsystem: PoetrySubsystem,
    generate_lockfiles_subsystem: GenerateLockfilesSubsystem,
) -> PythonLockfile:
    pyproject_toml = create_pyproject_toml(req.requirements, req.interpreter_constraints).encode()
    pyproject_toml_digest, launcher_digest = await MultiGet(
        Get(Digest, CreateDigest([FileContent("pyproject.toml", pyproject_toml)])),
        Get(Digest, CreateDigest([POETRY_LAUNCHER])),
    )

    poetry_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="poetry.pex",
            internal_only=True,
            requirements=poetry_subsystem.pex_requirements(),
            interpreter_constraints=poetry_subsystem.interpreter_constraints,
            main=EntryPoint(PurePath(POETRY_LAUNCHER.path).stem),
            sources=launcher_digest,
        ),
    )

    # WONTFIX(#12314): Wire up Poetry to named_caches.
    # WONTFIX(#12314): Wire up all the pip options like indexes.
    poetry_lock_result = await Get(
        ProcessResult,
        VenvPexProcess(
            poetry_pex,
            argv=("lock",),
            input_digest=pyproject_toml_digest,
            output_files=("poetry.lock", "pyproject.toml"),
            description=req._description or f"Generate lockfile for {req.resolve_name}",
            # Instead of caching lockfile generation with LMDB, we instead use the invalidation
            # scheme from `lockfile_metadata.py` to check for stale/invalid lockfiles. This is
            # necessary so that our invalidation is resilient to deleting LMDB or running on a
            # new machine.
            #
            # We disable caching with LMDB so that when you generate a lockfile, you always get
            # the most up-to-date snapshot of the world. This is generally desirable and also
            # necessary to avoid an awkward edge case where different developers generate different
            # lockfiles even when generating at the same time. See
            # https://github.com/pantsbuild/pants/issues/12591.
            cache_scope=ProcessCacheScope.PER_SESSION,
        ),
    )
    poetry_export_result = await Get(
        ProcessResult,
        VenvPexProcess(
            poetry_pex,
            argv=("export", "-o", req.lockfile_dest),
            input_digest=poetry_lock_result.output_digest,
            output_files=(req.lockfile_dest,),
            description=(
                f"Exporting Poetry lockfile to requirements.txt format for {req.resolve_name}"
            ),
            level=LogLevel.DEBUG,
        ),
    )

    initial_lockfile_digest_contents = await Get(
        DigestContents, Digest, poetry_export_result.output_digest
    )
    # TODO(#12314) Improve error message on `Requirement.parse`
    metadata = LockfileMetadata.new(
        req.interpreter_constraints,
        {PipRequirement.parse(i) for i in req.requirements},
    )
    lockfile_with_header = metadata.add_header_to_lockfile(
        initial_lockfile_digest_contents[0].content,
        regenerate_command=(
            generate_lockfiles_subsystem.custom_command
            or req._regenerate_command
            or f"./pants generate-lockfiles --resolve={req.resolve_name}"
        ),
    )
    final_lockfile_digest = await Get(
        Digest, CreateDigest([FileContent(req.lockfile_dest, lockfile_with_header)])
    )
    return PythonLockfile(final_lockfile_digest, req.resolve_name, req.lockfile_dest)
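
Stripped of the rule engine, the two Poetry steps above — `poetry lock` to pin the resolve, then `poetry export` to emit requirements.txt format — amount to two process invocations. A minimal sketch, assuming `poetry` is on PATH and `workdir` already holds the generated pyproject.toml (error handling omitted):

import subprocess


def poetry_lock_and_export(workdir: str, lockfile_dest: str) -> None:
    # Step 1: resolve all requirements and pin them into poetry.lock.
    subprocess.run(["poetry", "lock"], cwd=workdir, check=True)
    # Step 2: export the pinned resolve in requirements.txt format.
    subprocess.run(["poetry", "export", "-o", lockfile_dest], cwd=workdir, check=True)
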
Example 5
async def build_pex(
    request: PexRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    platform: Platform,
    pex_runtime_env: PexRuntimeEnvironment,
) -> BuildPexResult:
    """Returns a PEX with the given settings."""
    argv = ["--output-file", request.output_filename, *request.additional_args]

    repository_pex = (
        request.requirements.repository_pex
        if isinstance(request.requirements, PexRequirements)
        else None
    )
    if repository_pex:
        argv.extend(["--pex-repository", repository_pex.name])
    else:
        # NB: In setting `--no-pypi`, we rely on the default value of `--python-repos-indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `--python-repos-repos=['custom_url']` will only point to that index and not include PyPI.
        argv.extend([
            "--no-pypi",
            *(f"--index={index}" for index in python_repos.indexes),
            *(f"--repo={repo}" for repo in python_repos.repos),
            "--resolver-version",
            "pip-2020-resolver",
        ])

    python: PythonExecutable | None = None

    # NB: If `--platform` is specified, this signals that the PEX should not be built locally.
    # `--interpreter-constraint` only makes sense in the context of building locally. These two
    # flags are mutually exclusive. See https://github.com/pantsbuild/pex/issues/957.
    if request.platforms:
        # TODO(#9560): consider validating that these platforms are valid with the interpreter
        #  constraints.
        argv.extend(request.platforms.generate_pex_arg_list())
    elif request.python:
        python = request.python
    elif request.internal_only:
        # NB: If it's an internal_only PEX, we do our own lookup of the interpreter based on the
        # interpreter constraints, and then will run the PEX with that specific interpreter. We
        # will have already validated that there were no platforms.
        python = await Get(PythonExecutable, InterpreterConstraints,
                           request.interpreter_constraints)
    else:
        # `--interpreter-constraint` options are mutually exclusive with the `--python` option,
        # so we only specify them if we have not already located a concrete Python.
        argv.extend(request.interpreter_constraints.generate_pex_arg_list())

    if python:
        argv.extend(["--python", python.path])

    argv.append("--no-emit-warnings")

    if python_setup.resolver_jobs:
        argv.extend(["--jobs", str(python_setup.resolver_jobs)])

    if python_setup.manylinux:
        argv.extend(["--manylinux", python_setup.manylinux])
    else:
        argv.append("--no-manylinux")

    if request.main is not None:
        argv.extend(request.main.iter_pex_args())

    # TODO(John Sirois): Right now any request requirements will shadow corresponding pex path
    #  requirements, which could lead to problems. Support shading python binaries.
    #  See: https://github.com/pantsbuild/pants/issues/9206
    if request.pex_path:
        argv.extend(
            ["--pex-path", ":".join(pex.name for pex in request.pex_path)])

    source_dir_name = "source_files"
    argv.append(f"--sources-directory={source_dir_name}")
    sources_digest_as_subdir = await Get(
        Digest, AddPrefix(request.sources or EMPTY_DIGEST, source_dir_name))

    additional_inputs_digest = request.additional_inputs or EMPTY_DIGEST
    repository_pex_digest = repository_pex.digest if repository_pex else EMPTY_DIGEST
    constraint_file_digest = EMPTY_DIGEST
    requirements_file_digest = EMPTY_DIGEST

    # TODO(#12314): Capture the resolve name for multiple user lockfiles.
    resolve_name = (
        request.requirements.options_scope_name
        if isinstance(request.requirements, (ToolDefaultLockfile, ToolCustomLockfile))
        else None
    )

    if isinstance(request.requirements, Lockfile):
        is_monolithic_resolve = True
        argv.extend(["--requirement", request.requirements.file_path])
        argv.append("--no-transitive")
        globs = PathGlobs(
            [request.requirements.file_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=request.requirements.file_path_description_of_origin,
        )
        if python_setup.invalid_lockfile_behavior in {
                InvalidLockfileBehavior.warn,
                InvalidLockfileBehavior.error,
        }:
            requirements_file_digest_contents = await Get(
                DigestContents, PathGlobs, globs)
            metadata = LockfileMetadata.from_lockfile(
                requirements_file_digest_contents[0].content,
                request.requirements.file_path,
                resolve_name,
            )
            _validate_metadata(metadata, request, request.requirements, python_setup)
        requirements_file_digest = await Get(Digest, PathGlobs, globs)

    elif isinstance(request.requirements, LockfileContent):
        is_monolithic_resolve = True
        file_content = request.requirements.file_content
        argv.extend(["--requirement", file_content.path])
        argv.append("--no-transitive")
        if python_setup.invalid_lockfile_behavior in {
                InvalidLockfileBehavior.warn,
                InvalidLockfileBehavior.error,
        }:
            metadata = LockfileMetadata.from_lockfile(
                file_content.content, resolve_name=resolve_name)
            _validate_metadata(metadata, request, request.requirements, python_setup)
        requirements_file_digest = await Get(Digest, CreateDigest([file_content]))
    else:
        assert isinstance(request.requirements, PexRequirements)
        is_monolithic_resolve = request.requirements.is_all_constraints_resolve

        if (request.requirements.apply_constraints
                and python_setup.requirement_constraints is not None):
            argv.extend(["--constraints", python_setup.requirement_constraints])
            constraint_file_digest = await Get(
                Digest,
                PathGlobs(
                    [python_setup.requirement_constraints],
                    glob_match_error_behavior=GlobMatchErrorBehavior.error,
                    description_of_origin="the option `[python].requirement_constraints`",
                ),
            )

        argv.extend(request.requirements.req_strings)

    merged_digest = await Get(
        Digest,
        MergeDigests((
            sources_digest_as_subdir,
            additional_inputs_digest,
            constraint_file_digest,
            requirements_file_digest,
            repository_pex_digest,
            *(pex.digest for pex in request.pex_path),
        )),
    )

    output_files: Iterable[str] | None = None
    output_directories: Iterable[str] | None = None
    if request.internal_only or is_monolithic_resolve:
        # This is a much friendlier layout for the CAS than the default zipapp.
        argv.extend(["--layout", "packed"])
        output_directories = [request.output_filename]
    else:
        output_files = [request.output_filename]

    process = await Get(
        Process,
        PexCliProcess(
            python=python,
            argv=argv,
            additional_input_digest=merged_digest,
            description=_build_pex_description(request),
            output_files=output_files,
            output_directories=output_directories,
        ),
    )

    # NB: Building a Pex is platform dependent, so in order to get a PEX that we can use locally
    # without cross-building, we specify that our PEX command should be run on the current local
    # platform.
    result = await Get(ProcessResult, MultiPlatformProcess({platform: process}))

    if pex_runtime_env.verbosity > 0:
        log_output = result.stderr.decode()
        if log_output:
            logger.info("%s", log_output)

    digest = (
        await Get(
            Digest,
            MergeDigests((result.output_digest, *(pex.digest for pex in request.pex_path))),
        )
        if request.pex_path
        else result.output_digest
    )

    return BuildPexResult(
        result=result, pex_filename=request.output_filename, digest=digest, python=python
    )
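
For orientation, the argv assembled by this rule for a typical internal-only request using plain PexRequirements ends up looking roughly like the list below; the concrete values are illustrative, but each flag corresponds to a branch above:

example_argv = [
    "--output-file", "requirements.pex",
    "--no-pypi",
    "--index=https://pypi.org/simple/",
    "--resolver-version", "pip-2020-resolver",
    "--python", "/usr/bin/python3.9",     # looked up for internal_only requests
    "--no-emit-warnings",
    "--jobs", "4",
    "--manylinux", "manylinux2014",
    "--sources-directory=source_files",
    "ansicolors==0.1.0",                  # PexRequirements req_strings
    "--layout", "packed",                 # internal-only PEXes use the packed layout
]
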