# Example #1
def test_validate_tool_lockfiles(
    is_default_lock: bool,
    invalid_reqs: bool,
    invalid_constraints: bool,
    uses_source_plugins: bool,
    uses_project_ic: bool,
    caplog,
) -> None:
    """Validating a tool lockfile emits exactly the warnings matching each mismatch flag."""
    if invalid_constraints:
        runtime_ics = InterpreterConstraints(["==2.7.*"])
    else:
        runtime_ics = METADATA.valid_for_interpreter_constraints

    if invalid_reqs:
        req_strings = ["bad-req"]
    else:
        req_strings = [str(r) for r in METADATA.requirements]

    lock = create_tool_lock(
        default_lock=is_default_lock,
        uses_source_plugins=uses_source_plugins,
        uses_project_interpreter_constraints=uses_project_ic,
    )
    validate_metadata(
        METADATA,
        runtime_ics,
        lock,
        req_strings,
        create_python_setup(InvalidLockfileBehavior.warn),
    )

    def check(msg: str, expected: bool) -> None:
        # The message must be present in the captured log iff `expected` is True.
        assert (msg in caplog.text) == expected

    check("You are using the `<default>` lockfile provided by Pants", is_default_lock)
    check("You are using the lockfile at lock.txt", not is_default_lock)

    check("You have set different requirements", invalid_reqs)
    check("In the input requirements, but not in the lockfile: ['bad-req']", invalid_reqs)
    check(
        "In the lockfile, but not in the input requirements: ['ansicolors', 'requests']",
        invalid_reqs,
    )
    check(".source_plugins`, and", invalid_reqs and uses_source_plugins)

    check("You have set interpreter constraints", invalid_constraints)
    check(
        "determines its interpreter constraints based on your code's own constraints.",
        invalid_constraints and uses_project_ic,
    )
    check(
        ".interpreter_constraints`, or by using a new custom lockfile.",
        invalid_constraints and not uses_project_ic,
    )

    check(
        "To generate a custom lockfile based on your current configuration",
        is_default_lock,
    )
    check(
        "To regenerate your lockfile based on your current configuration",
        not is_default_lock,
    )
# Example #2
def test_validate_user_lockfiles(
    invalid_reqs: bool,
    invalid_constraints: bool,
    caplog,
) -> None:
    """Validating a user lockfile logs the expected messages, and is skipped entirely when
    resolves are not enabled (i.e. manually managed)."""
    if invalid_constraints:
        runtime_ics = InterpreterConstraints(["==2.7.*"])
    else:
        runtime_ics = METADATA.valid_for_interpreter_constraints

    if invalid_reqs:
        req_strings = FrozenOrderedSet(["bad-req"])
    else:
        req_strings = FrozenOrderedSet(str(r) for r in METADATA.requirements)

    lockfile = Lockfile(
        file_path="lock.txt",
        file_path_description_of_origin="foo",
        resolve_name="a",
    )

    # Ignore validation if resolves are manually managed.
    setup_without_resolves = create_python_setup(
        InvalidLockfileBehavior.warn, enable_resolves=False
    )
    assert not should_validate_metadata(lockfile, setup_without_resolves)

    validate_metadata(
        METADATA,
        runtime_ics,
        lockfile,
        req_strings,
        create_python_setup(InvalidLockfileBehavior.warn),
    )

    def check(msg: str, expected: bool = True) -> None:
        # The message must be present in the captured log iff `expected` is True.
        assert (msg in caplog.text) == expected

    check("You are using the lockfile at lock.txt to install the resolve `a`")
    check(
        "The targets depend on requirements that are not in the lockfile: ['bad-req']",
        expected=invalid_reqs,
    )
    check("The targets use interpreter constraints", expected=invalid_constraints)
    check("./pants generate-lockfiles --resolve=a`")
# Example #3
async def build_pex(
    request: PexRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    platform: Platform,
    pex_runtime_env: PexRuntimeEnvironment,
) -> BuildPexResult:
    """Returns a PEX with the given settings.

    Builds a `pex` CLI argument list from the request (interpreter selection, sources,
    requirements/lockfile, layout), merges all input digests, runs the Pex CLI process
    on the current platform, and wraps the output digest in a BuildPexResult.
    """
    # Base args; `request.additional_args` is last so callers can append extra flags.
    argv = [
        "--output-file",
        request.output_filename,
        "--no-emit-warnings",
        *python_setup.manylinux_pex_args,
        *request.additional_args,
    ]

    # If set, the PEX will be built with (and later run by) this specific interpreter.
    python: PythonExecutable | None = None

    # NB: If `--platform` is specified, this signals that the PEX should not be built locally.
    # `--interpreter-constraint` only makes sense in the context of building locally. These two
    # flags are mutually exclusive. See https://github.com/pantsbuild/pex/issues/957.
    if request.platforms or request.complete_platforms:
        # Note that this means that this is not an internal-only pex.
        # TODO(#9560): consider validating that these platforms are valid with the interpreter
        #  constraints.
        argv.extend(request.platforms.generate_pex_arg_list())
        argv.extend(request.complete_platforms.generate_pex_arg_list())
    elif request.python:
        python = request.python
    elif request.internal_only:
        # NB: If it's an internal_only PEX, we do our own lookup of the interpreter based on the
        # interpreter constraints, and then will run the PEX with that specific interpreter. We
        # will have already validated that there were no platforms.
        python = await Get(PythonExecutable, InterpreterConstraints,
                           request.interpreter_constraints)
    else:
        # `--interpreter-constraint` options are mutually exclusive with the `--python` option,
        # so we only specify them if we have not already located a concrete Python.
        argv.extend(request.interpreter_constraints.generate_pex_arg_list())

    if python:
        argv.extend(["--python", python.path])

    if request.main is not None:
        argv.extend(request.main.iter_pex_args())

    # TODO(John Sirois): Right now any request requirements will shadow corresponding pex path
    #  requirements, which could lead to problems. Support shading python binaries.
    #  See: https://github.com/pantsbuild/pants/issues/9206
    if request.pex_path:
        argv.extend(
            ["--pex-path", ":".join(pex.name for pex in request.pex_path)])

    # All source files are placed under a common subdirectory inside the sandbox.
    source_dir_name = "source_files"
    argv.append(f"--sources-directory={source_dir_name}")
    sources_digest_as_subdir = await Get(
        Digest, AddPrefix(request.sources or EMPTY_DIGEST, source_dir_name))

    # Include any additional arguments and input digests required by the requirements.
    requirements_digests = []
    pex_lock_resolver_args = [*python_repos.pex_args]
    pip_resolver_args = [
        *python_repos.pex_args, "--resolver-version", "pip-2020-resolver"
    ]
    if isinstance(request.requirements, EntireLockfile):
        # Install everything from a single lockfile (either Pex-native or a
        # requirements.txt-style pseudo-lockfile resolved via pip).
        lockfile = await Get(
            LoadedLockfile,
            LoadedLockfileRequest(request.requirements.lockfile))
        concurrency_available = lockfile.requirement_estimate
        requirements_digests.append(lockfile.lockfile_digest)
        if lockfile.is_pex_native:
            argv.extend(["--lock", lockfile.lockfile_path])
            argv.extend(pex_lock_resolver_args)
        else:
            # We use pip to resolve a requirements.txt pseudo-lockfile, possibly with hashes.
            argv.extend(
                ["--requirement", lockfile.lockfile_path, "--no-transitive"])
            argv.extend(pip_resolver_args)
        if lockfile.metadata and request.requirements.complete_req_strings:
            # Warn/error if the lockfile no longer matches the current configuration.
            validate_metadata(
                lockfile.metadata,
                request.interpreter_constraints,
                lockfile.original_lockfile,
                request.requirements.complete_req_strings,
                python_setup,
            )
    else:
        # TODO: This is not the best heuristic for available concurrency, since the
        # requirements almost certainly have transitive deps which also need building, but it
        # is better than using something hardcoded.
        concurrency_available = len(request.requirements.req_strings)
        argv.extend(request.requirements.req_strings)

        if isinstance(request.requirements.from_superset, Pex):
            # Subset an already-built repository PEX rather than resolving from scratch.
            repository_pex = request.requirements.from_superset
            argv.extend(["--pex-repository", repository_pex.name])
            requirements_digests.append(repository_pex.digest)
        elif isinstance(request.requirements.from_superset, LoadedLockfile):
            # Subset a Pex-native lockfile.
            loaded_lockfile = request.requirements.from_superset
            # NB: This is also validated in the constructor.
            assert loaded_lockfile.is_pex_native
            if request.requirements.req_strings:
                requirements_digests.append(loaded_lockfile.lockfile_digest)
                argv.extend(["--lock", loaded_lockfile.lockfile_path])
                argv.extend(pex_lock_resolver_args)

                if loaded_lockfile.metadata:
                    validate_metadata(
                        loaded_lockfile.metadata,
                        request.interpreter_constraints,
                        loaded_lockfile.original_lockfile,
                        request.requirements.req_strings,
                        python_setup,
                    )
        else:
            assert request.requirements.from_superset is None

            # We use pip to perform a normal resolve.
            argv.extend(pip_resolver_args)
            if request.requirements.constraints_strings:
                # Materialize the constraints as a file in the sandbox for pip.
                constraints_file = "__constraints.txt"
                constraints_content = "\n".join(
                    request.requirements.constraints_strings)
                requirements_digests.append(await Get(
                    Digest,
                    CreateDigest([
                        FileContent(constraints_file,
                                    constraints_content.encode())
                    ]),
                ))
                argv.extend(["--constraints", constraints_file])

    # Everything the Pex CLI process needs in its input sandbox.
    merged_digest = await Get(
        Digest,
        MergeDigests((
            request.complete_platforms.digest,
            sources_digest_as_subdir,
            request.additional_inputs,
            *requirements_digests,
            *(pex.digest for pex in request.pex_path),
        )),
    )

    argv.extend(["--layout", request.layout.value])
    # A zipapp layout produces a single file; all other layouts produce a directory.
    output_files: Iterable[str] | None = None
    output_directories: Iterable[str] | None = None
    if PexLayout.ZIPAPP == request.layout:
        output_files = [request.output_filename]
    else:
        output_directories = [request.output_filename]

    process = await Get(
        Process,
        PexCliProcess(
            python=python,
            subcommand=(),
            extra_args=argv,
            additional_input_digest=merged_digest,
            description=_build_pex_description(request),
            output_files=output_files,
            output_directories=output_directories,
            concurrency_available=concurrency_available,
        ),
    )

    process = dataclasses.replace(process, platform=platform)

    # NB: Building a Pex is platform dependent, so in order to get a PEX that we can use locally
    # without cross-building, we specify that our PEX command should be run on the current local
    # platform.
    result = await Get(ProcessResult, Process, process)

    if pex_runtime_env.verbosity > 0:
        log_output = result.stderr.decode()
        if log_output:
            logger.info("%s", log_output)

    # The result digest must also carry any --pex-path PEXes so the built PEX can find them.
    digest = (await Get(
        Digest,
        MergeDigests(
            (result.output_digest, *(pex.digest for pex in request.pex_path))))
              if request.pex_path else result.output_digest)

    return BuildPexResult(result=result,
                          pex_filename=request.output_filename,
                          digest=digest,
                          python=python)
# Example #4
async def build_pex(
    request: PexRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    platform: Platform,
    pex_runtime_env: PexRuntimeEnvironment,
) -> BuildPexResult:
    """Returns a PEX with the given settings.

    Builds a `pex` CLI argument list from the request (interpreter selection, sources,
    requirements/lockfile/repository PEX, layout), merges all input digests, runs the
    Pex CLI process on the current platform, and wraps the output in a BuildPexResult.
    """
    # Base args; `request.additional_args` is last so callers can append extra flags.
    argv = [
        "--output-file",
        request.output_filename,
        "--no-emit-warnings",
        *python_setup.manylinux_pex_args,
        *request.additional_args,
    ]
    # When False, resolver args are omitted (the PEX is built from a repository PEX
    # or a Pex-native lockfile instead of resolving via pip).
    should_resolve = True

    repository_pex = (request.requirements.repository_pex if isinstance(
        request.requirements, PexRequirements) else None)
    if repository_pex:
        # Subset an already-built repository PEX rather than resolving from scratch.
        should_resolve = False
        argv.extend(["--pex-repository", repository_pex.name])

    # If set, the PEX will be built with (and later run by) this specific interpreter.
    python: PythonExecutable | None = None

    # NB: If `--platform` is specified, this signals that the PEX should not be built locally.
    # `--interpreter-constraint` only makes sense in the context of building locally. These two
    # flags are mutually exclusive. See https://github.com/pantsbuild/pex/issues/957.
    if request.platforms or request.complete_platforms:
        # TODO(#9560): consider validating that these platforms are valid with the interpreter
        #  constraints.
        argv.extend(request.platforms.generate_pex_arg_list())
        argv.extend(request.complete_platforms.generate_pex_arg_list())
    elif request.python:
        python = request.python
    elif request.internal_only:
        # NB: If it's an internal_only PEX, we do our own lookup of the interpreter based on the
        # interpreter constraints, and then will run the PEX with that specific interpreter. We
        # will have already validated that there were no platforms.
        python = await Get(PythonExecutable, InterpreterConstraints,
                           request.interpreter_constraints)
    else:
        # `--interpreter-constraint` options are mutually exclusive with the `--python` option,
        # so we only specify them if we have not already located a concrete Python.
        argv.extend(request.interpreter_constraints.generate_pex_arg_list())

    if python:
        argv.extend(["--python", python.path])

    if request.main is not None:
        argv.extend(request.main.iter_pex_args())

    # TODO(John Sirois): Right now any request requirements will shadow corresponding pex path
    #  requirements, which could lead to problems. Support shading python binaries.
    #  See: https://github.com/pantsbuild/pants/issues/9206
    if request.pex_path:
        argv.extend(
            ["--pex-path", ":".join(pex.name for pex in request.pex_path)])

    # All source files are placed under a common subdirectory inside the sandbox.
    source_dir_name = "source_files"
    argv.append(f"--sources-directory={source_dir_name}")
    sources_digest_as_subdir = await Get(
        Digest, AddPrefix(request.sources or EMPTY_DIGEST, source_dir_name))

    # Digests to merge into the process's input sandbox; EMPTY_DIGEST when unused.
    additional_inputs_digest = request.additional_inputs or EMPTY_DIGEST
    repository_pex_digest = repository_pex.digest if repository_pex else EMPTY_DIGEST
    constraints_file_digest = EMPTY_DIGEST
    requirements_file_digest = EMPTY_DIGEST
    # Used below as the concurrency hint for the process; set in both branches.
    requirement_count: int

    if isinstance(request.requirements, (Lockfile, LockfileContent)):
        is_monolithic_resolve = True
        resolve_name = request.requirements.resolve_name

        if isinstance(request.requirements, Lockfile):
            # Lockfile exists on disk: read it so we can sniff its format and metadata.
            synthetic_lock = False
            lock_path = request.requirements.file_path
            requirements_file_digest = await Get(
                Digest,
                PathGlobs(
                    [lock_path],
                    glob_match_error_behavior=GlobMatchErrorBehavior.error,
                    description_of_origin=request.requirements.
                    file_path_description_of_origin,
                ),
            )
            _digest_contents = await Get(DigestContents, Digest,
                                         requirements_file_digest)
            lock_bytes = _digest_contents[0].content
        else:
            # In-memory lockfile content (e.g. a built-in default lockfile).
            synthetic_lock = True
            _fc = request.requirements.file_content
            lock_path, lock_bytes = (_fc.path, _fc.content)
            requirements_file_digest = await Get(Digest, CreateDigest([_fc]))

        if _is_probably_pex_json_lockfile(lock_bytes):
            # Pex-native JSON lockfile: Pex consumes it directly via --lock,
            # so no pip resolve is needed.
            should_resolve = False
            header_delimiter = "//"
            requirements_file_digest = await Get(
                Digest,
                CreateDigest([
                    FileContent(
                        lock_path,
                        _strip_comments_from_pex_json_lockfile(lock_bytes))
                ]),
            )
            requirement_count = _pex_lockfile_requirement_count(lock_bytes)
            argv.extend(["--lock", lock_path])
        else:
            header_delimiter = "#"
            # Note: this is a very naive heuristic. It will overcount because entries often
            # have >1 line due to `--hash`.
            requirement_count = len(lock_bytes.decode().splitlines())
            argv.extend(["--requirement", lock_path, "--no-transitive"])

        if should_validate_metadata(request.requirements,
                                    python_setup):  # type: ignore[arg-type]
            # NOTE(review): "delimeter" (sic) presumably matches the keyword name
            # declared by `from_lockfile` — confirm before "fixing" the spelling.
            metadata = PythonLockfileMetadata.from_lockfile(
                lock_bytes,
                **(dict() if synthetic_lock else dict(
                    lockfile_path=lock_path)),
                resolve_name=resolve_name,
                delimeter=header_delimiter,
            )

            # Warn/error if the lockfile no longer matches the current configuration.
            validate_metadata(
                metadata,
                request.interpreter_constraints,
                request.requirements,
                python_setup  # type: ignore[arg-type]
            )

    else:
        assert isinstance(request.requirements, PexRequirements)
        is_monolithic_resolve = request.requirements.is_all_constraints_resolve
        requirement_count = len(request.requirements.req_strings)

        if request.requirements.constraints_strings:
            # Materialize the constraints as a file in the sandbox for pip.
            # NOTE(review): local name "constaints_content" is misspelled (harmless).
            constraints_file = "__constraints.txt"
            constaints_content = "\n".join(
                request.requirements.constraints_strings)
            constraints_file_digest = await Get(
                Digest,
                CreateDigest([
                    FileContent(constraints_file, constaints_content.encode())
                ]),
            )
            argv.extend(["--constraints", constraints_file])

        argv.extend(request.requirements.req_strings)

    # Everything the Pex CLI process needs in its input sandbox.
    merged_digest = await Get(
        Digest,
        MergeDigests((
            request.complete_platforms.digest,
            sources_digest_as_subdir,
            additional_inputs_digest,
            constraints_file_digest,
            requirements_file_digest,
            repository_pex_digest,
            *(pex.digest for pex in request.pex_path),
        )),
    )

    if request.internal_only or is_monolithic_resolve:
        # This is a much friendlier layout for the CAS than the default zipapp.
        layout = PexLayout.PACKED
    else:
        layout = request.layout or PexLayout.ZIPAPP
    argv.extend(["--layout", layout.value])

    # A zipapp layout produces a single file; all other layouts produce a directory.
    output_files: Iterable[str] | None = None
    output_directories: Iterable[str] | None = None
    if PexLayout.ZIPAPP == layout:
        output_files = [request.output_filename]
    else:
        output_directories = [request.output_filename]

    if should_resolve:
        # Only add resolver args when pip will actually perform a resolve.
        argv.extend([
            *python_repos.pex_args, "--resolver-version", "pip-2020-resolver"
        ])

    process = await Get(
        Process,
        PexCliProcess(
            python=python,
            subcommand=(),
            extra_args=argv,
            additional_input_digest=merged_digest,
            description=_build_pex_description(request),
            output_files=output_files,
            output_directories=output_directories,
            # TODO: This is not the best heuristic for available concurrency, since the
            # requirements almost certainly have transitive deps which also need building, but it
            # is better than using something hardcoded.
            concurrency_available=requirement_count,
        ),
    )

    process = dataclasses.replace(process, platform=platform)

    # NB: Building a Pex is platform dependent, so in order to get a PEX that we can use locally
    # without cross-building, we specify that our PEX command should be run on the current local
    # platform.
    result = await Get(ProcessResult, Process, process)

    if pex_runtime_env.verbosity > 0:
        log_output = result.stderr.decode()
        if log_output:
            logger.info("%s", log_output)

    # The result digest must also carry any --pex-path PEXes so the built PEX can find them.
    digest = (await Get(
        Digest,
        MergeDigests(
            (result.output_digest, *(pex.digest for pex in request.pex_path))))
              if request.pex_path else result.output_digest)

    return BuildPexResult(result=result,
                          pex_filename=request.output_filename,
                          digest=digest,
                          python=python)