Example #1
async def package_python_awslambda(
    field_set: PythonAwsLambdaFieldSet, lambdex: Lambdex
) -> BuiltPackage:
    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip",
    )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            entry_point=None,
            output_filename=output_filename,
            platforms=PexPlatforms([platform]),
            additional_args=[
                # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
                "--manylinux=manylinux2014",
                # When we're executing Pex on Linux, allow a local interpreter to be resolved if
                # available and matching the AMI platform.
                "--resolve-local-platforms",
            ],
        )
    )

    lambdex_request = PexRequest(
        output_filename="lambdex.pex",
        internal_only=True,
        requirements=PexRequirements(lambdex.all_requirements),
        interpreter_constraints=PexInterpreterConstraints(lambdex.interpreter_constraints),
        entry_point=lambdex.entry_point,
    )

    lambdex_pex, pex_result, handler = await MultiGet(
        Get(Pex, PexRequest, lambdex_request),
        Get(TwoStepPex, TwoStepPexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler, ResolvePythonAwsHandlerRequest(field_set.handler)),
    )
    input_digest = await Get(Digest, MergeDigests((pex_result.pex.digest, lambdex_pex.digest)))

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        PexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=input_digest,
            output_files=(output_filename,),
            description=f"Setting up handler in {output_filename}",
        ),
    )
    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=(
            f"    Runtime: {field_set.runtime.value}",
            # The AWS-facing handler function is always lambdex_handler.handler, which is the
            # wrapper injected by lambdex that manages invocation of the actual handler.
            "    Handler: lambdex_handler.handler",
        ),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact,))
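
The platform-string logic above is self-contained enough to check in isolation. A minimal sketch, assuming only a (major, minor) version pair like the one the runtime field yields (the function name is illustrative, not a Pants API):

def lambda_pex_platform(py_major: int, py_minor: int) -> str:
    """Build the PEX platform string for an AWS Lambda Python runtime."""
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # The pymalloc "m" ABI flag was removed in Python 3.8 (bpo-36707).
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    # CPython 2.7 wide-unicode builds additionally carry the "u" flag.
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    return platform

assert lambda_pex_platform(3, 7) == "linux_x86_64-cp-37-cp37m"
assert lambda_pex_platform(3, 8) == "linux_x86_64-cp-38-cp38"
assert lambda_pex_platform(2, 7) == "linux_x86_64-cp-27-cp27mu"
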
Example #2
async def package_python_awslambda(
        field_set: PythonAwsLambdaFieldSet, lambdex: Lambdex,
        union_membership: UnionMembership) -> BuiltPackage:
    output_filename = field_set.output_path.value_or_default(
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip",
    )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"

    additional_pex_args = (
        # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
        "--manylinux=manylinux2014",
        # When we're executing Pex on Linux, allow a local interpreter to be resolved if
        # available and matching the AMI platform.
        "--resolve-local-platforms",
    )
    pex_request = PexFromTargetsRequest(
        addresses=[field_set.address],
        internal_only=False,
        output_filename=output_filename,
        platforms=PexPlatforms([platform]),
        additional_args=additional_pex_args,
        additional_lockfile_args=additional_pex_args,
    )

    lambdex_request = PexRequest(
        output_filename="lambdex.pex",
        internal_only=True,
        requirements=lambdex.pex_requirements(),
        interpreter_constraints=lambdex.interpreter_constraints,
        main=lambdex.main,
    )

    lambdex_pex, pex_result, handler, transitive_targets = await MultiGet(
        Get(VenvPex, PexRequest, lambdex_request),
        Get(Pex, PexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler,
            ResolvePythonAwsHandlerRequest(field_set.handler)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX and is a common
    # gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            f"The `python_awslambda` target {field_set.address} transitively depends on the below "
            "`files` targets, but Pants will not include them in the built Lambda. Filesystem APIs "
            "like `open()` are not able to load files within the binary itself; instead, they "
            "read from the current working directory."
            f"\n\nInstead, use `resources` targets. See {doc_url('resources')}."
            f"\n\nFiles targets dependencies: {files_addresses}")

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=pex_result.digest,
            output_files=(output_filename, ),
            description=f"Setting up handler in {output_filename}",
        ),
    )
    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=(
            f"    Runtime: {field_set.runtime.value}",
            # The AWS-facing handler function is always lambdex_handler.handler, which is the
            # wrapper injected by lambdex that manages invocation of the actual handler.
            "    Handler: lambdex_handler.handler",
        ),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact, ))
Example #3
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies)))
            for tgt in targets)
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses))
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(StrippedPythonSourceFiles,
                                     PythonSourceFilesRequest(all_targets))
        input_digests.append(
            prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup)
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints)

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField]
         for tgt in all_targets if tgt.has_field(PythonRequirementsField)),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest,
                                              constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            ))

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name)
            for req in exact_reqs
        }
        constraint_file_projects = {
            canonicalize_project_name(req.project_name)
            for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints else
                f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}")

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
                python_setup.resolve_all_constraints
                == ResolveAllConstraintsOption.NONDEPLOYABLES
                and request.internal_only):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements.")
            else:
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(
                            str(req) for req in constraints_file_reqs),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                    ),
                )
    elif (python_setup.resolve_all_constraints !=
          ResolveAllConstraintsOption.NEVER
          and python_setup.resolve_all_constraints_was_set_explicitly()):
        raise ValueError(
            "[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            "either [python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
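
The PEP 503 comparison in this rule depends on project-name canonicalization. A quick sketch of that normalization using the `packaging` library, whose `canonicalize_name` behaves like the `canonicalize_project_name` helper used above:

from packaging.utils import canonicalize_name

# PEP 503: names are case-insensitive, and runs of "-", "_", and "."
# are equivalent to a single "-".
assert canonicalize_name("Foo_-Bar.BAZ") == "foo-bar-baz"
assert canonicalize_name("foo-bar-baz") == "foo-bar-baz"
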
Example #4
async def generate_lockfile(
    req: PythonLockfileRequest,
    poetry_subsystem: PoetrySubsystem,
    generate_lockfiles_subsystem: GenerateLockfilesSubsystem,
) -> PythonLockfile:
    pyproject_toml = create_pyproject_toml(req.requirements, req.interpreter_constraints).encode()
    pyproject_toml_digest, launcher_digest = await MultiGet(
        Get(Digest, CreateDigest([FileContent("pyproject.toml", pyproject_toml)])),
        Get(Digest, CreateDigest([POETRY_LAUNCHER])),
    )

    poetry_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="poetry.pex",
            internal_only=True,
            requirements=poetry_subsystem.pex_requirements(),
            interpreter_constraints=poetry_subsystem.interpreter_constraints,
            main=EntryPoint(PurePath(POETRY_LAUNCHER.path).stem),
            sources=launcher_digest,
        ),
    )

    # WONTFIX(#12314): Wire up Poetry to named_caches.
    # WONTFIX(#12314): Wire up all the pip options like indexes.
    poetry_lock_result = await Get(
        ProcessResult,
        VenvPexProcess(
            poetry_pex,
            argv=("lock",),
            input_digest=pyproject_toml_digest,
            output_files=("poetry.lock", "pyproject.toml"),
            description=req._description or f"Generate lockfile for {req.resolve_name}",
            # Instead of caching lockfile generation with LMDB, we instead use the invalidation
            # scheme from `lockfile_metadata.py` to check for stale/invalid lockfiles. This is
            # necessary so that our invalidation is resilient to deleting LMDB or running on a
            # new machine.
            #
            # We disable caching with LMDB so that when you generate a lockfile, you always get
            # the most up-to-date snapshot of the world. This is generally desirable and also
            # necessary to avoid an awkward edge case where different developers generate different
            # lockfiles even when generating at the same time. See
            # https://github.com/pantsbuild/pants/issues/12591.
            cache_scope=ProcessCacheScope.PER_SESSION,
        ),
    )
    poetry_export_result = await Get(
        ProcessResult,
        VenvPexProcess(
            poetry_pex,
            argv=("export", "-o", req.lockfile_dest),
            input_digest=poetry_lock_result.output_digest,
            output_files=(req.lockfile_dest,),
            description=(
                f"Exporting Poetry lockfile to requirements.txt format for {req.resolve_name}"
            ),
            level=LogLevel.DEBUG,
        ),
    )

    initial_lockfile_digest_contents = await Get(
        DigestContents, Digest, poetry_export_result.output_digest
    )
    # TODO(#12314) Improve error message on `Requirement.parse`
    metadata = LockfileMetadata.new(
        req.interpreter_constraints,
        {Requirement.parse(i) for i in req.requirements},
    )
    lockfile_with_header = metadata.add_header_to_lockfile(
        initial_lockfile_digest_contents[0].content,
        regenerate_command=(
            generate_lockfiles_subsystem.custom_command
            or req._regenerate_command
            or f"./pants generate-lockfiles --resolve={req.resolve_name}"
        ),
    )
    final_lockfile_digest = await Get(
        Digest, CreateDigest([FileContent(req.lockfile_dest, lockfile_with_header)])
    )
    return PythonLockfile(final_lockfile_digest, req.resolve_name, req.lockfile_dest)
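
Outside the Pants engine, the two `VenvPexProcess` invocations above amount to running Poetry twice. A rough equivalent, assuming `poetry` is on PATH and the generated `pyproject.toml` is in the current directory (older Poetry versions ship `export` built in; newer ones need the poetry-plugin-export plugin):

import subprocess

# Resolve all requirements into poetry.lock, as the first process does.
subprocess.run(["poetry", "lock"], check=True)
# Export the lock to requirements.txt format, mirroring req.lockfile_dest.
subprocess.run(["poetry", "export", "-o", "lock.txt"], check=True)
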
Example #5
async def mypy_typecheck_partition(
    partition: MyPyPartition,
    config_file: MyPyConfigFile,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> CheckResult:
    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe
            )
        )
        else mypy.interpreter_constraints
    )

    closure_sources_get = Get(PythonSourceFiles,
                              PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            tgt.get(PythonSources) for tgt in partition.root_targets))

    # See `requirements_venv_pex` for how this will get wrapped in a `VenvPex`.
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=mypy.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings,
            ),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    closure_sources, roots_sources, mypy_pex, requirements_pex = await MultiGet(
        closure_sources_get, roots_sources_get, mypy_pex_get,
        requirements_pex_get)

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest(
            [FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    # This creates a venv with all the 3rd-party requirements used by the code. We tell MyPy to
    # use this venv by setting `--python-executable`. Note that this Python interpreter is
    # different than what we run MyPy with.
    #
    # We could have directly asked the `PexFromTargetsRequest` to return a `VenvPex`, rather than
    # `Pex`, but that would mean missing out on sharing a cache with other goals like `test` and
    # `run`.
    requirements_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="requirements_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    requirements_venv_pex, file_list_digest = await MultiGet(
        requirements_venv_pex_request, file_list_digest_request)

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            first_party_plugins.sources_digest,
            closure_sources.source_files.snapshot.digest,
            requirements_venv_pex.digest,
            config_file.digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(first_party_plugins.source_roots,
                            closure_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                venv_python=requirements_venv_pex.python.argv0,
                file_list_path=file_list_path,
                python_version=config_file.python_version_to_autoset(
                    partition.interpreter_constraints,
                    python_setup.interpreter_universe),
            ),
            input_digest=merged_input_files,
            extra_env=env,
            output_directories=(REPORT_DIR, ),
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return CheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
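
The interpreter-constraint selection at the top of this rule reduces to a small pure function. A sketch with plain strings standing in for `InterpreterConstraints` objects (the names here are illustrative, not Pants APIs):

def choose_tool_constraints(
    code_ics: str,
    tool_ics: str,
    tool_ics_are_default: bool,
    code_requires_38_plus: bool,
) -> str:
    # Run the tool under the code's own constraints only when the user kept
    # the tool default and the code cannot run on <3.8 (typed-ast's limit).
    if tool_ics_are_default and code_requires_38_plus:
        return code_ics
    return tool_ics

assert choose_tool_constraints(">=3.9", ">=3.6", True, True) == ">=3.9"
assert choose_tool_constraints(">=3.6,<3.8", ">=3.6", True, False) == ">=3.6"
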
Example #6
async def setup_black(
    setup_request: SetupRequest, black: Black, python_setup: PythonSetup
) -> Setup:
    # Black requires 3.6+ but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6, and 3.7.
    # However, typed-ast does not understand 3.8+, so instead we must run Black with Python 3.8+
    # when relevant. We only do this if <3.8 can't be used, as we don't want a loose requirement
    # like `>=3.6` to result in requiring Python 3.8, which would error if 3.8 is not installed on
    # the machine.
    all_interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
        (field_set.interpreter_constraints for field_set in setup_request.request.field_sets),
        python_setup,
    )
    tool_interpreter_constraints = (
        all_interpreter_constraints
        if (
            black.options.is_default("interpreter_constraints")
            and all_interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe
            )
        )
        else black.interpreter_constraints
    )

    black_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="black.pex",
            internal_only=True,
            requirements=black.pex_requirements(),
            interpreter_constraints=tool_interpreter_constraints,
            main=black.main,
        ),
    )

    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source for field_set in setup_request.request.field_sets),
    )

    source_files, black_pex = await MultiGet(source_files_get, black_pex_get)
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )

    config_files = await Get(
        ConfigFiles, ConfigFilesRequest, black.config_request(source_files_snapshot.dirs)
    )
    input_digest = await Get(
        Digest, MergeDigests((source_files_snapshot.digest, config_files.snapshot.digest))
    )

    process = await Get(
        Process,
        VenvPexProcess(
            black_pex,
            argv=generate_argv(source_files, black, check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run Black on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
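
The `prior_formatter_result` handling is how Pants chains formatters: the first formatter in the chain sees the original sources, and each later formatter sees its predecessor's output. A sketch of that fallback, with plain dicts standing in for snapshots (hypothetical data):

def snapshot_to_format(request_snapshot, prior_formatter_result):
    # First formatter in the chain: no prior result, use the original sources.
    # Later formatters: reuse the previous formatter's output snapshot.
    return request_snapshot if prior_formatter_result is None else prior_formatter_result

original = {"app.py": "x=1"}
after_black = {"app.py": "x = 1\n"}
assert snapshot_to_format(original, None) == original
assert snapshot_to_format(original, after_black) == after_black
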
Example #7
async def mypy_typecheck_partition(partition: MyPyPartition,
                                   mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    plugin_transitive_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))
    plugin_transitive_targets, launcher_script = await MultiGet(
        plugin_transitive_targets_request,
        Get(Digest, CreateDigest([LAUNCHER_FILE])))

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None if partition.python_version_already_configured else
        partition.interpreter_constraints.minimum_python_version())

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8, so instead we must run MyPy with
    # Python 3.8 when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8, which would error if
    # 3.8 is not installed on the machine.
    tool_interpreter_constraints = PexInterpreterConstraints(
        ("CPython>=3.8",)
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else mypy.interpreter_constraints
    )

    plugin_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(plugin_transitive_targets.closure))
    typechecked_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))

    # Normally, this `requirements.pex` would be merged with mypy.pex via `--pex-path`. However,
    # this will cause a runtime error if the interpreter constraints are different between the
    # PEXes and they have incompatible wheels.
    #
    # Instead, we teach MyPy about the requirements by extracting the distributions from
    # requirements.pex and setting EXTRACTED_WHEELS, which our custom launcher script then
    # looks for.
    #
    # Conventionally, MyPy users might instead set `MYPYPATH` for this. However, doing this
    # results in type checking the requirements themselves.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (addr for addr in partition.field_set_addresses),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )
    mypy_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            sources=launcher_script,
            requirements=PexRequirements(
                itertools.chain(mypy.all_requirements, plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
            entry_point=PurePath(LAUNCHER_FILE.path).stem,
        ),
    )

    config_digest_request = Get(Digest, PathGlobs, config_path_globs(mypy))

    (
        plugin_sources,
        typechecked_sources,
        mypy_pex,
        requirements_pex,
        config_digest,
    ) = await MultiGet(
        plugin_sources_request,
        typechecked_sources_request,
        mypy_pex_request,
        requirements_pex_request,
        config_digest_request,
    )

    typechecked_srcs_snapshot = typechecked_sources.source_files.snapshot
    file_list_path = "__files.txt"
    python_files = "\n".join(
        determine_python_files(
            typechecked_sources.source_files.snapshot.files))
    create_file_list_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )

    file_list_digest, extracted_pex_distributions = await MultiGet(
        create_file_list_request,
        Get(ExtractedPexDistributions, Pex, requirements_pex))

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            plugin_sources.source_files.snapshot.digest,
            typechecked_srcs_snapshot.digest,
            mypy_pex.digest,
            extracted_pex_distributions.digest,
            config_digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(plugin_sources.source_roots,
                            typechecked_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "EXTRACTED_WHEELS": ":".join(extracted_pex_distributions.wheel_directory_paths),
    }

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            mypy_pex,
            argv=generate_argv(mypy,
                               file_list_path=file_list_path,
                               python_version=python_version),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(typechecked_srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))
Example #8
async def build_local_dists(
    request: LocalDistsPexRequest,
) -> LocalDistsPex:

    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))
    applicable_targets = [
        tgt for tgt in transitive_targets.closure if PythonDistributionFieldSet.is_applicable(tgt)
    ]

    python_dist_field_sets = [
        PythonDistributionFieldSet.create(target) for target in applicable_targets
    ]

    dists = await MultiGet(
        [Get(BuiltPackage, PackageFieldSet, field_set) for field_set in python_dist_field_sets]
    )

    # The primary use-case of the "local dists" feature is to support consuming native extensions
    # as wheels without having to publish them first.
    # It doesn't seem very useful to consume locally-built sdists, and it makes it hard to
    # reason about possible sys.path collisions between the in-repo sources and whatever the
    # sdist will place on the sys.path when it's installed.
    # So for now we simply ignore sdists, with a warning if necessary.
    provided_files = set()
    wheels = []

    all_contents = await MultiGet(Get(DigestContents, Digest, dist.digest) for dist in dists)
    for dist, contents, tgt in zip(dists, all_contents, applicable_targets):
        artifacts = {(a.relpath or "") for a in dist.artifacts}
        # A given local dist might build a wheel and an sdist (and maybe other artifacts -
        # we don't know what setup command was run...)
        # As long as there is a wheel, we can ignore the other artifacts.
        wheel = next((art for art in artifacts if art.endswith(".whl")), None)
        if wheel:
            wheel_content = next(content for content in contents if content.path == wheel)
            wheels.append(wheel)
            buf = BytesIO()
            buf.write(wheel_content.content)
            buf.seek(0)
            with zipfile.ZipFile(buf) as zf:
                provided_files.update(zf.namelist())
        else:
            logger.warning(
                f"Encountered a dependency on the {tgt.alias} target at {tgt.address.spec}, but "
                "this target does not produce a Python wheel artifact. Therefore this target's "
                "code will be used directly from sources, without a distribution being built, "
                "and therefore any native extensions in it will not be built.\n\n"
                f"See {doc_url('python-distributions')} for details on how to set up a {tgt.alias} "
                "target to produce a wheel."
            )

    dists_digest = await Get(Digest, MergeDigests([dist.digest for dist in dists]))
    wheels_digest = await Get(Digest, DigestSubset(dists_digest, PathGlobs(["**/*.whl"])))

    dists_pex = await Get(
        Pex,
        PexRequest(
            output_filename="local_dists.pex",
            requirements=PexRequirements(wheels),
            interpreter_constraints=request.interpreter_constraints,
            additional_inputs=wheels_digest,
            internal_only=request.internal_only,
        ),
    )

    # We check source roots in reverse lexicographic order,
    # so we'll find the innermost root that matches.
    source_roots = list(reversed(sorted(request.sources.source_roots)))
    remaining_sources = set(request.sources.source_files.files)
    unrooted_files_set = set(request.sources.source_files.unrooted_files)
    for source in request.sources.source_files.files:
        if source not in unrooted_files_set:
            for source_root in source_roots:
                source_relpath = fast_relpath_optional(source, source_root)
                if source_relpath is not None and source_relpath in provided_files:
                    remaining_sources.remove(source)
    remaining_sources_snapshot = await Get(
        Snapshot,
        DigestSubset(
            request.sources.source_files.snapshot.digest, PathGlobs(sorted(remaining_sources))
        ),
    )
    subtracted_sources = PythonSourceFiles(
        SourceFiles(remaining_sources_snapshot, request.sources.source_files.unrooted_files),
        request.sources.source_roots,
    )

    return LocalDistsPex(dists_pex, subtracted_sources)
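
The `provided_files` bookkeeping relies on wheels being zip archives whose member names are the paths the wheel installs. A self-contained sketch of that namelist extraction, with a fabricated in-memory "wheel" standing in for the bytes read out of the built package's digest:

import zipfile
from io import BytesIO

buf = BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("mypkg/native.py", "# stand-in for a built module")
buf.seek(0)

with zipfile.ZipFile(buf) as zf:
    provided_files = set(zf.namelist())
assert "mypkg/native.py" in provided_files
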
Example #9
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = InterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or InterpreterConstraints(
            python_setup.interpreter_constraints
        )

    sources_digests = []
    if request.additional_sources:
        sources_digests.append(request.additional_sources)
    if request.include_source_files:
        sources = await Get(PythonSourceFiles, PythonSourceFilesRequest(all_targets))
    else:
        sources = PythonSourceFiles.empty()

    additional_inputs_digests = []
    if request.additional_inputs:
        additional_inputs_digests.append(request.additional_inputs)
    additional_args = request.additional_args
    if request.include_local_dists:
        # Note that LocalDistsPexRequest has no `direct_deps_only` mode, so we will build all
        # local dists in the transitive closure even if the request was for direct_deps_only.
        # Since we currently use `direct_deps_only` in one case (building a requirements pex
        # when running pylint) and in that case include_local_dists=False, this seems harmless.
        local_dists = await Get(
            LocalDistsPex,
            LocalDistsPexRequest(
                request.addresses,
                internal_only=request.internal_only,
                interpreter_constraints=interpreter_constraints,
                sources=sources,
            ),
        )
        remaining_sources = local_dists.remaining_sources
        additional_inputs_digests.append(local_dists.pex.digest)
        additional_args += ("--requirements-pex", local_dists.pex.name)
    else:
        remaining_sources = sources

    remaining_sources_stripped = await Get(
        StrippedPythonSourceFiles, PythonSourceFiles, remaining_sources
    )
    sources_digests.append(remaining_sources_stripped.stripped_source_files.snapshot.digest)

    merged_sources_digest, additional_inputs = await MultiGet(
        Get(Digest, MergeDigests(sources_digests)),
        Get(Digest, MergeDigests(additional_inputs_digests)),
    )

    requirements = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
        apply_constraints=True,
    )

    description = request.description

    if requirements:
        repository_pex: Pex | None = None
        if python_setup.requirement_constraints:
            maybe_constraints_repository_pex = await Get(
                _ConstraintsRepositoryPex,
                _ConstraintsRepositoryPexRequest(
                    requirements,
                    request.platforms,
                    interpreter_constraints,
                    request.internal_only,
                    request.additional_lockfile_args,
                ),
            )
            if maybe_constraints_repository_pex.maybe_pex:
                repository_pex = maybe_constraints_repository_pex.maybe_pex
        elif (
            python_setup.resolve_all_constraints
            and python_setup.resolve_all_constraints_was_set_explicitly()
        ):
            raise ValueError(
                "`[python].resolve_all_constraints` is enabled, so "
                "`[python].requirement_constraints` must also be set."
            )
        elif request.resolve_and_lockfile:
            resolve, lockfile = request.resolve_and_lockfile
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {lockfile} for the resolve `{resolve}`",
                    output_filename=f"{path_safe(resolve)}_lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=lockfile,
                        file_path_description_of_origin=(
                            f"the resolve `{resolve}` (from "
                            "`[python].experimental_resolves_to_lockfiles`)"
                        ),
                        # TODO(#12314): Hook up lockfile staleness check.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        elif python_setup.lockfile:
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {python_setup.lockfile}",
                    output_filename="lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=python_setup.lockfile,
                        file_path_description_of_origin=(
                            "the option `[python].experimental_lockfile`"
                        ),
                        # TODO(#12314): Hook up lockfile staleness check once multiple lockfiles
                        # are supported.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        requirements = dataclasses.replace(requirements, repository_pex=repository_pex)

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_sources_digest,
        additional_inputs=additional_inputs,
        additional_args=additional_args,
        description=description,
    )
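
When requirements are present, this rule picks at most one "repository" PEX source in a fixed order: a constraints file wins over a named resolve's lockfile, which wins over the global experimental lockfile. A sketch of that precedence, with string tags standing in for the actual PEX requests (the error branch for `resolve_all_constraints` without constraints is omitted):

from __future__ import annotations

def repository_pex_source(
    has_constraints_file: bool,
    resolve_and_lockfile: tuple[str, str] | None,
    global_lockfile: str | None,
) -> str | None:
    if has_constraints_file:
        return "constraints-repository-pex"
    if resolve_and_lockfile:
        return "named-resolve-lockfile-pex"
    if global_lockfile:
        return "global-lockfile-pex"
    return None

assert repository_pex_source(True, ("data-science", "ds.lock"), None) == "constraints-repository-pex"
assert repository_pex_source(False, None, "3rdparty/lock.txt") == "global-lockfile-pex"
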
Example #10
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )

    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            entry_point="pytest:main",
            internal_only=True,
            additional_args=(
                # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
                # `importlib_metadata` and thus `zipp`, does not play nicely when doing import
                # magic directly from zip files. `zipp` has pathologically bad behavior with large
                # zipfiles.
                # TODO: this does have a performance cost as the pex must now be expanded to disk.
                # Long term, it would be better to fix Zipp (whose fix would then need to be used
                # by importlib_metadata and then by Pytest). See
                # https://github.com/jaraco/zipp/pull/26.
                "--not-zip-safe",
                # TODO(John Sirois): Support shading python binaries:
                #   https://github.com/pantsbuild/pants/issues/9206
                "--pex-path",
                requirements_pex_request.input.output_filename,
            ),
        ),
    )

    prepared_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Create any assets that the test depends on through the `runtime_package_dependencies` field.
    assets: Tuple[BuiltPackage, ...] = ()
    unparsed_runtime_packages = (request.field_set.runtime_package_dependencies
                                 .to_unparsed_address_inputs())
    if unparsed_runtime_packages.values:
        runtime_package_targets = await Get(Targets, UnparsedAddressInputs,
                                            unparsed_runtime_packages)
        field_sets_per_target = await Get(
            FieldSetsPerTarget,
            FieldSetsPerTargetRequest(PackageFieldSet,
                                      runtime_package_targets),
        )
        assets = await MultiGet(
            Get(BuiltPackage, PackageFieldSet, field_set)
            for field_set in field_sets_per_target.field_sets)

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    pytest_pex, requirements_pex, prepared_sources, field_set_source_files = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            requirements_pex.digest,
            pytest_pex.digest,
            *(binary.digest for binary in assets),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }

    extra_env.update(test_extra_env.env)

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        PexProcess(
            pytest_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
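
The options assembled above are ordinary pytest / pytest-cov flags, split between the argv and `PYTEST_ADDOPTS`. For reference, a hypothetical final invocation for one test file, using the same flag spellings as the rule (paths and file names illustrative):

argv = [
    "pytest",
    "--cov-report=",  # pytest-cov: suppress terminal report output
    "--cov", ".",     # measure coverage over the default filter
    "tests/test_app.py",
]
env = {
    "PYTEST_ADDOPTS": "--color=yes --junitxml=tests.test_app.xml -o junit_family=xunit2",
    "PEX_EXTRA_SYS_PATH": "src/python",  # source roots, colon-separated
}
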
Example #11
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    download_protoc_request = Get(DownloadedExternalTool, ExternalToolRequest,
                                  protoc.get_request(Platform.current))

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest,
                                    CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources, ),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSources]]))

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    mypy_pex = None
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(
                bin_names=[protoc_gen_mypy_script],
                pex_request=PexRequest(
                    output_filename="mypy_protobuf.pex",
                    internal_only=True,
                    requirements=PexRequirements(
                        [python_protobuf_subsystem.mypy_plugin_version]),
                    # TODO(John Sirois): Fix these interpreter constraints to track the actual
                    #  python requirement of the mypy_plugin_version or else plumb an option for
                    #  manually setting the constraint to track what mypy_plugin_version needs:
                    #  https://github.com/pantsbuild/pants/issues/11565
                    # Here we guess a constraint that will likely work with any mypy_plugin_version
                    # selected.
                    interpreter_constraints=PexInterpreterConstraints(
                        ["CPython>=3.5"]),
                ),
            ),
        )

    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggle).value
        else None
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend([
            f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
            "--mypy_out",
            output_dir,
        ])
    if downloaded_grpc_plugin:
        argv.extend([
            f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}",
            "--grpc_out", output_dir
        ])
    argv.extend(target_sources_stripped.snapshot.files)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(
            request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
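
The argv constructed above mirrors a direct protoc invocation. A hypothetical command line with both optional plugins enabled (all paths illustrative):

argv = [
    "protoc",
    "--python_out", "_generated_files",
    # mypy-protobuf plugin: writes .pyi stubs next to the generated sources.
    "--plugin=protoc-gen-mypy=/path/to/protoc-gen-mypy",
    "--mypy_out", "_generated_files",
    # gRPC plugin: writes *_pb2_grpc.py service stubs.
    "--plugin=protoc-gen-grpc=/path/to/grpc_python_plugin",
    "--grpc_out", "_generated_files",
    "app/service.proto",
]
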
Example #12
async def create_pex_from_targets(request: PexFromTargetsRequest) -> PexRequest:
    interpreter_constraints = await Get(
        InterpreterConstraints,
        InterpreterConstraintsRequest,
        request.to_interpreter_constraints_request(),
    )

    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))

    sources_digests = []
    if request.additional_sources:
        sources_digests.append(request.additional_sources)
    if request.include_source_files:
        sources = await Get(PythonSourceFiles, PythonSourceFilesRequest(transitive_targets.closure))
    else:
        sources = PythonSourceFiles.empty()

    additional_inputs_digests = []
    if request.additional_inputs:
        additional_inputs_digests.append(request.additional_inputs)
    additional_args = request.additional_args
    if request.include_local_dists:
        local_dists = await Get(
            LocalDistsPex,
            LocalDistsPexRequest(
                request.addresses,
                internal_only=request.internal_only,
                interpreter_constraints=interpreter_constraints,
                sources=sources,
            ),
        )
        remaining_sources = local_dists.remaining_sources
        additional_inputs_digests.append(local_dists.pex.digest)
        additional_args += ("--requirements-pex", local_dists.pex.name)
    else:
        remaining_sources = sources

    remaining_sources_stripped = await Get(
        StrippedPythonSourceFiles, PythonSourceFiles, remaining_sources
    )
    sources_digests.append(remaining_sources_stripped.stripped_source_files.snapshot.digest)

    merged_sources_digest, additional_inputs = await MultiGet(
        Get(Digest, MergeDigests(sources_digests)),
        Get(Digest, MergeDigests(additional_inputs_digests)),
    )

    description = request.description

    if request.include_requirements:
        requirements = await Get(PexRequirements, _PexRequirementsRequest(request.addresses))
    else:
        requirements = PexRequirements()

    if requirements:
        repository_pex = await Get(
            OptionalPex,
            _RepositoryPexRequest(
                request.addresses,
                requirements=requirements,
                hardcoded_interpreter_constraints=request.hardcoded_interpreter_constraints,
                platforms=request.platforms,
                complete_platforms=request.complete_platforms,
                internal_only=request.internal_only,
                additional_lockfile_args=request.additional_lockfile_args,
            ),
        )
        requirements = dataclasses.replace(requirements, repository_pex=repository_pex.maybe_pex)

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        layout=request.layout,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        complete_platforms=request.complete_platforms,
        main=request.main,
        sources=merged_sources_digest,
        additional_inputs=additional_inputs,
        additional_args=additional_args,
        description=description,
    )
Example #13
async def pylint_lint_partition(partition: PylintPartition,
                                pylint: Pylint) -> LintResult:
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField))
    pylint_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=PexRequirements(
                [*pylint.all_requirements, *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    prepare_plugin_sources_get = Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(partition.plugin_targets))
    prepare_python_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(partition.targets_with_dependencies))
    field_set_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets))

    (
        pylint_pex,
        requirements_pex,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_plugin_sources_get,
        prepare_python_sources_get,
        field_set_sources_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            PexRequest(
                output_filename="pylint_runner.pex",
                interpreter_constraints=partition.interpreter_constraints,
                main=pylint.main,
                internal_only=True,
                pex_path=[pylint_pex, requirements_pex],
            ),
        ),
        Get(ConfigFiles, ConfigFilesRequest,
            pylint.config_request(field_set_sources.snapshot.dirs)),
    )

    prefixed_plugin_sources = (await Get(
        Digest,
        AddPrefix(
            prepared_plugin_sources.stripped_source_files.snapshot.digest,
            "__plugins"),
    ) if pylint.source_plugins else EMPTY_DIGEST)

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests((
            config_files.snapshot.digest,
            prefixed_plugin_sources,
            prepared_python_sources.source_files.snapshot.digest,
        )),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))
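
The `load-plugins` caveat in the comment above is easiest to see concretely: Pylint imports plugins by bare module name, so the prefixed `__plugins` directory itself, not a parent of it, must end up on `sys.path`. A self-contained sketch of that constraint:

# Sketch: appending the plugin directory itself to sys.path (what
# PEX_EXTRA_SYS_PATH does in the rule) makes the module importable by its
# bare name, which is the form Pylint's `load-plugins` expects.
import pathlib
import sys
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    plugins_dir = pathlib.Path(tmp, "__plugins")
    plugins_dir.mkdir()
    (plugins_dir / "my_plugin.py").write_text("MESSAGE = 'loaded'\n")

    sys.path.append(str(plugins_dir))
    try:
        import my_plugin  # equivalent to `--load-plugins=my_plugin`

        assert my_plugin.MESSAGE == "loaded"
    finally:
        sys.path.remove(str(plugins_dir))
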
Example #14
async def mypy_typecheck_partition(partition: MyPyPartition, mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs, mypy.source_plugins)
    plugin_transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses)
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField)
    )

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None
        if partition.python_version_already_configured
        else partition.interpreter_constraints.minimum_python_version()
    )

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else PexInterpreterConstraints(mypy.interpreter_constraints)
    )

    plugin_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(plugin_transitive_targets.closure)
    )
    typechecked_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure)
    )

    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (addr for addr in partition.field_set_addresses),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )
    # TODO(John Sirois): Scope the extra requirements to the partition.
    #  Right now we just use a global set of extra requirements and these might not be compatible
    #  with all partitions. See: https://github.com/pantsbuild/pants/issues/11556
    mypy_extra_requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy_extra_requirements.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.extra_requirements),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    mypy_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=PexRequirements((*mypy.all_requirements, *plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    config_digest_request = Get(Digest, PathGlobs, config_path_globs(mypy))

    (
        plugin_sources,
        typechecked_sources,
        mypy_pex,
        requirements_pex,
        mypy_extra_requirements_pex,
        config_digest,
    ) = await MultiGet(
        plugin_sources_request,
        typechecked_sources_request,
        mypy_pex_request,
        requirements_pex_request,
        mypy_extra_requirements_pex_request,
        config_digest_request,
    )

    typechecked_srcs_snapshot = typechecked_sources.source_files.snapshot
    file_list_path = "__files.txt"
    python_files = "\n".join(
        determine_python_files(typechecked_srcs_snapshot.files)
    )
    file_list_digest_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )

    typechecked_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="typechecked_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex, mypy_extra_requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    typechecked_venv_pex, file_list_digest = await MultiGet(
        typechecked_venv_pex_request, file_list_digest_request
    )

    merged_input_files = await Get(
        Digest,
        MergeDigests(
            [
                file_list_digest,
                plugin_sources.source_files.snapshot.digest,
                typechecked_srcs_snapshot.digest,
                typechecked_venv_pex.digest,
                config_digest,
            ]
        ),
    )

    all_used_source_roots = sorted(
        set(itertools.chain(plugin_sources.source_roots, typechecked_sources.source_roots))
    )
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                typechecked_venv_pex=typechecked_venv_pex,
                file_list_path=file_list_path,
                python_version=python_version,
            ),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(typechecked_srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
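
The `minimum_python_version()` value above is what ends up as MyPy's `--python-version`. A rough sketch of deriving such a minimum, assuming the third-party `packaging` library; the real logic lives in Pants' `InterpreterConstraints`:

# Rough sketch of computing a minimum Python version from constraints,
# assuming `packaging` is installed. Pants implements this itself in
# InterpreterConstraints.minimum_python_version().
from typing import Optional

from packaging.specifiers import SpecifierSet


def minimum_python_version(constraints: str) -> Optional[str]:
    specs = SpecifierSet(constraints)
    # Probe candidate minor versions in ascending order; the first match
    # is the minimum version the constraints allow.
    for minor in range(5, 13):
        version = f"3.{minor}"
        if specs.contains(version):
            return version
    return None


assert minimum_python_version(">=3.6,<3.10") == "3.6"
assert minimum_python_version(">=3.8") == "3.8"
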
Example #15
async def twine_upload(request: PublishToPyPiRequest,
                       twine_subsystem: TwineSubsystem) -> PublishProcesses:
    dists = tuple(artifact.relpath for pkg in request.packages
                  for artifact in pkg.artifacts if artifact.relpath)

    if twine_subsystem.skip or not dists:
        return PublishProcesses()

    # Too verbose to provide feedback as to why some packages were skipped?
    skip = None
    if request.field_set.skip_twine.value:
        skip = f"(by `{request.field_set.skip_twine.alias}` on {request.field_set.address})"
    elif not request.field_set.repositories.value:
        # I'd rather have used the opt_out mechanism on the field set, but that gives no hint as to
        # why the target was not applicable.
        skip = f"(no `{request.field_set.repositories.alias}` specified for {request.field_set.address})"

    if skip:
        return PublishProcesses([
            PublishPackages(
                names=dists,
                description=skip,
            ),
        ])

    twine_pex, packages_digest, config_files = await MultiGet(
        Get(
            VenvPex,
            PexRequest(
                output_filename="twine.pex",
                internal_only=True,
                requirements=twine_subsystem.pex_requirements(),
                interpreter_constraints=twine_subsystem.interpreter_constraints,
                main=twine_subsystem.main,
            ),
        ),
        Get(Digest, MergeDigests(pkg.digest for pkg in request.packages)),
        Get(ConfigFiles, ConfigFilesRequest, twine_subsystem.config_request()),
    )

    input_digest = await Get(
        Digest, MergeDigests((packages_digest, config_files.snapshot.digest)))
    pex_proc_requests = []
    twine_envs = await MultiGet(
        Get(Environment, EnvironmentRequest, twine_env_request(repo))
        for repo in request.field_set.repositories.value)

    for repo, env in zip(request.field_set.repositories.value, twine_envs):
        pex_proc_requests.append(
            VenvPexProcess(
                twine_pex,
                argv=twine_upload_args(twine_subsystem, config_files, repo,
                                       dists),
                input_digest=input_digest,
                extra_env=twine_env(env, repo),
                description=repo,
            ))

    processes = await MultiGet(
        Get(Process, VenvPexProcess, request) for request in pex_proc_requests)

    return PublishProcesses(
        PublishPackages(
            names=dists,
            process=InteractiveProcess.from_process(process),
            description=process.description,
            data=PublishOutputData({"repository": process.description}),
        ) for process in processes)
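
Each configured repository above is paired with its own environment via `zip`, yielding one upload process per repository. A simplified sketch of that fan-out; the argv shown here is illustrative, since the rule builds it with `twine_upload_args`:

# Simplified sketch of the per-repository fan-out above. The argv is
# illustrative; in the rule it comes from twine_upload_args().
from typing import Dict, List, Tuple


def upload_commands(
    repositories: Tuple[str, ...],
    repo_envs: Tuple[Dict[str, str], ...],
    dists: Tuple[str, ...],
) -> List[Tuple[Tuple[str, ...], Dict[str, str]]]:
    commands = []
    for repo, env in zip(repositories, repo_envs):
        argv = ("upload", "--non-interactive", "-r", repo, *dists)
        commands.append((argv, env))
    return commands


cmds = upload_commands(("pypi",), ({"TWINE_USERNAME": "user"},), ("pkg-1.0.tar.gz",))
assert cmds[0][0][-1] == "pkg-1.0.tar.gz"
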
Example #16
async def _setup_constraints_repository_pex(
    request: _ConstraintsRepositoryPexRequest, python_setup: PythonSetup
) -> _ConstraintsRepositoryPex:
    # NB: it isn't safe to resolve against the whole constraints file if
    # platforms are in use. See https://github.com/pantsbuild/pants/issues/12222.
    if not python_setup.resolve_all_constraints or request.platforms:
        return _ConstraintsRepositoryPex(None)

    constraints_path = python_setup.requirement_constraints
    assert constraints_path is not None

    constraints_file_contents = await Get(
        DigestContents,
        PathGlobs(
            [constraints_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `[python].requirement_constraints`",
        ),
    )
    constraints_file_reqs = set(
        parse_requirements_file(
            constraints_file_contents[0].content.decode(), rel_path=constraints_path
        )
    )

    # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
    # packaging canonicalize for us.
    # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
    url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
    name_reqs = set()  # E.g., foobar>=1.2.3
    name_req_projects = set()

    for req_str in request.requirements.req_strings:
        req = PipRequirement.parse(req_str)
        if req.url:
            url_reqs.add(req)
        else:
            name_reqs.add(req)
            name_req_projects.add(canonicalize_project_name(req.project_name))

    constraint_file_projects = {
        canonicalize_project_name(req.project_name) for req in constraints_file_reqs
    }
    # Constraints files must only contain name reqs, not URL reqs (those are already
    # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
    unconstrained_projects = name_req_projects - constraint_file_projects
    if unconstrained_projects:
        logger.warning(
            f"The constraints file {constraints_path} does not contain "
            f"entries for the following requirements: {', '.join(unconstrained_projects)}.\n\n"
            f"Ignoring `[python_setup].resolve_all_constraints` option."
        )
        return _ConstraintsRepositoryPex(None)

    # To get a full set of requirements we must add the URL requirements to the
    # constraints file, since the latter cannot contain URL requirements.
    # NB: We can only add the URL requirements we know about here, i.e., those that
    #  are transitive deps of the targets in play. There may be others in the repo.
    #  So we may end up creating a few different repository pexes, each with identical
    #  name requirements but different subsets of URL requirements. Fortunately since
    #  all these repository pexes will have identical pinned versions of everything,
    #  this is not a correctness issue, only a performance one.
    all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
    repository_pex = await Get(
        Pex,
        PexRequest(
            description=f"Resolving {constraints_path}",
            output_filename="repository.pex",
            internal_only=request.internal_only,
            requirements=PexRequirements(
                all_constraints,
                apply_constraints=True,
                # TODO: See PexRequirements docs.
                is_all_constraints_resolve=True,
            ),
            interpreter_constraints=request.interpreter_constraints,
            platforms=request.platforms,
            additional_args=request.additional_lockfile_args,
        ),
    )
    return _ConstraintsRepositoryPex(repository_pex)
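
The URL-versus-name split and the PEP 503 canonicalization above can be reproduced with the third-party `packaging` library, which is an assumption here; Pants wraps the equivalents in `PipRequirement` and `canonicalize_project_name`:

# Sketch of the URL/name requirement split above, assuming `packaging` is
# installed. Per PEP 503, Foo_-Bar.BAZ and foo-bar-baz name the same project.
import re

from packaging.requirements import Requirement


def canonicalize_project_name(name: str) -> str:
    # The PEP 503 normalization rule.
    return re.sub(r"[-_.]+", "-", name).lower()


url_reqs, name_req_projects = set(), set()
for req_str in ("Foo_-Bar.BAZ>=1.2.3", "foobar @ git+https://github.com/foo/bar.git@branch"):
    req = Requirement(req_str)
    if req.url:
        url_reqs.add(req_str)  # already pinned by its very nature
    else:
        name_req_projects.add(canonicalize_project_name(req.name))

assert name_req_projects == {"foo-bar-baz"}
assert len(url_reqs) == 1
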
Example #17
async def package_python_google_cloud_function(
    field_set: PythonGoogleCloudFunctionFieldSet,
    lambdex: Lambdex,
    platform: Platform,
    union_membership: UnionMembership,
) -> BuiltPackage:
    if platform.is_macos:
        logger.warning(
            "Google Cloud Functions built on macOS may fail to build. If your function uses any"
            " third-party dependencies without binary wheels (bdist) for Linux available, it will"
            " fail to build. If this happens, you will either need to update your dependencies to"
            " only use dependencies with pre-built wheels, or find a Linux environment to run"
            f" {bin_name()} package. (See https://realpython.com/python-wheels/ for more about"
            " wheels.)\n\n(If the build does not raise an exception, it's safe to use macOS.)"
        )

    output_filename = field_set.output_path.value_or_default(
        # Cloud Functions typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip",
    )

    # We hardcode the platform value to the appropriate one for each Google Cloud Function runtime.
    # (Running the "hello world" cloud function in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    pex_platforms = []
    interpreter_version = field_set.runtime.to_interpreter_version()
    if interpreter_version:
        py_major, py_minor = interpreter_version
        platform_str = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
        # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
        if py_major <= 3 and py_minor < 8:
            platform_str += "m"
        pex_platforms.append(platform_str)

    additional_pex_args = (
        # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
        "--manylinux=manylinux2014",
        # When we're executing Pex on Linux, allow a local interpreter to be resolved if
        # available and matching the AMI platform.
        "--resolve-local-platforms",
    )

    complete_platforms = await Get(CompletePlatforms,
                                   PexCompletePlatformsField,
                                   field_set.complete_platforms)

    pex_request = PexFromTargetsRequest(
        addresses=[field_set.address],
        internal_only=False,
        output_filename=output_filename,
        platforms=PexPlatforms(pex_platforms),
        complete_platforms=complete_platforms,
        additional_args=additional_pex_args,
        additional_lockfile_args=additional_pex_args,
    )

    lambdex_request = PexRequest(
        output_filename="lambdex.pex",
        internal_only=True,
        requirements=lambdex.pex_requirements(),
        interpreter_constraints=lambdex.interpreter_constraints,
        main=lambdex.main,
    )

    lambdex_pex, pex_result, handler, transitive_targets = await MultiGet(
        Get(VenvPex, PexRequest, lambdex_request),
        Get(Pex, PexFromTargetsRequest, pex_request),
        Get(ResolvedPythonGoogleHandler,
            ResolvePythonGoogleHandlerRequest(field_set.handler)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX and is a common
    # gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            f"The `python_google_cloud_function` target {field_set.address} transitively depends "
            "on the below `files` targets, but Pants will not include them in the built Cloud "
            "Function. Filesystem APIs like `open()` are not able to load files within the binary "
            "itself; instead, they read from the current working directory."
            f"\n\nInstead, use `resources` targets. See {doc_url('resources')}."
            f"\n\nFiles targets dependencies: {files_addresses}")

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-M", "main.py", "-e", handler.val,
                  output_filename),
            input_digest=pex_result.digest,
            output_files=(output_filename, ),
            description=f"Setting up handler in {output_filename}",
        ),
    )

    extra_log_data: list[tuple[str, str]] = []
    if field_set.runtime.value:
        extra_log_data.append(("Runtime", field_set.runtime.value))
    extra_log_data.extend(
        ("Complete platform", path) for path in complete_platforms)
    # The GCP-facing handler function is always main.handler, which is the
    # wrapper injected by lambdex that manages invocation of the actual handler.
    extra_log_data.append(("Handler", "main.handler"))

    first_column_width = 4 + max(len(header) for header, _ in extra_log_data)
    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=tuple(
            f"{header.rjust(first_column_width, ' ')}: {data}"
            for header, data in extra_log_data),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact, ))
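
The hardcoded platform strings above follow one pattern per runtime version. A standalone sketch of the computation, including the pymalloc "m" ABI suffix that Python 3.8 removed (https://bugs.python.org/issue36707):

# Standalone sketch of the hardcoded PEX platform strings used above.
def pex_platform(py_major: int, py_minor: int) -> str:
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # The pymalloc "m" ABI flag only exists before Python 3.8.
    if (py_major, py_minor) < (3, 8):
        platform += "m"
    return platform


assert pex_platform(3, 7) == "linux_x86_64-cp-37-cp37m"
assert pex_platform(3, 8) == "linux_x86_64-cp-38-cp38"
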
Example #18
async def flake8_lint_partition(partition: Flake8Partition, flake8: Flake8,
                                lint_subsystem: LintSubsystem) -> LintResult:
    flake8_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="flake8.pex",
            internal_only=True,
            requirements=PexRequirements(flake8.all_requirements),
            interpreter_constraints=partition.interpreter_constraints,
            entry_point=flake8.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[flake8.config] if flake8.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets))

    flake8_pex, config_digest, source_files = await MultiGet(
        flake8_pex_request, config_digest_request, source_files_request)

    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files.snapshot.digest, flake8_pex.digest, config_digest)),
    )

    report_file_name = "flake8_report.txt" if lint_subsystem.reports_dir else None

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            flake8_pex,
            argv=generate_args(source_files=source_files,
                               flake8=flake8,
                               report_file_name=report_file_name),
            input_digest=input_digest,
            output_files=(report_file_name, ) if report_file_name else None,
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )

    report = None
    if report_file_name:
        report_digest = await Get(
            Digest,
            DigestSubset(
                result.output_digest,
                PathGlobs(
                    [report_file_name],
                    glob_match_error_behavior=GlobMatchErrorBehavior.warn,
                    description_of_origin="Flake8 report file",
                ),
            ),
        )
        report = LintReport(report_file_name, report_digest)

    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
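
When `--lint-reports-dir` is set, the report file name has to appear both in the argv and in the process's `output_files` so the resulting digest captures it. A sketch of the argv side; mapping the report onto Flake8's `--output-file` option is an assumption about what `generate_args` does:

# Sketch of the optional report-file argv handling above. Using Flake8's
# --output-file flag here is an assumption about generate_args().
from typing import Optional, Tuple


def flake8_argv(
    files: Tuple[str, ...],
    config: Optional[str],
    report_file_name: Optional[str],
) -> Tuple[str, ...]:
    args = []
    if config:
        args.append(f"--config={config}")
    if report_file_name:
        args.append(f"--output-file={report_file_name}")
    return (*args, *files)


assert flake8_argv(("app.py",), ".flake8", "flake8_report.txt") == (
    "--config=.flake8",
    "--output-file=flake8_report.txt",
    "app.py",
)
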
Example #19
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
    complete_env: CompleteEnvironment,
) -> TestSetup:
    transitive_targets, plugin_setups = await MultiGet(
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
        Get(AllPytestPluginSetups,
            AllPytestPluginSetupsRequest(request.field_set.address)),
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            [request.field_set.address],
            internal_only=True,
            resolve_and_lockfile=request.field_set.resolve.resolve_and_lockfile(python_setup),
        ),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=pytest.pex_requirements(),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    # Ensure that the empty extra output dir exists.
    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        extra_output_directory_digest_get,
    )

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [request.field_set.address],
            interpreter_constraints=interpreter_constraints,
            sources=prepared_sources,
        ),
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=pytest.main,
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex, local_dists.pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(plugin_setup.digest for plugin_setup in plugin_setups),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        pytest.validate_pytest_cov_included()
        output_files.append(".coverage")

        if coverage_subsystem.filter:
            cov_args = [f"--cov={morf}" for morf in coverage_subsystem.filter]
        else:
            # N.B.: Passing `--cov=` or `--cov=.` to communicate "record coverage for all sources"
            # fails in certain contexts as detailed in:
            #   https://github.com/pantsbuild/pants/issues/12390
            # Instead we focus coverage on just the directories containing python source files
            # materialized to the Process chroot.
            cov_args = [
                f"--cov={source_root}"
                for source_root in prepared_sources.source_roots
            ]

        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *cov_args,
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
        # NOTE: `complete_env` intentionally after `test_extra_env` to allow overriding within
        # `python_tests`
        **complete_env.get_subset(request.field_set.extra_env_vars.value or ()),
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if test_subsystem.force else ProcessCacheScope.SUCCESSFUL)
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
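
The coverage handling above deliberately avoids the bare `--cov=` form (see pantsbuild/pants#12390) and instead emits one `--cov` per materialized source root when no explicit filter is configured. A standalone sketch of that assembly:

# Standalone sketch of the pytest-cov argument assembly above: explicit
# filters win; otherwise each source root gets its own --cov flag.
from typing import Sequence, Tuple


def coverage_argv(
    filters: Sequence[str],
    source_roots: Sequence[str],
    coverage_config_path: str,
) -> Tuple[str, ...]:
    morfs = filters if filters else source_roots
    return (
        "--cov-report=",  # Turn off terminal output.
        f"--cov-config={coverage_config_path}",
        *(f"--cov={morf}" for morf in morfs),
    )


assert coverage_argv((), ("src/python",), ".coveragerc") == (
    "--cov-report=",
    "--cov-config=.coveragerc",
    "--cov=src/python",
)
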
Example #20
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet,
    python_binary_defaults: PythonBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        binary_source_paths = await Get(
            Paths, PathGlobs,
            field_set.sources.path_globs(FilesNotFoundBehavior.error))
        if len(binary_source_paths.files) != 1:
            raise InvalidFieldException(
                "No `entry_point` was set for the target "
                f"{repr(field_set.address)}, so it must have exactly one source, but it has "
                f"{len(binary_source_paths.files)}")
        entry_point_path = binary_source_paths.files[0]
        source_root = await Get(
            SourceRoot,
            SourceRootRequest,
            SourceRootRequest.for_file(entry_point_path),
        )
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            os.path.relpath(entry_point_path, source_root.path))
    transitive_targets = await Get(TransitiveTargets,
                                   Addresses([field_set.address]))

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([field_set.address],
                                               internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(
                python_binary_defaults),
            internal_only=True,
            # Note that the entry point file is not in the Pex itself, but on the
            # PEX_PATH. This works fine!
            entry_point=entry_point,
        ),
    )

    requirements, sources, runner_pex = await MultiGet(requirements_request,
                                                       sources_request,
                                                       runner_pex_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([
            requirements.digest, sources.source_files.snapshot.digest,
            runner_pex.digest
        ]),
    )

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    args = pex_env.create_argv(in_chroot(runner_pex.name),
                               python=runner_pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=runner_pex.python is not None),
        "PEX_PATH":
        in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
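
The `{chroot}` placeholder above is a literal string that the engine substitutes with the sandbox path when the process runs, so every path in the env is phrased relative to it. A sketch of the env assembly:

# Sketch of the run-request env assembly above. "{chroot}" is left as a
# literal placeholder for the engine to expand at run time.
import os
from typing import Dict, Sequence


def run_extra_env(pex_filename: str, source_roots: Sequence[str]) -> Dict[str, str]:
    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    return {
        "PEX_PATH": in_chroot(pex_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(in_chroot(sr) for sr in source_roots),
    }


env = run_extra_env("app.pex", ["src/python"])
assert env["PEX_PATH"] == os.path.join("{chroot}", "app.pex")
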
Example #21
async def create_pex_binary_run_request(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    entry_point, transitive_targets = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point,
                                        field_set.sources),
        ),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([field_set.address],
                                               internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(
                pex_binary_defaults),
            internal_only=True,
            # Note that the entry point file is not in the PEX itself. It's loaded by setting
            # `PEX_EXTRA_SYS_PATH`.
            entry_point=entry_point.val,
        ),
    )

    requirements, sources, runner_pex = await MultiGet(requirements_request,
                                                       sources_request,
                                                       runner_pex_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([
            requirements.digest, sources.source_files.snapshot.digest,
            runner_pex.digest
        ]),
    )

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    args = pex_env.create_argv(in_chroot(runner_pex.name),
                               python=runner_pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=runner_pex.python is not None),
        "PEX_PATH":
        in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #22
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
    python_protobuf_mypy_plugin: PythonProtobufMypyPlugin,
) -> GeneratedSources:
    download_protoc_request = Get(DownloadedExternalTool, ExternalToolRequest,
                                  protoc.get_request(Platform.current))

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest,
                                    CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources, ),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSources]]))

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    protoc_gen_mypy_grpc_script = "protoc-gen-mypy_grpc"
    mypy_pex = None
    mypy_request = PexRequest(
        output_filename="mypy_protobuf.pex",
        internal_only=True,
        requirements=PexRequirements([
            python_protobuf_mypy_plugin.plugin_requirement(
                python_protobuf_subsystem)
        ]),
        interpreter_constraints=PexInterpreterConstraints(
            python_protobuf_mypy_plugin.interpreter_constraints),
    )

    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(
                bin_names=[protoc_gen_mypy_script],
                pex_request=mypy_request,
            ),
        )

        if request.protocol_target.get(ProtobufGrpcToggle).value:
            mypy_info = await Get(PexResolveInfo, VenvPex, mypy_pex)

            # In order to generate stubs for gRPC code, we need mypy-protobuf 2.0 or above.
            if any(dist_info.project_name == "mypy-protobuf"
                   and dist_info.version.major >= 2
                   for dist_info in mypy_info):
                # TODO: Use `pex_path` once VenvPex stores a Pex field.
                mypy_pex = await Get(
                    VenvPex,
                    VenvPexRequest(
                        bin_names=[
                            protoc_gen_mypy_script, protoc_gen_mypy_grpc_script
                        ],
                        pex_request=mypy_request,
                    ),
                )

    downloaded_grpc_plugin = (await Get(
        DownloadedExternalTool,
        ExternalToolRequest,
        grpc_python_plugin.get_request(Platform.current),
    ) if request.protocol_target.get(ProtobufGrpcToggle).value else None)

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend([
            f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
            "--mypy_out",
            output_dir,
        ])
    if downloaded_grpc_plugin:
        argv.extend([
            f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}",
            "--grpc_out", output_dir
        ])

        if mypy_pex and protoc_gen_mypy_grpc_script in mypy_pex.bin:
            argv.extend([
                f"--plugin=protoc-gen-mypy_grpc={mypy_pex.bin[protoc_gen_mypy_grpc_script].argv0}",
                "--mypy_grpc_out",
                output_dir,
            ])

    argv.extend(target_sources_stripped.snapshot.files)
    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(
            request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (await Get(
        Snapshot, AddPrefix(normalized_digest, source_root.path))
                            if source_root.path != "." else await Get(
                                Snapshot, Digest, normalized_digest))
    return GeneratedSources(source_root_restored)
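
Each optional protoc plugin above contributes a `--plugin=protoc-gen-<name>=<path>` flag plus a matching `--<name>_out` output directory. A standalone sketch of that argv construction, with illustrative paths:

# Standalone sketch of the protoc argv construction above; the binary and
# plugin paths are illustrative placeholders.
from typing import List, Optional


def protoc_argv(
    protoc_exe: str,
    output_dir: str,
    proto_files: List[str],
    mypy_plugin_path: Optional[str] = None,
    grpc_plugin_path: Optional[str] = None,
) -> List[str]:
    argv = [protoc_exe, "--python_out", output_dir]
    if mypy_plugin_path:
        argv += [f"--plugin=protoc-gen-mypy={mypy_plugin_path}", "--mypy_out", output_dir]
    if grpc_plugin_path:
        argv += [f"--plugin=protoc-gen-grpc={grpc_plugin_path}", "--grpc_out", output_dir]
    return argv + proto_files


argv = protoc_argv("./protoc", "_generated_files", ["app.proto"], grpc_plugin_path="./grpc_plugin")
assert argv[-1] == "app.proto" and "--grpc_out" in argv
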
Example #23
async def bandit_lint_partition(partition: BanditPartition, bandit: Bandit,
                                lint_subsystem: LintSubsystem) -> LintResult:
    bandit_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="bandit.pex",
            internal_only=True,
            requirements=PexRequirements(bandit.all_requirements),
            interpreter_constraints=partition.interpreter_constraints,
            main=bandit.main,
        ),
    )

    config_files_get = Get(ConfigFiles, ConfigFilesRequest,
                           bandit.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets))

    bandit_pex, config_files, source_files = await MultiGet(
        bandit_pex_get, config_files_get, source_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files.snapshot.digest, config_files.snapshot.digest)))

    report_file_name = "bandit_report.txt" if lint_subsystem.reports_dir else None

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            bandit_pex,
            argv=generate_argv(source_files,
                               bandit,
                               report_file_name=report_file_name),
            input_digest=input_digest,
            description=f"Run Bandit on {pluralize(len(partition.field_sets), 'file')}.",
            output_files=(report_file_name, ) if report_file_name else None,
            level=LogLevel.DEBUG,
        ),
    )

    report = None
    if report_file_name:
        report_digest = await Get(
            Digest,
            DigestSubset(
                result.output_digest,
                PathGlobs(
                    [report_file_name],
                    glob_match_error_behavior=GlobMatchErrorBehavior.warn,
                    description_of_origin="Bandit report file",
                ),
            ),
        )
        report = LintReport(report_file_name, report_digest)

    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Example #24
File: repl.py Project: hephex/pants
async def create_ipython_repl_request(repl: IPythonRepl, ipython: IPython,
                                      pex_env: PexEnvironment) -> ReplRequest:
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex) so
    # that we can get the interpreter constraints for use in ipython_request/local_dists_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))

    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            main=ipython.main,
            requirements=ipython.pex_requirements(),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )

    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request)

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [tgt.address for tgt in repl.targets],
            internal_only=True,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            sources=sources,
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests((
            requirements_pex.digest,
            local_dists.pex.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
            ipython_pex.digest,
        )),
    )

    complete_pex_env = pex_env.in_workspace()
    args = list(
        complete_pex_env.create_argv(repl.in_chroot(ipython_pex.name),
                                     python=ipython_pex.python))
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=ipython_pex.python is not None),
        "PEX_PATH":
        os.pathsep.join([
            repl.in_chroot(requirements_pex_request.output_filename),
            repl.in_chroot(local_dists.pex.name),
        ]),
        "PEX_EXTRA_SYS_PATH":
        os.pathsep.join(chrooted_source_roots),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #25
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    download_protoc_request = Get(DownloadedExternalTool, ExternalToolRequest,
                                  protoc.get_request(Platform.current))

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest,
                                    CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    # TODO(#10917): Use TransitiveTargets instead of TransitiveTargetsLite.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequestLite([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources, ),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSources]]))

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    # To run the MyPy Protobuf plugin, we first install it with Pex, then extract the wheels and
    # point Protoc to the extracted wheels with its `--plugin` argument.
    extracted_mypy_wheels = None
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            Pex,
            PexRequest(
                output_filename="mypy_protobuf.pex",
                internal_only=True,
                requirements=PexRequirements(
                    [python_protobuf_subsystem.mypy_plugin_version]),
                # This is solely to ensure that we use an appropriate interpreter when resolving
                # the distribution. We don't actually run the distribution directly with Python,
                # as we extract out its binary.
                interpreter_constraints=PexInterpreterConstraints(
                    ["CPython>=3.5"]),
            ),
        )
        extracted_mypy_wheels = await Get(ExtractedPexDistributions, Pex,
                                          mypy_pex)

    downloaded_grpc_plugin = (await Get(
        DownloadedExternalTool,
        ExternalToolRequest,
        grpc_python_plugin.get_request(Platform.current),
    ) if request.protocol_target.get(ProtobufGrpcToggle).value else None)

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if extracted_mypy_wheels:
        unmerged_digests.append(extracted_mypy_wheels.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if extracted_mypy_wheels:
        mypy_plugin_path = next(
            p for p in extracted_mypy_wheels.wheel_directory_paths
            if p.startswith(".deps/mypy_protobuf-"))
        argv.extend([
            f"--plugin=protoc-gen-mypy={mypy_plugin_path}/bin/protoc-gen-mypy",
            "--mypy_out",
            output_dir,
        ])
    if downloaded_grpc_plugin:
        argv.extend([
            f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}",
            "--grpc_out", output_dir
        ])
    argv.extend(target_sources_stripped.snapshot.files)

    env = {}
    if extracted_mypy_wheels:
        env["PYTHONPATH"] = ":".join(
            extracted_mypy_wheels.wheel_directory_paths)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            env=env,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(
            request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (await Get(
        Snapshot, AddPrefix(normalized_digest, source_root.path))
                            if source_root.path != "." else await Get(
                                Snapshot, Digest, normalized_digest))
    return GeneratedSources(source_root_restored)
Example #26
async def run_pep517_build(request: DistBuildRequest,
                           python_setup: PythonSetup) -> DistBuildResult:
    # Note that this pex has no entrypoint. We use it to run our generated shim, which
    # in turn imports from and invokes the build backend.
    build_backend_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="build_backend.pex",
            internal_only=True,
            requirements=request.build_system.requires,
            pex_path=request.extra_build_time_requirements,
            interpreter_constraints=request.interpreter_constraints,
        ),
    )

    dist_dir = "dist"
    backend_shim_name = "backend_shim.py"
    backend_shim_path = os.path.join(request.working_directory,
                                     backend_shim_name)
    backend_shim_digest = await Get(
        Digest,
        CreateDigest([
            FileContent(backend_shim_path,
                        interpolate_backend_shim(dist_dir, request)),
        ]),
    )

    merged_digest = await Get(
        Digest, MergeDigests((request.input, backend_shim_digest)))

    extra_env = {
        **(request.extra_build_time_env or {}),
        "PEX_EXTRA_SYS_PATH":
        os.pathsep.join(request.build_time_source_roots),
    }
    if python_setup.macos_big_sur_compatibility and is_macos_big_sur():
        extra_env["MACOSX_DEPLOYMENT_TARGET"] = "10.16"

    result = await Get(
        ProcessResult,
        VenvPexProcess(
            build_backend_pex,
            argv=(backend_shim_name, ),
            input_digest=merged_digest,
            extra_env=extra_env,
            working_directory=request.working_directory,
            output_directories=(dist_dir,),  # Relative to the working_directory.
            description=(
                f"Run {request.build_system.build_backend} for {request.target_address_spec}"
                if request.target_address_spec
                else f"Run {request.build_system.build_backend}"
            ),
            level=LogLevel.DEBUG,
        ),
    )
    output_lines = result.stdout.decode().splitlines()
    paths = {}
    for line in output_lines:
        for dist_type in ["wheel", "sdist"]:
            if line.startswith(f"{dist_type}: "):
                paths[dist_type] = line[len(dist_type) + 2:].strip()
    # Note that output_digest paths are relative to the working_directory.
    output_digest = await Get(Digest,
                              RemovePrefix(result.output_digest, dist_dir))
    output_snapshot = await Get(Snapshot, Digest, output_digest)
    for dist_type, path in paths.items():
        if path not in output_snapshot.files:
            raise BuildBackendError(
                softwrap(f"""
                    Build backend {request.build_system.build_backend} did not create
                    expected {dist_type} file {path}
                    """))
    return DistBuildResult(output_digest,
                           wheel_path=paths.get("wheel"),
                           sdist_path=paths.get("sdist"))
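
The PEP 517 shim reports artifact locations back over stdout as `wheel: <path>` and `sdist: <path>` lines, which the loop above slices apart. The same parsing as a standalone helper:

# Standalone version of the stdout parsing above: one "wheel: <path>" or
# "sdist: <path>" line per built distribution.
from typing import Dict


def parse_dist_paths(stdout: str) -> Dict[str, str]:
    paths: Dict[str, str] = {}
    for line in stdout.splitlines():
        for dist_type in ("wheel", "sdist"):
            prefix = f"{dist_type}: "
            if line.startswith(prefix):
                paths[dist_type] = line[len(prefix):].strip()
    return paths


assert parse_dist_paths("wheel: mypkg-1.0-py3-none-any.whl\n") == {
    "wheel": "mypkg-1.0-py3-none-any.whl"
}
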
Example #27
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    build_package_dependencies_get = Get(
        BuiltPackageDependencies,
        BuildPackageDependenciesRequest(
            request.field_set.runtime_package_dependencies),
    )

    # Get the file names for the test target so that we can tell pytest precisely which files
    # to run, rather than relying on auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        built_package_dependencies,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        build_package_dependencies_get,
        extra_output_directory_digest_get,
    )

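    # The runner PEX layers the pytest and requirements PEXes onto its PEX path via pex_path,
    # so both had to be resolved above before this request could be constructed.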
    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=ConsoleScript("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(pkg.digest for pkg in built_package_dependencies),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter or (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]

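    # PYTEST_ADDOPTS is pytest's standard hook for injecting extra command-line options, and
    # PEX_EXTRA_SYS_PATH puts the source roots on sys.path inside the PEX.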
    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR,),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
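
One detail worth isolating: the itertools.chain.from_iterable call above emits one ["--cov", path] pair per coverage filter entry. A tiny standalone check (the paths are illustrative):

import itertools

cov_paths = ("src/python", "tests/python")  # stand-ins for coverage_subsystem.filter
coverage_args = [
    "--cov-report=",  # Turn off pytest-cov's own terminal report.
    *itertools.chain.from_iterable(["--cov", path] for path in cov_paths),
]
assert coverage_args == ["--cov-report=", "--cov", "src/python", "--cov", "tests/python"]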
Example No. 28
async def collect_fixture_configs(
    _request: CollectFixtureConfigsRequest,
    pytest: PyTest,
    python_setup: PythonSetup,
    test_extra_env: TestExtraEnv,
    targets: Targets,
) -> CollectedJVMLockfileFixtureConfigs:
    addresses = [tgt.address for tgt in targets]
    transitive_targets = await Get(TransitiveTargets,
                                   TransitiveTargetsRequest(addresses))
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    pytest_pex, requirements_pex, prepared_sources, root_sources = await MultiGet(
        Get(
            Pex,
            PexRequest(
                output_filename="pytest.pex",
                requirements=pytest.pex_requirements(),
                interpreter_constraints=interpreter_constraints,
                internal_only=True,
            ),
        ),
        Get(Pex, RequirementsPexRequest(addresses)),
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(all_targets,
                                     include_files=True,
                                     include_resources=True),
        ),
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(targets),
        ),
    )

    script_content = FileContent(path="collect-fixtures.py",
                                 content=COLLECTION_SCRIPT.encode(),
                                 is_executable=True)
    script_digest = await Get(Digest, CreateDigest([script_content]))

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=EntryPoint(PurePath(script_content.path).stem),
            sources=script_digest,
            internal_only=True,
            pex_path=[
                pytest_pex,
                requirements_pex,
            ],
        ),
    )
    config_file_dirs = list(
        group_by_dir(prepared_sources.source_files.files).keys())
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(config_file_dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    pytest_config_digest = config_files.snapshot.digest

    input_digest = await Get(
        Digest,
        MergeDigests((
            prepared_sources.source_files.snapshot.digest,
            pytest_config_digest,
        )),
    )

    extra_env = {
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=[
                name for name in root_sources.source_files.files
                if name.endswith(".py")
            ],
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=("tests.json", ),
            description="Collect test lockfile requirements from all tests.",
            level=LogLevel.DEBUG,
            cache_scope=ProcessCacheScope.PER_SESSION,
        ),
    )

    result = await Get(ProcessResult, Process, process)
    digest_contents = await Get(DigestContents, Digest, result.output_digest)
    assert len(digest_contents) == 1
    assert digest_contents[0].path == "tests.json"
    raw_config_data = json.loads(digest_contents[0].content)

    configs = []
    for item in raw_config_data:
        config = JVMLockfileFixtureConfig(
            definition=JVMLockfileFixtureDefinition.from_kwargs(
                item["kwargs"]),
            test_file_path=item["test_file_path"],
        )
        configs.append(config)

    return CollectedJVMLockfileFixtureConfigs(configs)
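
The JSON handling at the end assumes the collection script writes tests.json as a list of objects with "kwargs" and "test_file_path" keys. A minimal sketch of that parsing with made-up data (the real kwargs come from fixture definitions in the test sources):

import json

raw_config_data = json.loads(
    '[{"kwargs": {"lockfile": "coursier.lock"}, "test_file_path": "tests/jvm/test_resolve.py"}]'
)
for item in raw_config_data:
    print(item["test_file_path"], "->", item["kwargs"])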
Example No. 29
async def pex_from_targets(request: PexFromTargetsRequest,
                           python_setup: PythonSetup) -> PexRequest:
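    # Depending on the request, gather either just the direct dependencies or the full
    # transitive closure of the given addresses.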
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies)))
            for tgt in targets)
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses))
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(StrippedPythonSourceFiles,
                                     PythonSourceFilesRequest(all_targets))
        input_digests.append(
            prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (tgt[PythonInterpreterCompatibility] for tgt in all_targets
             if tgt.has_field(PythonInterpreterCompatibility)),
            python_setup,
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints)

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField]
         for tgt in all_targets if tgt.has_field(PythonRequirementsField)),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize them for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name)
            for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(
                next(iter(constraints_file_contents)).content.decode()))
        constraint_file_projects = {
            canonicalize_project_name(req.project_name)
            for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
                python_setup.resolve_all_constraints
                == ResolveAllConstraintsOption.NONDEPLOYABLES
                and request.internal_only):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                requirements = PexRequirements(
                    str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (python_setup.resolve_all_constraints !=
          ResolveAllConstraintsOption.NEVER
          and python_setup.resolve_all_constraints_was_set_explicitly()):
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided.")

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
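
The canonicalization this example relies on is PEP 503 name normalization. A standalone sketch using re directly, with the names taken from the comment in the code:

import re

def canonicalize(name: str) -> str:
    # PEP 503: collapse runs of "-", "_", and "." into a single "-", then lowercase.
    return re.sub(r"[-_.]+", "-", name).lower()

assert canonicalize("Foo_-Bar.BAZ") == "foo-bar-baz"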
Example No. 30
async def run_setup_py(req: RunSetupPyRequest, setuptools: Setuptools,
                       python_setup: PythonSetup) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    # Note that this pex has no entrypoint. We use it to run our generated setup.py, which
    # in turn imports from and invokes setuptools.

    setuptools_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=setuptools.pex_requirements(),
            interpreter_constraints=req.interpreter_constraints,
        ),
    )

    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup.py makes within it (e.g., when running 'develop') without also capturing
    # other artifacts of the pex process invocation.
    chroot_prefix = "chroot"

    # The setuptools dist dir, created under the chroot (not to be confused with
    # Pants's own dist dir at the buildroot).
    dist_dir = "dist"

    prefixed_chroot = await Get(Digest,
                                AddPrefix(req.chroot.digest, chroot_prefix))

    # setup.py basically always expects to be run with the cwd as its own directory
    # (e.g., paths in it are relative to that directory). This is true of the setup.py
    # we generate and is overwhelmingly likely to be true for existing setup.py files,
    # as there is no robust way to run them otherwise.
    setup_script_reldir, setup_script_name = os.path.split(
        req.chroot.setup_script)
    working_directory = os.path.join(chroot_prefix, setup_script_reldir)

    if python_setup.macos_big_sur_compatibility and is_macos_big_sur():
        extra_env = {"MACOSX_DEPLOYMENT_TARGET": "10.16"}
    else:
        extra_env = {}
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            setuptools_pex,
            argv=(setup_script_name, *req.args),
            input_digest=prefixed_chroot,
            extra_env=extra_env,
            working_directory=working_directory,
            # setuptools commands that create dists write them to the distdir.
            # TODO: Could there be other useful files to capture?
            output_directories=(dist_dir,),  # Relative to the working_directory.
            description=f"Run setuptools for {req.exported_target.target.address}",
            level=LogLevel.DEBUG,
        ),
    )
    # Note that output_digest paths are relative to the working_directory.
    output_digest = await Get(Digest,
                              RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(output_digest)
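
A quick check of the working-directory computation in this rule, using an illustrative chroot-relative path for req.chroot.setup_script (POSIX paths assumed):

import os

setup_script = "src/project/setup.py"  # stand-in for req.chroot.setup_script
setup_script_reldir, setup_script_name = os.path.split(setup_script)
assert (setup_script_reldir, setup_script_name) == ("src/project", "setup.py")
assert os.path.join("chroot", setup_script_reldir) == "chroot/src/project"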