Example #1
async def create_python_repl_request(repl: PythonRepl,
                                     pex_env: PexEnvironment) -> ReplRequest:
    requirements_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))
    requirements_pex, sources = await MultiGet(requirements_request,
                                               sources_request)
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest)))

    args = pex_env.create_argv(repl.in_chroot(requirements_pex.name),
                               python=requirements_pex.python)

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=requirements_pex.python is not None),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
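The `repl.in_chroot` helper used above is not defined in this snippet. Judging from the equivalent helper written out in Example #13 below, it presumably just joins the given path onto the `{chroot}` placeholder that the engine expands to the sandbox directory at run time. A minimal sketch under that assumption:

import os

def in_chroot(relpath: str) -> str:
    # Assumed behavior: prefix a sandbox-relative path with the "{chroot}" placeholder,
    # e.g. in_chroot("requirements.pex") -> "{chroot}/requirements.pex".
    return os.path.join("{chroot}", relpath)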
Example #2
def get_unstripped_sources(
    rule_runner: RuleRunner,
    targets: Iterable[Target],
    *,
    include_resources: bool = True,
    include_files: bool = False,
    source_roots: list[str] | None = None,
    extra_args: list[str] | None = None,
) -> PythonSourceFiles:
    rule_runner.set_options(
        [
            f"--source-root-patterns={source_roots or ['src/python']}",
            "--no-python-protobuf-infer-runtime-dependency",
            *(extra_args or []),
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    return rule_runner.request(
        PythonSourceFiles,
        [
            PythonSourceFilesRequest(targets,
                                     include_resources=include_resources,
                                     include_files=include_files)
        ],
    )
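A possible way to call this test helper, assuming the surrounding test module builds a `RuleRunner` with the Python backend rules registered (the fixture, files, and target below are purely illustrative, not taken from the original test):

from pants.engine.addresses import Address
from pants.testutil.rule_runner import RuleRunner

def test_get_unstripped_sources(rule_runner: RuleRunner) -> None:
    # Hypothetical test body; `rule_runner` is assumed to come from a fixture
    # that registered the source-files rules and the python_sources target type.
    rule_runner.write_files(
        {"src/python/proj/BUILD": "python_sources()", "src/python/proj/app.py": ""}
    )
    tgt = rule_runner.get_target(Address("src/python/proj", relative_file_path="app.py"))
    sources = get_unstripped_sources(rule_runner, [tgt])
    assert "src/python" in sources.source_roots
    assert "src/python/proj/app.py" in sources.source_files.files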
Example #3
async def generate_chroot(
    request: DistBuildChrootRequest, subsys: SetupPyGeneration
) -> DistBuildChroot:
    generate_setup = request.exported_target.target.get(GenerateSetupField).value
    if generate_setup is None:
        generate_setup = subsys.generate_setup_default

    if generate_setup:
        sources = await Get(DistBuildSources, DistBuildChrootRequest, request)
        generated_setup_py = await Get(
            GeneratedSetupPy,
            GenerateSetupPyRequest(
                request.exported_target, sources, request.interpreter_constraints
            ),
        )
        # We currently generate a setup.py that expects to be in the source root.
        # TODO: It might make sense to generate one in the target's directory, for
        #  consistency with the existing setup.py case.
        working_directory = ""
        chroot_digest = await Get(Digest, MergeDigests((sources.digest, generated_setup_py.digest)))
    else:
        transitive_targets = await Get(
            TransitiveTargets,
            TransitiveTargetsRequest([request.exported_target.target.address]),
        )
        source_files = await Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(
                targets=transitive_targets.closure, include_resources=True, include_files=True
            ),
        )
        chroot_digest = source_files.source_files.snapshot.digest
        working_directory = request.exported_target.target.address.spec_path
    return DistBuildChroot(chroot_digest, working_directory)
Example #4
async def create_ipython_repl_request(repl: IPythonRepl, ipython: IPython,
                                      pex_env: PexEnvironment) -> ReplRequest:
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))

    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            main=ipython.main,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )

    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request)
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest,
             ipython_pex.digest)),
    )

    args = list(
        pex_env.create_argv(repl.in_chroot(ipython_pex.name),
                            python=ipython_pex.python))
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=ipython_pex.python is not None),
        "PEX_PATH":
        repl.in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #5
async def create_python_repl_request(request: PythonRepl,
                                     pex_env: PexEnvironment,
                                     python_setup: PythonSetup) -> ReplRequest:
    validate_compatible_resolve(request.targets, python_setup)

    interpreter_constraints, transitive_targets = await MultiGet(
        Get(InterpreterConstraints,
            InterpreterConstraintsRequest(request.addresses)),
        Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses)),
    )

    requirements_request = Get(Pex, RequirementsPexRequest(request.addresses))
    local_dists_request = Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            request.addresses,
            internal_only=True,
            interpreter_constraints=interpreter_constraints,
        ),
    )

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    requirements_pex, local_dists, sources = await MultiGet(
        requirements_request, local_dists_request, sources_request)
    merged_digest = await Get(
        Digest,
        MergeDigests((requirements_pex.digest, local_dists.pex.digest,
                      sources.source_files.snapshot.digest)),
    )

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(
        request.in_chroot(requirements_pex.name), python=requirements_pex.python)

    chrooted_source_roots = [
        request.in_chroot(sr) for sr in sources.source_roots
    ]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=requirements_pex.python is not None),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
        "PEX_PATH":
        request.in_chroot(local_dists.pex.name),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #6
async def create_python_repl_request(repl: PythonRepl,
                                     pex_env: PexEnvironment) -> ReplRequest:

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex) so
    # that we can get the interpreter constraints for use in local_dists_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    local_dists_request = Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            Addresses(tgt.address for tgt in repl.targets),
            internal_only=True,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
        ),
    )

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))

    requirements_pex, local_dists, sources = await MultiGet(
        requirements_request, local_dists_request, sources_request)
    merged_digest = await Get(
        Digest,
        MergeDigests((requirements_pex.digest, local_dists.pex.digest,
                      sources.source_files.snapshot.digest)),
    )

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(repl.in_chroot(requirements_pex.name),
                                        python=requirements_pex.python)

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=requirements_pex.python is not None),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
        "PEX_PATH":
        repl.in_chroot(local_dists.pex.name),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #7
async def get_sources(request: SetupPySourcesRequest) -> SetupPySources:
    python_sources_request = PythonSourceFilesRequest(targets=request.targets,
                                                      include_resources=False,
                                                      include_files=False)
    all_sources_request = PythonSourceFilesRequest(targets=request.targets,
                                                   include_resources=True,
                                                   include_files=True)
    python_sources, all_sources = await MultiGet(
        Get(StrippedPythonSourceFiles, PythonSourceFilesRequest,
            python_sources_request),
        Get(StrippedPythonSourceFiles, PythonSourceFilesRequest,
            all_sources_request),
    )

    python_files = set(python_sources.stripped_source_files.snapshot.files)
    all_files = set(all_sources.stripped_source_files.snapshot.files)
    resource_files = all_files - python_files

    init_py_digest_contents = await Get(
        DigestContents,
        DigestSubset(python_sources.stripped_source_files.snapshot.digest,
                     PathGlobs(["**/__init__.py"])),
    )

    packages, namespace_packages, package_data = find_packages(
        python_files=python_files,
        resource_files=resource_files,
        init_py_digest_contents=init_py_digest_contents,
        py2=request.py2,
    )
    return SetupPySources(
        digest=all_sources.stripped_source_files.snapshot.digest,
        packages=packages,
        namespace_packages=namespace_packages,
        package_data=package_data,
    )
Example #8
async def pylint_first_party_plugins(
        pylint: Pylint) -> PylintFirstPartyPlugins:
    if not pylint.source_plugins:
        return PylintFirstPartyPlugins(FrozenOrderedSet(), FrozenOrderedSet(),
                                       EMPTY_DIGEST)

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        pylint.source_plugins)
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))

    requirements_fields: OrderedSet[PythonRequirementsField] = OrderedSet()
    interpreter_constraints_fields: OrderedSet[
        InterpreterConstraintsField] = OrderedSet()
    for tgt in transitive_targets.closure:
        if tgt.has_field(PythonRequirementsField):
            requirements_fields.add(tgt[PythonRequirementsField])
        if tgt.has_field(InterpreterConstraintsField):
            interpreter_constraints_fields.add(
                tgt[InterpreterConstraintsField])

    # NB: Pylint source plugins must be explicitly loaded via PYTHONPATH (i.e. PEX_EXTRA_SYS_PATH).
    # The value must point to the plugin's directory, rather than to a parent's directory, because
    # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
    # not `path.to.plugin`. (This means users must have specified the parent directory as a
    # source root.)
    stripped_sources = await Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure))
    prefixed_sources = await Get(
        Digest,
        AddPrefix(stripped_sources.stripped_source_files.snapshot.digest,
                  PylintFirstPartyPlugins.PREFIX),
    )

    return PylintFirstPartyPlugins(
        requirement_strings=PexRequirements.create_from_requirement_fields(
            requirements_fields,
            constraints_strings=(),
        ).req_strings,
        interpreter_constraints_fields=FrozenOrderedSet(
            interpreter_constraints_fields),
        sources_digest=prefixed_sources,
    )
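The prefix added here only takes effect once it is also placed on `PEX_EXTRA_SYS_PATH`; Example #18 below does that via `pythonpath.append(first_party_plugins.PREFIX)`. A small illustration of the resulting environment value (the prefix string is a stand-in, not necessarily the real `PylintFirstPartyPlugins.PREFIX`):

pythonpath = ["src/python"]      # sandbox source roots (illustrative)
pythonpath.append("__plugins")   # stand-in for PylintFirstPartyPlugins.PREFIX
extra_env = {"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)}
# -> "src/python:__plugins", so each plugin module sits directly on sys.path and
#    can be named in Pylint's `load-plugins`, as the comment above requires.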
Example #9
async def mypy_first_party_plugins(mypy: MyPy) -> MyPyFirstPartyPlugins:
    if not mypy.source_plugins:
        return MyPyFirstPartyPlugins(FrozenOrderedSet(), EMPTY_DIGEST, ())

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))

    requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    sources = await Get(PythonSourceFiles,
                        PythonSourceFilesRequest(transitive_targets.closure))
    return MyPyFirstPartyPlugins(
        requirement_strings=requirements.req_strings,
        sources_digest=sources.source_files.snapshot.digest,
        source_roots=sources.source_roots,
    )
Example #10
def get_unstripped_sources(
    rule_runner: RuleRunner,
    targets: Iterable[Target],
    *,
    include_resources: bool = True,
    include_files: bool = False,
    source_roots: list[str] | None = None,
    extra_args: list[str] | None = None,
) -> PythonSourceFiles:
    rule_runner.set_options([
        "--backend-packages=pants.backend.python",
        f"--source-root-patterns={source_roots or ['src/python']}",
        *(extra_args or []),
    ])
    return rule_runner.request(
        PythonSourceFiles,
        [
            PythonSourceFilesRequest(targets,
                                     include_resources=include_resources,
                                     include_files=include_files)
        ],
    )
Example #11
 def get_unstripped_sources(
     self,
     targets: Iterable[Target],
     *,
     include_resources: bool = True,
     include_files: bool = False,
     source_roots: Optional[List[str]] = None,
     extra_args: Optional[List[str]] = None,
 ) -> PythonSourceFiles:
     return self.request(
         PythonSourceFiles,
         [
             PythonSourceFilesRequest(targets,
                                      include_resources=include_resources,
                                      include_files=include_files),
             create_options_bootstrapper(args=[
                 "--backend-packages=pants.backend.python",
                 f"--source-root-patterns={source_roots or ['src/python']}",
                 *(extra_args or []),
             ]),
         ],
     )
Example #12
async def pylint_lint_partition(
    partition: PylintPartition, pylint: Pylint, first_party_plugins: PylintFirstPartyPlugins
) -> LintResult:
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    pylint_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=pylint.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings,
            ),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    prepare_python_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.targets_with_dependencies)
    )
    field_set_sources_get = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )

    pylint_pex, requirements_pex, prepared_python_sources, field_set_sources = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            PexRequest(
                output_filename="pylint_runner.pex",
                interpreter_constraints=partition.interpreter_constraints,
                main=pylint.main,
                internal_only=True,
                pex_path=[pylint_pex, requirements_pex],
            ),
        ),
        Get(
            ConfigFiles, ConfigFilesRequest, pylint.config_request(field_set_sources.snapshot.dirs)
        ),
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                config_files.snapshot.digest,
                first_party_plugins.sources_digest,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
Example #13
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet,
    python_binary_defaults: PythonBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        binary_source_paths = await Get(
            Paths, PathGlobs,
            field_set.sources.path_globs(FilesNotFoundBehavior.error))
        if len(binary_source_paths.files) != 1:
            raise InvalidFieldException(
                "No `entry_point` was set for the target "
                f"{repr(field_set.address)}, so it must have exactly one source, but it has "
                f"{len(binary_source_paths.files)}")
        entry_point_path = binary_source_paths.files[0]
        source_root = await Get(
            SourceRoot,
            SourceRootRequest,
            SourceRootRequest.for_file(entry_point_path),
        )
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            os.path.relpath(entry_point_path, source_root.path))
    transitive_targets = await Get(TransitiveTargets,
                                   Addresses([field_set.address]))

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([field_set.address],
                                               internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(
                python_binary_defaults),
            internal_only=True,
            # Note that the entry point file is not in the Pex itself, but on the
            # PEX_PATH. This works fine!
            entry_point=entry_point,
        ),
    )

    requirements, sources, runner_pex = await MultiGet(requirements_request,
                                                       sources_request,
                                                       runner_pex_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([
            requirements.digest, sources.source_files.snapshot.digest,
            runner_pex.digest
        ]),
    )

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    args = pex_env.create_argv(in_chroot(runner_pex.name),
                               python=runner_pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=runner_pex.python is not None),
        "PEX_PATH":
        in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
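`PythonBinarySources.translate_source_file_to_entry_point` is not shown in this snippet; it presumably turns the source-root-relative file path computed with `os.path.relpath` into a dotted module name. A sketch of that assumed conversion:

import os

def translate_source_file_to_entry_point(stripped_path: str) -> str:
    # Assumed behavior: "proj/app.py" (relative to its source root) -> "proj.app".
    module_base, _ = os.path.splitext(stripped_path)
    return module_base.replace(os.path.sep, ".")

# So a file at src/python/proj/app.py under source root src/python would yield
# the entry point "proj.app".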
Example #14
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=python_setup.requirement_constraints,
            )
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
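The canonicalization described in the comment above can be reproduced with the standard helpers that `Requirement.parse` and `canonicalize_project_name` appear to correspond to (`pkg_resources.Requirement` and `packaging.utils.canonicalize_name`); a sketch under that assumption:

from packaging.utils import canonicalize_name
from pkg_resources import Requirement

req = Requirement.parse("Foo_-Bar.BAZ>=1.0")
print(req.project_name)                      # Foo_-Bar.BAZ
print(canonicalize_name(req.project_name))   # foo-bar-baz (PEP 503 normalized)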
Example #15
async def create_pex_binary_run_request(field_set: PexBinaryFieldSet,
                                        pex_binary_defaults: PexBinaryDefaults,
                                        pex_env: PexEnvironment) -> RunRequest:
    run_in_sandbox = field_set.run_in_sandbox.value
    entry_point, transitive_targets = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point),
        ),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    addresses = [field_set.address]
    interpreter_constraints = await Get(
        InterpreterConstraints, InterpreterConstraintsRequest(addresses))

    pex_filename = (field_set.address.generated_name.replace(".", "_")
                    if field_set.address.generated_name else
                    field_set.address.target_name)
    pex_get = Get(
        Pex,
        PexFromTargetsRequest(
            [field_set.address],
            output_filename=f"{pex_filename}.pex",
            internal_only=True,
            include_source_files=False,
            # Note that the file for first-party entry points is not in the PEX itself. In that
            # case, it's loaded by setting `PEX_EXTRA_SYS_PATH`.
            main=entry_point.val or field_set.script.value,
            additional_args=(
                *field_set.generate_additional_args(pex_binary_defaults),
                # N.B.: Since we cobble together the runtime environment via PEX_EXTRA_SYS_PATH
                # below, it's important for any app that re-executes itself that these environment
                # variables are not stripped.
                "--no-strip-pex-env",
            ),
        ),
    )
    sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))
    pex, sources = await MultiGet(pex_get, sources_get)

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [field_set.address],
            internal_only=True,
            interpreter_constraints=interpreter_constraints,
            sources=sources,
        ),
    )

    input_digests = [
        pex.digest,
        local_dists.pex.digest,
        # Note regarding inline mode: You might think that the sources don't need to be copied
        # into the chroot when using inline sources. But they do, because some of them might be
        # codegenned, and those won't exist in the inline source tree. Rather than incurring the
        # complexity of figuring out here which sources were codegenned, we copy everything.
        # The inline source roots precede the chrooted ones in PEX_EXTRA_SYS_PATH, so the inline
        # sources will take precedence and their copies in the chroot will be ignored.
        local_dists.remaining_sources.source_files.snapshot.digest,
    ]
    merged_digest = await Get(Digest, MergeDigests(input_digests))

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(in_chroot(pex.name), python=pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    # The order here is important: we want the in-repo sources to take precedence over their
    # copies in the sandbox (see above for why those copies exist even in non-sandboxed mode).
    source_roots = [
        *([] if run_in_sandbox else sources.source_roots),
        *chrooted_source_roots,
    ]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=pex.python is not None),
        "PEX_PATH":
        in_chroot(local_dists.pex.name),
        "PEX_EXTRA_SYS_PATH":
        os.pathsep.join(source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
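A minimal illustration of the ordering comment above: listing the in-repo source roots before their sandbox copies means the in-repo files are found first on `sys.path` (paths are illustrative):

import os

source_roots = ["src/python"]
chrooted = [os.path.join("{chroot}", sr) for sr in source_roots]
pex_extra_sys_path = os.pathsep.join([*source_roots, *chrooted])
# On POSIX this is "src/python:{chroot}/src/python"; earlier entries win, so the
# in-repo sources shadow their copies in the sandbox.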
Example #16
async def collect_fixture_configs(
    _request: CollectFixtureConfigsRequest,
    pytest: PyTest,
    python_setup: PythonSetup,
    test_extra_env: TestExtraEnv,
    targets: Targets,
) -> CollectedJVMLockfileFixtureConfigs:
    addresses = [tgt.address for tgt in targets]
    transitive_targets = await Get(TransitiveTargets,
                                   TransitiveTargetsRequest(addresses))
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    pytest_pex, requirements_pex, prepared_sources, root_sources = await MultiGet(
        Get(
            Pex,
            PexRequest(
                output_filename="pytest.pex",
                requirements=pytest.pex_requirements(),
                interpreter_constraints=interpreter_constraints,
                internal_only=True,
            ),
        ),
        Get(Pex, RequirementsPexRequest(addresses)),
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(all_targets,
                                     include_files=True,
                                     include_resources=True),
        ),
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(targets),
        ),
    )

    script_content = FileContent(path="collect-fixtures.py",
                                 content=COLLECTION_SCRIPT.encode(),
                                 is_executable=True)
    script_digest = await Get(Digest, CreateDigest([script_content]))

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=EntryPoint(PurePath(script_content.path).stem),
            sources=script_digest,
            internal_only=True,
            pex_path=[
                pytest_pex,
                requirements_pex,
            ],
        ),
    )
    config_file_dirs = list(
        group_by_dir(prepared_sources.source_files.files).keys())
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(config_file_dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    pytest_config_digest = config_files.snapshot.digest

    input_digest = await Get(
        Digest,
        MergeDigests((
            prepared_sources.source_files.snapshot.digest,
            pytest_config_digest,
        )),
    )

    extra_env = {
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=[
                name for name in root_sources.source_files.files
                if name.endswith(".py")
            ],
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=("tests.json", ),
            description="Collect test lockfile requirements from all tests.",
            level=LogLevel.DEBUG,
            cache_scope=ProcessCacheScope.PER_SESSION,
        ),
    )

    result = await Get(ProcessResult, Process, process)
    digest_contents = await Get(DigestContents, Digest, result.output_digest)
    assert len(digest_contents) == 1
    assert digest_contents[0].path == "tests.json"
    raw_config_data = json.loads(digest_contents[0].content)

    configs = []
    for item in raw_config_data:
        config = JVMLockfileFixtureConfig(
            definition=JVMLockfileFixtureDefinition.from_kwargs(
                item["kwargs"]),
            test_file_path=item["test_file_path"],
        )
        configs.append(config)

    return CollectedJVMLockfileFixtureConfigs(configs)
Example #17
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.field_set.address])
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup
    )

    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address], internal_only=True),
    )

    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )

    # Create any assets that the test depends on through the `runtime_package_dependencies` field.
    assets: Tuple[BuiltPackage, ...] = ()
    unparsed_runtime_packages = (
        request.field_set.runtime_package_dependencies.to_unparsed_address_inputs()
    )
    if unparsed_runtime_packages.values:
        runtime_package_targets = await Get(
            Targets, UnparsedAddressInputs, unparsed_runtime_packages
        )
        field_sets_per_target = await Get(
            FieldSetsPerTarget,
            FieldSetsPerTargetRequest(PackageFieldSet, runtime_package_targets),
        )
        assets = await MultiGet(
            Get(BuiltPackage, PackageFieldSet, field_set)
            for field_set in field_sets_per_target.field_sets
        )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources])
    )

    pytest_pex, requirements_pex, prepared_sources, field_set_source_files = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    pytest_runner_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            # TODO(John Sirois): Switch to ConsoleScript once Pex supports discovering console
            #  scripts via the PEX_PATH: https://github.com/pantsbuild/pex/issues/1257
            main=EntryPoint("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                prepared_sources.source_files.snapshot.digest,
                *(binary.digest for binary in assets),
            )
        ),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend(
            (f"--junitxml={results_file_name}", "-o", f"junit_family={pytest.options.junit_family}")
        )
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }

    extra_env.update(test_extra_env.env)

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
Example #18
async def pylint_lint_partition(
        partition: PylintPartition, pylint: Pylint,
        first_party_plugins: PylintFirstPartyPlugins) -> LintResult:
    requirements_pex_get = Get(
        Pex,
        RequirementsPexRequest(
            (t.address for t in partition.root_targets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    pylint_pex_get = Get(
        Pex,
        PexRequest,
        pylint.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )

    prepare_python_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    field_set_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(t[PythonSourceField]
                           for t in partition.root_targets))
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest,
                                      CreateDigest([Directory(REPORT_DIR)]))

    (
        pylint_pex,
        requirements_pex,
        prepared_python_sources,
        field_set_sources,
        report_directory,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
        report_directory_digest_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            VenvPexRequest(
                PexRequest(
                    output_filename="pylint_runner.pex",
                    interpreter_constraints=partition.interpreter_constraints,
                    main=pylint.main,
                    internal_only=True,
                    pex_path=[pylint_pex, requirements_pex],
                ),
                # TODO(John Sirois): Remove this (change to the default of symlinks) when we can
                #  upgrade to a version of Pylint with https://github.com/PyCQA/pylint/issues/1470
                #  resolved.
                site_packages_copies=True,
            ),
        ),
        Get(ConfigFiles, ConfigFilesRequest,
            pylint.config_request(field_set_sources.snapshot.dirs)),
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)

    input_digest = await Get(
        Digest,
        MergeDigests((
            config_files.snapshot.digest,
            first_party_plugins.sources_digest,
            prepared_python_sources.source_files.snapshot.digest,
            report_directory,
        )),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            output_directories=(REPORT_DIR, ),
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            concurrency_available=len(partition.root_targets),
            description=f"Run Pylint on {pluralize(len(partition.root_targets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Example #19
async def create_pex_binary_run_request(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    entry_point, transitive_targets = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point),
        ),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([field_set.address],
                                               internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(
                pex_binary_defaults),
            internal_only=True,
            # Note that the entry point file is not in the PEX itself. It's loaded by setting
            # `PEX_EXTRA_SYS_PATH`.
            # TODO(John Sirois): Support ConsoleScript in PexBinary targets:
            #  https://github.com/pantsbuild/pants/issues/11619
            main=entry_point.val,
        ),
    )

    requirements, sources, runner_pex = await MultiGet(requirements_request,
                                                       sources_request,
                                                       runner_pex_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([
            requirements.digest, sources.source_files.snapshot.digest,
            runner_pex.digest
        ]),
    )

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    args = pex_env.create_argv(in_chroot(runner_pex.name),
                               python=runner_pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=runner_pex.python is not None),
        "PEX_PATH":
        in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #20
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
    complete_env: CompleteEnvironment,
) -> TestSetup:
    transitive_targets, plugin_setups = await MultiGet(
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
        Get(AllPytestPluginSetups,
            AllPytestPluginSetupsRequest(request.field_set.address)),
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            [request.field_set.address],
            internal_only=True,
            resolve_and_lockfile=request.field_set.resolve.resolve_and_lockfile(python_setup),
        ),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=pytest.pex_requirements(),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    # Ensure that the empty extra output dir exists.
    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        extra_output_directory_digest_get,
    )

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [request.field_set.address],
            interpreter_constraints=interpreter_constraints,
            sources=prepared_sources,
        ),
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=pytest.main,
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex, local_dists.pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(plugin_setup.digest for plugin_setup in plugin_setups),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        pytest.validate_pytest_cov_included()
        output_files.append(".coverage")

        if coverage_subsystem.filter:
            cov_args = [f"--cov={morf}" for morf in coverage_subsystem.filter]
        else:
            # N.B.: Passing `--cov=` or `--cov=.` to communicate "record coverage for all sources"
            # fails in certain contexts as detailed in:
            #   https://github.com/pantsbuild/pants/issues/12390
            # Instead we focus coverage on just the directories containing python source files
            # materialized to the Process chroot.
            cov_args = [
                f"--cov={source_root}"
                for source_root in prepared_sources.source_roots
            ]

        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *cov_args,
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
        # NOTE: `complete_env` intentionally after `test_extra_env` to allow overriding within
        # `python_tests`
        **complete_env.get_subset(request.field_set.extra_env_vars.value or ()),
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if test_subsystem.force else ProcessCacheScope.SUCCESSFUL)
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
Example #21
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies)))
            for tgt in targets)
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses))
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(StrippedPythonSourceFiles,
                                     PythonSourceFilesRequest(all_targets))
        input_digests.append(
            prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup)
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints)

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField]
         for tgt in all_targets if tgt.has_field(PythonRequirementsField)),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest,
                                              constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            ))

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name)
            for req in exact_reqs
        }
        constraint_file_projects = {
            canonicalize_project_name(req.project_name)
            for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints else
                f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}")

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
                python_setup.resolve_all_constraints
                == ResolveAllConstraintsOption.NONDEPLOYABLES
                and request.internal_only):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements.")
            else:
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(
                            str(req) for req in constraints_file_reqs),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                    ),
                )
    elif (python_setup.resolve_all_constraints !=
          ResolveAllConstraintsOption.NEVER
          and python_setup.resolve_all_constraints_was_set_explicitly()):
        raise ValueError(
            "[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            "either [python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
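
For reference, the "unconstrained projects" check in the rule above relies on PEP 503
name normalization. A minimal standalone sketch of the same idea, using the packaging
library directly (the requirement strings are illustrative and not part of the rule):

from packaging.requirements import Requirement
from packaging.utils import canonicalize_name

exact_reqs = ["Foo_-Bar.BAZ==1.0", "requests>=2.0"]
constraints_reqs = ["foo-bar-baz==1.0"]

# PEP 503 lower-cases names and collapses runs of "-", "_", and "." into a single "-",
# so "Foo_-Bar.BAZ" and "foo-bar-baz" refer to the same project.
exact_projects = {canonicalize_name(Requirement(r).name) for r in exact_reqs}
constrained_projects = {canonicalize_name(Requirement(c).name) for c in constraints_reqs}
print(sorted(exact_projects - constrained_projects))  # ['requests']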
Example #22
async def generate_coverage_reports(
    merged_coverage_data: MergedCoverageData,
    coverage_setup: CoverageSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    global_options: GlobalOptions,
) -> CoverageReports:
    """Takes all Python test results and generates a single coverage report."""
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest(merged_coverage_data.addresses))
    sources = await Get(
        PythonSourceFiles,
        # Coverage sometimes includes non-Python files in its `.coverage` data. We need to
        # ensure that they're present when generating the report. We include all the files included
        # by `pytest_runner.py`.
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True,
                                 include_resources=True),
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            merged_coverage_data.coverage_data,
            coverage_config.digest,
            sources.source_files.snapshot.digest,
        )),
    )

    pex_processes = []
    report_types = []
    result_snapshot = await Get(Snapshot, Digest,
                                merged_coverage_data.coverage_data)
    coverage_reports: list[CoverageReport] = []
    for report_type in coverage_subsystem.reports:
        if report_type == CoverageReportType.RAW:
            coverage_reports.append(
                FilesystemCoverageReport(
                    # We don't know yet if the coverage is sufficient, so we let some other report
                    # trigger the failure if necessary.
                    coverage_insufficient=False,
                    report_type=CoverageReportType.RAW.value,
                    result_snapshot=result_snapshot,
                    directory_to_materialize_to=coverage_subsystem.output_dir,
                    report_file=coverage_subsystem.output_dir / ".coverage",
                ))
            continue

        report_types.append(report_type)
        output_file = (f"coverage.{report_type.value}" if report_type in {
            CoverageReportType.XML, CoverageReportType.JSON
        } else None)
        args = [report_type.report_name, f"--rcfile={coverage_config.path}"]
        if coverage_subsystem.fail_under is not None:
            args.append(f"--fail-under={coverage_subsystem.fail_under}")
        pex_processes.append(
            VenvPexProcess(
                coverage_setup.pex,
                argv=tuple(args),
                input_digest=input_digest,
                output_directories=("htmlcov", )
                if report_type == CoverageReportType.HTML else None,
                output_files=(output_file, ) if output_file else None,
                description=f"Generate Pytest {report_type.report_name} coverage report.",
                level=LogLevel.DEBUG,
            ))
    results = await MultiGet(
        Get(FallibleProcessResult, VenvPexProcess, process)
        for process in pex_processes)
    for proc, res in zip(pex_processes, results):
        if res.exit_code not in {0, 2}:
            # coverage.py uses exit code 2 if --fail-under triggers, in which case the
            # reports are still generated.
            raise ProcessExecutionFailure(
                res.exit_code,
                res.stdout,
                res.stderr,
                proc.description,
                local_cleanup=global_options.options.process_execution_local_cleanup,
            )

    # In practice if one result triggers --fail-under, they all will, but no need to rely on that.
    result_exit_codes = tuple(res.exit_code for res in results)
    result_stdouts = tuple(res.stdout for res in results)
    result_snapshots = await MultiGet(
        Get(Snapshot, Digest, res.output_digest) for res in results)

    coverage_reports.extend(
        _get_coverage_report(coverage_subsystem.output_dir, report_type,
                             exit_code != 0, stdout, snapshot)
        for (report_type, exit_code, stdout, snapshot) in zip(
            report_types, result_exit_codes, result_stdouts, result_snapshots))

    return CoverageReports(tuple(coverage_reports))
Example #23
async def mypy_typecheck_partition(partition: MyPyPartition,
                                   mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    plugin_transitive_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))
    plugin_transitive_targets, launcher_script = await MultiGet(
        plugin_transitive_targets_request,
        Get(Digest, CreateDigest([LAUNCHER_FILE])))

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None if partition.python_version_already_configured else
        partition.interpreter_constraints.minimum_python_version())

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8, so instead we must run MyPy with
    # Python 3.8 when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8, which would error if
    # 3.8 is not installed on the machine.
    tool_interpreter_constraints = PexInterpreterConstraints(
        ("CPython>=3.8",)
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else mypy.interpreter_constraints
    )

    plugin_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(plugin_transitive_targets.closure))
    typechecked_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))

    # Normally, this `requirements.pex` would be merged with mypy.pex via `--pex-path`. However,
    # this will cause a runtime error if the interpreter constraints are different between the
    # PEXes and they have incompatible wheels.
    #
    # Instead, we teach MyPy about the requirements by extracting the distributions from
    # requirements.pex and setting EXTRACTED_WHEELS, which our custom launcher script then
    # looks for.
    #
    # Conventionally, MyPy users might instead set `MYPYPATH` for this. However, doing this
    # results in type checking the requirements themselves.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (addr for addr in partition.field_set_addresses),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )
    mypy_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            sources=launcher_script,
            requirements=PexRequirements(
                itertools.chain(mypy.all_requirements, plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
            entry_point=PurePath(LAUNCHER_FILE.path).stem,
        ),
    )

    config_digest_request = Get(Digest, PathGlobs, config_path_globs(mypy))

    (
        plugin_sources,
        typechecked_sources,
        mypy_pex,
        requirements_pex,
        config_digest,
    ) = await MultiGet(
        plugin_sources_request,
        typechecked_sources_request,
        mypy_pex_request,
        requirements_pex_request,
        config_digest_request,
    )

    typechecked_srcs_snapshot = typechecked_sources.source_files.snapshot
    file_list_path = "__files.txt"
    python_files = "\n".join(
        determine_python_files(
            typechecked_sources.source_files.snapshot.files))
    create_file_list_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )

    file_list_digest, extracted_pex_distributions = await MultiGet(
        create_file_list_request,
        Get(ExtractedPexDistributions, Pex, requirements_pex))

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            plugin_sources.source_files.snapshot.digest,
            typechecked_srcs_snapshot.digest,
            mypy_pex.digest,
            extracted_pex_distributions.digest,
            config_digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(plugin_sources.source_roots,
                            typechecked_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "EXTRACTED_WHEELS": ":".join(extracted_pex_distributions.wheel_directory_paths),
    }

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            mypy_pex,
            argv=generate_argv(mypy,
                               file_list_path=file_list_path,
                               python_version=python_version),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(typechecked_srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))
Example #24
async def generate_coverage_reports(
    merged_coverage_data: MergedCoverageData,
    coverage_setup: CoverageSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    all_used_addresses: Addresses,
) -> CoverageReports:
    """Takes all Python test results and generates a single coverage report."""
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(all_used_addresses))
    sources = await Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure, include_resources=False),
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                merged_coverage_data.coverage_data,
                coverage_config.digest,
                coverage_setup.pex.digest,
                sources.source_files.snapshot.digest,
            )
        ),
    )

    pex_processes = []
    report_types = []
    result_snapshot = await Get(Snapshot, Digest, merged_coverage_data.coverage_data)
    coverage_reports: List[CoverageReport] = []
    for report_type in coverage_subsystem.reports:
        if report_type == CoverageReportType.RAW:
            coverage_reports.append(
                FilesystemCoverageReport(
                    report_type=CoverageReportType.RAW.value,
                    result_snapshot=result_snapshot,
                    directory_to_materialize_to=coverage_subsystem.output_dir,
                    report_file=coverage_subsystem.output_dir / ".coverage",
                )
            )
            continue
        report_types.append(report_type)
        output_file = (
            f"coverage.{report_type.value}"
            if report_type in {CoverageReportType.XML, CoverageReportType.JSON}
            else None
        )
        pex_processes.append(
            PexProcess(
                coverage_setup.pex,
                # We pass `--ignore-errors` because Pants dynamically injects missing `__init__.py`
                # files and this will cause Coverage to fail.
                argv=(report_type.report_name, "--ignore-errors"),
                input_digest=input_digest,
                output_directories=("htmlcov",) if report_type == CoverageReportType.HTML else None,
                output_files=(output_file,) if output_file else None,
                description=f"Generate Pytest {report_type.report_name} coverage report.",
                level=LogLevel.DEBUG,
            )
        )
    results = await MultiGet(Get(ProcessResult, PexProcess, process) for process in pex_processes)
    result_stdouts = tuple(res.stdout for res in results)
    result_snapshots = await MultiGet(Get(Snapshot, Digest, res.output_digest) for res in results)

    coverage_reports.extend(
        _get_coverage_report(coverage_subsystem.output_dir, report_type, stdout, snapshot)
        for (report_type, stdout, snapshot) in zip(report_types, result_stdouts, result_snapshots)
    )

    return CoverageReports(tuple(coverage_reports))
Example #25
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
        name_reqs = set()  # E.g., foobar>=1.2.3
        name_req_projects = set()

        for req_str in exact_reqs:
            req = Requirement.parse(req_str)
            if req.url:  # type: ignore[attr-defined]
                url_reqs.add(req)
            else:
                name_reqs.add(req)
                name_req_projects.add(canonicalize_project_name(req.project_name))

        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Constraints files must only contain name reqs, not URL reqs (those are already
        # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
        unconstrained_projects = name_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                # To get a full set of requirements we must add the URL requirements to the
                # constraints file, since the latter cannot contain URL requirements.
                # NB: We can only add the URL requirements we know about here, i.e., those that
                #  are transitive deps of the targets in play. There may be others in the repo.
                #  So we may end up creating a few different repository pexes, each with identical
                #  name requirements but different subsets of URL requirements. Fortunately since
                #  all these repository pexes will have identical pinned versions of everything,
                #  this is not a correctness issue, only a performance one.
                # TODO: Address this as part of providing proper lockfile support. However we
                #  generate lockfiles, they must be able to include URL requirements.
                all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(all_constraints),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                        additional_args=["-vvv"],
                    ),
                )
    elif (
        python_setup.resolve_all_constraints
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            "[python-setup].resolve_all_constraints is enabled, so either "
            "[python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
Example #26
async def mypy_typecheck_partition(partition: MyPyPartition, mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs, mypy.source_plugins)
    plugin_transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses)
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField)
    )

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None
        if partition.python_version_already_configured
        else partition.interpreter_constraints.minimum_python_version()
    )

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else PexInterpreterConstraints(mypy.interpreter_constraints)
    )

    plugin_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(plugin_transitive_targets.closure)
    )
    closure_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles, SourceFilesRequest(tgt.get(PythonSources) for tgt in partition.root_targets)
    )

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    # TODO(John Sirois): Scope the extra requirements to the partition.
    #  Right now we just use a global set of extra requirements and these might not be compatible
    #  with all partitions. See: https://github.com/pantsbuild/pants/issues/11556
    mypy_extra_requirements_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="mypy_extra_requirements.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.extra_requirements),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=PexRequirements((*mypy.all_requirements, *plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    config_files_get = Get(ConfigFiles, ConfigFilesRequest, mypy.config_request)

    (
        plugin_sources,
        closure_sources,
        roots_sources,
        mypy_pex,
        requirements_pex,
        mypy_extra_requirements_pex,
        config_files,
    ) = await MultiGet(
        plugin_sources_get,
        closure_sources_get,
        roots_sources_get,
        mypy_pex_get,
        requirements_pex_get,
        mypy_extra_requirements_pex_get,
        config_files_get,
    )

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    typechecked_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="typechecked_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex, mypy_extra_requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    typechecked_venv_pex, file_list_digest = await MultiGet(
        typechecked_venv_pex_request, file_list_digest_request
    )

    merged_input_files = await Get(
        Digest,
        MergeDigests(
            [
                file_list_digest,
                plugin_sources.source_files.snapshot.digest,
                closure_sources.source_files.snapshot.digest,
                typechecked_venv_pex.digest,
                config_files.snapshot.digest,
            ]
        ),
    )

    all_used_source_roots = sorted(
        set(itertools.chain(plugin_sources.source_roots, closure_sources.source_roots))
    )
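    # PEX_EXTRA_SYS_PATH puts these source roots on sys.path inside the MyPy PEX (so the
    # first-party plugins and checked code are importable), while MYPYPATH points MyPy at
    # the same roots when it resolves first-party modules during type checking.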
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                typechecked_venv_pex,
                file_list_path=file_list_path,
                python_version=python_version,
            ),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
Example #27
async def create_pex_binary_run_request(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    pex_env: PexEnvironment,
    python_setup: PythonSetup,
) -> RunRequest:
    entry_point, transitive_targets = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point),
        ),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in local_dists_get.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest(
            [field_set.address],
            output_filename=f"{field_set.address.target_name}.pex",
            internal_only=True,
            include_source_files=False,
            # Note that the file for first-party entry points is not in the PEX itself. In that
            # case, it's loaded by setting `PEX_EXTRA_SYS_PATH`.
            main=entry_point.val or field_set.script.value,
            resolve_and_lockfile=field_set.resolve.resolve_and_lockfile(
                python_setup),
            additional_args=(
                *field_set.generate_additional_args(pex_binary_defaults),
                # N.B.: Since we cobble together the runtime environment via PEX_EXTRA_SYS_PATH
                # below, it's important for any app that re-executes itself that these environment
                # variables are not stripped.
                "--no-strip-pex-env",
            ),
        ),
    )
    pex_get = Get(Pex, PexRequest, requirements_pex_request)
    sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))
    pex, sources = await MultiGet(pex_get, sources_get)

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [field_set.address],
            internal_only=True,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            sources=sources,
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests([
            pex.digest,
            local_dists.pex.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
        ]),
    )

    def in_chroot(relpath: str) -> str:
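        # "{chroot}" is left as a literal placeholder here; Pants substitutes the
        # sandbox directory for it when the run request is executed.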
        return os.path.join("{chroot}", relpath)

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(in_chroot(pex.name), python=pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=pex.python is not None),
        "PEX_PATH":
        in_chroot(local_dists.pex.name),
        "PEX_EXTRA_SYS_PATH":
        os.pathsep.join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #28
async def mypy_typecheck_partition(
    partition: MyPyPartition,
    config_file: MyPyConfigFile,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> CheckResult:
    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe
            )
        )
        else mypy.interpreter_constraints
    )

    closure_sources_get = Get(PythonSourceFiles,
                              PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            tgt.get(PythonSources) for tgt in partition.root_targets))

    # See `requirements_venv_pex` for how this will get wrapped in a `VenvPex`.
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=mypy.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings, ),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    closure_sources, roots_sources, mypy_pex, requirements_pex = await MultiGet(
        closure_sources_get, roots_sources_get, mypy_pex_get,
        requirements_pex_get)

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest(
            [FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    # This creates a venv with all the 3rd-party requirements used by the code. We tell MyPy to
    # use this venv by setting `--python-executable`. Note that this Python interpreter is
    # different than what we run MyPy with.
    #
    # We could have directly asked the `PexFromTargetsRequest` to return a `VenvPex`, rather than
    # `Pex`, but that would mean missing out on sharing a cache with other goals like `test` and
    # `run`.
    requirements_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="requirements_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    requirements_venv_pex, file_list_digest = await MultiGet(
        requirements_venv_pex_request, file_list_digest_request)

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            first_party_plugins.sources_digest,
            closure_sources.source_files.snapshot.digest,
            requirements_venv_pex.digest,
            config_file.digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(first_party_plugins.source_roots,
                            closure_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                venv_python=requirements_venv_pex.python.argv0,
                file_list_path=file_list_path,
                python_version=config_file.python_version_to_autoset(
                    partition.interpreter_constraints,
                    python_setup.interpreter_universe),
            ),
            input_digest=merged_input_files,
            extra_env=env,
            output_directories=(REPORT_DIR, ),
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return CheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Example #29
async def pylint_lint_partition(partition: PylintPartition, pylint: Pylint) -> LintResult:
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField)
    )
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=PexRequirements([*pylint.all_requirements, *plugin_requirements]),
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            # TODO(John Sirois): Support shading python binaries:
            #   https://github.com/pantsbuild/pants/issues/9206
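            # `--pex-path` makes the separately resolved requirements PEX importable
            # from pylint.pex at runtime.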
            additional_args=("--pex-path", requirements_pex_request.input.output_filename),
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )

    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles, PythonSourceFilesRequest(partition.plugin_targets)
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.targets_with_dependencies)
    )
    field_set_sources_request = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )

    (
        pylint_pex,
        requirements_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )

    prefixed_plugin_sources = (
        await Get(
            Digest,
            AddPrefix(prepared_plugin_sources.stripped_source_files.snapshot.digest, "__plugins"),
        )
        if pylint.source_plugins
        else EMPTY_DIGEST
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                config_digest,
                prefixed_plugin_sources,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pylint_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
Example #30
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    build_package_dependencies_get = Get(
        BuiltPackageDependencies,
        BuildPackageDependenciesRequest(
            request.field_set.runtime_package_dependencies),
    )

    # Get the file names for the test target so that we can tell Pytest precisely which files
    # to test, rather than relying on auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        built_package_dependencies,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        build_package_dependencies_get,
        extra_output_directory_digest_get,
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=ConsoleScript("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(pkg.digest for pkg in built_package_dependencies),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]
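        # For example, with cov_paths == ("src/python/project",) this produces:
        #   ["--cov-report=", "--cov-config=<path>", "--cov", "src/python/project"]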

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
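
As a side note on the PYTEST_ADDOPTS mechanism used above: pytest reads that environment
variable and treats its contents as extra command-line options, so the flags assembled in
add_opts take effect without appearing in argv. A minimal illustration outside of Pants
(the test path is hypothetical):

import os
import subprocess

# Equivalent to passing "--color=no -o junit_family=xunit2" on the pytest command line.
env = {**os.environ, "PYTEST_ADDOPTS": "--color=no -o junit_family=xunit2"}
subprocess.run(["pytest", "tests/test_example.py"], env=env, check=False)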