Example #1
async def create_python_repl_request(repl: PythonRepl,
                                     pex_env: PexEnvironment) -> ReplRequest:
    requirements_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))
    requirements_pex, sources = await MultiGet(requirements_request,
                                               sources_request)
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest)))

    args = pex_env.create_argv(repl.in_chroot(requirements_pex.name),
                               python=requirements_pex.python)

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=requirements_pex.python is not None),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
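The rule above is only the coroutine body; how it gets wired into the engine is not shown. As a hedged sketch (assuming the standard Pants rule API; the exact union registration is an assumption, not taken from this snippet), such a module typically exposes a module-level rules() function:

# Sketch only -- registration is assumed, not part of the example above.
from pants.engine.rules import collect_rules
from pants.engine.unions import UnionRule


def rules():
    return [
        *collect_rules(),  # picks up @rule-decorated coroutines such as create_python_repl_request
        UnionRule(ReplImplementation, PythonRepl),  # assumed: registers the Python REPL implementation
    ]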
Example #2
def get_pex_request(
    constraints_file: str | None,
    resolve_all_constraints: bool | None,
    *,
    direct_deps_only: bool = False,
    additional_args: Iterable[str] = (),
    additional_lockfile_args: Iterable[str] = (),
) -> PexRequest:
    args = ["--backend-packages=pants.backend.python"]
    request = PexFromTargetsRequest(
        [Address("", target_name="app")],
        output_filename="demo.pex",
        internal_only=True,
        direct_deps_only=direct_deps_only,
        additional_args=additional_args,
        additional_lockfile_args=additional_lockfile_args,
    )
    if resolve_all_constraints is not None:
        args.append(
            f"--python-resolve-all-constraints={resolve_all_constraints!r}")
    if constraints_file:
        args.append(f"--python-requirement-constraints={constraints_file}")
    args.append("--python-repos-indexes=[]")
    args.append(f"--python-repos-repos={find_links}")
    rule_runner.set_options(args, env_inherit={"PATH"})
    pex_request = rule_runner.request(PexRequest, [request])
    assert OrderedSet(additional_args).issubset(
        OrderedSet(pex_request.additional_args))
    return pex_request
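A hedged usage sketch for the helper above (the test name, constraint values, and extra arg are illustrative only; the helper also relies on a rule_runner fixture and a find_links value from its enclosing test module, which are assumed to exist):

def test_additional_args_flow_through() -> None:  # hypothetical test
    # Exercise the helper with a constraints file, resolve-all enabled, and one extra Pex arg.
    pex_request = get_pex_request(
        "constraints.txt",
        resolve_all_constraints=True,
        additional_args=["--no-strip-pex-env"],
    )
    # The helper itself asserts that the additional args survive into the PexRequest;
    # here we only check the fixed output filename it always requests.
    assert pex_request.output_filename == "demo.pex"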
Example #3
def test_issue_12222(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "constraints.txt":
        "foo==1.0\nbar==1.0",
        "BUILD":
        dedent("""
                python_requirement(name="foo",requirements=["foo"])
                python_requirement(name="bar",requirements=["bar"])
                python_sources(name="lib",sources=[],dependencies=[":foo"])
                """),
    })
    request = PexFromTargetsRequest(
        [Address("", target_name="lib")],
        output_filename="demo.pex",
        internal_only=False,
        platforms=PexPlatforms(["some-platform-x86_64"]),
    )
    rule_runner.set_options([
        "--python-requirement-constraints=constraints.txt",
        "--python-resolve-all-constraints",
    ])
    result = rule_runner.request(PexRequest, [request])

    assert result.requirements == PexRequirements(["foo"],
                                                  apply_constraints=True)
Example #4
async def package_pex_binary(
        field_set: PexBinaryFieldSet,
        pex_binary_defaults: PexBinaryDefaults) -> BuiltPackage:
    resolved_entry_point = await Get(
        ResolvedPexEntryPoint,
        ResolvePexEntryPointRequest(field_set.entry_point))
    output_filename = field_set.output_path.value_or_default(field_set.address,
                                                             file_ending="pex")
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                # TODO(John Sirois): Support ConsoleScript in PexBinary targets:
                #  https://github.com/pantsbuild/pants/issues/11619
                main=resolved_entry_point.val,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #5
async def package_pex_binary(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    union_membership: UnionMembership,
) -> BuiltPackage:
    resolved_entry_point, transitive_targets = await MultiGet(
        Get(ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX and is a common
    # gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            softwrap(f"""
                The `pex_binary` target {field_set.address} transitively depends on the below `files`
                targets, but Pants will not include them in the PEX. Filesystem APIs like `open()`
                are not able to load files within the binary itself; instead, they read from the
                current working directory.

                Instead, use `resources` targets or wrap this `pex_binary` in an `archive`.
                See {doc_url('resources')}.

                Files targets dependencies: {files_addresses}
                """))

    output_filename = field_set.output_path.value_or_default(file_ending="pex")

    complete_platforms = await Get(CompletePlatforms,
                                   PexCompletePlatformsField,
                                   field_set.complete_platforms)

    pex = await Get(
        Pex,
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            main=resolved_entry_point.val or field_set.script.value,
            platforms=PexPlatforms.create_from_platforms_field(
                field_set.platforms),
            complete_platforms=complete_platforms,
            output_filename=output_filename,
            layout=PexLayout(field_set.layout.value),
            additional_args=field_set.generate_additional_args(
                pex_binary_defaults),
            include_requirements=field_set.include_requirements.value,
            include_local_dists=True,
        ),
    )
    return BuiltPackage(pex.digest, (BuiltPackageArtifact(output_filename), ))
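For context, a packaging rule like the one above is usually surfaced to the `package` goal by registering its field set against PackageFieldSet; a minimal sketch (the wiring below is an assumption about the surrounding module, not shown in the example):

# Sketch only -- standard Pants registration pattern, assumed rather than copied from the source.
from pants.core.goals.package import PackageFieldSet
from pants.engine.rules import collect_rules
from pants.engine.unions import UnionRule


def rules():
    return [
        *collect_rules(),  # collects package_pex_binary and any other @rule in the module
        UnionRule(PackageFieldSet, PexBinaryFieldSet),  # lets `./pants package` match pex_binary targets
    ]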
Example #6
async def create_ipython_repl_request(repl: IPythonRepl, ipython: IPython,
                                      pex_env: PexEnvironment) -> ReplRequest:
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))

    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            main=ipython.main,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )

    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request)
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest,
             ipython_pex.digest)),
    )

    args = list(
        pex_env.create_argv(repl.in_chroot(ipython_pex.name),
                            python=ipython_pex.python))
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=ipython_pex.python is not None),
        "PEX_PATH": repl.in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #7
async def export_venv(request: ExportedVenvRequest, python_setup: PythonSetup,
                      pex_env: PexEnvironment) -> ExportableData:
    # Pick a single interpreter for the venv.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        request.targets, python_setup)
    if not interpreter_constraints:
        # If there were no targets that defined any constraints, fall back to the global ones.
        interpreter_constraints = InterpreterConstraints(
            python_setup.interpreter_constraints)
    min_interpreter = interpreter_constraints.snap_to_minimum(
        python_setup.interpreter_universe)
    if not min_interpreter:
        raise ExportError(
            "The following interpreter constraints were computed for all the targets for which "
            f"export was requested: {interpreter_constraints}. There is no python interpreter "
            "compatible with these constraints. Please restrict the target set to one that shares "
            "a compatible interpreter.")

    venv_pex = await Get(
        VenvPex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in request.targets),
            internal_only=True,
            hardcoded_interpreter_constraints=min_interpreter,
        ),
    )

    complete_pex_env = pex_env.in_workspace()
    venv_abspath = os.path.join(complete_pex_env.pex_root,
                                venv_pex.venv_rel_dir)

    # Run the venv_pex to get the full python version (including patch #), so we
    # can use it in the symlink name.
    res = await Get(
        ProcessResult,
        VenvPexProcess(
            venv_pex=venv_pex,
            description="Create virtualenv",
            argv=[
                "-c",
                "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))"
            ],
            input_digest=venv_pex.digest,
        ),
    )
    py_version = res.stdout.strip().decode()

    return ExportableData(
        f"virtualenv for {min_interpreter}",
        os.path.join("python", "virtualenv"),
        symlinks=[Symlink(venv_abspath, py_version)],
    )
Example #8
async def create_python_repl_request(repl: PythonRepl,
                                     pex_env: PexEnvironment) -> ReplRequest:

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex) so
    # that we can get the interpreter constraints for use in local_dists_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    local_dists_request = Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            Addresses(tgt.address for tgt in repl.targets),
            internal_only=True,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
        ),
    )

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))

    requirements_pex, local_dists, sources = await MultiGet(
        requirements_request, local_dists_request, sources_request)
    merged_digest = await Get(
        Digest,
        MergeDigests((requirements_pex.digest, local_dists.pex.digest,
                      sources.source_files.snapshot.digest)),
    )

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(repl.in_chroot(requirements_pex.name),
                                        python=requirements_pex.python)

    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=requirements_pex.python is not None),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
        "PEX_PATH": repl.in_chroot(local_dists.pex.name),
    }

    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #9
async def package_pex_binary(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    union_membership: UnionMembership,
) -> BuiltPackage:
    resolved_entry_point, transitive_targets = await MultiGet(
        Get(ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX and is a common
    # gotcha.
    files_tgts = targets_with_sources_types([FilesSources],
                                            transitive_targets.dependencies,
                                            union_membership)
    if files_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in files_tgts)
        logger.warning(
            f"The pex_binary target {field_set.address} transitively depends on the below files "
            "targets, but Pants will not include them in the PEX. Filesystem APIs like `open()` "
            "are not able to load files within the binary itself; instead, they read from the "
            "current working directory."
            "\n\nInstead, use `resources` targets or wrap this `pex_binary` in an `archive`. See "
            f"{bracketed_docs_url('resources')}."
            f"\n\nFiles targets dependencies: {files_addresses}")

    output_filename = field_set.output_path.value_or_default(field_set.address,
                                                             file_ending="pex")
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                # TODO(John Sirois): Support ConsoleScript in PexBinary targets:
                #  https://github.com/pantsbuild/pants/issues/11619
                main=resolved_entry_point.val,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #10
async def package_pex_binary(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    global_options: GlobalOptions,
) -> BuiltPackage:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        binary_source_paths = await Get(
            Paths, PathGlobs,
            field_set.sources.path_globs(FilesNotFoundBehavior.error))
        if len(binary_source_paths.files) != 1:
            raise InvalidFieldException(
                "No `entry_point` was set for the target "
                f"{repr(field_set.address)}, so it must have exactly one source, but it has "
                f"{len(binary_source_paths.files)}")
        entry_point_path = binary_source_paths.files[0]
        source_root = await Get(
            SourceRoot,
            SourceRootRequest,
            SourceRootRequest.for_file(entry_point_path),
        )
        entry_point = PexBinarySources.translate_source_file_to_entry_point(
            os.path.relpath(entry_point_path, source_root.path))

    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        file_ending="pex",
        use_legacy_format=global_options.options.pants_distdir_legacy_paths,
    )
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                entry_point=entry_point,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #11
def get_pex_request(
    constraints_file: Optional[str],
    resolve_all: Optional[ResolveAllConstraintsOption],
    *,
    direct_deps_only: bool = False,
) -> PexRequest:
    args = ["--backend-packages=pants.backend.python"]
    request = PexFromTargetsRequest(
        [Address("", target_name="app")],
        output_filename="demo.pex",
        internal_only=True,
        direct_deps_only=direct_deps_only,
    )
    if resolve_all:
        args.append(
            f"--python-setup-resolve-all-constraints={resolve_all.value}")
    if constraints_file:
        args.append(
            f"--python-setup-requirement-constraints={constraints_file}")
    rule_runner.set_options(args)
    return rule_runner.request(PexRequest, [request])
Example #12
def test_exclude_requirements(
    include_requirements: bool, tmp_path: Path, rule_runner: RuleRunner
) -> None:
    sdists = tmp_path / "sdists"
    sdists.mkdir()
    find_links = create_dists(sdists, Project("baz", "2.2.2"))

    rule_runner.write_files(
        {
            "BUILD": dedent(
                """
                python_requirement(name="baz", requirements=["foo==1.2.3"])
                python_sources(name="app", sources=["app.py"], dependencies=[":baz"])
                """
            ),
            "constraints.txt": dedent("foo==1.2.3"),
            "app.py": "",
        }
    )

    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.python",
            "--python-repos-indexes=[]",
            f"--python-repos-repos={find_links}",
        ],
        env_inherit={"PATH"},
    )

    request = PexFromTargetsRequest(
        [Address("", target_name="app")],
        output_filename="demo.pex",
        internal_only=True,
        include_requirements=include_requirements,
    )
    pex_request = rule_runner.request(PexRequest, [request])
    assert isinstance(pex_request.requirements, PexRequirements)
    assert len(pex_request.requirements.req_strings) == (1 if include_requirements else 0)
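Tests like this one depend on a rule_runner fixture; a minimal sketch of what such a fixture could look like, assuming the Pants test utilities (the exact rule and target-type lists below are assumptions):

import pytest
from pants.backend.python.target_types import PythonRequirementTarget, PythonSourcesGeneratorTarget
from pants.backend.python.util_rules import pex_from_targets
from pants.testutil.rule_runner import QueryRule, RuleRunner


@pytest.fixture
def rule_runner() -> RuleRunner:
    # Register the pex_from_targets rules plus a QueryRule so tests can ask the engine
    # to resolve a PexFromTargetsRequest into a concrete PexRequest.
    return RuleRunner(
        rules=[
            *pex_from_targets.rules(),
            QueryRule(PexRequest, (PexFromTargetsRequest,)),
        ],
        target_types=[PythonRequirementTarget, PythonSourcesGeneratorTarget],
    )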
Example #13
async def package_pex_binary(
        field_set: PexBinaryFieldSet,
        pex_binary_defaults: PexBinaryDefaults) -> BuiltPackage:
    resolved_entry_point = await Get(
        ResolvedPexEntryPoint,
        ResolvePexEntryPointRequest(field_set.entry_point))
    output_filename = field_set.output_path.value_or_default(field_set.address,
                                                             file_ending="pex")
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                entry_point=resolved_entry_point.val,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #14
async def create_python_awslambda(
    field_set: PythonAwsLambdaFieldSet, lambdex_setup: LambdexSetup, global_options: GlobalOptions
) -> CreatedAWSLambda:
    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip",
        use_legacy_format=global_options.options.pants_distdir_legacy_paths,
    )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            entry_point=None,
            output_filename=output_filename,
            platforms=PexPlatforms([platform]),
            additional_args=[
                # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
                "--manylinux=manylinux2014",
                # When we're executing Pex on Linux, allow a local interpreter to be resolved if
                # available and matching the AMI platform.
                "--resolve-local-platforms",
            ],
        )
    )

    pex_result = await Get(TwoStepPex, TwoStepPexFromTargetsRequest, pex_request)
    input_digest = await Get(
        Digest, MergeDigests((pex_result.pex.digest, lambdex_setup.requirements_pex.digest))
    )

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        PexProcess(
            lambdex_setup.requirements_pex,
            argv=("build", "-e", field_set.handler.value, output_filename),
            input_digest=input_digest,
            output_files=(output_filename,),
            description=f"Setting up handler in {output_filename}",
        ),
    )
    return CreatedAWSLambda(
        digest=result.output_digest,
        zip_file_relpath=output_filename,
        runtime=field_set.runtime.value,
        # The AWS-facing handler function is always lambdex_handler.handler, which is the wrapper
        # injected by lambdex that manages invocation of the actual handler.
        handler="lambdex_handler.handler",
    )
Example #15
async def package_python_awslambda(
    field_set: PythonAwsLambdaFieldSet, lambdex: Lambdex
) -> BuiltPackage:
    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip",
    )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            main=None,
            output_filename=output_filename,
            platforms=PexPlatforms([platform]),
            additional_args=[
                # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
                "--manylinux=manylinux2014",
                # When we're executing Pex on Linux, allow a local interpreter to be resolved if
                # available and matching the AMI platform.
                "--resolve-local-platforms",
            ],
        )
    )

    lambdex_request = PexRequest(
        output_filename="lambdex.pex",
        internal_only=True,
        requirements=PexRequirements(lambdex.all_requirements),
        interpreter_constraints=PexInterpreterConstraints(lambdex.interpreter_constraints),
        main=lambdex.main,
    )

    lambdex_pex, pex_result, handler = await MultiGet(
        Get(VenvPex, PexRequest, lambdex_request),
        Get(TwoStepPex, TwoStepPexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler, ResolvePythonAwsHandlerRequest(field_set.handler)),
    )

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=pex_result.pex.digest,
            output_files=(output_filename,),
            description=f"Setting up handler in {output_filename}",
        ),
    )
    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=(
            f"    Runtime: {field_set.runtime.value}",
            # The AWS-facing handler function is always lambdex_handler.handler, which is the
            # wrapper injected by lambdex that manages invocation of the actual handler.
            "    Handler: lambdex_handler.handler",
        ),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact,))
Example #16
async def create_pex_binary_run_request(field_set: PexBinaryFieldSet,
                                        pex_binary_defaults: PexBinaryDefaults,
                                        pex_env: PexEnvironment) -> RunRequest:
    run_in_sandbox = field_set.run_in_sandbox.value
    entry_point, transitive_targets = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point),
        ),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    addresses = [field_set.address]
    interpreter_constraints = await Get(
        InterpreterConstraints, InterpreterConstraintsRequest(addresses))

    pex_filename = (field_set.address.generated_name.replace(".", "_")
                    if field_set.address.generated_name else
                    field_set.address.target_name)
    pex_get = Get(
        Pex,
        PexFromTargetsRequest(
            [field_set.address],
            output_filename=f"{pex_filename}.pex",
            internal_only=True,
            include_source_files=False,
            # Note that the file for first-party entry points is not in the PEX itself. In that
            # case, it's loaded by setting `PEX_EXTRA_SYS_PATH`.
            main=entry_point.val or field_set.script.value,
            additional_args=(
                *field_set.generate_additional_args(pex_binary_defaults),
                # N.B.: Since we cobble together the runtime environment via PEX_EXTRA_SYS_PATH
                # below, it's important for any app that re-executes itself that these environment
                # variables are not stripped.
                "--no-strip-pex-env",
            ),
        ),
    )
    sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))
    pex, sources = await MultiGet(pex_get, sources_get)

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [field_set.address],
            internal_only=True,
            interpreter_constraints=interpreter_constraints,
            sources=sources,
        ),
    )

    input_digests = [
        pex.digest,
        local_dists.pex.digest,
        # Note regarding inline mode: You might think that the sources don't need to be copied
        # into the chroot when using inline sources. But they do, because some of them might be
        # codegenned, and those won't exist in the inline source tree. Rather than incurring the
        # complexity of figuring out here which sources were codegenned, we copy everything.
        # The inline source roots precede the chrooted ones in PEX_EXTRA_SYS_PATH, so the inline
        # sources will take precedence and their copies in the chroot will be ignored.
        local_dists.remaining_sources.source_files.snapshot.digest,
    ]
    merged_digest = await Get(Digest, MergeDigests(input_digests))

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(in_chroot(pex.name), python=pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    # The order here is important: we want the in-repo sources to take precedence over their
    # copies in the sandbox (see above for why those copies exist even in non-sandboxed mode).
    source_roots = [
        *([] if run_in_sandbox else sources.source_roots),
        *chrooted_source_roots,
    ]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=pex.python is not None),
        "PEX_PATH": in_chroot(local_dists.pex.name),
        "PEX_EXTRA_SYS_PATH": os.pathsep.join(source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
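The RunRequest above carries "{chroot}"-relative args and environment values; the run goal later materializes the merged digest into a sandbox directory and substitutes that path. This is not the Pants implementation, just a hypothetical stand-alone illustration of the substitution step:

import os
import subprocess


def launch_run_request(args, extra_env, chroot):
    # Hypothetical helper: replace the "{chroot}" placeholder with the materialized
    # sandbox path, then launch the PEX with the merged environment.
    resolved_args = [arg.replace("{chroot}", chroot) for arg in args]
    resolved_env = {**os.environ, **{k: v.replace("{chroot}", chroot) for k, v in extra_env.items()}}
    return subprocess.run(resolved_args, env=resolved_env, cwd=chroot).returncode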
Example #17
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.field_set.address])
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup
    )

    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address], internal_only=True),
    )

    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )

    # Create any assets that the test depends on through the `runtime_package_dependencies` field.
    assets: Tuple[BuiltPackage, ...] = ()
    unparsed_runtime_packages = (
        request.field_set.runtime_package_dependencies.to_unparsed_address_inputs()
    )
    if unparsed_runtime_packages.values:
        runtime_package_targets = await Get(
            Targets, UnparsedAddressInputs, unparsed_runtime_packages
        )
        field_sets_per_target = await Get(
            FieldSetsPerTarget,
            FieldSetsPerTargetRequest(PackageFieldSet, runtime_package_targets),
        )
        assets = await MultiGet(
            Get(BuiltPackage, PackageFieldSet, field_set)
            for field_set in field_sets_per_target.field_sets
        )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources])
    )

    pytest_pex, requirements_pex, prepared_sources, field_set_source_files = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    pytest_runner_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            # TODO(John Sirois): Switch to ConsoleScript once Pex supports discovering console
            #  scripts via the PEX_PATH: https://github.com/pantsbuild/pex/issues/1257
            main=EntryPoint("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                prepared_sources.source_files.snapshot.digest,
                *(binary.digest for binary in assets),
            )
        ),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend(
            (f"--junitxml={results_file_name}", "-o", f"junit_family={pytest.options.junit_family}")
        )
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }

    extra_env.update(test_extra_env.env)

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
Example #18
async def mypy_typecheck_partition(partition: MyPyPartition, mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs, mypy.source_plugins)
    plugin_transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses)
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField)
    )

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None
        if partition.python_version_already_configured
        else partition.interpreter_constraints.minimum_python_version()
    )

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else PexInterpreterConstraints(mypy.interpreter_constraints)
    )

    plugin_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(plugin_transitive_targets.closure)
    )
    closure_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles, SourceFilesRequest(tgt.get(PythonSources) for tgt in partition.root_targets)
    )

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    # TODO(John Sirois): Scope the extra requirements to the partition.
    #  Right now we just use a global set of extra requirements and these might not be compatible
    #  with all partitions. See: https://github.com/pantsbuild/pants/issues/11556
    mypy_extra_requirements_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="mypy_extra_requirements.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.extra_requirements),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=PexRequirements((*mypy.all_requirements, *plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    config_files_get = Get(ConfigFiles, ConfigFilesRequest, mypy.config_request)

    (
        plugin_sources,
        closure_sources,
        roots_sources,
        mypy_pex,
        requirements_pex,
        mypy_extra_requirements_pex,
        config_files,
    ) = await MultiGet(
        plugin_sources_get,
        closure_sources_get,
        roots_sources_get,
        mypy_pex_get,
        requirements_pex_get,
        mypy_extra_requirements_pex_get,
        config_files_get,
    )

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    typechecked_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="typechecked_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex, mypy_extra_requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    typechecked_venv_pex, file_list_digest = await MultiGet(
        typechecked_venv_pex_request, file_list_digest_request
    )

    merged_input_files = await Get(
        Digest,
        MergeDigests(
            [
                file_list_digest,
                plugin_sources.source_files.snapshot.digest,
                closure_sources.source_files.snapshot.digest,
                typechecked_venv_pex.digest,
                config_files.snapshot.digest,
            ]
        ),
    )

    all_used_source_roots = sorted(
        set(itertools.chain(plugin_sources.source_roots, closure_sources.source_roots))
    )
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                typechecked_venv_pex,
                file_list_path=file_list_path,
                python_version=python_version,
            ),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
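The generate_argv helper used above lives elsewhere in the MyPy backend and is not shown here. As a heavily reduced, hypothetical sketch of the shape of the command line it produces (real config-file, plugin, and caching options are omitted):

from typing import Optional, Tuple


def generate_argv_sketch(file_list_path: str, python_version: Optional[str]) -> Tuple[str, ...]:
    # Hypothetical reduction: pass the file list via an @-args file and, when a version
    # was computed above, pin --python-version so MyPy parses the right AST.
    args = [f"@{file_list_path}"]
    if python_version is not None:
        args.append(f"--python-version={python_version}")
    return tuple(args)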
Example #19
async def create_pex_binary_run_request(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    entry_point, transitive_targets = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point),
        ),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([field_set.address],
                                               internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(
                pex_binary_defaults),
            internal_only=True,
            # Note that the entry point file is not in the PEX itself. It's loaded by setting
            # `PEX_EXTRA_SYS_PATH`.
            # TODO(John Sirois): Support ConsoleScript in PexBinary targets:
            #  https://github.com/pantsbuild/pants/issues/11619
            main=entry_point.val,
        ),
    )

    requirements, sources, runner_pex = await MultiGet(requirements_request,
                                                       sources_request,
                                                       runner_pex_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([
            requirements.digest, sources.source_files.snapshot.digest,
            runner_pex.digest
        ]),
    )

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    args = pex_env.create_argv(in_chroot(runner_pex.name),
                               python=runner_pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=runner_pex.python is not None),
        "PEX_PATH": in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
Example #20
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )

    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            entry_point="pytest:main",
            internal_only=True,
            additional_args=(
                # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
                # `importlib_metadata` and thus `zipp`, does not play nicely when doing import
                # magic directly from zip files. `zipp` has pathologically bad behavior with large
                # zipfiles.
                # TODO: this does have a performance cost as the pex must now be expanded to disk.
                # Long term, it would be better to fix Zipp (whose fix would then need to be used
                # by importlib_metadata and then by Pytest). See
                # https://github.com/jaraco/zipp/pull/26.
                "--not-zip-safe",
                # TODO(John Sirois): Support shading python binaries:
                #   https://github.com/pantsbuild/pants/issues/9206
                "--pex-path",
                requirements_pex_request.input.output_filename,
            ),
        ),
    )

    prepared_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Create any assets that the test depends on through the `runtime_package_dependencies` field.
    assets: Tuple[BuiltPackage, ...] = ()
    unparsed_runtime_packages = (request.field_set.runtime_package_dependencies
                                 .to_unparsed_address_inputs())
    if unparsed_runtime_packages.values:
        runtime_package_targets = await Get(Targets, UnparsedAddressInputs,
                                            unparsed_runtime_packages)
        field_sets_per_target = await Get(
            FieldSetsPerTarget,
            FieldSetsPerTargetRequest(PackageFieldSet,
                                      runtime_package_targets),
        )
        assets = await MultiGet(
            Get(BuiltPackage, PackageFieldSet, field_set)
            for field_set in field_sets_per_target.field_sets)

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    pytest_pex, requirements_pex, prepared_sources, field_set_source_files = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            requirements_pex.digest,
            pytest_pex.digest,
            *(binary.digest for binary in assets),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }

    extra_env.update(test_extra_env.env)

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        PexProcess(
            pytest_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
Example #21
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    build_package_dependencies_get = Get(
        BuiltPackageDependencies,
        BuildPackageDependenciesRequest(
            request.field_set.runtime_package_dependencies),
    )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        built_package_dependencies,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        build_package_dependencies_get,
        extra_output_directory_digest_get,
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=ConsoleScript("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(pkg.digest for pkg in built_package_dependencies),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
Example #22
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet,
    python_binary_defaults: PythonBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        binary_source_paths = await Get(
            Paths, PathGlobs,
            field_set.sources.path_globs(FilesNotFoundBehavior.error))
        if len(binary_source_paths.files) != 1:
            raise InvalidFieldException(
                "No `entry_point` was set for the target "
                f"{repr(field_set.address)}, so it must have exactly one source, but it has "
                f"{len(binary_source_paths.files)}")
        entry_point_path = binary_source_paths.files[0]
        source_root = await Get(
            SourceRoot,
            SourceRootRequest,
            SourceRootRequest.for_file(entry_point_path),
        )
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            os.path.relpath(entry_point_path, source_root.path))
    transitive_targets = await Get(TransitiveTargets,
                                   Addresses([field_set.address]))

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([field_set.address],
                                               internal_only=True),
    )

    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(
                python_binary_defaults),
            internal_only=True,
            # Note that the entry point file is not in the Pex itself, but on the
            # PEX_PATH. This works fine!
            entry_point=entry_point,
        ),
    )

    requirements, sources, runner_pex = await MultiGet(requirements_request,
                                                       sources_request,
                                                       runner_pex_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([
            requirements.digest, sources.source_files.snapshot.digest,
            runner_pex.digest
        ]),
    )

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    args = pex_env.create_argv(in_chroot(runner_pex.name),
                               python=runner_pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=runner_pex.python is not None),
        "PEX_PATH":
        in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH":
        ":".join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
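
Two small path manipulations do most of the work in the run request above: prefixing files with the `{chroot}` placeholder and turning a source-root-relative file into a dotted entry point. The helpers below are simplified stand-ins for illustration, not the Pants API.

import os


def in_chroot(relpath: str) -> str:
    # Same idea as the helper in the rule: paths are resolved inside the execution chroot.
    return os.path.join("{chroot}", relpath)


def source_file_to_entry_point(source_root_relative_path: str) -> str:
    # Rough approximation of translate_source_file_to_entry_point: "app/main.py" -> "app.main".
    return source_root_relative_path[:-len(".py")].replace(os.sep, ".")


assert in_chroot("demo.pex") == "{chroot}/demo.pex"
assert source_file_to_entry_point("app/main.py") == "app.main"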
Beispiel #23
0
async def pylint_lint_partition(
    partition: PylintPartition, pylint: Pylint, first_party_plugins: PylintFirstPartyPlugins
) -> LintResult:
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    pylint_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=pylint.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings,
            ),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    prepare_python_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.targets_with_dependencies)
    )
    field_set_sources_get = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )

    pylint_pex, requirements_pex, prepared_python_sources, field_set_sources = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            PexRequest(
                output_filename="pylint_runner.pex",
                interpreter_constraints=partition.interpreter_constraints,
                main=pylint.main,
                internal_only=True,
                pex_path=[pylint_pex, requirements_pex],
            ),
        ),
        Get(
            ConfigFiles, ConfigFilesRequest, pylint.config_request(field_set_sources.snapshot.dirs)
        ),
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                config_files.snapshot.digest,
                first_party_plugins.sources_digest,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
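
Pylint finds first-party code and any first-party plugins via PEX_EXTRA_SYS_PATH. A hedged sketch of how that value is put together; the root names and prefix below are placeholders, not values from the rule.

def build_pylint_pythonpath(source_roots, plugin_prefix=None):
    # Source roots first, then the synthetic prefix holding first-party plugins, if any.
    pythonpath = list(source_roots)
    if plugin_prefix:
        pythonpath.append(plugin_prefix)
    return ":".join(pythonpath)


assert build_pylint_pythonpath(["src/python"], "__plugins") == "src/python:__plugins"
assert build_pylint_pythonpath(["src/python"]) == "src/python"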
Beispiel #24
0
async def mypy_typecheck_partition(partition: MyPyPartition,
                                   mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    plugin_transitive_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))
    plugin_transitive_targets, launcher_script = await MultiGet(
        plugin_transitive_targets_request,
        Get(Digest, CreateDigest([LAUNCHER_FILE])))

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None if partition.python_version_already_configured else
        partition.interpreter_constraints.minimum_python_version())

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8, so instead we must run MyPy with
    # Python 3.8 when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8, which would error if
    # 3.8 is not installed on the machine.
    tool_interpreter_constraints = PexInterpreterConstraints(
        ("CPython>=3.8",)
        if (mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer())
        else mypy.interpreter_constraints)

    plugin_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(plugin_transitive_targets.closure))
    typechecked_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))

    # Normally, this `requirements.pex` would be merged with mypy.pex via `--pex-path`. However,
    # this will cause a runtime error if the interpreter constraints are different between the
    # PEXes and they have incompatible wheels.
    #
    # Instead, we teach MyPy about the requirements by extracting the distributions from
    # requirements.pex and setting EXTRACTED_WHEELS, which our custom launcher script then
    # looks for.
    #
    # Conventionally, MyPy users might instead set `MYPYPATH` for this. However, doing this
    # results in type checking the requirements themselves.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (addr for addr in partition.field_set_addresses),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )
    mypy_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            sources=launcher_script,
            requirements=PexRequirements(
                itertools.chain(mypy.all_requirements, plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
            entry_point=PurePath(LAUNCHER_FILE.path).stem,
        ),
    )

    config_digest_request = Get(Digest, PathGlobs, config_path_globs(mypy))

    (
        plugin_sources,
        typechecked_sources,
        mypy_pex,
        requirements_pex,
        config_digest,
    ) = await MultiGet(
        plugin_sources_request,
        typechecked_sources_request,
        mypy_pex_request,
        requirements_pex_request,
        config_digest_request,
    )

    typechecked_srcs_snapshot = typechecked_sources.source_files.snapshot
    file_list_path = "__files.txt"
    python_files = "\n".join(
        determine_python_files(
            typechecked_sources.source_files.snapshot.files))
    create_file_list_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )

    file_list_digest, extracted_pex_distributions = await MultiGet(
        create_file_list_request,
        Get(ExtractedPexDistributions, Pex, requirements_pex))

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            plugin_sources.source_files.snapshot.digest,
            typechecked_srcs_snapshot.digest,
            mypy_pex.digest,
            extracted_pex_distributions.digest,
            config_digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(plugin_sources.source_roots,
                            typechecked_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH":
        ":".join(all_used_source_roots),
        "EXTRACTED_WHEELS":
        ":".join(extracted_pex_distributions.wheel_directory_paths),
    }

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            mypy_pex,
            argv=generate_argv(mypy,
                               file_list_path=file_list_path,
                               python_version=python_version),
            input_digest=merged_input_files,
            extra_env=env,
            description=
            f"Run MyPy on {pluralize(len(typechecked_srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))
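
Rather than passing every file on the command line, the rule above writes the files to check into __files.txt and materializes that digest. A rough sketch of that step with made-up file names; the real determine_python_files helper may filter differently.

sources = ("src/app.py", "src/app.pyi", "README.md", "src/util.py")
# Keep only Python sources, roughly what determine_python_files does above.
python_files = [f for f in sources if f.endswith((".py", ".pyi"))]
file_list_content = "\n".join(python_files).encode()
assert b"README.md" not in file_list_content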
Beispiel #25
0
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
    complete_env: CompleteEnvironment,
) -> TestSetup:
    transitive_targets, plugin_setups = await MultiGet(
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
        Get(AllPytestPluginSetups,
            AllPytestPluginSetupsRequest(request.field_set.address)),
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            [request.field_set.address],
            internal_only=True,
            resolve_and_lockfile=request.field_set.resolve.resolve_and_lockfile(
                python_setup),
        ),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=pytest.pex_requirements(),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    # Ensure that the empty extra output dir exists.
    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        extra_output_directory_digest_get,
    )

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [request.field_set.address],
            interpreter_constraints=interpreter_constraints,
            sources=prepared_sources,
        ),
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=pytest.main,
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex, local_dists.pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(plugin_setup.digest for plugin_setup in plugin_setups),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        pytest.validate_pytest_cov_included()
        output_files.append(".coverage")

        if coverage_subsystem.filter:
            cov_args = [f"--cov={morf}" for morf in coverage_subsystem.filter]
        else:
            # N.B.: Passing `--cov=` or `--cov=.` to communicate "record coverage for all sources"
            # fails in certain contexts as detailed in:
            #   https://github.com/pantsbuild/pants/issues/12390
            # Instead we focus coverage on just the directories containing python source files
            # materialized to the Process chroot.
            cov_args = [
                f"--cov={source_root}"
                for source_root in prepared_sources.source_roots
            ]

        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *cov_args,
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
        # NOTE: `complete_env` intentionally after `test_extra_env` to allow overriding within
        # `python_tests`
        **complete_env.get_subset(request.field_set.extra_env_vars.value or ()),
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if test_subsystem.force else ProcessCacheScope.SUCCESSFUL)
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(
                pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
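
The coverage arguments above either honour an explicit filter from the coverage subsystem or fall back to the materialized source roots. A minimal sketch of that branch; the config path and roots are assumptions made for illustration.

def build_cov_args(cov_filter, source_roots, config_path="pyproject.toml"):
    targets = cov_filter if cov_filter else source_roots
    return [
        "--cov-report=",  # Turn off terminal report output.
        f"--cov-config={config_path}",
        *(f"--cov={target}" for target in targets),
    ]


assert build_cov_args([], ["src/python"]) == [
    "--cov-report=", "--cov-config=pyproject.toml", "--cov=src/python"]
assert build_cov_args(["my_pkg"], ["src/python"])[-1] == "--cov=my_pkg"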
Beispiel #26
0
async def package_python_awslambda(
    field_set: PythonAwsLambdaFieldSet,
    lambdex: Lambdex,
    platform: Platform,
    union_membership: UnionMembership,
) -> BuiltPackage:
    if platform.is_macos:
        logger.warning(
            "AWS Lambdas built on macOS may fail to build. If your lambda uses any third-party"
            " dependencies without binary wheels (bdist) for Linux available, it will fail to"
            " build. If this happens, you will either need to update your dependencies to only use"
            f" dependencies with pre-built wheels, or find a Linux environment to run {bin_name()}"
            " package. (See https://realpython.com/python-wheels/ for more about wheels.)\n\n(If"
            " the build does not raise an exception, it's safe to use macOS.)")

    output_filename = field_set.output_path.value_or_default(
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip", )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    pex_platforms = []
    interpreter_version = field_set.runtime.to_interpreter_version()
    if interpreter_version:
        py_major, py_minor = interpreter_version
        platform_str = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
        # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
        if py_major <= 3 and py_minor < 8:
            platform_str += "m"
        if (py_major, py_minor) == (2, 7):
            platform_str += "u"
        pex_platforms.append(platform_str)

    additional_pex_args = (
        # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
        "--manylinux=manylinux2014",
        # When we're executing Pex on Linux, allow a local interpreter to be resolved if
        # available and matching the AMI platform.
        "--resolve-local-platforms",
    )

    complete_platforms = await Get(CompletePlatforms,
                                   PexCompletePlatformsField,
                                   field_set.complete_platforms)

    pex_request = PexFromTargetsRequest(
        addresses=[field_set.address],
        internal_only=False,
        include_requirements=field_set.include_requirements.value,
        output_filename=output_filename,
        platforms=PexPlatforms(pex_platforms),
        complete_platforms=complete_platforms,
        additional_args=additional_pex_args,
        additional_lockfile_args=additional_pex_args,
    )

    lambdex_pex, pex_result, handler, transitive_targets = await MultiGet(
        Get(VenvPex, PexRequest, lambdex.to_pex_request()),
        Get(Pex, PexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler,
            ResolvePythonAwsHandlerRequest(field_set.handler)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX and is a common
    # gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            softwrap(f"""
                The `python_awslambda` target {field_set.address} transitively depends on the below
                `files` targets, but Pants will not include them in the built Lambda. Filesystem APIs
                like `open()` are not able to load files within the binary itself; instead, they
                read from the current working directory.

                Instead, use `resources` targets. See {doc_url('resources')}.

                Files targets dependencies: {files_addresses}
                """))

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=pex_result.digest,
            output_files=(output_filename, ),
            description=f"Setting up handler in {output_filename}",
        ),
    )

    extra_log_data: list[tuple[str, str]] = []
    if field_set.runtime.value:
        extra_log_data.append(("Runtime", field_set.runtime.value))
    extra_log_data.extend(
        ("Complete platform", path) for path in complete_platforms)
    # The AWS-facing handler function is always lambdex_handler.handler, which is the
    # wrapper injected by lambdex that manages invocation of the actual handler.
    extra_log_data.append(("Handler", "lambdex_handler.handler"))
    first_column_width = 4 + max(len(header) for header, _ in extra_log_data)

    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=tuple(
            f"{header.rjust(first_column_width, ' ')}: {data}"
            for header, data in extra_log_data),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact, ))
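
The extra log lines at the end of the rule are right-aligned into a simple two-column layout. A short standalone sketch, with invented header/data pairs:

extra_log_data = [("Runtime", "python3.8"), ("Handler", "lambdex_handler.handler")]
first_column_width = 4 + max(len(header) for header, _ in extra_log_data)
lines = [f"{header.rjust(first_column_width)}: {data}" for header, data in extra_log_data]
assert lines[0] == "    Runtime: python3.8"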
Beispiel #27
0
async def create_pex_binary_run_request(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    pex_env: PexEnvironment,
    python_setup: PythonSetup,
) -> RunRequest:
    entry_point, transitive_targets = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point),
        ),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in local_dists_get.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest(
            [field_set.address],
            output_filename=f"{field_set.address.target_name}.pex",
            internal_only=True,
            include_source_files=False,
            # Note that the file for first-party entry points is not in the PEX itself. In that
            # case, it's loaded by setting `PEX_EXTRA_SYS_PATH`.
            main=entry_point.val or field_set.script.value,
            resolve_and_lockfile=field_set.resolve.resolve_and_lockfile(
                python_setup),
            additional_args=(
                *field_set.generate_additional_args(pex_binary_defaults),
                # N.B.: Since we cobble together the runtime environment via PEX_EXTRA_SYS_PATH
                # below, it's important for any app that re-executes itself that these environment
                # variables are not stripped.
                "--no-strip-pex-env",
            ),
        ),
    )
    pex_get = Get(Pex, PexRequest, requirements_pex_request)
    sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))
    pex, sources = await MultiGet(pex_get, sources_get)

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [field_set.address],
            internal_only=True,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            sources=sources,
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests([
            pex.digest,
            local_dists.pex.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
        ]),
    )

    def in_chroot(relpath: str) -> str:
        return os.path.join("{chroot}", relpath)

    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(in_chroot(pex.name), python=pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=pex.python is not None),
        "PEX_PATH":
        in_chroot(local_dists.pex.name),
        "PEX_EXTRA_SYS_PATH":
        os.pathsep.join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
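
At run time the first-party sources stay loose on disk while the requirements and local dists live in PEXes, so everything is stitched together with PEX_PATH and PEX_EXTRA_SYS_PATH. A hedged sketch of that environment; base_env and the paths are placeholders, not real rule outputs.

import os


def build_run_env(base_env, local_dists_pex_name, source_roots):
    return {
        **base_env,
        "PEX_PATH": os.path.join("{chroot}", local_dists_pex_name),
        "PEX_EXTRA_SYS_PATH": os.pathsep.join(
            os.path.join("{chroot}", sr) for sr in source_roots),
    }


env = build_run_env({"PEX_ROOT": ".cache/pex_root"}, "local_dists.pex", ["src/python"])
assert env["PEX_PATH"] == "{chroot}/local_dists.pex"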
Beispiel #28
0
async def mypy_typecheck_partition(
    partition: MyPyPartition,
    config_file: MyPyConfigFile,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> CheckResult:
    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe))
        else mypy.interpreter_constraints)

    closure_sources_get = Get(PythonSourceFiles,
                              PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            tgt.get(PythonSources) for tgt in partition.root_targets))

    # See `requirements_venv_pex` for how this will get wrapped in a `VenvPex`.
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=mypy.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings, ),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    closure_sources, roots_sources, mypy_pex, requirements_pex = await MultiGet(
        closure_sources_get, roots_sources_get, mypy_pex_get,
        requirements_pex_get)

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest(
            [FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    # This creates a venv with all the 3rd-party requirements used by the code. We tell MyPy to
    # use this venv by setting `--python-executable`. Note that this Python interpreter is
    # different than what we run MyPy with.
    #
    # We could have directly asked the `PexFromTargetsRequest` to return a `VenvPex`, rather than
    # `Pex`, but that would mean missing out on sharing a cache with other goals like `test` and
    # `run`.
    requirements_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="requirements_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    requirements_venv_pex, file_list_digest = await MultiGet(
        requirements_venv_pex_request, file_list_digest_request)

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            first_party_plugins.sources_digest,
            closure_sources.source_files.snapshot.digest,
            requirements_venv_pex.digest,
            config_file.digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(first_party_plugins.source_roots,
                            closure_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                venv_python=requirements_venv_pex.python.argv0,
                file_list_path=file_list_path,
                python_version=config_file.python_version_to_autoset(
                    partition.interpreter_constraints,
                    python_setup.interpreter_universe),
            ),
            input_digest=merged_input_files,
            extra_env=env,
            output_directories=(REPORT_DIR, ),
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return CheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
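
Both the PEX machinery and MyPy itself are pointed at the same deduplicated, sorted set of source roots. A minimal sketch of that merge, with invented root names:

import itertools

plugin_roots = ["src/plugins"]
closure_roots = ["src/python", "src/plugins"]
all_used_source_roots = sorted(set(itertools.chain(plugin_roots, closure_roots)))
env = {
    "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
    "MYPYPATH": ":".join(all_used_source_roots),
}
assert env["MYPYPATH"] == "src/plugins:src/python"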
Beispiel #29
0
async def pylint_lint_partition(partition: PylintPartition, pylint: Pylint) -> LintResult:
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField)
    )
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=PexRequirements([*pylint.all_requirements, *plugin_requirements]),
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            # TODO(John Sirois): Support shading python binaries:
            #   https://github.com/pantsbuild/pants/issues/9206
            additional_args=("--pex-path", requirements_pex_request.input.output_filename),
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )

    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles, PythonSourceFilesRequest(partition.plugin_targets)
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.targets_with_dependencies)
    )
    field_set_sources_request = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )

    (
        pylint_pex,
        requirements_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )

    prefixed_plugin_sources = (
        await Get(
            Digest,
            AddPrefix(prepared_plugin_sources.stripped_source_files.snapshot.digest, "__plugins"),
        )
        if pylint.source_plugins
        else EMPTY_DIGEST
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                config_digest,
                prefixed_plugin_sources,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pylint_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
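
The "__plugins" re-rooting above exists because `load-plugins` takes module names, so the directory directly containing each plugin module must itself be on the path. A simplified sketch of that prefixing; the file name is hypothetical.

import os


def prefix_plugin_sources(stripped_plugin_files, prefix="__plugins"):
    # "my_plugin.py" (already stripped of its source root) -> "__plugins/my_plugin.py"
    return [os.path.join(prefix, f) for f in stripped_plugin_files]


assert prefix_plugin_sources(["my_plugin.py"]) == ["__plugins/my_plugin.py"]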
Beispiel #30
0
async def package_python_awslambda(
        field_set: PythonAwsLambdaFieldSet, lambdex: Lambdex,
        union_membership: UnionMembership) -> BuiltPackage:
    output_filename = field_set.output_path.value_or_default(
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip", )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"

    additional_pex_args = (
        # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
        "--manylinux=manylinux2014",
        # When we're executing Pex on Linux, allow a local interpreter to be resolved if
        # available and matching the AMI platform.
        "--resolve-local-platforms",
    )
    pex_request = PexFromTargetsRequest(
        addresses=[field_set.address],
        internal_only=False,
        output_filename=output_filename,
        platforms=PexPlatforms([platform]),
        additional_args=additional_pex_args,
        additional_lockfile_args=additional_pex_args,
    )

    lambdex_request = PexRequest(
        output_filename="lambdex.pex",
        internal_only=True,
        requirements=lambdex.pex_requirements(),
        interpreter_constraints=lambdex.interpreter_constraints,
        main=lambdex.main,
    )

    lambdex_pex, pex_result, handler, transitive_targets = await MultiGet(
        Get(VenvPex, PexRequest, lambdex_request),
        Get(Pex, PexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler,
            ResolvePythonAwsHandlerRequest(field_set.handler)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX and is a common
    # gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            f"The `python_awslambda` target {field_set.address} transitively depends on the below "
            "`files` targets, but Pants will not include them in the built Lambda. Filesystem APIs "
            "like `open()` are not able to load files within the binary itself; instead, they "
            "read from the current working directory."
            f"\n\nInstead, use `resources` targets. See {doc_url('resources')}."
            f"\n\nFiles targets dependencies: {files_addresses}")

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=pex_result.digest,
            output_files=(output_filename, ),
            description=f"Setting up handler in {output_filename}",
        ),
    )
    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=(
            f"    Runtime: {field_set.runtime.value}",
            # The AWS-facing handler function is always lambdex_handler.handler, which is the
            # wrapper injected by lambdex that manages invocation of the actual handler.
            "    Handler: lambdex_handler.handler",
        ),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact, ))
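
This older rule computes the Lambda platform string inline; Beispiel #26 wraps the same logic in a conditional. A hedged, standalone version of that computation for reference (the function name is ours, not from Pants):

def lambda_pex_platform(py_major: int, py_minor: int) -> str:
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # The pymalloc "m" ABI flag was dropped in Python 3.8 (https://bugs.python.org/issue36707).
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    # Python 2.7 Lambda runtimes use the wide-unicode ("u") build.
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    return platform


assert lambda_pex_platform(3, 8) == "linux_x86_64-cp-38-cp38"
assert lambda_pex_platform(3, 6) == "linux_x86_64-cp-36-cp36m"
assert lambda_pex_platform(2, 7) == "linux_x86_64-cp-27-cp27mu"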