Пример #1
0
    def test_add_prefix(self) -> None:
        """`AddPrefix` prepends a directory prefix, is a no-op for "", and rejects
        non-relative prefixes."""
        files = (
            FileContent(path="main.py", content=b'print("from main")'),
            FileContent(path="subdir/sub.py", content=b'print("from sub")'),
        )
        digest = self.request(Digest, [CreateDigest(files)])

        # A two-component prefix is applied to every file, and every new parent
        # directory appears in the snapshot.
        prefixed = self.request(Digest, [AddPrefix(digest, "outer_dir/middle_dir")])
        snapshot = self.request(Snapshot, [prefixed])
        assert sorted(snapshot.files) == [
            "outer_dir/middle_dir/main.py",
            "outer_dir/middle_dir/subdir/sub.py",
        ]
        assert sorted(snapshot.dirs) == [
            "outer_dir",
            "outer_dir/middle_dir",
            "outer_dir/middle_dir/subdir",
        ]

        # The empty prefix leaves the digest unchanged.
        assert self.request(Digest, [AddPrefix(digest, "")]) == digest

        # A prefix that escapes the root is rejected.
        with self.assertRaisesRegex(Exception,
                                    r"The `prefix` must be relative."):
            self.request(Digest, [AddPrefix(digest, "../something")])
Пример #2
0
def test_add_prefix(rule_runner: RuleRunner) -> None:
    """`AddPrefix` prepends a directory prefix, is a no-op for "", and rejects
    non-relative prefixes."""
    contents = [FileContent("main.ext", b""), FileContent("subdir/sub.ext", b"")]
    digest = rule_runner.request(Digest, [CreateDigest(contents)])

    # A two-component prefix is applied to every file, and every new parent
    # directory appears in the snapshot.
    prefixed = rule_runner.request(Digest, [AddPrefix(digest, "outer_dir/middle_dir")])
    snapshot = rule_runner.request(Snapshot, [prefixed])
    assert sorted(snapshot.files) == [
        "outer_dir/middle_dir/main.ext",
        "outer_dir/middle_dir/subdir/sub.ext",
    ]
    assert sorted(snapshot.dirs) == [
        "outer_dir",
        "outer_dir/middle_dir",
        "outer_dir/middle_dir/subdir",
    ]

    # The empty prefix leaves the digest unchanged.
    assert rule_runner.request(Digest, [AddPrefix(digest, "")]) == digest

    # A prefix that escapes the root is rejected.
    with pytest.raises(Exception, match=r"The `prefix` must be relative."):
        rule_runner.request(Digest, [AddPrefix(digest, "../something")])
Пример #3
0
async def export(
    console: Console,
    targets: Targets,
    export_subsystem: ExportSubsystem,
    workspace: Workspace,
    union_membership: UnionMembership,
    build_root: BuildRoot,
    dist_dir: DistDir,
) -> Export:
    """Materialize every registered exportable under `<dist_dir>/export`.

    Fans out one `ExportableDataRequest` per registered union member, re-roots
    each resulting digest at its `reldir`, merges and writes the whole tree
    into the workspace, then creates any requested symlinks and logs what was
    written. Always returns exit code 0.
    """
    # Each ExportableDataRequest union member contributes one request over the
    # same target set.
    request_types = cast("Iterable[type[ExportableDataRequest]]",
                         union_membership.get(ExportableDataRequest))
    requests = tuple(request_type(targets) for request_type in request_types)
    exportables = await MultiGet(
        Get(ExportableData, ExportableDataRequest, request)
        for request in requests)
    # Re-root each exportable's digest at its requested relative directory so
    # the merged digest has no collisions.
    prefixed_digests = await MultiGet(
        Get(Digest, AddPrefix(exp.digest, exp.reldir)) for exp in exportables)
    output_dir = os.path.join(str(dist_dir.relpath), "export")
    merged_digest = await Get(Digest, MergeDigests(prefixed_digests))
    dist_digest = await Get(Digest, AddPrefix(merged_digest, output_dir))
    workspace.write_digest(dist_digest)
    for exp in exportables:
        for symlink in exp.symlinks:
            # Note that if symlink.source_path is an abspath, join returns it unchanged.
            source_abspath = os.path.join(build_root.path, symlink.source_path)
            link_abspath = os.path.abspath(
                os.path.join(output_dir, exp.reldir, symlink.link_rel_path))
            absolute_symlink(source_abspath, link_abspath)
        console.print_stdout(
            f"Wrote {exp.description} to {os.path.join(output_dir, exp.reldir)}"
        )
    return Export(exit_code=0)
Пример #4
0
    def test_add_prefix(self) -> None:
        """`AddPrefix` prepends a directory prefix, is a no-op for "", and rejects
        prefixes containing parent-dir components."""
        files = (
            FileContent(path="main.py", content=b'print("from main")'),
            FileContent(path="subdir/sub.py", content=b'print("from sub")'),
        )
        digest = self.request_single_product(Digest, CreateDigest(files))

        # A two-component prefix is applied to every file, and every new parent
        # directory appears in the snapshot.
        prefixed = self.request_single_product(
            Digest, AddPrefix(digest, "outer_dir/middle_dir")
        )
        snapshot = self.request_single_product(Snapshot, prefixed)
        assert sorted(snapshot.files) == [
            "outer_dir/middle_dir/main.py",
            "outer_dir/middle_dir/subdir/sub.py",
        ]
        assert sorted(snapshot.dirs) == [
            "outer_dir",
            "outer_dir/middle_dir",
            "outer_dir/middle_dir/subdir",
        ]

        # The empty prefix leaves the digest unchanged.
        assert self.request_single_product(Digest, AddPrefix(digest, "")) == digest

        # A prefix containing `..` is rejected.
        with self.assertRaisesRegex(
            Exception, r"Cannot add component .*ParentDir.* of path prefix `../something`."
        ):
            self.request_single_product(Digest, AddPrefix(digest, "../something"))
Пример #5
0
async def bsp_scala_compile_request(
    request: ScalaBSPCompileFieldSet,
    classpath_entry_request: ClasspathEntryRequestFactory,
) -> BSPCompileResult:
    """Compile one coarsened Scala target for BSP.

    The field set's address must coarsen to exactly one component. On success,
    every entry of the compile output is flattened to its basename and
    re-rooted under `jvm/resolves/<resolve>/lib`; on failure (or when there is
    no output) the result carries an empty digest and an ERROR status.
    """
    coarsened_targets = await Get(CoarsenedTargets,
                                  Addresses([request.source.address]))
    assert len(coarsened_targets) == 1
    coarsened_target = coarsened_targets[0]
    resolve = await Get(CoursierResolveKey,
                        CoarsenedTargets([coarsened_target]))

    result = await Get(
        FallibleClasspathEntry,
        ClasspathEntryRequest,
        classpath_entry_request.for_targets(component=coarsened_target,
                                            resolve=resolve),
    )
    _logger.info(f"scala compile result = {result}")
    output_digest = EMPTY_DIGEST
    if result.exit_code == 0 and result.output:
        entries = await Get(DigestEntries, Digest, result.output.digest)
        # Flatten every entry to its basename so all jars sit directly in lib/.
        # (Variable name fixed from the misspelled `new_entires`.)
        new_entries = [
            dataclasses.replace(entry, path=os.path.basename(entry.path))
            for entry in entries
        ]
        flat_digest = await Get(Digest, CreateDigest(new_entries))
        output_digest = await Get(
            Digest, AddPrefix(flat_digest, f"jvm/resolves/{resolve.name}/lib"))

    return BSPCompileResult(
        status=StatusCode.ERROR if result.exit_code != 0 else StatusCode.OK,
        output_digest=output_digest,
    )
Пример #6
0
async def materialize_scala_runtime_jars(
    request: MaterializeScalaRuntimeJarsRequest,
) -> MaterializeScalaRuntimeJarsResult:
    """Fetch the scala-compiler and scala-library jars for the requested Scala
    version and snapshot them under `jvm/scala-runtime/<version>`."""
    scala_version = request.scala_version
    runtime_coordinates = [
        Coordinate(
            group="org.scala-lang",
            artifact="scala-compiler",
            version=scala_version,
        ),
        Coordinate(
            group="org.scala-lang",
            artifact="scala-library",
            version=scala_version,
        ),
    ]
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates(
                runtime_coordinates
            ),
        ),
    )

    # Re-root the fetched jars so they are addressable by Scala version.
    prefixed_digest = await Get(
        Digest,
        AddPrefix(tool_classpath.content.digest, f"jvm/scala-runtime/{scala_version}"),
    )
    snapshot = await Get(Snapshot, Digest, prefixed_digest)
    return MaterializeScalaRuntimeJarsResult(snapshot)
Пример #7
0
async def package_go_binary(field_set: GoBinaryFieldSet) -> BuiltPackage:
    """Build and link a Go binary package.

    Compiles the main package (and its import graph), generates the import
    config, links the binary, then re-roots the linked output at the
    user-configured output path's parent directory.
    """
    main_pkg = await Get(GoBinaryMainPackage, GoBinaryMainPackageRequest(field_set.main))
    built_package = await Get(
        BuiltGoPackage, BuildGoPackageTargetRequest(main_pkg.address, is_main=True)
    )
    # The linker needs the `.a` archive of the main package specifically.
    main_pkg_a_file_path = built_package.import_paths_to_pkg_a_files["main"]
    import_config = await Get(
        ImportConfig, ImportConfigRequest(built_package.import_paths_to_pkg_a_files)
    )
    input_digest = await Get(Digest, MergeDigests([built_package.digest, import_config.digest]))

    # `file_ending=None` keeps whatever extension (if any) the user configured.
    output_filename = PurePath(field_set.output_path.value_or_default(file_ending=None))
    binary = await Get(
        LinkedGoBinary,
        LinkGoBinaryRequest(
            input_digest=input_digest,
            archives=(main_pkg_a_file_path,),
            import_config_path=import_config.CONFIG_PATH,
            output_filename=f"./{output_filename.name}",
            description=f"Link Go binary for {field_set.address}",
        ),
    )

    # Move the linked binary from the digest root into its destination directory.
    renamed_output_digest = await Get(Digest, AddPrefix(binary.digest, str(output_filename.parent)))

    artifact = BuiltPackageArtifact(relpath=str(output_filename))
    return BuiltPackage(renamed_output_digest, (artifact,))
Пример #8
0
async def analyze_java_source_dependencies(
    processor_classfiles: JavaParserCompiledClassfiles,
    jdk: InternalJdk,
    request: JavaSourceDependencyAnalysisRequest,
) -> FallibleJavaSourceDependencyAnalysisResult:
    """Run the Java parser over exactly one source file and capture its
    dependency-analysis JSON.

    Raises:
        ValueError: if the request contains zero or more than one source file.
    """
    source_files = request.source_files
    # NOTE(review): the error text says `parse_java_package`, which does not
    # match this function's name — possibly a stale message; confirm upstream.
    if len(source_files.files) > 1:
        raise ValueError(
            f"parse_java_package expects sources with exactly 1 source file, but found {len(source_files.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "parse_java_package expects sources with exactly 1 source file, but found none."
        )
    # Sandbox-relative directories used to isolate the source from the tool
    # and processor classpaths.
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"

    parser_lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                        JavaParserToolLockfileSentinel())
    # Fetch the tool classpath and re-root the source concurrently.
    tool_classpath, prefixed_source_files_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(lockfile=parser_lockfile_request),
        ),
        Get(Digest, AddPrefix(source_files.snapshot.digest, source_prefix)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }

    analysis_output_path = "__source_analysis.json"

    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                processorcp_relpath,
            ],
            argv=[
                "org.pantsbuild.javaparser.PantsJavaParserLauncher",
                analysis_output_path,
                source_path,
            ],
            input_digest=prefixed_source_files_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            output_files=(analysis_output_path, ),
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Analyzing {source_files.files[0]}",
            level=LogLevel.DEBUG,
        ),
    )

    # Fallible: the caller inspects the wrapped process result for failure.
    return FallibleJavaSourceDependencyAnalysisResult(
        process_result=process_result)
Пример #9
0
async def generate_import_config(
    request: GatherImportsRequest, stdlib_imports: GoStdLibImports, goroot: GoRoot
) -> GatheredImports:
    """Build a Go `importcfg` file plus the digests of all referenced packages.

    Each package's object digest is namespaced under `__pkgs__/<fingerprint>`
    to avoid collisions, an `importcfg` mapping import paths to those `.a`
    files is written, and everything is merged into one digest.
    """
    import_config_digests: dict[str, tuple[str, Digest]] = {}
    # NOTE(review): these awaits run sequentially per package; a concurrent
    # fan-out (MultiGet) would likely be faster — confirm before changing.
    for pkg in request.packages:
        fp = pkg.object_digest.fingerprint
        prefixed_digest = await Get(Digest, AddPrefix(pkg.object_digest, f"__pkgs__/{fp}"))
        import_config_digests[pkg.import_path] = (fp, prefixed_digest)

    pkg_digests: OrderedSet[Digest] = OrderedSet()

    import_config = ["# import config"]
    for import_path, (fp, digest) in import_config_digests.items():
        pkg_digests.add(digest)
        import_config.append(f"packagefile {import_path}=__pkgs__/{fp}/__pkg__.a")

    if request.include_stdlib:
        pkg_digests.add(goroot.digest)
        import_config.extend(
            f"packagefile {import_path}={os.path.normpath(static_file_path)}"
            for import_path, static_file_path in stdlib_imports.items()
        )

    import_config_content = "\n".join(import_config).encode("utf-8")
    import_config_digest = await Get(
        Digest, CreateDigest([FileContent("./importcfg", import_config_content)])
    )
    pkg_digests.add(import_config_digest)

    digest = await Get(Digest, MergeDigests(pkg_digests))
    return GatheredImports(digest=digest)
Пример #10
0
async def generate_python_from_thrift(
    request: GeneratePythonFromThriftRequest,
    thrift_python: ThriftPythonSubsystem,
) -> GeneratedSources:
    """Generate Python sources from a Thrift source target, restoring the
    source root so the generated modules are importable."""
    result = await Get(
        GeneratedThriftSources,
        GenerateThriftSourcesRequest(
            thrift_source_field=request.protocol_target[ThriftSourceField],
            lang_id="py",
            lang_options=thrift_python.gen_options,
            lang_name="Python",
        ),
    )

    # We must add back the source root for Python imports to work properly. Note that the file
    # paths will be different depending on whether `namespace py` was used. See the tests for
    # examples.
    source_root = await Get(
        SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)
    )
    # "." means repo root: no prefix needed, just re-snapshot the digest.
    source_root_restored = (
        await Get(Snapshot, AddPrefix(result.snapshot.digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, result.snapshot.digest)
    )
    return GeneratedSources(source_root_restored)
Пример #11
0
async def relocate_files(
        request: RelocateFilesViaCodegenRequest) -> GeneratedSources:
    """Relocate `files` sources by stripping `src` and prepending `dest`."""
    # Unlike normal codegen, we operate the on the sources of the `files_targets` field, not the
    # `sources` of the original `relocated_sources` target.
    # TODO(#13086): Because we're using `Targets` instead of `UnexpandedTargets`, the
    #  `files` target generator gets replaced by its generated `file` targets. That replacement is
    #  necessary because we only hydrate sources for `FileSourcesField`, which is only for the
    #  `file` target.  That's really subtle!
    original_file_targets = await Get(
        Targets,
        UnparsedAddressInputs,
        request.protocol_target.get(
            RelocatedFilesOriginalTargetsField).to_unparsed_address_inputs(),
    )
    original_files_sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(tgt.get(SourcesField),
                                  for_sources_types=(FileSourceField, )),
        ) for tgt in original_file_targets)
    snapshot = await Get(
        Snapshot,
        MergeDigests(sources.snapshot.digest
                     for sources in original_files_sources))

    # Apply the relocation: drop the `src` prefix (if set), then prepend the
    # `dest` prefix (if set). Either may be empty/None independently.
    src_val = request.protocol_target.get(RelocatedFilesSrcField).value
    dest_val = request.protocol_target.get(RelocatedFilesDestField).value
    if src_val:
        snapshot = await Get(Snapshot, RemovePrefix(snapshot.digest, src_val))
    if dest_val:
        snapshot = await Get(Snapshot, AddPrefix(snapshot.digest, dest_val))
    return GeneratedSources(snapshot)
Пример #12
0
async def _jvm_bsp_resources(
    request: BSPResourcesRequest,
    build_root: BuildRoot,
) -> BSPResourcesResult:
    """Generically handles a BSPResourcesRequest (subclass).

    This is a `@rule_helper` rather than a `@rule` for the same reason as `_jvm_bsp_compile`.

    Gathers (stripped) resource sources for the closure of the requested field
    sets and re-roots them under the BSP target's resources directory.
    """
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses([fs.address for fs in request.field_sets]))

    # Codegen is enabled so generated resources are included too.
    source_files = await Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            [tgt.get(SourcesField) for tgt in coarsened_targets.closure()],
            for_sources_types=(ResourceSourceField, ),
            enable_codegen=True,
        ),
    )

    rel_resources_dir = _jvm_resources_directory(
        request.bsp_target.bsp_target_id)
    output_digest = await Get(
        Digest,
        AddPrefix(source_files.snapshot.digest, rel_resources_dir),
    )

    return BSPResourcesResult(
        resources=(
            # NB: IntelliJ requires that directory URIs end in slashes.
            build_root.pathlib_path.joinpath(
                ".pants.d/bsp", rel_resources_dir).as_uri() + "/", ),
        output_digest=output_digest,
    )
Пример #13
0
async def relocate_files(request: RelocateFilesViaCodegenRequest) -> GeneratedSources:
    """Relocate `files` sources by stripping `src` and prepending `dest`."""
    # Unlike normal codegen, we operate the on the sources of the `files_targets` field, not the
    # `sources` of the original `relocated_sources` target.
    # TODO(#10915): using `await Get(Addresses, UnparsedAddressInputs)` causes a graph failure.
    original_files_targets = await MultiGet(
        Get(
            WrappedTarget,
            AddressInput,
            AddressInput.parse(v, relative_to=request.protocol_target.address.spec_path),
        )
        for v in (
            request.protocol_target.get(RelocatedFilesOriginalTargets)
            .to_unparsed_address_inputs()
            .values
        )
    )
    original_files_sources = await MultiGet(
        Get(HydratedSources, HydrateSourcesRequest(wrapped_tgt.target.get(Sources)))
        for wrapped_tgt in original_files_targets
    )
    snapshot = await Get(
        Snapshot, MergeDigests(sources.snapshot.digest for sources in original_files_sources)
    )

    # Apply the relocation: drop the `src` prefix (if set), then prepend the
    # `dest` prefix (if set). Either may be empty/None independently.
    src_val = request.protocol_target.get(RelocatedFilesSrcField).value
    dest_val = request.protocol_target.get(RelocatedFilesDestField).value
    if src_val:
        snapshot = await Get(Snapshot, RemovePrefix(snapshot.digest, src_val))
    if dest_val:
        snapshot = await Get(Snapshot, AddPrefix(snapshot.digest, dest_val))
    return GeneratedSources(snapshot)
Пример #14
0
async def handle_bsp_scalac_options_request(
    request: HandleScalacOptionsRequest,
    build_root: BuildRoot,
    workspace: Workspace,
) -> HandleScalacOptionsResult:
    """Answer a BSP `buildTarget/scalacOptions` request for one build target.

    Materializes the target's third-party resolve under
    `.pants.d/bsp/jvm/resolves/<resolve>/lib` and reports one classpath URI per
    classpath-entry file, plus the target's class directory.
    """
    targets = await Get(Targets, BuildTargetIdentifier, request.bsp_target_id)
    thirdparty_modules = await Get(
        ThirdpartyModules,
        ThirdpartyModulesRequest(Addresses(tgt.address for tgt in targets)))
    resolve = thirdparty_modules.resolve

    resolve_digest = await Get(
        Digest,
        AddPrefix(thirdparty_modules.merged_digest,
                  f"jvm/resolves/{resolve.name}/lib"))

    workspace.write_digest(resolve_digest, path_prefix=".pants.d/bsp")

    # Bug fix: each URI must name the materialized jar itself. Previously the
    # path ended in a literal `(unknown)` and the `filename` loop variable was
    # never used, so every entry pointed at a nonexistent path.
    classpath = tuple(
        build_root.pathlib_path.joinpath(
            f".pants.d/bsp/jvm/resolves/{resolve.name}/lib/{filename}").as_uri()
        for cp_entry in thirdparty_modules.entries.values()
        for filename in cp_entry.filenames)

    return HandleScalacOptionsResult(
        ScalacOptionsItem(
            target=request.bsp_target_id,
            options=(),
            classpath=classpath,
            class_directory=build_root.pathlib_path.joinpath(
                f".pants.d/bsp/{jvm_classes_directory(request.bsp_target_id)}"
            ).as_uri(),
        ))
Пример #15
0
async def materialize_classpath(
        request: MaterializedClasspathRequest) -> MaterializedClasspath:
    """Resolve, fetch, and merge various classpath types to a single `Digest` and metadata."""

    # Resolve each set of artifact requirements to a lockfile, concurrently.
    artifact_requirements_lockfiles = await MultiGet(
        Get(CoursierResolvedLockfile, ArtifactRequirements,
            artifact_requirements)
        for artifact_requirements in request.artifact_requirements)
    # Fetch entries for both the explicitly-supplied lockfiles and the ones
    # just resolved above.
    lockfile_and_requirements_classpath_entries = await MultiGet(
        Get(
            ResolvedClasspathEntries,
            CoursierResolvedLockfile,
            lockfile,
        )
        for lockfile in (*request.lockfiles, *artifact_requirements_lockfiles))
    all_classpath_entries = (
        *lockfile_and_requirements_classpath_entries,
        *request.resolved_classpaths,
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(classpath_entry.digest
                     for classpath_entries in all_classpath_entries
                     for classpath_entry in classpath_entries),
    )
    # Optionally re-root the merged classpath under the requested prefix.
    if request.prefix is not None:
        merged_digest = await Get(Digest,
                                  AddPrefix(merged_digest, request.prefix))

    file_names = tuple(classpath_entry.file_name
                       for classpath_entries in all_classpath_entries
                       for classpath_entry in classpath_entries)
    return MaterializedClasspath(prefix=request.prefix,
                                 digest=merged_digest,
                                 file_names=file_names)
Пример #16
0
async def classpath(
    coarsened_targets: CoarsenedTargets,
    union_membership: UnionMembership,
) -> Classpath:
    """Compute the transitive classpath for the given coarsened targets and
    re-root it under the user-classpath directory."""
    targets = Targets(t for ct in coarsened_targets.closure()
                      for t in ct.members)

    resolve = await Get(CoursierResolveKey, Targets, targets)

    # One classpath entry per coarsened component in the transitive closure,
    # requested concurrently.
    transitive_classpath_entries = await MultiGet(
        Get(
            ClasspathEntry,
            ClasspathEntryRequest,
            ClasspathEntryRequest.for_targets(
                union_membership, component=t, resolve=resolve),
        ) for t in coarsened_targets.closure())
    merged_transitive_classpath_entries_digest = await Get(
        Digest,
        MergeDigests(classfiles.digest
                     for classfiles in transitive_classpath_entries))

    # Namespace everything under the user-classpath relative path.
    return Classpath(await Get(
        Snapshot,
        AddPrefix(merged_transitive_classpath_entries_digest,
                  _USERCP_RELPATH)))
Пример #17
0
async def generate_java_from_wsdl(
        request: GenerateJavaFromWsdlRequest) -> GeneratedSources:
    """Generate Java sources from WSDL files, restoring the source root so the
    generated classes live under it."""
    sources = await Get(
        HydratedSources,
        HydrateSourcesRequest(request.protocol_target[WsdlSourceField]))

    # Compile every WSDL file in the snapshot concurrently.
    target_package = request.protocol_target[JavaPackageField].value
    compile_results = await MultiGet(
        Get(
            CompiledWsdlSource,
            CompileWsdlSourceRequest(
                sources.snapshot.digest,
                path=path,
                module=request.protocol_target[JavaModuleField].value,
                package=target_package,
            ),
        ) for path in sources.snapshot.files)

    merged_output_digests, source_root = await MultiGet(
        Get(Digest, MergeDigests([r.output_digest for r in compile_results])),
        Get(SourceRoot, SourceRootRequest,
            SourceRootRequest.for_target(request.protocol_target)),
    )

    # "." means repo root: no prefix needed, just re-snapshot the digest.
    source_root_restored = (await Get(
        Snapshot, AddPrefix(merged_output_digests, source_root.path))
                            if source_root.path != "." else await Get(
                                Snapshot, Digest, merged_output_digests))
    return GeneratedSources(source_root_restored)
Пример #18
0
async def materialize_classpath(
        request: MaterializedClasspathRequest) -> MaterializedClasspath:
    """Resolve, fetch, and merge various classpath types to a single `Digest` and metadata."""

    # Resolve each set of artifact requirements to a lockfile, concurrently.
    artifact_requirements_lockfiles = await MultiGet(
        Get(CoursierResolvedLockfile, ArtifactRequirements,
            artifact_requirements)
        for artifact_requirements in request.artifact_requirements)
    # Fetch entries for both the explicitly-supplied lockfiles and the ones
    # just resolved above.
    lockfile_and_requirements_classpath_entries = await MultiGet(
        Get(
            ResolvedClasspathEntries,
            CoursierResolvedLockfile,
            lockfile,
        )
        for lockfile in (*request.lockfiles, *artifact_requirements_lockfiles))
    merged_snapshot = await Get(
        Snapshot,
        MergeDigests(classpath_entry.digest for classpath_entries in
                     lockfile_and_requirements_classpath_entries
                     for classpath_entry in classpath_entries),
    )
    # Optionally re-root the merged classpath under the requested prefix.
    if request.prefix is not None:
        merged_snapshot = await Get(
            Snapshot, AddPrefix(merged_snapshot.digest, request.prefix))
    return MaterializedClasspath(content=merged_snapshot)
Пример #19
0
async def merge_coverage_data(
        data_collection: PytestCoverageDataCollection,
        coverage_setup: CoverageSetup) -> MergedCoverageData:
    """Combine per-address `.coverage` files into a single one via
    `coverage combine`.

    With exactly one datum there is nothing to merge, so its digest is
    returned as-is.
    """
    if len(data_collection) == 1:
        return MergedCoverageData(data_collection[0].digest)
    # We prefix each .coverage file with its corresponding address to avoid collisions.
    coverage_digests = await MultiGet(
        Get(Digest, AddPrefix(data.digest, prefix=data.address.path_safe_spec))
        for data in data_collection)
    input_digest = await Get(
        Digest, MergeDigests((*coverage_digests, coverage_setup.pex.digest)))
    # Sorted for a deterministic argv (and thus cacheable process).
    prefixes = sorted(f"{data.address.path_safe_spec}/.coverage"
                      for data in data_collection)
    result = await Get(
        ProcessResult,
        PexProcess(
            coverage_setup.pex,
            argv=("combine", *prefixes),
            input_digest=input_digest,
            output_files=(".coverage", ),
            description=f"Merge {len(prefixes)} Pytest coverage reports.",
            level=LogLevel.DEBUG,
        ),
    )
    return MergedCoverageData(result.output_digest)
Пример #20
0
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Package a `python_distribution` target.

    If `setup_py_commands` is set, runs setup.py with those commands and
    returns the produced dists; otherwise just dumps the setup.py chroot under
    `<name>-<version>/`.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    # The root of the transitive closure is the distribution's exported target.
    exported_target = ExportedTarget(transitive_targets.roots[0])
    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    )
    chroot = await Get(
        SetupPyChroot,
        SetupPyChrootRequest(exported_target, py2=interpreter_constraints.includes_python2()),
    )

    # If commands were provided, run setup.py with them; Otherwise just dump chroots.
    commands = exported_target.target.get(SetupPyCommandsField).value or ()
    if commands:
        validate_commands(commands)
        setup_py_result = await Get(
            RunSetupPyResult,
            RunSetupPyRequest(exported_target, interpreter_constraints, chroot, commands),
        )
        # One artifact per file that setup.py produced.
        dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
        return BuiltPackage(
            setup_py_result.output,
            tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
        )
    else:
        dirname = f"{chroot.setup_kwargs.name}-{chroot.setup_kwargs.version}"
        rel_chroot = await Get(Digest, AddPrefix(chroot.digest, dirname))
        return BuiltPackage(rel_chroot, (BuiltPackageArtifact(dirname),))
Пример #21
0
async def _jvm_bsp_compile(
        request: BSPCompileRequest,
        classpath_entry_request: ClasspathEntryRequestFactory
) -> BSPCompileResult:
    """Generically handles a BSPCompileRequest (subclass).

    This is a `@rule_helper` rather than a `@rule`, because BSP backends like `java` and `scala`
    independently declare their `BSPCompileRequest` union members. We can't register a single shared
    `BSPCompileRequest` @union member for all JVM because their FieldSets are also declared via
    @unions, and we can't forward the implementation of a @union to another the way we might with
    an abstract class.

    Returns an ERROR result with an empty digest if any component fails to
    compile; otherwise merges all loose classfiles under the BSP target's
    classes directory.
    """
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses([fs.address for fs in request.field_sets]))
    resolve = await Get(CoursierResolveKey, CoarsenedTargets,
                        coarsened_targets)

    # TODO: We include the (non-3rdparty) transitive dependencies here, because each project
    # currently only has a single BuildTarget. This has the effect of including `resources` targets,
    # which are referenced by BuildTargets (via `buildTarget/resources`), rather than necessarily
    # being owned by any particular BuildTarget.
    #
    # To resolve #15051, this will no longer be transitive, and so `resources` will need to be
    # attached-to/referenced-by nearby BuildTarget(s) instead (most likely: direct dependent(s)).
    results = await MultiGet(
        Get(
            FallibleClasspathEntry,
            BSPClasspathEntryRequest(
                classpath_entry_request.for_targets(component=coarsened_target,
                                                    resolve=resolve),
                task_id=request.task_id,
            ),
        ) for coarsened_target in coarsened_targets.coarsened_closure()
        # Skip components that are pure third-party artifacts.
        if not any(
            JvmArtifactFieldSet.is_applicable(t)
            for t in coarsened_target.members))

    # `if_all_succeeded` returns None when any component failed.
    entries = FallibleClasspathEntry.if_all_succeeded(results)
    if entries is None:
        return BSPCompileResult(
            status=StatusCode.ERROR,
            output_digest=EMPTY_DIGEST,
        )

    loose_classfiles = await MultiGet(
        Get(LooseClassfiles, ClasspathEntry, entry) for entry in entries)
    merged_loose_classfiles = await Get(
        Digest, MergeDigests(lc.digest for lc in loose_classfiles))
    output_digest = await Get(
        Digest,
        AddPrefix(merged_loose_classfiles,
                  jvm_classes_directory(request.bsp_target.bsp_target_id)),
    )

    return BSPCompileResult(
        status=StatusCode.OK,
        output_digest=output_digest,
    )
Пример #22
0
async def analyze_scala_source_dependencies(
    jdk: InternalJdk,
    processor_classfiles: ScalaParserCompiledClassfiles,
    source_files: SourceFiles,
) -> FallibleScalaSourceDependencyAnalysisResult:
    """Run the Scala parser over exactly one source file and capture its
    dependency-analysis JSON.

    Raises:
        ValueError: if `source_files` contains zero or more than one file.
    """
    if len(source_files.files) > 1:
        # NOTE(review): the condition tests `source_files.files` but the
        # message reports `len(source_files.snapshot.files)` — confirm these
        # are always the same length.
        raise ValueError(
            f"analyze_scala_source_dependencies expects sources with exactly 1 source file, but found {len(source_files.snapshot.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_scala_source_dependencies expects sources with exactly 1 source file, but found none."
        )
    # Sandbox-relative directories used to isolate the source from the tool
    # and processor classpaths.
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"

    # Fetch the tool classpath and re-root the source concurrently.
    (tool_classpath, prefixed_source_files_digest,) = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS),
        ),
        Get(Digest, AddPrefix(source_files.snapshot.digest, source_prefix)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }

    analysis_output_path = "__source_analysis.json"

    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                processorcp_relpath,
            ],
            argv=[
                "org.pantsbuild.backend.scala.dependency_inference.ScalaParser",
                analysis_output_path,
                source_path,
            ],
            input_digest=prefixed_source_files_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            output_files=(analysis_output_path,),
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Analyzing {source_files.files[0]}",
            level=LogLevel.DEBUG,
        ),
    )

    # Fallible: the caller inspects the wrapped process result for failure.
    return FallibleScalaSourceDependencyAnalysisResult(process_result=process_result)
Пример #23
0
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Build the wheel and/or sdist for a `python_distribution` target.

    Resolves the target's transitive closure, materializes a build chroot,
    invokes the configured build system inside that chroot, and captures the
    produced distribution files as package artifacts.

    Raises:
        NoDistTypeSelected: If neither the wheel nor the sdist field is enabled.
    """
    transitive = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported = ExportedTarget(transitive.roots[0])
    dist_target = exported.target

    want_wheel = dist_target.get(WheelField).value
    want_sdist = dist_target.get(SDistField).value
    if not (want_wheel or want_sdist):
        raise NoDistTypeSelected(
            f"In order to package {dist_target.address.spec} at least one of {WheelField.alias!r} or "
            f"{SDistField.alias!r} must be `True`."
        )

    wheel_config_settings = dist_target.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_target.get(SDistConfigSettingsField).value or FrozenDict()

    constraints = InterpreterConstraints.create_from_targets(transitive.closure, python_setup)
    if not constraints:
        constraints = InterpreterConstraints(python_setup.interpreter_constraints)

    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(exported, py2=constraints.includes_python2()),
    )

    # Prefix the entire chroot and run with that prefix as the cwd, so that any
    # changes setup makes inside it are captured without also capturing other
    # artifacts of the pex process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_chroot = await Get(Digest, AddPrefix(chroot.digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_chroot, working_directory))

    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=constraints,
            build_wheel=want_wheel,
            build_sdist=want_sdist,
            input=prefixed_chroot,
            working_directory=working_directory,
            target_address_spec=exported.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
        ),
    )
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )
Пример #24
0
async def materialize_classpath_for_tool(request: ToolClasspathRequest) -> ToolClasspath:
    """Resolve and materialize the classpath for a JVM tool.

    Resolves either from the request's explicit artifact requirements or from
    its lockfile (validating that the lockfile still matches the configured
    artifact inputs), fetches the resolved classpath entries, and merges them
    into a single snapshot, optionally nested under ``request.prefix``.

    Raises:
        ValueError: If the configured lockfile is missing or was generated from
            different requirements than are currently configured.
    """
    if request.artifact_requirements:
        resolution = await Get(
            CoursierResolvedLockfile, ArtifactRequirements, request.artifact_requirements
        )
    else:
        lockfile_req = request.lockfile
        assert lockfile_req is not None
        regen_command = f"`{GenerateLockfilesSubsystem.name} --resolve={lockfile_req.resolve_name}`"
        if lockfile_req.read_lockfile_dest == DEFAULT_TOOL_LOCKFILE:
            # The default lockfile ships as a package resource rather than a
            # file in the repository.
            lockfile_bytes = importlib.resources.read_binary(
                *lockfile_req.default_lockfile_resource
            )
            resolution = CoursierResolvedLockfile.from_serialized(lockfile_bytes)
        else:
            lockfile_snapshot = await Get(Snapshot, PathGlobs([lockfile_req.read_lockfile_dest]))
            if not lockfile_snapshot.files:
                # Fix: the original message lacked a space between the option
                # name sentence and "Run ...", rendering as "...option_name.Run".
                raise ValueError(
                    f"No lockfile found at {lockfile_req.read_lockfile_dest}, which is configured "
                    f"by the option {lockfile_req.lockfile_option_name}. "
                    f"Run {regen_command} to generate it."
                )
            resolution = await Get(
                CoursierResolvedLockfile,
                CoursierResolveKey(
                    name=lockfile_req.resolve_name,
                    path=lockfile_req.read_lockfile_dest,
                    digest=lockfile_snapshot.digest,
                ),
            )

        # Validate that the lockfile was generated from the currently-configured
        # artifact inputs; a mismatch means it is stale and must be regenerated.
        lockfile_inputs = await Get(
            ArtifactRequirements,
            GatherJvmCoordinatesRequest(
                lockfile_req.artifact_inputs, lockfile_req.artifact_option_name
            ),
        )
        if resolution.metadata and not resolution.metadata.is_valid_for(
            lockfile_inputs, LockfileContext.TOOL
        ):
            raise ValueError(
                f"The lockfile {lockfile_req.read_lockfile_dest} (configured by the option "
                f"{lockfile_req.lockfile_option_name}) was generated with different requirements "
                f"than are currently set via {lockfile_req.artifact_option_name}. Run "
                f"{regen_command} to regenerate the lockfile."
            )

    classpath_entries = await Get(ResolvedClasspathEntries, CoursierResolvedLockfile, resolution)
    merged_snapshot = await Get(
        Snapshot, MergeDigests(entry.digest for entry in classpath_entries)
    )
    if request.prefix is not None:
        merged_snapshot = await Get(Snapshot, AddPrefix(merged_snapshot.digest, request.prefix))
    return ToolClasspath(merged_snapshot)
Пример #25
0
async def materialize_jvm_plugins(
    request: MaterializeJvmPluginsRequest,
) -> MaterializedJvmPlugins:
    """Materialize every requested JVM plugin and merge their classpaths.

    Each plugin's classpath digest is nested under a directory named after the
    plugin before merging, so the plugins' files do not collide.
    """
    plugins = await MultiGet(
        Get(MaterializedJvmPlugin, MaterializeJvmPluginRequest(p)) for p in request.plugins
    )
    prefixed_digests = await MultiGet(
        Get(Digest, AddPrefix(plugin.classpath.digest, plugin.name)) for plugin in plugins
    )
    merged_digest = await Get(Digest, MergeDigests(prefixed_digests))
    return MaterializedJvmPlugins(merged_digest, plugins)
Пример #26
0
async def export(
    console: Console,
    targets: Targets,
    workspace: Workspace,
    union_membership: UnionMembership,
    build_root: BuildRoot,
    dist_dir: DistDir,
) -> Export:
    """Write export data for the given targets into `<dist_dir>/export`.

    Gathers `ExportResults` from every registered `ExportRequest` union member,
    writes each result's digest (prefixed with its relative directory) into the
    export output directory, then runs any post-processing commands the results
    declare and reports each written export on the console.
    """
    request_types = cast("Iterable[type[ExportRequest]]", union_membership.get(ExportRequest))
    all_results = await MultiGet(
        Get(ExportResults, ExportRequest, request_type(targets))
        for request_type in request_types
    )
    results = [result for batch in all_results for result in batch]

    prefixed_digests = await MultiGet(
        Get(Digest, AddPrefix(result.digest, result.reldir)) for result in results
    )
    output_dir = os.path.join(str(dist_dir.relpath), "export")
    merged_digest = await Get(Digest, MergeDigests(prefixed_digests))
    workspace.write_digest(await Get(Digest, AddPrefix(merged_digest, output_dir)))

    environment = await Get(Environment, EnvironmentRequest(["PATH"]))
    for result in results:
        digest_root = os.path.join(build_root.path, output_dir, result.reldir)
        for cmd in result.post_processing_cmds:
            # Post-processing runs in the workspace so it can operate on the
            # files just written under `digest_root`.
            await Effect(
                InteractiveProcessResult,
                InteractiveProcess,
                InteractiveProcess(
                    argv=tuple(arg.format(digest_root=digest_root) for arg in cmd.argv),
                    env={"PATH": environment.get("PATH", ""), **cmd.extra_env},
                    run_in_workspace=True,
                ),
            )
        console.print_stdout(
            f"Wrote {result.description} to {os.path.join(output_dir, result.reldir)}"
        )
    return Export(exit_code=0)
Пример #27
0
async def run_python_test(
    field_set: PythonTestFieldSet,
    test_setup: TestTargetSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
    global_options: GlobalOptions,
    test_options: TestOptions,
) -> TestResult:
    """Run pytest for a single test target and collect its results.

    Captures coverage data and JUnit XML output from the process sandbox when
    those features are enabled.
    """
    color_flag = "yes" if global_options.options.colors else "no"
    pytest_opts = [f"--color={color_flag}"]
    if test_setup.xml_dir:
        test_results_file = f"{field_set.address.path_safe_spec}.xml"
        pytest_opts.append(f"--junitxml={test_results_file}")
        pytest_opts.append(f"-o junit_family={test_setup.junit_family}")
    env = {"PYTEST_ADDOPTS": " ".join(pytest_opts)}

    use_coverage = test_options.values.use_coverage
    output_dirs = []
    if use_coverage:
        output_dirs.append(".coverage")
    if test_setup.xml_dir:
        output_dirs.append(test_results_file)

    process = test_setup.test_runner_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{test_setup.test_runner_pex.output_filename}",
        pex_args=test_setup.args,
        input_digest=test_setup.input_digest,
        output_directories=tuple(output_dirs) if output_dirs else None,
        description=f"Run Pytest for {field_set.address.reference()}",
        timeout_seconds=test_setup.timeout_seconds,
        env=env,
    )
    result = await Get[FallibleProcessResult](Process, process)

    coverage_data = None
    if use_coverage:
        cov_snapshot = await Get[Snapshot](
            SnapshotSubset(result.output_digest, PathGlobs([".coverage"]))
        )
        coverage_data = PytestCoverageData(field_set.address, cov_snapshot.digest)

    xml_results_digest = None
    if test_setup.xml_dir:
        xml_snapshot = await Get[Snapshot](
            SnapshotSubset(result.output_digest, PathGlobs([test_results_file]))
        )
        xml_results_digest = await Get[Digest](
            AddPrefix(xml_snapshot.digest, test_setup.xml_dir)
        )

    return TestResult.from_fallible_process_result(
        result, coverage_data=coverage_data, xml_results=xml_results_digest
    )
Пример #28
0
 def test_add_prefix(self) -> None:
     """AddPrefix should nest every file in the digest under the new directory."""
     contents = InputFilesContent((
         FileContent(path="main.py", content=b'print("from main")'),
         FileContent(path="subdir/sub.py", content=b'print("from sub")'),
     ))
     digest = self.request_single_product(Digest, contents)
     prefixed = self.request_single_product(Digest, AddPrefix(digest, "outer_dir"))
     snapshot = self.request_single_product(Snapshot, prefixed)
     assert sorted(snapshot.files) == ["outer_dir/main.py", "outer_dir/subdir/sub.py"]
     assert sorted(snapshot.dirs) == ["outer_dir", "outer_dir/subdir"]
Пример #29
0
async def run_python_test(
    field_set: PythonTestFieldSet, test_subsystem: TestSubsystem, pytest: PyTest
) -> TestResult:
    """Execute pytest for a single target, skipping conftest files and stubs.

    Collects coverage data, JUnit XML results, and any extra output produced
    under the extra-output directory of the process sandbox.
    """
    if field_set.is_conftest_or_type_stub():
        return TestResult.skip(field_set.address)

    setup = await Get(TestSetup, TestSetupRequest(field_set, is_debug=False))
    result = await Get(FallibleProcessResult, Process, setup.process)

    coverage_data = None
    if test_subsystem.use_coverage:
        cov_snapshot = await Get(
            Snapshot, DigestSubset(result.output_digest, PathGlobs([".coverage"]))
        )
        if cov_snapshot.files == (".coverage",):
            coverage_data = PytestCoverageData(field_set.address, cov_snapshot.digest)
        else:
            logger.warning(f"Failed to generate coverage data for {field_set.address}.")

    xml_results_snapshot = None
    if setup.results_file_name:
        xml_results_snapshot = await Get(
            Snapshot,
            DigestSubset(result.output_digest, PathGlobs([setup.results_file_name])),
        )
        if xml_results_snapshot.files == (setup.results_file_name,):
            # Relocate the XML report under the configured junit_xml_dir.
            xml_results_snapshot = await Get(
                Snapshot,
                AddPrefix(xml_results_snapshot.digest, pytest.options.junit_xml_dir),
            )
        else:
            logger.warning(f"Failed to generate JUnit XML data for {field_set.address}.")

    extra_output_snapshot = await Get(
        Snapshot,
        DigestSubset(result.output_digest, PathGlobs([f"{_EXTRA_OUTPUT_DIR}/**"])),
    )
    extra_output_snapshot = await Get(
        Snapshot, RemovePrefix(extra_output_snapshot.digest, _EXTRA_OUTPUT_DIR)
    )

    return TestResult.from_fallible_process_result(
        result,
        address=field_set.address,
        coverage_data=coverage_data,
        xml_results=xml_results_snapshot,
        extra_output=extra_output_snapshot,
    )
Пример #30
0
async def run_shell_command(
    request: GenerateFilesFromShellCommandRequest,
) -> GeneratedSources:
    """Run a `shell_command` target's process and capture its output as sources."""
    target = request.protocol_target
    result = await Get(ProcessResult, ShellCommandProcessRequest(target))

    if target[ShellCommandLogOutputField].value:
        if result.stdout:
            logger.info(result.stdout.decode())
        if result.stderr:
            logger.warning(result.stderr.decode())

    # Re-root the captured outputs under the target's directory in the workspace.
    output_snapshot = await Get(
        Snapshot, AddPrefix(result.output_digest, target.address.spec_path)
    )
    return GeneratedSources(output_snapshot)