Example 1
 def from_json_dict(cls, d: dict[str, Any]) -> Any:
     return cls(
         targets=tuple(
             BuildTargetIdentifier.from_json_dict(x) for x in d["targets"]),
         origin_id=d.get("originId"),
         arguments=tuple(d["arguments"]) if "arguments" in d else None,
     )
Example 2
 def from_json_dict(cls, d):
     return cls(
         target=BuildTargetIdentifier.from_json_dict(d["target"]),
         options=tuple(d["options"]),
         classpath=tuple(d["classpath"]),
         class_directory=d["classDirectory"],
     )
Example 3
 def from_json_dict(cls, d: dict[str, Any]) -> Any:
     return cls(
         target=BuildTargetIdentifier.from_json_dict(d["target"]),
         origin_id=d.get("originId"),
         errors=d["errors"],
         warnings=d["warnings"],
         time=d.get("time"),
         no_op=d.get("noOp"),
     )
Example 4
def test_resolve_filtering(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "lib/Example1.java":
        "",
        "lib/Example2.java":
        "",
        "lib/BUILD":
        textwrap.dedent("""\
                java_source(name='lib1', source='Example1.java')

                java_source(
                    name='lib2',
                    source='Example2.java',
                    resolve=parametrize('jvm-default', 'other')
                )
                """),
        "bsp.toml":
        textwrap.dedent("""\
                [groups.lib_jvm_default]
                base_directory = "lib"
                addresses = ["lib::"]
                resolve = "jvm:jvm-default"

                [groups.lib_other]
                base_directory = "lib"
                addresses = ["lib::"]
                resolve = "jvm:other"
                """),
    })
    rule_runner.set_options([
        "--experimental-bsp-groups-config-files=['bsp.toml']",
        "--jvm-resolves={'jvm-default': 'unused', 'other': 'unused'}",
    ])

    targets = rule_runner.request(
        Targets, [BuildTargetIdentifier("pants:lib_jvm_default")])
    assert {"lib:lib1", "lib:lib2@resolve=jvm-default"
            } == {str(t.address)
                  for t in targets}

    targets = rule_runner.request(Targets,
                                  [BuildTargetIdentifier("pants:lib_other")])
    assert {"lib:lib2@resolve=other"} == {str(t.address) for t in targets}
Example 5
 def from_json_dict(cls, d):
     return cls(
         targets=tuple(BuildTargetIdentifier.from_json_dict(x) for x in d["targets"]),
     )
Example 6
async def generate_one_bsp_build_target_request(
    request: GenerateOneBSPBuildTargetRequest,
    union_membership: UnionMembership,
    build_root: BuildRoot,
) -> GenerateOneBSPBuildTargetResult:
    # Find all Pants targets that are part of this BSP build target.
    targets = await Get(Targets, AddressSpecs,
                        request.bsp_target.specs.address_specs)

    # Classify the targets by the language backends that claim to provide metadata for them.
    field_sets_by_lang_id: dict[str,
                                OrderedSet[FieldSet]] = defaultdict(OrderedSet)
    # lang_ids_by_field_set: dict[Type[FieldSet], set[str]] = defaultdict(set)
    metadata_request_types: FrozenOrderedSet[
        Type[BSPBuildTargetsMetadataRequest]] = union_membership.get(
            BSPBuildTargetsMetadataRequest)
    metadata_request_types_by_lang_id = {
        metadata_request_type.language_id: metadata_request_type
        for metadata_request_type in metadata_request_types
    }
    for tgt in targets:
        for metadata_request_type in metadata_request_types:
            field_set_type: Type[
                FieldSet] = metadata_request_type.field_set_type
            if field_set_type.is_applicable(tgt):
                field_sets_by_lang_id[metadata_request_type.language_id].add(
                    field_set_type.create(tgt))
                # lang_ids_by_field_set[field_set_type].add(metadata_request_type.language_id)

    # TODO: Consider how to check whether the provided languages are compatible or whether
    # compatible resolves are selected.

    # Request each language backend to provide metadata for the BuildTarget.
    metadata_results = await MultiGet(
        Get(
            BSPBuildTargetsMetadataResult,
            BSPBuildTargetsMetadataRequest,
            metadata_request_types_by_lang_id[lang_id](
                field_sets=tuple(field_sets)),
        ) for lang_id, field_sets in field_sets_by_lang_id.items())
    metadata_results_by_lang_id = {
        lang_id: metadata_result
        for lang_id, metadata_result in zip(field_sets_by_lang_id.keys(),
                                            metadata_results)
    }

    # Pretend to merge the metadata into a single piece of metadata, but really just choose the metadata
    # from the last provider.
    metadata_merge_order = find_metadata_merge_order([
        metadata_request_types_by_lang_id[lang_id]
        for lang_id in field_sets_by_lang_id.keys()
    ])
    # TODO: None if no metadata obtained.
    metadata = metadata_results_by_lang_id[
        metadata_merge_order[-1].language_id].metadata
    digest = await Get(Digest,
                       MergeDigests([r.digest for r in metadata_results]))

    # Determine "base directory" for this build target using source roots.
    # TODO: This actually has nothing to do with source roots. It should probably be computed as an ancestor
    # directory or else be configurable by the user. It is used as a hint in IntelliJ for where to place the
    # corresponding IntelliJ module.
    source_info = await Get(BSPBuildTargetSourcesInfo, BSPBuildTargetInternal,
                            request.bsp_target)
    if source_info.source_roots:
        roots = [
            build_root.pathlib_path.joinpath(p)
            for p in source_info.source_roots
        ]
    else:
        roots = [build_root.pathlib_path]

    return GenerateOneBSPBuildTargetResult(
        build_target=BuildTarget(
            id=BuildTargetIdentifier(f"pants:{request.bsp_target.name}"),
            display_name=request.bsp_target.name,
            base_directory=roots[0].as_uri(),
            tags=(),
            capabilities=BuildTargetCapabilities(
                can_compile=any(r.can_compile for r in metadata_results),
                can_test=any(r.can_test for r in metadata_results),
                can_run=any(r.can_run for r in metadata_results),
                can_debug=any(r.can_debug for r in metadata_results),
            ),
            language_ids=tuple(sorted(field_sets_by_lang_id.keys())),
            dependencies=(),
            data=metadata,
        ),
        digest=digest,
    )
Example 7
async def generate_one_bsp_build_target_request(
    request: GenerateOneBSPBuildTargetRequest,
    union_membership: UnionMembership,
    build_root: BuildRoot,
) -> GenerateOneBSPBuildTargetResult:
    # Find all Pants targets that are part of this BSP build target.
    targets = await Get(Targets, BSPBuildTargetInternal, request.bsp_target)

    # Determine whether the targets are compilable.
    can_compile = any(
        req_type.field_set_type.is_applicable(t)  # type: ignore[misc]
        for req_type in union_membership[BSPCompileRequest] for t in targets)

    # Classify the targets by the language backends that claim to provide metadata for them.
    field_sets_by_request_type: dict[type[BSPBuildTargetsMetadataRequest],
                                     OrderedSet[FieldSet]] = defaultdict(
                                         OrderedSet)
    metadata_request_types: FrozenOrderedSet[
        Type[BSPBuildTargetsMetadataRequest]] = union_membership.get(
            BSPBuildTargetsMetadataRequest)
    metadata_request_types_by_lang_id: dict[
        str, type[BSPBuildTargetsMetadataRequest]] = {}
    for metadata_request_type in metadata_request_types:
        previous = metadata_request_types_by_lang_id.get(
            metadata_request_type.language_id)
        if previous:
            raise ValueError(
                f"Multiple implementations claim to support `{metadata_request_type.language_id}`:"
                f"{bullet_list([previous.__name__, metadata_request_type.__name__])}"
                "\n"
                "Do you have conflicting language support backends enabled?")
        metadata_request_types_by_lang_id[
            metadata_request_type.language_id] = metadata_request_type

    for tgt in targets:
        for metadata_request_type in metadata_request_types:
            field_set_type: Type[
                FieldSet] = metadata_request_type.field_set_type
            if field_set_type.is_applicable(tgt):
                field_sets_by_request_type[metadata_request_type].add(
                    field_set_type.create(tgt))

    # Request each language backend to provide metadata for the BuildTarget, and then merge it.
    metadata_results = await MultiGet(
        Get(
            BSPBuildTargetsMetadataResult,
            BSPBuildTargetsMetadataRequest,
            request_type(field_sets=tuple(field_sets)),
        ) for request_type, field_sets in field_sets_by_request_type.items())
    metadata = merge_metadata(
        list(zip(field_sets_by_request_type.keys(), metadata_results)))

    digest = await Get(Digest,
                       MergeDigests([r.digest for r in metadata_results]))

    # Determine "base directory" for this build target using source roots.
    # TODO: This actually has nothing to do with source roots. It should probably be computed as an ancestor
    # directory or else be configurable by the user. It is used as a hint in IntelliJ for where to place the
    # corresponding IntelliJ module.
    source_info = await Get(BSPBuildTargetSourcesInfo, BSPBuildTargetInternal,
                            request.bsp_target)
    if source_info.source_roots:
        roots = [
            build_root.pathlib_path.joinpath(p)
            for p in source_info.source_roots
        ]
    else:
        roots = []

    base_directory: Path | None = None
    if request.bsp_target.definition.base_directory:
        base_directory = build_root.pathlib_path.joinpath(
            request.bsp_target.definition.base_directory)
    elif roots:
        base_directory = roots[0]

    return GenerateOneBSPBuildTargetResult(
        build_target=BuildTarget(
            id=BuildTargetIdentifier(f"pants:{request.bsp_target.name}"),
            display_name=request.bsp_target.name,
            base_directory=base_directory.as_uri() if base_directory else None,
            tags=(),
            capabilities=BuildTargetCapabilities(
                can_compile=can_compile,
                can_debug=False,
                # TODO: See https://github.com/pantsbuild/pants/issues/15050.
                can_run=False,
                can_test=False,
            ),
            language_ids=tuple(
                sorted(req.language_id for req in field_sets_by_request_type)),
            dependencies=(),
            data=metadata,
        ),
        digest=digest,
    )
Example 8
 def bsp_target_id(self) -> BuildTargetIdentifier:
     return BuildTargetIdentifier(f"pants:{self.name}")
Example 9
 def from_json_dict(cls, d: Any):
     return cls(
         target=BuildTargetIdentifier.from_json_dict(d["target"]),
         sources=tuple(SourceItem.from_json_dict(i) for i in d["sources"]),
         roots=tuple(d.get("sources", ())),
     )
Example 10
def test_intellij_test(jvm_rule_runner: RuleRunner,
                       jvm_lockfile: JVMLockfileFixture) -> None:
    jvm_rule_runner.write_files({
        "3rdparty/jvm/BUILD":
        jvm_lockfile.requirements_as_jvm_artifact_targets(),
        "3rdparty/jvm/default.lock":
        jvm_lockfile.serialized_lockfile,
        "BUILD":
        "scalatest_tests(name='main')",
        "Spec.scala":
        dedent("""\
                package org.pantsbuild.example

                import org.scalatest.funspec.AnyFunSpec

                import org.pantsbuild.example.lib.ExampleLib

                class ExampleLibSpec extends AnyFunSpec {
                  describe("ExampleLib") {
                    it("should say hello") {
                      assert(ExampleLib.HELLO == "hello!")
                    }
                  }
                }
                """),
        "lib/BUILD":
        "java_sources()",
        "lib/ExampleLib.java":
        dedent("""\
                package org.pantsbuild.example.lib;

                public class ExampleLib {
                    public static String HELLO = "hello!";
                }
                """),
        "bsp-groups.toml":
        dedent("""\
                [groups.default]
                addresses = ["::"]
                """),
    })

    target_ids = (BuildTargetIdentifier("pants:default"), )

    # We set a very high timeout here (was 15s) due to CI flakes as documented in:
    #   https://github.com/pantsbuild/pants/issues/15657
    # This seems to paper over some slow interaction between requests and the LMDB
    # store as noted in the ticket.
    timeout = 45

    with setup_bsp_server(
            jvm_rule_runner,
            notification_names={
                "build/taskStart", "build/taskProgress", "build/taskFinish"
            },
    ) as (endpoint, notifications):
        build_root = Path(jvm_rule_runner.build_root)

        # build/initialize
        _ = endpoint.request(
            "build/initialize",
            InitializeBuildParams(
                display_name="IntelliJ-BSP",
                version="2022.1.13",
                bsp_version="2.0",
                root_uri=build_root.as_uri(),
                capabilities=BuildClientCapabilities(language_ids=("scala",
                                                                   "java")),
                data={
                    "clientClassesRootDir": (build_root / "out").as_uri(),
                    "supportedScalaVersions": [],
                },
            ).to_json_dict(),
        ).result(timeout=timeout)

        # build/initialized
        endpoint.notify("build/initialized")

        # workspace/buildTargets
        build_targets = WorkspaceBuildTargetsResult.from_json_dict(
            endpoint.request("workspace/buildTargets").result(timeout=timeout))
        assert len(build_targets.targets) == 1
        assert build_targets.targets[
            0].capabilities == BuildTargetCapabilities(can_compile=True)
        assert build_targets.targets[0].language_ids == ("java", "scala")

        # buildTarget/sources
        sources = SourcesResult.from_json_dict(
            endpoint.request("buildTarget/sources",
                             SourcesParams(target_ids).to_json_dict()).result(
                                 timeout=timeout))
        assert len(sources.items[0].sources) == 2

        # buildTarget/dependencySources - (NB: stubbed)
        _ = endpoint.request(
            "buildTarget/dependencySources",
            DependencySourcesParams(target_ids).to_json_dict()).result(
                timeout=timeout)

        # buildTarget/resources - (NB: used only to index resources)
        _ = endpoint.request(
            "buildTarget/resources",
            ResourcesParams(target_ids).to_json_dict()).result(timeout=timeout)

        # buildTarget/scalacOptions
        scalac_options = ScalacOptionsResult.from_json_dict(
            endpoint.request(
                "buildTarget/scalacOptions",
                ScalacOptionsParams(target_ids).to_json_dict()).result(
                    timeout=timeout))
        assert scalac_options.items[0].classpath
        class_directory = Path(
            urlparse(scalac_options.items[0].class_directory).path)
        assert not class_directory.exists()

        # buildTarget/compile
        compile_result = CompileResult.from_json_dict(
            endpoint.request("buildTarget/compile",
                             CompileParams(target_ids).to_json_dict()).result(
                                 timeout=timeout))
        assert StatusCode(compile_result.status_code) == StatusCode.OK
        notifications.assert_received_unordered([
            ("build/taskStart", {}),
            ("build/taskProgress", {
                "message": "//Spec.scala:main succeeded."
            }),
            ("build/taskProgress", {
                "message": "lib/ExampleLib.java succeeded."
            }),
            ("build/taskFinish", {}),
        ])
        assert list(class_directory.iterdir())
Example 11
 def from_json_dict(cls, d: dict[str, Any]) -> Any:
     return cls(target=BuildTargetIdentifier.from_json_dict(d["target"]))
Example 12
 def from_json_dict(cls, d: dict[str, Any]) -> Any:
     return cls(
         targets=tuple(BuildTargetIdentifier.from_json_dict(x) for x in d["targets"]),
     )
Example 13
 def from_json_dict(cls, d):
     return cls(
         targets=tuple(BuildTargetIdentifier.from_json_dict(x) for x in d["targets"]),
         origin_id=d.get("originId"),
     )