Example #1
def test_user_repo_order_is_respected(rule_runner: RuleRunner) -> None:
    """Tests that the repo resolution order issue found in #14577 is avoided."""

    jai_core = Coordinate(group="javax.media",
                          artifact="jai_core",
                          version="1.1.3")

    # `repo1.maven.org` has a bogus POM that Coursier hits first
    # `repo.osgeo.org` has a valid POM and should succeed
    rule_runner.set_options(
        args=[
            """--coursier-repos=['https://repo1.maven.org/maven2', 'https://repo.osgeo.org/repository/release']"""
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    with engine_error(ProcessExecutionFailure):
        rule_runner.request(
            CoursierResolvedLockfile,
            [
                ArtifactRequirements.from_coordinates([jai_core]),
            ],
        )

    rule_runner.set_options(
        args=[
            """--coursier-repos=['https://repo.osgeo.org/repository/release', 'https://repo1.maven.org/maven2']"""
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements.from_coordinates([jai_core]),
        ],
    )
Example #2
def test_multiple_resolves(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "BUILD":
        dedent("""\
                jvm_artifact(
                    name='hamcrest',
                    group='org.hamcrest',
                    artifact='hamcrest-core',
                    version="1.3",
                    resolve=parametrize("a", "b"),
                )

                jvm_artifact(
                    name='opentest4j',
                    group='org.opentest4j',
                    artifact='opentest4j',
                    version='1.2.0',
                    resolve="a",
                )

                jvm_artifact(
                    name='apiguardian-api',
                    group='org.apiguardian',
                    artifact='apiguardian-api',
                    version='1.1.0',
                    resolve="b",
                )
                """),
    })
    rule_runner.set_options(["--jvm-resolves={'a': 'a.lock', 'b': 'b.lock'}"],
                            env_inherit={"PATH"})

    result = rule_runner.request(UserGenerateLockfiles,
                                 [RequestedJVMUserResolveNames(["a", "b"])])
    hamcrest_core = ArtifactRequirement(
        Coordinate("org.hamcrest", "hamcrest-core", "1.3"))
    assert set(result) == {
        GenerateJvmLockfile(
            artifacts=ArtifactRequirements([
                hamcrest_core,
                ArtifactRequirement(
                    Coordinate("org.opentest4j", "opentest4j", "1.2.0")),
            ]),
            resolve_name="a",
            lockfile_dest="a.lock",
        ),
        GenerateJvmLockfile(
            artifacts=ArtifactRequirements([
                ArtifactRequirement(
                    Coordinate("org.apiguardian", "apiguardian-api", "1.1.0")),
                hamcrest_core,
            ]),
            resolve_name="b",
            lockfile_dest="b.lock",
        ),
    }
Example #3
async def setup_user_lockfile_requests(
    requested: RequestedJVMUserResolveNames,
    all_targets: AllTargets,
    jvm_subsystem: JvmSubsystem,
) -> UserGenerateLockfiles:
    resolve_to_artifacts: Mapping[
        str, OrderedSet[ArtifactRequirement]] = defaultdict(OrderedSet)
    for tgt in sorted(all_targets, key=lambda t: t.address):
        if not tgt.has_field(JvmArtifactResolveField):
            continue
        artifact = ArtifactRequirement.from_jvm_artifact_target(tgt)
        resolve = tgt[JvmResolveField].normalized_value(jvm_subsystem)
        resolve_to_artifacts[resolve].add(artifact)

    # Generate a JVM lockfile request for each requested resolve. This step also allows other backends to
    # validate the proposed set of artifact requirements for each resolve.
    jvm_lockfile_requests = await MultiGet(
        Get(
            GenerateJvmLockfile,
            _ValidateJvmArtifactsRequest(
                artifacts=ArtifactRequirements(
                    resolve_to_artifacts.get(resolve, ())),
                resolve_name=resolve,
            ),
        ) for resolve in requested)

    return UserGenerateLockfiles(jvm_lockfile_requests)
Example #4
    def jvm_lockfile(self, request) -> JVMLockfileFixture:
        mark = request.node.get_closest_marker("jvm_lockfile")

        definition = JVMLockfileFixtureDefinition.from_kwargs(mark.kwargs)

        # Load the lockfile.
        lockfile_path = request.node.path.parent / definition.lockfile_rel_path
        lockfile_contents = lockfile_path.read_bytes()
        lockfile = CoursierResolvedLockfile.from_serialized(lockfile_contents)

        # Check the lockfile's requirements against the requirements in the fixture definition.
        # Fail the test if the lockfile needs to be regenerated.
        artifact_reqs = ArtifactRequirements([
            ArtifactRequirement(coordinate)
            for coordinate in definition.coordinates
        ])
        if not lockfile.metadata:
            raise ValueError(
                f"Expected JVM lockfile {definition.lockfile_rel_path} to have metadata."
            )
        if not lockfile.metadata.is_valid_for(artifact_reqs,
                                              LockfileContext.TOOL):
            raise ValueError(
                f"Lockfile fixture {definition.lockfile_rel_path} is not valid. "
                "Please re-generate it using: "
                f"{bin_name()} internal-generate-test-lockfile-fixtures ::")

        return JVMLockfileFixture(lockfile, lockfile_contents.decode(),
                                  artifact_reqs)
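# A hedged usage sketch for the fixture above: a test declares its coordinates and lockfile
# path via a `jvm_lockfile` pytest marker, whose kwargs are parsed by
# `JVMLockfileFixtureDefinition.from_kwargs`. The kwarg names `path` and `requirements`
# are assumptions for illustration, not a verified signature.
import pytest


@pytest.mark.jvm_lockfile(
    path="hamcrest.test.lock",  # assumed kwarg: lockfile path relative to the test file
    requirements=["org.hamcrest:hamcrest-core:1.3"],  # assumed kwarg: coordinate strings
)
def test_with_lockfile_fixture(jvm_lockfile: JVMLockfileFixture) -> None:
    # The fixture bundles the parsed lockfile, its serialized text, and the requirements
    # (see the three-argument construction at the end of the fixture above).
    assert jvm_lockfile is not None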
Example #5
def test_fetch_one_coord_with_classifier(rule_runner: RuleRunner) -> None:
    # Has a transitive dependency on an artifact with both a `classifier` and a `packaging`.
    coordinate = Coordinate(group="org.apache.avro",
                            artifact="avro-tools",
                            version="1.11.0")
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements.from_coordinates([coordinate])],
    )

    entries = [
        e for e in resolved_lockfile.entries if e.coord == Coordinate(
            group="org.apache.avro",
            artifact="trevni-avro",
            version="1.11.0",
            packaging="jar",
            classifier="tests",
            strict=True,
        )
    ]
    assert len(entries) == 1
    entry = entries[0]

    classpath_entry = rule_runner.request(ClasspathEntry, [entry])
    assert classpath_entry.filenames == (
        "org.apache.avro_trevni-avro_jar_tests_1.11.0.jar", )
Example #6
def test_resolve_with_transitive_deps(rule_runner: RuleRunner) -> None:
    junit_coord = Coordinate(group="junit", artifact="junit", version="4.13.2")
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements.from_coordinates([junit_coord]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(entries=(
        CoursierLockfileEntry(
            coord=junit_coord,
            file_name="junit_junit_4.13.2.jar",
            direct_dependencies=Coordinates([HAMCREST_COORD]),
            dependencies=Coordinates([HAMCREST_COORD]),
            file_digest=FileDigest(
                fingerprint=
                "8e495b634469d64fb8acfa3495a065cbacc8a0fff55ce1e31007be4c16dc57d3",
                serialized_bytes_length=384581,
            ),
        ),
        CoursierLockfileEntry(
            coord=HAMCREST_COORD,
            file_name="org.hamcrest_hamcrest-core_1.3.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
                serialized_bytes_length=45024,
            ),
        ),
    ))
Example #7
async def materialize_scala_runtime_jars(
    request: MaterializeScalaRuntimeJarsRequest,
) -> MaterializeScalaRuntimeJarsResult:
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-compiler",
                    version=request.scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=request.scala_version,
                ),
            ]), ),
    )

    materialized_classpath_digest = await Get(
        Digest,
        AddPrefix(tool_classpath.content.digest,
                  f"jvm/scala-runtime/{request.scala_version}"),
    )
    materialized_classpath = await Get(Snapshot, Digest,
                                       materialized_classpath_digest)
    return MaterializeScalaRuntimeJarsResult(materialized_classpath)
Example #8
def test_resolve_with_inexact_coord(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            # Note the open-ended coordinate here.  We will still resolve this for the user, but the result
            # will be exact and pinned.  As noted above, this is an especially brittle unit test, but version
            # 4.8 was chosen because it has multiple patch versions and no new versions have been uploaded
            # to 4.8.x in over a decade.
            ArtifactRequirements.from_coordinates(
                [Coordinate(group="junit", artifact="junit", version="4.8+")]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=Coordinate(group="junit", artifact="junit", version="4.8.2"),
            file_name="junit_junit_4.8.2.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "a2aa2c3bb2b72da76c3e6a71531f1eefdc350494819baf2b1d80d7146e020f9e",
                serialized_bytes_length=237344,
            ),
        ), ))
Example #9
def test_generate_lockfile(rule_runner: RuleRunner) -> None:
    artifacts = ArtifactRequirements([
        ArtifactRequirement(Coordinate("org.hamcrest", "hamcrest-core", "1.3"))
    ])
    result = rule_runner.request(
        GenerateLockfileResult,
        [
            GenerateJvmLockfile(artifacts=artifacts,
                                resolve_name="test",
                                lockfile_dest="lock.txt")
        ],
    )
    digest_contents = rule_runner.request(DigestContents, [result.digest])
    assert len(digest_contents) == 1

    expected = CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=Coordinate(
                group="org.hamcrest",
                artifact="hamcrest-core",
                version="1.3",
            ),
            file_name="org.hamcrest_hamcrest-core_1.3.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
                serialized_bytes_length=45024,
            ),
        ), ),
        metadata=JVMLockfileMetadata.new(artifacts),
    )
    assert CoursierResolvedLockfile.from_serialized(
        digest_contents[0].content) == expected
Example #10
async def gather_lockfile_fixtures() -> RenderedJVMLockfileFixtures:
    configs = await Get(CollectedJVMLockfileFixtureConfigs,
                        CollectFixtureConfigsRequest())
    rendered_fixtures = []
    for config in configs:
        artifact_reqs = ArtifactRequirements([
            ArtifactRequirement(coordinate)
            for coordinate in config.definition.coordinates
        ])
        lockfile = await Get(CoursierResolvedLockfile, ArtifactRequirements,
                             artifact_reqs)
        serialized_lockfile = JVMLockfileMetadata.new(
            artifact_reqs
        ).add_header_to_lockfile(
            lockfile.to_serialized(),
            regenerate_command=
            f"{bin_name()} {InternalGenerateTestLockfileFixturesSubsystem.name} ::",
            delimeter="#",
        )

        lockfile_path = os.path.join(os.path.dirname(config.test_file_path),
                                     config.definition.lockfile_rel_path)
        rendered_fixtures.append(
            RenderedJVMLockfileFixture(
                content=serialized_lockfile,
                path=lockfile_path,
            ))

    return RenderedJVMLockfileFixtures(rendered_fixtures)
Example #11
def test_resolve_with_a_jar(rule_runner: RuleRunner) -> None:

    rule_runner.write_files({
        "BUILD":
        textwrap.dedent("""\
            jvm_artifact(
              name="jeremy",
              group="jeremy",
              artifact="jeremy",
              version="4.13.2",
              jar="jeremy.jar",
            )
            """),
        "jeremy.jar":
        "hello dave",
    })

    targets = rule_runner.request(Targets, [
        RawSpecs(recursive_globs=(RecursiveGlobSpec(""), ),
                 description_of_origin="tests")
    ])
    jeremy_target = targets[0]

    jar_field = jeremy_target[JvmArtifactJarSourceField]

    requirement = ArtifactRequirement(
        coordinate=Coordinate(
            group="jeremy",
            artifact="jeremy",
            version="4.13.2",
        ),
        jar=jar_field,
    )

    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements([requirement])],
    )

    coordinate = requirement.coordinate
    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=Coordinate(group=coordinate.group,
                             artifact=coordinate.artifact,
                             version=coordinate.version),
            file_name=
            f"{coordinate.group}_{coordinate.artifact}_{coordinate.version}.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "55b9afa8d7776cd6c318eec51f506e9c7f66c247dcec343d4667f5f269714f86",
                serialized_bytes_length=10,
            ),
            pants_address=jar_field.address.spec,
        ), ))
Example #12
async def gather_coordinates_for_jvm_lockfile(
    request: GatherJvmCoordinatesRequest, ) -> ArtifactRequirements:
    # Separate `artifact_inputs` by whether the strings parse as an `Address` or not.
    requirements: set[ArtifactRequirement] = set()
    candidate_address_inputs: set[AddressInput] = set()
    bad_artifact_inputs = []
    for artifact_input in request.artifact_inputs:
        # Try parsing as a `Coordinate` first since otherwise `AddressInput.parse` will try to see if the
        # group name is a file on disk.
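        # A coordinate string has 3 or 4 colon-separated parts, e.g.
        # "org.hamcrest:hamcrest-core:1.3" (hence the 2-3 colon check below).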
        if 2 <= artifact_input.count(":") <= 3:
            try:
                maybe_coord = Coordinate.from_coord_str(
                    artifact_input).as_requirement()
                requirements.add(maybe_coord)
                continue
            except Exception:
                pass

        try:
            address_input = AddressInput.parse(
                artifact_input,
                description_of_origin=f"the option `{request.option_name}`")
            candidate_address_inputs.add(address_input)
        except Exception:
            bad_artifact_inputs.append(artifact_input)

    if bad_artifact_inputs:
        raise ValueError(
            "The following values could not be parsed as an address nor as a JVM coordinate string. "
            f"The problematic inputs supplied to the `{request.option_name}` option were: "
            f"{', '.join(bad_artifact_inputs)}.")

    # Gather coordinates from the provided addresses.
    addresses = await MultiGet(
        Get(Address, AddressInput, ai) for ai in candidate_address_inputs)
    all_supplied_targets = await Get(Targets, Addresses(addresses))
    other_targets = []
    for tgt in all_supplied_targets:
        if JvmArtifactFieldSet.is_applicable(tgt):
            requirements.add(ArtifactRequirement.from_jvm_artifact_target(tgt))
        else:
            other_targets.append(tgt)

    if other_targets:
        raise ValueError(
            softwrap(f"""
                The following addresses reference targets that are not `jvm_artifact` targets.
                Please only supply the addresses of `jvm_artifact` for the `{request.option_name}`
                option. The problematic addresses are: {', '.join(str(tgt.address) for tgt in other_targets)}.
                """))

    return ArtifactRequirements(requirements)
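# A hedged usage sketch (from within some other rule): hand this rule a mix of raw coordinate
# strings and target addresses and get back a single ArtifactRequirements. The field names are
# inferred from the `request.artifact_inputs` / `request.option_name` accesses above, and the
# address "3rdparty/jvm:hamcrest" and option name are hypothetical placeholders.
async def _example_gather_coordinates() -> ArtifactRequirements:
    return await Get(
        ArtifactRequirements,
        GatherJvmCoordinatesRequest(
            artifact_inputs=FrozenOrderedSet(
                ["org.hamcrest:hamcrest-core:1.3", "3rdparty/jvm:hamcrest"]
            ),
            option_name="[example].artifacts",
        ),
    )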
Example #13
async def fetch_scala_library(request: ScalaLibraryRequest) -> ClasspathEntry:
    tcp = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=request.version,
                ),
            ]), ),
    )

    return ClasspathEntry(tcp.digest, tcp.content.files)
Example #14
def test_resolve_conflicting(rule_runner: RuleRunner) -> None:
    with engine_error(
            ProcessExecutionFailure,
            contains="Resolution error: Unsatisfied rule Strict(junit:junit)"):
        rule_runner.request(
            CoursierResolvedLockfile,
            [
                ArtifactRequirements.from_coordinates([
                    Coordinate(
                        group="junit", artifact="junit", version="4.8.1"),
                    Coordinate(
                        group="junit", artifact="junit", version="4.8.2"),
                ]),
            ],
        )
Example #15
def test_resolve_with_no_deps(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements.from_coordinates([HAMCREST_COORD])],
    )
    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=HAMCREST_COORD,
            file_name="org.hamcrest_hamcrest-core_1.3.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
                serialized_bytes_length=45024,
            ),
        ), ))
Example #16
def java_parser_artifact_requirements() -> ArtifactRequirements:
    # Update in concert with the target definition for `java_parser`.
    return ArtifactRequirements.from_coordinates([
        Coordinate(group="com.fasterxml.jackson.core",
                   artifact="jackson-databind",
                   version="2.12.4"),
        Coordinate(
            group="com.fasterxml.jackson.datatype",
            artifact="jackson-datatype-jdk8",
            version="2.12.4",
        ),
        Coordinate(
            group="com.github.javaparser",
            artifact="javaparser-symbol-solver-core",
            version="3.23.0",
        ),
    ], )
Example #17
def test_transitive_excludes(rule_runner: RuleRunner) -> None:
    resolve = rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements([
                Coordinate(
                    group="com.fasterxml.jackson.core",
                    artifact="jackson-databind",
                    version="2.12.1",
                ).as_requirement().with_extra_excludes(
                    "com.fasterxml.jackson.core:jackson-core")
            ]),
        ],
    )

    entries = resolve.entries
    assert any(i for i in entries if i.coord.artifact == "jackson-databind")
    assert not any(i for i in entries if i.coord.artifact == "jackson-core")
Example #18
def test_resolve_with_broken_url(rule_runner: RuleRunner) -> None:

    coordinate = ArtifactRequirement(
        coordinate=Coordinate(
            group="org.hamcrest",
            artifact="hamcrest-core",
            version=
            "1.3_inexplicably_wrong",  # if the group/artifact/version is real, coursier will fallback
        ),
        url="https://this_url_does_not_work",
    )

    expected_exception_msg = r".*this_url_does_not_work not found under https.*"

    with pytest.raises(ExecutionError, match=expected_exception_msg):
        rule_runner.request(
            CoursierResolvedLockfile,
            [ArtifactRequirements([coordinate])],
        )
Example #19
class ToolClasspathRequest:
    """A request to set up the classpath for a JVM tool by fetching artifacts and merging the
    classpath.

    :param prefix: if set, should be a relative directory that will
        be prepended to every classpath element.  This is useful for
        keeping all classpath elements isolated under a single directory
        in a process invocation, where other inputs on the process's
        root directory might interfere with un-prefixed classpath
        entries (or vice versa).
    """

    prefix: str | None = None
    lockfile: GenerateJvmLockfileFromTool | None = None
    artifact_requirements: ArtifactRequirements = ArtifactRequirements()

    def __post_init__(self) -> None:
        if not bool(self.lockfile) ^ bool(self.artifact_requirements):
            raise AssertionError(
                f"Exactly one of `lockfile` or `artifact_requirements` must be provided: {self}"
            )
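# A hedged construction sketch for ToolClasspathRequest, mirroring its use elsewhere in this
# section (the names below come from the surrounding examples, not new APIs): exactly one of
# `lockfile` or `artifact_requirements` may be set, and `prefix` keeps the fetched classpath
# isolated under a single sandbox directory.
example_request = ToolClasspathRequest(
    prefix="__toolcp",  # every classpath element will be materialized under this directory
    artifact_requirements=ArtifactRequirements.from_coordinates(
        [Coordinate(group="org.hamcrest", artifact="hamcrest-core", version="1.3")]
    ),
)
# Alternatively, supply a lockfile request instead (as the ScalaPB shim example below does):
#     ToolClasspathRequest(prefix="__shimcp", lockfile=lockfile_request)
# Passing both, or neither, trips the XOR check in `__post_init__`.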
Example #20
async def _materialize_scala_runtime_jars(scala_version: str) -> Snapshot:
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-compiler",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=scala_version,
                ),
            ]), ),
    )

    return await Get(
        Snapshot,
        AddPrefix(tool_classpath.content.digest,
                  f"jvm/scala-runtime/{scala_version}"),
    )
Example #21
def test_resolve_with_working_url(rule_runner: RuleRunner) -> None:

    requirement = ArtifactRequirement(
        coordinate=Coordinate(
            group="apache-commons-local",
            artifact="commons-collections",
            version="1.0.0_JAR_LOCAL",
        ),
        url=
        "https://repo1.maven.org/maven2/org/apache/commons/commons-collections4/4.2/commons-collections4-4.2.jar",
    )

    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements([requirement])],
    )

    coordinate = requirement.coordinate

    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=Coordinate(group=coordinate.group,
                             artifact=coordinate.artifact,
                             version=coordinate.version),
            file_name=
            f"{coordinate.group}_{coordinate.artifact}_{coordinate.version}.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "6a594721d51444fd97b3eaefc998a77f606dedb03def494f74755aead3c9df3e",
                serialized_bytes_length=752798,
            ),
            remote_url=requirement.url,
            pants_address=None,
        ), ))
Example #22
def test_resolve_with_packaging(rule_runner: RuleRunner) -> None:
    # Tests that an artifact whose POM reports a `packaging` value ends up with the proper
    # version and packaging information.
    #   See https://github.com/pantsbuild/pants/issues/13986
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements.from_coordinates([
                Coordinate(group="org.bouncycastle",
                           artifact="bcutil-jdk15on",
                           version="1.70")
            ]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(entries=(
        CoursierLockfileEntry(
            coord=Coordinate(
                group="org.bouncycastle",
                artifact="bcprov-jdk15on",
                version="1.70",
                packaging="jar",
                strict=True,
            ),
            file_name="org.bouncycastle_bcprov-jdk15on_jar_1.70.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                "8f3c20e3e2d565d26f33e8d4857a37d0d7f8ac39b62a7026496fcab1bdac30d4",
                5867298),
            remote_url=None,
            pants_address=None,
        ),
        CoursierLockfileEntry(
            coord=Coordinate(
                group="org.bouncycastle",
                artifact="bcutil-jdk15on",
                version="1.70",
                packaging="jar",
                strict=True,
            ),
            file_name="org.bouncycastle_bcutil-jdk15on_1.70.jar",
            direct_dependencies=Coordinates([
                Coordinate(
                    group="org.bouncycastle",
                    artifact="bcprov-jdk15on",
                    version="1.70",
                    packaging="jar",
                    strict=True,
                )
            ]),
            dependencies=Coordinates([
                Coordinate(
                    group="org.bouncycastle",
                    artifact="bcprov-jdk15on",
                    version="1.70",
                    packaging="jar",
                    strict=True,
                )
            ]),
            file_digest=FileDigest(
                "52dc5551b0257666526c5095424567fed7dc7b00d2b1ba7bd52298411112b1d0",
                482530),
            remote_url=None,
            pants_address=None,
        ),
    ))
Example #23
CIRCE_DEPENDENCIES = [
    Coordinate.from_coord_str(s)
    for s in [
        "io.circe:circe-generic_2.13:0.14.1",
        "org.typelevel:simulacrum-scalafix-annotations_2.13:0.5.4",
        "org.typelevel:cats-core_2.13:2.6.1",
        "org.scala-lang:scala-library:2.13.6",
        "io.circe:circe-numbers_2.13:0.14.1",
        "com.chuusai:shapeless_2.13:2.3.7",
        "io.circe:circe-core_2.13:0.14.1",
        "org.typelevel:cats-kernel_2.13:2.6.1",
    ]
]

SCALA_PARSER_ARTIFACT_REQUIREMENTS = ArtifactRequirements.from_coordinates(
    SCALAMETA_DEPENDENCIES + CIRCE_DEPENDENCIES
)


@dataclass(frozen=True)
class ScalaImport:
    name: str
    alias: str | None
    is_wildcard: bool

    @classmethod
    def from_json_dict(cls, data: Mapping[str, Any]):
        return cls(name=data["name"], alias=data.get("alias"), is_wildcard=data["isWildcard"])

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {"name": self.name, "alias": self.alias, "isWildcard": self.is_wildcard}
Example #24
async def setup_scala_parser_classfiles(jdk: InternalJdk) -> ScalaParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = pkgutil.get_data(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    tool_classpath, parser_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-compiler",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-library",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-reflect",
                            version=PARSER_SCALA_VERSION,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__parsercp", artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS
            ),
        ),
        Get(Digest, CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        ),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(parser_classpath.classpath_entries()),
                "-d",
                dest_dir,
                parser_source.path,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir,),
            description="Compile Scala parser for dependency inference with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Example #25
async def coursier_fetch_one_coord(
    request: CoursierLockfileEntry, ) -> ClasspathEntry:
    """Run `coursier fetch --intransitive` to fetch a single artifact.

    This rule exists to permit efficient subsetting of a "global" classpath
    in the form of a lockfile.  Callers can determine what subset of dependencies
    from the lockfile are needed for a given target, then request those
    lockfile entries individually.

    By fetching only one entry at a time, we maximize our cache efficiency.  If instead
    we fetched the entire subset that the caller wanted, there would be a different cache
    key for every possible subset.

    This rule also guarantees exact reproducibility.  If all caches have been
    removed, `coursier fetch` will re-download the artifact, and this rule will
    confirm that what was downloaded matches exactly (by content digest) what
    was specified in the lockfile (what Coursier originally downloaded).
    """

    # Prepare any URL- or JAR-specifying entries for use with Coursier
    req: ArtifactRequirement
    if request.pants_address:
        targets = await Get(
            Targets,
            UnparsedAddressInputs([request.pants_address],
                                  owning_address=None,
                                  description_of_origin="TODO(#14468)"),
        )
        req = ArtifactRequirement(request.coord,
                                  jar=targets[0][JvmArtifactJarSourceField])
    else:
        req = ArtifactRequirement(request.coord, url=request.remote_url)

    coursier_resolve_info = await Get(
        CoursierResolveInfo,
        ArtifactRequirements([req]),
    )

    coursier_report_file_name = "coursier_report.json"

    process_result = await Get(
        ProcessResult,
        CoursierFetchProcess(
            args=(
                coursier_report_file_name,
                "--intransitive",
                *coursier_resolve_info.argv,
            ),
            input_digest=coursier_resolve_info.digest,
            output_directories=("classpath", ),
            output_files=(coursier_report_file_name, ),
            description=
            f"Fetching with coursier: {request.coord.to_coord_str()}",
        ),
    )
    report_digest = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([coursier_report_file_name])))
    report_contents = await Get(DigestContents, Digest, report_digest)
    report = json.loads(report_contents[0].content)

    report_deps = report["dependencies"]
    if len(report_deps) == 0:
        raise CoursierError(
            "Coursier fetch report has no dependencies (i.e. nothing was fetched)."
        )
    elif len(report_deps) > 1:
        raise CoursierError(
            "Coursier fetch report has multiple dependencies, but exactly 1 was expected."
        )

    dep = report_deps[0]
    resolved_coord = Coordinate.from_coord_str(dep["coord"])
    if resolved_coord != request.coord:
        raise CoursierError(
            f'Coursier resolved coord "{resolved_coord.to_coord_str()}" does not match requested coord "{request.coord.to_coord_str()}".'
        )

    classpath_dest_name = classpath_dest_filename(dep["coord"], dep["file"])
    classpath_dest = f"classpath/{classpath_dest_name}"

    resolved_file_digest = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([classpath_dest])))
    stripped_digest = await Get(
        Digest, RemovePrefix(resolved_file_digest, "classpath"))
    file_digest = await Get(
        FileDigest,
        ExtractFileDigest(stripped_digest, classpath_dest_name),
    )
    if file_digest != request.file_digest:
        raise CoursierError(
            f"Coursier fetch for '{resolved_coord}' succeeded, but fetched artifact {file_digest} did not match the expected artifact: {request.file_digest}."
        )
    return ClasspathEntry(digest=stripped_digest,
                          filenames=(classpath_dest_name, ))
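# A hedged sketch of the subsetting pattern described in the docstring above: resolve the
# full lockfile once, select only the entries a consumer needs, and fetch each entry
# individually so every entry caches on its own (mirrors Example #5 earlier in this section).
def fetch_lockfile_subset(
    rule_runner: RuleRunner,
    lockfile: CoursierResolvedLockfile,
    wanted_artifacts: set[str],
) -> list[ClasspathEntry]:
    wanted_entries = [e for e in lockfile.entries if e.coord.artifact in wanted_artifacts]
    # One request per entry: unrelated subsets still share per-artifact cache hits.
    return [rule_runner.request(ClasspathEntry, [entry]) for entry in wanted_entries]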
Example #26
async def create_scala_repl_request(
        request: ScalaRepl, bash: BashBinary,
        scala_subsystem: ScalaSubsystem) -> ReplRequest:
    user_classpath = await Get(Classpath, Addresses, request.addresses)

    roots = await Get(CoarsenedTargets, Addresses, request.addresses)
    environs = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(target))
        for target in roots)
    jdk = max(environs, key=lambda j: j.jre_major_version)

    scala_version = scala_subsystem.version_for_resolve(
        user_classpath.resolve.name)
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            prefix="__toolcp",
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-compiler",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-reflect",
                    version=scala_version,
                ),
            ]),
        ),
    )

    user_classpath_prefix = "__cp"
    prefixed_user_classpath = await MultiGet(
        Get(Digest, AddPrefix(d, user_classpath_prefix))
        for d in user_classpath.digests())

    # TODO: Manually merging the `immutable_input_digests` since InteractiveProcess doesn't
    # support them yet. See https://github.com/pantsbuild/pants/issues/13852.
    jdk_digests = await MultiGet(
        Get(Digest, AddPrefix(digest, relpath))
        for relpath, digest in jdk.immutable_input_digests.items())

    repl_digest = await Get(
        Digest,
        MergeDigests([
            *prefixed_user_classpath, tool_classpath.content.digest,
            *jdk_digests
        ]),
    )

    return ReplRequest(
        digest=repl_digest,
        args=[
            *jdk.args(bash, tool_classpath.classpath_entries()),
            "-Dscala.usejavacp=true",
            "scala.tools.nsc.MainGenericRunner",
            "-classpath",
            ":".join(user_classpath.args(prefix=user_classpath_prefix)),
        ],
        extra_env={
            **jdk.env,
            "PANTS_INTERNAL_ABSOLUTE_PREFIX": "",
        },
        run_in_workspace=False,
        append_only_caches=jdk.append_only_caches,
    )
Example #27
async def compile_kotlin_source(
    kotlin: KotlinSubsystem,
    kotlinc: KotlincSubsystem,
    request: CompileKotlinSourceRequest,
) -> FallibleClasspathEntry:
    # Request classpath entries for our direct dependencies.
    dependency_cpers = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    direct_dependency_classpath_entries = dependency_cpers.if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    kotlin_version = kotlin.version_for_resolve(request.resolve.name)

    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(KotlinSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )

    plugins_ = await MultiGet(
        Get(
            KotlincPluginTargetsForTarget,
            KotlincPluginsForTargetRequest(target, request.resolve.name),
        )
        for target in request.component.members
    )
    plugins_request = KotlincPluginsRequest.from_target_plugins(plugins_, request.resolve)
    local_plugins = await Get(KotlincPlugins, KotlincPluginsRequest, plugins_request)

    component_members_and_kotlin_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_kotlin_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    toolcp_relpath = "__toolcp"
    local_kotlinc_plugins_relpath = "__localplugincp"
    usercp = "__cp"

    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)

    tool_classpath, sources_digest, jdk = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=kotlin_version,
                        ),
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-scripting-compiler-embeddable",
                            version=kotlin_version,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            Digest,
            MergeDigests(
                (
                    sources.snapshot.digest
                    for _, sources in component_members_and_kotlin_source_files
                )
            ),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        local_kotlinc_plugins_relpath: local_plugins.classpath.digest,
    }
    extra_nailgun_keys = tuple(extra_immutable_input_digests)
    extra_immutable_input_digests.update(user_classpath.immutable_inputs(prefix=usercp))

    classpath_arg = ":".join(user_classpath.immutable_inputs_args(prefix=usercp))

    output_file = compute_output_jar_filename(request.component)
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *(local_plugins.args(local_kotlinc_plugins_relpath)),
                *kotlinc.args,
                *sorted(
                    itertools.chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_kotlin_source_files
                    )
                ),
            ],
            input_digest=sources_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_nailgun_keys,
            output_files=(output_file,),
            description=f"Compile {request.component} with kotlinc",
            level=LogLevel.DEBUG,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(
            process_result.output_digest, (output_file,), direct_dependency_classpath_entries
        )

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
Example #28
async def setup_scalapb_shim_classfiles(
    scalapb: ScalaPBSubsystem,
    jdk: InternalJdk,
) -> ScalaPBShimCompiledClassfiles:
    dest_dir = "classfiles"

    scalapb_shim_content = pkgutil.get_data(
        "pants.backend.codegen.protobuf.scala", "ScalaPBShim.scala")
    if not scalapb_shim_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    scalapb_shim_source = FileContent("ScalaPBShim.scala",
                                      scalapb_shim_content)

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 ScalapbcToolLockfileSentinel())
    tool_classpath, shim_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates([
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-compiler",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-library",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-reflect",
                        version=SHIM_SCALA_VERSION,
                    ),
                ]),
            ),
        ),
        Get(ToolClasspath,
            ToolClasspathRequest(prefix="__shimcp",
                                 lockfile=lockfile_request)),
        Get(Digest, CreateDigest([scalapb_shim_source,
                                  Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (tool_classpath.digest, shim_classpath.digest, source_digest)))

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(shim_classpath.classpath_entries()),
                "-d",
                dest_dir,
                scalapb_shim_source.path,
            ],
            input_digest=merged_digest,
            extra_jvm_options=scalapb.jvm_options,
            output_directories=(dest_dir, ),
            description="Compile ScalaPB shim with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir))
    return ScalaPBShimCompiledClassfiles(digest=stripped_classfiles_digest)
Example #29
def test_empty_resolve(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements([])],
    )
    assert resolved_lockfile == CoursierResolvedLockfile(entries=())