Example #1
def test_user_repo_order_is_respected(rule_runner: RuleRunner) -> None:
    """Tests that the repo resolution order issue found in #14577 is avoided."""

    jai_core = Coordinate(group="javax.media",
                          artifact="jai_core",
                          version="1.1.3")

    # `repo1.maven.org` has a bogus POM that Coursier hits first
    # `repo.osgeo.org` has a valid POM and should succeed
    rule_runner.set_options(
        args=[
            """--coursier-repos=['https://repo1.maven.org/maven2', 'https://repo.osgeo.org/repository/release']"""
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    with engine_error(ProcessExecutionFailure):
        rule_runner.request(
            CoursierResolvedLockfile,
            [
                ArtifactRequirements.from_coordinates([jai_core]),
            ],
        )

    rule_runner.set_options(
        args=[
            """--coursier-repos=['https://repo.osgeo.org/repository/release', 'https://repo1.maven.org/maven2']"""
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements.from_coordinates([jai_core]),
        ],
    )
Example #2
def test_resolve_with_transitive_deps(rule_runner: RuleRunner) -> None:
    junit_coord = Coordinate(group="junit", artifact="junit", version="4.13.2")
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements.from_coordinates([junit_coord]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(entries=(
        CoursierLockfileEntry(
            coord=junit_coord,
            file_name="junit_junit_4.13.2.jar",
            direct_dependencies=Coordinates([HAMCREST_COORD]),
            dependencies=Coordinates([HAMCREST_COORD]),
            file_digest=FileDigest(
                fingerprint=
                "8e495b634469d64fb8acfa3495a065cbacc8a0fff55ce1e31007be4c16dc57d3",
                serialized_bytes_length=384581,
            ),
        ),
        CoursierLockfileEntry(
            coord=HAMCREST_COORD,
            file_name="org.hamcrest_hamcrest-core_1.3.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
                serialized_bytes_length=45024,
            ),
        ),
    ))
Example #3
def test_resolve_with_inexact_coord(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            # Note the open-ended coordinate here.  We will still resolve this for the user, but the result
            # will be exact and pinned.  As noted above, this is an especially brittle unit test, but version
            # 4.8 was chosen because it has multiple patch versions and no new versions have been uploaded
            # to 4.8.x in over a decade.
            ArtifactRequirements.from_coordinates(
                [Coordinate(group="junit", artifact="junit", version="4.8+")]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=Coordinate(group="junit", artifact="junit", version="4.8.2"),
            file_name="junit_junit_4.8.2.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "a2aa2c3bb2b72da76c3e6a71531f1eefdc350494819baf2b1d80d7146e020f9e",
                serialized_bytes_length=237344,
            ),
        ), ))
Example #4
def test_fetch_one_coord_with_classifier(rule_runner: RuleRunner) -> None:
    # Has a transitive dependency on an artifact with both a `classifier` and `packaging`.
    coordinate = Coordinate(group="org.apache.avro",
                            artifact="avro-tools",
                            version="1.11.0")
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements.from_coordinates([coordinate])],
    )

    entries = [
        e for e in resolved_lockfile.entries if e.coord == Coordinate(
            group="org.apache.avro",
            artifact="trevni-avro",
            version="1.11.0",
            packaging="jar",
            classifier="tests",
            strict=True,
        )
    ]
    assert len(entries) == 1
    entry = entries[0]

    classpath_entry = rule_runner.request(ClasspathEntry, [entry])
    assert classpath_entry.filenames == (
        "org.apache.avro_trevni-avro_jar_tests_1.11.0.jar", )
Example #5
async def materialize_scala_runtime_jars(
    request: MaterializeScalaRuntimeJarsRequest,
) -> MaterializeScalaRuntimeJarsResult:
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-compiler",
                    version=request.scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=request.scala_version,
                ),
            ]), ),
    )

    materialized_classpath_digest = await Get(
        Digest,
        AddPrefix(tool_classpath.content.digest,
                  f"jvm/scala-runtime/{request.scala_version}"),
    )
    materialized_classpath = await Get(Snapshot, Digest,
                                       materialized_classpath_digest)
    return MaterializeScalaRuntimeJarsResult(materialized_classpath)
Example #6
async def fetch_scala_library(request: ScalaLibraryRequest) -> ClasspathEntry:
    tcp = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=request.version,
                ),
            ]), ),
    )

    return ClasspathEntry(tcp.digest, tcp.content.files)
Example #7
def test_resolve_conflicting(rule_runner: RuleRunner) -> None:
    with engine_error(
            ProcessExecutionFailure,
            contains="Resolution error: Unsatisfied rule Strict(junit:junit)"):
        rule_runner.request(
            CoursierResolvedLockfile,
            [
                ArtifactRequirements.from_coordinates([
                    Coordinate(
                        group="junit", artifact="junit", version="4.8.1"),
                    Coordinate(
                        group="junit", artifact="junit", version="4.8.2"),
                ]),
            ],
        )
Example #8
def test_resolve_with_no_deps(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements.from_coordinates([HAMCREST_COORD])],
    )
    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=HAMCREST_COORD,
            file_name="org.hamcrest_hamcrest-core_1.3.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
                serialized_bytes_length=45024,
            ),
        ), ))
Example #9
def java_parser_artifact_requirements() -> ArtifactRequirements:
    # Update in concert with the target definition for `java_parser`.
    return ArtifactRequirements.from_coordinates([
        Coordinate(group="com.fasterxml.jackson.core",
                   artifact="jackson-databind",
                   version="2.12.4"),
        Coordinate(
            group="com.fasterxml.jackson.datatype",
            artifact="jackson-datatype-jdk8",
            version="2.12.4",
        ),
        Coordinate(
            group="com.github.javaparser",
            artifact="javaparser-symbol-solver-core",
            version="3.23.0",
        ),
    ], )
Example #10
async def _materialize_scala_runtime_jars(scala_version: str) -> Snapshot:
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-compiler",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=scala_version,
                ),
            ]), ),
    )

    return await Get(
        Snapshot,
        AddPrefix(tool_classpath.content.digest,
                  f"jvm/scala-runtime/{scala_version}"),
    )
Example #11
async def setup_scala_parser_classfiles(jdk: InternalJdk) -> ScalaParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = pkgutil.get_data(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    tool_classpath, parser_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-compiler",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-library",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-reflect",
                            version=PARSER_SCALA_VERSION,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__parsercp", artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS
            ),
        ),
        Get(Digest, CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        ),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(parser_classpath.classpath_entries()),
                "-d",
                dest_dir,
                parser_source.path,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir,),
            description="Compile Scala parser for dependency inference with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Example #12
CIRCE_DEPENDENCIES = [
    Coordinate.from_coord_str(s)
    for s in [
        "io.circe:circe-generic_2.13:0.14.1",
        "org.typelevel:simulacrum-scalafix-annotations_2.13:0.5.4",
        "org.typelevel:cats-core_2.13:2.6.1",
        "org.scala-lang:scala-library:2.13.6",
        "io.circe:circe-numbers_2.13:0.14.1",
        "com.chuusai:shapeless_2.13:2.3.7",
        "io.circe:circe-core_2.13:0.14.1",
        "org.typelevel:cats-kernel_2.13:2.6.1",
    ]
]

SCALA_PARSER_ARTIFACT_REQUIREMENTS = ArtifactRequirements.from_coordinates(
    SCALAMETA_DEPENDENCIES + CIRCE_DEPENDENCIES
)


@dataclass(frozen=True)
class ScalaImport:
    name: str
    alias: str | None
    is_wildcard: bool

    @classmethod
    def from_json_dict(cls, data: Mapping[str, Any]):
        return cls(name=data["name"], alias=data.get("alias"), is_wildcard=data["isWildcard"])

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {
            "name": self.name,
            "alias": self.alias,
            "is_wildcard": self.is_wildcard,
        }
Example #13
async def create_scala_repl_request(
        request: ScalaRepl, bash: BashBinary,
        scala_subsystem: ScalaSubsystem) -> ReplRequest:
    user_classpath = await Get(Classpath, Addresses, request.addresses)

    roots = await Get(CoarsenedTargets, Addresses, request.addresses)
    environs = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(target))
        for target in roots)
    jdk = max(environs, key=lambda j: j.jre_major_version)

    scala_version = scala_subsystem.version_for_resolve(
        user_classpath.resolve.name)
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            prefix="__toolcp",
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-compiler",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-reflect",
                    version=scala_version,
                ),
            ]),
        ),
    )

    user_classpath_prefix = "__cp"
    prefixed_user_classpath = await MultiGet(
        Get(Digest, AddPrefix(d, user_classpath_prefix))
        for d in user_classpath.digests())

    # TODO: Manually merging the `immutable_input_digests` since InteractiveProcess doesn't
    # support them yet. See https://github.com/pantsbuild/pants/issues/13852.
    jdk_digests = await MultiGet(
        Get(Digest, AddPrefix(digest, relpath))
        for relpath, digest in jdk.immutable_input_digests.items())

    repl_digest = await Get(
        Digest,
        MergeDigests([
            *prefixed_user_classpath, tool_classpath.content.digest,
            *jdk_digests
        ]),
    )

    return ReplRequest(
        digest=repl_digest,
        args=[
            *jdk.args(bash, tool_classpath.classpath_entries()),
            "-Dscala.usejavacp=true",
            "scala.tools.nsc.MainGenericRunner",
            "-classpath",
            ":".join(user_classpath.args(prefix=user_classpath_prefix)),
        ],
        extra_env={
            **jdk.env,
            "PANTS_INTERNAL_ABSOLUTE_PREFIX": "",
        },
        run_in_workspace=False,
        append_only_caches=jdk.append_only_caches,
    )
Example #14
def test_resolve_with_packaging(rule_runner: RuleRunner) -> None:
    # Tests that an artifact whose POM reports a packaging type ends up with the proper version
    # and packaging information.
    #   see https://github.com/pantsbuild/pants/issues/13986
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements.from_coordinates([
                Coordinate(group="org.bouncycastle",
                           artifact="bcutil-jdk15on",
                           version="1.70")
            ]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(entries=(
        CoursierLockfileEntry(
            coord=Coordinate(
                group="org.bouncycastle",
                artifact="bcprov-jdk15on",
                version="1.70",
                packaging="jar",
                strict=True,
            ),
            file_name="org.bouncycastle_bcprov-jdk15on_jar_1.70.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                "8f3c20e3e2d565d26f33e8d4857a37d0d7f8ac39b62a7026496fcab1bdac30d4",
                5867298),
            remote_url=None,
            pants_address=None,
        ),
        CoursierLockfileEntry(
            coord=Coordinate(
                group="org.bouncycastle",
                artifact="bcutil-jdk15on",
                version="1.70",
                packaging="jar",
                strict=True,
            ),
            file_name="org.bouncycastle_bcutil-jdk15on_1.70.jar",
            direct_dependencies=Coordinates([
                Coordinate(
                    group="org.bouncycastle",
                    artifact="bcprov-jdk15on",
                    version="1.70",
                    packaging="jar",
                    strict=True,
                )
            ]),
            dependencies=Coordinates([
                Coordinate(
                    group="org.bouncycastle",
                    artifact="bcprov-jdk15on",
                    version="1.70",
                    packaging="jar",
                    strict=True,
                )
            ]),
            file_digest=FileDigest(
                "52dc5551b0257666526c5095424567fed7dc7b00d2b1ba7bd52298411112b1d0",
                482530),
            remote_url=None,
            pants_address=None,
        ),
    ))
Example #15
async def compile_kotlin_source(
    kotlin: KotlinSubsystem,
    kotlinc: KotlincSubsystem,
    request: CompileKotlinSourceRequest,
) -> FallibleClasspathEntry:
    # Request classpath entries for our direct dependencies.
    dependency_cpers = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    direct_dependency_classpath_entries = dependency_cpers.if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    kotlin_version = kotlin.version_for_resolve(request.resolve.name)

    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(KotlinSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )

    plugins_ = await MultiGet(
        Get(
            KotlincPluginTargetsForTarget,
            KotlincPluginsForTargetRequest(target, request.resolve.name),
        )
        for target in request.component.members
    )
    plugins_request = KotlincPluginsRequest.from_target_plugins(plugins_, request.resolve)
    local_plugins = await Get(KotlincPlugins, KotlincPluginsRequest, plugins_request)

    component_members_and_kotlin_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_kotlin_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    toolcp_relpath = "__toolcp"
    local_kotlinc_plugins_relpath = "__localplugincp"
    usercp = "__cp"

    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)

    tool_classpath, sources_digest, jdk = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=kotlin_version,
                        ),
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-scripting-compiler-embeddable",
                            version=kotlin_version,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            Digest,
            MergeDigests(
                (
                    sources.snapshot.digest
                    for _, sources in component_members_and_kotlin_source_files
                )
            ),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        local_kotlinc_plugins_relpath: local_plugins.classpath.digest,
    }
    extra_nailgun_keys = tuple(extra_immutable_input_digests)
    extra_immutable_input_digests.update(user_classpath.immutable_inputs(prefix=usercp))

    classpath_arg = ":".join(user_classpath.immutable_inputs_args(prefix=usercp))

    output_file = compute_output_jar_filename(request.component)
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *(local_plugins.args(local_kotlinc_plugins_relpath)),
                *kotlinc.args,
                *sorted(
                    itertools.chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_kotlin_source_files
                    )
                ),
            ],
            input_digest=sources_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_nailgun_keys,
            output_files=(output_file,),
            description=f"Compile {request.component} with kotlinc",
            level=LogLevel.DEBUG,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(
            process_result.output_digest, (output_file,), direct_dependency_classpath_entries
        )

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
Example #16
async def setup_scalapb_shim_classfiles(
    scalapb: ScalaPBSubsystem,
    jdk: InternalJdk,
) -> ScalaPBShimCompiledClassfiles:
    dest_dir = "classfiles"

    scalapb_shim_content = pkgutil.get_data(
        "pants.backend.codegen.protobuf.scala", "ScalaPBShim.scala")
    if not scalapb_shim_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    scalapb_shim_source = FileContent("ScalaPBShim.scala",
                                      scalapb_shim_content)

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 ScalapbcToolLockfileSentinel())
    tool_classpath, shim_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates([
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-compiler",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-library",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-reflect",
                        version=SHIM_SCALA_VERSION,
                    ),
                ]),
            ),
        ),
        Get(ToolClasspath,
            ToolClasspathRequest(prefix="__shimcp",
                                 lockfile=lockfile_request)),
        Get(Digest, CreateDigest([scalapb_shim_source,
                                  Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (tool_classpath.digest, shim_classpath.digest, source_digest)))

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(shim_classpath.classpath_entries()),
                "-d",
                dest_dir,
                scalapb_shim_source.path,
            ],
            input_digest=merged_digest,
            extra_jvm_options=scalapb.jvm_options,
            output_directories=(dest_dir, ),
            description="Compile ScalaPB shim with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir))
    return ScalaPBShimCompiledClassfiles(digest=stripped_classfiles_digest)
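
The examples above build an ArtifactRequirements either from explicit Coordinate values or from Maven-style "group:artifact:version" strings via Coordinate.from_coord_str. A minimal sketch of that shared pattern follows; the coordinate values are illustrative, and the import path (pants.jvm.resolve.common) is an assumption rather than something shown in the examples.

from pants.jvm.resolve.common import ArtifactRequirements, Coordinate

# Explicit fields, as in most of the test examples above.
junit = Coordinate(group="junit", artifact="junit", version="4.13.2")

# Parsed from a "group:artifact:version" string, as in CIRCE_DEPENDENCIES.
hamcrest = Coordinate.from_coord_str("org.hamcrest:hamcrest-core:1.3")

# Both forms feed into the same factory used throughout the examples.
requirements = ArtifactRequirements.from_coordinates([junit, hamcrest])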