def test_resolve_with_transitive_deps(rule_runner: RuleRunner) -> None:
    junit_coord = Coordinate(group="junit", artifact="junit", version="4.13.2")
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements([junit_coord]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(entries=(
        CoursierLockfileEntry(
            coord=junit_coord,
            file_name="junit-4.13.2.jar",
            direct_dependencies=Coordinates([HAMCREST_COORD]),
            dependencies=Coordinates([HAMCREST_COORD]),
            file_digest=FileDigest(
                fingerprint=
                "8e495b634469d64fb8acfa3495a065cbacc8a0fff55ce1e31007be4c16dc57d3",
                serialized_bytes_length=384581,
            ),
        ),
        CoursierLockfileEntry(
            coord=HAMCREST_COORD,
            file_name="hamcrest-core-1.3.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
                serialized_bytes_length=45024,
            ),
        ),
    ))

def test_resolve_with_inexact_coord(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [
            # Note the open-ended coordinate here.  We will still resolve this for the user, but the result
            # will be exact and pinned.  As noted above, this is an especially brittle unit test, but version
            # 4.8 was chosen because it has multiple patch versions and no new versions have been uploaded
            # to 4.8.x in over a decade.
            ArtifactRequirements(
                [Coordinate(group="junit", artifact="junit", version="4.8+")]),
        ],
    )

    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=Coordinate(group="junit", artifact="junit", version="4.8.2"),
            file_name="junit-4.8.2.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "a2aa2c3bb2b72da76c3e6a71531f1eefdc350494819baf2b1d80d7146e020f9e",
                serialized_bytes_length=237344,
            ),
        ), ))
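
As an illustrative aside (not part of the example above), the same open-ended requirement could presumably also be built from a Maven-style coordinate string, assuming the `Coordinate.from_coord_str` helper seen in Example #15 accepts an inexact version such as "4.8+":

inexact_requirements = ArtifactRequirements(
    [Coordinate.from_coord_str("junit:junit:4.8+")]
)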

Example #3

async def gather_artifact_requirements(
    request: GatherArtifactRequirementsRequest,
) -> ArtifactRequirements:
    def from_target(tgt: Target) -> Coordinate:
        group = tgt[JvmArtifactGroupField].value
        if not group:
            raise InvalidTargetException(
                f"The `group` field of {tgt.alias} target {tgt.address} must be set."
            )

        artifact = tgt[JvmArtifactArtifactField].value
        if not artifact:
            raise InvalidTargetException(
                f"The `artifact` field of {tgt.alias} target {tgt.address} must be set."
            )

        version = tgt[JvmArtifactVersionField].value
        if not version:
            raise InvalidTargetException(
                f"The `version` field of {tgt.alias} target {tgt.address} must be set."
            )

        return Coordinate(
            group=group,
            artifact=artifact,
            version=version,
        )

    requirements_addresses = await Get(
        Addresses, UnparsedAddressInputs, request.requirements.to_unparsed_address_inputs()
    )
    requirements_targets = await Get(Targets, Addresses, requirements_addresses)

    return ArtifactRequirements(from_target(tgt) for tgt in requirements_targets)
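
For context, this rule consumes `jvm_artifact` targets; below is a hypothetical BUILD file entry (the target name is illustrative) whose `group`, `artifact`, and `version` fields `from_target` would convert into a single Coordinate:

jvm_artifact(
    name="junit_junit",
    group="junit",
    artifact="junit",
    version="4.13.2",
)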

Example #4

async def gather_artifact_requirements(
    request: GatherArtifactRequirementsRequest,
) -> ArtifactRequirements:

    requirements_addresses = await Get(
        Addresses, UnparsedAddressInputs, request.requirements.to_unparsed_address_inputs()
    )
    requirements_targets = await Get(Targets, Addresses, requirements_addresses)

    return ArtifactRequirements(_coordinate_from_target(tgt) for tgt in requirements_targets)
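
Example #4 relies on a shared `_coordinate_from_target` helper that is not shown here; a plausible sketch of it, mirroring the inline `from_target` closure from Example #3:

def _coordinate_from_target(tgt: Target) -> Coordinate:
    # Validate and read the three mandatory fields of a `jvm_artifact` target.
    group = tgt[JvmArtifactGroupField].value
    if not group:
        raise InvalidTargetException(
            f"The `group` field of {tgt.alias} target {tgt.address} must be set."
        )
    artifact = tgt[JvmArtifactArtifactField].value
    if not artifact:
        raise InvalidTargetException(
            f"The `artifact` field of {tgt.alias} target {tgt.address} must be set."
        )
    version = tgt[JvmArtifactVersionField].value
    if not version:
        raise InvalidTargetException(
            f"The `version` field of {tgt.alias} target {tgt.address} must be set."
        )
    return Coordinate(group=group, artifact=artifact, version=version)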

Example #5

def java_parser_artifact_requirements() -> ArtifactRequirements:
    return ArtifactRequirements([
        Coordinate(group="com.fasterxml.jackson.core",
                   artifact="jackson-databind",
                   version="2.12.4"),
        Coordinate(
            group="com.github.javaparser",
            artifact="javaparser-symbol-solver-core",
            version="3.23.0",
        ),
    ])

Example #6

def test_resolve_conflicting(rule_runner: RuleRunner) -> None:
    with engine_error(
            ProcessExecutionFailure,
            contains="Resolution error: Unsatisfied rule Strict(junit:junit)"):
        rule_runner.request(
            CoursierResolvedLockfile,
            [
                ArtifactRequirements([
                    Coordinate(
                        group="junit", artifact="junit", version="4.8.1"),
                    Coordinate(
                        group="junit", artifact="junit", version="4.8.2"),
                ]),
            ],
        )

Example #7

def test_resolve_with_no_deps(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements([HAMCREST_COORD])],
    )
    assert resolved_lockfile == CoursierResolvedLockfile(
        entries=(CoursierLockfileEntry(
            coord=HAMCREST_COORD,
            file_name="hamcrest-core-1.3.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                fingerprint=
                "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9",
                serialized_bytes_length=45024,
            ),
        ), ))

Example #8

def java_parser_artifact_requirements() -> ArtifactRequirements:
    # Update in concert with the target definition for `java_parser`.
    return ArtifactRequirements(
        [
            Coordinate(
                group="com.fasterxml.jackson.core", artifact="jackson-databind", version="2.12.4"
            ),
            Coordinate(
                group="com.fasterxml.jackson.datatype",
                artifact="jackson-datatype-jdk8",
                version="2.12.4",
            ),
            Coordinate(
                group="com.github.javaparser",
                artifact="javaparser-symbol-solver-core",
                version="3.23.0",
            ),
        ],
    )

Example #9

async def run_junit_test(
    bash: BashBinary,
    jdk_setup: JdkSetup,
    junit: JUnit,
    test_subsystem: TestSubsystem,
    field_set: JavaTestFieldSet,
) -> TestResult:
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses(t.address for t in transitive_targets.closure)
    )
    lockfile = await Get(
        CoursierResolvedLockfile,
        CoursierLockfileForTargetRequest(Targets(transitive_targets.closure)),
    )
    materialized_classpath = await Get(
        MaterializedClasspath,
        MaterializedClasspathRequest(
            prefix="__thirdpartycp",
            lockfiles=(lockfile,),
            artifact_requirements=(
                ArtifactRequirements(
                    [
                        Coordinate(
                            group="org.junit.platform",
                            artifact="junit-platform-console",
                            version="1.7.2",
                        ),
                        Coordinate(
                            group="org.junit.jupiter",
                            artifact="junit-jupiter-engine",
                            version="5.7.2",
                        ),
                        Coordinate(
                            group="org.junit.vintage",
                            artifact="junit-vintage-engine",
                            version="5.7.2",
                        ),
                    ]
                ),
            ),
        ),
    )
    transitive_user_classfiles = await MultiGet(
        Get(CompiledClassfiles, CompileJavaSourceRequest(component=t)) for t in coarsened_targets
    )
    merged_transitive_user_classfiles_digest = await Get(
        Digest, MergeDigests(classfiles.digest for classfiles in transitive_user_classfiles)
    )
    usercp_relpath = "__usercp"
    prefixed_transitive_user_classfiles_digest = await Get(
        Digest, AddPrefix(merged_transitive_user_classfiles_digest, usercp_relpath)
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                prefixed_transitive_user_classfiles_digest,
                materialized_classpath.digest,
                jdk_setup.digest,
            )
        ),
    )
    proc = Process(
        argv=[
            *jdk_setup.args(bash, [materialized_classpath.classpath_arg()]),
            "org.junit.platform.console.ConsoleLauncher",
            "--classpath",
            usercp_relpath,
            "--scan-class-path",
            usercp_relpath,
            *junit.options.args,
        ],
        input_digest=merged_digest,
        description=f"Run JUnit 5 ConsoleLauncher against {field_set.address}",
        level=LogLevel.DEBUG,
    )

    process_result = await Get(
        FallibleProcessResult,
        Process,
        proc,
    )

    return TestResult.from_fallible_process_result(
        process_result,
        address=field_set.address,
        output_setting=test_subsystem.output,
    )

Example #10

def test_empty_resolve(rule_runner: RuleRunner) -> None:
    resolved_lockfile = rule_runner.request(
        CoursierResolvedLockfile,
        [ArtifactRequirements([])],
    )
    assert resolved_lockfile == CoursierResolvedLockfile(entries=())

Example #11

async def run_junit_test(
    bash: BashBinary,
    jdk_setup: JdkSetup,
    junit: JUnit,
    test_subsystem: TestSubsystem,
    field_set: JavaTestFieldSet,
) -> TestResult:
    classpath = await Get(Classpath, Addresses([field_set.address]))
    junit_classpath = await Get(
        MaterializedClasspath,
        MaterializedClasspathRequest(
            prefix="__thirdpartycp",
            artifact_requirements=(ArtifactRequirements([
                Coordinate(
                    group="org.junit.platform",
                    artifact="junit-platform-console",
                    version="1.7.2",
                ),
                Coordinate(
                    group="org.junit.jupiter",
                    artifact="junit-jupiter-engine",
                    version="5.7.2",
                ),
                Coordinate(
                    group="org.junit.vintage",
                    artifact="junit-vintage-engine",
                    version="5.7.2",
                ),
            ]), ),
        ),
    )
    merged_digest = await Get(
        Digest,
        MergeDigests((classpath.content.digest, jdk_setup.digest,
                      junit_classpath.digest)),
    )

    reports_dir_prefix = "__reports_dir"
    reports_dir = f"{reports_dir_prefix}/{field_set.address.path_safe_spec}"

    user_classpath_arg = ":".join(classpath.user_classpath_entries())

    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash, [
                    *classpath.classpath_entries(),
                    *junit_classpath.classpath_entries()
                ]),
                "org.junit.platform.console.ConsoleLauncher",
                *(("--classpath",
                   user_classpath_arg) if user_classpath_arg else ()),
                *(("--scan-class-path",
                   user_classpath_arg) if user_classpath_arg else ()),
                "--reports-dir",
                reports_dir,
                *junit.options.args,
            ],
            input_digest=merged_digest,
            output_directories=(reports_dir, ),
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
            description=
            f"Run JUnit 5 ConsoleLauncher against {field_set.address}",
            level=LogLevel.DEBUG,
        ),
    )

    xml_result_subset = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([f"{reports_dir_prefix}/**"])))
    xml_results = await Get(
        Snapshot, RemovePrefix(xml_result_subset, reports_dir_prefix))

    return TestResult.from_fallible_process_result(
        process_result,
        address=field_set.address,
        output_setting=test_subsystem.output,
        xml_results=xml_results,
    )

Example #12

async def compile_scala_source(
    bash: BashBinary,
    coursier: Coursier,
    jdk_setup: JdkSetup,
    scala: ScalaSubsystem,
    union_membership: UnionMembership,
    request: CompileScalaSourceRequest,
) -> FallibleClasspathEntry:
    # Request classpath entries for our direct dependencies.
    direct_dependency_classpath_entries = FallibleClasspathEntry.if_all_succeeded(
        await MultiGet(
            Get(
                FallibleClasspathEntry,
                ClasspathEntryRequest,
                ClasspathEntryRequest.for_targets(union_membership,
                                                  component=coarsened_dep,
                                                  resolve=request.resolve),
            ) for coarsened_dep in request.component.dependencies))
    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    component_members_with_sources = tuple(t for t in request.component.members
                                           if t.has_field(SourcesField))
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField), ),
                    for_sources_types=(ScalaSourceField, ),
                    enable_codegen=True,
                ),
            ) for t in component_members_with_sources),
    )

    component_members_and_scala_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_scala_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest,
            MergeDigests(cpe.digest
                         for cpe in direct_dependency_classpath_entries))
        classpath_entry = ClasspathEntry.merge(
            exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    (
        tool_classpath,
        merged_transitive_dependency_classpath_entries_digest,
    ) = await MultiGet(
        Get(
            MaterializedClasspath,
            MaterializedClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=(ArtifactRequirements([
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-compiler",
                        version=scala.version,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-library",
                        version=scala.version,
                    ),
                ]), ),
            ),
        ),
        Get(
            Digest,
            # Flatten the entire transitive classpath.
            MergeDigests(classfiles.digest
                         for classfiles in ClasspathEntry.closure(
                             direct_dependency_classpath_entries)),
        ),
    )

    usercp = "__cp"
    prefixed_transitive_dependency_classpath_digest = await Get(
        Digest,
        AddPrefix(merged_transitive_dependency_classpath_entries_digest,
                  usercp))

    merged_digest = await Get(
        Digest,
        MergeDigests((
            prefixed_transitive_dependency_classpath_digest,
            tool_classpath.digest,
            jdk_setup.digest,
            *(sources.snapshot.digest
              for _, sources in component_members_and_scala_source_files),
        )),
    )

    classpath_arg = ClasspathEntry.arg(
        ClasspathEntry.closure(direct_dependency_classpath_entries),
        prefix=usercp)

    output_file = f"{request.component.representative.address.path_safe_spec}.jar"
    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash, tool_classpath.classpath_entries()),
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files for _, sources in
                        component_members_and_scala_source_files)),
            ],
            input_digest=merged_digest,
            use_nailgun=jdk_setup.digest,
            output_files=(output_file, ),
            description=f"Compile {request.component} with scalac",
            level=LogLevel.DEBUG,
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(process_result.output_digest, (output_file, ),
                                direct_dependency_classpath_entries)

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )

Example #13

async def coursier_generate_lockfile(
    request: CoursierGenerateLockfileRequest,
    jvm: JvmSubsystem,
    targets_by_resolve_name: JvmTargetsByResolveName,
) -> CoursierGenerateLockfileResult:

    # `targets_by_resolve_name` supplies all of the targets that depend on each JVM resolve, so
    # no need to find transitive deps?

    # This task finds all of the sources that depend on this lockfile, and then resolves
    # a lockfile that satisfies all of their `jvm_artifact` dependencies.

    targets = targets_by_resolve_name.targets_by_resolve_name[request.resolve]

    # Find JVM artifacts in the dependency tree of the targets that depend on this lockfile.
    # These artifacts constitute the requirements that will be resolved for this lockfile.
    dependee_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(tgt.address for tgt in targets)
    )
    resolvable_dependencies = [
        tgt for tgt in dependee_targets.closure if JvmArtifactFieldSet.is_applicable(tgt)
    ]

    artifact_requirements = ArtifactRequirements(
        [_coordinate_from_target(tgt) for tgt in resolvable_dependencies]
    )

    resolved_lockfile = await Get(
        CoursierResolvedLockfile,
        ArtifactRequirements,
        artifact_requirements,
    )
    resolved_lockfile_json = resolved_lockfile.to_json()

    lockfile_path = jvm.options.resolves[request.resolve]

    # Materialise the existing lockfile, and check for changes. We don't want to re-write
    # identical lockfiles.
    existing_lockfile_source = PathGlobs(
        [lockfile_path],
        glob_match_error_behavior=GlobMatchErrorBehavior.ignore,
    )
    existing_lockfile_digest_contents = await Get(
        DigestContents, PathGlobs, existing_lockfile_source
    )

    if not existing_lockfile_digest_contents:
        # The user defined the target and resolved it, but hasn't created a lockfile yet.
        # For convenience, create the initial lockfile for them in the specified path.
        return CoursierGenerateLockfileResult(
            digest=await Get(
                Digest,
                CreateDigest(
                    (
                        FileContent(
                            path=lockfile_path,
                            content=resolved_lockfile_json,
                        ),
                    )
                ),
            )
        )

    existing_lockfile_json = existing_lockfile_digest_contents[0].content

    if resolved_lockfile_json != existing_lockfile_json:
        # The generated lockfile differs from the existing one, so return the digest of the generated one.
        return CoursierGenerateLockfileResult(
            digest=await Get(
                Digest,
                CreateDigest((FileContent(path=lockfile_path, content=resolved_lockfile_json),)),
            )
        )
    # The generated lockfile didn't change, so return an empty digest.
    return CoursierGenerateLockfileResult(
        digest=EMPTY_DIGEST,
    )

Example #14

async def setup_scala_parser_classfiles(
    bash: BashBinary, jdk_setup: JdkSetup
) -> ScalaParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = pkgutil.get_data(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    tool_classpath, parser_classpath, source_digest = await MultiGet(
        Get(
            MaterializedClasspath,
            MaterializedClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=(
                    ArtifactRequirements(
                        [
                            Coordinate(
                                group="org.scala-lang",
                                artifact="scala-compiler",
                                version=PARSER_SCALA_VERSION,
                            ),
                            Coordinate(
                                group="org.scala-lang",
                                artifact="scala-library",
                                version=PARSER_SCALA_VERSION,
                            ),
                            Coordinate(
                                group="org.scala-lang",
                                artifact="scala-reflect",
                                version=PARSER_SCALA_VERSION,
                            ),
                        ]
                    ),
                ),
            ),
        ),
        Get(
            MaterializedClasspath,
            MaterializedClasspathRequest(
                prefix="__parsercp", artifact_requirements=(SCALA_PARSER_ARTIFACT_REQUIREMENTS,)
            ),
        ),
        Get(
            Digest,
            CreateDigest(
                [
                    parser_source,
                    Directory(dest_dir),
                ]
            ),
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                jdk_setup.digest,
                source_digest,
            )
        ),
    )

    # NB: We do not use nailgun for this process, since it is launched exactly once.
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash, tool_classpath.classpath_entries()),
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(parser_classpath.classpath_entries()),
                "-d",
                dest_dir,
                parser_source.path,
            ],
            input_digest=merged_digest,
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
            output_directories=(dest_dir,),
            description="Compile Scala parser for dependency inference with scalac",
            level=LogLevel.DEBUG,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)

Example #15

CIRCE_DEPENDENCIES = [
    Coordinate.from_coord_str(s)
    for s in [
        "io.circe:circe-generic_2.13:0.14.1",
        "org.typelevel:simulacrum-scalafix-annotations_2.13:0.5.4",
        "org.typelevel:cats-core_2.13:2.6.1",
        "org.scala-lang:scala-library:2.13.6",
        "io.circe:circe-numbers_2.13:0.14.1",
        "com.chuusai:shapeless_2.13:2.3.7",
        "io.circe:circe-core_2.13:0.14.1",
        "org.typelevel:cats-kernel_2.13:2.6.1",
    ]
]
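
# Illustrative note (an assumption, not from the original source): each
# "group:artifact:version" string above presumably parses to the keyword form of
# Coordinate used elsewhere in these examples, e.g.:
#   Coordinate.from_coord_str("io.circe:circe-core_2.13:0.14.1")
#   is roughly Coordinate(group="io.circe", artifact="circe-core_2.13", version="0.14.1")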

SCALA_PARSER_ARTIFACT_REQUIREMENTS = ArtifactRequirements(
    SCALAMETA_DEPENDENCIES + CIRCE_DEPENDENCIES
)


@dataclass(frozen=True)
class ScalaImport:
    name: str
    is_wildcard: bool

    @classmethod
    def from_json_dict(cls, data: Mapping[str, Any]):
        return cls(name=data["name"], is_wildcard=data["isWildcard"])

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {
            "name": self.name,