Example #1
async def fetch_with_coursier(
    coursier: Coursier,
    request: CoursierFetchRequest,
) -> FallibleClasspathEntry:
    # TODO: Loading this per JvmArtifact.
    lockfile = await Get(CoursierResolvedLockfile, CoursierResolveKey,
                         request.resolve)

    # All of the transitive dependencies are exported.
    # TODO: Expose an option to control whether this exports only the root, direct dependencies,
    # transitive dependencies, etc.
    assert len(request.component.members) == 1, "JvmArtifact does not have dependencies."
    root_entry, transitive_entries = lockfile.dependencies(
        request.resolve,
        Coordinate.from_jvm_artifact_target(request.component.representative))

    classpath_entries = await MultiGet(
        Get(ClasspathEntry, CoursierLockfileEntry, entry)
        for entry in (root_entry, *transitive_entries))
    exported_digest = await Get(
        Digest, MergeDigests(cpe.digest for cpe in classpath_entries))

    return FallibleClasspathEntry(
        description=str(request.component),
        result=CompileResult.SUCCEEDED,
        output=ClasspathEntry.merge(exported_digest, classpath_entries),
        exit_code=0,
    )
Example #2
async def deploy_jar_classpath(
    request: DeployJarClasspathEntryRequest,
) -> FallibleClasspathEntry:
    if len(request.component.members) > 1:
        # If multiple DeployJar targets were coarsened into a single instance, it's because they
        # formed a cycle among themselves... but at a high level, they shouldn't have dependencies
        # on one another anyway.
        raise Exception(
            "`deploy_jar` targets should not depend on one another:\n"
            f"{request.component.bullet_list()}")
    fallible_entries = await Get(FallibleClasspathEntries,
                                 ClasspathDependenciesRequest(request))
    classpath_entries = fallible_entries.if_all_succeeded()
    if classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )
    return FallibleClasspathEntry(
        description=str(request.component),
        result=CompileResult.SUCCEEDED,
        output=ClasspathEntry(EMPTY_DIGEST, dependencies=classpath_entries),
        exit_code=0,
    )
Example #3
async def noop_classpath_entry(
    request: NoopClasspathEntryRequest,
) -> FallibleClasspathEntry:

    return FallibleClasspathEntry(
        f"Empty classpath for no-op classpath target {request.component}",
        CompileResult.SUCCEEDED,
        ClasspathEntry(EMPTY_DIGEST, [], []),
        exit_code=0,
    )
Example #4
async def fetch_scala_library(request: ScalaLibraryRequest) -> ClasspathEntry:
    tcp = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=request.version,
                ),
            ]), ),
    )

    return ClasspathEntry(tcp.digest, tcp.content.files)
Example #5
async def fetch_with_coursier(
        request: CoursierFetchRequest) -> FallibleClasspathEntry:
    # TODO: Loading this per JvmArtifact.
    lockfile = await Get(CoursierResolvedLockfile, CoursierResolveKey,
                         request.resolve)

    requirement = ArtifactRequirement.from_jvm_artifact_target(
        request.component.representative)

    if lockfile.metadata and not lockfile.metadata.is_valid_for(
        [requirement], LockfileContext.USER):
        raise ValueError(
            f"Requirement `{requirement.to_coord_arg_str()}` has changed since the lockfile "
            f"for {request.resolve.path} was generated. Run `{bin_name()} generate-lockfiles` to update your "
            "lockfile based on the new requirements.")

    # All of the transitive dependencies are exported.
    # TODO: Expose an option to control whether this exports only the root, direct dependencies,
    # transitive dependencies, etc.
    assert len(request.component.members) == 1, "JvmArtifact does not have dependencies."
    root_entry, transitive_entries = lockfile.dependencies(
        request.resolve,
        requirement.coordinate,
    )

    classpath_entries = await MultiGet(
        Get(ClasspathEntry, CoursierLockfileEntry, entry)
        for entry in (root_entry, *transitive_entries))
    exported_digest = await Get(
        Digest, MergeDigests(cpe.digest for cpe in classpath_entries))

    return FallibleClasspathEntry(
        description=str(request.component),
        result=CompileResult.SUCCEEDED,
        output=ClasspathEntry.merge(exported_digest, classpath_entries),
        exit_code=0,
    )
Example #6
async def fetch_plugins(request: ScalaPluginsRequest) -> ScalaPlugins:
    # Fetch all the artifacts
    coarsened_targets = await Get(
        CoarsenedTargets,
        Addresses(target.address for target in request.artifacts))
    fallible_artifacts = await MultiGet(
        Get(
            FallibleClasspathEntry,
            CoursierFetchRequest(ct, resolve=request.resolve),
        ) for ct in coarsened_targets)

    artifacts = FallibleClasspathEntry.if_all_succeeded(fallible_artifacts)
    if artifacts is None:
        failed = [i for i in fallible_artifacts if i.exit_code != 0]
        raise Exception(f"Fetching local scala plugins failed: {failed}")

    merged_classpath_digest = await Get(
        Digest, MergeDigests(i.digest for i in artifacts))
    merged = ClasspathEntry.merge(merged_classpath_digest, artifacts)

    names = tuple(_plugin_name(target) for target in request.plugins)

    return ScalaPlugins(names=names, classpath=merged)
Example #7
 def args(self, *, prefix: str = "") -> Iterator[str]:
     """All transitive filenames for this Classpath."""
     return ClasspathEntry.args(ClasspathEntry.closure(self.entries),
                                prefix=prefix)
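
A minimal caller-side sketch: the transitive filenames from `args` are typically joined into a single `-classpath` value, much as the compile rules below do with their own helpers. The `classpath` variable and the `__cp` prefix are assumptions for illustration, not part of the example above.

# Hypothetical usage: `classpath` is a Classpath whose entries were materialized under "__cp".
classpath_arg = ":".join(classpath.args(prefix="__cp"))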
Example #8
def test_compile_with_multiple_scala_versions(rule_runner: RuleRunner) -> None:
    scala_library_coord_2_12 = Coordinate(group="org.scala-lang",
                                          artifact="scala-library",
                                          version="2.12.15")
    scala_library_coord_2_13 = Coordinate(group="org.scala-lang",
                                          artifact="scala-library",
                                          version="2.13.8")
    rule_runner.write_files({
        "BUILD":
        dedent("""\
                scala_sources(
                    name = 'main_2.12',
                    resolve = "scala2.12",
                )
                scala_sources(
                    name = 'main_2.13',
                    resolve = "scala2.13",
                )
                jvm_artifact(
                  name="org.scala-lang_scala-library_2.12.15",
                  group="org.scala-lang",
                  artifact="scala-library",
                  version="2.12.15",
                  resolve="scala2.12",
                )
                jvm_artifact(
                  name="org.scala-lang_scala-library_2.13.8",
                  group="org.scala-lang",
                  artifact="scala-library",
                  version="2.13.8",
                  resolve="scala2.13",
                )
                """),
        "Example.scala":
        SCALA_LIB_SOURCE,
        "3rdparty/jvm/scala2.12.lock":
        TestCoursierWrapper.new(entries=(CoursierLockfileEntry(
            coord=scala_library_coord_2_12,
            file_name="org.scala-lang_scala-library_2.12.15.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                "e518bb640e2175de5cb1f8e326679b8d975376221f1b547757de429bbf4563f0",
                5443542,
            ),
        ), ), ).serialize([ArtifactRequirement(scala_library_coord_2_12)]),
        "3rdparty/jvm/scala2.13.lock":
        TestCoursierWrapper.new(entries=(CoursierLockfileEntry(
            coord=scala_library_coord_2_13,
            file_name="org.scala-lang_scala-library_2.13.8.jar",
            direct_dependencies=Coordinates([]),
            dependencies=Coordinates([]),
            file_digest=FileDigest(
                "a0882b82514190c2bac7d1a459872a75f005fc0f3e88b2bc0390367146e35db7",
                6003601,
            ),
        ), ), ).serialize([ArtifactRequirement(scala_library_coord_2_13)]),
    })
    rule_runner.set_options(
        [
            '--scala-version-for-resolve={"scala2.12":"2.12.15","scala2.13":"2.13.8"}',
            '--jvm-resolves={"scala2.12":"3rdparty/jvm/scala2.12.lock","scala2.13":"3rdparty/jvm/scala2.13.lock"}',
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    classpath_2_12 = rule_runner.request(
        ClasspathEntry,
        [
            CompileScalaSourceRequest(
                component=expect_single_expanded_coarsened_target(
                    rule_runner, Address(spec_path="",
                                         target_name="main_2.12")),
                resolve=make_resolve(rule_runner, "scala2.12",
                                     "3rdparty/jvm/scala2.12.lock"),
            )
        ],
    )
    entries_2_12 = list(ClasspathEntry.closure([classpath_2_12]))
    filenames_2_12 = sorted(
        itertools.chain.from_iterable(entry.filenames
                                      for entry in entries_2_12))
    assert filenames_2_12 == [
        ".Example.scala.main_2.12.scalac.jar",
        "org.scala-lang_scala-library_2.12.15.jar",
    ]

    classpath_2_13 = rule_runner.request(
        ClasspathEntry,
        [
            CompileScalaSourceRequest(
                component=expect_single_expanded_coarsened_target(
                    rule_runner, Address(spec_path="",
                                         target_name="main_2.13")),
                resolve=make_resolve(rule_runner, "scala2.13",
                                     "3rdparty/jvm/scala2.13.lock"),
            )
        ],
    )
    entries_2_13 = list(ClasspathEntry.closure([classpath_2_13]))
    filenames_2_13 = sorted(
        itertools.chain.from_iterable(entry.filenames
                                      for entry in entries_2_13))
    assert filenames_2_13 == [
        ".Example.scala.main_2.13.scalac.jar",
        "org.scala-lang_scala-library_2.13.8.jar",
    ]
Example #9
async def compile_java_source(
    bash: BashBinary,
    javac: JavacSubsystem,
    zip_binary: ZipBinary,
    request: CompileJavaSourceRequest,
) -> FallibleClasspathEntry:
    # Request the component's direct dependency classpath, and additionally any prerequisite.
    optional_prereq_request = [
        *((request.prerequisite, ) if request.prerequisite else ())
    ]
    fallibles = await MultiGet(
        Get(FallibleClasspathEntries,
            ClasspathEntryRequests(optional_prereq_request)),
        Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request)),
    )

    direct_dependency_classpath_entries = FallibleClasspathEntries(
        itertools.chain(*fallibles)).if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    # Capture just the `ClasspathEntry` objects that are listed as `export` types by source analysis
    deps_to_classpath_entries = dict(
        zip(request.component.dependencies, direct_dependency_classpath_entries
            or ()))
    # Re-request inferred dependencies to get a list of export dependency addresses
    inferred_dependencies = await MultiGet(
        Get(
            JavaInferredDependencies,
            JavaInferredDependenciesAndExportsRequest(tgt[JavaSourceField]),
        ) for tgt in request.component.members
        if JavaFieldSet.is_applicable(tgt))
    flat_exports = {
        export
        for i in inferred_dependencies for export in i.exports
    }

    export_classpath_entries = [
        classpath_entry for coarsened_target, classpath_entry in
        deps_to_classpath_entries.items()
        if any(m.address in flat_exports for m in coarsened_target.members)
    ]

    # Then collect the component's sources.
    component_members_with_sources = tuple(t for t in request.component.members
                                           if t.has_field(SourcesField))
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField), ),
                    for_sources_types=(JavaSourceField, ),
                    enable_codegen=True,
                ),
            ) for t in component_members_with_sources),
    )
    component_members_and_java_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]
    if not component_members_and_java_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest,
            MergeDigests(cpe.digest
                         for cpe in direct_dependency_classpath_entries))
        classpath_entry = ClasspathEntry.merge(
            exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    dest_dir = "classfiles"
    dest_dir_digest, jdk = await MultiGet(
        Get(
            Digest,
            CreateDigest([Directory(dest_dir)]),
        ),
        Get(JdkEnvironment, JdkRequest,
            JdkRequest.from_target(request.component)),
    )
    merged_digest = await Get(
        Digest,
        MergeDigests((
            dest_dir_digest,
            *(sources.snapshot.digest
              for _, sources in component_members_and_java_source_files),
        )),
    )

    usercp = "__cp"
    user_classpath = Classpath(direct_dependency_classpath_entries,
                               request.resolve)
    classpath_arg = ":".join(
        user_classpath.root_immutable_inputs_args(prefix=usercp))
    immutable_input_digests = dict(
        user_classpath.root_immutable_inputs(prefix=usercp))

    # Compile.
    compile_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                *(("-cp", classpath_arg) if classpath_arg else ()),
                *javac.args,
                "-d",
                dest_dir,
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files for _, sources in
                        component_members_and_java_source_files)),
            ],
            input_digest=merged_digest,
            extra_immutable_input_digests=immutable_input_digests,
            output_directories=(dest_dir, ),
            description=f"Compile {request.component} with javac",
            level=LogLevel.DEBUG,
        ),
    )
    if compile_result.exit_code != 0:
        return FallibleClasspathEntry.from_fallible_process_result(
            str(request.component),
            compile_result,
            None,
        )

    # Jar.
    # NB: We jar up the outputs in a separate process because the nailgun runner cannot support
    # invoking via a `bash` wrapper (since the trailing portion of the command is executed by
    # the nailgun server). We might be able to resolve this in the future via a Javac wrapper shim.
    output_snapshot = await Get(Snapshot, Digest, compile_result.output_digest)
    output_file = compute_output_jar_filename(request.component)
    output_files: tuple[str, ...] = (output_file, )
    if output_snapshot.files:
        jar_result = await Get(
            ProcessResult,
            Process(
                argv=[
                    bash.path,
                    "-c",
                    " ".join([
                        "cd", dest_dir, ";", zip_binary.path, "-r",
                        f"../{output_file}", "."
                    ]),
                ],
                input_digest=compile_result.output_digest,
                output_files=output_files,
                description=f"Capture outputs of {request.component} for javac",
                level=LogLevel.TRACE,
            ),
        )
        jar_output_digest = jar_result.output_digest
    else:
        # If there was no output, then do not create a jar file. This may occur, for example, when compiling
        # a `package-info.java` in a single partition.
        output_files = ()
        jar_output_digest = EMPTY_DIGEST

    output_classpath = ClasspathEntry(jar_output_digest, output_files,
                                      direct_dependency_classpath_entries)

    if export_classpath_entries:
        merged_export_digest = await Get(
            Digest,
            MergeDigests((output_classpath.digest,
                          *(i.digest for i in export_classpath_entries))),
        )
        merged_classpath = ClasspathEntry.merge(
            merged_export_digest,
            (output_classpath, *export_classpath_entries))
        output_classpath = merged_classpath

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        compile_result,
        output_classpath,
    )
Example #10
async def compile_java_source(
    bash: BashBinary,
    jdk_setup: JdkSetup,
    zip_binary: ZipBinary,
    union_membership: UnionMembership,
    request: CompileJavaSourceRequest,
) -> FallibleClasspathEntry:
    # Request the component's direct dependency classpath.
    direct_dependency_classpath_entries = FallibleClasspathEntry.if_all_succeeded(
        await MultiGet(
            Get(
                FallibleClasspathEntry,
                ClasspathEntryRequest,
                ClasspathEntryRequest.for_targets(union_membership,
                                                  component=coarsened_dep,
                                                  resolve=request.resolve),
            ) for coarsened_dep in request.component.dependencies))
    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    # Then collect the component's sources.
    component_members_with_sources = tuple(t for t in request.component.members
                                           if t.has_field(SourcesField))
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField), ),
                    for_sources_types=(JavaSourceField, ),
                    enable_codegen=True,
                ),
            ) for t in component_members_with_sources),
    )
    component_members_and_java_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]
    if not component_members_and_java_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest,
            MergeDigests(cpe.digest
                         for cpe in direct_dependency_classpath_entries))
        classpath_entry = ClasspathEntry.merge(
            exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    dest_dir = "classfiles"
    (merged_direct_dependency_classpath_digest,
     dest_dir_digest) = await MultiGet(
         Get(
             Digest,
             MergeDigests(
                 classfiles.digest
                 for classfiles in direct_dependency_classpath_entries),
         ),
         Get(
             Digest,
             CreateDigest([Directory(dest_dir)]),
         ),
     )

    usercp = "__cp"
    prefixed_direct_dependency_classpath_digest = await Get(
        Digest, AddPrefix(merged_direct_dependency_classpath_digest, usercp))
    classpath_arg = ClasspathEntry.arg(direct_dependency_classpath_entries,
                                       prefix=usercp)

    merged_digest = await Get(
        Digest,
        MergeDigests((
            prefixed_direct_dependency_classpath_digest,
            dest_dir_digest,
            jdk_setup.digest,
            *(sources.snapshot.digest
              for _, sources in component_members_and_java_source_files),
        )),
    )

    # Compile.
    compile_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash,
                                [f"{jdk_setup.java_home}/lib/tools.jar"]),
                "com.sun.tools.javac.Main",
                *(("-cp", classpath_arg) if classpath_arg else ()),
                "-d",
                dest_dir,
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files for _, sources in
                        component_members_and_java_source_files)),
            ],
            input_digest=merged_digest,
            use_nailgun=jdk_setup.digest,
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
            output_directories=(dest_dir, ),
            description=f"Compile {request.component} with javac",
            level=LogLevel.DEBUG,
        ),
    )
    if compile_result.exit_code != 0:
        return FallibleClasspathEntry.from_fallible_process_result(
            str(request.component),
            compile_result,
            None,
        )

    # Jar.
    # NB: We jar up the outputs in a separate process because the nailgun runner cannot support
    # invoking via a `bash` wrapper (since the trailing portion of the command is executed by
    # the nailgun server). We might be able to resolve this in the future via a Javac wrapper shim.
    output_snapshot = await Get(Snapshot, Digest, compile_result.output_digest)
    output_file = f"{request.component.representative.address.path_safe_spec}.jar"
    if output_snapshot.files:
        jar_result = await Get(
            ProcessResult,
            Process(
                argv=[
                    bash.path,
                    "-c",
                    " ".join([
                        "cd", dest_dir, ";", zip_binary.path, "-r",
                        f"../{output_file}", "."
                    ]),
                ],
                input_digest=compile_result.output_digest,
                output_files=(output_file, ),
                description=f"Capture outputs of {request.component} for javac",
                level=LogLevel.TRACE,
            ),
        )
        jar_output_digest = jar_result.output_digest
    else:
        # If there was no output, then do not create a jar file. This may occur, for example, when compiling
        # a `package-info.java` in a single partition.
        jar_output_digest = EMPTY_DIGEST

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        compile_result,
        ClasspathEntry(jar_output_digest, (output_file, ),
                       direct_dependency_classpath_entries),
    )
Example #11
    def root_immutable_inputs(self, *, prefix: str = "") -> Iterator[tuple[str, Digest]]:
        """Returns root (relpath, Digest) tuples for use with `Process.immutable_input_digests`."""
        return ClasspathEntry.immutable_inputs(self.entries, prefix=prefix)
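
Example #9 above consumes this method when building the mapping passed to `JvmProcess`; condensed here as a sketch, with `user_classpath` and the `__cp` prefix assumed for illustration.

usercp = "__cp"
# Maps prefixed relpaths to Digests, suitable for `extra_immutable_input_digests`.
immutable_input_digests = dict(user_classpath.root_immutable_inputs(prefix=usercp))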
Example #12
async def compile_kotlin_source(
    kotlin: KotlinSubsystem,
    kotlinc: KotlincSubsystem,
    request: CompileKotlinSourceRequest,
) -> FallibleClasspathEntry:
    # Request classpath entries for our direct dependencies.
    dependency_cpers = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    direct_dependency_classpath_entries = dependency_cpers.if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    kotlin_version = kotlin.version_for_resolve(request.resolve.name)

    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(KotlinSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )

    plugins_ = await MultiGet(
        Get(
            KotlincPluginTargetsForTarget,
            KotlincPluginsForTargetRequest(target, request.resolve.name),
        )
        for target in request.component.members
    )
    plugins_request = KotlincPluginsRequest.from_target_plugins(plugins_, request.resolve)
    local_plugins = await Get(KotlincPlugins, KotlincPluginsRequest, plugins_request)

    component_members_and_kotlin_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_kotlin_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    toolcp_relpath = "__toolcp"
    local_kotlinc_plugins_relpath = "__localplugincp"
    usercp = "__cp"

    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)

    tool_classpath, sources_digest, jdk = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=kotlin_version,
                        ),
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-scripting-compiler-embeddable",
                            version=kotlin_version,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            Digest,
            MergeDigests(
                (
                    sources.snapshot.digest
                    for _, sources in component_members_and_kotlin_source_files
                )
            ),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        local_kotlinc_plugins_relpath: local_plugins.classpath.digest,
    }
    extra_nailgun_keys = tuple(extra_immutable_input_digests)
    extra_immutable_input_digests.update(user_classpath.immutable_inputs(prefix=usercp))

    classpath_arg = ":".join(user_classpath.immutable_inputs_args(prefix=usercp))

    output_file = compute_output_jar_filename(request.component)
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *(local_plugins.args(local_kotlinc_plugins_relpath)),
                *kotlinc.args,
                *sorted(
                    itertools.chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_kotlin_source_files
                    )
                ),
            ],
            input_digest=sources_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_nailgun_keys,
            output_files=(output_file,),
            description=f"Compile {request.component} with kotlinc",
            level=LogLevel.DEBUG,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(
            process_result.output_digest, (output_file,), direct_dependency_classpath_entries
        )

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
Example #13
async def coursier_fetch_one_coord(
    request: CoursierLockfileEntry,
) -> ClasspathEntry:
    """Run `coursier fetch --intransitive` to fetch a single artifact.

    This rule exists to permit efficient subsetting of a "global" classpath
    in the form of a lockfile.  Callers can determine what subset of dependencies
    from the lockfile are needed for a given target, then request those
    lockfile entries individually.

    By fetching only one entry at a time, we maximize our cache efficiency.  If instead
    we fetched the entire subset that the caller wanted, there would be a different cache
    key for every possible subset.

    This rule also guarantees exact reproducibility.  If all caches have been
    removed, `coursier fetch` will re-download the artifact, and this rule will
    confirm that what was downloaded matches exactly (by content digest) what
    was specified in the lockfile (what Coursier originally downloaded).
    """

    # Prepare any URL- or JAR-specifying entries for use with Coursier
    req: ArtifactRequirement
    if request.pants_address:
        targets = await Get(
            Targets,
            UnparsedAddressInputs([request.pants_address],
                                  owning_address=None,
                                  description_of_origin="TODO(#14468)"),
        )
        req = ArtifactRequirement(request.coord,
                                  jar=targets[0][JvmArtifactJarSourceField])
    else:
        req = ArtifactRequirement(request.coord, url=request.remote_url)

    coursier_resolve_info = await Get(
        CoursierResolveInfo,
        ArtifactRequirements([req]),
    )

    coursier_report_file_name = "coursier_report.json"

    process_result = await Get(
        ProcessResult,
        CoursierFetchProcess(
            args=(
                coursier_report_file_name,
                "--intransitive",
                *coursier_resolve_info.argv,
            ),
            input_digest=coursier_resolve_info.digest,
            output_directories=("classpath", ),
            output_files=(coursier_report_file_name, ),
            description=
            f"Fetching with coursier: {request.coord.to_coord_str()}",
        ),
    )
    report_digest = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([coursier_report_file_name])))
    report_contents = await Get(DigestContents, Digest, report_digest)
    report = json.loads(report_contents[0].content)

    report_deps = report["dependencies"]
    if len(report_deps) == 0:
        raise CoursierError(
            "Coursier fetch report has no dependencies (i.e. nothing was fetched)."
        )
    elif len(report_deps) > 1:
        raise CoursierError(
            "Coursier fetch report has multiple dependencies, but exactly 1 was expected."
        )

    dep = report_deps[0]
    resolved_coord = Coordinate.from_coord_str(dep["coord"])
    if resolved_coord != request.coord:
        raise CoursierError(
            f'Coursier resolved coord "{resolved_coord.to_coord_str()}" does not match requested coord "{request.coord.to_coord_str()}".'
        )

    classpath_dest_name = classpath_dest_filename(dep["coord"], dep["file"])
    classpath_dest = f"classpath/{classpath_dest_name}"

    resolved_file_digest = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([classpath_dest])))
    stripped_digest = await Get(
        Digest, RemovePrefix(resolved_file_digest, "classpath"))
    file_digest = await Get(
        FileDigest,
        ExtractFileDigest(stripped_digest, classpath_dest_name),
    )
    if file_digest != request.file_digest:
        raise CoursierError(
            f"Coursier fetch for '{resolved_coord}' succeeded, but fetched artifact {file_digest} did not match the expected artifact: {request.file_digest}."
        )
    return ClasspathEntry(digest=stripped_digest,
                          filenames=(classpath_dest_name, ))
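
The docstring above motivates fetching one lockfile entry per rule invocation; the caller side of that contract appears in Examples #1 and #5, condensed here as a sketch (it assumes `root_entry` and `transitive_entries` were already read from the resolved lockfile).

# Request each lockfile entry individually so every artifact gets its own cache key,
# then merge the resulting digests into a single exported digest.
classpath_entries = await MultiGet(
    Get(ClasspathEntry, CoursierLockfileEntry, entry)
    for entry in (root_entry, *transitive_entries))
exported_digest = await Get(
    Digest, MergeDigests(cpe.digest for cpe in classpath_entries))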
Example #14
async def coursier_fetch_one_coord(
    bash: BashBinary,
    coursier: Coursier,
    request: CoursierLockfileEntry,
) -> ClasspathEntry:
    """Run `coursier fetch --intransitive` to fetch a single artifact.

    This rule exists to permit efficient subsetting of a "global" classpath
    in the form of a lockfile.  Callers can determine what subset of dependencies
    from the lockfile are needed for a given target, then request those
    lockfile entries individually.

    By fetching only one entry at a time, we maximize our cache efficiency.  If instead
    we fetched the entire subset that the caller wanted, there would be a different cache
    key for every possible subset.

    This rule also guarantees exact reproducibility.  If all caches have been
    removed, `coursier fetch` will re-download the artifact, and this rule will
    confirm that what was downloaded matches exactly (by content digest) what
    was specified in the lockfile (what Coursier originally downloaded).
    """
    coursier_report_file_name = "coursier_report.json"
    process_result = await Get(
        ProcessResult,
        Process(
            argv=coursier.args(
                [
                    coursier_report_file_name, "--intransitive",
                    request.coord.to_coord_str()
                ],
                wrapper=[bash.path, coursier.wrapper_script],
            ),
            input_digest=coursier.digest,
            output_directories=("classpath", ),
            output_files=(coursier_report_file_name, ),
            append_only_caches=coursier.append_only_caches,
            env=coursier.env,
            description=
            f"Fetching with coursier: {request.coord.to_coord_str()}",
            level=LogLevel.DEBUG,
        ),
    )
    report_digest = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([coursier_report_file_name])))
    report_contents = await Get(DigestContents, Digest, report_digest)
    report = json.loads(report_contents[0].content)

    report_deps = report["dependencies"]
    if len(report_deps) == 0:
        raise CoursierError(
            "Coursier fetch report has no dependencies (i.e. nothing was fetched)."
        )
    elif len(report_deps) > 1:
        raise CoursierError(
            "Coursier fetch report has multiple dependencies, but exactly 1 was expected."
        )

    dep = report_deps[0]

    resolved_coord = Coordinate.from_coord_str(dep["coord"])
    if resolved_coord != request.coord:
        raise CoursierError(
            f'Coursier resolved coord "{resolved_coord.to_coord_str()}" does not match requested coord "{request.coord.to_coord_str()}".'
        )

    file_path = PurePath(dep["file"])
    classpath_dest = f"classpath/{file_path.name}"

    resolved_file_digest = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([classpath_dest])))
    stripped_digest = await Get(
        Digest, RemovePrefix(resolved_file_digest, "classpath"))
    file_digest = await Get(
        FileDigest,
        ExtractFileDigest(stripped_digest, file_path.name),
    )
    if file_digest != request.file_digest:
        raise CoursierError(
            f"Coursier fetch for '{resolved_coord}' succeeded, but fetched artifact {file_digest} did not match the expected artifact: {request.file_digest}."
        )
    return ClasspathEntry(digest=stripped_digest, filenames=(file_path.name, ))
Example #15
    def root_args(self, *, prefix: str = "") -> Iterator[str]:
        """The root filenames for this Classpath."""
        return ClasspathEntry.args(self.entries, prefix=prefix)
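
Unlike `args` in Example #7, this walks only the root entries rather than the full closure; a small sketch of the contrast, with `classpath` assumed for illustration.

root_filenames = list(classpath.root_args(prefix="__cp"))  # roots only
all_filenames = list(classpath.args(prefix="__cp"))        # transitive closure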
Example #16
    def digests(self) -> Iterator[Digest]:
        """All transitive Digests for this Classpath."""
        return (entry.digest for entry in ClasspathEntry.closure(self.entries))
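
A hedged usage sketch: the transitive digests are usually merged into a single input digest for a process, mirroring the `MergeDigests` pattern used throughout the rules above (`classpath` is assumed for illustration).

# Hypothetical caller inside a rule: merge every transitive digest into one Digest.
merged_digest = await Get(Digest, MergeDigests(classpath.digests()))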
Example #17
async def compile_scala_source(
    bash: BashBinary,
    coursier: Coursier,
    jdk_setup: JdkSetup,
    scala: ScalaSubsystem,
    union_membership: UnionMembership,
    request: CompileScalaSourceRequest,
) -> FallibleClasspathEntry:
    # Request classpath entries for our direct dependencies.
    direct_dependency_classpath_entries = FallibleClasspathEntry.if_all_succeeded(
        await MultiGet(
            Get(
                FallibleClasspathEntry,
                ClasspathEntryRequest,
                ClasspathEntryRequest.for_targets(union_membership,
                                                  component=coarsened_dep,
                                                  resolve=request.resolve),
            ) for coarsened_dep in request.component.dependencies))
    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    component_members_with_sources = tuple(t for t in request.component.members
                                           if t.has_field(SourcesField))
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField), ),
                    for_sources_types=(ScalaSourceField, ),
                    enable_codegen=True,
                ),
            ) for t in component_members_with_sources),
    )

    component_members_and_scala_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_scala_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest,
            MergeDigests(cpe.digest
                         for cpe in direct_dependency_classpath_entries))
        classpath_entry = ClasspathEntry.merge(
            exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    (
        tool_classpath,
        merged_transitive_dependency_classpath_entries_digest,
    ) = await MultiGet(
        Get(
            MaterializedClasspath,
            MaterializedClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=(ArtifactRequirements([
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-compiler",
                        version=scala.version,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-library",
                        version=scala.version,
                    ),
                ]), ),
            ),
        ),
        Get(
            Digest,
            # Flatten the entire transitive classpath.
            MergeDigests(classfiles.digest
                         for classfiles in ClasspathEntry.closure(
                             direct_dependency_classpath_entries)),
        ),
    )

    usercp = "__cp"
    prefixed_transitive_dependency_classpath_digest = await Get(
        Digest,
        AddPrefix(merged_transitive_dependency_classpath_entries_digest,
                  usercp))

    merged_digest = await Get(
        Digest,
        MergeDigests((
            prefixed_transitive_dependency_classpath_digest,
            tool_classpath.digest,
            jdk_setup.digest,
            *(sources.snapshot.digest
              for _, sources in component_members_and_scala_source_files),
        )),
    )

    classpath_arg = ClasspathEntry.arg(
        ClasspathEntry.closure(direct_dependency_classpath_entries),
        prefix=usercp)

    output_file = f"{request.component.representative.address.path_safe_spec}.jar"
    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash, tool_classpath.classpath_entries()),
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files for _, sources in
                        component_members_and_scala_source_files)),
            ],
            input_digest=merged_digest,
            use_nailgun=jdk_setup.digest,
            output_files=(output_file, ),
            description=f"Compile {request.component} with scalac",
            level=LogLevel.DEBUG,
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(process_result.output_digest, (output_file, ),
                                direct_dependency_classpath_entries)

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
Example #18
    def root_immutable_inputs_args(self, *, prefix: str = "") -> Iterator[str]:
        """Returns root relative filenames for the given entries to be used as immutable_inputs."""
        return ClasspathEntry.immutable_inputs_args(self.entries, prefix=prefix)
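
Example #9 above joins the output of this method into the `-cp` argument for javac; condensed here as a sketch, with `user_classpath` and the prefix assumed for illustration.

usercp = "__cp"
classpath_arg = ":".join(user_classpath.root_immutable_inputs_args(prefix=usercp))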
Example #19
async def assemble_resources_jar(
    zip: ZipBinary,
    request: JvmResourcesRequest,
) -> FallibleClasspathEntry:
    # Request the component's direct dependency classpath, and additionally any prerequisite.
    # Filter out any dependencies that are generated by our current target so that each resource
    # only appears in a single input JAR.
    # NOTE: Generated dependencies will have the same dependencies as the current target, so we
    # don't need to inspect those dependencies.
    optional_prereq_request = [
        *((request.prerequisite, ) if request.prerequisite else ())
    ]
    fallibles = await MultiGet(
        Get(FallibleClasspathEntries,
            ClasspathEntryRequests(optional_prereq_request)),
        Get(FallibleClasspathEntries,
            ClasspathDependenciesRequest(request, ignore_generated=True)),
    )
    direct_dependency_classpath_entries = FallibleClasspathEntries(
        itertools.chain(*fallibles)).if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    source_files = await Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            [tgt.get(SourcesField) for tgt in request.component.members]),
    )

    output_filename = f"{request.component.representative.address.path_safe_spec}.jar"
    output_files = [output_filename]

    resources_jar_input_digest = source_files.snapshot.digest
    resources_jar_result = await Get(
        ProcessResult,
        Process(
            argv=[
                zip.path,
                output_filename,
                *source_files.snapshot.files,
            ],
            description="Build partial JAR containing resources files",
            input_digest=resources_jar_input_digest,
            output_files=output_files,
        ),
    )

    cpe = ClasspathEntry(resources_jar_result.output_digest, output_files, [])

    merged_cpe_digest = await Get(
        Digest,
        MergeDigests(
            chain((cpe.digest, ),
                  (i.digest for i in direct_dependency_classpath_entries))),
    )

    merged_cpe = ClasspathEntry.merge(
        digest=merged_cpe_digest,
        entries=[cpe, *direct_dependency_classpath_entries])

    return FallibleClasspathEntry(output_filename, CompileResult.SUCCEEDED,
                                  merged_cpe, 0)