async def setup_scalatest_for_target(
    request: TestSetupRequest,
    jvm: JvmSubsystem,
    scalatest: Scalatest,
    test_subsystem: TestSubsystem,
) -> TestSetup:
    """Build the `JvmProcess` that runs the Scalatest runner for one test target.

    Returns a `TestSetup` carrying the process and the directory prefix under
    which the runner's report files are written.
    """
    # Resolve the JDK and the target's direct dependency targets concurrently.
    jdk, dependencies = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_field(request.field_set.jdk_version)),
        Get(Targets, DependenciesRequest(request.field_set.dependencies)),
    )
    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalatestToolLockfileSentinel())
    # Fetch, concurrently: the user-code classpath, the Scalatest tool classpath,
    # and any loose file sources the tests may read at runtime.
    classpath, scalatest_classpath, files = await MultiGet(
        Get(Classpath, Addresses([request.field_set.address])),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(
            SourceFiles,
            SourceFilesRequest(
                (dep.get(SourcesField) for dep in dependencies),
                for_sources_types=(FileSourceField,),
                enable_codegen=True,
            ),
        ),
    )
    input_digest = await Get(Digest, MergeDigests((*classpath.digests(), files.snapshot.digest)))

    # The tool classpath is mounted at a stable relative path so it can be an
    # immutable (cacheable) input rather than part of the sandbox digest.
    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: scalatest_classpath.digest,
    }

    reports_dir_prefix = "__reports_dir"
    reports_dir = f"{reports_dir_prefix}/{request.field_set.address.path_safe_spec}"

    # Classfiles produced by the root `scalatest_test` targets are the only ones which should run.
    user_classpath_arg = ":".join(classpath.root_args())

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )

    extra_jvm_args: list[str] = []
    if request.is_debug:
        # Add the JVM debug flags (e.g. to wait for a debugger) when `--debug` was requested.
        extra_jvm_args.extend(jvm.debug_args)

    process = JvmProcess(
        jdk=jdk,
        classpath_entries=[
            *classpath.args(),
            *scalatest_classpath.classpath_entries(toolcp_relpath),
        ],
        argv=[
            *extra_jvm_args,
            "org.scalatest.tools.Runner",
            # TODO: We currently give the entire user classpath to the JVM for startup (which
            # mixes it with the user classpath), and then only specify the roots to run here.
            # see https://github.com/pantsbuild/pants/issues/13871
            *(("-R", user_classpath_arg) if user_classpath_arg else ()),
            "-o",
            "-u",
            reports_dir,
            *scalatest.args,
        ],
        input_digest=input_digest,
        extra_immutable_input_digests=extra_immutable_input_digests,
        output_directories=(reports_dir,),
        description=f"Run Scalatest runner for {request.field_set.address}",
        level=LogLevel.DEBUG,
        cache_scope=cache_scope,
        use_nailgun=False,
    )
    return TestSetup(process=process, reports_dir_prefix=reports_dir_prefix)
async def compile_java_source(
    bash: BashBinary,
    javac: JavacSubsystem,
    zip_binary: ZipBinary,
    request: CompileJavaSourceRequest,
) -> FallibleClasspathEntry:
    """Compile one coarsened component of Java sources with javac and jar the output.

    Returns a `FallibleClasspathEntry`: DEPENDENCY_FAILED if any dependency
    failed to compile, otherwise the (possibly empty) jarred classfiles merged
    with any `export`ed dependency entries.
    """
    # Request the component's direct dependency classpath, and additionally any prerequisite.
    optional_prereq_request = [*((request.prerequisite,) if request.prerequisite else ())]
    fallibles = await MultiGet(
        Get(FallibleClasspathEntries, ClasspathEntryRequests(optional_prereq_request)),
        Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request)),
    )
    # Flatten prerequisite + dependency results; `None` means at least one failed.
    direct_dependency_classpath_entries = FallibleClasspathEntries(
        itertools.chain(*fallibles)
    ).if_all_succeeded()
    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    # Capture just the `ClasspathEntry` objects that are listed as `export` types by source analysis
    deps_to_classpath_entries = dict(
        zip(request.component.dependencies, direct_dependency_classpath_entries or ())
    )
    # Re-request inferred dependencies to get a list of export dependency addresses
    inferred_dependencies = await MultiGet(
        Get(
            JavaInferredDependencies,
            JavaInferredDependenciesAndExportsRequest(tgt[JavaSourceField]),
        )
        for tgt in request.component.members
        if JavaFieldSet.is_applicable(tgt)
    )
    flat_exports = {export for i in inferred_dependencies for export in i.exports}
    # A dependency's classpath entry is re-exported if any member of that
    # coarsened dependency target is in the inferred export set.
    export_classpath_entries = [
        classpath_entry
        for coarsened_target, classpath_entry in deps_to_classpath_entries.items()
        if any(m.address in flat_exports for m in coarsened_target.members)
    ]

    # Then collect the component's sources.
    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(JavaSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )
    # Drop members that contributed no Java files (e.g. codegen produced nothing).
    component_members_and_java_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]
    if not component_members_and_java_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    # Prepare an empty output directory for javac, and resolve the JDK to compile with.
    dest_dir = "classfiles"
    dest_dir_digest, jdk = await MultiGet(
        Get(
            Digest,
            CreateDigest([Directory(dest_dir)]),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                dest_dir_digest,
                *(sources.snapshot.digest for _, sources in component_members_and_java_source_files),
            )
        ),
    )
    usercp = "__cp"
    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)
    classpath_arg = ":".join(user_classpath.root_immutable_inputs_args(prefix=usercp))
    immutable_input_digests = dict(user_classpath.root_immutable_inputs(prefix=usercp))

    # Compile.
    compile_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            # javac's entry point lives in the JDK's own tools.jar.
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                *(("-cp", classpath_arg) if classpath_arg else ()),
                *javac.args,
                "-d",
                dest_dir,
                # Sorted for a deterministic (cacheable) command line.
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_java_source_files
                    )
                ),
            ],
            input_digest=merged_digest,
            extra_immutable_input_digests=immutable_input_digests,
            output_directories=(dest_dir,),
            description=f"Compile {request.component} with javac",
            level=LogLevel.DEBUG,
        ),
    )
    if compile_result.exit_code != 0:
        return FallibleClasspathEntry.from_fallible_process_result(
            str(request.component),
            compile_result,
            None,
        )

    # Jar.
    # NB: We jar up the outputs in a separate process because the nailgun runner cannot support
    # invoking via a `bash` wrapper (since the trailing portion of the command is executed by
    # the nailgun server). We might be able to resolve this in the future via a Javac wrapper shim.
    output_snapshot = await Get(Snapshot, Digest, compile_result.output_digest)
    output_file = compute_output_jar_filename(request.component)
    output_files: tuple[str, ...] = (output_file,)
    if output_snapshot.files:
        jar_result = await Get(
            ProcessResult,
            Process(
                argv=[
                    bash.path,
                    "-c",
                    # `cd` into the classfiles dir so paths inside the jar are
                    # relative to it; write the jar one level up.
                    " ".join(
                        ["cd", dest_dir, ";", zip_binary.path, "-r", f"../{output_file}", "."]
                    ),
                ],
                input_digest=compile_result.output_digest,
                output_files=output_files,
                description=f"Capture outputs of {request.component} for javac",
                level=LogLevel.TRACE,
            ),
        )
        jar_output_digest = jar_result.output_digest
    else:
        # If there was no output, then do not create a jar file. This may occur, for example, when compiling
        # a `package-info.java` in a single partition.
        output_files = ()
        jar_output_digest = EMPTY_DIGEST
    output_classpath = ClasspathEntry(
        jar_output_digest, output_files, direct_dependency_classpath_entries
    )

    # Fold any re-exported dependency entries into the output so that dependees
    # of this component see them on their compile classpath.
    if export_classpath_entries:
        merged_export_digest = await Get(
            Digest,
            MergeDigests((output_classpath.digest, *(i.digest for i in export_classpath_entries))),
        )
        merged_classpath = ClasspathEntry.merge(
            merged_export_digest, (output_classpath, *export_classpath_entries)
        )
        output_classpath = merged_classpath

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        compile_result,
        output_classpath,
    )
async def create_scala_repl_request(
    request: ScalaRepl, bash: BashBinary, scala_subsystem: ScalaSubsystem
) -> ReplRequest:
    """Assemble the sandbox digest, argv, and env for launching a Scala REPL.

    The REPL runs on the newest JDK required by any of the requested roots, with
    the Scala compiler/library/reflect jars for the resolve's Scala version on
    the tool classpath and the user code on `-classpath`.
    """
    user_classpath = await Get(Classpath, Addresses, request.addresses)
    roots = await Get(CoarsenedTargets, Addresses, request.addresses)

    # One candidate JDK per coarsened root; pick the highest version so every
    # root can run on it.
    candidate_jdks = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(tgt)) for tgt in roots
    )
    jdk = max(candidate_jdks, key=lambda env: env.jre_major_version)

    scala_version = scala_subsystem.version_for_resolve(user_classpath.resolve.name)
    compiler_coordinates = [
        Coordinate(group="org.scala-lang", artifact=artifact, version=scala_version)
        for artifact in ("scala-compiler", "scala-library", "scala-reflect")
    ]
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            prefix="__toolcp",
            artifact_requirements=ArtifactRequirements.from_coordinates(compiler_coordinates),
        ),
    )

    user_classpath_prefix = "__cp"
    prefixed_user_classpath = await MultiGet(
        Get(Digest, AddPrefix(digest, user_classpath_prefix))
        for digest in user_classpath.digests()
    )

    # TODO: Manually merging the `immutable_input_digests` since InteractiveProcess doesn't
    # support them yet. See https://github.com/pantsbuild/pants/issues/13852.
    jdk_digests = await MultiGet(
        Get(Digest, AddPrefix(digest, relpath))
        for relpath, digest in jdk.immutable_input_digests.items()
    )

    repl_digest = await Get(
        Digest,
        MergeDigests([*prefixed_user_classpath, tool_classpath.content.digest, *jdk_digests]),
    )

    repl_args = [
        *jdk.args(bash, tool_classpath.classpath_entries()),
        "-Dscala.usejavacp=true",
        "scala.tools.nsc.MainGenericRunner",
        "-classpath",
        ":".join(user_classpath.args(prefix=user_classpath_prefix)),
    ]
    return ReplRequest(
        digest=repl_digest,
        args=repl_args,
        extra_env={
            **jdk.env,
            "PANTS_INTERNAL_ABSOLUTE_PREFIX": "",
        },
        run_in_workspace=False,
        append_only_caches=jdk.append_only_caches,
    )
async def bsp_resolve_scala_metadata(
    request: ScalaBSPBuildTargetsMetadataRequest,
    bash: BashBinary,
    jvm: JvmSubsystem,
    scala: ScalaSubsystem,
    build_root: BuildRoot,
) -> BSPBuildTargetsMetadataResult:
    """Compute Scala-specific BSP build-target metadata for the requested field sets.

    Materializes the Scala runtime jars for the (single) resolve's Scala version
    and reports the permanent on-disk JDK home so the IDE can use it.

    Raises:
        ValueError: if the field sets span more than one JVM resolve.
    """
    resolves = {fs.resolve.normalized_value(jvm) for fs in request.field_sets}
    jdk_versions = {fs.jdk for fs in request.field_sets}
    if len(resolves) > 1:
        raise ValueError(
            "Cannot provide Scala metadata for multiple resolves. Please set the "
            "`resolve = jvm:$resolve` field in your `[experimental-bsp].groups_config_files` to "
            "select the relevant resolve to use."
        )
    (resolve,) = resolves
    scala_version = scala.version_for_resolve(resolve)
    scala_runtime = await _materialize_scala_runtime_jars(scala_version)

    #
    # Extract the JDK paths from a lawful-evil process so we can supply it to the IDE.
    #
    # Why lawful-evil?
    # This script relies on implementation details of the Pants JVM execution environment,
    # namely that the Coursier Archive Cache (i.e. where JDKs are extracted to after download)
    # is stored into a predictable location on disk and symlinked into the sandbox on process
    # startup. The script reads the symlink of the cache directory, and outputs the linked
    # location of the JDK (according to Coursier), and we use that to calculate the permanent
    # location of the JDK.
    #
    # Please don't do anything like this except as a last resort.
    #

    # The maximum JDK version will be compatible with all the specified targets
    jdk_requests = [JdkRequest.from_field(version) for version in jdk_versions]
    jdk_request = max(jdk_requests, key=_jdk_request_sort_key(jvm))

    jdk, readlink = await MultiGet(
        Get(JdkEnvironment, JdkRequest, jdk_request),
        Get(ReadlinkBinary, ReadlinkBinaryRequest()),
    )
    # A system JDK may be newer than the best "managed" request; prefer it if so.
    if any(i.version == DefaultJdk.SYSTEM for i in jdk_requests):
        system_jdk = await Get(JdkEnvironment, JdkRequest, JdkRequest.SYSTEM)
        if system_jdk.jre_major_version > jdk.jre_major_version:
            jdk = system_jdk

    cmd = "leak_paths.sh"
    leak_jdk_sandbox_paths = textwrap.dedent(f"""\
        # Script to leak JDK cache paths out of Coursier sandbox so that BSP can use them.
        {readlink.path} {jdk.coursier.cache_dir}
        {jdk.java_home_command}
        """)
    leak_sandbox_path_digest = await Get(
        Digest,
        CreateDigest(
            [
                FileContent(
                    cmd,
                    leak_jdk_sandbox_paths.encode("utf-8"),
                    is_executable=True,
                ),
            ]
        ),
    )
    leaked_paths = await Get(
        ProcessResult,
        Process(
            [
                bash.path,
                cmd,
            ],
            input_digest=leak_sandbox_path_digest,
            immutable_input_digests=jdk.immutable_input_digests,
            env=jdk.env,
            # Empty: this one-off script does not use the nailgun server.
            use_nailgun=(),
            description="Report JDK cache paths for BSP",
            append_only_caches=jdk.append_only_caches,
            level=LogLevel.TRACE,
        ),
    )

    # The script prints two lines: the resolved cache dir, then the JDK home.
    cache_dir, jdk_home = leaked_paths.stdout.decode().strip().split("\n")
    _, sep, suffix = jdk_home.partition(jdk.coursier.cache_dir)
    if sep:
        # Rebase the sandbox-relative JDK home onto the permanent cache location.
        coursier_java_home = cache_dir + suffix
    else:
        # Partition failed. Probably a system JDK instead.
        coursier_java_home = jdk_home

    scala_jar_uris = tuple(
        build_root.pathlib_path.joinpath(".pants.d/bsp").joinpath(p).as_uri()
        for p in scala_runtime.files
    )
    jvm_build_target = JvmBuildTarget(
        java_home=Path(coursier_java_home).as_uri(),
        java_version=f"1.{jdk.jre_major_version}",
    )
    return BSPBuildTargetsMetadataResult(
        metadata=ScalaBuildTarget(
            scala_organization="org.scala-lang",
            scala_version=scala_version,
            # e.g. "2.13.8" -> binary version "2.13".
            scala_binary_version=".".join(scala_version.split(".")[0:2]),
            platform=ScalaPlatform.JVM,
            jars=scala_jar_uris,
            jvm_build_target=jvm_build_target,
        ),
        digest=scala_runtime.digest,
    )
async def compile_kotlin_source(
    kotlin: KotlinSubsystem,
    kotlinc: KotlincSubsystem,
    request: CompileKotlinSourceRequest,
) -> FallibleClasspathEntry:
    """Compile one coarsened component of Kotlin sources with kotlinc into a jar.

    Returns a `FallibleClasspathEntry`: DEPENDENCY_FAILED if any dependency
    failed, otherwise the result of the kotlinc invocation.
    """
    # Request classpath entries for our direct dependencies.
    dependency_cpers = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    direct_dependency_classpath_entries = dependency_cpers.if_all_succeeded()
    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    kotlin_version = kotlin.version_for_resolve(request.resolve.name)

    # Collect the component's sources, pairing each member target with its files.
    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(KotlinSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )

    # Resolve any kotlinc compiler plugins configured for the component's members.
    plugins_ = await MultiGet(
        Get(
            KotlincPluginTargetsForTarget,
            KotlincPluginsForTargetRequest(target, request.resolve.name),
        )
        for target in request.component.members
    )
    plugins_request = KotlincPluginsRequest.from_target_plugins(plugins_, request.resolve)
    local_plugins = await Get(KotlincPlugins, KotlincPluginsRequest, plugins_request)

    # Drop members that contributed no Kotlin files (e.g. codegen produced nothing).
    component_members_and_kotlin_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_kotlin_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    # Stable relative mount points for the tool classpath, local plugin jars,
    # and the user classpath, so they can be immutable (cacheable) inputs.
    toolcp_relpath = "__toolcp"
    local_kotlinc_plugins_relpath = "__localplugincp"
    usercp = "__cp"

    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)

    # Fetch, concurrently: the kotlinc tool classpath, the merged source digest,
    # and the JDK to compile with.
    tool_classpath, sources_digest, jdk = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=kotlin_version,
                        ),
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-scripting-compiler-embeddable",
                            version=kotlin_version,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            Digest,
            MergeDigests(
                (
                    sources.snapshot.digest
                    for _, sources in component_members_and_kotlin_source_files
                )
            ),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        local_kotlinc_plugins_relpath: local_plugins.classpath.digest,
    }
    # Snapshot the nailgun-eligible keys before adding the user classpath entries.
    extra_nailgun_keys = tuple(extra_immutable_input_digests)
    extra_immutable_input_digests.update(user_classpath.immutable_inputs(prefix=usercp))

    classpath_arg = ":".join(user_classpath.immutable_inputs_args(prefix=usercp))

    output_file = compute_output_jar_filename(request.component)
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *(local_plugins.args(local_kotlinc_plugins_relpath)),
                *kotlinc.args,
                # Sorted for a deterministic (cacheable) command line.
                *sorted(
                    itertools.chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_kotlin_source_files
                    )
                ),
            ],
            input_digest=sources_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_nailgun_keys,
            output_files=(output_file,),
            description=f"Compile {request.component} with kotlinc",
            level=LogLevel.DEBUG,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(
            process_result.output_digest, (output_file,), direct_dependency_classpath_entries
        )
    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
async def setup_junit_for_target(
    request: TestSetupRequest,
    jvm: JvmSubsystem,
    junit: JUnit,
    test_subsystem: TestSubsystem,
) -> TestSetup:
    """Build the `JvmProcess` that runs JUnit 5's ConsoleLauncher for one test target.

    Returns a `TestSetup` carrying the process and the directory prefix under
    which the launcher's XML reports are written.
    """
    # Resolve the JDK and the target's transitive closure concurrently; the
    # closure is needed to collect loose `files()` dependencies below.
    jdk, transitive_tgts = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_field(request.field_set.jdk_version)),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.field_set.address])),
    )
    lockfile_request = await Get(GenerateJvmLockfileFromTool, JunitToolLockfileSentinel())
    # Fetch, concurrently: the user-code classpath, the JUnit tool classpath,
    # and any loose file sources the tests may read at runtime.
    classpath, junit_classpath, files = await MultiGet(
        Get(Classpath, Addresses([request.field_set.address])),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(
            SourceFiles,
            SourceFilesRequest(
                (dep.get(SourcesField) for dep in transitive_tgts.dependencies),
                for_sources_types=(FileSourceField,),
                enable_codegen=True,
            ),
        ),
    )
    input_digest = await Get(Digest, MergeDigests((*classpath.digests(), files.snapshot.digest)))

    # The tool classpath is mounted at a stable relative path so it can be an
    # immutable (cacheable) input rather than part of the sandbox digest.
    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: junit_classpath.digest,
    }

    reports_dir_prefix = "__reports_dir"
    reports_dir = f"{reports_dir_prefix}/{request.field_set.address.path_safe_spec}"

    # Classfiles produced by the root `junit_test` targets are the only ones which should run.
    user_classpath_arg = ":".join(classpath.root_args())

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )

    extra_jvm_args: list[str] = []
    if request.is_debug:
        # Add the JVM debug flags (e.g. to wait for a debugger) when `--debug` was requested.
        extra_jvm_args.extend(jvm.debug_args)

    process = JvmProcess(
        jdk=jdk,
        classpath_entries=[
            *classpath.args(),
            *junit_classpath.classpath_entries(toolcp_relpath),
        ],
        argv=[
            *extra_jvm_args,
            "org.junit.platform.console.ConsoleLauncher",
            *(("--classpath", user_classpath_arg) if user_classpath_arg else ()),
            *(("--scan-class-path", user_classpath_arg) if user_classpath_arg else ()),
            "--reports-dir",
            reports_dir,
            *junit.args,
        ],
        input_digest=input_digest,
        extra_jvm_options=junit.jvm_options,
        extra_immutable_input_digests=extra_immutable_input_digests,
        output_directories=(reports_dir,),
        description=f"Run JUnit 5 ConsoleLauncher against {request.field_set.address}",
        level=LogLevel.DEBUG,
        cache_scope=cache_scope,
        use_nailgun=False,
    )
    return TestSetup(process=process, reports_dir_prefix=reports_dir_prefix)
async def create_deploy_jar_run_request(
    field_set: DeployJarFieldSet,
) -> RunRequest:
    """Package the deploy jar for `field_set` and wrap it in a `RunRequest`.

    The jar is built, then the sandbox-relative paths in the process's argv and
    Coursier env vars are rewritten to be relative to the run chroot.
    """
    jdk = await Get(JdkEnvironment, JdkRequest, JdkRequest.from_field(field_set.jdk_version))

    main_class = field_set.main_class.value
    assert main_class is not None

    package = await Get(BuiltPackage, DeployJarFieldSet, field_set)
    assert len(package.artifacts) == 1
    jar_path = package.artifacts[0].relpath
    assert jar_path is not None

    proc = await Get(
        Process,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{{chroot}}/{jar_path}"],
            argv=(main_class,),
            input_digest=package.digest,
            description=f"Run {main_class}.main(String[])",
            use_nailgun=False,
        ),
    )

    # Re-root each immutable input digest under its prefix, and include the
    # materialized runtime JVM.
    support_digests = await MultiGet(
        Get(Digest, AddPrefix(digest, prefix))
        for prefix, digest in proc.immutable_input_digests.items()
    )
    runtime_jvm = await Get(__RuntimeJvm, JdkEnvironment, jdk)
    support_digests += (runtime_jvm.digest,)

    # TODO(#14386) This argument re-writing code should be done in a more standardised way.
    # See also `jdk_rules.py` for other argument re-writing code.
    def chrootify(arg: str, roots: Iterable[str]) -> str:
        # Rewrite sandbox-relative paths so they resolve under the run chroot.
        return f"{{chroot}}/{arg}" if any(arg.startswith(root) for root in roots) else arg

    jdk_roots = (jdk.bin_dir, jdk.jdk_preparation_script, jdk.java_home)
    args = [chrootify(arg, jdk_roots) for arg in proc.argv]

    env = {
        **proc.env,
        "PANTS_INTERNAL_ABSOLUTE_PREFIX": "{chroot}/",
    }
    # absolutify coursier cache envvars
    for name in env:
        if name.startswith("COURSIER"):
            env[name] = chrootify(env[name], (jdk.coursier.cache_dir,))

    request_digest = await Get(
        Digest,
        MergeDigests([proc.input_digest, *support_digests]),
    )
    return RunRequest(
        digest=request_digest,
        args=args,
        extra_env=env,
    )