async def materialize_scala_runtime_jars(
    request: MaterializeScalaRuntimeJarsRequest,
) -> MaterializeScalaRuntimeJarsResult:
    """Resolve the Scala compiler and library jars for the requested Scala version
    and snapshot them under the `jvm/scala-runtime/<version>` prefix."""
    runtime_coordinates = [
        Coordinate(group="org.scala-lang", artifact=artifact, version=request.scala_version)
        for artifact in ("scala-compiler", "scala-library")
    ]
    runtime_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates(runtime_coordinates)
        ),
    )
    prefixed_digest = await Get(
        Digest,
        AddPrefix(runtime_classpath.content.digest, f"jvm/scala-runtime/{request.scala_version}"),
    )
    materialized_snapshot = await Get(Snapshot, Digest, prefixed_digest)
    return MaterializeScalaRuntimeJarsResult(materialized_snapshot)
async def analyze_java_source_dependencies(
    processor_classfiles: JavaParserCompiledClassfiles,
    jdk: InternalJdk,
    request: JavaSourceDependencyAnalysisRequest,
) -> FallibleJavaSourceDependencyAnalysisResult:
    """Run the compiled Java parser launcher over exactly one Java source file and
    capture its dependency-analysis output (`__source_analysis.json`).

    Raises:
        ValueError: if `request.source_files` does not contain exactly one file.
    """
    source_files = request.source_files
    # Fix: these messages previously blamed `parse_java_package`; they now name this
    # rule so failures are attributable to the right place.
    if len(source_files.files) > 1:
        raise ValueError(
            "analyze_java_source_dependencies expects sources with exactly 1 source file, "
            f"but found {len(source_files.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_java_source_dependencies expects sources with exactly 1 source file, "
            "but found none."
        )
    # Re-root the source under a distinct prefix so it cannot collide with tool inputs.
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"
    parser_lockfile_request = await Get(GenerateJvmLockfileFromTool, JavaParserToolLockfileSentinel())
    tool_classpath, prefixed_source_files_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(lockfile=parser_lockfile_request),
        ),
        Get(Digest, AddPrefix(source_files.snapshot.digest, source_prefix)),
    )
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }
    analysis_output_path = "__source_analysis.json"
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                processorcp_relpath,
            ],
            argv=[
                "org.pantsbuild.javaparser.PantsJavaParserLauncher",
                analysis_output_path,
                source_path,
            ],
            input_digest=prefixed_source_files_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            output_files=(analysis_output_path,),
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Analyzing {source_files.files[0]}",
            level=LogLevel.DEBUG,
        ),
    )
    # Fallible: the caller decides how to surface parse failures.
    return FallibleJavaSourceDependencyAnalysisResult(process_result=process_result)
async def setup_google_java_format(
    setup_request: SetupRequest,
    tool: GoogleJavaFormatSubsystem,
    jdk: InternalJdk,
) -> Setup:
    """Prepare the JvmProcess that runs Google Java Format over the request's sources.

    In check mode (`setup_request.check_only`) the formatter runs with `--dry-run`;
    otherwise it rewrites the files in place with `--replace`.
    """
    lockfile_request = await Get(GenerateJvmLockfileFromTool, GoogleJavaFormatToolLockfileSentinel())
    source_files, tool_classpath = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(field_set.source for field_set in setup_request.request.field_sets),
        ),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
    )
    # If a prior formatter already ran, operate on its output, not the raw sources.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )
    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }
    # On JRE 11+ the formatter needs javac compiler internals explicitly exported.
    maybe_java11_or_higher_options = []
    if jdk.jre_major_version >= 11:
        maybe_java11_or_higher_options = [
            "--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED",
        ]
    args = [
        *maybe_java11_or_higher_options,
        "com.google.googlejavaformat.java.Main",
        *(["--aosp"] if tool.aosp else []),
        "--dry-run" if setup_request.check_only else "--replace",
        *source_files.files,
    ]
    process = JvmProcess(
        jdk=jdk,
        argv=args,
        classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
        input_digest=source_files_snapshot.digest,
        # Fix: honor user-configured JVM options, as the fmt rule for this same
        # subsystem already does (`google_java_format_fmt`).
        extra_jvm_options=tool.jvm_options,
        extra_immutable_input_digests=extra_immutable_input_digests,
        extra_nailgun_keys=extra_immutable_input_digests,
        output_files=source_files_snapshot.files,
        description=f"Run Google Java Format on {pluralize(len(setup_request.request.field_sets), 'file')}.",
        level=LogLevel.DEBUG,
    )
    return Setup(process, original_snapshot=source_files_snapshot)
async def setup_scalafmt(
    setup_request: SetupRequest,
    tool: ScalafmtSubsystem,
) -> Setup:
    """Prepare per-config-file scalafmt partitions covering the request's sources.

    Sources are grouped by the scalafmt config file that governs their directory, and
    one `Partition` is set up per config file so each scalafmt invocation sees a single
    consistent configuration.
    """
    toolcp_relpath = "__toolcp"
    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalafmtToolLockfileSentinel())
    source_files, tool_classpath = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(field_set.source for field_set in setup_request.request.field_sets),
        ),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
    )
    # If a prior formatter already ran, operate on its output, not the raw sources.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )
    config_files = await Get(
        ScalafmtConfigFiles, GatherScalafmtConfigFilesRequest(source_files_snapshot)
    )
    # Sources and their config files must live in one digest for the tool run.
    merged_sources_digest = await Get(
        Digest, MergeDigests([source_files_snapshot.digest, config_files.snapshot.digest])
    )
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }
    # Partition the work by which source files share the same config file (regardless of directory).
    source_files_by_config_file: dict[str, set[str]] = defaultdict(set)
    for source_dir, files_in_source_dir in group_by_dir(source_files_snapshot.files).items():
        config_file = config_files.source_dir_to_config_file[source_dir]
        source_files_by_config_file[config_file].update(
            os.path.join(source_dir, name) for name in files_in_source_dir
        )
    partitions = await MultiGet(
        Get(
            Partition,
            SetupScalafmtPartition(
                classpath_entries=tuple(tool_classpath.classpath_entries(toolcp_relpath)),
                merged_sources_digest=merged_sources_digest,
                extra_immutable_input_digests=FrozenDict(extra_immutable_input_digests),
                config_file=config_file,
                # Sorted for deterministic process arguments (and thus caching).
                files=tuple(sorted(files)),
                check_only=setup_request.check_only,
            ),
        )
        for config_file, files in source_files_by_config_file.items()
    )
    return Setup(tuple(partitions), original_snapshot=source_files_snapshot)
async def google_java_format_fmt(
    request: GoogleJavaFormatRequest,
    tool: GoogleJavaFormatSubsystem,
    jdk: InternalJdk,
) -> FmtResult:
    """Rewrite the request's Java sources in place with Google Java Format."""
    if tool.skip:
        return FmtResult.skip(formatter_name=request.name)

    lockfile = await Get(GenerateJvmLockfileFromTool, GoogleJavaFormatToolLockfileSentinel())
    formatter_classpath = await Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile))

    classpath_relpath = "__toolcp"
    immutable_digests = {classpath_relpath: formatter_classpath.digest}

    # On JRE 11+ the formatter needs javac compiler internals explicitly exported.
    jdk11_exports = (
        [
            f"--add-exports=jdk.compiler/com.sun.tools.javac.{pkg}=ALL-UNNAMED"
            for pkg in ("api", "file", "parser", "tree", "util")
        ]
        if jdk.jre_major_version >= 11
        else []
    )
    argv = [
        *jdk11_exports,
        "com.google.googlejavaformat.java.Main",
        *(["--aosp"] if tool.aosp else []),
        "--replace",
        *request.snapshot.files,
    ]
    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            argv=argv,
            classpath_entries=formatter_classpath.classpath_entries(classpath_relpath),
            input_digest=request.snapshot.digest,
            extra_jvm_options=tool.jvm_options,
            extra_immutable_input_digests=immutable_digests,
            extra_nailgun_keys=immutable_digests,
            output_files=request.snapshot.files,
            description=f"Run Google Java Format on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    formatted = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, formatted, strip_chroot_path=True)
async def analyze_scala_source_dependencies(
    jdk: InternalJdk,
    processor_classfiles: ScalaParserCompiledClassfiles,
    source_files: SourceFiles,
) -> FallibleScalaSourceDependencyAnalysisResult:
    """Run the Scala parser over exactly one Scala source file and capture its
    dependency-analysis output (`__source_analysis.json`).

    Raises:
        ValueError: if `source_files` does not contain exactly one file.
    """
    # Fix: report the length of the collection actually tested by the guard
    # (`source_files.files`), not `source_files.snapshot.files`, so the message
    # can never contradict the condition that raised it.
    if len(source_files.files) > 1:
        raise ValueError(
            "analyze_scala_source_dependencies expects sources with exactly 1 source file, "
            f"but found {len(source_files.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_scala_source_dependencies expects sources with exactly 1 source file, "
            "but found none."
        )
    # Re-root the source under a distinct prefix so it cannot collide with tool inputs.
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"
    (
        tool_classpath,
        prefixed_source_files_digest,
    ) = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS),
        ),
        Get(Digest, AddPrefix(source_files.snapshot.digest, source_prefix)),
    )
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }
    analysis_output_path = "__source_analysis.json"
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                processorcp_relpath,
            ],
            argv=[
                "org.pantsbuild.backend.scala.dependency_inference.ScalaParser",
                analysis_output_path,
                source_path,
            ],
            input_digest=prefixed_source_files_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            output_files=(analysis_output_path,),
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Analyzing {source_files.files[0]}",
            level=LogLevel.DEBUG,
        ),
    )
    # Fallible: the caller decides how to surface parse failures.
    return FallibleScalaSourceDependencyAnalysisResult(process_result=process_result)
async def build_processors(jdk: InternalJdk) -> JavaParserCompiledClassfiles:
    """Compile the Java parser launcher source with javac into loose classfiles.

    The launcher source is loaded in-memory, compiled against the parser tool
    classpath, and the resulting classfiles are returned with the staging directory
    prefix stripped.
    """
    dest_dir = "classfiles"
    parser_lockfile_request = await Get(GenerateJvmLockfileFromTool, JavaParserToolLockfileSentinel())
    materialized_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(prefix="__toolcp", lockfile=parser_lockfile_request),
        ),
        Get(
            Digest,
            CreateDigest([
                FileContent(
                    path=_LAUNCHER_BASENAME,
                    content=_load_javaparser_launcher_source(),
                ),
                # Pre-create the output directory so javac's `-d` target exists.
                Directory(dest_dir),
            ]),
        ),
    )
    merged_digest = await Get(
        Digest,
        MergeDigests((
            materialized_classpath.digest,
            source_digest,
        )),
    )
    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            # NOTE(review): relies on `$JAVA_HOME/lib/tools.jar`, which only exists on
            # JDK 8 and earlier — confirm the JDK selection guarantees this.
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                "-cp",
                ":".join(materialized_classpath.classpath_entries()),
                "-d",
                dest_dir,
                _LAUNCHER_BASENAME,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir,),
            description=f"Compile {_LAUNCHER_BASENAME} import processors with javac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return JavaParserCompiledClassfiles(digest=stripped_classfiles_digest)
async def global_scalac_plugins(
    loaded_global_plugins: _LoadedGlobalScalacPlugins,
) -> GlobalScalacPlugins:
    """Resolve the classpath for the globally-configured scalac plugins."""
    lockfile = await Get(
        GenerateJvmLockfileFromTool, GlobalScalacPluginsToolLockfileSentinel()
    )
    plugin_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(prefix="__scalac_plugin_cp", lockfile=lockfile),
    )
    return GlobalScalacPlugins(loaded_global_plugins.names, plugin_classpath)
async def materialize_jvm_plugin(
    request: MaterializeJvmPluginRequest,
) -> MaterializedJvmPlugin:
    """Resolve one `[scalapb].jvm_plugins` artifact into a tool classpath."""
    coordinates_request = GatherJvmCoordinatesRequest(
        artifact_inputs=FrozenOrderedSet([request.plugin.artifact]),
        option_name="[scalapb].jvm_plugins",
    )
    requirements = await Get(ArtifactRequirements, coordinates_request)
    plugin_classpath = await Get(
        ToolClasspath, ToolClasspathRequest(artifact_requirements=requirements)
    )
    return MaterializedJvmPlugin(name=request.plugin.name, classpath=plugin_classpath)
async def fetch_scala_library(request: ScalaLibraryRequest) -> ClasspathEntry:
    """Fetch the `scala-library` artifact for the requested version as a classpath entry."""
    library_coordinate = Coordinate(
        group="org.scala-lang",
        artifact="scala-library",
        version=request.version,
    )
    resolved = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates([library_coordinate])
        ),
    )
    return ClasspathEntry(resolved.digest, resolved.content.files)
async def ktlint_fmt(request: KtlintRequest, tool: KtlintSubsystem, jdk: InternalJdk) -> FmtResult:
    """Rewrite the request's Kotlin sources in place by running Ktlint with `-F`."""
    if tool.skip:
        return FmtResult.skip(formatter_name=request.name)

    lockfile = await Get(GenerateJvmLockfileFromTool, KtlintToolLockfileSentinel())
    ktlint_classpath = await Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile))

    classpath_relpath = "__toolcp"
    immutable_digests = {classpath_relpath: ktlint_classpath.digest}

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            argv=["com.pinterest.ktlint.Main", "-F", *request.snapshot.files],
            classpath_entries=ktlint_classpath.classpath_entries(classpath_relpath),
            input_digest=request.snapshot.digest,
            extra_jvm_options=tool.jvm_options,
            extra_immutable_input_digests=immutable_digests,
            extra_nailgun_keys=immutable_digests,
            output_files=request.snapshot.files,
            description=f"Run Ktlint on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    formatted = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, formatted, strip_chroot_path=True)
async def _materialize_scala_runtime_jars(scala_version: str) -> Snapshot:
    """Resolve the Scala compiler and library jars for `scala_version` and return a
    snapshot of them rooted under `jvm/scala-runtime/<version>`."""
    runtime_coordinates = [
        Coordinate(group="org.scala-lang", artifact=artifact, version=scala_version)
        for artifact in ("scala-compiler", "scala-library")
    ]
    runtime_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            artifact_requirements=ArtifactRequirements.from_coordinates(runtime_coordinates)
        ),
    )
    return await Get(
        Snapshot,
        AddPrefix(runtime_classpath.content.digest, f"jvm/scala-runtime/{scala_version}"),
    )
async def setup_scalatest_for_target(
    request: TestSetupRequest,
    jvm: JvmSubsystem,
    scalatest: Scalatest,
    test_subsystem: TestSubsystem,
) -> TestSetup:
    """Build the JvmProcess that runs `org.scalatest.tools.Runner` for one test target.

    Gathers the target's classpath, the Scalatest tool classpath, and any `file`
    sources from dependencies, and wires report output under a well-known prefix so
    callers can extract XML reports afterwards.
    """
    jdk, dependencies = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_field(request.field_set.jdk_version)),
        Get(Targets, DependenciesRequest(request.field_set.dependencies)),
    )
    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalatestToolLockfileSentinel())
    classpath, scalatest_classpath, files = await MultiGet(
        Get(Classpath, Addresses([request.field_set.address])),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        # Loose `file` sources the tests may read at runtime.
        Get(
            SourceFiles,
            SourceFilesRequest(
                (dep.get(SourcesField) for dep in dependencies),
                for_sources_types=(FileSourceField,),
                enable_codegen=True,
            ),
        ),
    )
    input_digest = await Get(Digest, MergeDigests((*classpath.digests(), files.snapshot.digest)))
    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: scalatest_classpath.digest,
    }
    reports_dir_prefix = "__reports_dir"
    reports_dir = f"{reports_dir_prefix}/{request.field_set.address.path_safe_spec}"
    # Classfiles produced by the root `scalatest_test` targets are the only ones which should run.
    user_classpath_arg = ":".join(classpath.root_args())
    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )
    extra_jvm_args: list[str] = []
    if request.is_debug:
        extra_jvm_args.extend(jvm.debug_args)
    process = JvmProcess(
        jdk=jdk,
        classpath_entries=[
            *classpath.args(),
            *scalatest_classpath.classpath_entries(toolcp_relpath),
        ],
        argv=[
            *extra_jvm_args,
            "org.scalatest.tools.Runner",
            # TODO: We currently give the entire user classpath to the JVM for startup (which
            # mixes it with the user classpath), and then only specify the roots to run here.
            # see https://github.com/pantsbuild/pants/issues/13871
            *(("-R", user_classpath_arg) if user_classpath_arg else ()),
            "-o",
            "-u",
            reports_dir,
            *scalatest.args,
        ],
        input_digest=input_digest,
        extra_immutable_input_digests=extra_immutable_input_digests,
        output_directories=(reports_dir,),
        description=f"Run Scalatest runner for {request.field_set.address}",
        level=LogLevel.DEBUG,
        cache_scope=cache_scope,
        # Interactive/debug runs and report capture preclude nailgun reuse here.
        use_nailgun=False,
    )
    return TestSetup(process=process, reports_dir_prefix=reports_dir_prefix)
async def compile_avro_source(
    request: CompileAvroSourceRequest,
    jdk: InternalJdk,
    avro_tools: AvroSubsystem,
) -> CompiledAvroSource:
    """Generate Java sources from a single Avro file.

    Supports `.avsc` (schema) and `.avpr` (protocol) directly; `.avdl` (IDL) is first
    converted to a `.avpr` protocol and then compiled. The returned digest has the
    staging output directory prefix removed.

    Raises:
        AssertionError: if the file extension is not one of `.avsc`/`.avpr`/`.avdl`.
    """
    # NOTE(review): the `avro_tools` parameter is not referenced in this body — confirm
    # whether it is needed (e.g. for options) or only to register the subsystem.
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"
    lockfile_request = await Get(GenerateJvmLockfileFromTool, AvroToolLockfileSentinel())
    tool_classpath, subsetted_input_digest, empty_output_dir = await MultiGet(
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        # Narrow the input digest to exactly the requested file; error if absent.
        Get(
            Digest,
            DigestSubset(
                request.digest,
                PathGlobs(
                    [request.path],
                    glob_match_error_behavior=GlobMatchErrorBehavior.error,
                    conjunction=GlobExpansionConjunction.all_match,
                    description_of_origin="the Avro source file name",
                ),
            ),
        ),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            [
                subsetted_input_digest,
                empty_output_dir,
            ]
        ),
    )
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    def make_avro_process(
        args: Iterable[str],
        *,
        overridden_input_digest: Digest | None = None,
        overridden_output_dir: str | None = None,
    ) -> JvmProcess:
        # Helper: one `org.apache.avro.tool.Main` invocation; the IDL branch overrides
        # the input digest and output directory for its intermediate step.
        return JvmProcess(
            jdk=jdk,
            argv=(
                "org.apache.avro.tool.Main",
                *args,
            ),
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            input_digest=(
                overridden_input_digest if overridden_input_digest is not None else input_digest
            ),
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description="Generating Java sources from Avro source.",
            level=LogLevel.DEBUG,
            output_directories=(overridden_output_dir if overridden_output_dir else output_dir,),
        )

    path = PurePath(request.path)
    if path.suffix == ".avsc":
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(["compile", "schema", request.path, output_dir]),
        )
    elif path.suffix == ".avpr":
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(["compile", "protocol", request.path, output_dir]),
        )
    elif path.suffix == ".avdl":
        # Two-step: `idl` converts the .avdl to a .avpr protocol in a scratch dir,
        # then `compile protocol` turns that protocol into Java sources.
        idl_output_dir = "__idl"
        avpr_path = os.path.join(idl_output_dir, str(path.with_suffix(".avpr")))
        idl_output_dir_digest = await Get(
            Digest, CreateDigest([Directory(os.path.dirname(avpr_path))])
        )
        idl_input_digest = await Get(Digest, MergeDigests([input_digest, idl_output_dir_digest]))
        idl_result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(
                ["idl", request.path, avpr_path],
                overridden_input_digest=idl_input_digest,
                overridden_output_dir=idl_output_dir,
            ),
        )
        generated_files_dir = await Get(Digest, CreateDigest([Directory(output_dir)]))
        protocol_input_digest = await Get(
            Digest, MergeDigests([idl_result.output_digest, generated_files_dir])
        )
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(
                ["compile", "protocol", avpr_path, output_dir],
                overridden_input_digest=protocol_input_digest,
            ),
        )
    else:
        raise AssertionError(
            f"Avro backend does not support files with extension `{path.suffix}`: {path}"
        )
    normalized_digest = await Get(Digest, RemovePrefix(result.output_digest, output_dir))
    return CompiledAvroSource(normalized_digest)
async def create_scala_repl_request(
    request: ScalaRepl, bash: BashBinary, scala_subsystem: ScalaSubsystem
) -> ReplRequest:
    """Assemble the digest, argv, and environment for a Scala REPL over the
    requested addresses' classpath."""
    user_classpath = await Get(Classpath, Addresses, request.addresses)
    roots = await Get(CoarsenedTargets, Addresses, request.addresses)
    environs = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(target)) for target in roots
    )
    # Use the highest JRE major version among the roots' JDK environments.
    jdk = max(environs, key=lambda j: j.jre_major_version)
    scala_version = scala_subsystem.version_for_resolve(user_classpath.resolve.name)
    tool_classpath = await Get(
        ToolClasspath,
        ToolClasspathRequest(
            prefix="__toolcp",
            artifact_requirements=ArtifactRequirements.from_coordinates([
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-compiler",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-library",
                    version=scala_version,
                ),
                Coordinate(
                    group="org.scala-lang",
                    artifact="scala-reflect",
                    version=scala_version,
                ),
            ]),
        ),
    )
    user_classpath_prefix = "__cp"
    prefixed_user_classpath = await MultiGet(
        Get(Digest, AddPrefix(d, user_classpath_prefix)) for d in user_classpath.digests()
    )
    # TODO: Manually merging the `immutable_input_digests` since InteractiveProcess doesn't
    # support them yet. See https://github.com/pantsbuild/pants/issues/13852.
    jdk_digests = await MultiGet(
        Get(Digest, AddPrefix(digest, relpath))
        for relpath, digest in jdk.immutable_input_digests.items()
    )
    repl_digest = await Get(
        Digest,
        MergeDigests([*prefixed_user_classpath, tool_classpath.content.digest, *jdk_digests]),
    )
    return ReplRequest(
        digest=repl_digest,
        args=[
            *jdk.args(bash, tool_classpath.classpath_entries()),
            "-Dscala.usejavacp=true",
            "scala.tools.nsc.MainGenericRunner",
            "-classpath",
            ":".join(user_classpath.args(prefix=user_classpath_prefix)),
        ],
        extra_env={
            **jdk.env,
            "PANTS_INTERNAL_ABSOLUTE_PREFIX": "",
        },
        run_in_workspace=False,
        append_only_caches=jdk.append_only_caches,
    )
async def compile_kotlin_source(
    kotlin: KotlinSubsystem,
    kotlinc: KotlincSubsystem,
    request: CompileKotlinSourceRequest,
) -> FallibleClasspathEntry:
    """Compile one coarsened component of Kotlin sources with kotlinc into a jar.

    Fails fast (without compiling) if any direct dependency failed to produce a
    classpath entry. Components with no Kotlin sources are treated as generators and
    simply re-export their direct dependencies' classpath.
    """
    # Request classpath entries for our direct dependencies.
    dependency_cpers = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    direct_dependency_classpath_entries = dependency_cpers.if_all_succeeded()
    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )
    kotlin_version = kotlin.version_for_resolve(request.resolve.name)
    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    # Pair each member target with its hydrated Kotlin sources.
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(KotlinSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )
    plugins_ = await MultiGet(
        Get(
            KotlincPluginTargetsForTarget,
            KotlincPluginsForTargetRequest(target, request.resolve.name),
        )
        for target in request.component.members
    )
    plugins_request = KotlincPluginsRequest.from_target_plugins(plugins_, request.resolve)
    local_plugins = await Get(KotlincPlugins, KotlincPluginsRequest, plugins_request)
    # Drop members whose source request hydrated to nothing.
    component_members_and_kotlin_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]
    if not component_members_and_kotlin_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )
    toolcp_relpath = "__toolcp"
    local_kotlinc_plugins_relpath = "__localplugincp"
    usercp = "__cp"
    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)
    tool_classpath, sources_digest, jdk = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=kotlin_version,
                        ),
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-scripting-compiler-embeddable",
                            version=kotlin_version,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            Digest,
            MergeDigests(
                (
                    sources.snapshot.digest
                    for _, sources in component_members_and_kotlin_source_files
                )
            ),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        local_kotlinc_plugins_relpath: local_plugins.classpath.digest,
    }
    # Capture the nailgun keys before adding the per-compile user classpath entries.
    extra_nailgun_keys = tuple(extra_immutable_input_digests)
    extra_immutable_input_digests.update(user_classpath.immutable_inputs(prefix=usercp))
    classpath_arg = ":".join(user_classpath.immutable_inputs_args(prefix=usercp))
    output_file = compute_output_jar_filename(request.component)
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *(local_plugins.args(local_kotlinc_plugins_relpath)),
                *kotlinc.args,
                # Sorted for deterministic argv (and thus process caching).
                *sorted(
                    itertools.chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_kotlin_source_files
                    )
                ),
            ],
            input_digest=sources_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_nailgun_keys,
            output_files=(output_file,),
            description=f"Compile {request.component} with kotlinc",
            level=LogLevel.DEBUG,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(
            process_result.output_digest, (output_file,), direct_dependency_classpath_entries
        )
    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
async def generate_scrooge_thrift_sources(
    request: GenerateScroogeThriftSourcesRequest,
    jdk: InternalJdk,
    scrooge: ScroogeSubsystem,
) -> GeneratedScroogeThriftSources:
    """Run Scrooge to generate `request.lang_id` sources from one Thrift source target."""
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"
    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScroogeToolLockfileSentinel())
    tool_classpath, transitive_targets, empty_output_dir_digest, wrapped_target = await MultiGet(
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.thrift_source_field.address])),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(
            WrappedTarget,
            WrappedTargetRequest(request.thrift_source_field.address, description_of_origin="<infallible>"),
        ),
    )
    # The transitive thrift closure must be present so `include` statements resolve;
    # only the target's own files are passed to Scrooge for generation.
    transitive_sources, target_sources = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(
                tgt[ThriftSourceField]
                for tgt in transitive_targets.closure
                if tgt.has_field(ThriftSourceField)
            ),
        ),
        Get(SourceFiles, SourceFilesRequest([request.thrift_source_field])),
    )
    sources_roots = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(transitive_sources.snapshot.files),
    )
    # Sorted for deterministic argv (and thus process caching).
    deduped_source_root_paths = sorted({sr.path for sr in sources_roots.path_to_root.values()})
    input_digest = await Get(
        Digest,
        MergeDigests([
            transitive_sources.snapshot.digest,
            target_sources.snapshot.digest,
            empty_output_dir_digest,
        ]),
    )
    # Each source root becomes a `-i` include path for Scrooge.
    maybe_include_paths = []
    for path in deduped_source_root_paths:
        maybe_include_paths.extend(["-i", path])
    maybe_finagle_option = []
    if wrapped_target.target[ScroogeFinagleBoolField].value:
        maybe_finagle_option = ["--finagle"]
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }
    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "com.twitter.scrooge.Main",
                *maybe_include_paths,
                "--dest",
                output_dir,
                "--language",
                request.lang_id,
                *maybe_finagle_option,
                *target_sources.snapshot.files,
            ],
            input_digest=input_digest,
            extra_jvm_options=scrooge.jvm_options,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Generating {request.lang_name} sources from {request.thrift_source_field.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )
    # Strip the staging prefix so generated files sit at their natural paths.
    output_snapshot = await Get(Snapshot, RemovePrefix(result.output_digest, output_dir))
    return GeneratedScroogeThriftSources(output_snapshot)
async def compile_wsdl_source(
    request: CompileWsdlSourceRequest,
    jdk: InternalJdk,
    jaxws: JaxWsTools,
) -> CompiledWsdlSource:
    """Generate Java sources from one WSDL file via the JAX-WS `wsimport` tool.

    `-Xnocompile` keeps wsimport from compiling the generated sources; `-keep`
    retains them so they can be captured from the output directory.
    """
    # NOTE(review): the `jaxws` parameter is not referenced in this body — confirm
    # whether it should contribute options or exists only to register the subsystem.
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"
    lockfile_request = await Get(GenerateJvmLockfileFromTool, JaxWsToolsLockfileSentinel())
    tool_classpath, subsetted_input_digest, empty_output_dir = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(lockfile=lockfile_request),
        ),
        # Narrow the input digest to exactly the requested file; error if absent.
        Get(
            Digest,
            DigestSubset(
                request.digest,
                PathGlobs(
                    [request.path],
                    glob_match_error_behavior=GlobMatchErrorBehavior.error,
                    conjunction=GlobExpansionConjunction.all_match,
                    description_of_origin="the WSDL file name",
                ),
            ),
        ),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )
    input_digest = await Get(Digest, MergeDigests([subsetted_input_digest, empty_output_dir]))
    immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }
    jaxws_args = [
        "-d",
        output_dir,
        "-encoding",
        "utf8",
        "-keep",
        "-Xnocompile",
        "-B-XautoNameResolution",
    ]
    if request.module:
        jaxws_args.extend(["-m", request.module])
    if request.package:
        jaxws_args.extend(["-p", request.package])
    jaxws_process = JvmProcess(
        jdk=jdk,
        argv=[
            "com.sun.tools.ws.WsImport",
            *jaxws_args,
            request.path,
        ],
        classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
        input_digest=input_digest,
        extra_immutable_input_digests=immutable_input_digests,
        extra_nailgun_keys=immutable_input_digests,
        description="Generating Java sources from WSDL source",
        level=LogLevel.DEBUG,
        output_directories=(output_dir,),
    )
    jaxws_result = await Get(ProcessResult, JvmProcess, jaxws_process)
    normalized_digest = await Get(Digest, RemovePrefix(jaxws_result.output_digest, output_dir))
    return CompiledWsdlSource(normalized_digest)
async def setup_scalapb_shim_classfiles(
    scalapb: ScalaPBSubsystem,
    jdk: InternalJdk,
) -> ScalaPBShimCompiledClassfiles:
    """Compile the bundled `ScalaPBShim.scala` source with scalac into loose classfiles.

    Raises:
        AssertionError: if the `ScalaPBShim.scala` package resource cannot be loaded.
    """
    dest_dir = "classfiles"
    scalapb_shim_content = pkgutil.get_data(
        "pants.backend.codegen.protobuf.scala", "ScalaPBShim.scala"
    )
    if not scalapb_shim_content:
        # Fix: the message previously said "ScalaParser.scala", which is a different
        # resource than the one loaded above.
        raise AssertionError("Unable to find ScalaPBShim.scala resource.")
    scalapb_shim_source = FileContent("ScalaPBShim.scala", scalapb_shim_content)
    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalapbcToolLockfileSentinel())
    tool_classpath, shim_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates([
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-compiler",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-library",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-reflect",
                        version=SHIM_SCALA_VERSION,
                    ),
                ]),
            ),
        ),
        Get(ToolClasspath, ToolClasspathRequest(prefix="__shimcp", lockfile=lockfile_request)),
        Get(Digest, CreateDigest([scalapb_shim_source, Directory(dest_dir)])),
    )
    merged_digest = await Get(
        Digest, MergeDigests((tool_classpath.digest, shim_classpath.digest, source_digest))
    )
    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(shim_classpath.classpath_entries()),
                "-d",
                dest_dir,
                scalapb_shim_source.path,
            ],
            input_digest=merged_digest,
            extra_jvm_options=scalapb.jvm_options,
            output_directories=(dest_dir,),
            description="Compile ScalaPB shim with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaPBShimCompiledClassfiles(digest=stripped_classfiles_digest)
async def generate_scala_from_protobuf(
    request: GenerateScalaFromProtobufRequest,
    protoc: Protoc,
    scalapb: ScalaPBSubsystem,
    shim_classfiles: ScalaPBShimCompiledClassfiles,
    jdk: InternalJdk,
) -> GeneratedSources:
    """Generate Scala sources from a `protobuf_source` target via the ScalaPB shim.

    Runs the precompiled ScalaPB shim (see `setup_scalapb_shim_classfiles`) as a
    JVM process, pointing it at the downloaded `protoc` binary and the stripped
    Protobuf sources, then re-roots the generated files under the target's
    source root.
    """
    output_dir = "_generated_files"
    # Relative paths at which the immutable input digests are mounted in the sandbox.
    toolcp_relpath = "__toolcp"
    shimcp_relpath = "__shimcp"
    plugins_relpath = "__plugins"
    protoc_relpath = "__protoc"

    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalapbcToolLockfileSentinel())

    # Resolve all independent inputs concurrently: the protoc binary, the tool
    # classpath, an empty output dir, the transitive targets (for proto imports),
    # and the relevant environment variables.
    (
        downloaded_protoc_binary,
        tool_classpath,
        empty_output_dir,
        transitive_targets,
        inherit_env,
    ) = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])),
        # Need PATH so that ScalaPB can invoke `mkfifo`.
        Get(Environment, EnvironmentRequest(requested=["PATH"])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_sources_stripped, target_sources_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(
                tgt[ProtobufSourceField]
                for tgt in transitive_targets.closure
                if tgt.has_field(ProtobufSourceField)
            ),
        ),
        Get(
            StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]]),
        ),
    )

    # Optional ScalaPB JVM plugins: materialize them and prepare the extra
    # shim arguments. Defaults are the no-plugin case.
    merged_jvm_plugins_digest = EMPTY_DIGEST
    maybe_jvm_plugins_setup_args: tuple[str, ...] = ()
    maybe_jvm_plugins_output_args: tuple[str, ...] = ()
    jvm_plugins = scalapb.jvm_plugins
    if jvm_plugins:
        materialized_jvm_plugins = await Get(
            MaterializedJvmPlugins, MaterializeJvmPluginsRequest(jvm_plugins)
        )
        merged_jvm_plugins_digest = materialized_jvm_plugins.digest
        maybe_jvm_plugins_setup_args = materialized_jvm_plugins.setup_args(plugins_relpath)
        # One `--<plugin>_out` flag per plugin, all writing into the shared output dir.
        maybe_jvm_plugins_output_args = tuple(
            f"--{plugin.name}_out={output_dir}" for plugin in materialized_jvm_plugins.plugins
        )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        shimcp_relpath: shim_classfiles.digest,
        plugins_relpath: merged_jvm_plugins_digest,
        protoc_relpath: downloaded_protoc_binary.digest,
    }

    input_digest = await Get(
        Digest, MergeDigests([all_sources_stripped.snapshot.digest, empty_output_dir])
    )

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                shimcp_relpath,
            ],
            argv=[
                "org.pantsbuild.backend.scala.scalapb.ScalaPBShim",
                f"--protoc={os.path.join(protoc_relpath, downloaded_protoc_binary.exe)}",
                *maybe_jvm_plugins_setup_args,
                f"--scala_out={output_dir}",
                *maybe_jvm_plugins_output_args,
                # Only the requested target's own sources are generated; the
                # transitive sources are present in the sandbox for imports.
                *target_sources_stripped.snapshot.files,
            ],
            input_digest=input_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Generating Scala sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
            extra_env=inherit_env,
        ),
    )

    # Drop the sandbox output-dir prefix and look up the target's source root
    # so generated files can be placed where the rest of the build expects them.
    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )

    # Re-root under the source root, unless the root is the repo root (".").
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
async def setup_scala_parser_classfiles(jdk: InternalJdk) -> ScalaParserCompiledClassfiles:
    """Compile the bundled `ScalaParser.scala` into classfiles for dependency inference.

    Resolves a scalac tool classpath and the parser's dependency classpath,
    compiles the single bundled source with `scala.tools.nsc.Main`, and returns
    the classfiles with the output-directory prefix stripped.

    Raises:
        AssertionError: if the parser source cannot be loaded as package data.
    """
    dest_dir = "classfiles"

    # The parser source ships as package data alongside this module.
    parser_source_content = pkgutil.get_data(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    # Resolve, concurrently: the scalac compiler classpath, the parser's own
    # dependency classpath, and a digest with the source plus empty output dir.
    tool_classpath, parser_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-compiler",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-library",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-reflect",
                            version=PARSER_SCALA_VERSION,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__parsercp", artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS
            ),
        ),
        Get(Digest, CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        ),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(parser_classpath.classpath_entries()),
                "-d",
                dest_dir,
                parser_source.path,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir,),
            description="Compile Scala parser for dependency inference with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )

    # Strip the `classfiles/` prefix so the digest holds bare classfiles.
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)
async def setup_junit_for_target(
    request: TestSetupRequest,
    jvm: JvmSubsystem,
    junit: JUnit,
    test_subsystem: TestSubsystem,
) -> TestSetup:
    """Build the `JvmProcess` that runs JUnit 5's ConsoleLauncher for one test target.

    Resolves the JDK, the target's compiled classpath, the JUnit tool
    classpath, and any `file`-source dependencies; assembles the launcher
    argv; and returns the process together with the prefix under which XML
    reports will be written.
    """
    # JDK selection and transitive targets are independent — fetch concurrently.
    jdk, transitive_tgts = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_field(request.field_set.jdk_version)),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.field_set.address])),
    )

    lockfile_request = await Get(GenerateJvmLockfileFromTool, JunitToolLockfileSentinel())
    classpath, junit_classpath, files = await MultiGet(
        Get(Classpath, Addresses([request.field_set.address])),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        # Loose `file` dependencies are materialized into the sandbox so tests
        # can read them at runtime.
        Get(
            SourceFiles,
            SourceFilesRequest(
                (dep.get(SourcesField) for dep in transitive_tgts.dependencies),
                for_sources_types=(FileSourceField,),
                enable_codegen=True,
            ),
        ),
    )

    input_digest = await Get(Digest, MergeDigests((*classpath.digests(), files.snapshot.digest)))

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: junit_classpath.digest,
    }

    reports_dir_prefix = "__reports_dir"
    # Per-target subdirectory keeps reports from different targets separate.
    reports_dir = f"{reports_dir_prefix}/{request.field_set.address.path_safe_spec}"

    # Classfiles produced by the root `junit_test` targets are the only ones which should run.
    user_classpath_arg = ":".join(classpath.root_args())

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )

    extra_jvm_args: list[str] = []
    if request.is_debug:
        extra_jvm_args.extend(jvm.debug_args)

    process = JvmProcess(
        jdk=jdk,
        classpath_entries=[
            *classpath.args(),
            *junit_classpath.classpath_entries(toolcp_relpath),
        ],
        argv=[
            *extra_jvm_args,
            "org.junit.platform.console.ConsoleLauncher",
            # Both flags are gated on a non-empty root classpath: `--classpath`
            # tells the launcher where classes live, `--scan-class-path` limits
            # scanning to those same entries.
            *(("--classpath", user_classpath_arg) if user_classpath_arg else ()),
            *(("--scan-class-path", user_classpath_arg) if user_classpath_arg else ()),
            "--reports-dir",
            reports_dir,
            *junit.args,
        ],
        input_digest=input_digest,
        extra_jvm_options=junit.jvm_options,
        extra_immutable_input_digests=extra_immutable_input_digests,
        output_directories=(reports_dir,),
        description=f"Run JUnit 5 ConsoleLauncher against {request.field_set.address}",
        level=LogLevel.DEBUG,
        cache_scope=cache_scope,
        use_nailgun=False,
    )
    return TestSetup(process=process, reports_dir_prefix=reports_dir_prefix)