async def generate_python_from_thrift(
    request: GeneratePythonFromThriftRequest,
    thrift_python: ThriftPythonSubsystem,
) -> GeneratedSources:
    """Generate Python sources from a Thrift protocol target via the Apache Thrift compiler."""
    thrift_sources = await Get(
        GeneratedThriftSources,
        GenerateThriftSourcesRequest(
            thrift_source_field=request.protocol_target[ThriftSourceField],
            lang_id="py",
            lang_options=thrift_python.gen_options,
            lang_name="Python",
        ),
    )
    # We must add back the source root for Python imports to work properly. Note that the file
    # paths will be different depending on whether `namespace py` was used. See the tests for
    # examples.
    source_root = await Get(
        SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)
    )
    if source_root.path == ".":
        rerooted_snapshot = await Get(Snapshot, Digest, thrift_sources.snapshot.digest)
    else:
        rerooted_snapshot = await Get(
            Snapshot, AddPrefix(thrift_sources.snapshot.digest, source_root.path)
        )
    return GeneratedSources(rerooted_snapshot)
async def generate_java_from_wsdl(request: GenerateJavaFromWsdlRequest) -> GeneratedSources:
    """Generate Java sources from a WSDL protocol target, one compile per WSDL file."""
    hydrated = await Get(
        HydratedSources, HydrateSourcesRequest(request.protocol_target[WsdlSourceField])
    )
    java_module = request.protocol_target[JavaModuleField].value
    java_package = request.protocol_target[JavaPackageField].value
    # Compile each WSDL file independently so the engine can run them concurrently.
    compiled = await MultiGet(
        Get(
            CompiledWsdlSource,
            CompileWsdlSourceRequest(
                hydrated.snapshot.digest,
                path=wsdl_path,
                module=java_module,
                package=java_package,
            ),
        )
        for wsdl_path in hydrated.snapshot.files
    )
    merged_digest, source_root = await MultiGet(
        Get(Digest, MergeDigests([c.output_digest for c in compiled])),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )
    # Re-prefix the generated files with the source root so they resolve like normal sources.
    if source_root.path == ".":
        rerooted = await Get(Snapshot, Digest, merged_digest)
    else:
        rerooted = await Get(Snapshot, AddPrefix(merged_digest, source_root.path))
    return GeneratedSources(rerooted)
async def prepare_python_sources(
    request: PythonSourceFilesRequest, union_membership: UnionMembership
) -> PythonSourceFiles:
    """Hydrate the requested targets' sources, inject missing `__init__.py` files, and collect
    the distinct source roots the files live under."""
    sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in request.targets),
            for_sources_types=request.valid_sources_types,
            enable_codegen=True,
        ),
    )
    missing_init_files = await Get(
        AncestorFiles,
        AncestorFilesRequest("__init__.py", sources.snapshot),
    )
    init_injected = await Get(
        Snapshot,
        MergeDigests((sources.snapshot.digest, missing_init_files.snapshot.digest)),
    )

    def _owns_or_generates_relevant_sources(tgt) -> bool:
        # Relevant targets either own Python/resources sources directly or can produce
        # them through codegen.
        return (
            tgt.has_field(PythonSources)
            or tgt.has_field(ResourcesSources)
            or tgt.get(Sources).can_generate(PythonSources, union_membership)
            or tgt.get(Sources).can_generate(ResourcesSources, union_membership)
        )

    roots = await MultiGet(
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(tgt))
        for tgt in request.targets
        if _owns_or_generates_relevant_sources(tgt)
    )
    distinct_root_paths = {root.path for root in roots}
    return PythonSourceFiles(
        SourceFiles(init_injected, sources.unrooted_files),
        tuple(sorted(distinct_root_paths)),
    )
async def generate_scala_from_thrift_with_scrooge(
    request: GenerateScalaFromThriftRequest,
) -> GeneratedSources:
    """Generate Scala sources from a Thrift protocol target using the Scrooge compiler."""
    scrooge_result = await Get(
        GeneratedScroogeThriftSources,
        GenerateScroogeThriftSourcesRequest(
            thrift_source_field=request.protocol_target[ThriftSourceField],
            lang_id="scala",
            lang_name="Scala",
        ),
    )
    source_root = await Get(
        SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)
    )
    # Restore the source root prefix so the generated files look like ordinary sources.
    if source_root.path == ".":
        rerooted = await Get(Snapshot, Digest, scrooge_result.snapshot.digest)
    else:
        rerooted = await Get(
            Snapshot, AddPrefix(scrooge_result.snapshot.digest, source_root.path)
        )
    return GeneratedSources(rerooted)
async def generate_java_from_avro(
    request: GenerateJavaFromAvroRequest,
) -> GeneratedSources:
    """Generate Java sources from an Avro protocol target, compiling each Avro file separately."""
    hydrated = await Get(
        HydratedSources, HydrateSourcesRequest(request.protocol_target[AvroSourceField])
    )
    # One compile request per file lets the engine parallelize the work.
    compiled = await MultiGet(
        Get(CompiledAvroSource, CompileAvroSourceRequest(hydrated.snapshot.digest, avro_path))
        for avro_path in hydrated.snapshot.files
    )
    merged_digest, source_root = await MultiGet(
        Get(Digest, MergeDigests([c.output_digest for c in compiled])),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )
    # Re-prefix the generated files with the source root so they resolve like normal sources.
    if source_root.path == ".":
        rerooted = await Get(Snapshot, Digest, merged_digest)
    else:
        rerooted = await Get(Snapshot, AddPrefix(merged_digest, source_root.path))
    return GeneratedSources(rerooted)
async def generate_java_from_thrift(
    request: GenerateJavaFromThriftRequest,
    thrift_java: ApacheThriftJavaSubsystem,
) -> GeneratedSources:
    """Generate Java sources from a Thrift protocol target via the Apache Thrift compiler."""
    thrift_sources = await Get(
        GeneratedThriftSources,
        GenerateThriftSourcesRequest(
            thrift_source_field=request.protocol_target[ThriftSourceField],
            lang_id="java",
            lang_options=thrift_java.gen_options,
            lang_name="Java",
        ),
    )
    source_root = await Get(
        SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)
    )
    # Prefix the generated files with the source root so they resolve like handwritten sources.
    if source_root.path == ".":
        rerooted = await Get(Snapshot, Digest, thrift_sources.snapshot.digest)
    else:
        rerooted = await Get(
            Snapshot, AddPrefix(thrift_sources.snapshot.digest, source_root.path)
        )
    return GeneratedSources(rerooted)
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
    python_protobuf_mypy_plugin: PythonProtobufMypyPlugin,
    pex_environment: PexEnvironment,
) -> GeneratedSources:
    """Generate Python sources from a protobuf target by running protoc, optionally with the
    gRPC plugin and the mypy-protobuf stub-generating plugin, then re-root the output under
    the target's source root so imports resolve like handwritten sources."""
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            tgt[ProtobufSourceField]
            for tgt in transitive_targets.closure
            if tgt.has_field(ProtobufSourceField)
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSourceField]])
    )

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    protoc_gen_mypy_grpc_script = "protoc-gen-mypy_grpc"
    mypy_pex = None
    # NOTE: the request is built unconditionally, but only resolved into a PEX when the
    # mypy plugin option below is enabled.
    mypy_request = PexRequest(
        output_filename="mypy_protobuf.pex",
        internal_only=True,
        requirements=python_protobuf_mypy_plugin.pex_requirements(),
        interpreter_constraints=python_protobuf_mypy_plugin.interpreter_constraints,
    )

    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(bin_names=[protoc_gen_mypy_script], pex_request=mypy_request),
        )

        if request.protocol_target.get(ProtobufGrpcToggleField).value:
            mypy_info = await Get(PexResolveInfo, VenvPex, mypy_pex)

            # In order to generate stubs for gRPC code, we need mypy-protobuf 2.0 or above.
            if any(
                dist_info.project_name == "mypy-protobuf" and dist_info.version.major >= 2
                for dist_info in mypy_info
            ):
                # TODO: Use `pex_path` once VenvPex stores a Pex field.
                # Re-resolve the PEX so the gRPC stub entry point is also exposed.
                mypy_pex = await Get(
                    VenvPex,
                    VenvPexRequest(
                        bin_names=[protoc_gen_mypy_script, protoc_gen_mypy_grpc_script],
                        pex_request=mypy_request,
                    ),
                )

    # Only download the gRPC plugin when the target opts in to gRPC codegen.
    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggleField).value
        else None
    )

    # Merge the sandbox inputs: proto files, the protoc binary, the (empty) output dir, and
    # whichever optional plugins were resolved above.
    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend(
            [
                f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
                "--mypy_out",
                output_dir,
            ]
        )
    if downloaded_grpc_plugin:
        argv.extend(
            [f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}", "--grpc_out", output_dir]
        )

        # `protoc-gen-mypy_grpc` is only present in `mypy_pex.bin` when the 2.0+ re-resolve
        # above succeeded, so this guard keys off the actual PEX contents.
        if mypy_pex and protoc_gen_mypy_grpc_script in mypy_pex.bin:
            argv.extend(
                [
                    f"--plugin=protoc-gen-mypy_grpc={mypy_pex.bin[protoc_gen_mypy_grpc_script].argv0}",
                    "--mypy_grpc_out",
                    output_dir,
                ]
            )

    argv.extend(target_sources_stripped.snapshot.files)
    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
            append_only_caches=pex_environment.in_sandbox(
                working_directory=None
            ).append_only_caches,
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_source's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest, protoc: Protoc
) -> GeneratedSources:
    """Run protoc over the target's .proto files to generate Python sources, then re-root
    the output under the appropriate source root."""
    protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    # TODO(#9650): replace this with a proper intrinsic to create empty directories.
    mkdir_request = Get(
        ProcessResult,
        Process(
            ("/bin/mkdir", output_dir),
            description=f"Create the directory {output_dir}",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(TransitiveTargets, Addresses([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSources]]),
    )

    protoc_binary, mkdir_result, all_stripped, target_stripped = await MultiGet(
        protoc_request,
        mkdir_request,
        all_stripped_request,
        target_stripped_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                all_stripped.snapshot.digest,
                protoc_binary.digest,
                mkdir_result.output_digest,
            )
        ),
    )

    result = await Get(
        ProcessResult,
        Process(
            (
                protoc_binary.exe,
                "--python_out",
                output_dir,
                *target_stripped.snapshot.files,
            ),
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, root_request),
    )
    if source_root.path == ".":
        rerooted = await Get(Snapshot, Digest, normalized_digest)
    else:
        rerooted = await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
    return GeneratedSources(rerooted)
async def prepare_python_sources(
    request: PythonSourceFilesRequest, union_membership: UnionMembership
) -> PythonSourceFiles:
    """Hydrate sources for the given targets, add any missing `__init__.py(i)` files, and
    compute the set of source roots the (possibly codegen'd) files live under."""
    hydrated = await Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(SourcesField) for tgt in request.targets),
            for_sources_types=request.valid_sources_types,
            enable_codegen=True,
        ),
    )
    missing_inits = await Get(
        AncestorFiles,
        AncestorFilesRequest(
            input_files=hydrated.snapshot.files, requested=("__init__.py", "__init__.pyi")
        ),
    )
    init_injected = await Get(
        Snapshot, MergeDigests((hydrated.snapshot.digest, missing_inits.snapshot.digest))
    )

    # Codegen is able to generate code in any arbitrary location, unlike sources normally being
    # rooted under the target definition. To determine source roots for these generated files, we
    # cannot use the normal `SourceRootRequest.for_target()` and we instead must determine
    # a source root for every individual generated file. So, we re-resolve the codegen sources here.
    direct_targets = []
    codegen_targets = []
    for tgt in request.targets:
        if tgt.has_field(PythonSourceField) or tgt.has_field(ResourceSourceField):
            direct_targets.append(tgt)
        else:
            sources_field = tgt.get(SourcesField)
            if sources_field.can_generate(
                PythonSourceField, union_membership
            ) or sources_field.can_generate(ResourceSourceField, union_membership):
                codegen_targets.append(tgt)

    codegen_hydrated = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(
                tgt.get(SourcesField),
                for_sources_types=request.valid_sources_types,
                enable_codegen=True,
            ),
        )
        for tgt in codegen_targets
    )
    root_requests = [
        *(SourceRootRequest.for_target(tgt) for tgt in direct_targets),
        *(
            SourceRootRequest.for_file(generated_file)
            for hs in codegen_hydrated
            for generated_file in hs.snapshot.files
        ),
    ]
    roots = await MultiGet(
        Get(SourceRoot, SourceRootRequest, req) for req in root_requests
    )
    distinct_root_paths = {root.path for root in roots}
    return PythonSourceFiles(
        SourceFiles(init_injected, hydrated.unrooted_files),
        tuple(sorted(distinct_root_paths)),
    )
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    """Generate Python sources from a protobuf target by running protoc, optionally with the
    gRPC plugin and an extracted mypy-protobuf plugin wheel, then re-root the output under
    the target's source root."""
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    # TODO(#10917): Use TransitiveTargets instead of TransitiveTargetsLite.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequestLite([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSources]])
    )

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    # To run the MyPy Protobuf plugin, we first install it with Pex, then extract the wheels and
    # point Protoc to the extracted wheels with its `--plugin` argument.
    extracted_mypy_wheels = None
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            Pex,
            PexRequest(
                output_filename="mypy_protobuf.pex",
                internal_only=True,
                requirements=PexRequirements([python_protobuf_subsystem.mypy_plugin_version]),
                # This is solely to ensure that we use an appropriate interpreter when resolving
                # the distribution. We don't actually run the distribution directly with Python,
                # as we extract out its binary.
                interpreter_constraints=PexInterpreterConstraints(["CPython>=3.5"]),
            ),
        )
        extracted_mypy_wheels = await Get(ExtractedPexDistributions, Pex, mypy_pex)

    # Only download the gRPC plugin when the target opts in to gRPC codegen.
    # NOTE(review): `ProtobufGrcpToggle` is spelled "Grcp" here — presumably it matches the
    # field's declared name elsewhere in the codebase; confirm at its definition.
    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrcpToggle).value
        else None
    )

    # Merge the sandbox inputs: proto files, the protoc binary, the (empty) output dir, and
    # whichever optional plugins were resolved above.
    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if extracted_mypy_wheels:
        unmerged_digests.append(extracted_mypy_wheels.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if extracted_mypy_wheels:
        # Locate the mypy-protobuf wheel among the extracted distributions; its `bin/` directory
        # holds the `protoc-gen-mypy` entry point that protoc invokes via `--plugin`.
        mypy_plugin_path = next(
            p
            for p in extracted_mypy_wheels.wheel_directory_paths
            if p.startswith(".deps/mypy_protobuf-")
        )
        argv.extend(
            [
                f"--plugin=protoc-gen-mypy={mypy_plugin_path}/bin/protoc-gen-mypy",
                "--mypy_out",
                output_dir,
            ]
        )
    if downloaded_grpc_plugin:
        argv.extend(
            [f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}", "--grpc_out", output_dir]
        )
    argv.extend(target_sources_stripped.snapshot.files)

    env = {}
    if extracted_mypy_wheels:
        # The plugin script imports its package from the extracted wheels at runtime.
        env["PYTHONPATH"] = ":".join(extracted_mypy_wheels.wheel_directory_paths)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            env=env,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
async def generate_go_from_protobuf(
    request: GenerateGoFromProtobufRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
) -> GeneratedSources:
    """Run protoc with the Go (and optionally go-grpc) plugins to generate Go sources."""
    output_dir = "_generated_files"
    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    protoc_binary, empty_output_dir, transitive_targets = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped, target_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(
                tgt[ProtobufSourceField]
                for tgt in transitive_targets.closure
                if tgt.has_field(ProtobufSourceField)
            ),
        ),
        Get(
            StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]]),
        ),
    )

    input_digest = await Get(
        Digest, MergeDigests([all_stripped.snapshot.digest, empty_output_dir])
    )

    grpc_plugin_args = []
    if request.protocol_target.get(ProtobufGrpcToggleField).value:
        grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    result = await Get(
        ProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *grpc_plugin_args,
                *target_stripped.snapshot.files,
            ],
            # Note: Necessary or else --plugin option needs absolute path.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=f"Generating Go sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )
    # Re-prefix the generated files with the source root so they resolve like normal sources.
    if source_root.path == ".":
        rerooted = await Get(Snapshot, Digest, normalized_digest)
    else:
        rerooted = await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
    return GeneratedSources(rerooted)
async def generate_scala_from_protobuf(
    request: GenerateScalaFromProtobufRequest,
    protoc: Protoc,
    scalapb: ScalaPBSubsystem,
    shim_classfiles: ScalaPBShimCompiledClassfiles,
    jdk: InternalJdk,
) -> GeneratedSources:
    """Generate Scala sources from a protobuf target by invoking the ScalaPB shim (a JVM
    process wrapping protoc), optionally with extra ScalaPB JVM plugins, then re-root the
    generated files under the target's source root."""
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"
    shimcp_relpath = "__shimcp"
    plugins_relpath = "__plugins"
    protoc_relpath = "__protoc"

    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalapbcToolLockfileSentinel())
    (
        downloaded_protoc_binary,
        tool_classpath,
        empty_output_dir,
        transitive_targets,
        inherit_env,
    ) = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])),
        # Need PATH so that ScalaPB can invoke `mkfifo`.
        Get(Environment, EnvironmentRequest(requested=["PATH"])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_sources_stripped, target_sources_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(
                tgt[ProtobufSourceField]
                for tgt in transitive_targets.closure
                if tgt.has_field(ProtobufSourceField)
            ),
        ),
        Get(
            StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]]),
        ),
    )

    # Materialize any configured ScalaPB JVM plugins and build their protoc arguments; with no
    # plugins configured, these stay empty and the plugins digest stays EMPTY_DIGEST.
    merged_jvm_plugins_digest = EMPTY_DIGEST
    maybe_jvm_plugins_setup_args: tuple[str, ...] = ()
    maybe_jvm_plugins_output_args: tuple[str, ...] = ()
    jvm_plugins = scalapb.jvm_plugins
    if jvm_plugins:
        materialized_jvm_plugins = await Get(
            MaterializedJvmPlugins, MaterializeJvmPluginsRequest(jvm_plugins)
        )
        merged_jvm_plugins_digest = materialized_jvm_plugins.digest
        maybe_jvm_plugins_setup_args = materialized_jvm_plugins.setup_args(plugins_relpath)
        # Each plugin gets its own `--<name>_out` directed at the shared output dir.
        maybe_jvm_plugins_output_args = tuple(
            f"--{plugin.name}_out={output_dir}" for plugin in materialized_jvm_plugins.plugins
        )

    # These digests are mounted read-only at stable relative paths; reusing the same mapping for
    # `extra_nailgun_keys` keeps them available across nailgun-reused processes.
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        shimcp_relpath: shim_classfiles.digest,
        plugins_relpath: merged_jvm_plugins_digest,
        protoc_relpath: downloaded_protoc_binary.digest,
    }

    input_digest = await Get(
        Digest, MergeDigests([all_sources_stripped.snapshot.digest, empty_output_dir])
    )

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                shimcp_relpath,
            ],
            argv=[
                "org.pantsbuild.backend.scala.scalapb.ScalaPBShim",
                f"--protoc={os.path.join(protoc_relpath, downloaded_protoc_binary.exe)}",
                *maybe_jvm_plugins_setup_args,
                f"--scala_out={output_dir}",
                *maybe_jvm_plugins_output_args,
                *target_sources_stripped.snapshot.files,
            ],
            input_digest=input_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Generating Scala sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
            extra_env=inherit_env,
        ),
    )

    # Strip the scratch output dir and add back the source root so the generated files look
    # like ordinary sources.
    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    """Generate Python sources from a protobuf target by running protoc, optionally with the
    gRPC plugin and the mypy-protobuf plugin (resolved as a VenvPex), then re-root the output
    under the target's source root."""
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSources]])
    )

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    mypy_pex = None
    # Resolve the mypy-protobuf plugin as a venv PEX only when the option is enabled.
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(
                bin_names=[protoc_gen_mypy_script],
                pex_request=PexRequest(
                    output_filename="mypy_protobuf.pex",
                    internal_only=True,
                    requirements=PexRequirements([python_protobuf_subsystem.mypy_plugin_version]),
                    # TODO(John Sirois): Fix these interpreter constraints to track the actual
                    # python requirement of the mypy_plugin_version or else plumb an option for
                    # manually setting the constraint to track what mypy_plugin_version needs:
                    #  https://github.com/pantsbuild/pants/issues/11565
                    # Here we guess a constraint that will likely work with any mypy_plugin_version
                    # selected.
                    interpreter_constraints=PexInterpreterConstraints(["CPython>=3.5"]),
                ),
            ),
        )

    # Only download the gRPC plugin when the target opts in to gRPC codegen.
    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggle).value
        else None
    )

    # Merge the sandbox inputs: proto files, the protoc binary, the (empty) output dir, and
    # whichever optional plugins were resolved above.
    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend(
            [
                f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
                "--mypy_out",
                output_dir,
            ]
        )
    if downloaded_grpc_plugin:
        argv.extend(
            [f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}", "--grpc_out", output_dir]
        )
    argv.extend(target_sources_stripped.snapshot.files)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
async def generate_java_from_protobuf(
    request: GenerateJavaFromProtobufRequest,
    protoc: Protoc,
) -> GeneratedSources:
    """Run protoc with `--java_out` to generate Java sources from the protobuf target."""
    protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )
    output_dir = "_generated_files"
    mkdir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_source` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            tgt[ProtobufSourceField]
            for tgt in transitive_targets.closure
            if tgt.has_field(ProtobufSourceField)
        ),
    )
    target_stripped_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSourceField]])
    )

    protoc_binary, empty_output_dir, all_stripped, target_stripped = await MultiGet(
        protoc_request, mkdir_request, all_stripped_request, target_stripped_request
    )

    input_digest = await Get(
        Digest,
        MergeDigests([all_stripped.snapshot.digest, protoc_binary.digest, empty_output_dir]),
    )

    result = await Get(
        ProcessResult,
        Process(
            [protoc_binary.exe, "--java_out", output_dir, *target_stripped.snapshot.files],
            input_digest=input_digest,
            description=f"Generating Java sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )
    # Re-prefix the generated files with the source root so they resolve like normal sources.
    if source_root.path == ".":
        rerooted = await Get(Snapshot, Digest, normalized_digest)
    else:
        rerooted = await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
    return GeneratedSources(rerooted)