async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
    python_protobuf_mypy_plugin: PythonProtobufMypyPlugin,
    pex_environment: PexEnvironment,
) -> GeneratedSources:
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            tgt[ProtobufSourceField]
            for tgt in transitive_targets.closure
            if tgt.has_field(ProtobufSourceField)
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSourceField]])
    )

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    protoc_gen_mypy_grpc_script = "protoc-gen-mypy_grpc"
    mypy_pex = None
    mypy_request = PexRequest(
        output_filename="mypy_protobuf.pex",
        internal_only=True,
        requirements=python_protobuf_mypy_plugin.pex_requirements(),
        interpreter_constraints=python_protobuf_mypy_plugin.interpreter_constraints,
    )

    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(bin_names=[protoc_gen_mypy_script], pex_request=mypy_request),
        )

        if request.protocol_target.get(ProtobufGrpcToggleField).value:
            mypy_info = await Get(PexResolveInfo, VenvPex, mypy_pex)

            # In order to generate stubs for gRPC code, we need mypy-protobuf 2.0 or above.
            if any(
                dist_info.project_name == "mypy-protobuf" and dist_info.version.major >= 2
                for dist_info in mypy_info
            ):
                # TODO: Use `pex_path` once VenvPex stores a Pex field.
                mypy_pex = await Get(
                    VenvPex,
                    VenvPexRequest(
                        bin_names=[protoc_gen_mypy_script, protoc_gen_mypy_grpc_script],
                        pex_request=mypy_request,
                    ),
                )

    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggleField).value
        else None
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend(
            [
                f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
                "--mypy_out",
                output_dir,
            ]
        )
    if downloaded_grpc_plugin:
        argv.extend(
            [f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}", "--grpc_out", output_dir]
        )
        if mypy_pex and protoc_gen_mypy_grpc_script in mypy_pex.bin:
            argv.extend(
                [
                    f"--plugin=protoc-gen-mypy_grpc={mypy_pex.bin[protoc_gen_mypy_grpc_script].argv0}",
                    "--mypy_grpc_out",
                    output_dir,
                ]
            )

    argv.extend(target_sources_stripped.snapshot.files)
    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
            append_only_caches=pex_environment.in_sandbox(
                working_directory=None
            ).append_only_caches,
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_source's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
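
# For context, a codegen rule like the one above is wired into the engine through a
# `GenerateSourcesRequest` subclass plus a union rule. This is a minimal sketch of that
# registration, assuming the standard Pants codegen pattern and a `PythonSourceField` output
# type; it is illustrative rather than the exact registration that ships with this rule.
class GeneratePythonFromProtobufRequest(GenerateSourcesRequest):
    input = ProtobufSourceField
    output = PythonSourceField


def rules():
    return [
        *collect_rules(),
        UnionRule(GenerateSourcesRequest, GeneratePythonFromProtobufRequest),
    ]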

async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    # TODO(#10917): Use TransitiveTargets instead of TransitiveTargetsLite.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequestLite([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSources]])
    )

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    # To run the MyPy Protobuf plugin, we first install it with Pex, then extract the wheels and
    # point Protoc to the extracted wheels with its `--plugin` argument.
    extracted_mypy_wheels = None
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            Pex,
            PexRequest(
                output_filename="mypy_protobuf.pex",
                internal_only=True,
                requirements=PexRequirements([python_protobuf_subsystem.mypy_plugin_version]),
                # This is solely to ensure that we use an appropriate interpreter when resolving
                # the distribution. We don't actually run the distribution directly with Python,
                # as we extract out its binary.
                interpreter_constraints=PexInterpreterConstraints(["CPython>=3.5"]),
            ),
        )
        extracted_mypy_wheels = await Get(ExtractedPexDistributions, Pex, mypy_pex)

    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggle).value
        else None
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if extracted_mypy_wheels:
        unmerged_digests.append(extracted_mypy_wheels.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if extracted_mypy_wheels:
        mypy_plugin_path = next(
            p
            for p in extracted_mypy_wheels.wheel_directory_paths
            if p.startswith(".deps/mypy_protobuf-")
        )
        argv.extend(
            [
                f"--plugin=protoc-gen-mypy={mypy_plugin_path}/bin/protoc-gen-mypy",
                "--mypy_out",
                output_dir,
            ]
        )
    if downloaded_grpc_plugin:
        argv.extend(
            [f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}", "--grpc_out", output_dir]
        )
    argv.extend(target_sources_stripped.snapshot.files)

    env = {}
    if extracted_mypy_wheels:
        env["PYTHONPATH"] = ":".join(extracted_mypy_wheels.wheel_directory_paths)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            env=env,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)

async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest, protoc: Protoc
) -> GeneratedSources:
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    # TODO(#9650): replace this with a proper intrinsic to create empty directories.
    create_output_dir_request = Get(
        ProcessResult,
        Process(
            ("/bin/mkdir", output_dir),
            description=f"Create the directory {output_dir}",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(TransitiveTargets, Addresses([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSources]])
    )

    (
        downloaded_protoc_binary,
        create_output_dir_result,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                all_sources_stripped.snapshot.digest,
                downloaded_protoc_binary.digest,
                create_output_dir_result.output_digest,
            )
        ),
    )

    result = await Get(
        ProcessResult,
        Process(
            (
                downloaded_protoc_binary.exe,
                "--python_out",
                output_dir,
                *target_sources_stripped.snapshot.files,
            ),
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
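
# The TODO(#9650) in the rule above asks for a proper intrinsic to create empty directories
# instead of shelling out to `mkdir`. The newer variants earlier in this file show the
# replacement; a minimal sketch, assuming the same `output_dir` value:
#
#     create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))
#
# The resulting empty-directory Digest is merged straight into the protoc process's input
# digest, so the extra `mkdir` ProcessResult disappears from the MultiGet.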

async def generate_go_from_protobuf(
    request: GenerateGoFromProtobufRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
) -> GeneratedSources:
    output_dir = "_generated_files"

    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    downloaded_protoc_binary, empty_output_dir, transitive_targets = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_sources_stripped, target_sources_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(
                tgt[ProtobufSourceField]
                for tgt in transitive_targets.closure
                if tgt.has_field(ProtobufSourceField)
            ),
        ),
        Get(
            StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]]),
        ),
    )

    input_digest = await Get(
        Digest, MergeDigests([all_sources_stripped.snapshot.digest, empty_output_dir])
    )

    maybe_grpc_plugin_args = []
    if request.protocol_target.get(ProtobufGrpcToggleField).value:
        maybe_grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    result = await Get(
        ProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, downloaded_protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *maybe_grpc_plugin_args,
                *target_sources_stripped.snapshot.files,
            ],
            # Note: Necessary or else --plugin option needs absolute path.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: downloaded_protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=f"Generating Go sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)

async def setup_full_package_build_request(
    request: _SetupGoProtobufPackageBuildRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
    package_mapping: ImportPathToPackages,
    go_protobuf_mapping: GoProtobufImportPathMapping,
    analyzer: PackageAnalyzerSetup,
) -> FallibleBuildGoPackageRequest:
    output_dir = "_generated_files"
    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    transitive_targets, downloaded_protoc_binary, empty_output_dir = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses)),
        Get(DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    all_sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            sources_fields=(tgt[ProtobufSourceField] for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSourceField,),
            enable_codegen=True,
        ),
    )

    source_roots, input_digest = await MultiGet(
        Get(SourceRootsResult, SourceRootsRequest, SourceRootsRequest.for_files(all_sources.files)),
        Get(Digest, MergeDigests([all_sources.snapshot.digest, empty_output_dir])),
    )

    source_root_paths = sorted({sr.path for sr in source_roots.path_to_root.values()})

    pkg_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[ProtobufSourceField]))
        for tgt in transitive_targets.roots
    )
    pkg_files = sorted({f for ps in pkg_sources for f in ps.files})

    maybe_grpc_plugin_args = []
    if any(tgt.get(ProtobufGrpcToggleField).value for tgt in transitive_targets.roots):
        maybe_grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    gen_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, downloaded_protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *(f"--proto_path={source_root}" for source_root in source_root_paths),
                *maybe_grpc_plugin_args,
                *pkg_files,
            ],
            # Note: Necessary or else --plugin option needs absolute path.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: downloaded_protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=f"Generating Go sources from {request.import_path}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )
    if gen_result.exit_code != 0:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=gen_result.exit_code,
            stderr=gen_result.stderr.decode(),
        )

    # Ensure that the generated files are in a single package directory.
    gen_sources = await Get(Snapshot, Digest, gen_result.output_digest)
    files_by_dir = group_by_dir(gen_sources.files)
    if len(files_by_dir) != 1:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=1,
            stderr=(
                "Expected Go files generated from Protobuf sources to be output to a single directory.\n"
                f"- import path: {request.import_path}\n"
                f"- protobuf files: {', '.join(pkg_files)}"
            ),
        )
    gen_dir = list(files_by_dir.keys())[0]

    # Analyze the generated sources.
    input_digest = await Get(Digest, MergeDigests([gen_sources.digest, analyzer.digest]))
    result = await Get(
        FallibleProcessResult,
        Process(
            (analyzer.path, gen_dir),
            input_digest=input_digest,
            description=f"Determine metadata for generated Go package for {request.import_path}",
            level=LogLevel.DEBUG,
            env={"CGO_ENABLED": "0"},
        ),
    )

    # Parse the metadata from the analysis.
    fallible_analysis = FallibleFirstPartyPkgAnalysis.from_process_result(
        result,
        dir_path=gen_dir,
        import_path=request.import_path,
        minimum_go_version="",
        description_of_source=f"Go package generated from protobuf targets `{', '.join(str(addr) for addr in request.addresses)}`",
    )
    if not fallible_analysis.analysis:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=fallible_analysis.exit_code,
            stderr=fallible_analysis.stderr,
        )
    analysis = fallible_analysis.analysis

    # Obtain build requests for third-party dependencies.
    # TODO: Consider how to merge this code with existing dependency inference code.
    dep_build_request_addrs: list[Address] = []
    for dep_import_path in (*analysis.imports, *analysis.test_imports, *analysis.xtest_imports):
        # Infer dependencies on other Go packages.
        candidate_addresses = package_mapping.mapping.get(dep_import_path)
        if candidate_addresses:
            # TODO: Use explicit dependencies to disambiguate? This should never happen with Go backend though.
            if len(candidate_addresses) > 1:
                return FallibleBuildGoPackageRequest(
                    request=None,
                    import_path=request.import_path,
                    exit_code=result.exit_code,
                    stderr=(
                        f"Multiple addresses match import of `{dep_import_path}`.\n"
                        f"addresses: {', '.join(str(a) for a in candidate_addresses)}"
                    ),
                )
            dep_build_request_addrs.extend(candidate_addresses)

        # Infer dependencies on other generated Go sources.
        go_protobuf_candidate_addresses = go_protobuf_mapping.mapping.get(dep_import_path)
        if go_protobuf_candidate_addresses:
            dep_build_request_addrs.extend(go_protobuf_candidate_addresses)

    dep_build_requests = await MultiGet(
        Get(BuildGoPackageRequest, BuildGoPackageTargetRequest(addr))
        for addr in dep_build_request_addrs
    )

    return FallibleBuildGoPackageRequest(
        request=BuildGoPackageRequest(
            import_path=request.import_path,
            digest=gen_sources.digest,
            dir_path=analysis.dir_path,
            go_file_names=analysis.go_files,
            s_file_names=analysis.s_files,
            direct_dependencies=dep_build_requests,
            minimum_go_version=analysis.minimum_go_version,
        ),
        import_path=request.import_path,
    )
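
# The single-directory check in the rule above groups the generated file paths by parent
# directory before analysis. A hedged illustration of the expected shape (assuming
# `group_by_dir` maps each directory to the file names it contains):
#
#     files_by_dir = group_by_dir(["gen/example/foo.pb.go", "gen/example/foo_grpc.pb.go"])
#     assert set(files_by_dir) == {"gen/example"}  # one directory == one Go package
#
# If protoc spread its output across several directories, the mapping would have multiple keys
# and the rule returns a FallibleBuildGoPackageRequest describing the problem instead of raising.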

async def generate_scala_from_protobuf(
    request: GenerateScalaFromProtobufRequest,
    protoc: Protoc,
    scalapb: ScalaPBSubsystem,
    shim_classfiles: ScalaPBShimCompiledClassfiles,
    jdk: InternalJdk,
) -> GeneratedSources:
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"
    shimcp_relpath = "__shimcp"
    plugins_relpath = "__plugins"
    protoc_relpath = "__protoc"

    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalapbcToolLockfileSentinel())

    (
        downloaded_protoc_binary,
        tool_classpath,
        empty_output_dir,
        transitive_targets,
        inherit_env,
    ) = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])),
        # Need PATH so that ScalaPB can invoke `mkfifo`.
        Get(Environment, EnvironmentRequest(requested=["PATH"])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_sources_stripped, target_sources_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(
                tgt[ProtobufSourceField]
                for tgt in transitive_targets.closure
                if tgt.has_field(ProtobufSourceField)
            ),
        ),
        Get(
            StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]]),
        ),
    )

    merged_jvm_plugins_digest = EMPTY_DIGEST
    maybe_jvm_plugins_setup_args: tuple[str, ...] = ()
    maybe_jvm_plugins_output_args: tuple[str, ...] = ()
    jvm_plugins = scalapb.jvm_plugins
    if jvm_plugins:
        materialized_jvm_plugins = await Get(
            MaterializedJvmPlugins, MaterializeJvmPluginsRequest(jvm_plugins)
        )
        merged_jvm_plugins_digest = materialized_jvm_plugins.digest
        maybe_jvm_plugins_setup_args = materialized_jvm_plugins.setup_args(plugins_relpath)
        maybe_jvm_plugins_output_args = tuple(
            f"--{plugin.name}_out={output_dir}" for plugin in materialized_jvm_plugins.plugins
        )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        shimcp_relpath: shim_classfiles.digest,
        plugins_relpath: merged_jvm_plugins_digest,
        protoc_relpath: downloaded_protoc_binary.digest,
    }

    input_digest = await Get(
        Digest, MergeDigests([all_sources_stripped.snapshot.digest, empty_output_dir])
    )

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[*tool_classpath.classpath_entries(toolcp_relpath), shimcp_relpath],
            argv=[
                "org.pantsbuild.backend.scala.scalapb.ScalaPBShim",
                f"--protoc={os.path.join(protoc_relpath, downloaded_protoc_binary.exe)}",
                *maybe_jvm_plugins_setup_args,
                f"--scala_out={output_dir}",
                *maybe_jvm_plugins_output_args,
                *target_sources_stripped.snapshot.files,
            ],
            input_digest=input_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Generating Scala sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
            extra_env=inherit_env,
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)

async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSources]])
    )

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    mypy_pex = None
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(
                bin_names=[protoc_gen_mypy_script],
                pex_request=PexRequest(
                    output_filename="mypy_protobuf.pex",
                    internal_only=True,
                    requirements=PexRequirements([python_protobuf_subsystem.mypy_plugin_version]),
                    # TODO(John Sirois): Fix these interpreter constraints to track the actual
                    # python requirement of the mypy_plugin_version or else plumb an option for
                    # manually setting the constraint to track what mypy_plugin_version needs:
                    # https://github.com/pantsbuild/pants/issues/11565
                    # Here we guess a constraint that will likely work with any mypy_plugin_version
                    # selected.
                    interpreter_constraints=PexInterpreterConstraints(["CPython>=3.5"]),
                ),
            ),
        )

    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggle).value
        else None
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend(
            [
                f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
                "--mypy_out",
                output_dir,
            ]
        )
    if downloaded_grpc_plugin:
        argv.extend(
            [f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}", "--grpc_out", output_dir]
        )
    argv.extend(target_sources_stripped.snapshot.files)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )
    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)

async def generate_java_from_protobuf(
    request: GenerateJavaFromProtobufRequest,
    protoc: Protoc,
) -> GeneratedSources:
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest, CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_source` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.protocol_target.address])
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            tgt[ProtobufSourceField]
            for tgt in transitive_targets.closure
            if tgt.has_field(ProtobufSourceField)
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSourceField]])
    )

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--java_out", output_dir]
    argv.extend(target_sources_stripped.snapshot.files)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Java sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(request.protocol_target)),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
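
# A note on the RemovePrefix/AddPrefix step shared by all of the rules above: it re-homes protoc's
# output so it looks like ordinary sources under the target's source root. A hedged illustration
# with a hypothetical generated file (taking the Python rules as the example):
#
#     protoc output:                     _generated_files/example/models/foo_pb2.py
#     RemovePrefix(digest, output_dir):  example/models/foo_pb2.py
#     AddPrefix(digest, "src/python"):   src/python/example/models/foo_pb2.py
#
# When the resolved source root is the repository root ("."), AddPrefix is skipped, which is why
# every rule branches on `source_root.path != "."`.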