async def generate_mock_generated_target(
    request: MockGenerateTargetsRequest,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[MultipleSourcesField])
    )
    # Generate using both "file address" and "generated target" syntax.
    return GeneratedTargets(
        request.generator,
        [
            *_generate_file_level_targets(
                MockGeneratedTarget,
                request.generator,
                paths.files,
                request.template_address,
                request.template,
                request.overrides,
                union_membership,
                add_dependencies_on_all_siblings=True,
                use_generated_address_syntax=False,
            ).values(),
            *_generate_file_level_targets(
                MockGeneratedTarget,
                request.generator,
                paths.files,
                request.template_address,
                request.template,
                request.overrides,
                union_membership,
                add_dependencies_on_all_siblings=True,
                use_generated_address_syntax=True,
            ).values(),
        ],
    )
async def generate_terraform_module_targets(
    request: GenerateTerraformModuleTargetsRequest,
) -> GeneratedTargets:
    generator = request.generator
    sources_paths = await Get(
        SourcesPaths, SourcesPathsRequest(generator.get(TerraformModulesSources))
    )

    dir_to_filenames = group_by_dir(sources_paths.files)
    dirs_with_terraform_files = []
    for dir, filenames in dir_to_filenames.items():
        if any(filename.endswith(".tf") for filename in filenames):
            dirs_with_terraform_files.append(dir)

    def gen_target(dir: str) -> Target:
        generated_target_fields = {}
        for field in generator.field_values.values():
            value: ImmutableValue | None
            if isinstance(field, Sources):
                value = tuple(sorted(dir_to_filenames[dir]))
            else:
                value = field.value
            generated_target_fields[field.alias] = value
        return TerraformModule(
            generated_target_fields, generator.address.create_generated(dir)
        )

    return GeneratedTargets(
        request.generator, [gen_target(dir) for dir in dirs_with_terraform_files]
    )
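
# Illustrative sketch (not part of the rule above): `group_by_dir` is assumed to map
# each directory to the set of file names directly inside it. A standalone equivalent
# under that assumption might look like this; the rule then keeps only directories
# that contain at least one `.tf` file.
from collections import defaultdict
import os


def group_by_dir_sketch(paths: list[str]) -> dict[str, set[str]]:
    """Group file paths by their containing directory."""
    grouped: dict[str, set[str]] = defaultdict(set)
    for path in paths:
        dirname, filename = os.path.split(path)
        grouped[dirname].add(filename)
    return dict(grouped)


assert group_by_dir_sketch(["infra/main.tf", "infra/vars.tf", "docs/readme.md"]) == {
    "infra": {"main.tf", "vars.tf"},
    "docs": {"readme.md"},
}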
async def generate_targets_from_shell_sources(
    request: GenerateTargetsFromShellSources,
    files_not_found_behavior: FilesNotFoundBehavior,
    shell_setup: ShellSetup,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    sources_paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[ShellSourcesGeneratingSourcesField])
    )

    all_overrides = {}
    overrides_field = request.generator[OverridesField]
    if overrides_field.value:
        _all_override_paths = await MultiGet(
            Get(Paths, PathGlobs, path_globs)
            for path_globs in overrides_field.to_path_globs(files_not_found_behavior)
        )
        all_overrides = overrides_field.flatten_paths(
            dict(zip(_all_override_paths, overrides_field.value.values()))
        )

    return generate_file_level_targets(
        ShellSourceTarget,
        request.generator,
        sources_paths.files,
        union_membership,
        add_dependencies_on_all_siblings=not shell_setup.dependency_inference,
        overrides=all_overrides,
    )
async def generate_file_targets(
    request: GenerateFileTargets,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    sources_paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[MultipleSourcesField])
    )

    add_dependencies_on_all_siblings = False
    if request.generator.settings_request_cls:
        generator_settings = await Get(
            TargetFilesGeneratorSettings,
            TargetFilesGeneratorSettingsRequest,
            request.generator.settings_request_cls(),
        )
        add_dependencies_on_all_siblings = generator_settings.add_dependencies_on_all_siblings

    return _generate_file_level_targets(
        type(request.generator).generated_target_cls,
        request.generator,
        sources_paths.files,
        request.template_address,
        request.template,
        request.overrides,
        union_membership,
        add_dependencies_on_all_siblings=add_dependencies_on_all_siblings,
    )
async def map_protobuf_files(protobuf_targets: AllProtobufTargets) -> ProtobufMapping:
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSourceField]))
        for tgt in protobuf_targets
    )

    stripped_files_to_addresses: dict[str, Address] = {}
    stripped_files_with_multiple_owners: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt, stripped_sources in zip(protobuf_targets, stripped_sources_per_target):
        assert len(stripped_sources) == 1
        stripped_f = stripped_sources[0]
        if stripped_f in stripped_files_to_addresses:
            stripped_files_with_multiple_owners[stripped_f].update(
                {stripped_files_to_addresses[stripped_f], tgt.address}
            )
        else:
            stripped_files_to_addresses[stripped_f] = tgt.address

    # Remove files with ambiguous owners.
    for ambiguous_stripped_f in stripped_files_with_multiple_owners:
        stripped_files_to_addresses.pop(ambiguous_stripped_f)

    return ProtobufMapping(
        mapping=FrozenDict(sorted(stripped_files_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(stripped_files_with_multiple_owners.items())
        ),
    )
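
# Minimal sketch of the ownership-mapping pattern used above, with plain builtins:
# the first target to claim a file owns it; any later claim marks the file ambiguous,
# and ambiguous files are evicted from the final mapping. Names below are illustrative.
from collections import defaultdict


def build_owner_mapping(
    claims: list,  # (file, owner_address) pairs
) -> tuple:
    owners: dict = {}
    ambiguous: dict = defaultdict(set)
    for f, addr in claims:
        if f in owners:
            # Second claimant: record both owners as ambiguous.
            ambiguous[f].update({owners[f], addr})
        else:
            owners[f] = addr
    # Drop every ambiguous file from the unambiguous mapping.
    for f in ambiguous:
        owners.pop(f)
    return owners, dict(ambiguous)


owners, ambiguous = build_owner_mapping(
    [("a.proto", "//a:a"), ("b.proto", "//b:b"), ("b.proto", "//b:b2")]
)
assert owners == {"a.proto": "//a:a"}
assert ambiguous == {"b.proto": {"//b:b", "//b:b2"}}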
async def map_protobuf_files() -> ProtobufMapping:
    all_expanded_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    protobuf_targets = tuple(tgt for tgt in all_expanded_targets if tgt.has_field(ProtobufSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSources]))
        for tgt in protobuf_targets
    )

    stripped_files_to_addresses: dict[str, Address] = {}
    stripped_files_with_multiple_owners: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt, stripped_sources in zip(protobuf_targets, stripped_sources_per_target):
        for stripped_f in stripped_sources:
            if stripped_f in stripped_files_to_addresses:
                stripped_files_with_multiple_owners[stripped_f].update(
                    {stripped_files_to_addresses[stripped_f], tgt.address}
                )
            else:
                stripped_files_to_addresses[stripped_f] = tgt.address

    # Remove files with ambiguous owners.
    for ambiguous_stripped_f in stripped_files_with_multiple_owners:
        stripped_files_to_addresses.pop(ambiguous_stripped_f)

    return ProtobufMapping(
        mapping=FrozenDict(sorted(stripped_files_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(stripped_files_with_multiple_owners.items())
        ),
    )
async def generate_targets_from_python_tests(
    request: GenerateTargetsFromPythonTests,
    files_not_found_behavior: FilesNotFoundBehavior,
    python_infer: PythonInferSubsystem,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    sources_paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[PythonTestsGeneratingSourcesField])
    )

    all_overrides = {}
    overrides_field = request.generator[OverridesField]
    if overrides_field.value:
        _all_override_paths = await MultiGet(
            Get(Paths, PathGlobs, path_globs)
            for path_globs in overrides_field.to_path_globs(files_not_found_behavior)
        )
        all_overrides = overrides_field.flatten_paths(
            dict(zip(_all_override_paths, overrides_field.value.values()))
        )

    return generate_file_level_targets(
        PythonTestTarget,
        request.generator,
        sources_paths.files,
        union_membership,
        add_dependencies_on_all_siblings=not python_infer.imports,
        overrides=all_overrides,
    )
async def generate_mock_generated_target(
    request: MockGenerateTargetsRequest,
) -> GeneratedTargets:
    paths = await Get(SourcesPaths, SourcesPathsRequest(request.generator[Sources]))
    # Generate using both "file address" and "generated target" syntax.
    return GeneratedTargets(
        request.generator,
        [
            *generate_file_level_targets(
                MockGeneratedTarget,
                request.generator,
                paths.files,
                None,
                add_dependencies_on_all_siblings=True,
                use_generated_address_syntax=False,
            ).values(),
            *generate_file_level_targets(
                MockGeneratedTarget,
                request.generator,
                paths.files,
                None,
                add_dependencies_on_all_siblings=True,
                use_generated_address_syntax=True,
            ).values(),
        ],
    )
async def map_protobuf_to_python_modules(
    _: PythonProtobufMappingMarker,
) -> FirstPartyPythonMappingImpl:
    all_expanded_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    protobuf_targets = tuple(tgt for tgt in all_expanded_targets if tgt.has_field(ProtobufSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSources]))
        for tgt in protobuf_targets
    )

    modules_to_addresses: Dict[str, Tuple[Address]] = {}
    modules_with_multiple_owners: Set[str] = set()

    def add_module(module: str, tgt: Target) -> None:
        if module in modules_to_addresses:
            modules_with_multiple_owners.add(module)
        else:
            modules_to_addresses[module] = (tgt.address,)

    for tgt, stripped_sources in zip(protobuf_targets, stripped_sources_per_target):
        for stripped_f in stripped_sources:
            # NB: We don't consider the MyPy plugin, which generates `_pb2.pyi`. The stubs end up
            # sharing the same module as the implementation `_pb2.py`. Because both generated files
            # come from the same original Protobuf target, we're covered.
            add_module(proto_path_to_py_module(stripped_f, suffix="_pb2"), tgt)
            if tgt.get(ProtobufGrpcToggle).value:
                add_module(proto_path_to_py_module(stripped_f, suffix="_pb2_grpc"), tgt)

    # Remove modules with ambiguous owners.
    for ambiguous_module in modules_with_multiple_owners:
        modules_to_addresses.pop(ambiguous_module)

    return FirstPartyPythonMappingImpl(modules_to_addresses)
async def restrict_conflicting_sources(ptgt: PutativeTarget) -> DisjointSourcePutativeTarget:
    source_paths = await Get(
        Paths,
        PathGlobs(
            Sources.prefix_glob_with_dirpath(ptgt.path, glob) for glob in ptgt.owned_sources
        ),
    )
    source_path_set = set(source_paths.files)
    source_dirs = {os.path.dirname(path) for path in source_path_set}
    possible_owners = await Get(
        UnexpandedTargets, AddressSpecs(AscendantAddresses(d) for d in source_dirs)
    )
    possible_owners_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(t.get(Sources))) for t in possible_owners
    )
    conflicting_targets = []
    for tgt, sources in zip(possible_owners, possible_owners_sources):
        if source_path_set.intersection(sources.files):
            conflicting_targets.append(tgt)

    if conflicting_targets:
        conflicting_addrs = sorted(tgt.address.spec for tgt in conflicting_targets)
        explicit_srcs_str = ", ".join(ptgt.kwargs.get("sources") or [])  # type: ignore[arg-type]
        orig_sources_str = (
            f"[{explicit_srcs_str}]" if explicit_srcs_str else f"the default for {ptgt.type_alias}"
        )
        ptgt = ptgt.restrict_sources().add_comments(
            [f"# NOTE: Sources restricted from {orig_sources_str} due to conflict with"]
            + [f"# - {caddr}" for caddr in conflicting_addrs]
        )
    return DisjointSourcePutativeTarget(ptgt)
async def map_shell_files() -> ShellMapping:
    all_expanded_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    shell_tgts = tuple(tgt for tgt in all_expanded_targets if tgt.has_field(ShellSources))
    sources_per_target = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[ShellSources])) for tgt in shell_tgts
    )

    files_to_addresses: dict[str, Address] = {}
    files_with_multiple_owners: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt, sources in zip(shell_tgts, sources_per_target):
        for f in sources.files:
            if f in files_to_addresses:
                files_with_multiple_owners[f].update({files_to_addresses[f], tgt.address})
            else:
                files_to_addresses[f] = tgt.address

    # Remove files with ambiguous owners.
    for ambiguous_f in files_with_multiple_owners:
        files_to_addresses.pop(ambiguous_f)

    return ShellMapping(
        mapping=FrozenDict(sorted(files_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v))) for k, v in sorted(files_with_multiple_owners.items())
        ),
    )
async def determine_all_owned_sources(all_tgts: AllUnexpandedTargets) -> AllOwnedSources:
    all_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt.get(SourcesField))) for tgt in all_tgts
    )
    return AllOwnedSources(
        itertools.chain.from_iterable(sources_paths.files for sources_paths in all_sources_paths)
    )
def assert_stripped_source_file_names(
    address: Address, *, source_root: str, expected: list[str]
) -> None:
    rule_runner.set_options([f"--source-root-patterns=['{source_root}']"])
    tgt = rule_runner.get_target(address)
    result = rule_runner.request(StrippedSourceFileNames, [SourcesPathsRequest(tgt[Sources])])
    assert set(result) == set(expected)
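
# Hypothetical usage of the helper above (assumes a `rule_runner` fixture in the
# enclosing test, with BUILD files and sources already written, e.g. via
# `rule_runner.write_files`); the address and paths here are illustrative only:
#
# assert_stripped_source_file_names(
#     Address("src/python/project", target_name="lib"),
#     source_root="src/python",
#     expected=["project/app.py"],
# )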
async def determine_all_owned_sources() -> AllOwnedSources:
    all_tgts = await Get(UnexpandedTargets, AddressSpecs([MaybeEmptyDescendantAddresses("")]))
    all_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt.get(Sources))) for tgt in all_tgts
    )
    return AllOwnedSources(
        itertools.chain.from_iterable(sources_paths.files for sources_paths in all_sources_paths)
    )
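
# Tiny sketch of the flattening step above: `itertools.chain.from_iterable`
# concatenates each target's file list into one stream of owned paths.
import itertools

per_target_files = [["a/x.py"], ["b/y.sh", "b/z.sh"], []]
assert list(itertools.chain.from_iterable(per_target_files)) == ["a/x.py", "b/y.sh", "b/z.sh"]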
async def generate_targets_from_files(
    request: GenerateTargetsFromFiles, union_membership: UnionMembership
) -> GeneratedTargets:
    paths = await Get(SourcesPaths, SourcesPathsRequest(request.generator[FilesSources]))
    return generate_file_level_targets(
        Files,
        request.generator,
        paths.files,
        union_membership,
        add_dependencies_on_all_siblings=False,
    )
async def generate_targets_from_junit_tests(
    request: GenerateTargetsFromJunitTests, union_membership: UnionMembership
) -> GeneratedTargets:
    paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[JavaTestsGeneratorSourcesField])
    )
    return generate_file_level_targets(
        JunitTestTarget,
        request.generator,
        paths.files,
        union_membership,
        add_dependencies_on_all_siblings=False,
    )
async def generate_targets_from_protobuf_library(
    request: GenerateTargetsFromProtobufLibrary,
    protoc: Protoc,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    paths = await Get(SourcesPaths, SourcesPathsRequest(request.generator[ProtobufSources]))
    return generate_file_level_targets(
        ProtobufLibrary,
        request.generator,
        paths.files,
        union_membership,
        add_dependencies_on_all_siblings=not protoc.dependency_inference,
    )
async def generate_targets_from_scala_sources(
    request: GenerateTargetsFromScalaSources, union_membership: UnionMembership
) -> GeneratedTargets:
    paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[ScalaSourcesGeneratorSourcesField])
    )
    return generate_file_level_targets(
        ScalaSourceTarget,
        request.generator,
        paths.files,
        union_membership,
        add_dependencies_on_all_siblings=True,
        use_source_field=True,
    )
async def generate_targets_from_python_library(
    request: GenerateTargetsFromPythonLibrary,
    python_infer: PythonInferSubsystem,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[PythonLibrarySources])
    )
    return generate_file_level_targets(
        PythonLibrary,
        request.generator,
        paths.files,
        union_membership,
        add_dependencies_on_all_siblings=not python_infer.imports,
    )
async def generate_targets_from_shell_sources(
    request: GenerateTargetsFromShellSources,
    shell_setup: ShellSetup,
    union_membership: UnionMembership,
) -> GeneratedTargets:
    paths = await Get(
        SourcesPaths, SourcesPathsRequest(request.generator[ShellSourcesGeneratingSourcesField])
    )
    return generate_file_level_targets(
        ShellSourceTarget,
        request.generator,
        paths.files,
        union_membership,
        add_dependencies_on_all_siblings=not shell_setup.dependency_inference,
    )
async def map_first_party_python_targets_to_modules(
    _: FirstPartyPythonTargetsMappingMarker, all_python_targets: AllPythonTargets
) -> FirstPartyPythonMappingImpl:
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[PythonSourceField]))
        for tgt in all_python_targets.first_party
    )

    modules_to_addresses: DefaultDict[str, list[Address]] = defaultdict(list)
    modules_with_multiple_implementations: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt, stripped_sources in zip(all_python_targets.first_party, stripped_sources_per_target):
        # `PythonSourceFile` validates that each target has exactly one file.
        assert len(stripped_sources) == 1
        stripped_f = stripped_sources[0]
        module = PythonModule.create_from_stripped_path(PurePath(stripped_f)).module
        if module not in modules_to_addresses:
            modules_to_addresses[module].append(tgt.address)
            continue
        # Else, possible ambiguity. Check if one of the targets is an implementation
        # (.py file) and the other is a type stub (.pyi file), which we allow. Otherwise, it's
        # ambiguous.
        prior_is_type_stub = len(modules_to_addresses[module]) == 1 and modules_to_addresses[
            module
        ][0].filename.endswith(".pyi")
        current_is_type_stub = tgt.address.filename.endswith(".pyi")
        if prior_is_type_stub ^ current_is_type_stub:
            modules_to_addresses[module].append(tgt.address)
        else:
            modules_with_multiple_implementations[module].update(
                {*modules_to_addresses[module], tgt.address}
            )

    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_implementations:
        modules_to_addresses.pop(module)

    return FirstPartyPythonMappingImpl(
        mapping=FrozenDict((k, tuple(sorted(v))) for k, v in sorted(modules_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(modules_with_multiple_implementations.items())
        ),
    )
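
# Sketch of the implementation-vs-type-stub check used above: a module may be owned
# by exactly one `.py` implementation plus one `.pyi` stub, but two files of the same
# kind make it ambiguous. The XOR expresses "exactly one of the two is a stub".
def may_coexist(prior_file: str, new_file: str) -> bool:
    prior_is_stub = prior_file.endswith(".pyi")
    new_is_stub = new_file.endswith(".pyi")
    return prior_is_stub ^ new_is_stub


assert may_coexist("project/util.py", "project/util.pyi")
assert not may_coexist("project/util.py", "project/util.py")
assert not may_coexist("project/util.pyi", "project/util.pyi")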
async def map_first_party_python_targets_to_modules(
    _: FirstPartyPythonTargetsMappingMarker,
) -> FirstPartyPythonMappingImpl:
    all_expanded_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    python_targets = tuple(tgt for tgt in all_expanded_targets if tgt.has_field(PythonSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[PythonSources]))
        for tgt in python_targets
    )

    modules_to_addresses: DefaultDict[str, list[Address]] = defaultdict(list)
    modules_with_multiple_implementations: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt, stripped_sources in zip(python_targets, stripped_sources_per_target):
        for stripped_f in stripped_sources:
            module = PythonModule.create_from_stripped_path(PurePath(stripped_f)).module
            if module in modules_to_addresses:
                # We check if one of the targets is an implementation (.py file) and the other is
                # a type stub (.pyi file), which we allow. Otherwise, we have ambiguity.
                prior_is_type_stub = len(
                    modules_to_addresses[module]
                ) == 1 and modules_to_addresses[module][0].filename.endswith(".pyi")
                current_is_type_stub = tgt.address.filename.endswith(".pyi")
                if prior_is_type_stub ^ current_is_type_stub:
                    modules_to_addresses[module].append(tgt.address)
                else:
                    modules_with_multiple_implementations[module].update(
                        {*modules_to_addresses[module], tgt.address}
                    )
            else:
                modules_to_addresses[module].append(tgt.address)

    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_implementations:
        modules_to_addresses.pop(module)

    return FirstPartyPythonMappingImpl(
        mapping=FrozenDict((k, tuple(sorted(v))) for k, v in sorted(modules_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(modules_with_multiple_implementations.items())
        ),
    )
async def map_protobuf_to_python_modules(
    protobuf_targets: AllProtobufTargets,
    _: PythonProtobufMappingMarker,
) -> FirstPartyPythonMappingImpl:
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSourceField]))
        for tgt in protobuf_targets
    )

    # NB: There should be only one address per module, else it's ambiguous.
    modules_to_addresses: dict[str, tuple[Address]] = {}
    modules_with_multiple_owners: DefaultDict[str, set[Address]] = defaultdict(set)

    def add_module(module: str, tgt: Target) -> None:
        if module in modules_to_addresses:
            modules_with_multiple_owners[module].update(
                {*modules_to_addresses[module], tgt.address}
            )
        else:
            modules_to_addresses[module] = (tgt.address,)

    for tgt, stripped_sources in zip(protobuf_targets, stripped_sources_per_target):
        for stripped_f in stripped_sources:
            # NB: We don't consider the MyPy plugin, which generates `_pb2.pyi`. The stubs end up
            # sharing the same module as the implementation `_pb2.py`. Because both generated files
            # come from the same original Protobuf target, we're covered.
            add_module(proto_path_to_py_module(stripped_f, suffix="_pb2"), tgt)
            if tgt.get(ProtobufGrpcToggleField).value:
                add_module(proto_path_to_py_module(stripped_f, suffix="_pb2_grpc"), tgt)

    # Remove modules with ambiguous owners.
    for ambiguous_module in modules_with_multiple_owners:
        modules_to_addresses.pop(ambiguous_module)

    return FirstPartyPythonMappingImpl(
        mapping=FrozenDict(sorted(modules_to_addresses.items())),
        ambiguous_modules=FrozenDict(
            (k, tuple(sorted(v))) for k, v in sorted(modules_with_multiple_owners.items())
        ),
    )
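
# Assumed behavior of `proto_path_to_py_module` (an illustrative sketch, not the real
# helper): a source-root-stripped path such as `dir/f.proto` maps to the generated
# module `dir.f<suffix>`, e.g. `dir.f_pb2` or `dir.f_pb2_grpc`. Stripped paths are
# assumed to use `/` separators.
import os


def proto_path_to_py_module_sketch(stripped_path: str, *, suffix: str) -> str:
    base, _ = os.path.splitext(stripped_path)  # drop ".proto"
    return base.replace("/", ".") + suffix


assert proto_path_to_py_module_sketch("models/user.proto", suffix="_pb2") == "models.user_pb2"
assert (
    proto_path_to_py_module_sketch("models/user.proto", suffix="_pb2_grpc")
    == "models.user_pb2_grpc"
)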
async def generate_subtargets(address: Address) -> Subtargets:
    if address.is_file_target:
        raise ValueError(f"Cannot generate file Targets for a file Address: {address}")
    wrapped_build_target = await Get(WrappedTarget, Address, address)
    build_target = wrapped_build_target.target
    if not build_target.has_field(Dependencies) or not build_target.has_field(Sources):
        # If a target type does not support dependencies, we do not split it, as that would prevent
        # the BUILD target from depending on its splits.
        return Subtargets(build_target, ())

    # Generate a subtarget per source.
    paths = await Get(SourcesPaths, SourcesPathsRequest(build_target[Sources]))
    wrapped_subtargets = await MultiGet(
        Get(
            WrappedTarget,
            Address,
            generate_subtarget_address(address, full_file_name=subtarget_file),
        )
        for subtarget_file in paths.files
    )
    return Subtargets(build_target, tuple(wt.target for wt in wrapped_subtargets))
async def map_protobuf_files() -> ProtobufMapping:
    all_expanded_targets = await Get(Targets, AddressSpecs([DescendantAddresses("")]))
    protobuf_targets = tuple(tgt for tgt in all_expanded_targets if tgt.has_field(ProtobufSources))
    stripped_sources_per_target = await MultiGet(
        Get(StrippedSourceFileNames, SourcesPathsRequest(tgt[ProtobufSources]))
        for tgt in protobuf_targets
    )

    stripped_files_to_addresses: Dict[str, Address] = {}
    stripped_files_with_multiple_owners: Set[str] = set()
    for tgt, stripped_sources in zip(protobuf_targets, stripped_sources_per_target):
        for stripped_f in stripped_sources:
            if stripped_f in stripped_files_to_addresses:
                stripped_files_with_multiple_owners.add(stripped_f)
            else:
                stripped_files_to_addresses[stripped_f] = tgt.address

    # Remove files with ambiguous owners.
    for ambiguous_stripped_f in stripped_files_with_multiple_owners:
        stripped_files_to_addresses.pop(ambiguous_stripped_f)

    return ProtobufMapping(stripped_files_to_addresses)
async def resolve_bsp_build_target_source_roots(
    bsp_target: BSPBuildTargetInternal,
) -> BSPBuildTargetSourcesInfo:
    targets = await Get(Targets, AddressSpecs, bsp_target.specs.address_specs)
    targets_with_sources = [tgt for tgt in targets if tgt.has_field(SourcesField)]
    sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField])) for tgt in targets_with_sources
    )
    merged_source_files: set[str] = set()
    for sp in sources_paths:
        merged_source_files.update(sp.files)
    source_roots_result = await Get(
        SourceRootsResult, SourceRootsRequest, SourceRootsRequest.for_files(merged_source_files)
    )
    source_root_paths = {x.path for x in source_roots_result.path_to_root.values()}
    return BSPBuildTargetSourcesInfo(
        source_files=frozenset(merged_source_files),
        source_roots=frozenset(source_root_paths),
    )
async def resolve_specs_paths(specs: Specs) -> SpecsPaths:
    """Resolve all files matching the given specs.

    All matched targets will use their `sources` field. Certain specs like FileLiteralSpec will
    also match against all their files, regardless of whether a target owns them.

    Ignores win out over includes, with these edge cases:

    * Ignored paths: the resolved paths should be excluded.
    * Ignored targets: their `sources` should be excluded.
    * Files owned by a target that gets filtered out, e.g. via `--tag`. See
      https://github.com/pantsbuild/pants/issues/15478.
    """
    unfiltered_include_targets, ignore_targets, include_paths, ignore_paths = await MultiGet(
        Get(Targets, RawSpecs, dataclasses.replace(specs.includes, filter_by_global_options=False)),
        Get(Targets, RawSpecs, specs.ignores),
        Get(Paths, PathGlobs, specs.includes.to_specs_paths_path_globs()),
        Get(Paths, PathGlobs, specs.ignores.to_specs_paths_path_globs()),
    )

    filtered_include_targets = await Get(FilteredTargets, Targets, unfiltered_include_targets)
    include_targets_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField]))
        for tgt in filtered_include_targets
        if tgt.has_field(SourcesField)
    )

    ignore_targets_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField]))
        for tgt in ignore_targets
        if tgt.has_field(SourcesField)
    )

    result_paths = OrderedSet(
        itertools.chain.from_iterable(paths.files for paths in include_targets_sources_paths),
    )
    result_paths.update(include_paths.files)
    result_paths.difference_update(
        itertools.chain.from_iterable(paths.files for paths in ignore_targets_sources_paths)
    )
    result_paths.difference_update(ignore_paths.files)

    # If include paths were given, we need to also remove any paths from filtered out targets
    # (e.g. via `--tag`), per https://github.com/pantsbuild/pants/issues/15478.
    if include_paths.files:
        filtered_out_include_targets = FrozenOrderedSet(unfiltered_include_targets).difference(
            FrozenOrderedSet(filtered_include_targets)
        )
        filtered_include_targets_sources_paths = await MultiGet(
            Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField]))
            for tgt in filtered_out_include_targets
            if tgt.has_field(SourcesField)
        )
        result_paths.difference_update(
            itertools.chain.from_iterable(
                paths.files for paths in filtered_include_targets_sources_paths
            )
        )

    dirs = OrderedSet(
        itertools.chain.from_iterable(recursive_dirname(os.path.dirname(f)) for f in result_paths)
    ) - {""}
    return SpecsPaths(tuple(sorted(result_paths)), tuple(sorted(dirs)))
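
# Sketch of the include/ignore set arithmetic above, using plain sets: start from the
# files of filtered include targets plus literal include paths, subtract ignored
# targets' files and ignored paths, and, only when literal include paths were given,
# also subtract files of include targets that were filtered out (e.g. via `--tag`).
# All parameter names here are illustrative.
def resolve_paths_sketch(
    include_target_files: set,
    include_paths: set,
    ignore_target_files: set,
    ignore_paths: set,
    filtered_out_target_files: set,
) -> set:
    result = include_target_files | include_paths
    result -= ignore_target_files
    result -= ignore_paths
    if include_paths:
        result -= filtered_out_target_files
    return result


assert resolve_paths_sketch(
    include_target_files={"a.py", "b.py"},
    include_paths={"c.py"},
    ignore_target_files={"b.py"},
    ignore_paths=set(),
    filtered_out_target_files={"c.py"},
) == {"a.py"}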
async def setup_full_package_build_request(
    request: _SetupGoProtobufPackageBuildRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
    package_mapping: ImportPathToPackages,
    go_protobuf_mapping: GoProtobufImportPathMapping,
    analyzer: PackageAnalyzerSetup,
) -> FallibleBuildGoPackageRequest:
    output_dir = "_generated_files"
    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    transitive_targets, downloaded_protoc_binary, empty_output_dir = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses)),
        Get(DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    all_sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            sources_fields=(tgt[ProtobufSourceField] for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSourceField,),
            enable_codegen=True,
        ),
    )

    source_roots, input_digest = await MultiGet(
        Get(SourceRootsResult, SourceRootsRequest, SourceRootsRequest.for_files(all_sources.files)),
        Get(Digest, MergeDigests([all_sources.snapshot.digest, empty_output_dir])),
    )

    source_root_paths = sorted({sr.path for sr in source_roots.path_to_root.values()})

    pkg_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[ProtobufSourceField]))
        for tgt in transitive_targets.roots
    )
    pkg_files = sorted({f for ps in pkg_sources for f in ps.files})

    maybe_grpc_plugin_args = []
    if any(tgt.get(ProtobufGrpcToggleField).value for tgt in transitive_targets.roots):
        maybe_grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    gen_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, downloaded_protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *(f"--proto_path={source_root}" for source_root in source_root_paths),
                *maybe_grpc_plugin_args,
                *pkg_files,
            ],
            # Note: Necessary or else --plugin option needs absolute path.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: downloaded_protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=f"Generating Go sources from {request.import_path}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )
    if gen_result.exit_code != 0:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=gen_result.exit_code,
            stderr=gen_result.stderr.decode(),
        )

    # Ensure that the generated files are in a single package directory.
    gen_sources = await Get(Snapshot, Digest, gen_result.output_digest)
    files_by_dir = group_by_dir(gen_sources.files)
    if len(files_by_dir) != 1:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=1,
            stderr=(
                "Expected Go files generated from Protobuf sources to be output to a single directory.\n"
                f"- import path: {request.import_path}\n"
                f"- protobuf files: {', '.join(pkg_files)}"
            ),
        )
    gen_dir = list(files_by_dir.keys())[0]

    # Analyze the generated sources.
    input_digest = await Get(Digest, MergeDigests([gen_sources.digest, analyzer.digest]))
    result = await Get(
        FallibleProcessResult,
        Process(
            (analyzer.path, gen_dir),
            input_digest=input_digest,
            description=f"Determine metadata for generated Go package for {request.import_path}",
            level=LogLevel.DEBUG,
            env={"CGO_ENABLED": "0"},
        ),
    )

    # Parse the metadata from the analysis.
    fallible_analysis = FallibleFirstPartyPkgAnalysis.from_process_result(
        result,
        dir_path=gen_dir,
        import_path=request.import_path,
        minimum_go_version="",
        description_of_source=f"Go package generated from protobuf targets `{', '.join(str(addr) for addr in request.addresses)}`",
    )
    if not fallible_analysis.analysis:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=fallible_analysis.exit_code,
            stderr=fallible_analysis.stderr,
        )
    analysis = fallible_analysis.analysis

    # Obtain build requests for third-party dependencies.
    # TODO: Consider how to merge this code with existing dependency inference code.
    dep_build_request_addrs: list[Address] = []
    for dep_import_path in (
        *analysis.imports,
        *analysis.test_imports,
        *analysis.xtest_imports,
    ):
        # Infer dependencies on other Go packages.
        candidate_addresses = package_mapping.mapping.get(dep_import_path)
        if candidate_addresses:
            # TODO: Use explicit dependencies to disambiguate? This should never happen with Go backend though.
            if len(candidate_addresses) > 1:
                return FallibleBuildGoPackageRequest(
                    request=None,
                    import_path=request.import_path,
                    exit_code=result.exit_code,
                    stderr=(
                        f"Multiple addresses match import of `{dep_import_path}`.\n"
                        f"addresses: {', '.join(str(a) for a in candidate_addresses)}"
                    ),
                )
            dep_build_request_addrs.extend(candidate_addresses)

        # Infer dependencies on other generated Go sources.
        go_protobuf_candidate_addresses = go_protobuf_mapping.mapping.get(dep_import_path)
        if go_protobuf_candidate_addresses:
            dep_build_request_addrs.extend(go_protobuf_candidate_addresses)

    dep_build_requests = await MultiGet(
        Get(BuildGoPackageRequest, BuildGoPackageTargetRequest(addr))
        for addr in dep_build_request_addrs
    )

    return FallibleBuildGoPackageRequest(
        request=BuildGoPackageRequest(
            import_path=request.import_path,
            digest=gen_sources.digest,
            dir_path=analysis.dir_path,
            go_file_names=analysis.go_files,
            s_file_names=analysis.s_files,
            direct_dependencies=dep_build_requests,
            minimum_go_version=analysis.minimum_go_version,
        ),
        import_path=request.import_path,
    )