async def resolve_bsp_build_target_addresses(
    bsp_target: BSPBuildTargetInternal,
    union_membership: UnionMembership,
) -> Targets:
    """Resolve the targets matched by a BSP build target's specs, applying its `resolve` filter.

    If the BSP target definition has no `resolve_filter`, all matched targets are returned.
    Otherwise, only targets whose resolve field value (per the matching
    `BSPResolveFieldFactory` implementations) equals the filter's value are kept.

    Raises:
        ValueError: if the configured `resolve` filter is not of the form `$lang:$filter`.
    """
    # NB: Using `RawSpecs` directly rather than `RawSpecsWithoutFileOwners` results in a rule graph cycle.
    targets = await Get(
        Targets,
        RawSpecsWithoutFileOwners,
        RawSpecsWithoutFileOwners.from_raw_specs(bsp_target.specs),
    )
    if bsp_target.definition.resolve_filter is None:
        return targets

    resolve_filter = bsp_target.definition.resolve_filter
    # `partition` yields ("", "", value) when ":" is absent, which the check below rejects.
    resolve_prefix, matched, resolve_value = resolve_filter.partition(":")
    if not resolve_prefix or not matched:
        raise ValueError(
            f"The `resolve` filter for `{bsp_target}` must have a platform or language specific "
            f"prefix like `$lang:$filter`, but the configured value: `{resolve_filter}` did not."
        )

    # TODO: See `BSPResolveFieldFactoryRequest` re: this awkwardness.
    # Only consult factory implementations registered for this filter's language prefix.
    factories = await MultiGet(
        Get(BSPResolveFieldFactoryResult, BSPResolveFieldFactoryRequest, request())
        for request in union_membership.get(BSPResolveFieldFactoryRequest)
        if request.resolve_prefix == resolve_prefix
    )

    # Keep a target if ANY factory maps it to the requested resolve value.
    return Targets(
        t
        for t in targets
        if any((factory.resolve_field_value)(t) == resolve_value for factory in factories)
    )
def target_types_to_generate_targets_requests(
    union_membership: UnionMembership,
) -> TargetTypesToGenerateTargetsRequests:
    """Index each registered `GenerateTargetsRequest` by the target type it generates from."""
    mapping = {}
    for generator_cls in union_membership.get(GenerateTargetsRequest):
        mapping[generator_cls.generate_from] = generator_cls  # type: ignore[misc]
    return TargetTypesToGenerateTargetsRequests(mapping)
def _register_task(self, tasks, rule: TaskRule, union_membership: UnionMembership) -> None:
    """Register the given TaskRule with the native scheduler.

    Emits the rule's metadata, its parameter selectors, and one Get edge per awaited
    dependency into the native `tasks` registry via FFI calls. Call order matters:
    the native API expects begin -> selects/gets -> end.
    """
    self._native.lib.tasks_task_begin(
        tasks,
        rule.func,
        rule.output_type,
        issubclass(rule.output_type, EngineAwareReturnType),
        rule.cacheable,
        rule.canonical_name,
        rule.desc or "",
        rule.level.level,
    )
    for selector in rule.input_selectors:
        self._native.lib.tasks_add_select(tasks, selector)

    def add_get_edge(product, subject):
        # Record a single (output product, input subject) Get edge on the native side.
        self._native.lib.tasks_add_get(tasks, product, subject)

    for the_get in rule.input_gets:
        if union.is_instance(the_get.input_type):
            # If the registered subject type is a union, add Get edges to all registered
            # union members.
            for union_member in union_membership.get(the_get.input_type):
                add_get_edge(the_get.output_type, union_member)
        else:
            # Otherwise, the Get subject is a "concrete" type, so add a single Get edge.
            add_get_edge(the_get.output_type, the_get.input_type)
    self._native.lib.tasks_task_end(tasks)
async def resources_bsp_target(
    request: ResourcesForOneBSPTargetRequest,
    union_membership: UnionMembership,
) -> BSPResourcesResult:
    """Collect the resources for one BSP build target.

    Fans out to every applicable `BSPResourcesRequest` implementation (grouped by the
    field sets each implementation can handle), then merges the per-implementation
    results into a single sorted resource tuple and one merged output digest.
    """
    targets = await Get(Targets, BSPBuildTargetInternal, request.bsp_target)

    resources_request_types: FrozenOrderedSet[Type[BSPResourcesRequest]] = union_membership.get(
        BSPResourcesRequest
    )
    # Group applicable field sets per request type; a set de-duplicates identical field sets.
    field_sets_by_request_type: dict[Type[BSPResourcesRequest], set[FieldSet]] = defaultdict(set)
    for target in targets:
        for resources_request_type in resources_request_types:
            field_set_type = resources_request_type.field_set_type
            if field_set_type.is_applicable(target):
                field_set = field_set_type.create(target)
                field_sets_by_request_type[resources_request_type].add(field_set)

    resources_results = await MultiGet(
        Get(
            BSPResourcesResult,
            BSPResourcesRequest,
            resources_request_type(bsp_target=request.bsp_target, field_sets=tuple(field_sets)),
        )
        for resources_request_type, field_sets in field_sets_by_request_type.items()
    )

    # Flatten, de-duplicate, and sort so the result is deterministic.
    resources = tuple(sorted({resource for rr in resources_results for resource in rr.resources}))

    output_digest = await Get(Digest, MergeDigests([rr.output_digest for rr in resources_results]))

    return BSPResourcesResult(
        resources=resources,
        output_digest=output_digest,
    )
async def determine_setup_kwargs(
    exported_target: ExportedTarget, union_membership: UnionMembership
) -> SetupKwargs:
    """Determine the `setup()` kwargs for an exported target.

    Delegates to a plugin-provided `SetupKwargsRequest` implementation when exactly one
    is applicable; falls back to the kwargs written in the BUILD file when none apply.

    Raises:
        ValueError: if more than one registered implementation is applicable, since it
            would be ambiguous which one to use.
    """
    target = exported_target.target
    setup_kwargs_requests = union_membership.get(SetupKwargsRequest)  # type: ignore[misc]
    applicable_setup_kwargs_requests = tuple(
        request for request in setup_kwargs_requests if request.is_applicable(target)
    )

    # If no provided implementations, fall back to our default implementation that simply returns
    # what the user explicitly specified in the BUILD file.
    if not applicable_setup_kwargs_requests:
        return SetupKwargs(exported_target.provides.kwargs, address=target.address)

    if len(applicable_setup_kwargs_requests) > 1:
        possible_requests = sorted(plugin.__name__ for plugin in applicable_setup_kwargs_requests)
        raise ValueError(
            f"Multiple of the registered `SetupKwargsRequest`s can work on the target "
            f"{target.address}, and it's ambiguous which to use: {possible_requests}\n\nPlease "
            "activate fewer implementations, or make the classmethod `is_applicable()` more "
            "precise so that only one implementation is applicable for this target."
        )

    # Exactly one applicable implementation remains. (It is already a tuple, so index it
    # directly; the previous `tuple(...)[0]` re-wrapped it redundantly.)
    setup_kwargs_request = applicable_setup_kwargs_requests[0]
    return await Get(SetupKwargs, SetupKwargsRequest, setup_kwargs_request(target))
def for_targets(
    union_membership: UnionMembership, component: CoarsenedTarget, resolve: CoursierResolveKey
) -> ClasspathEntryRequest:
    """Constructs a subclass compatible with the members of the CoarsenedTarget.

    An implementation is compatible only if every member of the component matches at
    least one of its field sets.

    Raises:
        ClasspathSourceAmbiguity: if more than one implementation is compatible.
        ClasspathSourceMissing: if no single implementation covers all members.
    """
    compatible = []
    impls = union_membership.get(ClasspathEntryRequest)
    for impl in impls:
        if all(any(fs.is_applicable(t) for fs in impl.field_sets) for t in component.members):
            compatible.append(impl)
    if len(compatible) == 1:
        return compatible[0](component, resolve)

    impls_str = ", ".join(sorted(impl.__name__ for impl in impls))
    targets_str = "\n ".join(
        sorted(f"{t.address.spec}\t({type(t).alias})" for t in component.members)
    )
    if compatible:
        raise ClasspathSourceAmbiguity(
            f"More than one JVM compiler instance ({impls_str}) was compatible with "
            f"the inputs:\n {targets_str}"
        )
    else:
        # Fixed duplicated word: the message previously read "all of the the inputs".
        raise ClasspathSourceMissing(
            f"No single JVM compiler instance (from: {impls_str}) was compatible with all of "
            f"the inputs:\n {targets_str}"
        )
async def construct_workunits_callback_factories(
    union_membership: UnionMembership,
) -> WorkunitsCallbackFactories:
    """Instantiate one `WorkunitsCallbackFactory` per registered factory request type."""
    request_types = union_membership.get(WorkunitsCallbackFactoryRequest)
    workunit_callback_factories = await MultiGet(
        Get(WorkunitsCallbackFactory, WorkunitsCallbackFactoryRequest, request_type())
        for request_type in request_types
    )
    return WorkunitsCallbackFactories(workunit_callback_factories)
async def export(
    console: Console,
    targets: Targets,
    export_subsystem: ExportSubsystem,
    workspace: Workspace,
    union_membership: UnionMembership,
    build_root: BuildRoot,
    dist_dir: DistDir,
) -> Export:
    """Write all registered exportable data for the given targets under `dist/export`.

    Each `ExportableDataRequest` implementation contributes a digest (written under its
    `reldir`) and optional symlinks, which are materialized as absolute links into the
    build root.
    """
    request_types = cast(
        "Iterable[type[ExportableDataRequest]]", union_membership.get(ExportableDataRequest)
    )
    requests = tuple(request_type(targets) for request_type in request_types)
    exportables = await MultiGet(
        Get(ExportableData, ExportableDataRequest, request) for request in requests
    )
    prefixed_digests = await MultiGet(
        Get(Digest, AddPrefix(exp.digest, exp.reldir)) for exp in exportables
    )
    output_dir = os.path.join(str(dist_dir.relpath), "export")
    merged_digest = await Get(Digest, MergeDigests(prefixed_digests))
    dist_digest = await Get(Digest, AddPrefix(merged_digest, output_dir))
    workspace.write_digest(dist_digest)
    for exp in exportables:
        for symlink in exp.symlinks:
            # Note that if symlink.source_path is an abspath, join returns it unchanged.
            source_abspath = os.path.join(build_root.path, symlink.source_path)
            link_abspath = os.path.abspath(
                os.path.join(output_dir, exp.reldir, symlink.link_rel_path)
            )
            absolute_symlink(source_abspath, link_abspath)
        console.print_stdout(f"Wrote {exp.description} to {os.path.join(output_dir, exp.reldir)}")
    return Export(exit_code=0)
async def merge_first_party_module_mappings(
    union_membership: UnionMembership,
) -> FirstPartyPythonModuleMapping:
    """Merge every first-party mapping implementation into one resolve -> module -> providers map.

    Providers for the same module within a resolve are concatenated across implementations,
    then sorted for determinism in the frozen result.
    """
    all_mappings = await MultiGet(
        Get(
            FirstPartyPythonMappingImpl,
            FirstPartyPythonMappingImplMarker,
            marker_cls(),
        )
        for marker_cls in union_membership.get(FirstPartyPythonMappingImplMarker)
    )

    # Accumulate providers per (resolve, module) across all implementations.
    resolves_to_modules_to_providers: DefaultDict[
        ResolveName, DefaultDict[str, list[ModuleProvider]]
    ] = defaultdict(lambda: defaultdict(list))
    for mapping_impl in all_mappings:
        for resolve, modules_to_providers in mapping_impl.items():
            for module, providers in modules_to_providers.items():
                resolves_to_modules_to_providers[resolve][module].extend(providers)

    # Freeze with sorted ordering so the product is deterministic and cacheable.
    return FirstPartyPythonModuleMapping(
        (
            resolve,
            FrozenDict(
                (mod, tuple(sorted(providers))) for mod, providers in sorted(mapping.items())
            ),
        )
        for resolve, mapping in sorted(resolves_to_modules_to_providers.items())
    )
def find_valid_field_sets(
    request: FieldSetsPerTargetRequest, union_membership: UnionMembership
) -> FieldSetsPerTarget:
    """For each requested target, instantiate every applicable field set of the requested kind."""
    candidate_types = union_membership.get(request.field_set_superclass)

    def _applicable(target):
        # All field set types of the requested superclass that apply to this target.
        return [fs_type.create(target) for fs_type in candidate_types if fs_type.is_applicable(target)]

    return FieldSetsPerTarget(_applicable(target) for target in request.targets)
async def export_codegen(
    targets: Targets,
    union_membership: UnionMembership,
    workspace: Workspace,
    dist_dir: DistDir,
    registered_target_types: RegisteredTargetTypes,
) -> ExportCodegen:
    """Run every applicable code generator over the given targets and write results to dist.

    Warns (and exits 0) when no target has a sources field matching any generator's input
    type. Generated files are written under `<dist>/codegen`.
    """
    # We run all possible code generators. Running codegen requires specifying the expected
    # output_type, so we must inspect what is possible to generate.
    all_generate_request_types = union_membership.get(GenerateSourcesRequest)
    inputs_to_outputs = {req.input: req.output for req in all_generate_request_types}
    codegen_sources_fields_with_output = []
    for tgt in targets:
        if not tgt.has_field(SourcesField):
            continue
        sources = tgt[SourcesField]
        for input_type in inputs_to_outputs:
            if isinstance(sources, input_type):
                output_type = inputs_to_outputs[input_type]
                codegen_sources_fields_with_output.append((sources, output_type))

    if not codegen_sources_fields_with_output:
        # Build a helpful list of all target types that *could* have matched.
        codegen_targets = sorted(
            {
                tgt_type.alias
                for tgt_type in registered_target_types.types
                for input_sources in inputs_to_outputs.keys()
                if tgt_type.class_has_field(input_sources, union_membership=union_membership)
            }
        )
        logger.warning(
            "No codegen files/targets matched. All codegen target types: "
            f"{', '.join(codegen_targets)}"
        )
        return ExportCodegen(exit_code=0)

    all_hydrated_sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(
                sources_and_output_type[0],
                for_sources_types=(sources_and_output_type[1],),
                enable_codegen=True,
            ),
        )
        for sources_and_output_type in codegen_sources_fields_with_output
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(hydrated_sources.snapshot.digest for hydrated_sources in all_hydrated_sources),
    )

    dest = str(dist_dir.relpath / "codegen")
    logger.info(f"Writing generated files to {dest}")
    workspace.write_digest(merged_digest, path_prefix=dest)
    return ExportCodegen(exit_code=0)
async def merge_symbol_mappings(
    union_membership: UnionMembership,
    targets_that_provide_types: AllJvmTypeProvidingTargets,
    jvm: JvmSubsystem,
    third_party_mapping: ThirdPartySymbolMapping,
) -> SymbolMapping:
    """Merge all first-party symbol maps with the third-party map into one `SymbolMapping`.

    Also validates every `experimental_provides_types` declaration: each declared JVM type
    must actually be provided by at least one of the declaring addresses.

    Raises:
        JvmFirstPartyPackageMappingException: when a `provides` declaration names a type
            that the mapping does not attribute to any of the declaring addresses.
    """
    all_firstparty_mappings = await MultiGet(
        Get(
            SymbolMap,
            FirstPartyMappingRequest,
            marker_cls(),
        )
        for marker_cls in union_membership.get(FirstPartyMappingRequest)
    )
    all_mappings: list[FrozenDict[_ResolveName, FrozenTrieNode]] = [
        *all_firstparty_mappings,
        third_party_mapping,
    ]
    resolves = {resolve for mapping in all_mappings for resolve in mapping.keys()}
    mapping = SymbolMapping(
        FrozenDict(
            (
                resolve,
                FrozenTrieNode.merge(
                    mapping[resolve] for mapping in all_mappings if resolve in mapping
                ),
            )
            for resolve in resolves
        )
    )

    # `experimental_provides_types` ("`provides`") can be declared on a `java_sources` target,
    # so each generated `java_source` target will have that `provides` annotation. All that matters
    # here is that _one_ of the source files amongst the set of sources actually provides that type.

    # Collect each address associated with a `provides` annotation and index by the provided type.
    provided_types: dict[tuple[str, str], set[Address]] = defaultdict(set)
    for tgt in targets_that_provide_types:
        resolve = tgt[JvmResolveField].normalized_value(jvm)
        for provided_type in tgt[JvmProvidesTypesField].value or []:
            provided_types[(resolve, provided_type)].add(tgt.address)

    # Check that at least one address declared by each `provides` value actually provides the type:
    for (resolve, provided_type), provided_addresses in provided_types.items():
        symbol_addresses = mapping.addresses_for_symbol(provided_type, resolve=resolve)
        # Demoted from `info` to `debug`: this fires once per declared type and is
        # diagnostic detail, not user-facing output (the sibling implementation without
        # third-party merging logs nothing here).
        logger.debug(f"addresses for {provided_type} in {resolve}:\n {symbol_addresses}")
        if not any(
            provided_addresses.intersection(ns_addresses)
            for ns_addresses in symbol_addresses.values()
        ):
            raise JvmFirstPartyPackageMappingException(
                f"The target {next(iter(provided_addresses))} declares that it provides the JVM type "
                f"`{provided_type}`, however, it does not appear to actually provide that type."
            )

    return mapping
async def generate_lockfiles_goal(
    workspace: Workspace,
    union_membership: UnionMembership,
    generate_lockfiles_subsystem: GenerateLockfilesSubsystem,
) -> GenerateLockfilesGoal:
    """Generate user and tool lockfiles, write them into the workspace, and log each path.

    User resolves are discovered via `KnownUserResolveNamesRequest` implementations; tool
    lockfiles come from `GenerateToolLockfileSentinel`s, filtered by any `--resolve` names
    the user specified.
    """
    known_user_resolve_names = await MultiGet(
        Get(KnownUserResolveNames, KnownUserResolveNamesRequest, request())
        for request in union_membership.get(KnownUserResolveNamesRequest)
    )
    requested_user_resolve_names, requested_tool_sentinels = determine_resolves_to_generate(
        known_user_resolve_names,
        union_membership.get(GenerateToolLockfileSentinel),
        set(generate_lockfiles_subsystem.resolve_names),
    )

    all_specified_user_requests = await MultiGet(
        Get(UserGenerateLockfiles, RequestedUserResolveNames, resolve_names)
        for resolve_names in requested_user_resolve_names
    )
    specified_tool_requests = await MultiGet(
        Get(WrappedGenerateLockfile, GenerateToolLockfileSentinel, sentinel())
        for sentinel in requested_tool_sentinels
    )
    # Tool requests may be skipped entirely when the user restricted generation to
    # specific resolves.
    applicable_tool_requests = filter_tool_lockfile_requests(
        specified_tool_requests,
        resolve_specified=bool(generate_lockfiles_subsystem.resolve_names),
    )

    results = await MultiGet(
        Get(GenerateLockfileResult, GenerateLockfile, req)
        for req in (
            *(req for reqs in all_specified_user_requests for req in reqs),
            *applicable_tool_requests,
        )
    )

    merged_digest = await Get(Digest, MergeDigests(res.digest for res in results))
    workspace.write_digest(merged_digest)
    for result in results:
        logger.info(f"Wrote lockfile for the resolve `{result.resolve_name}` to {result.path}")

    return GenerateLockfilesGoal(exit_code=0)
async def create_server(
    request: UvicornServerRequest, union_membership: UnionMembership
) -> ExplorerServer:
    """Build a Uvicorn-backed explorer server, applying every registered setup hook."""
    uvicorn = UvicornServer.from_request(request)
    setups = await MultiGet(
        Get(UvicornServerSetup, UvicornServerSetupRequest, request_type(request))
        for request_type in union_membership.get(UvicornServerSetupRequest)
    )
    # Each plugin-provided setup mutates the server config in registration order.
    for setup in setups:
        setup.apply(uvicorn)

    return uvicorn.create_server()
async def run_all_setup_plugins(
    request: AllPytestPluginSetupsRequest, union_membership: UnionMembership
) -> AllPytestPluginSetups:
    """Run every applicable `PytestPluginSetupRequest` implementation for one test target."""
    wrapped_tgt = await Get(WrappedTarget, Address, request.address)
    # Only run implementations that declare themselves applicable to this target.
    applicable_setup_request_types = tuple(
        request
        for request in union_membership.get(PytestPluginSetupRequest)
        if request.is_applicable(wrapped_tgt.target)
    )
    setups = await MultiGet(
        Get(PytestPluginSetup, PytestPluginSetupRequest, request(wrapped_tgt.target))  # type: ignore[misc, abstract]
        for request in applicable_setup_request_types
    )
    return AllPytestPluginSetups(setups)
async def all_helm_plugins(union_membership: UnionMembership) -> HelmPlugins:
    """Download every externally-registered Helm plugin and collect them into `HelmPlugins`."""
    bindings = union_membership.get(ExternalHelmPluginBinding)
    external_plugins = await MultiGet(
        Get(HelmPlugin, ExternalHelmPluginBinding, binding.create()) for binding in bindings
    )
    if logger.isEnabledFor(LogLevel.DEBUG.level):
        # Guarded so the summary strings are only built when DEBUG logging is on.
        plugins_desc = [f"{p.name}, version: {p.version}" for p in external_plugins]
        logger.debug(
            f"Downloaded {pluralize(len(external_plugins), 'external Helm plugin')}:\n{bullet_list(plugins_desc)}"
        )
    return HelmPlugins(external_plugins)
async def merge_first_party_module_mappings(
    union_membership: UnionMembership,
) -> FirstPartySymbolMapping:
    """Merge every first-party `SymbolMap` implementation into one `FirstPartySymbolMapping`."""
    all_mappings = await MultiGet(
        Get(
            SymbolMap,
            FirstPartyMappingRequest,
            marker_cls(),
        )
        for marker_cls in union_membership.get(FirstPartyMappingRequest)
    )

    # Fold each implementation's map into a single mutable accumulator.
    merged_dep_map = SymbolMap()
    for dep_map in all_mappings:
        merged_dep_map.merge(dep_map)

    return FirstPartySymbolMapping(merged_dep_map)
def calculate_jvm_request_types(union_membership: UnionMembership) -> ClasspathEntryRequestFactory:
    """Build a `ClasspathEntryRequestFactory` that maps codegen outputs to compiler impls."""
    classpath_impls = union_membership.get(ClasspathEntryRequest)

    # Index each compiler implementation by every source field its field sets require.
    # Assume only one impl per field (normally sound)
    # (note that subsequently, we only check for `SourceFields`, so no need to filter)
    impl_by_field: dict[type[Field], type[ClasspathEntryRequest]] = {}
    for classpath_impl in classpath_impls:
        for fs in classpath_impl.field_sets:
            for required_field in fs.required_fields:
                impl_by_field[required_field] = classpath_impl

    # Classify code generator sources by the compiler impl that consumes their output.
    generator_inputs: dict[type[ClasspathEntryRequest], list[type[SourcesField]]] = defaultdict(list)
    for generator in union_membership.get(GenerateSourcesRequest):
        consumer = impl_by_field.get(generator.output)
        if consumer is not None:
            generator_inputs[consumer].append(generator.input)

    frozen_inputs = FrozenDict(
        (consumer, frozenset(inputs)) for consumer, inputs in generator_inputs.items()
    )
    return ClasspathEntryRequestFactory(tuple(classpath_impls), frozen_inputs)
def run(
    self,
    build_config: BuildConfiguration,
    graph_session: GraphSession,
    options: Options,
    specs: Specs,
    union_membership: UnionMembership,
) -> ExitCode:
    """Start the explorer backend server and block until it exits.

    Returns 127 when no `ExplorerServerRequest` implementation is registered; otherwise
    returns the server's own exit code.
    """
    # for/else: pick the FIRST registered implementation; the `else` branch only runs
    # when the union has no members at all.
    for server_request_type in union_membership.get(ExplorerServerRequest):
        logger.info(f"Using {server_request_type.__name__} to create the explorer server.")
        break
    else:
        logger.error(
            softwrap(
                """
                There is no Explorer backend server implementation registered.

                Activate a backend/plugin that registers an implementation for the
                `ExplorerServerRequest` union to fix this issue.
                """
            )
        )
        return 127

    all_help_info = HelpInfoExtracter.get_all_help_info(
        options,
        union_membership,
        graph_session.goal_consumed_subsystem_scopes,
        RegisteredTargetTypes.create(build_config.target_types),
        build_config,
    )
    request_state = RequestState(
        all_help_info=all_help_info,
        build_configuration=build_config,
        scheduler_session=graph_session.scheduler_session,
    )
    server_request = server_request_type(
        address=self.address,
        port=self.port,
        request_state=request_state,
    )
    # Resolve the server product through the engine; `poll=True` keeps the session alive.
    server = request_state.product_request(
        ExplorerServer,
        (server_request,),
        poll=True,
        timeout=90,
    )
    return server.run()
async def merge_first_party_module_mappings(
    union_membership: UnionMembership,
) -> FirstPartyJavaPackageMapping:
    """Merge all first-party Java package mapping implementations into one map."""
    all_mappings = await MultiGet(
        Get(
            FirstPartyJavaMappingImpl,
            FirstPartyJavaMappingImplMarker,
            marker_cls(),
        )
        for marker_cls in union_membership.get(FirstPartyJavaMappingImplMarker)
    )

    merged_dep_map = PackageRootedDependencyMap()
    for dep_map in all_mappings:
        merged_dep_map.merge(dep_map.package_rooted_dependency_map)

    return FirstPartyJavaPackageMapping(package_rooted_dependency_map=merged_dep_map)
async def resolve_one_dependency_module(
    request: ResolveOneDependencyModuleRequest,
    union_membership: UnionMembership,
) -> ResolveOneDependencyModuleResult:
    """Resolve the dependency modules for one BSP build target.

    Fans out to each applicable `BSPDependencyModulesRequest` implementation (grouped
    by the field sets it can handle) and merges the results. Returns an empty result
    when no implementation applies to any matched target.
    """
    bsp_target = await Get(BSPBuildTargetInternal, BuildTargetIdentifier, request.bsp_target_id)
    targets = await Get(
        Targets,
        AddressSpecs,
        bsp_target.specs.address_specs,
    )

    field_sets_by_request_type: dict[
        Type[BSPDependencyModulesRequest], list[FieldSet]
    ] = defaultdict(list)
    dep_module_request_types: FrozenOrderedSet[
        Type[BSPDependencyModulesRequest]
    ] = union_membership.get(BSPDependencyModulesRequest)
    for tgt in targets:
        for dep_module_request_type in dep_module_request_types:
            field_set_type = dep_module_request_type.field_set_type
            if field_set_type.is_applicable(tgt):
                field_set = field_set_type.create(tgt)
                field_sets_by_request_type[dep_module_request_type].append(field_set)

    if not field_sets_by_request_type:
        return ResolveOneDependencyModuleResult(bsp_target_id=request.bsp_target_id)

    responses = await MultiGet(
        Get(
            BSPDependencyModulesResult,
            BSPDependencyModulesRequest,
            dep_module_request_type(field_sets=tuple(field_sets)),
        )
        for dep_module_request_type, field_sets in field_sets_by_request_type.items()
    )

    modules = set(itertools.chain.from_iterable([r.modules for r in responses]))
    digest = await Get(Digest, MergeDigests([r.digest for r in responses]))

    return ResolveOneDependencyModuleResult(
        bsp_target_id=request.bsp_target_id,
        modules=tuple(modules),
        digest=digest,
    )
async def export_virtualenvs(
    request: ExportVenvsRequest,
    python_setup: PythonSetup,
    dist_dir: DistDir,
    union_membership: UnionMembership,
) -> ExportResults:
    """Export a virtualenv per Python resolve, plus one per exportable tool.

    Warns when a legacy (pre-resolves) virtualenv directory still exists under the dist
    dir while `[python].enable_resolves` is on.
    """
    # Group root targets by their normalized resolve name.
    resolve_to_root_targets: DefaultDict[str, list[Target]] = defaultdict(list)
    for tgt in request.targets:
        if not tgt.has_field(PythonResolveField):
            continue
        resolve = tgt[PythonResolveField].normalized_value(python_setup)
        resolve_to_root_targets[resolve].append(tgt)

    venvs = await MultiGet(
        Get(
            ExportResult,
            _ExportVenvRequest(resolve if python_setup.enable_resolves else None, tuple(tgts)),
        )
        for resolve, tgts in resolve_to_root_targets.items()
    )

    no_resolves_dest = dist_dir.relpath / "python" / "virtualenv"
    if venvs and python_setup.enable_resolves and no_resolves_dest.exists():
        logger.warning(
            softwrap(
                f"""
                Because `[python].enable_resolves` is true, `{bin_name()} export ::` no longer
                writes virtualenvs to {no_resolves_dest}, but instead underneath
                {dist_dir.relpath / 'python' / 'virtualenvs'}. You will need to update your IDE
                to point to the new virtualenv.

                To silence this error, delete {no_resolves_dest}
                """
            )
        )

    tool_export_types = cast(
        "Iterable[type[ExportPythonToolSentinel]]",
        union_membership.get(ExportPythonToolSentinel),
    )
    # TODO: We request the `ExportPythonTool` entries independently of the `ExportResult`s because
    # inlining the request causes a rule graph issue. Revisit after #11269.
    all_export_tool_requests = await MultiGet(
        Get(ExportPythonTool, ExportPythonToolSentinel, tool_export_type())
        for tool_export_type in tool_export_types
    )
    all_tool_results = await MultiGet(
        Get(ExportResult, ExportPythonTool, request)
        for request in all_export_tool_requests
        if request.pex_request is not None
    )

    return ExportResults(venvs + all_tool_results)
def __init__(
    self,
    scheduler_session: SchedulerSession,
    union_membership: UnionMembership,
    context: BSPContext,
    inbound: BinaryIO,
    outbound: BinaryIO,
    max_workers: int = 5,
) -> None:
    """Wire up the JSON-RPC transport and index all registered BSP handler mappings."""
    self._scheduler_session = scheduler_session
    self._inbound = JsonRpcStreamReader(inbound)
    self._outbound = JsonRpcStreamWriter(outbound)
    self._context: BSPContext = context
    self._endpoint = Endpoint(self, self._send_outbound_message, max_workers=max_workers)

    # Index every registered handler implementation by its JSON-RPC method name.
    self._handler_mappings: dict[str, type[BSPHandlerMapping]] = {
        impl.method_name: impl for impl in union_membership.get(BSPHandlerMapping)
    }
async def bsp_build_initialize(
    _request: InitializeBuildParams, union_membership: UnionMembership
) -> InitializeBuildResult:
    """Answer the BSP `build/initialize` request by advertising Pants' capabilities.

    Aggregates the per-language capability flags declared by every registered
    `BSPLanguageSupport` implementation.
    """
    compile_langs: list = []
    test_langs: list = []
    run_langs: list = []
    debug_langs: list = []
    resources_provider = False
    for support in union_membership.get(BSPLanguageSupport):
        if support.can_compile:
            compile_langs.append(support.language_id)
        if support.can_test:
            test_langs.append(support.language_id)
        if support.can_run:
            run_langs.append(support.language_id)
        if support.can_debug:
            debug_langs.append(support.language_id)
        if support.can_provide_resources:
            resources_provider = True

    return InitializeBuildResult(
        display_name="Pants",
        version=VERSION,
        bsp_version=BSP_VERSION,  # TODO: replace with an actual BSP version
        capabilities=BuildServerCapabilities(
            compile_provider=CompileProvider(language_ids=tuple(sorted(compile_langs))),
            test_provider=TestProvider(language_ids=tuple(sorted(test_langs))),
            run_provider=RunProvider(language_ids=tuple(sorted(run_langs))),
            debug_provider=DebugProvider(language_ids=tuple(sorted(debug_langs))),
            inverse_sources_provider=None,
            dependency_sources_provider=True,
            dependency_modules_provider=True,
            resources_provider=resources_provider,
            can_reload=None,
            build_target_changed_provider=None,
        ),
        data=None,
    )
async def resolve_dependencies_lite(
    request: DependenciesRequestLite,
    union_membership: UnionMembership,
    registered_target_types: RegisteredTargetTypes,
    global_options: GlobalOptions,
) -> Addresses:
    """Resolve a target's dependencies: explicit BUILD entries, injected deps, and subtargets.

    Addresses listed in the field's ignore list (`!`-prefixed entries) are excluded from
    the final sorted result.
    """
    provided = parse_dependencies_field(
        request.field,
        subproject_roots=global_options.options.subproject_roots,
        registered_target_types=registered_target_types.types,
        union_membership=union_membership,
    )
    literal_addresses = await MultiGet(Get(Address, AddressInput, ai) for ai in provided.addresses)
    ignored_addresses = set(
        await MultiGet(Get(Address, AddressInput, ai) for ai in provided.ignored_addresses)
    )

    # Inject any dependencies.
    inject_request_types = union_membership.get(InjectDependenciesRequest)
    injected = await MultiGet(
        Get(InjectedDependencies, InjectDependenciesRequest, inject_request_type(request.field))
        for inject_request_type in inject_request_types
        if isinstance(request.field, inject_request_type.inject_for)
    )

    # Inject dependencies on all the BUILD target's generated file targets.
    subtargets = await Get(
        Subtargets, Address, request.field.address.maybe_convert_to_build_target()
    )
    subtarget_addresses = tuple(
        t.address for t in subtargets.subtargets if t.address != request.field.address
    )

    result = {
        addr
        for addr in (
            *subtarget_addresses,
            *literal_addresses,
            *itertools.chain.from_iterable(injected),
        )
        if addr not in ignored_addresses
    }
    return Addresses(sorted(result))
async def export(
    console: Console,
    targets: Targets,
    workspace: Workspace,
    union_membership: UnionMembership,
    build_root: BuildRoot,
    dist_dir: DistDir,
) -> Export:
    """Write every registered export result under `dist/export`, then run post-processing.

    Each `ExportRequest` implementation contributes results whose digests are materialized
    under their `reldir`; any post-processing commands are then run interactively in the
    workspace with `{digest_root}` substituted into their argv.
    """
    request_types = cast("Iterable[type[ExportRequest]]", union_membership.get(ExportRequest))
    requests = tuple(request_type(targets) for request_type in request_types)
    all_results = await MultiGet(Get(ExportResults, ExportRequest, request) for request in requests)
    flattened_results = [res for results in all_results for res in results]

    prefixed_digests = await MultiGet(
        Get(Digest, AddPrefix(result.digest, result.reldir)) for result in flattened_results
    )
    output_dir = os.path.join(str(dist_dir.relpath), "export")
    merged_digest = await Get(Digest, MergeDigests(prefixed_digests))
    dist_digest = await Get(Digest, AddPrefix(merged_digest, output_dir))
    workspace.write_digest(dist_digest)
    environment = await Get(Environment, EnvironmentRequest(["PATH"]))
    for result in flattened_results:
        digest_root = os.path.join(build_root.path, output_dir, result.reldir)
        for cmd in result.post_processing_cmds:
            # Substitute the materialized output location into the command line.
            argv = tuple(arg.format(digest_root=digest_root) for arg in cmd.argv)
            ip = InteractiveProcess(
                argv=argv,
                env={"PATH": environment.get("PATH", ""), **cmd.extra_env},
                run_in_workspace=True,
            )
            await Effect(InteractiveProcessResult, InteractiveProcess, ip)

        console.print_stdout(
            f"Wrote {result.description} to {os.path.join(output_dir, result.reldir)}"
        )
    return Export(exit_code=0)
async def merge_first_party_module_mappings( union_membership: UnionMembership, ) -> FirstPartyPythonModuleMapping: all_mappings = await MultiGet( Get( FirstPartyPythonMappingImpl, FirstPartyPythonMappingImplMarker, marker_cls(), ) for marker_cls in union_membership.get( FirstPartyPythonMappingImplMarker)) # First, record all known ambiguous modules. We will need to check that an implementation's # module is not ambiguous within another implementation. modules_with_multiple_implementations: DefaultDict[ str, set[Address]] = defaultdict(set) for mapping_impl in all_mappings: for module, addresses in mapping_impl.ambiguous_modules.items(): modules_with_multiple_implementations[module].update(addresses) # Then, merge the unambiguous modules within each MappingImpls while checking for ambiguity # across the other implementations. modules_to_addresses: dict[str, tuple[Address, ...]] = {} for mapping_impl in all_mappings: for module, addresses in mapping_impl.mapping.items(): if module in modules_with_multiple_implementations: modules_with_multiple_implementations[module].update(addresses) elif module in modules_to_addresses: modules_with_multiple_implementations[module].update( {*modules_to_addresses[module], *addresses}) else: modules_to_addresses[module] = addresses # Finally, remove any newly ambiguous modules from the previous step. for module in modules_with_multiple_implementations: if module in modules_to_addresses: modules_to_addresses.pop(module) return FirstPartyPythonModuleMapping( mapping=FrozenDict(sorted(modules_to_addresses.items())), ambiguous_modules=FrozenDict( (k, tuple(sorted(v))) for k, v in sorted(modules_with_multiple_implementations.items())), )
async def validate_jvm_artifacts_for_resolve(
    request: _ValidateJvmArtifactsRequest,
    union_membership: UnionMembership,
    jvm_subsystem: JvmSubsystem,
) -> GenerateJvmLockfile:
    """Run every registered artifact validator for a resolve, then build its lockfile request.

    Each validator is awaited only for its side effect (raising on invalid artifacts);
    the result value is intentionally discarded.
    """
    impls = union_membership.get(ValidateJvmArtifactsForResolveRequest)
    for impl in impls:
        validate_request = impl(artifacts=request.artifacts, resolve_name=request.resolve_name)
        # Result ignored: validators raise to signal failure.
        _ = await Get(
            ValidateJvmArtifactsForResolveResult,
            ValidateJvmArtifactsForResolveRequest,
            validate_request,
        )
    return GenerateJvmLockfile(
        artifacts=request.artifacts,
        resolve_name=request.resolve_name,
        lockfile_dest=jvm_subsystem.resolves[request.resolve_name],
    )
async def merge_first_party_module_mappings(
    union_membership: UnionMembership,
) -> FirstPartyPythonModuleMapping:
    """Merge all first-party module mapping implementations, dropping ambiguous modules.

    A module mapped by more than one implementation is considered ambiguous and is
    excluded from the merged result entirely.
    """
    all_mappings = await MultiGet(
        Get(
            FirstPartyPythonMappingImpl,
            FirstPartyPythonMappingImplMarker,
            marker_cls(),
        )
        for marker_cls in union_membership.get(FirstPartyPythonMappingImplMarker)
    )
    modules_to_addresses: Dict[str, Tuple[Address, ...]] = {}
    modules_with_multiple_implementations: Set[str] = set()
    for mapping in all_mappings:
        for module, addresses in mapping.items():
            if module in modules_to_addresses:
                # Second implementation claiming the same module: mark it ambiguous.
                modules_with_multiple_implementations.add(module)
            else:
                modules_to_addresses[module] = addresses
    for module in modules_with_multiple_implementations:
        modules_to_addresses.pop(module)
    return FirstPartyPythonModuleMapping(sorted(modules_to_addresses.items()))
async def merge_first_party_module_mappings(
    union_membership: UnionMembership,
    targets_that_provide_types: AllJvmTypeProvidingTargets,
    jvm: JvmSubsystem,
) -> FirstPartySymbolMapping:
    """Merge all first-party symbol maps and validate `experimental_provides_types` claims.

    Raises:
        JvmFirstPartyPackageMappingException: when a target's `provides` declaration names
            a type that none of the declaring addresses actually provides.
    """
    all_mappings = await MultiGet(
        Get(
            SymbolMap,
            FirstPartyMappingRequest,
            marker_cls(),
        )
        for marker_cls in union_membership.get(FirstPartyMappingRequest)
    )

    merged_dep_map = SymbolMap()
    for dep_map in all_mappings:
        merged_dep_map.merge(dep_map)

    # `experimental_provides_types` ("`provides`") can be declared on a `java_sources` target,
    # so each generated `java_source` target will have that `provides` annotation. All that matters
    # here is that _one_ of the source files amongst the set of sources actually provides that type.

    # Collect each address associated with a `provides` annotation and index by the provided type.
    provided_types: dict[tuple[str, str], set[Address]] = defaultdict(set)
    for tgt in targets_that_provide_types:
        resolve = tgt[JvmResolveField].normalized_value(jvm)
        for provided_type in tgt[JvmProvidesTypesField].value or []:
            provided_types[(resolve, provided_type)].add(tgt.address)

    # Check that at least one address declared by each `provides` value actually provides the type:
    for (resolve, provided_type), provided_addresses in provided_types.items():
        symbol_addresses = merged_dep_map.addresses_for_symbol(provided_type, resolve=resolve)
        if not provided_addresses.intersection(symbol_addresses):
            raise JvmFirstPartyPackageMappingException(
                f"The target {next(iter(provided_addresses))} declares that it provides the JVM type "
                f"`{provided_type}`, however, it does not appear to actually provide that type."
            )

    return FirstPartySymbolMapping(merged_dep_map)