def test_invalid_get() -> None:
    # Bad output type.
    assert_invalid_get(
        lambda: Get(1, str, "bob"),  # type: ignore[call-overload, no-any-return]
        expected=(
            "Invalid Get. The first argument (the output type) must be a type, but given "
            f"`1` with type {int}."
        ),
    )

    # Bad second argument.
    assert_invalid_get(
        lambda: Get(AClass, BClass),
        expected=(
            "Invalid Get. Because you are using the shorthand form "
            "Get(OutputType, InputType(constructor args)), the second argument should be "
            f"a constructor call, rather than a type, but given {BClass}."
        ),
    )
    assert_invalid_get(
        lambda: Get(AClass, 1, BClass),  # type: ignore[call-overload, no-any-return]
        expected=(
            "Invalid Get. Because you are using the longhand form Get(OutputType, InputType, "
            "input), the second argument must be a type, but given `1` of type "
            f"{int}."
        ),
    )

    # Bad third argument.
    assert_invalid_get(
        lambda: Get(AClass, BClass, BClass),
        expected=(
            "Invalid Get. Because you are using the longhand form Get(OutputType, InputType, "
            "input), the third argument should be an object, rather than a type, but given "
            f"{BClass}."
        ),
    )
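# `assert_invalid_get` is used above but not defined in these snippets. A minimal
# sketch of what it presumably does, inferred from the call sites (the TypeError
# exception type is an assumption; the real helper may differ):
import re

import pytest


def assert_invalid_get(get_callable, *, expected: str) -> None:
    # Invoke the lambda and require that `Get` construction fails with the exact message.
    with pytest.raises(TypeError, match=re.escape(expected)):
        get_callable()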
async def bsp_compile_request(
    request: CompileParams,
    workspace: Workspace,
) -> CompileResult:
    bsp_targets = await MultiGet(
        Get(BSPBuildTargetInternal, BuildTargetIdentifier, bsp_target_id)
        for bsp_target_id in request.targets
    )

    compile_results = await MultiGet(
        Get(
            BSPCompileResult,
            CompileOneBSPTargetRequest(
                bsp_target=bsp_target,
                origin_id=request.origin_id,
                arguments=request.arguments,
            ),
        )
        for bsp_target in bsp_targets
    )

    output_digest = await Get(Digest, MergeDigests([r.output_digest for r in compile_results]))
    if output_digest != EMPTY_DIGEST:
        workspace.write_digest(output_digest, path_prefix=".pants.d/bsp")

    status_code = StatusCode.OK
    if any(r.status != StatusCode.OK for r in compile_results):
        status_code = StatusCode.ERROR

    return CompileResult(
        origin_id=request.origin_id,
        status_code=status_code.value,
    )
async def run_go_pkg_debug(targets: UnexpandedTargets, console: Console) -> GoPkgDebugGoal:
    first_party_package_targets = [tgt for tgt in targets if is_first_party_package_target(tgt)]
    first_party_requests = [
        Get(ResolvedGoPackage, ResolveGoPackageRequest(address=tgt.address))
        for tgt in first_party_package_targets
    ]

    third_party_package_targets = [tgt for tgt in targets if is_third_party_package_target(tgt)]
    third_party_requests = [
        Get(ResolvedGoPackage, ResolveExternalGoPackageRequest(address=tgt.address))
        for tgt in third_party_package_targets
    ]

    resolved_packages = await MultiGet([*first_party_requests, *third_party_requests])  # type: ignore
    for package in resolved_packages:
        console.write_stdout(str(package) + "\n")
    return GoPkgDebugGoal(exit_code=0)
async def addresses_from_raw_specs_with_only_file_owners(
    specs: RawSpecsWithOnlyFileOwners, owners_not_found_behavior: OwnersNotFoundBehavior
) -> Addresses:
    """Find the owner(s) for each spec."""
    paths_per_include = await MultiGet(
        Get(Paths, PathGlobs, specs.path_globs_for_spec(spec)) for spec in specs.all_specs()
    )
    owners_per_include = await MultiGet(
        Get(
            Owners,
            OwnersRequest(paths.files, filter_by_global_options=specs.filter_by_global_options),
        )
        for paths in paths_per_include
    )
    addresses: set[Address] = set()
    for spec, owners in zip(specs.all_specs(), owners_per_include):
        if (
            not specs.from_change_detection
            and owners_not_found_behavior != OwnersNotFoundBehavior.ignore
            and isinstance(spec, FileLiteralSpec)
            and not owners
        ):
            _log_or_raise_unmatched_owners(
                [PurePath(str(spec))],
                owners_not_found_behavior,
                ignore_option="--owners-not-found-behavior=ignore",
            )
        addresses.update(owners)
    return Addresses(sorted(addresses))
async def bsp_resources_request(
    request: ResourcesParams,
    workspace: Workspace,
) -> ResourcesResult:
    bsp_targets = await MultiGet(
        Get(BSPBuildTargetInternal, BuildTargetIdentifier, bsp_target_id)
        for bsp_target_id in request.targets
    )

    resources_results = await MultiGet(
        Get(
            BSPResourcesResult,
            ResourcesForOneBSPTargetRequest(
                bsp_target=bsp_target,
            ),
        )
        for bsp_target in bsp_targets
    )

    # TODO: Need to determine how resources are expected to be exposed. Directories?
    # Individual files? Initially, it looks like loose directories.
    output_digest = await Get(Digest, MergeDigests([r.output_digest for r in resources_results]))
    if output_digest != EMPTY_DIGEST:
        workspace.write_digest(output_digest, path_prefix=".pants.d/bsp")

    return ResourcesResult(
        tuple(
            ResourcesItem(
                target,
                rr.resources,
            )
            for target, rr in zip(request.targets, resources_results)
        )
    )
async def map_import_paths_of_all_go_protobuf_targets(
    targets: AllProtobufTargets,
) -> GoProtobufImportPathMapping:
    sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(
                tgt[ProtobufSourceField],
                for_sources_types=(ProtobufSourceField,),
                enable_codegen=True,
            ),
        )
        for tgt in targets
    )
    all_contents = await MultiGet(
        Get(DigestContents, Digest, source.snapshot.digest) for source in sources
    )

    go_protobuf_targets: dict[str, set[Address]] = defaultdict(set)
    for tgt, contents in zip(targets, all_contents):
        if not contents:
            continue
        if len(contents) > 1:
            raise AssertionError(
                f"Protobuf target `{tgt.address}` mapped to more than one source file."
            )
        import_path = parse_go_package_option(contents[0].content)
        if not import_path:
            continue
        go_protobuf_targets[import_path].add(tgt.address)

    return GoProtobufImportPathMapping(
        FrozenDict({ip: tuple(addrs) for ip, addrs in go_protobuf_targets.items()})
    )
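# `parse_go_package_option` is defined elsewhere; per the Go protobuf convention it
# presumably extracts the `option go_package = "...";` directive. For example, a
# .proto file containing (import path invented for illustration):
#
#   syntax = "proto3";
#   option go_package = "example.com/gen/foo";
#
# would map to the import path "example.com/gen/foo" in the returned mapping.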
async def setup_google_java_format(
    setup_request: SetupRequest,
    tool: GoogleJavaFormatSubsystem,
    jdk: InternalJdk,
) -> Setup:
    lockfile_request = await Get(
        GenerateJvmLockfileFromTool, GoogleJavaFormatToolLockfileSentinel()
    )
    source_files, tool_classpath = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(field_set.source for field_set in setup_request.request.field_sets),
        ),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
    )

    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    maybe_java11_or_higher_options = []
    if jdk.jre_major_version >= 11:
        maybe_java11_or_higher_options = [
            "--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED",
        ]

    args = [
        *maybe_java11_or_higher_options,
        "com.google.googlejavaformat.java.Main",
        *(["--aosp"] if tool.aosp else []),
        "--dry-run" if setup_request.check_only else "--replace",
        *source_files.files,
    ]

    process = JvmProcess(
        jdk=jdk,
        argv=args,
        classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
        input_digest=source_files_snapshot.digest,
        extra_immutable_input_digests=extra_immutable_input_digests,
        extra_nailgun_keys=extra_immutable_input_digests,
        output_files=source_files_snapshot.files,
        description=f"Run Google Java Format on {pluralize(len(setup_request.request.field_sets), 'file')}.",
        level=LogLevel.DEBUG,
    )
    return Setup(process, original_snapshot=source_files_snapshot)
async def tailor(
    tailor_subsystem: TailorSubsystem,
    console: Console,
    workspace: Workspace,
    union_membership: UnionMembership,
) -> Tailor:
    putative_target_request_types = union_membership[PutativeTargetsRequest]
    putative_targets_results = await MultiGet(
        Get(PutativeTargets, PutativeTargetsRequest, req_type())
        for req_type in putative_target_request_types
    )
    putative_targets = PutativeTargets.merge(putative_targets_results)
    fixed_names_ptgts = await Get(UniquelyNamedPutativeTargets, PutativeTargets, putative_targets)
    fixed_sources_ptgts = await MultiGet(
        Get(DisjointSourcePutativeTarget, PutativeTarget, ptgt)
        for ptgt in fixed_names_ptgts.putative_targets
    )
    ptgts = [dspt.putative_target for dspt in fixed_sources_ptgts]

    if ptgts:
        edited_build_files = await Get(
            EditedBuildFiles,
            EditBuildFilesRequest(PutativeTargets(ptgts), tailor_subsystem.build_file_indent),
        )
        updated_build_files = set(edited_build_files.updated_paths)
        workspace.write_digest(edited_build_files.digest)
        ptgts_by_build_file = group_by_build_file(ptgts)
        # Use a distinct loop variable so we don't shadow the outer `ptgts` list.
        for build_file_path, ptgts_for_file in ptgts_by_build_file.items():
            verb = "Updated" if build_file_path in updated_build_files else "Created"
            console.print_stdout(f"{verb} {console.blue(build_file_path)}:")
            for ptgt in ptgts_for_file:
                console.print_stdout(
                    f"  - Added {console.green(ptgt.type_alias)} target "
                    f"{console.cyan(ptgt.address.spec)}"
                )
    return Tailor(0)
async def export(
    console: Console,
    targets: Targets,
    export_subsystem: ExportSubsystem,
    workspace: Workspace,
    union_membership: UnionMembership,
    build_root: BuildRoot,
    dist_dir: DistDir,
) -> Export:
    request_types = cast(
        "Iterable[type[ExportableDataRequest]]", union_membership.get(ExportableDataRequest)
    )
    requests = tuple(request_type(targets) for request_type in request_types)
    exportables = await MultiGet(
        Get(ExportableData, ExportableDataRequest, request) for request in requests
    )
    prefixed_digests = await MultiGet(
        Get(Digest, AddPrefix(exp.digest, exp.reldir)) for exp in exportables
    )
    output_dir = os.path.join(str(dist_dir.relpath), "export")
    merged_digest = await Get(Digest, MergeDigests(prefixed_digests))
    dist_digest = await Get(Digest, AddPrefix(merged_digest, output_dir))
    workspace.write_digest(dist_digest)
    for exp in exportables:
        for symlink in exp.symlinks:
            # Note that if symlink.source_path is an abspath, join returns it unchanged.
            source_abspath = os.path.join(build_root.path, symlink.source_path)
            link_abspath = os.path.abspath(
                os.path.join(output_dir, exp.reldir, symlink.link_rel_path)
            )
            absolute_symlink(source_abspath, link_abspath)
        console.print_stdout(f"Wrote {exp.description} to {os.path.join(output_dir, exp.reldir)}")
    return Export(exit_code=0)
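# Quick illustration of the os.path.join note above (stdlib behavior):
#   os.path.join("/build/root", "rel/link") == "/build/root/rel/link"
#   os.path.join("/build/root", "/abs/link") == "/abs/link"  # the abspath wins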
async def setup_go_sdk_process(
    request: GoSdkProcess,
    go_sdk_run: GoSdkRunSetup,
    bash: BashBinary,
    golang_subsystem: GolangSubsystem,
    goroot: GoRoot,
) -> Process:
    input_digest, env_vars = await MultiGet(
        Get(Digest, MergeDigests([go_sdk_run.digest, request.input_digest])),
        Get(
            Environment,
            EnvironmentRequest(golang_subsystem.env_vars_to_pass_to_subprocesses),
        ),
    )
    return Process(
        argv=[bash.path, go_sdk_run.script.path, *request.command],
        env={
            **env_vars,
            **request.env,
            GoSdkRunSetup.CHDIR_ENV: request.working_dir or "",
            # TODO: Maybe could just use MAJOR.MINOR for the version part here?
            "__PANTS_GO_SDK_CACHE_KEY": f"{goroot.version}/{goroot.goos}/{goroot.goarch}",
        },
        input_digest=input_digest,
        description=request.description,
        output_files=request.output_files,
        output_directories=request.output_directories,
        level=LogLevel.DEBUG,
        platform=request.platform,
    )
async def resolve_addresses_from_specs(specs: Specs) -> Addresses:
    includes, ignores = await MultiGet(
        Get(Addresses, RawSpecs, specs.includes),
        Get(Addresses, RawSpecs, specs.ignores),
    )
    # No matter what, ignores win out over includes. This avoids "specificity wars" and keeps
    # our semantics simple/predictable.
    return Addresses(FrozenOrderedSet(includes) - FrozenOrderedSet(ignores))
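# A toy illustration of the "ignores win" semantics above. `Address` and
# `FrozenOrderedSet` are the same types the rule uses; the example paths are made up.
def _demo_ignores_win() -> None:
    includes = FrozenOrderedSet([Address("src/a"), Address("src/b")])
    ignores = FrozenOrderedSet([Address("src/b")])
    # `src/b` is dropped even though it was explicitly included.
    assert list(includes - ignores) == [Address("src/a")]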
def test_create_get() -> None:
    get = Get(AClass, int, 42)
    assert get.output_type is AClass
    assert get.input_type is int
    assert get.input == 42

    # Also test the equivalence of the shorthand and longhand forms.
    assert Get(AClass, BClass()) == Get(AClass, BClass, BClass())
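# Note: the second assertion shows the shorthand `Get(OutputType, InputType(...))`
# expanding to the longhand `Get(OutputType, InputType, input)`, with `input_type`
# inferred from the instance's type. In rule code the shorthand is the common
# spelling, e.g. (schematic; `CreateDigest`/`FileContent` are real engine types, but
# the call itself is illustrative):
#
#   digest = await Get(Digest, CreateDigest([FileContent("f.txt", b"content")]))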
async def _jvm_bsp_compile(
    request: BSPCompileRequest, classpath_entry_request: ClasspathEntryRequestFactory
) -> BSPCompileResult:
    """Generically handles a BSPCompileRequest (subclass).

    This is a `@rule_helper` rather than a `@rule`, because BSP backends like `java` and `scala`
    independently declare their `BSPCompileRequest` union members. We can't register a single
    shared `BSPCompileRequest` @union member for all JVM languages, because their FieldSets are
    also declared via @unions, and we can't forward the implementation of one @union member to
    another the way we might with an abstract class.
    """
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses([fs.address for fs in request.field_sets])
    )
    resolve = await Get(CoursierResolveKey, CoarsenedTargets, coarsened_targets)

    # TODO: We include the (non-3rdparty) transitive dependencies here, because each project
    # currently only has a single BuildTarget. This has the effect of including `resources`
    # targets, which are referenced by BuildTargets (via `buildTarget/resources`), rather than
    # necessarily being owned by any particular BuildTarget.
    #
    # To resolve #15051, this will no longer be transitive, and so `resources` will need to be
    # attached-to/referenced-by nearby BuildTarget(s) instead (most likely: direct dependent(s)).
    results = await MultiGet(
        Get(
            FallibleClasspathEntry,
            BSPClasspathEntryRequest(
                classpath_entry_request.for_targets(component=coarsened_target, resolve=resolve),
                task_id=request.task_id,
            ),
        )
        for coarsened_target in coarsened_targets.coarsened_closure()
        if not any(JvmArtifactFieldSet.is_applicable(t) for t in coarsened_target.members)
    )
    entries = FallibleClasspathEntry.if_all_succeeded(results)
    if entries is None:
        return BSPCompileResult(
            status=StatusCode.ERROR,
            output_digest=EMPTY_DIGEST,
        )

    loose_classfiles = await MultiGet(
        Get(LooseClassfiles, ClasspathEntry, entry) for entry in entries
    )
    merged_loose_classfiles = await Get(
        Digest, MergeDigests(lc.digest for lc in loose_classfiles)
    )
    output_digest = await Get(
        Digest,
        AddPrefix(
            merged_loose_classfiles, jvm_classes_directory(request.bsp_target.bsp_target_id)
        ),
    )
    return BSPCompileResult(
        status=StatusCode.OK,
        output_digest=output_digest,
    )
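# A minimal sketch of the per-backend union registration described in the docstring
# above. `MyJvmFieldSet` and `MyJvmBSPCompileRequest` are hypothetical names;
# `UnionRule` is the real engine mechanism for registering a union member:
from pants.engine.unions import UnionRule


class MyJvmFieldSet(FieldSet):  # hypothetical FieldSet declared by the backend
    required_fields = ()


class MyJvmBSPCompileRequest(BSPCompileRequest):
    field_set_type = MyJvmFieldSet


def rules():
    # Each backend registers its own member; `_jvm_bsp_compile` is then shared by
    # the per-backend @rules that handle these requests.
    return [UnionRule(BSPCompileRequest, MyJvmBSPCompileRequest)]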
async def setup_scalafmt(
    setup_request: SetupRequest,
    tool: ScalafmtSubsystem,
) -> Setup:
    toolcp_relpath = "__toolcp"
    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalafmtToolLockfileSentinel())
    source_files, tool_classpath = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(field_set.source for field_set in setup_request.request.field_sets),
        ),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
    )
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )

    config_files = await Get(
        ScalafmtConfigFiles, GatherScalafmtConfigFilesRequest(source_files_snapshot)
    )
    merged_sources_digest = await Get(
        Digest, MergeDigests([source_files_snapshot.digest, config_files.snapshot.digest])
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    # Partition the work by which source files share the same config file (regardless of
    # directory).
    source_files_by_config_file: dict[str, set[str]] = defaultdict(set)
    for source_dir, files_in_source_dir in group_by_dir(source_files_snapshot.files).items():
        config_file = config_files.source_dir_to_config_file[source_dir]
        source_files_by_config_file[config_file].update(
            os.path.join(source_dir, name) for name in files_in_source_dir
        )

    partitions = await MultiGet(
        Get(
            Partition,
            SetupScalafmtPartition(
                classpath_entries=tuple(tool_classpath.classpath_entries(toolcp_relpath)),
                merged_sources_digest=merged_sources_digest,
                extra_immutable_input_digests=FrozenDict(extra_immutable_input_digests),
                config_file=config_file,
                files=tuple(sorted(files)),
                check_only=setup_request.check_only,
            ),
        )
        for config_file, files in source_files_by_config_file.items()
    )

    return Setup(tuple(partitions), original_snapshot=source_files_snapshot)
async def resolve_addresses_from_raw_specs(specs: RawSpecs) -> Addresses:
    without_file_owners, with_file_owners = await MultiGet(
        Get(Addresses, RawSpecsWithoutFileOwners, RawSpecsWithoutFileOwners.from_raw_specs(specs)),
        Get(Addresses, RawSpecsWithOnlyFileOwners, RawSpecsWithOnlyFileOwners.from_raw_specs(specs)),
    )
    # Use a set to dedupe.
    return Addresses(sorted({*without_file_owners, *with_file_owners}))
async def analyze_scala_source_dependencies(
    jdk: InternalJdk,
    processor_classfiles: ScalaParserCompiledClassfiles,
    source_files: SourceFiles,
) -> FallibleScalaSourceDependencyAnalysisResult:
    if len(source_files.files) > 1:
        raise ValueError(
            f"analyze_scala_source_dependencies expects sources with exactly 1 source file, "
            f"but found {len(source_files.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_scala_source_dependencies expects sources with exactly 1 source file, "
            "but found none."
        )

    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"

    (
        tool_classpath,
        prefixed_source_files_digest,
    ) = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS),
        ),
        Get(Digest, AddPrefix(source_files.snapshot.digest, source_prefix)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }

    analysis_output_path = "__source_analysis.json"

    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                processorcp_relpath,
            ],
            argv=[
                "org.pantsbuild.backend.scala.dependency_inference.ScalaParser",
                analysis_output_path,
                source_path,
            ],
            input_digest=prefixed_source_files_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            output_files=(analysis_output_path,),
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Analyzing {source_files.files[0]}",
            level=LogLevel.DEBUG,
        ),
    )

    return FallibleScalaSourceDependencyAnalysisResult(process_result=process_result)
async def materialize_jvm_plugins(
    request: MaterializeJvmPluginsRequest,
) -> MaterializedJvmPlugins:
    materialized_plugins = await MultiGet(
        Get(MaterializedJvmPlugin, MaterializeJvmPluginRequest(plugin))
        for plugin in request.plugins
    )
    plugin_digests = await MultiGet(
        Get(Digest, AddPrefix(p.classpath.digest, p.name)) for p in materialized_plugins
    )
    merged_plugins_digest = await Get(Digest, MergeDigests(plugin_digests))
    return MaterializedJvmPlugins(merged_plugins_digest, materialized_plugins)
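# For intuition: because each plugin's classpath digest is prefixed with the plugin's
# name before merging, the merged digest lays out one subdirectory per plugin, e.g.
# (plugin and jar names invented for illustration):
#
#   acme/acme-plugin.jar
#   widget/widget-plugin.jar
#
# The AddPrefix step is what prevents file-name collisions in MergeDigests.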
async def restrict_conflicting_sources(ptgt: PutativeTarget) -> DisjointSourcePutativeTarget:
    source_paths = await Get(
        Paths,
        PathGlobs(
            Sources.prefix_glob_with_dirpath(ptgt.path, glob) for glob in ptgt.owned_sources
        ),
    )
    source_path_set = set(source_paths.files)
    source_dirs = {os.path.dirname(path) for path in source_path_set}
    possible_owners = await Get(
        UnexpandedTargets, AddressSpecs(AscendantAddresses(d) for d in source_dirs)
    )
    possible_owners_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(t.get(Sources))) for t in possible_owners
    )
    conflicting_targets = []
    for tgt, sources in zip(possible_owners, possible_owners_sources):
        if source_path_set.intersection(sources.files):
            conflicting_targets.append(tgt)

    if conflicting_targets:
        conflicting_addrs = sorted(tgt.address.spec for tgt in conflicting_targets)
        explicit_srcs_str = ", ".join(ptgt.kwargs.get("sources") or [])  # type: ignore[arg-type]
        orig_sources_str = (
            f"[{explicit_srcs_str}]" if explicit_srcs_str else f"the default for {ptgt.type_alias}"
        )
        ptgt = ptgt.restrict_sources().add_comments(
            [f"# NOTE: Sources restricted from {orig_sources_str} due to conflict with"]
            + [f"# - {caddr}" for caddr in conflicting_addrs]
        )
    return DisjointSourcePutativeTarget(ptgt)
def test_invalid_get_abbreviated() -> None:
    with pytest.raises(
        expected_exception=TypeError,
        match=re.escape(f"The input argument cannot be a type, but given {BClass}."),
    ):
        Get(AClass, BClass)
async def map_to_python_modules(
    vcs_version_targets: AllVCSVersionTargets,
    python_setup: PythonSetup,
    _: PythonVCSVersionMappingMarker,
) -> FirstPartyPythonMappingImpl:
    suffix = ".py"
    targets = [
        tgt
        for tgt in vcs_version_targets
        if cast(str, tgt[VersionGenerateToField].value).endswith(suffix)
    ]
    stripped_files = await MultiGet(
        Get(StrippedFileName, StrippedFileNameRequest(cast(str, tgt[VersionGenerateToField].value)))
        for tgt in targets
    )
    resolves_to_modules_to_providers: DefaultDict[
        ResolveName, DefaultDict[str, list[ModuleProvider]]
    ] = defaultdict(lambda: defaultdict(list))
    for tgt, stripped_file in zip(targets, stripped_files):
        resolve = tgt[PythonResolveField].normalized_value(python_setup)
        module = stripped_file.value[: -len(suffix)].replace("/", ".")
        resolves_to_modules_to_providers[resolve][module].append(
            ModuleProvider(tgt.address, ModuleProviderType.IMPL)
        )
    return FirstPartyPythonMappingImpl.create(resolves_to_modules_to_providers)
async def determine_all_owned_sources(all_tgts: AllUnexpandedTargets) -> AllOwnedSources:
    all_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt.get(SourcesField))) for tgt in all_tgts
    )
    return AllOwnedSources(
        itertools.chain.from_iterable(sources_paths.files for sources_paths in all_sources_paths)
    )
async def map_cc_files(cc_targets: AllCCTargets) -> CCFilesMapping:
    stripped_file_per_target = await MultiGet(
        Get(StrippedFileName, StrippedFileNameRequest(tgt[CCSourceField].file_path))
        for tgt in cc_targets
    )

    stripped_files_to_addresses: dict[str, Address] = {}
    stripped_files_with_multiple_owners: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt, stripped_file in zip(cc_targets, stripped_file_per_target):
        if stripped_file.value in stripped_files_to_addresses:
            stripped_files_with_multiple_owners[stripped_file.value].update(
                {stripped_files_to_addresses[stripped_file.value], tgt.address}
            )
        else:
            stripped_files_to_addresses[stripped_file.value] = tgt.address

    # Remove files with ambiguous owners.
    for ambiguous_stripped_f in stripped_files_with_multiple_owners:
        stripped_files_to_addresses.pop(ambiguous_stripped_f)

    mapping_not_stripped = {tgt[CCSourceField].file_path: tgt.address for tgt in cc_targets}

    return CCFilesMapping(
        mapping=FrozenDict(sorted(stripped_files_to_addresses.items())),
        ambiguous_files=FrozenDict(
            (k, tuple(sorted(v)))
            for k, v in sorted(stripped_files_with_multiple_owners.items())
        ),
        mapping_not_stripped=FrozenDict(mapping_not_stripped),
    )
async def find_python(python_bootstrap: PythonBootstrap) -> PythonBinary:
    # PEX files are compatible with bootstrapping via Python 2.7 or Python 3.5+, but we select
    # 3.6+ for maximum compatibility with internal scripts.
    interpreter_search_paths = python_bootstrap.interpreter_search_paths()
    all_python_binary_paths = await MultiGet(
        Get(
            BinaryPaths,
            BinaryPathRequest(
                search_path=interpreter_search_paths,
                binary_name=binary_name,
                check_file_entries=True,
                test=BinaryPathTest(
                    args=[
                        "-c",
                        # N.B.: The following code snippet must be compatible with Python 3.6+.
                        #
                        # We hash the underlying Python interpreter executable to ensure we detect
                        # changes in the real interpreter that might otherwise be masked by Pyenv
                        # shim scripts found on the search path. Naively, just printing out the
                        # full version_info would be enough, but that does not account for
                        # supported abi changes (e.g.: a pyenv switch from a py27mu interpreter
                        # to a py27m interpreter.)
                        #
                        # When hashing, we pick 8192 for efficiency of reads and fingerprint
                        # updates (writes) since it's a common OS buffer size and an even
                        # multiple of the hash block size.
                        dedent(
                            """\
                            import sys

                            major, minor = sys.version_info[:2]
                            if not (major == 3 and minor >= 6):
                                sys.exit(1)

                            import hashlib

                            hasher = hashlib.sha256()
                            with open(sys.executable, "rb") as fp:
                                for chunk in iter(lambda: fp.read(8192), b""):
                                    hasher.update(chunk)
                            sys.stdout.write(hasher.hexdigest())
                            """
                        ),
                    ],
                    fingerprint_stdout=False,  # We already emit a usable fingerprint to stdout.
                ),
            ),
        )
        for binary_name in python_bootstrap.interpreter_names
    )

    for binary_paths in all_python_binary_paths:
        path = binary_paths.first_path
        if path:
            return PythonBinary(
                path=path.path,
                fingerprint=path.fingerprint,
            )

    raise BinaryNotFoundError(
        "Was not able to locate a Python interpreter to execute rule code.\n"
        "Please ensure that Python is available in one of the locations identified by "
        "`[python-bootstrap] search_path`, which currently expands to:\n"
        f"  {interpreter_search_paths}"
    )
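# The embedded fingerprint snippet can be exercised standalone, outside the engine, to
# see what it writes to stdout for the current interpreter (this is the same logic):
import hashlib
import sys

hasher = hashlib.sha256()
with open(sys.executable, "rb") as fp:
    for chunk in iter(lambda: fp.read(8192), b""):
        hasher.update(chunk)
print(hasher.hexdigest())  # a 64-character hex digest of the interpreter binary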
async def resources_bsp_target(
    request: ResourcesForOneBSPTargetRequest,
    union_membership: UnionMembership,
) -> BSPResourcesResult:
    targets = await Get(Targets, BSPBuildTargetInternal, request.bsp_target)

    resources_request_types: FrozenOrderedSet[Type[BSPResourcesRequest]] = union_membership.get(
        BSPResourcesRequest
    )
    field_sets_by_request_type: dict[Type[BSPResourcesRequest], set[FieldSet]] = defaultdict(set)
    for target in targets:
        for resources_request_type in resources_request_types:
            field_set_type = resources_request_type.field_set_type
            if field_set_type.is_applicable(target):
                field_set = field_set_type.create(target)
                field_sets_by_request_type[resources_request_type].add(field_set)

    resources_results = await MultiGet(
        Get(
            BSPResourcesResult,
            BSPResourcesRequest,
            resources_request_type(bsp_target=request.bsp_target, field_sets=tuple(field_sets)),
        )
        for resources_request_type, field_sets in field_sets_by_request_type.items()
    )

    resources = tuple(sorted({resource for rr in resources_results for resource in rr.resources}))

    output_digest = await Get(Digest, MergeDigests([rr.output_digest for rr in resources_results]))

    return BSPResourcesResult(
        resources=resources,
        output_digest=output_digest,
    )
async def resolve_bsp_build_target_addresses(
    bsp_target: BSPBuildTargetInternal,
    union_membership: UnionMembership,
) -> Targets:
    # NB: Using `RawSpecs` directly rather than `RawSpecsWithoutFileOwners` results in a rule
    # graph cycle.
    targets = await Get(
        Targets,
        RawSpecsWithoutFileOwners,
        RawSpecsWithoutFileOwners.from_raw_specs(bsp_target.specs),
    )
    if bsp_target.definition.resolve_filter is None:
        return targets

    resolve_filter = bsp_target.definition.resolve_filter
    resolve_prefix, matched, resolve_value = resolve_filter.partition(":")
    if not resolve_prefix or not matched:
        raise ValueError(
            f"The `resolve` filter for `{bsp_target}` must have a platform or language specific "
            f"prefix like `$lang:$filter`, but the configured value: `{resolve_filter}` did not."
        )

    # TODO: See `BSPResolveFieldFactoryRequest` re: this awkwardness.
    factories = await MultiGet(
        Get(BSPResolveFieldFactoryResult, BSPResolveFieldFactoryRequest, request())
        for request in union_membership.get(BSPResolveFieldFactoryRequest)
        if request.resolve_prefix == resolve_prefix
    )

    return Targets(
        t
        for t in targets
        if any((factory.resolve_field_value)(t) == resolve_value for factory in factories)
    )
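# Illustration of the filter parsing above (the filter value is invented):
#   "jvm:jdk17".partition(":") == ("jvm", ":", "jdk17")
# A value with no colon partitions to (value, "", ""), so `matched` is falsy and the
# ValueError above fires.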
async def generate_go_external_package_targets(
    request: GenerateGoExternalPackageTargetsRequest,
) -> GeneratedTargets:
    generator_addr = request.generator.address
    resolved_module = await Get(ResolvedGoModule, ResolveGoModuleRequest(generator_addr))
    all_resolved_packages = await MultiGet(
        Get(
            ResolveExternalGoModuleToPackagesResult,
            ResolveExternalGoModuleToPackagesRequest(
                path=module_descriptor.path,
                version=module_descriptor.version,
                go_sum_digest=resolved_module.digest,
            ),
        )
        for module_descriptor in resolved_module.modules
    )

    def create_tgt(pkg: ResolvedGoPackage) -> GoExternalPackageTarget:
        return GoExternalPackageTarget(
            {
                GoExternalModulePathField.alias: pkg.module_path,
                GoExternalModuleVersionField.alias: pkg.module_version,
                GoExternalPackageImportPathField.alias: pkg.import_path,
            },
            # E.g. `src/go:mod#github.com/google/uuid`.
            Address(
                generator_addr.spec_path,
                target_name=generator_addr.target_name,
                generated_name=pkg.import_path,
            ),
        )

    return GeneratedTargets(
        request.generator,
        (
            create_tgt(pkg)
            for resolved_pkgs in all_resolved_packages
            for pkg in resolved_pkgs.packages
        ),
    )
async def export_virtualenvs(
    request: ExportVenvsRequest,
    python_setup: PythonSetup,
    dist_dir: DistDir,
) -> ExportResults:
    resolve_to_root_targets: DefaultDict[str, list[Target]] = defaultdict(list)
    for tgt in request.targets:
        if not tgt.has_field(PythonResolveField):
            continue
        resolve = tgt[PythonResolveField].normalized_value(python_setup)
        resolve_to_root_targets[resolve].append(tgt)

    venvs = await MultiGet(
        Get(
            ExportResult,
            _ExportVenvRequest(resolve if python_setup.enable_resolves else None, tuple(tgts)),
        )
        for resolve, tgts in resolve_to_root_targets.items()
    )

    no_resolves_dest = dist_dir.relpath / "python" / "virtualenv"
    if venvs and python_setup.enable_resolves and no_resolves_dest.exists():
        logger.warning(
            f"Because `[python].enable_resolves` is true, `{bin_name()} export ::` no longer "
            f"writes virtualenvs to {no_resolves_dest}, but instead underneath "
            f"{dist_dir.relpath / 'python' / 'virtualenvs'}. You will need to "
            "update your IDE to point to the new virtualenv.\n\n"
            f"To silence this warning, delete {no_resolves_dest}"
        )
    return ExportResults(venvs)
async def map_first_party_scala_targets_to_symbols(
    _: FirstPartyScalaTargetsMappingRequest,
    scala_targets: AllScalaTargets,
    jvm: JvmSubsystem,
) -> SymbolMap:
    source_analysis = await MultiGet(
        Get(ScalaSourceDependencyAnalysis, SourceFilesRequest([target[ScalaSourceField]]))
        for target in scala_targets
    )
    address_and_analysis = zip(
        [(tgt.address, tgt[JvmResolveField].normalized_value(jvm)) for tgt in scala_targets],
        source_analysis,
    )

    mapping: Mapping[str, MutableTrieNode] = defaultdict(MutableTrieNode)
    for (address, resolve), analysis in address_and_analysis:
        namespace = _symbol_namespace(address)
        for symbol in analysis.provided_symbols:
            mapping[resolve].insert(symbol, [address], first_party=True, namespace=namespace)
        for symbol in analysis.provided_symbols_encoded:
            mapping[resolve].insert(symbol, [address], first_party=True, namespace=namespace)

    return SymbolMap((resolve, node.frozen()) for resolve, node in mapping.items())
def test_invalid_get_subject() -> None:
    with pytest.raises(
        expected_exception=TypeError,
        match=re.escape(f"The subject argument cannot be a type, given {BClass}."),
    ):
        Get(AClass, BClass, BClass)
def test_invalid_get_input_does_not_match_type() -> None:
    assert_invalid_get(
        lambda: Get(AClass, str, 1),
        expected=(
            f"Invalid Get. The third argument `1` must have the exact same type as the "
            f"second argument, {str}, but had the type {int}."
        ),
    )

    # However, if the `input_type` is a `@union`, then we do not eagerly validate.
    @union
    class UnionBase:
        pass

    union_get = Get(AClass, UnionBase, 1)
    assert union_get.input_type == UnionBase
    assert union_get.input == 1