def check_docker_proc(process: Process):
    """Assert that `process` is the expected `docker build` invocation."""
    expected_argv = (
        "/dummy/docker",
        "build",
        "--tag",
        "env1:1.2.3",
        "--file",
        "docker/test/Dockerfile",
        ".",
    )
    expected_env = FrozenDict({"INHERIT": "from Pants env", "VAR": "value"})
    assert process.argv == expected_argv
    assert process.env == expected_env
def __init__(
    self,
    argv: Iterable[str],
    *,
    input_digest: Digest,
    description: str,
    level: LogLevel = LogLevel.INFO,
    output_directories: Iterable[str] | None = None,
    output_files: Iterable[str] | None = None,
    extra_env: Mapping[str, str] | None = None,
    extra_immutable_input_digests: Mapping[str, Digest] | None = None,
    cache_scope: ProcessCacheScope | None = None,
):
    """Capture the settings for a process invocation.

    Optional collections are normalized: `None` (or an empty iterable) becomes
    an empty immutable container so instances are safely hashable/sharable.
    """
    self.argv = tuple(argv)
    self.input_digest = input_digest
    self.description = description
    self.level = level
    # Normalize every optional collection to an empty immutable container.
    self.output_directories = tuple(output_directories) if output_directories else ()
    self.output_files = tuple(output_files) if output_files else ()
    self.extra_env = FrozenDict(extra_env if extra_env else {})
    self.extra_immutable_input_digests = FrozenDict(
        extra_immutable_input_digests if extra_immutable_input_digests else {}
    )
    self.cache_scope = cache_scope
def test_third_party_modules_mapping() -> None:
    """Module lookups should fall back to the closest mapped ancestor module."""
    colors_addr = Address.parse("//:ansicolors")
    pants_addr = Address.parse("//:pantsbuild")
    mapping = ThirdPartyModuleToAddressMapping(
        FrozenDict({"colors": colors_addr, "pants": pants_addr})
    )
    expectations = [
        ("colors", colors_addr),
        ("colors.red", colors_addr),
        ("pants", pants_addr),
        ("pants.task", pants_addr),
        ("pants.task.task", pants_addr),
        ("pants.task.task.Task", pants_addr),
    ]
    for module, expected_address in expectations:
        assert mapping.address_for_module(module) == expected_address
def test_map_first_party_modules_to_addresses(rule_runner: RuleRunner) -> None:
    """End-to-end check that generated protobuf modules map back to their targets.

    Covers plain protos, modules owned by two targets (reported as ambiguous
    rather than mapped), and grpc codegen (which adds an extra `_pb2_grpc`
    module). `python_source_root` should not affect module names because
    source roots are stripped first.
    """
    rule_runner.set_options(["--source-root-patterns=['root1', 'root2', 'root3']"])
    rule_runner.write_files(
        {
            "root1/protos/f1.proto": "",
            "root1/protos/f2.proto": "",
            "root1/protos/BUILD": "protobuf_library()",
            # These protos would result in the same module name, so neither should be used.
            "root1/two_owners/f.proto": "",
            "root1/two_owners/BUILD": "protobuf_library()",
            "root2/two_owners/f.proto": "",
            "root2/two_owners/BUILD": "protobuf_library()",
            # A file with grpc. This also uses the `python_source_root` mechanism, which should be
            # irrelevant to the module mapping because we strip source roots.
            "root1/tests/f.proto": "",
            "root1/tests/BUILD": "protobuf_library(grpc=True, python_source_root='root3')",
        }
    )
    result = rule_runner.request(FirstPartyPythonMappingImpl, [PythonProtobufMappingMarker()])
    assert result == FirstPartyPythonMappingImpl(
        mapping=FrozenDict(
            {
                "protos.f1_pb2": (Address("root1/protos", relative_file_path="f1.proto"),),
                "protos.f2_pb2": (Address("root1/protos", relative_file_path="f2.proto"),),
                "tests.f_pb2": (Address("root1/tests", relative_file_path="f.proto"),),
                "tests.f_pb2_grpc": (Address("root1/tests", relative_file_path="f.proto"),),
            }
        ),
        ambiguous_modules=FrozenDict(
            {
                "two_owners.f_pb2": (
                    Address("root1/two_owners", relative_file_path="f.proto"),
                    Address("root2/two_owners", relative_file_path="f.proto"),
                )
            }
        ),
    )
async def get_source_roots(source_roots_request: SourceRootsRequest) -> SourceRootsResult:
    """Convenience rule to allow callers to request SourceRoots that must exist.

    That way callers don't have to unpack OptionalSourceRoots if they know they
    expect a SourceRoot to exist and are willing to error if it doesn't.
    """
    osrr = await Get(OptionalSourceRootsResult, SourceRootsRequest, source_roots_request)
    # Fail fast on the first path whose optional root is absent.
    for path, optional_root in osrr.path_to_optional_root.items():
        if optional_root.source_root is None:
            raise NoSourceRootError(path)
    resolved = FrozenDict(
        (path, optional_root.source_root)
        for path, optional_root in osrr.path_to_optional_root.items()
    )
    return SourceRootsResult(path_to_root=resolved)
def create(
    cls,
    resolves_to_modules_to_providers: Mapping[
        ResolveName, Mapping[str, Iterable[ModuleProvider]]
    ],
) -> FirstPartyPythonMappingImpl:
    """Build a mapping impl with deterministic ordering.

    Resolves, modules, and providers are each sorted so that equal inputs
    always produce an identical (hashable) structure.
    """
    entries = []
    for resolve, module_mapping in sorted(resolves_to_modules_to_providers.items()):
        frozen_modules = FrozenDict(
            (module, tuple(sorted(providers)))
            for module, providers in sorted(module_mapping.items())
        )
        entries.append((resolve, frozen_modules))
    return FirstPartyPythonMappingImpl(entries)
def __init__(
    self,
    command: Iterable[str],
    *,
    description: str,
    env: Mapping[str, str] | None = None,
    input_digest: Digest = EMPTY_DIGEST,
    working_dir: str | None = None,
    output_files: Iterable[str] = (),
    output_directories: Iterable[str] = (),
    allow_downloads: bool = False,
    platform: Platform | None = None,
) -> None:
    """Capture a Go toolchain invocation.

    Unless `allow_downloads` is set, `GOPROXY=off` is forced into the
    environment so the Go tool cannot fetch modules from the network.
    """
    self.command = tuple(command)
    self.description = description
    resolved_env = dict(env or {})
    if not allow_downloads:
        # Hermeticity: forbid module downloads unless explicitly allowed.
        resolved_env["GOPROXY"] = "off"
    self.env = FrozenDict(resolved_env)
    self.input_digest = input_digest
    self.working_dir = working_dir
    self.output_files = tuple(output_files)
    self.output_directories = tuple(output_directories)
    self.platform = platform
async def map_addresses_to_dependees(all_targets: AllUnexpandedTargets) -> AddressToDependees:
    """Invert the dependency graph: for each address, collect who depends on it."""
    dependencies_per_target = await MultiGet(
        Get(Addresses, DependenciesRequest(tgt.get(Dependencies), include_special_cased_deps=True))
        for tgt in all_targets
    )
    dependees_by_address = defaultdict(set)
    for tgt, deps in zip(all_targets, dependencies_per_target):
        for dep in deps:
            dependees_by_address[dep].add(tgt.address)
    frozen = FrozenDict(
        (addr, FrozenOrderedSet(dependees)) for addr, dependees in dependees_by_address.items()
    )
    return AddressToDependees(frozen)
def group_field_sets_by_constraints(
    cls, field_sets: Iterable[_FS], python_setup: PythonSetup
) -> FrozenDict["PexInterpreterConstraints", Tuple[_FS, ...]]:
    """Partition `field_sets` by their resolved interpreter constraints.

    The outer mapping is sorted by constraints and each group is sorted by
    address, so the result is deterministic.
    """
    by_constraints = defaultdict(set)
    for field_set in field_sets:
        key = cls.create_from_compatibility_fields(
            [field_set.interpreter_constraints], python_setup
        )
        by_constraints[key].add(field_set)
    return FrozenDict(
        (constraints, tuple(sorted(group, key=lambda member: member.address)))
        for constraints, group in sorted(by_constraints.items())
    )
def test_dict_string_to_string_field() -> None:
    """`DictStringToStringField` accepts Dict[str, str] (or None) and freezes it."""

    class Example(DictStringToStringField):
        alias = "example"

    addr = Address("", target_name="example")

    # Valid inputs: None, empty dict, and a str -> str dict.
    assert Example(None, address=addr).value is None
    assert Example({}, address=addr).value == FrozenDict()
    assert Example({"hello": "world"}, address=addr).value == FrozenDict({"hello": "world"})

    # Anything that is not a Dict[str, str] must be rejected.
    for bad_value in [0, object(), "hello", ["hello"], {"hello": 0}, {0: "world"}]:
        with pytest.raises(InvalidFieldTypeException):
            Example(bad_value, address=addr)

    # Regression test that a default can be set.
    class ExampleDefault(DictStringToStringField):
        alias = "example"
        # Note that we use `FrozenDict` so that the object can be hashable.
        default = FrozenDict({"default": "val"})

    assert ExampleDefault(None, address=addr).value == FrozenDict({"default": "val"})
def compute_value(
    cls, raw_value: Optional[Union[str, Dict[str, str]]], *, address: Address
) -> Optional[Union[str, FrozenDict[str, str]]]:
    """Normalize the raw BUILD value: pass strings through, freeze dicts.

    Raises InvalidFieldTypeException for any other type.
    """
    value_or_default = super().compute_value(raw_value, address=address)
    if value_or_default is None:
        return None
    if isinstance(value_or_default, dict):
        return FrozenDict(value_or_default)
    if isinstance(value_or_default, str):
        return value_or_default
    raise InvalidFieldTypeException(
        address,
        cls.alias,
        value_or_default,
        expected_type="either a string or a dictionary of executable name to local file name.",
    )
def test_group_field_sets_by_constraints() -> None:
    """Field sets sharing interpreter constraints end up in the same group."""
    py2_fs = MockFieldSet.create_for_test("//:py2", ">=2.7,<3")
    py3_fs = [
        MockFieldSet.create_for_test("//:py3", "==3.6.*"),
        MockFieldSet.create_for_test("//:py3_second", "==3.6.*"),
    ]
    no_constraints_fs = MockFieldSet.create_for_test("//:no_constraints", None)
    actual = PexInterpreterConstraints.group_field_sets_by_constraints(
        [py2_fs, *py3_fs, no_constraints_fs],
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    expected = FrozenDict(
        {
            PexInterpreterConstraints(): (no_constraints_fs,),
            PexInterpreterConstraints(["CPython>=2.7,<3"]): (py2_fs,),
            PexInterpreterConstraints(["CPython==3.6.*"]): tuple(py3_fs),
        }
    )
    assert actual == expected
def freeze_json(item: Any) -> Any: if item is None: return None elif isinstance(item, list) or isinstance(item, tuple): return tuple(freeze_json(x) for x in item) elif isinstance(item, dict): result = {} for k, v in item.items(): if not isinstance(k, str): raise AssertionError("Got non-`str` key for _freeze.") result[k] = freeze_json(v) return FrozenDict(result) elif isinstance(item, str) or isinstance(item, int) or isinstance(item, float): return item else: raise AssertionError(f"Unsupported value type for _freeze: {type(item)}")
def assert_pants_requirement(
    rule_runner: RuleRunner,
    build_file_entry: str,
    *,
    expected_target_name: str,
    expected_dist: str = "pantsbuild.pants",
    expected_module: str = "pants",
) -> None:
    """Check that `build_file_entry` produces the expected pants requirement target."""
    rule_runner.add_to_build_file("3rdparty/python", f"{build_file_entry}\n")
    address = Address("3rdparty/python", target_name=expected_target_name)
    target = rule_runner.get_target(address)
    assert isinstance(target, PythonRequirementLibrary)
    expected_requirement = Requirement.parse(f"{expected_dist}=={pants_version()}")
    assert target[PythonRequirementsField].value == (expected_requirement,)
    assert target[ModuleMappingField].value == FrozenDict({expected_dist: (expected_module,)})
def test_dict_string_to_string_field() -> None:
    """`DictStringToStringField` freezes valid dicts and rejects everything else."""

    class Example(DictStringToStringField):
        alias = "example"

    addr = Address.parse(":example")

    field = Example({"hello": "world"}, address=addr)
    assert field.value == FrozenDict({"hello": "world"})

    # Every raw value that is not a Dict[str, str] must be rejected.
    for bad_value in [0, object(), "hello", ["hello"], {"hello": 0}, {0: "world"}]:
        with pytest.raises(InvalidFieldTypeException):
            Example(bad_value, address=addr)
async def map_third_party_modules_to_addresses() -> ThirdPartyModuleToAddressMapping:
    """Map each third-party module name to the single target that provides it.

    Modules provided by more than one target are omitted entirely to avoid
    ambiguity.

    Fix: the previous implementation popped a module when a second owner was
    seen, but did not remember that the module was ambiguous — so a *third*
    owner would silently re-add it. Track ambiguous modules in a separate set
    so they stay excluded no matter how many owners appear.
    """
    all_targets = await Get[Targets](AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: Dict[str, Address] = {}
    modules_with_multiple_owners = set()
    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        for python_req in tgt[PythonRequirementsField].value:
            for module in python_req.modules:
                if module in modules_with_multiple_owners:
                    # Already known-ambiguous; never map it again.
                    continue
                if module in modules_to_addresses:
                    modules_to_addresses.pop(module)
                    modules_with_multiple_owners.add(module)
                else:
                    modules_to_addresses[module] = tgt.address
    return ThirdPartyModuleToAddressMapping(FrozenDict(modules_to_addresses))
def test_parser_simple(rule_runner: RuleRunner) -> None:
    """Analyze a small Kotlin file; verify imports, declarations, symbols, scopes."""
    analysis = _analyze(
        rule_runner,
        textwrap.dedent(
            """\
            package org.pantsbuild.backend.kotlin

            import java.io.File

            open class Foo {
                fun grok() {
                    val x = X()
                    val y = Y()
                }
            }

            class Bar {}

            fun main(args: Array<String>) {
            }
            """
        ),
    )
    assert analysis.imports == {KotlinImport(name="java.io.File", alias=None, is_wildcard=False)}
    assert analysis.named_declarations == {
        "org.pantsbuild.backend.kotlin.Bar",
        "org.pantsbuild.backend.kotlin.Foo",
        "org.pantsbuild.backend.kotlin.main",
    }
    # Symbols are grouped by the innermost scope that consumed them.
    assert analysis.consumed_symbols_by_scope == FrozenDict(
        {
            "org.pantsbuild.backend.kotlin.Foo": frozenset(
                {
                    "X",
                    "Y",
                }
            ),
            "org.pantsbuild.backend.kotlin": frozenset(
                {
                    "Array",
                    "String",
                }
            ),
        }
    )
    assert analysis.scopes == {
        "org.pantsbuild.backend.kotlin",
        "org.pantsbuild.backend.kotlin.Foo",
        "org.pantsbuild.backend.kotlin.Bar",
    }
def parse_spec(self, spec: str) -> tuple[Spec, bool]:
    """Parse the given spec string and also return `True` if it's an ignore.

    :raises: CmdLineSpecParser.BadSpecError if the address selector could not be parsed.
    """
    is_ignore = False
    if spec.startswith("-"):
        # A leading "-" marks the spec as an ignore; strip it before parsing.
        is_ignore = True
        spec = spec[1:]
    (
        (
            path_component,
            target_component,
            generated_component,
            parameters,
        ),
        wildcard,
    ) = native_engine.address_spec_parse(spec)
    # `::` selects every target under the directory, recursively.
    if wildcard == "::":
        return RecursiveGlobSpec(directory=self._normalize_spec_path(path_component)), is_ignore
    # `:` selects every target directly within the directory.
    if wildcard == ":":
        return DirGlobSpec(directory=self._normalize_spec_path(path_component)), is_ignore
    # Any explicit target/generated/parameter component means a literal address.
    if target_component or generated_component or parameters:
        return (
            AddressLiteralSpec(
                path_component=self._normalize_spec_path(path_component),
                target_component=target_component,
                generated_component=generated_component,
                # Sort so that equal parameter sets compare equal regardless of input order.
                parameters=FrozenDict(sorted(parameters)),
            ),
            is_ignore,
        )
    if "*" in path_component:
        return FileGlobSpec(spec), is_ignore
    # A file extension implies a concrete file rather than a directory.
    if PurePath(spec).suffix:
        return FileLiteralSpec(self._normalize_spec_path(spec)), is_ignore
    spec_path = self._normalize_spec_path(spec)
    if spec_path == ".":
        return DirLiteralSpec(""), is_ignore
    # Some paths that look like dirs can actually be files without extensions.
    if Path(self._root_dir, spec_path).is_file():
        return FileLiteralSpec(spec_path), is_ignore
    return DirLiteralSpec(spec_path), is_ignore
class ModuleMappingField(DictStringToStringSequenceField):
    """A `module_mapping` BUILD field mapping requirement names to provided modules."""

    alias = "module_mapping"
    help = (
        "A mapping of requirement names to a list of the modules they provide.\n\n"
        'For example, `{"ansicolors": ["colors"]}`.\n\n'
        "Any unspecified requirements will use the requirement name as the default module, "
        'e.g. "Django" will default to `["django"]`.\n\n'
        "This is used to infer dependencies.")
    # Narrow the inherited value/default types: requirement name -> module names.
    value: FrozenDict[str, tuple[str, ...]]
    default: ClassVar[FrozenDict[str, tuple[str, ...]]] = FrozenDict()

    @classmethod
    def compute_value(  # type: ignore[override]
        cls, raw_value: Dict[str, Iterable[str]], address: Address
    ) -> FrozenDict[str, Tuple[str, ...]]:
        # Normalize the user-supplied mapping into canonical frozen form.
        value_or_default = super().compute_value(raw_value, address)
        return normalize_module_mapping(value_or_default)
def compute_value(
    cls, raw_value: Optional[Dict[str, str]], *, address: Address
) -> Optional[FrozenDict[str, str]]:
    """Validate a Dict[str, str] BUILD value and freeze it; None passes through."""
    value_or_default = super().compute_value(raw_value, address=address)
    if value_or_default is None:
        return None

    def type_error() -> InvalidFieldTypeException:
        # Built lazily; only constructed when validation actually fails.
        return InvalidFieldTypeException(
            address,
            cls.alias,
            raw_value,
            expected_type="a dictionary of string -> string",
        )

    if not isinstance(value_or_default, dict):
        raise type_error()
    for key, value in value_or_default.items():
        if not (isinstance(key, str) and isinstance(value, str)):
            raise type_error()
    return FrozenDict(value_or_default)
def test_unrecognized_symbol() -> None:
    """An unknown name in a BUILD file should error and list all known symbols."""
    parser = Parser(
        target_type_aliases=["tgt"],
        object_aliases=BuildFileAliases(
            objects={"obj": 0},
            context_aware_object_factories={"caof": lambda parse_context: lambda _: None},
        ),
    )
    prelude_symbols = BuildFilePreludeSymbols(FrozenDict({"prelude": 0}))
    with pytest.raises(ParseError) as exc:
        parser.parse("dir/BUILD", "fake", prelude_symbols)
    # The message must enumerate every registered symbol (targets, objects,
    # factories, and prelude symbols), sorted alphabetically.
    assert (
        str(exc.value)
        == "Name 'fake' is not defined.\n\nAll registered symbols: ['caof', 'obj', 'prelude', 'tgt']"
    )
class TypeStubsModuleMappingField(DictStringToStringSequenceField):
    """A `type_stubs_module_mapping` BUILD field: stub requirement -> stubbed modules."""

    alias = "type_stubs_module_mapping"
    help = (
        "A mapping of type-stub requirement names to a list of the modules they provide.\n\n"
        'For example, `{"types-requests": ["requests"]}`.\n\n'
        "If the requirement is not specified _and_ its name looks like a type stub, Pants will "
        f"use a default. See the `{PythonRequirementTypeStubModulesField.alias}` field from the "
        f"`{PythonRequirementTarget.alias}` target for more information.")
    # Narrow the inherited value/default types: stub requirement name -> module names.
    value: FrozenDict[str, tuple[str, ...]]
    default: ClassVar[FrozenDict[str, tuple[str, ...]]] = FrozenDict()

    @classmethod
    def compute_value(  # type: ignore[override]
        cls, raw_value: Dict[str, Iterable[str]], address: Address
    ) -> FrozenDict[str, Tuple[str, ...]]:
        # Normalize the user-supplied mapping into canonical frozen form.
        value_or_default = super().compute_value(raw_value, address)
        return normalize_module_mapping(value_or_default)
class ModuleMappingField(DictStringToStringSequenceField):
    """A `module_mapping` BUILD field mapping requirement names to provided modules."""

    alias = "module_mapping"
    help = (
        "A mapping of requirement names to a list of the modules they provide.\n\n"
        'For example, `{"ansicolors": ["colors"]}`.\n\n'
        "Any unspecified requirements will use a default. See the "
        f"`{PythonRequirementModulesField.alias}` field from the `{PythonRequirementTarget.alias}` "
        f"target for more information.")
    # Narrow the inherited value/default types: requirement name -> module names.
    value: FrozenDict[str, tuple[str, ...]]
    default: ClassVar[FrozenDict[str, tuple[str, ...]]] = FrozenDict()

    @classmethod
    def compute_value(  # type: ignore[override]
        cls, raw_value: Dict[str, Iterable[str]], address: Address
    ) -> FrozenDict[str, Tuple[str, ...]]:
        # Normalize the user-supplied mapping into canonical frozen form.
        value_or_default = super().compute_value(raw_value, address)
        return normalize_module_mapping(value_or_default)
def test_first_party_modules_mapping() -> None:
    """Lookups match a mapped module or one attribute below it, nothing else."""
    util_addr = Address.parse("src/python/util:strutil")
    test_addr = Address.parse("tests/python/project_test:test")
    mapping = FirstPartyModuleToAddressMapping(
        FrozenDict({"util.strutil": util_addr, "project_test.test": test_addr})
    )
    hits = {
        "util.strutil": util_addr,
        "util.strutil.ensure_text": util_addr,
        "project_test.test": test_addr,
        "project_test.test.TestDemo": test_addr,
    }
    for module, expected_address in hits.items():
        assert mapping.address_for_module(module) == expected_address
    # Parent packages and too-deep attribute paths must not resolve.
    misses = [
        "util",
        "project_test.test.TestDemo.method",
        "project_test",
        "project.test",
    ]
    for module in misses:
        assert mapping.address_for_module(module) is None
def _freeze_json_dict(d: dict[Any, Any]) -> FrozenDict[str, Any]:
    """Recursively convert a JSON dict into an immutable FrozenDict.

    Keys must be `str`. List values become tuples (recursing into their
    elements), dict values are frozen recursively, and str/int values (and
    bool, an int subclass) pass through. Anything else raises AssertionError.

    Fix: list values were previously converted with a plain `tuple(v)`, which
    left any dicts or lists *inside* the list mutable and unhashable —
    defeating the purpose of freezing.
    """
    result = {}
    for k, v in d.items():
        if not isinstance(k, str):
            raise AssertionError("Got non-`str` key for _freeze_json_dict.")
        result[k] = _freeze_json_value(v)
    return FrozenDict(result)


def _freeze_json_value(v: Any) -> Any:
    """Freeze a single JSON value; helper for `_freeze_json_dict`."""
    if isinstance(v, list):
        # Recurse so nested containers inside the list are frozen too.
        return tuple(_freeze_json_value(element) for element in v)
    if isinstance(v, dict):
        return _freeze_json_dict(v)
    if isinstance(v, (str, int)):
        return v
    raise AssertionError(f"Unsupported value type for _freeze_json_dict: {type(v)}")
async def make_process_uncacheable(uncacheable_process: UncacheableProcess) -> Process:
    """Return a copy of the wrapped Process that will never hit a prior cache entry."""
    scope = cast(UUIDScope, uncacheable_process.scope.value)
    uuid = await Get(UUID, UUIDRequest, UUIDRequest.scoped(scope))
    # This is a slightly hacky way to force the process to run: since the env var
    # value is unique, this input combination will never have been seen before,
    # and therefore never cached. The two downsides are:
    #  1. This leaks into the process' environment, albeit with a funky var name
    #     that is unlikely to cause problems in practice.
    #  2. This run will be cached even though it can never be re-used.
    # TODO: A more principled way of forcing rules to run?
    new_env = {**uncacheable_process.process.env, "__PANTS_FORCE_PROCESS_RUN__": str(uuid)}
    return dataclasses.replace(uncacheable_process.process, env=FrozenDict(new_env))
def __init__(
    self,
    *,
    argv: Iterable[str],
    description: str,
    additional_input_digest: Optional[Digest] = None,
    extra_env: Optional[Mapping[str, str]] = None,
    output_files: Optional[Iterable[str]] = None,
    output_directories: Optional[Iterable[str]] = None,
) -> None:
    """Store the request settings, freezing the optional collections."""
    self.argv = tuple(argv)
    self.description = description
    self.additional_input_digest = additional_input_digest
    # Falsy (None or empty) optional collections collapse to None.
    self.extra_env = FrozenDict(extra_env) if extra_env else None
    self.output_files = tuple(output_files) if output_files else None
    self.output_directories = tuple(output_directories) if output_directories else None
    # Give subclasses a validation hook once all attributes are set.
    self.__post_init__()
async def map_third_party_modules_to_addresses() -> ThirdPartyModuleToAddressMapping:
    """Map module names to the unique requirement target that provides them.

    Modules claimed by more than one target are dropped as ambiguous.
    """
    all_targets = await Get[Targets](AddressSpecs([DescendantAddresses("")]))
    modules_to_addresses: Dict[str, Address] = {}
    modules_with_multiple_owners: Set[str] = set()
    for tgt in all_targets:
        if not tgt.has_field(PythonRequirementsField):
            continue
        for python_req in tgt[PythonRequirementsField].value:
            for module in python_req.modules:
                if module in modules_to_addresses:
                    modules_with_multiple_owners.add(module)
                else:
                    modules_to_addresses[module] = tgt.address
    # Keep only unambiguously-owned modules, in sorted order for determinism.
    unambiguous = {
        module: addr
        for module, addr in modules_to_addresses.items()
        if module not in modules_with_multiple_owners
    }
    return ThirdPartyModuleToAddressMapping(FrozenDict(sorted(unambiguous.items())))
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    af = run_rule_with_mocks(
        parse_address_family,
        rule_args=[
            Parser(build_root="", target_type_aliases=[], object_aliases=BuildFileAliases()),
            BuildFileOptions(("BUILD",)),
            BuildFilePreludeSymbols(FrozenDict()),
            AddressFamilyDir("/dev/null"),
        ],
        mock_gets=[
            # Pretend the file glob matched a single, empty BUILD file.
            MockGet(
                output_type=DigestContents,
                input_type=PathGlobs,
                mock=lambda _: DigestContents([FileContent(path="/dev/null/BUILD", content=b"")]),
            ),
        ],
    )
    assert len(af.name_to_target_adaptors) == 0
def test_group_field_sets_by_constraints() -> None:
    """Field sets sharing interpreter constraints are grouped together."""
    py2_fs = MockFieldSet.create_for_test(Address("", target_name="py2"), ">=2.7,<3")
    py3_fs = [
        MockFieldSet.create_for_test(Address("", target_name="py3"), "==3.6.*"),
        MockFieldSet.create_for_test(Address("", target_name="py3_second"), "==3.6.*"),
    ]
    no_constraints_fs = MockFieldSet.create_for_test(
        Address("", target_name="no_constraints"), None
    )
    actual = InterpreterConstraints.group_field_sets_by_constraints(
        [py2_fs, *py3_fs, no_constraints_fs],
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    expected = FrozenDict(
        {
            InterpreterConstraints(): (no_constraints_fs,),
            InterpreterConstraints(["CPython>=2.7,<3"]): (py2_fs,),
            InterpreterConstraints(["CPython==3.6.*"]): tuple(py3_fs),
        }
    )
    assert actual == expected