def test_imports_banned() -> None:
    """A bare `import` statement inside a BUILD file must raise ParseError."""
    build_parser = Parser(target_type_aliases=[], object_aliases=BuildFileAliases())
    with pytest.raises(ParseError) as exc:
        build_parser.parse(
            "dir/BUILD",
            "\nx = 'hello'\n\nimport os\n",
            BuildFilePreludeSymbols(FrozenDict()),
        )
    assert "Import used in dir/BUILD at line 4" in str(exc.value)
def parse(
    cls,
    filepath: str,
    build_file_content: str,
    parser: Parser,
    extra_symbols: BuildFilePreludeSymbols,
) -> AddressMap:
    """Parses a source for targets.

    The target adaptors are all 'thin': any targets they point to in other namespaces or
    even in the same namespace but from a separate source are left as unresolved pointers.

    :param filepath: Path of the BUILD file being parsed (used in error messages).
    :param build_file_content: The raw text of the BUILD file.
    :param parser: The configured Parser that produces TargetAdaptors.
    :param extra_symbols: Prelude symbols made available to the BUILD file.
    :raises MappingError: If the underlying parser raises any exception.
    :raises DuplicateNameError: If two targets in the file share a name.
    """
    try:
        target_adaptors = parser.parse(filepath, build_file_content, extra_symbols)
    except Exception as e:
        # Chain the original exception so the underlying parse failure stays
        # visible in tracebacks instead of being silently discarded.
        raise MappingError(f"Failed to parse ./{filepath}:\n{e}") from e
    name_to_target_adaptors: dict[str, TargetAdaptor] = {}
    for target_adaptor in target_adaptors:
        name = target_adaptor.name
        if name in name_to_target_adaptors:
            duplicate = name_to_target_adaptors[name]
            raise DuplicateNameError(
                f"A target already exists at {filepath!r} with name {name!r} and target type "
                f"{duplicate.type_alias!r}. The {target_adaptor.type_alias!r} target "
                "cannot use the same name."
            )
        name_to_target_adaptors[name] = target_adaptor
    # Sort by name so the resulting AddressMap is deterministic.
    return cls(filepath, dict(sorted(name_to_target_adaptors.items())))
def parser_singleton() -> Parser:
    """Expose a Parser configured from the enclosing scope's build configuration."""
    parser_kwargs = dict(
        build_root=build_root_path,
        target_type_aliases=registered_target_types.aliases,
        object_aliases=build_configuration.registered_aliases,
        use_deprecated_python_macros=use_deprecated_python_macros,
    )
    return Parser(**parser_kwargs)
def parse(cls, filepath: str, filecontent: bytes, parser: Parser) -> "AddressMap":
    """Parses a source for addressable Serializable objects.

    No matter the parser used, the parsed and mapped addressable objects are all 'thin';
    ie: any objects they point to in other namespaces or even in the same namespace but
    from a separate source are left as unresolved pointers.

    :param filepath: The path to the byte source containing serialized objects.
    :param filecontent: The content of byte source containing serialized objects to be parsed.
    :param parser: The parser cls to use.
    :raises MappingError: If the underlying parser raises any exception.
    :raises UnaddressableObjectError: If a parsed object is not serializable or has no name.
    :raises DuplicateNameError: If two objects in the source share a name.
    """
    try:
        objects = parser.parse(filepath, filecontent)
    except Exception as e:
        # Chain the original exception so the underlying parse failure stays
        # visible in tracebacks instead of being silently discarded.
        raise MappingError(f"Failed to parse {filepath}:\n{e!r}") from e
    objects_by_name: Dict[str, ThinAddressableObject] = {}
    for obj in objects:
        if not Serializable.is_serializable(obj):
            raise UnaddressableObjectError(f"Parsed a non-serializable object: {obj!r}")
        attributes = obj._asdict()
        name = attributes.get("name")
        if not name:
            raise UnaddressableObjectError(f"Parsed a non-addressable object: {obj!r}")
        if name in objects_by_name:
            raise DuplicateNameError(
                f"An object already exists at {filepath!r} with name {name!r}: "
                f"{objects_by_name[name]!r}. Cannot map {obj!r}"
            )
        objects_by_name[name] = obj
    # Sort by name so the resulting AddressMap is deterministic.
    return cls(filepath, dict(sorted(objects_by_name.items())))
def resolve_addresses_with_origins_from_address_specs(
    address_specs: AddressSpecs,
    address_family: AddressFamily,
    *,
    tags: Optional[Iterable[str]] = None,
    exclude_patterns: Optional[Iterable[str]] = None,
) -> AddressesWithOrigins:
    """Run the addresses-with-origins rule against a single mocked AddressFamily."""
    mapper = AddressMapper(
        Parser(target_type_aliases=[], object_aliases=BuildFileAliases()),
        tags=tags,
        exclude_target_regexps=exclude_patterns,
    )
    mocked_snapshot = Snapshot(Digest("xx", 2), ("root/BUILD",), ())
    result = run_rule(
        addresses_with_origins_from_address_specs,
        rule_args=[mapper, address_specs],
        mock_gets=[
            MockGet(
                product_type=Snapshot,
                subject_type=PathGlobs,
                mock=lambda _: mocked_snapshot,
            ),
            MockGet(
                product_type=AddressFamily,
                subject_type=Dir,
                mock=lambda _: address_family,
            ),
        ],
    )
    return cast(AddressesWithOrigins, result)
def parse_address_map(build_file: str) -> AddressMap:
    """Parse `build_file` content into an AddressMap rooted at a dummy path."""
    dummy_path = "/dev/null"
    build_parser = Parser(target_type_aliases=["thing"], object_aliases=BuildFileAliases())
    address_map = AddressMap.parse(
        dummy_path, build_file, build_parser, BuildFilePreludeSymbols(FrozenDict())
    )
    assert address_map.path == dummy_path
    return address_map
def test_unrecognized_symbol() -> None:
    """An unknown name in a BUILD file reports the full list of registered symbols."""
    build_parser = Parser(
        target_type_aliases=["tgt"],
        object_aliases=BuildFileAliases(
            objects={"obj": 0},
            context_aware_object_factories={"caof": lambda parse_context: lambda _: None},
        ),
    )
    preludes = BuildFilePreludeSymbols(FrozenDict({"prelude": 0}))
    with pytest.raises(ParseError) as exc:
        build_parser.parse("dir/BUILD", "fake", preludes)
    expected = (
        "Name 'fake' is not defined.\n\nAll registered symbols: "
        "['caof', 'obj', 'prelude', 'tgt']"
    )
    assert str(exc.value) == expected
def perform_test(extra_targets: list[str], dym: str) -> None:
    """Parse an unknown symbol and check the did-you-mean error text."""
    build_parser = Parser(
        target_type_aliases=["tgt", *extra_targets],
        object_aliases=BuildFileAliases(
            objects={"obj": 0},
            context_aware_object_factories={"caof": lambda parse_context: lambda _: None},
        ),
    )
    preludes = BuildFilePreludeSymbols(FrozenDict({"prelude": 0}))
    # str(list)[1:-1] renders the extras exactly as they appear inside the
    # registered-symbols listing, e.g. "'a', 'b'".
    if extra_targets:
        fmt_extra_sym = str(extra_targets)[1:-1] + ", "
    else:
        fmt_extra_sym = ""
    with pytest.raises(ParseError) as exc:
        build_parser.parse("dir/BUILD", "fake", preludes)
    assert str(exc.value) == (
        f"Name 'fake' is not defined.\n\n{dym}"
        "If you expect to see more symbols activated in the below list,"
        f" refer to {docs_url('enabling-backends')} for all available"
        " backends to activate.\n\n"
        f"All registered symbols: ['caof', {fmt_extra_sym}'obj', 'prelude', 'tgt']"
    )
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    empty_build_file = FileContent(path="/dev/null/BUILD", content=b"")
    family = run_rule_with_mocks(
        parse_address_family,
        rule_args=[
            Parser(build_root="", target_type_aliases=[], object_aliases=BuildFileAliases()),
            BuildFileOptions(("BUILD",)),
            BuildFilePreludeSymbols(FrozenDict()),
            AddressFamilyDir("/dev/null"),
        ],
        mock_gets=[
            MockGet(
                output_type=DigestContents,
                input_type=PathGlobs,
                mock=lambda _: DigestContents([empty_build_file]),
            ),
        ],
    )
    assert not family.name_to_target_adaptors
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    mapper = AddressMapper(
        parser=Parser(target_type_aliases=[], object_aliases=BuildFileAliases())
    )
    empty_build_file = FileContent(path="/dev/null/BUILD", content=b"")
    family = run_rule(
        parse_address_family,
        rule_args=[mapper, BuildFilePreludeSymbols(FrozenDict()), Dir("/dev/null")],
        mock_gets=[
            MockGet(
                product_type=DigestContents,
                subject_type=PathGlobs,
                mock=lambda _: DigestContents([empty_build_file]),
            ),
        ],
    )
    assert not family.name_to_target_adaptors
def test_match_filter_options() -> None:
    """Tag filters on AddressMapper: '-a' excludes, '+b' requires."""

    def build_adaptor(target_name: str, **kwargs) -> TargetAdaptor:
        addr = Address("", target_name=target_name)
        return TargetAdaptor(type_alias="", name=addr.target_name, address=addr, **kwargs)

    untagged = build_adaptor(target_name="//:untagged")
    only_b = build_adaptor(target_name="//:b-tagged", tags=["b"])
    a_and_b = build_adaptor(target_name="//:a-and-b-tagged", tags=["a", "b"])
    tags_none = build_adaptor(target_name="//:none-tagged-target", tags=None)

    mapper = AddressMapper(
        Parser(target_type_aliases=[], object_aliases=BuildFileAliases()), tags=["-a", "+b"]
    )

    def matches(tgt: TargetAdaptor) -> bool:
        return mapper.matches_filter_options(tgt.kwargs["address"], tgt)

    assert matches(untagged) is False
    assert matches(only_b) is True
    assert matches(a_and_b) is False
    assert matches(tags_none) is False
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    global_options = create_subsystem(GlobalOptions, build_patterns=["BUILD"], build_ignore=[])
    empty_build_file = FileContent(path="/dev/null/BUILD", content=b"")
    family = run_rule(
        parse_address_family,
        rule_args=[
            Parser(target_type_aliases=[], object_aliases=BuildFileAliases()),
            global_options,
            BuildFilePreludeSymbols(FrozenDict()),
            Dir("/dev/null"),
        ],
        mock_gets=[
            MockGet(
                product_type=DigestContents,
                subject_type=PathGlobs,
                mock=lambda _: DigestContents([empty_build_file]),
            ),
        ],
    )
    assert not family.name_to_target_adaptors
def parser_singleton() -> Parser:
    """Expose a Parser built from the registered target types and build-file aliases."""
    target_aliases = registered_target_types.aliases
    object_aliases = build_configuration.registered_aliases
    return Parser(target_type_aliases=target_aliases, object_aliases=object_aliases)
def setup_legacy_graph_extended(
    options_bootstrapper: OptionsBootstrapper,
    build_configuration: BuildConfiguration,
    execution_options: ExecutionOptions,
    *,
    pants_ignore_patterns: List[str],
    use_gitignore: bool,
    local_store_dir: str,
    local_execution_root_dir: str,
    named_caches_dir: str,
    build_root: Optional[str] = None,
    native: Optional[Native] = None,
    glob_match_error_behavior: GlobMatchErrorBehavior = GlobMatchErrorBehavior.warn,
    build_patterns: Optional[Iterable[str]] = None,
    build_file_prelude_globs: Optional[Iterable[str]] = None,
    build_ignore_patterns: Optional[Iterable[str]] = None,
    tags: Optional[Iterable[str]] = None,
    exclude_target_regexps: Optional[Iterable[str]] = None,
    subproject_roots: Optional[Iterable[str]] = None,
    include_trace_on_error: bool = True,
) -> LegacyGraphScheduler:
    """Construct and return the components necessary for LegacyBuildGraph construction.

    Builds a Parser/AddressMapper from the registered build configuration, defines
    singleton @rules that close over locals computed here, assembles the full rule
    set, and constructs the engine Scheduler wrapping it all.
    """
    # Fall back to process-wide defaults for any component not supplied by the caller.
    build_root = build_root or get_buildroot()
    build_configuration = build_configuration or BuildConfigInitializer.get(
        options_bootstrapper
    )
    bootstrap_options = options_bootstrapper.bootstrap_options.for_global_scope()
    execution_options = execution_options or DEFAULT_EXECUTION_OPTIONS
    build_file_aliases = build_configuration.registered_aliases
    rules = build_configuration.rules
    union_membership = UnionMembership.from_rules(build_configuration.union_rules)
    registered_target_types = RegisteredTargetTypes.create(build_configuration.target_types)
    parser = Parser(
        target_type_aliases=registered_target_types.aliases,
        object_aliases=build_file_aliases,
    )
    address_mapper = AddressMapper(
        parser=parser,
        prelude_glob_patterns=build_file_prelude_globs,
        build_patterns=build_patterns,
        build_ignore_patterns=build_ignore_patterns,
        tags=tags,
        exclude_target_regexps=exclude_target_regexps,
        subproject_roots=subproject_roots,
    )

    # Singleton @rules: each closes over a local computed above so the engine can
    # request that value as a rule product. They are gathered below via
    # collect_rules(locals()).
    @rule
    def address_mapper_singleton() -> AddressMapper:
        return address_mapper

    @rule
    def glob_match_error_behavior_singleton() -> GlobMatchErrorBehavior:
        return glob_match_error_behavior

    @rule
    def build_configuration_singleton() -> BuildConfiguration:
        return build_configuration

    @rule
    def registered_target_types_singleton() -> RegisteredTargetTypes:
        return registered_target_types

    @rule
    def union_membership_singleton() -> UnionMembership:
        return union_membership

    @rule
    def build_root_singleton() -> BuildRoot:
        return cast(BuildRoot, BuildRoot.instance)

    # Create a Scheduler containing graph and filesystem rules, with no installed goals.
    # NOTE: rebinds `rules` — the build-configuration rules are folded in via `*rules`.
    rules = FrozenOrderedSet(
        (
            *collect_rules(locals()),
            RootRule(Console),
            *build_files.rules(),
            *fs.rules(),
            *graph.rules(),
            *uuid.rules(),
            *options_parsing.rules(),
            *process.rules(),
            *create_platform_rules(),
            *changed_rules(),
            *rules,
        )
    )
    goal_map = EngineInitializer._make_goal_map_from_rules(rules)

    def ensure_absolute_path(v: str) -> str:
        # The scheduler requires absolute, resolved paths for its storage directories.
        return Path(v).resolve().as_posix()

    scheduler = Scheduler(
        native=native,
        ignore_patterns=pants_ignore_patterns,
        use_gitignore=use_gitignore,
        build_root=build_root,
        local_store_dir=ensure_absolute_path(local_store_dir),
        local_execution_root_dir=ensure_absolute_path(local_execution_root_dir),
        named_caches_dir=ensure_absolute_path(named_caches_dir),
        rules=rules,
        union_membership=union_membership,
        execution_options=execution_options,
        include_trace_on_error=include_trace_on_error,
        visualize_to_dir=bootstrap_options.native_engine_visualize_to,
    )
    return LegacyGraphScheduler(scheduler, build_file_aliases, goal_map)