Example 1
def test_address_specs_do_not_exist(
        address_specs_rule_runner: RuleRunner) -> None:
    address_specs_rule_runner.write_files({
        "real/f.txt": "",
        "real/BUILD": "mock_tgt(sources=['f.txt'])",
        "empty/BUILD": "# empty"
    })

    def assert_resolve_error(specs: Iterable[AddressSpec], *,
                             expected: str) -> None:
        with pytest.raises(ExecutionError) as exc:
            resolve_address_specs(address_specs_rule_runner, specs)
        assert expected in str(exc.value)

    # Literal addresses require both that the BUILD file exists and that the target can be resolved.
    assert_resolve_error([AddressLiteralSpec("fake", "tgt")],
                         expected="'fake' does not exist on disk")
    assert_resolve_error(
        [AddressLiteralSpec("fake/f.txt", "tgt")],
        expected="'fake/f.txt' does not exist on disk",
    )
    did_you_mean = ResolveError.did_you_mean(bad_name="fake_tgt",
                                             known_names=["real"],
                                             namespace="real")
    assert_resolve_error([AddressLiteralSpec("real", "fake_tgt")],
                         expected=str(did_you_mean))
    assert_resolve_error([AddressLiteralSpec("real/f.txt", "fake_tgt")],
                         expected=str(did_you_mean))

    # SiblingAddresses requires the BUILD file to exist and at least one match.
    assert_resolve_error(
        [SiblingAddresses("fake")],
        expected=(
            "'fake' does not contain any BUILD files, but 'fake:' expected matching targets "
            "there."
        ),
    )
    assert_resolve_error(
        [SiblingAddresses("empty")],
        expected="Address spec 'empty:' does not match any targets",
    )

    # MaybeEmptySiblingAddresses does not require a BUILD file to exist nor any matches.
    assert not resolve_address_specs(address_specs_rule_runner,
                                     [MaybeEmptySiblingAddresses("fake")])
    assert not resolve_address_specs(address_specs_rule_runner,
                                     [MaybeEmptySiblingAddresses("empty")])

    # DescendantAddresses requires at least one match, even if BUILD files exist.
    assert_resolve_error(
        [DescendantAddresses("fake"),
         DescendantAddresses("empty")],
        expected="Address spec 'fake::' does not match any targets",
    )

    # AscendantAddresses does not require any matches or BUILD files.
    assert not resolve_address_specs(
        address_specs_rule_runner,
        [AscendantAddresses("fake"),
         AscendantAddresses("empty")])
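
For reference, the resolve_address_specs helper used throughout these tests is not shown in the excerpts. A minimal sketch of what it plausibly looks like for the API vintage of Examples 1 and 4, assuming a RuleRunner.request call and an AddressesWithOrigins result type (both assumptions here, not confirmed by the excerpts):

def resolve_address_specs(
        rule_runner: RuleRunner,
        specs: Iterable[AddressSpec]) -> set[AddressWithOrigin]:
    # Ask the engine to resolve the specs, returning a set so tests can compare
    # against expected addresses or assert emptiness.
    result = rule_runner.request(AddressesWithOrigins, [AddressSpecs(specs)])
    return set(result)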
Example 2
async def find_owners(owners_request: OwnersRequest) -> Owners:
    # Determine which of the sources are live and which are deleted.
    sources_set_snapshot = await Get(Snapshot,
                                     PathGlobs(owners_request.sources))

    live_files = FrozenOrderedSet(sources_set_snapshot.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        # For live files, we use expanded Targets, which have file level precision but which are
        # only created for existing files. For deleted files we use UnexpandedTargets, which have
        # the original declared glob.
        candidate_targets: Iterable[Target]
        if live:
            if not live_dirs:
                continue
            sources_set = live_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in live_dirs)
            candidate_targets = await Get(Targets,
                                          AddressSpecs(candidate_specs))
        else:
            if not deleted_dirs:
                continue
            sources_set = deleted_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in deleted_dirs)
            candidate_targets = await Get(UnexpandedTargets,
                                          AddressSpecs(candidate_specs))

        build_file_addresses = await MultiGet(
            Get(BuildFileAddress, Address, tgt.address)
            for tgt in candidate_targets)

        for candidate_tgt, bfa in zip(candidate_targets, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(Sources).filespec,
                                 paths=sources_set))
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
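
A standalone, pure-Python illustration of the live/deleted split at the top of find_owners (file names hypothetical; the real code snapshots PathGlobs and uses FrozenOrderedSet):

import os

requested = ("src/app.py", "src/gone.py")
on_disk = {"src/app.py"}  # what the Snapshot would report as existing

live_files = [s for s in requested if s in on_disk]
deleted_files = [s for s in requested if s not in on_disk]
live_dirs = {os.path.dirname(s) for s in live_files}  # {"src"}
deleted_dirs = {os.path.dirname(s) for s in deleted_files}  # {"src"}
assert live_files == ["src/app.py"] and deleted_files == ["src/gone.py"]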
Example 3
def test_address_specs_do_not_exist(
        address_specs_rule_runner: RuleRunner) -> None:
    address_specs_rule_runner.write_files({
        "real/f.txt": "",
        "real/BUILD": "mock_tgt(sources=['f.txt'])",
        "empty/BUILD": "# empty"
    })

    def assert_resolve_error(specs: Iterable[AddressSpec], *,
                             expected: str) -> None:
        with engine_error(contains=expected):
            resolve_address_specs(address_specs_rule_runner, specs)

    # Literal addresses require the relevant BUILD file to exist and the target to be resolved.
    assert_resolve_error([AddressLiteralSpec("fake", "tgt")],
                         expected="'fake' does not exist on disk")
    assert_resolve_error(
        [AddressLiteralSpec("fake/f.txt", "tgt")],
        expected="'fake/f.txt' does not exist on disk",
    )
    did_you_mean = ResolveError.did_you_mean(bad_name="fake_tgt",
                                             known_names=["real"],
                                             namespace="real")
    assert_resolve_error([AddressLiteralSpec("real", "fake_tgt")],
                         expected=str(did_you_mean))
    assert_resolve_error([AddressLiteralSpec("real/f.txt", "fake_tgt")],
                         expected=str(did_you_mean))

    # SiblingAddresses requires at least one match.
    assert_resolve_error(
        [SiblingAddresses("fake")],
        expected="No targets found for the address glob `fake:`",
    )
    assert_resolve_error(
        [SiblingAddresses("empty")],
        expected="No targets found for the address glob `empty:`")

    # MaybeEmptySiblingAddresses does not require any matches.
    assert not resolve_address_specs(address_specs_rule_runner,
                                     [MaybeEmptySiblingAddresses("fake")])
    assert not resolve_address_specs(address_specs_rule_runner,
                                     [MaybeEmptySiblingAddresses("empty")])

    # DescendantAddresses requires at least one match.
    assert_resolve_error(
        [DescendantAddresses("fake"),
         DescendantAddresses("empty")],
        expected="No targets found for these address globs: ['empty::', 'fake::']",
    )

    # AscendantAddresses does not require any matches.
    assert not resolve_address_specs(
        address_specs_rule_runner,
        [AscendantAddresses("fake"),
         AscendantAddresses("empty")])
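
Example 3's engine_error context manager comes from Pants' test utilities and is not shown here; a minimal stand-in consistent with how Example 1 performs the same check by hand (a sketch, assuming pytest and ExecutionError are in scope):

from contextlib import contextmanager

@contextmanager
def engine_error_sketch(*, contains: str):
    # Expect the engine run to fail, then check the message, mirroring Example 1's
    # explicit pytest.raises(ExecutionError) pattern.
    with pytest.raises(ExecutionError) as exc:
        yield
    assert contains in str(exc.value)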
Example 4
def test_address_specs_deduplication(
        address_specs_rule_runner: RuleRunner) -> None:
    """When multiple specs cover the same address, we should deduplicate to one single
    AddressWithOrigin.

    We should use the most specific origin spec possible, such as AddressLiteralSpec >
    SiblingAddresses.
    """
    address_specs_rule_runner.create_file("demo/f.txt")
    address_specs_rule_runner.add_to_build_file("demo",
                                                "mock_tgt(sources=['f.txt'])")
    # We also include a file address to ensure that it is included in the result.
    specs = [
        AddressLiteralSpec("demo", "demo"),
        AddressLiteralSpec("demo/f.txt", "demo"),
        SiblingAddresses("demo"),
        DescendantAddresses("demo"),
        AscendantAddresses("demo"),
    ]
    assert resolve_address_specs(address_specs_rule_runner, specs) == {
        AddressWithOrigin(Address("demo"), AddressLiteralSpec("demo", "demo")),
        AddressWithOrigin(
            Address("demo", relative_file_path="f.txt"),
            AddressLiteralSpec("demo/f.txt", "demo"),
        ),
    }
Example 5
async def find_nearest_go_module(
        request: FindNearestGoModuleRequest) -> ResolvedOwningGoModule:
    spec_path = request.spec_path
    candidate_targets = await Get(
        UnexpandedTargets,
        AddressSpecs([
            AscendantAddresses(spec_path),
            MaybeEmptySiblingAddresses(spec_path)
        ]),
    )
    go_module_targets = [
        tgt for tgt in candidate_targets if tgt.has_field(GoModuleSources)
    ]

    # Sort by address.spec_path in descending order so the nearest go_module target is sorted first.
    sorted_go_module_targets = sorted(go_module_targets,
                                      key=lambda tgt: tgt.address.spec_path,
                                      reverse=True)
    if sorted_go_module_targets:
        nearest_go_module_target = sorted_go_module_targets[0]
        return ResolvedOwningGoModule(
            module_address=nearest_go_module_target.address)
    else:
        # TODO: Consider eventually requiring every go_package to be associated with a go_module.
        return ResolvedOwningGoModule(module_address=None)
Example 6
    def test_address_specs(self) -> None:
        target1 = self.mock_target(
            SOURCES1, origin=SingleAddress(directory=SOURCES1.source_root, name="lib")
        )
        target2 = self.mock_target(SOURCES2, origin=SiblingAddresses(SOURCES2.source_root))
        target3 = self.mock_target(SOURCES3, origin=DescendantAddresses(SOURCES3.source_root))
        target4 = self.mock_target(SOURCES1, origin=AscendantAddresses(SOURCES1.source_root))

        def assert_all_source_files_resolved(
            target: TargetAdaptorWithOrigin, sources: TargetSources
        ) -> None:
            expected = sources.source_file_absolute_paths
            assert self.get_all_source_files([target]) == expected
            assert self.get_specified_source_files([target]) == expected

        assert_all_source_files_resolved(target1, SOURCES1)
        assert_all_source_files_resolved(target2, SOURCES2)
        assert_all_source_files_resolved(target3, SOURCES3)
        assert_all_source_files_resolved(target4, SOURCES1)
        # NB: target1 and target4 refer to the same files. We should be able to handle this
        # gracefully.
        combined_targets = [target1, target2, target3, target4]
        combined_expected = sorted(
            [
                *SOURCES1.source_file_absolute_paths,
                *SOURCES2.source_file_absolute_paths,
                *SOURCES3.source_file_absolute_paths,
            ]
        )
        assert self.get_all_source_files(combined_targets) == combined_expected
        assert self.get_specified_source_files(combined_targets) == combined_expected
Example 7
async def restrict_conflicting_sources(
        ptgt: PutativeTarget) -> DisjointSourcePutativeTarget:
    source_paths = await Get(
        Paths,
        PathGlobs(
            Sources.prefix_glob_with_dirpath(ptgt.path, glob)
            for glob in ptgt.owned_sources),
    )
    source_path_set = set(source_paths.files)
    source_dirs = {os.path.dirname(path) for path in source_path_set}
    possible_owners = await Get(
        UnexpandedTargets,
        AddressSpecs(AscendantAddresses(d) for d in source_dirs))
    possible_owners_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(t.get(Sources)))
        for t in possible_owners)
    conflicting_targets = []
    for tgt, sources in zip(possible_owners, possible_owners_sources):
        if source_path_set.intersection(sources.files):
            conflicting_targets.append(tgt)

    if conflicting_targets:
        conflicting_addrs = sorted(tgt.address.spec
                                   for tgt in conflicting_targets)
        explicit_srcs_str = ", ".join(ptgt.kwargs.get("sources")
                                      or [])  # type: ignore[arg-type]
        orig_sources_str = (f"[{explicit_srcs_str}]" if explicit_srcs_str else
                            f"the default for {ptgt.type_alias}")
        ptgt = ptgt.restrict_sources().add_comments([
            f"# NOTE: Sources restricted from {orig_sources_str} due to conflict with"
        ] + [f"#   - {caddr}" for caddr in conflicting_addrs])
    return DisjointSourcePutativeTarget(ptgt)
Example 8
def test_more_specific():
    single_address = SingleAddress(directory="foo/bar", name="baz")
    sibling_addresses = SiblingAddresses(directory="foo/bar")
    ascendant_addresses = AscendantAddresses(directory="foo/bar")
    descendant_addresses = DescendantAddresses(directory="foo/bar")

    assert single_address == more_specific(single_address, None)
    assert single_address == more_specific(single_address, sibling_addresses)
    assert single_address == more_specific(single_address, ascendant_addresses)
    assert single_address == more_specific(single_address, descendant_addresses)
    assert single_address == more_specific(None, single_address)
    assert single_address == more_specific(sibling_addresses, single_address)
    assert single_address == more_specific(ascendant_addresses, single_address)
    assert single_address == more_specific(descendant_addresses, single_address)

    assert sibling_addresses == more_specific(sibling_addresses, None)
    assert sibling_addresses == more_specific(sibling_addresses, ascendant_addresses)
    assert sibling_addresses == more_specific(sibling_addresses, descendant_addresses)
    assert sibling_addresses == more_specific(None, sibling_addresses)
    assert sibling_addresses == more_specific(ascendant_addresses, sibling_addresses)
    assert sibling_addresses == more_specific(descendant_addresses, sibling_addresses)

    assert ascendant_addresses == more_specific(ascendant_addresses, None)
    assert ascendant_addresses == more_specific(ascendant_addresses, descendant_addresses)
    assert ascendant_addresses == more_specific(None, ascendant_addresses)
    assert ascendant_addresses == more_specific(descendant_addresses, ascendant_addresses)

    assert descendant_addresses == more_specific(descendant_addresses, None)
    assert descendant_addresses == more_specific(None, descendant_addresses)
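
The assertions above pin down a total precedence order, SingleAddress > SiblingAddresses > AscendantAddresses > DescendantAddresses, with None losing to everything. A minimal more_specific satisfying exactly these assertions (a sketch, not necessarily the real implementation):

_SPECIFICITY = {
    SingleAddress: 0,
    SiblingAddresses: 1,
    AscendantAddresses: 2,
    DescendantAddresses: 3,
    type(None): 99,  # None is the least specific of all
}

def more_specific(spec1, spec2):
    # Return whichever spec ranks higher (lower number = more specific).
    return spec1 if _SPECIFICITY[type(spec1)] <= _SPECIFICITY[type(spec2)] else spec2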
Example 9
    def iter_target_addresses_for_sources(self, sources):
        """Bulk, iterable form of `target_addresses_for_source`."""
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        sources_set = set(sources)
        subjects = [
            AscendantAddresses(directory=d)
            for d in self._unique_dirs_for_sources(sources_set)
        ]

        for hydrated_targets in self._engine.product_request(
                HydratedTargets, subjects):
            for hydrated_target in hydrated_targets.dependencies:
                legacy_address = hydrated_target.adaptor.address

                # Handle BUILD files.
                if any(
                        LegacyAddressMapper.is_declaring_file(
                            legacy_address, f) for f in sources_set):
                    yield legacy_address
                else:
                    # Handle claimed files.
                    target_files_iter = self._iter_owned_files_from_hydrated_target(
                        hydrated_target)
                    if any(source_file in sources_set
                           for source_file in target_files_iter):
                        # At least one file in this target's sources matches our changed sources, so emit its address.
                        yield legacy_address
Example 10
async def find_owners(owners_request: OwnersRequest) -> Owners:
    sources_set = FrozenOrderedSet(owners_request.sources)
    dirs_set = FrozenOrderedSet(
        os.path.dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[HydratedTargets](
        AddressSpecs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target: HydratedTarget) -> bool:
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (i.e., `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get("sources", None)
        return target_sources and any_matches_filespec(
            paths=sources_set, spec=target_sources.filespec)

    build_file_addresses = await MultiGet(
        Get[BuildFileAddress](Address, ht.adaptor.address)
        for ht in candidate_targets)
    owners = Addresses(
        ht.adaptor.address
        for ht, bfa in zip(candidate_targets, build_file_addresses)
        if LegacyAddressMapper.any_is_declaring_file(bfa, sources_set)
        or owns_any_source(ht))
    return Owners(owners)
Example 11
    def test_address_specs_do_not_exist(self) -> None:
        self.create_file("real/f.txt")
        self.add_to_build_file("real", "mock_tgt(sources=['f.txt'])")
        self.add_to_build_file("empty", "# empty")

        def assert_resolve_error(specs: Iterable[AddressSpec], *,
                                 expected: str) -> None:
            with pytest.raises(ExecutionError) as exc:
                self.resolve_address_specs(specs)
            assert expected in str(exc.value)

        # Literal addresses require both that the BUILD file exists and that the target can be resolved.
        assert_resolve_error([AddressLiteralSpec("fake", "tgt")],
                             expected="'fake' does not exist on disk")
        assert_resolve_error(
            [AddressLiteralSpec("fake/f.txt", "tgt")],
            expected="'fake/f.txt' does not exist on disk",
        )
        did_you_mean = ResolveError.did_you_mean(bad_name="fake_tgt",
                                                 known_names=["real"],
                                                 namespace="real")
        assert_resolve_error([AddressLiteralSpec("real", "fake_tgt")],
                             expected=str(did_you_mean))
        assert_resolve_error([AddressLiteralSpec("real/f.txt", "fake_tgt")],
                             expected=str(did_you_mean))

        # SiblingAddresses requires the BUILD file to exist, but is okay if no targets are resolved.
        assert_resolve_error(
            [SiblingAddresses("fake")],
            expected=(
                "'fake' does not contain any BUILD files, but 'fake:' expected matching targets "
                "there."
            ),
        )
        assert not self.resolve_address_specs([SiblingAddresses("empty")])

        # DescendantAddresses requires at least one match, even if BUILD files exist.
        assert_resolve_error(
            [DescendantAddresses("fake"),
             DescendantAddresses("empty")],
            expected="Address spec 'fake::' does not match any targets",
        )

        # AscendantAddresses does not require any matches or BUILD files.
        assert not self.resolve_address_specs(
            [AscendantAddresses("fake"),
             AscendantAddresses("empty")])
Example 12
async def find_owners(build_configuration: BuildConfiguration,
                      address_mapper: AddressMapper,
                      owners_request: OwnersRequest) -> BuildFileAddresses:
    sources_set = OrderedSet(owners_request.sources)
    dirs_set = OrderedSet(dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get(HydratedTargets, Specs(candidate_specs))

    # Match the source globs against the expanded candidate targets.
    def owns_any_source(legacy_target):
        """Given a `HydratedTarget` instance, check if it owns the given source file."""
        target_kwargs = legacy_target.adaptor.kwargs()

        # Handle `sources`-declaring targets.
        # NB: Deleted files can only be matched against the 'filespec' (i.e., `PathGlobs`) for a target,
        # so we don't actually call `fileset.matches` here.
        # TODO: This matching logic should be implemented using the rust `fs` crate for two reasons:
        #  1) having two implementations isn't great
        #  2) we're expanding sources via HydratedTarget, but it isn't necessary to do that to match
        target_sources = target_kwargs.get('sources', None)
        if target_sources and any_matches_filespec(sources_set,
                                                   target_sources.filespec):
            return True

        return False

    direct_owners = tuple(
        ht.adaptor.address for ht in candidate_targets
        if LegacyAddressMapper.any_is_declaring_file(
            ht.adaptor.address, sources_set) or owns_any_source(ht))

    # If the OwnersRequest does not require dependees, then we're done.
    if owners_request.include_dependees == 'none':
        return BuildFileAddresses(direct_owners)
    else:
        # Otherwise: find dependees.
        all_addresses = await Get(BuildFileAddresses,
                                  Specs((DescendantAddresses(''), )))
        all_hydrated_structs = await MultiGet(
            Get(HydratedStruct, Address, a.to_address())
            for a in all_addresses)
        all_structs = [hs.value for hs in all_hydrated_structs]

        bfa = build_configuration.registered_aliases()
        graph = _DependentGraph.from_iterable(
            target_types_from_build_file_aliases(bfa), address_mapper,
            all_structs)
        if owners_request.include_dependees == 'direct':
            return BuildFileAddresses(
                tuple(graph.dependents_of_addresses(direct_owners)))
        else:
            assert owners_request.include_dependees == 'transitive'
            return BuildFileAddresses(
                tuple(graph.transitive_dependents_of_addresses(direct_owners)))
Example 13
    def address_spec(self, *, source_root: str) -> AscendantAddresses:
        """The spec for all candidate targets which could feasibly own the module.

        This uses AscendantAddresses because targets can own files in subdirs (e.g. rglobs). We also
        use the package path, e.g. `helloworld/util/__init__.py`, rather than the module path to
        ensure that we capture all possible targets. It is okay if this directory does not actually
        exist.
        """
        return AscendantAddresses(
            directory=str(PurePath(source_root) / self.name_as_path))
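
For context, Example 26 below implies that name_as_path turns the dotted module name into a relative path. A sketch of the assumed surrounding pieces (hypothetical, not the real class):

from pathlib import PurePath

class PythonModuleSketch:
    def __init__(self, module: str) -> None:
        self.module = module

    @property
    def name_as_path(self) -> PurePath:
        # "helloworld.app" -> PurePath("helloworld/app")
        return PurePath(self.module.replace(".", "/"))

Note that str(PurePath(".") / "helloworld/app") == "helloworld/app", since pathlib drops the "." component; that is why a source root of "." adds no prefix in Example 26's first assertion.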
Example 14
async def get_exporting_owner(owned_dependency: OwnedDependency) -> ExportedTarget:
    """Find the exported target that owns the given target (and therefore exports it).

    The owner of T (i.e., the exported target in whose artifact T's code is published) is:

     1. An exported target that depends on T (or is T itself).
     2. Is T's closest filesystem ancestor among those satisfying 1.

    If there are multiple such exported targets at the same degree of ancestry, the ownership
    is ambiguous and an error is raised. If there is no exported target that depends on T
    and is its ancestor, then there is no owner and an error is raised.
    """
    target = owned_dependency.target
    ancestor_addrs = AscendantAddresses(target.address.spec_path)
    ancestor_tgts = await Get(Targets, AddressSpecs([ancestor_addrs]))
    # Note that addresses sort by (spec_path, target_name), and all these targets are
    # ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
    # address will effectively sort by closeness of ancestry to the given target.
    exported_ancestor_tgts = sorted(
        [t for t in ancestor_tgts if t.has_field(PythonProvidesField)],
        key=lambda t: t.address,
        reverse=True,
    )
    exported_ancestor_iter = iter(exported_ancestor_tgts)
    for exported_ancestor in exported_ancestor_iter:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest([exported_ancestor.address])
        )
        if target in transitive_targets.closure:
            owner = exported_ancestor
            # Find any exported siblings of owner that also depend on target. They have the
            # same spec_path as it, so they must immediately follow it in ancestor_iter.
            sibling_owners = []
            sibling = next(exported_ancestor_iter, None)
            while sibling and sibling.address.spec_path == owner.address.spec_path:
                transitive_targets = await Get(
                    TransitiveTargets, TransitiveTargetsRequest([sibling.address])
                )
                if target in transitive_targets.closure:
                    sibling_owners.append(sibling)
                sibling = next(exported_ancestor_iter, None)
            if sibling_owners:
                all_owners = [exported_ancestor] + sibling_owners
                raise AmbiguousOwnerError(
                    f"Found multiple sibling python_distribution targets that are the closest "
                    f"ancestor dependees of {target.address} and are therefore candidates to "
                    f"own it: {', '.join(o.address.spec for o in all_owners)}. Only a "
                    f"single such owner is allowed, to avoid ambiguity."
                )
            return ExportedTarget(owner)
    raise NoOwnerError(
        f"No python_distribution target found to own {target.address}. Note that "
        f"the owner must be in or above the owned target's directory, and must "
        f"depend on it (directly or indirectly)."
    )
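
A quick pure-Python check of the sorting claim in the comment above, with tuples standing in for addresses (which compare by (spec_path, target_name)). Because every candidate's spec_path is a prefix of the target's path, a descending sort puts the closest ancestor first:

ancestors = [("", "dist"), ("src", "dist"), ("src/project", "dist")]
# reverse=True sorts the deepest spec_path, i.e. the nearest ancestor, first.
assert sorted(ancestors, reverse=True)[0] == ("src/project", "dist")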
Example 15
async def get_exporting_owner(
        owned_dependency: OwnedDependency) -> ExportedTarget:
    """Find the exported target that owns the given target (and therefore exports it).

    The owner of T (i.e., the exported target in whose artifact T's code is published) is:

     1. An exported target that depends on T (or is T itself).
     2. Is T's closest filesystem ancestor among those satisfying 1.

    If there are multiple such exported targets at the same degree of ancestry, the ownership
    is ambiguous and an error is raised. If there is no exported target that depends on T
    and is its ancestor, then there is no owner and an error is raised.
    """
    hydrated_target = owned_dependency.hydrated_target
    ancestor_addrs = AscendantAddresses(
        hydrated_target.adaptor.address.spec_path)
    ancestor_tgts = await Get[HydratedTargets](AddressSpecs(
        (ancestor_addrs, )))
    # Note that addresses sort by (spec_path, target_name), and all these targets are
    # ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
    # address will effectively sort by closeness of ancestry to the given target.
    exported_ancestor_tgts = sorted(
        [t.adaptor for t in ancestor_tgts if _is_exported(t)],
        key=lambda adaptor: adaptor.address,
        reverse=True,
    )
    exported_ancestor_iter = iter(exported_ancestor_tgts)
    for exported_ancestor in exported_ancestor_iter:
        tht = await Get[TransitiveHydratedTargets](Addresses(
            [exported_ancestor.address]))
        if hydrated_target in tht.closure:
            owner = exported_ancestor
            # Find any exported siblings of owner that also depend on hydrated_target. They have the
            # same spec_path as it, so they must immediately follow it in ancestor_iter.
            sibling_owners = []
            sibling = next(exported_ancestor_iter, None)
            while sibling and sibling.address.spec_path == owner.address.spec_path:
                tht = await Get[TransitiveHydratedTargets](Addresses(
                    [sibling.address]))
                if hydrated_target in tht.closure:
                    sibling_owners.append(sibling)
                sibling = next(exported_ancestor_iter, None)
            if sibling_owners:
                raise AmbiguousOwnerError(
                    f"Exporting owners for {hydrated_target.adaptor.address.reference()} are "
                    f"ambiguous. Found {exported_ancestor.address.reference()} and "
                    f"{len(sibling_owners)} others: "
                    f'{", ".join(so.address.reference() for so in sibling_owners)}'
                )
            return ExportedTarget(HydratedTarget(owner))
    raise NoOwnerError(
        f"No exported target owner found for {hydrated_target.adaptor.address.reference()}"
    )
Example 16
async def find_putative_go_targets(
        request: PutativeGoTargetsRequest,
        all_owned_sources: AllOwnedSources) -> PutativeTargets:
    putative_targets = []

    # Add `go_mod` targets.
    all_go_mod_files = await Get(Paths, PathGlobs,
                                 request.search_paths.path_globs("go.mod"))
    unowned_go_mod_files = set(all_go_mod_files.files) - set(all_owned_sources)
    for dirname, filenames in group_by_dir(unowned_go_mod_files).items():
        putative_targets.append(
            PutativeTarget.for_target_type(
                GoModTarget,
                path=dirname,
                name=os.path.basename(dirname),
                triggering_sources=sorted(filenames),
            ))

    # Add `go_binary` targets.
    digest_contents = await Get(DigestContents, PathGlobs,
                                request.search_paths.path_globs("*.go"))
    main_package_dirs = [
        os.path.dirname(file_content.path) for file_content in digest_contents
        if has_package_main(file_content.content)
    ]
    existing_targets = await Get(
        UnexpandedTargets,
        AddressSpecs(AscendantAddresses(d) for d in main_package_dirs))
    owned_main_packages = await MultiGet(
        Get(GoBinaryMainPackage,
            GoBinaryMainPackageRequest(t[GoBinaryMainPackageField]))
        for t in existing_targets if t.has_field(GoBinaryMainPackageField))
    unowned_main_package_dirs = set(main_package_dirs) - {
        # We can be confident `go_first_party_package` targets were generated, meaning that the
        # below will get us the full path to the package's directory.
        # TODO: generalize this
        os.path.join(pkg.address.spec_path, pkg.address.generated_name[2:]).rstrip("/")  # type: ignore[index]
        for pkg in owned_main_packages
    }
    putative_targets.extend(
        PutativeTarget.for_target_type(
            target_type=GoBinaryTarget,
            path=main_pkg_dir,
            name="bin",
            triggering_sources=tuple(),
            kwargs={"name": "bin"},
        ) for main_pkg_dir in unowned_main_package_dirs)

    return PutativeTargets(putative_targets)
Example 17
def test_address_specs_more_specific() -> None:
    literal_addr = AddressLiteralSpec(path_component="foo/bar",
                                      target_component="baz")
    sibling_addresses = SiblingAddresses(directory="foo/bar")
    ascendant_addresses = AscendantAddresses(directory="foo/bar")
    descendant_addresses = DescendantAddresses(directory="foo/bar")

    assert literal_addr == AddressSpecs.more_specific(literal_addr, None)
    assert literal_addr == AddressSpecs.more_specific(literal_addr,
                                                      sibling_addresses)
    assert literal_addr == AddressSpecs.more_specific(literal_addr,
                                                      ascendant_addresses)
    assert literal_addr == AddressSpecs.more_specific(literal_addr,
                                                      descendant_addresses)
    assert literal_addr == AddressSpecs.more_specific(None, literal_addr)
    assert literal_addr == AddressSpecs.more_specific(sibling_addresses,
                                                      literal_addr)
    assert literal_addr == AddressSpecs.more_specific(ascendant_addresses,
                                                      literal_addr)
    assert literal_addr == AddressSpecs.more_specific(descendant_addresses,
                                                      literal_addr)

    assert sibling_addresses == AddressSpecs.more_specific(
        sibling_addresses, None)
    assert sibling_addresses == AddressSpecs.more_specific(
        sibling_addresses, ascendant_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        sibling_addresses, descendant_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        None, sibling_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        ascendant_addresses, sibling_addresses)
    assert sibling_addresses == AddressSpecs.more_specific(
        descendant_addresses, sibling_addresses)

    assert ascendant_addresses == AddressSpecs.more_specific(
        ascendant_addresses, None)
    assert ascendant_addresses == AddressSpecs.more_specific(
        ascendant_addresses, descendant_addresses)
    assert ascendant_addresses == AddressSpecs.more_specific(
        None, ascendant_addresses)
    assert ascendant_addresses == AddressSpecs.more_specific(
        descendant_addresses, ascendant_addresses)

    assert descendant_addresses == AddressSpecs.more_specific(
        descendant_addresses, None)
    assert descendant_addresses == AddressSpecs.more_specific(
        None, descendant_addresses)
Example 18
  def iter_target_addresses_for_sources(self, sources):
    """Bulk, iterable form of `target_addresses_for_source`."""
    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    sources_set = set(sources)
    specs = tuple(AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set))

    # Uniqify all transitive hydrated targets.
    hydrated_target_to_address = {}
    hydrated_targets, = self._scheduler.product_request(HydratedTargets, [Specs(specs)])
    for hydrated_target in hydrated_targets.dependencies:
      if hydrated_target not in hydrated_target_to_address:
        hydrated_target_to_address[hydrated_target] = hydrated_target.adaptor.address

    for hydrated_target, legacy_address in six.iteritems(hydrated_target_to_address):
      # Handle BUILD files.
      if (LegacyAddressMapper.any_is_declaring_file(legacy_address, sources_set) or
          self._owns_any_source(sources_set, hydrated_target)):
        yield legacy_address
Example 19
async def find_nearest_go_mod(request: OwningGoModRequest) -> OwningGoMod:
    # We don't expect `go_mod` targets to be generated, so we can use UnexpandedTargets.
    candidate_targets = await Get(
        UnexpandedTargets,
        AddressSpecs([AscendantAddresses(request.address.spec_path)]))

    # Sort by address.spec_path in descending order so the nearest go_mod target is sorted first.
    go_mod_targets = sorted(
        (tgt for tgt in candidate_targets if tgt.has_field(GoModSourcesField)),
        key=lambda tgt: tgt.address.spec_path,
        reverse=True,
    )
    if not go_mod_targets:
        raise InvalidTargetException(
            f"The target {request.address} does not have a `go_mod` target in its BUILD file or "
            "any ancestor BUILD files. To fix, please make sure your project has a `go.mod` file "
            f"and add a `go_mod` target (you can run `{bin_name()} tailor` to do this)."
        )
    nearest_go_mod_target = go_mod_targets[0]
    return OwningGoMod(nearest_go_mod_target.address)
Example 20
def test_address_specs_deduplication(address_specs_rule_runner: RuleRunner) -> None:
    """When multiple specs cover the same address, we should deduplicate to one single Address."""
    address_specs_rule_runner.write_files(
        {"demo/f.txt": "", "demo/BUILD": "generator(sources=['f.txt'])"}
    )
    specs = [
        AddressLiteralSpec("demo"),
        SiblingAddresses("demo"),
        DescendantAddresses("demo"),
        AscendantAddresses("demo"),
        # We also include targets generated from `demo` to ensure that the final result has both
        # the generator and its generated targets.
        AddressLiteralSpec("demo", None, "f.txt"),
        AddressLiteralSpec("demo/f.txt"),
    ]
    assert resolve_address_specs(address_specs_rule_runner, specs) == {
        Address("demo"),
        Address("demo", generated_name="f.txt"),
        Address("demo", relative_file_path="f.txt"),
    }
Example 21
async def find_owners(owners_request: OwnersRequest) -> Owners:
    sources_set = FrozenOrderedSet(owners_request.sources)
    dirs_set = FrozenOrderedSet(os.path.dirname(source) for source in sources_set)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    candidate_specs = tuple(AscendantAddresses(directory=d) for d in dirs_set)
    candidate_targets = await Get[Targets](AddressSpecs(candidate_specs))
    build_file_addresses = await MultiGet(
        Get[BuildFileAddress](Address, tgt.address) for tgt in candidate_targets
    )

    owners = Addresses(
        tgt.address
        for tgt, bfa in zip(candidate_targets, build_file_addresses)
        if bfa.rel_path in sources_set
        # NB: Deleted files can only be matched against the 'filespec' (i.e. `PathGlobs`) for a
        # target, which is why we use `any_matches_filespec`.
        or any_matches_filespec(sources_set, tgt.get(Sources).filespec)
    )
    return Owners(owners)
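
A pure-Python stand-in for any_matches_filespec, assuming a filespec of the form {"globs": [...]} (an assumption; per the TODOs in Examples 10 and 12 the real matching is meant to live in the Rust fs crate, and fnmatch's * also crosses directory separators unlike real globbing, so this is illustration only):

from fnmatch import fnmatch

def any_matches_filespec_sketch(paths, spec) -> bool:
    # True if any requested path matches any of the target's source globs.
    return any(
        fnmatch(path, glob) for path in paths for glob in spec["globs"])

assert any_matches_filespec_sketch({"src/app/main.py"},
                                   {"globs": ["src/app/*.py"]})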
Example 22
    def test_address_specs(self) -> None:
        sources_field1 = self.mock_sources_field_with_origin(
            SOURCES1,
            origin=SingleAddress(directory=SOURCES1.source_root, name="lib"))
        sources_field2 = self.mock_sources_field_with_origin(
            SOURCES2, origin=SiblingAddresses(SOURCES2.source_root))
        sources_field3 = self.mock_sources_field_with_origin(
            SOURCES3, origin=DescendantAddresses(SOURCES3.source_root))
        sources_field4 = self.mock_sources_field_with_origin(
            SOURCES1, origin=AscendantAddresses(SOURCES1.source_root))

        def assert_all_source_files_resolved(
            sources_field_with_origin: Tuple[SourcesField, OriginSpec],
            sources: TargetSources,
        ) -> None:
            expected = sources.source_file_absolute_paths
            assert self.get_all_source_files([sources_field_with_origin]) == expected
            assert self.get_specified_source_files([sources_field_with_origin]) == expected

        assert_all_source_files_resolved(sources_field1, SOURCES1)
        assert_all_source_files_resolved(sources_field2, SOURCES2)
        assert_all_source_files_resolved(sources_field3, SOURCES3)
        assert_all_source_files_resolved(sources_field4, SOURCES1)
        # NB: sources_field1 and sources_field4 refer to the same files. We should be able to
        # handle this gracefully.
        combined_sources_fields = [
            sources_field1, sources_field2, sources_field3, sources_field4
        ]
        combined_expected = sorted([
            *SOURCES1.source_file_absolute_paths,
            *SOURCES2.source_file_absolute_paths,
            *SOURCES3.source_file_absolute_paths,
        ])
        assert self.get_all_source_files(
            combined_sources_fields) == combined_expected
        assert self.get_specified_source_files(
            combined_sources_fields) == combined_expected
Example 23
    def iter_target_addresses_for_sources(self, sources):
        """Bulk, iterable form of `target_addresses_for_source`."""
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        sources_set = set(sources)
        subjects = [
            AscendantAddresses(directory=d)
            for d in self._unique_dirs_for_sources(sources_set)
        ]

        for hydrated_targets in self._scheduler.product_request(
                HydratedTargets, subjects):
            for hydrated_target in hydrated_targets.dependencies:
                legacy_address = hydrated_target.adaptor.address

                # Handle BUILD files.
                if any(
                        LegacyAddressMapper.is_declaring_file(
                            legacy_address, f) for f in sources_set):
                    yield legacy_address
                else:
                    if any(
                            self._owns_source(source, hydrated_target)
                            for source in sources_set):
                        yield legacy_address
Example 24
async def find_putative_go_targets(
    request: PutativeGoTargetsRequest, all_owned_sources: AllOwnedSources
) -> PutativeTargets:
    putative_targets = []

    all_go_mod_files, all_go_files, all_go_files_digest_contents = await MultiGet(
        Get(Paths, PathGlobs, request.search_paths.path_globs("go.mod")),
        Get(Paths, PathGlobs, request.search_paths.path_globs("*.go")),
        Get(DigestContents, PathGlobs, request.search_paths.path_globs("*.go")),
    )

    # Add `go_mod` targets.
    unowned_go_mod_files = set(all_go_mod_files.files) - set(all_owned_sources)
    for dirname, filenames in group_by_dir(unowned_go_mod_files).items():
        putative_targets.append(
            PutativeTarget.for_target_type(
                GoModTarget,
                path=dirname,
                name=None,
                triggering_sources=sorted(filenames),
            )
        )

    # Add `go_package` targets.
    unowned_go_files = set(all_go_files.files) - set(all_owned_sources)
    for dirname, filenames in group_by_dir(unowned_go_files).items():
        # Ignore paths that have `testdata` or `vendor` in them.
        # From `go help packages`: Note, however, that a directory named vendor that itself contains code
        # is not a vendored package: cmd/vendor would be a command named vendor.
        dirname_parts = PurePath(dirname).parts
        if "testdata" in dirname_parts or "vendor" in dirname_parts[0:-1]:
            continue
        putative_targets.append(
            PutativeTarget.for_target_type(
                GoPackageTarget,
                path=dirname,
                name=None,
                triggering_sources=sorted(filenames),
            )
        )

    # Add `go_binary` targets.
    main_package_dirs = [
        os.path.dirname(file_content.path)
        for file_content in all_go_files_digest_contents
        if has_package_main(file_content.content)
    ]
    existing_targets = await Get(
        UnexpandedTargets, AddressSpecs(AscendantAddresses(d) for d in main_package_dirs)
    )
    owned_main_packages = await MultiGet(
        Get(GoBinaryMainPackage, GoBinaryMainPackageRequest(t[GoBinaryMainPackageField]))
        for t in existing_targets
        if t.has_field(GoBinaryMainPackageField)
    )
    unowned_main_package_dirs = set(main_package_dirs) - {
        # NB: We assume the `go_package` lives in the directory it's defined, which we validate
        # by e.g. banning `**` in its sources field.
        pkg.address.spec_path
        for pkg in owned_main_packages
    }
    putative_targets.extend(
        PutativeTarget.for_target_type(
            GoBinaryTarget,
            path=main_pkg_dir,
            name="bin",
            triggering_sources=tuple(),
        )
        for main_pkg_dir in unowned_main_package_dirs
    )

    return PutativeTargets(putative_targets)
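
A quick check of the vendor/testdata filter above: per the quoted go help packages note, "vendor" only disqualifies a directory when it appears as a strict ancestor component, never as the final component:

from pathlib import PurePath

def is_ignored_dir(dirname: str) -> bool:
    parts = PurePath(dirname).parts
    return "testdata" in parts or "vendor" in parts[0:-1]

assert is_ignored_dir("third_party/vendor/pkg")  # code under a vendor dir: skipped
assert not is_ignored_dir("cmd/vendor")  # a package itself named vendor: kept
assert is_ignored_dir("pkg/testdata")  # testdata anywhere: skipped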
Example 25
def test_ascendant_addresses() -> None:
    spec = AscendantAddresses("dir/subdir")
    assert spec.to_build_file_globs(
        ["BUILD"]) == {"BUILD", "dir/BUILD", "dir/subdir/BUILD"}
    assert spec.matches("") is True
    assert spec.matches("dir") is True
    assert spec.matches("dir/subdir") is True
    assert spec.matches("dir/subdir/nested") is False
    assert spec.matches("another/subdir") is False

    spec = AscendantAddresses("")
    assert spec.to_build_file_globs(["BUILD"]) == {"BUILD"}
    assert spec.matches("") is True
    assert spec.matches("dir") is False
Example 26
def test_module_address_spec() -> None:
    assert PythonModule("helloworld.app").address_spec(
        source_root=".") == AscendantAddresses(directory="helloworld/app")
    assert PythonModule("helloworld.app").address_spec(
        source_root="src/python") == AscendantAddresses(
            directory="src/python/helloworld/app")
Example 27
async def find_putative_targets(
    req: PutativePythonTargetsRequest,
    all_owned_sources: AllOwnedSources,
    python_setup: PythonSetup,
) -> PutativeTargets:
    # Find library/test/test_util targets.

    all_py_files_globs: PathGlobs = req.search_paths.path_globs("*.py")
    all_py_files = await Get(Paths, PathGlobs, all_py_files_globs)
    unowned_py_files = set(all_py_files.files) - set(all_owned_sources)
    classified_unowned_py_files = classify_source_files(unowned_py_files)
    pts = []
    for tgt_type, paths in classified_unowned_py_files.items():
        for dirname, filenames in group_by_dir(paths).items():
            if issubclass(tgt_type, PythonTestsGeneratorTarget):
                name = "tests"
                kwargs = {"name": name}
            elif issubclass(tgt_type, PythonTestUtilsGeneratorTarget):
                name = "test_utils"
                kwargs = {"name": name}
            else:
                name = os.path.basename(dirname)
                kwargs = {}
            if (
                python_setup.tailor_ignore_solitary_init_files
                and tgt_type == PythonSourcesGeneratorTarget
                and filenames == {"__init__.py"}
            ):
                continue
            pts.append(
                PutativeTarget.for_target_type(
                    tgt_type, dirname, name, sorted(filenames), kwargs=kwargs
                )
            )

    if python_setup.tailor_requirements_targets:
        # Find requirements files.
        all_requirements_files_globs: PathGlobs = req.search_paths.path_globs("*requirements*.txt")
        all_requirements_files = await Get(Paths, PathGlobs, all_requirements_files_globs)
        unowned_requirements_files = set(all_requirements_files.files) - set(all_owned_sources)
        for req_file in unowned_requirements_files:
            path, name = os.path.split(req_file)
            pts.append(
                PutativeTarget(
                    path=path,
                    # python_requirements is a macro and doesn't take a name argument, but the
                    # PutativeTarget still needs a name for display purposes.
                    name=name,
                    type_alias="python_requirements",
                    triggering_sources=[req_file],
                    owned_sources=[req_file],
                    addressable=False,
                    kwargs={} if name == "requirements.txt" else {"requirements_relpath": name},
                )
            )

    if python_setup.tailor_pex_binary_targets:
        # Find binary targets.

        # Get all files whose content indicates that they are entry points.
        digest_contents = await Get(DigestContents, PathGlobs, all_py_files_globs)
        entry_points = [
            file_content.path
            for file_content in digest_contents
            if is_entry_point(file_content.content)
        ]

        # Get the modules for these entry points.
        src_roots = await Get(
            SourceRootsResult, SourceRootsRequest, SourceRootsRequest.for_files(entry_points)
        )
        module_to_entry_point = {}
        for entry_point in entry_points:
            entry_point_path = PurePath(entry_point)
            src_root = src_roots.path_to_root[entry_point_path]
            stripped_entry_point = entry_point_path.relative_to(src_root.path)
            module = PythonModule.create_from_stripped_path(stripped_entry_point)
            module_to_entry_point[module.module] = entry_point

        # Get existing binary targets for these entry points.
        entry_point_dirs = {os.path.dirname(entry_point) for entry_point in entry_points}
        possible_existing_binary_targets = await Get(
            UnexpandedTargets, AddressSpecs(AscendantAddresses(d) for d in entry_point_dirs)
        )
        possible_existing_binary_entry_points = await MultiGet(
            Get(ResolvedPexEntryPoint, ResolvePexEntryPointRequest(t[PexEntryPointField]))
            for t in possible_existing_binary_targets
            if t.has_field(PexEntryPointField)
        )
        possible_existing_entry_point_modules = {
            rep.val.module for rep in possible_existing_binary_entry_points if rep.val
        }
        unowned_entry_point_modules = (
            module_to_entry_point.keys() - possible_existing_entry_point_modules
        )

        # Generate new targets for entry points that don't already have one.
        for entry_point_module in unowned_entry_point_modules:
            entry_point = module_to_entry_point[entry_point_module]
            path, fname = os.path.split(entry_point)
            name = os.path.splitext(fname)[0]
            pts.append(
                PutativeTarget.for_target_type(
                    target_type=PexBinary,
                    path=path,
                    name=name,
                    triggering_sources=tuple(),
                    kwargs={"name": name, "entry_point": fname},
                )
            )

    return PutativeTargets(pts)
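
A worked illustration of the module-mapping step above (paths hypothetical): an entry point under source root src/python is stripped of the root, and the remaining path becomes the dotted module name, which is the behavior assumed of PythonModule.create_from_stripped_path:

from pathlib import PurePath

entry_point = PurePath("src/python/helloworld/main.py")
stripped = entry_point.relative_to("src/python")  # helloworld/main.py
module = stripped.with_suffix("").as_posix().replace("/", ".")
assert module == "helloworld.main"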
Example 28
async def find_owners(owners_request: OwnersRequest) -> Owners:
    # Determine which of the sources are live and which are deleted.
    sources_paths = await Get(Paths, PathGlobs(owners_request.sources))

    live_files = FrozenOrderedSet(sources_paths.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    # For live files, we use expanded Targets, which have file level precision but which are
    # only created for existing files. For deleted files we use UnexpandedTargets, which have
    # the original declared glob.
    live_candidate_specs = tuple(
        AscendantAddresses(directory=d) for d in live_dirs)
    deleted_candidate_specs = tuple(
        AscendantAddresses(directory=d) for d in deleted_dirs)
    live_candidate_tgts, deleted_candidate_tgts = await MultiGet(
        Get(Targets, AddressSpecs(live_candidate_specs)),
        Get(UnexpandedTargets, AddressSpecs(deleted_candidate_specs)),
    )

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        candidate_tgts: Sequence[Target]
        if live:
            candidate_tgts = live_candidate_tgts
            sources_set = live_files
        else:
            candidate_tgts = deleted_candidate_tgts
            sources_set = deleted_files

        build_file_addresses = await MultiGet(
            Get(BuildFileAddress, Address, tgt.address)
            for tgt in candidate_tgts)

        for candidate_tgt, bfa in zip(candidate_tgts, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(Sources).filespec,
                                 paths=sources_set))
            # Also consider secondary ownership: fields that are not `Sources` fields with
            # primary ownership, but whose target should still match the file. We can't use
            # `tgt.get()` because this is a mixin, and there may technically be >1 such field.
            secondary_owner_fields = tuple(
                field  # type: ignore[misc]
                for field in candidate_tgt.field_values.values()
                if isinstance(field, SecondaryOwnerMixin))
            for secondary_owner_field in secondary_owner_fields:
                matching_files.update(
                    matches_filespec(secondary_owner_field.filespec,
                                     paths=sources_set))
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
Example 29
def test_address_specs_literals_vs_globs(address_specs_rule_runner: RuleRunner) -> None:
    address_specs_rule_runner.write_files(
        {
            "demo/BUILD": dedent(
                """\
                generator(sources=['**/*.txt'])
                """
            ),
            "demo/f1.txt": "",
            "demo/f2.txt": "",
            "demo/subdir/f.txt": "",
            "demo/subdir/f.another_ext": "",
            "demo/subdir/BUILD": "mock_tgt(name='another_ext', sources=['f.another_ext'])",
            "another_dir/BUILD": "mock_tgt(sources=[])",
        }
    )

    def assert_resolved(spec: AddressSpec, expected: set[Address]) -> None:
        result = resolve_address_specs(address_specs_rule_runner, [spec])
        assert result == expected

    # Literals should be "one-in, one-out".
    assert_resolved(AddressLiteralSpec("demo"), {Address("demo")})
    assert_resolved(
        AddressLiteralSpec("demo/f1.txt"), {Address("demo", relative_file_path="f1.txt")}
    )
    assert_resolved(
        AddressLiteralSpec("demo", None, "f1.txt"), {Address("demo", generated_name="f1.txt")}
    )
    assert_resolved(
        AddressLiteralSpec("demo/subdir", "another_ext"),
        {Address("demo/subdir", target_name="another_ext")},
    )

    assert_resolved(
        # Match all targets that reside in `demo/`, either because they are explicitly declared
        # there or generated into that dir. Note that this does not include
        # `demo#subdir/f.txt`, even though its target generator matches.
        SiblingAddresses("demo"),
        {
            Address("demo"),
            Address("demo", relative_file_path="f1.txt"),
            Address("demo", generated_name="f1.txt"),
            Address("demo", relative_file_path="f2.txt"),
            Address("demo", generated_name="f2.txt"),
        },
    )
    assert_resolved(
        # Should include all generated targets that reside in `demo/subdir`, even though their
        # target generator is in an ancestor.
        SiblingAddresses("demo/subdir"),
        {
            Address("demo", relative_file_path="subdir/f.txt"),
            Address("demo", generated_name="subdir/f.txt"),
            Address("demo/subdir", target_name="another_ext"),
        },
    )

    all_tgts_in_demo = {
        Address("demo"),
        Address("demo", relative_file_path="f1.txt"),
        Address("demo", generated_name="f1.txt"),
        Address("demo", relative_file_path="f2.txt"),
        Address("demo", generated_name="f2.txt"),
        Address("demo", relative_file_path="subdir/f.txt"),
        Address("demo", generated_name="subdir/f.txt"),
        Address("demo/subdir", target_name="another_ext"),
    }
    assert_resolved(DescendantAddresses("demo"), all_tgts_in_demo)
    assert_resolved(AscendantAddresses("demo/subdir"), all_tgts_in_demo)
    assert_resolved(
        AscendantAddresses("demo"),
        {
            Address("demo"),
            Address("demo", relative_file_path="f1.txt"),
            Address("demo", generated_name="f1.txt"),
            Address("demo", relative_file_path="f2.txt"),
            Address("demo", generated_name="f2.txt"),
        },
    )