Exemplo n.º 1
0
def classify_source_files(
        paths: Iterable[str]) -> dict[type[Target], set[str]]:
    """Returns a dict of target type -> files that belong to targets of that type.

    Args:
        paths: file paths to classify. May be any iterable, including a
            one-shot iterator; it is materialized exactly once.
    """
    # Materialize `paths` exactly once: the parameter is only typed as
    # Iterable, and iterating it a second time (as the old `set(paths)` at
    # the end did) silently yields nothing for a generator, misclassifying
    # every file.
    path_to_file_name = {path: os.path.basename(path) for path in paths}

    tests_filespec = Filespec(
        includes=list(PythonTestsGeneratingSourcesField.default))
    test_utils_filespec = Filespec(
        includes=list(PythonTestUtilsGeneratingSourcesField.default))

    # Match on basenames only; the default filespecs are filename patterns.
    file_names = list(path_to_file_name.values())
    test_file_names = set(matches_filespec(tests_filespec, paths=file_names))
    test_util_file_names = set(
        matches_filespec(test_utils_filespec, paths=file_names))

    test_files = {
        path
        for path, file_name in path_to_file_name.items()
        if file_name in test_file_names
    }
    test_util_files = {
        path
        for path, file_name in path_to_file_name.items()
        if file_name in test_util_file_names
    }
    # Everything that matched neither filespec is a plain library source.
    library_files = set(path_to_file_name) - test_files - test_util_files
    return {
        PythonTestsGeneratorTarget: test_files,
        PythonTestUtilsGeneratorTarget: test_util_files,
        PythonSourcesGeneratorTarget: library_files,
    }
Exemplo n.º 2
0
def classify_source_files(
        paths: Iterable[str]) -> dict[type[Target], set[str]]:
    """Returns a dict of target type -> files that belong to targets of that type.

    Classifies each path as a Scalatest test, a JUnit test, or a plain Scala
    source based on which generator field's default filespec its basename
    matches.
    """
    scalatest_filespec = Filespec(
        includes=list(ScalatestTestsGeneratorSourcesField.default))
    junit_filespec = Filespec(
        includes=list(ScalaJunitTestsGeneratorSourcesField.default))

    # Materialize once (`paths` is only typed Iterable) and compute each
    # matched-basename set a single time. The previous version re-ran
    # `matches_filespec` over ALL basenames inside the comprehension for
    # every single path — O(n^2) matching work for an invariant result.
    all_paths = list(paths)
    basenames = [os.path.basename(path) for path in all_paths]
    scalatest_names = set(
        matches_filespec(scalatest_filespec, paths=basenames))
    junit_names = set(matches_filespec(junit_filespec, paths=basenames))

    scalatest_files = {
        path
        for path in all_paths if os.path.basename(path) in scalatest_names
    }
    junit_files = {
        path
        for path in all_paths if os.path.basename(path) in junit_names
    }
    # Anything that matched neither test filespec is a plain source.
    sources_files = set(all_paths) - scalatest_files - junit_files
    return {
        ScalaJunitTestsGeneratorTarget: junit_files,
        ScalaSourcesGeneratorTarget: sources_files,
        ScalatestTestsGeneratorTarget: scalatest_files,
    }
Exemplo n.º 3
0
  def assert_rule_match(self, glob, expected_matches, negate=False):
    """Tests that in-memory glob matching matches lazy-filesystem traversing globs.

    Args:
      glob: the glob pattern under test.
      expected_matches: paths that should match (or, with `negate`, should not).
      negate: when True, assert the glob does NOT match any of the paths.
    """
    if negate:
      assertMatch, match_state = self.assertFalse, 'erroneously matches'
    else:
      assertMatch, match_state = self.assertTrue, "doesn't match"

    # Confirm in-memory behaviour.
    for expected in expected_matches:
      assertMatch(
          matches_filespec(expected, {'globs': [glob]}),
          '{} {} path `{}`'.format(glob, match_state, expected),
      )

    # And confirm that it matches on-disk behaviour.
    for expected in expected_matches:
      if expected.endswith('/'):
        self.create_dir(expected)
      else:
        self.create_file(expected)
    snapshot, = self.scheduler.product_request(Snapshot, [PathGlobs([glob])])
    if negate:
      # No expected path may appear in the on-disk snapshot.
      subset = set(expected_matches).intersection(set(snapshot.files))
      # `assertEqual`: the `assertEquals` alias is deprecated and removed in Python 3.12.
      self.assertEqual(subset, set(), '{} {} path(s) {}'.format(glob, match_state, subset))
    else:
      self.assertEqual(sorted(expected_matches), sorted(snapshot.files))
Exemplo n.º 4
0
async def find_fortran_targets(
        req: PutativeFortranTargetsRequest,
        all_owned_sources: AllOwnedSources) -> PutativeTargets:
    """Proposes targets for unowned Fortran (`*.f90`) files.

    Each unowned file is classified as a test or a library source by matching
    its basename against `FortranTestsSources.default`, then grouped by
    directory into one putative target per directory per target type.
    """
    all_fortran_files = await Get(Paths, PathGlobs, req.path_globs("*.f90"))
    # Locals renamed from `*_shell_*`: a copy/paste leftover from the shell
    # backend that misdescribed these as shell files.
    unowned_fortran_files = set(all_fortran_files.files) - set(
        all_owned_sources)

    tests_filespec = Filespec(includes=list(FortranTestsSources.default))
    test_filenames = set(
        matches_filespec(
            tests_filespec,
            paths=[os.path.basename(path) for path in unowned_fortran_files]))
    test_files = {
        path
        for path in unowned_fortran_files
        if os.path.basename(path) in test_filenames
    }
    # `unowned_fortran_files` is already a set; no extra `set()` wrap needed.
    sources_files = unowned_fortran_files - test_files
    classified_unowned_files = {
        FortranTestsTarget: test_files,
        FortranLibraryTarget: sources_files,
    }

    pts = []
    for tgt_type, paths in classified_unowned_files.items():
        for dirname, filenames in group_by_dir(paths).items():
            # Test targets get an explicit "tests" name; library targets use
            # the default target name.
            name = "tests" if tgt_type == FortranTestsTarget else None
            pts.append(
                PutativeTarget.for_target_type(
                    tgt_type,
                    path=dirname,
                    name=name,
                    triggering_sources=sorted(filenames)))
    return PutativeTargets(pts)
Exemplo n.º 5
0
    def assert_rule_match(
        self,
        glob: str,
        expected_matches: Tuple[str, ...],
        negate: bool = False,
    ) -> None:
        """Tests that in-memory glob matching matches lazy-filesystem traversing globs.

        Args:
            glob: the glob pattern under test.
            expected_matches: paths that should match (or, with `negate`, should not).
            negate: when True, assert the glob does NOT match any of the paths.
        """
        if negate:
            assertMatch, match_state = self.assertFalse, 'erroneously matches'
        else:
            assertMatch, match_state = self.assertTrue, "doesn't match"

        # Confirm in-memory behaviour.
        for expected in expected_matches:
            assertMatch(
                matches_filespec(expected, {'globs': [glob]}),
                f'{glob} {match_state} path `{expected}`',
            )

        # And confirm that it matches on-disk behaviour.
        for expected in expected_matches:
            if expected.endswith('/'):
                self.create_dir(expected)
            else:
                self.create_file(expected)
        snapshot = self.request_single_product(Snapshot, PathGlobs([glob]))
        if negate:
            # No expected path may appear in the on-disk snapshot.
            subset = set(expected_matches).intersection(set(snapshot.files))
            # `assertEqual`: the `assertEquals` alias is deprecated and removed in Python 3.12.
            self.assertEqual(subset, set(),
                             f'{glob} {match_state} path(s) {subset}')
        else:
            self.assertEqual(sorted(expected_matches), sorted(snapshot.files))
Exemplo n.º 6
0
    def assert_rule_match(self, glob, expected_matches, negate=False):
        """Tests that in-memory glob matching matches lazy-filesystem traversing globs.

        Args:
            glob: the glob pattern under test.
            expected_matches: paths that should match (or, with `negate`, should not).
            negate: when True, assert the glob does NOT match any of the paths.
        """
        if negate:
            assertMatch, match_state = self.assertFalse, 'erroneously matches'
        else:
            assertMatch, match_state = self.assertTrue, "doesn't match"

        # Confirm in-memory behaviour.
        for expected in expected_matches:
            assertMatch(
                matches_filespec(expected, {'globs': [glob]}),
                '{} {} path `{}`'.format(glob, match_state, expected),
            )

        # And confirm that it matches on-disk behaviour.
        for expected in expected_matches:
            if expected.endswith('/'):
                self.create_dir(expected)
            else:
                self.create_file(expected)
        snapshot, = self.scheduler.product_request(Snapshot,
                                                   [PathGlobs([glob])])
        if negate:
            # No expected path may appear in the on-disk snapshot.
            subset = set(expected_matches).intersection(set(snapshot.files))
            # `assertEqual`: the `assertEquals` alias is deprecated and removed in Python 3.12.
            self.assertEqual(
                subset, set(),
                '{} {} path(s) {}'.format(glob, match_state, subset))
        else:
            self.assertEqual(sorted(expected_matches), sorted(snapshot.files))
Exemplo n.º 7
0
async def find_owners(owners_request: OwnersRequest) -> Owners:
    """Map the requested source paths to the addresses of the targets that own them.

    Handles both existing ("live") and deleted paths: live paths are matched
    against expanded `Targets`, deleted paths against `UnexpandedTargets`,
    which retain the originally declared source globs.
    """
    # Determine which of the sources are live and which are deleted.
    sources_set_snapshot = await Get(Snapshot,
                                     PathGlobs(owners_request.sources))

    # A requested source absent from the snapshot no longer exists on disk.
    live_files = FrozenOrderedSet(sources_set_snapshot.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        # Walk up the buildroot looking for targets that would conceivably claim changed sources.
        # For live files, we use expanded Targets, which have file level precision but which are
        # only created for existing files. For deleted files we use UnexpandedTargets, which have
        # the original declared glob.
        candidate_targets: Iterable[Target]
        if live:
            if not live_dirs:
                continue
            sources_set = live_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in live_dirs)
            candidate_targets = await Get(Targets,
                                          AddressSpecs(candidate_specs))
        else:
            if not deleted_dirs:
                continue
            sources_set = deleted_files
            candidate_specs = tuple(
                AscendantAddresses(directory=d) for d in deleted_dirs)
            candidate_targets = await Get(UnexpandedTargets,
                                          AddressSpecs(candidate_specs))

        # Resolve each candidate's BUILD file address so that an edit to the
        # BUILD file itself also counts toward ownership (see `bfa.rel_path`
        # check below).
        build_file_addresses = await MultiGet(
            Get(BuildFileAddress, Address, tgt.address)
            for tgt in candidate_targets)

        for candidate_tgt, bfa in zip(candidate_targets, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(Sources).filespec,
                                 paths=sources_set))
            # Keep the target only if it matched a requested source or its
            # own BUILD file was among the requested sources.
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    # Apply the configured behavior (ignore/warn/error) for sources nobody owns.
    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
Exemplo n.º 8
0
def classify_source_files(paths: Iterable[str]) -> dict[type[Target], set[str]]:
    """Returns a dict of target type -> files that belong to targets of that type."""
    tests_filespec = Filespec(includes=list(Shunit2TestsGeneratorSourcesField.default))
    # Match basenames against the default test-file patterns.
    basenames = [os.path.basename(path) for path in paths]
    test_filenames = set(matches_filespec(tests_filespec, paths=basenames))
    test_files = set()
    for path in paths:
        if os.path.basename(path) in test_filenames:
            test_files.add(path)
    # Everything that is not a test file is a plain shell source.
    sources_files = set(paths) - test_files
    return {
        Shunit2TestsGeneratorTarget: test_files,
        ShellSourcesGeneratorTarget: sources_files,
    }
Exemplo n.º 9
0
    def assert_rule_match(self, glob, expected_matches, negate=False):
        """Assert that `glob` matches (or, with `negate`, does not match) each path."""
        if negate:
            asserter, match_state = self.assertFalse, 'erroneously matches'
        else:
            asserter, match_state = self.assertTrue, "doesn't match"

        for expected in expected_matches:
            result = matches_filespec(expected, {'globs': [glob]})
            message = '{} {} path `{}`'.format(glob, match_state, expected)
            asserter(result, message)
Exemplo n.º 10
0
 def validate_build_file_name(self, build_file_patterns: tuple[str, ...]) -> None:
     """Check that the specified BUILD file name works with the repository's BUILD file
     patterns."""
     filespec = Filespec(includes=list(build_file_patterns))
     # Guard clause: nothing to do when the configured name matches the patterns.
     if matches_filespec(filespec, paths=[self.build_file_name]):
         return
     raise ValueError(
         f"The option `[{self.options_scope}].build_file_name` is set to "
         f"`{self.build_file_name}`, which is not compatible with "
         f"`[GLOBAL].build_patterns`: {sorted(build_file_patterns)}. This means that "
         "generated BUILD files would be ignored.\n\n"
         "To fix, please update the options so that they are compatible."
     )
Exemplo n.º 11
0
def classify_source_files(
        paths: Iterable[str]) -> dict[type[Target], set[str]]:
    """Returns a dict of target type -> files that belong to targets of that type."""
    tests_filespec = Filespec(includes=list(PythonTestsSources.default))
    # Match basenames against the default test-file patterns.
    basenames = [os.path.basename(p) for p in paths]
    test_filenames = set(matches_filespec(tests_filespec, paths=basenames))
    test_files = {p for p in paths if os.path.basename(p) in test_filenames}
    # Everything else is a library source.
    library_files = set(paths) - test_files
    return {PythonTests: test_files, PythonLibrary: library_files}
Exemplo n.º 12
0
 def filter_by_ignores(
         self, putative_targets: Iterable[PutativeTarget],
         build_file_ignores: tuple[str, ...]) -> Iterator[PutativeTarget]:
     """Yield only the putative targets not excluded by path or address ignores."""
     ignore_paths_filespec = Filespec(
         includes=[*self.ignore_paths, *build_file_ignores])
     for ptgt in putative_targets:
         # Skip targets whose BUILD file path is covered by the ignore filespec.
         build_file_path = os.path.join(ptgt.path, self.build_file_name)
         if matches_filespec(ignore_paths_filespec, paths=[build_file_path]):
             continue
         # Note that `tailor` can only generate explicit targets, so we don't need to
         # worry about generated address syntax (`#`) or file address syntax.
         address = f"{ptgt.path or '//'}:{ptgt.name}"
         if address in self.ignore_adding_targets:
             continue
         yield ptgt
Exemplo n.º 13
0
def assert_rule_match(rule_runner: RuleRunner, glob: str,
                      paths: Tuple[str, ...], *, should_match: bool) -> None:
    """Check that in-memory filespec matching agrees with on-disk globbing."""
    # In-memory check.
    matched = matches_filespec({"includes": [glob]}, paths=paths)
    if should_match:
        assert matched == paths
    else:
        assert not matched

    # On-disk check: materialize each path, then snapshot the glob.
    for p in paths:
        if p.endswith("/"):
            rule_runner.create_dir(p)
        else:
            rule_runner.create_file(p)
    snapshot = rule_runner.request_product(Snapshot, [PathGlobs([glob])])
    if should_match:
        assert sorted(snapshot.files) == sorted(paths)
    else:
        assert not snapshot.files
Exemplo n.º 14
0
async def find_owners(owners_request: OwnersRequest) -> Owners:
    """Map the requested source paths to the addresses of the targets that own them.

    Considers both live and deleted paths, and both primary ownership via a
    `SourcesField` and secondary ownership via `SecondaryOwnerMixin` fields.
    """
    # Determine which of the sources are live and which are deleted.
    sources_paths = await Get(Paths, PathGlobs(owners_request.sources))

    # A requested source absent from the resolved paths no longer exists on disk.
    live_files = FrozenOrderedSet(sources_paths.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    def create_live_and_deleted_gets(
        *, filter_by_global_options: bool
    ) -> tuple[Get[FilteredTargets | Targets, RawSpecsWithoutFileOwners], Get[
            UnexpandedTargets, RawSpecsWithoutFileOwners], ]:
        """Walk up the buildroot looking for targets that would conceivably claim changed sources.

        For live files, we use Targets, which causes generated targets to be used rather than their
        target generators. For deleted files we use UnexpandedTargets, which have the original
        declared `sources` globs from target generators.

        We ignore unrecognized files, which can happen e.g. when finding owners for deleted files.
        """
        live_raw_specs = RawSpecsWithoutFileOwners(
            ancestor_globs=tuple(
                AncestorGlobSpec(directory=d) for d in live_dirs),
            filter_by_global_options=filter_by_global_options,
            description_of_origin="<owners rule - unused>",
            unmatched_glob_behavior=GlobMatchErrorBehavior.ignore,
        )
        live_get: Get[FilteredTargets | Targets, RawSpecsWithoutFileOwners] = (
            Get(FilteredTargets, RawSpecsWithoutFileOwners,
                live_raw_specs) if filter_by_global_options else Get(
                    Targets, RawSpecsWithoutFileOwners, live_raw_specs))
        deleted_get = Get(
            UnexpandedTargets,
            RawSpecsWithoutFileOwners(
                ancestor_globs=tuple(
                    AncestorGlobSpec(directory=d) for d in deleted_dirs),
                filter_by_global_options=filter_by_global_options,
                description_of_origin="<owners rule - unused>",
                unmatched_glob_behavior=GlobMatchErrorBehavior.ignore,
            ),
        )
        return live_get, deleted_get

    live_get, deleted_get = create_live_and_deleted_gets(
        filter_by_global_options=owners_request.filter_by_global_options)
    live_candidate_tgts, deleted_candidate_tgts = await MultiGet(
        live_get, deleted_get)

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        candidate_tgts: Sequence[Target]
        if live:
            candidate_tgts = live_candidate_tgts
            sources_set = live_files
        else:
            candidate_tgts = deleted_candidate_tgts
            sources_set = deleted_files

        # Resolve each candidate's BUILD file address so that an edit to the
        # BUILD file itself also counts toward ownership (`bfa.rel_path` below).
        build_file_addresses = await MultiGet(
            Get(
                BuildFileAddress,
                BuildFileAddressRequest(
                    tgt.address,
                    description_of_origin="<owners rule - cannot trigger>"),
            ) for tgt in candidate_tgts)

        for candidate_tgt, bfa in zip(candidate_tgts, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(SourcesField).filespec,
                                 paths=sources_set))
            # Also consider secondary ownership, meaning it's not a `SourcesField` field with
            # primary ownership, but the target still should match the file. We can't use
            # `tgt.get()` because this is a mixin, and there technically may be >1 field.
            secondary_owner_fields = tuple(
                field for field in candidate_tgt.field_values.values()
                if isinstance(field, SecondaryOwnerMixin))
            for secondary_owner_field in secondary_owner_fields:
                matching_files.update(
                    matches_filespec(secondary_owner_field.filespec,
                                     paths=sources_set))
            # Keep the target only if it matched a requested source or its
            # own BUILD file was among the requested sources.
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    # Apply the configured behavior (ignore/warn/error) for sources nobody owns.
    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
Exemplo n.º 15
0
 def matches(self, path):
     """Match *path* against this filespec.

     Raises:
         NotPopulatedError: if the target has not been populated yet.
     """
     if self._populated:
         return matches_filespec(path, self.filespec)
     raise self.NotPopulatedError()
Exemplo n.º 16
0
 def matches(self, path):
     """Match *path* against the sources fileset, falling back to the filespec."""
     direct = self.sources.matches(path)
     return direct if direct else matches_filespec(path, self.filespec)
Exemplo n.º 17
0
 def matches(self, path):
   """Return the result of matching *path* against this filespec."""
   spec = self.filespec
   return matches_filespec(path, spec)
Exemplo n.º 18
0
 def _match_source(self, source, fileset):
     """Match *source* against *fileset* directly, falling back to its filespec."""
     direct = fileset.matches(source)
     return direct if direct else matches_filespec(source, fileset.filespec)
Exemplo n.º 19
0
 def matches(self, path):
     """Return the result of matching *path* against this filespec."""
     spec = self.filespec
     return matches_filespec(path, spec)
Exemplo n.º 20
0
async def find_owners(owners_request: OwnersRequest) -> Owners:
    """Map the requested source paths to the addresses of the targets that own them.

    Considers both live and deleted paths, and both primary ownership via a
    `Sources` field and secondary ownership via `SecondaryOwnerMixin` fields.
    """
    # Determine which of the sources are live and which are deleted.
    sources_paths = await Get(Paths, PathGlobs(owners_request.sources))

    # A requested source absent from the resolved paths no longer exists on disk.
    live_files = FrozenOrderedSet(sources_paths.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources
                                     if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    # Walk up the buildroot looking for targets that would conceivably claim changed sources.
    # For live files, we use expanded Targets, which have file level precision but which are
    # only created for existing files. For deleted files we use UnexpandedTargets, which have
    # the original declared glob.
    live_candidate_specs = tuple(
        AscendantAddresses(directory=d) for d in live_dirs)
    deleted_candidate_specs = tuple(
        AscendantAddresses(directory=d) for d in deleted_dirs)
    live_candidate_tgts, deleted_candidate_tgts = await MultiGet(
        Get(Targets, AddressSpecs(live_candidate_specs)),
        Get(UnexpandedTargets, AddressSpecs(deleted_candidate_specs)),
    )

    matching_addresses: OrderedSet[Address] = OrderedSet()
    unmatched_sources = set(owners_request.sources)
    for live in (True, False):
        candidate_tgts: Sequence[Target]
        if live:
            candidate_tgts = live_candidate_tgts
            sources_set = live_files
        else:
            candidate_tgts = deleted_candidate_tgts
            sources_set = deleted_files

        # Resolve each candidate's BUILD file address so that an edit to the
        # BUILD file itself also counts toward ownership (`bfa.rel_path` below).
        build_file_addresses = await MultiGet(
            Get(BuildFileAddress, Address, tgt.address)
            for tgt in candidate_tgts)

        for candidate_tgt, bfa in zip(candidate_tgts, build_file_addresses):
            matching_files = set(
                matches_filespec(candidate_tgt.get(Sources).filespec,
                                 paths=sources_set))
            # Also consider secondary ownership, meaning it's not a `Sources` field with primary
            # ownership, but the target still should match the file. We can't use `tgt.get()`
            # because this is a mixin, and there technically may be >1 field.
            secondary_owner_fields = tuple(
                field  # type: ignore[misc]
                for field in candidate_tgt.field_values.values()
                if isinstance(field, SecondaryOwnerMixin))
            for secondary_owner_field in secondary_owner_fields:
                matching_files.update(
                    matches_filespec(secondary_owner_field.filespec,
                                     paths=sources_set))
            # Keep the target only if it matched a requested source or its
            # own BUILD file was among the requested sources.
            if not matching_files and bfa.rel_path not in sources_set:
                continue

            unmatched_sources -= matching_files
            matching_addresses.add(candidate_tgt.address)

    # Apply the configured behavior (ignore/warn/error) for sources nobody owns.
    if (unmatched_sources and owners_request.owners_not_found_behavior !=
            OwnersNotFoundBehavior.ignore):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources],
            owners_request.owners_not_found_behavior)

    return Owners(matching_addresses)
Exemplo n.º 21
0
 def _match_source(self, source, fileset):
   """Match *source* against *fileset* directly, falling back to its filespec."""
   direct = fileset.matches(source)
   return direct if direct else matches_filespec(source, fileset.filespec)
Exemplo n.º 22
0
 def matches(self, path):
   """Match *path* against the sources fileset, falling back to the filespec."""
   direct = self.sources.matches(path)
   return direct if direct else matches_filespec(path, self.filespec)
Exemplo n.º 23
0
 def matches(self, path):
   """Match *path* against this filespec.

   Raises:
     NotPopulatedError: if the target has not been populated yet.
   """
   if self._populated:
     return matches_filespec(path, self.filespec)
   raise self.NotPopulatedError()