Example #1
    def regenerate(self, bazel_targets: Iterable[str], cwd: str = ".") -> None:
        targets = bazel_utils.expand_bazel_target_dirs(
            self.workspace_dir,
            [t for t in bazel_targets if not t.startswith("@")],
            require_build_file=False,
            cwd=cwd,
        )

        for target in targets:
            assert target.startswith("//"), "Target must be absolute: " + target
            pkg, _, _ = target.partition(":")
            pkg_path = bazel_utils.normalize_relative_target_to_os_path(
                pkg[2:])
            target_dir = os.path.join(self.workspace_dir, pkg_path)

            if target_dir in self.visited_dirs:
                continue
            self.visited_dirs.add(target_dir)

            if not os.path.exists(os.path.join(target_dir, "BUILD.in")):
                continue

            if target_dir in self.generated_files:
                continue

            if self.dry_run:
                continue

            out = os.path.join(target_dir, "BUILD.gen_empty")
            # Touch an empty marker file and record it so the merge step
            # knows this directory was processed.
            with open(out, "w"):
                pass

            self.generated_files[target_dir].append(out)
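The snippets here share an implied generator protocol: a class constructed with (workspace_dir, generated_files, verbose, skip_deps_generation, dry_run, use_magic_mirror) that exposes regenerate() and tracks visited_dirs/generated_files. A minimal runnable sketch of that protocol, assuming the constructor signature used by Examples #5/#6; NoopGenerator is hypothetical, not from the source:

from collections import defaultdict
from typing import Iterable

class NoopGenerator:
    # Hypothetical stand-in for a real generator such as the pip one in
    # Example #4; the constructor signature mirrors how Examples #5/#6
    # instantiate generators.
    def __init__(self, workspace_dir, generated_files, verbose,
                 skip_deps_generation, dry_run, use_magic_mirror):
        self.workspace_dir = workspace_dir
        self.generated_files = generated_files  # dir -> generated fragments
        self.dry_run = dry_run
        self.visited_dirs = set()

    def regenerate(self, bazel_targets: Iterable[str], cwd: str = ".") -> None:
        pass  # a real generator expands targets and emits BUILD fragments

generated_files = defaultdict(list)
gen = NoopGenerator("/path/to/workspace", generated_files,
                    verbose=False, skip_deps_generation=False,
                    dry_run=True, use_magic_mirror=False)
gen.regenerate(["//go/src/..."])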
Example #2
def targets2packages(bazel_targets):
    go_packages = []
    workspace_dir = bazel_utils.find_workspace()
    targets = bazel_utils.expand_bazel_target_dirs(workspace_dir,
                                                   bazel_targets,
                                                   require_build_file=False)
    for x in targets:
        if x.startswith("//go/src/"):
            go_packages.append(x.replace("//go/src/", ""))
    return go_packages
Example #3
def targets2packages(workspace_dir: str,
                     bazel_targets: Iterable[str]) -> List[str]:
    go_packages = []
    targets = bazel_utils.expand_bazel_target_dirs(workspace_dir,
                                                   bazel_targets,
                                                   require_build_file=False)
    for x in targets:
        if x.startswith("//go/src/"):
            go_packages.append(x.replace("//go/src/", ""))
    return go_packages
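Both versions perform the same mapping from expanded Bazel target directories to Go import paths. A small self-contained illustration, with the output of expand_bazel_target_dirs stubbed by a hypothetical list:

# Hypothetical output of expand_bazel_target_dirs for some Go targets.
expanded = ["//go/src/dropbox/proto", "//go/src/golang.org/x/net", "//tools/bzl"]

# The filtering/stripping step from targets2packages:
go_packages = [t.replace("//go/src/", "")
               for t in expanded if t.startswith("//go/src/")]
print(go_packages)  # ['dropbox/proto', 'golang.org/x/net']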
Example #4
    def regenerate(self, bazel_targets: Iterable[str], cwd: str = ".") -> None:
        targets = bazel_utils.expand_bazel_target_dirs(
            self.workspace_dir,
            [t for t in bazel_targets if not t.startswith("@")],
            require_build_file=False,
            cwd=cwd,
        )

        for target in targets:
            assert target.startswith("//"), "Target must be absolute: " + target
            target_dir = bazel_utils.normalize_relative_target_to_os_path(
                target[2:])

            if target_dir in self.visited_dirs:
                continue
            self.visited_dirs.add(target_dir)

            build_bzl = os.path.join(self.workspace_dir, target_dir,
                                     BUILD_INPUT)
            if not os.path.isfile(build_bzl):
                continue

            parsed = build_parser.parse_file(build_bzl)

            pip_rules = parsed.get_rules_by_types(PIP_GEN_RULE_TYPES)
            if not pip_rules:
                if self.verbose:
                    print("No pip targets found in %s/%s" %
                          (target_dir, BUILD_INPUT))
                continue

            if not self.skip_deps_generation:
                # Regenerate dependencies first so their BUILD files are
                # up to date before this package's pip rules are processed.
                for rule in pip_rules:
                    self.regenerate(
                        build_parser.maybe_expand_attribute(
                            rule.attr_map.get("deps", [])),
                        cwd=os.path.join(self.workspace_dir, target_dir),
                    )

            if self.verbose:
                head = "(dry run) " if self.dry_run else ""
                names = [rule.attr_map["name"] for rule in pip_rules]
                print("%sProcessing pip targets in %s: %s" %
                      (head, target_dir, names))

            if self.dry_run:
                continue

            self.process_pip_rules(target_dir, pip_rules)
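For context, this pass scans BUILD.in (BUILD_INPUT) files for pip-generating rules. A hypothetical fragment of such a file; the rule name dbx_py_pypi_piplib is an assumption, since PIP_GEN_RULE_TYPES is not shown in these snippets:

# Hypothetical BUILD.in content (Starlark). parse_file() would surface
# the rule below with attr_map entries "name" and "deps"; the "deps"
# list is what drives the recursive regenerate() call above.
BUILD_IN_EXAMPLE = '''
dbx_py_pypi_piplib(
    name = "requests",
    deps = ["//pip/urllib3"],
)
'''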
Example #5
def regenerate_build_files(
    bazel_targets_l: Sequence[str],
    generators: Sequence[Callable[..., Generator]],
    verbose: bool = False,
    skip_deps_generation: bool = False,
    dry_run: bool = False,
    reverse_deps_generation: bool = False,
    use_magic_mirror: bool = False,
) -> None:
    workspace_dir = bazel_utils.find_workspace()
    bazel_targets = set(bazel_targets_l)

    if reverse_deps_generation:
        targets = bazel_utils.expand_bazel_target_dirs(
            workspace_dir,
            [t for t in bazel_targets if not t.startswith("@")],
            require_build_file=False,
            cwd=".",
        )
        pkgs = [t.partition(":")[0] for t in targets]

        # Match quoted references to either "pkg" or "pkg:target".
        patterns = ['"%s"' % pkg for pkg in pkgs]
        patterns.extend(['"%s:' % pkg for pkg in pkgs])

        for path, dirs, files in os.walk(workspace_dir):
            if "BUILD" not in files:
                continue

            # `path` from os.walk is already rooted at workspace_dir.
            with open(os.path.join(path, "BUILD")) as f:
                build_content = f.read()

            should_regen = any(p in build_content for p in patterns)

            if should_regen:
                # convert abs path to relative to workspace
                bazel_targets.add("//" +
                                  bazel_utils.normalize_os_path_to_target(
                                      os.path.relpath(path, workspace_dir)))

    generated_files = DefaultDict[str, List[str]](list)

    generator_instances: List[Generator] = []
    for gen in generators:
        # Most of the time `gen` is a class. Sometimes it's a
        # functools.partial, which has no __name__, so fall back to the
        # name of the callable it wraps.
        generator_name = getattr(gen, "__name__", None) or gen.func.__name__
        with metrics.Timer(
                "bzl_gen_{}_init_ms".format(generator_name)) as init_timer:
            generator_instances.append(
                gen(
                    workspace_dir,
                    generated_files,
                    verbose,
                    skip_deps_generation,
                    dry_run,
                    use_magic_mirror,
                ))
        metrics.log_cumulative_rate(init_timer.name,
                                    init_timer.get_interval_ms())

    # To ensure we don't miss generating specific target types, we
    # recursively expand the target set until it converges.
    prev_visited_dirs: Set[str] = set()

    while bazel_targets:
        for generator in generator_instances:
            with metrics.generator_metric_context(
                    generator.__class__.__name__):
                generator.regenerate(bazel_targets)

        visited_dirs = set(generated_files.keys())
        newly_visited_dirs = visited_dirs.difference(prev_visited_dirs)
        if newly_visited_dirs:
            # continue processing
            prev_visited_dirs = visited_dirs
            # Convert absolute dirs back to //-prefixed workspace targets.
            bazel_targets = {
                bazel_utils.normalize_os_path_to_target(
                    d.replace(workspace_dir, "/"))
                for d in newly_visited_dirs
            }
        else:
            break

    with metrics.Timer("bzl_gen_merge_build_files_ms") as merge_timer:
        merge_generated_build_files(generated_files)
    metrics.log_cumulative_rate(merge_timer.name,
                                merge_timer.get_interval_ms())
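The init loop above identifies each generator via __name__, which functools.partial objects lack. A minimal self-contained sketch of that fallback (name_of is hypothetical):

import functools

def name_of(gen):
    # partial objects have no __name__; fall back to the callable they wrap.
    return getattr(gen, "__name__", None) or gen.func.__name__

assert name_of(dict) == "dict"
assert name_of(functools.partial(dict, a=1)) == "dict"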
Example #6
def regenerate_build_files(
    bazel_targets,
    generators,
    verbose=False,
    skip_deps_generation=False,
    dry_run=False,
    reverse_deps_generation=False,
    use_magic_mirror=False,
):
    workspace_dir = bazel_utils.find_workspace()

    if reverse_deps_generation:
        targets = bazel_utils.expand_bazel_target_dirs(
            workspace_dir,
            [t for t in bazel_targets if not t.startswith("@")],
            require_build_file=False,
            cwd=".",
        )
        pkgs = [t.partition(":")[0] for t in targets]

        patterns = ['"%s"' % pkg for pkg in pkgs]
        patterns.extend(['"%s:' % pkg for pkg in pkgs])

        bazel_targets = set(bazel_targets)
        for path, dirs, files in os.walk(workspace_dir):
            if "BUILD" not in files:
                continue

            # `path` from os.walk is already rooted at workspace_dir.
            with open(os.path.join(path, "BUILD")) as f:
                build_content = f.read()

            should_regen = any(p in build_content for p in patterns)

            if should_regen:
                # convert abs path to relative to workspace
                bazel_targets.add("//" + os.path.relpath(path, workspace_dir))

    generated_files = defaultdict(list)  # type: ignore[var-annotated]

    generator_instances = [
        generator(
            workspace_dir,
            generated_files,
            verbose,
            skip_deps_generation,
            dry_run,
            use_magic_mirror,
        )
        for generator in generators
    ]

    # To ensure we don't miss generating specific target types, we
    # recursively expand the target set until it converges.
    prev_visited_dirs = set()  # type: ignore[var-annotated]
    updated_pkgs = set()  # type: ignore[var-annotated]

    while bazel_targets:
        for generator in generator_instances:
            with metrics.generator_metric_context(generator.__class__.__name__):
                res = generator.regenerate(bazel_targets)
            # Generators are expected to do one or both of:
            # - return a list of packages/directories where they may have
            #   modified BUILD files
            # - update the self.generated_files mapping of BUILD path ->
            #   BUILD file fragments
            if res:
                updated_pkgs.update(res)

        visited_dirs = set(generated_files.keys())
        newly_visited_dirs = visited_dirs.difference(prev_visited_dirs)
        if newly_visited_dirs:
            # continue processing
            prev_visited_dirs = visited_dirs
            bazel_targets = [d.replace(workspace_dir, "/") for d in newly_visited_dirs]
        else:
            break

    merge_generated_build_files(generated_files)
    updated_pkgs.update(generated_files.keys())
    return updated_pkgs
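Both drivers iterate until no generator discovers a new directory. A toy model of that fixed-point loop, with discovery stubbed by a hypothetical adjacency map:

# Toy fixed-point loop: each pass may discover new directories; stop
# once a pass yields nothing that hasn't been seen before.
discovered = {"a": ["b", "c"], "b": ["c"], "c": []}
frontier, seen = {"a"}, set()
while frontier:
    seen |= frontier
    frontier = {n for d in frontier for n in discovered[d]} - seen
print(sorted(seen))  # ['a', 'b', 'c']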