def _to_pkg(self, directory):
    # type: (Text) -> Text
    directory = os.path.realpath(directory)
    assert directory.startswith(
        self.workspace_dir), ("Programming error: " + directory)
    directory = directory.replace(self.workspace_dir, "/")
    return bazel_utils.normalize_os_path_to_target(directory)
def _get_bzl_or_build(self, build_dir):
    filename, parsed = self.parsed_file_cache.get_bzl_or_build(build_dir)
    if not filename:
        return "", None

    # Use the real path instead of the symlink.
    build_dir = os.path.dirname(filename)
    if build_dir not in self.processed_local_build_dirs:
        self.processed_local_build_dirs.add(build_dir)
        pkg = build_dir.replace(self.workspace_dir, "/")
        pkg = bazel_utils.normalize_os_path_to_target(pkg)
        self._collect_local_targets(pkg, parsed)
    return filename, parsed
def compute_deps(
    self,
    python_path,
    pkg,
    rule_type,
    name,
    srcs,
    stub_srcs,
    main,
    pip_main,
    validate,
    is_py3_compatible,
):
    srcs = (srcs or []) + (stub_srcs or [])
    if main:
        srcs = srcs + [main]

    mapping = self.python_path_mappings.get(python_path)
    self_modules = mapping.compute_self_modules(pkg, srcs)

    target_dir = bazel_utils.normalize_relative_target_to_os_path(pkg[2:])

    all_deps = set()  # type: ignore[var-annotated]
    all_unknown_imports = set()  # type: ignore[var-annotated]
    all_unknown_froms = set()  # type: ignore[var-annotated]

    # Parse the imports of every source file and map each one to a target.
    for src in set(srcs):
        src = os.path.join(target_dir, src)
        module_path = PythonPathMapping.convert_from_file_path_to_module(src)
        filename, parsed = mapping.find_closest_bzl_or_build(module_path)
        if not filename:
            raise bazel_utils.BazelError(
                "Cannot locate %s:%s's source (or its closest BUILD / "
                "BUILD.in file): %s/%s" % (pkg, name, target_dir, src))

        pkg_path = os.path.dirname(filename).replace(self.workspace_dir, "/")
        src_pkg = bazel_utils.normalize_os_path_to_target(pkg_path)
        if src_pkg != pkg:
            print(("WARNING: Skipping %s from %s:%s deps computation "
                   "since it belongs to %s") % (src, pkg, name, src_pkg))
            continue

        import_set, from_set = parse_imports(
            self.workspace_dir,
            src,
            py3_compatible=is_py3_compatible or src.endswith(".pyi"),
        )

        import_deps, unknown_imports = mapping.find_import_targets(
            src_pkg, self_modules, import_set)
        all_deps.update(import_deps)
        all_unknown_imports.update(unknown_imports)
        if validate:
            assert not unknown_imports, (
                "Unable to locate modules %s (imported by %s) in any "
                "library target (NOTE: bin and test targets are "
                "ignored)") % (unknown_imports, src)

        from_deps, unknown_froms = mapping.find_from_targets(
            src_pkg, self_modules, from_set)
        all_deps.update(from_deps)
        all_unknown_froms.update(unknown_froms)
        if validate:
            assert not unknown_froms, (
                "Unable to locate modules %s (imported by %s) in any "
                "library target (NOTE: bin and test targets are "
                "ignored)") % (unknown_froms, src)

    # Also query the mapping for the package itself, with no explicit imports.
    import_deps, unknown_imports = mapping.find_import_targets(
        pkg, self_modules, [])
    all_deps.update(import_deps)
    all_unknown_imports.update(unknown_imports)

    if pip_main:
        all_deps.add(pip_main)

    # A target never depends on itself; //pkg and //pkg:name are the same
    # target when the rule name matches the directory name.
    all_deps.discard("%s:%s" % (pkg, name))
    if name == os.path.basename(target_dir):
        all_deps.discard("%s" % pkg)

    return sort_deps(pkg, all_deps), all_unknown_imports, all_unknown_froms
def regenerate_build_files(
    bazel_targets_l: Sequence[str],
    generators: Sequence[Callable[..., Generator]],
    verbose: bool = False,
    skip_deps_generation: bool = False,
    dry_run: bool = False,
    reverse_deps_generation: bool = False,
    use_magic_mirror: bool = False,
) -> None:
    workspace_dir = bazel_utils.find_workspace()
    bazel_targets = set(bazel_targets_l)

    if reverse_deps_generation:
        targets = bazel_utils.expand_bazel_target_dirs(
            workspace_dir,
            [t for t in bazel_targets if not t.startswith("@")],
            require_build_file=False,
            cwd=".",
        )
        pkgs = [t.partition(":")[0] for t in targets]

        patterns = ['"%s"' % pkg for pkg in pkgs]
        patterns.extend(['"%s:' % pkg for pkg in pkgs])

        for path, dirs, files in os.walk(workspace_dir):
            if "BUILD" not in files:
                continue
            with open(os.path.join(workspace_dir, path, "BUILD")) as build_file:
                build_content = build_file.read()
            should_regen = False
            for pattern in patterns:
                if pattern in build_content:
                    should_regen = True
                    break
            if should_regen:
                # Convert the absolute path to a workspace-relative target.
                bazel_targets.add(
                    "//" + bazel_utils.normalize_os_path_to_target(
                        os.path.relpath(path, workspace_dir)))

    generated_files = DefaultDict[str, List[str]](list)

    generator_instances: List[Generator] = []
    for gen in generators:
        # Most of the time `gen` is a class. Sometimes it is a functools.partial,
        # which has no __name__, so fall back to the wrapped callable's name.
        generator_name = getattr(gen, "__name__", None) or gen.func.__name__
        with metrics.Timer(
                "bzl_gen_{}_init_ms".format(generator_name)) as init_timer:
            generator_instances.append(
                gen(
                    workspace_dir,
                    generated_files,
                    verbose,
                    skip_deps_generation,
                    dry_run,
                    use_magic_mirror,
                ))
        metrics.log_cumulative_rate(init_timer.name,
                                    init_timer.get_interval_ms())

    # In order to ensure we don't miss generating specific target types,
    # recursively expand the generated set until it converges.
    prev_visited_dirs: Set[str] = set()
    while bazel_targets:
        for generator in generator_instances:
            with metrics.generator_metric_context(
                    generator.__class__.__name__):
                generator.regenerate(bazel_targets)

        visited_dirs = set(generated_files.keys())
        newly_visited_dirs = visited_dirs.difference(prev_visited_dirs)
        if newly_visited_dirs:
            # Keep going: regenerate the newly visited directories as well.
            prev_visited_dirs = visited_dirs
            bazel_targets = set([
                bazel_utils.normalize_os_path_to_target(
                    d.replace(workspace_dir, "/"))
                for d in newly_visited_dirs
            ])
        else:
            break

    with metrics.Timer("bzl_gen_merge_build_files_ms") as merge_timer:
        merge_generated_build_files(generated_files)
    metrics.log_cumulative_rate(merge_timer.name,
                                merge_timer.get_interval_ms())
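# A minimal usage sketch (illustration only, not the repo's CLI wiring):
# callers pass bazel target patterns plus the generator callables, which are
# usually generator classes or functools.partial wrappers around them.
# `PyBuildGenerator` and the "//services/example" target below are hypothetical
# names used purely for this example.
#
#   regenerate_build_files(
#       ["//services/example"],
#       generators=[PyBuildGenerator],
#       verbose=True,
#       dry_run=True,
#   )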