def test_build_with_select():
    # type: () -> None
    """Ensures that we can parse BUILD files with select() clauses, and that
    they are appropriately preserved in rules."""
    parsed = build_parser.parse(BUILD_WITH_SELECT)
    rule = parsed.get_rule("a")
    assert rule.attr_map["name"] == "a"

    # Expansion should flatten the select() branches into one list.
    flattened = build_parser.maybe_expand_attribute(rule.attr_map["srcs"])
    assert len(flattened) == 7
    assert set(flattened) == {
        "common1", "common2", "windows1", "windows2", "osx1", "osx2", "linux1",
    }

    # The raw attribute should keep the Select object intact.
    raw = rule.attr_map["srcs"]
    assert len(raw) == 3
    selects = [item for item in raw if isinstance(item, build_parser.Select)]
    assert selects
    sel = selects[-1]
    assert len(sel.select_map) == 3
    assert sel.select_map["//conditions:windows"] == ["windows1", "windows2"]
    assert sel.select_map["//conditions:osx"] == ["osx1", "osx2"]
    assert sel.select_map["//conditions:linux"] == ["linux1"]
def maybe_traverse_non_bzl(self, expanded_target):
    """Regenerate the deps of a non-BUILD.in target, at most once per target.

    Skips entirely when deps generation is disabled, and records visited
    targets in self.visited_non_bzl_targets to avoid re-processing.
    """
    if self.skip_deps_generation:
        return
    pkg, _, name = expanded_target.partition(":")
    # A bare "//foo/bar" target is shorthand for "//foo/bar:bar".
    if not name:
        name = os.path.basename(pkg)
        expanded_target = pkg + ":" + name
    if expanded_target in self.visited_non_bzl_targets:
        return
    self.visited_non_bzl_targets.add(expanded_target)
    # Note that expanded_target is guaranteed to be an absolute target.
    rel_pkg_path = bazel_utils.normalize_relative_target_to_os_path(pkg[2:])
    abs_pkg_path = os.path.join(self.workspace_dir, rel_pkg_path)
    _, parsed = self.parsed_cache.get_build(abs_pkg_path)
    if parsed is None:
        return
    try:
        rule = parsed.get_rule(name)
    except KeyError:
        return
    deps = build_parser.maybe_expand_attribute(rule.attr_map.get("deps", []))
    self.regenerate(deps, cwd=abs_pkg_path)
def _collect_local_targets(self, pkg, parsed):
    """Index every python module defined by the py_library rules in `pkg`.

    Populates self.local_module_targets, mapping a dotted module path to
    (target, src_count). When two targets claim the same module, a conflict
    resolution policy (below) picks one and a warning is printed.

    pkg is an absolute label prefix ("//..."); parsed exposes
    get_rules_by_types (a parsed BUILD file).
    """
    path_prefix = pkg[2:]
    pkg_path = bazel_utils.normalize_relative_target_to_os_path(path_prefix)
    # NOTE: We purposely ignore dbx_py_binary targets, even through it's
    # technically allowed to use dbx_py_binary as deps.
    for lib in parsed.get_rules_by_types(PY_LIBRARY_RULE_TYPES):
        name = lib.attr_map["name"]
        target = pkg + ":" + name
        srcs = build_parser.maybe_expand_attribute(lib.attr_map.get("srcs", []))
        for src in srcs:
            assert src.endswith(".py"), "Invalid python src %s in %s" % (src, pkg)
            file_path = os.path.join(pkg_path, src)
            # Clip the file_path relative to the python_path if applicable.
            if self.python_path:
                if file_path == self.python_path:
                    file_path = file_path[len(self.python_path):]
                elif file_path.startswith(self.python_path + "/"):
                    file_path = file_path[len(self.python_path) + 1:]
            module_path = PythonPathMapping.convert_from_file_path_to_module(file_path)
            if module_path in self.local_module_targets:
                # Conflict: another target already claims this module.
                other, other_size = self.local_module_targets[module_path]
                other_pkg, _, other_name = other.partition(":")
                if not other_name:
                    # Bare "//foo/bar" label means "//foo/bar:bar".
                    other_name = os.path.basename(other_pkg)
                # Use the target with the more specific pkg path, or the
                # target with the least srcs
                overwrite = False
                if len(other_pkg) < len(pkg):
                    overwrite = True
                elif other_pkg == pkg:
                    if other_size > len(srcs):
                        overwrite = True
                    elif (other_size == len(srcs)
                          and os.path.basename(other_pkg) == other_name):
                        # use the most specific target name
                        overwrite = True
                if overwrite:
                    self.local_module_targets[module_path] = (target, len(srcs))
                # Warn regardless of which target won the conflict.
                print(("WARNING: Module %s specified in multiple targets: "
                       "%s vs %s (autogen_deps may pick the incorrect "
                       "target)") % (module_path, other, target))
                continue
            self.local_module_targets[module_path] = (target, len(srcs))
def parse_build_file(self, build_file):
    # type: (str) -> None
    """Parse one BUILD file and record which python files it declares.

    Populates self._py2_files / self._py3_files with the rule srcs (plus the
    package's __init__.py) according to each rule's python-version attributes,
    and caches the parser in self._build_file_parsers. Syntax errors in the
    BUILD file are reported to stderr and otherwise ignored.
    """
    if not os.path.isfile(build_file):
        return
    if self._verbose:
        print(
            "Parsing %s (%d bytes)" % (build_file, os.path.getsize(build_file)),
            file=sys.stderr,
        )
    # Renamed from `dir` to avoid shadowing the builtin.
    pkg_dir = os.path.dirname(build_file)
    bp = build_parser.BuildParser()
    if self._verbose:
        t0 = time.time()
    try:
        bp.parse_file(build_file)
    except SyntaxError as err:
        sys.stderr.write("whatpyver: Syntax error ignored in build file\n")
        sys.stderr.write("%s:%s:%s\n" % (
            os.path.relpath(build_file),
            err.lineno,
            err.text.rstrip() if err.text is not None else "",
        ))
        return
    finally:
        # Timing is reported even on the syntax-error path.
        if self._verbose:
            t1 = time.time()
            print("Parsing took %.1f msec" % ((t1 - t0) * 1000))
    self._build_file_parsers[build_file] = bp
    rules = bp.get_rules_by_types(RULE_TYPES)
    # Hoisted: the same __init__.py path was previously re-joined on every add.
    init_py = os.path.join(pkg_dir, "__init__.py")
    if not any(rule.attr_map.get("srcs") for rule in rules):
        # If the BUILD file is empty or lacks srcs, it trivially supports py2/py3
        # this helps support intermediate directories w/ only __init__
        self._py2_files.add(init_py)
        self._py3_files.add(init_py)
    for rule in rules:
        # NOTE: These defaults may change when build_tools/py/py.bzl changes.
        # python2_compatible is used by dbx_py_binary
        # python_version is used by py_binary
        # srcs_version is used by py_library
        # py2 is hard-coded off in this variant (python2 classification
        # disabled); the attribute notes above are kept for context.
        py2 = False
        py3 = (rule.attr_map.get("python3_compatible", True)
               and rule.attr_map.get("python_version", "PY3") != "PY2"
               and rule.attr_map.get("srcs_version", "PY3") != "PY2ONLY")
        for src in build_parser.maybe_expand_attribute(
                rule.attr_map.get("srcs", [])):
            src = os.path.join(pkg_dir, src)
            # Explicitly add __init__.py files, since those are typically not included
            # in BUILD files, but mypy relies on them for module existence, particularly
            # when follow_imports=skip in the mypy.ini
            if py2:
                self._py2_files.add(src)
                self._py2_files.add(init_py)
            if py3:
                self._py3_files.add(src)
                self._py3_files.add(init_py)
def run_rule(args, bazel_args, mode_args, target, rule):
    """Dispatch a parsed rule to its handler, expanding select() attributes.

    Currently only dbx_pkg_deb is supported; any other rule type raises
    bazel_utils.BazelError.
    """
    attributes = rule.attr_map.copy()
    # "visibility" is always a legal param, but we have no use for it here.
    attributes.pop("visibility", None)
    expanded = {
        key: build_parser.maybe_expand_attribute(value)
        for key, value in attributes.items()
    }
    if rule.rule_type == "dbx_pkg_deb":
        dbx_pkg_deb(args, bazel_args, mode_args, target, **expanded)
    else:
        raise bazel_utils.BazelError("invalid rule type: " + rule.rule_type)
def parse_build_file(self, build_file):
    # type: (str) -> None
    """Parse a BUILD file and classify each rule's srcs by python version.

    Srcs of rules that are py2-compatible go into self._py2_files, and
    py3-compatible srcs into self._py3_files (a src may land in both).
    Syntax errors are reported to stderr and the file is skipped.
    """
    if not os.path.isfile(build_file):
        return
    if self._verbose:
        print(
            "Parsing %s (%d bytes)" % (build_file, os.path.getsize(build_file)),
            file=sys.stderr,
        )
    build_dir = os.path.dirname(build_file)
    parser = build_parser.BuildParser()
    if self._verbose:
        start = time.time()
    try:
        parser.parse_file(build_file)
    except SyntaxError as err:
        sys.stderr.write("whatpyver: Syntax error ignored in build file\n")
        error_text = err.text.rstrip() if err.text is not None else ""
        sys.stderr.write(
            "%s:%s:%s\n" % (os.path.relpath(build_file), err.lineno, error_text))
        return
    finally:
        # Report timing even when parsing failed.
        if self._verbose:
            print("Parsing took %.1f msec" % ((time.time() - start) * 1000))
    self._build_file_parsers[build_file] = parser
    for rule in parser.get_rules_by_types(RULE_TYPES):
        # NOTE: These defaults may change when build_tools/py/py.bzl changes.
        # python2_compatible is used by dbx_py_binary
        # python_version is used by py_binary
        # srcs_version is used by py_library
        attrs = rule.attr_map
        py2_default = rule.rule_type not in RULE_TYPES_THAT_DEFAULT_PY3_ONLY
        supports_py2 = (
            attrs.get("python2_compatible", py2_default)
            or attrs.get("python_version", "PY3") == "PY2"
            or attrs.get("srcs_version", "PY3") in ("PY2", "PY2ONLY", "PY2AND3"))
        supports_py3 = (
            attrs.get("python3_compatible", True)
            and attrs.get("python_version", "PY3") != "PY2"
            and attrs.get("srcs_version", "PY3") != "PY2ONLY")
        for src in build_parser.maybe_expand_attribute(attrs.get("srcs", [])):
            full_path = os.path.join(build_dir, src)
            if supports_py2:
                self._py2_files.add(full_path)
            if supports_py3:
                self._py3_files.add(full_path)
def regenerate(self, bazel_targets: Iterable[str], cwd: str = ".") -> None:
    """Process pip-generating rules for every package reachable from the
    given targets, recursing through their deps first.

    External-repo targets ("@...") are filtered out, each directory is
    visited at most once (self.visited_dirs), and directories without a
    BUILD_INPUT file or without pip rules are skipped.
    """
    targets = bazel_utils.expand_bazel_target_dirs(
        self.workspace_dir,
        [t for t in bazel_targets if not t.startswith("@")],
        require_build_file=False,
        cwd=cwd,
    )
    for target in targets:
        assert target.startswith("//"), "Target must be absolute: " + target
        target_dir = bazel_utils.normalize_relative_target_to_os_path(target[2:])
        # Guard against revisiting (also breaks dependency cycles).
        if target_dir in self.visited_dirs:
            continue
        self.visited_dirs.add(target_dir)
        build_bzl = os.path.join(self.workspace_dir, target_dir, BUILD_INPUT)
        if not os.path.isfile(build_bzl):
            continue
        parsed = build_parser.parse_file(build_bzl)
        pip_rules = parsed.get_rules_by_types(PIP_GEN_RULE_TYPES)
        if not pip_rules:
            if self.verbose:
                print("No pip targets found in %s/%s" % (target_dir, BUILD_INPUT))
            continue
        # Depth-first: regenerate each pip rule's deps before this package.
        if not self.skip_deps_generation:
            for rule in pip_rules:
                self.regenerate(
                    build_parser.maybe_expand_attribute(
                        rule.attr_map.get("deps", [])),
                    cwd=os.path.join(self.workspace_dir, target_dir),
                )
        if self.verbose:
            head = "(dry run) " if self.dry_run else ""
            print(
                head + "Processing pip targets in %s: %s" %
                (target_dir, [rule.attr_map["name"] for rule in pip_rules]))
        # Dry-run mode still marks the directory visited but does no work.
        if self.dry_run:
            continue
        self.process_pip_rules(target_dir, pip_rules)
def test_select_aware_repr():
    # type: () -> None
    """Ensures that we can get a Starlark-valid string representation of a
    select() clause."""
    original_rule = build_parser.parse(BUILD_WITH_SELECT).get_rule("a")
    raw_srcs = original_rule.attr_map["srcs"]

    # To check the repr func, we re-parse the repr and see if the data is all
    # the same.
    srcs_repr = build_parser.get_select_aware_attribute_repr(raw_srcs)
    reparsed = build_parser.parse("rule1(name='a', srcs={})".format(srcs_repr))
    new_rule = reparsed.get_rule("a")

    expanded = build_parser.maybe_expand_attribute(new_rule.attr_map["srcs"])
    assert len(expanded) == 7
    assert set(expanded) == {
        "common1", "common2", "windows1", "windows2", "osx1", "osx2", "linux1",
    }

    new_raw = new_rule.attr_map["srcs"]
    assert len(new_raw) == 3
    selects = [v for v in new_raw if isinstance(v, build_parser.Select)]
    assert selects
    sel = selects[-1]
    assert len(sel.select_map) == 3
    assert sel.select_map["//conditions:windows"] == ["windows1", "windows2"]
    assert sel.select_map["//conditions:osx"] == ["osx1", "osx2"]
    assert sel.select_map["//conditions:linux"] == ["linux1"]
def generate_build_file(self, pkg, py_rules):
    """Emit a generated BUILD file (BUILD_OUTPUT) for the python rules of one
    package, optionally computing deps via autogen, then recurse into the
    deps that were referenced.

    pkg is an absolute label prefix ("//..."); py_rules are parsed rules.
    """
    to_traverse = []  # type: ignore[var-annotated]
    # Accumulates the generated file, one output line per list entry.
    output = [PY_LOAD_STATEMENT, ""]
    # XXX(patrick): maybe verify that the py_rules is a covering set
    # of all py files in the directory.
    for rule in py_rules:
        assert "name" in rule.attr_map, "Invalid rule %s in %s" % (
            rule.rule_type,
            pkg,
        )
        name = rule.attr_map["name"]
        main = rule.attr_map.get("main", None)
        pip_main = rule.attr_map.get("pip_main", None)
        srcs = build_parser.maybe_expand_attribute(
            rule.attr_map.get("srcs", None))
        stub_srcs = build_parser.maybe_expand_attribute(
            rule.attr_map.get("stub_srcs", None))
        autogen_deps = rule.attr_map.get("autogen_deps", True)
        deps = build_parser.maybe_expand_attribute(
            rule.attr_map.get("deps", []))
        # NOTE(review): substring test, not equality — any value containing
        # "strict" enables validation; presumably intentional, verify.
        validate = "strict" in rule.attr_map.get("validate", "strict")
        python_path = rule.attr_map.get("pythonpath", "")
        is_py2_compat = rule.attr_map.get("python2_compatible", True)
        is_py3_compat = rule.attr_map.get("python3_compatible", True)
        assert (
            is_py2_compat or is_py3_compat
        ), "Python target must be either python-2 or python-3 compatible"
        unknown_imports, unknown_froms = None, None
        if autogen_deps:
            # Autogen owns the deps list entirely; a manual list would be
            # silently discarded, so forbid it.
            assert (
                not deps
            ), "deps should be empty when autogen_deps is " + "enabled: %s:%s" % (
                pkg,
                name,
            )
            deps, unknown_imports, unknown_froms = self.compute_deps(
                python_path,
                pkg,
                rule.rule_type,
                name,
                srcs,
                stub_srcs,
                main,
                pip_main,
                validate,
                is_py3_compat,
            )
        # Remember deps (computed or hand-written) for recursive generation.
        to_traverse.extend(deps)
        if unknown_imports or unknown_froms:
            # Surface unresolved imports both in the generated file and on
            # stdout so the problem is hard to miss.
            output.append("# WARNING: autogen_deps may not be correct!!!")
            print("WARNING: autogen_deps may not be correct!!!")
            for i in sorted(unknown_imports):  # type: ignore[arg-type]
                output.append("# Unable to locate module: %s" % i)
                print(" Unable to locate module: %s" % i)
            for i in sorted(unknown_froms):  # type: ignore[arg-type]
                output.append("# Unable to locate module/variable: %s" % i)
                print(" Unable to locate module/variable: %s" % i)
        output.append("%s(" % rule.rule_type)
        output.append(" name = '%s'," % name)
        if pip_main:
            output.append(" pip_main = '%s'," % pip_main)
        if main:
            output.append(" main = '%s'," % main)
        if srcs is None:
            # No explicit srcs: binaries may derive srcs from main.
            if main:
                assert rule.rule_type in PY_BIN_RULE_TYPES, (
                    "Programming Error " + rule.rule_type)
                output.append(" srcs = ['%s']," % main)
            elif pip_main:
                assert rule.rule_type in PY_BIN_RULE_TYPES, (
                    "Programming Error " + rule.rule_type)
                output.append(" srcs = [],")
            else:
                output.append(" srcs = [],")
        if deps:
            output.append(" deps = [")
            for dep in deps:
                output.append(" '%s'," % dep)
            output.append(" ],")
        if rule.rule_type == "dbx_py_library":
            output.append(" validate = 'strict',")
        if python_path:
            # XXX(patrick): Fix py.bzl to accept pythonpath on all
            # py targets ...
            output.append(" pythonpath = '%s'," % python_path)
        output.append(")")
        output.append("")
    build_outdir = os.path.join(self.workspace_dir, pkg[2:])
    build_output = os.path.join(build_outdir, BUILD_OUTPUT)
    with open(build_output, "w") as f:
        f.write("\n".join(output))
    self.generated_files[build_outdir].append(build_output)
    # Recurse into every dep we saw so their BUILD files get generated too.
    if not self.skip_deps_generation:
        self.regenerate(set(to_traverse),
                        cwd=os.path.join(self.workspace_dir, pkg[2:]))
def generate_build_file(self, target_dir, pip_rules):
    """Build the pip targets of one package and emit a BUILD_OUTPUT file
    whose rules carry the wheel 'contents' attribute discovered from the
    built wheels.
    """
    # Temporarily make BUILD.in file a real BUILD file, and generate the
    # piplib zips. We will leave the temporary BUILD file around to ensure
    # we can recursively generate pips. The temporary BUILD files will be
    # overwritten by gazel as the last step.
    build = os.path.join(self.workspace_dir, target_dir, DEFAULT_BUILD)
    content = [PUBLIC_STATEMENT, PIP_LOAD_STATEMENT]
    for rule in pip_rules:
        # Copy so the parsed rule's attr_map is not mutated.
        attrs_copy = dict(rule.attr_map)
        attrs_copy["use_magic_mirror"] = self.use_magic_mirror
        content.append("")
        content.append("%s(" % rule.rule_type)
        for key, val in attrs_copy.items():
            content.append(" %s = %s," % (key, repr(val)))
        content.append(")")
    with open(build, "w") as fd:
        fd.write("\n".join(content))
    # Build all pip targets at once so their wheels exist under bazel-bin.
    self.build([
        "//%s:%s" % (target_dir, rule.attr_map["name"]) for rule in pip_rules
    ])
    out_dir = os.path.join(self.workspace_dir, "bazel-bin", target_dir)
    output = [PIP_LOAD_STATEMENT]
    # For each piplib rule, list the zipfile created by Bazel and insert
    # the 'contents' attribute. We'll rely on gazel to merge in the
    # remaining attributes.
    for rule in pip_rules:
        name = rule.attr_map["name"]
        # Wheel filenames use underscores where the target name has dashes.
        wheel_name = name.replace("-", "_")
        excludes = list(
            build_parser.maybe_expand_attribute(
                rule.attr_map.get("py_excludes", PIP_DEFAULT_EXCLUDES)))
        # Namespace packages must not ship their own __init__.py.
        for namespace_pkg in build_parser.maybe_expand_attribute(
                rule.attr_map.get("namespace_pkgs", [])):
            excludes.append(
                namespace_pkg.replace(".", "/") + "/__init__.py")
        # Maps interpreter build key -> list of files inside the wheel.
        contents = {}
        for build_key in _get_build_interpreters(rule.attr_map):
            wheel = os.path.join(
                out_dir,
                wheel_name + "-" + build_key,
                wheel_name + "-0.0.0-py2.py3-none-any.whl",
            )
            # A missing wheel (e.g. unsupported interpreter) is skipped.
            if not os.path.exists(wheel):
                continue
            with zipfile.ZipFile(wheel) as zf:
                contents[build_key] = [
                    f for f in vinst.wheel_contents(zf)
                    if not self.exclude_path(excludes, f)
                ]
        output.append("%s(" % rule.rule_type)
        output.append(" name = %s," % repr(name))
        output.append(" contents = {")
        # Sorted for deterministic output across runs.
        for build_key in sorted(contents):
            output.append(" %s: [" % repr(build_key))
            for filename in sorted(contents[build_key]):
                output.append(" %s," % repr(filename))
            output.append(" ],")
        output.append(" },")
        output.append(")")
        output.append("")
    build_outdir = os.path.join(self.workspace_dir, target_dir)
    build_output = os.path.join(build_outdir, BUILD_OUTPUT)
    with open(build_output, "w") as fp:
        fp.write("\n".join(output))
    # Proactively generate the BUILD file because rdeps may depend on "contents" being properly
    # filled.
    build_merge.merge_build_files(build, build_output, build)
    self.generated_files[build_outdir].append(build_output)