def dedup_file(fpath, contents_path):
    """Replace fpath with a hardlink into the content-addressed store.

    Files with identical SHA-256 digests end up sharing a single inode under
    contents_path, so duplicate payloads are stored once on disk.
    """
    # Don't hardlink empty files; it's fairly pointless.
    if os.stat(fpath).st_size == 0:
        return

    digest = sha256_file(fpath)
    hash_dir = os.path.join(contents_path, digest[:2])
    hash_path = os.path.join(hash_dir, digest[2:])
    _maybe_makedirs(hash_dir)

    try:
        # First worker to get here publishes the content-addressed copy.
        os.link(fpath, hash_path)
    except OSError as e:
        # EEXIST means some other worker already published this content, so
        # every file with this hash must reference that inode instead.
        # Anything else is a genuine failure.
        if e.errno != errno.EEXIST:
            raise bazel_utils.BazelError("hard link error", e, fpath, hash_path)
    else:
        return

    # Replace ourselves with a hardlink to the content-addressed file.
    os.remove(fpath)
    try:
        os.link(hash_path, fpath)
    except OSError as e:
        raise bazel_utils.BazelError("hard link error", e, hash_path, fpath)
def _get_itest_target_body_by_bazel_query(bazel_path, target):
    """Resolve a label to exactly one ITestTarget via bazel query.

    First looks for service-style rules; if none match, falls back to plain
    test targets (which carry no services). Raises BazelError unless the
    label expands to exactly one target.
    """
    targets = bazel_utils.targets_of_kinds_for_labels_xml(
        bazel_path,
        kinds=[
            "service_internal", "service_group_internal",
            "services_internal_test"
        ],
        labels=[
            target,
            "labels(tests, {})".format(
                target),  # to handle testsuite used in Go
        ],
    ).getElementsByTagName("rule")
    if len(targets) == 1:
        name = targets[0].getAttribute("name")
        return ITestTarget(name=name, has_services=True)
    if len(targets) == 0:
        # maybe we were given a test target that does not have a service dependency
        maybe_test_targets = bazel_utils.test_targets_for_labels(
            bazel_path, labels=[target])
        if len(maybe_test_targets) == 1:
            return ITestTarget(name=maybe_test_targets[0], has_services=False)
        # BUG FIX: previously this branch computed
        # `names = [t.getAttribute("name") for t in targets]` over an
        # already-empty `targets` list — dead work that always yielded [].
        raise bazel_utils.BazelError(
            "Please specify exactly one service target or one test target. Specified label expanded to service targets {!r} and test targets {!r}"
            .format([], maybe_test_targets))
    names = [t.getAttribute("name") for t in targets]
    raise bazel_utils.BazelError(
        "Please specify exactly one service target or one test target. Specified label expanded to service targets {!r}"
        .format(names))
def run_rule(args, bazel_args, mode_args, target, rule):
    """Dispatch a parsed packaging rule to its implementation."""
    attrs = dict(rule.attr_map)
    # "visibility" is always a legal param, but we have no use for it here.
    attrs.pop("visibility", None)
    if rule.rule_type != "dbx_pkg_deb":
        raise bazel_utils.BazelError("invalid rule type: " + rule.rule_type)
    dbx_pkg_deb(args, bazel_args, mode_args, target, **attrs)
def run_rule(args, bazel_args, mode_args, target, rule):
    """Dispatch a parsed packaging rule, expanding attribute values first."""
    attrs = dict(rule.attr_map)
    # "visibility" is always a legal param, but we have no use for it here.
    attrs.pop("visibility", None)
    expanded_attrs = {
        attr_name: build_parser.maybe_expand_attribute(attr_value)
        for attr_name, attr_value in attrs.items()
    }
    if rule.rule_type == "dbx_pkg_deb":
        dbx_pkg_deb(args, bazel_args, mode_args, target, **expanded_attrs)
    else:
        raise bazel_utils.BazelError("invalid rule type: " + rule.rule_type)
def check_for_duplicate_outputs(labels_to_outputs):
    """Raise BazelError if two labels produce outputs with the same basename."""
    by_basename = defaultdict(list)  # type: ignore[var-annotated]
    for label, outputs in six.iteritems(labels_to_outputs):
        for out in outputs:
            by_basename[os.path.basename(out)].append(label)

    # Use frozensets so the same group of clashing labels is reported once.
    clashing_groups = set()
    for labels in six.itervalues(by_basename):
        if len(labels) > 1:
            clashing_groups.add(frozenset(labels))

    if clashing_groups:
        pretty_output = ["\n ".join(labels) for labels in clashing_groups]
        raise bazel_utils.BazelError(
            "these labels provide the same outputs:\n %s" %
            "\n\n ".join(pretty_output))
def copy_manifest(manifest_path, out_dir):
    """Copy every manifest entry into out_dir using a worker pool.

    Each manifest line is "<dest>\\0<src>". Directory sources are rejected.
    The scratch ".contents" dedup dir is removed once all copies finish.
    """
    contents_path = os.path.join(out_dir, ".contents")
    work_items = []
    with open(manifest_path) as manifest:
        for raw_line in manifest:
            short_dest, src = raw_line.strip().split("\0")
            if os.path.isdir(src):
                raise bazel_utils.BazelError(
                    "A raw target pointing to a directory was detected: %s\n"
                    "Please use a filegroup instead." % short_dest)
            work_items.append((short_dest, src, out_dir, contents_path))
    # This only works on Python 3.
    with multiprocessing.Pool(
            initializer=_init_worker) as wpool:  # type: ignore[attr-defined]
        wpool.map_async(_copy_manifest_wrapper, work_items, chunksize=1).get(3600)
    shutil.rmtree(contents_path)
def copy_manifest(manifest_path, out_dir):
    """Copy every manifest entry into out_dir using a worker pool.

    Each manifest line is "<dest>\\0<src>". Directory sources are rejected.
    The scratch ".contents" dedup dir is removed once all copies finish.
    """
    contents_path = os.path.join(out_dir, ".contents")
    args = []
    with open(manifest_path) as manifest:
        for line in manifest:
            short_dest, src = line.strip().split("\0")
            if os.path.isdir(src):
                raise bazel_utils.BazelError(
                    "A raw target pointing to a directory was detected: %s\n"
                    "Please use a filegroup instead." % short_dest)
            args.append((short_dest, src, out_dir, contents_path))
    wpool = multiprocessing.Pool(initializer=_init_worker)
    try:
        # Use async + timeout to make sure KeyboardInterrupt fires.
        wpool.map_async(_copy_manifest_wrapper, args, chunksize=1).get(3600)
    except KeyboardInterrupt:
        wpool.terminate()
        wpool.join()
        raise
    else:
        # BUG FIX: the pool was previously never closed on the success path,
        # leaking worker processes until interpreter exit. Close and reap
        # the workers before removing the scratch dir.
        wpool.close()
        wpool.join()
    shutil.rmtree(contents_path)
def compute_deps(
    self,
    python_path,
    pkg,
    rule_type,
    name,
    srcs,
    stub_srcs,
    main,
    pip_main,
    validate,
    is_py3_compatible,
):
    """Compute the Bazel deps for a Python target by parsing its imports.

    Parses every source file (srcs + stub_srcs + main), maps each import /
    from-import to a Bazel target via the python-path mapping, and returns a
    3-tuple: (sorted deps, unknown plain imports, unknown from-imports).
    When `validate` is true, any unresolvable import is a hard failure.
    NOTE(review): `rule_type` is accepted but never read in this body —
    presumably kept for signature compatibility with callers; confirm.
    """
    # Treat stubs (and the optional main) as ordinary sources for analysis.
    srcs = (srcs or []) + (stub_srcs or [])
    if main:
        srcs = srcs + [main]
    mapping = self.python_path_mappings.get(python_path)
    # Modules provided by this target itself — imports of these need no dep.
    self_modules = mapping.compute_self_modules(pkg, srcs)
    # pkg[2:] strips the leading "//" from the Bazel package label.
    target_dir = bazel_utils.normalize_relative_target_to_os_path(pkg[2:])
    all_deps = set()  # type: ignore[var-annotated]
    all_unknown_imports = set()  # type: ignore[var-annotated]
    all_unknown_froms = set()  # type: ignore[var-annotated]
    for src in set(srcs):
        src = os.path.join(target_dir, src)
        module_path = PythonPathMapping.convert_from_file_path_to_module(
            src)
        filename, parsed = mapping.find_closest_bzl_or_build(module_path)
        if not filename:
            raise bazel_utils.BazelError(
                "Cannot locate %s:%s's source (or its closest BUILD / "
                "BUILD.in file): %s/%s" % (pkg, name, target_dir, src))
        # Turn the BUILD file's directory back into a "//"-style label.
        pkg_path = os.path.dirname(filename).replace(
            self.workspace_dir, "/")
        src_pkg = bazel_utils.normalize_os_path_to_target(pkg_path)
        if src_pkg != pkg:
            # A source owned by a different package is not ours to analyze.
            print(("WARNING: Skipping %s from %s:%s deps computation "
                   "since it belongs to %s") % (src, pkg, name, src_pkg))
            continue
        # .pyi stubs are always parsed as py3 regardless of the target flag.
        import_set, from_set = parse_imports(
            self.workspace_dir,
            src,
            py3_compatible=is_py3_compatible or src.endswith(".pyi"),
        )
        import_deps, unknown_imports = mapping.find_import_targets(
            src_pkg, self_modules, import_set)
        all_deps.update(import_deps)
        all_unknown_imports.update(unknown_imports)
        if validate:
            assert not unknown_imports, (
                "Unable to locate modules %s (imported by %s) in any "
                "library target (NOTE: bin and test targets are "
                "ignored)") % (unknown_imports, src)
        from_deps, unknown_froms = mapping.find_from_targets(
            src_pkg, self_modules, from_set)
        all_deps.update(from_deps)
        all_unknown_froms.update(unknown_froms)
        if validate:
            assert not unknown_froms, (
                "Unable to locate modules %s (imported by %s) in any "
                "library target (NOTE: bin and test targets are "
                "ignored)") % (unknown_froms, src)
    # One more lookup with an empty import set — presumably picks up deps
    # implied by the package itself rather than any single source; confirm.
    import_deps, unknown_imports = mapping.find_import_targets(
        pkg, self_modules, [])
    all_deps.update(import_deps)
    all_unknown_imports.update(unknown_imports)
    if pip_main:
        all_deps.add(pip_main)
    # A target never depends on itself, in either "//pkg:name" form or the
    # shorthand "//pkg" form when the target name matches the directory.
    all_deps.discard("%s:%s" % (pkg, name))
    if name == os.path.basename(target_dir):
        all_deps.discard("%s" % pkg)
    return sort_deps(pkg, all_deps), all_unknown_imports, all_unknown_froms
def _raise_on_glob_target(target): if target.endswith(("...", ":all", ":*", ":all-targets")): raise bazel_utils.BazelError( "Globs are not supported. Please specify explicit target path.")
def _build_targets(
    args,
    bazel_args,
    mode_args,
    pkg_target,
    pkg_prefix,
    name="",
    data=(),
    output_extension="tmp",
    file_map=None,
    preserve_symlinks=False,
    dedup_files=False,
):
    """Build `data` targets and stage their outputs into a package directory.

    Builds every label in `data`, then lays out the built outputs (plus any
    `file_map` entries) under a fresh "<target>-tmp" dir inside
    bazel-genfiles, optionally nested under `pkg_prefix`.
    Returns (pkg_dir, out_file): the staging directory and the final output
    file path (which is removed here so a later step can recreate it).
    Raises BazelError if the target name lacks `output_extension`.
    NOTE(review): `name` is accepted but not read in this body — confirm.
    """
    if not pkg_target.name.endswith(output_extension):
        raise bazel_utils.BazelError(
            "invalid target '%s' - must end with .%s" %
            (pkg_target.label, output_extension))
    # Treat data as our list of targets.
    # NOTE(review): assumes `data` is a list when non-empty — concatenating
    # the default tuple with a list below would raise TypeError; confirm
    # that callers always pass lists.
    targets = [bazel_utils.BazelTarget(x) for x in data]
    if targets:
        bazel_cmd = [args.bazel_path] + bazel_args + ["build"
                                                      ] + mode_args + data
        subprocess.check_call(bazel_cmd)
    # ask bazel where bazel-bin and bazel-genfiles are, instead of relying on
    # the symlinks, to support read-only workspaces
    pkg_dir_root = bazel_utils.check_output_silently(
        [args.bazel_path] + bazel_args + ["info"] + mode_args +
        ["bazel-genfiles"]).strip()
    out_dir_root = bazel_utils.check_output_silently([args.bazel_path] +
                                                     bazel_args + ["info"] +
                                                     mode_args +
                                                     ["bazel-bin"]).strip()
    pkg_dir = os.path.join(pkg_dir_root, pkg_target.package,
                           pkg_target.name + "-tmp")
    out_file = os.path.join(out_dir_root, pkg_target.package, pkg_target.name)
    # With a pkg_prefix, contents are staged into a subdir of pkg_dir so the
    # final archive nests them under that prefix.
    out_dir = pkg_dir
    if pkg_prefix:
        out_dir = os.path.join(pkg_dir, pkg_prefix.strip("/"))
    # Prep and move things into the pkg_dir so they get squashed.
    if os.path.exists(out_file):
        os.remove(out_file)
    if os.path.exists(pkg_dir):
        shutil.rmtree(pkg_dir)
    os.makedirs(pkg_dir)
    if pkg_dir != out_dir:
        if os.path.exists(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)
    if file_map:
        # file_map maps destination paths (relative to pkg_dir) to sources
        # relative to the target's package directory.
        for dst, src in six.iteritems(file_map):
            if src:
                pkg_dst = os.path.join(pkg_dir, dst.strip("/"))
                pkg_dst_dir = os.path.dirname(pkg_dst)
                if not os.path.exists(pkg_dst_dir):
                    os.makedirs(pkg_dst_dir)
                shutil.copy2(os.path.join(pkg_target.package, src), pkg_dst)
            else:
                # If there is no source, assume it's a directory.
                pkg_dst_dir = os.path.join(pkg_dir, dst.strip("/"))
                if not os.path.exists(pkg_dst_dir):
                    os.makedirs(pkg_dst_dir)
    if targets:
        copy_labels(
            [t.label for t in targets],
            out_dir,
            preserve_symlinks=preserve_symlinks,
            _dedup_files=dedup_files,
            bazel_args=bazel_args,
            bazel_build_args=mode_args,
        )
    return pkg_dir, out_file