def main():
    """Parse CLI options and gather build metadata (hostname, build time, user,
    git hash, repo cleanliness) for version-info generation.

    Exits via parser.error() (SystemExit) when --version or the output path
    is missing.
    """
    logging.basicConfig(level=logging.INFO)
    parser = optparse.OptionParser(
        usage="usage: %prog --version=<version> <output path>")
    parser.add_option("-v", "--version", help="Set version number", type="string",
                      dest="version", metavar="VERSION")
    parser.add_option("-b", "--build-type", help="Set build type", type="string",
                      dest="build_type", metavar="BUILD_TYPE")
    parser.add_option("-g", "--git-hash", help="Set git hash", type="string",
                      dest="git_hash", metavar="GIT_HASH")
    opts, args = parser.parse_args()
    if not opts.version:
        # parser.error() raises SystemExit itself; the old trailing
        # sys.exit(1) after it was unreachable and has been dropped.
        parser.error("no version number specified")
    if len(args) != 1:
        parser.error("no output path specified")
    output_path = args[0]

    hostname = check_output(["hostname", "-f"]).strip()
    build_time = "%s %s" % (strftime("%d %b %Y %H:%M:%S", localtime()), time.tzname[0])
    username = os.getenv("USER")

    if opts.git_hash:
        # Git hash provided on the command line.
        git_hash = opts.git_hash
        clean_repo = "true"
    else:
        try:
            # No command line git hash, find it in the local git repository.
            git_hash = check_output(["git", "rev-parse", "HEAD"]).strip()
            clean_repo = subprocess.call(
                "git diff --quiet && git diff --cached --quiet", shell=True) == 0
            clean_repo = str(clean_repo).lower()
        except Exception:
            # py3-compatible except clause; the bound exception was unused.
            # If the git commands failed, we're probably building outside of a git
            # repository.
            logging.info(
                "Build appears to be outside of a git repository... " +
                "continuing without repository information.")
            git_hash = "non-git-build"
            clean_repo = "true"
def _run_iwyu_tool(paths):
    """Run iwyu_tool.py over 'paths' and return its combined stdout/stderr.

    Exits the process with a colorized diagnostic if the tool returns nonzero
    or its output contains a fatal IWYU/clang error.
    """
    iwyu_args = ['--max_line_length=256']
    for m in glob.glob(os.path.join(_MAPPINGS_DIR, "*.imp")):
        iwyu_args.append("--mapping_file=%s" % os.path.abspath(m))
    cmdline = [_IWYU_TOOL, '-p', _BUILD_DIR]
    cmdline.extend(paths)
    cmdline.append('--')
    cmdline.extend(iwyu_args)
    # iwyu_tool.py requires include-what-you-use on the path
    env = os.environ.copy()
    env['PATH'] = "%s:%s" % (_TOOLCHAIN_DIR, env['PATH'])

    def crash(output):
        # Single exit point for failures: show both the command and its output.
        sys.exit((Colors.RED + "Failed to run IWYU tool.\n\n" + Colors.RESET +
                  Colors.YELLOW + "Command line:\n" + Colors.RESET + "%s\n\n" +
                  Colors.YELLOW + "Output:\n" + Colors.RESET +
                  "%s") % (" ".join(cmdline), output))
    try:
        output = check_output(cmdline, env=env, stderr=subprocess.STDOUT)
        if '\nFATAL ERROR: ' in output or \
           'Assertion failed: ' in output or \
           _RE_CLANG_ERROR.search(output):
            crash(output)
        return output
    except subprocess.CalledProcessError as e:  # py3-compatible except syntax
        crash(e.output)
def check_no_local_commits():
    """
    Verify HEAD matches the commit pushed upstream via Jenkins; if local
    commits exist, list them and ask the user whether to continue.
    """
    upstream = check_output(GET_UPSTREAM_COMMIT_SCRIPT).strip().decode('utf-8')
    head = check_output(["git", "rev-parse", "HEAD"]).strip().decode('utf-8')
    if head == upstream:
        return
    print("The repository appears to have local commits:")
    subprocess.check_call(["git", "log", "--oneline", "%s..HEAD" % upstream])
    print(Colors.RED + "This should not be an official release!" + Colors.RESET)
    if not confirm_prompt("Continue?"):
        sys.exit(1)
def rev_parse(rev):
    """Run git rev-parse, returning the sha1, or None if not found"""
    try:
        sha = check_output(['git', 'rev-parse', rev], stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        return None
    return sha.strip()
def check_no_local_commits():
    """
    Check that there are no local commits which haven't been pushed to the
    upstream repo via Jenkins.
    """
    # Decode so the equality check below compares str to str under Python 3
    # (check_output returns bytes there).
    upstream_commit = check_output(GET_UPSTREAM_COMMIT_SCRIPT).strip().decode('utf-8')
    cur_commit = check_output(["git", "rev-parse", "HEAD"]).strip().decode('utf-8')
    if upstream_commit == cur_commit:
        return
    # print() calls replace the Python-2-only print statements.
    print("The repository appears to have local commits:")
    subprocess.check_call(["git", "log", "--oneline", "%s..HEAD" % upstream_commit])
    print(Colors.RED + "This should not be an official release!" + Colors.RESET)
    if not confirm_prompt("Continue?"):
        sys.exit(1)
def _run_iwyu_tool(paths):
    """Invoke iwyu_tool.py for 'paths', returning its output on success.

    Terminates the process with a formatted error (command line + output) on
    a nonzero exit or when a fatal IWYU/clang diagnostic appears in output.
    """
    iwyu_args = ['--max_line_length=256']
    for m in glob.glob(os.path.join(_MAPPINGS_DIR, "*.imp")):
        iwyu_args.append("--mapping_file=%s" % os.path.abspath(m))
    cmdline = [_IWYU_TOOL, '-p', _BUILD_DIR]
    cmdline.extend(paths)
    cmdline.append('--')
    cmdline.extend(iwyu_args)
    # iwyu_tool.py requires include-what-you-use on the path
    env = os.environ.copy()
    env['PATH'] = "%s:%s" % (_TOOLCHAIN_DIR, env['PATH'])

    def crash(output):
        # Report the failing command line alongside whatever the tool printed.
        sys.exit((Colors.RED + "Failed to run IWYU tool.\n\n" + Colors.RESET +
                  Colors.YELLOW + "Command line:\n" + Colors.RESET + "%s\n\n" +
                  Colors.YELLOW + "Output:\n" + Colors.RESET +
                  "%s") % (" ".join(cmdline), output))
    try:
        output = check_output(cmdline, env=env, stderr=subprocess.STDOUT)
        if '\nFATAL ERROR: ' in output or \
           'Assertion failed: ' in output or \
           _RE_CLANG_ERROR.search(output):
            crash(output)
        return output
    except subprocess.CalledProcessError as e:  # py3-compatible except syntax
        crash(e.output)
def _get_file_list_from_git():
    """Return the changed-since-upstream paths that match _RE_SOURCE_FILE.

    Each path is decoded exactly once (the previous version decoded every
    path twice: once in the filter and once for the result).
    """
    upstream_commit = get_upstream_commit()
    out = check_output(["git", "diff", "--name-only", upstream_commit]).splitlines()
    paths = [l.decode('utf-8') for l in out]
    return [p for p in paths if _RE_SOURCE_FILE.search(p)]
def get_branches(remote):
    """
    Fetch a dictionary mapping branch name to SHA1 hash from the given remote.
    """
    listing = check_output(["git", "ls-remote", remote, "refs/heads/*"])
    branches = {}
    for line in listing.splitlines():
        sha, ref = line.split("\t")
        branches[ref.replace("refs/heads/", "", 1)] = sha
    return branches
def get_branches(remote):
    """
    Fetch a dictionary mapping branch name to SHA1 hash from the given remote.
    """
    out = check_output(["git", "ls-remote", remote, "refs/heads/*"])
    ret = {}
    for entry in out.splitlines():
        # Each ls-remote line is "<sha>\t<ref>".
        sha, ref = entry.split("\t")
        name = ref.replace("refs/heads/", "", 1)
        ret[name] = sha
    return ret
def check_for_command(command):
    """
    Ensure that the specified command is available on the PATH.

    Re-raises subprocess.CalledProcessError (after logging) when 'which'
    cannot locate the command.
    """
    try:
        # Output is discarded; only the exit status matters.
        check_output(['which', command])
    except subprocess.CalledProcessError as err:
        logging.error("Unable to find %s command", command)
        raise err
def main():
    """Collect build metadata (version, host, time, user, git state) from CLI
    options and the environment.

    parser.error() raises SystemExit on bad usage, so the old sys.exit(1)
    calls that followed it were unreachable and have been removed.
    """
    logging.basicConfig(level=logging.INFO)
    parser = optparse.OptionParser(
        usage="usage: %prog --version=<version> <output path>")
    parser.add_option("-v", "--version", help="Set version number", type="string",
                      dest="version", metavar="VERSION")
    parser.add_option("-b", "--build-type", help="Set build type", type="string",
                      dest="build_type", metavar="BUILD_TYPE")
    parser.add_option("-g", "--git-hash", help="Set git hash", type="string",
                      dest="git_hash", metavar="GIT_HASH")
    opts, args = parser.parse_args()
    if not opts.version:
        parser.error("no version number specified")
    if len(args) != 1:
        parser.error("no output path specified")
    output_path = args[0]

    hostname = check_output(["hostname", "-f"]).strip()
    build_time = "%s %s" % (strftime("%d %b %Y %H:%M:%S", localtime()), time.tzname[0])
    username = os.getenv("USER")

    if opts.git_hash:
        # Git hash provided on the command line.
        git_hash = opts.git_hash
        clean_repo = "true"
    else:
        try:
            # No command line git hash, find it in the local git repository.
            git_hash = check_output(["git", "rev-parse", "HEAD"]).strip()
            clean_repo = subprocess.call(
                "git diff --quiet && git diff --cached --quiet", shell=True) == 0
            clean_repo = str(clean_repo).lower()
        except Exception:
            # py3-compatible except clause; the bound exception was unused.
            # If the git commands failed, we're probably building outside of a git
            # repository.
            logging.info(
                "Build appears to be outside of a git repository... " +
                "continuing without repository information.")
            git_hash = "non-git-build"
            clean_repo = "true"
def get_gerrit_ssh_command():
    """Return the ssh invocation (as an argv list) for the 'gerrit' remote,
    derived from its configured ssh:// URL."""
    url = check_output(["git", "config", "--get", "remote.gerrit.url"])
    match = re.match(r'ssh://(.+)@(.+):(\d+)/.+', url)
    if not match:
        raise Exception("expected gerrit remote to be an ssh://user@host:port/ URL: %s" % url)
    user, host, port = match.groups()
    if host != GERRIT_HOST:
        raise Exception("unexpected gerrit host %s in remote 'gerrit'. Expected %s" % (
            host, GERRIT_HOST))
    return ["ssh", "-p", port, "-l", user, host]
def is_fast_forward(ancestor, child):
    """
    Return True if 'child' is a descendent of 'ancestor' and thus could be
    fast-forward merged.
    """
    try:
        merge_base = check_output(['git', 'merge-base', ancestor, child]).strip()
    except subprocess.CalledProcessError:
        # If either of the commits is unknown, count this as a non-fast-forward.
        # (Narrowed from a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt.)
        return False
    return merge_base == rev_parse(ancestor)
def is_fast_forward(ancestor, child):
    """
    Return True if 'child' is a descendent of 'ancestor' and thus could be
    fast-forward merged.
    """
    try:
        merge_base = check_output(['git', 'merge-base', ancestor, child]).strip()
    except subprocess.CalledProcessError:
        # git merge-base fails when either commit is unknown; treat that as
        # non-fast-forward. Narrowed from a bare 'except:' so that
        # KeyboardInterrupt/SystemExit propagate normally.
        return False
    return merge_base == rev_parse(ancestor)
def find_client_jars(path):
    """
    Return a list of jars within 'path' to be checked for compatibility.

    Jars whose names contain any of the exclusion tokens below (docs, source
    bundles, tests, unshaded/aux artifacts) are filtered out.
    """
    out = check_output(["find", path, "-name", "*.jar"]).decode('utf-8')
    exclusions = ("-javadoc", "-sources", "-test-sources", "-tests",
                  "-unshaded", "buildSrc", "gradle-wrapper", "kudu-backup",
                  "kudu-hive", "kudu-jepsen", "kudu-subprocess")
    return [jar for jar in set(out.splitlines())
            if not any(token in jar for token in exclusions)]
def objdump_private_headers(binary_path):
    """
    Run `objdump -p` on the given binary.
    Returns a list with one line of objdump output per record, or an empty
    list (after logging) if objdump exits nonzero.
    """
    check_for_command('objdump')
    try:
        raw = check_output(["objdump", "-p", binary_path])
    except subprocess.CalledProcessError as err:
        logging.error(err)
        return []
    return raw.strip().decode("utf-8").split("\n")
def dump_load_commands_macos(binary_path):
    """
    Run `otool -l` on the given binary and return the output as a list of
    lines. 'otool -l' is used instead of 'objdump -p' because 'otool'
    supports Universal Mach-O binaries.

    Re-raises subprocess.CalledProcessError (after logging) on failure.
    """
    check_for_command('otool')
    try:
        raw = check_output(["otool", "-l", binary_path])
    except subprocess.CalledProcessError as err:
        logging.error("Failed to run %s", err.cmd)
        raise err
    return raw.strip().decode("utf-8").split("\n")
def check_gerrit_remote():
    """
    Checks that there is a remote named 'gerrit' set up correctly. Otherwise,
    exits with an error message.
    """
    try:
        # Decode so the regex match below works on str under Python 3.
        url = check_output(['git', 'config', '--local', '--get',
                            'remote.gerrit.url']).strip().decode('utf-8')
    except subprocess.CalledProcessError:
        # print() calls replace the Python-2-only 'print >>sys.stderr' form.
        print("No remote named 'gerrit'. Please set one up following ", file=sys.stderr)
        print("the contributor guide.", file=sys.stderr)
        sys.exit(1)
    if not GERRIT_URL_RE.match(url):
        print("Unexpected URL for remote 'gerrit'.", file=sys.stderr)
        print(" Got: ", url, file=sys.stderr)
        print(" Expected to find host '%s' in the URL" % GERRIT_HOST, file=sys.stderr)
        sys.exit(1)
def check_apache_remote():
    """
    Checks that there is a remote named 'apache' set up correctly. Otherwise,
    exits with an error message.
    """
    try:
        # Decode so the comparison with APACHE_REPO (a str) works on Python 3.
        url = check_output(['git', 'config', '--local', '--get',
                            'remote.apache.url']).strip().decode('utf-8')
    except subprocess.CalledProcessError:
        # print() calls replace the Python-2-only 'print >>sys.stderr' form.
        print("No remote named 'apache'. Please set one up, for example with: ", file=sys.stderr)
        print(" git remote add apache", APACHE_REPO, file=sys.stderr)
        sys.exit(1)
    if url != APACHE_REPO:
        print("Unexpected URL for remote 'apache'.", file=sys.stderr)
        print(" Got: ", url, file=sys.stderr)
        print(" Expected:", APACHE_REPO, file=sys.stderr)
        sys.exit(1)
def check_gerrit_remote():
    """
    Exit with an error message unless a remote named 'gerrit' exists and its
    URL matches the expected gerrit URL pattern.
    """
    cmd = ['git', 'config', '--local', '--get', 'remote.gerrit.url']
    try:
        url = check_output(cmd).strip().decode('utf-8')
    except subprocess.CalledProcessError:
        print("No remote named 'gerrit'. Please set one up following ", file=sys.stderr)
        print("the contributor guide (git remote add gerrit %s)." % GERRIT_URL, file=sys.stderr)
        sys.exit(1)
    if GERRIT_URL_RE.match(url):
        return
    print("Unexpected URL for remote 'gerrit'.", file=sys.stderr)
    print(" Got: ", url, file=sys.stderr)
    print(" Expected to find URL like '%s'" % GERRIT_URL, file=sys.stderr)
    sys.exit(1)
def find_client_jars(path):
    """
    Return a list of jars within 'path' to be checked for compatibility.
    """
    # Decode so os.path.split/re.match below operate on str under Python 3,
    # where check_output returns bytes.
    all_jars = set(
        check_output(["find", path, "-name", "*.jar"]).decode('utf-8').splitlines())
    # If we see "original-foo.jar", then remove "foo.jar" since that's a post-shading
    # duplicate.
    dups = []
    for j in all_jars:
        dirname, name = os.path.split(j)
        m = re.match("original-(.+)", name)
        if m:
            dups.append(os.path.join(dirname, m.group(1)))
    for d in dups:
        # discard (not remove): the shaded twin may be absent, and that
        # previously raised KeyError.
        all_jars.discard(d)
    return [j for j in all_jars if ("-tests" not in j and
                                    "-sources" not in j and
                                    "-with-dependencies" not in j)]
def check_gerrit_remote():
    """
    Checks that there is a remote named 'gerrit' set up correctly. Otherwise,
    exits with an error message.
    """
    try:
        # Decode so the regex match below sees a str under Python 3.
        url = check_output(['git', 'config', '--local', '--get',
                            'remote.gerrit.url']).strip().decode('utf-8')
    except subprocess.CalledProcessError:
        # Converted from Python-2-only print statements.
        print("No remote named 'gerrit'. Please set one up following ", file=sys.stderr)
        print("the contributor guide.", file=sys.stderr)
        sys.exit(1)
    if not GERRIT_URL_RE.match(url):
        print("Unexpected URL for remote 'gerrit'.", file=sys.stderr)
        print(" Got: ", url, file=sys.stderr)
        print(" Expected to find host '%s' in the URL" % GERRIT_HOST, file=sys.stderr)
        sys.exit(1)
def check_apache_remote():
    """
    Checks that there is a remote named 'apache' set up correctly. Otherwise,
    exits with an error message.
    """
    try:
        # Decode: APACHE_REPO is a str, so the comparison must be str vs str.
        url = check_output(['git', 'config', '--local', '--get',
                            'remote.apache.url']).strip().decode('utf-8')
    except subprocess.CalledProcessError:
        # Converted from Python-2-only print statements.
        print("No remote named 'apache'. Please set one up, for example with: ", file=sys.stderr)
        print(" git remote add apache", APACHE_REPO, file=sys.stderr)
        sys.exit(1)
    if url != APACHE_REPO:
        print("Unexpected URL for remote 'apache'.", file=sys.stderr)
        print(" Got: ", url, file=sys.stderr)
        print(" Expected:", APACHE_REPO, file=sys.stderr)
        sys.exit(1)
def find_client_jars(path):
    """
    Return a list of jars within 'path' to be checked for compatibility.
    """
    # check_output returns bytes on Python 3; decode once up front so the
    # regex and path operations below work on str.
    listing = check_output(["find", path, "-name", "*.jar"]).decode('utf-8')
    all_jars = set(listing.splitlines())
    # If we see "original-foo.jar", then remove "foo.jar" since that's a post-shading
    # duplicate.
    dups = []
    for j in all_jars:
        dirname, name = os.path.split(j)
        m = re.match("original-(.+)", name)
        if m:
            dups.append(os.path.join(dirname, m.group(1)))
    for d in dups:
        # discard (not remove) tolerates the shaded twin being absent, which
        # previously raised KeyError.
        all_jars.discard(d)
    return [
        j for j in all_jars
        if ("-tests" not in j and
            "-sources" not in j and
            "-with-dependencies" not in j)
    ]
def check_gerrit_remote():
    """
    Validate the 'gerrit' remote: it must exist and its URL must match the
    expected pattern; otherwise print an explanation and exit.
    """
    try:
        raw = check_output(
            ['git', 'config', '--local', '--get', 'remote.gerrit.url'])
    except subprocess.CalledProcessError:
        print("No remote named 'gerrit'. Please set one up following ", file=sys.stderr)
        print("the contributor guide (git remote add gerrit %s)." % GERRIT_URL, file=sys.stderr)
        sys.exit(1)
    url = raw.strip().decode('utf-8')
    if not GERRIT_URL_RE.match(url):
        print("Unexpected URL for remote 'gerrit'.", file=sys.stderr)
        print(" Got: ", url, file=sys.stderr)
        print(" Expected to find URL like '%s'" % GERRIT_URL, file=sys.stderr)
        sys.exit(1)
def main():
    """Generate the auto-generated version-info C header.

    Parses --version/--build-type/--git-hash plus an output path, gathers
    build metadata (hostname, timestamp, user, git hash, repo cleanliness),
    and writes the header. An identifying hash of the inputs (minus the
    timestamp) is embedded in a comment; if it matches the existing file's
    hash the file is left untouched so binaries aren't needlessly rebuilt.

    Returns 0 on success; bad usage exits via parser.error().
    """
    parser = optparse.OptionParser(
        usage="usage: %prog --version=<version> <output path>")
    parser.add_option("-v", "--version", help="Set version number", type="string",
                      dest="version", metavar="VERSION")
    parser.add_option("-b", "--build-type", help="Set build type", type="string",
                      dest="build_type", metavar="BUILD_TYPE")
    parser.add_option("-g", "--git-hash", help="Set git hash", type="string",
                      dest="git_hash", metavar="GIT_HASH")
    opts, args = parser.parse_args()
    if not opts.version:
        # parser.error() raises SystemExit itself.
        parser.error("no version number specified")
    if len(args) != 1:
        parser.error("no output path specified")
    output_path = args[0]

    hostname = check_output(["hostname", "-f"]).strip().decode('utf-8')
    build_time = "%s %s" % (strftime("%d %b %Y %H:%M:%S", localtime()), time.tzname[0])
    username = os.getenv("USER")

    if opts.git_hash:
        # Git hash provided on the command line.
        git_hash = opts.git_hash
        clean_repo = "true"
    else:
        try:
            # No command line git hash, find it in the local git repository.
            git_hash = check_output(["git", "rev-parse", "HEAD"]).strip().decode('utf-8')
            clean_repo = subprocess.call(
                "git diff --quiet && git diff --cached --quiet", shell=True) == 0
            clean_repo = str(clean_repo).lower()
        except Exception:
            # If the git commands failed, we're probably building outside of a git
            # repository.
            logging.info(
                "Build appears to be outside of a git repository... " +
                "continuing without repository information.")
            git_hash = "non-git-build"
            clean_repo = "true"

    version_string = opts.version
    build_type = opts.build_type

    # Add the Jenkins build ID
    build_id = os.getenv("BUILD_ID", "")

    # Calculate an identifying hash based on all of the variables except for the
    # timestamp. We put this hash in a comment, and use it to check whether to
    # re-generate the file. If it hasn't changed since a previous run, we don't
    # re-write the file. This avoids having to rebuild all binaries on every build.
    identifying_hash = hashlib.sha1(
        repr((git_hash, hostname, username, clean_repo, build_id)).encode('utf-8')).hexdigest()

    if output_up_to_date(output_path, identifying_hash):
        return 0

    d = os.path.dirname(output_path)
    # BUG FIX: a bare filename yields dirname "" and os.makedirs("") raises;
    # only create the directory when there is one.
    if d and not os.path.exists(d):
        os.makedirs(d)
    with open(output_path, "w") as f:
        f.write("""
// THIS FILE IS AUTO-GENERATED! DO NOT EDIT!
//
// id_hash=%(identifying_hash)s
#ifndef VERSION_INFO_H_
#define VERSION_INFO_H_
#define KUDU_GIT_HASH "%(git_hash)s"
#define KUDU_BUILD_HOSTNAME "%(hostname)s"
#define KUDU_BUILD_TIMESTAMP "%(build_time)s"
#define KUDU_BUILD_USERNAME "%(username)s"
#define KUDU_BUILD_CLEAN_REPO %(clean_repo)s
#define KUDU_BUILD_ID "%(build_id)s"
#define KUDU_BUILD_TYPE "%(build_type)s"
#define KUDU_VERSION_STRING "%(version_string)s"
#endif
""" % locals())
    return 0
def get_git_hash(revname):
    """
    Convert 'revname' to its SHA-1 hash.
    """
    out = check_output(["git", "rev-parse", revname], cwd=get_repo_dir())
    return out.decode('utf-8').strip()
def describe_commit(rev):
    """
    Return a one-line description of a commit.
    """
    cmd = ['git', 'log', '--color', '-n1', '--oneline', rev]
    return check_output(cmd).strip().decode('utf-8')
def get_git_hash(revname):
    """
    Convert 'revname' to its SHA-1 hash.
    """
    repo_dir = get_repo_dir()
    return check_output(["git", "rev-parse", revname], cwd=repo_dir).strip()
def get_committer_email(rev):
    """
    Return the email address of the committer of the given revision.
    """
    cmd = ['git', 'log', '-n1', '--pretty=format:%ce', rev]
    return check_output(cmd).strip()
def current_ref_for_gerrit_number(change_num):
    """Query gerrit for the ref of the current patch set of change 'change_num'."""
    query = get_gerrit_ssh_command() + [
        "gerrit", "query", "--current-patch-set",
        "--format", "JSON", "change:%d" % change_num]
    raw = check_output(query)
    # gerrit emits one JSON record per line; the first is the change itself.
    record = json.loads(raw.split("\n")[0])
    return record['currentPatchSet']['ref']
def main():
    """Generate the auto-generated version-info C header.

    Gathers version/build metadata from CLI options, the environment, and the
    local git repository, then writes the header unless the identifying hash
    (everything except the timestamp) is unchanged from the existing file.

    Modernized for Python 3 compatibility: hashlib.sha1 replaces the removed
    'sha' module, open()/f.write() replace file()/'print >>f', and subprocess
    output is decoded from bytes.
    """
    import hashlib  # local import: replaces the py2-only 'sha' module
    logging.basicConfig(level=logging.INFO)
    parser = optparse.OptionParser(
        usage="usage: %prog --version=<version> <output path>")
    parser.add_option("-v", "--version", help="Set version number", type="string",
                      dest="version", metavar="VERSION")
    parser.add_option("-b", "--build-type", help="Set build type", type="string",
                      dest="build_type", metavar="BUILD_TYPE")
    parser.add_option("-g", "--git-hash", help="Set git hash", type="string",
                      dest="git_hash", metavar="GIT_HASH")
    opts, args = parser.parse_args()
    if not opts.version:
        # parser.error() raises SystemExit itself.
        parser.error("no version number specified")
    if len(args) != 1:
        parser.error("no output path specified")
    output_path = args[0]

    hostname = check_output(["hostname", "-f"]).strip().decode('utf-8')
    build_time = "%s %s" % (strftime("%d %b %Y %H:%M:%S", localtime()), time.tzname[0])
    username = os.getenv("USER")

    if opts.git_hash:
        # Git hash provided on the command line.
        git_hash = opts.git_hash
        clean_repo = "true"
    else:
        try:
            # No command line git hash, find it in the local git repository.
            git_hash = check_output(["git", "rev-parse", "HEAD"]).strip().decode('utf-8')
            clean_repo = subprocess.call(
                "git diff --quiet && git diff --cached --quiet", shell=True) == 0
            clean_repo = str(clean_repo).lower()
        except Exception:
            # If the git commands failed, we're probably building outside of a git
            # repository.
            logging.info(
                "Build appears to be outside of a git repository... " +
                "continuing without repository information.")
            git_hash = "non-git-build"
            clean_repo = "true"

    version_string = opts.version
    build_type = opts.build_type

    # Add the Jenkins build ID
    build_id = os.getenv("BUILD_ID", "")

    # Calculate an identifying hash based on all of the variables except for the
    # timestamp. We put this hash in a comment, and use it to check whether to
    # re-generate the file. If it hasn't changed since a previous run, we don't
    # re-write the file. This avoids having to rebuild all binaries on every build.
    identifying_hash = hashlib.sha1(
        repr((git_hash, hostname, username, clean_repo, build_id)).encode('utf-8')).hexdigest()

    if output_up_to_date(output_path, identifying_hash):
        return 0

    d = os.path.dirname(output_path)
    # Only create a directory when output_path actually has one:
    # os.makedirs("") raises.
    if d and not os.path.exists(d):
        os.makedirs(d)
    with open(output_path, "w") as f:
        f.write("""
// THIS FILE IS AUTO-GENERATED! DO NOT EDIT!
//
// id_hash=%(identifying_hash)s
#ifndef VERSION_INFO_H_
#define VERSION_INFO_H_
#define KUDU_GIT_HASH "%(git_hash)s"
#define KUDU_BUILD_HOSTNAME "%(hostname)s"
#define KUDU_BUILD_TIMESTAMP "%(build_time)s"
#define KUDU_BUILD_USERNAME "%(username)s"
#define KUDU_BUILD_CLEAN_REPO %(clean_repo)s
#define KUDU_BUILD_ID "%(build_id)s"
#define KUDU_BUILD_TYPE "%(build_type)s"
#define KUDU_VERSION_STRING "%(version_string)s"
#endif
""" % locals())
    return 0
def rev_parse(rev):
    """Run git rev-parse, returning the sha1, or None if not found"""
    cmd = ['git', 'rev-parse', rev]
    try:
        return check_output(cmd, stderr=subprocess.STDOUT).strip()
    except subprocess.CalledProcessError:
        # Unknown revision: git exits nonzero.
        return None
def get_my_email():
    """
    Return the email address in the user's git config.
    """
    out = check_output(['git', 'config', '--get', 'user.email'])
    return out.strip()
def rev_list(arg):
    """Run git rev-list, returning an array of SHA1 commit hashes."""
    out = check_output(['git', 'rev-list', arg])
    return out.splitlines()
def describe_commit(rev):
    """
    Return a one-line description of a commit.
    """
    out = check_output(['git', 'log', '--color', '-n1', '--oneline', rev])
    return out.strip()
def get_committer_email(rev):
    """
    Return the email address of the committer of the given revision.
    """
    out = check_output(['git', 'log', '-n1', '--pretty=format:%ce', rev])
    return out.strip()
def rev_list(arg):
    """Run git rev-list, returning an array of SHA1 commit hashes."""
    hashes = check_output(['git', 'rev-list', arg])
    return hashes.splitlines()
def _get_file_list_from_git():
    """Return the paths changed since the upstream commit that look like
    source files (per _RE_SOURCE_FILE)."""
    base = get_upstream_commit()
    changed = check_output(["git", "diff", "--name-only", base]).splitlines()
    return [path for path in changed if _RE_SOURCE_FILE.search(path)]