def __make_redhat_config(self, target):
    """
    Prepare the Red Hat kernel config files.

    Args:
        target: makefile target, usually 'rh-configs' or
                'rh-configs-permissive'
    """
    make_args = self.make_argv_base + [target]
    logging.info("building Red Hat configs: %s", make_args)

    # rh-configs doesn't handle the cross compile args correctly in
    # some cases, so run it without CROSS_COMPILE in the environment.
    build_env = os.environ.copy()
    build_env.pop('CROSS_COMPILE', None)
    self.run_multipipe(make_args, env=build_env)

    # Find the generated config matching the user-supplied glob.
    pattern = join_with_slash(self.__glob_escape(self.source_dir),
                              self.rh_configs_glob)
    matches = glob.glob(pattern)

    # We should exit with an error if there are no matches.
    if not matches:
        logging.error(
            "The glob string provided with --rh-configs-glob did not "
            "match any of the kernel configuration files built with "
            "`make rh-configs`.")
        sys.exit(1)

    # Copy the correct kernel config into place.
    logging.info("copying Red Hat config: %s", matches[0])
    shutil.copyfile(matches[0],
                    join_with_slash(self.source_dir, ".config"))
def make_rpm_repo(self, rpm_files):
    """
    Make a RPM repository within a directory.

    Args:
        rpm_files: A list of RPM paths.

    Returns:
        Path to the repo directory.
    """
    repo_dir = join_with_slash(self.source_dir, 'rpm_repo/')

    # Start from a clean repository directory on every run.
    if os.path.isdir(repo_dir):
        shutil.rmtree(repo_dir)
    os.mkdir(repo_dir)

    # Relocate the freshly built RPMs into the repo directory.
    for path in rpm_files:
        shutil.move(path, repo_dir)

    # Generate the repository metadata with createrepo.
    createrepo_args = ["createrepo", repo_dir]
    ret = self.run_multipipe(createrepo_args)
    if ret != 0:
        # Surface a failed repo build as an exception.
        raise subprocess.CalledProcessError(ret, ' '.join(createrepo_args))

    return repo_dir
def find_tarball(self):
    """
    Find a tarball in the buildlog.

    Returns:
        The full path to the tarball (as a string), or None if the
        tarball line was not found in the log.
    """
    # Regex matching the line the build emits when the tarball is done.
    tarball_re = re.compile("^Tarball successfully created in (.*)$")

    result = None
    # Scan the buildlog line by line for the tarball path.
    with open(self.buildlog, 'r') as log_file:
        for line in log_file:
            found = tarball_re.search(line)
            if not found:
                continue
            # Resolve the reported path relative to the source tree and
            # stop at the first hit.
            result = os.path.realpath(
                join_with_slash(self.source_dir, found.group(1)))
            break

    return result
def checkout(self):
    """
    Clone and checkout the specified reference from the specified repo
    URL to the specified working directory. Requires "ref" (reference)
    to be specified upon creation.

    Returns:
        Full hash of the last commit.
    """
    local_ref = join_with_slash("refs", "remotes", "origin",
                                self.ref.split('/')[-1])

    logging.info("fetching base repo")
    fetch_cmd = ["fetch", "origin", "+%s:%s" % (self.ref, local_ref)]
    # Honor any user-requested shallow fetch depth.
    if self.fetch_depth:
        fetch_cmd += ['--depth', self.fetch_depth]
    # __git_cmd() expects individual args rather than a list, so unpack
    # the assembled command with *.
    self.__git_cmd(*fetch_cmd)

    logging.info("checking out %s", self.ref)
    self.__git_cmd("checkout", "-q", "--detach", local_ref)
    # Drop any stale local modifications from the work tree.
    self.__git_cmd("reset", "--hard", local_ref)

    head = self.get_commit_hash()
    logging.info("baserepo %s: %s", self.ref, head)
    return str(head).rstrip()
def __prepare_kernel_config(self):
    """Prepare the kernel config for the compile."""
    if self.cfgtype == 'rh-configs':
        # Build Red Hat configs and copy the correct one into place.
        # NOTE(review): called without a target argument — confirm this
        # matches the current __make_redhat_config() signature.
        self.__make_redhat_config()
    elif self.cfgtype in ['tinyconfig', 'allyesconfig', 'allmodconfig']:
        # Use the cfgtype provided with the kernel's Makefile.
        self.__make_config()
    else:
        # Copy the existing config file into place. Use a subprocess
        # call for it just for the nice logs and exception in case the
        # call fails.
        self.run_multipipe([
            'cp',
            self.basecfg,
            join_with_slash(self.source_dir, ".config"),
        ])

        config_args = self.make_argv_base + [self.cfgtype]
        logging.info("prepare config: %s", config_args)
        self.run_multipipe(config_args)

    # NOTE(mhayden): Building kernels with debuginfo can increase the
    # final kernel tarball size by 3-4x and can increase build time
    # slightly. Debug symbols are really only needed for deep diagnosis
    # of kernel issues on a specific system. This is why debuginfo is
    # disabled by default.
    if not self.enable_debuginfo:
        self.__adjust_config_option('disable', 'debug_info')

    # Set CONFIG_LOCALVERSION
    self.__adjust_config_option('set-str', 'LOCALVERSION',
                                '.{}'.format(self.localversion))

    self._ready = 1
def __init__(self, source_dir, basecfg, cfgtype=None, extra_make_args=None,
             enable_debuginfo=False, rh_configs_glob=None,
             localversion=None):
    # Kernel source tree location and base config file.
    self.source_dir = source_dir
    self.basecfg = basecfg
    # Default to olddefconfig when no config type was requested.
    self.cfgtype = "olddefconfig" if cfgtype is None else cfgtype
    # Set once the config has been prepared.
    self._ready = 0
    self.buildlog = join_with_slash(self.source_dir, "build.log")
    self.make_argv_base = ["make", "-C", self.source_dir]
    self.enable_debuginfo = enable_debuginfo
    self.build_arch = self.__get_build_arch()
    self.cross_compiler_prefix = self.__get_cross_compiler_prefix()
    self.rh_configs_glob = rh_configs_glob
    self.localversion = localversion
    # Arguments for building the tar.gz kernel package.
    self.targz_pkg_argv = [
        "INSTALL_MOD_STRIP=1",
        "-j%d" % multiprocessing.cpu_count(),
        "targz-pkg",
    ]

    # Split the extra make arguments provided by the user.
    if extra_make_args:
        self.extra_make_args = shlex.split(extra_make_args)
    else:
        self.extra_make_args = []

    logging.info("basecfg: %s", self.basecfg)
    logging.info("cfgtype: %s", self.cfgtype)
def __adjust_config_option(self, action, *options):
    """Adjust a kernel config option using kernel scripts."""
    config_script = join_with_slash(self.source_dir, "scripts", "config")
    # Build e.g.: scripts/config --file .config --disable DEBUG_INFO
    cmd = [config_script, "--file", self.get_cfgpath(),
           "--{}".format(action)]
    cmd += list(options)
    logging.info("%s config option '%s': %s", action, options, cmd)
    self.run_multipipe(cmd)
def get_cfgpath(self):
    """
    Get path to kernel .config file.

    Returns:
        Absolute path to kernel .config.
    """
    # The config always lives at the top of the source tree.
    cfg_path = join_with_slash(self.source_dir, ".config")
    return cfg_path
def cmd_console_check(cfg):
    """
    Check the console logs for any traces.

    Args:
        cfg: A dictionary of skt configuration.

    Raises:
        Exception: If the krelease or console parameters are missing.
    """
    # Drop results left over from a previous run.
    remove_oldresult(cfg.get('output_dir'), 'console_check.')
    console_result_path = join_with_slash(cfg.get('output_dir'),
                                          'console_check.result')
    console_report_path = join_with_slash(cfg.get('output_dir'),
                                          'console_check.report')

    if not cfg.get('krelease') or not cfg.get('console'):
        raise Exception('<krelease> or <console-url> parameter missing!')

    # Collect the traces found in each console log. The order of this
    # list matches the order of the logs in cfg['console'].
    trace_list_list = []
    for console_path_or_url in cfg.get('console'):
        console_log = skt.console.ConsoleLog(cfg.get('krelease'),
                                             console_path_or_url)
        trace_list_list.append(console_log.gettraces())

    if any(trace_list_list):
        report_string = ''
        # Iterate with the index so each trace is attributed to the log
        # it actually came from. The previous list.index() lookup
        # returned the position of the FIRST equal trace list, which
        # misattributed the source log whenever two console logs
        # produced identical traces.
        for index, trace_list in enumerate(trace_list_list):
            if trace_list:
                report_string += '{}\n{}:\n\n{}\n\n'.format(
                    'This is the first trace we found in ',
                    # Get the path/URL that belongs to the found trace.
                    cfg.get('console')[index],
                    trace_list[0]
                )
        report_results(console_result_path, 'false', console_report_path,
                       report_string)
    else:
        report_results(console_result_path, 'true', console_report_path,
                       'No call traces were detected.')
def geturl(self, source):
    """
    Get published URL for a source file path.

    Args:
        source: Source file path.

    Returns:
        Published URL corresponding to the specified source.
    """
    # Only the file name is published under the base URL.
    filename = os.path.basename(source)
    return join_with_slash(self.baseurl, filename)
def addtstamp(path, tstamp):
    """
    Add time stamp to a file path.

    Args:
        path:   file path.
        tstamp: time stamp.

    Returns:
        New path with time stamp.
    """
    # Prefix the basename with the stamp, keeping the directory intact.
    directory = os.path.dirname(path)
    stamped_name = "%s-%s" % (tstamp, os.path.basename(path))
    return join_with_slash(directory, stamped_name)
def publish(self, source):
    """
    Copy the source file to public destination.

    Args:
        source: Source file path.

    Returns:
        Published URL corresponding to the specified source.
    """
    # Normalize the destination with join_with_slash before handing it
    # to scp.
    target = join_with_slash(self.destination, "")
    subprocess.check_call(["scp", source, target])
    return self.geturl(source)
def publish(self, source):
    """
    Copy the source file to public destination.

    Args:
        source: Source file path.

    Returns:
        Published URL corresponding to the specified source.
    """
    # Normalize the destination with join_with_slash before copying.
    target = join_with_slash(self.destination, "")
    shutil.copy(source, target)
    return self.geturl(source)
def __init__(self, source_dir, basecfg, cfgtype=None, extra_make_args=None,
             enable_debuginfo=False, rh_configs_glob=None,
             localversion=None, make_target=None):
    """
    Initialize the kernel builder.

    Args:
        source_dir:        Path to the kernel source tree.
        basecfg:           Path to the base kernel config file.
        cfgtype:           Config make target; defaults to
                           "olddefconfig" when not specified.
        extra_make_args:   Extra arguments for make, as a single string.
        enable_debuginfo:  True to keep debuginfo in the build.
        rh_configs_glob:   Glob matching the Red Hat config to use.
        localversion:      String for CONFIG_LOCALVERSION.
        make_target:       Package build target; must be a key of
                           make_target_args.

    Raises:
        KeyError: If make_target is not a supported target.
    """
    self.source_dir = source_dir
    self.basecfg = basecfg
    self.cfgtype = cfgtype if cfgtype is not None else "olddefconfig"
    # Set once the config has been prepared.
    self._ready = 0
    self.buildlog = join_with_slash(self.source_dir, "build.log")
    self.make_argv_base = ["make", "-C", self.source_dir]
    # Extra make arguments required by each supported package target.
    self.make_target_args = {
        'targz-pkg': [
            "INSTALL_MOD_STRIP=1",
            "-j%d" % multiprocessing.cpu_count(),
        ],
        'binrpm-pkg': ["-j%d" % multiprocessing.cpu_count()]
    }
    self.enable_debuginfo = enable_debuginfo
    self.build_arch = self.__get_build_arch()
    self.cross_compiler_prefix = self.__get_cross_compiler_prefix()
    self.rh_configs_glob = rh_configs_glob
    self.localversion = localversion

    # Handle the make target provided and select the correct arguments
    # for make based on the target.
    self.make_target = make_target
    try:
        # Copy the list so appending the target does not mutate the
        # shared entry stored in make_target_args.
        self.compile_args = list(self.make_target_args[self.make_target])
        self.compile_args.append(self.make_target)
    except KeyError:
        error_message = ("Supported make targets: "
                         "{}".format(', '.join(
                             self.make_target_args.keys())))
        # The previous `raise (KeyError, error_message)` raised a
        # tuple, which is a TypeError on Python 3. Raise a real
        # exception carrying the message instead.
        raise KeyError(error_message)

    # Split the extra make arguments provided by the user
    if extra_make_args:
        self.extra_make_args = shlex.split(extra_make_args)
    else:
        self.extra_make_args = []

    # Truncate the buildlog, if it exists.
    self.__reset_build_log()

    logging.info("basecfg: %s", self.basecfg)
    logging.info("cfgtype: %s", self.cfgtype)
def remove_oldresult(output_dir, prefix_filename):
    """
    Remove existing results from previous runs.

    Args:
        output_dir:      Source directory stores existing result.
        prefix_filename: Prefix of the existing result filename.
    """
    try:
        matching = [name for name in os.listdir(output_dir)
                    if name.startswith(prefix_filename)]
        for name in matching:
            os.unlink(join_with_slash(output_dir, name))
    except OSError:
        # Best effort: a missing directory or file is not an error.
        pass
def __init__(self, uri, ref=None, wdir=None, fetch_depth=None):
    """
    Initialize a KernelTree.

    Args:
        uri:    The Git URI of the repository's origin remote.
        ref:    The remote reference to checkout. Assumed to be
                "master", if not specified.
        wdir:   The directory to house the clone and to checkout into.
                Creates and uses a temporary directory if not
                specified.
        fetch_depth:
                The amount of git history to include with the clone.
                Smaller depths lead to faster repo clones.
    """
    # The docstring promises a temporary directory when wdir is not
    # specified, but the previous code passed None straight to
    # join_with_slash(); create the temporary directory as documented.
    if wdir is None:
        import tempfile
        wdir = tempfile.mkdtemp()

    # The git "working directory" (the "checkout")
    self.wdir = wdir
    # The cloned git repository
    self.gdir = join_with_slash(self.wdir, ".git")
    # The origin remote's URL
    self.uri = uri
    # The remote reference to checkout
    self.ref = ref if ref is not None else "master"
    self.mergelog = join_with_slash(self.wdir, "merge.log")
    self.fetch_depth = fetch_depth

    try:
        os.mkdir(self.wdir)
    except OSError:
        # The directory may already exist; that is fine.
        pass

    # Initialize the repository
    self.__setup_repository()

    logging.info("base repo url: %s", self.uri)
    logging.info("base ref: %s", self.ref)
    logging.info("work dir: %s", self.wdir)
def merge_git_ref(self, uri, ref="master"):
    """
    Merge a git branch/reference into the tree.

    Args:
        uri: URL points to a repo containing the branch/reference to merge
        ref: Reference to checkout, default is master.

    Return:
        A tuple (SKT_SUCCESS, reference to commit) on success.
        A tuple (SKT_FAIL, None) on failure.
    """
    remote_name = self.__get_remote_name(uri)
    head = None

    # Adding a remote that already exists fails; ignore that case.
    try:
        self.__git_cmd("remote", "add", remote_name, uri)
    except subprocess.CalledProcessError:
        pass

    dstref = join_with_slash("refs", "remotes", remote_name,
                             ref.split('/')[-1])
    logging.info("fetching %s", dstref)
    self.__git_cmd("fetch", remote_name, "+%s:%s" % (ref, dstref))

    logging.info("merging %s: %s", remote_name, ref)
    try:
        # Capture merge output unless debug logging is enabled.
        if logging.getLogger().level > logging.DEBUG:
            grargs = {'stdout': subprocess.PIPE}
        else:
            grargs = {}

        self.__git_cmd("merge", "--no-edit", dstref, **grargs)
        head = self.get_commit_hash(dstref)
        logging.info("%s %s: %s", remote_name, ref, head)
    except subprocess.CalledProcessError:
        # Roll back the failed merge and report failure.
        logging.warning("failed to merge '%s' from %s, skipping", ref,
                        remote_name)
        self.__git_cmd("reset", "--hard")
        return (SKT_FAIL, None)

    return (SKT_SUCCESS, head)