def send_email(remote, branch, status, out, err):
    """Send a pass/fail status email for a branch merge operation.

    Nested helper: relies on the closure variables ``sendemail``,
    ``sub_prefix``, ``mode`` and ``dest`` from the enclosing scope, and on
    ``self`` for git/email access.

    :param remote: Name of the git remote ('' for a local branch).
    :param branch: Branch the operation ran on.
    :param status: True if the operation passed, False otherwise.
    :param out: Captured output log.
    :param err: Captured error log.
    :return: None.
    """
    if not sendemail or self.emailobj is None:
        return

    subject = [] if len(sub_prefix) == 0 else [sub_prefix]
    content = []

    # Map the merge mode to a subject keyword.
    if mode == 'merge':
        subject.append('Merge')
    elif mode == 'rebase':
        subject.append('Rebase')
    elif mode == 'replace':
        subject.append('Replace')
    elif mode == 'cherry':
        subject.append('Cherry')
    elif mode == 'cherrypick':
        subject.append('Cherrypick')

    if valid_str(remote):
        branch = remote + '/' + branch

    subject.append(branch)

    if status:
        subject.append('passed')
    else:
        subject.append('failed')

    # Resolve the remote name to its URL when possible.
    uret = self.git.cmd('remote get-url %s' % remote)
    url = remote
    if uret[0] == 0:
        url = uret[1].strip()

    content.append('')
    content.append('Head: %s' % self.git.head_sha())
    content.append('Base: %s' % self.git.base_sha())
    content.append('Dest Branch: %s' % dest)
    content.append('Remote: %s' % url)
    content.append('Remote Branch: %s' % branch)
    # BUGFIX: '%' binds tighter than the conditional expression, so the
    # original '... % "Passed" if status else "Failed"' evaluated as
    # ('Status: %s' % "Passed") if status else "Failed" -- i.e. the
    # 'Status: ' prefix was dropped entirely whenever status was False.
    content.append('Status: %s' % ("Passed" if status else "Failed"))
    content.append('\n')
    content.append(format_h1("Output log"))
    content.append('')
    content.append(out)
    content.append('\n')
    content.append(format_h1("Error log"))
    content.append('')
    content.append(err)

    self.emailobj.send_email(' '.join(subject), '\n'.join(content))
def smatch(self, arch='', config='', cc='', cflags=[], name='', cfg=None,
           smatch_flags=["C=2"], base=None, script_bin="smatch"):
    """Compile the kernel under the smatch checker and record new warnings/errors.

    When ``base`` is given, the tree is first checked out at ``base`` and
    compiled to collect baseline warnings/errors, then restored to the
    current HEAD and compiled again; only diagnostics absent from the
    baseline are counted.

    :param arch: Target architecture name.
    :param config: Kernel config to build.
    :param cc: Compiler to use.
    :param cflags: Extra compiler flags. NOTE(review): mutable default.
    :param name: Result name; falls back to ``config`` when empty.
    :param cfg: Optional config source passed through to ``_compile()``.
    :param smatch_flags: Make flags enabling the checker hook (C=2).
    :param base: Optional baseline commit (SHA/tag) to diff against.
    :param script_bin: Name of the smatch binary.
    :return: Compile status from ``_compile()``; False on any git failure.
    """
    base_warning_count = 0
    base_error_count = 0
    base_edata = []
    base_wdata = []

    flags = []
    # Route the build through smatch via the kernel's CHECK= hook.
    flags.append('CHECK="' + self._get_bin_path(script_bin) + ' -p=kernel"')

    if base is not None:
        # Remember HEAD so the tree can be restored after the baseline build.
        curr_head = self.git.head_sha()
        if self.git.cmd('checkout', base)[0] != 0:
            self.logger.error("Git checkout command failed in %s", base)
            return False
        # Baseline compile at `base`.
        status, base_warning_count, \
        base_error_count, base_wdata, \
        base_edata = self._compile(arch, config, cc, smatch_flags + flags + cflags, name, cfg, True)
        if status is False:
            return False
        # Restore the original HEAD before the real compile.
        if self.git.cmd('checkout', curr_head)[0] != 0:
            self.logger.error("Git checkout command failed in %s", curr_head)
            return False

    # Compile the current tree with smatch enabled.
    status, warning_count, \
    error_count, wdata, edata = self._compile(arch, config, cc, smatch_flags + flags + cflags, name, cfg, True)

    # NOTE(review): "warinings"/"warining" typos preserved (runtime string).
    self.logger.info("Base warinings:%d Base errors:%d New warining:%d New errors:%d\n",
                     base_warning_count, base_error_count, warning_count, error_count)

    # Keep only diagnostics that are not present in the baseline.
    self.logger.debug(format_h1("Diff between Base/New warnings", tab=2))
    warning_count = self._diff_count(base_wdata, wdata)
    self.logger.debug(format_h1("End of new warnings, count:%d" % warning_count, tab=2))
    self.logger.debug(format_h1("Diff between Base/New errors\n", tab=2))
    error_count = self._diff_count(base_edata, edata)
    self.logger.debug(format_h1("End of new errors, count:%d" % error_count, tab=2))

    # Fall back to the config name when no explicit result name was given.
    name = config if name is None or len(name) == 0 else name

    self.resobj.update_smatch_test_results(arch, name, status, warning_count, error_count)

    return status
def send_email(self, emailcfg, sub=None):
    """Email the dumped test results.

    :param emailcfg: Email config file; when None no mail is sent.
    :param sub: Optional extra text appended to the subject.
    :return: True when a mail was sent, False otherwise.
    """
    # No email config -> nothing to do.
    if emailcfg is None:
        return False

    emailobj = Email(emailcfg, self.logger)

    subject = ['Test results']
    if sub is not None:
        subject.append(sub)

    # Dump the results into a temp file and read it back as the body.
    outfile = tempfile.NamedTemporaryFile()
    self.resobj.dump_results(outfile=outfile.name)

    content = []
    with open(outfile.name) as fobj:
        content.append(format_h1("Test Results"))
        content.append('')
        content.append(fobj.read())
        content.append('\n')

    emailobj.send_email(' '.join(subject), '\n'.join(content))

    return True
def compile_list(self, arch='', config_list=None, cc='', cflags=None, name='', cfg=None):
    """Run compile() for every config in ``config_list``.

    :param arch: Target architecture name.
    :param config_list: Kernel configs to compile (default: none).
    :param cc: Compiler to use.
    :param cflags: Extra compiler flags (default: none).
    :param name: Result name passed through to compile().
    :param cfg: Optional config source passed through to compile().
    :return: List with one compile() status per config.
    """
    # BUGFIX: the defaults were mutable lists ([]) shared across calls;
    # use None sentinels instead (behavior-compatible for all callers).
    config_list = [] if config_list is None else config_list
    cflags = [] if cflags is None else cflags

    self.logger.info(format_h1("Running compile tests", tab=2))

    return [self.compile(arch, config, cc, cflags, name, cfg)
            for config in config_list]
def smatch_list(self, arch='', config_list=None, cc='', cflags=None, name='', cfg=None,
                smatch_flags=None, base=None, script_bin="smatch"):
    """Run smatch() for every config in ``config_list``.

    :param arch: Target architecture name.
    :param config_list: Kernel configs to test (default: none).
    :param cc: Compiler to use.
    :param cflags: Extra compiler flags (default: none).
    :param name: Result name passed through to smatch().
    :param cfg: Optional config source passed through to smatch().
    :param smatch_flags: Make flags enabling the checker (default: ["C=2"]).
    :param base: Optional baseline commit passed through to smatch().
    :param script_bin: Name of the smatch binary.
    :return: List with one smatch() status per config.
    """
    # BUGFIX: the defaults were mutable lists ([] / ["C=2"]) shared across
    # calls; use None sentinels instead (behavior-compatible for callers).
    config_list = [] if config_list is None else config_list
    cflags = [] if cflags is None else cflags
    smatch_flags = ["C=2"] if smatch_flags is None else smatch_flags

    self.logger.info(format_h1("Running smatch tests", tab=2))

    return [self.smatch(arch, config, cc, cflags, name, cfg, smatch_flags, base, script_bin)
            for config in config_list]
def sparse_list(self, arch='', config_list=None, cc='', cflags=None, name='', cfg=None,
                sparse_flags=None, base=None, script_bin=SPARSE_BIN_PATH):
    """Run sparse() for every config in ``config_list``.

    :param arch: Target architecture name.
    :param config_list: Kernel configs to test (default: none).
    :param cc: Compiler to use.
    :param cflags: Extra compiler flags (default: none).
    :param name: Result name passed through to sparse().
    :param cfg: Optional config source passed through to sparse().
    :param sparse_flags: Make flags enabling the checker (default: ["C=2"]).
    :param base: Optional baseline commit passed through to sparse().
    :param script_bin: Path of the sparse binary.
    :return: List with one sparse() status per config.
    """
    # BUGFIX: the defaults were mutable lists ([] / ["C=2"]) shared across
    # calls; use None sentinels instead (behavior-compatible for callers).
    config_list = [] if config_list is None else config_list
    cflags = [] if cflags is None else cflags
    sparse_flags = ["C=2"] if sparse_flags is None else sparse_flags

    self.logger.info(format_h1("Running sparse tests", tab=2))

    return [self.sparse(arch, config, cc, cflags, name, cfg, sparse_flags, base, script_bin)
            for config in config_list]
def run_checkpatch(self, head=None, base=None):
    """Run the kernel checkpatch script on every commit in base..head.

    The commits are walked one at a time (HEAD~n..HEAD~(n-1), newest
    first) so the per-patch "total: N errors, M warnings" summary can be
    parsed; the totals are accumulated and stored in the result object.

    :param head: Head commit (SHA/tag); defaults to ``self.head``.
    :param base: Base commit (SHA/tag); defaults to ``self.base``.
    :return: True on success, False on any failure.
    """
    # NOTE(review): "Runing" typo preserved (runtime string).
    self.logger.info(format_h1("Runing checkpatch script", tab=2))
    self.enable_checkpatch = True

    head = self.head if head is None else head
    base = self.base if base is None else base

    gerrorcount = 0    # total errors across all patches
    gwarningcount = 0  # total warnings across all patches

    try:
        if self.valid_git is False:
            raise Exception("Invalid git repo")
        if not os.path.exists(os.path.join(self.src, CHECK_PATCH_SCRIPT)):
            raise Exception("Invalid checkpatch script")

        # Number of commits in the base..head range.
        ret, count, err = self.git.cmd('rev-list', '--count', str(base) + '..' + str(head))
        if ret != 0:
            raise Exception("git rev-list command failed")

        self.logger.info("Number of patches between %s..%s is %d", base, head, int(count))

        def parse_results(data):
            # checkpatch prints "total: N errors, M warnings," per patch.
            regex = r"total: ([0-9]*) errors, ([0-9]*) warnings,"
            match = re.search(regex, data)
            if match:
                return int(match.group(1)), int(match.group(2))
            return 0, 0

        prev_index = 0
        # Walk one commit at a time: HEAD~1..HEAD~0, HEAD~2..HEAD~1, ...
        for index in range(1, int(count) + 1):
            commit_range = str(head) + '~' + str(index) + '..' + str(head) + '~' + str(prev_index)
            ret, out, err = self.sh.cmd(os.path.join(self.src, CHECK_PATCH_SCRIPT), '-g', commit_range)
            lerrorcount, lwarningcount = parse_results(out)
            # Only dump the checkpatch output for patches with findings.
            if lerrorcount != 0 or lwarningcount != 0:
                self.logger.info(out)
                self.logger.info(err)
            gerrorcount = gerrorcount + int(lerrorcount)
            gwarningcount = gwarningcount + int(lwarningcount)
            self.logger.debug("lerror:%d lwarning:%d gerror:%d gwarning:%d\n",
                              lerrorcount, lwarningcount, gerrorcount, gwarningcount)
            prev_index = index
    except Exception as e:
        self.logger.error(e)
        # Record whatever was counted before the failure.
        self.resobj.update_checkpatch_results(False, gwarningcount, gerrorcount)
        return False
    else:
        self.resobj.update_checkpatch_results(True, gwarningcount, gerrorcount)
        return True
def clean_repo(self):
    """
    Clean the git repo and delete all local branches.

    :return: None
    """
    self.logger.info(format_h1("Cleaning repo", tab=2))

    # Drop uncommitted changes and untracked files.
    self.git.cmd("reset", "--hard")
    self.git.cmd("clean", "-fdx")

    # Delete every local branch except the checked-out one, which git
    # marks with a leading "* " in `git branch` output.
    branch_output = self.git.cmd('branch')[1]
    for entry in branch_output.splitlines():
        entry = entry.strip()
        if entry.startswith('* '):
            continue
        self.git.cmd("branch", "-D", entry)
def _upload_repo(self, branch_name, upload_options):
    """
    Upload the given branch to a remote patch.

    Supported upload modes are force-push, push and refs-for (for Gerrit).

    :param branch_name: Name of the local branch.
    :param upload_options: Dict with upload related params.
                           url - Name of the git remote.
                           branch - Remote branch of git repo.
    :return: Nothing.
    """
    self.logger.info(format_h1("Uploading %s", tab=2) % branch_name)

    mode = upload_options['mode']
    # force-push -> forced update; refs-for -> Gerrit refs/for/ push.
    self.git.push(branch_name,
                  upload_options['url'],
                  upload_options['branch'],
                  force=(mode == 'force-push'),
                  use_refs=(mode == 'refs-for'))
def generate_tar_gz(self, outfile, branch=None, skip_files=None):
    """
    Create kernel tar file.

    :param outfile: Name of the tar file.
    :param branch: Git branch.
    :param skip_files: List of files needs to be skipped (default: ['.git']).
    :return: Filename on success, None otherwise.
    """
    # BUGFIX: mutable default argument replaced with a None sentinel.
    skip_files = ['.git'] if skip_files is None else skip_files

    self.logger.info(format_h1("Generating tar gz", tab=2))

    # Check if outfile is valid.
    if outfile is None or outfile == "":
        self.logger.error("Invalid output file %s name\n", outfile)
        return None

    # If branch option is used, then kernel source should be a valid git repo.
    if branch is not None and self.valid_git:
        if self.git.cmd('checkout', branch)[0] != 0:
            self.logger.error("Git checkout branch %s failed in %s", branch, self.src)
            return None

    tar_cmd = "tar"
    # BUGFIX: the old "--exclude={a,b}" form relied on shell brace
    # expansion, which does NOT happen for a single element -- the default
    # ['.git'] produced the literal pattern '{.git}' and excluded nothing.
    # Emit one --exclude option per entry instead.
    for sfile in skip_files:
        tar_cmd += ' --exclude=%s' % sfile
    tar_cmd += ' -Jcf'

    try:
        ret = self.sh.cmd(
            "%s %s %s" % (tar_cmd, os.path.abspath(outfile), os.path.abspath(self.src)),
            shell=True)
        if ret[0] != 0:
            raise Exception("Create tar command failed")
    except Exception as e:
        self.logger.error(e, exc_info=True)
        return None
    else:
        return outfile
def custom_test(self, name, script, arg_list=None, head=None, base=None,
                enable_head_sub=False, enable_base_sub=False, enable_src_sub=False):
    """Run a user supplied test script and record its result.

    :param name: Name of the test (used for result reporting).
    :param script: Script name/path; resolved via ``_get_bin_path()``.
    :param arg_list: Extra arguments for the script (default: none).
    :param head: Value substituted for $HEAD when ``enable_head_sub`` is set.
    :param base: Value substituted for $BASE when ``enable_base_sub`` is set.
    :param enable_head_sub: Enable $HEAD substitution in the arguments.
    :param enable_base_sub: Enable $BASE substitution in the arguments.
    :param enable_src_sub: Enable $SRC (kernel source dir) substitution.
    :return: True if the script exited with status 0, False otherwise.
    """
    # BUGFIX: mutable default argument replaced with a None sentinel.
    arg_list = [] if arg_list is None else arg_list

    self.logger.info(format_h1("Running custom test %s" % name, tab=2))

    script = self._get_bin_path(script)
    if not os.path.exists(script):
        self.logger.error("Invalid script %s", script)
        return False

    cmd = [script] + list(arg_list)

    # Collect the enabled placeholder substitutions, then apply them in
    # one pass over the argument list ($HEAD, then $BASE, then $SRC --
    # same order as before).
    subs = []
    if head is not None and enable_head_sub:
        subs.append(("$HEAD", head))
    if base is not None and enable_base_sub:
        subs.append(("$BASE", base))
    if enable_src_sub:
        subs.append(("$SRC", self.src))
    for token, value in subs:
        for index, item in enumerate(cmd):
            if token in item:
                cmd[index] = cmd[index].replace(token, value)

    ret = self.sh.cmd("%s" % (' '.join(cmd)))
    self.process_custom_test(name, ret)

    return (ret[0] == 0)
def __init__(self, repo_dir, cfg, repo_head=None, emailcfg=None, logger=None):
    """
    Constructor of KernelInteg class.

    :rtype: object
    :param repo_dir: Repo directory.
    :param cfg: Kernel Integration Json config file.
    :param repo_head: Optional SHA-ID or Tag; when given and valid it
                      overrides the 'repo-head' of every repo in the config.
    :param emailcfg: Optional email config file; enables status emails.
    :param logger: Logger object.
    :raises Exception: If repo_head (or any config 'repo-head') does not
                       resolve in the repo.
    """
    self.logger = logger or logging.getLogger(__name__)
    # JSON schemas shipped inside the klibs package.
    self.schema = pkg_resources.resource_filename(
        'klibs', 'schemas/integ-schema.json')
    self.emailschema = pkg_resources.resource_filename(
        'klibs', 'schemas/email-schema.json')
    self.cfgobj = JSONParser(self.schema, cfg, extend_defaults=True, os_env=True, logger=self.logger)
    self.cfg = self.cfgobj.get_cfg()

    # Update email configs.
    if emailcfg is not None:
        self.emailobj = Email(emailcfg, self.logger)
    else:
        self.emailobj = None

    self.remote_list = self.cfg['remote-list']
    self.repos = self.cfg['repo-list']
    self.int_list = self.cfg['int-list']

    self.repo_dir = repo_dir
    self.sh = PyShell(wd=self.repo_dir, logger=self.logger)

    # All git commands will be executed in repo directory.
    self.logger.info(format_h1("Initalizing repo", tab=2))
    self.git = GitShell(wd=self.repo_dir, init=True, logger=self.logger)

    # Add git remote and fetch the tags.
    self.logger.info(format_h1("Add remote", tab=2))
    for remote in self.remote_list:
        self.git.add_remote(remote['name'], remote['url'])
        self.git.cmd("fetch", remote['name'])

    valid_repo_head = False

    def is_valid_head(head):
        # A head is valid when non-empty and resolvable by 'git show'.
        if len(head) == 0:
            return False
        ret, out, err = self.git.cmd('show', head)
        if ret == 0:
            return True
        return False

    # Check if the repo head is valid.
    if valid_str(repo_head):
        if is_valid_head(repo_head) is False:
            raise Exception("Invalid repo head %s" % repo_head)
        else:
            valid_repo_head = True

    # A valid explicit repo_head overrides every config entry; otherwise
    # each config-supplied 'repo-head' is validated and an exception is
    # raised if it does not resolve.
    for repo in self.repos:
        if valid_repo_head is True:
            repo['repo-head'] = repo_head
        else:
            if is_valid_head(repo['repo-head']) is False:
                raise Exception("Invalid repo head %s" % repo['repo-head'])
def generate_git_bundle(self, outfile, mode='branch', local_branch=None, head=None,
                        base=None, commit_count=0):
    """
    Create git bundle for the given kernel source.

    Git bundle can be created in three different modes.
    1. branch - Given git branch will be bundled.
    2. commit_count - Given number of commits will be bundled.
    3. diff - Range of commits will be bundled.

    :param outfile: Name of the git bundle.
    :param mode: branch, commit_count, and diff mode.
    :param local_branch: Name of the git branch.
    :param head: Head SHA ID or Tag.
    :param base: First SHA ID or Tag.
    :param commit_count: Number of commits (commit_count mode only).
    :return: Filename on success, None otherwise.
    """
    set_val = lambda x, y: y if x is None else x

    # Check the validity of bundle mode.
    if mode not in self.bundle_modes:
        self.logger.error("Invalid bundle mode %s", mode)
        return None

    # Check the validity of outfile.
    if outfile is None or outfile == "":
        self.logger.error("Invalid bundle name %s", outfile)
        return None

    # Check whether kernel source is a valid git repo.
    if not self.valid_git:
        self.logger.error("Invalid git repo %s", self.src)
        return None

    # If local branch is none, then current branch will be used.
    local_branch = set_val(local_branch, self.git.current_branch())

    # Use an absolute path for the bundle file. NOTE(review): an already
    # existing bundle file is NOT deleted here (contrary to what an older
    # comment claimed); 'git bundle create' simply overwrites it.
    outfile = os.path.abspath(outfile)

    self.logger.info(format_h1("Generating git bundle", tab=2))

    try:
        if self.git.cmd('checkout', local_branch)[0] != 0:
            raise Exception("Git checkout command failed in %s" % self.src)
        if mode == 'branch':
            if self.git.cmd('bundle', 'create', outfile, local_branch)[0] != 0:
                raise Exception("Git bundle create command failed")
        if mode == 'commit_count':
            # '-N' limits the bundle to the last N commits of the branch.
            if self.git.cmd('bundle', 'create', outfile, '-' + str(commit_count), local_branch)[0] != 0:
                raise Exception("Git bundle create command failed")
        if mode == 'diff' and head is not None and base is not None:
            if self.git.cmd('bundle', 'create', outfile, str(base) + '..' + str(head))[0] != 0:
                raise Exception("Git bundle create command failed")
    except Exception as e:
        self.logger.error(e, exc_info=True)
        return None
    else:
        return outfile
def auto_test(self):
    """Run all kernel tests enabled in the json config.

    Drives static (compile/sparse/smatch), checkpatch and custom tests,
    then optionally pushes the results back via git.

    :return: True when every enabled test passed, False otherwise.
    """
    self.logger.info(format_h1("Running kernel tests from json", tab=2))

    status = True

    # Scratch clone used to fetch remote config sources.
    config_temp = tempfile.mkdtemp("_dir", "config_")
    cgit = GitShell(wd=config_temp, init=True, logger=self.logger)

    static_config = self.cfg.get("static-config", None)
    sparse_config = self.cfg.get("sparse-config", None)
    smatch_config = self.cfg.get("smatch-config", None)
    custom_test = self.cfg.get("custom-test", None)

    # If there is a config in remote source, fetch it and give the local path.
    def get_configsrc(options):
        if options is None or not isinstance(options, collections.Mapping):
            return None
        # Empty url -> config lives inside the kernel source tree.
        if len(options["url"]) == 0:
            return os.path.abspath(os.path.join(self.src, options["remote-dir"], options["name"]))
        if options["sync-mode"] == "git":
            cgit.cmd("clean -xdf")
            # Reuse an existing remote for this url if one is present.
            remote_list = cgit.cmd("remote")[1].split('\n')
            rname = 'origin'
            for remote in remote_list:
                rurl = cgit.cmd("remote get-url %s" % remote)[1].strip()
                if rurl == options["url"]:
                    rname = remote
                    break
            cgit.add_remote(rname, options["url"])
            cgit.cmd("pull %s" % rname)
            cgit.cmd("checkout %s/%s" % (rname, options["branch"]))
            return os.path.abspath(os.path.join(config_temp, options["remote-dir"], options["name"]))
        return None

    # Resolve the head/base SHA for a test section, honoring its
    # auto/auto-mode/value settings; falls back to self.head / self.base.
    def get_sha(_type='head', config=None):
        if config is None:
            return getattr(self, _type)
        if config[_type]['auto']:
            if config[_type]['auto-mode'] == "last-upstream":
                return self.git.cmd('describe --abbrev=0 --match "v[0-9]*" --tags')[1].strip()
            elif config[_type]['auto-mode'] == "last-tag":
                return self.git.cmd('describe --abbrev=0 --tags')[1].strip()
            elif config[_type]['auto-mode'] == "head-commit":
                return self.git.head_sha()
            elif config[_type]['auto-mode'] == "base-commit":
                return self.git.base_sha()
        elif len(config[_type]['value']) > 0:
            return config[_type]['value'].strip()
        else:
            return getattr(self, _type)

    # Run compile/sparse/smatch for one (arch, config) entry.
    def static_test(obj, cobj, config):
        status = True
        if cobj["compile-test"]:
            current_status = self.compile(obj["arch_name"], config,
                                          obj["compiler_options"]["CC"],
                                          obj["compiler_options"]["cflags"],
                                          cobj.get('name', None),
                                          get_configsrc(cobj.get('source-params', None)))
            if current_status is False:
                self.logger.error("Compilation of arch:%s config:%s failed\n"
                                  % (obj["arch_name"], cobj.get('name', config)))
            status &= current_status
        if cobj["sparse-test"]:
            skip = False
            args = [
                obj["arch_name"], config,
                obj["compiler_options"]["CC"],
                obj["compiler_options"]["cflags"],
                cobj.get('name', None),
                get_configsrc(cobj.get('source-params', None))
            ]
            if sparse_config is not None:
                if sparse_config["enable"] is False:
                    self.logger.warning("Sparse global flag is disabled\n")
                    skip = True
                else:
                    args.append(sparse_config["cflags"])
                    args.append(get_sha("base", sparse_config))
                    args.append(sparse_config["source"])
            if skip is False:
                current_status = self.sparse(*args)
                if current_status is False:
                    self.logger.error("Sparse test of arch:%s config:%s failed\n"
                                      % (obj["arch_name"], cobj.get('name', config)))
                status &= current_status
        if cobj["smatch-test"]:
            skip = False
            args = [
                obj["arch_name"], config,
                obj["compiler_options"]["CC"],
                obj["compiler_options"]["cflags"],
                cobj.get('name', None),
                get_configsrc(cobj.get('source-params', None))
            ]
            if smatch_config is not None:
                if smatch_config["enable"] is False:
                    self.logger.warning("Smatch global flag is disabled\n")
                    skip = True
                else:
                    args.append(smatch_config["cflags"])
                    args.append(get_sha("base", smatch_config))
                    args.append(smatch_config["source"])
            if skip is False:
                current_status = self.smatch(*args)
                if current_status is False:
                    self.logger.error("Smatch test of arch:%s config:%s failed\n"
                                      % (obj["arch_name"], cobj.get('name', config)))
                status &= current_status
        return status

    if static_config is not None and static_config["enable"] is True:
        # Compile standard configs
        # NOTE(review): `supported_configs` is not defined in this view --
        # presumably a module-level list; confirm.
        for obj in static_config["test-list"]:
            for config in supported_configs:
                # BUGFIX: dict.has_key() is Python-2-only; the equivalent
                # 'in' test works on both Python 2 and 3.
                if isinstance(obj, collections.Mapping) and config in obj:
                    status &= static_test(obj, obj[config], config)
            # Compile custom configs
            for cobj in obj["customconfigs"]:
                if cobj['name'] not in self.custom_configs:
                    self.custom_configs.append(cobj['name'])
                    self.resobj.add_config(cobj['name'])
                status &= static_test(obj, cobj, cobj['defaction'])

    checkpatch_config = self.cfg.get("checkpatch-config", None)
    if checkpatch_config is not None and checkpatch_config["enable"] is True:
        if len(checkpatch_config["source"]) > 0:
            self.checkpatch_source = checkpatch_config["source"]
        status &= self.run_checkpatch(get_sha('head', checkpatch_config),
                                      get_sha('base', checkpatch_config))

    if custom_test is not None and custom_test["enable"] is True:
        for ctest in custom_test["test-list"]:
            status &= self.custom_test(ctest["name"], ctest["source"], ctest["arg-list"],
                                       get_sha("head", custom_test),
                                       get_sha("base", custom_test),
                                       ctest["enable-head-sub"],
                                       ctest["enable-base-sub"],
                                       ctest["enable-src-sub"])

    output_config = self.cfg.get("output-config", None)
    if output_config is not None and output_config["enable"] is True and len(output_config["url"]) > 0:
        # Commit the results file used back to server.
        if output_config["sync-mode"] == "git":
            self.git_upload_results(remote=('origin', output_config["url"], output_config["branch"]),
                                    mode=output_config["mode"],
                                    msg=output_config["upload-msg"],
                                    append_kinfo=output_config.get("append-kinfo", False),
                                    resdir=None,
                                    relpath=output_config["remote-dir"],
                                    outfile=output_config["name"]
                                    )

    shutil.rmtree(config_temp, ignore_errors=True)

    return status
def auto_release(self):
    """Generate and upload the release artifacts enabled in the config.

    Four independent sections are attempted in order: git bundle, quilt
    series, tar file, and full kernel upload. Each section's failure is
    logged and does not abort the others.

    :return: False when the config is missing; otherwise True
             (per-section failures are only logged).
    """
    str_none = lambda x: None if len(x) == 0 else x.strip()

    if self.cfg is None:
        self.logger.error("Invalid config file %s", self.cfg)
        return False

    # Convert a json remote list into (name, url, branch, path) tuples.
    def conv_remotelist(remote_list):
        new_list = []
        for remote in remote_list:
            new_list.append((remote["name"], remote["url"], remote["branch"], remote["path"]))
        return new_list if len(new_list) > 0 else None

    # Convert a json tag list into (name, msg) tuples.
    def conv_taglist(tag_list):
        new_list = []
        for tag in tag_list:
            # BUGFIX: the name/msg pair was passed as two separate
            # arguments to list.append(), which raises TypeError.
            new_list.append((tag["name"], tag["msg"]))
        return new_list if len(new_list) > 0 else None

    # "*" means "copy everything", represented downstream as None.
    def conv_copyformat(flist):
        if "*" in flist:
            return None
        else:
            return flist

    try:
        params = self.cfg.get("bundle", None)
        if params is not None and params["enable"]:
            if not self.valid_git:
                # BUGFIX: these Exceptions were created but never raised,
                # so every error path in this method was dead code.
                raise Exception("Kernel is not a git repo. So bundle option is not supported")
            base = params["base"]["value"]
            if params["base"]["auto"]:
                base = self.git.cmd('describe --abbrev=0 --tags')[1]
            base = str_none(base)
            head = params["head"]["value"]
            if params["head"]["auto"]:
                head = self.git.head_sha()
            head = str_none(head)
            bundle = self.generate_git_bundle(params["outname"], params["mode"],
                                              str_none(params["branch"]),
                                              head, base, params["commit_count"])
            if bundle is None:
                raise Exception("Generate bundle failed")
            uparams = params.get("upload-params", None)
            if uparams is not None:
                self.git_upload(bundle, str_none(params["upload-dir"]),
                                uparams["new-commit"],
                                conv_copyformat(uparams["copy-formats"]),
                                uparams["commit-msg"],
                                conv_remotelist(uparams["remote-list"]),
                                uparams["use-refs"], uparams["force-push"],
                                uparams["clean-update"],
                                conv_taglist(uparams["tag-list"]))
    except Exception as e:
        self.logger.error(e, exc_info=True)
    else:
        if self.cfg["bundle"]["enable"]:
            self.logger.info(
                format_h1("Successfully created git bundle", tab=2))

    try:
        params = self.cfg.get("quilt", None)
        if params is not None and params["enable"]:
            if not self.valid_git:
                raise Exception("Kernel is not a git repo. So quilt option is not supported")
            base = params["base"]["value"]
            if params["base"]["auto"]:
                base = self.git.cmd('describe --abbrev=0 --tags')[1]
            base = str_none(base)
            head = params["head"]["value"]
            if params["head"]["auto"]:
                head = self.git.head_sha()
            head = str_none(head)
            if head is None or base is None:
                # BUGFIX: message was never %-formatted (and never raised).
                raise Exception("Invalid base/head %s/%s" % (base, head))
            self.logger.info("head:%s base:%s", head, base)
            quilt = self.generate_quilt(str_none(params["branch"]), base, head,
                                        params['outname'],
                                        str_none(params["sed-file"]),
                                        str_none(params["audit-script"]),
                                        params['series-comment'])
            if quilt is None:
                raise Exception("Generate quilt failed")
            uparams = params.get("upload-params", None)
            if uparams is not None:
                ret = self.git_upload(
                    quilt, str_none(params["upload-dir"]),
                    uparams["new-commit"],
                    conv_copyformat(uparams["copy-formats"]),
                    uparams["commit-msg"],
                    conv_remotelist(uparams["remote-list"]),
                    uparams["use-refs"], uparams["force-push"],
                    uparams["clean-update"],
                    conv_taglist(uparams["tag-list"]))
                if ret is None:
                    raise Exception("Quilt upload failed")
    except Exception as e:
        self.logger.error(e, exc_info=True)
    else:
        if self.cfg["quilt"]["enable"]:
            self.logger.info(
                format_h1("Successfully created quilt series", tab=2))

    try:
        params = self.cfg.get("tar", None)
        if params is not None and params["enable"]:
            tarname = self.generate_tar_gz(params["outname"],
                                           str_none(params["branch"]),
                                           params["skip-files"])
            if tarname is None:
                raise Exception("Create tar file failed")
            uparams = params.get("upload-params", None)
            if uparams is not None:
                ret = self.git_upload(
                    tarname, str_none(params["upload-dir"]),
                    uparams["new-commit"],
                    conv_copyformat(uparams["copy-formats"]),
                    uparams["commit-msg"],
                    conv_remotelist(uparams["remote-list"]),
                    uparams["use-refs"], uparams["force-push"],
                    uparams["clean-update"],
                    conv_taglist(uparams["tag-list"]))
                if ret is None:
                    raise Exception("tar upload failed")
    except Exception as e:
        self.logger.error(e, exc_info=True)
    else:
        if self.cfg["tar"]["enable"]:
            self.logger.info(
                format_h1("Successfully created tar file", tab=2))

    try:
        params = self.cfg.get("upload-kernel", None)
        if params is not None and params["enable"]:
            # NOTE(review): if "upload-params" is missing, uparams is None
            # and the subscript below raises -- caught and logged here.
            uparams = params.get("upload-params", None)
            ret = self.git_upload(self.src, str_none(params["upload-dir"]),
                                  uparams["new-commit"],
                                  conv_copyformat(uparams["copy-formats"]),
                                  uparams["commit-msg"],
                                  conv_remotelist(uparams["remote-list"]),
                                  uparams["use-refs"], uparams["force-push"],
                                  uparams["clean-update"],
                                  conv_taglist(uparams["tag-list"]))
            if ret is None:
                raise Exception("Upload kernel failed")
    except Exception as e:
        self.logger.error(e, exc_info=True)
    else:
        if self.cfg["upload-kernel"]["enable"]:
            self.logger.info(
                format_h1("Successfully Uploaded Linux kernel", tab=2))

    return True
def _create_repo(self, repo):
    """
    Merge the branches given in source-list and create the list of output
    branches as specified by the dest-list option.

    :param repo: Dict with kernel repo options. Check "repo-params" section
                 in kernel integration schema file for more details.
    :return: Overall status (True on success).
    """
    self.logger.info(
        format_h1("Create %s repo", tab=2) % repo['repo-name'])

    merge_list = []
    status = True

    # Clean existing git operations (best-effort: any of these fails
    # harmlessly when no such operation is in progress).
    try:
        self.git.cmd('merge --abort')
        self.git.cmd('rebase --abort')
        self.git.cmd('cherry-pick --abort')
        self.git.cmd('revert --abort')
    except:
        pass

    # Get source branches
    for srepo in repo['source-list']:
        if srepo['skip'] is True:
            continue
        if self.git.valid_branch(srepo['url'], srepo['branch']) is False:
            raise Exception("Dependent repo %s/%s does not exits"
                            % (srepo['url'], srepo['branch']))
        else:
            merge_list.append((srepo['url'], srepo['branch'],
                               srepo["upstream"], srepo["sha-list"]))

    # Destination branches that are not skipped.
    dest_repolist = []
    for drepo in repo['dest-list']:
        if drepo['skip']:
            continue
        dest_repolist.append(drepo)

    # Create destination branches
    dest_branches = []
    try:
        for dest_repo in dest_repolist:
            # Recreate the local branch from scratch.
            if self.git.valid_branch('', dest_repo['local-branch']):
                ret = self.git.delete(dest_repo['local-branch'], force=True)[0]
                if ret != 0:
                    raise Exception("Deleting branch %s failed"
                                    % dest_repo['local-branch'])
            self.git.cmd("checkout", repo['repo-head'], "-b", dest_repo['local-branch'])
            if len(merge_list) > 0:
                self._merge_branches(dest_repo['merge-mode'], merge_list,
                                     dest_repo['local-branch'],
                                     dest_repo['merge-options'],
                                     repo['send-email'],
                                     repo['email-prefix'],
                                     repo['repo-name'])
            dest_branches.append(dest_repo['local-branch'])
    except Exception as e:
        self.logger.error(e, exc_info=True)
        # NOTE(review): 'status' is NOT set to False here, so a failed
        # branch creation still falls through to the compare/upload steps
        # below as if it succeeded -- confirm whether this is intentional.
        for branch in dest_branches:
            self.git.delete(branch, force=True)
    else:
        self.logger.info("Repo %s creation successfull" % repo['repo-name'])

    # Compare destination branches
    if status is True and repo['compare-dest']:
        if len(dest_repolist) > 1:
            # All destinations are diffed against the first one.
            base_repo = dest_repolist[0]
            for dest_repo in dest_repolist:
                ret, out, err = self.git.cmd('diff', base_repo['local-branch'],
                                             dest_repo['local-branch'])
                if ret != 0:
                    # git diff itself failed.
                    if repo['compare-resmode'] == "fail":
                        status = False
                    break
                else:
                    if len(out) > 0:
                        # Non-empty diff: branches differ.
                        if repo['compare-resmode'] == "fail":
                            status = False
                        self.logger.error(
                            "Destination branches %s!=%s, resolution:%s"
                            % (base_repo['local-branch'],
                               dest_repo['local-branch'],
                               repo['compare-resmode']))
                        break
                    else:
                        self.logger.info("Destination branches %s==%s"
                                         % (base_repo['local-branch'],
                                            dest_repo['local-branch']))
    else:
        self.logger.warn("Skipping destination branch comparison")

    # Upload the destination branches
    if status is True:
        for dest_repo in dest_repolist:
            if dest_repo['upload-copy'] is True:
                upload_options = dest_repo['upload-options']
                self._upload_repo(dest_repo['local-branch'], upload_options)
    else:
        self.logger.warn("Skipping destination branch upload")

    # Send out a summary email describing sources/destinations.
    if repo['send-email']:
        subject = [] if len(
            repo['email-prefix']) == 0 else [repo['email-prefix']]
        content = []

        subject.append("integration")
        if status:
            subject.append('passed')
        else:
            subject.append('failed')

        content.append(format_h1("This repo integration includes:"))
        content.append(format_h1("Following source branches:"))
        content.append('')

        for rname, rbranch, upstream, shalist in merge_list:
            # NOTE(review): 'url' is assigned but never used below.
            url = rname
            if len(rname) == 0:
                rname = 'local-branch'
            else:
                # Resolve the remote name to its URL when possible.
                uret = self.git.cmd('remote get-url %s' % rname)
                if uret[0] == 0:
                    rname = uret[1].strip()
            content.append('Remote: %s' % rname)
            content.append('Branch: %s' % rbranch)
            content.append('')

        content.append(format_h1("Following destination branches:"))
        content.append('')

        for dest_repo in dest_repolist:
            content.append('Branch: %s' % dest_repo['local-branch'])
            content.append('Merge Mode: %s' % dest_repo['merge-mode'])
            if dest_repo['upload-copy'] is True:
                content.append('')
                content.append('Uploaded branch to,')
                upload_options = dest_repo['upload-options']
                # Map the remote name back to its URL from remote-list.
                rurl = upload_options['url']
                for ritem in self.remote_list:
                    if ritem['name'] == rurl:
                        rurl = ritem['url']
                        break
                content.append('Upload Remote: %s' % rurl)
                content.append('Upload Branch: %s' % upload_options['branch'])
            content.append('')

        if self.emailobj is not None:
            self.emailobj.send_email(' '.join(subject), '\n'.join(content))

    return status
def generate_quilt(self, local_branch=None, base=None, head=None,
                   patch_dir='quilt', sed_file=None, audit_script=None,
                   series_comment=''):
    """
    Generate the quilt series for the given kernel source.

    :param local_branch: Name of the kernel branch (default: current branch).
    :param base: First SHA ID (default: base SHA of the repo).
    :param head: Head SHA ID (default: HEAD).
    :param patch_dir: Output directory for storing the quilt series.
                      If it exists, it will be removed.
    :param sed_file: Sed format list applied to the generated patches.
    :param audit_script: Audit script. It will be called with patch_dir as
                         input. If it returns a non-zero value then this
                         function will exit and return None.
    :param series_comment: Comments to add on top of series file.
    :return: Return patch_dir or None.
    """
    set_val = lambda x, y: y if x is None else x

    self.logger.info(format_h1("Generating quilt series", tab=2))

    if not self.valid_git:
        self.logger.error("Invalid git repo %s", self.src)
        return None

    if sed_file is not None and not os.path.exists(sed_file):
        self.logger.error("sed pattern file %s does not exist", sed_file)
        return None

    # Start from a clean output directory.
    if os.path.exists(os.path.abspath(patch_dir)):
        shutil.rmtree(patch_dir, ignore_errors=True)
    os.makedirs(patch_dir)

    local_branch = set_val(local_branch, self.git.current_branch())

    if self.git.cmd('checkout', local_branch)[0] != 0:
        self.logger.error("git checkout command failed in %s", self.src)
        return None

    try:
        series_file = os.path.join(patch_dir, 'series')

        # if base SHA is not given use TAIL as base SHA
        if base is None:
            base = self.git.base_sha()
            if base is None:
                raise Exception("git log command failed")

        # if head SHA is not given use HEAD as head SHA
        if head is None:
            head = self.git.head_sha()
            if head is None:
                raise Exception("git fetch head SHA failed")

        # Create the list of patches 'git format-patch -C -M base..head -o patch_dir'
        ret, out, err = self.git.cmd('format-patch', '-C', '-M',
                                     base.strip() + '..' + head.strip(),
                                     '-o', patch_dir)
        if ret != 0:
            raise Exception(
                "git format patch command failed out: %s error: %s" % (out, err))

        # Format the patches using sed
        if sed_file is not None:
            ret, out, err = self.sh.cmd('sed -i -f%s %s/*.patch' % (sed_file, patch_dir), shell=True)
            if ret != 0:
                raise Exception("sed command failed out: %s error: %s" % (out, err))

        # Make sure the patches passes audit check.
        # NOTE(review): "Audid" typo in the message below is a runtime
        # string and is preserved verbatim.
        if audit_script is not None:
            ret, out, err = self.sh.cmd(audit_script, patch_dir, shell=True)
            if ret != 0:
                raise Exception("Audid check failed out: %s error: %s" % (out, err))

        # Write series file comments.
        with open(series_file, 'w+') as fobj:
            fobj.write(series_comment)

        # Write the list of series file.
        ret, out, err = self.sh.cmd('ls -1 *.patch >> series', wd=patch_dir, shell=True)
        if ret != 0:
            raise Exception(
                "Writing to patch series file failed. Out:%s Error: %s" % (out, err))
    except Exception as e:
        # Clean up the partially-generated series on any failure.
        if os.path.exists(patch_dir):
            shutil.rmtree(patch_dir)
        self.logger.error(e, exc_info=True)
        return None
    else:
        return patch_dir