def smatch_list(self, arch='', config_list=None, cc='', cflags=None):
    """Run the smatch test for every kernel config in config_list.

    :param arch: Architecture name, passed through to self.smatch().
    :param config_list: List of kernel config names; defaults to an empty list.
    :param cc: Compiler name.
    :param cflags: List of extra compiler flags; defaults to an empty list.
    :return: List with one self.smatch() result per config.
    """
    # Bug fix: the original used mutable default arguments ([], []), which
    # are shared across calls; use None sentinels instead.
    config_list = [] if config_list is None else config_list
    cflags = [] if cflags is None else cflags
    self.logger.info(format_h1("Running smatch tests", tab=2))
    return [self.smatch(arch, config, cc, cflags) for config in config_list]
def generate_git_bundle(self, mode='branch', local_branch=None, head=None, base=None, commit_count=0, outfile='git.bundle'):
    """Create a git bundle of the source repo.

    :param mode: Bundle selection mode; must be one of self.bundle_modes
                 ('branch', 'diff' or 'commit_count' are handled below).
    :param local_branch: Branch to bundle; defaults to the current branch.
    :param head: Head SHA/tag (used when mode == 'diff').
    :param base: Base SHA/tag (used when mode == 'diff').
    :param commit_count: Number of commits to bundle (mode == 'commit_count').
    :param outfile: Name of the output bundle file.
    :return: Absolute path of the bundle on success, None on any error.
    """
    set_val = lambda x, y: y if x is None else x
    if mode not in self.bundle_modes:
        self.logger.error("Invalid bundle mode %s", mode)
        return None
    if not self.valid_git:
        self.logger.error("Invalid git repo %s", self.src)
        return None
    if outfile is None:
        self.logger.error("Invalid bundle name %s", outfile)
        return None
    local_branch = set_val(local_branch, self.git.current_branch())
    outfile = os.path.abspath(outfile)
    if os.path.exists(outfile):
        # Bug fix: the bundle is a regular file, so shutil.rmtree() would
        # raise NotADirectoryError here; delete it with os.remove() instead.
        os.remove(outfile)
    self.logger.info(format_h1("Generating git bundle", tab=2))
    try:
        if local_branch is not None:
            ret, out, err = self.git.cmd('checkout', local_branch)
            if ret != 0:
                raise Exception("Git checkout command failed in %s" % self.src)
        if mode == 'branch' and local_branch is not None:
            ret, out, err = self.git.cmd('bundle', 'create', outfile, local_branch)
            if ret != 0:
                raise Exception("Git bundle create command failed")
        elif mode == 'diff' and head is not None and base is not None:
            ret, out, err = self.git.cmd('bundle', 'create', outfile, str(base) + '..' + str(head))
            if ret != 0:
                raise Exception("Git bundle create command failed")
        elif mode == 'commit_count' and local_branch is not None:
            ret, out, err = self.git.cmd('bundle', 'create', outfile, '-' + str(commit_count), local_branch)
            if ret != 0:
                raise Exception("Git bundle create command failed")
    except Exception as e:
        self.logger.error(e)
        return None
    else:
        return outfile
def run_test(self, cfg):
    """Parse the given JSON test config and run the enabled kernel tests.

    Recognised sections: compile-config, checkpatch-config, aiaiai-config.

    :param cfg: JSON test config input (validated against TEST_SCHEMA).
    :return: True when every enabled test passed, False otherwise.
    """
    self.logger.info(format_h1("Running kernel tests from json", tab=2))
    overall = True
    if cfg is None:
        self.cfg = None
    else:
        self.cfg = JSONParser(TEST_SCHEMA, cfg, extend_defaults=True).get_cfg()
    if self.cfg is None:
        self.logger.warning("Invalid JSON config file")
        return False
    self.logger.info(self.cfg.keys())

    compile_cfg = self.cfg.get("compile-config", None)
    self.logger.info(compile_cfg)
    if compile_cfg is not None and compile_cfg["enable"] is True:
        for entry in compile_cfg["test-list"]:
            # Run every config whose flag is truthy in this test entry.
            for config in (name for name in supported_configs if entry[name]):
                passed = self.compile(entry["arch_name"], config,
                                      entry["compiler_options"]["CC"],
                                      entry["compiler_options"]["cflags"])
                if passed is False:
                    self.logger.error(
                        "Compilation of arch:%s config:%s failed\n"
                        % (entry["arch_name"], config))
                overall &= passed

    checkpatch_cfg = self.cfg.get("checkpatch-config", None)
    self.logger.info(checkpatch_cfg)
    if checkpatch_cfg is not None and checkpatch_cfg["enable"] is True:
        if len(checkpatch_cfg["source"]) > 0:
            self.checkpatch_source = checkpatch_cfg["source"]
        overall &= self.run_checkpatch()[0]

    aiaiai_cfg = self.cfg.get("aiaiai-config", None)
    self.logger.info(aiaiai_cfg)
    if aiaiai_cfg is not None and aiaiai_cfg["enable"] is True:
        if len(aiaiai_cfg["source"]) > 0:
            self.aiaiai_source = aiaiai_cfg["source"]
        overall &= self.run_aiaiai()

    return overall
def run_checkpatch(self):
    """Run the kernel checkpatch script on each patch in base..head.

    Walks the commit range one patch at a time and accumulates the error
    and warning totals that checkpatch reports.

    :return: Tuple (status, err_count, warning_count); status is False when
             the repo/script/commands fail, True otherwise.
    """
    self.logger.info(format_h1("Runing checkpatch script", tab=2))
    self.enable_checkpatch = True
    # Removed the unused `get_val` lambda the original defined here.
    err_count = 0
    warning_count = 0
    try:
        if self.valid_git is False:
            raise Exception("Invalid git repo")
        if not os.path.exists(os.path.join(self.src, CHECK_PATCH_SCRIPT)):
            raise Exception("Invalid checkpatch script")
        ret, count, err = self.git.cmd('rev-list', '--count', str(self.base) + '..' + str(self.head))
        if ret != 0:
            raise Exception("git rev-list command failed")
        self.logger.debug("Number of patches between %s..%s is %d",
                          self.base, self.head, int(count))

        def parse_results(data):
            # checkpatch emits "total: N errors, M warnings," per patch.
            regex = r"total: ([0-9]*) errors, ([0-9]*) warnings,"
            match = re.search(regex, data)
            if match:
                return int(match.group(1)), int(match.group(2))
            return 0, 0

        # Check one commit at a time: HEAD~index..HEAD~prev_index.
        prev_index = 0
        for index in range(1, int(count) + 1):
            commit_range = str(self.head) + '~' + str(index) + '..' + str(
                self.head) + '~' + str(prev_index)
            ret, out, err = self.sh.cmd(
                os.path.join(self.src, CHECK_PATCH_SCRIPT), '-g', commit_range)
            error, warning = parse_results(out)
            if error != 0 or warning != 0:
                self.logger.debug(out)
                self.logger.debug(err)
            err_count += error
            warning_count += warning
            prev_index = index
    except Exception as e:
        self.logger.error(e)
        return False, err_count, warning_count
    else:
        self.resobj.update_checkpatch_results(True, err_count, warning_count)
        return True, err_count, warning_count
def _static_analysis(self, options):
    """Run static analysis tests. Supported tests are checkpatch, aiaiai.

    :param options: Dict of enable flags keyed by test name.
    :return: Tuple (status, results-string) — the same shape _compile_test()
             returns and the shape the caller in _test_branch() unpacks.
    """
    supported_tests = ['checkpatch', 'aiaiai']
    status = True
    self.logger.info(format_h1("Static Analysis tests", tab=2))

    # Create a result list for supported test types. Default result type is 'N/A'.
    def results_template():
        results = {}
        for test in supported_tests:
            results[test] = 'N/A'
        return results

    results = results_template()

    # Generate test results string.
    def generate_results(results):
        out = 'Static Analysis Results:\n'
        width = len(max(supported_tests, key=len)) * 2
        for test in supported_tests:
            out += ('\t%-' + str(width) + 's: %s\n') % (test, results[test])
        return out + '\n\n'

    if options['checkpatch']:
        self.logger.info(format_h1("Checkpatch tests", tab=2))
    if options['aiaiai']:
        self.logger.info(format_h1("AiAiAi tests", tab=2))

    # Bug fix: the original returned only the string, but _test_branch()
    # unpacks two values (test_status, test_out) — return a tuple like
    # _compile_test() does.
    return status, generate_results(results)
def clean_repo(self):
    """Hard-reset the work tree, drop untracked files, and delete every
    local branch except the currently checked-out one.

    :return: None
    """
    self.logger.info(format_h1("Cleaning repo", tab=2))
    self._git("reset", "--hard")
    self._git("clean", "-fdx")
    for name in (line.strip() for line in self._git('branch').splitlines()):
        # The current branch is listed with a leading '* '; keep it.
        if not name.startswith('* '):
            self._git("branch", "-D", name)
def _generate_output(self, head, branch_name, output_options):
    """Generate alternate outputs for the given repo (currently: quilt).

    For the quilt option this runs
    ``git format-patch -C -M <head>..<branch SHA> -o <quilt-folder>`` and
    writes the patch file names to a series file.

    :param head: SHA ID or tag of the head of the kernel branch.
    :param branch_name: Name of the branch.
    :param output_options: Dict of output options, may contain a 'quilt' key.
    :return: None
    """
    if output_options is None:
        return
    quilt_params = output_options.get('quilt', None)
    if quilt_params is None:
        return
    quilt_folder = os.path.join(self.repo_dir, 'quilt')
    if quilt_params["quilt-folder"] != "":
        quilt_folder = os.path.join(self.repo_dir, quilt_params["quilt-folder"])
    self.logger.info(format_h1("Generating quilt patches in %s", tab=2) % quilt_folder)
    self.logger.info(quilt_folder)
    # Start from an empty quilt folder.
    if os.path.exists(quilt_folder):
        rmtree(quilt_folder, ignore_errors=True)
    os.makedirs(quilt_folder)
    tail = self.git.cmd('rev-parse', branch_name)[1]
    err_code, output, err = self.git.cmd(
        'format-patch', '-C', '-M',
        head.strip() + '..' + tail.strip(), '-o', quilt_folder)
    if err_code != 0:
        return
    # Record the generated patch file names (basenames only) in the series.
    names = [os.path.split(path)[1] for path in output.split('\n')]
    with open(os.path.join(quilt_folder, 'series.txt'), 'w') as series:
        series.write(str('\n'.join(names)))
def _test_branch(self, branch_name, test_options):
    """Run the configured test profiles on a branch and report the status.

    :param branch_name: Name of the kernel branch.
    :param test_options: Dict with test options.
        profiles - List of test profiles; supported profiles are
        "compile-tests" and "static-analysis", whose options live in
        self.test_profiles.
    :return: True when every selected profile passed, False otherwise.
    """
    self.logger.info(format_h1("Testing %s", tab=2) % branch_name)
    # Map each known profile name to its test runner.
    runners = {
        'compile-tests': self._compile_test,
        'static-analysis': self._static_analysis,
    }
    passed = True
    report = '\n\n'
    self._git("checkout", branch_name)
    for profile in test_options['profiles']:
        runner = runners.get(profile)
        if runner is None:
            continue
        test_status, test_out = runner(self.test_profiles[profile])
        report += test_out
        if test_status is False:
            passed = False
    self.logger.debug(report)
    # Optionally mail the collected results to the configured recipients.
    if test_options['send-email'] is True:
        content = "Following is the test results for branch %s\n" % branch_name
        content += report
        self.send_email(subject_prefix=test_options['subject-prefix'],
                        subject="Test Results", content=content)
    return passed
def _upload_branch(self, branch_name, upload_options):
    """Push the given local branch to a remote.

    Supported upload modes are force-push, push and refs-for (Gerrit).

    :param branch_name: Name of the local branch.
    :param upload_options: Dict with upload params.
        url - Name of the git remote.
        branch - Remote branch of the git repo.
    :return: Nothing.
    """
    self.logger.info(format_h1("Uploading %s", tab=2) % branch_name)
    mode = upload_options['mode']
    url = upload_options['url']
    remote_branch = upload_options['branch']
    if mode == 'force-push':
        self._git("push", "-f", url, branch_name + ":" + remote_branch)
    elif mode == 'push':
        self._git("push", url, branch_name + ":" + remote_branch)
    elif mode == 'refs-for':
        self._git("push", url, branch_name + ":refs/for/" + remote_branch)
def generate_tar_gz(self, local_branch=None, outfile=None):
    """Create a .tar.gz archive of the kernel source, excluding .git data.

    :param local_branch: Optional branch to check out before archiving.
    :param outfile: Output path; defaults to <src>/kernel.tar.gz.
    :return: Path of the archive on success, None when checkout fails.
    """
    self.logger.info(format_h1("Generating tar gz", tab=2))
    if local_branch is not None and self.valid_git:
        ret, out, err = self.git.cmd('checkout', local_branch)
        if ret != 0:
            self.logger.error("Git checkout command failed in %s", self.src)
            return None
    if outfile is None:
        outfile = os.path.join(self.src, 'kernel.tar.gz')

    def valid_file(tarinfo):
        # Exclude anything under .git so repo metadata stays out of the tar.
        if '.git' in tarinfo.name:
            return None
        return tarinfo

    # Bug fix: the original never closed the tarfile handle, leaking the
    # file descriptor and risking a truncated gzip stream; use `with`.
    with tarfile.open(outfile, mode='w:gz') as tar:
        tar.add(self.src, recursive=True, filter=valid_file)
    return outfile
def run_aiaiai(self):
    """AiAiAi hook; not implemented yet, so it always reports success."""
    banner = format_h1("Run AiAiAi Script", tab=2)
    self.logger.info(banner)
    return True
def upload_release(self, src, remote_cfg=None):
    """Upload a release (kernel tree, quilt series or tar file) to remotes.

    :param src: Source dir or file to upload.
    :param remote_cfg: List of dicts, one per remote upload:
        "remote"           : (remote name, remote URL) tuple.
        "remotebranch"     : Remote branch name.
        "new_commit"       : Create a new commit before uploading (True|False).
        "file_format"      : Glob patterns of files to add to the commit.
        "remotedir"        : Relative destination dir in the remote repo.
        "commit_msg"       : Commit message.
        "clean_update"     : Remove existing content before adding (True|False).
        "destdir"          : Local staging dir for the new changes.
        "use_refs"         : Push via refs/for (True|False).
        "force_update"     : Force push (True|False).
        "timestamp_suffix" : Append a timestamp to remotebranch, creating a
                             new branch (True|False).
        "timestamp_format" : strftime format, e.g. "%m%d%Y%H%M%S".
        "tag_list"         : [(name, msg)] tags; empty list for none, msg may
                             be None for a lightweight tag.
    :return: True on success, False otherwise.
    """
    if remote_cfg is None:
        return False
    src = os.path.abspath(src)
    dest_dir = src
    used_temp = False
    try:
        from shutil import copyfile
        for cfg in remote_cfg:
            dest_dir = src
            used_temp = False
            # Bug fix: check for None before calling len(), and actually
            # raise — the original built Exception objects without `raise`,
            # silently continuing with remote_list undefined.
            if cfg["remote"] is not None and len(cfg["remote"]) > 0 and cfg["remote"][1] is not None:
                remote_list = [cfg["remote"]]
            else:
                raise Exception("Incorrect remote name %s or url %s" % cfg["remote"])
            rbranch = cfg["remotebranch"]
            if cfg["timestamp_suffix"] and len(rbranch) > 0:
                self.logger.info(format_h1("Upload timestamp branch", tab=2))
                ts = datetime.datetime.utcnow().strftime(cfg["timestamp_format"])
                rbranch = rbranch + '-' + ts
            if cfg["new_commit"] is True:
                file_list = []
                if cfg["destdir"] is not None:
                    if not os.path.exists(cfg["destdir"]):
                        raise Exception("Destination dir %s does not exist" % cfg["destdir"])
                    dest_dir = os.path.abspath(cfg["destdir"])
                else:
                    used_temp = True
                    dest_dir = tempfile.mkdtemp()
                git = GitShell(wd=dest_dir, init=True, remote_list=remote_list,
                               fetch_all=True, logger=self.logger)
                git.cmd("checkout", cfg["remote"][0] + '/' + cfg["remotebranch"])
                if os.path.isdir(src):
                    for pattern in cfg["file_format"]:
                        file_list += glob.glob(os.path.join(os.path.abspath(src), pattern))
                else:
                    file_list = [src]
                # If clean update is True, then remove all contents of the repo.
                if cfg["clean_update"] is True:
                    ret = git.cmd('rm', cfg["remotedir"] + '/*' if cfg["remotedir"] != '.' else '*')[0]
                    if ret != 0:
                        raise Exception("git rm -r *.patch failed")
                if cfg["remotedir"] != '.' and not os.path.exists(os.path.join(dest_dir, cfg["remotedir"])):
                    os.makedirs(os.path.join(dest_dir, cfg["remotedir"]))
                for item in file_list:
                    dest_path = os.path.join(os.path.abspath(dest_dir),
                                             cfg["remotedir"],
                                             os.path.basename(item))
                    copyfile(item, dest_path)
                    # Bug fix: git.cmd() returns a tuple; compare its status
                    # element, not the whole tuple, against 0.
                    ret = git.cmd('add', cfg["remotedir"] + '/' + os.path.basename(item))[0]
                    if ret != 0:
                        raise Exception("git add %s failed" % (cfg["remotedir"] + '/' + os.path.basename(item)))
                ret = git.cmd('commit -s -m "' + cfg["commit_msg"] + '"')[0]
                if ret != 0:
                    raise Exception("git commit failed")
            git = GitShell(wd=dest_dir, init=True, remote_list=remote_list,
                           fetch_all=True, logger=self.logger)
            ret = git.push('HEAD', cfg["remote"][0], rbranch,
                           force=cfg["force_update"], use_refs=cfg["use_refs"])[0]
            if ret != 0:
                raise Exception("git push to %s %s failed" % (cfg["remote"][0], rbranch))
            # Push the tags if required.
            for tag in cfg["tag_list"]:
                if tag[0] is not None:
                    if tag[1] is not None:
                        ret = git.cmd('tag', '-a', tag[0], '-m', tag[1])[0]
                    else:
                        ret = git.cmd('tag', tag[0])[0]
                    if ret != 0:
                        raise Exception("git tag %s failed" % (tag[0]))
                    ret = git.cmd('push', cfg["remote"][0], tag[0])[0]
                    if ret != 0:
                        raise Exception("git push tag to %s failed" % (cfg["remote"][0]))
    except Exception as e:
        self.logger.error(e)
        if used_temp is True:
            shutil.rmtree(dest_dir)
        return False
    else:
        return True
def _create_branch(self, repo):
    """Merge the branches given in source-list and create the output
    branches given by the dest-list option.

    :param repo: Dict with kernel repo options. See the "repo-params"
                 section in the kernel integration schema file.
    :return: Nothing
    """
    self.logger.info(format_h1("Create %s repo", tab=2) % repo['repo-name'])
    merge_list = []
    status = True
    # Get source branches.
    for srepo in repo['source-list']:
        if srepo['skip'] is True:
            continue
        if srepo['use-local'] is True:
            if self._is_valid_local_branch(srepo['branch']) is False:
                raise Exception("Dependent repo %s does not exits" % srepo['branch'])
            merge_list.append(('', srepo['branch']))
        else:
            merge_list.append((srepo['url'], srepo['branch']))
    # Create destination branches.
    for dest_repo in repo['dest-list']:
        self._git("branch", "-D", dest_repo['local-branch'], silent=True)
        self._git("checkout", repo['repo-head'], "-b", dest_repo['local-branch'])
        if len(merge_list) > 0:
            self._merge_branches(dest_repo['merge-mode'], merge_list,
                                 dest_repo['local-branch'],
                                 dest_repo['merge-options'])
        if dest_repo['test-branch'] is True:
            test_options = dest_repo['test-options']
            status = self._test_branch(dest_repo['local-branch'], test_options)
            if status is False:
                self.logger.error("Testing %s branch failed" % dest_repo['local-branch'])
                break
    # Compare destination branches.
    if status is True:
        if len(repo['dest-list']) > 1:
            base_repo = repo['dest-list'][0]
            for dest_repo in repo['dest-list']:
                # Bug fix: the original called the GitShell object itself
                # (self.git(...)) and passed the option dicts; diff the
                # branch names via self.git.cmd() like the rest of the file.
                ret, out, err = self.git.cmd('diff',
                                             base_repo['local-branch'],
                                             dest_repo['local-branch'])
                if ret != 0:
                    status = False
                    break
                elif len(out) > 0:
                    status = False
                    self.logger.error("Destination branche %s!=%s" %
                                      (base_repo['local-branch'],
                                       dest_repo['local-branch']))
                    break
        else:
            self.logger.warn("Skipping destination branch comparison")
    # Upload the destination branches.
    if status is True:
        for dest_repo in repo['dest-list']:
            if dest_repo['upload-copy'] is True:
                upload_options = dest_repo['upload-options']
                self._upload_branch(dest_repo['local-branch'], upload_options)
            if dest_repo['generate-output'] is True:
                output_options = dest_repo['output-options']
                self._generate_output(repo['repo-head'],
                                      dest_repo['local-branch'],
                                      output_options)
    else:
        self.logger.warn("Skipping destination branch upload")
def _compile_test(self, options):
    # type: (dict) -> boolean, str
    """Run the selected compile tests for the selected architectures.

    Supported architectures: i386, x86_64, arm64. Supported configurations:
    allyesconfig, allnoconfig, allmodconfig, defconfig.

    :param options: List of compile configurations, each with:
        arch_name - Name of the architecture.
        compiler_options - Dict with compiler options:
            CC - Compiler name.
            cflags - Array of compiler options.
        allyesconfig/allnoconfig/allmodconfig/defconfig - selection flags.
    :return: Overall status of the compile tests and the test output text.
             Default status is True and default output is empty.
    """
    supported_archs = ['i386', 'x86_64', 'arm64']
    supported_configs = [
        'allyesconfig', 'allnoconfig', 'allmodconfig', 'defconfig'
    ]
    overall = True
    self.logger.info(format_h1("Compile tests", tab=2))

    # arch x config result matrix; every cell starts out as 'N/A'.
    results = {a: {c: 'N/A' for c in supported_configs}
               for a in supported_archs}

    # Render the result matrix as a printable report.
    def render():
        width = len(max(supported_configs, key=len)) * 2
        text = 'Compile Test Results:\n'
        for a in supported_archs:
            text += '\t%s results:\n' % a
            for c in supported_configs:
                text += ('\t\t%-' + str(width) + 's: %s\n') % (c, results[a][c])
        return text + '\n\n'

    # Run every selected config for every supported arch and record results.
    for params in options:
        arch = params['arch_name']
        if arch not in supported_archs:
            continue
        for config in supported_configs:
            if params[config] is not True:
                continue
            out_dir = os.path.join(self.repo_dir, 'out', arch, config)
            kobj = BuildKernel(src_dir=self.repo_dir, out_dir=out_dir,
                               arch=params['arch_name'],
                               cc=params['compiler_options']['CC'],
                               cflags=params['compiler_options']['cflags'],
                               logger=self.logger)
            getattr(kobj, 'make_' + config)()
            ret, out, err = kobj.make_kernel()
            if ret == 0:
                results[arch][config] = 'Passed'
            else:
                results[arch][config] = 'Failed'
                self.logger.error('Compile test %s/%s failed\n\n' % (arch, config))
                self.logger.error(err)
                overall = False
    return overall, render()
def run_sparse(self):
    """Sparse hook; not implemented yet, so it always reports success."""
    banner = format_h1("Run sparse Script", tab=2)
    self.logger.info(banner)
    return True
def generate_quilt(self, local_branch=None, base=None, head=None, patch_dir='quilt', sed_file=None, series_comment=''):
    """Generate a quilt patch series for base..head of the given branch.

    Any pre-existing patch_dir is moved aside and restored on failure.

    :param local_branch: Branch to check out; defaults to the current branch.
    :param base: Base SHA; defaults to the repo TAIL when None.
    :param head: Head SHA; defaults to HEAD when None.
    :param patch_dir: Output directory for the quilt series.
    :param sed_file: Optional sed pattern file applied to every patch.
    :param series_comment: Comment written at the top of the series file.
    :return: Path of the quilt directory on success, None on failure.
    """
    restore_backup = False
    self.logger.info(format_h1("Generating quilt series", tab=2))
    if not self.valid_git:
        self.logger.error("Invalid git repo %s", self.src)
        return None
    if sed_file is not None and not os.path.exists(sed_file):
        self.logger.error("sed pattern file %s does not exist", sed_file)
        return None
    if local_branch is None:
        local_branch = self.git.current_branch()
    if local_branch is not None:
        if self.git.cmd('checkout', local_branch)[0] != 0:
            self.logger.error("git checkout command failed in %s", self.src)
            return None
    try:
        patch_dir = os.path.abspath(patch_dir)
        patch_dir_bkup = patch_dir + '.old'
        # Keep any existing series as a backup until we know we succeeded.
        if os.path.exists(patch_dir):
            restore_backup = True
            shutil.move(patch_dir, patch_dir_bkup)
        os.makedirs(patch_dir)
        series_file = os.path.join(patch_dir, 'series')
        # if base SHA is not given use TAIL as base SHA
        if base is None:
            base = self.git.base_sha()
            if base is None:
                raise Exception("git log command failed")
        # if head SHA is not given use HEAD as head SHA
        if head is None:
            head = self.git.head_sha()
            if head is None:
                raise Exception("git fetch head SHA failed")
        ret, out, err = self.git.cmd('format-patch', '-C', '-M',
                                     base.strip() + '..' + head.strip(),
                                     '-o', patch_dir)
        if ret != 0:
            raise Exception("git format patch command failed out: %s error: %s" % (out, err))
        if sed_file is not None:
            ret, out, err = self.sh.cmd('sed -i -f%s %s/*.patch' % (sed_file, patch_dir), shell=True)
            if ret != 0:
                raise Exception("sed command failed %s" % err)
        with open(series_file, 'w+') as fobj:
            fobj.write(series_comment)
        ret, out, err = self.sh.cmd('ls -1 *.patch >> series', wd=patch_dir, shell=True)
        if ret != 0:
            raise Exception("Writing to patch series file failed. Error: %s" % err)
    except Exception as e:
        # On failure drop the partial output and put the backup back.
        if os.path.exists(patch_dir):
            shutil.rmtree(patch_dir)
        if restore_backup is True:
            shutil.move(patch_dir_bkup, patch_dir)
        self.logger.error(e)
        return None
    else:
        if restore_backup is True:
            shutil.rmtree(patch_dir_bkup)
        return patch_dir
def __init__(self, cfg, schema, repo_head='', repo_dir=os.getcwd(), subject_prefix='', skip_rr_cache=False, logger=None):
    # type: (json, jsonschema, str, str, str, boolean, boolean) -> object
    """Constructor of KernelInteg class.

    :rtype: object
    :param cfg: Kernel Integration Json config file.
    :param schema: Kernel Integration Json schema file.
    :param repo_head: SHA-ID or Tag of given branch.
    :param repo_dir: Repo directory.
    :param subject_prefix: Prefix for email subject.
    :param skip_rr_cache: Skip rr cache if set True.
    :param logger: Logger object; a module-level logger is used when None.
    """
    self.logger = logger or logging.getLogger(__name__)
    self.cfg = JSONParser(cfg, schema, logger=self.logger).get_cfg()
    self.remote_list = self.cfg['remote-list']
    self.repos = self.cfg['repos']
    self.kint_repos = self.cfg['kint-list']
    self.email_options = self.cfg['email-options']
    self.test_profiles = self.cfg['test-profiles']
    self.repo_dir = repo_dir
    self.skip_rr_cache = skip_rr_cache
    self.subject_prefix = subject_prefix
    # All git commands will be executed in the repo directory.
    self.git = GitShell(wd=self.repo_dir, logger=self.logger)

    # Initialise the directory as a git repo if it is not one already.
    self.logger.info(format_h1("Initalizing repo", tab=2))
    if not os.path.exists(os.path.join(self.repo_dir, ".git")):
        self._git("init", ".")

    # Make sure the build output directory exists.
    out_dir = os.path.join(self.repo_dir, 'out')
    if not os.path.exists(out_dir):
        self.logger.info(format_h1("Create out dir", tab=2))
        os.makedirs(out_dir)

    # Register every configured remote, then fetch the latest updates.
    self.logger.info(format_h1("Add remote", tab=2))
    for remote in self.remote_list:
        self._git("remote", "add", remote['name'], remote['url'], silent=True)
    self._git("remote", "update")

    # Validate the head passed as an argument, if any.
    valid_repo_head = False
    if len(repo_head) > 0:
        if self._is_valid_head(repo_head) is False:
            raise Exception("Invalid repo head %s" % repo_head)
        valid_repo_head = True

    # The argument head overrides per-repo heads; otherwise every repo's
    # config head must be present and valid.
    for repo in self.repos:
        if valid_repo_head is True:
            repo['repo-head'] = repo_head
        elif len(repo['repo-head']) == 0:
            raise Exception("No valid repo head found for %s" % repo['repo-name'])
        elif self._is_valid_head(repo['repo-head']) is False:
            raise Exception("Invalid repo head %s" % repo['repo-head'])

    # Check out HEAD~1 so the work tree is in a detached state.
    self._git("checkout", 'HEAD~1', silent=True)