Ejemplo n.º 1
0
    def __init__(self, src, cfg=None, logger=None):
        """
        Initialize a kernel release helper for the source tree *src*.

        :param src: Kernel source directory (made absolute).
        :param cfg: Release JSON config file, parsed against
                    schemas/release-schema.json; may be None.
        :param logger: Logger object; a module logger is used if None.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = os.path.abspath(src)
        # Branch/remote parameters; populated later by callers.
        self.base = None
        self.head = None
        self.local_branch = None
        self.remote = None
        self.remote_branch = None
        self.git = GitShell(wd=self.src, logger=logger)
        self.sh = PyShell(wd=self.src, logger=logger)
        self.valid_git = False
        self.cfg = None
        self.cfgobj = None
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/release-schema.json')
        # Supported bundle generation modes.
        self.bundle_modes = ['branch', 'diff', 'commit_count']

        # Run shell/git commands for real (disable dry-run mode).
        self.git.dryrun(False)
        self.sh.dryrun(False)

        # Bail out early on a non-kernel tree; cfg/valid_git stay unset.
        if not is_valid_kernel(src, logger):
            return

        self.cfgobj = JSONParser(self.schema,
                                 cfg,
                                 extend_defaults=True,
                                 os_env=True,
                                 logger=logger)
        self.cfg = self.cfgobj.get_cfg()

        if self.git.valid():
            self.valid_git = True
Ejemplo n.º 2
0
 def test_git_version(self):
     """Verify that `git --version` reports a semantic version string."""
     git = GitShell(logger=logger)
     version = git.cmd('--version')[1]
     if re.match(r'git version \d+\.\d+\.\d+', version):
         logger.info(version)
     else:
         # Bug fix: the original merely instantiated AssertionError
         # without raising it, so this test could never fail.
         raise AssertionError("Git version command failed")
Ejemplo n.º 3
0
    def __init__(self, src, cfg=None, out=None, rname=None, rurl=None, branch=None, head=None, base=None,
                 res_cfg=None, logger=None):
        """
        Initialize a kernel test session on the given source tree.

        :param src: Kernel source directory.
        :param cfg: Test JSON config file (parsed against test-schema).
        :param out: Output directory; defaults to <src>/out.
        :param rname: Optional git remote name; when given, the remote is
                      added and fetched and *branch* is taken from it.
        :param rurl: URL of the git remote *rname*.
        :param branch: Branch to check out; defaults to the current branch.
        :param head: Head SHA; defaults to the current git head.
        :param base: Base SHA; defaults to the current git base.
        :param res_cfg: Previous kernel-results config, if any.
        :param logger: Logger object; a module logger is used if None.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = src
        # Bug fix: os.path.absapth() was a typo that raised AttributeError
        # whenever an explicit out directory was passed.
        self.out = os.path.join(self.src, 'out') if out is None else os.path.abspath(out)
        self.branch = branch
        self.rname = rname
        self.rurl = rurl
        self.head = head
        self.base = base
        self.valid_git = False
        self.schema = pkg_resources.resource_filename('klibs', 'schemas/test-schema.json')
        self.cfg = None
        self.cfgobj = None
        self.resobj = KernelResults(self.src, old_cfg=res_cfg, logger=self.logger)
        self.git = GitShell(wd=self.src, logger=logger)
        self.sh = PyShell(wd=self.src, logger=logger)
        self.checkpatch_source = CHECK_PATCH_SCRIPT
        self.custom_configs = []

        if self.rname is not None and len(self.rname) > 0:
            if not os.path.exists(self.src):
                os.makedirs(self.src)
            if not self.git.valid():
                self.git.init()
            self.git.add_remote(self.rname, rurl)
            self.git.cmd('fetch %s' % self.rname)
            # NOTE(review): raises TypeError if rname is set but branch is
            # None -- confirm callers always pass a branch with rname.
            self.branch = self.rname + '/' + self.branch

        self.valid_git = True if self.git.valid() else False

        if self.valid_git:
            if self.branch is not None and len(self.branch) > 0:
                if self.git.cmd('checkout', self.branch)[0] != 0:
                    self.logger.error("Git checkout command failed in %s", self.src)
                    return
            else:
                self.branch = self.git.current_branch()

            # Update base & head if they were not given.
            if self.head is None:
                self.head = self.git.head_sha()
            if self.base is None:
                self.base = self.git.base_sha()

            self.resobj.update_kernel_params(base=self.base, head=self.head, branch=self.branch)

        if not is_valid_kernel(src, logger):
            return

        self.version = BuildKernel(self.src).uname

        if len(self.version) > 0:
            self.resobj.update_kernel_params(version=self.version)

        if cfg is not None:
            self.cfgobj = JSONParser(self.schema, cfg, extend_defaults=True, os_env=True, logger=logger)
            self.cfg = self.cfgobj.get_cfg()
Ejemplo n.º 4
0
    def __init__(self, type, src, idir, logger=None):
        """
        Initialize a rootfs builder.

        :param type: Rootfs type; must be a key of supported_rootfs.
                     (Shadows the builtin 'type' but is kept for
                     backward compatibility with existing callers.)
        :param src: Rootfs source directory (created if missing).
        :param idir: Install/output directory (created if missing).
        :param logger: Logger object; a module logger is used if None.
        """
        self.logger = logger or logging.getLogger(__name__)

        # Dead code removed: the original created a PyShell bound to
        # os.getcwd() here that was never used.

        if type not in supported_rootfs.keys():
            self.logger.error("Rootfs type %s is not supported", type)
            return

        self.type = type
        self.src = os.path.abspath(src)
        self.idir = os.path.abspath(idir)
        self.build_init = False

        if not os.path.exists(self.src):
            self.logger.warning(
                "Source dir %s does not exists, So creating it.", self.src)
            os.makedirs(self.src)

        if not os.path.exists(self.idir):
            self.logger.warning("Out dir %s does not exists, So creating it.",
                                self.idir)
            os.makedirs(self.idir)

        # Out dir shell
        self.sh = PyShell(wd=self.idir, stream_stdout=True, logger=logger)
        self.git = GitShell(wd=self.src, stream_stdout=True, logger=logger)
Ejemplo n.º 5
0
    def _busybox_init(self, src_dir):
        """
        Prepare the busybox source tree in *src_dir*: create the directory,
        add/fetch the 'origin' remote and check out the configured branch.

        :param src_dir: Directory for the busybox git checkout.
        :return: True on success, False (or a falsy checkout result) on
                 failure.
        """
        if self.config is not None and not os.path.exists(self.config):
            self.logger.error("Invalid config %s", self.config)
            return False

        if self.src_branch is not None and not valid_str(self.src_branch):
            self.logger.error("Invalid branch %s", self.src_branch)
            return False

        if not os.path.exists(src_dir):
            os.makedirs(src_dir)

        git = GitShell(wd=src_dir)

        git.update_shell()

        # Fall back to the defaults from supported_rootfs when no explicit
        # URL/branch was configured.
        src_url = supported_rootfs[self.type][0] if self.src_url is None else self.src_url
        if self.src_branch is None:
            self.src_branch = supported_rootfs[self.type][1]

        # NOTE(review): ret[0] is treated as a truthy success flag here,
        # while cmd() below returns a 0-on-success exit code -- confirm
        # add_remote() really returns a boolean first element.
        ret = git.add_remote('origin', src_url)
        if not ret[0]:
            self.logger.error("Add remote %s failed", src_url)
            return False

        ret = git.cmd("fetch origin")
        if ret[0] != 0:
            self.logger.error("Git remote fetch failed")
            return False

        ret = git.checkout('origin', self.src_branch)
        if not ret:
            self.logger.error("checkout branch %s failed", self.src_branch)
            return ret

        # Bug fix: the original fell off the end and returned None on
        # success, which reads as failure to any boolean-checking caller.
        return True
Ejemplo n.º 6
0
    def _build_busybox(self, config=None):
        """
        Fetch, configure and build busybox, then install it into self.idir.

        :param config: Optional busybox .config file; 'make defconfig' is
                       run when None.
        :return: True on success, False on failure (or the falsy checkout
                 result when the branch checkout fails).
        """
        if config is not None and not os.path.exists(config):
            self.logger.error("Invalid config %s", config)
            return False

        src_dir = os.path.join(self.src, "busybox", "src")
        if not os.path.exists(src_dir):
            os.makedirs(src_dir)

        git = GitShell(wd=src_dir)

        git.update_shell()

        # NOTE(review): ret[0] is treated as a truthy success flag here,
        # while the cmd() calls below compare against a 0 exit code --
        # confirm add_remote() really returns a boolean first element.
        ret = git.add_remote('origin', supported_rootfs[self.type][0])
        if not ret[0]:
            self.logger.error("Add remote %s failed" %
                              supported_rootfs[self.type][0])
            return False

        ret = git.cmd("fetch origin")
        if ret[0] != 0:
            self.logger.error("Git remote fetch failed")
            return False

        ret = git.checkout('origin', supported_rootfs[self.type][1])
        if not ret:
            self.logger.error("checkout branch %s failed",
                              supported_rootfs[self.type][1])
            return ret

        if config is None:
            ret = self.sh.cmd("make defconfig", wd=src_dir)
            if ret[0] != 0:
                self.logger.error("make defconfig failed")
                return False
        else:
            # NOTE(review): the copy result is not checked; a failed copy
            # would silently build with a stale .config.
            self.sh.cmd("cp -f %s %s/.config" % (config, src_dir))

        ret = self.sh.cmd("make", wd=src_dir)
        if ret[0] != 0:
            self.logger.error("make busybox failed")
            return False

        # Post-build fixup script shipped with the mkrootfs package.
        script = pkg_resources.resource_filename('mkrootfs',
                                                 'scripts/busybox.sh')

        self.sh.cmd("%s %s" % (script, self.idir))

        self.sh.cmd("make CONFIG_PREFIX=%s install" % self.idir, wd=src_dir)

        self.sh.cmd("make clean", wd=src_dir)

        return True
Ejemplo n.º 7
0
    def __init__(self, type, src, idir, out=None, logger=None):
        """
        Initialize a rootfs builder with separate source, install and
        output directories (each created on demand).

        :param type: Rootfs type; must be a key of supported_rootfs.
                     (Shadows the builtin 'type' but is kept for
                     backward compatibility with existing callers.)
        :param src: Base source directory.
        :param idir: Base install directory.
        :param out: Base output directory; defaults to the source dir.
        :param logger: Logger object; a module logger is used if None.
        """
        self.logger = logger or logging.getLogger(__name__)

        if type not in supported_rootfs.keys():
            self.logger.error("Rootfs type %s is not supported", type)
            return

        self.type = type
        # "minrootfs" builds are backed by the busybox sources.
        self.src = os.path.join(os.path.abspath(src), "busybox" if type == "minrootfs" else type)
        self.out = os.path.join(os.path.abspath(out), self.type) if out is not None else self.src
        self.idir = os.path.join(os.path.abspath(idir), self.type)
        self.cc = None
        self.arch = "x86_64"
        self.cflags = []
        self.build_init = False
        self.config = None
        self.diffconfig = None
        self.src_url = None
        self.src_branch = None

        if not os.path.exists(self.src):
            self.logger.warning("Source dir %s does not exists, So creating it.", self.src)
            os.makedirs(self.src)

        if not os.path.exists(self.idir):
            self.logger.warning("Install dir %s does not exists, So creating it.", self.idir)
            os.makedirs(self.idir)

        if not os.path.exists(self.out):
            self.logger.warning("Out dir %s does not exists, So creating it.", self.out)
            os.makedirs(self.out)

        # Out dir shell
        self.sh = PyShell(wd=self.idir, stream_stdout=True, logger=logger)
        self.git = GitShell(wd=self.src, stream_stdout=True, logger=logger)

        # A previous build counts as initialized only if every standard
        # rootfs directory already exists in the install dir.
        # ('var' appeared twice in the original list; duplicate removed --
        # checking it twice had no effect on the result.)
        self.build_init = all(
            os.path.exists(os.path.join(self.idir, subdir))
            for subdir in ['dev', 'etc', 'lib', 'proc', 'tmp', 'sys',
                           'media', 'mnt', 'opt', 'var', 'home', 'root',
                           'usr'])
Ejemplo n.º 8
0
    def __init__(self,
                 repo_dir,
                 cfg,
                 repo_head=None,
                 emailcfg=None,
                 logger=None):
        """
        Constructor of KernelInteg class.
        :rtype: object
        :param repo_dir: Repo directory.
        :param cfg: Kernel Integration Json config file.
        :param repo_head: SHA-ID or Tag used as head for every repo,
                          overriding each repo's 'repo-head' config entry.
        :param emailcfg: Email Json config file, or None to disable email.
        :param logger: Logger object
        """
        self.logger = logger or logging.getLogger(__name__)
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/integ-schema.json')
        self.emailschema = pkg_resources.resource_filename(
            'klibs', 'schemas/email-schema.json')

        self.cfgobj = JSONParser(self.schema,
                                 cfg,
                                 extend_defaults=True,
                                 os_env=True,
                                 logger=self.logger)
        self.cfg = self.cfgobj.get_cfg()

        # Update email configs.
        if emailcfg is not None:
            self.emailobj = Email(emailcfg, self.logger)
        else:
            self.emailobj = None

        self.remote_list = self.cfg['remote-list']
        self.repos = self.cfg['repo-list']
        self.int_list = self.cfg['int-list']

        self.repo_dir = repo_dir
        self.sh = PyShell(wd=self.repo_dir, logger=self.logger)

        # All git commands will be executed in repo directory.
        self.logger.info(format_h1("Initalizing repo", tab=2))
        self.git = GitShell(wd=self.repo_dir, init=True, logger=self.logger)

        # Add git remote and fetch the tags.
        self.logger.info(format_h1("Add remote", tab=2))
        for remote in self.remote_list:
            self.git.add_remote(remote['name'], remote['url'])
            self.git.cmd("fetch", remote['name'])

        valid_repo_head = False

        def is_valid_head(head):
            """Return True if *head* resolves to an object in this repo."""

            if len(head) == 0:
                return False

            ret, out, err = self.git.cmd('show', head)
            if ret == 0:
                return True

            return False

        # Check if the repo head is valid.
        if valid_str(repo_head):
            if is_valid_head(repo_head) is False:
                raise Exception("Invalid repo head %s" % repo_head)
            else:
                valid_repo_head = True

        # A valid global repo head overrides each repo's configured head;
        # otherwise validate every per-repo head from the config file and
        # raise an exception if one is invalid.
        for repo in self.repos:
            if valid_repo_head is True:
                repo['repo-head'] = repo_head
            else:
                if is_valid_head(repo['repo-head']) is False:
                    raise Exception("Invalid repo head %s" % repo['repo-head'])
Ejemplo n.º 9
0
class KernelInteg(object):
    def __init__(self,
                 repo_dir,
                 cfg,
                 repo_head=None,
                 emailcfg=None,
                 logger=None):
        """
        Constructor of KernelInteg class.
        :rtype: object
        :param repo_dir: Repo directory.
        :param cfg: Kernel Integration Json config file.
        :param repo_head: SHA-ID or Tag used as head for every repo,
                          overriding each repo's 'repo-head' config entry.
        :param emailcfg: Email Json config file, or None to disable email.
        :param logger: Logger object
        """
        self.logger = logger or logging.getLogger(__name__)
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/integ-schema.json')
        self.emailschema = pkg_resources.resource_filename(
            'klibs', 'schemas/email-schema.json')

        self.cfgobj = JSONParser(self.schema,
                                 cfg,
                                 extend_defaults=True,
                                 os_env=True,
                                 logger=self.logger)
        self.cfg = self.cfgobj.get_cfg()

        # Update email configs.
        if emailcfg is not None:
            self.emailobj = Email(emailcfg, self.logger)
        else:
            self.emailobj = None

        self.remote_list = self.cfg['remote-list']
        self.repos = self.cfg['repo-list']
        self.int_list = self.cfg['int-list']

        self.repo_dir = repo_dir
        self.sh = PyShell(wd=self.repo_dir, logger=self.logger)

        # All git commands will be executed in repo directory.
        self.logger.info(format_h1("Initalizing repo", tab=2))
        self.git = GitShell(wd=self.repo_dir, init=True, logger=self.logger)

        # Add git remote and fetch the tags.
        self.logger.info(format_h1("Add remote", tab=2))
        for remote in self.remote_list:
            self.git.add_remote(remote['name'], remote['url'])
            self.git.cmd("fetch", remote['name'])

        valid_repo_head = False

        def is_valid_head(head):
            """Return True if *head* resolves to an object in this repo."""

            if len(head) == 0:
                return False

            ret, out, err = self.git.cmd('show', head)
            if ret == 0:
                return True

            return False

        # Check if the repo head is valid.
        if valid_str(repo_head):
            if is_valid_head(repo_head) is False:
                raise Exception("Invalid repo head %s" % repo_head)
            else:
                valid_repo_head = True

        # A valid global repo head overrides each repo's configured head;
        # otherwise validate every per-repo head from the config file and
        # raise an exception if one is invalid.
        for repo in self.repos:
            if valid_repo_head is True:
                repo['repo-head'] = repo_head
            else:
                if is_valid_head(repo['repo-head']) is False:
                    raise Exception("Invalid repo head %s" % repo['repo-head'])

    def clean_repo(self):
        """
        Clean the git repo and delete all local branches.
        :return: None
        """
        self.logger.info(format_h1("Cleaning repo", tab=2))

        self.git.cmd("reset", "--hard")
        self.git.cmd("clean", "-fdx")

        local_branches = [
            x.strip() for x in self.git.cmd('branch')[1].splitlines()
        ]
        for branch in local_branches:
            # Never delete the currently checked-out branch ('* name').
            if branch.startswith('* '):
                continue
            # Bug fix: in the original this delete sat after 'continue'
            # inside the if block and was unreachable, so no branch was
            # ever deleted.
            self.git.cmd("branch", "-D", branch)

    def _smb_sync(self,
                  dest,
                  remote,
                  rdir,
                  username='',
                  password='',
                  options=[]):
        """
        Sync *dest* with an SMB share via smbclient.

        :param dest: Local working directory for the smbclient run.
        :param remote: SMB server name.
        :param rdir: Share folder on the server.
        :param username: SMB user; omitted when empty.
        :param password: SMB password; '-N' (no password) when empty.
        :param options: Extra smbclient arguments appended to the command.
        """
        # Target share in //server/share form.
        args = ["//" + remote + '/' + rdir]

        # Password right after the share, or -N for passwordless access.
        args.append(password if len(password) > 0 else "-N")

        if len(username) > 0:
            args.extend(["-U", username])

        full_cmd = ['smbclient'] + args + options

        status, out, err = self.sh.cmd(' '.join(full_cmd), shell=True, wd=dest)
        if status != 0:
            self.logger.error(err)
            self.logger.error(out)

    def _git_sync(self,
                  dest,
                  remote,
                  rbranch,
                  mode='push',
                  msg='Upload updated cache',
                  op='download'):
        """
        Sync the *dest* directory with remote/rbranch over git.

        :param dest: Local directory to sync (initialized as a git repo).
        :param remote: Name of a remote already configured in self.git;
                       its URL is looked up and reused for the dest repo.
        :param rbranch: Remote branch to download from / upload to.
        :param mode: Upload mode: 'push', 'force-push' or 'refs-for'
                     (Gerrit-style refs/for/<branch>).
        :param msg: Commit message used when uploading.
        :param op: 'download' to fetch+checkout, 'upload' to commit+push.
        :return: False when the remote URL lookup fails; otherwise None.
        """
        uret = self.git.cmd('remote get-url %s' % remote)
        if uret[0] != 0:
            self.logger.error("Invalid sync remote %s branch %s" %
                              (remote, rbranch))
            return False

        git = GitShell(wd=dest,
                       init=True,
                       remote_list=[(remote, uret[1])],
                       logger=self.logger)

        if op == "download":
            # Drop untracked state before switching to the remote branch.
            git.cmd('clean -xdf')
            git.cmd('fetch', remote)
            git.cmd('checkout', remote + '/' + rbranch)
        elif op == "upload":
            git.cmd('add', '.')
            # Pass the commit message through a temp file (git -F).
            # NOTE(review): NamedTemporaryFile opens in binary mode;
            # write(msg) with a str only works on Python 2 (raw_input
            # elsewhere in this class suggests Python 2) -- confirm before
            # porting to Python 3.
            with tempfile.NamedTemporaryFile() as msg_file:
                msg_file.write(msg)
                msg_file.seek(0)
                git.cmd('commit -s -F %s' % msg_file.name)
            if mode == 'refs-for':
                rbranch = 'refs/for/%s' % rbranch
            # Force-push when explicitly requested or when the remote
            # branch does not exist yet.
            if not git.valid_branch(remote, rbranch) or mode == 'force-push':
                git.cmd('push', '-f', remote, 'HEAD:%s' % rbranch)
            else:
                git.cmd('push', remote, 'HEAD:%s' % rbranch)

    def _config_rr_cache(self, options):
        """
        Configure the git rerere (reuse recorded resolution) cache.
        :param options: Dict with rr-cache options.
            use-auto-merge - Enable rerere.autoupdate if set True, otherwise do nothing.
            use-remote-cache - Get remote cache params if set True, otherwise no remote rerere cache is available.
            remote-cache-params - Params for remote cache.
                                - sync-protocol - Remote sync protocol ('smb' or 'git').
                                - url - Remote server / git remote name.
                                - remote-dir / branch - Share folder or git branch.

        :return: None
        """
        if options is None:
            return

        cache_dir = os.path.join(self.repo_dir, '.git', 'rr-cache')
        old_dir = os.path.join(self.repo_dir, '.git', 'rr-cache.old')

        self.git.cmd("config", "rerere.enabled", "true")

        def init_state(roptions):
            """For non-git protocols, park any existing cache in
            rr-cache.old and start from an empty cache dir; the git
            protocol checks out in place instead."""
            if os.path.exists(cache_dir):
                if roptions['sync-protocol'] != 'git':
                    if os.path.exists(old_dir):
                        self.sh.cmd('rm -fr %s' % old_dir, shell=True)
                    # NOTE(review): 'mv' is passed as separate args with
                    # shell=True, unlike the joined strings above --
                    # confirm PyShell.cmd joins positional args.
                    self.sh.cmd('mv', cache_dir, old_dir, shell=True)
                    self.sh.cmd('mkdir -p %s' % cache_dir, shell=True)
            else:
                self.sh.cmd('mkdir -p %s' % cache_dir, shell=True)

        # Check and enable auto merge
        if options['use-auto-merge']:
            self.git.cmd("config", "rerere.autoupdate", "true")

        # Check and add remote cache
        if options['use-remote-cache']:
            roptions = options['remote-cache-params']
            init_state(roptions)
            if roptions['sync-protocol'] == 'smb':
                self._smb_sync(cache_dir, roptions['url'],
                               roptions['remote-dir'], roptions['username'],
                               roptions['password'], roptions['sync-options'])
            elif roptions['sync-protocol'] == 'git':
                self._git_sync(cache_dir, roptions['url'], roptions['branch'],
                               roptions['mode'])

    def _reset_rr_cache(self, options):
        """
        Disable the git rerere cache, upload it if requested, and restore
        any cache parked by _config_rr_cache().
        :param options: Dict with rr-cache options (same shape as in
                        _config_rr_cache, plus upload-remote-cache).
        :return: None
        """
        if options is None:
            return

        cache_dir = os.path.join(self.repo_dir, '.git', 'rr-cache')
        old_dir = os.path.join(self.repo_dir, '.git', 'rr-cache.old')

        self.git.cmd("config", "rerere.enabled", "false")

        def reset_state(roptions):
            """Restore the cache saved aside by init_state() for non-git
            protocols."""
            if options['use-remote-cache']:
                if roptions['sync-protocol'] != 'git':
                    if os.path.exists(old_dir):
                        if os.path.exists(cache_dir):
                            self.sh.cmd('rm -fr %s' % cache_dir, shell=True)
                        self.sh.cmd('mv', old_dir, cache_dir, shell=True)

        if options['upload-remote-cache'] and os.path.exists(cache_dir):
            if options['use-remote-cache']:
                roptions = options['remote-cache-params']
                if roptions['sync-protocol'] == 'smb':
                    self._smb_sync(cache_dir, roptions['url'],
                                   roptions['remote-dir'],
                                   roptions['username'], roptions['password'],
                                   roptions['upload-options'])
                elif roptions['sync-protocol'] == 'git':
                    self._git_sync(cache_dir,
                                   roptions['url'],
                                   roptions['branch'],
                                   roptions['mode'],
                                   '\n'.join(roptions['upload-msg']),
                                   op='upload')

        # NOTE(review): 'remote-cache-params' is indexed unconditionally
        # here; looks like a KeyError when remote caching is disabled --
        # confirm the schema always defaults this key.
        reset_state(options['remote-cache-params'])

    def _merge_branches(self,
                        mode,
                        merge_list,
                        dest,
                        options,
                        sendemail=False,
                        sub_prefix='',
                        rrupload_header=''):
        """
        Merge the branches given in merge_list and create a output branch.
        Basic logic is,
        if mode is rebase, then git rebase all branches in merge_list onto to dest branch.
        if mode is merge, then git merge/pull all branches on top of dest branch.
        if mode is replace, then simple checkout will be done.
        :param mode: 'merge', 'rebase', 'replace', 'cherry' or 'cherrypick'.
        :param merge_list: List of (remote, branch, upstream, sha-list) tuples.
        :param dest: Dest branch name.
        :param options: Dict with merge params.
            use-rr-cache -  Use git rerere cache.
            no-ff - Set True if you want to disable fast forward in merge.
            add-log - Set True if you want to add merge log.
        :param sendemail: Send a status email per branch if True.
        :param sub_prefix: Prefix for the status email subject.
        :param rrupload_header: Header for the rr-cache upload message.

        :return: True
        """
        def merge_cmd(remote=None,
                      rbranch=None,
                      no_ff=False,
                      add_log=False,
                      abort=False):
            """Build the git merge/pull command line for one branch."""
            # Renamed from 'options' to stop shadowing the outer argument.
            opts = []

            if no_ff:
                opts.append('--no-ff')
            if add_log:
                opts.append('--log')

            if abort is True:
                return ' '.join(['merge', '--abort'])

            # With a remote, pull (fetch+merge); otherwise merge locally.
            if valid_str(remote):
                return ' '.join(['pull', ' '.join(opts), remote, rbranch])
            else:
                return ' '.join(['merge', ' '.join(opts), rbranch])

        def send_email(remote, branch, status, out, err):
            """Send a pass/fail status email for one merged branch."""

            if not sendemail or self.emailobj is None:
                return

            subject = [] if len(sub_prefix) == 0 else [sub_prefix]
            content = []

            if mode == 'merge':
                subject.append('Merge')
            elif mode == 'rebase':
                subject.append('Rebase')
            elif mode == 'replace':
                subject.append('Replace')
            elif mode == 'cherry':
                subject.append('Cherry')
            elif mode == 'cherrypick':
                subject.append('Cherrypick')

            if valid_str(remote):
                branch = remote + '/' + branch

            subject.append(branch)

            if status:
                subject.append('passed')
            else:
                subject.append('failed')

            uret = self.git.cmd('remote get-url %s' % remote)
            url = remote
            if uret[0] == 0:
                url = uret[1].strip()

            content.append('')
            content.append('Head: %s' % self.git.head_sha())
            content.append('Base: %s' % self.git.base_sha())
            content.append('Dest Branch: %s' % dest)
            content.append('Remote: %s' % url)
            content.append('Remote Branch: %s' % branch)
            # Bug fix: '%' binds tighter than the conditional expression,
            # so the original appended a bare "Failed" (without the
            # 'Status: ' prefix) whenever status was False.
            content.append('Status: %s' % ("Passed" if status else "Failed"))
            content.append('\n')
            content.append(format_h1("Output log"))
            content.append('')
            content.append(out)
            content.append('\n')
            content.append(format_h1("Error log"))
            content.append('')
            content.append(err)

            self.emailobj.send_email(' '.join(subject), '\n'.join(content))

        def add_rrcache_msg(remote, branch):
            """Append a resolution note to the rr-cache upload message."""
            if options["use-rr-cache"]:
                if options['rr-cache']['upload-remote-cache']:
                    rcoptions = options['rr-cache']['remote-cache-params']
                    if len(rcoptions['upload-msg']) == 0:
                        msg = rrupload_header if len(
                            rrupload_header) > 0 else dest
                        rcoptions['upload-msg'].append(
                            'rrcache: upload %s results of %s' % (mode, msg))
                    if len(rcoptions['upload-msg']) == 1:
                        rcoptions['upload-msg'].append('\n')
                    uret = self.git.cmd('remote get-url %s' % remote)
                    url = uret[1].strip() if uret[0] == 0 and len(
                        uret[1]) > 0 else branch
                    rcoptions['upload-msg'].append(
                        "Includes %s resolution of %s:%s" %
                        (mode, url, branch))

        def get_cherry_list(upstream, branch):
            """Return space-joined SHAs in branch but not upstream
            ('+'-prefixed lines of 'git cherry')."""
            ret = self.git.cmd("cherry", upstream, branch)
            if ret[0] != 0:
                return ''
            else:
                commit_list = map(lambda x: x.strip(), ret[1].split('\n'))
                commit_list = filter(lambda x: x.startswith('+'), commit_list)
                commit_list = map(lambda x: x.strip('+ '), commit_list)
                return ' '.join(commit_list)

        # Check whether git diff is empty
        def null_diff():
            dret = self.git.cmd('diff')
            if dret[0] == 0 and len(dret[1]) == 0:
                return True

            return False

        # Check for rerere diff
        def null_rdiff():
            dret = self.git.cmd('rerere diff')
            if dret[0] == 0 and len(dret[1]) < 2:
                return True

            return False

        def auto_resolve():
            """Try to finish an in-progress merge/rebase/cherry-pick that
            rerere has already resolved; return True when attempted."""
            if options["rr-cache"]["use-auto-merge"]:
                if null_rdiff() or null_diff():
                    if mode == "merge":
                        self.git.cmd('commit', '-as', '--no-edit')
                        return True
                    elif mode == "rebase":
                        dret = self.git.cmd('diff', '--cached')
                        if dret[0] == 0 and len(dret[1]) == 0:
                            self.git.cmd('rebase', '--skip')
                        else:
                            self.git.cmd('rebase', '--continue')
                        return True
                    elif mode == "cherry" or mode == "cherrypick":
                        dret = self.git.cmd('diff', '--cached')
                        if dret[0] == 0 and len(dret[1]) == 0:
                            self.git.cmd('reset')
                        self.git.cmd('cherry-pick', '--continue')
                        return True
            return False

        if options["use-rr-cache"]:
            self._config_rr_cache(options["rr-cache"])

        for remote, branch, upstream, shalist in merge_list:
            ret = 0, '', ''
            if mode == "merge":
                self.git.cmd("checkout", dest)
                ret = self.git.cmd(
                    merge_cmd(remote, branch, options['no-ff'],
                              options['add-log']))
            elif mode == "rebase":
                self.git.cmd("checkout",
                             remote + '/' + branch if remote != '' else branch)
                ret = self.git.cmd("rebase", dest)
            elif mode == "replace":
                ret = self.git.cmd(
                    "checkout",
                    remote + '/' + branch if remote != '' else branch)
            elif mode == "cherry":
                commit_list = get_cherry_list(
                    upstream,
                    remote + '/' + branch if remote != '' else branch)
                if valid_str(commit_list):
                    ret = self.git.cmd("cherry-pick", commit_list)
                else:
                    continue
            elif mode == "cherrypick":
                if valid_str(shalist):
                    ret = self.git.cmd("cherry-pick", shalist)
                else:
                    continue

            if self.git.inprogress() or ret[0] != 0:
                # First try to auto resolve it.
                auto_resolve()
                # If problem still persist, ask for help.
                if self.git.inprogress():
                    send_email(remote, branch, False, ret[1], ret[2])
                    while True:
                        if not null_rdiff() or not null_diff():
                            raw_input(
                                'Please resolve the issue and then press any key continue'
                            )
                        if self.git.inprogress():
                            if auto_resolve() and not self.git.inprogress():
                                break
                        else:
                            break

            add_rrcache_msg(remote, branch)

            # A successful rebase leaves us on a detached/temp head;
            # recreate dest at the rebased commit.
            if mode == "rebase" and not self.git.inprogress():
                self.git.cmd("branch", '-D', dest)
                self.git.cmd("checkout", '-b', dest)

        if options['use-rr-cache']:
            self._reset_rr_cache(options["rr-cache"])

        return True

    def _upload_repo(self, branch_name, upload_options):
        """
        Upload the given branch to a remote patch.
        supported upload modes are force-push, push and refs-for (for Gerrit).
        :param branch_name: Name of the local branch.
        :param upload_options: Dict with upload related params.
        url - Name of the git remote.
        branch - Remote branch of git repo.
        :return: Nothing.
        """
        self.logger.info(format_h1("Uploading %s", tab=2) % branch_name)

        # Idiom fix: a comparison already yields True/False; the
        # 'True if ... else False' wrapper was redundant.
        _force = upload_options['mode'] == 'force-push'
        _use_refs = upload_options['mode'] == 'refs-for'

        self.git.push(branch_name,
                      upload_options['url'],
                      upload_options['branch'],
                      force=_force,
                      use_refs=_use_refs)

    def _create_repo(self, repo):
        """
        Merge the branches given in source-list and create list of output branches as specified by dest-list option.
        :param repo: Dict with kernel repo options. Check "repo-params" section in kernel integration schema file for
        more details.
        :return: True if branch creation (and any enabled comparison) succeeded,
                 otherwise False.
        """
        self.logger.info(
            format_h1("Create %s repo", tab=2) % repo['repo-name'])

        merge_list = []
        status = True

        # Clean existing git operations so later checkouts start from a sane
        # state; each abort fails harmlessly when nothing is in progress.
        try:
            self.git.cmd('merge --abort')
            self.git.cmd('rebase --abort')
            self.git.cmd('cherry-pick --abort')
            self.git.cmd('revert --abort')
        except Exception:
            pass

        # Get source branches
        for srepo in repo['source-list']:
            if srepo['skip'] is True:
                continue
            if self.git.valid_branch(srepo['url'], srepo['branch']) is False:
                raise Exception("Dependent repo %s/%s does not exits" %
                                (srepo['url'], srepo['branch']))
            else:
                merge_list.append((srepo['url'], srepo['branch'],
                                   srepo["upstream"], srepo["sha-list"]))

        dest_repolist = []

        for drepo in repo['dest-list']:
            if drepo['skip']:
                continue
            dest_repolist.append(drepo)

        # Create destination branches
        dest_branches = []
        try:
            for dest_repo in dest_repolist:

                # Recreate the local branch from scratch if it already exists.
                if self.git.valid_branch('', dest_repo['local-branch']):
                    ret = self.git.delete(dest_repo['local-branch'],
                                          force=True)[0]
                    if ret != 0:
                        raise Exception("Deleting branch %s failed" %
                                        dest_repo['local-branch'])

                self.git.cmd("checkout", repo['repo-head'], "-b",
                             dest_repo['local-branch'])

                if len(merge_list) > 0:
                    self._merge_branches(dest_repo['merge-mode'], merge_list,
                                         dest_repo['local-branch'],
                                         dest_repo['merge-options'],
                                         repo['send-email'],
                                         repo['email-prefix'],
                                         repo['repo-name'])
                dest_branches.append(dest_repo['local-branch'])
        except Exception as e:
            self.logger.error(e, exc_info=True)
            # Mark the run as failed so the comparison and upload stages
            # below are skipped (previously status stayed True here and the
            # code went on to diff/upload branches that were just deleted).
            status = False
            for branch in dest_branches:
                self.git.delete(branch, force=True)
        else:
            self.logger.info("Repo %s creation successfull" %
                             repo['repo-name'])

        # Compare destination branches
        if status is True and repo['compare-dest']:
            if len(dest_repolist) > 1:
                base_repo = dest_repolist[0]
                for dest_repo in dest_repolist:
                    ret, out, err = self.git.cmd('diff',
                                                 base_repo['local-branch'],
                                                 dest_repo['local-branch'])
                    if ret != 0:
                        if repo['compare-resmode'] == "fail":
                            status = False
                        break
                    else:
                        if len(out) > 0:
                            if repo['compare-resmode'] == "fail":
                                status = False
                            self.logger.error(
                                "Destination branches %s!=%s, resolution:%s" %
                                (base_repo['local-branch'],
                                 dest_repo['local-branch'],
                                 repo['compare-resmode']))
                            break
                        else:
                            self.logger.info("Destination branches %s==%s" %
                                             (base_repo['local-branch'],
                                              dest_repo['local-branch']))
        else:
            self.logger.warning("Skipping destination branch comparison")

        # Upload the destination branches
        if status is True:
            for dest_repo in dest_repolist:
                if dest_repo['upload-copy'] is True:
                    upload_options = dest_repo['upload-options']
                    self._upload_repo(dest_repo['local-branch'],
                                      upload_options)
        else:
            self.logger.warning("Skipping destination branch upload")

        if repo['send-email']:
            subject = [] if len(
                repo['email-prefix']) == 0 else [repo['email-prefix']]
            content = []

            subject.append("integration")

            if status:
                subject.append('passed')
            else:
                subject.append('failed')

            content.append(format_h1("This repo integration includes:"))
            content.append(format_h1("Following source branches:"))
            content.append('')
            for rname, rbranch, upstream, shalist in merge_list:
                # Empty remote name means a local branch; otherwise resolve
                # the remote name to its URL for the report.
                if len(rname) == 0:
                    rname = 'local-branch'
                else:
                    uret = self.git.cmd('remote get-url %s' % rname)
                    if uret[0] == 0:
                        rname = uret[1].strip()
                content.append('Remote: %s' % rname)
                content.append('Branch: %s' % rbranch)
                content.append('')

            content.append(format_h1("Following destination branches:"))
            content.append('')

            for dest_repo in dest_repolist:
                content.append('Branch: %s' % dest_repo['local-branch'])
                content.append('Merge Mode: %s' % dest_repo['merge-mode'])
                if dest_repo['upload-copy'] is True:
                    content.append('')
                    content.append('Uploaded branch to,')
                    upload_options = dest_repo['upload-options']
                    rurl = upload_options['url']
                    # Translate the remote name to its URL when known.
                    for ritem in self.remote_list:
                        if ritem['name'] == rurl:
                            rurl = ritem['url']
                            break
                    content.append('Upload Remote: %s' % rurl)
                    content.append('Upload Branch: %s' %
                                   upload_options['branch'])
                    content.append('')

            if self.emailobj is not None:
                self.emailobj.send_email(' '.join(subject), '\n'.join(content))

        return status

    def _get_repo_by_name(self, name):
        """
        Get repo Dict from "repos" list in given Json config file.
        :param name: Name of the repo
        :return: repo Dict reference or None if not valid repo found.
        """
        # Linear scan is fine here; the repo list is small.
        return next((entry for entry in self.repos
                     if entry['repo-name'] == name), None)

    def get_default_reponame(self):
        """
        Pick the repo name to use when the caller gives none.

        Prefers the config's 'default-repo' entry when it names a known
        repo, then falls back to the first configured repo.
        :return: Repo name string, or None when no repos exist.
        """
        preferred = self.cfg.get('default-repo', "")

        if self._get_repo_by_name(preferred) is not None:
            return preferred

        # Falsy for both None and an empty list.
        if self.repos:
            return self.repos[0]['repo-name']

        return None

    def start(self, name, skip_dep=False):
        """
        Generate kernel and its depndent branches.
        :param name: Name of the kernel repo.
        :param skip_dep: Skip creating dependent repos.
        :return: True on success, False when a repo name is unknown.
        """
        # Look up the dependency list registered for this repo, if any.
        dep_list = []
        for item in self.int_list:
            if item["repo"] == name:
                dep_list = item["dep-repos"]

        # Build a fresh list: the previous `int_list += [name]` appended to
        # the aliased item["dep-repos"] list, corrupting the config on every
        # call. Dependencies are processed first, then the repo itself.
        int_list = ([] if skip_dep else list(dep_list)) + [name]

        # Use a distinct loop variable so the `name` parameter is not
        # shadowed while iterating.
        for repo_name in int_list:
            repo = self._get_repo_by_name(repo_name)
            if repo is None:
                self.logger.error("Repo %s does not exist\n" % repo_name)
                return False
            self._create_repo(repo)

        return True
Ejemplo n.º 10
0
    def _git_sync(self,
                  dest,
                  remote,
                  rbranch,
                  mode='push',
                  msg='Upload updated cache',
                  op='download'):
        """
        Sync a secondary git tree in `dest` with the given remote branch.

        :param dest: Local directory of the git tree to sync.
        :param remote: Remote name configured in the primary repo; its URL
               is reused for the secondary tree.
        :param rbranch: Remote branch name.
        :param mode: Upload mode ('push', 'force-push' or 'refs-for').
        :param msg: Commit message used for the upload commit.
        :param op: 'download' to fetch/checkout, 'upload' to commit/push.
        :return: False when the remote is invalid, True otherwise.
        """
        uret = self.git.cmd('remote get-url %s' % remote)
        if uret[0] != 0:
            self.logger.error("Invalid sync remote %s branch %s" %
                              (remote, rbranch))
            return False

        # Strip the trailing newline from `git remote get-url` output before
        # registering it as a remote URL.
        git = GitShell(wd=dest,
                       init=True,
                       remote_list=[(remote, uret[1].strip())],
                       logger=self.logger)

        if op == "download":
            git.cmd('clean -xdf')
            git.cmd('fetch', remote)
            git.cmd('checkout', remote + '/' + rbranch)
        elif op == "upload":
            git.cmd('add', '.')
            # Pass the commit message through a temp file to avoid shell
            # quoting issues in the commit command line.
            with tempfile.NamedTemporaryFile() as msg_file:
                msg_file.write(msg)
                msg_file.seek(0)
                git.cmd('commit -s -F %s' % msg_file.name)
            if mode == 'refs-for':
                rbranch = 'refs/for/%s' % rbranch
            # Force-push when requested, or when the remote branch does not
            # exist yet.
            if not git.valid_branch(remote, rbranch) or mode == 'force-push':
                git.cmd('push', '-f', remote, 'HEAD:%s' % rbranch)
            else:
                git.cmd('push', remote, 'HEAD:%s' % rbranch)

        # Return a consistent status; previously the success path fell
        # through and returned None.
        return True
Ejemplo n.º 11
0
    def auto_test(self):
        """
        Run all kernel tests enabled in the JSON config.

        Drives static (compile/sparse/smatch), checkpatch and custom tests
        based on their config sections, then optionally uploads the results
        via git when output-config is enabled.
        :return: True if every enabled test passed, otherwise False.
        """
        self.logger.info(format_h1("Running kernel tests from json", tab=2))

        status = True

        # Scratch git tree used for fetching remote config files.
        config_temp = tempfile.mkdtemp("_dir", "config_")
        cgit = GitShell(wd=config_temp, init=True, logger=self.logger)

        static_config = self.cfg.get("static-config", None)
        sparse_config = self.cfg.get("sparse-config", None)
        smatch_config = self.cfg.get("smatch-config", None)
        custom_test = self.cfg.get("custom-test", None)

        # If there is a config in remote source, fetch it and give the local path.
        def get_configsrc(options):

            if options is None or not isinstance(options, collections.Mapping):
                return None

            # Empty URL means the config already lives in the source tree.
            if len(options["url"]) == 0:
                return os.path.abspath(os.path.join(self.src, options["remote-dir"], options["name"]))

            if options["sync-mode"] == "git":
                cgit.cmd("clean -xdf")
                # Reuse an existing remote whose URL matches; else 'origin'.
                remote_list = cgit.cmd("remote")[1].split('\n')
                rname = 'origin'
                for remote in remote_list:
                    rurl = cgit.cmd("remote get-url %s" % remote)[1].strip()
                    if rurl == options["url"]:
                        rname = remote
                        break
                cgit.add_remote(rname, options["url"])
                cgit.cmd("pull %s" % rname)
                cgit.cmd("checkout %s/%s" % (rname, options["branch"]))

                return os.path.abspath(os.path.join(config_temp, options["remote-dir"], options["name"]))

            return None

        # Resolve the head/base SHA for a config section, honouring its
        # auto / auto-mode / value settings; falls back to self.head/base.
        def get_sha(_type='head', config=None):
            if config is None:
                return getattr(self, _type)
            if config[_type]['auto']:
                if config[_type]['auto-mode'] == "last-upstream":
                    return self.git.cmd('describe --abbrev=0 --match "v[0-9]*" --tags')[1].strip()
                elif config[_type]['auto-mode'] == "last-tag":
                    return self.git.cmd('describe --abbrev=0 --tags')[1].strip()
                elif config[_type]['auto-mode'] == "head-commit":
                    return self.git.head_sha()
                elif config[_type]['auto-mode'] == "base-commit":
                    return self.git.base_sha()
            elif len(config[_type]['value']) > 0:
                return config[_type]['value'].strip()
            else:
                return getattr(self, _type)

        # Run the compile/sparse/smatch tests for one arch entry (obj) and
        # one config entry (cobj); returns the combined pass/fail status.
        def static_test(obj, cobj, config):
            status = True

            if cobj["compile-test"]:
                current_status = self.compile(obj["arch_name"], config, obj["compiler_options"]["CC"],
                                              obj["compiler_options"]["cflags"],
                                              cobj.get('name', None), get_configsrc(cobj.get('source-params', None)))
                if current_status is False:
                    self.logger.error("Compilation of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                     cobj.get('name', config)))

                status &= current_status

            if cobj["sparse-test"]:
                skip = False
                args = [
                    obj["arch_name"], config, obj["compiler_options"]["CC"], obj["compiler_options"]["cflags"],
                    cobj.get('name', None), get_configsrc(cobj.get('source-params', None))
                ]

                if sparse_config is not None:
                    if sparse_config["enable"] is False:
                        self.logger.warning("Sparse global flag is disabled\n")
                        skip = True
                    else:
                        args.append(sparse_config["cflags"])
                        args.append(get_sha("base", sparse_config))
                        args.append(sparse_config["source"])

                if skip is False:
                    current_status = self.sparse(*args)

                    if current_status is False:
                        self.logger.error("Sparse test of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                         cobj.get('name', config)))
                    status &= current_status

            if cobj["smatch-test"]:
                skip = False
                args = [
                    obj["arch_name"], config, obj["compiler_options"]["CC"], obj["compiler_options"]["cflags"],
                    cobj.get('name', None), get_configsrc(cobj.get('source-params', None))
                ]

                if smatch_config is not None:
                    if smatch_config["enable"] is False:
                        self.logger.warning("Smatch global flag is disabled\n")
                        skip = True
                    else:
                        args.append(smatch_config["cflags"])
                        args.append(get_sha("base", smatch_config))
                        args.append(smatch_config["source"])

                if skip is False:
                    current_status = self.smatch(*args)

                    if current_status is False:
                        self.logger.error("Smatch test of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                         cobj.get('name', config)))
                    status &= current_status

            return status

        if static_config is not None and static_config["enable"] is True:
            # Compile standard configs
            for obj in static_config["test-list"]:

                for config in supported_configs:
                    # 'config in obj' replaces the Python 2-only has_key().
                    if isinstance(obj, collections.Mapping) and config in obj:
                        status &= static_test(obj, obj[config], config)

                # Compile custom configs
                for cobj in obj["customconfigs"]:
                    if cobj['name'] not in self.custom_configs:
                        self.custom_configs.append(cobj['name'])

                    self.resobj.add_config(cobj['name'])

                    status &= static_test(obj, cobj, cobj['defaction'])

        checkpatch_config = self.cfg.get("checkpatch-config", None)

        if checkpatch_config is not None and checkpatch_config["enable"] is True:
            if len(checkpatch_config["source"]) > 0:
                self.checkpatch_source = checkpatch_config["source"]

            status &= self.run_checkpatch(get_sha('head', checkpatch_config), get_sha('base', checkpatch_config))

        if custom_test is not None and custom_test["enable"] is True:
            for ctest in custom_test["test-list"]:
                status &= self.custom_test(ctest["name"], ctest["source"], ctest["arg-list"],
                                           get_sha("head", custom_test),
                                           get_sha("base", custom_test),
                                           ctest["enable-head-sub"],
                                           ctest["enable-base-sub"],
                                           ctest["enable-src-sub"])

        output_config = self.cfg.get("output-config", None)

        if output_config is not None and output_config["enable"] is True and len(output_config["url"]) > 0:

            # Commit the results file  used back to server.
            if output_config["sync-mode"] == "git":
                self.git_upload_results(remote=('origin', output_config["url"], output_config["branch"]),
                                        mode=output_config["mode"],
                                        msg=output_config["upload-msg"],
                                        append_kinfo=output_config.get("append-kinfo", False),
                                        resdir=None,
                                        relpath=output_config["remote-dir"],
                                        outfile=output_config["name"])

        # Always clean up the scratch config tree.
        shutil.rmtree(config_temp, ignore_errors=True)

        return status
Ejemplo n.º 12
0
    def git_upload_results(self, remote, mode='push', msg=None, append_kinfo=False,
                           resdir=None, relpath=".", outfile='out.json'):
        """
        Upload the results to remote repo.
        :param remote: (remotename, url, branch).
        :param mode:  Git push mode (push, force-push, refs-for)
        :param msg: Commit message in list format, One line for each entry.
        :param append_kinfo: Append kernel info to commit message.
        :param resdir: Dir used for uploading the results.
        :param relpath: Relative path of the results file.
        :param outfile: Results file.

        :return: True | False
        """

        clean_resdir = False

        if not isinstance(remote, tuple) or len(remote) != 3:
            self.logger.info("Invalid remote %s", remote)
            return False

        # Avoid the mutable-default-argument pitfall; None means no message.
        if msg is None:
            msg = []

        # 'remote' is a tuple and cannot be mutated in place (the previous
        # `remote[2] = ...` raised TypeError); work with locals instead.
        rname = remote[0]
        rbranch = remote[2]

        if resdir is None:
            resdir = tempfile.mkdtemp("_dir", "output_")
            clean_resdir = True

        # Commit the results file  used back to server.
        ogit = GitShell(wd=resdir, init=True, remote_list=[(remote[0], remote[1])], fetch_all=True, logger=self.logger)
        ogit.cmd("clean -xdf")
        ogit.cmd("checkout %s/%s" % (rname, rbranch))
        output_file = os.path.join(resdir, relpath, outfile)

        if not os.path.exists(os.path.dirname(output_file)):
            os.makedirs(os.path.dirname(output_file))

        self.resobj.dump_results(outfile=output_file)

        ogit.cmd('add %s' % (relpath + '/' + outfile))

        # Create the commit message and upload it
        with tempfile.NamedTemporaryFile() as msg_file:
            commit_msg = '\n'.join(msg)
            # Use default msg if its not given in config file.
            if len(commit_msg) == 0:
                commit_msg = "test: Update latest results"
            # Append kernel info if enabled.
            if append_kinfo:
                commit_msg += '\n'
                commit_msg += self.resobj.kernel_info()
            msg_file.write(commit_msg)
            msg_file.seek(0)
            ogit.cmd('commit -s -F %s' % msg_file.name)

        if mode == 'refs-for':
            rbranch = 'refs/for/%s' % rbranch

        # Push to the remote that was actually registered above, not a
        # hardcoded 'origin'.
        if not ogit.valid_branch(rname, rbranch) or mode == 'force-push':
            ogit.cmd('push', '-f', rname, 'HEAD:%s' % rbranch)
        else:
            ogit.cmd('push', rname, 'HEAD:%s' % rbranch)

        if clean_resdir:
            shutil.rmtree(resdir, ignore_errors=True)

        # Report success as documented; previously this returned None.
        return True
Ejemplo n.º 13
0
class KernelTest(object):

    def __init__(self, src, cfg=None, out=None, rname=None, rurl=None, branch=None, head=None, base=None,
                 res_cfg=None, logger=None):
        """
        Initialize the kernel test wrapper around a kernel source tree.

        :param src: Kernel source directory.
        :param cfg: Optional JSON test config file.
        :param out: Build output directory; defaults to <src>/out.
        :param rname: Optional git remote name to fetch from.
        :param rurl: URL of the git remote (used with rname).
        :param branch: Branch to check out; defaults to the current branch.
        :param head: Head SHA; defaults to the tree's HEAD.
        :param base: Base SHA; defaults to the detected base.
        :param res_cfg: Previous results config for KernelResults.
        :param logger: Optional logger instance.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = src
        # Fix: os.path.absapth was a typo which raised AttributeError
        # whenever a custom 'out' directory was supplied.
        self.out = os.path.join(self.src, 'out') if out is None else os.path.abspath(out)
        self.branch = branch
        self.rname = rname
        self.rurl = rurl
        self.head = head
        self.base = base
        self.valid_git = False
        self.schema = pkg_resources.resource_filename('klibs', 'schemas/test-schema.json')
        self.cfg = None
        self.cfgobj = None
        self.resobj = KernelResults(self.src, old_cfg=res_cfg, logger=self.logger)
        self.git = GitShell(wd=self.src, logger=logger)
        self.sh = PyShell(wd=self.src, logger=logger)
        self.checkpatch_source = CHECK_PATCH_SCRIPT
        self.custom_configs = []

        # When a remote name is given, make sure the tree exists, is a git
        # repo, and has the remote branch fetched.
        if self.rname is not None and len(self.rname) > 0:
            if not os.path.exists(self.src):
                os.makedirs(self.src)
            if not self.git.valid():
                self.git.init()
            self.git.add_remote(self.rname, rurl)
            self.git.cmd('fetch %s' % self.rname)
            self.branch = self.rname + '/' + self.branch

        self.valid_git = True if self.git.valid() else False

        if self.valid_git:
            if self.branch is not None and len(self.branch) > 0:
                if self.git.cmd('checkout', self.branch)[0] != 0:
                    self.logger.error("Git checkout command failed in %s", self.src)
                    return
            else:
                self.branch = self.git.current_branch()

            #update base & head if its not given
            if self.head is None:
                self.head = self.git.head_sha()
            if self.base is None:
                self.base = self.git.base_sha()

            self.resobj.update_kernel_params(base=self.base, head=self.head, branch=self.branch)

        if not is_valid_kernel(src, logger):
            return

        self.version = BuildKernel(self.src).uname

        if len(self.version) > 0:
            self.resobj.update_kernel_params(version=self.version)

        if cfg is not None:
            self.cfgobj = JSONParser(self.schema, cfg, extend_defaults=True, os_env=True, logger=logger)
            self.cfg = self.cfgobj.get_cfg()

    def send_email(self, emailcfg, sub=None):
        """
        Email the current test results.

        :param emailcfg: Email config; nothing is sent when None.
        :param sub: Optional extra text appended to the subject.
        :return: True when an email was sent, otherwise False.
        """
        # Guard clause: without a config there is nothing to send.
        if emailcfg is None:
            return False

        emailobj = Email(emailcfg, self.logger)

        subject = ['Test results']
        if sub is not None:
            subject.append(sub)

        content = []

        # Dump the results to a temp file, then read them back into the body.
        outfile = tempfile.NamedTemporaryFile()
        self.resobj.dump_results(outfile=outfile.name)

        with open(outfile.name) as fobj:
            content.append(format_h1("Test Results"))
            content.append('')
            content.append(fobj.read())
            content.append('\n')

        emailobj.send_email(' '.join(subject), '\n'.join(content))

        return True

    def git_upload_results(self, remote, mode='push', msg=None, append_kinfo=False,
                           resdir=None, relpath=".", outfile='out.json'):
        """
        Upload the results to remote repo.
        :param remote: (remotename, url, branch).
        :param mode:  Git push mode (push, force-push, refs-for)
        :param msg: Commit message in list format, One line for each entry.
        :param append_kinfo: Append kernel info to commit message.
        :param resdir: Dir used for uploading the results.
        :param relpath: Relative path of the results file.
        :param outfile: Results file.

        :return: True | False
        """

        clean_resdir = False

        if not isinstance(remote, tuple) or len(remote) != 3:
            self.logger.info("Invalid remote %s", remote)
            return False

        # Avoid the mutable-default-argument pitfall; None means no message.
        if msg is None:
            msg = []

        # 'remote' is a tuple and cannot be mutated in place (the previous
        # `remote[2] = ...` raised TypeError); work with locals instead.
        rname = remote[0]
        rbranch = remote[2]

        if resdir is None:
            resdir = tempfile.mkdtemp("_dir", "output_")
            clean_resdir = True

        # Commit the results file  used back to server.
        ogit = GitShell(wd=resdir, init=True, remote_list=[(remote[0], remote[1])], fetch_all=True, logger=self.logger)
        ogit.cmd("clean -xdf")
        ogit.cmd("checkout %s/%s" % (rname, rbranch))
        output_file = os.path.join(resdir, relpath, outfile)

        if not os.path.exists(os.path.dirname(output_file)):
            os.makedirs(os.path.dirname(output_file))

        self.resobj.dump_results(outfile=output_file)

        ogit.cmd('add %s' % (relpath + '/' + outfile))

        # Create the commit message and upload it
        with tempfile.NamedTemporaryFile() as msg_file:
            commit_msg = '\n'.join(msg)
            # Use default msg if its not given in config file.
            if len(commit_msg) == 0:
                commit_msg = "test: Update latest results"
            # Append kernel info if enabled.
            if append_kinfo:
                commit_msg += '\n'
                commit_msg += self.resobj.kernel_info()
            msg_file.write(commit_msg)
            msg_file.seek(0)
            ogit.cmd('commit -s -F %s' % msg_file.name)

        if mode == 'refs-for':
            rbranch = 'refs/for/%s' % rbranch

        # Push to the remote that was actually registered above, not a
        # hardcoded 'origin'.
        if not ogit.valid_branch(rname, rbranch) or mode == 'force-push':
            ogit.cmd('push', '-f', rname, 'HEAD:%s' % rbranch)
        else:
            ogit.cmd('push', rname, 'HEAD:%s' % rbranch)

        if clean_resdir:
            shutil.rmtree(resdir, ignore_errors=True)

        # Report success as documented; previously this returned None.
        return True

    def auto_test(self):
        """
        Run all kernel tests enabled in the JSON config.

        Drives static (compile/sparse/smatch), checkpatch and custom tests
        based on their config sections, then optionally uploads the results
        via git when output-config is enabled.
        :return: True if every enabled test passed, otherwise False.
        """
        self.logger.info(format_h1("Running kernel tests from json", tab=2))

        status = True

        # Scratch git tree used for fetching remote config files.
        config_temp = tempfile.mkdtemp("_dir", "config_")
        cgit = GitShell(wd=config_temp, init=True, logger=self.logger)

        static_config = self.cfg.get("static-config", None)
        sparse_config = self.cfg.get("sparse-config", None)
        smatch_config = self.cfg.get("smatch-config", None)
        custom_test = self.cfg.get("custom-test", None)

        # If there is a config in remote source, fetch it and give the local path.
        def get_configsrc(options):

            if options is None or not isinstance(options, collections.Mapping):
                return None

            # Empty URL means the config already lives in the source tree.
            if len(options["url"]) == 0:
                return os.path.abspath(os.path.join(self.src, options["remote-dir"], options["name"]))

            if options["sync-mode"] == "git":
                cgit.cmd("clean -xdf")
                # Reuse an existing remote whose URL matches; else 'origin'.
                remote_list = cgit.cmd("remote")[1].split('\n')
                rname = 'origin'
                for remote in remote_list:
                    rurl = cgit.cmd("remote get-url %s" % remote)[1].strip()
                    if rurl == options["url"]:
                        rname = remote
                        break
                cgit.add_remote(rname, options["url"])
                cgit.cmd("pull %s" % rname)
                cgit.cmd("checkout %s/%s" % (rname, options["branch"]))

                return os.path.abspath(os.path.join(config_temp, options["remote-dir"], options["name"]))

            return None

        # Resolve the head/base SHA for a config section, honouring its
        # auto / auto-mode / value settings; falls back to self.head/base.
        def get_sha(_type='head', config=None):
            if config is None:
                return getattr(self, _type)
            if config[_type]['auto']:
                if config[_type]['auto-mode'] == "last-upstream":
                    return self.git.cmd('describe --abbrev=0 --match "v[0-9]*" --tags')[1].strip()
                elif config[_type]['auto-mode'] == "last-tag":
                    return self.git.cmd('describe --abbrev=0 --tags')[1].strip()
                elif config[_type]['auto-mode'] == "head-commit":
                    return self.git.head_sha()
                elif config[_type]['auto-mode'] == "base-commit":
                    return self.git.base_sha()
            elif len(config[_type]['value']) > 0:
                return config[_type]['value'].strip()
            else:
                return getattr(self, _type)

        # Run the compile/sparse/smatch tests for one arch entry (obj) and
        # one config entry (cobj); returns the combined pass/fail status.
        def static_test(obj, cobj, config):
            status = True

            if cobj["compile-test"]:
                current_status = self.compile(obj["arch_name"], config, obj["compiler_options"]["CC"],
                                              obj["compiler_options"]["cflags"],
                                              cobj.get('name', None), get_configsrc(cobj.get('source-params', None)))
                if current_status is False:
                    self.logger.error("Compilation of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                     cobj.get('name', config)))

                status &= current_status

            if cobj["sparse-test"]:
                skip = False
                args = [
                    obj["arch_name"], config, obj["compiler_options"]["CC"], obj["compiler_options"]["cflags"],
                    cobj.get('name', None), get_configsrc(cobj.get('source-params', None))
                ]

                if sparse_config is not None:
                    if sparse_config["enable"] is False:
                        self.logger.warning("Sparse global flag is disabled\n")
                        skip = True
                    else:
                        args.append(sparse_config["cflags"])
                        args.append(get_sha("base", sparse_config))
                        args.append(sparse_config["source"])

                if skip is False:
                    current_status = self.sparse(*args)

                    if current_status is False:
                        self.logger.error("Sparse test of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                         cobj.get('name', config)))
                    status &= current_status

            if cobj["smatch-test"]:
                skip = False
                args = [
                    obj["arch_name"], config, obj["compiler_options"]["CC"], obj["compiler_options"]["cflags"],
                    cobj.get('name', None), get_configsrc(cobj.get('source-params', None))
                ]

                if smatch_config is not None:
                    if smatch_config["enable"] is False:
                        self.logger.warning("Smatch global flag is disabled\n")
                        skip = True
                    else:
                        args.append(smatch_config["cflags"])
                        args.append(get_sha("base", smatch_config))
                        args.append(smatch_config["source"])

                if skip is False:
                    current_status = self.smatch(*args)

                    if current_status is False:
                        self.logger.error("Smatch test of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                         cobj.get('name', config)))
                    status &= current_status

            return status

        if static_config is not None and static_config["enable"] is True:
            # Compile standard configs
            for obj in static_config["test-list"]:

                for config in supported_configs:
                    # 'config in obj' replaces the Python 2-only has_key().
                    if isinstance(obj, collections.Mapping) and config in obj:
                        status &= static_test(obj, obj[config], config)

                # Compile custom configs
                for cobj in obj["customconfigs"]:
                    if cobj['name'] not in self.custom_configs:
                        self.custom_configs.append(cobj['name'])

                    self.resobj.add_config(cobj['name'])

                    status &= static_test(obj, cobj, cobj['defaction'])

        checkpatch_config = self.cfg.get("checkpatch-config", None)

        if checkpatch_config is not None and checkpatch_config["enable"] is True:
            if len(checkpatch_config["source"]) > 0:
                self.checkpatch_source = checkpatch_config["source"]

            status &= self.run_checkpatch(get_sha('head', checkpatch_config), get_sha('base', checkpatch_config))

        if custom_test is not None and custom_test["enable"] is True:
            for ctest in custom_test["test-list"]:
                status &= self.custom_test(ctest["name"], ctest["source"], ctest["arg-list"],
                                           get_sha("head", custom_test),
                                           get_sha("base", custom_test),
                                           ctest["enable-head-sub"],
                                           ctest["enable-base-sub"],
                                           ctest["enable-src-sub"])

        output_config = self.cfg.get("output-config", None)

        if output_config is not None and output_config["enable"] is True and len(output_config["url"]) > 0:

            # Commit the results file  used back to server.
            if output_config["sync-mode"] == "git":
                self.git_upload_results(remote=('origin', output_config["url"], output_config["branch"]),
                                        mode=output_config["mode"],
                                        msg=output_config["upload-msg"],
                                        append_kinfo=output_config.get("append-kinfo", False),
                                        resdir=None,
                                        relpath=output_config["remote-dir"],
                                        outfile=output_config["name"])

        # Always clean up the scratch config tree.
        shutil.rmtree(config_temp, ignore_errors=True)

        return status

    def _compile(self, arch='', config='', cc='', cflags=[], name='', cfg=None, clean_build=False):
        """
        Compile the kernel for the given arch/config combination.

        :param arch: Target architecture; must be one of supported_archs.
        :param config: Kernel config target (e.g. 'allyesconfig'). If it is
                       not one of supported_configs, `cfg` and `name` must
                       describe a custom config.
        :param cc: Compiler to use (empty string selects the default).
        :param cflags: Extra make flags passed to the kernel build.
        :param name: Name of the custom config, used for result tracking.
        :param cfg: Path to a custom config source file.
        :param clean_build: If True, wipe the output directory before building.
        :return: False on invalid arguments, otherwise a tuple
                 (status, warning_count, error_count, warning_lines, error_lines).
        """
        custom_config = False

        if arch not in supported_archs:
            self.logger.error("Invalid arch/config %s/%s" % (arch, config))
            return False

        if config not in supported_configs:
            # Not a standard config: a custom config needs both a name and a source.
            if cfg is None or len(cfg) == 0 or name is None or len(name) == 0:
                self.logger.error("Invalid arch/config %s/%s" % (arch, config))
                return False
            else:
                if name not in self.custom_configs:
                    self.custom_configs.append(name)

                self.resobj.add_config(name)

                custom_config = True

        if name in self.custom_configs:
            custom_config = True

        out_dir = os.path.join(self.out, arch, name if custom_config else config)

        if clean_build:
            self.sh.cmd("rm -fr %s/*" % out_dir, shell=True)

        kobj = BuildKernel(src_dir=self.src, out_dir=out_dir, arch=arch, cc=cc, cflags=cflags, logger=self.logger)

        # If custom config source is given, use it.
        if custom_config:
            kobj.copy_newconfig(cfg)

        getattr(kobj, 'make_' + config)()

        ret, out, err = kobj.make_kernel()

        def parse_results(outputlog, errorlog, status):
            # Scan the error log once. List comprehensions (unlike filter()
            # on Python 3, which returns a one-shot iterator) give reusable
            # lists, so counting them does not consume the data.
            data = errorlog.split('\n')

            warning_data = [line for line in data if "warning:" in line]
            error_data = [line for line in data if "error:" in line]

            return status, len(warning_data), len(error_data), warning_data, error_data

        status = True if ret == 0 else False

        if not status:
            self.logger.error(err)

        return parse_results(out, err, status)

    def compile(self, arch='', config='', cc='', cflags=[], name='', cfg=None):
        """
        Run a compile test and record warnings/errors in the result object.

        :return: True if the build succeeded, False otherwise.
        """
        status, warning_count, error_count, wdata, edata = self._compile(arch, config, cc, cflags, name, cfg)

        self.logger.info("List of warnings Arch:%s Config:%s Name:%s Count:%d\n", arch, config, name, warning_count)
        for line in wdata:
            self.logger.info(line)
        if warning_count > 0:
            self.logger.info("\n")

        self.logger.info("List of errors Arch:%s Config:%s Name:%s Count:%d\n", arch, config, name, error_count)
        for line in edata:
            self.logger.info(line)
        if error_count > 0:
            self.logger.info("\n")

        # Fall back to the config name when no custom name was supplied.
        name = name if name is not None and len(name) > 0 else config

        self.resobj.update_compile_test_results(arch, name, status, warning_count, error_count)

        return status

    def _get_bin_path(self, path):
        def which(program):
            import os
            def is_exe(fpath):
                return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

            fpath, fname = os.path.split(program)
            if fpath:
                if is_exe(program):
                    return program
            else:
                for path in os.environ["PATH"].split(os.pathsep):
                    exe_file = os.path.join(path, program)
                    if is_exe(exe_file):
                        return exe_file

            return None

        if path.startswith('.'):
            return os.path.join(os.getcwd(), path)
        elif path.startswith('/'):
            return path
        else:
            new_path = which(path)
            return new_path if which(path) is not None else path

    def _diff_count(self, data1, data2):
        ncount = 0
        for entry in data2:
            if entry not in data1:
                ncount = ncount + 1
                self.logger.info(entry)

        return ncount

    def sparse(self, arch='', config='', cc='', cflags=[], name='', cfg=None, sparse_flags=["C=2"],
               base=None, script_bin=SPARSE_BIN_PATH):
        """
        Run a sparse static-analysis build and record warning/error counts.

        The kernel is built with make CHECK pointed at the sparse binary.
        When `base` is given, that revision is checked out and built first so
        that only findings new relative to `base` are counted.

        :param arch: Target architecture.
        :param config: Kernel config target.
        :param cc: Compiler to use (empty selects the default).
        :param cflags: Extra make flags.
        :param name: Custom config name; reporting falls back to `config` when empty.
        :param cfg: Custom config source file, if any.
        :param sparse_flags: Make flags that enable source checking (C=2).
        :param base: Base git revision to diff findings against, or None.
        :param script_bin: Path or name of the sparse binary.
        :return: True if the build of the current revision succeeded, False otherwise.
        """

        base_warning_count = 0
        base_error_count = 0
        base_edata = []
        base_wdata = []
        flags = []

        # Point the kernel build's CHECK variable at the resolved sparse binary.
        flags.append('CHECK="' + self._get_bin_path(script_bin) + '"')

        if base is not None:
            # Remember the current HEAD so it can be restored after the base build.
            curr_head = self.git.head_sha()

            if self.git.cmd('checkout', base)[0] != 0:
                self.logger.error("Git checkout command failed in %s", base)
                return False

            status, base_warning_count,\
            base_error_count, base_wdata,\
            base_edata = self._compile(arch, config, cc, sparse_flags + flags + cflags, name, cfg, True)

            if status is False:
                return False

            # Restore the original revision before the real build.
            if self.git.cmd('checkout', curr_head)[0] != 0:
                self.logger.error("Git checkout command failed in %s", curr_head)
                return False

        status, warning_count,\
        error_count, wdata, edata = self._compile(arch, config, cc, sparse_flags + flags + cflags, name, cfg, True)

        # NOTE(review): typos ("warinings"/"warining") in this runtime log
        # string are left untouched on purpose.
        self.logger.info("Base warinings:%d Base errors:%d New warining:%d New errors:%d\n",
                         base_warning_count, base_error_count, warning_count, error_count)

        # Re-count: only findings absent from the base build are reported.
        self.logger.debug(format_h1("Diff between Base/New warnings", tab=2))
        warning_count = self._diff_count(base_wdata, wdata)
        self.logger.debug(format_h1("End of new warnings, count:%d" % warning_count, tab=2))

        self.logger.debug(format_h1("Diff between Base/New errors\n", tab=2))
        error_count = self._diff_count(base_edata, edata)
        self.logger.debug(format_h1("End of new errors, count:%d" % error_count, tab=2))

        name = config if name is None or len(name) == 0 else name

        self.resobj.update_sparse_test_results(arch, name, status, warning_count, error_count)

        return status

    def smatch(self, arch='', config='', cc='', cflags=[], name='', cfg=None, smatch_flags=["C=2"],
               base=None, script_bin="smatch"):
        """
        Run a smatch static-analysis build and record warning/error counts.

        The kernel is built with make CHECK pointed at the smatch binary
        (invoked with -p=kernel). When `base` is given, that revision is
        checked out and built first so that only findings new relative to
        `base` are counted.

        :param arch: Target architecture.
        :param config: Kernel config target.
        :param cc: Compiler to use (empty selects the default).
        :param cflags: Extra make flags.
        :param name: Custom config name; reporting falls back to `config` when empty.
        :param cfg: Custom config source file, if any.
        :param smatch_flags: Make flags that enable source checking (C=2).
        :param base: Base git revision to diff findings against, or None.
        :param script_bin: Path or name of the smatch binary.
        :return: True if the build of the current revision succeeded, False otherwise.
        """

        base_warning_count = 0
        base_error_count = 0
        base_edata = []
        base_wdata = []
        flags = []

        # Point the kernel build's CHECK variable at smatch in kernel mode.
        flags.append('CHECK="' + self._get_bin_path(script_bin) + ' -p=kernel"')

        if base is not None:
            # Remember the current HEAD so it can be restored after the base build.
            curr_head = self.git.head_sha()

            if self.git.cmd('checkout', base)[0] != 0:
                self.logger.error("Git checkout command failed in %s", base)
                return False

            status, base_warning_count,\
            base_error_count, base_wdata,\
            base_edata = self._compile(arch, config, cc,smatch_flags + flags + cflags, name, cfg, True)

            if status is False:
                return False

            # Restore the original revision before the real build.
            if self.git.cmd('checkout', curr_head)[0] != 0:
                self.logger.error("Git checkout command failed in %s", curr_head)
                return False

        status, warning_count,\
        error_count, wdata, edata = self._compile(arch, config, cc, smatch_flags + flags + cflags, name, cfg, True)

        # NOTE(review): typos ("warinings"/"warining") in this runtime log
        # string are left untouched on purpose.
        self.logger.info("Base warinings:%d Base errors:%d New warining:%d New errors:%d\n",
                         base_warning_count, base_error_count, warning_count, error_count)

        # Re-count: only findings absent from the base build are reported.
        self.logger.debug(format_h1("Diff between Base/New warnings", tab=2))
        warning_count = self._diff_count(base_wdata, wdata)
        self.logger.debug(format_h1("End of new warnings, count:%d" % warning_count, tab=2))

        self.logger.debug(format_h1("Diff between Base/New errors\n", tab=2))
        error_count = self._diff_count(base_edata, edata)
        self.logger.debug(format_h1("End of new errors, count:%d" % error_count, tab=2))

        name = config if name is None or len(name) == 0 else name

        self.resobj.update_smatch_test_results(arch, name, status, warning_count, error_count)

        return status

    def process_custom_test(self, name, ret):
        """Record a custom test's outcome; ret[0] is the shell exit status."""
        passed = (ret[0] == 0)
        self.resobj.update_custom_test_results(name, passed)

    def custom_test(self, name, script, arg_list=[], head=None, base=None,
                    enable_head_sub=False, enable_base_sub=False, enable_src_sub=False):
        """
        Run a user-supplied test script and record its result.

        :param name: Name of the test, used for result tracking.
        :param script: Path or name of the executable test script.
        :param arg_list: Extra command line arguments for the script.
        :param head: Head SHA, substituted for "$HEAD" in args when enabled.
        :param base: Base SHA, substituted for "$BASE" in args when enabled.
        :param enable_head_sub: Enable "$HEAD" substitution.
        :param enable_base_sub: Enable "$BASE" substitution.
        :param enable_src_sub: Enable "$SRC" (kernel source dir) substitution.
        :return: True if the script exited with status 0, False otherwise.
        """
        self.logger.info(format_h1("Running custom test %s" % name, tab=2))

        script = self._get_bin_path(script)

        if not os.path.exists(script):
            self.logger.error("Invalid script %s", script)
            return False

        cmd = [script] + list(arg_list)

        def substitute(token, value):
            # Replace `token` with `value` in every argument that contains it
            # (single helper instead of three copy-pasted loops).
            for index, item in enumerate(cmd):
                if token in item:
                    cmd[index] = item.replace(token, value)

        if head is not None and enable_head_sub:
            substitute("$HEAD", head)

        if base is not None and enable_base_sub:
            substitute("$BASE", base)

        if enable_src_sub:
            substitute("$SRC", self.src)

        ret = self.sh.cmd(' '.join(cmd))

        self.process_custom_test(name, ret)

        return ret[0] == 0

    def compile_list(self, arch='', config_list=[], cc='', cflags=[], name='', cfg=None):
        """Run a compile test per config in config_list; return the list of statuses."""
        self.logger.info(format_h1("Running compile tests", tab=2))

        return [self.compile(arch, config, cc, cflags, name, cfg)
                for config in config_list]

    def sparse_list(self, arch='', config_list=[], cc='', cflags=[], name='', cfg=None, sparse_flags=["C=2"],
                    base=None, script_bin=SPARSE_BIN_PATH):
        """Run a sparse test per config in config_list; return the list of statuses."""
        self.logger.info(format_h1("Running sparse tests", tab=2))

        return [self.sparse(arch, config, cc, cflags, name, cfg, sparse_flags, base, script_bin)
                for config in config_list]

    def smatch_list(self, arch='', config_list=[], cc='', cflags=[], name='', cfg=None, smatch_flags=["C=2"],
                    base=None, script_bin="smatch"):
        """Run a smatch test per config in config_list; return the list of statuses."""
        self.logger.info(format_h1("Running smatch tests", tab=2))

        return [self.smatch(arch, config, cc, cflags, name, cfg, smatch_flags, base, script_bin)
                for config in config_list]

    def run_checkpatch(self, head=None, base=None):
        """
        Run the kernel's checkpatch script on each commit in base..head.

        Every patch in the range is checked individually; per-patch
        warning/error counts are accumulated and stored in the result object.

        :param head: Head SHA/ref; defaults to self.head.
        :param base: Base SHA/ref; defaults to self.base.
        :return: True when the whole range was processed, False on any failure.
        """

        self.logger.info(format_h1("Runing checkpatch script", tab=2))

        self.enable_checkpatch = True
        head = self.head if head is None else head
        base = self.base if base is None else base

        # Cumulative counts across all patches in the range.
        gerrorcount = 0
        gwarningcount = 0

        try:
            if self.valid_git is False:
                raise Exception("Invalid git repo")

            if not os.path.exists(os.path.join(self.src, CHECK_PATCH_SCRIPT)):
                raise Exception("Invalid checkpatch script")

            # Number of commits between base and head.
            ret, count, err = self.git.cmd('rev-list', '--count',  str(base) + '..'+ str(head))
            if ret != 0:
                raise Exception("git rev-list command failed")

            self.logger.info("Number of patches between %s..%s is %d", base, head, int(count))

            def parse_results(data):
                # Pull "total: N errors, M warnings," out of checkpatch output.
                regex = r"total: ([0-9]*) errors, ([0-9]*) warnings,"
                match = re.search(regex, data)
                if match:
                    return int(match.group(1)), int(match.group(2))

                return 0, 0

            prev_index = 0

            # Walk the range one commit at a time: head~index..head~prev_index
            # is always a single-patch window.
            for index in range(1, int(count) + 1):
                commit_range = str(head) + '~' + str(index) + '..' + str(head) + '~' + str(prev_index)
                ret, out, err = self.sh.cmd(os.path.join(self.src, CHECK_PATCH_SCRIPT), '-g', commit_range)
                lerrorcount, lwarningcount = parse_results(out)
                # Only dump checkpatch output for patches with findings.
                if lerrorcount != 0 or lwarningcount != 0:
                    self.logger.info(out)
                    self.logger.info(err)
                gerrorcount = gerrorcount + int(lerrorcount)
                gwarningcount = gwarningcount + int(lwarningcount)
                self.logger.debug("lerror:%d lwarning:%d gerror:%d gwarning:%d\n", lerrorcount, lwarningcount,
                                  gerrorcount, gwarningcount)
                prev_index = index
        except Exception as e:
            self.logger.error(e)
            self.resobj.update_checkpatch_results(False, gwarningcount, gerrorcount)
            return False
        else:
            self.resobj.update_checkpatch_results(True, gwarningcount, gerrorcount)
            return True

    def print_results(self, test_type='all'):
        """Print the collected results for the given test type ('all' for every test)."""
        self.resobj.print_test_results(test_type=test_type)

    def get_results(self, test_type='all'):
        """Return the collected results for the given test type ('all' for every test)."""
        return self.resobj.get_test_results(test_type=test_type)

    def dump_results(self, outfile):
        """Write the collected results to `outfile`."""
        self.resobj.dump_results(outfile)
# Ejemplo n.º 14 (scraper artifact; kept as a comment so the file stays valid Python)
class KernelRelease(object):
    def __init__(self, src, cfg=None, logger=None):
        """
        Initialize a kernel release helper for the given kernel source tree.

        :param src: Path to the kernel source directory.
        :param cfg: Release config (JSON), parsed against the release schema.
        :param logger: Optional logger; a module logger is used when None.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = os.path.abspath(src)
        self.base = None
        self.head = None
        self.local_branch = None
        self.remote = None
        self.remote_branch = None
        # Pass the resolved self.logger (not the raw `logger` argument, which
        # may be None) so helpers log consistently — matches BuildKernel usage.
        self.git = GitShell(wd=self.src, logger=self.logger)
        self.sh = PyShell(wd=self.src, logger=self.logger)
        self.valid_git = False
        self.cfg = None
        self.cfgobj = None
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/release-schema.json')
        self.bundle_modes = ['branch', 'diff', 'commit_count']

        self.git.dryrun(False)
        self.sh.dryrun(False)

        # Bail out early (leaving self.cfg unparsed) if src is not a kernel tree.
        if not is_valid_kernel(src, self.logger):
            return

        self.cfgobj = JSONParser(self.schema,
                                 cfg,
                                 extend_defaults=True,
                                 os_env=True,
                                 logger=self.logger)
        self.cfg = self.cfgobj.get_cfg()

        if self.git.valid():
            self.valid_git = True

    def auto_release(self):
        """
        Drive the full release flow based on the parsed config.

        Depending on which config sections are enabled, this creates a git
        bundle, a quilt patch series and/or a tar archive, and uploads each
        artifact (and optionally the kernel itself) to the configured remotes.
        A failure in one section is logged and does not abort the others.

        :return: False when no valid config is available, True otherwise.
        """
        str_none = lambda x: None if len(x) == 0 else x.strip()
        if self.cfg is None:
            self.logger.error("Invalid config file %s", self.cfg)
            return False

        def conv_remotelist(remote_list):
            # Convert config remote entries to (name, url, branch, path) tuples.
            new_list = []
            for remote in remote_list:
                new_list.append((remote["name"], remote["url"],
                                 remote["branch"], remote["path"]))

            return new_list if len(new_list) > 0 else None

        def conv_taglist(tag_list):
            # Convert config tag entries to (name, msg) tuples. The original
            # passed two arguments to list.append(), which raises TypeError.
            new_list = []
            for tag in tag_list:
                new_list.append((tag["name"], tag["msg"]))

            return new_list if len(new_list) > 0 else None

        def conv_copyformat(flist):
            # "*" means "copy everything", which git_upload expresses as None.
            if "*" in flist:
                return None
            else:
                return flist

        try:
            params = self.cfg.get("bundle", None)

            if params is not None and params["enable"]:
                if not self.valid_git:
                    # 'raise' was missing here: the exception was built but never thrown.
                    raise Exception(
                        "Kernel is not a git repo. So bundle option is not supported"
                    )

                base = params["base"]["value"]
                if params["base"]["auto"]:
                    base = self.git.cmd('describe --abbrev=0 --tags')[1]
                base = str_none(base)

                head = params["head"]["value"]
                if params["head"]["auto"]:
                    head = self.git.head_sha()
                head = str_none(head)

                bundle = self.generate_git_bundle(params["outname"],
                                                  params["mode"],
                                                  str_none(params["branch"]),
                                                  head, base,
                                                  params["commit_count"])

                # Raise only when bundle generation actually failed; a missing
                # upload-params section just means "no upload requested".
                if bundle is None:
                    raise Exception("Generate bundle failed")

                uparams = params.get("upload-params", None)

                if uparams is not None:
                    self.git_upload(bundle, str_none(params["upload-dir"]),
                                    uparams["new-commit"],
                                    conv_copyformat(uparams["copy-formats"]),
                                    uparams["commit-msg"],
                                    conv_remotelist(uparams["remote-list"]),
                                    uparams["use-refs"], uparams["force-push"],
                                    uparams["clean-update"],
                                    conv_taglist(uparams["tag-list"]))

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            # Reuse `params` instead of re-indexing self.cfg (avoids KeyError
            # when the section is absent).
            if params is not None and params["enable"]:
                self.logger.info(
                    format_h1("Successfully created git bundle", tab=2))

        try:
            params = self.cfg.get("quilt", None)

            if params is not None and params["enable"]:
                if not self.valid_git:
                    raise Exception(
                        "Kernel is not a git repo. So quilt option is not supported"
                    )

                base = params["base"]["value"]
                if params["base"]["auto"]:
                    base = self.git.cmd('describe --abbrev=0 --tags')[1]
                base = str_none(base)

                head = params["head"]["value"]
                if params["head"]["auto"]:
                    head = self.git.head_sha()
                head = str_none(head)

                if head is None or base is None:
                    # Format eagerly; Exception() does not apply %-style args.
                    raise Exception("Invalid base/head %s/%s" % (base, head))

                self.logger.info("head:%s base:%s", head, base)

                quilt = self.generate_quilt(str_none(params["branch"]), base,
                                            head, params['outname'],
                                            str_none(params["sed-file"]),
                                            str_none(params["audit-script"]),
                                            params['series-comment'])

                if quilt is None:
                    raise Exception("Generate quilt failed")

                uparams = params.get("upload-params", None)

                if uparams is not None:
                    ret = self.git_upload(
                        quilt, str_none(params["upload-dir"]),
                        uparams["new-commit"],
                        conv_copyformat(uparams["copy-formats"]),
                        uparams["commit-msg"],
                        conv_remotelist(uparams["remote-list"]),
                        uparams["use-refs"], uparams["force-push"],
                        uparams["clean-update"],
                        conv_taglist(uparams["tag-list"]))

                    if ret is None:
                        raise Exception("Quilt upload failed")

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            if params is not None and params["enable"]:
                self.logger.info(
                    format_h1("Successfully created quilt series", tab=2))

        try:
            params = self.cfg.get("tar", None)
            if params is not None and params["enable"]:
                tarname = self.generate_tar_gz(params["outname"],
                                               str_none(params["branch"]),
                                               params["skip-files"])
                if tarname is None:
                    raise Exception("Create tar file failed")

                uparams = params.get("upload-params", None)
                if uparams is not None:
                    ret = self.git_upload(
                        tarname, str_none(params["upload-dir"]),
                        uparams["new-commit"],
                        conv_copyformat(uparams["copy-formats"]),
                        uparams["commit-msg"],
                        conv_remotelist(uparams["remote-list"]),
                        uparams["use-refs"], uparams["force-push"],
                        uparams["clean-update"],
                        conv_taglist(uparams["tag-list"]))
                    if ret is None:
                        raise Exception("tar upload failed")

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            if params is not None and params["enable"]:
                self.logger.info(
                    format_h1("Successfully created tar file", tab=2))

        try:
            params = self.cfg.get("upload-kernel", None)
            if params is not None and params["enable"]:
                uparams = params.get("upload-params", None)
                if uparams is None:
                    raise Exception("upload-kernel is enabled but has no upload-params")
                ret = self.git_upload(self.src, str_none(params["upload-dir"]),
                                      uparams["new-commit"],
                                      conv_copyformat(uparams["copy-formats"]),
                                      uparams["commit-msg"],
                                      conv_remotelist(uparams["remote-list"]),
                                      uparams["use-refs"],
                                      uparams["force-push"],
                                      uparams["clean-update"],
                                      conv_taglist(uparams["tag-list"]))
                if ret is None:
                    raise Exception("Upload kernel failed")

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            if params is not None and params["enable"]:
                self.logger.info(
                    format_h1("Successfully Uploaded Linux kernel", tab=2))

        return True

    def git_upload(self,
                   src,
                   uploaddir=None,
                   new_commit=False,
                   copy_formats=None,
                   commit_msg="Inital commit",  # NOTE(review): "Inital" typo; the default is part of the API, fix with callers in mind.
                   remote_list=None,
                   use_refs=False,
                   force_update=False,
                   clean_update=False,
                   tag_list=None):
        """
        Upload the kernel or tar file or quilt series to a given remote_list.
        :param src: Source dir. Either kernel, quilt or tar file.
        :param uploaddir: Directory used for uploading the new changes. If none, then temp_dir will be used.
        :param new_commit: Create new commit and then upload (True|False).
        :param copy_formats: List of glob format of the files to be added to the commit.
        :param commit_msg: Commit Message
        :param remote_list: [(Remote Name, Remote URL, Remote branch, Remote dest dir)]
        :param use_refs: Use refs/for when pushing (True | False).
        :param force_update: Force update when pushing (True | False).
        :param clean_update: Remove existing content before adding and pushing your change (True | False).
        :param tag_list: [("name", "msg")], None if no tagging support needed. Use None for no message.
        :return: The directory that was pushed (src or uploaddir) on success, None on failure.
        """

        # Check for data validity.
        repo_dir = src
        # Check if the source directory is valid.
        src = os.path.abspath(src)
        if not os.path.exists(src):
            self.logger.error("Source %s does not exit", src)
            return None

        # Check the validity of tags
        if tag_list is not None:
            if not isinstance(tag_list, list):
                self.logger.error("Invalid tag type")
                return None
            for tag in tag_list:
                if not isinstance(tag, tuple) or len(tag) != 2:
                    self.logger.error("Invalid tag %s", tag)
                    return None

        # Check for validity of copyformats
        if copy_formats is not None:
            if not isinstance(copy_formats, list):
                self.logger.error("Invalid copy format %s", copy_formats)
                return None

        # Create a valid out directory
        # temp_dir is always created (and always removed at the end), even
        # when an explicit uploaddir is supplied.
        temp_dir = tempfile.mkdtemp()
        if uploaddir is not None:
            uploaddir = os.path.abspath(uploaddir)
        else:
            uploaddir = temp_dir

        def copyanything(src, dst):
            # Copy either a whole directory's contents or a single file into dst.
            self.logger.info("Copy everything from %s to %s", src, dst)
            if not os.path.isfile(src):
                sh = PyShell(wd=src, logger=self.logger)
                sh.cmd("cp -a %s/* %s/" % (src, dst), shell=True)
            else:
                sh = PyShell(wd=self.src, logger=self.logger)
                sh.cmd("cp -a %s %s/" % (src, dst), shell=True)

        def upload_tags(remote, tag_list):
            # Create (annotated if a message is given) tags and push each one.
            if tag_list is not None:
                for tag in tag_list:
                    # Push the tags if required
                    if tag[0] is not None:
                        if tag[1] is not None:
                            ret = git.cmd('tag', '-a', tag[0], '-m', tag[1])[0]
                        else:
                            ret = git.cmd('tag', tag[0])[0]
                        if ret != 0:
                            raise Exception("git tag %s failed" % (tag[0]))

                        if git.cmd('push', remote, tag[0])[0] != 0:
                            raise Exception("git push tag %s to %s failed" %
                                            (tag[0], remote))

        try:
            # NOTE(review): remote_list=None raises TypeError here; it is
            # caught below, logged, and the function returns None.
            for remote in remote_list:
                repo_dir = src
                if new_commit:

                    # Stage the content in uploaddir on top of the remote branch.
                    git = GitShell(wd=uploaddir,
                                   init=True,
                                   remote_list=[(remote[0], remote[1])],
                                   fetch_all=True)

                    git.cmd('clean -xdfq')
                    git.cmd('reset --hard')

                    if git.cmd("checkout",
                               remote[0] + '/' + remote[2])[0] != 0:
                        raise Exception(
                            "Git checkout remote:%s branch:%s failed",
                            remote[1], remote[2])

                    # If clean update is given, remove the contents of current repo.
                    # NOTE(review): the message mentions '*.patch' but the
                    # command removes everything ('git rm *').
                    if clean_update and git.cmd('rm *')[0] != 0:
                        raise Exception("git rm -r *.patch failed")

                    dest_dir = os.path.join(
                        uploaddir,
                        remote[3]) if remote[3] is not None else uploaddir
                    if not os.path.exists(dest_dir):
                        os.makedirs(dest_dir)

                    # Copy everything, or only the requested glob patterns.
                    if copy_formats is None:
                        copyanything(src, dest_dir)
                    elif os.path.isdir(src):
                        file_list = []
                        for format in copy_formats:
                            file_list += glob.glob(os.path.join(src, format))
                        for item in file_list:
                            shutil.copyfile(
                                item,
                                os.path.join(dest_dir, os.path.basename(item)))

                    if git.cmd('add *')[0] != 0:
                        raise Exception("git add command failed")

                    if git.cmd('commit -s -m "' + commit_msg + '"')[0]:
                        raise Exception("git commit failed")

                    repo_dir = uploaddir

                # Push from repo_dir (uploaddir when a new commit was made,
                # otherwise the original src).
                git = GitShell(wd=repo_dir,
                               init=True,
                               remote_list=[(remote[0], remote[1])],
                               fetch_all=True)

                rbranch = remote[2]

                if git.push('HEAD',
                            remote[0],
                            rbranch,
                            force=force_update,
                            use_refs=use_refs)[0] != 0:
                    raise Exception("git push to %s %s failed" %
                                    (remote[0], rbranch))

                upload_tags(remote[0], tag_list)

        except Exception as e:
            self.logger.error(e)
            shutil.rmtree(temp_dir)
            return None
        else:
            shutil.rmtree(temp_dir)
            return repo_dir

    def generate_quilt(self,
                       local_branch=None,
                       base=None,
                       head=None,
                       patch_dir='quilt',
                       sed_file=None,
                       audit_script=None,
                       series_comment=''):
        """
        Generate the quilt series for the given kernel source.
        :param local_branch: Name of the kernel branch.
        :param base: First SHA ID.
        :param head: Head SHA ID.
        :param patch_dir: Output directory for storing the quilt series. If it exists, it will be removed.
        :param sed_file: Sed format list.
        :param audit_script: Audid script. It will be called with patch_dir as input. If it return non zero value
                             then this function will exit and return None.
        :param series_comment: Comments to add on top of series file.
        :return: Return patch_dir or None
        """

        set_val = lambda x, y: y if x is None else x

        self.logger.info(format_h1("Generating quilt series", tab=2))

        if not self.valid_git:
            self.logger.error("Invalid git repo %s", self.src)
            return None

        if sed_file is not None and not os.path.exists(sed_file):
            self.logger.error("sed pattern file %s does not exist", sed_file)
            return None

        if os.path.exists(os.path.abspath(patch_dir)):
            shutil.rmtree(patch_dir, ignore_errors=True)

        os.makedirs(patch_dir)

        local_branch = set_val(local_branch, self.git.current_branch())

        if self.git.cmd('checkout', local_branch)[0] != 0:
            self.logger.error("git checkout command failed in %s", self.src)
            return None

        try:

            series_file = os.path.join(patch_dir, 'series')

            # if base SHA is not given use TAIL as base SHA
            if base is None:
                base = self.git.base_sha()
                if base is None:
                    raise Exception("git log command failed")

            # if head SHA is not given use HEAD as head SHA
            if head is None:
                head = self.git.head_sha()
                if head is None:
                    raise Exception("git fetch head SHA failed")

            # Create the list of patches 'git format-patch -C -M base..head -o patch_dir'
            ret, out, err = self.git.cmd('format-patch', '-C', '-M',
                                         base.strip() + '..' + head.strip(),
                                         '-o', patch_dir)
            if ret != 0:
                raise Exception(
                    "git format patch command failed out: %s error: %s" %
                    (out, err))

            # Format the patches using sed
            if sed_file is not None:
                ret, out, err = self.sh.cmd('sed -i -f%s %s/*.patch' %
                                            (sed_file, patch_dir),
                                            shell=True)
                if ret != 0:
                    raise Exception("sed command failed out: %s error: %s" %
                                    (out, err))

            # Make sure the patches passes audit check.
            if audit_script is not None:
                ret, out, err = self.sh.cmd(audit_script,
                                            patch_dir,
                                            shell=True)
                if ret != 0:
                    raise Exception("Audid check failed out: %s error: %s" %
                                    (out, err))

            # Write series file comments.
            with open(series_file, 'w+') as fobj:
                fobj.write(series_comment)

            # Write the list of series file.
            ret, out, err = self.sh.cmd('ls -1 *.patch >> series',
                                        wd=patch_dir,
                                        shell=True)
            if ret != 0:
                raise Exception(
                    "Writing to patch series file failed. Out:%s Error: %s" %
                    (out, err))

        except Exception as e:
            if os.path.exists(patch_dir):
                shutil.rmtree(patch_dir)
            self.logger.error(e, exc_info=True)
            return None
        else:
            return patch_dir

    def generate_git_bundle(self,
                            outfile,
                            mode='branch',
                            local_branch=None,
                            head=None,
                            base=None,
                            commit_count=0):
        """
        Create a git bundle for the given kernel source. A bundle can be
        created in three different modes:
            1. branch - The given git branch will be bundled.
            2. commit_count - The given number of commits will be bundled.
            3. diff - A range of commits (base..head) will be bundled.
        :param outfile: Name of the git bundle.
        :param mode: One of 'branch', 'commit_count' or 'diff'.
        :param local_branch: Name of the git branch. If None, the current
                             branch is used.
        :param head: Head SHA ID or tag (required for 'diff' mode).
        :param base: Base SHA ID or tag (required for 'diff' mode).
        :param commit_count: Number of commits (required > 0 for
                             'commit_count' mode).
        :return: Absolute path of the bundle on success, None otherwise.
        """

        set_val = lambda x, y: y if x is None else x

        # Check the validity of bundle mode.
        if mode not in self.bundle_modes:
            self.logger.error("Invalid bundle mode %s", mode)
            return None

        # Check the validity of outfile.
        if outfile is None or outfile == "":
            self.logger.error("Invalid bundle name %s", outfile)
            return None

        # Previously, 'diff' mode with a missing head/base silently created
        # no bundle and still returned outfile as success. Fail early instead.
        if mode == 'diff' and (head is None or base is None):
            self.logger.error("Mode 'diff' requires both base and head")
            return None

        # 'commit_count' mode needs a positive count; the default 0 would
        # produce a bogus '-0' git argument.
        if mode == 'commit_count' and commit_count <= 0:
            self.logger.error("Invalid commit count %s", commit_count)
            return None

        # Check whether kernel source is a valid git repo.
        if not self.valid_git:
            self.logger.error("Invalid git repo %s", self.src)
            return None

        # If local branch is None, then the current branch will be used.
        local_branch = set_val(local_branch, self.git.current_branch())

        # Normalize the bundle path; git overwrites an existing bundle file.
        outfile = os.path.abspath(outfile)

        self.logger.info(format_h1("Generating git bundle", tab=2))

        try:
            if self.git.cmd('checkout', local_branch)[0] != 0:
                raise Exception("Git checkout command failed in %s" % self.src)

            # Modes are mutually exclusive, so chain them with elif.
            if mode == 'branch':
                if self.git.cmd('bundle', 'create', outfile,
                                local_branch)[0] != 0:
                    raise Exception("Git bundle create command failed")
            elif mode == 'commit_count':
                if self.git.cmd('bundle', 'create', outfile,
                                '-' + str(commit_count), local_branch)[0] != 0:
                    raise Exception("Git bundle create command failed")
            elif mode == 'diff':
                if self.git.cmd('bundle', 'create', outfile,
                                str(base) + '..' + str(head))[0] != 0:
                    raise Exception("Git bundle create command failed")
        except Exception as e:
            self.logger.error(e, exc_info=True)
            return None
        else:
            return outfile

    def generate_tar_gz(self, outfile, branch=None, skip_files=None):
        """
        Create a compressed tar file of the kernel source.
        NOTE: despite the method name, the archive is xz-compressed ('-J');
        this behaviour is kept unchanged for backward compatibility.
        :param outfile: Name of the tar file.
        :param branch: Git branch to check out first (needs a valid git repo).
        :param skip_files: List of files to be skipped. Defaults to ['.git'].
        :return: Filename on success, None otherwise.
        """
        # Avoid a shared mutable default argument.
        if skip_files is None:
            skip_files = ['.git']

        # Check if outfile is valid.
        if outfile is None or outfile == "":
            self.logger.error("Invalid output file %s name\n", outfile)
            return None

        self.logger.info(format_h1("Generating tar gz", tab=2))

        # If the branch option is used, the kernel source should be a valid
        # git repo.
        if branch is not None and self.valid_git:
            if self.git.cmd('checkout', branch)[0] != 0:
                self.logger.error("Git checkout branch %s failed in %s",
                                  branch, self.src)
                return None

        # Emit one --exclude option per entry. The previous
        # '--exclude={a,b}' form relied on shell brace expansion, which does
        # not happen for a single element (e.g. '{.git}' is passed literally)
        # and never happens under POSIX sh, so the default exclusion of .git
        # was silently broken.
        tar_cmd = "tar"
        for skip in skip_files:
            tar_cmd += " --exclude=%s" % skip
        tar_cmd += ' -Jcf'

        try:
            ret = self.sh.cmd(
                "%s %s %s" %
                (tar_cmd, os.path.abspath(outfile), os.path.abspath(self.src)),
                shell=True)
            if ret[0] != 0:
                raise Exception("Create tar command failed")
        except Exception as e:
            self.logger.error(e, exc_info=True)
            return None
        else:
            return outfile
    # Example 15 (scraped example-site marker, kept as a comment so the file parses)
    def git_upload(self,
                   src,
                   uploaddir=None,
                   new_commit=False,
                   copy_formats=None,
                   commit_msg="Inital commit",
                   remote_list=None,
                   use_refs=False,
                   force_update=False,
                   clean_update=False,
                   tag_list=None):
        """
        Upload the kernel, a tar file or a quilt series to the given remotes.
        :param src: Source dir (kernel or quilt dir) or a single file (tar).
        :param uploaddir: Directory used for creating the new commit. If None,
                          a temp dir will be used.
        :param new_commit: Create a new commit and then upload (True|False).
        :param copy_formats: List of glob patterns of files to add to the
                             commit.
        :param commit_msg: Commit message. Default value kept byte-identical
                           (including historical spelling) for compatibility.
        :param remote_list: [(Remote Name, Remote URL, Remote branch, Remote dest dir)]
        :param use_refs: Use refs/for when pushing (True|False).
        :param force_update: Force update when pushing (True|False).
        :param clean_update: Remove existing content before adding and pushing
                             your change (True|False).
        :param tag_list: [("name", "msg")] tags to create and push, or None.
                         Use None as msg for an un-annotated tag.
        :return: The repo dir that was pushed on success, None otherwise.
        """

        # Check for data validity.
        repo_dir = src
        # Check if the source directory/file is valid.
        src = os.path.abspath(src)
        if not os.path.exists(src):
            self.logger.error("Source %s does not exist", src)
            return None

        # Check the validity of tags.
        if tag_list is not None:
            if not isinstance(tag_list, list):
                self.logger.error("Invalid tag type")
                return None
            for tag in tag_list:
                if not isinstance(tag, tuple) or len(tag) != 2:
                    self.logger.error("Invalid tag %s", tag)
                    return None

        # Check the validity of copy_formats.
        if copy_formats is not None:
            if not isinstance(copy_formats, list):
                self.logger.error("Invalid copy format %s", copy_formats)
                return None

        # The default remote_list=None used to crash with a TypeError inside
        # the upload loop; reject it up front. An empty list remains a no-op.
        if remote_list is None:
            self.logger.error("Invalid remote list %s", remote_list)
            return None

        # Create a valid out directory.
        temp_dir = tempfile.mkdtemp()
        if uploaddir is not None:
            uploaddir = os.path.abspath(uploaddir)
        else:
            uploaddir = temp_dir

        def copyanything(copy_src, dst):
            # Copy a whole directory's contents, or a single file, into dst.
            self.logger.info("Copy everything from %s to %s", copy_src, dst)
            if not os.path.isfile(copy_src):
                sh = PyShell(wd=copy_src, logger=self.logger)
                sh.cmd("cp -a %s/* %s/" % (copy_src, dst), shell=True)
            else:
                sh = PyShell(wd=self.src, logger=self.logger)
                sh.cmd("cp -a %s %s/" % (copy_src, dst), shell=True)

        def upload_tags(remote, tags):
            # Create (annotated if a message is given) and push each tag.
            if tags is None:
                return
            for tag in tags:
                if tag[0] is None:
                    continue
                if tag[1] is not None:
                    ret = git.cmd('tag', '-a', tag[0], '-m', tag[1])[0]
                else:
                    ret = git.cmd('tag', tag[0])[0]
                if ret != 0:
                    raise Exception("git tag %s failed" % tag[0])

                if git.cmd('push', remote, tag[0])[0] != 0:
                    raise Exception("git push tag %s to %s failed" %
                                    (tag[0], remote))

        try:
            for remote in remote_list:
                repo_dir = src
                if new_commit:

                    git = GitShell(wd=uploaddir,
                                   init=True,
                                   remote_list=[(remote[0], remote[1])],
                                   fetch_all=True)

                    git.cmd('clean -xdfq')
                    git.cmd('reset --hard')

                    if git.cmd("checkout",
                               remote[0] + '/' + remote[2])[0] != 0:
                        # Fixed: the original passed the format args to
                        # Exception() unformatted instead of applying them.
                        raise Exception(
                            "Git checkout remote:%s branch:%s failed" %
                            (remote[1], remote[2]))

                    # If clean update is given, remove the contents of the
                    # current repo.
                    if clean_update and git.cmd('rm *')[0] != 0:
                        raise Exception("git rm * failed")

                    dest_dir = os.path.join(
                        uploaddir,
                        remote[3]) if remote[3] is not None else uploaddir
                    if not os.path.exists(dest_dir):
                        os.makedirs(dest_dir)

                    if copy_formats is None:
                        copyanything(src, dest_dir)
                    elif os.path.isdir(src):
                        # Collect every file matching any of the given globs.
                        file_list = []
                        for pattern in copy_formats:
                            file_list += glob.glob(os.path.join(src, pattern))
                        for item in file_list:
                            shutil.copyfile(
                                item,
                                os.path.join(dest_dir, os.path.basename(item)))

                    if git.cmd('add *')[0] != 0:
                        raise Exception("git add command failed")

                    if git.cmd('commit -s -m "' + commit_msg + '"')[0] != 0:
                        raise Exception("git commit failed")

                    repo_dir = uploaddir

                git = GitShell(wd=repo_dir,
                               init=True,
                               remote_list=[(remote[0], remote[1])],
                               fetch_all=True)

                rbranch = remote[2]

                if git.push('HEAD',
                            remote[0],
                            rbranch,
                            force=force_update,
                            use_refs=use_refs)[0] != 0:
                    raise Exception("git push to %s %s failed" %
                                    (remote[0], rbranch))

                upload_tags(remote[0], tag_list)

        except Exception as e:
            self.logger.error(e)
            return None
        else:
            return repo_dir
        finally:
            # Always clean up the temp dir, on success and failure alike
            # (the original duplicated the rmtree in both branches).
            shutil.rmtree(temp_dir, ignore_errors=True)