Ejemplo n.º 1
0
    def __init__(self, src_dir=None, out_dir=None, repo_url=None, cfg=None, logger=None):
        """
        Initialize an Android build workspace.

        :param src_dir: Source directory (defaults to the current working dir).
        :param out_dir: Output directory (defaults to <src>/out).
        :param repo_url: URL/path of the 'repo' helper script; a "repo-script"
                         entry in cfg takes precedence when non-empty.
        :param cfg: Optional Json config file validated against the android schema.
        :param logger: Logger object.
        """
        self.logger = logger or logging.getLogger(__name__)

        self.src = os.path.abspath(set_val(src_dir, os.getcwd()))
        self.out = os.path.abspath(set_val(out_dir, os.path.join(self.src, 'out')))
        self.repo = '~/bin/repo'
        self.target = None
        self.lunch_product = None
        self.out_product = None
        self.product_list = None
        self.valid = False
        self.schema = pkg_resources.resource_filename('android_build', 'schemas/android-schema.json')
        self.cfg = None
        self.obj = None

        self.sh = pyshell.PyShell(wd=self.src, stream_stdout=True)
        self.sh.update_shell()

        if cfg is not None:
            self.obj = JSONParser(self.schema, cfg, extend_defaults=True, os_env=True, logger=logger)
            self.cfg = self.obj.get_cfg()
            repo_url = self.cfg["repo-script"] if valid_str(self.cfg["repo-script"]) else repo_url
            repo_dir = self.cfg.get("repo-dir", None)
            if valid_str(repo_dir):
                # BUG FIX: os.path.path does not exist (AttributeError); the
                # intent is to append the configured sub-dir to the source path.
                self.src = os.path.join(self.src, repo_dir)

        if not self._get_repo_script(repo_url):
            self.logger.error("repo setup failed")
            # __init__ cannot return a value; bail out leaving self.valid False.
            return

        self.valid = True
Ejemplo n.º 2
0
    def __init__(self, src, cfg=None, logger=None):
        """
        Initialize the kernel release helper for the given source tree.

        :param src: Path to the kernel source directory.
        :param cfg: Optional Json config file validated against the release schema.
        :param logger: Logger object.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = os.path.abspath(src)

        # Branch/commit related state, filled in by later operations.
        self.base = None
        self.head = None
        self.local_branch = None
        self.remote = None
        self.remote_branch = None

        self.git = GitShell(wd=self.src, logger=logger)
        self.sh = PyShell(wd=self.src, logger=logger)
        self.valid_git = False
        self.cfg = None
        self.cfgobj = None
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/release-schema.json')
        self.bundle_modes = ['branch', 'diff', 'commit_count']

        # Make sure both shells actually execute commands.
        self.git.dryrun(False)
        self.sh.dryrun(False)

        if not is_valid_kernel(src, logger):
            return

        self.cfgobj = JSONParser(self.schema, cfg, extend_defaults=True,
                                 os_env=True, logger=logger)
        self.cfg = self.cfgobj.get_cfg()

        if self.git.valid():
            self.valid_git = True
Ejemplo n.º 3
0
    def __init__(self, src, cfg=None, out=None, rname=None, rurl=None, branch=None, head=None, base=None,
                 res_cfg=None, logger=None):
        """
        Initialize a kernel test session.

        :param src: Kernel source directory.
        :param cfg: Optional test Json config file.
        :param out: Output directory (defaults to <src>/out).
        :param rname: Optional git remote name; when given the remote is added
                      and fetched, and branch is prefixed with "<rname>/".
        :param rurl: URL for the rname remote.
        :param branch: Branch to check out (defaults to the current branch).
        :param head: Head SHA (defaults to the checked-out HEAD).
        :param base: Base SHA (defaults to the branch base).
        :param res_cfg: Previous results config merged into the results object.
        :param logger: Logger object.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = src
        # BUG FIX: os.path.absapth is a typo (AttributeError whenever 'out'
        # is passed); use os.path.abspath.
        self.out = os.path.join(self.src, 'out') if out is None else os.path.abspath(out)
        self.branch = branch
        self.rname = rname
        self.rurl = rurl
        self.head = head
        self.base = base
        self.valid_git = False
        self.schema = pkg_resources.resource_filename('klibs', 'schemas/test-schema.json')
        self.cfg = None
        self.cfgobj = None
        self.resobj = KernelResults(self.src, old_cfg=res_cfg, logger=self.logger)
        self.git = GitShell(wd=self.src, logger=logger)
        self.sh = PyShell(wd=self.src, logger=logger)
        self.checkpatch_source = CHECK_PATCH_SCRIPT
        self.custom_configs = []

        # When a remote is requested, make sure the work dir exists, is a git
        # repo, and has the remote added and fetched.
        if self.rname is not None and len(self.rname) > 0:
            if not os.path.exists(self.src):
                os.makedirs(self.src)
            if not self.git.valid():
                self.git.init()
            self.git.add_remote(self.rname, rurl)
            self.git.cmd('fetch %s' % self.rname)
            self.branch = self.rname + '/' + self.branch

        self.valid_git = self.git.valid()

        if self.valid_git:
            if self.branch is not None and len(self.branch) > 0:
                if self.git.cmd('checkout', self.branch)[0] != 0:
                    self.logger.error("Git checkout command failed in %s", self.src)
                    return
            else:
                self.branch = self.git.current_branch()

            # Update base & head if they are not given.
            if self.head is None:
                self.head = self.git.head_sha()
            if self.base is None:
                self.base = self.git.base_sha()

            self.resobj.update_kernel_params(base=self.base, head=self.head, branch=self.branch)

        if not is_valid_kernel(src, logger):
            return

        self.version = BuildKernel(self.src).uname

        if len(self.version) > 0:
            self.resobj.update_kernel_params(version=self.version)

        if cfg is not None:
            self.cfgobj = JSONParser(self.schema, cfg, extend_defaults=True, os_env=True, logger=logger)
            self.cfg = self.cfgobj.get_cfg()
Ejemplo n.º 4
0
 def createlists1(self):
     """Fetch the Hacker News front page and append each story title to self.llist."""
     data = urllib.urlopen("http://api.ihackernews.com/page")
     stories = JSONParser(data).convert_to_python()
     for item in stories["items"]:
         # Titles are ASCII-encoded with XML character references so that
         # non-ASCII characters survive the encoding.
         self.llist.append(item["title"].encode("ascii", "xmlcharrefreplace"))
     data.close()
Ejemplo n.º 5
0
    def __init__(self, cfg=None, logger=None):
        """
        Email init()
        :param cfg: Json config file; if given, smtp/from/to parameters will be parsed from Json.
        :param logger: Logger object.
        """
        self.logger = logger or logging.getLogger(__name__)

        # SMTP server related param defaults.
        self.smtp_server = None
        self.smtp_thread = None
        self.server_obj = None
        self.client_obj = None
        self.smtp_port = 0
        self.supported_auths = ['TLS']
        self.auth = None
        self.username = None
        self.password = None

        # Config file related defaults.
        self.cfg_src = cfg
        self.cfg = None
        self.cfgobj = None
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/email-schema.json')

        # Set from/to/cc/bcc defaults
        self._from = None
        self._to = None
        self._cc = None
        self._bcc = None

        # Update params if cfg file is given.
        if cfg is not None:
            # Removed an unused 'set_def' lambda that was defined here; it
            # referenced self.cfg before it was assigned and was never called.
            self.cfgobj = JSONParser(self.schema,
                                     cfg,
                                     extend_defaults=True,
                                     os_env=True,
                                     logger=logger)
            self.cfg = self.cfgobj.get_cfg()

            self.set_header(self.cfg["from"], self.cfg["to"], self.cfg["cc"],
                            self.cfg["bcc"])
            self.set_smtp(self.cfg["smtp-server"], self.cfg["smtp-port"],
                          self.cfg["smtp-authentication"],
                          self.cfg["smtp-username"], self.cfg["smtp-password"])
Ejemplo n.º 6
0
    def simulate(self, networkjson, tcp):
        """
        Parse the network description, run the event simulation until every
        flow has finished, and return the parsed element map.
        """
        elements = JSONParser(networkjson).parser(tcp)
        simulator = Event_Simulator(elements)

        while not simulator.are_flows_done():
            simulator.run_next_event()

        return elements
Ejemplo n.º 7
0
    def __init__(self, src=None, old_cfg=None, logger=None):
        """
        Build a kernel results container pre-populated with default values.

        :param src: Kernel source directory (stored for reference).
        :param old_cfg: Optional existing results config merged in at the end.
        :param logger: Logger object.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.schema = pkg_resources.resource_filename('klibs', 'schemas/results-schema.json')
        self.src = src
        self.results = {}
        self.kernel_params = {
            "head": "",
            "base": "",
            "branch": "",
            "version": "Linux",
        }
        self.static_results = []
        self.checkpatch_results = {}
        self.custom_results = []
        self.bisect_results = {}
        self.custom_configs = []

        for arch in supported_archs:
            self.add_arch(arch)

        self.checkpatch_results.update({
            "status": "N/A",
            "warning_count": 0,
            "error_count": 0,
        })

        self.bisect_results.update({
            "status": "N/A",
            "patch-list": [],
        })

        # Assemble the default results object and validate it against the
        # schema so that all schema defaults get filled in.
        res_obj = {
            "kernel-params": self.kernel_params,
            "static-test": self.static_results,
            "checkpatch": self.checkpatch_results,
            "custom-test": self.custom_results,
            "bisect": self.bisect_results,
        }

        self.cfgobj = JSONParser(self.schema, res_obj, extend_defaults=True)
        self.results = self.cfgobj.get_cfg()

        if old_cfg is not None and not self.update_results(old_cfg):
            return
Ejemplo n.º 8
0
 def update_results(self, new_cfg):
     """
     Validate new_cfg against the results schema and merge it into the
     current results.

     The kernel-params fields (head/base/branch/version) must agree between
     the current and incoming results when both sides are non-empty, except
     that a placeholder "Linux" version on either side is tolerated.

     :param new_cfg: Results config to merge.
     :return: True on success, False when validation fails.
     """
     try:
         new_results = JSONParser(self.schema, new_cfg, extend_defaults=True).get_cfg()
         current = self.results["kernel-params"]
         incoming = new_results["kernel-params"]
         for field in ["head", "base", "branch", "version"]:
             if not current[field] or not incoming[field]:
                 continue
             if current[field] == incoming[field]:
                 continue
             # A placeholder "Linux" version is not a real conflict.
             if field == "version" and "Linux" in (current[field], incoming[field]):
                 continue
             raise Exception("%s field values does not match %s==%s" % (field, current[field], incoming[field]))
     except Exception as e:
         self.logger.warning("Invalid results config file\n")
         self.logger.warning(e)
         return False
     else:
         self.results = self.merge_results(self.results, new_results)
         return True
Ejemplo n.º 9
0
from JSONDecoder import JSONDecoder
from JSONEncoder import JSONEncoder
from jsonparser import JSONParser

__all__ = [
    'JSONParser', 'loads', 'load_dict', 'load_file', 'dump_file', 'dump_dict',
    'dumps', 'testjson'
]

# Shared parser instance backing the module-level convenience functions.
run = JSONParser()


def loads(s):
    """Parse the JSON document contained in string *s*."""
    # BUG FIX: the argument was silently dropped (run.loads() was called with
    # no arguments); forward it to the parser.
    return run.loads(s)


def load_file(path):
    """Parse the JSON document stored in the file at *path*."""
    return run.load_file(path)


def load_dict(dic):
    """Load the already-parsed dictionary *dic* into the parser."""
    return run.load_dict(dic)


def dumps():
    """Serialize the parser's current document to a JSON string."""
    return run.dumps()


def dump_dict():
    """Dump the parser's current document as a dictionary."""
    return run.dump_dict()
Ejemplo n.º 10
0
    def __init__(self,
                 repo_dir,
                 cfg,
                 repo_head=None,
                 emailcfg=None,
                 logger=None):
        """
        Constructor of KernelInteg class.
        :rtype: object
        :param repo_dir: Repo directory; all git commands run here.
        :param cfg: Kernel Integration Json config file.
        :param repo_head: Optional SHA-ID or tag; when valid it overrides the
                          'repo-head' of every repo listed in cfg.
        :param emailcfg: Optional email Json config file; enables status emails.
        :param logger: Logger object
        """
        self.logger = logger or logging.getLogger(__name__)
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/integ-schema.json')
        self.emailschema = pkg_resources.resource_filename(
            'klibs', 'schemas/email-schema.json')

        # Parse and validate the integration config against its schema.
        self.cfgobj = JSONParser(self.schema,
                                 cfg,
                                 extend_defaults=True,
                                 os_env=True,
                                 logger=self.logger)
        self.cfg = self.cfgobj.get_cfg()

        # Update email configs.
        if emailcfg is not None:
            self.emailobj = Email(emailcfg, self.logger)
        else:
            self.emailobj = None

        self.remote_list = self.cfg['remote-list']
        self.repos = self.cfg['repo-list']
        self.int_list = self.cfg['int-list']

        self.repo_dir = repo_dir
        self.sh = PyShell(wd=self.repo_dir, logger=self.logger)

        # All git commands will be executed in repo directory.
        self.logger.info(format_h1("Initalizing repo", tab=2))
        self.git = GitShell(wd=self.repo_dir, init=True, logger=self.logger)

        # Add git remote and fetch the tags.
        self.logger.info(format_h1("Add remote", tab=2))
        for remote in self.remote_list:
            self.git.add_remote(remote['name'], remote['url'])
            self.git.cmd("fetch", remote['name'])

        valid_repo_head = False

        # A head is considered valid when 'git show <head>' succeeds.
        def is_valid_head(head):

            if len(head) == 0:
                return False

            ret, out, err = self.git.cmd('show', head)
            if ret == 0:
                return True

            return False

        # Check if the repo head is valid.
        if valid_str(repo_head):
            if is_valid_head(repo_head) is False:
                raise Exception("Invalid repo head %s" % repo_head)
            else:
                valid_repo_head = True

        # A valid head argument overrides the config; otherwise each repo's
        # own 'repo-head' from the config must itself be valid.
        for repo in self.repos:
            if valid_repo_head is True:
                repo['repo-head'] = repo_head
            else:
                if is_valid_head(repo['repo-head']) is False:
                    raise Exception("Invalid repo head %s" % repo['repo-head'])
Ejemplo n.º 11
0
class KernelInteg(object):
    def __init__(self,
                 repo_dir,
                 cfg,
                 repo_head=None,
                 emailcfg=None,
                 logger=None):
        """
        Constructor of KernelInteg class.
        :rtype: object
        :param repo_dir: Repo directory; all git commands run here.
        :param cfg: Kernel Integration Json config file.
        :param repo_head: Optional SHA-ID or tag; when valid it overrides the
                          'repo-head' of every repo listed in cfg.
        :param emailcfg: Optional email Json config file; enables status emails.
        :param logger: Logger object
        """
        self.logger = logger or logging.getLogger(__name__)
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/integ-schema.json')
        self.emailschema = pkg_resources.resource_filename(
            'klibs', 'schemas/email-schema.json')

        # Parse and validate the integration config against its schema.
        self.cfgobj = JSONParser(self.schema,
                                 cfg,
                                 extend_defaults=True,
                                 os_env=True,
                                 logger=self.logger)
        self.cfg = self.cfgobj.get_cfg()

        # Update email configs.
        if emailcfg is not None:
            self.emailobj = Email(emailcfg, self.logger)
        else:
            self.emailobj = None

        self.remote_list = self.cfg['remote-list']
        self.repos = self.cfg['repo-list']
        self.int_list = self.cfg['int-list']

        self.repo_dir = repo_dir
        self.sh = PyShell(wd=self.repo_dir, logger=self.logger)

        # All git commands will be executed in repo directory.
        self.logger.info(format_h1("Initalizing repo", tab=2))
        self.git = GitShell(wd=self.repo_dir, init=True, logger=self.logger)

        # Add git remote and fetch the tags.
        self.logger.info(format_h1("Add remote", tab=2))
        for remote in self.remote_list:
            self.git.add_remote(remote['name'], remote['url'])
            self.git.cmd("fetch", remote['name'])

        valid_repo_head = False

        # A head is considered valid when 'git show <head>' succeeds.
        def is_valid_head(head):

            if len(head) == 0:
                return False

            ret, out, err = self.git.cmd('show', head)
            if ret == 0:
                return True

            return False

        # Check if the repo head is valid.
        if valid_str(repo_head):
            if is_valid_head(repo_head) is False:
                raise Exception("Invalid repo head %s" % repo_head)
            else:
                valid_repo_head = True

        # A valid head argument overrides the config; otherwise each repo's
        # own 'repo-head' from the config must itself be valid.
        for repo in self.repos:
            if valid_repo_head is True:
                repo['repo-head'] = repo_head
            else:
                if is_valid_head(repo['repo-head']) is False:
                    raise Exception("Invalid repo head %s" % repo['repo-head'])

    def clean_repo(self):
        """
        Clean the git repo and delete all local branches.
        :return: None
        """
        self.logger.info(format_h1("Cleaning repo", tab=2))

        self.git.cmd("reset", "--hard")
        self.git.cmd("clean", "-fdx")

        local_branches = [
            x.strip() for x in self.git.cmd('branch')[1].splitlines()
        ]
        for branch in local_branches:
            # Never delete the currently checked-out branch ("* name").
            if branch.startswith('* '):
                continue
            # BUG FIX: the delete was previously indented under 'continue' and
            # therefore unreachable, so no branch was ever removed.
            self.git.cmd("branch", "-D", branch)

    def _smb_sync(self,
                  dest,
                  remote,
                  rdir,
                  username='',
                  password='',
                  options=None):
        """
        Sync files from an SMB share using smbclient.

        :param dest: Local directory the smbclient command runs in.
        :param remote: SMB server name.
        :param rdir: Share/directory on the server.
        :param username: SMB username ('' for none).
        :param password: SMB password ('' uses -N, no password).
        :param options: Extra smbclient command-line options (list of str).
        :return: None; a non-zero exit status is logged, not raised.
        """
        # BUG FIX: replaced the mutable default argument (options=[]) with
        # None; a shared list default can leak state between calls.
        if options is None:
            options = []

        cmd = ["//" + remote + '/' + rdir]

        if len(password) > 0:
            cmd.append(password)
        else:
            cmd.append("-N")

        if len(username) > 0:
            cmd.append("-U")
            cmd.append(username)

        cmd = ['smbclient'] + cmd + options

        ret, out, err = self.sh.cmd(' '.join(cmd), shell=True, wd=dest)
        if ret != 0:
            self.logger.error(err)
            self.logger.error(out)

    def _git_sync(self,
                  dest,
                  remote,
                  rbranch,
                  mode='push',
                  msg='Upload updated cache',
                  op='download'):
        """
        Sync the *dest* work-dir with a git remote branch.

        :param dest: Local directory of the cache git repo.
        :param remote: Remote name; its URL is resolved from the parent repo.
        :param rbranch: Remote branch name.
        :param mode: Upload mode: 'push', 'force-push' or 'refs-for' (Gerrit).
        :param msg: Commit message used when uploading.
        :param op: 'download' to fetch/checkout, 'upload' to commit and push.
        :return: False when the remote URL cannot be resolved, True otherwise.
        """
        uret = self.git.cmd('remote get-url %s' % remote)
        if uret[0] != 0:
            self.logger.error("Invalid sync remote %s branch %s" %
                              (remote, rbranch))
            return False

        git = GitShell(wd=dest,
                       init=True,
                       remote_list=[(remote, uret[1])],
                       logger=self.logger)

        if op == "download":
            git.cmd('clean -xdf')
            git.cmd('fetch', remote)
            git.cmd('checkout', remote + '/' + rbranch)
        elif op == "upload":
            git.cmd('add', '.')
            # NOTE(review): writing a str to a default-mode NamedTemporaryFile
            # assumes Python 2; under Python 3 this needs mode='w' — confirm.
            with tempfile.NamedTemporaryFile() as msg_file:
                msg_file.write(msg)
                msg_file.seek(0)
                git.cmd('commit -s -F %s' % msg_file.name)
            if mode == 'refs-for':
                rbranch = 'refs/for/%s' % rbranch
            if not git.valid_branch(remote, rbranch) or mode == 'force-push':
                git.cmd('push', '-f', remote, 'HEAD:%s' % rbranch)
            else:
                git.cmd('push', remote, 'HEAD:%s' % rbranch)

        # BUG FIX: previously fell off the end returning None on success while
        # returning False on failure; return True for a consistent boolean.
        return True

    def _config_rr_cache(self, options):
        """
        Config git rerere cache.
        :param options: Dict with rr-cache options.
            use-auto-merge - Enable rerere.autoupdate if set True, otherwise do nothing.
            use-remote-cache - Get remote cache params if set True, otherwise no remote rerere cache is available.
            remote-cache-params - Params for remote cache.
                                - sync-protocol - Remote sync protocol ('smb' or 'git')
                                - url - Server name (smb) or git remote name.
                                - remote-dir/username/password/sync-options - smb parameters.
                                - branch/mode - git parameters.

        :return:
        """
        if options is None:
            return

        # rr-cache lives inside the repo's .git directory.
        cache_dir = os.path.join(self.repo_dir, '.git', 'rr-cache')
        old_dir = os.path.join(self.repo_dir, '.git', 'rr-cache.old')

        self.git.cmd("config", "rerere.enabled", "true")

        # For non-git protocols, move any existing cache aside to rr-cache.old
        # and start from an empty cache dir; _reset_rr_cache restores it.
        def init_state(roptions):
            if os.path.exists(cache_dir):
                if roptions['sync-protocol'] != 'git':
                    if os.path.exists(old_dir):
                        self.sh.cmd('rm -fr %s' % old_dir, shell=True)
                    # NOTE(review): 'mv' is passed as separate args here while
                    # the neighbouring calls pass one command string — confirm
                    # PyShell.cmd accepts both forms with shell=True.
                    self.sh.cmd('mv', cache_dir, old_dir, shell=True)
                    self.sh.cmd('mkdir -p %s' % cache_dir, shell=True)
            else:
                self.sh.cmd('mkdir -p %s' % cache_dir, shell=True)

        # Check and enable auto merge
        if options['use-auto-merge']:
            self.git.cmd("config", "rerere.autoupdate", "true")

        # Check and add remote cache
        if options['use-remote-cache']:
            roptions = options['remote-cache-params']
            init_state(roptions)
            if roptions['sync-protocol'] == 'smb':
                self._smb_sync(cache_dir, roptions['url'],
                               roptions['remote-dir'], roptions['username'],
                               roptions['password'], roptions['sync-options'])
            elif roptions['sync-protocol'] == 'git':
                self._git_sync(cache_dir, roptions['url'], roptions['branch'],
                               roptions['mode'])

    def _reset_rr_cache(self, options):
        """
        Reset git rerere cache: optionally upload the updated cache, then
        restore the local state saved by _config_rr_cache.
        :param options: Dict with rr-cache options (same shape as the options
                        accepted by _config_rr_cache).
        :return:
        """
        if options is None:
            return

        cache_dir = os.path.join(self.repo_dir, '.git', 'rr-cache')
        old_dir = os.path.join(self.repo_dir, '.git', 'rr-cache.old')

        # Disable rerere again; it was enabled in _config_rr_cache.
        self.git.cmd("config", "rerere.enabled", "false")

        # For non-git protocols, restore the original cache dir that was moved
        # aside to rr-cache.old during _config_rr_cache.
        def reset_state(roptions):
            if options['use-remote-cache']:
                if roptions['sync-protocol'] != 'git':
                    if os.path.exists(old_dir):
                        if os.path.exists(cache_dir):
                            self.sh.cmd('rm -fr %s' % cache_dir, shell=True)
                        # NOTE(review): 'mv' passed as separate args with
                        # shell=True, unlike the string form above — confirm
                        # PyShell.cmd accepts both forms.
                        self.sh.cmd('mv', old_dir, cache_dir, shell=True)

        # Upload the (possibly updated) cache before restoring local state.
        if options['upload-remote-cache'] and os.path.exists(cache_dir):
            if options['use-remote-cache']:
                roptions = options['remote-cache-params']
                if roptions['sync-protocol'] == 'smb':
                    self._smb_sync(cache_dir, roptions['url'],
                                   roptions['remote-dir'],
                                   roptions['username'], roptions['password'],
                                   roptions['upload-options'])
                elif roptions['sync-protocol'] == 'git':
                    self._git_sync(cache_dir,
                                   roptions['url'],
                                   roptions['branch'],
                                   roptions['mode'],
                                   '\n'.join(roptions['upload-msg']),
                                   op='upload')

        reset_state(options['remote-cache-params'])

    def _merge_branches(self,
                        mode,
                        merge_list,
                        dest,
                        options,
                        sendemail=False,
                        sub_prefix='',
                        rrupload_header=''):
        """
        Merge the branches given in merge_list and create a output branch.
        Basic logic is,
        if mode is rebase, then git rebase all branches in merge_list onto to dest branch.
        if mode is merge, then git merge/pull all branches on top of dest branch.
        if mode is replace, then simple checkout will be done.
        :param mode: 'rebase', 'merge', 'replace', 'cherry' or 'cherrypick'.
        :param merge_list: List of (remote, branch, upstream, shalist) tuples.
        :param dest: Dest branch name.
        :param options: Dict with merge params.
        use-rr-cache -  Use git rerere cache.
        no-ff - Set True if you want to disable fast forward in merge.
        add-log - Set True if you want to add merge log.
        :param sendemail: Send a status email per source branch if True.
        :param sub_prefix: Prefix for the email subject.
        :param rrupload_header: Header used in the rr-cache upload message.

        :return: True
        """
        # Build the git merge/pull command line for one source branch.
        def merge_cmd(remote=None,
                      rbranch=None,
                      no_ff=False,
                      add_log=False,
                      abort=False):
            options = []

            if no_ff:
                options.append('--no-ff')
            if add_log:
                options.append('--log')

            if abort is True:
                return ' '.join(['merge', '--abort'])

            if valid_str(remote):
                return ' '.join(['pull', ' '.join(options), remote, rbranch])
            else:
                return ' '.join(['merge', ' '.join(options), rbranch])

        # Send a pass/fail status email for one source branch (no-op when
        # sendemail is False or no email object is configured).
        def send_email(remote, branch, status, out, err):

            if not sendemail or self.emailobj is None:
                return

            subject = [] if len(sub_prefix) == 0 else [sub_prefix]
            content = []

            if mode == 'merge':
                subject.append('Merge')
            elif mode == 'rebase':
                subject.append('Rebase')
            elif mode == 'replace':
                subject.append('Replace')
            elif mode == 'cherry':
                subject.append('Cherry')
            elif mode == 'cherrypick':
                subject.append('Cherrypick')

            if valid_str(remote):
                branch = remote + '/' + branch

            subject.append(branch)

            if status:
                subject.append('passed')
            else:
                subject.append('failed')

            uret = self.git.cmd('remote get-url %s' % remote)
            url = remote
            if uret[0] == 0:
                url = uret[1].strip()

            content.append('')
            content.append('Head: %s' % self.git.head_sha())
            content.append('Base: %s' % self.git.base_sha())
            content.append('Dest Branch: %s' % dest)
            content.append('Remote: %s' % url)
            content.append('Remote Branch: %s' % branch)
            # BUG FIX: without the parentheses this parsed as
            # ('Status: %s' % "Passed") if status else "Failed", so failures
            # emitted the bare string "Failed" with no "Status:" label.
            content.append('Status: %s' % ("Passed" if status else "Failed"))
            content.append('\n')
            content.append(format_h1("Output log"))
            content.append('')
            content.append(out)
            content.append('\n')
            content.append(format_h1("Error log"))
            content.append('')
            content.append(err)

            self.emailobj.send_email(' '.join(subject), '\n'.join(content))

        # Record which branch's conflict resolutions went into the rr-cache
        # upload commit message.
        def add_rrcache_msg(remote, branch):
            if options["use-rr-cache"]:
                if options['rr-cache']['upload-remote-cache']:
                    rcoptions = options['rr-cache']['remote-cache-params']
                    if len(rcoptions['upload-msg']) == 0:
                        msg = rrupload_header if len(
                            rrupload_header) > 0 else dest
                        rcoptions['upload-msg'].append(
                            'rrcache: upload %s results of %s' % (mode, msg))
                    if len(rcoptions['upload-msg']) == 1:
                        rcoptions['upload-msg'].append('\n')
                    uret = self.git.cmd('remote get-url %s' % remote)
                    url = uret[1].strip() if uret[0] == 0 and len(
                        uret[1]) > 0 else branch
                    rcoptions['upload-msg'].append(
                        "Includes %s resolution of %s:%s" %
                        (mode, url, branch))

        # List the commits in 'branch' missing from 'upstream' ('git cherry'
        # '+' entries), space-joined for cherry-pick.
        def get_cherry_list(upstream, branch):
            ret = self.git.cmd("cherry", upstream, branch)
            if ret[0] != 0:
                return ''
            else:
                commit_list = map(lambda x: x.strip(), ret[1].split('\n'))
                commit_list = filter(lambda x: x.startswith('+'), commit_list)
                commit_list = map(lambda x: x.strip('+ '), commit_list)
                return ' '.join(commit_list)

        # Check whether git diff is empty
        def null_diff():
            dret = self.git.cmd('diff')
            if dret[0] == 0 and len(dret[1]) == 0:
                return True

            return False

        # Check for rerere diff
        def null_rdiff():
            dret = self.git.cmd('rerere diff')
            if dret[0] == 0 and len(dret[1]) < 2:
                return True

            return False

        # When rerere auto-merge left a clean tree, finish the in-progress
        # merge/rebase/cherry-pick automatically; returns True when handled.
        def auto_resolve():
            if options["rr-cache"]["use-auto-merge"]:
                if null_rdiff() or null_diff():
                    if mode == "merge":
                        self.git.cmd('commit', '-as', '--no-edit')
                        return True
                    elif mode == "rebase":
                        dret = self.git.cmd('diff', '--cached')
                        if dret[0] == 0 and len(dret[1]) == 0:
                            self.git.cmd('rebase', '--skip')
                        else:
                            self.git.cmd('rebase', '--continue')
                        return True
                    elif mode == "cherry" or mode == "cherrypick":
                        dret = self.git.cmd('diff', '--cached')
                        if dret[0] == 0 and len(dret[1]) == 0:
                            self.git.cmd('reset')
                        self.git.cmd('cherry-pick', '--continue')
                        return True
            return False

        if options["use-rr-cache"]:
            self._config_rr_cache(options["rr-cache"])

        for remote, branch, upstream, shalist in merge_list:
            ret = 0, '', ''
            if mode == "merge":
                self.git.cmd("checkout", dest)
                ret = self.git.cmd(
                    merge_cmd(remote, branch, options['no-ff'],
                              options['add-log']))
            elif mode == "rebase":
                self.git.cmd("checkout",
                             remote + '/' + branch if remote != '' else branch)
                ret = self.git.cmd("rebase", dest)
            elif mode == "replace":
                ret = self.git.cmd(
                    "checkout",
                    remote + '/' + branch if remote != '' else branch)
            elif mode == "cherry":
                commit_list = get_cherry_list(
                    upstream,
                    remote + '/' + branch if remote != '' else branch)
                if valid_str(commit_list):
                    ret = self.git.cmd("cherry-pick", commit_list)
                else:
                    continue
            elif mode == "cherrypick":
                if valid_str(shalist):
                    ret = self.git.cmd("cherry-pick", shalist)
                else:
                    continue

            if self.git.inprogress() or ret[0] != 0:
                # First try to auto resolve it.
                auto_resolve()
                # If problem still persist, ask for help.
                if self.git.inprogress():
                    send_email(remote, branch, False, ret[1], ret[2])
                    while True:
                        if not null_rdiff() or not null_diff():
                            raw_input(
                                'Please resolve the issue and then press any key continue'
                            )
                        if self.git.inprogress():
                            if auto_resolve() and not self.git.inprogress():
                                break
                        else:
                            break

            add_rrcache_msg(remote, branch)

            # After a successful rebase, re-point dest at the rebased head.
            if mode == "rebase" and not self.git.inprogress():
                self.git.cmd("branch", '-D', dest)
                self.git.cmd("checkout", '-b', dest)

        if options['use-rr-cache']:
            self._reset_rr_cache(options["rr-cache"])

        return True

    def _upload_repo(self, branch_name, upload_options):
        """
        Upload the given branch to a remote patch.
        supported upload modes are force-push, push and refs-for (for Gerrit).
        :param branch_name: Name of the local branch.
        :param upload_options: Dict with upload related params.
        url - Name of the git remote.
        branch - Remote branch of git repo.
        :return: Nothing.
        """
        self.logger.info(format_h1("Uploading %s", tab=2) % branch_name)

        mode = upload_options['mode']

        self.git.push(branch_name,
                      upload_options['url'],
                      upload_options['branch'],
                      force=(mode == 'force-push'),
                      use_refs=(mode == 'refs-for'))

    def _create_repo(self, repo):
        """
        Merge the branches given in source-list and create the list of output
        branches as specified by the dest-list option.
        :param repo: Dict with kernel repo options. Check "repo-params" section
        in kernel integration schema file for more details.
        :return: True if creation/compare/upload succeeded, False otherwise.
        """
        self.logger.info(
            format_h1("Create %s repo", tab=2) % repo['repo-name'])

        merge_list = []
        status = True

        # Clean existing git operations. Best effort: each abort fails
        # harmlessly when no such operation is in progress.
        try:
            self.git.cmd('merge --abort')
            self.git.cmd('rebase --abort')
            self.git.cmd('cherry-pick --abort')
            self.git.cmd('revert --abort')
        except:
            pass

        # Collect and validate source branches before touching anything.
        for srepo in repo['source-list']:
            if srepo['skip'] is True:
                continue
            if self.git.valid_branch(srepo['url'], srepo['branch']) is False:
                raise Exception("Dependent repo %s/%s does not exits" %
                                (srepo['url'], srepo['branch']))
            else:
                merge_list.append((srepo['url'], srepo['branch'],
                                   srepo["upstream"], srepo["sha-list"]))

        dest_repolist = []

        for drepo in repo['dest-list']:
            if drepo['skip']:
                continue
            dest_repolist.append(drepo)

        # Create destination branches
        dest_branches = []
        try:
            for dest_repo in dest_repolist:

                # Recreate the local branch from scratch if it already exists.
                if self.git.valid_branch('', dest_repo['local-branch']):
                    ret = self.git.delete(dest_repo['local-branch'],
                                          force=True)[0]
                    if ret != 0:
                        raise Exception("Deleting branch %s failed" %
                                        dest_repo['local-branch'])

                self.git.cmd("checkout", repo['repo-head'], "-b",
                             dest_repo['local-branch'])

                if len(merge_list) > 0:
                    self._merge_branches(dest_repo['merge-mode'], merge_list,
                                         dest_repo['local-branch'],
                                         dest_repo['merge-options'],
                                         repo['send-email'],
                                         repo['email-prefix'],
                                         repo['repo-name'])
                dest_branches.append(dest_repo['local-branch'])
        except Exception as e:
            # Bug fix: mark the run as failed so the compare/upload steps
            # below are skipped. Previously 'status' stayed True here and
            # a failed creation could still be compared/uploaded/emailed
            # as a success.
            status = False
            self.logger.error(e, exc_info=True)
            for branch in dest_branches:
                self.git.delete(branch, force=True)
        else:
            self.logger.info("Repo %s creation successfull" %
                             repo['repo-name'])

        # Compare destination branches; all of them are expected to have
        # identical content. compare-resmode decides whether a mismatch
        # is fatal ("fail") or merely logged.
        if status is True and repo['compare-dest']:
            if len(dest_repolist) > 1:
                base_repo = dest_repolist[0]
                for dest_repo in dest_repolist:
                    ret, out, err = self.git.cmd('diff',
                                                 base_repo['local-branch'],
                                                 dest_repo['local-branch'])
                    if ret != 0:
                        if repo['compare-resmode'] == "fail":
                            status = False
                        break
                    else:
                        # A non-empty diff means the branches diverged.
                        if len(out) > 0:
                            if repo['compare-resmode'] == "fail":
                                status = False
                            self.logger.error(
                                "Destination branches %s!=%s, resolution:%s" %
                                (base_repo['local-branch'],
                                 dest_repo['local-branch'],
                                 repo['compare-resmode']))
                            break
                        else:
                            self.logger.info("Destination branches %s==%s" %
                                             (base_repo['local-branch'],
                                              dest_repo['local-branch']))
        else:
            self.logger.warn("Skipping destination branch comparison")

        # Upload the destination branches
        if status is True:
            for dest_repo in dest_repolist:
                if dest_repo['upload-copy'] is True:
                    upload_options = dest_repo['upload-options']
                    self._upload_repo(dest_repo['local-branch'],
                                      upload_options)
        else:
            self.logger.warn("Skipping destination branch upload")

        # Send a pass/fail summary mail describing sources, destinations
        # and upload targets.
        if repo['send-email']:
            subject = [] if len(
                repo['email-prefix']) == 0 else [repo['email-prefix']]
            content = []

            subject.append("integration")

            if status:
                subject.append('passed')
            else:
                subject.append('failed')

            content.append(format_h1("This repo integration includes:"))
            content.append(format_h1("Following source branches:"))
            content.append('')
            for rname, rbranch, upstream, shalist in merge_list:
                url = rname
                if len(rname) == 0:
                    rname = 'local-branch'
                else:
                    # Resolve the remote name to its URL for readability.
                    uret = self.git.cmd('remote get-url %s' % rname)
                    if uret[0] == 0:
                        rname = uret[1].strip()
                content.append('Remote: %s' % rname)
                content.append('Branch: %s' % rbranch)
                content.append('')

            content.append(format_h1("Following destination branches:"))
            content.append('')

            for dest_repo in dest_repolist:
                content.append('Branch: %s' % dest_repo['local-branch'])
                content.append('Merge Mode: %s' % dest_repo['merge-mode'])
                if dest_repo['upload-copy'] is True:
                    content.append('')
                    content.append('Uploaded branch to,')
                    upload_options = dest_repo['upload-options']
                    rurl = upload_options['url']
                    for ritem in self.remote_list:
                        if ritem['name'] == rurl:
                            rurl = ritem['url']
                            break
                    content.append('Upload Remote: %s' % rurl)
                    content.append('Upload Branch: %s' %
                                   upload_options['branch'])
                    content.append('')

            if self.emailobj is not None:
                self.emailobj.send_email(' '.join(subject), '\n'.join(content))

        return status

    def _get_repo_by_name(self, name):
        """
        Get repo Dict from "repos" list in given Json config file.
        :param name: Name of the repo
        :return: repo Dict reference or None if not valid repo found.
        """
        for repo in self.repos:
            if repo['repo-name'] == name:
                return repo

        return None

    def get_default_reponame(self):
        """Return the repo name to use by default.

        Prefers the config's 'default-repo' entry when it names a known
        repo; otherwise falls back to the first configured repo, or None
        when no repos exist.
        """
        default = self.cfg.get('default-repo', "")

        if self._get_repo_by_name(default) is not None:
            return default

        # Truthiness covers both None and an empty repo list.
        if self.repos:
            return self.repos[0]['repo-name']

        return None

    def start(self, name, skip_dep=False):
        """
        Generate kernel and its depndent branches.
        :param name: Name of the kernel repo.
        :param skip_dep: Skip creating dependent repos.
        :return: True on success, False if any repo does not exist.
        """
        dep_list = []
        # First get the dep list (last matching entry wins, as before).
        for item in self.int_list:
            if item["repo"] == name:
                dep_list = item["dep-repos"]

        # Bug fix: copy the dep list before appending. The original code
        # aliased item["dep-repos"] and 'int_list += [name]' grew the
        # stored config list on every call.
        int_list = list(dep_list) if not skip_dep else []
        int_list.append(name)

        for rname in int_list:
            repo = self._get_repo_by_name(rname)
            if repo is not None:
                self._create_repo(repo)
            else:
                self.logger.error("Repo %s does not exist\n" % rname)
                return False

        return True
Ejemplo n.º 12
0
class KernelResults(object):
    """Container for kernel test results.

    Holds a schema-validated results dict covering static tests
    (compile/sparse/smatch per arch/config), checkpatch, custom tests and
    bisect status, plus helpers to update sections and render text reports.
    """

    def __init__(self, src=None, old_cfg=None, logger=None):
        """
        :param src: Kernel source directory (used only in report headers).
        :param old_cfg: Optional previous results config to merge in.
        :param logger: Optional logger instance.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.schema = pkg_resources.resource_filename('klibs', 'schemas/results-schema.json')
        self.src = src
        self.results = {}
        self.kernel_params = {}
        self.static_results = []
        self.checkpatch_results = {}
        self.custom_results = []
        self.bisect_results = {}
        self.custom_configs = []

        res_obj = {}

        self.kernel_params["head"] = ""
        self.kernel_params["base"] = ""
        self.kernel_params["branch"] = ""
        self.kernel_params["version"] = "Linux"

        for arch in supported_archs:
            self.add_arch(arch)

        self.checkpatch_results["status"] = "N/A"
        self.checkpatch_results["warning_count"] = 0
        self.checkpatch_results["error_count"] = 0

        self.bisect_results["status"] = "N/A"
        self.bisect_results["patch-list"] = []

        res_obj["kernel-params"] = self.kernel_params
        res_obj["static-test"] = self.static_results
        res_obj["checkpatch"] = self.checkpatch_results
        res_obj["custom-test"] = self.custom_results
        res_obj["bisect"] = self.bisect_results

        self.cfgobj = JSONParser(self.schema, res_obj, extend_defaults=True)
        self.results = self.cfgobj.get_cfg()

        if old_cfg is not None:
            if not self.update_results(old_cfg):
                return None

    def get_static_obj(self, arch):
        """Return (index, entry) of the static-test entry for 'arch'.

        :return: (-1, None) when no entry for the arch exists.
        """
        for index, obj in enumerate(self.static_results):
            # 'in' instead of dict.has_key() keeps Python 3 compatibility.
            if isinstance(obj, dict) and "arch_name" in obj and obj["arch_name"] == arch:
                return index, obj

        return -1, None

    def add_arch(self, arch):
        """Add a per-arch static-results entry (idempotent).

        :return: False for an empty/None arch name, True otherwise.
        """
        if arch is None or len(arch) == 0:
            return False

        if self.get_static_obj(arch)[1] is None:
            obj = {}
            obj["arch_name"] = arch
            self.static_results.append(obj)

            for config in supported_configs + self.custom_configs:
                self.add_config(config)

        return True

    def add_config(self, name):
        """Add per-config compile/sparse/smatch skeletons to every arch entry.

        Unknown configs are also remembered in self.custom_configs.
        :return: False for an empty/None config name, True otherwise.
        """
        if name is None or len(name) == 0:
            return False

        for obj in self.static_results:
            # 'not in' instead of dict.has_key() keeps Python 3 compatibility.
            if name not in obj:
                obj[name] = {}
                obj[name]["compile-test"] = {}
                obj[name]["compile-test"]["status"] = "N/A"
                obj[name]["compile-test"]["warning_count"] = 0
                obj[name]["compile-test"]["error_count"] = 0
                obj[name]["sparse-test"] = {}
                obj[name]["sparse-test"]["status"] = "N/A"
                obj[name]["sparse-test"]["warning_count"] = 0
                obj[name]["sparse-test"]["error_count"] = 0
                obj[name]["smatch-test"] = {}
                obj[name]["smatch-test"]["status"] = "N/A"
                obj[name]["smatch-test"]["warning_count"] = 0
                obj[name]["smatch-test"]["error_count"] = 0

        if name not in supported_configs and name not in self.custom_configs:
            self.custom_configs.append(name)

        return True

    def update_results(self, new_cfg):
        """Validate 'new_cfg' against the schema and merge it into results.

        Kernel params (head/base/branch/version) must agree between the two
        configs, except that a default "Linux" version matches anything.
        :return: True on success, False if the config is invalid/mismatched.
        """
        try:
            new_results = JSONParser(self.schema, new_cfg, extend_defaults=True).get_cfg()
            param1 = self.results["kernel-params"]
            param2 = new_results["kernel-params"]
            for field in ["head", "base", "branch", "version"]:
                if len(param1[field]) > 0 and len(param2[field]) > 0:
                    if (param1[field] != param2[field]):
                        if field == "version" and (param1[field] == "Linux" or param2[field] == "Linux"):
                            continue
                        else:
                            raise Exception("%s field values does not match %s==%s" % (field, param1[field], param2[field]))
        except Exception as e:
            self.logger.warning("Invalid results config file\n")
            self.logger.warning(e)
            return False
        else:
            self.results = self.merge_results(self.results, new_results)
            return True

    def _update_static_test_results(self, type, arch, config, status, warning_count=0, error_count=0):
        """Record one static test result for the given arch/config/test type."""
        for obj in self.results["static-test"]:
            if obj['arch_name'] == arch:
                obj[config][type]["status"] = "Passed" if status else "Failed"
                obj[config][type]["warning_count"] = warning_count
                obj[config][type]["error_count"] = error_count

    def update_compile_test_results(self, arch, config, status, warning_count=0, error_count=0):
        """Record a compile-test result."""
        self._update_static_test_results("compile-test", arch, config, status, warning_count, error_count)

    def update_sparse_test_results(self, arch, config, status, warning_count=0, error_count=0):
        """Record a sparse-test result."""
        self._update_static_test_results("sparse-test", arch, config, status, warning_count, error_count)

    def update_smatch_test_results(self, arch, config, status, warning_count=0, error_count=0):
        """Record a smatch-test result."""
        self._update_static_test_results("smatch-test", arch, config, status, warning_count, error_count)

    def update_custom_test_results(self, name, status, **kwargs):
        """Add or update a named custom test result; kwargs become fields."""
        test_obj = {}
        new_obj = True

        for obj in self.custom_results:
            if obj['name'] == name:
                test_obj = obj
                new_obj = False

        test_obj["name"] = name
        test_obj["status"] = "Passed" if status else "Failed"
        for key, value in viewitems(kwargs):
            test_obj[key] = value

        if new_obj:
            self.custom_results.append(test_obj)

    def update_checkpatch_results(self, status, warning_count=None, error_count=None):
        """Record the checkpatch result; counts are only set when given."""
        self.results["checkpatch"]["status"] = "Passed" if status else "Failed"
        if warning_count is not None:
            self.results["checkpatch"]["warning_count"] = warning_count
        if error_count is not None:
            self.results["checkpatch"]["error_count"] = error_count

    def update_kernel_params(self, version=None, branch=None, base=None, head=None):
        """Set any of the kernel identification params that are not None."""
        if version is not None:
            self.results["kernel-params"]["version"] = version
        if branch is not None:
            self.results["kernel-params"]["branch"] = branch
        if base is not None:
            self.results["kernel-params"]["base"] = base
        if head is not None:
            self.results["kernel-params"]["head"] = head

    def kernel_info(self):
        """Render the kernel identification header (empty if src is unset)."""
        out = ''
        if self.src is not None:
            out += 'Kernel Info:\n'
            out += "\tVersion: %s\n" % self.results["kernel-params"]["version"]
            out += "\tBranch: %s\n" % self.results["kernel-params"]["branch"]
            out += "\tHead: %s\n" % self.results["kernel-params"]["head"]
            out += "\tBase: %s\n" % self.results["kernel-params"]["base"]

        return out + '\n'

    def static_test_results(self):
        """Render per-arch/per-config static test results as text."""
        width = len(max(supported_configs + self.custom_configs, key=len)) * 2
        out = 'Static Test Results:\n'
        for obj in self.results["static-test"]:
            out += '\t%s results:\n' % obj['arch_name']
            for config in supported_configs + self.custom_configs:
                out += '\t\t%s results:\n' % config
                for type in ["compile-test", "sparse-test", "smatch-test"]:
                    out += '\t\t\t%s results:\n' % type
                    out += ('\t\t\t\t%-' + str(width) + 's: %s\n') % ("status", obj[config][type]["status"])
                    out += ('\t\t\t\t%-' + str(width) + 's: %s\n') % ("warning", obj[config][type]["warning_count"])
                    out += ('\t\t\t\t%-' + str(width) + 's: %s\n') % ("error", obj[config][type]["error_count"])

        return out + '\n'

    def checkpatch_test_results(self):
        """Render the checkpatch results as text.

        Bug fix: read from self.results["checkpatch"], which is where
        update_checkpatch_results() writes, instead of the init-time
        self.checkpatch_results dict that may no longer be aliased to it.
        """
        out = 'Checkpatch Test Results:\n'
        out += '\tstatus       : %s\n' % self.results["checkpatch"]["status"]
        out += '\twarning_count: %s\n' % self.results["checkpatch"]["warning_count"]
        out += '\terror_count  : %s\n' % self.results["checkpatch"]["error_count"]

        return out + '\n'

    def custom_test_results(self):
        """Render custom test results as text, or 'N/A' when none exist."""
        if len(self.custom_results) == 0:
            return 'Custom Test Results: N/A\n'
        # NOTE(review): width comes from self.custom_results while rows come
        # from self.results["custom-test"] — assumes both stay in sync.
        width = len(max(self.custom_results[0].keys(), key=len)) * 2
        out = 'Custom Test Results:\n'
        for obj in self.results["custom-test"]:
            out += '\t%s results:\n' % obj['name']
            for key, value in viewitems(obj):
                if key == 'name':
                    continue
                out += ('\t\t%-' + str(width) + 's: %s\n') % (key, value)

        return out + '\n'

    def bisect_test_results(self):
        """Render the bisect status as text.

        Reads self.results["bisect"] (the validated results dict) for
        consistency with the other report helpers.
        """
        out = 'Bisect Test Results:\n'
        out += '\tstatus       : %s\n' % self.results["bisect"]["status"]

        return out + '\n'

    def get_test_results(self, test_type="compile"):
        """Return the combined text report for the requested test type."""
        out = ''
        out += self.kernel_info()
        if test_type == "static":
            out += self.static_test_results()
        elif test_type == "checkpatch":
            out += self.checkpatch_test_results()
        elif test_type == "all":
            out += self.static_test_results()
            out += self.checkpatch_test_results()

        return out

    def print_test_results(self, test_type="compile"):
        """Log the combined text report."""
        self.logger.info(self.get_test_results(test_type))

    def merge_results(self, dest, src):
        """Recursively merge 'src' into 'dest'; src wins on leaf values."""
        # Local import keeps Python 2/3 compatibility: Mapping moved to
        # collections.abc and was removed from the collections top level
        # in Python 3.10.
        try:
            from collections.abc import Mapping
        except ImportError:  # Python 2
            from collections import Mapping

        if isinstance(src, Mapping):
            # .items() works on both Python 2 and 3 (iteritems() does not).
            for key, value in src.items():
                dest[key] = self.merge_results(dest.get(key, value), value)
        elif isinstance(src, (list, tuple)):
            for index, value in enumerate(src):
                # Bug fix: the original bounds check was inverted
                # ('len(dest) <= index' selected dest[index], raising
                # IndexError) and never grew dest when src was longer.
                if index < len(dest):
                    dest[index] = self.merge_results(dest[index], value)
                else:
                    dest.append(value)
        else:
            dest = src

        return dest

    def dump_results(self, outfile):
        """Write the current results config to 'outfile' (overwriting it)."""
        # Pre-empty the file so dump_cfg starts from a clean slate.
        fobj = open(outfile, 'w+')
        fobj.truncate()
        fobj.close()
        self.cfgobj.dump_cfg(outfile)
Ejemplo n.º 13
0
class BuildAndroid(object):
    """Helper for syncing and building an Android 'repo' workspace.

    Wraps the repo tool (init/sync/branch management/reset) and the Android
    lunch/make build flow, driven either by direct method calls or by a
    JSON config via auto_build().
    """

    def _get_repo_script(self, repo):
        """Locate or fetch the 'repo' launcher script.

        :param repo: URL or local path of a repo script, or a non-string to
                     fall back to whatever 'repo' is found in PATH.
        :return: True when self.repo points at a usable script, else False.
        """
        # If no valid repo is given, check if repo exists in PATH
        if not valid_str(repo):
            if which("repo") is None:
                self.logger.error("No valid repo command found")
                return False
            else:
                self.repo = which("repo")
                return True
        else:
            self.repo = os.path.abspath(os.path.join(self.src, 'repo-bin', 'repo'))
            if not os.path.exists(os.path.join(self.src, 'repo-bin')):
                os.makedirs(os.path.join(self.src, 'repo-bin'))
            if repo.startswith('http'):
                # Download the launcher into the workspace-local bin dir.
                self.sh.cmd("curl %s > %s" % (repo, self.repo), shell=True)
            elif os.path.exists(os.path.abspath(repo)):
                self.logger.info(self.sh.cmd("cp %s %s" % (repo, self.repo), shell=True))
            else:
                self.logger.error("Invalid repo %s", repo)
                return False

            return True

    def repo_init(self, url=None, manifest=None, branch=None, options=''):
        """Run 'repo init' with the given manifest url/file/branch.

        :return: The repo command's exit status (0 on success).
        """
        cmd = [self.repo, 'init']

        if valid_str(url):
            cmd.append('-u')
            cmd.append(url)

        if valid_str(manifest):
            cmd.append('-m')
            cmd.append(manifest)

        if valid_str(branch):
            cmd.append('-b')
            cmd.append(branch)

        if valid_str(options):
            cmd.append(options)

        ret = self.sh.cmd(' '.join(cmd), shell=True)
        if ret[0] != 0:
            self.logger.error(ret)

        return ret[0]

    def repo_sync(self, threads=((multiprocessing.cpu_count() // 4) * 3), options=''):
        """Run 'repo sync -c' with the given thread count and extra options.

        Bug fix: the default thread count uses floor division; with '/' it
        became a float under Python 3 and %d-truncation gave a different
        count than the intended integer math.
        :return: False when the repo is not initialized, None otherwise.
        """
        if not self.valid:
            self.logger.error("Invalid repo")
            return False

        ret = self.sh.cmd('%s sync -c -j%d %s' % (self.repo, threads, options), shell=True)
        if ret[0] != 0:
            self.logger.error(ret)

    def repo_abandon_branch(self, branch='master'):
        """Run 'repo abandon <branch>' across all projects."""
        if not self.valid:
            self.logger.error("Invalid repo")
            return False

        ret = self.sh.cmd('%s abandon %s' % (self.repo, branch), shell=True)
        if ret[0] != 0:
            self.logger.error(ret)

    def repo_create_branch(self, branch='master'):
        """Run 'repo start <branch> --all' to create a topic branch."""
        if not self.valid:
            self.logger.error("Invalid repo")
            return False

        ret = self.sh.cmd('%s start %s --all' % (self.repo, branch), shell=True)
        if ret[0] != 0:
            self.logger.error(ret)

    def repo_reset(self):
        """Hard-reset every project and re-sync to the manifest state.

        :return: True when the repo is valid (errors are logged, not fatal).
        """
        if not self.valid:
            self.logger.error("Invalid repo")
            return False

        ret = self.sh.cmd('%s forall -vc "git clean -fdx;git checkout HEAD;git reset --hard"' % self.repo, shell=True)
        if ret[0] != 0:
            self.logger.error(ret)
        # Bug fix: build a single command string like every other call here;
        # the original passed (self.repo, 'sync -d') as two positional args.
        ret = self.sh.cmd('%s sync -d' % self.repo, shell=True)
        if ret[0] != 0:
            self.logger.error(ret)

        return True

    def get_targets(self):
        """Return the lunch menu choices, or an empty list on failure.

        :return: List of lunch targets, or None when the repo is invalid.
        """
        if not self.valid:
            self.logger.error("Invalid repo")
            return None

        ret = self.sh.cmd('source build/envsetup.sh;echo ${LUNCH_MENU_CHOICES[*]}', shell=True)
        if ret[0] != 0:
            self.logger.error(ret)

        return ret[1].split(' ') if ret[0] == 0 else []

    def clean_all(self):
        """Delete everything under the out directory."""
        if not self.valid:
            self.logger.error("Invalid repo")
            return False

        ret = self.sh.cmd('rm -fr %s/*' % (self.out), shell=True)
        if ret[0] != 0:
            self.logger.error(ret)

        return True

    def clean_kernel(self):
        """Remove the kernel objects for the current target product."""
        if self.target is None:
            self.logger.error("No valid target found %s", self.target)
            return False

        target_dir = os.path.join(self.out, 'target', 'product', self.out_product)
        if not os.path.exists(target_dir):
            self.logger.warn("Target dir %s not found", target_dir)
            return True

        if os.path.exists(os.path.join(target_dir, 'obj', 'kernel')):
            self.sh.cmd("rm -fr %s" % os.path.join(target_dir, 'obj', 'kernel'), shell=True)

        return True

    def clean_default(self):
        """Default (no-op) clean step."""
        return True

    def cherrypick_patches(self, path, urls=None):
        """Cherry-pick the given patches into the workspace.

        :param urls: Optional list of patch URLs (defaults to empty; a None
                     default avoids the shared-mutable-default pitfall).
        """
        # todo: need to implement the logic
        return True

    def update_project_list(self, project_list=None):
        """Apply update_project() to each entry of project_list.

        :return: False as soon as any project update fails, else True.
        """
        for project in (project_list or []):
            status = self.update_project(project["project-dir"], project["new-dir"], project["rurl"])
            if not status:
                return False

        return True

    def update_project(self, project_dir, new_dir=None, remote=None):
        """Swap a project dir for a symlink to new_dir and/or re-point its git remote.

        :param project_dir: Project path (absolute, or relative to self.src).
        :param new_dir: Optional replacement directory to symlink in.
        :param remote: Optional 'name,url,branch' spec to fetch and checkout.
        :return: True on success, False on validation failure.
        """
        self.logger.info(locals())

        if not valid_str(project_dir):
            self.logger.error("Invalid project dir %s", project_dir)
            return False

        if not project_dir.startswith('/'):
            project_dir = os.path.join(self.src, project_dir)

        if not os.path.exists(os.path.abspath(project_dir)):
            self.logger.error("Invalid project dir path %s", project_dir)
            return False

        if valid_str(new_dir) and not os.path.exists(os.path.abspath(new_dir)):
            self.logger.error("Invalid new dir path %s", new_dir)
            return False

        if valid_str(new_dir):
            # Keep one backup of the previous project dir as '<dir>.old'.
            if os.path.exists(os.path.abspath(project_dir) + '.old'):
                ret = self.sh.cmd("rm %s" % os.path.abspath(project_dir) + '.old', shell=True)
                if ret[0] != 0:
                    self.logger.error(ret)

            ret = self.sh.cmd("mv %s %s" % (os.path.abspath(project_dir), os.path.abspath(project_dir) + '.old'),
                              shell=True)
            if ret[0] != 0:
                self.logger.error(ret)

            ret = self.sh.cmd("ln -s %s %s" % (os.path.abspath(new_dir), os.path.basename(project_dir)), shell=True,
                              wd=os.path.dirname(os.path.abspath(project_dir)))
            if ret[0] != 0:
                self.logger.error(ret)

        if valid_str(remote):
            # remote spec format: "<name>,<url>,<branch>"
            remote = remote.split(',')
            self.logger.info(remote)
            git = pyshell.GitShell(wd=os.path.abspath(project_dir), init=True,
                                   remote_list=[(remote[0], remote[1])], fetch_all=True,
                                   logger=self.logger, stream_stdout=True)
            git.cmd('reset --hard')
            git.checkout(remote[0], remote[2])

        self.logger.info("Updating project %s successful", project_dir)

        return True

    def make_target(self, product, target, options='', threads=((multiprocessing.cpu_count() // 4) * 3)):
        """Run lunch + make for the given product/target.

        Bug fix: floor division for the default thread count (see repo_sync).
        :return: True on build success, False otherwise.
        """
        if not self.valid:
            self.logger.error("Invalid repo")
            return False

        cmds = 'source build/envsetup.sh;lunch %s;make %s -j%d %s;' % (product, target, threads, options)

        ret = self.sh.cmd(cmds, shell=True)
        if ret[0] != 0:
            self.logger.error("Make target %s %s failed" % (product, target))
            self.logger.error(ret[1])
            return False

        return True

    def upload_image(self, mode, lurl, rurl, rname="", msg=""):
        """Upload a built image via 'cp' or 'scp'.

        :param mode: 'cp' for a local copy, 'scp' for a remote copy.
        :return: True on success (or unknown mode), False on command failure.
        """
        # todo: need to implement the logic

        if mode == 'cp':
            cmd = "cp %s %s" % (lurl, rurl)
            ret = self.sh.cmd(cmd, shell=True)
            if ret[0] != 0:
                self.logger.error("%s failed", cmd)
                self.logger.error(ret[1])
                return False
        elif mode == 'scp':
            # Bug fix: the scp branch copy-pasted the 'cp' command.
            cmd = "scp %s %s" % (lurl, rurl)
            ret = self.sh.cmd(cmd, shell=True)
            if ret[0] != 0:
                self.logger.error("%s failed", cmd)
                self.logger.error(ret[1])
                return False

        return True

    def auto_build(self):
        """Run the full config-driven flow: init, sync, build each target.

        :return: False when no config is loaded, True otherwise (per-target
                 failures are logged and the next target is attempted).
        """
        if self.cfg is None or self.obj is None:
            self.logger.error("Invalid config file")
            return False

        # repo init
        self.repo_init(self.cfg["repo-init-params"]["url"],
                       self.cfg["repo-init-params"]["manifest"],
                       self.cfg["repo-init-params"]["branch"])

        self.repo_abandon_branch()

        # repo sync
        self.repo_sync(options=self.cfg["repo-sync-params"]["options"])
        self.repo_create_branch()

        # Make build
        for item in self.cfg["target-list"]:

            # Bug fix: print() works on both Python 2 and 3; the bare
            # 'print item' statement is a syntax error under Python 3.
            print(item)

            # Check if you want to continue the build
            if not item["enable-build"]:
                continue

            if self.product_list is not None:
                if item["name"] not in self.product_list:
                    continue

            self.target = item["target"]
            self.lunch_product = item["lunch-product"]
            self.out_product = item["out-product"]

            # Check if obj needs to be cleaned
            self.clean_default()
            if item["obj-clean"] == 'all':
                self.clean_all()
            elif item["obj-clean"] == 'kernel':
                self.clean_kernel()

            # Update if any projects needs to be updated
            status = self.update_project_list(item["project-update-list"])
            if not status:
                self.logger.error("project update list failed")
                continue

            # Cherry pick if any patches are required.
            status = self.cherrypick_patches(self.src, item["cherry-pick-list"])
            if not status:
                self.logger.error("Cherry pick list failed")
                continue

            # Make the build
            status = self.make_target(item["lunch-product"], item["target"], item["options"])
            if not status:
                self.logger.error("Build make command failed")
                continue

            # Check if image needs to be uploaded
            if item["upload-image"]:
                self.upload_image(item["mode"], item["lurl"], item["rurl"], item["rname"], item["msg"])

        return True

    def set_product_list(self, products=None):
        """Restrict auto_build() to the given product names (None = all)."""
        self.product_list = products

    def __init__(self, src_dir=None, out_dir=None, repo_url=None, cfg=None, logger=None):
        """
        :param src_dir: Workspace source dir (defaults to cwd).
        :param out_dir: Build output dir (defaults to <src>/out).
        :param repo_url: URL/path of the repo launcher script.
        :param cfg: Optional JSON config file (android-schema.json).
        :param logger: Optional logger instance.
        """
        self.logger = logger or logging.getLogger(__name__)

        self.src = os.path.abspath(set_val(src_dir, os.getcwd()))
        self.out = os.path.abspath(set_val(out_dir, os.path.join(self.src, 'out')))
        self.repo = '~/bin/repo'
        self.target = None
        self.lunch_product = None
        self.out_product = None
        self.product_list = None
        self.valid = False
        self.schema = pkg_resources.resource_filename('android_build', 'schemas/android-schema.json')
        self.cfg = None
        self.obj = None

        self.sh = pyshell.PyShell(wd=self.src, stream_stdout=True)
        self.sh.update_shell()

        if cfg is not None:
            self.obj = JSONParser(self.schema, cfg, extend_defaults=True, os_env=True, logger=logger)
            self.cfg = self.obj.get_cfg()
            repo_url = self.cfg["repo-script"] if valid_str(self.cfg["repo-script"]) else repo_url
            repo_dir = self.cfg.get("repo-dir", None)
            if valid_str(repo_dir):
                # Bug fix: os.path.path does not exist; os.path.join was
                # clearly intended (AttributeError at runtime otherwise).
                self.src = os.path.join(self.src, repo_dir)

        if not self._get_repo_script(repo_url):
            self.logger.error("repo setup failed")
            return None

        self.valid = True
Ejemplo n.º 14
0
class KernelTest(object):

    def __init__(self, src, cfg=None, out=None, rname=None, rurl=None, branch=None, head=None, base=None,
                 res_cfg=None, logger=None):
        """
        Kernel test wrapper.

        :param src: Kernel source directory.
        :param cfg: Test config (JSON) validated against test-schema.json.
        :param out: Build output directory (defaults to <src>/out).
        :param rname: Optional git remote name; when given, the remote is added and fetched.
        :param rurl: Git remote URL used together with rname.
        :param branch: Branch to check out (prefixed with rname when a remote is given).
        :param head: Head commit SHA; auto-detected from git when omitted.
        :param base: Base commit SHA; auto-detected from git when omitted.
        :param res_cfg: Previous results config passed through to KernelResults.
        :param logger: Optional logger object.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = src
        # Fix: 'os.path.absapth' typo -> os.path.abspath.
        self.out = os.path.join(self.src, 'out') if out is None else os.path.abspath(out)
        self.branch = branch
        self.rname = rname
        self.rurl = rurl
        self.head = head
        self.base = base
        self.valid_git = False
        self.schema = pkg_resources.resource_filename('klibs', 'schemas/test-schema.json')
        self.cfg = None
        self.cfgobj = None
        self.resobj = KernelResults(self.src, old_cfg=res_cfg, logger=self.logger)
        self.git = GitShell(wd=self.src, logger=logger)
        self.sh = PyShell(wd=self.src, logger=logger)
        self.checkpatch_source = CHECK_PATCH_SCRIPT
        self.custom_configs = []

        if self.rname is not None and len(self.rname) > 0:
            # Remote given: make sure a git repo exists, then fetch the remote.
            if not os.path.exists(self.src):
                os.makedirs(self.src)
            if not self.git.valid():
                self.git.init()
            self.git.add_remote(self.rname, rurl)
            self.git.cmd('fetch %s' % self.rname)
            self.branch = self.rname + '/' + self.branch

        self.valid_git = True if self.git.valid() else False

        if self.valid_git:
            if self.branch is not None and len(self.branch) > 0:
                if self.git.cmd('checkout', self.branch)[0] != 0:
                    self.logger.error("Git checkout command failed in %s", self.src)
                    return
            else:
                self.branch = self.git.current_branch()

            # Update base & head if not given.
            if self.head is None:
                self.head = self.git.head_sha()
            if self.base is None:
                self.base = self.git.base_sha()

            self.resobj.update_kernel_params(base=self.base, head=self.head, branch=self.branch)

        if not is_valid_kernel(src, logger):
            return

        self.version = BuildKernel(self.src).uname

        if len(self.version) > 0:
            self.resobj.update_kernel_params(version=self.version)

        if cfg is not None:
            self.cfgobj = JSONParser(self.schema, cfg, extend_defaults=True, os_env=True, logger=logger)
            self.cfg = self.cfgobj.get_cfg()

    def send_email(self, emailcfg, sub=None):
        """
        Mail the current test results using the given email config.

        :param emailcfg: Email JSON config passed to the Email helper.
        :param sub: Optional extra subject text appended to "Test results".
        :return: False when no config is supplied, True otherwise.
        """
        if emailcfg is None:
            return False

        mailer = Email(emailcfg, self.logger)

        subject_parts = ['Test results']
        if sub is not None:
            subject_parts.append(sub)

        # Dump the results into a temp file, then read it back as mail body.
        results_file = tempfile.NamedTemporaryFile()
        self.resobj.dump_results(outfile=results_file.name)

        body = [format_h1("Test Results"), '']
        with open(results_file.name) as fobj:
            body.append(fobj.read())
        body.append('\n')

        mailer.send_email(' '.join(subject_parts), '\n'.join(body))

        return True

    def git_upload_results(self, remote, mode='push', msg=[], append_kinfo=False,
                           resdir=None, relpath=".", outfile='out.json'):
        """
        Upload the results to remote repo.
        :param remote: (remotename, url, branch).
        :param mode:  Git push mode (push, force-push, refs-for)
        :param msg: Commit message in list format, One line for each entry.
        :param append_kinfo: Append kernel info to commit message.
        :param resdir: Dir used for uploading the results.
        :param relpath: Relative path of the results file.
        :param outfile: Results file.

        :return: True | False
        """

        clean_resdir = False

        if not isinstance(remote, tuple) or len(remote) != 3:
            self.logger.info("Invalid remote %s", remote)
            return False

        # Unpack once; rbranch is a local so 'refs-for' mode no longer tries
        # to mutate the (immutable) remote tuple, which raised TypeError.
        rname, rurl, rbranch = remote

        if resdir is None:
            resdir = tempfile.mkdtemp("_dir", "output_")
            clean_resdir = True

        # Commit the results file used back to server.
        ogit = GitShell(wd=resdir, init=True, remote_list=[(rname, rurl)], fetch_all=True, logger=self.logger)
        ogit.cmd("clean -xdf")
        ogit.cmd("checkout %s/%s" % (rname, rbranch))
        output_file = os.path.join(resdir, relpath, outfile)

        if not os.path.exists(os.path.dirname(output_file)):
            os.makedirs(os.path.dirname(output_file))

        self.resobj.dump_results(outfile=output_file)

        ogit.cmd('add %s' % (relpath + '/' + outfile))

        # Create the commit message and upload it.
        # Text mode ('w+') so writing a str works on both Python 2 and 3;
        # the default binary mode rejects str on Python 3.
        with tempfile.NamedTemporaryFile(mode='w+') as msg_file:
            commit_msg = '\n'.join(msg)
            # Use default msg if its not given in config file.
            if len(commit_msg) == 0:
                commit_msg = "test: Update latest results"
            # Append kernel info if enabled.
            if append_kinfo:
                commit_msg += '\n'
                commit_msg += self.resobj.kernel_info()
            msg_file.write(commit_msg)
            msg_file.seek(0)
            ogit.cmd('commit -s -F %s' % msg_file.name)

        if mode == 'refs-for':
            rbranch = 'refs/for/%s' % rbranch

        # Push to the remote name we were given (previously hardcoded 'origin',
        # which broke for any other remote name).
        if not ogit.valid_branch(rname, rbranch) or mode == 'force-push':
            ogit.cmd('push', '-f', rname, 'HEAD:%s' % rbranch)
        else:
            ogit.cmd('push', rname, 'HEAD:%s' % rbranch)

        if clean_resdir:
            shutil.rmtree(resdir, ignore_errors=True)

        # Honor the documented True|False contract (previously fell off the end).
        return True

    def auto_test(self):
        """
        Run the test suite described by the JSON config (self.cfg).

        Covers static tests (compile/sparse/smatch per arch/config),
        checkpatch, custom test scripts, and optional upload of the results.

        :return: True if every enabled test passed, False otherwise.
        """
        self.logger.info(format_h1("Running kernel tests from json", tab=2))

        status = True

        # Scratch git repo used for fetching remote config sources.
        config_temp = tempfile.mkdtemp("_dir", "config_")
        cgit = GitShell(wd=config_temp, init=True, logger=self.logger)

        static_config = self.cfg.get("static-config", None)
        sparse_config = self.cfg.get("sparse-config", None)
        smatch_config = self.cfg.get("smatch-config", None)
        custom_test = self.cfg.get("custom-test", None)

        # If there is a config in remote source, fetch it and give the local path.
        def get_configsrc(options):

            if options is None or not isinstance(options, collections.Mapping):
                return None

            if len(options["url"]) == 0:
                return os.path.abspath(os.path.join(self.src, options["remote-dir"], options["name"]))

            if options["sync-mode"] == "git":
                cgit.cmd("clean -xdf")
                # Reuse an existing remote whose URL matches; fall back to 'origin'.
                remote_list = cgit.cmd("remote")[1].split('\n')
                rname = 'origin'
                for remote in remote_list:
                    rurl = cgit.cmd("remote get-url %s" % remote)[1].strip()
                    if rurl == options["url"]:
                        rname = remote
                        break
                cgit.add_remote(rname, options["url"])
                cgit.cmd("pull %s" % rname)
                cgit.cmd("checkout %s/%s" % (rname, options["branch"]))

                return os.path.abspath(os.path.join(config_temp, options["remote-dir"], options["name"]))

            return None

        # Resolve a head/base SHA, either from the repo state or from the
        # test config (fixed value, or one of the auto modes).
        def get_sha(_type='head', config=None):
            if config is None:
                return getattr(self, _type)
            if config[_type]['auto']:
                if config[_type]['auto-mode'] == "last-upstream":
                    return self.git.cmd('describe --abbrev=0 --match "v[0-9]*" --tags')[1].strip()
                elif config[_type]['auto-mode'] == "last-tag":
                    return self.git.cmd('describe --abbrev=0 --tags')[1].strip()
                elif config[_type]['auto-mode'] == "head-commit":
                    return self.git.head_sha()
                elif config[_type]['auto-mode'] == "base-commit":
                    return self.git.base_sha()
            elif len(config[_type]['value']) > 0:
                return config[_type]['value'].strip()
            else:
                return getattr(self, _type)

        # Run the compile/sparse/smatch tests enabled in one test-list entry.
        def static_test(obj, cobj, config):
            status = True

            if cobj["compile-test"]:
                current_status = self.compile(obj["arch_name"], config, obj["compiler_options"]["CC"],
                                              obj["compiler_options"]["cflags"],
                                              cobj.get('name', None), get_configsrc(cobj.get('source-params', None)))
                if current_status is False:
                    self.logger.error("Compilation of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                     cobj.get('name', config)))

                status &= current_status

            if cobj["sparse-test"]:
                skip = False
                args = [
                    obj["arch_name"], config, obj["compiler_options"]["CC"], obj["compiler_options"]["cflags"],
                    cobj.get('name', None), get_configsrc(cobj.get('source-params', None))
                ]

                if sparse_config is not None:
                    if sparse_config["enable"] is False:
                        self.logger.warning("Sparse global flag is disabled\n")
                        skip = True
                    else:
                        args.append(sparse_config["cflags"])
                        args.append(get_sha("base", sparse_config))
                        args.append(sparse_config["source"])

                if skip is False:
                    current_status = self.sparse(*args)

                    if current_status is False:
                        self.logger.error("Sparse test of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                         cobj.get('name', config)))
                    status &= current_status

            if cobj["smatch-test"]:
                skip = False
                args = [
                    obj["arch_name"], config, obj["compiler_options"]["CC"], obj["compiler_options"]["cflags"],
                    cobj.get('name', None), get_configsrc(cobj.get('source-params', None))
                ]

                if smatch_config is not None:
                    if smatch_config["enable"] is False:
                        self.logger.warning("Smatch global flag is disabled\n")
                        skip = True
                    else:
                        args.append(smatch_config["cflags"])
                        args.append(get_sha("base", smatch_config))
                        args.append(smatch_config["source"])

                if skip is False:
                    current_status = self.smatch(*args)

                    if current_status is False:
                        self.logger.error("Smatch test of arch:%s config:%s failed\n" % (obj["arch_name"],
                                                                                         cobj.get('name', config)))
                    status &= current_status

            return status

        if static_config is not None and static_config["enable"] is True:
            # Compile standard configs
            for obj in static_config["test-list"]:

                for config in supported_configs:
                    # Fix: dict.has_key() was removed in Python 3; 'in' works on both.
                    if isinstance(obj, collections.Mapping) and config in obj:
                        status &= static_test(obj, obj[config], config)

                # Compile custom configs
                for cobj in obj["customconfigs"]:
                    if cobj['name'] not in self.custom_configs:
                        self.custom_configs.append(cobj['name'])

                    self.resobj.add_config(cobj['name'])

                    status &= static_test(obj, cobj, cobj['defaction'])

        checkpatch_config = self.cfg.get("checkpatch-config", None)

        if checkpatch_config is not None and checkpatch_config["enable"] is True:
            if len(checkpatch_config["source"]) > 0:
                self.checkpatch_source = checkpatch_config["source"]

            status &= self.run_checkpatch(get_sha('head', checkpatch_config), get_sha('base', checkpatch_config))

        if custom_test is not None and custom_test["enable"] is True:
            for ctest in custom_test["test-list"]:
                status &= self.custom_test(ctest["name"], ctest["source"], ctest["arg-list"],
                                           get_sha("head", custom_test),
                                           get_sha("base", custom_test),
                                           ctest["enable-head-sub"],
                                           ctest["enable-base-sub"],
                                           ctest["enable-src-sub"])

        output_config = self.cfg.get("output-config", None)

        if output_config is not None and output_config["enable"] is True and len(output_config["url"]) > 0:

            # Commit the results file used back to server.
            if output_config["sync-mode"] == "git":
                self.git_upload_results(remote=('origin', output_config["url"], output_config["branch"]),
                                        mode=output_config["mode"],
                                        msg=output_config["upload-msg"],
                                        append_kinfo=output_config.get("append-kinfo", False),
                                        resdir=None,
                                        relpath=output_config["remote-dir"],
                                        outfile=output_config["name"])

        shutil.rmtree(config_temp, ignore_errors=True)

        return status

    def _compile(self, arch='', config='', cc='', cflags=[], name='', cfg=None, clean_build=False):
        """
        Build the kernel once for the given arch/config combination.

        :param arch: Target architecture (must be in supported_archs).
        :param config: Standard config name, or the default action for a custom config.
        :param cc: Compiler to use.
        :param cflags: Extra compiler/make flags.
        :param name: Custom config name (required when config is not a standard one).
        :param cfg: Path to a custom config source file.
        :param clean_build: Wipe the output dir before building.

        :return: (status, warning_count, error_count, warning_lines, error_lines)
                 on a completed make; bare False for invalid arch/config input
                 (NOTE(review): callers that tuple-unpack would break on that
                 path — pre-existing contract, left unchanged).
        """
        custom_config = False

        if arch not in supported_archs:
            self.logger.error("Invalid arch/config %s/%s" % (arch, config))
            return False

        if config not in supported_configs:
            if cfg is None or len(cfg) == 0 or name is None or len(name) == 0:
                self.logger.error("Invalid arch/config %s/%s" % (arch, config))
                return False
            else:
                if name not in self.custom_configs:
                    self.custom_configs.append(name)

                self.resobj.add_config(name)

                custom_config = True

        if name in self.custom_configs:
            custom_config = True

        out_dir = os.path.join(self.out, arch, name if custom_config else config)

        if clean_build:
            self.sh.cmd("rm -fr %s/*" % out_dir, shell=True)

        kobj = BuildKernel(src_dir=self.src, out_dir=out_dir, arch=arch, cc=cc, cflags=cflags, logger=self.logger)

        # If custom config source is given, use it.
        if custom_config:
            kobj.copy_newconfig(cfg)

        getattr(kobj, 'make_' + config)()

        ret, out, err = kobj.make_kernel()

        def parse_results(outputlog, errorlog, status):
            data = errorlog.split('\n')

            # Materialize matches as lists: len(filter(...)) fails on Python 3
            # where filter() returns an iterator; this also avoids scanning the
            # log twice per category.
            warning_data = [x for x in data if "warning:" in x]
            error_data = [x for x in data if "error:" in x]

            return status, len(warning_data), len(error_data), warning_data, error_data

        status = True if ret == 0 else False

        if not status:
            self.logger.error(err)

        return parse_results(out, err, status)

    def compile(self, arch='', config='', cc='', cflags=[], name='', cfg=None):
        """
        Compile the given arch/config, log warnings/errors and record results.

        :return: True when the build succeeded, False otherwise.
        """
        status, warning_count, error_count, wdata, edata = self._compile(arch, config, cc, cflags, name, cfg)

        # Log a header, each matching log line, and a trailing newline block.
        def _log_block(header, count, entries):
            self.logger.info(header, arch, config, name, count)
            for line in entries:
                self.logger.info(line)
            if count > 0:
                self.logger.info("\n")

        _log_block("List of warnings Arch:%s Config:%s Name:%s Count:%d\n", warning_count, wdata)
        _log_block("List of errors Arch:%s Config:%s Name:%s Count:%d\n", error_count, edata)

        # Fall back to the config name when no custom name was given.
        if name is None or len(name) == 0:
            name = config

        self.resobj.update_compile_test_results(arch, name, status, warning_count, error_count)

        return status

    def _get_bin_path(self, path):
        """
        Resolve a script/binary reference to a usable path.

        Paths starting with '.' are anchored at CWD, absolute paths are
        returned unchanged, and bare names are searched on $PATH (falling
        back to the name itself when not found).
        """
        # Minimal which() lookalike: return the first executable match on $PATH.
        def which(program):
            def is_exe(fpath):
                return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

            # Renamed from 'path' to avoid shadowing the outer parameter.
            dirname, _fname = os.path.split(program)
            if dirname:
                if is_exe(program):
                    return program
            else:
                for pdir in os.environ["PATH"].split(os.pathsep):
                    exe_file = os.path.join(pdir, program)
                    if is_exe(exe_file):
                        return exe_file

            return None

        if path.startswith('.'):
            return os.path.join(os.getcwd(), path)
        elif path.startswith('/'):
            return path
        else:
            # Call which() once instead of twice.
            resolved = which(path)
            return resolved if resolved is not None else path

    def _diff_count(self, data1, data2):
        """Log and count the entries of data2 that are absent from data1."""
        new_entries = 0
        for line in data2:
            if line in data1:
                continue
            new_entries += 1
            self.logger.info(line)

        return new_entries

    def sparse(self, arch='', config='', cc='', cflags=[], name='', cfg=None, sparse_flags=["C=2"],
               base=None, script_bin=SPARSE_BIN_PATH):
        """
        Run a sparse static-analysis build and record new warnings/errors.

        When `base` is given, the tree is first checked out at `base` and built
        to collect baseline findings; only findings not present in the baseline
        are counted against the current head.

        :param arch: Target architecture.
        :param config: Kernel config name (or default action for a custom config).
        :param cc: Compiler.
        :param cflags: Extra compile flags.
        :param name: Custom config name (optional).
        :param cfg: Custom config source path (optional).
        :param sparse_flags: Make flags enabling the checker (default C=2).
        :param base: Baseline commit/tag for diff-based counting (optional).
        :param script_bin: Path to the sparse binary.
        :return: Build status of the head build (True/False).
        """
        base_warning_count = 0
        base_error_count = 0
        base_edata = []
        base_wdata = []
        flags = []

        # Point kbuild's CHECK variable at the sparse binary.
        flags.append('CHECK="' + self._get_bin_path(script_bin) + '"')

        if base is not None:
            curr_head = self.git.head_sha()

            if self.git.cmd('checkout', base)[0] != 0:
                self.logger.error("Git checkout command failed in %s", base)
                return False

            # Baseline build at 'base' (clean build so counts are comparable).
            status, base_warning_count,\
            base_error_count, base_wdata,\
            base_edata = self._compile(arch, config, cc, sparse_flags + flags + cflags, name, cfg, True)

            if status is False:
                return False

            # Restore the original head before the real build.
            if self.git.cmd('checkout', curr_head)[0] != 0:
                self.logger.error("Git checkout command failed in %s", curr_head)
                return False

        status, warning_count,\
        error_count, wdata, edata = self._compile(arch, config, cc, sparse_flags + flags + cflags, name, cfg, True)

        self.logger.info("Base warinings:%d Base errors:%d New warining:%d New errors:%d\n",
                         base_warning_count, base_error_count, warning_count, error_count)

        self.logger.debug(format_h1("Diff between Base/New warnings", tab=2))
        # Count only findings that were not already present in the baseline.
        warning_count = self._diff_count(base_wdata, wdata)
        self.logger.debug(format_h1("End of new warnings, count:%d" % warning_count, tab=2))

        self.logger.debug(format_h1("Diff between Base/New errors\n", tab=2))
        error_count = self._diff_count(base_edata, edata)
        self.logger.debug(format_h1("End of new errors, count:%d" % error_count, tab=2))

        name = config if name is None or len(name) == 0 else name

        self.resobj.update_sparse_test_results(arch, name, status, warning_count, error_count)

        return status

    def smatch(self, arch='', config='', cc='', cflags=[], name='', cfg=None, smatch_flags=["C=2"],
               base=None, script_bin="smatch"):
        """
        Run a smatch static-analysis build and record new warnings/errors.

        Mirrors sparse(): when `base` is given, a baseline build at `base` is
        made first and only new findings are counted for the current head.

        :param arch: Target architecture.
        :param config: Kernel config name (or default action for a custom config).
        :param cc: Compiler.
        :param cflags: Extra compile flags.
        :param name: Custom config name (optional).
        :param cfg: Custom config source path (optional).
        :param smatch_flags: Make flags enabling the checker (default C=2).
        :param base: Baseline commit/tag for diff-based counting (optional).
        :param script_bin: Path to the smatch binary.
        :return: Build status of the head build (True/False).
        """
        base_warning_count = 0
        base_error_count = 0
        base_edata = []
        base_wdata = []
        flags = []

        # Point kbuild's CHECK variable at smatch in kernel mode.
        flags.append('CHECK="' + self._get_bin_path(script_bin) + ' -p=kernel"')

        if base is not None:
            curr_head = self.git.head_sha()

            if self.git.cmd('checkout', base)[0] != 0:
                self.logger.error("Git checkout command failed in %s", base)
                return False

            # Baseline build at 'base' (clean build so counts are comparable).
            status, base_warning_count,\
            base_error_count, base_wdata,\
            base_edata = self._compile(arch, config, cc,smatch_flags + flags + cflags, name, cfg, True)

            if status is False:
                return False

            # Restore the original head before the real build.
            if self.git.cmd('checkout', curr_head)[0] != 0:
                self.logger.error("Git checkout command failed in %s", curr_head)
                return False

        status, warning_count,\
        error_count, wdata, edata = self._compile(arch, config, cc, smatch_flags + flags + cflags, name, cfg, True)

        self.logger.info("Base warinings:%d Base errors:%d New warining:%d New errors:%d\n",
                         base_warning_count, base_error_count, warning_count, error_count)

        self.logger.debug(format_h1("Diff between Base/New warnings", tab=2))
        # Count only findings that were not already present in the baseline.
        warning_count = self._diff_count(base_wdata, wdata)
        self.logger.debug(format_h1("End of new warnings, count:%d" % warning_count, tab=2))

        self.logger.debug(format_h1("Diff between Base/New errors\n", tab=2))
        error_count = self._diff_count(base_edata, edata)
        self.logger.debug(format_h1("End of new errors, count:%d" % error_count, tab=2))

        name = config if name is None or len(name) == 0 else name

        self.resobj.update_smatch_test_results(arch, name, status, warning_count, error_count)

        return status

    def process_custom_test(self, name, ret):
        """Record a custom test result; ret[0] is the shell exit status."""
        passed = (ret[0] == 0)
        self.resobj.update_custom_test_results(name, passed)

    def custom_test(self, name, script, arg_list=[], head=None, base=None,
                    enable_head_sub=False, enable_base_sub=False, enable_src_sub=False):
        """
        Run an external test script and record its result.

        Occurrences of $HEAD/$BASE/$SRC in the arguments are substituted when
        the corresponding enable flag is set.

        :return: True when the script exited with status 0.
        """
        self.logger.info(format_h1("Running custom test %s" % name, tab=2))

        script = self._get_bin_path(script)

        if not os.path.exists(script):
            self.logger.error("Invalid script %s", script)
            return False

        cmd = [script] + list(arg_list)

        # Replace 'token' with 'value' in every argument that contains it.
        def substitute(token, value):
            for idx, arg in enumerate(cmd):
                if token in arg:
                    cmd[idx] = arg.replace(token, value)

        if head is not None and enable_head_sub:
            substitute("$HEAD", head)

        if base is not None and enable_base_sub:
            substitute("$BASE", base)

        if enable_src_sub:
            substitute("$SRC", self.src)

        ret = self.sh.cmd("%s" % (' '.join(cmd)))

        self.process_custom_test(name, ret)

        return (ret[0] == 0)

    def compile_list(self, arch='', config_list=[], cc='', cflags=[], name='', cfg=None):
        """Compile every config in config_list; return a per-config status list."""
        self.logger.info(format_h1("Running compile tests", tab=2))

        return [self.compile(arch, config, cc, cflags, name, cfg)
                for config in config_list]

    def sparse_list(self, arch='', config_list=[], cc='', cflags=[], name='', cfg=None, sparse_flags=["C=2"],
                    base=None, script_bin=SPARSE_BIN_PATH):
        """Run sparse on every config in config_list; return a per-config status list."""
        self.logger.info(format_h1("Running sparse tests", tab=2))

        return [self.sparse(arch, config, cc, cflags, name, cfg, sparse_flags, base, script_bin)
                for config in config_list]

    def smatch_list(self, arch='', config_list=[], cc='', cflags=[], name='', cfg=None, smatch_flags=["C=2"],
                    base=None, script_bin="smatch"):
        """Run smatch on every config in config_list; return a per-config status list."""
        self.logger.info(format_h1("Running smatch tests", tab=2))

        return [self.smatch(arch, config, cc, cflags, name, cfg, smatch_flags, base, script_bin)
                for config in config_list]

    def run_checkpatch(self, head=None, base=None):
        """
        Run the checkpatch script on every commit in base..head and record
        aggregate warning/error counts in the results object.

        :param head: Head commit SHA (defaults to self.head).
        :param base: Base commit SHA (defaults to self.base).
        :return: True if checkpatch ran over all patches, False on any failure.
        """

        self.logger.info(format_h1("Runing checkpatch script", tab=2))

        self.enable_checkpatch = True
        head = self.head if head is None else head
        base = self.base if base is None else base

        # Global (whole-range) error/warning totals.
        gerrorcount = 0
        gwarningcount = 0

        try:
            if self.valid_git is False:
                raise Exception("Invalid git repo")

            if not os.path.exists(os.path.join(self.src, CHECK_PATCH_SCRIPT)):
                raise Exception("Invalid checkpatch script")

            ret, count, err = self.git.cmd('rev-list', '--count',  str(base) + '..'+ str(head))
            if ret != 0:
                raise Exception("git rev-list command failed")

            self.logger.info("Number of patches between %s..%s is %d", base, head, int(count))

            # Pull "total: N errors, M warnings," out of checkpatch output.
            def parse_results(data):
                regex = r"total: ([0-9]*) errors, ([0-9]*) warnings,"
                match = re.search(regex, data)
                if match:
                    return int(match.group(1)), int(match.group(2))

                return 0, 0

            prev_index = 0

            # Walk the commits one at a time: head~index..head~prev_index is
            # always a single-patch range.
            for index in range(1, int(count) + 1):
                commit_range = str(head) + '~' + str(index) + '..' + str(head) + '~' + str(prev_index)
                ret, out, err = self.sh.cmd(os.path.join(self.src, CHECK_PATCH_SCRIPT), '-g', commit_range)
                lerrorcount, lwarningcount = parse_results(out)
                # Only dump the script output when it actually found something.
                if lerrorcount != 0 or lwarningcount != 0:
                    self.logger.info(out)
                    self.logger.info(err)
                gerrorcount = gerrorcount + int(lerrorcount)
                gwarningcount = gwarningcount + int(lwarningcount)
                self.logger.debug("lerror:%d lwarning:%d gerror:%d gwarning:%d\n", lerrorcount, lwarningcount,
                                  gerrorcount, gwarningcount)
                prev_index = index
        except Exception as e:
            self.logger.error(e)
            self.resobj.update_checkpatch_results(False, gwarningcount, gerrorcount)
            return False
        else:
            self.resobj.update_checkpatch_results(True, gwarningcount, gerrorcount)
            return True

    def print_results(self, test_type='all'):
        # Pretty-print results for the given test type (default: all).
        self.resobj.print_test_results(test_type=test_type)

    def get_results(self, test_type='all'):
        # Return results for the given test type (default: all).
        return self.resobj.get_test_results(test_type=test_type)

    def dump_results(self, outfile):
        # Write the results to the given output file.
        self.resobj.dump_results(outfile)
Ejemplo n.º 15
0
class Email(object):
    """
    Wrapper class for sending email.
    """
    def __init__(self, cfg=None, logger=None):
        """
        Email init().

        :param cfg: JSON config file; if given, smtp/from/to parameters are parsed from it.
        :param logger: Logger object.
        """
        self.logger = logger or logging.getLogger(__name__)

        # SMTP server related param defaults.
        self.smtp_server = None
        self.smtp_thread = None
        self.server_obj = None
        self.client_obj = None
        self.smtp_port = 0
        self.supported_auths = ['TLS']
        self.auth = None
        self.username = None
        self.password = None

        # Config file related defaults.
        self.cfg_src = cfg
        self.cfg = None
        self.cfgobj = None
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/email-schema.json')

        # Set from/to/cc/bcc defaults
        self._from = None
        self._to = None
        self._cc = None
        self._bcc = None

        # Update params if cfg file is given.
        # (Removed an unused 'set_def' lambda that was never called.)
        if cfg is not None:
            self.cfgobj = JSONParser(self.schema,
                                     cfg,
                                     extend_defaults=True,
                                     os_env=True,
                                     logger=logger)
            self.cfg = self.cfgobj.get_cfg()

            self.set_header(self.cfg["from"], self.cfg["to"], self.cfg["cc"],
                            self.cfg["bcc"])
            self.set_smtp(self.cfg["smtp-server"], self.cfg["smtp-port"],
                          self.cfg["smtp-authentication"],
                          self.cfg["smtp-username"], self.cfg["smtp-password"])

    def _smtp_setup(self):
        """
        Open the SMTP connection using the configured server parameters.

        For "localhost", a local debug SMTP server is spun up in an asyncore
        thread; otherwise optional TLS and login are applied.

        :return: False on invalid server/port; otherwise None.
        """

        # Non-empty string check (builtinbase is the py2/py3 string base type).
        str_check = lambda x: x is not None and isinstance(x, builtinbase
                                                           ) and len(x) > 0

        if not str_check(self.smtp_server):
            self.logger.error("Invalid SMTP server %s", self.smtp_server)
            return False

        if not self.smtp_port >= 0 or not isinstance(self.smtp_port,
                                                     (int, long_int)):
            self.logger.error("Invalid SMTP port %s", str(self.smtp_port))
            return False

        # "localhost" spins up a local debugging SMTP server in a background
        # asyncore loop thread.
        if self.smtp_server == "localhost":
            self.server_obj = CustomSMTPServer(
                (self.smtp_server, self.smtp_port), None)
            self.smtp_thread = threading.Thread(target=asyncore.loop,
                                                name="Asyncore Loop",
                                                kwargs={'timeout': 1})
            self.smtp_thread.start()

        self.client_obj = smtplib.SMTP(host=self.smtp_server,
                                       port=self.smtp_port)

        # TLS/auth only apply to real (non-local) servers.
        if str_check(
                self.auth
        ) and self.auth in self.supported_auths and self.smtp_server != "localhost":
            if self.auth == 'TLS':
                self.client_obj.starttls()

        if str_check(self.username) and str_check(
                self.password) and self.smtp_server != "localhost":
            self.client_obj.login(self.username, self.password)

        self.logger.debug("SMTP Server Open():%s port:%d\n", self.smtp_server,
                          self.smtp_port)

    def _smtp_close(self):
        """Quit the SMTP client and tear down any local debug server/thread."""
        self.client_obj.quit()

        server = self.server_obj
        if server is not None:
            server.close()

        thread = self.smtp_thread
        if thread is not None:
            thread.join()

        self.logger.debug("SMTP Server Close()\n")

    def _valid_email(self, data):
        def valid(email):
            if re.match(r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$',
                        email):
                self.logger.info("Valid email %s", email)
                return True
            else:
                self.logger.info("Invalid email %s", email)

        if isinstance(data, list):
            for item in data:
                if not valid(item):
                    return False
            return True
        elif isinstance(data, builtinbase):
            if valid(data):
                return True

        return False

    def set_smtp(self,
                 smtp_server=None,
                 smtp_port=None,
                 auth=None,
                 username=None,
                 password=None):
        """Update SMTP connection parameters; only well-typed values are applied."""
        # Renamed from 'type' to avoid shadowing the builtin.
        def usable(value, expected):
            return (value is not None and isinstance(value, expected))

        if usable(smtp_server, builtinbase):
            self.smtp_server = smtp_server

        if usable(smtp_port, (int, long_int)):
            self.smtp_port = smtp_port

        if usable(auth, builtinbase) and auth in self.supported_auths:
            self.auth = auth

        if usable(username, builtinbase):
            self.username = username

        if usable(password, builtinbase):
            self.password = password

    def set_header(self, _from, _to=[], _cc=[], _bcc=[]):
        #update if the field value is vaild

        def set_value(name, param, value):
            if value is not None:
                if self._valid_email(value):
                    return value
                else:
                    self.logger.error("Invalid %s: %s address", name, value)

            return getattr(self, param)

        self._from = set_value('From', '_from', _from)
        self._to = set_value('To', '_to', _to)
        self._cc = set_value('CC', '_cc', _cc)
        self._bcc = set_value('BCC', '_bcc', _bcc)

    def send_email(self, subject='', content=''):
        """Build a MIME message from the configured headers and send it via SMTP."""
        self.logger.debug("From: %s\nTo: %s\nCC: %s\nBCC: %s\nSubject: %s\n",
                          self._from, self._to, self._cc, self._bcc, subject)

        self._smtp_setup()

        recipients = map(lambda it: it.strip(), self._cc + self._bcc + self._to)

        message = MIMEMultipart('alternative')
        message['From'] = self._from
        message['Subject'] = subject
        message['To'] = ','.join(self._to)
        message['Cc'] = ','.join(self._cc)
        message['Bcc'] = ','.join(self._bcc)
        message.attach(MIMEText(content))

        # On py3, map() returns an iterator; sendmail needs a list there.
        if is_py2:
            self.client_obj.sendmail(self._from, recipients, message.as_string())
        else:
            self.client_obj.sendmail(self._from, list(recipients), message.as_string())

        self._smtp_close()
Ejemplo n.º 16
0
class KernelRelease(object):
    def __init__(self, src, cfg=None, logger=None):
        """
        Kernel release helper: builds git bundles, quilt series and tar
        archives from a kernel tree and uploads them to git remotes.
        :param src: Path of the kernel source tree.
        :param cfg: Optional release config, validated against release-schema.json.
        :param logger: Optional logger; defaults to a module-level logger.
        """
        self.logger = logger or logging.getLogger(__name__)
        self.src = os.path.abspath(src)

        # Range/branch bookkeeping, filled in by later operations.
        for attr in ('base', 'head', 'local_branch', 'remote',
                     'remote_branch'):
            setattr(self, attr, None)

        # Shell helpers rooted at the kernel source directory.
        self.git = GitShell(wd=self.src, logger=logger)
        self.sh = PyShell(wd=self.src, logger=logger)
        self.valid_git = False

        self.cfg = None
        self.cfgobj = None
        self.schema = pkg_resources.resource_filename(
            'klibs', 'schemas/release-schema.json')
        # Supported modes for generate_git_bundle().
        self.bundle_modes = ['branch', 'diff', 'commit_count']

        # Execute commands for real rather than dry-running them.
        self.git.dryrun(False)
        self.sh.dryrun(False)

        # Bail out early (self.cfg stays None) for non-kernel trees.
        if not is_valid_kernel(src, logger):
            return

        self.cfgobj = JSONParser(self.schema,
                                 cfg,
                                 extend_defaults=True,
                                 os_env=True,
                                 logger=logger)
        self.cfg = self.cfgobj.get_cfg()

        if self.git.valid():
            self.valid_git = True

    def auto_release(self):
        str_none = lambda x: None if len(x) == 0 else x.strip()
        if self.cfg is None:
            self.logger.error("Invalid config file %s", self.cfg)
            return False

        def conv_remotelist(remote_list):
            new_list = []
            for remote in remote_list:
                new_list.append((remote["name"], remote["url"],
                                 remote["branch"], remote["path"]))

            return new_list if len(new_list) > 0 else None

        def conv_taglist(tag_list):
            new_list = []
            for tag in tag_list:
                new_list.append(tag["name"], tag["msg"])

            return new_list if len(new_list) > 0 else None

        def conv_copyformat(flist):
            if "*" in flist:
                return None
            else:
                return flist

        try:
            params = self.cfg.get("bundle", None)

            if params is not None and params["enable"]:
                if not self.valid_git:
                    Exception(
                        "Kernel is not a git repo. So bundle option is not supported"
                    )

                base = params["base"]["value"]
                if params["base"]["auto"]:
                    base = self.git.cmd('describe --abbrev=0 --tags')[1]
                base = str_none(base)

                head = params["head"]["value"]
                if params["head"]["auto"]:
                    head = self.git.head_sha()
                head = str_none(head)

                bundle = self.generate_git_bundle(params["outname"],
                                                  params["mode"],
                                                  str_none(params["branch"]),
                                                  head, base,
                                                  params["commit_count"])

                uparams = params.get("upload-params", None)

                if uparams is not None and bundle is not None:
                    self.git_upload(bundle, str_none(params["upload-dir"]),
                                    uparams["new-commit"],
                                    conv_copyformat(uparams["copy-formats"]),
                                    uparams["commit-msg"],
                                    conv_remotelist(uparams["remote-list"]),
                                    uparams["use-refs"], uparams["force-push"],
                                    uparams["clean-update"],
                                    conv_taglist(uparams["tag-list"]))
                else:
                    Exception("Generate bundle failed")

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            if self.cfg["bundle"]["enable"]:
                self.logger.info(
                    format_h1("Successfully created git bundle", tab=2))

        try:
            params = self.cfg.get("quilt", None)

            if params is not None and params["enable"]:
                if not self.valid_git:
                    Exception(
                        "Kernel is not a git repo. So quilt option is not supported"
                    )

                base = params["base"]["value"]
                if params["base"]["auto"]:
                    base = self.git.cmd('describe --abbrev=0 --tags')[1]
                base = str_none(base)

                head = params["head"]["value"]
                if params["head"]["auto"]:
                    head = self.git.head_sha()
                head = str_none(head)

                if head is None or base is None:
                    Exception("Invalid base/head %s/%s", base, head)

                self.logger.info("head:%s base:%s", head, base)

                quilt = self.generate_quilt(str_none(params["branch"]), base,
                                            head, params['outname'],
                                            str_none(params["sed-file"]),
                                            str_none(params["audit-script"]),
                                            params['series-comment'])

                uparams = params.get("upload-params", None)

                if quilt is not None and uparams is not None:
                    ret = self.git_upload(
                        quilt, str_none(params["upload-dir"]),
                        uparams["new-commit"],
                        conv_copyformat(uparams["copy-formats"]),
                        uparams["commit-msg"],
                        conv_remotelist(uparams["remote-list"]),
                        uparams["use-refs"], uparams["force-push"],
                        uparams["clean-update"],
                        conv_taglist(uparams["tag-list"]))

                    if ret is None:
                        Exception("Quilt upload failed")
                else:
                    Exception("Generate quilt failed")

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            if self.cfg["quilt"]["enable"]:
                self.logger.info(
                    format_h1("Successfully created quilt series", tab=2))

        try:
            params = self.cfg.get("tar", None)
            if params is not None and params["enable"]:
                tarname = self.generate_tar_gz(params["outname"],
                                               str_none(params["branch"]),
                                               params["skip-files"])
                uparams = params.get("upload-params", None)
                if tarname is not None and uparams is not None:
                    ret = self.git_upload(
                        tarname, str_none(params["upload-dir"]),
                        uparams["new-commit"],
                        conv_copyformat(uparams["copy-formats"]),
                        uparams["commit-msg"],
                        conv_remotelist(uparams["remote-list"]),
                        uparams["use-refs"], uparams["force-push"],
                        uparams["clean-update"],
                        conv_taglist(uparams["tag-list"]))
                    if ret is None:
                        Exception("tar upload failed")
                else:
                    Exception("Create tar file failed")

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            if self.cfg["tar"]["enable"]:
                self.logger.info(
                    format_h1("Successfully created tar file", tab=2))

        try:
            params = self.cfg.get("upload-kernel", None)
            if params is not None and params["enable"]:
                uparams = params.get("upload-params", None)
                ret = self.git_upload(self.src, str_none(params["upload-dir"]),
                                      uparams["new-commit"],
                                      conv_copyformat(uparams["copy-formats"]),
                                      uparams["commit-msg"],
                                      conv_remotelist(uparams["remote-list"]),
                                      uparams["use-refs"],
                                      uparams["force-push"],
                                      uparams["clean-update"],
                                      conv_taglist(uparams["tag-list"]))
                if ret is None:
                    Exception("Upload kernel failed")

        except Exception as e:
            self.logger.error(e, exc_info=True)
        else:
            if self.cfg["upload-kernel"]["enable"]:
                self.logger.info(
                    format_h1("Successfully Uploaded Linux kernel", tab=2))

        return True

    def git_upload(self,
                   src,
                   uploaddir=None,
                   new_commit=False,
                   copy_formats=None,
                   commit_msg="Inital commit",
                   remote_list=None,
                   use_refs=False,
                   force_update=False,
                   clean_update=False,
                   tag_list=None):
        """
        Upload the kernel or tar file or quilt series to a given remote_list.
        :param src: Source dir. Either kernel, quilt or tar file.
        :param uploaddir: Directory used for uploading the new changes. If none, then temp_dir will be used.
        :param new_commit: Create new commit and then upload (True|False).
        :param copy_formats: List of glob format of the files to be added to the commit.
        :param commit_msg: Commit Message
        :param remote_list: [(Remote Name, Remote URL, Remote branch, Remote dest dir)]
        :param use_refs: Use refs/for when pushing (True | False).
        :param force_update: Force update when pushing (True | False).
        :param clean_update: Remove existing content before adding and pushing your change (True | False).
        :param tag_list: [("name", "msg")], None if no tagging support needed. Use None for no message.
        :return: Path of the repo that was pushed on success, None on any failure.
        """

        # Check for data validity.
        repo_dir = src
        # Check if the source directory is valid.
        src = os.path.abspath(src)
        if not os.path.exists(src):
            self.logger.error("Source %s does not exit", src)
            return None

        # Check the validity of tags
        if tag_list is not None:
            if not isinstance(tag_list, list):
                self.logger.error("Invalid tag type")
                return None
            for tag in tag_list:
                if not isinstance(tag, tuple) or len(tag) != 2:
                    self.logger.error("Invalid tag %s", tag)
                    return None

        # Check for validity of copyformats
        if copy_formats is not None:
            if not isinstance(copy_formats, list):
                self.logger.error("Invalid copy format %s", copy_formats)
                return None

        # Create a valid out directory
        # A temp dir is always created (even when uploaddir is given) so the
        # cleanup in the except/else blocks below is unconditional.
        temp_dir = tempfile.mkdtemp()
        if uploaddir is not None:
            uploaddir = os.path.abspath(uploaddir)
        else:
            uploaddir = temp_dir

        def copyanything(src, dst):
            # Copy either a whole directory's contents or a single file into dst.
            self.logger.info("Copy everything from %s to %s", src, dst)
            if not os.path.isfile(src):
                sh = PyShell(wd=src, logger=self.logger)
                sh.cmd("cp -a %s/* %s/" % (src, dst), shell=True)
            else:
                sh = PyShell(wd=self.src, logger=self.logger)
                sh.cmd("cp -a %s %s/" % (src, dst), shell=True)

        def upload_tags(remote, tag_list):
            # Create each tag (annotated when a message is given) and push it.
            # NOTE(review): 'git' here is the GitShell bound by the loop below,
            # captured via closure — it is the repo that was just pushed.
            if tag_list is not None:
                for tag in tag_list:
                    # Push the tags if required
                    if tag[0] is not None:
                        if tag[1] is not None:
                            ret = git.cmd('tag', '-a', tag[0], '-m', tag[1])[0]
                        else:
                            ret = git.cmd('tag', tag[0])[0]
                        if ret != 0:
                            raise Exception("git tag %s failed" % (tag[0]))

                        if git.cmd('push', remote, tag[0])[0] != 0:
                            raise Exception("git push tag %s to %s failed" %
                                            (tag[0], remote))

        try:
            for remote in remote_list:
                repo_dir = src
                if new_commit:
                    # Stage the content in uploaddir as a fresh commit on top
                    # of the remote branch, then push from there.
                    git = GitShell(wd=uploaddir,
                                   init=True,
                                   remote_list=[(remote[0], remote[1])],
                                   fetch_all=True)

                    git.cmd('clean -xdfq')
                    git.cmd('reset --hard')

                    if git.cmd("checkout",
                               remote[0] + '/' + remote[2])[0] != 0:
                        raise Exception(
                            "Git checkout remote:%s branch:%s failed",
                            remote[1], remote[2])

                    # If clean update is given, remove the contents of current repo.
                    # NOTE(review): the command is 'git rm *' but the error
                    # message mentions '*.patch' — message/command mismatch.
                    if clean_update and git.cmd('rm *')[0] != 0:
                        raise Exception("git rm -r *.patch failed")

                    dest_dir = os.path.join(
                        uploaddir,
                        remote[3]) if remote[3] is not None else uploaddir
                    if not os.path.exists(dest_dir):
                        os.makedirs(dest_dir)

                    # NOTE(review): when copy_formats is set but src is a
                    # single file, nothing is copied — confirm this is intended.
                    if copy_formats is None:
                        copyanything(src, dest_dir)
                    elif os.path.isdir(src):
                        file_list = []
                        for format in copy_formats:
                            file_list += glob.glob(os.path.join(src, format))
                        for item in file_list:
                            shutil.copyfile(
                                item,
                                os.path.join(dest_dir, os.path.basename(item)))

                    if git.cmd('add *')[0] != 0:
                        raise Exception("git add command failed")

                    # Non-zero (truthy) exit status means the commit failed.
                    if git.cmd('commit -s -m "' + commit_msg + '"')[0]:
                        raise Exception("git commit failed")

                    repo_dir = uploaddir

                # Push from repo_dir (src itself, or uploaddir when a new
                # commit was staged above).
                git = GitShell(wd=repo_dir,
                               init=True,
                               remote_list=[(remote[0], remote[1])],
                               fetch_all=True)

                rbranch = remote[2]

                if git.push('HEAD',
                            remote[0],
                            rbranch,
                            force=force_update,
                            use_refs=use_refs)[0] != 0:
                    raise Exception("git push to %s %s failed" %
                                    (remote[0], rbranch))

                upload_tags(remote[0], tag_list)

        except Exception as e:
            self.logger.error(e)
            shutil.rmtree(temp_dir)
            return None
        else:
            shutil.rmtree(temp_dir)
            return repo_dir

    def generate_quilt(self,
                       local_branch=None,
                       base=None,
                       head=None,
                       patch_dir='quilt',
                       sed_file=None,
                       audit_script=None,
                       series_comment=''):
        """
        Generate the quilt series for the given kernel source.
        :param local_branch: Name of the kernel branch.
        :param base: First SHA ID.
        :param head: Head SHA ID.
        :param patch_dir: Output directory for storing the quilt series. If it exists, it will be removed.
        :param sed_file: Sed format list.
        :param audit_script: Audit script. It will be called with patch_dir as input. If it returns a non-zero value
                             then this function will exit and return None.
        :param series_comment: Comments to add on top of series file.
        :return: Return patch_dir or None
        """

        # Fall back to y when x is None.
        set_val = lambda x, y: y if x is None else x

        self.logger.info(format_h1("Generating quilt series", tab=2))

        if not self.valid_git:
            self.logger.error("Invalid git repo %s", self.src)
            return None

        if sed_file is not None and not os.path.exists(sed_file):
            self.logger.error("sed pattern file %s does not exist", sed_file)
            return None

        # Always start from a clean output directory.
        if os.path.exists(os.path.abspath(patch_dir)):
            shutil.rmtree(patch_dir, ignore_errors=True)

        os.makedirs(patch_dir)

        # Default to the currently checked-out branch.
        local_branch = set_val(local_branch, self.git.current_branch())

        if self.git.cmd('checkout', local_branch)[0] != 0:
            self.logger.error("git checkout command failed in %s", self.src)
            return None

        try:

            series_file = os.path.join(patch_dir, 'series')

            # if base SHA is not given use TAIL as base SHA
            if base is None:
                base = self.git.base_sha()
                if base is None:
                    raise Exception("git log command failed")

            # if head SHA is not given use HEAD as head SHA
            if head is None:
                head = self.git.head_sha()
                if head is None:
                    raise Exception("git fetch head SHA failed")

            # Create the list of patches 'git format-patch -C -M base..head -o patch_dir'
            ret, out, err = self.git.cmd('format-patch', '-C', '-M',
                                         base.strip() + '..' + head.strip(),
                                         '-o', patch_dir)
            if ret != 0:
                raise Exception(
                    "git format patch command failed out: %s error: %s" %
                    (out, err))

            # Format the patches using sed
            if sed_file is not None:
                ret, out, err = self.sh.cmd('sed -i -f%s %s/*.patch' %
                                            (sed_file, patch_dir),
                                            shell=True)
                if ret != 0:
                    raise Exception("sed command failed out: %s error: %s" %
                                    (out, err))

            # Make sure the patches passes audit check.
            if audit_script is not None:
                ret, out, err = self.sh.cmd(audit_script,
                                            patch_dir,
                                            shell=True)
                if ret != 0:
                    raise Exception("Audid check failed out: %s error: %s" %
                                    (out, err))

            # Write series file comments.
            with open(series_file, 'w+') as fobj:
                fobj.write(series_comment)

            # Write the list of series file.
            # The shell append ('>>') preserves the comment written above.
            ret, out, err = self.sh.cmd('ls -1 *.patch >> series',
                                        wd=patch_dir,
                                        shell=True)
            if ret != 0:
                raise Exception(
                    "Writing to patch series file failed. Out:%s Error: %s" %
                    (out, err))

        except Exception as e:
            # Clean up the partially-built series before reporting failure.
            if os.path.exists(patch_dir):
                shutil.rmtree(patch_dir)
            self.logger.error(e, exc_info=True)
            return None
        else:
            return patch_dir

    def generate_git_bundle(self,
                            outfile,
                            mode='branch',
                            local_branch=None,
                            head=None,
                            base=None,
                            commit_count=0):
        """
        Create git bundle for the given kernel source. Git bundle can created in three different modes.
            1. branch - Given git branch will be bundled.
            2. commit_count - Given number of commits will be bundled.
            3. diff - Range of commits will be bundled.
        :param outfile: Name of the git bundle.
        :param mode: branch, commit_count, and diff mode.
        :param local_branch: Name of the git branch.
        :param head: Head SHA ID or Tag
        :param base: First SHA ID or Tag.
        :param commit_count: Number of commits.
        :return: Filename on success, None otherwise.
        """

        set_val = lambda x, y: y if x is None else x

        # Check the validity of bundle mode.
        if mode not in self.bundle_modes:
            self.logger.error("Invalid bundle mode %s", mode)
            return None

        # Check the validity of outfile.
        if outfile is None or outfile == "":
            self.logger.error("Invalid bundle name %s", outfile)
            return None

        # Check whether kernel source is a valid git repo.
        if not self.valid_git:
            self.logger.error("Invalid git repo %s", self.src)
            return None

        # If local branch is none, then current branch will be used.
        local_branch = set_val(local_branch, self.git.current_branch())

        # If the bundle file is already present, delete it.
        outfile = os.path.abspath(outfile)

        self.logger.info(format_h1("Generating git bundle", tab=2))

        try:
            if self.git.cmd('checkout', local_branch)[0] != 0:
                raise Exception("Git checkout command failed in %s" % self.src)

            if mode == 'branch':
                if self.git.cmd('bundle', 'create', outfile,
                                local_branch)[0] != 0:
                    raise Exception("Git bundle create command failed")

            if mode == 'commit_count':
                if self.git.cmd('bundle', 'create', outfile,
                                '-' + str(commit_count), local_branch)[0] != 0:
                    raise Exception("Git bundle create command failed")

            if mode == 'diff' and head is not None and base is not None:
                if self.git.cmd('bundle', 'create', outfile,
                                str(base) + '..' + str(head))[0] != 0:
                    raise Exception("Git bundle create command failed")
        except Exception as e:
            self.logger.error(e, exc_info=True)
            return None
        else:
            return outfile

    def generate_tar_gz(self, outfile, branch=None, skip_files=None):
        """
        Create kernel tar file (xz-compressed via 'tar -Jcf').
        :param outfile: Name of the tar file.
        :param branch: Git branch.
        :param skip_files: List of files needs to be skipped; defaults to ['.git'].
        :return: Filename on success, None otherwise.
        """
        self.logger.info(format_h1("Generating tar gz", tab=2))

        # Avoid the mutable-default-argument pitfall; None means ['.git'].
        if skip_files is None:
            skip_files = ['.git']

        # Check if outfile is valid.
        if outfile is None or outfile == "":
            self.logger.error("Invalid output file %s name\n", outfile)
            return None

        # If branch option is used, then kernel source should be a valid git repo.
        if branch is not None and self.valid_git:
            if self.git.cmd('checkout', branch)[0] != 0:
                self.logger.error("Git checkout branch %s failed in %s",
                                  branch, self.src)
                return None

        # Emit one --exclude per entry. The previous '--exclude={a,b}' form
        # depended on bash brace expansion, which /bin/sh never performs and
        # which does not happen for a single entry either — so '.git' was not
        # actually excluded.
        tar_cmd = "tar"
        for skip in skip_files:
            tar_cmd += " --exclude=%s" % skip
        tar_cmd += ' -Jcf'

        try:
            ret = self.sh.cmd(
                "%s %s %s" %
                (tar_cmd, os.path.abspath(outfile), os.path.abspath(self.src)),
                shell=True)
            if ret[0] != 0:
                raise Exception("Create tar command failed")
        except Exception as e:
            self.logger.error(e, exc_info=True)
            return None
        else:
            return outfile
def main(path):
    """
    Ingest scraped post data from *path* into the database.

    Parses the dump with JSONParser, then upserts the account, its posts,
    and each post's comments/users through the module-level SQLAlchemy
    ``session``. On Ctrl-C the operator is asked whether to commit the
    partial work.

    NOTE(review): relies on module-level names (session, Account, Post,
    Comment, User, JSONParser, time) that are not visible in this chunk —
    presumably imported/created earlier in the file; verify.
    """

    
    data = JSONParser(path)

    try:
        #check account duplicates
        # NOTE(review): the same query runs twice (existence check + fetch);
        # caching the first .first() result would halve the round trips.
        if session.query(Account).filter_by(name=data.account_name).first():
            acc = session.query(Account).filter_by(name=data.account_name).first()
        else:
            acc = Account(name=data.account_name)
            session.add(acc)
            
             
        # Shortcodes already stored for this account, used to skip duplicates.
        latest_records =  [i.shortcode for i in session.query(Account).filter_by(name=data.account_name).first().posts]     
        # NOTE(review): list.index() inside the comprehension makes this
        # O(n^2); a set of latest_records plus enumerate would be linear.
        filtered_indices = [data.shortcodes.index(s) for s in data.shortcodes if not s in latest_records]


        print(f"{len(filtered_indices)} new posts found in {path}, {len(data.shortcodes)-len(filtered_indices)} duplicates excluded")

        # Brief pause — presumably so the summary is readable before the
        # ingest output starts scrolling; TODO confirm.
        time.sleep(2)


        for n,idx in enumerate(filtered_indices):
            # Unpack the post record and its comment threads.
            # NOTE(review): .values() unpacking assumes a fixed key order in
            # the 'post' dict — fragile; confirm JSONParser guarantees it.
            shortcode,caption, comnum, likes, tag_num, video, time_at = data.get_post(idx)['post'].values()
            users_list = [i['owner'] for i in  data.get_post(idx)['comments']]        
            comments_list = [i['text'] for i in  data.get_post(idx)['comments']]
            timestamps_list = [i['time_created'] for i in data.get_post(idx)['comments']]
            
            #check post duplicates
            if session.query(Post).filter_by(shortcode=shortcode).first():
                post = session.query(Post).filter_by(shortcode=shortcode).first()
            else:
                post = Post(caption=caption, tag_num = tag_num, comment_num=comnum, like=likes, video=video, posted_time=time_at, shortcode=shortcode, account_id=acc.id)          
                session.add(post)

            post.account = acc    
    
            # One (user, text, timestamp) triple per comment.
            talks =  zip(users_list, comments_list, timestamps_list)
            
            for person, typing, at in talks:                
                comment = Comment(text=typing, commented_time=at)

                # Reuse an existing user row when one exists.
                if session.query(User).filter_by(name=person).first():
                    user = session.query(User).filter_by(name=person).first()
                else:
                    user = User(name=person)
                    session.add(user)

                session.add(comment)
                comment.post  = post
                comment.account = acc
                comment.user = user
            print(len(filtered_indices)-n, "posts remains")
            
        # Single commit at the end keeps the whole ingest atomic.
        session.commit()
        
    except KeyboardInterrupt:
        # Let the operator decide whether partial work is worth keeping.
        answer = input('commit session?[y/n] : ')
        if answer.lower() == 'y':
            session.commit()
        else:
            print('data are not recorded')
            session.rollback()