def reportstatus(request):
    if read_config('ui.main.showversion', True) is True:
        tags = []
        versions = {}
        rtag = {}
        for tag in GitTag.objects.filter(Q(rptotal__gt=0) | Q(running=True)).order_by("-id"):
            # fold all -rcN tags of a release into a single row (e.g. v3.10-rc2 -> v3.10)
            version = re.sub(r'-rc\d+$', '', tag.name)
            if tag.repo.name not in versions:
                versions[tag.repo.name] = version
                rtag[tag.repo.name] = {'version': version, 'rptotal': tag.rptotal,
                                       'repoid': tag.repo.id, 'reponame': tag.repo.name,
                                       'running': tag.running}
                tags.append(rtag[tag.repo.name])
            elif versions[tag.repo.name] != version:
                versions[tag.repo.name] = version
                rtag[tag.repo.name] = {'version': version, 'rptotal': tag.rptotal,
                                       'repoid': tag.repo.id, 'reponame': tag.repo.name,
                                       'running': tag.running}
                tags.append(rtag[tag.repo.name])
            else:
                # same release as the previous tag of this repo: accumulate the totals
                rtag[tag.repo.name]['rptotal'] += tag.rptotal

        context = RequestContext(request)
        limit = read_config('ui.main.showitems', len(tags))
        if len(tags) > limit:
            context['tags'] = tags[0:limit]
        else:
            context['tags'] = tags
        return render_to_response("report/statusbyversion.html", context)
    else:
        tags = GitTag.objects.filter(Q(rptotal__gt=0) | Q(running=True)).order_by("-id")
        context = RequestContext(request)
        context['tags'] = tags
        return render_to_response("reportstatus.html", context)
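# A minimal standalone sketch of the version folding used above: strip a trailing
# -rcN suffix and accumulate rptotal per (repo, version). fold_rc_tags() is a
# hypothetical helper for illustration only; the real view works directly on
# GitTag querysets ordered by "-id".
import re

def fold_rc_tags(tag_rows):
    """tag_rows: iterable of (reponame, tagname, rptotal) tuples, newest first."""
    folded = []
    current = {}   # reponame -> row currently being accumulated
    versions = {}  # reponame -> version of that row
    for reponame, tagname, rptotal in tag_rows:
        version = re.sub(r'-rc\d+$', '', tagname)
        if versions.get(reponame) != version:
            versions[reponame] = version
            current[reponame] = {'reponame': reponame, 'version': version, 'rptotal': rptotal}
            folded.append(current[reponame])
        else:
            current[reponame]['rptotal'] += rptotal
    return folded

# fold_rc_tags([('linux', 'v3.10-rc2', 5), ('linux', 'v3.10-rc1', 3)])
#   -> [{'reponame': 'linux', 'version': 'v3.10', 'rptotal': 8}]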
def get_changelist(self, scommit, ecommit, update, delta=False):
    if scommit == ecommit and len(scommit) != 0:
        return []

    dateusing = read_config('git.diff.using.datetime', False)
    daysdelta = read_config('git.diff.datetime.delta', 1)
    if self.is_linux_next():
        if dateusing is True and delta is False:
            if not isinstance(update, datetime.datetime):
                # no usable timestamp: fall back to the last two days
                stime = strftime("%Y-%m-%d %H:%M:%S", localtime(time() - 2 * 24 * 60 * 60))
            else:
                stime = update - datetime.timedelta(days=daysdelta)
            lines = execute_shell('cd %s; git log --after="%s" --name-only --format="%%" | sort -u | grep "\\w"'
                                  % (self._dpath, stime))
            return lines
        else:
            scommit = self.get_stable()
            lines = execute_shell('cd %s; git diff --name-only %s...%s'
                                  % (self._dpath, scommit, ecommit))
            return lines
    else:
        # check for None before calling len() to avoid a TypeError
        if scommit is None or len(scommit) == 0:
            # first commit of the mainline kernel tree
            scommit = '1da177e4c3f41524e886b7f1b8a0c1fc7321cac2'
        lines = execute_shell('cd %s; git diff --name-only %s...%s'
                              % (self._dpath, scommit, ecommit))
        return lines
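# The methods here treat execute_shell() as "run a command through the shell and
# return its stdout split into lines". The project's real implementation is not
# shown in this section; the following is only a minimal sketch of that assumed
# contract, using subprocess from the standard library.
import subprocess

def execute_shell(args):
    proc = subprocess.Popen(args, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = proc.communicate()
    # drop empty elements such as the one produced by a trailing newline
    return [line for line in out.decode('utf-8', 'replace').split('\n') if line]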
def is_change_obsoleted(self, fname, diff):
    dates = []
    days = read_config('patch.obsoleted.days', 30)
    try:
        for line in find_remove_lines(diff):
            # date of the last commit that touched this removed line
            dates = execute_shell("cd %s; git log -n 1 -S '%s' --pretty=format:%%ci%%n %s"
                                  % (self._dpath, line, fname))
            if len(dates) == 0:
                continue
            dt = datetime.datetime.strptime(' '.join(dates[0].split(' ')[:-1]),
                                            "%Y-%m-%d %H:%M:%S")
            delta = datetime.datetime.now() - dt
            if delta.days < days:
                # the code being patched changed recently, so the change is not obsolete
                return False
        return True
    except:
        # on any shell or parse error, err on the side of treating the change as obsolete
        return True
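# find_remove_lines() is assumed to return the contents of the '-' lines of a
# unified diff (skipping the '---' file header), i.e. the code the generated patch
# would delete; git log -S then finds when each such line last changed. This is
# only a sketch of that assumed behaviour, not the project's implementation.
def find_remove_lines(diff):
    lines = []
    for line in diff if isinstance(diff, list) else diff.splitlines():
        if line.startswith('-') and not line.startswith('---'):
            stripped = line[1:].strip()
            if stripped:
                lines.append(stripped)
    return lines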
def _should_report(self):
    cocci = self._coccis[self._token]
    spfile = cocci.fullpath()
    if not os.path.exists(spfile):
        self.warning('sp_file %s does not exist' % spfile)
        return False

    timeout = read_config('cocci.timeout', 60)
    args = '/usr/bin/spatch %s -I %s -timeout %d -very_quiet -sp_file %s %s' % (
            cocci.options, os.path.join(self._repo, 'include'), timeout,
            spfile, self._get_file_path())
    self._diff = self._execute_shell(args)
    if len(self._diff) > 1:
        # spatch emits a unified diff when the semantic patch matched something
        if self._diff[0].find('diff ') == 0 or self._diff[0].find('--- ') == 0:
            return True
        else:
            # anything else is a warning or error from spatch itself
            self.warning('spatch output for %s' % self._fname)
            self.warning('\n'.join(self._diff))
    return False
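# _should_report() counts a semantic patch as a hit only when the spatch output
# starts with a unified diff header; anything else is logged as a warning. The
# predicate below (a hypothetical name, not part of the project) restates just
# that decision rule on a plain list of output lines.
def looks_like_diff(output_lines):
    if len(output_lines) <= 1:
        return False
    first = output_lines[0]
    return first.startswith('diff ') or first.startswith('--- ')

# looks_like_diff(['--- a/drivers/foo.c', '+++ b/drivers/foo.c'])  -> True
# looks_like_diff(['warning: something went wrong'])               -> False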
def _guest_email_list(self):
    mailto = []
    mailcc = []
    nolkml = True
    skiplkml = False
    commit_signer = ''
    commit_signer_list = []
    # special-case rules: when one of the 'cc' addresses is present, drop the
    # addresses in 'rmto'/'rmcc' so the patch is not sent to both
    _re_list = [{'cc': ['*****@*****.**', '*****@*****.**'],
                 'rmto': ['David S. Miller <*****@*****.**>',
                          '"David S. Miller" <*****@*****.**>'],
                 'rmcc': ['*****@*****.**']},
                {'cc': ['*****@*****.**'],
                 'rmto': [],
                 'rmcc': ['*****@*****.**']}]
    lists = execute_shell("cd %s ; /usr/bin/perl ./scripts/get_maintainer.pl -f %s --remove-duplicates --nogit"
                          % (self._repo, self._fname))
    for m in lists:
        # skip User <mail> (commit_signer:1/15=7%)
        if re.search('\(commit_signer:', m) != None:
            csm = re.sub('\([^>]*\)$', '', m)
            if len(commit_signer) == 0:
                commit_signer = csm
            commit_signer_list.append(csm)
            continue
        m = re.sub('\([^>]*\)$', '', m).strip()
        if re.search(r'<.*>', m) != None:
            mailto.append(m)
        elif re.search('*****@*****.**', m) != None:
            if len(mailcc) == 0:
                mailcc.append(m)
            else:
                skiplkml = True
        else:
            if re.search('@vger.kernel.org', m) != None:
                nolkml = False
            if len(m.strip()) != 0:
                mailcc.append(m)

    if nolkml == True and skiplkml == True:
        mailcc.append('*****@*****.**')

    for rml in _re_list:
        for cc in rml['cc']:
            if mailcc.count(cc) != 0:
                for rto in rml['rmto']:
                    if mailto.count(rto) != 0:
                        mailto.remove(rto)
                for rcc in rml['rmcc']:
                    if mailcc.count(rcc) != 0:
                        mailcc.remove(rcc)
                break

    if mailcc.count('*****@*****.**') != 0 or mailcc.count('*****@*****.**') != 0:
        if mailcc.count('*****@*****.**') != 0:
            mailcc.remove('*****@*****.**')
        if mailto.count('David S. Miller <*****@*****.**>') != 0:
            mailto.remove('David S. Miller <*****@*****.**>')
        if mailto.count('"David S. Miller" <*****@*****.**>') != 0:
            mailto.remove('"David S. Miller" <*****@*****.**>')
        if len(mailto) == 0 and mailcc.count('*****@*****.**') != 0:
            mailto.append('David S. Miller <*****@*****.**>')

    if read_config('git.use_commit_singer', True):
        for m in commit_signer_list:
            mailto.append(m)
    else:
        if len(mailto) == 0 and len(commit_signer) != 0:
            mailto.append(commit_signer)

    elist = ""
    if len(mailto) != 0:
        elist += "To: %s" % mailto[0].strip()
        to = mailto[1:]
        for t in to:
            elist += ",\n %s" % t.strip()
    if len(mailcc) != 0:
        prefix = 'Cc'
        # the To list may be empty, in which case the Cc entries become To
        if len(mailto) == 0:
            prefix = 'To'
        elist += "\n%s: %s" % (prefix, mailcc[0].strip())
        cc = mailcc[1:]
        for c in cc:
            elist += ",\n %s" % c.strip()
    elist += '\n'
    self._mlist = elist
    return elist
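# _guest_email_list() parses get_maintainer.pl output, where a line typically
# looks like 'Name <address> (maintainer:SUBSYS)', 'Name <address>
# (commit_signer:1/15=7%)' or a bare mailing-list address. classify() below is a
# hypothetical helper showing the same regex-based split used above; the
# example.org addresses are placeholders, not project data.
import re

def classify(line):
    if re.search(r'\(commit_signer:', line):
        return ('signer', re.sub(r'\([^>]*\)$', '', line).strip())
    line = re.sub(r'\([^>]*\)$', '', line).strip()
    if re.search(r'<.*>', line):
        return ('to', line)      # a named person goes to To:
    return ('cc', line)          # a bare list address goes to Cc:

# classify('Jane Doe <jane@example.org> (maintainer:FOO DRIVER)')
#   -> ('to', 'Jane Doe <jane@example.org>')
# classify('netdev@example.org (open list:NETWORKING)')
#   -> ('cc', 'netdev@example.org')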
def check_patch(repo, git, rtag, flists, commit):
    count = 0
    scaninfo = []
    logs = ScanLog(reponame=repo.name, tagname=rtag.name,
                   starttime=strftime("%Y-%m-%d %H:%M:%S", localtime()),
                   desc='Processing, please wait...')
    logs.save()
    logger = MyLogger()
    logger.logger.info('%d Files changed' % len(flists))
    #logger.logger.info('=' * 40)
    #logger.logger.info('%s' % '\n'.join(flists))
    #logger.logger.info('=' * 40)

    sche_weekend_enable = read_config('patch.schedule.weekend.enable', True)
    sche_weekend_limit = read_config('patch.schedule.weekend.limit', 600)
    sche_weekend_delta = read_config('patch.schedule.weekend.delta', 90)
    sche_obsolete_skip = read_config('patch.schedule.obsolete.skip', False)
    weekday = datetime.datetime.now().weekday()

    for dot in patch_engine_list():
        scount = 0
        test = dot(repo.dirname(), logger.logger, repo.builddir())
        for i in range(test.tokens()):
            try:
                rtype = None
                try:
                    rtype = Type.objects.filter(id=test.get_type())[0]
                except:
                    test.next_token()
                    continue

                if rtype.status == False:
                    test.next_token()
                    continue

                if (rtype.flags & TYPE_SCAN_NEXT_ONLY) != 0 and not git.is_linux_next():
                    test.next_token()
                    continue

                if (rtype.type == 0 and sche_weekend_enable is True and
                        len(flists) > sche_weekend_limit and weekday < 5):
                    # if no patch of this cleanup type was generated within the last
                    # sche_weekend_delta days, schedule the scan for the weekend only
                    stime = datetime.datetime.now() - datetime.timedelta(days=sche_weekend_delta)
                    if Patch.objects.filter(type=rtype, date__gte=stime).count() == 0:
                        logger.info('Delay scan type %d to weekend' % test.get_type())
                        test.next_token()
                        continue

                cmts = GitCommit.objects.filter(repo=repo, type=rtype)
                if len(cmts) == 0:
                    cmt = GitCommit(repo=repo, type=rtype)
                    cmt.save()
                else:
                    cmt = cmts[0]

                if cmt.commit == commit:
                    test.next_token()
                    continue

                if repo.delta == False:
                    oldcommit = cmt.commit
                    if oldcommit != repo.commit:
                        if git.is_linux_next():
                            oldcommit = git.get_stable()
                        rflists = git.get_changelist(oldcommit, commit, None, True)
                    else:
                        rflists = flists
                else:
                    rflists = flists

                if (rtype.type == 0 and sche_weekend_enable is True and
                        len(rflists) > sche_weekend_limit and weekday < 5):
                    stime = datetime.datetime.now() - datetime.timedelta(days=sche_weekend_delta)
                    if Patch.objects.filter(type=rtype, date__gte=stime).count() == 0:
                        logger.info('Delay scan type %d to weekend' % test.get_type())
                        test.next_token()
                        continue

                logger.info('Starting scan type %d, total %d files' % (test.get_type(), len(rflists)))

                exceptfiles = []
                for fn in ExceptFile.objects.filter(type=rtype):
                    exceptfiles.append(fn.file)

                pcount = 0
                for sfile in rflists:
                    if not is_source_file(sfile):
                        continue

                    if exceptfiles.count(sfile) != 0:
                        logger.logger.info('skip except file %s, type %d' % (sfile, rtype.id))
                        continue

                    # treat a patch marked as Rejected like an except file
                    if Patch.objects.filter(file=sfile, type=rtype, status=STATUS_REJECTED).count() > 0:
                        continue

                    # treat a patch marked as Applied with an empty commit as an existing patch
                    if Patch.objects.filter(file=sfile, type=rtype, status=STATUS_ACCEPTED, commit='').count() > 0:
                        continue

                    patchs = Patch.objects.filter(file=sfile, type=rtype)
                    rpatchs = []
                    for p in patchs:
                        if p.status not in [STATUS_NEW, STATUS_SENT, STATUS_MARKED]:
                            continue
                        rpatchs.append(p)

                    test.set_filename(sfile)
                    # the source file may have been removed since the last scan
                    if not os.path.exists(test._get_file_path()):
                        for p in rpatchs:
                            p.status = STATUS_REMOVED
                            p.save()
                        continue

                    # if the same file already has a patch of this type, ignore it,
                    # because the previous patch has not been accepted yet
                    should_patch = test.should_patch()
                    if test.has_error():
                        continue

                    if len(rpatchs) != 0 and should_patch == False:
                        for p in rpatchs:
                            if p.status == STATUS_SENT:
                                p.status = STATUS_ACCEPTED
                            elif p.mergered != 0:
                                mpatch = Patch.objects.filter(id=p.mergered)
                                if len(mpatch) != 0:
                                    if mpatch[0].status == STATUS_SENT:
                                        mpatch[0].status = STATUS_ACCEPTED
                                        p.status = STATUS_ACCEPTED
                                    else:
                                        mpatch[0].status = STATUS_FIXED
                                        p.status = STATUS_FIXED
                                    mpatch[0].save()
                                else:
                                    p.status = STATUS_FIXED
                            else:
                                p.status = STATUS_FIXED
                            p.save()

                    if should_patch == True and len(rpatchs) == 0:
                        text = test.get_patch()

                        if (rtype.flags & TYPE_CHANGE_DATE_CHECK) == TYPE_CHANGE_DATE_CHECK:
                            if git.is_change_obsoleted(sfile, text) is True:
                                continue
                        elif rtype.id > 3000 and rtype.type == 0 and sche_obsolete_skip is True:
                            if git.is_change_obsoleted(sfile, text) is True:
                                logger.logger.info('skip obsoleted file %s, type %d' % (sfile, rtype.id))
                                continue

                        patch = Patch(tag=rtag, file=sfile, type=rtype, status=STATUS_NEW, diff=text)
                        patch.save()

                        # format the patch and cache the result on the Patch record
                        user = patch.username()
                        email = patch.email()

                        desc = test.get_patch_description()
                        title = test.get_patch_title()
                        if desc is None:
                            desc = rtype.pdesc
                        if title is None:
                            title = rtype.ptitle

                        formater = PatchFormater(repo.dirname(), sfile, user, email, title, desc, text)
                        patch.content = formater.format_patch()
                        patch.title = formater.format_title()
                        patch.desc = formater.format_desc()
                        patch.emails = formater.get_mail_list()
                        patch.module = formater.get_module()
                        patch.save()

                        scount += 1
                        pcount += 1

                cmt.commit = commit
                cmt.save()
            except:
                logger.info('Scan ERROR: type %d' % test.get_type())

            logger.info('End scan type %d, patch %d' % (test.get_type(), pcount))
            logs.logs = logger.getlog()
            logs.save()

            test.next_token()

        count += scount
        scaninfo.append("%s: %d" % (test.name(), scount))

    scaninfo.append("total: %d" % (count))
    logs.desc = ', '.join(scaninfo)
    logs.endtime = strftime("%Y-%m-%d %H:%M:%S", localtime())
    logs.logs = logger.getlog()
    logs.save()

    return count
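# check_patch() defers large cleanup scans (rtype.type == 0) to the weekend when no
# patch of that type was produced within the configured window. The gate appears
# twice above; the standalone restatement below uses a hypothetical helper name and
# leaves the "is this a cleanup type" and "recent patch exists" checks to the caller.
import datetime

def should_defer_to_weekend(nfiles, recent_patch_exists, enabled=True,
                            limit=600, now=None):
    """recent_patch_exists: whether a patch of this type was generated within the
    patch.schedule.weekend.delta window (the caller queries that)."""
    now = now or datetime.datetime.now()
    if not enabled or nfiles <= limit:
        return False
    if now.weekday() >= 5:          # Saturday/Sunday: run the scan now
        return False
    return not recent_patch_exists  # on weekdays, defer types that have been quiet

# Example: 800 changed files on a Wednesday with no recent patch of this type
#   -> True (wait for the weekend before scanning).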