def report_semantic_deltascan(request):
    """Force a delta re-scan for the selected coccinelle report engines.

    Reads the comma-separated ``ids`` request parameter (report-engine ids),
    and for each engine resets the scan bookmarks of its associated Type so
    the next scan pass treats the current repository HEAD as already seen:
      - every GitCommit row for (repo, type) is set to the repo's commit,
        creating one if none exists for a repo;
      - the Type's own commit is set to the first repo's commit.

    Returns an HttpResponse describing success or the first failure.
    """
    pids = get_request_paramter(request, 'ids')
    if pids is None:
        return HttpResponse('DELTASCAN ERROR: no patch id specified')

    # Resolve every id up front so a bad id aborts before any DB writes.
    coccis = []
    for i in pids.split(','):
        cocci = CocciReportEngine.objects.filter(id = i)
        if len(cocci) == 0:
            logevent("DELTASCAN: coccinelle semantic [%s], ERROR: id %s does not exists" % (pids, i))
            return HttpResponse('DELTASCAN ERROR: id %s does not exists' % i)
        coccis.append(cocci[0])

    for cocci in coccis:
        # Report-engine Types live at engine id + 10000 — presumably an
        # id-namespace offset; confirm against Type id allocation.
        rtypes = Type.objects.filter(id = cocci.id + 10000)
        if len(rtypes) == 0:
            continue
        rtype = rtypes[0]

        commit = None
        for repo in GitRepo.objects.all():
            # Remember the first repo's commit for the Type bookmark below.
            if commit is None:
                commit = repo.commit
            # Evaluate the queryset once instead of iterating it and then
            # issuing a second COUNT query.
            gcommits = list(GitCommit.objects.filter(repo = repo, type = rtype))
            if len(gcommits) == 0:
                gcommit = GitCommit(repo = repo, type = rtype, commit = repo.commit)
                gcommit.save()
            else:
                for gcommit in gcommits:
                    gcommit.commit = repo.commit
                    gcommit.save()

        if commit is not None:
            rtype.commit = commit
            rtype.save()

    logevent("DELTASCAN: coccinelle semantic [%s], SUCCEED" % pids, True)
    return HttpResponse('DELTASCAN SUCCEED: report engine ids [%s]' % pids)
def semantic_deltascan(request):
    """Force a delta re-scan for the selected coccinelle patch engines.

    Near-duplicate of report_semantic_deltascan, but for patch engines:
    the ``ids`` request parameter names CocciPatchEngine rows, whose Types
    live at engine id + 3000. For each Type, every GitCommit row for
    (repo, type) is set to the repo's current commit (created if missing),
    and the Type's commit is set to the first repo's commit.

    Returns an HttpResponse describing success or the first failure.
    """
    pids = get_request_paramter(request, "ids")
    if pids is None:
        return HttpResponse("DELTASCAN ERROR: no patch id specified")

    # Resolve every id up front so a bad id aborts before any DB writes.
    coccis = []
    for i in pids.split(","):
        cocci = CocciPatchEngine.objects.filter(id=i)
        if len(cocci) == 0:
            logevent("DELTASCAN: coccinelle semantic [%s], ERROR: id %s does not exists" % (pids, i))
            return HttpResponse("DELTASCAN ERROR: id %s does not exists" % i)
        coccis.append(cocci[0])

    for cocci in coccis:
        # Patch-engine Types live at engine id + 3000 — presumably an
        # id-namespace offset; confirm against Type id allocation.
        rtypes = Type.objects.filter(id=cocci.id + 3000)
        if len(rtypes) == 0:
            continue
        rtype = rtypes[0]

        commit = None
        for repo in GitRepo.objects.all():
            # Remember the first repo's commit for the Type bookmark below.
            if commit is None:
                commit = repo.commit
            # Evaluate the queryset once instead of iterating it and then
            # issuing a second COUNT query.
            gcommits = list(GitCommit.objects.filter(repo=repo, type=rtype))
            if len(gcommits) == 0:
                gcommit = GitCommit(repo=repo, type=rtype, commit=repo.commit)
                gcommit.save()
            else:
                for gcommit in gcommits:
                    gcommit.commit = repo.commit
                    gcommit.save()

        if commit is not None:
            rtype.commit = commit
            rtype.save()

    logevent("DELTASCAN: coccinelle semantic [%s], SUCCEED" % pids, True)
    return HttpResponse("DELTASCAN SUCCEED: engine ids [%s]" % pids)
def checkreport(repo, rtag, flists):
    """Run every report engine over the changed files of *repo* and record
    Report rows for tag *rtag*.

    For each engine type: decides the file list (full *flists*, or a git
    changelist since the last recorded commit when repo.delta is false),
    skips excepted/non-source files, updates the status of existing reports
    (fixed/accepted/removed) and creates new STATUS_NEW reports where the
    engine says one is needed.  Progress is written to a ScanLog row.

    Returns the total number of new reports created.
    """
    git = GitTree(repo.name, repo.dirname(), repo.url, repo.commit, repo.stable)
    count = 0
    scaninfo = []
    # ScanLog row is saved early so the UI can show "in progress" state.
    logs = ScanLog(reponame = repo.name, tagname = rtag.name,
                   starttime = strftime("%Y-%m-%d %H:%M:%S", localtime()),
                   desc = 'Processing, please wait...')
    logs.save()

    logger = MyLogger()
    logger.info('%d Files changed.' % len(flists))

    for dot in report_engine_list():
        scount = 0  # reports created by this engine
        test = dot(repo.dirname(), logger.logger, repo.builddir())
        # Engines expose an internal cursor: one "token" per report type.
        for i in range(test.tokens()):
            try:
                rtype = None
                try:
                    rtype = Type.objects.filter(id = test.get_type())[0]
                except:
                    # Unknown type id: skip this token entirely.
                    test.next_token()
                    continue
                if rtype.status == False:
                    # Type disabled by configuration.
                    test.next_token()
                    continue

                # GitCommit remembers the last commit scanned for
                # (repo, type); create the bookmark on first scan.
                cmts = GitCommit.objects.filter(repo = repo, type = rtype)
                if len(cmts) == 0:
                    cmt = GitCommit(repo = repo, type = rtype)
                    cmt.save()
                else:
                    cmt = cmts[0]

                rflists = flists
                if repo.delta == False:
                    # Non-delta repo: diff from the last scanned commit
                    # (or the stable base for linux-next) to HEAD.
                    oldcommit = cmt.commit
                    if oldcommit != repo.commit:
                        if git.is_linux_next():
                            oldcommit = git.get_stable()
                        rflists = git.get_changelist(oldcommit, repo.commit, None, True)
                    else:
                        rflists = flists

                logger.info('Starting scan type %d, total %d files' % (test.get_type(), len(rflists)))

                # Files explicitly excluded for this type.
                exceptfiles = []
                for fn in ExceptFile.objects.filter(type = rtype):
                    exceptfiles.append(fn.file)

                rcount = 0  # reports created for this type
                for fname in rflists:
                    if is_source_file(fname) == False:
                        continue
                    if exceptfiles.count(fname) != 0:
                        continue

                    reports = Report.objects.filter(file = fname, type = rtype)
                    if not os.path.exists(os.path.join(repo.dirname(), fname)):
                        # File vanished from the tree: retire open reports.
                        for r in reports:
                            if r.status in [STATUS_NEW, STATUS_PATCHED]:
                                r.status = STATUS_REMOVED
                                r.save()
                        continue

                    test.set_filename(fname)
                    should_report = test.should_report()
                    if test.has_error():
                        continue

                    if should_report is False:
                        # Engine no longer flags this file: close out any
                        # open reports (and their merge targets).
                        for r in reports:
                            if r.status in [STATUS_NEW, STATUS_PATCHED]:
                                if r.mergered == 0:
                                    r.status = STATUS_FIXED
                                    r.save()
                                else:
                                    # Report was merged into another; keep
                                    # both rows' status in sync.
                                    mreport = Report.objects.filter(id = r.mergered)
                                    if len(mreport) != 0:
                                        if mreport[0].status in [STATUS_SENT]:
                                            mreport[0].status = STATUS_ACCEPTED
                                            r.status = STATUS_ACCEPTED
                                        else:
                                            mreport[0].status = STATUS_FIXED
                                            r.status = STATUS_FIXED
                                        mreport[0].save()
                                    else:
                                        r.status = STATUS_FIXED
                                    r.save()
                            elif r.status in [STATUS_SENT]:
                                r.status = STATUS_ACCEPTED
                                r.save()
                        continue

                    # Still flagged: only file a new report if no live one
                    # (new/patched/sent) already exists.
                    lcount = 0
                    for r in reports:
                        if r.status in [STATUS_NEW, STATUS_PATCHED, STATUS_SENT]:
                            lcount += 1
                    if lcount > 0:
                        continue

                    text = test.get_report()
                    report = Report(tag = rtag, file = fname, type = rtype,
                                    status = STATUS_NEW, reportlog = '\n'.join(text))
                    report.title = rtype.ptitle
                    report.desc = rtype.pdesc
                    report.save()
                    rcount += 1
                    scount += 1

                # Whole type scanned cleanly: advance both bookmarks.
                cmt.commit = repo.commit
                cmt.save()
                rtype.commit = repo.commit
                rtype.save()
            except:
                # NOTE(review): bare except keeps one failing type from
                # killing the whole scan, but swallows the traceback; it
                # also reads rcount below, which is unset if the failure
                # happened before the file loop — worth confirming.
                logger.info('Scan ERROR: type %d' % rtype.id)
            logger.info('End scan type %d, report %d' % (rtype.id, rcount))
            logs.logs = logger.getlog()
            logs.save()
            test.next_token()
        count += scount
        scaninfo.append("%s: %d" % (test.name(), scount))

    scaninfo.append("total report: %d" % (count))
    logs.desc = ', '.join(scaninfo)
    logs.endtime = strftime("%Y-%m-%d %H:%M:%S", localtime())
    logs.logs = logger.getlog()
    logs.save()

    return count
def check_patch(repo, git, rtag, flists, commit):
    """Run every patch engine over the changed files of *repo* and create
    Patch rows for tag *rtag* up to *commit*.

    Mirrors checkreport() but for patches: per engine type it picks the
    file list (delta vs git changelist), honours weekend scheduling limits
    for cleanup types, reconciles the status of existing patches, and
    creates/format new STATUS_NEW patches via PatchFormater.  Progress is
    written to a ScanLog row.

    Returns the total number of new patches created.
    """
    count = 0
    scaninfo = []
    # ScanLog row is saved early so the UI can show "in progress" state.
    logs = ScanLog(reponame = repo.name, tagname = rtag.name,
                   starttime = strftime("%Y-%m-%d %H:%M:%S", localtime()),
                   desc = 'Processing, please wait...')
    logs.save()

    logger = MyLogger()
    logger.logger.info('%d Files changed' % len(flists))
    #logger.logger.info('=' * 40)
    #logger.logger.info('%s' % '\n'.join(flists))
    #logger.logger.info('=' * 40)

    # Scheduling knobs: large cleanup scans can be deferred to weekends.
    sche_weekend_enable = read_config('patch.schedule.weekend.enable', True)
    sche_weekend_limit = read_config('patch.schedule.weekend.limit', 600)
    sche_weekend_delta = read_config('patch.schedule.weekend.delta', 90)
    sche_obsolete_skip = read_config('patch.schedule.obsolete.skip', False)
    weekday = datetime.datetime.now().weekday()  # 0-4 = Mon-Fri

    for dot in patch_engine_list():
        scount = 0  # patches created by this engine
        test = dot(repo.dirname(), logger.logger, repo.builddir())
        # Engines expose an internal cursor: one "token" per patch type.
        for i in range(test.tokens()):
            try:
                rtype = None
                try:
                    rtype = Type.objects.filter(id = test.get_type())[0]
                except:
                    # Unknown type id: skip this token entirely.
                    test.next_token()
                    continue
                if rtype.status == False:
                    # Type disabled by configuration.
                    test.next_token()
                    continue
                if (rtype.flags & TYPE_SCAN_NEXT_ONLY) != 0 and not git.is_linux_next():
                    # Type restricted to the linux-next tree.
                    test.next_token()
                    continue
                if rtype.type == 0 and sche_weekend_enable is True and len(flists) > sche_weekend_limit and weekday < 5:
                    # if we do not have a patch for this cleanup type in
                    # sche_weekend_delta days, schedule scan only on weekend
                    stime = datetime.datetime.now() - datetime.timedelta(days=sche_weekend_delta)
                    if Patch.objects.filter(type = rtype, date__gte=stime).count() == 0:
                        logger.info('Delay scan type %d to weekend' % test.get_type())
                        test.next_token()
                        continue

                # GitCommit remembers the last commit scanned for
                # (repo, type); create the bookmark on first scan.
                cmts = GitCommit.objects.filter(repo = repo, type = rtype)
                if len(cmts) == 0:
                    cmt = GitCommit(repo = repo, type = rtype)
                    cmt.save()
                else:
                    cmt = cmts[0]
                if cmt.commit == commit:
                    # Already scanned up to this commit.
                    test.next_token()
                    continue

                if repo.delta == False:
                    # Non-delta repo: diff from the last scanned commit
                    # (or the stable base for linux-next) to *commit*.
                    oldcommit = cmt.commit
                    if oldcommit != repo.commit:
                        if git.is_linux_next():
                            oldcommit = git.get_stable()
                        rflists = git.get_changelist(oldcommit, commit, None, True)
                    else:
                        rflists = flists
                else:
                    rflists = flists

                # Re-check the weekend limit against the actual file list.
                if rtype.type == 0 and sche_weekend_enable is True and len(rflists) > sche_weekend_limit and weekday < 5:
                    stime = datetime.datetime.now() - datetime.timedelta(days=sche_weekend_delta)
                    if Patch.objects.filter(type = rtype, date__gte=stime).count() == 0:
                        logger.info('Delay scan type %d to weekend' % test.get_type())
                        test.next_token()
                        continue

                logger.info('Starting scan type %d, total %d files' % (test.get_type(), len(rflists)))

                # Files explicitly excluded for this type.
                exceptfiles = []
                for fn in ExceptFile.objects.filter(type = rtype):
                    exceptfiles.append(fn.file)

                pcount = 0  # patches created for this type
                for sfile in rflists:
                    if not is_source_file(sfile):
                        continue
                    if exceptfiles.count(sfile) != 0:
                        logger.logger.info('skip except file %s, type %d' % (sfile, rtype.id))
                        continue
                    # treat patch marked with Rejected as except file
                    if Patch.objects.filter(file = sfile, type = rtype, status = STATUS_REJECTED).count() > 0:
                        continue
                    # treat patch marked with Applied and commit is '' as EXISTS patch
                    if Patch.objects.filter(file = sfile, type = rtype, status = STATUS_ACCEPTED, commit = '').count() > 0:
                        continue

                    # Patches still "live" for this file/type.
                    patchs = Patch.objects.filter(file = sfile, type = rtype)
                    rpatchs = []
                    for p in patchs:
                        if not p.status in [STATUS_NEW, STATUS_SENT, STATUS_MARKED]:
                            continue
                        rpatchs.append(p)

                    test.set_filename(sfile)
                    # source file maybe removed
                    if not os.path.exists(test._get_file_path()):
                        for p in rpatchs:
                            p.status = STATUS_REMOVED
                            p.save()
                        continue

                    # if the same file has a patch for this type, ignore it
                    # because the last patch does not accepted
                    should_patch = test.should_patch()
                    if test.has_error():
                        continue

                    if len(rpatchs) != 0 and should_patch == False:
                        # Engine no longer flags this file: close out live
                        # patches (and their merge targets).
                        for p in rpatchs:
                            if p.status == STATUS_SENT:
                                p.status = STATUS_ACCEPTED
                            elif p.mergered != 0:
                                # Patch was merged into another; keep both
                                # rows' status in sync.
                                mpatch = Patch.objects.filter(id = p.mergered)
                                if len(mpatch) != 0:
                                    if mpatch[0].status == STATUS_SENT:
                                        mpatch[0].status = STATUS_ACCEPTED
                                        p.status = STATUS_ACCEPTED
                                    else:
                                        mpatch[0].status = STATUS_FIXED
                                        p.status = STATUS_FIXED
                                    mpatch[0].save()
                                else:
                                    p.status = STATUS_FIXED
                            else:
                                p.status = STATUS_FIXED
                            p.save()

                    if should_patch == True and len(rpatchs) == 0:
                        text = test.get_patch()

                        # Skip changes already obsoleted upstream.
                        if (rtype.flags & TYPE_CHANGE_DATE_CHECK) == TYPE_CHANGE_DATE_CHECK:
                            if git.is_change_obsoleted(sfile, text) is True:
                                continue
                        elif rtype.id > 3000 and rtype.type == 0 and sche_obsolete_skip is True:
                            if git.is_change_obsoleted(sfile, text) is True:
                                logger.logger.info('skip obsoleted file %s, type %d' % (sfile, rtype.id))
                                continue

                        patch = Patch(tag = rtag, file = sfile, type = rtype,
                                      status = STATUS_NEW, diff = text)
                        patch.save()

                        # format patch and cache to patch
                        user = patch.username()
                        email = patch.email()
                        desc = test.get_patch_description()
                        title = test.get_patch_title()
                        if desc is None:
                            desc = rtype.pdesc
                        if title is None:
                            title = rtype.ptitle
                        formater = PatchFormater(repo.dirname(), sfile, user, email,
                                                 title, desc, text)
                        patch.content = formater.format_patch()
                        patch.title = formater.format_title()
                        patch.desc = formater.format_desc()
                        patch.emails = formater.get_mail_list()
                        patch.module = formater.get_module()
                        patch.save()

                        scount += 1
                        pcount += 1

                # Whole type scanned cleanly: advance the bookmark.
                cmt.commit = commit
                cmt.save()
            except:
                # NOTE(review): bare except keeps one failing type from
                # killing the whole scan, but swallows the traceback; it
                # also reads pcount below, which is unset if the failure
                # happened before the file loop — worth confirming.
                logger.info('Scan ERROR: type %d' % test.get_type())
            logger.info('End scan type %d, patch %d' % (test.get_type(), pcount))
            logs.logs = logger.getlog()
            logs.save()
            test.next_token()
        count += scount
        scaninfo.append("%s: %d" % (test.name(), scount))

    scaninfo.append("total: %d" % (count))
    logs.desc = ', '.join(scaninfo)
    logs.endtime = strftime("%Y-%m-%d %H:%M:%S", localtime())
    logs.logs = logger.getlog()
    logs.save()

    return count