def semantic_move_to_report(request):
    sids = get_request_paramter(request, 'ids')
    if sids is None:
        return HttpResponse('MOVE ERROR: no semantic id specified')

    ids = sids.split(',')
    coccis = []
    for i in ids:
        cocci = CocciPatchEngine.objects.filter(id = i)
        if len(cocci) == 0:
            logevent("MOVE: coccinelle semantic [%s], ERROR: id %s does not exist" % (sids, i))
            return HttpResponse('MOVE ERROR: id %s does not exist' % i)
        coccis.append(cocci[0])

    for cocci in coccis:
        rtypes = Type.objects.filter(id = cocci.id + 3000)
        if len(rtypes) != 0:
            rtype = rtypes[0]
            patchs = Patch.objects.filter(type = rtype)
            efiles = ExceptFile.objects.filter(type = rtype)

            ncocci = CocciReportEngine(file = cocci.file, options = cocci.options,
                                       content = cocci.content)
            ncocci.save()
            rewrite_report_engine(ncocci)

            # re-key the type from the patch engine id range to the report engine id range
            rtype.id = ncocci.id + 10000
            rtype.save()

            # move patches owned by this type over to reports
            for patch in patchs:
                report = Report(tag = patch.tag, type = rtype, status = patch.status,
                                file = patch.file, date = patch.date, mergered = 0,
                                mglist = '', commit = patch.commit, reportlog = patch.diff,
                                diff = patch.diff, title = patch.title, desc = patch.desc,
                                emails = patch.emails, content = patch.content,
                                build = patch.build, buildlog = patch.buildlog)
                report.save()
                tag = patch.tag
                patch.delete()
                tag.total -= 1
                tag.rptotal += 1
                tag.save()

            # re-attach except files owned by this type
            for efile in efiles:
                efile.type = rtype
                efile.save()

        if os.path.exists(cocci.fullpath()):
            os.unlink(cocci.fullpath())
        cocci.delete()

    logevent("MOVE: coccinelle semantic [%s], SUCCEED" % sids, True)
    return HttpResponse('MOVE SUCCEED: engine ids [%s]' % sids)
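
# Illustrative helper, not called by the view above. It only spells out the id
# convention inferred from semantic_move_to_report(): a coccinelle patch engine
# with id N appears to own Type id N + 3000, while a report engine with id N
# owns Type id N + 10000. The offsets are an assumption read from the code, not
# a documented API, and the helper name is hypothetical.
def _cocci_engine_type_id(engine_id, is_report_engine):
    return engine_id + (10000 if is_report_engine else 3000)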
def report_new(request):
    if request.method == "POST":
        tagid = get_request_paramter(request, 'tag')
        typeid = get_request_paramter(request, 'type')
        rfile = get_request_paramter(request, 'file')

        rtags = GitTag.objects.filter(id = tagid)
        if len(rtags) == 0:
            logevent("NEW: report, ERROR: tag id %s does not exist" % tagid)
            return HttpResponse('NEW: report, ERROR: tag id %s does not exist' % tagid)

        rtypes = Type.objects.filter(id = typeid)
        if len(rtypes) == 0:
            logevent("NEW: report, ERROR: type id %s does not exist" % typeid)
            return HttpResponse('NEW: report, ERROR: type id %s does not exist' % typeid)

        report = Report(tag = rtags[0], type = rtypes[0], file = rfile,
                        status = STATUS_NEW, diff = '')
        if not os.path.exists(report.sourcefile()):
            logevent("NEW: report, ERROR: source file %s does not exist" % rfile)
            return HttpResponse('NEW: report, ERROR: source file %s does not exist' % rfile)

        report.title = rtypes[0].ptitle
        report.desc = rtypes[0].pdesc
        report.save()

        # run the matching report engine against the file to fill in the report log
        for dot in report_engine_list():
            test = dot(rtags[0].repo.dirname())
            for i in range(test.tokens()):
                if test.get_type() != rtypes[0].id:
                    test.next_token()
                    continue
                test.set_filename(rfile)
                if test.should_report():
                    text = test.get_report()
                    report.reportlog = '\n'.join(text)
                    report.save()
                break

        rtags[0].rptotal += 1
        rtags[0].save()

        logevent("NEW: report for %s, SUCCEED: new id %s" % (rfile, report.id), True)
        return HttpResponse('NEW: report for file, SUCCEED')
    else:
        repoid = int(get_request_paramter(request, 'repo', '1'))
        tagname = get_request_paramter(request, 'tag')

        context = RequestContext(request)
        context['form'] = ReportNewForm(repoid, tagname)
        return render_to_response("report/reportnew.html", context)
def report_merge(request):
    pids = get_request_paramter(request, 'ids')
    if pids is None:
        return HttpResponse('MERGE ERROR: no report id specified')

    ids = pids.split(',')
    if len(ids) < 2:
        return HttpResponse('MERGE ERROR: at least two report ids are needed')

    reports = []
    rtype = None
    tag = None
    rdir = None
    fstats = []
    fstatlen = 0
    stats = [0, 0, 0]
    diffs = ''
    logs = ''
    for i in ids:
        # use filter() so a missing id yields the error response below
        # instead of raising Report.DoesNotExist
        rlist = Report.objects.filter(id = i)
        if len(rlist) == 0:
            logevent("MERGE: report [%s], ERROR: report %s does not exist" % (pids, i), False)
            return HttpResponse('MERGE ERROR: report %s does not exist' % i)
        report = rlist[0]

        if report.mergered != 0:
            logevent("MERGE: report [%s], ERROR: report %s already merged" % (pids, i), False)
            return HttpResponse('MERGE ERROR: report %s already merged' % i)

        if rtype is None:
            rtype = report.type
        elif rtype != report.type:
            logevent("MERGE: report [%s], ERROR: report %s type different" % (pids, i))
            return HttpResponse('MERGE ERROR: report %s type different' % i)

        if tag is None:
            tag = report.tag
        elif tag != report.tag:
            logevent("MERGE: report [%s], ERROR: report %s tag different" % (pids, i))
            return HttpResponse('MERGE ERROR: report %s tag different' % i)

        if rdir is None:
            rdir = os.path.dirname(report.file)
        elif rdir != os.path.dirname(report.file):
            logevent("MERGE: report [%s], ERROR: report %s dirname different" % (pids, i))
            return HttpResponse('MERGE ERROR: report %s dirname different' % i)

        if report.diff is None or len(report.diff) == 0:
            logevent("MERGE: report [%s], ERROR: report %s has no patch" % (pids, i))
            return HttpResponse('MERGE ERROR: report %s has no patch' % i)

        reports.append(report)
        logs += '\n' + report.reportlog

        # split each diff into its per-file stat lines, its summary line and the
        # patch body, accumulating the totals for the merged diffstat
        lines = report.diff.split('\n')
        for i in range(len(lines)):
            if re.search(r" \S+\s+\|\s+\d+\s+[+-]+", lines[i]) != None:
                fstats.append(lines[i])
                if fstatlen < lines[i].find('|'):
                    fstatlen = lines[i].find('|')
            elif re.search(r"\d+ file[s]* changed", lines[i]) != None:
                astat = lines[i].split(',')
                for stat in astat:
                    if re.search(r"\d+ file[s]* changed", stat) != None:
                        num = stat.strip().split(' ')[0]
                        stats[0] += int(num)
                    elif stat.find('insertion') != -1:
                        num = stat.strip().split(' ')[0]
                        stats[1] += int(num)
                    elif stat.find('deletion') != -1:
                        num = stat.strip().split(' ')[0]
                        stats[2] += int(num)
            else:
                diffs += '\n'.join(lines[i:])
                break

    # align the '|' columns of the collected per-file stat lines
    for i in range(len(fstats)):
        append = fstatlen - fstats[i].find('|')
        fstats[i] = fstats[i].replace('|', ' ' * append + '|')

    statline = " %d files changed" % stats[0]
    if stats[1] == 1:
        statline += ", %d insertion(+)" % stats[1]
    elif stats[1] != 0:
        statline += ", %d insertions(+)" % stats[1]
    if stats[2] == 1:
        statline += ", %d deletion(-)" % stats[2]
    elif stats[2] != 0:
        statline += ", %d deletions(-)" % stats[2]

    diffs = "%s\n%s\n%s" % ('\n'.join(fstats), statline, diffs)

    report = Report(tag = tag, file = rdir + '/', diff = diffs, reportlog = logs,
                    type = rtype, status = STATUS_PATCHED, mglist = ','.join(ids))
    report.save()

    user = report.username()
    email = report.email()
    formater = PatchFormater(tag.repo.dirname(), rdir, user, email,
                             rtype.ptitle, rtype.pdesc, diffs)
    report.content = formater.format_patch()
    report.title = formater.format_title()
    report.desc = rtype.pdesc
    report.emails = formater.get_mail_list()
    report.save()

    # mark the source reports as merged into the new one
    for p in reports:
        p.mergered = report.id
        p.save()
    tag.total -= len(reports) - 1
    tag.save()

    logevent("MERGE: report [%s], SUCCEED: new report id %s" % (pids, report.id), True)
    return HttpResponse('MERGE SUCCEED: new report id %s' % report.id)
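
# Illustrative sketch, not called by report_merge() above: how the diffstat
# summary lines ("N file(s) changed, M insertions(+), K deletions(-)") of
# several diffs can be accumulated into one set of totals, mirroring the
# parsing done in report_merge(). The helper name and its list-of-lines
# argument are hypothetical.
def _sum_diffstat_totals(summary_lines):
    import re  # local import keeps the sketch self-contained

    totals = [0, 0, 0]  # files changed, insertions, deletions
    for line in summary_lines:
        if re.search(r"\d+ file[s]* changed", line) is None:
            continue
        for part in line.split(','):
            if re.search(r"\d+ file[s]* changed", part) is not None:
                totals[0] += int(part.strip().split(' ')[0])
            elif part.find('insertion') != -1:
                totals[1] += int(part.strip().split(' ')[0])
            elif part.find('deletion') != -1:
                totals[2] += int(part.strip().split(' ')[0])
    return totals

# e.g. _sum_diffstat_totals([" 1 file changed, 2 insertions(+)",
#                            " 2 files changed, 3 deletions(-)"]) == [3, 2, 3]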
def checkreport(repo, rtag, flists):
    git = GitTree(repo.name, repo.dirname(), repo.url, repo.commit, repo.stable)

    count = 0
    scaninfo = []

    logs = ScanLog(reponame = repo.name, tagname = rtag.name,
                   starttime = strftime("%Y-%m-%d %H:%M:%S", localtime()),
                   desc = 'Processing, please wait...')
    logs.save()

    logger = MyLogger()
    logger.info('%d Files changed.' % len(flists))

    for dot in report_engine_list():
        scount = 0
        test = dot(repo.dirname(), logger.logger, repo.builddir())
        for i in range(test.tokens()):
            try:
                rtype = None
                try:
                    rtype = Type.objects.filter(id = test.get_type())[0]
                except:
                    test.next_token()
                    continue

                if rtype.status == False:
                    test.next_token()
                    continue

                cmts = GitCommit.objects.filter(repo = repo, type = rtype)
                if len(cmts) == 0:
                    cmt = GitCommit(repo = repo, type = rtype)
                    cmt.save()
                else:
                    cmt = cmts[0]

                rflists = flists
                if repo.delta == False:
                    # limit the scan to files changed since this type was last scanned
                    oldcommit = cmt.commit
                    if oldcommit != repo.commit:
                        if git.is_linux_next():
                            oldcommit = git.get_stable()
                        rflists = git.get_changelist(oldcommit, repo.commit, None, True)
                    else:
                        rflists = flists

                logger.info('Starting scan type %d, total %d files' % (test.get_type(), len(rflists)))

                # files explicitly excluded for this type
                exceptfiles = []
                for fn in ExceptFile.objects.filter(type = rtype):
                    exceptfiles.append(fn.file)

                rcount = 0
                for fname in rflists:
                    if is_source_file(fname) == False:
                        continue

                    if exceptfiles.count(fname) != 0:
                        continue

                    reports = Report.objects.filter(file = fname, type = rtype)

                    # the file has been removed from the tree: close out open reports
                    if not os.path.exists(os.path.join(repo.dirname(), fname)):
                        for r in reports:
                            if r.status in [STATUS_NEW, STATUS_PATCHED]:
                                r.status = STATUS_REMOVED
                                r.save()
                        continue

                    test.set_filename(fname)
                    should_report = test.should_report()
                    if test.has_error():
                        continue

                    # the issue is gone: mark open reports fixed, or accepted if already sent
                    if should_report is False:
                        for r in reports:
                            if r.status in [STATUS_NEW, STATUS_PATCHED]:
                                if r.mergered == 0:
                                    r.status = STATUS_FIXED
                                    r.save()
                                else:
                                    mreport = Report.objects.filter(id = r.mergered)
                                    if len(mreport) != 0:
                                        if mreport[0].status in [STATUS_SENT]:
                                            mreport[0].status = STATUS_ACCEPTED
                                            r.status = STATUS_ACCEPTED
                                        else:
                                            mreport[0].status = STATUS_FIXED
                                            r.status = STATUS_FIXED
                                        mreport[0].save()
                                    else:
                                        r.status = STATUS_FIXED
                                    r.save()
                            elif r.status in [STATUS_SENT]:
                                r.status = STATUS_ACCEPTED
                                r.save()
                        continue

                    # do not duplicate a report that is still open for this file and type
                    lcount = 0
                    for r in reports:
                        if r.status in [STATUS_NEW, STATUS_PATCHED, STATUS_SENT]:
                            lcount += 1
                    if lcount > 0:
                        continue

                    text = test.get_report()
                    report = Report(tag = rtag, file = fname, type = rtype,
                                    status = STATUS_NEW, reportlog = '\n'.join(text))
                    report.title = rtype.ptitle
                    report.desc = rtype.pdesc
                    report.save()

                    rcount += 1
                    scount += 1

                cmt.commit = repo.commit
                cmt.save()

                rtype.commit = repo.commit
                rtype.save()
            except:
                logger.info('Scan ERROR: type %d' % rtype.id)

            logger.info('End scan type %d, report %d' % (rtype.id, rcount))
            logs.logs = logger.getlog()
            logs.save()

            test.next_token()

        count += scount
        scaninfo.append("%s: %d" % (test.name(), scount))

    scaninfo.append("total report: %d" % (count))
    logs.desc = ', '.join(scaninfo)
    logs.endtime = strftime("%Y-%m-%d %H:%M:%S", localtime())
    logs.logs = logger.getlog()
    logs.save()

    return count
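
# Illustrative sketch, inferred from the call sites in report_new() and
# checkreport() above; it is not one of the real engines returned by
# report_engine_list(). It only documents the duck-typed interface those
# views rely on; the class name and the placeholder bodies are hypothetical.
class _ReportEngineSketch(object):
    def __init__(self, srcdir, logger = None, builddir = None):
        self._srcdir = srcdir

    def name(self):
        # short engine name used in the scan summary line
        return 'sketch'

    def tokens(self):
        # number of check types this engine provides
        return 0

    def get_type(self):
        # Type id of the check type currently selected
        return 0

    def next_token(self):
        # advance to the next check type
        pass

    def set_filename(self, fname):
        # select the source file to be checked
        pass

    def should_report(self):
        # True if the selected file triggers a report for the current type
        return False

    def has_error(self):
        # True if the last check failed internally
        return False

    def get_report(self):
        # report body as a list of text lines
        return []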