def patch_new(request):
    """Create a new patch for a single file, or render the creation form.

    POST: expects 'tag', 'type' and 'file' request parameters, runs the
    patch engines of the requested type over the file and stores the
    generated patch.  GET: renders the patch-creation form.
    """
    if request.method == "POST":
        tagid = get_request_paramter(request, 'tag')
        typeid = get_request_paramter(request, 'type')
        rfile = get_request_paramter(request, 'file')

        rtags = GitTag.objects.filter(id=tagid)
        if len(rtags) == 0:
            logevent("NEW: patch , ERROR: tag id %s does not exists" % tagid)
            return HttpResponse('NEW: patch, ERROR: tag id %s does not exists' % tagid)

        rtypes = Type.objects.filter(id=typeid)
        if len(rtypes) == 0:
            logevent("NEW: patch , ERROR: type id %s does not exists" % typeid)
            return HttpResponse('NEW: patch, ERROR: type id %s does not exists' % typeid)
        rtype = rtypes[0]

        patch = Patch(tag=rtags[0], type=rtype, file=rfile,
                      status=STATUS_NEW, diff='')
        # BUGFIX: this branch used to report "type id %s does not exists"
        # (copy-pasted from the type check above) although it is the source
        # file that is missing.
        if not os.path.exists(patch.sourcefile()):
            logevent("NEW: patch , ERROR: file %s does not exists" % rfile)
            return HttpResponse('NEW: patch, ERROR: file %s does not exists' % rfile)
        patch.save()

        for dot in patch_engine_list():
            test = dot(rtags[0].repo.dirname(), None, rtags[0].repo.builddir())
            # walk the engine's token list until we hit the requested type
            for i in range(test.tokens()):
                if test.get_type() != rtype.id:
                    test.next_token()
                    continue
                test.set_filename(rfile)
                if test.should_patch():
                    text = test.get_patch()
                    patch.diff = text
                    user = patch.username()
                    email = patch.email()
                    # engine-provided title/description win over the type's
                    # configured defaults
                    desc = test.get_patch_description()
                    title = test.get_patch_title()
                    if desc is None:
                        desc = rtype.pdesc
                    if title is None:
                        title = rtype.ptitle
                    formater = PatchFormater(rtags[0].repo.dirname(), rfile,
                                             user, email, title, desc, text)
                    patch.content = formater.format_patch()
                    patch.title = formater.format_title()
                    patch.desc = formater.format_desc()
                    patch.emails = formater.get_mail_list()
                    patch.module = formater.get_module()
                    patch.save()
                break

        # NOTE(review): the tag total is bumped even when no engine produced
        # a diff for the file — presumably intentional; confirm with callers.
        rtags[0].total += 1
        rtags[0].save()
        logevent("NEW: patch for %s, SUCCEED: new id %s" % (rfile, patch.id), True)
        return HttpResponse('NEW: patch for file, SUCCEED')
    else:
        repoid = int(get_request_paramter(request, 'repo', '1'))
        tagname = get_request_paramter(request, 'tag')
        context = RequestContext(request)
        context['form'] = PatchNewForm(repoid, tagname)
        return render_to_response("patch/patchnew.html", context)
def patchlistmerge(request):
    """Merge two or more patches of the same type/tag/directory into one.

    Combines the diffstat sections and diff bodies of every source patch,
    creates a new merged Patch record, and marks each source patch with the
    id of the merged patch via its ``mergered`` field.
    """
    pids = get_request_paramter(request, 'ids')
    if pids is None:
        return HttpResponse('MERGE ERROR: no patch id specified')

    ids = pids.split(',')
    if len(ids) < 2:
        return HttpResponse('MERGE ERROR: at least two patch ids need')

    patchs = []
    rtype = None
    tag = None
    rdir = None
    fstats = []        # collected per-file diffstat lines
    fstatlen = 0       # widest '|' column, used to realign the stat lines
    stats = [0, 0, 0]  # files changed / insertions / deletions
    diffs = ''
    for i in ids:
        # BUGFIX: Patch.objects.get() raises DoesNotExist instead of
        # returning a falsy value, so the old `if not patch` check could
        # never fire and a bad id crashed the view.  Use filter() so the
        # error path actually works.
        found = Patch.objects.filter(id=i)
        if len(found) == 0:
            logevent("MERGE: patch [%s], ERROR: patch %s does not exists" % (pids, i), False)
            return HttpResponse('MERGE ERROR: patch %s does not exists' % i)
        patch = found[0]
        if patch.mergered != 0:
            logevent("MERGE: patch [%s], ERROR: patch %s already merged" % (pids, i), False)
            return HttpResponse('MERGE ERROR: patch %s already merged' % i)
        # all merged patches must share one type, one tag and one directory
        if rtype is None:
            rtype = patch.type
        elif rtype != patch.type:
            logevent("MERGE: patch [%s], ERROR: patch %s type different" % (pids, i))
            return HttpResponse('MERGE ERROR: patch %s type different' % i)
        if tag is None:
            tag = patch.tag
        elif tag != patch.tag:
            logevent("MERGE: patch [%s], ERROR: patch %s tag different" % (pids, i))
            # BUGFIX: dropped the stray comma after "ERROR:" so the message
            # matches the other merge errors.
            return HttpResponse('MERGE ERROR: patch %s tag different' % i)
        if rdir is None:
            rdir = os.path.dirname(patch.file)
        elif rdir != os.path.dirname(patch.file):
            logevent("MERGE: patch [%s], ERROR: patch %s dirname different" % (pids, i))
            return HttpResponse('MERGE ERROR: patch %s dirname different' % i)
        patchs.append(patch)

        lines = patch.diff.split('\n')
        # BUGFIX: the inner loop used to reuse `i` and shadow the patch-id
        # loop variable above; use a separate index.
        for ln in range(len(lines)):
            if re.search(r" \S+\s+\|\s+\d+\s+[+-]+", lines[ln]) is not None:
                # a per-file diffstat line, e.g. " foo.c |  4 ++--"
                fstats.append(lines[ln])
                if fstatlen < lines[ln].find('|'):
                    fstatlen = lines[ln].find('|')
            elif re.search(r"\d+ file[s]* changed", lines[ln]) is not None:
                # summary line: accumulate files/insertions/deletions
                astat = lines[ln].split(',')
                for stat in astat:
                    if re.search(r"\d+ file[s]* changed", stat) is not None:
                        num = stat.strip().split(' ')[0]
                        stats[0] += int(num)
                    elif stat.find('insertion') != -1:
                        num = stat.strip().split(' ')[0]
                        stats[1] += int(num)
                    elif stat.find('deletion') != -1:
                        num = stat.strip().split(' ')[0]
                        stats[2] += int(num)
            else:
                # first non-stat line: the remainder is the diff body
                diffs += '\n'.join(lines[ln:])
                break

    # realign every collected diffstat line on the widest '|' column
    for i in range(len(fstats)):
        append = fstatlen - fstats[i].find('|')
        fstats[i] = fstats[i].replace('|', ' ' * append + '|')

    statline = " %d files changed" % stats[0]
    if stats[1] == 1:
        statline += ", %d insertion(+)" % stats[1]
    elif stats[1] != 0:
        statline += ", %d insertions(+)" % stats[1]
    if stats[2] == 1:
        statline += ", %d deletion(-)" % stats[2]
    elif stats[2] != 0:
        statline += ", %d deletions(-)" % stats[2]

    diffs = "%s\n%s\n%s" % ('\n'.join(fstats), statline, diffs)
    patch = Patch(tag=tag, file=rdir + '/', diff=diffs, type=rtype,
                  status=STATUS_NEW, mglist=','.join(ids))
    patch.save()

    user = patch.username()
    email = patch.email()
    formater = PatchFormater(tag.repo.dirname(), rdir, user, email,
                             rtype.ptitle, rtype.pdesc, diffs)
    patch.content = formater.format_patch()
    patch.title = formater.format_title()
    patch.desc = rtype.pdesc
    patch.emails = formater.get_mail_list()
    patch.save()

    # point every source patch at the merged one
    for p in patchs:
        p.mergered = patch.id
        p.save()
    # N patches collapsed into 1, so the tag total shrinks by N-1
    tag.total -= len(patchs) - 1
    tag.save()

    logevent("MERGE: patch [%s], SUCCEED: new patch id %s" % (pids, patch.id), True)
    return HttpResponse('MERGE SUCCEED: new patch id %s' % patch.id)
def patch_new(request):
    """Create a new patch for a single file, or render the creation form.

    POST: expects 'tag', 'type' and 'file' request parameters, runs the
    patch engines of the requested type over the file and stores the
    generated patch.  GET: renders the patch-creation form.
    """
    if request.method == "POST":
        tagid = get_request_paramter(request, 'tag')
        typeid = get_request_paramter(request, 'type')
        rfile = get_request_paramter(request, 'file')

        rtags = GitTag.objects.filter(id=tagid)
        if len(rtags) == 0:
            logevent("NEW: patch , ERROR: tag id %s does not exists" % tagid)
            return HttpResponse(
                'NEW: patch, ERROR: tag id %s does not exists' % tagid)

        rtypes = Type.objects.filter(id=typeid)
        if len(rtypes) == 0:
            logevent("NEW: patch , ERROR: type id %s does not exists" % typeid)
            return HttpResponse(
                'NEW: patch, ERROR: type id %s does not exists' % typeid)
        rtype = rtypes[0]

        patch = Patch(tag=rtags[0], type=rtype, file=rfile,
                      status=STATUS_NEW, diff='')
        # BUGFIX: this branch used to report "type id %s does not exists"
        # (copy-pasted from the type check above) although it is the source
        # file that is missing.
        if not os.path.exists(patch.sourcefile()):
            logevent("NEW: patch , ERROR: file %s does not exists" % rfile)
            return HttpResponse(
                'NEW: patch, ERROR: file %s does not exists' % rfile)
        patch.save()

        for dot in patch_engine_list():
            test = dot(rtags[0].repo.dirname(), None, rtags[0].repo.builddir())
            # walk the engine's token list until we hit the requested type
            for i in range(test.tokens()):
                if test.get_type() != rtype.id:
                    test.next_token()
                    continue
                test.set_filename(rfile)
                if test.should_patch():
                    text = test.get_patch()
                    patch.diff = text
                    user = patch.username()
                    email = patch.email()
                    # engine-provided title/description win over the type's
                    # configured defaults
                    desc = test.get_patch_description()
                    title = test.get_patch_title()
                    if desc is None:
                        desc = rtype.pdesc
                    if title is None:
                        title = rtype.ptitle
                    formater = PatchFormater(rtags[0].repo.dirname(), rfile,
                                             user, email, title, desc, text)
                    patch.content = formater.format_patch()
                    patch.title = formater.format_title()
                    patch.desc = formater.format_desc()
                    patch.emails = formater.get_mail_list()
                    patch.module = formater.get_module()
                    patch.save()
                break

        # NOTE(review): the tag total is bumped even when no engine produced
        # a diff for the file — presumably intentional; confirm with callers.
        rtags[0].total += 1
        rtags[0].save()
        logevent("NEW: patch for %s, SUCCEED: new id %s" % (rfile, patch.id), True)
        return HttpResponse('NEW: patch for file, SUCCEED')
    else:
        repoid = int(get_request_paramter(request, 'repo', '1'))
        tagname = get_request_paramter(request, 'tag')
        context = RequestContext(request)
        context['form'] = PatchNewForm(repoid, tagname)
        return render_to_response("patch/patchnew.html", context)
def check_patch(repo, git, rtag, flists, commit):
    """Scan the changed files of a repo with every patch engine and create
    or update Patch records accordingly.

    repo:   repository object (provides dirname(), builddir(), delta, commit)
    git:    git helper (changelists, linux-next/stable detection,
            obsolescence checks)
    rtag:   the GitTag new patches are attached to
    flists: list of changed file paths to scan
    commit: the commit id this scan runs up to; stored per type in GitCommit
            so an unchanged type is skipped on the next run

    Returns the total number of patches generated.  Progress and results
    are recorded in a ScanLog row as the scan proceeds.
    """
    count = 0
    scaninfo = []
    # ScanLog row is saved up-front so the UI can show progress while the
    # (potentially long) scan runs; desc/logs are updated incrementally.
    logs = ScanLog(reponame = repo.name, tagname = rtag.name,
                   starttime = strftime("%Y-%m-%d %H:%M:%S", localtime()),
                   desc = 'Processing, please wait...')
    logs.save()
    logger = MyLogger()
    logger.logger.info('%d Files changed' % len(flists))
    #logger.logger.info('=' * 40)
    #logger.logger.info('%s' % '\n'.join(flists))
    #logger.logger.info('=' * 40)

    # scheduling knobs: large cleanup-type scans may be postponed to the
    # weekend, and obsolete changes may be skipped entirely
    sche_weekend_enable = read_config('patch.schedule.weekend.enable', True)
    sche_weekend_limit = read_config('patch.schedule.weekend.limit', 600)
    sche_weekend_delta = read_config('patch.schedule.weekend.delta', 90)
    sche_obsolete_skip = read_config('patch.schedule.obsolete.skip', False)
    weekday = datetime.datetime.now().weekday()

    for dot in patch_engine_list():
        scount = 0  # patches generated by this engine
        test = dot(repo.dirname(), logger.logger, repo.builddir())
        # each engine exposes a list of "tokens" (one per patch type it
        # implements); iterate them all
        for i in range(test.tokens()):
            try:
                rtype = None
                try:
                    rtype = Type.objects.filter(id = test.get_type())[0]
                except:
                    # unknown/unregistered type id: skip this token
                    test.next_token()
                    continue
                if rtype.status == False:
                    # type disabled in configuration
                    test.next_token()
                    continue
                if (rtype.flags & TYPE_SCAN_NEXT_ONLY) != 0 and not git.is_linux_next():
                    test.next_token()
                    continue
                if rtype.type == 0 and sche_weekend_enable is True and len(flists) > sche_weekend_limit and weekday < 5:
                    # if we does not have a patch for this cleanup type in
                    # sche_weekend_limit days, schedule scan only on weekend
                    stime = datetime.datetime.now() - datetime.timedelta(days=sche_weekend_delta)
                    if Patch.objects.filter(type = rtype, date__gte=stime).count() == 0:
                        logger.info('Delay scan type %d to weekend' % test.get_type())
                        test.next_token()
                        continue

                # per-(repo, type) bookmark of the last scanned commit
                cmts = GitCommit.objects.filter(repo = repo, type = rtype)
                if len(cmts) == 0:
                    cmt = GitCommit(repo = repo, type = rtype)
                    cmt.save()
                else:
                    cmt = cmts[0]
                if cmt.commit == commit:
                    # this type is already up to date
                    test.next_token()
                    continue

                if repo.delta == False:
                    # full-scan repo: restrict the file list to what changed
                    # since this type's last scanned commit when possible
                    oldcommit = cmt.commit
                    if oldcommit != repo.commit:
                        if git.is_linux_next():
                            oldcommit = git.get_stable()
                        rflists = git.get_changelist(oldcommit, commit, None, True)
                    else:
                        rflists = flists
                else:
                    rflists = flists

                # re-check the weekend limit against the per-type file list
                if rtype.type == 0 and sche_weekend_enable is True and len(rflists) > sche_weekend_limit and weekday < 5:
                    stime = datetime.datetime.now() - datetime.timedelta(days=sche_weekend_delta)
                    if Patch.objects.filter(type = rtype, date__gte=stime).count() == 0:
                        logger.info('Delay scan type %d to weekend' % test.get_type())
                        test.next_token()
                        continue

                logger.info('Starting scan type %d, total %d files' % (test.get_type(), len(rflists)))

                # per-type blacklist of files that must never be patched
                exceptfiles = []
                for fn in ExceptFile.objects.filter(type = rtype):
                    exceptfiles.append(fn.file)

                pcount = 0  # patches generated for this token
                for sfile in rflists:
                    if not is_source_file(sfile):
                        continue
                    if exceptfiles.count(sfile) != 0:
                        logger.logger.info('skip except file %s, type %d' % (sfile, rtype.id))
                        continue
                    # treat patch marked with Rejected as except file
                    if Patch.objects.filter(file = sfile, type = rtype, status = STATUS_REJECTED).count() > 0:
                        continue
                    # treat patch marked with Applied and commit is '' as EXISTS patch
                    if Patch.objects.filter(file = sfile, type = rtype, status = STATUS_ACCEPTED, commit = '').count() > 0:
                        continue
                    # live patches (new/sent/marked) for this file+type
                    patchs = Patch.objects.filter(file = sfile, type = rtype)
                    rpatchs = []
                    for p in patchs:
                        if not p.status in [STATUS_NEW, STATUS_SENT, STATUS_MARKED]:
                            continue
                        rpatchs.append(p)
                    test.set_filename(sfile)
                    # source file maybe removed
                    if not os.path.exists(test._get_file_path()):
                        for p in rpatchs:
                            p.status = STATUS_REMOVED
                            p.save()
                        continue
                    # if the same file has a patch for this type, ignore it
                    # because the last patch does not accepted
                    should_patch = test.should_patch()
                    if test.has_error():
                        continue
                    if len(rpatchs) != 0 and should_patch == False:
                        # the issue disappeared: previously generated patches
                        # were evidently applied (or fixed independently)
                        for p in rpatchs:
                            if p.status == STATUS_SENT:
                                p.status = STATUS_ACCEPTED
                            elif p.mergered != 0:
                                # propagate the resolution to the merged patch
                                mpatch = Patch.objects.filter(id = p.mergered)
                                if len(mpatch) != 0:
                                    if mpatch[0].status == STATUS_SENT:
                                        mpatch[0].status = STATUS_ACCEPTED
                                        p.status = STATUS_ACCEPTED
                                    else:
                                        mpatch[0].status = STATUS_FIXED
                                        p.status = STATUS_FIXED
                                    mpatch[0].save()
                                else:
                                    p.status = STATUS_FIXED
                            else:
                                p.status = STATUS_FIXED
                            p.save()
                    if should_patch == True and len(rpatchs) == 0:
                        text = test.get_patch()
                        if (rtype.flags & TYPE_CHANGE_DATE_CHECK) == TYPE_CHANGE_DATE_CHECK:
                            if git.is_change_obsoleted(sfile, text) is True:
                                continue
                        elif rtype.id > 3000 and rtype.type == 0 and sche_obsolete_skip is True:
                            if git.is_change_obsoleted(sfile, text) is True:
                                logger.logger.info('skip obsoleted file %s, type %d' % (sfile, rtype.id))
                                continue
                        patch = Patch(tag = rtag, file = sfile, type = rtype,
                                      status = STATUS_NEW, diff = text)
                        patch.save()
                        # format patch and cache to patch
                        user = patch.username()
                        email = patch.email()
                        desc = test.get_patch_description()
                        title = test.get_patch_title()
                        if desc is None:
                            desc = rtype.pdesc
                        if title is None:
                            title = rtype.ptitle
                        formater = PatchFormater(repo.dirname(), sfile, user, email, title, desc, text)
                        patch.content = formater.format_patch()
                        patch.title = formater.format_title()
                        patch.desc = formater.format_desc()
                        patch.emails = formater.get_mail_list()
                        patch.module = formater.get_module()
                        patch.save()
                        scount += 1
                        pcount += 1
                # record the scanned commit so this type is skipped next run
                cmt.commit = commit
                cmt.save()
            except:
                # NOTE(review): bare except swallows every failure for this
                # token, including NameError on pcount if the scan failed
                # before it was assigned — the log line below may then raise.
                logger.info('Scan ERROR: type %d' % test.get_type())
            logger.info('End scan type %d, patch %d' % (test.get_type(), pcount))
            logs.logs = logger.getlog()
            logs.save()
            test.next_token()
        count += scount
        scaninfo.append("%s: %d" % (test.name(), scount))

    scaninfo.append("total: %d" % (count))
    logs.desc = ', '.join(scaninfo)
    logs.endtime = strftime("%Y-%m-%d %H:%M:%S", localtime())
    logs.logs = logger.getlog()
    logs.save()
    return count
def patchlistmerge(request):
    """Merge two or more patches of the same type/tag/directory into one.

    Combines the diffstat sections and diff bodies of every source patch,
    creates a new merged Patch record, and marks each source patch with the
    id of the merged patch via its ``mergered`` field.
    """
    pids = get_request_paramter(request, 'ids')
    if pids is None:
        return HttpResponse('MERGE ERROR: no patch id specified')

    ids = pids.split(',')
    if len(ids) < 2:
        return HttpResponse('MERGE ERROR: at least two patch ids need')

    patchs = []
    rtype = None
    tag = None
    rdir = None
    fstats = []        # collected per-file diffstat lines
    fstatlen = 0       # widest '|' column, used to realign the stat lines
    stats = [0, 0, 0]  # files changed / insertions / deletions
    diffs = ''
    for i in ids:
        # BUGFIX: Patch.objects.get() raises DoesNotExist instead of
        # returning a falsy value, so the old `if not patch` check could
        # never fire and a bad id crashed the view.  Use filter() so the
        # error path actually works.
        found = Patch.objects.filter(id=i)
        if len(found) == 0:
            logevent(
                "MERGE: patch [%s], ERROR: patch %s does not exists" % (pids, i),
                False)
            return HttpResponse('MERGE ERROR: patch %s does not exists' % i)
        patch = found[0]
        if patch.mergered != 0:
            logevent(
                "MERGE: patch [%s], ERROR: patch %s already merged" % (pids, i),
                False)
            return HttpResponse('MERGE ERROR: patch %s already merged' % i)
        # all merged patches must share one type, one tag and one directory
        if rtype is None:
            rtype = patch.type
        elif rtype != patch.type:
            logevent("MERGE: patch [%s], ERROR: patch %s type different" % (pids, i))
            return HttpResponse('MERGE ERROR: patch %s type different' % i)
        if tag is None:
            tag = patch.tag
        elif tag != patch.tag:
            logevent("MERGE: patch [%s], ERROR: patch %s tag different" % (pids, i))
            # BUGFIX: dropped the stray comma after "ERROR:" so the message
            # matches the other merge errors.
            return HttpResponse('MERGE ERROR: patch %s tag different' % i)
        if rdir is None:
            rdir = os.path.dirname(patch.file)
        elif rdir != os.path.dirname(patch.file):
            logevent("MERGE: patch [%s], ERROR: patch %s dirname different" % (pids, i))
            return HttpResponse('MERGE ERROR: patch %s dirname different' % i)
        patchs.append(patch)

        lines = patch.diff.split('\n')
        # BUGFIX: the inner loop used to reuse `i` and shadow the patch-id
        # loop variable above; use a separate index.
        for ln in range(len(lines)):
            if re.search(r" \S+\s+\|\s+\d+\s+[+-]+", lines[ln]) is not None:
                # a per-file diffstat line, e.g. " foo.c |  4 ++--"
                fstats.append(lines[ln])
                if fstatlen < lines[ln].find('|'):
                    fstatlen = lines[ln].find('|')
            elif re.search(r"\d+ file[s]* changed", lines[ln]) is not None:
                # summary line: accumulate files/insertions/deletions
                astat = lines[ln].split(',')
                for stat in astat:
                    if re.search(r"\d+ file[s]* changed", stat) is not None:
                        num = stat.strip().split(' ')[0]
                        stats[0] += int(num)
                    elif stat.find('insertion') != -1:
                        num = stat.strip().split(' ')[0]
                        stats[1] += int(num)
                    elif stat.find('deletion') != -1:
                        num = stat.strip().split(' ')[0]
                        stats[2] += int(num)
            else:
                # first non-stat line: the remainder is the diff body
                diffs += '\n'.join(lines[ln:])
                break

    # realign every collected diffstat line on the widest '|' column
    for i in range(len(fstats)):
        append = fstatlen - fstats[i].find('|')
        fstats[i] = fstats[i].replace('|', ' ' * append + '|')

    statline = " %d files changed" % stats[0]
    if stats[1] == 1:
        statline += ", %d insertion(+)" % stats[1]
    elif stats[1] != 0:
        statline += ", %d insertions(+)" % stats[1]
    if stats[2] == 1:
        statline += ", %d deletion(-)" % stats[2]
    elif stats[2] != 0:
        statline += ", %d deletions(-)" % stats[2]

    diffs = "%s\n%s\n%s" % ('\n'.join(fstats), statline, diffs)
    patch = Patch(tag=tag, file=rdir + '/', diff=diffs, type=rtype,
                  status=STATUS_NEW, mglist=','.join(ids))
    patch.save()

    user = patch.username()
    email = patch.email()
    formater = PatchFormater(tag.repo.dirname(), rdir, user, email,
                             rtype.ptitle, rtype.pdesc, diffs)
    patch.content = formater.format_patch()
    patch.title = formater.format_title()
    patch.desc = rtype.pdesc
    patch.emails = formater.get_mail_list()
    patch.save()

    # point every source patch at the merged one
    for p in patchs:
        p.mergered = patch.id
        p.save()
    # N patches collapsed into 1, so the tag total shrinks by N-1
    tag.total -= len(patchs) - 1
    tag.save()

    logevent("MERGE: patch [%s], SUCCEED: new patch id %s" % (pids, patch.id), True)
    return HttpResponse('MERGE SUCCEED: new patch id %s' % patch.id)