def saveRequest(token, request_raw):
    request_info = parse_request_service(request_raw)
    rid = getRid(request_raw)
    request_info['rid'] = rid
    request_info['token'] = token
    request_info['update_time'] = time.strftime("%Y%m%d-%H%M%S", time.localtime(time.time()))
    return request_info
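# For illustration only: judging from how the parsed fields are used elsewhere
# in this code (host, port, path, method, body), the record returned by
# saveRequest() is shaped roughly like the dict below; the last three keys are
# the ones added above, and all values here are made up.
example_request_info = {
    'host': 'example.com',
    'port': 80,
    'path': '/index.php?id=1',
    'method': 'GET',
    'body': '',
    'rid': 'c4ca4238a0b9238',        # getRid() value for the raw request, used for dedup
    'token': 'your-token',
    'update_time': '20240101-120000',
}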
def scan():
    token = request.query.token
    if token:
        postdata = request.body.read()
        flag1 = request_filter(parse_request_service(postdata), '', black_rules)
        # URL deduplication
        flag2 = is_duplicate('results', getRid(postdata))   # dedup against the results table: if it was already scanned, do not scan it again (the token is not part of the dedup key)
        flag3 = is_duplicate('requests', getRid(postdata))  # dedup against the requests table
        if flag1 == True:
            if flag2 == flag3 == False:
                data = {'token': token, 'body': postdata}
                sendToMQ(data)
                return 'send to burp scan'
            else:
                return 'fail:repeat scan'
        else:
            return 'fail: %s' % flag1
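# A minimal client-side sketch, illustrative only: it assumes scan() is exposed
# as a Bottle route at '/scan', and the host, port and route path below are
# assumptions that must match the real deployment. The client posts a raw
# captured HTTP request as the body and passes its token as a query parameter.
def demo_submit_to_scan(raw_request, token):
    import requests
    resp = requests.post('http://127.0.0.1:8000/scan',
                         params={'token': token},
                         data=raw_request,
                         timeout=10)
    return resp.text  # 'send to burp scan' on success, otherwise a 'fail:...' message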
def getStatus():
    while True:  # poll forever, no need to exit
        url = getStatus_api
        try:
            # The request must catch the timeout exception, otherwise this coroutine may stop running.
            # With a lot of data this call can easily get stuck here, hence the longer timeout.
            res = requests.get(url, timeout=15).text
            if res:
                resdata = json.loads(res)
                print 'receive data:', len(resdata)
                if len(resdata) == 0:
                    print 'no task running, sleep 10s'
                    time.sleep(10)
                if debug:
                    print resdata
                for line in resdata:
                    if line['status'] == 'finished':
                        issues = line.pop('issues')
                        try:
                            mydb.update('requests', {'scan_burp': 1}, {'token': line['token'], 'rid': line['rid']})
                            # If the record was already saved, do not insert into the results and issues tables again.
                            if not is_duplicate('results', line['rid']):
                                mydb.insert('results', line)
                                for issue in issues:
                                    issue['token'] = line['token']
                                    issue['rid'] = line['rid']
                                    httpService = parse_request_service(issue['issueRequest'])
                                    url = httpService['host'] + ':' + str(httpService['port']) + httpService['path']
                                    if httpService['method'].lower() == 'get':
                                        issue['issueUrl'] = httpService['method'] + ' <a href="http://{0}" target="_blank">{0}</a>'.format(url)
                                    if httpService['method'].lower() == 'post':
                                        issue['issueUrl'] = httpService['method'] + ' <a href="http://{0}" target="_blank">{0}</a>'.format(url) + '\r\nbody: ' + httpService['body']
                                    mydb.insert('issues', issue)
                                print 'save success'
                        except Exception, e:
                            print 'error1', e, 'res:', res
                    else:
                        print line
            time.sleep(3)  # fetch results every 3 seconds
        except Exception, e:
            print 'error2', e
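# For reference, the JSON returned by getStatus_api is consumed as a list of
# task objects shaped roughly like the sketch below. The field names come from
# the code above; the values are purely illustrative, and any other fields the
# scanner returns for an issue are stored as-is.
example_status_response = [
    {
        'status': 'finished',        # only 'finished' tasks are persisted
        'token': 'your-token',
        'rid': 'c4ca4238a0b9238',    # request id, used for dedup against the results table
        'issues': [
            {
                # Raw HTTP request that triggered the issue; it is re-parsed with
                # parse_request_service() to build the clickable issueUrl field.
                'issueRequest': 'GET /index.php?id=1 HTTP/1.1\r\nHost: example.com\r\n\r\n',
            },
        ],
    },
]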
            if black_rule == host:  # same as above: exact host match against a blacklist entry
                if debug:
                    print 'blackrule filter'
                return False
    if _white_rules:
        for white_rule in _white_rules:
            if white_rule.startswith('*'):
                pattern = '.*' + front_netloc(white_rule)
                if re.match(pattern, host):  # the host is in the whitelist, return True
                    return True
            else:
                if white_rule == host:
                    return True
        if debug:
            print 'white rule filter'
        return False
    if debug:
        print 'bypass'
    return True


postdata = open('../data.txt').read()
white_rules = []
black_rules = []
print request_filter(parse_request_service(postdata), '', black_rules)