def user():
    """User-center page.

    GET renders the page; POST stashes a submitted asset (name -> urls)
    in the session and redirects back here (post/redirect/get), so the
    next request flushes it into the 'assets' redis hash.
    """
    # Flush a pending asset submission left by the POST branch below.
    # Both keys are written together on POST, so require both here
    # ('and', not 'or') and pop with a default — the original 'or' +
    # bare pop raised KeyError when only one key was present.
    if 'name' in session and 'urls' in session:
        redispool.hset('assets', session['name'], session['urls'])
        session.pop('name', None)
        session.pop('urls', None)
    allcode = InvitationCode.query.order_by(
        InvitationCode.id.desc()).limit(10).all()
    user_id = session.get('user_id')
    nowuser = User.query.filter(User.id == user_id).first()
    username = nowuser.username
    photoname = redispool.hget('imagename', nowuser.email)
    if not photoname:
        # Default avatar when the user never uploaded one.
        photoname = 'springbird.jpg'
    profile = Profile.query.filter(Profile.userid == user_id).first()
    assetname = redispool.hkeys('assets')
    followlist = redispool.hgetall('FollowList')
    if request.method == 'GET':
        return render_template('user-center.html',
                               allcode=allcode,
                               username=username,
                               profile=profile,
                               assetname=assetname,
                               followlist=followlist,
                               photoname=photoname)
    # POST: remember the submitted asset and redirect (PRG pattern).
    session['name'] = request.form.get('asset')
    session['urls'] = request.form.get('assets')
    return redirect(url_for('user'))
def bugdetail(id=None):
    """Bug detail page.

    GET renders the detail view for bug *id* (latest bug when id is
    None); POST additionally adds the bug to the redis 'FollowList'
    hash before rendering.
    """
    bugbit, bugtype = core.GetBit()
    if id:
        buginfo = BugList.query.filter(BugList.id == id).first()
    else:
        # No id given: fall back to the most recent bug.
        buginfo = BugList.query.order_by(BugList.id.desc()).first()
    oldurlinfo = BaseInfo.query.filter(
        BaseInfo.url == buginfo.oldurl).first()
    # flag is True while the bug is NOT yet followed.
    flag = not redispool.hexists('FollowList', buginfo.id)
    if request.method != 'GET':
        redispool.hset('FollowList', buginfo.id, buginfo.bugurl)
        flag = False
    return render_template('bug-details.html',
                           buginfo=buginfo,
                           oldurlinfo=oldurlinfo,
                           bugbit=bugbit,
                           bugtype=bugtype,
                           flag=flag)
def photo():
    """Avatar management.

    GET renders the upload page with the current avatar; POST saves an
    uploaded image under static/photo and records its name in the
    'imagename' redis hash, keyed by the user's email.
    """
    current = User.query.filter(
        User.id == session.get('user_id')).first()
    stored_name = redispool.hget('imagename', current.email)
    if request.method == 'GET':
        return render_template('photo.html', photoname=stored_name)
    upload = request.files['photo']
    if not (upload and core.allowed_file(upload.filename)):
        return '<p> 上传失败</p>'
    extension = upload.filename.rsplit('.', 1)[1]
    # File is named after the local part of the email address.
    new_name = current.email.split('@')[0] + "." + extension
    upload.save(os.path.join(os.getcwd() + "/static/photo", new_name))
    redispool.hset('imagename', current.email, new_name)
    return redirect(url_for('user'))
def POCmanage():
    """POC management page.

    GET lists the stored POCs; POST registers a new POC (DB row, bug
    grade in the 'bugtype' redis hash, name in the 'poc' HyperLogLog)
    and re-renders the refreshed list.
    """
    bugbit, bugtype = core.GetBit()
    if request.method == 'GET':
        poclist = POC.query.order_by(POC.id.desc()).all()
        return render_template('pocmanage.html',
                               bugbit=bugbit,
                               bugtype=bugtype,
                               poclist=poclist)
    form = request.form
    pocname = form.get('pocname')
    redispool.hset('bugtype', pocname, form.get('buggrade'))
    new_poc = POC(name=pocname,
                  rule=form.get('rule'),
                  expression=form.get('expression'))
    redispool.pfadd("poc", pocname)
    db.session.add(new_poc)
    db.session.commit()
    # Re-query so the rendered list includes the POC just committed.
    poclist = POC.query.order_by(POC.id.desc()).all()
    return render_template('pocmanage.html',
                           bugbit=bugbit,
                           bugtype=bugtype,
                           poclist=poclist)
def inputfilter(url):
    """Normalize a scan target and probe it over HTTP.

    Accepted input formats are flexible:
        127.0.0.1 / http://127.0.0.1 / www.baidu.com / https://www.baidu.com

    :param url: raw target string supplied by the user.
    :return: (host, reachable_url, response) — e.g.
        ('www.baidu.com', 'https://www.baidu.com', <Response>) —
        or (None, None, None) when the target cannot be reached.
    """
    def _probe(target):
        # Best-effort GET; an unreachable target simply yields None.
        # Narrowed from a bare except; requests failures all derive
        # from Exception.
        try:
            return requests.get(target, headers=core.GetHeaders(),
                                timeout=4, verify=False)
        except Exception:
            return None

    def _drop_from_queue():
        # Target is dead: decrement the redis wait counter, or reset it
        # and clear the 'currently scanning' marker (best effort —
        # redis errors are only logged).
        try:
            count = redispool.hget('targetscan', 'waitcount')
            # isinstance replaces the original "'str' in str(type(count))".
            if isinstance(count, str):
                redispool.hset("targetscan", "waitcount",
                               str(int(count) - 1))
            else:
                redispool.hset("targetscan", "waitcount", "0")
                redispool.hdel("targetscan", "nowscan")
        except Exception as e:
            print(e)

    if url.endswith("/"):
        url = url[:-1]
    if not url.startswith("http://") and not url.startswith("https://"):
        # No scheme given: try http first, then https. The https probe
        # is skipped when http already answered (the original probed
        # both but always preferred the http response anyway).
        attackurl1 = "http://" + url
        attackurl2 = "https://" + url
        rep1 = _probe(attackurl1)
        if rep1:
            return url, attackurl1, rep1
        rep2 = _probe(attackurl2)
        if rep2:
            return url, attackurl2, rep2
        print("None data")
        _drop_from_queue()
        return None, None, None
    # Scheme already present: probe as-is and strip it for the host part.
    rep = _probe(url)
    if rep:
        if "http://" in url:
            return url.replace("http://", ""), url, rep
        return url.replace("https://", ""), url, rep
    print("{}访问超时".format(url))
    return None, None, None
def SZheScan(url):
    """Full scan pipeline for one target: filter the input, collect base
    info, store IP- or domain-specific details, deep-crawl the site and
    run the vulnerability scan, then update the redis scan-queue state.
    Any exception aborts the scan and is only printed.
    """
    try:
        # Normalize/validate the raw input (returns (None, None, None)
        # for unreachable targets).
        url, attackurl, rep = inputfilter(url)
        # No usable url: the target is invalid or timed out — skip it.
        if not url:
            print("Not Allow This URL")
            return
        # Mark this target as the one currently being scanned.
        redispool.hset("targetscan", "nowscan", attackurl)
        with app.app_context():
            # Collect basic information about the target.
            baseinfo = GetBaseMessage(url, attackurl, rep)
            # Regex decides IP vs. domain, selecting the matching
            # deep-information collector below.
            pattern = re.compile('^\d+\.\d+\.\d+\.\d+(:(\d+))?$')
            # Strip an explicit port from an IP target.
            if pattern.findall(url) and ":" in url:
                infourl = url.split(":")[0]
            else:
                infourl = url
            if pattern.findall(url):
                boolcheck = True
                ipinfo = IPMessage(infourl)
            else:
                boolcheck = False
                domaininfo = DomainMessage(url)
            # Persist the base record first; flush to obtain its id for
            # the IP/domain child rows before committing.
            info = BaseInfo(url=url,
                            boolcheck=boolcheck,
                            status=baseinfo.GetStatus(),
                            title=baseinfo.GetTitle(),
                            date=baseinfo.GetDate(),
                            responseheader=baseinfo.GetResponseHeader(),
                            Server=baseinfo.GetFinger(),
                            portserver=baseinfo.PortScan(),
                            sendir=baseinfo.SenDir())
            db.session.add(info)
            db.session.flush()
            infoid = info.id
            db.session.commit()
            baseinfo.WebLogicScan()
            baseinfo.AngelSwordMain()
            if boolcheck:
                # IP target: count it and store IP-specific details.
                redispool.pfadd("ip", infourl)
                ipinfo = IPInfo(baseinfoid=infoid,
                                bindingdomain=ipinfo.GetBindingIP(),
                                sitestation=ipinfo.GetSiteStation(),
                                CMessage=ipinfo.CScanConsole(),
                                ipaddr=ipinfo.FindIpAdd())
                db.session.add(ipinfo)
            else:
                # Domain target: count it and store domain-specific details.
                redispool.pfadd("domain", infourl)
                domaininfo = DomainInfo(
                    baseinfoid=infoid,
                    subdomain=domaininfo.GetSubDomain(),
                    whois=domaininfo.GetWhoisMessage(),
                    bindingip=domaininfo.GetBindingIP(),
                    sitestation=domaininfo.GetSiteStation(),
                    recordinfo=domaininfo.GetRecordInfo(),
                    domainaddr=domaininfo.FindDomainAdd())
                db.session.add(domaininfo)
            db.session.commit()
            # Crawl depth defaults to 2 to avoid exploding on large
            # sites; configurable in the settings.
            redispool.append("runlog", "对{}页面进行深度爬取\n".format(attackurl))
            SpiderGetUrl2(attackurl, deepth=2)
            redispool.append("runlog",
                             "对该网站{}爬取到的url进行常规漏扫 :D\n".format(attackurl))
            print("对该网站爬取到的url进行常规漏扫 :D")
            BugScanConsole(url)
            # Scan finished: decrement the wait counter (string value in
            # redis), or reset it to "0" when missing/non-string.
            count = redispool.hget('targetscan', 'waitcount')
            if 'str' in str(type(count)):
                waitcount = int(count) - 1
                redispool.hset("targetscan",
                               "waitcount", str(waitcount))
            else:
                redispool.hset("targetscan", "waitcount", "0")
                # NOTE(review): grouping inferred from the matching code
                # in inputfilter — the 'nowscan' marker is cleared only
                # on this reset path; confirm against original layout.
                redispool.hdel("targetscan", "nowscan")
            # If the newest bug row belongs to this url, the host is
            # affected: bump the 'havebugpc' HyperLogLog.
            firstbugurl = BugList.query.order_by(
                BugList.id.desc()).first().oldurl
            if firstbugurl == url:
                redispool.pfadd("havebugpc", url)
            redispool.append("runlog", "{} scan end !\n".format(url))
            print("{} scan end !".format(url))
            # print(redispool.get('runlog'))
    except Exception as e:
        print(e)
        pass