def SenFileScan(domain, url):
    """
    Sensitive file / directory scan.
    Wordlist: dict\SEN_scan.txt, pre-loaded into the redis list "SenScan".

    :param domain: original target, stored as BugList.oldurl
    :param url: base url; each wordlist suffix is appended as "<url>/<suffix>"
    :return: newline-joined string of urls judged to be real findings
    """
    pools = 20
    # One LRANGE round trip instead of llen() + one lindex() per entry.
    urlList = ["{}/{}".format(url, suffix)
               for suffix in redispool.lrange("SenScan", 0, -1)]
    pool = ThreadPool(pools)
    SenFileMessage = pool.map(UrlRequest, urlList)
    SenFileMessage2 = ""
    pool.close()
    pool.join()
    # Baseline "not found" page: many sites answer 200 with a custom 404 body,
    # so candidates are compared against this page instead of status codes.
    url404 = "{}/springbird404page".format(url)
    try:
        rep404 = requests.get(url404, headers=core.GetHeaders(),
                              timeout=3, verify=False).text
    except Exception as e:
        print("超时")
        rep404 = str(e)
    if len(SenFileMessage) != 0:
        with app.app_context():
            print("Sen file and dir : \n")
            # Renamed loop variable: the original shadowed the `url` parameter.
            for hiturl in SenFileMessage:
                try:
                    if hiturl is None:
                        continue
                    rep = requests.get(hiturl, headers=core.GetHeaders(),
                                       timeout=1, verify=False)
                    # Skip pages similar to the baseline 404 to cut noise.
                    if not core.is_similar_page(rep404, rep.text, radio=0.85):
                        print(hiturl)
                        bug = BugList(oldurl=domain, bugurl=hiturl,
                                      bugname="SenDir",
                                      buggrade=redispool.hget('bugtype', "SenDir"),
                                      payload=hiturl, bugdetail=rep.text)
                        SenFileMessage2 += hiturl + "\n"
                        redispool.pfadd(redispool.hget('bugtype', "SenDir"), hiturl)
                        redispool.pfadd("SenDir", hiturl)
                        db.session.add(bug)
                except Exception:
                    # Best effort: one unreachable candidate must not stop the scan.
                    pass
            db.session.commit()
    return SenFileMessage2
def BugScanConsole(attackurl):
    '''
    Dynamically dispatch the scan methods listed in `Bugs` to avoid duplicated
    code. Vulnerable urls are stored in the buglist table, and each entry points
    at the matching vulnerability type in the bugtype table.
    '''
    try:
        # Drain the redis set of urls queued for this target.
        while redispool.scard(attackurl):
            target = redispool.spop(attackurl)
            scanner = BugScan(attackurl, target)
            with app.app_context():
                for checkname in Bugs:
                    found, payload, detail = getattr(scanner, checkname)()
                    if not found:
                        continue
                    record = BugList(oldurl=attackurl, bugurl=target,
                                     bugname=checkname,
                                     buggrade=redispool.hget('bugtype', checkname),
                                     payload=payload, bugdetail=detail)
                    redispool.pfadd(redispool.hget('bugtype', checkname), target)
                    redispool.pfadd(checkname, target)
                    db.session.add(record)
                db.session.commit()
            scanner.POCScan()
            # time.sleep(0.5)
    except Exception as e:
        print(e)
def POCScanConsole(attackurl, url):
    """
    Run every user-defined POC from the POC table against *url*.

    A POC matches when the response is not a 404 and the POC's expected
    expression appears in the body; matches are persisted to BugList.

    :param attackurl: original target, stored as BugList.oldurl
    :param url: base url the poc rule is appended to
    """
    try:
        with app.app_context():
            # Query inside the app context so Flask-SQLAlchemy has one available.
            allpoc = POC.query.all()
            for poc in allpoc:
                rep = requests.get(url + poc.rule,
                                   headers=core.GetHeaders(), timeout=2)
                if rep.status_code != 404 and poc.expression in rep.text:
                    # BUG FIX: payload was `url + poc` (str + model instance),
                    # which raised TypeError on the first hit and the broad
                    # except silently aborted the remaining POCs.
                    bug = BugList(oldurl=attackurl, bugurl=url, bugname=poc.name,
                                  buggrade=redispool.hget('bugtype', poc.name),
                                  payload=url + poc.rule, bugdetail=rep.text)
                    redispool.pfadd(redispool.hget('bugtype', poc.name), url)
                    redispool.pfadd(poc.name, url)
                    db.session.add(bug)
            db.session.commit()
    except Exception as e:
        print(e)
def cmspoc_check(oldurl, cmsurl):
    """Run every CMS POC against *cmsurl* in the thread pool and persist hits."""
    poc_holder = pocdb_pocs(cmsurl)
    pocs = poc_holder.cmspocdict
    cprint(">>>CMS漏洞扫描URL: " + cmsurl + "\t可用POC个数[" + str(len(pocs)) + "]", "magenta")
    cmspool.map(cmsprint, pocs.keys())
    print("\r")
    outcomes = cmspool.map(cmscheck, pocs.values())
    cmspool.close()
    cmspool.join()
    try:
        with app.app_context():
            for vulnerable, bugurl, bugname, payload, bugdetail in outcomes:
                if not vulnerable:
                    continue
                record = BugList(oldurl=oldurl, bugurl=bugurl, bugname=bugname,
                                 buggrade=redispool.hget('bugtype', bugname),
                                 payload=payload, bugdetail=bugdetail)
                db.session.add(record)
                redispool.pfadd(redispool.hget('bugtype', bugname), bugurl)
                redispool.pfadd(bugname, bugurl)
            db.session.commit()
    except Exception as exc:
        print(exc)
def POCmanage():
    """POC management view: GET lists existing POCs, POST registers a new one."""
    bugbit, bugtype = core.GetBit()
    poclist = POC.query.order_by(POC.id.desc()).all()
    if request.method != 'GET':
        # Persist the submitted POC and record its severity grade in redis.
        pocname = request.form.get('pocname')
        rule = request.form.get('rule')
        expression = request.form.get('expression')
        buggrade = request.form.get('buggrade')
        redispool.hset('bugtype', pocname, buggrade)
        poc = POC(name=pocname, rule=rule, expression=expression)
        redispool.pfadd("poc", pocname)
        db.session.add(poc)
        db.session.commit()
        # Re-query so the freshly added POC shows up in the rendered list.
        poclist = POC.query.order_by(POC.id.desc()).all()
    return render_template('pocmanage.html', bugbit=bugbit,
                           bugtype=bugtype, poclist=poclist)
def WebLogicScan(self):
    """Run the weblogic vulnerability checks against self.domain and persist hits."""
    redispool.append("runlog", "正在进行{}weblogic漏洞检测!\n".format(self.url))
    print("正在进行weblogic漏洞检测!")
    try:
        # NOTE(review): `WebLogicScan` here resolves to the module-level name
        # (presumably an imported module/class), not this method — confirm import.
        findings = WebLogicScan.run(self.domain)
        with app.app_context():
            for vulnerable, bugurl, bugname, bugdetail in findings:
                if not vulnerable:
                    continue
                record = BugList(oldurl=self.domain, bugurl=bugurl,
                                 bugname=bugname,
                                 buggrade=redispool.hget('bugtype', bugname),
                                 payload=bugurl, bugdetail=bugdetail)
                redispool.pfadd(redispool.hget('bugtype', bugname), bugurl)
                redispool.pfadd(bugname, bugurl)
                db.session.add(record)
            db.session.commit()
    except Exception as exc:
        print(exc)
def SZheScan(url):
    """
    Top-level scan entry for one target url.

    Pipeline: filter the input -> collect base info -> branch into IP- or
    domain-specific deep info gathering -> weblogic / AngelSword checks ->
    depth-2 crawl -> generic vuln scan of crawled urls -> update redis
    scan counters and the run log.
    """
    try:
        # Filter the input entry point (normalizes the url and fetches it once).
        url, attackurl, rep = inputfilter(url)
        # No url after filtering means it is invalid or timed out; abort this scan.
        if not url:
            print("Not Allow This URL")
            return
        redispool.hset("targetscan", "nowscan", attackurl)
        with app.app_context():
            # Collect base information by instantiating a GetBaseMessage object.
            baseinfo = GetBaseMessage(url, attackurl, rep)
            # Regex decides IP vs domain, selecting the deep-info collector to use.
            pattern = re.compile('^\d+\.\d+\.\d+\.\d+(:(\d+))?$')
            # Strip the port when the IP carries one.
            if pattern.findall(url) and ":" in url:
                infourl = url.split(":")[0]
            else:
                infourl = url
            if pattern.findall(url):
                boolcheck = True
                ipinfo = IPMessage(infourl)
            else:
                boolcheck = False
                domaininfo = DomainMessage(url)
            info = BaseInfo(url=url, boolcheck=boolcheck,
                            status=baseinfo.GetStatus(),
                            title=baseinfo.GetTitle(), date=baseinfo.GetDate(),
                            responseheader=baseinfo.GetResponseHeader(),
                            Server=baseinfo.GetFinger(),
                            portserver=baseinfo.PortScan(),
                            sendir=baseinfo.SenDir())
            db.session.add(info)
            # Flush so the autogenerated primary key is available before commit.
            db.session.flush()
            infoid = info.id
            db.session.commit()
            baseinfo.WebLogicScan()
            baseinfo.AngelSwordMain()
            if boolcheck:
                redispool.pfadd("ip", infourl)
                ipinfo = IPInfo(baseinfoid=infoid,
                                bindingdomain=ipinfo.GetBindingIP(),
                                sitestation=ipinfo.GetSiteStation(),
                                CMessage=ipinfo.CScanConsole(),
                                ipaddr=ipinfo.FindIpAdd())
                db.session.add(ipinfo)
            else:
                redispool.pfadd("domain", infourl)
                domaininfo = DomainInfo(
                    baseinfoid=infoid, subdomain=domaininfo.GetSubDomain(),
                    whois=domaininfo.GetWhoisMessage(),
                    bindingip=domaininfo.GetBindingIP(),
                    sitestation=domaininfo.GetSiteStation(),
                    recordinfo=domaininfo.GetRecordInfo(),
                    domainaddr=domaininfo.FindDomainAdd())
                db.session.add(domaininfo)
            db.session.commit()
            # Default crawl depth is 2 to avoid huge link sets on big sites;
            # adjustable in the settings.
            redispool.append("runlog", "对{}页面进行深度爬取\n".format(attackurl))
            SpiderGetUrl2(attackurl, deepth=2)
            redispool.append("runlog", "对该网站{}爬取到的url进行常规漏扫 :D\n".format(attackurl))
            print("对该网站爬取到的url进行常规漏扫 :D")
            BugScanConsole(url)
            # Decrement the pending-target counter; a non-string reply
            # (presumably a missing key) resets it to "0".
            count = redispool.hget('targetscan', 'waitcount')
            if 'str' in str(type(count)):
                waitcount = int(count) - 1
                redispool.hset("targetscan",
                               "waitcount", str(waitcount))
            else:
                redispool.hset("targetscan", "waitcount", "0")
            redispool.hdel("targetscan", "nowscan")
            # If the newest BugList row belongs to this url, the host is
            # affected: add it to the redis "havebugpc" HyperLogLog.
            firstbugurl = BugList.query.order_by(
                BugList.id.desc()).first().oldurl
            if firstbugurl == url:
                redispool.pfadd("havebugpc", url)
            redispool.append("runlog", "{} scan end !\n".format(url))
            print("{} scan end !".format(url))
            # print(redispool.get('runlog'))
    except Exception as e:
        print(e)
        pass