def useMasscanTCP(self):
    """Scan self.host's common TCP ports with masscan.

    masscan can miss ports on a single pass, so the scan runs twice and the
    union of both results is kept.

    Returns:
        list[str]: open TCP port numbers (e.g. ['80', '443']); empty on error.
    """
    mylog('hostprint', True).log.info(pyfancy().green(
        '[+]执行masscan TCP端口扫描: {}'.format(self.host)))
    try:
        # BUGFIX: the port spec is now built from adjacent literals, so no
        # line-continuation whitespace can leak into the masscan argument;
        # the duplicated 5432 entry was also removed.
        cmd = [
            'masscan', '-sS', '-Pn',
            '-p21-25,53,80-90,99,110,113,119,121-123,137-139,'
            '170,443-445,456,554,513-514,559,873,888,1080-1099,1200-1212,1234,1243-1255,'
            '1433-1434,1521,2000,2049,2181,2200-2300,2375,2535,3127-3128,3300-3310,3389,'
            '4443-4444,5000-5001,5432,5900-5901,5984,6000,6370-6380,6984,7000-7010,'
            '8000-8200,8443-8449,8880-8900,9000-9001,9043,9080-9100,9200-9210,9300,9668,'
            '9876,9990-10000,10080,11211,12345,16379,18080,20000-20010,22220-23000,26379,'
            '27010-27020,33060,50070',
            self.host
        ]
        pattern = re.compile('[0-9]+/tcp')
        found = set()
        for _ in range(2):  # two passes, union the hits
            out, err = subprocess.Popen(
                cmd, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE).communicate()
            found.update(re.findall(pattern, bytes.decode(out)))
        return [item.replace('/tcp', '') for item in found]
    except Exception as e:
        mylog('hostprint').log.critical(e)
        # BUGFIX: was `return {}` — callers iterate/len() the result, so keep
        # the type consistent with the success path.
        return []
def sendrequestshead(self, url):
    """HEAD-probe one candidate web path and record interesting hits.

    Appends {'url', 'status_code'} to self.webpath for any response whose
    status is not 404/400/412/403. Prints an in-place progress line.
    """
    headers = {'User-Agent': findProxy().randomUA()}
    targeturl = self.url + url
    self.count += 1
    # NOTE(review): 9739 looks like the wordlist size and 90 a padding width
    # for the \r progress line -- confirm against the dictionary in use.
    cprint('#Process: {}\t[{:.2%}]{}\r'.format(
        targeturl, (self.count / 9739), ' ' * (len(targeturl) - 90)),
           'yellow',
           attrs=['bold'],
           end='',
           flush=True)
    sys.stdout.flush()
    try:
        req = requests.head(targeturl,
                            headers=headers,
                            verify=False,
                            timeout=10,
                            allow_redirects=False)
        # BUGFIX: the old inner "url in self._403 and status == 403" branch
        # was unreachable (403 is already excluded here) and was removed.
        if req.status_code not in (404, 400, 412, 403):
            tmpdict = {'url': targeturl, 'status_code': req.status_code}
            mylog('webpath', True).log.info(pyfancy().green(
                '[+]发现web路径: {0}'.format(str(tmpdict))))
            self.webpath.append(tmpdict)
    except:
        # best-effort probe: network errors are deliberately ignored
        pass
    gevent.sleep(0)
def useBuiltwith(self):
    """Fingerprint front-end components of self.url via builtwith.

    Returns:
        dict: component map from builtwith; {} on failure.
    """
    # Globally disable SSL certificate verification so https targets with
    # bad/self-signed certs can still be fingerprinted.
    ssl._create_default_https_context = ssl._create_unverified_context
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行builtwith识别前端组件: {}'.format(self.url)))
    try:
        return builtwith.builtwith(self.url)
    except Exception as e:
        # CONSISTENCY FIX: sibling probes (useWhois, useWappalyzer, ...)
        # log the error and return {} instead of letting it propagate.
        mylog('webprint').log.critical(pyfancy().red(e))
        return {}
def sendrequestsget(self, url):
    """GET-probe one candidate web path, filtering soft-404s by body length.

    Skips responses whose length equals the 404 baseline (self.length) or
    baseline + 32; records the rest in self.webpath unless the status is
    404/400/412/403.
    """
    headers = {'User-Agent': findProxy().randomUA()}
    targeturl = self.url + url
    try:
        req = requests.get(targeturl,
                           headers=headers,
                           verify=False,
                           timeout=10,
                           allow_redirects=False)
        body_len = len(req.text)
        # soft-404 filter: same size as the 404 baseline (or baseline + 32)
        if body_len != self.length and body_len - 32 != self.length:
            # BUGFIX: removed the unreachable "url in self._403 and
            # status == 403" branch (403 is already excluded below).
            if req.status_code not in (404, 400, 412, 403):
                tmpdict = {
                    'url': targeturl,
                    'status_code': req.status_code,
                    'length': body_len
                }
                mylog('webpath', True).log.info(pyfancy().green(
                    '[+]发现web路径: {0}'.format(str(tmpdict))))
                self.webpath.append(tmpdict)
    except:
        # best-effort probe: network errors are deliberately ignored
        pass
    gevent.sleep(0)
def useMasscanUDP(self):
    """Scan self.host's common UDP ports with masscan (two passes, unioned).

    NOTE(review): '-sS' is a TCP-SYN flag and looks redundant alongside
    '-pU:'; kept as-is to preserve the original command line.

    Returns:
        list[str]: open UDP port numbers; empty on error.
    """
    mylog('hostprint', True).log.info(pyfancy().green(
        '[+]执行masscan UDP端口扫描: {}'.format(self.host)))
    try:
        # BUGFIX: port spec built from adjacent literals so no
        # line-continuation whitespace leaks into the masscan argument.
        cmd = [
            'masscan', '-sS', '-Pn',
            '-pU:20-25,79,110,123,137-139,161,180,513-514,559,666,999,'
            '1011-1032,1042-1054,1200-1201,1342-1349,2000-2002,3333,6666,26274,26374,26444,26573,'
            '27184,27444,29589,29891,30103,31320-31340,34555,35555',
            self.host
        ]
        pattern = re.compile('[0-9]+/udp')
        found = set()
        for _ in range(2):  # two passes, union the hits
            out, err = subprocess.Popen(
                cmd, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE).communicate()
            found.update(re.findall(pattern, bytes.decode(out)))
        return [item.replace('/udp', '') for item in found]
    except Exception as e:
        mylog('hostprint').log.critical(e)
        # BUGFIX: was `return {}` — keep the type consistent with success.
        return []
def jsfinder_routine(self, url):
    """Extract subdomains reachable from one url and merge them into self.domains.

    self.domains is de-duplicated after each merge.
    """
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]搜集目标url: {0}'.format(url)))
    judge = judgement(url)
    subdomain = judge.giveresult(judge.find_by_url(),
                                 urllib.parse.urlparse(url)[1])
    # BUGFIX: removed stray debug `print(subdomain)` that bypassed the
    # logging facility used everywhere else in this module.
    self.domains += subdomain
    self.domains = list(set(self.domains))
    gevent.sleep(0)
def routineudp(self, port):
    """Service-probe one UDP port and persist the result into hostrecon."""
    service = self.hostrecon.useNmapServUDP(port)
    try:
        # SECURITY FIX: parameterized query — the driver handles quoting,
        # replacing the old string-format + pymysql.escape_string approach.
        sqlstr = ('INSERT INTO hostrecon (Project, Host, Port, Service) '
                  'VALUE (%s, %s, %s, %s)')
        db().execute(sqlstr, (self.project, self.host, port, str(service)))
    except Exception as e:
        mylog('hostprint').log.critical(pyfancy().red(e))
def useWhois(self):
    """Look up whois registration data for the target's domain; {} on failure."""
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行whois检查注册信息: {}'.format(self.url)))
    domain = judgement(self.url).urlSplit()[1]
    try:
        return whois.whois(domain)
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
        return {}
def sendrequesthead(self, url):
    """HEAD-check a url; return it when the server answers 200, else None."""
    probe_headers = {
        'User-Agent': findProxy().randomUA(),
        'X-Forwarded-For': findProxy().randomXFF(),
    }
    try:
        resp = requests.head(url, headers=probe_headers, timeout=20,
                             verify=False)
        return url if resp.status_code == 200 else None
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
def runexplore(self, url):
    """Run all web-fingerprinting probes for url and persist one webrecon row.

    Gathers CDN/DNS/header/whois/component fingerprints from the `explore`
    helper and inserts them (stringified) into the webrecon table.
    """
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行web信息收集: {}'.format(url)))
    runApp = explore(url)
    cdnheader = runApp.useCDNHeader()
    dig = runApp.useDig()
    getheaders = runApp.header
    # BUGFIX: renamed locals — the old names `whois`/`builtwith` shadowed
    # the imported whois and builtwith modules.
    whois_info = runApp.useWhois()
    builtwith_info = runApp.useBuiltwith()
    mycdn = runApp.myCdnWaf()
    wappalyzer = runApp.useWappalyzer()
    whatweb = runApp.useWhatweb()
    hsec = runApp.hsecscan()
    '''网页版模板 (disabled HTML-report template)
    webinfo_html = '{0}{1}{2}{3}{4}{5}{6}{7}{8}{9}{10}{11}'.format(
        self.html_start(), self.webinfo_header(),
        self.webinfo_template('通用CDN检测', cdnheader),
        self.webinfo_template('Dig CDN', dig),
        self.webinfo_template('Get Headers', getheaders),
        self.webinfo_template('Whois', whois_info),
        self.webinfo_template('builtwith', builtwith_info),
        self.webinfo_template('CDN/WAF Detect', mycdn),
        self.webinfo_template('Wappalyzer', wappalyzer),
        self.webinfo_template('Whatweb', whatweb),
        self.webinfo_template('headers sec', hsec),
        self.html_end())
    dirpath = os.path.join(GlobalConf().progpath['location'],
                           'Heaven_Hell/webrecon')
    if not os.path.isdir(dirpath):
        os.makedirs(dirpath)
    filename = '{0}_{1}.html'.format(
        urlparse(url)[1],
        time.strftime('%Y-%m-%d_%H_%M_%S', time.localtime(time.time())))
    pathname = os.path.join(dirpath, filename)
    with open(pathname, 'w') as f:
        f.write(webinfo_html)
    mylog('webprint', True).log.info(pyfancy().light_cyan(
        '[+]web信息写入文件: {}'.format(pathname)))
    '''
    # Persist into the database.
    iprecon = judgement(self.url).iplocation()
    # SECURITY FIX: parameterized insert replaces the string-format +
    # pymysql.escape_string construction; str() keeps the stored text
    # representation identical to the old .format() behaviour.
    sqlstr = ('INSERT INTO webrecon (Project, URL, cdnheader, Dig, Headers, '
              'Whois, Builtwith, Mycdn, wappalyzer, Whatweb, Hsec, Iprecon) '
              'VALUE (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)')
    db().execute(sqlstr,
                 (self.project, self.url, str(cdnheader), str(dig),
                  str(getheaders), str(whois_info), str(builtwith_info),
                  str(mycdn), str(wappalyzer), str(whatweb), str(hsec),
                  str(iprecon)))
    mylog('webprint', True).log.info(pyfancy().green(
        '[*]结束web信息收集: {}'.format(url)))
def useNmapServTCP(self, port):
    """Version-probe one TCP port of self.host with nmap -sV.

    Falls back to a plain (no -sV) scan with a shorter timeout if the
    version probe raises. Returns nmap's stdout as text either way.
    """
    mylog('hostprint', True).log.info(pyfancy().green(
        '[+]执行nmap TCP端口服务探测: {0}:{1}'.format(self.host, port)))
    try:
        probe = [
            'nmap', '-sV', '-Pn', '--scan-delay', '2', '--host-timeout',
            '2m', '--version-intensity', '6', self.host, '-p', port
        ]
        raw, _ = subprocess.Popen(probe, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()
        return bytes.decode(raw)
    except Exception:
        fallback = ['nmap', '--host-timeout', '1m', self.host, '-p', port]
        raw, _ = subprocess.Popen(fallback, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()
        return bytes.decode(raw)
def useWhatweb(self):
    """Run whatweb (aggression 3) against self.url; returns its stdout text,
    or '' when the tool could not be executed."""
    result = ''
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行whatweb识别后端组件: {}'.format(self.url)))
    try:
        proc = subprocess.Popen(
            ['whatweb', '-a', '3', '--color=never', self.url],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        raw, _ = proc.communicate()
        result = bytes.decode(raw)
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
    return result
def useDig(self):
    """Run `dig` on the target's domain; returns its lowercased, stripped
    stdout, or the literal string 'NULL' on failure."""
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行dig识别: {}'.format(self.url)))
    domain = judgement(self.url).urlSplit()[1]
    try:
        proc = subprocess.Popen(['dig', domain], stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        raw, _ = proc.communicate()
        return bytes.decode(raw).lower().strip()
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
        return 'NULL'
def useWappalyzer(self):
    """Fingerprint self.url with Wappalyzer.

    Returns:
        The non-empty fingerprint collection from Wappalyzer.analyze, or {}
        when nothing was detected or an error occurred.
    """
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行Wappalyzer识别目标指纹: {}'.format(self.url)))
    try:
        wappalyzer = Wappalyzer.latest()
        webpage = WebPage.new_from_url(self.url)
        webprints = wappalyzer.analyze(webpage)
        if len(webprints) > 0:
            return webprints
        return {}
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
        # BUGFIX: was `return out` with out='' (a str) — inconsistent with
        # the dict/set returned on success; return {} like the empty case.
        return {}
def sendrequestget(self, url):
    """GET-check a url; return it on a 200 whose body length differs from
    the 404 baseline (self.length, allowing a +32 offset), else None."""
    probe_headers = {
        'User-Agent': findProxy().randomUA(),
        'X-Forwarded-For': findProxy().randomXFF(),
    }
    try:
        resp = requests.get(url, headers=probe_headers, timeout=20,
                            verify=False)
        if resp.status_code == 200:
            body_len = len(resp.text)
            if body_len != self.length and body_len - 32 != self.length:
                return url
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
def useCDNHeader(self):
    """Heuristic CDN detection: True when any known cache/CDN response
    header name appears in self.header, False otherwise (or on error)."""
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行通用CDN识别: {}'.format(self.url)))
    try:
        cdn_headers = (
            'X-CDN', 'via', 'x-cache', 'x-swift-cachetime',
            'X-Cache-Lookup', 'X-Via', 'Via', 'X-Via-CDN', 'X-Cdn',
            'X-Cache', 'CDN-Cache', 'CDN-Server', 'X-Cdn-Srv', 'Cdn',
            'CDN', 'Cache-Control', 'X-Cache-Error', 'X-Upper-Cache',
            'X-Cacheable', 'X-Cacheable-status', 'X-Status', 'X-DNS',
            'X-Proxy', 'CacheStatus', 'X-Fastcgi-Cache', 'X-Backend',
            'X-PingBack', 'X-Executed-By', 'X-Front', 'X-Server',
            'CDN-Node', 'X-Rack-Cache', 'X-Request-Id', 'X-Runtime',
        )
        return any(name in self.header for name in cdn_headers)
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
        return False
def execute(self, sql, param=None):
    """Run one SQL statement and return the fetched rows.

    Args:
        sql: the statement, optionally with %s placeholders.
        param: optional parameter tuple forwarded to cursor.execute.

    Returns:
        The cursor's fetchall() result on success; [] when the connection
        or the statement fails.
    """
    if not self.connectdb():
        # BUGFIX: previously fell through and returned None implicitly.
        return []
    try:
        self.cursor.execute(sql, param)
        self.conn.commit()
    except Exception as e:
        mylog('database').log.critical(pyfancy().red(
            '数据库错误: {}'.format(e)))
        # roll back on any statement error
        self.conn.rollback()
        self.close()
        # BUGFIX: on failure the old code still logged "SQL已执行" and
        # fetched from the failed cursor; bail out cleanly instead.
        return []
    mylog('database').log.info(pyfancy().green(
        'SQL已执行: {}'.format(sql)))
    data = self.cursor.fetchall()
    self.close()
    return data
def sendproxyrequestget(self, url):
    """GET-check a url through a randomly chosen proxy from self.proxies;
    return the url on a 200, else None."""
    probe_headers = {
        'User-Agent': findProxy().randomUA(),
        'X-Forwarded-For': findProxy().randomXFF(),
    }
    chosen = random.choice(self.proxies)
    proxies = {'http': 'http://{}'.format(chosen)}
    try:
        resp = requests.get(url, proxies=proxies, headers=probe_headers,
                            timeout=20, verify=False)
        return url if resp.status_code == 200 else None
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
def connectdb(self):
    """Open a connection + DictCursor to the SatanSword database.

    Sets self.conn / self.cursor on success. Returns True on success,
    False (after logging) when the connection could not be established.
    """
    conn_kwargs = dict(
        host=self.host,
        port=self.port,
        user=self.user,
        password=self.password,
        db='SatanSword',
        charset='utf8mb4',
        cursorclass=pymysql.cursors.DictCursor,
    )
    try:
        self.conn = pymysql.connect(**conn_kwargs)
    except Exception as e:
        mylog('database').log.critical('[-]数据库连接失败: {}'.format(e))
        return False
    self.cursor = self.conn.cursor()
    mylog('database').log.info(pyfancy().green('[+]数据库连接成功'))
    return True
def myCdnWaf(self):
    """Match self.header against the local CDN/WAF fingerprint database.

    A vendor matches when one of its fingerprint header names is present
    and its fingerprint value is a substring of the response header value.
    Returns a de-duplicated list of vendor names ([] on error).
    """
    matched = []
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行自定义CDN/WAF识别: {}'.format(self.url)))
    fingerprints = cdnwafidentity().cdnwafdb
    try:
        # compare the flat response-header dict against each vendor's
        # {header-name: expected-substring} fingerprint dict
        for vendor, sigs in fingerprints.items():
            for header_name in self.header:
                if header_name in sigs:
                    if sigs[header_name] in self.header[header_name]:
                        matched.append(vendor)
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
    return list(set(matched))
def useScript(self, port):
    """Run the custom service-detection poc scripts against self.host:port.

    Loads poc names from the hostprint table, executes them concurrently
    via self.pocexec, and collects the entries of self.prints flagged
    isService.

    Returns:
        list: the matched service prints (BUGFIX: the old code computed
        this list but only print()ed it and returned None).
    """
    self.tport = port
    mylog('hostprint', True).log.info(pyfancy().green(
        '[+]执行自定义脚本探测系统服务: {}'.format(self.host)))
    pool = Pool(20)
    servlist = []
    self.loadmodule()
    try:
        res = db().execute('SELECT servicepoc FROM hostprint')
        poclist = [item['servicepoc'] for item in res]
        tasks = [pool.spawn(self.pocexec, item) for item in poclist]
        gevent.joinall(tasks)
        servlist = [p for p in self.prints if p['isService']]
        # BUGFIX: stray debug print(servlist) removed; result is returned
        # and already logged by the poc machinery.
    except Exception as e:
        mylog('hostprint').log.critical(e)
    return servlist
def runpocwithcmsname(self, keyword):
    """Load and run every web poc whose vulname matches keyword, then
    persist the collected vulnerabilities into webvulnlist.

    self.vulns is filled by self.pocexec and reset at the end.
    """
    try:
        pool = Pool(self.threads)
        self.loadmodule()
        # SECURITY FIX: parameterized LIKE — keyword is no longer spliced
        # into the SQL string.
        res = db().execute(
            'SELECT poc FROM webexploit WHERE vulname LIKE %s',
            ('%{}%'.format(keyword),))
        poclist = [item['poc'] for item in res]
        mylog('webexploit', True).log.info(pyfancy().green(
            '[+]针对目标:{0} 加载{1} webpoc {2}个'.format(
                self.url, keyword, len(poclist))))
        threads = [pool.spawn(self.pocexec, item) for item in poclist]
        gevent.joinall(threads)
        for vuln in self.vulns:
            # SECURITY FIX: parameterized insert replaces string-format +
            # pymysql.escape_string.
            db().execute(
                'INSERT INTO webvulnlist (url, vulname, vulnurl, isvul, '
                'payload, proof, exception) '
                'VALUE (%s, %s, %s, %s, %s, %s, %s)',
                (self.url, vuln['vulnname'], str(vuln['vulnurl']),
                 vuln['isvul'], str(vuln['payload']), str(vuln['proof']),
                 str(vuln['exception'])))
            mylog('webexploit').log.debug(pyfancy().magenta(
                '[*] {}'.format(json.dumps(vuln, indent=4))))
        self.vulns = []
    except Exception as e:
        mylog('webexploit').log.critical(pyfancy().red(e))
def runpocwithsysname(self, keyword):
    """Load and run every host poc whose vulname matches keyword, then
    persist the collected vulnerabilities into hostvulnlist.

    Pocs are also pushed onto self.queue for the Consumer greenlets;
    self.vulns is filled by the consumers and reset at the end.
    """
    try:
        poclist = list()
        self.loadmodule()
        # SECURITY FIX: parameterized LIKE — keyword is no longer spliced
        # into the SQL string.
        res = db().execute(
            'SELECT poc from hostexploit WHERE vulname like %s',
            ('%{}%'.format(keyword),))
        for item in res:
            poclist.append(item['poc'])
            self.queue.put_nowait(item['poc'])
        mylog('hostexploit', True).log.info(pyfancy().green(
            '[+]针对目标:{0}:{1} 加载{2} hostpoc {3}个'.format(
                self.host, self.port, keyword, len(poclist))))
        threads = [gevent.spawn(self.Consumer, item) for item in poclist]
        gevent.joinall(threads)
        for vuln in self.vulns:
            # SECURITY FIX: parameterized insert; this also closes the old
            # gap where vuln['proof'] went in unescaped.
            db().execute(
                'INSERT INTO hostvulnlist (vulnhost, vulnport, vulnname, '
                'isvul, payload, proof, exception) '
                'VALUE (%s, %s, %s, %s, %s, %s, %s)',
                (vuln['vulnhost'], vuln['vulnport'], vuln['vulnname'],
                 vuln['isvul'], str(vuln['payload']), vuln['proof'],
                 str(vuln['exception'])))
            mylog('hostexploit').log.debug(pyfancy().magenta(
                '[*] {0}'.format(json.dumps(vuln, indent=4))))
        self.vulns = []
    except Exception as e:
        mylog('hostexploit').log.critical(pyfancy().red(e))
def useGetheaders(self):
    """Fetch response headers for self.url.

    Tries GET first, falls back to HEAD; logs the GET error only when the
    fallback also fails. Returns {} when both attempts fail.
    """
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行获取headers: {}'.format(self.url)))
    probe_headers = {
        'User-Agent': findProxy().randomUA(),
        'X-Forwarded-For': findProxy().randomXFF()
    }
    try:
        return requests.get(self.url, headers=probe_headers, timeout=15,
                            verify=False).headers
    except Exception as get_error:
        try:
            return requests.head(self.url, headers=probe_headers,
                                 timeout=15, verify=False).headers
        except:
            mylog('webprint').log.critical(pyfancy().red(get_error))
            return {}
def runexplore(self):
    """Masscan self.host's TCP and UDP ports, then service-probe each open
    port concurrently via routinetcp/routineudp."""
    mylog('hostprint', True).log.info(pyfancy().green(
        '[+]执行host信息收集: {}'.format(self.host)))
    pool = Pool(10)
    # identical flow for both protocols: scan, log, fan out workers
    passes = (
        (self.hostrecon.useMasscanTCP, self.routinetcp),
        (self.hostrecon.useMasscanUDP, self.routineudp),
    )
    for scanner, worker in passes:
        ports = scanner()
        mylog('hostprint', True).log.info(pyfancy().yellow(
            '[+][{0}]{1}'.format(self.host, ports)))
        if len(ports) != 0:
            gevent.joinall([pool.spawn(worker, p) for p in ports])
    mylog('hostprint', True).log.info(pyfancy().green(
        '[*]结束host信息收集: {}'.format(self.host)))
def useCmsprint(self, proxy):
    """Identify the target CMS in two passes against the cmsprint table.

    Pass 1: request known static-file paths and compare their md5 checksums
    against stored fingerprints. Pass 2: request known home-page paths and
    regex-match their stored keywords.

    :param proxy: truthy -> route probes through harvested HTTP proxies
                  (self.proxies is refreshed via findProxy)
    :return: de-duplicated list of matched CMS names
    """
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行cms识别通用系统信息: {}'.format(self.url)))
    urls = list()
    prefix_urls = list()
    cmsname = list()
    """ 提取静态文件md5方式 """
    # Pass 1: static-file md5 fingerprinting.
    sql = "SELECT staticurl FROM cmsprint"
    for item in db().execute(sql):
        prefix_urls.append(item['staticurl'])
    # Drop empty and duplicate entries.
    prefix_urls = list(set(filter(None, prefix_urls)))
    # Combine each path with the base url.
    for item in prefix_urls:
        urls.append(self.url + item)
    # Concurrency pool of 30 greenlets.
    pool = Pool(30)
    if proxy:
        # Harvest and vet proxies once, then probe through them. HEAD is
        # used when the site returns a real 404; otherwise a 404-baseline
        # body length is measured and GET responses are filtered by it.
        proxyclass = findProxy()
        proxyclass.search()
        proxyclass.connectest(self.url)
        self.proxies = proxyclass.proxylist
        if self.check404(self.url):
            checksumlist = list(
                set(filter(None, pool.map(self.sendproxyrequesthead,
                                          urls))))
        else:
            self.get404length(self.url)
            checksumlist = list(
                set(filter(None, pool.map(self.sendproxyrequestget,
                                          urls))))
    else:
        if self.check404(self.url):
            checksumlist = list(
                set(filter(None, pool.map(self.sendrequesthead, urls))))
        else:
            self.get404length(self.url)
            checksumlist = list(
                set(filter(None, pool.map(self.sendrequestget, urls))))
    if checksumlist:
        # Reset and reuse for the reachable paths only.
        prefix_urls = []
        for item in checksumlist:
            prefix_urls.append(urlparse(item)[2])
        cms_set = db().execute(
            'SELECT cmsname, staticurl, checksum FROM cmsprint WHERE staticurl!=""'
        )
        for text in prefix_urls:
            # md5 of the fetched static file vs stored checksum list.
            md5sum = self.getchecksum(text)
            for item in cms_set:
                if md5sum in item['checksum']:
                    cmsname.append(item['cmsname'])
                    mylog('cmsprint').log.debug(pyfancy().blue(
                        '匹配到cms: {0} {1}'.format(item['cmsname'],
                                                 item['checksum'])))
    """ 搜索页面关键字方式 """
    # Pass 2: home-page keyword matching (same probe-selection logic).
    urls = []
    prefix_urls = []
    sql = "SELECT homeurl FROM cmsprint"
    for item in db().execute(sql):
        prefix_urls.append(item['homeurl'])
    # Drop empty and duplicate entries.
    prefix_urls = list(set(filter(None, prefix_urls)))
    # Combine each path with the base url.
    for item in prefix_urls:
        urls.append(self.url + item)
    pool = Pool(30)
    if proxy:
        if self.check404(self.url):
            preglist = list(
                set(filter(None, pool.map(self.sendproxyrequesthead,
                                          urls))))
        else:
            self.get404length(self.url)
            preglist = list(
                set(filter(None, pool.map(self.sendproxyrequestget,
                                          urls))))
    else:
        if self.check404(self.url):
            preglist = list(
                set(filter(None, pool.map(self.sendrequesthead, urls))))
        else:
            self.get404length(self.url)
            preglist = list(
                set(filter(None, pool.map(self.sendrequestget, urls))))
    if preglist:
        # Reset and reuse for the reachable paths only.
        prefix_urls = []
        for item in preglist:
            prefix_urls.append(urlparse(item)[2])
        cms_set = db().execute(
            'SELECT cmsname, homeurl, keyword FROM cmsprint WHERE homeurl!=""'
        )
        for text in prefix_urls:
            for item in cms_set:
                if item['homeurl'] in text:
                    # Keyword regex must also match the page path/body.
                    if self.pregmatch(text, item['keyword']):
                        cmsname.append(item['cmsname'])
                        mylog('cmsprint').log.debug(pyfancy().blue(
                            '匹配到cms: {0} {1} {2}'.format(
                                item['cmsname'], item['homeurl'],
                                item['keyword'])))
    # De-duplicate the collected CMS names.
    cmsname = list(set(cmsname))
    return cmsname
def hsecscan(self):
    """Audit self.header (stringified) for common security-header problems.

    Checks cookie flags (Secure/HttpOnly/Domain/Path/multiple cookies) and
    a fixed list of recommended security headers; returns the findings as
    a list of human-readable strings ([] on error).
    """
    findings = []
    mylog('webprint', True).log.info(pyfancy().green(
        '[+]执行headers安全检查: {}'.format(self.url)))
    try:
        raw = str(self.header)
        # cookie attribute flags
        if not re.search(r'secure;', raw, re.I):
            findings.append('[+]Cookie without Secure flag set')
        if not re.search(r'httponly;', raw, re.I):
            findings.append('[+]Cookie without HttpOnly flag set')
        # cookie scope: domain
        if re.search(r'domain\=\S*', raw, re.I):
            scope = re.findall(r'domain\=(.+?);', raw, re.I)
            if scope:
                findings.append(
                    '[+]Session Cookie are valid only at Sub/Domain: %s' %
                    scope[0])
        # cookie scope: path
        if re.search(r'path\=\S*', raw, re.I):
            scope = re.findall(r'path\=(.+?);', raw, re.I)
            if scope:
                findings.append(
                    '[+]Session Cookie are valid only on that Path: %s' %
                    scope[0])
        # multiple cookies in one header
        if re.search(r'(.+?)\=\S*;', raw, re.I):
            for name in re.findall(r'(.+?)\=\S*;', raw, re.I):
                if name not in ['domain', 'path', 'expires']:
                    findings.append(
                        '[+]Cookie Header contains multiple cookies')
                    break
        # recommended security headers, reported in a fixed order
        missing_checks = (
            (r'x-xss-protection',
             '[+]X-XSS-Protection header missing'),
            (r'x-frame-options',
             '[+]Clickjacking: X-Frame-Options header missing'),
            (r'content-type',
             '[+]Content-Type header missing'),
            (r'strict-transport-security',
             '[+]Strict-Transport-Security header missing'),
            (r'x-content-type-options',
             '[+]X-Content-Type-Options header missing'),
            (r'content-security-policy',
             '[+]Content-Security-Policy header missing'),
            (r'x-permitted-cross-domain-policies',
             '[+]X-Permitted-Cross-Domain-Policies header missing'),
            (r'referrer-policy',
             '[+]Referrer-Policy header missing'),
        )
        for pattern, message in missing_checks:
            if not re.search(pattern, raw, re.I):
                findings.append(message)
    except Exception as e:
        mylog('webprint').log.critical(pyfancy().red(e))
    return findings