def run(self, url, html):
    """Probe *url* for SQL injection.

    Two phases:
      1. Error-based: append the URL-encoded sequence )('" and scan the
         response for DBMS error messages.
      2. Boolean-blind: a true condition (n=n) must reproduce the original
         page while a false condition (n=n+1) must change it.

    Args:
        url:  target URL; must carry a query string to be injectable here.
        html: page body supplied by the scan framework (unused).

    Returns:
        True when an injection indicator is found, else False.
    """
    # Only URLs with a query string can be tested by appending payloads.
    if "?" not in url:
        return False
    downloader = Download.Downloader()
    BOOLEAN_TESTS = (" AND %d=%d", " OR NOT (%d=%d)")
    # Regular expressions used for DBMS recognition based on error message response.
    DBMS_ERRORS = {
        "MySQL": (r"SQL syntax.*MySQL", r"Warning.*mysql_.*", r"valid MySQL result", r"MySqlClient\."),
        "PostgreSQL": (r"PostgreSQL.*ERROR", r"Warning.*\Wpg_.*", r"valid PostgreSQL result", r"Npgsql\."),
        "Microsoft SQL Server": (r"Driver.* SQL[\-\_\ ]*Server", r"OLE DB.* SQL Server", r"(\W|\A)SQL Server.*Driver", r"Warning.*mssql_.*", r"(\W|\A)SQL Server.*[0-9a-fA-F]{8}", r"(?s)Exception.*\WSystem\.Data\.SqlClient\.", r"(?s)Exception.*\WRoadhouse\.Cms\."),
        "Microsoft Access": (r"Microsoft Access Driver", r"JET Database Engine", r"Access Database Engine"),
        "Oracle": (r"\bORA-[0-9][0-9][0-9][0-9]", r"Oracle error", r"Oracle.*Driver", r"Warning.*\Woci_.*", r"Warning.*\Wora_.*"),
        "IBM DB2": (r"CLI Driver.*DB2", r"DB2 SQL error", r"\bdb2_\w+\("),
        "SQLite": (r"SQLite/JDBCDriver", r"SQLite.Exception", r"System.Data.SQLite.SQLiteException", r"Warning.*sqlite_.*", r"Warning.*SQLite3::", r"\[SQLITE_ERROR\]"),
        "Sybase": (r"(?i)Warning.*sybase.*", r"Sybase message", r"Sybase.*Server message.*"),
    }
    # Phase 1: append %29%28%22%27 -- i.e. )('" -- to break out of the SQL
    # statement and provoke a DBMS error page.
    _url = url + "%29%28%22%27"
    _content = downloader.get(_url)
    if _content is not None:  # download may fail; don't re.search(None)
        for dbms, regexes in DBMS_ERRORS.items():
            for regex in regexes:
                if re.search(regex, _content):
                    print("sql found: %s" % url)
                    return True
    # Phase 2: boolean-blind comparison. The baseline must be the CLEAN
    # url, not the error-payload _url (cf. the standalone test script).
    content = {}
    content["origin"] = downloader.get(url)
    for test_payload in BOOLEAN_TESTS:
        RANDINT = random.randint(1, 255)
        _url = url + test_payload % (RANDINT, RANDINT)
        content["true"] = downloader.get(_url)
        _url = url + test_payload % (RANDINT, RANDINT + 1)
        content["false"] = downloader.get(_url)
        if content["origin"] == content["true"] != content["false"]:
            print("sql found: %s" % url)
            return True
    return False
def run(self, url, html):
    """Probe every parameter of *url* for reflected XSS.

    common.urlsplit() expands the URL into one variant per parameter with
    the placeholder "my_Payload"; each variant is tested with every entry
    of the module-level `payload` list.

    Args:
        url:  target URL.
        html: page body supplied by the scan framework (unused).

    Returns:
        True when a payload is reflected in the response, else False.
    """
    downloader = Download.Downloader()
    urls = common.urlsplit(url)
    if urls is None:
        return False
    for _urlp in urls:
        for _payload in payload:
            # Each URL parameter is split out and tested individually.
            _url = _urlp.replace("my_Payload", _payload)
            print("[xss test]:", _url)
            _str = downloader.get(_url)
            if _str is None:
                return False
            if _str.find(_payload) != -1:
                print("xss found:%s" % url)
                # A reflected payload means the test succeeded: report True
                # (the original returned False here, contradicting the
                # sibling implementation that returns True on a hit).
                return True
    return False
def run(self, url, html):
    """Probe every parameter of *url* for reflected XSS.

    Each parameter slot produced by common.urlsplit() carries the
    placeholder "my_Payload", which is replaced by every candidate in the
    module-level `payload` list.

    Args:
        url:  target URL (may be None; then nothing is tested).
        html: page body supplied by the scan framework (unused).

    Returns:
        True when a payload is reflected in the response, else False.
    """
    if url is None:
        return False
    downloader = Download.Downloader()
    urls = common.urlsplit(url)
    if urls is None:
        return False
    for _urlp in urls:
        for _payload in payload:
            _url = _urlp.replace('my_Payload', _payload)
            # Log the concrete payload URL under test (the original logged
            # the base url, which hides which variant was being tried).
            print("[xss test]:", _url)
            _str = downloader.get(_url)
            if _str is None:
                return False
            if _str.find(_payload) != -1:
                return True
    return False
def run(self, url, html):
    """Test every parameter of *url* for reflected XSS and record hits.

    Results are reported via print and websec.output.add_list; the method
    itself returns False on the first hit and when splitting or a download
    fails (it does not signal success through its return value).

    Args:
        url:  target URL.
        html: page body supplied by the scan framework (unused).
    """
    downloader = Download.Downloader()
    candidates = common.urlsplit(url)
    if candidates is None:
        return False
    for template in candidates:
        for injected in payload:
            # Split each URL parameter out and test it individually.
            test_url = template.replace("my_Payload", injected)
            print("[xss test]:", test_url)
            body = downloader.get(test_url)
            if body is None:
                return False
            if injected in body:
                print("xss found:%s" % url)
                websec.output.add_list("Xss", "url:%s payload:%s" % (url, test_url))
                return False
class webcms(object):
    """WebCMS fingerprint identification.

    Loads fingerprint records from fuzz/cms.json and probes URL + path for
    each record, matching either a response substring ('re') or the MD5 of
    the response body ('md5'), using a pool of worker threads.

    NOTE(review): the attributes below are class-level and therefore shared
    by ALL instances of webcms — kept as-is to preserve the interface.
    """
    workQueue = queue.Queue()            # fingerprint records still to test
    URL = ""                             # target base URL
    threadNum = 0                        # number of worker threads per batch
    NotFound = True                      # False once a CMS matched or queue drained
    Downloader = Download.Downloader()   # shared HTTP client
    result = ""                          # name of the matched CMS, if any

    def __init__(self, url, threadNum=10):
        """Load cms.json and enqueue every fingerprint record."""
        self.URL = url
        self.threadNum = threadNum
        filename = os.path.join(sys.path[0], 'fuzz', 'cms.json')
        # json.load() lost its 'encoding' argument in Python 3.9; the file
        # is already opened as UTF-8 text, so no extra decoding is needed.
        with open(filename, encoding="utf-8") as f:
            webdata = json.load(f)
        for i in webdata:
            self.workQueue.put(i)

    def getmd5(self, body):
        """Return the hex MD5 digest of a response body (str or bytes)."""
        m2 = hashlib.md5()
        # hashlib only accepts bytes; the downloader may return text.
        if isinstance(body, str):
            body = body.encode("utf-8")
        m2.update(body)
        return m2.hexdigest()

    def th_whatweb(self):
        """Worker: pop one queued fingerprint and test it against the target.

        Returns True on a match, False otherwise; sets NotFound/result as a
        side effect so the other workers and run() stop.
        """
        if (self.workQueue.empty()):
            self.NotFound = False
            return False
        if (self.NotFound is False):
            return False
        cms = self.workQueue.get()
        _url = self.URL + cms['url']
        html = self.Downloader.get(_url)
        print("[whatweb log] : checking %s" % _url)
        if (html is None):
            return False
        if cms['re']:
            # A substring marker identifies the CMS directly.
            if (html.find(cms['re']) != -1):
                self.result = cms['name']
                self.NotFound = False
                return True
        else:
            # Otherwise compare the body hash against the stored fingerprint.
            md5 = self.getmd5(html)
            if (md5 == cms['md5']):
                self.result = cms['name']
                self.NotFound = False
                return True
        return False

    def run(self):
        """Launch batches of threadNum workers until a match or the queue drains."""
        while (self.NotFound):
            th = []
            for i in range(self.threadNum):
                t = threading.Thread(target=self.th_whatweb)
                t.start()
                th.append(t)
            for t in th:
                t.join()
        if (self.result):
            print("[webcms] : %s cms is %s" % (self.URL, self.result))
        else:
            print("[webcms] : %s cms NotFound!" % self.URL)
class webcms(object):
    """WebCMS fingerprint identification (legacy Python 2 variant).

    Loads fingerprint records from data/data.json and probes URL + path for
    each record, matching either a response substring ("re") or the MD5 of
    the response body ("md5"), using a pool of worker threads. Results are
    printed and pushed to the project-level `output` collector.
    """
    # NOTE(review): these are class-level attributes, shared across ALL
    # instances of webcms.
    workQueue = Queue.Queue()
    URL = ""
    threadNum = 0
    NotFound = True
    Downloader = Download.Downloader()
    result = ""

    def __init__(self,url,threadNum = 10):
        # Load fingerprint records from data/data.json and enqueue them.
        self.URL = url
        self.threadNum = threadNum
        filename = os.path.join(sys.path[0], "data", "data.json")
        fp = open(filename)
        webdata = json.load(fp,encoding="utf-8")
        for i in webdata:
            self.workQueue.put(i)
        fp.close()

    def getmd5(self, body):
        # Hex MD5 digest of the response body (used when a record has no
        # substring marker).
        m2 = hashlib.md5()
        m2.update(body)
        return m2.hexdigest()

    def th_whatweb(self):
        # Worker thread: pop one fingerprint and test it against the target.
        # Sets NotFound/result as a side effect so other workers stop.
        if(self.workQueue.empty()):
            self.NotFound = False
            return False
        if(self.NotFound is False):
            return False
        cms = self.workQueue.get()
        _url = self.URL + cms["url"]
        html = self.Downloader.get(_url)
        print "[whatweb log]:checking %s"%_url
        if(html is None):
            return False
        if cms["re"]:
            # Substring marker match identifies the CMS.
            if(html.find(cms["re"])!=-1):
                self.result = cms["name"]
                self.NotFound = False
                return True
        else:
            # Otherwise compare the body hash against the stored fingerprint.
            md5 = self.getmd5(html)
            if(md5==cms["md5"]):
                self.result = cms["name"]
                self.NotFound = False
                return True

    def run(self):
        # Repeatedly launch threadNum workers until a CMS is identified or
        # the fingerprint queue is exhausted, then report the outcome.
        while(self.NotFound):
            th = []
            for i in range(self.threadNum):
                t = threading.Thread(target=self.th_whatweb)
                t.start()
                th.append(t)
            for t in th:
                t.join()
        if(self.result):
            print "[webcms]:%s cms is %s"%(self.URL,self.result)
            output.add("Webcms","[webcms]:%s cms is %s"%(self.URL,self.result))
        else:
            print "[webcms]:%s cms NOTFound!"%self.URL
            output.add("Webcms","[webcms]:%s cms NOTFound!"%self.URL)
def __init__(self, root, threadNum):
    """Initialize the crawler/scanner.

    Args:
        root: start URL of the crawl.
        threadNum: number of worker threads to use.
    """
    self.urls = UrlManager.UrlManager()    # URL queue/bookkeeping (project class)
    self.download = Download.Downloader()  # HTTP fetch helper (project class)
    self.root = root
    self.threadNum = threadNum
def __init__(self, root, threadNum):
    """Initialize the crawler/scanner.

    Args:
        root: start URL of the crawl; its network location is recorded so
            the crawl can stay on the same host.
        threadNum: number of worker threads to use.
    """
    self.urls = UrlManager()               # URL queue/bookkeeping (project class)
    self.download = Download.Downloader()  # HTTP fetch helper (project class)
    self.root = root
    self.threadNum = threadNum
    # NOTE(review): attribute name is misspelled ("domian") but kept, since
    # other modules may read it. Uses the Python 2 urlparse module.
    self.domian = urlparse.urlparse(root).netloc
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""Standalone boolean-blind SQL injection check against a single test URL."""
import re
import random

from lib.core import Download

Downloader = Download.Downloader()
url = "http://127.0.0.1/Less-2/?id=1"

# Baseline: the unmodified page content.
content = {}
content["origin"] = Downloader.get(url)

# A true condition (n=n) must reproduce the baseline page, while a false
# condition (n=n+1) must change it — together these indicate injection.
BOOLEAN_TESTS = (" AND %d=%d", " OR NOT (%d=%d)")
for test_payload in BOOLEAN_TESTS:
    RANDINT = random.randint(1, 255)
    _url = url + test_payload % (RANDINT, RANDINT)
    content["true"] = Downloader.get(_url)
    _url = url + test_payload % (RANDINT, RANDINT + 1)
    content["false"] = Downloader.get(_url)
    if content["origin"] == content["true"] != content["false"]:
        print(url + "存在数字型SQL注入漏洞")