def link_proxy(proxy):
    proxy_info = proxy.get_proxy_info()
    log.output_log("[proxy] switch server: %s" % proxy_info["server_ip"], True)
    proxy.index += 1
    proxy.start_proxy(proxy_info)
    # keep restarting the tunnel until the proxy address is reachable
    while proxy.get_addr() is False:
        proxy.start_proxy(proxy_info)
        time.sleep(5)
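# For context: get_proxy_info() above presumably walks a server list via
# proxy.index. A minimal sketch of such a round-robin lookup -- the class
# below and its field names are assumptions, not the project's code.
class RoundRobinProxy(object):
    def __init__(self, servers):
        # servers: list of dicts like {"server_ip": ..., "user_name": ...,
        # "client_ip": ..., "port": ..., "pwd": ...}
        self.servers = servers
        self.index = 0

    def get_proxy_info(self):
        # wrap around so the scanner can rotate servers indefinitely
        return self.servers[self.index % len(self.servers)]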
def lines_to_file(self, filename, lines):
    try:
        # "with" closes the file even if a write fails partway through
        with open(filename, "w") as f:
            for line in lines:
                f.write(line + '\n')
    except Exception as e:
        log.output_log("[error] " + str(e), True)
def get_page_size(self, url):
    try:
        req = requests.get(url, timeout=self.timeout)
        # some servers omit the Content-Length header; fall back to the body size
        page_size = int(req.headers["content-length"]) if "content-length" in req.headers else len(req.content)
        return page_size
    except Exception as e:
        log.output_log("[error] " + str(e), True)
        return 0
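# Aside: when only the size matters, a HEAD request can avoid downloading
# the body, falling back to GET when the server omits Content-Length.
# A hedged sketch, not part of the project:
import requests

def page_size(url, timeout=10):
    head = requests.head(url, timeout=timeout, allow_redirects=True)
    if "content-length" in head.headers:
        return int(head.headers["content-length"])
    return len(requests.get(url, timeout=timeout).content)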
def __init_adb_server(self):
    # self._adb_base.restart_server()
    self._adb_base.get_devices()
    if self._adb_base.devices is not None:
        if self._adb_base.connect() is True:
            if self._adb_base.check_root() is False:
                log.output_log("adb is not running as root", True)
            else:
                return True
    return False
def create_adb_shell(self):
    if self.is_init_server is False:
        if self.__init_adb_server() is True:
            self.is_init_server = True
        else:
            log.output_log("init adb server failed", True)
            exit()
    self.adb_list.append(self._adb_base)
    return self._adb_base
def content_2_lines(self, page_source):
    try:
        res_lines = []
        data_lines = page_source.split('\n')
        if len(data_lines) > 1:
            for sub_str in data_lines:
                res_lines += self.ExtractTextTagContent(sub_str)
        return res_lines
    except Exception as e:
        log.output_log("[error] " + str(e), True)
        return []
def test_page_size(self, inject_url1, inject_url2):
    # if another thread already confirmed the vulnerability, stop here
    if check_vul_sign() is True:
        return
    log.output_log("[test] inject url 1=1 " + inject_url1)
    log.output_log("[test] inject url 1=2 " + inject_url2)
    url_ret_page_size = self.web_site.get_page_size(self.url)
    url_ret_page_size_1 = self.web_site.get_page_size(inject_url1)
    url_ret_page_size_2 = self.web_site.get_page_size(inject_url2)
    # "AND 1=1" should leave the page unchanged, "AND 1=2" should not
    if url_ret_page_size == url_ret_page_size_1 and url_ret_page_size != url_ret_page_size_2:
        self.get_server_info()
        self.result_dispose("[%s] [%s] [server:%s + web:%s] %s" % (
            "sqlinject", self.inject_1_equ_2, self.server_info["server"],
            self.server_info["x-powered-by"], inject_url2))
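# The comparison above is a boolean-blind heuristic: an "AND 1=1" payload
# should leave the page identical, while "AND 1=2" should change it. A
# standalone sketch of the same idea (URLs and payloads are illustrative):
import requests

def looks_injectable(base_url, timeout=10):
    size = lambda u: len(requests.get(u, timeout=timeout).content)
    base = size(base_url)
    true_case = size(base_url + "%20AND%201=1")
    false_case = size(base_url + "%20AND%201=2")
    return base == true_case and base != false_case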
def url_scan_go(self):
    if self.url_parse.is_param_url(self.url) is False:
        log.output_log("[*] url needs params %s" % self.url)
        return False
    cookies = self.data_parse.get_cookies(self.cookie_file) if self.cookie_file is not None else None
    # set proxy
    if self.is_proxy == 1:
        from proxy import proxy_switch
        self.proxy = proxy_switch.Proxy()
        proxy_switch.link_proxy(self.proxy)
    self.scan_url(cookies)
def xss_go(self):
    log.output_log("[xss] test url " + self.url, True)
    test_params = self.url_parse.get_params(self.url, DORK_PAYLOAD)
    # ---------------------- multithreaded / single-threaded switch -------------
    # assumes the standard threading.Thread signature (target=..., args=...)
    if IS_MULTI_THREAD is True:
        threads = [Thread(target=self.judge_out,
                          args=(self.url.replace(i, test_params[i]), DORK_PAYLOAD, self.urls))
                   for i in test_params]
        for i in threads:
            i.start()
        for i in threads:
            i.join()
    else:
        for param in test_params:
            url = self.url.replace(param, test_params[param])
            self.judge_out(url, DORK_PAYLOAD, self.urls)
    # self.enc = get_charset(self.url)
    if IS_MULTI_THREAD is True:
        threads = [Thread(target=self.judge_location, args=(i,)) for i in self.urls]
        for i in threads:
            i.start()
        for i in threads:
            i.join()
    else:
        for i in self.urls:
            self.judge_location(i)
    # deduplicate the candidate URLs per location
    for i in self.test_urls:
        if self.test_urls[i]:
            self.test_urls[i] = list(set(self.test_urls[i]))
    if IS_MULTI_THREAD is True:
        threads = [Thread(target=self.test_xss, args=(j, i))
                   for i in self.test_urls for j in self.test_urls[i]]
        for i in threads:
            i.start()
        for i in threads:
            i.join()
    else:
        for i in self.test_urls:
            for j in self.test_urls[i]:
                self.test_xss(j, i)
def connect(self):
    if self.devices is None:
        log.output_log("can't find devices", True)
        return False
    # try to connect up to 3 times
    for i in range(3):
        output = self.connect_remote()
        if 'already connected to' in output:
            return True
        time.sleep(1)
    log.output_log("connect timed out", True)
    return False
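# The three-attempt pattern above generalizes to a small retry helper;
# a sketch (names are mine, not the project's):
import time

def retry(fn, ok, attempts=3, delay=1):
    """Call fn() until ok(result) is truthy or attempts run out."""
    for _ in range(attempts):
        result = fn()
        if ok(result):
            return result
        time.sleep(delay)
    return None

# e.g. retry(adb.connect_remote, lambda out: 'already connected to' in out)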
def get_page_charset(self, page_source):
    try:
        coding = None
        data_lines = page_source.split('\n')
        # matches both styles:
        # '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
        # '<meta charset="utf-8">'
        regex = re.compile(r'<meta[\S\s]+charset *= *["\']?([a-zA-Z-0-9]+)["\']?',
                           re.IGNORECASE)
        for line in data_lines:
            # scan the page line by line
            pattern = regex.search(line)
            if pattern:
                coding = pattern.group(1)
        return coding
    except Exception as e:
        log.output_log("[error] " + str(e), True)
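# Quick check of the charset regex against both meta styles it targets
# (illustrative only):
import re

regex = re.compile(r'<meta[\S\s]+charset *= *["\']?([a-zA-Z-0-9]+)["\']?', re.IGNORECASE)
samples = ('<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />',
           '<meta charset="GBK">')
for line in samples:
    match = regex.search(line)
    print(match.group(1) if match else None)  # -> utf-8, then GBK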
def get_server_info(self):
    log.output_log("[inject url] " + self.url, True)
    self.server_info = self.web_site.get_server_info(self.url)
    log.output_log("[*] server type: " + self.server_info["server"])
    log.output_log("[*] web powered by: " + self.server_info["x-powered-by"])
    log.output_log("[*] please wait......")
def check_site_dir(self, site_root):
    while True:
        test_dir = self.get_payload()
        if test_dir is None:
            break
        test_url = site_root + test_dir
        try:
            req = requests.get(test_url, headers=self.headers, cookies=None, timeout=3)
            status_code = req.status_code
            status = self.status_codes[str(status_code)][0] \
                if str(status_code) in self.status_codes else "Undefined"
            self.result_dispose("[site_dir][%d][%s]%s" % (status_code, str(status), test_url))
        except Exception as e:
            log.output_log("[error] " + str(e), True)
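# self.status_codes is presumably a mapping from status-code strings to
# tuples whose first element is a short label; a hypothetical shape
# consistent with the lookup above:
status_codes = {
    "200": ("OK",),
    "301": ("Moved Permanently",),
    "403": ("Forbidden",),
    "404": ("Not Found",),
}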
def get_page_source(self, url, headers, cookies, times=0):
    if times < 3:
        try:
            req = requests.get(url, headers=headers, cookies=cookies, timeout=self.timeout)
            if req.status_code == 200:
                html = req.text
                req.close()
                return html
            if req.status_code == 403:
                times += 1
                log.output_log("[error] 403, try to connect again (%d)" % times, True)
                proxy_switch.link_proxy(self.proxy)
                # retry through the new proxy and propagate the result
                return self.get_page_source(url, headers, cookies, times)
            return None
        except Exception as e:
            log.output_log("[error] " + str(e), True)
    return None
def select_info(self, tag=1):
    try:
        sql = mysql.MySQL(*self.cloud_db)
        if tag == 1:
            sql_string = "SELECT id, apk_name, url" + \
                         " FROM " + self.db_table + \
                         " WHERE webview_state = 0 LIMIT 1"
        ret_tuple = sql.get_value(sql_string)
        if ret_tuple is not None:
            if tag == 1:
                # mark the row as taken so other workers skip it
                sql_string = "UPDATE " + self.db_table + \
                             " SET webview_state = 1" + \
                             " WHERE id = " + str(ret_tuple[0])
                sql.insert_value(sql_string)
            return ret_tuple
        return None
    except Exception as e:
        log.output_log("[error] " + str(e), True)
        return None
def select_info(self, tag=0):
    try:
        sql = mysql.MySQL(*self.cloud_db)
        if tag == 0:
            sql_string = "SELECT id, file_name, uri" + \
                         " FROM " + self.db_table + \
                         " WHERE xss_state = 0 AND sql_state = 0 LIMIT 1"
        if tag == 1:
            sql_string = ""
        ret_tuple = sql.get_value(sql_string)
        if ret_tuple is not None:
            # mark the row as taken so other workers skip it
            sql_string = "UPDATE " + self.db_table + \
                         " SET xss_state = 1, sql_state = 1" + \
                         " WHERE id = " + str(ret_tuple[0])
            sql.insert_value(sql_string)
            return ret_tuple
        return None
    except Exception as e:
        log.output_log("[error] " + str(e), True)
        return None
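# Both select_info variants claim work with a SELECT followed by a separate
# UPDATE, which can race when several workers poll the same table. One
# hedged alternative is to claim the row first with a worker token; the
# "worker" column below is hypothetical, not part of the real schema.
import uuid

worker = uuid.uuid4().hex
claim = ("UPDATE scan_info SET xss_state = 1, sql_state = 1, worker = '%s' "
         "WHERE xss_state = 0 AND sql_state = 0 LIMIT 1" % worker)
fetch = "SELECT id, file_name, uri FROM scan_info WHERE worker = '%s'" % worker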
def get_page_source_info(self, url, headers, cookies, times=0):
    if times < 3:
        try:
            # test proxy
            # proxy_switch.link_proxy(self.proxy)
            req = requests.get(url, headers=headers, cookies=cookies, timeout=self.timeout)
            if req.status_code == 200:
                # detect the page encoding
                encoding = None
                try:
                    encoding = req.apparent_encoding
                    if encoding is not None:
                        encoding = encoding.lower()
                        encoding = encoding if 'utf' in encoding or 'gbk' in encoding else None
                except Exception as e:
                    log.output_log("[error] " + str(e), True)
                encoding = self.get_page_charset(req.content) if encoding is None else encoding
                req.encoding = "utf-8" if encoding is None else encoding
                html = req.text
                req.close()
                return [html, encoding]
            if req.status_code == 403:
                times += 1
                log.output_log("[error] 403, try to connect again (%d)" % times, True)
                proxy_switch.link_proxy(self.proxy)
                # retry through the new proxy and propagate the result
                return self.get_page_source_info(url, headers, cookies, times)
            return None
        except Exception as e:
            log.output_log("[error] " + str(e), True)
    return None
def insert_info_s(self, insert_info_list, tag=0):
    try:
        sql = mysql.MySQL(*self.cloud_db)
        if tag == 0:
            for insert_info in insert_info_list:
                # NOTE: only single quotes are escaped here; see the
                # parameterized sketch below for a safer variant
                vul_detail = insert_info[3].replace("'", "\\'")
                sql_string = "INSERT INTO " + self.db_table + \
                             "(scan_id, file_name, vul_type, vul_detail, vul_url) values(" + \
                             str(insert_info[0]) + ", '" + \
                             insert_info[1] + "', '" + \
                             insert_info[2] + "', '" + \
                             vul_detail + "', '" + \
                             insert_info[4] + "')"
                sql.insert_value_not_commit(sql_string)
            sql.commit()
        elif tag == 1:
            sql_string = ''
            sql.insert_value(sql_string)
        return True
    except Exception as e:
        log.output_log("[error] " + str(e), True)
        return False
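# Escaping only single quotes still leaves the INSERT above open to
# injection through the other fields. A parameterized sketch with
# MySQLdb-style binding; it assumes direct access to a DB-API connection,
# which the project's mysql.MySQL wrapper may or may not expose, and the
# connection values and sample row are illustrative.
import MySQLdb

conn = MySQLdb.connect(host="localhost", user="scan", passwd="secret", db="vul_db")
cursor = conn.cursor()
stmt = ("INSERT INTO scan_result "
        "(scan_id, file_name, vul_type, vul_detail, vul_url) "
        "VALUES (%s, %s, %s, %s, %s)")
insert_info_list = [(1, "a.php", "xss", "<x55test>", "http://example.com/?q=1")]
for info in insert_info_list:
    cursor.execute(stmt, (info[0], info[1], info[2], info[3], info[4]))
conn.commit()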
def insert_info_s(self, insert_info_list, tag=1):
    try:
        sql = mysql.MySQL(*self.cloud_db)
        if tag == 1:
            for insert_info in insert_info_list:
                vul_code = insert_info["code"].replace("'", "\\'")
                sql_string = "INSERT INTO " + self.db_table + \
                             "(apk_name, vul_activity, vul_method, vul_code) values('" + \
                             insert_info["apk_name"] + "', '" + \
                             insert_info["activity"] + "', '" + \
                             insert_info["method"] + "', '" + \
                             vul_code + "')"
                sql.insert_value_not_commit(sql_string)
            sql.commit()
        elif tag == 2:
            sql_string = ''
            sql.insert_value(sql_string)
        return True
    except Exception as e:
        log.output_log("[error] " + str(e), True)
        return False
def set_proxy(self, ip_address, port):
    try:
        socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, ip_address, port)
        # monkey-patch the socket module so every new connection uses the proxy
        socket.socket = socks.socksocket
        log.output_log("[proxy] switch proxy success! port %d" % port)
        return True
    except Exception as e:
        log.output_log("[error] switch proxy failed!", True)
        log.output_log("[error] " + str(e), True)
        return False
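# Caveat: setdefaultproxy plus replacing socket.socket reroutes every
# subsequent connection in the whole process (requests included).
# Minimal usage sketch with an illustrative local SOCKS5 endpoint:
import socket
import socks  # SocksiPy / PySocks

socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 7070)
socket.socket = socks.socksocket
# from here on, all new sockets go through 127.0.0.1:7070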
def start_proxy(self, proxy_info):
    try:
        # plink -N -D opens an SSH session whose only job is a local SOCKS listener
        command = "%s %s -N -ssh -2 -P 22 -l %s -D %s:%d -pw %s" % (
            self.proxy_info.plink_path, proxy_info["server_ip"],
            proxy_info["user_name"], proxy_info["client_ip"],
            proxy_info["port"], proxy_info["pwd"])
        subprocess.Popen(command)
        log.output_log("[proxy] start proxy success!")
        self.set_proxy(proxy_info["client_ip"], proxy_info["port"])
    except Exception as e:
        log.output_log("[error] start proxy failed!", True)
        log.output_log("[error] " + str(e), True)
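# The plink flags above: -N (no remote command), -D client_ip:port (local
# SOCKS listener), -l user, -pw password. Passing an argument list instead
# of one string avoids shell-quoting surprises with passwords; a sketch
# with purely illustrative values:
import subprocess

args = ["plink", "203.0.113.5", "-N", "-ssh", "-2", "-P", "22",
        "-l", "scanuser", "-D", "127.0.0.1:7070", "-pw", "secret"]
subprocess.Popen(args)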
def scan_url(self, cookies):
    if self.scan_mode & 1:
        log.output_log("[*] test xss...")
        xss = xss_scan.XssScan(self.url, cookies, self.proxy)
        if len(xss.result) > 0:
            pass
    if self.scan_mode & 2:
        log.output_log("[*] test sql inject...")
        sql = sql_scan.SqlScan(self.url, self.proxy)
        if len(sql.result) > 0:
            pass
    if self.scan_mode & 4:
        log.output_log("[*] test site dir...")
        site_dir = site_dir_scan.SiteDirScan(self.url, self.proxy)
        if len(site_dir.result) > 0:
            pass
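# scan_mode is a bitmask: bit 0 = xss, bit 1 = sql injection, bit 2 = site
# dir brute force; modes combine by OR-ing. For example:
XSS, SQL, SITE_DIR = 1, 2, 4

scan_mode = XSS | SQL          # 3: run the xss and sql checks only
assert scan_mode & XSS
assert scan_mode & SQL
assert not scan_mode & SITE_DIR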
def scan_db_urls(self, cookies):
    # result-storage helpers
    vul_db_cfg = config.VulInfoDB()
    vul_db_op = databases.ScanInfo(vul_db_cfg)
    # scan-queue helpers
    scan_db_cfg = config.ScanInfoDB()
    scan_db_op = databases.ScanInfo(scan_db_cfg)
    # main loop: fetch one URL at a time from the queue
    while True:
        scan_info_tuple = scan_db_op.get_scan_uri()
        if scan_info_tuple is None:
            log.output_log("[*] no url in db, please wait......")
            time.sleep(1 * 60 * 60)
            continue
        url, insert_info = scan_info_tuple[2], scan_info_tuple[:-1]
        if self.url_parse.is_param_url(url) is False:
            continue
        log.output_log("[*] begin scan url " + url, True)
        # xss scan
        if self.scan_mode & 1:
            log.output_log("[*] test xss...")
            xss = xss_scan.XssScan(url, cookies, self.proxy)
            if len(xss.result) > 0:
                # parse.output_result(xss.result)  # print / save to file
                xss_info_list = self.data_parse.format_vul_info(xss.result, insert_info)
                # store the findings in the database
                vul_db_op.save_vul_info(xss_info_list)
            else:
                log.output_log("[xss] no xss found")
        # sql scan
        if self.scan_mode & 2:
            log.output_log("[*] test sql inject...")
            sql = sql_scan.SqlScan(url, self.proxy)
            if len(sql.result) > 0:
                # parse.output_result(sql.result)  # print / save to file
                sql_info_list = self.data_parse.format_vul_info(sql.result, insert_info)
                # store the findings in the database
                vul_db_op.save_vul_info(sql_info_list)
            else:
                log.output_log("[sql] no injection found")
        # site dir scan
        if self.scan_mode & 4:
            log.output_log("[*] test site dir...")
            site_dir = site_dir_scan.SiteDirScan(url, self.proxy)
            if len(site_dir.result) > 0:
                pass
def scan_db_urls(self, cookies):
    # xss and sql
    # result-storage helpers
    vul_db_cfg = config.VulInfoDB()
    vul_db_op = vul_info_db.ScanInfo(vul_db_cfg)
    # scan-queue helpers
    scan_db_cfg = config.ScanInfoDB()
    scan_db_op = vul_info_db.ScanInfo(scan_db_cfg)
    # main loop: fetch one URL at a time from the queue
    while True:
        # xss scan
        if self.scan_mode & 1:
            scan_info_tuple = scan_db_op.get_scan_url(1)
            if scan_info_tuple is None:
                log.output_log("[*] no url needs xss scan in db, please wait...")
                time.sleep(2 * 60 * 60)
                continue
            url, insert_info = scan_info_tuple[2], scan_info_tuple[:-1]
            if self.url_parse.is_param_url(url) is False:
                log.output_log("[xss] invalid url, getting next url...")
                continue
            log.output_log("[xss] begin scan url " + url, True)
            # begin scan
            xss = xss_scan.XssScan(url, cookies, self.proxy)
            if len(xss.result) > 0:
                # parse.output_result(xss.result)  # print / save to file
                xss_info_list = self.data_parse.format_vul_info(xss.result, insert_info)
                # store the findings in the database
                vul_db_op.save_vul_info_s(xss_info_list)
            else:
                log.output_log("[xss] no xss found")
        # sql scan
        if self.scan_mode & 2:
            scan_info_tuple = scan_db_op.get_scan_url(2)
            if scan_info_tuple is None:
                log.output_log("[*] no url needs sql scan in db, please wait......")
                time.sleep(2 * 60 * 60)
                continue
            url, insert_info = scan_info_tuple[2], scan_info_tuple[:-1]
            if self.url_parse.is_param_url(url) is False:
                log.output_log("[sqlinject] invalid url, getting next url...")
                continue
            log.output_log("[sqlinject] begin scan url " + url, True)
            """
            # previous scanner, kept for reference
            sql = sql_scan.SqlScan(url, self.proxy)
            if len(sql.result) > 0:
                # parse.output_result(sql.result)  # print / save to file
                sql_info_list = self.data_parse.format_vul_info(sql.result, insert_info)
                # store the findings in the database
                vul_db_op.save_vul_info_s(sql_info_list)
            else:
                log.output_log("[sql] not found inject")
            """
            # new scanner, updated 2015.04.30 by huanghenghui
            sql = MySQLInject.MySQLInject()
            # sql_result = sql.CheckSQLInject("http://special.hi-chic.com/?cat_id=47")
            sql_result = sql.CheckSQLInject(url)
            if sql_result["is_sql_inject"] is not None:
                vul_info = insert_info + (sql_result["vul_type"],
                                          sql_result["vul_detail"],
                                          sql_result["vul_url"])
                # store the finding in the database
                vul_db_op.save_vul_info(vul_info)
            else:
                log.output_log("[sql] no injection found")
        # site dir scan
        if self.scan_mode & 4:
            log.output_log("[*] test site dir...")
            site_dir = site_dir_scan.SiteDirScan(url, self.proxy)
            if len(site_dir.result) > 0:
                pass
def result_dispose(self, str_result, timeout=1):
    # NOTE: on Python 2, Lock.acquire() has no timeout parameter; the
    # argument is interpreted as the "blocking" flag, so this is in effect
    # a blocking acquire (see the timed-acquire sketch below)
    if MUTEX.acquire(timeout):
        self.result.append(str_result)
        MUTEX.release()
    log.output_log(str_result, True)
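# A Python 2 compatible timed acquire, if the one-second timeout above is
# actually wanted (sketch; the poll interval is arbitrary):
import threading
import time

def acquire_with_timeout(lock, timeout, poll=0.05):
    deadline = time.time() + timeout
    while time.time() < deadline:
        if lock.acquire(False):  # non-blocking attempt
            return True
        time.sleep(poll)
    return False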
def _error_log(self, error_string):
    log.output_log("[error] " + error_string)
def result_dispose(self, str_result):
    # record and report only if this thread was the one to set the vul flag
    if set_vul_sign(1) is True:
        self.result.append(str_result)
        log.output_log(str_result, True)
def test_single_payload(self, url, location, payload):
    # if one thread already found the vul, others need not test more payloads
    if check_vul_sign() is True:
        return
    # test payloads
    test_url = url.replace(DORK_PAYLOAD, urllib2.quote(payload))
    log.output_log("[test] %s" % test_url)
    page_source_info = self.web_site.get_page_source_info(test_url, self.headers, self.cookies)
    if page_source_info is None:
        return None
    page_source, self.enc = page_source_info[0], page_source_info[1]
    soup = BeautifulSoup(page_source)
    # marker tag survived between ordinary tags
    if (location in ("betweenCommonTag", "betweenTitle", "betweenTextarea",
                     "betweenXmp", "betweenIframe", "betweenNoscript",
                     "betweenNoframes", "betweenPlaintext")
            and soup.findAll("x55test") and self.confirm_parent_tag(soup)):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    # payload reflected inside a <script> block
    if (location == "betweenScript"
            and (soup.findAll("x55test")
                 or soup.findAll(name="script",
                                 text=re.compile(r"[^\\]%s" % payload.replace("(", "\(").replace(")", "\)"))))):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    # gbk pages: a stray backslash can be swallowed by a wide character
    if (location == "betweenScript" and self.enc == "gbk"
            and soup.findAll(name="script",
                             text=re.compile(r"\\%s" % payload.replace("(", "\(").replace(")", "\)")))):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    if (location == "betweenStyle"
            and (soup.findAll("x55test")
                 or soup.findAll(name="style",
                                 text=re.compile("%s" % payload.replace(".", "\.").replace("(", "\(").replace(")", "\)"))))):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    if (location == "inMetaRefresh"
            and soup.findAll(name="meta",
                             attrs={"http-equiv": "Refresh", "content": re.compile(payload)})):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    if location == "utf-7" and page_source.startswith("+/v8 +ADw-x55test+AD4-"):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    if (location == "inCommonAttr"
            and (soup.findAll("x55test")
                 or soup.findAll(attrs={"x55test": re.compile("x55")}))):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    if (location == "inSrcHrefAction"
            and (soup.findAll(attrs={"src": re.compile("^(%s)" % payload)})
                 or soup.findAll(attrs={"href": re.compile("^(%s)" % payload)})
                 or soup.findAll(attrs={"action": re.compile("^(%s)" % payload)}))):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    if location == "inScript" and self.confirm_in_script(soup, payload):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
    if (location == "inStyle"
            and soup.findAll(attrs={"style": re.compile("%s" % payload.replace(".", "\.").replace("(", "\(").replace(")", "\)"))})):
        self.result_dispose("[xss] [%s] [%s] %s" % (location, payload, test_url))
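# All of the checks above look for the x55test marker surviving in a given
# reflection context. A condensed demo of the simplest case (common-tag
# reflection); it uses bs4 here, while the project itself appears to use
# the older BeautifulSoup 3 API (findAll works in both):
from bs4 import BeautifulSoup

html = '<div>hello <x55test>probe</x55test></div>'
soup = BeautifulSoup(html, "html.parser")
print(bool(soup.findAll("x55test")))  # True: the injected tag was not escaped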