def crawl(self, root_url):
    if not isinstance(root_url, URL):
        root_url_obj = URL(root_url)
    else:
        root_url_obj = root_url
    self._target_domain = root_url_obj.get_host()
    self._url_list.append(root_url_obj)
    root_req = Request(root_url_obj)
    # Breadth-first crawl: the queue holds (request, depth) pairs
    q = Queue()
    q.put((root_req, 0))
    self._start_time = time.time()
    while not q.empty():
        this_req, depth = q.get()
        # Skip blocked file extensions
        if this_req.get_url().get_ext() in self._block_ext:
            continue
        # Stop crawling once any hard limit is hit
        if depth > self.depth_limit:
            print "depth limit break"
            break
        if self.get_discovery_time() > self.time_limit:
            print "time limit break"
            break
        if self.num_reqs > self.req_limit:
            print "reqs num limit break"
            break
        if this_req in self._already_seen_reqs:
            continue
        try:
            self._already_seen_reqs.add(this_req)
            om.info("%s:%s" % (this_req.get_method(), this_req.get_url().url_string))
            response = None
            try:
                response = wcurl._send_req(this_req)
            except Exception, e:
                print str(e)
            if response is None:
                continue
            if is_404(response):
                continue
            # Extract new requests from the response and enqueue them one level deeper
            new_reqs = self._get_reqs_from_resp(response)
            filter_reqs = self._do_with_reqs(new_reqs)
            for req in filter_reqs:
                q.put((req, depth + 1))
            self.num_reqs = len(self._already_seen_reqs)
            om.info("Already sent reqs: %d, left reqs: %d" % (self.num_reqs, q.qsize()))
        except Exception, e:
            traceback.print_exc()
            om.info("ERROR: Can't process url '%s' (%s)" % (this_req.get_url(), e))
            continue
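# Usage sketch (hypothetical: the `Crawler` name and the limit values are
# assumptions; the depth_limit / time_limit / req_limit attributes are the
# ones consulted inside crawl() above):
#
#     spider = Crawler()
#     spider.depth_limit = 3
#     spider.time_limit = 300      # seconds
#     spider.req_limit = 1000
#     spider.crawl("http://example.com/")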
def scan_api(self, api_list):
    '''
    api_item: {"apiurl": "", "method": "", "cookie": "", "data": ""}
    '''
    db_info.set_start_time(
        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    api_report = []
    for item in api_list:
        api_report.append(item.get("apiurl"))
    db_info.set_api(api_report)
    # Build a Request object for every supported method
    for item in api_list:
        method = item.get("method").upper()
        if method == "GET":
            api_url = URL(item.get("apiurl"))
            cookie = item.get("cookie")
            req = Request(api_url, "GET", cookie=cookie)
            self._api_request_list.append(req)
        elif method == "POST":
            api_url = URL(item.get("apiurl"))
            cookie = item.get("cookie")
            post_data = item.get("data")
            req = Request(api_url, "POST", cookie=cookie, post_data=post_data)
            self._api_request_list.append(req)
    # Collect every distinct domain for the POC checks below
    for r in self._api_request_list:
        domain = r.get_url().get_host()
        if domain not in self._api_domain_list:
            self._api_domain_list.append(domain)
        log.info("Check URL:" + r.get_url_string())
        self.scan_request(r)
    for site in self._api_domain_list:
        urlobj = URL(site)
        self.scan_poc(urlobj)
    db_info.set_end_time(
        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
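# Example input (hypothetical values; the api_item schema comes from the
# docstring above):
#
#     api_list = [
#         {"apiurl": "http://example.com/api/user?id=1", "method": "GET",
#          "cookie": "sid=abc123", "data": ""},
#         {"apiurl": "http://example.com/api/login", "method": "POST",
#          "cookie": "sid=abc123", "data": "user=admin&pass=admin"},
#     ]
#     core.scan_api(api_list)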
def post(self, url, headers={}, data=None, **kwargs):
    '''
    Send a POST request and wrap the result in a Response object.
    '''
    default_headers = self.get_default_headers(headers)
    if not isinstance(url, URL):
        url = URL(url)
    requests_response = None
    try:
        requests_response = requests.post(url.url_string,
                                          headers=default_headers,
                                          data=data, **kwargs)
    except Exception:
        # Network failure: fall through and wrap the empty response
        pass
    return self._make_response(requests_response, url)
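# Usage sketch (an assumption: this method is exposed on the shared wcurl
# HTTP client used elsewhere in this project; get_code() is the accessor
# used by scan_finger below):
#
#     resp = wcurl.post("http://example.com/login",
#                       data={"user": "admin", "pass": "admin"})
#     print resp.get_code()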
def scan_config():
    '''
    Parse command-line options and populate the global scan configuration.
    '''
    scan_banner()
    options = parseargs()
    cfg.save("target", URL(options.url))
    cfg.save("profile", options.profile)
    cfg.save("taskid", int(options.taskid))
    cfg.save("mode", options.mode)
    # rpc server configure
    cfg.save("RPC_SERVER_IP", "123.57.242.231")
    if cfg["mode"].lower() == "tweb":
        cfg.save("RPC_SERVER_PORT", 9999)
    else:
        cfg.save("RPC_SERVER_PORT", 8888)
    if cfg["taskid"] > 100000:
        cfg["remote_mysql"] = True
    else:
        cfg["remote_mysql"] = False
    # profile config
    cfg["domain_scan"] = False
    t_profile = cfg.get("profile")
    if t_profile is not None:
        # profile fields: type, rate, useragent, proxy, cookie
        set_profile = json.loads(t_profile)
        if int(set_profile.get("type")) == 2:
            cfg["domain_scan"] = True
            cfg["max_domain_scan"] = 20
        if set_profile.get("useragent"):
            cfg["scan_signature"] = set_profile.get("useragent")
        else:
            cfg["scan_signature"] = "TScanner/1.0"
        cfg["scan_cookies"] = cookie(set_profile.get("cookie"))
        cfg["scan_proxies"] = {'http': set_profile.get("proxy")}
    else:
        cfg["scan_signature"] = "TScanner/1.0"
        cfg["scan_cookies"] = {}
        cfg["scan_proxies"] = {}
    l_profile = {}
    l_profile["useragent"] = cfg["scan_signature"]
    l_profile["cookie"] = cfg["scan_cookies"]
    l_profile["proxies"] = cfg["scan_proxies"]
    db_info.set_profile(l_profile)
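# Example profile JSON (hypothetical values; the field names type, rate,
# useragent, proxy and cookie come from the comment above):
#
#     {"type": "2", "rate": "10", "useragent": "Mozilla/5.0",
#      "proxy": "127.0.0.1:8080", "cookie": "sid=abc123"}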
def read_request(raw):
    '''
    Parse a raw HTTP request string into a Request object.
    '''
    # Drop any pipelined "HTTP/1.1 ..." response that follows the request block
    rlist = raw.split("\n\nHTTP/1.1")
    req_str = rlist[0].strip()
    # The string starts with a response, not a request
    if req_str.startswith("HTTP/1.1"):
        return None
    basereq = HTTPRequest(req_str)
    method = basereq.command
    urlpath = basereq.path
    headers = basereq.headers
    netloc = basereq.headers['host']
    del headers['host']
    url = URL(netloc + urlpath)
    treq = Request(url, method, headers=headers)
    return treq
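# Example (hypothetical raw request; host and path are illustrative):
#
#     raw = ("GET /index.php?id=1 HTTP/1.1\n"
#            "Host: example.com\n"
#            "Cookie: sid=abc123\n")
#     req = read_request(raw)
#     # -> Request for http://example.com/index.php?id=1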
def test_HtmlParser():
    '''
    Verify that HtmlParser extracts every known URL from the test page.
    '''
    req_url = "http://192.168.126.147"
    real_contain_urls = [
        'http://www.w3.org/1999/xhtml',
        'http://192.168.126.147/lnmp.gif',
        'http://lnmp.org',
        'http://192.168.126.147/p.php',
        'http://192.168.126.147/phpinfo.php',
        'http://192.168.126.147/phpmyadmin/',
        'http://lnmp.org',
        'http://bbs.vpser.net/forum-25-1.html',
        'http://www.vpser.net/vps-howto/',
        'http://www.vpser.net/usa-vps/',
        'http://lnmp.org',
        'http://blog.licess.com/',
        'http://www.vpser.net'
    ]
    r = wcurl.get(req_url)
    parser = HtmlParser(r)
    re_urls, tag_urls = parser.urls
    print "Regex URL:"
    for item in re_urls:
        print item
    print "Tag URL:"
    for item in tag_urls:
        print item
    page_urls = []
    page_urls.extend(re_urls)
    page_urls.extend(tag_urls)
    true_num = 0
    for item in real_contain_urls:
        real_url = URL(item)
        if real_url in page_urls:
            true_num += 1
        else:
            print real_url
    assert len(real_contain_urls) == true_num
def security_hole(self, url):
    '''
    Record a vulnerability found by this POC in the vuln manager.
    '''
    if isinstance(url, URL):
        url = url.url_string
    name = self._poc_info['w_vul']['title']
    method = self._poc_info['w_vul']['method']
    link_info = self._poc_info['w_vul']['info']
    rank = self._poc_info['w_vul']['rank']
    v = vuln()
    v.set_url(url)
    v.set_name(name)
    v.set_rank(rank)
    v.set_method(method)
    v.set_link_info(link_info)
    site = URL(v.get_url()).get_host()
    vm.append(self, site, v.get_name(), v)
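# Expected self._poc_info layout (hypothetical example, inferred only from
# the keys read above):
#
#     {"w_vul": {"title": "Discuz FAQ SQL Injection", "method": "GET",
#                "info": "http://example.com/advisory", "rank": "high"}}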
#coding=utf-8
def scan_finger(self, site):
    '''
    Match the target against the fingerprint database and return a list of
    (url, app_name) tuples for every identified application.
    '''
    app_name_list = []
    for item in self._app_db:
        # Each non-comment line is a JSON object keyed by application name
        if item.startswith("#"):
            continue
        dict_item = json.loads(item.strip())
        app_name = "".join(dict_item.keys()).strip()
        app_info = dict_item.get(app_name)
        url = app_info.get("url")
        urlobj = URL(site)
        if self._scan_mode == 1:
            test_url = urlobj.get_uri_string()
            if test_url.endswith("/"):
                target_url = test_url[0:-1] + url
            else:
                target_url = test_url + url
        else:
            test_url = urlobj.get_netloc()
            target_url = urlobj.get_scheme() + "://" + test_url + url
        log.info(target_url)
        try:
            res = wcurl.get(target_url)
        except:
            continue
        dst_headers = res.headers
        dst_body = res.body
        self._http_code = res.get_code()
        try:
            self._server_finger = dst_headers["server"]
        except:
            pass
        if dst_body is None:
            continue
        md5_body = self.md5(dst_body)
        key_list = app_info.keys()
        if "headers" in key_list:
            # Fingerprint rule: ["header-name", "value-regex"]
            app_headers = app_info.get("headers")
            app_key = app_headers[0].lower()
            app_value = app_headers[1]
            if app_key in dst_headers.keys():
                dst_info = dst_headers.get(app_key)
                result = re.search(app_value, dst_info, re.I)
                if result:
                    if "body" in key_list:
                        # Both the header rule and the body rule must match
                        app_body = app_info.get("body")
                        result = re.search(app_body, dst_body, re.I)
                        if result:
                            app_name_list.append((target_url, app_name))
                    else:
                        app_name_list.append((target_url, app_name))
        elif "body" in key_list:
            app_body = app_info.get("body")
            result = re.search(app_body, dst_body, re.I)
            if result:
                app_name_list.append((target_url, app_name))
        elif "md5" in key_list:
            app_md5 = app_info.get("md5")
            if app_md5 == md5_body:
                app_name_list.append((target_url, app_name))
    return app_name_list
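# Example fingerprint entry (hypothetical; self._app_db holds one JSON
# object per line, keyed by application name, with optional "headers"
# [name, value-regex], "body" regex and "md5" rules as matched above):
#
#     {"phpMyAdmin": {"url": "/phpmyadmin/index.php",
#                     "headers": ["Server", "nginx"],
#                     "body": "phpMyAdmin"}}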
def scan_site(self, target):
    '''
    Full site scan: liveness check, optional subdomain/host discovery,
    then a scan worker for every HTTP target.
    '''
    # initial
    self._initial()
    # config info
    target_domain = target.get_domain()
    netloc = target.get_netloc()
    myip = get_my_ip()
    ipaddr = self._ps.get_ipaddr(target_domain)
    db_info.set_entry(target_domain)
    db_info.set_myip(myip)
    db_info.set_ipaddr(ipaddr)
    db_info.set_start_time(
        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    thread.start_new_thread(self.update_progress, ())
    if not self._ps.is_alive(netloc):
        print "Target is not alive!"
        self._progress_status = True
        self.update_scan_status()
        self.end()
    root_domain = target.get_root_domain()
    db_info.set_domain(root_domain)
    if cfg["domain_scan"]:
        domain_list, ipaddr_list = self.scan_domain(root_domain)
        if target_domain not in domain_list:
            domain_list.append(target_domain)
        db_info.set_subdomain(domain_list)
        # ipaddr_list does not include the current target's IP address
        db_info.set_relate_ipaddr(ipaddr_list)
        self._http_task.extend(domain_list)
        self._host_task.extend(ipaddr_list)
        if ipaddr not in self._host_task:
            self._host_task.append(ipaddr)
        for item in self._host_task:
            http_target = self.scan_host(URL(item))
            for t in http_target:
                if t not in self._http_task:
                    self._http_task.append(t)
        scan_count = 0
        for task in self._http_task:
            if scan_count > cfg["max_domain_scan"]:
                break
            url = URL(task)
            self._scan_worker(url)
            scan_count = scan_count + 1
    else:
        http_target = []
        # http_target = self.scan_host(target)
        # nmap is filtered, so scan only the single target itself
        if len(http_target) == 0:
            target_self = target.get_host() + ":" + str(target.get_port())
            http_target.append(target_self)
        for t in http_target:
            self._http_task.append(t)
        for task in self._http_task:
            url = URL(task)
            self._scan_worker(url)
    db_info.set_end_time(
        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    self._progress_status = True
    self.update_scan_status()
postdata = "" postdata += "--5b4729970b854f95b01a01a2e799996f\r\n" if type == "normal": postdata += "Content-Disposition: form-data; name=\"filename\"; filename=\"test.txt\"\r\n\r\n" postdata += testdata + "just for a test!\r\n\r\n" else: postdata += "Content-Disposition: form-data; name=\"filename\"; filename=\"test.txt\"" + fuzzdata + "\r\n\r\n" postdata += "just for a test!\r\n\r\n" postdata += "--5b4729970b854f95b01a01a2e799996f--" print len(postdata) return postdata def get_res_time(self, url, type="normal"): headers = {"Content-Type": "multipart/form-data; boundary=5b4729970b854f95b01a01a2e799996f"} data = self.get_post_data(type) res = requests.post(url, headers=headers, data=data) return res.elapsed def get_class_name(self): return self.__class__.__name__ if __name__ == "__main__": if len(sys.argv) < 2: sys.exit() target = sys.argv[1] print "Scanning Target======>%s" % (target) urlobj = URL(target) p = php_multipart_dos() p.check(urlobj)
log_x_forward_for = info_tuple[9]
log_body = info_tuple[10]
temp_request = log_request.split(" ")
method = temp_request[0][1:].upper()
uri = temp_request[1][0:-8]
url = log_host + uri
headers = {"Referer": "", "Cookie": "", "X-Forward-For": ""}
headers["Referer"] = log_referer
headers["Cookie"] = log_cookie
if log_x_forward_for == '-':
    del headers["X-Forward-For"]
else:
    headers["X-Forward-For"] = log_x_forward_for
post_data = postdata(log_body)
urlobj = URL(url)
if method == "GET":
    req = Request(urlobj, method, headers=headers)
elif method == "POST":
    req = Request(urlobj, method, headers=headers, post_data=post_data)
print req

from teye_core.tcore import tCore
scan_engine = tCore()
scan_engine.scan_request(req)
print vm.get_all_vuln()
        else:
            return False

    def get_name(self):
        '''
        Return the POC class name.
        '''
        return self.__class__.__name__

if __name__ == "__main__":
    from optparse import OptionParser
    usage = "usage: %prog [options] arg"
    parser = OptionParser(usage)
    parser.add_option("-u", "--url", action="store", dest="url",
                      default=None, help="Scan the target url")
    (options, args) = parser.parse_args()
    if not options.url:
        parser.print_help()
        sys.exit(-1)
    else:
        url = options.url
        target_url = URL(url)
        check_inst = discuz_faq_sql()
        check_inst.check(target_url)
        print vm.get_all_vuln()
def TestURL():
    url = URL("http://www.anquanbao.com/book/index.php?id=1#top")
    assert url.get_host() == "www.anquanbao.com"
    print url.get_port()
    assert url.get_port() == 80
    assert url.get_path() == "/book/index.php"
    assert url.get_filename() == "index.php"
    assert url.get_ext() == "php"
    assert url.get_fragment() == "top"
    url = URL("http://www.anquanbao.com/book")
    print url.get_filename()
    url = URL("http://www.anquanbao.com/book/")
    print url.get_filename()