def inject(self, req_false, req_true, payload_right, payload_false):
    """Boolean-blind differential probe (single round).

    Sends the always-FALSE and always-TRUE payload requests, strips
    dynamic content (and, for HTML, reduces to visible text), then
    compares each response with the baseline page ``self.text``.
    A finding is appended to ``self.result`` when the TRUE response
    still tracks the baseline (> 0.98) while the FALSE one diverges
    and the two responses differ from each other.

    Returns True when a finding was recorded, otherwise None.
    """
    rf = request(**req_false)
    rt = request(**req_true)
    # Idiom fix: compare with None by identity, not (in)equality.
    if rf is not None and rt is not None:
        if self.dictdata.get("response").get("mime_stated") == "HTML":
            # HTML pages are reduced to their visible text before diffing.
            rf_text = getFilteredPageContent(
                removeDynamicContent(rf.text, self.dynamic))
            rt_text = getFilteredPageContent(
                removeDynamicContent(rt.text, self.dynamic))
        else:
            rf_text = removeDynamicContent(rf.text, self.dynamic)
            rt_text = removeDynamicContent(rt.text, self.dynamic)
        rf_similar = round(similar(rf_text, self.text), 3)
        rt_similar = round(similar(rt_text, self.text), 3)
        rt_rf_similar = round(similar(rf.text, rt.text), 3)
        if rt_rf_similar != 1.0 and rt_similar > rf_similar and rt_similar > 0.98:
            response_rt = response_parser(rt)
            response_rf = response_parser(rf)
            self.result.append({
                "name": self.name,
                "url": self.dictdata.get("url").get("url").split("?")[0],
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "payload": "payload_true:{} payload_false:{}".format(
                        payload_right, payload_false),
                    "similar_rate":
                        "payload_false_rate:{} payload_true_rate:{} payload_true_false_rate:{}".format(
                            rf_similar, rt_similar, rt_rf_similar),
                    "request_true": response_rt.getrequestraw(),
                    "response_true": response_rt.getresponseraw(),
                    "request_false": response_rf.getrequestraw(),
                    "response_false": response_rf.getresponseraw(),
                }
            })
            return True
def inject(self, req_false, req_true, payload_right, payload_false, param_name):
    """Boolean-blind differential probe with multi-round verification.

    Repeats the TRUE/FALSE request pair ``self.verify_count`` times and
    reports only when every round shows the boolean differential (the
    TRUE response tracks the baseline ``self.text``, the FALSE one
    diverges, and the two responses differ from each other).

    Returns True unconditionally; callers rely on the side effect of
    appending to ``self.result``.
    """
    response_rt = b""
    response_rf = b""
    rf_similar = 0
    rt_similar = 0
    rt_rf_similar = 0
    # BUGFIX: the original accumulated loop indices and compared against
    # sum(range(verify_count)); for verify_count <= 1 both sides are 0,
    # so a failed (or never-run) round still reported a finding and then
    # crashed calling .getrequestraw() on b"". Count successful rounds
    # explicitly instead; behavior is unchanged for verify_count >= 2.
    hits = 0
    for _ in range(self.verify_count):
        rf = request(**req_false)
        rt = request(**req_true)
        if rf is None or rt is None:
            break
        if self.dictdata.get("response").get("mime_stated") == "HTML":
            rf_text = getFilteredPageContent(
                removeDynamicContent(rf.text, self.dynamic))
            rt_text = getFilteredPageContent(
                removeDynamicContent(rt.text, self.dynamic))
        else:
            rf_text = removeDynamicContent(rf.text, self.dynamic)
            rt_text = removeDynamicContent(rt.text, self.dynamic)
        rf_similar = round(similar(rf_text, self.text), 3)
        rt_similar = round(similar(rt_text, self.text), 3)
        rt_rf_similar = round(similar(rf.text, rt.text), 3)
        if not (rt_rf_similar != 1.0 and rt_similar > rf_similar
                and rt_similar > 0.98):
            break
        response_rt = response_parser(rt)
        response_rf = response_parser(rf)
        hits += 1
    if hits and hits == self.verify_count:
        self.result.append({
            "name": self.name,
            "url": self.dictdata.get("url").get("url").split("?")[0],
            "level": self.level,  # 0:Low 1:Medium 2:High
            "detail": {
                "vulmsg": self.vulmsg,
                "param": param_name,
                "payload": "payload_true:{} payload_false:{}".format(
                    payload_right, payload_false),
                "similar_rate":
                    "payload_true_rate:{} payload_false_rate:{} payload_true_false_rate:{}"
                    .format(rt_similar, rf_similar, rt_rf_similar),
                "request_true": response_rt.getrequestraw(),
                "response_true": response_rt.getresponseraw(),
                "request_false": response_rf.getrequestraw(),
                "response_false": response_rf.getresponseraw(),
            }
        })
    return True
def verify(self):
    """Look for editor/backup leftovers (.name.swp, name~) next to the
    current file and save the response when a real, non-HTML file is
    found whose content differs from a randomized decoy path."""
    # Restrict the probe to extensions in the accepted list.
    if "." + self.dictdata.get("url").get("extension").lower() not in acceptedExt:
        return
    folder = self.dictdata.get("url").get("path_folder")
    filename = os.path.basename(self.dictdata.get("url").get("path", ""))
    noise = get_random_str(6).lower()
    # (real candidate, randomized decoy) pairs to probe.
    candidates = [
        (os.path.join(folder, "." + filename + ".swp"),
         os.path.join(folder, "." + noise + filename + ".swp")),
        (os.path.join(folder, filename + "~"),
         os.path.join(folder, noise + filename + "~")),
    ]
    probe = {
        "method": "GET",
        "url": "",
        "timeout": 10,
        "verify": False,
        "allow_redirects": False,
    }
    for real_path, decoy_path in candidates:
        probe["url"] = real_path
        resp = request(**probe)
        if resp is None or resp.status_code != 200:
            continue
        if "/html" in resp.headers.get("Content-Type", ""):
            continue
        # Compare against the decoy: a genuine backup file should differ.
        probe["url"] = decoy_path
        decoy_resp = request(**probe)
        if decoy_resp is not None and similar(resp.content, decoy_resp.content) < 0.8:
            self.save(resp)
def verify(self):
    """Replay the recorded request with cookie/token headers removed
    (via ``self.delcookie_token``) and report when the response is still
    near-identical to the recorded one — presumably the resource is
    reachable without the stripped credentials (TODO confirm helper
    semantics against its definition)."""
    # Static assets are not interesting for this check.
    if self.dictdata.get("url").get("extension").lower() in [
            'js', 'css', 'png', 'gif', 'svg'
    ]:
        return
    parser = dictdata_parser(self.dictdata)
    request_headers_forpayload = self.delcookie_token()
    req = {
        "method": self.dictdata.get("request").get("method"),
        "url": parser.getfilepath(),
        "params": parser.getrequestparams_urlorcookie("url"),
        "headers": request_headers_forpayload,
        "data": parser.getrequestbody(),
        "timeout": 10,
        "verify": False,
        "allow_redirects": False,
    }
    r = request(**req)
    # Idiom fix: identity comparison with None.
    if r is not None:
        rate = similar(r.content, parser.getresponsebody())
        if rate > self.similar_min:
            self.result.append({
                "name": self.name,
                "url": parser.url.get("url"),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "similar_rate": rate,
                    "request": parser.getrequestraw(),
                    "response": parser.getresponseraw()
                }
            })
def check_url(self, path, verify=True):
    """Probe *path* under the site base and judge whether it is a real,
    distinct resource.

    Returns:
        (status_code, content_length) on a verified hit;
        the raw body (``r.content``) when ``verify`` is False;
        False otherwise.
    """
    if not path.startswith("/"):
        path = "/" + path
    # url = self.rootpath + path
    url = self.url[:-1] + path
    req = self.parser.generaterequest({"url": url, "method": "GET"})
    r = request(**req)
    if r is not None:
        if verify:
            if r.status_code == 200:
                # Soft-404 detection: too similar to the known error page.
                if self.error_content is not None:
                    if similar(self.error_content,
                               r.content) > self.similar_rate:
                        return False
                # BUGFIX: `args` must be initialized here; the initializer
                # had been commented out, so query-less paths raised a
                # NameError when building path_error below.
                args = ""
                if "?" in path:
                    path, args = path.split("?", 1)
                    args = "?" + args
                ext = ""
                if not path.endswith("/"):
                    # Build a randomized sibling filename with the same
                    # extension and re-attach any query string.
                    dirname = os.path.dirname(path)
                    a, b = os.path.splitext(os.path.basename(path))
                    a = a + get_random_str(4)
                    ext = b
                    path_error = "".join([dirname, a, b]) + args
                else:
                    path_error = path[:-1] + get_random_str(4) + path[-1]
                # Backup-style extensions must not come back as HTML.
                # BUGFIX: default "" so a missing Content-Type header does
                # not raise TypeError on the `in` test.
                if ext in self.exts_bak:
                    if "/html" in r.headers.get("Content-Type", ""):
                        return False
                # The randomized sibling must NOT resemble this response.
                url_ = self.url[:-1] + path_error
                logger.debug("test new url:{}".format(url_))
                req_ = self.parser.generaterequest({"url": url_,
                                                    "method": "GET"})
                r_ = request(**req_)
                if r_ is not None and similar(r_.content,
                                              r.content) < self.similar_rate:
                    # Final gate: compare Content-Type/size against files
                    # discovered earlier in the scan.
                    if self.similar_others(r):
                        return (r.status_code, len(r.content))
        else:
            return r.content
    return False
def check_second(self, path, raw):
    """Request the same path with a 4-char random prefix and return True
    when the response body differs enough from *raw* (rules out
    wildcard/catch-all responses)."""
    probe = {
        "method": "GET",
        "url": self.url + get_random_str(4).lower() + path,
        "timeout": 10,
        "verify": False,
        "allow_redirects": False
    }
    resp = request(**probe)
    if resp is not None and self.max_similar > similar(raw, resp.content):
        return True
def verify(self):
    """nginx misparse probe: when GET <file>.php-style suffixing of an
    existing resource ("<path>/.php") returns the same content as the
    resource itself, report it (NOTE(review): presumably the classic
    nginx/php-fpm PATH_INFO misconfiguration — confirm)."""
    # Honour the directory-depth limit configured in config.py.
    if self.url.count("/") > int(scan_set.get("max_dir", 1)) + 2:
        return
    # Only relevant for nginx servers.
    server_header = self.dictdata["response"]["headers"].get("Server", "")
    if "nginx" not in server_header.lower():
        return
    parse = dictdata_parser(self.dictdata)
    # Report at most once per site root.
    if not self.can_output(parse.getrootpath() + self.name):
        return
    # A random .php path must NOT be served — otherwise everything is 200
    # and the comparison below would be meaningless.
    probe = {
        "method": "GET",
        "url": self.url + get_random_str(5).lower() + ".php",
        "timeout": 10,
        "verify": False,
    }
    baseline = request(**probe)
    if baseline is None or baseline.status_code == 200:
        return
    if self.dictdata["url"]["extension"] != "php":
        target = "index.php"
    else:
        target = self.dictdata["url"]["path"][1:]
    probe = {
        "method": "GET",
        "url": self.url + target,
        "timeout": 10,
        "verify": False,
    }
    normal = request(**probe)
    if normal is None or normal.status_code != 200:
        return
    probe["url"] = self.url + target + "/.php"
    twisted = request(**probe)
    if twisted is None or twisted.status_code != 200:
        return
    if similar(twisted.content, normal.content) > 0.9:
        parse_ = response_parser(twisted)
        self.result.append({
            "name": self.name,
            "url": self.url,
            "level": self.level,  # 0:Low 1:Medium 2:High
            "detail": {
                "vulmsg": self.vulmsg,
                "request": parse_.getrequestraw(),
                "response": parse_.getresponseraw()
            }
        })
        self.can_output(parse.getrootpath() + self.name, True)
def verify(self):
    """Misconfiguration probe: when the request carries a ``callback``
    URL parameter, replay it with a foreign-looking Referer and report
    if the response is still near-identical — the endpoint presumably
    does not validate the Referer (JSONP-style exposure)."""
    if self.dictdata.get("url").get("extension").lower() in notAcceptedExt:
        return
    parser = dictdata_parser(self.dictdata)
    req_headers = self.dictdata.get("request").get("headers")
    req_headers_withpayload = copy.deepcopy(req_headers)
    params_url = parser.getrequestparams_urlorcookie("url")
    if params_url and "callback" in params_url.keys():
        findit = False
        for k, v in req_headers_withpayload.items():
            if k.lower() == "referer":
                p = urlparse.urlparse(v)
                # Split host[:port], keeping the ':' separator.
                netloc_, sep, port = p.netloc.partition(":")
                if netloc_.count(".") < 2:
                    newnetloc = netloc_ + ".com.cn"
                else:
                    newnetloc = netloc_ + "." + get_random_str(
                        3).lower() + ".".join(netloc_.split(".")[-2:])
                # BUGFIX: the original dropped the ':' between host and
                # port ("host:8080" became "newhost8080"); re-insert it.
                v = v.replace(p.netloc, newnetloc + sep + port, 1)
                req_headers_withpayload[k] = v
                findit = True
                break
        if not findit:
            req_headers_withpayload[
                "Referer"] = "https://www.baidusectest.com/index.php"
        req = parser.generaterequest({"headers": req_headers_withpayload})
        r = request(**req)
        # Idiom fix: identity comparison with None.
        if r is not None:
            similar_rate = similar(r.content, parser.getresponsebody())
            if similar_rate > 0.9:
                parser_ = response_parser(r)
                self.result.append({
                    "name": self.name,
                    "url": self.dictdata.get("url").get("url").split("?")[0],
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": self.vulmsg,
                        "resp_similar": similar_rate,
                        "request": parser_.getrequestraw(),
                        "response": parser_.getresponseraw(),
                    }
                })
def check_url(self, path, verify=True):
    # Probe *path* under the site base and judge whether it is a real,
    # distinct resource. Returns (status_code, content_length) on a
    # verified hit, the raw body when verify=False, or False otherwise.
    if not path.startswith("/"):
        path = "/" + path
    # url = self.rootpath + path
    url = self.url[:-1] + path
    req = self.parser.generaterequest({"url": url, "method": "GET"})
    r = request(**req)
    if r is not None:
        if verify:
            if r.status_code == 200:
                # Soft-404 detection: a body too similar to the known
                # error page is not a real resource.
                if self.error_content is not None:
                    if similar(self.error_content,
                               r.content) > self.similar_rate:
                        return False
                # Build a randomized sibling filename; a genuine file
                # should differ in content from the random one.
                if not path.endswith("/"):
                    dirname = os.path.dirname(path)
                    a, b = os.path.splitext(os.path.basename(path))
                    a = a + get_random_str(4)
                    path_error = "".join([dirname, a, b])
                else:
                    path_error = path[:-1] + get_random_str(4) + path[-1]
                # NOTE(review): this probe URL is built from self.rootpath
                # while the main request above uses self.url[:-1] — confirm
                # both denote the same base, otherwise the two responses
                # being compared come from different locations.
                url_ = self.rootpath + path_error
                logger.debug("test new url:{}".format(url_))
                req_ = self.parser.generaterequest({
                    "url": url_,
                    "method": "GET"
                })
                r_ = request(**req_)
                if r_ is not None and similar(
                        r_.content, r.content) < self.similar_rate:
                    return (r.status_code, len(r.content))
                # Could additionally compare by file signature (filetype
                # lib) for .rar/.7z-style paths, but that costs memory —
                # deliberately skipped.
        else:
            return r.content
    return False
def check(self, path):
    """Fetch ``self.url + path`` and report it as an exposed endpoint
    when the response headers carry actuator-style markers, the body
    differs from the known error page, and a randomized-path re-check
    (``check_second``) confirms the response is path-specific.

    (Dead commented-out body-keyword matching removed — it was disabled
    and its condition combined the lists incorrectly.)
    """
    req = {
        "method": "GET",
        "url": self.url + path,
        "timeout": 10,
        "verify": False,
        "allow_redirects": False
    }
    r = request(**req)
    # Header markers typical of Spring Boot actuator / heap-dump output.
    words_header = [
        "X-Application-Context",
        "application/json",
        "application/vnd.spring-boot.actuator",
        "hprof",
    ]
    if r is not None and r.status_code == 200 and any(
            x in str(r.headers) for x in words_header
    ) and similar(r.content, self.error_page) < self.max_similar:
        # Rule out wildcard/catch-all responses before reporting.
        if self.check_second(path, r.content):
            parser_ = response_parser(r)
            self.result.append({
                "name": self.name,
                "url": self.url + path,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "request": parser_.getrequestraw(),
                    "response": parser_.getresponseraw()
                }
            })
def verify(self):
    # Path-normalization probe on static resources: rewrite the first
    # path segment "seg" as "seg../seg" and "seg/..;/seg" and report when
    # either rewritten URL returns the same status and near-identical
    # content as the original file.
    # NOTE(review): presumably targets "..;"/".."-style normalization
    # bypasses in a fronting proxy — confirm against the plugin's vulmsg.
    if self.dictdata.get("url").get("url").count(
            "/") >= 4 and self.dictdata.get("url").get("extension") in [
                "js", "css"
            ] and self.dictdata.get("response").get("status") == 200:
        parse = dictdata_parser(self.dictdata)
        # Report at most once per site root.
        if self.can_output(parse.getrootpath() + self.name):
            url = self.dictdata.get("url").get("url")
            # For "http://host/a/b.js": url_split[:3] is the scheme/host
            # part, url_split[3] the first path segment.
            url_split = url.split("/")
            for new_url in [
                    "/".join(url_split[:3] +
                             [url_split[3] + "../" + url_split[3]] +
                             url_split[4:]),
                    "/".join(url_split[:3] +
                             [url_split[3] + "/..;/" + url_split[3]] +
                             url_split[4:])
            ]:
                req = parse.generaterequest({"url": new_url})
                r = request(**req)
                if r is not None and r.status_code == self.dictdata.get(
                        "response").get("status"):
                    similar_rate = similar(parse.getresponsebody(),
                                           r.content)
                    if similar_rate > self.min_similar_rate:
                        self.result.append({
                            "name": self.name,
                            "url": parse.getrootpath(),
                            "level": self.level,  # 0:Low 1:Medium 2:High
                            "detail": {
                                "vulmsg": self.vulmsg,
                                "source_url": url,
                                "new_url": new_url,
                                "similar_rate": similar_rate
                            }
                        })
                        self.can_output(parse.getrootpath() + self.name,
                                        True)
def inject_headers(self, data):
    """Boolean-blind SQL injection probe against one request header.

    *data* is a ``(headers, key, value)`` triple. For each quote style,
    appends an always-TRUE and an always-FALSE condition to the header
    value and repeats the pair ``self.verify_count`` times; a finding
    is appended to ``self.result`` only when every round shows the
    boolean differential against the baseline page ``self.text``.
    """
    sql_flag = [
        "' and '{0}'='{1}",
        '" and "{0}"="{1}',
    ]
    headers, k, v = data
    for sql_ in sql_flag:
        response_rt = b""
        response_rf = b""
        rf_similar = 0
        rt_similar = 0
        rt_rf_similar = 0
        payload_right = ""
        payload_false = ""
        random_str = get_random_str(2).lower()
        # BUGFIX: the original accumulated loop indices and compared
        # against sum(range(verify_count)), which wrongly reports when
        # verify_count <= 1 and the single round fails. Count successful
        # rounds explicitly; behavior unchanged for verify_count >= 2.
        hits = 0
        for _ in range(self.verify_count):
            req_false_header = copy.deepcopy(headers)
            req_true_header = copy.deepcopy(headers)
            payload_false = sql_.format(random_str + "b", random_str + "c")
            payload_right = sql_.format(random_str + "b", random_str + "b")
            req_false_header[k] = req_false_header[k] + payload_false
            req_true_header[k] = req_true_header[k] + payload_right
            req_false = self.parser.generaterequest(
                {"headers": req_false_header})
            req_true = self.parser.generaterequest(
                {"headers": req_true_header})
            rf = request(**req_false)
            rt = request(**req_true)
            if rf is None or rt is None:
                break
            if self.dictdata.get("response").get("mime_stated") == "HTML":
                rf_text = getFilteredPageContent(
                    removeDynamicContent(rf.text, self.dynamic))
                rt_text = getFilteredPageContent(
                    removeDynamicContent(rt.text, self.dynamic))
            else:
                rf_text = removeDynamicContent(rf.text, self.dynamic)
                rt_text = removeDynamicContent(rt.text, self.dynamic)
            rf_similar = round(similar(rf_text, self.text), 3)
            rt_similar = round(similar(rt_text, self.text), 3)
            rt_rf_similar = round(similar(rf.text, rt.text), 3)
            if not (rt_rf_similar != 1.0 and rt_similar > rf_similar
                    and rt_similar > 0.98):
                break
            response_rt = response_parser(rt)
            response_rf = response_parser(rf)
            hits += 1
        if hits and hits == self.verify_count:
            # NOTE(review): the similar_rate literal was split across
            # lines in the source chunk; reconstructed as one string
            # matching the sibling inject() implementation.
            self.result.append({
                "name": self.name,
                "url": self.dictdata.get("url").get("url").split("?")[0],
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "param": "header's {}".format(k),
                    "payload": "payload_true:{} payload_false:{}".format(
                        payload_right, payload_false),
                    "similar_rate":
                        "payload_true_rate:{} payload_false_rate:{} payload_true_false_rate:{}"
                        .format(rt_similar, rf_similar, rt_rf_similar),
                    "request_true": response_rt.getrequestraw(),
                    "response_true": response_rt.getresponseraw(),
                    "request_false": response_rf.getrequestraw(),
                    "response_false": response_rf.getresponseraw(),
                }
            })
    return