def check_reverse():
    """Self-test the reverse (out-of-band) servers from this host.

    Generates random DNS and HTTP tokens, fires ping/nslookup plus an HTTP
    fetch (mshta on Windows, curl/wget elsewhere) at the configured reverse
    servers, then queries the reverse database to confirm the callbacks
    actually arrived, logging Success/Fail for each channel.
    """
    ver = platform.system()
    dns_random_str = "myscan_dnstest_" + get_random_str(10)
    http_random_str = "myscan_httptest_" + get_random_str(10)
    domain = "{}.{}".format(dns_random_str, reverse_set.get("reverse_domain"))
    url = "http://{}:{}/?d={}".format(reverse_set.get("reverse_http_ip"),
                                      reverse_set.get("reverse_http_port"),
                                      http_random_str)
    logger.info(
        "Will exec ping ,nslookup,mshta,curl,wget to test server , it will take around 20s"
    )
    if ver.lower() == "windows":
        cmd = "ping -n 2 {}>nul & nslookup {} >nul & mshta {}".format(
            domain, domain, url)
    else:
        cmd = "ping -c 2 {} 2>&1 >/dev/null & nslookup {} 2>&1 >/dev/null & curl {} 2>&1 >/dev/null & wget {} --output-document=/dev/null".format(
            domain, domain, url, url)
    logger.info("Start exec cmd:{}".format(cmd))
    run_cmd(cmd)
    res_http = query_reverse(http_random_str)
    res_dns = query_reverse(domain, False)
    # TODO: add an RMI check here by simulating a local RMI client.
    if res_http[0]:
        logger.critical("Client connect http reverse server: Success")
    else:
        logger.warning("Client connect http reverse server: Fail")
    if res_dns[0]:
        logger.critical("Client connect dns reverse server: Success")
    else:
        # Fixed message: previously said "disconnect", which was inconsistent
        # with the HTTP branch and misleading in logs.
        logger.warning("Client connect dns reverse server: Fail")
def ldap_response(client, address):
    """Handle one LDAP client connection on the reverse server.

    Replies to an LDAP bindRequest with a canned bindResponse(success),
    reads the follow-up search request to extract the queried path, and
    records it into the db. Always closes the client socket.
    """
    try:
        client.settimeout(30)
        buf = client.recv(512)
        # 30 0c 02 01 is the start of an LDAPMessage bindRequest.
        if buf.hex().startswith("300c0201"):
            # Canned bindResponse with resultCode success.
            send_data = b"\x30\x0c\x02\x01\x01\x61\x07\x0a\x01\x00\x04\x00\x04\x00"
            client.send(send_data)
            total = 3  # read at most 3 chunks to cope with short recv()s
            buf1 = b""
            while total:
                chunk = client.recv(512)
                if not chunk:
                    # Peer closed: the old loop kept recv()ing b"" forever.
                    break
                buf1 += chunk
                total -= 1  # was never decremented before (infinite-loop risk)
                if len(buf1) > 16:
                    break
            if buf1:
                path = getldappath(buf1).decode(errors="ignore")
                logger.debug("client:{} send path:{}".format(address, path))
                res = {}
                res["type"] = "ldap"
                res["client"] = address[0]
                res["query"] = path
                res["info"] = decode(path)
                res["time"] = time.strftime('%Y-%m-%d %H:%M:%S',
                                            time.localtime(time.time()))
                logger.info("Insert to db:" + str(res))
                insert_db(res)
    except Exception as ex:
        logger.warning('Run ldap error:{} address:{}'.format(ex, address))
    finally:
        client.close()
def rmi_response(client, address):
    """Handle one Java RMI client connection on the reverse server.

    Answers the JRMI handshake with a ProtocolAck, reads the call payload,
    extracts the requested path and records it into the db. Always closes
    the client socket.
    """
    try:
        client.settimeout(5)
        buf = client.recv(1024)
        if b"\x4a\x52\x4d\x49" in buf:  # "JRMI" magic
            # ProtocolAck: 0x4e + length-prefixed client host + port.
            send_data = b"\x4e"
            send_data += struct.pack(">h", len(address[0]))
            send_data += address[0].encode()
            send_data += b"\x00\x00"
            send_data += struct.pack(">H", address[1])
            client.send(send_data)
            total = 3  # read at most 3 chunks to cope with short recv()s
            buf1 = b""
            while total:
                chunk = client.recv(512)
                if not chunk:
                    # Peer closed: the old loop kept recv()ing b"" forever.
                    break
                buf1 += chunk
                total -= 1  # was never decremented before (infinite-loop risk)
                if len(buf1) > 50:
                    break
            if buf1:
                path = bytearray(buf1).split(b"\xdf\x74")[-1][2:].decode(
                    errors="ignore")
                # Use the logger instead of a bare print(), consistent with
                # ldap_response.
                logger.debug("client:{} send path:{}".format(address, path))
                res = {}
                # NOTE(review): type is recorded as "dns" although this is the
                # RMI listener — looks like a copy/paste slip, but the query
                # side may depend on it; confirm before changing to "rmi".
                res["type"] = "dns"
                res["client"] = address[0]
                res["query"] = path
                res["info"] = decode_rmi(path)
                res["time"] = time.strftime('%Y-%m-%d %H:%M:%S',
                                            time.localtime(time.time()))
                logger.info("Insert to db:" + str(res))
                insert_db(res)
    except Exception as ex:
        logger.warning('Run rmi error:{} address:{}'.format(ex, address))
    finally:
        client.close()
def redis_conn():
    """Build the global redis connection pool from --redis pass@host:port:db.

    An empty password before '@' means no auth; port defaults to 6379 and
    db to 0 when the corresponding segment is omitted. Exits with a usage
    message when the argument lacks the '@' separator.
    """
    arg_redis = cmd_line_options.redis
    if arg_redis:
        if "@" in arg_redis:
            pwd, ipport = arg_redis.split("@", 1)
            if not pwd:
                pwd = None
            port, db = 6379, 0
            if ":" in ipport and ipport.count(".") >= 2:
                # Previously this unconditionally unpacked three values and
                # crashed with ValueError on "host:port" (no :db segment).
                parts = ipport.split(":", 2)
                ip = parts[0]
                if len(parts) > 1 and parts[1]:
                    port = parts[1]
                if len(parts) > 2 and parts[2]:
                    db = parts[2]
            else:
                ip = ipport
            logger.info(
                "Redis connection args: pwd:{},ip:{},port:{},db:{}".format(
                    pwd, ip, port, db))
            conn.redis = redis.ConnectionPool(max_connections=300,
                                              host=ip,
                                              password=pwd,
                                              port=int(port),
                                              db=int(db))
            red = getredis()
        else:
            # error_msg = "Set reids connection error,please check redis-server"
            error_msg = "Please use --redis pass@host:port:db ,if pass is none ,like --redis @host:port:db"
            logger.warning(error_msg)
            sys.exit()
def set_es_conn():
    """Connect to elasticsearch when es_open is configured.

    Verifies the server banner and requires version > 7.0.0, then stores the
    client in the globals and ensures the httpinfo index exists. Exits the
    program on any failure.
    """
    if db_set.get("es_open"):
        try:
            client = connections.create_connection(
                hosts=db_set.get("es_addr"),
                http_auth=db_set.get("es_auth"),
                timeout=10)
            info = client.info()
            if "You Know, for Search" in str(info):
                # Compare the parsed version tuple. The old check
                # int(number.replace(".", "")) > 700 concatenated digits and
                # wrongly accepted e.g. "6.8.23" (6823 > 700).
                number = info.get("version").get("number")
                if tuple(int(x) for x in number.split(".")[:3]) > (7, 0, 0):
                    logger.info("Success connect es : {}".format(
                        db_set.get("es_addr")))
                    others.es_conn = client
                    set_httpinfo()
                else:
                    logger.warning("Your es version should be > 7.0.0")
                    sys.exit()
        except Exception as ex:
            logger.warning(
                "es conn get error :{} , will exit program".format(ex))
            # (dropped a stray no-op .format(ex) on this constant message)
            logger.warning(
                "if you don't want to use elasticsearch .please config 'es_open':False, in config.py"
            )
            sys.exit()
def main():
    """Listen for reverse RMI connections on 0.0.0.0 and hand each one to a
    daemon thread running rmi_response."""
    max_conn = 200  # accept backlog
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ip_port = ("0.0.0.0", int(reverse_set.get("reverse_rmi_port")))
    sock.bind(ip_port)
    # Was a hard-coded 200 that left max_conn unused.
    sock.listen(max_conn)
    logger.info("RMI listen: 0.0.0.0:{}".format(
        int(reverse_set.get("reverse_rmi_port"))))
    while True:
        client, address = sock.accept()
        # daemon=True at construction replaces the deprecated setDaemon().
        thread = threading.Thread(target=rmi_response,
                                  args=(client, address),
                                  daemon=True)
        thread.start()
def main():
    """Program entry point: set project paths, parse options, then run the
    selected command (webscan pipeline or reverse-server mode)."""
    set_paths(os.path.dirname(os.path.realpath(__file__)))
    init_options()
    if cmd_line_options.command == "webscan":
        logger.info("Start webscan mode")
        # wire up redis, clear stale state, start bookkeeping + workers,
        # then enter the dispatch loop
        set_conn()
        cleandb()
        start_count_status()
        start_write_results()
        process_start()
        start()
    elif cmd_line_options.command == "reverse":
        logger.info("Start reverse mode")
        reverse_start()
def main(): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # ip=reverse_set.get("reverse_rmi_ip") ip = "0.0.0.0" # 这里不用配置中的ip是因为,像腾讯云,监听IP是个内网,但是有个公网地址。 ip_port = (ip, int(reverse_set.get("reverse_rmi_port"))) sock.bind(ip_port) sock.listen(200) logger.info("RMI listen: {}:{}".format( ip, int(reverse_set.get("reverse_rmi_port")))) while True: client, address = sock.accept() thread = threading.Thread(target=rmi_response, args=(client, address)) thread.setDaemon(True) thread.start()
def reverse_start():
    """Boot all reverse-callback services.

    HTTP, RMI and LDAP listeners run in daemon child processes; the DNS
    server runs in this process and blocks until interrupted. A missing
    secret_key in the config is replaced with a random one for this run.
    """
    try:
        secret_key = reverse_set.get("secret_key")
        if not secret_key:
            # no key configured: generate a random one for this run
            secret_key = get_random_str(9)
        logger.info("Reverse http server: http://{}:{} secret_key: {}".format(
            reverse_set.get("reverse_http_ip"),
            reverse_set.get("reverse_http_port"), secret_key))
        logger.info("Reverse dns server: {}".format(
            reverse_set.get("reverse_domain")))
        logger.info("Reverse rmi server: {}:{}".format(
            reverse_set.get("reverse_rmi_ip"),
            reverse_set.get("reverse_rmi_port")))
        logger.info("Reverse ldap server: {}:{}".format(
            reverse_set.get("reverse_ldap_ip"),
            reverse_set.get("reverse_ldap_port")))
        init_db()
        try:
            p = Process(target=http_start, args=(secret_key, ))
            p.daemon = True
            p.start()
            p1 = Process(target=rmi_start)
            p1.daemon = True
            p1.start()
            p2 = Process(target=ldap_start)
            p2.daemon = True
            p2.start()
            # DNS runs in the foreground; this call blocks.
            dns_start()
        except KeyboardInterrupt as ex:
            logger.warning("Ctrl+C was pressed ,aborted program")
    except Exception as ex:
        logger.warning("Start reverse get error:{}".format(ex))
        sys.exit()
def set_conn():
    """Establish the redis connection and verify it with a ping.

    Exits the program when the pool cannot be built or the ping fails.
    """
    try:
        redis_conn()
        red = getredis()
        if not red.ping():
            error_msg = "redis ping error . will exit program"
            logger.warning(error_msg)
            sys.exit()
        else:
            logger.info("Redis ping success")
    except Exception as ex:
        # fixed typo in the user-facing message: "connnect" -> "connect"
        error_msg = "connect redis get error {}: please use --redis pass@host:port:db ,if pass is none ,like --redis @host:port:db".format(ex)
        logger.warning(error_msg)
        sys.exit()
def handle(self):
    """Serve one UDP DNS request.

    For A queries (type 1) whose name falls under the configured reverse
    domain, answer with our own IP and record the query into the db; every
    other query type simply gets our IP echoed back.
    """
    data = self.request[0].strip()
    dns = SinDNSFrame(data)
    conn = self.request[1]
    query_name = dns.getname()
    # A record
    if dns.query.type == 1:
        response = ip_address if query_name.endswith(
            reverse_set.get("reverse_domain")) else None
        if response:
            dns.setip(response)
            # NOTE(review): log_format is built but never used — presumably
            # intended for a structured log call; confirm before removing.
            log_format = {
                'client_ip': self.client_address[0],
                'client_port': self.client_address[1],
                'query': query_name,
                'record-type': 'A',
                'response': response
            }
            conn.sendto(dns.getbytes(), self.client_address)
            res = {}
            res["type"] = "dns"
            res["client"] = self.client_address[0]
            res["query"] = query_name
            res["info"] = decode_dns(query_name)
            res["time"] = time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(time.time()))
            logger.info("Insert to db:" + str(res))
            insert_db(res)
    # # AAAA record
    # elif dns.query.type == 28:
    #     response = ip_address if query_name.endswith(reverse_set.get("reverse_domain")) else None
    #     if response:
    #         dns.setip(response)
    #         conn.sendto(dns.getbytes(), self.client_address)
    #         log_format = {'client_ip': self.client_address[0], 'client_port': self.client_address[1],
    #                       'query': query_name, 'record-type': 'AAAA', 'response': response}
    #         res = {}
    #         res["type"] = "dns"
    #         res["client"] = self.client_address[0]
    #         res["query"] = query_name
    #         res["info"] = ""
    #         res["time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    #         logger.info(str(res))
    else:
        dns.setip(ip_address)
        conn.sendto(dns.getbytes(), self.client_address)
def start_input():
    """Push hostscan targets parsed from the nmap text/json input files onto
    the redis 'hostdata' queue (no-op for other commands)."""
    if cmd_line_options.command != "hostscan":
        return
    try:
        red = getredis()
        # (path option, parser, label used in the log line) — text first,
        # then json, exactly as before
        sources = (
            (cmd_line_options.input_nmaptext, get_data_from_textfile, "nmap_text"),
            (cmd_line_options.input_jsonfile, get_data_from_jsonfile, "nmap_json"),
        )
        for path, loader, label in sources:
            if not path:
                continue
            entries = loader(path)
            logger.info("input {} lines from {}".format(len(entries), label))
            for entry in entries:
                red.lpush("hostdata", json.dumps(entry))
    except Exception as ex:
        traceback.print_exc()
        logger.warning("input target to hostdata get error:{}".format(ex))
def process_start():
    """Fork the configured number of daemon worker processes, each running
    run_python; log and continue on startup errors."""
    try:
        try:
            logger.info("Python Script use {} process".format(cmd_line_options.process))
            logger.info("Some scan use {} threads ".format(cmd_line_options.threads))
            workers = [Process(target=run_python)
                       for _ in range(cmd_line_options.process)]
            for worker in workers:
                worker.daemon = True
                worker.start()
        except Exception as ex:
            traceback.print_exc()
            logger.warning(
                "Error occurred while starting new scan ('{0}')".format(str(ex)))
    except KeyboardInterrupt as ex:
        logger.warning("Ctrl+C was pressed ,aborted program")
def index():
    """HTTP reverse-callback endpoint.

    Records the 'd' query parameter into the db; the payload after the
    4-char prefix is hex-encoded and decoded best-effort into 'info'.
    Returns a JSON status body either way.
    """
    data = request.args.get("d", None)
    if data:
        try:
            info = ""
            try:
                info = binascii.a2b_hex(data[4:].encode()).decode()
            except (ValueError, UnicodeDecodeError):
                # Narrowed from a bare `except:` which also swallowed
                # SystemExit/KeyboardInterrupt. binascii.Error is a
                # ValueError subclass, so invalid hex lands here too;
                # leaving info empty is the intended best-effort behavior.
                pass
            res = {}
            res["type"] = "http"
            res["client"] = request.remote_addr
            res["query"] = data
            res["info"] = info
            res["time"] = time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(time.time()))
            logger.info("Insert to db:" + str(res))
            insert_db(res)
            return json.dumps({"status": "success"})
        except Exception as ex:
            logger.warning("scan index d get error:{}".format(ex))
            pass
    return json.dumps({"status": "fail", "reason": ""})
def search():
    """Query endpoint for recorded reverse callbacks.

    Requires the shared secret in ?key=. ?query= is matched as a prefix
    (LIKE 'q%'); the magic value "myscan_total" returns only the row count
    with an empty data list. Returns a JSON body in all cases.
    """
    q = request.args.get("query", None)
    k = request.args.get("key", None)
    if q:
        if k == app.config["secret_key"]:
            res = {}
            res["status"] = "success"
            if q == "myscan_total":
                cur = g.db.execute(
                    'select type, client, info,query,time from {} order by id desc'
                    .format(TABLE))
            else:
                # the user-supplied term is bound as a parameter; only the
                # table name (a trusted module constant) is interpolated
                cur = g.db.execute(
                    'select type, client, info,query,time from {} where query like ? order by id desc'
                    .format(TABLE), (q + "%", ))
            total = cur.fetchall()
            res["total"] = len(total)
            if q == "myscan_total":
                res["data"] = []
            else:
                res["data"] = [
                    dict(type=row[0],
                         client=row[1],
                         info=row[2],
                         query=row[3],
                         time=row[4]) for row in total
                ]
            logger.info("Out to client:{}".format(res))
            return json.dumps(res)
        else:
            return json.dumps({
                "status": "fail",
                "reason": "secret_key is not right"
            })
    return json.dumps({"status": "fail", "reason": ""})
def init_options():
    """Parse command-line options and prepare all global scan state.

    Configures proxy and log level, discovers DNS servers, walks the poc
    directories honoring --enable/--disable, loads poc modules and plugins,
    and initializes the elasticsearch connection. Exits when nothing loads.
    """
    # monkey-patch urlencode
    pathch_urlencode()
    cmd_line_options.update(cmd_line_parser().__dict__)
    # banner / --version
    if cmd_line_options.show_version:
        print(banner())
        sys.exit()
    print(banner())
    # --check-reverse: probe the reverse servers, then exit
    if cmd_line_options.check_reverse:
        check_reverse()
        sys.exit()
    if cmd_line_options.command == "reverse":
        return
    # TODO: needs validation, fallbacks and socks proxy support
    if cmd_line_options.proxy:
        host_port = cmd_line_options.proxy
        cmd_line_options.proxy = {
            "http": "http://{}".format(host_port),
            "https": "https://{}".format(host_port),
        }
    else:
        cmd_line_options.proxy = {}
    if cmd_line_options.verbose == 0:
        logger.logger.setLevel(logging.DEBUG)
    elif cmd_line_options.verbose == 1:
        logger.logger.setLevel(logging.INFO)
    elif cmd_line_options.verbose == 2:
        logger.logger.setLevel(logging.WARNING)
    elif cmd_line_options.verbose == 3:
        logger.logger.setLevel(logging.CRITICAL)
    # discover usable DNS servers and stash them in the globals
    if db_set.get("es_open"):
        servers = find_dns_server().find_dnsservers()
        logger.info("Found dns_servers:{}".format(servers))
        if servers == []:
            logger.warning("Not Found dns_servers, Check your Networks or edit data/common/dns_servers.txt")
            sys.exit()
        others.dns_servers = servers
    # html-output destination
    logger.info("Vuln results will output to: {}".format(cmd_line_options.html_output))
    cmd_line_options.allow_poc = []
    cmd_line_options.allow_plugin = {}
    cmd_line_options.pocs_perfile = []
    cmd_line_options.pocs_perfoler = []
    cmd_line_options.pocs_perscheme = []
    cmd_line_options.pocs_perserver = []
    cmd_line_options.pocs_load_moudle = {
        "perfile": {},
        "perfolder": {},
        "perscheme": {},
        "perserver": {}
    }
    poc_keys = {
        "perfile": cmd_line_options.pocs_perfile,
        "perfolder": cmd_line_options.pocs_perfoler,
        "perscheme": cmd_line_options.pocs_perscheme,
        "perserver": cmd_line_options.pocs_perserver
    }
    if cmd_line_options.command == "webscan":
        cmd_line_options.poc_folders = ["perfile", "perfolder", "perscheme"]
    if cmd_line_options.command == "hostscan":
        cmd_line_options.poc_folders = ["perserver"]
    if "all" not in cmd_line_options.disable:
        if cmd_line_options.disable:
            # --disable wins: ignore --enable and take everything not disabled
            cmd_line_options.enable = None
            for _dir in cmd_line_options.poc_folders:
                # (old flat-listdir selection removed; superseded by the
                #  recursive walk below)
                # new way to get subdir
                for root, dirs, files in os.walk(os.path.join(paths.MYSCAN_POCS_PATH, _dir)):
                    for file in files:
                        if file.endswith(".py") and not file.startswith("__"):
                            if not any([disable in file for disable in cmd_line_options.disable]):
                                poc_keys.get(_dir).append(os.path.abspath(os.path.join(root, file)))
        else:
            for _dir in cmd_line_options.poc_folders:
                # (old flat-listdir selection removed; superseded by the
                #  recursive walk below)
                for root, dirs, files in os.walk(os.path.join(paths.MYSCAN_POCS_PATH, _dir)):
                    for file in files:
                        if file.endswith(".py") and not file.startswith("__"):
                            if not cmd_line_options.enable:
                                # no --enable: load everything
                                poc_keys.get(_dir).append(os.path.abspath(os.path.join(root, file)))
                            else:
                                # --enable acts as a substring filter on filenames
                                if any([enable in file for enable in cmd_line_options.enable]):
                                    poc_keys.get(_dir).append(os.path.abspath(os.path.join(root, file)))
        for _dir in cmd_line_options.poc_folders:
            logger.debug("{} total: {} pocs".format(_dir.capitalize(),
                                                    len(list(set(poc_keys.get(_dir))))))
            for poc in list(set(poc_keys.get(_dir))):
                logger.info("Load Pocs:{}".format(poc))
                cmd_line_options.pocs_load_moudle[_dir][hash(poc)] = {
                    "poc": poc,
                    "class": load_file_to_module(poc)
                }
        if cmd_line_options.command == "webscan":
            if not (cmd_line_options.pocs_perfile
                    or cmd_line_options.pocs_perfoler
                    or cmd_line_options.pocs_perscheme):
                logger.warning("No Pocs ,please use --enable un_auth sqli")
                sys.exit()
        if cmd_line_options.command == "hostscan":
            if not cmd_line_options.pocs_perserver:
                logger.warning("No Pocs ,please use --enable brute ms17010")
                sys.exit()
    else:
        logger.warning("No Pocs Load!")
    # language-plugin argument handling (per command subdirectory)
    plugins_dir = os.path.join(paths.MYSCAN_PLUGINS_PATH, cmd_line_options.command)
    exists_poc_with_ext = list(
        filter(lambda x: not x.startswith("__"), os.listdir(plugins_dir)))
    if cmd_line_options.plugins:
        for openplugin in list(set(cmd_line_options.plugins)):
            for plugin in exists_poc_with_ext:
                if openplugin in plugin:
                    plugin_path = os.path.join(plugins_dir, plugin)
                    logger.info("Load Plugin:{}".format(plugin_path))
                    cmd_line_options.allow_plugin[hash(plugin_path)] = {
                        "poc": plugin_path,
                        "class": load_file_to_module(plugin_path)
                    }
        if len(cmd_line_options.allow_plugin) == 0:
            logger.warning("No Plugins Load!")
    total_poc = 0
    for x in cmd_line_options.pocs_load_moudle.values():
        total_poc += len(x)
    others.total_pocs = total_poc
    if total_poc == 0 and len(cmd_line_options.allow_plugin) == 0:
        logger.warning("No Plugins Pocs Load! Check your arguments ,Program will exit")
        sys.exit()
    # populate the ssti globals
    importssti()
    # register plugins that need a url-path dictionary
    poc1 = os.path.join(paths.MYSCAN_POCS_PATH, "perfolder", "info", "myscan_dirscan.py")
    if poc1 in cmd_line_options.pocs_perfoler:
        get_dict()
    # patches
    # patch_banner_timeout()  # seems to have no effect
    ipv6_patch()
    # connections
    set_es_conn()
    # dishost bookkeeping
    if cmd_line_options.host:
        cmd_line_options.dishost = []
def set_httpinfo():
    """Create the 'httpinfo' elasticsearch index when it does not exist.

    The mapping defines custom char_filters/tokenizers/analyzers for URL
    paths, hosts and root domains, plus dynamic templates for the request/
    response/url sub-fields. Exits the program when index creation fails.
    """
    body = {
        'settings': {
            'analysis': {
                'char_filter': {
                    # splits on '/' are handled by replacing with spaces
                    'replace_slash_to_null': {
                        'type': 'pattern_replace',
                        'pattern': '/',
                        'replacement': ' '
                    },
                    # strip subdomains, keeping "example.com"
                    'get_root_domain': {
                        'type': 'pattern_replace',
                        'pattern': '^.+?\\.([0-9a-z\\-]+\\.[a-z]+)$',
                        'replacement': '$1'
                    },
                    # alpha TLD -> "true", numeric last label (IP) -> "false"
                    "replace_realdomain_to_true": {
                        "type": "pattern_replace",
                        "pattern": "^.*?\\.[a-z]+$",
                        "replacement": "true"
                    },
                    "replace_ipdomain_to_false": {
                        "type": "pattern_replace",
                        "pattern": "^.*?\\.[0-9]{1,3}$",
                        "replacement": "false"
                    }
                },
                'tokenizer': {
                    'tokenizer_slash': {
                        'type': 'pattern',
                        'pattern': '/'
                    },
                    'tokenizer_dot': {
                        'type': 'pattern',
                        'pattern': '\\.'
                    }
                },
                "normalizer": {
                    "root_domain_keyword": {
                        "type": "custom",
                        "char_filter": "get_root_domain",
                        "filter": ["lowercase"]
                    },
                    "verify_is_domain": {
                        "type": "custom",
                        "char_filter": [
                            "replace_realdomain_to_true",
                            "replace_ipdomain_to_false"
                        ],
                        "filter": ["lowercase"]
                    }
                },
                'analyzer': {
                    'analyzer_path': {
                        'type': 'custom',
                        'tokenizer': 'tokenizer_slash',
                        'filter': ['lowercase']
                    },
                    'analyzer_host': {
                        'type': 'custom',
                        'tokenizer': 'tokenizer_dot',
                        'filter': ['lowercase']
                    },
                    'analyzer_domain': {
                        'type': 'custom',
                        'tokenizer': 'whitespace',
                        'char_filter': 'get_root_domain',
                        'filter': ['lowercase']
                    }
                }
            }
        },
        'mappings': {
            'date_detection': False,
            'dynamic_templates': [{
                'request_raw': {
                    'path_match': 'request.raw',
                    'mapping': {
                        'type': 'text',
                        # NOTE(review): 'ik_max_word' needs the IK analysis
                        # plugin installed on the ES cluster — confirm.
                        'analyzer': 'ik_max_word'
                    }
                }
            }, {
                'response_raw': {
                    'path_match': 'response.raw',
                    'mapping': {
                        'type': 'text'
                    }
                }
            }, {
                'request_headers': {
                    'path_match': 'request.headers',
                    'mapping': {
                        'type': 'text'
                    }
                }
            }, {
                'response_headers': {
                    'path_match': 'response.headers',
                    'mapping': {
                        'type': 'keyword',
                        'ignore_above': 256
                    }
                }
            }, {
                'url_path': {
                    'path_match': 'url.path',
                    'mapping': {
                        'type': 'text',
                        'analyzer': 'analyzer_path',
                        'fields': {
                            'keyword': {
                                'type': 'keyword',
                                'ignore_above': 500
                            }
                        }
                    }
                }
            }, {
                'url_host': {
                    'path_match': 'url.host',
                    'mapping': {
                        'type': 'text',
                        'analyzer': 'analyzer_host',
                        'fields': {
                            'keyword': {
                                'type': 'keyword',
                                'ignore_above': 256
                            },
                            'domain': {
                                'type': 'keyword',
                                'normalizer': 'root_domain_keyword',
                                'ignore_above': 256,
                            },
                            "isdomain": {
                                "type": "keyword",
                                "normalizer": "verify_is_domain"
                            }
                        }
                    }
                }
            }, {
                'url_url': {
                    'path_match': 'url.url',
                    'mapping': {
                        'type': 'keyword',
                        'ignore_above': 1000
                    }
                }
            }, {
                'url_protocol': {
                    'path_match': 'url.protocol',
                    'mapping': {
                        'type': 'keyword',
                        'ignore_above': 256
                    }
                }
            }, {
                'url_port': {
                    'path_match': 'url.port',
                    'mapping': {
                        'type': 'integer'
                    }
                }
            }, {
                'url_pathroot': {
                    'path_match': 'url.path_root',
                    'mapping': {
                        'type': 'keyword',
                        'ignore_above': 256
                    }
                }
            }, {
                'url_extension': {
                    'path_match': 'url.extension',
                    'mapping': {
                        'type': 'keyword',
                        'ignore_above': 256
                    }
                }
            }, {
                'url_ip': {
                    'path_match': 'url.ip',
                    'mapping': {
                        'type': 'keyword',
                        'ignore_above': 256
                    }
                }
            }, {
                'url_icon': {
                    'path_match': 'url.icon_hash',
                    'mapping': {
                        'type': 'keyword'
                    }
                }
            }],
            'properties': {
                'ts': {
                    'type': 'date',
                    'format': 'epoch_millis'
                },
                'source': {
                    'type': 'keyword',
                    'ignore_above': 256
                }
            }
        }
    }
    index = "httpinfo"
    if not others.es_conn.indices.exists(index):
        logger.warning(
            "elasticsearch not exist :{} , will create it".format(index))
        if others.es_conn.indices.create(index=index,
                                         body=body).get("acknowledged"):
            logger.info("elasticsearch create {} success".format(index))
        else:
            logger.warning(
                "elasticsearch create {} failed , will exit program".format(
                    index))
            sys.exit()
def start():
    """Webscan dispatch loop.

    Pops burp traffic from the redis 'burpdata' list, runs plugins, applies
    host/block filters, and pushes perfile/perfolder/perscheme work items
    (pickled dicts) onto 'work_data_py'. Sleeps briefly when idle; exits on
    Ctrl+C.
    """
    logger.info("Myscan Python Moudle Listen ...")
    red = getredis()
    try:
        while True:
            data = red.lpop("burpdata")
            if data:
                red.hincrby("count_all", "doned", amount=1)
                logger.debug("Get one data from burpdata")
                dictdata = ""
                try:
                    dictdata = json.loads(data)
                except Exception as ex:
                    logger.warning("Process burpdata to json get error:" +
                                   str(ex))
                    continue
                if dictdata != "":
                    # run plugins first
                    if cmd_line_options.plugins:
                        plugin(dictdata)
                    if "all" in cmd_line_options.disable:
                        continue
                    is_filter = dictdata.get("filter")
                    host = dictdata.get("url").get("host")
                    port = dictdata.get("url").get("port")
                    block = block_info(host, port)
                    if allow_host(host) and not block.is_block():
                        # passive search mode, when enabled
                        if scan_set.get("search_open", False):
                            s = searchmsg(dictdata)
                            s.verify()
                            s.saveresult()
                        data_parser = dictdata_parser(dictdata)
                        # perfile
                        if cmd_line_options.pocs_perfile:
                            if not is_filter or not data_parser.is_perfile_doned():
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perfile_doned res:False")
                                red.lpush(
                                    "work_data_py",
                                    pickle.dumps({
                                        "data": data_parser.getperfile(),
                                        "dictdata": dictdata,
                                        "type": "perfile"
                                    }))
                            else:
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perfile_doned res:True")
                        # perfolder
                        if cmd_line_options.pocs_perfoler:
                            if not is_filter:
                                folders = data_parser.getperfolders()
                            else:
                                folders = data_parser.is_perfolder_doned()
                            if folders != []:
                                for folder in folders:
                                    red.lpush(
                                        "work_data_py",
                                        pickle.dumps({
                                            "data": folder,
                                            "dictdata": dictdata,
                                            "type": "perfolder"
                                        }))
                        # perscheme
                        if cmd_line_options.pocs_perscheme:
                            if not is_filter or not data_parser.is_perscheme_doned():
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perscheme_doned res:False")
                                red.lpush(
                                    "work_data_py",
                                    pickle.dumps({
                                        "dictdata": dictdata,  # perscheme items carry no "data" field
                                        "type": "perscheme"
                                    }))
                            else:
                                logger.debug(
                                    data_parser.getperfile().capitalize() +
                                    " is_perscheme_doned res:True")
                    else:
                        logger.debug("Host block:" + host)
            else:
                time.sleep(random.uniform(1, 2))
    except KeyboardInterrupt as ex:
        logger.warning("Ctrl+C was pressed ,aborted program")
def init_options():
    """Parse command-line options, configure proxy/logging, and load the
    configured pocs and plugins (flat-directory loader variant).

    --disable removes matching pocs and overrides --enable; --enable '*'
    loads everything; otherwise --enable acts as a filename substring filter.
    Exits when no pocs end up loaded.
    """
    cmd_line_options.update(cmd_line_parser().__dict__)
    # banner / --version
    if cmd_line_options.show_version:
        print(banner())
        sys.exit()
    print(banner())
    # --check-reverse: probe the reverse servers, then exit
    if cmd_line_options.check_reverse:
        check_reverse()
        sys.exit()
    # TODO: needs validation, fallbacks and socks proxy support
    if cmd_line_options.proxy:
        host_port = cmd_line_options.proxy
        cmd_line_options.proxy = {
            "http": "http://" + host_port,
            "https": "https://" + host_port,
        }
    else:
        cmd_line_options.proxy = {}
    if cmd_line_options.verbose == 0:
        logger.logger.setLevel(logging.DEBUG)
    elif cmd_line_options.verbose == 1:
        logger.logger.setLevel(logging.INFO)
    elif cmd_line_options.verbose == 2:
        logger.logger.setLevel(logging.WARNING)
    elif cmd_line_options.verbose == 3:
        logger.logger.setLevel(logging.CRITICAL)
    # html-output destination
    if cmd_line_options.command == "webscan":
        logger.info("Vuln results will output to: {}".format(
            cmd_line_options.html_output))
        # (removed commented-out html file pre-creation / overwrite guard)
    cmd_line_options.allow_poc = []
    cmd_line_options.pocs_perfile = []
    cmd_line_options.pocs_perfoler = []
    cmd_line_options.pocs_perscheme = []
    cmd_line_options.pocs_load_moudle = {
        "perfile": [],
        "perfolder": [],
        "perscheme": []
    }
    print(cmd_line_options.disable)
    if "all" not in cmd_line_options.disable:
        poc_keys = {
            "perfile": cmd_line_options.pocs_perfile,
            "perfolder": cmd_line_options.pocs_perfoler,
            "perscheme": cmd_line_options.pocs_perscheme
        }
        if cmd_line_options.disable:
            # --disable wins: ignore --enable and take everything not disabled
            cmd_line_options.enable = None
            for _dir in ["perfile", "perfolder", "perscheme"]:
                path_dir = os.path.join(paths.MYSCAN_POCS_PATH, _dir)
                exists_poc_with_ext = list(
                    filter(lambda x: not x.startswith("__"),
                           os.listdir(path_dir)))
                temp = copy.deepcopy(exists_poc_with_ext)
                for disable in cmd_line_options.disable:
                    for poc in exists_poc_with_ext:
                        if disable in poc and poc in temp:
                            temp.remove(poc)
                for x in temp:
                    poc_keys.get(_dir).append(os.path.join(path_dir, x))
        if cmd_line_options.enable:
            for _dir in ["perfile", "perfolder", "perscheme"]:
                path_dir = os.path.join(paths.MYSCAN_POCS_PATH, _dir)
                exists_poc_with_ext = list(
                    filter(
                        lambda x: (not x.startswith("__") and
                                   (x.endswith(".py") or x.endswith(".yaml"))),
                        os.listdir(path_dir)))
                if "*" == cmd_line_options.enable:
                    for poc in exists_poc_with_ext:
                        poc_keys.get(_dir).append(
                            os.path.join(path_dir, poc))
                else:
                    for disable in cmd_line_options.enable:
                        for poc in exists_poc_with_ext:
                            if disable in poc:
                                poc_keys.get(_dir).append(
                                    os.path.join(path_dir, poc))
        for _dir in ["perfile", "perfolder", "perscheme"]:
            logger.debug("{} total: {} pocs".format(
                _dir.capitalize(), len(poc_keys.get(_dir))))
            for poc in poc_keys.get(_dir):
                logger.info("Load Pocs:{}".format(poc))
                cmd_line_options.pocs_load_moudle[_dir].append({
                    "poc": poc,
                    "class": load_file_to_module(poc)
                })
        if not (cmd_line_options.pocs_perfile
                or cmd_line_options.pocs_perfoler
                or cmd_line_options.pocs_perscheme):
            logger.warning(
                "No Pocs ,please use --enable * or like --enable un_auth sqli"
            )
            sys.exit()
    # plugin argument handling
    cmd_line_options.open_lugins = []
    plugins_dir = paths.MYSCAN_PLUGINS_PATH
    exists_poc_with_ext = list(
        filter(lambda x: not x.startswith("__"), os.listdir(plugins_dir)))
    if cmd_line_options.plugins:
        for openplugin in list(set(cmd_line_options.plugins)):
            for plugin in exists_poc_with_ext:
                if openplugin in plugin:
                    logger.info("Load Plugin:{}".format(
                        os.path.join(plugins_dir, plugin)))
                    cmd_line_options.open_lugins.append(
                        os.path.join(plugins_dir, plugin))
def start():
    """Main dispatch loop for webscan/hostscan.

    Pops raw targets from redis ('burpdata' or 'hostdata'), assigns each a
    random job id, builds per-poc work items, records the job hash, and
    fans the items out to the plugin/worker queues. Sleeps briefly when
    idle; per-item errors are logged and skipped; exits on Ctrl+C.

    Fix: the hostscan branch previously wrote the identical
    red.hmset(id, ...) hash twice in a row; the duplicate call is removed.
    """
    logger.info("Myscan Python Moudle Listen ...")
    red = getredis()
    try:
        while True:
            try:
                if cmd_line_options.command == "webscan":
                    data = red.rpop("burpdata")
                    if data:
                        red.hincrby("count_all", "doned", amount=1)
                        logger.debug("Get one data from burpdata")
                        dictdata = None
                        try:
                            dictdata = json.loads(data)
                        except Exception as ex:
                            logger.warning(
                                "Process burpdata to json get error:" + str(ex))
                            continue
                        if dictdata is not None:
                            # assign this piece of traffic a job id
                            id = get_random_str(10) + str(get_random_num(5))
                            toredisdatas = []
                            # plugin work items
                            if cmd_line_options.allow_plugin:
                                for pluginhash, plugin_info in cmd_line_options.allow_plugin.items():
                                    toredisdatas.append(
                                        ("plugin_data_py",
                                         pickle.dumps({
                                             "id": id,
                                             "pochash": pluginhash,
                                             "poc": plugin_info.get("poc")
                                         })))
                            is_filter = dictdata.get("filter")
                            host = dictdata.get("url").get("host")
                            port = dictdata.get("url").get("port")
                            block = block_info(host, port)
                            if allow_host(host) and not block.is_block():
                                # passive search mode, when enabled
                                if scan_set.get("search_open", False):
                                    s = searchmsg(dictdata)
                                    s.verify()
                                    # s.saveresult()
                                data_parser = dictdata_parser(dictdata)
                                # perfile
                                if cmd_line_options.pocs_perfile:
                                    if not is_filter or not data_parser.is_perfile_doned():
                                        logger.debug(
                                            data_parser.getperfile().capitalize()
                                            + " is_perfile_doned res:False")
                                        for poc in cmd_line_options.pocs_perfile:
                                            toredisdatas.append(
                                                ("work_data_py",
                                                 pickle.dumps({
                                                     "id": id,
                                                     "data": data_parser.getperfile(),
                                                     "poc": poc,
                                                     "type": "perfile"
                                                 })))
                                    else:
                                        logger.debug(
                                            data_parser.getperfile().capitalize()
                                            + " is_perfile_doned res:True")
                                # perfolder
                                if cmd_line_options.pocs_perfoler:
                                    if not is_filter:
                                        folders = data_parser.getperfolders()
                                    else:
                                        folders = data_parser.is_perfolder_doned()
                                    if folders != []:
                                        for folder in folders:
                                            for poc in cmd_line_options.pocs_perfoler:
                                                toredisdatas.append(
                                                    ("work_data_py",
                                                     pickle.dumps({
                                                         "id": id,
                                                         "data": folder,
                                                         "poc": poc,
                                                         "type": "perfolder"
                                                     })))
                                # perscheme
                                if cmd_line_options.pocs_perscheme:
                                    if not is_filter or not data_parser.is_perscheme_doned():
                                        logger.debug(
                                            data_parser.getperfile().capitalize()
                                            + " is_perscheme_doned res:False")
                                        for poc in cmd_line_options.pocs_perscheme:
                                            toredisdatas.append(
                                                ("work_data_py",
                                                 pickle.dumps({
                                                     "id": id,
                                                     "data": None,  # perscheme items carry no data field
                                                     "poc": poc,
                                                     "type": "perscheme"
                                                 })))
                                    else:
                                        logger.debug(
                                            data_parser.getperfile().capitalize()
                                            + " is_perscheme_doned res:True")
                            else:
                                logger.debug("Host block:" + host)
                            # dispatch
                            if toredisdatas:
                                # create the job hash for this id
                                red.hmset(id, {
                                    'data': data,
                                    'count': len(toredisdatas)
                                })
                                for key, pickledata in toredisdatas:
                                    if key == "plugin_data_py":
                                        red.lpush("plugin_data_py", pickledata)
                                    else:
                                        if scan_set.get("random_test", False):
                                            red.sadd("work_data_py_set", pickledata)
                                        else:
                                            red.lpush("work_data_py", pickledata)
                    else:
                        time.sleep(random.uniform(0, 1))
                elif cmd_line_options.command == "hostscan":
                    data = red.rpop("hostdata")
                    if data:
                        red.hincrby("count_all", "doned", amount=1)
                        logger.debug("Get one data from hostdata")
                        dictdata = None
                        try:
                            dictdata = json.loads(data)
                        except Exception as ex:
                            logger.warning(
                                "Process hostdata to json get error:" + str(ex))
                            continue
                        if dictdata is not None:
                            # run plugins first
                            if cmd_line_options.plugins:
                                plugin(dictdata)
                            if "all" in cmd_line_options.disable:
                                continue
                            is_filter = dictdata.get("filter")
                            host = dictdata.get("addr")
                            port = dictdata.get("port")
                            block = block_info(host, port)
                            id = get_random_str(10) + str(get_random_num(5))
                            if allow_host(host):
                                toredisdatas = []
                                if is_filter:
                                    if not block.is_block():
                                        block.block_it()
                                    else:
                                        continue
                                for poc in cmd_line_options.pocs_perserver:
                                    toredisdatas.append(
                                        pickle.dumps({
                                            "id": id,
                                            "data": None,  # perserver items carry no data field
                                            "poc": poc,
                                            "type": "perserver"
                                        }))
                                if toredisdatas:
                                    # single write (was duplicated back-to-back)
                                    red.hmset(id, {
                                        'data': data,
                                        'count': len(toredisdatas)
                                    })
                                    for pickledata in toredisdatas:
                                        if scan_set.get("random_test", False):
                                            red.sadd("work_data_py_set", pickledata)
                                        else:
                                            red.lpush("work_data_py", pickledata)
                    else:
                        time.sleep(random.uniform(1, 2))
            except Exception as ex:
                logger.debug("Run start get error:{}".format(ex))
                traceback.print_exc()
                continue
    except KeyboardInterrupt as ex:
        logger.warning("Ctrl+C was pressed ,aborted program")