def run(self): dictdata = json.loads(self.red.hget(self.workdata.get("id"), "data")) # count==0 则删除,防止内存过大 current_count = self.red.hincrby(self.workdata.get("id"), "count", amount=-1) if current_count == 0: logger.debug("Will delete data for id:{}".format(self.workdata.get("id"))) self.red.delete(self.workdata.get("id")) self.workdata["dictdata"] = copy.deepcopy(dictdata) self.poc = self.workdata.get("poc") func_data = cmd_line_options.pocs_load_moudle[self.workdata.get('type')].get(hash(self.poc), None) if func_data is None: logger.warning("{} poc not found,will kill this task".format(self.poc)) return func = copy.deepcopy(func_data.get("class").POC) class_poc = func(self.workdata) logger.debug("Start python script:{} at {}".format(self.poc, self.workdata.get("data", "None"))) self.red.hincrby("count_all", "active", amount=1) try: class_poc.verify() # process = psutil.Process(os.getpid()) # os.getpid() # memInfo = process.memory_info() # print('pid: {}'.format(os.getpid()), int(memInfo.rss / 1024 / 1014), 'mb on {}'.format(os.path.basename(self.poc))) if class_poc.result: self.result = class_poc.result self.saveResult() # logger.critical(poc.result) logger.debug("Done python script:{} at {}".format(self.poc, self.workdata.get("data", "None"))) except Exception as ex: traceback.print_exc() finally: self.red.hincrby("count_all", "active", amount=-1)
def writeresults():
    """Background writer: drain the "vuln_all_write" redis queue into html files.

    Results are unpickled from redis and appended to numbered output files,
    rotating to a new file every scan_set["max_html_output"] results. Runs
    until interrupted with Ctrl+C.
    """
    red = getredis()
    total_write = 0
    # Guarantee an extension so the chunk index can be spliced in before it.
    if "." not in cmd_line_options.html_output:
        cmd_line_options.html_output = cmd_line_options.html_output + ".html"
    while True:
        try:
            results = []
            while True:
                id = red.lpop("vuln_all_write")
                if id:
                    pickle_data = red.get(id)
                    if pickle_data:
                        results.append(pickle.loads(pickle_data))
                else:
                    # Queue drained: flush what we collected, then poll again later.
                    if results:
                        for result in results:
                            total_write += 1
                            # Rotate into numbered files every max_html_output results.
                            current = int(total_write / scan_set.get("max_html_output", 10))
                            outfilename = "{}{}.html".format(
                                '.'.join(cmd_line_options.html_output.split(".")[:-1]),
                                current)
                            check(outfilename)
                            out = htmlexport([result], outfilename)
                            out.save()
                        results = []
                    time.sleep(5)
        except KeyboardInterrupt:
            # BUGFIX: previously only logged "aborted program" but kept looping;
            # now the writer actually stops on Ctrl+C.
            logger.warning("Ctrl+C was pressed ,aborted program")
            return
        except Exception as ex:
            traceback.print_exc()
            logger.warning(ex)
def saveResult(self):
    """Persist every dict in self.result to redis and log it.

    Each result is normalized into result_data (name defaults to the poc
    filename, url defaults per scan command), pickled under a random
    "result_<id>" key, and indexed on the per-name list, "vuln_all", and the
    "vuln_all_write" queue consumed by the html writer.
    """
    for result in self.result:
        if not isinstance(result, dict):
            # BUGFIX: message previously said "it's a dict" — each result must BE a dict.
            logger.warning("Poc (python script) result error,it's not a dict .")
            return
        url_default = ""
        if cmd_line_options.command == "webscan":
            url_default = self.workdata.get("dictdata").get("url").get("url")
        elif cmd_line_options.command == "hostscan":
            url_default = "{type}://{addr}:{port}".format(**self.workdata.get("dictdata"))
        result_data = {
            # Fall back to the poc filename (without extension) when no name is given.
            "name": result.get("name", os.path.splitext(os.path.split(self.poc)[-1])[0]),
            "url": result.get("url", url_default),
            "level": result.get("level", "-1"),
            "createtime": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
            "detail": {}
        }
        if result.get("detail", None) and isinstance(result.get("detail"), dict):
            result_data["detail"] = result.get("detail")
        else:
            result_data["detail"] = {"noshow": "no details"}
        random_id = get_random_str(9)
        # The full (untruncated) result is stored; the truncation below is only
        # so the critical log line stays readable.
        self.red.set("result_" + random_id, pickle.dumps(result_data))
        self.red.lpush("vuln_" + result_data["name"].replace(" ", "_"), "result_" + random_id)
        self.red.lpush("vuln_all", "result_" + random_id)
        self.red.lpush("vuln_all_write", "result_" + random_id)  # queue for the html writer
        for k, v in result_data.get("detail").items():
            if str(k).lower().startswith("request") or str(k).lower().startswith("response"):
                if len(str(v)) > 1000:  # idiom: len(...) instead of .__len__()
                    result_data.get("detail")[k] = str(v)[:500] + " ..."
        logger.critical(result_data)
def rmi_response(client, address):
    """Answer one fake-RMI handshake and record the lookup path the client sends.

    client/address come from socket.accept(); the connection is always closed
    in the finally block. The parsed path is decoded and inserted into the db.
    """
    try:
        client.settimeout(5)
        buf = client.recv(1024)
        if b"\x4a\x52\x4d\x49" in buf:  # "JRMI" stream-protocol magic
            # ProtocolAck pointing the client back at (host, port).
            send_data = b"\x4e"
            send_data += struct.pack(">h", len(address[0]))
            send_data += address[0].encode()
            send_data += b"\x00\x00"
            send_data += struct.pack(">H", address[1])
            client.send(send_data)
            total = 3  # recv may deliver partial data; retry a bounded number of times
            buf1 = b""
            while total:
                # BUGFIX: total was never decremented, so a peer that closed the
                # connection (recv returning b"") spun this loop forever.
                total -= 1
                buf1 += client.recv(512)
                if len(buf1) > 50:
                    break
            if buf1:
                # The lookup name follows the 0xdf74 marker; skip its 2-byte length prefix.
                path = bytearray(buf1).split(b"\xdf\x74")[-1][2:].decode(
                    errors="ignore")
                # Use the logger instead of bare print, consistent with ldap_response.
                logger.debug("client:{} send path:{}".format(address, path))
                res = {}
                # NOTE(review): type is "dns" although this is the rmi listener —
                # downstream queries may depend on it; confirm before changing.
                res["type"] = "dns"
                res["client"] = address[0]
                res["query"] = path
                res["info"] = decode_rmi(path)
                res["time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                logger.info("Insert to db:" + str(res))
                insert_db(res)
    except Exception as ex:
        logger.warning('Run rmi error:{} address:{}'.format(ex, address))
    finally:
        client.close()
def decode_rmi(query):
    """Decode an rmi lookup path: skip the 4-char prefix, hex-decode the rest.

    Returns the decoded text, or "" when the payload is not valid hex.
    """
    decoded = ""
    try:
        payload = query[4:]
        decoded = binascii.a2b_hex(payload.encode()).decode()
    except Exception as ex:
        logger.warning("decode rmi error:{} sourquery:{}".format(ex, query))
    return decoded
def run(self): dictdata = json.loads(self.red.hget(self.workdata.get("id"), "data")) # count==0 则删除,防止内存过大 current_count = self.red.hincrby(self.workdata.get("id"), "count", amount=-1) if current_count == 0: logger.debug("Will delete data for id:{}".format(self.workdata.get("id"))) self.red.delete(self.workdata.get("id")) # self.workdata["dictdata"] = copy.deepcopy(dictdata) self.poc = self.workdata.get("poc") func_data = cmd_line_options.allow_plugin[self.workdata.get('pochash')].get("class", None) if func_data is None: logger.warning("{} poc not found,will kill this task".format(self.poc)) return func = func_data.POC class_poc = func(dictdata) logger.debug("Start python plugin script:{} ".format(self.poc)) try: class_poc.verify() # process = psutil.Process(os.getpid()) # os.getpid() # memInfo = process.memory_info() # print('pid: {}'.format(os.getpid()), int(memInfo.rss / 1024 / 1014), 'mb on {}'.format(os.path.basename(self.poc))) logger.debug("Done python plugin script:{} ".format(self.poc)) except Exception as ex: traceback.print_exc()
def reverse_start():
    """Start every reverse-connection listener (http, rmi, ldap, dns).

    Announces each listener's address, initializes the db, then spawns the
    http/rmi/ldap servers as daemon processes and runs the dns server in the
    foreground. Any startup failure logs a warning and exits the program.
    """
    try:
        secret_key = reverse_set.get("secret_key")
        if not secret_key:
            # No key configured — generate a random one for this run.
            secret_key = get_random_str(9)
        logger.info("Reverse http server: http://{}:{} secret_key: {}".format(
            reverse_set.get("reverse_http_ip"),
            reverse_set.get("reverse_http_port"), secret_key))
        logger.info("Reverse dns server: {}".format(
            reverse_set.get("reverse_domain")))
        logger.info("Reverse rmi server: {}:{}".format(
            reverse_set.get("reverse_rmi_ip"),
            reverse_set.get("reverse_rmi_port")))
        logger.info("Reverse ldap server: {}:{}".format(
            reverse_set.get("reverse_ldap_ip"),
            reverse_set.get("reverse_ldap_port")))
        init_db()
        try:
            p = Process(target=http_start, args=(secret_key, ))
            p.daemon = True
            p.start()
            p1 = Process(target=rmi_start)
            p1.daemon = True
            p1.start()
            p2 = Process(target=ldap_start)
            p2.daemon = True
            p2.start()
            # dns_start() blocks; it keeps the daemonized children alive.
            dns_start()
        except KeyboardInterrupt as ex:
            logger.warning("Ctrl+C was pressed ,aborted program")
    except Exception as ex:
        logger.warning("Start reverse get error:{}".format(ex))
        sys.exit()
def saveResult(self):
    """Persist every dict in self.result to redis and log it.

    Each result is normalized into result_data (name defaults to the poc
    filename, url defaults to the scanned request's url), pickled under a
    random "result_<id>" key, and indexed on the per-name list, "vuln_all",
    and the "vuln_all_write" queue consumed by the html writer.
    """
    for result in self.result:
        if not isinstance(result, dict):
            # BUGFIX: message previously said "it's a dict" — each result must BE a dict.
            logger.warning("Poc (python script) result error,it's not a dict .")
            return
        result_data = {
            # Fall back to the poc filename (without extension) when no name is given.
            "name": result.get("name", os.path.splitext(os.path.split(self.poc)[-1])[0]),
            "url": result.get("url", self.workdata.get("dictdata").get("url").get("url")),
            "level": result.get("level", "-1"),
            "createtime": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
            "detail": {}
        }
        if result.get("detail", None) and isinstance(result.get("detail"), dict):
            result_data["detail"] = result.get("detail")
        else:
            result_data["detail"] = {"noshow": "no details"}
        random_id = get_random_str(9)
        self.red.set("result_" + random_id, pickle.dumps(result_data))
        self.red.lpush("vuln_" + result_data["name"].replace(" ", "_"), "result_" + random_id)
        self.red.lpush("vuln_all", "result_" + random_id)
        self.red.lpush("vuln_all_write", "result_" + random_id)  # queue for the html writer
        logger.critical(result_data)
def redis_conn():
    """Parse --redis ("pass@host:port:db") and create the global connection pool.

    An empty password before "@" means no auth; port defaults to 6379 and db
    to 0 when omitted. A malformed argument (no "@") logs usage and exits.
    """
    arg_redis = cmd_line_options.redis
    if arg_redis:
        if "@" in arg_redis:
            pwd, ipport = arg_redis.split("@", 1)
            if not pwd:
                pwd = None
            # BUGFIX: the old guard (":" in ipport and ipport.count(".") >= 2)
            # mis-parsed hostnames ("localhost:6379:0" became the host) and
            # crashed on "host:port" (2 values unpacked into 3 names).
            # Parse by the number of colon-separated parts instead.
            parts = ipport.split(":")
            if len(parts) == 3:
                ip, port, db = parts
            elif len(parts) == 2:
                ip, port = parts
                db = 0
            else:
                ip = ipport
                port = 6379
                db = 0
            logger.info(
                "Redis connection args: pwd:{},ip:{},port:{},db:{}".format(
                    pwd, ip, port, db))
            conn.redis = redis.ConnectionPool(max_connections=300,
                                              host=ip,
                                              password=pwd,
                                              port=int(port),
                                              db=int(db))
            red = getredis()
        else:
            # error_msg = "Set reids connection error,please check redis-server"
            error_msg = "Please use --redis pass@host:port:db ,if pass is none ,like --redis @host:port:db"
            logger.warning(error_msg)
            sys.exit()
def ldap_response(client, address):
    """Answer one fake-LDAP bind and record the search path the client sends.

    client/address come from socket.accept(); the connection is always closed
    in the finally block. The parsed path is decoded and inserted into the db.
    """
    try:
        client.settimeout(30)
        buf = client.recv(512)
        # BindRequest starts with the BER sequence 30 0c 02 01 ...
        if buf.hex().startswith("300c0201"):
            # Canned BindResponse: success.
            send_data = b"\x30\x0c\x02\x01\x01\x61\x07\x0a\x01\x00\x04\x00\x04\x00"
            client.send(send_data)
            total = 3  # recv may deliver partial data; retry a bounded number of times
            buf1 = b""
            while total:
                # BUGFIX: total was never decremented, so a peer that closed the
                # connection (recv returning b"") spun this loop forever.
                total -= 1
                buf1 += client.recv(512)
                if len(buf1) > 16:
                    break
            if buf1:
                path = getldappath(buf1).decode(errors="ignore")
                logger.debug("client:{} send path:{}".format(address, path))
                res = {}
                res["type"] = "ldap"
                res["client"] = address[0]
                res["query"] = path
                res["info"] = decode(path)
                res["time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                logger.info("Insert to db:" + str(res))
                insert_db(res)
    except Exception as ex:
        logger.warning('Run ldap error:{} address:{}'.format(ex, address))
    finally:
        client.close()
def check_reverse():
    """Self-test the reverse servers by triggering dns and http callbacks.

    Runs platform-appropriate shell commands (ping/nslookup plus mshta on
    Windows or curl/wget elsewhere) against random subdomains/urls, then
    queries the reverse db to confirm each callback arrived.
    """
    ver = platform.system()
    dns_random_str = "myscan_dnstest_" + get_random_str(10)
    http_random_str = "myscan_httptest_" + get_random_str(10)
    domain = "{}.{}".format(dns_random_str, reverse_set.get("reverse_domain"))
    url = "http://{}:{}/?d={}".format(reverse_set.get("reverse_http_ip"),
                                      reverse_set.get("reverse_http_port"),
                                      http_random_str)
    logger.info(
        "Will exec ping ,nslookup,mshta,curl,wget to test server , it will take around 20s"
    )
    if ver.lower() == "windows":
        cmd = "ping -n 2 {}>nul & nslookup {} >nul & mshta {}".format(
            domain, domain, url)
    else:
        cmd = "ping -c 2 {} 2>&1 >/dev/null & nslookup {} 2>&1 >/dev/null & curl {} 2>&1 >/dev/null & wget {} --output-document=/dev/null".format(
            domain, domain, url, url)
    logger.info("Start exec cmd:{}".format(cmd))
    run_cmd(cmd)
    res_http = query_reverse(http_random_str)
    res_dns = query_reverse(domain, False)
    # TODO: add an rmi client probe here to test the rmi listener as well.
    if res_http[0]:
        logger.critical("Client connect http reverse server: Success")
    else:
        logger.warning("Client connect http reverse server: Fail")
    if res_dns[0]:
        logger.critical("Client connect dns reverse server: Success")
    else:
        # BUGFIX: message said "disconnect"; align wording with the http branch.
        logger.warning("Client connect dns reverse server: Fail")
def run(self):
    """Worker loop: drain self.queue, applying self.func to each item.

    Exceptions from individual items are logged and do not stop the loop.
    """
    while not self.queue.empty():
        task = self.queue.get()
        try:
            self.func(task)
        except Exception as err:
            logger.warning("run thread error:{}".format(str(err)))
def importssti():
    """Collect SSTI test payloads from every registered plugin into others.ssti_payloads."""
    try:
        payloads = []
        for plugin_cls in plugins:
            payloads += plugin_cls().generate_payloads()
        others.ssti_payloads = payloads
        # logger.debug("import ssti payloads success")
    except Exception as ex:
        logger.warning("import ssti payloads error:{}".format(ex))
def run(self):
    """Execute every enabled plugin script against a private copy of self.dictdata.

    Each script gets a deep copy so plugins cannot interfere with one another;
    a failing plugin is logged and skipped.
    """
    for script in cmd_line_options.open_lugins:
        try:
            module = load_file_to_module(script)
            instance = module.plugin(copy.deepcopy(self.dictdata))
            logger.debug("Start plugin script:{}".format(script))
            instance.run()
        except Exception as ex:
            logger.warning("run plugin script:{} error:{}".format(
                script, ex))
def check():
    """Create the html output file with its header when it does not exist yet."""
    if not os.path.exists(cmd_line_options.html_output):
        try:
            with open(cmd_line_options.html_output, "w") as out:
                out.write(gethtmlheader())
        except Exception as ex:
            logger.warning("Create file {} get error:{}".format(
                cmd_line_options.html_output, ex))
def check(filename):
    """Ensure *filename* exists, creating it with the html header when missing."""
    if not os.path.exists(filename):
        try:
            with open(filename, "w") as out:
                out.write(gethtmlheader())
                out.flush()
        except Exception as ex:
            logger.warning("Create file {} get error:{}".format(filename, ex))
def get_data_from_file(filename):
    """Read *filename* and return its non-blank lines with surrounding whitespace stripped.

    Decoding errors are ignored; a read failure logs a warning and returns
    whatever was collected so far.
    """
    collected = []
    try:
        with open(filename, errors="ignore") as handle:
            for raw_line in handle:
                cleaned = raw_line.strip()
                if not cleaned:
                    continue
                collected.append(cleaned)
    except Exception as ex:
        logger.warning("get_data_from_file get error:{}".format(ex))
    return collected
def verify(self): dictdata = self.dictdata # 把请求体和响应体 base64解码,便于搜索 request_raw = base64.b64decode(self.dictdata.get("request").get("raw").encode("utf8")) response_raw = base64.b64decode(self.dictdata.get("response").get("raw").encode("utf8")) dictdata["request"]["raw"] = request_raw.decode("utf-8", errors="ignore") dictdata["response"]["raw"] = response_raw.decode("utf-8", errors="ignore") dictdata["request"]["headers"] = str(dictdata["request"]["headers"]) dictdata["response"]["headers"] = str(dictdata["response"]["headers"]) if "others" in dictdata.keys(): del dictdata["others"] if "filter" in dictdata.keys(): del dictdata["filter"] dictdata["source"] = "burp" dictdata["url"]["ip"] = self.getaddr(dictdata.get("url").get("host")) dictdata["url"]["pathroot"] = "{protocol}://{host}:{port}/".format(**dictdata.get("url")) if dictdata["url"]["extension"] == "ico": body = response_raw[int(dictdata.get("response").get("bodyoffset")):] dictdata["url"]["icon_hash"] = str(mmh3.hash(base64.b64encode(body))) actions = [] action = { "_index": "httpinfo", "_id": self.http_md5(dictdata), "_source": dictdata } actions.append(action) # get url from html urls_from_html = self.get_html_url(dictdata["url"]["url"], response_raw[int(dictdata.get("response").get("bodyoffset")):].decode( "utf-8", "ignore"), dictdata["response"]["mime_inferred"]) logger.debug("urls_from_html total:{}".format(len(urls_from_html))) if urls_from_html: mythread(self.getaddr, self.hosts, 50) for url_data in urls_from_html: url_data["ip"] = self.dict_host_ip[url_data["host"]] action_ = { "_index": "httpinfo", "_id": getmd5("{ip}{pathroot}{path}".format(**url_data)), "_source": {"url": url_data, "source": "html", "ts": dictdata["ts"]} } actions.append(action_) try: helpers.bulk(others.es_conn, actions) logger.debug("es insert {} lines".format(len(actions))) except Exception as ex: logger.warning("Plugin {} get error:{}".format(__name__, ex)) traceback.print_exc()
def count_status():
    """Periodically log scan progress counters from redis until interrupted.

    Every scan_set["status_flush_time"] seconds, reads the queue lengths and
    "count_all" hash, queries the reverse-server hit total, and logs one
    STATUS line whose shape depends on the scan command.
    """
    red = getredis()
    while True:
        try:
            time.sleep(int(scan_set.get("status_flush_time", 30)))
            burpdata_undo = red.llen("burpdata")
            # random_test mode keeps pending work in a set instead of a list.
            if scan_set.get("random_test", False):
                unactive = red.scard("work_data_py_set")
            else:
                unactive = red.llen("work_data_py")
            vuln = red.llen("vuln_all")
            data = red.hmget("count_all", "doned", "request", "block_host",
                             "request_fail", "active")
            burpdata_doned, request, block_host, request_fail, active = list(
                map(lambda x: x.decode(), data))
            reverse_count = 0
            res, resdata = query_reverse("myscan_total")
            if res:
                reverse_count = int(resdata.get("total"))
            if cmd_line_options.command == "hostscan":
                logger.warning(
                    "do/undo/active/unactive:{}/{}/{}/{} vuln:{}/reverse:{}".
                    format(burpdata_doned, burpdata_undo, active, unactive,
                           vuln, reverse_count), text="STATUS")
            elif cmd_line_options.command == "webscan":
                if cmd_line_options.allow_plugin:
                    undoplugin = red.llen("plugin_data_py")
                    logger.warning(
                        "do/undo/active/unactive/undoplugin:{}/{}/{}/{}/{} req_total/fail:{}/{} blockhost:{} vuln:{}/reverse:{}"
                        .format(burpdata_doned, burpdata_undo, active,
                                unactive, undoplugin, request, request_fail,
                                block_host, vuln, reverse_count), text="STATUS")
                else:
                    logger.warning(
                        "do/undo/active/unactive:{}/{}/{}/{} req_total/fail:{}/{} blockhost:{} vuln:{}/reverse:{}"
                        .format(burpdata_doned, burpdata_undo, active,
                                unactive, request, request_fail, block_host,
                                vuln, reverse_count), text="STATUS")
        except KeyboardInterrupt:
            # BUGFIX: previously only logged "aborted program" but kept looping;
            # now the status loop actually stops on Ctrl+C.
            logger.warning("Ctrl+C was pressed ,aborted program")
            return
        except Exception as ex:
            logger.warning("Count stat moudle get error:{}".format(ex))
            traceback.print_exc()
def decode_dns(dnslog):
    """Recover the hex-encoded message carried in a reverse-dns query name.

    Strips the 4-char prefix and the configured reverse domain, joins the
    remaining labels, and hex-decodes them. Non-hex queries (random probes)
    silently yield "".
    """
    decoded = ""
    try:
        payload = dnslog[4:]
        payload = payload.replace("." + reverse_set.get("reverse_domain"), "")
        payload = "".join(payload.split("."))
        decoded = binascii.a2b_hex(payload.encode()).decode()
    except binascii.Error:
        # Not a hex payload — expected for unrelated lookups; return "".
        pass
    except Exception as ex:
        logger.warning('decode dns get error:{}'.format(ex))
    return decoded
def verify(self):
    """Scan the raw response for each configured sensitive-info pattern.

    For every entry in the global `tests` table, applies its regex (search or
    findall mode) to self.rawresponse and, when it matches and the per-vulmsg
    output cap allows it, reports a truncated finding via self.saveresult().
    """
    for info in tests:
        # Per-test output cap, falling back to the global search_maxout setting.
        self.max_out = info.get("max") if info.get(
            "max") else scan_set.get("search_maxout")
        if not self.output(info.get("vulmsg")):  # limit the number of outputs per vulmsg
            continue
        if self.checktest(info):
            try:
                res = None
                parse = ""
                if info.get("search_mode") == "search":
                    res = re.search(info.get("contains"), self.rawresponse,
                                    re.I | re.S)
                if info.get("search_mode") == "findall":
                    res = re.findall(info.get("contains"), self.rawresponse,
                                     re.I | re.S)
                if res != None:
                    if info.get("search_mode") == "search":
                        # Prefer captured groups; fall back to the whole match.
                        if res.groups():
                            parse = res.groups()
                        else:
                            parse = res.group()
                    if info.get("search_mode") == "findall":
                        parse = str(res)
                    # NOTE(review): the b"..." concatenation implies rawrequest/
                    # rawresponse are bytes, so the configured patterns must be
                    # bytes too — confirm against the tests table.
                    self.saveresult(
                        {
                            "name": "sensitive_msg_leak",
                            "url": self.url,
                            "level": info.get("level"),  # 0:Low 1:Medium 2:High
                            "detail": {
                                "vulmsg": info.get("vulmsg"),
                                "search": "mode:{search_mode} contains:{contains}".
                                format(**info),
                                "parse": parse,
                                # Truncate bulky request/response bodies for storage.
                                "request": self.rawrequest[:300] + b"..." if
                                len(self.rawrequest) > 300 else self.rawrequest,
                                "response": self.rawresponse[:300] + b"..." if
                                len(self.rawresponse) > 300 else self.rawresponse
                            }
                        }, info)
            except Exception as ex:
                # print(traceback.print_exc())
                logger.warning("run search poc get error:" + str(ex))
def run(self):
    """Execute every enabled language script against a private copy of self.dictdata.

    Each script gets a deep copy so scripts cannot interfere with one another;
    a failing script is logged (with traceback) and skipped.
    """
    for script in cmd_line_options.open_lugins:
        try:
            module = load_file_to_module(script)
            instance = module.plugin(copy.deepcopy(self.dictdata))
            logger.debug("Start languages script:{}".format(script))
            instance.run()
            logger.debug("Done languages script:{}".format(script))
        except Exception as ex:
            traceback.print_exc()
            logger.warning("run languages script:{} error:{}".format(
                script, ex))
def getldappath(buff):
    """Decode an LDAP searchRequest and return its baseObject (the queried path).

    Returns None when *buff* does not contain enough data to BER-decode.
    """
    decoder_ctx = pureldap.LDAPBERDecoderContext_TopLevel(
        inherit=pureldap.LDAPBERDecoderContext_LDAPMessage(
            fallback=pureldap.LDAPBERDecoderContext(
                fallback=pureber.BERDecoderContext()),
            inherit=pureldap.LDAPBERDecoderContext(
                fallback=pureber.BERDecoderContext())))
    try:
        # berDecodeObject returns (decoded_object, bytes_consumed).
        message, _consumed = pureber.berDecodeObject(decoder_ctx, buff)
        return message.value.baseObject
    except pureber.BERExceptionInsufficientData as ex:
        logger.warning("get error:{}".format(ex))
        return None
def get_dict():
    """Load the directory-scan wordlist into others.url_dict_path and return it.

    Uses the configured "dirfile" when set, otherwise the bundled dicc.txt.
    Blank lines are skipped; a read failure logs a warning.
    """
    others.url_dict_path = []
    configured = plugin_set.get("dirscan").get("dirfile")
    if configured:
        filename = configured
    else:
        filename = os.path.join(paths.MYSCAN_DATA_PATH, "dir", "dicc.txt")
    try:
        with open(filename) as handle:
            for raw in handle:
                entry = raw.strip()
                if entry:
                    others.url_dict_path.append(entry)
    except Exception as ex:
        logger.warning("dirscan can't open file:{} , get error:{}".format(filename, ex))
    return others.url_dict_path
def set_conn():
    """Initialize the redis pool and verify connectivity with a ping.

    Exits the program when the pool cannot be created or the ping fails.
    """
    try:
        redis_conn()
        red = getredis()
        if not red.ping():
            error_msg = "redis ping error . will exit program"
            logger.warning(error_msg)
            sys.exit()
        else:
            logger.info("Redis ping success")
    except Exception as ex:
        # BUGFIX: fixed the "connnect" typo in the user-facing error message.
        error_msg = "connect redis get error {}:please use --redis pass@host:port:db ,if pass is none ,like --redis @host:port:db".format(ex)
        logger.warning(error_msg)
        sys.exit()
def start_input():
    """For hostscan, push targets from nmap text/json input files onto redis "hostdata"."""
    if cmd_line_options.command != "hostscan":
        return
    try:
        red = getredis()
        if cmd_line_options.input_nmaptext:
            entries = get_data_from_textfile(cmd_line_options.input_nmaptext)
            logger.info("input {} lines from nmap_text".format(len(entries)))
            for entry in entries:
                red.lpush("hostdata", json.dumps(entry))
        if cmd_line_options.input_jsonfile:
            entries = get_data_from_jsonfile(cmd_line_options.input_jsonfile)
            logger.info("input {} lines from nmap_json".format(len(entries)))
            for entry in entries:
                red.lpush("hostdata", json.dumps(entry))
    except Exception as ex:
        traceback.print_exc()
        logger.warning("input target to hostdata get error:{}".format(ex))
def verify(self):
    """Scan the raw response for each configured sensitive-info pattern.

    For every entry in the global `tests` table that passes checktest(),
    applies its regex (search or findall mode) to self.rawresponse and, on a
    truthy match, appends a finding to self.result.
    """
    for info in tests:
        if not self.checktest(info):
            continue
        try:
            mode = info.get("search_mode")
            match = None
            extracted = ""
            if mode == "search":
                match = re.search(info.get("contains"), self.rawresponse,
                                  re.I | re.S)
            if mode == "findall":
                match = re.findall(info.get("contains"), self.rawresponse,
                                   re.I | re.S)
            if match:
                if mode == "search":
                    # Prefer captured groups; fall back to the whole match.
                    extracted = match.groups() if match.groups() else match.group()
                if mode == "findall":
                    extracted = str(match)
                self.result.append({
                    "name": "sensitive_msg_leak",
                    "url": self.url,
                    "level": info.get("level"),  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": info.get("vulmsg"),
                        "search": "mode:{search_mode} contains:{contains}".
                        format(**info),
                        "parse": extracted,
                        "request": self.rawrequest,
                        "response": self.rawresponse
                    }
                })
        except Exception as ex:
            # print(traceback.print_exc())
            logger.warning("run search poc get error:" + str(ex))
def process_start():
    """Spawn the daemon worker processes that run the python poc scripts."""
    try:
        try:
            logger.info("Python Script use {} process".format(cmd_line_options.process))
            logger.info("Some scan use {} threads ".format(cmd_line_options.threads))
            workers = [Process(target=run_python)
                       for _ in range(cmd_line_options.process)]
            for worker in workers:
                worker.daemon = True
                worker.start()
        except Exception as ex:
            traceback.print_exc()
            err_msg = "Error occurred while starting new scan ('{0}')".format(str(ex))
            logger.warning(err_msg)
    except KeyboardInterrupt as ex:
        logger.warning("Ctrl+C was pressed ,aborted program")
def run(self, path):
    """Probe one wordlist entry and record a finding when check_url reports a hit.

    check_url(path) returns False for a miss, or a (status_code, length) pair
    for a hit, which is appended to self.result with the canonical url.
    """
    try:
        probe = self.check_url(path)
        if probe is False:
            return
        status_code, length = probe
        normalized = path if path.startswith("/") else "/" + path
        self.result.append({
            "name": self.name,
            "url": self.rootpath + normalized,
            "level": self.level,  # 0:Low 1:Medium 2:High
            "detail": {
                "vulmsg": self.vulmsg,
                "status_code": status_code,
                "length": length
            }
        })
    except Exception as ex:
        logger.warning("dir scan run error:{}".format(ex))
def set_es_conn():
    """Create the global elasticsearch connection when "es_open" is configured.

    Verifies the server banner and requires a version newer than 7.0.0 before
    storing the client in others.es_conn and creating the httpinfo index.
    Exits the program on connection failure or an unsupported version.
    """
    if db_set.get("es_open"):
        try:
            client = connections.create_connection(
                hosts=db_set.get("es_addr"),
                http_auth=db_set.get("es_auth"),
                timeout=10)
            info = client.info()
            if "You Know, for Search" in str(info):
                # BUGFIX: the old check int(number.replace(".", "")) > 700
                # misparsed versions such as "6.8.23" (-> 6823, wrongly accepted).
                # Compare the numeric version tuple instead.
                version_tuple = tuple(
                    int(part) for part in
                    info.get("version").get("number").split(".")[:3])
                if version_tuple > (7, 0, 0):
                    logger.info("Success connect es : {}".format(
                        db_set.get("es_addr")))
                    others.es_conn = client
                    set_httpinfo()
                else:
                    logger.warning("Your es version should be > 7.0.0")
                    sys.exit()
        except Exception as ex:
            logger.warning(
                "es conn get error :{} , will exit program".format(ex))
            logger.warning(
                "if you don't want to use elasticsearch .please config 'es_open':False, in config.py"
            )
            sys.exit()