def xss_scan(request, config_level):
    """Reflected-XSS scan over every GET and POST parameter of *request*.

    Reads payloads from the "xss" rule file; every <couple> whose id is at or
    below *config_level* contributes one payload per line of its <requests>
    text node.  A parameter is flagged when the payload is echoed back
    verbatim in the response body.

    Returns a dict {"request_stat": 0|1, "message": str} where message holds
    one "payload|#|param|#|findstr|,|"-delimited record per finding.
    """
    message = {"request_stat": 0, "message": ""}
    # Hoisted: the original re-read config.load() on every hit.
    only_one_match = config.load()['only_one_match'].lower() == "true"
    dom = minidom.parse(config.rule_read("xss", get_file_handle=True)).documentElement
    for node in dom.getElementsByTagName('couple'):
        if int(node.getAttribute("id")) > config_level:
            continue
        payloads = node.getElementsByTagName('requests')[0].childNodes[0].nodeValue.strip()
        for raw_payload in payloads.splitlines():
            payload = raw_payload.strip()  # strip once, not on every use
            # Probe each GET parameter of the query string.
            for param_name in urlparse.urlparse(request['url']).query.split("&"):
                if _xss_reflected(request, payload, param_name, False):
                    message['request_stat'] = 1
                    message['message'] += "payload: %s|#|param: %s|#|findstr: %s|,|" % (
                        payload.encode('utf-8'), param_name.split("=")[0],
                        payload.encode('utf-8'))
                    if only_one_match:
                        return message
            # Probe each POST parameter; skip entirely when there is no body
            # (the original broke out on the first iteration instead).
            if request['postdata'] != "":
                for param_name in request['postdata'].split("&"):
                    if _xss_reflected(request, payload, param_name, True):
                        message['request_stat'] = 1
                        message['message'] += "payload: %s|#|param: %s|#|findstr: %s|,|" % (
                            payload.encode('utf-8'), param_name.split("=")[0],
                            payload.encode('utf-8'))
                        if only_one_match:
                            return message
    return message


def _xss_reflected(request, payload, param_name, use_postdata):
    """Inject *payload* into one parameter and report whether the response
    echoes it back verbatim (the XSS reflection check)."""
    if use_postdata:
        response = request_payload(request, payload, param_name, postdata=True)
    else:
        # Keep the original positional call; request_payload's default for
        # the postdata flag is defined elsewhere.
        response = request_payload(request, payload, param_name)
    return payload.encode("utf-8") in response
def sqlibool_scan(request, config_level):
    """Boolean-based SQL-injection scan over GET and POST parameters.

    Each <compare> rule supplies four payloads: two "true"-style probes
    (compare1/compare11) and two "false"-style probes (compare2/compare22).
    A parameter is flagged when both true responses match, both false
    responses match, and the two groups differ — the classic boolean-blind
    signature.

    Returns {"request_stat": 0|2, "message": str} with one
    "payload1|#|payload2|#|param|,|" record per finding.
    """
    message = {"request_stat": 0, "message": ""}
    # Hoisted: the original re-read config.load() on every hit.
    only_one_match = config.load()['only_one_match'].lower() == "true"
    dom = minidom.parse(config.rule_read("sqlibool", get_file_handle=True)).documentElement
    for node in dom.getElementsByTagName('couple'):
        if int(node.getAttribute("id")) > config_level:
            continue
        for compare in node.getElementsByTagName("compare"):
            compare1 = compare.getElementsByTagName("compare1")[0].childNodes[0].nodeValue
            compare11 = compare.getElementsByTagName("compare11")[0].childNodes[0].nodeValue
            compare2 = compare.getElementsByTagName("compare2")[0].childNodes[0].nodeValue
            compare22 = compare.getElementsByTagName("compare22")[0].childNodes[0].nodeValue
            payload_group = (compare1, compare11, compare2, compare22)
            # GET parameters.
            for param_name in urlparse.urlparse(request['url']).query.split("&"):
                if _sqlibool_hit(request, payload_group, param_name, False):
                    message['request_stat'] = 2
                    message['message'] += "payload1: %s|#|payload2: %s|#|param: %s|,|" % (
                        compare1.encode('utf-8'), compare2.encode('utf-8'),
                        param_name.split("=")[0])
                    if only_one_match:
                        return message
            # POST parameters; skip entirely when there is no body
            # (the original broke out on the first iteration instead).
            if request['postdata'] != "":
                for param_name in request['postdata'].split("&"):
                    if _sqlibool_hit(request, payload_group, param_name, True):
                        message['request_stat'] = 2
                        message['message'] += "payload1: %s|#|payload2: %s|#|param: %s|,|" % (
                            compare1.encode('utf-8'), compare2.encode('utf-8'),
                            param_name.split("=")[0])
                        if only_one_match:
                            return message
    return message


def _sqlibool_hit(request, payload_group, param_name, use_postdata):
    """Send the four comparison payloads (true, true, false, false) against
    one parameter and report whether the boolean-blind signature holds."""
    responses = []
    for payload in payload_group:
        if use_postdata:
            responses.append(request_payload(request, payload, param_name, postdata=True))
        else:
            responses.append(request_payload(request, payload, param_name))
    r1, r11, r2, r22 = responses
    return r1 == r11 and r2 == r22 and r1 != r2
def content_deal(headers, host, method, postdata, uri, packet):
    """Filter a captured request and enqueue it for scanning.

    Drops the request when its path ends with a blacklisted extension, its
    host matches a blacklisted domain, or (when a whitelist is configured)
    its host matches none of the whitelisted domains.  Surviving requests are
    stored in redis under their hash and pushed onto the "waiting" queue,
    unless the Gdscan marker header shows the request was generated by the
    scanner itself.
    """
    u = urlparse.urlparse(uri)
    conf = config.load()  # one read instead of three
    white_domain = conf['white_domain']
    black_domain = conf['black_domain']
    black_ext = conf['black_ext']
    hostname = u.netloc.lower().split(':')[0]  # strip any :port
    # BUG FIX: ''.split(',') yields [''] and endswith('') is always True, so
    # an empty blacklist used to drop every single request.  Skip empties.
    for ext in black_ext.split(','):
        if ext and u.path.lower().endswith(ext):
            return
    for domain in black_domain.split(','):
        if domain and hostname.endswith(domain):
            return
    if white_domain != "":
        # BUG FIX: the original returned as soon as ANY whitelist entry did
        # not match, i.e. the host had to match EVERY entry — a multi-entry
        # whitelist blocked everything.  Accept when any entry matches.
        if not any(hostname.endswith(domain)
                   for domain in white_domain.split(',') if domain):
            return
    reqhash = get_hash(host, uri, postdata)
    # 'Gdscan' header marks the scanner's own replayed traffic.
    if 'Gdscan' not in headers:
        request = {
            'headers': headers,
            'host': host,
            'method': method,
            'postdata': postdata,
            'url': uri,
            'packet': packet
        }
        b64req = base64.encodestring(json.dumps(request))
        # hsetnx is atomic: only the first capture of this hash is queued.
        if conn.hsetnx("request", reqhash, b64req):
            conn.lpush("waiting", reqhash)
def capture(x):
    """Scapy per-packet callback: hand raw HTTP *request* payloads to extract().

    Raising is the only way to abort a running sniff() from its prn
    callback, so a disabled scapy_stat flag is signalled via an exception.
    """
    out_file = config.load()['scapy_out']
    if config.load()['scapy_stat'].lower() == 'false':
        raise Exception('scapy', 'out')
    raw = x.lastlayer().original
    # Keep packets that contain an HTTP marker but do not *start* with
    # "HTTP" — i.e. requests, not response packets.
    if 'HTTP/' in raw and raw[0:4] != 'HTTP':
        extract(raw, out_file)
def post(self):
    """Apply form-encoded configuration updates from the request body.

    Only keys already present in the stored configuration are accepted;
    values are URL-decoded and sanitized before being written back.
    Re-renders the config page with the freshly persisted settings.
    """
    conf_all = config.load()
    for pair in self.request.body.split("&"):
        # BUG FIX: the original indexed split("=", 1)[1] unconditionally and
        # raised IndexError on a pair with no '='.  Skip malformed pairs.
        key, sep, raw_value = pair.partition("=")
        if not sep:
            continue
        para = secure.clear(urllib.unquote(key))
        value = secure.clear(urllib.unquote(raw_value))
        if para in conf_all:
            conf_all[para] = value
    config.update(conf_all)
    return self.render("config.html", config=config.load())
def main():
    """Run the scapy sniffer on the configured network card.

    capture() aborts the sniff by raising when scapy_stat is switched off;
    any exception lands here and the stat flag is persisted as "false".
    """
    NIC = config.load()["scapy_network_card"]  # network adapter name
    try:
        if NIC == 'all':
            sniff(filter="tcp", prn=lambda x: capture(x))
        else:
            sniff(iface=NIC, filter="tcp", prn=lambda x: capture(x))
    except Exception:
        error("scapy out!")
        conf = config.load()
        # BUG FIX: the original wrote conf['scapy_stat'].lower = "false",
        # which tries to set an attribute on the str method and raises
        # AttributeError — the flag was never persisted.
        conf['scapy_stat'] = "false"
        config.update(conf)
def check_update():
    """Compare the remote version string with the local one; run update() on
    mismatch.  Returns True when an update was triggered, False otherwise
    (including on any network failure)."""
    out.good("Checking update...")
    try:
        res = requests.get(config.load()['check_url'], timeout=10)
        version = res.content
        if version != config.load()['version']:
            update()
            return True
        else:
            return False
    # FIX: was a bare `except:` which also swallowed KeyboardInterrupt and
    # SystemExit; narrowed to Exception with the same user-visible behavior.
    except Exception:
        out.error("Can not connect to update server!")
        return False
def size_control():
    """Truncate the session file to the configured session_size.

    When the file exceeds the limit it is rewritten line by line until the
    cumulative size would reach the limit; later lines are discarded.
    """
    limit = config.load()["session_size"]  # hoisted: was re-read per line
    if os.path.getsize(SESSION_CONF_FILE) <= limit:
        return
    # FIX: dropped the redundant f.close() inside the with-block and wrapped
    # the write handle in `with` so it is closed on every exit path.
    with open(SESSION_CONF_FILE, 'r') as f:
        lines = f.readlines()
    size = 0
    with open(SESSION_CONF_FILE, 'w') as ff:
        for line in lines:
            size += len(line)
            if size < limit:
                ff.write(line)
            else:
                return  # limit reached: stop rewriting, drop the rest
    # Preserved from the original (debug trace when every line fit).
    print(size)
def size_control():
    """Truncate ./session to the configured session_size.

    When the file exceeds the limit it is rewritten line by line until the
    cumulative size would reach the limit; later lines are discarded.
    """
    session_path = os.getcwd() + os.path.sep + "session"  # hoisted: was built 3x
    limit = config.load()["session_size"]  # hoisted: was re-read per line
    if os.path.getsize(session_path) <= limit:
        return
    # FIX: dropped the redundant f.close() inside the with-block and wrapped
    # the write handle in `with` so it is closed on every exit path.
    with open(session_path, 'r') as f:
        lines = f.readlines()
    size = 0
    with open(session_path, 'w') as ff:
        for line in lines:
            size += len(line)
            if size < limit:
                ff.write(line)
            else:
                return  # limit reached: stop rewriting, drop the rest
    # Preserved from the original (debug trace when every line fit).
    print(size)
def get(self):
    """Persist the requested scan_stat flag and, when enabling, launch the
    scan dispatcher on a background thread; then bounce back to the scan
    configuration page."""
    stat = secure.clear(self.get_argument("stat"))
    conf = config.load()
    conf['scan_stat'] = stat
    config.update(conf)
    if stat.lower() == "true":
        worker = threading.Thread(target=scan.scan_start, args=())
        worker.start()
    return self.write(out.jump("/scan_config"))
def post(self):
    """Handle a login attempt.

    On matching credentials: mint a session token bound to the client IP,
    set the cookie, record the session and 302-redirect to "/".  Otherwise
    re-render the login redirect page with an error message.
    """
    account = secure.clear(self.get_argument("account"))
    password = secure.clear(self.get_argument("password"))
    if account == config.load()['account'] and password == config.load()['password']:
        # BUG FIX: the original passed the literal string
        # "self.request.remote_ip", so every session token was derived from
        # the same constant text instead of the actual client address.
        cookie = session.new(self.request.remote_ip)
        self.set_cookie("ysrc_token", cookie, expires_days=int(
            config.load()["session_expires_time"]))
        session.update(cookie)
        self.set_header("Location", "/")
        self.set_status(302)
        return
    else:
        location = "/login"
        # FIX: corrected the user-facing typo "you account".
        content = "Something wrong with your account or password!"
        return self.render("302.html", location=location, content=content)
def new_scan(reqhash, request, rules):
    """Run every enabled rule's scanner against one queued request.

    Each rule name maps to a module-level function "<rule>_scan" returning
    {"request_stat": int, "message": str}.  The highest request_stat across
    rules becomes the overall verdict; results are stored base64-encoded in
    redis and the hash is moved from "running" to "finished" (and pushed to
    "vulnerable" on any finding).
    """
    out.good("start new mission: %s" % reqhash)
    request_stat = 0
    request_message = []
    request_result = {}
    vulnerable = 0
    for rule in rules:
        # scan_stat is re-checked per rule so a scan can be stopped mid-run.
        if config.load()['scan_stat'].lower() == "true":
            # FIX: look the scanner up by name via globals() instead of
            # eval() — rule names come from configuration, and eval() would
            # execute arbitrary expressions.  Identical for valid names.
            message = globals()[rule + "_scan"](request, config.load()['scan_level'])
            request_stat = message['request_stat']
            if request_stat > vulnerable:
                vulnerable = request_stat
            request_message = message['message'].split("|,|")
            request_result[rule] = {"stat": request_stat, "message": request_message}
    request_result['stat'] = vulnerable
    if vulnerable > 0:
        conn.lpush("vulnerable", reqhash)
    conn.hset("results", reqhash, base64.b64encode(json.dumps(request_result)))
    conn.lrem("running", 1, reqhash)
    conn.lpush("finished", reqhash)
def get(self):
    """Render the scan configuration page.

    Builds the checkbox state map: every known rule defaults to unchecked,
    then rules listed in the rule file's scan_type are flipped to checked.
    """
    rule_names = [
        "sqlireflect", "sqlitime", "sqlmap", "xpath", "xss", "lfi",
        "ldap", "sqlibool"
    ]
    start = {}
    for name in rule_names:
        start[name + "_true"] = ""
        start[name + "_false"] = "checked"
    for name in config.load_rule()["scan_type"]:
        start[name + "_true"] = "checked"
        start[name + "_false"] = ""
    rules = dict((name, config.rule_read(name)) for name in rule_names)
    return self.render("scan_config.html",
                       config=config.load(),
                       start=start,
                       rules=rules,
                       scan_stat=config.load()['scan_stat'],
                       sqlmap_api=config.load_rule()['sqlmap_api'])
def post(self):
    """Persist settings for the selected proxy backend, then redirect back
    to that backend's proxy page.

    Table-driven: each known proxy type lists which fields are sanitized
    with secure.clear() and which are stored raw (only scapy_network_card
    was stored unsanitized in the original — preserved here).
    """
    proxy_type = self.get_argument("type")
    sanitized_fields = {
        "mix_proxy": ("mix_addr", "mix_port"),
        "scapy": ("scapy_out",),
        "tornado": ("tornado_address", "tornado_port"),
    }
    raw_fields = {
        "scapy": ("scapy_network_card",),
    }
    if proxy_type in sanitized_fields:
        conf = config.load()
        for field in sanitized_fields[proxy_type]:
            conf[field] = secure.clear(self.get_argument(field))
        for field in raw_fields.get(proxy_type, ()):
            conf[field] = self.get_argument(field)
        config.update(conf)
    return self.write(out.jump("/proxy?type=" + proxy_type))
def main():
    """Boot the management web application.

    Registers port/address options, builds tornado settings rooted at this
    file's directory, resets every runtime stat flag to 'false' (nothing is
    running at startup), then starts the IOLoop.
    """
    define("port", default=int(config.load()["port"]), type=int)
    define("address", default=config.load()["ip"])
    tornado.options.parse_command_line()
    root = os.path.dirname(os.path.abspath(__file__))
    join = lambda *parts: os.path.join(root, *parts)
    settings = {
        'static_path': join("static"),
        'template_loader': tornado.template.Loader(join("templates")),
        'login_url': "/login",
    }
    site.addsitedir(join('handlers'))
    conf = config.load()
    # No subsystem is running yet — clear every stat flag.
    for flag in ('scapy_stat', 'tornado_stat', 'scan_stat', 'mix_stat'):
        conf[flag] = 'false'
    config.update(conf)
    app = make_app(settings)
    app.listen(port=options.port, address=options.address)
    out.good("Web app start at: http://%s:%s" % (options.address, options.port))
    tornado.ioloop.IOLoop.current().start()
def main():
    """Accept-loop for the mix proxy.

    Binds the configured address/port and spawns one client_conn thread per
    accepted connection while mix_stat stays "true".  Setup failures persist
    mix_stat as "false" and exit the process.
    """
    try:
        addr = config.load()['mix_addr']
        port = int(config.load()['mix_port'])
        bindsocket = socket.socket()
        bindsocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        bindsocket.bind((addr, port))
        bindsocket.listen(300)
    except Exception as e:
        conf = config.load()
        # BUG FIX: the original wrote conf['mix_stat'].lower = "false",
        # which tries to set an attribute on the str method and raises
        # AttributeError — the flag was never persisted.
        conf['mix_stat'] = "false"
        config.update(conf)
        print(e)
        exit()
    # Re-read the flag each iteration so the UI can stop the proxy.
    while config.load()['mix_stat'].lower() == "true":
        try:
            connstream, fromaddr = bindsocket.accept()
            t = threading.Thread(target=client_conn, args=(connstream, ))
            t.start()
        except Exception as e:
            print(e)
            # Close the half-open client socket if accept() got that far.
            if 'connstream' in dir():
                connstream.close()
    bindsocket.close()
def get(self):
    """Render the dashboard: the first 16 entries of each scan queue plus
    each queue's total length and the UI refresh interval."""
    previews = {}
    for queue in ("waiting", "running", "finished", "vulnerable"):
        previews[queue] = conn.lrange(queue, 0, 15)
    self.render("index.html",
                waiting_num=conn.llen("waiting"),
                running_num=conn.llen("running"),
                finished_num=conn.llen("finished"),
                vulnerable_num=conn.llen("vulnerable"),
                waiting=previews["waiting"],
                running=previews["running"],
                finished=previews["finished"],
                vulnerable=previews["vulnerable"],
                time=config.load()["flush_time"])
    return
def scan_start():
    """Scan dispatcher loop.

    While scan_stat is "true": wait for a worker slot, atomically move the
    next hash from "waiting" to "running", and spawn new_scan for requests
    that actually carry parameters; parameterless requests are moved
    straight to "finished".
    """
    while config.load()['scan_stat'].lower() == "true":
        try:
            # Back off while the worker pool is saturated.
            while thread_filled():
                time.sleep(5)
            reqhash = conn.rpoplpush("waiting", "running")
            if not reqhash:
                time.sleep(10)
                continue
            reqed = conn.hget("request", reqhash)
            request = json.loads(ds(reqed))
            rules = config.load_rule()['scan_type']
            query = urlparse.urlparse(request['url']).query
            scannable = ((request['method'] == "GET" and query != "") or
                         (request['method'] == "POST" and
                          (request["postdata"] != "" or query != "")))
            if scannable:
                worker = threading.Thread(
                    target=new_scan,
                    args=(reqhash, requests_convert(request), rules))
                worker.start()
            else:
                # Nothing to inject into — mark it finished immediately.
                conn.lrem("running", 1, reqhash)
                conn.lpush("finished", reqhash)
        except Exception as e:
            out.error(str(e))
def get(self):
    """Render one page of a redis request list.

    Pagination is window-of-five: up to two page links either side of the
    current page, clamped to [1, last page].  Each listed hash is resolved
    to "METHOD|url" for display.
    """
    list_type = self.get_argument("type")
    try:
        start = int(self.get_argument("start"))
    except:
        start = 0
    page_num = int(config.load()['page_num'])
    length = conn.llen(list_type)
    last = start + page_num - 1
    page_now = start // page_num + 1
    end_page = -((-length) // page_num)  # ceiling division
    end_num = end_page * page_num - page_num  # start offset of the last page
    pages_first = max(page_now - 2, 1)
    pages_last = min(page_now + 2, end_page)
    pages = range(pages_first, pages_last + 1)
    content = conn.lrange(list_type, start, last)
    req_content = {}
    for reqhash in content:
        stored = json.loads(base64.b64decode(conn.hget("request", reqhash)))
        req_content[reqhash] = stored['method'] + "|" + stored['url']
    return self.render("list.html",
                       page_now=page_now,
                       page_num=page_num,
                       pages=pages,
                       content=content,
                       list_type=list_type,
                       length=length,
                       req_content=req_content,
                       end_num=end_num)
def wrapper(self, *args, **kwargs):
    """Gate the wrapped handler on the tornado_stat flag: do nothing while
    the tornado proxy is switched off."""
    if config.load()['tornado_stat'].lower() != "false":
        return method(self, *args, **kwargs)
    return
def main():
    """Start the tornado-based HTTP proxy on the configured endpoint."""
    listen_port = int(config.load()['tornado_port'])
    listen_addr = config.load()['tornado_address']
    print("Starting HTTP proxy on port %d" % listen_port)
    run_proxy(listen_port, listen_addr)
def new(ip):
    """Mint a session token: md5 of cookie_secret + current timestamp +
    client ip, hex-encoded."""
    seed = config.load()["cookie_secret"] + str(time.time()) + ip
    return md5.new(seed).hexdigest()
def thread_filled():
    """Report whether the number of running scans has reached the configured
    threads_num limit."""
    return conn.llen("running") >= int(config.load()['threads_num'])
def get(self):
    """Render the configuration page with the currently stored settings."""
    current = config.load()
    return self.render("config.html", config=current)
def sqlitime_scan(request, config_level):
    """Time-based (blind) SQL injection scan over GET and POST parameters.

    Payload templates contain either a TIME_VAR placeholder (a sleep-style
    delay in seconds) or a NUM_VAR placeholder (a heavy-computation row
    count).  Detection is proportional: if doubling the injected delay/work
    roughly doubles the observed response-time delta (ratio within
    [1.7, 2.3]), the parameter is flagged with request_stat 3.

    Returns {"request_stat": 0|3, "message": str} with one
    "payload|#|param|,|" record per finding.
    """
    message = {"request_stat": 0, "message": ""}
    dom = minidom.parse(config.rule_read("sqlitime", get_file_handle=True)).documentElement
    for node in dom.getElementsByTagName('couple'):
        couple_id = int(node.getAttribute("id"))
        if couple_id <= config_level:
            payloads = node.getElementsByTagName('requests')[0].childNodes[0].nodeValue.strip()
            for payload in payloads.splitlines():
                if "TIME_VAR" in payload:
                    # --- sleep-based payloads, GET parameters ---
                    for param_name in urlparse.urlparse(request['url']).query.split("&"):
                        # Baseline (sleep 0) vs sleep 3.
                        response, time0 = request_payload(request, payload.strip().replace("TIME_VAR", "0"), param_name, time_check=True)
                        response, time3 = request_payload(request, payload.strip().replace("TIME_VAR", "3"), param_name, time_check=True)
                        if time3 - time0 >= 2:
                            # Confirm: doubling the sleep (6s) should roughly
                            # double the delta — ratio near 2.
                            response, time6 = request_payload(request, payload.strip().replace("TIME_VAR", "6"), param_name, time_check=True)
                            num = (time6 - time0) / (time3 - time0)
                            if num <= 2.3 and num >= 1.7:
                                message['request_stat'] = 3
                                message['message'] += "payload: %s|#|param: %s|,|" % (payload.strip().replace("TIME_VAR", '5').encode('utf-8'), param_name.split("=")[0])
                                if config.load()['only_one_match'].lower() == "true":
                                    return message
                    # --- sleep-based payloads, POST parameters ---
                    for param_name in request['postdata'].split("&"):
                        # Empty body: ''.split("&") still yields [''], so
                        # bail out on the first iteration.
                        if request['postdata'] == "":
                            break
                        response, time0 = request_payload(request, payload.strip().replace("TIME_VAR", "0"), param_name, postdata=True, time_check=True)
                        response, time3 = request_payload(request, payload.strip().replace("TIME_VAR", "3"), param_name, postdata=True, time_check=True)
                        if time3 - time0 >= 2:
                            response, time6 = request_payload(request, payload.strip().replace("TIME_VAR", "6"), param_name, postdata=True, time_check=True)
                            num = (time6 - time0) / (time3 - time0)
                            if num <= 2.3 and num >= 1.7:
                                message['request_stat'] = 3
                                message['message'] += "payload: %s|#|param: %s|,|" % (payload.strip().replace("TIME_VAR", '5').encode('utf-8'), param_name.split("=")[0])
                                if config.load()['only_one_match'].lower() == "true":
                                    return message
                elif "NUM_VAR" in payload:
                    # --- workload-based payloads, GET parameters ---
                    for param_name in urlparse.urlparse(request['url']).query.split("&"):
                        response, time0 = request_payload(request, payload.strip().replace("NUM_VAR", "0"), param_name, time_check=True)
                        # Grow the workload by 10x up to three times until a
                        # measurable (>= 3s) delay appears.
                        VAR = '500000'
                        for NUM_VAR in range(3):
                            VAR += '0'
                            response, time_more = request_payload(request, payload.strip().replace("NUM_VAR", VAR), param_name, time_check=True)
                            if time_more - time0 >= 3:
                                # Confirm: doubling the workload should
                                # roughly double the delta.
                                response, time6 = request_payload(request, payload.strip().replace("NUM_VAR", str(int(VAR) * 2)), param_name, time_check=True)
                                num = (time6 - time0) / (time_more - time0)
                                if num <= 2.3 and num >= 1.7:
                                    message['request_stat'] = 3
                                    message['message'] += "payload: %s|#|param: %s|,|" % (payload.strip().replace("NUM_VAR", VAR).encode('utf-8'), param_name.split("=")[0])
                                    if config.load()['only_one_match'].lower() == "true":
                                        return message
                                else:
                                    # Delay did not scale with workload —
                                    # give up on this parameter.
                                    break
                            else:
                                break
                    # --- workload-based payloads, POST parameters ---
                    for param_name in request['postdata'].split("&"):
                        if request['postdata'] == "":
                            break
                        # NOTE(review): this baseline call omits
                        # postdata=True unlike every other POST probe in
                        # this branch — looks like a bug; confirm against
                        # request_payload's semantics.
                        response, time0 = request_payload(request, payload.strip().replace("NUM_VAR", "0"), param_name, time_check=True)
                        VAR = '500000'
                        for NUM_VAR in range(3):
                            VAR += '0'
                            response, time_more = request_payload(request, payload.strip().replace("NUM_VAR", VAR), param_name, postdata=True, time_check=True)
                            if time_more - time0 >= 3:
                                response, time6 = request_payload(request, payload.strip().replace("NUM_VAR", str(int(VAR) * 2)), param_name, postdata=True, time_check=True)
                                num = (time6 - time0) / (time_more - time0)
                                if num <= 2.3 and num >= 1.7:
                                    message['request_stat'] = 3
                                    message['message'] += "payload: %s|#|param: %s|,|" % (payload.strip().replace("NUM_VAR", VAR).encode('utf-8'), param_name.split("=")[0])
                                    if config.load()['only_one_match'].lower() == "true":
                                        return message
                                else:
                                    break
                            else:
                                break
    return message
def get(self):
    """Render the proxy management page for the requested proxy type, or —
    when a "stat" query argument is present — toggle that proxy on/off
    first.

    Each branch reads the current settings into *conf* for display.  The
    try/except around the toggle doubles as control flow: when "stat" is
    absent, self.get_argument raises, the bare except swallows it and the
    handler falls through to rendering the page.
    """
    proxy_type = self.get_argument("type")
    conf = {}
    if proxy_type == "mix_proxy":
        conf['mix_addr'] = config.load()['mix_addr']
        conf['mix_port'] = config.load()['mix_port']
        stat = config.load()['mix_stat']
        try:
            # NOTE(review): unlike the scapy/tornado branches below, this
            # stat argument is not passed through secure.clear — confirm
            # whether that is intentional.
            start_stat = self.get_argument("stat")
            start_conf = config.load()
            start_conf['mix_stat'] = start_stat
            config.update(start_conf)
            if start_stat.lower() == "true":
                thread = threading.Thread(target=mix_proxy.main)
                thread.start()
            else:
                # Only the mix proxy has an explicit shutdown path: kill
                # whatever is listening on the configured address/port.
                secure.kill(config.load()['mix_addr'],
                            int(config.load()['mix_port']), "GE")
            return self.write(out.jump("/proxy?type=" + proxy_type))
        except:
            pass
    elif proxy_type == "scapy":
        conf['scapy_out'] = config.load()['scapy_out']
        conf['scapy_network_card'] = config.load()['scapy_network_card']
        stat = config.load()['scapy_stat']
        try:
            start_stat = secure.clear(self.get_argument("stat"))
            start_conf = config.load()
            start_conf['scapy_stat'] = start_stat
            config.update(start_conf)
            if start_stat.lower() == "true":
                thread = threading.Thread(target=pyscapy.main)
                thread.start()
            # No explicit "off" action: the sniffer polls scapy_stat itself.
            return self.write(out.jump("/proxy?type=" + proxy_type))
        except:
            pass
    elif proxy_type == "tornado":
        conf['tornado_address'] = config.load()['tornado_address']
        conf['tornado_port'] = config.load()['tornado_port']
        stat = config.load()['tornado_stat']
        try:
            start_stat = secure.clear(self.get_argument("stat"))
            start_conf = config.load()
            start_conf['tornado_stat'] = start_stat
            config.update(start_conf)
            if start_stat.lower() == "true":
                thread = threading.Thread(target=proxy_io.main)
                thread.start()
            # No explicit "off" action: handlers check tornado_stat.
            return self.write(out.jump("/proxy?type=" + proxy_type))
        except:
            pass
    else:
        # Unknown proxy type: back to the dashboard.
        return self.write(out.jump("/"))
    # No stat toggle requested (or it failed): show the settings page.
    return self.render("proxy.html", proxy_type=proxy_type, conf=conf, stat=stat)