def main_intel_amt(url, agent=None, proxy=None):
    proxy = proxy_string_to_dict(proxy) or None
    agent = agent or DEFAULT_USER_AGENT
    logger.info(set_color(
        "attempting to connect to '{}' and get hardware info...".format(url)
    ))
    try:
        json_data = __get_hardware(url, agent=agent, proxy=proxy)
        if json_data is None:
            logger.error(set_color(
                "unable to get any information, skipping...", level=40
            ))
        else:
            print("-" * 40)
            for key in json_data.keys():
                print("{}:".format(str(key).capitalize()))
                for item in json_data[key]:
                    print(" - {}: {}".format(item.capitalize(), json_data[key][item]))
            print("-" * 40)
    except Exception as e:
        if "Temporary failure in name resolution" in str(e):
            logger.error(set_color(
                "failed to connect on '{}', skipping...".format(url), level=40
            ))
        else:
            logger.exception(set_color(
                "ran into exception '{}', cannot continue...".format(e)
            ))
            fix_log_file()
            request_issue_creation()
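# Illustrative only: a minimal sketch of how main_intel_amt might be invoked directly,
# assuming proxy_string_to_dict accepts a "scheme://host:port" proxy string. The host,
# port, and proxy values below are placeholders and are not part of the original code.
def _demo_intel_amt_call():
    main_intel_amt(
        "http://203.0.113.10:16992",       # hypothetical host; 16992 is Intel AMT's default HTTP port
        agent=None,                        # falls back to DEFAULT_USER_AGENT inside main_intel_amt
        proxy="socks5://127.0.0.1:9050"    # hypothetical proxy string, parsed by proxy_string_to_dict
    )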
def __run_attacks_main():
    which_log_to_use = {
        "dork": URL_LOG_PATH,
        "spider": SPIDER_LOG_PATH
    }
    options = (opt.useRandomDork, opt.dorkToUse, opt.dorkFileToUse)
    to_use = which_log_to_use["dork"] if any(options) else which_log_to_use["spider"]
    try:
        urls_to_use = get_latest_log_file(to_use)
    except TypeError:
        urls_to_use = None
    if urls_to_use is None:
        logger.error(set_color(
            "unable to run attacks, it appears that no file was created for the retrieved data...", level=40
        ))
        shutdown()
    if opt.runSqliScan or opt.runPortScan or opt.intelCheck or opt.adminPanelFinder or opt.runXssScan:
        with open(urls_to_use) as urls:
            for url in urls.readlines():
                __run_attacks(
                    url.strip(),
                    sqlmap=opt.runSqliScan, nmap=opt.runPortScan,
                    intel=opt.intelCheck, xss=opt.runXssScan,
                    admin=opt.adminPanelFinder, given_path=opt.givenSearchPath,
                    auto=opt.autoStartSqlmap, verbose=opt.runInVerbose,
                    batch=opt.runInBatch
                )
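# Not part of Zeus: a minimal sketch of the behaviour __run_attacks_main relies on from
# get_latest_log_file, assuming it simply returns the newest file in the given log
# directory (or None when the directory is empty). The real helper may differ.
import os

def _latest_log_file_sketch(log_path):
    files = [os.path.join(log_path, f) for f in os.listdir(log_path)]
    if not files:
        return None
    # pick the most recently modified file, mirroring the "latest log" idea
    return max(files, key=os.path.getmtime)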
def __run_attacks(
        url, sqlmap=False, nmap=False, intel=False, xss=False,
        verbose=False, admin=False, given_path=None, auto=False, batch=False
):
    """
    run the attacks if any are requested
    """
    __enabled_attacks = {
        "sqlmap": opt.runSqliScan,
        "port": opt.runPortScan,
        "xss": opt.runXssScan,
        "admin": opt.adminPanelFinder,
        "intel": opt.intelCheck
    }
    enabled = set()
    for key in __enabled_attacks.keys():
        if __enabled_attacks[key] is True:
            enabled.add(key)
    if len(enabled) > 1:
        logger.error(set_color(
            "it appears that you have enabled multiple attack types, "
            "as of now only 1 attack is supported at a time, choose "
            "your attack and try again. You can use the -f flag if "
            "you do not want to complete an entire search again...", level=40
        ))
        shutdown()

    if not batch:
        question = prompt(
            "would you like to process found URL: '{}'".format(url), opts=["y", "N"]
        )
    else:
        question = "y"

    if question.lower().startswith("y"):
        if sqlmap:
            return sqlmap_scan.sqlmap_scan_main(
                url.strip(), verbose=verbose, opts=__create_arguments(sqlmap=True),
                auto_search=auto, given_path=given_path
            )
        elif nmap:
            url_ip_address = replace_http(url.strip())
            return nmap_scan.perform_port_scan(
                url_ip_address, verbose=verbose, opts=__create_arguments(nmap=True)
            )
        elif intel:
            url = get_true_url(url)
            return intel_me.main_intel_amt(url, agent=agent_to_use, proxy=proxy_to_use)
        elif admin:
            main(url, show=opt.showAllConnections, verbose=verbose)
        elif xss:
            main_xss(
                url, verbose=verbose, proxy=proxy_to_use,
                agent=agent_to_use, tamper=opt.tamperXssPayloads
            )
        else:
            pass
    else:
        logger.warning(set_color(
            "skipping '{}'...".format(url), level=30
        ))
def main_xss(start_url, verbose=False, proxy=None, agent=None):
    find_xss_script(start_url)
    logger.info(set_color(
        "loading payloads..."
    ))
    payloads = __load_payloads()
    if verbose:
        logger.debug(set_color(
            "a total of {} payloads loaded...".format(len(payloads)), level=10
        ))
    logger.info(set_color(
        "payloads will be written to a temporary file and read from there..."
    ))
    filename = create_urls(start_url, payloads)
    if verbose:
        logger.debug(set_color(
            "loaded URLs have been saved to '{}'...".format(filename), level=10
        ))
    logger.info(set_color(
        "testing for XSS vulnerabilities on host '{}'...".format(start_url)
    ))
    if proxy is not None:
        logger.info(set_color(
            "using proxy '{}'...".format(proxy)
        ))
    success = set()
    with open(filename) as urls:
        for url in urls.readlines():
            url = url.strip()
            result = scan_xss(url, proxy=proxy, agent=agent)
            payload = find_xss_script(url)
            logger.info(set_color(
                "trying payload '{}'...".format(payload)
            ))
            # check the SQLi sentinel before the generic truthy check so the
            # "sqli" string is not treated as a successful XSS hit
            if result == "sqli":
                logger.error(set_color(
                    "loaded URL '{}' threw a DBMS error and appears to be SQLi vulnerable, "
                    "test for SQL injection".format(url), level=30
                ))
            elif result:
                success.add(url)
                if verbose:
                    logger.debug(set_color(
                        "payload '{}' appears to be usable...".format(payload), level=10
                    ))
            else:
                if verbose:
                    logger.debug(set_color(
                        "host '{}' does not appear to be vulnerable to XSS attacks with payload '{}'...".format(
                            start_url, payload
                        ), level=10
                    ))
    create_tree(start_url, list(success))
def create_urls(url, payload_list, tamper=None):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf_name = tf.name
    with tf as tmp:
        for i, payload in enumerate(payload_list):
            if tamper:
                try:
                    # only warn about the tamper script on the first payload
                    if i < 1:
                        payload = __tamper_payload(payload, tamper_type=tamper, warning=True)
                    else:
                        payload = __tamper_payload(payload, tamper_type=tamper, warning=False)
                except InvalidTamperProvided:
                    logger.error(set_color(
                        "you provided an invalid tamper script, acceptable tamper scripts are: {}...".format(
                            " | ".join(list_tamper_scripts())
                        ), level=40
                    ))
                    shutdown()
            loaded_url = "{}{}\n".format(url.strip(), payload.strip())
            tmp.write(loaded_url)
    return tf_name
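# Illustrative only: how the temporary file produced by create_urls could be inspected
# by hand. The payloads below are stand-ins for whatever __load_payloads returns in the
# real tool; the target URL is a placeholder.
def _demo_create_urls():
    demo_payloads = ["<script>alert(1)</script>", "\"'><svg/onload=alert(1)>"]
    tmp_path = create_urls("http://example.com/search?q=", demo_payloads)
    with open(tmp_path) as fh:
        for line in fh:
            print(line.strip())   # each line is the start URL with one payload appended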
def request_issue_creation():
    question = prompt(
        "would you like to create an anonymous issue and post it to Zeus's Github", opts="yN"
    )
    if question.lower().startswith("n"):
        logger.error(set_color(
            "Zeus has experienced an internal error and cannot continue, shutting down...", level=40
        ))
        shutdown()

    fix_log_file()
    logger.info(set_color(
        "Zeus got an unexpected error and will automatically create an issue for this error, please wait..."
    ))

    def __extract_stacktrace(file_data):
        logger.info(set_color("extracting traceback from log file..."))
        retval, buff_mode, _buffer = [], False, ""
        with open(file_data, "r+") as log:
            for line in log:
                if "Traceback" in line:
                    buff_mode = True
                if line and len(line) < 5:
                    buff_mode = False
                    retval.append(_buffer)
                    _buffer = ""
                if buff_mode:
                    if len(line) > 400:
                        line = line[:400] + "...\n"
                    _buffer += line
        return "".join(retval)

    logger.info(set_color("getting authorization..."))
    encoded = __get_encoded_string()
    n = get_decode_num(encoded)
    token = decode(n, encoded)
    current_log_file = get_latest_log_file(CURRENT_LOG_FILE_PATH)
    stacktrace = __extract_stacktrace(current_log_file)
    issue_title = stacktrace.split("\n")[-2]
    issue_data = {
        "title": issue_title,
        "body": "Zeus version:\n`{}`\n\n"
                "Error info:\n```{}```\n\n"
                "Running details:\n`{}`\n\n"
                "Commands used:\n`{}`\n\n"
                "Log file info:\n```{}```".format(
                    VERSION, str(stacktrace), str(platform.platform()),
                    " ".join(sys.argv), open(current_log_file).read()
                ),
    }
    _json_data = json.dumps(issue_data)
    if sys.version_info > (3,):
        _json_data = _json_data.encode("utf-8")
    try:
        req = urllib2.Request(
            url="https://api.github.com/repos/ekultek/zeus-scanner/issues",
            data=_json_data,
            headers={"Authorization": "token {}".format(token)}
        )
        urllib2.urlopen(req, timeout=10).read()
        logger.info(set_color(
            "issue has been created successfully with the following name '{}'...".format(issue_title)
        ))
    except Exception as e:
        logger.exception(set_color(
            "failed to auto create the issue, got exception '{}', "
            "you may manually create an issue...".format(e), level=50
        ))
            # excerpt: tail of a top-level try/except around the attack dispatch
            else:
                logger.fatal(set_color(
                    "failed to provide an attack argument, attack argument must be provided "
                    "for Zeus to attack the provided URLs", level=50
                ))
        else:
            logger.critical(set_color(
                "failed to provide a mandatory argument, you will be redirected to the help menu...", level=50
            ))
            time.sleep(2)
            subprocess.call("python zeus.py --help", shell=True)
    except KeyboardInterrupt:
        logger.error(set_color(
            "user aborted process...", level=40
        ))
    except UnboundLocalError:
        logger.warning(set_color(
            "do not interrupt the browser when selenium is running, "
            "it will cause Zeus to crash...", level=30
        ))
    except Exception as e:
        logger.exception(set_color(
            "ran into exception '{}', the exception has been saved to the log file...".format(e), level=50
        ))
        fix_log_file()
        request_issue_creation()

    fix_log_file()
    shutdown()
"for Zeus to attack the provided URL's", level=50 )) else: logger.critical(set_color( "failed to provide a mandatory argument, you will be redirected to the help menu...", level=50 )) time.sleep(2) subprocess.call("python zeus.py --help", shell=True) except IOError: logger.fatal(set_color( "provided file does not exist, make sure you have the full path...", level=50 )) except KeyboardInterrupt: logger.error(set_color( "user aborted process...", level=40 )) except UnboundLocalError: logger.warning(set_color( "do not interrupt the browser when selenium is running, " "it will cause Zeus to crash...", level=30 )) except Exception as e: if "url did not match a true url" in str(e).lower(): logger.error(set_color( "you did not provide a URL that is capable of being processed, " "the URL provided to the spider needs to contain protocol as well " "ie. 'http://google.com' (it is advised not to add the GET parameter), " "fix the URL you want to scan and try again...", level=40 )) shutdown()
def check_for_admin_page(url, exts, protocol="http://", show_possibles=False, verbose=False):
    possible_connections, connections = set(), set()
    stripped_url = replace_http(url.strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url), level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(set_color(
                "connected successfully to '{}'...".format(true_url)
            ))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if verbose:
                if "Access Denied" in str(e):
                    logger.warning(set_color(
                        "got access denied, possible control panel found without external access on '{}'...".format(
                            true_url
                        ), level=30
                    ))
                    possible_connections.add(true_url)
                else:
                    logger.error(set_color(
                        "failed to connect got error code {}...".format(data[2]), level=40
                    ))
        except Exception as e:
            if verbose:
                # match either timeout message that urllib may raise
                if "<urlopen error timed out>" in str(e) or "timeout: timed out" in str(e):
                    logger.warning(set_color(
                        "connection timed out after five seconds "
                        "assuming won't connect and skipping...", level=30
                    ))
                else:
                    logger.exception(set_color(
                        "failed to connect with unexpected error '{}'...".format(str(e)), level=50
                    ))
                    fix_log_file()
                    request_issue_creation()

    possible_connections, connections = list(possible_connections), list(connections)
    data_msg = "found {} possible connection(s) and {} successful connection(s)..."
    logger.info(set_color(data_msg.format(len(possible_connections), len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        logger.fatal(set_color(
            "did not find any successful connections to {}'s admin page".format(url), level=50
        ))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            logger.fatal(set_color(
                "did not find any possible connections to {}'s admin page".format(url), level=50
            ))
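# Not part of the original module: a rough sketch of what replace_http is assumed to do
# here, namely strip the scheme (and trailing slash) so the admin-page extensions can be
# appended to a bare host. The real implementation may differ, e.g. by using a regex.
def _replace_http_sketch(url):
    for scheme in ("https://", "http://"):
        if url.startswith(scheme):
            url = url[len(scheme):]
    return url.rstrip("/")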
def main_xss(start_url, verbose=False, proxy=None, agent=None, tamper=None):
    if tamper:
        logger.info(set_color(
            "tampering payloads with '{}'...".format(tamper)
        ))
    find_xss_script(start_url)
    logger.info(set_color(
        "loading payloads..."
    ))
    payloads = __load_payloads()
    if verbose:
        logger.debug(set_color(
            "a total of {} payloads loaded...".format(len(payloads)), level=10
        ))
    logger.info(set_color(
        "payloads will be written to a temporary file and read from there..."
    ))
    filename = create_urls(start_url, payloads, tamper=tamper)
    logger.info(set_color(
        "loaded URLs have been saved to '{}'...".format(filename)
    ))
    logger.info(set_color(
        "testing for XSS vulnerabilities on host '{}'...".format(start_url)
    ))
    if proxy is not None:
        logger.info(set_color(
            "using proxy '{}'...".format(proxy)
        ))
    success = set()
    with open(filename) as urls:
        for i, url in enumerate(urls.readlines(), start=1):
            url = url.strip()
            result = scan_xss(url, proxy=proxy, agent=agent)
            payload = find_xss_script(url)
            if verbose:
                logger.info(set_color(
                    "trying payload '{}'...".format(payload)
                ))
            if result[0] != "sqli" and result[0] is True:
                success.add(url)
                if verbose:
                    logger.debug(set_color(
                        "payload '{}' appears to be usable...".format(payload), level=10
                    ))
            elif result[0] == "sqli":
                # only report the full SQLi details on the first hit
                if i <= 1:
                    logger.error(set_color(
                        "loaded URL '{}' threw a DBMS error and appears to be injectable, test for SQL injection, "
                        "backend DBMS appears to be '{}'...".format(url, result[1]), level=40
                    ))
                else:
                    if verbose:
                        logger.error(set_color(
                            "SQL error discovered...", level=40
                        ))
            else:
                if verbose:
                    logger.debug(set_color(
                        "host '{}' does not appear to be vulnerable to XSS attacks with payload '{}'...".format(
                            start_url, payload
                        ), level=10
                    ))
    if len(success) != 0:
        logger.info(set_color(
            "possible XSS scripts to be used:"
        ))
        create_tree(start_url, list(success))
    else:
        logger.error(set_color(
            "host '{}' does not appear to be vulnerable to XSS attacks...".format(start_url)
        ))
    save = prompt(
        "would you like to keep the URLs saved for further testing", opts="yN"
    )
    if save.lower().startswith("n"):
        os.remove(filename)
def search_multiple_pages(query, link_amount, proxy=None, agent=None, verbose=False):
    def __config_proxy(proxy_string):
        proxy_type_schema = {
            "http": httplib2.socks.PROXY_TYPE_HTTP,
            "socks4": httplib2.socks.PROXY_TYPE_SOCKS4,
            "socks5": httplib2.socks.PROXY_TYPE_SOCKS5
        }
        proxy_type = get_proxy_type(proxy_string)[0]
        proxy_dict = proxy_string_to_dict(proxy_string)
        proxy_config = httplib2.ProxyInfo(
            proxy_type=proxy_type_schema[proxy_type],
            proxy_host="".join(proxy_dict.keys()),
            proxy_port="".join(proxy_dict.values())
        )
        return proxy_config

    if proxy is not None:
        if verbose:
            logger.debug(set_color(
                "configuring to use proxy '{}'...".format(proxy), level=10
            ))
        # note: the ProxyInfo object is built here but its return value is not used further
        __config_proxy(proxy)

    if agent is not None:
        if verbose:
            logger.debug(set_color(
                "setting user-agent to '{}'...".format(agent), level=10
            ))

    logger.warning(set_color(
        "multiple pages will be searched using Google's API client, searches may be blocked after a certain "
        "amount of time...", level=30
    ))
    results, limit, found, index = set(), link_amount, 0, google_api.search(
        query, user_agent=agent, safe="on"
    )
    try:
        while limit > 0:
            results.add(next(index))
            limit -= 1
            found += 1
    except Exception as e:
        if "Error 503" in str(e):
            logger.fatal(set_color(
                "Google is blocking the current IP address, dumping already found URLs...", level=50
            ))

    retval = set()
    for url in results:
        if URL_REGEX.match(url) and URL_QUERY_REGEX.match(url):
            if verbose:
                logger.debug(set_color("found '{}'...".format(url), level=10))
            retval.add(url)

    if len(retval) != 0:
        logger.info(set_color(
            "a total of {} links found out of requested {}...".format(len(retval), link_amount)
        ))
        write_to_log_file(list(retval), URL_LOG_PATH, "url-log-{}.log")
    else:
        logger.error(set_color("unable to extract URLs from results...", level=40))
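# Not part of the original module: a minimal sketch of the proxy_string_to_dict contract
# implied by __config_proxy above, which joins the dict's keys into a host and its values
# into a port. Assumes a "scheme://host:port" proxy string; the real helper may be more
# forgiving about its input.
def _proxy_string_to_dict_sketch(proxy_string):
    if proxy_string is None:
        return None
    # drop the scheme, then split host and port (port kept as a string so "".join works)
    host_port = proxy_string.split("//")[-1]
    host, port = host_port.split(":")
    return {host: port}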