def __run_attacks_main():
    """
    Run the configured attacks against every URL stored in the most recent
    dork or spider log file.

    Uses module-level state: `opt` (parsed CLI options), `logger`,
    `URL_LOG_PATH`, `SPIDER_LOG_PATH`, `get_latest_log_file`, `set_color`,
    `shutdown` and `__run_attacks`.
    """
    which_log_to_use = {
        "dork": URL_LOG_PATH,
        "spider": SPIDER_LOG_PATH
    }
    # any dork-related option means the targets came from a dork search,
    # otherwise they came from the spider crawl
    options = (opt.useRandomDork, opt.dorkToUse, opt.dorkFileToUse, opt.fileToEnumerate)
    # `any()` already returns a bool; the original `any(arg for arg in options) is True`
    # was a redundant identity test on top of a redundant generator
    to_use = which_log_to_use["dork"] if any(options) else which_log_to_use["spider"]
    try:
        urls_to_use = get_latest_log_file(to_use)
    except TypeError:
        urls_to_use = None
    if urls_to_use is None:
        logger.error(set_color(
            "unable to run attacks appears that no file was created for the retrieved data...", level=40
        ))
        shutdown()
    options = [
        opt.runSqliScan, opt.runPortScan, opt.intelCheck,
        opt.adminPanelFinder, opt.runXssScan
    ]
    if any(options):
        with open(urls_to_use) as urls:
            # iterate the file lazily instead of materializing it with readlines()
            for url in urls:
                __run_attacks(
                    url.strip(),
                    sqlmap=opt.runSqliScan,
                    nmap=opt.runPortScan,
                    intel=opt.intelCheck,
                    xss=opt.runXssScan,
                    admin=opt.adminPanelFinder,
                    verbose=opt.runInVerbose,
                    batch=opt.runInBatch,
                    auto_start=opt.autoStartSqlmap
                )
def __run_attacks_main(**kwargs):
    """
    Main method to run the attacks against the collected target URLs.

    Keyword Args:
        log (str): explicit log file path to pull targets from; when omitted
            the dork URL log or the spider log is selected automatically
            based on which dork options were supplied.
    """
    log_to_use = kwargs.get("log", None)
    if log_to_use is None:
        # dork options mean the targets were collected via a dork search
        options = (opt.dorkToUse, opt.useRandomDork, opt.dorkFileToUse)
        log_to_use = URL_LOG_PATH if any(options) else SPIDER_LOG_PATH
        try:
            urls_to_use = get_latest_log_file(log_to_use)
        except TypeError:
            urls_to_use = None
    else:
        urls_to_use = log_to_use

    if urls_to_use is None:
        logger.error(set_color(
            "unable to run attacks appears that no file was created for the retrieved data", level=40
        ))
        shutdown()

    options = [
        opt.runSqliScan, opt.runPortScan, opt.adminPanelFinder,
        opt.runXssScan, opt.performWhoisLookup,
        opt.performClickjackingScan, opt.pgpLookup
    ]
    if any(options):
        with open(urls_to_use) as urls:
            # BUG FIX: the original set `current = i` each iteration and
            # decremented it for webcache URLs, which had no cross-iteration
            # effect, so skipped URLs still advanced the target number.
            # A dedicated counter only advances for real targets.
            current = 0
            for url in urls:
                if "webcache" in url:
                    logger.warning(set_color(
                        "ran into unexpected webcache URL skipping", level=30
                    ))
                    continue
                current += 1
                # BUG FIX: `not url.strip() == "http://" or url == "https://"`
                # parsed as `(not ...) or (...)` and therefore never flagged a
                # bare "https://" line as malformed
                if url.strip() in ("http://", "https://"):
                    logger.warning(set_color(
                        "malformed URL discovered, skipping", level=30
                    ))
                    continue
                logger.info(set_color(
                    "currently running on '{}' (target #{})".format(
                        url.strip(), current
                    ), level=25
                ))
                logger.info(set_color(
                    "fetching target meta-data"
                ))
                identified = main_header_check(
                    url, verbose=opt.runInVerbose, agent=agent_to_use,
                    proxy=proxy_to_use, xforward=opt.forwardedForRandomIP,
                    identify_plugins=opt.identifyPlugin,
                    identify_waf=opt.identifyProtection,
                    show_description=opt.showPluginDescription
                )
                if not identified:
                    # NOTE(review): the message says "skipping" but the
                    # original code continued with the attacks regardless;
                    # that behavior is deliberately preserved here
                    logger.error(set_color(
                        "target is refusing to allow meta-data dumping, skipping", level=40
                    ))
                run_attacks(
                    url.strip(),
                    sqlmap=opt.runSqliScan,
                    nmap=opt.runPortScan,
                    pgp=opt.pgpLookup,
                    xss=opt.runXssScan,
                    whois=opt.performWhoisLookup,
                    admin=opt.adminPanelFinder,
                    clickjacking=opt.performClickjackingScan,
                    github=opt.searchGithub,
                    verbose=opt.runInVerbose,
                    batch=opt.runInBatch,
                    auto_start=opt.autoStartSqlmap,
                    xforward=opt.forwardedForRandomIP,
                    sqlmap_args=opt.sqlmapArguments,
                    nmap_args=opt.nmapArguments,
                    show_all=opt.showAllConnections,
                    do_threading=opt.threadPanels,
                    tamper_script=opt.tamperXssPayloads,
                    timeout=opt.controlTimeout,
                    proxy=proxy_to_use,
                    agent=agent_to_use,
                    conf_file=opt.sqlmapConfigFile,
                    threads=opt.amountOfThreads,
                    force_ssl=opt.forceSSL
                )
                print("\n")
# perform program setup before anything else runs
setup(verbose=opt.runInVerbose)

if not opt.hideBanner:
    print(BANNER)

start_up()

# in verbose mode, echo back every option the run was started with
if opt.runInVerbose:
    being_run = find_running_opts(opt)
    logger.debug(set_color(
        "running with options '{}'".format(being_run), level=10
    ))

logger.info(set_color(
    "log file being saved to '{}'".format(get_latest_log_file(CURRENT_LOG_FILE_PATH))
))


def __run_attacks_main(**kwargs):
    """ main method to run the attacks """
    log_to_use = kwargs.get("log", None)
    if log_to_use is None:
        # choose the dork URL log when any dork option was supplied,
        # otherwise fall back to the spider log
        options = (opt.dorkToUse, opt.useRandomDork, opt.dorkFileToUse)
        log_to_use = URL_LOG_PATH if any(options) else SPIDER_LOG_PATH
        try:
            urls_to_use = get_latest_log_file(log_to_use)
        except TypeError:
            urls_to_use = None
def request_issue_creation():
    """
    Ask the user for permission, then automatically open an anonymous Github
    issue containing the traceback extracted from the current log file.

    Calls `shutdown()` (which does not return) when the user declines.
    """
    question = prompt(
        "would you like to create an anonymous issue and post it to Zeus's Github",
        opts="yN")
    if question.lower().startswith("n"):
        logger.error(
            set_color(
                "Zeus has experienced an internal error and cannot continue, shutting down...",
                level=40))
        shutdown()

    fix_log_file()
    logger.info(
        set_color(
            "Zeus got an unexpected error and will automatically create an issue for this error, please wait..."
        ))

    def __extract_stacktrace(file_data):
        # Collect every traceback from the log file. A very short line
        # (fewer than 5 characters) marks the end of a traceback buffer;
        # overly long lines are clipped to 400 chars to keep the issue
        # body readable.
        logger.info(set_color("extracting traceback from log file..."))
        retval, buff_mode, _buffer = [], False, ""
        with open(file_data, "r+") as log:
            for line in log:
                if "Traceback" in line:
                    buff_mode = True
                if line and len(line) < 5:
                    buff_mode = False
                    retval.append(_buffer)
                    _buffer = ""
                if buff_mode:
                    if len(line) > 400:
                        line = line[:400] + "...\n"
                    _buffer += line
        return "".join(retval)

    logger.info(set_color("getting authorization..."))
    encoded = __get_encoded_string()
    n = get_decode_num(encoded)
    token = decode(n, encoded)

    current_log_file = get_latest_log_file(CURRENT_LOG_FILE_PATH)
    stacktrace = __extract_stacktrace(current_log_file)
    # the second-to-last line of the traceback is the exception summary
    issue_title = stacktrace.split("\n")[-2]

    # BUG FIX: close the log file handle (the original leaked it via a bare
    # open(...).read())
    with open(current_log_file) as log:
        log_contents = log.read()
    issue_data = {
        "title": issue_title,
        # BUG FIX: the "Error info" code fence was closed with FOUR
        # backticks (````), producing broken markdown on Github
        "body": "Zeus version:\n`{}`\n\n"
                "Error info:\n```{}```\n\n"
                "Running details:\n`{}`\n\n"
                "Commands used:\n`{}`\n\n"
                "Log file info:\n```{}```".format(VERSION, str(stacktrace),
                                                  str(platform.platform()),
                                                  " ".join(sys.argv),
                                                  log_contents),
    }

    _json_data = json.dumps(issue_data)
    if sys.version_info > (3, ):
        _json_data = _json_data.encode("utf-8")

    try:
        req = urllib2.Request(
            url="https://api.github.com/repos/ekultek/zeus-scanner/issues",
            data=_json_data,
            headers={"Authorization": "token {}".format(token)})
        urllib2.urlopen(req, timeout=10).read()
        logger.info(
            set_color(
                "issue has been created successfully with the following name '{}'..."
                .format(issue_title)))
    except Exception as e:
        logger.exception(
            set_color("failed to auto create the issue, got exception '{}', "
                      "you may manually create an issue...".format(e),
                      level=50))
# run the setup on the program setup(verbose=opt.runInVerbose) if not opt.hideBanner: print(BANNER) start_up() if opt.runInVerbose: being_run = find_running_opts(opt) logger.debug( set_color("running with options '{}'".format(being_run), level=10)) logger.info( set_color("log file being saved to '{}'".format( get_latest_log_file(CURRENT_LOG_FILE_PATH)))) def __run_attacks_main(**kwargs): """ main method to run the attacks """ log_to_use = kwargs.get("log", None) if log_to_use is None: options = (opt.dorkToUse, opt.useRandomDork, opt.dorkFileToUse) log_to_use = URL_LOG_PATH if any( o for o in options) else SPIDER_LOG_PATH try: urls_to_use = get_latest_log_file(log_to_use) except TypeError: urls_to_use = None else: