コード例 #1
0
def main_intel_amt(url, agent=None, proxy=None):
    """
    Connect to ``url``, retrieve Intel AMT hardware information via
    ``__get_hardware`` and pretty-print it to stdout.

    :param url: target URL to probe
    :param agent: optional user-agent string; defaults to DEFAULT_USER_AGENT
    :param proxy: optional proxy string, converted to a dict for the request
    """
    proxy = proxy_string_to_dict(proxy) or None
    agent = agent or DEFAULT_USER_AGENT
    logger.info(
        set_color(
            "attempting to connect to '{}' and get hardware info...".format(
                url)))
    try:
        json_data = __get_hardware(url, agent=agent, proxy=proxy)
        if json_data is None:
            logger.error(
                set_color("unable to get any information, skipping...",
                          level=40))
        else:
            print("-" * 40)
            # each top-level key maps to a dict of item -> value pairs
            # (the original indexed json_data[key][item]; iterate items() once)
            for key, items in json_data.items():
                print("{}:".format(str(key).capitalize()))
                for item, value in items.items():
                    print(" - {}: {}".format(item.capitalize(), value))
            print("-" * 40)
    except Exception as e:
        # DNS failures are expected for dead hosts and only warrant a skip;
        # anything else is treated as an internal error worth reporting
        if "Temporary failure in name resolution" in str(e):
            logger.error(
                set_color("failed to connect on '{}', skipping...".format(url),
                          level=40))
        else:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue...".format(e)))
            fix_log_file()
            request_issue_creation()
コード例 #2
0
def perform_port_scan(url,
                      ports=None,
                      scanner=NmapHook,
                      verbose=False,
                      opts=None,
                      **kwargs):
    """
    Main function that will initialize the port scanning.

    Resolves ``url`` to an IP address, verifies nmap is installed, then runs
    the given scanner and saves its JSON output to a file.

    :param url: hostname to resolve and scan
    :param ports: optional port specification passed through to the scanner
    :param scanner: scanner class to instantiate (defaults to NmapHook)
    :param verbose: emit debug-level progress messages when True
    :param opts: extra scanner options forwarded verbatim
    """
    url = url.strip()
    logger.info(
        set_color(
            "attempting to find IP address for hostname '{}'...".format(url)))
    found_ip_address = socket.gethostbyname(url)
    logger.info(
        set_color("found IP address for given URL -> '{}'...".format(
            found_ip_address)))
    if verbose:
        logger.debug(set_color("checking for nmap on your system...",
                               level=10))
    nmap_exists = find_nmap(verbose=verbose)
    # guard clause: without nmap there is nothing to do
    if not nmap_exists:
        logger.fatal(
            set_color(
                "nmap was not found on your system, please install it...",
                level=50))
        return
    if verbose:
        logger.debug(
            set_color(
                "nmap has been found under '{}'...".format(nmap_exists),
                level=10))
    logger.info(
        set_color("starting port scan on IP address '{}'...".format(
            found_ip_address)))
    try:
        data = scanner(found_ip_address, ports=ports, opts=opts)
        json_data = data._get_all_info()
        data.show_open_ports(json_data)
        file_path = data.send_to_file(json_data)
        logger.info(
            set_color(
                "port scan completed, all data saved to JSON file under '{}'..."
                .format(file_path)))
    except KeyError:
        # the scanner returned no entry for this host
        logger.fatal(
            set_color("no port information found for '{}({})'...".format(
                url, found_ip_address),
                      level=50))
    except Exception as e:
        logger.exception(
            set_color(
                "ran into exception '{}', cannot continue quitting...".
                format(e),
                level=50))
        request_issue_creation()
コード例 #3
0
def request_issue_creation():
    """
    Ask the user whether to file an anonymous GitHub issue for an unexpected
    error; if they agree, extract the traceback from the latest log file and
    POST it to the Zeus-Scanner issue tracker.
    """
    question = prompt(
        "would you like to create an anonymous issue and post it to Zeus's Github",
        opts="yN")
    if question.lower().startswith("n"):
        logger.error(
            set_color(
                "Zeus has experienced an internal error and cannot continue, shutting down...",
                level=40))
        shutdown()

    fix_log_file()
    logger.info(
        set_color(
            "Zeus got an unexpected error and will automatically create an issue for this error, please wait..."
        ))

    def __extract_stacktrace(file_data):
        # Collect every traceback section from the log; a very short line
        # (fewer than 5 chars) marks the end of a traceback block.
        logger.info(set_color("extracting traceback from log file..."))
        retval, buff_mode, _buffer = [], False, ""
        with open(file_data, "r+") as log:
            for line in log:
                if "Traceback" in line:
                    buff_mode = True
                if line and len(line) < 5:
                    buff_mode = False
                    retval.append(_buffer)
                    _buffer = ""
                if buff_mode:
                    if len(line) > 400:
                        # keep extremely long lines from bloating the issue
                        line = line[:400] + "...\n"
                    _buffer += line
        return "".join(retval)

    logger.info(set_color("getting authorization..."))

    encoded = __get_encoded_string()
    n = get_decode_num(encoded)
    token = decode(n, encoded)

    current_log_file = get_latest_log_file(CURRENT_LOG_FILE_PATH)
    stacktrace = __extract_stacktrace(current_log_file)
    issue_title = stacktrace.split("\n")[-2]

    # read the log once, closing the handle promptly (the original leaked it)
    with open(current_log_file) as log:
        log_contents = log.read()

    issue_data = {
        "title": issue_title,
        # BUG FIX: the error-info fence previously closed with FOUR backticks
        # ("```{}````"), breaking the Markdown rendering of the issue body
        "body": ("Zeus version:\n`{}`\n\n"
                 "Error info:\n```{}```\n\n"
                 "Running details:\n`{}`\n\n"
                 "Commands used:\n`{}`\n\n"
                 "Log file info:\n```{}```".format(VERSION, str(stacktrace),
                                                   str(platform.platform()),
                                                   " ".join(sys.argv),
                                                   log_contents)),
    }

    _json_data = json.dumps(issue_data)
    if sys.version_info > (3, ):
        # request body must be bytes on Python 3 — presumably urllib2 is
        # aliased to urllib.request there; verify against the module imports
        _json_data = _json_data.encode("utf-8")

    try:
        req = urllib2.Request(
            url="https://api.github.com/repos/ekultek/zeus-scanner/issues",
            data=_json_data,
            headers={"Authorization": "token {}".format(token)})
        urllib2.urlopen(req, timeout=10).read()
        logger.info(
            set_color(
                "issue has been created successfully with the following name '{}'..."
                .format(issue_title)))
    except Exception as e:
        logger.exception(
            set_color("failed to auto create the issue, got exception '{}', "
                      "you may manually create an issue...".format(e),
                      level=50))
コード例 #4
0
    # NOTE(review): this span is the interior of a larger function whose
    # definition lies outside this excerpt; `opt`, `proxy_to_use`,
    # `agent_to_use` and `__run_attacks` are presumably bound by the
    # enclosing scope, and the `try` below is closed past the end of the
    # excerpt — confirm against the full file.
    search_engine = __config_search_engine(verbose=opt.runInVerbose)

    try:
        # use a personal dork as the query
        if opt.dorkToUse is not None:
            logger.info(set_color(
                "starting dork scan with query '{}'...".format(opt.dorkToUse)
            ))
            try:
                search.parse_search_results(
                    opt.dorkToUse, search_engine, verbose=opt.runInVerbose, proxy=proxy_to_use,
                    agent=agent_to_use
                )
            except Exception as e:
                # any search failure is reported upstream as an issue
                logger.exception(set_color(
                    "ran into exception '{}'...".format(e), level=50
                ))
                fix_log_file()
                request_issue_creation()
                pass

            # the search step logs discovered URLs; pick up the newest log
            urls_to_use = get_latest_log_file(URL_LOG_PATH)
            if opt.runSqliScan or opt.runPortScan or opt.intelCheck or opt.adminPanelFinder or opt.runXssScan:
                with open(urls_to_use) as urls:
                    # run each requested attack against every discovered URL
                    for url in urls.readlines():
                        __run_attacks(
                            url.strip(),
                            sqlmap=opt.runSqliScan, nmap=opt.runPortScan, intel=opt.intelCheck, xss=opt.runXssScan,
                            admin=opt.adminPanelFinder, given_path=opt.givenSearchPath,
                            auto=opt.autoStartSqlmap, verbose=opt.runInVerbose, batch=opt.runInBatch
                        )
コード例 #5
0
ファイル: search.py プロジェクト: olivierh59500/Zeus-Scanner
def parse_search_results(query,
                         url,
                         verbose=False,
                         dirname="{}/log/url-log",
                         filename="url-log-{}.log",
                         **kwargs):
    """
    Parse a webpage from Google for URL's with a GET(query) parameter.

    :param query: the dork/search query to run
    :param url: search engine URL to query
    :param verbose: emit debug-level progress messages when True
    :param dirname: log directory template, formatted with os.getcwd()
    :param filename: log filename template, formatted with a sequence number
    :keyword proxy: optional proxy string
    :keyword agent: optional user-agent string
    :return: list of discovered URLs, or None when none were found
    """
    # BUG FIX: `"google" or "webcache" or "youtube"` evaluates to just
    # "google", so webcache/youtube URLs were never filtered; use a tuple
    # and test every term below
    exclude = ("google", "webcache", "youtube")

    create_dir(dirname.format(os.getcwd()))
    full_file_path = "{}/{}".format(
        dirname.format(os.getcwd()),
        filename.format(len(os.listdir(dirname.format(os.getcwd()))) + 1))

    def __get_headers():
        # dict.get never raises; missing keys simply yield None
        return kwargs.get("proxy"), kwargs.get("agent")

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration...",
                      level=10))
    proxy_string, user_agent = __get_headers()

    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
    if user_agent is None:
        user_agent = DEFAULT_USER_AGENT

    user_agent_info = "adjusting user-agent header to {}..."
    # compare by value, not identity (`is not` only worked by accident
    # when the default object itself was used)
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {"Connection": "close", "user-agent": user_agent}
    logger.info(set_color("attempting to gather query URL..."))
    try:
        query_url = get_urls(query,
                             url,
                             verbose=verbose,
                             user_agent=user_agent,
                             proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(
                set_color(
                    "it seems that you exited the browser, please allow the browser "
                    "to complete it's run so that Zeus can bypass captchas and API "
                    "calls",
                    level=50))
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file...".format(
                        os.path.basename(__file__),
                        str(e).strip()),
                    level=50))
        shutdown()
    logger.info(
        set_color(
            "URL successfully gathered, searching for GET parameters..."))
    logger.info(set_color(proxy_string_info))
    req = requests.get(query_url, proxies=proxy_string)
    logger.info(set_color(user_agent_info))
    req.headers.update(headers)
    found_urls = URL_REGEX.findall(req.text)
    retval = set()
    # BUG FIX: loop variables no longer shadow the `url` parameter, which
    # previously corrupted the "using search engine" message below
    for url_group in list(found_urls):
        for discovered in list(url_group):
            discovered = urllib.unquote(discovered)
            if URL_QUERY_REGEX.match(discovered) and not any(
                    term in discovered for term in exclude):
                if type(discovered) is unicode:
                    discovered = str(discovered).encode("utf-8")
                if verbose:
                    logger.debug(
                        set_color("found '{}'...".format(discovered),
                                  level=10))
                retval.add(discovered.split("&amp;")[0])
    logger.info(
        set_color("found a total of {} URL's with a GET parameter...".format(
            len(retval))))
    if len(retval) != 0:
        logger.info(
            set_color(
                "saving found URL's under '{}'...".format(full_file_path)))
        with open(full_file_path, "a+") as log:
            for found_url in list(retval):
                log.write(found_url + "\n")
    else:
        logger.critical(
            set_color(
                "did not find any usable URL's with the given query '{}' "
                "using search engine '{}'...".format(query, url),
                level=50))
        shutdown()
    return list(retval) if len(retval) != 0 else None
コード例 #6
0
ファイル: __init__.py プロジェクト: Sts0mrg0/Zeus-Scanner
def check_for_admin_page(url,
                         exts,
                         protocol="http://",
                         show_possibles=False,
                         verbose=False):
    """
    Probe a host for admin panels by appending each extension in ``exts`` to
    the stripped URL and attempting a connection; report successful and
    "possible" (access-denied) hits as connection trees.

    :param url: target host URL
    :param exts: iterable of path extensions to try
    :param protocol: scheme prefix used to build each candidate URL
    :param show_possibles: also report access-denied candidates when True
    :param verbose: emit per-attempt debug/warning messages when True
    """
    possible_connections, connections = set(), set()
    stripped_url = replace_http(url.strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url),
                                   level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            # NOTE(review): hits are only recorded when verbose is set —
            # looks unintentional, but preserved; confirm with the author
            if verbose:
                if "Access Denied" in str(e):
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
                    possible_connections.add(true_url)
                else:
                    logger.error(
                        set_color(
                            "failed to connect got error code {}...".format(
                                data[2]),
                            level=40))
        except Exception as e:
            if verbose:
                # BUG FIX: the original tested
                # `"<urlopen error timed out>" or "timeout: timed out" in str(e)`
                # which is always true (a non-empty literal is truthy), so
                # every unexpected error was misreported as a timeout
                if ("<urlopen error timed out>" in str(e)
                        or "timeout: timed out" in str(e)):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    fix_log_file()
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connections(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections),
                                  len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        # BUG FIX: the placeholder was never formatted, logging a literal {}
        logger.fatal(
            set_color(
                "did not find any successful connections to {}'s "
                "admin page".format(url),
                level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            # BUG FIX: same unformatted placeholder as above
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page".format(url),
                    level=50))
コード例 #7
0
ファイル: search.py プロジェクト: Expertasif/Zeus-Scanner
def parse_search_results(query, url_to_search, verbose=False, **kwargs):
    """
    Parse a webpage from Google for URL's with a GET(query) parameter.

    :param query: the dork/search query to run
    :param url_to_search: search engine URL to query
    :param verbose: emit debug-level progress messages when True
    :keyword proxy: optional proxy string
    :keyword agent: optional user-agent string
    :return: list of discovered URLs, or None when none were found
    """
    exclude = ("www.google.com", "map.google.com", "mail.google.com",
               "drive.google.com", "news.google.com", "accounts.google.com")
    splitter = "&amp;"
    retval = set()
    query_url = None

    def __get_headers():
        # dict.get never raises; missing keys simply yield None
        return kwargs.get("proxy"), kwargs.get("agent")

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration...",
                      level=10))
    proxy_string, user_agent = __get_headers()

    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
    if user_agent is None:
        user_agent = DEFAULT_USER_AGENT

    user_agent_info = "adjusting user-agent header to {}..."
    # compare by value, not identity (`is not` only worked by accident
    # when the default object itself was used)
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {"Connection": "close", "user-agent": user_agent}
    logger.info(set_color("attempting to gather query URL..."))
    try:
        query_url = get_urls(query,
                             url_to_search,
                             verbose=verbose,
                             user_agent=user_agent,
                             proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(
                set_color(
                    "it seems that you exited the browser, please allow the browser "
                    "to complete it's run so that Zeus can bypass captchas and API "
                    "calls",
                    level=50))
        elif "'/usr/lib/firefoxdriver/webdriver.xpi'" in str(e):
            logger.fatal(
                set_color(
                    "firefox was not found in the default location on your system, "
                    "check your installation and make sure it is in /usr/lib, if you "
                    "find it there, restart your system and try again...",
                    level=50))
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file...".format(
                        os.path.basename(__file__),
                        str(e).strip()),
                    level=50))
            request_issue_creation()
        shutdown()
    logger.info(
        set_color(
            "URL successfully gathered, searching for GET parameters..."))

    logger.info(set_color(proxy_string_info))
    req = requests.get(query_url, proxies=proxy_string)
    logger.info(set_color(user_agent_info))
    req.headers.update(headers)
    found_urls = URL_REGEX.findall(req.text)
    for urls in list(found_urls):
        for url in list(urls):
            url = unquote(url)
            if URL_QUERY_REGEX.match(url) and not any(l in url
                                                      for l in exclude):
                if isinstance(url, unicode):
                    url = str(url).encode("utf-8")
                if "webcache" in url:
                    logger.info(
                        set_color(
                            "received webcache URL, extracting URL from webcache..."
                        ))
                    url = extract_webcache_url(url)
                if verbose:
                    try:
                        logger.debug(
                            set_color("found '{}'...".format(
                                url.split(splitter)[0]),
                                      level=10))
                    except TypeError:
                        logger.debug(
                            set_color("found '{}'...".format(
                                str(url).split(splitter)[0]),
                                      level=10))
                    except AttributeError:
                        logger.debug(
                            set_color("found '{}...".format(str(url)),
                                      level=10))
                # consistency fix: use the `splitter` constant instead of a
                # duplicated "&amp;" literal (same value, single source)
                retval.add(url.split(splitter)[0])
    logger.info(
        set_color("found a total of {} URL's with a GET parameter...".format(
            len(retval))))
    if len(retval) != 0:
        write_to_log_file(retval, URL_LOG_PATH, "url-log-{}.log")
    else:
        logger.critical(
            set_color(
                "did not find any usable URL's with the given query '{}' "
                "using search engine '{}'...".format(query, url_to_search),
                level=50))
        shutdown()
    return list(retval) if len(retval) != 0 else None