Example #1
def main_intel_amt(url, agent=None, proxy=None):
    proxy = proxy_string_to_dict(proxy) or None
    agent = agent or DEFAULT_USER_AGENT
    logger.info(
        set_color(
            "attempting to connect to '{}' and get hardware info...".format(
                url)))
    try:
        json_data = __get_hardware(url, agent=agent, proxy=proxy)
        if json_data is None:
            logger.error(
                set_color("unable to get any information, skipping...",
                          level=40))
        else:
            print("-" * 40)
            for key in json_data.keys():
                print("{}:".format(str(key).capitalize()))
                for item in json_data[key]:
                    print(" - {}: {}".format(item.capitalize(),
                                             json_data[key][item]))
            print("-" * 40)
    except Exception as e:
        if "Temporary failure in name resolution" in str(e):
            logger.error(
                set_color("failed to connect on '{}', skipping...".format(url),
                          level=40))
        else:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue...".format(e)))
            request_issue_creation()
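
A note on the data contract: the printing loop above assumes __get_hardware returns a two-level dictionary mapping a hardware category to its fields. A minimal sketch of that shape (the sample values are illustrative, not real AMT output):

# Hypothetical example of the category -> field -> value structure the
# printing loop iterates over; real AMT hardware info will differ.
json_data = {
    "system": {"manufacturer": "ExampleCorp", "model": "X-1"},
    "processor": {"family": "example-family", "speed": "2400 MHz"},
}
for key in json_data:
    print("{}:".format(key.capitalize()))
    for item in json_data[key]:
        print(" - {}: {}".format(item.capitalize(), json_data[key][item]))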
Example #2
def detect_plugins(html, headers, **kwargs):
    verbose = kwargs.get("verbose", False)

    try:
        retval = []
        plugin_skip_schema = ("__init__", ".pyc")
        plugin_file_list = [f for f in os.listdir(DETECT_PLUGINS_PATH) if not any(s in f for s in plugin_skip_schema)]
        for plugin in plugin_file_list:
            plugin = plugin[:-3]
            if verbose:
                logger.debug(set_color(
                    "loading script '{}'".format(plugin), level=10
                ))
            plugin_detection = "lib.plugins.{}"
            plugin_detection = plugin_detection.format(plugin)
            plugin_detection = importlib.import_module(plugin_detection)
            if plugin_detection.search(html, headers=headers) is True:
                retval.append((plugin_detection.__product__, plugin_detection.__description__))
        if len(retval) > 0:
            return retval
        return None
    except Exception as e:
        logger.exception(str(e))
        if "Read timed out." or "Connection reset by peer" in str(e):
            logger.warning(set_color(
                "plugin request failed, assuming no plugins and continuing", level=30
            ))
            return None
        else:
            logger.exception(set_color(
                "plugin detection has failed with error {}".format(str(e))
            ))
            request_issue_creation()
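
detect_plugins imports every module under lib/plugins and duck-types it: each module must expose a search() callable plus __product__ and __description__ attributes. A minimal sketch of such a plugin module (the product name and fingerprint regex are illustrative, not a real Zeus plugin):

# lib/plugins/example_cms.py -- hypothetical plugin matching the contract
# detect_plugins relies on: module-level search(), __product__, __description__
import re

__product__ = "ExampleCMS"
__description__ = "a hypothetical content management system"

# fingerprint looked for in the page body; purely illustrative
_BODY_FINGERPRINT = re.compile(r"<meta[^>]+generator[^>]+ExampleCMS", re.I)


def search(html, headers=None, **kwargs):
    # return a literal True/False, since the caller tests the result with `is True`
    if html and _BODY_FINGERPRINT.search(html) is not None:
        return True
    return False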
Example #3
def perform_port_scan(url,
                      ports=None,
                      scanner=NmapHook,
                      verbose=False,
                      opts=None,
                      **kwargs):
    """
    main function that will initialize the port scanning
    """
    url = url.strip()
    logger.info(
        set_color(
            "attempting to find IP address for hostname '{}'...".format(url)))
    found_ip_address = socket.gethostbyname(url)
    logger.info(
        set_color("found IP address for given URL -> '{}'...".format(
            found_ip_address)))
    if verbose:
        logger.debug(set_color("checking for nmap on your system...",
                               level=10))
    nmap_exists = find_nmap(verbose=verbose)
    if nmap_exists:
        if verbose:
            logger.debug(
                set_color(
                    "nmap has been found under '{}'...".format(nmap_exists),
                    level=10))
        logger.info(
            set_color("starting port scan on IP address '{}'...".format(
                found_ip_address)))
        try:
            data = scanner(found_ip_address, ports=ports, opts=opts)
            json_data = data._get_all_info()
            data.show_open_ports(json_data)
            file_path = data.send_to_file(json_data)
            logger.info(
                set_color(
                    "port scan completed, all data saved to JSON file under '{}'..."
                    .format(file_path)))
        except KeyError:
            logger.fatal(
                set_color("no port information found for '{}({})'...".format(
                    url, found_ip_address),
                          level=50))
        except Exception as e:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue, quitting...".
                    format(e),
                    level=50))
            request_issue_creation()
    else:
        logger.fatal(
            set_color(
                "nmap was not found on your system, please install it...",
                level=50))
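
The scanner argument is duck-typed: any class whose constructor takes (ip, ports=..., opts=...) and that provides the three methods used in the try block will work. A stand-in satisfying that interface, useful for dry-running the flow without nmap installed (the class and its output are hypothetical):

# hypothetical stand-in for NmapHook; mirrors only the interface
# perform_port_scan calls above
import json
import tempfile


class FakeScannerHook(object):
    def __init__(self, ip_address, ports=None, opts=None):
        self.ip_address = ip_address
        self.ports = ports
        self.opts = opts

    def _get_all_info(self):
        # placeholder result in a nested, JSON-serializable shape
        return {self.ip_address: {"ports": {"80": "open", "443": "open"}}}

    def show_open_ports(self, json_data):
        for port, state in json_data[self.ip_address]["ports"].items():
            print("{}/tcp {}".format(port, state))

    def send_to_file(self, json_data):
        fh = tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False)
        with fh:
            json.dump(json_data, fh)
        return fh.name

# perform_port_scan("example.com", scanner=FakeScannerHook)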
Example #4
def parse_search_results(
        query, url_to_search, verbose=False, **kwargs):
    """
      Parse a webpage from Google for URLs with a GET (query) parameter
    """
    splitter = "&"
    retval = set()
    query_url = None

    proxy_string, user_agent = kwargs.get("proxy", None), kwargs.get("agent", None)

    if verbose:
        logger.debug(set_color(
            "checking for user-agent and proxy configuration...", level=10
        ))

    user_agent_info = "adjusting user-agent header to {}..."
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format("default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" + ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {
        "Connection": "close",
        "user-agent": user_agent
    }
    logger.info(set_color(
        "attempting to gather query URL..."
    ))
    try:
        query_url = get_urls(query, url_to_search, verbose=verbose, user_agent=user_agent, proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(set_color(
                "it seems that you exited the browser, please allow the browser "
                "to complete it's run so that Zeus can bypass captchas and API "
                "calls", level=50
            ))
        elif "'/usr/lib/firefoxdriver/webdriver.xpi'" in str(e):
            logger.fatal(set_color(
                "firefox was not found in the default location on your system, "
                "check your installation and make sure it is in /usr/lib, if you "
                "find it there, restart your system and try again...", level=50
            ))
        else:
            logger.exception(set_color(
                "{} failed to gather the URL from search engine, caught exception '{}' "
                "exception has been logged to current log file...".format(
                    os.path.basename(__file__), str(e).strip()), level=50)
            )
            request_issue_creation()
        shutdown()
    logger.info(set_color(
        "URL successfully gathered, searching for GET parameters..."
    ))

    logger.info(set_color(proxy_string_info))
    logger.info(set_color(user_agent_info))
    req = requests.get(query_url, headers=headers, proxies=proxy_string)
    found_urls = URL_REGEX.findall(req.text)
    url_skip_schema = ("maps.google", "play.google", "youtube")
    for urls in list(found_urls):
        for url in list(urls):
            url = unquote(url)
            if not any(u in url for u in url_skip_schema):
                if URL_QUERY_REGEX.match(url) and not any(l in url for l in URL_EXCLUDES):
                    if isinstance(url, unicode):
                        url = str(url).encode("utf-8")
                    if "webcache" in url:
                        logger.info(set_color(
                            "received webcache URL, extracting URL from webcache..."
                        ))
                        webcache_url = url
                        url = extract_webcache_url(webcache_url)
                        if url is None:
                            logger.warning(set_color(
                                "unable to extract url from given webcache URL '{}'...".format(
                                    webcache_url
                                ), level=30
                            ))
                    if verbose:
                        try:
                            logger.debug(set_color(
                                "found '{}'...".format(url.split(splitter)[0]), level=10
                            ))
                        except TypeError:
                            logger.debug(set_color(
                                "found '{}'...".format(str(url).split(splitter)[0]), level=10
                            ))
                        except AttributeError:
                            logger.debug(set_color(
                                "found '{}...".format(str(url)), level=10
                            ))
                    if url is not None:
                        retval.add(url.split("&")[0])
    logger.info(set_color(
        "found a total of {} URL's with a GET parameter...".format(len(retval))
    ))
    if len(retval) != 0:
        write_to_log_file(retval, URL_LOG_PATH, "url-log-{}.log")
    else:
        logger.critical(set_color(
            "did not find any usable URL's with the given query '{}' "
            "using search engine '{}'...".format(query, url_to_search), level=50
        ))
        shutdown()
    return list(retval) if len(retval) != 0 else None
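
The filtering above hinges on two patterns: URL_REGEX finds candidate URLs in the raw page source and URL_QUERY_REGEX keeps only those carrying a GET parameter. Rough approximations of such patterns (the project's actual regexes may be stricter):

# illustrative approximations of URL_REGEX / URL_QUERY_REGEX
import re

URL_REGEX = re.compile(r"https?://[^\s\"'<>]+")
URL_QUERY_REGEX = re.compile(r"https?://\S+\?\S+=\S*")

sample = 'see <a href="http://example.com/page.php?id=1&view=2">here</a>'
for match in URL_REGEX.findall(sample):
    if URL_QUERY_REGEX.match(match):
        print(match.split("&")[0])  # -> http://example.com/page.php?id=1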
Example #5
def sqlmap_scan_main(url,
                     port=None,
                     verbose=None,
                     opts=None,
                     auto_start=False):
    """
    the main function that will be called to initialize everything
    """
    def ___dict_args():
        """
        build an argument dictionary from the (flag, value) tuples passed by the user
        """
        return {key: value for key, value in opts}

    is_started = lib.core.settings.search_for_process("sqlmapapi.py")

    if auto_start:
        lib.core.settings.logger.error(
            lib.core.settings.set_color(
                "auto starting sqlmap is not implemented yet, you will need to start "
                "the API manually for now...",
                level=40))
        lib.core.settings.prompt("press enter when ready to continue...")
        '''lib.core.settings.logger.info(lib.core.settings.set_color(
            "attempting to find sqlmap on your system..."
        ))
        try:
            path = "".join(find_sqlmap("sqlmap", verbose=verbose))
            lib.core.settings.logger.info(lib.core.settings.set_color(
                "attempting to call sqlmap API..."
            ))
            subprocess.Popen(["python {}/{} -s".format(path, "sqlmapapi.py")], shell=True,
                             close_fds=True, stdout=subprocess.PIPE)
            lib.core.settings.logger.info(lib.core.settings.set_color(
                "API started, continuing process..."
                )
            )
            time.sleep(3)
            if not is_started:
                lib.core.settings.prompt(
                    "appears that sqlmap's API was not started successfully, start it manually and press"
                    " enter..."
                )
        except Exception as e:
            print e
            lib.core.settings.logger.error(lib.core.settings.set_color(
                "ran into an error while trying to start the sqlmap API, please do it manually...", level=50
            ))
            lib.core.settings.prompt(
                "press enter when ready to start..."
            )'''
    else:
        if not is_started:
            lib.core.settings.prompt(
                "sqlmap API is not started, start it and press enter to continue..."
            )
    try:
        sqlmap_scan = SqlmapHook(url, port=port)
        lib.core.settings.logger.info(
            lib.core.settings.set_color(
                "initializing new sqlmap scan with given URL '{}'...".format(
                    url)))
        sqlmap_scan.init_new_scan()
        if verbose:
            lib.core.settings.logger.debug(
                lib.core.settings.set_color("scan initialized...", level=10))
        lib.core.settings.logger.info(
            lib.core.settings.set_color("gathering sqlmap API scan ID..."))
        api_id = sqlmap_scan.get_scan_id()
        if verbose:
            lib.core.settings.logger.debug(
                lib.core.settings.set_color(
                    "current sqlmap scan ID: '{}'...".format(api_id),
                    level=10))
        lib.core.settings.logger.info(
            lib.core.settings.set_color(
                "starting sqlmap scan on url: '{}'...".format(url)))
        if opts:
            if verbose:
                lib.core.settings.logger.debug(
                    lib.core.settings.set_color(
                        "using arguments: '{}'...".format(___dict_args()),
                        level=10))
            lib.core.settings.logger.info(
                lib.core.settings.set_color(
                    "adding arguments to sqlmap API..."))
        else:
            if verbose:
                lib.core.settings.logger.debug(
                    lib.core.settings.set_color(
                        "no arguments passed, skipping...", level=10))
        lib.core.settings.logger.warning(
            lib.core.settings.set_color(
                "please keep in mind that this is the API, output will "
                "not be saved to log file, it may take a little longer "
                "to finish processing, launching sqlmap...",
                level=30))
        sqlmap_scan.start_scan(api_id, opts=opts)
        print("-" * 30)
        sqlmap_scan.show_sqlmap_log(api_id)
        print("-" * 30)
    except requests.exceptions.HTTPError as e:
        lib.core.settings.logger.exception(
            lib.core.settings.set_color(
                "ran into error '{}', seems you didn't start the server, check "
                "the server port and try again...".format(e),
                level=50))
    except Exception as e:
        if "HTTPConnectionPool(host='127.0.0.1'" in str(e):
            lib.core.settings.logger.error(
                lib.core.settings.set_color(
                    "sqlmap API is not started, did you forget to start it? "
                    "You will need to open a new terminal, cd into sqlmap, and "
                    "run `python sqlmapapi.py -s` otherwise pass the correct flags "
                    "to auto start the API...",
                    level=40))
        else:
            lib.core.settings.logger.exception(
                lib.core.settings.set_color(
                    "ran into error '{}', seems something went wrong, error has "
                    "been saved to current log file.".format(e),
                    level=50))
            request_issue_creation()
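
___dict_args consumes opts as an iterable of (flag, value) pairs, so callers are expected to pass sqlmap options in that shape. A hedged usage sketch (the option names are illustrative, and 8775 is assumed to be sqlmapapi's usual default port):

# opts in the shape ___dict_args expects: an iterable of (key, value) tuples
opts = [("level", 5), ("risk", 3)]
print({key: value for key, value in opts})
# -> {'level': 5, 'risk': 3}
# sqlmap_scan_main("http://example.com/page.php?id=1", port=8775, opts=opts)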
Example #6
def main_header_check(url, **kwargs):
    """
    main function
    """
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)
    identify_waf = kwargs.get("identify_waf", True)
    identify_plugins = kwargs.get("identify_plugins", True)
    show_description = kwargs.get("show_description", False)
    attempts = kwargs.get("attempts", 3)

    default_sleep_time = 5
    protection = {"hostname": url}
    definition = {
        "x-xss": ("protection against XSS attacks", "XSS"),
        "strict-transport": ("protection against unencrypted connections (force HTTPS connection)", "HTTPS"),
        "x-frame": ("protection against clickjacking vulnerabilities", "CLICKJACKING"),
        "x-content": ("protection against MIME type attacks", "MIME"),
        "x-csrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "x-xsrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "public-key": ("protection to reduce success rates of MITM attacks", "MITM"),
        "content-security": ("header protection against multiple attack types", "ALL")
    }

    try:
        req, status, html, headers = get_page(url, proxy=proxy, agent=agent, xforward=xforward)

        logger.info(set_color(
            "detecting target charset"
        ))
        charset = get_charset(html, headers)
        if charset is not None:
            logger.info(set_color(
                "target charset appears to be '{}'".format(charset), level=25
            ))
        else:
            logger.warning(set_color(
                "unable to detect target charset", level=30
            ))
        if identify_waf:
            waf_url = "{} {}".format(url.strip(), PROTECTION_CHECK_PAYLOAD)
            _, waf_status, waf_html, waf_headers = get_page(waf_url, xforward=xforward, proxy=proxy, agent=agent)
            logger.info(set_color(
                "checking if target URL is protected by some kind of WAF/IPS/IDS"
            ))
            if verbose:
                logger.debug(set_color(
                    "attempting connection to '{}'".format(waf_url), level=10
                ))

            identified_waf = detect_protection(url, waf_status, waf_html, waf_headers, verbose=verbose)

            if identified_waf is None:
                logger.info(set_color(
                    "no WAF/IDS/IPS has been identified on target URL", level=25
                ))
            else:
                logger.warning(set_color(
                    "the target URL WAF/IDS/IPS has been identified as '{}'".format(identified_waf), level=35
                ))

        if identify_plugins:
            logger.info(set_color(
                "attempting to identify plugins"
            ))
            identified_plugin = detect_plugins(html, headers, verbose=verbose)
            if identified_plugin is not None:
                for plugin in identified_plugin:
                    if show_description:
                        logger.info(set_color(
                            "possible plugin identified as '{}' (description: '{}')".format(
                                plugin[0], plugin[1]
                            ), level=25
                        ))
                    else:
                        logger.info(set_color(
                            "possible plugin identified as '{}'".format(
                                plugin[0]
                            ), level=25
                        ))
            else:
                logger.warning(set_color(
                    "no known plugins identified on target", level=30
                ))

        if verbose:
            logger.debug(set_color(
                "loading XML data", level=10
            ))
        comparable_headers = load_xml_data(HEADER_XML_DATA)
        logger.info(set_color(
            "attempting to get request headers for '{}'".format(url.strip())
        ))
        try:
            found_headers = load_headers(url, req)
        except (ConnectionError, Exception) as e:
            if "Read timed out." or "Connection reset by peer" in str(e):
                found_headers = None
            else:
                logger.exception(set_color(
                    "Zeus has hit an unexpected error and cannot continue '{}'".format(e), level=50
                ))
                request_issue_creation()
                found_headers = None

        if found_headers is not None:
            if verbose:
                logger.debug(set_color(
                    "fetched {}".format(found_headers), level=10
                ))
            headers_established = [str(h) for h in compare_headers(found_headers, comparable_headers)]
            for key in definition.iterkeys():
                if any(key in h.lower() for h in headers_established):
                    logger.warning(set_color(
                        "provided target has {}".format(definition[key][0]), level=30
                    ))
            for key in found_headers.iterkeys():
                protection[key] = found_headers[key]
            logger.info(set_color(
                "writing found headers to log file", level=25
            ))
            return write_to_log_file(protection, HEADER_RESULT_PATH, HEADERS_FILENAME.format(replace_http(url)))
        else:
            logger.error(set_color(
                "unable to retrieve headers for site '{}'".format(url.strip()), level=40
            ))
    except ConnectionError:
        attempts = attempts - 1
        if attempts == 0:
            return False
        logger.warning(set_color(
            "target actively refused the connection, sleeping for {}s and retrying the request".format(
                default_sleep_time
            ), level=30
        ))
        time.sleep(default_sleep_time)
        return main_header_check(
            url, proxy=proxy, agent=agent, xforward=xforward, show_description=show_description,
            identify_plugins=identify_plugins, identify_waf=identify_waf, verbose=verbose,
            attempts=attempts
        )
    except ReadTimeout:
        logger.error(set_color(
            "meta-data retrieval failed due to target URL timing out, skipping", level=40
        ))
    except KeyboardInterrupt:
        if not pause():
            shutdown()
    except Exception as e:
        logger.exception(set_color(
            "meta-data retrieval failed with unexpected error '{}'".format(
                str(e)
            ), level=50
        ))
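
Several of the functions above test the same pair of transient error markers before deciding whether to bail out quietly; a small helper keeps that any() test in one place (a sketch, not part of Zeus itself):

# hypothetical helper consolidating the transient-error checks used above
TRANSIENT_ERRORS = ("Read timed out.", "Connection reset by peer")


def is_transient_error(exc):
    # True when the exception message matches a known transient failure
    return any(marker in str(exc) for marker in TRANSIENT_ERRORS)

# usage inside an except block:
#     if is_transient_error(e):
#         return None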
Example #7
def perform_port_scan(url, scanner=NmapHook, **kwargs):
    """
    main function that will initialize the port scanning
    """
    verbose = kwargs.get("verbose", False)
    opts = kwargs.get("opts", None)

    url = url.strip()
    lib.core.settings.logger.info(
        lib.core.settings.set_color(
            "attempting to find IP address for hostname '{}'...".format(url)))
    found_ip_address = socket.gethostbyname(url)
    lib.core.settings.logger.info(
        lib.core.settings.set_color(
            "found IP address for given URL -> '{}'...".format(
                found_ip_address),
            level=25))
    if verbose:
        lib.core.settings.logger.debug(
            lib.core.settings.set_color("checking for nmap on your system...",
                                        level=10))
    nmap_exists = "".join(find_nmap())
    if nmap_exists:
        if verbose:
            lib.core.settings.logger.debug(
                lib.core.settings.set_color(
                    "nmap has been found under '{}'...".format(nmap_exists),
                    level=10))
        lib.core.settings.logger.info(
            lib.core.settings.set_color(
                "starting port scan on IP address '{}'...".format(
                    found_ip_address)))
        try:
            data = scanner(found_ip_address, opts=opts)
            json_data = data.get_all_info()
            data.show_open_ports(json_data)
            file_path = data.send_to_file(json_data)
            lib.core.settings.logger.info(
                lib.core.settings.set_color(
                    "port scan completed, all data saved to JSON file under '{}'..."
                    .format(file_path)))
        except KeyError:
            lib.core.settings.logger.fatal(
                lib.core.settings.set_color(
                    "no port information found for '{}({})'...".format(
                        url, found_ip_address),
                    level=50))
        except KeyboardInterrupt:
            if not lib.core.common.pause():
                lib.core.common.shutdown()
        except Exception as e:
            lib.core.settings.logger.exception(
                lib.core.settings.set_color(
                    "ran into exception '{}', cannot continue, quitting...".
                    format(e),
                    level=50))
            request_issue_creation()
    else:
        lib.core.settings.logger.fatal(
            lib.core.settings.set_color("nmap was not found on your system...",
                                        level=50))
        question = lib.core.common.prompt(
            "would you like to automatically install it", opts="yN")
        if question.lower().startswith("y"):
            install_nmap_command = shlex.split("sudo sh {}".format(
                lib.core.settings.NMAP_INSTALLER_TOOL))
            subprocess.call(install_nmap_command)
            lib.core.settings.logger.info(
                lib.core.settings.set_color(
                    "nmap has been successfully installed, re-running...",
                    level=25))
            perform_port_scan(url, verbose=verbose, opts=opts)
        else:
            lib.core.settings.logger.fatal(
                lib.core.settings.set_color(
                    "nmap is not installed, please install it in order to continue...",
                    level=50))
Example #8
def detect_protection(url, **kwargs):
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)

    url = "{} {}".format(url.strip(), PROTECTION_CHECK_PAYLOAD)

    if verbose:
        logger.debug(set_color(
            "attempting connection to '{}'...".format(url), level=10
        ))
    try:
        _, status, html, headers = get_page(url, agent=agent, proxy=proxy, xforward=xforward)

        # make sure there are no DBMS errors in the HTML
        for dbms in DBMS_ERRORS:
            for regex in DBMS_ERRORS[dbms]:
                if re.compile(regex).search(html) is not None:
                    logger.warning(set_color(
                        "it appears that the WAF/IDS/IPS check threw a DBMS error and may be vulnerable "
                        "to SQL injection attacks. it appears the backend DBMS is '{}', site will be "
                        "saved for further processing...".format(dbms), level=30
                    ))
                    write_to_log_file(url, SQLI_SITES_FILEPATH, SQLI_FOUND_FILENAME)
                    return None

        retval = []
        file_list = [f for f in os.listdir(DETECT_FIREWALL_PATH) if not any(ex in f for ex in ["__init__", ".pyc"])]
        for item in file_list:
            item = item[:-3]
            if verbose:
                logger.debug(set_color(
                    "loading script '{}'...".format(item), level=10
                ))
            detection_name = "lib.firewall.{}"
            detection_name = detection_name.format(item)
            detection_name = importlib.import_module(detection_name)
            if detection_name.detect(html, headers=headers, status=status) is True:
                retval.append(detection_name.__item__)
        if len(retval) != 0:
            if len(retval) >= 2:
                try:
                    del retval[retval.index("Generic (Unknown)")]
                except (ValueError, IndexError):
                    logger.warning(set_color(
                        "multiple firewalls identified ({}), displaying most likely...".format(
                            ", ".join([item.split("(")[0] for item in retval])
                        ), level=30
                    ))
                    del retval[retval.index(retval[1])]
                    if len(retval) >= 2:
                        del retval[retval.index(retval[1])]
            if retval[0] == "Generic (Unknown)":
                logger.warning(set_color(
                    "discovered firewall is unknown to Zeus, saving fingerprint to file. "
                    "if you know the details or the context of the firewall please create "
                    "an issue ({}) with the fingerprint, or a pull request with the script...".format(
                        ISSUE_LINK
                    ), level=30
                ))
                fingerprint = "<!---\nHTTP 1.1\nStatus Code: {}\nHTTP Headers: {}\n--->\n{}".format(
                    status, headers, html
                )
                write_to_log_file(fingerprint, UNKNOWN_FIREWALL_FINGERPRINT_PATH, UNKNOWN_FIREWALL_FILENAME)
            return "".join(retval) if isinstance(retval, list) else retval
        else:
            return None

    except Exception as e:
        if "Read timed out." or "Connection reset by peer" in str(e):
            logger.warning(set_color(
                "detection request failed, assuming no protection and continuing...", level=30
            ))
            return None
        else:
            logger.exception(set_color(
                "Zeus ran into an unexpected error '{}'...".format(e), level=50
            ))
            request_issue_creation()
            return None
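
Like the plugin loader, detect_protection duck-types every module under lib/firewall: each script must expose detect() and an __item__ string. A minimal sketch of such a detection script (the WAF name and fingerprint are illustrative):

# lib/firewall/example_waf.py -- hypothetical firewall script matching the
# contract detect_protection expects: module-level detect() and __item__
import re

__item__ = "ExampleWAF (ExampleVendor)"

_SERVER_FINGERPRINT = re.compile(r"example-waf", re.I)


def detect(html, **kwargs):
    headers = kwargs.get("headers", None) or {}
    status = kwargs.get("status", 200)
    # a blocked probe typically comes back 403/406 with a telltale Server header
    if status in (403, 406):
        server = str(headers.get("server", headers.get("Server", "")))
        return _SERVER_FINGERPRINT.search(server) is not None
    return False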
Example #9
def search_multiple_pages(query, link_amount, verbose=False, **kwargs):
    """
    search multiple pages for a large number of links; this will not be done via Google
    """
    proxy = kwargs.get("proxy", None)
    agent = kwargs.get("agent", None)
    xforward = kwargs.get("xforward", False)
    batch = kwargs.get("batch", False)
    show_success = kwargs.get("show_success", False)
    attrib, desc = "a", "href"
    retval = set()
    search_engine = AUTHORIZED_SEARCH_ENGINES["search-results"]

    logger.warning(
        set_color("searching multiple pages will not be done on Google",
                  level=30))

    if not parse_blacklist(query, BLACKLIST_FILE_PATH, batch=batch):
        shutdown()

    if not xforward:
        params = {"Connection": "close", "user-agent": agent}
    else:
        ip_list = (create_random_ip(), create_random_ip(), create_random_ip())
        params = {
            "Connection":
            "close",
            "user-agent":
            agent,
            "X-Forwarded-For":
            "{}, {}, {}".format(ip_list[0], ip_list[1], ip_list[2])
        }

    page_number = 1
    try:
        while len(retval) <= link_amount:
            if verbose:
                logger.debug(
                    set_color("searching page number {}".format(page_number),
                              level=10))
            if page_number % 10 == 0:
                logger.info(
                    set_color("currently on page {} of search results".format(
                        page_number)))
            page_request = requests.get(search_engine.format(
                page_number, query, page_number),
                                        headers=params,
                                        proxies=proxy_string_to_dict(proxy))
            if page_request.status_code == 200:
                html_page = page_request.content
                soup = BeautifulSoup(html_page, "html.parser")
                if not NO_RESULTS_REGEX.findall(str(soup)):
                    for link in soup.findAll(attrib):
                        redirect = link.get(desc)
                        if redirect is not None:
                            if not any(ex in redirect for ex in URL_EXCLUDES):
                                if URL_REGEX.match(redirect):
                                    retval.add(redirect)
                    if page_number < MAX_PAGE_NUMBER:
                        page_number += 1
                    else:
                        logger.warning(
                            set_color("hit max page number {}".format(
                                MAX_PAGE_NUMBER),
                                      level=30))
                        break
                else:
                    logger.warning(
                        set_color("no more results found for given query '{}'".
                                  format(query),
                                  level=30))
                    break
    except KeyboardInterrupt:
        logger.error(
            set_color("user aborted, dumping already found URL(s)", level=40))
        write_to_log_file(retval, URL_LOG_PATH, URL_FILENAME)
        logger.info(
            set_color("found a total of {} URL(s)".format(len(retval)),
                      level=25))
        shutdown()
    except Exception as e:
        logger.exception(
            set_color("Zeus ran into an unexpected error '{}'".format(e),
                      level=50))
        request_issue_creation()
        shutdown()

    if len(retval) > 0:
        logger.info(
            set_color(
                "a total of {} URL(s) found out of the requested {}".format(
                    len(retval), link_amount),
                level=25))
        file_path = write_to_log_file(retval, URL_LOG_PATH, URL_FILENAME)
        if show_success:
            amount_of_urls = len(open(file_path).readlines())
            success_rate = calculate_success(amount_of_urls)
            logger.info(
                set_color("provided query has a {} success rate".format(
                    success_rate)))
        return list(retval)
    else:
        logger.warning(
            set_color(
                "did not find any links with given query '{}' writing to blacklist"
                .format(query),
                level=30))
        write_to_log_file(query, BLACKLIST_FILE_PATH, BLACKLIST_FILENAME)
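
create_random_ip is assumed to return a dotted-quad string for the spoofed X-Forwarded-For chain. A trivial stand-in for experimentation (with no reserved-range filtering, unlike what a real implementation should do):

# naive stand-in for create_random_ip: four random octets
import random


def create_random_ip():
    return ".".join(str(random.randint(1, 254)) for _ in range(4))

print(create_random_ip())  # e.g. 203.41.7.92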
Example #10
def get_urls(query, url, verbose=False, **kwargs):
    """
      Bypass Google captchas and Google API by using selenium-webdriver to gather
      the Google URL. This will open a robot controlled browser window and attempt
      to get a URL from Google that will be used for scraping afterwards.
    """
    query = query.decode('unicode_escape').encode('utf-8')
    proxy, user_agent = kwargs.get("proxy",
                                   None), kwargs.get("user_agent", None)
    tor, tor_port = kwargs.get("tor", False), kwargs.get("tor_port", None)
    batch = kwargs.get("batch", False)
    xforward = kwargs.get("xforward", False)
    logger.info(set_color("setting up virtual display to hide the browser"))
    ff_display = Display(visible=0, size=(800, 600))
    ff_display.start()
    browser = var.search.SetBrowser(agent=user_agent,
                                    proxy=proxy,
                                    tor=tor,
                                    xforward=xforward).set_browser()
    logger.info(set_color("browser will open shortly", level=25))
    browser.get(url)
    if verbose:
        logger.debug(
            set_color(
                "searching search engine for the 'q' element (search button)",
                level=10))
    search = browser.find_element_by_name('q')
    logger.info(
        set_color("searching search engine using query '{}'".format(query)))
    try:
        # enter the text you want to search and hit enter
        search.send_keys(query)
        search.send_keys(Keys.RETURN)
        if not tor:
            time.sleep(3)
        else:
            logger.warning(
                set_color(
                    "sleep time has been increased to 10 seconds due to tor being used",
                    level=30))
            time.sleep(10)
    except ElementNotInteractableException:
        # get rid of the popup box and hit enter after entering the text to search
        browser.execute_script(
            "document.querySelectorAll('label.boxed')[1].click()")
        search.send_keys(query)
        search.send_keys(Keys.RETURN)
        time.sleep(3)
    except UnicodeDecodeError:
        logger.error(
            set_color(
                "your query '{}' appears to have unicode characters in it, selenium is not "
                "properly formatted to handle unicode characters, this dork will be skipped"
                .format(query),
                level=40))
    if verbose:
        logger.debug(set_color("obtaining URL from selenium"))
    try:
        retval = browser.current_url
    except UnexpectedAlertPresentException:
        logger.warning(set_color("alert present, closing", level=30))
        # discover the alert and close it before continuing
        alert = browser.switch_to.alert
        alert.accept()
        retval = browser.current_url
    # if you have been IP banned, we'll extract the URL from it
    if IP_BAN_REGEX.search(retval) is not None:
        logger.warning(
            set_color(
                "it appears that Google is attempting to block your IP address, attempting bypass",
                level=30))
        try:
            retval = URLParser(retval).extract_ip_ban_url()
            question_msg = (
                "zeus was able to successfully extract the URL from Google's ban URL "
                "it is advised to shutdown zeus and attempt to extract the URL's manually. "
                "failing to do so will most likely result in no results being found by zeus. "
                "would you like to shutdown")
            if not batch:
                do_continue = prompt(question_msg, opts="yN")
            else:
                do_continue = prompt(question_msg, opts="yN", default="y")

            # shutdown and write the URL to a file
            if not str(do_continue).lower().startswith("n"):
                write_to_log_file(retval, EXTRACTED_URL_LOG,
                                  EXTRACTED_URL_FILENAME)
                logger.info(
                    set_color(
                        "it is advised to extract the URL's from the produced URL written to the above "
                        "(IE open the log, copy the url into firefox)".format(
                            retval)))
                shutdown()
        except Exception as e:
            # stop all the random rogue processes, this isn't guaranteed to stop the processes
            # that's why we have the clean up script in case this fails
            browser.close()
            ff_display.stop()
            logger.exception(
                set_color(
                    "zeus was unable to extract the correct URL from the ban URL '{}', "
                    "got exception '{}'".format(unquote(retval), e),
                    level=50))
            request_issue_creation()
            shutdown()
    if verbose:
        logger.debug(
            set_color("found current URL from selenium browser", level=10))
    logger.info(set_color("closing the browser and continuing process.."))
    browser.close()
    ff_display.stop()
    return retval
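
The Xvfb virtual display above is one way to hide the browser; with a recent Selenium and geckodriver, Firefox can instead run headless directly. A sketch of that alternative (Zeus itself uses the virtual-display approach, so treat this as an assumption about your local setup):

# headless Firefox without Xvfb, assuming Selenium >= 3.8 and geckodriver
from selenium import webdriver
from selenium.webdriver.firefox.options import Options

options = Options()
options.add_argument("--headless")
browser = webdriver.Firefox(options=options)
browser.get("https://example.com")
print(browser.current_url)
browser.quit()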
Example #11
def parse_search_results(query, url_to_search, verbose=False, **kwargs):
    """
      Parse a webpage from Google for URLs with a GET (query) parameter
    """
    possible_leftovers = URLParser(None).possible_leftovers
    splitter = "&amp;"
    retval = set()
    query_url = None

    parse_webcache, pull_all = kwargs.get("parse_webcache", False), kwargs.get(
        "pull_all", False)
    proxy_string, user_agent = kwargs.get("proxy",
                                          None), kwargs.get("agent", None)
    forward_for = kwargs.get("forward_for", False)
    tor = kwargs.get("tor", False)
    batch = kwargs.get("batch", False)
    show_success = kwargs.get("show_success", False)

    if verbose:
        logger.debug(set_color("parsing blacklist", level=10))
    parse_blacklist(query, BLACKLIST_FILE_PATH, batch=batch)

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration",
                      level=10))

    if not parse_webcache and "google" in url_to_search:
        logger.warning(
            set_color(
                "will not parse webcache URL's (to parse webcache pass -W)",
                level=30))
    if not pull_all:
        logger.warning(
            set_color(
                "only pulling URLs with GET(query) parameters (to pull all URL's pass -E)",
                level=30))

    user_agent_info = "adjusting user-agent header to {}"
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}"
    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    elif tor:
        proxy_string = proxy_string_to_dict("socks5://127.0.0.1:9050")
        proxy_string_info = proxy_string_info.format("tor proxy settings")
    else:
        proxy_string_info = "no proxy configuration detected"

    if forward_for:
        ip_to_use = (create_random_ip(), create_random_ip(),
                     create_random_ip())
        if verbose:
            logger.debug(
                set_color(
                    "random IP addresses generated for headers '{}'".format(
                        ip_to_use),
                    level=10))

        headers = {
            HTTP_HEADER.CONNECTION:
            "close",
            HTTP_HEADER.USER_AGENT:
            user_agent,
            HTTP_HEADER.X_FORWARDED_FOR:
            "{}, {}, {}".format(ip_to_use[0], ip_to_use[1], ip_to_use[2])
        }
    else:
        headers = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: user_agent
        }
    logger.info(set_color("attempting to gather query URL"))
    try:
        query_url = get_urls(query,
                             url_to_search,
                             verbose=verbose,
                             user_agent=user_agent,
                             proxy=proxy_string,
                             tor=tor,
                             batch=batch,
                             xforward=forward_for)
    except Exception as e:
        if "'/usr/lib/firefoxdriver/webdriver.xpi'" in str(e):
            logger.fatal(
                set_color(
                    "firefox was not found in the default location on your system, "
                    "check your installation and make sure it is in /usr/lib, if you "
                    "find it there, restart your system and try again",
                    level=50))
        elif "connection refused" in str(e).lower():
            logger.fatal(
                set_color(
                    "there are to many sessions of firefox opened and selenium cannot "
                    "create a new one",
                    level=50))
            run_fix(
                "would you like to attempt to auto clean the open sessions",
                "sudo sh {}".format(CLEANUP_TOOL_PATH),
                "kill off the open sessions of firefox and re-run Zeus",
                exit_process=True)
        elif "Program install error!" in str(e):
            logger.error(
                set_color(
                    "seems the program is having some trouble installing would you like "
                    "to try and automatically fix this issue",
                    level=40))
            run_fix(
                "would you like to attempt to fix this issue automatically",
                "sudo sh {}".format(FIX_PROGRAM_INSTALL_PATH),
                "you can manually try and re-install Xvfb to fix the problem",
                exit_process=True)
        elif "Message: Reached error page:" in str(e):
            logger.fatal(
                set_color(
                    "geckodriver has hit an error that usually means it needs to be reinstalled",
                    level=50))
            question = prompt(
                "would you like to attempt a reinstallation of the geckodriver",
                opts="yN")
            if question.lower().startswith("y"):
                logger.warning(
                    set_color(
                        "rewriting all executed information, path information, and removing geckodriver",
                        level=30))
                rewrite_all_paths()
                logger.info(
                    set_color(
                        "all paths rewritten, you will be forced to re-install everything next run of Zeus"
                    ))
            else:
                logger.fatal(
                    set_color(
                        "you will need to remove the geckodriver from /usr/bin and reinstall it",
                        level=50))
                shutdown()
        elif "Unable to find a matching set of capabilities" in str(e):
            logger.fatal(
                set_color(
                    "it appears that firefox, selenium, and geckodriver are not playing nice with one another",
                    level=50))
            run_fix(
                "would you like to attempt to resolve this issue automatically",
                "sudo sh {}".format(REINSTALL_TOOL),
                ("you will need to reinstall firefox to a later version, update selenium, and reinstall the "
                 "geckodriver to continue using Zeus"),
                exit_process=True)
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file".format(
                        os.path.basename(__file__),
                        str(e).strip()),
                    level=50))
            request_issue_creation()
        shutdown()
    logger.info(
        set_color("URL successfully gathered, searching for GET parameters"))

    logger.info(set_color(proxy_string_info))

    try:
        req = requests.get(query_url, proxies=proxy_string, headers=headers)
    except ConnectionError:
        logger.warning(
            set_color(
                "target machine refused connection, delaying and trying again",
                level=30))
        time.sleep(3)
        req = requests.get(query_url, proxies=proxy_string, headers=headers)

    logger.info(set_color(user_agent_info))
    found_urls = URL_REGEX.findall(req.text)
    for urls in list(found_urls):
        for url in list(urls):
            url = unquote(url)
            if not any(u in url for u in URL_EXCLUDES):
                if not url == "http://" and not url == "https://":
                    if URL_REGEX.match(url):
                        if isinstance(url, unicode):
                            url = str(url).encode("utf-8")
                        if pull_all:
                            retval.add(url.split(splitter)[0])
                        else:
                            if URL_QUERY_REGEX.match(url.split(splitter)[0]):
                                retval.add(url.split(splitter)[0])
                        if verbose:
                            try:
                                logger.debug(
                                    set_color("found '{}'".format(
                                        url.split(splitter)[0]),
                                              level=10))
                            except TypeError:
                                logger.debug(
                                    set_color("found '{}'".format(
                                        str(url).split(splitter)[0]),
                                              level=10))
                            except AttributeError:
                                logger.debug(
                                    set_color("found '{}".format(str(url)),
                                              level=10))
                        if url is not None:
                            retval.add(url.split(splitter)[0])
    true_retval = set()
    for url in list(retval):
        if any(l in url for l in possible_leftovers):
            url = URLParser(url).strip_url_leftovers()
        if parse_webcache:
            if "webcache" in url:
                logger.info(set_color("found a webcache URL, extracting"))
                url = URLParser(url).extract_webcache_url()
                if verbose:
                    logger.debug(set_color("found '{}'".format(url), level=15))
                true_retval.add(url)
            else:
                true_retval.add(url)
        else:
            true_retval.add(url)

    if len(true_retval) != 0:
        file_path = write_to_log_file(true_retval, URL_LOG_PATH, URL_FILENAME)
        if show_success:
            amount_of_urls = len(open(file_path).readlines())
            success_rate = calculate_success(amount_of_urls)
            logger.info(
                set_color("provided query has a {} success rate".format(
                    success_rate)))
    else:
        logger.fatal(
            set_color(
                "did not find any URLs with given query '{}' writing query to blacklist"
                .format(query),
                level=50))
        write_to_log_file(query,
                          BLACKLIST_FILE_PATH,
                          BLACKLIST_FILENAME,
                          blacklist=True)
        shutdown()
    logger.info(
        set_color("found a total of {} URLs with given query '{}'".format(
            len(true_retval), query)))
    return list(true_retval) if len(true_retval) != 0 else None
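
URLParser(url).extract_webcache_url() recovers the original target from a Google cache link, which embeds it after a "cache:<digest>:" prefix. A hypothetical re-implementation of that idea (not the project's actual parser):

# rough sketch of webcache extraction; URL layout based on the usual
# webcache.googleusercontent.com/search?q=cache:<digest>:<url>+<query> form
try:
    from urllib import unquote          # Python 2
except ImportError:
    from urllib.parse import unquote    # Python 3


def extract_webcache_url(url):
    url = unquote(url)
    if "cache:" not in url:
        return None
    tail = url.split("cache:", 1)[1]
    if ":" in tail:
        # drop the digest portion preceding the embedded URL
        tail = tail.split(":", 1)[1]
    return "http://" + tail.split("+")[0]

print(extract_webcache_url(
    "http://webcache.googleusercontent.com/search?q=cache:AbC123:example.com/page.php%3Fid%3D1+&hl=en"
))  # -> http://example.com/page.php?id=1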
Example #12
def main_intel_amt(url, agent=None, proxy=None, **kwargs):
    """
    main attack method to be called
    """
    do_ip_address = kwargs.get("do_ip", False)
    proxy = proxy_string_to_dict(proxy) or None
    agent = agent or DEFAULT_USER_AGENT
    if do_ip_address:
        logger.warning(
            set_color(
                "running against IP addresses may result in the targets refusing the connection...",
                level=30))
        logger.info(
            set_color("will run against IP address instead of hostname..."))
        try:
            url = replace_http(url)
            url = socket.gethostbyname(url)
            logger.info(set_color("discovered IP address {}...".format(url)))
        except Exception as e:
            logger.error(
                set_color(
                    "failed to gather IP address from hostname '{}', received an error '{}'. "
                    "will just run against hostname...".format(url, e),
                    level=40))
    logger.info(
        set_color(
            "attempting to connect to '{}' and get hardware info...".format(
                url)))
    try:
        json_data = __get_hardware(url, agent=agent, proxy=proxy)
        if json_data is None:
            logger.error(
                set_color("unable to get any information, skipping...",
                          level=40))
        else:
            print("-" * 40)
            for key in json_data.keys():
                print("{}:".format(str(key).capitalize()))
                for item in json_data[key]:
                    print(" - {}: {}".format(item.capitalize(),
                                             json_data[key][item]))
            print("-" * 40)
    except requests.exceptions.ConnectionError as e:
        if "Max retries exceeded with url" in str(e):
            logger.error(
                set_color(
                    "failed connection, target machine is actively refusing the connection, skipping...",
                    level=40))
        else:
            logger.error(
                set_color("failed connection with '{}', skipping...".format(url),
                          level=40))
    except Exception as e:
        if "Temporary failure in name resolution" in str(e):
            logger.error(
                set_color("failed to connect on '{}', skipping...".format(url),
                          level=40))
        else:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue...".format(e)))
            request_issue_creation()
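
replace_http is assumed to strip the scheme (and any path) so socket.gethostbyname receives a bare hostname. A minimal stand-in showing the expected behavior:

# hypothetical stand-in for replace_http: scheme and path stripped away
def replace_http(url):
    url = url.split("://")[-1]
    return url.split("/")[0]

print(replace_http("https://example.com/admin.php"))  # -> example.com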
Example #13
def sqlmap_scan_main(url,
                     port=None,
                     verbose=None,
                     auto_search=False,
                     opts=None,
                     given_path=None,
                     full_path=None):
    """
    the main function that will be called to initialize everything
    """
    def ___dict_args():
        """
        build an argument dictionary from the (flag, value) tuples passed by the user
        """
        return {key: value for key, value in opts}

    if auto_search:
        lib.settings.logger.info(
            lib.settings.set_color(
                "attempting to find sqlmap on your system..."))
        path = ''.join(
            find_sqlmap(verbose=verbose, given_search_path=given_path))
        if path:
            subprocess.check_output(["python", path, "-s"])
    else:
        lib.settings.prompt(
            "start the sqlmap API server and press enter when ready...")
        try:
            sqlmap_scan = SqlmapHook(url, port=port)
            lib.settings.logger.info(
                lib.settings.set_color(
                    "initializing new sqlmap scan with given URL '{}'...".
                    format(url)))
            sqlmap_scan.init_new_scan()
            if verbose:
                lib.settings.logger.debug(
                    lib.settings.set_color("scan initialized...", level=10))
            lib.settings.logger.info(
                lib.settings.set_color("gathering sqlmap API scan ID..."))
            api_id = sqlmap_scan.get_scan_id()
            if verbose:
                lib.settings.logger.debug(
                    lib.settings.set_color(
                        "current sqlmap scan ID: '{}'...".format(api_id),
                        level=10))
            lib.settings.logger.info(
                lib.settings.set_color(
                    "starting sqlmap scan on url: '{}'...".format(url)))
            if opts:
                if verbose:
                    lib.settings.logger.debug(
                        lib.settings.set_color(
                            "using arguments: '{}'...".format(___dict_args()),
                            level=10))
                lib.settings.logger.info(
                    lib.settings.set_color(
                        "adding arguments to sqlmap API..."))
            else:
                if verbose:
                    lib.settings.logger.debug(
                        lib.settings.set_color(
                            "no arguments passed, skipping...", level=10))
            lib.settings.logger.warning(
                lib.settings.set_color(
                    "please keep in mind that this is the API, output will "
                    "not be saved to log file, it may take a little longer "
                    "to finish processing, and you will need to restart the sqlmap "
                    "API server after each iteration, launching sqlmap...",
                    level=30))
            sqlmap_scan.start_scan(api_id, opts=opts)
            print("-" * 30)
            sqlmap_scan.show_sqlmap_log(api_id)
            print("-" * 30)
        except requests.exceptions.HTTPError as e:
            lib.settings.logger.exception(
                lib.settings.set_color(
                    "ran into error '{}', seems you didn't start the server, check "
                    "the server port and try again...".format(e),
                    level=50))
        except Exception as e:
            if "HTTPConnectionPool(host='127.0.0.1'" in str(e):
                lib.settings.logger.error(
                    lib.settings.set_color(
                        "sqlmap API is not started, did you forget to start it? "
                        "You will need to open a new terminal, cd into sqlmap, and "
                        "run `python sqlmapapi.py -s` otherwise pass the correct flags "
                        "to auto start the API...",
                        level=40))
            else:
                lib.settings.logger.exception(
                    lib.settings.set_color(
                        "ran into error '{}', seems something went wrong, error has "
                        "been saved to current log file.".format(e),
                        level=50))
                lib.settings.fix_log_file()
                request_issue_creation()
Example #14
def check_for_admin_page(url, exts, protocol="http://", **kwargs):
    """
    bruteforce the admin page of given URL
    """
    verbose = kwargs.get("verbose", False)
    show_possibles = kwargs.get("show_possibles", False)
    possible_connections, connections = set(), set()
    stripped_url = lib.core.settings.replace_http(str(url).strip())
    for ext in exts:
        # each extension is loaded before this process begins, to save time;
        # the list is built once and passed in rather than loaded mid-loop
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            lib.core.settings.logger.debug(
                lib.core.settings.set_color("trying '{}'...".format(true_url),
                                            level=10))
        try:
            urlopen(true_url, timeout=5)
            lib.core.settings.logger.info(
                lib.core.settings.set_color(
                    "connected successfully to '{}'...".format(true_url),
                    level=25))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if verbose:
                if "Access Denied" in str(e):
                    lib.core.settings.logger.warning(
                        lib.core.settings.set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
                    possible_connections.add(true_url)
                else:
                    lib.core.settings.logger.error(
                        lib.core.settings.set_color(
                            "failed to connect got error code {}...".format(
                                data[2]),
                            level=40))
        except Exception as e:
            if verbose:
                if "<urlopen error timed out>" or "timeout: timed out" in str(
                        e):
                    lib.core.settings.logger.warning(
                        lib.core.settings.set_color(
                            "connection timed out assuming won't connect and skipping...",
                            level=30))
                else:
                    lib.core.settings.logger.exception(
                        lib.core.settings.set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connections(s) and {} successful connection(s)..."
    lib.core.settings.logger.info(
        lib.core.settings.set_color(
            data_msg.format(len(possible_connections), len(connections))))
    if len(connections) > 0:
        # create the connection tree if we got some connections
        lib.core.settings.logger.info(
            lib.core.settings.set_color("creating connection tree..."))
        lib.core.settings.create_tree(url, connections)
    else:
        lib.core.settings.logger.fatal(
            lib.core.settings.set_color(
                "did not receive any successful connections to the admin page of "
                "{}...".format(url),
                level=50))
    if show_possibles:
        if len(possible_connections) > 0:
            lib.core.settings.logger.info(
                lib.core.settings.set_color(
                    "creating possible connection tree..."))
            lib.core.settings.create_tree(url, possible_connections)
        else:
            lib.core.settings.logger.fatal(
                lib.core.settings.set_color(
                    "did not find any possible connections to {}'s "
                    "admin page".format(url),
                    level=50))
    if len(connections) > 0:
        lib.core.settings.logger.warning(
            lib.core.settings.set_color(
                "only writing successful connections to log file...",
                level=30))
        lib.core.common.write_to_log_file(
            list(connections), lib.core.settings.ADMIN_PAGE_FILE_PATH,
            lib.core.settings.ADMIN_PAGE_FILE_PATH.format(
                lib.core.settings.replace_http(url)))
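A minimal invocation sketch for the function above; the hostname and extension list are placeholders (in practice Zeus loads the extensions from a wordlist before the loop starts, per the comment in the function):

# hypothetical extension list; Zeus reads these from a wordlist in practice
admin_exts = ["/admin", "/admin.php", "/login", "/wp-admin"]

check_for_admin_page(
    "example.com",
    admin_exts,
    protocol="https://",
    verbose=True,
    show_possibles=True  # also record "access denied" hits as possible panels
)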
Example #15
def get_urls(query, url, verbose=False, warning=True, **kwargs):
    """
    Bypass Google captchas and Google API by using selenium-webdriver to gather
    the Google URL. This will open a robot-controlled browser window and attempt
    to get a URL from Google that will be used for scraping afterwards.
    """
    proxy, user_agent = kwargs.get("proxy", None), kwargs.get("user_agent", None)
    if verbose:
        logger.debug(set_color(
            "setting up the virtual display to hide the browser...", level=10
        ))
    ff_display = Display(visible=0, size=(800, 600))
    ff_display.start()
    logger.info(set_color(
        "firefox browser display will be hidden while it performs the query..."
    ))
    if warning:
        logger.warning(set_color(
            "your web browser will be automated in order for Zeus to successfully "
            "bypass captchas and API calls. this is done in order to grab the URL "
            "from the search and parse the results. please give selenium time to "
            "finish it's task...", level=30
        ))
    if verbose:
        logger.debug(set_color(
            "running selenium-webdriver and launching browser...", level=10
        ))

    if verbose:
        logger.debug(set_color(
            "adjusting selenium-webdriver user-agent to '{}'...".format(user_agent), level=10
        ))
    if proxy is not None:
        proxy_type = list(proxy.keys())
        proxy_to_use = Proxy({
            "proxyType": ProxyType.MANUAL,
            "httpProxy": proxy[proxy_type[0]],
            "ftpProxy": proxy[proxy_type[0]],
            "sslProxy": proxy[proxy_type[0]],
            "noProxy": ""
        })
        if verbose:
            logger.debug(set_color(
                "setting selenium proxy to '{}'...".format(
                    ''.join(proxy_type) + "://" + ''.join(proxy.values())
                ), level=10
            ))
    else:
        proxy_to_use = None

    profile = webdriver.FirefoxProfile()
    profile.set_preference("general.useragent.override", user_agent)
    browser = webdriver.Firefox(profile, proxy=proxy_to_use)
    logger.info(set_color("browser will open shortly..."))
    browser.get(url)
    if verbose:
        logger.debug(set_color(
            "searching search engine for the 'q' element (search button)...", level=10
        ))
    search = browser.find_element_by_name('q')
    logger.info(set_color(
        "searching '{}' using query '{}'...".format(url, query)
    ))
    try:
        search.send_keys(query)
        search.send_keys(Keys.RETURN)  # hit return after you enter search text
        time.sleep(3)
    except ElementNotInteractableException:
        browser.execute_script("document.querySelectorAll('label.boxed')[1].click()")
        search.send_keys(query)
        search.send_keys(Keys.RETURN)  # hit return after you enter search text
        time.sleep(3)
    if verbose:
        logger.debug(set_color(
            "obtaining URL from selenium..."
        ))
    try:
        retval = browser.current_url
    except UnexpectedAlertPresentException:
        logger.warning(set_color(
            "alert present, closing...", level=30
        ))
        alert = browser.switch_to.alert
        alert.accept()
        retval = browser.current_url
    ban_url_schema = ["http://ipv6.google.com", "http://ipv4.google.com"]
    if any(u in retval for u in ban_url_schema):  # if you got IP banned
        logger.warning(set_color(
            "it appears that Google is attempting to block your IP address, attempting bypass...", level=30
        ))
        try:
            retval = bypass_ip_block(retval)
            do_continue = prompt(
                "zeus was able to successfully extract the URL from Google's ban URL "
                "it is advised to shutdown zeus and attempt to extract the URL's manually. "
                "failing to do so will most likely result in no results being found by zeus. "
                "would you like to shutdown", opts="yN"
            )
            if not str(do_continue).lower().startswith("n"):  # shutdown and write the URL to a file
                write_to_log_file(retval, EXTRACTED_URL_LOG, "extracted-url-{}.log")
                logger.info(set_color(
                    "it is advised to use the built in blackwidow crawler with the extracted URL "
                    "(IE -b '{}')".format(retval)
                ))
                shutdown()
        except Exception as e:
            browser.close()  # stop all the random rogue processes
            ff_display.stop()
            logger.exception(set_color(
                "zeus was unable to extract the correct URL from the ban URL '{}', "
                "got exception '{}'...".format(
                    unquote(retval), e
                ), level=50
            ))
            request_issue_creation()
            shutdown()
    if verbose:
        logger.debug(set_color(
            "found current URL from selenium browser...", level=10
        ))
    logger.info(set_color(
        "closing the browser and continuing process.."
    ))
    browser.close()
    ff_display.stop()
    return retval
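A hedged usage sketch for get_urls; the query, search URL, user agent, and proxy are all placeholders (the proxy, when given, is a one-entry dict mapping the scheme to host:port, which is what the proxy.keys() handling above expects):

result_url = get_urls(
    "inurl:index.php?id=",              # example dork typed into the search box
    "https://www.google.com/",          # engine the automated browser drives
    verbose=True,
    user_agent="Mozilla/5.0 (X11; Linux x86_64)",
    proxy=None                          # or e.g. {"socks5": "127.0.0.1:9050"}
)
print(result_url)  # the post-search URL consumed by the later scraping steps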
Example #16
def detect_protection(url, **kwargs):
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)

    if xforward:
        ip_list = (create_random_ip(), create_random_ip(), create_random_ip())
        headers = {
            HTTP_HEADER.CONNECTION:
            "close",
            HTTP_HEADER.USER_AGENT:
            agent,
            HTTP_HEADER.X_FORWARDED_FOR:
            "{}, {}, {}".format(ip_list[0], ip_list[1], ip_list[2])
        }
    else:
        headers = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: agent
        }

    url = "{} {}".format(url.strip(), PROTECTION_CHECK_PAYLOAD)

    if verbose:
        logger.debug(
            set_color("attempting connection to '{}'...".format(url),
                      level=10))
    try:
        protection_check_req = requests.get(
            url,
            headers=headers,
            proxies=proxy_string_to_dict(proxy),
            timeout=20)

        html, status, headers = protection_check_req.content, protection_check_req.status_code, protection_check_req.headers

        for dbms in DBMS_ERRORS:  # make sure there are no DBMS errors in the HTML
            for regex in DBMS_ERRORS[dbms]:
                if re.compile(regex).search(html) is not None:
                    logger.info(
                        set_color(
                            "it appears that the WAF/IDS/IPS check threw a DBMS error and may be vulnerable "
                            "to SQL injection attacks. it appears the backend DBMS is '{}'..."
                            .format(dbms),
                            level=25))
                    return None

        retval = []
        if status != 200 and "not found" not in html.lower():
            file_list = [
                f for f in os.listdir(DETECT_FIREWALL_PATH)
                if not any(ex in f for ex in ["__init__", ".pyc"])
            ]
            for item in file_list:
                item = item[:-3]
                detection_name = "lib.firewall.{}"
                detection_name = detection_name.format(item)
                detection_name = importlib.import_module(detection_name)
                if detection_name.detect(html, headers=headers, status=status):
                    retval.append(detection_name.__item__)
            if len(retval) > 1:
                if "Generic (Unknown)" in retval:
                    item = retval.index("Generic (Unknown)")
                    del retval[item]
            elif len(retval) == 1:
                if retval[0] == "Generic (Unknown)":
                    logger.warning(
                        set_color(
                            "identified WAF/IDS/IPS is unknown to Zeus, if you know the firewall and the context "
                            "of the firewall, please create an issue ({}), fingerprint of the firewall will be "
                            "written to a log file...".format(ISSUE_LINK),
                            level=30))
                    full_finger_print = "HTTP/1.1 {}\n{}\n{}".format(
                        status, headers, html)
                    write_to_log_file(
                        full_finger_print, UNKNOWN_FIREWALL_FINGERPRINT_PATH,
                        UNKNOWN_FIREWALL_FILENAME.format(replace_http(url)))
        else:
            retval = None

        return ''.join(retval) if isinstance(retval, list) else retval

    except Exception as e:
        if "Read timed out." or "Connection reset by peer" in str(e):
            logger.warning(
                set_color(
                    "detection request timed out, assuming no protection and continuing...",
                    level=30))
            return None
        else:
            logger.exception(
                set_color(
                    "Zeus ran into an unexpected error '{}'...".format(e),
                    level=50))
            request_issue_creation()
            return None
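Each lib.firewall script imported in the loop above is expected to expose an __item__ product string and a detect(html, headers=..., status=...) callable returning a boolean. A minimal sketch of such a module, with an invented header name and block-page marker:

# lib/firewall/example_waf.py -- hypothetical detection script
import re

__item__ = "Example WAF (Example Inc.)"


def detect(html, **kwargs):
    headers = kwargs.get("headers", {})
    status = kwargs.get("status", 0)
    # a telltale response header is the strongest signal
    if any(h.lower() == "x-example-waf" for h in headers):
        return True
    # otherwise look for a block-page marker in non-200 responses
    return status != 200 and re.search(r"blocked by Example WAF", html or "") is not None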
Example #17
def main_header_check(url, **kwargs):
    """
    main function
    """
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)
    identify = kwargs.get("identify", True)

    protection = {"hostname": url}
    definition = {
        "x-xss": ("protection against XSS attacks", "XSS"),
        "strict-transport": ("protection against unencrypted connections (force HTTPS connection)", "HTTPS"),
        "x-frame": ("protection against clickjacking vulnerabilities", "CLICKJACKING"),
        "x-content": ("protection against MIME type attacks", "MIME"),
        "x-csrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "x-xsrf": ("protection against Cross-Site Forgery attacks", "CSRF"),
        "public-key": ("protection to reduce success rates of MITM attacks", "MITM"),
        "content-security": ("header protection against multiple attack types", "ALL")
    }

    try:
        if identify:
            logger.info(set_color(
                "checking if target URL is protected by some kind of WAF/IPS/IDS..."
            ))
            identified = detect_protection(url, proxy=proxy, agent=agent, verbose=verbose, xforward=xforward)

            if identified is None:
                logger.info(set_color(
                    "no WAF/IDS/IPS has been identified on target URL...", level=25
                ))
            else:
                logger.warning(set_color(
                    "the target URL WAF/IDS/IPS has been identified as '{}'...".format(identified), level=35
                ))

        if verbose:
            logger.debug(set_color(
                "loading XML data...", level=10
            ))
        comparable_headers = load_xml_data(HEADER_XML_DATA)
        logger.info(set_color(
            "attempting to get request headers for '{}'...".format(url.strip())
        ))
        try:
            found_headers = load_headers(url, proxy=proxy, agent=agent, xforward=xforward)
        except (ConnectionError, Exception) as e:
            if "Read timed out." or "Connection reset by peer" in str(e):
                found_headers = None
            else:
                logger.exception(set_color(
                    "Zeus has hit an unexpected error and cannot continue '{}'...".format(e), level=50
                ))
                request_issue_creation()

        if found_headers is not None:
            if verbose:
                logger.debug(set_color(
                    "fetched {}...".format(found_headers), level=10
                ))
            headers_established = [str(h) for h in compare_headers(found_headers, comparable_headers)]
            for key in definition:
                if any(key in h.lower() for h in headers_established):
                    logger.warning(set_color(
                        "provided target has {}...".format(definition[key][0]), level=30
                    ))
            for key in found_headers:
                protection[key] = found_headers[key]
            logger.info(set_color(
                "writing found headers to log file...", level=25
            ))
            return write_to_log_file(protection, HEADER_RESULT_PATH, HEADERS_FILENAME.format(replace_http(url)))
        else:
            logger.error(set_color(
                "unable to retrieve headers for site '{}'...".format(url.strip()), level=40
            ))
    except KeyboardInterrupt:
        if not pause():
            shutdown()
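The keys of the definition table above are prefixes matched case-insensitively against the header names returned by compare_headers; a quick self-contained check of that pairing (real header names, matching logic mirrored from the loop):

examples = {
    "x-xss": "X-XSS-Protection",
    "strict-transport": "Strict-Transport-Security",
    "x-frame": "X-Frame-Options",
    "x-content": "X-Content-Type-Options",
    "content-security": "Content-Security-Policy",
}
for key, header in examples.items():
    assert key in header.lower()  # mirrors the `key in h.lower()` test in the loop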
Example #18
def check_for_admin_page(url,
                         exts,
                         protocol="http://",
                         show_possibles=False,
                         verbose=False):
    possible_connections, connections = set(), set()
    stripped_url = replace_http(url.strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url),
                                   level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if verbose:
                if "Access Denied" in str(e):
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
                    possible_connections.add(true_url)
                else:
                    logger.error(
                        set_color(
                            "failed to connect got error code {}...".format(
                                data[2]),
                            level=40))
        except Exception as e:
            if verbose:
                if "<urlopen error timed out>" or "timeout: timed out" in str(
                        e):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    fix_log_file()
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connections(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections),
                                  len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        logger.fatal(
            set_color(
                "did not find any successful connections to {}'s "
                "admin page",
                level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page",
                    level=50))
Example #19
def perform_port_scan(url, scanner=NmapHook, **kwargs):
    """
    main function that will initialize the port scanning
    """
    verbose = kwargs.get("verbose", False)
    opts = kwargs.get("opts", None)
    timeout_time = kwargs.get("timeout", None)

    if timeout_time is None:
        timeout_time = 120

    with lib.core.decorators.TimeOut(seconds=timeout_time):
        lib.core.settings.logger.warning(
            lib.core.settings.set_color(
                "if the port scan is not completed in {}(m) it will timeout..."
                .format(lib.core.settings.convert_to_minutes(timeout_time)),
                level=30))
        url = url.strip()
        lib.core.settings.logger.info(
            lib.core.settings.set_color(
                "attempting to find IP address for hostname '{}'...".format(
                    url)))
        found_ip_address = socket.gethostbyname(url)
        lib.core.settings.logger.info(
            lib.core.settings.set_color(
                "found IP address for given URL -> '{}'...".format(
                    found_ip_address),
                level=25))
        if verbose:
            lib.core.settings.logger.debug(
                lib.core.settings.set_color(
                    "checking for nmap on your system...", level=10))
        nmap_exists = "".join(find_nmap())
        if nmap_exists:
            if verbose:
                lib.core.settings.logger.debug(
                    lib.core.settings.set_color(
                        "nmap has been found under '{}'...".format(
                            nmap_exists),
                        level=10))
            lib.core.settings.logger.info(
                lib.core.settings.set_color(
                    "starting port scan on IP address '{}'...".format(
                        found_ip_address)))
            try:
                data = scanner(found_ip_address, opts=opts)
                json_data = data.get_all_info()
                data.show_open_ports(json_data)
                file_path = data.send_to_file(json_data)
                lib.core.settings.logger.info(
                    lib.core.settings.set_color(
                        "port scan completed, all data saved to JSON file under '{}'..."
                        .format(file_path)))
            except KeyError:
                lib.core.settings.logger.fatal(
                    lib.core.settings.set_color(
                        "no port information found for '{}({})'...".format(
                            url, found_ip_address),
                        level=50))
            except KeyboardInterrupt:
                if not lib.core.common.pause():
                    lib.core.common.shutdown()
            except lib.core.errors.PortScanTimeOutException:
                lib.core.settings.logger.error(
                    lib.core.settings.set_color(
                        "port scan is taking to long and has hit the timeout, you "
                        "can increase this time by passing the --time-sec flag (IE "
                        "--time-sec 300)...",
                        level=40))
            except Exception as e:
                lib.core.settings.logger.exception(
                    lib.core.settings.set_color(
                        "ran into exception '{}', cannot continue quitting...".
                        format(e),
                        level=50))
                request_issue_creation()
                pass
        else:
            lib.core.settings.logger.fatal(
                lib.core.settings.set_color(
                    "nmap was not found on your system...", level=50))
            lib.core.common.run_fix(
                "would you like to automatically install it",
                "sudo sh {}".format(lib.core.settings.NMAP_INSTALLER_TOOL),
                "nmap is not installed, please install it in order to continue..."
            )
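A minimal call sketch for the function above (hostname and timeout value are placeholders; per the error message in the handler, the timeout keyword corresponds to the --time-sec flag):

perform_port_scan(
    "example.com",    # hostname to resolve and scan
    verbose=True,
    opts=None,        # extra nmap flags, gathered elsewhere by Zeus
    timeout=300       # seconds; equivalent to passing --time-sec 300
)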
Example #20
def sqlmap_scan_main(url,
                     port=None,
                     verbose=None,
                     opts=None,
                     auto_start=False):
    """
    the main function that will be called and initialize everything
    """

    is_started = lib.core.settings.search_for_process("sqlmapapi.py")
    found_path = find_sqlmap()

    if auto_start:
        lib.core.settings.logger.info(
            lib.core.settings.set_color("attempting to launch sqlmap API"))
        sqlmap_api_command = shlex.split("sudo sh {} p {}".format(
            lib.core.settings.LAUNCH_SQLMAP_API_TOOL, "".join(found_path)))
        subprocess.Popen(sqlmap_api_command, stdout=subprocess.PIPE)
        if is_started:
            lib.core.settings.logger.info(
                lib.core.settings.set_color(
                    "sqlmap API is up and running, continuing process"))
        else:
            lib.core.settings.logger.error(
                lib.core.settings.set_color(
                    "there was a problem starting sqlmap API", level=40))
            lib.core.common.prompt(
                "manually start the API and press enter when ready")
    else:
        if not is_started:
            lib.core.common.prompt(
                "sqlmap API is not started, start it and press enter to continue"
            )
    try:
        sqlmap_scan = SqlmapHook(url, port=port)
        lib.core.settings.logger.info(
            lib.core.settings.set_color(
                "initializing new sqlmap scan with given URL '{}'".format(
                    url)))
        sqlmap_scan.init_new_scan()
        if verbose:
            lib.core.settings.logger.debug(
                lib.core.settings.set_color("scan initialized", level=10))
        lib.core.settings.logger.info(
            lib.core.settings.set_color("gathering sqlmap API scan ID"))
        api_id = sqlmap_scan.get_scan_id()
        if verbose:
            lib.core.settings.logger.debug(
                lib.core.settings.set_color(
                    "current sqlmap scan ID: '{}'".format(api_id), level=10))
        lib.core.settings.logger.info(
            lib.core.settings.set_color(
                "starting sqlmap scan on url: '{}'".format(url), level=25))
        if opts:
            if verbose:
                lib.core.settings.logger.debug(
                    lib.core.settings.set_color(
                        "using arguments: '{}'".format(opts), level=10))
            lib.core.settings.logger.info(
                lib.core.settings.set_color("adding arguments to sqlmap API"))
        else:
            if verbose:
                lib.core.settings.logger.debug(
                    lib.core.settings.set_color(
                        "no arguments passed, skipping", level=10))
        lib.core.settings.logger.warning(
            lib.core.settings.set_color(
                "please keep in mind that this is the API, output will "
                "not be saved to log file, it may take a little longer "
                "to finish processing, launching sqlmap",
                level=30))
        sqlmap_scan.start_scan(api_id, opts=opts)
        print("-" * 30)
        sqlmap_scan.show_sqlmap_log(api_id)
        print("-" * 30)
    except requests.exceptions.HTTPError as e:
        lib.core.settings.logger.exception(
            lib.core.settings.set_color(
                "ran into error '{}', seems you didn't start the server, check "
                "the server port and try again".format(e),
                level=50))
        pass
    except KeyboardInterrupt:
        if not lib.core.common.pause():
            lib.core.common.shutdown()
    except Exception as e:
        if "HTTPConnectionPool(host='127.0.0.1'" in str(e):
            lib.core.settings.logger.error(
                lib.core.settings.set_color(
                    "sqlmap API is not started, did you forget to start it? "
                    "You will need to open a new terminal, cd into sqlmap, and "
                    "run `python sqlmapapi.py -s` otherwise pass the correct flags "
                    "to auto start the API",
                    level=40))
            pass
        else:
            lib.core.settings.logger.exception(
                lib.core.settings.set_color(
                    "ran into error '{}', seems something went wrong, error has "
                    "been saved to current log file.".format(e),
                    level=50))
            request_issue_creation()
            pass
Example #21
                    "the provided proxy is not valid, specify the protocol and try again, supported "
                    "proxy protocols are {} (IE socks5://127.0.0.1:9050)".format(
                        ", ".join(list(supported_proxy_types))), level=50
                ))
            except Exception as e:
                if "Permission denied:" in str(e):
                    logger.fatal(set_color(
                        "your permissions are not allowing Zeus to run, "
                        "try running Zeus with sudo", level=50
                    ))
                    shutdown()
                else:
                    logger.exception(set_color(
                        "ran into exception '{}'".format(e), level=50
                    ))
                request_issue_creation()
                pass

            __run_attacks_main()

        # search multiple pages of Google
        elif (opt.dorkToUse is not None or opt.useRandomDork) and opt.searchMultiplePages:
            if opt.dorkToUse is not None:
                dork_to_use = opt.dorkToUse
            elif opt.useRandomDork:
                dork_to_use = get_random_dork()
            else:
                dork_to_use = None

            if dork_to_use is None:
                logger.warning(set_color(
Example #22
def detect_protection(url, **kwargs):
    verbose = kwargs.get("verbose", False)
    agent = kwargs.get("agent", None)
    proxy = kwargs.get("proxy", None)
    xforward = kwargs.get("xforward", False)

    if xforward:
        ip_list = (
            create_random_ip(),
            create_random_ip(),
            create_random_ip()
        )
        headers = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: agent,
            HTTP_HEADER.X_FORWARDED_FOR: "{}, {}, {}".format(ip_list[0], ip_list[1], ip_list[2])
        }
    else:
        headers = {
            HTTP_HEADER.CONNECTION: "close",
            HTTP_HEADER.USER_AGENT: agent
        }

    url = "{} {}".format(url.strip(), PROTECTION_CHECK_PAYLOAD)

    if verbose:
        logger.debug(set_color(
            "attempting connection to '{}'...".format(url), level=10
        ))
    try:
        protection_check_req = requests.get(
            url, headers=headers, proxies=proxy_string_to_dict(proxy), timeout=20
        )

        html, status, headers = protection_check_req.content, protection_check_req.status_code, protection_check_req.headers

        for dbms in DBMS_ERRORS:  # make sure there are no DBMS errors in the HTML
            for regex in DBMS_ERRORS[dbms]:
                if re.compile(regex).search(html) is not None:
                    logger.info(set_color(
                        "it appears that the WAF/IDS/IPS check threw a DBMS error and may be vulnerable "
                        "to SQL injection attacks. it appears the backend DBMS is '{}'...".format(dbms), level=25
                    ))
                    return None

        retval = []
        file_list = [f for f in os.listdir(DETECT_FIREWALL_PATH) if not any(ex in f for ex in ["__init__", ".pyc"])]
        for item in file_list:
            item = item[:-3]
            if verbose:
                logger.debug(set_color(
                    "loading script '{}'...".format(item), level=10
                ))
            detection_name = "lib.firewall.{}"
            detection_name = detection_name.format(item)
            detection_name = importlib.import_module(detection_name)
            if detection_name.detect(html, headers=headers, status=status) is True:
                retval.append(detection_name.__item__)
        if len(retval) != 0:
            if len(retval) >= 2:
                try:
                    del retval[retval.index("Generic (Unknown)")]
                except ValueError:
                    logger.warning(set_color(
                        "multiple firewalls identified ({}), displaying most likely...".format(
                            ", ".join(retval)
                        ), level=30
                    ))
                    del retval[retval.index(retval[1])]
            if retval[0] == "Generic (Unknown)":
                logger.warning(set_color(
                    "discovered firewall is unknown to Zeus, saving fingerprint to file. "
                    "if you know the details or the context of the firewall please create "
                    "an issue with the fingerprint, or a pull request with the script...", level=30
                ))
                fingerprint = "<!---\nStatus: {}\nHeaders: {}\n--->\n{}".format(
                    status, headers, html
                )
                write_to_log_file(fingerprint, UNKNOWN_FIREWALL_FINGERPRINT_PATH, UNKNOWN_FIREWALL_FILENAME)
            return "".join(retval) if isinstance(retval, list) else retval
        else:
            return None

    except Exception as e:
        if "Read timed out." or "Connection reset by peer" in str(e):
            logger.warning(set_color(
                "detection request failed, assuming no protection and continuing...", level=30
            ))
            return None
        else:
            logger.exception(set_color(
                "Zeus ran into an unexpected error '{}'...".format(e), level=50
            ))
            request_issue_creation()
            return None