Example #1
0
    def __run_attacks(
            url, sqlmap=False, nmap=False, intel=False, xss=False,
            verbose=False, admin=False, given_path=None, auto=False, batch=False
    ):
        """
        launch whichever single attack was requested against the discovered
        URL, asking for confirmation first unless batch mode is enabled
        """
        # batch mode auto-accepts every URL without prompting
        if batch:
            question = "y"
        else:
            question = prompt(
                "would you like to process found URL: '{}'".format(url), opts=["y", "N"]
            )

        # anything that is not an explicit "y" skips this URL
        if not question.lower().startswith("y"):
            logger.warning(set_color(
                "skipping '{}'...".format(url), level=30
            ))
            return None

        if sqlmap:
            return sqlmap_scan.sqlmap_scan_main(
                url.strip(), verbose=verbose,
                opts=__create_arguments(sqlmap=True),
                auto_search=auto, given_path=given_path
            )
        if nmap:
            return nmap_scan.perform_port_scan(
                replace_http(url.strip()), verbose=verbose,
                opts=__create_arguments(nmap=True)
            )
        if intel:
            return intel_me.main_intel_amt(
                get_true_url(url), agent=agent_to_use, proxy=proxy_to_use
            )
        if admin:
            main(url, show=opt.showAllConnections, verbose=verbose)
        elif xss:
            main_xss(
                url, verbose=verbose, proxy=proxy_to_use,
                agent=agent_to_use, tamper=opt.tamperXssPayloads
            )
Example #2
0
 def __choose_attack(choice, attacks):
     """
     validate a menu selection against the list of available attacks

     :param choice: the user's selection, coercible to int
     :param attacks: the sequence of available attacks
     :return: the selection as an integer index into ``attacks``
     :raises ValueError: when the selection is out of range; the old
         ``while True`` loop re-tested the same unchanged value and spun
         forever, spamming the warning, since ``choice`` could never change
     """
     selection = int(choice)
     if 0 <= selection < len(attacks):
         return selection
     logger.warning(
         set_color("{} is not a valid choice...".format(choice)))
     raise ValueError("invalid attack choice: {}".format(choice))
Example #3
0
    def __run_attacks(url,
                      sqlmap=False,
                      verbose=False,
                      nmap=False,
                      given_path=None,
                      auto=False,
                      batch=False):
        """
        dispatch the requested scan (sqlmap or nmap) for a discovered URL,
        prompting for confirmation first unless running in batch mode
        """
        # batch mode auto-accepts every URL
        question = "y" if batch else prompt(
            "would you like to process found URL: '{}'".format(url),
            opts=["y", "N"])

        if not question.lower().startswith("y"):
            logger.warning(set_color("skipping '{}'...".format(url)))
            return None

        target = url.strip()
        if sqlmap:
            return sqlmap_scan.sqlmap_scan_main(
                target,
                verbose=verbose,
                opts=__create_sqlmap_arguments(),
                auto_search=auto,
                given_path=given_path)
        if nmap:
            return nmap_scan.perform_port_scan(replace_http(target),
                                               verbose=verbose)
def tamper(payload, warning=True, **kwargs):
    """
    Tamper a payload by base64 encoding it.

    :param payload: the payload to encode; ``str`` input is UTF-8 encoded
        first so the call also works on Python 3, where ``b64encode``
        requires a bytes-like object
    :param warning: when True, log a note that base64 tampering may hide
        otherwise-detectable vulnerabilities
    :return: the base64 encoded payload (bytes)
    """
    if warning:
        logger.warning(
            set_color(
                "base64 tamper scripts may increase the possibility of not finding vulnerabilities "
                "in otherwise vulnerable sites...",
                level=30))
    # b64encode raises TypeError on str under Python 3; encode transparently
    if not isinstance(payload, bytes):
        payload = payload.encode("utf-8")
    return base64.b64encode(payload)
Example #5
0
def __tamper_warnings(script):
    """
    emit a warning about the side effects of the selected tamper script

    :param script: name of the tamper script in use ("hex", "base64", ...)
    """
    warn_msg = ""
    if script == "hex":
        warn_msg += "hex tamper script may increase risk of false positives..."
    elif script == "base64":  # was " base64" -- the leading space made this branch unreachable
        warn_msg += "base64 tamper script may increase risk of not finding a vulnerability..."
    # only log when there is something to say; previously an empty warning
    # line was emitted for every unrecognized script name
    if warn_msg:
        logger.warning(set_color(warn_msg, level=30))
Example #6
0
def tamper(payload, warning=True, **kwargs):
    """
    Tamper a payload by hashing it and rendering the hash as hex.

    :param payload: the payload to obfuscate (must be hashable)
    :param warning: when True, log a note about false positives
    :return: hex string of ``hash(payload)`` with any leading minus sign
        removed

    NOTE(review): ``hash()`` of str is randomized per process on Python 3
    (PYTHONHASHSEED), so output is not stable across runs -- confirm that
    is acceptable for this tamper script.
    """
    if warning:
        logger.warning(set_color(
            "hex tamper scripts may increase the risk of false positives...", level=30
        ))
    retval = hex(hash(payload))
    # negative hashes produce a leading '-'; strip only that sign.
    # the old slice retval[1:-1] also chopped off the final hex digit,
    # corrupting the value (e.g. '-0x7b' became '0x7')
    if retval.startswith("-"):
        return retval[1:]
    return retval
Example #7
0
    def __run_attacks(
            url, sqlmap=False, nmap=False, intel=False, xss=False,
            verbose=False, admin=False, given_path=None, auto=False, batch=False
    ):
        """
        run the single requested attack against a discovered URL; bails out
        through shutdown() if more than one attack type was enabled at once
        """
        # map of attack name -> whether the user enabled it on the CLI
        __enabled_attacks = {
            "sqlmap": opt.runSqliScan,
            "port": opt.runPortScan,
            "xss": opt.runXssScan,
            "admin": opt.adminPanelFinder,
            "intel": opt.intelCheck
        }

        enabled = set()
        for name, is_on in __enabled_attacks.items():
            if is_on is True:
                enabled.add(name)
            if len(enabled) > 1:
                logger.error(set_color(
                    "it appears that you have enabled multiple attack types, "
                    "as of now only 1 attack is supported at a time, choose "
                    "your attack and try again. You can use the -f flag if "
                    "you do not want to complete an entire search again...", level=40
                ))
                shutdown()

        # batch mode auto-accepts every URL without prompting
        question = "y" if batch else prompt(
            "would you like to process found URL: '{}'".format(url), opts=["y", "N"]
        )

        if not question.lower().startswith("y"):
            logger.warning(set_color(
                "skipping '{}'...".format(url), level=30
            ))
            return None

        if sqlmap:
            return sqlmap_scan.sqlmap_scan_main(
                url.strip(), verbose=verbose, opts=__create_arguments(sqlmap=True),
                auto_search=auto, given_path=given_path
            )
        if nmap:
            return nmap_scan.perform_port_scan(
                replace_http(url.strip()), verbose=verbose,
                opts=__create_arguments(nmap=True)
            )
        if intel:
            return intel_me.main_intel_amt(
                get_true_url(url), agent=agent_to_use, proxy=proxy_to_use
            )
        if admin:
            main(url, show=opt.showAllConnections, verbose=verbose)
        elif xss:
            main_xss(
                url, verbose=verbose, proxy=proxy_to_use,
                agent=agent_to_use, tamper=opt.tamperXssPayloads
            )
Example #8
0
 def __create_sqlmap_arguments():
     """
     create the sqlmap arguments (a list of tuples) that will be passed
     to the sqlmap API

     :return: list of ``(option, value)`` tuples recognized by the API;
         options unknown to the API are skipped with a warning
     """
     retval = []
     if opt.sqlmapArguments is not None:
         for line in opt.sqlmapArguments.split(","):
             to_use = line.strip().split(" ")
             # a value-less option such as "--batch" used to raise
             # IndexError on to_use[1]; pair it with True so it still
             # reaches the API as a flag
             if len(to_use) > 1:
                 option = (to_use[0], to_use[1])
             else:
                 option = (to_use[0], True)
             if to_use[0] in SQLMAP_API_OPTIONS:
                 retval.append(option)
             else:
                 logger.warning(
                     set_color(
                         "option '{}' is not recognized by sqlmap API, skipping..."
                         .format(option[0]),
                         level=30))
     return retval
Example #9
0
 def __create_arguments(sqlmap=False, nmap=False):
     """
     create the arguments (a list) that will be passed to the requested API

     :param sqlmap: build sqlmap arguments from ``opt.sqlmapArguments``
         (comma separated ``option value`` pairs)
     :param nmap: build nmap arguments from ``opt.nmapArguments``
         (pipe separated flags)
     :return: ``(option, value)`` tuples for sqlmap, raw argument strings
         for nmap; unknown options are skipped with a warning
     """
     logger.info(set_color(
         "creating arguments for {}...".format("sqlmap" if sqlmap else "nmap")
     ))
     retval = []
     splitter = {"sqlmap": ",", "nmap": "|"}
     if sqlmap:
         if opt.sqlmapArguments is not None:
             for line in opt.sqlmapArguments.split(splitter["sqlmap"]):
                 to_use = line.strip().split(" ")
                 # value-less options ("--batch") previously raised
                 # IndexError on to_use[1]; treat them as boolean flags
                 option = (to_use[0], to_use[1]) if len(to_use) > 1 else (to_use[0], True)
                 if to_use[0] in SQLMAP_API_OPTIONS:
                     retval.append(option)
                 else:
                     logger.warning(set_color(
                         "option '{}' is not recognized by sqlmap API, skipping...".format(option[0]),
                         level=30
                     ))
     elif nmap:
         warning_msg = "option {} is not known by the nmap api, skipping..."
         if opt.nmapArguments is not None:
             for line in opt.nmapArguments.split(splitter["nmap"]):
                 try:
                     data = line.index(" ")
                 except ValueError:
                     # str.index raises ValueError when no space exists;
                     # the old bare except also hid real errors
                     data = None
                 if data is not None:
                     argument = line[0:data]
                     if argument in NMAP_API_OPTS:
                         retval.append(line)
                     else:
                         logger.warning(set_color(
                             warning_msg.format(argument), level=30
                         ))
                 else:
                     if line in NMAP_API_OPTS:
                         retval.append(line)
                     else:
                         logger.warning(set_color(
                             warning_msg.format(line), level=30
                         ))
     return retval
Example #10
0
                else:
                    logger.fatal(set_color(
                        "failed to provide an attack argument, attack argument must be provided "
                        "for Zeus to attack the provided URL's", level=50
                    ))

        else:
            logger.critical(set_color(
                "failed to provide a mandatory argument, you will be redirected to the help menu...", level=50
            ))
            time.sleep(2)
            subprocess.call("python zeus.py --help", shell=True)

    except KeyboardInterrupt:
        logger.error(set_color(
            "user aborted process...", level=40
        ))
    except UnboundLocalError:
        logger.warning(set_color(
            "do not interrupt the browser when selenium is running, "
            "it will cause Zeus to crash...", level=30
        ))
    except Exception as e:
        logger.exception(set_color(
            "ran into exception '{}' exception has been saved to log file...".format(e), level=50
        ))
        fix_log_file()
        request_issue_creation()

    fix_log_file()
shutdown()
Example #11
0
def get_urls(query,
             url,
             verbose=False,
             warning=True,
             user_agent=None,
             proxy=None,
             **kwargs):
    """
      Bypass Google captchas and Google API by using selenium-webdriver to gather
      the Google URL. This will open a robot controlled browser window and attempt
      to get a URL from Google that will be used for scraping afterwards.

      Only downside to this method is that your IP and user agent will be visible
      until the application pulls the URL.

      :param query: the dork/query to search for
      :param url: the search engine URL to open
      :param verbose: emit step-by-step debug output
      :param warning: log a note that the browser is about to be automated
      :param user_agent: user-agent string to override the browser's default
      :param proxy: optional single-entry dict mapping proxy type -> address
      :return: the URL selenium lands on after performing the search
    """
    if verbose:
        logger.debug(
            set_color("setting up the virtual display to hide the browser...",
                      level=10))
    ff_display = Display(visible=0, size=(800, 600))
    ff_display.start()
    logger.info(
        set_color(
            "firefox browser display will be hidden while it performs the query..."
        ))
    if warning:
        logger.warning(
            set_color(
                "your web browser will be automated in order for Zeus to successfully "
                "bypass captchas and API calls. this is done in order to grab the URL "
                "from the search and parse the results. please give selenium time to "
                "finish it's task...",
                level=30))
    if verbose:
        logger.debug(
            set_color("running selenium-webdriver and launching browser...",
                      level=10))

    if verbose:
        logger.debug(
            set_color(
                "adjusting selenium-webdriver user-agent to '{}'...".format(
                    user_agent),
                level=10))
    if proxy is not None:
        # list() so the first key is indexable on Python 3 as well;
        # dict.keys() returns a non-subscriptable view there
        proxy_type = list(proxy.keys())
        proxy_to_use = Proxy({
            "proxyType": ProxyType.MANUAL,
            "httpProxy": proxy[proxy_type[0]],
            "ftpProxy": proxy[proxy_type[0]],
            "sslProxy": proxy[proxy_type[0]],
            "noProxy": ""
        })
        if verbose:
            logger.debug(
                set_color("setting selenium proxy to '{}'...".format(
                    ''.join(proxy_type) + "://" + ''.join(proxy.values())),
                          level=10))
    else:
        proxy_to_use = None

    profile = webdriver.FirefoxProfile()
    profile.set_preference("general.useragent.override", user_agent)
    browser = webdriver.Firefox(profile, proxy=proxy_to_use)
    logger.info(set_color("browser will open shortly..."))
    browser.get(url)
    if verbose:
        logger.debug(
            set_color(
                "searching search engine for the 'q' element (search button)...",
                level=10))
    search = browser.find_element_by_name('q')
    logger.info(
        set_color("searching '{}' using query '{}'...".format(url, query)))
    search.send_keys(query)
    search.send_keys(Keys.RETURN)  # hit return after you enter search text
    time.sleep(3)  # give the results page time to load
    if verbose:
        logger.debug(set_color("obtaining URL from selenium..."))
    retval = browser.current_url
    ban_url_schema = ["http://ipv6.google.com", "http://ipv4.google.com"]
    if any(u in retval for u in ban_url_schema):  # if you got IP banned
        logger.warning(
            set_color(
                "it appears that Google is attempting to block your IP address, attempting bypass...",
                level=30))
        try:
            retval = bypass_ip_block(retval)
        except IndexError:
            browser.close()  # stop all the random rogue processes
            ff_display.stop()
            logger.warning(
                set_color(
                    "for now the IP ban bypass will only work for queries that have Google's search syntax "
                    "in them. (IE inurl:, incontext:, incontent:)",
                    level=30))
            raise NotImplementedError(
                "bypass for query '{}' is not implemented yet, try again with a different dork, "
                "or change your IP address...".format(query))
    if verbose:
        logger.debug(
            set_color("found current URL from selenium browser '{}'...".format(
                retval),
                      level=10))
    logger.info(set_color("closing the browser and continuing process.."))
    browser.close()
    ff_display.stop()
    return retval
Example #12
0
def check_for_admin_page(url,
                         exts,
                         protocol="http://",
                         show_possibles=False,
                         verbose=False):
    """
    Brute force common admin-page paths against the target host.

    :param url: base URL of the target
    :param exts: iterable of path extensions to append (e.g. ``/admin.php``)
    :param protocol: scheme prepended to the stripped host
    :param show_possibles: also display "Access Denied" hits (likely panels
        without external access)
    :param verbose: emit per-attempt debug/warning output
    """
    possible_connections, connections = set(), set()
    stripped_url = replace_http(url.strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url),
                                   level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if "Access Denied" in str(e):
                # record the hit regardless of verbosity; previously this
                # was gated behind `if verbose`, so results depended on
                # the logging level
                possible_connections.add(true_url)
                if verbose:
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
            elif verbose:
                logger.error(
                    set_color(
                        "failed to connect got error code {}...".format(
                            data[2]),
                        level=40))
        except Exception as e:
            if verbose:
                # the old test `"..." or "..." in str(e)` was always true
                # (a non-empty literal is truthy); check both substrings
                if ("<urlopen error timed out>" in str(e)
                        or "timeout: timed out" in str(e)):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    fix_log_file()
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connections(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections),
                                  len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        # was missing .format(url), so a literal "{}" was printed
        logger.fatal(
            set_color(
                "did not find any successful connections to {}'s "
                "admin page".format(url),
                level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            # was missing .format(url), so a literal "{}" was printed
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page".format(url),
                    level=50))
Example #13
0
def search_multiple_pages(query,
                          link_amount,
                          proxy=None,
                          agent=None,
                          verbose=False):
    """
    Search Google through its API client for up to ``link_amount`` links.

    :param query: the dork/query to search for
    :param link_amount: maximum number of links to pull
    :param proxy: optional proxy string to route the searches through
    :param agent: optional user-agent for the searches
    :param verbose: emit debug output while configuring and collecting
    """
    def __config_proxy(proxy_string):
        # translate a proxy string into an httplib2 ProxyInfo object
        proxy_type_schema = {
            "http": httplib2.socks.PROXY_TYPE_HTTP,
            "socks4": httplib2.socks.PROXY_TYPE_SOCKS4,
            "socks5": httplib2.socks.PROXY_TYPE_SOCKS5
        }
        proxy_type = get_proxy_type(proxy_string)[0]
        proxy_dict = proxy_string_to_dict(proxy_string)
        proxy_config = httplib2.ProxyInfo(
            proxy_type=proxy_type_schema[proxy_type],
            proxy_host="".join(proxy_dict.keys()),
            proxy_port="".join(proxy_dict.values()))
        return proxy_config

    if proxy is not None:
        if verbose:
            logger.debug(
                set_color("configuring to use proxy '{}'...".format(proxy),
                          level=10))
        # NOTE(review): the returned ProxyInfo is discarded, so the proxy
        # is never actually applied to google_api.search -- confirm intent
        __config_proxy(proxy)

    if agent is not None:
        if verbose:
            logger.debug(
                set_color("settings user-agent to '{}'...".format(agent),
                          level=10))

    logger.warning(
        set_color(
            "multiple pages will be searched using Google's API client, searches may be blocked after a certain "
            "amount of time...",
            level=30))
    results, limit, found, index = set(), link_amount, 0, google_api.search(
        query, user_agent=agent, safe="on")
    try:
        while limit > 0:
            results.add(next(index))
            limit -= 1
            found += 1
    except Exception as e:
        if "Error 503" in str(e):
            logger.fatal(
                set_color(
                    "Google is blocking the current IP address, dumping already found URL's...",
                    level=50))
        else:
            # previously any other failure was swallowed silently; at
            # least record what happened before continuing with the
            # partial result set
            logger.error(
                set_color(
                    "ran into error '{}' while searching, dumping already found URL's...".format(e),
                    level=40))

    retval = set()
    for url in results:
        if URL_REGEX.match(url) and URL_QUERY_REGEX.match(url):
            if verbose:
                logger.debug(set_color("found '{}'...".format(url), level=10))
            retval.add(url)

    if len(retval) != 0:
        logger.info(
            set_color(
                "a total of {} links found out of requested {}...".format(
                    len(retval), link_amount)))
        write_to_log_file(list(retval), URL_LOG_PATH, "url-log-{}.log")
    else:
        logger.error(
            set_color("unable to extract URL's from results...", level=40))
Example #14
0
def get_urls(query, url, verbose=False, warning=True, user_agent=None, proxy=None, **kwargs):
    """
      Bypass Google captchas and Google API by using selenium-webdriver to gather
      the Google URL. This will open a robot controlled browser window and attempt
      to get a URL from Google that will be used for scraping afterwards.

      Only downside to this method is that your IP and user agent will be visible
      until the application pulls the URL.

      :param query: the dork/query to search for
      :param url: the search engine URL to open
      :param verbose: emit step-by-step debug output
      :param warning: log a note that the browser is about to be automated
      :param user_agent: user-agent string to override the browser's default
      :param proxy: optional single-entry dict mapping proxy type -> address
      :return: the URL selenium lands on after performing the search
    """
    if verbose:
        logger.debug(set_color(
            "setting up the virtual display to hide the browser...", level=10
        ))
    ff_display = Display(visible=0, size=(800, 600))
    ff_display.start()
    logger.info(set_color(
        "firefox browser display will be hidden while it performs the query..."
    ))
    if warning:
        logger.warning(set_color(
            "your web browser will be automated in order for Zeus to successfully "
            "bypass captchas and API calls. this is done in order to grab the URL "
            "from the search and parse the results. please give selenium time to "
            "finish it's task...", level=30
        ))
    if verbose:
        logger.debug(set_color(
            "running selenium-webdriver and launching browser...", level=10
        ))

    if verbose:
        logger.debug(set_color(
            "adjusting selenium-webdriver user-agent to '{}'...".format(user_agent), level=10
        ))
    if proxy is not None:
        # list() so the first key is indexable on Python 3 as well;
        # dict.keys() returns a non-subscriptable view there
        proxy_type = list(proxy.keys())
        proxy_to_use = Proxy({
            "proxyType": ProxyType.MANUAL,
            "httpProxy": proxy[proxy_type[0]],
            "ftpProxy": proxy[proxy_type[0]],
            "sslProxy": proxy[proxy_type[0]],
            "noProxy": ""
        })
        if verbose:
            logger.debug(set_color(
                "setting selenium proxy to '{}'...".format(
                    ''.join(proxy_type) + "://" + ''.join(proxy.values())
                ), level=10
            ))
    else:
        proxy_to_use = None

    profile = webdriver.FirefoxProfile()
    profile.set_preference("general.useragent.override", user_agent)
    browser = webdriver.Firefox(profile, proxy=proxy_to_use)
    logger.info(set_color("browser will open shortly..."))
    browser.get(url)
    if verbose:
        logger.debug(set_color(
            "searching search engine for the 'q' element (search button)...", level=10
        ))
    search = browser.find_element_by_name('q')
    logger.info(set_color(
        "searching '{}' using query '{}'...".format(url, query)
    ))
    search.send_keys(query)
    search.send_keys(Keys.RETURN)  # hit return after you enter search text
    time.sleep(3)  # give the results page time to load
    if verbose:
        logger.debug(set_color(
            "obtaining URL from selenium..."
        ))
    retval = browser.current_url
    if verbose:
        logger.debug(set_color(
            "found current URL from selenium browser '{}'...".format(retval), level=10
        ))
    logger.info(set_color(
        "closing the browser and continuing process.."
    ))
    browser.close()
    ff_display.stop()
    return retval