Example #1
def perform_port_scan(url,
                      ports=None,
                      scanner=NmapHook,
                      verbose=False,
                      opts=None,
                      **kwargs):
    """
    main function that will initialize the port scanning
    """
    url = url.strip()
    logger.info(
        set_color(
            "attempting to find IP address for hostname '{}'...".format(url)))
    found_ip_address = socket.gethostbyname(url)
    logger.info(
        set_color("found IP address for given URL -> '{}'...".format(
            found_ip_address)))
    if verbose:
        logger.debug(set_color("checking for nmap on your system...",
                               level=10))
    nmap_exists = find_nmap(verbose=verbose)
    if nmap_exists:
        if verbose:
            logger.debug(
                set_color(
                    "nmap has been found under '{}'...".format(nmap_exists),
                    level=10))
        logger.info(
            set_color("starting port scan on IP address '{}'...".format(
                found_ip_address)))
        try:
            data = scanner(found_ip_address, ports=ports, opts=opts)
            json_data = data._get_all_info()
            data.show_open_ports(json_data)
            file_path = data.send_to_file(json_data)
            logger.info(
                set_color(
                    "port scan completed, all data saved to JSON file under '{}'..."
                    .format(file_path)))
        except KeyError:
            logger.fatal(
                set_color("no port information found for '{}({})'...".format(
                    url, found_ip_address),
                          level=50))
        except Exception as e:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue quitting...".
                    format(e),
                    level=50))
            request_issue_creation()
    else:
        logger.fatal(
            set_color(
                "nmap was not found on your system, please install it...",
                level=50))
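
A minimal usage sketch for the function above, assuming the Zeus-Scanner modules are on the path; the import path below is hypothetical and the port-range string is an assumption about what NmapHook accepts.

# hypothetical import path -- adjust to wherever perform_port_scan lives in the project
from lib.attacks.nmap_scan import perform_port_scan

# resolve the hostname, locate nmap, run the scan, and dump the results to a JSON file
perform_port_scan(
    "example.com",
    ports="1-1024",   # assumption: an nmap-style port range forwarded to NmapHook
    verbose=True      # enables the extra debug logging shown above
)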
Example #2
                        )

        elif opt.fileToEnumerate is not None:
            with open(opt.fileToEnumerate) as urls:
                if opt.runSqliScan or opt.runPortScan or opt.intelCheck or opt.adminPanelFinder or opt.runXssScan:
                    for url in urls.readlines():
                        url = url.strip()
                        __run_attacks(
                            url,
                            sqlmap=opt.runSqliScan, nmap=opt.runPortScan, intel=opt.intelCheck, xss=opt.runXssScan,
                            admin=opt.adminPanelFinder, given_path=opt.givenSearchPath,
                            auto=opt.autoStartSqlmap, verbose=opt.runInVerbose, batch=opt.runInBatch
                        )
                else:
                    logger.fatal(set_color(
                        "failed to provide an attack argument, attack argument must be provided "
                        "for Zeus to attack the provided URL's", level=50
                    ))

        else:
            logger.critical(set_color(
                "failed to provide a mandatory argument, you will be redirected to the help menu...", level=50
            ))
            time.sleep(2)
            subprocess.call("python zeus.py --help", shell=True)

    except KeyboardInterrupt:
        logger.error(set_color(
            "user aborted process...", level=40
        ))
    except UnboundLocalError:
        logger.warning(set_color(
Example #3
                    agent=agent_to_use
                )
            except Exception as e:
                logger.exception(set_color(
                    "ran into exception '{}'...".format(e), level=50
                ))
                request_issue_creation()

            __run_attacks_main()

        # search multiple pages of Google
        elif opt.dorkToUse is not None and opt.searchMultiplePages:
            if opt.amountToSearch is None:
                logger.fatal(set_color(
                    "did not specify amount of links to find...", level=50
                ))
                shutdown()
            link_amount_to_search = opt.amountToSearch
            logger.info(set_color(
                "searching Google using dork '{}' for a total of {} links...".format(opt.dorkToUse, opt.amountToSearch)
            ))
            try:
                search.search_multiple_pages(opt.dorkToUse, link_amount_to_search, proxy=proxy_to_use,
                                             agent=agent_to_use, verbose=opt.runInVerbose)
            except Exception as e:
                if "Error 400" in str(e):
                    logger.fatal(set_color(
                        "failed to connect to search engine...".format(e), level=50
                    ))
                elif "Error 503" in str(e):
Example #4
def check_for_admin_page(url,
                         exts,
                         protocol="http://",
                         show_possibles=False,
                         verbose=False):
    possible_connections, connections = set(), set()
    stripped_url = replace_http(url.strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url),
                                   level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if verbose:
                if "Access Denied" in str(e):
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
                    possible_connections.add(true_url)
                else:
                    logger.error(
                        set_color(
                            "failed to connect got error code {}...".format(
                                data[2]),
                            level=40))
        except Exception as e:
            if verbose:
                if "<urlopen error timed out>" or "timeout: timed out" in str(
                        e):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    fix_log_file()
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connection(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections),
                                  len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        logger.fatal(
            set_color(
                "did not find any successful connections to {}'s "
                "admin page",
                level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page",
                    level=50))
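
A short usage sketch for check_for_admin_page; the import path is a guess, and exts is simply an iterable of path suffixes appended to the stripped URL (in Zeus these would normally come from a wordlist).

# hypothetical import path -- adjust to the project's actual module layout
from lib.attacks.admin_panel_finder import check_for_admin_page

admin_extensions = ["/admin", "/admin.php", "/login", "/wp-admin"]

check_for_admin_page(
    "example.com",
    admin_extensions,
    show_possibles=True,   # also report "Access Denied" hits as possible panels
    verbose=True
)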
Example #5
def search_multiple_pages(query,
                          link_amount,
                          proxy=None,
                          agent=None,
                          verbose=False):
    def __config_proxy(proxy_string):
        proxy_type_schema = {
            "http": httplib2.socks.PROXY_TYPE_HTTP,
            "socks4": httplib2.socks.PROXY_TYPE_SOCKS4,
            "socks5": httplib2.socks.PROXY_TYPE_SOCKS5
        }
        proxy_type = get_proxy_type(proxy_string)[0]
        proxy_dict = proxy_string_to_dict(proxy_string)
        proxy_config = httplib2.ProxyInfo(
            proxy_type=proxy_type_schema[proxy_type],
            proxy_host="".join(proxy_dict.keys()),
            proxy_port="".join(proxy_dict.values()))
        return proxy_config

    if proxy is not None:
        if verbose:
            logger.debug(
                set_color("configuring to use proxy '{}'...".format(proxy),
                          level=10))
        # the ProxyInfo object is built here but not applied to the search call below
        __config_proxy(proxy)

    if agent is not None:
        if verbose:
            logger.debug(
                set_color("settings user-agent to '{}'...".format(agent),
                          level=10))

    logger.warning(
        set_color(
            "multiple pages will be searched using Google's API client, searches may be blocked after a certain "
            "amount of time...",
            level=30))
    results, limit, found = set(), link_amount, 0
    index = google_api.search(query, user_agent=agent, safe="on")
    try:
        while limit > 0:
            results.add(next(index))
            limit -= 1
            found += 1
    except Exception as e:
        if "Error 503" in str(e):
            logger.fatal(
                set_color(
                    "Google is blocking the current IP address, dumping already found URLs...",
                    level=50))

    retval = set()
    for url in results:
        if URL_REGEX.match(url) and URL_QUERY_REGEX.match(url):
            if verbose:
                logger.debug(set_color("found '{}'...".format(url), level=10))
            retval.add(url)

    if len(retval) != 0:
        logger.info(
            set_color(
                "a total of {} links found out of requested {}...".format(
                    len(retval), link_amount)))
        write_to_log_file(list(retval), URL_LOG_PATH, "url-log-{}.log")
    else:
        logger.error(
            set_color("unable to extract URL's from results...", level=40))
Example #6
def parse_search_results(query, url_to_search, verbose=False, **kwargs):
    """
      Parse a webpage from Google for URL's with a GET(query) parameter
    """
    exclude = ("www.google.com", "map.google.com", "mail.google.com",
               "drive.google.com", "news.google.com", "accounts.google.com")
    splitter = "&amp;"
    retval = set()
    query_url = None

    def __get_headers():
        # dict.get returns None for missing keys, so no exception handling is needed
        proxy_string = kwargs.get("proxy")
        user_agent = kwargs.get("agent")
        return proxy_string, user_agent

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration...",
                      level=10))
    proxy_string, user_agent = __get_headers()

    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
    if user_agent is None:
        user_agent = DEFAULT_USER_AGENT

    user_agent_info = "adjusting user-agent header to {}..."
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {"Connection": "close", "user-agent": user_agent}
    logger.info(set_color("attempting to gather query URL..."))
    try:
        query_url = get_urls(query,
                             url_to_search,
                             verbose=verbose,
                             user_agent=user_agent,
                             proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(
                set_color(
                    "it seems that you exited the browser, please allow the browser "
                    "to complete it's run so that Zeus can bypass captchas and API "
                    "calls",
                    level=50))
        elif "'/usr/lib/firefoxdriver/webdriver.xpi'" in str(e):
            logger.fatal(
                set_color(
                    "firefox was not found in the default location on your system, "
                    "check your installation and make sure it is in /usr/lib, if you "
                    "find it there, restart your system and try again...",
                    level=50))
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file...".format(
                        os.path.basename(__file__),
                        str(e).strip()),
                    level=50))
            request_issue_creation()
        shutdown()
    logger.info(
        set_color(
            "URL successfully gathered, searching for GET parameters..."))

    logger.info(set_color(proxy_string_info))
    logger.info(set_color(user_agent_info))
    # send the request with the configured user-agent headers and proxy
    req = requests.get(query_url, headers=headers, proxies=proxy_string)
    found_urls = URL_REGEX.findall(req.text)
    for urls in list(found_urls):
        for url in list(urls):
            url = unquote(url)
            if URL_QUERY_REGEX.match(url) and not any(l in url
                                                      for l in exclude):
                if isinstance(url, unicode):
                    url = str(url).encode("utf-8")
                if "webcache" in url:
                    logger.info(
                        set_color(
                            "received webcache URL, extracting URL from webcache..."
                        ))
                    url = extract_webcache_url(url)
                if verbose:
                    try:
                        logger.debug(
                            set_color("found '{}'...".format(
                                url.split(splitter)[0]),
                                      level=10))
                    except TypeError:
                        logger.debug(
                            set_color("found '{}'...".format(
                                str(url).split(splitter)[0]),
                                      level=10))
                    except AttributeError:
                        logger.debug(
                            set_color("found '{}...".format(str(url)),
                                      level=10))
                retval.add(url.split(splitter)[0])
    logger.info(
        set_color("found a total of {} URL's with a GET parameter...".format(
            len(retval))))
    if len(retval) != 0:
        write_to_log_file(retval, URL_LOG_PATH, "url-log-{}.log")
    else:
        logger.critical(
            set_color(
                "did not find any usable URL's with the given query '{}' "
                "using search engine '{}'...".format(query, url_to_search),
                level=50))
        shutdown()
    return list(retval) if len(retval) != 0 else None
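
A sketch of a call to the parser above; the module name is hypothetical, the url_to_search value is an assumed format for whatever get_urls expects, and proxy/agent are the keyword arguments the inner __get_headers helper reads from **kwargs.

# hypothetical module name -- adjust to wherever parse_search_results lives
from search_parser import parse_search_results

urls = parse_search_results(
    "inurl:index.php?id=",            # dork/query to search for
    "http://www.google.com/search",   # assumed search engine URL handed to get_urls
    verbose=True,
    proxy="socks5://127.0.0.1:9050",  # read from **kwargs by __get_headers
    agent="Mozilla/5.0"               # custom user-agent, else DEFAULT_USER_AGENT is used
)
# a list of URLs carrying a GET parameter, or None if nothing usable was found
print(urls)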