def main_xss(start_url, verbose=False, proxy=None, agent=None):
    """Run the loaded XSS payloads against ``start_url`` and report usable ones.

    :param start_url: the target URL to test
    :param verbose: emit debug-level progress messages when True
    :param proxy: optional proxy passed through to ``scan_xss``
    :param agent: optional user-agent passed through to ``scan_xss``
    """
    find_xss_script(start_url)
    logger.info(set_color(
        "loading payloads..."
    ))
    payloads = __load_payloads()
    if verbose:
        logger.debug(set_color(
            "a total of {} payloads loaded...".format(len(payloads)), level=10
        ))
    logger.info(set_color(
        "payloads will be written to a temporary file and read from there..."
    ))
    filename = create_urls(start_url, payloads)
    if verbose:
        logger.debug(set_color(
            "loaded URL's have been saved to '{}'...".format(filename), level=10
        ))
    logger.info(set_color(
        "testing for XSS vulnerabilities on host '{}'...".format(start_url)
    ))
    if proxy is not None:
        logger.info(set_color(
            "using proxy '{}'...".format(proxy)
        ))
    success = set()
    with open(filename) as urls:
        for url in urls.readlines():
            url = url.strip()
            result = scan_xss(url, proxy=proxy, agent=agent)
            payload = find_xss_script(url)
            logger.info(set_color(
                "trying payload '{}'...".format(payload)
            ))
            # NOTE(fix): the SQLi sentinel must be checked FIRST and with `==`,
            # not `is`. The original tested `if result:` before
            # `elif result is "sqli":` — the string "sqli" is truthy, so the
            # SQLi branch was unreachable and `is` on a literal is
            # implementation-dependent anyway.
            if result == "sqli":
                logger.error(set_color(
                    "loaded URL '{}' threw a DBMS error and appears to be SQLi vulnerable, test for SQL injection".format(
                        url
                    ), level=30
                ))
            elif result:
                success.add(url)
                if verbose:
                    logger.debug(set_color(
                        "payload '{}' appears to be usable...".format(payload), level=10
                    ))
            else:
                if verbose:
                    logger.debug(set_color(
                        "host '{}' does not appear to be vulnerable to XSS attacks with payload '{}'...".format(
                            start_url, payload
                        ), level=10
                    ))
    create_tree(start_url, list(success))
def check_for_admin_page(url, exts, protocol="http://", show_possibles=False, verbose=False):
    """Probe ``url`` with each extension in ``exts`` looking for an admin page.

    Successful connections and "access denied" (likely-existing but blocked)
    pages are collected separately and displayed as connection trees.

    :param url: base host to probe
    :param exts: iterable of path extensions appended to the stripped host
    :param protocol: scheme prefixed to the stripped host
    :param show_possibles: also display access-denied (possible) connections
    :param verbose: emit debug/warning-level progress messages when True
    """
    possible_connections, connections = set(), set()
    stripped_url = replace_http(url.strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url), level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            if "Access Denied" in str(e):
                # an "Access Denied" response usually means the panel exists
                # but blocks external access.
                # NOTE(fix): record the hit unconditionally — the original
                # only added it when `verbose` was set, so data collection
                # depended on the log level.
                possible_connections.add(true_url)
                if verbose:
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url), level=30))
            elif verbose:
                # NOTE(fix): HTTPError exposes the status directly; the
                # original parsed str(e).split(" ")[2], which is fragile
                logger.error(
                    set_color(
                        "failed to connect got error code {}...".format(
                            e.code), level=40))
        except Exception as e:
            if verbose:
                # NOTE(fix): the original condition
                # `"<urlopen error timed out>" or "timeout: timed out" in str(e)`
                # was always true (a non-empty literal is truthy), making the
                # unexpected-error branch unreachable; test both substrings
                if ("<urlopen error timed out>" in str(e)
                        or "timeout: timed out" in str(e)):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)), level=50))
                    fix_log_file()
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connections(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections), len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        # NOTE(fix): the `{}` placeholder was never filled in the original
        logger.fatal(
            set_color(
                "did not find any successful connections to {}'s "
                "admin page".format(url), level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page".format(url), level=50))
def main_xss(start_url, verbose=False, proxy=None, agent=None, tamper=None):
    """Run the loaded (optionally tampered) XSS payloads against ``start_url``.

    :param start_url: the target URL to test
    :param verbose: emit debug-level progress messages when True
    :param proxy: optional proxy passed through to ``scan_xss``
    :param agent: optional user-agent passed through to ``scan_xss``
    :param tamper: optional tamper-script name applied to each payload
    """
    if tamper:
        logger.info(set_color(
            "tampering payloads with '{}'...".format(tamper)
        ))
    find_xss_script(start_url)
    logger.info(set_color(
        "loading payloads..."
    ))
    payloads = __load_payloads()
    if verbose:
        logger.debug(set_color(
            "a total of {} payloads loaded...".format(len(payloads)), level=10
        ))
    logger.info(set_color(
        "payloads will be written to a temporary file and read from there..."
    ))
    filename = create_urls(start_url, payloads, tamper=tamper)
    logger.info(set_color(
        "loaded URL's have been saved to '{}'...".format(filename)
    ))
    logger.info(set_color(
        "testing for XSS vulnerabilities on host '{}'...".format(start_url)
    ))
    if proxy is not None:
        logger.info(set_color(
            "using proxy '{}'...".format(proxy)
        ))
    success = set()
    with open(filename) as urls:
        for i, url in enumerate(urls.readlines(), start=1):
            url = url.strip()
            result = scan_xss(url, proxy=proxy, agent=agent)
            payload = find_xss_script(url)
            if verbose:
                logger.info(set_color(
                    "trying payload '{}'...".format(payload)
                ))
            # NOTE(fix): `result[0] is True` already excludes the "sqli"
            # sentinel, so the original's extra `result[0] != "sqli"` test was
            # redundant; and the sentinel must be compared with `==`, not `is`
            # (identity of string literals is implementation-dependent)
            if result[0] is True:
                success.add(url)
                if verbose:
                    logger.debug(set_color(
                        "payload '{}' appears to be usable...".format(payload), level=10
                    ))
            elif result[0] == "sqli":
                # only report the full SQLi message once (on the first URL)
                if i <= 1:
                    logger.error(set_color(
                        "loaded URL '{}' threw a DBMS error and appears to be injectable, test for SQL injection, "
                        "backend DBMS appears to be '{}'...".format(
                            url, result[1]
                        ), level=40
                    ))
                else:
                    if verbose:
                        logger.error(set_color(
                            "SQL error discovered...", level=40
                        ))
            else:
                if verbose:
                    logger.debug(set_color(
                        "host '{}' does not appear to be vulnerable to XSS attacks with payload '{}'...".format(
                            start_url, payload
                        ), level=10
                    ))
    if len(success) != 0:
        logger.info(set_color(
            "possible XSS scripts to be used:"
        ))
        create_tree(start_url, list(success))
    else:
        logger.error(set_color(
            "host '{}' does not appear to be vulnerable to XSS attacks...".format(start_url)
        ))
        save = prompt(
            "would you like to keep the URL's saved for further testing", opts="yN"
        )
        if save.lower().startswith("n"):
            os.remove(filename)