# Example #1 (score: 0)
def check_for_robots(url, ext="/robots.txt", data_sep="-" * 30):
    """
    Check whether the URL has a robots.txt and collect `interesting`
    information (lines containing "Allow") out of the page.

    :param url: target host URL; any scheme prefix is stripped first
    :param ext: path of the robots file to fetch (default "/robots.txt")
    :param data_sep: separator string used when dumping the whole page
    :return: False when the page 404s, otherwise None (results are
             displayed/logged as a side effect)
    """
    url = replace_http(url)
    interesting = set()
    full_url = "{}{}{}".format("http://", url, ext)
    conn = requests.get(full_url)
    # BUG FIX: `conn.content` is bytes on Python 3 and bytes.split("\n")
    # raises TypeError; `conn.text` is the decoded str body
    data = conn.text
    code = conn.status_code
    if code == 404:
        # no robots.txt on this host, nothing to report
        return False
    for line in data.split("\n"):
        if "Allow" in line:
            interesting.add(line.strip())
    if len(interesting) > 0:
        create_tree(full_url, list(interesting))
    else:
        to_display = prompt(
            "nothing interesting found in robots.txt would you like to display the entire page",
            opts="yN")
        if to_display.lower().startswith("y"):
            print("{}\n{}\n{}".format(data_sep, data, data_sep))
    logger.info(set_color("robots.txt page will be saved into a file..."))
    write_to_log_file(data, ROBOTS_PAGE_PATH, "robots-{}.log".format(url))
# Example #2 (score: 0)
def main_xss(start_url, verbose=False, proxy=None, agent=None, tamper=None):
    """
    Main XSS attack method to be called.

    :param start_url: URL to test for XSS vulnerabilities
    :param verbose: emit per-payload debug output when True
    :param proxy: optional proxy to route requests through
    :param agent: optional user agent string
    :param tamper: optional tamper-script name used to mangle payloads
    """
    if tamper:
        logger.info(set_color(
            "tampering payloads with '{}'...".format(tamper)))
    find_xss_script(start_url)
    logger.info(set_color("loading payloads..."))
    payloads = __load_payloads()
    if verbose:
        logger.debug(
            set_color("a total of {} payloads loaded...".format(len(payloads)),
                      level=10))
    logger.info(
        set_color(
            "payloads will be written to a temporary file and read from there..."
        ))
    filename = create_urls(start_url, payloads, tamper=tamper)
    logger.info(
        set_color("loaded URL's have been saved to '{}'...".format(filename)))
    logger.info(
        set_color("testing for XSS vulnerabilities on host '{}'...".format(
            start_url)))
    if proxy is not None:
        logger.info(set_color("using proxy '{}'...".format(proxy)))
    success = set()
    with open(filename) as urls:
        for i, url in enumerate(urls.readlines(), start=1):
            url = url.strip()
            result = scan_xss(url, proxy=proxy, agent=agent)
            payload = find_xss_script(url)
            if verbose:
                logger.info(set_color(
                    "trying payload '{}'...".format(payload)))
            if result[0] != "sqli" and result[0] is True:
                success.add(url)
                if verbose:
                    logger.debug(
                        set_color(
                            "payload '{}' appears to be usable...".format(
                                payload),
                            level=10))
            # BUG FIX: original compared with `is "sqli"` — identity, not
            # equality; string interning makes it unreliable and CPython
            # emits a SyntaxWarning. Use `==` for value comparison.
            elif result[0] == "sqli":
                if i <= 1:
                    # only warn in detail on the first URL to avoid spamming
                    # the same DBMS error for every payload
                    logger.error(
                        set_color(
                            "loaded URL '{}' threw a DBMS error and appears to be injectable, test for SQL injection, "
                            "backend DBMS appears to be '{}'...".format(
                                url, result[1]),
                            level=40))
                else:
                    if verbose:
                        logger.error(
                            set_color("SQL error discovered...", level=40))
            else:
                if verbose:
                    logger.debug(
                        set_color(
                            "host '{}' does not appear to be vulnerable to XSS attacks with payload '{}'..."
                            .format(start_url, payload),
                            level=10))
    if len(success) != 0:
        logger.info(set_color("possible XSS scripts to be used:"))
        create_tree(start_url, list(success))
    else:
        logger.error(
            set_color(
                "host '{}' does not appear to be vulnerable to XSS attacks...".
                format(start_url)))
    save = prompt("would you like to keep the URL's saved for further testing",
                  opts="yN")
    if save.lower().startswith("n"):
        os.remove(filename)
# Example #3 (score: 0)
def check_for_admin_page(url, exts, protocol="http://", **kwargs):
    """
    Try each extension in `exts` against the host and report which admin
    page paths connect successfully (or look reachable but access denied).

    :param url: target host URL; any scheme prefix is stripped first
    :param exts: iterable of path extensions to probe (e.g. "/admin.php")
    :param protocol: scheme prepended to the stripped host
    :keyword verbose: emit per-attempt debug output when True
    :keyword show_possibles: also display the access-denied candidates
    """
    verbose = kwargs.get("verbose", False)
    show_possibles = kwargs.get("show_possibles", False)
    possible_connections, connections = set(), set()
    stripped_url = replace_http(str(url).strip())
    for ext in exts:
        ext = ext.strip()
        true_url = "{}{}{}".format(protocol, stripped_url, ext)
        if verbose:
            logger.debug(set_color("trying '{}'...".format(true_url),
                                   level=10))
        try:
            urlopen(true_url, timeout=5)
            logger.info(
                set_color(
                    "connected successfully to '{}'...".format(true_url)))
            connections.add(true_url)
        except HTTPError as e:
            data = str(e).split(" ")
            if "Access Denied" in str(e):
                # BUG FIX: recording the candidate was nested under
                # `verbose`, so possible panels were dropped in normal
                # runs; collect always, log only when verbose
                possible_connections.add(true_url)
                if verbose:
                    logger.warning(
                        set_color(
                            "got access denied, possible control panel found without external access on '{}'..."
                            .format(true_url),
                            level=30))
            elif verbose:
                # guard against short error strings before indexing
                error_code = data[2] if len(data) > 2 else str(e)
                logger.error(
                    set_color(
                        "failed to connect got error code {}...".format(
                            error_code),
                        level=40))
        except Exception as e:
            if verbose:
                # BUG FIX: original condition was
                # `"<urlopen error timed out>" or "timeout: timed out" in str(e)`
                # — the first operand is a truthy literal, so the branch was
                # always taken and unexpected errors were never reported
                if ("<urlopen error timed out>" in str(e)
                        or "timeout: timed out" in str(e)):
                    logger.warning(
                        set_color(
                            "connection timed out after five seconds "
                            "assuming won't connect and skipping...",
                            level=30))
                else:
                    logger.exception(
                        set_color(
                            "failed to connect with unexpected error '{}'...".
                            format(str(e)),
                            level=50))
                    request_issue_creation()
    possible_connections, connections = list(possible_connections), list(
        connections)
    data_msg = "found {} possible connections(s) and {} successful connection(s)..."
    logger.info(
        set_color(data_msg.format(len(possible_connections),
                                  len(connections))))
    if len(connections) != 0:
        logger.info(set_color("creating connection tree..."))
        create_tree(url, connections)
    else:
        logger.fatal(
            set_color(
                "did not receive any successful connections to the admin page of "
                "{}...".format(url),
                level=50))
    if show_possibles:
        if len(possible_connections) != 0:
            logger.info(set_color("creating possible connection tree..."))
            create_tree(url, possible_connections)
        else:
            # BUG FIX: the "{}" placeholder was never formatted, so the
            # literal braces were printed instead of the target URL
            logger.fatal(
                set_color(
                    "did not find any possible connections to {}'s "
                    "admin page".format(url),
                    level=50))