# Example 1
def run_xss_scan(url, url_file=None, proxy=None, user_agent=False):
    """Run an XSS scan against a single URL or every URL listed in a file.

    Args:
        url: target URL; must contain a query (GET) parameter to be scanned.
        url_file: optional path to a file containing one URL per line. When
            given, each URL is offered interactively for scanning and ``url``
            is ignored.
        proxy: optional proxy string forwarded to the scanner.
        user_agent: when True, a random user agent header is used.
    """
    header = RANDOM_USER_AGENT if user_agent is not False else None
    if proxy is not None:
        LOGGER.info("Proxy configured, running through: {}".format(proxy))
    if user_agent is True:
        LOGGER.info("Grabbed random user agent: {}".format(header))

    if url_file is not None:  # Scan a given file full of URLS
        # Read the file once, under a context manager, instead of opening it
        # twice (the original leaked a file handle just to count lines).
        with open(url_file) as urls:
            url_list = [line.strip() for line in urls]
        total = len(url_list)
        done = 0
        LOGGER.info("Found a total of {} URLS to scan..".format(total))
        for target in url_list:
            if not QUERY_REGEX.match(target):
                # Without a GET parameter there is nothing to inject into.
                LOGGER.warning(
                    "URL '{}' does not contain a query (GET) parameter, skipping"
                    .format(target))
                continue
            question = prompt(
                "Would you like to scan '{}' for XSS vulnerabilities[y/N]: "
                .format(target))
            if not question.lower().startswith("y"):
                continue
            done += 1
            if not xss.main(target, proxy=proxy, headers=header):
                LOGGER.info(
                    "URL '{}' does not appear to be vulnerable to XSS"
                    .format(target))
            else:
                LOGGER.info(
                    "URL '{}' appears to be vulnerable to XSS".format(target))
            LOGGER.info("URLS scanned: {}, URLS left: {}".format(
                done, total - done))
        LOGGER.info("All URLS in file have been scanned, shutting down..")

    else:  # Scan a single URL
        if QUERY_REGEX.match(url):
            # The original passed proxy=/headers= kwargs to str.format here;
            # they were silently ignored and have been dropped.
            LOGGER.info("Searching: {} for XSS vulnerabilities..".format(url))
            if not xss.main(url, proxy=proxy, headers=header):
                LOGGER.error(
                    "{} does not appear to be vulnerable to XSS".format(url))
            else:
                LOGGER.info("{} seems to be vulnerable to XSS.".format(url))
        else:
            error_message = "The URL you provided does not contain a query "
            error_message += "(GET) parameter. In order for this scan you run "
            error_message += "successfully you will need to provide a URL with "
            error_message += "A query (GET) parameter example: http://127.0.0.1/php?id=2"
            LOGGER.fatal(error_message)
# Example 2
def run_sqli_scan(url,
                  url_file=None,
                  proxy=None,
                  user_agent=False,
                  tamper=None):
    """Run a SQLi scan against a single URL or every URL listed in a file.

    Args:
        url: target URL; must contain a query (GET) parameter to be scanned.
        url_file: optional path to a file containing one URL per line. When
            given, each URL is offered interactively for scanning and ``url``
            is ignored.
        proxy, user_agent, tamper: accepted for interface parity with
            run_xss_scan but NOT forwarded to SQLiScanner here.
            NOTE(review): confirm whether that is intentional.
    """
    # Kept as a template and formatted per failing URL: the original baked in
    # the ``url`` parameter up front, so in file mode the fatal message never
    # named the URL that actually raised the exception.
    error_template = "URL: '{}' threw an exception "
    error_template += "and Pybelt is unable to resolve the URL, "
    error_template += "this could mean that the URL is not allowing connections "
    error_template += "or that the URL is bad. Attempt to connect "
    error_template += "to the URL manually, if a connection occurs "
    error_template += "make an issue."

    if url_file is not None:  # Run through a file list
        # Read the file once, under a context manager, instead of opening it
        # twice (the original leaked a file handle just to count lines).
        with open(url_file) as urls:
            url_list = [line.strip() for line in urls]
        total = len(url_list)
        done = 0
        LOGGER.info("Found a total of {} urls in file {}..".format(
            total, url_file))
        for target in url_list:
            try:
                if QUERY_REGEX.match(target):
                    question = prompt(
                        "Would you like to scan '{}' for SQLi vulnerabilities[y/N]: "
                        .format(target))
                    if question.lower().startswith("y"):
                        LOGGER.info("Starting scan on url: '{}'".format(
                            target))
                        LOGGER.info(SQLiScanner(target).sqli_search())
                        done += 1
                        LOGGER.info(
                            "URLS scanned: {}, URLS left: {}".format(
                                done, total - done))
                else:
                    # Without a GET parameter there is nothing to inject into.
                    LOGGER.warning(
                        "URL '{}' does not contain a query (GET) parameter, skipping.."
                        .format(target))
            except HTTPError:
                LOGGER.fatal(error_template.format(target))
        LOGGER.info("No more URLS found in file, shutting down..")

    else:  # Run a single URL
        try:
            if QUERY_REGEX.match(url):
                LOGGER.info("Starting SQLi scan on '{}'..".format(url))
                LOGGER.info(SQLiScanner(url).sqli_search())
            else:
                LOGGER.error(
                    "URL does not contain a query (GET) parameter. Example: http://example.com/php?id=2"
                )
        except HTTPError:
            LOGGER.fatal(error_template.format(url))