Example #1
class PortScanner(object):

    connection_made = []  # class-level list of ports that accepted a connection

    def __init__(self, host):
        self.host = host
        self.ports = RESERVED_PORTS

    def connect_to_host(self):
        start_time = time.time()
        try:
            for port in RESERVED_PORTS.keys():
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                res = sock.connect_ex((self.host, port))
                if res == 0:
                    LOGGER.info("[*] Open: {}  {}".format(
                        port, RESERVED_PORTS[port]))
                    self.connection_made.append(port)
                sock.close()
        except Exception as e:
            print(e)

        stop_time = time.time()
        LOGGER.info("Completed in {} seconds".format(
            str(stop_time - start_time)))
        LOGGER.info("Ports readily available: {}".format(''.join(
            str(self.connection_made))))
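The scanner above hinges on socket.connect_ex, which returns 0 when the TCP handshake succeeds and an error code otherwise. A minimal standalone sketch of the same check (the host, port list and timeout below are illustrative, not Pybelt's RESERVED_PORTS):

import socket

def check_ports(host, ports, timeout=1.0):
    """ Return the subset of ports that accept a TCP connection """
    open_ports = []
    for port in ports:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(timeout)                 # bound each connection attempt
        if sock.connect_ex((host, port)) == 0:   # 0 means the connect succeeded
            open_ports.append(port)
        sock.close()
    return open_ports

print(check_ports("127.0.0.1", [22, 80, 443]))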
Example #2
def run_port_scan(host):
    """ Pointer to run a Port Scan on a given host """
    if re.search(IP_ADDRESS_REGEX, host) is not None:
        LOGGER.info("Starting port scan on IP: {}".format(host))
        LOGGER.info(PortScanner(host).connect_to_host())
    elif re.search(URL_REGEX,
                   host) is not None and re.search(QUERY_REGEX, host) is None:
        try:
            LOGGER.info("Fetching resolve IP...")
            ip_address = socket.gethostbyname(host)
            LOGGER.info("Done! IP: {}".format(ip_address))
            LOGGER.info("Starting scan on URL: {} IP: {}".format(
                host, ip_address))
            PortScanner(ip_address).connect_to_host()
        except socket.gaierror:
            error_message = "Unable to resolve IP address from {}.".format(
                host)
            error_message += " You can manually get the IP address and try again,"
            error_message += " dropping the query parameter in the URL (IE php?id=),"
            error_message += " or dropping the http or https"
            error_message += " and adding www in place of it. IE www.google.com"
            error_message += " may fix this issue."
            LOGGER.fatal(error_message)
    else:
        error_message = "You need to provide a host to scan,"
        error_message += " this can be given in the form of a URL "
        error_message += "or a IP address."
        LOGGER.fatal(error_message)
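Example #2 branches on whether the argument already looks like an IP address or needs DNS resolution first. A rough sketch of that dispatch, with an illustrative IPv4 pattern standing in for Pybelt's IP_ADDRESS_REGEX:

import re
import socket

IPV4_PATTERN = re.compile(r"^\d{1,3}(\.\d{1,3}){3}$")  # illustrative, not Pybelt's regex

def resolve_target(host):
    """ Return an IP address for the host, resolving hostnames when needed """
    if IPV4_PATTERN.match(host):
        return host
    try:
        return socket.gethostbyname(host)  # DNS lookup for hostnames
    except socket.gaierror:
        return None  # could not resolve, caller decides how to report it

print(resolve_target("www.example.com"))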
Example #3
def run_dork_checker(dork):
    """ Pointer to run a Dork Check on a given Google Dork """
    LOGGER.info("Starting dork scan, using query: '{}'..".format(dork))
    try:
        LOGGER.info(DorkScanner(dork).check_urls_for_queries())
    except HTTPError:
        LOGGER.fatal(GoogleBlockException(GOOGLE_TEMP_BLOCK_ERROR_MESSAGE))
Example #4
 def enumerate_hash_types(items, max_likeliest=3):
     LOGGER.info("{} possible hash types found..".format(len(items)))
     for count, item in enumerate(items, start=1):
         if count <= max_likeliest:
             print("\033[92m[*] Most likely possible hash type: {}\033[0m".
                   format(item))
             if count == max_likeliest:
                 print("")
         else:
             print("\033[33m[*] Least likely possible hash type: {}\033[0m".
                   format(item))
Example #5
 def scn(port):
     try:
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         sock.settimeout(3)  # set the timeout before connecting so it actually applies
         res = sock.connect_ex((self.host, port))
         if res == 0:
             LOGGER.info("[*] Open: {}  {}".format(port, RESERVED_PORTS[port]))
             self.connection_made.append(port)
         sock.close()
     except Exception as e:
         print(e)
Example #6
def connect_and_pull_info():
    """ Connect to the proxy source and pull the proxies in JSON form """
    results = {}
    count = 0
    data = json.loads(urllib2.urlopen(PROXY_URL).read())
    for i in range(0, 30):
        count += 1
        results[count] = data[i]
    LOGGER.info(
        "Found {} possible proxies, moving to connection attempts..".format(
            len(results)))
    return results
Example #7
 def connect_to_host(self):
     start_time = time.time()
     try:
         for port in RESERVED_PORTS.keys():
             sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
             res = sock.connect_ex((self.host, port))
             if res == 0:
                 LOGGER.info("[*] Open: {}  {}".format(
                     port, RESERVED_PORTS[port]))
                 self.connection_made.append(port)
             sock.close()
     except Exception as e:
         print(e)
Example #8
 def check_urls_for_queries(self):
     """ The returned URLS will be run through a query regex to see if they have a query parameter
         http://google.com <- False
         http://example.com/php?id=2 <- True """
     filename = settings.create_random_filename()
     LOGGER.info("File being saved to: {}".format(filename))
     with open("{}\\{}.txt".format(settings.DORK_SCAN_RESULTS_PATH, filename), "a+") as results:
         for url in self.connect_to_search_engine():
             match = settings.QUERY_REGEX.match(url)  # Match by regex for anything that has a ?<PARAM>= in it
             if match:
                 results.write(url + "\n")
     amount_of_urls = len(open(settings.DORK_SCAN_RESULTS_PATH + "\\" + filename + ".txt", 'r').readlines())
     success_rate = ((amount_of_urls // 10) + 1) * 10
     return "Found a total of {} usable links with query (GET) parameters, urls have been saved to {}\\{}.txt. " \
            "This Dork has a success rate of {}%".format(amount_of_urls, settings.DORK_SCAN_RESULTS_PATH, filename,
                                                         success_rate)
Example #9
def run_hash_cracker(hash_to_crack):
    """ Pointer to run the Hash Cracking system """
    try:
        items = list(''.join(hash_to_crack).split(":"))
        if items[1] == "all":
            LOGGER.info(
                "Starting hash cracking without knowledge of algorithm...")
            HashCracker(items[0]).try_all_algorithms()
        else:
            LOGGER.info("Starting hash cracking using %s as algorithm type.." %
                        items[1])
            HashCracker(items[0], type=items[1]).try_certain_algorithm()
    except IndexError:
        error_message = "You must specify a hash type in order for this to work. "
        error_message += "Example: 'python pybelt.py -c 098f6bcd4621d373cade4e832627b4f6:md5'"
        LOGGER.fatal(error_message)
Example #10
def connect_and_pull_info():
    """ Connect to the proxy source and pull the proxies in JSON form """
    results = {}
    count = 0
    data = json.loads(urllib2.urlopen(PROXY_URL).read())
    for i in range(0, 60):
        try:
            results[count] = data[i]
            count += 1
        except IndexError:
            pass
    amount = len(results)
    if amount != 0:
        LOGGER.info("Found {} possible proxies, moving to connection attempts..".format(len(results)))
    else:
        LOGGER.warning("No usable proxies discovered")
    return results
Example #11
 def try_certain_algorithm(self):
     """ Use a certain type of algorithm to do the hashing, md5, sha256, etc..
     >>> HashCracker("9a8b1b7eee229046fc2701b228fc2aff", type="md5").try_certain_algorithm()
     {... ,'9a8b1b7eee229046fc2701b228fc2aff': ['want', 'md5'], ...} """
     for word in self.words:
         data = hashlib.new(self.type)  # fresh hash object per word so each digest covers only that word
         data.update(word.strip())
         self.results[data.hexdigest()] = [word.strip(), self.type]
     LOGGER.info("Created %i hashes to verify.." % len(self.results.keys()))
     LOGGER.info("Attempting to crack hash (%s).." % self.hash)
     if self.verify_hashes() is False:
         error_message = "Unable to verify %s against %i different hashes." % (self.hash, len(self.results))
         error_message += " You used algorithm: %s you can attempt all algorithms " % str(self.type).upper()
         error_message += "available on the system by running with 'all' as the hash type. "
         error_message += "IE: python pybelt.py -c 9a8b1b7eee229046fc2701b228fc2aff:all"
         LOGGER.fatal(error_message)
         exit(1)
Example #12
def run_sqli_scan(url, proxy=None, user_agent=False):
    """ Pointer to run a SQLi Scan on a given URL """
    try:
        if QUERY_REGEX.match(url):
            LOGGER.info("Starting SQLi scan on '{}'..".format(url))
            LOGGER.info(SQLiScanner(url).sqli_search())
        else:
            LOGGER.error(
                "URL does not contain a query (GET) parameter. Example: http://example.com/php?id=2"
            )
    except HTTPError as e:
        error_message = "URL: '{}' threw an exception: '{}' ".format(url, e)
        error_message += "and Pybelt is unable to resolve the URL, "
        error_message += "this could mean that the URL is not allowing connections "
        error_message += "or that the URL is bad. Attempt to connect "
        error_message += "to the URL manually, if a connection occurs "
        error_message += "make an issue."
        LOGGER.fatal(error_message)
Example #13
 def verify_hashes(self):
     """ Verify if the hashes match, as long as the hash is in the results dict, it will be found
     >>> print(self.results)
     {... ,'9a8b1b7eee229046fc2701b228fc2aff': ['want', 'md5'], ... }
     >>> HashCracker("9a8b1b7eee229046fc2701b228fc2aff", type="md5").verify_hashes()
     [06:08:49 INFO] Original Hash: 9a8b1b7eee229046fc2701b228fc2aff
                     Algorithm Used: MD5
                     Plain Text: want """
     spacer = " " * 16
     if self.cracked is False:
         for h in self.results.keys():
             if self.hash == h:
                 hash_results = "Original Hash: %s" % self.hash
                 hash_results += "\n%sAlgorithm Used: %s" % (spacer, self.results[self.hash][1].upper())
                 hash_results += "\n%sPlain Text: %s" % (spacer, self.results[self.hash][0])
                 LOGGER.info(hash_results)
                 self.cracked = True
     return self.cracked  # callers compare against False, so report the result explicitly
Example #14
 def try_all_algorithms(self):
     """ Try every algorithm available on the computer using the 'algorithms_available' functions from hashlib
     an example of this functions would be:
     >>> print(hashlib.algorithms_available)
     set(['SHA1', 'SHA224', 'SHA', 'SHA384', ...])
     >>> HashCracker("9a8b1b7eee229046fc2701b228fc2aff", type=None).try_all_algorithms()
     {..., 'dc1e4c61bea0e5390c140fb1299a68a0f31b7af51f90abbd058f09689a8bb823': ['1 endow', 'sha256'],
     '362b004395a3f52d9a0132868bd180bd': ['17 fellowship', 'MD5'],
     '03195f6b6fa8dc1951f4944aed8cc4582cd72321': ['lovingkindness', 'RIPEMD160'], ..."""
     for alg in hashlib.algorithms_available:
         for word in self.words:
             data = hashlib.new(alg)
             data.update(word.strip())
             self.results[data.hexdigest()] = [word.strip(), alg]
     LOGGER.info("Created %i hashes, verifying against given hash (%s)" % (len(self.results), self.hash))
     result = self.verify_hashes()  # run the verification once and reuse the result
     if result is False:
         LOGGER.fatal("Unable to verify hash: %s" % self.hash)
     else:
         return result
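Both cracking paths reduce to the same dictionary attack: hash every candidate word and compare the digest against the target. A compact sketch of that loop, with an inline word list standing in for Pybelt's wordlist file:

import hashlib

def crack(target_hash, words, algorithm="md5"):
    """ Return (word, algorithm) if some word hashes to target_hash, else None """
    for word in words:
        data = hashlib.new(algorithm)  # fresh hash object per candidate word
        data.update(word.strip().encode("utf-8"))
        if data.hexdigest() == target_hash:
            return word.strip(), algorithm
    return None

# md5("test") is 098f6bcd4621d373cade4e832627b4f6, the hash used in Pybelt's own -c example
print(crack("098f6bcd4621d373cade4e832627b4f6", ["hello", "test", "secret"]))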
Example #15
class RunScanThread(PortScanner):
    def run_scan(self):
        start_time = time.time()

        def scn(port):
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.settimeout(3)  # set the timeout before connecting so it actually applies
                res = sock.connect_ex((self.host, port))
                if res == 0:
                    LOGGER.info("[*] Open: {}  {}".format(
                        port, RESERVED_PORTS[port]))
                    self.connection_made.append(port)  # connection_made is a list (see PortScanner)
                sock.close()
            except Exception as e:
                print(e)

        q = Queue.Queue()

        def threader():
            # keep pulling ports until the queue is drained (one port per get)
            while True:
                worker = q.get()
                scn(worker)
                q.task_done()

        for x in range(200):
            t = threading.Thread(target=threader)
            t.daemon = True
            t.start()

        for worker in RESERVED_PORTS.keys():
            q.put(worker)

        q.join()

        stop_time = time.time()
        no_ports = "\033[91mNo ports available or open\033[0m"
        LOGGER.info("Completed in {} seconds".format(
            str(stop_time - start_time)))
        LOGGER.info("Ports readily available: {}".format(''.join(
            str(self.connection_made if self.
                connection_made is not "" else no_ports))))
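The worker-pool shape in RunScanThread (a Queue of ports drained by daemon threads) can be isolated into a few lines. A sketch of that pattern with a no-op handler in place of scn; the try/except import keeps it runnable on Python 2 (which names the module Queue, as above) and Python 3:

import threading
try:
    import Queue as queue   # Python 2, as in the example above
except ImportError:
    import queue            # Python 3

def worker(q, handle):
    # each daemon thread keeps pulling ports until the queue is drained
    while True:
        port = q.get()
        handle(port)
        q.task_done()

q = queue.Queue()
for _ in range(4):
    t = threading.Thread(target=worker, args=(q, lambda port: None))
    t.daemon = True
    t.start()

for port in (21, 22, 80, 443):
    q.put(port)
q.join()  # blocks until every queued port has been handled
print("all ports processed")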
Example #16
def run_xss_scan(url, proxy=None, user_agent=False):
    """ Pointer to run a XSS Scan on a given URL """
    if QUERY_REGEX.match(url):
        proxy = proxy if proxy is not None else None
        header = RANDOM_USER_AGENT if user_agent is not False else None
        if proxy is not None:
            LOGGER.info("Proxy configured, running through: {}".format(proxy))
        if user_agent is True:
            LOGGER.info("Grabbed random user agent: {}".format(header))
        LOGGER.info("Searching: {} for XSS vulnerabilities..".format(
            url, proxy=proxy, headers=header))
        if not xss.main(url, proxy=proxy, headers=header):
            LOGGER.error(
                "{} does not appear to be vulnerable to XSS".format(url))
        else:
            LOGGER.info("{} seems to be vulnerable to XSS.".format(url))
    else:
        error_message = "The URL you provided does not contain a query "
        error_message += "(GET) parameter. In order for this scan you run "
        error_message += "successfully you will need to provide a URL with "
        error_message += "A query (GET) parameter example: http://127.0.0.1/php?id=2"
        LOGGER.fatal(error_message)
Example #17
def run_dork_checker(dork, dork_file=None, proxy=None):
    """ Pointer to run a Dork Check on a given Google Dork """
    if dork is not None:
        LOGGER.info("Starting dork scan, using query: '{}'..".format(dork))
        try:
            LOGGER.info(
                DorkScanner(dork, dork_file=dork_file,
                            proxy=proxy).check_urls_for_queries())
        except HTTPError:
            LOGGER.fatal(GoogleBlockException(GOOGLE_TEMP_BLOCK_ERROR_MESSAGE))
    elif dork is None and dork_file is not None:
        if proxy is None:
            proxy_warn = "It is advised to use proxies while running "
            proxy_warn += "a dork list due to the temporary Google "
            proxy_warn += "bans.."
            LOGGER.warning(proxy_warn)
            question = prompt(
                "Would you like to find proxies with the built in finder first[y/N]: "
            )
            if question.upper().startswith("Y"):
                subprocess.call(["python", "pybelt.py", "-f"])
            else:
                pass
        try:
            with open("{}".format(dork_file)) as dork_list:
                for dork in dork_list.readlines():
                    LOGGER.info("Starting dork scan on {}..".format(
                        dork.strip()))
                    LOGGER.info(
                        DorkScanner(dork, dork_file=dork_file,
                                    proxy=proxy).check_urls_for_queries())
        except HTTPError:
            LOGGER.fatal(GoogleBlockException(GOOGLE_TEMP_BLOCK_ERROR_MESSAGE))
        except IOError:
            LOGGER.fatal(
                "The filename {} does not exist, please verify path and try again"
                .format(dork_file))
Example #18
def attempt_to_connect_to_proxies():
    """ Attempted connections to the proxies pulled from the JSON data """
    results = []
    prox_info = connect_and_pull_info()
    for i in prox_info:  # iterate the dict keys directly instead of assuming they start at 1
        if prox_info[i]["type"] == "HTTP":
            candidate = "{}://{}:{}".format(prox_info[i]["type"],
                                            prox_info[i]["ip"],
                                            prox_info[i]["port"])
            opener = urllib2.build_opener(urllib2.ProxyHandler({"http": candidate}))
            urllib2.install_opener(opener)
            request = urllib2.Request("http://google.com")
            try:
                start_time = time.time()
                urllib2.urlopen(request, timeout=10)
                stop_time = time.time() - start_time
                LOGGER.info("Successful: {}\n\t\tLatency: {}s\n\t\tOrigin: {}\n\t\tAnonymity: {}\n\t\tType: {}".format(
                    candidate.lower(), stop_time, prox_info[i]["country"],
                    prox_info[i]["anonymity"], prox_info[i]["type"]
                ))
                results.append("http://" + prox_info[i]["ip"] + ":" + prox_info[i]["port"])
            except (urllib2.HTTPError, urllib2.URLError, socket.timeout,
                    httplib.BadStatusLine, socket.error):
                pass  # proxy is unusable, move on to the next candidate
    amount = len(results)
    if amount != 0:
        LOGGER.info("Found a total of {} proxies.".format(len(results)))
        filename = create_random_filename()
        create_dir(PROXY_SCAN_RESULTS)
        with open(PROXY_SCAN_RESULTS + "/" + filename + ".txt", "a+") as res:
            for prox in results:
                res.write(prox + "\n")
        LOGGER.info("Results saved to: {}".format(PROXY_SCAN_RESULTS + "/" + filename + ".txt"))
    else:
        pass
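Example #18 installs a global opener with urllib2.install_opener before each request; the same latency check can also be done with a per-call opener, which avoids mutating global state. A hedged sketch of that check (the proxy address and test URL are illustrative):

import time
try:
    import urllib2                    # Python 2, as used above
except ImportError:
    import urllib.request as urllib2  # Python 3 equivalent

def proxy_latency(proxy_url, test_url="http://google.com", timeout=10):
    """ Return the round-trip time through proxy_url, or None if it fails """
    opener = urllib2.build_opener(urllib2.ProxyHandler({"http": proxy_url}))
    start = time.time()
    try:
        opener.open(test_url, timeout=timeout)  # use the opener directly, no install_opener
        return time.time() - start
    except Exception:                           # bad proxy, timeout, DNS failure, ...
        return None

print(proxy_latency("http://127.0.0.1:8080"))   # illustrative proxy address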
Example #19
def run_hash_verification(hash_to_verify, hash_ver_file=None):
    """ Pointer to run the Hash Verification system"""
    if hash_ver_file is not None and hash_to_verify is None:
        try:
            total = len(open(hash_ver_file).readlines())
            LOGGER.info("Found a total of {} hashes in file..".format(total))
        except IOError:
            LOGGER.critical(
                "That file does not exist, check path and try again.")
            return  # bail out here, otherwise the open() below raises the same error

        with open(hash_ver_file, "r+") as hashes:
            for h in hashes.readlines():
                question = prompt("Attempt to verify '{}'[y/N]: ".format(
                    h.strip()))
                if question.startswith("y"):
                    LOGGER.info("Analyzing hash: '{}'".format(h.strip()))
                    HashChecker(h.strip()).obtain_hash_type()
                    print("\n")
                else:
                    LOGGER.warning("Skipping '{}'..".format(h.strip()))
    else:
        LOGGER.info("Analyzing hash: '{}'".format(hash_to_verify))
        HashChecker(hash_to_verify).obtain_hash_type()
Example #20
    opts.add_argument('--version', action="store_true", dest="version",
                      help="Show the version number and exit")
    opts.add_argument('--rand-wordlist', action="store_true", dest="random_wordlist",
                      help="Create a random wordlist to use for dictionary attacks"),
    opts.add_argument("--proxy", metavar="PROXY", dest="configProxy",
                      help="Configure the program to use a proxy when connecting")
    opts.add_argument('--rand-agent', action="store_true", dest="randomUserAgent",
                      help="Use a random user agent from a file list")
    args = opts.parse_args()

    if args.legal is False:
        print(BANNER + "\033[91m{}\033[0m".format(LEGAL_DISC) + "\n")
    else:
        print(BANNER + "\033[91m{}\033[0m".format(LONG_LEGAL_DISCLAIMER) + "\n")

    try:
        if args.version is True:  # Show the version number and exit
            LOGGER.info(VERSION_STRING)
            sys.exit(0)

        if args.random_wordlist is True:  # Create a random wordlist
            LOGGER.info("Creating a random wordlist..")
            create_wordlist(random.choice(WORDLIST_LINKS))
            LOGGER.info("Wordlist created, resuming process..")

        if args.proxysearch is True:  # Find some proxies
            LOGGER.info("Starting proxy search..")
            attempt_to_connect_to_proxies()

        if args.hashcheck is not None:  # Check what hash type you have
            LOGGER.info("Analyzing hash: '{}'".format(args.hashcheck))
            HashChecker(args.hashcheck).obtain_hash_type()
Example #21
    opts.add_argument('--version',
                      action="store_true",
                      dest="version",
                      help="Show the version number and exit")
    opts.add_argument('--rand-wordlist',
                      action="store_true",
                      dest="random_wordlist",
                      help=argparse.SUPPRESS)
    args = opts.parse_args()

    if args.legal is False:
        print(BANNER + "\033[91m{}\033[0m".format(LEGAL_DISC) + "\n")
    else:
        print(BANNER + "\033[91m{}\033[0m".format(LONG_LEGAL_DISCLAIMER) + "\n")

    try:
        if args.version is True:
            LOGGER.info(VERSION_STRING)
            sys.exit(0)

        if args.random_wordlist is True:
            LOGGER.info("Creating a random wordlist..")
            create_wordlist(random.choice(WORDLIST_LINKS))
            LOGGER.info("Wordlist created, resuming process..")

        if args.sqliscan is not None:
            if QUERY_REGEX.match(args.sqliscan):
                LOGGER.info("Starting SQLi scan on {}..".format(args.sqliscan))
                LOGGER.info(SQLiScanner(args.sqliscan).sqli_search())
            else:
                LOGGER.error(
                    "URL does not contain a query (GET) parameter. Example: http://example.com/php?id=2"
                )
Example #22
def run_xss_scan(url, url_file=None, proxy=None, user_agent=False):
    """ Pointer to run a XSS Scan on a given URL """
    proxy = proxy if proxy is not None else None
    header = RANDOM_USER_AGENT if user_agent is not False else None
    if proxy is not None:
        LOGGER.info("Proxy configured, running through: {}".format(proxy))
    if user_agent is True:
        LOGGER.info("Grabbed random user agent: {}".format(header))

    if url_file is not None:  # Scan a given file full of URLS
        file_path = url_file
        done = 0
        try:
            total = len(open(url_file).readlines())
            LOGGER.info("Found a total of {} URLS to scan..".format(total))
            with open(file_path) as urls:
                for url in urls.readlines():
                    if QUERY_REGEX.match(url.strip()):
                        question = prompt(
                            "Would you like to scan '{}' for XSS vulnerabilities[y/N]: "
                            .format(url.strip()))
                        if question.lower().startswith("y"):
                            done += 1

                            try:
                                if not xss.main(url.strip(),
                                                proxy=proxy,
                                                headers=header):
                                    LOGGER.info(
                                        "URL '{}' does not appear to be vulnerable to XSS"
                                        .format(url.strip()))
                                else:
                                    LOGGER.info(
                                        "URL '{}' appears to be vulnerable to XSS"
                                        .format(url.strip()))
                            except ConnectionError:
                                LOGGER.warning(
                                    "{} failed to respond, skipping..".format(
                                        url.strip()))

                            LOGGER.info(
                                "URLS scanned: {}, URLS left: {}".format(
                                    done, total - done))
                        else:
                            done += 1
                            pass
                    else:
                        done += 1
                        LOGGER.warning(
                            "URL '{}' does not contain a query (GET) parameter, skipping.."
                            .format(url.strip()))
            LOGGER.info("All URLS in file have been scanned, shutting down..")
        except IOError:
            LOGGER.fatal(
                "That file does not exist, verify path and try again.")

    else:  # Scan a single URL
        if QUERY_REGEX.match(url):
            LOGGER.info("Searching: {} for XSS vulnerabilities..".format(
                url, proxy=proxy, headers=header))
            if not xss.main(url, proxy=proxy, headers=header):
                LOGGER.error(
                    "{} does not appear to be vulnerable to XSS".format(url))
            else:
                LOGGER.info("{} seems to be vulnerable to XSS.".format(url))
        else:
            error_message = "The URL you provided does not contain a query "
            error_message += "(GET) parameter. In order for this scan you run "
            error_message += "successfully you will need to provide a URL with "
            error_message += "A query (GET) parameter example: http://127.0.0.1/php?id=2"
            LOGGER.fatal(error_message)
Example #23
def run_proxy_finder():
    """ Pointer to run Proxy Finder """
    LOGGER.info("Starting proxy search..")
    attempt_to_connect_to_proxies()
Example #24
def run_hash_verification(hash_to_verify):
    """ Pointer to run the Hash Verification system"""
    LOGGER.info("Analyzing hash: '{}'".format(hash_to_verify))
    HashChecker(hash_to_verify).obtain_hash_type()
Example #25
                      help="Create a random wordlist to use for dictionary attacks"),
    opts.add_argument('--rand-agent', action="store_true", dest="randomUserAgent",
                      help="Use a random user agent from a file list")

    opts.add_argument('--anon', metavar="ANON", dest="anonLvl",
                      help=argparse.SUPPRESS)
    opts.add_argument('--hash-list', metavar="FILE", dest="hashList",
                      help=argparse.SUPPRESS)
    opts.add_argument('--tamper', metavar="SCRIPT", dest="tamper",
                      help=argparse.SUPPRESS)
    args = opts.parse_args()

    if args.version is False:
        hide_banner(hide=True if args.banner else False,
                    legal=True if args.legal else False)
    else:
        hide_banner(hide=True)

    LOGGER.info("Checking program integrity..")

    try:
        integrity_check()
    except HTTPError:
        check_fail = "Integrity check failed to connect "
        check_fail += "you are running a non verified "
        check_fail += "Pybelt, this may or may not be insecure. "
        check_fail += "Suggestion would be to re-download Pybelt from "
        check_fail += "{}"
        LOGGER.error(check_fail.format(CLONE_LINK))
        answer = prompt("Would you like to continue anyways[y/N] ")
        if answer.upper().startswith("Y"):
            pass
        else:
            err_msg = "Please download the latest version from "
Example #26
def run_sqli_scan(url,
                  url_file=None,
                  proxy=None,
                  user_agent=False,
                  tamper=None):
    """ Pointer to run a SQLi Scan on a given URL """
    error_message = "URL: '{}' threw an exception {} "
    error_message += "and Pybelt is unable to resolve the URL, "
    error_message += "this could mean that the URL is not allowing connections "
    error_message += "or that the URL is bad. Attempt to connect "
    error_message += "to the URL manually, if a connection occurs "
    error_message += "make an issue."
    if url_file is not None:  # Run through a file list
        file_path = url_file
        done = 0
        try:
            total = len(open(file_path).readlines())
            LOGGER.info("Found a total of {} urls in file {}..".format(
                total, file_path))
            with open(file_path) as urls:
                for url in urls.readlines():
                    if QUERY_REGEX.match(url.strip()):
                        question = prompt(
                            "Would you like to scan '{}' for SQLi vulnerabilities[y/N]: "
                            .format(url.strip()))
                        if question.lower().startswith("y"):
                            LOGGER.info("Starting scan on url: '{}'".format(
                                url.strip()))
                            try:
                                LOGGER.info(
                                    SQLiScanner(url.strip()).sqli_search())
                                done += 1
                                LOGGER.info(
                                    "URLS scanned: {}, URLS left: {}".format(
                                        done, total - done))
                            except urllib2.URLError:
                                done += 1
                                LOGGER.warning(
                                    "{} did not respond, skipping..".format(
                                        url.strip()))
                        else:
                            done += 1
                    else:
                        done += 1
                        LOGGER.warning(
                            "URL '{}' does not contain a query (GET) parameter, skipping.."
                            .format(url.strip()))
            LOGGER.info("No more URLS found in file, shutting down..")
        except HTTPError as e:
            LOGGER.fatal(error_message.format(url.strip(), e))
        except IOError as e:
            print(e)
            LOGGER.fatal(
                "That file does not exist, verify path and try again.")

    else:  # Run a single URL
        try:
            if QUERY_REGEX.match(url):
                LOGGER.info("Starting SQLi scan on '{}'..".format(url))
                LOGGER.info(SQLiScanner(url).sqli_search())
            else:
                LOGGER.error(
                    "URL does not contain a query (GET) parameter. Example: http://example.com/php?id=2"
                )
        except HTTPError as e:
            LOGGER.fatal(error_message.format(url, e))