class PortScanner(object):
    """Sequential TCP connect() scanner for every port in RESERVED_PORTS."""

    # NOTE(review): class-level mutable — shared by every instance (and by
    # the RunScanThread subclass), so results accumulate across scans in
    # the same process. Kept class-level for backward compatibility.
    connection_made = []  # Connection made in list form

    def __init__(self, host):
        self.host = host  # IP address or hostname to scan
        self.ports = RESERVED_PORTS

    def connect_to_host(self):
        """Attempt a TCP connection to each reserved port, logging open ones."""
        start_time = time.time()
        try:
            for port in RESERVED_PORTS.keys():
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                # connect_ex returns an errno instead of raising; 0 == open
                res = sock.connect_ex((self.host, port))
                if res == 0:
                    LOGGER.info("[*] Open: {} {}".format(
                        port, RESERVED_PORTS[port]))
                    self.connection_made.append(port)
                sock.close()  # close whether or not the port was open
        except Exception as e:  # fixed: Py2-only "except Exception, e" syntax
            print(e)
        stop_time = time.time()
        LOGGER.info("Completed in {} seconds".format(
            str(stop_time - start_time)))
        # ''.join(...) over an already-built str was a no-op; dropped.
        LOGGER.info("Ports readily available: {}".format(
            str(self.connection_made)))
def create_packet():
    """Return a raw IP socket, or log a fatal error (and return None) on failure.

    Requires elevated privileges on most platforms (SOCK_RAW).
    """
    try:
        return socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW)
    except socket.error as e:  # fixed: Py2-only "except socket.error, e" syntax
        error_message = "Unable to create raw IP packet."
        error_message += " IP packet failed with error code: {}".format(e)
        LOGGER.fatal(error_message)
def connect_to_host(self):
    """Launch the threaded port scan (RunScanThread.run_scan) in a worker thread."""
    try:
        # Calling the thread class
        rst = RunScanThread(self.host)
        t2 = threading.Thread(target=(rst.run_scan))
        t2.start()
    except Exception as e:  # fixed: Py2-only "except Exception, e" syntax
        LOGGER.error(e)
def enumerate_hash_types(items, max_likeliest=3):
    """Print candidate hash types, ranking the first few as most likely.

    The first *max_likeliest* entries are shown in green as "most likely";
    the remainder in yellow as "least likely", separated by a blank line.
    """
    LOGGER.info("{} possible hash types found..".format(len(items)))
    for rank, hash_type in enumerate(items, start=1):
        if rank > max_likeliest:
            print("\033[33m[*] Least likely possible hash type: {}\033[0m".
                  format(hash_type))
            continue
        print("\033[92m[*] Most likely possible hash type: {}\033[0m".
              format(hash_type))
        if rank == max_likeliest:
            print("")  # blank separator between likely and unlikely groups
def scn(port):
    """Probe a single TCP port on self.host and record it if open.

    NOTE(review): references ``self`` without receiving it — this was
    extracted from a closure inside a method; it only works where ``self``
    is in the enclosing scope.
    """
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # fixed: timeout must be set BEFORE connect_ex, or it never applies
        sock.settimeout(3)
        res = sock.connect_ex((self.host, port))
        if res == 0:  # errno 0 == connection succeeded
            LOGGER.info("[*] Open: {} {}".format(port, RESERVED_PORTS[port]))
            self.connection_made.append(port)
        sock.close()
    except Exception as e:  # fixed: Py2-only "except Exception, e" syntax
        print(e)
def connect_and_pull_info():
    """ Connect to the proxy source and pull the proxies in JSON form """
    results = {}
    count = 0
    data = json.loads(urllib2.urlopen(PROXY_URL).read())
    # fixed: a fixed range(0, 30) raised IndexError whenever the source
    # returned fewer than 30 proxies — clamp to what was actually received
    for i in range(0, min(30, len(data))):
        count += 1
        results[count] = data[i]  # keys are 1-based positions
    LOGGER.info(
        "Found {} possible proxies, moving to connection attempts..".format(
            len(results)))
    return results
def connect_to_host(self):
    """Attempt a TCP connection to each reserved port, logging open ones."""
    start_time = time.time()
    try:
        for port in RESERVED_PORTS.keys():
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            # connect_ex returns an errno instead of raising; 0 == open
            res = sock.connect_ex((self.host, port))
            if res == 0:
                LOGGER.info("[*] Open: {} {}".format(
                    port, RESERVED_PORTS[port]))
                self.connection_made.append(port)
            sock.close()  # close whether or not the port was open
    except Exception as e:  # fixed: Py2-only "except Exception, e" syntax
        print(e)
def obtain_hash_type(self):
    """Match self.hash against every known hash regex and report candidates.

    Sets ``self.found`` and enumerates the candidate algorithms for each
    pattern that matches; logs an explanatory error when nothing matches.
    """
    for algorithm in HASH_TYPE_REGEX:
        if not algorithm.match(self.hash):
            continue
        self.found = True
        self.enumerate_hash_types(HASH_TYPE_REGEX[algorithm])
    if self.found is False:
        # No pattern matched — explain the possible reasons to the user.
        parts = (
            "Unable to verify hash type ",
            "for hash: '{}'. This could mean ".format(self.hash),
            "that this is not a valid hash, or that ",
            "this hash is not supported by Pybelt ",
            "yet. If you feel this should be supported ",
            "make an issue regarding this hash.",
        )
        LOGGER.error("".join(parts))
    return
def check_urls_for_queries(self):
    """ The returned URLS will be run through a query regex
    to see if they have a query parameter
    http://google.com <- False
    http://example.com/php?id=2 <- True
    """
    filename = settings.create_random_filename()
    LOGGER.info("File being saved to: {}".format(filename))
    # fixed: count matches while writing instead of re-opening and re-reading
    # the file afterwards (the old open(...).readlines() leaked the handle)
    amount_of_urls = 0
    with open("{}\\{}.txt".format(settings.DORK_SCAN_RESULTS_PATH, filename),
              "a+") as results:
        for url in self.connect_to_search_engine():
            match = settings.QUERY_REGEX.match(url)
            # Match by regex for anything that has a ?<PARAM>= in it
            if match:
                results.write(url + "\n")
                amount_of_urls += 1
    # NOTE(review): rough "success rate" bucketing — can exceed 100% for
    # 10+ hits; preserved as-is since callers display it verbatim.
    success_rate = ((amount_of_urls // 10) + 1) * 10
    return "Found a total of {} usable links with query (GET) parameters, urls have been saved to {}\\{}.txt. " \
           "This Dork has a success rate of {}%".format(amount_of_urls, settings.DORK_SCAN_RESULTS_PATH,
                                                        filename, success_rate)
def connect_and_pull_info():
    """ Connect to the proxy source and pull the proxies in JSON form """
    results = {}
    count = 0
    data = json.loads(urllib2.urlopen(PROXY_URL).read())
    for i in range(0, 60):
        try:
            results[count] = data[i]  # keys are 0-based positions
            count += 1
        except IndexError:
            # fixed: once the data runs out every later index fails too —
            # stop instead of uselessly probing the remaining indices
            break
    amount = len(results)
    if amount != 0:
        LOGGER.info("Found {} possible proxies, moving to connection attempts..".format(len(results)))
    else:
        LOGGER.warning("No usable proxies discovered")
    return results
def try_certain_algorithm(self):
    """ Use a certain type of algorithm to do the hashing, md5, sha256, etc..

    >>> HashCracker("9a8b1b7eee229046fc2701b228fc2aff", type="md5").try_certain_algorithm()
    {... ,'9a8b1b7eee229046fc2701b228fc2aff': ['want', 'md5'], ...}
    """
    for word in self.words:
        # fixed: the hash object was created ONCE outside the loop, so each
        # entry was the digest of all words concatenated so far — only the
        # first word produced a correct hash. A fresh object per word
        # matches try_all_algorithms and the docstring example above.
        data = hashlib.new(self.type)
        data.update(word.strip())
        self.results[data.hexdigest()] = [word.strip(), self.type]
    LOGGER.info("Created %i hashes to verify.." % len(self.results.keys()))
    if self.verify_hashes() is False:
        error_message = "Unable to verify %s against %i different hashes." % (
            self.hash, len(self.results))
        error_message += " You used algorithm: %s you can attempt all algorithms " % str(
            self.type).upper()
        error_message += "available on the system by running with 'all' as the hash type. "
        error_message += "IE: python pybelt.py -c 9a8b1b7eee229046fc2701b228fc2aff:all"
        LOGGER.fatal(error_message)
        exit(1)
def verify_xss_vulnerable(context, scripted_url):
    """Check whether the injected script survives in the page HTML.

    If the payload (everything after the first ``=`` in *scripted_url*)
    appears verbatim in *context*, the page did not escape it and is
    considered vulnerable. Otherwise retry once with a tampered payload.

    >>> verify_xss_vulnerable("<HTML>" ,"https://www.google.com/webhp?safe=1")
    False
    """
    payload = scripted_url.split("=")[1]
    if payload in context:
        return True
    # Plain payload was filtered — tamper it and fetch the page again.
    LOGGER.warning("Basic tests failed, moving to tampered data..")
    tampered = tamper_payload(scripted_url)
    tampered_context = get_context(tampered)
    tampered_script = tampered.split("=")[1]
    return tampered_script in tampered_context
def verify_hashes(self):
    """ Verify if the hashes match, as long as the hash is in the results
    dict, it will be found

    >>> print(self.results)
    {... ,'9a8b1b7eee229046fc2701b228fc2aff': ['want', 'md5'], ... }
    >>> HashCracker("9a8b1b7eee229046fc2701b228fc2aff", type="md5").verify_hashes()
    [06:08:49 INFO] Original Hash: 9a8b1b7eee229046fc2701b228fc2aff
                    Algorithm Used: MD5
                    Plain Text: want

    Returns False when the hash is not among the generated results, so the
    callers' ``if self.verify_hashes() is False`` error paths actually fire.
    """
    spacer = " " * 16  # aligns continuation lines under the log prefix
    # fixed: the old "while self.cracked is False" loop could spin forever
    # when the hash was absent, and the function never returned False even
    # though every caller tests for it. A dict membership check is O(1).
    if self.cracked is False:
        if self.hash in self.results:
            hash_results = "Original Hash: %s" % self.hash
            hash_results += "\n%sAlgorithm Used: %s" % (
                spacer, self.results[self.hash][1].upper())
            hash_results += "\n%sPlain Text: %s" % (
                spacer, self.results[self.hash][0])
            LOGGER.info(hash_results)
            self.cracked = True
        else:
            return False
    return
def try_all_algorithms(self):
    """ Try every algorithm available on the computer using the
    'algorithms_available' functions from hashlib
    an example of this functions would be:

    >>> print(hashlib.algorithms_available)
    set(['SHA1', 'SHA224', 'SHA', 'SHA384', ...])
    >>> HashCracker("9a8b1b7eee229046fc2701b228fc2aff", type=None).try_all_algorithms()
    {..., 'dc1e4c61bea0e5390c140fb1299a68a0f31b7af51f90abbd058f09689a8bb823':
    ['1 endow', 'sha256'], '362b004395a3f52d9a0132868bd180bd':
    ['17 fellowship', 'MD5'],
    '03195f6b6fa8dc1951f4944aed8cc4582cd72321': ['lovingkindness', 'RIPEMD160'], ..."""
    for alg in hashlib.algorithms_available:
        for word in self.words:
            data = hashlib.new(alg)  # fresh digest per (algorithm, word) pair
            data.update(word.strip())
            self.results[data.hexdigest()] = [word.strip(), alg]
    LOGGER.info("Created %i hashes, verifying against given hash (%s)" %
                (len(self.results), self.hash))
    # fixed: verify_hashes() was invoked twice (once for the check, once
    # for the return value) — run the verification exactly once
    outcome = self.verify_hashes()
    if outcome is False:
        LOGGER.fatal("Unable to verify hash: %s" % self.hash)
    else:
        return outcome
class RunScanThread(PortScanner):
    """Threaded variant of PortScanner: 200 daemon workers drain a port queue.

    NOTE(review): in this variant ``connection_made`` is used as a string
    ("port, port, ..."), unlike the list in the sequential scanner.
    """

    def run_scan(self):
        """Scan every reserved port concurrently and log the open ones."""
        start_time = time.time()

        def scn(port):
            # Probe one port; record it in connection_made when open.
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                # fixed: timeout must be set BEFORE connect_ex to take effect
                sock.settimeout(3)
                res = sock.connect_ex((self.host, port))
                if res == 0:
                    LOGGER.info("[*] Open: {} {}".format(
                        port, RESERVED_PORTS[port]))
                    self.connection_made += "{}, ".format(port)
                sock.close()
            except Exception as e:  # fixed: Py2-only except syntax
                print(e)

        q = Queue.Queue()

        def threader():
            # fixed: each worker previously handled exactly ONE item and
            # exited, so q.join() would deadlock whenever there were more
            # ports than threads — loop until the process ends (daemon).
            while True:
                worker = q.get()
                scn(worker)
                q.task_done()

        for x in range(200):
            t = threading.Thread(target=threader)
            t.daemon = True  # don't block interpreter shutdown
            t.start()
        for worker in RESERVED_PORTS.keys():
            q.put(worker)
        q.join()  # wait for every queued port to be probed
        stop_time = time.time()
        no_ports = "\033[91mNo ports available or open\033[0m"
        LOGGER.info("Completed in {} seconds".format(
            str(stop_time - start_time)))
        # fixed: 'is not ""' compared identity, not equality; also dropped
        # the no-op ''.join(str(...)) wrapper
        LOGGER.info("Ports readily available: {}".format(
            self.connection_made if self.connection_made != "" else no_ports))
def run_dork_checker(dork):
    """ Pointer to run a Dork Check on a given Google Dork """
    LOGGER.info("Starting dork scan, using query: '{}'..".format(dork))
    try:
        scanner = DorkScanner(dork)
        LOGGER.info(scanner.check_urls_for_queries())
    except HTTPError:
        # Google rate-limits dork queries aggressively; surface that clearly.
        LOGGER.fatal(GoogleBlockException(GOOGLE_TEMP_BLOCK_ERROR_MESSAGE))
def run_hash_verification(hash_to_verify, hash_ver_file=None):
    """ Pointer to run the Hash Verification system"""
    if hash_ver_file is not None and hash_to_verify is None:
        try:
            # use a context manager so the counting handle is closed
            with open(hash_ver_file) as f:
                total = len(f.readlines())
            LOGGER.info("Found a total of {} hashes in file..".format(total))
        except IOError:
            LOGGER.critical(
                "That file does not exist, check path and try again.")
            # fixed: previously fell through and crashed re-opening the
            # same missing file below
            return
        with open(hash_ver_file, "r+") as hashes:
            for h in hashes.readlines():
                question = prompt("Attempt to verify '{}'[y/N]: ".format(
                    h.strip()))
                # fixed: accept "Y" as well, matching the other prompts
                if question.lower().startswith("y"):
                    LOGGER.info("Analyzing hash: '{}'".format(h.strip()))
                    HashChecker(h.strip()).obtain_hash_type()
                    print("\n")
                else:
                    LOGGER.warning("Skipping '{}'..".format(h.strip()))
    else:
        LOGGER.info("Analyzing hash: '{}'".format(hash_to_verify))
        HashChecker(hash_to_verify).obtain_hash_type()
def run_sqli_scan(url, proxy=None, user_agent=False):
    """ Pointer to run a SQLi Scan on a given URL """
    try:
        if not QUERY_REGEX.match(url):
            # A GET parameter is required for injection testing.
            LOGGER.error(
                "URL does not contain a query (GET) parameter. Example: http://example.com/php?id=2"
            )
            return
        LOGGER.info("Starting SQLi scan on '{}'..".format(url))
        LOGGER.info(SQLiScanner(url).sqli_search())
    except HTTPError as e:
        parts = (
            "URL: '{}' threw an exception: '{}' ".format(url, e),
            "and Pybelt is unable to resolve the URL, ",
            "this could mean that the URL is not allowing connections ",
            "or that the URL is bad. Attempt to connect ",
            "to the URL manually, if a connection occurs ",
            "make an issue.",
        )
        LOGGER.fatal("".join(parts))
def run_hash_cracker(hash_to_crack):
    """ Pointer to run the Hash Cracking system """
    try:
        # Expected input shape: "<hash>:<algorithm>" (or "<hash>:all").
        items = list(''.join(hash_to_crack).split(":"))
        digest, algorithm = items[0], items[1]
        if algorithm == "all":
            LOGGER.info(
                "Starting hash cracking without knowledge of algorithm...")
            HashCracker(digest).try_all_algorithms()
        else:
            LOGGER.info("Starting hash cracking using %s as algorithm type.."
                        % algorithm)
            HashCracker(digest, type=algorithm).try_certain_algorithm()
    except IndexError:
        # No ":<type>" suffix supplied.
        error_message = "You must specify a hash type in order for this to work. "
        error_message += "Example: 'python pybelt.py -c 098f6bcd4621d373cade4e832627b4f6:md5'"
        LOGGER.fatal(error_message)
def attempt_to_connect_to_proxies():
    """ Attempted connections to the proxies pulled from the JSON data """
    results = []
    prox_info = connect_and_pull_info()
    # fixed: enumerate(start=1) assumed the dict keys were 1..N; iterating
    # the keys directly stays correct whether they start at 0 or 1
    for i in prox_info:
        if prox_info[i]["type"] == "HTTP":
            candidate = "{}://{}:{}".format(prox_info[i]["type"], prox_info[i]["ip"], prox_info[i]["port"])
            opener = urllib2.build_opener(urllib2.ProxyHandler({"http": candidate}))
            urllib2.install_opener(opener)
            request = urllib2.Request("http://google.com")
            try:
                start_time = time.time()
                urllib2.urlopen(request, timeout=10)
                stop_time = time.time() - start_time
                LOGGER.info("Successful: {}\n\t\tLatency: {}s\n\t\tOrigin: {}\n\t\tAnonymity: {}\n\t\tType: {}".format(
                    candidate.lower(), stop_time, prox_info[i]["country"],
                    prox_info[i]["anonymity"], prox_info[i]["type"]
                ))
                results.append("http://" + prox_info[i]["ip"] + ":" + prox_info[i]["port"])
            # fixed: five identical except/pass arms collapsed into one;
            # unreachable proxies are simply skipped (best-effort by design)
            except (urllib2.HTTPError, urllib2.URLError, socket.timeout,
                    httplib.BadStatusLine, socket.error):
                pass
    amount = len(results)
    if amount != 0:
        LOGGER.info("Found a total of {} proxies.".format(len(results)))
        filename = create_random_filename()
        create_dir(PROXY_SCAN_RESULTS)
        with open(PROXY_SCAN_RESULTS + "/" + filename + ".txt", "a+") as res:
            for prox in results:
                res.write(prox + "\n")
        LOGGER.info("Results saved to: {}".format(PROXY_SCAN_RESULTS + "/" + filename + ".txt"))
# NOTE(review): script-level fragment — the trailing try: block continues
# beyond this chunk, so only comments are added here.
opts.add_argument('--version', action="store_true", dest="version",
                  help="Show the version number and exit")
# NOTE(review): stray trailing comma after this call creates a throwaway
# one-element tuple — harmless but presumably a typo; confirm before fixing.
opts.add_argument('--rand-wordlist', action="store_true", dest="random_wordlist",
                  help="Create a random wordlist to use for dictionary attacks"),
opts.add_argument("--proxy", metavar="PROXY", dest="configProxy",
                  help="Configure the program to use a proxy when connecting")
opts.add_argument('--rand-agent', action="store_true", dest="randomUserAgent",
                  help="Use a random user agent from a file list")
args = opts.parse_args()
# Show the short legal disclaimer by default, the long one with --legal.
print(BANNER + "\033[91m{}\033[0m".format(LEGAL_DISC) + "\n") if args.legal is False else \
    BANNER + "\033[91m{}\033[0m".format(LONG_LEGAL_DISCLAIMER + "\n")
try:
    if args.version is True:  # Show the version number and exit
        LOGGER.info(VERSION_STRING)
        sys.exit(0)
    if args.random_wordlist is True:  # Create a random wordlist
        LOGGER.info("Creating a random wordlist..")
        create_wordlist(random.choice(WORDLIST_LINKS))
        LOGGER.info("Wordlist created, resuming process..")
    if args.proxysearch is True:  # Find some proxies
        LOGGER.info("Starting proxy search..")
        attempt_to_connect_to_proxies()
    if args.hashcheck is not None:  # Check what hash type you have
        LOGGER.info("Analyzing hash: '{}'".format(args.hashcheck))
        HashChecker(args.hashcheck).obtain_hash_type()
def run_port_scan(host):
    """ Pointer to run a Port Scan on a given host """
    is_ip = re.search(IP_ADDRESS_REGEX, host) is not None
    is_plain_url = (re.search(URL_REGEX, host) is not None
                    and re.search(QUERY_REGEX, host) is None)
    if is_ip:
        LOGGER.info("Starting port scan on IP: {}".format(host))
        LOGGER.info(PortScanner(host).connect_to_host())
    elif is_plain_url:
        try:
            # Resolve the hostname before scanning.
            LOGGER.info("Fetching resolve IP...")
            ip_address = socket.gethostbyname(host)
            LOGGER.info("Done! IP: {}".format(ip_address))
            LOGGER.info("Starting scan on URL: {} IP: {}".format(
                host, ip_address))
            PortScanner(ip_address).connect_to_host()
        except socket.gaierror:
            hints = (
                "Unable to resolve IP address from {}.".format(host),
                " You can manually get the IP address and try again,",
                " dropping the query parameter in the URL (IE php?id=),",
                " or dropping the http or https",
                " and adding www in place of it. IE www.google.com",
                " may fix this issue.",
            )
            LOGGER.fatal("".join(hints))
    else:
        usage = (
            "You need to provide a host to scan,",
            " this can be given in the form of a URL ",
            "or a IP address.",
        )
        LOGGER.fatal("".join(usage))
def run_xss_scan(url, proxy=None, user_agent=False):
    """ Pointer to run a XSS Scan on a given URL """
    if QUERY_REGEX.match(url):
        # fixed: "proxy = proxy if proxy is not None else None" was a no-op
        header = RANDOM_USER_AGENT if user_agent is not False else None
        if proxy is not None:
            LOGGER.info("Proxy configured, running through: {}".format(proxy))
        if user_agent is True:
            LOGGER.info("Grabbed random user agent: {}".format(header))
        # fixed: str.format was handed dead proxy=/headers= kwargs that it
        # silently ignored — only the URL belongs in this message
        LOGGER.info("Searching: {} for XSS vulnerabilities..".format(url))
        if not xss.main(url, proxy=proxy, headers=header):
            LOGGER.error(
                "{} does not appear to be vulnerable to XSS".format(url))
        else:
            LOGGER.info("{} seems to be vulnerable to XSS.".format(url))
    else:
        error_message = "The URL you provided does not contain a query "
        error_message += "(GET) parameter. In order for this scan you run "
        error_message += "successfully you will need to provide a URL with "
        error_message += "A query (GET) parameter example: http://127.0.0.1/php?id=2"
        LOGGER.fatal(error_message)
def run_proxy_finder():
    """ Pointer to run Proxy Finder """
    LOGGER.info("Starting proxy search..")
    # Delegate the actual pull-and-probe work to the proxy module.
    attempt_to_connect_to_proxies()
# NOTE(review): fragment begins mid-call — the opts.add_argument(...) that
# this help= keyword belongs to starts outside this chunk; comments only.
                  help="Use a random user agent from a file list")
# Hidden/advanced flags (suppressed from --help output).
opts.add_argument('--anon', metavar="ANON", dest="anonLvl",
                  help=argparse.SUPPRESS)
opts.add_argument('--hash-list', metavar="FILE", dest="hashList",
                  help=argparse.SUPPRESS)
opts.add_argument('--dork-list', metavar="FILE", dest="dorkList",
                  help=argparse.SUPPRESS)
opts.add_argument('--tamper', metavar="SCRIPT", dest="tamper",
                  help=argparse.SUPPRESS)
args = opts.parse_args()
# Banner/legal display is skipped entirely when --version was given.
hide_banner(hide=True if args.banner else False,
            legal=True if args.legal else False) if args.version is False else hide_banner(hide=True)
LOGGER.info("Checking program integrity..")
integrity_check()
try:
    if len(sys.argv) == 1:  # If you failed to provide an argument
        prompt = pybelt_shell.PybeltConsole()  # Launch the shell
        prompt.prompt = "{}@pybelt > ".format(getpass.getuser())
        info_message = "You have failed to provide a flag so you have been "
        info_message += "redirected to the Pybelt Console. For available "
        info_message += "flags type: 'run -hh', to see help type: 'help' "
        info_message += "to exit the console type: 'quit'"
        try:
            # cmdloop's intro is the return value of LOGGER.info (None),
            # so the banner text is emitted via the logger instead.
            prompt.cmdloop(LOGGER.info(info_message))
        except TypeError as e:
            LOGGER.info("Terminating session...")
            exit(0)
def run_hash_verification(hash_to_verify):
    """ Pointer to run the Hash Verification system"""
    LOGGER.info("Analyzing hash: '{}'".format(hash_to_verify))
    # Hand the hash to the checker, which prints the candidate algorithms.
    checker = HashChecker(hash_to_verify)
    checker.obtain_hash_type()
def run_sqli_scan(url, url_file=None, proxy=None, user_agent=False, tamper=None):
    """ Pointer to run a SQLi Scan on a given URL """
    error_message = "URL: '{}' threw an exception {} "
    error_message += "and Pybelt is unable to resolve the URL, "
    error_message += "this could mean that the URL is not allowing connections "
    error_message += "or that the URL is bad. Attempt to connect "
    error_message += "to the URL manually, if a connection occurs "
    error_message += "make an issue."
    if url_file is not None:  # Run through a file list
        file_path = url_file
        done = 0
        try:
            # fixed: close the counting handle instead of leaking it
            with open(file_path) as f:
                total = len(f.readlines())
            LOGGER.info("Found a total of {} urls in file {}..".format(
                total, file_path))
            with open(file_path) as urls:
                for url in urls.readlines():
                    if QUERY_REGEX.match(url.strip()):
                        question = prompt(
                            "Would you like to scan '{}' for SQLi vulnerabilities[y/N]: "
                            .format(url.strip()))
                        if question.lower().startswith("y"):
                            LOGGER.info("Starting scan on url: '{}'".format(
                                url.strip()))
                            try:
                                LOGGER.info(
                                    SQLiScanner(url.strip()).sqli_search())
                                done += 1
                                LOGGER.info(
                                    "URLS scanned: {}, URLS left: {}".format(
                                        done, total - done))
                            except urllib2.URLError:
                                done += 1
                                LOGGER.warning(
                                    "{} did not respond, skipping..".format(
                                        url.strip()))
                        else:
                            done += 1  # user declined; count it as handled
                    else:
                        done += 1
                        # fixed: LOGGER.warn is a deprecated alias of warning
                        LOGGER.warning(
                            "URL '{}' does not contain a query (GET) parameter, skipping.."
                            .format(url.strip()))
            LOGGER.info("No more URLS found in file, shutting down..")
        except HTTPError as e:
            LOGGER.fatal(error_message.format(url.strip(), e))
        except IOError as e:
            print(e)  # fixed: Py2-only print statement
            LOGGER.fatal(
                "That file does not exist, verify path and try again.")
    else:  # Run a single URL
        try:
            if QUERY_REGEX.match(url):
                LOGGER.info("Starting SQLi scan on '{}'..".format(url))
                LOGGER.info(SQLiScanner(url).sqli_search())
            else:
                LOGGER.error(
                    "URL does not contain a query (GET) parameter. Example: http://example.com/php?id=2"
                )
        except HTTPError as e:
            LOGGER.fatal(error_message.format(url, e))
def run_xss_scan(url, url_file=None, proxy=None, user_agent=False):
    """ Pointer to run a XSS Scan on a given URL """
    proxy = proxy if proxy is not None else None
    header = RANDOM_USER_AGENT if user_agent is not False else None
    if proxy is not None:
        LOGGER.info("Proxy configured, running through: {}".format(proxy))
    if user_agent is True:
        LOGGER.info("Grabbed random user agent: {}".format(header))
    if url_file is not None:  # Scan a given file full of URLS
        file_path = url_file
        done = 0
        try:
            # fixed: close the counting handle instead of leaking it
            with open(url_file) as f:
                total = len(f.readlines())
            LOGGER.info("Found a total of {} URLS to scan..".format(total))
            with open(file_path) as urls:
                for url in urls.readlines():
                    if QUERY_REGEX.match(url.strip()):
                        question = prompt(
                            "Would you like to scan '{}' for XSS vulnerabilities[y/N]: "
                            .format(url.strip()))
                        if question.lower().startswith("y"):
                            done += 1
                            try:
                                if not xss.main(url.strip(), proxy=proxy,
                                                headers=header):
                                    LOGGER.info(
                                        "URL '{}' does not appear to be vulnerable to XSS"
                                        .format(url.strip()))
                                else:
                                    LOGGER.info(
                                        "URL '{}' appears to be vulnerable to XSS"
                                        .format(url.strip()))
                            except ConnectionError:
                                LOGGER.warning(
                                    "{} failed to respond, skipping..".format(
                                        url.strip()))
                            LOGGER.info(
                                "URLS scanned: {}, URLS left: {}".format(
                                    done, total - done))
                        else:
                            done += 1  # user declined; count it as handled
                    else:
                        done += 1
                        # fixed: LOGGER.warn is a deprecated alias of warning
                        LOGGER.warning(
                            "URL '{}' does not contain a query (GET) parameter, skipping.."
                            .format(url.strip()))
            LOGGER.info("All URLS in file have been scanned, shutting down..")
        except IOError:
            LOGGER.fatal(
                "That file does not exist, verify path and try again.")
    else:  # Scan a single URL
        if QUERY_REGEX.match(url):
            # fixed: str.format was handed dead proxy=/headers= kwargs that
            # it silently ignored — only the URL belongs in this message
            LOGGER.info("Searching: {} for XSS vulnerabilities..".format(url))
            if not xss.main(url, proxy=proxy, headers=header):
                LOGGER.error(
                    "{} does not appear to be vulnerable to XSS".format(url))
            else:
                LOGGER.info("{} seems to be vulnerable to XSS.".format(url))
        else:
            error_message = "The URL you provided does not contain a query "
            error_message += "(GET) parameter. In order for this scan you run "
            error_message += "successfully you will need to provide a URL with "
            error_message += "A query (GET) parameter example: http://127.0.0.1/php?id=2"
            LOGGER.fatal(error_message)
# NOTE(review): fragment begins mid-call (the add_argument this help=
# belongs to opens outside this chunk) and ends mid-assignment (err_msg
# is never finished here) — comments only.
                  help="Create a random wordlist to use for dictionary attacks"),
opts.add_argument('--rand-agent', action="store_true", dest="randomUserAgent",
                  help="Use a random user agent from a file list")
# Hidden/advanced flags (suppressed from --help output).
opts.add_argument('--anon', metavar="ANON", dest="anonLvl",
                  help=argparse.SUPPRESS)
opts.add_argument('--hash-list', metavar="FILE", dest="hashList",
                  help=argparse.SUPPRESS)
opts.add_argument('--tamper', metavar="SCRIPT", dest="tamper",
                  help=argparse.SUPPRESS)
args = opts.parse_args()
# Banner/legal display is skipped entirely when --version was given.
hide_banner(hide=True if args.banner else False,
            legal=True if args.legal else False) if args.version is False else hide_banner(hide=True)
LOGGER.info("Checking program integrity..")
try:
    integrity_check()
except HTTPError:
    # Integrity check needs network access; failure means we cannot confirm
    # this copy of Pybelt — warn and let the user decide whether to go on.
    check_fail = "Integrity check failed to connect "
    check_fail += "you are running a non verified "
    check_fail += "Pybelt, this may or may not be insecure. "
    check_fail += "Suggestion would be to re-download Pybelt from "
    check_fail += "{}"
    LOGGER.error(check_fail.format(CLONE_LINK))
    answer = prompt("Would you like to continue anyways[y/N] ")
    if answer.upper().startswith("Y"):
        pass
    else:
        err_msg = "Please download the latest version from "
def run_dork_checker(dork, dork_file=None, proxy=None):
    """ Pointer to run a Dork Check on a given Google Dork """
    if dork is not None:
        # Single-dork mode.
        LOGGER.info("Starting dork scan, using query: '{}'..".format(dork))
        try:
            LOGGER.info(
                DorkScanner(dork, dork_file=dork_file,
                            proxy=proxy).check_urls_for_queries())
        except HTTPError:
            LOGGER.fatal(GoogleBlockException(GOOGLE_TEMP_BLOCK_ERROR_MESSAGE))
        return
    if dork_file is None:
        return
    # File mode: scan every dork listed in the file.
    if proxy is None:
        # Google temp-bans repeated dork queries; suggest proxying first.
        proxy_warn = "It is advised to use proxies while running "
        proxy_warn += "a dork list due to the temporary Google "
        proxy_warn += "bans.."
        LOGGER.warning(proxy_warn)
        question = prompt(
            "Would you like to find proxies with the built in finder first[y/N]: "
        )
        if question.upper().startswith("Y"):
            subprocess.call(["python", "pybelt.py", "-f"])
    try:
        with open("{}".format(dork_file)) as dork_list:
            for dork in dork_list.readlines():
                LOGGER.info("Starting dork scan on {}..".format(dork.strip()))
                LOGGER.info(
                    DorkScanner(dork, dork_file=dork_file,
                                proxy=proxy).check_urls_for_queries())
    except HTTPError:
        LOGGER.fatal(GoogleBlockException(GOOGLE_TEMP_BLOCK_ERROR_MESSAGE))
    except IOError:
        LOGGER.fatal(
            "The filename {} does not exist, please verify path and try again"
            .format(dork_file))