def send_to_file(self, data):
    """
    Append *data* to this scan's JSON log file for further use.

    The log directory is created if it does not already exist. The
    full path to the log file is returned so callers can reference it.
    """
    create_dir(self.dir)
    filename = self.file.format(self.ip)
    log_path = "{}/{}".format(self.dir, filename)
    with open(log_path, "a+") as log_file:
        log_file.write(data)
    return log_path
def create_wordlist(warning=True, verbose=False, add=False):
    """
    Create a bruteforcing wordlist on disk under ``bf-dicts/``.

    :param warning: output the warning message saying that bruteforcing sucks
    :param verbose: emit debug logging while generating
    :param add: add +2 to the max word length (used by the retry path when
                the generator runs out of mutations)
    :return: never returns normally; calls ``shutdown()`` when done
    """
    max_length, max_word_length, dirname = 10000000, 10, "bf-dicts"
    if add:
        # retry path: bump the word length so the generator has more mutations
        max_word_length += 2
    warn_msg = (
        "It is highly advised to use a dictionary attack over bruteforce. "
        "Bruteforce requires extreme amounts of memory to accomplish and "
        "it is possible that it could take a lifetime to successfully "
        "crack your hash. To run a dictionary attack all you need to do is"
        " pass the wordlist switch ('-w/--wordlist PATH') with the path to "
        "your wordlist. (IE: --bruteforce -w ~/dicts/dict.txt)")
    if warning:
        LOGGER.warning(warn_msg)
    if verbose:
        LOGGER.debug(
            "Creating {} words with a max length of {} characters".format(
                max_length, max_word_length))
    create_dir(dirname, verbose=verbose)
    # "a+" so an interrupted run can resume: we count the lines already
    # present and only generate the remainder
    with open(dirname + "/" + WORDLIST_NAME, "a+") as lib:
        word = Generators().word_generator(length_max=max_word_length)
        lib.seek(0, 0)  # rewind before counting; "a+" starts at EOF
        line_count = len(lib.readlines())
        try:
            for _ in range(line_count, max_length):
                lib.write(next(word) + "\n")
        except StopIteration:
            # SHOULD NEVER GET HERE
            # if we run out of mutations we'll retry with a different word length
            lib.seek(0, 0)
            err_msg = (
                "Ran out of mutations at {} mutations. You can try upping "
                "the max length or just use what was processed. If you "
                "make the choice not to continue the program will add +2 "
                "to the max length and try to create the wordlist again.."
            ).format(len(lib.readlines()))
            LOGGER.error(err_msg)
            q = prompt("Would you like to continue", "y/N")
            if not q.startswith(("y", "Y")):
                # discard the partial list and rebuild with a longer max length
                lib.truncate(0)
                create_wordlist(warning=False, add=True)
    LOGGER.info(
        "Wordlist generated, words saved to: {}. Please re-run the application, exiting.."
        .format(WORDLIST_NAME))
    shutdown()
def unzip_iso(filepath, label=None, verbose=False, directory_name="ISO_dir"):
    """
    Unzip the ISO file into a randomly named directory using the external
    ``file-roller`` command.

    :param filepath: path to the ISO file to extract
    :param label: drive label handed to ``create_autorun``; when None the
                  label is guessed from uppercase entries of the extracted dir
    :param verbose: emit debug logging along the way
    :param directory_name: parent directory the random dir is created under
    :return: True when file-roller exits with status 0, False otherwise
    """

    def create_rand_dir_name(chars=string.ascii_letters, verbose=False):
        """Create a random 8 character directory name."""
        if verbose:
            console_log.LOGGER.debug("Creating random directory name..")
        # join the picks directly; the previous set()-based version silently
        # dropped duplicate letters (names could come out shorter than 8
        # characters) and lost ordering
        return ''.join(random.choice(chars) for _ in range(8))

    dir_name = create_rand_dir_name(verbose=verbose)
    if verbose:
        console_log.LOGGER.debug("Creating directory: {}/{}/*..".format(directory_name, dir_name))
    create_dir(directory_name)
    create_dir(directory_name + "/" + dir_name, verbose=verbose)
    full_dir_path = os.getcwd() + "/" + directory_name + "/" + dir_name
    if verbose:
        console_log.LOGGER.debug("Directory created, full path being saved to: {}..".format(full_dir_path))
    # file-roller handles the extraction for now; a native ISO9660 parser
    # may replace this eventually
    cmd = "file-roller -e {} {} 2> /dev/null".format(full_dir_path, filepath)
    if verbose:
        console_log.LOGGER.debug("Starting command: {} ..".format(cmd))
    # subprocess.call returns the exit code; the previous check_call raised
    # CalledProcessError on any non-zero exit, which made the failure branch
    # below unreachable
    stdout_data = subprocess.call(cmd, shell=True)
    if stdout_data == 0:
        if verbose:
            console_log.LOGGER.debug("Command completed successfully..")
        if label is None:
            # best-effort label guess: uppercase entries of the extracted dir
            label_name = [c for c in os.listdir(full_dir_path) if c.isupper()]
        else:
            label_name = label
        create_autorun(
            ''.join(label_name) if isinstance(label_name, list) else label_name,
            full_dir_path)
        return True
    if verbose:
        console_log.LOGGER.debug("Command failed with code: {}".format(stdout_data))
    return False
#!/usr/bin/env python import os from var.extract import unzip_iso from var.format.formatter import Formatter from lib.settings import ( search_for_iso, avail_drives, download, mount_drive, create_autorun, BANNER, create_dir ) print BANNER create_dir("log") # print avail_drives(verbose=True) # Formatter("/dev/sdb1").format_usb(4096) # print search_for_iso(verbose=True) # unzip_iso("dsl-4.11.rc1.iso", label="TEST", verbose=True) print download("pentest", "kali", verbose=True) # mount_drive("/media/baal/ARCH_201707") # create_autorun("test", directory=os.getcwd())
def parse_search_results(query, url, verbose=False, dirname="{}/log/url-log",
                         filename="url-log-{}.log", **kwargs):
    """
    Parse a webpage from a search engine for URL's with a GET(query) parameter.

    :param query: the dork/query string to search with
    :param url: the search engine URL to run the query against
    :param verbose: emit debug logging along the way
    :param dirname: format string for the log directory (receives cwd)
    :param filename: format string for the log file (receives a counter)
    :param kwargs: optional ``proxy`` and ``agent`` header configuration
    :return: list of discovered URL's, or None when nothing usable was found
    """
    # BUG FIX: the original `exclude = "google" or "webcache" or "youtube"`
    # short-circuits to just "google", so webcache/youtube results were
    # never filtered; test membership against all three instead
    exclude = ("google", "webcache", "youtube")
    create_dir(dirname.format(os.getcwd()))
    full_file_path = "{}/{}".format(
        dirname.format(os.getcwd()),
        filename.format(len(os.listdir(dirname.format(os.getcwd()))) + 1))

    def __get_headers():
        # dict.get never raises, so no try/except is needed here
        return kwargs.get("proxy"), kwargs.get("agent")

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration...",
                      level=10))
    proxy_string, user_agent = __get_headers()
    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
    if user_agent is None:
        user_agent = DEFAULT_USER_AGENT

    user_agent_info = "adjusting user-agent header to {}..."
    if user_agent is not DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {"Connection": "close", "user-agent": user_agent}
    logger.info(set_color("attempting to gather query URL..."))
    try:
        query_url = get_urls(query, url, verbose=verbose,
                             user_agent=user_agent, proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(
                set_color(
                    "it seems that you exited the browser, please allow the browser "
                    "to complete it's run so that Zeus can bypass captchas and API "
                    "calls", level=50))
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file...".format(
                        os.path.basename(__file__), str(e).strip()), level=50))
        shutdown()
    logger.info(
        set_color(
            "URL successfully gathered, searching for GET parameters..."))
    logger.info(set_color(proxy_string_info))
    req = requests.get(query_url, proxies=proxy_string)
    logger.info(set_color(user_agent_info))
    req.headers.update(headers)
    found_urls = URL_REGEX.findall(req.text)
    retval = set()
    for urls in list(found_urls):
        for url in list(urls):
            url = urllib.unquote(url)
            # keep only parameterised URL's that don't belong to any of the
            # excluded hosts
            if URL_QUERY_REGEX.match(url) and not any(e in url for e in exclude):
                if type(url) is unicode:
                    url = str(url).encode("utf-8")
                if verbose:
                    logger.debug(
                        set_color("found '{}'...".format(url), level=10))
                # strip secondary GET parameters; the first one is enough
                retval.add(url.split("&")[0])
    logger.info(
        set_color("found a total of {} URL's with a GET parameter...".format(
            len(retval))))
    if len(retval) != 0:
        logger.info(
            set_color(
                "saving found URL's under '{}'...".format(full_file_path)))
        with open(full_file_path, "a+") as log:
            for url in list(retval):
                log.write(url + "\n")
    else:
        logger.critical(
            set_color(
                "did not find any usable URL's with the given query '{}' "
                "using search engine '{}'...".format(query, url), level=50))
        shutdown()
    return list(retval) if len(retval) != 0 else None
if update_status == 0: pass else: LOGGER.fatal("No git repository found in path..") exit(0) # create a hash list from a given file or a series of given files # for this to work effectively when passing multiple files you must # enclose them in quotes and separate them by commas, IE -H "test.txt, testing.txt". # This will parse the given file for anything pertaining to a hash and save it # under a new file. You can then pass that file to the program if opt.createHashFile: files_to_process = opt.createHashFile hash_file_name = "hash-file-{}.hash".format( random_salt_generator(use_string=True)[0]) create_dir("{}/hash_files".format(os.getcwd()), verbose=opt.runInVerbose) full_hash_path = "{}/{}/{}".format(os.getcwd(), "hash_files", hash_file_name) with open(full_hash_path, "a+") as filename: if len(files_to_process.split(",")) > 1: LOGGER.info( "Found multiple files to process: '{}'..".format( files_to_process)) for f in files_to_process.split(","): try: for item in Generators( f.strip()).hash_file_generator(): filename.write(item.strip() + "\n") except IOError: LOGGER.warning( "Provided file '{}' does not exist, skipping.."
def bruteforce_main(verf_hash, algorithm=None, wordlist=None, salt=None,
                    placement=None, all_algs=False, posx="", use_hex=False,
                    verbose=False, batch=False, rounds=10):
    """
    Main function to be used for bruteforcing a hash.

    :param verf_hash: the hash string to crack
    :param algorithm: explicit algorithm name; when None the type is guessed
                      via ``verify_hash_type`` and each candidate is tried
    :param wordlist: path to a wordlist; when None a generated one under
                     ``bf-dicts/`` is located or created
    :param salt: salt value forwarded to ``hash_words``
    :param placement: where the salt goes, forwarded to ``hash_words``
    :param all_algs: include the least-likely hash types when guessing
    :param posx: forwarded to ``hash_words``
    :param use_hex: forwarded to ``hash_words``
    :param verbose: emit debug logging along the way
    :param batch: skip interactive prompts (assume "no")
    :param rounds: number of rounds forwarded to ``hash_words``
    """
    wordlist_created = False
    if wordlist is None:
        # no wordlist given: reuse a previously generated one if present,
        # otherwise generate a fresh one
        create_dir("bf-dicts", verbose=verbose)
        for item in os.listdir(os.getcwd() + "/bf-dicts"):
            if WORDLIST_RE.match(item):
                wordlist_created = True
                wordlist = "{}/bf-dicts/{}".format(os.getcwd(), item)
        if not wordlist_created:
            LOGGER.info("Creating wordlist..")
            create_wordlist(verbose=verbose)
    else:
        LOGGER.info("Reading from, {}..".format(wordlist))
    if algorithm is None:
        # guess the hash type and try each candidate in turn
        hash_type = verify_hash_type(verf_hash, least_likely=all_algs)
        LOGGER.info(
            "Found {} possible hash type(s) to run against: {} ".format(
                len(hash_type) - 1 if hash_type[1] is None else len(hash_type),
                hash_type[0] if hash_type[1] is None else hash_type))
        for alg in hash_type:
            if alg is None:
                # candidate list exhausted without a match
                err_msg = (
                    "Ran out of algorithms to try. There are no more "
                    "algorithms currently available that match this hashes "
                    "length, and complexity.")
                # NOTE(review): err_msg has no '{}' placeholder, so this
                # .format(DAGON_ISSUE_LINK) is a no-op — confirm intent
                LOGGER.fatal(err_msg.format(DAGON_ISSUE_LINK))
                break
            else:
                if ":::" in verf_hash:
                    # full 'USER:SID:LM:NTLM:::' dumps can't be cracked
                    # whole; only the NTLM portion is usable
                    LOGGER.debug(
                        "It appears that you are trying to crack an '{}' hash, "
                        "these hashes have a certain sequence to them that looks "
                        "like this 'USERNAME:SID:LM_HASH:NTLM_HASH:::'. What you're "
                        "wanting is the NTLM part, of the hash, fix your hash and try "
                        "again..".format(alg.upper()))
                    shutdown(1)
                LOGGER.info("Starting bruteforce with {}..".format(
                    alg.upper()))
                bruteforcing = hash_words(verf_hash, wordlist, alg,
                                          salt=salt, placement=placement,
                                          posx=posx, use_hex=use_hex,
                                          verbose=verbose, rounds=rounds)
                if bruteforcing is None:
                    LOGGER.warning(
                        "Unable to find a match for '{}', using {}..".format(
                            verf_hash, alg.upper()))
                else:
                    # match found: report and stop trying further algorithms
                    match_found(bruteforcing)
                    break
    else:
        # explicit algorithm given: single attempt, with an optional
        # fallback to automatic detection on failure
        LOGGER.info("Using algorithm, {}..".format(algorithm.upper()))
        results = hash_words(verf_hash, wordlist, algorithm, salt=salt,
                             placement=placement, posx=posx, verbose=verbose)
        if results is None:
            LOGGER.warning("Unable to find a match using {}..".format(
                algorithm.upper()))
            if not batch:
                verify = prompt(
                    "Would you like to attempt to verify the hash type automatically and crack it",
                    "y/N")
            else:
                # batch mode never prompts; assume "no"
                verify = "n"
            if verify.startswith(("y", "Y")):
                # retry with automatic hash-type detection
                bruteforce_main(verf_hash, wordlist=wordlist, salt=salt,
                                placement=placement, posx=posx,
                                use_hex=use_hex, verbose=verbose)
            else:
                LOGGER.warning(
                    "Unable to produce a result for given hash '{}' using {}.."
                    .format(verf_hash, algorithm.upper()))
        else:
            match_found(results)