Example #1
    def __run_attacks_main():
        which_log_to_use = {
            "dork": URL_LOG_PATH,
            "spider": SPIDER_LOG_PATH
        }

        options = (opt.useRandomDork, opt.dorkToUse, opt.dorkFileToUse)

        to_use = which_log_to_use["dork"] if any(options) else which_log_to_use["spider"]

        try:
            urls_to_use = get_latest_log_file(to_use)
        except TypeError:
            urls_to_use = None

        if urls_to_use is None:
            logger.error(set_color(
                "unable to run attacks, it appears that no file was created for the retrieved data...", level=40
            ))
            shutdown()
        if opt.runSqliScan or opt.runPortScan or opt.intelCheck or opt.adminPanelFinder or opt.runXssScan:
            with open(urls_to_use) as urls:
                for url in urls.readlines():
                    __run_attacks(
                        url.strip(),
                        sqlmap=opt.runSqliScan, nmap=opt.runPortScan, intel=opt.intelCheck, xss=opt.runXssScan,
                        admin=opt.adminPanelFinder, given_path=opt.givenSearchPath,
                        auto=opt.autoStartSqlmap, verbose=opt.runInVerbose, batch=opt.runInBatch
                    )
Example #2
def verify_hash_type(hash_to_verify, least_likely=False, verbose=False):
    """
      Attempt to verify a given hash by type (md5, sha1, etc..)

      >  :param hash_to_verify: hash string
      >  :param least_likely: show least likely options as well
      >  :return: likely options, least likely options, or none

      Example:
        >>> verify_hash_type("098f6bcd4621d373cade4e832627b4f6", least_likely=True)
        [('md5', 'md4', 'md2'), ('double md5', 'lm', ... )]
    """
    for regex, hash_types in HASH_TYPE_REGEX.items():  # iteritems() is not available in Python 3.x
        if verbose:
            LOGGER.debug("Testing: {}".format(hash_types))
        if regex.match(hash_to_verify):
            return hash_types if least_likely else hash_types[0]
    error_msg = (
        "Unable to find any algorithms to match the given hash. If you "
        "feel this algorithm should be implemented make an issue here: {}")
    LOGGER.fatal(error_msg.format(DAGON_ISSUE_LINK))
    # hash_guarantee(hash_to_verify)
    LOGGER.warning("`hash_guarantee` has been turned off for the time being")
    shutdown(1)
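The lookup above is a table of pre-compiled regexes mapped to likely algorithms: the first pattern that matches the hash decides the answer. A minimal, self-contained sketch of the same technique (the table and names below are illustrative stand-ins, not Dagon's actual HASH_TYPE_REGEX):

import re

# illustrative stand-in for HASH_TYPE_REGEX: pattern -> (most likely, ...least likely)
HASH_TYPE_REGEX = {
    re.compile(r"^[a-f0-9]{32}$", re.IGNORECASE): ("md5", "md4", "md2"),
    re.compile(r"^[a-f0-9]{40}$", re.IGNORECASE): ("sha1", "ripemd160"),
    re.compile(r"^[a-f0-9]{64}$", re.IGNORECASE): ("sha256", "sha3_256"),
}

def guess_hash_type(hash_to_verify, least_likely=False):
    # return the whole tuple when least_likely is set, otherwise only the top guess
    for regex, hash_types in HASH_TYPE_REGEX.items():
        if regex.match(hash_to_verify):
            return hash_types if least_likely else hash_types[0]
    return None

print(guess_hash_type("098f6bcd4621d373cade4e832627b4f6"))        # md5
print(guess_hash_type("098f6bcd4621d373cade4e832627b4f6", True))  # ('md5', 'md4', 'md2')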
Example #3
    def __run_attacks(
            url, sqlmap=False, nmap=False, intel=False, xss=False,
            verbose=False, admin=False, given_path=None, auto=False, batch=False
    ):
        """
        run the attacks if any are requested
        """
        __enabled_attacks = {
            "sqlmap": opt.runSqliScan,
            "port": opt.runPortScan,
            "xss": opt.runXssScan,
            "admin": opt.adminPanelFinder,
            "intel": opt.intelCheck
        }

        enabled = set()
        for key, attack_enabled in __enabled_attacks.items():
            if attack_enabled:
                enabled.add(key)
            if len(enabled) > 1:
                logger.error(set_color(
                    "it appears that you have enabled multiple attack types, "
                    "as of now only 1 attack is supported at a time, choose "
                    "your attack and try again. You can use the -f flag if "
                    "you do not want to complete an entire search again...", level=40
                ))
                shutdown()

        if not batch:
            question = prompt(
                "would you like to process found URL: '{}'".format(url), opts=["y", "N"]
            )
        else:
            question = "y"

        if question.lower().startswith("y"):
            if sqlmap:
                return sqlmap_scan.sqlmap_scan_main(url.strip(), verbose=verbose, opts=__create_arguments(sqlmap=True),
                                                    auto_search=auto, given_path=given_path)
            elif nmap:
                url_ip_address = replace_http(url.strip())
                return nmap_scan.perform_port_scan(url_ip_address, verbose=verbose, opts=__create_arguments(nmap=True))
            elif intel:
                url = get_true_url(url)
                return intel_me.main_intel_amt(url, agent=agent_to_use, proxy=proxy_to_use)
            elif admin:
                main(url, show=opt.showAllConnections, verbose=verbose)
            elif xss:
                main_xss(url, verbose=verbose, proxy=proxy_to_use, agent=agent_to_use, tamper=opt.tamperXssPayloads)
            else:
                pass
        else:
            logger.warning(set_color(
                "skipping '{}'...".format(url), level=30
            ))
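The guard above simply refuses to run when more than one attack flag is set. A minimal sketch of that single-attack selection, using illustrative flag names rather than Zeus's opt attributes:

def pick_single_attack(**attack_flags):
    # attack_flags maps an attack name to its boolean switch; at most one may be enabled
    enabled = [name for name, switch in attack_flags.items() if switch]
    if len(enabled) > 1:
        raise ValueError("only one attack is supported at a time, got: {}".format(", ".join(enabled)))
    return enabled[0] if enabled else None

print(pick_single_attack(sqlmap=True, nmap=False, xss=False, admin=False, intel=False))  # sqlmap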
Example #4
def create_wordlist(warning=True, verbose=False, add=False):
    """
      Create a bruteforcing wordlist

      > :param max_length: max amount of words to have
      > :param max_word_length: how long the words should be
      > :param warning: output the warning message to say that BF'ing sucks
      > :return: a wordlist
    """
    max_length, max_word_length, dirname = 10000000, 10, "bf-dicts"
    if add:
        max_word_length += 2

    warn_msg = (
        "It is highly advised to use a dictionary attack over bruteforce. "
        "Bruteforce requires extreme amounts of memory to accomplish and "
        "it is possible that it could take a lifetime to successfully "
        "crack your hash. To run a dictionary attack all you need to do is"
        " pass the wordlist switch ('-w/--wordlist PATH') with the path to "
        "your wordlist. (IE: --bruteforce -w ~/dicts/dict.txt)")

    if warning:
        LOGGER.warning(warn_msg)

    if verbose:
        LOGGER.debug(
            "Creating {} words with a max length of {} characters".format(
                max_length, max_word_length))

    create_dir(dirname, verbose=verbose)
    with open(dirname + "/" + WORDLIST_NAME, "a+") as lib:
        word = Generators().word_generator(length_max=max_word_length)
        lib.seek(0, 0)
        line_count = len(lib.readlines())
        try:
            for _ in range(line_count, max_length):
                lib.write(next(word) + "\n")
        except StopIteration:  # SHOULD NEVER GET HERE
            # if we run out of mutations we'll retry with a different word length
            lib.seek(0, 0)
            err_msg = (
                "Ran out of mutations at {} mutations. You can try upping "
                "the max length or just use what was processed. If you "
                "make the choice not to continue the program will add +2 "
                "to the max length and try to create the wordlist again.."
            ).format(len(lib.readlines()))
            LOGGER.error(err_msg)
            q = prompt("Would you like to continue", "y/N")
            if not q.startswith(("y", "Y")):
                lib.truncate(0)
                create_wordlist(warning=False, add=True)
    LOGGER.info(
        "Wordlist generated, words saved to: {}. Please re-run the application, exiting.."
        .format(WORDLIST_NAME))
    shutdown()
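The wordlist logic is: open the file in append mode, count the lines already present, and keep pulling candidates from a generator until the target count is reached, so an interrupted run resumes where it left off. A minimal sketch of that idea, assuming a simple itertools-based generator in place of Generators().word_generator:

import itertools
import string

def word_generator(length_max=4, charset=string.ascii_lowercase):
    # yield every combination up to length_max characters, shortest first
    for length in range(1, length_max + 1):
        for combo in itertools.product(charset, repeat=length):
            yield "".join(combo)

def create_wordlist(path="wordlist.txt", max_length=1000, max_word_length=4):
    words = word_generator(length_max=max_word_length)
    with open(path, "a+") as lib:
        lib.seek(0, 0)
        line_count = len(lib.readlines())  # resume after whatever is already on disk
        # skip the candidates presumably written already, then append up to max_length total
        for word in itertools.islice(words, line_count, max_length):
            lib.write(word + "\n")

create_wordlist()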
Example #5
def create_wordlist(max_length=10000000,
                    max_word_length=10,
                    warning=True,
                    perms=""):
    """
      Create a bruteforcing wordlist

      > :param max_length: max amount of words to have
      > :param max_word_length: how long the words should be
      > :param warning: output the warning message to say that BF'ing sucks
      > :return: a wordlist
    """
    warn_msg = "It is highly advised to use a dictionary attack over bruteforce. "
    warn_msg += "Bruteforce requires extreme amounts of memory to accomplish and "
    warn_msg += "it is possible that it could take a lifetime to successfully crack "
    warn_msg += "your hash. To run a dictionary attack all you need to do is pass "
    warn_msg += "the wordlist switch ('--wordlist PATH') with the path to your wordlist. "
    warn_msg += "(IE: --bruteforce --wordlist ~/dicts/dict.txt)"
    if warning:
        LOGGER.warning(warn_msg)

    with open(WORDLIST_NAME, "a+") as lib:
        word = word_generator(length_max=max_word_length, perms=perms)
        lib.seek(0, 0)
        line_count = len(lib.readlines())
        try:
            for _ in range(line_count, max_length):
                lib.write(next(word) + "\n")
        except StopIteration:
            # if we run out of mutations we'll retry with a different word length
            lib.seek(0, 0)
            err_msg = "Ran out of mutations at {} mutations. You can try upping the max length ".format(
                len(lib.readlines()))
            err_msg += "or just use what was processed. If you make the choice not to continue "
            err_msg += "the program will add +2 to the max length and try to create the wordlist again.."
            LOGGER.error(err_msg)
            q = prompt("Would you like to continue", "y/N")
            if not q.lower().startswith("y"):
                lib.truncate(0)
                create_wordlist(max_word_length=max_word_length + 2, warning=False)
    LOGGER.info(
        "Wordlist generated, words saved to: {}. Please re-run the application, exiting.."
        .format(WORDLIST_NAME))
    shutdown()
Example #6
def create_urls(url, payload_list, tamper=None):
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf_name = tf.name
    with tf as tmp:
        for i, payload in enumerate(payload_list):
            if tamper:
                try:
                    if i < 1:
                        payload = __tamper_payload(payload, tamper_type=tamper, warning=True)
                    else:
                        payload = __tamper_payload(payload, tamper_type=tamper, warning=False)
                except InvalidTamperProvided:
                    logger.error(set_color(
                        "you provided an invalid tamper script, acceptable tamper scripts are: {}...".format(
                            " | ".join(list_tamper_scripts())
                        ), level=40
                    ))
                    shutdown()
            loaded_url = "{}{}\n".format(url.strip(), payload.strip())
            tmp.write(loaded_url)
    return tf_name
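create_urls just concatenates each payload onto the base URL and stages the result in a named temporary file whose path is handed back to the caller. A minimal sketch of the same idea without the tamper-script handling (Python 3, where the temp file must be opened in text mode or the lines encoded):

import tempfile

def create_urls(url, payload_list):
    # write one 'url + payload' line per payload and return the temp file's path
    tf = tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt")
    with tf as tmp:
        for payload in payload_list:
            tmp.write("{}{}\n".format(url.strip(), payload.strip()))
    return tf.name

path = create_urls("http://example.com/page.php?id=1", ["' OR '1'='1", "<script>alert(1)</script>"])
print(open(path).read())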
Example #7
def request_issue_creation():
    question = prompt(
        "would you like to create an anonymous issue and post it to Zeus's Github",
        opts="yN")
    if question.lower().startswith("n"):
        logger.error(
            set_color(
                "Zeus has experienced an internal error and cannot continue, shutting down...",
                level=40))
        shutdown()

    fix_log_file()
    logger.info(
        set_color(
            "Zeus got an unexpected error and will automatically create an issue for this error, please wait..."
        ))

    def __extract_stacktrace(file_data):
        logger.info(set_color("extracting traceback from log file..."))
        retval, buff_mode, _buffer = [], False, ""
        with open(file_data, "r+") as log:
            for line in log:
                if "Traceback" in line:
                    buff_mode = True
                if line and len(line) < 5:
                    buff_mode = False
                    retval.append(_buffer)
                    _buffer = ""
                if buff_mode:
                    if len(line) > 400:
                        line = line[:400] + "...\n"
                    _buffer += line
        return "".join(retval)

    logger.info(set_color("getting authorization..."))

    encoded = __get_encoded_string()
    n = get_decode_num(encoded)
    token = decode(n, encoded)

    current_log_file = get_latest_log_file(CURRENT_LOG_FILE_PATH)
    stacktrace = __extract_stacktrace(current_log_file)
    issue_title = stacktrace.split("\n")[-2]

    issue_data = {
        "title":
        issue_title,
        "body":
        "Zeus version:\n`{}`\n\n"
        "Error info:\n```{}````\n\n"
        "Running details:\n`{}`\n\n"
        "Commands used:\n`{}`\n\n"
        "Log file info:\n```{}```".format(VERSION, str(stacktrace),
                                          str(platform.platform()),
                                          " ".join(sys.argv),
                                          open(current_log_file).read()),
    }

    _json_data = json.dumps(issue_data)
    if sys.version_info > (3, ):
        _json_data = _json_data.encode("utf-8")

    try:
        req = urllib2.Request(
            url="https://api.github.com/repos/ekultek/zeus-scanner/issues",
            data=_json_data,
            headers={"Authorization": "token {}".format(token)})
        urllib2.urlopen(req, timeout=10).read()
        logger.info(
            set_color(
                "issue has been created successfully with the following name '{}'..."
                .format(issue_title)))
    except Exception as e:
        logger.exception(
            set_color("failed to auto create the issue, got exception '{}', "
                      "you may manually create an issue...".format(e),
                      level=50))
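The submission itself is just an authenticated POST of a JSON body with title and body fields to the GitHub issues endpoint. A minimal sketch using requests instead of urllib2 (the repository path and token below are placeholders):

import requests

def create_github_issue(owner, repo, token, title, body):
    # POST /repos/{owner}/{repo}/issues creates an issue when the token has repo access
    resp = requests.post(
        "https://api.github.com/repos/{}/{}/issues".format(owner, repo),
        json={"title": title, "body": body},
        headers={"Authorization": "token {}".format(token)},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()["html_url"]

# create_github_issue("ekultek", "zeus-scanner", "<token>", "Unhandled exception", "traceback here")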
Example #8
        logger.info(set_color(
            "there are a total of {} arguments understood by sqlmap API, "
            "they include:".format(len(SQLMAP_API_OPTIONS))
        ))
        print("\n")
        for arg in SQLMAP_API_OPTIONS:
            print(
                "[*] {}".format(arg)
            )
        print("\n")
        logger.info(set_color(
            "for more information about sqlmap arguments, see here '{}'...".format(
                SQLMAP_MAN_PAGE_URL
            )
        ))
        shutdown()

    if opt.showNmapArgs:
        logger.info(set_color(
            "there are a total of {} arguments understood by nmap, they include:".format(
                len(NMAP_API_OPTS)
            )
        ))
        print("\n")
        for arg in NMAP_API_OPTS:
            print(
                "[*] {}".format(arg)
            )
        print("\n")
        logger.info(set_color(
            "for more information on what the arguments do please see here '{}'...".format(
Example #9
def parse_search_results(query,
                         url,
                         verbose=False,
                         dirname="{}/log/url-log",
                         filename="url-log-{}.log",
                         **kwargs):
    """
      Parse a webpage from Google for URLs with a GET (query) parameter
    """
    exclude = "google" or "webcache" or "youtube"

    create_dir(dirname.format(os.getcwd()))
    full_file_path = "{}/{}".format(
        dirname.format(os.getcwd()),
        filename.format(len(os.listdir(dirname.format(os.getcwd()))) + 1))

    def __get_headers():
        proxy_string, user_agent = None, None
        try:
            proxy_string = kwargs.get("proxy")
        except:
            pass

        try:
            user_agent = kwargs.get("agent")
        except:
            pass

        return proxy_string, user_agent

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration...",
                      level=10))
    proxy_string, user_agent = __get_headers()

    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
    if user_agent is None:
        user_agent = DEFAULT_USER_AGENT

    user_agent_info = "adjusting user-agent header to {}..."
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {"Connection": "close", "user-agent": user_agent}
    logger.info(set_color("attempting to gather query URL..."))
    try:
        query_url = get_urls(query,
                             url,
                             verbose=verbose,
                             user_agent=user_agent,
                             proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(
                set_color(
                    "it seems that you exited the browser, please allow the browser "
                    "to complete it's run so that Zeus can bypass captchas and API "
                    "calls",
                    level=50))
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file...".format(
                        os.path.basename(__file__),
                        str(e).strip()),
                    level=50))
        shutdown()
    logger.info(
        set_color(
            "URL successfully gathered, searching for GET parameters..."))
    logger.info(set_color(proxy_string_info))
    req = requests.get(query_url, proxies=proxy_string, headers=headers)
    logger.info(set_color(user_agent_info))
    found_urls = URL_REGEX.findall(req.text)
    retval = set()
    for urls in list(found_urls):
        for url in list(urls):
            url = urllib.unquote(url)
            if URL_QUERY_REGEX.match(url) and not any(word in url for word in exclude):
                if type(url) is unicode:
                    url = str(url).encode("utf-8")
                if verbose:
                    logger.debug(
                        set_color("found '{}'...".format(url), level=10))
                retval.add(url.split("&amp;")[0])
    logger.info(
        set_color("found a total of {} URL's with a GET parameter...".format(
            len(retval))))
    if len(retval) != 0:
        logger.info(
            set_color(
                "saving found URL's under '{}'...".format(full_file_path)))
        with open(full_file_path, "a+") as log:
            for url in list(retval):
                log.write(url + "\n")
    else:
        logger.critical(
            set_color(
                "did not find any usable URL's with the given query '{}' "
                "using search engine '{}'...".format(query, url),
                level=50))
        shutdown()
    return list(retval) if len(retval) != 0 else None
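The filtering step is driven by two regexes: one pulls candidate URLs out of the page source and one keeps only URLs carrying a GET parameter, after which excluded hosts are dropped and the query string is trimmed at '&amp;'. A minimal sketch with illustrative patterns (the real URL_REGEX and URL_QUERY_REGEX used above are Zeus's own and differ from these):

import re

URL_REGEX = re.compile(r"""https?://[^\s"'<>]+""")          # illustrative candidate-URL pattern
URL_QUERY_REGEX = re.compile(r"https?://[^\s]+\?[^\s]+=")   # keep only URLs with a GET parameter

def extract_query_urls(page_text, exclude=("google", "webcache", "youtube")):
    retval = set()
    for url in URL_REGEX.findall(page_text):
        if URL_QUERY_REGEX.match(url) and not any(word in url for word in exclude):
            retval.add(url.split("&amp;")[0])
    return retval

sample = 'see <a href="http://example.com/item.php?id=22&amp;lang=en">link</a>'
print(extract_query_urls(sample))  # {'http://example.com/item.php?id=22'}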
Example #10
                                q = prompt(
                                    "Attempt to verify hash '{}'".format(
                                        h.strip()), "y/N")

                            if q.startswith(("y", "Y")):
                                match_found(verify_hash_type(
                                    h.strip(),
                                    least_likely=opt.displayLeastLikely),
                                            kind="else",
                                            all_types=opt.displayLeastLikely)

                # Finish the benchmark test
                if opt.runBenchMarkTest:
                    stop_time = time.time()
                    LOGGER.info(
                        "Time elapsed during benchmark test: {} seconds".
                        format(stop_time - start_time))

                shutdown(verbose=opt.runInVerbose)

            # You never provided a mandatory argument
            else:
                LOGGER.fatal(
                    "Missing mandatory argument, redirecting to help menu..")
                subprocess.call("python dagon.py --help", shell=True)

        # Why you gotta interrupt my awesome?
        except KeyboardInterrupt:
            LOGGER.fatal("User exited process...")
            # TODO:/ Pause/resume function
Example #11
def bruteforce_main(verf_hash,
                    algorithm=None,
                    wordlist=None,
                    salt=None,
                    placement=None,
                    all_algs=False,
                    posx="",
                    use_hex=False,
                    verbose=False,
                    batch=False,
                    rounds=10):
    """
      Main function to be used for bruteforcing a hash
    """
    wordlist_created = False
    if wordlist is None:
        create_dir("bf-dicts", verbose=verbose)
        for item in os.listdir(os.getcwd() + "/bf-dicts"):
            if WORDLIST_RE.match(item):
                wordlist_created = True
                wordlist = "{}/bf-dicts/{}".format(os.getcwd(), item)
        if not wordlist_created:
            LOGGER.info("Creating wordlist..")
            create_wordlist(verbose=verbose)
    else:
        LOGGER.info("Reading from, {}..".format(wordlist))

    if algorithm is None:
        hash_type = verify_hash_type(verf_hash, least_likely=all_algs)
        LOGGER.info(
            "Found {} possible hash type(s) to run against: {} ".format(
                len(hash_type) - 1 if hash_type[1] is None else len(hash_type),
                hash_type[0] if hash_type[1] is None else hash_type))
        for alg in hash_type:
            if alg is None:
                err_msg = (
                    "Ran out of algorithms to try. There are no more "
                    "algorithms currently available that match this hash's "
                    "length and complexity.")
                LOGGER.fatal(err_msg)
                break
            else:
                if ":::" in verf_hash:
                    LOGGER.debug(
                        "It appears that you are trying to crack an '{}' hash, "
                        "these hashes have a certain sequence to them that looks "
                        "like this 'USERNAME:SID:LM_HASH:NTLM_HASH:::'. What you're "
                        "wanting is the NTLM part, of the hash, fix your hash and try "
                        "again..".format(alg.upper()))
                    shutdown(1)
                LOGGER.info("Starting bruteforce with {}..".format(
                    alg.upper()))
                bruteforcing = hash_words(verf_hash,
                                          wordlist,
                                          alg,
                                          salt=salt,
                                          placement=placement,
                                          posx=posx,
                                          use_hex=use_hex,
                                          verbose=verbose,
                                          rounds=rounds)
                if bruteforcing is None:
                    LOGGER.warning(
                        "Unable to find a match for '{}', using {}..".format(
                            verf_hash, alg.upper()))
                else:
                    match_found(bruteforcing)
                    break
    else:
        LOGGER.info("Using algorithm, {}..".format(algorithm.upper()))
        results = hash_words(verf_hash,
                             wordlist,
                             algorithm,
                             salt=salt,
                             placement=placement,
                             posx=posx,
                             verbose=verbose)
        if results is None:
            LOGGER.warning("Unable to find a match using {}..".format(
                algorithm.upper()))
            if not batch:
                verify = prompt(
                    "Would you like to attempt to verify the hash type automatically and crack it",
                    "y/N")
            else:
                verify = "n"
            if verify.startswith(("y", "Y")):
                bruteforce_main(verf_hash,
                                wordlist=wordlist,
                                salt=salt,
                                placement=placement,
                                posx=posx,
                                use_hex=use_hex,
                                verbose=verbose)
            else:
                LOGGER.warning(
                    "Unable to produce a result for given hash '{}' using {}.."
                    .format(verf_hash, algorithm.upper()))
        else:
            match_found(results)
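Underneath, the bruteforce is a dictionary loop: hash each candidate word with the selected algorithm (plus optional salt) and compare against the target digest. A minimal sketch of that core using hashlib; hash_words itself is Dagon's and supports more algorithms, salt placements and rounds than shown here:

import hashlib

def hash_words(target_hash, wordlist_path, algorithm="md5", salt=None, placement="back"):
    # try every candidate word; return the plaintext on a digest match, else None
    for line in open(wordlist_path, encoding="utf-8", errors="ignore"):
        word = line.strip()
        data = word if salt is None else (salt + word if placement == "front" else word + salt)
        digest = hashlib.new(algorithm, data.encode()).hexdigest()
        if digest == target_hash.lower():
            return word
    return None

# hash_words("098f6bcd4621d373cade4e832627b4f6", "wordlist.txt")  # -> "test" if present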
Example #12
def parse_search_results(query, url_to_search, verbose=False, **kwargs):
    """
      Parse a webpage from Google for URLs with a GET (query) parameter
    """
    exclude = ("www.google.com", "map.google.com", "mail.google.com",
               "drive.google.com", "news.google.com", "accounts.google.com")
    splitter = "&amp;"
    retval = set()
    query_url = None

    def __get_headers():
        proxy_string, user_agent = None, None
        try:
            proxy_string = kwargs.get("proxy")
        except:
            pass

        try:
            user_agent = kwargs.get("agent")
        except:
            pass

        return proxy_string, user_agent

    if verbose:
        logger.debug(
            set_color("checking for user-agent and proxy configuration...",
                      level=10))
    proxy_string, user_agent = __get_headers()

    if proxy_string is not None:
        proxy_string = proxy_string_to_dict(proxy_string)
    if user_agent is None:
        user_agent = DEFAULT_USER_AGENT

    user_agent_info = "adjusting user-agent header to {}..."
    if user_agent != DEFAULT_USER_AGENT:
        user_agent_info = user_agent_info.format(user_agent.strip())
    else:
        user_agent_info = user_agent_info.format(
            "default user agent '{}'".format(DEFAULT_USER_AGENT))

    proxy_string_info = "setting proxy to {}..."
    if proxy_string is not None:
        proxy_string_info = proxy_string_info.format(
            ''.join(proxy_string.keys()) + "://" +
            ''.join(proxy_string.values()))
    else:
        proxy_string_info = "no proxy configuration detected..."

    headers = {"Connection": "close", "user-agent": user_agent}
    logger.info(set_color("attempting to gather query URL..."))
    try:
        query_url = get_urls(query,
                             url_to_search,
                             verbose=verbose,
                             user_agent=user_agent,
                             proxy=proxy_string)
    except Exception as e:
        if "WebDriverException" in str(e):
            logger.exception(
                set_color(
                    "it seems that you exited the browser, please allow the browser "
                    "to complete it's run so that Zeus can bypass captchas and API "
                    "calls",
                    level=50))
        elif "'/usr/lib/firefoxdriver/webdriver.xpi'" in str(e):
            logger.fatal(
                set_color(
                    "firefox was not found in the default location on your system, "
                    "check your installation and make sure it is in /usr/lib, if you "
                    "find it there, restart your system and try again...",
                    level=50))
        else:
            logger.exception(
                set_color(
                    "{} failed to gather the URL from search engine, caught exception '{}' "
                    "exception has been logged to current log file...".format(
                        os.path.basename(__file__),
                        str(e).strip()),
                    level=50))
            request_issue_creation()
        shutdown()
    logger.info(
        set_color(
            "URL successfully gathered, searching for GET parameters..."))

    logger.info(set_color(proxy_string_info))
    req = requests.get(query_url, proxies=proxy_string, headers=headers)
    logger.info(set_color(user_agent_info))
    found_urls = URL_REGEX.findall(req.text)
    for urls in list(found_urls):
        for url in list(urls):
            url = unquote(url)
            if URL_QUERY_REGEX.match(url) and not any(l in url
                                                      for l in exclude):
                if isinstance(url, unicode):
                    url = str(url).encode("utf-8")
                if "webcache" in url:
                    logger.info(
                        set_color(
                            "received webcache URL, extracting URL from webcache..."
                        ))
                    url = extract_webcache_url(url)
                if verbose:
                    try:
                        logger.debug(
                            set_color("found '{}'...".format(
                                url.split(splitter)[0]),
                                      level=10))
                    except TypeError:
                        logger.debug(
                            set_color("found '{}'...".format(
                                str(url).split(splitter)[0]),
                                      level=10))
                    except AttributeError:
                        logger.debug(
                            set_color("found '{}...".format(str(url)),
                                      level=10))
                retval.add(url.split("&amp;")[0])
    logger.info(
        set_color("found a total of {} URL's with a GET parameter...".format(
            len(retval))))
    if len(retval) != 0:
        write_to_log_file(retval, URL_LOG_PATH, "url-log-{}.log")
    else:
        logger.critical(
            set_color(
                "did not find any usable URL's with the given query '{}' "
                "using search engine '{}'...".format(query, url_to_search),
                level=50))
        shutdown()
    return list(retval) if len(retval) != 0 else None
Example #13
def bruteforce_main(verf_hash,
                    algorithm=None,
                    wordlist=None,
                    salt=None,
                    placement=None,
                    all_algs=False,
                    perms="",
                    posx="",
                    use_hex=False):
    """
      Main function to be used for bruteforcing a hash
    """
    wordlist_created = False
    if wordlist is None:
        for item in os.listdir(os.getcwd()):
            if WORDLIST_RE.match(item):
                wordlist_created = True
                wordlist = item
        if wordlist_created is True:
            pass
        else:
            LOGGER.info("Creating wordlist..")
            create_wordlist(perms=perms)
    else:
        LOGGER.info("Reading from, {}..".format(wordlist))

    if algorithm is None:
        hash_type = verify_hash_type(verf_hash, least_likely=all_algs)
        LOGGER.info("Found {} possible hash types to run against: {} ".format(
            len(hash_type) - 1 if hash_type[1] is None else len(hash_type),
            hash_type[0] if hash_type[1] is None else hash_type))
        for alg in hash_type:
            if alg is None:
                err_msg = "Ran out of algorithms to try. There are no more algorithms "
                err_msg += "currently available that match this hashes length, and complexity. "
                err_msg += "Please attempt to use your own wordlist (switch '--wordlist'), "
                err_msg += "download one (switch '--download'), use salt (switch '-S SALT'), "
                err_msg += "or find the algorithm type and create a issue here {}.. "
                LOGGER.fatal(err_msg.format(DAGON_ISSUE_LINK))
                break
            else:
                if ":" in verf_hash:
                    LOGGER.debug(
                        "It appears that you are trying to crack an '{}' hash, "
                        "these hashes have a certain sequence to them that looks "
                        "like this 'USERNAME:SID:LM_HASH:NTLM_HASH:::'. What you're "
                        "wanting is the NTLM part, of the hash, fix your hash and try "
                        "again..".format(alg.upper()))
                    shutdown(1)
                LOGGER.info("Starting bruteforce with {}..".format(
                    alg.upper()))
                bruteforcing = hash_words(verf_hash,
                                          wordlist,
                                          alg,
                                          salt=salt,
                                          placement=placement,
                                          posx=posx,
                                          use_hex=use_hex)
                if bruteforcing is None:
                    LOGGER.warning(
                        "Unable to find a match for '{}', using {}..".format(
                            verf_hash, alg.upper()))
                else:
                    match_found(bruteforcing)
                    break
    else:
        LOGGER.info("Using algorithm, {}..".format(algorithm.upper()))
        results = hash_words(verf_hash,
                             wordlist,
                             algorithm,
                             salt=salt,
                             placement=placement,
                             posx=posx)
        if results is None:
            LOGGER.warning("Unable to find a match using {}..".format(
                algorithm.upper()))
            verify = prompt(
                "Would you like to attempt to verify the hash type automatically and crack it",
                "y/N")
            if verify.lower().startswith("y"):
                bruteforce_main(verf_hash,
                                wordlist=wordlist,
                                salt=salt,
                                placement=placement,
                                posx=posx,
                                use_hex=use_hex)
            else:
                LOGGER.warning(
                    "Unable to produce a result for given hash '{}' using {}.. Exiting.."
                    .format(verf_hash, algorithm.upper()))
        else:
            match_found(results)