Example #1
def call_nmap(nmap_call: list, redr_filepath: str):
    """
    Call Nmap using the given arguments and output redirection file
    """

    LOGGER.info("Executing Nmap call '%s'", " ".join(nmap_call))

    # open file handle to redirect nmap's output
    redr_file = open(redr_filepath, "w")

    # call nmap with the created command
    if VERBOSE:
        with subprocess.Popen(nmap_call,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              bufsize=1,
                              universal_newlines=True) as proc:
            for line in proc.stdout:
                util.printit(line, end="")
                redr_file.write(line)
    else:
        subprocess.call(nmap_call, stdout=redr_file, stderr=subprocess.STDOUT)

    # close redirect file again
    redr_file.close()

    LOGGER.info(
        "Nmap scan done. Stdout and Stderr have been written to '%s'. %s '%s'",
        redr_filepath, "The XML output has been written to",
        nmap_call[nmap_call.index("-oX") + 1])
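The core pattern above, streaming a subprocess's combined stdout/stderr line by line while tee-ing it into a redirect file, works independently of Nmap. A minimal self-contained sketch, using a harmless stand-in command instead of a real nmap call and plain print in place of util.printit:

import subprocess

cmd = ["python3", "-c", "print('line 1'); print('line 2')"]  # stand-in for nmap_call
with open("redirect.txt", "w") as redr_file, \
     subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                      bufsize=1, universal_newlines=True) as proc:
    for line in proc.stdout:
        print(line, end="")    # echo to the terminal ...
        redr_file.write(line)  # ... and tee into the redirect file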
Example #2
def run_nmap_vuln_scripts(targets):
    """ Run all available Nmap SMB vuln scripts on the given targets """

    # open redirect file for Nmap output
    redr_file = open(NMAP_OUTPUT_FILE, "w")

    # iterate over targets and scan them
    for ip, ports in targets.items():
        ports = ",".join(ports)

        # Prepare Nmap call
        call = ["nmap", "-Pn", "-n", "--script", "smb-vuln-*", "-p", ports, ip]

        # Execute Nmap call
        with subprocess.Popen(call,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              bufsize=1,
                              universal_newlines=True) as proc:
            for line in proc.stdout:
                if VERBOSE:
                    util.printit(line, end="")
                redr_file.write(line)
        redr_file.write("\n")

    redr_file.close()
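run_nmap_vuln_scripts expects targets to map each IP to a list of port strings, since the ports are joined with commas for Nmap's -p option. An illustrative, made-up input:

# hypothetical targets dict; port numbers must already be strings
targets = {
    "192.168.178.36": ["139", "445"],
    "192.168.178.40": ["445"],
}
# the first entry results in the call:
#   nmap -Pn -n --script smb-vuln-* -p 139,445 192.168.178.36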
Example #3
 def print_results(self):
     rtypes = [rtype.strip() for rtype in self.config["core"].get("print_result_types", "").split(",")]
     for rtype, result in self.results.items():
         if rtype.value in rtypes:
             util.printit("%s Result:" % rtype.value, color=util.BRIGHT_BLUE)
             self.result_processors[rtype].print_aggr_result(result)
             print()
Example #4
    def move_files_to_outdir(created_files: list, module_dir: str,
                             module_output_dir: str):
        """ Move all files in created_files from module_dir to module_output_dir. """

        for file in created_files:
            file_out_dir = module_output_dir
            if not os.path.isabs(module_output_dir):
                util.printit(
                    "[WARNING] Filepath '%s' should have been absolute" % module_output_dir,
                    color=util.RED)
            os.makedirs(file_out_dir, exist_ok=True)
            file_out_path = os.path.join(file_out_dir, os.path.basename(file))

            # in case of existing output directory, remove it
            if os.path.isdir(file_out_path):
                shutil.rmtree(file_out_path)

            # move file / directory
            if os.path.isabs(file) and (os.path.isfile(file)
                                        or os.path.isdir(file)):
                shutil.move(file, file_out_path)
            else:
                abs_file = os.path.join(module_dir, file)
                if os.path.isfile(abs_file) or os.path.isdir(abs_file):
                    shutil.move(abs_file, file_out_path)
Example #5
def add_cves_to_node(node: dict, ip: str):
    """
    Search and store all CVEs the given node's CPEs are affected by.
    Log a warning if a CPE with its CVEs would be added twice to the node.
    """

    if "cpes" in node:
        node_cpes = node["cpes"]
        node["original_cpes"] = node_cpes
        node["cpes"] = {}
        broad_cpes = set()
        for cpe in node_cpes:
            # take care of printing
            if VERBOSE:
                protocol = "base"
                if "service" in node:
                    protocol = node["service"].upper()
                elif "protocol" in node:
                    protocol = node["protocol"].upper()
                port = ":" + node["portid"] if protocol != "base" else ""
                print_str = util.BRIGHT_CYAN + "[+] %s%s (%s)" % (ip, port, protocol) + util.SANE
                print_str += " - " + util.YELLOW + cpe + util.SANE + "\n"
                util.printit(print_str)

            # get CPE's CVEs
            all_cves, broad_search = get_cves_to_cpe(cpe)

            if VERBOSE:
                print_cves(all_cves)
                columns = shutil.get_terminal_size((80, 20)).columns
                util.printit("-" * columns + "\n")

            # save all CPEs with their CVEs to the node
            for cur_cpe, cves in all_cves.items():
                if broad_search:
                    broad_cpes.add(cpe)

                if cur_cpe not in node["cpes"]:
                    node["cpes"][cur_cpe] = cves
                elif cur_cpe not in node["original_cpes"]:
                    LOGGER.warning("CPE '%s' already stored in host '%s'\'s %s %s", cur_cpe, ip,
                                   "information node; check whether program correctly replaced",
                                   "vaguer CPEs with more specific CPEs")

        # inform user about imprecise software / vulnerability information
        if len(broad_cpes) == 1:
            add_extra_info(node, "cve_extrainfo", ("Original CPE was invalid, unofficial " +
                                                   "or too broad '%s'. " % next(iter(broad_cpes))) +
                           "Determined more specific / correct CPEs and included their CVEs")
        elif len(broad_cpes) > 1:
            add_extra_info(node, "cve_extrainfo", ("The following original CPEs were invalid, " +
                                                   "unofficial or too broad '%s'. "
                                                   % ", ".join(broad_cpes)) +
                           "Determined more specific / correct CPEs and included their CVEs")
    else:
        # Maybe use https://github.com/cloudtracer/text2cpe here?
        LOGGER.warning("OS of host %s does not have a CPE. %s", ip,
                       "Therefore no CVE analysis can be done for this host's OS.")
Example #6
 def check_sufficient_privs():
     nonlocal scan_type
     if os.geteuid() != 0 and ("S" in scan_type or "U" in scan_type):
         util.printit("Configured scan type requires root privileges!",
                      color=util.RED)
         util.printit("Either run as root or change the config file.",
                      color=util.RED)
         return False
     return True
Example #7
    def _process_module_results(self, module_path: str, results: list, created_files: list):
        """
        Process the given module's results, i.e. move all result files and parse the main results
        """

        # create output directory for the module's results
        module_name = self._get_module_name(module_path)
        module_name_short = module_name.replace("%s." % MODULE_DIR_PREFIX, "", 1)
        module_output_dir = os.path.join(self.output_dir, MODULE_DIR_PREFIX)
        module_output_dir = os.path.join(module_output_dir,
                                         os.sep.join(module_name_short.split(".")[:-1]))
        os.makedirs(module_output_dir, exist_ok=True)

        for rtype, result in results:
            if rtype in ResultType:
                # if module provides result as file, parse it to a python data structure
                is_valid_result, is_file_result = True, False
                if isinstance(result, str):
                    try:
                        result = self.result_processors[rtype].parse_result(result)
                        is_file_result = True
                    except InvalidResultException:
                        is_valid_result = False
                else:
                    is_valid_result = self.result_processors[rtype].__class__.is_valid_result(result)

                # if result is valid, store it
                if is_valid_result:
                    # if exactly one network is specified, delete hosts outside of it
                    if len(self.networks) == 1:
                        util.del_hosts_outside_net(result, self.networks[0])

                    if not is_file_result:
                        result_path = os.path.join(module_output_dir, "%s_result.json" % rtype.value.lower())
                        if os.path.isfile(result_path):
                            base, ext = os.path.splitext(result_path)
                            name_hash = hashlib.sha256(module_name_short.encode()).hexdigest()[:5]
                            result_path = base + "_" + name_hash + ext
                        self.result_processors[rtype].store_result(result, result_path)
                        if result_path not in created_files:
                            created_files.append(result_path)

                    self.result_processors[rtype].add_to_results(module_path, result)
                else:
                    self.logger.warning("Warning - module '%s' returned an unprocessable %s result: %s\n",
                                        module_name, rtype.value, result)
                    util.printit("Warning - module '%s' returned an unprocessable %s result: %s\n" %
                                 (module_name, rtype.value, result), color=util.RED)
            else:
                self.logger.warning("Warning - module '%s' returned a result with unknown type: %s\n",
                                    module_name, rtype.value)
                util.printit("Warning - module '%s' returned a result with unknown type: %s\n" %
                             (module_name, rtype.value), color=util.RED)

        # move all created files into the output directory of the current module
        ModuleManager.move_files_to_outdir(created_files, os.path.dirname(module_path),
                                           module_output_dir)
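The results argument is a list of (ResultType, result) tuples, where result is either an in-memory data structure or a string path to a result file (which triggers the parse_result branch above). Illustrative values:

# two result flavors the loop above handles (contents are made up)
results = [
    (ResultType.SCAN, {"192.168.178.36": {"os": [], "tcp": {}, "udp": {}}}),
    (ResultType.WEBSERVER_MAP, "webserver_map_result.json"),  # parsed from file
]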
Example #8
    def _set_module_parameters(self, module):
        """
        Set the given module's parameters depending on which parameters it has declared.

        :param module: the module whose parameters to set
        """
        all_module_attributes = [
            attr_tuple[0] for attr_tuple in inspect.getmembers(module)
        ]

        # "normal" parameters
        if "VERBOSE" in all_module_attributes:
            module.VERBOSE = self.verbose

        if "CONFIG" in all_module_attributes:
            module_name = module.__name__.replace("modules.", "", 1)
            module.CONFIG = self.config.get(module_name, {})

        if "CORE_CONFIG" in all_module_attributes:
            module.CONFIG = copy.deepcopy(self.config.get("core", {}))

        if "NETWORKS" in all_module_attributes:
            module.NETWORKS = copy.deepcopy(self.networks)

        if "OMIT_NETWORKS" in all_module_attributes:
            module.OMIT_NETWORKS = copy.deepcopy(self.omit_networks)

        if "PORTS" in all_module_attributes:
            module.PORTS = copy.deepcopy(self.ports)

        if "HOSTS" in all_module_attributes:
            self._extend_networks_to_hosts()
            module.HOSTS = copy.deepcopy(self.hosts)

        # intermediate results
        if "INTERMEDIATE_RESULTS" in all_module_attributes:
            intermediate_results = {}
            for rtype in module.INTERMEDIATE_RESULTS:
                if rtype in ResultType:
                    intermediate_results[rtype] = copy.deepcopy(
                        self.result_processors[rtype].aggregate_results())
                else:
                    rtype_keys = list(
                        filter(lambda item: item.value == rtype, ResultType))
                    if rtype_keys:
                        rtype_key = rtype_keys[0]
                        intermediate_results[rtype] = copy.deepcopy(
                            self.result_processors[rtype_key].
                            aggregate_results())
                    else:
                        util.printit(
                            "Warning - module '%s' requested an intermediate result "
                            % module.__name__ +
                            "with an unknown type: %s\n" % rtype,
                            color=util.RED)
            module.INTERMEDIATE_RESULTS = intermediate_results
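The injection idea, checking which module-level names a module declares and assigning only those, can be demonstrated standalone; here types.ModuleType stands in for an imported AVAIN module:

import copy
import inspect
import types

module = types.ModuleType("demo_module")
module.VERBOSE = False  # the module "declares" this parameter
# module.PORTS is deliberately not declared

attrs = [name for name, _ in inspect.getmembers(module)]
if "VERBOSE" in attrs:
    module.VERBOSE = True  # injected, because the module asked for it
if "PORTS" in attrs:
    module.PORTS = copy.deepcopy(["80", "443"])  # skipped, not declared

print(module.VERBOSE, hasattr(module, "PORTS"))  # True False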
Example #9
    def crawl(self):
        """
        Start crawling with the configuration set via the constructor.

        :return: a tuple as (webhost_map, new_netlocs, comments)
        """

        # create helper process and setup IPC
        self.socket.listen(1)
        help_out_fd = open(self.helper_outfile, "w")
        with subprocess.Popen("./crawl_helper.py", stdout=help_out_fd, stderr=subprocess.STDOUT) as proc:
            self.helper_pid = proc.pid
            conn = None
            try:
                conn, _ = self.socket.accept()
                # create initial params for crawler helper and send them
                new_urls = set()
                setup_params = {"start_urls": self.start_urls, "allowed_domains": [self.domain],
                                "cookies": self.cookies, "user_agent": self.config["user_agent"]}
                ipc_operations.send_object(conn, setup_params)

                # loop: receive a response object, then send new URLs to crawl. Catch & handle problems.
                while True:
                    try:
                        proc.wait(timeout=0.001)
                        break
                    except subprocess.TimeoutExpired:
                        response = ipc_operations.receive_object(conn)
                        if not response:  # socket is dead / closed
                            break
                        new_urls = self.process_response(response)
                        ipc_operations.send_object(conn, new_urls)
                    except socket.timeout:
                        util.printit("Unix socket connection to scrapy crawler unexpectedly broke. " +
                                     "Quitting crawling of %s" % self.base_url, color=util.RED)
                        break
            finally:
                # ensure connection is closed and helper process killed in any case
                if conn:
                    conn.close()
                proc.kill()

        # after the actual crawling, extract all the gathered cookies from Selenium
        if self.config["use_selenium"].lower() == "true":
            selenium_cookies = self.driver.get_cookies()
            for cookie in selenium_cookies:
                if not any(cookie["name"] == c["name"] and cookie["path"] == c["path"] and
                           cookie["domain"] == c["domain"] for c in self.found_cookies):
                    parsed_cookie = {}
                    for key in ("name", "path", "domain", "httpOnly", "secure"):
                        parsed_cookie[key] = cookie[key]
                    self.found_cookies.append(parsed_cookie)

        help_out_fd.close()
        return self.create_results()
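The ipc_operations helpers are not shown in this example; a plausible length-prefixed pickle scheme over a socket, a hypothetical stand-in rather than the project's real API, could look like this:

import pickle
import socket
import struct

def send_object(conn, obj):
    # length-prefix and send a pickled object
    data = pickle.dumps(obj)
    conn.sendall(struct.pack("!I", len(data)) + data)

def receive_object(conn):
    # receive a length-prefixed pickled object; None on a closed socket
    header = conn.recv(4)
    if len(header) < 4:
        return None
    (length,) = struct.unpack("!I", header)
    data = b""
    while len(data) < length:
        chunk = conn.recv(length - len(data))
        if not chunk:
            return None
        data += chunk
    return pickle.loads(data)

parent, child = socket.socketpair()
send_object(parent, {"start_urls": ["http://example.org"]})
print(receive_object(child))  # {'start_urls': ['http://example.org']}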
Example #10
    def do_assessment(self):
        """
        Conduct the vulnerability assessment either in "normal" or "single network mode".
        """

        networks = self.networks
        net_dir_map = {}
        network_vuln_scores = {}

        def do_network_assessment(networks: list, out_dir: str):
            nonlocal network_vuln_scores
            self.module_manager.set_networks(networks)
            self.module_manager.set_output_dir(out_dir)
            self.module_manager.run()
            self.module_manager.create_results()
            self.module_manager.store_results()
            self.module_manager.print_results()
            net_score = self.module_manager.get_network_vuln_score()
            self.module_manager.reset_results()
            return net_score

        if (not self.separate_networks) or len(networks) <= 1:
            # if there is only one assessment
            score = do_network_assessment(networks, self.output_dir)
            if (not self.separate_networks) or (not self.networks):
                if score:
                    network_vuln_scores["assessed_network"] = score
            else:
                if score:
                    network_vuln_scores[networks[0]] = score
        else:
            # if there are multiple scans, place results into separate directory
            for i, net in enumerate(networks):
                util.printit("Assessment of network '%s':" % net, color=util.YELLOW)
                util.printit("===========================================", color=util.YELLOW)
                net_dir_map[net] = "network_%d" % (i + 1)
                score = do_network_assessment([net], os.path.join(self.output_dir, net_dir_map[net]))
                if score:
                    network_vuln_scores[net] = score
            if net_dir_map:
                net_dir_map_out = os.path.join(self.output_dir, NET_DIR_MAP_FILE)
                with open(net_dir_map_out, "w") as file:
                    file.write(json.dumps(net_dir_map, ensure_ascii=False, indent=3))

        # visualize results
        if not all((not score) or score == "N/A" for score in network_vuln_scores.values()):
            outfile = os.path.join(self.output_dir, "network_vulnerability_ratings.json")
            outfile_orig = os.path.join(self.orig_out_dir, "network_vulnerability_ratings.json")

            title = util.BRIGHT_BLUE + "Final network vulnerability scores:" + util.SANE
            visualizer.visualize_dict_results(title, network_vuln_scores, outfile)
            self.logger.info("The main output file is called '%s'", outfile)
            print("The main output file is called: %s" % outfile_orig)
Example #11
 def _add_user_results(self):
     for rtype, result_files in self.user_results.items():
         for result_file in result_files:
             relpath, abspath = result_file
             basename = os.path.basename(abspath)
             user_result_dir = os.path.join(self.output_dir, USER_RESULT_DIR)
             copy_path = os.path.join(os.path.join(user_result_dir, rtype.value.lower()), basename)
             copy_path = ModuleManager.save_copy_file(abspath, copy_path)
             try:
                 result = self.result_processors[rtype].parse_result_file(copy_path)
                 self.result_processors[rtype].add_to_results(relpath, result)
             except InvalidResultException as e:
                 util.printit(e, color=util.RED)
Example #12
    def _set_modules(self):
        """Assign the modules to use for the assessment"""

        # actual assessment modules
        config_modules = self.config["core"].get("modules", None)
        if config_modules is None:
            util.printit("Warning: No modules specified in config file(s)\n" +
                         "Did you modify the default config file?", color=util.RED)
            config_modules = ""

        self.modules = []
        for module in config_modules.split(","):
            if module:
                module_path = os.path.join(MODULE_DIR_PREFIX, module.strip()).replace(".", os.sep)
                self.modules.append(module_path + ".py")
Example #13
def print_progress(cur_nr, total):
    """ Print progress of testing a path for SQLi """

    # use half the terminal width for the progress bar
    bar_count = shutil.get_terminal_size((80, 20)).columns // 2
    completed_bars, remaining_bars = cur_nr, total - cur_nr
    if total > bar_count:
        completed_bars = int(cur_nr * bar_count / total)
        remaining_bars = bar_count - completed_bars

    if CONFIG.get("show_sqlmap_output", "false").lower() == "false":
        util.clear_previous_line()
        util.clear_previous_line()
    print_str = "    Progress: [" + util.GREEN + completed_bars * "=" + util.SANE + remaining_bars * "·" + "] "
    print_str += "(%d/%d sqlmap calls)\n" % (cur_nr, total)
    util.printit(print_str)
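The bar arithmetic only rescales once total exceeds the available width; below that, one bar per sqlmap call is drawn directly. A quick standalone check of the scaling branch:

bar_count = 40  # pretend the terminal is 80 columns wide
cur_nr, total = 30, 120  # 30 of 120 sqlmap calls completed
completed_bars = int(cur_nr * bar_count / total)  # 10 filled bars
remaining_bars = bar_count - completed_bars       # 30 remaining bars
print("[" + completed_bars * "=" + remaining_bars * "·" + "]")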
Example #14
def run(results):
    """ Entry point of module """
    def print_divider(tool, extra_end="\n"):
        """ Print a divider to differentiate output of different modules """
        nonlocal cols
        tool = " %s " % tool
        count = cols - len(tool)
        util.printit(math.floor(count / 2) * "-",
                     end="",
                     color=util.BRIGHT_CYAN)
        util.printit(tool, end="")
        util.printit(math.ceil(count / 2) * "-" + extra_end,
                     color=util.BRIGHT_CYAN)

    # setup logger
    logger = logging.getLogger(__name__)
    logger.info("Starting SMB enumeration")

    # determine targets
    targets = get_targets()

    # parse specified accounts to use from config
    accounts = []
    if CONFIG.get("accounts", ""):
        accounts_str = CONFIG["accounts"]
        accounts = parse_accounts_str(accounts_str)
    # add guest account to beginning of account list
    accounts.insert(0, ("", ""))

    # run in order: SMBMap, Enum4Linux, Nmap SMB vuln scripts
    cols = shutil.get_terminal_size((100, 20))[0]
    if VERBOSE:
        print_divider("SMBMap", extra_end="")
    run_smbmap(targets, accounts)
    if VERBOSE:
        util.printit()
        print_divider("Enum4Linux")
    run_enum4linux(targets, accounts)
    if CONFIG.get("use_nmap_scripts", "true").lower() == "true":
        if VERBOSE:
            util.printit()
            print_divider("Nmap SMB Vulnerability Scripts")
        run_nmap_vuln_scripts(targets)

    # no AVAIN results are returned
    logger.info("Finished SMB enumeration")
Example #15
def banner():
    border_color, avain_color, by_color, sane = util.BRIGHT_BLUE, util.BRIGHT_BLUE, util.YELLOW, util.SANE
    util.printit("\n|" + "-" * 78 + "|", color=border_color)
    print("""\
{0}                                                                              {0}
{0}                         ___  _    __ ___     ____ _   __                     {0}
{0}                        /   || |  / //   |   /  _// | / /                     {0}
{0}                       / /| || | / // /| |   / / /  |/ /                      {0}
{0}                      / ___ || |/ // ___ | _/ / / /|  /                       {0}
{0}                     /_/  |_||___//_/  |_|/___//_/ |_/      {1}           {0}
{0}                                                                              {0}\
""".format(border_color + "|" + avain_color,
           util.BRIGHT_GREEN + "(%s)" % __version__ + border_color))
    print(border_color + "|" + sane + " " * 19 + by_color +
          "[ Created by - Dustin Born (ra1nb0rn) ]" + sane + " " * 20 +
          border_color + "|" + sane)
    util.printit("|" + "-" * 78 + "|", color=border_color)
    print()
Example #16
def process_hydra_result(hydra_result: dict):
    """
    Process the given hydra result to retrieve
    vulnerable hosts and valid credentials
    """
    global VALID_CREDS

    if VERBOSE:
        util.printit()

    for entry in hydra_result["results"]:
        addr, port = entry["host"], entry["port"]
        account = {"user": entry["login"], "pass": entry["password"]}

        if VERBOSE:
            util.printit("[%s:%s]" % (addr, port), end=" ", color=util.BRIGHT_BLUE)
            util.printit("Valid SSH account found: " + str(account))

        # Add to credential storage
        if addr not in VALID_CREDS:
            VALID_CREDS[addr] = {}
        if port not in VALID_CREDS[addr]:
            VALID_CREDS[addr][port] = []
        if account not in VALID_CREDS[addr][port]:
            VALID_CREDS[addr][port].append(account)
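process_hydra_result consumes Hydra's JSON result format; judging from the fields read above, a minimal input looks like this (values are illustrative):

hydra_result = {
    "results": [
        {"host": "192.168.178.36", "port": "22",
         "login": "root", "password": "vizxv"},
    ]
}
process_hydra_result(hydra_result)
# VALID_CREDS now contains:
#   {"192.168.178.36": {"22": [{"user": "root", "pass": "vizxv"}]}}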
Example #17
def run(results: list):
    """
    Entry point for this module.
    """

    global LOGGER, EXCLUDE_DIRS

    # setup logger
    LOGGER = logging.getLogger(__name__)
    LOGGER.info("Starting with gobuster scan")

    set_targets()
    webserver_map = {}
    EXCLUDE_DIRS = set(
        [x.strip() for x in CONFIG.get("exclude_dirs", "").split(",")])

    # open file handle to redirect output
    redr_file = open(CREATED_FILES[0], "w+")

    # check if installed gobuster is older and has only mode 'dir'
    is_old_gobuster = check_is_old_gobuster()

    for (ip, host, port, protocol) in TARGETS:
        # for every new IP create a new webserver_map entry and print a separator
        if ip not in webserver_map:
            webserver_map[ip] = {}
            LOGGER.info("Initiating scan for %s", ip)
            if VERBOSE:
                util.printit("*" * 30)
                redr_file.write("*" * 30 + "\n")

                util.printit("+ " + ip + " " + "*" * (27 - len(ip)))
                redr_file.write("+ " + ip + " " + "*" * (27 - len(ip)) + "\n")

                util.printit("*" * 30)
                redr_file.write("*" * 30 + "\n")

        if port not in webserver_map[ip]:
            webserver_map[ip][port] = {}
        if host in webserver_map[ip][port]:
            continue

        # omit port in url if possible
        if (protocol == "http" and port == "80") or (protocol == "https"
                                                     and port == "443"):
            url = protocol + "://" + host
        else:
            url = protocol + "://" + host + ":" + port

        # run gobuster on target URL and save the results
        host_web_map = run_gobuster(url, redr_file, is_old_gobuster)
        webserver_map[ip][port][host] = host_web_map

    # close redirect file
    redr_file.close()

    LOGGER.info("Finished gobuster scan")
    results.append((ResultType.WEBSERVER_MAP, webserver_map))
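The returned WEBSERVER_MAP result is nested as ip -> port -> host -> per-host web map. A sketch of the shape with made-up content:

webserver_map = {
    "192.168.178.36": {
        "80": {
            "myhost.local": {"/admin": {}, "/login.php": {}},  # as returned by run_gobuster
        },
    },
}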
Example #18
def print_cves(all_cves: dict):
    """
    Print all CVEs contained in the given dictionary.

    :param all_cves: a dictionary of {cpe: cves} pairs
    """

    all_cve_nodes_list = list(all_cves.values())
    all_cve_nodes = {}
    for list_entry in all_cve_nodes_list:
        for cve_id, cve_node in list_entry.items():
            all_cve_nodes[cve_id] = cve_node

    all_cve_nodes = sorted(all_cve_nodes.values(),
                           key=lambda entry: entry["cvssv3"],
                           reverse=True)
    count = int(CONFIG.get("max_print_count", -1))
    if count == -1:
        count = len(all_cve_nodes)
    for print_node in all_cve_nodes[:count]:
        description = print_node["description"].replace(
            "\r\n\r\n", "\n").replace("\n\n", "\n").strip()
        print_str = util.GREEN + print_node["id"] + util.SANE
        print_str += " (" + util.MAGENTA + str(
            print_node["cvssv3"]) + util.SANE + "): %s\n" % description

        if "exploits" in print_node:
            print_str += util.YELLOW + "Exploits:  " + util.SANE + print_node[
                "exploits"][0] + "\n"
            if len(print_node["exploits"]) > 1:
                for edb_link in print_node["exploits"][1:]:
                    print_str += len("Exploits:  ") * " " + edb_link + "\n"

        print_str += "Reference: " + print_node["href"]
        print_str += ", " + print_node["published"].split(" ")[0]
        util.printit(print_str)
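print_cves expects a dictionary of {cpe: {cve_id: cve_node}} entries, and each cve_node needs at least the fields read above. An illustrative input (CVE data abbreviated):

all_cves = {
    "cpe:/a:openbsd:openssh:7.4": {
        "CVE-2018-15473": {
            "id": "CVE-2018-15473",
            "cvssv3": 5.3,
            "description": "OpenSSH through 7.7 is prone to a user enumeration vulnerability.",
            "href": "https://nvd.nist.gov/vuln/detail/CVE-2018-15473",
            "published": "2018-08-17 19:29:00",
        },
    },
}
print_cves(all_cves)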
Example #19
    def print_cves():
        nonlocal all_cves

        all_cve_nodes_list = list(all_cves.values())
        all_cve_nodes = {}
        for list_entry in all_cve_nodes_list:
            for cve_id, cve_node in list_entry.items():
                all_cve_nodes[cve_id] = cve_node

        all_cve_nodes = sorted(all_cve_nodes.values(),
                               key=lambda entry: entry["cvssv3"],
                               reverse=True)
        count = int(CONFIG.get("max_print_count", -1))
        if count == -1:
            count = len(all_cve_nodes)
        for print_node in all_cve_nodes[:count]:
            description = print_node["description"].replace(
                "\r\n\r\n", "\n").replace("\n\n", "\n").strip()
            print_str = util.GREEN + print_node["id"] + util.SANE
            print_str += " (" + util.MAGENTA + str(
                print_node["cvssv3"]) + util.SANE + "): "
            print_str += description + "\n" + "Reference: " + print_node["href"]
            print_str += ", " + print_node["published"].split(" ")[0]
            util.printit(print_str)
Example #20
 def print_divider(tool, extra_end="\n"):
     """ Print a divider to differentiate output of different modules """
     nonlocal cols
     tool = " %s " % tool
     count = cols - len(tool)
     util.printit(math.floor(count / 2) * "-",
                  end="",
                  color=util.BRIGHT_CYAN)
     util.printit(tool, end="")
     util.printit(math.ceil(count / 2) * "-" + extra_end,
                  color=util.BRIGHT_CYAN)
Example #21
def add_cves_to_node(node: dict, ip: str):
    """
    Search and store all CVEs the given node's CPEs are affected by.
    Log a warning if a CPE with its CVEs would be added twice to the node.
    """
    def print_cves():
        nonlocal all_cves

        all_cve_nodes_list = list(all_cves.values())
        all_cve_nodes = {}
        for list_entry in all_cve_nodes_list:
            for cve_id, cve_node in list_entry.items():
                all_cve_nodes[cve_id] = cve_node

        all_cve_nodes = sorted(all_cve_nodes.values(),
                               key=lambda entry: entry["cvssv3"],
                               reverse=True)
        count = int(CONFIG.get("max_print_count", -1))
        if count == -1:
            count = len(all_cve_nodes)
        for print_node in all_cve_nodes[:count]:
            description = print_node["description"].replace(
                "\r\n\r\n", "\n").replace("\n\n", "\n").strip()
            print_str = util.GREEN + print_node["id"] + util.SANE
            print_str += " (" + util.MAGENTA + str(
                print_node["cvssv3"]) + util.SANE + "): "
            print_str += description + "\n" + "Reference: " + print_node["href"]
            print_str += ", " + print_node["published"].split(" ")[0]
            util.printit(print_str)

    if "cpes" in node:
        node_cpes = node["cpes"]
        node["original_cpes"] = node_cpes
        node["cpes"] = {}
        broad_cpes = set()
        for cpe in node_cpes:
            # take care of printing
            if VERBOSE:
                protocol = "base"
                if "service" in node:
                    protocol = node["service"].upper()
                elif "protocol" in node:
                    protocol = node["protocol"].upper()
                port = ":" + node["portid"] if protocol != "base" else ""
                print_str = util.BRIGHT_CYAN + "[+] %s%s (%s)" % (
                    ip, port, protocol) + util.SANE
                print_str += " - " + util.YELLOW + cpe + util.SANE + "\n"
                util.printit(print_str)

            # get CPE's CVEs
            all_cves, broad_search = get_cves_to_cpe(cpe)

            if VERBOSE:
                print_cves()
                columns = shutil.get_terminal_size((80, 20)).columns
                util.printit("-" * columns + "\n")

            # save all CPEs with their CVEs to the node
            for cur_cpe, cves in all_cves.items():
                if broad_search:
                    broad_cpes.add(cpe)

                if cur_cpe not in node["cpes"]:
                    node["cpes"][cur_cpe] = cves
                elif cur_cpe not in node["original_cpes"]:
                    LOGGER.warning(
                        "CPE '%s' already stored in host '%s'\'s %s %s",
                        cur_cpe, ip,
                        "information node; check whether program correctly replaced",
                        "vaguer CPEs with more specific CPEs")

        # inform user about imprecise software / vulnerability information
        if len(broad_cpes) == 1:
            add_extra_info(
                node, "cve_extrainfo",
                ("Original CPE was invalid, unofficial " +
                 "or too broad '%s'. " % next(iter(broad_cpes))) +
                "Determined more specific / correct CPEs and included their CVEs"
            )
        elif len(broad_cpes) > 1:
            add_extra_info(
                node, "cve_extrainfo",
                ("The following original CPEs were invalid, " +
                 "unofficial or too broad '%s'. " % ", ".join(broad_cpes)) +
                "Determined more specific / correct CPEs and included their CVEs"
            )
    else:
        # Maybe use https://github.com/cloudtracer/text2cpe here?
        LOGGER.warning(
            "OS of host %s does not have a CPE. %s", ip,
            "Therefore no CVE analysis can be done for this host's OS.")
Example #22
    def run_modules(self, modules, mtype=""):
        """
        Run the given modules
        """

        def get_created_files(module):
            """ Retrieve all files created by the module """

            created_files = []
            for attr, val in inspect.getmembers(module):
                if attr == "CREATED_FILES":
                    created_files = val
                    break
            return created_files


        # create the output directory for all module results
        os.makedirs(self.output_dir, exist_ok=True)

        if mtype:
            self.logger.info("Invoking the specified %s-modules" % mtype)
            print(util.BRIGHT_BLUE + "Running the specified %s-modules:" % mtype)
        else:
            self.logger.info("Invoking the specified modules")
            print(util.BRIGHT_BLUE + "Running the specified modules:")

        if len(modules) == 1:
            self.logger.info("1 module was found")
        else:
            self.logger.info("%d modules were found", len(modules))
        self.logger.debug("The following modules have been found: %s" % ", ".join(modules))

        # iterate over all specified modules
        for i, module_path in enumerate(modules):
            # get module name
            module_name = self._get_module_name(module_path)
            module_name_no_prefix = module_name.replace("%s." % MODULE_DIR_PREFIX, "", 1)

            # import the respective python module
            module = importlib.import_module(module_name)

            # change into the module's directory
            main_cwd = os.getcwd()
            os.chdir(os.path.dirname(module_path))

            # set the module's parameters (e.g. config, verbosity, ...)
            self._set_module_parameters(module)

            # setup execution of module with its specific function to run
            self.logger.info("Invoking module %d of %d - %s", i+1,
                             len(modules), module_name_no_prefix)
            module_results = []
            module_func = getattr(module, MODULE_RUN_FUNCTION, None)
            if not module_func or not callable(module_func):
                self.logger.warning("Module '%s' does not have a '%s' function. Module is skipped.",
                                    module_name, MODULE_RUN_FUNCTION)
                os.chdir(main_cwd)
                continue

            module_thread = threading.Thread(target=module_func, args=(module_results,))

            # print section header for current module
            if self.verbose:
                columns = shutil.get_terminal_size((80, 20)).columns
                count = columns - (len(module_name_no_prefix) + 2)
                if i > 0:
                    print()
                util.printit(math.floor(count / 2) * "=", end="", color=util.BRIGHT_BLUE)
                util.printit(" %s " % module_name_no_prefix, end="", color=util.SANE)
                util.printit(math.ceil(count / 2) * "=" + "\n", color=util.BRIGHT_BLUE)

            # run module
            try:
                module_thread.start()
                show_progress_state = 0
                while module_thread.is_alive():
                    module_thread.join(timeout=self.join_timeout)

                    if not util.acquire_print(timeout=PRINT_LOCK_ACQUIRE_TIMEOUT):
                        continue

                    print(util.GREEN + "Running module %d of %d - " % (i+1, len(modules)), end="")
                    print(util.SANE + module_name_no_prefix + "  ", end="")
                    print(util.YELLOW + SHOW_PROGRESS_SYMBOLS[show_progress_state])
                    print(util.SANE, end="")  # cleanup colors
                    util.clear_previous_line()
                    util.release_print()

                    if (show_progress_state + 1) % len(SHOW_PROGRESS_SYMBOLS) == 0:
                        show_progress_state = 0
                    else:
                        show_progress_state += 1
            except KeyboardInterrupt:
                kill_func = getattr(module, MODULE_KILL_FUNCTION, None)
                if kill_func and callable(kill_func):
                    kill_func()
                util.printit("Module '%s' was manually killed." % module_name_no_prefix, color=util.RED)

            # change back into the main directory
            os.chdir(main_cwd)

            # process module results and misc created files
            created_files = get_created_files(module)
            if mtype != "update":
                # filter out non-tuple results; iterate over a copy so removal is safe
                for res in list(module_results):
                    if not isinstance(res, tuple):
                        self.logger.warning("Warning - module '%s' returned a non-tuple result: %s",
                                            module_name, type(res))
                        util.printit("Warning - module '%s' returned a non-tuple result: %s\n" %
                                     (module_name, type(res)), color=util.RED)
                        module_results.remove(res)

                # warn if no results are available
                if not module_results and created_files:
                    self.logger.info("Module %s did not return any final results" % module_name)
                elif not created_files:
                    self.logger.info("Module %s did not return any results" % module_name)

                self._process_module_results(module_path, module_results, created_files)
            elif mtype == "update":
                update_output_dir = os.path.join(self.output_dir, UPDATE_OUT_DIR)
                module_output_dir = os.path.join(update_output_dir, os.sep.join(module_name_no_prefix.split(".")[:-1]))
                os.makedirs(module_output_dir, exist_ok=True)
                ModuleManager.move_files_to_outdir(created_files, os.path.dirname(module_path), module_output_dir)
            else:
                self.logger.info("Module %s did not return any results" % module_name)

            self.logger.info("Module %d of %d completed", i+1, len(modules))

        if self.verbose:
            util.printit("\n" + shutil.get_terminal_size((80, 20)).columns * "=" + "\n", color=util.BRIGHT_BLUE)

        if len(modules) == 1:
            print(util.GREEN + "The one module has completed.")
        else:
            print(util.GREEN + "All %d modules have completed." % len(modules))
        print(util.SANE)
        self.logger.info("All modules have been executed")
Example #23
    def process_response(self, response):
        """
        Process the given scrapy response. Extract new URLs, HTTP parameters,
        new network locations, cookies and code comments.

        :return: a set of URLs that shall be crawled in the future
        """

        if response.status == 404:
            return set()

        # store response HTTP code if not redirect
        if not (response.status == 301 or response.status == 302):
            if response.url not in self.crawled_urls:
                self.crawled_urls[response.url] = response.status

        # some colorful printing
        if self.verbose:
            code = str(response.status)
            extra_print = ""
            if code[0] == "2":
                color = util.GREEN
            elif code[0] == "3":
                color = util.BRIGHT_CYAN
                extra_print = (util.BRIGHT_CYAN + " --> " + util.SANE +
                               response.headers["Location"].decode())
            elif code[0] == "4":
                color = util.RED
            elif code[0] == "5":
                color = util.MAGENTA
            else:
                color = util.SANE
            util.printit("  [", end="")
            util.printit(str(response.status), color=color, end="")
            util.printit("]  " + response.url + extra_print)

        # extract cookies from HTTP header response
        self.extract_cookies(response.headers.getlist("Set-Cookie"),
                             response.url)

        # use scrapy's lxml linkextractor to extract links / URLs
        try:
            scrapy_links = LinkExtractor(
                allow_domains=[self.domain],
                tags=("a", "area", "script", "link", "source", "img"),
                attrs=("src", "href"),
                deny_extensions=set()).extract_links(response)
        except AttributeError as e:
            if str(e) == "Response content isn't text":
                # stop processing and return no new URLs
                return set()
            raise e

        # run the different URL / link discovery mechanisms
        linkfinder_urls, dynamic_urls, form_urls, sub_urls = set(), set(), set(), set()
        if self.config["use_linkfinder"].lower() == "true":
            linkfinder_urls = self.run_linkfinder(response.text,
                                                  response.urljoin)
        if self.config["use_selenium"].lower() == "true":
            dynamic_urls = self.extract_dynamic_urls(response.url)
        if self.config["extract_info_from_forms"].lower() == "true":
            form_data = extract_form_data(response)
            # extract new URLs and HTTP parameters from parsed form data
            form_urls = self.process_form_data(form_data, response.urljoin)

        # extract sub URLs, i.e. URLs with parent paths
        sub_urls = extract_sub_urls(response.url)

        # extract comments if configured
        if self.config["extract_comments"].lower() == "true":
            self.extract_comments(response)

        # unite discovered URLs
        urls = set()
        for link in scrapy_links:
            urls.add(link.url)
        urls |= linkfinder_urls
        urls |= dynamic_urls
        urls |= form_urls
        urls |= sub_urls

        # store info about redirect and add redirect URL to discovered URLs
        if response.status == 301 or response.status == 302:
            location = response.headers["Location"].decode()
            self.redirects[response.url] = {
                "code": response.status,
                "to": location
            }
            urls.add(self.to_absolute_url(location, response.urljoin))

        # process all the discovered URLs, i.e. extract new information and decide which to crawl
        yield_urls = set()
        for url in urls:
            # strip anchor
            if "#" in url:
                url = url[:url.rfind("#")]

            # replace entities and parse URL
            url = url.replace("&amp;", "&")
            url = url.replace("&#038;", "&")
            parsed_url = urllib.parse.urlparse(url)

            # extract GET parameters and cut URL if option is configured
            params = {}
            if parsed_url.query:
                if self.config["crawl_parameter_links"].lower() != "true":
                    url = "%s://%s/%s" % (parsed_url.scheme, parsed_url.netloc,
                                          parsed_url.path)
                params = get_query_params(parsed_url.query)
            elif url.endswith("?"):
                url = url[:-1]

            # add URL as instance of its path
            if self.url_has_netloc(url) and params:
                self.add_path_instance(parsed_url.path, params, {}, {})

            # skip already crawled URLs
            if url in self.found_urls:
                continue
            self.found_urls.add(url)

            # skip URLs with different network location
            if not self.url_has_netloc(url):
                continue
            if url == response.url:
                continue

            # try to avoid going to a logout / login page if custom cookies were supplied
            if self.cookies:
                if "logout" in parsed_url.path.split("/")[-1].lower():
                    continue
                elif "logout" in parsed_url.query.lower():
                    continue

                if "login" in parsed_url.path.split("/")[-1].lower():
                    continue
                elif "login" in parsed_url.query.lower():
                    continue

            # check whether to add this URL to the to-be-crawled URLs
            if url not in yield_urls:
                # limit the crawling depth
                max_depth = int(self.config["max_depth"])
                if max_depth > 0:
                    depth = parsed_url.path.count("/")
                    if depth > max_depth:
                        continue

                # limit the number of times a path can be crawled to avoid endless
                # crawling upon GET parameter variation
                if parsed_url.path not in self.crawled_paths:
                    self.crawled_paths[parsed_url.path] = 0
                self.crawled_paths[parsed_url.path] += 1
                if self.crawled_paths[parsed_url.path] > int(
                        self.config["max_path_visits"]):
                    continue

                yield_urls.add(url)

        return yield_urls
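The URL normalization above, anchor stripping, entity replacement and optional query cutting, builds on urllib.parse; a condensed standalone version (with parse_qsl standing in for the project's get_query_params helper):

import urllib.parse

url = "https://example.org/shop/item.php?id=3&amp;page=2#reviews"
if "#" in url:
    url = url[:url.rfind("#")]  # strip the anchor
url = url.replace("&amp;", "&").replace("&#038;", "&")
parsed_url = urllib.parse.urlparse(url)
params = dict(urllib.parse.parse_qsl(parsed_url.query))
print(parsed_url.path, params)  # /shop/item.php {'id': '3', 'page': '2'}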
Example #24
def run(results: list):
    """
    Analyze the specified hosts in HOSTS for susceptibility to SSH password
    cracking with the configured list of credentials (by default the Mirai creds).

    :return: a tuple containing the analysis results/scores
    """

    # setup logger
    global HOSTS, LOGGER, CREATED_FILES

    HOSTS = INTERMEDIATE_RESULTS[ResultType.SCAN]
    LOGGER = logging.getLogger(__name__)
    LOGGER.info("Starting with Mirai SSH susceptibility analysis")

    # cleanup potentially old files
    cleanup()
    # write all potential targets to a file
    wrote_target = write_targets_file()

    # run hydra if at least one target exists
    if wrote_target:
        CREATED_FILES.append(HYDRA_TARGETS_FILE)
        # get wordlists
        wordlists = [w.strip() for w in CONFIG.get("wordlists", MIRAI_WORDLIST_PATH).split(",")]

        if len(wordlists) > 1:
            os.makedirs(HYDRA_OUTPUT_DIR, exist_ok=True)

        # call Hydra once for every configured wordlist
        for i, wlist in enumerate(wordlists):
            if not os.path.isfile(wlist):
                LOGGER.warning("%s does not exist", wlist)
                continue

            # determine correct output file names
            text_out, json_out = HYDRA_TEXT_OUTPUT, HYDRA_JSON_OUTPUT
            if i > 0:
                txt_base, txt_ext = os.path.splitext(text_out)
                json_base, json_ext = os.path.splitext(json_out)
                text_out = txt_base + "_%d" % i + txt_ext
                json_out = json_base + "_%d" % i + json_ext

            if len(wordlists) > 1:
                text_out = os.path.join(HYDRA_OUTPUT_DIR, text_out)
                json_out = os.path.join(HYDRA_OUTPUT_DIR, json_out)

            # Prepare Hydra call
            tasks = CONFIG.get("tasks", "4")
            hydra_call = ["hydra", "-C", wlist, "-I", "-t", tasks, "-M", HYDRA_TARGETS_FILE,
                          "-b", "json", "-o", json_out, "ssh"]
            LOGGER.info("Beginning Hydra SSH Brute Force with command: %s", " ".join(hydra_call))
            redr_file = open(text_out, "w")
            CREATED_FILES += [text_out, json_out]
            found_credential_regex = re.compile(r"^\[(\d+)\]\[(\w+)\]\s*host:\s*(\S+)\s*login:\s*(\S+)\s*password:\s*(\S+)\s*$")

            # Execute Hydra call
            if VERBOSE:
                with subprocess.Popen(hydra_call, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                      bufsize=1, universal_newlines=True) as proc:
                    for line in proc.stdout:
                        # color found credentials like Hydra does when run in TTY
                        colored_line = util.color_elements_in_string(line, found_credential_regex, util.BRIGHT_GREEN)
                        # print modified line to stdout and store original in redirect file
                        util.printit(colored_line, end="")
                        redr_file.write(line)
            else:
                subprocess.call(hydra_call, stdout=redr_file, stderr=subprocess.STDOUT)
            redr_file.close()
            LOGGER.info("Done")

            # parse and process Hydra output
            LOGGER.info("Processing Hydra Output")
            if os.path.isfile(json_out):
                process_hydra_output(json_out)
            LOGGER.info("Done")
    else:
        # remove created but empty targets file
        os.remove(HYDRA_TARGETS_FILE)
        LOGGER.info("Did not receive any targets. Skipping analysis.")
        CREATED_FILES = []

    # assign a score to every vulnerable host
    result = {}
    for host in VALID_CREDS:
        result[host] = 9.8  # Give vulnerable host CVSSv3 score of 9.8

    # store valid credentials
    if VALID_CREDS:
        with open(VALID_CREDS_FILE, "w") as file:
            file.write(json.dumps(VALID_CREDS, ensure_ascii=False, indent=3))
        CREATED_FILES.append(VALID_CREDS_FILE)

    # return result
    results.append((ResultType.VULN_SCORE, result))
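found_credential_regex mirrors the credential lines in Hydra's text output; a quick standalone check against a sample line (the sample itself is illustrative):

import re

found_credential_regex = re.compile(
    r"^\[(\d+)\]\[(\w+)\]\s*host:\s*(\S+)\s*login:\s*(\S+)\s*password:\s*(\S+)\s*$")
line = "[22][ssh] host: 192.168.178.36   login: root   password: vizxv"
match = found_credential_regex.match(line)
if match:
    port, service, host, login, password = match.groups()
    print(host, login, password)  # 192.168.178.36 root vizxv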
Example #25
def run(results: list):
    """
    Analyze the specified hosts in HOSTS for CVEs that its software (or hardware) is affected by.
    """
    def process_port_cves(protocol: str):
        nonlocal host, ip
        for _, portinfos in host[protocol].items():
            for portinfo in portinfos:
                add_cves_to_node(portinfo, ip)

    global LOGGER, DB_CURSOR, CREATED_FILES

    # setup logger
    LOGGER = logging.getLogger(__name__)
    LOGGER.info("Starting with CVE analysis")

    hosts = INTERMEDIATE_RESULTS[ResultType.SCAN]

    # initialize database and check for up-to-dateness
    db_conn = None
    try:
        check_database()
    except Exception as excpt:
        util.printit(str(excpt), color=util.RED)
    try:
        db_creation_date = datetime.datetime.fromtimestamp(
            os.stat(DATABASE_FILE).st_ctime)
        LOGGER.info(
            "Gathering information using the local CVE database last updated on %s",
            str(db_creation_date))
        db_conn = sqlite3.connect(DATABASE_FILE)
        DB_CURSOR = db_conn.cursor()
    except Exception as excpt:
        util.printit(str(excpt), color=util.RED)
        return

    # start CVE discovery
    LOGGER.info("Starting with CVE discovery of all hosts")
    CREATED_FILES += [HOST_CVE_FILE, SUMMARY_FILE]
    for ip, host in hosts.items():

        if VERBOSE:
            header = "******** %s ********" % ip
            full_header = ("*" * len(header) + "\n" + header + "\n" +
                           "*" * len(header) + "\n")
            util.printit(full_header)

        # get TCP and UDP CVEs
        process_port_cves("tcp")
        process_port_cves("udp")

        # get OS CVEs
        if CONFIG.get("skip_os", "false").lower() == "true":
            LOGGER.info("Skipping OS CVE analysis as stated in config file")
        else:
            for os_info in host["os"]:
                add_cves_to_node(os_info, ip)

    # compute scores and create summary
    LOGGER.info("Done")
    LOGGER.info("Computing final CVSSv3 scores for all hosts")
    scores = calculate_final_scores(hosts)
    LOGGER.info("Done")

    with open(HOST_CVE_FILE, "w") as file:
        file.write(json.dumps(hosts, ensure_ascii=False, indent=3))

    LOGGER.info("Creating summary")
    create_cve_summary(hosts)
    LOGGER.info("Done")

    results.append((ResultType.VULN_SCORE, scores))
Example #26
    def __init__(self, networks: list, add_networks: list, omit_networks: list,
                 update_modules: bool, config_path: str, ports: list,
                 output_dir: str, user_results: dict, single_network: bool,
                 verbose: bool):
        """
        Create a Controller object.

        :param networks: A list of strings specifying the networks to analyze
        :param add_networks: A list of networks as strings to additionally analyze
        :param omit_networks: A list of networks as strings to omit from the analysis
        :param update_modules: Whether modules should be updated or initialized
        :param config_path: The path to a config file
        :param ports: A list of port expressions
        :param output_dir: A string specifying the output directory of the analysis
        :param user_results: A dict mapping result types to lists of filenames whose files contain user provided results
        :param single_network: A boolean specifying whether all given networks are to be considered
                               hosts in one single network
        :param verbose: Specifying whether to provide verbose output or not
        """

        self.networks = networks if networks is not None else []
        self.networks += add_networks if add_networks is not None else []
        self.omit_networks = omit_networks

        # determine output directory
        if output_dir:
            self.output_dir = output_dir
        else:
            self.output_dir = "avain_output-" + util.get_current_timestamp()
        self.orig_out_dir = self.output_dir
        self.output_dir = os.path.abspath(self.output_dir)
        os.makedirs(self.output_dir, exist_ok=True)

        # check for user scan and analysis results
        self.user_results = {}

        if user_results:
            for rtype, filenames in user_results.items():
                if rtype not in self.user_results:
                    self.user_results[rtype] = []
                for filename in filenames:
                    self.user_results[rtype].append(
                        (filename, os.path.abspath(filename)))

        # store absolute config path
        if config_path:
            config_path = os.path.abspath(config_path)

        # change into AVAIN directory
        self.original_cwd = os.getcwd()
        core_dir = os.path.dirname(os.path.realpath(__file__))
        avain_dir = os.path.abspath(os.path.join(core_dir, os.pardir))
        os.chdir(avain_dir)

        # parse default and user configs
        self.config = {}
        if os.path.isfile(DEFAULT_CONFIG_PATH):
            try:
                self.config = util.parse_config(DEFAULT_CONFIG_PATH,
                                                self.config)
            except Exception as excpt:
                print(util.MAGENTA +
                      ("Warning: Could not parse default config file. " +
                       "Proceeding without default config.\n") + util.SANE,
                      file=sys.stderr)
                util.print_exception_and_continue(excpt)
        elif not config_path:
            print(util.MAGENTA + "Warning: Could not find default config.\n" +
                  util.SANE,
                  file=sys.stderr)

        if config_path:
            try:
                self.config = util.parse_config(config_path, self.config)
            except Exception as excpt:
                print(util.MAGENTA +
                      ("Warning: Could not parse custom config file. " +
                       "Proceeding without custom config.\n") + util.SANE,
                      file=sys.stderr)
                util.print_exception_and_continue(excpt)

        # set remaining variables
        self.single_network = single_network
        self.verbose = verbose
        self.ports = ports
        self.update_modules = update_modules
        if (not self.update_modules) and self.config["core"][
                "automatic_module_updates"].lower() == "true":
            # retrieve last update timestamp based on last CPE dict download time
            last_update = util.get_creation_date(
                "modules/resources/official-cpe-dictionary_v2.2.xml")
            passed_time = datetime.datetime.now() - last_update
            update_interval = datetime.timedelta(
                minutes=int(self.config["core"]["module_update_interval"]))
            if passed_time > update_interval:
                util.printit(
                    "[INFO] Module data is out-of-date and will be updated\n",
                    color=util.MAGENTA)
                self.update_modules = True

        # setup module_manager
        self.module_manager = ModuleManager(self.networks, self.output_dir,
                                            self.omit_networks, self.ports,
                                            self.user_results, self.config,
                                            self.verbose)

        # setup logging
        self.setup_logging()
        self.logger.info("Starting the AVAIN program")
        self.logger.info("Executed call: avain %s", " ".join(sys.argv[1:]))
Example #27
def run_wpscan(targets, redr_fd_color, redr_fd):
    """ Run WPScan on the given targets and redirect the output """

    def reader(fd):
        """Read from the given file descriptor"""
        try:
            while True:
                buffer = os.read(fd, 1024)
                if not buffer:
                    return
                yield buffer
        except (IOError, OSError):
            # reading from the PTY master raises EIO once the slave end is closed
            pass

    found_wp_versions = set()
    cols = shutil.get_terminal_size((80, 20)).columns
    for target in targets:
        # just some printing ...
        count = cols - len(" %s " % target)
        redr_fd_color.write(util.BRIGHT_CYAN + math.floor(count / 2) * "-" + util.SANE)
        redr_fd.write(math.floor(count / 2) * "-")
        redr_fd_color.write(" %s " % target)
        redr_fd.write(" %s " % target)
        redr_fd_color.write(util.BRIGHT_CYAN + math.ceil(count / 2) * "-" + util.SANE + "\n")
        redr_fd.write(math.ceil(count / 2) * "-" + "\n")
        if VERBOSE:
            util.printit(math.floor(count / 2) * "-", end="", color=util.BRIGHT_CYAN)
            util.printit(" %s " % target, end="")
            util.printit(math.ceil(count / 2) * "-", color=util.BRIGHT_CYAN)

        # set up WPScan call (for --enumerate: 'dbe' is disabled for now due to a WPScan error)
        call = ["wpscan", "-v", "--url", target, "--enumerate", "vp,vt,tt,cb,u,m"]
        if CONFIG.get("wpvulndb_api_token", ""):
            call += ["--api-token", CONFIG["wpvulndb_api_token"]]
        if CONFIG.get("cookie_str", ""):
            call += ["--cookie-string", CONFIG["cookie_str"]]
        if CONFIG.get("max_threads", ""):
            call += ["--max-threads", CONFIG["max_threads"]]
        if CONFIG.get("disable_tls_checks", "true").lower() == "true":
            call += ["--disable-tls-checks"]
        if CONFIG.get("stealthy", "false").lower() == "true":
            call += ["--stealthy"]
        elif CONFIG.get("user_agent", ""):
            call += ["--user-agent", CONFIG["user_agent"]]

        util.acquire_print()
        # execute WPScan call in separate PTY to capture good output
        # adapted from: https://stackoverflow.com/a/28925318
        master, slave = pty.openpty()
        with subprocess.Popen(call, stdout=slave, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
                              bufsize=1, universal_newlines=True) as proc:
            os.close(slave)
            next_line_add = ""
            for line in reader(master):
                # decoding and processing of WPScan output specifics
                line = line.decode()
                line = line.replace("\r\n", "\n")
                line = next_line_add + line
                next_line_add = ""
                if line.endswith("\x1b["):
                    next_line_add = line[-2:]
                    line = line[:-2]

                # print to screen
                if VERBOSE:
                    print(line, end="")

                # do not write temporary output to output file
                if not ("Time" in line and "ETA" in line):
                    redr_fd_color.write(line)
                    redr_fd.write(util.strip_ansi_escape_seq(line))

                # get WordPress version from WPScan output
                ver_match = WP_VERSION_RE.search(line)
                if ver_match:
                    # proactively try to fix badly formatted version strings
                    version = ver_match.group(1)
                    dot_count = version.count(".")
                    if dot_count == 0:
                        version += ".0"
                    elif dot_count == 1 and version.endswith("."):
                        version += "0"
                    elif dot_count == 2 and version.endswith("."):
                        version += "0"
                    elif dot_count == 3 and version.endswith("."):
                        version = version[:-1]
                    found_wp_versions.add(version)

        os.close(master)
        util.release_print()

        util.printit("-" * cols, color=util.BRIGHT_CYAN)
        redr_fd_color.write(util.BRIGHT_CYAN + "-" * cols + util.SANE + "\n")
        redr_fd.write("-" * cols + "\n")
    return found_wp_versions
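The version clean-up inside the read loop pads or trims malformed strings such as '4.' or '4.9.' into a complete release number. The same rules in isolation, as a self-contained sketch (the helper name is made up for illustration):

def normalize_wp_version(version: str) -> str:
    """Pad or trim a possibly malformed WordPress version string."""
    dot_count = version.count(".")
    if dot_count == 0:
        version += ".0"           # '4'      -> '4.0'
    elif dot_count in (1, 2) and version.endswith("."):
        version += "0"            # '4.'     -> '4.0',  '4.9.' -> '4.9.0'
    elif dot_count == 3 and version.endswith("."):
        version = version[:-1]    # '4.9.1.' -> '4.9.1'
    return version

assert normalize_wp_version("4") == "4.0"
assert normalize_wp_version("4.9.") == "4.9.0"
assert normalize_wp_version("4.9.1.") == "4.9.1"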
Example #28
    def _aggregate_results(self):
        """
        Aggregate the "grouped and reduced" results to one final result. The
        aggregation is done depending on the config value for "scan_result_aggr_scheme".

        Value "SINGLE"   : the single result with the highest trust rating is chosen
        Value "MULTIPLE" : the results are returned without further processing
        Value "FILTER"   : similar products are filtered out, i.e. out of macOS 10.12
                           and macOS 10.13, only the one with the highest trust rating
                           is returned
        """

        processed_results = self._group_and_reduce()

        if self.config["core"].get("scan_result_aggr_scheme",
                                   "").upper() == "MULTIPLE":
            return processed_results

        if self.config["core"].get("scan_result_aggr_scheme",
                                   "").upper() == "SINGLE":
            for _, host in processed_results.items():
                if "os" in host:
                    host["os"] = [
                        max(host["os"], key=lambda entry: entry["trust"])
                    ]

                for protocol in ("tcp", "udp"):
                    if protocol in host:
                        for portid, port_entries in host[protocol].items():
                            host[protocol][portid] = [
                                max(port_entries,
                                    key=lambda entry: entry["trust"])
                            ]
            return processed_results

        if self.config["core"].get("scan_result_aggr_scheme",
                                   "FILTER").upper() == "FILTER":
            product_groups = self._group_by_product(processed_results)

            for _, host in product_groups.items():
                if "os" in host:
                    os_items = []
                    for group in host["os"]:
                        os_items.append(
                            max(group, key=lambda entry: entry["trust"]))
                    host["os"] = os_items

                for protocol in ("tcp", "udp"):
                    if protocol in host:
                        for portid, port_groups in host[protocol].items():
                            port_items = []
                            for group in port_groups:
                                port_items.append(
                                    max(group,
                                        key=lambda entry: entry["trust"]))
                            host[protocol][portid] = port_items

            return product_groups

        util.printit(
            "Warning: unknown config value for 'scan_result_aggr_scheme'",
            color=util.RED)
        return {}
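For the "SINGLE" scheme, reducing a category to its most trusted entry is a plain max() over the candidate list. A toy run with made-up entries:

host = {
    "os": [
        {"name": "macOS 10.12", "trust": 3},
        {"name": "macOS 10.13", "trust": 5},
    ]
}

# 'SINGLE': keep only the entry with the highest trust rating
host["os"] = [max(host["os"], key=lambda entry: entry["trust"])]
print(host["os"])  # [{'name': 'macOS 10.13', 'trust': 5}]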
Example #29
def run_smbmap(targets, accounts):
    """ Run SMBMap on the given targets with all of the given accounts """

    # open redirect file for SMBMap output
    redr_file = open(SMBMAP_OUTPUT_FILE, "w")

    # iterate over targets and scan them
    for ip, ports in targets.items():
        for port in ports:
            # skip port 139 if 445 is also open, because 445 is preferred for SMB
            if str(port) == "139" and ("445" in ports or 445 in ports):
                continue

            for user, passwd in accounts:
                # Prepare SMBMap call
                call = [
                    "smbmap/smbmap.py", "-u", user, "-p", passwd, "-P",
                    str(port), "-H", ip
                ]

                # Execute SMBMap call
                with subprocess.Popen(call,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT,
                                      bufsize=1,
                                      universal_newlines=True) as proc:

                    # process the direct SMBMap output to improve reprinting
                    prev_line_work = False
                    for line in proc.stdout:
                        if prev_line_work:
                            if VERBOSE:
                                util.clear_previous_line()
                            prev_line_work = False
                        if "working on it..." in line.lower():
                            prev_line_work = True

                        # color target IP and show the used authentication
                        print_line = util.strip_ansi_escape_seq(line)
                        if line.startswith("[+] ") and "IP" in line:
                            name = line[line.find("Name: "):].strip()
                            print_line = util.GREEN + "[+] " + "%s:%s" % (ip,
                                                                          port)
                            if user:
                                print_line += util.SANE + "  (auth --> %s:%s)" % (
                                    user, passwd)
                            else:
                                print_line += util.SANE + "  (guest session)"
                            if name:
                                print_line += "    Name: " + name
                        print_line = print_line.replace("\n", "")

                        # print output to screen and write it to this module's output file
                        if VERBOSE and print_line:
                            util.printit(print_line)
                        if "working on it..." not in print_line.lower():
                            print_line = util.strip_ansi_escape_seq(
                                print_line) + "\n"
                            redr_file.write(print_line)

    redr_file.close()
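The port-skipping rule at the top of the scan loop can be factored into a small filter. A sketch with made-up inputs (the helper name is illustrative):

def smb_ports_to_scan(ports) -> list:
    """Drop port 139 when 445 is also open, since 445 is preferred for SMB."""
    ports = {str(port) for port in ports}
    if "445" in ports:
        ports.discard("139")
    return sorted(ports)

print(smb_ports_to_scan([139, 445]))   # ['445']
print(smb_ports_to_scan(["139"]))      # ['139']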
Example #30
def run_enum4linux(targets, accounts):
    """ Run Enum4Linux on the given targets with all of the given accounts """

    # check that enum4linux is installed
    e4l_installed = subprocess.run(["which", "enum4linux"],
                                   stdout=subprocess.DEVNULL,
                                   stderr=subprocess.PIPE)
    # if it's not installed, return
    if e4l_installed.returncode != 0:
        if VERBOSE:
            util.printit("Skipping, because Enum4Linux is not installed.")
            util.printit(
                "If you want AVAIN to use Enum4Linux, you have to install it manually."
            )
        return

    # some regexes to process output
    target_def_re = re.compile(r"Target\W*\.+\W*\d+\.\d+\.\d+\.\d+")
    user_def_re = re.compile(r"Username\W*\.+\W*'.*'")
    pass_def_re = re.compile(r"Password\W*\.+\W*'.*'")

    # open redirect file for Enum4Linux output
    redr_file = open(ENUM4LINUX_OUTPUT_FILE, "w")

    # iterate over targets and scan them
    for ip in targets:
        # note: enum4linux does not allow specification of ports

        # call Enum4Linux once for every account
        for user, passwd in accounts:
            # Prepare Enum4Linux call
            if user or passwd:
                call = ["enum4linux", "-u", user, "-p", passwd, ip]
            else:
                call = ["enum4linux", ip]

            # some more regexes to process output
            cur_ip_re = re.compile(r"%s" % re.escape(ip))
            cur_user_re = re.compile(r"%s" % re.escape(user))
            cur_passwd_re = re.compile(r"%s" % re.escape(passwd))

            # Execute Enum4Linux call
            with subprocess.Popen(call,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  bufsize=1,
                                  universal_newlines=True) as proc:

                for line in proc.stdout:
                    print_line = line

                    # add colorization to better differ output of several Enum4Linux runs
                    if target_def_re.match(print_line):
                        print_line = util.GREEN + print_line
                    elif user_def_re.match(print_line) or pass_def_re.match(
                            print_line):
                        print_line = util.YELLOW + print_line
                    else:
                        # color target IP and used username/password
                        print_line = cur_ip_re.sub(util.GREEN + ip + util.SANE,
                                                   print_line)
                        if user:
                            print_line = cur_user_re.sub(
                                util.YELLOW + user + util.SANE, print_line)
                        if passwd:
                            print_line = cur_passwd_re.sub(
                                util.YELLOW + passwd + util.SANE, print_line)

                    # print processed line to screen
                    if VERBOSE and line and "*unknown*\\*unknown*" not in line:
                        util.printit(print_line, end="")

                    # write original line to output file
                    redr_file.write(line)

            if VERBOSE:
                util.printit("\n")
            redr_file.write("\n\n")

    redr_file.close()
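The banner regexes above match Enum4Linux's "Target ........ x.x.x.x" style header lines. A quick self-contained check against sample lines (the sample strings are illustrative, not captured tool output):

import re

target_def_re = re.compile(r"Target\W*\.+\W*\d+\.\d+\.\d+\.\d+")
user_def_re = re.compile(r"Username\W*\.+\W*'.*'")

print(bool(target_def_re.match("Target ........... 192.168.0.5")))  # True
print(bool(user_def_re.match("Username ......... 'admin'")))        # True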