示例#1
0
    def build_script(self):
        """Assemble the argv list for the Nmap invocation of this scan."""
        args = ["nmap", "-Pn", self.target]

        if self.port_range:
            # Reject malformed ranges before they reach the command line
            HelpUtilities.validate_port_range(self.port_range)
            args += ["-p", self.port_range]
            self.logger.info("{} Added port range {} to Nmap script".format(
                COLORED_COMBOS.NOTIFY, self.port_range))

        if self.full_scan:
            # Full scan implies both service detection and default scripts
            args += ["-sV", "-sC"]
            self.logger.info(
                "{} Added scripts and services to Nmap script".format(
                    COLORED_COMBOS.NOTIFY))
            return args

        # Partial scan: add only the options the caller asked for
        if self.scripts:
            self.logger.info(
                "{} Added safe-scripts scan to Nmap script".format(
                    COLORED_COMBOS.NOTIFY))
            args.append("-sC")
        if self.services:
            self.logger.info("{} Added service scan to Nmap script".format(
                COLORED_COMBOS.NOTIFY))
            args.append("-sV")
        return args
示例#2
0
    def build_script(self):
        """Construct the nmap-vulners command line for this scan."""
        args = [
            "nmap", "-Pn", "-sV", "--script", self.vulners_path, self.target
        ]

        if self.port_range:
            # Validate before appending so a bad range never reaches nmap
            HelpUtilities.validate_port_range(self.port_range)
            args.extend(("-p", self.port_range))
            self.logger.info("{} Added port range {} to Nmap script".format(COLORED_COMBOS.NOTIFY, self.port_range))
        return args
示例#3
0
 def generate_dns_dumpster_mapping(cls, host, sout_logger):
     """Fetch the DNS-mapping PNG for *host* from DNS dumpster and save it.

     Writes the image to <host.target>/dns_mapping.png on HTTP 200 and logs
     the outcome; logs a failure message on a non-200 response or when the
     query raises RaccoonException.
     """
     sout_logger.info("{} Trying to fetch DNS Mapping for {} from DNS dumpster".format(
         COLORED_COMBOS.INFO, host))
     try:
         page = HelpUtilities.query_dns_dumpster(host=host)
         if page.status_code == 200:
             path = HelpUtilities.get_output_path("{}/dns_mapping.png".format(host.target))
             with open(path, "wb") as target_image:
                 target_image.write(page.content)
             # Fixed: success was previously logged even on non-200 responses
             sout_logger.info("{} Successfully fetched DNS mapping for {}".format(
                 COLORED_COMBOS.GOOD, host.target)
             )
         else:
             sout_logger.info("{} Failed to fetch DNS mapping for {}".format(
                 COLORED_COMBOS.BAD, host.target))
     except RaccoonException:
         sout_logger.info("{} Failed to generate DNS mapping. A connection error occurred.".format(
             COLORED_COMBOS.BAD))
示例#4
0
 def generate_dns_dumpster_mapping(cls, host, sout_logger):
     """Generate and store the DNS dumpster mapping image for *host*.

     Saves the PNG to <host.target>/dns_mapping.png when the request
     returns HTTP 200; logs failure otherwise.
     """
     sout_logger.info("{} Trying to generate DNS Mapping for {} from DNS dumpster".format(
         COLORED_COMBOS.INFO, host))
     try:
         page = HelpUtilities.query_dns_dumpster(host=host)
         if page.status_code == 200:
             path = HelpUtilities.get_output_path("{}/dns_mapping.png".format(host.target))
             with open(path, "wb") as target_image:
                 target_image.write(page.content)
             # Fixed: only report success when the image was actually written
             sout_logger.info("{} Successfully fetched DNS mapping for {}".format(
                 COLORED_COMBOS.GOOD, host.target)
             )
         else:
             sout_logger.info("{} Failed to fetch DNS mapping for {}".format(
                 COLORED_COMBOS.BAD, host.target))
     except RaccoonException:
         sout_logger.info("{} Failed to generate DNS mapping. A connection error occurred.".format(
             COLORED_COMBOS.BAD))
示例#5
0
 def __init__(self, host, port_range, vulners_path):
     """Set up an nmap-vulners scan: target, report path, logger and argv."""
     self.target = host.target
     self.port_range = port_range
     self.vulners_path = vulners_path
     report = "{}/nmap_vulners_scan.txt".format(self.target)
     self.path = HelpUtilities.get_output_path(report)
     self.logger = Logger(self.path)
     # Compose the nmap command line up front
     self.script = self.build_script()
示例#6
0
    def get_log_file_path(self, path):
        """Return a Logger writing to *path*, or to the default fuzz log."""
        log_file = path if path else "{}/url_fuzz.txt".format(self.target)
        return Logger(HelpUtilities.get_output_path(log_file))
示例#7
0
    def get_log_file_path(self, path):
        """Resolve the fuzzer's log destination and wrap it in a Logger."""
        if not path:
            # Fall back to the per-target default log location
            path = "{}/url_fuzz.txt".format(self.target)
        return Logger(HelpUtilities.get_output_path(path))
示例#8
0
 def __init__(self, host):
     """Set up WAF detection state and the known CNAME/fingerprint tables."""
     self.host = host
     self.cnames = host.dns_results.get('CNAME')
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     self.waf_present = False
     # CNAME substrings that give away a WAF/CDN vendor
     self.waf_cname_map = dict((
         ("incapdns", "Incapsula"),
         ("edgekey", "Akamai"),
         ("akamai", "Akamai"),
         ("edgesuite", "Akamai"),
         ("distil", "Distil Networks"),
         ("cloudfront", "CloudFront"),
         ("netdna-cdn", "MaxCDN"),
     ))
     # Vendor name -> application-level detection routine
     self.waf_app_method_map = dict((
         ("CloudFront", WAFApplicationMethods.detect_cloudfront),
         ("Cloudflare", WAFApplicationMethods.detect_cloudflare),
         ("Incapsula", WAFApplicationMethods.detect_incapsula),
         ("MaxCDN", WAFApplicationMethods.detect_maxcdn),
         ("Edgecast", WAFApplicationMethods.detect_edgecast),
         ("Distil Networks", WAFApplicationMethods.detect_distil),
         ("Sucuri", WAFApplicationMethods.detect_sucuri),
         ("Reblaze", WAFApplicationMethods.detect_reblaze),
     ))
     self.logger = Logger(HelpUtilities.get_output_path("{}/WAF.txt".format(self.host.target)))
示例#9
0
 def __init__(self, host):
     """Prepare WAF-detection state, vendor lookup tables, and logging."""
     self.host = host
     self.cnames = host.dns_results.get('CNAME')
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     self.waf_present = False
     # Map tell-tale CNAME fragments to the vendor they identify
     cname_keys = ["incapdns", "edgekey", "akamai", "edgesuite",
                   "distil", "cloudfront", "netdna-cdn"]
     cname_vendors = ["Incapsula", "Akamai", "Akamai", "Akamai",
                      "Distil Networks", "CloudFront", "MaxCDN"]
     self.waf_cname_map = dict(zip(cname_keys, cname_vendors))
     # Map each vendor to its application-layer detection method
     detection_pairs = [
         ("CloudFront", WAFApplicationMethods.detect_cloudfront),
         ("Cloudflare", WAFApplicationMethods.detect_cloudflare),
         ("Incapsula", WAFApplicationMethods.detect_incapsula),
         ("MaxCDN", WAFApplicationMethods.detect_maxcdn),
         ("Edgecast", WAFApplicationMethods.detect_edgecast),
         ("Distil Networks", WAFApplicationMethods.detect_distil),
         ("Sucuri", WAFApplicationMethods.detect_sucuri),
         ("Reblaze", WAFApplicationMethods.detect_reblaze),
     ]
     self.waf_app_method_map = {name: method for name, method in detection_pairs}
     waf_log = HelpUtilities.get_output_path("{}/WAF.txt".format(
         self.host.target))
     self.logger = Logger(waf_log)
示例#10
0
 def __init__(self, host, port_range, full_scan=None, scripts=None, services=None):
     """Record the Nmap scan options for *host* and open its log file."""
     self.target = host.target
     self.port_range = port_range
     self.full_scan = full_scan
     self.scripts = scripts
     self.services = services
     report = "{}/nmap_scan.txt".format(self.target)
     self.path = HelpUtilities.get_output_path(report)
     self.logger = Logger(self.path)
示例#11
0
 def __init__(self, host, full_scan, scripts, services, port_range):
     """Store Nmap options, open the log, and pre-build the command line."""
     self.target = host.target
     self.full_scan, self.scripts, self.services = full_scan, scripts, services
     self.port_range = port_range
     scan_report = "{}/nmap_scan.txt".format(self.target)
     self.path = HelpUtilities.get_output_path(scan_report)
     self.logger = Logger(self.path)
     # Build the nmap argv once so the scan can be launched directly
     self.script = self.build_script()
示例#12
0
 def __init__(self, host):
     """Initialize web-scan state and the per-target log file."""
     self.host = host
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     self.web_scan_results = []
     self.headers = None
     self.robots = None
     log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(self.host.target))
     # Everything before the last path separator is the target's directory
     *parents, _ = log_file.split("/")
     self.target_dir = "/".join(parents)
     self.logger = Logger(log_file)
示例#13
0
 def _extract_from_dns_dumpster(self):
     """Scrape subdomains for the host out of the DNS dumpster results table."""
     self.logger.info("{} Trying to extract subdomains from DNS dumpster".format(COLORED_COMBOS.NOTIFY))
     try:
         page = HelpUtilities.query_dns_dumpster(host=self.host)
         soup = BeautifulSoup(page.text, "lxml")
         results_table = soup.select(".table")[0]
         for record in results_table.find_all("tr"):
             cells = record.select("td")
             # First cell holds the subdomain; drop everything after the URL
             found = cells[0].text.split('\n')[0]
             self.logger.info("{} Found subdomain in DNS dumpster: {}".format(COLORED_COMBOS.GOOD, found))
     except (RaccoonException, IndexError):
         self.logger.info("{} Failed to query DNS dumpster for subdomains".format(COLORED_COMBOS.BAD))
示例#14
0
 def _extract_from_dns_dumpster(self):
     """Scrape subdomains from DNS dumpster and record them in self.sub_domains."""
     self.logger.info("{} Trying to extract subdomains from DNS dumpster".format(COLORED_COMBOS.NOTIFY))
     try:
         page = HelpUtilities.query_dns_dumpster(host=self.host)
         soup = BeautifulSoup(page.text, "lxml")
         hosts_table = soup.select(".table")[-1]
         for row in hosts_table.find_all("tr"):
             tds = row.select("td")
             sub_domain = tds[0].text.split('\n')[0]  # Grab just the URL, truncate other information
             self.logger.info("{} Found subdomain in DNS dumpster: {}".format(COLORED_COMBOS.GOOD, sub_domain))
             self.sub_domains.add(sub_domain)
     # IndexError: the page came back with no ".table" element (or an empty
     # row) — previously this escaped the handler and crashed the scan
     except (RaccoonException, IndexError):
         self.logger.info("{} Failed to query DNS dumpster for subdomains".format(COLORED_COMBOS.BAD))
示例#15
0
    def build_script(self):
        """Build the argv list for the Nmap scan of this target."""
        cmd = ["nmap", "-Pn", self.target]

        if self.port_range:
            HelpUtilities.validate_port_range(self.port_range)
            cmd.extend(["-p", self.port_range])
            self.logger.info("{} Added port range {} to Nmap script".format(COLORED_COMBOS.NOTIFY, self.port_range))

        if self.full_scan:
            # A full scan always gets both service and script detection
            cmd.extend(["-sV", "-sC"])
            self.logger.info("{} Added scripts and services to Nmap script".format(COLORED_COMBOS.NOTIFY))
        else:
            if self.scripts:
                self.logger.info("{} Added safe-scripts scan to Nmap script".format(COLORED_COMBOS.NOTIFY))
                cmd.append("-sC")
            if self.services:
                self.logger.info("{} Added service scan to Nmap script".format(COLORED_COMBOS.NOTIFY))
                cmd.append("-sV")
        return cmd
示例#16
0
 def __init__(self, host, sans, domain_list, ignored_response_codes,
              num_threads, follow_redirects, no_sub_enum):
     """Collect subdomain-enumeration settings and open the subdomains log."""
     self.host = host
     self.target = host.target
     self.sans = sans
     self.domain_list = domain_list
     self.ignored_error_codes = ignored_response_codes
     self.num_threads = num_threads
     self.follow_redirects = follow_redirects
     self.no_sub_enum = no_sub_enum
     self.request_handler = RequestHandler()
     log_path = HelpUtilities.get_output_path("{}/subdomains.txt".format(self.target))
     self.logger = Logger(log_path)
示例#17
0
 def __init__(self, host, port):
     """Initialize TLS probing state for *host* on *port*."""
     super().__init__(host)
     self.target = host.target
     self.port = port
     self._versions = ("tls1", "tls1_1", "tls1_2")
     # OpenSSL likes to hang, Linux timeout to the rescue
     self._base_script = "timeout 10 openssl s_client -connect {}:{} ".format(self.target, self.port)
     self.begin = "-----BEGIN CERTIFICATE-----"
     self.end = "-----END CERTIFICATE-----"
     self.sni_data, self.non_sni_data = {}, {}
     self.ciphers = ""
     self.logger = Logger(HelpUtilities.get_output_path("{}/tls_report.txt".format(self.target)))
示例#18
0
 def __init__(self, host):
     """Prepare the web application scanner's state and logging."""
     self.host = host
     self.request_handler = RequestHandler()
     self.web_server_validator = WebServerValidator()
     self.headers = None
     self.robots = None
     self.forms = None
     self.fuzzable_urls = set()
     self.emails = set()
     log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(self.host.target))
     # Strip the file name to get the target's output directory
     segments = log_file.split("/")
     self.target_dir = "/".join(segments[:-1])
     self.logger = Logger(log_file)
     self.storage_explorer = StorageExplorer(host, self.logger)
示例#19
0
 def __init__(self, host, port=443):
     """Set up TLS scanning state for *host*; defaults to the HTTPS port."""
     super().__init__(host)
     self.target = host.target
     self.port = port
     self._versions = ("tls1", "tls1_1", "tls1_2")
     # OpenSSL likes to hang, Linux timeout to the rescue
     self._base_script = "timeout 10 openssl s_client -connect {}:{} ".format(self.target, self.port)
     self.begin = "-----BEGIN CERTIFICATE-----"
     self.end = "-----END CERTIFICATE-----"
     self.sni_data = {}
     self.non_sni_data = {}
     self.ciphers = ""
     report = "{}/tls_report.txt".format(self.target)
     self.logger = Logger(HelpUtilities.get_output_path(report))
示例#20
0
 def __init__(self, host, sans, domain_list, ignored_response_codes,
              num_threads, follow_redirects, no_sub_enum):
     """Gather subdomain-enumeration options and open the per-target log."""
     self.host = host
     self.target = host.target
     self.sans = sans
     self.domain_list = domain_list
     self.ignored_error_codes = ignored_response_codes
     self.num_threads = num_threads
     self.follow_redirects = follow_redirects
     self.no_sub_enum = no_sub_enum
     self.request_handler = RequestHandler()
     # Discovered subdomains accumulate here during enumeration
     self.sub_domains = set()
     subdomains_file = "{}/subdomains.txt".format(self.target)
     self.logger = Logger(HelpUtilities.get_output_path(subdomains_file))
示例#21
0
    async def grab_whois(cls, host):
        """Run `whois` for the host's naked domain and log its key/value lines."""
        if not host.naked:
            return

        script = "whois {}".format(host.naked).split()
        logger = Logger(HelpUtilities.get_output_path("{}/whois.txt".format(host.target)))

        process = await create_subprocess_exec(*script, stdout=PIPE, stderr=PIPE)
        result, err = await process.communicate()

        if process.returncode != 0:
            return
        logger.info("{} {} WHOIS information retrieved".format(COLORED_COMBOS.GOOD, host))
        # Only "Field: value" lines carry useful information
        for line in result.decode().strip().split("\n"):
            if ":" in line:
                logger.debug(line)
示例#22
0
    async def grab_whois(cls, host):
        """Run the `whois` binary for host.naked and log the interesting lines.

        Returns immediately when the host has no naked domain. Logs a failure
        message when the whois process exits non-zero (previously silent).
        """
        if not host.naked:
            return

        script = "whois {}".format(host.naked).split()
        log_file = HelpUtilities.get_output_path("{}/whois.txt".format(host.target))
        logger = Logger(log_file)

        process = await create_subprocess_exec(
            *script,
            stdout=PIPE,
            stderr=PIPE
        )
        result, err = await process.communicate()

        if process.returncode == 0:
            logger.info("{} {} WHOIS information retrieved".format(COLORED_COMBOS.GOOD, host))
            # Fixed: this loop body was double-indented in the original
            for line in result.decode().strip().split("\n"):
                if ":" in line:  # Only the "Field: value" lines are informative
                    logger.debug(line)
        else:
            # Surface failures instead of swallowing them silently
            logger.info("{} WHOIS query failed for {}".format(COLORED_COMBOS.BAD, host))
示例#23
0
    def generate_dns_dumpster_mapping(cls, host, sout_logger):
        """Generate and save the DNS dumpster visual mapping for *host*.

        Posts the target to dnsdumpster.com with its CSRF token, waits for
        the backend to render, then downloads the PNG map into
        <host.target>/dns_mapping.png. Logs success only when the image was
        actually retrieved.
        """
        # Start DNS Dumpster session for the token
        request_handler = RequestHandler()
        dnsdumpster_session = request_handler.get_new_session()
        url = "https://dnsdumpster.com"
        if host.naked:
            target = host.naked
        else:
            target = host.target
        payload = {
            "targetip": target,
            "csrfmiddlewaretoken": None
        }
        sout_logger.info("{} Trying to generate DNS Mapping for {} from DNS dumpster".format(
            COLORED_COMBOS.INFO, host))
        try:
            dnsdumpster_session.get(url, timeout=10)
            # Pull the CSRF token off the session's cookie jar
            jar = dnsdumpster_session.cookies
            for c in jar:
                if not c.__dict__.get("name") == "csrftoken":
                    continue
                payload["csrfmiddlewaretoken"] = c.__dict__.get("value")
                break

            dnsdumpster_session.post(url, data=payload, headers={"Referer": "https://dnsdumpster.com/"})
            # Give the backend a moment to render the map image
            time.sleep(3)
            page = dnsdumpster_session.get("https://dnsdumpster.com/static/map/{}.png".format(target))
            if page.status_code == 200:
                path = HelpUtilities.get_output_path("{}/dns_mapping.png".format(host.target))
                with open(path, "wb") as target_image:
                    target_image.write(page.content)
                # Fixed: success was previously logged even on non-200 responses
                sout_logger.info("{} Successfully fetched DNS mapping for {}".format(
                    COLORED_COMBOS.GOOD, host.target)
                )
            else:
                sout_logger.info("{} Failed to fetch DNS mapping for {}".format(
                    COLORED_COMBOS.BAD, host.target))
        # NOTE(review): the builtin ConnectionError does not catch
        # requests.exceptions.ConnectionError unless it is imported under
        # that name at module level — confirm against this file's imports.
        except ConnectionError:
            sout_logger.info("{} Failed to generate DNS mapping. A connection error occurred.".format(
                COLORED_COMBOS.BAD))
示例#24
0
File: main.py  Project: whoami213/Raccoon
def main(target,
         tor_routing,
         proxy_list,
         proxy,
         cookies,
         dns_records,
         wordlist,
         threads,
         ignored_response_codes,
         subdomain_list,
         full_scan,
         scripts,
         services,
         port,
         tls_port,
         skip_health_check,
         follow_redirects,
         no_url_fuzzing,
         no_sub_enum,
         skip_nmap_scan,
         # delay,
         outdir,
         quiet):
    """Raccoon entry point: validate CLI arguments, then run all scans on *target*.

    Order of operations: argument/proxy validation, optional background Nmap
    scan, a first async batch (TLS, WAF, WHOIS, web scan, DNS-dumpster
    mapping), then URL fuzzing and subdomain enumeration. Exits with distinct
    non-zero codes on the various validation failures.
    """
    try:
        # ------ Arg validation ------
        # Set logging level and Logger instance
        log_level = HelpUtilities.determine_verbosity(quiet)
        logger = SystemOutLogger(log_level)
        intro(logger)

        target = target.lower()
        try:
            HelpUtilities.validate_executables()
        except RaccoonException as e:
            logger.critical(str(e))
            exit(9)
        HelpUtilities.validate_wordlist_args(proxy_list, wordlist, subdomain_list)
        HelpUtilities.validate_proxy_args(tor_routing, proxy, proxy_list)
        HelpUtilities.create_output_directory(outdir)

        if tor_routing:
            logger.info("{} Testing that Tor service is up...".format(COLORED_COMBOS.NOTIFY))
        elif proxy_list:
            if proxy_list and not os.path.isfile(proxy_list):
                raise FileNotFoundError("Not a valid file path, {}".format(proxy_list))
            else:
                logger.info("{} Routing traffic using proxies from list {}\n".format(
                    COLORED_COMBOS.NOTIFY, proxy_list))
        elif proxy:
            logger.info("{} Routing traffic through proxy {}\n".format(COLORED_COMBOS.NOTIFY, proxy))

        # TODO: Sanitize delay argument

        dns_records = tuple(dns_records.split(","))
        ignored_response_codes = tuple(int(code) for code in ignored_response_codes.split(","))

        if port:
            HelpUtilities.validate_port_range(port)

        # ------ /Arg validation ------

        if cookies:
            try:
                cookies = HelpUtilities.parse_cookie_arg(cookies)
            except RaccoonException as e:
                logger.critical("{}{}{}".format(COLOR.RED, str(e), COLOR.RESET))
                exit(2)

        # Set Request Handler instance
        request_handler = RequestHandler(
            proxy_list=proxy_list,
            tor_routing=tor_routing,
            single_proxy=proxy,
            cookies=cookies
        )

        if tor_routing:
            try:
                HelpUtilities.confirm_traffic_routs_through_tor()
                logger.info("{} Validated Tor service is up. Routing traffic anonymously\n".format(
                    COLORED_COMBOS.NOTIFY))
            except RaccoonException as err:
                print("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(3)

        main_loop = asyncio.get_event_loop()

        logger.info("{}### Raccoon Scan Started ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        logger.info("{} Trying to gather information about host: {}".format(COLORED_COMBOS.INFO, target))

        # TODO: Populate array when multiple targets are supported
        # hosts = []
        try:
            host = Host(target=target, dns_records=dns_records)
            host.parse()
        except HostHandlerException as e:
            logger.critical("{}{}{}".format(COLOR.RED, str(e), COLOR.RESET))
            exit(11)

        if not skip_health_check:
            try:
                HelpUtilities.validate_target_is_up(host)
            except RaccoonException as err:
                logger.critical("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(42)

        if not skip_nmap_scan:
            logger.info("\n{} Setting Nmap scan to run in the background".format(COLORED_COMBOS.INFO))
            nmap_scan = NmapScan(host, full_scan, scripts, services, port)
            # # # TODO: Populate array when multiple targets are supported
            # nmap_threads = []
            nmap_thread = threading.Thread(target=Scanner.run, args=(nmap_scan,))
            # Run Nmap scan in the background. Can take some time
            nmap_thread.start()

        # Run first set of checks - TLS, Web/WAF Data, DNS data
        waf = WAF(host)
        tls_info_scanner = TLSHandler(host, tls_port)
        web_app_scanner = WebApplicationScanner(host)
        tasks = (
            asyncio.ensure_future(tls_info_scanner.run()),
            asyncio.ensure_future(waf.detect()),
            asyncio.ensure_future(DNSHandler.grab_whois(host)),
            asyncio.ensure_future(web_app_scanner.run_scan()),
            asyncio.ensure_future(DNSHandler.generate_dns_dumpster_mapping(host, logger))
        )

        main_loop.run_until_complete(asyncio.wait(tasks))

        # Second set of checks - URL fuzzing, Subdomain enumeration
        if not no_url_fuzzing:
            fuzzer = URLFuzzer(host, ignored_response_codes, threads, wordlist, follow_redirects)
            main_loop.run_until_complete(fuzzer.fuzz_all())

        # Subdomain enumeration only makes sense for a domain, not a bare IP
        if not host.is_ip:
            sans = tls_info_scanner.sni_data.get("SANs")
            subdomain_enumerator = SubDomainEnumerator(
                host,
                domain_list=subdomain_list,
                sans=sans,
                ignored_response_codes=ignored_response_codes,
                num_threads=threads,
                follow_redirects=follow_redirects,
                no_sub_enum=no_sub_enum
            )
            main_loop.run_until_complete(subdomain_enumerator.run())

        if not skip_nmap_scan:
            if nmap_thread.is_alive():
                logger.info("{} All scans done. Waiting for Nmap scan to wrap up. "
                            "Time left may vary depending on scan type and port range".format(COLORED_COMBOS.INFO))

                while nmap_thread.is_alive():
                    time.sleep(15)

        logger.info("\n{}### Raccoon scan finished ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        # Restore terminal state mangled by the interactive scans
        os.system("stty sane")

    except KeyboardInterrupt:
        print("{}Keyboard Interrupt detected. Exiting{}".format(COLOR.RED, COLOR.RESET))
        # Fix F'd up terminal after CTRL+C
        os.system("stty sane")
        exit(42)
示例#25
0
 def __init__(self, host, port_range, vulners_path):
     """Extend the base Nmap scan with the vulners NSE script."""
     super().__init__(host=host, port_range=port_range)
     self.vulners_path = vulners_path
     # Redirect output to a vulners-specific report file
     report = "{}/nmap_vulners_scan.txt".format(self.target)
     self.path = HelpUtilities.get_output_path(report)
     self.logger = Logger(self.path)
示例#26
0
 def create_host_dir_and_set_file_logger(self):
     """Create the target's output directory and attach a DNS-records logger."""
     records_file = HelpUtilities.get_output_path("{}/dns_records.txt".format(self.target))
     self._create_host_dir(records_file)
     self.logger = Logger(records_file)
示例#27
0
def main(target,
         tor_routing,
         proxy_list,
         proxy,
         dns_records,
         wordlist,
         threads,
         ignored_response_codes,
         subdomain_list,
         full_scan,
         scripts,
         services,
         port,
         tls_port,
         skip_health_check,
         no_redirects,
         no_url_fuzzing,
         no_sub_enum,
         # delay,
         outdir,
         quiet):
    """Raccoon entry point: validate CLI arguments, then run all scans on *target*.

    Order of operations: argument/proxy validation, background Nmap scan, a
    first async batch (TLS, WAF, WHOIS, web scan), DNS-dumpster mapping, then
    URL fuzzing and subdomain enumeration. Exits with distinct non-zero codes
    on the various validation failures.
    """
    try:
        # ------ Arg validation ------

        # Set logging level and Logger instance
        log_level = HelpUtilities.determine_verbosity(quiet)
        logger = SystemOutLogger(log_level)
        intro(logger)

        target = target.lower()
        try:
            HelpUtilities.validate_executables()
        except RaccoonException as e:
            logger.critical(str(e))
            exit(9)
        HelpUtilities.validate_wordlist_args(proxy_list, wordlist, subdomain_list)
        HelpUtilities.validate_proxy_args(tor_routing, proxy, proxy_list)
        HelpUtilities.create_output_directory(outdir)

        if tor_routing:
            logger.info("{} Testing that Tor service is up...".format(COLORED_COMBOS.NOTIFY))
        elif proxy_list:
            if proxy_list and not os.path.isfile(proxy_list):
                raise FileNotFoundError("Not a valid file path, {}".format(proxy_list))
            else:
                logger.info("{} Routing traffic using proxies from list {}\n".format(
                    COLORED_COMBOS.NOTIFY, proxy_list))
        elif proxy:
            # Fixed: the format string had a single placeholder for two
            # arguments, so the proxy value itself was never printed
            logger.info("{} Routing traffic through proxy {}\n".format(COLORED_COMBOS.NOTIFY, proxy))

        # TODO: Sanitize delay argument

        dns_records = tuple(dns_records.split(","))
        ignored_response_codes = tuple(int(code) for code in ignored_response_codes.split(","))
        follow_redirects = not no_redirects

        if port:
            HelpUtilities.validate_port_range(port)

        # ------ /Arg validation ------

        # Set Request Handler instance
        request_handler = RequestHandler(proxy_list=proxy_list, tor_routing=tor_routing, single_proxy=proxy)

        if tor_routing:
            try:
                HelpUtilities.confirm_traffic_routs_through_tor()
                logger.info("{} Validated Tor service is up. Routing traffic anonymously\n".format(
                    COLORED_COMBOS.NOTIFY))
            except RaccoonException as err:
                print("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(3)

        if not skip_health_check:
            HelpUtilities.validate_target_is_up(target)

        main_loop = asyncio.get_event_loop()

        logger.info("{}### Raccoon Scan Started ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        logger.info("{} Trying to gather information about host: {}".format(COLORED_COMBOS.INFO, target))

        # TODO: Populate array when multiple targets are supported
        # hosts = []
        host = Host(target=target, dns_records=dns_records)
        host.parse()

        logger.info("\n{} Setting Nmap scan to run in the background".format(COLORED_COMBOS.INFO))
        nmap_scan = NmapScan(host, full_scan, scripts, services, port)
        # # # TODO: Populate array when multiple targets are supported
        # nmap_threads = []
        nmap_thread = threading.Thread(target=Scanner.run, args=(nmap_scan,))
        # Run Nmap scan in the background. Can take some time
        nmap_thread.start()

        # Run first set of checks - TLS, Web/WAF Data, DNS data
        waf = WAF(host)
        tls_info_scanner = TLSHandler(host, tls_port)
        web_app_scanner = WebApplicationScanner(host)
        tasks = (
            asyncio.ensure_future(tls_info_scanner.run()),
            asyncio.ensure_future(waf.detect()),
            asyncio.ensure_future(DNSHandler.grab_whois(host)),
            asyncio.ensure_future(web_app_scanner.run_scan())
        )

        main_loop.run_until_complete(asyncio.wait(tasks))

        # DNS dumpster visualization
        DNSHandler.generate_dns_dumpster_mapping(host, logger)

        # Second set of checks - URL fuzzing, Subdomain enumeration
        if not no_url_fuzzing:
            fuzzer = URLFuzzer(host, ignored_response_codes, threads, wordlist, follow_redirects)
            main_loop.run_until_complete(fuzzer.fuzz_all())

        # Subdomain enumeration only makes sense for a domain, not a bare IP
        if not host.is_ip:
            sans = tls_info_scanner.sni_data.get("SANs")
            subdomain_enumerator = SubDomainEnumerator(
                host,
                domain_list=subdomain_list,
                sans=sans,
                ignored_response_codes=ignored_response_codes,
                num_threads=threads,
                follow_redirects=follow_redirects,
                no_sub_enum=no_sub_enum
            )
            main_loop.run_until_complete(subdomain_enumerator.run())

        if nmap_thread.is_alive():
            logger.info("{} All scans done. Waiting for Nmap scan to wrap up. "
                        "Time left may vary depending on scan type and port range".format(COLORED_COMBOS.INFO))

            while nmap_thread.is_alive():
                time.sleep(15)

        logger.info("\n{}### Raccoon scan finished ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        # Restore terminal state mangled by the interactive scans
        os.system("stty sane")

    except KeyboardInterrupt:
        print("{}Keyboard Interrupt detected. Exiting{}".format(COLOR.RED, COLOR.RESET))
        # Fix F'd up terminal after CTRL+C
        os.system("stty sane")
        exit(42)
示例#28
0
 def create_host_dir_and_set_file_logger(self):
     """Ensure the host's output directory exists, then log DNS records there."""
     target_log = HelpUtilities.get_output_path(
         "{}/dns_records.txt".format(self.target))
     # The directory must exist before the Logger opens the file
     self._create_host_dir(target_log)
     self.logger = Logger(target_log)