async def bruteforce(self):
    path = "{}/subdomain_fuzz.txt".format(self.host.target)

    # If a naked domain exists, use it
    if self.host.naked:
        self.host.target = self.host.naked

    self.logger.info("{} Bruteforcing subdomains".format(COLORED_COMBOS.NOTIFY))
    sub_domain_fuzzer = URLFuzzer(
        host=self.host,
        wordlist=self.domain_list,
        num_threads=self.num_threads,
        ignored_response_codes=self.ignored_error_codes,
        follow_redirects=self.follow_redirects)
    await sub_domain_fuzzer.fuzz_all(sub_domain=True, log_file_path=path)
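# Hedged usage sketch (not part of the original source): how the bruteforce()
# coroutine above could be driven on its own from an event loop. The Host and
# SubDomainEnumerator constructor arguments mirror how main() wires them further
# down; the target, wordlist path and numeric values are placeholders, not
# documented project defaults.
#
# import asyncio
#
# host = Host(target="example.com", dns_records=("A", "NS"))
# host.parse()
# enumerator = SubDomainEnumerator(
#     host,
#     domain_list="wordlists/subdomains.txt",   # placeholder path
#     sans=None,
#     ignored_response_codes=(301, 400, 401, 402, 403, 404, 503),
#     num_threads=25,
#     follow_redirects=False,
#     no_sub_enum=False)
# asyncio.get_event_loop().run_until_complete(enumerator.bruteforce())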
def main(target,
         tor_routing,
         proxy_list,
         proxy,
         cookies,
         dns_records,
         wordlist,
         threads,
         ignored_response_codes,
         subdomain_list,
         full_scan,
         scripts,
         services,
         port,
         tls_port,
         skip_health_check,
         follow_redirects,
         no_url_fuzzing,
         no_sub_enum,
         skip_nmap_scan,
         # delay,
         outdir,
         quiet):
    try:
        # ------ Arg validation ------
        # Set logging level and Logger instance
        log_level = HelpUtilities.determine_verbosity(quiet)
        logger = SystemOutLogger(log_level)
        intro(logger)

        target = target.lower()
        try:
            HelpUtilities.validate_executables()
        except RaccoonException as e:
            logger.critical(str(e))
            exit(9)
        HelpUtilities.validate_wordlist_args(proxy_list, wordlist, subdomain_list)
        HelpUtilities.validate_proxy_args(tor_routing, proxy, proxy_list)
        HelpUtilities.create_output_directory(outdir)

        if tor_routing:
            logger.info("{} Testing that Tor service is up...".format(COLORED_COMBOS.NOTIFY))
        elif proxy_list:
            if proxy_list and not os.path.isfile(proxy_list):
                raise FileNotFoundError("Not a valid file path, {}".format(proxy_list))
            else:
                logger.info("{} Routing traffic using proxies from list {}\n".format(
                    COLORED_COMBOS.NOTIFY, proxy_list))
        elif proxy:
            logger.info("{} Routing traffic through proxy {}\n".format(COLORED_COMBOS.NOTIFY, proxy))

        # TODO: Sanitize delay argument

        dns_records = tuple(dns_records.split(","))

        ignored_response_codes = tuple(int(code) for code in ignored_response_codes.split(","))

        if port:
            HelpUtilities.validate_port_range(port)

        # ------ /Arg validation ------

        if cookies:
            try:
                cookies = HelpUtilities.parse_cookie_arg(cookies)
            except RaccoonException as e:
                logger.critical("{}{}{}".format(COLOR.RED, str(e), COLOR.RESET))
                exit(2)

        # Set Request Handler instance
        request_handler = RequestHandler(
            proxy_list=proxy_list,
            tor_routing=tor_routing,
            single_proxy=proxy,
            cookies=cookies
        )

        if tor_routing:
            try:
                HelpUtilities.confirm_traffic_routs_through_tor()
                logger.info("{} Validated Tor service is up. Routing traffic anonymously\n".format(
                    COLORED_COMBOS.NOTIFY))
            except RaccoonException as err:
                print("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(3)

        main_loop = asyncio.get_event_loop()

        logger.info("{}### Raccoon Scan Started ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        logger.info("{} Trying to gather information about host: {}".format(COLORED_COMBOS.INFO, target))

        # TODO: Populate array when multiple targets are supported
        # hosts = []
        try:
            host = Host(target=target, dns_records=dns_records)
            host.parse()
        except HostHandlerException as e:
            logger.critical("{}{}{}".format(COLOR.RED, str(e), COLOR.RESET))
            exit(11)

        if not skip_health_check:
            try:
                HelpUtilities.validate_target_is_up(host)
            except RaccoonException as err:
                logger.critical("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(42)

        if not skip_nmap_scan:
            logger.info("\n{} Setting Nmap scan to run in the background".format(COLORED_COMBOS.INFO))
            nmap_scan = NmapScan(host, full_scan, scripts, services, port)
            # TODO: Populate array when multiple targets are supported
            # nmap_threads = []
            nmap_thread = threading.Thread(target=Scanner.run, args=(nmap_scan,))
            # Run Nmap scan in the background. Can take some time
            nmap_thread.start()

        # Run first set of checks - TLS, Web/WAF Data, DNS data
        waf = WAF(host)
        tls_info_scanner = TLSHandler(host, tls_port)
        web_app_scanner = WebApplicationScanner(host)

        tasks = (
            asyncio.ensure_future(tls_info_scanner.run()),
            asyncio.ensure_future(waf.detect()),
            asyncio.ensure_future(DNSHandler.grab_whois(host)),
            asyncio.ensure_future(web_app_scanner.run_scan()),
            asyncio.ensure_future(DNSHandler.generate_dns_dumpster_mapping(host, logger))
        )

        main_loop.run_until_complete(asyncio.wait(tasks))

        # Second set of checks - URL fuzzing, Subdomain enumeration
        if not no_url_fuzzing:
            fuzzer = URLFuzzer(host, ignored_response_codes, threads, wordlist, follow_redirects)
            main_loop.run_until_complete(fuzzer.fuzz_all())

        if not host.is_ip:
            sans = tls_info_scanner.sni_data.get("SANs")
            subdomain_enumerator = SubDomainEnumerator(
                host,
                domain_list=subdomain_list,
                sans=sans,
                ignored_response_codes=ignored_response_codes,
                num_threads=threads,
                follow_redirects=follow_redirects,
                no_sub_enum=no_sub_enum
            )
            main_loop.run_until_complete(subdomain_enumerator.run())

        if not skip_nmap_scan:
            if nmap_thread.is_alive():
                logger.info("{} All scans done. Waiting for Nmap scan to wrap up. "
                            "Time left may vary depending on scan type and port range".format(COLORED_COMBOS.INFO))
                while nmap_thread.is_alive():
                    time.sleep(15)

        logger.info("\n{}### Raccoon scan finished ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        os.system("stty sane")

    except KeyboardInterrupt:
        print("{}Keyboard Interrupt detected. Exiting{}".format(COLOR.RED, COLOR.RESET))
        # Fix F'd up terminal after CTRL+C
        os.system("stty sane")
        exit(42)
def main(target,
         tor_routing,
         proxy_list,
         proxy,
         dns_records,
         wordlist,
         threads,
         ignored_response_codes,
         subdomain_list,
         full_scan,
         scripts,
         services,
         port,
         tls_port,
         skip_health_check,
         no_redirects,
         no_url_fuzzing,
         no_sub_enum,
         # delay,
         outdir,
         quiet):
    try:
        # ------ Arg validation ------
        # Set logging level and Logger instance
        log_level = HelpUtilities.determine_verbosity(quiet)
        logger = SystemOutLogger(log_level)
        intro(logger)

        target = target.lower()
        try:
            HelpUtilities.validate_executables()
        except RaccoonException as e:
            logger.critical(str(e))
            exit(9)
        HelpUtilities.validate_wordlist_args(proxy_list, wordlist, subdomain_list)
        HelpUtilities.validate_proxy_args(tor_routing, proxy, proxy_list)
        HelpUtilities.create_output_directory(outdir)

        if tor_routing:
            logger.info("{} Testing that Tor service is up...".format(COLORED_COMBOS.NOTIFY))
        elif proxy_list:
            if proxy_list and not os.path.isfile(proxy_list):
                raise FileNotFoundError("Not a valid file path, {}".format(proxy_list))
            else:
                logger.info("{} Routing traffic using proxies from list {}\n".format(
                    COLORED_COMBOS.NOTIFY, proxy_list))
        elif proxy:
            logger.info("{} Routing traffic through proxy {}\n".format(COLORED_COMBOS.NOTIFY, proxy))

        # TODO: Sanitize delay argument

        dns_records = tuple(dns_records.split(","))

        ignored_response_codes = tuple(int(code) for code in ignored_response_codes.split(","))

        follow_redirects = not no_redirects

        if port:
            HelpUtilities.validate_port_range(port)

        # ------ /Arg validation ------

        # Set Request Handler instance
        request_handler = RequestHandler(proxy_list=proxy_list, tor_routing=tor_routing, single_proxy=proxy)

        if tor_routing:
            try:
                HelpUtilities.confirm_traffic_routs_through_tor()
                logger.info("{} Validated Tor service is up. Routing traffic anonymously\n".format(
                    COLORED_COMBOS.NOTIFY))
            except RaccoonException as err:
                print("{}{}{}".format(COLOR.RED, str(err), COLOR.RESET))
                exit(3)

        if not skip_health_check:
            HelpUtilities.validate_target_is_up(target)

        main_loop = asyncio.get_event_loop()

        logger.info("{}### Raccoon Scan Started ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        logger.info("{} Trying to gather information about host: {}".format(COLORED_COMBOS.INFO, target))

        # TODO: Populate array when multiple targets are supported
        # hosts = []
        host = Host(target=target, dns_records=dns_records)
        host.parse()

        logger.info("\n{} Setting Nmap scan to run in the background".format(COLORED_COMBOS.INFO))
        nmap_scan = NmapScan(host, full_scan, scripts, services, port)
        # TODO: Populate array when multiple targets are supported
        # nmap_threads = []
        nmap_thread = threading.Thread(target=Scanner.run, args=(nmap_scan,))
        # Run Nmap scan in the background. Can take some time
        nmap_thread.start()

        # Run first set of checks - TLS, Web/WAF Data, DNS data
        waf = WAF(host)
        tls_info_scanner = TLSHandler(host, tls_port)
        web_app_scanner = WebApplicationScanner(host)

        tasks = (
            asyncio.ensure_future(tls_info_scanner.run()),
            asyncio.ensure_future(waf.detect()),
            asyncio.ensure_future(DNSHandler.grab_whois(host)),
            asyncio.ensure_future(web_app_scanner.run_scan())
        )

        main_loop.run_until_complete(asyncio.wait(tasks))

        # DNS dumpster visualization
        DNSHandler.generate_dns_dumpster_mapping(host, logger)

        # Second set of checks - URL fuzzing, Subdomain enumeration
        if not no_url_fuzzing:
            fuzzer = URLFuzzer(host, ignored_response_codes, threads, wordlist, follow_redirects)
            main_loop.run_until_complete(fuzzer.fuzz_all())

        if not host.is_ip:
            sans = tls_info_scanner.sni_data.get("SANs")
            subdomain_enumerator = SubDomainEnumerator(
                host,
                domain_list=subdomain_list,
                sans=sans,
                ignored_response_codes=ignored_response_codes,
                num_threads=threads,
                follow_redirects=follow_redirects,
                no_sub_enum=no_sub_enum
            )
            main_loop.run_until_complete(subdomain_enumerator.run())

        if nmap_thread.is_alive():
            logger.info("{} All scans done. Waiting for Nmap scan to wrap up. "
                        "Time left may vary depending on scan type and port range".format(COLORED_COMBOS.INFO))
            while nmap_thread.is_alive():
                time.sleep(15)

        logger.info("\n{}### Raccoon scan finished ###{}\n".format(COLOR.GRAY, COLOR.RESET))
        os.system("stty sane")

    except KeyboardInterrupt:
        print("{}Keyboard Interrupt detected. Exiting{}".format(COLOR.RED, COLOR.RESET))
        # Fix F'd up terminal after CTRL+C
        os.system("stty sane")
        exit(42)
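# Hedged entry-point sketch (assumption, not from the original source): the real
# project exposes these parameters through its own CLI layer. This only shows how
# the main() defined directly above could be invoked programmatically; every
# literal value below is a placeholder, not a documented default.
#
# if __name__ == "__main__":
#     main(target="example.com", tor_routing=False, proxy_list=None, proxy=None,
#          dns_records="A,MX,NS,CNAME,SOA,TXT", wordlist="wordlists/fuzzlist.txt",
#          threads=25, ignored_response_codes="302,400,401,402,403,404,503",
#          subdomain_list="wordlists/subdomains.txt", full_scan=False,
#          scripts=False, services=False, port=None, tls_port=443,
#          skip_health_check=False, no_redirects=False, no_url_fuzzing=False,
#          no_sub_enum=False, outdir="raccoon_scan_results", quiet=False)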