def build_script(self):
    # Base command: -Pn skips host discovery (assume host is up)
    script = ["nmap", "-Pn", self.target]

    if self.port_range:
        HelpUtilities.validate_port_range(self.port_range)
        script.append("-p")
        script.append(self.port_range)
        self.logger.info("{} Added port range {} to Nmap script".format(
            COLORED_COMBOS.INFO, self.port_range))

    if self.full_scan:
        # Full scan implies both service/version detection and default scripts
        script.append("-sV")
        script.append("-sC")
        self.logger.info(
            "{} Added scripts and services to Nmap script".format(
                COLORED_COMBOS.INFO))
        return script
    else:
        if self.scripts:
            self.logger.info(
                "{} Added safe-scripts scan to Nmap script".format(
                    COLORED_COMBOS.INFO))
            script.append("-sC")
        if self.services:
            self.logger.info("{} Added service scan to Nmap script".format(
                COLORED_COMBOS.INFO))
            script.append("-sV")
    return script

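# Illustration (hypothetical values, not part of the scanner): for an
# instance with target "example.com", port_range "1-1000" and
# full_scan=True, build_script() assembles:
#
#   ["nmap", "-Pn", "example.com", "-p", "1-1000", "-sV", "-sC"]
#
# -Pn skips host discovery, -sV enables service/version detection and -sC
# runs Nmap's default script set.
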
def get_log_file_path(self, path):
    if path:
        log_file = path
    else:
        log_file = "{}/url_fuzz.txt".format(self.target)
    return Logger(HelpUtilities.get_output_path(log_file))

def __init__(self, host):
    self.host = host
    self.cnames = host.dns_results.get('CNAME')
    self.request_handler = RequestHandler()
    self.web_server_validator = WebServerValidator()
    self.waf_present = False
    # Maps CNAME substrings to the WAF/CDN vendor they indicate
    self.waf_cname_map = {
        "incapdns": "Incapsula",
        "edgekey": "Akamai",
        "akamai": "Akamai",
        "edgesuite": "Akamai",
        "distil": "Distil Networks",
        "cloudfront": "CloudFront",
        "netdna-cdn": "MaxCDN"
    }
    # Maps a vendor to its application-level detection method
    self.waf_app_method_map = {
        "CloudFront": WAFApplicationMethods.detect_cloudfront,
        "Cloudflare": WAFApplicationMethods.detect_cloudflare,
        "Incapsula": WAFApplicationMethods.detect_incapsula,
        "MaxCDN": WAFApplicationMethods.detect_maxcdn,
        "Edgecast": WAFApplicationMethods.detect_edgecast,
        "Distil Networks": WAFApplicationMethods.detect_distil
    }
    log_file = HelpUtilities.get_output_path("{}/WAF.txt".format(
        self.host.target))
    self.logger = Logger(log_file)

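# Note on flow (an assumption based on the maps above, since detect() is
# not part of this excerpt): the host's CNAME records are presumably
# matched against waf_cname_map first, with the vendor-specific callables
# in waf_app_method_map serving as application-level checks, e.g. a CNAME
# containing "incapdns" indicating Incapsula.
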
def __init__(self, host, full_scan, scripts, services, port_range):
    self.target = host.target
    self.full_scan = full_scan
    self.scripts = scripts
    self.services = services
    self.port_range = port_range
    self.path = HelpUtilities.get_output_path("{}/nmap_scan.txt".format(
        self.target))
    self.logger = Logger(self.path)
    self.script = self.build_script()

def __init__(self, host):
    self.host = host
    self.request_handler = RequestHandler()
    self.web_server_validator = WebServerValidator()
    self.web_scan_results = []
    self.headers = None
    self.robots = None
    log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(
        self.host.target))
    self.target_dir = "/".join(log_file.split("/")[:-1])
    self.logger = Logger(log_file)

def __init__(self, host, sans, domain_list, ignored_response_codes,
             num_threads, follow_redirects, bruteforce_subdomains):
    self.host = host
    self.target = host.target
    self.sans = sans
    self.domain_list = domain_list
    self.ignored_error_codes = ignored_response_codes
    self.num_threads = num_threads
    self.follow_redirects = follow_redirects
    self.bruteforce_subdomains = bruteforce_subdomains
    self.request_handler = RequestHandler()
    self.sub_domains = set()
    log_file = HelpUtilities.get_output_path("{}/subdomains.txt".format(
        self.target))
    self.logger = Logger(log_file)

def __init__(self, host, port=443):
    super().__init__(host)
    self.target = host.target
    self.port = port
    self._versions = ("tls1", "tls1_1", "tls1_2")
    # OpenSSL likes to hang, Linux timeout to the rescue
    self._base_script = "timeout 10 openssl s_client -connect {}:{} ".format(
        self.target, self.port)
    self.begin = "-----BEGIN CERTIFICATE-----"
    self.end = "-----END CERTIFICATE-----"
    self.sni_data = {}
    self.non_sni_data = {}
    self.ciphers = ""
    log_file = HelpUtilities.get_output_path("{}/tls_report.txt".format(
        self.target))
    self.logger = Logger(log_file)

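# Illustration (hypothetical target): with target "example.com" and the
# default port, _base_script resolves to
#
#   timeout 10 openssl s_client -connect example.com:443
#
# The trailing space suggests a protocol flag (e.g. -tls1_2, matching the
# entries in self._versions) is appended per probe; the probing code itself
# is not shown in this excerpt.
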
@classmethod
async def grab_whois(cls, host):
    if not host.naked:
        return

    script = "whois {}".format(host.naked).split()
    log_file = HelpUtilities.get_output_path("{}/whois.txt".format(
        host.target))
    logger = Logger(log_file)

    process = await create_subprocess_exec(
        *script,
        stdout=PIPE,
        stderr=PIPE
    )
    result, err = await process.communicate()

    if process.returncode == 0:
        logger.info("{} {} WHOIS information retrieved".format(
            COLORED_COMBOS.GOOD, host))
        # Log each "key: value" line from the WHOIS output
        for line in result.decode().strip().split("\n"):
            if ":" in line:
                logger.debug(line)

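# Usage sketch: grab_whois is a coroutine and must be awaited or scheduled
# on an event loop, as main() does further down:
#
#   asyncio.ensure_future(DNSHandler.grab_whois(host))
#
# Note that it shells out to the system `whois` binary, which therefore
# has to be installed and on PATH.
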
@classmethod
def generate_dns_dumpster_mapping(cls, host, sout_logger):
    # Start a DNS Dumpster session to obtain a CSRF token
    dnsdumpster_session = DNSHandler.request_handler.get_new_session()
    url = "https://dnsdumpster.com"
    if host.naked:
        target = host.naked
    else:
        target = host.target
    payload = {
        "targetip": target,
        "csrfmiddlewaretoken": None
    }
    sout_logger.info("{} Trying to generate DNS Mapping for {} from DNS dumpster".format(
        COLORED_COMBOS.INFO, host))
    try:
        # Initial GET sets the csrftoken cookie needed for the POST
        dnsdumpster_session.get(url, timeout=10)
        jar = dnsdumpster_session.cookies
        for c in jar:
            if not c.__dict__.get("name") == "csrftoken":
                continue
            payload["csrfmiddlewaretoken"] = c.__dict__.get("value")
            break

        dnsdumpster_session.post(url, data=payload,
                                 headers={"Referer": "https://dnsdumpster.com/"})
        # Give the server a few seconds to render the map image
        time.sleep(3)
        page = dnsdumpster_session.get(
            "https://dnsdumpster.com/static/map/{}.png".format(target))
        if page.status_code == 200:
            path = HelpUtilities.get_output_path("{}/dns_mapping.png".format(
                host.target))
            with open(path, "wb") as target_image:
                target_image.write(page.content)
            sout_logger.info("{} Successfully fetched DNS mapping for {}".format(
                COLORED_COMBOS.GOOD, host.target))
    except ConnectionError:
        sout_logger.info("{} Failed to generate DNS mapping. A connection error occurred.".format(
            COLORED_COMBOS.BAD))

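# Usage sketch: invoked synchronously from main() once the async task set
# completes:
#
#   DNSHandler.generate_dns_dumpster_mapping(host, logger)
#
# On success the rendered map is written to <target>/dns_mapping.png under
# the output directory.
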
def main(
        target,
        tor_routing,
        proxy_list,
        proxy,
        dns_records,
        wordlist,
        threads,
        ignored_response_codes,
        subdomain_list,
        full_scan,
        scripts,
        services,
        port,
        tls_port,
        no_health_check,
        follow_redirects,
        no_url_fuzzing,
        no_sub_enum,
        # delay,
        outdir,
        quiet):
    try:
        # ------ Arg validation ------
        # Set logging level and Logger instance
        log_level = HelpUtilities.determine_verbosity(quiet)
        logger = SystemOutLogger(log_level)
        intro(logger)

        target = target.lower()

        HelpUtilities.validate_wordlist_args(proxy_list, wordlist, subdomain_list)
        HelpUtilities.validate_proxy_args(tor_routing, proxy, proxy_list)
        HelpUtilities.create_output_directory(outdir)

        if tor_routing:
            logger.info("{} Routing traffic anonymously through Tor\n".format(
                COLORED_COMBOS.WARNING))
        elif proxy_list:
            if not os.path.isfile(proxy_list):
                raise FileNotFoundError(
                    "Not a valid file path, {}".format(proxy_list))
            logger.info(
                "{} Routing traffic using proxies from list {}\n".format(
                    COLORED_COMBOS.WARNING, proxy_list))
        elif proxy:
            logger.info("{} Routing traffic through proxy {}\n".format(
                COLORED_COMBOS.WARNING, proxy))

        # TODO: Sanitize delay argument

        dns_records = tuple(dns_records.split(","))

        ignored_response_codes = tuple(
            int(code) for code in ignored_response_codes.split(","))

        if port:
            HelpUtilities.validate_port_range(port)

        # ------ /Arg validation ------

        # Set Request Handler instance
        request_handler = RequestHandler(
            proxy_list=proxy_list, tor_routing=tor_routing, single_proxy=proxy)

        if not no_health_check:
            HelpUtilities.validate_target_is_up(target)

        main_loop = asyncio.get_event_loop()

        logger.info("{}### Raccoon Scan Started ###{}\n".format(
            COLOR.BLUE, COLOR.RESET))
        logger.info("{} Trying to gather information about host: {}".format(
            COLORED_COMBOS.INFO, target))

        # TODO: Populate array when multiple targets are supported
        # hosts = []
        host = Host(target=target, dns_records=dns_records)
        host.parse()

        logger.info("\n{} Setting Nmap scan to run in the background".format(
            COLORED_COMBOS.INFO))
        nmap_scan = NmapScan(host, full_scan, scripts, services, port)
        # TODO: Populate array when multiple targets are supported
        # nmap_threads = []
        nmap_thread = threading.Thread(target=Scanner.run, args=(nmap_scan,))
        # Run Nmap scan in the background. Can take some time
        nmap_thread.start()

        # Run first set of checks - TLS, Web/WAF Data, DNS data
        waf = WAF(host)
        tls_info_scanner = TLSHandler(host, tls_port)
        web_app_scanner = WebApplicationScanner(host)
        tasks = (asyncio.ensure_future(tls_info_scanner.run()),
                 asyncio.ensure_future(waf.detect()),
                 asyncio.ensure_future(DNSHandler.grab_whois(host)),
                 asyncio.ensure_future(web_app_scanner.run_scan()))

        main_loop.run_until_complete(asyncio.wait(tasks))

        # DNS dumpster visualization
        DNSHandler.generate_dns_dumpster_mapping(host, logger)

        # Second set of checks - URL fuzzing, subdomain enumeration
        if not no_url_fuzzing:
            fuzzer = URLFuzzer(host, ignored_response_codes, threads,
                               wordlist, follow_redirects)
            main_loop.run_until_complete(fuzzer.fuzz_all())

        if not host.is_ip:
            sans = tls_info_scanner.sni_data.get("SANs")
            subdomain_enumerator = SubDomainEnumerator(
                host,
                domain_list=subdomain_list,
                sans=sans,
                ignored_response_codes=ignored_response_codes,
                num_threads=threads,
                follow_redirects=follow_redirects,
                # The CLI flag is negative ("skip enumeration"), so invert it
                bruteforce_subdomains=not no_sub_enum)
            main_loop.run_until_complete(subdomain_enumerator.run())

        if nmap_thread.is_alive():
            logger.info(
                "{} All scans done. Waiting for Nmap scan to wrap up. "
                "Time left may vary depending on scan type and port range".format(
                    COLORED_COMBOS.INFO))
            while nmap_thread.is_alive():
                time.sleep(15)

        logger.info("\n{}### Raccoon scan finished ###{}\n".format(
            COLOR.BLUE, COLOR.RESET))
        os.system("stty sane")

    except KeyboardInterrupt:
        print("{}Keyboard Interrupt detected. Exiting{}".format(
            COLOR.RED, COLOR.RESET))
        # Fix F'd up terminal after CTRL+C
        os.system("stty sane")
        exit(42)

def create_host_dir_and_set_file_logger(self):
    log_file = HelpUtilities.get_output_path("{}/dns_records.txt".format(
        self.target))
    self._create_host_dir(log_file)
    self.logger = Logger(log_file)