def thread_handler(self):
    """Resolve the collected raw subdomains with MassDNS and persist the output."""
    print("[+] Firing 'MassDNS' to resolve collected subdomains...")
    resolver = self.subdom_resolver_proc(self.raw_domains_file)
    # Block until MassDNS exits, capturing everything it wrote to stdout.
    resolved = resolver.communicate()[0].decode("utf-8")
    store_results(resolved, self.output_file)
    print("[+] MassDNS resolved the following subdomains:", resolved, sep='\n\n')
    print("[+] Resolving subdomains completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")
def thread_handler(self):
    """Run Assetfinder once per target and store the merged findings."""
    print("[+] Firing 'Assetfinder' to hunt subdomains...")
    collected = []
    for target in self.op.targets:
        finder = self.enumerator_proc(target)
        found = finder.communicate()[0].decode('utf-8')
        print("[+] Assetfinder found the following subdomains for '" + target + "':",
              found, sep='\n\n')
        # Normalize trailing whitespace so each target's chunk ends in one newline.
        collected.append(found.rstrip() + "\n")
    store_results("".join(collected), self.output_file)
    print("[+] Assetfinder hunt completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")
def thread_handler(self):
    """Launch a single Amass run covering every target and persist its output."""
    print("[+] Firing 'Amass' to hunt subdomains...")
    # Amass accepts a comma-separated domain list in one invocation.
    joined_targets = ','.join(self.op.targets)
    hunter = self.enumerator_proc(joined_targets)
    found = hunter.communicate()[0].decode("utf-8")
    store_results(found, self.output_file)
    print("[+] Amass retrieved the following subdomains:", found, sep='\n\n')
    print("[+] Amass hunt completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")
def thread_handler(self):
    """Crawl with GoSpider and persist the endpoints it discovers."""
    print("[+] Firing 'GoSpider' to hunt endpoints...")
    # GoSpider fails when its output directory is missing, so create it first.
    makedirs(self.output_dir, exist_ok=True)
    crawler = self.crawler_proc()
    endpoints = crawler.communicate()[0].decode("utf-8")
    store_results(endpoints, self.output_file)
    print("[+] GoSpider retrieved the following endpoints:", endpoints, sep='\n\n')
    print("[+] GoSpider hunt completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")
def thread_handler(self):
    """Strip redundant endpoints from the target file (rewritten in place)."""
    print("[+] Removing redundant endpoints...")
    with open(self.target_file) as endpoints_file:
        payload = endpoints_file.read().encode('utf-8')
    if not payload:
        # Nothing to filter; writing an empty result keeps the file consistent.
        deduped = ''
    else:
        deduped = self.filter_proc().communicate(input=payload)[0].decode("utf-8")
    store_results(deduped, self.target_file)
    print("[+] Removing redundant endpoints completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")
def thread_handler(self):
    """Pipe enumerated subdomains into httprobe and persist the live services."""
    print("[+] Firing 'httprobe' to find the live subdomains...")
    with open(self.input_file) as subdomains_file:
        probe_input = subdomains_file.read().encode('utf-8')
    if probe_input:
        live = self.discovery_proc().communicate(input=probe_input)[0].decode("utf-8")
    else:
        # Skip spawning httprobe entirely when there is nothing to probe.
        live = ''
    store_results(live, self.output_file)
    print("[+] httprobe found the following web services:", live, sep='\n\n')
    print("[+] Live web services discovery completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")
def thread_handler(self):
    """Dork GitHub for each target's subdomains, throttling between queries."""
    print("[+] Dorking GitHub for subdomains...")
    pieces = []
    for target in self.op.targets:
        dorker = self.enumerator_proc(target, self.op.github_token)
        found = dorker.communicate()[0].decode('utf-8')
        print("[+] Attempted to find subdomains on github for '" + target + "':",
              found, sep='\n\n')
        pieces.append(found.rstrip() + "\n")
        # Pause between targets — presumably to stay under GitHub's rate limits.
        time.sleep(5)
    store_results("".join(pieces), self.output_file)
    print("[+] Dorking GitHub for subdomains completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")
def thread_handler(self):
    """Feed hosts to WaybackURLs and persist the historical endpoints it returns."""
    print("[+] Firing 'WaybackURLs' to hunt endpoints...")
    with open(self.input_file) as hosts_file:
        wayback_input = hosts_file.read().encode('utf-8')
    if wayback_input:
        endpoints = self.enumerator_proc().communicate(input=wayback_input)[0].decode("utf-8")
    else:
        # No hosts to query; avoid launching the subprocess on empty stdin.
        endpoints = ''
    store_results(endpoints, self.output_file)
    print("[+] WaybackURLs retrieved the following endpoints:", endpoints, sep='\n\n')
    print("[+] WaybackURLs hunt completed")
    print("[+] 'HUNTSMAN' sequence in progress...\n\n")