def process_urls(data):
    """Fetch each URL and collect its interesting response headers.

    Args:
        data: tuple ``(i, urls, timeout)`` — ``i`` is an opaque index
            handed back to the caller (worker-pool bookkeeping),
            ``urls`` is an iterable of URL strings, and ``timeout`` is
            the per-request timeout in seconds.

    Returns:
        ``(i, new_headers)`` where ``new_headers`` maps each
        successfully fetched URL to a list of ``"Header: value"``
        strings, excluding common noise headers.
    """
    i, urls, timeout = data
    # HTTP header names are case-insensitive, so compare lowercased.
    # (The original exact-case list check missed e.g. "content-type".)
    blacklist = {
        'date', 'connection', 'content-type', 'content-length',
        'keep-alive', 'content-encoding', 'vary',
    }
    new_headers = {}
    for u in urls:
        display("Processing %s" % u)
        try:
            # NOTE(review): verify=False deliberately skips TLS cert
            # validation (recon tooling scans arbitrary hosts); do not
            # reuse this pattern for sensitive traffic.
            res = requests.get(u, timeout=int(timeout), verify=False)
            for k, v in res.headers.items():
                if k.lower() not in blacklist:
                    new_headers.setdefault(u, []).append("%s: %s" % (k, v))
        except KeyboardInterrupt:
            display_warning("Got Ctrl+C, exiting")
            sys.exit(1)
        except Exception as e:
            # Best-effort: one bad URL must not kill the batch.
            display_error("%s no good, skipping: %s" % (u, e))
    return (i, new_headers)
def process_output(self, cmds):
    """Parse nmap ssl-cert XML output files and attach certs to ports.

    Args:
        cmds: iterable of dicts with keys ``"output"`` (path to nmap
            XML), ``"service_id"`` (Port id), and ``"target"`` (the
            host that was scanned).
    """
    for data in cmds:
        try:
            # Context manager closes the file (was a bare open().read()).
            with open(data["output"]) as f:
                xmldata = xmltodict.parse(f.read())
            cert = xmldata["nmaprun"]["host"]["ports"]["port"]["script"]["@output"]
            if cert:
                svc = self.Port.all(id=data["service_id"])[0]
                if not svc.meta.get("sslcert", False):
                    svc.meta["sslcert"] = {}
                svc.meta["sslcert"][data["target"]] = cert
                print(
                    "Cert added to {} for {}".format(
                        data["service_id"], data["target"]
                    )
                )
                svc.save()
        except Exception as e:
            # Best-effort: a missing/malformed file or an unexpected
            # XML shape skips this entry rather than aborting the batch.
            display_error("File not valid: {}\nError: {}".format(data["output"], e))
    self.Port.commit()
def run_cmd(cmd):
    """Run a command with a hard timeout, killing it if it overruns.

    Args:
        cmd: list whose last element is the timeout in seconds and
            whose preceding elements form the argv to execute.
    """
    import subprocess  # local: only TimeoutExpired is needed here
    c = cmd[:-1]
    timeout = cmd[-1]
    display("Executing command: %s" % ' '.join(c))
    try:
        proc = Popen(c)
        proc.wait(timeout=timeout)
    except subprocess.TimeoutExpired:
        # Popen.wait(timeout=...) raises but does NOT kill the child;
        # terminate it explicitly so we don't leak processes, then
        # wait() again to reap it and avoid a zombie.
        proc.terminate()
        proc.wait()
        display_error(
            "Timeout of %s reached. Aborting thread for command: %s" %
            (timeout, ' '.join(c)))
    except Exception as e:
        # The original bare `except:` reported ANY failure (e.g. a
        # missing binary) as a timeout; report the real error instead.
        display_error("Command failed: %s (%s)" % (' '.join(c), e))
def process_output(self, cmds):
    """Parse fierce output files and register any discovered domains.

    Args:
        cmds: iterable of dicts with ``"target"`` (domain scanned) and
            ``"output"`` (path to the fierce output file).
    """
    for c in cmds:
        target = c["target"]
        output_path = c["output"]
        try:
            # Context manager closes the handle (was a bare open().read()).
            with open(output_path) as f:
                fierceOutput = f.read()
        except IOError:
            display_error(
                "The output file for {} was not found. If fierce timed out, but is still running, you can run this tool again with the --no_binary flag to just grab the file."
                .format(target))
            continue
        domains = []
        if "Now performing" in fierceOutput:
            # Brute-force results look like "<ip>\t<hostname>".
            # Regexes are raw strings now (the originals relied on
            # invalid escape sequences like "\d" in plain strings).
            hosts = re.findall(
                r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\t.*$",
                fierceOutput,
                re.MULTILINE,
            )
            if hosts:
                for host in hosts:
                    domain = (host.split("\t")[1].lower().replace(
                        "www.", "").rstrip("."))
                    if domain not in domains:
                        domains.append(domain)
        elif "Whoah, it worked" in fierceOutput:
            print("Zone transfer found!")
            # Zone-transfer lines look like "<name>\tA\t<ip>".
            hosts = re.findall(
                r".*\tA\t\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",
                fierceOutput,
                re.MULTILINE,
            )
            if hosts:
                for host in hosts:
                    domain = (host.split("\t")[0].lower().replace(
                        "www.", "").rstrip("."))
                    if domain not in domains:
                        domains.append(domain)
        else:
            display_error(
                "Unable to process {}. If fierce timed out, but is still running, you can run this tool again with the --no_binary flag to just grab the file."
                .format(output_path))
        if domains:
            for _domain in domains:
                created, domain = self.Domain.find_or_create(
                    domain=_domain)
def get_targets(self, args):
    '''
    This module is used to build out a target list and output file list, depending on the arguments. Should return a list in the format [(target, output), (target, output), etc, etc]
    '''
    targets = []
    if args.import_database:
        if args.rescan:
            all_domains = self.BaseDomain.all(scope_type="passive")
        else:
            all_domains = self.BaseDomain.all(tool=self.name, scope_type="passive")
        for d in all_domains:
            # We need to find all of the http/https ports and create the input files.
            output_path = os.path.join(self.base_config['PROJECT']['base_path'],
                                       'aquatone',
                                       d.domain)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            hosts = []
            open_ports = []
            urls = []
            targets.append(d.domain)
            for s in d.subdomains:
                name = s.domain
                for ip in s.ip_addresses:
                    port_list = []
                    for p in ip.ports:
                        if 'http' in p.service_name:
                            hosts.append("{}.{}".format(name, ip.ip_address))
                            port_list.append(p.port_number)
                            # One URL by name, one by IP — virtual hosting
                            # can serve different content for each.
                            urls.append('{}://{}:{}/'.format(p.service_name, name, p.port_number))
                            urls.append('{}://{}:{}/'.format(p.service_name, ip.ip_address, p.port_number))
                    if port_list:
                        open_ports.append('{},{}'.format(ip.ip_address, ','.join([str(o) for o in port_list])))
            # Context managers flush and close the files (the original
            # open(...).write(...) calls leaked the handles).
            with open(os.path.join(output_path, 'hosts.txt'), 'w') as f:
                f.write('\n'.join(list(set(hosts))))
            with open(os.path.join(output_path, 'urls.txt'), 'w') as f:
                f.write('\n'.join(list(set(urls))))
            with open(os.path.join(output_path, 'open_ports.txt'), 'w') as f:
                f.write('\n'.join(list(set(open_ports))))
    else:
        display_error("You need to supply domain(s).")
    res = []
    for t in targets:
        res.append({'target': t})
    return res
def run(self, args):
    """Run LinkedInt against a domain, optionally building keywords
    from the most common stored job-title words (--top) or from an
    explicit --smart_shuffle list.
    """
    if not args.binary:
        self.binary = which.run("LinkedInt.py")
    else:
        self.binary = which.run(args.binary)
    if not self.binary:
        display_error(
            "LinkedInt binary not found. Please explicitly provide path with --binary"
        )
        # Bail out: without a binary there is nothing to run (the
        # original fell through and kept executing).
        return
    if args.domain:
        created, domain = self.BaseDomain.find_or_create(domain=args.domain)
        if args.top:
            # Take the leading portion of each stored job title
            # ("Engineer at Acme" -> "Engineer") and count the words.
            titles = [
                user.job_title.split(" at ")[0]
                for user in domain.users
                if user.job_title
            ]
            words = []
            for t in titles:
                words += [w.lower() for w in t.split(" ")]
            word_count = Counter(words).most_common()
            display("Using the top %s words:" % args.top)
            res = []
            for w in word_count[: int(args.top)]:
                display("\t{}\t{}".format(w[0], w[1]))
                res.append(w[0])
            args.smart_shuffle = ",".join(res)
        if args.smart_shuffle:
            # Pass 1: OR the keywords together; pass 2: exclude them
            # all — the two runs together cover the full population.
            args.keywords = " OR ".join(
                ['"{}"'.format(i) for i in args.smart_shuffle.split(",")]
            )
            self.process_domain(domain, args)
            args.keywords = " AND ".join(
                ['-"{}"'.format(i) for i in args.smart_shuffle.split(",")]
            )
            self.process_domain(domain, args)
        else:
            self.process_domain(domain, args)
        self.BaseDomain.commit()
def process_output(self, cmds):
    """Create Domain records for every "Found: <name>" line in each
    output file, then commit once at the end.

    Args:
        cmds: iterable of dicts with ``"target"`` and ``"output"``
            (path to the tool's output file).
    """
    for c in cmds:
        target = c["target"]
        output_path = c["output"]
        if os.path.isfile(output_path):
            # Context manager closes the file (was a bare open().read()).
            with open(output_path) as f:
                data = f.read().split("\n")
            for d in data:
                if "Found: " in d:
                    # Line shape: "Found: <domain>".
                    new_domain = d.split(" ")[1].lower()
                    created, subdomain = self.Domain.find_or_create(
                        domain=new_domain
                    )
        else:
            display_error("{} not found.".format(output_path))
    self.Domain.commit()
def run(self, args):
    """Query Shodan (via the shodan library) for the configured ranges
    and store the results as IPAddress/Port records.
    """
    if not args.api_key:
        display_error("You must supply an API key to use shodan!")
        return
    # Initialize up front so a run with neither --search nor
    # --import_db exits cleanly instead of raising NameError.
    ranges = []
    if args.search:
        ranges = [args.search]
    if args.import_db:
        ranges = []
        if args.rescan:
            ranges += [
                "net:{}".format(c.cidr) for c in self.ScopeCidr.all()
            ]
        else:
            ranges += [
                "net:{}".format(c.cidr)
                for c in self.ScopeCidr.all(tool=self.name)
            ]
    api = shodan_api.Shodan(args.api_key)
    for r in ranges:
        time.sleep(1)  # stay under Shodan's rate limit
        results = api.search(r)
        display("{} results found for: {}".format(results['total'], r))
        for res in results['matches']:
            ip_address_str = res['ip_str']
            port_str = res['port']
            transport = res['transport']
            display("Processing IP: {} Port: {}/{}".format(
                ip_address_str, port_str, transport))
            created, IP = self.IPAddress.find_or_create(
                ip_address=ip_address_str)
            created, port = self.Port.find_or_create(ip_address=IP,
                                                     port_number=port_str,
                                                     proto=transport)
            # Keep the full banner for later inspection.
            port.meta['shodan_data'] = res
            port.save()
    self.IPAddress.commit()
def process_output(self, cmds):
    """Create Domain records for every "Found: <name>" line in each
    output file, then commit once at the end.

    Args:
        cmds: iterable of dicts with ``"target"`` and ``"output"``
            (path to the tool's output file).
    """
    for c in cmds:
        target = c['target']
        output_path = c['output']
        if not os.path.isfile(output_path):
            display_error("{} not found.".format(output_path))
            # A single missing file used to `return`, aborting the
            # whole batch AND skipping the commit; skip just this
            # entry instead (matches the sibling brute-force parser).
            continue
        with open(output_path) as f:
            data = f.read().split('\n')
        for d in data:
            if 'Found: ' in d:
                # Line shape: "Found: <domain>".
                new_domain = d.split(' ')[1].lower()
                created, subdomain = self.Domain.find_or_create(
                    domain=new_domain)
    self.Domain.commit()
def reclassify_domain(self, bd):
    """Interactively re-scope a base domain based on its whois data.

    Shows the stored whois record and prompts the operator to mark the
    domain (A)ctive, (P)assive, or (N)ot in scope; persists the choice
    via ``bd.save()``. Default (empty/other input) is not-in-scope.
    """
    if bd.meta.get('whois', False):
        display_new("Whois data found for {}".format(bd.domain))
        print(bd.meta['whois'])
        # raw_input() is Python 2 only and raises NameError on Python 3;
        # this codebase targets Python 3 (Popen.wait(timeout=...) is
        # used elsewhere), so use input().
        res = input("Should this domain be scoped (A)ctive, (P)assive, or (N)ot? [a/p/N] ")
        if res.lower() == 'a':
            bd.in_scope = True
            bd.passive_scope = True
        elif res.lower() == 'p':
            bd.in_scope = False
            bd.passive_scope = True
        else:
            bd.in_scope = False
            bd.passive_scope = False
        bd.save()
    else:
        display_error("Unfortunately, there is no whois information for {}. Please populate it using the Whois module".format(bd.domain))
def process_output(self, cmds):
    """Create Domain records from "<name>:<value>"-style output lines.

    Args:
        cmds: iterable of dicts with an ``"output"`` key (path to the
            tool's output file).
    """
    for cmd in cmds:
        output_path = cmd["output"]
        if not os.path.isfile(output_path):
            display_error("{} not found.".format(output_path))
            # The original used a bare `next` here — a no-op
            # expression, not a loop statement — and then crashed on
            # the undefined `data`; skip the missing file properly.
            continue
        with open(output_path) as f:
            data = f.read().split("\n")
        for d in data:
            # Take the part before the first ':'; skip blank lines.
            new_domain = d.split(":")[0].lower()
            if new_domain:
                created, subdomain = self.Domain.find_or_create(
                    domain=new_domain)
    self.Domain.commit()
def run_cmd(cmd):
    """Run a command, enforcing an optional timeout.

    Args:
        cmd: list whose last element is the timeout in seconds (falsy
            means "no timeout") and whose preceding elements form the
            argv to execute.
    """
    import subprocess  # local: only TimeoutExpired is needed here
    c = cmd[:-1]
    timeout = cmd[-1]
    display("Executing command: %s" % " ".join(c))
    if timeout:
        process = Popen(c)
        try:
            # wait(timeout=...) enforces the deadline exactly; the old
            # 5-second polling loop could overshoot by up to 5s.
            process.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            display_error(
                "Timeout of %s reached. Aborting thread for command: %s" %
                (timeout, " ".join(c)))
            process.terminate()
            # Reap the terminated child (the original never did,
            # leaving a zombie process behind).
            process.wait()
    else:
        Popen(c).wait()
def process_output(self, cmds):
    """Parse nmap XML output files and attach the script output (cert
    data) to the corresponding Port's metadata under ``self.name``.

    Args:
        cmds: iterable of dicts with ``"output"`` (path to nmap XML),
            ``"service_id"`` (Port id), and ``"target"``.
    """
    for data in cmds:
        try:
            # Context manager closes the file (was a bare open().read()).
            with open(data['output']) as f:
                xmldata = xmltodict.parse(f.read())
            cert = xmldata['nmaprun']['host']['ports']['port']['script'][
                '@output']
            if cert:
                svc = self.Port.all(id=data['service_id'])[0]
                if not svc.meta.get(self.name, False):
                    svc.meta[self.name] = {}
                svc.meta[self.name][data['target']] = cert
                svc.update()
        except Exception:
            # Narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; still best-effort per file.
            display_error("File not valid: {}".format(data['output']))
    self.Port.commit()
def process_output(self, cmds):
    """Parse dnsrecon JSON output and register discovered domains.

    Stores the raw record list on the target Domain, then creates a
    Domain for every name found in A/PTR/MX/SRV/NS/SOA records.
    """
    for c in cmds:
        target = c['target']
        output_path = c['output']
        try:
            with open(output_path) as f:
                res = json.loads(f.read())
        except IOError:
            display_error("DnsRecon failed for {}".format(target))
            # Missing/unreadable file: skip this target (the original
            # fell through and raised NameError on the undefined `res`).
            continue
        # res[0] is dnsrecon's metadata record; " -d " means a
        # domain-mode scan, so attach the full results to the target.
        if " -d " in res[0]['arguments']:
            created, dbrec = self.Domain.find_or_create(domain=target)
            dbrec.dns = res
            dbrec.save()
        for record in res:
            domain = None
            rtype = record.get("type")
            if rtype == "A" or rtype == "PTR":
                domain = record.get("name").lower().replace("www.", "")
            elif rtype == "MX":
                domain = record.get("exchange").lower().replace("www.", "")
            elif rtype == "SRV" or rtype == "NS":
                # Fixed: was record.get("type" == "NS"), i.e.
                # record.get(False) — always None, so NS records were
                # never matched.
                domain = record.get("target").lower().replace("www.", "")
            elif rtype == "SOA":
                domain = record.get("mname").lower().replace("www.", "")
            if domain:
                created, domain_obj = self.Domain.find_or_create(domain=domain)
    self.Domain.commit()
def get_targets(self, args):
    """
    This module is used to build out a target list and output file list, depending on the arguments. Should return a list in the format [(target, output), (target, output), etc, etc]
    """
    targets = []
    if args.import_database:
        if args.rescan:
            all_domains = self.BaseDomain.all(scope_type="passive")
        else:
            all_domains = self.BaseDomain.all(tool=self.name, scope_type="passive")
        for d in all_domains:
            # We need to find all of the http/https ports and create the input files.
            output_path = os.path.join(
                self.base_config["PROJECT"]["base_path"], "aquatone", d.domain
            )
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            hosts_j = {}
            hosts = []
            open_ports = []
            urls = []
            targets.append(d.domain)
            for s in d.subdomains:
                name = s.domain
                for ip in s.ip_addresses:
                    # NOTE(review): a name with several IPs keeps only
                    # the last one in hosts.json — confirm intended.
                    hosts_j[name] = ip.ip_address
                    port_list = []
                    for p in ip.ports:
                        if "http" in p.service_name:
                            hosts.append("{}.{}".format(name, ip.ip_address))
                            port_list.append(p.port_number)
                            # One URL by name, one by IP — virtual
                            # hosting may serve different content.
                            urls.append(
                                "{}://{}:{}/".format(
                                    p.service_name, name, p.port_number
                                )
                            )
                            urls.append(
                                "{}://{}:{}/".format(
                                    p.service_name, ip.ip_address, p.port_number
                                )
                            )
                    if port_list:
                        open_ports.append(
                            "{},{}".format(
                                ip.ip_address, ",".join([str(o) for o in port_list])
                            )
                        )
            # Context managers flush and close the files (the original
            # open(...).write(...) calls leaked the handles).
            with open(os.path.join(output_path, "hosts.txt"), "w") as f:
                f.write("\n".join(list(set(hosts))))
            with open(os.path.join(output_path, "urls.txt"), "w") as f:
                f.write("\n".join(list(set(urls))))
            with open(os.path.join(output_path, "open_ports.txt"), "w") as f:
                f.write("\n".join(list(set(open_ports))))
            with open(os.path.join(output_path, "hosts.json"), "w") as f:
                f.write(json.dumps(hosts_j))
    else:
        display_error("You need to supply domain(s).")
    res = []
    for t in targets:
        res.append({"target": t})
    return res
def run(self, args):
    """Nessus module entry point: import an export file, launch a new
    scan, or download a finished report and import it.
    """
    if args.import_file:
        for nFile in args.import_file:
            self.process_data(nFile, args)
    elif args.launch:
        # Fixed: the original chained `not x and not y and ...`, so the
        # error only fired when EVERY field was missing; any single
        # missing credential must block the launch.
        if not args.username or not args.password or not args.host:
            display_error(
                "You must supply a username, password, and host to launch a Nessus job"
            )
        else:
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
                uuid=args.uuid,
                policy_id=args.policy_id,
                folder_id=args.folder_id,
            )
            # Scan everything currently in active scope.
            ips = [
                ip.ip_address
                for ip in self.IPAddress.all(scope_type="active", tool=self.name)
            ]
            cidrs = [cidr.cidr for cidr in self.ScopeCIDR.all(tool=self.name)]
            domains = [
                domain.domain
                for domain in self.Domain.all(scope_type="active", tool=self.name)
            ]
            targets = ", ".join(merge_ranges(ips + cidrs) + domains)
            res = n.launch_job(targets, args.job_name)
            display("New Nessus job launched with ID {}".format(res))
            display(
                "Remember this number! You'll need it to download the job once it is done."
            )
    elif args.download:
        # Same `and` -> `or` fix: all four fields are required.
        if (
            not args.username
            or not args.password
            or not args.host
            or not args.job_id
        ):
            display_error(
                "You must supply host, username, password and job_id to download a report to import"
            )
        else:
            # Removed a leftover debug proxy (proxies={"https":
            # "127.0.0.1:8080"}) that routed every download through a
            # local intercepting proxy and broke runs without one.
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
            )
            # Resolve the output directory relative to the project base
            # path, tolerating a leading "/" in the supplied path.
            if args.output_path[0] == "/":
                output_path = os.path.join(
                    self.base_config["PROJECT"]["base_path"], args.output_path[1:]
                )
            else:
                output_path = os.path.join(
                    self.base_config["PROJECT"]["base_path"], args.output_path
                )
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            output_path = os.path.join(
                output_path, "Nessus-export-{}.nessus".format(int(time.time()))
            )
            n.export_file(args.job_id, output_path)
            self.process_data(output_path, args)
def run(self, args):
    """Query the Shodan HTTP API for every configured range/IP and
    store the results as IPAddress/Port records.

    Queries containing ":" (e.g. "net:<cidr>") go to the paginated
    search endpoint; bare IPs go to the host endpoint.
    """
    if not args.api_key:
        display_error("You must supply an API key to use shodan!")
        return
    # Initialize so a run with neither --search nor --import_db exits
    # cleanly instead of raising NameError.
    ranges = []
    if args.search:
        ranges = [args.search]
    if args.import_db:
        ranges = []
        if args.rescan:
            if args.fast:
                # One "net:" search per CIDR instead of per-IP lookups.
                ranges += [
                    "net:{}".format(c.cidr) for c in self.ScopeCidr.all()
                ]
            else:
                cidrs = [c.cidr for c in self.ScopeCidr.all()]
                for c in cidrs:
                    ranges += [str(i) for i in IPNetwork(c)]
            if not args.cidr_only:
                ranges += [
                    "{}".format(i.ip_address)
                    for i in self.IPAddress.all(scope_type="active")
                ]
        else:
            if args.fast:
                ranges += [
                    "net:{}".format(c.cidr)
                    for c in self.ScopeCidr.all(tool=self.name)
                ]
            else:
                cidrs = [c.cidr for c in self.ScopeCidr.all(tool=self.name)]
                for c in cidrs:
                    ranges += [str(i) for i in IPNetwork(c)]
            if not args.cidr_only:
                ranges += [
                    "{}".format(i.ip_address)
                    for i in self.IPAddress.all(scope_type="active",
                                                tool=self.name)
                ]
    api_host_url = "https://api.shodan.io/shodan/host/{}?key={}"
    api_search_url = (
        "https://api.shodan.io/shodan/host/search?key={}&query={}&page={}")

    def store_result(res, host_data):
        # Shared persistence for one service banner (was duplicated
        # verbatim in both the search and host branches).
        ip_str = res["ip_str"]
        port_str = res["port"]
        transport = res["transport"]
        display("Processing IP: {} Port: {}/{}".format(
            ip_str, port_str, transport))
        created, IP = self.IPAddress.find_or_create(ip_address=ip_str)
        # NOTE(review): stores the whole API response on the IP, as
        # the original did — possibly meant to be `res`; confirm.
        IP.meta["shodan_data"] = host_data
        created, port = self.Port.find_or_create(
            ip_address=IP, port_number=port_str, proto=transport)
        if created:
            if res.get("ssl", False):
                svc = "https"
            elif res.get("http", False):
                svc = "http"
            else:
                svc = ""
            port.service_name = svc
        port.meta["shodan_data"] = res
        port.save()

    for r in ranges:
        time.sleep(1)  # stay under Shodan's rate limit
        if ":" in r:
            # "net:<cidr>"-style query -> paginated search endpoint.
            display("Doing Shodan search: {}".format(r))
            try:
                results = json.loads(
                    requests.get(api_search_url.format(args.api_key, r,
                                                       1)).text)
                if results.get("error") and "request timed out" in results[
                        "error"]:
                    display_warning(
                        "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                    )
                    time.sleep(5)  # actually wait, as the message says
                    results = json.loads(
                        requests.get(
                            api_search_url.format(args.api_key, r, 1)).text)
            except Exception as e:
                display_error("Something went wrong: {}".format(e))
                # Was a bare `next` (a no-op expression) that then
                # crashed on the undefined `results`; skip this range.
                continue
            total = len(results["matches"])
            matches = []
            i = 1
            while total > 0:
                display("Adding {} results from page {}".format(total, i))
                matches += results["matches"]
                i += 1
                try:
                    time.sleep(1)
                    results = json.loads(
                        requests.get(
                            api_search_url.format(args.api_key, r, i)).text)
                    if (results.get("error")
                            and "request timed out" in results["error"]):
                        display_warning(
                            "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                        )
                        time.sleep(5)
                        # Fixed: the retry re-fetched page 1, which
                        # could loop over the first page forever;
                        # retry the page we were actually on.
                        results = json.loads(
                            requests.get(
                                api_search_url.format(args.api_key, r,
                                                      i)).text)
                    total = len(results["matches"])
                except Exception as e:
                    display_error("Something went wrong: {}".format(e))
                    total = 0
            # Removed a leftover pdb.set_trace() that halted every run.
            for res in matches:
                store_result(res, results)
        else:
            # Bare IP -> host endpoint (single, unpaginated response).
            try:
                results = json.loads(
                    requests.get(api_host_url.format(r, args.api_key)).text)
            except Exception as e:
                display_error("Something went wrong: {}".format(e))
                continue  # was a no-op bare `next`
            if results.get("data", False):
                display("{} results found for: {}".format(
                    len(results["data"]), r))
                for res in results["data"]:
                    store_result(res, results)
    self.IPAddress.commit()