def run_cmd_noout(cmd_data):
    """Run a command with stdout/stderr redirected to a file, with optional timeout.

    Args:
        cmd_data: two-item sequence ``[cmd, output]`` where ``output`` is the
            path to write combined stdout/stderr to, and ``cmd`` is a list
            whose last element is the timeout in seconds (falsy = no timeout)
            and whose preceding elements are the argv to execute.

    Returns:
        ``cmd_data`` unchanged, so callers (e.g. a worker pool) can correlate
        the result with the submitted job.
    """
    cmd = cmd_data[0]
    output = cmd_data[1]
    c = cmd[:-1]
    timeout = cmd[-1]
    display("Executing command: %s" % " ".join(c))
    current_time = time.time()
    # Bug fix: the output file is now closed via a context manager even if
    # Popen or the polling loop raises (it used to leak on exception).
    with open(output, 'w') as f:
        if timeout:
            process = Popen(c, stdout=f, stderr=STDOUT)
            # Poll every 5 seconds until the process exits or the deadline passes.
            while time.time() < current_time + timeout and process.poll() is None:
                time.sleep(5)
            if process.poll() is None:
                display_error(
                    "Timeout of %s reached. Aborting thread for command: %s"
                    % (timeout, " ".join(c)))
                process.terminate()
        else:
            Popen(c, stdout=f, stderr=STDOUT).wait()
    return cmd_data
def run_cmd(cmd):
    """Execute a command inheriting the parent's stdio, with optional timeout.

    Args:
        cmd: list whose final element is a timeout in seconds (falsy = wait
            forever) and whose preceding elements form the argv to run.
    """
    argv, timeout = cmd[:-1], cmd[-1]
    display("Executing command: %s" % " ".join(argv))
    deadline = time.time() + timeout if timeout else None
    if not timeout:
        # No timeout requested: block until the child finishes.
        Popen(argv).wait()
        return
    process = Popen(argv)
    # Check on the child every 5 seconds until it exits or time runs out.
    while process.poll() is None and time.time() < deadline:
        time.sleep(5)
    if process.poll() is None:
        display_error(
            "Timeout of %s reached. Aborting thread for command: %s"
            % (timeout, " ".join(argv)))
        process.terminate()
def process_output(self, cmds):
    """Tag the DB port matching each command's target URL with this tool's name.

    Each entry of *cmds* is a dict with a ``'target'`` of the form
    ``proto://host[:port]/...``.  Hosts that parse as dotted-quad IPs are
    looked up directly; anything else is treated as a domain and every IP it
    resolves to in the DB is checked.
    """
    for cmd in cmds:
        target = cmd['target']
        proto = target.split('/')[0]
        url = target.split('/')[2]
        # Determine the port: an explicit ':port' wins, else default by scheme.
        if ':' in url:
            port_num = url.split(':')[1]
            url = url.split(':')[0]
        elif proto == 'http':
            port_num = "80"
        elif proto == 'https':
            port_num = "443"
        else:
            port_num = "0"
        try:
            # Raises ValueError when the host is not a dotted-quad IP; the
            # except branch then handles it as a domain.
            [int(i) for i in url.split('.')]
            created, ip = self.IPAddress.find_or_create(ip_address=url)
            port = [
                p for p in ip.ports
                if p.port_number == int(port_num) and p.proto == 'tcp'
            ][0]
            port.set_tool(self.name)
        # Bug fix: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        except Exception:
            display("Domain found: {}".format(url))
            created, domain = self.Domain.find_or_create(domain=url)
            for ip in domain.ip_addresses:
                try:
                    port = [
                        p for p in ip.ports
                        if p.port_number == int(port_num) and p.proto == 'tcp'
                    ][0]
                    port.set_tool(self.name)
                except Exception as e:
                    print("Error getting ports: {}".format(e))
    self.Port.commit()
def process_data(self, nFile, args):
    """Parse a Nessus XML export and store hosts, OS guesses and vulns in the DB.

    Args:
        nFile: path to a .nessus (XML) report file.
        args: parsed CLI arguments; stashed on self for use by getVulns.
    """
    display("Reading " + nFile)
    tree = ET.parse(nFile)
    root = tree.getroot()
    self.args = args
    for ReportHost in root.iter("ReportHost"):
        os = []
        hostname = ""
        hostIP = ""
        # HostProperties carries per-host metadata as <tag name="...">text</tag>.
        for HostProperties in ReportHost.iter("HostProperties"):
            for tag in HostProperties:
                if tag.get("name") == "host-ip":
                    hostIP = tag.text
                if tag.get("name") == "host-fqdn":
                    # NOTE(review): tag.text would be None for an empty
                    # element and .lower() would raise -- confirm Nessus
                    # always populates host-fqdn text when the tag exists.
                    hostname = tag.text.lower()
                    # Normalize away a leading www. so domains dedupe.
                    hostname = hostname.replace("www.", "")
                if tag.get("name") == "operating-system":
                    os = tag.text.split("\n")
        if hostIP:
            # apparently nessus doesn't always have an IP to work with...
            if hostname:
                display("Gathering Nessus info for {} ( {} )".format(
                    hostIP, hostname))
            else:
                display("Gathering Nessus info for {}".format(hostIP))
            created, ip = self.IPAddress.find_or_create(ip_address=hostIP)
            if hostname:
                created, domain = self.Domain.find_or_create(
                    domain=hostname)
                # Link the IP to the domain if not already associated.
                if ip not in domain.ip_addresses:
                    ip.save()
                    domain.ip_addresses.append(ip)
                    domain.save()
            if os:
                # Accumulate distinct OS guesses into an " OR "-joined string.
                for o in os:
                    if not ip.OS:
                        ip.OS = o
                    else:
                        if o not in ip.OS.split(" OR "):
                            ip.OS += " OR " + o
            self.getVulns(ip, ReportHost)
            self.IPAddress.commit()
    return
def run(self, args):
    """Query Shodan for in-scope CIDRs, IPs and raw searches, tagging each
    processed scope item with this tool's name.

    Targets are gathered from the database (``--import_db``, optionally
    modified by ``--rescan`` / ``--fast`` / ``--cidr_only``) and/or from an
    explicit ``--target``.
    """
    ranges = []
    cidrs = []
    ips = []
    search = []
    if not args.api_key:
        display_error("You must supply an API key to use shodan!")
        return
    if args.search:
        search = [args.search]
    if args.import_db:
        if args.rescan:
            # Rescan: take everything in scope, even items already processed.
            if args.fast:
                search += [
                    "net:{}".format(c.cidr) for c in self.ScopeCidr.all()
                ]
            else:
                cidrs += [c.cidr for c in self.ScopeCidr.all()]
            if not args.cidr_only:
                ips += [
                    "{}".format(i.ip_address)
                    for i in self.IPAddress.all(scope_type="active")
                ]
        else:
            # Normal run: only items not yet tagged by this tool.
            if args.fast:
                search += [
                    "net:{}".format(c.cidr)
                    for c in self.ScopeCidr.all(tool=self.name)
                ]
            else:
                cidrs += [
                    c.cidr for c in self.ScopeCidr.all(tool=self.name)
                ]
            if not args.cidr_only:
                ips += [
                    "{}".format(i.ip_address)
                    for i in self.IPAddress.all(scope_type="active",
                                                tool=self.name)
                ]
    if args.target:
        if '/' not in args.target:
            ips += [args.target]
        elif args.fast:
            # Bug fix: a fast-mode CIDR target is a Shodan 'net:' search
            # query. It was previously appended to `cidrs`, where
            # IPNetwork("net:...") would raise on the prefix.
            search += ["net:{}".format(args.target)]
        else:
            cidrs += [args.target]

    def _eta(n):
        # Assuming ~1 query per second: (days, hours, minutes, seconds).
        # Bug fix: hours are now taken modulo 24 (they were modulo 60).
        return (n // 86400, (n // 3600) % 24, (n // 60) % 60, n % 60)

    for c in cidrs:
        ranges += [str(i) for i in IPNetwork(c)]
    ranges += ips
    ranges += search
    days, hours, mins, secs = _eta(len(ranges))
    display(
        "Doing a total of {} queries. Estimated time: {} days, {} hours, {} minutes and {} seconds."
        .format(len(ranges), days, hours, mins, secs))
    for c in cidrs:
        ranges = [str(i) for i in IPNetwork(c)]
        days, hours, mins, secs = _eta(len(ranges))
        display(
            "Processing {} IPs. Estimated time: {} days, {} hours, {} minutes and {} seconds."
            .format(c, days, hours, mins, secs))
        for r in ranges:
            self.get_shodan(r, args)
        # created=True means the CIDR wasn't actually in scope: remove the
        # accidental record; otherwise just tag it as processed.
        created, cd = self.ScopeCidr.find_or_create(cidr=c)
        if created:
            cd.delete()
        else:
            cd.set_tool(self.name)
        self.ScopeCidr.commit()
    # Bug fix: the IP-phase estimate previously reused len(ranges) -- stale
    # data from the last CIDR loop iteration -- instead of len(ips).
    days, hours, mins, secs = _eta(len(ips))
    display(
        "Processing {} IPs. Estimated time: {} days, {} hours, {} minutes and {} seconds."
        .format(len(ips), days, hours, mins, secs))
    for i in ips:
        self.get_shodan(i, args)
        created, ip = self.IPAddress.find_or_create(ip_address=i)
        if created:
            ip.delete()
        else:
            ip.set_tool(self.name)
        self.IPAddress.commit()
    for s in search:
        self.get_shodan(s, args)
        if s[:4] == "net:":
            created, cd = self.ScopeCidr.find_or_create(cidr=s[4:])
            if created:
                cd.delete()
            else:
                cd.set_tool(self.name)
            self.ScopeCidr.commit()
def get_shodan(self, r, args):
    """Query Shodan for *r* and ingest the results into the database.

    If *r* contains ':' it is treated as a Shodan search query (e.g.
    'net:10.0.0.0/24') and paged through /shodan/host/search; otherwise it
    is a single IP fetched from /shodan/host/{ip}. Discovered IPs and ports
    are stored, and domains found in SSL certificates (SAN, CN) or
    hostnames are added as well.
    """
    api_host_url = "https://api.shodan.io/shodan/host/{}?key={}"
    api_search_url = (
        "https://api.shodan.io/shodan/host/search?key={}&query={}&page={}")
    time.sleep(1)  # stay under Shodan's rate limit
    if ":" in r:
        display("Doing Shodan search: {}".format(r))
        try:
            results = json.loads(
                requests.get(api_search_url.format(args.api_key, r, 1)).text)
            if results.get(
                    "error") and "request timed out" in results["error"]:
                display_warning(
                    "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                )
                results = json.loads(
                    requests.get(api_search_url.format(args.api_key, r,
                                                       1)).text)
        except Exception as e:
            display_error("Something went wrong: {}".format(e))
            # Bug fix: was a bare 'next' (a no-op expression), which let
            # execution continue into code referencing the undefined
            # 'results'. Bail out instead.
            return
        total = len(results["matches"])
        matches = []
        i = 1
        while total > 0:
            display("Adding {} results from page {}".format(total, i))
            matches += results["matches"]
            i += 1
            try:
                time.sleep(1)
                results = json.loads(
                    requests.get(api_search_url.format(args.api_key, r,
                                                       i)).text)
                if (results.get("error")
                        and "request timed out" in results["error"]  # noqa: W503
                    ):
                    display_warning(
                        "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                    )
                    # Bug fix: retry the *current* page i (previously it
                    # re-fetched page 1, duplicating old results).
                    results = json.loads(
                        requests.get(
                            api_search_url.format(args.api_key, r, i)).text)
                total = len(results["matches"])
            except Exception as e:
                display_error("Something went wrong: {}".format(e))
                total = 0  # stop paging; leftover pdb.set_trace() removed
        domains = []
        for res in matches:
            ip_str = res["ip_str"]
            port_str = res["port"]
            transport = res["transport"]
            display("Processing IP: {} Port: {}/{}".format(
                ip_str, port_str, transport))
            created, IP = self.IPAddress.find_or_create(ip_address=ip_str)
            IP.meta["shodan_data"] = results
            created, port = self.Port.find_or_create(ip_address=IP,
                                                     port_number=port_str,
                                                     proto=transport)
            if created:
                # Best-effort service guess for newly discovered ports only.
                if res.get("ssl", False):
                    svc = "https"
                elif res.get("http", False):
                    svc = "http"
                else:
                    svc = ""
                port.service_name = svc
            port.status = "open"
            port.meta["shodan_data"] = res
            port.save()
            # Harvest domains from certificate SANs, the CN and hostnames.
            if res.get("ssl", {}).get('cert', {}).get('extensions'):
                for d in res['ssl']['cert']['extensions']:
                    if d['name'] == 'subjectAltName':
                        # Bug fix: parse the extension payload d['data']
                        # (it previously parsed d['name'], i.e. the literal
                        # string 'subjectAltName', yielding nothing).
                        domains += get_domains_from_data(d['data'])
            if res.get("ssl", {}).get('cert', {}).get(
                    'subject', {}
            ).get('CN') and '*' not in res['ssl']['cert']['subject']['CN']:
                domains.append(res['ssl']['cert']['subject']['CN'])
            if res.get('hostnames'):
                domains += res['hostnames']
        for d in list(set(domains)):
            display("Adding discovered domain {}".format(only_valid(d)))
            created, domain = self.Domain.find_or_create(
                domain=only_valid(d))
    else:
        display("Searching for {}".format(r))
        try:
            results = json.loads(
                requests.get(api_host_url.format(r, args.api_key)).text)
        except Exception as e:
            display_error("Something went wrong: {}".format(e))
            # Bug fix: was a bare 'next'; without returning here, the code
            # below would reference the undefined 'results'.
            return
        if results.get("data", False):
            display("{} results found for: {}".format(
                len(results["data"]), r))
            domains = []
            for res in results["data"]:
                ip_str = res["ip_str"]
                port_str = res["port"]
                transport = res["transport"]
                display("Processing IP: {} Port: {}/{}".format(
                    ip_str, port_str, transport))
                created, IP = self.IPAddress.find_or_create(
                    ip_address=ip_str)
                IP.meta["shodan_data"] = results
                created, port = self.Port.find_or_create(
                    ip_address=IP, port_number=port_str, proto=transport)
                if created:
                    if res.get("ssl", False):
                        svc = "https"
                    elif res.get("http", False):
                        svc = "http"
                    else:
                        svc = ""
                    port.service_name = svc
                port.status = "open"
                port.meta["shodan_data"] = res
                port.save()
                if res.get("ssl", {}).get('cert', {}).get('extensions'):
                    for d in res['ssl']['cert']['extensions']:
                        if d['name'] == 'subjectAltName':
                            domains += get_domains_from_data(d['data'])
                            display(
                                "Domains discovered in subjectAltName: {}".
                                format(", ".join(
                                    get_domains_from_data(d['data']))))
                if res.get("ssl", {}).get('cert', {}).get(
                        'subject', {}).get('CN') and '*' not in res['ssl'][
                            'cert']['subject']['CN']:
                    domains.append(res['ssl']['cert']['subject']['CN'])
                if res.get('hostnames'):
                    domains += res['hostnames']
            for d in list(set(domains)):
                display("Adding discovered domain {}".format(d))
                created, domain = self.Domain.find_or_create(domain=d)
def run(self, args):
    """Import, launch, or download Nessus scan data.

    --import_file: parse one or more local .nessus exports.
    --launch: start a scan over all active-scope IPs, CIDRs and domains.
    --download: export a finished scan by job id, then import it.
    """
    if args.import_file:
        for nFile in args.import_file:
            self.process_data(nFile, args)
    elif args.launch:
        # Bug fix: username/password/host are jointly required, so error out
        # when ANY of them is missing. The original 'and'-chain (which also
        # included the optional uuid/policy_id/folder_id) only fired when
        # every option was absent, letting half-configured runs through.
        if not args.username or not args.password or not args.host:
            display_error(
                "You must supply a username, password, and host to launch a Nessus job"
            )
        else:
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
                uuid=args.uuid,
                policy_id=args.policy_id,
                folder_id=args.folder_id,
            )
            ips = [
                ip.ip_address
                for ip in self.IPAddress.all(scope_type="active",
                                             tool=self.name)
            ]
            cidrs = [
                cidr.cidr for cidr in self.ScopeCIDR.all(tool=self.name)
            ]
            domains = [
                domain.domain
                for domain in self.Domain.all(scope_type="active",
                                              tool=self.name)
            ]
            targets = ", ".join(merge_ranges(ips + cidrs) + domains)
            res = n.launch_job(targets, args.job_name)
            display("New Nessus job launched with ID {}".format(res))
            display(
                "Remember this number! You'll need it to download the job once it is done."
            )
    elif args.download:
        # Bug fix: same 'and' -> 'or' correction; all four fields are needed.
        if (not args.username or not args.password  # noqa: W503
                or not args.host or not args.job_id):  # noqa: W503
            display_error(
                "You must supply host, username, password and job_id to download a report to import"
            )
        else:
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
            )
            # A leading '/' is treated as project-relative, not absolute.
            if args.output_path[0] == "/":
                output_path = os.path.join(
                    self.base_config["PROJECT"]["base_path"],
                    args.output_path[1:])
            else:
                output_path = os.path.join(
                    self.base_config["PROJECT"]["base_path"],
                    args.output_path)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            output_path = os.path.join(
                output_path,
                "Nessus-export-{}.nessus".format(int(time.time())))
            n.export_file(args.job_id, output_path)
            self.process_data(output_path, args)
def run(self, args):
    """Run the asnlookup helper for a keyword and import the discovered IPv4
    ranges as ScopeCIDRs, skipping ranges already covered by the DB."""
    if not args.keyword:
        display_error("You need to supply a keyword to search for.")
        return
    if not args.binary:
        self.binary = which.run(self.binary_name)
    else:
        self.binary = args.binary
    if not self.binary:
        display_error(
            "Asnlookup binary not found. Please explicitly provide path with --binary"
        )
        # Bug fix: previously fell through and crashed later on os.chdir("").
        return
    # A leading '/' is treated as project-relative, not absolute.
    if args.output_path[0] == "/":
        output_path = os.path.join(
            self.base_config["PROJECT"]["base_path"], 'output',
            args.output_path[1:])
    else:
        output_path = os.path.join(
            self.base_config["PROJECT"]["base_path"], 'output',
            args.output_path)
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    command_args = " -o {} --output {} ".format(args.keyword, output_path)
    current_dir = os.getcwd()
    # asnlookup expects to be run from its own directory.
    new_dir = "/".join(self.binary.split("/")[:-1])
    os.chdir(new_dir)
    try:
        cmd = shlex.split("python3 " + self.binary + command_args)
        print("Executing: %s" % " ".join(cmd))
        subprocess.Popen(cmd).wait()
    finally:
        # Bug fix: restore the working directory even if the tool fails.
        os.chdir(current_dir)
    # Bug fix: close the results file instead of leaking the handle.
    with open(
            os.path.join(output_path,
                         "{}_ipv4.txt".format(args.keyword))) as res_file:
        ip_ranges = res_file.read().split('\n')
    for r in ip_ranges:
        if r:
            display("Processing {}".format(r))
            # Only add the range if no existing CIDR already contains it.
            current_cidrs = [c.cidr for c in self.ScopeCIDRs.all()]
            new_cidr = True
            for nc in current_cidrs:
                if IPNetwork(r) in IPNetwork(nc):
                    new_cidr = False
            if new_cidr:
                created, SC = self.ScopeCIDRs.find_or_create(cidr=r)
                if created:
                    display_new(
                        "New CIDR added to ScopeCIDRS: {}".format(r))
    self.ScopeCIDRs.commit()
def run(self, args):
    """Scrape LinkedIn (via LinkedInt) for employees of a domain, optionally
    narrowing the searches to the most common job-title keywords."""
    if not args.binary:
        self.binary = which.run("LinkedInt.py")
    else:
        self.binary = which.run(args.binary)
    if not self.binary:
        display_error(
            "LinkedInt binary not found. Please explicitly provide path with --binary"
        )
        # Bug fix: previously kept running without a usable binary.
        return
    if args.domain:
        created, domain = self.BaseDomain.find_or_create(
            domain=args.domain)
        if args.top:
            # Build the list of the most common leading words of
            # "<title> at <company>"-style job titles already in the DB.
            titles = [
                user.job_title.split(" at ")[0] for user in domain.users
                if user.job_title
            ]
            words = []
            for t in titles:
                words += [w.lower() for w in get_words(t)]
            word_count = Counter(words).most_common()
            display("Using the top %s words:" % args.top)
            res = []
            for w in word_count[:int(args.top)]:
                display("\t{}\t{}".format(w[0], w[1]))
                res.append(w[0])
            args.smart_shuffle = ",".join(res)
        if args.auto_keyword:
            if not args.top:
                display_error(
                    "You must specify the top number of keywords using --top"
                )
            else:
                # A blacklist file persists already-searched keywords across
                # runs so repeated invocations skip them.
                if os.path.isfile('/tmp/armory_linkedinsearchqueries'):
                    with open('/tmp/armory_linkedinsearchqueries') as bl:
                        blacklist = bl.read().split('\n')
                else:
                    blacklist = []
                # Bug fix: the append handle is now closed via a context
                # manager even if process_domain raises mid-loop.
                with open('/tmp/armory_linkedinsearchqueries',
                          'a') as bfile:
                    for w in args.smart_shuffle.split(','):
                        if w not in blacklist:
                            args.keywords = w
                            self.process_domain(domain, args)
                            self.BaseDomain.commit()
                            bfile.write('{}\n'.format(w))
                        else:
                            display(
                                "Skipped {} due to it already being searched.".
                                format(w))
        elif args.smart_shuffle:
            # First pass matches any keyword; second pass excludes them all
            # to catch the remaining employees.
            args.keywords = " OR ".join(
                ['"{}"'.format(i) for i in args.smart_shuffle.split(",")])
            self.process_domain(domain, args)
            self.BaseDomain.commit()
            args.keywords = " AND ".join(
                ['-"{}"'.format(i) for i in args.smart_shuffle.split(",")])
            self.process_domain(domain, args)
            self.BaseDomain.commit()
        else:
            self.process_domain(domain, args)
            self.BaseDomain.commit()
        self.BaseDomain.commit()
def find_or_create(self,
                   ip_str,
                   only_tool=False,
                   in_scope=False,
                   passive_scope=True,
                   label=None,
                   force_cidr=None,
                   **kwargs):
    """Find or create the CIDR record containing *ip_str*.

    Resolution order: an explicitly forced CIDR (*force_cidr* + *label*),
    then known private subnets, then CIDRs already in the database, and
    finally a whois lookup (falling back to a /24 when whois fails).

    Returns:
        (created, cidr) -- the flag and the most specific matching record.
    """
    res = False
    if label and force_cidr:
        res = ([force_cidr, label], )
    # RFC1918/reserved ranges never need a whois lookup.
    for cidr in private_subnets:
        if IPAddress(ip_str) in cidr:
            res = ([str(cidr), "Non-Public Subnet"], )
    # A stored CIDR covering this IP also short-circuits whois.
    for cidr in CIDRRepository(self.db, "").all():
        if IPAddress(ip_str) in IPNetwork(cidr.cidr):
            res = ([str(cidr.cidr), cidr.org_name], )
            display("Subnet already in database, not rechecking whois.")
    if res:
        cidr_data = res
    else:
        while True:
            try:
                res = IPWhois(ip_str).lookup_whois(get_referral=True)
            except Exception:
                try:
                    res = IPWhois(ip_str).lookup_whois()
                except Exception as e:
                    display_error(
                        "Error trying to resolve whois: {}".format(e))
                    res = {}
            if res.get('nets', []):
                break
            else:
                display_warning(
                    "The networks didn't populate from whois. Defaulting to a /24."
                )
                res = {
                    'nets': [{
                        'cidr':
                        '{}.0/24'.format('.'.join(ip_str.split('.')[:3])),
                        'description':
                        'Whois failed to resolve.'
                    }]
                }
                break
        cidr_data = []
        # Whois may return several comma-separated CIDRs per net entry.
        for n in res["nets"]:
            if "," in n["cidr"]:
                for cidr_str in n["cidr"].split(", "):
                    cidr_data.append([cidr_str, n["description"]])
            else:
                cidr_data.append([n["cidr"], n["description"]])
        # Keep only entries that actually contain the IP (this also proves
        # each CIDR parses cleanly).
        cidr_data = [
            cidr_d for cidr_d in cidr_data
            if IPAddress(ip_str) in IPNetwork(cidr_d[0])
        ]
    if cidr_data:
        # Bug fix: the selection of the most specific (smallest) network no
        # longer goes through a try/except holding an active pdb.set_trace()
        # debug hook (which also left cidr_len undefined on failure).
        matching_cidr = min(cidr_data, key=lambda c: len(IPNetwork(c[0])))
        display("Processing CIDR from whois: %s - %s" %
                (str(matching_cidr[1]).split('\n')[0], matching_cidr[0]))
        created, cidr = super(CIDRRepository,
                              self).find_or_create(only_tool,
                                                   cidr=matching_cidr[0])
        if created:
            display_new("CIDR %s added to database" % cidr.cidr)
            cidr.org_name = str(matching_cidr[1]).split('\n')[0]
            cidr.update()
        return created, cidr
def find_or_create(self,
                   only_tool=False,
                   in_scope=False,
                   passive_scope=False,
                   **kwargs):
    """Find or create a (sub)domain record, propagating scope flags.

    New subdomains inherit scope from the call site, then from their base
    domain if it already exists, and finally from the IPs they resolve to.

    NOTE(review): 'created' is reassigned by the base-domain and IP lookups
    below, so the returned flag may not reflect whether *this* domain was
    newly created -- confirm callers don't rely on it.
    """
    created, d = super(DomainRepository,
                       self).find_or_create(only_tool, **kwargs)
    display("Processing %s" % d.domain)
    if created:
        # If this is a new subdomain, set scoping info based on what is passed to the function initially.
        d.in_scope = in_scope
        d.passive_scope = passive_scope
        # Registered domain + suffix (tldextract drops the subdomain part).
        base_domain = ".".join(
            [t for t in tldextract.extract(d.domain)[1:] if t])
        BaseDomains = BaseDomainRepository(self.db, "")
        # If the base domain is new, it'll inherit the same scoping permissions.
        created, bd = BaseDomains.find_or_create(
            only_tool,
            passive_scope=d.passive_scope,
            in_scope=in_scope,
            domain=base_domain,
        )
        if created:
            display_new(
                "The base domain %s is being added to the database. Active Scope: %s Passive Scope: %s"
                % (base_domain, bd.in_scope, bd.passive_scope))
        else:
            # If the base domain already exists, then the subdomain inherits the scope info from the base domain.
            d.passive_scope = bd.passive_scope
            d.in_scope = bd.in_scope
        d.base_domain = bd
        # Get all IPs that this domain resolves to.
        #use utility....
        ips = get_ip(d.domain)
        if not ips:
            display_warning("No IPs discovered for %s" % d.domain)
        for i in ips:
            IPAddresses = IPRepository(self.db, "")
            # ':' in the address means IPv6 -- skipped here.
            if ':' not in i:
                display("Processing IP address %s" % i)
                created, ip = IPAddresses.find_or_create(
                    only_tool,
                    in_scope=d.in_scope,
                    passive_scope=d.passive_scope,
                    ip_address=i,
                )
                # If the IP is in scope, then the domain should be
                if ip.in_scope:
                    d.in_scope = ip.in_scope
                    ip.passive_scope = True
                    d.passive_scope = True
                    # display("%s marked active scope due to IP being marked active." % d.domain)
                elif ip.passive_scope:
                    d.passive_scope = ip.passive_scope
                d.ip_addresses.append(ip)
        display_new(
            "%s is being added to the database. Active Scope: %s Passive Scope: %s"
            % (d.domain, d.in_scope, d.passive_scope))
    # Final sanity check - if a domain is active scoped, it should also be passively scoped.
    if d.in_scope:
        d.passive_scope = True
    return created, d