def process_domain(self, domain_str):
    """Ensure a Domain record exists for *domain_str* with this module's scoping.

    Creates the domain with the module's active/passive scope flags; if it
    already exists with different scoping, overwrites both flags (and the
    matching BaseDomain, when the domain *is* a base domain).
    """
    domain, created = Domain.objects.get_or_create(
        name=domain_str,
        defaults={
            "active_scope": self.active_scope,
            "passive_scope": self.passive_scope,
        },
    )
    if created:
        return
    scope_differs = (
        domain.active_scope != self.active_scope
        or domain.passive_scope != self.passive_scope
    )
    if scope_differs:
        display(
            "Domain %s already exists with different scoping. Updating to Active Scope: %s Passive Scope: %s"
            % (domain_str, self.active_scope, self.passive_scope))
        domain.active_scope = self.active_scope
        domain.passive_scope = self.passive_scope
        domain.save()
        # A domain like "example.com" is also its own base domain; keep the
        # BaseDomain row's scoping in sync.
        if domain.basedomain.name == domain.name:
            display(
                "Name also matches a base domain. Updating that as well."
            )
            base = domain.basedomain
            base.active_scope = self.active_scope
            base.passive_scope = self.passive_scope
            base.save()
def process_ip(self, ip_str, force_scope=True):
    """Ensure an IPAddress record exists for *ip_str* with this module's scoping.

    Returns the IPAddress instance.
    NOTE(review): force_scope is never referenced in the body — confirm whether
    the else-branch below was meant to be gated on it.
    """
    ip, created = IPAddress.objects.get_or_create(
        ip_address=ip_str,
        defaults={
            "active_scope": self.active_scope,
            "passive_scope": self.passive_scope,
        },
    )
    if not created:
        # Pre-existing record: force it to match this module's scope flags.
        if (ip.active_scope != self.active_scope
                or ip.passive_scope != self.passive_scope):
            display(
                "IP %s already exists with different scoping. Updating to Active Scope: %s Passive Scope: %s"
                % (ip_str, self.active_scope, self.passive_scope))
            ip.active_scope = self.active_scope
            ip.passive_scope = self.passive_scope
            ip.save()
    else:
        # Freshly created record: widen scope flags where requested.
        # NOTE(review): get_or_create already applied the defaults above, so
        # this condition looks unreachable; the assignments are also never
        # ip.save()d — confirm intent.
        if (self.active_scope and not ip.active_scope) or (self.passive_scope
                                                           and not ip.passive_scope):
            if self.active_scope:
                ip.active_scope = self.active_scope
            if self.passive_scope:
                ip.passive_scope = self.passive_scope
            display_new(
                "Updating %s to match forced scope. Updating to Active Scope: %s Passive Scope: %s"
                % (ip_str, self.active_scope, self.passive_scope))
    return ip
def run_cmd_noout(cmd_data):
    """Run an external command, redirecting stdout/stderr to a file.

    *cmd_data* is a (cmd, output_path) pair, where cmd is the argument list
    with a timeout (seconds, or falsy for "no timeout") appended as its last
    element. Returns cmd_data unchanged so pool.map callers can correlate
    results with their inputs.
    """
    cmd = cmd_data[0]
    output = cmd_data[1]
    c = cmd[:-1]
    timeout = cmd[-1]
    display("Executing command: %s" % " ".join(c))
    current_time = time.time()
    # BUG FIX: use a context manager so the output file is closed even if
    # Popen raises or the process is terminated (was a raw open/close pair).
    with open(output, 'w') as f:
        if timeout:
            process = Popen(c, stdout=f, stderr=STDOUT)
            # Poll in 5-second increments until the deadline passes or the
            # process exits on its own.
            while time.time() < current_time + timeout and process.poll() is None:
                time.sleep(5)
            if process.poll() is None:
                display_error(
                    "Timeout of %s reached. Aborting thread for command: %s"
                    % (timeout, " ".join(c))
                )
                process.terminate()
        else:
            Popen(c, stdout=f, stderr=STDOUT).wait()
    return cmd_data
def process_cidr(self, line, label=None):
    """Insert a CIDR — or an IP range, converted to CIDRs — into the database."""
    display("Processing %s" % line)
    entry = line.strip()
    if "/" in line:
        cidr, created = CIDR.objects.get_or_create(
            name=entry,
            defaults={
                "org_name": label,
                "active_scope": True,
                "passive_scope": True,
            },
        )
        if created:
            display_new("Adding %s to Active CIDRs in database" % entry)
    elif "-" in line:
        start_ip, end_ip = entry.replace(" ", "").split("-")
        # Shorthand ranges like "10.0.0.1-20" only give the final octet of
        # the end address; rebuild it from the start address's first octets.
        if "." not in end_ip:
            end_ip = ".".join(start_ip.split(".")[:3] + [end_ip])
        for block in iprange_to_cidrs(start_ip, end_ip):
            cidr, created = CIDR.objects.get_or_create(
                name=str(block),
                defaults={"active_scope": True, "passive_scope": True},
            )
            if created:
                display_new("Adding %s to Active CIDRs in database" % entry)
def run_cmd(cmd):
    """Run an external command with an optional timeout.

    *cmd* is the argument list with a timeout (seconds, or falsy for "no
    timeout") appended as its last element. Returns cmd unchanged so
    pool.map callers can correlate results with their inputs.
    """
    # (Removed a block of commented-out quoting code that was dead weight.)
    c = cmd[:-1]
    timeout = cmd[-1]
    display("Executing command: %s" % " ".join(c))
    current_time = time.time()
    if timeout:
        process = Popen(c)
        # Poll in 5-second increments until the deadline passes or the
        # process exits on its own.
        while time.time() < current_time + timeout and process.poll() is None:
            time.sleep(5)
        if process.poll() is None:
            display_error(
                "Timeout of %s reached. Aborting thread for command: %s"
                % (timeout, " ".join(c))
            )
            process.terminate()
    else:
        Popen(c).wait()
    return cmd
def process_output(self, cmds):
    """Parse theHarvester XML output files and load e-mails/hosts into the DB.

    Each entry in *cmds* names an output file ("<output>.xml"); missing or
    malformed files are skipped best-effort.
    """
    for cmd in cmds:
        try:
            # BUG FIX: close the file handle (was open(...).read()).
            with open(cmd["output"] + ".xml") as fh:
                data = xmltodict.parse(fh.read())
        except Exception:
            # Best-effort: tool may have produced no/invalid output.
            data = None
        if not data or not data.get('theHarvester'):
            continue
        results = data["theHarvester"]
        for e in self._as_list(results.get("email", False)):
            display("Processing E-mail: {}".format(e))
            domain, created = BaseDomain.objects.get_or_create(
                name=e.split("@")[1])
            user, created = User.objects.get_or_create(
                email=e, domain=domain)
            user.save()
            if created:
                display_new("New email: %s" % e)
        # "host" and "vhost" entries are handled identically: each is either
        # a plain hostname string or a dict with a "hostname" key.
        for key in ("host", "vhost"):
            for d in self._as_list(results.get(key, False)):
                hostname = d if isinstance(d, str) else d["hostname"]
                domain, created = Domain.objects.get_or_create(
                    name=hostname)

def _as_list(self, value):
    """Normalize xmltodict's single-element collapse back into a list."""
    if not value:
        return []
    return value if isinstance(value, list) else [value]
def descope_cidr(self, cidr):
    """Mark a CIDR out of scope and descope every active IP inside it.

    *cidr* is the CIDR string (e.g. "10.0.0.0/24"); unknown CIDRs are a no-op.
    """
    # BUG FIX: .get() raises DoesNotExist on a missing row, defeating the
    # `if c:` guard below — use filter().first() so a miss yields None.
    c = CIDR.objects.filter(name=cidr).first()
    if c:
        display("Unscoping {} from CIDRs".format(c.name))
        c.active_scope = False
        # BUG FIX: the flag was assigned but never persisted.
        c.save()
        cnet = IPNetwork(cidr)
        for ip in IPAddress.objects.filter(active_scope=True):
            # nIPAddress is the netaddr IPAddress (aliased to avoid the model).
            if nIPAddress(ip.ip_address) in cnet:
                self.descope_ip(ip.ip_address)
def process_output(self, cmds):
    """Collect xsscrapy result files, group findings per host, and attach
    the written output paths to each host's Port metadata."""
    for c in cmds:
        get_urls.add_tool_url(c['target'], self.name, self.args.tool_args)
    # Xsscrapy dumps results in its current directory.
    hosts = {}
    result_files = [
        g for g in glob.glob(os.path.dirname(self.binary) + '/*.txt')
        if 'requirements.txt' not in g
    ]
    for path in result_files:
        # BUG FIX: close the file handle (was open(...).read()).
        with open(path) as fh:
            res = fh.read()
        if res[1:4] == 'URL':
            # This looks like results
            data = res[1:].split('\n\n')
            for d in data:
                host = d.split('\n')[0].split(' ')[1]
                hosts.setdefault(host, []).append(d)
            # Remove the consumed result file. NOTE(review): original
            # indentation was ambiguous — deleting only *parsed* files here;
            # confirm non-result .txt files should be left alone.
            os.unlink(path)
    for h, v in hosts.items():
        display_new("Found data for {}".format(h))
        output = os.path.join(
            self.output_path,
            "{}.txt".format(h.replace(':', '_').replace('/', '_')))
        # BUG FIX: the output file was never closed.
        with open(output, 'w') as out:
            for d in v:
                display("URL: {}".format(d.split('\n')[0].split(' ')[1]))
                out.write(d + '\n\n')
        port = get_urls.get_port_object(h)
        if not port:
            display_warning(f"Port object for {h} not found")
        else:
            if not port.meta.get('Xsscrapy'):
                port.meta['Xsscrapy'] = {}
            if not port.meta['Xsscrapy'].get(h):
                port.meta['Xsscrapy'][h] = []
            if output not in port.meta['Xsscrapy'][h]:
                port.meta['Xsscrapy'][h].append(output)
            port.save()
    display_warning(
        "There is currently no post-processing for this module. For the juicy results, refer to the output file paths."
    )
def process_urls(data):
    """Fetch one URL and return ({url: [header lines]}, {url: cookie dict}).

    *data* is a (url, timeout) pair. Errors are reported and swallowed so a
    thread-pool worker never dies; the dicts are then simply empty.
    """
    # silence insecure url warnings
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    u, timeout = data
    # Headers with no fingerprinting value — skipped from the results.
    blacklist = [
        "Date",
        "Connection",
        "Content-Type",
        "Content-Length",
        "Keep-Alive",
        "Content-Encoding",
        "Vary",
    ]
    new_headers = {}
    new_cookies = {}
    display("Processing %s" % u)
    try:
        res = requests.get(
            u,
            timeout=int(timeout),
            verify=False,
            headers={
                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 14.5; rv:10.0) Gecko/20100101 Firefox/89.0"
            })
        res.raise_for_status()
        for k in res.headers.keys():
            if k not in blacklist:
                new_headers.setdefault(u, []).append(
                    "{}: {}".format(k, res.headers[k]))
        new_cookies[u] = dict(res.cookies)
    except requests.exceptions.HTTPError as http_error:
        display_error("Http Error: {}".format(http_error))
    except requests.exceptions.ConnectionError as connect_error:
        display_error("Error Connecting: {}".format(connect_error))
    except KeyboardInterrupt:
        display_warning("Got Ctrl+C, exiting")
        sys.exit(1)
    except Exception as e:
        display_error("{} no good, skipping: {}".format(u, e))
    return (new_headers, new_cookies)
def process_output(self, cmds):
    """Record FFuF output file paths against the IP/Domain and Port targeted."""
    for cmd in cmds:
        target = cmd['target']
        proto = target.split('/')[0]
        url = target.split('/')[2]
        # Pull an explicit port out of the netloc, else infer from scheme.
        if ':' in url:
            port_num = url.split(':')[1]
            url = url.split(':')[0]
        elif proto == 'http:':
            port_num = "80"
        elif proto == 'https:':
            port_num = "443"
        else:
            port_num = "0"
        # BUG FIX: the original wrapped the whole branch in a bare `except:`,
        # which mis-routed *any* error (including DB failures) into the
        # "domain" branch. Only the numeric probe belongs in the try.
        try:
            # An all-numeric dotted host is an IP address.
            [int(i) for i in url.split('.')]
            is_ip_target = True
        except ValueError:
            is_ip_target = False
        if is_ip_target:
            ip, created = IPAddress.objects.get_or_create(
                ip_address=url, defaults={'active_scope': True})
            ip.add_tool_run(tool=self.name,
                            args="{}-{}".format(port_num,
                                                self.args.tool_args))
        else:
            display("Domain found: {}".format(url))
            domain, created = Domain.objects.get_or_create(name=url)
            domain.add_tool_run(tool=self.name,
                                args="{}-{}".format(port_num,
                                                    self.args.tool_args))
        # Scheme is irrelevant to the lookup; only host:port matter.
        port = get_urls.get_port_object("blah://{}:{}".format(url, port_num))
        if port:
            if not port.meta.get('FFuF'):
                port.meta['FFuF'] = []
            port.meta['FFuF'].append(cmd['output'])
            port.save()
def descope_ip(self, ip):
    """Remove an IP from scope; descope any domain left with no scoped IPs.

    *ip* is the IP address string; unknown addresses are a no-op.
    """
    # BUG FIX: the original called .get() and then iterated the single model
    # instance it returns (a TypeError), and raised on a missing row.
    # filter() yields an iterable queryset that is simply empty on a miss.
    for i in IPAddress.objects.filter(ip_address=ip):
        display("Removing IP {} from scope".format(i.ip_address))
        i.active_scope = False
        i.passive_scope = False
        i.save()
        # Saved first so the scan below sees this IP's cleared flags.
        for d in i.domains:
            active_scope_ips = [
                ipa for ipa in d.ip_addresses
                if ipa.active_scope or ipa.passive_scope
            ]
            if not active_scope_ips:
                display(
                    "Domain {} has no more scoped IPs. Removing from scope."
                    .format(d.name))
                d.active_scope = False
                d.passive_scope = False
                # BUG FIX: the domain's cleared flags were never persisted.
                d.save()
def run(self, args):
    """Drive LinkedInt searches for a domain, optionally seeding keywords
    from the most common words in already-collected job titles."""
    # pdb.set_trace()
    # Locate the LinkedInt script unless one was given explicitly.
    if not args.binary:
        self.binary = which.run("LinkedInt.py")
    else:
        self.binary = args.binary
    if not self.binary:
        display_error(
            "LinkedInt binary not found. Please explicitly provide path with --binary"
        )
        # NOTE(review): execution continues even though self.binary is unset —
        # consider returning here; confirm downstream behavior.
    if args.domain:
        domain, created = BaseDomain.objects.get_or_create(name=args.domain)
        if args.top:
            # Build a keyword list from the N most frequent words found in
            # job titles of users already tied to this domain.
            titles = [
                user.job_title.split(" at ")[0]
                for user in domain.user_set.all()
                if user.job_title
            ]
            words = []
            for t in titles:
                words += [w.lower() for w in get_words(t)]
            word_count = Counter(words).most_common()
            display("Using the top %s words:" % args.top)
            res = []
            for w in word_count[: int(args.top)]:
                display("\t{}\t{}".format(w[0], w[1]))
                res.append(w[0])
            # pdb.set_trace()
            args.smart_shuffle = ",".join(res)
        if args.auto_keyword:
            if not args.top:
                display_error("You must specify the top number of keywords using --top")
            else:
                # Track previously-searched keywords in a flat blacklist file
                # so repeated runs don't redo queries.
                if os.path.isfile('/tmp/armory_linkedinsearchqueries'):
                    blacklist = open('/tmp/armory_linkedinsearchqueries').read().split('\n')
                else:
                    blacklist = []
                bfile = open('/tmp/armory_linkedinsearchqueries', 'a')
                for w in args.smart_shuffle.split(','):
                    if w not in blacklist:
                        args.keywords = w
                        self.process_domain(domain, args)
                        bfile.write('{}\n'.format(w))
                    else:
                        display("Skipped {} due to it already being searched.".format(w))
                bfile.close()
        elif args.smart_shuffle:
            # Two passes: first OR all keywords together, then exclude them
            # all to catch the remainder.
            args.keywords = " OR ".join(
                ['"{}"'.format(i) for i in args.smart_shuffle.split(",")]
            )
            self.process_domain(domain, args)
            args.keywords = " AND ".join(
                ['-"{}"'.format(i) for i in args.smart_shuffle.split(",")]
            )
            self.process_domain(domain, args)
        else:
            self.process_domain(domain, args)
def run(self, args):
    """Build the Shodan work list (searches, CIDRs, single IPs) from args
    and the database, then feed each item to get_shodan().

    Queries are throttled to ~1/second by get_shodan, hence the time
    estimates printed below.
    """
    ranges = []
    cidrs = []
    ips = []
    search = []
    if not args.api_key:
        display_error("You must supply an API key to use shodan!")
        return
    if args.search:
        search = [args.search]
    if args.import_db:
        if args.rescan:
            # Rescan: take everything in active scope regardless of whether
            # this tool has already run against it.
            if args.fast:
                # "fast" mode queries whole networks via search instead of
                # enumerating every address.
                search += [
                    "net:{}".format(c.name)
                    for c in CIDR.get_set(scope_type="active")
                ]
            else:
                cidrs += [
                    c.name for c in CIDR.get_set(scope_type="active")
                ]
                if not args.cidr_only:
                    ips += [
                        "{}".format(i.ip_address)
                        for i in IPAddress.get_set(scope_type="active")
                    ]
        else:
            if args.fast:
                search += [
                    "net:{}".format(c.name)
                    for c in CIDR.get_set(scope_type="active")
                ]
            else:
                cidrs += [
                    c.name for c in CIDR.get_set(scope_type="active")
                ]
                if not args.cidr_only:
                    # tool=self.name filters out IPs this tool already scanned.
                    ips += [
                        "{}".format(i.ip_address)
                        for i in IPAddress.get_set(scope_type="active",
                                                   tool=self.name)
                    ]
    if args.target:
        if '/' not in args.target:
            ips += [args.target]
        elif args.fast:
            # NOTE(review): a "net:" query string appended to cidrs (not
            # search) looks inconsistent with the fast-mode handling above —
            # confirm intent.
            cidrs += ["net:{}".format(args.target)]
        else:
            cidrs += [args.target]
    # First pass over cidrs just sizes the total workload for the estimate.
    for c in cidrs:
        ranges += [str(i) for i in IPNetwork(c)]
    ranges += ips
    ranges += search
    # NOTE(review): the "hours" component uses % 60 where % 24 looks
    # intended — confirm.
    display(
        "Doing a total of {} queries. Estimated time: {} days, {} hours, {} minutes and {} seconds."
        .format(len(ranges), int(len(ranges) / 24.0 / 60.0 / 60.0),
                int(len(ranges) / 60.0 / 60.0) % 60,
                int(len(ranges) / 60.0) % 60,
                len(ranges) % 60))
    for c in cidrs:
        # `ranges` is reused here as the per-CIDR address list.
        ranges = [str(i) for i in IPNetwork(c)]
        display(
            "Processing {} IPs. Estimated time: {} days, {} hours, {} minutes and {} seconds."
            .format(c, int(len(ranges) / 24.0 / 60.0 / 60.0),
                    int(len(ranges) / 60.0 / 60.0) % 60,
                    int(len(ranges) / 60.0) % 60,
                    len(ranges) % 60))
        for r in ranges:
            self.get_shodan(r, args)
        cd = CIDR.objects.filter(name=c)
        if cd:
            cd[0].add_tool_run(tool=self.name)
    # NOTE(review): this estimate reports len(ips) as the count but the time
    # components still use len(ranges) left over from the last CIDR — confirm.
    display(
        "Processing {} IPs. Estimated time: {} days, {} hours, {} minutes and {} seconds."
        .format(len(ips), int(len(ranges) / 24.0 / 60.0 / 60.0),
                int(len(ranges) / 60.0 / 60.0) % 60,
                int(len(ranges) / 60.0) % 60,
                len(ranges) % 60))
    for i in ips:
        self.get_shodan(i, args)
        ip = IPAddress.objects.all().filter(ip_address=i)
        if ip:
            ip[0].add_tool_run(tool=self.name)
    for s in search:
        self.get_shodan(s, args)
        # "net:<cidr>" searches map back to a CIDR record for bookkeeping.
        if s[:4] == "net:":
            cd = CIDR.objects.filter(name=s[4:])
            if cd:
                cd[0].add_tool_run(tool=self.name)
def run(self, args):
    """Collect HTTP headers/cookies for the requested URLs (CLI, file, or DB)
    in a thread pool, then store them on the matching Port records."""
    urls = []
    if args.url:
        urls.append(args.url)
    if args.file:
        for u in open(args.file).read().split("\n"):
            if u:
                urls.append(u)
    if args.import_db:
        # `run` here is the imported URL-listing helper, not this method.
        if args.rescan:
            urls += run(scope_type="active")
        else:
            urls += run(scope_type="active", tool=self.name)
    if not urls:
        return
    pool = ThreadPool(int(args.threads))
    data = [(u, args.timeout) for u in urls]
    results = pool.map(process_urls, data)
    display_new("Adding headers to the database")
    for headers, cookies in results:
        if not headers:
            continue
        # process_urls keys both dicts by the single URL it fetched.
        h = list(headers.keys())[0]
        dom, dom_type, scheme, port = get_url_data(h)
        display("Processing headers and cookies from URL {}".format(h))
        if dom_type == 'ip':
            ip, created = IPAddress.objects.get_or_create(ip_address=dom)
            ip.add_tool_run(tool=self.name)
            self._attach_port_data(ip, port, scheme, dom,
                                   headers[h], cookies.get(h, []))
        else:
            domain, created = Domain.objects.get_or_create(name=dom)
            domain.add_tool_run(tool=self.name)
            # A hostname may resolve to several IPs; record against each.
            for ip in domain.ip_addresses.all():
                self._attach_port_data(ip, port, scheme, dom,
                                       headers[h], cookies.get(h, []))

def _attach_port_data(self, ip, port, scheme, dom, header_list, cookie_data):
    """Store collected header lines and cookies in the (ip, port) Port meta."""
    p, created = Port.objects.get_or_create(
        ip_address=ip, port_number=port, service_name=scheme, proto="tcp")
    if not p.meta.get('headers'):
        p.meta['headers'] = {}
    p.meta['headers'][dom] = header_list
    if not p.meta.get('cookies'):
        p.meta['cookies'] = {}
    p.meta['cookies'][dom] = cookie_data
    p.save()
def run(self, args):
    """Run asnlookup for a keyword and import any new IPv4 CIDRs it reports."""
    if not args.keyword:
        display_error("You need to supply a keyword to search for.")
        return
    if not args.binary:
        self.binary = which.run(self.binary_name)
    else:
        self.binary = args.binary
    if not self.binary:
        display_error(
            "Asnlookup binary not found. Please explicitly provide path with --binary"
        )
        # BUG FIX: previously fell through and crashed further down with an
        # unset binary path.
        return
    # Relative output paths live under the Armory base path's output dir.
    if args.output_path[0] == "/":
        output_path = os.path.join(self.base_config["ARMORY_BASE_PATH"],
                                   'output', args.output_path[1:])
    else:
        output_path = os.path.join(self.base_config["ARMORY_BASE_PATH"],
                                   'output', args.output_path)
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    command_args = " -o {} --output {} ".format(args.keyword, output_path)
    # asnlookup expects to be run from its own directory.
    current_dir = os.getcwd()
    new_dir = "/".join(self.binary.split("/")[:-1])
    os.chdir(new_dir)
    try:
        cmd = shlex.split("python3 " + self.binary + command_args)
        print("Executing: %s" % " ".join(cmd))
        subprocess.Popen(cmd).wait()
    finally:
        # BUG FIX: always restore the working directory, even if launching
        # the subprocess raises.
        os.chdir(current_dir)
    # BUG FIX: close the results file (was open(...).read()).
    with open(os.path.join(output_path,
                           "{}_ipv4.txt".format(args.keyword))) as fh:
        ip_ranges = fh.read().split('\n')
    for r in ip_ranges:
        if r:
            display("Processing {}".format(r))
            # Re-queried each iteration so CIDRs added below are seen by
            # later containment checks.
            current_cidrs = [c.name for c in CIDR.objects.all()]
            new_cidr = True
            for nc in current_cidrs:
                if IPNetwork(r) in IPNetwork(nc):
                    new_cidr = False
            if new_cidr:
                SC, created = CIDR.objects.get_or_create(
                    name=r,
                    defaults={
                        'active_scope': True,
                        'passive_scope': True
                    })
                if created:
                    display_new(
                        "New CIDR added to ScopeCIDRS: {}".format(r))
def post_run(self, args):
    """Report where takeover results were written, then restore $HOME.

    The run step pointed $HOME at the tool's output area; put the original
    value back so later modules see an unmodified environment.
    """
    takeover_dir = os.environ["HOME"]
    display("Potential takeovers are stored in {}".format(takeover_dir))
    os.environ["HOME"] = self.orig_home
def get_shodan(self, r, args):
    """Query Shodan for one item and load the results into the database.

    *r* is either a search query (contains ":", e.g. "net:10.0.0.0/24") —
    handled via the paginated search API — or a bare IP, handled via the
    host API. Discovered services become IPAddress/Port records; hostnames
    from SSL certs and Shodan's own data become Domain records.

    Fixes over the previous revision: a leftover live pdb.set_trace() was
    removed; bare `next` statements after request failures (no-ops that led
    to NameErrors) now `return`; the subjectAltName parse used d['name']
    where the domain data lives in d['data']; pagination retries re-fetched
    page 1 instead of the failed page.
    """
    api_host_url = "https://api.shodan.io/shodan/host/{}?key={}"
    api_search_url = (
        "https://api.shodan.io/shodan/host/search?key={}&query={}&page={}")
    time.sleep(1)  # crude rate limiting between Shodan queries
    if ":" in r:
        display("Doing Shodan search: {}".format(r))
        try:
            results = json.loads(
                requests.get(api_search_url.format(args.api_key, r, 1)).text)
            if results.get(
                    "error") and "request timed out" in results["error"]:
                display_warning(
                    "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                )
                time.sleep(5)  # actually wait the advertised 5 seconds
                results = json.loads(
                    requests.get(api_search_url.format(args.api_key, r,
                                                       1)).text)
        except Exception as e:
            display_error("Something went wrong: {}".format(e))
            # BUG FIX: was a bare `next` (a no-op) — `results` would be
            # undefined below and raise NameError.
            return
        total = len(results["matches"])
        matches = []
        i = 1
        # Page through the search results until a page comes back empty.
        while total > 0:
            display("Adding {} results from page {}".format(total, i))
            matches += results["matches"]
            i += 1
            try:
                time.sleep(1)
                results = json.loads(
                    requests.get(api_search_url.format(args.api_key, r,
                                                       i)).text)
                if (results.get("error")
                        and "request timed out" in results["error"]):
                    display_warning(
                        "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                    )
                    time.sleep(5)
                    # BUG FIX: retry the page that failed, not page 1.
                    results = json.loads(
                        requests.get(
                            api_search_url.format(args.api_key, r, i)).text)
                total = len(results["matches"])
            except Exception as e:
                display_error("Something went wrong: {}".format(e))
                total = 0
        # (Removed a stray live pdb.set_trace() left over from debugging.)
        domains = []
        for res in matches:
            self._store_shodan_service(res, results)
            domains += self._extract_domains(res)
        for d in set(domains):
            if d.count('.') > 0:
                display("Adding discovered domain {}".format(only_valid(d)))
                domain, created = Domain.objects.get_or_create(
                    name=only_valid(d))
            else:
                display_error("Ignoring bad domain {}".format(d))
    else:
        display("Searching for {}".format(r))
        try:
            results = json.loads(
                requests.get(api_host_url.format(r, args.api_key)).text)
        except Exception as e:
            display_error("Something went wrong: {}".format(e))
            # BUG FIX: was a bare `next` (a no-op).
            return
        if results.get("data", False):
            display("{} results found for: {}".format(
                len(results["data"]), r))
            domains = []
            for res in results["data"]:
                self._store_shodan_service(res, results)
                domains += self._extract_domains(res, verbose=True)
            for d in set(domains):
                if d.count('.') > 0 and not is_ip(d):
                    display("Adding discovered domain {}".format(d))
                    domain, created = Domain.objects.get_or_create(name=d)
                else:
                    display_error("Ignoring bad domain {}".format(d))

def _store_shodan_service(self, res, results):
    """Create/update the IPAddress and Port records for one Shodan entry."""
    ip_str = res["ip_str"]
    port_str = res["port"]
    transport = res["transport"]
    display("Processing IP: {} Port: {}/{}".format(ip_str, port_str,
                                                   transport))
    IP, created = IPAddress.objects.get_or_create(ip_address=ip_str)
    IP.meta["shodan_data"] = results
    port, created = Port.objects.get_or_create(
        ip_address=IP, port_number=port_str, proto=transport)
    if created:
        # Guess the service from the presence of ssl/http sections.
        if res.get("ssl", False):
            svc = "https"
        elif res.get("http", False):
            svc = "http"
        else:
            svc = ""
        port.service_name = svc
    port.status = "open"
    port.meta["shodan_data"] = res
    port.save()

def _extract_domains(self, res, verbose=False):
    """Pull candidate domain names from a Shodan service entry.

    Sources: SSL cert subjectAltName extensions, the (non-wildcard) cert
    CN, and Shodan's own 'hostnames' list.
    """
    domains = []
    cert = res.get("ssl", {}).get('cert', {})
    if cert.get('extensions'):
        for ext in cert['extensions']:
            if ext['name'] == 'subjectAltName':
                found = get_domains_from_data(ext['data'])
                domains += found
                if verbose:
                    display("Domains discovered in subjectAltName: {}".format(
                        ", ".join(found)))
    cn = cert.get('subject', {}).get('CN')
    if cn and '*' not in cn:
        domains.append(cn)
    if res.get('hostnames'):
        domains += res['hostnames']
    return domains
def process_data(self, nFile, args):
    """Import a Nessus .nessus XML export into the database.

    Two passes over the report: first a preprocessing pass that bulk-creates
    new IPAddress and Domain rows (with CIDR and BaseDomain bookkeeping),
    then a vulnerability pass (via self.getVulns) whose accumulated
    self.ip_data / self.ports / self.vulnobjects / self.cve_data /
    self.cve_map are bulk-loaded into Port, Vulnerability links, VulnOutput
    and CVE tables. Bulk operations keep large imports fast.
    """
    display("Reading " + nFile)
    tree = ET.parse(nFile)
    root = tree.getroot()
    # Snapshots of what already exists, to avoid duplicate inserts.
    current_ips = set([i.ip_address for i in IPAddress.objects.all()])
    current_domains = set([d.name for d in Domain.objects.all()])
    new_ips = []
    new_ip_list = []
    new_domains = {}  # hostIP -> [hostnames seen for that IP]
    just_domains = []
    self.args = args
    print("Preprocessing IPs/Domains")
    for ReportHost in root.iter("ReportHost"):
        hostname = ""
        hostIP = ""
        # NOTE(review): `os` here shadows the os module for this loop body.
        os = ""
        for HostProperties in ReportHost.iter("HostProperties"):
            for tag in HostProperties:
                if tag.get("name") == "host-ip":
                    hostIP = tag.text
                if tag.get("name") == "host-fqdn":
                    hostname = tag.text.lower()
                    hostname = hostname.replace("www.", "")
                if tag.get("name") == "operating-system":
                    os = " OR ".join(tag.text.split("\n"))
        # pdb.set_trace()
        if hostIP and hostIP not in current_ips:
            res = validate_ip(hostIP)
            if res == "ipv4":
                v = 4
            else:
                v = 6
            new_ips.append(
                IPAddress(ip_address=hostIP,
                          active_scope=True,
                          passive_scope=True,
                          version=v,
                          os=os))
            new_ip_list.append(hostIP)
            current_ips.add(hostIP)
        if hostIP and hostname and '.' in hostname:  # Filter out the random hostnames that aren't fqdns
            if not new_domains.get(hostIP):
                new_domains[hostIP] = []
            # Strip anything that isn't a legal hostname character.
            hostname = ''.join([
                i for i in hostname.lower()
                if i in 'abcdefghijklmnopqrstuvwxyz.-0123456789'
            ])
            if hostname not in current_domains and hostname not in new_domains[
                    hostIP]:
                new_domains[hostIP].append(hostname)
                if hostname not in just_domains:
                    just_domains.append(hostname)
    # Attach each new IP to a CIDR record (looked up via whois when needed).
    cidrs = {c.name: c.id for c in CIDR.objects.all()}
    for instance in new_ips:
        found = False
        for c, v in cidrs.items():
            if instance.ip_address in IPNetwork(c):
                instance.cidr__id = v
                found = True
                break
        # NOTE(review): `found` is never consulted — the whois lookup below
        # runs (and instance.cidr is overwritten) even when a known CIDR
        # matched; confirm whether this should be gated on `if not found:`.
        cidr_data, org_name = get_cidr_info(instance.ip_address)
        cidr, created = CIDR.objects.get_or_create(
            name=cidr_data, defaults={'org_name': org_name})
        instance.cidr = cidr
        cidrs[cidr.name] = cidr.id
    display("Bulk creating IPs...")
    IPAddress.objects.bulk_create(new_ips)
    domain_objs = []
    base_domains = {bd.name: bd.id for bd in BaseDomain.objects.all()}
    for d in just_domains:
        # Base domain = last two labels (e.g. "example.com").
        base_domain = '.'.join(d.split('.')[-2:])
        if not base_domains.get(base_domain):
            bd = BaseDomain(name=base_domain,
                            active_scope=False,
                            passive_scope=False)
            bd.save()
            bd_id = bd.id
            base_domains[bd.name] = bd.id
        else:
            bd_id = base_domains[base_domain]
        domain_objs.append(Domain(name=d, basedomain_id=bd_id))
    display("Bulk creating domains...")
    Domain.objects.bulk_create(domain_objs)
    # Re-read ids now that the bulk inserts have landed.
    current_ips = {i.ip_address: i.id for i in IPAddress.objects.all()}
    current_domains = {d.name: d.id for d in Domain.objects.all()}
    ThroughModel = Domain.ip_addresses.through
    many_objs = []
    for i, v in new_domains.items():
        ip_id = current_ips[i]
        for d in v:
            if d in just_domains or i in new_ip_list:
                d_id = current_domains[d]
                many_objs.append(
                    ThroughModel(domain_id=d_id, ipaddress_id=ip_id))
    display("Bulk gluing them together")
    ThroughModel.objects.bulk_create(many_objs)
    # Second pass: collect vulnerability data per host into self.* caches.
    for ReportHost in root.iter("ReportHost"):
        for HostProperties in ReportHost.iter("HostProperties"):
            for tag in HostProperties:
                if tag.get("name") == "host-ip":
                    hostIP = tag.text
        if hostIP:  # apparently nessus doesn't always have an IP to work with...
            ip_id = current_ips[hostIP]
            self.getVulns(ip_id, ReportHost)
    ports = []
    current_ports = set([
        f"{p.ip_address_id}|{p.port_number}|{p.proto}"
        for p in Port.objects.all()
    ])
    # self.ip_data: ip_id -> {"port|proto": {service_name: ...}}
    for i, v in self.ip_data.items():
        for k, data in v.items():
            if f"{i}|{k}" not in current_ports:
                ports.append(
                    Port(ip_address_id=i,
                         port_number=k.split('|')[0],
                         proto=k.split('|')[1],
                         service_name=data['service_name']))
                current_ports.add(f"{i}|{k}")
    display(f"Bulk loading {len(ports)} Ports")
    Port.objects.bulk_create(ports)
    # Maybe race condition?
    # time.sleep(30)
    all_ports = {
        f"{p.ip_address_id}|{p.port_number}|{p.proto}": p.id
        for p in Port.objects.all()
    }
    display("Gluing in Vulnerabilities")
    ThroughModel = Vulnerability.ports.through
    vuln_port_current = set([
        f"{v.vulnerability_id}|{v.port_id}"
        for v in ThroughModel.objects.all()
    ])
    port_vuln_data = []
    for d in self.ports:
        # Key format: "ip_id|port|proto|vuln_id".
        p = '|'.join(d.split('|')[:3])
        v = d.split('|')[-1]
        if not all_ports.get(p):
            # Weird edge case that seems to happen at random. Relook this one up in DB
            print(f"Missing {p}")
            p_id, p_num, p_proto = p.split('|')
            port_obj = Port.objects.get(ip_address_id=p_id,
                                        port_number=p_num,
                                        proto=p_proto)
            all_ports[p] = port_obj.id
        if f"{v}|{all_ports[p]}" not in vuln_port_current:
            port_vuln_data.append(
                ThroughModel(port_id=all_ports[p], vulnerability_id=v))
            vuln_port_current.add(f"{v}|{all_ports[p]}")
    ThroughModel.objects.bulk_create(port_vuln_data)
    vuln_outputs = []
    vulnobject_data = set([
        f"{v.port.ip_address_id}|{v.port.port_number}|{v.port.proto}|{v.vulnerability_id}"
        for v in VulnOutput.objects.all()
    ])
    for k, v in self.vulnobjects.items():
        if k not in vulnobject_data:
            port_id = all_ports['|'.join(k.split('|')[:3])]
            vuln_id = k.split('|')[-1]
            vuln_outputs.append(
                VulnOutput(port_id=port_id,
                           vulnerability_id=vuln_id,
                           data=v))
    display(f"Adding in {len(vuln_outputs)} output data")
    VulnOutput.objects.bulk_create(vuln_outputs)
    display("Attaching CVEs")
    cve_current = [c.name for c in CVE.objects.all()]
    new_cves = []
    for k, v in self.cve_data.items():
        if k not in cve_current:
            new_cves.append(
                CVE(name=k, description=v[0], temporal_score=v[1]))
    CVE.objects.bulk_create(new_cves)
    display("And finally gluing them to the vulns")
    cve_current = {c.name: c.id for c in CVE.objects.all()}
    ThroughModel = Vulnerability.cves.through
    cve_through = set([
        f"{c.cve_id}|{c.vulnerability_id}"
        for c in ThroughModel.objects.all()
    ])
    cve_glue = []
    for m in list(set(self.cve_map)):
        cve_id = cve_current[m.split('|')[0]]
        vuln_id = m.split('|')[1]
        if f"{cve_id}|{vuln_id}" not in cve_through:
            cve_glue.append(
                ThroughModel(cve_id=cve_id, vulnerability_id=vuln_id))
    ThroughModel.objects.bulk_create(cve_glue)
    # Now to add all of this data into the database
    return
def run(self, args):
    """Nessus module entry point: import existing report files, launch a new
    scan over the in-scope targets, or download a finished scan and import it.
    """
    if args.import_file:
        for nFile in args.import_file:
            self.process_data(nFile, args)
    elif args.launch:
        # NOTE(review): with `and`, this only errors when *every* option is
        # missing, while the message implies all are required — confirm
        # whether `or` was intended.
        if (not args.username  # noqa: W503
                and not args.password  # noqa: W503
                and not args.host  # noqa: W503
                and not args.uuid  # noqa: W503
                and not args.policy_id  # noqa: W503
                and not args.folder_id  # noqa: W503
            ):
            display_error(
                "You must supply a username, password, and host to launch a Nessus job"
            )
        else:
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
                uuid=args.uuid,
                policy_id=args.policy_id,
                folder_id=args.folder_id,
            )
            # Gather in-scope targets not yet scanned by this tool.
            ips = [
                ip.ip_address
                for ip in IPAddress.get_set(scope_type="active",
                                            tool=self.name)
            ]
            cidrs = [
                cidr.name
                for cidr in CIDR.get_set(tool=self.name, scope_type="active")
            ]
            domains = [
                domain.name
                for domain in Domain.get_set(scope_type="active",
                                             tool=self.name)
            ]
            if args.max_hosts > 0:
                # Split the target list into max_hosts-sized jobs; only the
                # first chunk is auto-started.
                all_ips = []
                for c in cidrs:
                    all_ips += [str(i) for i in IPNetwork(c)]
                all_ips += ips
                targets = list(set(all_ips)) + domains
                chunks = [
                    targets[i:i + args.max_hosts]
                    for i in range(0, len(targets), args.max_hosts)
                ]
                display(f"Creating {len(chunks)}")
                i = 0
                for c in chunks:
                    i += 1
                    if i == 1:
                        res = n.launch_job(", ".join(c),
                                           args.job_name + f" ({i})")
                        display(
                            "New Nessus job launched with ID {}".format(res))
                    else:
                        res = n.launch_job(", ".join(c),
                                           args.job_name + f" ({i})",
                                           autostart=False)
                        display(
                            f"New Nessus job created with ID {res}. You'll need to launch it manually"
                        )
                    display(
                        "Remember this number! You'll need it to download the job once it is done."
                    )
            else:
                targets = ", ".join(merge_ranges(ips + cidrs) + domains)
                res = n.launch_job(targets, args.job_name)
                display("New Nessus job launched with ID {}".format(res))
                display(
                    "Remember this number! You'll need it to download the job once it is done."
                )
    elif args.download:
        # NOTE(review): same `and` vs `or` concern as the launch branch.
        if (not args.username  # noqa: W503
                and not args.password  # noqa: W503
                and not args.host  # noqa: W503
                and not args.job_id  # noqa: W503
            ):
            display_error(
                "You must supply host, username, password and job_id to download a report to import"
            )
        else:
            n = NessusRequest(
                args.username,
                args.password,
                args.host,
            )
            # Relative output paths live under the Armory base path.
            if args.output_path[0] == "/":
                output_path = os.path.join(
                    self.base_config["ARMORY_BASE_PATH"],
                    args.output_path[1:])
            else:
                output_path = os.path.join(
                    self.base_config["ARMORY_BASE_PATH"], args.output_path)
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            output_path = os.path.join(
                output_path,
                "Nessus-export-{}.nessus".format(int(time.time())))
            n.export_file(args.job_id, output_path)
            self.process_data(output_path, args)