def __init__(self, db):
    self.db = db
    self.BaseDomain = BaseDomainRepository(db, self.name)
    self.Domain = DomainRepository(db, self.name)
    self.IPAddress = IPRepository(db, self.name)
    self.CIDR = CIDRRepository(db, self.name)
    self.ScopeCIDR = ScopeCIDRRepository(db, self.name)
def __init__(self, db):
    self.db = db
    self.BaseDomain = BaseDomainRepository(db, self.name)
    self.Domain = DomainRepository(db, self.name)
    self.IPAddress = IPRepository(db, self.name)
    self.Port = PortRepository(db, self.name)
    self.Vulnerability = VulnRepository(db, self.name)
    self.CVE = CVERepository(db, self.name)
class Module(ModuleTemplate): """ Ingests domains and IPs. Domains get ip info and cidr info, and IPs get CIDR info. """ name = "Ingestor" def __init__(self, db): self.db = db self.BaseDomain = BaseDomainRepository(db, self.name) self.Domain = DomainRepository(db, self.name) self.IPAddress = IPRepository(db, self.name) self.CIDR = CIDRRepository(db, self.name) self.ScopeCIDR = ScopeCIDRRepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument( "-d", "--import_domains", help= "Either domain to import or file containing domains to import. One per line", ) self.options.add_argument( "-i", "--import_ips", help= "Either IP/range to import or file containing IPs and ranges, one per line.", ) self.options.add_argument( "-a", "--active", help="Set scoping on imported data as active", action="store_true", ) self.options.add_argument( "-p", "--passive", help="Set scoping on imported data as passive", action="store_true", ) self.options.add_argument( "-sc", "--scope_cidrs", help= "Cycle through out of scope networks and decide if you want to add them in scope", action="store_true", ) self.options.add_argument( "-sb", "--scope_base_domains", help= "Cycle through out of scope base domains and decide if you want to add them in scope", action="store_true", ) self.options.add_argument("--descope", help="Descope an IP, domain, or CIDR") self.options.add_argument( "-Ii", "--import_database_ips", help="Import IPs from database", action="store_true", ) self.options.add_argument( "--force", help="Force processing again, even if already processed", action="store_true", ) def run(self, args): self.in_scope = args.active self.passive_scope = args.passive if args.descope: if "/" in args.descope: self.descope_cidr(args.descope) elif check_string(args.descope): pass else: self.descope_ip(args.descope) # Check if in ScopeCIDR and remove if found if args.import_ips: try: ips = open(args.import_ips) for line in ips: if line.strip(): if "/" in line or "-" in line: self.process_cidr(line) else: self.process_ip(line.strip(), force_scope=True) self.Domain.commit() except IOError: if "/" in args.import_ips or "-" in args.import_ips: self.process_cidr(args.import_ips) else: self.process_ip(args.import_ips.strip(), force_scope=True) self.Domain.commit() if args.import_domains: try: domains = open(args.import_domains) for line in domains: if line.strip(): self.process_domain(line.strip()) self.Domain.commit() except IOError: self.process_domain(args.import_domains.strip()) self.Domain.commit() if args.scope_base_domains: base_domains = self.BaseDomain.all(in_scope=False, passive_scope=False) for bd in base_domains: self.reclassify_domain(bd) self.BaseDomain.commit() def get_domain_ips(self, domain): ips = [] try: answers = dns.resolver.query(domain, "A") for a in answers: ips.append(a.address) return ips except: return [] def process_domain(self, domain_str): created, domain = self.Domain.find_or_create( only_tool=True, domain=domain_str, in_scope=self.in_scope, passive_scope=self.passive_scope, ) if not created: if (domain.in_scope != self.in_scope or domain.passive_scope != self.passive_scope): display( "Domain %s already exists with different scoping. 
Updating to Active Scope: %s Passive Scope: %s" % (domain_str, self.in_scope, self.passive_scope)) domain.in_scope = self.in_scope domain.passive_scope = self.passive_scope domain.update() def process_ip(self, ip_str, force_scope=True): created, ip = self.IPAddress.find_or_create( only_tool=True, ip_address=ip_str, in_scope=self.in_scope, passive_scope=self.passive_scope, ) if not created: if ip.in_scope != self.in_scope or ip.passive_scope != self.passive_scope: display( "IP %s already exists with different scoping. Updating to Active Scope: %s Passive Scope: %s" % (ip_str, self.in_scope, self.passive_scope)) ip.in_scope = self.in_scope ip.passive_scope = self.passive_scope ip.update() return ip def process_cidr(self, line): display("Processing %s" % line) if "/" in line: created, cidr = self.ScopeCIDR.find_or_create(cidr=line.strip()) if created: display_new("Adding %s to scoped CIDRs in database" % line.strip()) cidr.in_scope = True cidr.update() elif "-" in line: start_ip, end_ip = line.strip().replace(" ", "").split("-") if "." not in end_ip: end_ip = ".".join(start_ip.split(".")[:3] + [end_ip]) cidrs = iprange_to_cidrs(start_ip, end_ip) for c in cidrs: created, cidr = self.ScopeCIDR.find_or_create(cidr=str(c)) if created: display_new("Adding %s to scoped CIDRs in database" % line.strip()) cidr.in_scope = True cidr.update() def scope_ips(self): IPAddresses = self.IPAddress.all() def reclassify_domain(self, bd): if bd.meta.get("whois", False): display_new("Whois data found for {}".format(bd.domain)) print(bd.meta["whois"]) res = raw_input( "Should this domain be scoped (A)ctive, (P)assive, or (N)ot? [a/p/N] " ) if res.lower() == "a": bd.in_scope = True bd.passive_scope = True elif res.lower() == "p": bd.in_scope = False bd.passive_scope = True else: bd.in_scope = False bd.passive_scope = False bd.save() else: display_error( "Unfortunately, there is no whois information for {}. Please populate it using the Whois module" .format(bd.domain)) def descope_ip(self, ip): ip = self.IPAddress.all(ip_address=ip) if ip: for i in ip: display("Removing IP {} from scope".format(i.ip_address)) i.in_scope = False i.passive_scope = False i.update() for d in i.domains: in_scope_ips = [ ipa for ipa in d.ip_addresses if ipa.in_scope or ipa.passive_scope ] if not in_scope_ips: display( "Domain {} has no more scoped IPs. Removing from scope." .format(d.domain)) d.in_scope = False d.passive_scope = False self.IPAddress.commit() def descope_cidr(self, cidr): CIDR = self.ScopeCIDR.all(cidr=cidr) if CIDR: for c in CIDR: display("Removing {} from ScopeCIDRs".format(c.cidr)) c.delete() cnet = IPNetwork(cidr) for ip in self.IPAddress.all(): if IPAddress(ip.ip_address) in cnet: self.descope_ip(ip.ip_address)
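# A minimal sketch (not part of the module above) of the range handling in process_cidr():
# a dash-separated range is expanded into CIDR blocks with netaddr's iprange_to_cidrs.
# The sample range and the shorthand handling below are illustrative only.
from netaddr import iprange_to_cidrs

start_ip, end_ip = "10.0.0.0-10.0.0.255".replace(" ", "").split("-")
if "." not in end_ip:
    # shorthand like "10.0.0.0-255": reuse the first three octets of the start address
    end_ip = ".".join(start_ip.split(".")[:3] + [end_ip])
for c in iprange_to_cidrs(start_ip, end_ip):
    print("Would add %s to scoped CIDRs" % str(c))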
class Module(ModuleTemplate): ''' Ingests domains and IPs. Domains get ip info and cidr info, and IPs get CIDR info. ''' name = "Ingestor" def __init__(self, db): self.db = db self.BaseDomain = BaseDomainRepository(db, self.name) self.Domain = DomainRepository(db, self.name) self.IPAddress = IPRepository(db, self.name) self.CIDR = CIDRRepository(db, self.name) self.ScopeCIDR = ScopeCIDRRepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument( '-f', '--import_file', help="File containing domains to import. One per line") self.options.add_argument('-d', '--domain', help="Single domain to import") self.options.add_argument( '-i', '--import_ips', help="File containing IPs and ranges, one per line.") self.options.add_argument('-Id', '--import_database_domains', help='Import domains from database', action="store_true") self.options.add_argument('-Ii', '--import_database_ips', help='Import IPs from database', action="store_true") self.options.add_argument( '--force', help="Force processing again, even if already processed", action="store_true") def run(self, args): if args.import_file: domains = open(args.import_file) for line in domains: if line.strip(): self.process_domain(line.strip(), force_scope=True) self.Domain.commit() if args.domain: self.process_domain(args.domain, force_scope=True) self.Domain.commit() if args.import_database_domains: if args.force: domains = self.Domain.all() else: domains = self.Domain.all(tool=self.name) for d in domains: # pdb.set_trace() self.process_domain(d.domain) self.Domain.commit() if args.import_ips: try: ips = open(args.import_ips) for line in ips: if line.strip(): if '/' in line or '-' in line: self.process_cidr(line) else: self.process_ip(line.strip(), force_scope=True) self.Domain.commit() except IOError: if '/' in args.import_ips or '-' in args.import_ips: self.process_cidr(args.import_ips) else: self.process_ip(args.import_ips.strip(), force_scope=True) self.Domain.commit() if args.import_database_ips: for ip in self.IPAddress.all(): self.process_ip(ip.ip_address) self.Domain.commit() def get_domain_ips(self, domain): ips = [] try: answers = dns.resolver.query(domain, 'A') for a in answers: ips.append(a.address) return ips except: return [] def process_domain(self, domain_str, force_scope=False): # First check if the root domain exists, and if it doesn't, add it created, domain = self.Domain.find_or_create( only_tool=True, domain=domain_str, force_in_scope=force_scope) # if not created: # # print("%s already processed, skipping." 
% domain_str) # return print("Processing %s" % domain_str) # Next get ip addresses of domain ips = self.get_domain_ips(domain_str) for i in ips: ip = self.process_ip(i, force_scope=force_scope) domain.ip_addresses.append(ip) domain.save() def process_ip(self, ip_str, force_scope=False): created, ip = self.IPAddress.find_or_create(only_tool=True, ip_address=ip_str, force_in_scope=force_scope) if created: print(" - Found New IP: %s" % ip_str) res = self.check_private_subnets(ip_str) if res: cidr_data = res else: try: res = IPWhois(ip_str).lookup_whois(get_referral=True) except: res = IPWhois(ip_str).lookup_whois() cidr_data = [] for n in res['nets']: if ',' in n['cidr']: for cidr_str in n['cidr'].split(', '): cidr_data.append([cidr_str, n['description']]) else: cidr_data.append([n['cidr'], n['description']]) try: cidr_data = [ cidr_d for cidr_d in cidr_data if IPAddress(ip_str) in IPNetwork(cidr_d[0]) ] except: pdb.set_trace() cidr_len = len(IPNetwork(cidr_data[0][0])) matching_cidr = cidr_data[0] for c in cidr_data: if len(IPNetwork(c[0])) < cidr_len: matching_cidr = c print("New CIDR found: %s - %s" % (matching_cidr[1], matching_cidr[0])) cidr = self.CIDR.find_or_create(only_tool=True, cidr=matching_cidr[0], org_name=matching_cidr[1])[1] ip.cidr = cidr ip.save() # else: # print(" - IP Already processed: %s" % ip_str) return ip def check_private_subnets(self, ip_str): for cidr in private_subnets: if IPAddress(ip_str) in cidr: return ([str(cidr), 'Non-Public Subnet'], ) return False def process_cidr(self, line): if '/' in line: print("Adding %s to scoped CIDRs" % line.strip()) self.ScopeCIDR.find_or_create(cidr=line.strip()) elif '-' in line: start_ip, end_ip = line.strip().replace(' ', '').split('-') if '.' not in end_ip: end_ip = '.'.join(start_ip.split('.')[:3] + [end_ip]) cidrs = iprange_to_cidrs(start_ip, end_ip) for c in cidrs: print("Adding %s to scoped CIDRs" % str(c)) self.ScopeCIDR.find_or_create(cidr=str(c))
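# A standalone sketch of the CIDR-selection logic in process_ip() above: of the whois
# 'nets' entries, keep only networks that contain the IP and pick the most specific
# (smallest) one. The ip/nets values are made-up examples, not real whois output.
from netaddr import IPAddress, IPNetwork

ip_str = "198.51.100.10"
nets = [["198.51.100.0/24", "Example-Net"], ["198.51.0.0/16", "Example-Block"]]

matching = [n for n in nets if IPAddress(ip_str) in IPNetwork(n[0])]
best = min(matching, key=lambda n: len(IPNetwork(n[0])))
print("Most specific CIDR: %s (%s)" % (best[0], best[1]))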
class Module(ToolTemplate): name = "Subfinder" binary_name = "subfinder" def __init__(self, db): self.db = db self.BaseDomains = BaseDomainRepository(db, self.name) self.Domains = DomainRepository(db, self.name) self.IPs = IPRepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument( "-a", "--bruteforce-all", help="Brute-force subdomains." ) self.options.add_argument( "-d", "--domain", help="Domain to run subfinder against." ) self.options.add_argument( "-dL", "--domain-list", help="Read in a list of domains within the given file.", ) self.options.add_argument( "-i", "--db_domains", help="Import the domains from the database.", action="store_true", ) self.options.add_argument( "-r", "--resolvers", help="A list of resolvers(comma-separated) or a file containing a list of resolvers.", ) self.options.add_argument( "--rescan", help="Overwrite files without asking", action="store_true" ) self.options.add_argument( "-w", "--wordlist", help="The wordlist for when bruteforcing is selected." ) def get_targets(self, args): targets = [] outpath = "" if args.output_path: if not os.path.exists(args.output_path): os.makedirs(args.output_path) outpath = args.output_path if args.domain or args.db_domains: self.db_domain_file = self.__get_tempfile(args.domain, args) out_file = "database_domains.subfinder" if args.domain: created, domain = self.BaseDomains.find_or_create(domain=args.domain) out_file = os.path.join(outpath, "{}.subfinder".format(args.domain)) if not self.db_domain_file: return targets targets.append({"target": self.db_domain_file, "output": out_file}) elif args.domain_list: domains = io.open(args.domain_list, encoding="utf-8").read().split("\n") for d in domains: if d: created, domain = self.BaseDomains.find_or_create(domain=d) targets.append( { "target": args.domain_list, "output": os.path.join( outpath, "{}.subfinder".format(args.domain_list) ), } ) return targets def build_cmd(self, args): if args.binary: cmd = "{} ".format(args.binary) else: cmd = "{} ".format(self.binary_name) cmd = "{} -o {} -dL {}".format(cmd, "{output}", "{target}") return cmd def process_output(self, targets): for target in targets: with io.open(target["output"], encoding="utf-8") as fd: for line in fd: domain = line.strip() ips = get_domain_ip.run(domain) ip_obj = None _, dom = self.Domains.find_or_create(domain=domain) if ips: for ip in ips: _, ip_obj = self.IPs.find_or_create(ip_address=ip) if ip_obj: dom.ip_addresses.append(ip_obj) dom.save() self.BaseDomains.commit() self.IPs.commit() def post_run(self, args): # Remove the temporary db file if it was created. if getattr(self, "db_domain_file", None): try: os.unlink(self.db_domain_file) except IOError as e: print("Failed to remove the Subfinder db temp file: '{}'.".format(e)) def __get_tempfile(self, domain=None, args=None): # Create a temporary file and place all of the current database domains within the file. from tempfile import NamedTemporaryFile with NamedTemporaryFile(delete=False) as fd: if domain: fd.write("{}\n".format(domain).encode("utf-8")) else: # Go through the database and grab the domains adding them to the file. if args.rescan: domains = self.BaseDomains.all(passive_scope=True) else: domains = self.BaseDomains.all(tool=self.name, passive_scope=True) if domains: for domain in domains: fd.write("{}\n".format(domain.domain).encode("utf-8")) else: return None return fd.name
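# A hedged sketch of the NamedTemporaryFile pattern __get_tempfile() above relies on:
# write one domain per line, keep the file on disk (delete=False), and hand the path to
# the subfinder command via -dL. The domain list and output filename are placeholders.
import os
from tempfile import NamedTemporaryFile

domains = ["example.com", "example.org"]
with NamedTemporaryFile(delete=False) as fd:
    for d in domains:
        fd.write("{}\n".format(d).encode("utf-8"))
temp_path = fd.name

cmd = "subfinder -o {output} -dL {target}".format(output="example.subfinder", target=temp_path)
print(cmd)
os.unlink(temp_path)  # mirrors post_run(), which removes the temp file when the scan is done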
class Module(ToolTemplate): name = "Gowitness" binary_name = "gowitness" def __init__(self, db): self.db = db self.IPAddress = IPRepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument( "-i", "--import_database", help="Import URLs from the database", action="store_true", ) self.options.add_argument("-f", "--import_file", help="Import URLs from file") self.options.add_argument( "--group_size", help="How many hosts per group (default 250)", type=int, default=250, ) self.options.add_argument( "--rescan", help="Rerun gowitness on systems that have already been processed.", action="store_true", ) self.options.add_argument( "--scan_folder", help= "Generate list of URLs based off of a folder containing GobusterDir output files", ) self.options.add_argument("--counter_max", help="Max number of screenshots per host", default="20") def get_targets(self, args): timestamp = str(int(time())) targets = [] if args.import_file: targets += [t for t in open(args.file).read().split("\n") if t] if args.import_database: if args.rescan: targets += get_urls.run(self.db, scope_type="active") else: targets += get_urls.run(self.db, scope_type="active", tool=self.name) if args.scan_folder: files = os.listdir(args.scan_folder) counter_max = str(args.counter_max) for f in files: if f.count("_") == 4: counter = 0 http, _, _, domain, port = f.split("-dir.txt")[0].split( "_") for data in (open(os.path.join(args.scan_folder, f)).read().split("\n")): if "(Status: 200)" in data: targets.append("{}://{}:{}{}".format( http, domain, port, data.split(" ")[0])) counter += 1 if counter >= counter_max: break if args.output_path[0] == "/": self.path = os.path.join( self.base_config["PROJECT"]["base_path"], args.output_path[1:], timestamp, args.output_path[1:] + "_{}", ) else: self.path = os.path.join( self.base_config["PROJECT"]["base_path"], args.output_path, timestamp, args.output_path + "_{}", ) res = [] i = 0 for url_chunk in self.chunks(targets, args.group_size): i += 1 _, file_name = tempfile.mkstemp() open(file_name, "w").write("\n".join(url_chunk)) if not os.path.exists(self.path.format(i)): os.makedirs(self.path.format(i)) res.append({"target": file_name, "output": self.path.format(i)}) return res def build_cmd(self, args): command = (self.binary + " file -D {output}/gowitness.db -d {output} -s {target} ") if args.tool_args: command += tool_args return command def process_output(self, cmds): """ Not really any output to process with this module, but you need to cwd into directory to make database generation work, so I'll do that here. """ cwd = os.getcwd() for cmd in cmds: target = cmd["target"] output = cmd["output"] cmd = [self.binary, "generate"] os.chdir(output) Popen(cmd, shell=False).wait() os.chdir(cwd) self.IPAddress.commit() def chunks(self, chunkable, n): """ Yield successive n-sized chunks from l. """ for i in xrange(0, len(chunkable), n): yield chunkable[i:i + n]
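# A small standalone sketch of the chunks() helper above: yield successive n-sized
# slices of a list so gowitness runs against manageable groups of URLs. The module
# itself uses xrange (Python 2); range is the Python 3 equivalent used here.
def chunks(chunkable, n):
    for i in range(0, len(chunkable), n):
        yield chunkable[i:i + n]

urls = ["http://a", "http://b", "http://c", "http://d", "http://e"]
for group in chunks(urls, 2):
    print(group)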
class Module(ModuleTemplate): name = "Nessus" def __init__(self, db): self.db = db self.BaseDomain = BaseDomainRepository(db, self.name) self.Domain = DomainRepository(db, self.name) self.IPAddress = IPRepository(db, self.name) self.Port = PortRepository(db, self.name) self.Vulnerability = VulnRepository(db, self.name) self.CVE = CVERepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument('--import_file', help="Import separated Nessus files separated by a space. DO NOT USE QUOTES OR COMMAS", nargs='+') self.options.add_argument('--interactive', help="Prompt to store domains not in Base Domains already", action="store_true") self.options.add_argument('--internal', help="Store domains not in Base Domains already", action="store_true") def run(self, args): if not args.import_file: print("You need to supply some options to do something.") else: for nFile in args.import_file: self.process_data(nFile, args) def nessCheckPlugin(self, tag): nessPlugins = ["10759","77026", "20089", "56984", "71049", "70658", "40984", "11411"] pluginID = tag.get("pluginID") if pluginID in nessPlugins: #print pluginID + " is in the list" if pluginID == "10759": if tag.find("plugin_output") is not None: return tag.find("plugin_output").text.split("\n\n")[3].strip() #returns IP for Web Server HTTP Header INternal IP disclosure else: return "" if pluginID == "77026": if tag.find("plugin_output") is not None: return tag.find("plugin_output").text.split("\n\n")[3].strip() #Microsoft Exchange Client Access Server Information Disclosure (IP addy) else: return "" if pluginID == "71049" or pluginID == "70658": output = "" if tag.find("plugin_output") is not None: tmp = tag.find("plugin_output").text.split(":")[1] #SSH Weak MAC & CBC Algorithms Enabled #print "#"*5 tmp = tmp.split("\n\n")[1].replace(" ","") #print "#"*5 output = tmp.split("\n") #print ", ".join(output) return ", ".join(output) if pluginID == "56984": if tag.find("plugin_output") is not None: tmp = tag.find("plugin_output").text.split("This port supports ")[1].strip() # SSL / TLS Versions Supported tmp = tmp.split("/") bad = [] for i in tmp: #print i if "SSLv" in i: bad.append(i) elif "TLSv1.0" in i: bad.append(i) if bad != []: return ", ".join(bad).rstrip(".") else: return "" else: return "" if pluginID == "40984": #broswable web dirs if tag.find("plugin_output") is not None: tmp = tag.find("plugin_output").text.split("The following directories are browsable :")[1].strip() directories = tmp.split("\n") return "\n".join(directories) if pluginID == "11411": #Backup Files Disclosure if tag.find("plugin_output") is not None: urls = [] tmp = tag.find("plugin_output").text.split("It is possible to read the following backup files :")[1].strip() tmpUrls = tmp.split("\n") for url in tmpUrls: if "URL" in url: urls.append(url.split(":")[1].lstrip()) if urls: return "\n".join(urls) else: return "" if pluginID == "20089": #F5 cookie if tag.find("plugin_output") is not None: f5Output = [] cookieVal = [] output = tag.find("plugin_output").text.strip().split("\n") for line in output: #print line line = line.split(":") for i, item in enumerate(line): item = item.strip() if "Cookie" in item: cabbage = line.pop(i) tmp = line.pop(i) tmp.strip() cookieVal.append(tmp) else: item = "".join(item) f5Output.append(item) f5Output = " : ".join(f5Output) f5Output = f5Output.replace(" : : ", ", ") f5Output += " [" + ", ".join(cookieVal) + "]" c = 0 tmpF5Output = f5Output.split() for i, letter in enumerate(tmpF5Output): if letter == ":": c 
+= 1 if (c%2) == 0: tmpF5Output[i] = " " return "".join(tmpF5Output).replace("["," [") else: return "" else: return False def getVulns(self, ip, ReportHost): '''Gets vulns and associated services''' for tag in ReportHost.iter("ReportItem"): exploitable = False cves = [] vuln_refs = {} proto = tag.get("protocol") port = tag.get("port") svc_name = tag.get("svc_name").replace("?","") tmpPort = proto+"/"+port if tmpPort.lower() == "tcp/443": portName = "https" elif tmpPort.lower() == "tcp/80": portName = "http" elif svc_name == "www": plugin_name = tag.get("pluginName") if "tls" in plugin_name.lower() or "ssl" in plugin_name.lower(): portName = "https" else: portName = "http" else: portName = svc_name if "general" not in portName: created, db_port = self.Port.find_or_create(port_number=port, status='open', proto=proto, ip_address_id=ip.id) if db_port.service_name == "http": if portName == "https": db_port.service_name = portName elif db_port.service_name == "https": pass else: db_port.service_name = portName db_port.save() if tag.get("pluginID") == "56984": severity = 1 elif tag.get("pluginID") == "11411": severity = 3 else: severity = int(tag.get("severity")) findingName = tag.get("pluginName") description = tag.find("description").text if tag.find("solution") is not None and tag.find("solution") != "n/a": solution = tag.find("solution").text else: solution = "No Remediation From Nessus" nessCheck = self.nessCheckPlugin(tag) if nessCheck: if not db_port.info: db_port.info = {findingName:nessCheck} else: db_port.info[findingName] = nessCheck db_port.save() if tag.find("exploit_available") is not None: #print "\nexploit avalable for", findingName exploitable = True metasploits = tag.findall("metasploit_name") if metasploits: vuln_refs['metasploit'] = [] for tmp in metasploits: vuln_refs['metasploit'].append(tmp.text) edb_id = tag.findall("edb-id") if edb_id: vuln_refs['edb-id'] = [] for tmp in edb_id: vuln_refs['edb-id'].append(tmp.text) tmpcves = tag.findall("cve") for c in tmpcves: if c.text not in cves: cves.append(c.text) if not self.Vulnerability.find(name=findingName): created, db_vuln = self.Vulnerability.find_or_create(name=findingName, severity=severity, description=description, remediation=solution) db_vuln.ports.append(db_port) db_vuln.exploitable = exploitable if exploitable == True: print "\nexploit avalable for", findingName print if vuln_refs: db_vuln.exploit_reference = vuln_refs else: db_vuln = self.Vulnerability.find(name=findingName) db_vuln.ports.append(db_port) db_vuln.exploitable = exploitable if vuln_refs: if db_vuln.exploit_reference is not None: for key in vuln_refs.keys(): if key not in db_vuln.exploit_reference.keys(): db_vuln.exploit_reference[key] = vuln_refs[key] else: for ref in vuln_refs[key]: if ref not in db_vuln.exploit_reference[key]: db_vuln.exploit_reference[key].append(ref) else: db_vuln.exploit_reference = vuln_refs for cve in cves: if not self.CVE.find(name=cve): #print "Gathering CVE information for", cve try: res = json.loads(requests.get('http://cve.circl.lu/api/cve/%s' % cve).text) cveDescription = res['summary'] cvss = float(res['cvss']) except: cveDescription = None cvss = None if not self.CVE.find(name=cve): created, db_cve = self.CVE.find_or_create(name=cve, description=cveDescription, temporal_score=cvss) db_cve.vulnerabilities.append(db_vuln) else: db_cve = self.CVE.find(name=cve) if db_cve.description is None and cveDescription is not None: db_cve.description = cveDescription if db_cve.temporal_score is None and cvss is not None: 
db_cve.temporal_score = cvss db_cve.vulnerabilities.append(db_vuln) def process_data(self, nFile, args): print "Reading",nFile tree = ET.parse(nFile) root = tree.getroot() skip = [] for ReportHost in root.iter('ReportHost'): os = [] hostname = "" hostIP = "" for HostProperties in ReportHost.iter("HostProperties"): for tag in HostProperties: if tag.get('name') == "host-ip": hostIP = tag.text if tag.get('name') == "host-fqdn": hostname = tag.text.lower() hostname = hostname.replace("www.","") if tag.get('name') == "operating-system": os = tag.text.split("\n") if hostIP: #apparently nessus doesn't always have an IP to work with... if hostname: print "Gathering Nessus info for {} ( {} )".format(hostIP,hostname) else: print "Gathering Nessus info for",hostIP created, ip = self.IPAddress.find_or_create(ip_address=hostIP) if hostname: if not args.internal: created, domain = self.Domain.find_or_create(domain=hostname) if ip not in domain.ip_addresses: ip.save() domain.ip_addresses.append(ip) domain.save() else: created, domain = self.Domain.find_or_create(domain=hostname) if ip not in domain.ip_addresses: domain.ip_addresses.append(ip) domain.update() if os: for o in os: if not ip.OS: ip.OS = o else: if o not in ip.OS.split(" OR "): ip.OS += " OR "+o self.getVulns(ip, ReportHost) self.IPAddress.commit() return
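# A minimal, hedged sketch of the .nessus parsing pattern process_data() above follows:
# iterate ReportHost elements, read host-ip / host-fqdn from HostProperties, then walk
# the ReportItem findings. "scan.nessus" is a placeholder filename.
import xml.etree.ElementTree as ET

tree = ET.parse("scan.nessus")
root = tree.getroot()
for report_host in root.iter("ReportHost"):
    host_ip, hostname = "", ""
    for props in report_host.iter("HostProperties"):
        for prop in props:
            if prop.get("name") == "host-ip":
                host_ip = prop.text
            elif prop.get("name") == "host-fqdn":
                hostname = prop.text.lower()
    for item in report_host.iter("ReportItem"):
        print(host_ip, hostname, item.get("pluginID"), item.get("pluginName"))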
class Module(ToolTemplate): """ Module for running nmap. Make sure to pass all nmap-specific arguments at the end, after --tool_args """ name = "Nmap" binary_name = "nmap" def __init__(self, db): self.db = db self.BaseDomain = BaseDomainRepository(db, self.name) self.Domain = DomainRepository(db, self.name) self.IPAddress = IPRepository(db, self.name) self.Port = PortRepository(db, self.name) self.Vulnerability = VulnRepository(db, self.name) self.CVE = CVERepository(db, self.name) self.ScopeCIDR = ScopeCIDRRepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument( "--hosts", help= "Things to scan separated by a space. DO NOT USE QUOTES OR COMMAS", nargs="+", ) self.options.add_argument("--hosts_file", help="File containing hosts") self.options.add_argument( "-i", "--hosts_database", help="Use unscanned hosts from the database", action="store_true", ) self.options.add_argument("--rescan", help="Overwrite files without asking", action="store_true") self.options.add_argument( "--filename", help="Output filename. By default will use the current timestamp.", ) self.options.set_defaults(timeout=None) def get_targets(self, args): targets = [] if args.hosts: if type(args.hosts) == list: targets += args.hosts else: targets += [args.hosts] if args.hosts_database: if args.rescan: targets += [ h.ip_address for h in self.IPAddress.all(scope_type="active") ] targets += [h.cidr for h in self.ScopeCIDR.all()] else: targets += [ h.ip_address for h in self.IPAddress.all(tool=self.name, scope_type="active") ] targets += [h.cidr for h in self.ScopeCIDR.all(tool=self.name)] if args.hosts_file: targets += [ l for l in open(args.hosts_file).read().split("\n") if l ] # Here we should deduplicate the targets, and ensure that we don't have IPs listed that also exist inside CIDRs data = [] for t in targets: ips = [str(i) for i in list(IPNetwork(t))] data += ips _, file_name = tempfile.mkstemp() open(file_name, "w").write("\n".join(list(set(data)))) if args.output_path[0] == "/": self.path = os.path.join(self.base_config["PROJECT"]["base_path"], args.output_path[1:]) else: self.path = os.path.join(self.base_config["PROJECT"]["base_path"], args.output_path) if not os.path.exists(self.path): os.makedirs(self.path) if args.filename: output_path = os.path.join(self.path, args.filename) else: output_path = os.path.join( self.path, "nmap-scan-%s.xml" % datetime.datetime.now().strftime("%Y.%m.%d-%H.%M.%S"), ) return [{"target": file_name, "output": output_path}] def build_cmd(self, args): command = "sudo " + self.binary + " -oX {output} -iL {target} " if args.tool_args: command += args.tool_args return command def process_output(self, cmds): self.import_nmap(cmds[0]["output"]) os.unlink(cmds[0]["target"]) def parseHeaders(self, httpHeaders): bsHeaders = [ "Pragma", "Expires", "Date", "Transfer-Encoding", "Connection", "X-Content-Type-Options", "Cache-Control", "X-Frame-Options", "Content-Type", "Content-Length", "(Request type", ] keepHeaders = {} for i in range(0, len(httpHeaders)): if httpHeaders[i].strip() != "" and httpHeaders[i].split( ":")[0].strip() not in " ".join(bsHeaders): hName = httpHeaders[i].split(":")[0].strip() hValue = "".join(httpHeaders[i].split(":")[1:]).strip() keepHeaders[hName] = hValue if keepHeaders == {}: keepHeaders = "" return keepHeaders def import_nmap( self, filename): # domains={}, ips={}, rejects=[] == temp while no db nFile = filename try: tree = ET.parse(nFile) root = tree.getroot() hosts = root.findall("host") except: print(nFile + " 
doesn't exist somehow...skipping") return tmpNames = [] tmpIPs = ( {} ) # tmpIps = {'127.0.0.1':['domain.com']} -- not really used; decided to just let nslookup take care of IP info skip = [] for host in hosts: hostIP = host.find("address").get("addr") created, ip = self.IPAddress.find_or_create(ip_address=hostIP) for hostname in host.findall("hostnames/hostname"): hostname = hostname.get("name") hostname = hostname.lower().replace("www.", "") reHostname = re.search( r"\d{1,3}\-\d{1,3}\-\d{1,3}\-\d{1,3}", hostname) # attempt to not get PTR record if not reHostname: created, domain = self.Domain.find_or_create( domain=hostname) if ip not in domain.ip_addresses: domain.ip_addresses.append(ip) domain.save() for port in host.findall("ports/port"): if port.find("state").get("state"): portState = port.find("state").get("state") hostPort = port.get("portid") portProto = port.get("protocol") created, db_port = self.Port.find_or_create( port_number=hostPort, status=portState, proto=portProto, ip_address=ip, ) if port.find("service") != None: portName = port.find("service").get("name") if portName == "http" and hostPort == "443": portName = "https" else: portName = "Unknown" if created: db_port.service_name = portName info = db_port.info if not info: info = {} for script in port.findall( "script"): # just getting commonName from cert if script.get("id") == "ssl-cert": db_port.cert = script.get("output") cert_domains = self.get_domains_from_cert( script.get("output")) for hostname in cert_domains: hostname = hostname.lower().replace("www.", "") created, domain = self.Domain.find_or_create( domain=hostname) if created: print("New domain found: %s" % hostname) elif script.get("id") == "vulners": print( "Gathering vuln info for {} : {}/{}\n".format( hostIP, portProto, hostPort)) self.parseVulners(script.get("output"), db_port) elif script.get("id") == "banner": info["banner"] = script.get("output") elif script.get("id") == "http-headers": httpHeaders = script.get("output") httpHeaders = httpHeaders.strip().split("\n") keepHeaders = self.parseHeaders(httpHeaders) info["http-headers"] = keepHeaders elif script.get("id") == "http-auth": info["http-auth"] = script.get("output") elif script.get("id") == "http-title": info["http-title"] = script.get("output") db_port.info = info db_port.save() self.IPAddress.commit() def parseVulners(self, scriptOutput, db_port): urls = re.findall("(https://vulners.com/cve/CVE-\d*-\d*)", scriptOutput) for url in urls: vuln_refs = [] exploitable = False cve = url.split("/cve/")[1] vulners = requests.get("https://vulners.com/cve/%s" % cve).text exploitdb = re.findall( "https://www.exploit-db.com/exploits/\d{,7}", vulners) for edb in exploitdb: exploitable = True if edb.split("/exploits/")[1] not in vuln_refs: vuln_refs.append(edb.split("/exploits/")[1]) if not self.CVE.find(name=cve): # print "Gathering CVE info for", cve try: res = json.loads( requests.get("http://cve.circl.lu/api/cve/%s" % cve).text) cveDescription = res["summary"] cvss = float(res["cvss"]) findingName = res["oval"][0]["title"] if int(cvss) <= 3: severity = 1 elif (int(cvss) / 2) == 5: severity = 4 else: severity = int(cvss) / 2 if not self.Vulnerability.find(name=findingName): # print "Creating", findingName created, db_vuln = self.Vulnerability.find_or_create( name=findingName, severity=severity, description=cveDescription, ) db_vuln.ports.append(db_port) db_vuln.exploitable = exploitable if vuln_refs: db_vuln.exploit_reference = {"edb-id": vuln_refs} db_vuln.save() else: # print "modifying",findingName 
db_vuln = self.Vulnerability.find(name=findingName) db_vuln.ports.append(db_port) db_vuln.exploitable = exploitable if vuln_refs: db_vuln.exploitable = exploitable if db_vuln.exploit_reference is not None: if "edb-id" in db_vuln.exploit_reference: for ref in vuln_refs: if (ref not in db_vuln. exploit_reference["edb-id"]): db_vuln.exploit_reference[ "edb-id"].append(ref) else: db_vuln.exploit_reference[ "edb-id"] = vuln_refs else: db_vuln.exploit_reference = { "edb-id": vuln_refs } db_vuln.save() if not self.CVE.find(name=cve): created, db_cve = self.CVE.find_or_create( name=cve, description=cveDescription, temporal_score=cvss) db_cve.vulnerabilities.append(db_vuln) db_cve.save() else: db_cve = self.CVE.find(name=cve) db_cve.vulnerabilities.append(db_vuln) db_cve.save() self.Vulnerability.commit() self.CVE.commit() except: print( "something went wrong with the vuln/cve info gathering" ) if vulners: print( "Vulners report was found but no exploit-db was discovered" ) # "Affected vulners items" # print vulners print("Affected CVE") print(cve) pass else: db_cve = self.CVE.find(name=cve) for db_vulns in db_cve.vulnerabilities: if db_port not in db_vulns.ports: db_vulns.ports.append(db_port) return def get_domains_from_cert(self, cert): # Shamelessly lifted regex from stack overflow regex = r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}" domains = list( set([d for d in re.findall(regex, cert) if "*" not in d])) return domains
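# A quick sketch of get_domains_from_cert() above: pull hostname-looking strings out of
# the ssl-cert script output and drop wildcard entries. The cert text is a fabricated
# example, not real nmap output.
import re

cert = "Subject: commonName=example.com\nSubject Alternative Name: DNS:www.example.com, DNS:*.dev.example.com"
regex = r"(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}"
domains = list(set(d for d in re.findall(regex, cert) if "*" not in d))
print(domains)  # e.g. ['example.com', 'www.example.com', 'dev.example.com']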
class Module(ModuleTemplate):
    '''
    The Shodan module will either iterate through Shodan search results from
    net:<cidr> for all scoped CIDRs, or a custom search query. The resulting IPs
    and ports will be added to the database, along with a dictionary object of
    the API results.
    '''

    name = "ShodanImport"

    def __init__(self, db):
        self.db = db
        self.Port = PortRepository(db, self.name)
        self.IPAddress = IPRepository(db, self.name)
        self.ScopeCidr = ScopeCIDRRepository(db, self.name)

    def set_options(self):
        super(Module, self).set_options()

        self.options.add_argument('-k', '--api_key', help='API Key for accessing Shodan')
        self.options.add_argument('-s', '--search', help="Custom search string")
        self.options.add_argument('-i', '--import_db', help="Import scoped IPs from the database", action="store_true")
        self.options.add_argument('--rescan', help="Rescan CIDRs already processed", action="store_true")

    def run(self, args):
        if not args.api_key:
            display_error("You must supply an API key to use shodan!")
            return

        if args.search:
            ranges = [args.search]

        if args.import_db:
            ranges = []
            if args.rescan:
                ranges += ["net:{}".format(c.cidr) for c in self.ScopeCidr.all()]
            else:
                ranges += ["net:{}".format(c.cidr) for c in self.ScopeCidr.all(tool=self.name)]

        api = shodan_api.Shodan(args.api_key)

        for r in ranges:
            time.sleep(1)
            # pdb.set_trace()
            results = api.search(r)
            display("{} results found for: {}".format(results['total'], r))

            for res in results['matches']:
                ip_address_str = res['ip_str']
                port_str = res['port']
                transport = res['transport']

                display("Processing IP: {} Port: {}/{}".format(ip_address_str, port_str, transport))

                created, IP = self.IPAddress.find_or_create(ip_address=ip_address_str)
                created, port = self.Port.find_or_create(ip_address=IP, port_number=port_str, proto=transport)

                port.meta['shodan_data'] = res
                port.save()

        self.IPAddress.commit()
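# A hedged usage sketch of the search loop above using the official shodan client
# (imported in the module as shodan_api). The API key and CIDR are placeholders; a real
# key is required, and net: queries may consume query credits.
import time
import shodan

api = shodan.Shodan("YOUR_API_KEY")
for query in ["net:198.51.100.0/24"]:
    time.sleep(1)  # simple rate limiting, as in run()
    results = api.search(query)
    print("{} results found for: {}".format(results["total"], query))
    for match in results["matches"]:
        print(match["ip_str"], match["port"], match["transport"])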
def __init__(self, db):
    self.IPAddress = IPRepository(db)
    self.Domains = DomainRepository(db)
    self.BaseDomains = BaseDomainRepository(db)
    self.CIDRs = CIDRRepository(db)
def __init__(self, db):
    self.Domain = DomainRepository(db)
    self.IPAddress = IPRepository(db)
    self.CIDR = CIDRRepository(db)
class Module(ModuleTemplate): name = "Nmap" def __init__(self, db): self.db = db self.BaseDomain = BaseDomainRepository(db, self.name) self.Domain = DomainRepository(db, self.name) self.IPAddress = IPRepository(db, self.name) self.Port = PortRepository(db, self.name) self.Vulnerability = VulnRepository(db, self.name) self.CVE = CVERepository(db, self.name) self.ScopeCIDR = ScopeCIDRRepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument( '--hosts', help= "Things to scan separated by a space. DO NOT USE QUOTES OR COMMAS", nargs='+') self.options.add_argument('--hosts_file', help="File containing hosts") self.options.add_argument('--hosts_database', help="Use unscanned hosts from the database", action="store_true") self.options.add_argument('--import_file', help="Import nmap XML file") self.options.add_argument('-A', help="OS and service info", action="store_true") self.options.add_argument( '-o', '--output_path', help="Relative directory to store Nmap XML output name", default="nmap") self.options.add_argument('-nf', '--nFile', help="Nmap XML output name") self.options.add_argument( '-T', '--timing', help="Set timing template (higher is faster)", default="4", type=str) self.options.add_argument( '--scripts', help="Nmap scripts", default= 'ssl-cert,http-headers,http-methods,http-auth,http-title,http-robots.txt,banner' ) self.options.add_argument( '-p', help="Comma separate ports to scan", default= "21,22,23,25,80,110,443,467,587,8000,8080,8081,8082,8443,8008,1099,5005,9080,8880,8887,7001,7002,16200" ) self.options.add_argument('-Pn', help="Disable ping", action="store_true", default=False) self.options.add_argument('-sS', help='Syn scan (default)', action="store_true", default=True) self.options.add_argument('-sT', help='TCP scan', action="store_true", default=False) self.options.add_argument('-sU', help='UDP scan', action="store_true", default=False) self.options.add_argument('-OS', help='Enable OS detection', action="store_true", default=False) self.options.add_argument("--open", help="Only show open ports", action="store_true", default=False) self.options.add_argument("--top_ports", help="Only check X top ports") self.options.add_argument("--force", help="Overwrite files without asking", action="store_true") self.options.add_argument( '--interactive', help="Prompt to store domains not in Base Domains already", default=False, action="store_true") self.options.add_argument( '--internal', help="Store domains not in Base Domains already", action="store_true") def run(self, args): if args.binary: if os.path.exists(args.binary): command = args.binary + " " else: exit("Specified binary doesn't exist. Quitting now") else: if not which.run('nmap'): exit("Nmap is not globally accessible. 
Quitting now") else: command = "nmap " if args.output_path[0] == "/": self.path = os.path.join(self.base_config['PROJECT']['base_path'], args.output_path[1:]) else: self.path = os.path.join(self.base_config['PROJECT']['base_path'], args.output_path) if not os.path.exists(self.path): os.makedirs(self.path) file_name = "" if args.hosts: if type(args.hosts) == list: hosts = args.hosts else: hosts = [args.hosts] _, file_name = tempfile.mkstemp() open(file_name, 'w').write('\n'.join(hosts)) elif args.hosts_database: hosts = [h.ip_address for h in self.IPAddress.all(tool=self.name)] hosts += [h.cidr for h in self.ScopeCIDR.all(tool=self.name)] _, file_name = tempfile.mkstemp() open(file_name, 'w').write('\n'.join(hosts)) elif args.hosts_file: file_name = args.hosts_file if file_name: self.execute_nmap(args, file_name, command) elif args.import_file: self.import_nmap(args.import_file, args) def execute_nmap(self, args, host_file, command): if args.nFile: nFile = os.path.join(self.path, args.nFile) else: nFile = os.path.join(self.path, "nmap-scan.xml") # if type(hosts) == list: # nFile = os.path.join(self.path,"nmaped_"+",".join(hosts).replace("/","_")) # else: # nFile = os.path.join(self.path,"nmaped_"+hosts.replace("/","_").replace(" ",",")) if os.path.isfile(nFile) and not args.force: print(nFile, "exists.") answered = False while answered == False: rerun = raw_input( "Would you like to [r]un nmap again and overwrite the file, [p]arse the file, or change the file [n]ame? " ) if rerun.lower() == 'r': answered = True elif rerun.lower() == 'p': answered = True return nFile elif rerun.lower() == 'n': new = False while new == False: newFile = raw_input("enter a new file name: ") if not os.path.exists(path + newFile): nFile = path + newFile answered = True new = True else: print("That file exists as well") else: "Please enter \'r\' to run nmap, \'p\' to parse the file" if args.sS and args.sT: technique = "-sT " else: technique = "-sS " if args.sU: technique += "-sU " if args.A: technique += "-A " command += technique command += "-T" + str(args.timing) + " " if args.Pn: command += "-Pn " if args.open: command += "--open " if args.top_ports: command += "--top-ports %s " % args.top_ports if args.OS: command += "-O " command += "--script " + args.scripts + " " command += "-p " + args.p + " " command += " -iL %s " % host_file command += "-oX " + nFile if (args.sS and not args.sT) or args.OS: if os.geteuid() != 0: #exit("You need to have root privileges to run a Syn scan and OS detection.\nPlease try again, this time using 'sudo'. 
Exiting.") command = "sudo " + command scan = subprocess.Popen(command, shell=True).wait() self.import_nmap(nFile, args) return nFile def parseHeaders(self, httpHeaders): bsHeaders = [ 'Pragma', 'Expires', 'Date', 'Transfer-Encoding', 'Connection', 'X-Content-Type-Options', 'Cache-Control', 'X-Frame-Options', 'Content-Type', 'Content-Length', '(Request type' ] keepHeaders = {} for i in range(0, len(httpHeaders)): if httpHeaders[i].strip() != '' and httpHeaders[i].split( ":")[0].strip() not in " ".join(bsHeaders): hName = httpHeaders[i].split(":")[0].strip() hValue = "".join(httpHeaders[i].split(":")[1:]).strip() keepHeaders[hName] = hValue if keepHeaders == {}: keepHeaders = "" return keepHeaders def import_nmap(self, filename, args): #domains={}, ips={}, rejects=[] == temp while no db nFile = filename try: tree = ET.parse(nFile) root = tree.getroot() hosts = root.findall("host") except: print(nFile + " doesn't exist somehow...skipping") return domains, ips, rejects #needs to be changed for db tmpNames = [] tmpIPs = { } #tmpIps = {'127.0.0.1':['domain.com']} -- not really used; decided to just let nslookup take care of IP info skip = [] for host in hosts: hostIP = host.find("address").get("addr") created, ip = self.IPAddress.find_or_create(ip_address=hostIP) for hostname in host.findall("hostnames/hostname"): hostname = hostname.get("name") hostname = hostname.lower().replace("www.", "") reHostname = re.search( r"\d{1,3}\-\d{1,3}\-\d{1,3}\-\d{1,3}", hostname) #attempt to not get PTR record if not reHostname and not args.internal: created, domain = self.Domain.find_or_create( domain=hostname) if ip not in domain.ip_addresses: domain.ip_addresses.append(ip) domain.save() elif not reHostname and args.internal: created, domain = self.Domain.find_or_create( domain=hostname) if ip not in domain.ip_addresses: domain.ip_addresses.append(ip) domain.save() #else: # print("IP hostname found? 
%s" % hostname) for port in host.findall("ports/port"): if port.find("state").get("state"): portState = port.find("state").get("state") hostPort = port.get("portid") portProto = port.get("protocol") created, db_port = self.Port.find_or_create( port_number=hostPort, status=portState, proto=portProto, ip_address=ip) if port.find("service") != None: portName = port.find("service").get("name") if portName == "http" and hostPort == "443": portName = "https" else: portName = "Unknown" if created: db_port.service_name = portName info = db_port.info if not info: info = {} for script in port.findall( "script"): #just getting commonName from cert if script.get("id") == "ssl-cert": db_port.cert = script.get('output') cert_domains = self.get_domains_from_cert( script.get('output')) for hostname in cert_domains: hostname = hostname.lower().replace("www.", "") created, domain = self.Domain.find_or_create( domain=hostname) if created: print("New domain found: %s" % hostname) elif script.get("id") == "vulners": print( "Gathering vuln info for {} : {}/{}\n".format( hostIP, portProto, hostPort)) self.parseVulners(script.get("output"), db_port) elif script.get("id") == "banner": info["banner"] = script.get("output") elif script.get("id") == "http-headers": httpHeaders = script.get("output") httpHeaders = httpHeaders.strip().split("\n") keepHeaders = self.parseHeaders(httpHeaders) info["http-headers"] = keepHeaders elif script.get("id") == "http-auth": info['http-auth'] = script.get("output") elif script.get("id") == "http-title": info['http-title'] = script.get("output") db_port.info = info db_port.save() self.IPAddress.commit() def parseVulners(self, scriptOutput, db_port): urls = re.findall('(https://vulners.com/cve/CVE-\d*-\d*)', scriptOutput) for url in urls: vuln_refs = [] exploitable = False cve = url.split("/cve/")[1] vulners = requests.get("https://vulners.com/cve/%s" % cve).text exploitdb = re.findall( 'https://www.exploit-db.com/exploits/\d{,7}', vulners) for edb in exploitdb: exploitable = True if edb.split("/exploits/")[1] not in vuln_refs: vuln_refs.append(edb.split("/exploits/")[1]) if not self.CVE.find(name=cve): #print "Gathering CVE info for", cve try: res = json.loads( requests.get('http://cve.circl.lu/api/cve/%s' % cve).text) cveDescription = res['summary'] cvss = float(res['cvss']) findingName = res['oval'][0]['title'] if int(cvss) <= 3: severity = 1 elif (int(cvss) / 2) == 5: severity = 4 else: severity = int(cvss) / 2 if not self.Vulnerability.find(name=findingName): #print "Creating", findingName created, db_vuln = self.Vulnerability.find_or_create( name=findingName, severity=severity, description=cveDescription) db_vuln.ports.append(db_port) db_vuln.exploitable = exploitable if vuln_refs: db_vuln.exploit_reference = {'edb-id': vuln_refs} db_vuln.save() else: #print "modifying",findingName db_vuln = self.Vulnerability.find(name=findingName) db_vuln.ports.append(db_port) db_vuln.exploitable = exploitable if vuln_refs: db_vuln.exploitable = exploitable if db_vuln.exploit_reference is not None: if 'edb-id' in db_vuln.exploit_reference: for ref in vuln_refs: if ref not in db_vuln.exploit_reference[ 'edb-id']: db_vuln.exploit_reference[ 'edb-id'].append(ref) else: db_vuln.exploit_reference[ 'edb-id'] = vuln_refs else: db_vuln.exploit_reference = { 'edb-id': vuln_refs } db_vuln.save() if not self.CVE.find(name=cve): created, db_cve = self.CVE.find_or_create( name=cve, description=cveDescription, temporal_score=cvss) db_cve.vulnerabilities.append(db_vuln) db_cve.save() else: db_cve = 
self.CVE.find(name=cve) db_cve.vulnerabilities.append(db_vuln) db_cve.save() self.Vulnerability.commit() self.CVE.commit() except: print( "something went wrong with the vuln/cve info gathering" ) if vulners: print( "Vulners report was found but no exploit-db was discovered" ) #"Affected vulners items" #print vulners print("Affected CVE") print(cve) pass else: db_cve = self.CVE.find(name=cve) for db_vulns in db_cve.vulnerabilities: if db_port not in db_vulns.ports: db_vulns.ports.append(db_port) return def get_domains_from_cert(self, cert): # Shamelessly lifted regex from stack overflow regex = r'(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}' domains = list( set([d for d in re.findall(regex, cert) if '*' not in d])) return domains
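# A standalone sketch of the CVSS-to-severity mapping used in parseVulners() above.
# The helper name is illustrative only. The original is Python 2, where int/2 is
# integer division; // keeps that behaviour in Python 3. Sample scores are made up.
def cvss_to_severity(cvss):
    score = int(cvss)
    if score <= 3:
        return 1            # low severity
    if score // 2 == 5:     # a CVSS of 10 maps to 4 rather than 5
        return 4
    return score // 2

for sample in (2.6, 5.0, 7.5, 10.0):
    print(sample, cvss_to_severity(sample))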
#!/usr/bin/python
from armory import initialize_database
from armory import get_config_options
from database.repositories import (
    BaseDomainRepository,
    DomainRepository,
    IPRepository,
    CIDRRepository,
    UserRepository,
    CredRepository,
    VulnRepository,
    PortRepository,
    UrlRepository,
    ScopeCIDRRepository,
)

config = get_config_options()
db = initialize_database(config)

Domains = DomainRepository(db, "Shell Client")
BaseDomains = BaseDomainRepository(db, "Shell Client")
IPAddresses = IPRepository(db, "Shell Client")
CIDRs = CIDRRepository(db, "Shell Client")
Users = UserRepository(db, "Shell Client")
Creds = CredRepository(db, "Shell Client")
Vulns = VulnRepository(db, "Shell Client")
Ports = PortRepository(db, "Shell Client")
Urls = UrlRepository(db, "Shell Client")
ScopeCIDRs = ScopeCIDRRepository(db, "Shell Client")

print("Make sure to use this script with ipython and -i")
print("    ipython -i shell.py")
print("Available database modules: Domains, BaseDomains, IPAddresses,")
print("    CIDRs, Users, Creds, Vulns, Services, Ports, Urls, ScopeCIDRs")
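# Example interactive usage, as suggested by the printed hints above. The attribute
# names below (domain, ip_address) follow the repository usage elsewhere in this
# codebase; the session is illustrative.
#   $ ipython -i shell.py
#   In [1]: [d.domain for d in Domains.all()]
#   In [2]: [i.ip_address for i in IPAddresses.all()]
#   In [3]: Domains.commit()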
class Module(ModuleTemplate): ''' Ingests domains and IPs. Domains get ip info and cidr info, and IPs get CIDR info. ''' name = "Ingestor" def __init__(self, db): self.db = db self.BaseDomain = BaseDomainRepository(db, self.name) self.Domain = DomainRepository(db,self.name) self.IPAddress = IPRepository(db, self.name) self.CIDR = CIDRRepository(db, self.name) self.ScopeCIDR = ScopeCIDRRepository(db, self.name) def set_options(self): super(Module, self).set_options() self.options.add_argument('-d', '--import_domains', help="Either domain to import or file containing domains to import. One per line") self.options.add_argument('-i', '--import_ips', help="Either IP/range to import or file containing IPs and ranges, one per line.") self.options.add_argument('-a', '--active', help='Set scoping on imported data as active', action="store_true") self.options.add_argument('-p', '--passive', help='Set scoping on imported data as passive', action="store_true") self.options.add_argument('-sc', '--scope_cidrs', help='Cycle through out of scope networks and decide if you want to add them in scope', action="store_true") self.options.add_argument('-sb', '--scope_base_domains', help='Cycle through out of scope base domains and decide if you want to add them in scope', action="store_true") self.options.add_argument('-Ii', '--import_database_ips', help='Import IPs from database', action="store_true") self.options.add_argument('--force', help="Force processing again, even if already processed", action="store_true") def run(self, args): self.in_scope = args.active self.passive_scope = args.passive if args.import_ips: try: ips = open(args.import_ips) for line in ips: if line.strip(): if '/' in line or '-' in line: self.process_cidr(line) else: self.process_ip(line.strip(), force_scope=True) self.Domain.commit() except IOError: if '/' in args.import_ips or '-' in args.import_ips: self.process_cidr(args.import_ips) else: self.process_ip(args.import_ips.strip(), force_scope=True) self.Domain.commit() if args.import_domains: try: domains = open(args.import_domains) for line in domains: if line.strip(): self.process_domain(line.strip()) self.Domain.commit() except IOError: self.process_domain(args.import_domains.strip()) self.Domain.commit() if args.scope_base_domains: base_domains = self.BaseDomain.all(in_scope=False, passive_scope=False) for bd in base_domains: self.reclassify_domain(bd) self.BaseDomain.commit() def get_domain_ips(self, domain): ips = [] try: answers = dns.resolver.query(domain, 'A') for a in answers: ips.append(a.address) return ips except: return [] def process_domain(self, domain_str): created, domain = self.Domain.find_or_create(only_tool=True, domain=domain_str, in_scope=self.in_scope, passive_scope=self.passive_scope) if not created: if domain.in_scope != self.in_scope or domain.passive_scope != self.passive_scope: display("Domain %s already exists with different scoping. Updating to Active Scope: %s Passive Scope: %s" % (domain_str, self.in_scope, self.passive_scope)) domain.in_scope = self.in_scope domain.passive_scope = self.passive_scope domain.update() def process_ip(self, ip_str, force_scope=True): created, ip = self.IPAddress.find_or_create(only_tool=True, ip_address=ip_str, in_scope=in_scope, passive_scope=self.passive_scope) if not created: if ip.in_scope != self.in_scope or ip.passive_scope != self.passive_scope: display("IP %s already exists with different scoping. 
Updating to Active Scope: %s Passive Scope: %s" % (ip_str, self.in_scope, self.passive_scope)) ip.in_scope = self.in_scope ip.passive_scope = self.passive_scope ip.update() return ip def process_cidr(self, line): display("Processing %s" % line) if '/' in line: created, cidr = self.ScopeCIDR.find_or_create(cidr=line.strip()) if created: display_new("Adding %s to scoped CIDRs in database" % line.strip()) cidr.in_scope = True cidr.update() elif '-' in line: start_ip, end_ip = line.strip().replace(' ', '').split('-') if '.' not in end_ip: end_ip = '.'.join(start_ip.split('.')[:3] + [end_ip]) cidrs = iprange_to_cidrs(start_ip, end_ip) for c in cidrs: created, cidr = self.ScopeCIDR.find_or_create(cidr=str(c)) if created: display_new("Adding %s to scoped CIDRs in database" % line.strip()) cidr.in_scope = True cidr.update() def scope_ips(self): IPAddresses = self.IPAddress.all() def reclassify_domain(self, bd): if bd.meta.get('whois', False): display_new("Whois data found for {}".format(bd.domain)) print(bd.meta['whois']) res = raw_input("Should this domain be scoped (A)ctive, (P)assive, or (N)ot? [a/p/N] ") if res.lower() == 'a': bd.in_scope = True bd.passive_scope = True elif res.lower() == 'p': bd.in_scope = False bd.passive_scope = True else: bd.in_scope = False bd.passive_scope = False bd.save() else: display_error("Unfortunately, there is no whois information for {}. Please populate it using the Whois module".format(bd.domain))
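# A minimal sketch of the get_domain_ips() helper above, using dnspython. Note that
# dns.resolver.query() is deprecated in newer dnspython releases in favour of resolve();
# the broad except mirrors the original and simply returns an empty list on failure.
import dns.resolver

def get_domain_ips(domain):
    try:
        return [a.address for a in dns.resolver.query(domain, "A")]
    except Exception:
        return []

print(get_domain_ips("example.com"))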
class Module(ToolTemplate):

    name = "Gowitness"
    binary_name = "gowitness"

    def __init__(self, db):
        self.db = db
        self.IPAddress = IPRepository(db, self.name)

    def set_options(self):
        super(Module, self).set_options()

        self.options.add_argument('-i', '--import_database', help="Import URLs from the database", action="store_true")
        self.options.add_argument('-f', '--import_file', help="Import URLs from file")
        self.options.add_argument('--group_size', help="How many hosts per group (default 250)", type=int, default=250)
        self.options.add_argument('--rescan', help="Rerun gowitness on systems that have already been processed.", action="store_true")

    def get_targets(self, args):
        targets = []

        if args.import_file:
            # The option is registered as --import_file, so read args.import_file here.
            targets += [t for t in open(args.import_file).read().split('\n') if t]

        if args.import_database:
            if args.rescan:
                targets += get_urls.run(self.db, scope_type="active")
            else:
                targets += get_urls.run(self.db, scope_type="active", tool=self.name)

        if args.output_path[0] == "/":
            self.path = os.path.join(self.base_config['PROJECT']['base_path'], args.output_path[1:], args.output_path[1:] + "_{}")
        else:
            self.path = os.path.join(self.base_config['PROJECT']['base_path'], args.output_path, args.output_path + "_{}")

        res = []
        i = 0
        for url_chunk in self.chunks(targets, args.group_size):
            i += 1
            _, file_name = tempfile.mkstemp()
            open(file_name, 'w').write('\n'.join(url_chunk))

            if not os.path.exists(self.path.format(i)):
                os.makedirs(self.path.format(i))

            res.append({'target': file_name, 'output': self.path.format(i)})

        return res

    def build_cmd(self, args):

        command = self.binary + " file -D {output}/gowitness.db -d {output} -s {target} "

        if args.tool_args:
            command += args.tool_args

        return command

    def process_output(self, cmds):
        '''
        Not really any output to process with this module, but you need to cwd into the
        output directory to make database generation work, so I'll do that here.
        '''
        cwd = os.getcwd()

        for cmd in cmds:
            output = cmd['output']
            cmd = [self.binary, "generate"]

            os.chdir(output)
            Popen(cmd, shell=False).wait()
            os.chdir(cwd)

        self.IPAddress.commit()

    def chunks(self, chunkable, n):
        """ Yield successive n-sized chunks from the target list. """
        for i in xrange(0, len(chunkable), n):
            yield chunkable[i:i + n]
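# A small, hedged illustration of how the {output} and {target} placeholders in
# build_cmd() above get filled in from a target dict produced by get_targets(). The
# paths below are made-up examples, not values from a real run.
binary = "gowitness"
command = binary + " file -D {output}/gowitness.db -d {output} -s {target} "
target = {"target": "/tmp/urls_1.txt", "output": "/assessments/acme/gowitness/gowitness_1"}
print(command.format(output=target["output"], target=target["target"]))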
def __init__(self, db):
    self.db = db
    self.IPAddress = IPRepository(db, self.name)
    self.Domain = DomainRepository(db, self.name)
    self.Port = PortRepository(db, self.name)
    self.Url = UrlRepository(db, self.name)
class Module(ModuleTemplate):
    """
    The Shodan module will either iterate through Shodan search results from
    net:<cidr> for all scoped CIDRs, or through a custom search query. The
    resulting IPs and ports are added to the database, along with a dictionary
    object of the API results.
    """

    name = "ShodanImport"

    def __init__(self, db):
        self.db = db
        self.Port = PortRepository(db, self.name)
        self.IPAddress = IPRepository(db, self.name)
        self.ScopeCidr = ScopeCIDRRepository(db, self.name)

    def set_options(self):
        super(Module, self).set_options()

        self.options.add_argument("-k", "--api_key", help="API Key for accessing Shodan")
        self.options.add_argument(
            "-s", "--search", help="Custom search string (will use credits)"
        )
        self.options.add_argument(
            "-i", "--import_db", help="Import scoped IPs from the database", action="store_true"
        )
        self.options.add_argument(
            "--rescan", help="Rescan CIDRs already processed", action="store_true"
        )
        self.options.add_argument(
            "--fast", help="Use 'net' filter. (May use credits)", action="store_true"
        )
        self.options.add_argument(
            "--cidr_only",
            help="Import only CIDRs from database (not individual IPs)",
            action="store_true",
        )

    def run(self, args):
        if not args.api_key:
            display_error("You must supply an API key to use shodan!")
            return

        ranges = []

        if args.search:
            ranges.append(args.search)

        if args.import_db:
            if args.rescan:
                if args.fast:
                    ranges += ["net:{}".format(c.cidr) for c in self.ScopeCidr.all()]
                else:
                    cidrs = [c.cidr for c in self.ScopeCidr.all()]
                    for c in cidrs:
                        ranges += [str(i) for i in IPNetwork(c)]
                if not args.cidr_only:
                    ranges += [
                        "{}".format(i.ip_address)
                        for i in self.IPAddress.all(scope_type="active")
                    ]
            else:
                if args.fast:
                    ranges += [
                        "net:{}".format(c.cidr) for c in self.ScopeCidr.all(tool=self.name)
                    ]
                else:
                    cidrs = [c.cidr for c in self.ScopeCidr.all(tool=self.name)]
                    for c in cidrs:
                        ranges += [str(i) for i in IPNetwork(c)]
                if not args.cidr_only:
                    ranges += [
                        "{}".format(i.ip_address)
                        for i in self.IPAddress.all(scope_type="active", tool=self.name)
                    ]

        api_host_url = "https://api.shodan.io/shodan/host/{}?key={}"
        api_search_url = "https://api.shodan.io/shodan/host/search?key={}&query={}&page={}"

        for r in ranges:
            time.sleep(1)

            if ":" in r:
                display("Doing Shodan search: {}".format(r))
                try:
                    results = json.loads(
                        requests.get(api_search_url.format(args.api_key, r, 1)).text
                    )
                    if results.get("error") and "request timed out" in results["error"]:
                        display_warning(
                            "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                        )
                        time.sleep(5)
                        results = json.loads(
                            requests.get(api_search_url.format(args.api_key, r, 1)).text
                        )
                except Exception as e:
                    display_error("Something went wrong: {}".format(e))
                    continue

                total = len(results["matches"])
                matches = []
                i = 1

                while total > 0:
                    display("Adding {} results from page {}".format(total, i))
                    matches += results["matches"]
                    i += 1
                    try:
                        time.sleep(1)
                        results = json.loads(
                            requests.get(api_search_url.format(args.api_key, r, i)).text
                        )
                        if results.get("error") and "request timed out" in results["error"]:
                            display_warning(
                                "Timeout occurred on Shodan's side.. trying again in 5 seconds."
                            )
                            time.sleep(5)
                            results = json.loads(
                                requests.get(api_search_url.format(args.api_key, r, i)).text
                            )
                        total = len(results["matches"])
                    except Exception as e:
                        display_error("Something went wrong: {}".format(e))
                        total = 0

                for res in matches:
                    ip_str = res["ip_str"]
                    port_str = res["port"]
                    transport = res["transport"]

                    display(
                        "Processing IP: {} Port: {}/{}".format(ip_str, port_str, transport)
                    )

                    created, IP = self.IPAddress.find_or_create(ip_address=ip_str)
                    IP.meta["shodan_data"] = results

                    created, port = self.Port.find_or_create(
                        ip_address=IP, port_number=port_str, proto=transport
                    )
                    if created:
                        if res.get("ssl", False):
                            svc = "https"
                        elif res.get("http", False):
                            svc = "http"
                        else:
                            svc = ""
                        port.service_name = svc
                    port.meta["shodan_data"] = res
                    port.save()

            else:
                try:
                    results = json.loads(
                        requests.get(api_host_url.format(r, args.api_key)).text
                    )
                except Exception as e:
                    display_error("Something went wrong: {}".format(e))
                    continue

                if results.get("data", False):
                    display("{} results found for: {}".format(len(results["data"]), r))

                    for res in results["data"]:
                        ip_str = res["ip_str"]
                        port_str = res["port"]
                        transport = res["transport"]

                        display(
                            "Processing IP: {} Port: {}/{}".format(ip_str, port_str, transport)
                        )

                        created, IP = self.IPAddress.find_or_create(ip_address=ip_str)
                        IP.meta["shodan_data"] = results

                        created, port = self.Port.find_or_create(
                            ip_address=IP, port_number=port_str, proto=transport
                        )
                        if created:
                            if res.get("ssl", False):
                                svc = "https"
                            elif res.get("http", False):
                                svc = "http"
                            else:
                                svc = ""
                            port.service_name = svc
                        port.meta["shodan_data"] = res
                        port.save()

        self.IPAddress.commit()
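# The paging loop in ShodanImport keeps requesting result pages until a page
# comes back empty. The standalone sketch below shows the same idea against the
# documented /shodan/host/search endpoint using only requests; the API key and
# query are placeholders, and error handling is reduced to the essentials.

import json
import time

import requests

SHODAN_SEARCH = "https://api.shodan.io/shodan/host/search?key={}&query={}&page={}"


def search_all_pages(api_key, query, delay=1):
    """Collect the matches from every page of a Shodan search."""
    matches = []
    page = 1
    while True:
        time.sleep(delay)  # simple spacing between requests
        resp = json.loads(requests.get(SHODAN_SEARCH.format(api_key, query, page)).text)
        page_matches = resp.get("matches", [])
        if not page_matches:
            break
        matches += page_matches
        page += 1
    return matches

# Example (placeholder key and network):
# for m in search_all_pages("YOUR_API_KEY", "net:192.0.2.0/24"):
#     print(m["ip_str"], m["port"], m["transport"])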
class Module(ModuleTemplate):

    name = "Nessus"

    def __init__(self, db):
        self.db = db
        self.BaseDomain = BaseDomainRepository(db, self.name)
        self.Domain = DomainRepository(db, self.name)
        self.IPAddress = IPRepository(db, self.name)
        self.Port = PortRepository(db, self.name)
        self.Vulnerability = VulnRepository(db, self.name)
        self.CVE = CVERepository(db, self.name)
        self.ScopeCIDR = ScopeCIDRRepository(db, self.name)

    def set_options(self):
        super(Module, self).set_options()

        self.options.add_argument(
            "--import_file",
            help="Import one or more Nessus files separated by a space. DO NOT USE QUOTES OR COMMAS",
            nargs="+",
        )
        self.options.add_argument(
            "--interactive",
            help="Prompt to store domains not in Base Domains already",
            action="store_true",
        )
        self.options.add_argument(
            "--internal",
            help="Store domains not in Base Domains already",
            action="store_true",
        )
        self.options.add_argument(
            "--launch",
            help="Launch Nessus scan using Actively scoped IPs and domains in the database",
            action="store_true",
        )
        self.options.add_argument(
            "--job_name", help="Job name inside Nessus", default="Armory Job"
        )
        self.options.add_argument("--username", help="Nessus Username")
        self.options.add_argument("--password", help="Nessus Password")
        self.options.add_argument(
            "--host", help="Hostname:Port of Nessus web interface (ie localhost:8835)"
        )
        self.options.add_argument("--uuid", help="UUID of Nessus Policy to run")
        self.options.add_argument("--policy_id", help="Policy ID to use")
        self.options.add_argument("--folder_id", help="ID for folder to store job in")
        self.options.add_argument(
            "--download",
            help="Download Nessus job from server and import",
            action="store_true",
        )
        self.options.add_argument("--job_id", help="Job ID to download and import")
        self.options.add_argument(
            "--output_path",
            help="Path to store downloaded file (Default: Nessus)",
            default=self.name,
        )

    def run(self, args):
        if args.import_file:
            for nFile in args.import_file:
                self.process_data(nFile, args)

        elif args.launch:
            if (
                not args.username
                or not args.password
                or not args.host
                or not args.uuid
                or not args.policy_id
                or not args.folder_id
            ):
                display_error(
                    "You must supply a username, password, host, uuid, policy_id, and folder_id to launch a Nessus job"
                )
            else:
                n = NessusRequest(
                    args.username,
                    args.password,
                    args.host,
                    uuid=args.uuid,
                    policy_id=args.policy_id,
                    folder_id=args.folder_id,
                )

                ips = [
                    ip.ip_address
                    for ip in self.IPAddress.all(scope_type="active", tool=self.name)
                ]
                cidrs = [cidr.cidr for cidr in self.ScopeCIDR.all(tool=self.name)]
                domains = [
                    domain.domain
                    for domain in self.Domain.all(scope_type="active", tool=self.name)
                ]
                targets = ", ".join(merge_ranges(ips + cidrs) + domains)

                res = n.launch_job(targets, args.job_name)
                display("New Nessus job launched with ID {}".format(res))
                display(
                    "Remember this number! You'll need it to download the job once it is done."
                )

        elif args.download:
            if not args.username or not args.password or not args.host or not args.job_id:
                display_error(
                    "You must supply host, username, password and job_id to download a report to import"
                )
            else:
                n = NessusRequest(args.username, args.password, args.host)

                if args.output_path[0] == "/":
                    output_path = os.path.join(
                        self.base_config["PROJECT"]["base_path"], args.output_path[1:]
                    )
                else:
                    output_path = os.path.join(
                        self.base_config["PROJECT"]["base_path"], args.output_path
                    )
                if not os.path.exists(output_path):
                    os.makedirs(output_path)
                output_path = os.path.join(
                    output_path, "Nessus-export-{}.nessus".format(int(time.time()))
                )
                n.export_file(args.job_id, output_path)
                self.process_data(output_path, args)

    def nessCheckPlugin(self, tag):

        nessPlugins = [
            "10759",
            "77026",
            "20089",
            "56984",
            "71049",
            "70658",
            "40984",
            "11411",
        ]

        pluginID = tag.get("pluginID")

        if pluginID in nessPlugins:

            if pluginID == "10759":  # Web Server HTTP Header Internal IP Disclosure
                if tag.find("plugin_output") is not None:
                    return tag.find("plugin_output").text.split("\n\n")[3].strip()
                else:
                    return ""

            if pluginID == "77026":  # Exchange Client Access Server Info Disclosure (IP address)
                if tag.find("plugin_output") is not None:
                    return tag.find("plugin_output").text.split("\n\n")[3].strip()
                else:
                    return ""

            if pluginID == "71049" or pluginID == "70658":  # SSH Weak MAC & CBC Algorithms Enabled
                output = ""
                if tag.find("plugin_output") is not None:
                    tmp = tag.find("plugin_output").text.split(":")[1]
                    tmp = tmp.split("\n\n")[1].replace(" ", "")
                    output = tmp.split("\n")
                    return ", ".join(output)

            if pluginID == "56984":  # SSL / TLS Versions Supported
                if tag.find("plugin_output") is not None:
                    tmp = (
                        tag.find("plugin_output").text.split("This port supports ")[1].strip()
                    )
                    tmp = tmp.split("/")
                    bad = []
                    for i in tmp:
                        if "SSLv" in i:
                            bad.append(i)
                        elif "TLSv1.0" in i:
                            bad.append(i)
                    if bad:
                        return ", ".join(bad).rstrip(".")
                    else:
                        return ""
                else:
                    return ""

            if pluginID == "40984":  # Browsable web directories
                if tag.find("plugin_output") is not None:
                    tmp = (
                        tag.find("plugin_output")
                        .text.split("The following directories are browsable :")[1]
                        .strip()
                    )
                    directories = tmp.split("\n")
                    return "\n".join(directories)

            if pluginID == "11411":  # Backup Files Disclosure
                if tag.find("plugin_output") is not None:
                    urls = []
                    tmp = (
                        tag.find("plugin_output")
                        .text.split("It is possible to read the following backup files :")[1]
                        .strip()
                    )
                    tmpUrls = tmp.split("\n")
                    for url in tmpUrls:
                        if "URL" in url:
                            urls.append(url.split(":")[1].lstrip())
                    if urls:
                        return "\n".join(urls)
                    else:
                        return ""

            if pluginID == "20089":  # F5 cookie
                if tag.find("plugin_output") is not None:
                    f5Output = []
                    cookieVal = []
                    output = tag.find("plugin_output").text.strip().split("\n")
                    for line in output:
                        line = line.split(":")
                        for i, item in enumerate(line):
                            item = item.strip()
                            if "Cookie" in item:
                                line.pop(i)  # drop the "Cookie" label
                                tmp = line.pop(i)
                                cookieVal.append(tmp.strip())
                            else:
                                item = "".join(item)
                                f5Output.append(item)

                    f5Output = " : ".join(f5Output)
                    f5Output = f5Output.replace(" : : ", ", ")
                    f5Output += " [" + ", ".join(cookieVal) + "]"
                    c = 0
                    tmpF5Output = f5Output.split()
                    for i, letter in enumerate(tmpF5Output):
                        if letter == ":":
                            c += 1
                            if (c % 2) == 0:
                                tmpF5Output[i] = " "
                    return "".join(tmpF5Output).replace("[", " [")
                else:
                    return ""
        else:
            return False

    def getVulns(self, ip, ReportHost):
        """Gets vulns and associated services."""
        for tag in ReportHost.iter("ReportItem"):
            exploitable = False
            cves = []
            vuln_refs = {}
            proto = tag.get("protocol")
            port = tag.get("port")
            svc_name = tag.get("svc_name").replace("?", "")

            tmpPort = proto + "/" + port
            if tmpPort.lower() == "tcp/443":
                portName = "https"
            elif tmpPort.lower() == "tcp/80":
                portName = "http"
            elif svc_name == "www":
                plugin_name = tag.get("pluginName")
                if "tls" in plugin_name.lower() or "ssl" in plugin_name.lower():
                    portName = "https"
                else:
                    portName = "http"
            else:
                portName = svc_name

            if "general" not in portName:
                created, db_port = self.Port.find_or_create(
                    port_number=port, status="open", proto=proto, ip_address_id=ip.id
                )

                if db_port.service_name == "http":
                    if portName == "https":
                        db_port.service_name = portName
                elif db_port.service_name == "https":
                    pass
                else:
                    db_port.service_name = portName
                db_port.save()

                if tag.get("pluginID") == "56984":
                    severity = 1
                elif tag.get("pluginID") == "11411":
                    severity = 3
                else:
                    severity = int(tag.get("severity"))

                findingName = tag.get("pluginName")
                description = tag.find("description").text

                if tag.find("solution") is not None and tag.find("solution").text != "n/a":
                    solution = tag.find("solution").text
                else:
                    solution = "No Remediation From Nessus"

                nessCheck = self.nessCheckPlugin(tag)

                if nessCheck:
                    if not db_port.info:
                        db_port.info = {findingName: nessCheck}
                    else:
                        db_port.info[findingName] = nessCheck
                    db_port.save()

                if tag.find("exploit_available") is not None:
                    exploitable = True

                metasploits = tag.findall("metasploit_name")
                if metasploits:
                    vuln_refs["metasploit"] = []
                    for tmp in metasploits:
                        vuln_refs["metasploit"].append(tmp.text)

                edb_id = tag.findall("edb-id")
                if edb_id:
                    vuln_refs["edb-id"] = []
                    for tmp in edb_id:
                        vuln_refs["edb-id"].append(tmp.text)

                tmpcves = tag.findall("cve")
                for c in tmpcves:
                    if c.text not in cves:
                        cves.append(c.text)

                if not self.Vulnerability.find(name=findingName):
                    created, db_vuln = self.Vulnerability.find_or_create(
                        name=findingName,
                        severity=severity,
                        description=description,
                        remediation=solution,
                    )
                    db_vuln.ports.append(db_port)
                    db_vuln.exploitable = exploitable
                    if exploitable:
                        print("\nexploit available for", findingName)
                        print()
                    if vuln_refs:
                        db_vuln.exploit_reference = vuln_refs
                else:
                    db_vuln = self.Vulnerability.find(name=findingName)
                    db_vuln.ports.append(db_port)
                    db_vuln.exploitable = exploitable
                    if vuln_refs:
                        if db_vuln.exploit_reference is not None:
                            for key in vuln_refs.keys():
                                if key not in db_vuln.exploit_reference.keys():
                                    db_vuln.exploit_reference[key] = vuln_refs[key]
                                else:
                                    for ref in vuln_refs[key]:
                                        if ref not in db_vuln.exploit_reference[key]:
                                            db_vuln.exploit_reference[key].append(ref)
                        else:
                            db_vuln.exploit_reference = vuln_refs

                for cve in cves:
                    if not self.CVE.find(name=cve):
                        try:
                            res = json.loads(
                                requests.get("http://cve.circl.lu/api/cve/%s" % cve).text
                            )
                            cveDescription = res["summary"]
                            cvss = float(res["cvss"])
                        except Exception:
                            cveDescription = None
                            cvss = None

                        if not self.CVE.find(name=cve):
                            created, db_cve = self.CVE.find_or_create(
                                name=cve,
                                description=cveDescription,
                                temporal_score=cvss,
                            )
                            db_cve.vulnerabilities.append(db_vuln)
                        else:
                            db_cve = self.CVE.find(name=cve)
                            if db_cve.description is None and cveDescription is not None:
                                db_cve.description = cveDescription
                            if db_cve.temporal_score is None and cvss is not None:
                                db_cve.temporal_score = cvss
                            db_cve.vulnerabilities.append(db_vuln)

    def process_data(self, nFile, args):
        print("Reading", nFile)
        tree = ET.parse(nFile)
        root = tree.getroot()

        for ReportHost in root.iter("ReportHost"):
            os_info = []
            hostname = ""
            hostIP = ""
            for HostProperties in ReportHost.iter("HostProperties"):
                for tag in HostProperties:
                    if tag.get("name") == "host-ip":
                        hostIP = tag.text
                    if tag.get("name") == "host-fqdn":
                        hostname = tag.text.lower()
                        hostname = hostname.replace("www.", "")
                    if tag.get("name") == "operating-system":
                        os_info = tag.text.split("\n")

            if hostIP:  # Apparently Nessus doesn't always have an IP to work with...
                if hostname:
                    display("Gathering Nessus info for {} ( {} )".format(hostIP, hostname))
                else:
                    display("Gathering Nessus info for {}".format(hostIP))

                created, ip = self.IPAddress.find_or_create(ip_address=hostIP)

                if hostname:
                    if not args.internal:
                        created, domain = self.Domain.find_or_create(domain=hostname)
                        if ip not in domain.ip_addresses:
                            ip.save()
                            domain.ip_addresses.append(ip)
                            domain.save()
                    else:
                        created, domain = self.Domain.find_or_create(domain=hostname)
                        if ip not in domain.ip_addresses:
                            domain.ip_addresses.append(ip)
                            domain.update()

                if os_info:
                    for o in os_info:
                        if not ip.OS:
                            ip.OS = o
                        else:
                            if o not in ip.OS.split(" OR "):
                                ip.OS += " OR " + o

                self.getVulns(ip, ReportHost)
                self.IPAddress.commit()

        return
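# process_data() and getVulns() above walk the ReportHost / HostProperties /
# ReportItem structure of a .nessus (v2) export. The short sketch below shows
# that structure on its own with xml.etree.ElementTree, assuming a standard
# .nessus v2 file; the file name is a placeholder and the output is just a
# per-finding summary line.

import xml.etree.ElementTree as ET


def summarize_nessus(path):
    """Print host, port, severity, and plugin name for each finding."""
    root = ET.parse(path).getroot()
    for report_host in root.iter("ReportHost"):
        host_ip = ""
        for tag in report_host.iter("tag"):  # children of HostProperties
            if tag.get("name") == "host-ip":
                host_ip = tag.text
        for item in report_host.iter("ReportItem"):
            print(
                host_ip,
                "{}/{}".format(item.get("protocol"), item.get("port")),
                item.get("severity"),
                item.get("pluginName"),
            )

# summarize_nessus("Nessus-export-1700000000.nessus")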
def run(hosts, db, proto="tcp", svc="ssl", lookup_domains=False):
    IPAddress = IPRepository(db)
    Domain = DomainRepository(db)

    ips = {}

    for h in hosts:
        if h.count(":") == 2:
            host, port, svc = h.split(":")
        else:
            host, port = h.split(":")

        try:
            int(host.replace(".", ""))
            if not ips.get(host, False):
                ips[host] = {"domains": [], "ports": []}
            if (port, svc) not in ips[host]["ports"]:
                ips[host]["ports"].append((port, svc))
        except:
            domains = Domain.all(domain=host)
            if domains:
                domain = domains[0]
                for ip in domain.ip_addresses:
                    if not ips.get(ip.ip_address, False):
                        ips[ip.ip_address] = {"domains": [], "ports": []}
                    if host not in ips[ip.ip_address]["domains"]:
                        ips[ip.ip_address]["domains"].append(host)
                    if (port, svc) not in ips[ip.ip_address]["ports"]:
                        ips[ip.ip_address]["ports"].append((port, svc))
            else:
                # The domain is not in the database, so resolve it directly.
                domain_ips = get_ip(host)
                for ip in domain_ips:
                    ips[ip] = {"domains": [host], "ports": []}

    results = []

    def fmt_ports(port_list):
        # Keep the proto/port/service formatting consistent across all branches.
        return ", ".join("%s/%s/%s" % (proto, p, s) for p, s in sorted(port_list))

    if lookup_domains:
        for ip in sorted(ips.keys()):
            try:
                ip_obj = IPAddress.all(ip_address=ip)[0]
                domains = [d.domain for d in ip_obj.domains]
                if domains:
                    results.append(
                        "%s / %s: %s"
                        % (ip, ", ".join(sorted(domains)), fmt_ports(ips[ip]["ports"]))
                    )
                else:
                    results.append(
                        "%s / No Hostname Registered: %s" % (ip, fmt_ports(ips[ip]["ports"]))
                    )
            except:
                if ips[ip]["domains"]:
                    results.append(
                        "%s / %s: %s"
                        % (ip, ", ".join(sorted(ips[ip]["domains"])), fmt_ports(ips[ip]["ports"]))
                    )
                else:
                    results.append(
                        "%s / No Hostname Registered: %s" % (ip, fmt_ports(ips[ip]["ports"]))
                    )
    else:
        for ip in sorted(ips.keys()):
            if ips[ip]["domains"]:
                results.append(
                    "%s / %s: %s"
                    % (ip, ", ".join(sorted(ips[ip]["domains"])), fmt_ports(ips[ip]["ports"]))
                )
            else:
                results.append(
                    "%s / No Hostname Registered: %s" % (ip, fmt_ports(ips[ip]["ports"]))
                )

    return results
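# run() above returns one human-readable line per IP. The toy helper below
# re-implements only the string formatting so the expected output shape is easy
# to see; it is illustrative and not part of the module itself.

def format_line(ip, domains, ports, proto="tcp"):
    """Mimic the "<ip> / <hosts>: proto/port/svc, ..." lines built by run()."""
    port_part = ", ".join("%s/%s/%s" % (proto, p, s) for p, s in sorted(ports))
    host_part = ", ".join(sorted(domains)) if domains else "No Hostname Registered"
    return "%s / %s: %s" % (ip, host_part, port_part)


if __name__ == "__main__":
    print(format_line("192.0.2.10", ["shop.example.com"], [("443", "https"), ("80", "http")]))
    # 192.0.2.10 / shop.example.com: tcp/443/https, tcp/80/http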
class Module(ToolTemplate):

    name = "JexBoss"
    binary_name = "jexboss.py"

    def __init__(self, db):
        self.db = db
        self.IPAddress = IPRepository(db, self.name)

    def set_options(self):
        super(Module, self).set_options()

        self.options.add_argument(
            "-i", "--import_database", help="Import URLs from the database", action="store_true"
        )
        self.options.add_argument("-f", "--import_file", help="Import URLs from file")
        self.options.add_argument(
            "--group_size",
            help="How many hosts per group (default all urls in same group)",
            type=int,
            default=0,
        )
        self.options.add_argument(
            "--rescan",
            help="Rerun JexBoss on systems that have already been processed.",
            action="store_true",
        )
        self.options.add_argument(
            "--scan_folder",
            help="Generate list of URLs based off of a folder containing GobusterDir output files",
        )
        self.options.add_argument(
            "--counter_max", help="Max number of URLs per host", default="20"
        )

    def get_targets(self, args):
        timestamp = str(int(time()))
        targets = []

        if args.import_file:
            targets += [t for t in open(args.import_file).read().split("\n") if t]

        if args.import_database:
            if args.rescan:
                targets += get_urls.run(self.db, scope_type="active")
            else:
                targets += get_urls.run(self.db, scope_type="active", tool=self.name)

        if args.scan_folder:
            files = os.listdir(args.scan_folder)
            counter_max = int(args.counter_max)
            for f in files:
                if f.count("_") == 4:
                    counter = 0
                    http, _, _, domain, port = f.split("-dir.txt")[0].split("_")
                    for data in open(os.path.join(args.scan_folder, f)).read().split("\n"):
                        if "(Status: 200)" in data:
                            targets.append(
                                "{}://{}:{}{}".format(http, domain, port, data.split(" ")[0])
                            )
                            counter += 1
                            if counter >= counter_max:
                                break

        if args.output_path[0] == "/":
            self.path = os.path.join(
                self.base_config["PROJECT"]["base_path"], args.output_path[1:], timestamp
            )
        else:
            self.path = os.path.join(
                self.base_config["PROJECT"]["base_path"], args.output_path, timestamp
            )

        if not os.path.exists(self.path):
            os.makedirs(self.path)

        res = []
        i = 0

        if args.group_size == 0:
            args.group_size = len(targets)

        for url_chunk in self.chunks(targets, args.group_size):
            i += 1
            _, file_name = tempfile.mkstemp()
            open(file_name, "w").write("\n".join(url_chunk))

            res.append({
                "target": file_name,
                "output": self.path + "-results-{}.txt".format(i),
            })

        return res

    def build_cmd(self, args):
        command = self.binary + " -m file-scan -file {target} -out {output} "

        if args.tool_args:
            command += args.tool_args

        return command

    def process_output(self, cmds):
        """
        JexBoss writes its findings to the output files directly, so there is
        nothing extra to parse here.
        """
        self.IPAddress.commit()

    def chunks(self, chunkable, n):
        """
        Yield successive n-sized chunks from chunkable.
        """
        for i in range(0, len(chunkable), n):
            yield chunkable[i:i + n]
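# The --scan_folder option above rebuilds URLs from GobusterDir output files
# named like <scheme>_<x>_<y>_<host>_<port>-dir.txt, keeping only
# "(Status: 200)" entries. The standalone sketch below mirrors that parsing;
# the folder path is hypothetical and the filename convention is the one the
# module assumes.

import os


def urls_from_gobuster_dir(folder, counter_max=20):
    """Return reconstructed URLs for 200-status entries in GobusterDir output."""
    targets = []
    for f in os.listdir(folder):
        if f.count("_") != 4 or not f.endswith("-dir.txt"):
            continue
        scheme, _, _, host, port = f.split("-dir.txt")[0].split("_")
        count = 0
        for line in open(os.path.join(folder, f)).read().split("\n"):
            if "(Status: 200)" in line:
                targets.append("{}://{}:{}{}".format(scheme, host, port, line.split(" ")[0]))
                count += 1
                if count >= counter_max:
                    break
    return targets

# urls_from_gobuster_dir("/assessments/acme/output/GobusterDir")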
def __init__(self, db):
    self.db = db
    self.BaseDomains = BaseDomainRepository(db, self.name)
    self.Domains = DomainRepository(db, self.name)
    self.IPs = IPRepository(db, self.name)