def check(self, checks=None):
    """Run every registered check, or only those named in `checks`.

    fuzz/subdomains are opt-in: they run only when enabled on the instance
    or explicitly requested. Each check's result is printed according to
    its type (dict/list/set/bool/other).
    """
    for name, runner in self.checks.items():
        # Skip disabled checks and anything not explicitly requested.
        if not runner or (checks is not None and name not in checks):
            continue
        if name == 'fuzz' and not (self.fuzz or checks):
            continue
        if name == 'subdomains' and not self.subdomains and not checks:
            continue
        print(f'\n{name.upper()}')
        result = runner()
        if not result:
            nfound('no data')
            continue
        if isinstance(result, dict):
            for key, value in result.items():
                text = value if isinstance(value, str) else ', '.join(value)
                found(f'{key}:', text)
        elif isinstance(result, list):
            found(', '.join(result))
        elif isinstance(result, set):
            for item in result:
                found(item)
        elif isinstance(result, bool):
            # Truthy bool: the check succeeded but has nothing to list.
            info('found')
        else:
            found(result)
def check_bigfuzz(self):
    '''Fuzz paths to find misconfigs'''
    from concurrent.futures import ThreadPoolExecutor
    from random import randrange
    from lib.progress import Progress
    # Probe a random 8-letter path first: if the server answers OK it is
    # probably a single-page app that responds to everything, in which case
    # fuzzing cannot distinguish hits from misses.
    random_path = ''.join(
        chr(randrange(ord('a'), ord('z') + 1)) for _ in range(8))
    ok, path, *_ = self._check_path(f'/{random_path}')
    if ok:
        info(path, 'possible SPA')
        return False
    wordlists = (self.DIR / 'data/fuzz_large.txt', )
    hit = False
    for wordlist in wordlists:
        with wordlist.open() as fh:
            # First pass counts lines for the progress bar, then rewind.
            progress = Progress(sum(1 for _ in fh))
            fh.seek(0)
            with ThreadPoolExecutor() as pool:
                outcomes = pool.map(self._check_path, fh.read().splitlines())
                for res, path, code, c_len in outcomes:
                    if res:
                        print(end='\r')
                        found(f'[{code}] {path} ({c_len} B)')
                        hit = True
                    progress(path)
    return hit
def serve(self):
    """Bind all configured TCP and UDP ports and run the event loop forever.

    Each port gets an IPv6 ('::', dual-stack) socket with SO_REUSEADDR;
    bind/setup failures are logged and the port is skipped. Blocks in
    loop.run_forever() until interrupted.
    """
    loop = asyncio.get_event_loop()
    tcp_success = 0
    for port in self.tcp_ports:
        if self.verbose >= 2:
            # NOTE(review): the {port}-style placeholders suggest debug/info/
            # error interpolate caller locals — confirm against the helpers.
            debug('Starting listener for TCP port {byellow}{port}{rst}...')
        try:
            tcp_socket = socket.socket(family=socket.AF_INET6, type=socket.SOCK_STREAM)
            tcp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            tcp_socket.bind(('::', int(port)))
            loop_server = loop.run_until_complete(
                loop.create_server(
                    lambda: HoneypotServerTCP(self), sock=tcp_socket))
            loop.create_task(loop_server.serve_forever())
            tcp_success += 1
        except:
            # Bare except keeps one failing port from stopping the rest.
            error('Failed to bind to TCP port {port}.')
    info('Started listening on {byellow}{tcp_success}{rst} TCP ports.')
    udp_success = 0
    for port in self.udp_ports:
        if self.verbose >= 3:
            debug('Starting listener for UDP port {byellow}{port}{rst}...')
        try:
            udp_socket = socket.socket(family=socket.AF_INET6, type=socket.SOCK_DGRAM)
            udp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            udp_socket.bind(('::', int(port)))
            # transport/protocol are not used further; the endpoint stays
            # registered on the loop.
            transport, protocol = loop.run_until_complete(
                loop.create_datagram_endpoint(
                    lambda: HoneypotServerUDP(self), sock=udp_socket))
            udp_success += 1
        except:
            error('Failed to bind to UDP port {port}.')
    info('Started listening on {byellow}{udp_success}{rst} UDP ports.')
    try:
        loop.run_forever()
    except:
        # Swallow KeyboardInterrupt & friends on shutdown.
        pass
def download_nvd_dbs():
    """Download the NVD CPE dictionary, CPE aliases and CVE feeds into nvd/.

    Files are cached on disk. The CPE dictionary, the alias list and the
    current-year CVE feed are refreshed once stale; past-year CVE feeds are
    downloaded once and never refreshed.
    """
    os.makedirs('nvd', exist_ok=True)

    def _expire(path):
        # Remove `path` when it is stale. NOTE(review): `.days > 1` only
        # triggers after >= 48h, while the log messages say "24 hours" —
        # kept as-is to preserve existing behavior.
        if os.path.exists(path) and (
                datetime.datetime.today() - datetime.datetime.fromtimestamp(
                    os.path.getmtime(path))).days > 1:
            os.unlink(path)

    _expire('nvd/cpe-dict.xml.gz')
    if not os.path.exists('nvd/cpe-dict.xml.gz'):
        info('Downloading CPE dictionary...')
        download_archives(
            'https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz',
            'nvd/cpe-dict.xml.gz')
    else:
        debug(
            'Not downloading CPE dictionary: file is less than 24 hours old.')

    _expire('nvd/cpe-aliases.lst')
    if not os.path.exists('nvd/cpe-aliases.lst'):
        info('Downloading CPE aliases...')
        download_archives(
            'https://salsa.debian.org/dlange/debian_security_security-tracker_split_files_v2/-/raw/master/data/CPE/aliases',
            'nvd/cpe-aliases.lst')
    else:
        debug('Not downloading CPE aliases: file is less than 24 hours old.')

    currentyear = datetime.datetime.now().year
    # Past years never change, so those feeds are only fetched when missing.
    for year in range(2002, currentyear):
        feed = 'nvd/cve-items-' + str(year) + '.json.gz'
        if os.path.exists(feed):
            debug(
                'Not downloading CVE entries for year {year}: file already exists.'
            )
            continue
        info('Downloading CVE entries for year {year}...')
        download_archives(
            'https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-' +
            str(year) + '.json.gz', feed)

    # The current year's feed keeps growing, so it expires like the dicts.
    feed = 'nvd/cve-items-' + str(currentyear) + '.json.gz'
    _expire(feed)
    if not os.path.exists(feed):
        info('Downloading CVE entries for year {currentyear}...')
        download_archives(
            'https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-' +
            str(currentyear) + '.json.gz', feed)
    else:
        debug(
            'Not downloading CVE entries for year {currentyear}: file is less than 24 hours old.'
        )
def _scan_host(self, scanner_group, address, results):
    """Try each scanner implementation in `scanner_group` against `address`,
    stopping at the first one that yields data.

    Results are cached per (address, scanner code). `results` is a dict
    (possibly a multiprocessing proxy) keyed by scanner name.
    """
    for idx, scanner in enumerate(scanner_group):
        name = scanner.name()
        cache = scanner.code()
        result = None
        if idx > 0:
            # A previous implementation in this group failed; fall through.
            info(
                'Re-trying {bblue}{name}{rst}/{byellow}{address}{rst} with next implementation...'
            )
        if self.has_cached_result(address, cache):
            if self.verbose >= 1:
                debug(
                    'Returning {bblue}{name}{rst}/{byellow}{address}{rst} from recent cache.'
                )
            result = self.read_result(address, cache)
        # Only hit the remote service when the cache missed and live
        # queries are allowed.
        if result is None and not self.no_query:
            if self.verbose >= 1:
                debug(
                    'Getting fresh {bblue}{name}{rst}/{byellow}{address}{rst} data...'
                )
            result = scanner.get(address)
            if result is not None:
                self.write_result(address, cache, result)
        if result is None:
            error(
                'Failed to get passive scan data for {byellow}{address}{rst}.'
            )
            continue
        parsed = scanner.enum(result)
        if self.verbose >= 1:
            for svc in parsed:
                # NOTE(review): {svc[service]}-style placeholders imply the
                # log helper supports indexing into caller locals — confirm.
                debug(
                    'Discovered service {bgreen}{svc[service]}{rst} on port {bgreen}{svc[port]}{rst}/{bgreen}{svc[transport]}{rst} running {bgreen}{svc[product]}{rst}/{bgreen}{svc[version]}{rst}.'
                )
        results[name] = parsed
        # First successful implementation wins for this group.
        break
def enum_dns(self, address, port, service, basedir):
    """Enumerate a DNS service: resolve the NetBIOS name of the host, then
    attempt a zone transfer of thinc.local against it.

    NOTE(review): e() appears to expand {address}/{port}/{host}-style
    placeholders from the caller's scope — local names must be preserved.
    """
    # First field of any non-<GROUP> <00> NetBIOS record is the host name.
    nmblookup = e(
        "nmblookup -A {address} | grep '<00>' | grep -v '<GROUP>' | cut -d' ' -f1"
    )
    info('Running task {bgreen}nmblookup-{port}{rst}' +
         (' with {bblue}' + nmblookup + '{rst}' if self.verbose >= 1 else '...'))
    try:
        host = subprocess.check_output(
            nmblookup, shell=True,
            stderr=subprocess.DEVNULL).decode().strip()
    except subprocess.CalledProcessError:
        # No NetBIOS answer — nothing to zone-transfer against.
        return
    self.run_cmds([(e(
        'dig -p{port} @{host}.thinc.local thinc.local axfr > "{basedir}/{port}_dns_dig.txt"'
    ), e('dig-{port}'))])
def scan_host(self, address):
    """Port-scan `address` with nmap (amap fallback for unknowns) and, in
    deep-scan mode, enumerate every discovered service.

    NOTE(review): the original nesting was lost to formatting; this
    reconstruction gates the per-service enumeration on self.deepscan —
    confirm against the upstream source.
    """
    info('Scanning host {byellow}{address}{rst}...')
    basedir = os.path.join(self.outdir, address + self.srvname)
    os.makedirs(basedir, exist_ok=True)
    services = self.run_nmap(address)
    # amap is only consulted when nmap could not identify something.
    if any('unknown' in s for s in services):
        services = self.run_amap(services)
    if self.deepscan:
        if len(services) != 0:
            info('Starting scan of services...')
        # Fresh run: drop the previous list of untouched services, which
        # scan_service() re-creates in append mode.
        if os.path.exists(os.path.join(basedir, '0_untouched.txt')):
            os.unlink(os.path.join(basedir, '0_untouched.txt'))
        for service in services:
            self.scan_service(*service)
def parse_cpe_aliases():
    """Parse nvd/cpe-aliases.lst into a list of alias groups.

    The file uses '#' comment lines; groups of equivalent CPE names are
    separated by blank lines. Each entry is URL-unquoted with its leading
    'cpe:/' prefix (5 chars) stripped.

    Returns a list of lists of CPE name strings.
    """
    aliases = []
    info('Parsing file {bgreen}nvd/cpe-aliases.lst{rst}...')
    with open('nvd/cpe-aliases.lst') as file:
        alias_group = []
        for line in file:
            if line.startswith('#'):
                continue
            stripped = line.strip()
            if not stripped:
                # Blank line terminates the current group.
                if alias_group:
                    aliases.append(alias_group)
                    alias_group = []
                continue
            alias_group.append(parse.unquote(stripped[5:]))
    # BUG FIX: the final group was silently dropped when the file did not
    # end with a blank line.
    if alias_group:
        aliases.append(alias_group)
    return aliases
def process_nmap(file):
    """Walk an nmap XML report and look up vulnerabilities for each service.

    Services with explicit CPE names are queried directly; otherwise the
    product/version/extrainfo strings are fuzzy-matched to a CPE name.
    Unidentified services are only reported.
    """
    report = NmapParser.parse_fromfile(file)
    for host in report.hosts:
        for service in host.services:
            msg = 'Service {bgreen}{host.address}{rst}:{bgreen}{service.port}{rst}/{bgreen}{service.protocol}{rst}'
            # dict.get truthiness == "key present and non-empty".
            if service.service_dict.get('cpelist'):
                info(msg + ' is {byellow}' + '{rst}, {byellow}'.join(
                    service.service_dict['cpelist']) + '{rst}')
                for cpe in service.service_dict['cpelist']:
                    get_vulns_cli(cpe)
            elif service.service_dict.get('product'):
                # .get() replaces the redundant `x if k in d else ''`
                # conditionals (membership of 'product' was already
                # guaranteed by the elif above).
                product = service.service_dict.get('product', '')
                version = service.service_dict.get('version', '')
                extrainfo = service.service_dict.get('extrainfo', '')
                full = (product + ' ' + version + ' ' + extrainfo).strip()
                cpe = fuzzy_find_cpe(product + ' ' + extrainfo, version)
                if cpe is None:
                    warn(
                        msg + ' was identified as {bblue}{full}{rst} with no matching CPE name.'
                    )
                else:
                    info(
                        msg + ' was identified as {bblue}{full}{rst} and fuzzy-matched to {byellow}cpe:/{cpe}{rst}.'
                    )
                    get_vulns_cli(cpe)
            else:
                warn(msg + ' was not identified.')
def datagram_received(self, data, addr):
    """Handle one inbound UDP datagram: log it, ask the server how to
    respond, and either send the reply or log the refusal.
    """
    lport = self.transport.get_extra_info('sockname')[1]
    if self.server.verbose >= 1:
        debug('Data on {byellow}udp:{lport}{rst} from {byellow}{addr[0]}{rst}: {bblue}{data}{rst}')
    # NOTE(review): `error` is a message string here and shadows the
    # module-level error() logger — harmless locally (only read by the log
    # placeholders below), but worth renaming together with those strings.
    proto, reply, error = self.server.handle_message(data, 'UDP')
    if reply is not None:
        if self.server.verbose >= 1:
            trunc = reply
            if self.server.verbose < 2 and len(trunc) > self.truncation_length:
                # BUG FIX: was a hard-coded [:60]; use the configured
                # truncation length the guard above already checks against.
                trunc = trunc[:self.truncation_length] + self.truncation_marker
            debug('Replying to {byellow}{addr[0]}{rst} with {bgreen}{proto}{rst}: {bblue}{trunc}{rst}')
        else:
            info('Data on {byellow}udp:{lport}{rst} from {byellow}{addr[0]}{rst}, replying with {bgreen}{proto}{rst}.')
        self.transport.sendto(reply, addr)
    else:
        if self.server.verbose >= 1:
            debug('Closing connection on {byellow}udp:{lport}{rst} with {byellow}{addr[0]}{rst}: {bred}{error}{rst}')
        else:
            info('Refusing connection on {byellow}udp:{lport}{rst} from {byellow}{addr[0]}{rst}: {bred}{error}{rst}')
def run_cmd(self, cmd, tag='?', redirect=None):
    """Run a shell command, optionally streaming its output through
    dump_pipe threads.

    redirect defaults to True when verbose >= 2. Returns True on exit code
    0 — and always in dry-run mode, where nothing is executed.
    """
    if redirect is None:
        redirect = self.verbose >= 2
    info(('Skipping' if self.dryrun else 'Running') +
         ' task {bgreen}{tag}{rst}' +
         (' with {bblue}{cmd}{rst}' if self.verbose >= 1 else '...'))
    if self.dryrun:
        return True
    proc = subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE if redirect else subprocess.DEVNULL,
        stderr=subprocess.PIPE if redirect else subprocess.DEVNULL)
    if redirect:
        # Events tell the dump_pipe reader threads to stop once set.
        thdout = threading.Event()
        thderr = threading.Event()
        threading.Thread(
            target=self.dump_pipe, args=(proc.stdout, thdout, tag)).start()
        threading.Thread(
            target=self.dump_pipe, args=(proc.stderr, thderr, tag,
                                         Fore.RED)).start()
    ret = proc.wait()
    if redirect:
        thdout.set()
        thderr.set()
    if ret != 0:
        error('Task {bred}{tag}{rst} returned non-zero exit code: {ret}')
    else:
        info('Task {bgreen}{tag}{rst} finished successfully.')
    return ret == 0
def run_nmap(self, address):
    """Run full TCP and UDP nmap scans against `address`.

    Returns a list of (address, port, service) tuples for every open
    service; UDP ports are negated so scan_service() can tell the two
    transports apart.
    """
    out = os.path.join(self.outdir, address + self.srvname)
    self.run_cmds([
        (e('nmap -vv --reason -sV -sC {self.nmapparams} -p- -oN "{out}/0_tcp_nmap.txt" -oX "{out}/0_tcp_nmap.xml" {address}'
           ), 'nmap-tcp'),
        (e('nmap -vv --reason -sV --version-intensity 0 -sC -sU {self.nmapparams} -oN "{out}/0_udp_nmap.txt" -oX "{out}/0_udp_nmap.xml" {address}'
           ), 'nmap-udp')
    ])
    nmap_svcs = []
    # Either scan may have failed or been skipped; only parse what exists.
    if os.path.exists(out + '/0_tcp_nmap.xml'):
        report = NmapParser.parse_fromfile(out + '/0_tcp_nmap.xml')
        nmap_svcs += report.hosts[0].services
    if os.path.exists(out + '/0_udp_nmap.xml'):
        report = NmapParser.parse_fromfile(out + '/0_udp_nmap.xml')
        nmap_svcs += report.hosts[0].services
    services = []
    nmap_svcs = sorted(nmap_svcs, key=lambda s: s.port)
    for service in nmap_svcs:
        # state may be e.g. 'open' or 'open|filtered'.
        if 'open' not in service.state:
            continue
        info(
            'Service {bgreen}{service.port}{rst}/{bgreen}{service.protocol}{rst} is {bgreen}{service.service}{rst}'
            + (' running {green}' + service.service_dict['product'] +
               '{crst}' if 'product' in service.service_dict else '') +
            (' version {green}' + service.service_dict['version'] + '{crst}'
             if 'version' in service.service_dict else ''))
        # Negative port number encodes "UDP" for downstream consumers.
        services.append((address, service.port *
                         -1 if service.protocol == 'udp' else service.port,
                         service.service))
    return services
def parse_cpe_names():
    """Parse the gzipped NVD CPE dictionary into [name, title] pairs.

    Names are URL-unquoted with the 'cpe:/' prefix (5 chars) stripped; the
    en-US title is preferred when an entry carries several titles.
    """
    names = []
    info('Parsing file {bgreen}nvd/cpe-dict.xml.gz{rst}...')
    root = None
    with gzip.open('nvd/cpe-dict.xml.gz', 'r') as f:
        root = etree.fromstring(f.read())
    for entry in root.findall('{http://cpe.mitre.org/dictionary/2.0}cpe-item'):
        name = parse.unquote(entry.attrib['name'][5:])
        titles = entry.findall('{http://cpe.mitre.org/dictionary/2.0}title')
        # BUG FIX: the original left `title` unbound (or stale from the
        # previous entry) when a multi-title entry had no en-US title, and
        # raised IndexError on an empty title list. Default to the first
        # title and only override with the en-US one.
        if titles is not None and len(titles) > 0:
            title = titles[0]
            for localtitle in titles:
                if localtitle.attrib[
                        '{http://www.w3.org/XML/1998/namespace}lang'] == 'en-US':
                    title = localtitle
                    break
            names.append([name, title.text])
    return names
def prepare(self):
    '''Make initial request'''
    info('Get initial response...')
    try:
        response = self.get(self.target,
                            allow_redirects=self.allow_redirects)
    except SSLError as exc:
        # Certificate trouble is reported but not fatal: retry unverified.
        err('SSL error', exc)
        response = self.get(self.target,
                            allow_redirects=self.allow_redirects,
                            verify=False)
    self.response = response
    if not self.response.ok:
        raise Exception(f'Status: {self.response.status_code}')
    info(f'[{self.response.status_code}]')
    if self.response.is_redirect:
        info('Location:', self.response.headers.get('location'))
    # Cache the body for the parsing checks.
    self.html = self.response.text
def scan_host(self, address):
    """Collect passive scan data for `address` from every scanner group —
    optionally one process per group — then merge and report the
    amalgamated service list.
    """
    info('Getting passive scan data for host {byellow}{address}{rst}...')
    basedir = os.path.join(self.outdir, address)
    os.makedirs(basedir, exist_ok=True)
    scanners = self.get_scanners()
    jobs = []
    results = {}
    if self.parallel:
        # Manager-backed dict so child processes can report back.
        manager = multiprocessing.Manager()
        results = manager.dict()
    for scanner_group in scanners:
        if not self.parallel:
            self._scan_host(scanner_group, address, results)
        else:
            job = multiprocessing.Process(
                target=self._scan_host, args=(scanner_group, address,
                                              results))
            jobs.append(job)
            job.start()
    if self.parallel:
        for job in jobs:
            if job.is_alive():
                job.join()
    info('Amalgamated results for host {byellow}{address}{rst}:')
    merged = self.merge_results(results)
    for svc in merged:
        info(
            'Discovered service {bgreen}{svc[service]}{rst} on port {bgreen}{svc[port]}{rst}/{bgreen}{svc[transport]}{rst} running {bgreen}{svc[product]}{rst}/{bgreen}{svc[version]}{rst}.'
        )
    return merged
exit() dumpall = args.all dumpexp = args.exploits verbose = args.verbose if not os.path.isfile('vulns.db'): fail( 'Failed to find {bgreen}vulns.db{rst}. Use {bblue}-u{rst} to download the dependencies and build the database.' ) conn = sqlite3.connect('vulns.db') c = conn.cursor() if args.query.lower().startswith('cpe:/'): info('Finding vulnerabilities for {bgreen}{query}{rst}...', query=args.query.lower()) get_vulns_cli(args.query.lower()) elif os.path.isfile(args.query): info('Processing nmap report {bgreen}{args.query}{rst}...') process_nmap(args.query) else: info('Performing fuzzy matching for {bgreen}{args.query}{rst}...') cpe = fuzzy_find_cpe(args.query) if cpe is None: error('Failed to resolve query to a CPE name.') else: info('Fuzzy-matched query to name {byellow}cpe:/{cpe}{rst}.') get_vulns_cli(cpe)
def __init__(self, target, fuzz=False, subdomains=False,
             allow_redirects=False, resolve_ip=True):
    """Initialize the checker session for `target` (a full URL).

    fuzz/subdomains toggle the opt-in checks; allow_redirects is passed to
    every request; resolve_ip controls the initial DNS lookup.
    Ends by fetching the target once via prepare().
    """
    super().__init__()
    self.headers['User-Agent'] = 'Mozilla/5.0'
    # initial data
    self.target = target
    self.fuzz = fuzz
    self.subdomains = subdomains
    self.allow_redirects = allow_redirects
    # all defined checks; OrderedDict keeps the execution order below.
    self.checks = OrderedDict(
        # base info
        headers=self.check_headers,
        domains=self.check_domains,
        # parse source
        linked_domains=self.check_linked_domains,
        robots_disallow=self.check_robots,
        cms=self.check_cms,
        techs=self.check_techs,
        analytics=self.check_analytics,
        contacts=self.check_contacts,
        social=self.check_social,
        # fuzz
        fuzz=self.check_fuzz,
        extra=self.check_bigfuzz,
        subdomains=self.check_subdomains,
    )
    self.interesting_headers = {
        'access-control-allow-origin',
        'content-security-policy',  # for additional domains. Deprecated?
        'last-modified',
        'server',
        'set-cookie',
        'via',
        'x-backend-server',
        'x-powered-by',
    }
    # target url parts
    pu = urlparse(target)
    self.scheme = pu.scheme
    self.hostname = pu.hostname
    self.netloc = pu.netloc
    self.port = pu.port or {'http': 80, 'https': 443}.get(self.scheme)
    self.path = pu.path
    # BUG FIX: self.ip must exist even when resolution is skipped — the
    # info() call below reads it unconditionally, which previously raised
    # AttributeError when resolve_ip was False or the URL had no hostname.
    self.ip = None
    if resolve_ip and self.hostname:
        self.ip = gethostbyname(self.hostname)
    info(f'Target: {self.target}')
    info('IP:', self.ip or 'not resolved')
    print('-' * 42)
    self.prepare()
def _parse_cve_mapped_list(path):
    """Parse a curated 'id;cve1,cve2[;title]' list.

    Returns (names, mapping): id -> title-or-None, and CVE -> [ids].
    '#' lines are comments.
    """
    names = {}
    mapping = {}
    with open(path) as file:
        for line in file:
            if line.startswith('#'):
                continue
            fields = line.strip().split(';')
            names[fields[0]] = fields[2] if len(fields) > 2 else None
            for cve in fields[1].split(','):
                mapping.setdefault(cve, []).append(fields[0])
    return names, mapping


def parse_exploits():
    """Load the curated exploit reference lists from nvd/, when present.

    ExploitDB, Metasploit and 1337day share the 'id;cves;title' format and
    one parser (the in-code TODO asked for this); SecurityFocus uses
    'id;title' and an id set instead of a CVE map. Missing lists yield
    None placeholders so callers fall back to CVE references.

    Returns the 8-tuple (exploitdb_names, exploitdb_map, secfocus_names,
    secfocus_map, metasploit_names, metasploit_map, l337day_names,
    l337day_map).
    """
    exploitdb_names = None
    exploitdb_map = None
    if os.path.exists('nvd/exploitdb.lst'):
        info('Using curated {bblue}ExploitDB{rst} references.')
        exploitdb_names, exploitdb_map = _parse_cve_mapped_list(
            'nvd/exploitdb.lst')
    else:
        info('Using {bblue}ExploitDB{rst} links from CVE references.')

    secfocus_names = None
    secfocus_map = None
    if os.path.exists('nvd/securityfocus.lst'):
        info('Using curated {bblue}SecurityFocus{rst} references.')
        secfocus_names = {}
        secfocus_map = set()
        with open('nvd/securityfocus.lst') as file:
            for line in file:
                if line.startswith('#'):
                    continue
                fields = line.strip().split(';')
                secfocus_names[
                    fields[0]] = fields[1] if len(fields) > 1 else None
                secfocus_map.add(fields[0])
    else:
        info('Using {bblue}SecurityFocus{rst} links from CVE references.')

    metasploit_names = None
    metasploit_map = None
    if os.path.exists('nvd/metasploit.lst'):
        info('Using curated {bblue}Metasploit{rst} references.')
        metasploit_names, metasploit_map = _parse_cve_mapped_list(
            'nvd/metasploit.lst')

    l337day_names = None
    l337day_map = None
    if os.path.exists('nvd/1337day.lst'):
        info('Using curated {bblue}1337day{rst} references.')
        l337day_names, l337day_map = _parse_cve_mapped_list('nvd/1337day.lst')

    return (exploitdb_names, exploitdb_map, secfocus_names, secfocus_map,
            metasploit_names, metasploit_map, l337day_names, l337day_map)
def parse_cve_items(exploits):
    """Parse all cached NVD CVE feeds into a list of vuln dicts, merging in
    the curated exploit references produced by parse_exploits().

    `exploits` is the 8-tuple from parse_exploits(). For each source the
    '_<site>' key accumulates raw references (URLs and/or curated ids) and
    is nulled out once the deduplicated '<site>' list of {'id', 'title'}
    dicts has been built.
    """
    (exploitdb_names, exploitdb_map, secfocus_names, secfocus_map,
     metasploit_names, metasploit_map, l337day_names, l337day_map) = exploits
    vulns = []
    parser = cysimdjson.JSONParser()
    entries = None
    for file in sorted(glob.glob('nvd/cve-items-*.json.gz')):
        info('Parsing file {bgreen}{file}{rst}...')
        with gzip.open(file, 'rb') as f:
            entries = parser.parse_in_place(f.read()).at_pointer('/CVE_Items')
        for entry in entries:
            vuln = {
                'id': None,
                'date': None,
                'description': None,
                'availability': None,
                'affected': [],
                'vendor': [],
                '_exploitdb': [],
                '_securityfocus': [],
                '_metasploit': [],
                '_l337day': []
            }
            # [4:] strips the 'CVE-' prefix; only the number is stored.
            vuln['id'] = entry['cve']['CVE_data_meta']['ID'][4:]
            vuln['date'] = entry['publishedDate']
            vuln['description'] = entry['cve']['description'][
                'description_data'][0]['value']
            if 'baseMetricV2' in entry['impact']:
                vuln['availability'] = entry['impact']['baseMetricV2'][
                    'cvssV2']['accessComplexity']
            # TODO implement proper matching for CPEs with provided operators
            for node in entry['configurations']['nodes']:
                for child in node['children']:
                    for cpe in child['cpe_match']:
                        vuln['affected'].append(cpe['cpe23Uri'])
            for reference in entry['cve']['references']['reference_data']:
                url = reference['url']
                source = reference['refsource']
                tags = reference['tags']
                if 'Vendor Advisory' in tags:
                    vuln['vendor'].append(url)
                elif source == 'EXPLOIT-DB':
                    vuln['_exploitdb'].append(url)
                elif source == 'BID':
                    vuln['_securityfocus'].append(url)
            # TODO refactor this to use one code path for all lists
            if exploitdb_map is not None and vuln['id'] in exploitdb_map:
                for expid in exploitdb_map[vuln['id']]:
                    vuln['_exploitdb'].append(expid)
                # set() dedupes curated ids against reference URLs.
                vuln['_exploitdb'] = set(vuln['_exploitdb'])
                vuln['exploitdb'] = []
                for exploit in vuln['_exploitdb']:
                    vuln['exploitdb'].append({
                        'id':
                        exploit,
                        'title':
                        exploitdb_names[exploit]
                        if exploit in exploitdb_names else None
                    })
                vuln['_exploitdb'] = None
            else:
                # No curated list: ids are raw reference URLs, untitled.
                vuln['exploitdb'] = []
                for exploit in vuln['_exploitdb']:
                    vuln['exploitdb'].append({'id': exploit, 'title': None})
                vuln['_exploitdb'] = None
            if secfocus_map is not None and vuln['_securityfocus']:
                # NOTE(review): this matches raw BID reference entries
                # against curated ids — confirm both sides use the same
                # identifier format.
                exploits = []
                for sfid in vuln['_securityfocus']:
                    if sfid in secfocus_map:
                        exploits.append(sfid)
                vuln['securityfocus'] = []
                for exploit in exploits:
                    vuln['securityfocus'].append({
                        'id':
                        exploit,
                        'title':
                        secfocus_names[exploit]
                        if exploit in secfocus_names else None
                    })
                vuln['_securityfocus'] = None
            else:
                vuln['securityfocus'] = []
                for exploit in vuln['_securityfocus']:
                    vuln['securityfocus'].append({
                        'id': exploit,
                        'title': None
                    })
                vuln['_securityfocus'] = None
            if metasploit_map is not None and vuln['id'] in metasploit_map:
                for expid in metasploit_map[vuln['id']]:
                    vuln['_metasploit'].append(expid)
                vuln['_metasploit'] = set(vuln['_metasploit'])
                vuln['metasploit'] = []
                for exploit in vuln['_metasploit']:
                    vuln['metasploit'].append({
                        'id':
                        exploit,
                        'title':
                        metasploit_names[exploit]
                        if exploit in metasploit_names else None
                    })
                vuln['_metasploit'] = None
            if l337day_map is not None and vuln['id'] in l337day_map:
                for expid in l337day_map[vuln['id']]:
                    vuln['_l337day'].append(expid)
                vuln['_l337day'] = set(vuln['_l337day'])
                vuln['l337day'] = []
                for exploit in vuln['_l337day']:
                    vuln['l337day'].append({
                        'id':
                        exploit,
                        'title':
                        l337day_names[exploit]
                        if exploit in l337day_names else None
                    })
                vuln['_l337day'] = None
            vulns.append(vuln)
    info('Extracted {byellow}{vulncount:,}{rst} vulnerabilites.',
         vulncount=len(vulns))
    return vulns
def create_vulndb(names, aliases, vulns):
    """Build vulns.db from parsed CPE names, alias groups and CVE entries.

    Recreates the database from scratch: tables vulns, affected, aliases,
    exploits, an FTS4 `names` table, and the lookup indices.
    """
    info('Initiating SQLite creation...')
    if os.path.isfile('vulns.db'):
        os.unlink('vulns.db')
    conn = sqlite3.connect('vulns.db')
    c = conn.cursor()
    c.execute(
        'create table vulns (id integer primary key autoincrement, cve text, date datetime, description text, availability char(1), vendor text)'
    )
    c.execute(
        'create table affected (vuln_id integer not null, cpe text, foreign key(vuln_id) references vulns(id))'
    )
    c.execute('create table aliases (class int, cpe text)')
    c.execute(
        'create table exploits (site int, sid text, cve text, title text)')
    c.execute('create virtual table names using fts4(cpe, name)')
    info(
        'Creating tables {bgreen}vulns{rst}, {bgreen}affected{rst} and {bgreen}exploits{rst}...'
    )
    # One shared code path for the four exploit sources (was four
    # copy-pasted branches); the int is the `site` discriminator stored in
    # the exploits table.
    exploit_sites = (('exploitdb', 1), ('securityfocus', 2),
                     ('metasploit', 5), ('l337day', 10))
    for vuln in vulns:
        c.execute(
            'insert into vulns (cve, date, description, availability, vendor) values (?, ?, ?, ?, ?)',
            [
                vuln['id'], vuln['date'], vuln['description'],
                vuln['availability'],
                '\x1e'.join(vuln['vendor']) if vuln['vendor'] else None
            ])
        # Was `id`, which shadowed the builtin.
        vuln_id = c.lastrowid
        for affected in vuln['affected']:
            # [8:] strips the 'cpe:2.3:' prefix from the stored URI.
            c.execute('insert into affected (vuln_id, cpe) values (?, ?)',
                      [vuln_id, affected[8:]])
        for key, site in exploit_sites:
            if key in vuln:
                for exploit in vuln[key]:
                    c.execute(
                        'insert into exploits (site, sid, cve, title) values (?, ?, ?, ?)',
                        [site, exploit['id'], vuln['id'], exploit['title']])
    info('Creating table {bgreen}names{rst}...')
    for name in names:
        c.execute('insert into names (cpe, name) values (?, ?)', name)
    info('Creating table {bgreen}aliases{rst}...')
    group_counter = 0
    for alias_group in aliases:
        # All members of one group share the same `class` id.
        for alias in alias_group:
            c.execute('insert into aliases (class, cpe) values (?, ?)',
                      [group_counter, alias])
        group_counter += 1
    info('Creating indices...')
    c.execute('create index cpe_vuln_idx on affected (cpe collate nocase)')
    c.execute('create index cpe_alias_cpe_idx on aliases (cpe collate nocase)')
    c.execute('create index cpe_alias_class_idx on aliases (class)')
    c.execute('create index cve_exploit_idx on exploits (cve, site)')
    conn.commit()
    conn.close()
    info('Finished database creation.')
def get_vulns_cli(cpe):
    """Print the vulnerabilities and public exploits for a CPE name.

    Descriptions are truncated to the terminal width and attack keywords
    are highlighted; exploits are grouped per CVE with site-specific links.
    Relies on the module-level `dumpexp` flag and get_vulns()/get_exploits().
    """
    import shutil
    vulns = get_vulns(cpe)
    if not cpe.startswith('cpe:/'):
        cpe = 'cpe:/' + cpe
    if vulns is not None and len(vulns) == 0:
        info('Entry {byellow}{cpe}{rst} has no vulnerabilities.')
        return
    if vulns is None:
        # get_vulns() returns None on error, which is already printed to the user
        return
    if not dumpexp:
        info('Entry {byellow}{cpe}{rst} has the following vulnerabilities:')
    # BUG FIX: int(os.environ['COLUMNS']) raised KeyError in non-interactive
    # shells; get_terminal_size() still honors COLUMNS when it is set.
    cols = shutil.get_terminal_size().columns
    cves = []
    for vuln in vulns:
        cves.append(vuln[0])
        if dumpexp:
            continue
        # Availability: C = full compromise, P = partial.
        color = '{red}' if vuln[
            4] == 'C' else '{yellow}' if vuln[4] == 'P' else '{crst}'
        descr = vuln[3]
        if len(descr) > cols - 18:
            descr = descr[:cols - 20] + ' >'
        descr = re.sub(
            r'\b(denial.of.service|execute|arbitrary|code|overflow|gain|escalate|privileges?)\b',
            r'{bgreen}\1{rst}', descr)
        tally(color + '{bright}CVE-{vuln[0]}{rst} ' + descr)
    exploits = get_exploits(cves)
    if exploits:
        info('Entry {byellow}{cpe}{rst} has the following public exploits:')
        last_cve = ''
        descr = ''
        for exploit in exploits:
            # Exploits arrive grouped by CVE; flush on every change.
            if last_cve != exploit[2]:
                if last_cve:
                    tally('{bred}CVE-{last_cve}{rst} ' + descr)
                descr = ''
                last_cve = exploit[2]
            descr += '\n - '
            if exploit[3] is not None:
                descr += '{bright}' + exploit[3] + '{srst}\n '
            if exploit[0] == 1:
                descr += 'https://www.exploit-db.com/exploits/' + exploit[1]
            elif exploit[0] == 2:
                descr += 'http://www.securityfocus.com/bid/' + exploit[
                    1] + '/exploit'
            elif exploit[0] == 5:
                descr += 'metasploit ' + exploit[1]
            elif exploit[0] == 10:
                descr += 'http://0day.today/exploit/' + exploit[1]
            else:
                descr += exploit[1]
        tally('{bred}CVE-{last_cve}{rst} ' + descr)
    else:
        info('Entry {byellow}{cpe}{rst} has no public exploits.')
def scan_service(self, address, port, service):
    """Dispatch a discovered service to its protocol-specific enumerator.

    run_nmap() encodes UDP services as negative port numbers; decode that
    here. Services with no matching enumerator are scanned generically
    (TCP always; UDP only on well-known ports <= 1024) or appended to
    0_untouched.txt.
    """
    if port < 0:
        is_udp = True
        port *= -1
    else:
        is_udp = False
    info(
        'Scanning service {bgreen}{service}{rst} on port {bgreen}{port}{rst}/{bgreen}{proto}{rst}...',
        proto='udp' if is_udp else 'tcp')
    basedir = os.path.join(self.outdir, address + self.srvname)
    os.makedirs(basedir, exist_ok=True)
    if self.bruteforce:
        error('self.bruteforce-only mode is currently not available.')
        return
    # Ordered (substrings, enumerator) table replacing the long elif
    # chain; first match wins, preserving the original precedence.
    dispatch = (
        (('http', ), self.enum_http),
        (('smtp', ), self.enum_smtp),
        (('pop3', ), self.enum_pop3),
        (('imap', ), self.enum_imap),
        (('ftp', ), self.enum_ftp),
        (('microsoft-ds', 'netbios'), self.enum_smb),
        (('ms-sql', 'msSql'), self.enum_mssql),
        (('mysql', ), self.enum_mysql),
        (('oracle', ), self.enum_oracle),
        (('nfs', 'rpcbind'), self.enum_nfs),
        (('snmp', ), self.enum_snmp),
        (('domain', 'dns'), self.enum_dns),
        (('rdp', 'ms-wbt-server', 'ms-term-serv'), self.enum_rdp),
        (('vnc', ), self.enum_vnc),
    )
    for needles, enumerator in dispatch:
        if any(needle in service for needle in needles):
            enumerator(address, port, service, basedir)
            return
    if not is_udp:
        warn('Service {byellow}{service}{rst} will be scanned generically.')
        self.enum_generic_tcp(address, port, service, basedir)
    elif port <= 1024:
        warn('Service {byellow}{service}{rst} will be scanned generically.')
        self.enum_generic_udp(address, port, service, basedir)
    else:
        warn(
            'Service {byellow}{service}{rst} will not be scanned generically.'
        )
        with open(os.path.join(basedir, '0_untouched.txt'), 'a') as file:
            # BUG FIX (idiom): writelines() on a plain str writes it
            # character by character; write() produces identical output.
            file.write(
                str(port) + '\t' + ('udp' if is_udp else 'tcp') + '\t' +
                service + '\n')