def connect_exploits():
    """
    Call the connect_exploits() function which links known vulnerabilities
    to exploits based on f_vulnid or f_cve
    """
    # NB: this local import shadows the controller's own name for the rest
    # of the function body -- subsequent calls hit the library function.
    from skaldship.exploits import connect_exploits

    form = SQLFORM.factory(
        Field('f_taskit', type='boolean', default=auth.user.f_scheduler_tasks,
              label=T('Run in background task')),
    )

    if form.accepts(request.vars, session):
        if form.vars.f_taskit:
            # defer the work to the web2py scheduler
            task = scheduler.queue_task(
                connect_exploits,
                group_name=settings.scheduler_group_name,
                sync_output=5,
                timeout=settings.scheduler_timeout,
            )
            if task.id:
                redirect(URL('tasks', 'status', args=task.id))
            else:
                response.flash = "Error submitting job: %s" % (task.errors)
        else:
            # run synchronously inside this request
            connect_exploits()
            response.flash = "Exploits and vulnerabilities connected"
            redirect(URL('list'))

    response.title = "%s :: Connect Exploits" % (settings.title)
    return dict(form=form)
def connect_exploits():
    """
    Scheduler-task wrapper: link known vulnerabilities to exploit records
    (matching on f_vulnid / f_cve) via skaldship.exploits.connect_exploits().

    FIX: the previous docstring incorrectly claimed this processed a
    Nexpose exploits.xml file; it performs no file processing at all.

    :returns: True on completion (scheduler task success marker)
    """
    from skaldship.exploits import connect_exploits
    connect_exploits()
    return True
def import_canvas_xml():
    """
    Process ImmunitySec's Exploit.xml which can be generated from the URL
    http://exploitlist.immunityinc.com/ or by running ./canvasengine.py -e
    from your CANVAS directory

    http://exploitlist.immunityinc.com/home/serve/live
    """
    import os
    kvasir_path = os.path.join(request.folder, 'static/etc')
    form = SQLFORM.factory(
        Field('f_filename', 'upload', uploadfolder=os.path.join(request.folder, 'data/misc'), label=T('XML File')),
        Field('f_use_kvasir_local', 'boolean', label=T('Use Kvasir static path')),
        Field('f_use_local', 'boolean', label=T('Use local file path')),
        Field('f_pathname', 'string', default=kvasir_path, label=T('Local path')),
        Field('f_download', 'boolean', label=T('Download')),
        Field('f_taskit', type='boolean', default=auth.user.f_scheduler_tasks, label=T('Run in background task')),
        col3={
            'f_use_kvasir_local': 'static/etc/canvas_exploits.xml',
            'f_use_local': 'Directory where canvas_exploits.xml is located',
            'f_download': 'Download from ImmunitySec website',
        }
    )

    if form.errors:
        response.flash = 'Error in form'
    elif form.accepts(request.vars, session):
        # Resolve the XML source: local directory, Kvasir's static copy,
        # direct download (None -> the processor fetches it), or an upload.
        if form.vars.f_use_local:
            filename = os.path.join(form.vars.f_pathname, 'canvas_exploits.xml')
        elif form.vars.f_use_kvasir_local:
            filename = os.path.join(request.folder, 'static', 'etc', 'canvas_exploits.xml')
        elif form.vars.f_download:
            filename = None
        else:
            filename = os.path.join(request.folder, 'data', 'misc', form.vars.f_filename)

        if form.vars.f_taskit:
            # run the import as a background scheduler task
            task = scheduler.queue_task(
                canvas_exploit_xml,
                pargs=[filename],
                group_name=settings.scheduler_group_name,
                sync_output=5,
                timeout=settings.scheduler_timeout,
            )
            if task.id:
                redirect(URL('tasks', 'status', args=task.id))
            else:
                response.flash = "Error submitting job: %s" % (task.errors)
        else:
            # run synchronously inside this request
            from skaldship.canvas import process_exploits
            from skaldship.exploits import connect_exploits
            process_exploits(filename)
            connect_exploits()
            response.flash = "Canvas Exploit data uploaded"
            redirect(URL('list'))

    # FIX: the title literal was broken mid-string across a line break;
    # reconstructed as a single-line format string.
    response.title = "%s :: Import ImmunitySec CANVAS Exploits XML" % (settings.title)
    return dict(form=form)
def canvas_exploit_xml(filename=None):
    """
    Background-task entry point: load an ImmunitySec CANVAS Exploits.xml
    file into the database, then re-link exploits to vulnerabilities.

    :param filename: path to the CANVAS exploits XML file (None lets the
        processor decide its own source)
    :returns: True on completion (scheduler task success marker)
    """
    from skaldship.canvas import process_exploits
    from skaldship.exploits import connect_exploits

    process_exploits(filename)
    connect_exploits()
    return True
def nexpose_exploit_xml(filename=None):
    """
    Background-task entry point: load a Nexpose exploits.xml file into the
    database, then re-link exploits to vulnerabilities.

    :param filename: path to the Nexpose exploits.xml file
    :returns: True on completion (scheduler task success marker)
    """
    from skaldship.nexpose import process_exploits
    from skaldship.exploits import connect_exploits

    process_exploits(filename)
    connect_exploits()
    return True
def update_dynamic_fields():
    """
    Executes the following functions that update dynamic field entries:

    skaldship.general.do_host_status
    skaldship.exploits.connect_exploits
    """
    response.title = "%s :: Update Dynamic Fields" % (settings.title)

    # NOTE: the previous version also queried auth_user to build a
    # `userlist` that was never read; removed as dead code.

    # distinct asset groups feed the form's drop-down
    ag = db(db.t_hosts).select(db.t_hosts.f_asset_group, distinct=True).as_list()
    # FIX: materialize as a list -- map() returns a one-shot iterator on
    # Python 3, which IS_IN_SET cannot safely iterate more than once
    asset_groups = [row["f_asset_group"] for row in ag]

    form = SQLFORM.factory(
        Field("f_exploit_link", type="boolean", default=True, label=T("Exploit linking")),
        Field("f_host_status", type="boolean", default=True, label=T("Host Service/Vuln counts")),
        Field(
            "f_asset_group",
            type="list:string",
            label=T("Asset Group"),
            requires=IS_EMPTY_OR(IS_IN_SET(asset_groups, multiple=False)),
        ),
        Field("f_taskit", type="boolean", default=auth.user.f_scheduler_tasks, label=T("Run in background task")),
    )

    from skaldship.general import do_host_status
    from skaldship.exploits import connect_exploits

    if form.accepts(request.vars, session):
        if form.vars.f_exploit_link:
            connect_exploits()
        if form.vars.f_host_status:
            if form.vars.f_taskit:
                task = scheduler.queue_task(
                    do_host_status,
                    pvars=dict(asset_group=form.vars.f_asset_group),
                    group_name=settings.scheduler_group_name,
                    sync_output=5,
                    timeout=300,  # 5 minutes
                )
                if task.id:
                    redirect(URL("tasks", "status", args=task.id))
                else:
                    # FIX: was assigned to an unused local `resp_text`,
                    # silently discarding the error message
                    response.flash = "Error submitting job: %s" % (task.errors)
            else:
                do_host_status(asset_group=form.vars.f_asset_group)
                response.flash = "Task completed!"
    elif form.errors:
        response.flash = "Error in form"

    return dict(form=form, err404=get_oreally_404(request.folder))
def import_nexpose_xml():
    """
    Insert/Update exploit references from Nexpose exploits.xml file

    File is located in /opt/rapid7/nexpose/plugins/conf
    """
    import os

    response.title = "%s :: Import Nexpose Exploits XML" % (settings.title)

    upload_dir = os.path.join(request.folder, 'data', 'misc')
    form = SQLFORM.factory(
        Field('f_filename', 'upload', uploadfolder=upload_dir, label=T('XML File')),
        Field('f_use_kvasir_local', 'boolean', label=T('Use Kvasir static path')),
        Field('f_use_local', 'boolean', label=T('Use local file path')),
        Field('f_pathname', 'string', default="/opt/rapid7/nexpose/plugins/conf", label=T('Local pathname')),
        Field('f_taskit', type='boolean', default=auth.user.f_scheduler_tasks, label=T('Run in background task')),
        col3={
            'f_use_kvasir_local': 'static/etc/nexpose_exploits.xml',
            'f_use_local': 'Directory where exploits.xml is located',
            'f_pathname': 'Requires Nexpose and possibly root access'
        }
    )

    if form.errors:
        response.flash = 'Error in form'
    elif form.accepts(request.vars, session):
        # pick the exploits.xml to process: local dir, static copy, or upload
        if form.vars.f_use_local:
            filename = os.path.join(form.vars.f_pathname, 'exploits.xml')
        elif form.vars.f_use_kvasir_local:
            filename = os.path.join(request.folder, 'static', 'etc', 'nexpose_exploits.xml')
        else:
            filename = os.path.join(request.folder, 'data', 'misc', form.vars.f_filename)

        if form.vars.f_taskit:
            # queue the import on the scheduler
            task = scheduler.queue_task(
                nexpose_exploit_xml,
                pargs=[filename],
                group_name=settings.scheduler_group_name,
                sync_output=5,
                timeout=settings.scheduler_timeout,
            )
            if task.id:
                redirect(URL('tasks', 'status', args=task.id))
            else:
                response.flash = "Error submitting job: %s" % (task.errors)
        else:
            # process inline, then jump back to the exploit list
            from skaldship.nexpose import process_exploits
            from skaldship.exploits import connect_exploits
            process_exploits(filename)
            connect_exploits()
            redirect(URL('list'))

    return dict(form=form)
def process_exploits(filename=None):
    """
    Process Nexpose exploits.xml file into the database

    :param filename: path to the Nexpose exploits.xml file
    :returns: True on completion
    :raises Exception: wraps XML parse errors and IOError on open
    """
    log("Processing Nexpose exploits file: %s ..." % filename)
    try:
        exploits = etree.parse(filename)
    except etree.ParseError as e:
        raise Exception("Error processing file: %s" % e)
    except IOError as e:
        raise Exception("Error opening file: %s" % e)

    r = exploits.getroot()
    counter = 0
    from .exploits import add_exploit, connect_exploits
    for exploit in r.findall('exploit'):
        #"adobe-unspec-bof-cve-2010-1297","13787","0day Exploit for Adobe Flash and Reader PoC (from the wild)","Description","1","Expert"
        f_name = exploit.findtext('name')
        f_title = exploit.findtext('id')
        # encode as latin-1 then decode as cp1252 -- presumably repairs
        # mojibake from the feed (order matters; do not swap the codecs).
        # NOTE(review): if <description> is absent, str(None) stores the
        # literal text "None" -- confirm whether that is intended.
        f_description = str(exploit.findtext('description')).encode(
            'iso-8859-1').decode('cp1252')
        # undo the feed's escaping of quotes and hex prefixes
        f_description = f_description.replace("\\'", "'").replace('\\x', "0x")
        f_source = exploit.findtext('source')
        f_level = exploit.findtext(
            'rank') or 'Unknown'  # exploiter experience level estimate
        f_rank = exploit.findtext(
            'exploitrank') or 'Unknown'  # rank of the exploit

        # exploit records can have multiple Nexpose vulnerabilitiy identifiers
        f_vulnid = []
        for nex_id in exploit.findall("vulnerabilities/vulnerability"):
            f_vulnid.append(nex_id.get('id').lower())

        # add_exploit returns a positive id on success
        res = add_exploit(
            cve=None,
            vuln_ids=f_vulnid,
            f_name=f_name,
            f_title=f_title,
            f_description=f_description,
            f_source=f_source,
            f_level=f_level,
            f_rank=f_rank,
        )
        if res > 0:
            counter += 1
        else:
            log("Error importing exploit: %s" % f_name, logging.ERROR)

    # re-link all exploits to vulnerabilities after the batch insert
    connect_exploits()
    log("%d exploits added/updated" % counter)
    return True
def update_dynamic_fields():
    """
    Executes the following functions that update dynamic field entries:

    skaldship.hosts.do_host_status
    skaldship.exploits.connect_exploits
    """
    response.title = "%s :: Update Dynamic Fields" % (settings.title)

    # NOTE: the previous version also queried auth_user to build a
    # `userlist` that was never read; removed as dead code.

    # distinct asset groups feed the form's drop-down
    ag = db(db.t_hosts).select(db.t_hosts.f_asset_group, distinct=True).as_list()
    # FIX: materialize as a list -- map() returns a one-shot iterator on
    # Python 3, which IS_IN_SET cannot safely iterate more than once
    asset_groups = [row['f_asset_group'] for row in ag]

    form = SQLFORM.factory(
        Field('f_exploit_link', type='boolean', default=True, label=T('Exploit linking')),
        Field('f_host_status', type='boolean', default=True, label=T('Host Service/Vuln counts')),
        Field('f_asset_group', type='list:string', label=T('Asset Group'),
              requires=IS_EMPTY_OR(IS_IN_SET(asset_groups, multiple=False))),
        Field('f_taskit', type='boolean', default=auth.user.f_scheduler_tasks, label=T('Run in background task')),
    )

    from skaldship.hosts import do_host_status
    from skaldship.exploits import connect_exploits

    if form.accepts(request.vars, session):
        if form.vars.f_exploit_link:
            connect_exploits()
        if form.vars.f_host_status:
            if form.vars.f_taskit:
                task = scheduler.queue_task(
                    do_host_status,
                    pvars=dict(asset_group=form.vars.f_asset_group),
                    group_name=settings.scheduler_group_name,
                    sync_output=5,
                    timeout=settings.scheduler_timeout,
                )
                if task.id:
                    redirect(URL('tasks', 'status', args=task.id))
                else:
                    # FIX: was assigned to an unused local `resp_text`,
                    # silently discarding the error message
                    response.flash = "Error submitting job: %s" % (task.errors)
            else:
                do_host_status(asset_group=form.vars.f_asset_group)
                response.flash = "Task completed!"
    elif form.errors:
        response.flash = 'Error in form'

    return dict(
        form=form,
        err404=get_oreally_404(request.folder),
    )
def import_vulnid():
    """
    Downloads the detailed vulnerability data from Nexpose based on a vuln
    id passed to it

    Accepts a single Nexpose ID and/or a CRLF-separated list; each is
    fetched via the Nexpose API, parsed, and upserted into t_vulndata
    with its references.
    """
    form = SQLFORM.factory(
        Field("nexid", "string", label=T("Nexpose ID")),
        Field("nexid_list", "text", label=T("Nexpose ID List"))
    )
    response.title = "%s :: Import Nexpose VulnID" % settings.title
    nexpose_config = nexpose_get_config()
    if form.process().accepted:
        from NexposeAPI import VulnData
        from skaldship.nexpose import vuln_parse
        nxvulns = VulnData()
        nxvulns.host = nexpose_config["host"]
        nxvulns.port = nexpose_config["port"]
        # collect the IDs from both form fields (textarea is CRLF-separated)
        nexpose_ids = []
        if form.vars.nexid:
            nexpose_ids.extend([form.vars.nexid])
        if form.vars.nexid_list:
            nexpose_ids.extend(form.vars.nexid_list.split("\r\n"))
        res = nxvulns.login(user_id=nexpose_config["user"], password=nexpose_config["password"])
        if res:
            stats = {"added": 0, "invalid": 0}
            for nexid in nexpose_ids:
                vulndetails = nxvulns.detail(nexid)
                if vulndetails is not None:
                    (vulnfields, references) = vuln_parse(vulndetails.find("Vulnerability"), fromapi=True)
                else:
                    stats["invalid"] += 1
                    continue
                # add the vulnerability to t_vulndata
                query = db.t_vulndata.f_vulnid == nexid
                vulnid = db.t_vulndata.update_or_insert(query, **vulnfields)
                if not vulnid:
                    # update_or_insert returns None on update; re-fetch the id
                    row = db(query).select().first()
                    if row:
                        vulnid = row.id
                    else:
                        log(" [!] Could not find %s in database.."
                            % nexid, logging.WARN)
                        stats["invalid"] += 1
                        continue
                db.commit()
                # add the references
                if vulnid is not None and references:
                    for reference in references:
                        # check to see if reference exists first
                        query = (db.t_vuln_refs.f_source == reference[0]) & (db.t_vuln_refs.f_text == reference[1])
                        ref_id = db.t_vuln_refs.update_or_insert(query, f_source=reference[0], f_text=reference[1])
                        if not ref_id:
                            ref_id = db(query).select().first().id
                        # make many-to-many relationship with t_vuln_data
                        db.t_vuln_references.update_or_insert(f_vuln_ref_id=ref_id, f_vulndata_id=vulnid)
                        db.commit()
                # NOTE(review): connect_exploits() runs once per imported ID
                # here -- likely cheaper to run once after the loop; confirm.
                from skaldship.exploits import connect_exploits
                connect_exploits()
                log(" [-] Added Nexpose vulnerability: %s" % nexid)
                stats["added"] += 1
            response.flash = "%s added, %s skipped" % (stats["added"], stats["invalid"])
            return dict(form=form)
        else:
            response.flash = "Unable to login to Nexpose"
    elif form.errors:
        response.flash = "Error in form"
    return dict(form=form)
def import_vulnid():
    """
    Downloads the detailed vulnerability data from Nexpose based on a vuln
    id passed to it

    Accepts a single Nexpose ID and/or a CRLF-separated list; each is
    fetched via the Nexpose API, parsed, and upserted into t_vulndata
    with its references.
    """
    form = SQLFORM.factory(
        Field('nexid', 'string', label=T('Nexpose ID')),
        Field('nexid_list', 'text', label=T('Nexpose ID List')))
    response.title = "%s :: Import Nexpose VulnID" % settings.title
    nexpose_config = nexpose_get_config()
    if form.process().accepted:
        from NexposeAPI import VulnData
        from skaldship.nexpose import vuln_parse
        nxvulns = VulnData()
        nxvulns.host = nexpose_config['host']
        nxvulns.port = nexpose_config['port']
        # collect the IDs from both form fields (textarea is CRLF-separated)
        nexpose_ids = []
        if form.vars.nexid:
            nexpose_ids.extend([form.vars.nexid])
        if form.vars.nexid_list:
            nexpose_ids.extend(form.vars.nexid_list.split('\r\n'))
        res = nxvulns.login(user_id=nexpose_config['user'],
                            password=nexpose_config['password'])
        if res:
            stats = {'added': 0, 'invalid': 0}
            for nexid in nexpose_ids:
                vulndetails = nxvulns.detail(nexid)
                if vulndetails is not None:
                    (vulnfields, references) = vuln_parse(
                        vulndetails.find('Vulnerability'), fromapi=True)
                else:
                    stats['invalid'] += 1
                    continue
                # add the vulnerability to t_vulndata
                query = (db.t_vulndata.f_vulnid == nexid)
                vulnid = db.t_vulndata.update_or_insert(query, **vulnfields)
                if not vulnid:
                    # update_or_insert returns None on update; re-fetch the id
                    row = db(query).select().first()
                    if row:
                        vulnid = row.id
                    else:
                        log(" [!] Could not find %s in database.."
                            % nexid, logging.WARN)
                        stats['invalid'] += 1
                        continue
                db.commit()
                # add the references
                if vulnid is not None and references:
                    for reference in references:
                        # check to see if reference exists first
                        query = (db.t_vuln_refs.f_source == reference[0]) & (
                            db.t_vuln_refs.f_text == reference[1])
                        ref_id = db.t_vuln_refs.update_or_insert(
                            query, f_source=reference[0], f_text=reference[1])
                        if not ref_id:
                            ref_id = db(query).select().first().id
                        # make many-to-many relationship with t_vuln_data
                        db.t_vuln_references.update_or_insert(
                            f_vuln_ref_id=ref_id, f_vulndata_id=vulnid)
                        db.commit()
                # NOTE(review): connect_exploits() runs once per imported ID
                # here -- likely cheaper to run once after the loop; confirm.
                from skaldship.exploits import connect_exploits
                connect_exploits()
                log(" [-] Added Nexpose vulnerability: %s" % nexid)
                stats['added'] += 1
            response.flash = "%s added, %s skipped" % (stats['added'],
                                                      stats['invalid'])
            return dict(form=form)
        else:
            response.flash = "Unable to login to Nexpose"
    elif form.errors:
        response.flash = "Error in form"
    return dict(form=form)
def import_canvas_xml():
    """
    Process ImmunitySec's Exploit.xml which can be generated from the URL
    http://exploitlist.immunityinc.com/ or by running ./canvasengine.py -e
    from your CANVAS directory

    http://exploitlist.immunityinc.com/home/serve/live
    """
    import os
    kvasir_path = os.path.join(request.folder, 'static/etc')
    form = SQLFORM.factory(
        Field('f_filename', 'upload',
              uploadfolder=os.path.join(request.folder, 'data/misc'),
              label=T('XML File')),
        Field('f_use_kvasir_local', 'boolean', label=T('Use Kvasir static path')),
        Field('f_use_local', 'boolean', label=T('Use local file path')),
        Field('f_pathname', 'string', default=kvasir_path, label=T('Local path')),
        Field('f_download', 'boolean', label=T('Download')),
        Field('f_taskit', type='boolean', default=auth.user.f_scheduler_tasks,
              label=T('Run in background task')),
        col3={
            'f_use_kvasir_local': 'static/etc/canvas_exploits.xml',
            'f_use_local': 'Directory where canvas_exploits.xml is located',
            'f_download': 'Download from ImmunitySec website',
        })

    if form.errors:
        response.flash = 'Error in form'
    elif form.accepts(request.vars, session):
        # Resolve the XML source: local directory, Kvasir's static copy,
        # direct download (None -> the processor fetches it), or an upload.
        if form.vars.f_use_local:
            filename = os.path.join(form.vars.f_pathname, 'canvas_exploits.xml')
        elif form.vars.f_use_kvasir_local:
            filename = os.path.join(request.folder, 'static', 'etc',
                                    'canvas_exploits.xml')
        elif form.vars.f_download:
            filename = None
        else:
            filename = os.path.join(request.folder, 'data', 'misc',
                                    form.vars.f_filename)

        if form.vars.f_taskit:
            # run the import as a background scheduler task
            task = scheduler.queue_task(
                canvas_exploit_xml,
                pargs=[filename],
                group_name=settings.scheduler_group_name,
                sync_output=5,
                timeout=settings.scheduler_timeout,
            )
            if task.id:
                redirect(URL('tasks', 'status', args=task.id))
            else:
                response.flash = "Error submitting job: %s" % (task.errors)
        else:
            # run synchronously inside this request
            from skaldship.canvas import process_exploits
            from skaldship.exploits import connect_exploits
            process_exploits(filename)
            connect_exploits()
            response.flash = "Canvas Exploit data uploaded"
            redirect(URL('list'))

    # FIX: the title literal was broken mid-string across a line break;
    # reconstructed as a single-line format string.
    response.title = "%s :: Import ImmunitySec CANVAS Exploits XML" % (
        settings.title)
    return dict(form=form)
def import_nexpose_xml():
    """
    Insert/Update exploit references from Nexpose exploits.xml file

    File is located in /opt/rapid7/nexpose/plugins/conf
    """
    import os

    response.title = "%s :: Import Nexpose Exploits XML" % (settings.title)

    form = SQLFORM.factory(
        Field('f_filename', 'upload',
              uploadfolder=os.path.join(request.folder, 'data', 'misc'),
              label=T('XML File')),
        Field('f_use_kvasir_local', 'boolean', label=T('Use Kvasir static path')),
        Field('f_use_local', 'boolean', label=T('Use local file path')),
        Field('f_pathname', 'string', default="/opt/rapid7/nexpose/plugins/conf",
              label=T('Local pathname')),
        Field('f_taskit', type='boolean', default=auth.user.f_scheduler_tasks,
              label=T('Run in background task')),
        col3={
            'f_use_kvasir_local': 'static/etc/nexpose_exploits.xml',
            'f_use_local': 'Directory where exploits.xml is located',
            'f_pathname': 'Requires Nexpose and possibly root access'
        })

    if form.errors:
        response.flash = 'Error in form'
    elif form.accepts(request.vars, session):
        # process nexpose exploits.xml file -- choose the source path
        if form.vars.f_use_local:
            filename = os.path.join(form.vars.f_pathname, 'exploits.xml')
        elif form.vars.f_use_kvasir_local:
            filename = os.path.join(request.folder, 'static', 'etc',
                                    'nexpose_exploits.xml')
        else:
            filename = os.path.join(request.folder, 'data', 'misc',
                                    form.vars.f_filename)

        if form.vars.f_taskit:
            # hand off to the scheduler
            task = scheduler.queue_task(
                nexpose_exploit_xml,
                pargs=[filename],
                group_name=settings.scheduler_group_name,
                sync_output=5,
                timeout=settings.scheduler_timeout,
            )
            if task.id:
                redirect(URL('tasks', 'status', args=task.id))
            else:
                response.flash = "Error submitting job: %s" % (task.errors)
        else:
            # process inline, then return to the exploit list
            from skaldship.nexpose import process_exploits
            from skaldship.exploits import connect_exploits
            process_exploits(filename)
            connect_exploits()
            redirect(URL('list'))

    return dict(form=form)
def process_scanfile(
    filename=None,
    asset_group=None,
    engineer=None,
    msf_settings={},
    ip_ignore_list=None,
    ip_include_list=None,
    update_hosts=False,
):
    """
    Process a Nessus XML or CSV Report file. There are two types of CSV
    output, the first is very basic and is generated by a single Nessus
    instance. The second comes from the centralized manager. I forget
    what it's called but it packs more data. If you have a standalone
    scanner, always export/save as .nessus.

    :param filename: A local filename to process
    :param asset_group: Asset group to assign hosts to
    :param engineer: Engineer record number to assign hosts to
    :param msf_workspace: If set a Metasploit workspace to send the scanfile
                          to via the API
    :param ip_ignore_list: List of IP addresses to ignore
    :param ip_include_list: List of IP addresses to ONLY import (skip all
                            others)
    :param update_hosts: Boolean to update/append to hosts, otherwise hosts
                         are skipped
    :returns msg: A string status message

    NOTE(review): msf_settings={} is a mutable default argument -- shared
    across calls; should be None + internal default. Confirm before fixing.
    """
    from skaldship.cpe import lookup_cpe
    nessus_config = nessus_get_config()
    db = current.globalenv['db']

    # build the hosts only/exclude list
    ip_exclude = []
    if ip_ignore_list:
        ip_exclude = ip_ignore_list.split('\r\n')
        # TODO: check for ip subnet/range and break it out to individuals
    ip_only = []
    if ip_include_list:
        ip_only = ip_include_list.split('\r\n')
        # TODO: check for ip subnet/range and break it out to individuals

    log(" [*] Processing Nessus scan file %s" % filename)

    # NOTE(review): opened in binary mode but never closed, and the sniffed
    # first line is bytes -- on Python 3, bytes.startswith('Plugin') raises
    # TypeError and csv.DictReader requires a text-mode handle. Confirm the
    # runtime (this reads like Python 2 era code) before changing.
    fIN = open(filename, "rb")
    # check to see if file is a CSV file, if so set nessus_csv to True
    line = fIN.readline()
    fIN.seek(0)

    if line.startswith('Plugin'):
        # Standalone-scanner CSV: bare header starting with "Plugin"
        import csv
        csv.field_size_limit(sys.maxsize)  # field size must be increased
        nessus_iterator = csv.DictReader(fIN)
        nessus_csv_type = 'Standalone'
        log(" [*] CSV file is from Standalone scanner")
    elif line.startswith('"Plugin"'):
        # SecurityCenter CSV: quoted header
        import csv
        csv.field_size_limit(sys.maxsize)  # field size must be increased
        nessus_iterator = csv.DictReader(fIN)
        nessus_csv_type = 'SecurityCenter'
        log(" [*] CSV file is from SecurityCenter")
    else:
        # not CSV -- try to parse it as a .nessus XML report
        nessus_csv_type = False
        try:
            nessus_xml = etree.parse(filename)
            log(" [*] XML file identified")
        except etree.ParseError as e:
            msg = " [!] Invalid Nessus scan file (%s): %s " % (filename, e)
            log(msg, logging.ERROR)
            return msg
        root = nessus_xml.getroot()
        nessus_iterator = root.findall("Report/ReportHost")

    nessus_hosts = NessusHosts(engineer, asset_group, ip_include_list,
                               ip_ignore_list, update_hosts)
    nessus_vulns = NessusVulns()
    services = Services()
    svcs = db.t_services

    def _plugin_parse(host_id, vuln_id, vulndata, vulnextradata):
        # Parse the plugin data. This is where CSV and XML diverge.
        port = vulnextradata['port']
        proto = vulnextradata['proto']
        svcname = vulnextradata['svcname']
        plugin_output = vulnextradata['plugin_output']
        pluginID = vulnextradata['pluginID']

        # check to see if we are to ignore this plugin ID or not.
        if int(pluginID) in nessus_config.get('ignored_plugins'):
            return

        svc_fields = {
            'f_proto': proto,
            'f_number': port,
            'f_name': svcname,
            'f_hosts_id': host_id
        }
        svc_rec = services.get_record(**svc_fields)

        # Nessus only guesses the services (and appends a ? at the end)
        splited = svc_fields['f_name'].split("?")
        if svc_rec is not None:
            # merge a differing guessed name into the existing record name
            if splited[0] != svc_rec.f_name and svc_rec.f_name not in splited[
                    0]:
                svc_fields['f_name'] = "%s | %s" % (svc_rec.f_name, splited[0])
            # NOTE(review): svc_id is assigned but never used
            svc_id = svcs.update_or_insert(_key=svc_rec.id, **svc_fields)
        else:
            svc_fields['f_name'] = splited[0]
            svc_rec = services.get_record(create_or_update=True, **svc_fields)

        # create t_service_vulns entry for this pluginID
        svc_vuln = {
            'f_services_id': svc_rec.id,
            'f_vulndata_id': vuln_id,
            'f_proof': plugin_output
        }
        # you may be a vulnerability if...
        if vulnextradata['exploit_available'] == 'true':
            # if you have exploits available you may be an extra special vulnerability
            svc_vuln['f_status'] = 'vulnerable-exploited'
        elif svcname == 'general':
            # if general service then you may not be a vulnerability
            svc_vuln['f_status'] = 'general'
        elif vulndata['f_severity'] == 0:
            # if there is no severity then you may not be a vulnerability
            svc_vuln['f_status'] = 'general'
        else:
            # you're a vulnerability
            svc_vuln['f_status'] = 'vulnerable'
        db.t_service_vulns.update_or_insert(**svc_vuln)

        ##############################################################################
        ## Let the parsing of Nessus Plugin Output commence!
        ##
        ## Many Plugins provide useful data in plugin_output. We'll go through the
        ## list here and extract out the good bits and add them to Kvasir's database.
        ## Some Plugins will not be added as vulnerabilities because they're truly
        ## informational. This list will change if somebody keeps it up to date.
        ##
        ## TODO: This should be moved into a separate function so we can also process csv data
        ## TODO: Add t_service_info key/value records (standardize on Nexpose-like keys?)
        ##############################################################################

        d = {}
        nessus_vulns.stats['added'] += 1

        #### SNMP
        if pluginID == '10264':
            # snmp community strings
            for snmp in re.findall(' - (.*)', plugin_output):
                db.t_snmp.update_or_insert(f_hosts_id=host_id, f_community=snmp)
                db.commit()

        #### SMB/NetBIOS
        if pluginID in ['10860', '10399']:
            # SMB Use Host SID (10860) or Domain SID (10399) to enumerate users
            for user in re.findall(' - (.*)', plugin_output):
                username = user[0:user.find('(') - 1]
                # NOTE(review): bare except -- narrows to "no (id N)" match
                # but also hides any other error; should be IndexError
                try:
                    gid = re.findall('\(id (\d+)', user)[0]
                except:
                    gid = '0'
                # Windows users, local groups, and global groups
                d['f_username'] = username
                d['f_gid'] = gid
                d['f_services_id'] = svc_rec.id
                d['f_source'] = '10860'
                db.t_accounts.update_or_insert(**d)
                db.commit()

        if pluginID == '17651':
            # Microsoft Windows SMB : Obtains the Password Policy
            d['f_hosts_id'] = host_id
            try:
                d['f_lockout_duration'] = re.findall(
                    'Locked account time \(s\): (\d+)', plugin_output)[0]
                d['f_lockout_limit'] = re.findall(
                    'Number of invalid logon before locked out \(s\): (\d+)',
                    plugin_output)[0]
            except IndexError:
                # defaults when the policy values aren't in the output
                d['f_lockout_duration'] = 1800
                d['f_lockout_limit'] = 0
            db.t_netbios.update_or_insert(**d)
            db.commit()

        if pluginID == '10395':
            # Microsoft Windows SMB Shares Enumeration
            d['f_hosts_id'] = host_id
            d['f_shares'] = re.findall(' - (.*)', plugin_output)
            db.t_netbios.update_or_insert(**d)

        if pluginID == '10150':
            # Windows NetBIOS / SMB Remote Host Information Disclosure
            try:
                d['f_hosts_id'] = host_id
                d['f_domain'] = re.findall('(\w+).*= Workgroup / Domain name',
                                           plugin_output)[0]
                db.t_netbios.update_or_insert(**d)
            except IndexError:
                pass

        #### FTP
        if pluginID == '10092':
            # FTP Server Detection
            RE_10092 = re.compile('The remote FTP banner is :\n\n(.*)', re.DOTALL)
            try:
                d['f_banner'] = RE_10092.findall(plugin_output)[0]
                # NOTE(review): Row.update() only mutates the in-memory row;
                # compare with update_record() used for pluginID 10267
                svc_rec.update(**d)
                db.commit()
                # NOTE(review): this bare db(...) expression is a no-op
                db(db.t_service_info)
                db.t_service_info.update_or_insert(f_services_id=svc_rec.id,
                                                   f_name='ftp.banner',
                                                   f_text=d['f_banner'])
                db.commit()
            except Exception as e:
                log("Error parsing FTP banner (id 10092): %s" % str(e),
                    logging.ERROR)

        #### SSH
        if pluginID == '10267':
            # SSH Server Type and Version Information
            try:
                ssh_banner, ssh_supported_auth = re.findall(
                    'SSH version : (.*)\nSSH supported authentication : (.*)',
                    plugin_output)[0]
                d['f_banner'] = ssh_banner
                svc_rec.update_record(**d)
                db.commit()
                db.t_service_info.update_or_insert(f_services_id=svc_rec.id,
                                                   f_name='ssh.banner',
                                                   f_text=d['f_banner'])
                db.t_service_info.update_or_insert(f_services_id=svc_rec.id,
                                                   f_name='ssh.authentication',
                                                   f_text=ssh_supported_auth)
                db.commit()
            except Exception as e:
                log("Error parsing SSH banner (id 10267): %s" % str(e),
                    logging.ERROR)

        if pluginID == '10881':
            # SSH Protocol Versions Supported
            try:
                ssh_versions = re.findall(' - (.*)', plugin_output)
                db.t_service_info.update_or_insert(
                    f_services_id=svc_rec.id,
                    f_name='ssh.versions',
                    f_text=', '.join(ssh_versions))
                db.commit()
            except Exception as e:
                log("Error parsing SSH versions (id 10881): %s" % str(e),
                    logging.ERROR)
            try:
                fingerprint = re.findall('SSHv2 host key fingerprint : (.*)',
                                         plugin_output)
                db.t_service_info.update_or_insert(f_services_id=svc_rec.id,
                                                   f_name='ssh.fingerprint',
                                                   f_text=fingerprint[0])
                db.commit()
            except Exception as e:
                log("Error parsing SSH fingerprint (id 10881): %s" % str(e),
                    logging.ERROR)

        ### Telnet
        if pluginID in ['10281', '42263']:
            # Telnet banner -- extract the text between the two "snip" rulers
            try:
                snip_start = plugin_output.find(
                    'snip ------------------------------\n')
                snip_end = plugin_output.rfind(
                    'snip ------------------------------\n')
                if snip_start > 0 and snip_end > snip_start:
                    # 36 == length of the snip marker line being trimmed off
                    d['f_banner'] = plugin_output[snip_start + 36:snip_end - 36]
                    svc_rec.update(**d)
                    db.commit()
                else:
                    log(
                        "Error finding Telnet banner: (st: %s, end: %s, banner: %s)"
                        % (snip_start, snip_end, plugin_output), logging.ERROR)
            except Exception as e:
                log("Error parsing Telnet banner: %s" % str(e), logging.ERROR)

        ### HTTP Server Info
        if pluginID == '10107':
            # HTTP Server Type and Version
            RE_10107 = re.compile('The remote web server type is :\n\n(.*)',
                                  re.DOTALL)
            try:
                d['f_banner'] = RE_10107.findall(plugin_output)[0]
                svc_rec.update(**d)
                db.commit()
                db.t_service_info.update_or_insert(f_services_id=svc_rec.id,
                                                   f_name='http.banner.server',
                                                   f_text=d['f_banner'])
                db.commit()
            except Exception as e:
                log("Error parsing HTTP banner (id 10107): %s" % str(e),
                    logging.ERROR)

        ### Operating Systems and CPE
        if pluginID == '45590':
            # Common Platform Enumeration (CPE)
            for cpe_os in re.findall('(cpe:/o:.*)', plugin_output):
                os_id = lookup_cpe(cpe_os.replace('cpe:/o:', '').rstrip(' '))
                if os_id:
                    db.t_host_os_refs.update_or_insert(
                        f_certainty='0.90',  # just a stab
                        f_family='Unknown',  # not given in Nessus
                        f_class=hostdata.get('system-type'),
                        f_hosts_id=host_id,
                        f_os_id=os_id)
                    db.commit()

    # main loop: one iteration per host (XML) or per CSV row
    for host in nessus_iterator:
        if not nessus_csv_type:
            (host_id, hostdata, hostextradata) = nessus_hosts.parse(
                host.find('HostProperties'))
        else:
            (host_id, hostdata, hostextradata) = nessus_hosts.parse(host)
        if not host_id:
            # no host_id returned, it was either skipped or errored out
            continue

        if not nessus_csv_type:
            # Parse the XML <ReportItem> sections where plugins, ports and output are all in
            for rpt_item in host.iterfind('ReportItem'):
                (vuln_id, vulndata, extradata) = nessus_vulns.parse(rpt_item)
                if not vuln_id:
                    # no vulnerability id
                    continue
                _plugin_parse(host_id, vuln_id, vulndata, extradata)
        else:
            # CSV: each row is a single (host, plugin) record
            (vuln_id, vulndata, extradata) = nessus_vulns.parse(host)
            _plugin_parse(host_id, vuln_id, vulndata, extradata)

    # optionally forward the raw scan file to Metasploit Pro
    if msf_settings.get('workspace'):
        try:
            # check to see if we have a Metasploit RPC instance configured and talking
            from MetasploitProAPI import MetasploitProAPI
            msf_api = MetasploitProAPI(host=msf_settings.get('url'),
                                       apikey=msf_settings.get('key'))
            working_msf_api = msf_api.login()
        except Exception as error:
            log(" [!] Unable to authenticate to MSF API: %s" % str(error),
                logging.ERROR)
            working_msf_api = False
        try:
            # NOTE(review): file handle leaked; "r+" read/write mode is
            # unnecessary for a read -- confirm before changing
            scan_data = open(filename, "r+").readlines()
        except Exception as error:
            log(
                " [!] Error loading scan data to send to Metasploit: %s" %
                str(error), logging.ERROR)
            scan_data = None
        if scan_data and working_msf_api:
            task = msf_api.pro_import_data(
                msf_settings.get('workspace'),
                "".join(scan_data),
                {
                    #'preserve_hosts': form.vars.preserve_hosts,
                    'blacklist_hosts': "\n".join(ip_ignore_list)
                },
            )
            msf_workspace_num = session.msf_workspace_num or 'unknown'
            msfurl = os.path.join(msf_settings.get('url'), 'workspaces',
                                  msf_workspace_num, 'tasks', task['task_id'])
            log(" [*] Added file to MSF Pro: %s" % msfurl)

    # any new Nessus vulns need to be checked against exploits table and connected
    log(" [*] Connecting exploits to vulns and performing do_host_status")
    connect_exploits()
    do_host_status(asset_group=asset_group)

    msg = (' [*] Import complete: hosts: %s added, %s updated, %s skipped '
           '- %s vulns processed, %s added' %
           (nessus_hosts.stats['added'], nessus_hosts.stats['updated'],
            nessus_hosts.stats['skipped'], nessus_vulns.stats['processed'],
            nessus_vulns.stats['added']))
    log(msg)
    return msg
def import_all_vulndata(overwrite=False, nexpose_server=None):
    """
    Uses the NexposeAPI and imports each and every vulnerability to Kvasir.
    Can take a looooong time.

    Args:
        overwrite: Whether or not to overwrite an existing t_vulndata record
        nexpose_server: Dict of Nexpose connection settings
            ('host', 'port', 'user', 'pw'). Defaults to localhost:3780.

    Returns:
        msg: A string message of status, or False on a fatal API/parse error.
    """
    from NexposeAPI import VulnData
    db = current.globalenv['db']

    # fix: a mutable default argument ({}) is shared across every call; use a
    # None sentinel and build a fresh dict per call instead
    if nexpose_server is None:
        nexpose_server = {}

    vuln_class = VulnData()
    vuln_class.host = nexpose_server.get('host', 'localhost')
    vuln_class.port = nexpose_server.get('port', '3780')

    if vuln_class.login(user_id=nexpose_server.get('user'), password=nexpose_server.get('pw')):
        log(" [*] Populating list of Nexpose vulnerability ID summaries")
        try:
            vuln_class.populate_summary()
        except Exception as e:
            log(" [!] Error populating summaries: %s" % str(e), logging.ERROR)
            return False

        try:
            vulnxml = etree.parse(StringIO(vuln_class.vulnxml))
        except Exception as e:
            log(" [!] Error parsing summary XML: %s" % str(e), logging.ERROR)
            return False

        vulns = vulnxml.findall('VulnerabilitySummary')
        log(" [*] %s vulnerabilities to parse" % len(vulns))

        if vuln_class.vulnerabilities > 0:
            # fix: build a set directly (was a side-effecting list
            # comprehension); also gives O(1) membership tests in the loop
            existing_vulnids = {
                x['f_vulnid'] for x in db(db.t_vulndata.f_source == "Nexpose").select(
                    db.t_vulndata.f_vulnid).as_list()
            }
            log(" [*] Found %d vulnerabilities in the database already." % (len(existing_vulnids)))

            stats = {'added': 0, 'updated': 0, 'skipped': 0, 'errors': 0}
            for vuln in vulns:
                if vuln.attrib['id'] in existing_vulnids and not overwrite:
                    # skip over existing entries if we're not overwriting
                    stats['skipped'] += 1
                    continue

                try:
                    vulndetails = vuln_class.detail(vuln.attrib['id'])
                except Exception as e:
                    log(
                        " [!] Error retrieving details for %s: %s" % (vuln.attrib['id'], str(e)),
                        logging.ERROR)
                    stats['errors'] += 1
                    if stats['errors'] == 50:
                        # too many consecutive API failures; something is broken
                        log(" [!] Too many errors, aborting!", logging.ERROR)
                        return False
                    else:
                        continue

                if vulndetails is not None:
                    (vulnfields, references) = vuln_parse(
                        vulndetails.find('Vulnerability'), fromapi=True)
                else:
                    log(" [!] Unable to find %s in Nexpose" % vuln.attrib['id'], logging.WARN)
                    continue

                # add the vulnerability to t_vulndata; update_or_insert returns
                # a falsy id when an existing row was updated instead of added
                vulnid = db.t_vulndata.update_or_insert(**vulnfields)
                if not vulnid:
                    vulnid = db(db.t_vulndata.f_vulnid == vulnfields['f_vulnid']).select().first().id
                    stats['updated'] += 1
                    log(" [-] Updated %s" % vulnfields['f_vulnid'])
                else:
                    stats['added'] += 1
                    log(" [-] Added %s" % vulnfields['f_vulnid'])
                db.commit()

                # add the references
                if vulnid is not None and references:
                    for reference in references:
                        # check to see if reference exists first
                        query = (db.t_vuln_refs.f_source == reference[0]) & (
                            db.t_vuln_refs.f_text == reference[1])
                        ref_id = db.t_vuln_refs.update_or_insert(
                            query, f_source=reference[0], f_text=reference[1])
                        if not ref_id:
                            ref_id = db(query).select().first().id

                        # make many-to-many relationship with t_vuln_data
                        db.t_vuln_references.update_or_insert(
                            f_vuln_ref_id=ref_id, f_vulndata_id=vulnid)
                        db.commit()

            from skaldship.exploits import connect_exploits
            connect_exploits()
            msg = "%s added, %s updated, %s skipped" % (
                stats['added'], stats['updated'], stats['skipped'])
            log(" [*] %s" % msg)
        else:
            msg = "No vulndata populated from Nexpose"
            log(" [!] Error: %s" % msg, logging.ERROR)
    else:
        msg = "Unable to communicate with Nexpose"
        log(" [!] Error: %s" % msg, logging.ERROR)

    return msg
def process_xml( filename=None, asset_group=None, engineer=None, msf_settings={}, ip_ignore_list=None, ip_include_list=None, update_hosts=False, ): # Upload and process Nexpose XML Scan file from skaldship.cpe import lookup_cpe from skaldship.hosts import get_host_record from gluon.validators import IS_IPADDRESS import os db = current.globalenv['db'] session = current.globalenv['session'] parser = html.parser.HTMLParser() user_id = db.auth_user(engineer) # build the hosts only/exclude list ip_exclude = [] if ip_ignore_list: ip_exclude = ip_ignore_list.split('\r\n') # TODO: check for ip subnet/range and break it out to individuals ip_only = [] if ip_include_list: ip_only = ip_include_list.split('\r\n') # TODO: check for ip subnet/range and break it out to individuals log(" [*] Processing Nexpose scan file %s" % filename) try: nexpose_xml = etree.parse(filename) except etree.ParseError as e: msg = " [!] Invalid Nexpose XML file (%s): %s " % (filename, e) log(msg, logging.ERROR) return msg root = nexpose_xml.getroot() existing_vulnids = db(db.t_vulndata()).select( db.t_vulndata.id, db.t_vulndata.f_vulnid).as_dict(key='f_vulnid') log(" [*] Found %d vulnerabilities in the database already." % len(existing_vulnids)) # start with the vulnerability details vulns_added = 0 vulns_skipped = 0 vulns = root.findall("VulnerabilityDefinitions/vulnerability") log(" [*] Parsing %d vulnerabilities" % len(vulns)) for vuln in vulns: # nexpose identifiers are always lower case in kvasir. UPPER CASE IS FOR SHOUTING!!! 
vulnid = vuln.attrib['id'].lower() if vulnid in existing_vulnids: #log(" [-] Skipping %s - It's in the db already" % vulnid) vulns_skipped += 1 else: # add the vulnerability to t_vulndata - any duplicates are errored out (vulnfields, references) = vuln_parse(vuln, fromapi=False) try: vulnid = db.t_vulndata.update_or_insert(**vulnfields) if not vulnid: vulnid = db(db.t_vulndata.f_vulnid == vulnfields['f_vulnid']).select().first().id vulns_added += 1 db.commit() except Exception as e: log( " [!] Error inserting %s to vulndata: %s" % (vulnfields['f_vulnid'], e), logging.ERROR) vulnid = None db.commit() continue # add the references if vulnid is not None: for reference in references: # check to see if reference exists first ref_id = db(db.t_vuln_refs.f_text == reference[1]) if ref_id.count() == 0: # add because it doesn't ref_id = db.t_vuln_refs.insert(f_source=reference[0], f_text=reference[1]) db.commit() else: # pick the first reference as the ID ref_id = ref_id.select()[0].id # make many-to-many relationship with t_vuln_data res = db.t_vuln_references.insert(f_vuln_ref_id=ref_id, f_vulndata_id=vulnid) db.commit() log(" [*] %d Vulnerabilities added, %d skipped" % (vulns_added, vulns_skipped)) # re-make the existing_vulnids dict() since we've updated the system existing_vulnids = db(db.t_vulndata()).select( db.t_vulndata.id, db.t_vulndata.f_vulnid).as_dict(key='f_vulnid') # parse the nodes now nodes = root.findall("nodes/node") log(" [-] Parsing %d nodes" % len(nodes)) hoststats = {'added': 0, 'skipped': 0, 'updated': 0, 'errored': 0} hosts = [] # array of host_id fields for node in nodes: log(" [-] Node %s status is: %s" % (node.attrib['address'], node.attrib['status'])) #sys.stderr.write(msg) if node.attrib['status'] != "alive": hoststats['skipped'] += 1 continue if node.attrib['address'] in ip_exclude: log(" [-] Node is in exclude list... 
skipping") hoststats['skipped'] += 1 continue nodefields = {} if len(ip_only) > 0 and node.attrib['address'] not in ip_only: log(" [-] Node is not in the only list... skipping") hoststats['skipped'] += 1 continue # we'll just take the last hostname in the names list since it'll usually be the full dns name names = node.findall("names/name") for name in names: nodefields['f_hostname'] = name.text ip = node.attrib['address'] if IS_IPADDRESS()(ip): nodefields['f_ipaddr'] = ip else: log(" [!] Invalid IP Address: %s" % ip, logging.ERROR) nodefields['f_engineer'] = user_id nodefields['f_asset_group'] = asset_group nodefields['f_confirmed'] = False if 'hardware-address' in node.attrib: nodefields['f_macaddr'] = node.attrib['hardware-address'] if node.find('names/name') is not None: # XXX: for now just take the first hostname nodefields['f_hostname'] = node.find('names/name').text # check to see if IP exists in DB already query = (db.t_hosts.f_ipaddr == ip) host_rec = db(query).select().first() if host_rec is None: host_id = db.t_hosts.insert(**nodefields) db.commit() hoststats['added'] += 1 log(" [-] Adding IP: %s" % ip) elif update_hosts: db.commit() db(db.t_hosts.f_ipaddr == nodefields['f_ipaddr']).update( **nodefields) db.commit() host_id = get_host_record(nodefields['f_ipaddr']) host_id = host_id.id hoststats['updated'] += 1 log(" [-] Updating IP: %s" % ip) else: hoststats['skipped'] += 1 db.commit() log(" [-] Skipped IP: %s" % ip) continue hosts.append(host_id) # tests that aren't specific to any port we wrap up into a meta service # called "INFO" tests = node.findall("tests/test") if len(tests) > 0: svc_id = db.t_services.update_or_insert(f_proto="info", f_number="0", f_status="info", f_hosts_id=host_id) db.commit() for test in tests: d = {} vulnid = test.get('id').lower() # we may have valid username. 
if "cifs-acct-" in vulnid: username = test.get('key') if username is not None: d['f_services_id'] = svc_id d['f_username'] = username d['f_active'] = True d['f_source'] = vulnid query = (db.t_accounts.f_services_id == d['f_services_id']) &\ (db.t_accounts.f_username == d['f_username']) db.t_accounts.update_or_insert(query, **d) db.commit() if test.attrib['status'] == 'vulnerable-exploited' or \ test.attrib['status'] == 'potential' or \ test.attrib['status'] == 'exception-vulnerable-exploited' or \ test.attrib['status'] == 'exception-vulnerable-version' or \ test.attrib['status'] == 'exception-vulnerable-potential' or \ test.attrib['status'] == 'vulnerable-version': if vulnid in existing_vulnids: vuln_id = existing_vulnids[vulnid]['id'] else: continue if vulnid == 'cifs-nt-0001': # Windows users, local groups, and global groups infotext = nx_xml_to_html( StringIO(etree.tostring(test, xml_declaration=False))) try: unames = re.search( "Found user\(s\): (?P<unames>.+?) </li>", infotext).group('unames') except AttributeError as e: # regex not found continue for uname in unames.split(): # add account d['f_username'] = uname d['f_services_id'] = svc_id d['f_source'] = 'cifs-nt-0001' db.t_accounts.update_or_insert(**d) db.commit() test_str = etree.tostring(test, xml_declaration=False, encoding=str) test_str = test_str.encode('ascii', 'xmlcharrefreplace') proof = nx_xml_to_html(StringIO(test_str)) proof = html_to_markmin(proof) if vulnid == 'cifs-insecure-acct-lockout-limit': d['f_hosts_id'] = host_id try: d['f_lockout_limit'] = re.search( "contains: (?P<l>\d+)", proof).group('l') except AttributeError: d['f_lockout_limit'] = 0 query = (db.t_netbios.f_hosts_id == host_id) db.t_netbios.update_or_insert(query, **d) db.commit() # Check for CIFS uid/pw if "cifs-" in vulnid: try: uid = re.search("uid\[(?P<u>.*?)\]", proof).group('u') pw = re.search("pw\[(?P<p>.*?)\]", proof).group('p') realm = re.search("realm\[(?P<r>.*?)\]", proof).group('r') d = { 'f_services_id': svc_id, 
'f_username': uid, 'f_password': pw, 'f_description': realm, 'f_active': True, 'f_compromised': True, 'f_source': vulnid } query = (db.t_accounts.f_services_id == svc_id) & (db.t_accounts.f_username == uid) db.t_accounts.update_or_insert(query, **d) db.commit() except AttributeError: db.commit() except Exception as e: log("Error inserting account (%s): %s" % (uid, e), logging.ERROR) db.commit() # solaris-kcms-readfile shadow file if vulnid.lower() == "rpc-solaris-kcms-readfile": # funky chicken stuff, if they mess with this output then we've got to # change this around as well. thems the breaks, maynard! shadow = parser.unescape(proof) for line in shadow.split("<br />")[1:-1]: user, pw, uid = line.split(':')[0:3] d['f_services_id'] = svc_id d['f_username'] = user d['f_hash1'] = pw d['f_hash1_type'] = "crypt" d['f_uid'] = uid d['f_source'] = "shadow" d['f_active'] = True d['f_source'] = "rpc-solaris-kcms-readfile" query = (db.t_accounts.f_services_id == svc_id) & ( db.t_accounts.f_username == user) db.t_accounts.update_or_insert(query, **d) db.commit() db.t_service_vulns.update_or_insert( f_services_id=svc_id, f_status=test.attrib['status'], f_proof=proof, f_vulndata_id=vuln_id) if "cisco-default-http-account" in vulnid.lower(): d['f_services_id'] = svc_id d['f_username'] = vulnid.split('-')[4] d['f_password'] = vulnid.split('-')[6] d['f_source'] = "cisco-default-http-account" query = (db.t_accounts.f_services_id == svc_id) & ( db.t_accounts.f_username == d['f_username']) db.t_accounts.update_or_insert(query, **d) db.commit() # add services (ports) and resulting vulndata for endpoint in node.findall("endpoints/endpoint"): f_proto = endpoint.attrib['protocol'] f_number = endpoint.attrib['port'] f_status = endpoint.attrib['status'] query = (db.t_services.f_hosts_id == host_id) \ & (db.t_services.f_proto == f_proto) \ & (db.t_services.f_number == f_number) svc_id = db.t_services.update_or_insert(query, f_proto=f_proto, f_number=f_number, f_status=f_status, 
f_hosts_id=host_id) if not svc_id: svc_id = db(query).select().first().id for service in endpoint.findall("services/service"): d = {} if 'name' in service.attrib: db.t_services[svc_id] = dict(f_name=service.attrib['name']) for test in service.findall("tests/test"): vulnid = test.get('id').lower() if test.attrib['status'] == 'vulnerable-exploited' or \ test.attrib['status'] == 'potential' or \ test.attrib['status'] == 'exception-vulnerable-exploited' or \ test.attrib['status'] == 'exception-vulnerable-version' or \ test.attrib['status'] == 'exception-vulnerable-potential' or \ test.attrib['status'] == 'vulnerable-version': if vulnid in existing_vulnids: vuln_id = existing_vulnids[vulnid]['id'] else: log( " [!] Unknown vulnid, Skipping! (id: %s)" % vulnid, logging.ERROR) continue test_str = etree.tostring(test, xml_declaration=False, encoding=str) test_str = test_str.encode('ascii', 'xmlcharrefreplace') proof = nx_xml_to_html(StringIO(test_str)) proof = html_to_markmin(proof) # Check for SNMP strings if "snmp-read-" in vulnid: snmpstring = re.search("pw\[(?P<pw>.*?)\]", proof).group('pw') db.t_snmp.update_or_insert(f_hosts_id=host_id, f_community=snmpstring, f_access="READ", f_version="v1") db.commit() if "snmp-write" in vulnid: snmpstring = re.search("pw\[(?P<pw>.*?)\]", proof).group('pw') db.t_snmp.update_or_insert(f_hosts_id=host_id, f_community=snmpstring, f_access="WRITE", f_version="v1") db.commit() # TODO: account names # Dell DRAC root/calvin if vulnid == "http-drac-default-login": d['f_services_id'] = svc_id d['f_username'] = '******' d['f_password'] = '******' d['f_active'] = True d['f_compromised'] = True d['f_source'] = vulnid query = (db.t_accounts.f_services_id == svc_id) & ( db.t_accounts.f_username == 'root') db.t_accounts.update_or_insert(query, **d) db.commit() # Check for uid/pw if "ftp-iis-" in vulnid or \ "telnet-" in vulnid or \ "cifs-" in vulnid or \ "tds-" in vulnid or \ "oracle-" in vulnid or \ "-default-" in vulnid or \ "ftp-generic-" in 
vulnid: try: uid = re.search("uid\[(?P<u>.*?)\]", proof).group('u') pw = re.search("pw\[(?P<p>.*?)\]", proof).group('p') realm = re.search("realm\[(?P<r>.*?)\]", proof).group('r') d['f_services_id'] = svc_id d['f_username'] = uid d['f_password'] = pw d['f_description'] = realm d['f_active'] = True d['f_compromised'] = True d['f_source'] = vulnid query = (db.t_accounts.f_services_id == svc_id) & (db.t_accounts.f_username == uid) db.t_accounts.update_or_insert(query, **d) db.commit() except AttributeError: db.commit() except Exception as e: log( "Error inserting account (%s): %s" % (uid, e), logging.ERROR) db.commit() # cisco default http login accounts if "cisco-default-http-account" in vulnid.lower(): d['f_services_id'] = svc_id d['f_username'] = vulnid.split('-')[4] d['f_password'] = vulnid.split('-')[6] d['f_source'] = "cisco-default-http-account" query = (db.t_accounts.f_services_id == svc_id) \ & (db.t_accounts.f_username == d['f_username']) db.t_accounts.update_or_insert(query, **d) db.commit() db.t_service_vulns.update_or_insert( f_services_id=svc_id, f_status=test.attrib['status'], f_proof=proof, f_vulndata_id=vuln_id) db.commit() for config in service.findall("configuration/config"): db.t_service_info.update_or_insert( f_services_id=svc_id, f_name=config.attrib['name'], f_text=config.text) db.commit() if re.match('\w+.banner$', config.attrib['name']): db.t_services[svc_id] = dict(f_banner=config.text) db.commit() if config.attrib['name'] == 'mac-address': # update the mac address of the host db.t_hosts[host_id] = dict(f_macaddr=config.text) db.commit() if "advertised-name" in config.attrib['name']: # netbios computer name d = config.text.split(" ")[0] if "Computer Name" in config.text: data = {'f_netbios_name': d} # if hostname isn't defined then lowercase netbios name and put it in if db.t_hosts[host_id].f_hostname is None: data['f_hostname'] = d.lower() db(db.t_hosts.id == host_id).update(**data) db.commit() elif "Domain Name" in config.text: query = 
(db.t_netbios.f_hosts_id == host_id) db.t_netbios.update_or_insert(query, f_hosts_id=host_id, f_domain=d) db.commit() for os_rec in node.findall('fingerprints/os'): """ <os certainty="1.00" device-class="Workstation" vendor="Microsoft" family="Windows" product="Windows 2000 Professional" version="SP4" arch="x86"/> if using SCAP output the os line looks like: <os certainty="0.66" device-class="General" vendor="Microsoft" family="Windows" product="Windows XP" arch="x86" cpe="cpe:/o:microsoft:windows_xp::sp3"/> """ if 'cpe' in os_rec.attrib: # we have a cpe entry from xml! hooray! cpe_name = os_rec.attrib['cpe'].replace('cpe:/o:', '') os_id = lookup_cpe(cpe_name) else: # no cpe attribute in xml, go through our messy lookup os_id = guess_cpe_os(os_rec) if os_id is not None: db.t_host_os_refs.update_or_insert( f_certainty=os_rec.attrib['certainty'], f_family=os_rec.get('family', 'Unknown'), f_class=os_rec.get('device-class', 'Other'), f_hosts_id=host_id, f_os_id=os_id) db.commit() else: log( " [!] os_rec could not be parsed: %s" % etree.tostring(os_rec), logging.ERROR) db.commit() if msf_settings.get('workspace'): try: # check to see if we have a Metasploit RPC instance configured and talking from MetasploitProAPI import MetasploitProAPI msf_api = MetasploitProAPI(host=msf_settings.get('url'), apikey=msf_settings.get('key')) working_msf_api = msf_api.login() except Exception as error: log(" [!] Unable to authenticate to MSF API: %s" % str(error), logging.ERROR) working_msf_api = False try: scan_data = open(filename, "r+").readlines() except Exception as error: log( " [!] 
Error loading scan data to send to Metasploit: %s" % str(error), logging.ERROR) scan_data = None if scan_data and working_msf_api: task = msf_api.pro_import_data( msf_settings.get('workspace'), "".join(scan_data), { #'preserve_hosts': form.vars.preserve_hosts, 'blacklist_hosts': "\n".join(ip_ignore_list) }, ) msf_workspace_num = session.msf_workspace_num or 'unknown' msfurl = os.path.join(msf_settings.get('url'), 'workspaces', msf_workspace_num, 'tasks', task['task_id']) log(" [*] Added file to MSF Pro: %s" % msfurl) # any new nexpose vulns need to be checked against exploits table and connected log(" [*] Connecting exploits to vulns and performing do_host_status") connect_exploits() do_host_status(asset_group=asset_group) msg = " [*] Import complete: hosts: %s added, %s skipped, %s errors - vulns: %s added, %s skipped" % ( hoststats['added'], hoststats['skipped'], hoststats['errored'], vulns_added, vulns_skipped) log(msg) return msg
task = msf_api.pro_import_data( msf_settings.get('workspace'), "".join(scan_data), { #'preserve_hosts': form.vars.preserve_hosts, 'blacklist_hosts': "\n".join(ip_ignore_list) }, ) msf_workspace_num = session.msf_workspace_num or 'unknown' msfurl = os.path.join(msf_settings.get('url'), 'workspaces', msf_workspace_num, 'tasks', task['task_id']) log(" [*] Added file to MSF Pro: %s" % msfurl) # any new Nessus vulns need to be checked against exploits table and connected log(" [*] Connecting exploits to vulns and performing do_host_status") connect_exploits() do_host_status(asset_group=asset_group) msg = (' [*] Import complete: hosts: %s added, %s updated, %s skipped ' '- %s vulns processed, %s added' % ( nessus_hosts.stats['added'], nessus_hosts.stats['updated'], nessus_hosts.stats['skipped'], nessus_vulns.stats['processed'], nessus_vulns.stats['added'] )) log(msg) return msg ##-------------------------------------------------------------------------
str(error), logging.ERROR) scan_data = None if scan_data and working_msf_api: task = msf_api.pro_import_data( msf_settings.get('workspace'), "".join(scan_data), { #'preserve_hosts': form.vars.preserve_hosts, 'blacklist_hosts': "\n".join(ip_ignore_list) }, ) msf_workspace_num = session.msf_workspace_num or 'unknown' msfurl = os.path.join(msf_settings.get('url'), 'workspaces', msf_workspace_num, 'tasks', task['task_id']) log(" [*] Added file to MSF Pro: %s" % msfurl) # any new Nessus vulns need to be checked against exploits table and connected log(" [*] Connecting exploits to vulns and performing do_host_status") connect_exploits() do_host_status(asset_group=asset_group) msg = (' [*] Import complete: hosts: %s added, %s updated, %s skipped ' '- %s vulns processed, %s added' % (nessus_hosts.stats['added'], nessus_hosts.stats['updated'], nessus_hosts.stats['skipped'], nessus_vulns.stats['processed'], nessus_vulns.stats['added'])) log(msg) return msg