def asset_extraction(scanner):
    for sid in scanner.sitelist.keys():
        debug.printd('requesting devices for site %s (%s)' % \
            (scanner.sitelist[sid]['id'], scanner.sitelist[sid]['name']))
        devdata = scanner.conn.site_device_listing(sid)
        root = ET.fromstring(devdata)
        for s in root:
            if s.tag != 'SiteDevices':
                continue
            siteid = s.attrib['site-id']
            devlist = []
            for d in s:
                newdev = {}
                newdev['id'] = int(d.attrib['id'])
                newdev['address'] = d.attrib['address']
                newdev['vulns'] = []
                scanner.sitelist[sid]['assets'].append(newdev)
    add_asset_properties(scanner)

    debug.printd('requesting asset groups')
    grpdata = scanner.conn.asset_group_listing()
    root = ET.fromstring(grpdata)
    for g in root:
        if g.tag != 'AssetGroupSummary':
            continue
        newgrp = {}
        newgrp['name'] = g.attrib['name']
        newgrp['id'] = g.attrib['id']
        scanner.grouplist[int(g.attrib['id'])] = newgrp

def calculate_compliance(uid):
    debug.printd('calculating compliance for %s' % uid)
    ret = dbconn.compliance_values(uid)
    failvid = None
    failage = 0
    max_cvss = 0
    for level in ComplianceLevels.ORDERING:
        for val in ret:
            vid = val[0]
            cvss = val[1]
            age = val[2]
            if cvss >= ComplianceLevels.FLOOR[level] and \
                age > ComplianceLevels.LEVELS[level]:
                # Compliance failure; note the vulnerability with the
                # highest CVSS base score that caused the failure
                if failvid == None or max_cvss < cvss:
                    failvid = vid
                    max_cvss = cvss
                    failage = age
        if failvid != None:
            break
    failflag = False
    if failvid != None:
        debug.printd('asset fails compliance due to vid %d '
            '(cvss=%f, age=%d)' % (failvid, max_cvss, failage))
        failflag = True
    dbconn.compliance_update(uid, failflag, failvid)

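# Sketch of the ComplianceLevels structure calculate_compliance() relies on.
# The real class is defined elsewhere in the project; the names ORDERING,
# FLOOR, and LEVELS come from the function above, but the values below are
# hypothetical examples of the expected shape (CVSS floor per level, maximum
# allowed vulnerability age in days).
#
# class ComplianceLevels(object):
#     ORDERING = ['maximum', 'high', 'medium']
#     FLOOR = {'maximum': 9.0, 'high': 7.0, 'medium': 5.0}
#     LEVELS = {'maximum': 7, 'high': 30, 'medium': 90}
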
def load_vulnauto_list(path):
    debug.printd('reading automation data from %s' % path)
    cp = ConfigParser.SafeConfigParser()
    cp.read(path)
    for s in cp.sections():
        n = VulnAutoEntry(s)
        for k, v in cp.items(s):
            if k == 'mincvss':
                n.mincvss = float(v)
            elif k == 'ipmatch':
                if v != '':
                    newval = vulnauto_extract_pri(v)
                    n.add_match(newval[0], newval[1])
            elif k == 'namematch':
                if v != '':
                    for i in v.split():
                        if i == '#AUTOADD':
                            continue
                        newval = vulnauto_extract_pri(i)
                        n.add_namematch(newval[0], newval[1])
            elif k == 'name':
                n.title = v
            elif k == 'description':
                n.description = v
            else:
                sys.stderr.write('vulnauto option %s not available under ' \
                    '%s\n' % (k, s))
                sys.exit(1)
        vulnautolist.append(n)

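# A minimal sketch of the INI layout load_vulnauto_list() accepts; the section
# name and option values below are hypothetical, and the exact match/priority
# encoding inside ipmatch and namematch values is defined by
# vulnauto_extract_pri(), which is not shown here.
#
# [webservers]
# name = Web servers
# description = Production web tier
# mincvss = 7.0
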
def vuln_get_age_data(scanner):
    squery = '''
    SELECT asset_id, vulnerability_id, age_in_days
    FROM fact_asset_vulnerability_age
    '''
    ret = {}
    debug.printd('requesting vulnerability age information')
    sites = scanner.sitelist.keys()
    if len(sites) == 0:
        return
    vulndata = scanner.conn.adhoc_report(squery, sites,
        api_version='1.3.2')
    reader = csv.reader(StringIO.StringIO(vulndata))
    for i in reader:
        if len(i) == 0:
            continue
        if i[0] == 'asset_id':
            continue
        assetid = int(i[0])
        vid = int(i[1])
        age = float(i[2])
        if assetid not in ret:
            ret[assetid] = {}
        if vid not in ret[assetid]:
            ret[assetid][vid] = age
        else:
            if age > ret[assetid][vid]:
                ret[assetid][vid] = age
    return ret

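# vuln_get_age_data() returns a nested mapping keyed by asset id and then
# vulnerability id, keeping the largest age seen for each pair, e.g.
# (values hypothetical):
#
# {1024: {362: 14.0, 871: 92.5}, 2048: {362: 3.0}}
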
def escalate_vulns(escdir, scanner, escalate_vulns, escalate_compliance):
    ret = dbconn.asset_list()
    debug.printd('processing %d assets' % len(ret))
    vlist = []
    for i in ret:
        wfes = dbconn.get_workflow(i)
        for w in wfes:
            if w.status == WorkflowElement.STATUS_NONE:
                w.status = WorkflowElement.STATUS_ESCALATED
            elif w.status == WorkflowElement.STATUS_RESOLVED:
                w.status = WorkflowElement.STATUS_CLOSED
            w.vulnerability.os = sitelist_get_os(w.assetid_site, scanner)
            # Assign a risk likelihood indicator value to the event. We
            # default to MEDIUM. HIGH and MAXIMUM are reserved for issues
            # that are manually flagged as such, which this tool does not
            # currently support handling.
            w.vulnerability.likelihood_indicator = 'medium'
            # Create JSON event from the element
            jv = vmjson.wf_to_json(w)
            vlist.append(jv)
            # Mark this workflow element as handled now
            dbconn.workflow_handled(w.workflow_id, w.status)
    # Send coverage indicators
    services.send_indicators(scanner)
    vlist = services.serviceapi_vulnlist(vlist)
    if len(vlist) > 0:
        if escalate_vulns:
            write_vuln_escalations(vlist, escdir)
    clist = []
    # Do the same thing for compliance items
    for i in ret:
        ce = dbconn.get_compliance(i)
        # get_compliance returning None means the system passed compliance
        # checks, we still want to create an event though.
        if ce == None:
            target = dbconn.aid_to_host(i)
            autogroup = dbconn.aid_to_autogroup(i)
        else:
            target = ce.failvuln.hostname
            autogroup = ce.failvuln.autogroup
        jc = vmjson.ce_to_json(ce, target, autogroup)
        clist.append(jc)
    clist = services.serviceapi_complist(clist)
    if len(clist) > 0:
        if escalate_compliance:
            write_compliance_escalations(clist, escdir)

def write_vuln_escalations(vlist, escdir):
    fname = 'vulns-%d-%d.dat' % (int(calendar.timegm(time.gmtime())),
        os.getpid())
    outfile = os.path.join(escdir, fname)
    tmpoutfile = outfile + '.tmp'
    debug.printd('writing vulnerabilities escalations to %s' % outfile)
    fd = open(tmpoutfile, 'w')
    cPickle.dump(vlist, fd)
    fd.close()
    os.rename(tmpoutfile, outfile)

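# write_vuln_escalations() (and the matching compliance writer below) publish
# via write-to-temp-then-rename, so a consumer polling escdir never observes a
# partially written pickle; os.rename() is atomic within a filesystem. A
# minimal sketch of a hypothetical reader side, assuming the consumer simply
# unpickles each completed .dat file:
#
# def read_escalations(escdir):
#     out = []
#     for f in sorted(os.listdir(escdir)):
#         if not f.endswith('.dat'):
#             continue
#         with open(os.path.join(escdir, f)) as fd:
#             out.append(cPickle.load(fd))
#     return out
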
def site_extraction(scanner):
    debug.printd('requesting site information')
    sitedata = scanner.conn.list_sites()
    for s in sitedata:
        siteinfo = {}
        siteinfo['name'] = s.name
        siteinfo['id'] = str(s.id)
        siteinfo['assets'] = []
        scanner.sitelist[siteinfo['id']] = siteinfo
    debug.printd('read %d sites' % len(scanner.sitelist))

def write_compliance_escalations(clist, escdir):
    fname = 'compliance-%d-%d.dat' % (int(calendar.timegm(time.gmtime())),
        os.getpid())
    outfile = os.path.join(escdir, fname)
    tmpoutfile = outfile + '.tmp'
    debug.printd('writing compliance escalations to %s' % outfile)
    fd = open(tmpoutfile, 'w')
    cPickle.dump(clist, fd)
    fd.close()
    os.rename(tmpoutfile, outfile)

def load_exemption_list(path):
    debug.printd('reading exemptions from %s' % path)
    cp = ConfigParser.SafeConfigParser()
    cp.read(path)
    for s in cp.sections():
        # XXX Need to validate format and warn on syntax issues
        if '/' in s:
            exemptlist_nets.append(s)
        else:
            exemptlist_hosts.append(s)

def load_vulnauto(dirpath, vmdbconn):
    global dbconn
    debug.printd('reading vulnerability automation data...')
    dbconn = vmdbconn
    dirlist = os.listdir(dirpath)
    for i in dirlist:
        # Ignore templates
        if '.tmpl' in i:
            continue
        load_vulnauto_list(os.path.join(dirpath, i))

def asset_unique_id(address, mac, hostname, aid):
    if mac == '':
        u_mac = 'NA'
    else:
        u_mac = mac
    if hostname == '':
        u_hostname = 'NA'
    else:
        u_hostname = hostname
    ret = '0|%s|%s|%s|%s' % (aid, address, u_hostname, u_mac)
    debug.printd('using identifier %s' % ret)
    return ret

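# Example of the identifier asset_unique_id() produces (values hypothetical):
#
#   asset_unique_id('10.0.1.5', '', 'host.example.com', 42)
#   returns '0|42|10.0.1.5|host.example.com|NA'
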
def site_update_from_files(scanner, sid, pathlist):
    tmpfile = tempfile.mkstemp()
    tmpfilefd = os.fdopen(tmpfile[0], 'w')
    for i in pathlist:
        try:
            fd = open(i, 'r')
        except IOError:
            debug.printd('cannot read %s, skipping site' % i)
            os.remove(tmpfile[1])
            return
        tmpfilefd.write(fd.read())
        fd.close()
    tmpfilefd.close()
    site_update_from_file(scanner, sid, tmpfile[1])
    os.remove(tmpfile[1])

def load_exemption_list(path):
    debug.printd('reading exemptions from %s' % path)
    cp = ConfigParser.SafeConfigParser()
    cp.read(path)
    for s in cp.sections():
        # XXX Need to validate format and warn on syntax issues
        if '/' in s:
            exemptlist_nets.append(s)
        else:
            exemptlist_hosts.append(s)
        for k, v in cp.items(s):
            if k == 'include':
                if v != '':
                    for i in v.split():
                        butinclude.append(i)

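# Sketch of the exemption INI layout this parser accepts; the addresses below
# are hypothetical. A section name containing '/' is treated as a network and
# anything else as a single host, while an optional include option lists
# addresses that stay in scope despite a matching exemption.
#
# [10.20.0.0/16]
# include = 10.20.1.1 10.20.1.2
#
# [192.168.5.9]
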
def asset_update_group(scanner, groupdata):
    usegroup = -1
    vgent = groupdata['autoentry']
    for g in scanner.grouplist:
        if scanner.grouplist[g]['name'] == vgent.title:
            usegroup = int(scanner.grouplist[g]['id'])
    if usegroup == -1:
        debug.printd('creating new asset group')
    else:
        debug.printd('updating asset group %d' % usegroup)
    e = ET.Element('AssetGroup', attrib={'id': str(usegroup),
        'name': vgent.title,
        'description': vgent.description})
    de = ET.SubElement(e, 'Devices')
    for i in groupdata['assetids']:
        newsub = ET.SubElement(de, 'device', attrib={'id': str(i)})
    scanner.conn.asset_group_save((ET.tostring(e),))

def add_asset_properties(scanner):
    squery = '''
    SELECT asset_id, ds.name AS site_name,
    da.ip_address, da.host_name, da.mac_address,
    dos.description AS operating_system,
    dht.description, dos.asset_type, dos.cpe,
    fa.aggregated_credential_status_id
    FROM dim_asset da
    JOIN dim_operating_system dos USING (operating_system_id)
    JOIN dim_host_type dht USING (host_type_id)
    JOIN dim_site_asset dsa USING (asset_id)
    JOIN dim_site ds USING (site_id)
    JOIN fact_asset fa USING (asset_id)
    '''
    debug.printd('requesting additional asset properties')
    sites = scanner.sitelist.keys()
    if len(sites) == 0:
        return
    vulndata = nexadhoc.nexpose_adhoc(scanner, squery, sites,
        api_version='2.0.2')
    reader = csv.reader(StringIO.StringIO(vulndata))
    atable = {}
    for i in reader:
        if len(i) == 0:
            continue
        if i[0] == 'asset_id':
            continue
        atable[int(i[0])] = i[1:]
    for s in scanner.sitelist.keys():
        for a in scanner.sitelist[s]['assets']:
            if a['id'] not in atable.keys():
                a['hostname'] = ''
                a['macaddress'] = ''
                a['credsok'] = False
                continue
            a['hostname'] = atable[a['id']][2]
            a['macaddress'] = atable[a['id']][3]
            a['os'] = atable[a['id']][4]
            a['credsok'] = False
            cstatus = atable[a['id']][8]
            if int(cstatus) >= CREDSTATUS_LOGINSUCC:
                a['credsok'] = True

def reptest(scanner):
    squery = '''
    SELECT da.ip_address, da.host_name, os.name,
    critical_vulnerabilities, severe_vulnerabilities,
    exploits, riskscore, aggregated_credential_status_description
    FROM fact_asset
    JOIN dim_aggregated_credential_status USING(aggregated_credential_status_id)
    JOIN dim_asset da USING(asset_id)
    JOIN dim_operating_system os USING(operating_system_id)
    '''
    debug.printd('adhoc report test')
    sites = scanner.sitelist.keys()
    if len(sites) == 0:
        return
    ret = nexadhoc.nexpose_adhoc(scanner, squery, sites,
        api_version='1.3.2')
    print ret
    sys.exit(0)

def reptest(scanner):
    squery = '''
    SELECT da.ip_address, da.host_name, os.name,
    critical_vulnerabilities, severe_vulnerabilities,
    exploits, riskscore, aggregated_credential_status_description
    FROM fact_asset
    JOIN dim_aggregated_credential_status USING(aggregated_credential_status_id)
    JOIN dim_asset da USING(asset_id)
    JOIN dim_operating_system os USING(operating_system_id)
    '''
    debug.printd('adhoc report test')
    sites = scanner.sitelist.keys()
    if len(sites) == 0:
        return
    ret = scanner.conn.adhoc_report(squery, sites,
        api_version='1.3.2')
    print(ret)
    sys.exit(0)

def escalate_vulns(escdir, escalate_vulns, escalate_compliance):
    ret = dbconn.asset_list()
    debug.printd('processing %d assets' % len(ret))
    vlist = []
    for i in ret:
        wfes = dbconn.get_workflow(i)
        for w in wfes:
            if w.status == WorkflowElement.STATUS_NONE:
                w.status = WorkflowElement.STATUS_ESCALATED
            elif w.status == WorkflowElement.STATUS_RESOLVED:
                w.status = WorkflowElement.STATUS_CLOSED
            # Create JSON event from the element
            jv = vmjson.wf_to_json(w)
            vlist.append(jv)
            # Mark this workflow element as handled now
            dbconn.workflow_handled(w.workflow_id, w.status)
    if len(vlist) > 0:
        if escalate_vulns:
            write_vuln_escalations(vlist, escdir)
    clist = []
    # Do the same thing for compliance items
    for i in ret:
        ce = dbconn.get_compliance(i)
        # get_compliance returning None means the system passed compliance
        # checks, we still want to create an event though.
        if ce == None:
            target = dbconn.aid_to_host(i)
        else:
            target = ce.failvuln.hostname
        jc = vmjson.ce_to_json(ce, target)
        clist.append(jc)
    if len(clist) > 0:
        if escalate_compliance:
            write_compliance_escalations(clist, escdir)

def report_list(scanner):
    debug.printd('requesting report list')
    replist = scanner.conn.report_listing()
    ret = {}
    root = ET.fromstring(replist)
    for s in root:
        if s.tag != 'ReportConfigSummary':
            continue
        newrep = {}
        newrep['name'] = s.attrib['name']
        newrep['id'] = s.attrib['cfg-id']
        newrep['last-generated'] = s.attrib['generated-on']
        newrep['status'] = s.attrib['status']
        if 'report-URI' in s.attrib:
            newrep['url'] = s.attrib['report-URI']
        else:
            newrep['url'] = None
        ret[newrep['id']] = newrep
    return ret

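# report_list() returns a dict keyed by report configuration id; a
# hypothetical example of the shape:
#
# {'55': {'name': 'weekly', 'id': '55', 'status': 'Generated',
#         'last-generated': '...', 'url': '/reports/55/report.csv'}}
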
def vuln_proc_pipeline(vlist, aid, address, mac, hostname):
    global uidcache
    vidcache = []
    debug.printd('vulnerability process pipeline for asset id %d' % aid)
    vauto = vuln_auto_finder(address, mac, hostname)
    if vauto == -1:
        debug.printd('skipping pipeline for asset id %d, no handler' % aid)
        return
    uid = asset_unique_id(address, mac, hostname, aid)
    if uid not in uidcache:
        uidcache.append(uid)
        # XXX We will probably want to add something here to search and update
        # any existing references for this asset where we had less information,
        # this will likely need some sort of partial matching on fields.
    # Make sure the asset exists in the database, if not add it
    dbassetid = dbconn.add_asset(uid, aid, address, mac, hostname)
    if dbassetid == None:
        # The asset wasn't added, probably because it is a duplicate of another
        # asset, if this happens we are done
        return
    debug.printd('using db asset %d' % dbassetid)
    for v in vlist:
        vidcache.append(int(v.vid))
        # We don't want to look at everything; check the handler's minimum
        # CVSS value to see if we should proceed
        if v.cvss >= vauto.mincvss:
            debug.printd('processing vulnerability %s' % v.vid)
            dbconn.add_vulnerability(v, dbassetid, vauto)
        else:
            debug.printd('skipping vulnerability %s as it does not meet ' \
                'minimum cvss score' % v.vid)
    dbconn.resolve_vulnerability(vidcache, dbassetid)
    # Calculate the compliance score for the asset
    calculate_compliance(uid)

def load_vulnauto(vmdbconn):
    global dbconn
    global defaultvulnauto
    dbconn = vmdbconn
    debug.printd('adding default vulnauto entry')
    defaultvulnauto = VulnAutoEntry('default')
    defaultvulnauto.title = 'default'
    defaultvulnauto.mincvss = 6.0
    defaultvulnauto.description = 'default'
    debug.printd('requesting automation data from service api')
    vad = pyservicelib.get_vulnauto()
    for i in vad['vulnauto']:
        ne = VulnAutoEntry(str(i['v2bkey']))
        ne.mincvss = 6.0
        ne.title = str(i['v2bkey'])
        ne.description = ne.title
        ne.add_namematch(str(i['match']), 1)
        vulnautolist.append(ne)

def adhoc_group(scanner, tgfile):
    groupdata = {}
    addrlist = []
    fd = open(tgfile, 'r')
    while True:
        buf = fd.readline()
        if buf == None or buf == '':
            break
        addrlist.append(buf.strip())
    fd.close()
    debug.printd('will group on %d addresses' % len(addrlist))
    # Create a pseudo vulnauto entry to support creation of the adhoc
    # group.
    groupdata['autoentry'] = vuln.VulnAutoEntry('adhoc')
    groupdata['autoentry'].description = 'adhoc'
    groupdata['autoentry'].title = 'adhoc'
    # Find each asset that matches an entry in the addrlist; since this is
    # primarily used with MIG database dumps we just match on IP address.
    # This could be expanded to match on hostname and other fields if needed.
    groupdata['assetids'] = []
    for s in scanner.sitelist:
        for a in scanner.sitelist[s]['assets']:
            if a['address'] in addrlist:
                groupdata['assetids'].append(a['id'])
    debug.printd('matched on %d assets' % len(groupdata['assetids']))
    # Create the adhoc group.
    debug.printd('updating adhoc group')
    asset_update_group(scanner, groupdata)

def vuln_proc_pipeline(vlist, aid, address, mac, hostname):
    global uidcache
    vidcache = []
    debug.printd('vulnerability process pipeline for asset id %d' % aid)
    vauto = vuln_auto_finder(address, mac, hostname)
    if vauto == -1:
        debug.printd('skipping pipeline for asset id %d, no handler' % aid)
        return
    uid = asset_unique_id(address, mac, hostname, aid)
    if uid not in uidcache:
        uidcache.append(uid)
        dbconn.asset_search_and_update(uid, aid, address, mac, hostname)
    # Make sure the asset exists in the database, if not add it
    dbassetid = dbconn.add_asset(uid, aid, address, mac, hostname)
    if dbassetid == None:
        # The asset wasn't added, probably because it is a duplicate of another
        # asset, if this happens we are done
        return
    debug.printd('using db asset %d' % dbassetid)
    for v in vlist:
        vidcache.append(int(v.vid))
        # We don't want to look at everything; check the handler's minimum
        # CVSS value to see if we should proceed
        if v.cvss >= vauto.mincvss:
            debug.printd('processing vulnerability %s' % v.vid)
            dbconn.add_vulnerability(v, dbassetid, vauto)
        else:
            debug.printd('skipping vulnerability %s as it does not meet ' \
                'minimum cvss score' % v.vid)
    dbconn.resolve_vulnerability(vidcache, dbassetid)
    # Calculate the compliance score for the asset
    calculate_compliance(uid)

def asset_grouping(scanner):
    # Each automation entry that was loaded will result in an asset group
    groupdata = {}
    for x in vuln.vulnautolist:
        groupdata[x.name] = {}
        groupdata[x.name]['autoentry'] = x
        groupdata[x.name]['assetids'] = []
    for s in scanner.sitelist:
        for a in scanner.sitelist[s]['assets']:
            vent = vuln.vuln_auto_finder(a['address'], a['macaddress'],
                a['hostname'])
            if vent == None:
                continue
            for i in groupdata:
                if groupdata[i]['autoentry'] == vent:
                    groupdata[i]['assetids'].append(a['id'])
    for x in groupdata:
        usegroup = -1
        vgent = groupdata[x]['autoentry']
        # See if we are updating an existing group or creating a new one
        for g in scanner.grouplist:
            if scanner.grouplist[g]['name'] == vgent.title:
                usegroup = int(scanner.grouplist[g]['id'])
        if usegroup == -1:
            debug.printd('creating a new asset group')
        else:
            debug.printd('updating asset group %d' % usegroup)
        e = ET.Element('AssetGroup', attrib={
            'id': str(usegroup),
            'name': vgent.title,
            'description': vgent.description
        })
        de = ET.SubElement(e, 'Devices')
        for i in groupdata[x]['assetids']:
            newsub = ET.SubElement(de, 'device', attrib={'id': str(i)})
        scanner.conn.asset_group_save((ET.tostring(e),))

def vuln_auto_finder(address, mac, hostname):
    candlist = None
    last = -1
    cand = None
    for va in vulnautolist:
        ret = va.name_test(hostname)
        if ret != -1:
            if ret > last:
                cand = va
                last = ret
            continue
        ret = va.ip_test(address)
        if ret == -1:
            continue
        if ret > last:
            cand = va
            last = ret
    if cand != None:
        debug.printd('using VulnAutoEntry %s (score: %d)' % (cand.name, last))
    else:
        debug.printd('unable to match automation handler')
    return cand

def vuln_auto_finder(address, mac, hostname):
    candlist = None
    last = -1
    cand = None
    for va in vulnautolist:
        ret = va.name_test(hostname)
        if ret != -1:
            if ret > last:
                cand = va
                last = ret
        ret = va.ip_test(address)
        if ret == -1:
            continue
        if ret > last:
            cand = va
            last = ret
    if cand != None:
        debug.printd('using VulnAutoEntry %s (score: %d)' % (cand.name, last))
    else:
        debug.printd('using default vulnauto entry')
        cand = defaultvulnauto
    return cand

def add_asset_properties(scanner):
    squery = '''
    SELECT asset_id, ds.name AS site_name,
    da.ip_address, da.host_name, da.mac_address,
    dos.description AS operating_system,
    dht.description, dos.asset_type, dos.cpe
    FROM dim_asset da
    JOIN dim_operating_system dos USING (operating_system_id)
    JOIN dim_host_type dht USING (host_type_id)
    JOIN dim_site_asset dsa USING (asset_id)
    JOIN dim_site ds USING (site_id)
    '''
    debug.printd('requesting additional asset properties')
    sites = scanner.sitelist.keys()
    if len(sites) == 0:
        return
    vulndata = scanner.conn.adhoc_report(squery, sites,
        api_version='1.3.2')
    reader = csv.reader(StringIO.StringIO(vulndata))
    atable = {}
    for i in reader:
        if len(i) == 0:
            continue
        if i[0] == 'asset_id':
            continue
        atable[int(i[0])] = i[1:]
    for s in scanner.sitelist.keys():
        for a in scanner.sitelist[s]['assets']:
            if a['id'] not in atable.keys():
                a['hostname'] = ''
                a['macaddress'] = ''
                continue
            a['hostname'] = atable[a['id']][2]
            a['macaddress'] = atable[a['id']][3]

def generate_report(scanner, repid):
    debug.printd('requesting generation of report %s' % repid)
    replist = scanner.conn.report_generate(repid)
    debug.printd('polling for completion, standby')
    replist = None
    while True:
        replist = report_list(scanner)
        if replist[repid]['status'] == 'Generated':
            debug.printd('report generation complete')
            break
        time.sleep(5)
    ret = nexpose_fetch_report(repid, replist[repid]['url'])
    return ret

def ip_exempt(ip):
    if ip == '':
        return False
    if ip in butinclude:
        debug.printd('address %s exempted but in include list' % ip)
        return False
    # Only look at IP addresses here
    if not re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', ip):
        return False
    if ip in exemptlist_hosts:
        debug.printd('address %s is exempt from address match' % ip)
        return True
    for i in exemptlist_nets:
        if IPAddress(ip) in IPNetwork(i):
            debug.printd('address %s is exempt from network match' % ip)
            return True
    return False

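# Usage sketch for ip_exempt(), assuming hypothetical module state of
# exemptlist_hosts = ['192.168.5.9'], exemptlist_nets = ['10.20.0.0/16'],
# and butinclude = ['10.20.1.1']:
#
#   ip_exempt('192.168.5.9')   # True, direct host exemption
#   ip_exempt('10.20.9.9')     # True, falls inside 10.20.0.0/16
#   ip_exempt('10.20.1.1')     # False, present in the include list
#   ip_exempt('8.8.8.8')       # False, no match
#   ip_exempt('somehost')      # False, not an IPv4 address
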
def group_purge(scanner, gid):
    remlist = []
    debug.printd('purging assets from group %s' % gid)
    grpconfig = scanner.conn.asset_group_config(gid)
    root = ET.fromstring(grpconfig)
    a = root.find('AssetGroup')
    if a == None:
        raise Exception('autopurge group not found')
    if a.attrib['id'] != gid:
        raise Exception('server returned incorrect asset group')
    dlist = a.find('Devices')
    for i in dlist:
        remlist.append(i.attrib['id'])
    debug.printd('removing %d assets from group %s' % \
        (len(remlist), gid))
    for i in remlist:
        scanner.conn.device_delete(i)
        debug.printd('removed device %s' % i)

def vuln_extraction(scanner, vulnquery_where, writefile=None, readfile=None,
    targetcve=None):
    squery = '''
    WITH vuln_references AS (
        SELECT vulnerability_id,
        array_to_string(array_agg(reference), ', ') AS references
        FROM dim_vulnerability
        JOIN dim_vulnerability_reference USING (vulnerability_id)
        GROUP BY vulnerability_id
    )
    SELECT ds.name AS site, da.asset_id, da.ip_address, da.host_name,
    da.mac_address, dv.title AS vulnerability, dvs.description AS status,
    favi.date AS discovered_date,
    CASE WHEN favi.port = -1 THEN NULL ELSE favi.port END AS port,
    dp.name AS protocol, dsvc.name AS service,
    round(dv.cvss_score::numeric, 2) AS cvss_score, vr.references,
    dv.exploits, dv.malware_kits, dv.vulnerability_id
    FROM fact_asset_vulnerability_instance favi
    JOIN dim_asset da USING (asset_id)
    JOIN dim_vulnerability dv USING (vulnerability_id)
    JOIN dim_site_asset dsa USING (asset_id)
    JOIN dim_site ds USING (site_id)
    JOIN dim_vulnerability_status dvs USING (status_id)
    JOIN dim_protocol dp USING (protocol_id)
    JOIN dim_service dsvc USING (service_id)
    JOIN vuln_references vr USING (vulnerability_id)
    %s
    ORDER BY ds.name, da.ip_address
    ''' % vulnquery_where
    debug.printd('requesting vulnerability details')
    sites = scanner.sitelist.keys()
    if len(sites) == 0:
        return
    agedata = vuln_get_age_data(scanner)
    if readfile != None:
        debug.printd('reading vulnerability data from %s' % readfile)
        fd = open(readfile, 'r')
        vulndata = fd.read()
        fd.close()
    else:
        vulndata = scanner.conn.adhoc_report(squery, sites,
            api_version='1.3.2')
        if writefile != None:
            fd = open(writefile, 'w')
            fd.write(vulndata)
            fd.close()
            sys.exit(0)
    reader = csv.reader(StringIO.StringIO(vulndata))
    nvulns = 0
    linked = 0
    for i in reader:
        if len(i) == 0:
            continue
        if i[0] == 'site':
            continue
        nvulns += 1
        v = vuln.vulnerability()
        v.sitename = i[0]
        v.assetid = int(i[1])
        v.ipaddr = i[2]
        v.hostname = i[3]
        v.macaddr = i[4]
        v.title = i[5]
        v.known_exploits = False
        v.known_malware = False
        if int(i[13]) > 0:
            v.known_exploits = True
        if int(i[14]) > 0:
            v.known_malware = True
        v.vid = i[15]
        idx = i[7].find('.')
        if idx > 0:
            dstr = i[7][:idx]
        else:
            dstr = i[7]
        v.age_days = vuln_age_days(v, agedata)
        dt = datetime.datetime.strptime(dstr, '%Y-%m-%d %H:%M:%S')
        dt = dt.replace(tzinfo=pytz.UTC)
        v.discovered_date = dt
        def get_total_seconds(td):
            return (td.microseconds + (td.seconds + td.days * 24 * 3600) \
                * 1e6) / 1e6
        v.discovered_date_unix = int(get_total_seconds(v.discovered_date - \
            datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)))
        v.cvss = float(i[11])
        for i in i[12].split(','):
            buf = i.strip()
            if 'CVE-' in buf:
                if v.cves == None:
                    v.cves = [buf, ]
                else:
                    v.cves.append(buf)
            if 'RHSA-' in buf:
                if v.rhsa == None:
                    v.rhsa = [buf, ]
                else:
                    v.rhsa.append(buf)
        linked += vuln_instance_link(v, scanner)
    debug.printd('%d vulnerabilities loaded' % nvulns)
    debug.printd('%d vulnerabilities linked' % linked)
    vuln.vuln_reset_uid_cache()
    for s in scanner.sitelist.keys():
        for a in scanner.sitelist[s]['assets']:
            if len(a['vulns']) == 0:
                continue
            # If in target CVE report mode, just report on the CVE but
            # don't actually process the vulnerability
            if targetcve != None:
                vuln.vuln_cvereport(a, targetcve)
                continue
            vuln.vuln_proc_pipeline(a['vulns'], a['id'], a['address'],
                a['macaddress'], a['hostname'])
    if targetcve != None:
        return
    vuln.expire_hosts()

def vuln_extraction(scanner, vulnquery_where, writefile=None, readfile=None,
    targetcve=None, targethosts=False):
    squery = '''
    WITH vuln_references AS (
        SELECT vulnerability_id,
        array_to_string(array_agg(reference), ', ') AS references
        FROM dim_vulnerability
        JOIN dim_vulnerability_reference USING (vulnerability_id)
        GROUP BY vulnerability_id
    )
    SELECT ds.name AS site, da.asset_id, da.ip_address, da.host_name,
    da.mac_address, dv.title AS vulnerability, dvs.description AS status,
    favi.date AS discovered_date,
    CASE WHEN favi.port = -1 THEN NULL ELSE favi.port END AS port,
    dp.name AS protocol, dsvc.name AS service,
    round(dv.cvss_score::numeric, 2) AS cvss_score, vr.references,
    dv.exploits, dv.malware_kits, dv.vulnerability_id, dv.description,
    dv.cvss_vector, proofAsText(favi.proof), age_in_days
    FROM fact_asset_vulnerability_instance favi
    JOIN dim_asset da USING (asset_id)
    JOIN dim_vulnerability dv USING (vulnerability_id)
    JOIN dim_site_asset dsa USING (asset_id)
    JOIN dim_site ds USING (site_id)
    JOIN dim_vulnerability_status dvs USING (status_id)
    JOIN dim_protocol dp USING (protocol_id)
    JOIN dim_service dsvc USING (service_id)
    JOIN vuln_references vr USING (vulnerability_id)
    JOIN fact_asset_vulnerability_age USING (asset_id, vulnerability_id)
    %s
    ORDER BY ds.name, da.ip_address
    ''' % vulnquery_where
    debug.printd('requesting vulnerability details')
    sites = scanner.sitelist.keys()
    if len(sites) == 0:
        return
    if readfile != None:
        debug.printd('reading vulnerability data from %s' % readfile)
        fd = open(readfile, 'r')
        vulndata = fd.read()
        fd.close()
    else:
        vulndata = nexadhoc.nexpose_adhoc(scanner, squery, sites,
            api_version='2.0.2')
        if writefile != None:
            fd = open(writefile, 'w')
            fd.write(vulndata)
            fd.close()
            sys.exit(0)
    reader = csv.reader(StringIO.StringIO(vulndata))
    nvulns = 0
    linked = 0
    for i in reader:
        if len(i) == 0:
            continue
        if i[0] == 'site':
            continue
        nvulns += 1
        v = vuln.vulnerability()
        v.sitename = i[0]
        v.assetid = int(i[1])
        v.ipaddr = i[2]
        v.hostname = i[3]
        v.macaddr = i[4]
        v.title = i[5]
        v.known_exploits = False
        v.known_malware = False
        if int(i[13]) > 0:
            v.known_exploits = True
        if int(i[14]) > 0:
            v.known_malware = True
        v.vid = i[15]
        v.description = i[16]
        v.cvss_vector = i[17]
        v.proof = i[18]
        v.age_days = i[19]
        idx = i[7].find('.')
        if idx > 0:
            dstr = i[7][:idx]
        else:
            dstr = i[7]
        dt = datetime.datetime.strptime(dstr, '%Y-%m-%d %H:%M:%S')
        dt = dt.replace(tzinfo=pytz.UTC)
        v.discovered_date = dt
        def get_total_seconds(td):
            return (td.microseconds + (td.seconds + td.days * 24 * 3600) \
                * 1e6) / 1e6
        v.discovered_date_unix = int(get_total_seconds(v.discovered_date - \
            datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)))
        v.cvss = float(i[11])
        for i in i[12].split(','):
            buf = i.strip()
            if 'CVE-' in buf:
                if v.cves == None:
                    v.cves = [buf, ]
                else:
                    v.cves.append(buf)
            if 'RHSA-' in buf:
                if v.rhsa == None:
                    v.rhsa = [buf, ]
                else:
                    v.rhsa.append(buf)
        linked += vuln_instance_link(v, scanner)
    debug.printd('%d vulnerabilities loaded' % nvulns)
    debug.printd('%d vulnerabilities linked' % linked)
    vuln.vuln_reset_uid_cache()
    for s in scanner.sitelist.keys():
        for a in scanner.sitelist[s]['assets']:
            # It's possible the asset has no known issues, but we still want
            # to pass this information into vuln_proc_pipeline so we can
            # resolve any known issues for the asset.
            if len(a['vulns']) == 0:
                debug.printd('scanner reports asset %d with no known issues'
                    % a['id'])
            # If in target CVE report mode, just report on the CVE but
            # don't actually process the vulnerability
            if targetcve != None:
                vuln.vuln_cvereport(a, targetcve)
                continue
            elif targethosts:
                vuln.vuln_hostreport(a)
                continue
            vuln.vuln_proc_pipeline(a['vulns'], a['id'], a['address'],
                a['macaddress'], a['hostname'])
    if targetcve != None or targethosts:
        return
    vuln.resolve_expired_hosts()

def site_update_from_file(scanner, sid, path):
    debug.printd('updating site %s from %s' % (sid, path))
    sconf = scanner.conn.site_config(sid)
    root = ET.fromstring(sconf)
    sitetag = root.find('Site')
    if sitetag == None:
        raise Exception('response from server for site %s invalid' % sid)
    ne = sitetag.find('Hosts')
    updates = 0
    try:
        fd = open(path, 'r')
    except IOError:
        sys.stderr.write('unable to read %s, skipping updates ' \
            'for site %s\n' % (path, sid))
        return
    # Expand address ranges to simplify the update
    for i in ne[:]:
        if i.tag != 'range':
            continue
        low = i.get('from')
        high = i.get('to')
        if high == None:
            continue
        ipl = list(netaddr.iter_iprange(low, high))
        debug.printd('expanding %s -> %s in site %s' % (low, high, sid))
        ne.remove(i)
        for j in ipl:
            newsub = ET.SubElement(ne, 'range')
            newsub.set('from', str(j))
    # First remove anything from the site we have a known exemption for
    for i in ne[:]:
        if i.tag != 'range':
            debug.printd('removing %s from %s, not a range tag' % \
                (i.text, sid))
            ne.remove(i)
            continue
        checkip = i.get('from')
        if exempt.ip_exempt(checkip):
            debug.printd('removing %s from site %s as it is exempted' % \
                (checkip, sid))
            ne.remove(i)
            updates += 1
    addrtable = []
    while True:
        buf = fd.readline()
        if buf == None or buf == '':
            break
        buf = buf.strip()
        found = False
        if exempt.ip_exempt(buf):
            continue
        addrtable.append(buf)
        for i in ne:
            if i.get('from') == buf:
                found = True
                break
        if found:
            continue
        debug.printd('adding %s to site %s' % (buf, sid))
        newsub = ET.SubElement(ne, 'range')
        newsub.set('from', buf)
        updates += 1
    fd.close()
    # Finally, remove any addresses in the site that don't seem to exist
    # anymore according to host discovery
    for i in ne[:]:
        a = i.get('from')
        if a not in addrtable:
            debug.printd('removing %s from site %s' % (a, sid))
            ne.remove(i)
            updates += 1
    if updates == 0:
        debug.printd('no updates needed for site %s' % sid)
        return
    debug.printd('%d updates for site %s' % (updates, sid))
    scanner.conn.site_save((ET.tostring(sitetag),))

def load_exemptions(dirpath):
    debug.printd('reading exemptions...')
    dirlist = os.listdir(dirpath)
    for i in dirlist:
        load_exemption_list(os.path.join(dirpath, i))