def add():
    if request.args(0) is not None:
        record = get_host_record(request.args(0))
        db.t_evidence.f_hosts_id.default = record.id
    else:
        record = None

    if request.extension == 'load':
        buttons = []
    else:
        buttons = ['submit']

    if record:
        form = SQLFORM(
            db.t_evidence, buttons=buttons, upload=URL('download'),
            fields=['f_type', 'f_other_type', 'f_text', 'f_evidence'],
            _action=URL('add', args=[record.id]), _id="evidence_add_form"
        )
    else:
        form = SQLFORM(
            db.t_evidence, buttons=buttons, upload=URL('download'),
            fields=['f_hosts_id', 'f_type', 'f_other_type', 'f_text', 'f_evidence'],
            _action=URL('add'), _id="evidence_add_form"
        )

    if request.vars.f_evidence is not None:
        form.vars.f_filename = request.vars.f_evidence.filename

    if form.accepts(request.vars, session):
        response.flash = "Evidence added"
        response.headers['web2py-component-command'] = 'evidencetable.fnReloadAjax();'
        return ""
    elif form.errors:
        response.flash = "Error in form submission"
        return TABLE(*[TR(k, v) for k, v in form.errors.items()])

    db.t_evidence.f_hosts_id.default = None
    response.title = "%s :: Add Evidence" % (settings.title)
    return dict(form=form)
def update_db(f_type=None, record=None, data=None, filename=None, ipv4_addr=None):
    """Adds or updates an existing record id"""
    if record is None:
        # inserting a new record into the database
        if ipv4_addr is None:
            print "ERROR: No IPv4 address provided"
            return False

        host_id = get_host_record(ipv4_addr)
        if not host_id:
            print "ERROR: IPv4 address %s is not a host in the database" % (ipv4_addr)
            return False

        try:
            db.t_evidence.insert(
                f_hosts_id=host_id.id,
                f_filename=filename,
                f_data=data,
                f_type=f_type,
            )
        except Exception, e:
            print "ERROR inserting record:", e
            db.commit()
            return False
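# Example (hedged): a minimal sketch of how update_db() might be called from a
# CLI helper to attach a file on disk to an existing host as evidence. The
# helper name and surrounding setup are assumptions for illustration only.
def add_evidence_from_file(path, ipv4_addr, f_type='Screenshot'):
    """Read a local file and insert it as evidence for ipv4_addr."""
    import os
    with open(path, 'rb') as fh:            # raw bytes go into f_data
        data = fh.read()
    return update_db(
        f_type=f_type,
        data=data,
        filename=os.path.basename(path),    # stored in f_filename
        ipv4_addr=ipv4_addr,
    )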
def process_screenshot_loot(loot_list=[], msf=None):
    """
    Takes an array of loot records in loot_list, downloads the screenshot
    and adds it to the database
    """
    db = current.globalenv['db']
    cache = current.globalenv['cache']

    loot_count = 0
    for loot_id in loot_list:
        loot = msf.loot_download(loot_id)
        ip = loot_dict[loot_id]    # loot_dict: mapping of loot id -> host IP address (defined outside this snippet)
        if loot['ltype'] != 'host.windows.screenshot':
            logging.error(" [!] %s/%s is not a screenshot, it is a %s" % (ip, loot['name'], loot['ltype']))
        else:
            record = get_host_record(ip)
            if not record:
                logging.error(" [!] Cannot find record for %s" % (ip))
                continue
            db.t_evidence.update_or_insert(
                f_hosts_id=record.id,
                f_filename="%s-msfpro-%s.png" % (ip, loot['name']),
                f_evidence="%s-msfpro-%s.png" % (ip, loot['name']),
                f_data=loot['data'],
                f_type='Screenshot',
                f_text='From MetasploitPRO'
            )
            db.commit()
            loot_count += 1

    return loot_count
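# Example (hedged): a sketch of driving process_screenshot_loot() with a
# MetasploitAPI client, mirroring calls used elsewhere in this codebase
# (MetasploitAPI(host=..., apikey=...), msf.login(), msf.loot_download()).
# The loot ids and the loot_dict mapping of loot id -> host IP are assumed to
# be gathered by the caller; how they are obtained is outside this snippet.
def import_screenshots(msf_url, msf_key, loot_ids):
    msf = MetasploitAPI(host=msf_url, apikey=msf_key)
    if not msf.login():
        logging.error("Unable to login to Metasploit PRO")
        return 0
    count = process_screenshot_loot(loot_list=loot_ids, msf=msf)
    logging.info("Imported %d screenshot(s) as evidence" % (count))
    return count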
def add():
    if request.args(0):
        record = get_host_record(request.args(0))
        db.t_netbios.f_hosts_id.default = record.id

    response.title = "%s :: Add NetBIOS Data" % (settings.title)
    form = crud.create(db.t_netbios, next='edit/[id]', message="NetBIOS data added")
    return dict(form=form)
def mass_assign():
    """
    Upload a CSV file that mass-assigns OS records to Hosts. If a CPE record is
    provided, look it up in the DB. If not, look up the vendor and product in the DB.

    File format: ipaddress,cpe,family,vendor,product,certainty,osclass
    """
    response.title = "%s :: Mass OS Update" % (settings.title)
    form = SQLFORM.factory(
        Field('osfile', 'upload', uploadfolder=os.path.join(request.folder, 'data', 'misc'),
              label=T('OS CSV File')),
    )

    if form.accepts(request.vars, session):
        filename = os.path.join(request.folder, 'data/misc', form.vars.osfile)
        import csv
        from skaldship.cpe import lookup_cpe
        #from skaldship.general import
        counter = 0
        with open(filename, "rb") as f:
            for row in csv.reader(f):
                host_id = get_host_record(row[0])
                if not host_id:
                    print "[%s] - Record not found" % (row[0])
                    continue

                cpe = row[1]
                family = row[2]
                vendor = row[3]
                product = row[4]
                certainty = row[5]
                osclass = row[6]

                os_id = None
                if cpe:
                    # we have a cpe entry from xml! hooray!
                    cpe_name = cpe.lstrip('cpe:/o:')
                    os_id = lookup_cpe(cpe_name)
                #else:
                    # no cpe attribute in xml, go through our messy lookup
                    #os_id = guess_cpe_os(os_rec)

                if os_id:
                    db.t_host_os_refs.insert(
                        f_certainty=certainty,
                        f_family=family,
                        f_class=osclass,
                        f_hosts_id=host_id,
                        f_os_id=os_id
                    )
                    db.commit()
                    counter += 1
                else:
                    logger.error("OS not found: %s" % (row))

        response.flash = "%s Hosts updated with new OS records" % (counter)
    elif form.errors:
        response.flash = 'Error in form'

    return dict(form=form)
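# Example (hedged): a hypothetical helper showing the CSV layout mass_assign()
# expects. The column order comes from the docstring above; the sample values
# are illustrative only. No header row is expected by the reader above, e.g.:
#
#   10.0.0.5,cpe:/o:microsoft:windows_7::sp1,Windows,Microsoft,Windows 7,0.9,Server
#
def write_os_csv(path, rows):
    """rows: iterable of 7-item sequences (ipaddress, cpe, family, vendor, product, certainty, osclass)."""
    import csv
    with open(path, "wb") as f:     # "wb" to match the "rb" reader above (Python 2)
        writer = csv.writer(f)
        for row in rows:
            writer.writerow(row)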
def add():
    if request.args(0) is not None:
        record = get_host_record(request.args(0))
        db.t_snmp.f_hosts_id.default = record.id

    response.title = "%s :: Create SNMP Entry" % (settings.title)
    form = crud.create(db.t_snmp, next='edit/[id]')
    db.t_snmp.f_hosts_id.default = None
    return dict(form=form)
def by_host():
    """
    Returns a list of SNMP records based upon a host identifier (id, ipv4, ipv6)
    """
    if request.args(0) is None:
        redirect(URL('default', 'error', vars={'msg': T('Host record not found')}))

    record = get_host_record(request.args(0))
    if record is None:
        redirect(URL('default', 'error', vars={'msg': T('Host record not found')}))

    response.title = "%s :: SNMP Records for %s" % (settings.title, host_title_maker(record))
    snmplist = db(db.t_snmp.f_hosts_id == record.id).select()

    aaData = []
    if request.extension == "json":
        for snmp in snmplist:
            # datatables json requires aaData to be specifically formatted
            aaData.append({
                '0': A("edit", _target="snmp_update_%s" % (snmp.id),
                       _href=URL('edit', extension='html', args=snmp.id)).xml(),
                '1': snmp.f_community,
                '2': snmp.f_version,
                '3': snmp.f_access,
                'DT_RowId': snmp.id,
            })

        result = {
            'sEcho': request.vars.sEcho,
            'iTotalRecords': len(aaData),
            'aaData': aaData,
        }
        return result

    form = TABLE(
        THEAD(TR(TH(T('ID'), _width="5%"),
                 TH(T('Community')),
                 TH(T('Version')),
                 TH(T('Access')),
                 )),
        _class="datatable", _id="snmptable", _style="width:100%")

    add = AddModal(
        db.t_snmp, 'Add', 'Add', 'Add SNMP String',
        fields=['f_community', 'f_version', 'f_access'],
        cmd='snmptable.fnReloadAjax();'
    )
    db.t_snmp.f_hosts_id.default = record.id
    db.t_snmp.id.comment = add.create()

    return dict(form=form, host=record, add=add)
def add_host(self, address=None, ports=None):
    """Looks up the host and adds the result to the query"""
    host_rec = get_host_record(address)
    if host_rec is None:
        sys.stderr.write("%s invalid address!\n" % (address))
    else:
        q = (db.t_services.f_hosts_id == host_rec.id)
        for port in ports:
            q &= (db.t_services.f_proto == port[0])
            q &= (db.t_services.f_number == port[1])
        if self.host_query is None:
            self.host_query = q
        else:
            self.host_query |= q
    return
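# Example (hedged): add_host() is a method excerpted from a query-builder
# object whose host_query attribute starts as None. The wrapper class below is
# hypothetical and exists only to illustrate the intended accumulation of an
# OR'd host/port filter; only add_host() and host_query come from the code above.
class ServiceQueryBuilder(object):
    def __init__(self):
        self.host_query = None
ServiceQueryBuilder.add_host = add_host   # bind the function defined above as a method

qb = ServiceQueryBuilder()
qb.add_host('10.0.0.5', ports=[('tcp', '445'), ('tcp', '139')])
qb.add_host('10.0.0.6', ports=[('tcp', '22')])
if qb.host_query is not None:
    services = db(qb.host_query).select(db.t_services.ALL)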
def add_ajax():
    record = None
    if request.vars.has_key('f_hosts_id'):
        record = get_host_record(request.vars.f_hosts_id)

    if record:
        db.t_host_notes.f_hosts_id.default = record.id

    form = SQLFORM(db.t_host_notes, buttons=[],
                   _action=URL('add_ajax', extension='json'),
                   _id="notes_add_form")

    if form.accepts(request.vars, formname='t_host_notes_create'):
        response.flash = 'Note added'
        response.headers['web2py-component-command'] = 'notesumstable.fnReloadAjax(); notestable.fnReloadAjax();'
        return
    elif form.errors:
        response.flash = "Error in form submission"
        return TABLE(*[TR(k, v) for k, v in form.errors.items()])

    db.t_host_notes.f_hosts_id.default = None
    return dict(form=form)
def process_pwdump_loot(loot_list=[], msf=None):
    """
    Takes an array of loot records in loot_list, downloads the pwdump file and
    adds the users.
    """
    from skaldship.passwords import process_password_file, insert_or_update_acct
    db = current.globalenv['db']
    cache = current.globalenv['cache']

    logging.debug('loot_list = %s' % (loot_list))
    data = []
    for loot_id in loot_list:
        loot = msf.loot_download(loot_id)
        if loot['ltype'] not in ['host.windows.pwdump', 'windows.hashes']:
            logging.error("Loot is not a pwdump, it is a %s" % loot['ltype'])
            continue
        else:
            # process the pwdump file
            pw_data = loot['data'].split('\n')
            accounts = process_password_file(
                pw_data=pw_data,
                file_type='PWDUMP',
                source='Metasploit',
            )

            # find the info/0 service id for the host
            host_id = get_host_record(loot['host'])
            query = (db.t_services.f_number == '0') & \
                    (db.t_services.f_proto == 'info') & \
                    (db.t_services.f_hosts_id == host_id)
            svc_id = db(query).select().first()
            if svc_id is None:
                # info/0 not found.. add it!
                svc_id = db.t_services.insert(f_proto="info", f_number="0",
                                              f_status="info", f_hosts_id=host_id)
                db.commit()

            # insert or update the account records
            resp_text = insert_or_update_acct(svc_id.id, accounts)
            logging.info("Added pwdump records for host: %s" % (loot['host']))
            data.append({loot['host']: resp_text})

    return data
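# Example (hedged): several importers in this codebase attach host-level data
# (pwdumps, hostscript output) to a pseudo-service "info/0". A small helper
# capturing that convention; the helper name is hypothetical and it is not
# called by the code above.
def get_or_add_info_service(db, host_id):
    """Return the t_services id for the host's info/0 service, creating it if missing."""
    query = (db.t_services.f_proto == 'info') & \
            (db.t_services.f_number == '0') & \
            (db.t_services.f_hosts_id == host_id)
    svc = db(query).select().first()
    if svc is not None:
        return svc.id
    svc_id = db.t_services.insert(f_proto='info', f_number='0',
                                  f_status='info', f_hosts_id=host_id)
    db.commit()
    return svc_id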
def summary_by_host():
    """
    Returns a list of notes records based upon a host identifier (id, ipv4, ipv6)
    """
    if request.args(0) is None:
        redirect(URL('default', 'error', vars={'msg': T('No host record provided')}))

    record = get_host_record(request.args(0))
    if record is None:
        redirect(URL('default', 'error', vars={'msg': T('Host record not found')}))

    response.title = "%s :: Notes for host %s" % (settings.title, host_title_maker(record))
    rows = db(db.t_host_notes.f_hosts_id == record.id)(db.t_host_notes).select(
        db.t_host_notes.id, db.t_host_notes.f_note)

    aaData = []
    if request.extension == "json":
        for r in rows:
            # datatables json requires aaData to be specifically formatted
            atxt = []
            atxt.append('<a href="javascript:void()" onclick="delnotes_summ(' + str(r.id) + ')">X</a>')
            atxt.append(r.f_note)
            # add columns after this, don't do anything prior since it'll affect the hidden fields
            aaData.append(atxt)

        result = {
            'sEcho': request.vars.sEcho,
            'iTotalRecords': len(aaData),
            'aaData': aaData,
        }
        return result

    notes = TABLE(
        THEAD(TR(TH(T('[X]'), _width="5%"),
                 TH(T('Note'), _width="90%"),
                 ),
              _style="display:none"),
        _class="table table-condensed", _id="notestable_summary", _style="width:100%")

    return dict(notes=notes)
def popover():
    """
    Returns the detail of a host for popovers
    """
    host_rec = get_host_record(request.args(0))

    resp = {}
    if not host_rec:
        resp['title'] = "Host not found"
        resp['content'] = ""
    else:
        svcs = host_rec.t_services
        svc_cnt = 0
        vuln_cnt = 0
        acct_cnt = 0
        for svc in svcs.select():
            svc_cnt += 1
            vuln_cnt += svc.t_service_vulns.count()
            acct_cnt += svc.t_accounts.count()

        host_os = (0, 'Unknown')
        for os_rec in host_rec.t_host_os_refs.select():
            if os_rec.f_certainty > host_os[0]:
                host_os = (os_rec.f_certainty, db.t_os[os_rec.f_os_id].f_title)

        resp['title'] = host_title_maker(host_rec)
        resp['content'] = XML(TABLE(
            TR(TD(T('Asset Group')), TD(host_rec.f_asset_group)),
            TR(TD(T('Engineer')), TD(db.auth_user[host_rec.f_engineer].username)),
            TR(TD(T('OS')), TD("%s (%s)" % (host_os[1], host_os[0]))),
            TR(TD(T('Services')), TD(svc_cnt)),
            TR(TD(T('Vulnerabilities')), TD(vuln_cnt)),
            TR(TD(T('Accounts')), TD(acct_cnt)),
            _class="table table-condensed",
        ))

    return resp
def list():
    """
    Returns a list of notes records based upon a host identifier (id, ipv4, ipv6)
    """
    aaData = []
    response.title = "%s :: Notes" % (settings.title)
    if request.args(0) is not None:
        record = get_host_record(request.args(0))
        if record:
            response.title = "%s :: Notes for %s" % (settings.title, host_title_maker(record))
    else:
        record = None

    if request.extension == "json":
        if record:
            rows = db(db.t_host_notes.f_hosts_id == record.id).select()
        else:
            rows = db(db.t_host_notes.id > 0).select()

        for r in rows:
            aaData.append({
                '0': A('edit', _target="host_notes_%s" % (r.id), _href=URL('edit', args=r.id)).xml(),
                '1': host_a_maker(r.f_hosts_id).xml(),
                '2': r.f_note,
                'DT_RowId': r.id
            })

        result = {
            'sEcho': request.vars.sEcho,
            'iTotalRecords': len(aaData),
            'aaData': aaData,
        }
        return result

    if record:
        add_fields = ['f_note']
    else:
        add_fields = ['f_hosts_id', 'f_note']

    add_note = AddModal(
        db.t_host_notes, 'Add', 'Add Note', 'Add Note',
        #fields=add_fields,
        cmd='notestable.fnReloadAjax();',
        flash="Note added"
    )
    if record:
        db.t_host_notes.f_hosts_id.default = record.id
    db.t_host_notes.id.comment = add_note.create()

    notes = TABLE(
        THEAD(TR(TH(T('ID'), _width="5%"),
                 TH(T('Host'), _width="20%"),
                 TH(T('Note'), _width="95%"),
                 )),
        _class="datatable", _id="notestable", _style="width:100%")

    return dict(notes=notes, host=record, add_note=add_note)
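# Example (hedged): the JSON branches in these controllers all hand DataTables
# a dict shaped like the one below (sEcho echoes the request, aaData holds one
# dict per row keyed by column index plus DT_RowId). A static sample for
# reference only; the values are illustrative.
sample_datatables_response = {
    'sEcho': '3',                       # echoed back from request.vars.sEcho
    'iTotalRecords': 1,                 # number of rows returned
    'aaData': [
        {
            '0': '<a href="edit/7">edit</a>',     # illustrative markup
            '1': '10.0.0.5',
            '2': 'Default credentials on admin interface',
            'DT_RowId': 7,
        },
    ],
}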
def process_xml( filename=None, addnoports=False, asset_group=None, engineer=None, msf_workspace=False, ip_ignore_list=None, ip_include_list=None, update_hosts=False, ): # Upload and process nMap XML Scan file import re from MetasploitAPI import MetasploitAPI from skaldship.general import get_host_record, do_host_status from skaldship.cpe import lookup_cpe from zenmapCore_Kvasir.NmapParser import NmapParser # output regexes RE_NETBIOS_NAME = re.compile('NetBIOS computer name: (?P<d>.*),') RE_NETBIOS_WORKGROUP = re.compile('Workgroup: (?P<d>.*),') RE_NETBIOS_MAC = re.compile('NetBIOS MAC: (?P<d>([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2}))') # build the hosts only/exclude list ip_exclude = [] if ip_ignore_list: ip_exclude = ip_ignore_list.split('\r\n') # TODO: check for ip subnet/range and break it out to individuals ip_only = [] if ip_include_list: ip_only = ip_include_list.split('\r\n') # TODO: check for ip subnet/range and break it out to individuals log(" [*] Processing nMap scan file %s" % (filename)) nmap_parsed = NmapParser() nmap_parsed.parse_file(filename) #existing_vulnids = db(db.t_vulndata()).select(db.t_vulndata.id, db.t_vulndata.f_vulnid).as_dict(key='f_vulnid') user_id = db.auth_user(engineer) or auth.user.id # parse the hosts, where all the goodies are log(" [-] Parsing %d hosts" % (len(nmap_parsed.hosts))) hoststats = {} hoststats['added'] = 0 hoststats['skipped'] = 0 hoststats['updated'] = 0 hoststats['errored'] = 0 hosts = [] # array of host_id fields svc_db = db.t_services for node in nmap_parsed.hosts: nodefields = {} if node.ipv6: ipaddr = node.ipv6 nodefields['f_ipv4'] = ipaddr elif node.ip.get('type') == 'ipv4': ipaddr = node.ip.get('addr') nodefields['f_ipv4'] = ipaddr else: log(" [!] No IPv4/IPv6 address, skipping") continue nodefields['f_macaddr'] = node.mac status = node.state log(" [-] Host %s status is: %s" % (ipaddr, status)) if status != "up": hoststats['skipped'] += 1 continue if ipaddr in ip_exclude: log(" [-] Host is in exclude list... skipping") hoststats['skipped'] += 1 continue if len(ip_only) > 0 and ipaddr not in ip_only: log(" [-] Host is not in the only list... skipping") hoststats['skipped'] += 1 continue if not node.ports and not addnoports: log(" [-] No ports open and not asked to add those kind... skipping") hoststats['skipped'] += 1 continue # we'lll just take the last hostname in the names list since it'll usually be the full dns name for name in node.hostnames: nodefields['f_hostname'] = name nodefields['f_engineer'] = user_id nodefields['f_asset_group'] = asset_group nodefields['f_confirmed'] = False # check to see if IPv4/IPv6 exists in DB already if 'f_ipv4' in nodefields: host_rec = db(db.t_hosts.f_ipv4 == nodefields['f_ipv4']).select().first() elif 'f_ipv6' in nodefields: host_rec = db(db.t_hosts.f_ipv6 == nodefields['f_ipv6']).select().first() else: log("No IP Address found in record. 
Skipping", logging.ERROR) continue if host_rec is None: host_id = db.t_hosts.insert(**nodefields) db.commit() hoststats['added'] += 1 log(" [-] Adding %s" % (ipaddr)) elif host_rec is not None and update_hosts: db.commit() if 'f_ipv4' in nodefields: host_id = db(db.t_hosts.f_ipv4 == nodefields['f_ipv4']).update(**nodefields) else: host_id = db(db.t_hosts.f_ipv6 == nodefields['f_ipv6']).update(**nodefields) db.commit() host_id = get_host_record(ipaddr) host_id = host_id.id hoststats['updated'] += 1 log(" [-] Updating %s" % (ipaddr)) else: hoststats['skipped'] += 1 db.commit() log(" [-] Skipped %s" % (ipaddr)) continue hosts.append(host_id) # process non-port <hostscript> entries. Add to info/0: for hostscripts in node.hostscripts: query = (svc_db.f_proto == 'info') & (svc_db.f_number == 0) & (svc_db.f_hosts_id == host_id) svc_id = db.t_services.update_or_insert(query, f_proto='info', f_number=0, f_status='open', f_hosts_id=host_id) if not svc_id: svc_rec = db(query).select(cache=(cache.ram, 180)).first() if svc_rec: svc_id = svc_rec.id else: log(" [!] Service record wasn't created", logging.ERROR) continue db.commit() for script in hostscripts: script_id = script.id output = script.output db.t_service_info.update_or_insert(f_services_id=svc_id, f_name=script_id, f_text=output) db.commit() if script_id == 'nbstat': # pull out NetBIOS info from nbstat output result = RE_NETBIOS_MAC.search(output) if 'd' in result.groupdict(): host_rec.update(f_macaddr=result.group('d')) db.commit() result = RE_NETBIOS_NAME.search(output) if 'd' in result.groupdict(): host_rec.update(f_netbios_name=result.group('d')) db.commit() result = RE_NETBIOS_WORKGROUP.search(output) if 'd' in result.groupdict(): db(db.t_netbios.update_or_insert(f_hosts_id=host_id, f_domain=result.group('d'))) db.commit() # add ports and resulting vulndata for port in node.ports: f_proto = port.get('protocol') f_number = port.get('portid') f_status = port.get('port_state') f_name = port.get('service_name') f_product = port.get('service_product') log(" [-] Adding port: %s/%s (%s)" % (f_proto, f_number, f_name)) svc_id = db.t_services.update_or_insert(f_proto=f_proto, f_number=f_number, f_status=f_status, f_hosts_id=host_id, f_name=f_name) if f_product: version = port.get('service_version') if version: f_product += " (%s)" % (version) db.t_service_info.update_or_insert(f_services_id=svc_id, f_name=f_name, f_text=f_product) db.commit() # Process <script> service entries for script in port.get('scripts'): db.t_service_info.update_or_insert(f_services_id=svc_id, f_name=script.get('id'), f_text=script.get('output')) db.commit() # Process <cpe> service entries for port_cpe in port.get('service_cpe'): cpe_id = port_cpe.text.lstrip('cpe:/') if cpe_id[0] == "a": # process CPE Applications #log(" [-] Found Application CPE data: %s" % (cpe_id)) db.t_service_info.update_or_insert(f_services_id=svc_id, f_name='cpe.app', f_text="cpe:/%s" % (cpe_id)) db.commit() elif cpe_id[0] == "o": # process CPE Operating System os_id = lookup_cpe(cpe_id[2:]) if os_id is not None: db.t_host_os_refs.insert(f_certainty='0.9', f_family='Unknown', f_class='Other', f_hosts_id=host_id, f_os_id=os_id) db.commit() else: # So no CPE or existing OS data, lets split up the CPE data and make our own log(" [!] 
No os_id found, this is odd !!!") if msf_workspace: msf = MetasploitAPI(host=user_id.f_msf_pro_url, apikey=user_id.f_msf_pro_key) if msf.login(): try: res = msf.pro_import_file( msf_workspace, filename, { 'DS_REMOVE_FILE': False, 'tag': asset_group, }, ) log(" [*] Added file to MSF Pro: %s" % (res)) except MetasploitAPI.MSFAPIError, e: logging.error("MSFAPI Error: %s" % (e)) pass else: log(" [!] Unable to login to Metasploit PRO, check your API key", logging.ERROR) msf = None
def detail(): if request.args(0) is None: redirect(URL('default', 'error', vars={'msg': T('Host record not found')})) response.files.append(URL(request.application,'static','js/jquery.sparkline.js')) response.files.append(URL(request.application,'static','jstree/jstree.min.js')) #query = db.t_hosts.id == request.args(0) #query = create_hostfilter_query(session.hostfilter, query) record = get_host_record(request.args(0)) if record is None: redirect(URL('hosts', 'list')) hostipv4=record.f_ipv4 engineername = db.auth_user[record.f_engineer].username # to allow updating of the host record from this page host=crud.read(db.t_hosts,record) host.attributes['_id'] = "host_record" host_points = {} # build the host_points field which will cover: # the top t_host_os_ref cpe string os_list = db(db.t_host_os_refs.f_hosts_id == record.id).select() host_points['os'] = (0, 'Unknown') for os_rec in os_list: if os_rec.f_certainty > host_points['os'][0]: host_points['os'] = (os_rec.f_certainty, db.t_os[os_rec.f_os_id].f_title) host_points['account_cnt'] = 0 host_points['password_cnt'] = 0 host_points['cracked_pct'] = 0 host_points['vuln_cnt'] = 0 host_points['vuln_exploited_cnt'] = 0 host_points['vuln_potential_cnt'] = 0 vulns = {} vuln_list = [] services = db(db.t_services.f_hosts_id == record.id).select() for svc in services: for vuln in db(db.t_service_vulns.f_services_id == svc.id).select(): vulndata = db.t_vulndata[vuln.f_vulndata_id] vulns[vulndata.f_vulnid] = ( vulndata.f_severity, vulndata.f_cvss_score ) vuln_list.append(vulndata) host_points['vuln_exploited_cnt'] += db((db.t_service_vulns.f_services_id==svc.id) & (db.t_service_vulns.f_status.like('%exploited%'))).count() host_points['vuln_potential_cnt'] += db((db.t_service_vulns.f_services_id==svc.id) & (db.t_service_vulns.f_status.like('%potential%'))).count() host_points['vuln_cnt'] += db(db.t_service_vulns.f_services_id==svc.id).count() host_points['account_cnt'] += db(db.t_accounts.f_services_id==svc.id).count() pwq = ((db.t_accounts.f_services_id==svc.id) & (db.t_accounts.f_compromised == True)) #pwq &= (((db.t_accounts.f_password != None) | (db.t_accounts.f_password != '')) | (db.t_accounts.f_compromised == True)) host_points['password_cnt'] += db(pwq).count() try: host_points['cracked_pct'] = 100 * (host_points['password_cnt'] / host_points['account_cnt']) except ZeroDivisionError: host_points['cracked_pct'] = 0 # breakdown of vuln severity sev_sum_dict = {} for a in range(1, 11): sev_sum_dict[a] = 0 for k,v in vulns.iteritems(): # take the severity and increment the sev_sum set item if settings.use_cvss: severity = int(float(v[1])) else: severity = v[0] count = sev_sum_dict.setdefault(severity, 1) count += 1 sev_sum_dict[severity] = count sev_sum_spark = [] sev_sum = [] for k,v in sev_sum_dict.iteritems(): sev_sum_spark.append(str(v)) if v > 0: sev_sum.append("%s: %s" % (k, v)) host_points['sev_sum_spark'] = ",".join(sev_sum_spark) host_points['sev_sum'] = " / ".join(sev_sum) # netbios record (or none if it's empty) netb_record = db(db.t_netbios.f_hosts_id == record.id).select().first() or None if netb_record is not None: netbios=crud.update(db.t_netbios, netb_record, ondelete=lambda netbios: redirect(URL('host_detail', args=[ record.id ]))) host_points['netb_domain'] = netb_record.f_domain host_points['netb_type'] = netb_record.f_type else: db.t_netbios.f_hosts_id.default = record.id netbios = LOAD('netbios', 'add.load', args=[host.record.id], ajax=True, target='netbios_info') host_pagination = pagination(request, record) response.title = 
"%s :: Host info :: %s" % (settings.title, host_title_maker(record)) return dict(host=host, netbios=netbios, host_points=host_points, host_pagination=host_pagination, hostipv4=hostipv4, engineername=engineername)
def by_host():
    """
    Returns a list of services + serviceinfo based upon a host identifier (id, ipv4, ipv6)
    """
    record = get_host_record(request.args(0))
    if record is None:
        redirect(URL('default', 'error', vars={'msg': T('Host record not found')}))

    response.title = "%s :: Services for %s" % (settings.title, host_title_maker(record))
    services = db(db.t_services.f_hosts_id == record.id).select(
        db.t_services.id, db.t_services.f_proto, db.t_services.f_number,
        db.t_services.f_status, db.t_services.f_name, db.t_services.f_banner)  #, cache=(cache.ram,60))

    svcq = (db.t_services.f_hosts_id == record.id)
    infoq = (db.t_service_info.f_services_id == db.t_services.id)

    if request.extension == "json":
        #rows = db(svcq).select(db.t_services.ALL, db.t_service_info.ALL, left=db.t_service_info.on(infoq))
        aaData = []
        for svc in services:
            # service info
            atxt = {}
            q = db(db.t_service_info.f_services_id == svc.id).select()
            if len(q) > 0:
                addl = []
                for svcinfo in q:
                    addl.append(TR(TD(svcinfo.f_name), TD(svcinfo.f_text)))
                atxt['0'] = IMG(_src=URL(request.application, 'static', 'images/details_open.png')).xml()
                atxt['1'] = TABLE(THEAD(TR(TH(T('Name')), TH(T('Text')))), TBODY(addl)).xml()
            else:
                atxt['0'] = ""
                atxt['1'] = ""
            atxt['2'] = A('edit', _target="services_edit_%s" % (svc.id),
                          _href=URL('edit', args=[svc['id']], extension='html')).xml()
            atxt['3'] = svc.f_proto
            if svc.f_number in HTTP_PORTS and svc.f_proto == "tcp" or svc.f_name == "HTTP":
                atxt['4'] = A(svc.f_number, _href="http://%s:%s/" % (record.f_ipv4, svc.f_number),
                              _target="%s-tcp-%s" % (record.f_ipv4, svc.f_number)).xml()
            elif svc.f_number in HTTPS_PORTS and svc.f_proto == "tcp" or svc.f_name == "HTTPS":
                atxt['4'] = A(svc.f_number, _href="https://%s:%s/" % (record.f_ipv4, svc.f_number),
                              _target="%s-tcp-%s" % (record.f_ipv4, svc.f_number)).xml()
            else:
                atxt['4'] = svc.f_number
            atxt['5'] = svc.f_status
            atxt['6'] = svc.f_name or ""
            atxt['7'] = svc.f_banner or ""
            atxt['DT_RowId'] = svc.id
            aaData.append(atxt)

        result = {
            'sEcho': request.vars._,
            'iTotalRecords': len(aaData),
            'aaData': aaData,
        }
        return result

    add = AddModal(
        db.t_services, 'Add', 'Add', 'Add Service',
        fields=['f_proto', 'f_number', 'f_status', 'f_name', 'f_banner'],
        cmd='servicetable.fnReloadAjax();'
    )
    db.t_services.f_hosts_id.default = record.id
    db.t_services.id.comment = add.create()

    form = TABLE(
        THEAD(TR(TH('', _width="5%"),
                 TH('Info'),
                 TH(T('')),
                 TH(T('Protocol')),
                 TH(T('Number')),
                 TH(T('Status')),
                 TH(T('Name')),
                 TH(T('Banner')),
                 )),
        _class="datatable", _id="servicetable", _style="width:100%")

    return dict(form=form, host=record, add=add)
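# Note (hedged): the port-linking logic above relies on module-level
# HTTP_PORTS / HTTPS_PORTS collections that are not shown in this excerpt.
# Plausible definitions, shown for illustration only (string port numbers,
# matching the string f_number values used elsewhere in this codebase):
HTTP_PORTS = ['80', '8000', '8080']
HTTPS_PORTS = ['443', '8443']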
        nodefields['f_hostname'] = node.find('names/name').text

        # check to see if IP exists in DB already
        query = (db.t_hosts.f_ipv4 == ip) | (db.t_hosts.f_ipv6 == ip)
        host_rec = db(query).select().first()
        if host_rec is None:
            host_id = db.t_hosts.insert(**nodefields)
            db.commit()
            hoststats['added'] += 1
            log(" [-] Adding IP: %s" % ip)
        elif update_hosts:
            db.commit()
            if 'f_ipv4' in nodefields:
                db(db.t_hosts.f_ipv4 == nodefields['f_ipv4']).update(**nodefields)
                db.commit()
                host_id = get_host_record(nodefields['f_ipv4'])
                host_id = host_id.id
                hoststats['updated'] += 1
                log(" [-] Updating IP: %s" % ip)
            else:
                db(db.t_hosts.f_ipv6 == nodefields['f_ipv6']).update(**nodefields)
                db.commit()
                host_id = get_host_record(nodefields['f_ipv6'])
                host_id = host_id.id
                hoststats['updated'] += 1
                log(" [-] Updating IP: %s" % ip)
        else:
            hoststats['skipped'] += 1
            db.commit()
            log(" [-] Skipped IP: %s" % ip)
            continue
def launch_terminal(record=None, launch_cmd=None):
    """
    Opens a terminal on the Web Server. This only works if the web2py server is
    running on the user's workstation.

    The command to execute is stored in the user's settings db under
    auth_user.f_launch_cmd. Variables translated:

       _IP_      -- The current IP Address (v4 by default, v6 if exists)
       _LOGFILE_ -- Session logfile name (we prepend the path)

    If an IPv6 address is used then ':' is changed to '_'

    Example: xterm -sb -sl 1500 -vb -T 'manual hacking: _IP_' -n 'manual hacking: _IP_' -e script _LOGFILE_
    """
    record = get_host_record(record)

    # only execute launch on requests from localhost!
    if request.env['remote_addr'] != '127.0.0.1':
        logger.error("Can only launch from localhost! remote_addr = %s" % (request.env['remote_addr']))
        return "Can only launch from localhost"

    if record is None:
        return "No record found"

    import string, os, subprocess
    import time

    # if no launch command use the default
    if not launch_cmd:
        launch_cmd = "xterm -sb -sl 1500 -vb -T 'manual hacking: _IP_' -n 'manual hacking: _IP_' -e 'script _LOGFILE_'"

    # check ip address
    if record.f_ipv6 is None or len(record.f_ipv6) == 0:
        ip = record.f_ipv4
        logip = record.f_ipv4
    else:
        ip = record.f_ipv6
        logip = record.f_ipv6.replace(":", "_")

    logdir = "session-logs"
    logfilename = "%s-%s.log" % (logip, time.strftime("%Y%m%d%H%M%S", time.localtime(time.time())))
    logfile = os.path.join(logdir, logfilename)
    launch_cmd = launch_cmd.replace("_IP_", ip)
    launch_cmd = launch_cmd.replace("_LOGFILE_", logfile)

    from skaldship.general import check_datadir

    # Check to see if data directories exist, create otherwise
    check_datadir(request.folder)
    datadir = os.path.join(request.folder, "data")

    # chdir to datadir!
    launch_cmd = launch_cmd.replace("_DATADIR_", datadir)
    os.chdir(datadir)

    # set environment variables
    os.environ['IP'] = ip
    os.environ['HOSTNAME'] = record.f_hostname or ""
    os.environ['DATADIR'] = datadir

    try:
        logger.info("Spawning: %s\n" % (launch_cmd))
        print("Spawning: %s" % (launch_cmd))
        subprocess.Popen(launch_cmd, shell=True)  #, stdout=None, stdin=None, stderr=None)
    except Exception, e:
        logger.error("Error spawning launch cmd (%s): %s\n" % (launch_cmd, e))
        print("Error spawning launch cmd (%s): %s\n" % (launch_cmd, e))
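# Example (hedged): launch_terminal() substitutes _IP_, _LOGFILE_ and _DATADIR_
# into whatever command string it is given, so any terminal emulator can be
# used. The host identifier and the gnome-terminal invocation below are
# illustrative assumptions, not a tested configuration.
launch_terminal(
    record='10.0.0.5',
    launch_cmd="gnome-terminal --title 'manual hacking: _IP_' -e 'script _LOGFILE_'",
)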
def process_xml( filename=None, addnoports=False, asset_group=None, engineer=None, msf_settings={}, ip_ignore_list=None, ip_include_list=None, update_hosts=False, ): # Upload and process Nmap XML Scan file import re import os from skaldship.general import get_host_record, do_host_status from skaldship.cpe import lookup_cpe from zenmapCore_Kvasir.NmapParser import NmapParser # output regexes RE_NETBIOS_NAME = re.compile("NetBIOS computer name: (?P<d>.*),") RE_NETBIOS_WORKGROUP = re.compile("Workgroup: (?P<d>.*),") RE_NETBIOS_MAC = re.compile("NetBIOS MAC: (?P<d>([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2}))") # build the hosts only/exclude list ip_exclude = [] if ip_ignore_list: ip_exclude = ip_ignore_list.split("\r\n") # TODO: check for ip subnet/range and break it out to individuals ip_only = [] if ip_include_list: ip_only = ip_include_list.split("\r\n") # TODO: check for ip subnet/range and break it out to individuals log(" [*] Processing Nmap scan file %s" % (filename)) nmap_parsed = NmapParser() nmap_parsed.parse_file(filename) # existing_vulnids = db(db.t_vulndata()).select(db.t_vulndata.id, db.t_vulndata.f_vulnid).as_dict(key='f_vulnid') # parse the hosts, where all the goodies are log(" [-] Parsing %d hosts" % (len(nmap_parsed.hosts))) hoststats = {} hoststats["added"] = 0 hoststats["skipped"] = 0 hoststats["updated"] = 0 hoststats["errored"] = 0 hosts = [] # array of host_id fields svc_db = db.t_services for node in nmap_parsed.hosts: nodefields = {} if node.ipv6: ipaddr = node.ipv6 nodefields["f_ipv4"] = ipaddr elif node.ip.get("type") == "ipv4": ipaddr = node.ip.get("addr") nodefields["f_ipv4"] = ipaddr else: log(" [!] No IPv4/IPv6 address, skipping") continue try: nodefields["f_macaddr"] = node.mac["addr"] except TypeError: nodefields["f_macaddr"] = None status = node.state log(" [-] Host %s status is: %s" % (ipaddr, status)) if status != "up": hoststats["skipped"] += 1 continue if ipaddr in ip_exclude: log(" [-] Host is in exclude list... skipping") hoststats["skipped"] += 1 continue if len(ip_only) > 0 and ipaddr not in ip_only: log(" [-] Host is not in the only list... skipping") hoststats["skipped"] += 1 continue if not node.ports and not addnoports: log(" [-] No ports open and not asked to add those kind... skipping") hoststats["skipped"] += 1 continue # we'lll just take the last hostname in the names list since it'll usually be the full dns name for name in node.hostnames: nodefields["f_hostname"] = name["hostname"] nodefields["f_engineer"] = engineer nodefields["f_asset_group"] = asset_group nodefields["f_confirmed"] = False # see if host exists, if so update. if not, insert! query = (db.t_hosts.f_ipv4 == ipaddr) | (db.t_hosts.f_ipv6 == ipaddr) host_rec = db(query).select().first() if host_rec is None: host_id = db.t_hosts.insert(**nodefields) db.commit() hoststats["added"] += 1 log(" [-] Adding %s" % (ipaddr)) elif host_rec is not None and update_hosts: db.commit() if "f_ipv4" in nodefields: host_id = db(db.t_hosts.f_ipv4 == nodefields["f_ipv4"]).update(**nodefields) else: host_id = db(db.t_hosts.f_ipv6 == nodefields["f_ipv6"]).update(**nodefields) db.commit() host_id = get_host_record(ipaddr) host_id = host_id.id hoststats["updated"] += 1 log(" [-] Updating %s" % (ipaddr)) else: hoststats["skipped"] += 1 db.commit() log(" [-] Skipped %s" % (ipaddr)) continue hosts.append(host_id) # process non-port <hostscript> entries. 
Add to info/0: for hostscripts in node.hostscripts: query = (svc_db.f_proto == "info") & (svc_db.f_number == 0) & (svc_db.f_hosts_id == host_id) svc_id = db.t_services.update_or_insert( query, f_proto="info", f_number=0, f_status="open", f_hosts_id=host_id ) if not svc_id: svc_rec = db(query).select(cache=(cache.ram, 180)).first() if svc_rec: svc_id = svc_rec.id else: log(" [!] Service record wasn't created", logging.ERROR) continue db.commit() for script in hostscripts: script_id = script.id output = script.output db.t_service_info.update_or_insert(f_services_id=svc_id, f_name=script_id, f_text=output) db.commit() if script_id == "nbstat": # pull out NetBIOS info from nbstat output result = RE_NETBIOS_MAC.search(output) if "d" in result.groupdict(): host_rec.update(f_macaddr=result.group("d")) db.commit() result = RE_NETBIOS_NAME.search(output) if "d" in result.groupdict(): host_rec.update(f_netbios_name=result.group("d")) db.commit() result = RE_NETBIOS_WORKGROUP.search(output) if "d" in result.groupdict(): db(db.t_netbios.update_or_insert(f_hosts_id=host_id, f_domain=result.group("d"))) db.commit() # add ports and resulting vulndata for port in node.ports: f_proto = port.get("protocol") f_number = port.get("portid") f_status = port.get("port_state") f_name = port.get("service_name") f_product = port.get("service_product") log(" [-] Adding port: %s/%s (%s)" % (f_proto, f_number, f_name)) svc_id = db.t_services.update_or_insert( f_proto=f_proto, f_number=f_number, f_status=f_status, f_hosts_id=host_id, f_name=f_name ) if f_product: version = port.get("service_version") if version: f_product += " (%s)" % (version) db.t_service_info.update_or_insert(f_services_id=svc_id, f_name=f_name, f_text=f_product) db.commit() # Process <script> service entries for script in port.get("scripts"): db.t_service_info.update_or_insert( f_services_id=svc_id, f_name=script.get("id"), f_text=script.get("output") ) db.commit() # Process <cpe> service entries port_cpe = port.get("service_cpe") if port_cpe: cpe_id = port_cpe.lstrip("cpe:/") if cpe_id.startswith("a"): # process CPE Applications # log(" [-] Found Application CPE data: %s" % (cpe_id)) db.t_service_info.update_or_insert( f_services_id=svc_id, f_name="cpe.app", f_text="cpe:/%s" % (cpe_id) ) db.commit() elif cpe_id.startswith("o"): # process CPE Operating System os_id = lookup_cpe(cpe_id[2:]) if os_id is not None: db.t_host_os_refs.insert( f_certainty="0.9", f_family="Unknown", f_class="Other", f_hosts_id=host_id, f_os_id=os_id ) db.commit() else: # So no CPE or existing OS data, lets split up the CPE data and make our own log(" [!] No os_id found, this is odd !!!") if msf_settings.get("workspace"): try: # check to see if we have a Metasploit RPC instance configured and talking from MetasploitAPI import MetasploitAPI msf_api = MetasploitAPI(host=msf_settings.get("url"), apikey=msf_settings.get("key")) working_msf_api = msf_api.login() except Exception, error: log(" [!] Unable to authenticate to MSF API: %s" % str(error), logging.ERROR) working_msf_api = False try: scan_data = open(filename, "r+").readlines() except Exception, error: log(" [!] Error loading scan data to send to Metasploit: %s" % str(error), logging.ERROR) scan_data = None
def host_to_ip(host_rec):
    if isinstance(host_rec, (int, str)):
        host_rec = get_host_record(host_rec)
    if not host_rec:
        return None
    return host_rec.get('f_ipv4') or host_rec.get('f_ipv6')
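# Example (hedged): host_to_ip() accepts either a host identifier (t_hosts.id
# or an address string) or an already-fetched t_hosts row, which makes it
# convenient at module boundaries. The values below are illustrative.
print(host_to_ip(42))             # lookup by t_hosts.id
print(host_to_ip('10.0.0.5'))     # lookup by address, returns the stored IPv4/IPv6
row = get_host_record('10.0.0.5')
print(host_to_ip(row))            # pass a row straight through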
    try:
        os.chdir(loot_dir)
        loot_files = os.listdir(loot_dir)
    except OSError:
        loot_files = []

    loot_file_details = []
    for loot in loot_files:
        try:
            (timestamp, workspace, ipaddr, filetype, extension) = re.split('_', loot)
        except ValueError:
            logging.warn("Invalid loot file: %s" % (loot))
            continue

        # TODO: service_list = get_services(ipaddr)
        host_rec = get_host_record(ipaddr)
        services = []
        for service in db(dbsvcs.f_hosts_id == host_rec).select(
                dbsvcs.id, dbsvcs.f_proto, dbsvcs.f_number, cache=(cache.ram, 120)):
            services.append([service.id, "%s/%s" % (service.f_proto, service.f_number)])

        loot_file_details.append([workspace, ipaddr, services, filetype])

    form_lootdir = SQLFORM.factory(
        Field('lootdir', 'string', default=loot_dir, requires=IS_NOT_EMPTY(),
              label=T('Metasploit Loot Directory')),
    )

    return dict(form_lootdir=form_lootdir, loot_file_details=loot_file_details)

##-------------------------------------------------------------------------
## report
    except AttributeError, e:
        logger.error("Invalid port sent: %s", port)

    try:
        pw_data = open(filename, "rb").read().split('\n')
    except IOError, e:
        logger.error("Error opening %s: %s" % (filename, e))

    accounts = process_password_file(
        pw_data=pw_data,
        file_type=file_type,
        source='Metasploit',
    )

    # find the info/0 service id for the host
    host_id = get_host_record(loot['host'])
    query = (db.t_services.f_number == number) & (db.t_services.f_proto == proto) & (db.t_services.f_hosts_id == host_id)
    svc_id = db(query).select().first()
    if svc_id is None:
        # info/0 not found.. add it!
        svc_id = db.t_services.insert(f_proto=proto, f_number=number, f_hosts_id=host_id)
        db.commit()

    # insert or update the account records
    resp_text = insert_or_update_acct(svc_id.id, accounts)
    logging.info("Added loot accounts for host: %s" % (loot['host']))
    data.append({loot['host']: resp_text})

##-------------------------------------------------------------------------

def process_report_xml(
def parse(self, host_properties):
    """
    Parse out the <HostProperties> xml content. There can be a number of <tag>
    entries that are either useful to us in t_hosts or other areas. These are
    processed and returned as dictionary entries in 'hostdata'

    Args:
        host_properties: A <HostProperties> section from .nessus

    Returns:
        t_hosts.id, { hostdata }
    """
    from gluon.validators import IS_IPADDRESS
    if not etree.iselement(host_properties):
        logging.error("Invalid HostProperties value received")
        return None, {}

    hostdata = {}
    for tag in host_properties.findall('tag'):
        hostdata[tag.get('name')] = tag.text

    ipaddr = hostdata.get('host-ip')
    if (ipaddr not in self.ip_include and self.ip_include) or (ipaddr in self.ip_exclude):
        log("Host in exclude or not in include list, skipping")
        self.stats['skipped'] += 1
        return None, {}

    host_id = get_host_record(ipaddr)
    if host_id and not self.update_hosts:
        return host_id, hostdata

    # new host found, pull what we need for t_hosts
    hostfields = {}
    hostfields['f_engineer'] = self.engineer
    hostfields['f_asset_group'] = self.asset_group
    hostfields['f_confirmed'] = False

    # check ipv4/ipv6 and set hostfields accordingly
    if IS_IPADDRESS(is_ipv4=True)(ipaddr)[1] is None:
        hostfields['f_ipv4'] = ipaddr
    elif IS_IPADDRESS(is_ipv6=True)(ipaddr)[1] is None:
        hostfields['f_ipv6'] = ipaddr
    else:
        log("Invalid IP Address in HostProperties: %s" % ipaddr, logging.ERROR)
        return None, {}

    # pull out relevant hostfields
    for (k, v) in hostdata.iteritems():
        if k == 'mac-address':
            # multiple mac addrs may appear wildly, just pull the first
            hostfields['f_macaddr'] = v[:v.find('\n')]
        elif k == 'host-fqdn':
            hostfields['f_hostname'] = v
        elif k == 'netbios-name':
            hostfields['f_netbios_name'] = v

    if not self.update_hosts and not host_id:
        result = self.db.t_hosts.validate_and_insert(**hostfields)
        if not result.id:
            log("Error adding host to DB: %s" % result.errors, logging.ERROR)
            return None, {}
        self.stats['added'] += 1
        host_id = result.id
        log(" [-] Adding host: %s" % ipaddr)
    elif self.update_hosts:
        if hostfields.get('f_ipv4'):
            host_id = self.db(self.db.t_hosts.f_ipv4 == hostfields['f_ipv4']).update(**hostfields)
            self.db.commit()
            host_id = get_host_record(hostfields['f_ipv4'])
            host_id = host_id.id
            log(" [-] Updating IP: %s" % (hostfields['f_ipv4']))
        else:
            host_id = self.db(self.db.t_hosts.f_ipv6 == hostfields['f_ipv6']).update(**hostfields)
            self.db.commit()
            host_id = get_host_record(hostfields['f_ipv6'])
            host_id = host_id.id
            log(" [-] Updating IP: %s" % (hostfields['f_ipv6']))
        self.stats['updated'] += 1

    return host_id, hostfields
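# Example (hedged): minimal shape of the <HostProperties> block consumed by
# parse() above, built here with lxml.etree for illustration (the module's own
# etree import may differ). nessus_parser is an assumed, already-configured
# instance of the report-processing class this method belongs to.
from lxml import etree

host_properties = etree.fromstring(
    '<HostProperties>'
    '<tag name="host-ip">10.0.0.5</tag>'
    '<tag name="host-fqdn">fileserver.example.local</tag>'
    '<tag name="netbios-name">FILESERVER</tag>'
    '<tag name="mac-address">00:11:22:33:44:55</tag>'
    '</HostProperties>'
)
host_id, hostdata = nessus_parser.parse(host_properties)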
def parse_host(self, host): """ Parse an XML host data from ShodanHQ results """ from gluon.validators import IS_IPADDRESS db = current.globalenv['db'] hostfields = {} ipaddr = host.get('ip') if self.ip_only and ipaddr not in self.ip_only: log(" [-] %s is not in the only list... skipping" % (ipaddr)) #sys.stderr.write(msg) self.stats['hosts_skipped'] += 1 return if ipaddr in self.ip_exclude: log(" [-] %s is in exclude list... skipping" % (ipaddr)) if IS_IPADDRESS(is_ipv4=True)(ipaddr)[1] is None: # address is IPv4: hostfields['f_ipv4'] = ipaddr elif IS_IPADDRESS(is_ipv6=True)(ipaddr)[1] is None: hostfields['f_ipv6'] = ipaddr else: log(" [!] Invalid IP Address in report: %s" % (ipaddr)) return hostname = host.findtext('hostnames') if hostname: hostfields['f_hostname'] = hostname # check to see if IP exists in DB already if 'f_ipv4' in hostfields: host_rec = db(db.t_hosts.f_ipv4 == hostfields['f_ipv4']).select().first() else: host_rec = db(db.t_hosts.f_ipv6 == hostfields['f_ipv6']).select().first() if host_rec is None: hostfields['f_asset_group'] = self.asset_group hostfields['f_engineer'] = self.engineer host_id = db.t_hosts.insert(**hostfields) db.commit() self.stats['hosts_added'] += 1 log(" [-] Adding IP: %s" % (ipaddr)) elif host_rec is not None: db.commit() if 'f_ipv4' in hostfields: host_id = db(db.t_hosts.f_ipv4 == hostfields['f_ipv4']).update(**hostfields) db.commit() host_id = get_host_record(hostfields['f_ipv4']) host_id = host_id.id self.stats['hosts_updated'] += 1 log(" [-] Updating IP: %s" % (hostfields['f_ipv4'])) else: host_id = db(db.t_hosts.f_ipv6 == hostfields['f_ipv6']).update(**hostfields) db.commit() host_id = get_host_record(hostfields['f_ipv6']) host_id = host_id.id self.stats['hosts_updated'] += 1 log(" [-] Updating IP: %s" % (hostfields['f_ipv6'])) else: self.stats['hosts_skipped'] += 1 db.commit() log(" [-] Skipped IP: %s" % (ipaddr)) return # process the service / data f_number = host.get('port') if f_number == '161': # only udp provided by shodanhq is snmp f_proto = 'udp' else: f_proto = 'tcp' f_status = 'open' f_name = '' addl_fields = {} # extract the data field for processing port_data = host.findtext('data') # for ssh, telnet and smtp throw data into the banner if f_number == '21': f_banner = "\n".join(self.SMTP_FTP_220.findall(port_data)) f_name = 'FTP' addl_fields = { 'ftp.banner': port_data, } elif f_number == '22': f_banner = port_data f_name = 'SSH' addl_fields = { 'ssh.banner': port_data, } elif f_number == '23': f_banner = port_data f_name = 'Telnet' elif f_number == '25': f_banner = "\n".join(self.SMTP_FTP_220.findall(port_data)) f_name = 'SMTP' addl_fields = { 'smtp.banner': port_data, } elif f_number == '80': # TODO: parse HTTP headers.. ugly f_banner = port_data f_name = 'HTTP' addl_fields = { 'http.banner': port_data, } elif f_number == '1900': f_banner = port_data f_name = 'UPNP' addl_fields = { 'upnp.banner': port_data, } else: f_banner = port_data query = (db.t_services.f_proto == f_proto) & (db.t_services.f_number == f_number) & (db.t_services.f_hosts_id == host_id) svc_row = db(query).select().first() if svc_row: # we found a service record! 
Check for similar status, names and banners do_update = False if svc_row.f_status != f_status: svc_row.f_status = f_status do_update = True if svc_row.f_name != f_name: svc_row.f_name = f_name do_update = True if svc_row.f_banner != f_banner: svc_row.f_banner = f_banner do_update = True svc_id = svc_row.id if do_update: svc_row.update_record() db.commit() didwhat = "Updated" self.stats['services_updated'] += 1 else: didwhat = "Unaltered" else: # we have a new service! svc_id = db.t_services.insert( f_proto=f_proto, f_number=f_number, f_status=f_status, f_name=f_name, f_banner=f_banner, f_hosts_id=host_id ) db.commit() didwhat = "Added" self.stats['services_added'] += 1 log(" [-] %s service: (%s) %s/%s" % (didwhat, ipaddr, f_proto, f_number)) for k, v in addl_fields.iteritems(): # add additional field entries as service_info records db.t_service_info.update_or_insert( f_services_id=svc_id, f_name=k, f_text=v, ) db.commit()
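# Note (hedged): the per-port branches in parse_host() above amount to a
# port -> (service name, service_info key) mapping plus an optional banner
# filter for FTP/SMTP. A compact data-driven summary of that convention,
# shown only for documentation; it is not used by the parser above.
SHODAN_PORT_MAP = {
    '21':   ('FTP',    'ftp.banner'),
    '22':   ('SSH',    'ssh.banner'),
    '23':   ('Telnet', None),
    '25':   ('SMTP',   'smtp.banner'),
    '80':   ('HTTP',   'http.banner'),
    '1900': ('UPNP',   'upnp.banner'),
}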
                else:
                    logger.error("Error with password process: %s" % (mass_pw_data['error']))
            except Exception, e:
                logger.error("Error with line (%s): %s" % (line, e))

    # run through the ip_accts now to add/update them to the database
    from skaldship.general import get_host_record
    for k, v in ip_dict.iteritems():
        for ip_acct in v:
            # build a query to find the service for this host/port combo
            query = (db.t_hosts.f_ipv4 == k) & (db.t_services.f_hosts_id == db.t_hosts.id)
            query &= (db.t_services.f_proto == ip_acct['f_proto']) & (db.t_services.f_number == ip_acct['f_number'])
            svc = db(query).select(db.t_services.id, cache=(cache.ram, 60)).first()
            if svc is None:
                # no service found, get the host record based on the IP
                host_rec = get_host_record(k)
                if host_rec is None and add_hosts:
                    # add host to the database, unfortunately all we know is the IP address so it's pretty bare.
                    # assign it to the current user and asset group of "new_hosts_medusa"
                    fields = {
                        'f_ipv4': k,
                        'f_engineer': user_id,
                        'f_asset_group': 'new_hosts_medusa',
                    }
                    host_rec = db.t_hosts.insert(**fields)
                    db.commit()
                    logger.info("Added new host from Medusa output: %s" % (k))
                    new_hosts += 1
                elif host_rec is None:
                    # no host and not asking to add hosts so print message and continue
                    logger.error("Unable to find host_rec for %s" % (k))
def refs_by_host():
    """
    Returns a list of OS records based upon a host identifier (id, ipv4, ipv6)
    """
    if request.args(0) is None:
        redirect(URL('default', 'error', vars={'msg': T('No host record sent')}))

    record = get_host_record(request.args(0))
    if record is None:
        redirect(URL('default', 'error', vars={'msg': T('Host record not found')}))

    response.title = "%s :: OS Records for %s" % (settings.title, host_title_maker(record))
    oslist = db(db.t_host_os_refs.f_hosts_id == record.id).select()

    aaData = []
    if request.extension == "json":
        for osdetail in oslist:
            osinfo = db.t_os(osdetail['f_os_id'])
            # datatables json requires aaData to be specifically formatted
            atxt = {}
            atxt['0'] = A('edit', _target="oswindow_%s" % (osdetail.id),
                          _href=URL('refs_edit', args=[osdetail.id], extension='html')).xml()
            atxt['1'] = osdetail.f_family
            atxt['2'] = osdetail.f_class
            atxt['3'] = osdetail.f_certainty
            atxt['4'] = osinfo.f_cpename
            atxt['5'] = osinfo.f_title
            atxt['DT_RowId'] = osdetail.id
            aaData.append(atxt)

        result = {
            'sEcho': request.vars.sEcho,
            'iTotalRecords': len(aaData),
            'aaData': aaData,
        }
        return result

    form = TABLE(
        THEAD(TR(TH(T(''), _width="5%"),
                 TH(T('Family')),
                 TH(T('Class')),
                 TH(T('Certainty')),
                 TH(T('CPE Name')),
                 TH(T('Title')),
                 )),
        _class="datatable", _id="ostable", _style="width:100%")

    add_os_refs = AddModal(
        db.t_host_os_refs, 'Add', 'Add', 'Add OS',
        fields=['f_certainty', 'f_class', 'f_family', 'f_os_id'],
        cmd='ostable.fnReloadAjax();'
    )
    db.t_host_os_refs.f_hosts_id.default = record.id
    db.t_host_os_refs.id.comment = add_os_refs.create()

    add_non_cpe = AddModal(
        db.t_os, 'Add Non-CPE OS', 'Add Non-CPE OS', 'Add Non-CPE OS',
        #fields=[],
        #cmd='ostable.fnReloadAjax();'
    )
    db.t_os.id.comment = add_non_cpe.create()

    return dict(form=form, host=record, add_os_refs=add_os_refs, add_non_cpe=add_non_cpe)
def list():
    """
    Returns a list of evidence based on a host (id, ipv4, ipv6) or all
    """
    import os, string
    if request.args(0) is not None:
        record = get_host_record(request.args(0))
        if record is None:
            redirect(URL('default', 'error', vars={'msg': T('Host record not found')}))
        response.title = "%s :: Evidence for host %s" % (settings.title, host_title_maker(record))
    else:
        response.title = "%s :: Evidence listing" % (settings.title)
        record = None

    aaData = []
    if request.extension == "json":
        if record is None:
            rows = db(db.t_evidence).select(
                db.t_evidence.id, db.t_evidence.f_hosts_id, db.t_evidence.f_type,
                db.t_evidence.f_other_type, db.t_evidence.f_text, db.t_evidence.f_filename,
                db.t_evidence.f_evidence, db.t_evidence.f_data.len() + 1)
        else:
            rows = db(db.t_evidence.f_hosts_id == record.id).select(
                db.t_evidence.id, db.t_evidence.f_hosts_id, db.t_evidence.f_type,
                db.t_evidence.f_other_type, db.t_evidence.f_text, db.t_evidence.f_filename,
                db.t_evidence.f_evidence, db.t_evidence.f_data.len() + 1)

        for r in rows:
            atxt = {}
            cnt = 0
            atxt[cnt] = A('edit', _target="evidence_edit_%s" % (r.t_evidence.id),
                          _href=URL('edit', extension='html', args=r.t_evidence.id)).xml()
            cnt += 1
            if record is None:
                atxt[cnt] = host_a_maker(r.t_evidence.f_hosts_id).xml()
                cnt += 1
            if r.t_evidence.f_other_type:
                atxt[cnt] = "Other: %s" % (r.t_evidence.f_other_type)
            else:
                atxt[cnt] = r.t_evidence.f_type
            cnt += 1
            atxt[cnt] = r.t_evidence.f_text
            cnt += 1
            if r.t_evidence.f_filename is not None:
                if string.lower(os.path.splitext(r.t_evidence.f_filename)[1]) in ('.png', '.jpeg', '.jpg', '.gif'):
                    atxt[cnt] = A(IMG(_src=URL('download', args=[r.t_evidence.f_evidence]),
                                      _width="50%", _height="20%"),
                                  _href=URL('download', args=[r.t_evidence.f_evidence]),
                                  _target="evidence_image_%s" % (r.t_evidence.id),
                                  _id="evidence_image").xml()
                    cnt += 1
                    atxt[cnt] = "%sb" % (r._extra['(LENGTH(t_evidence.f_data) + 1)'])
                    cnt += 1
                else:
                    atxt[cnt] = A(r.t_evidence.f_filename,
                                  _target="evidence_other_%s" % (r.t_evidence.id),
                                  _id="evidence_other",
                                  _href=URL('download', args=[r.t_evidence.f_evidence])).xml()
                    cnt += 1
                    atxt[cnt] = "%sb" % (r._extra['(LENGTH(t_evidence.f_data) + 1)'])
                    cnt += 1
            else:
                atxt[cnt] = r.t_evidence.f_filename
                cnt += 1
            atxt['DT_RowId'] = r.t_evidence.id
            aaData.append(atxt)

        return {
            'sEcho': request.vars.sEcho,
            'iTotalRecords': len(aaData),
            'aaData': aaData,
        }

    if record:
        th_rows = (TH(T(''), _width="5%"),
                   TH(T('Type')),
                   TH(T('Text')),
                   TH(T('Evidence')),
                   TH(T('File Size')),
                   )
    else:
        th_rows = (TH(T(''), _width="5%"),
                   TH(T('Host')),
                   TH(T('Type')),
                   TH(T('Text')),
                   TH(T('Evidence'), _width="35%"),
                   TH(T('File Size')),
                   )

    evidence = TABLE(THEAD(TR(th_rows)), _class="datatable", _id="evidencetable", _style="width:100%")
    return dict(evidence=evidence, host=record)
def vulndata_by_host(): """ Returns a list of vulnerabilties based upon an host identifier (id, ipv4, ipv6) """ record = get_host_record(request.args(0)) if record is None: redirect(URL('default', 'error', vars={'msg': T('Host record not found')})) response.title = "%s :: Vulnerabilities for %s" % (settings.title, host_title_maker(record)) services = db(db.t_services.f_hosts_id==record.id).select(db.t_services.id, db.t_services.f_proto, db.t_services.f_number) if request.extension == "json": aaData = [] for svc in services: # service info q = db(db.t_service_vulns.f_services_id == svc.id).select() for vulninfo in q: atxt = {} exploit_list = [] vulndetails = db(db.t_vulndata.id == vulninfo.f_vulndata_id).select(cache=(cache.ram, 300)).first() exploits = db(db.t_exploit_references.f_vulndata_id == vulninfo.f_vulndata_id).select(orderby=~db.t_exploit_references.id) if len(exploits) > 0: expl_count = "Yes (%d)" % (len(exploits)) for expl in exploits: for expl_data in db(db.t_exploits.id == expl.f_exploit_id).select(cache=(cache.ram, 300)): exp_link = expl_data.f_name if expl_data.f_source == 'exploitdb': exp_link = A(IMG(_align="absmiddle", _width=16, _height=16, _src=URL('static','images/exploitdb.ico')), ' exploitdb - ' + expl_data.f_name,_href='http://www.exploit-db.com/exploits/' + expl_data.f_title, _target="exploitdb_%s" % (expl_data.f_name)) elif expl_data.f_source == 'metasploit': if session.msf_workspace: msf_uri = os.path.join(auth.user.f_msf_pro_url, 'workspaces', session.msf_workspace_num, 'tasks/new_module_run') else: msf_uri = 'http://www.metasploit.com/modules/' exp_link = A(IMG(_align="absmiddle", _width=16, _height=16, _src=URL('static','images/msf.gif')), ' metasploit - ' + expl_data.f_name,_href=os.path.join(msf_uri, expl_data.f_title), _target="msf_%s" % (expl_data.f_name)) elif expl_data.f_source == 'canvas': exp_link = SPAN(IMG(_align="absmiddle", _width=16, _height=16, _src=URL('static','images/canvas.png')), ' canvas - ' + expl_data.f_name) exploit_list.append("%s : %s (%s/%s)" % (expl_data.f_title, exp_link, expl_data.f_rank, expl_data.f_level)) else: expl_count = "" atxt['0'] = IMG(_src=URL(request.application,'static','images/details_open.png')).xml() atxt['1'] = A('edit', _target="service_vuln_update_%s" % (vulninfo.id), _href=URL('vulns', 'service_vulns_edit', args=vulninfo.id, extension='html')).xml() if vulninfo.f_exploited: atxt['2'] = '<input id="exploited" value="' + str(vulninfo.id) + '" type="checkbox", checked>' else: atxt['2'] = '<input id="exploited" value="' + str(vulninfo.id) + '" type="checkbox">' atxt['3'] = "%s/%s" % (svc.f_proto, svc.f_number) atxt['4'] = A(vulndetails.f_vulnid, _target="vulndata_%s" % (vulndetails.id), _href=URL('vulns', 'vulninfo_by_vulnid', args=vulndetails.f_vulnid, extension='html')).xml() atxt['5'] = vulndetails.f_severity atxt['6'] = vulndetails.f_cvss_score atxt['7'] = SPAN(vulninfo.f_status,_id="vulninfo_status",_vulnstatus=vulninfo.f_status).xml() atxt['8'] = expl_count atxt['9'] = MARKMIN(vulninfo.f_proof).xml() atxt['10'] = MARKMIN(vulndetails.f_description).xml() atxt['11'] = vulndetails.f_title atxt['12'] = "<br />\n".join(exploit_list) atxt['DT_RowId'] = vulninfo.id aaData.append(atxt) result = { 'sEcho': request.vars.sEcho, 'iTotalRecords': len(aaData), 'aaData': aaData, } return result add = AddModal( db.t_service_vulns, 'Add', 'Add', 'Add Vulnerability', #fields=[ #], cmd='vulntable.fnReloadAjax();' ) #db.t_service_vulns.f_services_id.default = svc.id svc_set = [] for svc in services: svc_set.append([svc.id, "%s :: 
%s/%s" % (host_title_maker(db.t_hosts[record.id]), svc.f_proto, svc.f_number)]) db.t_service_vulns.f_services_id.requires = IS_IN_SET(svc_set) db.t_service_vulns.id.comment = add.create() form = TABLE(THEAD(TR(TH('', _width="5%"), TH(T(''), _width="5%"), TH(T('Pwned'), width="5%"), TH(T('Port')), TH(T('Vuln ID')), TH(T('Sev')), TH(T('CVSS')), TH(T('Status')), TH(T('Exploits')), TH(T('Proof')), TH(T('Description')), TH(T('Title')), TH(T('Exploit List')), ) ), _class="datatable", _id="vulntable", _style="width:100%") return dict(form=form, host=record, add=add)
def process_xml( filename=None, addnoports=False, asset_group=None, engineer=None, msf_settings={}, ip_ignore_list=None, ip_include_list=None, update_hosts=False, ): # Upload and process Nmap XML Scan file import re import os from skaldship.general import get_host_record, do_host_status from skaldship.cpe import lookup_cpe from zenmapCore_Kvasir.NmapParser import NmapParser # output regexes RE_NETBIOS_NAME = re.compile("NetBIOS computer name: (?P<d>.*),") RE_NETBIOS_WORKGROUP = re.compile("Workgroup: (?P<d>.*),") RE_NETBIOS_MAC = re.compile("NetBIOS MAC: (?P<d>([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2}))") # build the hosts only/exclude list ip_exclude = [] if ip_ignore_list: ip_exclude = ip_ignore_list.split("\r\n") # TODO: check for ip subnet/range and break it out to individuals ip_only = [] if ip_include_list: ip_only = ip_include_list.split("\r\n") # TODO: check for ip subnet/range and break it out to individuals log(" [*] Processing Nmap scan file %s" % (filename)) nmap_parsed = NmapParser() nmap_parsed.parse_file(filename) # existing_vulnids = db(db.t_vulndata()).select(db.t_vulndata.id, db.t_vulndata.f_vulnid).as_dict(key='f_vulnid') # parse the hosts, where all the goodies are log(" [-] Parsing %d hosts" % (len(nmap_parsed.hosts))) hoststats = {} hoststats["added"] = 0 hoststats["skipped"] = 0 hoststats["updated"] = 0 hoststats["errored"] = 0 hosts = [] # array of host_id fields svc_db = db.t_services for node in nmap_parsed.hosts: nodefields = {} if node.ipv6: ipaddr = node.ipv6 nodefields["f_ipv4"] = ipaddr elif node.ip.get("type") == "ipv4": ipaddr = node.ip.get("addr") nodefields["f_ipv4"] = ipaddr else: log(" [!] No IPv4/IPv6 address, skipping") continue try: nodefields["f_macaddr"] = node.mac["addr"] except TypeError: nodefields["f_macaddr"] = None status = node.state log(" [-] Host %s status is: %s" % (ipaddr, status)) if status != "up": hoststats["skipped"] += 1 continue if ipaddr in ip_exclude: log(" [-] Host is in exclude list... skipping") hoststats["skipped"] += 1 continue if len(ip_only) > 0 and ipaddr not in ip_only: log(" [-] Host is not in the only list... skipping") hoststats["skipped"] += 1 continue if not node.ports and not addnoports: log(" [-] No ports open and not asked to add those kind... skipping") hoststats["skipped"] += 1 continue # we'lll just take the last hostname in the names list since it'll usually be the full dns name for name in node.hostnames: nodefields["f_hostname"] = name["hostname"] nodefields["f_engineer"] = engineer nodefields["f_asset_group"] = asset_group nodefields["f_confirmed"] = False # see if host exists, if so update. if not, insert! 
query = (db.t_hosts.f_ipv4 == ipaddr) | (db.t_hosts.f_ipv6 == ipaddr) host_rec = db(query).select().first() if host_rec is None: host_id = db.t_hosts.insert(**nodefields) db.commit() hoststats["added"] += 1 log(" [-] Adding %s" % (ipaddr)) elif host_rec is not None and update_hosts: db.commit() if "f_ipv4" in nodefields: host_id = db(db.t_hosts.f_ipv4 == nodefields["f_ipv4"]).update(**nodefields) else: host_id = db(db.t_hosts.f_ipv6 == nodefields["f_ipv6"]).update(**nodefields) db.commit() host_id = get_host_record(ipaddr) host_id = host_id.id hoststats["updated"] += 1 log(" [-] Updating %s" % (ipaddr)) else: hoststats["skipped"] += 1 db.commit() log(" [-] Skipped %s" % (ipaddr)) continue hosts.append(host_id) # process OS related info for os in node.osmatches: os_id = None host_id = None f_title = os["name"] # title for k in os["osclasses"]: f_cpename = k["cpe"].lstrip("cpe:/o:") f_vendor = k["vendor"] f_product = k["osfamily"] f_version = k["osgen"] f_class = k["type"] f_family = k["osfamily"] f_certainty = k["accuracy"] cpe_res = db((db.t_os.f_cpename == f_cpename) & (db.t_os.f_title == f_title)).select().first() if cpe_res is not None: os_id = cpe_res.id else: try: os_id = db.t_os.insert( f_cpename=f_cpename, f_title=f_title, f_vendor=f_vendor, f_product=f_product, f_version=f_version, ) except Exception, e: logger.error("Error inserting OS: %s" % (e)) db.commit() if os_id and (f_class or f_family or f_certainty): ipaddr = node.ip.get("addr") host_id = get_host_record(ipaddr) host_id = host_id.id try: db.t_host_os_refs.insert( f_certainty=f_certainty, f_family=f_family, f_class=f_class, f_hosts_id=host_id, f_os_id=os_id, ) except Exception, e: logger.error("Error inserting OS: %s" % (e)) db.commit()
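# Example (hedged): the CPE handling above and in mass_assign() both strip the
# leading "cpe:/o:" before calling skaldship.cpe.lookup_cpe(). A tiny
# illustration of that call; the CPE string is an example value only.
from skaldship.cpe import lookup_cpe

cpe_name = 'cpe:/o:linux:linux_kernel:2.6'.lstrip('cpe:/o:')   # -> 'linux:linux_kernel:2.6'
os_id = lookup_cpe(cpe_name)    # returns a t_os id, or None if the CPE is not loaded
if os_id is None:
    log(" [!] CPE not found in t_os: %s" % cpe_name, logging.ERROR)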
def aa_by_host(): """ Returns a list of vulnerabilties per port in a tree view format based upon an host identifier (id, ipv4, ipv6) """ record = get_host_record(request.args(0)) if record is None: redirect(URL('default', 'error', vars={'msg': T('Host record not found')})) treeul=UL(_id='aatree_ul') db_svcs = db.t_services db_svulns = db.t_service_vulns db_vulns = db.t_vulndata services = db(db_svcs.f_hosts_id==record.id).select(db_svcs.f_number, db_svcs.id, db_svcs.f_proto, db_svcs.f_name,orderby=db_svcs.id) tree = DIV(_id="aatree") for svc in services: nexlist = [] nexlist_single = [] expl_count = 0 exploit_list = UL() exploitdb = 0 metasploit = 0 canvas = 0 prev_f_status = '' vulnclass = '' for vulninfo in db( (db_svulns.f_services_id == svc.id) & (db_vulns.id == db_svulns.f_vulndata_id) ).select(orderby=~db_svulns.f_status|~db_vulns.f_severity, cache=(cache.ram, 120)): #init variables vulndetails = vulninfo.t_vulndata vulninfo = vulninfo.t_service_vulns cur_f_status = vulninfo.f_status #Generating the exploit lists exploits = db(db.t_exploit_references.f_vulndata_id == vulninfo.f_vulndata_id).select(orderby=~db.t_exploit_references.id) exploit_list_single = UL() if len(exploits) > 0: for expl in exploits: for expl_data in db(db.t_exploits.id == expl.f_exploit_id).select(db.t_exploits.f_source, db.t_exploits.f_title, db.t_exploits.f_name, db.t_exploits.f_rank, db.t_exploits.f_level): exp_link = expl_data.f_name if expl_data.f_source == 'exploitdb': exploitdb += 1 exp_link = A(IMG(_align="absmiddle", _width=16, _height=16, _src=URL('static','images/exploitdb.ico')), ' exploitdb - ' + expl_data.f_name,_href='http://www.exploit-db.com/exploits/' + expl_data.f_title, _target="exploitdb_%s" % (expl_data.f_name)) elif expl_data.f_source == 'metasploit': metasploit += 1 if session.msf_workspace: msf_uri = auth.user.f_msf_pro_url + "/" + session.msf_workspace + "/modules/" else: msf_uri = 'http://www.metasploit.com/modules/' exp_link = A(IMG(_align="absmiddle", _width=16, _height=16, _src=URL('static','images/msf.gif')), ' metasploit - ' + expl_data.f_name,_href=os.path.join(msf_uri, expl_data.f_title), _target="msf_%s" % (expl_data.f_name)) elif expl_data.f_source == 'canvas': canvas += 1 exp_link = SPAN(IMG(_align="absmiddle", _width=16, _height=16, _src=URL('static','images/canvas.png')), ' canvas - ' + expl_data.f_name) #expl_link = ' canvas - ' + expl_data.f_name expl_count += 1 exploit_list_single.append(LI(expl_data.f_title , " : " , exp_link , " (" , expl_data.f_rank , "/" , expl_data.f_level, ")")) textdecoration="" if vulninfo.f_exploited == True and len(exploits) > 0: textdecoration="text-decoration:line-through underline; " elif vulninfo.f_exploited == True and len(exploits) == 0: textdecoration="text-decoration: line-through; " elif (vulninfo.f_exploited == False or vulninfo.f_exploited == None) and len(exploits) == 0: textdecoration="text-decoration: none;" #generation vuln link style = textdecoration + "color:" + severity_mapping(vulndetails.f_severity - 1)[2] vuln_title_link = A(vulndetails.f_vulnid, _title = vulninfo.f_status+ ' Severity: ' + str(vulndetails.f_severity),_style=style, _target="vulndata_%s" % (vulndetails.id), _href=URL(request.application, 'vulns', 'vulninfo_by_vulnid', args=vulndetails.f_vulnid, extension='html')) if cur_f_status != prev_f_status and prev_f_status != '': nexlist.append(SPAN(nexlist_single, _class=vulnclass)) #for a line in the bottom nexlist.append(' ') nexlist_single = [] else: nexlist_single.append(' ') nexlist_single.append(vuln_title_link ) 
prev_f_status = vulninfo.f_status vulnclass = '' #style for vuln links if vulninfo.f_status == 'vulnerable-version': vulnclass='host_detail_vulnerable-version' if vulninfo.f_status == 'vulnerable-exploited': vulnclass='host_detail_vulnerable-exploited' if vulninfo.f_status == 'potential': vulnclass='host_detail_potential' if len(exploit_list_single) > 0: exploit_list.append(LI(SPAN(vuln_title_link), exploit_list_single)) #attach the last vuln list if len(nexlist_single)>0: nexlist.append(SPAN(nexlist_single, _class=vulnclass)) service_disp=SPAN(svc.f_proto + '/' + svc.f_number + ' - ' + str(svc.f_name)) expl_count = "Exploits - (%d)" % (expl_count) if len(nexlist)>0: if len(exploit_list) == 0: treeul.append(LI(service_disp,UL(LI(nexlist)))) #No exploits else: expl_count = SPAN(expl_count + " : metasploit (%d) exploitdb (%d) canvas (%d)" % (metasploit, exploitdb, canvas),_style="color:red") treeul.append(LI(service_disp,UL(LI(nexlist)), UL(LI(expl_count,exploit_list,_class="closed")))) else: treeul.append(LI(service_disp)) #No vulns tree = DIV(treeul, _id="aatree") return dict(tree=tree)