def portmap_route():
    """visualize portmap"""

    # join allows filtering over host attributes
    states_query = db.session.query(Service.state, func.count(Service.id).label('state_count')).join(Host) \
        .group_by(Service.state).order_by(desc('state_count'))
    if 'filter' in request.values:
        states_query = apply_filters(states_query, FILTER_PARSER.parse(request.values.get('filter')), do_auto_join=False)
    portstates = states_query.all()

    # join allows filtering over host attributes
    ports_query = db.session.query(Service.port, func.count(Service.id)).join(Host) \
        .order_by(Service.port).group_by(Service.port)
    if 'filter' in request.values:
        ports_query = apply_filters(ports_query, FILTER_PARSER.parse(request.values.get('filter')), do_auto_join=False)
    portmap = [{'port': port, 'count': count} for port, count in ports_query.all()]

    # linearly scale element sizes into the [VIZPORTS_LOW, VIZPORTS_HIGH] range;
    # max(1, ...) guards against division by zero when all counts are equal
    counts = [entry['count'] for entry in portmap]
    lowest = min(counts) if counts else 0
    highest = max(counts) if counts else 0
    scale = (VIZPORTS_HIGH - VIZPORTS_LOW) / max(1, (highest - lowest))
    for entry in portmap:
        entry['size'] = VIZPORTS_LOW + ((entry['count'] - lowest) * scale)

    return render_template('visuals/portmap.html', portmap=portmap, portstates=portstates)
def note_list_json_route():
    """list notes, data endpoint"""

    column_defs = [
        ColumnDT(Note.id, mData='id'),
        ColumnDT(Host.id, mData='host_id'),
        ColumnDT(Host.address, mData='host_address'),
        ColumnDT(Host.hostname, mData='host_hostname'),
        # break pylint duplicate-code
        ColumnDT(Service.proto, mData='service_proto'),
        ColumnDT(Service.port, mData='service_port'),
        ColumnDT(func.concat_ws('/', Service.port, Service.proto), mData='service'),
        ColumnDT(Note.via_target, mData='via_target'),
        ColumnDT(Note.xtype, mData='xtype'),
        ColumnDT(Note.data, mData='data'),
        ColumnDT(Note.tags, mData='tags'),
        ColumnDT(Note.comment, mData='comment'),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    # explicit join conditions; notes may lack host or service linkage
    dt_query = db.session.query().select_from(Note) \
        .outerjoin(Host, Note.host_id == Host.id) \
        .outerjoin(Service, Note.service_id == Service.id)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    return jsonify(DataTables(request.values.to_dict(), dt_query, column_defs).output_result())
def service_list_json_route():
    """list services, data endpoint"""

    column_defs = [
        ColumnDT(Service.id, mData='id'),
        ColumnDT(Host.id, mData='host_id'),
        ColumnDT(Host.address, mData='host_address'),
        ColumnDT(Host.hostname, mData='host_hostname'),
        ColumnDT(Service.proto, mData='proto'),
        ColumnDT(Service.port, mData='port'),
        ColumnDT(Service.name, mData='name'),
        ColumnDT(Service.state, mData='state'),
        ColumnDT(Service.info, mData='info'),
        ColumnDT(Service.tags, mData='tags'),
        ColumnDT(Service.comment, mData='comment'),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    dt_query = db.session.query().select_from(Service).outerjoin(Host)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    return jsonify(DataTables(request.values.to_dict(), dt_query, column_defs).output_result())
def job_list_json_route():
    """list jobs, data endpoint"""

    column_defs = [
        ColumnDT(Job.id, mData='id'),
        ColumnDT(Queue.name, mData='queue_name'),
        ColumnDT(Job.assignment, mData='assignment'),
        ColumnDT(Job.retval, mData='retval'),
        ColumnDT(Job.time_start, mData='time_start'),
        ColumnDT(Job.time_end, mData='time_end'),
        # running jobs have no time_end yet; coalesce to "now" so time_taken is always defined
        ColumnDT((func.coalesce(Job.time_end, datetime.utcnow()) - Job.time_start), mData='time_taken'),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    dt_query = db.session.query().select_from(Job).outerjoin(Queue)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    # manual serialization; output presumably contains types plain jsonify cannot handle (e.g. intervals)
    payload = DataTables(request.values.to_dict(), dt_query, column_defs).output_result()
    return Response(json.dumps(payload, cls=SnerJSONEncoder), mimetype='application/json')
def user_list_json_route():
    """list users, data endpoint"""

    column_defs = [
        ColumnDT(User.id, mData='id'),
        ColumnDT(User.username, mData='username'),
        ColumnDT(User.email, mData='email'),
        # expose only whether an apikey is set, never the key itself
        ColumnDT(User.apikey.isnot(None), mData='apikey'),  # pylint: disable=no-member
        ColumnDT(User.roles, mData='roles'),
        ColumnDT(User.active, mData='active'),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    dt_query = db.session.query().select_from(User)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    return jsonify(DataTables(request.values.to_dict(), dt_query, column_defs).output_result())
def vuln_export(qfilter=None):
    """
    Export all vulns in storage without aggregation, one CSV row per vuln.

    :param qfilter: optional filter expression string parsed by FILTER_PARSER
    :return: CSV document as a string
    """

    # IPv6 addresses are rendered in URL-style brackets, IPv4 as plain text
    host_address_format = case([(func.family(Host.address) == 6, func.concat('[', func.host(Host.address), ']'))], else_=func.host(Host.address))
    # prefer hostname as host identity, fall back to the formatted address
    host_ident = case([(func.char_length(Host.hostname) > 0, Host.hostname)], else_=host_address_format)
    endpoint_address = func.concat_ws(':', host_address_format, Service.port)
    endpoint_hostname = func.concat_ws(':', host_ident, Service.port)

    # outer joins keep vulns that lack a host or service linkage
    query = db.session \
        .query(
            host_ident.label('host_ident'),
            Vuln.name.label('vulnerability'),
            Vuln.descr.label('description'),
            Vuln.data,
            func.text(Vuln.severity).label('severity'),
            Vuln.tags,
            endpoint_address.label('endpoint_address'),
            endpoint_hostname.label('endpoint_hostname'),
            Vuln.refs.label('references')
        ) \
        .outerjoin(Host, Vuln.host_id == Host.id) \
        .outerjoin(Service, Vuln.service_id == Service.id)

    if qfilter:
        query = apply_filters(query, FILTER_PARSER.parse(qfilter), do_auto_join=False)

    content_trimmed = False
    # 'id' is not selected by the query; DictWriter fills it with restval ''
    fieldnames = [
        'id', 'host_ident', 'vulnerability', 'severity', 'description', 'data', 'tags',
        'endpoint_address', 'endpoint_hostname', 'references'
    ]
    output_buffer = StringIO()
    output = DictWriter(output_buffer, fieldnames, restval='', quoting=QUOTE_ALL)
    output.writeheader()

    for row in query.all():
        rdata = row._asdict()
        # flatten array columns to newline-separated cell values
        rdata['tags'] = list_to_lines(rdata['tags'])
        rdata['references'] = list_to_lines(map(url_for_ref, rdata['references']))
        # oversized cells get trimmed; remember if any row was affected
        rdata, trim_trigger = trim_rdata(rdata)
        content_trimmed |= trim_trigger
        output.writerow(rdata)

    if content_trimmed:
        output.writerow({'host_ident': 'WARNING: some cells were trimmed'})
    return output_buffer.getvalue()
def excl_list_json_route():
    """list target exclusions, data endpoint"""

    column_defs = [
        ColumnDT(Excl.id, mData='id'),
        ColumnDT(Excl.family, mData='family'),
        ColumnDT(Excl.value, mData='value'),
        ColumnDT(Excl.comment, mData='comment'),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    dt_query = db.session.query().select_from(Excl)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    payload = DataTables(request.values.to_dict(), dt_query, column_defs).output_result()
    return Response(json.dumps(payload, cls=SnerJSONEncoder), mimetype='application/json')
def host_list_json_route():
    """list hosts, data endpoint"""

    # per-host aggregate counters (services, vulns, notes) as subqueries
    cnt_services = db.session.query(Service.host_id, func.count(Service.id).label('cnt')) \
        .group_by(Service.host_id).subquery()
    cnt_vulns = db.session.query(Vuln.host_id, func.count(Vuln.id).label('cnt')) \
        .group_by(Vuln.host_id).subquery()
    cnt_notes = db.session.query(Note.host_id, func.count(Note.id).label('cnt')) \
        .group_by(Note.host_id).subquery()

    column_defs = [
        ColumnDT(Host.id, mData='id'),
        ColumnDT(Host.address, mData='address'),
        ColumnDT(Host.hostname, mData='hostname'),
        ColumnDT(Host.os, mData='os'),
        # hosts without related rows have NULL counters; coalesce to 0
        ColumnDT(func.coalesce(cnt_services.c.cnt, 0), mData='cnt_s', global_search=False),
        ColumnDT(func.coalesce(cnt_vulns.c.cnt, 0), mData='cnt_v', global_search=False),
        ColumnDT(func.coalesce(cnt_notes.c.cnt, 0), mData='cnt_n', global_search=False),
        ColumnDT(Host.tags, mData='tags'),
        ColumnDT(Host.comment, mData='comment'),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    dt_query = db.session.query().select_from(Host) \
        .outerjoin(cnt_services, Host.id == cnt_services.c.host_id) \
        .outerjoin(cnt_vulns, Host.id == cnt_vulns.c.host_id) \
        .outerjoin(cnt_notes, Host.id == cnt_notes.c.host_id)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    return jsonify(DataTables(request.values.to_dict(), dt_query, column_defs).output_result())
def dnstree_json_route():
    """
    dns hierarchy tree visualization data generator

    Builds a dict-of-dicts trie from all known hostnames (components reversed,
    rooted at the synthetic 'DOTROOT' node) and flattens it into d3-style
    node/link lists.
    """

    def to_tree(node, items):
        """insert hostname components (root-first) into the dict-of-dicts trie"""
        if not items:
            # fix: return the existing subtree instead of {}; returning {} wiped
            # already-inserted children when a shorter name shared the prefix
            # (e.g. inserting 'example.com' after 'www.example.com')
            return node
        if items[0] not in node:
            node[items[0]] = {}
        node[items[0]] = to_tree(node[items[0]], items[1:])
        return node

    def to_graph_data(parentid, treedata, nodes, links):
        """walk the trie and emit node and link lists for the visualization"""
        for node in treedata:
            nodeid = len(nodes)
            nodes.append({'name': node, 'id': nodeid})
            if parentid is not None:
                links.append({'source': parentid, 'target': nodeid})
            (nodes, links) = to_graph_data(nodeid, treedata[node], nodes, links)
        return (nodes, links)

    query = Host.query
    if 'filter' in request.values:
        query = apply_filters(query, FILTER_PARSER.parse(request.values.get('filter')), do_auto_join=False)

    # crop drops the N leftmost (most specific) hostname components
    crop = request.values.get('crop', 0, type=int)
    hostnames_tree = {}
    for ihost in query.all():
        if ihost.hostname:
            tmp = list(reversed(ihost.hostname.split('.')[crop:]))
            if tmp:
                hostnames_tree = to_tree(hostnames_tree, ['DOTROOT'] + tmp)

    (nodes, links) = to_graph_data(None, hostnames_tree, [], [])
    # fix: guard against empty result set; unconditional nodes[0] raised
    # IndexError when no host had a hostname (or the filter matched nothing)
    if nodes:
        nodes[0].update({'size': 10})
    return jsonify({'nodes': nodes, 'links': links})
def storage_service_list(**kwargs):
    """
    service listing; used to feed manymap queues from storage data

    Prints one line per stored service, formatted according to the
    --short/--simple/--long flags in kwargs.
    """

    def get_host(svc, hostnames=False):
        """return hostname if requested and available, otherwise formatted address"""
        if hostnames and svc.host.hostname:
            return svc.host.hostname
        return format_host_address(svc.host.address)

    def get_data(svc):
        """return common service attributes as dict"""
        return {
            'proto': svc.proto,
            'port': svc.port,
            'name': svc.name,
            'state': svc.state,
            'info': json.dumps(svc.info)
        }

    if kwargs['long'] and kwargs['short']:
        # fix: corrected misspelled user-facing error message ("mutualy")
        current_app.logger.error('--short and --long are mutually exclusive options')
        sys.exit(1)

    query = Service.query
    if kwargs['filter']:
        query = apply_filters(query, FILTER_PARSER.parse(kwargs['filter']), do_auto_join=False)

    # default output format; flags select alternatives
    fmt = '{proto}://{host}:{port}'
    if kwargs['short']:
        fmt = '{host}'
    elif kwargs['simple']:
        fmt = '{host} {port}'
    elif kwargs['long']:
        fmt = '{proto}://{host}:{port} {name} {state} {info}'

    for tmp in query.all():
        print(fmt.format(**get_data(tmp), host=get_host(tmp, kwargs['hostnames'])))
def portinfos_json_route():
    """service info visualization json data endpoint"""

    info_column = service_info_column(request.args.get('crop'))

    # join allows filter over host attrs; '!= None' must stay — it renders as SQL "IS NOT NULL"
    query = db.session.query(info_column.label('info'), func.count(Service.id).label('info_count')).join(Host) \
        .filter(Service.info != '', Service.info != None).group_by(info_column).order_by(desc('info_count'))  # noqa: E501,E711 pylint: disable=singleton-comparison
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        query = apply_filters(query, parsed, do_auto_join=False)

    limit = request.values.get('limit')
    if limit:
        query = query.limit(limit)

    return jsonify([{'info': info, 'count': count} for info, count in query.all()])
def queue_list_json_route():
    """list queues, data endpoint"""

    # per-queue aggregate counters as subqueries
    nr_targets = db.session.query(Target.queue_id, func.count(Target.id).label('cnt')) \
        .group_by(Target.queue_id).subquery()
    nr_jobs = db.session.query(Job.queue_id, func.count(Job.id).label('cnt')) \
        .group_by(Job.queue_id).subquery()

    column_defs = [
        ColumnDT(Queue.id, mData='id'),
        ColumnDT(Queue.name, mData='name'),
        ColumnDT(Queue.config, mData='config'),
        ColumnDT(Queue.group_size, mData='group_size'),
        ColumnDT(Queue.priority, mData='priority'),
        ColumnDT(Queue.active, mData='active'),
        ColumnDT(Queue.reqs, mData='reqs'),
        # queues without targets/jobs have NULL counters; coalesce to 0
        ColumnDT(func.coalesce(nr_targets.c.cnt, 0), mData='nr_targets', global_search=False),
        ColumnDT(func.coalesce(nr_jobs.c.cnt, 0), mData='nr_jobs', global_search=False),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    dt_query = db.session.query().select_from(Queue) \
        .outerjoin(nr_targets, Queue.id == nr_targets.c.queue_id) \
        .outerjoin(nr_jobs, Queue.id == nr_jobs.c.queue_id)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    return jsonify(DataTables(request.values.to_dict(), dt_query, column_defs).output_result())
def vuln_list_json_route():
    """list vulns, data endpoint"""

    column_defs = [
        ColumnDT(literal_column('1'), mData='_select', search_method='none', global_search=False),
        ColumnDT(Vuln.id, mData='id'),
        ColumnDT(Host.id, mData='host_id'),
        ColumnDT(Host.address, mData='host_address'),
        ColumnDT(Host.hostname, mData='host_hostname'),
        ColumnDT(Service.proto, mData='service_proto'),
        ColumnDT(Service.port, mData='service_port'),
        ColumnDT(func.concat_ws('/', Service.port, Service.proto), mData='service'),
        ColumnDT(Vuln.via_target, mData='via_target'),
        ColumnDT(Vuln.name, mData='name'),
        ColumnDT(Vuln.xtype, mData='xtype'),
        ColumnDT(Vuln.severity, mData='severity'),
        ColumnDT(Vuln.refs, mData='refs'),
        ColumnDT(Vuln.tags, mData='tags'),
        ColumnDT(Vuln.comment, mData='comment'),
        ColumnDT(literal_column('1'), mData='_buttons', search_method='none', global_search=False)
    ]

    # explicit join conditions; vulns may lack host or service linkage
    dt_query = db.session.query().select_from(Vuln) \
        .outerjoin(Host, Vuln.host_id == Host.id) \
        .outerjoin(Service, Vuln.service_id == Service.id)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    payload = DataTables(request.values.to_dict(), dt_query, column_defs).output_result()
    return Response(json.dumps(payload, cls=SnerJSONEncoder), mimetype='application/json')
def vuln_grouped_json_route():
    """view grouped vulns, data endpoint"""

    column_defs = [
        ColumnDT(Vuln.name, mData='name'),
        ColumnDT(Vuln.severity, mData='severity'),
        ColumnDT(Vuln.tags, mData='tags'),
        ColumnDT(func.count(Vuln.id), mData='cnt_vulns', global_search=False),
    ]

    # join allows filter over host attrs
    dt_query = db.session.query().select_from(Vuln).join(Host) \
        .group_by(Vuln.name, Vuln.severity, Vuln.tags)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    payload = DataTables(request.values.to_dict(), dt_query, column_defs).output_result()
    return Response(json.dumps(payload, cls=SnerJSONEncoder), mimetype='application/json')
def service_grouped_json_route():
    """view grouped services, data endpoint"""

    info_column = service_info_column(request.args.get('crop'))
    column_defs = [
        ColumnDT(info_column, mData='info'),
        ColumnDT(func.count(Service.id), mData='cnt_services', global_search=False),
    ]

    # join allows filter over host attrs
    dt_query = db.session.query().select_from(Service).join(Host).group_by(info_column)
    if 'filter' in request.values:
        parsed = FILTER_PARSER.parse(request.values.get('filter'))
        dt_query = apply_filters(dt_query, parsed, do_auto_join=False)

    return jsonify(DataTables(request.values.to_dict(), dt_query, column_defs).output_result())
def portmap_portstat_route(port):
    """generate port statistics fragment"""

    # count of services per protocol on this port
    stats = db.session.query(Service.proto, func.count(Service.id)).join(Host) \
        .filter(Service.port == port) \
        .group_by(Service.proto).order_by(Service.proto)

    # most common service banners/infos; '!= None' renders as SQL "IS NOT NULL"
    infos = db.session.query(Service.info, func.count(Service.id).label('info_count')).join(Host) \
        .filter(Service.port == port, Service.info != '', Service.info != None) \
        .group_by(Service.info).order_by(desc('info_count'))  # noqa: E501,E711 pylint: disable=singleton-comparison

    comments = db.session.query(func.distinct(Service.comment)).join(Host) \
        .filter(Service.port == port, Service.comment != '') \
        .order_by(Service.comment)

    hosts = db.session.query(Host.address, Host.hostname, Host.id).select_from(Service).outerjoin(Host) \
        .filter(Service.port == port).order_by(Host.address)

    # apply the same parsed filter to all four queries
    if 'filter' in request.values:
        parsed_filter = FILTER_PARSER.parse(request.values.get('filter'))
        stats, infos, comments, hosts = (
            apply_filters(qry, parsed_filter, do_auto_join=False)
            for qry in (stats, infos, comments, hosts)
        )

    try:
        portname = getservbyport(int(port))
    except OSError:
        # unknown port; render without a well-known service name
        portname = ''

    return render_template(
        'visuals/portmap_portstat.html',
        port=port,
        portname=portname,
        stats=stats.all(),
        infos=infos.all(),
        hosts=hosts.all(),
        comments=comments.all())
def check(testcase, expected):
    """test helper"""

    parsed = FILTER_PARSER.parse(testcase)
    print('testcase: %s outputs %s' % (testcase, parsed))
    assert parsed == expected
def vuln_report(qfilter=None, group_by_host=False):  # pylint: disable=too-many-locals
    """
    generate report from storage data

    Aggregates vulns by (name, descr, severity, tags) — optionally also by host
    identity — and renders the result as a CSV document.

    :param qfilter: optional filter expression string parsed by FILTER_PARSER
    :param group_by_host: emit one row per host instead of merging hosts into one row
    :return: CSV document as a string
    """

    # IPv6 addresses are rendered in URL-style brackets, IPv4 as plain text
    host_address_format = case([(func.family(Host.address) == 6, func.concat('[', func.host(Host.address), ']'))], else_=func.host(Host.address))
    # prefer hostname as host identity, fall back to the formatted address
    host_ident = case([(func.char_length(Host.hostname) > 0, Host.hostname)], else_=host_address_format)
    endpoint_address = func.concat_ws(':', host_address_format, Service.port)
    endpoint_hostname = func.concat_ws(':', host_ident, Service.port)

    # note1: refs (itself an array) must be unnested in order to be correctly uniq'd and aggregated as individual elements by the used axis
    # note2: unnesting refs should be implemented as
    #   SELECT vuln.name, array_remove(array_agg(urefs.ref), NULL) FROM vuln
    #   LEFT OUTER JOIN LATERAL unnest(vuln.refs) as urefs(ref) ON TRUE
    #   GROUP BY vuln.name;
    # but an appropriate sqla expression could not be constructed: `.label('x(y)')` always rendered as a string instead of 'table with column'
    unnested_refs = db.session.query(Vuln.id, func.unnest(Vuln.refs).label('ref')).subquery()

    query = db.session \
        .query(
            Vuln.name.label('vulnerability'),
            Vuln.descr.label('description'),
            func.text(Vuln.severity).label('severity'),
            Vuln.tags,
            func.array_agg(func.distinct(host_ident)).label('host_ident'),
            func.array_agg(func.distinct(endpoint_address)).label('endpoint_address'),
            func.array_agg(func.distinct(endpoint_hostname)).label('endpoint_hostname'),
            func.array_remove(func.array_agg(func.distinct(unnested_refs.c.ref)), None).label('references')
        ) \
        .outerjoin(Host, Vuln.host_id == Host.id) \
        .outerjoin(Service, Vuln.service_id == Service.id) \
        .outerjoin(unnested_refs, Vuln.id == unnested_refs.c.id) \
        .group_by(Vuln.name, Vuln.descr, Vuln.severity, Vuln.tags)
    if group_by_host:
        # additional grouping key yields one aggregated row per host identity
        query = query.group_by(host_ident)
    if qfilter:
        query = apply_filters(query, FILTER_PARSER.parse(qfilter), do_auto_join=False)

    content_trimmed = False
    # 'id', 'advisory' and 'state' are not produced by the query; DictWriter fills them
    # with restval '' and extrasaction='ignore' drops the surplus row keys (e.g. host_ident)
    fieldnames = [
        'id', 'asset', 'vulnerability', 'severity', 'advisory', 'state',
        'endpoint_address', 'description', 'tags', 'endpoint_hostname', 'references'
    ]
    output_buffer = StringIO()
    output = DictWriter(output_buffer, fieldnames, restval='', extrasaction='ignore', quoting=QUOTE_ALL)
    output.writeheader()

    for row in query.all():
        rdata = row._asdict()
        # endpoint addresses must be counted (not host idents): multiple addresses can collide into the same hostname
        if group_by_host:
            rdata['asset'] = rdata['host_ident'][0]
        else:
            rdata['asset'] = rdata['host_ident'][0] if len(rdata['endpoint_address']) == 1 else 'misc'
        # flatten array columns to newline-separated cell values
        for col in ['endpoint_address', 'endpoint_hostname', 'tags']:
            rdata[col] = list_to_lines(rdata[col])
        rdata['references'] = list_to_lines(map(url_for_ref, rdata['references']))
        # oversized cells get trimmed; remember if any row was affected
        rdata, trim_trigger = trim_rdata(rdata)
        content_trimmed |= trim_trigger
        output.writerow(rdata)

    if content_trimmed:
        output.writerow({'asset': 'WARNING: some cells were trimmed'})
    return output_buffer.getvalue()