def analyzeAlertObservables(alert_id, observables):
    """Run Cortex analyzers against the observables of a TheHive alert.

    For every enabled analyzer, on each Cortex server it is registered
    with, every observable whose ``dataType`` the analyzer supports is
    submitted as an analysis job.  When a job completes successfully the
    first taxonomy level from its report is written back onto the alert
    as the ``reputation`` custom field.

    :param alert_id: TheHive alert id to update
    :param observables: list of observable dicts with ``data``/``dataType``
    :returns: the literal string ``"OK"``
    """
    # BUG FIX: removed the pointless self-assignment "alert_id = alert_id"
    # and the dead trailing "else: pass" branch.
    cortex_url = parser.get('cortex', 'cortex_url')
    cortex_key = parser.get('cortex', 'cortex_key')
    api = hiveInit()
    analyzers = getCortexAnalyzers()
    for analyzer in analyzers:
        # Get our list of Cortex servers (IDs)
        for cortexId in analyzer['cortexIds']:
            # Look through all of our observables
            for observable in observables:
                # Check to see if observable type supported by analyzer
                if observable['dataType'] in analyzer['dataTypeList']:
                    headers = {
                        'Authorization': 'Bearer ' + cortex_key,
                        'Content-Type': 'application/json'
                    }
                    data = {
                        "data": observable['data'],
                        "dataType": observable['dataType']
                    }
                    # Run analyzer
                    startjob = requests.post(
                        cortex_url + '/api/analyzer/' + analyzer['id'] + '/run',
                        headers=headers,
                        data=json.dumps(data),
                        verify=hive_verifycert)
                    wait_interval = '10second'
                    job_id = startjob.json()['id']
                    headers = {'Authorization': 'Bearer ' + cortex_key}
                    # Block until the report is ready (or wait_interval expires)
                    getresults = requests.get(
                        cortex_url + '/api/job/' + job_id +
                        '/waitreport?atMost=' + wait_interval,
                        headers=headers,
                        verify=hive_verifycert)
                    analyzer_results = getresults.json()
                    job_status = analyzer_results['status']
                    if job_status == "Success":
                        level = analyzer_results['report']['summary']['taxonomies'][0]['level']
                        customFields = {"customFields": {}}
                        reputation = dict(order=1, string=level)
                        customFields['customFields']['reputation'] = reputation
                        headers = {
                            'Authorization': 'Bearer ' + hive_key,
                            'Content-Type': 'application/json'
                        }
                        data = json.dumps(customFields)
                        # Write the reputation custom field back to the alert
                        requests.patch(hive_url + '/api/alert/' + alert_id,
                                       headers=headers,
                                       data=data,
                                       verify=hive_verifycert)
    return "OK"
def sendHiveAlert(title, tlp, tags, description, sourceRef, artifact_string):
    """Build a TheHive alert from submitted form fields and create it.

    :param title: alert title
    :param tlp: ignored; overwritten from the ``hive_tlp`` config value
    :param tags: str() of a Python list, e.g. ``"['a', 'b']"``
    :param description: alert description (markdown)
    :param sourceRef: unique source reference for the alert
    :param artifact_string: JSON-encoded list of alert artifacts
    :returns: redirect to TheHive's alert list on success; exits on failure
    """
    hive_url = parser.get('hive', 'hive_url')
    hive_key = parser.get('hive', 'hive_key')
    # BUG FIX: the previous code tested "'False' in hive_verifycert" on the
    # raw config string — a fragile substring check.  Parse the flag as a
    # real boolean, consistent with the other config reads in this module.
    hive_verifycert = parser.getboolean('hive', 'hive_verifycert',
                                        fallback=False)
    tlp = int(parser.get('hive', 'hive_tlp'))
    hiveapi = TheHiveApi(hive_url, hive_key, cert=hive_verifycert)
    # "tags" arrives as the str() of a list; undo that formatting
    newtags = tags.strip('][').replace("'", "").split(', ')
    artifacts = json.loads(artifact_string)
    # Build alert
    hivealert = Alert(title=title,
                      tlp=tlp,
                      tags=newtags,
                      description=description,
                      type='external',
                      source='SecurityOnion',
                      sourceRef=sourceRef,
                      artifacts=artifacts)
    # Send it off
    response = hiveapi.create_alert(hivealert)
    if response.status_code == 201:
        print(json.dumps(response.json(), indent=4, sort_keys=True))
        print('')
    else:
        print('ko: {}/{}'.format(response.status_code, response.text))
        sys.exit(0)
    # Redirect to TheHive instance
    return redirect(hive_url + '/index.html#!/alert/list')
def createRTIRIncident(esid, index):
    """Create an RTIR ticket rendering every field of the event as HTML.

    :param esid: Elasticsearch document id
    :param index: Elasticsearch index to search
    :returns: an HTML snippet linking to the created ticket
    """
    search = getHits(esid, index)
    for result in search['hits']['hits']:
        result = result['_source']
        event_type = result['event_type']
        # BUG FIX: the old code also accumulated a plain-text "rtir_text"
        # and a "description" that were never used — dead work removed.
        # Build an HTML table of every event field; joined once instead of
        # quadratic string concatenation.
        # NOTE(review): values are interpolated without HTML escaping —
        # safe only while event data comes from a trusted pipeline.
        rows = ''.join('<tr><td>' + str(key) + '</td><td>' + str(value) +
                       '</td></tr>' for key, value in result.items())
        rtir_web = ('Content-type: text/html \n <!DOCTYPE html><html>'
                    '<head></head><body><table>' + rows +
                    '</table></body></html>')
        rtir_url = parser.get('rtir', 'rtir_url')
        rtir_api = parser.get('rtir', 'rtir_api')
        rtir_user = parser.get('rtir', 'rtir_user')
        rtir_pass = parser.get('rtir', 'rtir_pass')
        rtir_queue = parser.get('rtir', 'rtir_queue')
        #rtir_owner = parser.get('rtir', 'rtir_owner')
        rtir_url_ticket = parser.get('rtir', 'rtir_url_ticket')
        rtir_classification = parser.get('rtir', 'rtir_classification')
        rtir_subject_line = parser.get('rtir', 'rtir_subject')
        rtir_subject = event_type + rtir_subject_line
        rtir_rt = rt.Rt(str(rtir_url) + '/' + str(rtir_api), rtir_user,
                        rtir_pass, verify_cert=False)
        rtir_rt.login()
        rtir_ticket_id = rtir_rt.create_ticket(
            Queue=rtir_queue,
            Subject=rtir_subject,
            Text=rtir_web,
            **{'CF.{Classification}': rtir_classification})
        rtir_rt.logout()
        rtir_url_ticket_full = rtir_url_ticket + str(rtir_ticket_id)
        # Redirect to RTIR instance
        return ("Ticket created!" + "\n Ticket url is: " +
                "<a href=" + rtir_url_ticket_full + ">" +
                rtir_url_ticket_full + "</a>")
def createHiveCase(esid):
    """Render the TheHive case-creation form for an Elasticsearch event.

    :param esid: Elasticsearch document id
    :returns: the rendered ``hive.html`` template
    """
    search = get_hits(esid)
    tlp = int(parser.get('hive', 'hive_tlp'))
    severity = 2
    for item in search['hits']['hits']:
        result = item['_source']
        es_id = item['_id']
        # Prefer the raw log message; fall back to the whole source doc
        # when the event carries no 'message' field.
        # BUG FIX: was a bare "except:" which hid every error, not just
        # the missing key.
        try:
            description = str(result['message'])
        except KeyError:
            description = str(result)
        sourceRef = str(uuid.uuid4())[0:6]
        tags = ["SecurityOnion"]
        artifacts = []
        event = result['event']
        # (removed dead "src = srcport = dst = dstport = None" — the
        # names were never read)
        if event['dataset'] == 'alert':
            title = result['rule']['name']
        else:
            title = f'New {event["module"].capitalize()} {event["dataset"].capitalize()} Event'
        form = DefaultForm()
        #artifact_string = jsonpickle.encode(artifacts)
        return render_template('hive.html',
                               title=title,
                               description=description,
                               severity=severity,
                               form=form)
def sendHiveAlert(title, tlp, tags, description, sourceRef, artifact_string):
    """Submit a new alert to TheHive built from the supplied form fields.

    The ``tlp`` argument is overridden by the ``hive_tlp`` config value;
    ``tags`` arrives as the str() of a Python list and is parsed back.
    On success the browser is redirected to TheHive's alert list; any
    other API response is printed and the process exits.
    """
    tlp = int(parser.get('hive', 'hive_tlp'))
    hive_api = hiveInit()
    # Undo str(list) formatting, e.g. "['a', 'b']" -> ['a', 'b']
    parsed_tags = tags.strip('][').replace("'", "").split(', ')
    cleaned_description = description.strip('"')
    parsed_artifacts = json.loads(artifact_string)
    # Assemble the alert object
    new_alert = Alert(
        title=title,
        tlp=tlp,
        tags=parsed_tags,
        description=cleaned_description,
        type='external',
        source='SecurityOnion',
        sourceRef=sourceRef,
        artifacts=parsed_artifacts,
    )
    # Ship it to TheHive
    response = hive_api.create_alert(new_alert)
    if response.status_code != 201:
        print('ko: {}/{}'.format(response.status_code, response.text))
        sys.exit(0)
    print(json.dumps(response.json(), indent=4, sort_keys=True))
    print('')
    # Redirect to TheHive instance
    return redirect(hive_url + '/index.html#!/alert/list')
def sendHiveCase(title, description, severity):
    """Create a case through the SOC API, then redirect to TheHive.

    :param title: case title
    :param description: case description (surrounding quotes stripped)
    :param severity: numeric severity, coerced to int
    """
    soc_url = parser.get('soc', 'soc_url')
    payload = {
        "title": title,
        "description": str(description.strip('"')),
        "severity": int(severity),
    }
    response = requests.post(soc_url + '/api/case',
                             headers={'Content-Type': 'application/json'},
                             json=payload,
                             verify=False)
    if response.status_code != 200:
        print('ko: {}/{}'.format(response.status_code, response.text))
        sys.exit(0)
    print(json.dumps(response.json(), indent=4, sort_keys=True))
    print('')
    # Redirect to TheHive instance
    return redirect(hive_url + '/index.html')
def createGRRFlow(esid, flow_name):
    """Launch a GRR flow on the client(s) matching an event's IPs.

    Looks up the event, searches GRR for clients matching its source and
    destination IPs, and starts ``flow_name`` on each match.

    :param esid: Elasticsearch document id
    :param flow_name: name of the GRR flow to create (e.g. "ListProcesses")
    :returns: redirect to the matched client's flows page, or an error string
    """
    search = getHits(esid)
    grr_url = parser.get('grr', 'grr_url')
    grr_user = parser.get('grr', 'grr_user')
    grr_pass = parser.get('grr', 'grr_pass')
    grrapi = api.InitHttp(api_endpoint=grr_url, auth=(grr_user, grr_pass))
    for result in search['hits']['hits']:
        result = result['_source']
        message = result['message']
        description = str(message)
        info = description
        # BUG FIX: source_ip/destination_ip were referenced unconditionally
        # below even when absent from the event, raising NameError.
        source_ip = result.get('source_ip')
        destination_ip = result.get('destination_ip')
        client_id = ''
        for ip in source_ip, destination_ip:
            if ip is None:
                continue
            search_result = grrapi.SearchClients(ip)
            grr_result = {}
            client_id = ''
            for client in search_result:
                # Get client id
                client_id = client.client_id
                client_last_seen_at = client.data.last_seen_at
                grr_result[client_id] = client_last_seen_at
            # BUG FIX: the old "if client_id is None: pass" was a no-op
            # (client_id is '' when unmatched, never None); actually skip
            # IPs with no matching GRR client.
            if not client_id:
                continue
            # Run flow
            flow_obj = grrapi.Client(client_id)
            flow_obj.CreateFlow(name=flow_name)
        if client_id:
            # Redirect to GRR instance
            return redirect(grr_url + '/#/clients/' + client_id + '/flows')
        return "No matches found for source or destination ip"
def createSlackAlert(esid):
    """Post an event's message to a Slack webhook, then redirect there.

    Raises ValueError if the webhook call does not return HTTP 200.
    """
    search = getHits(esid)
    for hit in search['hits']['hits']:
        doc = hit['_source']
        text = str(doc['message'])
        slack_url = parser.get('slack', 'slack_url')
        webhook_url = parser.get('slack', 'slack_webhook')
        response = requests.post(
            webhook_url,
            data=json.dumps({'text': text}),
            headers={'Content-Type': 'application/json'})
        if response.status_code != 200:
            raise ValueError(
                'Request to slack returned an error %s, the response is:\n%s'
                % (response.status_code, response.text))
        # Redirect to Slack workspace
        return redirect(slack_url)
def createRTIRIncident(esid):
    """File an RTIR ticket for the given event and redirect to the RTIR UI.

    Ticket subject/body are derived from the event's module/dataset and
    raw message; connection settings come from the ``rtir`` config section.
    """
    hits = get_hits(esid)
    # Pull every rtir_* connection setting up front
    cfg = {opt: parser.get('rtir', 'rtir_' + opt)
           for opt in ('url', 'api', 'user', 'pass', 'queue', 'creator')}
    verify_cert = parser.getboolean('rtir', 'rtir_verifycert', fallback=False)
    for hit in hits['hits']['hits']:
        doc = hit['_source']
        event = doc['event']
        subject = f'New {event["module"]}_{event["dataset"]} Event From Security Onion'
        body = str(doc['message'])
        tracker = rt.Rt(cfg['url'] + '/' + cfg['api'], cfg['user'],
                        cfg['pass'], verify_cert=verify_cert)
        tracker.login()
        tracker.create_ticket(Queue=cfg['queue'],
                              Owner=cfg['creator'],
                              Subject=subject,
                              Text=body)
        tracker.logout()
    # Redirect to RTIR instance
    return redirect(cfg['url'])
def runOTX():
    '''Retrieve intel from OTXv2 API and cache each IOC in memcached.

    Every indicator of a supported type is stored under the key
    "<type>-<ioc>" with a "<pulse name>-<pulse id>" tag, expiring after
    the configured agetime.  Writes are best-effort.
    '''
    days = int(parser.get('otx', 'days_of_history'))
    key = parser.get('otx', 'api_key')
    mem_host = parser.get('memcached', 'mem_host')
    mem_port = int(parser.get('memcached', 'mem_port'))
    memcached = Client((mem_host, mem_port))
    memcached_agetime = int(parser.get('memcached', 'agetime'))
    memcached_sleeptime = int(parser.get('memcached', 'sleeptime'))
    # Only pulses modified within the history window
    mtime = (datetime.now() - timedelta(days=days)).isoformat()
    for pulse in iter_pulses(key, mtime):
        pulse_name = pulse['name']
        pulse_id = pulse['id']
        for indicator in pulse[u'indicators']:
            ioc = indicator['indicator']
            ioc_type = map_indicator_type(indicator[u'type'])
            # Skip indicator types we do not map
            if ioc_type is None:
                continue
            tag = pulse_name + '-' + pulse_id
            # (removed dead lookup of pulse[u'references'] — the resulting
            # "url" variable was never used)
            memcached_key = ioc_type + '-' + ioc
            # Best-effort cache write: a memcached hiccup should not abort
            # the whole intel pull.
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit.
            try:
                memcached.set(memcached_key.encode('utf-8'),
                              tag.encode('utf-8'),
                              memcached_agetime)
            except Exception:
                pass
            # Throttle writes per config
            time.sleep(memcached_sleeptime)
def createRTIRIncident(esid):
    """File an RTIR ticket for an event (legacy ``event_type`` schema).

    :param esid: Elasticsearch document id
    :returns: redirect to the RTIR instance
    """
    search = getHits(esid)
    for result in search['hits']['hits']:
        result = result['_source']
        message = result['message']
        description = str(message)
        event_type = result['event_type']
        rtir_url = parser.get('rtir', 'rtir_url')
        # BUG FIX: this value was read into "rtir_uri" while the code
        # below referenced the undefined name "rtir_api", raising a
        # NameError at runtime.
        rtir_api = parser.get('rtir', 'rtir_api')
        rtir_user = parser.get('rtir', 'rtir_user')
        rtir_pass = parser.get('rtir', 'rtir_pass')
        rtir_queue = parser.get('rtir', 'rtir_queue')
        rtir_creator = parser.get('rtir', 'rtir_creator')
        rtir_subject = 'New ' + event_type + ' event from Security Onion!'
        rtir_text = description
        rtir_rt = rt.Rt(rtir_url + '/' + rtir_api, rtir_user, rtir_pass,
                        verify_cert=False)
        rtir_rt.login()
        rtir_rt.create_ticket(Queue=rtir_queue,
                              Owner=rtir_creator,
                              Subject=rtir_subject,
                              Text=rtir_text)
        rtir_rt.logout()
        # Redirect to RTIR instance
        return redirect(rtir_url)
def createMISPEvent(esid):
    """Create a MISP event from an ES hit and attach src/dst IP attributes.

    :param esid: Elasticsearch document id
    :returns: redirect to the MISP events index
    """
    search = get_hits(esid)
    # MISP Stuff
    misp_url = parser.get('misp', 'misp_url')
    misp_key = parser.get('misp', 'misp_key')
    misp_verifycert = parser.getboolean('misp', 'misp_verifycert',
                                        fallback=False)
    distrib = parser.get('misp', 'distrib')
    threat = parser.get('misp', 'threat')
    analysis = parser.get('misp', 'analysis')
    # Hoisted out of the per-hit loop: the client is loop-invariant and was
    # previously re-created (via a locally re-defined helper) on every hit.
    misp = PyMISP(misp_url, misp_key, ssl=misp_verifycert, debug=True)
    for result in search['hits']['hits']:
        result = result['_source']
        # The raw log message becomes the MISP event "info"
        info = str(result['message'])
        event = misp.new_event(distrib, threat, analysis, info)
        event_id = str(event['Event']['id'])
        if result.get('source', {}).get('ip'):
            misp.add_named_attribute(event_id, "ip-src",
                                     result['source']['ip'])
        if result.get('destination', {}).get('ip'):
            misp.add_named_attribute(event_id, "ip-dst",
                                     result['destination']['ip'])
    # Redirect to MISP instance
    return redirect(misp_url + '/events/index')
def doUpdate(esindex, esid, tags):
    """Overwrite the ``tags`` field of a single Elasticsearch document.

    ``esindex`` arrives as "<cluster>:<index>"; only the local index name
    after the colon is used for the update call.  Returns the raw update
    response.
    """
    es_client = Elasticsearch(parser.get('es', 'es_url'))
    local_index = esindex.split(":")[1]
    return es_client.update(index=local_index,
                            doc_type="_doc",
                            id=esid,
                            body={"doc": {"tags": tags}},
                            refresh=True)
def createFIREvent(esid):
    """File a FIR incident for an event (legacy ``event_type`` schema).

    :param esid: Elasticsearch document id
    :returns: redirect to the FIR events page
    """
    search = getHits(esid)
    for result in search['hits']['hits']:
        result = result['_source']
        message = result['message']
        event_type = result['event_type']
        description = str(message)
        fir_api = '/api/incidents'
        fir_url = parser.get('fir', 'fir_url')
        fir_token = parser.get('fir', 'fir_token')
        actor = parser.get('fir', 'fir_actor')
        category = parser.get('fir', 'fir_category')
        confidentiality = parser.get('fir', 'fir_confidentiality')
        detection = parser.get('fir', 'fir_detection')
        plan = parser.get('fir', 'fir_plan')
        severity = parser.get('fir', 'fir_severity')
        subject = str('New ' + event_type + ' event from Security Onion!')
        headers = {
            'Authorization': 'Token ' + fir_token,
            'Content-type': 'application/json'
        }
        # BUG FIX: a GET to the same endpoint was issued here and its
        # response immediately discarded — dead network round-trip removed.
        data = {
            "actor": actor,
            "category": category,
            "confidentiality": confidentiality,
            "description": description,
            "detection": detection,
            "plan": plan,
            "severity": int(severity),
            "subject": subject
        }
        requests.post(fir_url + fir_api,
                      headers=headers,
                      data=json.dumps(data),
                      verify=False)
        # Redirect to FIR instance
        return redirect(fir_url + '/events')
def createFIREvent(esid):
    """Create a FIR incident for the given event and redirect to FIR.

    Static incident fields come from the ``fir`` config section; subject
    and description are derived from the event document.
    """
    search = get_hits(esid)
    fir_api = '/api/incidents'
    fir_url = parser.get('fir', 'fir_url')
    fir_token = parser.get('fir', 'fir_token')
    fir_actor = parser.get('fir', 'fir_actor')
    fir_category = parser.get('fir', 'fir_category')
    fir_confidentiality = parser.get('fir', 'fir_confidentiality')
    fir_detection = parser.get('fir', 'fir_detection')
    fir_plan = parser.get('fir', 'fir_plan')
    fir_severity = parser.get('fir', 'fir_severity')
    verify_cert = parser.getboolean('fir', 'fir_verifycert', fallback=False)
    request_headers = {
        'Authorization': 'Token ' + fir_token,
        'Content-type': 'application/json'
    }
    for hit in search['hits']['hits']:
        doc = hit['_source']
        event = doc['event']
        payload = {
            "actor": fir_actor,
            "category": fir_category,
            "confidentiality": fir_confidentiality,
            "description": str(doc['message']),
            "detection": fir_detection,
            "plan": fir_plan,
            "severity": int(fir_severity),
            "subject": f'New {event["module"]}_{event["dataset"]} Event From Security Onion'
        }
        requests.post(fir_url + fir_api,
                      headers=request_headers,
                      data=json.dumps(payload),
                      verify=verify_cert)
    # Redirect to FIR instance
    return redirect(fir_url + '/events')
def getHits(esid):
    """Look up a document by ``_id`` across all logstash indices.

    Returns the raw search response when at least one document matched;
    otherwise falls through and returns None.
    """
    es_client = Elasticsearch(parser.get('es', 'es_url'))
    query = {"query": {"bool": {"must": {"match": {'_id': esid}}}}}
    results = es_client.search(index="*:logstash-*",
                               doc_type="doc",
                               body=query)
    if results['hits']['total'] > 0:
        return results
def getConn(conn_id):
    """Fetch the bro_conn document matching a Zeek connection uid.

    Returns the raw search response when a match exists; otherwise falls
    through and returns None.
    """
    es_client = Elasticsearch(parser.get('es', 'es_url'))
    query = {
        "query": {
            "bool": {
                "must": [
                    {"match": {"event_type": "bro_conn"}},
                    {"match": {"uid": conn_id}},
                ]
            }
        }
    }
    results = es_client.search(index="*:logstash-*",
                               doc_type="doc",
                               body=query)
    if results['hits']['total'] > 0:
        return results
from config import parser, es_index import playbook import json import uuid import sys import rt import requests import os import base64 import time import jsonpickle import urllib3 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) es_url = parser.get('es', 'es_url') hive_url = parser.get('hive', 'hive_url') hive_key = parser.get('hive', 'hive_key') hive_verifycert = parser.getboolean('hive', 'hive_verifycert', fallback=False) def hiveInit(): return TheHiveApi(hive_url, hive_key, cert=hive_verifycert) def createHiveAlert(esid): search = get_hits(esid) # Hive Stuff hive_url = parser.get('hive', 'hive_url') hive_api = hiveInit() tlp = int(parser.get('hive', 'hive_tlp'))
import os import re import shutil import subprocess import uuid from time import gmtime, strftime import requests import ruamel.yaml from config import parser yaml = ruamel.yaml.YAML(typ='safe') playbook_headers = { 'X-Redmine-API-Key': parser.get("playbook", "playbook_key"), 'Content-Type': 'application/json' } playbook_url = parser.get("playbook", "playbook_url") hive_headers = { 'Authorization': f"Bearer {parser.get('hive', 'hive_key')}", 'Accept': 'application/json, text/plain', 'Content-Type': 'application/json;charset=utf-8' } def navigator_update(): #Get play data from Redmine url = f"{playbook_url}/issues.json?status_id=3" response_data = requests.get(url, headers=playbook_headers,
def processHiveReq(webhook_content):
    """Dispatch a TheHive webhook payload.

    Two event types are acted on:
    * ``alert_creation`` — when ``auto_analyze_alerts`` is "yes", run the
      Cortex analyzers over the new alert's artifacts.
    * ``case_task_creation`` — when the task title follows the
      "... Analyzer - <name>" convention and the analyzer is in
      ``supported_analyzers``, start the task, run the analyzer on every
      supported case observable, append a task log, and close the task.

    :param webhook_content: decoded webhook JSON from TheHive
    :returns: the literal string "success"
    """
    api = hiveInit()
    # getHiveStatus returns "<id> <status>" as a single space-separated string
    event_details = getHiveStatus(webhook_content)
    event_id = event_details.split(' ')[0]
    event_status = event_details.split(' ')[1]
    auto_analyze_alerts = parser.get('cortex', 'auto_analyze_alerts')
    # Run analyzers before case import
    if event_status == "alert_creation":
        if auto_analyze_alerts == "yes":
            sys.stdout.flush()
            alert_id = webhook_content['objectId']
            observables = webhook_content['object']['artifacts']
            analyzeAlertObservables(alert_id, observables)
    # Check to see if new case creation
    #if event_status == "case_creation":
    #    try:
    #        observables = api.get_case_observables(case_id).json()
    #    except:
    #        pass
    #    else:
    #        analyzeCaseObservables(observables)
    # Check to see if we are creating a new task
    if event_status == "case_task_creation":
        headers = {'Authorization': 'Bearer ' + hive_key}
        task = webhook_content
        task_id = webhook_content['objectId']
        task_status = "InProgress"
        task_group = webhook_content['object']['group']
        task_case = webhook_content['object']['_parent']
        task_title = webhook_content['object']['title']
        #task_desc = webhook_content['object']['description']
        # Check the task to see if it matches our convention for
        # auto-analyze tasks (via Playbook, etc)
        if "Analyzer" in task_title:
            # Title convention: "<something> - <analyzer name>"
            analyzer_minimal = task_title.split(" - ")[1]
            enabled_analyzers = getCortexAnalyzers()
            supported_analyzers = parser.get('cortex',
                                             'supported_analyzers').split(",")
            if analyzer_minimal in supported_analyzers:
                # Start task
                # NOTE(review): status is sent form-encoded here while the
                # log POST below uses JSON — confirm TheHive accepts both.
                response = requests.patch(hive_url + '/api/case/task/' + task_id,
                                          headers=headers,
                                          data={'status': task_status},
                                          verify=False)
                # Get observables related to case
                observables = api.get_case_observables(task_case).json()
                for analyzer in enabled_analyzers:
                    if analyzer_minimal in analyzer['name']:
                        for cortexId in analyzer['cortexIds']:
                            # Look through all of our observables
                            for observable in observables:
                                # Check to see if observable type supported by analyzer
                                if observable['dataType'] in analyzer['dataTypeList']:
                                    # Run analyzer
                                    api.run_analyzer(cortexId,
                                                     observable['id'],
                                                     analyzer['id'])
                #analyzeCaseObservables(observables)
                # Add task log
                headers = {
                    'Authorization': 'Bearer ' + hive_key,
                    'Content-Type': 'application/json'
                }
                task_log = "Automation - Ran " + analyzer_minimal + " analyzer."
                data = {'message': task_log}
                response = requests.post(hive_url + '/api/case/task/' + task_id + '/log',
                                         headers=headers,
                                         data=json.dumps(data),
                                         verify=False)
                # Close task
                task_status = "Completed"
                response = requests.patch(hive_url + '/api/case/task/' + task_id,
                                          headers=headers,
                                          data={'status': task_status},
                                          verify=False)
    sys.stdout.flush()
    return "success"
def createHiveAlert(esid):
    """Render the TheHive alert-creation form for an event (legacy schema).

    Looks the event up by id, then builds a title, tag list and artifact
    list according to its ``event_type`` (NIDS / Bro / Wazuh-OSSEC /
    Sysmon / other) and renders ``hive.html`` pre-filled with them.

    :param esid: Elasticsearch document id
    :returns: the rendered ``hive.html`` template
    """
    search = getHits(esid)
    #Hive Stuff
    #es_url = parser.get('es', 'es_url')
    hive_url = parser.get('hive', 'hive_url')
    api = hiveInit()
    tlp = int(parser.get('hive', 'hive_tlp'))
    for result in search['hits']['hits']:
        # Get initial details
        message = result['_source']['message']
        es_id = result['_id']
        description = str(message)
        # Short random reference for the alert
        sourceRef = str(uuid.uuid4())[0:6]
        tags = ["SecurityOnion"]
        artifacts = []
        id = None
        # Index name arrives as "<cluster>:<index>"
        host = str(result['_index']).split(":")[0]
        index = str(result['_index']).split(":")[1]
        event_type = result['_source']['event_type']
        # NOTE(review): src/dst/srcport/dstport stay unbound when the
        # fields are missing; the NIDS branch below assumes all four exist.
        if 'source_ip' in result['_source']:
            src = str(result['_source']['source_ip'])
        if 'destination_ip' in result['_source']:
            dst = str(result['_source']['destination_ip'])
        if 'source_port' in result['_source']:
            srcport = str(result['_source']['source_port'])
        if 'destination_port' in result['_source']:
            dstport = str(result['_source']['destination_port'])
        # NIDS Alerts
        if 'ids' in event_type:
            alert = result['_source']['alert']
            sid = str(result['_source']['sid'])
            category = result['_source']['category']
            sensor = result['_source']['sensor_name']
            # Manager IP extracted from the ES URL (scheme://host:port)
            masterip = str(es_url.split("//")[1].split(":")[0])
            tags.append("nids")
            tags.append(category)
            title = alert
            print(alert)
            sys.stdout.flush()
            # Add artifacts
            artifacts.append(AlertArtifact(dataType='ip', data=src))
            artifacts.append(AlertArtifact(dataType='ip', data=dst))
            artifacts.append(AlertArtifact(dataType='other', data=sensor))
            # Markdown description linking back to the Kibana NIDS
            # dashboard and the PCAP lookup for this event
            description = "`NIDS Dashboard:` \n\n <https://" + masterip + "/kibana/app/kibana#/dashboard/ed6f7e20-e060-11e9-8f0c-2ddbf5ed9290?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(_source),index:'*:logstash-*',interval:auto,query:(query_string:(analyze_wildcard:!t,query:'sid:" + sid + "')),sort:!('@timestamp',desc))> \n\n `IPs: `" + src + ":" + srcport + "-->" + dst + ":" + dstport + "\n\n `Signature:`" + alert + "\n\n `PCAP:` " + "https://" + masterip + "/kibana/app//sensoroni/securityonion/joblookup?redirectUrl=/sensoroni/&esid=" + es_id
        # Bro logs
        elif 'bro' in event_type:
            # Human-readable names for Bro/Zeek log types
            _map_key_type = {
                "conn": "Connection",
                "dhcp": "DHCP",
                "dnp3": "DNP3",
                "dns": "DNS",
                "files": "Files",
                "ftp": "FTP",
                "http": "HTTP",
                "intel": "Intel",
                "irc": "IRC",
                "kerberos": "Kerberos",
                "modbus": "Modbus",
                "mysql": "MySQL",
                "ntlm": "NTLM",
                "pe": "PE",
                "radius": "RADIUS",
                "rdp": "RDP",
                "rfb": "RFB",
                "sip": "SIP",
                "smb": "SMB",
                "smtp": "SMTP",
                "snmp": "SNMP",
                "ssh": "SSH",
                "ssl": "SSL",
                "syslog": "Syslog",
                "weird": "Weird",
                "x509": "X509"
            }

            def map_key_type(indicator_type):
                ''' Maps a key type to use in the request URL. '''
                return _map_key_type.get(indicator_type)

            # NOTE(review): strip('bro_') strips characters, not the
            # prefix — works for these names but is fragile.
            bro_tag = event_type.strip('bro_')
            bro_tag_title = map_key_type(bro_tag)
            title = str('New Bro ' + bro_tag_title + ' record!')
            if 'source_ip' in result['_source']:
                artifacts.append(AlertArtifact(dataType='ip', data=src))
            if 'destination_ip' in result['_source']:
                artifacts.append(AlertArtifact(dataType='ip', data=dst))
            if 'sensor_name' in result['_source']:
                sensor = str(result['_source']['sensor_name'])
                artifacts.append(AlertArtifact(dataType='other', data=sensor))
            # Append the connection/file uid to the title when present
            if 'uid' in result['_source']:
                uid = str(result['_source']['uid'])
                title = str('New Bro ' + bro_tag_title + ' record! - ' + uid)
                artifacts.append(AlertArtifact(dataType='other', data=uid))
            if 'fuid' in result['_source']:
                fuid = str(result['_source']['fuid'])
                title = str('New Bro ' + bro_tag_title + ' record! - ' + fuid)
                artifacts.append(AlertArtifact(dataType='other', data=fuid))
            if 'id' in result['_source']:
                fuid = str(result['_source']['id'])
                title = str('New Bro ' + bro_tag_title + ' record! - ' + fuid)
                artifacts.append(AlertArtifact(dataType='other', data=fuid))
            tags.append('bro')
            tags.append(bro_tag)
        # Wazuh/OSSEC logs
        elif 'ossec' in event_type:
            agent_name = result['_source']['agent']['name']
            if 'description' in result['_source']:
                ossec_desc = result['_source']['description']
            else:
                ossec_desc = result['_source']['full_log']
            if 'ip' in result['_source']['agent']:
                agent_ip = result['_source']['agent']['ip']
                artifacts.append(AlertArtifact(dataType='ip', data=agent_ip))
                artifacts.append(
                    AlertArtifact(dataType='other', data=agent_name))
            else:
                artifacts.append(
                    AlertArtifact(dataType='other', data=agent_name))
            title = ossec_desc
            tags.append("wazuh")
        elif 'sysmon' in event_type:
            # Sysmon events can arrive via Wazuh or via Beats
            if 'ossec' in result['_source']['tags']:
                agent_name = result['_source']['agent']['name']
                agent_ip = result['_source']['agent']['ip']
                ossec_desc = result['_source']['full_log']
                artifacts.append(AlertArtifact(dataType='ip', data=agent_ip))
                artifacts.append(
                    AlertArtifact(dataType='other', data=agent_name))
                tags.append("wazuh")
            elif 'beat' in result['_source']['tags']:
                agent_name = str(result['_source']['beat']['hostname'])
                if 'beat_host' in result['_source']:
                    os_name = str(result['_source']['beat_host']['os']['name'])
                    artifacts.append(
                        AlertArtifact(dataType='other', data=os_name))
                if 'source_hostname' in result['_source']:
                    source_hostname = str(result['_source']['source_hostname'])
                    artifacts.append(
                        AlertArtifact(dataType='fqdn', data=source_hostname))
                if 'source_ip' in result['_source']:
                    source_ip = str(result['_source']['source_ip'])
                    artifacts.append(
                        AlertArtifact(dataType='ip', data=source_ip))
                if 'destination_ip' in result['_source']:
                    destination_ip = str(result['_source']['destination_ip'])
                    artifacts.append(
                        AlertArtifact(dataType='ip', data=destination_ip))
                if 'image_path' in result['_source']:
                    image_path = str(result['_source']['image_path'])
                    artifacts.append(
                        AlertArtifact(dataType='filename', data=image_path))
                # "Hashes" is a comma-separated "ALG=value" list
                if 'Hashes' in result['_source']['event_data']:
                    hashes = result['_source']['event_data']['Hashes']
                    for hash in hashes.split(','):
                        if hash.startswith('MD5') or hash.startswith('SHA256'):
                            artifacts.append(
                                AlertArtifact(dataType='hash',
                                              data=hash.split('=')[1]))
                tags.append("beats")
            else:
                agent_name = ''
            title = "New Sysmon Event! - " + agent_name
        else:
            # Fallback for any other event type
            title = "New " + event_type + " Event From Security Onion"
        form = DefaultForm()
        # Artifacts are passed to the template as a JSON string
        artifact_string = jsonpickle.encode(artifacts)
        return render_template('hive.html',
                               title=title,
                               tlp=tlp,
                               tags=tags,
                               description=description,
                               artifact_string=artifact_string,
                               sourceRef=sourceRef,
                               form=form)
def createGRRFlow(esid, flow_name):
    """Run a ListProcesses-style flow via GRR's HTTP API and alert TheHive.

    Finds GRR clients matching the event's source/destination IPs, starts
    the flow through raw HTTP helpers (listProcessFlow), polls until it
    terminates, downloads the results zip, and files a TheHive alert with
    the zip as a file artifact.

    :param esid: Elasticsearch document id
    :param flow_name: flow to run (currently unused; listProcessFlow is fixed)
    :returns: redirect to the client's GRR flows page, or an error string
    """
    search = getHits(esid)
    tlp = int(parser.get('hive', 'hive_tlp'))
    # Check if verifying cert
    # NOTE(review): substring test on the raw config string — fragile;
    # other functions in this module use parser.getboolean.
    if 'False' in hive_verifycert:
        hiveapi = TheHiveApi(hive_url, hive_key, cert=False)
    else:
        hiveapi = TheHiveApi(hive_url, hive_key, cert=True)
    grr_url = parser.get('grr', 'grr_url')
    grr_user = parser.get('grr', 'grr_user')
    grr_pass = parser.get('grr', 'grr_pass')
    grrapi = api.InitHttp(api_endpoint=grr_url, auth=(grr_user, grr_pass))
    # Build Basic-auth + CSRF headers for the raw GRR HTTP endpoints
    base64string = '%s:%s' % (grr_user, grr_pass)
    base64string = base64.b64encode(bytes(base64string, "utf-8"))
    authheader = "Basic %s" % base64string
    index_response = requests.get(grr_url,
                                  auth=HTTPBasicAuth(grr_user, grr_pass))
    csrf_token = index_response.cookies.get("csrftoken")
    headers = {
        "Authorization": authheader,
        "x-csrftoken": csrf_token,
        "x-requested-with": "XMLHttpRequest"
    }
    cookies = {"csrftoken": csrf_token}
    for result in search['hits']['hits']:
        result = result['_source']
        message = result['message']
        description = str(message)
        info = description
        # NOTE(review): source_ip/destination_ip stay unbound when the
        # fields are missing, making the loop below raise NameError.
        if 'source_ip' in result:
            source_ip = result['source_ip']
        if 'destination_ip' in result:
            destination_ip = result['destination_ip']
        for ip in source_ip, destination_ip:
            search_result = grrapi.SearchClients(ip)
            grr_result = {}
            client_id = ''
            for client in search_result:
                # Get client id
                client_id = client.client_id
                client_last_seen_at = client.data.last_seen_at
                grr_result[client_id] = client_last_seen_at
            #flow_name = "ListProcesses"
            # NOTE(review): no-op guard — client_id is '' (never None)
            # when unmatched, so this never skips anything.
            if client_id is None:
                pass
            # Process flow and get flow id
            flow_id = listProcessFlow(client_id, grr_url, headers, cookies,
                                      grr_user, grr_pass)
            # Get status
            status = checkFlowStatus(client_id, grr_url, flow_id, headers,
                                     cookies, grr_user, grr_pass)
            # Keep checking to see if complete
            while status != "terminated":
                time.sleep(15)
                print(
                    "Flow not yet completed..watiing 15 secs before attempting to check status again..."
                )
                status = checkFlowStatus(client_id, grr_url, flow_id,
                                         headers, cookies, grr_user,
                                         grr_pass)
            # If terminated, run the download
            if status == "terminated":
                downloadFlowResults(client_id, grr_url, flow_id, headers,
                                    cookies, grr_user, grr_pass)
            #print("Done!")
            # Run flow via API client
            #flow_obj = grrapi.Client(client_id)
            #flow_obj.CreateFlow(name=flow_name)
            title = "Test Alert with GRR Flow"
            description = str(message)
            sourceRef = str(uuid.uuid4())[0:6]
            tags = ["SecurityOnion", "GRR"]
            artifacts = []
            id = None
            # downloadFlowResults writes the results zip here
            filepath = "/tmp/soctopus/" + client_id + ".zip"
            artifacts.append(
                AlertArtifact(dataType='file', data=str(filepath)))
            # Build alert
            hivealert = Alert(title=title,
                              tlp=tlp,
                              tags=tags,
                              description=description,
                              type='external',
                              source='SecurityOnion',
                              sourceRef=sourceRef,
                              artifacts=artifacts)
            # Send it off
            response = hiveapi.create_alert(hivealert)
        if client_id:
            # Redirect to GRR instance
            return redirect(grr_url + '/#/clients/' + client_id + '/flows')
        else:
            return "No matches found for source or destination ip"
async def timetable(request):
    """NUGU speaker intent handler: answer a school timetable query.

    Validates the request token, resolves the school name (optionally
    disambiguated by location), fetches the timetable for the requested
    date/grade/class, and returns a NUGU response — adding a
    Display.TextList1 directive when the device supports a screen.

    :param request: aiohttp request carrying the NUGU action payload
    :returns: aiohttp json_response (always HTTP 200 for handled errors)
    """
    if not request.body_exists or not isconvert(await request.text()):
        return forbidden
    parameters = await request.json()
    action = parameters.get('action')
    version = parameters.get('version')
    context = parameters.get('context')
    # NOTE(review): "in" test runs before the None check, so a missing
    # action raises TypeError instead of returning forbidden — confirm.
    if "parameters" in action or action is not None:
        parameters = action.get('parameters')
        # Reject requests that do not carry our shared secret
        if parameters.get('KEY').get('value') != parser.get('TOKEN', 'key'):
            return forbidden
    else:
        return forbidden
    display = False
    if context is not None:
        interface = context.get("supportedInterfaces")
        if "Display" in interface:
            # Device has a screen: remember what we need for directives
            display = True
            display_version = interface.get("Display").get("version")
            display_token = interface.get("Display").get("token")
            playServiceId = interface.get("Display").get("playServiceId")
    school_nm = parameters.get('TIMETABLE_SCHOOL_NM').get('value')
    sc_m = school(school_nm)
    if 'TIMETABLE_LCP' in parameters:
        location = parameters.get('TIMETABLE_LCP').get('value')
        data = await sc_m.school(location)
    else:
        data = await sc_m.school()
    if data is school_exception.NotFound:
        return json_response(exception("school_not_found", version),
                             status=200)
    elif data is school_exception.Internal_Server_Error:
        return json_response(exception("backend_proxy_error", version),
                             status=200)
    if len(data) > 1:
        # Multiple schools matched: detect whether they are genuinely in
        # different regions and, if so, ask the user to disambiguate.
        location_data = []
        for i in data:
            for j in data:
                if i == j:
                    continue
                # To tell branch-school false matches apart, the names must
                # share the same ending; and to distinguish names like
                # "Ganada Elem." vs "Nada Elem.", the leftover prefix must
                # be at least 2 characters long.
                elif i['SCHUL_NM'].endswith(
                        j['SCHUL_NM']) and i not in location_data and len(
                            i['SCHUL_NM'].replace(j['SCHUL_NM'], "")) > 1:
                    location_data.append(i)
        locate = {}
        for i in location_data:
            # index 0: province/city | index 1: city/county/district
            location_i = i.get('ORG_RDNMA').split()
            if location_i[0] not in locate:
                locate[location_i[0]] = list()
            if location_i[1] not in locate[location_i[0]]:
                locate[location_i[0]].append(location_i[1])
        if len(locate) > 1:
            # Same-named schools in more than one region: list candidates
            json_data = exception("regional_redundancy_error", version)
            area_candidate = str()
            for i in locate.keys():
                area_candidate += f", {i} {', '.join(locate.get(i))}"
            json_data['output'] = {
                "area_candidate": area_candidate.replace(",", "", 1)
            }
            return json_response(json_data, status=200)
    SCHUL_NM = data[0]['SCHUL_NM']
    # Resolve the requested date (one- or two-part date slot)
    if "TIMETABLE_DT_2" in parameters:
        dateV = date(parameters.get("TIMETABLE_DT_1"),
                     parameters.get("TIMETABLE_DT_2"))
    else:
        dateV = date(parameters.get("TIMETABLE_DT_1"))
    if dateV is None:
        return json_response(exception("date_not_found", version),
                             status=200)
    json_data = await sc_m.timetable(
        data[0],
        ALL_TI_YMD=dateV.datetime.strftime('%Y%m%d'),
        CLASS=parameters.get("TIMETABLE_CLASS").get("value").rstrip("반"),
        GRADE=parameters.get("TIMETABLE_GRADE").get("value").rstrip("학년"))
    if json_data is school_exception.NotFound:
        return json_response(exception("timetable_not_found", version),
                             status=200)
    elif json_data is school_exception.Internal_Server_Error:
        return json_response(exception("backend_proxy_error", version),
                             status=200)
    type_nm = data[0].get("SCHUL_KND_SC_NM")
    # Rows for the day, keyed by school type (elementary/middle/high)
    data2 = json_data[f'{Type_list[type_nm]}Timetable'][1]['row']
    data_count = len(data2)
    table = ["" for _ in range(data_count)]
    for i in data2:
        # PERIO is the 1-based period number
        perio = int(i.get('PERIO')) - 1
        table[perio] = i.get('ITRT_CNTNT')
    answer = ""
    count = 1
    for i in table:
        answer += f", {count}교시 {i}"
        count += 1
    data = {
        "version": version,
        "resultCode": "OK",
        "output": {
            "TIMETABLE_STATUS": answer.replace(",", "", 1),
            "DT_ANSWER": dateV.name
        }
    }
    # Handle the Display capability interface
    if display:
        count = 1
        listItems = []
        for i in table:
            listItems.append({
                "token": display_token,
                "header": {
                    "text": f"{count} 교시"
                },
                "body": {
                    "text": f"{i}"
                }
            })
            count += 1
        data['directives'] = [{
            "type": "Display.TextList1",
            "version": display_version,
            "playServiceId": playServiceId,
            "token": display_token,
            "title": {
                "logo": {
                    "sources": [{
                        "url": "https://yhs.kr/api/nugu/icon"
                    }]
                },
                "text": {
                    "text": f"{SCHUL_NM} {parameters.get('TIMETABLE_CLASS').get('value')} {parameters.get('TIMETABLE_CLASS').get('value')}의 시간표 정보"
                }
            },
            "badgeNumber": "false",
            "listItems": listItems,
        }]
    return json_response(data, status=200)
async def meal(request):
    """aiohttp handler for the NUGU school-meal intent.

    Validates the NUGU request payload, resolves the school via NEIS,
    fetches meal data for the requested date/meal type and returns the
    NUGU response JSON (adding Display directives when the client
    supports the Display capability interface).

    :param request: aiohttp request carrying the NUGU action JSON body.
    :returns: ``json_response`` with the NUGU payload, or ``forbidden``.
    """
    # Reject empty or non-JSON bodies up front.
    if not request.body_exists or not isconvert(await request.text()):
        return forbidden
    parameters = await request.json()
    action = parameters.get('action')
    version = parameters.get('version')
    context = parameters.get('context')
    # NOTE(review): when action is None, `"parameters" in action` raises
    # TypeError before `action is not None` can short-circuit — the two
    # operands look swapped; confirm intended order.
    if "parameters" in action or action is not None:
        parameters = action.get('parameters')
        # Shared-secret check against the configured token.
        if parameters.get('KEY').get('value') != parser.get('TOKEN', 'key'):
            return forbidden
    else:
        return forbidden
    display = False
    if context is not None:
        interface = context.get("supportedInterfaces")
        if "Display" in interface:
            # Client supports the NUGU Display capability interface.
            display = True
            display_version = interface.get("Display").get("version")
            display_token = interface.get("Display").get("token")
            playServiceId = interface.get("Display").get("playServiceId")
    school_nm = parameters.get('MEAL_SCHOOL_NM').get('value')
    sc_m = school(school_nm)
    # Optional region slot narrows the school lookup.
    if 'MEAL_LCP' in parameters:
        location = parameters.get('MEAL_LCP').get('value')
        data = await sc_m.school(location)
    else:
        data = await sc_m.school()
    if data is school_exception.NotFound:
        return json_response(exception("school_not_found", version), status=200)
    elif data is school_exception.Internal_Server_Error:
        return json_response(exception("backend_proxy_error", version), status=200)
    if len(data) > 1:
        # Multiple schools matched: detect same-name schools in different
        # regions and, if ambiguous, ask the user to disambiguate.
        location_data = []
        for i in data:
            for j in data:
                if i == j:
                    continue
                # To avoid branch-school false positives the name endings must
                # match, and the leftover prefix must be at least 2 characters
                # so names like "가나다초등학교" vs "나다초등학교" are told apart.
                elif i['SCHUL_NM'].endswith(
                        j['SCHUL_NM']) and i not in location_data and len(
                            i['SCHUL_NM'].replace(j['SCHUL_NM'], "")) > 1:
                    location_data.append(i)
        locate = {}
        for i in location_data:
            # Road address tokens: index 0 = province/city, index 1 = city/county/district.
            location_i = i.get('ORG_RDNMA').split()
            if location_i[0] not in locate:
                locate[location_i[0]] = list()
            if location_i[1] not in locate[location_i[0]]:
                locate[location_i[0]].append(location_i[1])
        if len(locate) > 1:
            # More than one region matched: return candidates to the user.
            json_data = exception("regional_redundancy_error", version)
            area_candidate = str()
            for i in locate.keys():
                area_candidate += f", {i} {', '.join(locate.get(i))}"
            json_data['output'] = {
                # replace(..., 1) strips only the leading comma.
                "area_candidate": area_candidate.replace(",", "", 1)
            }
            return json_response(json_data, status=200)
    SCHUL_NM = data[0]['SCHUL_NM']
    # One or two date slots may be present (e.g. relative week + weekday).
    if "MEAL_DT_2" in parameters:
        dateV = date(parameters.get("MEAL_DT_1"), parameters.get("MEAL_DT_2"))
    else:
        dateV = date(parameters.get("MEAL_DT_1"))
    if dateV is None:
        return json_response(exception("date_not_found", version), status=200)
    json_data = await sc_m.meal(data[0], MLSV_YMD=dateV.datetime.strftime('%Y%m%d'))
    if json_data is school_exception.NotFound:
        return json_response(exception("meal_not_found1", version), status=200)
    elif json_data is school_exception.Internal_Server_Error:
        return json_response(exception("backend_proxy_error", version), status=200)
    listItems = []
    if display:
        # Build one Display list item per day of the target week.
        date_inform = {}
        firstDay = getWeekFirstDate(dateV.datetime).strftime('%Y%m%d')
        lastDay = getWeekLastDate(dateV.datetime).strftime('%Y%m%d')
        # NOTE(review): iterating YYYYMMDD strings as consecutive integers
        # produces non-calendar keys when the week crosses a month boundary
        # and excludes lastDay itself (range end is exclusive) — confirm
        # whether this is intended.
        for i in range(int(firstDay), int(lastDay)):
            if str(i) == dateV.datetime.strftime('%Y%m%d'):
                continue
            date_inform[str(i)] = None
        # Ranged query fetches the whole week's meals in one call.
        dp_json_data = await sc_m.meal(data[0], MLSV_FROM_YMD=firstDay,
                                       MLSV_TO_YMD=lastDay)
        mt = parameters.get('MEAL_TYPE').get('value')
        food = None
        for i in json_data.get('mealServiceDietInfo')[1].get("row"):
            if i['MMEAL_SC_NM'] == parameters.get('MEAL_TYPE').get('value'):
                food = i.get("DDISH_NM")
        if food is not None:
            # Requested day's meal goes first in the list.
            listItems.append({
                "token": display_token,
                "header": {
                    "text": f"{change_weekday(dateV.datetime.weekday())} {mt}"
                },
                "body": [{
                    "text": f"{read_food_display(food)}"
                }]
            })
        else:
            listItems.append({
                "token": display_token,
                "header": {
                    "text": f"{change_weekday(dateV.datetime.weekday())} {mt}"
                },
                "body": [{
                    "text": "급식정보 없음."
                }],
                "footer": {
                    "text": "휴교 중이거나, 방학 중에는 급식 정보가 없습니다."
                }
            })
        # Index the weekly rows by serving date for the matching meal type.
        for i in dp_json_data.get('mealServiceDietInfo')[1].get("row"):
            if i.get('MMEAL_SC_NM') == parameters.get('MEAL_TYPE').get(
                    'value'):
                if i.get("MLSV_YMD") in date_inform.keys():
                    date_inform[i.get("MLSV_YMD")] = i
        for i in range(int(firstDay), int(lastDay)):
            if str(i) == dateV.datetime.strftime('%Y%m%d'):
                continue
            # NOTE(review): strptime raises ValueError for the non-calendar
            # integers noted above — TODO confirm this path is unreachable.
            date_time_obj = datetime.strptime(str(i), '%Y%m%d')
            if date_inform[str(i)] is None:
                listItems.append({
                    "token": display_token,
                    "header": {
                        "text": f"{change_weekday(date_time_obj.weekday())} {mt}"
                    },
                    "body": [{
                        "text": "급식정보 없음."
                    }],
                    "footer": {
                        "text": "휴교 중이거나, 방학 중에는 급식 정보가 없습니다."
                    }
                })
                continue
            cacheDT = date_inform[str(i)]
            listItems.append({
                "token": display_token,
                "header": {
                    "text": f"{change_weekday(date_time_obj.weekday())} {mt}"
                },
                "body": [{
                    "text": f"{read_food_display(cacheDT.get('DDISH_NM'))}"
                }]
            })
    # Voice answer for the requested day only.
    food = None
    for i in json_data.get('mealServiceDietInfo')[1].get("row"):
        if i['MMEAL_SC_NM'] == parameters.get('MEAL_TYPE').get('value'):
            food = i.get("DDISH_NM")
    if food is None:
        return json_response(exception("meal_not_found2", version), status=200)
    data = {
        "version": version,
        "resultCode": "OK",
        "output": {
            "DT_ANSWER": dateV.name,
            "MEAL_STATUS": read_food(food)
        }
    }
    # Attach the Display capability directive when supported.
    if display:
        data['directives'] = [{
            "type": "Display.TextList3",
            "version": display_version,
            "playServiceId": playServiceId,
            "token": display_token,
            "title": {
                "logo": {
                    "sources": [{
                        "url": "https://yhs.kr/api/nugu/icon"
                    }]
                },
                "text": {
                    "text": f"{SCHUL_NM}의 급식 정보"
                }
            },
            "badgeNumber": "false",
            "listItems": listItems,
            "caption": "알레르기 정보: 1.난류, 2.우유, 3.메밀, 4.땅콩, 5.대두, 6.밀, 7.고등어, 8.게, 9.새우, 10.돼지고기, 11.복숭아, 12.토마토, 13.아황산염, 14.호두, 15.닭고기, 16.쇠고기, 17.오징어, 18.조개류(굴,전복,홍합 등)"
        }]
    return json_response(data, status=200)
#!/usr/bin/env python # -*- coding: utf-8 -*- import requests from requests.utils import quote from config import parser, es_index esserver = parser.get('es', 'es_url') es_user = parser.get('es', 'es_user', fallback="") es_pass = parser.get('es', "es_pass", fallback="") es_verifycert = parser.getboolean('es', 'es_verifycert', fallback=False) search_index = f'*:{es_index}' def get_hits(esid: str) -> dict: query = {"query": {"bool": {"must": {"match": {'_id': esid}}}}} res_json = __es_search__(query) if res_json['hits']['total']['value'] > 0: return res_json def get_conn(conn_id: str) -> dict: query = {"bool": {"must": [{"match": {"event_type": "bro_conn"}}, {"match": {"uid": conn_id}}]}} res_json = __es_search__(query) if res_json['hits']['total']['value'] > 0: return res_json def do_update(esindex: str, esid: str, tags: str) -> dict: local_index = esindex.split(":")[1] query = {"doc": {"tags": tags}}
def createHiveAlert(esid):
    """Build a TheHive alert form from the Elasticsearch document *esid*.

    Fetches the ECS-style event, derives a title/tags/artifacts per source
    module (ids, zeek, ossec, sysmon, fallback) and renders the 'hive.html'
    confirmation template with the pre-filled alert fields.
    """
    search = get_hits(esid)
    # Hive Stuff
    hive_url = parser.get('hive', 'hive_url')
    hive_api = hiveInit()
    tlp = int(parser.get('hive', 'hive_tlp'))
    for item in search['hits']['hits']:
        # Get initial details
        result = item['_source']
        message = result['message']
        es_id = item['_id']
        description = str(message)
        # Short random reference for TheHive (first 6 chars of a UUID4).
        sourceRef = str(uuid.uuid4())[0:6]
        tags = ["SecurityOnion"]
        artifacts = []
        event = result['event']
        # Pull out src/dst ip:port when present; None otherwise.
        src = srcport = dst = dstport = None
        if 'source' in result:
            if 'ip' in result['source']:
                src = str(result['source']['ip'])
            if 'port' in result['source']:
                srcport = str(result['source']['port'])
        if 'destination' in result:
            if 'ip' in result['destination']:
                dst = str(result['destination']['ip'])
            if 'port' in result['destination']:
                dstport = str(result['destination']['port'])
        # NIDS Alerts
        if event['module'] == 'ids':
            alert = result['rule']['name']
            sid = str(result['rule']['signature_id'])
            category = result['rule']['category']
            sensor = result['observer']['name']
            # Manager IP extracted from the configured ES URL.
            masterip = str(es_url.split("//")[1].split(":")[0])
            tags.append("nids")
            tags.append(category)
            title = alert
            print(alert)
            sys.stdout.flush()
            # Add artifacts
            # NOTE(review): src/dst may still be None here, and the string
            # concatenation below would then raise TypeError — confirm NIDS
            # events always carry source/destination ip and port.
            artifacts.append(AlertArtifact(dataType='ip', data=src))
            artifacts.append(AlertArtifact(dataType='ip', data=dst))
            artifacts.append(AlertArtifact(dataType='other', data=sensor))
            # Markdown description with a Kibana dashboard link scoped to the
            # signature id, plus a PCAP lookup link.
            description = "`NIDS Dashboard:` \n\n <https://" + masterip + f"/kibana/so-soctopus/kibana#/dashboard/ed6f7e20-e060-11e9-8f0c-2ddbf5ed9290?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(_source),index:'*:{es_index}',interval:auto,query:(query_string:(analyze_wildcard:!t,query:'sid:" + sid + "')),sort:!('@timestamp',desc))> \n\n `IPs: `" + src + ":" + srcport + "-->" + dst + ":" + dstport + "\n\n `Signature:`" + alert + "\n\n `PCAP:` " + "https://" + masterip + "/kibana/so-soctopus//sensoroni/securityonion/joblookup?redirectUrl=/sensoroni/&esid=" + es_id
        # Zeek logs
        elif event['module'] == 'zeek':
            # Dataset name -> human-readable log-type label.
            _map_key_type = {
                "conn": "Connection",
                "dhcp": "DHCP",
                "dnp3": "DNP3",
                "dns": "DNS",
                "file": "Files",
                "ftp": "FTP",
                "http": "HTTP",
                "intel": "Intel",
                "irc": "IRC",
                "kerberos": "Kerberos",
                "modbus": "Modbus",
                "mysql": "MySQL",
                "ntlm": "NTLM",
                "pe": "PE",
                "radius": "RADIUS",
                "rdp": "RDP",
                "rfb": "RFB",
                "sip": "SIP",
                "smb": "SMB",
                "smtp": "SMTP",
                "snmp": "SNMP",
                "ssh": "SSH",
                "ssl": "SSL",
                "syslog": "Syslog",
                "weird": "Weird",
                "x509": "X509"
            }
            zeek_tag = event['dataset']
            # NOTE(review): an unmapped dataset yields zeek_tag_title == None
            # and the concatenation below raises TypeError — confirm the map
            # covers every dataset this deployment emits.
            zeek_tag_title = _map_key_type.get(zeek_tag)
            title = str('New Zeek ' + zeek_tag_title + ' record!')
            if src:
                artifacts.append(AlertArtifact(dataType='ip', data=src))
            if dst:
                artifacts.append(AlertArtifact(dataType='ip', data=dst))
            if result.get('observer', {}).get('name'):
                sensor = str(result['observer']['name'])
                artifacts.append(AlertArtifact(dataType='other', data=sensor))
            # Prefer the most specific identifier available for the title:
            # uid, then fuid, then community id.
            if result.get('log', {}).get('id', {}).get('uid'):
                uid = str(result['log']['id']['uid'])
                title = str('New Zeek ' + zeek_tag_title + ' record! - ' + uid)
                artifacts.append(AlertArtifact(dataType='other', data=uid))
            if result.get('log', {}).get('id', {}).get('fuid'):
                fuid = str(result['log']['id']['fuid'])
                title = str('New Zeek ' + zeek_tag_title + ' record! - ' + fuid)
                artifacts.append(AlertArtifact(dataType='other', data=fuid))
            if result.get('log', {}).get('id', {}).get('id'):
                fuid = str(result['log']['id']['id'])
                title = str('New Zeek ' + zeek_tag_title + ' record! - ' + fuid)
                artifacts.append(AlertArtifact(dataType='other', data=fuid))
            tags.append('zeek')
            tags.append(zeek_tag)
        # Wazuh/OSSEC logs
        elif event['module'] == 'ossec':
            agent_name = result['agent']['name']
            # Prefer the rule description; fall back to the raw log line.
            if 'description' in result:
                ossec_desc = result['rule']['description']
            else:
                ossec_desc = result['log']['full']
            if 'ip' in result['agent']:
                agent_ip = result['agent']['ip']
                artifacts.append(AlertArtifact(dataType='ip', data=agent_ip))
                artifacts.append(
                    AlertArtifact(dataType='other', data=agent_name))
            else:
                artifacts.append(
                    AlertArtifact(dataType='other', data=agent_name))
            title = ossec_desc
            tags.append("wazuh")
        # Sysmon logs
        elif event['module'] == 'sysmon':
            # Sysmon events may arrive via Wazuh/OSSEC or via a beat shipper.
            if 'ossec' in result['tags']:
                agent_name = result['agent']['name']
                agent_ip = result['agent']['ip']
                artifacts.append(AlertArtifact(dataType='ip', data=agent_ip))
                artifacts.append(
                    AlertArtifact(dataType='other', data=agent_name))
                tags.append("wazuh")
            elif 'beat' in result['tags']:
                agent_name = str(result['agent']['hostname'])
                if result.get('agent'):
                    # Best-effort extraction; missing keys are ignored.
                    try:
                        os_name = str(result['agent']['os']['name'])
                        artifacts.append(
                            AlertArtifact(dataType='other', data=os_name))
                    except:
                        pass
                    try:
                        beat_name = str(result['agent']['name'])
                        artifacts.append(
                            AlertArtifact(dataType='other', data=beat_name))
                    except:
                        pass
                if result.get('source', {}).get('hostname'):
                    source_hostname = result['source']['hostname']
                    artifacts.append(
                        AlertArtifact(dataType='fqdn', data=source_hostname))
                if result.get('source', {}).get('ip'):
                    source_ip = str(result['source']['ip'])
                    artifacts.append(
                        AlertArtifact(dataType='ip', data=source_ip))
                if result.get('destination', {}).get('ip'):
                    destination_ip = str(result['destination']['ip'])
                    artifacts.append(
                        AlertArtifact(dataType='ip', data=destination_ip))
                # FIXME: find what "image_path" has been changed to
                # if 'image_path' in result:
                #     image_path = str(result['image_path'])
                #     artifacts.append(AlertArtifact(dataType='filename', data=image_path))
                # FIXME: find what "Hashes" has been changed to
                # if 'Hashes' in result['data']['data']:
                #     hashes = result['event']['data']['Hashes']
                #     for hash in hashes.split(','):
                #         if hash.startswith('MD5') or hash.startswith('SHA256'):
                #             artifacts.append(AlertArtifact(dataType='hash', data=hash.split('=')[1]))
                tags.append("agent")
            else:
                agent_name = ''
            title = "New Sysmon Event! - " + agent_name
        else:
            # Fallback title for modules without special handling.
            title = f'New {event["module"]}_{event["dataset"]} Event From Security Onion'
    form = DefaultForm()
    # Serialize artifacts so the template can round-trip them in the form.
    artifact_string = jsonpickle.encode(artifacts)
    return render_template('hive.html',
                           title=title,
                           tlp=tlp,
                           tags=tags,
                           description=description,
                           artifact_string=artifact_string,
                           sourceRef=sourceRef,
                           form=form)
def createHiveAlert(esid):
    """Create a TheHive alert directly from the Elasticsearch document *esid*.

    Legacy (pre-ECS) variant: reads flat `_source` fields, classifies the
    event by its `event_type` (snort / bro / ossec / sysmon / other), builds
    an Alert with artifacts and submits it via the TheHive API.
    """
    search = getHits(esid)
    #Hive Stuff
    #es_url = parser.get('es', 'es_url')
    hive_url = parser.get('hive', 'hive_url')
    hive_key = parser.get('hive', 'hive_key')
    hive_verifycert = parser.get('hive', 'hive_verifycert')
    tlp = int(parser.get('hive', 'hive_tlp'))
    # Check if verifying cert
    if 'False' in hive_verifycert:
        api = TheHiveApi(hive_url, hive_key, cert=False)
    else:
        api = TheHiveApi(hive_url, hive_key, cert=True)
    #if hits > 0:
    for result in search['hits']['hits']:
        # Get initial details
        message = result['_source']['message']
        description = str(message)
        # Short random reference for TheHive (first 6 chars of a UUID4).
        sourceRef = str(uuid.uuid4())[0:6]
        tags=["SecurityOnion"]
        artifacts=[]
        id = None
        # Cross-cluster index name "cluster:index".
        host = str(result['_index']).split(":")[0]
        index = str(result['_index']).split(":")[1]
        event_type = result['_source']['event_type']
        # NOTE(review): src/dst stay unbound when these fields are absent,
        # and the snort branch below uses them unconditionally — confirm
        # snort events always carry source_ip/destination_ip.
        if 'source_ip' in result['_source']:
            src = str(result['_source']['source_ip'])
        if 'destination_ip' in result['_source']:
            dst = str(result['_source']['destination_ip'])
        #if 'source_port' in result['_source']:
        #    srcport = result['_source']['source_port']
        #if 'destination_port' in result['_source']:
        #    dstport = result['_source']['destination_port']
        # NIDS Alerts
        if 'snort' in event_type:
            alert = result['_source']['alert']
            category = result['_source']['category']
            sensor = result['_source']['interface']
            tags.append("nids")
            tags.append(category)
            title=alert
            # Add artifacts
            artifacts.append(AlertArtifact(dataType='ip', data=src))
            artifacts.append(AlertArtifact(dataType='ip', data=dst))
            artifacts.append(AlertArtifact(dataType='other', data=sensor))
        # Bro logs
        elif 'bro' in event_type:
            # Bro log type -> human-readable label.
            _map_key_type ={
                "conn": "Connection",
                "dhcp": "DHCP",
                "dnp3": "DNP3",
                "dns": "DNS",
                "files": "Files",
                "ftp": "FTP",
                "http": "HTTP",
                "intel": "Intel",
                "irc": "IRC",
                "kerberos": "Kerberos",
                "modbus": "Modbus",
                "mysql": "MySQL",
                "ntlm": "NTLM",
                "pe": "PE",
                "radius": "RADIUS",
                "rdp": "RDP",
                "rfb": "RFB",
                "sip" : "SIP",
                "smb": "SMB",
                "smtp": "SMTP",
                "snmp": "SNMP",
                "ssh": "SSH",
                "ssl": "SSL",
                "syslog": "Syslog",
                "weird": "Weird",
                "x509": "X509"
            }
            def map_key_type(indicator_type):
                ''' Maps a key type to use in the request URL. '''
                return _map_key_type.get(indicator_type)
            # NOTE(review): str.strip('bro_') strips the CHARACTER SET
            # {b,r,o,_} from both ends, not the "bro_" prefix — e.g.
            # "bro_rdp" becomes "dp", not "rdp". Confirm against the actual
            # event_type values; removeprefix/slicing would be prefix-exact.
            bro_tag = event_type.strip('bro_')
            bro_tag_title = map_key_type(bro_tag)
            title= str('New Bro ' + bro_tag_title + ' record!')
            if 'source_ip' in result['_source']:
                artifacts.append(AlertArtifact(dataType='ip', data=src))
            if 'destination_ip' in result['_source']:
                artifacts.append(AlertArtifact(dataType='ip', data=dst))
            if 'sensor_name' in result['_source']:
                sensor = str(result['_source']['sensor_name'])
                artifacts.append(AlertArtifact(dataType='other', data=sensor))
            # Prefer the most specific identifier available for the title:
            # uid, then fuid, then id.
            if 'uid' in result['_source']:
                uid = str(result['_source']['uid'])
                title= str('New Bro ' + bro_tag_title + ' record! - ' + uid)
                artifacts.append(AlertArtifact(dataType='other', data=uid))
            if 'fuid' in result['_source']:
                fuid = str(result['_source']['fuid'])
                title= str('New Bro ' + bro_tag_title + ' record! - ' + fuid)
                artifacts.append(AlertArtifact(dataType='other', data=fuid))
            if 'id' in result['_source']:
                fuid = str(result['_source']['id'])
                title= str('New Bro ' + bro_tag_title + ' record! - ' + fuid)
                artifacts.append(AlertArtifact(dataType='other', data=fuid))
            tags.append('bro')
            tags.append(bro_tag)
        # Wazuh/OSSEC logs
        elif 'ossec' in event_type:
            agent_name = result['_source']['agent']['name']
            # Prefer the rule description; fall back to the raw log line.
            if 'description' in result['_source']:
                ossec_desc = result['_source']['description']
            else:
                ossec_desc = result['_source']['full_log']
            if 'ip' in result['_source']['agent']:
                agent_ip = result['_source']['agent']['ip']
                artifacts.append(AlertArtifact(dataType='ip', data=agent_ip))
                artifacts.append(AlertArtifact(dataType='other', data=agent_name))
            else:
                artifacts.append(AlertArtifact(dataType='other', data=agent_name))
            title= ossec_desc
            tags.append("wazuh")
        elif 'sysmon' in event_type:
            if 'ossec' in result['_source']['tags']:
                agent_name = result['_source']['agent']['name']
                agent_ip = result['_source']['agent']['ip']
                ossec_desc = result['_source']['full_log']
                artifacts.append(AlertArtifact(dataType='ip', data=agent_ip))
                artifacts.append(AlertArtifact(dataType='other', data=agent_name))
                title= "New Sysmon Event! - " + agent_name
                tags.append("wazuh")
        else:
            # Fallback title for event types without special handling.
            title = "New " + event_type + " Event From Security Onion"
        # Build alert
        hivealert = Alert(
            title= title,
            tlp=tlp,
            tags=tags,
            description=description,
            type='external',
            source='SecurityOnion',
            sourceRef=sourceRef,
            artifacts=artifacts
        )
        # Send it off
        response = api.create_alert(hivealert)
        if response.status_code == 201:
            print(json.dumps(response.json(), indent=4, sort_keys=True))
            print('')
            id = response.json()['id']
            # If running standalone / eval tell ES that we sent the alert
            #es_type = 'doc'
            #es_index = index
            #es_headers = {'Content-Type' : 'application/json'}
            #es_data = '{"script" : {"source": "ctx._source.tags.add(params.tag)","lang": "painless","params" : {"tag" : "Sent to TheHive"}}}'
            #update_es_event = requests.post(es_url + '/' + es_index + '/' + es_type + '/' + esid + '/_update', headers=es_headers, data=es_data)
            #print(update_es_event.content)
        else:
            print('ko: {}/{}'.format(response.status_code, response.text))
            sys.exit(0)
import uuid
import sys
import rt
import requests
import os
import base64
import time
import jsonpickle
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Suppress TLS-verification warnings (requests may be used with verify off).
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Elasticsearch and TheHive connection settings from the shared config.
es_url = parser.get('es', 'es_url')
hive_url = parser.get('hive', 'hive_url')
hive_key = parser.get('hive', 'hive_key')
hive_verifycert = parser.get('hive', 'hive_verifycert')


def hiveInit():
    """Return a TheHiveApi client honoring the configured cert policy.

    `hive_verifycert` is a raw config string; certificate verification is
    disabled whenever it contains the substring 'False'.
    """
    # Check if verifying cert
    if 'False' in hive_verifycert:
        api = TheHiveApi(hive_url, hive_key, cert=False)
    else:
        api = TheHiveApi(hive_url, hive_key, cert=True)
    return api


def createHiveAlert(esid):
"""Entry point for the NUGU school-info aiohttp service.

Registers the meal/timetable/icon/health routes and runs the web app
with host/port/log-format taken from the shared config parser.
"""
import logging

from aiohttp import web

from config import parser
from route.meal import meal
from route.timetable import timetable
from route.icon import icon
from route.health import health

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    # Fix: the original created `route = web.RouteTableDef()` and never
    # used it — routes are registered directly on the application below.
    app = web.Application()
    app.add_routes([
        web.post('/api/NUGU/meal', meal),
        web.post('/api/NUGU/timetable', timetable),
        web.get('/api/NUGU/icon', icon),
        web.get('/health', health)
    ])
    # NOTE(review): parser.get returns the port as a str — aiohttp/asyncio
    # resolve it via getaddrinfo; confirm or switch to parser.getint.
    web.run_app(app,
                host=parser.get('DEFAULT', 'host'),
                port=parser.get('DEFAULT', 'port'),
                access_log_format=parser.get('DEFAULT', 'access_log_format'))
import time
import yaml
import requests
# NOTE(review): duplicate import of requests — harmless but removable.
import requests
from pymisp import PyMISP
from config import parser
from pymemcache.client.base import Client
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Suppress TLS-verification warnings (MISP may run with a self-signed cert).
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# memcached connection / cache-tuning settings from the shared config.
memcached_host = parser.get('memcached', 'host')
memcached_port = int(parser.get('memcached', 'port'))
memcached_agetime = int(parser.get('memcached', 'agetime'))
memcached_sleeptime = int(parser.get('memcached', 'sleeptime'))
memcached = Client((memcached_host, memcached_port))

# MISP API connection settings.
misp_url = parser.get('misp', 'url')
misp_key = parser.get('misp', 'apikey')
misp_verifycert = parser.getboolean('misp', 'verifycert')


def getAttrs():
    """Query MISP for attributes per the misp2elastic.yaml IOC mapping."""
    def init(url, key):
        # Build a PyMISP client using the JSON output format.
        return PyMISP(url, key, misp_verifycert, 'json')

    misp = init(misp_url, misp_key)
    call_path = 'attributes/restSearch'
    # Load the IOC type -> tags mapping and iterate every configured tag.
    with open('misp2elastic.yaml', 'r') as f:
        mispyaml = yaml.safe_load(f)
    for i in mispyaml["iocs"]:
        for t in mispyaml["iocs"][i]["tags"]: