def add_required_fields(self):
    """Ensure every mandatory MozDef event field is present, supplying
    a sensible default for any that are missing. Existing values are
    never overwritten.
    """
    # timestamps default to "now" in UTC ISO8601
    for key in ('receivedtimestamp', 'utctimestamp', 'timestamp'):
        if key not in self:
            self[key] = toUTC(datetime.now()).isoformat()
    if 'mozdefhostname' not in self:
        self['mozdefhostname'] = socket.gethostname()
    if 'type' not in self:
        self['type'] = self.DEFAULT_TYPE
    if 'tags' not in self:
        self['tags'] = []
    # plain string fields share a single default value
    for key in ('category', 'hostname', 'processid', 'processname',
                'severity', 'source', 'summary'):
        if key not in self:
            self[key] = self.DEFAULT_STRING
    if 'plugins' not in self:
        self['plugins'] = []
    if 'details' not in self:
        self['details'] = {}
def create_timestamp_from_now(hour, minute, second):
    """Return a UTC timestamp for today's date pinned to the given
    hour/minute/second.

    NOTE(review): microseconds from datetime.now() are NOT cleared and
    leak into the result — confirm that is intended.
    """
    pinned = datetime.now().replace(hour=hour, minute=minute, second=second)
    return toUTC(pinned.isoformat())
def test_eve_log_alert_http(self):
    """Verify that a Suricata eve-log 'alert' event with an 'http' section
    is normalized: flow.start becomes the event timestamps and the http.*
    fields are renamed to their Bro-style equivalents under details.
    """
    # minimal routing envelope the plugin keys off of
    event = {
        'customendpoint': '',
        'category': 'suricata',
        'SOURCE': 'eve-log',
        'event_type': 'alert'
    }
    # a representative raw Suricata EVE alert record
    MESSAGE = {
        "timestamp": "2018-09-12T22:24:09.546736+0000",
        "flow_id": 1484802709084080,
        "in_iface": "enp216s0f0",
        "event_type": "alert",
        "vlan": 75,
        "src_ip": "10.48.240.19",
        "src_port": 44741,
        "dest_ip": "10.48.74.17",
        "dest_port": 3128,
        "proto": "017",
        "alert": {
            "action": "allowed",
            "gid": 1,
            "signature_id": 2024897,
            "rev": 1,
            "signature": "ET USER_AGENTS Go HTTP Client User-Agent",
            "category": "",
            "severity": 3
        },
        "app_proto": "http",
        "flow": {
            "pkts_toserver": 555,
            "pkts_toclient": 20,
            "bytes_toserver": 350,
            "bytes_toclient": 4444,
            "start": "2018-10-12T22:24:09.546736+0000"
        },
        "payload": "Q09OTkVDVCBzZWN1cml0eS10cmFja2VyLmRlYmlhbi5vcmc6NDQzIEhUVFAvMS4xDQpIb3N0OiBzZWN1cml0eS10cmFja2VyLmRlYmlhbi5vcmc6NDQzDQpVc2VyLUFnZW50OiBHby1odHRwLWNsaWVudC8xLjENCg0K",
        "payload_printable": "CONNECT security-tracker.debian.org:443 HTTP\/1.1\r\nHost: security-tracker.debian.org:443\r\nUser-Agent: Go-http-client\/1.1\r\n\r\n",
        "stream": 0,
        "packet": "RQAAKAAAAABABgAACjBLMAowShHR6Aw4ClEmlrx\/mcdQEgoAAAAAAA==",
        "packet_info": {
            "linktype": 12
        },
        "http": {
            "hostname": "security-tracker.debian.org",
            "url": "security-tracker.debian.org:443",
            "http_user_agent": "Go-http-client\/1.1",
            "http_method": "CONNECT",
            "protocol": "HTTP\/1.1",
            "status": 200,
            "length": 0,
            "redirect": "afakedestination"
        },
    }
    event['MESSAGE'] = json.dumps(MESSAGE)
    result, metadata = self.plugin.onMessage(event, self.metadata)
    # shared shape checks common to all plugin tests
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    # timestamps must come from flow.start, not the envelope timestamp
    assert toUTC(
        MESSAGE['flow']['start']).isoformat() == result['utctimestamp']
    assert toUTC(
        MESSAGE['flow']['start']).isoformat() == result['timestamp']
    # http.* fields are renamed to Bro-style names at the details root
    assert result['details']['host'] == MESSAGE['http']['hostname']
    assert result['details']['method'] == MESSAGE['http']['http_method']
    assert result['details']['user_agent'] == MESSAGE['http'][
        'http_user_agent']
    assert result['details']['status_code'] == MESSAGE['http']['status']
    assert result['details']['uri'] == MESSAGE['http']['url']
    assert result['details']['redirect_dst'] == MESSAGE['http']['redirect']
    assert result['details']['request_body_len'] == MESSAGE['http'][
        'length']
def onMessage(self, message, metadata):
    """Normalize Bro (Zeek) NSM events.

    Nests the original record under 'details', hoists standard fields
    (hostname, tags, category, source) to the event root, stamps the
    mandatory MozDef fields, and builds a per-log-type human readable
    summary.

    :param message: the incoming event dict
    :param metadata: routing metadata dict (doc_type is rewritten)
    :returns: (message, metadata) tuple, possibly rewritten

    Fix: in the 'software' branch the original code assigned
    newmessage['details'] = u'' (clobbering the whole details dict and
    guaranteeing a TypeError on the subsequent .format(**details));
    it now defaults details['host'] instead.
    """
    # make sure I really wanted to see this message; bail out early if not
    if u'customendpoint' not in message:
        return message, metadata
    if u'category' not in message:
        return message, metadata
    if u'source' not in message:
        return message, metadata
    if message['category'] != 'bro':
        return message, metadata

    # set the doc type to bro to avoid data type conflicts with other
    # doc types (int v string, etc)
    # index holds documents of type 'type': index -> type -> doc
    metadata['doc_type'] = 'nsm'

    # move Bro specific fields under 'details' while preserving metadata
    newmessage = dict()
    newmessage['details'] = message
    newmessage['customendpoint'] = 'bro'

    # move some fields that are expected at the event 'root' where they belong
    if 'hostname' in newmessage['details']:
        newmessage['hostname'] = newmessage['details']['hostname']
        del (newmessage['details']['hostname'])
    if 'tags' in newmessage['details']:
        newmessage['tags'] = newmessage['details']['tags']
        del (newmessage['details']['tags'])
    if 'category' in newmessage['details']:
        newmessage['category'] = newmessage['details']['category']
        del (newmessage['details']['category'])
    if 'customendpoint' in newmessage['details']:
        del (newmessage['details']['customendpoint'])
    if 'source' in newmessage['details']:
        newmessage['source'] = newmessage['details']['source']
        del (newmessage['details']['source'])

    # add mandatory fields
    if 'ts' in newmessage['details']:
        newmessage[u'utctimestamp'] = toUTC(
            newmessage['details']['ts']).isoformat()
        newmessage[u'timestamp'] = toUTC(
            newmessage['details']['ts']).isoformat()
    else:
        # a malformed message somehow managed to crawl to us,
        # let's put it somewhat together
        newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat()
        newmessage[u'timestamp'] = toUTC(datetime.now()).isoformat()
    newmessage[u'receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    newmessage[u'eventsource'] = u'nsm'
    newmessage[u'severity'] = u'INFO'
    newmessage[u'mozdefhostname'] = self.mozdefhostname

    # re-arrange the position of some fields: {} vs {'details':{}}
    if 'details' in newmessage:
        # All Bro logs need special treatment, so we provide it
        # Not a known log source? Mark it as such and return
        if 'source' not in newmessage:
            newmessage['source'] = u'unknown'
            return newmessage, metadata
        logtype = newmessage['source']

        if logtype == 'conn':
            # rename byte counters to the MozDef convention
            newmessage[u'details'][u'originipbytes'] = newmessage[
                'details']['orig_ip_bytes']
            newmessage[u'details'][u'responseipbytes'] = newmessage[
                'details']['resp_ip_bytes']
            del (newmessage['details']['orig_ip_bytes'])
            del (newmessage['details']['resp_ip_bytes'])
            if 'history' not in newmessage['details']:
                newmessage['details'][u'history'] = ''
            newmessage[u'summary'] = (
                u'{sourceipaddress}:'
                u'{sourceport} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'{history} '
                u'{originipbytes} bytes / '
                u'{responseipbytes} bytes').format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'files':
            if 'rx_hosts' in newmessage['details']:
                newmessage['details'][u'sourceipaddress'] = u'{0}'.format(
                    newmessage['details']['rx_hosts'][0])
            if 'tx_hosts' in newmessage['details']:
                newmessage['details'][
                    u'destinationipaddress'] = u'{0}'.format(
                        newmessage['details']['tx_hosts'][0])
            if 'mime_type' not in newmessage['details']:
                newmessage['details'][u'mime_type'] = u'unknown'
            if 'filename' not in newmessage['details']:
                newmessage['details'][u'filename'] = u'unknown'
            if 'total_bytes' not in newmessage['details']:
                newmessage['details'][u'total_bytes'] = u'0'
            if 'md5' not in newmessage['details']:
                newmessage['details'][u'md5'] = u'None'
            if 'filesource' not in newmessage['details']:
                newmessage['details'][u'filesource'] = u'None'
            newmessage[u'summary'] = (u'{rx_hosts[0]} '
                                      u'downloaded (MD5) '
                                      u'{md5} '
                                      u'filename (unknown) '
                                      u'MIME {mime_type} '
                                      u'({total_bytes} bytes) '
                                      u'from {tx_hosts[0]} '
                                      u'via {filesource}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'dns':
            if 'qtype_name' not in newmessage['details']:
                newmessage['details'][u'qtype_name'] = u''
            if 'query' not in newmessage['details']:
                newmessage['details'][u'query'] = u''
            if 'rcode_name' not in newmessage['details']:
                newmessage['details'][u'rcode_name'] = u''
            newmessage[u'summary'] = (
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:{destinationport} '
                u'{qtype_name} '
                u'{query} '
                u'{rcode_name}').format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'http':
            if 'method' not in newmessage['details']:
                newmessage['details'][u'method'] = u''
            if 'host' not in newmessage['details']:
                newmessage['details'][u'host'] = u''
            if 'uri' not in newmessage['details']:
                newmessage['details'][u'uri'] = u''
            if 'status_code' not in newmessage['details']:
                newmessage['details'][u'status_code'] = u''
            newmessage[u'summary'] = (u'{method} '
                                      u'{host} '
                                      u'{uri} '
                                      u'{status_code}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ssl':
            if 'server_name' not in newmessage['details']:
                # fake it till you make it
                newmessage['details'][u'server_name'] = newmessage[
                    'details']['destinationipaddress']
            newmessage[u'summary'] = (u'SSL: {sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'{server_name}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'dhcp':
            newmessage[u'summary'] = ('{assigned_ip} assigned to '
                                      '{mac}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ftp':
            if 'command' not in newmessage['details']:
                newmessage['details'][u'command'] = u''
            if 'user' not in newmessage['details']:
                newmessage['details'][u'user'] = u''
            newmessage[u'summary'] = (u'FTP: {sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'{command} '
                                      u'{user}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'pe':
            if 'os' not in newmessage['details']:
                newmessage['details']['os'] = ''
            if 'subsystem' not in newmessage['details']:
                newmessage['details']['subsystem'] = ''
            newmessage[u'summary'] = (u'PE file: {os} '
                                      u'{subsystem}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'smtp':
            if 'from' in newmessage['details']:
                # unescape the sender address as logged by Bro
                from_decoded = newmessage['details'][u'from'].decode(
                    'unicode-escape')
                newmessage['details'][u'from'] = from_decoded
            else:
                newmessage['details'][u'from'] = u''
            if 'to' not in newmessage['details']:
                newmessage['details'][u'to'] = [u'']
            if 'msg_id' not in newmessage['details']:
                newmessage['details'][u'msg_id'] = u''
            newmessage[u'summary'] = (u'SMTP: {sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'from {from} '
                                      u'to '
                                      u'{to[0]} '
                                      u'ID {msg_id}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ssh':
            if 'auth_success' not in newmessage['details']:
                newmessage['details'][u'auth_success'] = u'unknown'
            newmessage[u'summary'] = (
                u'SSH: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'success {auth_success}').format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'tunnel':
            if 'tunnel_type' not in newmessage['details']:
                newmessage['details'][u'tunnel_type'] = u''
            if 'action' not in newmessage['details']:
                newmessage['details'][u'action'] = u''
            newmessage[u'summary'] = (u'{sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'{tunnel_type} '
                                      u'{action}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'intel':
            if 'seenindicator' not in newmessage['details']:
                newmessage['details'][u'seenindicator'] = u''
            newmessage[u'summary'] = (u'Bro intel match: '
                                      u'{seenindicator}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'knowncerts':
            if 'serial' not in newmessage['details']:
                newmessage['details'][u'serial'] = u'0'
            newmessage[u'summary'] = (u'Certificate seen from: '
                                      u'{host}:'
                                      u'{port_num} '
                                      u'serial {serial}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'knowndevices':
            if 'mac' not in newmessage['details']:
                newmessage['details'][u'mac'] = u''
            if 'dhcp_host_name' not in newmessage['details']:
                newmessage['details'][u'dhcp_host_name'] = u''
            newmessage[u'summary'] = (u'New host: '
                                      u'{mac} '
                                      u'{dhcp_host_name}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'knownhosts':
            if 'host' not in newmessage['details']:
                newmessage['details'][u'host'] = u''
            newmessage[u'summary'] = (u'New host: '
                                      u'{host}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'knownservices':
            if 'service' not in newmessage['details']:
                newmessage['details']['service'] = []
            if not newmessage['details']['service']:
                newmessage['details'][u'service'] = [u'Unknown']
            if 'host' not in newmessage['details']:
                newmessage['details'][u'host'] = u'unknown'
            if 'port_num' not in newmessage['details']:
                newmessage['details'][u'port_num'] = u'0'
            if 'port_proto' not in newmessage['details']:
                newmessage['details'][u'port_proto'] = u''
            newmessage[u'summary'] = (u'New service: '
                                      u'{service[0]} '
                                      u'on host '
                                      u'{host}:'
                                      u'{port_num} / '
                                      u'{port_proto}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'notice':
            newmessage['details'][u'indicators'] = []
            if 'sub' not in newmessage['details']:
                newmessage['details'][u'sub'] = u''
            if 'msg' not in newmessage['details']:
                newmessage['details'][u'msg'] = u''
            if 'note' not in newmessage['details']:
                newmessage['details'][u'note'] = u''
            newmessage[u'summary'] = (u'{note} '
                                      u'{msg} '
                                      u'{sub}').format(
                                          **newmessage['details'])
            # clean up the action notice IP addresses
            if 'actions' in newmessage['details']:
                if newmessage['details'][
                        'actions'] == "Notice::ACTION_LOG":
                    # retrieve indicator ip addresses from the sub field
                    # "sub": "Indicator: 1.2.3.4, Indicator: 5.6.7.8"
                    newmessage['details']['indicators'] = [
                        ip for ip in findIPv4(newmessage['details']['sub'])
                    ]
                    # remove the details.src field and add it to indicators
                    # as it may not be the actual source.
                    if 'src' in newmessage['details']:
                        if isIPv4(newmessage[u'details'][u'src']):
                            newmessage[u'details'][u'indicators'].append(
                                newmessage[u'details'][u'src'])
                            # If details.src is present overwrite the
                            # source IP address with it
                            newmessage[u'details'][
                                u'sourceipaddress'] = newmessage[
                                    u'details'][u'src']
                            newmessage[u'details'][
                                u'sourceipv4address'] = newmessage[
                                    u'details'][u'src']
                        if isIPv6(newmessage[u'details'][u'src']):
                            newmessage[u'details'][u'indicators'].append(
                                newmessage[u'details'][u'src'])
                            # If details.src is present overwrite the
                            # source IP address with it
                            newmessage[u'details'][
                                u'sourceipv6address'] = newmessage[
                                    u'details'][u'src']
                        # Thank you for your service
                        del newmessage[u'details'][u'src']
            return (newmessage, metadata)

        if logtype == 'rdp':
            if 'cookie' not in newmessage['details']:
                newmessage['details'][u'cookie'] = u'unknown'
            newmessage[u'summary'] = (u'RDP: {sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'cookie {cookie}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'sip':
            if 'status_msg' not in newmessage['details']:
                newmessage['details'][u'status_msg'] = u'unknown'
            if 'uri' not in newmessage['details']:
                newmessage['details'][u'uri'] = u'unknown'
            if 'method' not in newmessage['details']:
                newmessage['details'][u'method'] = u'unknown'
            newmessage[u'summary'] = (u'SIP: {sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'method {method} '
                                      u'uri {uri} '
                                      u'status {status_msg}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'software':
            if 'name' not in newmessage['details']:
                newmessage['details'][u'name'] = u'unparsed'
            if 'software_type' not in newmessage['details']:
                newmessage['details'][
                    u'software_type'] = u'unknown software'
            if 'host' not in newmessage['details']:
                # BUGFIX: original code assigned newmessage['details'] = u''
                # here, destroying the details dict; default the 'host'
                # key instead so the summary format below can succeed.
                newmessage['details'][u'host'] = u''
            newmessage[u'summary'] = (u'Found {software_type} '
                                      u'name {name} '
                                      u'on {host}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'socks':
            if 'version' not in newmessage['details']:
                newmessage['details'][u'version'] = u'0'
            if 'status' not in newmessage['details']:
                newmessage['details'][u'status'] = u'unknown'
            newmessage[u'summary'] = (u'SOCKSv{version}: '
                                      u'{sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'status {status}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'dcerpc':
            if 'endpoint' not in newmessage['details']:
                newmessage['details'][u'endpoint'] = u'unknown'
            if 'operation' not in newmessage['details']:
                newmessage['details'][u'operation'] = u'unknown'
            newmessage[u'summary'] = (u'DCERPC: {sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'endpoint {endpoint} '
                                      u'operation {operation}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'kerberos':
            if 'request_type' not in newmessage['details']:
                newmessage['details'][u'request_type'] = u'unknown'
            if 'client' not in newmessage['details']:
                newmessage['details'][u'client'] = u'unknown'
            if 'service' not in newmessage['details']:
                newmessage['details'][u'service'] = u'unknown'
            if 'success' not in newmessage['details']:
                newmessage['details'][u'success'] = u'unknown'
            if 'error_msg' not in newmessage['details']:
                newmessage['details'][u'error_msg'] = u''
            newmessage[u'summary'] = (u'{sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'client {client} '
                                      u'request {request_type} '
                                      u'service {service} '
                                      u'success {success} '
                                      u'{error_msg}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ntlm':
            if 'ntlmdomainname' not in newmessage['details']:
                newmessage['details'][u'ntlmdomainname'] = u'unknown'
            if 'ntlmhostname' not in newmessage['details']:
                newmessage['details'][u'ntlmhostname'] = u'unknown'
            if 'ntlmusername' not in newmessage['details']:
                newmessage['details'][u'ntlmusername'] = u'unknown'
            if 'success' not in newmessage['details']:
                newmessage['details'][u'success'] = u'unknown'
            if 'status' not in newmessage['details']:
                newmessage['details'][u'status'] = u'unknown'
            newmessage[u'summary'] = (u'NTLM: {sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'user {ntlmusername} '
                                      u'host {ntlmhostname} '
                                      u'domain {ntlmdomainname} '
                                      u'success {success} '
                                      u'status {status}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'smbfiles':
            if 'path' not in newmessage['details']:
                newmessage['details'][u'path'] = u''
            if 'name' not in newmessage['details']:
                newmessage['details'][u'name'] = u''
            if 'action' not in newmessage['details']:
                newmessage['details'][u'action'] = u''
            newmessage[u'summary'] = ('SMB file: '
                                      u'{sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'{action}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'smbmapping':
            if 'share_type' not in newmessage['details']:
                newmessage['details'][u'share_type'] = u''
            if 'path' not in newmessage['details']:
                newmessage['details'][u'path'] = u''
            newmessage[u'summary'] = ('SMB mapping: '
                                      u'{sourceipaddress} -> '
                                      u'{destinationipaddress}:'
                                      u'{destinationport} '
                                      u'{share_type}').format(
                                          **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'snmp':
            if 'version' not in newmessage['details']:
                newmessage['details'][u'version'] = u'Unknown'
            if 'get_bulk_requests' not in newmessage['details']:
                newmessage['details']['get_bulk_requests'] = 0
            if 'get_requests' not in newmessage['details']:
                newmessage['details']['get_requests'] = 0
            if 'set_requests' not in newmessage['details']:
                newmessage['details']['set_requests'] = 0
            if 'get_responses' not in newmessage['details']:
                newmessage['details']['get_responses'] = 0
            # aggregate bulk + plain get requests for the summary line
            newmessage['details']['getreqestssum'] = u'{0}'.format(
                newmessage['details']['get_bulk_requests'] +
                newmessage['details']['get_requests'])
            newmessage[u'summary'] = (
                u'SNMPv{version}: '
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'({getreqestssum} get / '
                u'{set_requests} set requests '
                u'{get_responses} get responses)').format(
                    **newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'x509':
            if 'certificateserial' not in newmessage['details']:
                newmessage['details'][u'certificateserial'] = u'0'
            newmessage[u'summary'] = (
                'Certificate seen serial {certificateserial}').format(
                    **newmessage['details'])
            return (newmessage, metadata)

    # unknown log type (or no details): pass through with mandatory fields
    return (newmessage, metadata)
def loggerTimeStamp(self, record, datefmt=None):
    """Logging formatter hook: stamp every record with the current
    UTC time in ISO8601, ignoring the record's own time and datefmt.
    """
    stamp = toUTC(datetime.now())
    return stamp.isoformat()
def onMessage(self, message, metadata):
    """Normalize Suricata NSM events.

    Parses the raw EVE JSON out of message['MESSAGE'] into 'details',
    hoists standard fields to the event root, renames eve-log alert
    fields to their Bro-style equivalents, and builds a summary.

    :param message: the incoming event dict
    :param metadata: routing metadata dict (doc_type is rewritten)
    :returns: (message, metadata) tuple, possibly rewritten

    Fixes: the bare except around json.loads now catches only the
    exceptions that call can raise (missing key / bad JSON); a dead
    always-true condition in the flowbits cleanup was removed.
    """
    # make sure I really wanted to see this message; bail out early if not
    if u'customendpoint' not in message:
        return message, metadata
    if u'category' not in message:
        return message, metadata
    if message['category'] != 'suricata':
        return message, metadata

    # set the doc type to nsm to avoid data type conflicts with other
    # doc types (int v string, etc)
    # index holds documents of type 'type': index -> type -> doc
    metadata['doc_type'] = 'nsm'

    # move Suricata specific fields under 'details' while preserving metadata
    newmessage = dict()
    try:
        newmessage['details'] = json.loads(message['MESSAGE'])
    except (KeyError, ValueError, TypeError):
        # MESSAGE missing or not valid JSON; fall back to empty details
        newmessage['details'] = {}
    newmessage['rawdetails'] = message

    # move some fields that are expected at the event 'root' where they belong
    if 'HOST_FROM' in message:
        newmessage['hostname'] = message['HOST_FROM']
    if 'tags' in message:
        newmessage['tags'] = message['tags']
    if 'category' in message:
        newmessage['category'] = message['category']
    newmessage[u'source'] = u'unknown'
    if 'SOURCE' in message:
        newmessage[u'source'] = message['SOURCE']
    logtype = newmessage['source']
    newmessage[u'event_type'] = u'unknown'
    if 'event_type' in message:
        newmessage[u'event_type'] = message['event_type']
    eventtype = newmessage['event_type']

    # add mandatory fields
    if 'flow' in newmessage['details'] and 'start' in newmessage['details']['flow']:
        newmessage[u'utctimestamp'] = toUTC(
            newmessage['details']['flow']['start']).isoformat()
        newmessage[u'timestamp'] = toUTC(
            newmessage['details']['flow']['start']).isoformat()
    else:
        # a malformed message somehow managed to crawl to us,
        # let's put it somewhat together
        newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat()
        newmessage[u'timestamp'] = toUTC(datetime.now()).isoformat()
    newmessage[u'receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    newmessage[u'eventsource'] = u'nsm'
    newmessage[u'severity'] = u'INFO'
    newmessage[u'mozdefhostname'] = self.mozdefhostname

    if 'details' in newmessage:
        # default endpoint fields, overwritten below when present
        newmessage[u'details'][u'sourceipaddress'] = "0.0.0.0"
        newmessage[u'details'][u'destinationipaddress'] = "0.0.0.0"
        newmessage[u'details'][u'sourceport'] = 0
        newmessage[u'details'][u'destinationport'] = 0
        if 'alert' in newmessage[u'details']:
            newmessage[u'details'][u'suricata_alert'] = newmessage[
                u'details'][u'alert']
            del(newmessage[u'details'][u'alert'])
        if 'src_ip' in newmessage['details']:
            newmessage[u'details'][u'sourceipaddress'] = newmessage[
                'details']['src_ip']
            del(newmessage['details']['src_ip'])
        if 'src_port' in newmessage['details']:
            newmessage[u'details'][u'sourceport'] = newmessage[
                'details']['src_port']
            del(newmessage['details']['src_port'])
        if 'dest_ip' in newmessage['details']:
            newmessage[u'details'][u'destinationipaddress'] = newmessage[
                'details']['dest_ip']
            del(newmessage['details']['dest_ip'])
        if 'dest_port' in newmessage['details']:
            newmessage[u'details'][u'destinationport'] = newmessage[
                'details']['dest_port']
            del(newmessage['details']['dest_port'])
        # drop syslog-ng transport fields duplicated into details
        if 'FILE_NAME' in newmessage['details']:
            del(newmessage['details']['FILE_NAME'])
        if 'MESSAGE' in newmessage['details']:
            del(newmessage['details']['MESSAGE'])
        if 'SOURCE' in newmessage['details']:
            del(newmessage['details']['SOURCE'])

        if logtype == 'eve-log':
            if eventtype == 'alert':
                # Truncate packet, payload and payload_printable
                # to reasonable sizes
                if 'packet' in newmessage[u'details']:
                    newmessage[u'details'][u'packet'] = newmessage[
                        u'details'][u'packet'][0:4095]
                if 'payload' in newmessage[u'details']:
                    newmessage[u'details'][u'payload'] = newmessage[
                        u'details'][u'payload'][0:4095]
                if 'payload_printable' in newmessage[u'details']:
                    newmessage[u'details'][u'payload_printable'] = newmessage[
                        u'details'][u'payload_printable'][0:4095]
                # Match names to Bro
                newmessage[u'details'][u'originipbytes'] = 0
                newmessage[u'details'][u'responseipbytes'] = 0
                newmessage[u'details'][u'orig_pkts'] = 0
                newmessage[u'details'][u'resp_pkts'] = 0
                if 'flow' in newmessage[u'details']:
                    if 'bytes_toserver' in newmessage[u'details'][u'flow']:
                        newmessage[u'details'][u'originipbytes'] = newmessage[
                            'details']['flow']['bytes_toserver']
                        del(newmessage['details']['flow']['bytes_toserver'])
                    if 'bytes_toclient' in newmessage[u'details'][u'flow']:
                        newmessage[u'details'][u'responseipbytes'] = newmessage[
                            'details']['flow']['bytes_toclient']
                        del(newmessage['details']['flow']['bytes_toclient'])
                    if 'pkts_toserver' in newmessage[u'details'][u'flow']:
                        newmessage[u'details'][u'orig_pkts'] = newmessage[
                            'details']['flow']['pkts_toserver']
                        del(newmessage['details']['flow']['pkts_toserver'])
                    if 'pkts_toclient' in newmessage[u'details'][u'flow']:
                        newmessage[u'details'][u'resp_pkts'] = newmessage[
                            'details']['flow']['pkts_toclient']
                        del(newmessage['details']['flow']['pkts_toclient'])
                if 'http' in newmessage[u'details']:
                    if 'hostname' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'host'] = newmessage[
                            u'details'][u'http'][u'hostname']
                        del(newmessage[u'details'][u'http'][u'hostname'])
                    if 'http_method' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'method'] = newmessage[
                            u'details'][u'http'][u'http_method']
                        del(newmessage[u'details'][u'http'][u'http_method'])
                    if 'http_user_agent' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'user_agent'] = newmessage[
                            u'details'][u'http'][u'http_user_agent']
                        del(newmessage[u'details'][u'http'][u'http_user_agent'])
                    if 'status' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'status_code'] = newmessage[
                            u'details'][u'http'][u'status']
                        del(newmessage[u'details'][u'http'][u'status'])
                    if 'url' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'uri'] = newmessage[
                            u'details'][u'http'][u'url']
                        del(newmessage[u'details'][u'http'][u'url'])
                    if 'redirect' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'redirect_dst'] = newmessage[
                            u'details'][u'http'][u'redirect']
                        del(newmessage[u'details'][u'http'][u'redirect'])
                    if 'length' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'request_body_len'] = newmessage[
                            u'details'][u'http'][u'length']
                        del(newmessage[u'details'][u'http'][u'length'])
                    if 'http_response_body' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'http_response_body'] = newmessage[
                            u'details'][u'http'][u'http_response_body'][0:4095]
                        del(newmessage[u'details'][u'http'][u'http_response_body'])
                    if 'http_response_body_printable' in newmessage[u'details'][u'http']:
                        newmessage[u'details'][u'http_response_body_printable'] = newmessage[
                            u'details'][u'http'][u'http_response_body_printable'][0:4095]
                        del(newmessage[u'details'][u'http'][u'http_response_body_printable'])
                if 'app_proto' in newmessage[u'details']:
                    newmessage['details']['service'] = newmessage[
                        'details']['app_proto']
                    del(newmessage['details']['app_proto'])
                # Make sure details.vars.flowbits exceptions are handled
                if 'vars' in newmessage['details']:
                    if 'flowbits' in newmessage['details']['vars']:
                        if 'ET.http.javaclient' in newmessage['details']['vars']['flowbits']:
                            # NOTE(review): the original guard here was
                            # `if 'ET.http.javaclient.vulnerable':` — a
                            # non-empty string literal, i.e. always true —
                            # so this rewrite has always applied; the dead
                            # condition was removed. Confirm the intended
                            # check against the flowbits value.
                            del(newmessage['details']['vars']['flowbits']['ET.http.javaclient'])
                            newmessage['details']['vars']['flowbits']['ET.http.javaclient.vulnerable'] = "True"
                newmessage[u'summary'] = (
                    u'{sourceipaddress}:'
                    u'{sourceport} -> '
                    u'{destinationipaddress}:'
                    u'{destinationport}').format(**newmessage['details'])
                return (newmessage, metadata)

    return (newmessage, metadata)
def main():
    """Pull threat indicators from Facebook ThreatExchange and save them
    to the 'threat-exchange' Elasticsearch index.

    Queries each indicator type since the last recorded run (default:
    the past 2 days when no state file exists) up to now, then persists
    the new high-water mark back to the state file.

    Fix: removed the dead `docs = {}` initializer — `docs` is always
    reassigned inside the loop before first use.
    """
    logger.debug('Connecting to Elasticsearch')
    client = ElasticsearchClient(options.esservers)
    logger.debug('Connecting to threat exchange')
    access_token(options.appid, options.appsecret)
    state = State(options.state_file_name)
    current_timestamp = toUTC(datetime.now()).isoformat()
    # We're setting a default for the past 2 days of data
    # if there isnt a state file
    since_date_obj = toUTC(datetime.now()) - timedelta(days=2)
    since_date = since_date_obj.isoformat()
    if 'lastrun' in state.data.keys():
        since_date = state.data['lastrun']

    # A master dict of all the different types of
    # data we want to pull from threat exchange
    params = {
        'malware_hash': {
            'threat_class': Malware,
            'query_params': {},
        },
        'ip_address': {
            'threat_class': ThreatDescriptor,
            'query_params': {
                'type_': 'IP_ADDRESS',
            }
        },
        'domain': {
            'threat_class': ThreatDescriptor,
            'query_params': {
                'type_': 'DOMAIN',
            }
        },
        'uri': {
            'threat_class': ThreatDescriptor,
            'query_params': {
                'type_': 'URI',
            }
        },
        'debug_string': {
            'threat_class': ThreatDescriptor,
            'query_params': {
                'type_': 'DEBUG_STRING',
            }
        },
        'banner': {
            'threat_class': ThreatDescriptor,
            'query_params': {
                'type_': 'BANNER',
            }
        },
        'email_address': {
            'threat_class': ThreatDescriptor,
            'query_params': {
                'type_': 'EMAIL_ADDRESS',
            }
        },
        'file_created': {
            'threat_class': ThreatDescriptor,
            'query_params': {
                'type_': 'FILE_CREATED',
            }
        },
    }
    for param_key, param in params.iteritems():
        # bound every query to the [since_date, now] window
        param['query_params']['since'] = str(since_date)
        param['query_params']['until'] = str(current_timestamp)
        param['query_params']['dict_generator'] = True
        docs = pull_threat_exchange_data(param_key, param)
        logger.debug('Saving {0} {1} to ES'.format(len(docs), param_key))
        for doc in docs:
            client.save_object(
                index='threat-exchange', doc_type=param_key, body=doc)

    # persist the new high-water mark for the next run
    state.data['lastrun'] = current_timestamp
    state.save()
def keyMapping(aDict):
    '''map common key/fields to a normalized structure,
    explicitly typed when possible to avoid schema changes for upsteam consumers
    Special accomodations made for logstash,nxlog, beaver, heka and CEF
    Some shippers attempt to conform to logstash-style @fieldname convention.
    This strips the leading at symbol since it breaks some elastic search
    libraries like elasticutils.

    Returns the normalized dict, or None when the input cannot be processed.

    Fix: `k in ('payload')` and `k in ('tags')` were substring tests
    against a plain string (missing tuple comma), so keys like 'pay' or
    'ag' matched spuriously; both are now one-element tuples.
    '''
    returndict = dict()

    # uncomment to save the source event for debugging, or chain of custody/forensics
    # returndict['original']=aDict

    # set the timestamp when we received it, i.e. now
    returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    returndict['mozdefhostname'] = options.mozdefhostname
    returndict['details'] = {}
    try:
        for k, v in aDict.iteritems():
            k = removeAt(k).lower()

            if k in ('message', 'summary'):
                returndict[u'summary'] = toUnicode(v)

            # BUGFIX: ('payload') is just the string 'payload'; membership
            # would be a substring test. Use a one-element tuple.
            if k in ('payload',) and 'summary' not in aDict.keys():
                # special case for heka if it sends payload as well as
                # a summary, keep both but move payload to the details section.
                returndict[u'summary'] = toUnicode(v)
            elif k in ('payload',):
                returndict[u'details']['payload'] = toUnicode(v)

            if k in ('eventtime', 'timestamp', 'utctimestamp'):
                returndict[u'utctimestamp'] = toUTC(v).isoformat()
                returndict[u'timestamp'] = toUTC(v).isoformat()

            if k in ('hostname', 'source_host', 'host'):
                returndict[u'hostname'] = toUnicode(v)

            if k in ('tags',):
                if len(v) > 0:
                    returndict[u'tags'] = v

            # nxlog keeps the severity name in syslogseverity,
            # everyone else should use severity or level.
            if k in ('syslogseverity', 'severity', 'severityvalue', 'level'):
                returndict[u'severity'] = toUnicode(v).upper()

            if k in ('facility', 'syslogfacility'):
                returndict[u'facility'] = toUnicode(v)

            if k in ('pid', 'processid'):
                returndict[u'processid'] = toUnicode(v)

            # nxlog sets sourcename to the processname (i.e. sshd),
            # everyone else should call it process name or pname
            if k in ('pname', 'processname', 'sourcename'):
                returndict[u'processname'] = toUnicode(v)

            # the file, or source
            if k in ('path', 'logger', 'file'):
                returndict[u'eventsource'] = toUnicode(v)

            if k in ('type', 'eventtype', 'category'):
                returndict[u'category'] = toUnicode(v)

            # custom fields as a list/array
            if k in ('fields', 'details'):
                if type(v) is not dict:
                    returndict[u'details'][u'message'] = v
                else:
                    if len(v) > 0:
                        for details_key, details_value in v.iteritems():
                            returndict[u'details'][details_key] = details_value

            # custom fields/details as a one off, not in an array
            # i.e. fields.something=value or details.something=value
            # move them to a dict for consistency in querying
            if k.startswith('fields.') or k.startswith('details.'):
                newName = k.replace('fields.', '')
                newName = newName.lower().replace('details.', '')
                # add field with a special case for shippers that
                # don't send details in an array as int/floats/strings
                # we let them dictate the data type with field_datatype
                # convention
                if newName.endswith('_int'):
                    returndict[u'details'][unicode(newName)] = int(v)
                elif newName.endswith('_float'):
                    returndict[u'details'][unicode(newName)] = float(v)
                else:
                    returndict[u'details'][unicode(newName)] = toUnicode(v)

            # nxlog windows log handling
            if 'Domain' in aDict.keys() and 'SourceModuleType' in aDict.keys():
                # nxlog parses all windows event fields very well
                # copy all fields to details
                returndict[u'details'][k] = v

        if 'utctimestamp' not in returndict.keys():
            # default in case we don't find a reasonable timestamp
            returndict['utctimestamp'] = toUTC(datetime.now()).isoformat()

    except Exception as e:
        logger.exception('Exception normalizing the message %r' % e)
        logger.error('Malformed message dict: %r' % aDict)
        return None

    return returndict
def makeAttackers():
    '''
    Send demo events that will be correlated into attackers
    using pre-defined IPs.

    Runs only if options.attackersminutesinterval has elapsed since the
    last run (options.lastattacker); queued events are posted via a
    background postLogs process.
    '''
    try:
        # time for us to run?
        timetoRun = toUTC(options.lastattacker) + timedelta(
            minutes=options.attackersminutesinterval)
        if timetoRun > toUTC(datetime.now()):
            return

        eventfiles = glob.glob(options.alertsglob)
        # pick a random number of events to send
        for i in range(0, options.alertscount):
            # pick a random type of event to send
            eventfile = random.choice(eventfiles)
            # use a context manager so the file handle is not leaked
            with open(eventfile) as f:
                events = json.load(f)
            if not events:
                # empty fixture file: nothing to send
                continue
            # BUGFIX: random.randint is inclusive on BOTH endpoints, so the
            # previous randint(0, len(events)) could return len(events) and
            # the slice below would be empty, silently sending nothing.
            # Picking from 0..len-1 also makes the old special-case hack for
            # single-event files unnecessary.
            target = random.randint(0, len(events) - 1)
            for event in events[target:target + 1]:
                event['timestamp'] = pytz.timezone('UTC').localize(
                    datetime.utcnow()).isoformat()
                # remove stored times
                if 'utctimestamp' in event.keys():
                    del event['utctimestamp']
                if 'receivedtimestamp' in event.keys():
                    del event['receivedtimestamp']
                # add demo to the tags so it's clear it's not real data.
                if 'tags' not in event.keys():
                    event['tags'] = list()
                event['tags'].append('demodata')
                event['tags'].append('demoalert')
                # replace potential <randomipaddress> with a random ip address
                if 'summary' in event.keys() and '<randomipaddress>' in event['summary']:
                    randomIP = genAttackerIPv4()
                    event['summary'] = event['summary'].replace(
                        "<randomipaddress>", randomIP)
                    if 'details' not in event.keys():
                        event['details'] = dict()
                    event['details']['sourceipaddress'] = randomIP
                    event['details']['sourceipv4address'] = randomIP
                if 'duplicate' in event.keys():
                    # send this event multiple times to trigger an alert
                    for x in range(0, int(event['duplicate'])):
                        logcache.put(json.dumps(event))
                else:
                    logcache.put(json.dumps(event))
                lastattacker = toUTC(datetime.now()).isoformat()
                setConfig('lastattacker', lastattacker, options.configfile)
            if not logcache.empty():
                time.sleep(.01)
                try:
                    postingProcess = Process(
                        target=postLogs,
                        args=(logcache,),
                        name="json2MozdefDemoData")
                    postingProcess.start()
                except OSError as e:
                    if e.errno == 35:
                        # resource temporarily unavailable: best effort,
                        # just report and let the next run retry
                        print(e)
                    else:
                        logger.error('%r' % e)
    except KeyboardInterrupt as e:
        sys.exit(1)
def on_message(self, body, message): try: # just to be safe..check what we were sent. if isinstance(body, dict): bodyDict = body elif isinstance(body, str) or isinstance(body, unicode): try: bodyDict = json.loads(body) # lets assume it's json except ValueError as e: # not json..ack but log the message logger.exception( "mozdefbot_slack exception: unknown body type received %r" % body) return else: logger.exception( "mozdefbot_slack exception: unknown body type received %r" % body) return if 'notify_mozdefbot' in bodyDict and bodyDict[ 'notify_mozdefbot'] is False: # If the alert tells us to not notify, then don't post message message.ack() return # process valid message # see where we send this alert channel = options.alert_channel if 'channel' in bodyDict.keys(): if bodyDict['channel'] in options.channels: channel = bodyDict['channel'] # see if we need to delay a bit before sending the alert, to avoid # flooding the channel if self.lastalert is not None: delta = toUTC(datetime.now()) - self.lastalert sys.stdout.write( 'new alert, delta since last is {}\n'.format(delta)) if delta.seconds < 2: sys.stdout.write('throttling before writing next alert\n') time.sleep(1) self.lastalert = toUTC(datetime.now()) if len(bodyDict['summary']) > 450: sys.stdout.write('alert is more than 450 bytes, truncating\n') bodyDict[ 'summary'] = bodyDict['summary'][:450] + ' truncated...' severity = bodyDict['severity'].upper() if severity == 'CRITICAL': self.bot.post_critical_message(formatAlert(bodyDict), channel) elif severity == 'WARNING': self.bot.post_warning_message(formatAlert(bodyDict), channel) elif severity == 'INFO': self.bot.post_info_message(formatAlert(bodyDict), channel) elif severity == 'NOTICE': self.bot.post_notice_message(formatAlert(bodyDict), channel) else: self.bot.post_unknown_severity_message(formatAlert(bodyDict), channel) message.ack() except ValueError as e: logger.exception( "mozdefbot_slack exception while processing events queue %r" % e)
def main():
    """
    Poll the Okta events API for records newer than the stored lastrun
    watermark, normalize each into a mozdef event, and save it to
    Elasticsearch. The watermark is only advanced after a successful fetch.
    """
    if options.output=='syslog':
        logger.addHandler(SysLogHandler(address=(options.sysloghostname,options.syslogport)))
    else:
        sh=logging.StreamHandler(sys.stderr)
        sh.setFormatter(formatter)
        logger.addHandler(sh)
    logger.debug('started')
    #logger.debug(options)
    try:
        es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
        # session with Okta API token auth (SSWS scheme)
        s = requests.Session()
        s.headers.update({'Accept': 'application/json'})
        s.headers.update({'Content-type': 'application/json'})
        s.headers.update({'Authorization':'SSWS {0}'.format(options.apikey)})
        #capture the time we start running so next time we catch any events created while we run.
        state = State(options.state_file)
        lastrun = toUTC(datetime.now()).isoformat()
        #in case we don't archive files..only look at today and yesterday's files.
        # NOTE(review): yesterday/today are computed but never used below —
        # presumably leftovers from a file-archive code path; confirm and remove.
        yesterday=date.strftime(datetime.utcnow()-timedelta(days=1),'%Y/%m/%d')
        today = date.strftime(datetime.utcnow(),'%Y/%m/%d')
        # fetch events published since the previous run's watermark
        r = s.get('https://{0}/api/v1/events?startDate={1}&limit={2}'.format(
            options.oktadomain,
            toUTC(state.data['lastrun']).strftime('%Y-%m-%dT%H:%M:%S.000Z'),
            options.recordlimit
        ))
        if r.status_code == 200:
            oktaevents = json.loads(r.text)
            for event in oktaevents:
                if 'published' in event.keys():
                    # skip anything at or before the watermark
                    if toUTC(event['published']) > toUTC(state.data['lastrun']):
                        try:
                            mozdefEvent = dict()
                            mozdefEvent['utctimestamp']=toUTC(event['published']).isoformat()
                            mozdefEvent['category'] = 'okta'
                            mozdefEvent['tags'] = ['okta']
                            if 'action' in event.keys() and 'message' in event['action'].keys():
                                mozdefEvent['summary'] = event['action']['message']
                            mozdefEvent['details'] = event
                            # Actor parsing
                            # While there are various objectTypes attributes, we just take any attribute that matches
                            # in case Okta changes it's structure around a bit
                            # This means the last instance of each attribute in all actors will be recorded in mozdef
                            # while others will be discarded
                            # Which ends up working out well in Okta's case.
                            if 'actors' in event.keys():
                                for actor in event['actors']:
                                    if 'ipAddress' in actor.keys():
                                        mozdefEvent['details']['sourceipaddress'] = actor['ipAddress']
                                    if 'login' in actor.keys():
                                        mozdefEvent['details']['username'] = actor['login']
                                    if 'requestUri' in actor.keys():
                                        mozdefEvent['details']['source_uri'] = actor['requestUri']
                            # We are renaming action to activity because there are
                            # currently mapping problems with the details.action field
                            mozdefEvent['details']['activity'] = mozdefEvent['details']['action']
                            mozdefEvent['details'].pop('action')
                            jbody=json.dumps(mozdefEvent)
                            res = es.save_event(doc_type='okta',body=jbody)
                            logger.debug(res)
                        except Exception as e:
                            # one bad record shouldn't stop the batch
                            logger.error('Error handling log record {0} {1}'.format(r, e))
                            continue
                else:
                    logger.error('Okta event does not contain published date: {0}'.format(event))
            # advance the watermark only after a successful fetch
            state.data['lastrun'] = lastrun
            state.write_state_file()
        else:
            logger.error('Could not get Okta events HTTP error code {} reason {}'.format(r.status_code, r.reason))
    except Exception as e:
        logger.error("Unhandled exception, terminating: %r"%e)
def test_unparseable_suspectedDate(self): with pytest.raises(ValueError): toUTC("This is not a date")
def test_float_epoch(self): result = toUTC(1468443523.0) self.result_is_datetime(result) assert str(result) == '2016-07-13 15:58:43+00:00'
def test_long_epoch_without_timezone(self): result = toUTC(1468443523000000000) self.result_is_datetime(result) assert str(result) == '2016-07-13 15:58:43+00:00'
def test_normal_date_str_with_timezone(self): result = toUTC("2016-07-13 14:33:31.625443-08:00") self.result_is_datetime(result) assert str(result) == '2016-07-13 22:33:31.625443+00:00'
def searchMongoAlerts(mozdefdb):
    """
    Aggregate source IPs out of the most recent alerts and create or update
    attacker records for the commonly seen ones.

    For each frequently seen public IPv4 (expanded to a /24 indicator):
    create a new attacker linked to its matching alerts, or append newly
    matched alerts to the existing attacker, refresh its geoIP info, and
    optionally auto-categorize it from its alerts' categories.
    """
    attackers = mozdefdb['attackers']
    alerts = mozdefdb['alerts']
    # search the last X alerts for IP addresses
    # aggregated by CIDR mask/24
    # aggregate IPv4 addresses in the most recent alerts
    # to find common attackers.
    ipv4TopHits = alerts.aggregate([
        {"$sort": {"utcepoch": -1}},  # reverse sort the current alerts
        {"$limit": 100},  # most recent 100
        {"$match": {"events.documentsource.details.sourceipaddress": {"$exists": True}}},  # must have an ip address
        {"$match": {"attackerid": {"$exists": False}}},  # must not be already related to an attacker
        {"$unwind": "$events"},  # make each event into it's own doc
        {"$project": {"_id": 0, "sourceip": "$events.documentsource.details.sourceipaddress"}},  # emit the source ip only
        {"$group": {"_id": "$sourceip", "hitcount": {"$sum": 1}}},  # count by ip
        {"$match": {"hitcount": {"$gt": 5}}},  # limit to those with X observances
        {"$sort": SON([("hitcount", -1), ("_id", -1)])},  # sort
        {"$limit": 10}  # top 10
    ])
    # NOTE(review): indexing aggregate() with ['result'] assumes the
    # pre-3.0 pymongo dict return value — confirm the pinned pymongo version.
    for ip in ipv4TopHits['result']:
        # sanity check ip['_id'] which should be the ipv4 address
        if isIPv4(ip['_id']) and ip['_id'] not in netaddr.IPSet(['0.0.0.0']):
            ipcidr = netaddr.IPNetwork(ip['_id'])
            # expand it to a /24 CIDR
            # todo: lookup ipwhois for asn_cidr value
            # potentially with a max mask value (i.e. asn is /8, limit attackers to /24)
            ipcidr.prefixlen = 24
            # append to or create attacker.
            # does this match an existing attacker's indicators
            if not ipcidr.ip.is_loopback() and not ipcidr.ip.is_private() and not ipcidr.ip.is_reserved():
                logger.debug('Searching for existing attacker with ip ' + str(ipcidr))
                attacker = attackers.find_one({'indicators.ipv4address': str(ipcidr)})
                if attacker is None:
                    logger.debug('Attacker not found, creating new one')
                    # new attacker
                    # generate a meteor-compatible ID
                    # save the ES document type, index, id
                    newAttacker = genNewAttacker()
                    # str to get the ip/cidr rather than netblock cidr.
                    # i.e. '1.2.3.4/24' not '1.2.3.0/24'
                    newAttacker['indicators'].append(dict(ipv4address=str(ipcidr)))
                    matchingalerts = alerts.find(
                        {"events.documentsource.details.sourceipaddress": str(ipcidr.ip),
                         })
                    total_events = 0
                    if matchingalerts is not None:
                        # update list of alerts this attacker matched.
                        for alert in matchingalerts:
                            newAttacker['alerts'].append(
                                dict(alertid=alert['_id'])
                            )
                            # update alert with attackerID
                            alert['attackerid'] = newAttacker['_id']
                            alerts.save(alert)
                            total_events += len(alert['events'])
                            if len(alert['events']) > 0:
                                # track the most recent event time seen
                                newAttacker['lastseentimestamp'] = toUTC(alert['events'][-1]['documentsource']['utctimestamp'])
                        newAttacker['alertscount'] = len(newAttacker['alerts'])
                        newAttacker['eventscount'] = total_events
                    attackers.insert(newAttacker)
                    # update geoIP info from the latest matching alert's
                    # first event document
                    latestGeoIP = [a['events'] for a in alerts.find(
                        {"events.documentsource.details.sourceipaddress": str(ipcidr.ip),
                         })][-1][0]['documentsource']
                    updateAttackerGeoIP(mozdefdb, newAttacker['_id'], latestGeoIP)
                    if options.broadcastattackers:
                        broadcastAttacker(newAttacker)
                else:
                    logger.debug('Found existing attacker')
                    # if alert not present in this attackers list
                    # append this to the list
                    # todo: trim the list at X (i.e. last 100)
                    # search alerts without attackerid
                    matchingalerts = alerts.find(
                        {"events.documentsource.details.sourceipaddress": str(ipcidr.ip),
                         "attackerid": {"$exists": False}
                         })
                    if matchingalerts is not None:
                        logger.debug('Matched alert with attacker')
                        # update list of alerts this attacker matched.
                        for alert in matchingalerts:
                            attacker['alerts'].append(
                                dict(alertid=alert['_id'])
                            )
                            # update alert with attackerID
                            alert['attackerid'] = attacker['_id']
                            alerts.save(alert)
                            attacker['eventscount'] += len(alert['events'])
                            attacker['lastseentimestamp'] = toUTC(alert['events'][-1]['documentsource']['utctimestamp'])
                            # geo ip could have changed, update it to the latest
                            updateAttackerGeoIP(mozdefdb, attacker['_id'], alert['events'][-1]['documentsource'])
                        # update counts
                        attacker['alertscount'] = len(attacker['alerts'])
                        attackers.save(attacker)
                    # should we autocategorize the attacker
                    # based on their alerts?
                    if attacker['category'] == 'unknown' and options.autocategorize:
                        # take a look at recent alerts for this attacker
                        # and if they are all the same category
                        # auto-categorize the attacker
                        matchingalerts = alerts.find(
                            {"attackerid": attacker['_id']}
                        ).sort('utcepoch', -1).limit(50)
                        # summarize the alert categories
                        # returns list of tuples: [(u'bruteforce', 8)]
                        categoryCounts = mostCommon(matchingalerts, 'category')
                        # are the alerts all the same category?
                        if len(categoryCounts) == 1:
                            # is the alert category mapped to an attacker category?
                            # NOTE(review): category.keys()[0] relies on Python 2
                            # dict.keys() returning a list.
                            for category in options.categorymapping:
                                if category.keys()[0] == categoryCounts[0][0]:
                                    attacker['category'] = category[category.keys()[0]]
                                    attackers.save(attacker)
def test_abnormal_date_str_without_timezone(self): result = toUTC("Jan 2 08:01:57") self.result_is_datetime(result) assert str(result) == str(date.today().year) + '-01-02 08:01:57+00:00'
def main():
    '''
    Get health and status stats and post to ES
    Post both as a historical reference (for charts)
    and as a static docid (for realtime current health/EPS displays)
    '''
    logger.debug('starting')
    logger.debug(options)
    es = ElasticsearchClient(
        (list('{0}'.format(s) for s in options.esservers)))
    try:
        # RabbitMQ management API credentials
        auth = HTTPBasicAuth(options.mquser, options.mqpassword)
        for server in options.mqservers:
            logger.debug('checking message queues on {0}'.format(server))
            r = requests.get('http://{0}:{1}/api/queues'.format(
                server, options.mqapiport), auth=auth)
            mq = r.json()
            # setup a log entry for health/status.
            healthlog = dict(utctimestamp=toUTC(datetime.now()).isoformat(),
                             hostname=server,
                             processid=os.getpid(),
                             processname=sys.argv[0],
                             severity='INFO',
                             summary='mozdef health/status',
                             category='mozdef',
                             tags=[],
                             details=[])
            # NOTE(review): the username value appears masked in this copy of
            # the source; confirm against the original file.
            healthlog['details'] = dict(username='******')
            healthlog['details']['loadaverage'] = list(os.getloadavg())
            healthlog['details']['queues'] = list()
            # aggregate events-per-second and backlog totals across all queues
            healthlog['details']['total_deliver_eps'] = 0
            healthlog['details']['total_publish_eps'] = 0
            healthlog['details']['total_messages_ready'] = 0
            healthlog['tags'] = ['mozdef', 'status']
            for m in mq:
                if 'message_stats' in m.keys() and isinstance(
                        m['message_stats'], dict):
                    if 'messages_ready' in m.keys():
                        mready = m['messages_ready']
                        healthlog['details']['total_messages_ready'] += m[
                            'messages_ready']
                    else:
                        mready = 0
                    if 'messages_unacknowledged' in m.keys():
                        munack = m['messages_unacknowledged']
                    else:
                        munack = 0
                    queueinfo = dict(queue=m['name'],
                                     vhost=m['vhost'],
                                     messages_ready=mready,
                                     messages_unacknowledged=munack)
                    if 'deliver_details' in m['message_stats'].keys():
                        queueinfo['deliver_eps'] = round(
                            m['message_stats']['deliver_details']['rate'], 2)
                        healthlog['details']['total_deliver_eps'] += round(
                            m['message_stats']['deliver_details']['rate'], 2)
                    # NOTE(review): when both deliver_details and
                    # deliver_no_ack_details are present, this second branch
                    # OVERWRITES queueinfo['deliver_eps'] instead of summing,
                    # while the total below does add both — confirm intended.
                    if 'deliver_no_ack_details' in m['message_stats'].keys():
                        queueinfo['deliver_eps'] = round(
                            m['message_stats']['deliver_no_ack_details']
                            ['rate'], 2)
                        healthlog['details']['total_deliver_eps'] += round(
                            m['message_stats']['deliver_no_ack_details']
                            ['rate'], 2)
                    if 'publish_details' in m['message_stats'].keys():
                        queueinfo['publish_eps'] = round(
                            m['message_stats']['publish_details']['rate'], 2)
                        healthlog['details']['total_publish_eps'] += round(
                            m['message_stats']['publish_details']['rate'], 2)
                    healthlog['details']['queues'].append(queueinfo)
            # post to elastic search servers directly without going through
            # message queues in case there is an availability issue
            es.save_event(doc_type='mozdefhealth', body=json.dumps(healthlog))
            # post another doc with a static docid and tag
            # for use when querying for the latest status
            healthlog['tags'] = ['mozdef', 'status', 'latest']
            es.save_event(doc_type='mozdefhealth',
                          doc_id=getDocID(server),
                          body=json.dumps(healthlog))
    except Exception as e:
        logger.error("Exception %r when gathering health and status " % e)
def test_abnormal_date_obj_with_timezone_in_date(self): result = toUTC(parse("2016-01-02 08:01:57+06:00")) self.result_is_datetime(result) assert str(result) == '2016-01-02 02:01:57+00:00'
def keyMapping(aDict):
    '''map common key/fields to a normalized structure,
    explicitly typed when possible to avoid schema changes for upsteam consumers

    Special accomodations made for logstash, nxlog, beaver, heka and CEF.
    Some shippers attempt to conform to logstash-style @fieldname convention;
    the leading at symbol is stripped since it breaks some elastic search
    libraries like elasticutils.

    CloudTrail flavor: also derives a summary, an eventVerb and an
    eventReadOnly flag from the CloudTrail eventName. Returns the
    normalized event dict.
    '''
    returndict = dict()
    returndict['source'] = 'cloudtrail'
    returndict['details'] = {}
    returndict['category'] = 'cloudtrail'
    returndict['processid'] = str(os.getpid())
    returndict['processname'] = sys.argv[0]
    returndict['severity'] = 'INFO'
    if 'sourceIPAddress' in aDict and 'eventName' in aDict and 'eventSource' in aDict:
        summary_str = "{0} performed {1} in {2}".format(
            aDict['sourceIPAddress'],
            aDict['eventName'],
            aDict['eventSource']
        )
        returndict['summary'] = summary_str
    if 'eventName' in aDict:
        # Uppercase first character so the verb regex matches consistently
        aDict['eventName'] = aDict['eventName'][0].upper() + aDict['eventName'][1:]
        returndict['details']['eventVerb'] = CLOUDTRAIL_VERB_REGEX.findall(
            aDict['eventName'])[0]
        returndict['details']['eventReadOnly'] = (
            returndict['details']['eventVerb'] in ['Describe', 'Get', 'List'])
    # set the timestamp when we received it, i.e. now
    returndict['receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    returndict['mozdefhostname'] = options.mozdefhostname
    try:
        for k, v in aDict.iteritems():
            k = removeAt(k).lower()

            if k == 'sourceip':
                returndict[u'details']['sourceipaddress'] = v
            elif k == 'sourceipaddress':
                returndict[u'details']['sourceipaddress'] = v
            # NOTE: 'facility' is consumed here as the source; the later
            # ('facility', 'syslogfacility') branch can only match
            # 'syslogfacility'.
            elif k == 'facility':
                returndict[u'source'] = v
            # BUGFIX: the single-name membership tests in this chain were
            # written as `k in ('eventsource')` etc., which is a substring
            # test against a *string* (e.g. k == 'source' matched
            # 'eventsource'). Single-element tuples need a trailing comma.
            elif k in ('eventsource',):
                returndict[u'hostname'] = v
            elif k in ('message', 'summary'):
                returndict[u'summary'] = toUnicode(v)
            elif k in ('payload',) and 'summary' not in aDict.keys():
                # special case for heka if it sends payload as well as a summary,
                # keep both but move payload to the details section.
                returndict[u'summary'] = toUnicode(v)
            elif k in ('payload',):
                returndict[u'details']['payload'] = toUnicode(v)
            elif k in ('eventtime', 'timestamp', 'utctimestamp', 'date'):
                returndict[u'utctimestamp'] = toUTC(v).isoformat()
                returndict[u'timestamp'] = toUTC(v).isoformat()
            elif k in ('hostname', 'source_host', 'host'):
                returndict[u'hostname'] = toUnicode(v)
            elif k in ('tags',):
                if 'tags' not in returndict.keys():
                    returndict[u'tags'] = []
                if type(v) == list:
                    returndict[u'tags'] += v
                else:
                    if len(v) > 0:
                        returndict[u'tags'].append(v)
            # nxlog keeps the severity name in syslogseverity, everyone else
            # should use severity or level.
            elif k in ('syslogseverity', 'severity', 'severityvalue', 'level', 'priority'):
                returndict[u'severity'] = toUnicode(v).upper()
            elif k in ('facility', 'syslogfacility'):
                returndict[u'facility'] = toUnicode(v)
            elif k in ('pid', 'processid'):
                returndict[u'processid'] = toUnicode(v)
            # nxlog sets sourcename to the processname (i.e. sshd),
            # everyone else should call it process name or pname
            elif k in ('pname', 'processname', 'sourcename', 'program'):
                returndict[u'processname'] = toUnicode(v)
            # the file, or source
            elif k in ('path', 'logger', 'file'):
                returndict[u'eventsource'] = toUnicode(v)
            elif k in ('type', 'eventtype', 'category'):
                returndict[u'category'] = toUnicode(v)
            # custom fields as a list/array
            elif k in ('fields', 'details'):
                if type(v) is not dict:
                    returndict[u'details'][u'message'] = v
                else:
                    if len(v) > 0:
                        for details_key, details_value in v.iteritems():
                            returndict[u'details'][details_key] = details_value
            # custom fields/details as a one off, not in an array
            # i.e. fields.something=value or details.something=value
            # move them to a dict for consistency in querying
            elif k.startswith('fields.') or k.startswith('details.'):
                newName = k.replace('fields.', '')
                newName = newName.lower().replace('details.', '')
                # add a dict to hold the details if it doesn't exist
                if 'details' not in returndict.keys():
                    returndict[u'details'] = dict()
                # add field with a special case for shippers that don't send
                # details in an array as int/floats/strings; we let them
                # dictate the data type with the field_datatype convention
                if newName.endswith('_int'):
                    returndict[u'details'][unicode(newName)] = int(v)
                elif newName.endswith('_float'):
                    returndict[u'details'][unicode(newName)] = float(v)
                else:
                    returndict[u'details'][unicode(newName)] = toUnicode(v)
            else:
                # anything unrecognized is preserved under details
                returndict[u'details'][k] = v

        if 'utctimestamp' not in returndict.keys():
            # default in case we don't find a reasonable timestamp
            returndict['utctimestamp'] = toUTC(datetime.now()).isoformat()
    except Exception as e:
        logger.exception(e)
        logger.error('Malformed message: %r' % aDict)

    return returndict
def test_short_epoch_without_timezone(self): result = toUTC(1468443523) self.result_is_datetime(result) assert str(result) == '2016-07-13 20:58:43+00:00'
def onMessage(self, message, metadata):
    """
    Ensure all messages have the mandatory mozdef fields.

    Only touches messages that look fluentd-SQS-forwarded (carry an 'az',
    'instance_id' or '__tag' key); everything else passes through untouched.
    Fills in summary/utctimestamp/hostname/processname/processid/severity
    defaults, classifies ec2.forward* tags as syslog, and finally moves any
    non-mandatory top-level keys under details.
    Returns the (message, metadata) tuple expected by the plugin chain.
    """
    # Making sufficiently sure this is a fluentd-forwarded message from
    # fluentd SQS plugin, so that we don't spend too much time on other
    # message types
    if ('az' not in message.keys() and
            'instance_id' not in message.keys() and
            '__tag' not in message.keys()):
        return (message, metadata)

    if 'details' not in message.keys():
        message['details'] = dict()

    if 'summary' not in message.keys() and 'message' in message.keys():
        message['summary'] = message['message']

    if 'utctimestamp' not in message.keys() and 'time' in message.keys():
        message['utctimestamp'] = toUTC(message['time']).isoformat()

    # Bro format of {u'Timestamp': 1.482437837e+18}
    if 'utctimestamp' not in message.keys() and 'Timestamp' in message.keys():
        message['utctimestamp'] = toUTC(message['Timestamp']).isoformat()

    # host is used to store dns-style-ip entries in AWS, for ex
    # ip-10-162-8-26 is 10.162.8.26. obviously there is no strong guarantee
    # that this is always trusted. It's better than nothing though. At the
    # time of writing, there is no ipv6 support AWS-side for this kind of
    # field. It may be overridden later by a better field, if any exists
    if 'host' in message.keys():
        tmp = message['host']
        if tmp.startswith('ip-'):
            ipText = tmp.split('ip-')[1].replace('-', '.')
            if isIPv4(ipText):
                if 'destinationipaddress' not in message.keys():
                    message['details']['destinationipaddress'] = ipText
                if 'destinationipv4address' not in message.keys():
                    message['details']['destinationipv4address'] = ipText
            else:
                message['details']['destinationipaddress'] = '0.0.0.0'
                message['details']['destinationipv4address'] = '0.0.0.0'
                addError(
                    message,
                    'plugin: {0} error: {1}:{2}'.format(
                        'fluentSqsFixUp.py',
                        'destinationipaddress is invalid', ipText))
        if 'hostname' not in message.keys():
            message['hostname'] = tmp

    # All messages with __tag 'ec2.forward*' are actually syslog forwarded
    # messages, so classify as such
    if '__tag' in message.keys():
        tmp = message['__tag']
        if tmp.startswith('ec2.forward'):
            message['category'] = 'syslog'
            message['source'] = 'syslog'

    if 'ident' in message.keys():
        tmp = message['ident']
        message['details']['program'] = tmp
        if ('processname' not in message.keys() and
                'program' in message['details'].keys()):
            message['processname'] = message['details']['program']
        # BUGFIX: this used to be `if (no processid and pid): ... else:
        # processid = 0`, whose else-branch CLOBBERED an already-present
        # processid with 0. Only supply a default when the field is missing.
        if 'processid' not in message.keys():
            if 'pid' in message.keys():
                message['processid'] = message['pid']
            else:
                message['processid'] = 0  # Unknown really, but this field is mandatory.

    if 'severity' not in message.keys():
        message['severity'] = 'INFO'

    # We already have the time of event stored in 'timestamp' so we don't
    # need 'time'
    if 'time' in message.keys():
        message.pop('time')

    # Any remaining keys which aren't mandatory fields should be moved
    # to details
    # https://mozdef.readthedocs.io/en/latest/usage.html#mandatory-fields
    # iterate a snapshot of the keys since the dict is mutated in the loop
    for key in list(message.keys()):
        if key not in [
                'summary', 'utctimestamp', 'hostname', 'category', 'source',
                'processname', 'processid', 'severity', 'tags', 'details']:
            message['details'][key] = message[key]
            message.pop(key)

    return (message, metadata)
def test_long_float_epoch(self): result = toUTC(1.468443523e+18) self.result_is_datetime(result) assert str(result) == '2016-07-13 20:58:43+00:00'
def on_message(self, message): try: # default elastic search metadata for an event metadata = {'index': 'events', 'doc_type': 'event', 'id': None} event = {} event['receivedtimestamp'] = toUTC(datetime.now()).isoformat() event['mozdefhostname'] = self.options.mozdefhostname if 'tags' in event: event['tags'].extend([self.options.taskexchange]) else: event['tags'] = [self.options.taskexchange] event['severity'] = 'INFO' # Set defaults event['processid'] = '' event['processname'] = '' event['category'] = 'syslog' for message_key, message_value in message.iteritems(): if 'Message' == message_key: try: message_json = json.loads(message_value) for inside_message_key, inside_message_value in message_json.iteritems( ): if inside_message_key in ('processid', 'pid'): processid = str(inside_message_value) processid = processid.replace('[', '') processid = processid.replace(']', '') event['processid'] = processid elif inside_message_key in ('pname'): event['processname'] = inside_message_value elif inside_message_key in ('hostname'): event['hostname'] = inside_message_value elif inside_message_key in ('time', 'timestamp'): event['timestamp'] = toUTC( inside_message_value).isoformat() event['utctimestamp'] = toUTC( event['timestamp']).astimezone( pytz.utc).isoformat() elif inside_message_key in ('type'): event['category'] = inside_message_value elif inside_message_key in ('payload', 'message'): event['summary'] = inside_message_value else: if 'details' not in event: event['details'] = {} event['details'][ inside_message_key] = inside_message_value except ValueError: event['summary'] = message_value (event, metadata) = sendEventToPlugins(event, metadata, self.pluginList) # Drop message if plugins set to None if event is None: return self.save_event(event, metadata) except Exception as e: logger.exception(e) logger.error('Malformed message: %r' % message)
def test_float_epoch_milliseconds(self): result = toUTC(1.468443523e+11) self.result_is_datetime(result) assert str(result) == '2016-07-13 20:58:43+00:00'
def test_eve_log_alert_flow(self):
    # Suricata eve-log 'alert' event for a DNS flow. The plugin should take
    # timestamps from flow.start, hoist the flow counters into renamed
    # details.* fields, map app_proto to details.service, and build the
    # summary from the src/dest ip:port pair.
    event = {
        'customendpoint': '',
        'category': 'suricata',
        'SOURCE': 'eve-log',
        'event_type': 'alert'
    }
    MESSAGE = {
        "timestamp": "2018-09-12T22:24:09.546736+0000",
        "flow_id": 1484802709084080,
        "in_iface": "enp216s0f0",
        "event_type": "alert",
        "vlan": 75,
        "src_ip": "10.48.240.19",
        "src_port": 44741,
        "dest_ip": "10.48.75.120",
        "dest_port": 53,
        "proto": "017",
        "alert": {
            "action": "allowed",
            "gid": 1,
            "signature_id": 2500003,
            "rev": 1,
            "signature": "SURICATA DNS Query to a Suspicious *.ws Domain",
            "category": "",
            "severity": 3
        },
        "app_proto": "dns",
        "flow": {
            "pkts_toserver": 2,
            "pkts_toclient": 0,
            "bytes_toserver": 150,
            "bytes_toclient": 0,
            "start": "2018-09-12T22:24:09.546736+0000"
        },
        "payload": "qFEBAAABAAAAAAAAA3d3dwhpbnNlY3VyZQJ3cwAAHAAB",
        "payload_printable": ".Q...........www.insecure.ws.....",
        "stream": 0,
        "packet": "AABeAAEKtAwl4EAQCABFAAA9M+5AAD4RuNYKMPATCjBLeK7FADUAKZFMqFEBAAABAAAAAAAAA3d3dwhpbnNlY3VyZQJ3cwAAHAAB",
        "packet_info": {
            "linktype": 1
        }
    }
    event['MESSAGE'] = json.dumps(MESSAGE)
    result, metadata = self.plugin.onMessage(event, self.metadata)
    # shared suite assertions for mandatory fields and ES metadata
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    # timestamps should come from the flow start, not the envelope timestamp
    assert toUTC(
        MESSAGE['flow']['start']).isoformat() == result['utctimestamp']
    assert toUTC(
        MESSAGE['flow']['start']).isoformat() == result['timestamp']
    # flow counters are renamed and hoisted into details
    assert result['details']['originipbytes'] == MESSAGE['flow'][
        'bytes_toserver']
    assert result['details']['responseipbytes'] == MESSAGE['flow'][
        'bytes_toclient']
    assert result['details']['orig_pkts'] == MESSAGE['flow'][
        'pkts_toserver']
    assert result['details']['resp_pkts'] == MESSAGE['flow'][
        'pkts_toclient']
    assert result['details']['service'] == MESSAGE['app_proto']
    # the original keys must be removed once renamed/hoisted
    assert 'pkts_toserver' not in result['details']['flow']
    assert 'pkts_toclient' not in result['details']['flow']
    assert 'bytes_toserver' not in result['details']['flow']
    assert 'bytes_toclient' not in result['details']['flow']
    assert 'app_proto' not in result['details']
    assert result['summary'] == '10.48.240.19:44741 -> 10.48.75.120:53'
def onMessage(self, message, metadata):
    """Normalize a Bro (NSM) event into the MozDef event schema.

    Non-Bro events pass through untouched.  For Bro events the JSON
    payload in message['MESSAGE'] is parsed into 'details', common
    fields (timestamps, source/destination endpoints) are hoisted to
    the event root, and a per-logtype human-readable 'summary' is built.

    Returns the (message, metadata) tuple.
    """
    # make sure I really wanted to see this message
    # bail out early if not
    if u'customendpoint' not in message:
        return message, metadata
    if u'category' not in message:
        return message, metadata
    if u'SOURCE' not in message:
        return message, metadata
    if message['category'] != 'bro':
        return message, metadata

    # set the doc type to bro
    # to avoid data type conflicts with other doc types
    # (int v string, etc)
    # index holds documents of type 'type'
    # index -> type -> doc
    metadata['doc_type'] = 'nsm'

    # move Bro specific fields under 'details' while preserving metadata
    newmessage = dict()

    try:
        newmessage['details'] = json.loads(message['MESSAGE'])
    except Exception:
        # unparseable payload: keep the raw event so nothing is lost
        # (was a bare `except:`; Exception avoids eating KeyboardInterrupt)
        newmessage['details'] = {}
        newmessage['rawdetails'] = message

    newmessage['customendpoint'] = 'bro'

    # move some fields that are expected at the event 'root' where they belong
    if 'HOST_FROM' in message:
        newmessage['hostname'] = message['HOST_FROM']
    if 'tags' in message:
        newmessage['tags'] = message['tags']
    if 'category' in message:
        newmessage['category'] = message['category']
    if 'SOURCE' in message:
        # transform bro_files into files fast
        newmessage['source'] = message['SOURCE'][4:]
    if 'resp_cc' in newmessage['details']:
        del(newmessage['details']['resp_cc'])

    # add mandatory fields
    if 'ts' in newmessage['details']:
        newmessage[u'utctimestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
        newmessage[u'timestamp'] = toUTC(float(newmessage['details']['ts'])).isoformat()
        # del(newmessage['details']['ts'])
    else:
        # a malformed message somehow managed to crawl to us, let's put it somewhat together
        newmessage[u'utctimestamp'] = toUTC(datetime.now()).isoformat()
        newmessage[u'timestamp'] = toUTC(datetime.now()).isoformat()

    newmessage[u'receivedtimestamp'] = toUTC(datetime.now()).isoformat()
    newmessage[u'eventsource'] = u'nsm'
    newmessage[u'severity'] = u'INFO'
    newmessage[u'mozdefhostname'] = self.mozdefhostname

    # flatten the Bro id.* connection 4-tuple into flat endpoint fields
    if 'id.orig_h' in newmessage['details']:
        newmessage[u'details'][u'sourceipaddress'] = newmessage['details']['id.orig_h']
        del(newmessage['details']['id.orig_h'])
    if 'id.orig_p' in newmessage['details']:
        newmessage[u'details'][u'sourceport'] = newmessage['details']['id.orig_p']
        del(newmessage['details']['id.orig_p'])
    if 'id.resp_h' in newmessage['details']:
        newmessage[u'details'][u'destinationipaddress'] = newmessage['details']['id.resp_h']
        del(newmessage['details']['id.resp_h'])
    if 'id.resp_p' in newmessage['details']:
        newmessage[u'details'][u'destinationport'] = newmessage['details']['id.resp_p']
        del(newmessage['details']['id.resp_p'])

    # drop transport/syslog artifacts that leaked into the payload
    if 'details' in newmessage:
        if 'FILE_NAME' in newmessage['details']:
            del(newmessage['details']['FILE_NAME'])
        if 'MESSAGE' in newmessage['details']:
            del(newmessage['details']['MESSAGE'])
        if 'SOURCE' in newmessage['details']:
            del(newmessage['details']['SOURCE'])

    # All Bro logs need special treatment, so we provide it
    # Not a known log source? Mark it as such and return
    if 'source' not in newmessage:
        newmessage['source'] = u'unknown'
        return newmessage, metadata
    else:
        logtype = newmessage['source']

        if logtype == 'conn':
            newmessage[u'details'][u'originipbytes'] = newmessage['details']['orig_ip_bytes']
            newmessage[u'details'][u'responseipbytes'] = newmessage['details']['resp_ip_bytes']
            del(newmessage['details']['orig_ip_bytes'])
            del(newmessage['details']['resp_ip_bytes'])
            if 'history' not in newmessage['details']:
                newmessage['details'][u'history'] = ''
            newmessage[u'summary'] = (
                u'{sourceipaddress}:'
                u'{sourceport} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'{history} '
                u'{originipbytes} bytes / '
                u'{responseipbytes} bytes'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'files':
            if 'rx_hosts' in newmessage['details']:
                newmessage['details'][u'sourceipaddress'] = u'{0}'.format(newmessage['details']['rx_hosts'][0])
            if 'tx_hosts' in newmessage['details']:
                newmessage['details'][u'destinationipaddress'] = u'{0}'.format(newmessage['details']['tx_hosts'][0])
            if 'mime_type' not in newmessage['details']:
                newmessage['details'][u'mime_type'] = u'unknown'
            if 'filename' not in newmessage['details']:
                newmessage['details'][u'filename'] = u'unknown'
            if 'total_bytes' not in newmessage['details']:
                newmessage['details'][u'total_bytes'] = u'0'
            if 'md5' not in newmessage['details']:
                newmessage['details'][u'md5'] = u'None'
            if 'filesource' not in newmessage['details']:
                newmessage['details'][u'filesource'] = u'None'
            newmessage[u'summary'] = (
                u'{rx_hosts[0]} '
                u'downloaded (MD5) '
                u'{md5} '
                u'MIME {mime_type} '
                u'({total_bytes} bytes) '
                u'from {tx_hosts[0]} '
                u'via {filesource}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'dns':
            if 'qtype_name' not in newmessage['details']:
                newmessage['details'][u'qtype_name'] = u'unknown'
            if 'query' not in newmessage['details']:
                newmessage['details'][u'query'] = u''
            if 'rcode_name' not in newmessage['details']:
                newmessage['details'][u'rcode_name'] = u''
            newmessage[u'summary'] = (
                u'DNS {qtype_name} type query '
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:{destinationport}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'http':
            if 'method' not in newmessage['details']:
                newmessage['details'][u'method'] = u''
            if 'host' not in newmessage['details']:
                newmessage['details'][u'host'] = u''
            if 'uri' not in newmessage['details']:
                newmessage['details'][u'uri'] = u''
            if 'status_code' not in newmessage['details']:
                newmessage['details'][u'status_code'] = u''
            newmessage[u'summary'] = (
                u'HTTP {method} '
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ssl':
            if 'server_name' not in newmessage['details']:
                # fake it till you make it
                newmessage['details'][u'server_name'] = newmessage['details']['destinationipaddress']
            newmessage[u'summary'] = (
                u'SSL: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'dhcp':
            newmessage[u'summary'] = (
                '{assigned_ip} assigned to '
                '{mac}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ftp':
            if 'command' not in newmessage['details']:
                newmessage['details'][u'command'] = u''
            if 'user' not in newmessage['details']:
                newmessage['details'][u'user'] = u''
            newmessage[u'summary'] = (
                u'FTP: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'pe':
            if 'os' not in newmessage['details']:
                newmessage['details']['os'] = ''
            if 'subsystem' not in newmessage['details']:
                newmessage['details']['subsystem'] = ''
            newmessage[u'summary'] = (
                u'PE file: {os} '
                u'{subsystem}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'smtp':
            if 'from' in newmessage['details']:
                from_decoded = newmessage['details'][u'from'].decode('unicode-escape')
                newmessage['details'][u'from'] = from_decoded
            else:
                newmessage['details'][u'from'] = u''
            if 'to' not in newmessage['details']:
                newmessage['details'][u'to'] = [u'']
            if 'msg_id' not in newmessage['details']:
                newmessage['details'][u'msg_id'] = u''
            newmessage[u'summary'] = (
                u'SMTP: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ssh':
            if 'auth_success' not in newmessage['details']:
                newmessage['details'][u'auth_success'] = u'unknown'
            newmessage[u'summary'] = (
                u'SSH: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'success {auth_success}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'tunnel':
            if 'tunnel_type' not in newmessage['details']:
                newmessage['details'][u'tunnel_type'] = u''
            if 'action' not in newmessage['details']:
                newmessage['details'][u'action'] = u''
            newmessage[u'summary'] = (
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'{tunnel_type} '
                u'{action}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'intel':
            # flatten the dotted seen.* keys (dots collide with ES field paths)
            if 'seen.indicator' in newmessage['details']:
                newmessage['details']['seenindicator'] = newmessage['details']['seen.indicator']
                del(newmessage['details']['seen.indicator'])
            else:
                newmessage['details'][u'seenindicator'] = u''
            if 'seen.node' in newmessage['details']:
                newmessage['details'][u'seennode'] = newmessage['details']['seen.node']
                del(newmessage['details']['seen.node'])
            if 'seen.where' in newmessage['details']:
                newmessage['details'][u'seenwhere'] = newmessage['details']['seen.where']
                del(newmessage['details']['seen.where'])
            if 'seen.indicator_type' in newmessage['details']:
                newmessage['details'][u'seenindicatortype'] = newmessage['details']['seen.indicator_type']
                del(newmessage['details']['seen.indicator_type'])
            newmessage[u'summary'] = (
                u'Bro intel match '
                u'of {seenindicatortype} '
                u'in {seenwhere}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'known_certs':
            if 'serial' not in newmessage['details']:
                newmessage['details'][u'serial'] = u'0'
            newmessage[u'summary'] = (
                u'Certificate X509 seen from: '
                u'{host}:'
                u'{port_num}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'known_devices':
            if 'mac' not in newmessage['details']:
                newmessage['details'][u'mac'] = u''
            if 'dhcp_host_name' not in newmessage['details']:
                newmessage['details'][u'dhcp_host_name'] = u''
            newmessage[u'summary'] = (
                u'New host: '
                u'{mac}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'known_hosts':
            if 'host' not in newmessage['details']:
                newmessage['details'][u'host'] = u''
            newmessage[u'summary'] = (
                u'New host: '
                u'{host}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'known_services':
            if 'service' not in newmessage['details']:
                newmessage['details']['service'] = []
            if not newmessage['details']['service']:
                newmessage['details'][u'service'] = [u'Unknown']
            if 'host' not in newmessage['details']:
                newmessage['details'][u'host'] = u'unknown'
            if 'port_num' not in newmessage['details']:
                newmessage['details'][u'port_num'] = u'0'
            if 'port_proto' not in newmessage['details']:
                newmessage['details'][u'port_proto'] = u''
            newmessage[u'summary'] = (
                u'New service: '
                u'{service[0]} '
                u'on host '
                u'{host}:'
                u'{port_num} / '
                u'{port_proto}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'notice':
            newmessage['details'][u'indicators'] = []
            if 'sub' not in newmessage['details']:
                newmessage['details'][u'sub'] = u''
            if 'msg' not in newmessage['details']:
                newmessage['details'][u'msg'] = u''
            if 'note' not in newmessage['details']:
                newmessage['details'][u'note'] = u''
            # clean up the action notice IP addresses
            if 'actions' in newmessage['details']:
                if newmessage['details']['actions'] == "Notice::ACTION_LOG":
                    # retrieve indicator ip addresses from the sub field
                    # "sub": "Indicator: 1.2.3.4, Indicator: 5.6.7.8"
                    newmessage['details']['indicators'] = [ip for ip in findIPv4(newmessage['details']['sub'])]
            # remove the details.src field and add it to indicators
            # as it may not be the actual source.
            if 'src' in newmessage['details']:
                if isIPv4(newmessage[u'details'][u'src']):
                    newmessage[u'details'][u'indicators'].append(newmessage[u'details'][u'src'])
                    # If details.src is present overwrite the source IP address with it
                    newmessage[u'details'][u'sourceipaddress'] = newmessage[u'details'][u'src']
                    newmessage[u'details'][u'sourceipv4address'] = newmessage[u'details'][u'src']
                if isIPv6(newmessage[u'details'][u'src']):
                    newmessage[u'details'][u'indicators'].append(newmessage[u'details'][u'src'])
                    # If details.src is present overwrite the source IP address with it
                    newmessage[u'details'][u'sourceipv6address'] = newmessage[u'details'][u'src']
                del newmessage[u'details'][u'src']
            sumstruct = {}
            sumstruct['note'] = newmessage['details'][u'note']
            if 'sourceipv6address' in newmessage['details']:
                sumstruct['src'] = newmessage['details']['sourceipv6address']
            else:
                if 'sourceipv4address' in newmessage['details']:
                    sumstruct['src'] = newmessage['details']['sourceipv4address']
                else:
                    sumstruct['src'] = u'unknown'
            if 'dst' in newmessage['details']:
                sumstruct['dst'] = newmessage['details']['dst']
                del(newmessage[u'details'][u'dst'])
                if isIPv4(sumstruct[u'dst']):
                    newmessage['details'][u'destinationipaddress'] = sumstruct['dst']
                    newmessage['details'][u'destinationipv4address'] = sumstruct['dst']
                if isIPv6(sumstruct[u'dst']):
                    newmessage['details'][u'destinationipv6address'] = sumstruct['dst']
            else:
                sumstruct['dst'] = u'unknown'
            if 'p' in newmessage['details']:
                sumstruct['p'] = newmessage['details']['p']
            else:
                sumstruct['p'] = u'unknown'
            newmessage[u'summary'] = (
                u'{note} '
                u'source {src} '
                u'destination {dst} '
                u'port {p}'
            ).format(**sumstruct)
            # Thank you for your service
            return (newmessage, metadata)

        if logtype == 'rdp':
            if 'cookie' not in newmessage['details']:
                newmessage['details'][u'cookie'] = u'unknown'
            newmessage[u'summary'] = (
                u'RDP: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'sip':
            if 'status_msg' not in newmessage['details']:
                newmessage['details'][u'status_msg'] = u'unknown'
            if 'uri' not in newmessage['details']:
                newmessage['details'][u'uri'] = u'unknown'
            if 'method' not in newmessage['details']:
                newmessage['details'][u'method'] = u'unknown'
            newmessage[u'summary'] = (
                u'SIP: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'method {method} '
                u'status {status_msg}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'software':
            if 'name' not in newmessage['details']:
                newmessage['details'][u'name'] = u'unparsed'
            if 'software_type' not in newmessage['details']:
                newmessage['details'][u'software_type'] = u'unknown'
            if 'host' not in newmessage['details']:
                # BUGFIX: the original assigned u'' to newmessage['details']
                # itself, clobbering the dict and making the .format() below
                # raise; default only the missing 'host' key instead.
                newmessage['details'][u'host'] = u''
            newmessage[u'summary'] = (
                u'Found {software_type} software '
                u'on {host}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'socks':
            if 'version' not in newmessage['details']:
                newmessage['details'][u'version'] = u'0'
            if 'status' not in newmessage['details']:
                newmessage['details'][u'status'] = u'unknown'
            newmessage[u'summary'] = (
                u'SOCKSv{version}: '
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'status {status}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'dce_rpc':
            if 'endpoint' not in newmessage['details']:
                newmessage['details'][u'endpoint'] = u'unknown'
            if 'operation' not in newmessage['details']:
                newmessage['details'][u'operation'] = u'unknown'
            newmessage[u'summary'] = (
                u'DCERPC: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'kerberos':
            if 'request_type' not in newmessage['details']:
                newmessage['details'][u'request_type'] = u'unknown'
            if 'client' not in newmessage['details']:
                newmessage['details'][u'client'] = u'unknown'
            if 'service' not in newmessage['details']:
                newmessage['details'][u'service'] = u'unknown'
            if 'success' not in newmessage['details']:
                newmessage['details'][u'success'] = u'unknown'
            if 'error_msg' not in newmessage['details']:
                newmessage['details'][u'error_msg'] = u''
            newmessage[u'summary'] = (
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'request {request_type} '
                u'success {success}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'ntlm':
            # group the NTLM identity triple under details.ntlm
            newmessage['details'][u'ntlm'] = {}
            if 'domainname' in newmessage['details']:
                newmessage['details'][u'ntlm'][u'domainname'] = newmessage['details']['domainname']
                del(newmessage['details']['domainname'])
            else:
                newmessage['details'][u'ntlm'][u'domainname'] = u'unknown'
            if 'hostname' in newmessage['details']:
                newmessage['details'][u'ntlm'][u'hostname'] = newmessage['details']['hostname']
                del(newmessage['details']['hostname'])
            else:
                newmessage['details'][u'ntlm'][u'hostname'] = u'unknown'
            if 'username' in newmessage['details']:
                newmessage['details'][u'ntlm'][u'username'] = newmessage['details']['username']
                del(newmessage['details']['username'])
            else:
                newmessage['details'][u'ntlm'][u'username'] = u'unknown'
            if 'success' not in newmessage['details']:
                newmessage['details'][u'success'] = u'unknown'
            if 'status' not in newmessage['details']:
                newmessage['details'][u'status'] = u'unknown'
            newmessage[u'summary'] = (
                u'NTLM: {sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'success {success} '
                u'status {status}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'smb_files':
            # collect the dotted times.* epoch fields into details.smbtimes
            newmessage['details']['smbtimes'] = {}
            if 'path' not in newmessage['details']:
                newmessage['details'][u'path'] = u''
            if 'name' not in newmessage['details']:
                newmessage['details'][u'name'] = u''
            if 'action' not in newmessage['details']:
                newmessage['details'][u'action'] = u''
            if 'times.accessed' in newmessage['details']:
                newmessage['details']['smbtimes']['accessed'] = toUTC(float(newmessage['details']['times.accessed'])).isoformat()
                del(newmessage['details']['times.accessed'])
            if 'times.changed' in newmessage['details']:
                newmessage['details']['smbtimes']['changed'] = toUTC(float(newmessage['details']['times.changed'])).isoformat()
                del(newmessage['details']['times.changed'])
            if 'times.created' in newmessage['details']:
                newmessage['details']['smbtimes']['created'] = toUTC(float(newmessage['details']['times.created'])).isoformat()
                del(newmessage['details']['times.created'])
            if 'times.modified' in newmessage['details']:
                newmessage['details']['smbtimes']['modified'] = toUTC(float(newmessage['details']['times.modified'])).isoformat()
                del(newmessage['details']['times.modified'])
            newmessage[u'summary'] = (
                'SMB file: '
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'{action}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'smb_mapping':
            if 'share_type' not in newmessage['details']:
                newmessage['details'][u'share_type'] = u''
            if 'path' not in newmessage['details']:
                newmessage['details'][u'path'] = u''
            newmessage[u'summary'] = (
                'SMB mapping: '
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'{share_type}'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'snmp':
            if 'version' not in newmessage['details']:
                newmessage['details'][u'version'] = u'Unknown'
            if 'get_bulk_requests' not in newmessage['details']:
                newmessage['details']['get_bulk_requests'] = 0
            if 'get_requests' not in newmessage['details']:
                newmessage['details']['get_requests'] = 0
            if 'set_requests' not in newmessage['details']:
                newmessage['details']['set_requests'] = 0
            if 'get_responses' not in newmessage['details']:
                newmessage['details']['get_responses'] = 0
            newmessage['details']['getreqestssum'] = u'{0}'.format(newmessage['details']['get_bulk_requests'] + newmessage['details']['get_requests'])
            newmessage[u'summary'] = (
                u'SNMPv{version}: '
                u'{sourceipaddress} -> '
                u'{destinationipaddress}:'
                u'{destinationport} '
                u'({getreqestssum} get / '
                u'{set_requests} set requests '
                u'{get_responses} get responses)'
            ).format(**newmessage['details'])
            return (newmessage, metadata)

        if logtype == 'x509':
            # nest the dotted certificate.* keys under details.certificate
            newmessage['details'][u'certificate'] = {}
            if 'basic_constraints.ca' in newmessage['details']:
                newmessage['details'][u'certificate'][u'basic_constraints_ca'] = newmessage['details'][u'basic_constraints.ca']
                del(newmessage['details'][u'basic_constraints.ca'])
            if 'basic_constraints.path_len' in newmessage['details']:
                newmessage['details'][u'certificate'][u'basic_constraints_path_len'] = newmessage['details'][u'basic_constraints.path_len']
                del(newmessage['details'][u'basic_constraints.path_len'])
            if 'certificate.exponent' in newmessage['details']:
                newmessage['details'][u'certificate'][u'exponent'] = newmessage['details'][u'certificate.exponent']
                del(newmessage['details'][u'certificate.exponent'])
            if 'certificate.issuer' in newmessage['details']:
                newmessage['details'][u'certificate'][u'issuer'] = newmessage['details'][u'certificate.issuer']
                del(newmessage['details'][u'certificate.issuer'])
            if 'certificate.key_alg' in newmessage['details']:
                newmessage['details'][u'certificate'][u'key_alg'] = newmessage['details'][u'certificate.key_alg']
                del(newmessage['details'][u'certificate.key_alg'])
            if 'certificate.key_length' in newmessage['details']:
                newmessage['details'][u'certificate'][u'key_length'] = newmessage['details'][u'certificate.key_length']
                del(newmessage['details'][u'certificate.key_length'])
            if 'certificate.key_type' in newmessage['details']:
                newmessage['details'][u'certificate'][u'key_type'] = newmessage['details'][u'certificate.key_type']
                del(newmessage['details'][u'certificate.key_type'])
            if 'certificate.not_valid_after' in newmessage['details']:
                newmessage['details'][u'certificate'][u'not_valid_after'] = toUTC(float(newmessage['details'][u'certificate.not_valid_after'])).isoformat()
                del(newmessage['details'][u'certificate.not_valid_after'])
            if 'certificate.not_valid_before' in newmessage['details']:
                newmessage['details'][u'certificate'][u'not_valid_before'] = toUTC(float(newmessage['details'][u'certificate.not_valid_before'])).isoformat()
                del(newmessage['details'][u'certificate.not_valid_before'])
            if 'certificate.sig_alg' in newmessage['details']:
                newmessage['details'][u'certificate'][u'sig_alg'] = newmessage['details'][u'certificate.sig_alg']
                del(newmessage['details'][u'certificate.sig_alg'])
            if 'certificate.subject' in newmessage['details']:
                newmessage['details'][u'certificate'][u'subject'] = newmessage['details'][u'certificate.subject']
                del(newmessage['details'][u'certificate.subject'])
            if 'certificate.version' in newmessage['details']:
                newmessage['details'][u'certificate'][u'version'] = newmessage['details'][u'certificate.version']
                del(newmessage['details'][u'certificate.version'])
            if 'certificate.serial' in newmessage['details']:
                newmessage['details'][u'certificate'][u'serial'] = newmessage['details'][u'certificate.serial']
                del(newmessage['details'][u'certificate.serial'])
            else:
                newmessage['details'][u'certificate'][u'serial'] = u'0'
            # the original called .format() on a placeholder-free literal;
            # a plain assignment is equivalent
            newmessage[u'summary'] = 'X509 certificate seen'
            return (newmessage, metadata)

        # recognized source but no per-logtype handler: pass through
        return (newmessage, metadata)
def current_timestamp(): return toUTC(datetime.now()).isoformat()
def esRotateIndexes():
    """Rotate dated Elasticsearch indices and maintain their aliases.

    Driven by the module-level `options` (parsed .conf settings): for each
    configured index with a 'daily' or 'monthly' rotation, create today's
    index if missing and point the bare alias (e.g. `events`) at it and the
    `-previous` alias at yesterday's/last month's index.  Afterwards,
    rebuild `-weekly` aliases covering the last seven daily indices.
    Errors are logged and swallowed so one bad index does not stop the run.
    """
    # route log output per configuration: syslog or stderr
    if options.output == 'syslog':
        logger.addHandler(SysLogHandler(address=(options.sysloghostname, options.syslogport)))
    else:
        sh = logging.StreamHandler(sys.stderr)
        sh.setFormatter(formatter)
        logger.addHandler(sh)
    logger.debug('started')
    try:
        es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
        indices = es.get_indices()
        # calc dates for use in index names events-YYYYMMDD, alerts-YYYYMM, etc.
        odate_day = date.strftime(toUTC(datetime.now()) - timedelta(days=1), '%Y%m%d')
        odate_month = date.strftime(toUTC(datetime.now()) - timedelta(days=1), '%Y%m')
        ndate_day = date.strftime(toUTC(datetime.now()), '%Y%m%d')
        ndate_month = date.strftime(toUTC(datetime.now()), '%Y%m')
        # examine each index in the .conf file
        # for rotation settings
        # NOTE(review): dobackup and pruning are unpacked but unused here —
        # presumably consumed by a sibling maintenance job; confirm.
        for (index, dobackup, rotation, pruning) in zip(options.indices, options.dobackup, options.rotation, options.pruning):
            try:
                if rotation != 'none':
                    oldindex = index
                    newindex = index
                    if rotation == 'daily':
                        oldindex += '-%s' % odate_day
                        newindex += '-%s' % ndate_day
                    elif rotation == 'monthly':
                        oldindex += '-%s' % odate_month
                        newindex += '-%s' % ndate_month
                        # do not rotate before the month ends
                        if oldindex == newindex:
                            logger.debug('do not rotate %s index, month has not changed yet' % index)
                            continue
                    if newindex not in indices:
                        logger.debug('Creating %s index' % newindex)
                        es.create_index(newindex)
                    # set aliases: events to events-YYYYMMDD
                    # and events-previous to events-YYYYMMDD-1
                    logger.debug('Setting {0} alias to index: {1}'.format(index, newindex))
                    es.create_alias(index, newindex)
                    if oldindex in indices:
                        logger.debug('Setting {0}-previous alias to index: {1}'.format(index, oldindex))
                        es.create_alias('%s-previous' % index, oldindex)
                    else:
                        logger.debug('Old index %s is missing, do not change %s-previous alias' % (oldindex, index))
            except Exception as e:
                # keep going: a failure on one index must not abort the rest
                logger.error("Unhandled exception while rotating %s, terminating: %r" % (index, e))
        # refresh the index list so newly created indices are visible below
        indices = es.get_indices()
        # Create weekly aliases for certain indices
        week_ago_date = toUTC(datetime.now()) - timedelta(weeks=1)
        week_ago_str = week_ago_date.strftime('%Y%m%d')
        current_date = toUTC(datetime.now())
        for index in options.weekly_rotation_indices:
            weekly_index_alias = '%s-weekly' % index
            logger.debug('Trying to re-alias {0} to indices since {1}'.format(weekly_index_alias, week_ago_str))
            existing_weekly_indices = []
            # collect the daily indices from the past week that actually exist
            for day_obj in daterange(week_ago_date, current_date):
                day_str = day_obj.strftime('%Y%m%d')
                day_index = index + '-' + str(day_str)
                if day_index in indices:
                    existing_weekly_indices.append(day_index)
                else:
                    logger.debug('%s not found, so cant assign weekly alias' % day_index)
            if existing_weekly_indices:
                logger.debug('Creating {0} alias for {1}'.format(weekly_index_alias, existing_weekly_indices))
                es.create_alias_multiple_indices(weekly_index_alias, existing_weekly_indices)
            else:
                logger.warning('No indices within the past week to assign events-weekly to')
    except Exception as e:
        logger.error("Unhandled exception, terminating: %r" % e)
def test_zero_int(self): result = toUTC(0) self.result_is_datetime(result) assert str(result) == '1970-01-01 00:00:00+00:00'
def test_abnormal_date_str_without_timezone(self): result = toUTC("Jan 2 08:01:57") self.result_is_datetime(result) assert str(result) == '2016-01-02 08:01:57+00:00'