class UpdateAlertsCommand(StreamingCommand):
    json = Option(
        doc='''
        **Syntax:** **json=***<field>*
        **Description:** Field name that contains the alert as a json string''',
        require=False, validate=validators.Fieldname())

    key = Option(
        doc='''
        **Syntax:** **key=***<field>*
        **Description:** The internal key of the alert''',
        require=False, validate=validators.Fieldname())

    status = Option(
        doc='''
        **Syntax:** **status=***<string>*
        **Description:** The new status''',
        require=False)

    action = Option(
        doc='''
        **Syntax:** **action=***<string>*
        **Description:** The action''',
        require=False)

    notes = Option(
        doc='''
        **Syntax:** **notes=***<string>*
        **Description:** Optional notes to be added to the work log''',
        require=False)

    # notes_field is referenced in stream() below, but its Option definition was
    # missing from this excerpt; this definition is a reconstruction.
    notes_field = Option(
        doc='''
        **Syntax:** **notes_field=***<field>*
        **Description:** Field name that contains notes to be added to the work log;
        when present in a record it takes precedence over the notes option''',
        require=False, validate=validators.Fieldname())

    alerts = None

    def stream(self, records):
        self.logger.info('UpdateAlertsCommand: %s', self)  # logs command line
        # self.logger.info('SEARCHINFO %s', self._metadata.searchinfo)
        if not self.alerts:
            self.alerts = AlertCollection(self._metadata.searchinfo.session_key)

        for record in records:
            if self.json and self.json in record:
                # Replace the whole alert with the json document carried by the record.
                self.alerts.replace(json.loads(record[self.json]),
                                    notes=self.notes,
                                    logger=self.logger,
                                    sid=self._metadata.searchinfo.sid,
                                    username=self._metadata.searchinfo.username)
            elif self.action and self.status and self.key and self.key in record:
                # Per-record notes (notes_field) take precedence over the notes option.
                notes = self.notes or None
                if self.notes_field and self.notes_field in record and record[self.notes_field]:
                    notes = record[self.notes_field]
                self.alerts.update(record[self.key],
                                   action=self.action,
                                   status=self.status,
                                   notes=notes,
                                   logger=self.logger,
                                   sid=self._metadata.searchinfo.sid,
                                   username=self._metadata.searchinfo.username)
            else:
                self.logger.error('json field should be present OR the key field, '
                                  'action value and status value should be provided')
            yield record
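# Illustrative usage (assumes the command is registered as `updatealerts` in
# commands.conf, which is outside this excerpt; `listalerts` and the kv_key
# field come from ListAlertsCommand below):
#
#   | listalerts status=open | search entity="webserver01"
#   | updatealerts key=kv_key action=close status=closed notes="false positive"
#
# Each record must carry either the json field, or the key field together with
# the action and status options; otherwise an error is logged.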
class DeleteAlertsCommand(StreamingCommand):
    key = Option(
        doc='''
        **Syntax:** **key=***<field>*
        **Description:** The internal key of the alert''',
        require=True, validate=validators.Fieldname())

    alerts = None

    def stream(self, records):
        self.logger.info('DeleteAlertsCommand: %s', self)  # logs command line
        if not self.alerts:
            self.alerts = AlertCollection(self._metadata.searchinfo.session_key)

        for record in records:
            if self.key in record:
                self.alerts.delete(record[self.key], logger=self.logger)
            else:
                # was str(self.json), but this command defines no json option
                self.logger.error('DeleteAlertsCommand: no key field %s', str(self.key))
            yield record
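# Illustrative usage (assumes the command is registered as `deletealerts`):
#
#   | listalerts status=closed | deletealerts key=kv_key
#
# Records missing the key field are passed through unchanged and an error is logged.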
# This block comes from a separate script: a Splunk custom alert action that reads
# its payload from stdin. The top of the multivalue-parsing helper was missing from
# this excerpt; the header and loop below are a reconstruction (the name parse_mv is
# hypothetical), assuming ss is a Splunk-encoded multivalue string such as '$a$;$b$',
# whose 3-character separator '$;$' explains the i += 3.
def parse_mv(ss):
    res = []
    f = ''
    i = 0
    while i < len(ss):
        if ss[i:i + 3] == '$;$':
            res.append(f.encode('utf-8'))
            f = ''
            i += 3
        else:
            f += ss[i]
            i += 1
    res.append(f.encode('utf-8'))
    return res


payload = json.loads(sys.stdin.read())
session_key = payload['session_key']
max_count = int(payload['configuration']['max_count'])
alerts = AlertCollection(session_key)

results_file = payload['results_file']
with gzip.open(results_file, 'rt') as f:
    file_content = csv.reader(f)
    lines = list(file_content)
    header = lines[0]
    rows = lines[1:]
    # cap how many alerts can be created if it is defined
    if max_count > 0:
        rows = rows[0:max_count]
    for row in rows:
        record_all = dict(zip(header, row))
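# For reference, the minimal stdin payload shape this script reads (inferred from
# the keys accessed above; the real payload sent by splunkd carries more fields):
#
#   {
#       "session_key": "...",
#       "results_file": "/opt/splunk/var/run/splunk/dispatch/<sid>/results.csv.gz",
#       "configuration": {"max_count": "100"}
#   }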
class ListAlertsCommand(GeneratingCommand):
    data = Option(
        doc='''
        **Syntax:** **data=***<field>*
        **Description:** Field name that will receive the alert data in json format''',
        require=False, validate=validators.Fieldname())

    data_prefix = Option(
        doc='''
        **Syntax:** **data_prefix=***<string>*
        **Description:** Prefix that will be inserted before the data fields.
        Each data field will appear as a separate field.''',
        require=False)

    json_field = Option(
        doc='''
        **Syntax:** **json=***<field>*
        **Description:** Field name that will receive the entire record as a json object.''',
        require=False, name='json', validate=validators.Fieldname())

    status = Option(
        doc='''
        **Syntax:** **status=***<comma_separated_list_of_status>*
        **Description:** Only selects alerts with the provided statuses''',
        require=False)

    type = Option(
        doc='''
        **Syntax:** **type=***<comma_separated_list_of_types>*
        **Description:** Only selects alerts with the provided types''',
        require=False)

    severity = Option(
        doc='''
        **Syntax:** **severity=***<comma_separated_list_of_severity>*
        **Description:** Only selects alerts with the provided severities''',
        require=False)

    analyst = Option(
        doc='''
        **Syntax:** **analyst=***<comma_separated_list_of_analyst>*
        **Description:** Only selects alerts with the provided analysts''',
        require=False)

    # count = Option(require=True, validate=validators.Integer(0))

    alerts = None

    def generate(self):
        self.logger.info('ListAlertsCommand: %s', self)
        if not self.alerts:
            self.alerts = AlertCollection(self._metadata.searchinfo.session_key)

        # Comma separated options become lists of filter values; an empty list means no filter.
        status = self.status.split(',') if self.status else []
        type = self.type.split(',') if self.type else []
        severity = self.severity.split(',') if self.severity else []
        analyst = self.analyst.split(',') if self.analyst else []

        # A time bound of 0 means "all time" and is passed through as None.
        earliest_time = self._metadata.searchinfo.earliest_time or None
        latest_time = self._metadata.searchinfo.latest_time or None

        for record in self.alerts.list(status=status, type=type, severity=severity,
                                       analyst=analyst,
                                       earliest_time=earliest_time,
                                       latest_time=latest_time,
                                       logger=self.logger):
            event = {
                '_time': record['time'],
                'sourcetype': 'alerts',
                'type': record['type'],
                'severity': record.get('severity'),
                'entity': record['entity'],
                'kv_key': record['_key'],
                'analyst': record.get('analyst'),
                'status': record['status'],
                'sid': record['sid'],
            }
            data = record['data']
            if self.data:
                event[self.data] = json.dumps(data)
            if self.data_prefix is not None:
                # items() rather than the Python-2-only iteritems()
                for key, value in data.items():
                    event[self.data_prefix + key] = value
            if self.json_field:
                event[self.json_field] = json.dumps(record)
            yield event
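# Illustrative usage (assumes the command is registered as `listalerts`):
#
#   | listalerts status=open,assigned type=bruteforce data_prefix=data_ json=raw
#
# This yields one event per matching alert, with the alert's data dict flattened
# into data_* fields and the whole record serialized into the raw field.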
class MakeAlertsCommand(StreamingCommand):
    time = Option(
        doc='''
        **Syntax:** **time=***<field>*
        **Description:** Field name used to determine event time for the alert''',
        require=False, validate=validators.Fieldname(), default='_time')

    entity = Option(
        doc='''
        **Syntax:** **entity=***<field>*
        **Description:** Field name used to determine the entity triggering the alert
        (account name, machine name, ...)''',
        require=False, validate=validators.Fieldname(), default='entity')

    alert_type = Option(
        doc='''
        **Syntax:** **type=***<string>*
        **Description:** The type of the alert''',
        require=True, name='type')

    severity = Option(
        doc='''
        **Syntax:** **severity=***<field>*
        **Description:** Field name used to set the severity of the alert''',
        require=False, validate=validators.Fieldname(), default=None)

    idfield = Option(
        doc='''
        **Syntax:** **idfield=***<field>*
        **Description:** Field name used to store the alert id''',
        require=False, default=None, validate=validators.Fieldname())

    combine = Option(
        doc='''
        **Syntax:** **combine=***"<fields>"*
        **Description:** Comma separated field names on which alerts should be combined
        instead of creating new ones.''',
        require=False, default=None)

    combine_window = Option(
        doc='''
        **Syntax:** **combine_window=***<string>*
        **Description:** hours or days.''',
        require=False, default=None)

    interactive = Option(
        doc='''
        **Syntax:** **interactive=***<bool>*
        **Description:** If true, makealerts can run in an interactive search; otherwise it
        will run only in a scheduled search (this is to prevent alerts created accidentally
        when copying and pasting scheduled search text)''',
        require=False, default=False, validate=validators.Boolean())

    preview = Option(
        doc='''
        **Syntax:** **preview=***<bool>*
        **Description:** If true, makealerts does not create alerts but instead indicates
        what it would do in the preview field''',
        require=False, default=False, validate=validators.Boolean())

    alerts = None

    def __init__(self):
        super(MakeAlertsCommand, self).__init__()
        self.insert_stats = InsertStats()
        self.whitelist = []
        self.whitelist_loaded = False
        self.loggerExtra = self.logger

    def load_whitelist(self, searchinfo):
        if not self.whitelist_loaded:
            self.whitelist_loaded = True
            service = connect(token=searchinfo.session_key, app=searchinfo.app)
            rr = results.ResultsReader(service.jobs.oneshot("| inputlookup whitelist"))
            today = datetime.datetime.today()
            for result in rr:
                if isinstance(result, results.Message):
                    self.logger.error("sid=%s,s3tag=whitelist,type=%s,message=%s",
                                      searchinfo.sid, result.type, result.message)
                elif self.alert_type == result['type']:
                    try:
                        wl = Whitelist(result)
                        # only keep whitelist entries that are currently in effect
                        if wl.start <= today <= wl.end:
                            wl.parse_criteria()
                            self.whitelist.append(wl)
                    except Exception as e:
                        self.logger.error(
                            "sid=%s,s3tag=whitelist,type=\"invalid whitelist\",message=\"%s\",record=%s",
                            searchinfo.sid, str(e), str(result))

    def is_scheduled(self):
        sid = self._metadata.searchinfo.sid
        return sid.startswith("scheduler_") or sid.startswith("rt_scheduler_")

    def stream(self, records):
        # self.logger.info('MakeAlertsCommand: %s, type of record %s', self, type(records))
        # self.logger.info('SEARCHINFO %s', self._metadata.searchinfo)
        sid = self._metadata.searchinfo.sid
        self.loggerExtra = CustomLogAdapter(self.logger,
                                            {'sid': sid, 'type': self.alert_type})

        if not self.interactive and not self.is_scheduled():
            raise RuntimeError(
                "When testing makealerts from interactive search, "
                "provide the 'interactive=t' option.")

        if not self.alerts:
            self.alerts = AlertCollection(self._metadata.searchinfo.session_key)

        for record in records:
            self.load_whitelist(self._metadata.searchinfo)
            for wl in self.whitelist:
                context = Context(record)
                if wl.is_whitelisted(context):
                    self.insert_stats.whitelisted += 1
                    self.loggerExtra.info("s3tag=criteria,evaluation=\"%s\"", str(context.debug))
                    self.loggerExtra.info("s3tag=whitelisted,name=\"%s\"", wl.name)
                    if self.preview:
                        record['preview'] = 'whitelisted %s' % str(context.debug)
                    break
            else:
                # for-else: insert only when no whitelist entry matched
                search_context = SearchContext(self._metadata.searchinfo, self.loggerExtra)
                self.alerts.insert(record,
                                   event_time=self.time,
                                   entity=self.entity,
                                   alert_type=self.alert_type,
                                   severity=self.severity,
                                   idfield=self.idfield,
                                   combine=self.combine,
                                   combine_window=self.combine_window,
                                   preview=self.preview,
                                   search_context=search_context,
                                   insert_stats=self.insert_stats)
                if self.preview:
                    record['preview'] = str(search_context.messages)
            yield record

    def finish(self):
        if self.interactive and not self.is_scheduled() and self.insert_stats.errors > 0:
            self.write_error(
                "There were {0} error(s) when trying to insert data, check logs with this "
                "search 'index=_internal MakeAlertsCommand source=*super_simple_siem.log* ERROR'",
                self.insert_stats.errors)
        if not self.preview:
            self.loggerExtra.info('s3tag=stats,%s,whitelist=%s',
                                  str(self.insert_stats),
                                  "[" + ";".join(str(x) for x in self.whitelist) + "]")
        try:
            super(MakeAlertsCommand, self).finish()
        except:
            pass
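# Illustrative usage in a scheduled search (`type` is the only required option;
# the field and type names are made up for the example):
#
#   ... | stats count by user, src_ip
#       | makealerts type=bruteforce entity=user combine="user,src_ip" combine_window=hours
#
# Run interactively, the same invocation needs interactive=t, otherwise stream()
# raises the RuntimeError above.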