class integration(object):

    def run(self):
        self.ds.log('INFO', 'This is where we would do some work')
        self.ds.writeCEFEvent()

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('templateEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

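# Hedged usage sketch (not part of the original template): each of these integration
# classes is normally a standalone script whose module level imports getopt, sys, os,
# traceback and the DefenseStorm helper, and which is driven by a small __main__ block
# like the one below.  The block is an assumption for illustration only.
if __name__ == "__main__":
    import sys
    i = integration(sys.argv[1:])   # parse -h/-t/-l options, build the DefenseStorm handle
    i.run()                         # execute one collection cycle
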
class integration(object):

    JSON_field_mappings = {
        'site': 'location',
        'name': 'sensor_name',
        'notification_type': 'event_type',
        'video_url': 'url'
    }

    def verkada_getCameras(self):
        response = self.verkada_request('/cameras')
        r_json = response.json()
        return r_json['cameras']

    def verkada_getEvents(self):
        pagesize = 10
        total_events = []
        params = {
            'start_time': self.last_run,
            'end_time': self.current_run,
            'per_page': pagesize
        }
        response = self.verkada_request('/notifications', params=params)
        r_json = response.json()
        page_cursor = r_json['page_cursor']
        events = r_json['notifications']
        if page_cursor == None:
            return events
        total_events += events
        while page_cursor != None:
            params = {
                'start_time': self.last_run,
                'end_time': self.current_run,
                'per_page': pagesize,
                'page_cursor': page_cursor
            }
            response = self.verkada_request('/notifications', params=params)
            r_json = response.json()
            events = r_json['notifications']
            total_events += events
            page_cursor = r_json['page_cursor']
        return total_events

    def verkada_request(self, path, params=None, verify=False, proxies=None):
        url = self.api_url + '/orgs/' + self.org_id + path
        headers = {
            'Content-Type': 'application/json',
            'x-api-key': self.api_key
        }
        self.ds.log('INFO', "Attempting to connect to url: " + url +
                    " with params: " + json.dumps(params))
        try:
            response = requests.get(url, headers=headers, params=params,
                                    timeout=15, verify=verify, proxies=proxies)
        except Exception as e:
            self.ds.log('ERROR', "Exception in verkada_request: {0}".format(str(e)))
            return None
        if not response or response.status_code != 200:
            self.ds.log('ERROR', "Received unexpected " + str(response.text) +
                        " response from Verkada Server {0}.".format(url))
            self.ds.log('ERROR', "Exiting due to unexpected response.")
            sys.exit(0)
        return response

    def verkada_main(self):
        self.api_url = self.ds.config_get('verkada', 'api_url')
        self.state_dir = self.ds.config_get('verkada', 'state_dir')
        self.org_id = self.ds.config_get('verkada', 'org_id')
        self.api_key = self.ds.config_get('verkada', 'api_key')
        self.last_run = self.ds.get_state(self.state_dir)
        self.current_run = int(time.time())
        if self.last_run == None:
            self.last_run = self.current_run - (86400 * 30)

        # Build a camera_id -> camera lookup so events can be enriched with name/site
        cameras_list = self.verkada_getCameras()
        cameras = {}
        for camera in cameras_list:
            cameras[camera['camera_id']] = camera

        events = self.verkada_getEvents()
        if events == None:
            self.ds.log('INFO', "There are no event logs to send")
        else:
            self.ds.log('INFO', "Sending {0} event logs".format(len(events)))
            for log in events:
                log['name'] = cameras[log['camera_id']]['name']
                log['site'] = cameras[log['camera_id']]['site']
                log['message'] = log['name'] + " - " + log['notification_type']
                log['timestamp'] = datetime.utcfromtimestamp(
                    int(log['created'])).strftime("%Y-%m-%dT%H:%M:%SZ")
                self.ds.writeJSONEvent(log, JSON_field_mappings=self.JSON_field_mappings,
                                       flatten=False)

        self.ds.set_state(self.state_dir, self.current_run)
        self.ds.log('INFO', "Done Sending Notifications")

    def run(self):
        try:
            pid_file = self.ds.config_get('verkada', 'pid_file')
            fp = open(pid_file, 'w')
            try:
                fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except IOError:
                self.ds.log('ERROR', "An instance of this connector is already running")
                # another instance is running
                sys.exit(0)
            self.verkada_main()
        except Exception as e:
            traceback.print_exc()
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()
        print('  -g    Authenticate to Get Token then exit')
        print()
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None
        self.get_token = None

        try:
            opts, args = getopt.getopt(argv, "htlg")
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False
            elif opt in ("-g"):
                self.get_token = True

        try:
            self.ds = DefenseStorm('verkadaEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

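# Hedged configuration sketch: verkada_main() and run() read the keys below via
# self.ds.config_get('verkada', ...).  The ini-style layout and the sample values
# are assumptions for illustration; only the key names come from the code above.
#
#   [verkada]
#   api_url   = <verkada api base url>
#   org_id    = <organization id>
#   api_key   = <api key>
#   state_dir = /path/to/state
#   pid_file  = /path/to/verkada.pid
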
class integration(object):

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('symanteccloudEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

        # Load the config file
        #with open('NdfConfig.json') as config_file:
        #    self.config = json.load(config_file)

        # Load your username from the config file
        self.user = self.ds.config_get('symanteccloud', 'user')
        # Load your password from the config file
        self.password = self.ds.config_get('symanteccloud', 'password')

        # Filenames
        # Filename of the cookies file.  Directory will be loaded from config file
        self.cookieFile = self.ds.config_get('symanteccloud', 'cookiesFilePath') + '/cookies.txt'
        # Filename of the logs file.  Directory will be loaded from config file
        # (this format is day_month_year)
        # Format details can be found at https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
        #self.logFile = self.config['files']['logsFilePath'] + '/%s-datafeed.json' % datetime.datetime.now().strftime('%d_%m_%Y-%H_%M_%S_%f')

        # Request Uri
        self.uri = self.ds.config_get('symanteccloud', 'uri').lower()

        # Encode the username/password for HTTP Basic Authentication
        # (encode/decode so this also runs under Python 3)
        self.base64string = base64.b64encode(
            ('%s:%s' % (self.user, self.password)).encode()).decode('ascii')

class integration(object):

    def jdbc_main(self):
        # Get JDBC Config info
        try:
            driver = self.ds.config_get('jdbc', 'driver')
            db_jarfile = self.ds.config_get('jdbc', 'db_jarfile')
            db_json_file = self.ds.config_get('jdbc', 'db_json_file')
            self.conn_url = self.ds.config_get('jdbc', 'connection_url')
            self.hostname = self.ds.config_get('jdbc', 'hostname')
            username = self.ds.config_get('jdbc', 'username')
            password = self.ds.config_get('jdbc', 'password')
            self.state_dir = self.ds.config_get('jdbc', 'state_dir')
            self.last_run = self.ds.get_state(self.state_dir)
            #self.time_format = self.ds.config_get('jdbc', 'time_format')
            self.time_format = "%Y-%m-%d %H:%M:%S.%f"
            current_time = time.time()
            if self.last_run == None:
                self.last_run = (datetime.utcfromtimestamp(
                    60 * ((current_time - 120) // 60))).strftime(self.time_format)
            self.current_run = (datetime.utcfromtimestamp(current_time)).strftime(self.time_format)
        except Exception as e:
            traceback.print_exc()
            self.ds.log("ERROR", "Failed to get required configurations")
            self.ds.log('ERROR', "Exception {0}".format(str(e)))

        db_tables = None
        try:
            with open(db_json_file) as json_file:
                db_tables = json.load(json_file)
        except Exception as e:
            traceback.print_exc()
            self.ds.log("ERROR", "Failed to load db_json_file " + db_json_file)
            self.ds.log('ERROR', "Exception {0}".format(str(e)))

        self.ds.log("INFO", "Connection URL: " + self.conn_url)
        conn = None
        try:
            conn = jaydebeapi.connect(driver, self.conn_url, [username, password], db_jarfile)
        except Exception as e:
            traceback.print_exc()
            self.ds.log("ERROR", "Failed to connect to DB")
            self.ds.log('ERROR', "Exception {0}".format(str(e)))

        if conn == None:
            self.ds.log("ERROR", "Error connecting to the DB, no exception")
        else:
            self.ds.log("INFO", "Successfully connected to DB URL")
            for entry in db_tables:
                # Pull only rows newer than the last recorded run time
                query = "select " + ','.join(entry['values']) + " from " + entry['table_name'] + \
                        " where " + entry['timestamp'] + " > \'" + self.last_run + "\'"
                self.ds.log("INFO", "Query: " + query)
                curs = conn.cursor()
                curs.execute(query)
                result = []
                columns = tuple([d[0] for d in curs.description])
                for row in curs.fetchall():
                    result.append(dict(zip(columns, row)))
                for item in result:
                    datestring, blah = item['CreatedUtc'].split('.')
                    timestamp = datetime.strptime(datestring + '.0+0000', self.time_format + '%z')
                    item['timestamp'] = int(datetime.timestamp(timestamp))
                    item['hostname'] = self.hostname
                    self.ds.writeJSONEvent(item)

        self.ds.set_state(self.state_dir, self.current_run)
        self.ds.log('INFO', "Done Sending Notifications")

    def run(self):
        try:
            pid_file = self.ds.config_get('jdbc', 'pid_file')
            fp = io.open(pid_file, 'w')
            try:
                fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except IOError:
                self.ds.log('ERROR', "An instance of the jdbc integration is already running")
                # another instance is running
                sys.exit(0)
            self.jdbc_main()
        except Exception as e:
            traceback.print_exc()
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None
        self.conf_file = None
        self.conn_url = None

        try:
            opts, args = getopt.getopt(argv, "htnld:c:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False
            elif opt in ("-c"):
                self.conf_file = arg

        try:
            self.ds = DefenseStorm('jdbcEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog,
                                   config_file=self.conf_file)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

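# Hedged example of the db_json_file that jdbc_main() expects: a JSON list of table
# descriptors with 'table_name', 'timestamp' (the column compared against the last
# run time) and 'values' (the columns to select).  The code above also reads a
# 'CreatedUtc' column from each row, so it is included here; the other table and
# column names are invented for illustration only.
#
#   [
#     {
#       "table_name": "AuditLog",
#       "timestamp": "CreatedUtc",
#       "values": ["CreatedUtc", "EventType", "UserName"]
#     }
#   ]
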
class integration(object):

    system_JSON_field_mappings = {
        'description': 'message',
        'date': 'timestamp'
    }

    JSON_field_mappings = {
        'eventId': 'event_id',
        'processPath': 'process_path',
        'destinations': 'ip_dest',
        'loggedUsers': 'username',
        'macAddresses': 'mac_address',
        'collectorGroup': 'group',
        'rules': 'rule_name',
        'ip': 'client_ip',
        'id': 'client_info',
        'device': 'client_hostname',
        'operatingSystem': 'os_type',
    }

    def ensilo_basicAuth(self):
        url = self.url + '/management-rest/system-events/list-system-events'
        self.ds.log('INFO', "Attempting basic auth to url: " + url)
        response = self.ensilo_request('/management-rest/system-events/list-system-events')
        if response == None or response.headers == None:
            return response
        headers = response.headers
        if 'X-Auth-Token' in headers.keys():
            token = headers['X-Auth-Token']
        else:
            self.ds.log('WARNING', "Response missing X-Auth-Token in response from enSilo Server {0}.".format(url))
            return None
        return token

    def ensilo_getEvents(self):
        params = {'lastSeenFrom': self.last_run, 'lastSeenTo': self.current_run}
        #params = None
        response = self.ensilo_request('/management-rest/events/list-events', params=params)
        events = response.json()
        extra_events = []
        for event in events:
            event['message'] = "Event ID: " + str(event['eventId']) + " Process: " + event['process'] + " Action: " + event['action']
            event['category'] = "events"
            try:
                dt_timestamp = datetime.strptime(event['lastSeen'], self.time_format)
                dt_timestamp = self.pytz_timezone.localize(dt_timestamp)
                event['timestamp'] = dt_timestamp.isoformat()
            except Exception as E:
                self.ds.log('ERROR', "converting timestamp in event")
            # Split out nested collector records as their own events
            if 'collectors' in event.keys() and event['collectors'] != None:
                c_events = event['collectors']
                for c_event in c_events:
                    c_event['category'] = "events"
                    c_event['message'] = "Event ID: " + str(event['eventId']) + " collectors event"
                    c_event['eventId'] = event['eventId']
                    c_event['timestamp'] = event['timestamp']
                    extra_events.append(c_event)
                del event['collectors']
        total_events = events + extra_events
        return total_events

    def ensilo_getSystemEvents(self):
        params = {'fromDate': self.last_run, 'toDate': self.current_run}
        response = self.ensilo_request('/management-rest/system-events/list-system-events', params=params)
        return response.json()

    def ensilo_request(self, path, params=None, verify=False, proxies=None):
        url = self.url + path
        self.ds.log('INFO', "Attempting to connect to url: " + url + " with params: " + json.dumps(params))
        try:
            if self.token == None:
                self.ds.log('INFO', "No token.  Performing basic auth")
                if params == None:
                    response = requests.get(url, auth=(self.username, self.password),
                                            verify=verify, proxies=proxies)
                else:
                    response = requests.get(url, auth=(self.username, self.password),
                                            params=params, verify=verify, proxies=proxies)
            else:
                headers = {'X-Auth-Token': self.token}
                response = requests.get(url, headers=headers, params=params,
                                        timeout=15, verify=verify, proxies=proxies)
        except Exception as e:
            self.ds.log('ERROR', "Exception in ensilo_request: {0}".format(str(e)))
            return None
        if not response or response.status_code != 200:
            self.ds.log('ERROR', "Received unexpected " + str(response) + " response from enSilo Server {0}.".format(url))
            self.ds.log('ERROR', "Exiting due to unexpected response.")
            sys.exit(0)
        return response

    def ensilo_main(self):
        self.url = self.ds.config_get('ensilo', 'server_url')
        self.auth_method = self.ds.config_get('ensilo', 'auth_method')
        self.state_dir = self.ds.config_get('ensilo', 'state_dir')
        self.last_run = self.ds.get_state(self.state_dir)
        self.time_offset = int(self.ds.config_get('ensilo', 'time_offset'))
        self.timezone = self.ds.config_get('ensilo', 'timezone')
        self.pytz_timezone = pytz.timezone(self.timezone)
        self.time_format = "%Y-%m-%d %H:%M:%S"
        current_time = time.time()
        utc_tz = pytz.timezone("UTC")
        self.tz_offset = self.pytz_timezone.localize(datetime.utcfromtimestamp(current_time)).strftime("%z")
        if self.last_run == None:
            dt_last_run = datetime.utcfromtimestamp(60 * ((current_time - ((self.time_offset + 900) * 60)) // 60))
            dt_last_run = utc_tz.localize(dt_last_run)
            dt_last_run = dt_last_run.astimezone(self.pytz_timezone)
            self.last_run = dt_last_run.strftime(self.time_format)
        dt_current_run = utc_tz.localize(datetime.utcfromtimestamp(current_time - (self.time_offset * 60)))
        dt_current_run = dt_current_run.astimezone(self.pytz_timezone)
        self.current_run = dt_current_run.strftime(self.time_format)

        if self.auth_method == 'basic':
            self.token = None
            self.username = self.ds.config_get('ensilo', 'username')
            self.password = self.ds.config_get('ensilo', 'password')
            self.token = self.ensilo_basicAuth()
            if self.token != None and self.get_token == True:
                print("Token - " + self.token)
                return None
        elif self.auth_method == 'token':
            self.token = self.ds.config_get('ensilo', 'token')
        else:
            self.ds.log('ERROR', "Invalid Configuration - 'auth_method'")
            return None

        if self.token == None or self.token == '':
            self.ds.log('ERROR', "Invalid Configuration or auth failed.  No token available")
            return None

        events = self.ensilo_getEvents()
        system_events = self.ensilo_getSystemEvents()

        if events == None:
            self.ds.log('INFO', "There are no event logs to send")
        else:
            self.ds.log('INFO', "Sending {0} event logs".format(len(events)))
            for log in events:
                self.ds.writeJSONEvent(log, JSON_field_mappings=self.JSON_field_mappings, flatten=False)

        if system_events == None:
            self.ds.log('INFO', "There are no system event logs to send")
        else:
            self.ds.log('INFO', "Sending {0} system event logs".format(len(system_events)))
            for log in system_events:
                log['category'] = "system-events"
                self.ds.writeJSONEvent(log, JSON_field_mappings=self.system_JSON_field_mappings)

        self.ds.set_state(self.state_dir, self.current_run)
        self.ds.log('INFO', "Done Sending Notifications")

    def run(self):
        try:
            pid_file = self.ds.config_get('ensilo', 'pid_file')
            fp = open(pid_file, 'w')
            try:
                fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except IOError:
                self.ds.log('ERROR', "An instance of this connector is already running")
                # another instance is running
                sys.exit(0)
            self.ensilo_main()
        except Exception as e:
            traceback.print_exc()
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()
        print('  -g    Authenticate to Get Token then exit')
        print()
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None
        self.get_token = None

        try:
            opts, args = getopt.getopt(argv, "htlg")
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False
            elif opt in ("-g"):
                self.get_token = True

        try:
            self.ds = DefenseStorm('ensiloEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

class integration(object):

    JSON_field_mappings = {
        #'published': 'timestamp',
        'displayMessage': 'message',
        'actor_type': 'type',
        'eventType': 'event_type',
        'outcome_result': 'outcome',
        'actor_displayName': 'display_name',
        'actor_alternateId': 'username',
        'client_ipAddress': 'client_ip',
        'actor_id': 'user_id',
        'client_userAgent_rawUserAgent': 'user_agent',
    }

    def getEvents(self):
        events_url = 'https://' + self.api_uri + '/api/v1/events'
        headers = {'Authorization': 'SSWS ' + self.api_token}
        params = {'since': self.mystate.isoformat('T', 'seconds')[:-3] + 'Z'}
        try:
            self.ds.log('INFO', 'Sending requests {0} params: {1}'.format(events_url, params))
            events = requests.get(events_url, headers=headers, params=params)
        except Exception as e:
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return []
        if not events or events.status_code != 200:
            self.ds.log('WARNING', "Received unexpected " + str(events) +
                        " response from Okta Server {0}.".format(events_url))
            return []
        ret_list = []
        for e in events.json():
            if e == 'errorCode':
                break
            data = json.loads(json.dumps(e))
            data['category'] = 'events'
            e = json.dumps(data)
            ret_list.append(e)
        # Follow pagination links until the 'next' link stops changing
        old_link = ""
        while 'next' in events.links and events.links['next']['url'] != old_link:
            old_link = events.links['next']['url']
            try:
                self.ds.log('INFO', 'Sending requests {0}'.format(events.links['next']['url']))
                events = requests.get(events.links['next']['url'], headers=headers)
            except Exception as e:
                self.ds.log('ERROR', "Exception {0}".format(str(e)))
                return []
            if not events or events.status_code != 200:
                self.ds.log('WARNING', "Received unexpected " + str(events) +
                            " response from Okta Server {0}.".format(events_url))
                return []
            for e in events.json():
                if e == 'errorCode':
                    break
                e['category'] = 'events'
                ret_list.append(e)
            time.sleep(1)
        return ret_list

    def getLogs(self):
        logs_url = 'https://' + self.api_uri + '/api/v1/logs'
        headers = {'Authorization': 'SSWS ' + self.api_token}
        params = {'since': self.mystate.isoformat('T', 'seconds')[:-3] + 'Z'}
        #params = {'since': self.mystate.isoformat()[:-3] + 'Z' }
        try:
            self.ds.log('INFO', 'Sending requests {0}'.format(logs_url))
            events = requests.get(logs_url, headers=headers, params=params)
        except Exception as e:
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return []
        if not events or events.status_code != 200:
            self.ds.log('WARNING', "Received unexpected " + str(events) +
                        " response from Okta Server {0}.".format(logs_url))
            return []
        ret_list = []
        for e in events.json():
            if e == 'errorCode':
                break
            e['category'] = 'logs'
            e['timestamp'] = e['published'][:-5] + 'Z'
            ret_list.append(e)
        # Follow pagination links until the 'next' link stops changing
        old_link = ""
        while 'next' in events.links and events.links['next']['url'] != old_link:
            old_link = events.links['next']['url']
            try:
                self.ds.log('INFO', 'Sending requests {0}'.format(events.links['next']['url']))
                events = requests.get(events.links['next']['url'], headers=headers)
            except Exception as e:
                self.ds.log('ERROR', "Exception {0}".format(str(e)))
                return []
            if not events or events.status_code != 200:
                self.ds.log('WARNING', "Received unexpected " + str(events) +
                            " response from Okta Server {0}.".format(events.status_code))
                return []
            for e in events.json():
                if e == 'errorCode':
                    break
                e['category'] = 'logs'
                e['timestamp'] = e['published'][:-5] + 'Z'
                ret_list.append(e)
        return ret_list

    def run(self):
        self.ds.log('INFO', 'Getting Okta Logs')
        log_list = self.getLogs()
        self.ds.log('INFO', 'Getting Okta Events')
        #event_list = self.getEvents()
        event_list = []
        #self.ds.writeCEFEvent()
        for event in event_list:
            self.ds.writeJSONEvent(event, JSON_field_mappings=self.JSON_field_mappings)
        for event in log_list:
            self.ds.writeJSONEvent(event, JSON_field_mappings=self.JSON_field_mappings)
        self.ds.set_state(self.state_dir, self.newstate)

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('oktaEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

        self.api_token = self.ds.config_get('okta', 'api_token')
        self.api_uri = self.ds.config_get('okta', 'api_uri')
        self.state_dir = os.path.join(self.ds.config_get('okta', 'APP_PATH'), 'state')
        self.mystate = self.ds.get_state(self.state_dir)
        self.newstate = datetime.utcnow()
        if self.mystate == None:
            self.mystate = self.newstate - timedelta(0, 3600)

class integration(object):

    EVENTS_V1 = '/siem/v1/events'
    ALERTS_V1 = '/siem/v1/alerts'
    ENDPOINT_MAP = {
        'event': [EVENTS_V1],
        'alert': [ALERTS_V1],
        'all': [EVENTS_V1, ALERTS_V1]
    }

    JSON_field_mappings = {
        'source': 'username',
        'source_info_ip': 'ip_src',
        'created_at': 'timestamp',
        'name': 'message',
        'endpoint_type': 'host_type'
    }

    def sophos_main(self):
        tuple_endpoint = self.ENDPOINT_MAP['all']
        self.state_dir = os.path.join(self.ds.config_get('sophos', 'app_path'), 'state')
        handler = urlrequest.HTTPSHandler()
        opener = urlrequest.build_opener(handler)
        endpoint_config = {
            'format': 'json',
            'filename': 'stdout',
            'state_dir': self.state_dir,
            'since': False
        }
        for endpoint in tuple_endpoint:
            self.process_endpoint(endpoint, opener, endpoint_config)

    def process_endpoint(self, endpoint, opener, endpoint_config):
        state_file_name = "siem_lastrun_" + endpoint.rsplit('/', 1)[-1] + ".obj"
        state_file_path = os.path.join(endpoint_config['state_dir'], state_file_name)
        self.ds.log('DEBUG', "Config endpoint=%s, filename='%s' and format='%s'" %
                    (endpoint, endpoint_config['filename'], endpoint_config['format']))
        self.ds.log('DEBUG', "Config state_file='%s' and cwd='%s'" % (state_file_path, os.getcwd()))
        cursor = False
        since = False
        cursor = self.ds.get_state(self.state_dir)
        if cursor == None:
            since = int(calendar.timegm(((datetime.datetime.utcnow() -
                                          datetime.timedelta(hours=12)).timetuple())))
            self.ds.log('INFO', "No datetime found, defaulting to last 12 hours for results")

        if since is not False:
            self.ds.log('DEBUG', '%s - Retrieving results since: %s' % (endpoint, since))
        else:
            self.ds.log('DEBUG', '%s - Retrieving results starting cursor: %s' % (endpoint, cursor))

        event_list = self.call_endpoint(opener, endpoint, since, cursor, state_file_path)
        for line in event_list:
            self.ds.writeJSONEvent(line, JSON_field_mappings=self.JSON_field_mappings)

    def call_endpoint(self, opener, endpoint, since, cursor, state_file_path):
        default_headers = {
            'Content-Type': 'application/json; charset=utf-8',
            'Accept': 'application/json',
            'X-Locale': 'en',
            'Authorization': self.ds.config_get('sophos', 'authorization'),
            'x-api-key': self.ds.config_get('sophos', 'api-key')
        }
        params = {'limit': 1000}
        if not cursor:
            params['from_date'] = since
        else:
            params['cursor'] = cursor
        self.jitter()

        event_list = []
        while True:
            args = '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
            events_request_url = '%s%s?%s' % (self.ds.config_get('sophos', 'url'), endpoint, args)
            self.ds.log('DEBUG', "URL: %s" % events_request_url)
            events_request = urlrequest.Request(events_request_url, None, default_headers)
            for k, v in default_headers.items():
                events_request.add_header(k, v)
            events_response = self.request_url(opener, events_request)
            self.ds.log('DEBUG', "RESPONSE: %s" % events_response)
            if events_response != None:
                events = json.loads(events_response)
            else:
                return []

            # events looks like this
            # {
            #   u'chart_detail': {u'2014-10-01T00:00:00.000Z': 3638},
            #   u'event_counts': {u'Event::Endpoint::Compliant': 679,
            #   u'events': {}
            # }
            event_list += events['items']
            self.ds.set_state(self.state_dir, events['next_cursor'])
            if not events['has_more']:
                break
            else:
                params['cursor'] = events['next_cursor']
                params.pop('from_date', None)
        return event_list

    def jitter(self):
        time.sleep(randint(0, 10))

    def request_url(self, opener, request):
        for i in [1, 2, 3]:  # Some ops we simply retry
            try:
                response = opener.open(request)
            except urlerror.HTTPError as e:
                if e.code in (503, 504, 403, 429):
                    self.ds.log('ERROR', 'Error "%s" (code %s) on attempt #%s of 3, retrying' % (e, e.code, i))
                    if i < 3:
                        continue
                    else:
                        return None
                else:
                    self.ds.log('ERROR', 'Error during request. Error code: %s, Error message: %s' % (e.code, e.read()))
                    raise
            return response.read()

    def remove_null_values(self, data):
        return {k: v for k, v in data.items() if v is not None}

    def run(self):
        self.sophos_main()

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('sophosEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

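# Hedged configuration sketch: sophos_main() and call_endpoint() read the keys below
# via self.ds.config_get('sophos', ...).  The ini-style layout and placeholder values
# are assumptions for illustration; only the key names come from the code above.
#
#   [sophos]
#   url           = <sophos siem api base url>
#   authorization = <authorization header value>
#   api-key       = <api key>
#   app_path      = /path/to/app
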
class integration(object):

    JSON_field_mappings = {
        'systemAccount': 'client_user_name',
        'taskDisplayName': 'message',
        'ipAddress': 'client_ip',
        'userAgent': 'user_agent',
        'activityAction': 'action',
        'requestTime': 'timestamp',
        'deviceType': 'client_device_type',
        'target_descriptor': 'object_name',
        'target_id': 'object_id',
    }

    def get_auditLogsRequest(self, ssl_verify=True, proxies=None, siteId=None, groupId=None, offset=0):
        audit_logs = []
        url = self.rest_url + '/auditLogs'
        from_time = self.last_run + 'Z'
        to_time = self.current_run + 'Z'
        headers = {'Authorization': 'Bearer ' + self.access_token}
        data = {
            'type': 'userActivity',
            'to': to_time,
            'from': from_time,
            'limit': self.limit,
            'offset': offset
        }
        self.ds.log('INFO', "Attempting to connect to url: " + url + ' ,data: ' + str(data))
        try:
            response = requests.get(url, headers=headers, timeout=15, params=data,
                                    verify=ssl_verify, proxies=proxies)
        except Exception as e:
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return False
        if not response or response.status_code != 200:
            self.ds.log('WARNING', "Received unexpected " + str(response) +
                        " response from Workday Server {0}.".format(url))
            self.ds.log('WARNING', "Response.text: " + str(response.text))
            return None
        json_response = response.json()
        return json_response

    def get_auditLogs(self):
        offset = 0
        audit_logs = []
        json_response = self.get_auditLogsRequest(offset=offset)
        if json_response == None:
            return
        total = json_response['total']
        audit_logs += json_response['data']
        # Page through the remaining results in self.limit sized chunks
        while offset < total:
            offset += self.limit
            json_response = self.get_auditLogsRequest(offset=offset)
            audit_logs += json_response['data']
        self.ds.log('INFO', "Received for auditLogs record count: %s of total %s" %
                    (str(len(audit_logs)), total))
        return audit_logs

    def get_tokens(self, ssl_verify=True, proxies=None):
        auth_string = self.client_id + ':' + self.client_secret
        headers = {
            'Authorization': 'Basic ' + base64.b64encode(auth_string.encode()).decode('ascii')
        }
        data = {
            'grant_type': 'refresh_token',
            'refresh_token': self.refresh_token,
        }
        #self.ds.log('INFO', "Attempting to connect to url: " + self.token_url + ' ,headers: ' + str(headers) + ' ,data: ' + str(data) + ' ,client_id: ' + self.client_id + ' ,secret: ' + self.client_secret)
        self.ds.log('INFO', "Attempting to connect to url: " + self.token_url +
                    ' ,headers: ' + str('not included') + ' ,data: ' + str('not included') +
                    ' ,client_id: ' + self.client_id + ' ,secret: ' + self.client_secret)
        try:
            response = requests.post(self.token_url, headers=headers, timeout=15,
                                     data=data, verify=ssl_verify, proxies=proxies)
        except Exception as e:
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return False
        if not response or response.status_code != 200:
            self.ds.log('WARNING', "Received unexpected " + str(response) +
                        " response from Workday Server {0}.".format(self.token_url))
            self.ds.log('WARNING', "Response.text: " + str(response.text))
            return False
        json_response = response.json()
        self.access_token = json_response['access_token']
        return True

    def workday_main(self):
        self.rest_url = self.ds.config_get('workday', 'rest_url')
        self.token_url = self.ds.config_get('workday', 'token_url')
        self.state_dir = self.ds.config_get('workday', 'state_dir')
        self.refresh_token = self.ds.config_get('workday', 'refresh_token')
        self.client_id = self.ds.config_get('workday', 'client_id')
        self.client_secret = self.ds.config_get('workday', 'client_secret')
        if not self.get_tokens():
            return
        self.last_run = self.ds.get_state(self.state_dir)
        self.time_format = "%Y-%m-%dT%H:%M:%S"
        self.limit = 100
        current_time = datetime.utcnow()
        self.current_run = current_time.strftime(self.time_format)
        if self.last_run == None:
            last_run = current_time - timedelta(hours=8)
            self.last_run = last_run.strftime(self.time_format)

        audit_logs = self.get_auditLogs()
        if audit_logs == None:
            self.ds.log('ERROR', "Something went wrong.  Check logs above")
            return
        for audit_log in audit_logs:
            self.ds.writeJSONEvent(audit_log, JSON_field_mappings=self.JSON_field_mappings)

        self.ds.set_state(self.state_dir, self.current_run)
        self.ds.log('INFO', "Done Sending Notifications")

    def run(self):
        try:
            pid_file = self.ds.config_get('workday', 'pid_file')
            fp = open(pid_file, 'w')
            try:
                fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except IOError:
                self.ds.log('ERROR', "An instance of this connector is already running")
                # another instance is running
                sys.exit(0)
            self.workday_main()
        except Exception as e:
            traceback.print_exc()
            self.ds.log('ERROR', "Exception {0}".format(str(e)))
            return

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('workdayEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

class integration(object):

    # Field mappings are local fields = GRID fields
    JSON_field_mappings = {
        'CLIENT_IP': 'ip_src',
        'USER': 'username',
        'EVENT_TYPE': 'category',
        #'TIMESTAMP_DERIVED': 'timestamp',
        'FILE_TYPE': 'file_type',
        'SOURCE_IP': 'src_ip',
        'BROWSER_TYPE': 'http_user_agent',
        'URI': 'http_path',
        'LOGIN_STATUS': 'status',
        'DELEGATED_USER_NAME': 'src_username'
    }

    def getSalesForceLookupList(self, ObjectName, ElementName):
        entries = {}
        query = 'SELECT Id,%s From %s' % (ElementName, ObjectName)
        sf_data = self.sf.query_all(query)
        self.ds.log('INFO', "Lookup %s totalSize: %s" % (ObjectName, sf_data['totalSize']))
        for item in sf_data['records']:
            entries[item['Id']] = item[ElementName]
        self.ds.log('INFO', "%s Entries: %s" % (ObjectName, len(entries.keys())))
        return entries

    def getEventLogs(self, dir):
        '''
        Query salesforce service using REST API
        '''
        # query Ids from Event Log File
        if self.interval == 'hourly':
            state = self.ds.get_state(self.state_dir)
            if state != None:
                query = 'SELECT Id, EventType, Interval, LogDate, LogFile, Sequence From EventLogFile Where Interval = \'hourly\' and LogDate > %s Order By LogDate ASC' % state
            else:
                query = 'SELECT Id, EventType, Interval, LogDate, LogFile, Sequence From EventLogFile Where LogDate >= YESTERDAY and Interval = \'hourly\' Order By LogDate ASC'
        elif self.interval == 'daily':
            query = 'SELECT Id, EventType, Logdate, Interval From EventLogFile Where LogDate = Last_n_Days:2'
        else:
            self.ds.log('ERROR', "Bad entry for 'interval' in conf file")
            sys.exit()

        res_dict = self.sf.query_all(query)
        # capture record result size to loop over
        total_size = res_dict['totalSize']
        last_time = None
        for item in res_dict['records']:
            last_time = item['LogDate']

        # provide feedback if no records are returned
        if total_size < 1:
            self.ds.log('INFO', "No EventLogFiles were returned")
            sys.exit()

        # If directory doesn't exist, create one
        try:
            os.makedirs(dir)
        except:
            self.ds.log('ERROR', "Directory (%s) already exists...cleaning up to try to recover" % dir)
            try:
                shutil.rmtree(self.dir)
                os.makedirs(dir)
            except:
                sys.exit()

        # loop over elements in result and download each file locally
        for i in range(total_size):
            # pull attributes out of JSON for file naming
            ids = res_dict['records'][i]['Id']
            types = res_dict['records'][i]['EventType']
            dates = res_dict['records'][i]['LogDate']
            self.new_state = dates

            # create REST API request
            url = self.ds.config_get('salesforce', 'instance_url') + '/services/data/v33.0/sobjects/EventLogFile/' + ids + '/LogFile'
            headers = {'Authorization': 'Bearer ' + self.sf.session_id,
                       'X-PrettyPrint': '1',
                       'Accept-encoding': 'gzip'}

            # begin profiling
            start = time.time()

            # open connection
            req = request.Request(url, headers=headers)
            res = request.urlopen(req)

            # provide feedback to user
            self.ds.log('DEBUG', 'Downloading: ' + dates + '-' + types + '.csv to ' + os.getcwd() + '/' + dir)

            # if the response is gzip-encoded as expected
            # compression code from http://bit.ly/pyCompression
            if res.info().get('Content-Encoding') == 'gzip':
                # buffer results
                buf = io.BytesIO(res.read())
                # gzip decode the response
                f = gzip.GzipFile(fileobj=buf)
                data = f.read()
                # close buffer
                buf.close()
            else:
                # buffer results
                buf = io.BytesIO(res.read())
                # get the value from the buffer
                data = buf.getvalue()
                buf.close()

            # write buffer to CSV with following naming convention yyyy-mm-dd-eventtype.csv
            file = open(dir + '/' + dates + '-' + types + '.csv', 'w', encoding='utf-8')
            file.write(data.decode("utf-8", 'ignore'))

            # end profiling
            end = time.time()
            secs = end - start
            self.ds.log('INFO', 'File: ' + dates + '-' + types + '.csv to ' + os.getcwd() + '/' + dir +
                        ' elapsed time: ' + str('%0.2f' % secs) + ' seconds')
            file.close()
            i = i + 1

        # close connection
        res.close()

    def handleFiles(self, datadir, filelist):
        for item in filelist:
            start = time.time()
            file = item['filename']
            self.ds.log('INFO', 'Starting sending file: %s' % item['filename'])
            with open(datadir + '/' + file) as f:
                header = f.readline()
                header = header.replace('\"', '')
                header = header.replace('\n', '')
                elementList = header.split(",")
                f.seek(0)
                for line in csv.DictReader(f):
                    # Resolve ID fields to human readable names via the lookup tables
                    try:
                        if 'USER_ID_DERIVED' in line.keys():
                            line['USER'] = self.UserList[line['USER_ID_DERIVED']]
                    except KeyError:
                        pass
                    try:
                        if 'ORGANIZATION_ID' in line.keys():
                            line['ORGANIZATION'] = self.OrganizationList[line['ORGANIZATION_ID']]
                    except KeyError:
                        pass
                    try:
                        if 'REPORT_ID' in line.keys():
                            line['REPORT'] = self.ReportList[line['REPORT_ID']]
                    except KeyError:
                        pass
                    try:
                        if 'DASHBOARD_ID' in line.keys():
                            line['DASHBOARD'] = self.DashboardList[line['DASHBOARD_ID']]
                    except KeyError:
                        pass
                    try:
                        if 'DOCUMENT_ID' in line.keys():
                            line['DOCUMENT'] = self.DocumentList[line['DOCUMENT_ID']]
                    except KeyError:
                        pass
                    #try:
                    #    if 'ENTITY_ID' in line.keys():
                    #        line['ATTACHMENT'] = self.AttachmentList[line['ENTITY_ID']]
                    #except KeyError:
                    #    pass
                    try:
                        if 'DASHBOARD_COMPONENT_ID' in line.keys():
                            line['DASHBOARD_COMPONENT'] = self.DashboardComponentList[line['DASHBOARD_COMPONENT_ID']]
                    except KeyError:
                        pass
                    try:
                        if 'SITE_ID' in line.keys():
                            line['SITE'] = self.SiteList[line['SITE_ID']]
                    except KeyError:
                        pass
                    line['message'] = line['EVENT_TYPE']
                    self.ds.writeJSONEvent(line, JSON_field_mappings=self.JSON_field_mappings, flatten=False)
            end = time.time()
            secs = end - start
            self.ds.log('INFO', 'Completed events from file: %s elapsed time: %s' %
                        (item['filename'], str('%0.2f' % secs)))

    def dirFile(self, datadir):
        filelist = []
        if not os.path.isdir(datadir):
            self.ds.log('INFO', 'Data download directory (%s) does not exist' % datadir)
            return filelist
        dirlist = [f for f in os.listdir(datadir) if os.path.isfile(os.path.join(datadir, f))]
        for filename in dirlist:
            types = filename[11:-4]
            mydict = {'type': types, 'filename': filename}
            filelist.append(mydict)
        return filelist

    def getLookupTables(self):
        self.OrganizationList = self.getSalesForceLookupList('Organization', 'Name')
        self.ReportList = self.getSalesForceLookupList('Report', 'Name')
        self.DashboardList = self.getSalesForceLookupList('Dashboard', 'Title')
        self.DocumentList = self.getSalesForceLookupList('Document', 'Name')
        #self.AttachmentList = self.getSalesForceLookupList('Attachment', 'Name')
        self.DashboardComponentList = self.getSalesForceLookupList('DashboardComponent', 'Name')
        self.SiteList = self.getSalesForceLookupList('Site', 'Name')
        self.UserList = self.getSalesForceLookupList('User', 'Email')

    def run(self):
        try:
            # SalesForce Credentials
            self.username = self.ds.config_get('salesforce', 'username')
            self.password = self.ds.config_get('salesforce', 'password')
            self.security_token = self.ds.config_get('salesforce', 'security_token')
            self.instance_url = self.ds.config_get('salesforce', 'instance_url')
            # CEF Info
            # Other options
            self.interval = self.ds.config_get('salesforce', 'interval')
            self.state_dir = self.ds.config_get('salesforce', 'state_dir')
        except getopt.GetoptError:
            self.ds.log('CRITICAL', 'Error reading config values')
            self.ds.log('CRITICAL', traceback.format_exc())
            sys.exit(2)

        try:
            self.sf = Salesforce(instance_url=self.instance_url, username=self.username,
                                 password=self.password, security_token=self.security_token)
        except Exception as e:
            tb = traceback.format_exc()
            tb = tb.replace('\n', "")
            self.ds.log('CRITICAL', 'Error Logging into SalesForce')
            self.ds.log('CRITICAL', '%s' % tb)
            sys.exit(2)

        try:
            if self.dir == None:
                self.ds.log('INFO', 'Processing events from Salesforce')
                if self.interval == 'hourly':
                    self.dir = date.today().strftime("%Y-%m-%d")
                else:
                    self.dir = (date.today() - timedelta(1)).strftime("%Y-%m-%d")
                self.getEventLogs(self.dir)
            else:
                self.ds.log('INFO', 'Processing events from directory: ' + self.dir)
            self.filelist = self.dirFile(self.dir)
            if len(self.filelist) > 0:
                self.getLookupTables()
                self.handleFiles(self.dir, self.filelist)
                if self.cleanup:
                    shutil.rmtree(self.dir)
            else:
                self.ds.log('INFO', 'No log files to process. Error downloading or no work to do based on state file')
            if self.cleanup:
                self.ds.set_state(self.state_dir, self.new_state)
        except:
            self.ds.log('CRITICAL', 'Error handling salesforce events')
            self.ds.log('CRITICAL', traceback.format_exc())
            if self.cleanup:
                self.ds.log('CRITICAL', 'Attempting Cleanup')
                try:
                    shutil.rmtree(self.dir)
                except:
                    pass

    def usage(self):
        print('')
        print(os.path.basename(__file__))
        print()
        print('  No Options: Download yesterdays files from SF and process')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -d <directory>')
        print('        Rerun with a set of CSV files on disk in the specified directory')
        print('        NOTE: This will not delete the directory after successful run')
        print()
        print('  -n    Do not cleanup the download directory after run')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.cleanup = True
        self.send_syslog = True
        self.ds = None
        self.dir = None
        self.new_state = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-d"):
                self.dir = arg
                self.cleanup = False
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-n"):
                self.cleanup = False
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('salesforceEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                tb = traceback.format_exc()
                tb = tb.replace('\n', "")
                self.ds.log('CRITICAL', 'Error Logging into SalesForce')
                self.ds.log('ERROR', 'ERROR: %s' % tb)
            except:
                pass

class integration(object):

    def get_site_id(self):
        params = {
            "name": self.ds.config_get('sentinelone', 'site')
        }
        r = requests.get(self.SRC_hostname + API_SITES, headers=self.SRC_headers, params=params)
        if r.status_code != 200:
            #print ("Error: ", r.json())
            self.ds.log("ERROR", r.json())
            sys.exit()
        return r.json()['data']['sites'][0]['id']

    def get_staticIndicators(self):
        #print('Loading Static Indicators...')
        self.ds.log('INFO', 'Loading Static Indicators...')
        r = requests.get(self.SRC_hostname + API_STATIC_INDICATORS, headers=self.SRC_headers)
        if r.status_code != 200:
            #print ("Error: ", r.json())
            self.ds.log("ERROR", r.json())
            sys.exit()
        raw = r.json()['data']['indicators']
        si = {}
        for tmp in raw:
            cID = tmp['categoryId']
            cName = tmp['categoryName']
            descripClean = re.sub("<.*?>", " ", tmp['description'])
            id = int(tmp['id'])
            si[id] = {'catid': cID, 'catname': cName, 'desc': descripClean}
        return si

    def get_datalist(self, site_id, lastrun, currentrun):
        datalist = []
        cursor = ''
        while (cursor != None):
            params = {
                "siteIds": site_id,
                "limit": 100,
                "cursor": cursor,
                "createdAt__gte": lastrun,
                "createdAt__lt": currentrun,
            }
            r = requests.get(self.SRC_hostname + API_THREATS, headers=self.SRC_headers, params=params)
            if r.status_code != 200:
                #print ("Error: ", r.json())
                self.ds.log("ERROR", r.json())
                sys.exit("Error while getting datalist, exiting..")
            cursor = r.json()['pagination']['nextCursor']
            datalist.extend(r.json()['data'])
        return datalist

    def parseResponse(self, tmp):
        entry = {}
        if tmp['mitigationReport']['network_quarantine']['status'] is None:
            entry['mynetwork_quarantine'] = "None"
        else:
            entry['mynetwork_quarantine'] = tmp['mitigationReport']['network_quarantine']['status']
        if tmp['mitigationReport']['kill']['status'] is None:
            entry['mitigation_kill'] = "None"
        else:
            entry['mitigation_kill'] = tmp['mitigationReport']['kill']['status']
        if tmp['mitigationReport']['quarantine']['status'] is None:
            entry['mitigation_quar'] = "None"
        else:
            entry['mitigation_quar'] = tmp['mitigationReport']['quarantine']['status']
        if tmp['mitigationReport']['remediate']['status'] is None:
            entry['mitigation_rem'] = "None"
        else:
            entry['mitigation_rem'] = tmp['mitigationReport']['remediate']['status']
        if tmp['mitigationReport']['rollback']['status'] is None:
            entry['mitigation_roll'] = "None"
        else:
            entry['mitigation_roll'] = tmp['mitigationReport']['rollback']['status']

        # translate indicators (id => description)
        entry['threatIndicators'] = []
        IndicatorIDs = tmp['indicators']
        if len(IndicatorIDs) > 0:
            for i in IndicatorIDs:
                entry['threatIndicators'].append(self.staticIndicators[i]['desc'])
        else:
            entry['threatIndicators'] = 'NI'

        entry['agentComputerName'] = tmp['agentComputerName']
        entry['agentDomain'] = tmp['agentDomain']
        entry['agentId'] = tmp['agentId']
        entry['agentInfected'] = tmp['agentInfected']
        entry['agentIp'] = tmp['agentIp']
        entry['agentIsActive'] = tmp['agentIsActive']
        entry['agentIsDecommissioned'] = tmp['agentIsDecommissioned']
        entry['agentMachineType'] = tmp['agentMachineType']
        entry['agentNetworkStatus'] = tmp['agentNetworkStatus']
        entry['agentOsType'] = tmp['agentOsType']
        entry['agentVersion'] = tmp['agentVersion']
        entry['annotation'] = tmp['annotation']
        entry['annotationUrl'] = tmp['annotationUrl']
        entry['browserType'] = tmp['browserType']
        entry['certId'] = tmp['certId'].encode("utf-8")
        entry['classification'] = tmp['classification']
        entry['classificationSource'] = tmp['classificationSource']
        entry['classifierName'] = tmp['classifierName']
        entry['cloudVerdict'] = tmp['cloudVerdict']
        entry['collectionId'] = tmp['collectionId']
        entry['createdAt'] = tmp['createdAt']
        entry['createdDate'] = tmp['createdDate']
        entry['resolved'] = tmp['resolved']
        entry['description'] = tmp['description']
        entry['engines'] = tmp['engines']
        entry['fileCreatedDate'] = tmp['fileCreatedDate']
        entry['fileDisplayName'] = tmp['fileDisplayName']
        entry['fileExtensionType'] = tmp['fileExtensionType']
        entry['fileIsDotNet'] = tmp['fileIsDotNet']
        entry['fileIsExecutable'] = tmp['fileIsExecutable']
        entry['fileIsSystem'] = tmp['fileIsSystem']
        entry['fileMaliciousContent'] = tmp['fileMaliciousContent']
        entry['fileObjectId'] = tmp['fileObjectId']
        entry['filePath'] = tmp['filePath']
        entry['fileSha256'] = tmp['fileSha256']
        entry['fileVerificationType'] = tmp['fileVerificationType']
        entry['fromCloud'] = tmp['fromCloud']
        entry['fromScan'] = tmp['fromScan']
        entry['id'] = tmp['id']
        entry['isCertValid'] = tmp['isCertValid']
        entry['isInteractiveSession'] = tmp['isInteractiveSession']
        entry['isPartialStory'] = tmp['isPartialStory']
        entry['maliciousGroupId'] = tmp['maliciousGroupId']
        entry['maliciousProcessArguments'] = tmp['maliciousProcessArguments']
        entry['markedAsBenign'] = tmp['markedAsBenign']
        entry['mitigationMode'] = tmp['mitigationMode']
        entry['accountId'] = tmp['accountId']
        entry['accountName'] = tmp['accountName']
        entry['commandId'] = tmp['commandId']
        entry['fileContentHash'] = tmp['fileContentHash']
        entry['initiatedBy'] = tmp['initiatedBy']
        entry['initiatedByDescription'] = tmp['initiatedByDescription']
        entry['mitigationStatus'] = tmp['mitigationStatus']
        entry['publisher'] = tmp['publisher'].encode("utf-8")
        entry['rank'] = tmp['rank']
        entry['siteId'] = tmp['siteId']
        entry['siteName'] = tmp['siteName']
        entry['threatAgentVersion'] = tmp['threatAgentVersion']
        entry['threatName'] = tmp['threatName']
        entry['updatedAt'] = tmp['updatedAt']
        entry['username'] = tmp['username']
        entry['whiteningOptions'] = tmp['whiteningOptions']

        # Build the message entry:
        entry['message'] = entry['threatName'] + ': Kill:' + entry['mitigation_kill'] + \
            ', Quarantine:' + entry['mitigation_quar'] + ', Host:' + entry['agentComputerName']

        # Build the compatible timestamp
        entry_time = datetime.datetime.strptime(entry['createdAt'], '%Y-%m-%dT%H:%M:%S.%fZ')
        entry['timestamp'] = entry_time.strftime("%Y-%m-%dT%H:%M:%SZ")
        return entry

    def run(self):
        self.state_dir = self.ds.config_get('sentinelone', 'state_dir')
        last_run = self.ds.get_state(self.state_dir)
        if last_run == None:
            self.ds.log("INFO", "No datetime found, defaulting to last 24 hours for results")
            last_run = datetime.datetime.utcnow() - datetime.timedelta(hours=24)
        current_run = datetime.datetime.utcnow()
        last_run_str = last_run.strftime("%Y-%m-%dT%H:%M:%SZ")
        current_run_str = current_run.strftime("%Y-%m-%dT%H:%M:%SZ")

        self.site_id = self.get_site_id()
        #print ("Getting threats..")
        self.ds.log("INFO", "Getting threats from: " + last_run_str + " to " + current_run_str)
        #print ("From Site: " + self.ds.config_get('sentinelone', 'site') + " [ID: " + self.site_id + "]")
        self.ds.log("INFO", "From Site: " + self.ds.config_get('sentinelone', 'site') + " [ID: " + self.site_id + "]")
        threatdata = self.get_datalist(self.site_id, last_run_str, current_run_str)
        self.staticIndicators = self.get_staticIndicators()
        for item in threatdata:
            self.ds.writeJSONEvent(self.parseResponse(item))
        self.ds.set_state(self.state_dir, current_run)

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None
        self.site_id = None
        self.staticIndicators = None
        self.SRC_headers = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('sentineloneEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

        try:
            self.SRC_headers = {
                "Content-type": "application/json",
                "Authorization": "APIToken " + self.ds.config_get('sentinelone', 'token')
            }
            self.SRC_hostname = 'https://' + self.ds.config_get('sentinelone', 'console') + ".sentinelone.net/"
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass

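# Hedged note: get_site_id(), get_staticIndicators() and get_datalist() reference
# module-level constants (API_SITES, API_STATIC_INDICATORS, API_THREATS) that are
# defined elsewhere in the original file.  The sketch below shows plausible values
# for a SentinelOne v2-style web API; the exact paths are assumptions, not taken
# from the original source.
#
#   API_SITES = 'web/api/v2.0/sites'
#   API_STATIC_INDICATORS = 'web/api/v2.0/threats/static-indicators'
#   API_THREATS = 'web/api/v2.0/threats'
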
class integration(object):

    def get_token(self):
        params = {
            'email': self.ds.config_get('sonicwall', 'email'),
            'password': self.ds.config_get('sonicwall', 'password')
        }
        response = requests.post(self.url + "/login", data=params)
        if response.status_code == 200:
            results = response.json()
            return results['token']
        else:
            self.ds.log("ERROR", "Failed to Login " + str(response.status_code))
            return None

    def get_clientLogsEvents(self, since=None):
        if since == None:
            self.ds.log("ERROR", "No 'since' time specified for clientLogsEvents")
            return None
        # First request only reports the count; second request pulls that many records
        params = {'since': since, 'sizePerPage': '1'}
        response = requests.get(self.url + "/clientLogsEvents/logs", headers=self.headers, params=params)
        if response.status_code != 200:
            self.ds.log("ERROR", "Failed to get clientLogsEvents " + str(response.status_code))
            return None
        results = response.json()
        self.ds.log("INFO", "clientLogsEvents retrieving " + str(results['count']) + " events")
        params = {'since': since, 'sizePerPage': results['count']}
        response = requests.get(self.url + "/clientLogsEvents/logs", headers=self.headers, params=params)
        if response.status_code != 200:
            self.ds.log("ERROR", "Failed to get clientLogsEvents " + str(response.status_code))
            return None
        results = response.json()
        return results['data']

    def get_managementConsoleLogs(self, since=None):
        if since == None:
            self.ds.log("ERROR", "No 'since' time specified for managementConsoleLogs")
            return None
        params = {'since': since, 'sizePerPage': '1'}
        response = requests.get(self.url + "/logs", headers=self.headers, params=params)
        if response.status_code != 200:
            self.ds.log("ERROR", "Failed to get managementConsoleLogs " + str(response.status_code))
            return None
        results = response.json()
        self.ds.log("INFO", "managementConsoleLogs retrieving " + str(results['count']) + " events")
        params = {'since': since, 'sizePerPage': results['count']}
        response = requests.get(self.url + "/logs", headers=self.headers, params=params)
        if response.status_code != 200:
            self.ds.log("ERROR", "Failed to get managementConsoleLogs " + str(response.status_code))
            return None
        results = response.json()
        return results['data']

    def run(self):
        self.url = self.ds.config_get('sonicwall', 'url')
        token = self.get_token()
        if token == None:
            self.ds.log("ERROR", "Failed to get Token")
            return None
        self.headers = {'Authorization': token}

        self.state_dir = self.ds.config_get('sonicwall', 'state_dir')
        last_run = self.ds.get_state(self.state_dir)
        if last_run == None:
            self.ds.log("INFO", "No datetime found, defaulting to last 24 hours for results")
            last_run = datetime.datetime.utcnow() - datetime.timedelta(hours=24)
        current_run = datetime.datetime.utcnow()
        last_run_str = last_run.strftime("%Y-%m-%dT%H:%M:%SZ")
        current_run_str = current_run.strftime("%Y-%m-%dT%H:%M:%SZ")

        results = self.get_clientLogsEvents(since=last_run_str)
        for item in results:
            item['timestamp'] = item['messageTime']
            self.ds.writeJSONEvent(item)
        results = self.get_managementConsoleLogs(since=last_run_str)
        for item in results:
            item['timestamp'] = item['messageTime']
            self.ds.writeJSONEvent(item)
        self.ds.set_state(self.state_dir, current_run)

    def usage(self):
        print()
        print(os.path.basename(__file__))
        print()
        print('  No Options: Run a normal cycle')
        print()
        print('  -t    Testing mode.  Do all the work but do not send events to GRID via ')
        print('        syslog Local7.  Instead write the events to file \'output.TIMESTAMP\'')
        print('        in the current directory')
        print()
        print('  -l    Log to stdout instead of syslog Local6')
        print()

    def __init__(self, argv):
        self.testing = False
        self.send_syslog = True
        self.ds = None

        try:
            opts, args = getopt.getopt(argv, "htnld:", ["datedir="])
        except getopt.GetoptError:
            self.usage()
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                self.usage()
                sys.exit()
            elif opt in ("-t"):
                self.testing = True
            elif opt in ("-l"):
                self.send_syslog = False

        try:
            self.ds = DefenseStorm('sonicwallclientcaptureEventLogs', testing=self.testing,
                                   send_syslog=self.send_syslog)
        except Exception as e:
            traceback.print_exc()
            try:
                self.ds.log('ERROR', 'ERROR: ' + str(e))
            except:
                pass