def get_event_ids_based_on_live_data():
    """Pull all visible events from the server, filter them against the
    hardcoded criteria below, export a preview of the matches to Excel for
    human review, and return the list of matching event ids.

    Returns:
        list: the ``id`` field of every event that matched the filters.

    Side effects:
        Writes ``modified_events_<timestamp>_UTC.xlsx`` into a newly created
        export folder and prints progress information to stdout.
    """
    # get all events from server
    all_events = di.get_events()
    print('INFO:', len(all_events), 'total visible events on server')

    filtered_events = []
    for event in all_events:
        # !!! TODO !!! - Modify code below to set search parameter(s) for events to modify
        # --------------------------------------------------------------------------
        if event['status'] in ['OPEN']:
            if event['recorded_device_info']['hostname'] in ['HOSTNAME01']:
                # BUGFIX: the original compared event['type'] == ['SOME_TYPE'],
                # i.e. a value against a one-element *list* with ==, which is
                # never true for a string type field. Rewritten as membership
                # tests ("in [...]") to match the style of the status/hostname/
                # path checks above and below, so the type filters actually fire.
                if event['type'] in ['REFLECTIVE_DOTNET']:
                    if event['path'] in [
                            'C:\\Program Files (x86)\\Microsoft SQL Server\\100\\DTS\\Binn\\DTExec.exe'
                    ]:
                        filtered_events.append(event)
                if event['type'] in ['AMSI_BYPASS']:
                    if event['path'] in [
                            'C:\\Program Files (x86)\\Microsoft SQL Server\\100\\DTS\\Binn\\SQLPS.exe'
                    ]:
                        filtered_events.append(event)
                if event['type'] in ['STATIC_ANALYSIS']:
                    if event['path'] in [
                            'C:\\Users\\user\\Desktop\\PANDAFREEAV - Copy.exe'
                    ]:
                        filtered_events.append(event)
        # --------------------------------------------------------------------------

    # write preview of events to be modified to disk so the operator can
    # confirm the filter hit the expected events before any modification runs
    filtered_events_df = pandas.DataFrame(filtered_events)
    folder_name = di.create_export_folder()
    file_name = f'modified_events_{datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d_%H.%M")}_UTC.xlsx'
    filtered_events_df.to_excel(f'{folder_name}/{file_name}', index=False)
    print(
        'INFO:', len(filtered_events),
        'events found matching defined criteria and have been written to disk as',
        f'{folder_name}/{file_name}. Please review before proceeding to confirm expected results.'
    )

    # strip out event ids from filtered_events
    event_id_list = []
    for event in filtered_events:
        event_id_list.append(event['id'])
    return event_id_list
with open('event_forwarder_slack.conf', 'w') as f: f.write(str(event_id)) except OSError as e: now = datetime.datetime.now() print(now.strftime("%H:%M"), 'ERROR:', e) #---runtime--- while True: max_event_processed_previously = get_config() print('Getting new events with id greater than', max_event_processed_previously) try: new_events = di.get_events( minimum_event_id=max_event_processed_previously, search=search_parameters) except requests.exceptions.RequestException as e: now = datetime.datetime.now() print(now.strftime("%H:%M"), 'ERROR:', e) new_events = [] print(len(new_events), 'events were returned') if len(new_events) > 0: for event in new_events: sanitize_event(event) print('Sending event', event['id'], 'to Slack') try: send_event_to_slack(event)
# import API Wrapper (Python bindings) and additional libraries
import deepinstinct30 as di
import pandas, datetime
from dateutil import parser

# define server config
di.key = 'BAR'
di.fqdn = 'FOO.customers.deepinstinctweb.com'

# get events from server: prevented-and-quarantined static analysis events only
search_parameters = {}
search_parameters['type'] = ['STATIC_ANALYSIS']
search_parameters['action'] = ['PREVENTED']
search_parameters['last_action'] = ['QUARANTINE_SUCCESS']
events = di.get_events(search=search_parameters)

# using events, calculate list of unique hashes
# (order-preserving dedupe via a seen-set; the original tested membership
# against the growing list itself, which is O(n^2) for large event volumes)
hash_list = []
seen_hashes = set()
for event in events:
    if event['file_hash'] not in seen_hashes:
        seen_hashes.add(event['file_hash'])
        hash_list.append(event['file_hash'])

# get policies
all_policies = di.get_policies()

# filter policy list down to Windows policies only
windows_policies = [policy for policy in all_policies if policy['os'] == 'WINDOWS']
def run_deployment_phase_progression_readiness(fqdn, key, config):
    """Assess which devices in the current deployment phase are ready to
    progress to the next phase, and export the assessment to Excel.

    Parameters:
        fqdn (str): Deep Instinct server FQDN; assigned to ``di.fqdn``.
        key (str): API key; assigned to ``di.key``.
        config (dict): assessment settings. Keys read here:
            ``deployment_phase``, ``ignore_suspicious_events``,
            ``max_open_event_quantity``, ``max_days_since_last_contact``,
            ``min_days_since_install``.

    Side effects:
        Mutates the policy and device dicts returned by the server (adds
        ``deployment_phase``, ``event_count``, ``ready_to_move_to_next_phase``
        and related fields), prints progress to stdout, and writes a
        multi-sheet .xlsx report to a newly created export folder.
        Calls ``sys.exit`` when no devices are in the configured phase.
    """
    print('Beginning data collection')
    di.fqdn = fqdn
    di.key = key
    di.quiet_mode = True
    # single multitenancy check, reused again at export time below
    # (the original queried the server a second time for the same answer)
    mt = di.is_server_multitenancy_enabled()

    # collect policy data
    print('\nGetting policy data from server')
    policies = di.get_policies(include_policy_data=True)

    # collect event data
    search_parameters = get_event_search_parameters(config['deployment_phase'])
    suspicious_search_parameters = get_suspicious_event_search_parameters(
        config['deployment_phase'])
    print('\nGetting event data from server')
    events = di.get_events(search=search_parameters)
    print(len(events), 'events were returned.')
    if not config['ignore_suspicious_events']:
        if suspicious_search_parameters != {}:
            print('\nGetting suspicious event data from server')
            suspicious_events = di.get_suspicious_events(
                search=suspicious_search_parameters)
            print(len(suspicious_events), 'suspicious events were returned.')
            events = events + suspicious_events
    event_counts = di.count_data_by_field(events, 'device_id')

    # collect device data
    print('\nGetting device data from server')
    devices = di.get_devices(include_deactivated=False)
    print(len(devices), 'devices were found.')

    # determine if we have data from a single MSP or multiple
    policy_msp_ids = []
    for policy in policies:
        if policy['msp_id'] not in policy_msp_ids:
            policy_msp_ids.append(policy['msp_id'])
    multiple_msps = len(policy_msp_ids) > 1

    print('\nAnalyzing policy data')
    policy_evaluation_results = []
    for policy in policies:
        policy['deployment_phase'] = classify_policy(policy, config)
        if policy['os'] == 'WINDOWS':
            if policy['deployment_phase'] > 0:
                result = f"Policy '{policy['name']}' (ID {policy['id']}) is a Phase {policy['deployment_phase']} policy."
            else:
                result = f"Policy '{policy['name']}' (ID {policy['id']}) is not aligned with any defined Deployment Phase."
            if mt and multiple_msps:
                # prefix the MSP identity so results are unambiguous when the
                # report spans more than one MSP
                result = f"MSP '{policy['msp_name']}' (ID {policy['msp_id']}) {result}"
            print(result)
            policy_evaluation_results.append(result)

    # keep only devices whose policy is in the phase under assessment
    filtered_devices = []
    for device in devices:
        for policy in policies:
            if policy['id'] == device['policy_id']:
                device['deployment_phase'] = policy['deployment_phase']
                if device['deployment_phase'] == config['deployment_phase']:
                    filtered_devices.append(device)
    excluded_device_count = len(devices) - len(filtered_devices)
    devices = filtered_devices
    print('')
    print(len(devices), 'devices are in a phase', config['deployment_phase'],
          'policy.')
    if len(devices) == 0:
        # NOTE(review): exits with status 0 despite printing ERROR — callers
        # checking the exit code cannot detect this abort; confirm intent.
        print('ERROR: Aborting analysis due to zero devices to analyze.')
        sys.exit(0)

    # evaluate each in-phase device against the progression criteria
    devices_ready = []
    devices_not_ready = []
    for device in devices:
        device['event_count'] = event_counts.get(device['id'], 0)
        device['last_contact_days_ago'] = (
            datetime.datetime.now(datetime.timezone.utc) -
            parser.parse(device['last_contact'])).days
        device['days_since_install'] = (
            datetime.datetime.now(datetime.timezone.utc) -
            parser.parse(device['last_registration'])).days
        device['progression_criteria_violations'] = []
        if device['event_count'] > int(config['max_open_event_quantity']):
            device['progression_criteria_violations'].append(
                'More than ' + str(config['max_open_event_quantity']) +
                ' open events')
        if device['last_contact_days_ago'] > int(
                config['max_days_since_last_contact']):
            device['progression_criteria_violations'].append(
                'Offline for more than ' +
                str(config['max_days_since_last_contact']) + ' days')
        if device['days_since_install'] < int(
                config['min_days_since_install']):
            device['progression_criteria_violations'].append(
                'Installed less than ' +
                str(config['min_days_since_install']) + ' days ago')
        if len(device['progression_criteria_violations']) > 0:
            device['ready_to_move_to_next_phase'] = False
            devices_not_ready.append(device)
        else:
            device['ready_to_move_to_next_phase'] = True
            devices_ready.append(device)

    print(len(devices_ready), 'devices are ready to move to the next phase.')
    print(
        len(devices_not_ready),
        'devices are not ready based on violating one or more of the provided criteria.'
    )
    print(
        excluded_device_count,
        'devices in the system were not assessed due to not being in a phase',
        config['deployment_phase'], 'policy.')

    # convert data to be exported to dataframes
    devices_ready_df = pandas.DataFrame(devices_ready)
    devices_not_ready_df = pandas.DataFrame(devices_not_ready)
    config_df = pandas.DataFrame(config.items())
    search_parameters_df = pandas.DataFrame(search_parameters.items())
    suspicious_search_parameters_df = pandas.DataFrame(
        suspicious_search_parameters.items())
    policy_evaluation_results_df = pandas.DataFrame(policy_evaluation_results)

    # prep for export
    folder_name = di.create_export_folder()
    from_deployment_phase = "{:g}".format(float(config['deployment_phase']))
    # reuse the multitenancy answer fetched at the top instead of asking the
    # server again
    if mt and not multiple_msps:
        server_shortname = re.sub(r'[^a-z0-9]', '',
                                  policies[0]['msp_name'].lower())
    else:
        server_shortname = di.fqdn.split(".", 1)[0]
    file_name = f'deployment_phase_{from_deployment_phase}_progression_readiness_assessment_{datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d_%H.%M")}_UTC_{server_shortname}.xlsx'

    # export dataframes to Excel format
    with pandas.ExcelWriter(f'{folder_name}/{file_name}') as writer:
        devices_ready_df.to_excel(writer,
                                  sheet_name='ready_for_next_phase',
                                  index=False)
        devices_not_ready_df.to_excel(writer,
                                      sheet_name='not_ready_for_next_phase',
                                      index=False)
        config_df.to_excel(writer, sheet_name='config', index=False)
        search_parameters_df.to_excel(writer,
                                      sheet_name='event_search',
                                      index=False)
        suspicious_search_parameters_df.to_excel(
            writer, sheet_name='suspicious_event_search', index=False)
        policy_evaluation_results_df.to_excel(writer,
                                              sheet_name='policy_evaluation',
                                              index=False)
    print('')
    print(f'Results were exported to disk as\n{folder_name}\\{file_name}\n')