def _dispatch(self, alert, descriptor):
    """Send ban hash command to CarbonBlack

    Publishing:
        There is currently no method to control CarbonBlack's behavior with publishers.

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    if not alert.context:
        LOGGER.error('[%s] Alert must contain context to run actions', self.__service__)
        return False

    creds = self._load_creds(descriptor)
    if not creds:
        return False

    client = CbResponseAPI(**creds)
    carbonblack_context = alert.context.get('carbonblack', {})

    # Get the md5 hash 'value' passed from the rules engine function
    action = carbonblack_context.get('action')
    if action == 'ban':
        binary_hash = carbonblack_context.get('value')
        # The binary should already exist in CarbonBlack
        binary = client.select(Binary, binary_hash)

        # Determine whether the binary is currently listed as banned
        if binary.banned:
            # If the ban is already enabled, there is nothing to do
            if binary.banned.enabled:
                return True
            # The ban exists but is disabled, so re-enable it
            banned_hash = client.select(BannedHash, binary_hash)
            banned_hash.enabled = True
            banned_hash.save()
        else:
            # Create and save a new BannedHash object
            banned_hash = client.create(BannedHash)
            banned_hash.md5hash = binary.md5
            banned_hash.text = "Banned from StreamAlert"
            banned_hash.enabled = True
            banned_hash.save()

        return banned_hash.enabled is True

    LOGGER.error('[%s] Action not supported: %s', self.__service__, action)
    return False
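# A minimal sketch of the other side of this contract: a rules-engine
# function attaching the 'carbonblack' context that _dispatch() reads.
# The function name and record field are hypothetical; only the context
# shape (the 'action' and 'value' keys) is taken from the dispatcher above.
def cb_ban_md5_context(record):
    """Build alert context instructing the CarbonBlack output to ban a hash."""
    return {
        'carbonblack': {
            'action': 'ban',          # the only action _dispatch() supports
            'value': record['md5'],   # md5 of the binary to ban (assumed field)
        }
    }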
def query_watchlists(cb: CbResponseAPI, query: str) -> SimpleQuery:
    """Perform a watchlist query."""
    try:
        return cb.select(Watchlist).where(query)
    except Exception as e:
        LOGGER.error(f"problem querying watchlists: {e}")
        return []
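# Hypothetical usage of the helper above; the profile and watchlist name
# are illustrative. Watchlist queries use 'field:value' terms, as in
# these_watchlists_to_list_dict() below.
cb = CbResponseAPI(profile="default")
for wl in query_watchlists(cb, "name:Newly Loaded Modules"):
    print(wl.name)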
def make_process_query(
    cb: CbResponseAPI,
    query: str,
    start_time: datetime.datetime = None,
    last_time: datetime.datetime = None,
    raise_exceptions: bool = True,
) -> ProcessQuery:
    """Query the CbResponse environment and interface results.

    Args:
        cb: A CbResponseAPI object to use
        query: The correctly formatted query
        start_time: Set the minimum last update time (relative to the server) for this query.
        last_time: Set the maximum last update time (relative to the server) for this query.
        raise_exceptions: If True, re-raise any exception hit while querying.
    Returns:
        cbapi.response.models.ProcessQuery or an empty list.
    """
    processes = []
    LOGGER.debug(f"building query: {query} between '{start_time}' and '{last_time}'")
    try:
        processes = cb.select(Process).where(query).group_by("id")
        processes = processes.min_last_server_update(start_time) if start_time else processes
        processes = processes.max_last_server_update(last_time) if last_time else processes
        LOGGER.info(f"got {len(processes)} process results grouped by id.")
    except Exception as e:
        if raise_exceptions:
            raise
        LOGGER.error(f"problem querying carbonblack with '{query}': {e}")
    return processes
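# Hypothetical usage: a bounded process search. The profile, query string,
# and one-day window are illustrative.
import datetime

cb = CbResponseAPI(profile="default")
end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(days=1)
procs = make_process_query(cb, "process_name:powershell.exe",
                           start_time=start_time, last_time=end_time,
                           raise_exceptions=False)
for proc in procs:
    print(proc.start, proc.hostname, proc.cmdline)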
def these_watchlists_to_list_dict(cb: CbResponseAPI, watchlist_names=None, watchlist_ids=None) -> List[Dict]:
    """Convert the listed watchlists to a list of their dictionary representations."""
    wl_data = []
    for wl_name in watchlist_names or []:
        wl = cb.select(Watchlist).where(f"name:{wl_name}")
        if wl:
            if len(wl) > 1:
                LOGGER.warning(f"got {len(wl)} watchlists with name matching {wl_name}; using first result")
            wl = wl[0]
            wl_data.append(watchlist_to_dict(wl))
    return wl_data
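# Hypothetical usage: serialize selected watchlists to JSON. The watchlist
# name is illustrative, and watchlist_to_dict() is the external helper
# already referenced above, assumed to return a JSON-serializable dict.
import json

cb = CbResponseAPI(profile="default")
data = these_watchlists_to_list_dict(cb, watchlist_names=["Newly Loaded Modules"])
print(json.dumps(data, indent=2))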
def make_sensor_query(cb: CbResponseAPI, sensor_query: str) -> SensorQuery:
    """Construct a SensorQuery object."""
    try:
        if ":" not in sensor_query:
            LOGGER.warning("No field specification passed. Fields: ip, hostname, groupid")
            LOGGER.info(f"Making assumption and updating query to: 'hostname:{sensor_query}'")
            sensor_query = f"hostname:{sensor_query}"
        sensors = cb.select(Sensor).where(sensor_query)
    except ValueError as e:
        LOGGER.error(f"{e}")
        return False
    LOGGER.info(f"got {len(sensors)} sensor results.")
    return sensors
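# Hypothetical usage: a bare hostname is rewritten to 'hostname:...' by the
# helper above. The profile and hostname are illustrative; the printed
# attributes are standard cbapi Sensor fields.
cb = CbResponseAPI(profile="default")
sensors = make_sensor_query(cb, "WORKSTATION-01")
if sensors:
    for sensor in sensors:
        print(sensor.hostname, sensor.status, sensor.last_checkin_time)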
def listAlerts(q):
    cb = CbResponseAPI(profile=args.instance)
    alerts = cb.select(Alert).where('hostname:' + args.hostname + ' AND (' + q +
                                    ') AND created_time:[' + starttime + ' TO ' + endtime + ']')
    for alert in alerts:
        if 'binary' in alert.alert_type:
            print("{0} - SCORE: \033[32m{1:d}\033[m - HOST: \033[32m{2:s}\033[m - "
                  "\033[33mBINARY\033[m: {3:s} - REPORT: {4:s}".format(
                      alert.created_time, alert.report_score, alert.hostname,
                      alert.md5, alert.watchlist_name))
        else:
            print("{0} - SCORE: \033[32m{1:d}\033[m - HOST: \033[32m{2:s}\033[m - "
                  "\033[31mPROCESS\033[m: {3:s} - REPORT: {4:s}".format(
                      alert.created_time, alert.report_score, alert.hostname,
                      alert.process_name, alert.watchlist_name))
            print("\033[1;30;40m{0:s}\033[m".format(alert.process.webui_link))
def search():
    global watchlist
    save_path = 'C:/Users/SMaiorino/Documents/My_Scripts/Master'
    f_name = os.path.join(save_path, 'Mal_Files_Found_{0}.csv'.format(watchlist))
    my_file = open(f_name, 'w', newline='')
    writer = csv.writer(my_file)

    with open(os.path.join(save_path, 'vulnerable-names.csv'), 'r') as n:
        names = n.readlines()
    with open(os.path.join(save_path, 'vulnerable-files.csv'), 'r') as f:
        files = f.readlines()

    print('\n---------------RESULTS---------------\n')

    for name in names:
        name = name.replace('\n', '')
        api = CbResponseAPI()
        try:
            sensor = api.select(Sensor).where('hostname:{0}'.format(name)).first()
            if sensor.status == 'Offline':
                writer.writerow([name, 'OFFLINE'])
                continue
            with sensor.lr_session() as session:
                for file in files:
                    file = file.replace('\n', '')
                    try:
                        test_file = session.get_file(r'{0}'.format(file))
                        if test_file is not None:
                            writer.writerow([name, file])
                            print('File: {0} \nComputer: {1} \n'.format(file, name))
                            continue
                    except (TimeoutError, ObjectNotFoundError, LiveResponseError,
                            ApiError, ServerError, AttributeError, TypeError):
                        pass
        except TimeoutError:
            continue
        except AttributeError:
            if sensor is None:
                continue
            break

    my_file.close()
def tempAll(q, instance, hits_bool):
    instance = instance.strip()
    tempCSV = []
    # Debug prints
    #print(instance)
    #print(q)
    cb = CbResponseAPI(profile=instance)
    query = cb.select(Process).where(
        'hostname:' + args.hostname + ' AND (' + q + ') AND start:[' +
        starttime + ' TO ' + endtime + ']').sort("start asc").max_children(args.c)
    try:
        print(colorize(instance + " - Total hits: " + str(len(query)), 'green'))
    except OSError as e:
        print(e)
    finally:
        if hits_bool:
            sys.exit()
        else:
            sleep(3)

    for proc in query:
        print("{0} {1} {2} {3} {4} \n\033[1;30;40m{5}\033[m".format(
            proc.start, instance, proc.hostname, proc.username, proc.cmdline,
            proc.webui_link))
        # Show netconns switch
        if args.n is True:
            # Iterate the CB netconns object
            for conns in proc.netconns:
                print("\033[32m{0}\033[m".format(conns))
        # Show child processes switch
        elif int(args.c) > 0:
            # Iterate the child processes
            proc.walk_children(visitor)
        elif args.csv is True:
            tempCSV.append({
                'proc.start': proc.start,
                'proc.hostname': proc.hostname,
                'proc.username': proc.username,
                'proc.cmdline': proc.cmdline,
                'proc.webui_link': proc.webui_link
            })

    if tempCSV:
        outputCSV(instance, tempCSV)
def cbResponse():
    # Set attributes for csv file
    save_path = 'C:/Users/SMaiorino/Documents/My_Scripts/Master/Computer Lists'
    f_name = os.path.join(save_path, 'List_Comps_Response.csv')
    file = open(f_name, 'w', newline='')
    f_write = csv.writer(file)
    #f_write.writerow(['NAME'])

    # Initialize API var and query parameters
    api = CbResponseAPI()
    query = "ip:172"
    sensor = api.select(Sensor).where(query)

    # Iterate through each sensor object and output the name of each
    # workstation in Response that is currently installed.
    for obj in sensor:
        names = obj.hostname
        os_name = obj.os_environment_display_string
        status = obj.status
        uninstall = obj.uninstall
        uninstalled = obj.uninstalled
        group = obj.group_id
        lastComm = str(obj.last_checkin_time)[0:10]

        if 'Server' not in os_name and 'Windows' in os_name \
                and not uninstall and not uninstalled \
                and 'Uninstall' not in status and group != 12:
            f_write.writerow([names])

    file.close()

    # Re-open the file to sort the names in alphabetically ascending order
    new_file = csv.reader(open(os.path.join(save_path, 'List_Comps_Response.csv')))
    sorted_file = sorted(new_file)

    # Re-write the sorted names into the file
    with open(os.path.join(save_path, 'List_Comps_Response.csv'), 'w', newline='') as f:
        f_write = csv.writer(f)
        for row in sorted_file:
            f_write.writerow(row)
def main():
    global watchlist, cb
    cb = CbResponseAPI()
    vuln_names = open("vulnerable-names-dupes.csv", 'w', newline='')
    write_names = csv.writer(vuln_names)
    vuln_files = open('vulnerable-files-dupes.csv', 'w', newline='')
    write_files = csv.writer(vuln_files)

    watchlist = input('Watchlist to Search Through: ')
    # NOTE: need to go into the Response Console and click on the watchlist
    # of interest - the watchlist ordinal will appear in the URL field, e.g.
    # https://172.16.95.214:8443/#watchlist/190/?filterBy=all&sortBy=name
    # Pass this ordinal as a command line parameter when invoking this script.
    binary_query = cb.select(Binary).where("watchlist_{0}:*".format(watchlist))

    # Find all instances of the binary watchlist hits, including historical instances
    for binary in binary_query:
        for filename in binary.observed_filename:
            for endpoint in binary.endpoint:
                write_names.writerow([endpoint.split("|")[0]])
            write_files.writerow([filename])

    vuln_names.close()
    vuln_files.close()
    remove_dupes()
    os.remove('vulnerable-names-dupes.csv')
    os.remove('vulnerable-files-dupes.csv')

    # Call search() to clean the list of files by verifying their presence
    # or absence on the endpoint
    search()
#!/usr/bin/python
from cbapi.response import CbResponseAPI, Process, Binary, Sensor

#
# Create our CbAPI object
#
c = CbResponseAPI()

query = c.select(Process).first()
print(query)

#
# take the first process that ran notepad.exe, download the binary and read
# the first two bytes (returns 'MZ')
#
#c.select(Process).where('process_name:notepad.exe').first().binary.file.read(2)

#
# if you want a specific ID, you can put it straight into the .select() call:
#
#binary = c.select(Binary, "24DA05ADE2A978E199875DA0D859E7EB")

#
# select all sensors that have run evil.exe
#
#sensors = set()
#for proc in c.select(Process).where('process_name:evil.exe'):
#    sensors.add(proc.sensor)

#
# iterate over all sensors and isolate
#
#for s in sensors:
#    s.network_isolation_enabled = True
#    s.save()
def get_all_watchlists(cb: CbResponseAPI) -> SimpleQuery:
    """Return a list of all watchlists."""
    return cb.select(Watchlist)
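# Hypothetical usage: enumerate every watchlist's name and underlying
# search query. The profile name is illustrative; 'name' and 'search_query'
# are assumed to be the standard cbapi Watchlist attributes.
cb = CbResponseAPI(profile="default")
for wl in get_all_watchlists(cb):
    print(wl.name, wl.search_query)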
def search():
    global watchlist, bin_file, api
    save_path = 'C:/Users/SMaiorino/Documents/My_Scripts/Master'
    f_name = os.path.join(save_path, 'Mal_Files_Found_{0}.csv'.format(watchlist))
    my_file = open(f_name, 'w', newline='')
    writer = csv.writer(my_file)

    # Retrieve the necessary filenames and workstations by reading each csv file
    with open(os.path.join(save_path, 'vulnerable-names.csv'), 'r') as n:
        names = n.readlines()
    with open(os.path.join(save_path, 'vulnerable-files.csv'), 'r') as f:
        files = f.readlines()

    print('\n---------------RESULTS---------------\n')

    for name in names:
        name = name.replace('\n', '')
        api = CbResponseAPI()
        try:
            sensor = api.select(Sensor).where('hostname:{0}'.format(name)).first()

            # Record workstations with an offline sensor
            if sensor.status == 'Offline':
                writer.writerow([name, 'OFFLINE'])
                continue

            # We use string manipulation for proper communication between the
            # live response session and the format of each file / workstation.
            # We only use 'bin_file' in the copy_encrypt_binary() function below.
            #
            # We copy the contents of the file on the current endpoint to a
            # quarantine folder. The file is then deleted from the endpoint
            # and we write the result to a csv as:
            #
            #   HOSTNAME    FILE FOUND ON COMPUTER (or OFFLINE)
            #
            # We then return to the top of the loop and move on to the next
            # endpoint in our list.

            # Start the live response session
            with sensor.lr_session() as session:
                for file in files:
                    bin_file = file.replace('\\', '\\\\')
                    file = file.replace('\n', '')
                    try:
                        # Obtain the contents of each file
                        test_file = session.get_file(r'{0}'.format(file))
                        if test_file is not None:
                            copy_encrypt_binary()
                            session.delete_file(r'{0}'.format(file))
                            writer.writerow([name, file])
                            print('File: {0} \nComputer: {1} \n'.format(file, name))
                            continue
                    except (TimeoutError, ObjectNotFoundError, LiveResponseError,
                            ApiError, ServerError, AttributeError, TypeError):
                        pass
        except TimeoutError:
            continue
        except AttributeError:
            if sensor is None:
                continue
            break

    my_file.close()
def main():
    parser = argparse.ArgumentParser(
        description="SIP Indicator CbR Search and ACE Alert.")
    parser.add_argument('-d', '--debug', action="store_true",
                        help="set logging to DEBUG", default=False)
    args = parser.parse_args()

    # load config
    config = ConfigParser()
    config.read('etc/config.ini')

    # load SIP indicator specs so we know how to get the indicators we want
    indicator_specs = {}
    with open(config['SIP']['indicator_specifications'], 'r') as stream:
        try:
            indicator_specs = yaml.safe_load(stream)
            logging.info("Successfully loaded indicator specifications: {}".format(indicator_specs))
        except yaml.YAMLError as e:
            logging.error("Couldn't load indicator specs : {}".format(e))
            return

    # Load ACE API
    ace_api.set_default_remote_host(config['ACE']['ace_address'])
    ace_api.set_default_ssl_ca_path(config['ACE']['ca_chain_path'])

    # Create SIP client and load indicators
    sip_ssl = config['SIP'].getboolean('ssl_verify')
    sc = pysip.Client(config['SIP']['sip_address'], config['SIP']['sip_api_key'], verify=sip_ssl)
    status = indicator_specs['status'] if 'status' in indicator_specs else 'Analyzed'
    indicators = {}
    for i_type in indicator_specs['type']:
        handle_proxy(config['SIP'])
        indicators[i_type] = sc.get('/indicators?type={}&status={}'.format(i_type, status))

    # load field mappings
    field_map = ConfigParser()
    field_map.read(config['GLOBAL']['field_mappings'])
    sip_cbr_map = field_map['SIP-TO-CBR']
    sip_ace_map = field_map['SIP-TO-ACE']
    cbr_ace_map = field_map['CBR-TO-ACE']

    submitted_alerts = []

    # Query Carbon Black Response for our indicators
    #cbq = CBquery(profile=config['CbR']['profile'])
    handle_proxy(config['CbR'])
    cb = CbResponseAPI(profile=config['CbR']['profile'])
    for i_type in indicator_specs['type']:
        for i in indicators[i_type]:
            query = '{}:"{}"'.format(sip_cbr_map[i_type], i['value'])
            logging.debug('Querying CbR for indicator:{} query:{}'.format(i['id'], query))
            procs = cb.select(Process).where(query).group_by('id')
            if procs:
                # alert ACE
                Alert = ace_api.Analysis(
                    description='CbR - SIP:{}'.format(i['value']),
                    analysis_mode='correlation',
                    tool='SipCbrAce')
                print(Alert.description)
                Alert.add_indicator(i['id'])

                # get SIP tags and tag the alert
                handle_proxy(config['SIP'])
                i_details = sc.get('/indicators/{}'.format(i['id']))
                handle_proxy(config['CbR'])
                for tag in i_details['tags']:
                    Alert.add_tag(tag)

                alert_details = {}
                alert_details['total_results'] = len(procs)
                max_results = config['GLOBAL'].getint('alert_max_results')
                alert_details['included_results'] = 0
                alert_details['process_details'] = []
                for proc in procs:
                    if alert_details['included_results'] > max_results:
                        break
                    alert_details['process_details'].append(str(proc))
                    alert_details['included_results'] += 1
                    Alert.add_hostname(proc.hostname)
                    Alert.add_md5(proc.process_md5)
                    Alert.add_ipv4(proc.comms_ip)
                    Alert.add_ipv4(proc.interface_ip)
                    Alert.add_process_guid(proc.id)
                    Alert.add_user(proc.username)
                    Alert.add_file_name(proc.process_name)
                    Alert.add_file_path(proc.path)
                    #Alert.add_file_location('{}@{}'.format(proc.hostname, proc.path))

                #Alert.submit_kwargs['details'] = alert_details
                handle_proxy(config['ACE'])
                print(Alert.description)
                submitted_alerts.append(Alert.submit())
                logging.info(
                    "Submitted alert to ACE: {UUID} - URL=https://{HOST}/ace/analysis?direct={UUID}"
                    .format(UUID=Alert.uuid, HOST=Alert.remote_host))

    print(submitted_alerts)
class Trawler(CommonUtils):
    def __init__(self, args):
        self.args = args
        self.cb_response_session = CbResponseAPI()

    def outputResults(self, results):
        if self.args.format == "json":
            # Year_Month_Day_Hour_Minute
            date = str((datetime.utcnow() - timedelta(hours=1)).strftime("%Y_%m_%d_%H_%M"))
            file_name = os.path.join(self.args.output_dir, "%s_results.json" % date)
            # json.dumps() returns str, so open the file in text mode
            with open(file_name, "w") as file:
                file.write(json.dumps(results))

    def formatResults(self, results):
        temp_queue, grouped_results = [], []
        for res in results["cb_results"]:
            name = res["username"]
            if name not in temp_queue:
                temp_queue.append(name)
                urls = list(OrderedDict.fromkeys([
                    res["url"] for res in results["cb_results"]
                    if name == res["username"]
                ]))
                hostnames = list(OrderedDict.fromkeys([
                    res["hostname"] for res in results["cb_results"]
                    if name == res["username"]
                ]))
                links_analysis = self.analyzeLinks(urls)
                grouped_results.append({
                    "username": name,
                    "hostnames": hostnames,
                    "host_count": len(hostnames),
                    "url_count": len(urls),
                    "urls": links_analysis,
                })
        del temp_queue
        return grouped_results

    def search(self):
        query_results = list(
            self.cb_response_session.select(Process).where(self.args.query).group_by("id"))
        results = {
            "cb_results": [{
                "hostname": str(result.hostname),
                "username": str(result.username),
                "proc_start": str(result.start),
                "url": str(self.parseLink(result.cmdline).encode("utf8"))
            } for result in query_results
              if self.whitelistDomain(str(self.parseLink(result.cmdline).encode("utf8")))]
        }
        formatted_results = self.formatResults(results)
        self.outputResults(formatted_results)
class Backend(LeetBackend):
    """Implements the CB backend communication.

    This class starts the connection to the backend server and enables
    direct interaction with it.
    """

    def __init__(self, profile_name):
        """Returns a Backend object.

        Args:
            profile_name (str): The profile name that this class will connect
                to, as seen in the 'credentials.response' file.
        """
        super().__init__("CB-" + profile_name, 7)  # TODO: move max_sessions to a configuration/variable
        self._profile_name = profile_name
        self._cb = None

    @property
    def url(self):
        """The Carbon Black server URL"""
        return self._cb.url

    def start(self):
        """Starts the internal thread (see base class documentation) and
        opens the connection to the CB server.
        """
        super().start()
        self._cb = CbResponseAPI(profile=self._profile_name)
        return self

    def _get_sensor(self, hostname):
        """Return the sensor related to the hostname.

        If more than one sensor is found, return the one that did the most
        recent check-in.

        Args:
            hostname (str): The machine name

        Returns:
            Sensor: The most recently checked-in sensor, or None if no
                sensor matches the hostname.
        """
        recent_sensor = None
        query = "hostname:" + hostname
        sensors = self._cb.select(Sensor).where(query)
        for sensor in sensors:
            if recent_sensor is None or sensor.last_checkin_time > recent_sensor.last_checkin_time:
                recent_sensor = sensor
        return recent_sensor

    def _search_machines(self, search_request):
        """See base class documentation"""
        machine_list = []
        for hostname in search_request.hostnames:
            sensor = self._get_sensor(hostname)
            if sensor is not None:
                machine_list.append(CBMachine(hostname, self.backend_name, sensor))
        return machine_list
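# A minimal usage sketch, assuming the surrounding LEET codebase provides
# LeetBackend and CBMachine, and a 'credentials.response' profile exists.
# The profile name and hostname are illustrative; _get_sensor() is private
# and is called here only to demonstrate the lookup behavior.
backend = Backend("default").start()
sensor = backend._get_sensor("WORKSTATION-01")
if sensor is not None:
    print(backend.url, sensor.hostname, sensor.last_checkin_time)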