def main():
    """Dump every sensor's raw fields as CSV rows via the module-level writer `wr`.

    The header is derived from the first sensor's `_info` attribute dict;
    every sensor then contributes one row in the same field order.
    """
    c = CbEnterpriseResponseAPI(profile="default")
    # Bug fix: first() returns None when no sensors are registered, which
    # previously crashed on `sensor._info`.
    first = c.select(Sensor).first()
    if first is None:
        return
    field_list = sorted(first._info)  # stable, sorted header
    wr.writerow(field_list)
    for sensor in c.select(Sensor):
        # Missing attributes become empty cells rather than raising.
        wr.writerow([getattr(sensor, field, "") for field in field_list])
def connect_callback(cb, line):
    """Resolve `line` (numeric sensor id or hostname) to a Sensor using `cb`.

    Bug fix: the original overwrote the passed-in `cb` with a brand-new
    CbEnterpriseResponseAPI() connection on every call, silently ignoring
    the caller's configured API object.
    """
    try:
        sensor_id = int(line)
    except ValueError:
        sensor_id = None  # not numeric -> treat `line` as a hostname
    if not sensor_id:
        sensor = cb.select(Sensor).where("hostname:{0}".format(line)).first()
    else:
        sensor = cb.select(Sensor, sensor_id)
    return sensor
def main(): band_dict = {} try: with open('max_id.txt', 'r') as target: id_max_old = target.readline() id_max_old = id_max_old.strip() except: id_max_old = 0 id_max_new = id_max_old c = CbEnterpriseResponseAPI(profile="default") sensor_list = c.select(Sensor) for sensor in sensor_list: if sensor.queued_stats: try: for i in sensor.queued_stats: if i['id'] > id_max_old: if i['id'] > id_max_new: id_max_new = i['id'] dt = i['timestamp'][0:19] sn = sensor.computer_name event_bytes = int(i['num_eventlog_bytes']) binary_bytes = int(i['num_storefile_bytes']) print "%s,%s,%d,%d" % (dt, sn, event_bytes, binary_bytes) except AttributeError: pass with open('max_id.txt', 'w') as target: target.write(id_max_new)
# Live Response transfer benchmark (fragment): connects to each listed sensor
# and, per the truncated loop at the end, presumably times file transfers of
# several sizes -- the loop body is outside this chunk. TODO confirm.
import time
from cbapi.response import CbEnterpriseResponseAPI, Sensor

cb = CbEnterpriseResponseAPI()
sensor_ids = [100, 101, 102, 103, 104, 105]  # Add multiple sensor IDs to this list

try:
    # Running aggregates for transfer rates (updated later, outside this view).
    average_transfer_time_bytes_per_sec = 0
    slowest_transfer_time_bytes_per_sec = 0
    fastest_transfer_time_bytes_per_sec = 0
    for sensor_id in sensor_ids:
        sensor = cb.select(Sensor, sensor_id)  # Get sensor object from sensor ID
        # Check online status before continuing, exit if offline
        if sensor.status != "Online":
            print('[ERROR] SennsorID: ' + str(sensor_id) + ' is offline. Skipping...')
            continue
        # Establish a session to the host sensor
        print('[INFO] Establishing session to CB Sensor #' + str(sensor.id) + '(' + sensor.hostname + ')')
        session = cb.live_response.request_session(sensor.id)
        print("[SUCCESS] Connected on Session #" + str(session.session_id))
        file_sizes = [1000000, 75000000, 250000000, 500000000]  # 1MB, 75MB, 250MB, and 500MB
        for file_size in file_sizes:
# Authors: Jared F import datetime from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() save_path = r'C:\Users\analyst\Desktop' # Locally saves All_AV_Events.txt here save_to_path = '' # Required, leave as a blank string print('Enter Sensor ID:') sensor_id = raw_input() # sensor_id = 150 # Use this to define the sensor ID in the script, rather than using input try: sensor = c.select(Sensor, sensor_id) print('[INFO] Establishing session to CB Sensor #' + str(sensor.id) + '(' + sensor.hostname + ')') session = c.live_response.request_session(sensor.id) print("[SUCCESS] Connected on Session #" + str(session.session_id)) try: session.create_directory('C:\Windows\CarbonBlack\Reports') except Exception: pass # Existed already session.create_process(r'''cmd.exe /c wevtutil qe "System" /rd:True /q:"*[System[Provider[@Name='Microsoft Antimalware'] and (EventID=1001)]]" /f:Text > C:\Windows\CarbonBlack\Reports\Antimalware_Scan_Events.txt''', True) session.create_process(r'''cmd.exe /c wevtutil qe "Microsoft-Windows-Windows Defender/Operational" /rd:True /q:*[System[(EventID=1001)]] /f:Text > C:\Windows\CarbonBlack\Reports\Defender_Scan_Events.txt''', True) print ('[SUCCESS] Queried all finished AV scan events on Sensor!') antimalware_scan_events_file = session.get_file(r'C:\Windows\CarbonBlack\Reports\Antimalware_Scan_Events.txt') defender_scan_events_file = session.get_file(r'C:\Windows\CarbonBlack\Reports\Defender_Scan_Events.txt') session.delete_file(r'C:\Windows\CarbonBlack\Reports\Antimalware_Scan_Events.txt') session.delete_file(r'C:\Windows\CarbonBlack\Reports\Defender_Scan_Events.txt')
# Fragment: tail of a Live Response status-polling helper plus a __main__
# demo harness.  NOTE(review): the enclosing function's header (and the
# `status`, `desired_status`, `start_time`, `timeout`, `delay`, `url`, `cb`
# bindings) are outside this chunk -- indentation reconstructed.
while status != desired_status and time.time() - start_time < timeout:
    res = cb.get_object(url)
    if res["status"] == desired_status:
        return res
    elif res["status"] == "error":
        raise LiveResponseError(res)
    else:
        time.sleep(delay)  # not terminal yet; back off and re-poll
raise TimeoutError(uri=url, message="timeout polling for Live Response")


if __name__ == "__main__":
    from cbapi.response import CbEnterpriseResponseAPI
    import logging

    # Verbose cbapi logging to stderr for the demo run.
    root = logging.getLogger()
    root.addHandler(logging.StreamHandler())
    logging.getLogger("cbapi").setLevel(logging.DEBUG)

    c = CbEnterpriseResponseAPI()
    j = GetFileJob(r"c:\test.txt")
    # Synchronous fetch through a context-managed Live Response session...
    with c.select(Sensor, 3).lr_session() as lr_session:
        file_contents = lr_session.get_file(r"c:\test.txt")
    # ...and the same fetch submitted as an asynchronous job against sensor 3.
    future = c.live_response.submit_job(j.run, 3)
    wait([future, ])
    print(future.result())
def main():
    """Report binaries new since a given date that later made network
    connections: for each rare, unsigned-by-Microsoft binary added after
    --date-to-query, find processes with that MD5 and netconns, and write
    one CSV row per (process, binary) hit."""
    #
    # Disable requests insecure warnings
    #
    disable_insecure_warnings()

    #
    # parse arguments
    #
    parser = build_cli_parser("New Binaries with Netconns")
    parser.add_argument("-d", "--date-to-query", action="store", dest="date",
                        help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f", "--output-file", action="store", dest="output_file",
                        help="output file in csv format")
    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)

    #
    # Initalize the cbapi-ng
    # TODO get_cb_object
    #
    cb = CbEnterpriseResponseAPI()

    #
    # Main Query
    #
    start_date = "[" + opts.date + "T00:00:00 TO *]"
    # Rare binaries (on 1-3 hosts), non-DLL, not Microsoft-signed, and not on
    # the alliance software-reputation trust list.
    binary_query = cb.select(Binary).where(("host_count:[1 TO 3]"
                                            " server_added_timestamp:" + start_date +
                                            " -observed_filename:*.dll"
                                            " -digsig_publisher:Microsoft*"
                                            " -alliance_score_srstrust:*"))

    #
    # Setup the csv writer
    #
    if not opts.output_file:
        output_file = open("new_binaries_with_netconns.csv", 'wb')
    else:
        output_file = open(opts.output_file, 'wb')
    csv_writer = csv.writer(output_file)

    #
    # Write out CSV header
    #
    csv_writer.writerow(("FileName", "Hostname", "Username", "Network Connections",
                         "Process Link", "Binary Link", "Binary MD5", "Signature Status",
                         "Company", "Observed Date", "Host Count", "Binary TimeStamp"))

    #
    # Create Progress Bar
    #
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(binary_query)).start()

    for i, binary in enumerate(binary_query):
        #
        # Update progress bar
        #
        pbar.update(i + 1)

        #
        # Retrieve the binary timestamp (PE header compile time)
        #
        binary_timestamp = time.asctime(time.gmtime(pefile.PE(data=binary.file.read()).FILE_HEADER.TimeDateStamp))

        #
        # Build a sub query to see if this binary was executed and had netconns
        #
        sub_query = "process_md5:" + binary.md5 + " netconn_count:[1 TO *]"
        process_query = cb.select(Process).where(sub_query)

        #
        # Iterate through results
        #
        for process in process_query:
            #
            # Write out the result
            #
            csv_writer.writerow((process.path, process.hostname, process.username,
                                 process.netconn_count, process.webui_link,
                                 binary.webui_link, binary.md5,
                                 binary.digsig_result if binary.digsig_result else "UNSIGNED",
                                 binary.company_name, binary.server_added_timestamp,
                                 binary.host_count, binary_timestamp))
    pbar.finish()
# This batch file deploys a vaccine against the NotPetya Ransomeware/Wiper attack. # The batch file can be found at: https://download.bleepingcomputer.com/bats/nopetyavac.bat # # File: NotPetyaVaccine.py # Date: 06/30/2017 # Author: Jared F import time from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() from distutils.version import LooseVersion c.cb_server_version = LooseVersion('5.1.0') # Legacy 5.1.0 used due to bug with how paginated queries are returned in latest version. sensors = c.select(Sensor).all() batch_path = r"C:\Users\YourAcccount\Desktop" # Where is local nopetyavac.bat on your PC? SensorsAwaitingVaccine = [] for sens in sensors: if 'windows' in sens.os_environment_display_string.lower(): if 'Uninstall' not in sens.status.lower(): SensorsAwaitingVaccine.append(sens) # We're creating a list of all installed Windows sensors print("[INFO] " + str(len(SensorsAwaitingVaccine)) + " sensors will be vaccinated against the NotPetya Ransomware/Wiper...") while len(SensorsAwaitingVaccine) != 0: # We're going to loop over these indefinitely until all have the vaccine for s in SensorsAwaitingVaccine: if 'online' in s.status.lower():
def main():
    """Main function for standardization.

    Parses CLI arguments, configures logging, resolves the target sensor(s)
    (by id, hostname, IP, all hosts, or a CSV list) and dispatches the chosen
    triage script against each, then waits for all submitted jobs.
    """
    parser = cli.parser("Carbon Black Live Response Universal Triage Script.")
    args = parser.parse_args()

    log_level = logging.DEBUG if args.verbose else logging.INFO
    logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                        level=log_level)

    if args.profile:
        cber = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cber = CbEnterpriseResponseAPI()

    # Bug fix: args.profile is None when --profile is omitted, and
    # None.upper() raised AttributeError.  Fall back to "DEFAULT".
    triage.ROOT_DIR = args.profile.upper() if args.profile else "DEFAULT"
    if not os.path.exists(triage.ROOT_DIR):
        os.mkdir(triage.ROOT_DIR)

    TriageScript = cli.tdict[args.tscript]
    TriageScript.init_check()
    # Per-script output subdirectory beneath the profile root.
    script_dir = "{0}\\{1}".format(triage.ROOT_DIR, TriageScript.path)
    if not os.path.exists(script_dir):
        os.mkdir(script_dir)

    jobs = []
    if args.sensorid:
        sensor = cber.select(Sensor, args.sensorid)
        call_triage(cber, TriageScript, sensor, jobs, args.sensorid, args.nics)
    elif args.hostname:
        sensor = cber.select(Sensor).where("hostname:{0}".format(
            args.hostname)).first()
        call_triage(cber, TriageScript, sensor, jobs, args.hostname, args.nics)
    elif args.ipaddress:
        sensor = cber.select(Sensor).where("ip:{0}".format(
            args.ipaddress)).first()
        call_triage(cber, TriageScript, sensor, jobs, args.ipaddress, args.nics)
    elif args.allhosts:
        for sensor in cber.select(Sensor):
            call_triage(cber, TriageScript, sensor, jobs, sensor.id, args.nics)
    else:
        # Fall back to a CSV of targets; the header decides the lookup key.
        if not os.path.isfile(args.sensorlist):
            tprint("File not present - " + args.sensorlist)
            return 0
        # Bug fix: the CSV file handle was never closed; use a context manager.
        with open(args.sensorlist) as listfile:
            csv_dict = csv.DictReader(listfile, delimiter=',')
            sid = check_csv_header(csv_dict, args)
            for row in csv_dict:
                if "id" in sid:
                    sensor = cber.select(Sensor, row[sid])
                elif "computer_name" in sid:
                    sensor = cber.select(Sensor).where("hostname:{0}".format(
                        row[sid])).first()
                elif "ip" in sid:
                    sensor = cber.select(Sensor).where("ip:{0}".format(
                        row[sid])).first()
                else:
                    return 0
                call_triage(cber, TriageScript, sensor, jobs, row[sid], args.nics)
    wait(jobs)
from cbapi.response import CbEnterpriseResponseAPI, Sensor cb = CbEnterpriseResponseAPI() print "Enter hostname (ALL CAPS):" MY_HOSTNAME = raw_input() sensor = cb.select(Sensor).where("hostname:" + MY_HOSTNAME).first() if not sensor: print "no sensor" else: print sensor.id
HostnameColNum = 0 # What column are the host names in? Note: The first column is 0, not 1. GroupNameColNum = 1 # What column are the new/correct group names in? Note: The first column is 0, not 1. with open(CSV_file, 'rU') as csvfile: csvDialect = csv.Sniffer().sniff(csvfile.readline()) csvfile.seek(0) csvfile = csv.reader(csvfile, dialect=csvDialect, delimiter=csvDialect.delimiter) for row in csvfile: if row[HostnameColNum] and row[GroupNameColNum]: # print ('[DEBUG] Row: ' + str(row)) # For debugging, prints the row out host_name = str(row[HostnameColNum]).lower().strip() group_name = (str(row[GroupNameColNum]).lower()).strip() if True is True: # Add any exclusions here, if desired try: group = c.select(SensorGroup).where('name:{0}'.format(group_name)).first() host = c.select(Sensor).where('hostname:{0}'.format(host_name)).first() if group and host: # If both are valid old_group_name = str(host.group.name) host.group = group # Set host group to the new group host.save() # Save the change print('[SUCCESS] Moved host: ' + host.hostname + ' from group: ' + old_group_name + ' into group: ' + group.name) else: print('[FAILURE] Failed moving host: ' + host_name + ' into group: ' + group_name) except Exception as err: # Catch exceptions print('[ERROR] Encountered: ' + str(err) + '\n --> [FAILURE] Failed moving host: ' + host_name + ' into group: ' + group_name) # Report error else: continue
# Fragment: tail of an unseen helper that accumulates column indices to drop.
# NOTE(review): indentation reconstructed; the function header is outside
# this chunk.
        to_drop.append(i)
    return to_drop


def obs_to_logprobabilites(obs_matrix):
    """Scale each column of the host/binary observation matrix in place by
    log(column total / number of hosts).

    Mutates `obs_matrix` and returns None.
    NOTE(review): a column with total 0 yields log(0) = -inf (numpy warns).
    """
    for i in range(len(obs_matrix[0, :])):
        nhosts = len(obs_matrix[:, 0])
        total = np.sum(obs_matrix[:, i])
        probability = np.log(total / nhosts)
        obs_matrix[:, i] *= probability


#%%
# Pull all executable binaries added to the server since 2020-04-01 and
# flatten their raw _info dicts into a pandas DataFrame.
cb = CbEnterpriseResponseAPI()
query = cb.select(Binary)
query = query.where('is_executable_image:true').where(
    'server_added_timestamp:[2020-04-01T23:59:59 TO *]')
#query = query.where('md5:9C80EEDD823FFA6CE9CCE22BCF1C427D')
#This runs pretty much instantly on test with 10 000 binaries in server
bin_list = []
for binary in query:
    bin_list.append(binary._info)
bin_df = pd.DataFrame(bin_list)

# produce a list of unique host/binary combinations.
bin_df_hosts = bin_df.explode('endpoint')
# filter list to only binaries which are unsigned and on less than 60 hosts (arbitary)
# Fragment: the filter expression below is truncated mid-condition.
bin_df_hosts = bin_df_hosts.loc[((bin_df_hosts['signed'] != 'Signed') &
# Could exit with an error if: # - A windows exception is thrown for any other rare reason. # - The live-response session to the sensor has a timeout. # # File: Pull_Cb_Logs.py # Date: 07/14/2017 # Author: Jared F import time import os from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() sensor = c.select(Sensor, 1) hst = sensor.hostname p = (r"C:\Windows\CarbonBlack") # Path to retrieve log files from save_path = r"C:\Users\YourAccount\Desktop\{0}".format(hst) # Where to save retrieved log files, {0} is the hostname p = os.path.normpath(p) # Ensures proper OS path syntax save_path = os.path.normpath(save_path) # Ensures proper OS path syntax extensions_to_grab = [".txt", ".log", ".dump", ".dmp", ".tmp", ".db", ".html", "catalog"] print("[INFO] Establishing session to CB Sensor #" + str(sensor.id)) try: session = c.live_response.request_session(sensor.id) print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id)) path = session.walk(p, False) # False because bottom->up walk, not top->down
from cbapi.response import CbEnterpriseResponseAPI, Sensor cb = CbEnterpriseResponseAPI() print "Enter hostname (ALL CAPS):" MY_HOSTNAME = raw_input() sensor = cb.select(Sensor).where("hostname:"+ MY_HOSTNAME).first() if not sensor: print "no sensor" else: print sensor.id
def main(): c = CbEnterpriseResponseAPI(profile="default") sensors = c.select(Sensor) for sensor in sensors: print sensor.computer_name, sensor.id, sensor.status, sensor.physical_memory_size, sensor.computer_dns_name,sensor.next_checkin_time
# Fragment: IOC sweep -- checks IP and domain IOCs against Carbon Black and
# writes one CSV row per process hit.  Starts mid-error-branch of an unseen
# open() check; ends mid-domain loop.  Indentation reconstructed.
    print("[ERROR] Unable to open " + IOC_report_path + " for writing! Is it open?\n[FAILURE] Fatal error caused exit.")
    exit(1)

print "\n[INFO] Now Reading IOCs From IOC Data File..."
process()
print "\n[INFO] Now Checking IOCs Against Carbon Black..."
print "[INFO] Depending on hit count, this process may take a while. Please wait...\n"

with UniocodeWriter(IOC_report_path) as eventwriter:
    eventwriter.writerow(['Hit From', 'Hostname', 'Username', 'Process Name', 'Process MD5', 'Process Path', 'Event', 'Event Command-Line/Source/Target/IP/Domain', 'Timestamp'])
    for ip in ips:
        query = ("" + str(ip))
        processes = (c.select(Process).where(query).group_by("id"))
        # More than 250 hits is treated as likely noise; skip() presumably
        # decides whether to keep it -- TODO confirm.
        if len(processes) > 250:
            if skip(ip) is False:
                for proc in processes:
                    write_csv(proc, "ip", ip, IOC_report_path)
            else:
                # NOTE(review): removing from `ips` while iterating it skips
                # the following element -- confirm intent.
                ips.remove(ip)
        else:
            for proc in processes:
                write_csv(proc, "ip", ip, IOC_report_path)
    for d in domains:
        query = ("domain:" + str(d))
        processes = (c.select(Process).where(query).group_by("id"))
        if len(processes) > 250:
# Fragment: IOC sweep variant -- same IP/domain hit reporting as elsewhere in
# this file; ends mid-domain loop.  Indentation reconstructed.
process()
print "\n[INFO] Now Checking IOCs Against Carbon Black..."
print "[INFO] Depending on hit count, this process may take a while. Please wait...\n"

with UniocodeWriter(IOC_report_path) as eventwriter:
    eventwriter.writerow([
        'Hit From', 'Hostname', 'Username', 'Process Name', 'Process MD5',
        'Process Path', 'Event', 'Event Command-Line/Source/Target/IP/Domain',
        'Timestamp'
    ])
    for ip in ips:
        query = ("" + str(ip))
        processes = (c.select(Process).where(query).group_by("id"))
        if len(processes) > 250:
            if skip(ip) is False:
                for proc in processes:
                    write_csv(proc, "ip", ip, IOC_report_path)
            else:
                # NOTE(review): mutating `ips` during iteration skips the
                # next element -- confirm intent.
                ips.remove(ip)
        else:
            for proc in processes:
                write_csv(proc, "ip", ip, IOC_report_path)
    for d in domains:
        query = ("domain:" + str(d))
        processes = (c.select(Process).where(query).group_by("id"))
        if len(processes) > 250:
# Fragment: tail of a Live Response status-polling helper (with debug
# logging) plus a __main__ demo harness.  NOTE(review): the enclosing
# function header and loop bindings are outside this chunk.
while status != desired_status and time.time() - start_time < timeout:
    res = cb.get_object(url)
    if res["status"] == desired_status:
        log.debug(json.dumps(res))  # dump the terminal response for tracing
        return res
    elif res["status"] == "error":
        raise LiveResponseError(res)
    else:
        time.sleep(delay)  # not terminal yet; back off and re-poll
raise TimeoutError(uri=url, message="timeout polling for Live Response")


if __name__ == "__main__":
    from cbapi.response import CbEnterpriseResponseAPI
    import logging

    # Verbose cbapi logging to stderr for the demo run.
    root = logging.getLogger()
    root.addHandler(logging.StreamHandler())
    logging.getLogger("cbapi").setLevel(logging.DEBUG)

    c = CbEnterpriseResponseAPI()
    j = GetFileJob(r"c:\test.txt")
    # Synchronous fetch through a context-managed session, then the same
    # fetch submitted as an asynchronous job against sensor 3.
    with c.select(Sensor, 3).lr_session() as lr_session:
        file_contents = lr_session.get_file(r"c:\test.txt")
    future = c.live_response.submit_job(j.run, 3)
    wait([future, ])
    print(future.result())
# CSV-driven sensor-group reassignment (fragment: the except handler body is
# truncated at the end of this chunk).
with open(CSV_file, 'rU') as csvfile:
    # Sniff the delimiter from the first line, then rewind and re-read.
    csvDialect = csv.Sniffer().sniff(csvfile.readline())
    csvfile.seek(0)
    csvfile = csv.reader(csvfile, dialect=csvDialect, delimiter=csvDialect.delimiter)
    for row in csvfile:
        if row[HostnameColNum] and row[GroupNameColNum]:
            # print ('[DEBUG] Row: ' + str(row)) # For debugging, prints the row out
            host_name = str(row[HostnameColNum]).lower().strip()
            group_name = (str(row[GroupNameColNum]).lower()).strip()
            if True is True:  # Add any exclusions here, if desired
                try:
                    # Resolve both ends of the move; either may be missing.
                    group = c.select(SensorGroup).where(
                        'name:{0}'.format(group_name)).first()
                    host = c.select(Sensor).where(
                        'hostname:{0}'.format(host_name)).first()
                    if group and host:  # If both are valid
                        old_group_name = str(host.group.name)
                        host.group = group  # Set host group to the new group
                        host.save()  # Save the change
                        print('[SUCCESS] Moved host: ' + host.hostname + ' from group: ' + old_group_name + ' into group: ' + group.name)
                    else:
                        print('[FAILURE] Failed moving host: ' + host_name + ' into group: ' + group_name)
                except Exception as err:
def main():
    """Export a sensor inventory/health CSV, optionally filtered to one
    group, hostname, or IP, with optional per-sensor process, tamper, and
    check-in-IP enrichment (each costs extra server queries)."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--profile", type=str, action="store",
                        help="The credentials.response profile to use.")

    # File output
    parser.add_argument("--prefix", type=str, action="store",
                        help="Output filename prefix.")

    # Cb Response Sensor query paramaters
    s = parser.add_mutually_exclusive_group(required=False)
    s.add_argument("--group-id", type=int, action="store",
                   help="Target sensor group based on numeric ID.")
    s.add_argument("--hostname", type=str, action="store",
                   help="Target sensor matching hostname.")
    s.add_argument("--ip", type=str, action="store",
                   help="Target sensor matching IP address (dotted quad).")

    # Health checking
    parser.add_argument("--process-count", action="store_true",
                        help="Count processes associated with this sensor.")
    parser.add_argument("--tamper-count", action="store_true",
                        help="Count tamper events associated with this sensor.")
    parser.add_argument("--checkin-ip", action="store_true",
                        help="Return the latest public IP associated with the sensor.")
    args = parser.parse_args()

    if args.prefix:
        output_filename = '%s-sensors.csv' % args.prefix
    else:
        output_filename = 'sensors.csv'

    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    output_file = open(output_filename, 'w')
    writer = csv.writer(output_file, quoting=csv.QUOTE_ALL)
    header_row = ['computer_name', 'computer_dns_name', 'sensor_group_id', 'os',
                  'os_type', 'computer_sid', 'last_checkin_time', 'registration_time',
                  'network_adapters', 'id', 'group_id', 'group_name',
                  'num_eventlog_mb', 'num_storefiles_mb', 'systemvolume_free_size',
                  'systemvolume_total_size', 'health', 'commit_charge_mb',
                  'build_version_string', 'process_count', 'tamper_count',
                  'clock_delta', 'checkin_ip']
    writer.writerow(header_row)

    # Build the optional sensor filter from the mutually exclusive group.
    query_base = None
    if args.group_id:
        query_base = 'groupid:{0}'.format(args.group_id)
    elif args.hostname:
        query_base = 'hostname:{0}'.format(args.hostname)
    elif args.ip:
        query_base = 'ip:{0}'.format(args.ip)

    if query_base is None:
        sensors = cb.select(Sensor)
    else:
        sensors = cb.select(Sensor).where(query_base)

    num_sensors = len(sensors)
    log_info("Found {0} sensors".format(num_sensors))

    counter = 1
    for sensor in sensors:
        # Lightweight progress indicator every 10 sensors.
        if counter % 10 == 0:
            print("{0} of {1}".format(counter, num_sensors))

        # resource_status is empty for sensors that never reported stats.
        if len(sensor.resource_status) > 0:
            commit_charge = "{0:.2f}".format(float(sensor.resource_status[0]['commit_charge'])/1024/1024)
        else:
            commit_charge = ''
        # Byte counters converted to MB strings with two decimals.
        num_eventlog_mb = "{0:.2f}".format(float(sensor.num_eventlog_bytes)/1024/1024)
        num_storefiles_mb = "{0:.2f}".format(float(sensor.num_storefiles_bytes)/1024/1024)
        systemvolume_free_size = "{0:.2f}".format(float(sensor.systemvolume_free_size)/1024/1024)
        systemvolume_total_size = "{0:.2f}".format(float(sensor.systemvolume_total_size)/1024/1024)

        if args.process_count == True:
            process_count = len(cb.select(Process).where('sensor_id:{0}'.format(sensor.id)))
        else:
            process_count = ''

        if args.checkin_ip == True:
            try:
                checkin_ip = cb.select(Process).where('sensor_id:{0}'.format(sensor.id)).first().comms_ip
            except AttributeError:
                # first() returned None (no processes for this sensor)
                checkin_ip = ''
        else:
            checkin_ip = ''

        if args.tamper_count == True:
            tamper_count = len(cb.select(Process).where('tampered:true AND sensor_id:{0}'.format(sensor.id)))
        else:
            tamper_count = ''

        output_fields = [sensor.computer_name.lower(), sensor.computer_dns_name.lower(),
                         sensor.group_id, sensor.os, sensor.os_type, sensor.computer_sid,
                         sensor.last_checkin_time, sensor.registration_time,
                         sensor.network_adapters, sensor.id, sensor.group_id,
                         sensor.group.name, num_eventlog_mb, num_storefiles_mb,
                         systemvolume_free_size, systemvolume_total_size,
                         sensor.sensor_health_message, commit_charge,
                         sensor.build_version_string, process_count, tamper_count,
                         sensor.clock_delta, checkin_ip]

        # Python 2: encode unicode columns as UTF-8 before the csv write.
        if _python3 == False:
            row = [col.encode('utf8') if isinstance(col, unicode) else col for col in output_fields]
        else:
            row = output_fields

        writer.writerow(row)
        counter += 1
    output_file.close()
def main():
    """For every MD5 written to disk since --date-to-query, look the binary
    up on the server, extract its PE compile timestamp, count how many times
    it executed, and write one CSV row per binary."""
    #
    # Disable requests insecure warnings
    #
    disable_insecure_warnings()

    #
    # Parse arguments
    #
    parser = build_cli_parser("System Check After Specified Date")
    parser.add_argument("-d", "--date-to-query", action="store", dest="date",
                        help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f", "--output-file", action="store", dest="output_file",
                        help="output file in csv format")
    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)

    #
    # Setup cbapi-ng
    # TODO get_cb_object
    cb = CbEnterpriseResponseAPI()

    #
    # query for all processes that match our query
    #
    print("Performing Query...")
    query = "filewrite_md5:* last_update:[" + opts.date + "T00:00:00 TO *]"
    process_query = cb.select(Process).where(query)

    #
    # Create a set so we don't have duplicates
    #
    md5_list = set()

    #
    # Iterate through all the processs
    #
    for proc in process_query:
        #
        # Iterate through all the filemods
        #
        for fm in proc.filemods:
            #
            # if an md5 exists then save it to our set
            #
            if fm.md5:
                md5_list.add(fm.md5)

    #
    # Initialize Prgoress Bar
    #
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(md5_list)).start()

    #
    # CSV
    #
    if not opts.output_file:
        output_file = open("new_binaries_after_date.csv", 'wb')
    else:
        output_file = open(opts.output_file, 'wb')
    csv_writer = csv.writer(output_file)
    csv_writer.writerow(("Binary MD5", "Binary Link", "Signature Status", "Company",
                         "Observed Date", "Host Count", "Binary TimeStamp",
                         "Number of Executions"))

    #
    # Iterate through our set
    #
    for i, md5 in enumerate(md5_list):
        pbar.update(i + 1)
        try:
            #
            # refresh our binary object with the CbER server
            # Note: this might cause an exception if the binary is not found
            #
            binary = cb.select(Binary, md5)
            if not binary:
                continue
            binary.refresh()

            #
            # Get the binary timestamp (PE header compile time)
            #
            binary_timestamp = time.asctime(
                time.gmtime(
                    pefile.PE(
                        data=binary.file.read()).FILE_HEADER.TimeDateStamp))
        except ObjectNotFoundError:
            pass  # binary store has no copy of this md5; skip it
        else:
            #
            # Get the number of times executed by retrieving the number of search
            # results
            #
            number_of_times_executed = len(
                cb.select(Process).where("process_md5:{0:s}".format(md5)))

            csv_writer.writerow(
                (binary.md5, binary.webui_link,
                 binary.digsig_result if binary.digsig_result else "UNSIGNED",
                 binary.company_name, binary.server_added_timestamp,
                 binary.host_count, binary_timestamp, number_of_times_executed))
    pbar.finish()
# Fragment: IOC sweep variant (writes to IOC_report); ends mid-domain loop.
# Indentation reconstructed.
process()
print "\n[INFO] Now Checking IOCs Against Carbon Black..."
print "[INFO] Depending on hit count, this process may take a while. Please wait...\n"

with UniocodeWriter(IOC_report) as eventwriter:
    eventwriter.writerow([
        'Hit From', 'Hostname', 'Username', 'Process Name', 'Process MD5',
        'Process Path', 'Event', 'Event Command-Line/Source/Target/IP/Domain',
        'Timestamp'
    ])
    for ip in ips:
        query = ("" + str(ip))
        processes = (c.select(Process).where(query).group_by("id"))
        if len(processes) > 250:
            if skip(ip) is False:
                for proc in processes:
                    write_csv(proc, "ip", ip, IOC_report)
            else:
                # NOTE(review): mutating `ips` during iteration skips the
                # next element -- confirm intent.
                ips.remove(ip)
        else:
            for proc in processes:
                write_csv(proc, "ip", ip, IOC_report)
    for d in domains:
        query = ("domain:" + str(d))
        processes = (c.select(Process).where(query).group_by("id"))
        if len(processes) > 250:
# This python script will iterate through the list of sensors on the CB Server and return basic details of each sensor. # Output is in comma-seperated, and can be used for simple CSV creation. # Reflects all sensors, reported by the 'Total Sensor Count' in the CB Server Dashboard. # # File: Forcefully Delete Path or File.py # Date: 06/20/2017 - Modified: 06/15/2018 # Author: Jared F from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() sensors = c.select(Sensor).all() ids = [] print('[INFO] Detecting all sensors known to CB Server...\n') print('Sensor ID,Hostname,Status,OS,Group ID,Group,IP,MAC,Last Seen') for s in sensors: ip_listing = '' mac_listing = '' for x in s.network_interfaces: # Get all network interfaces ever seen from the host (ips/macs) try: ip_listing = ip_listing + ( str(x).split("ipaddr=u'")[1].split("'")[0]) + ' ' except Exception as err: print err
def main():
    """Python 2 variant: for every MD5 written to disk since --date-to-query,
    look the binary up on the server, extract its PE compile timestamp, count
    executions, and write one CSV row per binary."""
    #
    # Disable requests insecure warnings
    #
    disable_insecure_warnings()

    #
    # Parse arguments
    #
    parser = build_cli_parser("System Check After Specified Date")
    parser.add_argument("-d", "--date-to-query", action="store", dest="date",
                        help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f", "--output-file", action="store", dest="output_file",
                        help="output file in csv format")
    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)

    #
    # Setup cbapi-ng
    # TODO get_cb_object
    cb = CbEnterpriseResponseAPI()

    #
    # query for all processes that match our query
    #
    print "Performing Query..."
    query = "filewrite_md5:* last_update:[" + opts.date + "T00:00:00 TO *]"
    process_query = cb.select(Process).where(query)

    #
    # Create a set so we don't have duplicates
    #
    md5_list = set()

    #
    # Iterate through all the processs
    #
    for proc in process_query:
        #
        # Iterate through all the filemods
        #
        for fm in proc.filemods:
            #
            # if an md5 exists then save it to our set
            #
            if fm.md5:
                md5_list.add(fm.md5)

    #
    # Initialize Prgoress Bar
    #
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(md5_list)).start()

    #
    # CSV
    #
    if not opts.output_file:
        output_file = open("new_binaries_after_date.csv", 'wb')
    else:
        output_file = open(opts.output_file, 'wb')
    csv_writer = csv.writer(output_file)
    csv_writer.writerow(("Binary MD5", "Binary Link", "Signature Status", "Company",
                         "Observed Date", "Host Count", "Binary TimeStamp",
                         "Number of Executions"))

    #
    # Iterate through our set
    #
    for i, md5 in enumerate(md5_list):
        pbar.update(i + 1)
        try:
            #
            # refresh our binary object with the CbER server
            # Note: this might cause an exception if the binary is not found
            #
            binary = cb.select(Binary, md5)
            if not binary:
                continue
            binary.refresh()

            #
            # Get the binary timestamp (PE header compile time)
            #
            binary_timestamp = time.asctime(time.gmtime(pefile.PE(data=binary.file.read()).FILE_HEADER.TimeDateStamp))
        except ObjectNotFoundError:
            pass  # binary store has no copy of this md5; skip it
        else:
            #
            # Get the number of times executed by retrieving the number of search results
            #
            number_of_times_executed = len(cb.select(Process).where("process_md5:{0:s}".format(md5)))

            csv_writer.writerow((binary.md5, binary.webui_link,
                                 binary.digsig_result if binary.digsig_result else "UNSIGNED",
                                 binary.company_name, binary.server_added_timestamp,
                                 binary.host_count, binary_timestamp,
                                 number_of_times_executed))
    pbar.finish()
# # File: Forcefully_Delete_Path_Or_File.py # Date: 06/19/2017 # Author: Jared F import time import os from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() p = ( r"C://Users//user//Desktop//MalwareFolder" ) # Path to delete, r character before ensures slashes are treated correctly sensors = c.select( Sensor, 1) # Here we define 1 or more sensors we want to delete a file / path on s = sensors # We'd use this if only checking one sensor # for s in sensors: # We'd use this if sensors was a list, not a single sensor print("[INFO] Establishing session to CB Sensor #" + str(s.id)) try: session = c.live_response.request_session(s.id) print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id)) path = session.walk( p, False ) # Walk path. False parameter is to bottom->up walk, not top->down
def main():
    """Report recently-added, rare, non-Microsoft binaries whose processes made
    network connections, writing one CSV row per matching process.

    Exits -1 if no --date-to-query is supplied.
    """
    # Disable requests insecure warnings
    disable_insecure_warnings()

    # Parse arguments
    parser = build_cli_parser("New Binaries with Netconns")
    parser.add_argument("-d", "--date-to-query", action="store", dest="date",
                        help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f", "--output-file", action="store", dest="output_file",
                        help="output file in csv format")
    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)

    # Initialize the cbapi-ng
    # TODO get_cb_object
    cb = CbEnterpriseResponseAPI()

    # Main query: rare binaries (seen on 1-3 hosts), added after the cutoff,
    # not DLLs, not Microsoft-signed, and not already alliance-trusted
    start_date = "[" + opts.date + "T00:00:00 TO *]"
    binary_query = cb.select(Binary).where(
        ("host_count:[1 TO 3]"
         " server_added_timestamp:" + start_date +
         " -observed_filename:*.dll"
         " -digsig_publisher:Microsoft*"
         " -alliance_score_srstrust:*"))

    # Setup the csv writer -- 'with' guarantees the file is flushed and closed
    # (original leaked the file handle)
    output_path = opts.output_file if opts.output_file else "new_binaries_with_netconns.csv"
    with open(output_path, 'wb') as output_file:
        csv_writer = csv.writer(output_file)

        # Write out CSV header
        csv_writer.writerow(
            ("FileName", "Hostname", "Username", "Network Connections",
             "Process Link", "Binary Link", "Binary MD5", "Signature Status",
             "Company", "Observed Date", "Host Count", "Binary TimeStamp"))

        # Create progress bar
        pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(binary_query)).start()

        for i, binary in enumerate(binary_query):
            pbar.update(i + 1)

            # Compile timestamp from the PE header of the stored file
            binary_timestamp = time.asctime(
                time.gmtime(pefile.PE(data=binary.file.read()).FILE_HEADER.TimeDateStamp))

            # Sub-query: was this binary executed with at least one netconn?
            sub_query = "process_md5:" + binary.md5 + " netconn_count:[1 TO *]"
            process_query = cb.select(Process).where(sub_query)

            # One CSV row per matching process
            for process in process_query:
                csv_writer.writerow(
                    (process.path,
                     process.hostname,
                     process.username,
                     process.netconn_count,
                     process.webui_link,
                     binary.webui_link,
                     binary.md5,
                     binary.digsig_result if binary.digsig_result else "UNSIGNED",
                     binary.company_name,
                     binary.server_added_timestamp,
                     binary.host_count,
                     binary_timestamp))
        pbar.finish()
#%%
import time
import os
import codecs
from datetime import datetime, timedelta

import pandas as pd
import pprint

from cbapi.response import CbEnterpriseResponseAPI, Sensor, SensorGroup, Process

#%%
# Process fields of interest for downstream summarization
fields = ['process_name', 'cmdline', 'parent_name', 'modload_count', 'netconn_count',
          'filemod_count', 'crossproc_count', 'childproc_count', 'group', 'hostname',
          'last_update', 'start', 'id']

cb = CbEnterpriseResponseAPI()

# Hunt for processes that opened or injected a remote thread into lsass.exe
# (common credential-theft tradecraft), limited to the last 100 days
query = cb.select(Process)
query = query.where("process_name:lsass.exe AND "
                    "(crossproc_type:processopentarget or crossproc_type:remotethreadtarget)")
query = query.group_by('id')
query = query.min_last_update(datetime.today() - timedelta(days=100))

print('running query')
results = []
crossprocs = []
for process in query:
    results.append(process)
    # Use extend() rather than `crossprocs = crossprocs + [...]`: repeated list
    # concatenation re-copies the whole accumulator each iteration (quadratic).
    crossprocs.extend(
        [process._info['process_name'], crossproc.source_path,
         crossproc.target_path, crossproc.type, crossproc.privileges,
         process._info['hostname'], process._info['id']]
        for crossproc in process.all_crossprocs()
        if crossproc.target_path.endswith('lsass.exe'))
# File: Install Mass Process and Run.py
# Date: 06/20/2018
# Author: Jared F, keithmccammon (RedCanaryCo)

import sys
import threading
from Queue import Queue
from time import sleep

from cbapi.response import CbEnterpriseResponseAPI, Sensor
from cbapi.errors import *

c = CbEnterpriseResponseAPI()

### ==========[START CONFIG VARIABLES]========== ###

# Sensors to target. Alternatives:
#   c.select(Sensor).where('groupid:1')             -> one endpoint group only
#   c.select(Sensor).where('hostname:HostNameHere') -> one endpoint only
# See 'Custom-exclusions can be added here' to add specific exclusions.
main_query = c.select(Sensor).all()  # All endpoints

log_name = 'Mass Install Log.txt'  # Script output will be directed to this log file
process_name_x86 = 'RunMe.exe'  # Executable name for x86 (32-bit Operating System)
process_name_x64 = 'RunMe.exe'  # Executable name for x64 (64-bit Operating System)
process_location_local = r'C:\Users\analyst\Desktop'  # Local folder holding the executables above
process_location_remote = r'C:\Windows\CarbonBlack\Tools'  # Where to place the executable on the remote sensor
process_args = ''  # Executable arguments to run, ENSURE a leading space if not empty!
wait_for_output_bool = False  # Wait for the process to output something before continuing
wait_for_completion_bool = False  # Wait for the process to complete before continuing
process_run_timeout = 30  # Timeout in seconds; if reached, the install/execute is reattempted
delete_process_after = False  # Delete the remote executable after execution?

### ===========[END CONFIG VARIABLES]=========== ###
# NOTE(review): whitespace-mangled chunk (a whole script collapsed onto one line),
# truncated mid-`try` -- the matching except/finally and the rest of the file-walk
# loop are not in view, so the code is left byte-identical rather than reformatted.
# It opens a live-response session to sensor id 1, walks path `p` bottom-up, and
# begins iterating subdirectories/files, presumably to force-delete them (per the
# filename) -- TODO confirm against the complete file.
# - Another running .exe prevents an .exe from being deleted (self-defense or reliance) # - A windows exception is thrown for any other rare reason. # - The live-response session to the sensor has a timeout. # # File: Forcefully_Delete_Path_Or_File.py # Date: 06/19/2017 # Author: Jared F import time import os from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() p = (r"C://Users//user//Desktop//MalwareFolder") # Path to delete, r character before ensures slashes are treated correctly sensors = c.select(Sensor, 1) # Here we define 1 or more sensors we want to delete a file / path on s = sensors # We'd use this if only checking one sensor # for s in sensors: # We'd use this if sensors was a list, not a single sensor print("[INFO] Establishing session to CB Sensor #" + str(s.id)) try: session = c.live_response.request_session(s.id) print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id)) path = session.walk(p, False) # Walk path. False parameter is to bottom->up walk, not top->down exes = [] for items in path: # For each subdirectory in the path directory = os.path.normpath((str(items[0]))) # The subdirectory in OS path syntax fileslist = items[2] # List of files in the subdirectory
# NOTE(review): whitespace-mangled chunk (a whole script collapsed onto one line),
# truncated mid-`try` -- the matching except and the remainder of the collection
# logic are not in view, so the code is left byte-identical rather than reformatted.
# It pins the cbapi server version to 5.1.0, opens a live-response session to sensor
# id 1, creates C:\Windows\CarbonBlack\Reports on the endpoint, and begins uploading
# UserProfilesView.exe there -- presumably to enumerate user accounts (per the
# filename); TODO confirm against the complete file.
# Could exit with an error if: # - A windows exception is thrown for any rare reason. # - The live-response session to the sensor has a timeout. # # File: Get_User_Accounts.py # Date: 06/20/2017 # Author: Jared F import time from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() from distutils.version import LooseVersion c.cb_server_version = LooseVersion('5.1.0') sensors = c.select(Sensor, 1) # Define sensor, could do sensors = c.select(Sensor) for all upv_path = r"C:\Users\admin\Desktop\CBScripts" # Where is local UserProfilesView.exe ? save_path = r"C:\Users\admin\Desktop\CBScripts\dumps" # Where to save user accounts data file returned ? s = sensors # We'd use this if only checking one sensor # for s in sensors: # We'd use this if sensors was a list, not a single sensor print("[INFO] Establishing session to CB Sensor #" + str(s.id)) try: session = c.live_response.request_session(s.id) print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id)) try: session.create_directory("C:\Windows\CarbonBlack\Reports") except Exception: pass # Existed already try: session.put_file(open(upv_path + "\UserProfilesView.exe", "rb"), "C:\Windows\CarbonBlack\Reports\UPV.exe")
def main():
    """Run a PowerShell directory-listing script on a remote host via CB Live
    Response and download the resulting CSV locally.

    Usage: script.py <computer_name> <user_name>
    Side effects: creates/removes C:\\Windows\\CarbonBlack\\ExportedFiles on the
    endpoint and writes Output\\<computer>_DirectoryListing.csv next to this script.
    """
    dir_path = os.path.abspath(os.path.dirname(__file__))
    print("Carbon Black Live Response - Retrieving User Folder Directory Listing ")

    strScriptName = "DirListing.ps1"
    strComputerName = sys.argv[1]
    strUserName = sys.argv[2]

    cb = CbEnterpriseResponseAPI()
    sensor = cb.select(Sensor).where("hostname:" + strComputerName).first()

    if not sensor:
        print("Computer not found!")
    elif sensor.status == "Offline":
        print("")
        print("Computer", strComputerName, "is", sensor.status)
        print("")
        os.system("pause")
    else:
        print("")
        print("Computer Name:", strComputerName)
        print("")
        print("Sensor ID:", sensor.id)
        print("")
        sensor_id = sensor.id
        sensor = cb.select(Sensor, sensor_id)

        with sensor.lr_session() as session:
            # Create a folder under C:\Windows\CarbonBlack named ExportedFiles
            path = "C:\\Windows\\CarbonBlack\\ExportedFiles"
            try:
                print("Creating directory: C:\Windows\CarbonBlack\ExportedFiles")
                session.create_directory(path)
            except Exception as e:
                print(" Directory already exists: %s" % e)

            # Upload the script to the folder
            print(" ")
            print(("Uploading {0}....").format(strScriptName))
            print(" ")
            binary = r'{0}'.format(strScriptName)
            # 'with' closes the local script file after reading
            with open(binary, 'rb') as filedata:
                try:
                    session.put_file(
                        filedata.read(),
                        "C:\\Windows\\CarbonBlack\\ExportedFiles\\" + binary)
                except Exception as e:
                    # Upload failed: report, close the session, and bail out.
                    # (Original closed the session here but then kept using it.)
                    print("Upload failed: %s" % e)
                    session.close()
                    return

            # Run the PowerShell script
            command = r'PowerShell.exe -nologo -file C:\Windows\CarbonBlack\ExportedFiles\{0} {1}'.format(
                strScriptName, strUserName)
            print("Executing: '{0}' ".format(command))
            session.create_process(command, wait_timeout=900, wait_for_completion=True)

            # Download the resulting .CSV file
            FileLocation = r'C:\Windows\CarbonBlack\ExportedFiles'
            filename = "DirectoryListing.csv"
            print(" ")
            print("Downloading DirectoryListing.csv....")
            # 'with' guarantees the local output file is flushed and closed
            # (original leaked the handle)
            with open("{0}\Output\{2}_{1}".format(dir_path, filename, strComputerName),
                      "wb") as outfile:
                outfile.write(session.get_file(FileLocation + "\{0}".format(filename)))

            # Give the endpoint a moment before removing the folder
            print(" ")
            print("Sleep 15 sec after Downloading .CSV file")
            time.sleep(15)

            # Remove the folder on the remote computer
            command = r'cmd /c rmdir C:\Windows\CarbonBlack\ExportedFiles /s /q'
            print("Executing: '{0}' ".format(command))
            session.create_process(command, wait_timeout=30, wait_for_completion=True)
            session.close()
# NOTE(review): whitespace-mangled chunk (a whole script collapsed onto one line),
# truncated inside nested `try` blocks -- the matching excepts and the rest of the
# script are not in view, so the code is left byte-identical rather than reformatted.
# It pins the cbapi server version to 5.1.0, opens a live-response session to sensor
# id 1, and begins creating C:\Windows\CarbonBlack\Reports on the endpoint --
# presumably to stage UserProfilesView.exe for account enumeration (per the
# filename); TODO confirm against the complete file.
# - A windows exception is thrown for any rare reason. # - The live-response session to the sensor has a timeout. # # File: Get_User_Accounts.py # Date: 06/20/2017 # Author: Jared F import time from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() from distutils.version import LooseVersion c.cb_server_version = LooseVersion('5.1.0') sensors = c.select( Sensor, 1) # Define sensor, could do sensors = c.select(Sensor) for all upv_path = r"C:\Users\admin\Desktop\CBScripts" # Where is local UserProfilesView.exe ? save_path = r"C:\Users\admin\Desktop\CBScripts\dumps" # Where to save user accounts data file returned ? s = sensors # We'd use this if only checking one sensor # for s in sensors: # We'd use this if sensors was a list, not a single sensor print("[INFO] Establishing session to CB Sensor #" + str(s.id)) try: session = c.live_response.request_session(s.id) print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id)) try: session.create_directory("C:\Windows\CarbonBlack\Reports")
## ---------------------------------------------------------------------------------------------------------------------------------------- ## ## ## ---------------------------------------------------------------------------------------------------------------------------------------- ## ---------------------------------------------------------------------------------------------------------------------------------------- ## Set Target ## ---------------------------------------------------------------------------------------------------------------------------------------- import time from cbapi.response import CbEnterpriseResponseAPI, Sensor c = CbEnterpriseResponseAPI() print("Enter Sensor ID") name = input() sensor_id = name sensor = c.select(Sensor, sensor_id) with sensor.lr_session( ) as session: # this will wait until the Live Response session is established session.put_file(open("\\\\DIRECTORY\\artifactpullcb.ps1", "rb"), "c:\\windows\\CarbonBlack\\artifactpullcb.ps1") session.create_process("PowerShell SET-EXECUTIONPOLICY UNRESTRICTED") output = session.create_process("PowerShell .\\artifactpullcb.ps1") session.create_process("PowerShell SET-EXECUTIONPOLICY RESTRICTED") time.sleep(1000) print output # add line to delete ps1 file after completion
#%%
import pandas as pd

from cbapi.response import CbEnterpriseResponseAPI, Sensor, SensorGroup, Process, Binary

#%%
cb = CbEnterpriseResponseAPI()

# Pull every executable-image binary the server knows about.
# This runs pretty much instantly on test with 10 000 binaries in server.
query = cb.select(Binary)
query = query.where('is_executable_image:true')

bin_list = []
for binary in query:
    bin_list.append(binary._info)

bin_df = pd.DataFrame(bin_list)
# One row per observed filename (observed_filename holds a list per binary)
bin_df = bin_df.explode('observed_filename')

#%%
# Count distinct md5/publisher/internal-name values per filename.
# Fix: selecting multiple columns from a groupby requires a *list* of labels;
# the bare tuple form  .groupby(...)['a', 'b', 'c']  was deprecated and is
# removed in pandas >= 1.0.
grouped_by_name = bin_df.groupby('observed_filename')[
    ['md5', 'digsig_publisher', 'internal_name']].nunique()