def main():
    """Download every binary from a Cb Response server into a destination directory."""
    parser = build_cli_parser("Grab all binaries from a Cb server")
    parser.add_argument('-d', '--destdir', action='store', default=os.curdir,
                        help='Destination directory to place the events')
    # TODO: we don't have a control on the "start" value in the query yet
    # parser.add_argument('--start', action='store', dest='startvalue',
    #                     help='Start from result number', default=0)
    parser.add_argument('-v', action='store_true', dest='verbose', default=False,
                        help='Enable verbose debugging messages')
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # Verbose flag toggles debug-level logging for the whole run.
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    # startvalue = args.startvalue  (re-enable once --start is supported)
    startvalue = 0
    return dump_all_binaries(cb, args.destdir, startvalue)
def main():
    """Print one CSV row for each .exe/.dll file written by a cmd.exe process."""
    parser = build_cli_parser("Search for cmd.exe writing to exe and dll filepaths")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    query = "process_name:cmd.exe (filemod:*.exe or filemod:*.dll)"
    for proc in cb.select(Process).where(query):
        for filemod in proc.filemods:
            # Only report writes whose target path really ends in .exe/.dll.
            if not filemod.path.lower().endswith((".exe", ".dll")):
                continue
            signed = ""
            product_name = ""
            if filemod.type == "LastWrote" and filemod.md5:
                try:
                    written_binary = cb.select(Binary, filemod.md5)
                    signed = written_binary.signed
                    product_name = written_binary.product_name
                except ObjectNotFoundError:
                    # The binary may not have been uploaded to the server yet.
                    pass
            print("%s,%s,%s,%s,%s,%s,%s,%s,%s" % (str(filemod.timestamp), proc.hostname,
                                                  proc.username, proc.path, filemod.path,
                                                  filemod.type, filemod.md5, signed,
                                                  product_name))
def main():
    """Download a sensor installer package for a sensor group to a local file.

    Returns 1 when the sensor group cannot be located; otherwise None.
    """
    parser = build_cli_parser()
    parser.add_argument("--filename", "-f", action="store", default=None, dest="filename",
                        help="Filename to save the installer package to", required=True)
    parser.add_argument("--sensor-group", "-g", action="store", default="1", dest="group",
                        help="Sensor group name or ID of the group to download an installer for")
    parser.add_argument("--installer-type", "-t", action="store", default="windows/exe", dest="type",
                        help="Installer type; must be one of windows/exe, windows/msi, linux, osx")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # Interpret the group argument as a numeric ID first; fall back to a name lookup.
    try:
        sensor_group_id = int(args.group)
        sensor_group = cb.select(SensorGroup, sensor_group_id, force_init=True)
    except (ValueError, ObjectNotFoundError):
        sensor_group = cb.select(SensorGroup).where('name:{0:s}'.format(args.group)).one()
    except Exception:
        print("Could not find sensor group via id or name ({0:s})".format(args.group))
        return 1

    # download the installer package
    # print("-> Downloading {0:s} installer for group {1:s} to file {2:s}...".format(args.type, sensor_group.name, args.filename))
    try:
        # Fixed: use a context manager so the output handle is flushed and
        # closed even on error (the original `open(...).write(...)` leaked it).
        with open(args.filename, 'wb') as fp:
            fp.write(sensor_group.get_installer(args.type))
    except ObjectNotFoundError:
        print("** Could not find an installer for {0:s}".format(args.type))
    except IOError:
        print("** Error writing to file {0:s}".format(args.filename))
    except Exception as e:
        print("** Unknown exception: {0:s}".format(str(e)))
    else:
        print("-> Download complete")
def main():
    """Resolve every alert matching a query one at a time, reporting each result."""
    parser = build_cli_parser("Bulk resolve alerts")
    parser.add_argument("--query", action="store", default="",
                        help="The query string of alerts to resolve. All matching alerts will be resolved.")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    resolved_alerts = 0
    for alert in cb.select(Alert).where("-status:Resolved " + args.query):
        try:
            alert.status = "Resolved"
            alert.save()
        except ApiError as e:
            print("Error resolving {0:s}: {1:s}".format(alert.unique_id, str(e)))
        else:
            resolved_alerts += 1
            print("Resolved {0:s}".format(alert.unique_id))

    if not resolved_alerts:
        print("Congratulations! You have no unresolved alerts!")
        return
    # Give the server a moment to apply the status changes.
    print("Waiting for alert changes to take effect...")
    time.sleep(25)
    print("Complete. Resolved {0:d} alerts.".format(resolved_alerts))
def main():
    """Export selected watchlists into a shareable JSON file."""
    parser = build_cli_parser("Export watchlists into shareable JSON format")
    parser.add_argument("-f", "--file", required=True,
                        help="Select what file output is written to")
    parser.add_argument("-w", "--watchlists",
                        help="Specific watchlist(s) to export. Can be comma separated.")
    parser.add_argument("-m", "--selective", action="store_true",
                        help="Interactively select which watchlists to export")
    parser.add_argument("-d", "--description",
                        help="Description for the watchlist export file")
    parser.add_argument("-a", "--author",
                        help="Author for the watchlist export file")
    args = parser.parse_args()
    cb = get_cb_response_object(args)
    return export_watchlists(cb, args)
def main():
    """Dispatch watchlist subcommands: list, add, delete."""
    parser = build_cli_parser()
    commands = parser.add_subparsers(help="Watchlist commands", dest="command_name")

    commands.add_parser("list", help="List all configured watchlists")

    add_command = commands.add_parser("add", help="Add new watchlist")
    add_command.add_argument("-N", "--name", help="Name of watchlist", required=True)
    add_command.add_argument("-q", "--query",
                             help="Watchlist query string, e.g. process_name:notepad.exe",
                             required=True)
    add_command.add_argument("-t", "--type",
                             help="Watchlist type 'events' or 'modules'", required=True)

    del_command = commands.add_parser("delete", help="Delete watchlists")
    del_spec = del_command.add_mutually_exclusive_group(required=True)
    del_spec.add_argument("-i", "--id", type=int, help="ID of watchlist to delete")
    del_spec.add_argument("-N", "--name",
                          help="Name of watchlist to delete. Specify --force to delete"
                               " multiple watchlists that have the same name")
    del_command.add_argument("--force",
                             help="If NAME matches multiple watchlists, delete all matching watchlists",
                             action="store_true", default=False)

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # Route the chosen subcommand to its handler.
    handlers = {"list": list_watchlists, "add": add_watchlist, "delete": delete_watchlist}
    if args.command_name in handlers:
        return handlers[args.command_name](cb, parser, args)
def main():
    """Resolve all alerts matching the query, saving each alert individually."""
    parser = build_cli_parser("Bulk resolve alerts")
    parser.add_argument("--query", action="store", default="",
                        help="The query string of alerts to resolve. All matching alerts will be resolved.")
    opts = parser.parse_args()
    cb = get_cb_response_object(opts)

    unresolved = cb.select(Alert).where("-status:Resolved " + opts.query)
    count = 0
    for alert in unresolved:
        alert.status = "Resolved"
        try:
            alert.save()
        except ApiError as err:
            print("Error resolving {0:s}: {1:s}".format(alert.unique_id, str(err)))
        else:
            print("Resolved {0:s}".format(alert.unique_id))
            count += 1

    if count:
        # Allow the server some time to apply the changes before reporting.
        print("Waiting for alert changes to take effect...")
        time.sleep(25)
        print("Complete. Resolved {0:d} alerts.".format(count))
    else:
        print("Congratulations! You have no unresolved alerts!")
def main():
    """Alert on processes whose average network connections per second exceed a threshold."""
    parser = build_cli_parser("High avg. netconn/second alert")
    parser.add_argument("--skip-unknown", "-s", action="store_true", default=False,
                        dest="skip_unknown",
                        help="Skip processes with unknown start or last update")
    parser.add_argument("--rate", "-r", type=float, default=100.0, dest="conn_rate",
                        help="Alert on processes with more than [--rate] network connections per second")
    parser.add_argument("--gt-count", "-g", type=int, default=100, dest="gt_count",
                        help="Filter processes with greater than [--gt-count] network events")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    query = "netconn_count:[{0:d} TO *]".format(args.gt_count)
    for proc in cb.select(Process).where(query).sort("last_update desc"):
        try:
            runtime = (proc.last_update - proc.start).total_seconds()
        except Exception:
            # Either timestamp can be missing; either skip the process or
            # assume a one-second runtime, depending on --skip-unknown.
            if args.skip_unknown:
                continue
            runtime = 1.0

        if not runtime and proc.netconn_count > 0:
            # simulate "infinity" so as to avoid a DivideByZero exception
            rate = 1000000
        else:
            rate = proc.netconn_count / float(runtime)

        if rate > args.conn_rate:
            print("{0:s}|{1:s}|{2:s}|{3:.4f}".format(proc.hostname, proc.username,
                                                     proc.process_name, rate))
def main():
    """Print dwell time for binaries matching a query, plus hosts that wrote them."""
    parser = build_cli_parser()
    parser.add_argument("--query", help="binary query", default='')
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    for binary in cb.select(Binary).where(args.query):
        # server_added_timestamp is an ISO-8601 string; parse it to compute dwell time.
        s = datetime.datetime.strptime(binary.server_added_timestamp, "%Y-%m-%dT%H:%M:%S.%fZ")
        now = datetime.datetime.utcnow()
        dwell_time = now - s
        print(binary.md5sum)
        print("%-20s : %s" % ('Endpoint(s)', binary.endpoint))
        print("%-20s : %s" % ('Dwell Time', dwell_time))
        print("%-20s : %s" % ('First Seen', binary.server_added_timestamp))
        print("%-20s : %s" % ('Size (bytes)', binary.size))
        # Fixed: was a Python 2 print statement, a SyntaxError on Python 3.
        print("*" * 80)

        pd = {}
        for proc in cb.select(Process).where('filewrite_md5:%s' % (binary.md5sum)):
            pd[proc.unique_id] = {'hostname': proc.hostname, 'username': proc.username}
            for fm in proc.filemods:
                if fm.type == "LastWrote" and fm.md5 == binary.md5sum.lower():
                    pd[proc.unique_id]['dwell_time'] = now - fm.timestamp
                    pd[proc.unique_id]['path'] = fm.path

        for entry in pd.keys():
            # Fixed: was a Python 2 print statement, a SyntaxError on Python 3.
            print(pd[entry]['hostname'], pd[entry]['username'],
                  pd[entry]['dwell_time'], pd[entry]['path'])
def main():
    """Dispatch storage-partition subcommands (list/create/delete/mount/unmount)."""
    parser = build_cli_parser()
    commands = parser.add_subparsers(help="Storage Partition commands", dest="command_name")
    commands.add_parser("list", help="List all storage partitions")
    commands.add_parser("create", help="Create new active writer partition")

    del_command = commands.add_parser("delete", help="Delete partition")
    del_command.add_argument("-N", "--name", help="Name of partition to delete.", required=True)
    mount_command = commands.add_parser("mount", help="Mount partition")
    mount_command.add_argument("-N", "--name", help="Name of partition to mount.", required=True)
    unmount_command = commands.add_parser("unmount", help="Unmount partition")
    unmount_command.add_argument("-N", "--name", help="Name of partition to unmount.", required=True)

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # Storage-partition management requires server 6.1.0 or newer.
    if cb.cb_server_version < LooseVersion("6.1.0"):
        parser.error("This script can only work with server versions >= 6.1.0; {0} is running {1}"
                     .format(cb.url, cb.cb_server_version))
        return 1

    dispatch = {
        "list": list_partitions,
        "create": create_partition,
        "delete": delete_partition,
        "mount": mount_partition,
        "unmount": unmount_partition,
    }
    handler = dispatch.get(args.command_name)
    if handler is not None:
        return handler(cb, parser, args)
def main():
    """For each watched filename, count which process names executed the matching binaries.

    Relies on the module-level `watched_names` and `ignored_names` collections.
    """
    parser = build_cli_parser()
    parser.add_argument("--query", help="binary query", default='')
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    for wn in watched_names:
        bq = "observed_filename:%s" % (wn)
        for binary in cb.select(Binary).where(bq):
            print("-" * 80)
            # Fixed: was a Python 2 print statement, a SyntaxError on Python 3.
            print("Filename: %s Filehash: %s" % (wn, binary.md5sum))
            print("-" * 80)
            pq = "process_md5:%s" % (binary.md5)
            # to limit the search to the previous 24 hours
            # comment out the above query and uncomment the one below
            # pq = "process_md5:%s start:-1440m" % (binary.md5)
            proc_names = {}
            for proc in cb.select(Process).where(pq):
                if proc.process_name in ignored_names:
                    continue
                # dict.get replaces the original try/except KeyError counter.
                proc_names[proc.process_name] = proc_names.get(proc.process_name, 0) + 1
            for k in sorted(proc_names.keys()):
                # Fixed: was a Python 2 print statement.
                print(k + " = " + str(proc_names[k]))
            print('\n')
def main():
    """Write selected fields of matching CbR sensors out as CSV."""
    parser = build_cli_parser(description="Export CbR Sensors from your environment as CSV")
    parser.add_argument("--output", "-o", dest="exportfile",
                        help="The file to export to", required=True)
    # Default column set exported when --fields is not given.
    default_fields = "id,hostname,group_id,network_interfaces,os_environment_display_string,build_version_string,network_isolation_enabled,last_checkin_time"
    parser.add_argument("--fields", "-f", dest="exportfields",
                        help="The fields to export", default=default_fields, required=False)
    parser.add_argument("--query", "-q", dest="query",
                        help="optional query to filter exported sensors", required=False)
    args = parser.parse_args()
    cb = get_cb_response_object(args)
    return export_sensors(cb,
                          export_file_name=args.exportfile,
                          export_fields=args.exportfields.split(","),
                          query=args.query)
def main():
    """Bulk-resolve all unresolved alerts matching a required query string."""
    parser = build_cli_parser("Bulk resolve alerts")
    parser.add_argument("--query", action="store", default="", required=True,
                        help="The query string of alerts to resolve. All matching alerts will be resolved.")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    alert_query = cb.select(Alert).where("-status:Resolved").where(args.query)
    alert_count = len(alert_query)
    if alert_count > 0:
        print("Resolving {0:d} alerts...".format(len(alert_query)))
        # Server-side bulk status change; much faster than saving each alert.
        alert_query.change_status("Resolved")
        print("Waiting for alert changes to take effect...")
        time.sleep(25)
        print("Complete. Resolved {0:d} alerts.".format(alert_count))
    else:
        print("Congratulations! You have no unresolved alerts!")
def main():
    """Run a sensor operation (default Isolate) against sensors flagged by watchlists."""
    parser = build_cli_parser(description="Automatic detection and response based on watchlists")
    parser.add_argument("--watchlists", "-w", dest="watchlists",
                        help="The watchlists in question", required=True)
    parser.add_argument("--operation", "-o", dest="operation",
                        help="The operation to perform", required=True, default="Isolate")
    # Fixed: was parser.add_arguemnt(...), which raised AttributeError at runtime.
    parser.add_argument("--dryrun", "-d", dest="dryrun", help="Dry run mode",
                        default=False, required=False)
    args = parser.parse_args()
    cb = get_cb_response_object(args)
    return sensor_operations(cb, watchlists=args.watchlists,
                             operation=args.operation, dryrun=args.dryrun)
def main():
    """Restart sensors whose health message matches a configured criteria list."""
    parser = build_cli_parser("Restart Cb Response sensors when certain criteria are met")
    parser.add_argument("--config", "-c", help="Configuration file path",
                        default="restart_sensors.conf")
    parser.add_argument("--dryrun", "-d", help="Dry run - don't actually restart sensors",
                        action="store_true")
    options = parser.parse_args()

    # Fixed: read the config through a context manager so the handle is closed
    # (the original left the open file dangling).
    with open(options.config, "r") as fp:
        criteria = [c.strip() for c in fp.readlines()]
    criteria = [c for c in criteria if c != ""]

    print("Will restart sensors that have any of the following sensor health messages:")
    for c in criteria:
        print(" - {0}".format(c))

    cb = get_cb_response_object(options)
    num_sensors_restarted = 0
    for sensor in cb.select(Sensor):
        if sensor.sensor_health_message in criteria:
            print("Restarting sensor id {0} (hostname {1}) because its health message is {2}"
                  .format(sensor.id, sensor.hostname, sensor.sensor_health_message))
            num_sensors_restarted += 1
            # Dry-run mode counts and reports but never touches the sensor.
            if not options.dryrun:
                sensor.restart_sensor()

    print("{0} {1} sensors.".format("Would have restarted" if options.dryrun else "Restarted",
                                    num_sensors_restarted))
def main():
    """Print metadata for every binary matching the given query."""
    parser = build_cli_parser()
    parser.add_argument("--query", help="binary query", default='')
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    for binary in cb.select(Binary).where(args.query):
        print(binary.md5sum)
        print("-" * 80)
        print("%-20s : %s" % ('Size (bytes)', binary.size))
        print("%-20s : %s" % ('Signature Status', binary.signed))
        # Publisher is only meaningful for signed binaries.
        if binary.signed == True:
            print("%-20s : %s" % ('Publisher', binary.digsig_publisher))
        else:
            print("%-20s : %s" % ('Publisher', 'n/a'))
        print("%-20s : %s" % ('Product Version', binary.product_version))
        print("%-20s : %s" % ('File Version', binary.file_version))
        print("%-20s : %s" % ('64-bit (x64)', binary.is_64bit))
        print("%-20s : %s" % ('EXE', binary.is_executable_image))
        for fn in binary.observed_filenames:
            # Report just the basename of each observed on-disk path.
            print("%-20s : %s" % ('On-Disk Filename', fn.split('\\')[-1]))
        print('\n')
def main():
    """Walk and print the children of a process GUID or of all query matches.

    Returns 1 on lookup errors, 2 when neither --process nor --query is given.
    """
    parser = build_cli_parser("Walk the children of a given process")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--process", "-p", help="process GUID to walk", default='')
    group.add_argument("--query", "-q",
                       help="walk the children of all processes matching this query")
    args = parser.parse_args()
    c = get_cb_response_object(args)

    if args.process:
        try:
            procs = [c.select(Process, args.process, force_init=True)]
        except ObjectNotFoundError:
            # Fixed: referenced the nonexistent attribute args.procss, which
            # raised AttributeError instead of printing this message.
            print("Could not find process {0:s}".format(args.process))
            return 1
        except ApiError as e:
            print("Encountered error retrieving process: {0:s}".format(str(e)))
            return 1
        except Exception as e:
            print("Encountered unknown error retrieving process: {0:s}".format(str(e)))
            return 1
    elif args.query:
        procs = c.select(Process).where(args.query)
    else:
        print("Requires either a --process or --query argument")
        parser.print_usage()
        return 2

    for root_proc in procs:
        print("Process {0:s} on {1:s} executed by {2:s} children:".format(
            root_proc.path, root_proc.hostname, root_proc.username))
        root_proc.walk_children(visitor)
        print("")
def main():
    """Generate an incident report for a single process GUID."""
    parser = cbhelper.build_cli_parser()
    parser.add_argument("--guid", dest="guid", help="GUID of target process", required=True)
    args = parser.parse_args()
    cbapi = cbhelper.get_cb_response_object(args)

    repgen = IncidentReportGenerator(cbapi=cbapi)
    print("[+] Generating report for process guid: {}\n".format(args.guid))
    # args.verbose comes from the shared CLI parser; coerce it to a plain bool.
    repgen.generate_report(guid=args.guid, verbose=bool(args.verbose))
def main():
    """Open a Live Response session on the sensor given by ID and run the example."""
    parser = build_cli_parser("Cb Response Live Response example")
    parser.add_argument("sensorid", nargs=1)
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # nargs=1 yields a one-element list; the ID itself is the first entry.
    sensor_id = int(args.sensorid[0])
    sensor = cb.select(Sensor, sensor_id)
    run_liveresponse(sensor.lr_session())
def main():
    """Ban an MD5 hash in Cb Response via the banned-hash list."""
    parser = build_cli_parser("Add an MD5 hash to the banned hash list in Cb Response")
    parser.add_argument("-H", "--hash", required=True,
                        help="MD5 hash of the file to ban in Cb Response")
    parser.add_argument("-d", "--description",
                        help="Description of why the hash is banned")
    args = parser.parse_args()
    return ban_hash(get_cb_response_object(args), args)
def main():
    """Run a sensor operation against sensors identified by the given watchlists."""
    parser = build_cli_parser(description="Automatic detection and response based on watchlists")
    parser.add_argument("--watchlists", "-w", dest="watchlists",
                        help="The watchlists in question", required=True)
    parser.add_argument("--operation", "-o", dest="operation",
                        help="The operation to perform", required=True, default="Isolate")
    parser.add_argument("--dryrun", "-d", dest="dryrun", help="Dry run mode",
                        default=False, required=False)
    opts = parser.parse_args()
    connection = get_cb_response_object(opts)
    return sensor_operations(connection, watchlists=opts.watchlists,
                             operation=opts.operation, dryrun=opts.dryrun)
def main():
    """Walk children of a process (by GUID or query), limiting how many are fetched.

    Returns 1 on lookup errors, 2 when neither --process nor --query is given.
    """
    parser = build_cli_parser("Walk the children of a given process")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--process", "-p", help="process GUID to walk", default='')
    group.add_argument("--query", "-q",
                       help="walk the children of all processes matching this query")
    parser.add_argument("--children", "-c", default=15,
                        help="number of children to fetch")
    args = parser.parse_args()
    c = get_cb_response_object(args)

    if args.process:
        try:
            procs = [c.select(Process, args.process, max_children=args.children,
                              force_init=True)]
        except ObjectNotFoundError:
            # Fixed: referenced the nonexistent attribute args.procss, which
            # raised AttributeError instead of printing this message.
            print("Could not find process {0:s}".format(args.process))
            return 1
        except ApiError as e:
            print("Encountered error retrieving process: {0:s}".format(str(e)))
            return 1
        except Exception as e:
            print("Encountered unknown error retrieving process: {0:s}".format(str(e)))
            return 1
    elif args.query:
        procs = c.select(Process).where(args.query).group_by("id").max_children(args.children)
    else:
        print("Requires either a --process or --query argument")
        parser.print_usage()
        return 2

    for root_proc in procs:
        # A process with no "terminated" event is assumed to still be running.
        if not root_proc.get('terminated'):
            duration = "still running"
        else:
            duration = str(root_proc.end - root_proc.start)
        print("Process {0:s} on {1:s} executed by {2:s}:".format(
            root_proc.cmdline, root_proc.hostname, root_proc.username))
        print("started at {0} ({1})".format(str(root_proc.start), duration))
        print("Cb Response console link: {0}".format(root_proc.webui_link))
        root_proc.walk_children(visitor)
        print("")
def main():
    """Print all notepad.exe processes seen on host w2k12 using the default profile."""
    parser = build_cli_parser("Print processes")
    args = parser.parse_args()
    # Force the default credential profile regardless of command-line flags.
    args.profile = "default"
    cb = get_cb_response_object(args)
    for proc in cb.select(Process).where("process_name:notepad.exe hostname:w2k12"):
        # Fixed: were Python 2 print statements, a SyntaxError on Python 3.
        print(proc)
        print("############")
def main():
    """Import watchlists from a shareable JSON file."""
    parser = build_cli_parser("Import watchlists from shareable JSON format")
    parser.add_argument("-f", "--file", required=True,
                        help="Select what file watchlists are read from")
    parser.add_argument("-w", "--watchlists",
                        help="Specific watchlist(s) to import. Can be comma separated.")
    parser.add_argument("-m", "--selective", action="store_true",
                        help="Interactively select which watchlists to import")
    opts = parser.parse_args()
    connection = get_cb_response_object(opts)
    return import_watchlists(connection, opts)
def main():
    """Export sensor records matching an optional query to a CSV file."""
    parser = build_cli_parser(description="Export CbR Sensors from your environment as CSV")
    parser.add_argument("--output", "-o", dest="exportfile",
                        help="The file to export to", required=True)
    parser.add_argument("--fields", "-f", dest="exportfields",
                        help="The fields to export",
                        default="id,hostname,group_id,network_interfaces,os_environment_display_string,build_version_string,network_isolation_enabled,last_checkin_time",
                        required=False)
    parser.add_argument("--query", "-q", dest="query",
                        help="optional query to filter exported sensors", required=False)
    opts = parser.parse_args()
    cb = get_cb_response_object(opts)
    # --fields is a single comma-separated string; split it into a list.
    fields = opts.exportfields.split(",")
    return export_sensors(cb, export_file_name=opts.exportfile,
                          export_fields=fields, query=opts.query)
def main():
    """Stream a binary (looked up by MD5) from the server into a local file."""
    parser = build_cli_parser()
    parser.add_argument("--md5", help="binary query", required=True)
    parser.add_argument("--filename", help="local filename to save the binary as",
                        required=True)
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    binary = cb.select(Binary, args.md5)
    # Fixed: open the destination via a context manager so it is flushed and
    # closed even if the copy fails (the original leaked the handle).
    with open(args.filename, "wb") as fp:
        shutil.copyfileobj(binary.file, fp)
    print("-> Downloaded binary %s [%u bytes]" % (args.md5, binary.size))
    return 0
def main():
    """Dump the MD5 of every binary on the server to a text file, one per line."""
    parser = build_cli_parser()
    parser.add_argument("--filename", help="filename for md5 list", required=True)
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    with open(args.filename, "w") as fp:
        for binary in cb.select(Binary).all():
            fp.write("{0:s}\n".format(binary.md5sum))
    return 0
def main():
    """Start the interactive Live Response shell, optionally logging to a file."""
    parser = build_cli_parser("Cb Response Live Response CLI")
    parser.add_argument("--log", help="Log activity to a file", default='')
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    if args.log:
        # Mirror debug-level activity into the requested log file.
        handler = logging.FileHandler(args.log)
        handler.setLevel(logging.DEBUG)
        log.addHandler(handler)

    shell = CblrCli(cb, connect_callback)
    shell.cmdloop()
def main():
    """Print the sorted raw field names of the first binary matching a query."""
    parser = build_cli_parser()
    parser.add_argument("--query", help="binary query", default='')
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    for binary in cb.select(Binary).where(args.query):
        # print(binary)
        for k in sorted(binary._info):
            # Fixed: was a Python 2 print statement, a SyntaxError on Python 3.
            print(k)
        # Only the first result is needed to enumerate the field names.
        break
def main():
    """Build the watchlist-export CLI and hand off to export_watchlists()."""
    parser = build_cli_parser("Export watchlists into shareable JSON format")
    parser.add_argument("-f", "--file",
                        help="Select what file output is written to", required=True)
    parser.add_argument("-w", "--watchlists",
                        help="Specific watchlist(s) to export. Can be comma separated.")
    parser.add_argument("-m", "--selective",
                        help="Interactively select which watchlists to export",
                        action="store_true")
    parser.add_argument("-d", "--description",
                        help="Description for the watchlist export file")
    parser.add_argument("-a", "--author",
                        help="Author for the watchlist export file")
    opts = parser.parse_args()
    connection = get_cb_response_object(opts)
    return export_watchlists(connection, opts)
def main():
    """CLI entry point for listing, adding, and deleting watchlists."""
    parser = build_cli_parser()
    subcommands = parser.add_subparsers(help="Watchlist commands", dest="command_name")

    subcommands.add_parser("list", help="List all configured watchlists")

    add_cmd = subcommands.add_parser("add", help="Add new watchlist")
    add_cmd.add_argument("-N", "--name", help="Name of watchlist", required=True)
    add_cmd.add_argument("-q", "--query", required=True,
                         help="Watchlist query string, e.g. process_name:notepad.exe")
    add_cmd.add_argument("-t", "--type", required=True,
                         help="Watchlist type 'events' or 'modules'")

    delete_cmd = subcommands.add_parser("delete", help="Delete watchlists")
    which = delete_cmd.add_mutually_exclusive_group(required=True)
    which.add_argument("-i", "--id", type=int, help="ID of watchlist to delete")
    which.add_argument("-N", "--name",
                       help="Name of watchlist to delete. Specify --force to delete"
                            " multiple watchlists that have the same name")
    delete_cmd.add_argument("--force", action="store_true", default=False,
                            help="If NAME matches multiple watchlists, delete all matching watchlists")

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    command = args.command_name
    if command == "list":
        return list_watchlists(cb, parser, args)
    if command == "add":
        return add_watchlist(cb, parser, args)
    if command == "delete":
        return delete_watchlist(cb, parser, args)
def main():
    """Download a binary by MD5 into memory, then write it to a local file."""
    parser = build_cli_parser()
    parser.add_argument("--md5", help="binary query", required=True)
    parser.add_argument("--filename", help="local filename to save the binary as",
                        required=True)
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    binary = cb.select(Binary, args.md5)
    binary_data = binary.file.read()
    # Fixed: write through a context manager so the file is flushed and closed
    # (the original `open(...).write(...)` leaked the handle).
    with open(args.filename, "wb") as fp:
        fp.write(binary_data)
    print("-> Downloaded binary %s [%u bytes]" % (args.md5, len(binary_data)))
    return 0
def main():
    """Dispatch user-management subcommands (list, list-teams, add, add-team, get-api-key, delete)."""
    parser = build_cli_parser()
    commands = parser.add_subparsers(help="User commands", dest="command_name")

    commands.add_parser("list", help="List all configured users")
    commands.add_parser("list-teams", help="List all configured user teams")

    add_command = commands.add_parser("add", help="Add new user")
    add_command.add_argument("-u", "--username", help="New user's username", required=True)
    add_command.add_argument("-f", "--first-name", help="First name", required=True)
    add_command.add_argument("-l", "--last-name", help="Last name", required=True)
    add_command.add_argument("-p", "--password", required=False,
                             help="Password - if not specified, prompt at runtime")
    add_command.add_argument("-e", "--email", help="Email address", required=True)
    add_command.add_argument("-A", "--global-admin", help="Make new user global admin",
                             default=False, action="store_true")
    add_command.add_argument("-t", "--team", action="append", metavar="TEAM-NAME",
                             help="Add new user to this team (can specify multiple teams)")

    add_team_command = commands.add_parser("add-team", help="Add new team")
    add_team_command.add_argument("-N", "--name", help="Name of the new team")
    add_team_command.add_argument("-A", "--administrator", metavar="SENSOR-GROUP",
                                  action="append",
                                  help="Add administrator rights to the given sensor group")
    add_team_command.add_argument("-V", "--viewer", metavar="SENSOR-GROUP", action="append",
                                  help="Add viewer rights to the given sensor group")

    get_api_key_command = commands.add_parser("get-api-key", help="Get API key for user")
    get_api_key_command.add_argument("-u", "--username", help="Username", required=True)
    get_api_key_command.add_argument("-p", "--password", required=False,
                                     help="Password - if not specified, prompt at runtime")

    del_command = commands.add_parser("delete", help="Delete user")
    del_user_specifier = del_command.add_mutually_exclusive_group(required=True)
    del_user_specifier.add_argument("-u", "--username", help="Name of user to delete.")

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    if args.command_name == "list":
        return list_users(cb, parser, args)
    elif args.command_name == "list-teams":
        return list_teams(cb, parser, args)
    elif args.command_name == "get-api-key":
        return get_api_key(cb, parser, args)
    elif args.command_name == "add":
        return add_user(cb, parser, args)
    elif args.command_name == "add-team":
        return add_team(cb, parser, args)
    elif args.command_name == "delete":
        return delete_user(cb, parser, args)
def main():
    """Flag processes that averaged more network connections per second than a threshold."""
    parser = build_cli_parser("High avg. netconn/second alert")
    parser.add_argument("--skip-unknown", "-s", action="store_true", default=False,
                        dest="skip_unknown",
                        help="Skip processes with unknown start or last update")
    parser.add_argument("--rate", "-r", type=float, default=100.0, dest="conn_rate",
                        help="Alert on processes with more than [--rate] network connections per second")
    parser.add_argument("--gt-count", "-g", type=int, default=100, dest="gt_count",
                        help="Filter processes with greater than [--gt-count] network events")
    opts = parser.parse_args()
    cb = get_cb_response_object(opts)

    candidates = cb.select(Process).where(
        "netconn_count:[{0:d} TO *]".format(opts.gt_count)).sort("last_update desc")
    for proc in candidates:
        try:
            elapsed = (proc.last_update - proc.start).total_seconds()
        except Exception:
            # One of the timestamps is missing/unparsable.
            if opts.skip_unknown:
                continue
            elapsed = 1.0

        if not elapsed and proc.netconn_count > 0:
            # simulate "infinity" so as to avoid a DivideByZero exception
            conn_per_sec = 1000000
        else:
            conn_per_sec = proc.netconn_count / float(elapsed)

        if conn_per_sec > opts.conn_rate:
            print("{0:s}|{1:s}|{2:s}|{3:.4f}".format(proc.hostname, proc.username,
                                                     proc.process_name, conn_per_sec))
def main():
    """Fetch a file from the first online sensor matching a hostname via Live Response."""
    parser = build_cli_parser("Download binary from endpoint through Live Response")
    parser.add_argument("-o", "--output",
                        help="Output file name (default is the base file name)")
    parser.add_argument("-H", "--hostname", help="Hostname to download from", required=True)
    parser.add_argument("-p", "--path", help="Path to download", required=True)
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    sensors = cb.select(Sensor).where("hostname:{0}".format(args.hostname))
    for sensor in sensors:
        if sensor.status == "Online":
            # Use the first online sensor found for this host.
            return download_file(sensor, args.path, args.output)

    # Fixed: referenced the undefined bare name `hostname` (NameError) instead
    # of args.hostname.
    print("No sensors for hostname {0} are online, exiting".format(args.hostname))
def main():
    """Command-line wrapper around ban_hash()."""
    parser = build_cli_parser("Add an MD5 hash to the banned hash list in Cb Response")
    parser.add_argument("-H", "--hash",
                        help="MD5 hash of the file to ban in Cb Response",
                        required=True)
    parser.add_argument("-d", "--description",
                        help="Description of why the hash is banned")
    opts = parser.parse_args()
    connection = get_cb_response_object(opts)
    return ban_hash(connection, opts)
def main():
    """Download a file from an online sensor matching the hostname via Live Response."""
    parser = build_cli_parser("Download binary from endpoint through Live Response")
    parser.add_argument("-o", "--output",
                        help="Output file name (default is the base file name)")
    parser.add_argument("-H", "--hostname", help="Hostname to download from", required=True)
    parser.add_argument("-p", "--path", help="Path to download", required=True)
    opts = parser.parse_args()
    cb = get_cb_response_object(opts)

    for sensor in cb.select(Sensor).where("hostname:{0}".format(opts.hostname)):
        if sensor.status == "Online":
            # Stop at the first online sensor for this host.
            return download_file(sensor, opts.path, opts.output)
    print("No sensors for hostname {0} are online, exiting".format(opts.hostname))
def main():
    """CLI entry point for managing Cb Response storage partitions."""
    parser = build_cli_parser()
    subs = parser.add_subparsers(help="Storage Partition commands", dest="command_name")
    subs.add_parser("list", help="List all storage partitions")
    subs.add_parser("create", help="Create new active writer partition")

    delete_p = subs.add_parser("delete", help="Delete partition")
    delete_p.add_argument("-N", "--name", help="Name of partition to delete.", required=True)
    mount_p = subs.add_parser("mount", help="Mount partition")
    mount_p.add_argument("-N", "--name", help="Name of partition to mount.", required=True)
    unmount_p = subs.add_parser("unmount", help="Unmount partition")
    unmount_p.add_argument("-N", "--name", help="Name of partition to unmount.", required=True)

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # Partition APIs only exist on server 6.1.0+.
    if cb.cb_server_version < LooseVersion("6.1.0"):
        parser.error("This script can only work with server versions >= 6.1.0; {0} is running {1}"
                     .format(cb.url, cb.cb_server_version))
        return 1

    command = args.command_name
    if command == "list":
        return list_partitions(cb, parser, args)
    if command == "create":
        return create_partition(cb, parser, args)
    if command == "delete":
        return delete_partition(cb, parser, args)
    if command == "mount":
        return mount_partition(cb, parser, args)
    if command == "unmount":
        return unmount_partition(cb, parser, args)
def main():
    """CLI entry point for managing Cb Response threat intelligence feeds.

    Subcommands: list, add, delete, enable, disable. Dispatches to the
    corresponding sibling helper after building the Cb Response connection.
    """
    parser = build_cli_parser()
    commands = parser.add_subparsers(help="Feed commands", dest="command_name")
    commands.add_parser("list", help="List all configured feeds")

    add_command = commands.add_parser("add", help="Add new feed")
    add_command.add_argument("-u", "--feed-url", help="URL location of feed data",
                             dest="feed_url", required=True)
    add_command.add_argument("--force", help="Force creation even if feed already exists",
                             action="store_true")
    add_command.add_argument("-e", "--enable", action="store_true", help="Enable this feed")
    add_command.add_argument("-p", "--use_proxy", action="store_true", default=False, dest="use_proxy",
                             help="Carbon Black server will use configured web proxy to download feed from feed url")
    add_command.add_argument("-v", "--validate_server_cert", action="store_true", default=False,
                             dest="validate_server_cert",
                             help="Carbon Black server will verify the SSL certificate of the feed server")

    http_basic_auth = add_command.add_argument_group("HTTP Authentication")
    http_basic_auth.add_argument("--username", help="HTTP Basic Authentication username to access Feed URL")
    http_basic_auth.add_argument("--password", help="HTTP Basic Authentication password to access Feed URL")

    tls_auth = add_command.add_argument_group("TLS Client Authentication")
    tls_auth.add_argument("--cert", help="Path to file containing TLS client certificate required to access Feed URL")
    tls_auth.add_argument("--key", help="Path to file containing TLS client key required to access Feed URL")

    del_command = commands.add_parser("delete", help="Delete feeds")
    del_feed_specifier = del_command.add_mutually_exclusive_group(required=True)
    del_feed_specifier.add_argument("-i", "--id", type=int, help="ID of feed to delete")
    # BUG FIX: this help string was previously broken across a physical line in
    # the middle of the first literal, which is a SyntaxError. Restored as a
    # single implicit string concatenation.
    del_feed_specifier.add_argument("-f", "--feedname",
                                    help="Name of feed to delete. Specify --force to delete"
                                         " multiple feeds that have the same name")
    del_command.add_argument("--force",
                             help="If FEEDNAME matches multiple feeds, delete all matching feeds",
                             action="store_true", default=False)

    enable_command = commands.add_parser("enable", help="Enable a feed")
    enable_command.add_argument("-f", "--feedname", help="Name of feed to enable", required=True)

    disable_command = commands.add_parser("disable", help="Disable a feed")
    disable_command.add_argument("-f", "--feedname", help="Name of feed to disable", required=True)

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    if args.command_name == "list":
        return list_feeds(cb, parser, args)
    elif args.command_name == "add":
        return add_feed(cb, parser, args)
    elif args.command_name == "delete":
        return delete_feed(cb, parser, args)
    elif args.command_name in ("disable", "enable"):
        return toggle_feed(cb, args.feedname, enable=args.command_name == "enable")
def main():
    """Summarize interactive logins per host by counting explorer.exe launches."""
    parser = build_cli_parser()
    args = parser.parse_args()
    c = get_cb_response_object(args)

    logins_per_host = defaultdict(ItemCount)
    hosts_per_user = defaultdict(ItemCount)

    # Each explorer.exe process start is treated as one interactive login.
    for proc in c.select(Process).where("process_name:explorer.exe"):
        logins_per_host[proc.hostname].add(proc.username)
        hosts_per_user[proc.username].add(proc.hostname)

    for hostname, counter in iteritems(logins_per_host):
        print("For host {0:s}:".format(hostname))
        for username, count in counter.report():
            print(" %-20s: logged in %d times" % (username, count))
def main():
    """Verify API credentials, then clear the screen and launch the main menu.

    Connection setup is wrapped in a try/except primarily for the case where
    an API token and URL were not supplied in the credentials file.
    """
    parser = build_cli_parser()
    args = parser.parse_args()

    try:
        cb = get_cb_response_object(args)
    except Exception:
        # BUG FIX: was a bare "except:", which also swallows KeyboardInterrupt
        # and SystemExit; Exception is broad enough for any configuration error.
        print(
            "Silly rabbit. Something went wrong. Did you forget your API Token and URL in the ./.carbonblack/credentials.response folder?"
        )
        sys.exit()

    screen_clear()
    menu_main(cb)
def main():
    """Run a RabbitMQ event listener until Ctrl-C, then report callback errors."""
    parser = build_cli_parser("Simple event listener. Calls a callback for every event received on the event bus")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    source = event.RabbitMQEventSource(cb)
    source.start()
    try:
        # The event source runs on background threads; just idle here.
        while True:
            time.sleep(10)
    except KeyboardInterrupt:
        source.stop()
        print("Exiting event loop")

    print("Encountered the following exceptions during processing:")
    for error in registry.errors:
        print(error["traceback"])
def main():
    """Import or export Cb threat-intel feed JSON files to/from a folder."""
    parser = cbhelper.build_cli_parser()
    parser.add_argument("--folder", dest="folder", required=True,
                        help="Input Folder where Cb Threat Intelligence Feed JSON files are stored")
    parser.add_argument("--force-override", dest="override", required=False, default=False,
                        help="Force update of existing feeds")
    parser.add_argument("--mode", dest="mode", required=True, help="import or export")
    parser.add_argument("--cert", dest="cert", required=False, default=None, help="cert")
    parser.add_argument("--key", dest="key", required=False, default=None, help="key")
    args = parser.parse_args()

    cbapi = cbhelper.get_cb_response_object(args)
    shipper = CbFeedShipper(cbapi=cbapi, force_override=args.override, verbose=args.verbose)

    print("[+] Processing folder: {}\n".format(args.folder))

    if args.mode == "export":
        # Client TLS pair is only passed through when both halves were given.
        client_tls = (args.cert, args.key) if args.cert and args.key else None
        shipper.export_feeds(outdir=args.folder, cert=client_tls)
    else:
        shipper.import_folder(folder=args.folder)
def main():
    """Print a one-line summary of every alert matching the supplied query."""
    parser = build_cli_parser()
    parser.add_argument("--query", action="store", default="")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    for alert in cb.select(Alert).where(args.query):
        # Binary alerts carry an IOC value; process alerts carry a process
        # name plus a link into the Carbon Black UI.
        if 'binary' in alert.alert_type:
            print("Alert with score {0:d}: Binary {1:s} matched watchlist/report {2:s}".format(
                alert.report_score, alert.ioc_value, alert.watchlist_name))
        else:
            print("Alert with score {0:d}: Process {1:s} matched watchlist/report {2:s}".format(
                alert.report_score, alert.process_name, alert.watchlist_name))
            print("-> visit {0:s} to view this process in the Carbon Black UI.".format(
                alert.process.webui_link))
def main():
    """Read watchlists from a shareable JSON file and import them into Cb Response."""
    parser = build_cli_parser("Import watchlists from shareable JSON format")
    parser.add_argument("-f", "--file", required=True,
                        help="Select what file watchlists are read from")
    parser.add_argument("-w", "--watchlists",
                        help="Specific watchlist(s) to import. Can be comma separated.")
    parser.add_argument("-m", "--selective", action="store_true",
                        help="Interactively select which watchlists to import")
    args = parser.parse_args()

    # All the real work happens in import_watchlists(); it interprets the args.
    return import_watchlists(get_cb_response_object(args), args)
def main():
    """Listen for events on the Cb event bus; on Ctrl-C, dump any callback tracebacks."""
    parser = build_cli_parser(
        "Simple event listener. Calls a callback for every event received on the event bus"
    )
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    listener = event.RabbitMQEventSource(cb)
    listener.start()
    try:
        while True:
            # Nothing to do here; events are handled on background threads.
            time.sleep(10)
    except KeyboardInterrupt:
        listener.stop()
        print("Exiting event loop")

    print("Encountered the following exceptions during processing:")
    for error in registry.errors:
        print(error["traceback"])
def main():
    """Print dwell time and metadata for every binary matching a query.

    Dwell time is the elapsed time between the binary first appearing on the
    server (server_added_timestamp) and now.
    """
    parser = build_cli_parser()
    parser.add_argument("--query", help="binary query", default='')
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    for binary in cb.select(Binary).where(args.query):
        dwell_time = _dwell_time(binary.server_added_timestamp)

        # BUG FIX: "print binary" was Python 2 print-statement syntax, a
        # SyntaxError under Python 3; all output now uses the print() function.
        print(binary)
        print("=" * 80)
        print(binary.md5sum)
        print("-" * 80)
        print("%-20s : %s" % ('Dwell Time', dwell_time))
        print("%-20s : %s" % ('First Seen', binary.server_added_timestamp))
        print("%-20s : %s" % ('Size (bytes)', binary.size))
        print("%-20s : %s" % ('Signature Status', binary.signed))
        # Publisher is only meaningful for signed binaries.
        # NOTE(review): original compared "binary.signed == True"; preserved
        # as-is — confirm whether .signed is a bool or a status string.
        publisher = binary.digsig_publisher if binary.signed == True else 'n/a'
        print("%-20s : %s" % ('Publisher', publisher))
        print("%-20s : %s" % ('Product Version', binary.product_version))
        print("%-20s : %s" % ('File Version', binary.file_version))
        print("%-20s : %s" % ('64-bit (x64)', binary.is_64bit))
        print("%-20s : %s" % ('EXE', binary.is_executable_image))
        for fn in binary.observed_filenames:
            print("%-20s : %s" % ('On-Disk Filename', fn.split('\\')[-1]))
        print('\n')


def _dwell_time(timestamp):
    """Return now - timestamp as a timedelta, or "Indeterminate" if unparseable.

    The server timestamp may or may not include a fractional-seconds part,
    so both formats are attempted.
    """
    for fmt in ("%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%SZ"):
        try:
            added = datetime.datetime.strptime(timestamp, fmt)
        except ValueError:
            continue
        return datetime.datetime.utcnow() - added
    return "Indeterminate"
def main():
    """Dump all binaries matching an optional query from a Cb server to a directory."""
    parser = build_cli_parser("Grab all binaries from a Cb server")
    parser.add_argument('-d', '--destdir', action='store', default=os.curdir,
                        help='Destination directory to place the events')
    # TODO: we don't have a control on the "start" value in the query yet
    parser.add_argument('--query', action='store', dest='query', default=None,
                        help='Query string to filter results')
    parser.add_argument('-v', action='store_true', dest='verbose', default=False,
                        help='Enable verbose debugging messages')
    args = parser.parse_args()

    cb = get_cb_response_object(args)
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    return dump_all_binaries(cb, args.destdir, args.query)
def main():
    """Enumerate USB storage devices by scanning disk device-class registry writes."""
    parser = build_cli_parser("Enumerate USB Devices")
    parser.add_argument("--start", "-s", action="store", default=None, dest="start_time",
                        help="Start time (example: -2h)")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # {53f56307-...} is the device-interface class GUID used for disk devices.
    query_string = r'regmod:registry\machine\system\currentcontrolset\control\deviceclasses\{53f56307-b6bf-11d0-94f2-00a0c91efb8b}\*'
    if args.start_time:
        query_string += ' start:{0:s}'.format(args.start_time)

    for proc in cb.select(Process).where(query_string):
        for regmod in proc.regmods:
            if "{53f56307-b6bf-11d0-94f2-00a0c91efb8b}" not in regmod.path:
                continue
            pieces = regmod.path.split("usbstor#disk&")
            if len(pieces) < 2:
                print("WARN:::: {0}".format(str(pieces)))
            else:
                # Everything after "usbstor#disk&" identifies the device.
                print(pieces[1])
def main():
    """Write process event CSVs, for a single process or for every match of a query."""
    parser = build_cli_parser()
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("--processid", help="Process ID or URL to Process Analysis page")
    group.add_argument("--query", help="query to pull multiple processes")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    if not args.processid:
        # Query mode: one CSV per unique process id.
        for proc in cb.select(Process).where(args.query).group_by("id"):
            write_csv(proc, "{0}.csv".format(proc.id))
        return

    # Single-process mode: a URL is resolved through from_ui(); anything else
    # is treated as a Process ID.
    if args.processid.startswith("http"):
        proc = cb.from_ui(args.processid)
    else:
        proc = cb.select(Process, args.processid)
    write_csv(proc, "{0}.csv".format(proc.id))
def main():
    """Consume watchlist-hit events until Ctrl-C, then report processing errors."""
    parser = build_cli_parser("Event-driven example to BAN hashes, ISOLATE sensors, or LOCK (both ban & isolate) based on watchlist hits")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # Replays captured events from a file rather than the live RabbitMQ bus.
    source = event.FileEventSource(cb, "/tmp/output.json")
    source.start()
    print("Listening on the event bus for watchlist hits")

    try:
        while True:
            time.sleep(10)
    except KeyboardInterrupt:
        source.stop()
        print("Exiting event loop")

    print("Encountered the following exceptions during processing:")
    for err in registry.errors:
        print(err["exception"])
def main():
    """CLI entry point for listing, adding, and deleting threat intelligence feeds."""
    parser = build_cli_parser()
    commands = parser.add_subparsers(help="Feed commands", dest="command_name")
    commands.add_parser("list", help="List all configured feeds")

    add_command = commands.add_parser("add", help="Add new feed")
    add_command.add_argument("-u", "--feed-url", help="URL location of feed data",
                             dest="feed_url", required=True)
    add_command.add_argument("--force", help="Force creation even if feed already exists",
                             action="store_true")
    add_command.add_argument("-e", "--enable", action="store_true", help="Enable this feed")
    add_command.add_argument("-p", "--use_proxy", action="store_true", default=False, dest="use_proxy",
                             help="Carbon Black server will use configured web proxy to download feed from feed url")
    add_command.add_argument("-v", "--validate_server_cert", action="store_true", default=False,
                             dest="validate_server_cert",
                             help="Carbon Black server will verify the SSL certificate of the feed server")

    http_basic_auth = add_command.add_argument_group("HTTP Authentication")
    http_basic_auth.add_argument("--username", help="HTTP Basic Authentication username to access Feed URL")
    http_basic_auth.add_argument("--password", help="HTTP Basic Authentication password to access Feed URL")

    tls_auth = add_command.add_argument_group("TLS Client Authentication")
    tls_auth.add_argument("--cert", help="Path to file containing TLS client certificate required to access Feed URL")
    tls_auth.add_argument("--key", help="Path to file containing TLS client key required to access Feed URL")

    del_command = commands.add_parser("delete", help="Delete feeds")
    del_feed_specifier = del_command.add_mutually_exclusive_group(required=True)
    del_feed_specifier.add_argument("-i", "--id", type=int, help="ID of feed to delete")
    # BUG FIX: this help string was previously broken across a physical line in
    # the middle of the first literal, which is a SyntaxError. Restored as a
    # single implicit string concatenation.
    del_feed_specifier.add_argument("-f", "--feedname",
                                    help="Name of feed to delete. Specify --force to delete"
                                         " multiple feeds that have the same name")
    del_command.add_argument("--force",
                             help="If FEEDNAME matches multiple feeds, delete all matching feeds",
                             action="store_true", default=False)

    args = parser.parse_args()
    cb = get_cb_response_object(args)

    if args.command_name == "list":
        return list_feeds(cb, parser, args)
    elif args.command_name == "add":
        return add_feed(cb, parser, args)
    elif args.command_name == "delete":
        return delete_feed(cb, parser, args)
def main():
    """Start the notification listener thread and the Live Response orchestrator."""
    # Build the shared command line parser via ``build_cli_parser`` from
    # ``cbapi.example_helpers``, then augment it with script-specific options.
    parser = build_cli_parser("Example CB PSC & Response Live Response automation")
    parser.add_argument("--synthetic", metavar="DEVICE_ID",
                        help="Generate synthetic notifications with given Device ID")
    parser.add_argument("--poll", type=int, default=30,
                        help="Poll interval for the Notifications API")
    parser.add_argument("--siemprofile", default="siem",
                        help="CB Profile for SIEM API key (required to poll for notifications)")
    args = parser.parse_args()

    log.info("Starting")
    datastore = DataStore()

    # Notifications arrive on a daemon thread: synthetic ones for testing,
    # real ones polled from the Notifications API.
    if args.synthetic:
        listener = SyntheticNotificationGenerator(args.synthetic, datastore, poll_interval=args.poll)
    else:
        listener = NotificationListener(datastore, poll_interval=args.poll)
    listener.daemon = True
    listener.start()

    cb = get_cb_response_object(args)
    orchestrator = LiveResponseOrchestrator(cb, get_lr_session_response,
                                            CollectContextWorker, datastore)
    orchestrator.start()
def main():
    """Report child processes whose term frequency under given parents is below a threshold."""
    parser = build_cli_parser("Term Frequency Analysis")
    parser.add_argument("-p", "--percentage", action="store", default="2", dest="percentless",
                        help="Max Percentage of Term Frequency e.g., 2 ")
    process_selection = parser.add_mutually_exclusive_group(required=True)
    process_selection.add_argument("-t", "--term", action="store", default=None, dest="procname",
                                   help="Comma separated list of parent processes to get term frequency")
    process_selection.add_argument("-f", "--termfile", action="store", default=None, dest="procnamefile",
                                   help="Text file new line separated list of parent processes to get term frequency")
    output_selection = parser.add_mutually_exclusive_group(required=False)
    output_selection.add_argument("--count", action="store_true",
                                  help="Count the child processes that match [default]")
    output_selection.add_argument("--cmdline", action="store_true",
                                  help="Output list of child process command lines")
    args = parser.parse_args()
    cb = get_cb_response_object(args)

    # Parent process names come either from a file (one per line) or a comma list.
    if args.procnamefile:
        parents = [line.strip() for line in open(args.procnamefile).readlines()]
    else:
        parents = [name.strip() for name in args.procname.split(",")]

    # Default output behavior is a count; --cmdline switches to command lines.
    behavior = child_cmdlines if args.cmdline else count_children

    rows = []
    threshold = int(args.percentless)
    for parent in parents:
        facets = cb.select(Process).where("parent_name:{0}".format(parent)).facets("process_name")
        for term in reversed(facets["process_name"]):
            if threshold >= int(float(term['ratio'])):
                rows.extend(process_hit(cb, parent, term["name"], term["value"],
                                        term["ratio"], behavior))

    for row in rows:
        print(",".join(str(col) for col in row))