def process_result(self):
    """Write the collected file content to a local file.

    Builds an output filename from the sensor id / hostname / remote path
    when one was not supplied, avoids clobbering an existing local file by
    prefixing the epoch time, writes ``self.result`` to disk, and runs any
    configured post-completion command.

    Returns:
        True on success, False if anything went wrong.
    """
    from contextlib import closing

    from cbinterface.helpers import get_os_independent_filepath

    if self.output_filename is None:
        filepath = get_os_independent_filepath(self._file_path)
        hostname_part = f"{self.hostname}_" if self.hostname else ""
        self.output_filename = f"{self.sensor_id}_{hostname_part}{filepath.name}"
    else:
        self.output_filename = self.fill_placeholders(self.output_filename)
    try:
        if os.path.exists(self.output_filename):
            LOGGER.debug(f"{self.output_filename} already exists. appending epoch time")
            # int() truncation yields the same prefix as the old rfind('.') slicing
            self.output_filename = f"{int(time.time())}_{self.output_filename}"
        # close the content handle even if the write fails (it previously
        # leaked when open()/write() raised)
        with closing(self.result) as content_handle, open(self.output_filename, "wb") as fp:
            fp.write(content_handle.read())
        if os.path.exists(self.output_filename):
            LOGGER.info(f"wrote: {self.output_filename}")
        if self.post_completion_command:
            self.execute_post_completion()
        return True
    except Exception as e:
        # was "problem getting file content" - this method writes, it does not get
        LOGGER.error(f"problem writing file content: {e}")
        return False
def get_file_content(cb: CbThreatHunterAPI, session_id: str, file_id: str):
    """Get file content stored in a LR session and write the file locally.

    Args:
        cb: a CbThreatHunterAPI connection object.
        session_id: composite "<real_session_id>:<device_id>" LR session id.
        file_id: id of the file stored with the session.

    Returns:
        True if the file was written locally, otherwise None.
    """
    from cbinterface.helpers import get_os_independent_filepath

    try:
        real_session_id, device_id = session_id.split(":", 1)
        filename = f"{real_session_id}_on_{device_id}"
        file_metadata = cb.get_object(f"{CBLR_BASE}/session/{session_id}/file/{file_id}")
        if file_metadata:
            filepath = get_os_independent_filepath(file_metadata["file_name"])
            # NOTE(review): this previously hard-coded a literal "(unknown)_"
            # prefix, which looks like placeholder corruption; prefix with the
            # session id instead so downloads remain distinguishable.
            filename = f"{real_session_id}_{filepath.name}"
        result = cb.session.get(f"{CBLR_BASE}/session/{session_id}/file/{file_id}/content", stream=True)
        if result.status_code != 200:
            LOGGER.error(
                f"got {result.status_code} from server getting file {file_id} content for session {session_id}"
            )
            return
        # stream the content to disk in buffer-sized chunks
        with open(filename, "wb") as fp:
            for chunk in result.iter_content(io.DEFAULT_BUFFER_SIZE):
                fp.write(chunk)
        if os.path.exists(filename):
            # was an f-string with no placeholder that logged the literal "(unknown)"
            LOGGER.info(f"wrote: {filename}")
        return os.path.exists(filename)
    except ObjectNotFoundError:
        LOGGER.warning(f"no file {file_id} content with session {session_id}")
        return
def run(self, session: CbLRSessionBase):
    """
    Execute the file transfer.

    Supports a "{WILDMATCH}" token in the configured file path: the path is
    split on the token and the parent directory is listed for the first
    non-directory entry whose name contains any of the split parts.

    Args:
        session (CbLRSessionBase): The Live Response session being used.

    Returns:
        File content
    """
    if "{WILDMATCH}" in self._file_path:
        # split on "{WILDMATCH}" and search for the first match to collect
        from cbinterface.helpers import get_os_independent_filepath

        parts = [self.fill_placeholders(piece) for piece in self._file_path.split("{WILDMATCH}")]
        directory = str(get_os_independent_filepath(parts[0]).parent)
        # append the path separator style already present in the directory
        separator = "\\" if "\\" in directory else "/"
        directory = f"{directory}{separator}"
        LOGGER.info(f"attempting to find item at '{directory}' like {parts}")
        for entry in session.list_directory(directory):
            if entry["attributes"] == "DIRECTORY":
                continue
            if any(piece in entry["filename"] for piece in parts):
                LOGGER.info(f"found potential match: {entry['filename']}")
                self._file_path = f"{directory}{entry['filename']}"
                break
    self._file_path = self.fill_placeholders(self._file_path)
    return session.get_raw_file(self._file_path)
def run(self, session: CbLRSessionBase):
    """Kill every listed process whose image name contains self.pname (case-insensitive)."""
    from cbinterface.helpers import get_os_independent_filepath

    needle = self.pname.lower()
    for proc in session.list_processes():
        image_name = get_os_independent_filepath(proc["path"]).name
        if needle in image_name.lower():
            LOGGER.info(f"found process to kill: {proc['path']} - pid={proc['pid']}")
            self.nested_commands[proc["pid"]] = session.kill_process(proc["pid"])
    return True
def print_process_info(proc: Process, yield_strings: bool = False, raw_print=False, header=True):
    """Analyst friendly custom process data format.

    Args:
        proc: CbTH Process (fully initialized)
        yield_strings: return string if True, else print it to stdout.
        raw_print: dump str(proc) instead of the formatted field list.
        header: include the "------ INFO ------" banner line.
    Returns: string or None
    """
    if not is_process_loaded(proc):
        proc = load_process(proc)

    if raw_print:
        # raw mode replaces any header text entirely (matches prior behavior)
        txt = str(proc)
    else:
        txt = "------ INFO ------\n" if header else ""
        process_name = get_os_independent_filepath(proc.get("process_name", "None")).name
        pid_list = ", ".join(str(pid) for pid in proc.get("process_pid", []))
        # condense a single-element command line list down to its element
        cmdline = proc.get("process_cmdline")
        if cmdline and len(proc.process_cmdline) == 1:
            cmdline = proc.process_cmdline[0]
        # parent_hash holds both md5 and sha256; pick the 64-char sha256
        parent_sha256 = next((hsh for hsh in proc.get("parent_hash", []) if len(hsh) == 64), None)
        txt += f" Process GUID: {proc.get('process_guid')}\n"
        txt += f" Process Name: {process_name}\n"
        txt += f" Process PID: {pid_list}\n"
        txt += f" Process MD5: {proc.get('process_md5')}\n"
        txt += f" Process SHA256: {proc.get('process_sha256')}\n"
        txt += f" Process Path: {proc.get('process_name')}\n"
        txt += f" Process Terminated: {proc.get('process_terminated')}\n"
        txt += f" Start Time: {as_configured_timezone(proc.get('process_start_time', ''))}\n"
        txt += f" Command Line: {cmdline}\n"
        txt += f" Process Reputation: {proc.get('process_reputation')}\n"
        txt += f" Parent Name: {proc.get('parent_name')}\n"
        txt += f" Parent GUID: {proc.get('parent_guid')}\n"
        txt += f" Parent SHA256: {parent_sha256}\n"
        txt += f" Process Username: {proc.get('process_username')}\n"
        txt += f" Device Username: {proc.get('device_username')}\n"
        txt += f" Device ID: {proc.get('device_id')}\n"
        txt += f" Device Name: {proc.get('device_name')}\n"
        txt += f" Device OS: {proc.get('device_os')}\n"
        txt += f" External IP: {proc.get('device_external_ip')}\n"
        txt += f" Internal IP: {proc.get('device_internal_ip')}\n"

    if yield_strings:
        return txt
    txt += "\n"
    print(txt)
def run(self, session: CbLRSessionBase):
    """Submit a KillProcessByID command for each process whose image name contains self.pname."""
    from cbinterface.helpers import get_os_independent_filepath
    from cbinterface.response.sessions import CustomLiveResponseSessionManager

    self.local_session_manager = CustomLiveResponseSessionManager(session._cb)
    for proc in session.list_processes():
        image_name = get_os_independent_filepath(proc["path"]).name
        # NOTE: match is case-sensitive here (unlike the direct-kill variant)
        if self.pname in image_name:
            LOGGER.info(f"found process to kill: {proc['path']} - pid={proc['pid']}")
            self.local_session_manager.submit_command(KillProcessByID(proc["pid"]), self.sensor_id)
    return True
def _build_run_command(playbook, command, placeholders, post_completion_command=None):
    """Build an ExecuteCommand live response command from a playbook RUN section."""
    section = playbook[command]
    return ExecuteCommand(
        section["command"],
        wait_for_output=section.getboolean("wait_for_output", True),
        remote_output_file_name=section.get("remote_output_file_name", None),
        # BUG FIX: the cleanup path previously called getboolean() on this
        # string-valued option
        working_directory=section.get("working_directory", None),
        wait_timeout=section.getint("wait_timeout", 30),
        wait_for_completion=section.getboolean("wait_for_completion", True),
        print_results=section.getboolean("print_results", True),
        write_results_path=section.get("write_results_path", False),
        placeholders=placeholders,
        post_completion_command=post_completion_command,
    )


def build_playbook_commands(playbook_path, placeholders=None, separate_cleanup=False):
    """Build the live response commands that define this playbook.

    Args:
        playbook_path: path to the playbook ini file.
        placeholders: optional mapping of placeholder name -> value used to
            fill placeholders in the playbook commands.
        separate_cleanup: if True, sections whose name starts with "cleanup"
            are built separately and returned as a second list.

    Returns:
        List of Cb live response commands, or a
        (commands, cleanup_commands) tuple when separate_cleanup is True;
        False if the playbook defines an unrecognized or incomplete operation.
    """
    # TODO remove cleanup command stuff
    # avoid the shared mutable-default-argument pitfall
    placeholders = {} if placeholders is None else placeholders
    ready_live_response_commands = []
    playbook = load_playbook(playbook_path)
    if not playbook:
        return ready_live_response_commands
    placeholders = enforce_argument_placeholders(playbook, placeholders)
    playbook_name = playbook_path[playbook_path.rfind("/") + 1 : playbook_path.rfind(".")]

    cleanup_commands = []
    playbook_commands = [cmd for cmd in playbook.sections() if cmd not in IGNORED_SECTIONS]
    if separate_cleanup:
        cleanup_commands = [cmd for cmd in playbook_commands if cmd.startswith("cleanup")]
        playbook_commands = [cmd for cmd in playbook_commands if not cmd.startswith("cleanup")]

    # make sure requirements are met first
    for command in playbook_commands:
        op = playbook[command]["operation"].upper()
        if op not in REQUIRED_OP_KEY_MAP:
            # BUG FIX: this message was missing its f-string prefix and
            # logged the literal "{op}"
            LOGGER.error(f"{op} is not a recognized operation")
            return False
        if operation_missing_required_keys(playbook, command, REQUIRED_OP_KEY_MAP[op]):
            return False

    LOGGER.info(f"building live response commands defined by {playbook_name}")
    for command in playbook_commands:
        op = playbook[command]["operation"].upper()
        post_completion_command = playbook[command].get("post_completion_command", None)
        if op == "RUN":
            cmd = _build_run_command(playbook, command, placeholders, post_completion_command)
            LOGGER.debug(f"built {cmd}")
            ready_live_response_commands.append(cmd)
        elif op in ("DOWNLOAD", "PUT"):
            file_path = playbook[command]["file_path"]
            client_file_path = playbook[command]["client_file_path"]
            # fall back to resolving the file relative to BASE_DIR
            if not os.path.exists(file_path):
                original_fp = file_path
                file_path = os.path.join(BASE_DIR, file_path)
                if not os.path.exists(file_path):
                    LOGGER.error(f"Not found: '{original_fp}' OR '{file_path}'")
                    return False
            file_name = get_os_independent_filepath(file_path).name
            cmd = PutFile(
                file_path,
                sensor_write_filepath=client_file_path,
                placeholders=placeholders,
                post_completion_command=post_completion_command,
            )
            cmd.description = f"Put '{file_name}' on device @ '{client_file_path}'"
            LOGGER.debug(f"built {cmd}")
            ready_live_response_commands.append(cmd)
        elif op in ("UPLOAD", "GET"):
            path = playbook[command]["path"]
            write_results_path = playbook[command].get("write_results_path", None)
            cmd = GetFile(
                path,
                output_filename=write_results_path,
                placeholders=placeholders,
                post_completion_command=post_completion_command,
            )
            LOGGER.debug(f"built {cmd}")
            ready_live_response_commands.append(cmd)

    if separate_cleanup:
        ready_live_response_cleanup_commands = []
        unique_cleanup_commands = []
        for command in cleanup_commands:
            op = playbook[command]["operation"].upper()
            if op == "RUN":
                command_string = playbook[command]["command"]
                # de-duplicate identical cleanup command strings
                if command_string in unique_cleanup_commands:
                    continue
                unique_cleanup_commands.append(command_string)
                cmd = _build_run_command(playbook, command, placeholders)
                LOGGER.debug(f"built {cmd}")
                # BUG FIX: cleanup commands were appended to the main list,
                # so the returned cleanup list was always empty
                ready_live_response_cleanup_commands.append(cmd)
        return ready_live_response_commands, ready_live_response_cleanup_commands

    return ready_live_response_commands
def inspect_process_tree(
    proc: Process,
    info=False,
    filemods=False,
    netconns=False,
    regmods=False,
    modloads=False,
    crossprocs=False,
    children=False,
    scriptloads=False,
    max_depth=0,
    depth=0,
    start_time: datetime.datetime = None,
    end_time: datetime.datetime = None,
    **kwargs,
):
    """Walk down the execution chain and print inspection points."""
    if max_depth and depth > max_depth:
        return

    if depth == 0:
        # only print the ancestry/tree context once, at the root of the walk
        print_ancestry(proc)
        print_process_tree(proc)

    process_name = get_os_independent_filepath(proc.get("process_name", "None")).name
    print(f"\n+ {process_name} - {proc.process_guid}")

    if info:
        print_process_info(proc, **kwargs)

    # dispatch each requested event printer in a fixed order
    event_printers = (
        (filemods, print_filemods),
        (netconns, print_netconns),
        (regmods, print_regmods),
        (modloads, print_modloads),
        (crossprocs, print_crossprocs),
        (children, print_childprocs),
        (scriptloads, print_scriptloads),
    )
    for enabled, printer in event_printers:
        if enabled:
            printer(proc, start_time=start_time, end_time=end_time, **kwargs)

    try:
        for child in proc.children:
            try:
                inspect_process_tree(
                    child,
                    info=info,
                    filemods=filemods,
                    netconns=netconns,
                    regmods=regmods,
                    modloads=modloads,
                    crossprocs=crossprocs,
                    children=children,
                    scriptloads=scriptloads,
                    max_depth=max_depth,
                    depth=depth + 1,
                    start_time=start_time,
                    end_time=end_time,
                    **kwargs,
                )
            except RecursionError:
                LOGGER.warning(f"hit RecursionError inspecting process tree.")
                break
    except ObjectNotFoundError as e:
        LOGGER.warning(f"got object not found error for child proc: {e}")
def print_facet_histogram_v2(
    cb: CbThreatHunterAPI,
    query: str,
    start_time: datetime.datetime = None,
    end_time: datetime.datetime = None,
    return_string=False,
):
    """Get query facet results from the CbAPI enriched events facets.

    Args:
        cb: a CbThreatHunterAPI connection object.
        query: the process query to facet.
        start_time: optional lower bound for the facet job time range.
        end_time: optional upper bound for the facet job time range.
        return_string: return the histogram text instead of printing it.

    Returns:
        The histogram string when return_string is True, False when the
        facet job could not be created, else None (text is printed).
    """
    # NOTE: no support for childproc facets with this built-in
    from cbinterface.helpers import get_os_independent_filepath

    fields = [
        "parent_name",
        "process_name",
        "process_reputation",
        "process_username",
        "process_sha256",
        "device_name",
        "device_os",
    ]
    path_fields = ["parent_name", "process_name"]
    post_data = {"query": query, "terms": {"fields": fields}, "time_range": {}}
    if start_time:
        post_data["time_range"]["start"] = start_time.isoformat()
    if end_time:
        post_data["time_range"]["end"] = end_time.isoformat()

    # TODO handle status_code!=200 and response is not json for both requests
    uri = f"/api/investigate/v2/orgs/{cb.credentials.org_key}/processes/facet_jobs"
    job_id = cb.post_object(uri, post_data).json().get("job_id", None)
    if not job_id:
        LOGGER.error("failed to get facet job.")
        return False

    uri = f"/api/investigate/v2/orgs/{cb.credentials.org_key}/processes/facet_jobs/{job_id}/results"
    facet_data = cb.get_object(uri)

    txt = "\n------------------------- FACET HISTOGRAMS -------------------------\n"
    total = facet_data["num_found"]
    for facets in facet_data["terms"]:
        field_name = facets["field"]
        txt += f"\n\t{field_name} results: {len(facets['values'])}\n"
        txt += "\t--------------------------------\n"
        for entry in facets["values"]:
            entry_name = entry["name"]
            if field_name in path_fields and len(entry_name) > 55:
                # shorten long paths: keep the file name, truncate the directory
                file_path = get_os_independent_filepath(entry_name)
                file_name = file_path.name
                file_path = entry_name[: len(entry_name) - len(file_name)]
                file_path = file_path[: 40 - len(file_name)]
                entry_name = f"{file_path}...{file_name}"
            # BUG FIX: guard against ZeroDivisionError when num_found is 0;
            # also compute the percentage once (bar is half the percentage)
            percent = int(entry["total"] / total * 100) if total else 0
            bar_value = percent // 2
            txt += "%30s: %5s %5s%% %s\n" % (entry_name, entry["total"], percent, "\u25A0" * bar_value)
    txt += "\n"
    if return_string:
        return txt
    print(txt)
    return
def print_facet_histogram(processes: AsyncProcessQuery):
    """Print facet histograms for common process fields.

    NOTE, this is a custom implementation. TODO, look at using the built in
    API methods: https://developer.carbonblack.com/reference/carbon-black-cloud/cb-threathunter/latest/process-search-v2/#start-a-process-facet-job
    Also, NOTE that this table lists fields that support faceting via the built in method, children is not one of them:
    https://developer.carbonblack.com/reference/cb-threathunter/latest/process-search-fields/

    Args:
        processes: an AsyncProcessQuery (or any iterable of Process objects).
    """
    from cbinterface.helpers import create_histogram_string, get_os_independent_filepath

    fields = [
        "parent_name",
        "process_name",
        "process_reputation",
        "process_username",
        "process_sha256",
        "device_name",
        "device_os",
    ]
    path_fields = ["parent_name", "process_name"]
    processes = list(processes)

    facet_dict = {}
    for field_name in fields:
        facet_dict[field_name] = {}
        for proc in processes:
            value = proc.get(field_name, "None")
            # BUG FIX: always condense list values to their first element.
            # Previously a single-element list was left as-is (unhashable as
            # a dict key -> TypeError) and list-valued path fields skipped
            # the basename reduction below.
            if isinstance(value, list):
                if len(value) > 1:
                    LOGGER.info(f"condensing {value} to {value[0]}")
                value = value[0] if value else "None"
            if field_name in path_fields:
                # reduce full paths to just the file name
                value = get_os_independent_filepath(value).name
            facet_dict[field_name][value] = facet_dict[field_name].get(value, 0) + 1

    # special case for "children"
    try:
        facet_dict["childproc_name"] = {}
        for proc in processes:
            if proc.childproc_count < 1:
                continue
            for cp in proc.summary.children or []:
                child_name = get_os_independent_filepath(cp.get("process_name")).name
                facet_dict["childproc_name"][child_name] = facet_dict["childproc_name"].get(child_name, 0) + 1
    except Exception as e:
        LOGGER.warning(f"problem enumerating child process names: {e}")

    print("\n------------------------- FACET HISTOGRAMS -------------------------")
    for field_name, facets in facet_dict.items():
        print(f"\n\t{field_name} results: {len(facets)}")
        print("\t--------------------------------")
        print(create_histogram_string(facets))
    return