def execute(self):  # pylint: disable=too-many-branches,too-many-locals,too-many-statements
    """Execute hang analysis.

    1. Get a list of interesting processes
    2. Dump useful information or take core dumps
    """
    self._log_system_info()

    extractor.extract_debug_symbols(self.root_logger)

    dumpers = dumper.get_dumpers(self.root_logger, self.options.debugger_output)

    processes = process_list.get_processes(self.process_ids, self.interesting_processes,
                                           self.options.process_match, self.root_logger)

    max_dump_size_bytes = int(self.options.max_core_dumps_size) * 1024 * 1024

    # Suspending all processes, except python, to prevent them from getting unstuck when
    # the hang analyzer attaches to them.
    for pinfo in [pinfo for pinfo in processes if not pinfo.name.startswith("python")]:
        for pid in pinfo.pidv:
            process.pause_process(self.root_logger, pinfo.name, pid)

    # Dump python processes by signalling them. The resmoke.py process will generate
    # the report.json, when signalled, so we do this before attaching to other processes.
    for pinfo in [pinfo for pinfo in processes if pinfo.name.startswith("python")]:
        for pid in pinfo.pidv:
            process.signal_python(self.root_logger, pinfo.name, pid)

    trapped_exceptions = []

    dump_pids = {}
    # Dump all processes, except python & java.
    for pinfo in [pinfo for pinfo in processes if not re.match("^(java|python)", pinfo.name)]:
        try:
            dumpers.dbg.dump_info(
                pinfo, self.options.dump_core
                and _check_dump_quota(max_dump_size_bytes, dumpers.dbg.get_dump_ext()))
        except dumper.DumpError as err:
            self.root_logger.error(err.message)
            dump_pids = {**err.dump_pids, **dump_pids}
        except Exception as err:  # pylint: disable=broad-except
            self.root_logger.info("Error encountered when invoking debugger %s", err)
            trapped_exceptions.append(traceback.format_exc())

    # Dump java processes using jstack.
    for pinfo in [pinfo for pinfo in processes if pinfo.name.startswith("java")]:
        for pid in pinfo.pidv:
            try:
                dumpers.jstack.dump_info(self.root_logger, self.options.debugger_output,
                                         pinfo.name, pid)
            except Exception as err:  # pylint: disable=broad-except
                self.root_logger.info("Error encountered when invoking debugger %s", err)
                trapped_exceptions.append(traceback.format_exc())

    # Signal go processes to ensure they print out stack traces, and die on POSIX OSes.
    # On Windows, this will simply kill the process since python emulates SIGABRT as
    # TerminateProcess.
    # Note: The stacktrace output may be captured elsewhere (i.e. resmoke).
    for pinfo in [pinfo for pinfo in processes if pinfo.name in self.go_processes]:
        for pid in pinfo.pidv:
            self.root_logger.info("Sending signal SIGABRT to go process %s with PID %d",
                                  pinfo.name, pid)
            process.signal_process(self.root_logger, pid, signal.SIGABRT)

    self.root_logger.info("Done analyzing all processes for hangs")

    # Kill and abort processes if "-k" was specified.
    if self.options.kill_processes:
        process.teardown_processes(self.root_logger, processes, dump_pids)
    else:
        # Resuming all suspended processes.
        for pinfo in [pinfo for pinfo in processes if not pinfo.name.startswith("python")]:
            for pid in pinfo.pidv:
                process.resume_process(self.root_logger, pinfo.name, pid)

    for exception in trapped_exceptions:
        self.root_logger.info(exception)
    if trapped_exceptions:
        raise RuntimeError(
            "Exceptions were thrown while dumping. There may still be some valid dumps.")
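# Note: _check_dump_quota is referenced above but is not defined in this excerpt. The helper
# below is a minimal sketch of what such a quota check could look like, assuming the quota is
# a byte limit on the combined size of dump files (matched by the debugger's dump extension)
# in the working directory. It is illustrative only and may differ from the actual helper.
import glob  # assumed to already be available at module scope; repeated here for self-containment
import os


def _check_dump_quota(quota, ext):
    """Return True if the total size of '*.<ext>' files is still within `quota` bytes."""
    size_sum = 0
    for file_name in glob.glob("*." + ext):
        size_sum += os.path.getsize(file_name)
    return size_sum <= quota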
def execute(self):  # pylint: disable=too-many-branches,too-many-locals,too-many-statements
    """Execute hang analysis.

    1. Get a list of interesting processes
    2. Dump useful information or take core dumps
    """
    self._log_system_info()

    dumpers = dumper.get_dumpers(self.root_logger, self.options.debugger_output)

    processes = process_list.get_processes(self.process_ids, self.interesting_processes,
                                           self.options.process_match, self.root_logger)

    def is_python_process(pname: str):
        # "live-record*" and "python*" are Python processes. Sending SIGUSR1 causes resmoke.py
        # to dump its stack and run the hang analyzer on its child processes.
        # Sending SIGUSR1 causes live-record to save its recording and terminate.
        return pname.startswith("python") or pname.startswith("live-record")

    # Suspending all processes, except python, to prevent them from getting unstuck when
    # the hang analyzer attaches to them.
    for pinfo in [pinfo for pinfo in processes if not is_python_process(pinfo.name)]:
        for pid in pinfo.pidv:
            process.pause_process(self.root_logger, pinfo.name, pid)

    # Download symbols after pausing if the task ID is not None and not running with sanitizers.
    # Sanitizer builds are not stripped and don't require debug symbols.
    san_options = os.environ.get("san_options", None)
    if self.task_id is not None and san_options is None:
        my_symbolizer = Symbolizer(self.task_id, download_symbols_only=True)
        download_debug_symbols(self.root_logger, my_symbolizer)

    # Dump python processes by signalling them. The resmoke.py process will generate
    # the report.json, when signalled, so we do this before attaching to other processes.
    for pinfo in [pinfo for pinfo in processes if is_python_process(pinfo.name)]:
        for pid in pinfo.pidv:
            process.signal_python(self.root_logger, pinfo.name, pid)

    trapped_exceptions = []

    dump_pids = {}
    # Dump core files of all processes, except python & java.
    if self.options.dump_core:
        for pinfo in [
                pinfo for pinfo in processes if not re.match("^(java|python)", pinfo.name)
        ]:
            if self._check_enough_free_space():
                try:
                    dumpers.dbg.dump_info(pinfo, take_dump=True)
                except dumper.DumpError as err:
                    self.root_logger.error(err.message)
                    dump_pids = {**err.dump_pids, **dump_pids}
                except Exception as err:  # pylint: disable=broad-except
                    self.root_logger.info("Error encountered when invoking debugger %s", err)
                    trapped_exceptions.append(traceback.format_exc())
            else:
                self.root_logger.info(
                    "Not enough space for a core dump, skipping %s processes with PIDs %s",
                    pinfo.name, str(pinfo.pidv))

    # Dump info of all processes, except python & java.
    for pinfo in [pinfo for pinfo in processes if not re.match("^(java|python)", pinfo.name)]:
        try:
            dumpers.dbg.dump_info(pinfo, take_dump=False)
        except Exception as err:  # pylint: disable=broad-except
            self.root_logger.info("Error encountered when invoking debugger %s", err)
            trapped_exceptions.append(traceback.format_exc())

    # Dump java processes using jstack.
    for pinfo in [pinfo for pinfo in processes if pinfo.name.startswith("java")]:
        for pid in pinfo.pidv:
            try:
                dumpers.jstack.dump_info(self.root_logger, self.options.debugger_output,
                                         pinfo.name, pid)
            except Exception as err:  # pylint: disable=broad-except
                self.root_logger.info("Error encountered when invoking debugger %s", err)
                trapped_exceptions.append(traceback.format_exc())

    # Signal go processes to ensure they print out stack traces, and die on POSIX OSes.
    # On Windows, this will simply kill the process since python emulates SIGABRT as
    # TerminateProcess.
    # Note: The stacktrace output may be captured elsewhere (i.e. resmoke).
    for pinfo in [pinfo for pinfo in processes if pinfo.name in self.go_processes]:
        for pid in pinfo.pidv:
            self.root_logger.info("Sending signal SIGABRT to go process %s with PID %d",
                                  pinfo.name, pid)
            process.signal_process(self.root_logger, pid, signal.SIGABRT)

    self.root_logger.info("Done analyzing all processes for hangs")

    # Kill and abort processes if "-k" was specified.
    if self.options.kill_processes:
        process.teardown_processes(self.root_logger, processes, dump_pids)
    else:
        # Resuming all suspended processes.
        for pinfo in [pinfo for pinfo in processes if not pinfo.name.startswith("python")]:
            for pid in pinfo.pidv:
                process.resume_process(self.root_logger, pinfo.name, pid)

    for exception in trapped_exceptions:
        self.root_logger.info(exception)
    if trapped_exceptions:
        raise RuntimeError(
            "Exceptions were thrown while dumping. There may still be some valid dumps.")
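# Note: _check_enough_free_space is called above but not shown in this excerpt. Below is a
# minimal sketch of one way such a check could work, assuming it treats the configured
# --max-core-dumps-size value (in MB) as the minimum free space required on the filesystem
# receiving the dumps. The class name and threshold semantics are assumptions made for
# illustration, not the project's actual implementation.
import shutil


class FreeSpaceCheckSketch:
    """Illustrative stand-in for the object that owns _check_enough_free_space."""

    def __init__(self, options, root_logger):
        self.options = options
        self.root_logger = root_logger

    def _check_enough_free_space(self):
        required_bytes = int(self.options.max_core_dumps_size) * 1024 * 1024  # assumed threshold
        free_bytes = shutil.disk_usage(".").free
        self.root_logger.debug("Free disk space: %d bytes, required: %d bytes", free_bytes,
                               required_bytes)
        return free_bytes >= required_bytes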