def load_all_symbols(self):
    """Reload vmlinux and all module symbols, preserving breakpoint state.

    Dropping symbols disables all breakpoints, so their enabled flags are
    saved first and restored after the reload.
    """
    gdb.write("loading vmlinux\n")
    # Dropping symbols will disable all breakpoints. So save their states
    # and restore them afterward.
    saved_states = []
    # Fix: use the idiomatic `is not None` (was `not ... is None`).
    if hasattr(gdb, 'breakpoints') and gdb.breakpoints() is not None:
        for bp in gdb.breakpoints():
            saved_states.append({'breakpoint': bp, 'enabled': bp.enabled})

    # drop all current symbols and reload vmlinux
    orig_vmlinux = 'vmlinux'
    for obj in gdb.objfiles():
        if obj.filename.endswith('vmlinux'):
            orig_vmlinux = obj.filename
    gdb.execute("symbol-file", to_string=True)
    gdb.execute("symbol-file {0}".format(orig_vmlinux))

    self.loaded_modules = []
    module_list = modules.module_list()
    if not module_list:
        gdb.write("no modules found\n")
    else:
        # Fix: plain loop instead of a list comprehension used only for
        # its side effects (the comprehension built a throwaway list).
        for module in module_list:
            self.load_module_symbols(module)

    for saved_state in saved_states:
        saved_state['breakpoint'].enabled = saved_state['enabled']
def do_enable_pretty_printer (arg, flag):
    """Internal worker for enabling/disabling pretty-printers."""
    # ARG is parsed into three regexps: which locus ("global",
    # "progspace", or an objfile name), which printer, which subprinter.
    (object_re, name_re, subname_re) = parse_printer_regexps(arg)

    total = 0
    if object_re.match("global"):
        total += do_enable_pretty_printer_1(gdb.pretty_printers,
                                            name_re, subname_re, flag)
    cp = gdb.current_progspace()
    if object_re.match("progspace"):
        total += do_enable_pretty_printer_1(cp.pretty_printers,
                                            name_re, subname_re, flag)
    # Objfile loci are matched by filename against the locus regexp.
    for objfile in gdb.objfiles():
        if object_re.match(objfile.filename):
            total += do_enable_pretty_printer_1(objfile.pretty_printers,
                                                name_re, subname_re, flag)

    if flag:
        state = "enabled"
    else:
        state = "disabled"
    print ("%d %s %s" % (total, pluralize("printer", total), state))

    # Print the total list of printers currently enabled/disabled.
    # This is to further assist the user in determining whether the result
    # is expected. Since we use regexps to select it's useful.
    show_pretty_printer_enabled_summary()
def complete(self, text, word):
    """Completion hook: collect candidate names from every type-printer locus."""
    candidates = []
    # Objfile printers first, then the progspace's, then the globals.
    for objfile in gdb.objfiles():
        self.add_some(candidates, word, objfile.type_printers)
    self.add_some(candidates, word, gdb.current_progspace().type_printers)
    self.add_some(candidates, word, gdb.type_printers)
    return candidates
def initExtension():
    """Extension initialisation function"""
    # NOTE(review): Python 2 code (print statement, dict.iterkeys) — this
    # module targets a GDB embedding Python 2.
    global ExtensionSettings
    global er
    global osplatform
    osplatform = sys.platform
    er = gdb.events
    er.new_objfile.connect(NewObjfileHandler) # installs event handler to check file arch on new objfile
    # configure program settings
    ExtensionSettings = ExtensionProgramSettings()
    # Seed every setting with its (default value, description) pair.
    for key in program_default_settings.iterkeys():
        ExtensionSettings.createValue(key, program_default_settings[key][0], program_default_settings[key][1])
    [twidth, _] = getTerminalSize()
    if AUTO_SET_TERMINAL_WIDTH:
        # making assumption here that terminals are all same width, however user can change manually if desired
        for settings in ExtensionSettings.listMatchingSettings('_terminal_width'):
            ExtensionSettings.createValue(settings[0], int(twidth), settings[2])
    else:
        # set main terminal width
        # (the 'main_termal_width' spelling is the key actually used elsewhere)
        ExtensionSettings.createValue('main_termal_width', int(twidth), ExtensionSettings.getDescription('main_termal_width'))
    # Only probe the architecture when a file has been loaded already.
    if len(gdb.objfiles()) > 0:
        CheckFileArch()
    print "pygdbdis " + program_version + " loaded"
def _sort_list():
    """ Merge every known frame-filter dictionary, drop filters whose
    state is "disabled", and order the remainder by the frame-filter
    "priority" attribute, highest first.

    Returns:
        An iterable of enabled frame filters to execute, sorted by
        descending priority.
    """
    candidates = []
    for objfile in gdb.objfiles():
        candidates.extend(objfile.frame_filters.values())
    candidates.extend(gdb.current_progspace().frame_filters.values())
    candidates.extend(gdb.frame_filters.values())

    ordered = sorted(candidates, key=get_priority, reverse=True)
    return filter(get_enabled, ordered)
def exe(self):
    """Return the path to the executable being debugged.

    Uses the first objfile that has a filename; if none is found and the
    target is alive, falls back to AT_EXECFN from the auxiliary vector.
    Returns None when neither source is available.
    """
    # Fix: removed the unreachable `break` that followed `return`.
    for obj in gdb.objfiles():
        if obj.filename:
            return obj.filename
    if self.alive:
        auxv = pwndbg.auxv.get()
        # NOTE(review): raises KeyError if AT_EXECFN is absent — TODO
        # confirm pwndbg.auxv.get() always supplies it for live targets.
        return auxv['AT_EXECFN']
def set_xm_status(arg, status):
    """Set the status (enabled/disabled) of xmethods matching ARG.

    Helper for the enable/disable commands; ARG is the raw argument
    string the user typed.
    """
    locus_re, matcher_re, name_re = parse_xm_command_args(arg)
    # Apply to the three loci in turn: global, current progspace, objfiles.
    set_xm_status1(get_global_method_matchers(locus_re, matcher_re),
                   name_re, status)
    progspace_matchers = get_method_matchers_in_loci(
        [gdb.current_progspace()], locus_re, matcher_re)
    set_xm_status1(progspace_matchers, name_re, status)
    objfile_matchers = get_method_matchers_in_loci(
        gdb.objfiles(), locus_re, matcher_re)
    set_xm_status1(objfile_matchers, name_re, status)
def return_list(name): """ Internal Worker function to return the frame filter dictionary, depending on the name supplied as an argument. If the name is not "all", "global" or "progspace", it is assumed to name an object-file. Arguments: name: The name of the list, as specified by GDB user commands. Returns: A dictionary object for a single specified dictionary, or a list containing all the items for "all" Raises: gdb.GdbError: A dictionary of that name cannot be found. """ # If all dictionaries are wanted in the case of "all" we # cannot return a combined dictionary as keys() may clash in # between different dictionaries. As we just want all the frame # filters to enable/disable them all, just return the combined # items() as a chained iterator of dictionary values. if name == "all": glob = gdb.frame_filters.values() prog = gdb.current_progspace().frame_filters.values() return_iter = itertools.chain(glob, prog) for objfile in gdb.objfiles(): return_iter = itertools.chain(return_iter, objfile.frame_filters.values()) return return_iter if name == "global": return gdb.frame_filters else: if name == "progspace": cp = gdb.current_progspace() return cp.frame_filters else: for objfile in gdb.objfiles(): if name == objfile.filename: return objfile.frame_filters msg = "Cannot find frame-filter dictionary for '" + name + "'" raise gdb.GdbError(msg)
def invoke(self, arg, from_tty):
    """List frame filters registered globally, per-progspace, and per-objfile."""
    self.print_list("global frame-filters:", gdb.frame_filters, True)
    progspace = gdb.current_progspace()
    self.print_list("progspace %s frame-filters:" % progspace.filename,
                    progspace.frame_filters, True)
    for objfile in gdb.objfiles():
        self.print_list("objfile %s frame-filters:" % objfile.filename,
                        objfile.frame_filters, False)
def get_info_proc_mapping():
    """ wrapper of 'info proc mapping' cmd with cached mapping info """
    global num_objfiles, cached_proc_mapping
    # Invalidate the cache whenever the number of loaded objfiles changes.
    current_count = len(gdb.objfiles())
    if current_count != num_objfiles:
        num_objfiles = current_count
        cached_proc_mapping = m_util.gdb_exec_to_str('info proc mapping')
    return cached_proc_mapping
def invoke(self, arg, from_tty):
    """Print xmethod info for every locus matching the given regexps."""
    locus_re, matcher_re, name_re = parse_xm_command_args(arg)
    global_matchers = get_global_method_matchers(locus_re, matcher_re)
    print_xm_info(global_matchers, name_re)
    progspace_matchers = get_method_matchers_in_loci(
        [gdb.current_progspace()], locus_re, matcher_re)
    print_xm_info(progspace_matchers, name_re)
    objfile_matchers = get_method_matchers_in_loci(
        gdb.objfiles(), locus_re, matcher_re)
    print_xm_info(objfile_matchers, name_re)
def return_list(name):
    """ Internal Worker function to return the frame filter dictionary,
    depending on the name supplied as an argument.  If the name is not
    "all", "global" or "progspace", it is assumed to name an
    object-file.

    Arguments:
        name: The name of the list, as specified by GDB user commands.

    Returns:
        A dictionary object for a single specified dictionary, or a
        list containing all the items for "all"

    Raises:
        gdb.GdbError: A dictionary of that name cannot be found.
    """
    # If all dictionaries are wanted in the case of "all" we
    # cannot return a combined dictionary as keys() may clash in
    # between different dictionaries. As we just want all the frame
    # filters to enable/disable them all, just return the combined
    # items() as a list.
    if name == "all":
        # Fix: dict.values() returns a view in Python 3 which does not
        # support `+`; materialize real lists before concatenating.
        all_dicts = list(gdb.frame_filters.values())
        all_dicts += list(gdb.current_progspace().frame_filters.values())
        for objfile in gdb.objfiles():
            all_dicts += list(objfile.frame_filters.values())
        return all_dicts

    if name == "global":
        return gdb.frame_filters
    else:
        if name == "progspace":
            cp = gdb.current_progspace()
            return cp.frame_filters
        else:
            # Anything else is treated as an objfile filename.
            for objfile in gdb.objfiles():
                if name == objfile.filename:
                    return objfile.frame_filters

    msg = "Cannot find frame-filter dictionary for '" + name + "'"
    raise gdb.GdbError(msg)
def invoke(self, arg, from_tty):
    """Show matching xmethods from the global, progspace and objfile loci."""
    locus_re, matcher_re, name_re = parse_xm_command_args(arg)
    for matchers in (
            get_global_method_matchers(locus_re, matcher_re),
            get_method_matchers_in_loci([gdb.current_progspace()],
                                        locus_re, matcher_re),
            get_method_matchers_in_loci(gdb.objfiles(),
                                        locus_re, matcher_re)):
        print_xm_info(matchers, name_re)
def binary_name():
    """Fetch the name of the binary gdb is attached to."""
    path = gdb.objfiles()[0].filename
    stem = os.path.splitext(os.path.basename(path))[0]
    # Same precedence as before: "mongod" is tested before "mongo".
    for candidate in ("mongod", "mongo", "mongos"):
        if stem.endswith(candidate):
            return candidate
    return None
def invoke(self, arg, from_tty):
    """List unwinders whose locus and name match the command arguments."""
    locus_re, name_re = parse_unwinder_command_args(arg)
    if locus_re.match("global"):
        self.list_unwinders("Global:", gdb.frame_unwinders, name_re)
    if locus_re.match("progspace"):
        progspace = gdb.current_progspace()
        self.list_unwinders("Progspace %s:" % progspace.filename,
                            progspace.frame_unwinders, name_re)
    for objfile in gdb.objfiles():
        if locus_re.match(objfile.filename):
            self.list_unwinders("Objfile %s:" % objfile.filename,
                                objfile.frame_unwinders, name_re)
def _install(): current_objfile = gdb.current_objfile() # gdb.current_objfile() is only set when autoloading; # otherwise we can try to assume the first loaded objfile is the kernel. if not current_objfile and gdb.objfiles(): current_objfile = gdb.objfiles()[0] if not current_objfile: print("Warning: no object file set") return register_zircon_pretty_printers(current_objfile) if current_objfile is not None and _is_x86_64(): gdb.unwinder.register_unwinder(current_objfile, _Amd64KernelExceptionUnwinder(), True) print("Zircon extensions installed for {}".format( current_objfile.filename)) if not _is_x86_64() and not _is_arm64(): print( "Warning: Unsupported architecture, KASLR support will be experimental" ) gdb.events.stop.connect(_KASLR_stop_event)
def invoke(self, arg, from_tty):
    """GDB calls this to perform the command."""
    for name in arg.split():
        matched = False
        # Same locus order as before: objfiles, progspace, globals.
        for objfile in gdb.objfiles():
            if self.set_some(name, objfile.type_printers):
                matched = True
        matched = self.set_some(name, gdb.current_progspace().type_printers) or matched
        matched = self.set_some(name, gdb.type_printers) or matched
        if not matched:
            print("No type printer named '%s'" % name)
def get_type_recognizers():
    "Return a list of the enabled type recognizers for the current context."
    recognizers = []
    # Precedence: objfile printers, then the progspace's, then globals.
    for objfile in gdb.objfiles():
        _get_some_type_recognizers(recognizers, objfile.type_printers)
    _get_some_type_recognizers(recognizers,
                               gdb.current_progspace().type_printers)
    _get_some_type_recognizers(recognizers, gdb.type_printers)
    return recognizers
def invoke(self, arg, from_tty):
    """GDB calls this to perform the command."""
    object_re, name_re, subname_re = parse_printer_regexps(arg)
    self.invoke1("global pretty-printers:", gdb.pretty_printers,
                 "global", object_re, name_re, subname_re)
    progspace = gdb.current_progspace()
    self.invoke1("progspace %s pretty-printers:" % progspace.filename,
                 progspace.pretty_printers, "progspace",
                 object_re, name_re, subname_re)
    for objfile in gdb.objfiles():
        self.invoke1("objfile %s pretty-printers:" % objfile.filename,
                     objfile.pretty_printers, objfile.filename,
                     object_re, name_re, subname_re)
def invoke(self, arg, from_tty):
    """GDB calls this to perform the command."""
    (object_re, name_re, subname_re) = parse_printer_regexps(arg)
    cp = gdb.current_progspace()
    # Collect (title, printers, locus-name) triples, then list each one.
    loci = [("global pretty-printers:", gdb.pretty_printers, "global"),
            ("progspace %s pretty-printers:" % cp.filename,
             cp.pretty_printers, "progspace")]
    for objfile in gdb.objfiles():
        # Note: the leading space in this title is part of the original output.
        loci.append((" objfile %s pretty-printers:" % objfile.filename,
                     objfile.pretty_printers, objfile.filename))
    for title, printers, locus_name in loci:
        self.invoke1(title, printers, locus_name,
                     object_re, name_re, subname_re)
def set_xm_status(arg, status):
    """Set the status (enabled/disabled) of xmethods matching ARG.

    This is a helper function for enable/disable commands.  ARG is the
    argument string passed to the commands.
    """
    locus_re, matcher_re, name_re = parse_xm_command_args(arg)
    for matchers in (
            get_global_method_matchers(locus_re, matcher_re),
            get_method_matchers_in_loci([gdb.current_progspace()],
                                        locus_re, matcher_re),
            get_method_matchers_in_loci(gdb.objfiles(),
                                        locus_re, matcher_re)):
        set_xm_status1(matchers, name_re, status)
def open(self, filename: str, from_tty: bool) -> None:
    """Open a kdump file as the debug target.

    Requires the kernel image to already be loaded so symbols can be
    relocated by the KASLR offset recorded in the vmcore.

    Raises:
        gdb.GdbError: no objfile is loaded, or the dump cannot be opened.
    """
    objfiles = gdb.objfiles()
    if not objfiles:
        raise gdb.GdbError(
            "kdumpfile target requires kernel to be already loaded for symbol resolution"
        )
    try:
        self.kdump = kdumpfile(file=filename)
    except Exception as e:
        raise gdb.GdbError("Failed to open `{}': {}".format(
            filename, str(e)))
    # pylint: disable=unsupported-assignment-operation
    self.kdump.attr['addrxlat.ostype'] = 'linux'
    KERNELOFFSET = "linux.vmcoreinfo.lines.KERNELOFFSET"
    try:
        # KASLR slide as a hex string; "0" when not recorded.
        attr = self.kdump.attr.get(KERNELOFFSET, "0")  # pylint: disable=no-member
        self.base_offset = int(attr, base=16)
    except (TypeError, ValueError):
        # Missing or garbled offset: keep whatever base_offset already holds.
        pass
    vmlinux = gdb.objfiles()[0].filename
    # Load the kernel at the relocated address
    # Unfortunately, the percpu section has an offset of 0 and
    # ends up getting placed at the offset base. This is easy
    # enough to handle in the percpu code.
    result = gdb.execute("add-symbol-file {} -o {:#x}".format(
        vmlinux, self.base_offset), to_string=True)
    if self.debug:
        print(result)
    # Clear out the old symbol cache
    gdb.execute("file {}".format(vmlinux))
def do_enable_unwinder(arg, flag):
    """Enable/disable unwinder(s)."""
    (locus_re, name_re) = parse_unwinder_command_args(arg)
    count = 0
    if locus_re.match("global"):
        count += do_enable_unwinder1(gdb.frame_unwinders, name_re, flag)
    if locus_re.match("progspace"):
        count += do_enable_unwinder1(gdb.current_progspace().frame_unwinders,
                                     name_re, flag)
    for objfile in gdb.objfiles():
        if locus_re.match(objfile.filename):
            count += do_enable_unwinder1(objfile.frame_unwinders,
                                         name_re, flag)
    plural = "" if count == 1 else "s"
    state = "enabled" if flag else "disabled"
    print("%d unwinder%s %s" % (count, plural, state))
def __init__(self, mtype=gdb.BP_BREAKPOINT, mqualified=True):
    """
    1, Creates a Maple breakpoint using gdb python api
    2, Initializes a Maple symbol table that this breakpoint will stop on.
    3, Disables unnecessary plt breakpoint.
    """
    # NOTE(review): mtype and mqualified are accepted but not used in this
    # body — TODO confirm they are consumed by the gdb.Breakpoint base class
    # elsewhere or can be dropped.
    super().__init__('maple::maple_invoke_method')
    buf = m_util.gdb_exec_to_str("info b")
    disable_maple_invoke_bp_plt(buf)
    self.mbp_table = {}  # the symbols here are NOT mirbin_info symbols
    # Resolved stop address and location info for the breakpoint (may be
    # unavailable until the shared library is loaded).
    self.bp_addr, self.bp_info = get_maple_invoke_bp_stop_addr(buf)
    self.mbp_addr_sym_table = {}  # a symbol address keyed table for fast stop logic
    self.load_objfiles = len(gdb.objfiles())  # initial value
def invoke(self, arg, from_tty):
    """GDB calls this to perform the command."""
    # A newline separates sections once the first section has printed.
    separator = ''
    for objfile in gdb.objfiles():
        if objfile.type_printers:
            print("%sType printers for %s:" % (separator, objfile.filename))
            self.list_type_printers(objfile.type_printers)
            separator = '\n'
    progspace_printers = gdb.current_progspace().type_printers
    if progspace_printers:
        print("%sType printers for program space:" % separator)
        self.list_type_printers(progspace_printers)
        separator = '\n'
    if gdb.type_printers:
        print("%sGlobal type printers:" % separator)
        self.list_type_printers(gdb.type_printers)
def invoke(self, arg, from_tty):
    """Launch the loaded objfile under valgrind's vgdb and attach to it."""
    obj_files = gdb.objfiles()
    if len(obj_files) != 1:
        print(
            f"{MAGI_HEADER} Correct file could not be determined. Have you loaded a file yet?"
        )
        return
    target = obj_files[0].filename
    gdb.vgdb_call = subprocess.Popen([
        "valgrind", "--tool=memcheck", "--vgdb=yes", "--vgdb-error=0", target
    ])
    # Give valgrind a moment to start its gdbserver before connecting.
    time.sleep(1)
    gdb.execute("target remote | vgdb")
    gdb.execute("break main")
    gdb.execute("c")
def count_all_enabled_printers():
    """Return a 2-tuple (enabled, total) over all printers.

    This includes subprinters.
    """
    enabled_count = 0
    total_count = 0
    collections_to_count = [gdb.pretty_printers,
                            gdb.current_progspace().pretty_printers]
    collections_to_count.extend(objfile.pretty_printers
                                for objfile in gdb.objfiles())
    for printers in collections_to_count:
        t_enabled, t_total = count_enabled_printers(printers)
        enabled_count += t_enabled
        total_count += t_total
    return (enabled_count, total_count)
def __init__(self):
    # self.completed: whether the callback has run to completion.
    # Start optimistic; the local `completed` tracks the actual outcome.
    self.completed = True
    completed = False
    self.setup_symbol_cache_flush_callback()
    # We don't want to do lookups immediately if we don't have
    # an objfile. It'll fail for any custom types but it can
    # also return builtin types that are eventually changed.
    if len(gdb.objfiles()) > 0:
        result = self.check_ready()
        # Only fire the callback on a meaningful (non-None/False) result.
        if not (result is None or result is False):
            completed = self.callback(result)
    # If the callback did not (or could not) run to completion, wait for
    # more objfiles to arrive and retry via new_objfile_callback.
    if completed is False:
        self.completed = False
        gdb.events.new_objfile.connect(self.new_objfile_callback)
def open_kernel(self):
    """Load kernel sections and make sure debuginfo is usable.

    Raises:
        RuntimeError: base offset unset, or debuginfo cannot be located.
    """
    if self.base_offset is None:
        raise RuntimeError("Base offset is unconfigured.")
    self.load_sections()
    # Probe a well-known kernel type; failure means debuginfo is missing,
    # so try to load it explicitly and probe once more.
    try:
        list_type = gdb.lookup_type('struct list_head')
    except gdb.error as e:
        self.load_debuginfo(gdb.objfiles()[0], None)
        try:
            list_type = gdb.lookup_type('struct list_head')
        except gdb.error as e:
            raise RuntimeError("Couldn't locate debuginfo for {}".format(
                self.vmlinux_filename))
    self.target.setup_arch()
def setup(lib_name: str, astnode_names: List[str], astnode_kinds: Dict[int, str], prefix: str) -> None:
    """
    Register helpers in GDB internals. This should be run when the generated
    library is actually loaded in GDB.
    """
    langkit.gdb.setup_done = True

    context = Context(lib_name, astnode_names, astnode_kinds, prefix)
    langkit.gdb.global_context = context

    # Collect every langkit pretty-printer into one registry.
    langkit.gdb.gdb_printers = printers.GDBPrettyPrinters(context)
    for printer in [
        printers.AnalysisUnitPrinter,
        printers.ASTNodePrinter,
        printers.EnvNamePrinter,
        printers.LexicalEnvPrinter,
        printers.EnvGetterPrinter,
        printers.ReferencedEnvPrinter,
        printers.EntityPrinter,
        printers.ArrayPrettyPrinter,
        printers.StringPrettyPrinter,
        printers.LangkitVectorPrinter,
        printers.RebindingsPrinter,
        printers.TokenReferencePrinter,
    ]:
        langkit.gdb.gdb_printers.append(printer)

    # Hook already-loaded objfiles (without reparsing debug info)...
    for objfile in gdb.objfiles():
        handle_new_objfile(objfile, lib_name, reparse_debug_info=False)
    # ... and every objfile loaded from now on.
    gdb.events.new_objfile.connect(
        lambda event: handle_new_objfile(event.new_objfile, lib_name)
    )

    # Register the user-facing GDB commands and convenience function.
    for cmd_cls in [
        commands.StateCommand,
        commands.BreakCommand,
        commands.NextCommand,
        commands.OutCommand,
        commands.StepInsideCommand,
    ]:
        cmd_cls(context)

    functions.Match(context)
def invoke(self, arg, from_tty):
    """Print every registered frame filter; note when there are none."""
    printed = self.print_list("global frame-filters:",
                              gdb.frame_filters, True)
    progspace = gdb.current_progspace()
    printed += self.print_list(
        "progspace %s frame-filters:" % progspace.filename,
        progspace.frame_filters, True)
    for objfile in gdb.objfiles():
        printed += self.print_list(
            "objfile %s frame-filters:" % objfile.filename,
            objfile.frame_filters, False)
    if printed == 0:
        print("No frame filters.")
def checksec():
    """Print RELRO/CANARY/NX/PIE/FORTIFY hardening status of the main objfile."""
    filename = gdb.objfiles()[0].filename
    result = {"RELRO": 0, "CANARY": 0, "NX": 1, "PIE": 0, "FORTIFY": 0}
    if filename is None:
        print("Error")
        return 0
    data = subprocess.check_output("readelf -W -a " + filename,
                                   shell=True).decode('utf8')
    if re.search("GNU_RELRO", data):
        result["RELRO"] |= 2
    if re.search("BIND_NOW", data):
        result["RELRO"] |= 1
    if re.search("__stack_chk_fail", data):
        result["CANARY"] = 1
    if re.search(r"GNU_STACK.*RWE.*\n", data):
        result["NX"] = 0
    if re.search(r"DYN \(", data):
        result["PIE"] = 4
    if re.search(r"\(DEBUG\)", data) and result["PIE"] == 4:
        result["PIE"] = 1
    if re.search("_chk@", data):
        result["FORTIFY"] = 1
    # BIND_NOW without GNU_RELRO does not count as RELRO at all.
    if result["RELRO"] == 1:
        result["RELRO"] = 0
    out = {
        0: "\033[31mdisabled\033[37m",
        1: "\033[32mENABLE\033[37m",
        2: "\033[33mPartial\033[37m",
        3: "\033[32mFULL\033[37m",
        4: "\033[33mDynamic Shared Object\033[37m"
    }
    for key, value in sorted(result.items()):
        print("%s : %s" % (key.ljust(10), out[value]))
    return
def is_debuginfo_loaded(debuginfo):
    """Checks if debug symbols are loaded for a given library"""
    sharedlibrary_output = gdb.execute("info sharedlibrary", to_string=True)
    if sharedlibrary_output == "No shared libraries loaded at this time.":
        return False
    sharedlibrary_lines = sharedlibrary_output.split('\n')
    # Make sure the columns are what we think they are
    assert [i.strip() for i in sharedlibrary_lines[0].split(' ') if i.strip() != ''] == \
        ["From", "To", "Syms Read", "Shared Object Library"]
    for line in sharedlibrary_lines[1:]:
        line_columns = [column.strip() for column in line.split(' ') if column.strip() != '']
        # A blank line marks the end of the table.
        if not line_columns:
            break
        assert len(line_columns) == 4
        syms_read = line_columns[2]
        sharedlibrary = os.path.basename(line_columns[3])
        # First matching library decides the answer ("Yes" in Syms Read).
        if sharedlibrary.startswith(debuginfo):
            if syms_read == "Yes":
                return True
            else:
                return False
    # Name not found in 'info sharedlibrary' output, check objfiles
    # XXX - This is a last ditch effort for cases where a full version name
    # is specified but the sharedlibrary output only gives the name of the
    # symlink which doesn't allow us to know the underlying version
    for objfile in gdb.objfiles():
        objfile_name = os.path.basename(objfile.filename)
        if objfile_name.startswith(debuginfo) and objfile_name.endswith(".debug"):
            return True
    return False
def checksec():
    """Report binary hardening flags (RELRO, canary, NX, PIE, FORTIFY)."""
    filename = gdb.objfiles()[0].filename
    if filename is None:
        print("Error")
        return 0
    data = subprocess.check_output("readelf -W -a " + filename, shell=True).decode('utf8')

    def found(pattern):
        # True when the readelf output contains the pattern.
        return re.search(pattern, data) is not None

    result = {}
    result["RELRO"] = (2 if found("GNU_RELRO") else 0) | (1 if found("BIND_NOW") else 0)
    result["CANARY"] = 1 if found("__stack_chk_fail") else 0
    result["NX"] = 0 if found(r"GNU_STACK.*RWE.*\n") else 1
    result["PIE"] = 4 if found(r"DYN \(") else 0
    if found(r"\(DEBUG\)") and result["PIE"] == 4:
        result["PIE"] = 1
    result["FORTIFY"] = 1 if found("_chk@") else 0
    # BIND_NOW alone does not count as RELRO.
    if result["RELRO"] == 1:
        result["RELRO"] = 0
    out = {
        0: "\033[31mdisabled\033[37m",
        1: "\033[32mENABLE\033[37m",
        2: "\033[33mPartial\033[37m",
        3: "\033[32mFULL\033[37m",
        4: "\033[33mDynamic Shared Object\033[37m"
    }
    for name, value in sorted(result.items()):
        print("%s : %s" % (name.ljust(10), out[value]))
    return
def open(self, args: str, from_tty: bool) -> None:
    """Open a vmcore: expects `VMLINUX VMCORE` as the argument string.

    Raises:
        gdb.GdbError: missing arguments or the dump cannot be opened.
    """
    argv = shlex.split(args)
    if len(argv) < 2:
        raise gdb.GdbError("kdumpfile target requires kernel image and vmcore")
    vmlinux = argv[0]
    filename = argv[1]
    try:
        self.kdump = kdumpfile(file=filename)
    except Exception as e:
        raise gdb.GdbError("Failed to open `{}': {}"
                           .format(filename, str(e)))
    # pylint: disable=unsupported-assignment-operation
    self.kdump.attr['addrxlat.ostype'] = 'linux'
    KERNELOFFSET = "linux.vmcoreinfo.lines.KERNELOFFSET"
    try:
        # KASLR slide as a hex string; "0" when not recorded.
        attr = self.kdump.attr.get(KERNELOFFSET, "0")  # pylint: disable=no-member
        self.base_offset = int(attr, base=16)
    except (TypeError, ValueError):
        # Missing or garbled offset: keep whatever base_offset already holds.
        pass
    # Load the kernel at the relocated address
    # Unfortunately, the percpu section has an offset of 0 and
    # ends up getting placed at the offset base. This is easy
    # enough to handle in the percpu code.
    result = gdb.execute("symbol-file {} -o {:#x}"
                         .format(vmlinux, self.base_offset), to_string=True)
    if self.debug:
        print(result)
    # We don't have an exec-file so we need to set the architecture
    # explicitly.
    arch = gdb.objfiles()[0].architecture.name()
    result = gdb.execute("set architecture {}".format(arch), to_string=True)
    if self.debug:
        print(result)
def CheckFileArch():
    """Checks file architecture"""
    # NOTE(review): Python 2 code (print statements) — matches the rest of
    # the pygdbdis module.
    errorstring = "Could not properly determine program architecture to setup environment."
    global faddress_printf
    global faddress_and
    global stacksize
    global registers
    if bool(ExtensionSettings.getValue('set_break_on_entry')):
        setBreakAtEntry()
    try:
        target_info=gdb.execute("info target", False, True)
        mslines=target_info.split('\n')
        targeti=False
        # Find the "file type" line of `info target` and derive the word
        # size (32/64) from its fourth whitespace-separated field.
        for s in mslines:
            if s.find("file type") > -1:
                aparts=s.split()
                arch = aparts[3]
                if (aparts[3].find("64") > -1):
                    bsize = '64'
                    registers=registers64
                    faddress_printf="%016x"
                    stacksize=8
                    faddress_and=0xffffffffffffffff
                else:
                    bsize = '32'
                    registers=registers32
                    faddress_printf="%08x"
                    stacksize=4
                    faddress_and=0xffffffff
                targeti=True
                break
        if (not targeti):
            print errorstring
    except:
        # NOTE(review): bare except hides every error, including NameErrors
        # above — consider narrowing to gdb.error.
        print errorstring
    print 'pygdbdis configured for objfile ' + gdb.objfiles()[0].filename + '.'
    # NOTE(review): bsize/arch are unbound if no "file type" line matched.
    print 'objfile has ' + bsize + ' bit architecture ' + arch
def register_pretty_printers(printers, objfile_filter=objfile_filter_true):
    """
    Register pretty-printers in existing objfiles and arrange for them to be
    registered in every objfile loaded later.

    :param gdb.printing.PrettyPrinter printers: Set of pretty-printers to
        register.

    :param (gdb.Objfile) -> bool objfile: Function to restrict the set of
        objfiles into which to register pretty-printers. These will be added
        only to objfiles for which this function returns true.
    """
    def maybe_register(objfile):
        # Only attach the printers to objfiles accepted by the filter.
        if objfile_filter(objfile):
            gdb.printing.register_pretty_printer(objfile, printers)

    # Objfiles already loaded...
    for objfile in gdb.objfiles():
        maybe_register(objfile)
    # ... and every objfile to come.
    gdb.events.new_objfile.connect(
        lambda event: maybe_register(event.new_objfile))
def invoke(self, args, from_tty):
    """Set a breakpoint at image_base + OFFSET (for PIE binaries).

    Expects exactly one argument: a hex offset relative to the process
    image base, derived from the `vmmap` output.
    """
    argv = gdb.string_to_argv(args)
    try:
        proc_name = gdb.objfiles()[0].filename
        result = gdb.execute("vmmap", to_string=True)
        # Grab every start/end address pair on vmmap lines that map the
        # binary itself, then flatten the (start, end) tuples into one list.
        result = re.findall("(0x.+)(0x[0-9a-fA-F]+).*" + proc_name, result)
        result = [y for x in result for y in x]
        if len(result) == 0:
            msg("ap: get process base address error!")
            return -1
        # Image base = numerically smallest mapped address.
        proc_addr_start = int(min(result, key=lambda x: int(x, 16)), 16)
        proc_addr_end = int(max(result, key=lambda x: int(x, 16)), 16)
        if len(argv) != 1:
            msg("ap: please input a args")
        else:
            gdb.execute("b * " + hex(proc_addr_start + int(argv[0], 16)))
            # Remember the breakpoint we just created for later cleanup.
            bp_num = gdb.breakpoints()[-1].number
            add_breakpoint.append(bp_num)
    except Exception as e:
        #msg(e)
        msg("ap: The program is not being run!")
        return -1
def _sort_list():
    """
    Merge all known frame-filter dictionaries, prune filters whose state
    is "disabled", and sort on the frame-filter "priority" attribute.

    Returns:
        A sorted, pruned iterable of frame filters to execute.
    """
    pools = [objfile.frame_filters for objfile in gdb.objfiles()]
    pools.append(gdb.current_progspace().frame_filters)
    pools.append(gdb.frame_filters)

    merged = []
    for pool in pools:
        merged += pool.values()

    ranked = sorted(merged, key=get_priority, reverse=True)
    return filter(get_enabled, ranked)
def _complete_frame_filter_list(text, word, all_flag):
    """Worker for frame filter dictionary name completion.

    Arguments:
        text: The full text of the command line.
        word: The most recent word of the command line.
        all_flag: Whether to include the word "all" in completion.

    Returns:
        A list of suggested frame filter dictionary name completions
        from text/word analysis.  This list can be empty when there
        are no suggestions for completion.
    """
    if all_flag:
        filter_locations = ["all", "global", "progspace"]
    else:
        filter_locations = ["global", "progspace"]
    for objfile in gdb.objfiles():
        filter_locations.append(objfile.filename)

    # If the user just asked for completions with no completion
    # hints, just return all the frame filter dictionaries we know
    # about.
    if text == "":
        return filter_locations

    # Otherwise filter on what we know.  Fix: build a real list — in
    # Python 3, filter() returns an iterator, which supports neither
    # len() nor item assignment, so the original code crashed below.
    flist = [name for name in filter_locations if name.startswith(text)]

    # If we only have one completion, complete it and return it.
    if len(flist) == 1:
        flist[0] = flist[0][len(text) - len(word):]

    # Otherwise, return an empty list, or a list of frame filter
    # dictionaries that the previous filter operation returned.
    return flist
def stop(self):
    """
    provide stop control logic here. note, user will add one or more Maple symbol
    into mbp_table, only those qualified ones in mbp_table will be admitted to make
    breakpoint stop
    return True: to stop
           False: to bypass
    """
    # if the Maple breakpoint was created before .so is loaded, when the breakpoint is hit,
    # plt will be not disable. So we disable it
    buf = m_util.gdb_exec_to_str("info b")
    if not is_maple_invoke_bp_plt_disabled(buf):
        disable_maple_invoke_bp_plt(buf)
        return False
    # if the Maple breakpoint was created before .so is loaded, bp_addr, bp_info (location)
    # if not available. But once hit, address and loc start to be available.
    if not self.bp_info or not self.bp_addr:
        self.bp_addr, self.bp_info = get_maple_invoke_bp_stop_addr(buf)
    # determine whether need to look up pending symbol's address. If the number of loaded
    # libraries is not equal to the one saved, then libs might have been loaded or unloaded,
    # so we try to update
    if len(gdb.objfiles()) != self.load_objfiles:
        self.load_objfiles = len(gdb.objfiles())
        pending_addr_symbol_list = self.get_pending_addr_symbol_list()
        if len(pending_addr_symbol_list) > 0:
            for sym in pending_addr_symbol_list:
                self.update_pending_addr_symbol(sym)
    # NOTE!!! here we are getting the current stack's mir_header's address
    args_addr = m_symbol.get_symbol_addr_by_current_frame_args()
    if not args_addr or not args_addr in self.mbp_addr_sym_table:
        return False
    # mir_header address matches one breakpoint address list table
    table_args_addr = self.mbp_addr_sym_table[args_addr]
    args_symbol = table_args_addr['mir_symbol']
    match_pattern = table_args_addr['symbol']
    # it is possible that match pattern is in mbp_addr_sym_table for address check,
    # but the self.mbp_addr_sym_table[args_addr]['symbol'] could have been cleared
    # in self.mbp_table by user's mb -clear command
    if not match_pattern in self.mbp_table:
        return False
    # Maple symbol for the breakpoint is disabled
    table_match_pattern = self.mbp_table[match_pattern]
    if table_match_pattern['disabled'] is True:
        return False
    # honour the user-set ignore count before admitting a stop
    if table_match_pattern['ignore_count'] > 0:
        table_match_pattern['ignore_count'] -= 1
        return False
    table_match_pattern['hit_count'] += 1
    # update the Maple gdb runtime metadata store.
    m_datastore.mgdb_rdata.update_gdb_runtime_data()
    # update the Maple frame change count
    m_datastore.mgdb_rdata.update_frame_change_counter()
    return True
def invoke(self, arg, from_tty):
    """Rebuild with ninja, then reload symbols for every loaded objfile."""
    call("ninja " + arg, shell=True)
    for objfile in gdb.objfiles():
        print("Reload %s" % (objfile.filename))
        self.reload(objfile.filename)
if len(r) < 3: bp_count[bps[bp]] = 0 else: r = r[2] m = re.match(r'\s*breakpoint already hit (\d+)', r) if m is None: bp_count[bps[bp]] = 0 else: bp_count[bps[bp]] = int(m.group(1)) return bp_count qemu = backgroundProc("qemu-system-arm -gdb tcp::{} -M stm32-p103 -nographic -kernel ".format(port)+gdb.objfiles()[0].filename) try: print "Initialising..." gdb.execute("set confirm off") gdb.execute("set height 0") gdb.execute("delete breakpoints", to_string=True) # gdb.execute("file "+fname, to_string=True) gdb.execute("tar ext :{}".format(port), to_string=True) gdb.execute("load", to_string=True) gdb.execute("break exit", to_string=True) fdir = getExecDir() output_iters = fdir + "/output_iters" files = findSources()
def getExecDir():
    """Return the directory containing the primary objfile."""
    primary = gdb.objfiles()[0]
    return os.path.dirname(primary.filename)
def rippled_objfile():
    """Return the first objfile recognized as rippled, or None."""
    matches = (of for of in gdb.objfiles() if is_rippled(of))
    return next(matches, None)
def get_process_name():
    """Return the main binary we are attached to."""
    # The return from gdb.objfiles() could include the file extension of
    # the debug symbols; strip directory and extension.
    base = os.path.basename(gdb.objfiles()[0].filename)
    name, _ext = os.path.splitext(base)
    return name
import sys # some feedback that the nim runtime support is loading, isn't a bad # thing at all. gdb.write("Loading Nim Runtime support.\n", gdb.STDERR) # When error occure they occur regularly. This 'caches' known errors # and prevents them from being reprinted over and over again. errorSet = set() def printErrorOnce(id, message): global errorSet if id not in errorSet: errorSet.add(id) gdb.write(message, gdb.STDERR) nimobjfile = gdb.current_objfile() or gdb.objfiles()[0] nimobjfile.type_printers = [] ################################################################################ ##### Type pretty printers ################################################################################ type_hash_regex = re.compile("^\w*_([A-Za-z0-9]*)$") def getNimRti(type_name): """ Return a ``gdb.Value`` object for the Nim Runtime Information of ``type_name``. """ # Get static const TNimType variable. This should be available for # every non trivial Nim type. m = type_hash_regex.match(type_name) if m:
"""A routine that returns the correct pretty printer for VAL if appropriate. Returns None otherwise. """ if val.type.tag == "type": return StructTypePrettyPrinter(val) elif val.type.tag == "main_type": return StructMainTypePrettyPrinter(val) return None def register_pretty_printer(objfile): """A routine to register a pretty-printer against the given OBJFILE. """ objfile.pretty_printers.append(type_lookup_function) if __name__ == "__main__": if gdb.current_objfile() is not None: # This is the case where this script is being "auto-loaded" # for a given objfile. Register the pretty-printer for that # objfile. register_pretty_printer(gdb.current_objfile()) else: # We need to locate the objfile corresponding to the GDB # executable, and register the pretty-printer for that objfile. # FIXME: The condition used to match the objfile is too simplistic # and will not work on Windows. for objfile in gdb.objfiles(): if os.path.basename(objfile.filename) == "gdb": objfile.pretty_printers.append(type_lookup_function)
def __init__(self, kernel_exec=None, vmcore=None, kernelpath=None,
             searchpath=None, debug=False):
    """Initialize crash-python: load helper submodules, then (optionally)
    open VMCORE against KERNEL_EXEC, relocating symbols by the KASLR
    offset recorded in the vmcore."""
    # NOTE(review): uses Python 2 `long` — confirm this module targets
    # py2 or defines `long` elsewhere.
    print("crash-python initializing...")
    if searchpath is None:
        searchpath = []
    autoload_submodules('crash.cache')
    autoload_submodules('crash.subsystem')
    autoload_submodules('crash.commands')
    self.searchpath = searchpath
    # Without a kernel executable we only set up the helper machinery.
    if not kernel_exec:
        return
    try:
        kdump = kdumpfile(vmcore)
    except OSErrorException as e:
        raise RuntimeError(str(e))
    # KASLR slide, stored in the vmcore as a hex string ("0" if absent).
    kaslr_off = long(
        kdump.attr.get("linux.vmcoreinfo.lines.KERNELOFFSET", "0"),
        base=16)
    error = gdb.execute("exec-file {}".format(kernel_exec), to_string=True)
    tinfo = gdb.execute("info target", to_string=True)
    args = ""
    textaddr = 0
    in_exec = False
    # Walk `info target` output within the "Local exec file:" stanza,
    # collecting each section's KASLR-relocated start address; sections
    # other than .text are passed to add-symbol-file via -s.
    for line in tinfo.splitlines():
        if line.startswith((" ", "\t")):
            if not in_exec:
                continue
            try:
                (addrs, sect) = line.strip().split(" is ")
            except ValueError:
                continue
            (start, end) = addrs.split(" - ")
            sect = sect.split(' ')[0]
            startaddr = long(start, base=0) + kaslr_off
            if sect == ".text":
                textaddr = startaddr
                args += " 0x{:x}".format(startaddr)
            elif startaddr >= textaddr:
                args += " -s {} 0x{:x}".format(sect, startaddr)
        elif line.startswith("Local exec file:"):
            in_exec = True
        elif not line.startswith("warning: "):
            in_exec = False
    error = gdb.execute("add-symbol-file {}{}".format(kernel_exec, args),
                        to_string=True)
    # Probe a well-known type; on failure try to locate debuginfo and retry.
    try:
        list_type = gdb.lookup_type('struct list_head')
    except gdb.error as e:
        load_debuginfo(searchpath, gdb.objfiles()[0], kernelpath)
        try:
            list_type = gdb.lookup_type('struct list_head')
        except gdb.error as e:
            raise RuntimeError("Couldn't locate debuginfo for {}".format(kernel_exec))
    self.target = crash.kdump.target.Target(kdump, debug)
    load_modules(self.searchpath)