Code example #1
0
 def test_ParserCfi(self):
     """Parses the CFI test map file and renders its sections and symbols.

     Returns:
       Rendered text produced by _RenderSectionSizesAndRawSymbols, suitable
       for golden-file comparison.
     """
     lines = _ReadMapFile(_TEST_CFI_MAP_PATH)
     detected_linker = linker_map_parser.DetectLinkerNameFromMapFile(
         iter(lines))
     parser = linker_map_parser.MapFileParser()
     # Parse() here yields a third element that this test does not inspect.
     section_sizes, raw_symbols, _ = parser.Parse(detected_linker, iter(lines))
     return _RenderSectionSizesAndRawSymbols(section_sizes, raw_symbols)
Code example #2
0
 def test_Tokenize(self):
   ret = []
   map_file = _ReadMapFile(_TEST_MAP_PATH)
   linker_name = linker_map_parser.DetectLinkerNameFromMapFile(iter(map_file))
   parser = linker_map_parser.MapFileParserLld(linker_name)
   tokenizer = parser.Tokenize(iter(map_file))
   for (_, address, size, level, span, tok) in tokenizer:
     ret.append('%8X %8X (%d) %s %s' % (address, size, level, '-' * 8 if
                                        span is None else '%8X' % span, tok))
   return ret
Code example #3
0
 def test_Parser(self):
   ret = []
   map_file = _ReadMapFile(_TEST_MAP_PATH)
   linker_name = linker_map_parser.DetectLinkerNameFromMapFile(iter(map_file))
   section_sizes, raw_symbols = (
       linker_map_parser.MapFileParser().Parse(linker_name, iter(map_file)))
   ret.append('******** section_sizes ********')
   for k, v in sorted(section_sizes.iteritems()):
     ret.append('%-24s %d' % (k, v))
   ret.append('')
   ret.append('******** raw_symbols ********')
   for sym in raw_symbols:
     ret.append(repr(sym))
   return ret