def _SaveSizeInfo(self, filtered_symbols=None, size_info=None, to_file=None):
  """Writes a .size file, optionally restricted to a subset of symbols.

  Args:
    filtered_symbols: Symbols to keep. When None, all symbols are saved.
    size_info: Source size_info. When None, uses self._size_infos[0].
    to_file: Output path. When None, writes to 'default.size'.
  """
  if not size_info:
    size_info = self._size_infos[0]
  if not to_file:
    to_file = 'default.size'
  assert to_file.endswith('.size'), 'to_file should end with .size'
  # Padding is recorded only when saving a sparse subset of symbols.
  file_format.SaveSizeInfo(size_info,
                           to_file,
                           include_padding=filtered_symbols is not None,
                           sparse_symbols=filtered_symbols)
  base_name = os.path.basename(os.path.normpath(to_file))
  instructions = (
      'Saved locally to {local}. To share, run:\n'
      '> gsutil.py cp {local} gs://chrome-supersize/oneoffs && gsutil.py -m '
      'acl ch -u AllUsers:R gs://chrome-supersize/oneoffs/{shortname}\n'
      ' Then view it at https://storage.googleapis.com/chrome-supersize'
      '/viewer.html?load_url=oneoffs%2F{shortname}')
  print(instructions.format(local=to_file, shortname=base_name))
def Run(args, parser):
  """Analyzes build artifacts and archives the result into args.size_file."""
  if not args.size_file.endswith('.size'):
    parser.error('size_file must end with .size')

  (output_directory, tool_prefix, apk_path, apk_so_path, elf_path,
   map_path) = DeduceMainPaths(args, parser)
  metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix,
                            output_directory)

  # Pulling the .so out of the .apk takes ~1 second; overlap it with the
  # main analysis by running it in a forked process.
  apk_elf_result = None
  if apk_path and elf_path:
    apk_elf_result = concurrent.ForkAndCall(
        _ElfInfoFromApk, (apk_path, apk_so_path, tool_prefix))

  section_sizes, raw_symbols = CreateSectionSizesAndSymbols(
      map_path=map_path,
      tool_prefix=tool_prefix,
      elf_path=elf_path,
      apk_path=apk_path,
      output_directory=output_directory,
      track_string_literals=args.track_string_literals,
      metadata=metadata,
      apk_elf_result=apk_elf_result,
      pak_files=args.pak_file,
      pak_info_file=args.pak_info_file)
  size_info = CreateSizeInfo(section_sizes,
                             raw_symbols,
                             metadata=metadata,
                             normalize_names=False)

  # Coverage description is expensive to compute; skip unless INFO is on.
  if logging.getLogger().isEnabledFor(logging.INFO):
    for line in describe.DescribeSizeInfoCoverage(size_info):
      logging.info(line)
  logging.info('Recorded info for %d symbols', len(size_info.raw_symbols))
  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.size_file)
  file_format.SaveSizeInfo(size_info, args.size_file)
  size_in_mb = os.path.getsize(args.size_file) / 1024.0 / 1024.0
  logging.info('Done. File size is %.2fMiB.', size_in_mb)
def test_Csv(self):
  """Runs the console's Csv() command and returns its output lines."""
  with tempfile.NamedTemporaryFile(suffix='.size') as size_file:
    with tempfile.NamedTemporaryFile(suffix='.txt') as output_file:
      file_format.SaveSizeInfo(self._CloneSizeInfo(), size_file.name)
      commands = [
          'Csv(size_info, to_file=%r)' % output_file.name,
      ]
      results = _RunApp('console',
                        [size_file.name, '--query', '; '.join(commands)])
      with open(output_file.name) as f:
        results.extend(line.rstrip() for line in f)
      return results
def test_Diff_Basic(self):
  """Diffs two perturbed clones of the same size_info.

  Asserts diff-status counts and symbol ordering, then returns the verbose
  diff description (presumably compared against a golden file by the
  caller -- confirm).
  """
  size_info1 = self._CloneSizeInfo(use_pak=True)
  size_info2 = self._CloneSizeInfo(use_pak=True)
  size_info2.build_config['git_revision'] = 'xyz789'
  container1 = size_info1.containers[0]
  container2 = size_info2.containers[0]
  # Container metadata differs only in "bar" -> shows up as a metadata diff.
  container1.metadata = {"foo": 1, "bar": [1, 2, 3], "baz": "yes"}
  container2.metadata = {"foo": 1, "bar": [1, 3], "baz": "yes"}
  # Drop different symbols from each side so the diff contains both
  # removals (from size_info1's view) and additions.
  size_info1.raw_symbols -= size_info1.raw_symbols.WhereNameMatches(
      r'pLinuxKernelCmpxchg|pLinuxKernelMemoryBarrier')
  size_info2.raw_symbols -= size_info2.raw_symbols.WhereNameMatches(
      r'IDS_AW_WEBPAGE_PARENTAL_|IDS_WEB_FONT_FAMILY|IDS_WEB_FONT_SIZE')
  # A genuine size change on one symbol...
  changed_sym = size_info1.raw_symbols.WhereNameMatches(
      'Patcher::Name_')[0]
  changed_sym.size -= 10
  # ...and a padding-only change on another (size grows only via padding).
  padding_sym = size_info2.raw_symbols.WhereNameMatches(
      'symbol gap 0')[0]
  padding_sym.padding += 20
  padding_sym.size += 20
  # Test pak symbols changing .grd files. They should not show as changed.
  pak_sym = size_info2.raw_symbols.WhereNameMatches(
      r'IDR_PDF_COMPOSITOR_MANIFEST')[0]
  pak_sym.full_name = pak_sym.full_name.replace('.grd', '2.grd')
  # Serialize & de-serialize so that name normalization runs again for the pak
  # symbol.
  bytesio = io.BytesIO()
  file_format.SaveSizeInfo(size_info2, 'path', file_obj=bytesio)
  bytesio.seek(0)
  size_info2 = archive.LoadAndPostProcessSizeInfo('path', file_obj=bytesio)

  d = diff.Diff(size_info1, size_info2)
  d.raw_symbols = d.raw_symbols.Sorted()
  # [1:] skips the first diff-status bucket; expect 1 changed, 2 added,
  # 3 removed (bucket order per CountsByDiffStatus -- confirm).
  self.assertEqual((1, 2, 3), d.raw_symbols.CountsByDiffStatus()[1:])
  changed_sym = d.raw_symbols.WhereNameMatches('Patcher::Name_')[0]
  padding_sym = d.raw_symbols.WhereNameMatches('symbol gap 0')[0]
  bss_sym = d.raw_symbols.WhereInSection(models.SECTION_BSS)[0]
  # Padding-only deltas should sort after all non-padding changes.
  padding_idx = d.raw_symbols.index(padding_sym)
  changed_idx = d.raw_symbols.index(changed_sym)
  bss_idx = d.raw_symbols.index(bss_sym)
  self.assertLess(changed_idx, padding_idx)
  # And before bss.
  self.assertLess(padding_idx, bss_idx)
  return describe.GenerateLines(d, verbose=True)
def test_Console(self):
  """Exercises a batch of console queries and returns the combined output."""
  with tempfile.NamedTemporaryFile(suffix='.size') as size_file:
    with tempfile.NamedTemporaryFile(suffix='.txt') as output_file:
      file_format.SaveSizeInfo(self._CloneSizeInfo(), size_file.name)
      commands = [
          'ShowExamples()',
          'ExpandRegex("_foo_")',
          'canned_queries.CategorizeGenerated()',
          'canned_queries.CategorizeByChromeComponent()',
          'canned_queries.TemplatesByName()',
          'Print(size_info, to_file=%r)' % output_file.name,
      ]
      results = _RunApp('console',
                        [size_file.name, '--query', '; '.join(commands)])
      with open(output_file.name) as f:
        results.extend(line.rstrip() for line in f)
      return results
def test_Diff_Basic(self):
  """Diffs two perturbed clones and checks status counts plus sort order."""
  before = self._CloneSizeInfo(use_pak=True)
  after = self._CloneSizeInfo(use_pak=True)
  after.build_config['git_revision'] = 'xyz789'
  # Container metadata differs only in "bar".
  before.containers[0].metadata = {"foo": 1, "bar": [1, 2, 3], "baz": "yes"}
  after.containers[0].metadata = {"foo": 1, "bar": [1, 3], "baz": "yes"}
  # Drop symbols from each side so the diff has adds and removes.
  before.raw_symbols -= before.raw_symbols[:2]
  after.raw_symbols -= after.raw_symbols[-3:]
  # One real size change...
  shrunk_sym = before.raw_symbols.WhereNameMatches(
      'Patcher::Name_')[0]
  shrunk_sym.size -= 10
  # ...and one padding-only change.
  gap_sym = after.raw_symbols.WhereNameMatches(
      'symbol gap 0')[0]
  gap_sym.padding += 20
  gap_sym.size += 20
  renamed_pak_sym = after.raw_symbols.WhereInSection(
      models.SECTION_PAK_NONTRANSLATED)[0]
  renamed_pak_sym.full_name = 'foo: ' + renamed_pak_sym.full_name.split()[-1]
  # Serialize & de-serialize so that name normalization runs again for the pak
  # symbol.
  buf = io.BytesIO()
  file_format.SaveSizeInfo(after, 'path', file_obj=buf)
  buf.seek(0)
  after = archive.LoadAndPostProcessSizeInfo('path', file_obj=buf)

  d = diff.Diff(before, after)
  d.raw_symbols = d.raw_symbols.Sorted()
  self.assertEqual(d.raw_symbols.CountsByDiffStatus()[1:], (2, 2, 3))
  shrunk_sym = d.raw_symbols.WhereNameMatches('Patcher::Name_')[0]
  gap_sym = d.raw_symbols.WhereNameMatches('symbol gap 0')[0]
  bss_sym = d.raw_symbols.WhereInSection(models.SECTION_BSS)[0]
  # Padding-only deltas should sort after all non-padding changes.
  padding_idx = d.raw_symbols.index(gap_sym)
  changed_idx = d.raw_symbols.index(shrunk_sym)
  bss_idx = d.raw_symbols.index(bss_sym)
  self.assertLess(changed_idx, padding_idx)
  # And before bss.
  self.assertLess(padding_idx, bss_idx)
  return describe.GenerateLines(d, verbose=True)
def test_Console(self):
  """Runs the full set of canned console queries and returns their output."""
  with tempfile.NamedTemporaryFile(suffix='.size') as size_file:
    with tempfile.NamedTemporaryFile(suffix='.txt') as output_file:
      file_format.SaveSizeInfo(self._CloneSizeInfo(), size_file.name)
      commands = [
          'ShowExamples()',
          'ExpandRegex("_foo_")',
          'canned_queries.CategorizeGenerated()',
          'canned_queries.CategorizeByChromeComponent()',
          'canned_queries.LargeFiles()',
          'canned_queries.TemplatesByName()',
          'canned_queries.StaticInitializers()',
          'canned_queries.PakByPath()',
          'Print(ReadStringLiterals(elf_path={}))'.format(
              repr(_TEST_ELF_PATH)),
          'Print(size_info, to_file=%r)' % output_file.name,
      ]
      results = _RunApp('console',
                        [size_file.name, '--query', '; '.join(commands)])
      with open(output_file.name) as f:
        results.extend(line.rstrip() for line in f)
      return results
def main(argv):
  """Command-line entry point: analyzes input_file into a .size file.

  Args:
    argv: Full command-line argument list (parsed via helpers).
  """
  # Bug fix: argparse.ArgumentParser's first positional parameter is |prog|
  # (the program name); passing the argv list there garbles the usage string.
  parser = argparse.ArgumentParser()
  parser.add_argument('input_file', help='Path to input .map file.')
  parser.add_argument('output_file', help='Path to output .size(.gz) file.')
  AddOptions(parser)
  args = helpers.AddCommonOptionsAndParseArgs(parser, argv)
  if not args.output_file.endswith('.size'):
    parser.error('output_file must end with .size')

  size_info = AnalyzeWithArgs(args, args.input_file)
  # A .size input already carries its own tag; only synthesize one for a
  # fresh analysis.
  if not args.input_file.endswith('.size'):
    git_rev = _DetectGitRevision(args.input_file)
    size_info.tag = 'Filename=%s git_rev=%s' % (
        os.path.basename(args.input_file), git_rev)

  logging.info('Recording metadata: %s',
               describe.DescribeSizeInfoMetadata(size_info))
  logging.info('Saving result to %s', args.output_file)
  file_format.SaveSizeInfo(size_info, args.output_file)
  logging.info('Done')
def _SaveSizeInfo(self, filtered_symbols=None, size_info=None, to_file=None):
  """Writes a .size file, optionally restricted to a subset of symbols.

  Args:
    filtered_symbols: Symbols to keep. When None, all symbols are saved.
    size_info: Source size_info. When None, uses self._size_infos[0].
    to_file: Output path. When None, writes to 'default.size'.
  """
  if not size_info:
    size_info = self._size_infos[0]
  if not to_file:
    to_file = 'default.size'
  assert to_file.endswith('.size'), 'to_file should end with .size'
  # Padding is recorded only when saving a sparse subset of symbols.
  file_format.SaveSizeInfo(size_info,
                           to_file,
                           include_padding=filtered_symbols is not None,
                           sparse_symbols=filtered_symbols)
  # Any symbol under a clank path marks the build as internal, which picks
  # the upload destination.
  clank_syms = size_info.symbols.WherePathMatches('^clank')
  is_internal = len(clank_syms) > 0
  self._PrintUploadCommand(to_file, is_internal)
def test_Diff_NullDiff(self):
  """Diffing a size file against itself should report no changes."""
  with tempfile.NamedTemporaryFile(suffix='.size') as size_file:
    size_info = self._CloneSizeInfo(use_elf=True)
    file_format.SaveSizeInfo(size_info, size_file.name)
    return _RunApp('diff', [size_file.name, size_file.name])
def main(argv):
  """Command-line entry point: analyzes an ELF (or .size) file into a .size.

  Args:
    argv: Full command-line argument list (parsed via helpers).
  """
  # Bug fix: argparse.ArgumentParser's first positional parameter is |prog|;
  # passing the argv list there garbles the usage string.
  parser = argparse.ArgumentParser()
  parser.add_argument('elf_file', help='Path to input ELF file.')
  parser.add_argument('output_file', help='Path to output .size(.gz) file.')
  parser.add_argument('--map-file',
                      help='Path to input .map(.gz) file. Defaults to '
                           '{{elf_file}}.map(.gz)?')
  paths.AddOptions(parser)
  args = helpers.AddCommonOptionsAndParseArgs(parser, argv)
  if not args.output_file.endswith('.size'):
    parser.error('output_file must end with .size')

  if args.map_file:
    map_file_path = args.map_file
  elif args.elf_file.endswith('.size'):
    # Allow a .size file to be passed as input as well. Useful for measuring
    # serialization speed.
    # Bug fix: this branch previously left map_file_path unbound, making the
    # Analyze() call below a guaranteed NameError. Pass the .size file itself
    # (presumably Analyze() special-cases .size inputs -- confirm).
    map_file_path = args.elf_file
  else:
    map_file_path = args.elf_file + '.map'
    if not os.path.exists(map_file_path):
      map_file_path += '.gz'
      if not os.path.exists(map_file_path):
        parser.error('Could not find .map(.gz)? file. Use --map-file.')

  lazy_paths = paths.LazyPaths(args=args, input_file=args.elf_file)
  metadata = None
  # Metadata is derived from the ELF itself, so skip it for .size inputs.
  if args.elf_file and not args.elf_file.endswith('.size'):
    logging.debug('Constructing metadata')
    git_rev = _DetectGitRevision(os.path.dirname(args.elf_file))
    build_id = BuildIdFromElf(args.elf_file, lazy_paths.tool_prefix)
    timestamp_obj = datetime.datetime.utcfromtimestamp(
        os.path.getmtime(args.elf_file))
    timestamp = calendar.timegm(timestamp_obj.timetuple())
    gn_args = _ParseGnArgs(
        os.path.join(lazy_paths.output_directory, 'args.gn'))

    def relative_to_out(path):
      return os.path.relpath(path, lazy_paths.VerifyOutputDirectory())

    metadata = {
        models.METADATA_GIT_REVISION: git_rev,
        models.METADATA_MAP_FILENAME: relative_to_out(map_file_path),
        models.METADATA_ELF_FILENAME: relative_to_out(args.elf_file),
        models.METADATA_ELF_MTIME: timestamp,
        models.METADATA_ELF_BUILD_ID: build_id,
        models.METADATA_GN_ARGS: gn_args,
    }

  size_info = Analyze(map_file_path, lazy_paths)

  if metadata:
    logging.debug('Validating section sizes')
    elf_section_sizes = _SectionSizesFromElf(args.elf_file,
                                             lazy_paths.tool_prefix)
    # Bug fix: dict.iteritems() is Python 2 only; this file already uses
    # print() as a function, so use .items(), which works on both.
    for k, v in elf_section_sizes.items():
      assert v == size_info.section_sizes.get(k), (
          'ELF file and .map file do not match.')
    size_info.metadata = metadata

  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.output_file)
  file_format.SaveSizeInfo(size_info, args.output_file)
  logging.info('Done')
def test_ConsoleNullDiff(self):
  """Console Diff() of a file against itself should be a no-op diff."""
  with tempfile.NamedTemporaryFile(suffix='.size') as size_file:
    file_format.SaveSizeInfo(self._CloneSizeInfo(), size_file.name)
    # NOTE(review): sibling tests call _RunApp(name, [args]); this call passes
    # arguments positionally instead -- confirm _RunApp's signature here.
    return _RunApp('console.py', '--query', 'Diff(size_info1, size_info2)',
                   size_file.name, size_file.name)
def Run(args, parser):
  """Analyzes a map/ELF/APK combination and archives it into args.size_file.

  Args:
    args: Parsed command-line arguments.
    parser: The argparse parser (used only for .error()).
  """
  if not args.size_file.endswith('.size'):
    parser.error('size_file must end with .size')

  elf_path = args.elf_file
  map_path = args.map_file
  apk_path = args.apk_file
  any_input = apk_path or elf_path or map_path
  if not any_input:
    # Bug fix: message previously read "Most pass".
    parser.error(
        'Must pass at least one of --apk-file, --elf-file, --map-file')
  lazy_paths = paths.LazyPaths(tool_prefix=args.tool_prefix,
                               output_directory=args.output_directory,
                               any_path_within_output_directory=any_input)
  if apk_path:
    with zipfile.ZipFile(apk_path) as z:
      lib_infos = [
          f for f in z.infolist()
          if f.filename.endswith('.so') and f.file_size > 0
      ]
    assert lib_infos, 'APK has no .so files.'
    # TODO(agrieve): Add support for multiple .so files, and take into account
    # secondary architectures.
    apk_so_path = max(lib_infos, key=lambda x: x.file_size).filename
    logging.debug('Sub-apk path=%s', apk_so_path)
    if not elf_path and lazy_paths.output_directory:
      elf_path = os.path.join(
          lazy_paths.output_directory, 'lib.unstripped',
          os.path.basename(apk_so_path.replace('crazy.', '')))
      logging.debug('Detected --elf-file=%s', elf_path)

  if map_path:
    if not map_path.endswith('.map') and not map_path.endswith('.map.gz'):
      parser.error('Expected --map-file to end with .map or .map.gz')
  else:
    map_path = elf_path + '.map'
    if not os.path.exists(map_path):
      map_path += '.gz'
      if not os.path.exists(map_path):
        parser.error('Could not find .map(.gz)? file. Use --map-file.')

  tool_prefix = lazy_paths.VerifyToolPrefix()
  output_directory = None
  if not args.no_source_paths:
    output_directory = lazy_paths.VerifyOutputDirectory()

  metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix,
                            output_directory)
  # Bug fix: apk_elf_result was previously unbound when apk_path was given
  # but elf_path could not be resolved, causing a NameError below.
  apk_elf_result = None
  if apk_path and elf_path:
    # Extraction takes around 1 second, so do it in parallel.
    apk_elf_result = concurrent.ForkAndCall(
        _ElfInfoFromApk, (apk_path, apk_so_path, tool_prefix))

  size_info = CreateSizeInfo(map_path, elf_path, tool_prefix,
                             output_directory, normalize_names=False)

  if metadata:
    size_info.metadata = metadata

    # Guarded on the fork result (rather than apk_path alone) so a missing
    # elf_path skips this substitution instead of crashing.
    if apk_elf_result is not None:
      logging.debug('Extracting section sizes from .so within .apk')
      unstripped_section_sizes = size_info.section_sizes
      apk_build_id, size_info.section_sizes = apk_elf_result.get()
      assert apk_build_id == metadata[models.METADATA_ELF_BUILD_ID], (
          'BuildID for %s within %s did not match the one at %s' %
          (apk_so_path, apk_path, elf_path))

      packed_section_name = None
      architecture = metadata[models.METADATA_ELF_ARCHITECTURE]
      # Relocation packing is enabled only for arm32 & arm64.
      if architecture == 'arm':
        packed_section_name = '.rel.dyn'
      elif architecture == 'arm64':
        packed_section_name = '.rela.dyn'

      if packed_section_name:
        logging.debug('Recording size of unpacked relocations')
        if packed_section_name not in size_info.section_sizes:
          logging.warning('Packed section not present: %s',
                          packed_section_name)
        else:
          size_info.section_sizes['%s (unpacked)' % packed_section_name] = (
              unstripped_section_sizes.get(packed_section_name))

  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.size_file)
  file_format.SaveSizeInfo(size_info, args.size_file)
  logging.info('Done')
def Run(args, parser):
  """Analyzes map/ELF/APK/pak inputs and archives them into args.size_file.

  Args:
    args: Parsed command-line arguments.
    parser: The argparse parser (used only for .error()).
  """
  if not args.size_file.endswith('.size'):
    parser.error('size_file must end with .size')

  elf_path = args.elf_file
  map_path = args.map_file
  apk_path = args.apk_file
  pak_files = args.pak_file
  pak_info_file = args.pak_info_file
  any_input = apk_path or elf_path or map_path
  if not any_input:
    # Bug fix: message previously read "Most pass".
    parser.error(
        'Must pass at least one of --apk-file, --elf-file, --map-file')
  output_directory_finder = path_util.OutputDirectoryFinder(
      value=args.output_directory,
      any_path_within_output_directory=any_input)
  if apk_path:
    with zipfile.ZipFile(apk_path) as z:
      lib_infos = [
          f for f in z.infolist()
          if f.filename.endswith('.so') and f.file_size > 0
      ]
    assert lib_infos, 'APK has no .so files.'
    # TODO(agrieve): Add support for multiple .so files, and take into account
    # secondary architectures.
    apk_so_path = max(lib_infos, key=lambda x: x.file_size).filename
    logging.debug('Sub-apk path=%s', apk_so_path)
    if not elf_path and output_directory_finder.Tentative():
      elf_path = os.path.join(
          output_directory_finder.Tentative(), 'lib.unstripped',
          os.path.basename(apk_so_path.replace('crazy.', '')))
      logging.debug('Detected --elf-file=%s', elf_path)

  if map_path:
    if not map_path.endswith('.map') and not map_path.endswith('.map.gz'):
      parser.error('Expected --map-file to end with .map or .map.gz')
  else:
    map_path = elf_path + '.map'
    if not os.path.exists(map_path):
      map_path += '.gz'
      if not os.path.exists(map_path):
        parser.error('Could not find .map(.gz)? file. Ensure you have built '
                     'with is_official_build=true, or use --map-file to '
                     'point me a linker map file.')

  linker_name = _DetectLinkerName(map_path)
  tool_prefix_finder = path_util.ToolPrefixFinder(
      value=args.tool_prefix,
      output_directory_finder=output_directory_finder,
      linker_name=linker_name)
  tool_prefix = tool_prefix_finder.Finalized()
  output_directory = None
  if not args.no_source_paths:
    output_directory = output_directory_finder.Finalized()

  metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix,
                            output_directory)
  # Bug fix: apk_elf_result was previously unbound when apk_path was given
  # but elf_path could not be resolved, causing a NameError in AddApkInfo().
  apk_elf_result = None
  if apk_path and elf_path:
    # Extraction takes around 1 second, so do it in parallel.
    apk_elf_result = concurrent.ForkAndCall(
        _ElfInfoFromApk, (apk_path, apk_so_path, tool_prefix))

  section_sizes, raw_symbols = CreateSectionSizesAndSymbols(
      map_path, elf_path, tool_prefix, output_directory,
      track_string_literals=args.track_string_literals)
  if apk_path:
    # NOTE(review): apk_elf_result may be None here when elf_path was not
    # resolved; presumably AddApkInfo tolerates that -- confirm.
    AddApkInfo(section_sizes, raw_symbols, apk_path, output_directory,
               metadata, apk_elf_result)
  elif pak_files and pak_info_file:
    AddPakSymbolsFromFiles(
        section_sizes, raw_symbols, pak_files, pak_info_file)

  size_info = CreateSizeInfo(
      section_sizes, raw_symbols, metadata=metadata, normalize_names=False)

  # Coverage description is expensive to compute; skip unless INFO is on.
  if logging.getLogger().isEnabledFor(logging.INFO):
    for line in describe.DescribeSizeInfoCoverage(size_info):
      logging.info(line)
  logging.info('Recorded info for %d symbols', len(size_info.raw_symbols))
  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.size_file)
  file_format.SaveSizeInfo(size_info, args.size_file)
  size_in_mb = os.path.getsize(args.size_file) / 1024.0 / 1024.0
  logging.info('Done. File size is %.2fMiB.', size_in_mb)