def _DoArchiveTest(self, use_output_directory=True, use_elf=True,
                   use_pak=False, debug_measures=False):
  """Archives into a temp .size file, then verifies save/load round-trip.

  Returns an iterable of description lines (metadata, coverage stats, and
  symbol reprs) for golden-file comparison.
  """
  with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
    self._DoArchive(
        temp_file.name,
        use_output_directory=use_output_directory,
        use_elf=use_elf,
        use_pak=use_pak,
        debug_measures=debug_measures)
    size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
  # Check that saving & loading is the same as directly parsing.
  expected_size_info = self._CloneSizeInfo(
      use_output_directory=use_output_directory,
      use_elf=use_elf,
      use_pak=use_pak)
  self.assertEquals(expected_size_info.metadata, size_info.metadata)
  # Compare on raw symbols so that clustering does not affect the diff.
  expected_size_info.symbols = expected_size_info.raw_symbols
  size_info.symbols = size_info.raw_symbols
  self.assertEquals(
      list(describe.GenerateLines(expected_size_info, verbose=True)),
      list(describe.GenerateLines(size_info, verbose=True)))
  sym_strs = (repr(sym) for sym in size_info.symbols)
  stats = describe.DescribeSizeInfoCoverage(size_info)
  metadata = (describe.DescribeMetadata(size_info.metadata)
              if size_info.metadata else [])
  return itertools.chain(metadata, stats, sym_strs)
def Run(args, parser):
  """Runs the 'archive' command: analyzes the inputs and saves a .size file.

  Args:
    args: Parsed command-line arguments.
    parser: argparse parser, used only for error reporting.
  """
  if not args.size_file.endswith('.size'):
    parser.error('size_file must end with .size')

  # Resolve every input path up-front (output dir, toolchain, apk/elf/map).
  (output_directory, tool_prefix, apk_path, apk_so_path, elf_path,
   map_path) = DeduceMainPaths(args, parser)

  metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix,
                            output_directory)

  # Extraction takes around 1 second, so do it in parallel.
  apk_elf_result = (
      concurrent.ForkAndCall(_ElfInfoFromApk,
                             (apk_path, apk_so_path, tool_prefix))
      if apk_path and elf_path else None)

  section_sizes, raw_symbols = CreateSectionSizesAndSymbols(
      map_path=map_path,
      tool_prefix=tool_prefix,
      elf_path=elf_path,
      apk_path=apk_path,
      output_directory=output_directory,
      track_string_literals=args.track_string_literals,
      metadata=metadata,
      apk_elf_result=apk_elf_result,
      pak_files=args.pak_file,
      pak_info_file=args.pak_info_file)
  size_info = CreateSizeInfo(section_sizes, raw_symbols, metadata=metadata,
                             normalize_names=False)

  logger = logging.getLogger()
  if logger.isEnabledFor(logging.INFO):
    for line in describe.DescribeSizeInfoCoverage(size_info):
      logging.info(line)
  logging.info('Recorded info for %d symbols', len(size_info.raw_symbols))
  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.size_file)
  file_format.SaveSizeInfo(size_info, args.size_file)
  size_in_mb = os.path.getsize(args.size_file) / 1024.0 / 1024.0
  logging.info('Done. File size is %.2fMiB.', size_in_mb)
def _DoArchiveTest(self, use_output_directory=True, use_elf=True,
                   debug_measures=False):
  """Runs 'archive' over the test fixtures and verifies save/load round-trip.

  Returns an iterable of description lines (metadata, coverage stats, and
  symbol reprs) for golden-file comparison.
  """
  with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
    cmd_args = [temp_file.name, '--map-file', _TEST_MAP_PATH]
    if not use_output_directory:
      cmd_args.append('--no-source-paths')
    elif not use_elf:
      # Let autodetection find output_directory when --elf-file is used.
      cmd_args.extend(['--output-directory', _TEST_OUTPUT_DIR])
    if use_elf:
      cmd_args.extend(['--elf-file', _TEST_ELF_PATH])
    _RunApp('archive', cmd_args, debug_measures=debug_measures)
    size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
  # Check that saving & loading is the same as directly parsing the .map.
  expected_size_info = self._CloneSizeInfo(
      use_output_directory=use_output_directory, use_elf=use_elf)
  self.assertEquals(expected_size_info.metadata, size_info.metadata)
  # Compare on raw symbols so that clustering does not affect the diff.
  expected_size_info.symbols = expected_size_info.raw_symbols
  size_info.symbols = size_info.raw_symbols
  self.assertEquals(list(describe.GenerateLines(expected_size_info)),
                    list(describe.GenerateLines(size_info)))
  sym_strs = (repr(sym) for sym in size_info.symbols)
  stats = describe.DescribeSizeInfoCoverage(size_info)
  metadata = (describe.DescribeMetadata(size_info.metadata)
              if size_info.metadata else [])
  return itertools.chain(metadata, stats, sym_strs)
def main(argv):
  """Parses a linker .map file (or a .size file) and saves a .size file.

  Args:
    argv: Command-line arguments (elf_file, output_file, --map-file, ...).
  """
  parser = argparse.ArgumentParser(argv)
  parser.add_argument('elf_file', help='Path to input ELF file.')
  parser.add_argument('output_file', help='Path to output .size(.gz) file.')
  parser.add_argument('--map-file',
                      help='Path to input .map(.gz) file. Defaults to '
                           '{{elf_file}}.map(.gz)?')
  paths.AddOptions(parser)
  args = helpers.AddCommonOptionsAndParseArgs(parser, argv)
  if not args.output_file.endswith('.size'):
    parser.error('output_file must end with .size')

  if args.map_file:
    map_file_path = args.map_file
  elif args.elf_file.endswith('.size'):
    # Allow a .size file to be passed as input as well. Useful for measuring
    # serialization speed.
    # BUG FIX: this branch previously left map_file_path unbound, so the
    # Analyze() call below raised a NameError. Pass the .size file through.
    # NOTE(review): presumably Analyze() detects the .size suffix and loads
    # it directly rather than parsing it as a map — confirm.
    map_file_path = args.elf_file
  else:
    # Default to <elf_file>.map, falling back to the gzipped variant.
    map_file_path = args.elf_file + '.map'
    if not os.path.exists(map_file_path):
      map_file_path += '.gz'
      if not os.path.exists(map_file_path):
        parser.error('Could not find .map(.gz)? file. Use --map-file.')

  lazy_paths = paths.LazyPaths(args=args, input_file=args.elf_file)
  metadata = None
  # Metadata can only be derived from a real ELF (not from a .size input).
  if args.elf_file and not args.elf_file.endswith('.size'):
    logging.debug('Constructing metadata')
    git_rev = _DetectGitRevision(os.path.dirname(args.elf_file))
    build_id = BuildIdFromElf(args.elf_file, lazy_paths.tool_prefix)
    timestamp_obj = datetime.datetime.utcfromtimestamp(
        os.path.getmtime(args.elf_file))
    timestamp = calendar.timegm(timestamp_obj.timetuple())
    gn_args = _ParseGnArgs(
        os.path.join(lazy_paths.output_directory, 'args.gn'))

    def relative_to_out(path):
      # Recorded paths are relative to the output directory for portability.
      return os.path.relpath(path, lazy_paths.VerifyOutputDirectory())

    metadata = {
        models.METADATA_GIT_REVISION: git_rev,
        models.METADATA_MAP_FILENAME: relative_to_out(map_file_path),
        models.METADATA_ELF_FILENAME: relative_to_out(args.elf_file),
        models.METADATA_ELF_MTIME: timestamp,
        models.METADATA_ELF_BUILD_ID: build_id,
        models.METADATA_GN_ARGS: gn_args,
    }

  size_info = Analyze(map_file_path, lazy_paths)

  if metadata:
    # Sanity check: the .map file must describe the same binary as the ELF.
    logging.debug('Validating section sizes')
    elf_section_sizes = _SectionSizesFromElf(args.elf_file,
                                             lazy_paths.tool_prefix)
    for k, v in elf_section_sizes.iteritems():
      assert v == size_info.section_sizes.get(k), (
          'ELF file and .map file do not match.')
    size_info.metadata = metadata

  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.output_file)
  file_format.SaveSizeInfo(size_info, args.output_file)
  logging.info('Done')
def Run(args, parser):
  """Runs the 'archive' command using LazyPaths-based path resolution.

  Derives missing paths (elf from apk, map from elf), analyzes the binary,
  and saves the result to args.size_file.

  Args:
    args: Parsed command-line arguments.
    parser: argparse parser, used only for error reporting.
  """
  if not args.size_file.endswith('.size'):
    parser.error('size_file must end with .size')
  elf_path = args.elf_file
  map_path = args.map_file
  apk_path = args.apk_file
  any_input = apk_path or elf_path or map_path
  if not any_input:
    # BUG FIX: corrected user-facing typo "Most pass" -> "Must pass".
    parser.error(
        'Must pass at least one of --apk-file, --elf-file, --map-file')
  lazy_paths = paths.LazyPaths(tool_prefix=args.tool_prefix,
                               output_directory=args.output_directory,
                               any_path_within_output_directory=any_input)
  if apk_path:
    with zipfile.ZipFile(apk_path) as z:
      lib_infos = [
          f for f in z.infolist()
          if f.filename.endswith('.so') and f.file_size > 0
      ]
    assert lib_infos, 'APK has no .so files.'
    # TODO(agrieve): Add support for multiple .so files, and take into account
    # secondary architectures.
    apk_so_path = max(lib_infos, key=lambda x: x.file_size).filename
    logging.debug('Sub-apk path=%s', apk_so_path)
    if not elf_path and lazy_paths.output_directory:
      # Map the stripped in-apk .so back to its unstripped counterpart.
      elf_path = os.path.join(
          lazy_paths.output_directory, 'lib.unstripped',
          os.path.basename(apk_so_path.replace('crazy.', '')))
      logging.debug('Detected --elf-file=%s', elf_path)

  if map_path:
    if not map_path.endswith('.map') and not map_path.endswith('.map.gz'):
      parser.error('Expected --map-file to end with .map or .map.gz')
  else:
    # Default to <elf_path>.map, falling back to the gzipped variant.
    map_path = elf_path + '.map'
    if not os.path.exists(map_path):
      map_path += '.gz'
      if not os.path.exists(map_path):
        parser.error('Could not find .map(.gz)? file. Use --map-file.')

  tool_prefix = lazy_paths.VerifyToolPrefix()
  output_directory = None
  if not args.no_source_paths:
    output_directory = lazy_paths.VerifyOutputDirectory()
  metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix,
                            output_directory)
  # BUG FIX: apk_elf_result was previously unbound when apk_path was given but
  # elf_path could not be derived, causing a NameError at .get() below.
  # Initialize to None (same convention as the DeduceMainPaths-based Run()).
  # NOTE(review): if that situation is actually reachable, None.get() would
  # still fail below — confirm whether an apk without a derivable elf_path is
  # a supported input here.
  apk_elf_result = None
  if apk_path and elf_path:
    # Extraction takes around 1 second, so do it in parallel.
    apk_elf_result = concurrent.ForkAndCall(
        _ElfInfoFromApk, (apk_path, apk_so_path, tool_prefix))

  size_info = CreateSizeInfo(map_path, elf_path, tool_prefix, output_directory,
                             normalize_names=False)
  if metadata:
    size_info.metadata = metadata
    if apk_path:
      logging.debug('Extracting section sizes from .so within .apk')
      unstripped_section_sizes = size_info.section_sizes
      apk_build_id, size_info.section_sizes = apk_elf_result.get()
      assert apk_build_id == metadata[models.METADATA_ELF_BUILD_ID], (
          'BuildID for %s within %s did not match the one at %s' %
          (apk_so_path, apk_path, elf_path))

      packed_section_name = None
      architecture = metadata[models.METADATA_ELF_ARCHITECTURE]
      # Relocation packing is used only on arm32 & arm64.
      if architecture == 'arm':
        packed_section_name = '.rel.dyn'
      elif architecture == 'arm64':
        packed_section_name = '.rela.dyn'
      if packed_section_name:
        logging.debug('Recording size of unpacked relocations')
        if packed_section_name not in size_info.section_sizes:
          logging.warning('Packed section not present: %s',
                          packed_section_name)
        else:
          size_info.section_sizes['%s (unpacked)' % packed_section_name] = (
              unstripped_section_sizes.get(packed_section_name))

  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.size_file)
  file_format.SaveSizeInfo(size_info, args.size_file)
  logging.info('Done')
def Run(args, parser):
  """Runs the 'archive' command using finder-based path resolution.

  Derives missing paths (elf from apk, map from elf), analyzes the binary
  and optional .pak files, and saves the result to args.size_file.

  Args:
    args: Parsed command-line arguments.
    parser: argparse parser, used only for error reporting.
  """
  if not args.size_file.endswith('.size'):
    parser.error('size_file must end with .size')
  elf_path = args.elf_file
  map_path = args.map_file
  apk_path = args.apk_file
  pak_files = args.pak_file
  pak_info_file = args.pak_info_file
  any_input = apk_path or elf_path or map_path
  if not any_input:
    # BUG FIX: corrected user-facing typo "Most pass" -> "Must pass".
    parser.error('Must pass at least one of --apk-file, --elf-file, --map-file')
  output_directory_finder = path_util.OutputDirectoryFinder(
      value=args.output_directory,
      any_path_within_output_directory=any_input)
  if apk_path:
    with zipfile.ZipFile(apk_path) as z:
      lib_infos = [f for f in z.infolist()
                   if f.filename.endswith('.so') and f.file_size > 0]
    assert lib_infos, 'APK has no .so files.'
    # TODO(agrieve): Add support for multiple .so files, and take into account
    # secondary architectures.
    apk_so_path = max(lib_infos, key=lambda x: x.file_size).filename
    logging.debug('Sub-apk path=%s', apk_so_path)
    if not elf_path and output_directory_finder.Tentative():
      # Map the stripped in-apk .so back to its unstripped counterpart.
      elf_path = os.path.join(
          output_directory_finder.Tentative(), 'lib.unstripped',
          os.path.basename(apk_so_path.replace('crazy.', '')))
      logging.debug('Detected --elf-file=%s', elf_path)

  if map_path:
    if not map_path.endswith('.map') and not map_path.endswith('.map.gz'):
      parser.error('Expected --map-file to end with .map or .map.gz')
  else:
    # Default to <elf_path>.map, falling back to the gzipped variant.
    map_path = elf_path + '.map'
    if not os.path.exists(map_path):
      map_path += '.gz'
      if not os.path.exists(map_path):
        parser.error('Could not find .map(.gz)? file. Ensure you have built '
                     'with '
                     'is_official_build=true, or use --map-file to point me a '
                     'linker map file.')

  linker_name = _DetectLinkerName(map_path)
  tool_prefix_finder = path_util.ToolPrefixFinder(
      value=args.tool_prefix,
      output_directory_finder=output_directory_finder,
      linker_name=linker_name)
  tool_prefix = tool_prefix_finder.Finalized()
  output_directory = None
  if not args.no_source_paths:
    output_directory = output_directory_finder.Finalized()
  metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix,
                            output_directory)
  # BUG FIX: apk_elf_result was previously unbound when apk_path was given but
  # elf_path could not be derived, causing a NameError at the AddApkInfo()
  # call below. Initialize to None (same convention as the
  # DeduceMainPaths-based Run()).
  apk_elf_result = None
  if apk_path and elf_path:
    # Extraction takes around 1 second, so do it in parallel.
    apk_elf_result = concurrent.ForkAndCall(
        _ElfInfoFromApk, (apk_path, apk_so_path, tool_prefix))

  section_sizes, raw_symbols = CreateSectionSizesAndSymbols(
      map_path, elf_path, tool_prefix, output_directory,
      track_string_literals=args.track_string_literals)
  if apk_path:
    AddApkInfo(section_sizes, raw_symbols, apk_path, output_directory,
               metadata, apk_elf_result)
  elif pak_files and pak_info_file:
    AddPakSymbolsFromFiles(
        section_sizes, raw_symbols, pak_files, pak_info_file)

  size_info = CreateSizeInfo(
      section_sizes, raw_symbols, metadata=metadata, normalize_names=False)

  if logging.getLogger().isEnabledFor(logging.INFO):
    for line in describe.DescribeSizeInfoCoverage(size_info):
      logging.info(line)
  logging.info('Recorded info for %d symbols', len(size_info.raw_symbols))
  logging.info('Recording metadata: \n %s',
               '\n '.join(describe.DescribeMetadata(size_info.metadata)))
  logging.info('Saving result to %s', args.size_file)
  file_format.SaveSizeInfo(size_info, args.size_file)
  size_in_mb = os.path.getsize(args.size_file) / 1024.0 / 1024.0
  logging.info('Done. File size is %.2fMiB.', size_in_mb)