def _CollectLegacyProfile(self):
  """Collects a legacy profile and writes the unpatched orderfile.

  Collects profile files via self._profiler, converts the reached offsets
  from the dump files into an ordered symbol list, and writes it to
  _GetUnpatchedOrderfileFilename(). On any failure, the collected files are
  saved for debugging before the exception is re-raised.
  """
  # Initialized before the try so the except clause can save whatever
  # files were collected even if a later step fails.
  files = []
  try:
    files = self._profiler.CollectProfile(
        self._compiler.chrome_apk,
        constants.PACKAGE_INFO['chrome'])
    self._MaybeSaveProfile(files)
    self._step_recorder.BeginStep('Process profile')
    # The unstripped library must exist to resolve offsets to symbols.
    assert os.path.exists(self._compiler.lib_chrome_so)
    offsets = process_profiles.GetReachedOffsetsFromDumpFiles(
        files, self._compiler.lib_chrome_so)
    if not offsets:
      raise Exception('No profiler offsets found in {}'.format(
          '\n'.join(files)))
    processor = process_profiles.SymbolOffsetProcessor(
        self._compiler.lib_chrome_so)
    ordered_symbols = processor.GetOrderedSymbols(offsets)
    if not ordered_symbols:
      raise Exception('No symbol names from offsets found in {}'.format(
          '\n'.join(files)))
    with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile:
      orderfile.write('\n'.join(ordered_symbols))
  except Exception:
    # Save the profile files for post-mortem debugging, then propagate.
    for f in files:
      self._SaveForDebugging(f)
    raise
  finally:
    self._profiler.Cleanup()
def main():
  """Converts reached offsets into an ordered symbol list, written atomically.

  Reads the reached offsets named on the command line, orders the matching
  symbols, and writes them to args.output via a temp-file-and-rename so the
  output never appears half-written.

  Returns:
    0 on success, 1 when no ordered symbols could be produced.
  """
  parser = _CreateArgumentParser()
  args = parser.parse_args()
  offsets = _ReadReachedOffsets(args.reached_offsets)
  assert offsets
  _WarnAboutDuplicates(offsets)
  processor = process_profiles.SymbolOffsetProcessor(args.native_library)
  ordered_symbols = processor.GetOrderedSymbols(offsets)
  if ordered_symbols is None:
    return 1
  # Create the temp file in the destination directory so os.rename() stays
  # on one filesystem and is atomic. os.path.dirname() is '' when args.output
  # is a bare filename, which mkstemp rejects; fall back to '.'.
  (fd, temp_filename) = tempfile.mkstemp(
      dir=os.path.dirname(args.output) or '.')
  try:
    # The with-block guarantees exactly one close (the original closed the
    # file twice, once explicitly and once in its finally clause).
    with os.fdopen(fd, 'w') as output_file:
      output_file.write('\n'.join(ordered_symbols))
    os.rename(temp_filename, args.output)
    temp_filename = None  # Renamed away; nothing left to clean up.
  finally:
    # Remove the temp file if anything failed before the rename completed.
    if temp_filename:
      os.remove(temp_filename)
  return 0
def _ProcessPhasedOrderfile(self, files):
  """Process the phased orderfiles produced by system health benchmarks.

  The offsets will be placed in _GetUnpatchedOrderfileFilename().

  Args:
    files: Profile files pulled locally.
  """
  self._step_recorder.BeginStep('Process Phased Orderfile')
  profiles = process_profiles.ProfileManager(files)
  processor = process_profiles.SymbolOffsetProcessor(
      self._compiler.lib_chrome_so)
  phaser = phased_orderfile.PhasedAnalyzer(profiles, processor)
  # Two orderings are available; the option picks which one to emit.
  if self._options.offsets_for_memory:
    profile_offsets = phaser.GetOffsetsForMemoryFootprint()
  else:
    profile_offsets = phaser.GetOffsetsForStartup()
  # Record per-phase sizes (KiB) for the output report.
  self._output_data['orderfile_size'] = {
      'startup_kib': processor.OffsetsPrimarySize(
          profile_offsets.startup) / 1024,
      'common_kib': processor.OffsetsPrimarySize(
          profile_offsets.common) / 1024,
      'interaction_kib': processor.OffsetsPrimarySize(
          profile_offsets.interaction) / 1024}
  # Concatenation order determines the final symbol layout:
  # startup first, then common, then interaction.
  offsets_list = (profile_offsets.startup + profile_offsets.common +
                  profile_offsets.interaction)
  ordered_symbols = processor.GetOrderedSymbols(offsets_list)
  if not ordered_symbols:
    raise Exception('Failed to get ordered symbols')
  with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile:
    orderfile.write('\n'.join(ordered_symbols))
def main():
  """Prints phase stability/size stats and optionally dumps phase offsets.

  Reads profiles from the comma-separated --profile-directory list, computes
  stability across phases, and when --offset-output-base is given writes one
  offset per line to <base>_for_memory and <base>_for_startup.
  """
  logging.basicConfig(level=logging.INFO)
  parser = _CreateArgumentParser()
  args = parser.parse_args()
  # --profile-directory may name several directories, comma-separated.
  profiles = process_profiles.ProfileManager(
      itertools.chain.from_iterable(
          glob.glob(os.path.join(d, PROFILE_GLOB))
          for d in args.profile_directory.split(',')))
  processor = process_profiles.SymbolOffsetProcessor(
      os.path.join(args.instrumented_build_dir, 'lib.unstripped',
                   args.library_name))
  phaser = PhasedAnalyzer(profiles, processor)
  stability = phaser.ComputeStability()
  print('Stability: {:.2} {:.2} {:.2}'.format(*[s[0] for s in stability]))
  print('Sizes: {} {} {}'.format(*[s[1] for s in stability]))
  if args.offset_output_base is not None:
    for name, offsets in zip(
        ['_for_memory', '_for_startup'],
        [phaser.GetOffsetsForMemoryFootprint(),
         phaser.GetOffsetsForStartup()]):
      # open() instead of the deprecated py2-only file() builtin.
      with open(args.offset_output_base + name, 'w') as output:
        output.write('\n'.join(
            str(i) for i in (offsets.startup + offsets.common +
                             offsets.interaction)))
        output.write('\n')
def main():
  """Logs per-phase offset sizes and optionally dumps the offsets to files.

  For each of the memory-footprint and startup orderings, logs the size in
  KiB of the startup/common/interaction phases; when --offset-output-base is
  given, also writes one offset per line to <base><name>.
  """
  logging.basicConfig(level=logging.INFO)
  parser = _CreateArgumentParser()
  args = parser.parse_args()
  # --profile-directory may name several directories, comma-separated.
  profiles = process_profiles.ProfileManager(
      itertools.chain.from_iterable(
          glob.glob(os.path.join(d, PROFILE_GLOB))
          for d in args.profile_directory.split(',')))
  processor = process_profiles.SymbolOffsetProcessor(
      os.path.join(args.instrumented_build_dir, 'lib.unstripped',
                   args.library_name))
  phaser = PhasedAnalyzer(profiles, processor)
  for name, offsets in (('_for_memory', phaser.GetOffsetsForMemoryFootprint()),
                        ('_for_startup', phaser.GetOffsetsForStartup())):
    logging.info(
        '%s Offset sizes (KiB):\n'
        '%s startup\n%s common\n%s interaction',
        name,
        processor.OffsetsPrimarySize(offsets.startup) / 1024,
        processor.OffsetsPrimarySize(offsets.common) / 1024,
        processor.OffsetsPrimarySize(offsets.interaction) / 1024)
    if args.offset_output_base is not None:
      # open() instead of the deprecated py2-only file() builtin.
      with open(args.offset_output_base + name, 'w') as output:
        output.write('\n'.join(
            str(i) for i in (offsets.startup + offsets.common +
                             offsets.interaction)))
        output.write('\n')
def main():
  """Prints the phase stability computed from one profile directory."""
  logging.basicConfig(level=logging.INFO)
  arg_parser = _CreateArgumentParser()
  parsed_args = arg_parser.parse_args()
  # Gather every profile dump matching the glob in the given directory.
  dump_paths = glob.glob(
      os.path.join(parsed_args.profile_directory, PROFILE_GLOB))
  manager = process_profiles.ProfileManager(dump_paths)
  library_path = os.path.join(parsed_args.instrumented_build_dir,
                              'lib.unstripped', parsed_args.library_name)
  offset_processor = process_profiles.SymbolOffsetProcessor(library_path)
  analyzer = PhasedAnalyzer(manager, offset_processor)
  stability = analyzer.ComputeStability()
  print('Stability: {:.2f} {:.2f} {:.2f}'.format(*stability))
def main():
  """Generates an orderfile from a merged cyglog or a reached-offsets file.

  Exactly one of --merged-cyglog / --reached-offsets must be supplied. The
  ordered sections are written to args.output atomically via a temp file
  plus rename.

  Returns:
    0 on success, 1 when ordered sections could not be produced.
  """
  parser = _CreateArgumentParser()
  args = parser.parse_args()
  # Exactly one input source must be given.
  assert bool(args.merged_cyglog) ^ bool(args.reached_offsets)
  if not args.target_arch:
    # BUG FIX: this previously assigned to args.arch, so args.target_arch
    # stayed unset and SetArchitecture() below received None.
    args.target_arch = cygprofile_utils.DetectArchitecture()
  symbol_extractor.SetArchitecture(args.target_arch)
  obj_dir = cygprofile_utils.GetObjDir(args.native_library)
  offsets = []
  if args.merged_cyglog:
    # Use a with-block so the log file descriptor is not leaked.
    with open(args.merged_cyglog) as log_file:
      log_file_lines = [line.rstrip() for line in log_file]
    offsets = _ParseLogLines(log_file_lines)
  else:
    offsets = _ReadReachedOffsets(args.reached_offsets)
  assert offsets
  _WarnAboutDuplicates(offsets)
  generator = OffsetOrderfileGenerator(
      process_profiles.SymbolOffsetProcessor(args.native_library),
      ObjectFileProcessor(obj_dir))
  ordered_sections = generator.GetOrderedSections(offsets)
  if ordered_sections is None:
    return 1
  # Atomic write: temp file in the destination directory, then rename.
  # os.path.dirname() is '' for a bare filename, which mkstemp rejects.
  (fd, temp_filename) = tempfile.mkstemp(
      dir=os.path.dirname(args.output) or '.')
  try:
    with os.fdopen(fd, 'w') as output_file:
      output_file.write('\n'.join(ordered_sections))
    os.rename(temp_filename, args.output)
    temp_filename = None  # Renamed away; nothing left to clean up.
  finally:
    # Remove the temp file if anything failed before the rename completed.
    if temp_filename:
      os.remove(temp_filename)
  return 0
def _ProcessPhasedOrderfile(self, files):
  """Process the phased orderfiles produced by system health benchmarks.

  The offsets will be placed in _GetUnpatchedOrderfileFilename().

  Args:
    files: Profile files pulled locally.
  """
  self._step_recorder.BeginStep('Process Phased Orderfile')
  profiles = process_profiles.ProfileManager(files)
  processor = process_profiles.SymbolOffsetProcessor(
      self._compiler.lib_chrome_so)
  # Clustering produces the final symbol ordering directly.
  ordered_symbols = cluster.ClusterOffsets(profiles, processor)
  if not ordered_symbols:
    raise Exception('Failed to get ordered symbols')
  # Record the total ordered-symbol size (KiB) for the output report.
  self._output_data['offsets_kib'] = processor.SymbolsSize(
      ordered_symbols) / 1024
  with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile:
    orderfile.write('\n'.join(ordered_symbols))
def main():
  """Maps reached dump offsets to symbols and reports size coverage.

  Reads dump files (from --dumps or --dumps-dir), resolves the reached
  offsets to symbols in the unstripped library, writes one symbol name per
  line to args.output, and logs reached-size / total-size / coverage stats.

  Returns:
    1 when neither --dumps nor --dumps-dir is provided; None otherwise.
  """
  logging.basicConfig(level=logging.INFO)
  parser = _CreateArgumentParser()
  args = parser.parse_args()
  dumps = []
  if args.dumps:
    dumps.extend(args.dumps.split(','))
  elif args.dumps_dir:
    for file_name in os.listdir(args.dumps_dir):
      dumps.append(os.path.join(args.dumps_dir, file_name))
  else:
    logging.error('Either --dumps or --dumps-dir must be provided')
    parser.print_help()
    return 1
  logging.info('Parsing dumps')
  offsets = set()
  for dump_filename in dumps:
    offsets |= set(_DumpToOffsets(dump_filename))
  logging.info('Found %d reached locations', len(offsets))
  library_path = os.path.join(args.build_dir, 'lib.unstripped',
                              args.library_name)
  processor = process_profiles.SymbolOffsetProcessor(library_path)
  logging.info('Finding Symbols')
  offset_to_symbol = processor.GetDumpOffsetToSymbolInfo()
  reached_symbol_infos = _ReachedSymbols(offsets, offset_to_symbol)
  # BUG FIX: set.remove(None) raises KeyError when no offset failed to
  # resolve; discard() is a no-op in that case.
  reached_symbol_infos.discard(None)
  with open(args.output, 'w') as f:
    for s in reached_symbol_infos:
      f.write('%s\n' % s.name)
  # Print some stats.
  reached_size = sum(s.size for s in reached_symbol_infos)
  logging.info('Total reached size = {}'.format(reached_size))
  # Unresolved (None) entries are excluded from the known-symbol total.
  all_symbol_infos = set(i for i in offset_to_symbol if i is not None)
  total_size = sum(s.size for s in all_symbol_infos)
  logging.info('Total size of known symbols = {}'.format(total_size))
  coverage_percent = float(reached_size) / total_size * 100
  logging.info('Coverage: {0:.2f}%'.format(coverage_percent))
def _ProcessPhasedOrderfile(self, files):
  """Process the phased orderfiles produced by system health benchmarks.

  The offsets will be placed in _GetUnpatchedOrderfileFilename().

  Args:
    files: Profile files pulled locally.
  """
  self._step_recorder.BeginStep('Process Phased Orderfile')
  profiles = process_profiles.ProfileManager(files)
  processor = process_profiles.SymbolOffsetProcessor(
      self._compiler.lib_chrome_so)
  ordered_symbols = cluster.ClusterOffsets(profiles, processor)
  if not ordered_symbols:
    raise Exception('Failed to get ordered symbols')
  # Sanity check: outlined functions should never appear in profiles from
  # an instrumented build; their presence indicates a broken pipeline.
  for sym in ordered_symbols:
    assert not sym.startswith('OUTLINED_FUNCTION_'), (
        'Outlined function found in instrumented function, very likely '
        'something has gone very wrong!')
  # Record the total ordered-symbol size (KiB) for the output report.
  self._output_data['offsets_kib'] = processor.SymbolsSize(
      ordered_symbols) / 1024
  with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile:
    orderfile.write('\n'.join(ordered_symbols))
def Generate(self):
  """Generates and maybe upload an order.

  Runs up to three stages, each gated by options:
  1. Profile: build an instrumented APK, collect a profile, and archive the
     unpatched orderfile; or, with manual symbol offsets, generate ordered
     sections directly from a symbol-offsets file.
  2. Patch: build uninstrumented, patch the orderfile (twice, to converge
     under identical code folding), verify and archive it.
  3. Commit: on a buildbot with netrc and no recorded errors, commit the
     hashes of whichever files were uploaded.

  Returns:
    True when no step recorded an error.
  """
  profile_uploaded = False
  orderfile_uploaded = False
  # Exactly one of the two input modes must be selected.
  assert (bool(self._options.profile) ^
          bool(self._options.manual_symbol_offsets))
  if self._options.profile:
    try:
      _UnstashOutputDirectory(self._instrumented_out_dir)
      self._compiler = ClankCompiler(
          self._instrumented_out_dir, self._step_recorder, self._options.arch,
          self._options.jobs, self._options.max_load, self._options.use_goma,
          self._options.goma_dir)
      self._compiler.CompileChromeApk(True)
      self._GenerateAndProcessProfile()
      self._MaybeArchiveOrderfile(self._GetUnpatchedOrderfileFilename())
      profile_uploaded = True
    finally:
      # Always restore the output directory, even on failure.
      self._DeleteTempFiles()
      _StashOutputDirectory(self._instrumented_out_dir)
  elif self._options.manual_symbol_offsets:
    assert self._options.manual_libname
    assert self._options.manual_objdir
    # NOTE(review): py2-only file()/xreadlines(); one offset per line.
    with file(self._options.manual_symbol_offsets) as f:
      symbol_offsets = [int(x) for x in f.xreadlines()]
    processor = process_profiles.SymbolOffsetProcessor(
        self._options.manual_libname)
    generator = cyglog_to_orderfile.OffsetOrderfileGenerator(
        processor, cyglog_to_orderfile.ObjectFileProcessor(
            self._options.manual_objdir))
    ordered_sections = generator.GetOrderedSections(symbol_offsets)
    if not ordered_sections:  # Either None or empty is a problem.
      raise Exception('Failed to get ordered sections')
    with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile:
      orderfile.write('\n'.join(ordered_sections))
  if self._options.patch:
    if self._options.profile:
      self._RemoveBlanks(self._GetUnpatchedOrderfileFilename(),
                         self._GetPathToOrderfile())
    try:
      _UnstashOutputDirectory(self._uninstrumented_out_dir)
      self._compiler = ClankCompiler(
          self._uninstrumented_out_dir, self._step_recorder,
          self._options.arch, self._options.jobs, self._options.max_load,
          self._options.use_goma, self._options.goma_dir)
      self._compiler.CompileLibchrome(False)
      self._PatchOrderfile()
      # Because identical code folding is a bit different with and without
      # the orderfile build, we need to re-patch the orderfile with code
      # folding as close to the final version as possible.
      self._compiler.CompileLibchrome(False, force_relink=True)
      self._PatchOrderfile()
      self._compiler.CompileLibchrome(False, force_relink=True)
      self._VerifySymbolOrder()
      self._MaybeArchiveOrderfile(self._GetPathToOrderfile())
    finally:
      _StashOutputDirectory(self._uninstrumented_out_dir)
    orderfile_uploaded = True
  if (self._options.buildbot and self._options.netrc
      and not self._step_recorder.ErrorRecorded()):
    # Only commit hashes for files that were actually produced/uploaded.
    unpatched_orderfile_filename = (
        self._GetUnpatchedOrderfileFilename() if profile_uploaded else None)
    orderfile_filename = (
        self._GetPathToOrderfile() if orderfile_uploaded else None)
    self._orderfile_updater.CommitFileHashes(
        unpatched_orderfile_filename, orderfile_filename)
  self._step_recorder.EndStep()
  return not self._step_recorder.ErrorRecorded()
def Generate(self):
  """Generates and maybe upload an order.

  Runs up to four option-gated stages: profile collection (or manual symbol
  offsets), orderfile patching, benchmarking, and a buildbot git stash.

  Returns:
    True when no step recorded an error.
  """
  # Exactly one of the two input modes must be selected.
  assert (bool(self._options.profile) ^
          bool(self._options.manual_symbol_offsets))
  if self._options.system_health_orderfile and not self._options.profile:
    raise AssertionError('--system_health_orderfile must be not be used '
                         'with --skip-profile')
  if (self._options.manual_symbol_offsets and
      not self._options.system_health_orderfile):
    raise AssertionError('--manual-symbol-offsets must be used with '
                         '--system_health_orderfile.')

  if self._options.profile:
    try:
      _UnstashOutputDirectory(self._instrumented_out_dir)
      self._compiler = ClankCompiler(
          self._instrumented_out_dir, self._step_recorder, self._options.arch,
          self._options.use_goma, self._options.goma_dir,
          self._options.system_health_orderfile, self._monochrome,
          self._options.public, self._GetPathToOrderfile())
      if not self._options.pregenerated_profiles:
        # If there are pregenerated profiles, the instrumented build should
        # not be changed to avoid invalidating the pregenerated profile
        # offsets.
        self._compiler.CompileChromeApk(
            instrumented=True, use_call_graph=self._options.use_call_graph)
      self._GenerateAndProcessProfile()
      self._MaybeArchiveOrderfile(self._GetUnpatchedOrderfileFilename())
    finally:
      # Always restore the output directory, even on failure.
      _StashOutputDirectory(self._instrumented_out_dir)
  elif self._options.manual_symbol_offsets:
    assert self._options.manual_libname
    assert self._options.manual_objdir
    # open() + iteration instead of the py2-only file()/xreadlines().
    with open(self._options.manual_symbol_offsets) as f:
      symbol_offsets = [int(x) for x in f]
    # BUG FIX: manual_libname is an option, not a compiler attribute; in
    # this branch self._compiler was never created, so reading
    # self._compiler.manual_libname raised AttributeError.
    processor = process_profiles.SymbolOffsetProcessor(
        self._options.manual_libname)
    generator = cyglog_to_orderfile.OffsetOrderfileGenerator(
        processor, cyglog_to_orderfile.ObjectFileProcessor(
            self._options.manual_objdir))
    ordered_sections = generator.GetOrderedSections(symbol_offsets)
    if not ordered_sections:  # Either None or empty is a problem.
      raise Exception('Failed to get ordered sections')
    with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile:
      orderfile.write('\n'.join(ordered_sections))

  if self._options.patch:
    if self._options.profile:
      self._RemoveBlanks(self._GetUnpatchedOrderfileFilename(),
                         self._GetPathToOrderfile())
    try:
      _UnstashOutputDirectory(self._uninstrumented_out_dir)
      self._compiler = ClankCompiler(
          self._uninstrumented_out_dir, self._step_recorder,
          self._options.arch, self._options.use_goma, self._options.goma_dir,
          self._options.system_health_orderfile, self._monochrome,
          self._options.public, self._GetPathToOrderfile())
      self._compiler.CompileLibchrome(instrumented=False, use_call_graph=False)
      self._PatchOrderfile()
      # Because identical code folding is a bit different with and without
      # the orderfile build, we need to re-patch the orderfile with code
      # folding as close to the final version as possible.
      self._compiler.CompileLibchrome(instrumented=False,
                                      use_call_graph=False, force_relink=True)
      self._PatchOrderfile()
      self._compiler.CompileLibchrome(instrumented=False,
                                      use_call_graph=False, force_relink=True)
      self._VerifySymbolOrder()
      self._MaybeArchiveOrderfile(self._GetPathToOrderfile())
    finally:
      _StashOutputDirectory(self._uninstrumented_out_dir)

  if self._options.benchmark:
    self._output_data['orderfile_benchmark_results'] = self.RunBenchmark(
        self._uninstrumented_out_dir)
    self._output_data['no_orderfile_benchmark_results'] = self.RunBenchmark(
        self._no_orderfile_out_dir, no_orderfile=True)

  if self._options.buildbot:
    self._orderfile_updater._GitStash()
  self._step_recorder.EndStep()
  return not self._step_recorder.ErrorRecorded()