def _BuildReport(paths):
  before_size_path, after_size_path, outpath = paths
  try:
    os.makedirs(os.path.dirname(outpath))
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise
  size_info = _SizeInfoFromGsPath(after_size_path)
  if before_size_path:
    size_info = diff.Diff(_SizeInfoFromGsPath(before_size_path), size_info)

  html_report.BuildReportFromSizeInfo(outpath, size_info, all_symbols=False)
  return outpath
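
# Illustrative only: a sketch of how _BuildReport might be fanned out. Its
# single-tuple `paths` argument matches the calling convention of
# multiprocessing.Pool.map, and the sketch assumes the helpers referenced
# above (_SizeInfoFromGsPath, diff, html_report) live in the same module.
# The function name and the example paths below are hypothetical.
import multiprocessing


def _BuildReportsInParallel(report_specs):
  """Builds one report per (before_size_path, after_size_path, outpath)."""
  pool = multiprocessing.Pool()
  try:
    return pool.map(_BuildReport, report_specs)
  finally:
    pool.close()
    pool.join()


# Example usage (hypothetical gs:// paths and output location):
# _BuildReportsInParallel([
#     ('gs://bucket/before/Chrome.size', 'gs://bucket/after/Chrome.size',
#      '/tmp/reports/Chrome.ndjson'),
# ])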
def _SaveNdjsonFunc(self, filtered_symbols, size_info=None, to_file=None):
  """Saves a .ndjson file containing only filtered_symbols into to_file.

  Args:
    filtered_symbols: Which symbols to include.
    size_info: The size_info to filter. Defaults to size_infos[0].
    to_file: Defaults to default.ndjson.
  """
  size_info = size_info or self._size_infos[0]
  to_file = to_file or 'default.ndjson'
  old_raw_symbols = size_info.raw_symbols
  size_info.raw_symbols = filtered_symbols
  html_report.BuildReportFromSizeInfo(to_file, size_info)
  size_info.raw_symbols = old_raw_symbols

  shortname = os.path.basename(os.path.normpath(to_file))
  msg = (
      'Saved locally to {local}. To share, run:\n'
      '> gsutil.py cp {local} gs://chrome-supersize/oneoffs && gsutil.py -m '
      'acl ch -u AllUsers:R gs://chrome-supersize/oneoffs/{shortname}\n'
      '  Then view it at https://storage.googleapis.com/chrome-supersize'
      '/viewer.html?load_url=oneoffs%2F{shortname}')
  print(msg.format(local=to_file, shortname=shortname))
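
# Illustrative only: _SaveNdjsonFunc temporarily swaps filtered_symbols into
# size_info.raw_symbols so the regular report builder can be reused, then
# restores the original symbols. Assuming the interactive console binds this
# method under a name such as SaveNdjson (hypothetical name and query), usage
# might look like:
#
#   >>> for_test = size_info.raw_symbols.WhereNameMatches(r'ForTest')
#   >>> SaveNdjson(for_test, to_file='for_test.ndjson')
#
# which writes for_test.ndjson locally and prints the gsutil.py commands for
# sharing it via gs://chrome-supersize/oneoffs.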
def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('--author', required=True, help='CL author')
  parser.add_argument('--apk-name', required=True,
                      help='Name of the apk (ex. Name.apk)')
  parser.add_argument('--before-dir', required=True,
                      help='Directory containing the APK from the reference '
                           'build.')
  parser.add_argument('--after-dir', required=True,
                      help='Directory containing the APK for the new build.')
  parser.add_argument('--results-path', required=True,
                      help='Output path for the trybot result .json file.')
  parser.add_argument('--staging-dir', required=True,
                      help='Directory to write summary files to.')
  parser.add_argument('-v', '--verbose', action='store_true')
  args = parser.parse_args()

  if args.verbose:
    logging.basicConfig(level=logging.INFO)

  logging.info('Creating Supersize diff')
  supersize_diff_lines, delta_size_info = _CreateSupersizeDiff(
      args.apk_name, args.before_dir, args.after_dir)

  changed_symbols = delta_size_info.raw_symbols.WhereDiffStatusIs(
      models.DIFF_STATUS_UNCHANGED).Inverted()

  # Monitor dex method count since the "multidex limit" is a thing.
  logging.info('Checking dex symbols')
  dex_delta_lines, dex_delta = _CreateMethodCountDelta(changed_symbols)
  size_deltas = {dex_delta}

  # Look for native symbols called "kConstant" that are not actually constants.
  # C++ syntax makes this an easy mistake, and having symbols in .data uses
  # more RAM than symbols in .rodata (at least for multi-process apps).
  logging.info('Checking for mutable constants in native symbols')
  mutable_constants_lines, mutable_constants_delta = (
      _CreateMutableConstantsDelta(changed_symbols))
  size_deltas.add(mutable_constants_delta)

  # Check for uncompressed .pak file entries being added to avoid unnecessary
  # bloat.
  logging.info('Checking pak symbols')
  size_deltas.update(_CreateUncompressedPakSizeDeltas(changed_symbols))

  # Normalized APK Size is the main metric we use to monitor binary size.
  logging.info('Creating sizes diff')
  resource_sizes_lines, resource_sizes_delta = _CreateResourceSizesDelta(
      args.apk_name, args.before_dir, args.after_dir)
  size_deltas.add(resource_sizes_delta)

  # The .ndjson file can be consumed by the HTML viewer.
  logging.info('Creating HTML Report')
  ndjson_path = os.path.join(args.staging_dir, _NDJSON_FILENAME)
  html_report.BuildReportFromSizeInfo(ndjson_path, delta_size_info)

  passing_deltas = set(m for m in size_deltas if m.IsAllowable())
  failing_deltas = size_deltas - passing_deltas

  is_roller = '-autoroll' in args.author
  failing_checks_text = '\n'.join(d.explanation for d in sorted(failing_deltas))
  passing_checks_text = '\n'.join(d.explanation for d in sorted(passing_deltas))
  checks_text = """\
FAILING:
{}

PASSING:
{}
""".format(failing_checks_text, passing_checks_text)
  if failing_deltas:
    checks_text += _FAILURE_GUIDANCE

  status_code = 1 if failing_deltas and not is_roller else 0

  summary = '<br>' + '<br>'.join(resource_sizes_lines)
  if 'Empty Resource Sizes Diff' in summary:
    summary = '<br>No size metrics were affected.'
  if failing_deltas:
    summary += '<br><br>Failed Size Checks:<br>'
    summary += failing_checks_text.replace('\n', '<br>')
    summary += '<br>Look at "Size Assertion Results" for guidance.'
  links_json = [
      {
          'name': '>>> Size Assertion Results <<<',
          'lines': checks_text.splitlines(),
      },
      {
          'name': '>>> Mutable Constants Diff <<<',
          'lines': mutable_constants_lines,
      },
      {
          'name': '>>> Dex Method Diff <<<',
          'lines': dex_delta_lines,
      },
      {
          'name': '>>> SuperSize Text Diff <<<',
          'lines': supersize_diff_lines,
      },
      {
          'name': '>>> Supersize HTML Diff <<<',
          'url': _HTML_REPORT_BASE_URL + '{{' + _NDJSON_FILENAME + '}}',
      },
  ]
  # Remove empty diffs (Mutable Constants or Dex Method).
  links_json = [o for o in links_json if o.get('lines') or o.get('url')]

  results_json = {
      'status_code': status_code,
      'summary': summary,
      'archive_filenames': [_NDJSON_FILENAME],
      'links': links_json,
  }

  with open(args.results_path, 'w') as f:
    json.dump(results_json, f)
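
# Illustrative only: the shape of the results .json written above, with
# invented values. Field names come from results_json; the bracketed
# placeholders stand in for _NDJSON_FILENAME and _HTML_REPORT_BASE_URL, which
# are defined elsewhere in the script.
_EXAMPLE_RESULTS_JSON = {
    'status_code': 1,
    'summary': '<br>Normalized APK size: +12345 bytes<br>...',
    'archive_filenames': ['<_NDJSON_FILENAME>'],
    'links': [
        {
            'name': '>>> Size Assertion Results <<<',
            'lines': ['FAILING:', '...', 'PASSING:', '...'],
        },
        {
            'name': '>>> Supersize HTML Diff <<<',
            'url': '<_HTML_REPORT_BASE_URL>{{<_NDJSON_FILENAME>}}',
        },
    ],
}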
def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('--author', required=True, help='CL author')
  parser.add_argument('--apk-name', required=True,
                      help='Name of the apk (ex. Name.apk)')
  parser.add_argument('--before-dir', required=True,
                      help='Directory containing the APK from the reference '
                           'build.')
  parser.add_argument('--after-dir', required=True,
                      help='Directory containing the APK for the new build.')
  parser.add_argument('--resource-sizes-diff-path', required=True,
                      help='Output path for the resource_sizes.py diff.')
  parser.add_argument('--supersize-diff-path', required=True,
                      help='Output path for the Supersize diff.')
  parser.add_argument('--dex-method-count-diff-path', required=True,
                      help='Output path for the dex method count diff.')
  parser.add_argument('--ndjson-path', required=True,
                      help='Output path for the Supersize HTML report.')
  parser.add_argument('--results-path', required=True,
                      help='Output path for the trybot result .json file.')
  parser.add_argument('-v', '--verbose', action='store_true')
  args = parser.parse_args()

  if args.verbose:
    logging.basicConfig(level=logging.INFO)

  logging.info('Creating Supersize diff')
  delta_size_info = _CreateAndWriteSupersizeDiff(
      args.apk_name, args.before_dir, args.after_dir, args.supersize_diff_path)
  changed_symbols = delta_size_info.raw_symbols.WhereDiffStatusIs(
      models.DIFF_STATUS_UNCHANGED).Inverted()

  # Monitor dex method growth since this correlates closely with APK size and
  # may affect our dex file structure.
  logging.info('Checking dex symbols')
  size_deltas = set()
  size_deltas.add(
      _CreateAndWriteMethodCountDelta(changed_symbols,
                                      args.dex_method_count_diff_path))

  # Check for uncompressed .pak file entries being added to avoid unnecessary
  # bloat.
  logging.info('Checking pak symbols')
  size_deltas.update(_CreateUncompressedPakSizeDeltas(changed_symbols))

  # Normalized APK Size is the main metric we use to monitor binary size.
  logging.info('Creating sizes diff')
  resource_sizes_delta = _CreateAndWriteResourceSizesDelta(
      args.apk_name, args.before_dir, args.after_dir,
      args.resource_sizes_diff_path)
  size_deltas.add(resource_sizes_delta)

  # The .ndjson file can be consumed by the HTML viewer.
  logging.info('Creating HTML Report')
  html_report.BuildReportFromSizeInfo(args.ndjson_path, delta_size_info,
                                      all_symbols=True)

  is_roller = '-autoroll' in args.author

  # Useful for bot debugging to have these printed out:
  print('Is Roller:', is_roller)
  for delta in sorted(size_deltas):
    print('{}\n\n'.format(delta.explanation))

  passing_deltas = set(m for m in size_deltas if m._IsAllowable())
  failing_deltas = size_deltas - passing_deltas

  status_code = 0 if is_roller else len(failing_deltas)
  result = 'passed' if status_code == 0 else 'failed'
  message = """
Binary size checks {}.

*******************************************************************************
FAILING:

{}

*******************************************************************************
PASSING:

{}

*******************************************************************************
""".format(result, '\n\n'.join(d.explanation for d in sorted(failing_deltas)),
           '\n\n'.join(d.explanation for d in sorted(passing_deltas)))

  if status_code != 0:
    message += _FAILURE_GUIDANCE

  # Make blank lines non-blank to prevent them from being stripped.
  # https://crbug.com/855671
  message = message.replace('\n\n', '\n.\n')

  with open(args.results_path, 'w') as f:
    results_json = {
        'details': message,
        'normalized_apk_size': resource_sizes_delta.actual,
        'status_code': status_code,
    }
    json.dump(results_json, f)
def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('--author', required=True, help='CL author')
  parser.add_argument('--apk-name', required=True,
                      help='Name of the apk (ex. Name.apk)')
  parser.add_argument('--before-dir', required=True,
                      help='Directory containing the APK from the reference '
                           'build.')
  parser.add_argument('--after-dir', required=True,
                      help='Directory containing the APK for the new build.')
  parser.add_argument('--results-path', required=True,
                      help='Output path for the trybot result .json file.')
  parser.add_argument('--staging-dir', required=True,
                      help='Directory to write summary files to.')
  parser.add_argument('-v', '--verbose', action='store_true')
  args = parser.parse_args()

  if args.verbose:
    logging.basicConfig(level=logging.INFO)

  logging.info('Creating Supersize diff')
  supersize_diff_lines, delta_size_info = _CreateSupersizeDiff(
      args.apk_name, args.before_dir, args.after_dir)
  supersize_text_path = os.path.join(args.staging_dir, _TEXT_FILENAME)
  with open(supersize_text_path, 'w') as f:
    describe.WriteLines(supersize_diff_lines, f.write)

  changed_symbols = delta_size_info.raw_symbols.WhereDiffStatusIs(
      models.DIFF_STATUS_UNCHANGED).Inverted()

  # Monitor dex method count since the "multidex limit" is a thing.
  logging.info('Checking dex symbols')
  dex_delta_lines, dex_delta = _CreateMethodCountDelta(changed_symbols)
  size_deltas = {dex_delta}

  # Look for native symbols called "kConstant" that are not actually constants.
  # C++ syntax makes this an easy mistake, and having symbols in .data uses
  # more RAM than symbols in .rodata (at least for multi-process apps).
  logging.info('Checking for mutable constants in native symbols')
  mutable_constants_lines, mutable_constants_delta = (
      _CreateMutableConstantsDelta(changed_symbols))
  size_deltas.add(mutable_constants_delta)

  # Look for symbols with 'ForTesting' in their name.
  logging.info('Checking for symbols named "ForTest"')
  testing_symbols_lines, test_symbols_delta = (
      _CreateTestingSymbolsDeltas(changed_symbols))
  size_deltas.add(test_symbols_delta)

  # Check for uncompressed .pak file entries being added to avoid unnecessary
  # bloat.
  logging.info('Checking pak symbols')
  size_deltas.update(_CreateUncompressedPakSizeDeltas(changed_symbols))

  # Normalized APK Size is the main metric we use to monitor binary size.
  logging.info('Creating sizes diff')
  resource_sizes_lines, resource_sizes_delta = _CreateResourceSizesDelta(
      args.apk_name, args.before_dir, args.after_dir)
  size_deltas.add(resource_sizes_delta)

  # The .ndjson file can be consumed by the HTML viewer.
  logging.info('Creating HTML Report')
  ndjson_path = os.path.join(args.staging_dir, _NDJSON_FILENAME)
  html_report.BuildReportFromSizeInfo(ndjson_path, delta_size_info)

  passing_deltas = set(m for m in size_deltas if m.IsAllowable())
  failing_deltas = size_deltas - passing_deltas

  is_roller = '-autoroll' in args.author
  failing_checks_text = '\n'.join(d.explanation for d in sorted(failing_deltas))
  passing_checks_text = '\n'.join(d.explanation for d in sorted(passing_deltas))
  checks_text = """\
FAILING Checks:
{}

PASSING Checks:
{}

To understand what those checks are and how to pass them, see:
https://chromium.googlesource.com/chromium/src/+/master/docs/speed/binary_size/android_binary_size_trybot.md
""".format(failing_checks_text, passing_checks_text)

  status_code = int(bool(failing_deltas))

  # Give rollers a free pass, except for mutable constants.
  # Mutable constants are rare, and other regressions are generally noticed in
  # size graphs and can be investigated after the fact.
  if is_roller and mutable_constants_delta not in failing_deltas:
    status_code = 0

  summary = '<br>' + checks_text.replace('\n', '<br>')
  links_json = [
      {
          'name': '>>> Binary Size Details <<<',
          'lines': resource_sizes_lines,
      },
      {
          'name': '>>> Mutable Constants Diff <<<',
          'lines': mutable_constants_lines,
      },
      {
          'name': '>>> "ForTest" Symbols Diff <<<',
          'lines': testing_symbols_lines,
      },
      {
          'name': '>>> Dex Class and Method Diff <<<',
          'lines': dex_delta_lines,
      },
      {
          'name': '>>> SuperSize Text Diff <<<',
          'url': '{{' + _TEXT_FILENAME + '}}',
      },
      {
          'name': '>>> SuperSize HTML Diff <<<',
          'url': _HTML_REPORT_BASE_URL + '{{' + _NDJSON_FILENAME + '}}',
      },
  ]
  # Remove empty diffs (Mutable Constants, Dex Method, ...).
  links_json = [o for o in links_json if o.get('lines') or o.get('url')]

  binary_size_listings = []
  for delta in size_deltas:
    if delta.actual == 0:
      continue
    listing = {
        'name': delta.name,
        'delta': '{} {}'.format(_FormatSign(delta.actual), delta.units),
        'limit': '{} {}'.format(_FormatSign(delta.expected), delta.units),
        'allowed': delta.IsAllowable(),
    }
    binary_size_listings.append(listing)

  binary_size_extras = [
      {
          'text': 'SuperSize HTML Diff',
          'url': _HTML_REPORT_BASE_URL + '{{' + _NDJSON_FILENAME + '}}',
      },
      {
          'text': 'SuperSize Text Diff',
          'url': '{{' + _TEXT_FILENAME + '}}',
      },
  ]

  binary_size_plugin_json = {
      'listings': binary_size_listings,
      'extras': binary_size_extras,
  }

  results_json = {
      'status_code': status_code,
      'summary': summary,
      'archive_filenames': [_NDJSON_FILENAME, _TEXT_FILENAME],
      'links': links_json,
      'gerrit_plugin_details': binary_size_plugin_json,
  }

  with open(args.results_path, 'w') as f:
    json.dump(results_json, f)
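
# Illustrative only: _FormatSign is not shown in this snippet. Based on how it
# is used above ('{} {}'.format(_FormatSign(delta.actual), delta.units)), a
# minimal sketch of such a helper could look like the following (the name
# _FormatSignSketch is hypothetical, to avoid implying this is the actual
# implementation).
def _FormatSignSketch(number):
  # Prefix positive deltas with '+' so listings read like "+1024 bytes";
  # negative numbers already carry their '-' sign.
  if number > 0:
    return '+{}'.format(number)
  return '{}'.format(number)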