Example #1
    def Run(args, on_config_error):
        if not args.before.endswith('.size'):
            on_config_error('Before input must end with ".size"')
        if not args.after.endswith('.size'):
            on_config_error('After input must end with ".size"')
        if not args.output_file.endswith('.sizediff'):
            on_config_error('Output must end with ".sizediff"')

        before_size_info = archive.LoadAndPostProcessSizeInfo(args.before)
        after_size_info = archive.LoadAndPostProcessSizeInfo(args.after)
        delta_size_info = diff.Diff(before_size_info, after_size_info)

        file_format.SaveDeltaSizeInfo(delta_size_info, args.output_file)
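For context, a minimal sketch of the save/load round trip, using only calls that appear in these examples; the imports assume the SuperSize tool modules are importable, and the file paths are placeholders.

# Minimal round-trip sketch (assumes the SuperSize modules archive, diff and
# file_format are importable, as in the examples; paths are placeholders).
import archive
import diff
import file_format

before_size_info = archive.LoadAndPostProcessSizeInfo('before.size')  # placeholder path
after_size_info = archive.LoadAndPostProcessSizeInfo('after.size')    # placeholder path
delta_size_info = diff.Diff(before_size_info, after_size_info)
file_format.SaveDeltaSizeInfo(delta_size_info, 'out.sizediff')

# A saved .sizediff loads back as a (before, after) pair that can be re-diffed
# (see Example #3 below).
new_before, new_after = archive.LoadAndPostProcessDeltaSizeInfo('out.sizediff')
new_delta = diff.Diff(new_before, new_after)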
Example #2
    def _SaveDeltaSizeInfo(self, size_info, to_file=None):
        """Saves a .sizediff file containing only filtered_symbols into to_file.

    Args:
      delta_size_info: The delta_size_info to filter.
      to_file: Defaults to default.sizediff
    """
        to_file = to_file or 'default.sizediff'
        assert to_file.endswith(
            '.sizediff'), 'to_file should end with .sizediff'

        file_format.SaveDeltaSizeInfo(size_info, to_file)
        is_internal = len(size_info.symbols.WherePathMatches('^clank')) > 0
        self._PrintUploadCommand(to_file, is_internal)
Example #3
  def test_SaveDeltaSizeInfo(self):
    # Check that saving & loading is the same as directly parsing.
    orig_info1 = self._CloneSizeInfo(use_apk=True, use_aux_elf=True)
    orig_info2 = self._CloneSizeInfo(use_elf=True)
    orig_delta = diff.Diff(orig_info1, orig_info2)

    with tempfile.NamedTemporaryFile(suffix='.sizediff') as sizediff_file:
      file_format.SaveDeltaSizeInfo(orig_delta, sizediff_file.name)
      new_info1, new_info2 = archive.LoadAndPostProcessDeltaSizeInfo(
          sizediff_file.name)
    new_delta = diff.Diff(new_info1, new_info2)

    # File format discards unchanged symbols.
    orig_delta.raw_symbols = orig_delta.raw_symbols.WhereDiffStatusIs(
        models.DIFF_STATUS_UNCHANGED).Inverted()

    self.assertEqual(
        '\n'.join(describe.GenerateLines(orig_delta, verbose=True)),
        '\n'.join(describe.GenerateLines(new_delta, verbose=True)))
Example #4
  def _SaveDeltaSizeInfo(self, size_info, to_file=None):
    """Saves a .sizediff file containing only filtered_symbols into to_file.

    Args:
      size_info: The delta size info to filter.
      to_file: Defaults to default.sizediff.
    """
    to_file = to_file or 'default.sizediff'
    assert to_file.endswith('.sizediff'), 'to_file should end with .sizediff'

    file_format.SaveDeltaSizeInfo(size_info, to_file)

    shortname = os.path.basename(os.path.normpath(to_file))
    msg = (
        'Saved locally to {local}. To share, run:\n'
        '> gsutil.py cp {local} gs://chrome-supersize/oneoffs && gsutil.py -m '
        'acl ch -u AllUsers:R gs://chrome-supersize/oneoffs/{shortname}\n'
        '  Then view it at https://storage.googleapis.com/chrome-supersize'
        '/viewer.html?load_url=oneoffs%2F{shortname}')
    print(msg.format(local=to_file, shortname=shortname))
Example #5
def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('--author', required=True, help='CL author')
  parser.add_argument(
      '--apk-name', required=True, help='Name of the apk (ex. Name.apk)')
  parser.add_argument(
      '--before-dir',
      required=True,
      help='Directory containing the APK from reference build.')
  parser.add_argument(
      '--after-dir',
      required=True,
      help='Directory containing APK for the new build.')
  parser.add_argument(
      '--results-path',
      required=True,
      help='Output path for the trybot result .json file.')
  parser.add_argument(
      '--staging-dir',
      required=True,
      help='Directory to write summary files to.')
  parser.add_argument('-v', '--verbose', action='store_true')
  args = parser.parse_args()

  if args.verbose:
    logging.basicConfig(level=logging.INFO)

  logging.info('Creating Supersize diff')
  supersize_diff_lines, delta_size_info = _CreateSupersizeDiff(
      args.apk_name, args.before_dir, args.after_dir)

  changed_symbols = delta_size_info.raw_symbols.WhereDiffStatusIs(
      models.DIFF_STATUS_UNCHANGED).Inverted()

  # Monitor dex method count since the "multidex limit" is a thing.
  logging.info('Checking dex symbols')
  dex_delta_lines, dex_delta = _CreateMethodCountDelta(changed_symbols)
  size_deltas = {dex_delta}
  metrics = {(dex_delta, _DEX_SYMBOLS_LOG)}

  # Look for native symbols called "kConstant" that are not actually constants.
  # C++ syntax makes this an easy mistake, and having symbols in .data uses more
  # RAM than symbols in .rodata (at least for multi-process apps).
  logging.info('Checking for mutable constants in native symbols')
  mutable_constants_lines, mutable_constants_delta = (
      _CreateMutableConstantsDelta(changed_symbols))
  size_deltas.add(mutable_constants_delta)
  metrics.add((mutable_constants_delta, _MUTABLE_CONSTANTS_LOG))

  # Look for symbols with 'ForTesting' in their name.
  logging.info('Checking for DEX symbols named "ForTest"')
  mapping_name = _GuessMappingFilename(args.before_dir, args.apk_name)
  if not mapping_name:
    raise Exception('Cannot find proguard mapping file.')

  before_mapping = os.path.join(args.before_dir, mapping_name)
  after_mapping = os.path.join(args.after_dir, mapping_name)
  testing_symbols_lines, test_symbols_delta = (_CreateTestingSymbolsDeltas(
      before_mapping, after_mapping))
  size_deltas.add(test_symbols_delta)
  metrics.add((test_symbols_delta, _FOR_TESTING_LOG))

  # Check for uncompressed .pak file entries being added to avoid unnecessary
  # bloat.
  logging.info('Checking pak symbols')
  size_deltas.update(_CreateUncompressedPakSizeDeltas(changed_symbols))

  # Normalized APK Size is the main metric we use to monitor binary size.
  logging.info('Creating sizes diff')
  resource_sizes_lines, resource_sizes_delta = (
      _CreateResourceSizesDelta(args.apk_name, args.before_dir, args.after_dir))
  size_deltas.add(resource_sizes_delta)
  metrics.add((resource_sizes_delta, _RESOURCE_SIZES_LOG))

  # .sizediff can be consumed by the html viewer.
  logging.info('Creating HTML Report')
  sizediff_path = os.path.join(args.staging_dir, _SIZEDIFF_FILENAME)
  file_format.SaveDeltaSizeInfo(delta_size_info, sizediff_path)

  passing_deltas = set(d for d in size_deltas if d.IsAllowable())
  failing_deltas = size_deltas - passing_deltas

  is_roller = '-autoroll' in args.author
  failing_checks_text = '\n'.join(d.explanation for d in sorted(failing_deltas))
  passing_checks_text = '\n'.join(d.explanation for d in sorted(passing_deltas))
  checks_text = """\
FAILING Checks:
{}

PASSING Checks:
{}

To understand what those checks are and how to pass them, see:
https://chromium.googlesource.com/chromium/src/+/master/docs/speed/binary_size/android_binary_size_trybot.md

""".format(failing_checks_text, passing_checks_text)

  status_code = int(bool(failing_deltas))

  # Give rollers a free pass, except for mutable constants.
  # Mutable constants are rare, and other regressions are generally noticed in
  # size graphs and can be investigated after-the-fact.
  if is_roller and mutable_constants_delta not in failing_deltas:
    status_code = 0

  summary = '<br>' + checks_text.replace('\n', '<br>')
  supersize_url = _CreateTigerViewerUrl(args.apk_name,
                                        '{{' + _SIZEDIFF_FILENAME + '}}')
  links_json = [
      {
          'name': 'Binary Size Details',
          'lines': resource_sizes_lines,
          'log_name': _RESOURCE_SIZES_LOG,
      },
      {
          'name': 'Mutable Constants Diff',
          'lines': mutable_constants_lines,
          'log_name': _MUTABLE_CONSTANTS_LOG,
      },
      {
          'name': 'ForTest Symbols Diff',
          'lines': testing_symbols_lines,
          'log_name': _FOR_TESTING_LOG,
      },
      {
          'name': 'Dex Class and Method Diff',
          'lines': dex_delta_lines,
          'log_name': _DEX_SYMBOLS_LOG,
      },
      {
          'name': 'SuperSize Text Diff',
          'lines': supersize_diff_lines,
      },
      {
          'name': 'SuperSize HTML Diff',
          'url': supersize_url,
      },
  ]
  # Remove empty diffs (Mutable Constants, Dex Method, ...).
  links_json = [o for o in links_json if o.get('lines') or o.get('url')]

  binary_size_plugin_json = _GenerateBinarySizePluginDetails(
      args.apk_name, metrics)

  results_json = {
      'status_code': status_code,
      'summary': summary,
      'archive_filenames': [_SIZEDIFF_FILENAME],
      'links': links_json,
      'gerrit_plugin_details': binary_size_plugin_json,
  }

  with open(args.results_path, 'w') as f:
    json.dump(results_json, f)
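For orientation, a hedged illustration of the shape of the results JSON written above; only the keys come from the code, and every value (including the archived filename and the log names) is a placeholder.

# Illustrative shape of results_json (keys taken from the code above; all
# values, including the filename and log names, are placeholders).
example_results = {
    'status_code': 1,
    'summary': '<br>FAILING Checks:<br>...',
    'archive_filenames': ['diff.sizediff'],  # placeholder for _SIZEDIFF_FILENAME
    'links': [
        {'name': 'Binary Size Details', 'lines': ['...'], 'log_name': 'resource_sizes'},
        {'name': 'SuperSize HTML Diff', 'url': 'https://...'},
    ],
    'gerrit_plugin_details': {'listings': [], 'extras': []},
}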
Example #6
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--author', required=True, help='CL author')
    parser.add_argument('--apk-name',
                        required=True,
                        help='Name of the apk (ex. Name.apk)')
    parser.add_argument(
        '--before-dir',
        required=True,
        help='Directory containing the APK from reference build.')
    parser.add_argument('--after-dir',
                        required=True,
                        help='Directory containing APK for the new build.')
    parser.add_argument('--results-path',
                        required=True,
                        help='Output path for the trybot result .json file.')
    parser.add_argument('--staging-dir',
                        required=True,
                        help='Directory to write summary files to.')
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.INFO)

    logging.info('Creating Supersize diff')
    supersize_diff_lines, delta_size_info = _CreateSupersizeDiff(
        args.apk_name, args.before_dir, args.after_dir)
    supersize_text_path = os.path.join(args.staging_dir, _TEXT_FILENAME)
    with open(supersize_text_path, 'w') as f:
        describe.WriteLines(supersize_diff_lines, f.write)

    changed_symbols = delta_size_info.raw_symbols.WhereDiffStatusIs(
        models.DIFF_STATUS_UNCHANGED).Inverted()

    # Monitor dex method count since the "multidex limit" is a thing.
    logging.info('Checking dex symbols')
    dex_delta_lines, dex_delta = _CreateMethodCountDelta(changed_symbols)
    size_deltas = {dex_delta}

    # Look for native symbols called "kConstant" that are not actually constants.
    # C++ syntax makes this an easy mistake, and having symbols in .data uses more
    # RAM than symbols in .rodata (at least for multi-process apps).
    logging.info('Checking for mutable constants in native symbols')
    mutable_constants_lines, mutable_constants_delta = (
        _CreateMutableConstantsDelta(changed_symbols))
    size_deltas.add(mutable_constants_delta)

    # Look for symbols with 'ForTesting' in their name.
    logging.info('Checking for symbols named "ForTest"')
    testing_symbols_lines, test_symbols_delta = (
        _CreateTestingSymbolsDeltas(changed_symbols))
    size_deltas.add(test_symbols_delta)

    # Check for uncompressed .pak file entries being added to avoid unnecessary
    # bloat.
    logging.info('Checking pak symbols')
    size_deltas.update(_CreateUncompressedPakSizeDeltas(changed_symbols))

    # Normalized APK Size is the main metric we use to monitor binary size.
    logging.info('Creating sizes diff')
    resource_sizes_lines, resource_sizes_delta = (_CreateResourceSizesDelta(
        args.apk_name, args.before_dir, args.after_dir))
    size_deltas.add(resource_sizes_delta)

    # .sizediff can be consumed by the html viewer.
    logging.info('Creating HTML Report')
    sizediff_path = os.path.join(args.staging_dir, _SIZEDIFF_FILENAME)
    file_format.SaveDeltaSizeInfo(delta_size_info, sizediff_path)

    passing_deltas = set(m for m in size_deltas if m.IsAllowable())
    failing_deltas = size_deltas - passing_deltas

    is_roller = '-autoroll' in args.author
    failing_checks_text = '\n'.join(d.explanation
                                    for d in sorted(failing_deltas))
    passing_checks_text = '\n'.join(d.explanation
                                    for d in sorted(passing_deltas))
    checks_text = """\
FAILING Checks:
{}

PASSING Checks:
{}

To understand what those checks are and how to pass them, see:
https://chromium.googlesource.com/chromium/src/+/master/docs/speed/binary_size/android_binary_size_trybot.md

""".format(failing_checks_text, passing_checks_text)

    status_code = int(bool(failing_deltas))

    # Give rollers a free pass, except for mutable constants.
    # Mutable constants are rare, and other regressions are generally noticed in
    # size graphs and can be investigated after-the-fact.
    if is_roller and mutable_constants_delta not in failing_deltas:
        status_code = 0

    summary = '<br>' + checks_text.replace('\n', '<br>')
    links_json = [
        {
            'name': '>>> Binary Size Details <<<',
            'lines': resource_sizes_lines,
        },
        {
            'name': '>>> Mutable Constants Diff <<<',
            'lines': mutable_constants_lines,
        },
        {
            'name': '>>> "ForTest" Symbols Diff <<<',
            'lines': testing_symbols_lines,
        },
        {
            'name': '>>> Dex Class and Method Diff <<<',
            'lines': dex_delta_lines,
        },
        {
            'name': '>>> SuperSize Text Diff <<<',
            'url': '{{' + _TEXT_FILENAME + '}}',
        },
        {
            'name': '>>> SuperSize HTML Diff <<<',
            'url': _HTML_REPORT_BASE_URL + '{{' + _SIZEDIFF_FILENAME + '}}',
        },
    ]
    # Remove empty diffs (Mutable Constants, Dex Method, ...).
    links_json = [o for o in links_json if o.get('lines') or o.get('url')]

    binary_size_listings = []
    for delta in size_deltas:
        if delta.actual == 0:
            continue
        listing = {
            'name': delta.name,
            'delta': '{} {}'.format(_FormatSign(delta.actual), delta.units),
            'limit': '{} {}'.format(_FormatSign(delta.expected), delta.units),
            'allowed': delta.IsAllowable(),
        }
        binary_size_listings.append(listing)

    binary_size_extras = [
        {
            'text': 'SuperSize HTML Diff',
            'url': _HTML_REPORT_BASE_URL + '{{' + _SIZEDIFF_FILENAME + '}}',
        },
        {
            'text': 'SuperSize Text Diff',
            'url': '{{' + _TEXT_FILENAME + '}}',
        },
    ]

    binary_size_plugin_json = {
        'listings': binary_size_listings,
        'extras': binary_size_extras,
    }

    results_json = {
        'status_code': status_code,
        'summary': summary,
        'archive_filenames': [_SIZEDIFF_FILENAME, _TEXT_FILENAME],
        'links': links_json,
        'gerrit_plugin_details': binary_size_plugin_json,
    }

    with open(args.results_path, 'w') as f:
        json.dump(results_json, f)
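Finally, a hypothetical invocation sketch for the main() above, driven through sys.argv; the script name and all flag values are placeholders, while the flag names come from the argparse definitions shown.

# Hypothetical invocation of main(); the script name and every value below are
# placeholders, and the flag names are the ones defined by the parser above.
import sys

sys.argv = [
    'check_binary_size.py',  # placeholder script name
    '--author', 'someone@chromium.org',
    '--apk-name', 'ChromePublic.apk',
    '--before-dir', 'out/before',
    '--after-dir', 'out/after',
    '--results-path', 'results.json',
    '--staging-dir', 'staging',
    '-v',
]
main()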