Example #1
def _WriteToStream(lines, use_pager=None, to_file=None):
    if to_file:
        use_pager = False
    if use_pager is None and sys.stdout.isatty():
        # Does not take into account line-wrapping... Oh well.
        first_lines = list(itertools.islice(lines, _THRESHOLD_FOR_PAGER))
        use_pager = len(first_lines) == _THRESHOLD_FOR_PAGER
        lines = itertools.chain(first_lines, lines)

    if use_pager:
        with _LessPipe() as stdin:
            describe.WriteLines(lines, stdin.write)
    elif to_file:
        with open(to_file, 'w') as file_obj:
            describe.WriteLines(lines, file_obj.write)
    else:
        describe.WriteLines(lines, sys.stdout.write)
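Every example on this page funnels output through describe.WriteLines(lines, write_func). A minimal sketch of the contract these call sites imply is shown below; write_lines here is a hypothetical stand-in, not the real describe.WriteLines, and the trailing-newline assumption is inferred from Example #6, which re-adds '\n' per line when comparing against a golden file.

import sys

def write_lines(lines, write_func):
    # Each line is assumed to arrive without its trailing newline; the
    # writer appends it, so the same iterable works for stdout, plain
    # files, and a pager's stdin.
    for line in lines:
        write_func(line)
        write_func('\n')

# Usage mirroring the call sites above:
write_lines(['first line', 'second line'], sys.stdout.write)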
Example #2
    def _SizeStats(self, size_info=None):
        """Prints some statistics for the given size info.

        Args:
          size_info: Defaults to size_infos[0].
        """
        size_info = size_info or self._size_infos[0]
        describe.WriteLines(describe.DescribeSizeInfoCoverage(size_info),
                            sys.stdout.write)
Example #3
    def inner(self):
        actual_lines = func(self)

        if update_goldens:
            with open(golden_path, 'w') as file_obj:
                describe.WriteLines(actual_lines, file_obj.write)
            logging.info('Wrote %s', golden_path)
        else:
            with open(golden_path) as file_obj:
                _AssertGolden(file_obj, actual_lines)
Example #4
  def _WriteFunc(self, obj, path, verbose=False):
    """Same as Print(), but writes to a file.

    Example: Write(Diff(size_info2, size_info1), 'output.txt')
    """
    parent_dir = os.path.dirname(path)
    if parent_dir and not os.path.exists(parent_dir):
      os.makedirs(parent_dir)
    with file_format.OpenMaybeGz(path, 'w') as file_obj:
      lines = describe.GenerateLines(obj, verbose=verbose)
      describe.WriteLines(lines, file_obj.write)
Example #5
    def inner(self):
      actual_lines = func(self)
      actual_lines = (re.sub(r'(elf_mtime=).*', r'\1{redacted}', l)
                      for l in actual_lines)

      if update_goldens:
        with open(golden_path, 'w') as file_obj:
          describe.WriteLines(actual_lines, file_obj.write)
        logging.info('Wrote %s', golden_path)
      else:
        with open(golden_path) as file_obj:
          _AssertGolden(file_obj, actual_lines)
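Examples #3 and #5 above show only the inner wrapper; the enclosing decorator is not part of this listing, so the sketch below is an assumption about its shape (the decorator name golden_test and the source of update_goldens are hypothetical; describe.WriteLines and _AssertGolden are the helpers used above).

import functools
import logging

def golden_test(golden_path, update_goldens=False):
  # Hypothetical enclosing decorator whose wrapper matches inner() above:
  # regenerate the golden file when updating, otherwise diff against it.
  def decorator(func):
    @functools.wraps(func)
    def inner(self):
      actual_lines = func(self)
      if update_goldens:
        with open(golden_path, 'w') as file_obj:
          describe.WriteLines(actual_lines, file_obj.write)
        logging.info('Wrote %s', golden_path)
      else:
        with open(golden_path) as file_obj:
          _AssertGolden(file_obj, actual_lines)
    return inner
  return decorator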
Example #6
  def CheckOrUpdate(golden_path, actual_lines):
    if Golden.do_update:
      with open(golden_path, 'w') as file_obj:
        describe.WriteLines(actual_lines, file_obj.write)
      logging.info('Wrote %s', golden_path)
    else:
      with open(golden_path) as file_obj:
        expected = list(file_obj)
        actual = list(l + '\n' for l in actual_lines)
        assert actual == expected, (
            ('Did not match %s.\n' % golden_path) + ''.join(
                difflib.unified_diff(expected, actual, 'expected', 'actual')))
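A hypothetical call site for the golden-file helper in Example #6 (the test class, the size_info attribute, and the golden path are made up for illustration; Golden.CheckOrUpdate and describe.GenerateLines are the functions shown in these examples).

import unittest

class DescribeGoldenTest(unittest.TestCase):
  def testSizeInfo(self):
    # Regenerates the golden file when Golden.do_update is set, otherwise
    # asserts an exact match and reports a unified diff on failure.
    actual_lines = describe.GenerateLines(self.size_info, verbose=False)
    Golden.CheckOrUpdate('testdata/size_info.golden', actual_lines)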
Example #7
  def _PrintFunc(self, obj, verbose=False, use_pager=None):
    """Prints out the given Symbol / SymbolGroup / SymbolDiff / SizeInfo.

    Args:
      obj: The object to be printed.
      use_pager: Whether to pipe output through `less`. Ignored when |obj| is a
          Symbol.
    """
    lines = describe.GenerateLines(obj, verbose=verbose)
    if use_pager is None and sys.stdout.isatty():
      # Does not take into account line-wrapping... Oh well.
      first_lines = list(itertools.islice(lines, _THRESHOLD_FOR_PAGER))
      if len(first_lines) == _THRESHOLD_FOR_PAGER:
        use_pager = True
      lines = itertools.chain(first_lines, lines)

    if use_pager:
      with _LessPipe() as stdin:
        describe.WriteLines(lines, stdin.write)
    else:
      describe.WriteLines(lines, sys.stdout.write)


def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('--author', required=True, help='CL author')
  parser.add_argument(
      '--apk-name', required=True, help='Name of the apk (ex. Name.apk)')
  parser.add_argument(
      '--before-dir',
      required=True,
      help='Directory containing the APK from reference build.')
  parser.add_argument(
      '--after-dir',
      required=True,
      help='Directory containing APK for the new build.')
  parser.add_argument(
      '--results-path',
      required=True,
      help='Output path for the trybot result .json file.')
  parser.add_argument(
      '--staging-dir',
      required=True,
      help='Directory to write summary files to.')
  parser.add_argument('-v', '--verbose', action='store_true')
  args = parser.parse_args()

  if args.verbose:
    logging.basicConfig(level=logging.INFO)

  logging.info('Creating Supersize diff')
  supersize_diff_lines, delta_size_info = _CreateSupersizeDiff(
      args.apk_name, args.before_dir, args.after_dir)
  supersize_text_path = os.path.join(args.staging_dir, _TEXT_FILENAME)
  with open(supersize_text_path, 'w') as f:
    describe.WriteLines(supersize_diff_lines, f.write)

  changed_symbols = delta_size_info.raw_symbols.WhereDiffStatusIs(
      models.DIFF_STATUS_UNCHANGED).Inverted()

  # Monitor dex method count since the "multidex limit" is a thing.
  logging.info('Checking dex symbols')
  dex_delta_lines, dex_delta = _CreateMethodCountDelta(changed_symbols)
  size_deltas = {dex_delta}

  # Look for native symbols called "kConstant" that are not actually constants.
  # C++ syntax makes this an easy mistake, and having symbols in .data uses more
  # RAM than symbols in .rodata (at least for multi-process apps).
  logging.info('Checking for mutable constants in native symbols')
  mutable_constants_lines, mutable_constants_delta = (
      _CreateMutableConstantsDelta(changed_symbols))
  size_deltas.add(mutable_constants_delta)

  # Check for uncompressed .pak file entries being added to avoid unnecessary
  # bloat.
  logging.info('Checking pak symbols')
  size_deltas.update(_CreateUncompressedPakSizeDeltas(changed_symbols))

  # Normalized APK Size is the main metric we use to monitor binary size.
  logging.info('Creating sizes diff')
  resource_sizes_lines, resource_sizes_delta = (
      _CreateResourceSizesDelta(args.apk_name, args.before_dir, args.after_dir))
  size_deltas.add(resource_sizes_delta)

  # .ndjson can be consumed by the html viewer.
  logging.info('Creating HTML Report')
  ndjson_path = os.path.join(args.staging_dir, _NDJSON_FILENAME)
  html_report.BuildReportFromSizeInfo(ndjson_path, delta_size_info)

  passing_deltas = set(m for m in size_deltas if m.IsAllowable())
  failing_deltas = size_deltas - passing_deltas

  is_roller = '-autoroll' in args.author
  failing_checks_text = '\n'.join(d.explanation for d in sorted(failing_deltas))
  passing_checks_text = '\n'.join(d.explanation for d in sorted(passing_deltas))
  checks_text = """\
FAILING:
{}

PASSING:
{}
""".format(failing_checks_text, passing_checks_text)

  if failing_deltas:
    checks_text += _FAILURE_GUIDANCE

  status_code = int(bool(failing_deltas))

  # Give rollers a free pass, except for mutable constants.
  # Mutable constants are rare, and other regressions are generally noticed in
  # size graphs and can be investigated after-the-fact.
  if is_roller and mutable_constants_delta not in failing_deltas:
    status_code = 0

  summary = '<br>' + '<br>'.join(resource_sizes_lines)
  if 'Empty Resource Sizes Diff' in summary:
    summary = '<br>No size metrics were affected.'
  if failing_deltas:
    summary += '<br><br>Failed Size Checks:<br>'
    summary += failing_checks_text.replace('\n', '<br>')
    summary += '<br>Look at "Size Assertion Results" for guidance.'

  links_json = [
      {
          'name': '>>> Size Assertion Results <<<',
          'lines': checks_text.splitlines(),
      },
      {
          'name': '>>> Mutable Constants Diff <<<',
          'lines': mutable_constants_lines,
      },
      {
          'name': '>>> Dex Method Diff <<<',
          'lines': dex_delta_lines,
      },
      {
          'name': '>>> SuperSize Text Diff <<<',
          'url': '{{' + _TEXT_FILENAME + '}}',
      },
      {
          'name': '>>> Supersize HTML Diff <<<',
          'url': _HTML_REPORT_BASE_URL + '{{' + _NDJSON_FILENAME + '}}',
      },
  ]
  # Remove empty diffs (Mutable Constants or Dex Method).
  links_json = [o for o in links_json if o.get('lines') or o.get('url')]

  binary_size_listings = []
  for delta in size_deltas:
    if delta.actual == 0:
      continue
    listing = {
        'name': delta.name,
        'delta': '{} {}'.format(delta.actual, delta.units),
        'allowed': delta.IsAllowable(),
    }
    binary_size_listings.append(listing)

  binary_size_extras = [
      {
          'text': 'Supersize HTML Diff',
          'url': _HTML_REPORT_BASE_URL + '{{' + _NDJSON_FILENAME + '}}',
      },
      {
          'text': 'SuperSize Text Diff',
          'url': '{{' + _TEXT_FILENAME + '}}',
      },
  ]

  binary_size_plugin_json = {
      'listings': binary_size_listings,
      'extras': binary_size_extras,
  }

  results_json = {
      'status_code': status_code,
      'summary': summary,
      'archive_filenames': [_NDJSON_FILENAME, _TEXT_FILENAME],
      'links': links_json,
      'gerrit_plugin_details': binary_size_plugin_json,
  }

  with open(args.results_path, 'w') as f:
    json.dump(results_json, f)
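For reference, a sketch of the file that main() writes to --results-path; all values below are illustrative placeholders, and only the keys are taken from the results_json dict above.

example_results = {
    'status_code': 0,  # 1 when any size check fails (rollers get a pass
                       # unless the mutable-constants check is the failure).
    'summary': '<br>No size metrics were affected.',
    'archive_filenames': ['<_NDJSON_FILENAME>', '<_TEXT_FILENAME>'],
    'links': [
        {'name': '>>> Size Assertion Results <<<',
         'lines': ['FAILING:', '', 'PASSING:', '...']},
    ],
    'gerrit_plugin_details': {
        'listings': [
            {'name': '<delta name>', 'delta': '<amount> <units>', 'allowed': True},
        ],
        'extras': [
            {'text': 'Supersize HTML Diff', 'url': '<report url>'},
        ],
    },
}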