Example #1
0
def Run(args, parser):
    """Validates arguments, builds an .ndjson report, and prints viewing tips."""
    # Reject bad file extensions before any expensive loading happens.
    if not args.input_size_file.endswith('.size'):
        parser.error('Input must end with ".size"')
    if args.diff_with and not args.diff_with.endswith('.size'):
        parser.error('Diff input must end with ".size"')
    if not args.output_report_file.endswith('.ndjson'):
        parser.error('Output must end with ".ndjson"')

    loaded_info = archive.LoadAndPostProcessSizeInfo(args.input_size_file)
    if args.diff_with:
        baseline_info = archive.LoadAndPostProcessSizeInfo(args.diff_with)
        loaded_info = diff.Diff(baseline_info, loaded_info)

    BuildReportFromSizeInfo(args.output_report_file,
                            loaded_info,
                            all_symbols=args.all_symbols)

    hint_lines = (
        'Done!',
        'View using a local server via: ',
        '    %s start_server %s',
        'or upload to the hosted version here:',
        '    https://storage.googleapis.com/chrome-supersize/viewer.html',
    )
    tool_path = os.path.relpath(
        os.path.join(path_util.SRC_ROOT, 'tools', 'binary_size', 'supersize'))
    # Lazy %-formatting: logging substitutes the two %s placeholders.
    logging.warning('\n'.join(hint_lines), tool_path, args.output_report_file)
Example #2
0
def Run(args, on_config_error):
  """Builds an .ndjson size report from a .size file (optionally a diff).

  Args:
    args: Parsed command-line args; reads input_size_file, diff_with,
      output_report_file, and all_symbols.
    on_config_error: Called with an error message when arguments are invalid.
  """
  # Validate file extensions before doing any expensive loading.
  if not args.input_size_file.endswith('.size'):
    on_config_error('Input must end with ".size"')
  if args.diff_with and not args.diff_with.endswith('.size'):
    on_config_error('Diff input must end with ".size"')
  if not args.output_report_file.endswith('.ndjson'):
    on_config_error('Output must end with ".ndjson"')

  size_info = archive.LoadAndPostProcessSizeInfo(args.input_size_file)
  if args.diff_with:
    before_size_info = archive.LoadAndPostProcessSizeInfo(args.diff_with)
    size_info = diff.Diff(before_size_info, size_info)

  BuildReportFromSizeInfo(
      args.output_report_file, size_info, all_symbols=args.all_symbols)

  logging.warning('Done!')
  # Placeholders: {0}=supersize dir, {1}=report path, {2}=random upload id.
  # NOTE(review): 'upload_html_viwer.py' looks like a typo for 'viewer' —
  # confirm against the actual script name before changing.
  msg = [
      'View using a local server via: ',
      '    {0}/upload_html_viwer.py --local',
      'or run:',
      '    gsutil.py cp -a public-read {1} gs://chrome-supersize/oneoffs/'
      '{2}.ndjson',
      '  to view at:',
      '    https://chrome-supersize.firebaseapp.com/viewer.html'
      '?load_url=oneoffs/{2}.ndjson',
  ]
  supersize_path = os.path.relpath(
      os.path.join(path_util.TOOLS_SRC_ROOT, 'tools', 'binary_size'))
  # Use a random UUID as the filename so user can copy-and-paste command
  # directly without a name collision.
  upload_id = uuid.uuid4()
  print('\n'.join(msg).format(supersize_path, args.output_report_file,
                              upload_id))
Example #3
0
def BuildReport(out_file,
                size_file,
                before_size_file=(None, None),
                all_symbols=False):
    """Writes a newline-delimited JSON (.ndjson) report for a .size file.

    Args:
      out_file: File object to save JSON report to.
      size_file: Size file to use as input. Tuple of path and file object.
      before_size_file: If used, creates a diff report where |size_file| is
        the newer .size file. Tuple of path and file object.
      all_symbols: If true, all symbols will be included in the report rather
        than truncated.
    """
    logging.info('Reading .size file')
    diff_mode = any(before_size_file)

    size_info = archive.LoadAndPostProcessSizeInfo(*size_file)
    if diff_mode:
        before_size_info = archive.LoadAndPostProcessSizeInfo(
            *before_size_file)
        size_info = diff.Diff(before_size_info, size_info)
        # Report only symbols whose diff status is not "unchanged".
        symbols = size_info.raw_symbols.WhereDiffStatusIs(
            models.DIFF_STATUS_UNCHANGED).Inverted()
    else:
        symbols = size_info.raw_symbols

    logging.info('Creating JSON objects')
    meta, tree_nodes = _MakeTreeViewList(symbols, all_symbols)
    meta['diff_mode'] = diff_mode
    meta['section_sizes'] = size_info.section_sizes
    if diff_mode:
        meta['before_metadata'] = size_info.before.metadata
        meta['after_metadata'] = size_info.after.metadata
    else:
        meta['metadata'] = size_info.metadata

    logging.info('Serializing JSON')

    def _write_json_line(obj):
        # Whitespace-free separators keep the generated file small.
        json.dump(obj, out_file, separators=(',', ':'), ensure_ascii=True,
                  check_circular=False)
        out_file.write('\n')

    # Newline-delimited JSON: the metadata record first, then one tree node
    # per line.
    _write_json_line(meta)
    for tree_node in tree_nodes:
        _write_json_line(tree_node)
def _CreateSupersizeDiff(apk_name, before_dir, after_dir):
    """Diffs the .size files for |apk_name| found in two directories.

    Returns:
      Tuple of (list of human-readable diff lines, the DeltaSizeInfo).
    """
    size_name = apk_name + '.size'
    before = archive.LoadAndPostProcessSizeInfo(
        os.path.join(before_dir, size_name))
    after = archive.LoadAndPostProcessSizeInfo(
        os.path.join(after_dir, size_name))
    size_info_delta = diff.Diff(before, after, sort=True)
    return list(describe.GenerateLines(size_info_delta)), size_info_delta
Example #5
0
def Run(args, parser):
  """Builds an HTML size report (tree-view UI or legacy UI) from a .size file.

  Output is written into args.report_dir. Diffs are only supported by the
  tree-view UI.
  """
  # Validate argument combinations before doing any slow work.
  if not args.input_file.endswith('.size'):
    parser.error('Input must end with ".size"')
  if args.diff_with and not args.diff_with.endswith('.size'):
    parser.error('Diff input must end with ".size"')
  elif args.diff_with and not args.tree_view_ui:
    parser.error('Diffs only supported in --tree-view-ui mode')
  if args.tree_view_ui and args.method_count:
    parser.error('--method-count is no longer supported as a command line '
                 'flag, use the client-side options instead.')

  logging.info('Reading .size file')
  size_info = archive.LoadAndPostProcessSizeInfo(args.input_file)
  if args.diff_with:
    before_size_info = archive.LoadAndPostProcessSizeInfo(args.diff_with)
    after_size_info = size_info
    size_info = diff.Diff(before_size_info, after_size_info)
  symbols = size_info.raw_symbols
  if args.method_count:
    # Keep only section 'm' symbols when counting methods.
    symbols = symbols.WhereInSection('m')
  elif not args.include_bss:
    # Exclude section 'b' (.bss) symbols unless explicitly requested.
    symbols = symbols.WhereInSection('b').Inverted()

  if args.tree_view_ui:
    size_header = 'Delta size' if args.diff_with else 'Size'

    template_src = os.path.join(os.path.dirname(__file__), 'template_tree_view')
    _CopyTreeViewTemplateFiles(template_src, args.report_dir,size_header)
    logging.info('Creating JSON objects')
    tree_root = _MakeTreeViewList(symbols, args.min_symbol_size)

    logging.info('Serializing JSON')
    # The JSON is wrapped in backticks (a JS template literal) inside data.js.
    with open(os.path.join(args.report_dir, 'data.js'), 'w') as out_file:
      out_file.write('var tree_data=`')
      # Use separators without whitespace to get a smaller file.
      json.dump(tree_root, out_file, ensure_ascii=False, check_circular=False,
                separators=(',', ':'))
      out_file.write('`')
  else:
    # Copy report boilerplate into output directory. This also proves that the
    # output directory is safe for writing, so there should be no problems
    # writing the nm.out file later.
    template_src = os.path.join(os.path.dirname(__file__), 'template')
    _CopyTemplateFiles(template_src, args.report_dir)
    logging.info('Creating JSON objects')
    tree_root = _MakeCompactTree(symbols, args.min_symbol_size,
                                 args.method_count)

    logging.info('Serializing JSON')
    with open(os.path.join(args.report_dir, 'data.js'), 'w') as out_file:
      out_file.write('var tree_data=')
      # Use separators without whitespace to get a smaller file.
      json.dump(tree_root, out_file, ensure_ascii=False, check_circular=False,
                separators=(',', ':'))

  logging.warning('Report saved to %s/index.html', args.report_dir)
Example #6
0
def Run(args, parser):
    """Generates an .ndjson report from a .size file (or a diff of two)."""
    # Validate extensions up front so we fail before any slow loading.
    if not args.input_file.endswith('.size'):
        parser.error('Input must end with ".size"')
    if args.diff_with and not args.diff_with.endswith('.size'):
        parser.error('Diff input must end with ".size"')
    if not args.report_file.endswith('.ndjson'):
        parser.error('Output must end with ".ndjson"')

    is_diff = bool(args.diff_with)

    logging.info('Reading .size file')
    size_info = archive.LoadAndPostProcessSizeInfo(args.input_file)
    if is_diff:
        before_info = archive.LoadAndPostProcessSizeInfo(args.diff_with)
        size_info = diff.Diff(before_info, size_info)
        # Drop symbols whose diff status is "unchanged".
        symbols = size_info.raw_symbols.WhereDiffStatusIs(
            models.DIFF_STATUS_UNCHANGED).Inverted()
    else:
        symbols = size_info.raw_symbols

    logging.info('Creating JSON objects')
    meta, tree_nodes = _MakeTreeViewList(symbols, args.all_symbols)
    meta['diff_mode'] = is_diff
    meta['section_sizes'] = size_info.section_sizes
    if is_diff:
        meta['before_metadata'] = size_info.before.metadata
        meta['after_metadata'] = size_info.after.metadata
    else:
        meta['metadata'] = size_info.metadata

    logging.info('Serializing JSON')
    # Newline-delimited JSON; compact separators keep the file small.
    with codecs.open(args.report_file, 'w', encoding='ascii') as out_file:

        def _write_json_line(obj):
            json.dump(obj, out_file, separators=(',', ':'),
                      ensure_ascii=True, check_circular=False)
            out_file.write('\n')

        _write_json_line(meta)
        for tree_node in tree_nodes:
            _write_json_line(tree_node)

    logging.warning('Report saved to %s', args.report_file)
    logging.warning(
        'Open server by running: \n'
        'tools/binary_size/supersize start_server %s', args.report_file)
def _CreateAndWriteSupersizeDiff(apk_name, before_dir, after_dir, output_path):
    """Diffs the .size files for |apk_name| and writes the text report.

    Returns:
      The DeltaSizeInfo of the diff.
    """
    size_name = apk_name + '.size'
    before = archive.LoadAndPostProcessSizeInfo(
        os.path.join(before_dir, size_name))
    after = archive.LoadAndPostProcessSizeInfo(
        os.path.join(after_dir, size_name))
    size_info_delta = diff.Diff(before, after, sort=True)

    with open(output_path, 'w') as f:
        for line in describe.GenerateLines(size_info_delta):
            f.write(line + '\n')

    return size_info_delta
Example #8
0
    def Run(args, on_config_error):
        """Diffs two .size files and saves the result as a .sizediff file."""
        # Validate file extensions before loading anything.
        if not args.before.endswith('.size'):
            on_config_error('Before input must end with ".size"')
        if not args.after.endswith('.size'):
            on_config_error('After input must end with ".size"')
        if not args.output_file.endswith('.sizediff'):
            on_config_error('Output must end with ".sizediff"')

        before_info = archive.LoadAndPostProcessSizeInfo(args.before)
        after_info = archive.LoadAndPostProcessSizeInfo(args.after)
        file_format.SaveDeltaSizeInfo(diff.Diff(before_info, after_info),
                                      args.output_file)
Example #9
0
def Run(args, on_config_error):
    """Loads .size/.sizediff inputs and runs a console session over them."""
    # Up-front extension check so bad paths fail before any slow loading.
    for path in args.inputs:
        if not (path.endswith('.size') or path.endswith('.sizediff')):
            on_config_error('All inputs must end with ".size" or ".sizediff"')

    size_infos = []
    for path in args.inputs:
        if path.endswith('.sizediff'):
            # A .sizediff may expand to multiple size infos.
            size_infos += archive.LoadAndPostProcessDeltaSizeInfo(path)
        else:
            size_infos += [archive.LoadAndPostProcessSizeInfo(path)]
    out_dir_finder = path_util.OutputDirectoryFinder(
        value=args.output_directory,
        any_path_within_output_directory=args.inputs[0])
    # The last input's build config supplies the linker name.
    linker = size_infos[-1].build_config.get(
        models.BUILD_CONFIG_LINKER_NAME)
    prefix_finder = path_util.ToolPrefixFinder(
        value=args.tool_prefix,
        output_directory=out_dir_finder.Tentative(),
        linker_name=linker)
    session = _Session(size_infos, out_dir_finder, prefix_finder)

    if args.query:
        logging.info('Running query from command-line.')
        session.Eval(args.query)
    else:
        logging.info('Entering interactive console.')
        session.GoInteractive()
Example #10
0
  def _DoArchiveTest(self, use_output_directory=True, use_elf=True,
                     use_pak=False, debug_measures=False):
    """Archives to a temp .size file, reloads it, and checks round-tripping.

    Returns:
      Iterable of description lines (metadata, coverage stats, then one repr
      per symbol) for golden-file comparison.
    """
    with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
      self._DoArchive(
          temp_file.name, use_output_directory=use_output_directory,
          use_elf=use_elf, use_pak=use_pak, debug_measures=debug_measures)
      size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
    # Check that saving & loading is the same as directly parsing.
    expected_size_info = self._CloneSizeInfo(
        use_output_directory=use_output_directory, use_elf=use_elf,
        use_pak=use_pak)
    # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
    self.assertEqual(expected_size_info.metadata, size_info.metadata)
    # Don't cluster.
    expected_size_info.symbols = expected_size_info.raw_symbols
    size_info.symbols = size_info.raw_symbols
    expected = list(describe.GenerateLines(expected_size_info, verbose=True))
    actual = list(describe.GenerateLines(size_info, verbose=True))
    self.assertEqual(expected, actual)

    sym_strs = (repr(sym) for sym in size_info.symbols)
    stats = describe.DescribeSizeInfoCoverage(size_info)
    if size_info.metadata:
      metadata = describe.DescribeMetadata(size_info.metadata)
    else:
      metadata = []
    return itertools.chain(metadata, stats, sym_strs)
Example #11
0
def Run(args, parser):
    """Renders a .size file into an HTML report directory."""
    if not args.input_file.endswith('.size'):
        parser.error('Input must end with ".size"')

    logging.info('Reading .size file')
    size_info = archive.LoadAndPostProcessSizeInfo(args.input_file)
    symbols = size_info.symbols
    if not args.include_bss:
        # Exclude section 'b' (.bss) symbols unless explicitly requested.
        symbols = symbols.WhereInSection('b').Inverted()
    symbols = symbols.WherePssBiggerThan(0)

    # Copying the boilerplate first also proves the output directory is safe
    # for writing, so there should be no problems writing the nm.out file
    # later.
    _CopyTemplateFiles(args.report_dir)

    logging.info('Creating JSON objects')
    tree_root = _MakeCompactTree(symbols, args.include_symbols)

    logging.info('Serializing JSON')
    data_path = os.path.join(args.report_dir, 'data.js')
    with open(data_path, 'w') as out_file:
        out_file.write('var tree_data=')
        # Whitespace-free separators keep the generated file small.
        json.dump(tree_root, out_file, ensure_ascii=False,
                  check_circular=False, separators=(',', ':'))

    logging.warning('Report saved to %s/index.html', args.report_dir)
Example #12
0
    def _DoArchiveTest(self,
                       use_output_directory=True,
                       use_elf=True,
                       debug_measures=False):
        """Runs 'archive' on the test map, reloads the .size, and verifies
        the round trip matches parsing the .map directly.

        Returns:
          Iterable of description lines for golden-file comparison.
        """
        with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
            args = [temp_file.name, '--map-file', _TEST_MAP_PATH]
            if use_output_directory:
                # Let autodetection find output_directory when --elf-file is used.
                if not use_elf:
                    args += ['--output-directory', _TEST_OUTPUT_DIR]
            else:
                args += ['--no-source-paths']
            if use_elf:
                args += ['--elf-file', _TEST_ELF_PATH]
            _RunApp('archive', args, debug_measures=debug_measures)
            size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
        # Check that saving & loading is the same as directly parsing the .map.
        expected_size_info = self._CloneSizeInfo(
            use_output_directory=use_output_directory, use_elf=use_elf)
        # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
        self.assertEqual(expected_size_info.metadata, size_info.metadata)
        # Don't cluster.
        expected_size_info.symbols = expected_size_info.raw_symbols
        size_info.symbols = size_info.raw_symbols
        expected = list(describe.GenerateLines(expected_size_info))
        actual = list(describe.GenerateLines(size_info))
        self.assertEqual(expected, actual)

        sym_strs = (repr(sym) for sym in size_info.symbols)
        stats = describe.DescribeSizeInfoCoverage(size_info)
        if size_info.metadata:
            metadata = describe.DescribeMetadata(size_info.metadata)
        else:
            metadata = []
        return itertools.chain(metadata, stats, sym_strs)
Example #13
0
    def _DoArchiveTest(self,
                       use_output_directory=True,
                       use_elf=False,
                       use_apk=False,
                       use_minimal_apks=False,
                       use_pak=False,
                       use_aux_elf=False,
                       ignore_linker_map=False,
                       debug_measures=False,
                       include_padding=False):
        """Archives to a temp .size file, reloads it, and verifies the round
        trip matches parsing the inputs directly.

        Returns:
          Iterable of description lines (merged metadata/build config,
          coverage stats, then one repr per symbol) for golden comparison.
        """
        with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
            self._DoArchive(temp_file.name,
                            use_output_directory=use_output_directory,
                            use_elf=use_elf,
                            use_apk=use_apk,
                            use_minimal_apks=use_minimal_apks,
                            use_pak=use_pak,
                            use_aux_elf=use_aux_elf,
                            ignore_linker_map=ignore_linker_map,
                            debug_measures=debug_measures,
                            include_padding=include_padding)
            size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
        # Check that saving & loading is the same as directly parsing.
        expected_size_info = self._CloneSizeInfo(
            use_output_directory=use_output_directory,
            use_elf=use_elf,
            use_apk=use_apk,
            use_minimal_apks=use_minimal_apks,
            use_pak=use_pak,
            use_aux_elf=use_aux_elf,
            ignore_linker_map=ignore_linker_map)
        self.assertEqual(_AllMetadata(expected_size_info),
                         _AllMetadata(size_info))
        # Don't cluster.
        expected_size_info.symbols = expected_size_info.raw_symbols
        size_info.symbols = size_info.raw_symbols
        expected = list(
            describe.GenerateLines(expected_size_info, verbose=True))
        actual = list(describe.GenerateLines(size_info, verbose=True))
        self.assertEqual(expected, actual)

        sym_strs = (repr(sym) for sym in size_info.symbols)
        stats = data_quality.DescribeSizeInfoCoverage(size_info)
        if len(size_info.containers) == 1:
            # If there's only one container, merge its metadata into
            # build_config.
            merged_data_desc = describe.DescribeDict(size_info.metadata_legacy)
        else:
            merged_data_desc = describe.DescribeDict(size_info.build_config)
            for m in _AllMetadata(size_info):
                merged_data_desc.extend(describe.DescribeDict(m))
        return itertools.chain(merged_data_desc, stats, sym_strs)
Example #14
0
    def test_Diff_Basic(self):
        """Exercises diff.Diff() on two cloned size infos with synthetic
        changes (removed symbols, a size change, a padding-only change, and a
        .grd rename), returning the textual diff for golden comparison."""
        size_info1 = self._CloneSizeInfo(use_pak=True)
        size_info2 = self._CloneSizeInfo(use_pak=True)
        size_info2.build_config['git_revision'] = 'xyz789'
        container1 = size_info1.containers[0]
        container2 = size_info2.containers[0]
        # Differing metadata so the diff has metadata changes to report.
        container1.metadata = {"foo": 1, "bar": [1, 2, 3], "baz": "yes"}
        container2.metadata = {"foo": 1, "bar": [1, 3], "baz": "yes"}

        # Remove different symbols from each side so the diff sees both
        # additions and removals.
        size_info1.raw_symbols -= size_info1.raw_symbols.WhereNameMatches(
            r'pLinuxKernelCmpxchg|pLinuxKernelMemoryBarrier')
        size_info2.raw_symbols -= size_info2.raw_symbols.WhereNameMatches(
            r'IDS_AW_WEBPAGE_PARENTAL_|IDS_WEB_FONT_FAMILY|IDS_WEB_FONT_SIZE')
        changed_sym = size_info1.raw_symbols.WhereNameMatches(
            'Patcher::Name_')[0]
        changed_sym.size -= 10
        # Padding-only delta: grow both padding and size by the same amount.
        padding_sym = size_info2.raw_symbols.WhereNameMatches(
            'symbol gap 0')[0]
        padding_sym.padding += 20
        padding_sym.size += 20
        # Test pak symbols changing .grd files. They should not show as changed.
        pak_sym = size_info2.raw_symbols.WhereNameMatches(
            r'IDR_PDF_COMPOSITOR_MANIFEST')[0]
        pak_sym.full_name = pak_sym.full_name.replace('.grd', '2.grd')

        # Serialize & de-serialize so that name normalization runs again for the pak
        # symbol.
        bytesio = io.BytesIO()
        file_format.SaveSizeInfo(size_info2, 'path', file_obj=bytesio)
        bytesio.seek(0)
        size_info2 = archive.LoadAndPostProcessSizeInfo('path',
                                                        file_obj=bytesio)

        d = diff.Diff(size_info1, size_info2)
        d.raw_symbols = d.raw_symbols.Sorted()
        # NOTE(review): [1:] assumes index 0 of CountsByDiffStatus() is the
        # unchanged count — confirm ordering against models.
        self.assertEqual((1, 2, 3), d.raw_symbols.CountsByDiffStatus()[1:])
        changed_sym = d.raw_symbols.WhereNameMatches('Patcher::Name_')[0]
        padding_sym = d.raw_symbols.WhereNameMatches('symbol gap 0')[0]
        bss_sym = d.raw_symbols.WhereInSection(models.SECTION_BSS)[0]
        # Padding-only deltas should sort after all non-padding changes.
        padding_idx = d.raw_symbols.index(padding_sym)
        changed_idx = d.raw_symbols.index(changed_sym)
        bss_idx = d.raw_symbols.index(bss_sym)
        self.assertLess(changed_idx, padding_idx)
        # And before bss.
        self.assertLess(padding_idx, bss_idx)

        return describe.GenerateLines(d, verbose=True)
Example #15
0
def Run(args, parser):
  """Loads the given .size files and runs a console session over them."""
  # Extension check first so bad paths fail before any slow loading.
  for input_path in args.inputs:
    if not input_path.endswith('.size'):
      parser.error('All inputs must end with ".size"')

  size_infos = []
  for input_path in args.inputs:
    size_infos.append(archive.LoadAndPostProcessSizeInfo(input_path))
  lazy_paths = paths.LazyPaths(
      tool_prefix=args.tool_prefix,
      output_directory=args.output_directory,
      any_path_within_output_directory=args.inputs[0])
  session = _Session(size_infos, lazy_paths)

  if not args.query:
    logging.info('Entering interactive console.')
    session.GoInteractive()
  else:
    logging.info('Running query from command-line.')
    session.Eval(args.query)
Example #16
0
    def test_Diff_Basic(self):
        """Exercises diff.Diff() on two cloned size infos with synthetic
        changes (removed symbols, a size change, a padding-only change, and a
        renamed pak symbol), returning the textual diff for comparison."""
        size_info1 = self._CloneSizeInfo(use_pak=True)
        size_info2 = self._CloneSizeInfo(use_pak=True)
        size_info2.build_config['git_revision'] = 'xyz789'
        container1 = size_info1.containers[0]
        container2 = size_info2.containers[0]
        # Differing metadata so the diff has metadata changes to report.
        container1.metadata = {"foo": 1, "bar": [1, 2, 3], "baz": "yes"}
        container2.metadata = {"foo": 1, "bar": [1, 3], "baz": "yes"}

        # Remove different symbols from each side so the diff sees both
        # additions and removals.
        size_info1.raw_symbols -= size_info1.raw_symbols[:2]
        size_info2.raw_symbols -= size_info2.raw_symbols[-3:]
        changed_sym = size_info1.raw_symbols.WhereNameMatches(
            'Patcher::Name_')[0]
        changed_sym.size -= 10
        # Padding-only delta: grow both padding and size by the same amount.
        padding_sym = size_info2.raw_symbols.WhereNameMatches(
            'symbol gap 0')[0]
        padding_sym.padding += 20
        padding_sym.size += 20
        pak_sym = size_info2.raw_symbols.WhereInSection(
            models.SECTION_PAK_NONTRANSLATED)[0]
        pak_sym.full_name = 'foo: ' + pak_sym.full_name.split()[-1]

        # Serialize & de-serialize so that name normalization runs again for the pak
        # symbol.
        bytesio = io.BytesIO()
        file_format.SaveSizeInfo(size_info2, 'path', file_obj=bytesio)
        bytesio.seek(0)
        size_info2 = archive.LoadAndPostProcessSizeInfo('path',
                                                        file_obj=bytesio)

        d = diff.Diff(size_info1, size_info2)
        d.raw_symbols = d.raw_symbols.Sorted()
        # NOTE(review): [1:] assumes index 0 of CountsByDiffStatus() is the
        # unchanged count — confirm ordering against models.
        self.assertEqual(d.raw_symbols.CountsByDiffStatus()[1:], (2, 2, 3))
        changed_sym = d.raw_symbols.WhereNameMatches('Patcher::Name_')[0]
        padding_sym = d.raw_symbols.WhereNameMatches('symbol gap 0')[0]
        bss_sym = d.raw_symbols.WhereInSection(models.SECTION_BSS)[0]
        # Padding-only deltas should sort after all non-padding changes.
        padding_idx = d.raw_symbols.index(padding_sym)
        changed_idx = d.raw_symbols.index(changed_sym)
        bss_idx = d.raw_symbols.index(bss_sym)
        self.assertLess(changed_idx, padding_idx)
        # And before bss.
        self.assertLess(padding_idx, bss_idx)

        return describe.GenerateLines(d, verbose=True)
Example #17
0
def Run(args, parser):
    """Loads the given .size files and launches the console session."""
    # Fail fast on bad extensions before loading anything.
    for input_path in args.inputs:
        if not input_path.endswith('.size'):
            parser.error('All inputs must end with ".size"')

    size_infos = list(map(archive.LoadAndPostProcessSizeInfo, args.inputs))
    out_dir_finder = path_util.OutputDirectoryFinder(
        value=args.output_directory,
        any_path_within_output_directory=args.inputs[0])
    tool_finder = path_util.ToolPrefixFinder(
        value=args.tool_prefix,
        output_directory_finder=out_dir_finder)
    session = _Session(size_infos, out_dir_finder, tool_finder)

    if not args.query:
        logging.info('Entering interactive console.')
        session.GoInteractive()
    else:
        logging.info('Running query from command-line.')
        session.Eval(args.query)
Example #18
0
  def test_ArchiveContainers(self):
    """Archives with --ssargs and returns the textual description of the
    resulting build config, per-container metadata, and symbols."""
    with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
      self._DoArchive(temp_file.name,
                      use_output_directory=True,
                      use_ssargs=True)
      size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)

    # Don't cluster.
    size_info.symbols = size_info.raw_symbols
    sym_strs = (repr(sym) for sym in size_info.symbols)

    # Each container is described as its name followed by its metadata lines.
    per_container = itertools.chain.from_iterable(
        itertools.chain([c.name], describe.DescribeDict(c.metadata))
        for c in size_info.containers)

    sections = [
        ['BuildConfig:'],
        describe.DescribeDict(size_info.build_config),
        ['Metadata:'],
        per_container,
        ['Symbols:'],
        sym_strs,
    ]
    return itertools.chain(*sections)
def Run(args, on_config_error):
    """Loads .size files and runs the console (interactive or one query)."""
    # Fail fast on bad extensions before loading anything.
    for input_path in args.inputs:
        if not input_path.endswith('.size'):
            on_config_error('All inputs must end with ".size"')

    size_infos = list(map(archive.LoadAndPostProcessSizeInfo, args.inputs))
    out_dir_finder = path_util.OutputDirectoryFinder(
        value=args.output_directory,
        any_path_within_output_directory=args.inputs[0])
    # The last input's metadata supplies the linker name.
    linker = size_infos[-1].metadata.get(models.METADATA_LINKER_NAME)
    tool_finder = path_util.ToolPrefixFinder(
        value=args.tool_prefix,
        output_directory_finder=out_dir_finder,
        linker_name=linker)
    session = _Session(size_infos, out_dir_finder, tool_finder)

    if not args.query:
        logging.info('Entering interactive console.')
        session.GoInteractive()
    else:
        logging.info('Running query from command-line.')
        session.Eval(args.query)
Example #20
0
def Run(args, on_config_error):
    """Loads .size/.sizediff inputs and runs a console session over them.

    Does not return normally: exits the process via os._exit(0).
    """
    # Up-front check for faster error-checking.
    for path in args.inputs:
        if not path.endswith('.size') and not path.endswith('.sizediff'):
            on_config_error('All inputs must end with ".size" or ".sizediff"')

    size_infos = []
    for path in args.inputs:
        if path.endswith('.sizediff'):
            # A .sizediff may expand to multiple size infos.
            size_infos.extend(archive.LoadAndPostProcessDeltaSizeInfo(path))
        else:
            size_infos.append(archive.LoadAndPostProcessSizeInfo(path))
    output_directory_finder = path_util.OutputDirectoryFinder(
        value=args.output_directory,
        any_path_within_output_directory=args.inputs[0])
    # The last input's build config supplies the linker name.
    linker_name = size_infos[-1].build_config.get(
        models.BUILD_CONFIG_LINKER_NAME)
    tool_prefix_finder = path_util.ToolPrefixFinder(
        value=args.tool_prefix,
        output_directory=output_directory_finder.Tentative(),
        linker_name=linker_name)
    session = _Session(size_infos, output_directory_finder, tool_prefix_finder)

    if args.query:
        logging.info('Running query from command-line.')
        session.Eval(args.query)
    else:
        logging.info('Entering interactive console.')
        session.GoInteractive()

    # Exit without running GC, which can save multiple seconds due to the
    # large number of objects created. It means atexit and __del__ calls are
    # not made, but this shouldn't matter for console.
    sys.stdout.flush()
    sys.stderr.flush()
    os._exit(0)
Example #21
0
def _SizeInfoFromGsPath(path):
    """Fetches a .size file from GCS via gsutil and loads it in-memory.

    NOTE(review): uses cStringIO, so this snippet is Python 2-only —
    subprocess.check_output returns bytes, which cStringIO accepts on py2.
    Porting to py3 would require io.BytesIO — confirm before changing.
    """
    size_contents = subprocess.check_output(['gsutil.py', 'cat', path])
    file_obj = cStringIO.StringIO(size_contents)
    ret = archive.LoadAndPostProcessSizeInfo(path, file_obj=file_obj)
    file_obj.close()
    return ret