Example #1
0
def main(argv):
    """Command-line entry point: loads .size files and either evaluates a
    --query snippet or drops into an interactive console session."""
    parser = argparse.ArgumentParser()
    parser.add_argument('inputs',
                        nargs='+',
                        help='Input .size files to load. For a single file, '
                        'it will be mapped to variables as: size_info & '
                        'symbols (where symbols = size_info.symbols). For '
                        'multiple inputs, the names will be size_info1, '
                        'symbols1, etc.')
    parser.add_argument('--query',
                        help='Print the result of the given snippet. Example: '
                        'symbols.WhereInSection("d").'
                        'WhereBiggerThan(100)')
    paths.AddOptions(parser)
    args = helpers.AddCommonOptionsAndParseArgs(parser, argv)

    # Every input must be a .size file; parser.error() exits immediately.
    if any(not p.endswith('.size') for p in args.inputs):
        parser.error('All inputs must end with ".size"')

    loaded_infos = [map2size.Analyze(input_path) for input_path in args.inputs]
    repl_paths = paths.LazyPaths(args=args, input_file=args.inputs[0])
    repl_session = _Session(loaded_infos, repl_paths)

    if args.query:
        logging.info('Running query from command-line.')
        repl_session.Eval(args.query)
    else:
        logging.info('Entering interactive console.')
        repl_session.GoInteractive()
Example #2
0
    def _ElfPathAndToolPrefixForSymbol(self, size_info, elf_path):
        """Locates the on-disk ELF file for |size_info| and a usable toolchain.

        Args:
            size_info: Object whose .metadata dict records the original
                tool prefix, ELF filename and ELF build ID.
            elf_path: Explicit ELF path to use, or None to auto-detect from
                output directories / beside the .size file.

        Returns:
            A (elf_path, tool_prefix) tuple whose ELF build ID matches the
            one recorded in |size_info| metadata. Raises AssertionError when
            no candidate can be found or none matches.
        """
        tool_prefix = self._lazy_paths.tool_prefix
        # Prefer the toolchain recorded when the .size file was created, if
        # it still exists on disk.
        orig_tool_prefix = size_info.metadata.get(models.METADATA_TOOL_PREFIX)
        if orig_tool_prefix:
            orig_tool_prefix = paths.FromSrcRootRelative(orig_tool_prefix)
            if os.path.exists(orig_tool_prefix + 'objdump'):
                tool_prefix = orig_tool_prefix

        # TODO(agrieve): Would be even better to use objdump --info to check that
        #     the toolchain is for the correct architecture.
        assert tool_prefix is not None, (
            'Could not determine --tool-prefix. Possible fixes include setting '
            '--tool-prefix, or setting --output-directory')

        def build_id_matches(elf_path):
            # The build ID embedded in the candidate ELF must match the one
            # recorded in the .size file's metadata.
            found_build_id = archive.BuildIdFromElf(elf_path, tool_prefix)
            expected_build_id = size_info.metadata.get(
                models.METADATA_ELF_BUILD_ID)
            return found_build_id == expected_build_id

        filename = size_info.metadata.get(models.METADATA_ELF_FILENAME)
        paths_to_try = []
        if elf_path:
            paths_to_try.append(elf_path)
        else:
            auto_lazy_paths = [
                paths.LazyPaths(any_path_within_output_directory=s.size_path)
                for s in self._size_infos
            ]
            for lazy_paths in auto_lazy_paths + [self._lazy_paths]:
                output_dir = lazy_paths.output_directory
                if output_dir:
                    # Local build: File is located in output directory.
                    paths_to_try.append(
                        os.path.normpath(os.path.join(output_dir, filename)))
                # Downloaded build: File is located beside .size file.
                paths_to_try.append(
                    os.path.normpath(
                        os.path.join(os.path.dirname(size_info.size_path),
                                     os.path.basename(filename))))

        paths_to_try = [p for p in paths_to_try if os.path.exists(p)]

        for candidate in paths_to_try:
            if build_id_matches(candidate):
                return candidate, tool_prefix

        # Simplified error flow: the original checked the loop index against
        # len(paths_to_try) inside the loop. Distinguish "candidates existed
        # but none matched" from "no candidates at all".
        assert not paths_to_try, (
            'Build ID does not match for %s' % paths_to_try[-1])
        assert False, (
            'Could not locate ELF file. If binary was built locally, ensure '
            '--output-directory is set. If output directory is unavailable, '
            'ensure {} is located beside {}, or pass its path explicitly using '
            'elf_path=').format(os.path.basename(filename),
                                size_info.size_path)
Example #3
0
def Run(args, parser):
  """Loads the given .size inputs and runs a query or interactive console."""
  # All inputs must be .size files; parser.error() does not return.
  if any(not p.endswith('.size') for p in args.inputs):
    parser.error('All inputs must end with ".size"')

  loaded = [archive.LoadAndPostProcessSizeInfo(p) for p in args.inputs]
  lookup_paths = paths.LazyPaths(
      tool_prefix=args.tool_prefix,
      output_directory=args.output_directory,
      any_path_within_output_directory=args.inputs[0])
  console = _Session(loaded, lookup_paths)

  if args.query:
    logging.info('Running query from command-line.')
    console.Eval(args.query)
  else:
    logging.info('Entering interactive console.')
    console.GoInteractive()
def main(argv):
  """Generates an HTML size report from a linker .map or .size file."""
  parser = argparse.ArgumentParser()
  parser.add_argument('input_file',
                      help='Path to input file. Can be a linker .map file, or '
                           'a .size file.')
  parser.add_argument('--report-dir', metavar='PATH', required=True,
                      help='Write output to the specified directory. An HTML '
                            'report is generated here.')
  parser.add_argument('--include-bss', action='store_true',
                      help='Include symbols from .bss (which consume no real '
                           'space)')
  parser.add_argument('--include-symbols', action='store_true',
                      help='Use per-symbol granularity rather than per-file.')
  paths.AddOptions(parser)
  args = helpers.AddCommonOptionsAndParseArgs(parser, argv)

  lazy_paths = paths.LazyPaths(args=args, input_file=args.input_file)
  size_info = map2size.Analyze(args.input_file, lazy_paths)
  symbols = size_info.symbols
  if not args.include_bss:
    # Drop .bss symbols by default (section 'b'); they consume no disk space.
    symbols = symbols.WhereInSection('b').Inverted()
  symbols = symbols.WhereBiggerThan(0)

  # Copy report boilerplate into output directory. This also proves that the
  # output directory is safe for writing, so there should be no problems writing
  # the nm.out file later.
  _CopyTemplateFiles(args.report_dir)

  logging.info('Creating JSON objects')
  tree_root = _MakeCompactTree(symbols, args.include_symbols)

  logging.info('Serializing')
  with open(os.path.join(args.report_dir, 'data.js'), 'w') as out_file:
    out_file.write('var tree_data=')
    # Use separators without whitespace to get a smaller file.
    json.dump(tree_root, out_file, ensure_ascii=False, check_circular=False,
              separators=(',', ':'))

  # FIX: print as a function (was a Python 2 print statement). For a single
  # argument this behaves identically on Python 2 and 3.
  print('Report saved to ' + args.report_dir + '/index.html')
Example #5
0
def main(argv):
    """Parses an ELF + linker .map into a .size file, attaching build metadata.

    Accepts a .size file as elf_file input too (useful for measuring
    serialization speed); in that case no metadata is constructed.
    """
    parser = argparse.ArgumentParser(argv)
    parser.add_argument('elf_file', help='Path to input ELF file.')
    parser.add_argument('output_file', help='Path to output .size(.gz) file.')
    parser.add_argument('--map-file',
                        help='Path to input .map(.gz) file. Defaults to '
                        '{{elf_file}}.map(.gz)?')
    paths.AddOptions(parser)
    args = helpers.AddCommonOptionsAndParseArgs(parser, argv)
    if not args.output_file.endswith('.size'):
        parser.error('output_file must end with .size')

    if args.map_file:
        map_file_path = args.map_file
    elif args.elf_file.endswith('.size'):
        # Allow a .size file to be passed as input as well. Useful for measuring
        # serialization speed.
        # BUG FIX: this branch previously executed `pass`, leaving
        # map_file_path unbound and crashing with UnboundLocalError at the
        # Analyze() call below. Feed the .size file through instead.
        map_file_path = args.elf_file
    else:
        map_file_path = args.elf_file + '.map'
        if not os.path.exists(map_file_path):
            map_file_path += '.gz'
        if not os.path.exists(map_file_path):
            parser.error('Could not find .map(.gz)? file. Use --map-file.')

    lazy_paths = paths.LazyPaths(args=args, input_file=args.elf_file)
    metadata = None
    if args.elf_file and not args.elf_file.endswith('.size'):
        logging.debug('Constructing metadata')
        git_rev = _DetectGitRevision(os.path.dirname(args.elf_file))
        build_id = BuildIdFromElf(args.elf_file, lazy_paths.tool_prefix)
        timestamp_obj = datetime.datetime.utcfromtimestamp(
            os.path.getmtime(args.elf_file))
        timestamp = calendar.timegm(timestamp_obj.timetuple())
        gn_args = _ParseGnArgs(
            os.path.join(lazy_paths.output_directory, 'args.gn'))

        def relative_to_out(path):
            # Paths in metadata are recorded relative to the output directory.
            return os.path.relpath(path, lazy_paths.VerifyOutputDirectory())

        metadata = {
            models.METADATA_GIT_REVISION: git_rev,
            models.METADATA_MAP_FILENAME: relative_to_out(map_file_path),
            models.METADATA_ELF_FILENAME: relative_to_out(args.elf_file),
            models.METADATA_ELF_MTIME: timestamp,
            models.METADATA_ELF_BUILD_ID: build_id,
            models.METADATA_GN_ARGS: gn_args,
        }

    size_info = Analyze(map_file_path, lazy_paths)

    if metadata:
        logging.debug('Validating section sizes')
        elf_section_sizes = _SectionSizesFromElf(args.elf_file,
                                                 lazy_paths.tool_prefix)
        # items() instead of iteritems() works on both Python 2 and 3.
        for k, v in elf_section_sizes.items():
            assert v == size_info.section_sizes.get(k), (
                'ELF file and .map file do not match.')

        size_info.metadata = metadata

    logging.info('Recording metadata: \n  %s',
                 '\n  '.join(describe.DescribeMetadata(size_info.metadata)))
    logging.info('Saving result to %s', args.output_file)
    file_format.SaveSizeInfo(size_info, args.output_file)
    logging.info('Done')
 def _CloneSizeInfo(self):
     """Returns a deep copy of the lazily-built, class-cached SizeInfo."""
     cached = IntegrationTest.size_info
     if not cached:
         # Analyze once and memoize on the class; each test mutates only
         # its own deep copy.
         cached = map2size.Analyze(
             _TEST_MAP_PATH, paths.LazyPaths(output_directory=_TEST_DATA_DIR))
         IntegrationTest.size_info = cached
     return copy.deepcopy(cached)
Example #7
0
def Run(args, parser):
    """Creates a .size file from an APK / ELF / linker-map input.

    Args:
        args: Parsed argparse namespace (size_file, elf_file, map_file,
            apk_file, tool_prefix, output_directory, no_source_paths).
        parser: The argparse parser; used to report usage errors (exits).
    """
    if not args.size_file.endswith('.size'):
        parser.error('size_file must end with .size')

    elf_path = args.elf_file
    map_path = args.map_file
    apk_path = args.apk_file
    any_input = apk_path or elf_path or map_path
    if not any_input:
        # FIX: corrected "Most pass" -> "Must pass" typo in the error message.
        parser.error(
            'Must pass at least one of --apk-file, --elf-file, --map-file')
    lazy_paths = paths.LazyPaths(tool_prefix=args.tool_prefix,
                                 output_directory=args.output_directory,
                                 any_path_within_output_directory=any_input)
    apk_so_path = None
    if apk_path:
        with zipfile.ZipFile(apk_path) as z:
            lib_infos = [
                f for f in z.infolist()
                if f.filename.endswith('.so') and f.file_size > 0
            ]
        assert lib_infos, 'APK has no .so files.'
        # TODO(agrieve): Add support for multiple .so files, and take into account
        #     secondary architectures.
        apk_so_path = max(lib_infos, key=lambda x: x.file_size).filename
        logging.debug('Sub-apk path=%s', apk_so_path)
        if not elf_path and lazy_paths.output_directory:
            # Derive the unstripped ELF path from the .so inside the APK.
            elf_path = os.path.join(
                lazy_paths.output_directory, 'lib.unstripped',
                os.path.basename(apk_so_path.replace('crazy.', '')))
            logging.debug('Detected --elf-file=%s', elf_path)

    if map_path:
        if not map_path.endswith('.map') and not map_path.endswith('.map.gz'):
            parser.error('Expected --map-file to end with .map or .map.gz')
    else:
        map_path = elf_path + '.map'
        if not os.path.exists(map_path):
            map_path += '.gz'
        if not os.path.exists(map_path):
            parser.error('Could not find .map(.gz)? file. Use --map-file.')

    tool_prefix = lazy_paths.VerifyToolPrefix()
    output_directory = None
    if not args.no_source_paths:
        output_directory = lazy_paths.VerifyOutputDirectory()

    metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix,
                              output_directory)
    apk_elf_result = None
    if apk_path and elf_path:
        # Extraction takes around 1 second, so do it in parallel.
        apk_elf_result = concurrent.ForkAndCall(
            _ElfInfoFromApk, (apk_path, apk_so_path, tool_prefix))

    size_info = CreateSizeInfo(map_path,
                               elf_path,
                               tool_prefix,
                               output_directory,
                               normalize_names=False)

    if metadata:
        size_info.metadata = metadata

        # FIX: guard on apk_elf_result (forked only when both an APK and an
        # ELF path are available). The original guarded on apk_path alone,
        # which could reference apk_elf_result before assignment.
        if apk_elf_result:
            logging.debug('Extracting section sizes from .so within .apk')
            unstripped_section_sizes = size_info.section_sizes
            apk_build_id, size_info.section_sizes = apk_elf_result.get()
            assert apk_build_id == metadata[models.METADATA_ELF_BUILD_ID], (
                'BuildID for %s within %s did not match the one at %s' %
                (apk_so_path, apk_path, elf_path))

            packed_section_name = None
            architecture = metadata[models.METADATA_ELF_ARCHITECTURE]
            # Relocation packing is enabled only for arm32 & arm64.
            if architecture == 'arm':
                packed_section_name = '.rel.dyn'
            elif architecture == 'arm64':
                packed_section_name = '.rela.dyn'

            if packed_section_name:
                logging.debug('Recording size of unpacked relocations')
                if packed_section_name not in size_info.section_sizes:
                    logging.warning('Packed section not present: %s',
                                    packed_section_name)
                else:
                    # Preserve the pre-swap (unpacked) size under a new key.
                    size_info.section_sizes[
                        '%s (unpacked)' % packed_section_name] = (
                            unstripped_section_sizes.get(packed_section_name))

    logging.info('Recording metadata: \n  %s',
                 '\n  '.join(describe.DescribeMetadata(size_info.metadata)))
    logging.info('Saving result to %s', args.size_file)
    file_format.SaveSizeInfo(size_info, args.size_file)
    logging.info('Done')