Beispiel #1
0
def _PrintStaticInitializersCountFromApk(apk_filename, chartjson=None):
    print 'Finding static initializers (can take a minute)'
    with zipfile.ZipFile(apk_filename) as z:
        infolist = z.infolist()
    out_dir = constants.GetOutDirectory()
    si_count = 0
    for zip_info in infolist:
        # Check file size to account for placeholder libraries.
        if zip_info.filename.endswith('.so') and zip_info.file_size > 0:
            lib_name = os.path.basename(zip_info.filename).replace(
                'crazy.', '')
            unstripped_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
            if os.path.exists(unstripped_path):
                si_count += _PrintStaticInitializersCount(unstripped_path)
            else:
                raise Exception('Unstripped .so not found. Looked here: %s',
                                unstripped_path)
    ReportPerfResult(chartjson, 'StaticInitializersCount', 'count', si_count,
                     'count')
def _LogToFile(results, test_type, suite_name):
  """Log results to local files which can be used for aggregation later.

  Creates <out>/test_logs/<test_type>.log on first use (with a build
  header line), then appends one short-form result line per call.
  """
  log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
  if not os.path.exists(log_file_path):
    os.mkdir(log_file_path)
  full_file_name = os.path.join(
      log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
  if not os.path.exists(full_file_name):
    # First write: start the file with a header identifying the build.
    with open(full_file_name, 'w') as log_file:
      print >> log_file, '\n%s results for %s build %s:' % (
          test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
          os.environ.get('BUILDBOT_BUILDNUMBER'))

  # BUG FIX: this message was logged twice when the file was first
  # created; log it once unconditionally.
  logging.info('Writing results to %s.', full_file_name)
  with open(full_file_name, 'a') as log_file:
    # Truncate long suite names to 25 chars plus an ellipsis marker.
    shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
    print >> log_file, '%s%s' % (shortened_suite_name.ljust(30),
                                 results.GetShortForm())
  def Install(self, device, incremental=False):
    """Installs the suite APK on |device|, fully or incrementally."""
    if not incremental:
      # Plain (re)install with the suite's required permissions.
      device.Install(self._apk_helper, reinstall=True,
                     permissions=self._permissions)
      return

    # Incremental path: load the generated installer wrapper and delegate.
    script_path = os.path.join(constants.GetOutDirectory(), 'bin',
                               'install_%s_apk_incremental' % self._suite)
    try:
      wrapper = imp.load_source('install_wrapper', script_path)
    except IOError:
      raise Exception(('Incremental install script not found: %s\n'
                       'Make sure to first build "%s_incremental"') %
                      (script_path, self._suite))
    install_params = wrapper.GetInstallParameters()

    installer.Install(device, self._apk_helper,
                      split_globs=install_params['splits'],
                      native_libs=install_params['native_libs'],
                      dex_files=install_params['dex_files'])
  def _TriggerSetUp(self):
    """Set up the triggering of a test run.

    Builds the environment variables and (optionally) a gtest flag file
    for the remote run, then registers the dummy APK and the test APK
    via _AmInstrumentTestSetup.
    """
    logging.info('Triggering test run.')

    # Any configured runner_type is deliberately ignored here; only warn.
    if self._env.runner_type:
      logging.warning('Ignoring configured runner_type "%s"',
                      self._env.runner_type)

    if not self._env.runner_package:
      runner_package = self.DEFAULT_RUNNER_PACKAGE
      logging.info('Using default runner package: %s',
                   self.DEFAULT_RUNNER_PACKAGE)
    else:
      runner_package = self._env.runner_package

    dummy_app_path = os.path.join(
        constants.GetOutDirectory(), 'apks', 'remote_device_dummy.apk')

    # pylint: disable=protected-access
    with tempfile.NamedTemporaryFile(suffix='.flags.txt') as flag_file:
      env_vars = dict(self._test_instance.extras)
      # Convert the shard timeout (seconds) to nanoseconds unless the
      # caller already supplied a value via extras.
      if gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in env_vars:
        env_vars[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int(
            self._test_instance.shard_timeout * 1e9)

      flags = []

      filter_string = self._test_instance._GenerateDisabledFilterString(None)
      if filter_string:
        flags.append('--gtest_filter=%s' % filter_string)

      if self._test_instance.test_arguments:
        flags.append(self._test_instance.test_arguments)

      if flags:
        # NOTE(review): the leading '_ ' appears to stand in for argv[0]
        # in the command-line file format -- confirm against the on-device
        # flag parser. The file is shipped as a data dependency and its
        # basename is passed through _EXTRA_COMMAND_LINE_FILE.
        flag_file.write('_ ' + ' '.join(flags))
        flag_file.flush()
        env_vars[_EXTRA_COMMAND_LINE_FILE] = os.path.basename(flag_file.name)
        self._test_instance._data_deps.append(
            (os.path.abspath(flag_file.name), None))
      # Must happen inside the with-block so the flag file still exists
      # while setup runs.
      self._AmInstrumentTestSetup(
          dummy_app_path, self._test_instance.apk, runner_package,
          environment_variables=env_vars)
Beispiel #5
0
def GenerateDepsDirUsingIsolate(suite_name, isolate_file_path,
                                isolate_file_paths, deps_exclusion_list):
    """Generate the dependency dir for the test suite using isolate.

  Args:
    suite_name: Name of the test suite (e.g. base_unittests).
    isolate_file_path: .isolate file path to use. If there is a default .isolate
                       file path for the suite_name, this will override it.
    isolate_file_paths: Dictionary with the default .isolate file paths for
                        the test suites.
    deps_exclusion_list: A list of files that are listed as dependencies in the
                         .isolate files but should not be pushed to the device.
  """
    # Resolve the .isolate path: an explicit path wins over the per-suite
    # default table.
    if isolate_file_path:
        isolate_abs_path = (
            isolate_file_path if os.path.isabs(isolate_file_path) else
            os.path.join(constants.DIR_SOURCE_ROOT, isolate_file_path))
    else:
        default_rel_path = isolate_file_paths.get(suite_name)
        if not default_rel_path:
            logging.info('Did not find an isolate file for the test suite.')
            return
        isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT,
                                        default_rel_path)

    isolated_abs_path = os.path.join(constants.GetOutDirectory(),
                                     '%s.isolated' % suite_name)
    assert os.path.exists(
        isolate_abs_path), 'Cannot find %s' % isolate_abs_path

    iso = isolator.Isolator(constants.ISOLATE_DEPS_DIR)
    iso.Clear()
    iso.Remap(isolate_abs_path, isolated_abs_path)
    # We're relying on the fact that timestamps are preserved
    # by the remap command (hardlinked). Otherwise, all the data
    # will be pushed to the device once we move to using time diff
    # instead of md5sum. Perform a sanity check here.
    iso.VerifyHardlinks()
    iso.PurgeExcluded(deps_exclusion_list)
    iso.MoveOutputDeps()
Beispiel #6
0
def SetupPrebuiltTools(adb):
  """Copies prebuilt host/device helper binaries into the output directory.

  Only supported for a Linux host driving an ARM device; returns False
  otherwise. Returns True once all tools are in place.
  """
  # TODO(bulach): build the host tools for mac, and the targets for x86/mips.
  # Prebuilt tools from r226197.
  has_prebuilt = sys.platform.startswith('linux')
  if has_prebuilt:
    abi = adb.system_properties['ro.product.cpu.abi']
    has_prebuilt = abi.startswith('armeabi')
  if not has_prebuilt:
    logging.error(
        'Prebuilt android tools only available for Linux host and ARM device.')
    return False

  prebuilt_tools = [
      'forwarder_dist/device_forwarder',
      'host_forwarder',
      'md5sum_dist/md5sum_bin',
      'md5sum_bin_host',
      'purge_ashmem',
  ]
  build_type = None
  for t in prebuilt_tools:
    src = os.path.basename(t)
    android_prebuilt_profiler_helper.GetIfChanged(src)
    bin_path = util.FindSupportBinary(t)
    # Derive the build type from the first tool looked up; fall back to
    # 'Release'. NOTE(review): GetBuildTypeOfPath is also reached when
    # bin_path is None here -- confirm it tolerates that.
    if not build_type:
      build_type = GetBuildTypeOfPath(bin_path) or 'Release'
      constants.SetBuildType(build_type)
    dest = os.path.join(constants.GetOutDirectory(), t)
    if not bin_path:
      # Tool not found as a support binary: copy the downloaded prebuilt
      # into the output directory instead.
      logging.warning('Setting up prebuilt %s', dest)
      if not os.path.exists(os.path.dirname(dest)):
        os.makedirs(os.path.dirname(dest))
      prebuilt_path = android_prebuilt_profiler_helper.GetHostPath(src)
      if not os.path.exists(prebuilt_path):
        raise NotImplementedError("""
%s must be checked into cloud storage.
Instructions:
http://www.chromium.org/developers/telemetry/upload_to_cloud_storage
""" % t)
      shutil.copyfile(prebuilt_path, dest)
      # Owner read/write/execute only.
      os.chmod(dest, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
  return True
Beispiel #7
0
  def _RunCygprofileUnitTests(self):
    """Builds, deploys and runs cygprofile_unittests."""
    # There are no unittests (yet) for the lightweight instrumentation.
    # TODO(lizeb): Fix this.
    if self._options.lightweight_instrumentation:
      return
    opts = self._options
    tools_compiler = ClankCompiler(
        os.path.dirname(constants.GetOutDirectory()), self._step_recorder,
        opts.arch, opts.jobs, opts.max_load, opts.use_goma, opts.goma_dir,
        opts.lightweight_instrumentation)
    # Tooling is built uninstrumented; the unittests themselves are
    # built instrumented.
    tools_compiler.Build(instrumented=False, target='android_tools')
    self._compiler.Build(instrumented=True, target='cygprofile_unittests')

    self._step_recorder.BeginStep('Deploy and run cygprofile_unittests')
    exit_code = self._profiler.RunCygprofileTests()

    if exit_code != 0:
      self._step_recorder.FailStep(
          'cygprofile_unittests exited with non-0 status: %d' % exit_code)
Beispiel #8
0
def ProcessCommonOptions(args):
    """Processes and handles all common options."""
    run_tests_helper.SetLogLevel(args.verbose_count)
    constants.SetBuildType(args.build_type)
    if args.build_directory:
        constants.SetBuildDirectory(args.build_directory)
    if args.output_directory:
        constants.SetOutputDirectory(args.output_directory)

    # Point devil at a custom adb binary when one was supplied.
    custom_deps = (
        {'adb': {devil_env.GetPlatform(): [args.adb_path]}}
        if args.adb_path else None)

    devil_chromium.Initialize(output_directory=constants.GetOutDirectory(),
                              custom_deps=custom_deps)

    # Some things such as Forwarder require ADB to be in the environment path.
    adb_dir = os.path.dirname(constants.GetAdbPath())
    path_entries = os.environ['PATH'].split(os.pathsep)
    if adb_dir and adb_dir not in path_entries:
        os.environ['PATH'] = os.pathsep.join([adb_dir, os.environ['PATH']])
Beispiel #9
0
 def RunTest(self, _test):
   """Runs junit tests from |self._test_suite|.

   Returns:
     A (results_list, return_code) tuple from the java test runner.
   """
   with tempfile.NamedTemporaryFile() as json_file:
     # The suite's generated launcher script lives in <out>/bin.
     java_script = os.path.join(
         constants.GetOutDirectory(), 'bin', self._test_suite)
     command = [java_script,
                '-test-jars', self._test_suite + '.jar',
                '-json-results-file', json_file.name]
     # Optional filters are only appended when configured.
     if self._test_filter:
       command.extend(['-gtest-filter', self._test_filter])
     if self._package_filter:
       command.extend(['-package-filter', self._package_filter])
     if self._runner_filter:
       command.extend(['-runner-filter', self._runner_filter])
     if self._sdk_version:
       command.extend(['-sdk-version', self._sdk_version])
     return_code = cmd_helper.RunCmd(command)
     # The runner writes JSON results into |json_file|; parse them back.
     results_list = json_results.ParseResultsFromJson(
         json.loads(json_file.read()))
     return (results_list, return_code)
Beispiel #10
0
  def __initializeDataDependencyAttributes(self, args, isolate_delegate):
    """Records isolate and --test-data dependency settings on the instance."""
    self._data_deps = []
    if args.isolate_file_path:
      self._isolate_delegate = isolate_delegate
      self._isolate_abs_path = os.path.abspath(args.isolate_file_path)
      self._isolated_abs_path = os.path.join(
          constants.GetOutDirectory(), '%s.isolated' % self._test_package)
    else:
      self._isolate_delegate = None

    # TODO(jbudorick): Deprecate and remove --test-data once data dependencies
    # are fully converted to isolate.
    self._test_data = args.test_data or None
    if self._test_data:
      logging.info('Data dependencies specified via --test-data')

    if not self._isolate_delegate and not self._test_data:
      logging.warning('No data dependencies will be pushed.')
Beispiel #11
0
    def __init__(self, suite_name):
        """
    Args:
      suite_name: Name of the test suite (e.g. base_unittests).
    """
        TestPackage.__init__(self, suite_name)
        self.suite_path = os.path.join(constants.GetOutDirectory(),
                                       '%s_apk' % suite_name,
                                       '%s-debug.apk' % suite_name)
        # Browser-test suites have dedicated package info; everything else
        # uses the generic gtest package.
        if suite_name == 'content_browsertests':
            self._package_info = constants.PACKAGE_INFO['content_browsertests']
        elif suite_name == 'components_browsertests':
            self._package_info = constants.PACKAGE_INFO[
                'components_browsertests']
        else:
            self._package_info = constants.PACKAGE_INFO['gtest']

        if suite_name == 'net_unittests':
            self._extras = {'RunInSubThread': ''}
        else:
            # CONSISTENCY FIX: use an empty dict (was an empty list) so that
            # _extras always has the same mapping type as the populated
            # branch above.
            self._extras = {}
Beispiel #12
0
def CalculateHostMd5Sums(paths):
    """Calculates the MD5 sum value for all items in |paths|.

  Directories are traversed recursively and the MD5 sum of each file found is
  reported in the result.

  Args:
    paths: A single host path or an iterable of host paths to md5sum.
  Returns:
    A dict mapping file paths to their respective md5sum checksums.
  Raises:
    IOError: If the md5sum helper binary has not been built.
  """
    # Accept a bare path for convenience (Python 2 string check).
    if isinstance(paths, basestring):
        paths = [paths]

    md5sum_bin_host_path = os.path.join(constants.GetOutDirectory(),
                                        'md5sum_bin_host')
    if not os.path.exists(md5sum_bin_host_path):
        raise IOError('File not built: %s' % md5sum_bin_host_path)
    # list() replaces the identity comprehension [p for p in paths]; it is
    # clearer and also materializes generator inputs.
    out = cmd_helper.GetCmdOutput([md5sum_bin_host_path] + list(paths))

    return _ParseMd5SumOutput(out.splitlines())
Beispiel #13
0
def InstallCommands(device):
    """Installs the chromium_commands launcher scripts and jar on |device|.

    Args:
      device: Device to install onto; must be a userdebug build.

    Raises:
      Exception: If the device is a user build, or the dex jar is not built.
    """
    if device.IsUserBuild():
        raise Exception(
            'chromium_commands currently requires a userdebug build.')

    chromium_commands_jar_path = os.path.join(constants.GetOutDirectory(),
                                              constants.SDK_BUILD_JAVALIB_DIR,
                                              'chromium_commands.dex.jar')
    if not os.path.exists(chromium_commands_jar_path):
        raise Exception('%s not found. Please build chromium_commands.' %
                        chromium_commands_jar_path)

    # NOTE(review): unlike the chmod below, this mkdir does not pass
    # check_return=True -- confirm whether a mkdir failure should be fatal.
    device.RunShellCommand(['mkdir', BIN_DIR, _FRAMEWORK_DIR])
    # One executable launcher script per command, each invoking its
    # main class via the shared shell-command template.
    for command, main_class in _COMMANDS.iteritems():
        shell_command = _SHELL_COMMAND_FORMAT % (constants.TEST_EXECUTABLE_DIR,
                                                 main_class)
        shell_file = '%s/%s' % (BIN_DIR, command)
        device.WriteFile(shell_file, shell_command)
        device.RunShellCommand(['chmod', '755', shell_file], check_return=True)

    device.adb.Push(chromium_commands_jar_path,
                    '%s/chromium_commands.jar' % _FRAMEWORK_DIR)
Beispiel #14
0
def ProcessUIAutomatorOptions(options, error_func):
    """Validates UIAutomator options and bundles them into a named tuple.

  Args:
    options: optparse.Options object.
    error_func: Function to call with the error message in case of an error.

  Returns:
    A UIAutomatorOptions named tuple which contains all options relevant to
    uiautomator tests.
  """
    ProcessJavaTestOptions(options)

    # Required arguments; each problem is reported through |error_func|.
    if not options.package:
        error_func('--package is required.')
    if options.package not in constants.PACKAGE_INFO:
        error_func('Invalid package.')
    if not options.test_jar:
        error_func('--test-jar must be specified.')

    if os.path.exists(options.test_jar):
        # The dexed JAR is fully qualified, assume the info JAR lives along side.
        options.uiautomator_jar = options.test_jar
    else:
        options.uiautomator_jar = os.path.join(constants.GetOutDirectory(),
                                               constants.SDK_BUILD_JAVALIB_DIR,
                                               '%s.dex.jar' % options.test_jar)
    dex_suffix_index = options.uiautomator_jar.find('.dex.jar')
    options.uiautomator_info_jar = (
        options.uiautomator_jar[:dex_suffix_index] + '_java.jar')

    return uiautomator_test_options.UIAutomatorOptions(
        options.tool, options.cleanup_test_files, options.push_deps,
        options.annotations, options.exclude_annotations, options.test_filter,
        options.test_data, options.save_perf_json, options.screenshot_failures,
        options.uiautomator_jar, options.uiautomator_info_jar, options.package)
Beispiel #15
0
    def RunTest(self, _test):
        """Runs junit tests from |self._test_suite|."""
        with tempfile.NamedTemporaryFile() as json_file:
            java_script = os.path.join(constants.GetOutDirectory(), 'bin',
                                       'helper', self._test_suite)
            command = [java_script]

            # Arguments forwarded to the test jar runner.
            jar_args = ['-test-jars', self._test_suite + '.jar',
                        '-json-results-file', json_file.name]
            for flag, value in (
                    ('-gtest-filter', self._test_filter),
                    ('-package-filter', self._package_filter),
                    ('-runner-filter', self._runner_filter),
                    ('-sdk-version', self._sdk_version)):
                if value:
                    jar_args.extend([flag, value])
            command.extend(['--jar-args', '"%s"' % ' '.join(jar_args)])

            # Arguments for the JVM itself (currently only EMMA coverage).
            jvm_args = []
            if self._coverage_dir:
                if not os.path.exists(self._coverage_dir):
                    os.makedirs(self._coverage_dir)
                elif not os.path.isdir(self._coverage_dir):
                    raise Exception(
                        '--coverage-dir takes a directory, not file path.')
                jvm_args.append('-Demma.coverage.out.file=%s' % os.path.join(
                    self._coverage_dir, '%s.ec' % self._test_suite))
            if jvm_args:
                command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])

            return_code = cmd_helper.RunCmd(command)
            results_list = json_results.ParseResultsFromJson(
                json.loads(json_file.read()))
            return (results_list, return_code)
Beispiel #16
0
    def ExtractAndResolveNativeStackTraces(self,
                                           data_to_symbolize,
                                           device_abi,
                                           include_stack=True):
        """Run the stack tool for given input.

    Args:
      data_to_symbolize: a list of strings to symbolize.
      include_stack: boolean whether to include stack data in output.
      device_abi: the default ABI of the device which generated the tombstone.

    Yields:
      A string for each line of resolved stack output.
    """
        self.UnzipAPKIfNecessary()
        arch = _DeviceAbiToArch(device_abi)
        if not arch:
            logging.warning('No device_abi can be found.')
            return

        cmd = [
            _STACK_TOOL, '--arch', arch, '--output-directory',
            constants.GetOutDirectory(), '--more-info'
        ]
        # NOTE(review): each APK lib is forwarded via --packed-lib,
        # presumably for relocation unpacking -- confirm against the
        # stack tool's flags.
        if self._non_native_packed_relocations and self._apk_libs:
            for apk_lib in self._apk_libs:
                cmd.extend(['--packed-lib', apk_lib])
        env = dict(os.environ)
        # Keep the invoked tool from littering .pyc files.
        env['PYTHONDONTWRITEBYTECODE'] = '1'
        # Feed the input lines to the tool through a temp file.
        with tempfile.NamedTemporaryFile() as f:
            f.write('\n'.join(data_to_symbolize))
            f.flush()
            _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name],
                                                         env=env)
        for line in output.splitlines():
            # Stop at the raw stack dump when the caller did not ask for it.
            if not include_stack and 'Stack Data:' in line:
                break
            yield line
Beispiel #17
0
def CalculateDeviceMd5Sums(paths, device):
    """Calculates the MD5 sum value for all items in |paths|.

  Directories are traversed recursively and the MD5 sum of each file found is
  reported in the result.

  Args:
    paths: A list of device paths to md5sum.
    device: The device on which to compute the checksums.
  Returns:
    A dict mapping file paths to their respective md5sum checksums.
  Raises:
    IOError: If the md5sum distribution has not been built on the host.
  """
    # Accept a bare path for convenience (Python 2 string check).
    if isinstance(paths, basestring):
        paths = [paths]

    # Push the md5sum binary distribution to the device on first use.
    if not device.FileExists(MD5SUM_DEVICE_BIN_PATH):
        md5sum_dist_path = os.path.join(constants.GetOutDirectory(),
                                        'md5sum_dist')
        if not os.path.exists(md5sum_dist_path):
            raise IOError('File not built: %s' % md5sum_dist_path)
        device.adb.Push(md5sum_dist_path, MD5SUM_DEVICE_LIB_PATH)

    out = []

    # Build one shell script containing an md5sum invocation per path,
    # push it to a device temp file, and run everything in a single
    # shell call to avoid per-path round trips.
    with tempfile.NamedTemporaryFile() as md5sum_script_file:
        with device_temp_file.DeviceTempFile(
                device.adb) as md5sum_device_script_file:
            md5sum_script = (MD5SUM_DEVICE_SCRIPT_FORMAT.format(
                path=p,
                md5sum_lib=MD5SUM_DEVICE_LIB_PATH,
                md5sum_bin=MD5SUM_DEVICE_BIN_PATH) for p in paths)
            md5sum_script_file.write('; '.join(md5sum_script))
            md5sum_script_file.flush()
            device.adb.Push(md5sum_script_file.name,
                            md5sum_device_script_file.name)
            out = device.RunShellCommand(
                ['sh', md5sum_device_script_file.name])

    return _ParseMd5SumOutput(out)
Beispiel #18
0
  def ExtractAndResolveNativeStackTraces(self, data_to_symbolize,
                                         device_abi, include_stack=True):
    """Run the stack tool for given input.

    Args:
      data_to_symbolize: a list of strings to symbolize.
      include_stack: boolean whether to include stack data in output.
      device_abi: the default ABI of the device which generated the tombstone.

    Yields:
      A string for each line of resolved stack output.
    """
    if not os.path.exists(_STACK_TOOL):
      logging.warning('%s missing. Unable to resolve native stack traces.',
                      _STACK_TOOL)
      return

    arch = _DeviceAbiToArch(device_abi)
    if not arch:
      logging.warning('No device_abi can be found.')
      return

    stack_cmd = [_STACK_TOOL, '--arch', arch, '--output-directory',
                 constants.GetOutDirectory(), '--more-info']
    child_env = dict(os.environ)
    child_env['PYTHONDONTWRITEBYTECODE'] = '1'
    with tempfile.NamedTemporaryFile(mode='w') as input_file:
      input_file.write('\n'.join(data_to_symbolize))
      input_file.flush()
      # Track wall time spent in the external symbolizer, even on failure.
      start_time = time.time()
      try:
        _, output = cmd_helper.GetCmdStatusAndOutput(
            stack_cmd + [input_file.name], env=child_env)
      finally:
        self._time_spent_symbolizing += time.time() - start_time
    for line in output.splitlines():
      if not include_stack and 'Stack Data:' in line:
        break
      yield line
Beispiel #19
0
def _CreateClassToFileNameDict(test_apk):
    """Creates a dict mapping classes to file names from size-info apk."""
    constants.CheckOutputDirectory()
    size_info_path = os.path.join(constants.GetOutDirectory(), 'size-info',
                                  os.path.basename(test_apk) + '.jar.info')

    class_to_file = {}
    # Some tests such as webview_cts_tests use a separately downloaded apk to
    # run tests. This means the apk may not have been built by the system and
    # hence no size info file exists.
    if not os.path.exists(size_info_path):
        logging.debug('Apk size file not found. %s', size_info_path)
        return class_to_file

    with open(size_info_path, 'r') as info_file:
        for line in info_file:
            clazz, path = line.rstrip().split(',', 1)
            # Only want files that are not prebuilt.
            if path.startswith('../../'):
                class_to_file[clazz] = str(path.replace('../../', '//', 1))

    return class_to_file
Beispiel #20
0
    def __init__(self, shard_index, test_server_port, test_server_path):
        """Sets up a Python driven test server on the host machine.

    Starts the server as a separate subprocess with a PYTHONPATH built
    from the source tree and generated-code directories.

    Args:
      shard_index: Index of the current shard.
      test_server_port: Port to run the test server on. This is multiplexed with
                        the shard index. To retrieve the real port access the
                        member variable |port|.
      test_server_path: The path (relative to the root src dir) of the server
    """
        self.host = _TEST_SERVER_HOST
        # Offset the port by the shard index so shards do not collide.
        self.port = test_server_port + shard_index

        src_dir = constants.DIR_SOURCE_ROOT
        # Make dirs into a list of absolute paths.
        abs_dirs = [os.path.join(src_dir, d) for d in _PYTHONPATH_DIRS]
        # Add the generated python files to the path
        abs_dirs.extend([
            os.path.join(src_dir, constants.GetOutDirectory(), d)
            for d in _GENERATED_PYTHONPATH_DIRS
        ])
        current_python_path = os.environ.get('PYTHONPATH')
        extra_python_path = ':'.join(abs_dirs)
        # Any pre-existing PYTHONPATH entries come first.
        if current_python_path:
            python_path = current_python_path + ':' + extra_python_path
        else:
            python_path = extra_python_path

        # NOTE: A separate python process is used to simplify getting the right
        # system path for finding includes.
        cmd = [
            'python',
            os.path.join(src_dir, test_server_path), '--log-to-console',
            ('--host=%s' % self.host), ('--port=%d' % self.port)
        ]
        self._test_server_process = subprocess.Popen(
            cmd, env={'PYTHONPATH': python_path})
def ProcessUIAutomatorOptions(args):
  """Processes UIAutomator options/arguments.

  Args:
    args: argparse.Namespace object.

  Returns:
    A UIAutomatorOptions named tuple which contains all options relevant to
    uiautomator tests.
  """
  ProcessJavaTestOptions(args)

  if os.path.exists(args.test_jar):
    # The dexed JAR is fully qualified, assume the info JAR lives along side.
    args.uiautomator_jar = args.test_jar
  else:
    args.uiautomator_jar = os.path.join(
        constants.GetOutDirectory(), constants.SDK_BUILD_JAVALIB_DIR,
        '%s.dex.jar' % args.test_jar)
  dex_marker = args.uiautomator_jar.find('.dex.jar')
  args.uiautomator_info_jar = args.uiautomator_jar[:dex_marker] + '_java.jar'

  return uiautomator_test_options.UIAutomatorOptions(
      args.tool, args.annotations, args.exclude_annotations, args.test_filter,
      args.test_data, args.save_perf_json, args.screenshot_failures,
      args.uiautomator_jar, args.uiautomator_info_jar, args.package,
      args.set_asserts)
Beispiel #22
0
def _GetNative(relative_func, target_names):
  """Returns an object containing native c++ sources list and its included path

  Iterate through all target_names and their deps to get the list of included
  paths and sources."""
  out_dir = constants.GetOutDirectory()
  with open(os.path.join(out_dir, 'project.json'), 'r') as project_file:
    projects = json.load(project_file)
  project_targets = projects['targets']
  root_dir = projects['build_settings']['root_path']

  includes = set()
  seen = set()
  pending = list(target_names)
  sources = []
  # Depth-first walk over targets and their deps.
  while pending:
    name = pending.pop()
    if name in seen:
      continue
    seen.add(name)
    target = project_targets[name]
    includes.update(target.get('include_dirs', []))
    pending.extend(target.get('deps', []))
    # Generated files (under //out) are deliberately skipped.
    for source in target.get('sources', []):
      if source.endswith('.cc') and not source.startswith('//out'):
        sources.append(source)

  def _resolve(paths):
    # Paths come in with a leading '//'; strip it and anchor at root_dir.
    return relative_func(
        sorted(os.path.join(root_dir, p[2:]) for p in paths))

  return {
      'sources': _resolve(sources),
      'includes': _resolve(includes),
  }
Beispiel #23
0
def _PrintStaticInitializersCountFromApk(apk_filename, tools_prefix,
                                         chartjson=None):
  """Counts static initializers in the APK's relevant .so files.

  Args:
    apk_filename: Path to the APK to inspect.
    tools_prefix: Toolchain prefix forwarded to _PrintStaticInitializersCount.
    chartjson: Optional chartjson dict to record the perf result into.

  Raises:
    Exception: If a checked .so has no unstripped counterpart in
      lib.unstripped under the output directory.
  """
  with zipfile.ZipFile(apk_filename) as z:
    so_files = [f for f in z.infolist()
                if f.filename.endswith('.so') and f.file_size > 0]
  # Skip checking static initializers for 32 bit .so files when 64 bit .so files
  # are present since the 32 bit versions will be checked by bots that only
  # build the 32 bit version. This avoids the complexity of finding 32 bit .so
  # files in the output directory in 64 bit builds.
  has_64 = any('64' in f.filename for f in so_files)
  files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]
  out_dir = constants.GetOutDirectory()
  si_count = 0
  for so_info in files_to_check:
    lib_name = os.path.basename(so_info.filename).replace('crazy.', '')
    unstripped_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
    if os.path.exists(unstripped_path):
      si_count += _PrintStaticInitializersCount(
          apk_filename, so_info.filename, unstripped_path, tools_prefix)
    else:
      # BUG FIX: the message previously used logging-style args ('%s', path),
      # which Exception never interpolates; format the path explicitly.
      raise Exception('Unstripped .so not found. Looked here: %s' %
                      unstripped_path)
  ReportPerfResult(chartjson, 'StaticInitializersCount', 'count', si_count,
                   'count')
def GetDataDependencies(runtime_deps_path):
    """Returns a list of device data dependencies.

  Args:
    runtime_deps_path: A str path to the .runtime_deps file.
  Returns:
    A list of (host_path, device_path) tuples.
  """
    if not runtime_deps_path:
        return []

    with open(runtime_deps_path, 'r') as runtime_deps_file:
        # Filter on the stripped value: the previous `if l` was always true
        # (every line read from a file contains at least '\n'), so blank
        # lines produced '' entries that resolved to the output directory.
        rel_host_files = [
            l.strip() for l in runtime_deps_file if l.strip()
        ]

    output_directory = constants.GetOutDirectory()
    abs_host_files = [
        os.path.abspath(os.path.join(output_directory, r))
        for r in rel_host_files
    ]
    filtered_abs_host_files = _FilterDataDeps(abs_host_files)
    # TODO(crbug.com/752610): Filter out host executables, and investigate
    # whether other files could be filtered as well.
    return [(f, DevicePathComponentsFor(f, output_directory))
            for f in filtered_abs_host_files]
Beispiel #25
0
    def _GetTestsFromRunner(self):
        """Returns the list of tests, from cache or by asking the devices.

        First tries the pickle cache keyed on the test APK's mtime (or, for
        incremental installs, the newest dex file's mtime). On a cache miss,
        asks the JUnit4 instrumentation runner on each attached device to
        list its tests, caches the first successful listing, and returns it.
        """
        test_apk_path = self._test_instance.test_apk.path
        pickle_path = '%s-runner.pickle' % test_apk_path
        # For incremental APKs, the code doesn't live in the apk, so instead check
        # the timestamp of the target's .stamp file.
        if self._test_instance.test_apk_incremental_install_json:
            with open(self._test_instance.test_apk_incremental_install_json
                      ) as f:
                data = json.load(f)
            out_dir = constants.GetOutDirectory()
            test_mtime = max(
                os.path.getmtime(os.path.join(out_dir, p))
                for p in data['dex_files'])
        else:
            test_mtime = os.path.getmtime(test_apk_path)

        try:
            return instrumentation_test_instance.GetTestsFromPickle(
                pickle_path, test_mtime)
        except instrumentation_test_instance.TestListPickleException as e:
            # Cache miss or stale cache; fall through to a live device query.
            logging.info('Could not get tests from pickle: %s', e)
        logging.info('Getting tests by having %s list them.',
                     self._test_instance.junit4_runner_class)

        def list_tests(d):
            def _run(dev):
                # The runner writes the test list as JSON to external storage;
                # a device temp file avoids collisions between runs.
                with device_temp_file.DeviceTempFile(
                        dev.adb, suffix='.json', dir=dev.
                        GetExternalStoragePath()) as dev_test_list_json:
                    junit4_runner_class = self._test_instance.junit4_runner_class
                    test_package = self._test_instance.test_package
                    extras = {
                        'log': 'true',
                        # Workaround for https://github.com/mockito/mockito/issues/922
                        'notPackage': 'net.bytebuddy',
                    }
                    extras[_EXTRA_TEST_LIST] = dev_test_list_json.name
                    target = '%s/%s' % (test_package, junit4_runner_class)
                    timeout = 120
                    if self._test_instance.wait_for_java_debugger:
                        timeout = None
                    test_list_run_output = dev.StartInstrumentation(
                        target, extras=extras, retries=0, timeout=timeout)
                    if any(test_list_run_output):
                        logging.error('Unexpected output while listing tests:')
                        for line in test_list_run_output:
                            logging.error('  %s', line)
                    # Pull the JSON back to the host and parse it.
                    with tempfile_ext.NamedTemporaryDirectory() as host_dir:
                        host_file = os.path.join(host_dir, 'list_tests.json')
                        dev.PullFile(dev_test_list_json.name, host_file)
                        with open(host_file, 'r') as host_file:
                            return json.load(host_file)

            return crash_handler.RetryOnSystemCrash(_run, d)

        raw_test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)

        # If all devices failed to list tests, raise an exception.
        # Check that tl is not None and is not empty.
        if all(not tl for tl in raw_test_lists):
            raise device_errors.CommandFailedError(
                'Failed to list tests on any device')

        # Get the first viable list of raw tests
        raw_tests = [tl for tl in raw_test_lists if tl][0]

        instrumentation_test_instance.SaveTestsToPickle(pickle_path, raw_tests)
        return raw_tests
Beispiel #26
0
 def GetOutDir():
     """Return the build output directory configured in constants."""
     out_dir = constants.GetOutDirectory()
     return out_dir
Beispiel #27
0
  def _initializeApkAttributes(self, args, error_func):
    """Resolves and validates all APK-related attributes from args.

    Locates the APK under test, the test APK, optional support APK and test
    JAR, derives the JUnit3/JUnit4 instrumentation runner classes from the
    test APK's manifest, and records package info and additional APKs.

    Args:
      args: Parsed command-line arguments.
      error_func: Called with a message when a required file is missing.
    """
    if args.apk_under_test:
      apk_under_test_path = args.apk_under_test
      # Allow specifying the APK under test by target name.
      if (not args.apk_under_test.endswith('.apk')
          and not args.apk_under_test.endswith('.apks')):
        apk_under_test_path = os.path.join(
            constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
            '%s.apk' % args.apk_under_test)

      # TODO(jbudorick): Move the realpath up to the argument parser once
      # APK-by-name is no longer supported.
      apk_under_test_path = os.path.realpath(apk_under_test_path)

      if not os.path.exists(apk_under_test_path):
        error_func('Unable to find APK under test: %s' % apk_under_test_path)

      self._apk_under_test = apk_helper.ToHelper(apk_under_test_path)

    test_apk_path = args.test_apk
    if not os.path.exists(test_apk_path):
      test_apk_path = os.path.join(
          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
          '%s.apk' % args.test_apk)
      # TODO(jbudorick): Move the realpath up to the argument parser once
      # APK-by-name is no longer supported.
      test_apk_path = os.path.realpath(test_apk_path)

    if not os.path.exists(test_apk_path):
      error_func('Unable to find test APK: %s' % test_apk_path)

    self._test_apk = apk_helper.ToHelper(test_apk_path)
    self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]

    self._apk_under_test_incremental_install_json = (
        args.apk_under_test_incremental_install_json)
    self._test_apk_incremental_install_json = (
        args.test_apk_incremental_install_json)

    if self._test_apk_incremental_install_json:
      assert self._suite.endswith('_incremental')
      self._suite = self._suite[:-len('_incremental')]

    self._modules = args.modules
    self._fake_modules = args.fake_modules

    self._test_jar = args.test_jar
    self._test_support_apk = apk_helper.ToHelper(os.path.join(
        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
        '%sSupport.apk' % self._suite))

    if not self._test_jar:
      logging.warning('Test jar not specified. Test runner will not have '
                      'Java annotation info available. May not handle test '
                      'timeouts correctly.')
    elif not os.path.exists(self._test_jar):
      error_func('Unable to find test JAR: %s' % self._test_jar)

    self._test_package = self._test_apk.GetPackageName()
    all_instrumentations = self._test_apk.GetAllInstrumentations()
    # The 'chromium-junit3' metadata value distinguishes JUnit3 runners.
    all_junit3_runner_classes = [
        x for x in all_instrumentations if ('0xffffffff' in x.get(
            'chromium-junit3', ''))]
    all_junit4_runner_classes = [
        x for x in all_instrumentations if ('0xffffffff' not in x.get(
            'chromium-junit3', ''))]

    if len(all_junit3_runner_classes) > 1:
      logging.warning('This test apk has more than one JUnit3 instrumentation')
    if len(all_junit4_runner_classes) > 1:
      logging.warning('This test apk has more than one JUnit4 instrumentation')

    self._junit3_runner_class = (
      all_junit3_runner_classes[0]['android:name']
      if all_junit3_runner_classes else self.test_apk.GetInstrumentationName())

    self._junit4_runner_class = (
      all_junit4_runner_classes[0]['android:name']
      if all_junit4_runner_classes else None)

    if self._junit4_runner_class:
      if self._test_apk_incremental_install_json:
        self._junit4_runner_supports_listing = next(
            (True for x in self._test_apk.GetAllMetadata()
             if 'real-instr' in x[0] and x[1] in _TEST_LIST_JUNIT4_RUNNERS),
            False)
      else:
        self._junit4_runner_supports_listing = (
            self._junit4_runner_class in _TEST_LIST_JUNIT4_RUNNERS)

    self._package_info = None
    if self._apk_under_test:
      package_under_test = self._apk_under_test.GetPackageName()
      for package_info in constants.PACKAGE_INFO.itervalues():
        if package_under_test == package_info.package:
          self._package_info = package_info
          break
    if not self._package_info:
      # Fixed missing spaces between the concatenated fragments below; the
      # old message rendered as "...test package iscurrently...".
      logging.warning(('Unable to find package info for %s. '
                       '(This may just mean that the test package is '
                       'currently being installed.)'),
                       self._test_package)

    for apk in args.additional_apks:
      if not os.path.exists(apk):
        error_func('Unable to find additional APK: %s' % apk)
    self._additional_apks = (
        [apk_helper.ToHelper(x) for x in args.additional_apks])
Beispiel #28
0
def main():
    """Generates an Android Studio Gradle project for Chromium targets.

    Parses arguments, queries GN/ninja for the requested targets, writes
    per-project and root build.gradle/settings.gradle files, builds any
    generated sources, and extracts srcjar/res zips into the project.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--output-directory',
                        help='Path to the root build directory.')
    parser.add_argument('-v',
                        '--verbose',
                        dest='verbose_count',
                        default=0,
                        action='count',
                        help='Verbose level')
    parser.add_argument(
        '--target',
        dest='targets',
        action='append',
        help='GN target to generate project for. Replaces set of '
        'default targets. May be repeated.')
    parser.add_argument(
        '--extra-target',
        dest='extra_targets',
        action='append',
        help='GN target to generate project for, in addition to '
        'the default ones. May be repeated.')
    parser.add_argument('--project-dir',
                        help='Root of the output project.',
                        default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle'))
    parser.add_argument('--all',
                        action='store_true',
                        help='Include all .java files reachable from any '
                        'apk/test/binary target. On by default unless '
                        '--split-projects is used (--split-projects can '
                        'slow down Studio given too many targets).')
    parser.add_argument('--use-gradle-process-resources',
                        action='store_true',
                        help='Have gradle generate R.java rather than ninja')
    parser.add_argument('--split-projects',
                        action='store_true',
                        help='Split projects by their gn deps rather than '
                        'combining all the dependencies of each target')
    version_group = parser.add_mutually_exclusive_group()
    version_group.add_argument(
        '--beta',
        action='store_true',
        help='Generate a project that is compatible with '
        'Android Studio Beta.')
    version_group.add_argument(
        '--canary',
        action='store_true',
        help='Generate a project that is compatible with '
        'Android Studio Canary.')
    sdk_group = parser.add_mutually_exclusive_group()
    sdk_group.add_argument(
        '--sdk',
        choices=[
            'AndroidStudioCurrent', 'AndroidStudioDefault', 'ChromiumSdkRoot'
        ],
        default='ChromiumSdkRoot',
        help="Set the project's SDK root. This can be set to "
        "Android Studio's current SDK root, the default "
        "Android Studio SDK root, or Chromium's SDK "
        "root. The default is Chromium's SDK root, but "
        "using this means that updates and additions to "
        "the SDK (e.g. installing emulators), will "
        "modify this root, hence possibly causing "
        "conflicts on the next repository sync.")
    sdk_group.add_argument(
        '--sdk-path',
        help='An explict path for the SDK root, setting this '
        'is an alternative to setting the --sdk option')
    args = parser.parse_args()
    if args.output_directory:
        constants.SetOutputDirectory(args.output_directory)
    constants.CheckOutputDirectory()
    output_dir = constants.GetOutDirectory()
    devil_chromium.Initialize(output_directory=output_dir)
    run_tests_helper.SetLogLevel(args.verbose_count)

    if args.use_gradle_process_resources:
        assert args.split_projects, (
            'Gradle resources does not work without --split-projects.')

    # Expand the $CHROMIUM_OUTPUT_DIR placeholder in the project path.
    _gradle_output_dir = os.path.abspath(
        args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir))
    jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR)
    build_vars = _ReadPropertiesFile(os.path.join(output_dir,
                                                  'build_vars.txt'))
    source_properties = _ReadPropertiesFile(
        _RebasePath(
            os.path.join(build_vars['android_sdk_build_tools'],
                         'source.properties')))
    # Channel selects which Android Studio template variant to emit.
    if args.beta:
        channel = 'beta'
    elif args.canary:
        channel = 'canary'
    else:
        channel = 'stable'
    generator = _ProjectContextGenerator(_gradle_output_dir, build_vars,
                                         args.use_gradle_process_resources,
                                         jinja_processor, args.split_projects,
                                         channel)
    logging.warning('Creating project at: %s', generator.project_dir)

    # Generate for "all targets" by default when not using --split-projects (too
    # slow), and when no --target has been explicitly set. "all targets" means all
    # java targets that are depended on by an apk or java_binary (leaf
    # java_library targets will not be included).
    args.all = args.all or (not args.split_projects and not args.targets)

    targets_from_args = set(args.targets or _DEFAULT_TARGETS)
    if args.extra_targets:
        targets_from_args.update(args.extra_targets)

    if args.all:
        # Run GN gen if necessary (faster than running "gn gen" in the no-op case).
        _RunNinja(constants.GetOutDirectory(), ['build.ninja'])
        # Query ninja for all __build_config targets.
        targets = _QueryForAllGnTargets(output_dir)
    else:
        targets = [
            re.sub(r'_test_apk$', '_test_apk__apk', t)
            for t in targets_from_args
        ]

    main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets]

    logging.warning('Building .build_config files...')
    _RunNinja(output_dir, [e.NinjaBuildConfigTarget() for e in main_entries])

    if args.all:
        # There are many unused libraries, so restrict to those that are actually
        # used by apks/binaries/tests or that are explicitly mentioned in --targets.
        main_entries = [
            e for e in main_entries
            if (e.GetType() in ('android_apk', 'java_binary',
                                'junit_binary') or e.GnTarget() in
                targets_from_args or e.GnTarget().endswith('_test_apk__apk'))
        ]

    if args.split_projects:
        main_entries = _FindAllProjectEntries(main_entries)

    logging.info('Generating for %d targets.', len(main_entries))

    entries = [e for e in _CombineTestEntries(main_entries) if e.IsValid()]
    logging.info('Creating %d projects for targets.', len(entries))

    # When only one entry will be generated we want it to have a valid
    # build.gradle file with its own AndroidManifest.
    add_all_module = not args.split_projects and len(entries) > 1

    logging.warning('Writing .gradle files...')
    project_entries = []
    zip_tuples = []
    generated_inputs = []
    for entry in entries:
        data = _GenerateGradleFile(entry, generator, build_vars,
                                   source_properties, jinja_processor)
        if data:
            # Build all paths references by .gradle that exist within output_dir.
            generated_inputs.extend(generator.GeneratedInputs(entry))
            zip_tuples.extend((
                s,
                os.path.join(generator.EntryOutputDir(entry), _SRCJARS_SUBDIR))
                              for s in generator.AllSrcjars(entry))
            zip_tuples.extend(
                (s, os.path.join(generator.EntryOutputDir(entry), _RES_SUBDIR))
                for s in generator.AllResZips(entry))
            if not add_all_module:
                project_entries.append(entry)
                _WriteFile(
                    os.path.join(generator.EntryOutputDir(entry),
                                 _GRADLE_BUILD_FILE), data)

    if add_all_module:
        _GenerateModuleAll(_gradle_output_dir, generator, build_vars,
                           source_properties, jinja_processor)

    _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE),
               _GenerateRootGradle(jinja_processor, channel))

    _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'),
               _GenerateSettingsGradle(project_entries, add_all_module))

    # Write local.properties pointing at the chosen SDK root, unless the
    # project should use Android Studio's currently-configured SDK.
    if args.sdk != "AndroidStudioCurrent":
        if args.sdk_path:
            sdk_path = _RebasePath(args.sdk_path)
        elif args.sdk == "AndroidStudioDefault":
            sdk_path = os.path.expanduser('~/Android/Sdk')
        else:
            sdk_path = _RebasePath(build_vars['android_sdk_root'])
        _WriteFile(os.path.join(generator.project_dir, 'local.properties'),
                   _GenerateLocalProperties(sdk_path))

    if generated_inputs:
        logging.warning('Building generated source files...')
        targets = _RebasePath(generated_inputs, output_dir)
        _RunNinja(output_dir, targets)

    if zip_tuples:
        _ExtractZips(generator.project_dir, zip_tuples)

    logging.warning('Project created!')
    logging.warning('Generated projects work with Android Studio %s', channel)
    logging.warning('For more tips: https://chromium.googlesource.com/chromium'
                    '/src.git/+/master/docs/android_studio.md')
Beispiel #29
0
def main():
    """Lists GN/ninja build-config targets, with optional types and stats.

    Queries ninja for all __build_config_crbug_908819 targets, optionally
    builds their .build_config files, then prints the (possibly filtered)
    targets or per-type counts.
    """
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '-C',
        '--output-directory',
        help='If outdir is not provided, will attempt to guess.')
    parser.add_argument('--gn-labels',
                        action='store_true',
                        help='Print GN labels rather than ninja targets')
    parser.add_argument(
        '--nested',
        action='store_true',
        help='Do not convert nested targets to their top-level equivalents. '
        'E.g. Without this, foo_test__apk -> foo_test')
    parser.add_argument('--print-types',
                        action='store_true',
                        help='Print type of each target')
    parser.add_argument('--build-build-configs',
                        action='store_true',
                        help='Build all .build_config files.')
    parser.add_argument('--type',
                        action='append',
                        help='Restrict to targets of given type',
                        choices=_VALID_TYPES)
    parser.add_argument('--stats',
                        action='store_true',
                        help='Print counts of each target type.')
    parser.add_argument('-v', '--verbose', default=0, action='count')
    args = parser.parse_args()

    # Type filtering, type printing, and stats all read .build_config files,
    # so those flags imply building them first.
    args.build_build_configs |= bool(args.type or args.print_types
                                     or args.stats)

    logging.basicConfig(
        level=logging.WARNING - (10 * args.verbose),
        format='%(levelname).1s %(relativeCreated)6d %(message)s')

    if args.output_directory:
        constants.SetOutputDirectory(args.output_directory)
    constants.CheckOutputDirectory()
    output_dir = constants.GetOutDirectory()

    # Query ninja for all __build_config_crbug_908819 targets.
    targets = _query_for_build_config_targets(output_dir)
    entries = [_TargetEntry(t) for t in targets]

    if args.build_build_configs:
        logging.warning('Building %d .build_config files...', len(entries))
        _run_ninja(output_dir, [e.ninja_build_config_target for e in entries])

    if args.type:
        entries = [e for e in entries if e.get_type() in args.type]

    if args.stats:
        counts = collections.Counter(e.get_type() for e in entries)
        for entry_type, count in sorted(counts.items()):
            print(f'{entry_type}: {count}')
    else:
        for e in entries:
            if args.gn_labels:
                to_print = e.gn_target
            else:
                to_print = e.ninja_target

            # Convert to top-level target
            if not args.nested:
                to_print = to_print.replace('__test_apk__apk',
                                            '').replace('__apk', '')

            if args.print_types:
                to_print = f'{to_print}: {e.get_type()}'

            print(to_print)
Beispiel #30
0
class OrderfileGenerator(object):
    """A utility for generating a new orderfile for Clank.

  Builds an instrumented binary, profiles a run of the application, and
  generates an updated orderfile.
  """
    # Checkout of the clank (Chrome for Android internal) repository.
    _CLANK_REPO = os.path.join(constants.DIR_SOURCE_ROOT, 'clank')
    # Script that converts collected cyglog data into an orderfile.
    _CYGLOG_TO_ORDERFILE_SCRIPT = os.path.join(constants.DIR_SOURCE_ROOT,
                                               'tools', 'cygprofile',
                                               'cyglog_to_orderfile.py')
    # Script that verifies symbol order in a built library.
    _CHECK_ORDERFILE_SCRIPT = os.path.join(constants.DIR_SOURCE_ROOT, 'tools',
                                           'cygprofile', 'check_orderfile.py')
    _BUILD_ROOT = os.path.abspath(
        os.path.dirname(os.path.dirname(
            constants.GetOutDirectory())))  # Normally /path/to/src

    # The '%s' is filled with the target architecture by the getter methods.
    _UNPATCHED_ORDERFILE_FILENAME = os.path.join(_CLANK_REPO, 'orderfiles',
                                                 'unpatched_orderfile.%s')
    # Output of merging the per-thread profiler traces.
    _MERGED_CYGLOG_FILENAME = os.path.join(constants.GetOutDirectory(),
                                           'merged_cyglog')

    _PATH_TO_ORDERFILE = os.path.join(_CLANK_REPO, 'orderfiles',
                                      'orderfile.%s.out')

    # Previous orderfile_generator debug files would be overwritten.
    _DIRECTORY_FOR_DEBUG_FILES = '/tmp/orderfile_generator_debug_files'

    def _GetPathToOrderfile(self):
        """Gets the path to the architecture-specific orderfile."""
        arch = self._options.arch
        return self._PATH_TO_ORDERFILE % arch

    def _GetUnpatchedOrderfileFilename(self):
        """Gets the path to the architecture-specific unpatched orderfile."""
        arch = self._options.arch
        return self._UNPATCHED_ORDERFILE_FILENAME % arch

    def __init__(self, options, orderfile_updater_class):
        """Sets up output directories, the profiler, and the updater.

        Args:
          options: Parsed command-line options.
          orderfile_updater_class: OrderfileUpdater subclass used to upload
            results to cloud storage.
        """
        self._options = options

        self._instrumented_out_dir = os.path.join(
            self._BUILD_ROOT, self._options.arch + '_instrumented_out')
        self._uninstrumented_out_dir = os.path.join(
            self._BUILD_ROOT, self._options.arch + '_uninstrumented_out')

        if options.profile:
            output_directory = os.path.join(self._instrumented_out_dir,
                                            'Release')
            host_profile_dir = os.path.join(output_directory, 'profile_data')
            # Note: the old code first assigned defaults (TEST_URL, wpr on,
            # no user simulation) and then immediately overwrote them with
            # the option values below; those dead stores have been removed.
            urls = options.urls
            use_wpr = not options.no_wpr
            simulate_user = options.simulate_user
            self._profiler = profile_android_startup.AndroidProfileTool(
                output_directory,
                host_profile_dir,
                use_wpr,
                urls,
                simulate_user,
                device=options.device)

        self._output_data = {}
        self._step_recorder = StepRecorder(options.buildbot)
        self._compiler = None
        assert issubclass(orderfile_updater_class, OrderfileUpdater)
        self._orderfile_updater = orderfile_updater_class(
            self._CLANK_REPO, self._step_recorder, options.branch,
            options.netrc)
        assert os.path.isdir(
            constants.DIR_SOURCE_ROOT), 'No src directory found'
        symbol_extractor.SetArchitecture(options.arch)

    @staticmethod
    def _RemoveBlanks(src_file, dest_file):
        """A utility to remove blank lines from a file.

    Args:
      src_file: The name of the file to remove the blanks from.
      dest_file: The name of the file to write the output without blanks.
    """
        assert src_file != dest_file, 'Source and destination need to be distinct'

        # Use `with` so both handles are always closed. The previous
        # try/finally raised a NameError in the finally block (masking the
        # real error) whenever the first open() failed, because `src` and
        # `dest` were never bound.
        with open(src_file, 'r') as src:
            with open(dest_file, 'w') as dest:
                for line in src:
                    if line and not line.isspace():
                        dest.write(line)

    def _GenerateAndProcessProfile(self):
        """Invokes a script to merge the per-thread traces into one file."""
        self._step_recorder.BeginStep('Generate Profile Data')
        files = []
        try:
            # Raise verbosity while profiling so failures are easier to
            # diagnose; restored in the finally block below.
            logging.getLogger().setLevel(logging.DEBUG)
            files = self._profiler.CollectProfile(
                self._compiler.chrome_apk, constants.PACKAGE_INFO['chrome'])
            self._step_recorder.BeginStep('Process profile')
            assert os.path.exists(self._compiler.lib_chrome_so)
            offsets = process_profiles.GetReachedOffsetsFromDumpFiles(
                files, self._compiler.lib_chrome_so)
            if not offsets:
                raise Exception('No profiler offsets found in {}'.format(
                    '\n'.join(files)))
            with open(self._MERGED_CYGLOG_FILENAME, 'w') as f:
                f.write('\n'.join(map(str, offsets)))
        except Exception:
            # Preserve the raw dump files for post-mortem analysis, then
            # re-raise the original error.
            for f in files:
                self._SaveForDebugging(f)
            raise
        finally:
            self._profiler.Cleanup()
            logging.getLogger().setLevel(logging.INFO)

        try:
            command_args = [
                '--target-arch=' + self._options.arch,
                '--native-library=' + self._compiler.lib_chrome_so,
                '--output=' + self._GetUnpatchedOrderfileFilename()
            ]
            command_args.append('--reached-offsets=' +
                                self._MERGED_CYGLOG_FILENAME)
            self._step_recorder.RunCommand([self._CYGLOG_TO_ORDERFILE_SCRIPT] +
                                           command_args)
        except CommandError:
            # Keep the inputs that caused the conversion failure.
            self._SaveForDebugging(self._MERGED_CYGLOG_FILENAME)
            self._SaveForDebuggingWithOverwrite(self._compiler.lib_chrome_so)
            raise

    def _DeleteTempFiles(self):
        """Deletes intermediate step output files."""
        print 'Delete %s' % (self._MERGED_CYGLOG_FILENAME)
        if os.path.isfile(self._MERGED_CYGLOG_FILENAME):
            os.unlink(self._MERGED_CYGLOG_FILENAME)

    def _PatchOrderfile(self):
        """Patches the orderfile using clean version of libchrome.so."""
        self._step_recorder.BeginStep('Patch Orderfile')
        unpatched = self._GetUnpatchedOrderfileFilename()
        patched = self._GetPathToOrderfile()
        patch_orderfile.GeneratePatchedOrderfile(
            unpatched, self._compiler.lib_chrome_so, patched)

    def _VerifySymbolOrder(self):
        """Runs check_orderfile.py and fails the step on a nonzero exit."""
        self._step_recorder.BeginStep('Verify Symbol Order')
        command = [
            self._CHECK_ORDERFILE_SCRIPT,
            self._compiler.lib_chrome_so,
            self._GetPathToOrderfile(),
            '--target-arch=' + self._options.arch,
        ]
        status = self._step_recorder.RunCommand(
            command, constants.DIR_SOURCE_ROOT, raise_on_error=False)
        if status:
            self._step_recorder.FailStep(
                'Orderfile check returned %d.' % status)

    def _RecordHash(self, file_name):
        """Records the hash of the file into the output_data dictionary."""
        key = os.path.basename(file_name) + '.sha1'
        self._output_data[key] = _GenerateHash(file_name)

    def _SaveFileLocally(self, file_name, file_sha1):
        """Saves the file to a temporary location and prints the sha1sum."""
        if not os.path.exists(self._DIRECTORY_FOR_DEBUG_FILES):
            os.makedirs(self._DIRECTORY_FOR_DEBUG_FILES)
        shutil.copy(file_name, self._DIRECTORY_FOR_DEBUG_FILES)
        print 'File: %s, saved in: %s, sha1sum: %s' % (
            file_name, self._DIRECTORY_FOR_DEBUG_FILES, file_sha1)

    def _SaveForDebugging(self, filename):
        """Uploads the file to cloud storage or saves to a temporary location."""
        file_sha1 = _GenerateHash(filename)
        if not self._options.buildbot:
            self._SaveFileLocally(filename, file_sha1)
        else:
            print 'Uploading file for debugging: ' + filename
            self._orderfile_updater.UploadToCloudStorage(
                filename, use_debug_location=True)

    def _SaveForDebuggingWithOverwrite(self, file_name):
        """Uploads and overwrites the file in cloud storage or copies locally.

    Should be used for large binaries like lib_chrome_so.

    Args:
      file_name: (str) File to upload.
    """
        file_sha1 = _GenerateHash(file_name)
        if not self._options.buildbot:
            self._SaveFileLocally(file_name, file_sha1)
        else:
            print 'Uploading file for debugging: %s, sha1sum: %s' % (file_name,
                                                                     file_sha1)
            upload_location = '%s/%s' % (self._CLOUD_STORAGE_BUCKET_FOR_DEBUG,
                                         os.path.basename(file_name))
            self._step_recorder.RunCommand(
                ['gsutil.py', 'cp', file_name, 'gs://' + upload_location])
            print('Uploaded to: https://sandbox.google.com/storage/' +
                  upload_location)

    def _MaybeArchiveOrderfile(self, filename):
        """In buildbot configuration, uploads the generated orderfile to
    Google Cloud Storage.

    Args:
      filename: (str) Orderfile to upload.
    """
        # Always record the hash first so the file can be fetched later.
        self._step_recorder.BeginStep('Compute hash for ' + filename)
        self._RecordHash(filename)
        if not self._options.buildbot:
            return
        self._step_recorder.BeginStep('Archive ' + filename)
        self._orderfile_updater.UploadToCloudStorage(
            filename, use_debug_location=False)

    def _GetHashFilePathAndContents(self, base_file):
        """Gets the name and content of the hash file created from uploading the
    given file.

    Args:
      base_file: The file that was uploaded to cloud storage.

    Returns:
      A tuple of the hash file name, relative to the clank repo path, and the
      content, which should be the sha1 hash of the file
      ('base_file.sha1', hash)
    """
        hash_file = '%s.sha1' % base_file
        rel_name = os.path.relpath(hash_file, self._CLANK_REPO)
        with open(hash_file, 'r') as hash_fh:
            contents = hash_fh.read()
        return (rel_name, contents)

    def Generate(self):
        """Generates and maybe uploads an orderfile.

        Exactly one input source must be selected: either an instrumented
        profiling run (--profile) or a manually supplied list of symbol
        offsets (--manual-symbol-offsets). Optionally patches the orderfile
        against an uninstrumented build and, on the buildbot, commits the
        resulting file hashes.

        Returns:
          True iff no step recorded an error.
        """
        profile_uploaded = False
        orderfile_uploaded = False

        # The two input sources are mutually exclusive and one is required.
        assert (bool(self._options.profile)
                ^ bool(self._options.manual_symbol_offsets))

        if self._options.profile:
            try:
                _UnstashOutputDirectory(self._instrumented_out_dir)
                self._compiler = ClankCompiler(
                    self._instrumented_out_dir, self._step_recorder,
                    self._options.arch, self._options.jobs,
                    self._options.max_load, self._options.use_goma,
                    self._options.goma_dir)
                self._compiler.CompileChromeApk(True)
                self._GenerateAndProcessProfile()
                self._MaybeArchiveOrderfile(
                    self._GetUnpatchedOrderfileFilename())
                profile_uploaded = True
            finally:
                # Always restore the output directory, even on failure.
                self._DeleteTempFiles()
                _StashOutputDirectory(self._instrumented_out_dir)
        elif self._options.manual_symbol_offsets:
            assert self._options.manual_libname
            assert self._options.manual_objdir
            # open() replaces the Python-2-only file() builtin, and iterating
            # the handle replaces the removed/deprecated xreadlines(); both
            # are behavior-identical and also work under Python 3.
            with open(self._options.manual_symbol_offsets) as f:
                symbol_offsets = [int(x) for x in f]
            processor = process_profiles.SymbolOffsetProcessor(
                self._options.manual_libname)
            generator = cyglog_to_orderfile.OffsetOrderfileGenerator(
                processor,
                cyglog_to_orderfile.ObjectFileProcessor(
                    self._options.manual_objdir))
            ordered_sections = generator.GetOrderedSections(symbol_offsets)
            if not ordered_sections:  # Either None or empty is a problem.
                raise Exception('Failed to get ordered sections')
            with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile:
                orderfile.write('\n'.join(ordered_sections))

        if self._options.patch:
            if self._options.profile:
                self._RemoveBlanks(self._GetUnpatchedOrderfileFilename(),
                                   self._GetPathToOrderfile())
            try:
                _UnstashOutputDirectory(self._uninstrumented_out_dir)
                self._compiler = ClankCompiler(
                    self._uninstrumented_out_dir, self._step_recorder,
                    self._options.arch, self._options.jobs,
                    self._options.max_load, self._options.use_goma,
                    self._options.goma_dir)
                self._compiler.CompileLibchrome(False)
                self._PatchOrderfile()
                # Because identical code folding is a bit different with and without
                # the orderfile build, we need to re-patch the orderfile with code
                # folding as close to the final version as possible.
                self._compiler.CompileLibchrome(False, force_relink=True)
                self._PatchOrderfile()
                self._compiler.CompileLibchrome(False, force_relink=True)
                self._VerifySymbolOrder()
                self._MaybeArchiveOrderfile(self._GetPathToOrderfile())
            finally:
                _StashOutputDirectory(self._uninstrumented_out_dir)
            orderfile_uploaded = True

        # Only commit hashes on the buildbot, with credentials, and when no
        # earlier step failed.
        if (self._options.buildbot and self._options.netrc
                and not self._step_recorder.ErrorRecorded()):
            unpatched_orderfile_filename = (
                self._GetUnpatchedOrderfileFilename()
                if profile_uploaded else None)
            orderfile_filename = (self._GetPathToOrderfile()
                                  if orderfile_uploaded else None)
            self._orderfile_updater.CommitFileHashes(
                unpatched_orderfile_filename, orderfile_filename)

        self._step_recorder.EndStep()
        return not self._step_recorder.ErrorRecorded()

    def GetReportingData(self):
        """Get a dictionary of reporting data (timings, output hashes)"""
        # Fold the step timings into the accumulated output data before
        # handing the dictionary back to the caller.
        timings = self._step_recorder.timings
        self._output_data['timings'] = timings
        return self._output_data