Example #1
def _build_adb(target, force):
    """Builds the adb executable, if needed.

    The resulting executable will be placed at out/adb/<target>/adb.
    """

    build_dir = os.path.join(ADB_OUTPUT_DIR, target)
    stamp_file_path = os.path.join(build_dir, 'STAMP')
    stamp_file = build_common.StampFile(ADB_SOURCE_VERSION,
                                        stamp_file_path,
                                        force=force)
    if stamp_file.is_up_to_date():
        return

    gcc = GCC_NAMES[target]
    try:
        timer = build_common.SimpleTimer()
        timer.start('Building the adb executable for %s' % target, show=True)
        subprocess.check_call(
            ['make', '-j16', '-f', MAKEFILE, 'CC=' + gcc, 'TARGET=' + target],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        timer.done()
    except Exception as exception:
        print exception
        raise Exception('Failed to build the adb executable')

    stamp_file.update()
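
All of the adb helpers in this listing (Examples #1, #2 and #7) use build_common.StampFile to skip work that is already up to date. That module is not shown here; the class below is a minimal sketch of the pattern, assuming the stamp file simply stores a version string that is compared on the next run. The real StampFile may behave differently.

import os


class _VersionStampFile(object):
    """Hypothetical stand-in for build_common.StampFile (sketch only)."""

    def __init__(self, version, path, force=False):
        self._version = version
        self._path = path
        self._force = force

    def is_up_to_date(self):
        # Out of date when forced, when no stamp exists, or when the stored
        # version string differs from the expected one.
        if self._force or not os.path.exists(self._path):
            return False
        with open(self._path) as stamp:
            return stamp.read().strip() == self._version

    def update(self):
        # Record the current version so the next run can be skipped.
        stamp_dir = os.path.dirname(self._path)
        if stamp_dir and not os.path.isdir(stamp_dir):
            os.makedirs(stamp_dir)
        with open(self._path, 'w') as stamp:
            stamp.write(self._version + '\n')
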
Example #2
def _download_adb_source(force):
    """Downloads the adb source code with git, if needed.

    The source tree will be placed at out/adb/src.
    """
    source_dir = os.path.join(ADB_OUTPUT_DIR, 'src')
    stamp_file_path = os.path.join(source_dir, 'STAMP')
    stamp_file = build_common.StampFile(ADB_SOURCE_VERSION,
                                        stamp_file_path,
                                        force=force)
    if stamp_file.is_up_to_date():
        return

    if os.path.exists(source_dir):
        file_util.rmtree(source_dir)

    try:
        timer = build_common.SimpleTimer()
        timer.start('Downloading the adb source code', show=True)
        _run_git_clone(BRANCH, SYSTEM_CORE_URL,
                       os.path.join(ADB_OUTPUT_DIR, 'src/system/core'))
        _run_git_clone(BRANCH, ZLIB_URL,
                       os.path.join(ADB_OUTPUT_DIR, 'src/external/zlib'))
        timer.done()
    except Exception as exception:
        print exception
        raise Exception('Failed to download the adb source code')

    stamp_file.update()
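
_run_git_clone is referenced above but not included in this listing. A minimal sketch of what such a helper might do, assuming it is a thin wrapper around a shallow clone of one branch (the real helper may pass different git options):

import subprocess


def _run_git_clone(branch, url, destination_dir):
    # Hypothetical helper: clone only |branch| of |url| into |destination_dir|.
    # --depth 1 keeps the checkout small since only the sources are needed.
    subprocess.check_call(
        ['git', 'clone', '--depth', '1', '--branch', branch, url,
         destination_dir])
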
Example #3
def generate_ninjas():
  needs_clobbering, cache_to_save = _set_up_generate_ninja()
  ninja_list, independent_ninja_cache = _generate_independent_ninjas(
      needs_clobbering)
  cache_to_save.extend(independent_ninja_cache)
  ninja_list.extend(
      _generate_shared_lib_depending_ninjas(ninja_list))
  ninja_list.extend(_generate_dependent_ninjas(ninja_list))

  top_level_ninja = _generate_top_level_ninja(ninja_list)
  ninja_list.append(top_level_ninja)

  # Run verification before emitting to files.
  _verify_ninja_generator_list(ninja_list)

  # Emit each ninja script to a file.
  timer = build_common.SimpleTimer()
  timer.start('Emitting ninja scripts', OPTIONS.verbose())
  for ninja in ninja_list:
    ninja.emit()
  top_level_ninja.emit_depfile()
  top_level_ninja.cleanup_out_directories(ninja_list)
  timer.done()

  if OPTIONS.enable_config_cache():
    for cache_object, cache_path in cache_to_save:
      cache_object.save_to_file(cache_path)
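
build_common.SimpleTimer appears in every example but is not part of this listing. The class below is a hypothetical sketch of its apparent interface, assuming start() optionally prints the message and done() reports the elapsed wall-clock time; the real implementation may differ.

import time


class _SimpleTimer(object):
    """Hypothetical stand-in for build_common.SimpleTimer (sketch only)."""

    def __init__(self):
        self._start_time = None
        self._message = None
        self._show = False

    def start(self, message, show=False):
        # Remember the message and when we started; optionally announce it.
        self._start_time = time.time()
        self._message = message
        self._show = show
        if show:
            print message

    def done(self):
        # Report how long the step took, but only if start() asked us to show.
        if self._show and self._start_time is not None:
            print '%s: done in %.2fs' % (self._message,
                                         time.time() - self._start_time)
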
Example #4
def _generate_shared_lib_depending_ninjas(ninja_list):
  timer = build_common.SimpleTimer()

  timer.start('Generating plugin and packaging ninjas', OPTIONS.verbose())
  # We must generate plugin/nexe ninjas after make->ninja lazy generation
  # so that we have the full list of production shared libraries to
  # pass to the load test.
  # These modules depend on shared libraries generated in the previous phase.
  production_shared_libs = (
      ninja_generator.NinjaGenerator.get_production_shared_libs(ninja_list[:]))
  generator_list = list(_list_ninja_generators(
      _config_loader, 'generate_shared_lib_depending_ninjas'))

  if OPTIONS.run_tests():
    generator_list.extend(_list_ninja_generators(
        _config_loader, 'generate_shared_lib_depending_test_ninjas'))

  result_list = ninja_generator_runner.run_in_parallel(
      [ninja_generator_runner.GeneratorTask(
          config_context,
          (generator, production_shared_libs))
       for config_context, generator in generator_list],
      OPTIONS.configure_jobs())
  ninja_list = []
  for config_result in result_list:
    ninja_list.extend(config_result.generated_ninjas)
  ninja_list.sort(key=lambda ninja: ninja.get_module_name())

  timer.done()
  return ninja_list
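
Examples #3, #4, #5 and #8 all hand GeneratorTask objects to ninja_generator_runner.run_in_parallel, which is not shown here. The sketch below only illustrates the general fan-out pattern with multiprocessing.Pool, using a hypothetical task type whose run() method returns a result object; the real runner and GeneratorTask interface may differ.

import multiprocessing


def _run_single_task(task):
    # Worker entry point: run one task and hand its result back to the parent.
    return task.run()


def run_in_parallel(task_list, num_jobs):
    """Hypothetical sketch: run every task and collect the result objects."""
    if num_jobs <= 1:
        return [_run_single_task(task) for task in task_list]
    pool = multiprocessing.Pool(processes=num_jobs)
    try:
        # Tasks and results must be picklable to cross process boundaries.
        return pool.map(_run_single_task, task_list)
    finally:
        pool.close()
        pool.join()
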
Example #5
def _generate_dependent_ninjas(ninja_list):
  """Generate the stage of ninjas coming after all executables."""
  timer = build_common.SimpleTimer()

  timer.start('Generating dependent ninjas', OPTIONS.verbose())

  root_dir_install_all_targets = []
  for n in ninja_list:
    root_dir_install_all_targets.extend(build_common.get_android_fs_path(p) for
                                        p in n._root_dir_install_targets)

  generator_list = _list_ninja_generators(_config_loader,
                                          'generate_binaries_depending_ninjas')
  result_list = ninja_generator_runner.run_in_parallel(
      [ninja_generator_runner.GeneratorTask(
          config_context,
          (generator, root_dir_install_all_targets))
          for config_context, generator in generator_list],
      OPTIONS.configure_jobs())
  dependent_ninjas = []
  for config_result in result_list:
    dependent_ninjas.extend(config_result.generated_ninjas)

  notice_ninja = ninja_generator.NoticeNinjaGenerator('notices')
  notice_ninja.build_notices(ninja_list + dependent_ninjas)
  dependent_ninjas.append(notice_ninja)

  all_test_lists_ninja = ninja_generator.NinjaGenerator('all_test_lists')
  all_test_lists_ninja.build_all_test_lists(ninja_list)
  dependent_ninjas.append(all_test_lists_ninja)

  all_unittest_info_ninja = ninja_generator.NinjaGenerator('all_unittest_info')
  all_unittest_info_ninja.build_all_unittest_info(ninja_list)
  dependent_ninjas.append(all_unittest_info_ninja)

  timer.done()
  return dependent_ninjas
Example #6
def create_staging():
    timer = build_common.SimpleTimer()
    timer.start('Staging source files', True)

    staging_root = build_common.get_staging_root()

    # Store where all the old staging links pointed so we can compare after.
    old_staging_links = _get_link_targets(staging_root)

    if os.path.lexists(staging_root):
        file_util.rmtree(staging_root)

    _create_symlink_tree(_MODS_DIR, _THIRD_PARTY_DIR, staging_root)

    # internal/ is an optional checkout
    if build_options.OPTIONS.internal_apks_source_is_internal():
        assert build_common.has_internal_checkout()
        for name in os.listdir(_INTERNAL_THIRD_PARTY_PATH):
            if os.path.exists(os.path.join(_THIRD_PARTY_DIR, name)):
                raise Exception(
                    'Name conflict between internal/third_party and '
                    'third_party: ' + name)
        _create_symlink_tree(_INTERNAL_MODS_PATH, _INTERNAL_THIRD_PARTY_PATH,
                             staging_root)
        subprocess.check_call('internal/build/fix_staging.py')

    # src/ is not overlaid on any directory.
    _create_symlink_tree(_SRC_DIR, None, os.path.join(staging_root, 'src'))

    # Update modification time for files that do not point to the same location
    # that they pointed to in the previous tree to make sure they are built.

    if old_staging_links:
        new_staging_links = _get_link_targets(staging_root)

        # Every file (not a directory) under staging is in one of the following
        # two states:
        #
        #   F. The file itself is a symbolic link to a file under third_party or
        #      mods.
        #   D. Some ancestor directory is a symbolic link to a directory under
        #      third_party. (It is important that we do not create symbolic links to
        #      directories under mods)
        #
        # Let us say a file falls under "X-Y" case if it was in state X before
        # re-staging and now in state Y. For all 4 possible cases, we can check if
        # the actual destination of the file changed or not in the following way:
        #
        #   F-F: We can just compare the target of the link.
        #   F-D, D-F: The target may have changed, but it needs some complicated
        #        computation to check. We treat them as changed to be conservative.
        #   D-D: We can leave it as-is since both point to third_party.
        #
        # So we want to visit all files in state F either in old staging or new
        # staging. For this purpose we can iterate through |*_staging_links| as
        # they contain all files in state F.
        #
        # Note that |*_staging_links| may contain directory symbolic links, but
        # it is okay to visit them too because directory timestamps do not matter.
        # Actually excluding directory symbolic links from |old_staging_links| is
        # difficult because link targets might be already removed.

        for path in set(list(old_staging_links) + list(new_staging_links)):
            if path in old_staging_links and path in new_staging_links:
                should_touch = old_staging_links[path] != new_staging_links[
                    path]
            else:
                should_touch = True
            if should_touch and os.path.exists(path):
                os.utime(path, None)

    timer.done()
    return True
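
create_staging depends on _get_link_targets, which is not included in this listing. A minimal sketch of what it might look like, assuming it maps every symlink under the staging root to its raw target string (targets are not resolved, so dangling links from a removed tree are still comparable):

import os


def _get_link_targets(root):
    # Hypothetical helper: {symlink path: os.readlink() target} for every
    # symlink under |root|. Returns an empty dict if |root| does not exist.
    link_targets = {}
    if not os.path.isdir(root):
        return link_targets
    for dir_path, dir_names, file_names in os.walk(root):
        for name in dir_names + file_names:
            path = os.path.join(dir_path, name)
            if os.path.islink(path):
                link_targets[path] = os.readlink(path)
    return link_targets
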
Example #7
def _download_adb(target, force):
    """Downloads the adb executable for Windows or Mac, if needed.

    The downloaded executable will be placed at out/adb/win-x86_64/adb.exe or
    out/adb/mac-x86_64/adb.
    """

    # URL looks like 'https://dl.google.com/android/adt/adt-xxx.zip'
    url = DEVTOOLS_URLS[target]

    output_dir = os.path.join(ADB_OUTPUT_DIR, target)
    stamp_file_path = os.path.join(output_dir, 'STAMP')
    stamp_file = build_common.StampFile(
        url,  # Use URL as the version.
        stamp_file_path,
        force=force)
    if stamp_file.is_up_to_date():
        return

    if os.path.exists(output_dir):
        file_util.rmtree(output_dir)
    os.makedirs(output_dir)

    is_windows = target.startswith('win-')
    adb_base_name = 'adb.exe' if is_windows else 'adb'
    # The output file name looks like 'out/adb/win-x86_64/adb.exe'
    adb_output_file_name = os.path.join(output_dir, adb_base_name)

    zip_file_name = os.path.basename(urlparse.urlparse(url).path)
    zip_name = os.path.splitext(zip_file_name)[0]
    # The adb path in zip looks like 'adt-xxx/sdk/platform-tools/adb.exe'
    adb_path_in_zip = os.path.join(zip_name, 'sdk/platform-tools',
                                   adb_base_name)
    # For Windows, AdbWinApi.dll is also needed.
    if is_windows:
        dll_path_in_zip = os.path.join(zip_name,
                                       'sdk/platform-tools/AdbWinApi.dll')
        dll_output_file_name = os.path.join(output_dir, 'AdbWinApi.dll')

    try:
        timer = build_common.SimpleTimer()
        timer.start('Downloading the adb executable for %s' % target,
                    show=True)
        with contextlib.closing(
                urllib2.urlopen(url)) as stream, (zipfile.ZipFile(
                    cStringIO.StringIO(stream.read()))) as zip_archive:
            with open(adb_output_file_name, 'w') as adb_file:
                # Don't use zipfile.extract() as it creates sub directories.
                content = zip_archive.read(adb_path_in_zip)
                adb_file.write(content)
            os.chmod(adb_output_file_name, stat.S_IRWXU)
            # Also extract AdbWinApi.dll for Windows.
            if is_windows:
                with open(dll_output_file_name, 'w') as dll_file:
                    content = zip_archive.read(dll_path_in_zip)
                    dll_file.write(content)
                os.chmod(dll_output_file_name, stat.S_IRWXU)
        timer.done()
    except Exception as exception:
        print exception
        raise Exception('Failed to download the adb executable')

    stamp_file.update()
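
The download code above targets Python 2 (urllib2, cStringIO, print statements). For reference, the same in-memory download-and-extract step written against the Python 3 standard library might look like the sketch below; the member path handling is unchanged, and _extract_member is a hypothetical name.

import io
import zipfile
from urllib.request import urlopen


def _extract_member(url, member_path, output_file_name):
    # Download the whole archive into memory, then pull out a single member
    # without recreating its directory tree (unlike ZipFile.extract()).
    with urlopen(url) as stream:
        with zipfile.ZipFile(io.BytesIO(stream.read())) as zip_archive:
            with open(output_file_name, 'wb') as out_file:
                out_file.write(zip_archive.read(member_path))
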
Example #8
def _generate_independent_ninjas(needs_clobbering):
  timer = build_common.SimpleTimer()

  # Invoke the ninja generators that the config modules define by name.
  # The set is unordered; test generators are only included when
  # OPTIONS.run_tests() is set.
  timer.start('Generating independent generate_ninjas', True)

  generator_list = list(_list_ninja_generators(
      _config_loader, 'generate_ninjas'))
  if OPTIONS.run_tests():
    generator_list.extend(_list_ninja_generators(
        _config_loader, 'generate_test_ninjas'))

  task_list = []
  cached_result_list = []
  cache_miss = {}

  for config_context, generator in generator_list:
    cache_path = _get_cache_file_path(config_context.config_name,
                                      config_context.entry_point)
    config_cache = None
    if OPTIONS.enable_config_cache() and not needs_clobbering:
      config_cache = _load_config_cache_from_file(cache_path)

    if config_cache is not None and config_cache.check_cache_freshness():
      cached_result = config_cache.to_config_result()
      if cached_result is not None:
        cached_result_list.append(cached_result)
        continue

    task_list.append(ninja_generator_runner.GeneratorTask(
        config_context, generator))
    cache_miss[cache_path] = config_cache

  result_list = ninja_generator_runner.run_in_parallel(
      task_list, OPTIONS.configure_jobs())

  aggregated_result = {}
  ninja_list = []
  for config_result in result_list:
    cache_path = _get_cache_file_path(config_result.config_name,
                                      config_result.entry_point)
    ninja_list.extend(config_result.generated_ninjas)
    if cache_path in aggregated_result:
      aggregated_result[cache_path].merge(config_result)
    else:
      aggregated_result[cache_path] = config_result

  for cached_result in cached_result_list:
    ninja_list.extend(cached_result.generated_ninjas)

  cache_to_save = []
  if OPTIONS.enable_config_cache():
    for cache_path, config_result in aggregated_result.iteritems():
      config_cache = cache_miss[cache_path]
      if config_cache is None:
        config_cache = _config_cache_from_config_result(config_result)
      else:
        config_cache.refresh_with_config_result(config_result)

      cache_to_save.append((config_cache, cache_path))

  ninja_list.sort(key=lambda ninja: ninja.get_module_name())
  timer.done()
  return ninja_list, cache_to_save