def get_dependencies_for_integration_tests():
  """Gets the paths of all python files needed for the integration tests.

  Assumes the currently loaded modules are exactly the set needed to run the
  integration tests, and narrows that set down to the files contained in the
  project root directory. Returned paths are relative to the project root.
  """
  root_path = build_common.get_arc_root()
  deps = []
  for module in sys.modules.itervalues():
    # Built-ins and other special modules carry no backing file.
    if not module or not hasattr(module, '__file__'):
      continue
    path = module.__file__
    # Keep only modules that live inside the project tree.
    if not path or not path.startswith(root_path):
      continue
    # Map compiled .pyc files back to their .py sources.
    if path.endswith('.pyc'):
      path = path[:-1]
    # Drop the root prefix plus the path separator that follows it.
    deps.append(path[len(root_path) + 1:])
  return deps
def _set_up_git_hooks():
  """Symlinks our git hooks into .git/hooks and removes obsolete ones.

  These git hooks do not make sense for the open source repo because they:
  1) lint the source, but that was already done when committed internally,
     and we will run 'ninja all' as a test step before committing to open
     source.
  2) add fields to the commit message for the internal dev workflow.
  """
  if open_source.is_open_source_repo():
    return
  script_dir = os.path.dirname(__file__)
  hooks = {
      'pre-push': os.path.join(script_dir, 'git_pre_push.py'),
      'prepare-commit-msg': os.path.join(script_dir, 'git_prepare_commit.py'),
      'commit-msg': 'third_party/gerrit/commit-msg',
  }
  # The pre-commit hook was replaced by the pre-push hook.
  obsolete_hooks = ['pre-commit']
  git_hooks_dir = os.path.join(build_common.get_arc_root(), '.git', 'hooks')
  for hook_name, source_path in hooks.iteritems():
    file_util.create_link(os.path.join(git_hooks_dir, hook_name),
                          source_path, overwrite=True)
  for hook_name in obsolete_hooks:
    stale_link = os.path.join(git_hooks_dir, hook_name)
    if os.path.lexists(stale_link):
      os.unlink(stale_link)
def _generate_checkdeps_ninjas():
  """Emits ninja rules that run checkdeps over selected src/ directories."""
  if open_source.is_open_source_repo():
    # Do not run checkdeps on the open source repo since some directories
    # checked are not included there.
    return
  gen = ninja_generator.NinjaGenerator('checkdeps', target_groups=['lint'])
  script = staging.as_staging('native_client/tools/checkdeps/checkdeps.py')
  gen.rule('checkdeps',
           command=('%s -v --root=$root $in_dir > $out.tmp 2>&1 '
                    '&& mv $out.tmp $out '
                    '|| (cat $out.tmp; rm $out.tmp; exit 1)' % script),
           description='checkdeps $in_dir')
  # Detect bad #include lines in src/.
  # TODO(crbug.com/323786): Check #include lines in mods/ too.
  src_dir = os.path.join(build_common.get_arc_root(), 'src')
  src_deps = os.path.join(src_dir, 'DEPS')
  # TODO(crbug.com/323786): Check other directories in src/ too.
  for subdir in ['common', 'ndk_translation', 'posix_translation']:
    implicit_deps = build_common.find_all_files(
        os.path.join(src_dir, subdir),
        suffixes=['h', 'c', 'cc', 'cpp', 'java', 'DEPS'],
        include_tests=True, use_staging=False)
    implicit_deps.extend([script, src_deps])
    out = os.path.join(build_common.OUT_DIR, 'checkdeps_%s.txt' % subdir)
    gen.build(out, 'checkdeps', [],
              variables={'root': src_dir, 'in_dir': subdir},
              implicit=implicit_deps)
def __init__(self):
  # Error lines begin with the absolute arc root path; the filter strips
  # that prefix so reported paths are relative to the project root.
  root_filter = '^' + re.escape(build_common.get_arc_root()) + '/(.*)'
  super(JsLinter, self).__init__(
      'gjslint',
      target_groups=[_GROUP_JS],
      error_line_filter=root_filter)
def _generate_checkdeps_ninjas():
  """Generates ninja build rules running checkdeps on src/ subdirectories."""
  if open_source.is_open_source_repo():
    # checkdeps examines directories that are not part of the open source
    # repo, so skip it there.
    return
  n = ninja_generator.NinjaGenerator('checkdeps', target_groups=['lint'])
  checkdeps_script = staging.as_staging(
      'native_client/tools/checkdeps/checkdeps.py')
  command = ('%s -v --root=$root $in_dir > $out.tmp 2>&1 '
             '&& mv $out.tmp $out '
             '|| (cat $out.tmp; rm $out.tmp; exit 1)' % checkdeps_script)
  n.rule('checkdeps', command=command, description='checkdeps $in_dir')
  # Detect bad #include lines in src/.
  # TODO(crbug.com/323786): Check #include lines in mods/ too.
  src_dir = os.path.join(build_common.get_arc_root(), 'src')
  src_deps = os.path.join(src_dir, 'DEPS')
  # TODO(crbug.com/323786): Check other directories in src/ too.
  checked_dirs = ['common', 'ndk_translation', 'posix_translation']
  for d in checked_dirs:
    inputs = build_common.find_all_files(
        os.path.join(src_dir, d),
        suffixes=['h', 'c', 'cc', 'cpp', 'java', 'DEPS'],
        include_tests=True,
        use_staging=False)
    inputs.extend([checkdeps_script, src_deps])
    n.build(os.path.join(build_common.OUT_DIR, 'checkdeps_%s.txt' % d),
            'checkdeps', [],
            variables={'root': src_dir, 'in_dir': d},
            implicit=inputs)
def _load_internal(path_list):
  """Loads every file in |path_list| as an appropriately named submodule.

  Modules are loaded even when parent packages are missing (e.g. due to a
  missing __init__.py); empty placeholder packages are created in that case.

  Args:
    path_list: a list of files to be loaded, relative to ARC_ROOT.

  Returns:
    A list of loaded modules.
  """
  # For safety, hold the global import lock for the whole operation.
  imp.acquire_lock()
  try:
    modules = []
    for rel_path in path_list:
      rel_path = os.path.normpath(rel_path)
      abs_path = os.path.join(build_common.get_arc_root(), rel_path)
      module_name = os.path.splitext(rel_path)[0].replace(os.sep, '.')
      if '.' in module_name:
        # Make sure every ancestor package exists before loading the module.
        _import_package(module_name.rsplit('.', 1)[0],
                        os.path.dirname(abs_path))
      with open(abs_path, 'rb') as config_file:
        modules.append(imp.load_source(module_name, abs_path, config_file))
    return modules
  finally:
    imp.release_lock()
def __init__(self):
  super(JsLinter, self).__init__(
      'gjslint',
      target_groups=[_GROUP_JS],
      # Strip the absolute arc root prefix from reported error lines.
      error_line_filter='^%s/(.*)' % re.escape(build_common.get_arc_root()))
def __init__(self, deps_file_path, unpacked_final_path, url=None,
             link_subdir=None, download_method=None, unpack_method=None,
             cache_base_path=None, cache_history_size=None):
  """Sets up the basic configuration for this package.

  |deps_file_path| is the relative path to the DEPS.XXXX file to use for
  this package.
  |unpacked_final_path| is the path the unpacked package should appear at.
  |url| is the URL to use to retrieve the download. If not specified (the
  typical case), the URL is taken from the first line of the DEPS file.
  |link_subdir| is the subdirectory of the unpacked package from the cache
  that should appear at the final location. This is useful if the archive
  unpacks to a subdirectory.
  |download_method| is a function to call taking a pair of arguments,
  (URL, archive_path), which should retrieve the package given its URL,
  and write the contents as a file to archive_path.
  |unpack_method| is a function to call taking a pair of arguments,
  (archive_path, destination_path), to extract the archive file to the
  indicated destination.
  |cache_base_path| allows a derived class to choose the cache path
  explicitly, but is really only meant for the unittest.
  |cache_history_size| allows a derived class to choose the cache history
  size, but it is really only meant for the unittest.
  """
  if cache_base_path:
    cache_base_path = os.path.abspath(cache_base_path)
  # The package name is derived from the final directory's basename.
  self._name = os.path.basename(unpacked_final_path)
  self._cache_base_path = cache_base_path or _DEFAULT_CACHE_BASE_PATH
  self._cache_history_size = cache_history_size or _DEFAULT_CACHE_HISTORY_SIZE
  # Both configured paths are stored as absolute paths under ARC_ROOT.
  self._deps_file_path = os.path.join(build_common.get_arc_root(),
                                      deps_file_path)
  self._unpacked_final_path = os.path.join(build_common.get_arc_root(),
                                           unpacked_final_path)
  self._link_subdir = link_subdir or '.'
  # The default methods are factories invoked here to produce callables.
  self._download_method = download_method or default_download_url()
  self._unpack_method = unpack_method or unpack_zip_archive()
  # NOTE(review): this reads via the ARC_ROOT-relative |deps_file_path|,
  # not the absolute self._deps_file_path computed above -- presumably
  # read_metadata_file resolves relative paths itself; confirm.
  self._deps_file_lines = file_util.read_metadata_file(deps_file_path)
  self._url = url or self._deps_file_lines[0]
  # The cache entry path is keyed off the DEPS file contents.
  self._unpacked_cache_path = (
      self._get_cache_entry_path(self._deps_file_lines))
def as_real_path(input_path):
  """Converts an input path to a real path.

  example input: android/frameworks/base/...
  example real path: mods/android/frameworks/base/...
  """
  resolved = os.path.realpath(as_staging(input_path))
  return os.path.relpath(resolved, build_common.get_arc_root())
def _get_archived_file_paths():
  """Returns the file paths to be archived."""
  paths = _collect_descendants(
      remote_executor_util.get_integration_test_deps())
  paths |= set(run_integration_tests.get_dependencies_for_integration_tests())
  adb_rel_path = os.path.relpath(toolchain.get_adb_path_for_chromeos(),
                                 build_common.get_arc_root())
  paths.add(adb_rel_path)
  return paths
def post_update_work(self):
  """Updates the unpacked NaCl SDK from the pinned manifest."""
  # Update based on pinned manifest. This part can be as slow as 1-2 minutes
  # regardless of whether it is a fresh install or an update.
  logging.info('%s: Updating naclsdk using manifest.', self.name)
  manifest_url = 'file://' + os.path.join(build_common.get_arc_root(),
                                          _DEPS_FILE_PATH)
  download_package_util.execute_subprocess(
      ['./naclsdk', 'update', '-U', manifest_url, '--force', 'pepper_canary'],
      cwd=self.unpacked_linked_cache_path)
def _get_archived_file_paths():
  """Returns the file paths to be archived."""
  paths = _collect_descendants(
      remote_executor_util.get_integration_test_deps())
  paths.update(run_integration_tests.get_dependencies_for_integration_tests())
  paths.add(os.path.relpath(toolchain.get_adb_path_for_chromeos(),
                            build_common.get_arc_root()))
  return paths
def _get_adb_path_for_localhost():
  """Returns the path of the adb binary for the host platform."""
  root = os.path.join(build_common.get_arc_root(), 'out/adb')
  if platform_util.is_running_on_mac():
    return os.path.join(root, 'mac-x86_64/adb')
  if platform_util.is_running_on_cygwin():
    return os.path.join(root, 'win-x86_64/adb.exe')
  if platform_util.is_running_on_chromeos():
    return get_adb_path_for_chromeos(relative=False)
  # For Linux desktop.
  # NOTE(review): unlike the branches above, this is a relative path outside
  # out/adb -- presumably intentional (uses the checked-in SDK adb); confirm.
  return 'third_party/android-sdk/platform-tools/adb'
def post_update_work(self):
  """Runs the pinned-manifest naclsdk update step."""
  # Update based on pinned manifest. This part can be as slow as 1-2 minutes
  # regardless of whether it is a fresh install or an update.
  logging.info('%s: Updating naclsdk using manifest.', self.name)
  cmd = ['./naclsdk', 'update',
         '-U', 'file://' + os.path.join(build_common.get_arc_root(),
                                        _DEPS_FILE_PATH),
         '--force', 'pepper_canary']
  download_package_util.execute_subprocess(
      cmd, cwd=self.unpacked_linked_cache_path)
def __init__(self, deps_file_path, unpacked_final_path, url=None,
             link_subdir=None, download_method=None, unpack_method=None,
             cache_base_path=None, cache_history_size=None):
  """Sets up the basic configuration for this package.

  |deps_file_path| is the relative path to the DEPS.XXXX file to use for
  this package.
  |unpacked_final_path| is the path the unpacked package should appear at.
  |url| is the URL to use to retrieve the download. If not specified (the
  typical case), the URL is taken from the first line of the DEPS file.
  |link_subdir| is the subdirectory of the unpacked package from the cache
  that should appear at the final location. This is useful if the archive
  unpacks to a subdirectory.
  |download_method| is a function to call taking a pair of arguments,
  (URL, archive_path), which should retrieve the package given its URL,
  and write the contents as a file to archive_path.
  |unpack_method| is a function to call taking a pair of arguments,
  (archive_path, destination_path), to extract the archive file to the
  indicated destination.
  |cache_base_path| allows a derived class to choose the cache path
  explicitly, but is really only meant for the unittest.
  |cache_history_size| allows a derived class to choose the cache history
  size, but it is really only meant for the unittest.
  """
  if cache_base_path:
    cache_base_path = os.path.abspath(cache_base_path)
  # The package name is derived from the final directory's basename.
  self._name = os.path.basename(unpacked_final_path)
  self._cache_base_path = cache_base_path or _DEFAULT_CACHE_BASE_PATH
  self._cache_history_size = cache_history_size or _DEFAULT_CACHE_HISTORY_SIZE
  # Both configured paths are stored as absolute paths under ARC_ROOT.
  self._deps_file_path = os.path.join(
      build_common.get_arc_root(), deps_file_path)
  self._unpacked_final_path = os.path.join(
      build_common.get_arc_root(), unpacked_final_path)
  self._link_subdir = link_subdir or '.'
  # The default methods are factories invoked here to produce callables.
  self._download_method = download_method or default_download_url()
  self._unpack_method = unpack_method or unpack_zip_archive()
  # NOTE(review): this reads via the ARC_ROOT-relative |deps_file_path|,
  # not the absolute self._deps_file_path computed above -- presumably
  # read_metadata_file resolves relative paths itself; confirm.
  self._deps_file_lines = file_util.read_metadata_file(deps_file_path)
  self._url = url or self._deps_file_lines[0]
  # The cache entry path is keyed off the DEPS file contents.
  self._unpacked_cache_path = (
      self._get_cache_entry_path(self._deps_file_lines))
def prepare(self, unused_test_methods_to_run):
  """Builds test jar files for a test.

  Copies the test source tree into the working directory, runs its 'build'
  script (falling back to the shared etc/default-build script) with the
  Android SDK build tools on PATH, then prepares the CRX for a system-mode
  Chrome launch.
  """
  shutil.rmtree(self._work_dir, ignore_errors=True)
  # Copy the source directory to the working directory.
  # Note that we must not copy the files by python's internal utilities
  # here, such as shutil.copy, or loops written manually, etc., because it
  # would cause ETXTBSY in run_subprocess called below if we run this
  # on multi-threading. Here is the scenario:
  # Let there be two cases A and B, and, to simplify, let what we do here
  # be 1) copying the "{A_src,B_src}/build" files to "{A,B}/build", and then
  # 2) fork() and execute() "{A,B}/build". Each will run on a different
  # thread, named thread-A and thread-B.
  # 1) on thread-A, "A_src/build" is copied to "A/build".
  # 2) on thread-B, "B_src/build" starts to be copied to "B/build". For that
  #    purpose, "B/build" is opened with "write" flag.
  # 3) on thread-A, the process is fork()'ed, *before* the copy of "B/build"
  #    is completed. So, subprocess-A keeps the FD of "B/build" with "write".
  # 4) on thread-B, "B/build" is copied, and close()'ed, then fork()'ed.
  # 5) on subprocess-B, it tries to exec "B/build". However, the file is
  #    still kept opened by subprocess-A. As a result, ETXTBSY is reported.
  # Probably, the ideal solution would be that such an issue should be
  # handled by the framework (crbug.com/345667), but it seems to need some
  # more investigation. So, instead, we copy the files in another process.
  subprocess.check_call(['cp', '-Lr', self._source_dir, self._work_dir])
  build_script = os.path.abspath(os.path.join(self._work_dir, 'build'))
  if not os.path.isfile(build_script):
    # If not found, use the default-build script.
    # Note: do not use a python function here, such as shutil.copy directly.
    # See above comment for details.
    subprocess.check_call(
        ['cp', os.path.join(self.get_source_root(), 'etc', 'default-build'),
         build_script])
  # Ensure that the executable bit is set.
  os.chmod(build_script, stat.S_IRWXU)
  # Environment for the build script; PATH gains the SDK build tools.
  env = {
      'DX': 'dx',
      'NEED_DEX': 'true',
      'TEST_NAME': self._suite_name,
      'JAVAC': toolchain.get_tool('java', 'javac'),
      'PATH': ':'.join([
          os.path.join(build_common.get_arc_root(),
                       toolchain.get_android_sdk_build_tools_dir()),
          # Put PATH in the end to prevent shadowing previous path.
          os.environ['PATH']
      ])
  }
  subprocess.check_call([build_script], env=env, cwd=self._work_dir)
  # Prepare the CRX used to launch Chrome in system mode for this suite.
  args = self.get_system_mode_launch_chrome_command(self._name)
  prep_launch_chrome.prepare_crx_with_raw_args(args)
def _copy_off(where):
  """Copies the generated ninja files and top-level build.ninja to |where|."""
  if os.path.exists(where):
    file_util.rmtree(where)
  root = build_common.get_arc_root()
  generated_ninja_dir = os.path.join(root, build_common.OUT_DIR,
                                     'generated_ninja')
  top_level_ninja = os.path.join(root, 'build.ninja')
  # Both inputs must exist; they are produced by the configure step.
  if not (os.path.exists(top_level_ninja) and
          os.path.exists(generated_ninja_dir)):
    sys.exit('You must run configure first')
  shutil.copytree(generated_ninja_dir, where)
  shutil.copy(top_level_ninja, where)
def get_nacl_tools():
  """Returns a list of the NaCl tools that are needed to run unit tests.

  Paths are returned relative to the arc root.
  """
  if build_options.OPTIONS.is_bare_metal_build():
    return [toolchain.get_nonsfi_loader()]
  bitsize = build_options.OPTIONS.get_target_bitsize()
  arch = 'x86_%d' % bitsize
  tools = [
      toolchain.get_nacl_tool('sel_ldr_%s' % arch),
      toolchain.get_nacl_tool('irt_core_%s.nexe' % arch),
      os.path.join(toolchain.get_nacl_toolchain_libs_path(bitsize),
                   'runnable-ld.so'),
  ]
  arc_root = build_common.get_arc_root()
  return [os.path.relpath(tool, arc_root) for tool in tools]
def add_sources(self, files):
  """Registers source files, recording their NOTICE and license roots."""
  for path in files:
    if os.path.isabs(path):
      path = os.path.relpath(path, build_common.get_arc_root())
    notice_root = self._find_parent_file(os.path.dirname(path), 'NOTICE')
    if notice_root:
      self._notice_roots.add(notice_root)
    # Some files carry their own license kind; otherwise look for the
    # nearest ancestor MODULE_LICENSE_* marker.
    if path in self._PER_FILE_LICENSE_KINDS:
      license_root = path
    else:
      license_root = self._find_parent_file(os.path.dirname(path),
                                            'MODULE_LICENSE_*')
    # Record the first file seen for each license root as its example.
    if license_root and license_root not in self._license_roots:
      self._license_roots.add(license_root)
      self._license_roots_examples[license_root] = path
def _remove_ndk_libraries(apk_path):
  """Removes ndk libraries installed by previous launches.

  Package Manager installs shared libraries that match ABI but it doesn't
  remove them from previous installation. If apk does not contain the
  library for current ABI, installer does not produce an error. In this case
  application may launch successfully using previously installed library.
  We want to see an error instead.
  """
  apk_name = os.path.splitext(os.path.basename(apk_path))[0]
  if not apk_name:
    return
  native_library_directory = os.path.join(build_common.get_arc_root(),
                                          build_common.get_android_root(),
                                          'data', 'app-lib', apk_name)
  file_util.rmtree(native_library_directory, ignore_errors=True)
def get_adb_path_for_chromeos(relative=True):
  """Returns the directory that contains the adb executable for Chrome OS."""
  if platform_util.is_running_on_chromeos():
    # The adb binary is copied to a directory whose filesystem is mounted
    # without noexec mount options on Chrome OS.
    root = build_common.get_chromeos_arc_root_without_noexec()
  else:
    root = build_common.get_arc_root()
  # Chrome OS based on linux-i686 is not supported.
  if OPTIONS.is_arm():
    target = 'linux-arm'
  else:
    target = 'linux-x86_64'
  path = os.path.join('out/adb', target, 'adb')
  return path if relative else os.path.join(root, path)
def _find_config_py(base_path):
  """Finds config.py files under |base_path| and its descendants.

  Args:
    base_path: The root path for the searching, relative to ARC_ROOT.

  Yields:
    Paths to the found config.py files, relative to ARC_ROOT.
  """
  arc_root = build_common.get_arc_root()
  walk_root = os.path.join(arc_root, base_path)
  for dirpath, dirnames, filenames in os.walk(walk_root):
    if 'config.py' in filenames:
      # |dirpath| is absolute because |walk_root| is; report the path
      # relative to ARC_ROOT.
      yield os.path.relpath(os.path.join(dirpath, 'config.py'), arc_root)
    # Prune "third_party" directories; they are out of our focus.
    if 'third_party' in dirnames:
      dirnames.remove('third_party')
def _load_internal(path_list):
  """Loads all files in |path_list|.

  The files are loaded as appropriately named submodules. The modules are
  loaded even if parent modules are not found (e.g. because of a missing
  __init__.py); empty placeholder modules are created in such a case.

  Args:
    path_list: a list of files to be loaded, relative to ARC_ROOT.

  Returns:
    A list of loaded modules.
  """
  # For safety, acquire the import lock.
  imp.acquire_lock()
  try:
    loaded = []
    for entry in path_list:
      entry = os.path.normpath(entry)
      abs_path = os.path.join(build_common.get_arc_root(), entry)
      module_name = os.path.splitext(entry)[0].replace(os.sep, '.')
      # Ensure ancestor packages exist before loading the leaf module.
      if '.' in module_name:
        parent_name = module_name.rsplit('.', 1)[0]
        _import_package(parent_name, os.path.dirname(abs_path))
      with open(abs_path, 'rb') as config_file:
        loaded.append(imp.load_source(module_name, abs_path, config_file))
    return loaded
  finally:
    imp.release_lock()
# found in the LICENSE file. """Build APK from C/C++ and java sources with Android SDK and NDK.""" import argparse import os import shutil import subprocess import sys from src.build import build_common from src.build import toolchain from src.build.util import file_util _ARC_ROOT = build_common.get_arc_root() _NDK_PATH = os.path.join(_ARC_ROOT, 'third_party', 'ndk') _SDK_PATH = build_common.get_android_sdk_path() _TOOLS_ROOT = os.path.join(_ARC_ROOT, 'third_party', 'tools') def _build_apk(source_path, use_ndk, use_clang, build_path, install_apk, debug, verbose): if not os.path.isdir(_SDK_PATH): raise Exception('Missing SDK path: ' + str(_SDK_PATH)) print print '--------------------------' print 'Building ' + os.path.basename(install_apk) print '--------------------------'
from src.build.util import chrome_process from src.build.util import concurrent_subprocess from src.build.util import file_util from src.build.util import gdb_util from src.build.util import jdb_util from src.build.util import launch_chrome_util from src.build.util import logging_util from src.build.util import minidump_filter from src.build.util import output_handler from src.build.util import platform_util from src.build.util import remote_executor from src.build.util import signal_util from src.build.util import startup_stats _ROOT_DIR = build_common.get_arc_root() _CHROME_KILL_DELAY = 0.1 _CHROME_KILL_TIMEOUT = 10 _CHROME_PID_PATH = None _PERF_TOOL = 'perf' _USER_DATA_DIR = None # Will be set after we parse the commandline flags. # List of lines for stdout/stderr Chrome output to suppress. _SUPPRESS_LIST = [ # When debugging with gdb, NaCl is emitting many of these messages. 'NaClAppThreadSetSuspendedRegisters: Registers not modified',
def _get_win_chrome_exe_path():
  """Returns the path to the prebuilt Chrome executable for Windows."""
  prebuilt_dir = os.path.join(build_common.get_arc_root(),
                              build_common.get_chrome_prebuilt_path())
  return os.path.join(prebuilt_dir, 'chrome.exe')
def _get_mac_chrome_exe_path():
  """Returns the path to the prebuilt Chromium binary for Mac."""
  prebuilt_dir = os.path.join(build_common.get_arc_root(),
                              build_common.get_chrome_prebuilt_path())
  return os.path.join(prebuilt_dir, 'Chromium.app/Contents/MacOS/Chromium')
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Build APK from C/C++ and java sources with Android SDK and NDK.""" import argparse import os import shutil import subprocess import sys from src.build import build_common from src.build import toolchain from src.build.util import file_util _ARC_ROOT = build_common.get_arc_root() _NDK_PATH = os.path.join(_ARC_ROOT, 'third_party', 'ndk') _SDK_PATH = build_common.get_android_sdk_path() _TOOLS_ROOT = os.path.join(_ARC_ROOT, 'third_party', 'tools') def _build_apk(source_path, use_ndk, use_clang, build_path, install_apk, debug, verbose): if not os.path.isdir(_SDK_PATH): raise Exception('Missing SDK path: ' + str(_SDK_PATH)) print print '--------------------------' print 'Building ' + os.path.basename(install_apk) print '--------------------------'
def _get_original_ssh_key():
  """Returns the path of the test SSH key under the arc root."""
  arc_root = build_common.get_arc_root()
  return os.path.join(arc_root, _TEST_SSH_KEY)
import contextlib import hashlib import json import logging import os import shutil import stat import subprocess import tempfile import time import urllib from src.build import build_common from src.build.util import file_util _DEFAULT_CACHE_BASE_PATH = os.path.join(build_common.get_arc_root(), 'cache') _DEFAULT_CACHE_HISTORY_SIZE = 3 class CacheHistory(object): """Interface for the working with the history of a particular package.""" def __init__(self, name, base_path, history_size, contents): self._name = name self._base_path = base_path self._history_size = history_size self._contents = contents def clean_old(self): """Cleans out the least-recently used entries, deleting cache paths.""" while len(self._contents) > self._history_size: path = self._contents.pop(0)
#!src/build/run_python # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os from src.build import build_common os.chdir(build_common.get_arc_root()) if not os.path.exists(build_common.OUT_DIR): os.mkdir(build_common.OUT_DIR) tag_file = os.path.join(build_common.OUT_DIR, 'TAGS') if os.path.exists(tag_file): os.unlink(tag_file) os.system('find . \\( ' '-name \\*.cc -or ' '-name \\*.cpp -or ' '-name \\*.c -or ' '-name \\*.java -or ' '-name \\*.h \\) ' '-and \\! -wholename ./out/staging/\\* ' # ignore out/staging '-and \\! -xtype l ' # ignore broken symlinks '-print0 ' # support filenames with spaces, with xargs -0 '| xargs -0 etags --append --output=' + tag_file)
def get_abs_arc_root():
  """Returns the absolute path of the ARC root directory."""
  arc_root = build_common.get_arc_root()
  return os.path.abspath(arc_root)
import hashlib import json import logging import os import shutil import stat import subprocess import tempfile import time import urllib from src.build import build_common from src.build.util import file_util _DEFAULT_CACHE_BASE_PATH = os.path.join(build_common.get_arc_root(), 'cache') _DEFAULT_CACHE_HISTORY_SIZE = 3 class CacheHistory(object): """Interface for the working with the history of a particular package.""" def __init__(self, name, base_path, history_size, contents): self._name = name self._base_path = base_path self._history_size = history_size self._contents = contents def clean_old(self): """Cleans out the least-recently used entries, deleting cache paths.""" while len(self._contents) > self._history_size: