def _generate_check_symbols_ninja():
  """Emits ninja rules that run check_symbols.py over the Bionic libraries."""
  # Symbol compatibility only matters when NDK binaries are executed
  # directly; otherwise skip the check entirely.
  if not build_common.use_ndk_direct_execution():
    return
  ninja = ninja_generator.NinjaGenerator('check_symbols')
  checker_script = staging.as_staging('src/build/check_symbols.py')
  rule_name = 'check_symbols'
  ninja.rule(rule_name,
             command='src/build/run_python %s $android_lib $in %s' % (
                 checker_script, build_common.get_test_output_handler()),
             description=rule_name + ' $in')
  arch_subdir = 'arch-arm' if OPTIONS.is_arm() else 'arch-x86'
  lib_dir = os.path.join(_ANDROID_SYSTEM_IMAGE_DIR, arch_subdir, 'usr/lib')
  for so_file in build_common.find_all_files(lib_dir, suffixes='.so'):
    lib_name = os.path.basename(so_file)
    # For now, we only check Bionic.
    # TODO(crbug.com/408548): Extend this for other libraries.
    if lib_name not in ('libc.so', 'libdl.so', 'libm.so'):
      continue
    staged_lib = staging.as_staging(so_file)
    result_path = os.path.join(build_common.get_build_dir(), 'check_symbols',
                               'check_symbols.%s.result' % lib_name)
    ninja.build(result_path, rule_name,
                build_common.get_build_path_for_library(lib_name),
                variables={'android_lib': staged_lib},
                implicit=[checker_script, staged_lib])
def _configure_build_options():
  """Parses configure options and prepares the target build directory.

  Returns:
    True on success, False when option parsing fails.
  """
  if OPTIONS.parse(sys.argv[1:]):
    print 'Args error'
    return False

  # Write out the configure file early so all other scripts can use
  # the options passed into configure. (e.g., sync_chrome).
  OPTIONS.write_configure_file()

  # Target directory is replaced. If an old directory, out/target/<target>,
  # exists, move it to the new place, out/target/<target>_<opt>.
  old_path = os.path.join('out/target', OPTIONS.target())
  new_path = build_common.get_build_dir()
  if os.path.lexists(old_path):
    if os.path.isdir(old_path) and not os.path.islink(old_path):
      if os.path.exists(new_path):
        # Both the old-style and new-style directories exist; the old one is
        # stale, so discard it.
        file_util.rmtree(old_path)
      else:
        shutil.move(old_path, new_path)
    else:
      # |old_path| is a symlink (or a plain file); remove it so it can be
      # recreated as a fresh symlink below.
      os.remove(old_path)

  # Create an empty directory as a placeholder if necessary.
  file_util.makedirs_safely(new_path)

  # Create a symlink from new place to old place to keep as compatible as
  # possible.
  os.symlink(os.path.basename(new_path), old_path)

  # Write out the configure file to a target specific location, which can be
  # queried later to find out what the config for a target was.
  OPTIONS.write_configure_file(build_common.get_target_configure_options_file())

  OPTIONS.set_up_goma()
  return True
def _generate_check_symbols_ninja():
  """Generates ninja rules that run check_symbols.py against Bionic libraries.

  Each built Bionic library is checked with the corresponding library from
  the Android system image ($android_lib) via src/build/check_symbols.py.
  """
  # If we do not use NDK direct execution, the compatibility is less
  # important.
  if not build_common.use_ndk_direct_execution():
    return

  n = ninja_generator.NinjaGenerator('check_symbols')
  script = staging.as_staging('src/build/check_symbols.py')
  rule_name = 'check_symbols'
  n.rule(rule_name,
         command=('src/build/run_python %s $android_lib $in %s' % (
             script, build_common.get_test_output_handler())),
         description=(rule_name + ' $in'))
  if OPTIONS.is_arm():
    arch_subdir = 'arch-arm'
  else:
    arch_subdir = 'arch-x86'
  lib_dir = os.path.join(_ANDROID_SYSTEM_IMAGE_DIR, arch_subdir, 'usr/lib')
  for so_file in build_common.find_all_files(lib_dir, suffixes='.so'):
    lib_name = os.path.basename(so_file)
    if lib_name not in ['libc.so', 'libdl.so', 'libm.so']:
      # For now, we only check Bionic.
      # TODO(crbug.com/408548): Extend this for other libraries.
      continue
    result_path = os.path.join(build_common.get_build_dir(), 'check_symbols',
                               'check_symbols.%s.result' % lib_name)
    n.build(result_path, rule_name,
            build_common.get_build_path_for_library(lib_name),
            variables={'android_lib': staging.as_staging(so_file)},
            implicit=[script, staging.as_staging(so_file)])
def _generate_disallowed_symbols_ninja():
  """Emits a ninja step that copies the disallowed-symbols list into gen_symbols."""
  ninja = ninja_generator.NinjaGenerator('disallowed_symbols')
  destination = os.path.join(
      build_common.get_build_dir(), 'gen_symbols', 'disallowed_symbols.defined')
  ninja.build([destination], 'copy_symbols_file',
              'src/build/disallowed_symbols.txt',
              implicit='src/build/symbol_tool.py')
def _has_stripped_binary(self, path):
  """Returns True if a stripped binary corresponding to |path| is found."""
  relative = os.path.relpath(path, build_common.get_build_dir())
  # Anything outside the build directory has no stripped counterpart.
  if relative.startswith('../'):
    return False
  candidate = os.path.join(build_common.get_stripped_dir(), relative)
  return os.path.isfile(candidate)
def _get_generated_file(ninja, out_name, script_name):
  """Adds a rule and build statement that run a src/common generator script.

  Args:
    ninja: NinjaGenerator the rule/build pair is added to.
    out_name: basename of the file to generate.
    script_name: basename of the generator script under src/common.

  Returns:
    Path of the generated file inside the build directory.
  """
  generator = os.path.join('src/common', script_name)
  rule_name = os.path.splitext(os.path.basename(generator))[0]
  # Write into $out.tmp first so an interrupted run never leaves a
  # half-written $out behind.
  ninja.rule(rule_name,
             command='%s > $out.tmp && mv $out.tmp $out' % generator,
             description=rule_name + ' $in')
  generated_path = os.path.join(
      build_common.get_build_dir(), 'common_gen_sources', out_name)
  ninja.build(generated_path, rule_name,
              implicit=python_deps.find_deps(generator))
  return generated_path
def generate_binaries_depending_ninjas(_):
  """Generates a ninja that diffs static initializers against expectations.

  Only runs for optimized bare-metal i686 builds with debug info but without
  debug code (see the guard below); otherwise this is a no-op.
  """
  if (not OPTIONS.is_bare_metal_i686() or
      not OPTIONS.is_optimized_build() or
      # None of the targets analyzed are currently built in the open source
      # repository.
      open_source.is_open_source_repo() or
      # Run the checker only when --disable-debug-code is specified. Locations
      # of static initializers differ depending on the debug-code option.
      OPTIONS.is_debug_code_enabled() or
      # The checker only works with debug symbols.
      not OPTIONS.is_debug_info_enabled()):
    # The static analysis tool's output varies between debug and non-debug
    # builds, so we pick non-debug as the default.
    return

  n = ninja_generator.NinjaGenerator('analyze_static_initializers')
  script = staging.as_staging(
      'android/external/chromium_org/tools/linux/dump-static-initializers.py')
  # The pipeline drops the dump tool's trailing summary line, filters
  # per-.cpp noise, masks compiler-generated temporary names (T.<n>), and
  # diffs the result against the checked-in expectation file.
  n.rule('analyze_static_initializers',
         command=('python src/build/run_python %s -d $in | head --lines=-1 | '
                  'egrep -ve \'^# .*\.cpp \' |'
                  'sed -e \'s/ T\.[0-9]*/ T.XXXXX/\' |'
                  'diff -u $expect - && touch $out' % script),
         description='analyze_static_initializers $in')
  libraries = build_common.CHECKED_LIBRARIES
  libraries_fullpath = [
      os.path.join(build_common.get_load_library_path(), lib)
      for lib in libraries]
  for library in zip(libraries, libraries_fullpath):
    # You can manually update the text files by running
    # src/build/update_static_initializer_expectations.py.
    expect = 'src/build/dump-static-initializers-%s-expected.txt' % library[0]
    result_path = os.path.join(
        build_common.get_build_dir(), 'dump_static_initializers',
        'dump_static_initializers.%s.result' % library[0])
    n.build(result_path, 'analyze_static_initializers', library[1],
            variables={'out': result_path, 'expect': expect},
            # Add |libraries_fullpath| to implicit= not to run the analyzer
            # script until all libraries in |libraries_fullpath| become ready.
            # This makes it easy to use
            # update_static_initializer_expectations.py especially when you
            # remove global variables from two or more libraries at the same
            # time.
            implicit=[script, expect] + libraries_fullpath)
def _zip_files(filename, paths):
  """Creates a zip file that contains the specified files."""
  stripped_dir = build_common.get_stripped_dir()
  # Set allowZip64=True so that large zip files can be handled.
  with zipfile.ZipFile(filename, 'w', compression=zipfile.ZIP_DEFLATED,
                       allowZip64=True) as archive:
    for path in set(paths):
      if not path.startswith(stripped_dir):
        archive.write(path)
        continue
      # When archiving a stripped file, use the path of the corresponding
      # unstripped file as archive name
      unstripped_name = os.path.join(
          build_common.get_build_dir(), os.path.relpath(path, stripped_dir))
      archive.write(path, arcname=unstripped_name)
def generate_binaries_depending_ninjas(_):
  """Generates a ninja that diffs static initializers against expectations.

  Only runs for optimized bare-metal i686 builds with debug info but without
  debug code (see the guard below); otherwise this is a no-op.
  """
  if (not OPTIONS.is_bare_metal_i686() or not OPTIONS.is_optimized_build() or
      # None of the targets analyzed are currently built in the open source
      # repository.
      open_source.is_open_source_repo() or
      # Run the checker only when --disable-debug-code is specified. Locations
      # of static initializers differ depending on the debug-code option.
      OPTIONS.is_debug_code_enabled() or
      # The checker only works with debug symbols.
      not OPTIONS.is_debug_info_enabled()):
    # The static analysis tool's output varies between debug and non-debug
    # builds, so we pick non-debug as the default.
    return

  n = ninja_generator.NinjaGenerator('analyze_static_initializers')
  script = staging.as_staging(
      'android/external/chromium_org/tools/linux/dump-static-initializers.py')
  # The pipeline drops the dump tool's trailing summary line, filters
  # per-.cpp noise, masks compiler-generated temporary names (T.<n>), and
  # diffs the result against the checked-in expectation file.
  n.rule('analyze_static_initializers',
         command=('python src/build/run_python %s -d $in | head --lines=-1 | '
                  'egrep -ve \'^# .*\.cpp \' |'
                  'sed -e \'s/ T\.[0-9]*/ T.XXXXX/\' |'
                  'diff -u $expect - && touch $out' % script),
         description='analyze_static_initializers $in')
  libraries = build_common.CHECKED_LIBRARIES
  libraries_fullpath = [
      os.path.join(build_common.get_load_library_path(), lib)
      for lib in libraries]
  for library in zip(libraries, libraries_fullpath):
    # You can manually update the text files by running
    # src/build/update_static_initializer_expectations.py.
    expect = 'src/build/dump-static-initializers-%s-expected.txt' % library[0]
    result_path = os.path.join(build_common.get_build_dir(),
                               'dump_static_initializers',
                               'dump_static_initializers.%s.result' %
                               library[0])
    n.build(result_path, 'analyze_static_initializers', library[1],
            variables={'out': result_path, 'expect': expect},
            # Add |libraries_fullpath| to implicit= not to run the analyzer
            # script until all libraries in |libraries_fullpath| become ready.
            # This makes it easy to use
            # update_static_initializer_expectations.py especially when you
            # remove global variables from two or more libraries at the same
            # time.
            implicit=[script, expect] + libraries_fullpath)
def _generate_libposix_translation_for_test():
  """Generates ninjas for the test-only build of libposix_translation.

  Builds libposix_translation_for_test.so (with dt_soname
  libposix_translation.so) and then symlinks it into the test library
  directory under the production library's name.
  """
  n = ninja_generator.SharedObjectNinjaGenerator(
      'libposix_translation_for_test',
      dt_soname='libposix_translation.so',
      is_system_library=True, is_for_test=True)
  # libposix_translation_for_test is built using generated code in the out/
  # directory. The logic we have to scan for NOTICES does not find one for this
  # generated code, and complains later. The libposix_translation.so code
  # normally inherits the project NOTICE from src/NOTICE, so we just explicitly
  # point to it here as the notice to use for this special version of the
  # library.
  n.add_notice_sources([staging.as_staging('src/NOTICE')])
  gen_rule_name = 'gen_wrap_syscall_aliases_s'
  gen_script_path = os.path.join('src/common', gen_rule_name + '.py')
  # Generate wrap_syscall_aliases.S via a tmp file so an interrupted run
  # never leaves a half-written output.
  n.rule(gen_rule_name,
         command='%s > $out.tmp && mv $out.tmp $out' % gen_script_path,
         description=gen_rule_name + ' $in')
  gen_out_path = os.path.join(build_common.get_build_dir(),
                              'posix_translation_gen_sources',
                              'wrap_syscall_aliases.S')
  gen_implicit_deps = python_deps.find_deps(gen_script_path)
  n.build(gen_out_path, gen_rule_name, implicit=gen_implicit_deps)
  # Following deps order is important. art_libc_supplement_for_test.so should
  # be placed before libc.so.
  if not open_source.is_open_source_repo():
    # This is for ART unit tests which have not been opensourced. To not let
    # posix_translation depend on ART on arc_open, use is_open_source_repo().
    n.add_library_deps('art_libc_supplement_for_test.so')
  n.add_library_deps('libc.so', 'libdl.so')
  n.build_default([gen_out_path], base_path=None).link()

  # /test/libposix_translation.so should be a symbolic link to
  # ../lib/libposix_translation_for_test.so.
  n = ninja_generator.NinjaGenerator('test_libposix_translation')
  orig_so = os.path.join(
      build_common.get_load_library_path(), 'libposix_translation_for_test.so')
  link_so = os.path.join(
      build_common.get_load_library_path_for_test(), 'libposix_translation.so')
  command = 'ln -sf %s %s' % (
      os.path.join('../../lib/', os.path.basename(orig_so)), link_so)
  n.build(link_so, 'run_shell_command', implicit=orig_so,
          variables={'command': command})
def _write_args_if_needed(self, args):
  """Writes args to a file if it is too long and returns a new args."""
  # Only launch_chrome commands are known to support the '@<file>' syntax
  # for reading arguments from a file, so leave other commands untouched.
  if not launch_chrome_util.is_launch_chrome_command(args):
    return args
  remaining_args = launch_chrome_util.remove_leading_launch_chrome_args(args)
  serialized = '\n'.join(remaining_args)
  # Short argument lists stay on the command line as-is.
  if len(serialized) < SuiteRunnerBase.WRITE_ARGS_MIN_LENGTH:
    return args
  args_dir = os.path.join(build_common.get_build_dir(), 'integration_tests')
  file_util.makedirs_safely(args_dir)
  args_file = os.path.join(args_dir, self._name + '_args')
  with open(args_file, 'w') as out:
    out.write(serialized)
  leading = args[:-len(remaining_args)]
  return leading + ['@' + args_file]
def _write_args_if_needed(self, args): """Writes args to a file if it is too long and returns a new args.""" # Do not rewrite args of the commands other than launch_chrome because # the commands do not necessarily support the syntax of reading arguments # from a file. if not launch_chrome_util.is_launch_chrome_command(args): return args remaining_args = launch_chrome_util.remove_leading_launch_chrome_args( args) args_string = '\n'.join(remaining_args) # Do not rewrite args to file if the argument list is short enough. if len(args_string) < SuiteRunnerBase.WRITE_ARGS_MIN_LENGTH: return args args_dir = os.path.join(build_common.get_build_dir(), 'integration_tests') file_util.makedirs_safely(args_dir) args_file = os.path.join(args_dir, self._name + '_args') with open(args_file, 'w') as f: f.write(args_string) return args[:-len(remaining_args)] + ['@' + args_file]
def _configure_build_options():
  """Parses configure options and prepares the target build directory.

  Returns:
    True on success, False when option parsing fails.
  """
  if OPTIONS.parse(sys.argv[1:]):
    print 'Args error'
    return False

  # Write out the configure file early so all other scripts can use
  # the options passed into configure. (e.g., sync_chrome).
  OPTIONS.write_configure_file()

  # Target directory is replaced. If an old directory, out/target/<target>,
  # exists, move it to the new place, out/target/<target>_<opt>.
  old_path = os.path.join('out/target', OPTIONS.target())
  new_path = build_common.get_build_dir()
  if os.path.lexists(old_path):
    if os.path.isdir(old_path) and not os.path.islink(old_path):
      if os.path.exists(new_path):
        # Both the old-style and new-style directories exist; the old one is
        # stale, so discard it.
        file_util.rmtree(old_path)
      else:
        shutil.move(old_path, new_path)
    else:
      # |old_path| is a symlink (or a plain file); remove it so it can be
      # recreated as a fresh symlink below.
      os.remove(old_path)

  # Create an empty directory as a placeholder if necessary.
  file_util.makedirs_safely(new_path)

  # Create a symlink from new place to old place to keep as compatible as
  # possible.
  os.symlink(os.path.basename(new_path), old_path)

  # Write out the configure file to a target specific location, which can be
  # queried later to find out what the config for a target was.
  OPTIONS.write_configure_file(
      build_common.get_target_configure_options_file())

  OPTIONS.set_up_goma()
  return True
def rsync(self, source_paths, remote_dest_root, exclude_paths=None):
  """Runs rsync command to copy files to remote host.

  Sends |source_paths| to |remote_dest_root| directory in the remote machine.
  |exclude_paths| can be used to exclude files or directories from the
  sending list.
  The files which are under |remote_dest_root| but not in the sending list
  will be deleted.

  Note:
  - Known editor temporary files would never be sent.
  - .pyc files in remote machine will *not* be deleted.
  - If debug info is enabled and there is a corresponding stripped binary,
    the stripped binary will be sent, instead of original (unstripped)
    binary.
  - Files newly created in the remote machine will be deleted. Specifically,
    if a file in the host machine is deleted, the corresponding file in the
    remote machine is also deleted after the rsync.

  Args:
      source_paths: a list of paths to be sent. Each path can be a file or
          a directory. If the path is directory, all files under the
          directory will be sent.
      remote_dest_root: the path to the destination directory in the
          remote machine.
      exclude_paths: an optional list of paths to be excluded from the
          sending path list. Similar to |source_paths|, if a path is
          directory, all paths under the directory will be excluded.
  """
  filter_list = (
      self._build_rsync_filter_list(source_paths, exclude_paths or []))
  rsync_options = [
      # The remote files need to be writable and executable by chronos. This
      # option sets read, write, and execute permissions to all users.
      '--chmod=a=rwx',
      '--compress',
      '--copy-links',
      '--delete',
      '--delete-excluded',
      '--inplace',
      '--perms',
      '--progress',
      '--recursive',
      '--rsh=' + ' '.join(['ssh'] + self._build_shared_ssh_command_options()),
      '--times',
  ]
  dest = '%s@%s:%s' % (self._user, self._remote, remote_dest_root)
  # Checks both whether to enable debug info and the existence of the stripped
  # directory because build bots using test bundle may use the configure
  # option with debug info enabled but binaries are not available in the
  # stripped directory.
  unstripped_paths = None
  if (OPTIONS.is_debug_info_enabled() and
      os.path.exists(build_common.get_stripped_dir())):
    # When debug info is enabled, copy the corresponding stripped binaries if
    # available to save the disk space on ChromeOS.
    # First, do a dry-run (--list-only) to learn which files rsync would
    # send, then find the ones that have stripped counterparts.
    list_output = _run_command(
        subprocess.check_output,
        ['rsync'] + filter_list + ['.', dest] + rsync_options +
        ['--list-only'])
    paths = [
        line.rsplit(' ', 1)[-1] for line in list_output.splitlines()[1:]]
    unstripped_paths = [
        path for path in paths if self._has_stripped_binary(path)]
    # here, prepend filter rules to "protect" and "exclude" the files which
    # have the corresponding stripped binary.
    # Note: the stripped binaries will be sync'ed by the second rsync
    # command, so it is necessary to "protect" here, too. Otherwise, the
    # files will be deleted at the first rsync.
    filter_list = list(itertools.chain.from_iterable(
        (('--filter', 'P /' + path, '--exclude', '/' + path)
         for path in unstripped_paths))) + filter_list

  # Copy files to remote machine.
  _run_command(subprocess.check_call,
               ['rsync'] + filter_list + ['.', dest] + rsync_options)

  if unstripped_paths:
    # Copy stripped binaries to the build/ directory in the remote machine
    # directly.
    stripped_binary_relative_paths = [
        os.path.relpath(path, build_common.get_build_dir())
        for path in unstripped_paths]
    logging.debug('rsync stripped_binaries: %s', ', '.join(unstripped_paths))
    dest_build = os.path.join(dest, build_common.get_build_dir())
    # rsync results in error if the parent directory of the destination
    # directory does not exist, so ensure it exists.
    self.run('mkdir -p ' + dest_build.rsplit(':', 1)[-1])
    _run_command(_check_call_with_input,
                 ['rsync', '--files-from=-',
                  build_common.get_stripped_dir(), dest_build] +
                 rsync_options,
                 input='\n'.join(stripped_binary_relative_paths))
def rsync(self, source_paths, remote_dest_root, exclude_paths=None):
  """Runs rsync command to copy files to remote host.

  Sends |source_paths| to |remote_dest_root| directory in the remote machine.
  |exclude_paths| can be used to exclude files or directories from the
  sending list.
  The files which are under |remote_dest_root| but not in the sending list
  will be deleted.

  Note:
  - Known editor temporary files would never be sent.
  - .pyc files in remote machine will *not* be deleted.
  - If debug info is enabled and there is a corresponding stripped binary,
    the stripped binary will be sent, instead of original (unstripped)
    binary.
  - Files newly created in the remote machine will be deleted. Specifically,
    if a file in the host machine is deleted, the corresponding file in the
    remote machine is also deleted after the rsync.

  Args:
      source_paths: a list of paths to be sent. Each path can be a file or
          a directory. If the path is directory, all files under the
          directory will be sent.
      remote_dest_root: the path to the destination directory in the
          remote machine.
      exclude_paths: an optional list of paths to be excluded from the
          sending path list. Similar to |source_paths|, if a path is
          directory, all paths under the directory will be excluded.
  """
  filter_list = (self._build_rsync_filter_list(source_paths,
                                               exclude_paths or []))
  rsync_options = [
      # The remote files need to be writable and executable by chronos. This
      # option sets read, write, and execute permissions to all users.
      '--chmod=a=rwx',
      '--compress',
      '--copy-links',
      '--delete',
      '--delete-excluded',
      '--inplace',
      '--perms',
      '--progress',
      '--recursive',
      '--rsh=' + ' '.join(['ssh'] + self._build_shared_ssh_command_options()),
      '--times',
  ]
  dest = '%s@%s:%s' % (self._user, self._remote, remote_dest_root)
  # Checks both whether to enable debug info and the existence of the stripped
  # directory because build bots using test bundle may use the configure
  # option with debug info enabled but binaries are not available in the
  # stripped directory.
  unstripped_paths = None
  if (OPTIONS.is_debug_info_enabled() and
      os.path.exists(build_common.get_stripped_dir())):
    # When debug info is enabled, copy the corresponding stripped binaries if
    # available to save the disk space on ChromeOS.
    # First, do a dry-run (--list-only) to learn which files rsync would
    # send, then find the ones that have stripped counterparts.
    list_output = _run_command(subprocess.check_output,
                               ['rsync'] + filter_list + ['.', dest] +
                               rsync_options + ['--list-only'])
    paths = [
        line.rsplit(' ', 1)[-1] for line in list_output.splitlines()[1:]
    ]
    unstripped_paths = [
        path for path in paths if self._has_stripped_binary(path)
    ]
    # here, prepend filter rules to "protect" and "exclude" the files which
    # have the corresponding stripped binary.
    # Note: the stripped binaries will be sync'ed by the second rsync
    # command, so it is necessary to "protect" here, too. Otherwise, the
    # files will be deleted at the first rsync.
    filter_list = list(
        itertools.chain.from_iterable(
            (('--filter', 'P /' + path, '--exclude', '/' + path)
             for path in unstripped_paths))) + filter_list

  # Copy files to remote machine.
  _run_command(subprocess.check_call,
               ['rsync'] + filter_list + ['.', dest] + rsync_options)

  if unstripped_paths:
    # Copy stripped binaries to the build/ directory in the remote machine
    # directly.
    stripped_binary_relative_paths = [
        os.path.relpath(path, build_common.get_build_dir())
        for path in unstripped_paths
    ]
    logging.debug('rsync stripped_binaries: %s', ', '.join(unstripped_paths))
    dest_build = os.path.join(dest, build_common.get_build_dir())
    # rsync results in error if the parent directory of the destination
    # directory does not exist, so ensure it exists.
    self.run('mkdir -p ' + dest_build.rsplit(':', 1)[-1])
    _run_command(_check_call_with_input, [
        'rsync', '--files-from=-',
        build_common.get_stripped_dir(), dest_build
    ] + rsync_options,
                 input='\n'.join(stripped_binary_relative_paths))
def _construct_command(test_info, gtest_filter, gtest_list_tests):
  """Builds the shell command line used to run one unit test.

  Args:
    test_info: dict with a 'command' string.Template and a 'variables' dict.
    gtest_filter: optional --gtest_filter value; overrides gtest_options.
    gtest_list_tests: when true, run with --gtest_list_tests instead.

  Returns:
    The fully substituted command string.
  """
  variables = test_info['variables'].copy()
  variables.setdefault('argv', '')
  variables.setdefault('qemu_arm', '')

  if platform_util.is_running_on_chromeos():
    # On ChromeOS, binaries in directories mounted with noexec options are
    # copied to the corresponding directories mounted with exec option.
    # Change runner to use the binaries under the directory mounted with exec
    # option.
    # Also do not use qemu_arm when running on ARM Chromebook.
    arc_root_without_noexec = \
        build_common.get_chromeos_arc_root_without_noexec()
    if build_options.OPTIONS.is_bare_metal_build():
      variables['runner'] = ' '.join(
          toolchain.get_bare_metal_runner(bin_dir=arc_root_without_noexec))
      variables['runner_without_test_library'] = ' '.join(
          toolchain.get_bare_metal_runner(bin_dir=arc_root_without_noexec,
                                          use_test_library=False))
      if build_options.OPTIONS.is_arm():
        variables['qemu_arm'] = ''
        # Update --gtest_filter to re-enable the tests disabled only on qemu.
        if variables.get('qemu_disabled_tests'):
          variables['gtest_options'] = unittest_util.build_gtest_options(
              variables.get('enabled_tests'), variables.get('disabled_tests'))
    else:
      variables['runner'] = ' '.join(
          toolchain.get_nacl_runner(
              build_options.OPTIONS.get_target_bitsize(),
              bin_dir=arc_root_without_noexec))
      variables['runner_without_test_library'] = ' '.join(
          toolchain.get_nacl_runner(
              build_options.OPTIONS.get_target_bitsize(),
              bin_dir=arc_root_without_noexec,
              use_test_library=False))
    build_dir = build_common.get_build_dir()
    # Use test binary in the directory mounted without noexec.
    variables['in'] = variables['in'].replace(
        build_dir, os.path.join(arc_root_without_noexec, build_dir))
  else:
    if build_options.OPTIONS.is_arm():
      # Pass environment variables by -E flag for qemu-arm instead of
      # "env" command.
      # TODO(hamaji): This and the is_running_on_chromeos() case above
      # are both hacky. We probably want to construct the command to
      # run a unittest here based on the info in variables, and remove
      # test_info['command'].
      qemu_arm = variables['qemu_arm'].split(' ')
      if '$qemu_arm' in test_info['command']:
        # The runner is expected to start with "env VAR=VAL ..."; move the
        # VAR=VAL pairs from the env prefix onto the qemu-arm invocation.
        runner = variables['runner'].split(' ')
        assert runner[0] == 'env'
        runner.pop(0)
        # NOTE(review): only a single '-E' is appended before all VAR=VAL
        # pairs; presumably qemu-arm needs one '-E' per variable — confirm
        # against callers that pass more than one environment variable.
        qemu_arm.append('-E')
        while '=' in runner[0]:
          qemu_arm.append(runner[0])
          runner.pop(0)
        variables['qemu_arm'] = ' '.join(qemu_arm)
        variables['runner'] = ' '.join(runner)

  if gtest_filter:
    variables['gtest_options'] = '--gtest_filter=' + gtest_filter
  if gtest_list_tests:
    variables['gtest_options'] = '--gtest_list_tests'
  # Test is run as a command to build a test results file.
  command_template = string.Template(test_info['command'])
  return command_template.substitute(variables)
def _construct_command(test_info, gtest_filter, gtest_list_tests):
  """Builds the shell command line used to run one unit test.

  Args:
    test_info: dict with a 'command' string.Template and a 'variables' dict.
    gtest_filter: optional --gtest_filter value; overrides gtest_options.
    gtest_list_tests: when true, run with --gtest_list_tests instead.

  Returns:
    The fully substituted command string.
  """
  variables = test_info['variables'].copy()
  variables.setdefault('argv', '')
  variables.setdefault('qemu_arm', '')

  if platform_util.is_running_on_chromeos():
    # On ChromeOS, binaries in directories mounted with noexec options are
    # copied to the corresponding directories mounted with exec option.
    # Change runner to use the binaries under the directory mounted with exec
    # option.
    # Also do not use qemu_arm when running on ARM Chromebook.
    arc_root_without_noexec = \
        build_common.get_chromeos_arc_root_without_noexec()
    if build_options.OPTIONS.is_bare_metal_build():
      variables['runner'] = ' '.join(
          toolchain.get_bare_metal_runner(
              bin_dir=arc_root_without_noexec))
      variables['runner_without_test_library'] = ' '.join(
          toolchain.get_bare_metal_runner(
              bin_dir=arc_root_without_noexec, use_test_library=False))
      if build_options.OPTIONS.is_arm():
        variables['qemu_arm'] = ''
        # Update --gtest_filter to re-enable the tests disabled only on qemu.
        if variables.get('qemu_disabled_tests'):
          variables[
              'gtest_options'] = unittest_util.build_gtest_options(
                  variables.get('enabled_tests'),
                  variables.get('disabled_tests'))
    else:
      variables['runner'] = ' '.join(
          toolchain.get_nacl_runner(
              build_options.OPTIONS.get_target_bitsize(),
              bin_dir=arc_root_without_noexec))
      variables['runner_without_test_library'] = ' '.join(
          toolchain.get_nacl_runner(
              build_options.OPTIONS.get_target_bitsize(),
              bin_dir=arc_root_without_noexec,
              use_test_library=False))
    build_dir = build_common.get_build_dir()
    # Use test binary in the directory mounted without noexec.
    variables['in'] = variables['in'].replace(
        build_dir, os.path.join(arc_root_without_noexec, build_dir))
  else:
    if build_options.OPTIONS.is_arm():
      # Pass environment variables by -E flag for qemu-arm instead of
      # "env" command.
      # TODO(hamaji): This and the is_running_on_chromeos() case above
      # are both hacky. We probably want to construct the command to
      # run a unittest here based on the info in variables, and remove
      # test_info['command'].
      qemu_arm = variables['qemu_arm'].split(' ')
      if '$qemu_arm' in test_info['command']:
        # The runner is expected to start with "env VAR=VAL ..."; move the
        # VAR=VAL pairs from the env prefix onto the qemu-arm invocation.
        runner = variables['runner'].split(' ')
        assert runner[0] == 'env'
        runner.pop(0)
        # NOTE(review): only a single '-E' is appended before all VAR=VAL
        # pairs; presumably qemu-arm needs one '-E' per variable — confirm
        # against callers that pass more than one environment variable.
        qemu_arm.append('-E')
        while '=' in runner[0]:
          qemu_arm.append(runner[0])
          runner.pop(0)
        variables['qemu_arm'] = ' '.join(qemu_arm)
        variables['runner'] = ' '.join(runner)

  if gtest_filter:
    variables['gtest_options'] = '--gtest_filter=' + gtest_filter
  if gtest_list_tests:
    variables['gtest_options'] = '--gtest_list_tests'
  # Test is run as a command to build a test results file.
  command_template = string.Template(test_info['command'])
  return command_template.substitute(variables)