def _generate_lint_ninjas():
  """Emit ninja rules that lint each tracked source file and merge results."""
  gen = ninja_generator.NinjaGenerator('lint', target_groups=['lint'])
  lint_script = 'src/build/lint_source.py'
  analyze_diffs_script = 'src/build/analyze_diffs.py'
  ignore = 'src/build/lint_ignore.txt'
  # Every lint result depends on the lint tooling and the ignore list.
  common_deps = [analyze_diffs_script, lint_script, ignore]
  gen.rule('lint',
           command=('%s -i %s -o $out $in' % (lint_script, ignore)),
           description='lint $out')
  gen.rule('lint_merge',
           command='%s --merge -o $out @$out.rsp' % lint_script,
           rspfile='$out.rsp',
           rspfile_content='$in_newline',
           description='lint --merge $out')
  result_files = []
  for path in lint_source.get_all_files_to_check():
    # A file's lint result also depends on the upstream file it tracks
    # (if any), so recompute when that tracked file changes.
    with ninja_generator.open_dependency(
        path, 'r', ignore_dependency=True) as source_file:
      tracking_path = analyze_diffs.compute_tracking_path(
          None, path, source_file)
    deps = list(common_deps)
    if tracking_path:
      deps.append(tracking_path)
    result = os.path.join(build_common.OUT_DIR, 'lint', path + '.result')
    gen.build(result, 'lint', path, implicit=deps, use_staging=False)
    result_files.append(result)
  merged = os.path.join(build_common.OUT_DIR, 'lint_results.txt')
  gen.build(merged, 'lint_merge', result_files, implicit=common_deps,
            use_staging=False)
def _generate_check_symbols_ninja():
  """Emit rules checking Bionic libraries' symbols against Android's."""
  # If we do not use NDK direct execution, the compatibility is less
  # important.
  if not build_common.use_ndk_direct_execution():
    return
  gen = ninja_generator.NinjaGenerator('check_symbols')
  script = staging.as_staging('src/build/check_symbols.py')
  rule_name = 'check_symbols'
  gen.rule(rule_name,
           command=('src/build/run_python %s $android_lib $in %s' % (
               script, build_common.get_test_output_handler())),
           description=(rule_name + ' $in'))
  arch_subdir = 'arch-arm' if OPTIONS.is_arm() else 'arch-x86'
  lib_dir = os.path.join(_ANDROID_SYSTEM_IMAGE_DIR, arch_subdir, 'usr/lib')
  for so_file in build_common.find_all_files(lib_dir, suffixes='.so'):
    lib_name = os.path.basename(so_file)
    # For now, we only check Bionic.
    # TODO(crbug.com/408548): Extend this for other libraries.
    if lib_name not in ('libc.so', 'libdl.so', 'libm.so'):
      continue
    result_path = os.path.join(build_common.get_build_dir(),
                               'check_symbols',
                               'check_symbols.%s.result' % lib_name)
    gen.build(result_path, rule_name,
              build_common.get_build_path_for_library(lib_name),
              variables={'android_lib': staging.as_staging(so_file)},
              implicit=[script, staging.as_staging(so_file)])
def _generate_checkdeps_ninjas():
  """Run NaCl's checkdeps over selected src/ subdirectories."""
  if open_source.is_open_source_repo():
    # Do not run checkdeps on the open source repo since some directories
    # checked are not included there.
    return
  gen = ninja_generator.NinjaGenerator('checkdeps', target_groups=['lint'])
  checkdeps_script = staging.as_staging(
      'native_client/tools/checkdeps/checkdeps.py')
  gen.rule('checkdeps',
           command=('%s -v --root=$root $in_dir > $out.tmp 2>&1 '
                    '&& mv $out.tmp $out '
                    '|| (cat $out.tmp; rm $out.tmp; exit 1)'
                    % checkdeps_script),
           description='checkdeps $in_dir')
  # Detect bad #include lines in src/.
  # TODO(crbug.com/323786): Check #include lines in mods/ too.
  src_dir = os.path.join(build_common.get_arc_root(), 'src')
  src_deps = os.path.join(src_dir, 'DEPS')
  # TODO(crbug.com/323786): Check other directories in src/ too.
  for subdir in ('common', 'ndk_translation', 'posix_translation'):
    # Re-run when any source or DEPS file under the directory changes.
    deps = build_common.find_all_files(
        os.path.join(src_dir, subdir),
        suffixes=['h', 'c', 'cc', 'cpp', 'java', 'DEPS'],
        include_tests=True, use_staging=False)
    deps.extend([checkdeps_script, src_deps])
    result = os.path.join(build_common.OUT_DIR, 'checkdeps_%s.txt' % subdir)
    gen.build(result, 'checkdeps', [],
              variables={'root': src_dir, 'in_dir': subdir},
              implicit=deps)
def _get_generate_libvpx_asm_ninja():
  """Return a NinjaGenerator for libvpx assembly translation, or None.

  Only ARM builds need the RVCT-to-GAS translation rules.
  """
  if not OPTIONS.is_arm():
    return None
  gen = ninja_generator.NinjaGenerator('libvpx_asm')
  # Translate RVCT format assembly code into GNU Assembler format.
  gen.rule(_GEN_LIBVPX_ASM_RULE,
           command=_ADS2GAS + ' < $in > $out.tmp && (mv $out.tmp $out)')
  # Translate C source code into assembly code and run grep to
  # generate a list of constants. Assembly code generated by this rule
  # will be included from other assembly code. See
  # third_party/android/external/libvpx/libvpx.mk for corresponding
  # rules written in Makefile.
  include_flags = ' '.join(
      '-I' + staging.as_staging(path)
      for path in ('android/external/libvpx/armv7a-neon',
                   'android/external/libvpx/libvpx'))
  gen.rule(_GEN_LIBVPX_OFFSETS_ASM_RULE,
           command=('%s -DINLINE_ASM %s -S $in -o $out.s && '
                    'grep \'^[a-zA-Z0-9_]* EQU\' $out.s | '
                    'tr -d \'$$\\#\' | '
                    '%s > $out.tmp && (mv $out.tmp $out)' % (
                        toolchain.get_tool(OPTIONS.target(), 'cc'),
                        include_flags, _ADS2GAS)))
  return gen
def _generate_disallowed_symbols_ninja():
  """Copy the disallowed-symbols list into the generated symbols directory."""
  gen = ninja_generator.NinjaGenerator('disallowed_symbols')
  result = os.path.join(build_common.get_build_dir(),
                        'gen_symbols', 'disallowed_symbols.defined')
  gen.build([result], 'copy_symbols_file',
            'src/build/disallowed_symbols.txt',
            implicit='src/build/symbol_tool.py')
def _generate_libcommon_ninjas():
  """Generate libcommon in both the plain and libc++ flavors."""
  gen = ninja_generator.NinjaGenerator('libcommon_gen_sources')
  # Both flavors share the generated wrapped-functions source.
  generated_sources = [_get_wrapped_functions_cc(gen)]
  for name, count, use_libcxx in (('libcommon', 1, False),
                                  ('libcommon_libc++', 0, True)):
    _generate_libcommon_ninja(module_name=name,
                              instances=count,
                              enable_libcxx=use_libcxx,
                              extra_sources=generated_sources)
def generate_binaries_depending_ninjas(_):
  """Set up static-initializer analysis over the checked libraries.

  Only runs for optimized bare-metal i686 builds with debug info and
  without debug code, and never in the open source repository.
  """
  if (not OPTIONS.is_bare_metal_i686() or
      not OPTIONS.is_optimized_build() or
      # None of the targets analyzed are currently built in the open source
      # repository.
      open_source.is_open_source_repo() or
      # Run the checker only when --disable-debug-code is specified.
      # Locations of static initializers differ depending on the debug-code
      # option.
      OPTIONS.is_debug_code_enabled() or
      # The checker only works with debug symbols.
      not OPTIONS.is_debug_info_enabled()):
    # The static analysis tool's output varies between debug and non-debug
    # builds, so we pick non-debug as the default.
    return
  gen = ninja_generator.NinjaGenerator('analyze_static_initializers')
  script = staging.as_staging(
      'android/external/chromium_org/tools/linux/dump-static-initializers.py')
  gen.rule('analyze_static_initializers',
           command=('python src/build/run_python %s -d $in | head --lines=-1 | '
                    'egrep -ve \'^# .*\.cpp \' |'
                    'sed -e \'s/ T\.[0-9]*/ T.XXXXX/\' |'
                    'diff -u $expect - && touch $out' % script),
           description='analyze_static_initializers $in')
  libraries = build_common.CHECKED_LIBRARIES
  libraries_fullpath = [
      os.path.join(build_common.get_load_library_path(), lib)
      for lib in libraries]
  for lib_name, lib_path in zip(libraries, libraries_fullpath):
    # You can manually update the text files by running
    # src/build/update_static_initializer_expectations.py.
    expect = 'src/build/dump-static-initializers-%s-expected.txt' % lib_name
    result_path = os.path.join(
        build_common.get_build_dir(), 'dump_static_initializers',
        'dump_static_initializers.%s.result' % lib_name)
    gen.build(result_path, 'analyze_static_initializers', lib_path,
              variables={'out': result_path, 'expect': expect},
              # Add |libraries_fullpath| to implicit= not to run the analyzer
              # script until all libraries in |libraries_fullpath| become
              # ready. This makes it easy to use
              # update_static_initializer_expectations.py especially when you
              # remove global variables from two or more libraries at the
              # same time.
              implicit=[script, expect] + libraries_fullpath)
def _generate_lint_test_ninjas():
  """Emit tests for analyze_diffs.py driven by canned .fail/.success mods."""
  gen = ninja_generator.NinjaGenerator('analyze_diffs_test')
  script = 'src/build/analyze_diffs.py'
  # A .fail test expects analyze_diffs to fail with exactly the errors
  # recorded in the matching .fail file.
  gen.rule('analyze_diffs_test_fail',
           command=('if %s --under_test $in > $output_path 2>&1; then '
                    ' echo "Expected failure, but there was none"; exit 1; '
                    'else '
                    ' if ! diff $output_path $in.fail > $out; then '
                    ' echo "Differences from expected errors:"; '
                    ' cat $out; '
                    ' rm -f $out; '
                    ' echo "To update: cp $output_path $in.fail"; '
                    ' exit 1; '
                    ' fi; '
                    'fi' % script),
           description='analyze_diffs_test_fail $in')
  # A .success test expects analyze_diffs to succeed and stay silent.
  gen.rule('analyze_diffs_test_success',
           command=('if ! %s --under_test $in > $output_path 2>&1; then '
                    ' echo "Unexpected failure"; cat $output_path; exit 1; '
                    'elif [ -s $output_path ]; then '
                    ' echo "Succeeded but had unexpected output:"; '
                    ' cat $output_path; '
                    ' exit 1; '
                    'else '
                    ' touch $out; '
                    'fi' % script),
           description='analyze_diffs_test_success $in')
  out_dir = os.path.join(build_common.get_target_common_dir(),
                         'analyze_diff_tests')
  for path in build_common.find_all_files([staging.TESTS_MODS_PATH],
                                          include_tests=True):
    if path.endswith('.fail'):
      rule = 'analyze_diffs_test_fail'
    elif path.endswith('.success'):
      rule = 'analyze_diffs_test_success'
    else:
      continue
    no_ext = os.path.splitext(path)[0]
    out_path = os.path.join(
        out_dir, os.path.relpath(no_ext, staging.TESTS_MODS_PATH))
    gen.build(out_path + '.results', rule, no_ext,
              variables={'output_path': out_path + '.output'},
              implicit=[script], use_staging=False)
def _generate_dependent_ninjas(ninja_list):
  """Generate the stage of ninjas coming after all executables."""
  timer = build_common.SimpleTimer()
  timer.start('Generating dependent ninjas', OPTIONS.verbose())
  # Collect the final filesystem locations of everything the earlier
  # ninjas install under the root directory.
  install_targets = [
      build_common.get_android_fs_path(p)
      for gen in ninja_list
      for p in gen._root_dir_install_targets]
  generator_list = _list_ninja_generators(
      _config_loader, 'generate_binaries_depending_ninjas')
  tasks = [
      ninja_generator_runner.GeneratorTask(
          config_context, (generator, install_targets))
      for config_context, generator in generator_list]
  result_list = ninja_generator_runner.run_in_parallel(
      tasks, OPTIONS.configure_jobs())
  dependent_ninjas = []
  for config_result in result_list:
    dependent_ninjas.extend(config_result.generated_ninjas)
  # NOTICE aggregation must see every generated ninja, including the
  # dependent ones produced above.
  notice_ninja = ninja_generator.NoticeNinjaGenerator('notices')
  notice_ninja.build_notices(ninja_list + dependent_ninjas)
  dependent_ninjas.append(notice_ninja)
  all_test_lists_ninja = ninja_generator.NinjaGenerator('all_test_lists')
  all_test_lists_ninja.build_all_test_lists(ninja_list)
  dependent_ninjas.append(all_test_lists_ninja)
  all_unittest_info_ninja = ninja_generator.NinjaGenerator('all_unittest_info')
  all_unittest_info_ninja.build_all_unittest_info(ninja_list)
  dependent_ninjas.append(all_unittest_info_ninja)
  timer.done()
  return dependent_ninjas
def _generate_libposix_files():
  """Install canned /proc file contents into the root filesystem image."""
  gen = ninja_generator.NinjaGenerator('libposix_files')
  install_files = [
      ('proc', 'src/posix_translation/proc/cmdline'),
      ('proc', 'src/posix_translation/proc/loadavg'),
      ('proc', 'src/posix_translation/proc/meminfo'),
      ('proc/net', 'src/posix_translation/proc/net/tcp'),
      ('proc/net', 'src/posix_translation/proc/net/tcp6'),
      ('proc/net', 'src/posix_translation/proc/net/udp'),
      ('proc/net', 'src/posix_translation/proc/net/udp6'),
      ('proc', 'src/posix_translation/proc/stat'),
      ('proc', 'src/posix_translation/proc/version')]
  for dest_dir, src in install_files:
    # Keep the source file's basename under the destination directory.
    gen.install_to_root_dir(
        os.path.join(dest_dir, os.path.basename(src)), src)
def _generate_libposix_translation_for_test():
  """Build libposix_translation_for_test.so and symlink it for tests.

  Produces a test-only build of libposix_translation (including the
  generated wrap_syscall_aliases.S source), then emits a second ninja
  that symlinks it into the test library path under the production
  library's soname.
  """
  n = ninja_generator.SharedObjectNinjaGenerator(
      'libposix_translation_for_test',
      dt_soname='libposix_translation.so',
      is_system_library=True,
      is_for_test=True)
  # libposix_translation_for_test is built using generated code in the out/
  # directory. The logic we have to scan for NOTICES does not find one for
  # this generated code, and complains later. The libposix_translation.so
  # code normally inherits the project NOTICE from src/NOTICE, so we just
  # explicitly point to it here as the notice to use for this special
  # version of the library.
  n.add_notice_sources([staging.as_staging('src/NOTICE')])
  gen_rule_name = 'gen_wrap_syscall_aliases_s'
  gen_script_path = os.path.join('src/common', gen_rule_name + '.py')
  # The generator script writes the .S source to stdout; write to a temp
  # file first so a failed run does not leave a truncated output behind.
  n.rule(gen_rule_name,
         command='%s > $out.tmp && mv $out.tmp $out' % gen_script_path,
         description=gen_rule_name + ' $in')
  gen_out_path = os.path.join(build_common.get_build_dir(),
                              'posix_translation_gen_sources',
                              'wrap_syscall_aliases.S')
  # Regenerate when the script or any Python module it imports changes.
  gen_implicit_deps = python_deps.find_deps(gen_script_path)
  n.build(gen_out_path, gen_rule_name, implicit=gen_implicit_deps)
  # Following deps order is important. art_libc_supplement_for_test.so
  # should be placed before libc.so.
  if not open_source.is_open_source_repo():
    # This is for ART unit tests which have not been opensourced. To not let
    # posix_translation depend on ART on arc_open, use is_open_source_repo().
    n.add_library_deps('art_libc_supplement_for_test.so')
  n.add_library_deps('libc.so', 'libdl.so')
  n.build_default([gen_out_path], base_path=None).link()

  # /test/libposix_translation.so should be a symbolic link to
  # ../lib/libposix_translation_for_test.so.
  n = ninja_generator.NinjaGenerator('test_libposix_translation')
  orig_so = os.path.join(
      build_common.get_load_library_path(),
      'libposix_translation_for_test.so')
  link_so = os.path.join(
      build_common.get_load_library_path_for_test(),
      'libposix_translation.so')
  # Relative link target so the layout stays relocatable.
  command = 'ln -sf %s %s' % (
      os.path.join('../../lib/', os.path.basename(orig_so)), link_so)
  n.build(link_so, 'run_shell_command', implicit=orig_so,
          variables={'command': command})
def _filter(vars):
  """make_to_ninja filter keeping only the android_webview Java modules.

  NOTE(review): the parameter name |vars| shadows the builtin vars(); it is
  kept because make_to_ninja filters are conventionally written this way in
  this codebase.

  Returns:
    True to let make_to_ninja convert the module, False to drop it.
  """
  if not (vars.is_java_library() or vars.is_package()):
    return False
  module_name = vars.get_module_name()
  if module_name not in ['android_webview_java_with_new_resources',
                         'android_webview_java']:
    return False
  # Building parts of chromium is special and make_to_ninja can not handle
  # path expansion on LOCAL_GENERATED_SOURCES correctly. |sources| contains
  # files that are listed in LOCAL_GENERATED_SOURCES, and following code
  # replaces them with right places.
  # The slice assignment mutates the list held by |vars| in place.
  sources = vars.get_generated_sources()
  sources[:] = [_fix_gen_source_path(x) for x in sources]
  if module_name == 'android_webview_java':
    # Generate a custom NinjaGenerator to generate Java source files from
    # template files. This rule is based on target .mk files in
    # android/external/chromium_org/base/.
    # TODO(crbug.com/394654): Remove a manually converted NinjaGenerator once
    # make_to_ninja supports 'LOCAL_MODULE_CLASS := GYP' modules.
    n = ninja_generator.NinjaGenerator(module_name + '_gyp')
    n.rule('gyp_gcc_preprocess',
           'mkdir -p $out_dir && '
           'cd $work_dir && '
           'python ../build/android/gyp/gcc_preprocess.py '
           '--include-path=.. --output=$real_out --template=$in',
           description='gyp/gcc_preprocess.py --include-path=.. '
           '--output=$out --template=$in')
    base_path = vars.get_path()
    for source in sources:
      template = _get_template_path(base_path, source)
      out_dir = os.path.dirname(source)
      # The preprocess step runs from inside the chromium tree, so compute
      # the working directory from the path component following 'chromium'.
      work_dir = os.path.join(
          base_path, '..',
          _get_path_components_following_mark(source, 'chromium')[0])
      # $real_out must be absolute because the rule cd's into $work_dir.
      variables = {'out_dir': out_dir,
                   'work_dir': work_dir,
                   'real_out': os.path.realpath(source)}
      n.build(source, 'gyp_gcc_preprocess',
              inputs=os.path.realpath(template), variables=variables)
  return True
def generate_binaries_depending_ninjas(root_dir_install_all_targets):
  """Build the read-only filesystem image from all root-dir install targets."""
  gen = ninja_generator.NinjaGenerator('readonly_fs_image')
  rule_name = 'gen_readonly_fs_image'
  symlink_spec = ','.join(
      [x + ':' + y for x, y in _SYMLINK_MAP.iteritems()])
  empty_dir_spec = ','.join(_EMPTY_DIRECTORIES)
  empty_file_spec = ','.join(_EMPTY_FILES)
  gen.rule(rule_name,
           command=('%s -o $out -s "%s" -d "%s" -f "%s" $in' % (
               _CREATE_READONLY_FS_IMAGE_SCRIPT, symlink_spec,
               empty_dir_spec, empty_file_spec)),
           description=rule_name)
  image_path = build_common.get_posix_translation_readonly_fs_image_file_path()
  # Sort for a deterministic command line regardless of generation order.
  inputs = sorted(root_dir_install_all_targets)
  # The configure options file is a dependency as symlinks in the read-only
  # file system image changes per the configure options.
  deps = [_CREATE_READONLY_FS_IMAGE_SCRIPT,
          OPTIONS.get_configure_options_file()]
  gen.build([image_path], rule_name, inputs, implicit=deps)
def _generate_chromium_ppapi_fpabi_shim_ninja():
  """Generate ppapi_fpabi_shim.c from the PPAPI IDL files.

  Only runs when the build is configured to use the PPAPI FP-ABI shim.
  """
  if not build_common.use_ppapi_fpabi_shim():
    return
  ninja_name = 'libppapi_fpabi_shim'
  n = ninja_generator.NinjaGenerator(ninja_name)
  rule_name = 'gen_' + ninja_name
  ppapi_dir = staging.as_staging('chromium-ppapi/ppapi')
  script_path = os.path.join(ppapi_dir, 'gen_ppapi_fpabi_shim.py')
  api_dir = os.path.join(ppapi_dir, 'api')
  out_file = os.path.join(build_common.get_ppapi_fpabi_shim_dir(),
                          'ppapi_fpabi_shim.c')
  # Paths are shell-quoted because the command is a single shell string.
  # On failure, dump the captured log and remove it so the next run retries.
  gen_command = [
      'src/build/run_python', pipes.quote(script_path),
      '--wnone', '--fpabi', '--fpabishim', '$out.tmp',
      '--range=start,end', '--srcroot', pipes.quote(api_dir),
      '>', '$out.log',
      '|| (cat $out.log; rm $out.log; exit 1)']
  # On success, touch the output file. The generator does not update the file
  # if the output content is same. In such a case, without this touch, if the
  # script is updated (by such as repository sync), the script timestamp gets
  # newer than the generated code, so that ninja always re-runs the script
  # generation and later phases.
  # The trailing printf writes a depfile listing the IDL inputs for $out.
  n.rule(rule_name,
         command=('(' + ' '.join(gen_command) + ')' +
                  ' && touch $out.tmp && mv $out.tmp $out' +
                  ' && printf "%s: %s" $out "$in" > $out.d'),
         depfile='$out.d',
         description=rule_name + ' $out')
  idls = n.find_all_files(api_dir, '.idl', use_staging=False)
  n.build(out_file, rule_name, idls,
          implicit=[script_path, 'src/build/run_python'])