def _generate_checkdeps_ninjas():
  """Generates ninja rules that run NaCl's checkdeps.py over src/.

  Flags #include lines in src/ that violate the DEPS files.
  """
  if open_source.is_open_source_repo():
    # Do not run checkdeps on the open source repo since some directories
    # checked are not included there.
    return
  n = ninja_generator.NinjaGenerator('checkdeps', target_groups=['lint'])
  checkdeps_script = staging.as_staging(
      'native_client/tools/checkdeps/checkdeps.py')
  # Write to $out.tmp first so a failing run leaves no stale $out; on failure
  # dump the captured output to the console and delete the temp file.
  n.rule('checkdeps',
         command=('%s -v --root=$root $in_dir > $out.tmp 2>&1 '
                  '&& mv $out.tmp $out '
                  '|| (cat $out.tmp; rm $out.tmp; exit 1)' % checkdeps_script),
         description='checkdeps $in_dir')
  # Detect bad #include lines in src/.
  # TODO(crbug.com/323786): Check #include lines in mods/ too.
  src_dir = os.path.join(build_common.get_arc_root(), 'src')
  src_deps = os.path.join(src_dir, 'DEPS')
  for d in ['common', 'ndk_translation', 'posix_translation']:
    # TODO(crbug.com/323786): Check other directories in src/ too.
    # Re-run the check whenever any source or DEPS file in the directory,
    # the checkdeps script itself, or the top-level src/DEPS changes.
    implicit = build_common.find_all_files(
        os.path.join(src_dir, d),
        suffixes=['h', 'c', 'cc', 'cpp', 'java', 'DEPS'],
        include_tests=True, use_staging=False)
    implicit.extend([checkdeps_script, src_deps])
    out = os.path.join(build_common.OUT_DIR, 'checkdeps_%s.txt' % d)
    n.build(out, 'checkdeps', [],
            variables={'root': src_dir, 'in_dir': d},
            implicit=implicit)
def _generate_checkdeps_ninjas():
  """Sets up 'checkdeps' lint targets for selected src/ directories."""
  if open_source.is_open_source_repo():
    # Do not run checkdeps on the open source repo since some directories
    # checked are not included there.
    return
  n = ninja_generator.NinjaGenerator('checkdeps', target_groups=['lint'])
  checkdeps_script = staging.as_staging(
      'native_client/tools/checkdeps/checkdeps.py')
  # The command writes to $out.tmp and only moves it to $out on success, so
  # a failed check leaves no output file and prints the tool's output.
  n.rule('checkdeps',
         command=('%s -v --root=$root $in_dir > $out.tmp 2>&1 '
                  '&& mv $out.tmp $out '
                  '|| (cat $out.tmp; rm $out.tmp; exit 1)' % checkdeps_script),
         description='checkdeps $in_dir')
  # Detect bad #include lines in src/.
  # TODO(crbug.com/323786): Check #include lines in mods/ too.
  src_dir = os.path.join(build_common.get_arc_root(), 'src')
  src_deps = os.path.join(src_dir, 'DEPS')
  for d in ['common', 'ndk_translation', 'posix_translation']:
    # TODO(crbug.com/323786): Check other directories in src/ too.
    implicit = build_common.find_all_files(
        os.path.join(src_dir, d),
        suffixes=['h', 'c', 'cc', 'cpp', 'java', 'DEPS'],
        include_tests=True, use_staging=False)
    # Also re-check when the script or the top-level DEPS file changes.
    implicit.extend([checkdeps_script, src_deps])
    out = os.path.join(build_common.OUT_DIR, 'checkdeps_%s.txt' % d)
    n.build(out, 'checkdeps', [],
            variables={'root': src_dir, 'in_dir': d},
            implicit=implicit)
def _filter(vars):
  """MakefileNinjaTranslator filter that converts libc++ to a shared object.

  Returns False to skip a module, True to build it (after tweaking |vars|).
  """
  if open_source.is_open_source_repo() and vars.is_host():
    return False
  # Shared object version just links the static library version. To avoid a
  # module name conflict, we just build static library version as a converted
  # shared object.
  if vars.is_shared():
    return False
  make_to_ninja.Filters.convert_to_shared_lib(vars)
  # Build libc++.so as a system library so that it does not link to
  # libstlport.so, and does not emit syscall wrappers.
  vars.get_generator_args()['is_system_library'] = True
  if vars.is_target():
    # A side-effect of |is_system_library| is that it also removes the
    # dependencies on libc, libm, and libdl. We still need them, so add them
    # back.
    vars.get_shared_deps().extend(['libc', 'libm', 'libdl'])
  # Install an additional ARM version of libc++.so for Renderscript.
  if (vars.is_target() and vars.get_module_name() == 'libc++' and
      not OPTIONS.is_arm()):
    vars.set_canned_arm(True)
    # Use the shared libcompiler_rt instead of linking it in wholesale.
    vars.get_whole_archive_deps().remove('libcompiler_rt')
    vars.get_shared_deps().append('libcompiler_rt')
  return True
def _set_up_git_hooks():
  """Installs ARC's git hooks into .git/hooks (internal repo only).

  These git hooks do not make sense for the open source repo because they:
  1) lint the source, but that was already done when committed internally,
     and we will run 'ninja all' as a test step before committing to open
     source.
  2) add fields to the commit message for the internal dev workflow.
  """
  if open_source.is_open_source_repo():
    return
  here = os.path.dirname(__file__)
  hooks = {
      'pre-push': os.path.join(here, 'git_pre_push.py'),
      'prepare-commit-msg': os.path.join(here, 'git_prepare_commit.py'),
      'commit-msg': 'third_party/gerrit/commit-msg',
  }
  hooks_dir = os.path.join(build_common.get_arc_root(), '.git', 'hooks')
  for hook_name, target in hooks.iteritems():
    file_util.create_link(os.path.join(hooks_dir, hook_name), target,
                          overwrite=True)
  # Remove hooks that have been superseded (pre-commit -> pre-push).
  for hook_name in ['pre-commit']:
    stale = os.path.join(hooks_dir, hook_name)
    if os.path.lexists(stale):
      os.unlink(stale)
def main(args):
  """Runs each unit test named in args[1:], printing PASS/FAIL per test.

  Returns 0 when every test passes, 1 on the first failure.
  """
  OPTIONS.parse_configure_file()
  # TODO(crbug.com/378196): Make qemu-arm available in open source in order to
  # run any unit tests there.
  if open_source.is_open_source_repo() and OPTIONS.is_arm():
    return 0
  for test_name in args[1:]:
    proc = subprocess.Popen(
        ['python', 'src/build/run_unittest.py', test_name],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = proc.communicate()[0]
    sys.stdout.write(output)
    if proc.returncode:
      sys.stdout.write('FAIL (exit=%d)\n' % proc.returncode)
      sys.stdout.write(output + '\n')
      return 1
    if 'PASS\n' not in output:
      sys.stdout.write('FAIL (no PASS)\n')
      sys.stdout.write(output + '\n')
      return 1
    sys.stdout.write('OK\n')
  return 0
def main(args):
  """Drives src/build/run_unittest.py for every test on the command line."""
  OPTIONS.parse_configure_file()
  # TODO(crbug.com/378196): Make qemu-arm available in open source in order to
  # run any unit tests there.
  if open_source.is_open_source_repo() and OPTIONS.is_arm():
    return 0
  for name in args[1:]:
    child = subprocess.Popen(['python', 'src/build/run_unittest.py', name],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
    captured = child.communicate()[0]
    sys.stdout.write(captured)
    # Determine the failure banner, if any, then report uniformly.
    failure = None
    if child.returncode:
      failure = 'FAIL (exit=%d)\n' % child.returncode
    elif captured.find('PASS\n') < 0:
      failure = 'FAIL (no PASS)\n'
    if failure is not None:
      sys.stdout.write(failure)
      sys.stdout.write(captured + '\n')
      return 1
    sys.stdout.write('OK\n')
  return 0
def _read_ignore_rule(path):
  """Loads the JSON mapping of file paths to lint checks to ignore.

  The ignore file maps file paths to lists of lint rule names (the
  <List Class>.NAME attributes).  '#' comments and blank lines are allowed
  for readability.  A typical entry looks like:

    # Exclude src/xyzzy.cpp from the checks "gnusto" and "rezrov"
    "src/xyzzy.cpp": ["gnusto", "rezrov"]
  """
  if not path:
    return {}
  rules = json.loads('\n'.join(file_util.read_metadata_file(path)))
  # Quick verification: every key must name an existing file.  Skipped on
  # the open source repo since not all files are currently open sourced.
  if not open_source.is_open_source_repo():
    missing = [key for key in rules if not os.path.exists(key)]
    assert not missing, (
        'The key in \'%s\' contains unknown files: %s' % (path, missing))
  return rules
def _read_ignore_rule(path):
  """Parses an ignore file into a {path: [lint rule names]} dict.

  The file is JSON-like text where '#' starts a comment and blank lines are
  ignored, e.g.:

    # Exclude src/xyzzy.cpp from the checks "gnusto" and "rezrov"
    "src/xyzzy.cpp": ["gnusto", "rezrov"]
  """
  if not path:
    return {}
  content = '\n'.join(file_util.read_metadata_file(path))
  result = json.loads(content)
  if not open_source.is_open_source_repo():
    # Internally, verify that every listed path exists.  (Not checked on the
    # open source repo since not all files are currently open sourced.)
    unknown = [key for key in result if not os.path.exists(key)]
    assert not unknown, "The key in '%s' contains unknown files: %s" % (
        path, unknown)
  return result
def main():
  """Configures the ARC build: syncs dependencies and generates ninja files.

  Returns 0 on success, -1 when the build options cannot be parsed.
  """
  # Disable line buffering (buffer size 0) so progress output appears
  # immediately.
  sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
  if not _configure_build_options():
    return -1
  _update_arc_version_file()
  _ensure_downloads_up_to_date()
  # sync_chrome is imported lazily here; presumably it only exists in the
  # internal repo (cf. the lazy internal-only imports in
  # _ensure_downloads_up_to_date) — TODO confirm.
  if not open_source.is_open_source_repo():
    import sync_chrome
    sync_chrome.run()
  adb_target = 'linux-arm' if OPTIONS.is_arm() else 'linux-x86_64'
  sync_adb.run(adb_target)
  _set_up_internal_repo()
  _gclient_sync_third_party()
  _check_javac_version()
  _cleanup_orphaned_pyc_files()
  _set_up_git_hooks()
  _set_up_chromium_org_submodules()
  # Make sure the staging directory is up to date whenever configure
  # runs to make it easy to generate rules by scanning directories.
  staging.create_staging()
  config_runner.generate_ninjas()
  return 0
def should_run(self, path):
  """Returns True if this linter should check |path|."""
  # mods/upstream directory is not yet included in open source so we cannot
  # run this linter.
  if open_source.is_open_source_repo():
    return False
  basename = os.path.basename(path)
  return (basename != 'OWNERS' and
          path.startswith(analyze_diffs.UPSTREAM_BASE_PATH))
def should_run(self, path):
  """Decides whether the upstream linter applies to |path|."""
  # The linter needs mods/upstream, which is not yet part of the open source
  # checkout; OWNERS files are never checked.
  if open_source.is_open_source_repo() or os.path.basename(path) == 'OWNERS':
    return False
  return path.startswith(analyze_diffs.UPSTREAM_BASE_PATH)
def generate_ninjas():
  """Requests the webview ninja generation tasks in parallel."""
  # Only Java code is built here, so nothing to do in open source.
  if open_source.is_open_source_repo():
    return
  tasks = (_generate_android_webview,
           _generate_webview_library,
           _generate_webview_package)
  ninja_generator_runner.request_run_in_parallel(*tasks)
def generate_ninjas():
  """Generates ninjas for libjnigraphics (a stub on the open source repo)."""
  if open_source.is_open_source_repo():
    # Provide a stub.
    stub = ninja_generator.SharedObjectNinjaGenerator('libjnigraphics')
    stub.add_notice_sources([staging.as_staging('src/NOTICE')])
    stub.link()
    return
  translator = make_to_ninja.MakefileNinjaTranslator(
      'android/frameworks/base/native/graphics/jni')
  translator.generate()
def _verify_mod_description_file(self, desc_file, msg):
  """Checks that the mod description file |desc_file| exists.

  Reports |msg| + |desc_file| through show_error() when the file is missing,
  except on the open source repo outside of tests.  Always returns True.
  """
  if (_args and _args.under_test and
      self._our_path.startswith(staging.TESTS_MODS_PATH)):
    # Under test, description files live under the tests tree instead.
    desc_file = os.path.join(staging.TESTS_BASE_PATH, desc_file)
  if not os.path.isfile(desc_file):
    # In open source repo we have no upstream files (except when running
    # tests) but if in internal repo, we verify the file exists.
    if (_args and _args.under_test) or not open_source.is_open_source_repo():
      show_error(self._stats, msg + desc_file)
  return True
def _ensure_downloads_up_to_date():
  """Refreshes every cached download package used by the build.

  Exits the process when the multiarch gdb sync fails.
  """
  cache_path = OPTIONS.download_cache_path()
  cache_size = OPTIONS.download_cache_size()
  sync_nacl_sdk.check_and_perform_updates(cache_path, cache_size)
  download_sdk_and_ndk.check_and_perform_updates(cache_path, cache_size)
  download_cts_files.check_and_perform_updates(cache_path, cache_size)
  download_arc_welder_deps.check_and_perform_updates(cache_path, cache_size)
  if sync_gdb_multiarch.main():
    sys.exit(1)
  # The open source repository does not have download_internal_apks.py.
  # Import lazily so this module still loads there.
  if (not open_source.is_open_source_repo() and
      OPTIONS.internal_apks_source() == 'prebuilt'):
    import download_internal_apks
    download_internal_apks.check_and_perform_updates(cache_path, cache_size)
  if not open_source.is_open_source_repo():
    import download_third_party_apks
    download_third_party_apks.check_and_perform_updates(cache_path, cache_size)
def _filter(vars):
  """MakefileNinjaTranslator filter for the libbacktrace modules."""
  if open_source.is_open_source_repo() and vars.is_host():
    return False
  module = vars.get_module_name()
  # libbacktrace_test is for building 'backtrace_test' target defined in the
  # same Android.mk, but we do not have an easy way to build the test binary.
  if module == 'libbacktrace_test':
    return False
  vars.enable_clang()
  vars.enable_cxx11()
  vars.get_shared_deps().append('liblog')
  return True
def generate_binaries_depending_ninjas(_):
  """Generates a ninja that diffs static initializers against expectations.

  Only active for optimized Bare Metal i686 builds with debug info but
  without debug code; otherwise no rules are generated.
  """
  if (not OPTIONS.is_bare_metal_i686() or
      not OPTIONS.is_optimized_build() or
      # None of the targets analyzed are currently built in the open source
      # repository.
      open_source.is_open_source_repo() or
      # Run the checker only when --disable-debug-code is specified. Locations
      # of static initializers differ depending on the debug-code option.
      OPTIONS.is_debug_code_enabled() or
      # The checker only works with debug symbols.
      not OPTIONS.is_debug_info_enabled()):
    # The static analysis tool's output varies between debug and non-debug
    # builds, so we pick non-debug as the default.
    return
  n = ninja_generator.NinjaGenerator('analyze_static_initializers')
  script = staging.as_staging(
      'android/external/chromium_org/tools/linux/dump-static-initializers.py')
  # Pipeline: dump initializers, strip the summary line and per-.cpp noise,
  # normalize compiler-generated T.<n> symbols, then diff against $expect.
  n.rule('analyze_static_initializers',
         command=('python src/build/run_python %s -d $in | head --lines=-1 | '
                  'egrep -ve \'^# .*\.cpp \' |'
                  'sed -e \'s/ T\.[0-9]*/ T.XXXXX/\' |'
                  'diff -u $expect - && touch $out' % script),
         description='analyze_static_initializers $in')
  libraries = build_common.CHECKED_LIBRARIES
  libraries_fullpath = [
      os.path.join(build_common.get_load_library_path(), lib)
      for lib in libraries]
  for library in zip(libraries, libraries_fullpath):
    # You can manually update the text files by running
    # src/build/update_static_initializer_expectations.py.
    expect = 'src/build/dump-static-initializers-%s-expected.txt' % library[0]
    result_path = os.path.join(build_common.get_build_dir(),
                               'dump_static_initializers',
                               'dump_static_initializers.%s.result' %
                               library[0])
    n.build(result_path, 'analyze_static_initializers', library[1],
            variables={'out': result_path, 'expect': expect},
            # Add |libraries_fullpath| to implicit= not to run the analyzer
            # script until all libraries in |libraries_fullpath| become ready.
            # This makes it easy to use
            # update_static_initializer_expectations.py especially when you
            # remove global variables from two or more libraries at the same
            # time.
            implicit=[script, expect] + libraries_fullpath)
def _verify_ninja_generator_list(ninja_list):
  """Sanity-checks the generated ninja list.

  Raises Exception when a module or a ninja file is generated more than
  once; internally also verifies archive usage counts.
  """
  module_name_count_dict = collections.defaultdict(int)
  output_path_name_count_dict = collections.defaultdict(int)
  archive_ninja_list = []
  shared_ninja_list = []
  exec_ninja_list = []
  test_ninja_list = []
  for ninja in ninja_list:
    # Use is_host() in the key as the accounting should be done separately
    # for the target and the host.
    key = (ninja.get_module_name(), ninja.is_host())
    module_name_count_dict[key] += 1
    output_path_name_count_dict[ninja.get_ninja_path()] += 1
    # Bucket each generator by type; tests are split out of plain
    # executables (TestNinjaGenerator subclasses ExecNinjaGenerator).
    if isinstance(ninja, ninja_generator.ArchiveNinjaGenerator):
      archive_ninja_list.append(ninja)
    if isinstance(ninja, ninja_generator.SharedObjectNinjaGenerator):
      shared_ninja_list.append(ninja)
    if isinstance(ninja, ninja_generator.ExecNinjaGenerator):
      if isinstance(ninja, ninja_generator.TestNinjaGenerator):
        test_ninja_list.append(ninja)
      else:
        exec_ninja_list.append(ninja)
  # Make sure there are no duplicated ninja modules.
  duplicated_module_list = [
      item for item in module_name_count_dict.iteritems() if item[1] > 1]
  if duplicated_module_list:
    errors = []
    for (module_name, is_host), count in duplicated_module_list:
      host_or_target = ('host' if is_host else 'target')
      error = '%s for %s: %d' % (module_name, host_or_target, count)
      errors.append(error)
    raise Exception(
        'Ninja generated multiple times: ' + ', '.join(errors))
  # Make sure there are no duplicated ninja files.
  duplicated_ninja_paths = [
      (path, count) for path, count in output_path_name_count_dict.iteritems()
      if count > 1]
  if duplicated_ninja_paths:
    errors = ['%s: %d' % (path, count)
              for path, count in duplicated_ninja_paths]
    raise Exception(
        'Ninja files generated multiple times: ' + ', '.join(errors))
  # Make sure for each modules, the expected usage count and actual reference
  # count is same. The open source repository builds a subset of binaries so
  # we do not check its numbers.
  if not open_source.is_open_source_repo():
    ninja_generator.ArchiveNinjaGenerator.verify_usage(
        archive_ninja_list, shared_ninja_list, exec_ninja_list,
        test_ninja_list)
def _ensure_downloads_up_to_date():
  """Checks and updates all downloaded build dependencies.

  Calls sys.exit(1) if syncing the multiarch gdb fails.
  """
  cache_path = OPTIONS.download_cache_path()
  cache_size = OPTIONS.download_cache_size()
  sync_nacl_sdk.check_and_perform_updates(cache_path, cache_size)
  download_sdk_and_ndk.check_and_perform_updates(cache_path, cache_size)
  download_cts_files.check_and_perform_updates(cache_path, cache_size)
  download_arc_welder_deps.check_and_perform_updates(cache_path, cache_size)
  if sync_gdb_multiarch.main():
    sys.exit(1)
  # The open source repository does not have download_internal_apks.py,
  # hence the lazy, guarded imports below.
  if (not open_source.is_open_source_repo() and
      OPTIONS.internal_apks_source() == 'prebuilt'):
    import download_internal_apks
    download_internal_apks.check_and_perform_updates(
        cache_path, cache_size)
  if not open_source.is_open_source_repo():
    import download_third_party_apks
    download_third_party_apks.check_and_perform_updates(
        cache_path, cache_size)
def _filter_all_make_to_ninja(vars):
  """Global filter applied to every module converted from an Android.mk.

  Returns False to skip the module, True to keep it.
  """
  # All the following filters are only for the target.
  if vars.is_host():
    return True
  if vars.is_java_library() and open_source.is_open_source_repo():
    # We do not yet build all of the Java prerequisites in the open source
    # repository.
    return False
  if vars.is_c_library() or vars.is_executable():
    _filter_excluded_libs(vars)
    if not OPTIONS.is_arm():
      _filter_for_when_not_arm(vars)
  return True
def generate_ninjas():
  """Generates ninja rules for libui (a stub on the open source repo)."""
  if open_source.is_open_source_repo():
    # Provide a stub.
    stub = ninja_generator.SharedObjectNinjaGenerator('libui')
    stub.add_notice_sources([staging.as_staging('src/NOTICE')])
    stub.link()
    return
  gen = ninja_generator.SharedObjectNinjaGenerator(
      'libui', base_path='android/frameworks/native/libs/ui')
  gen.add_include_paths('android/system/core/libsync/include')
  gen.add_library_deps('libcutils.so', 'libhardware.so', 'liblog.so',
                       'libsync.so', 'libutils.so')
  gen.build_default_all_sources()
  gen.link()
def generate_test_ninjas():
  """Generates ninjas that run the build system's own python tests."""
  if not open_source.is_open_source_repo():
    # expected_driver_times_test.py and run_integration_tests_test.py have
    # extra implicit dependencies, so generate the ninja for them separately.
    extra_deps = [build_common.get_all_integration_test_lists_path(),
                  build_common.get_all_unittest_info_path()]
    implicit_map = {
        'src/build/cts/expected_driver_times_test.py': extra_deps,
        'src/build/run_integration_tests_test.py': extra_deps,
    }
    ninja_generator.generate_python_test_ninjas_for_path(
        'src/build', implicit_map=implicit_map, exclude='perf_test.py')
  ninja_generator_runner.request_run_in_parallel(_generate_lint_test_ninjas)
def main():
  """Refreshes the open source checkout at |dest| from the current tree.

  Must be run from the internal repo.  Returns 0 on success.
  """
  assert not open_source.is_open_source_repo(), ('Cannot be run from open '
                                                 'source repo.')
  parser = argparse.ArgumentParser()
  parser.add_argument('--force', action='store_true',
                      help='Overwrite any changes in the destination')
  parser.add_argument('--verbose', '-v', action='store_true',
                      help='Get verbose output')
  parser.add_argument('dest')
  args = parser.parse_args(sys.argv[1:])
  if args.verbose:
    logging.getLogger().setLevel(logging.INFO)
  run(args.dest, args.force)
  # Use logging's lazy %-style arguments instead of eager interpolation so
  # the string is only formatted when the record is actually emitted.
  logging.info('%s successfully updated', args.dest)
  return 0
def generate_test_ninjas():
  """Sets up python test ninjas for src/build plus the lint tests."""
  if not open_source.is_open_source_repo():
    # Two tests have extra implicit dependencies (the generated test list
    # files), so they get dedicated implicit_map entries.
    test_list_paths = [build_common.get_all_integration_test_lists_path(),
                       build_common.get_all_unittest_info_path()]
    special_tests = ('src/build/cts/expected_driver_times_test.py',
                     'src/build/run_integration_tests_test.py')
    implicit_map = dict.fromkeys(special_tests, test_list_paths)
    ninja_generator.generate_python_test_ninjas_for_path(
        'src/build', implicit_map=implicit_map, exclude='perf_test.py')
  ninja_generator_runner.request_run_in_parallel(_generate_lint_test_ninjas)
def generate_binaries_depending_ninjas(_):
  """Creates the static-initializer analysis ninja for checked libraries.

  Skipped unless this is an optimized Bare Metal i686 build with debug info
  and with debug code disabled.
  """
  if (not OPTIONS.is_bare_metal_i686() or
      not OPTIONS.is_optimized_build() or
      # None of the targets analyzed are currently built in the open source
      # repository.
      open_source.is_open_source_repo() or
      # Run the checker only when --disable-debug-code is specified. Locations
      # of static initializers differ depending on the debug-code option.
      OPTIONS.is_debug_code_enabled() or
      # The checker only works with debug symbols.
      not OPTIONS.is_debug_info_enabled()):
    # The static analysis tool's output varies between debug and non-debug
    # builds, so we pick non-debug as the default.
    return
  n = ninja_generator.NinjaGenerator('analyze_static_initializers')
  script = staging.as_staging(
      'android/external/chromium_org/tools/linux/dump-static-initializers.py')
  # The command normalizes compiler-generated T.<n> symbol names before
  # diffing against the checked-in expectation file ($expect).
  n.rule('analyze_static_initializers',
         command=('python src/build/run_python %s -d $in | head --lines=-1 | '
                  'egrep -ve \'^# .*\.cpp \' |'
                  'sed -e \'s/ T\.[0-9]*/ T.XXXXX/\' |'
                  'diff -u $expect - && touch $out' % script),
         description='analyze_static_initializers $in')
  libraries = build_common.CHECKED_LIBRARIES
  libraries_fullpath = [
      os.path.join(build_common.get_load_library_path(), lib)
      for lib in libraries]
  for library in zip(libraries, libraries_fullpath):
    # You can manually update the text files by running
    # src/build/update_static_initializer_expectations.py.
    expect = 'src/build/dump-static-initializers-%s-expected.txt' % library[0]
    result_path = os.path.join(build_common.get_build_dir(),
                               'dump_static_initializers',
                               'dump_static_initializers.%s.result' %
                               library[0])
    n.build(result_path, 'analyze_static_initializers', library[1],
            variables={'out': result_path, 'expect': expect},
            # Add |libraries_fullpath| to implicit= not to run the analyzer
            # script until all libraries in |libraries_fullpath| become ready.
            # This makes it easy to use
            # update_static_initializer_expectations.py especially when you
            # remove global variables from two or more libraries at the same
            # time.
            implicit=[script, expect] + libraries_fullpath)
def generate_ninjas():
  """Builds libui, or links an empty stub on the open source repo."""
  if open_source.is_open_source_repo():
    # Provide a stub.
    n = ninja_generator.SharedObjectNinjaGenerator('libui')
    n.add_notice_sources([staging.as_staging('src/NOTICE')])
    n.link()
    return
  n = ninja_generator.SharedObjectNinjaGenerator(
      'libui', base_path='android/frameworks/native/libs/ui')
  n.add_include_paths('android/system/core/libsync/include')
  shared_libs = ['libcutils.so', 'libhardware.so', 'liblog.so', 'libsync.so',
                 'libutils.so']
  n.add_library_deps(*shared_libs)
  n.build_default_all_sources()
  n.link()
def _generate_libposix_translation_for_test():
  """Builds libposix_translation_for_test.so and its /test symlink."""
  n = ninja_generator.SharedObjectNinjaGenerator(
      'libposix_translation_for_test',
      dt_soname='libposix_translation.so',
      is_system_library=True, is_for_test=True)
  # libposix_translation_for_test is built using generated code in the out/
  # directory. The logic we have to scan for NOTICES does not find one for
  # this generated code, and complains later. The libposix_translation.so
  # code normally inherits the project NOTICE from src/NOTICE, so we just
  # explicitly point to it here as the notice to use for this special version
  # of the library.
  n.add_notice_sources([staging.as_staging('src/NOTICE')])
  gen_rule_name = 'gen_wrap_syscall_aliases_s'
  gen_script_path = os.path.join('src/common', gen_rule_name + '.py')
  # Generate wrap_syscall_aliases.S; write via a temp file so a failed run
  # leaves no partial output.
  n.rule(gen_rule_name,
         command='%s > $out.tmp && mv $out.tmp $out' % gen_script_path,
         description=gen_rule_name + ' $in')
  gen_out_path = os.path.join(build_common.get_build_dir(),
                              'posix_translation_gen_sources',
                              'wrap_syscall_aliases.S')
  gen_implicit_deps = python_deps.find_deps(gen_script_path)
  n.build(gen_out_path, gen_rule_name, implicit=gen_implicit_deps)
  # Following deps order is important. art_libc_supplement_for_test.so should
  # be placed before libc.so.
  if not open_source.is_open_source_repo():
    # This is for ART unit tests which have not been opensourced. To not let
    # posix_translation depend on ART on arc_open, use is_open_source_repo().
    n.add_library_deps('art_libc_supplement_for_test.so')
  n.add_library_deps('libc.so', 'libdl.so')
  n.build_default([gen_out_path], base_path=None).link()

  # /test/libposix_translation.so should be a symbolic link to
  # ../lib/libposix_translation_for_test.so.
  n = ninja_generator.NinjaGenerator('test_libposix_translation')
  orig_so = os.path.join(
      build_common.get_load_library_path(),
      'libposix_translation_for_test.so')
  link_so = os.path.join(
      build_common.get_load_library_path_for_test(),
      'libposix_translation.so')
  command = 'ln -sf %s %s' % (
      os.path.join('../../lib/', os.path.basename(orig_so)), link_so)
  n.build(link_so, 'run_shell_command',
          implicit=orig_so,
          variables={'command': command})
def main():
  """Syncs and optionally pushes the ARC sources to the open source repo.

  Must be run from the internal repo.  Returns 0 on success, 1 when the
  destination has uncommitted files and --force was not given.
  """
  assert not open_source.is_open_source_repo(), ('Cannot be run from open '
                                                 'source repo.')
  parser = argparse.ArgumentParser()
  parser.add_argument('--branch', default=None,
                      help='Which branch in the open source repo to push')
  parser.add_argument('--force', action='store_true',
                      help='Overwrite any changes in the destination')
  parser.add_argument('--push-changes', action='store_true',
                      help=('Push changes to the destination repository\'s '
                            'remote'))
  parser.add_argument('--verbose', '-v', action='store_true',
                      help='Get verbose output')
  parser.add_argument('dest')
  args = parser.parse_args(sys.argv[1:])
  if args.verbose:
    logging.getLogger().setLevel(logging.INFO)
  _validate_args(args)
  _clone_repo_if_needed(args.dest)
  _validate_local_repository(args.dest)
  if git.get_uncommitted_files(cwd=args.dest) and not args.force:
    # Bug fix: the message has a '%s' placeholder but the original call
    # never supplied an argument for it; pass args.dest so the offending
    # path is actually logged.
    logging.error('%s has uncommitted files, use --force to override',
                  args.dest)
    return 1
  _reset_and_clean_repo(args.dest)
  _check_out_matching_branch(args.dest, args.branch)
  # Submodules abandoned between branches will still leave their directories
  # around which can confuse prepare_open_source_commit, so we clean them out.
  _reset_and_clean_repo(args.dest)
  prepare_open_source_commit.run(args.dest, args.force)
  _test_changes(args.dest)
  if args.push_changes:
    commit_label = subprocess.check_output(['git', 'describe']).strip()
    _set_git_user('arc-push', '*****@*****.**', args.dest)
    _commit_changes(args.dest, commit_label)
    _sync_head_tags(args.dest, '.')
    _push_changes(args.dest)
  else:
    # Not pushing: leave the destination checkout clean.
    _reset_and_clean_repo(args.dest)
  return 0
def generate_ninjas():
  """Converts android/external/junit's Android.mk into ninja files."""
  def _filter(vars):
    assert vars.is_java_library()
    if vars.is_static_java_library() or vars.is_host():
      return False
    # Explicit per-module decision; an unlisted module raises KeyError.
    build_module = {
        'core-junit': True,
        'junit-runner': False,
        # TODO(crbug.com/468670): L-rebase: junit-targetdex exercises our
        # dexopt compile paths, which are disabled until we find how to do
        # dex2oat in ninja time.
        'junit-targetdex': False,
    }
    return build_module[vars.get_module_name()]

  if open_source.is_open_source_repo():
    # We currently do not build Java code in open source.
    return
  make_to_ninja.MakefileNinjaTranslator(
      'android/external/junit').generate(_filter)
def generate_ninjas():
  """Generates ninjas for the junit library (skipped in open source)."""
  if open_source.is_open_source_repo():
    # We currently do not build Java code in open source.
    return

  def _filter(vars):
    assert vars.is_java_library()
    if vars.is_static_java_library() or vars.is_host():
      return False
    name = vars.get_module_name()
    # TODO(crbug.com/468670): L-rebase: junit-targetdex exercises our dexopt
    # compile paths, which are disabled until we find how to do dex2oat in
    # ninja time.
    return {'core-junit': True,
            'junit-runner': False,
            'junit-targetdex': False}[name]

  make_to_ninja.MakefileNinjaTranslator(
      'android/external/junit').generate(_filter)
def generate_ninjas():
  """Generates ninjas for libskia, or a stub on the open source repo."""
  if open_source.is_open_source_repo():
    # Provide a stub.
    stub = ninja_generator.SharedObjectNinjaGenerator('libskia')
    stub.add_notice_sources([staging.as_staging('src/NOTICE')])
    stub.link()
    return

  def _filter(vars):
    if vars.is_executable():
      return False
    if vars.get_module_name() != 'libskia':
      return True
    if not OPTIONS.is_optimized_build():
      # Enable basic optimizations for SKIA as it otherwise fails as it
      # refers to unimplemented FT_Get_FSType_Flags. For some reason using
      # -ffunction-sections with --gc-sections did not solve the problem
      # here.
      vars.get_cflags().append('-O1')
    vars.get_shared_deps().append('libcompiler_rt')
    return True

  make_to_ninja.MakefileNinjaTranslator(
      'android/external/skia').generate(_filter)
def generate_ninjas():
  """Translates the skia Android.mk, stubbing libskia on open source."""
  if open_source.is_open_source_repo():
    # Provide a stub.
    n = ninja_generator.SharedObjectNinjaGenerator('libskia')
    n.add_notice_sources([staging.as_staging('src/NOTICE')])
    n.link()
    return

  def _filter(vars):
    if vars.is_executable():
      return False
    is_libskia = vars.get_module_name() == 'libskia'
    if is_libskia:
      if not OPTIONS.is_optimized_build():
        # Enable basic optimizations for SKIA as it otherwise fails as it
        # refers to unimplemented FT_Get_FSType_Flags. For some reason using
        # -ffunction-sections with --gc-sections did not solve the problem
        # here.
        vars.get_cflags().append('-O1')
      vars.get_shared_deps().append('libcompiler_rt')
    return True

  translator = make_to_ninja.MakefileNinjaTranslator('android/external/skia')
  translator.generate(_filter)
def generate_test_ninjas():
  """Generates all test ninjas; skipped entirely on the open source repo."""
  if open_source.is_open_source_repo():
    return
  for generate in (_generate_unit_test_ninja,
                   _generate_image_generator_ninja,
                   _generate_integration_test_ninja):
    generate()
def generate_ninjas():
  """Generates ninjas for libandroid_runtime (a stub on open source)."""
  if open_source.is_open_source_repo():
    # Provide a stub.
    n = ninja_generator.SharedObjectNinjaGenerator('libandroid_runtime')
    n.add_notice_sources([staging.as_staging('src/NOTICE')])
    n.link()
    return

  def _filter(vars):
    if OPTIONS.is_nacl_build():
      # The flag is valid only for C/ObjC but not for C++ on NaCl gcc.
      vars.remove_c_or_cxxflag('-Wno-int-to-pointer-cast')
    # TODO(crbug.com/327496): Move this to generic functionality of
    # make_to_ninja.
    intermediates_dir = (make_to_ninja.MakefileNinjaTranslator.
                         get_intermediate_headers_dir())
    vars.get_includes().append(intermediates_dir)
    # TODO(crbug.com/414569): L-rebase: These include directories do not seem
    # to be being added. Fix that.
    vars.get_includes().append(
        'android/frameworks/av/media/img_utils/include')
    vars.get_includes().append('android/external/skia/include/pathops')
    vars.get_includes().append(
        'android/system/core/libprocessgroup/include')
    # Depend on the generated sonivox headers in the intermediates dir.
    vars.get_implicit_deps().extend([
        intermediates_dir + '/' + i for i in [
            'libsonivox/eas.h',
            'libsonivox/eas_types.h',
            'libsonivox/eas_reverb.h',
            'libsonivox/jet.h'
        ]
    ])
    # Exclude ZygoteInit for which we have stubbed out jni registrations.
    vars.get_sources().remove('android/frameworks/base/core/jni/'
                              'com_android_internal_os_ZygoteInit.cpp')
    # Exclude GLES31+ functionality since we do not currently support it.
    vars.get_sources().remove('android/frameworks/base/core/jni/'
                              'android_opengl_GLES31.cpp')
    vars.get_sources().remove('android/frameworks/base/core/jni/'
                              'android_opengl_GLES31Ext.cpp')
    # Add ARC specific JNIs.
    vars.get_sources().extend([
        'android/frameworks/base/core/jni/org_chromium_arc_internal_Tracing.cpp',  # NOQA
        'android/frameworks/base/core/jni/org_chromium_arc_ArcProxySelector.cpp',  # NOQA
    ])
    if OPTIONS.disable_hwui():
      # Defining this enables support for hardware accelerated rendering in
      # the user interface code, such as when an application uses a
      # TextureView. The primary effect is to use libhwui. It is enabled by
      # default in the Android.mk file, we disable it here when not enabling
      # hwui.
      vars.get_cflags().remove('-DUSE_OPENGL_RENDERER')
    if build_common.use_ndk_direct_execution():
      vars.get_cflags().append('-DUSE_NDK_DIRECT_EXECUTION')
    # Drop shared deps on libraries that are not built in ARC.
    deps = vars.get_shared_deps()
    excluded_libs = (
        'libprocessgroup',     # Not built
        'libhardware_legacy',  # Not built
        'libnetutils',         # Not built
        'libselinux',          # Not built
        'libusbhost',          # Not built (only for MTP)
        'libwpa_client')       # Not built
    deps[:] = [x for x in deps if x not in excluded_libs]
    return True

  path = 'android/frameworks/base/core/jni'
  make_to_ninja.MakefileNinjaTranslator(path).generate(_filter)
def generate_ninjas():
  """Generates ninja files for libandroid_runtime via make_to_ninja.

  The open source repo gets only a stub shared object because the
  directories this translation needs are not checked out there.
  """
  if open_source.is_open_source_repo():
    # Provide a stub.
    n = ninja_generator.SharedObjectNinjaGenerator('libandroid_runtime')
    n.add_notice_sources([staging.as_staging('src/NOTICE')])
    n.link()
    return

  def _filter(vars):
    # Per-module hook run by MakefileNinjaTranslator; mutates |vars| in
    # place and returns True to keep the module.
    if OPTIONS.is_nacl_build():
      # The flag is valid only for C/ObjC but not for C++ on NaCl gcc.
      vars.remove_c_or_cxxflag('-Wno-int-to-pointer-cast')
    # TODO(crbug.com/327496): Move this to generic functionality of
    # make_to_ninja.
    intermediates_dir = (
        make_to_ninja.MakefileNinjaTranslator.get_intermediate_headers_dir())
    vars.get_includes().append(intermediates_dir)
    # TODO(crbug.com/414569): L-rebase: These include directories do not
    # seem to be being added. Fix that.
    vars.get_includes().append('android/frameworks/av/media/img_utils/include')
    vars.get_includes().append('android/external/skia/include/pathops')
    vars.get_includes().append('android/system/core/libprocessgroup/include')
    # The libsonivox headers are generated into |intermediates_dir|, so the
    # build must wait for them.
    vars.get_implicit_deps().extend(
        [intermediates_dir + '/' + i for i in [
            'libsonivox/eas.h',
            'libsonivox/eas_types.h',
            'libsonivox/eas_reverb.h',
            'libsonivox/jet.h']])
    # Exclude ZygoteInit for which we have stubbed out jni registrations.
    vars.get_sources().remove('android/frameworks/base/core/jni/'
                              'com_android_internal_os_ZygoteInit.cpp')
    # Exclude GLES31+ functionality since we do not currently support it.
    vars.get_sources().remove('android/frameworks/base/core/jni/'
                              'android_opengl_GLES31.cpp')
    vars.get_sources().remove('android/frameworks/base/core/jni/'
                              'android_opengl_GLES31Ext.cpp')
    # Add ARC specific JNIs.
    vars.get_sources().extend([
        'android/frameworks/base/core/jni/org_chromium_arc_internal_Tracing.cpp',  # NOQA
        'android/frameworks/base/core/jni/org_chromium_arc_ArcProxySelector.cpp',  # NOQA
    ])
    if OPTIONS.disable_hwui():
      # Defining this enables support for hardware accelerated rendering in
      # the user interface code, such as when an application uses a
      # TextureView. The primary effect is to use libhwui. It is enabled by
      # default in the Android.mk file, we disable it here when not enabling
      # hwui.
      vars.get_cflags().remove('-DUSE_OPENGL_RENDERER')
    if build_common.use_ndk_direct_execution():
      vars.get_cflags().append('-DUSE_NDK_DIRECT_EXECUTION')
    # Filter out shared deps for libraries ARC does not build.
    deps = vars.get_shared_deps()
    excluded_libs = (
        'libprocessgroup',     # Not built
        'libhardware_legacy',  # Not built
        'libnetutils',         # Not built
        'libselinux',          # Not built
        'libusbhost',          # Not built (only for MTP)
        'libwpa_client')       # Not built
    deps[:] = [x for x in deps if x not in excluded_libs]
    return True

  path = 'android/frameworks/base/core/jni'
  make_to_ninja.MakefileNinjaTranslator(path).generate(_filter)
def test_open_source_repo(self): # We do not run tests in the open source repo. self.assertFalse(open_source.is_open_source_repo())
def generate_ninjas(): if open_source.is_open_source_repo(): # Provide a stub. n = ninja_generator.SharedObjectNinjaGenerator('libandroid') n.add_notice_sources([staging.as_staging('src/NOTICE')]) n.link()
def generate_test_ninjas(): n = ninja_generator.PpapiTestNinjaGenerator( 'posix_translation_test', base_path='src/posix_translation', force_compiler='clang', enable_cxx11=True) # Build a rootfs image for tests. rule_name = 'gen_test_fs_image' script_path = 'src/posix_translation/scripts/create_test_fs_image.py' gen_prod_image = ( build_common.get_posix_translation_readonly_fs_image_file_path()) # This is a little odd, but we use the documented path to the production image # to also store a test image in the same location for simplicity. out_path = os.path.dirname(gen_prod_image) gen_test_image = os.path.join(out_path, 'test_readonly_fs_image.img') n.rule(rule_name, command=script_path + ' $out_path', description=rule_name + ' $in_real_path') n.add_ppapi_compile_flags() n.build([gen_test_image], rule_name, variables={'out_path': out_path}, # The script calls create_readonly_fs_image.py. implicit=[script_path, _CREATE_READONLY_FS_IMAGE_SCRIPT, ]) all_files = n.find_all_contained_test_sources() n.build_default(all_files, base_path=None) n.add_compiler_flags('-Werror') n.add_library_deps('libposix_translation_static.a', 'libchromium_base.a', 'libcommon.a', 'libgccdemangle_static.a') if build_common.use_ndk_direct_execution(): n.add_defines('USE_NDK_DIRECT_EXECUTION') implicit = [gen_test_image] if open_source.is_open_source_repo(): implicit.append(gen_prod_image) n.add_defines('PROD_READONLY_FS_IMAGE="%s"' % gen_prod_image) else: # Add runtime to implicit dependencies because ReadonlyFsReaderTest uses # the readonly FS image in runtime. implicit.append(build_common.get_runtime_build_stamp()) n.add_defines('PROD_READONLY_FS_IMAGE="%s"' % os.path.join( build_common.get_runtime_platform_specific_path( build_common.get_runtime_out_dir(), OPTIONS.target()), os.path.basename(gen_prod_image))) n.run(n.link(), implicit=implicit) # To be able to refer mock implementation from outside of posix_translation. 
# Setting instance count is zero because usage count verifier doesn't check # the reference from test executable. See verify_usage_counts in # ninja_generator.py n = ninja_generator.ArchiveNinjaGenerator( 'mock_posix_translation', instances=0, force_compiler='clang', enable_cxx11=True) n.add_libchromium_base_compile_flags() n.add_compiler_flags('-Werror') all_files = ['src/posix_translation/test_util/mock_virtual_file_system.cc'] n.build_default(all_files).archive()