def main():
  """Runs R8/ProGuard over the input jars and writes optimized output.

  Merges all ProGuard configs (explicit files plus flag-derived rules),
  optionally verifies them against an expectations file, collects
  library-only (classpath) jars, runs R8, optionally checks the produced
  dex files for missing symbols, and writes stamp/depfile outputs.
  """
  build_utils.InitLogging('PROGUARD_DEBUG')
  options = _ParseOptions()

  logging.debug('Preparing configs')
  proguard_configs = options.proguard_configs

  # ProGuard configs that are derived from flags.
  dynamic_config_data = _CreateDynamicConfig(options)

  # Merge the explicit config files with the flag-derived config into one
  # blob so it can be diffed against expectations and scanned for flags.
  merged_configs = _CombineConfigs(
      proguard_configs, dynamic_config_data, exclude_generated=True)
  # Echo R8's stdout when the configs enable debugging output (or --verbose).
  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose

  if options.expected_file:
    diff_utils.CheckExpectations(merged_configs, options)
    if options.only_verify_expectations:
      # Verification-only mode: record the depfile and skip optimization.
      build_utils.WriteDepfile(options.depfile,
                               options.actual_file,
                               inputs=options.proguard_configs)
      return

  logging.debug('Looking for embedded configs')
  # Classpath entries that are not program inputs are passed as library jars.
  libraries = []
  for p in options.classpath:
    # TODO(bjoyce): Remove filter once old android support libraries are gone.
    # Fix for having Library class extend program class dependency problem.
    if 'com_android_support' in p or 'android_support_test' in p:
      continue
    # If a jar is part of input no need to include it as library jar.
    if p not in libraries and p not in options.input_paths:
      libraries.append(p)
  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)

  _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
                  print_stdout)

  if not options.disable_checks:
    logging.debug('Running tracereferences')
    all_dex_files = []
    if options.output_path:
      all_dex_files.append(options.output_path)
    if options.dex_dests:
      all_dex_files.extend(options.dex_dests)
    _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath,
                            options.warnings_as_errors)

  # Mirror the mapping file to any extra requested locations.
  for output in options.extra_mapping_output_paths:
    shutil.copy(options.mapping_output, output)

  inputs = options.proguard_configs + options.input_paths + libraries
  if options.apply_mapping:
    inputs.append(options.apply_mapping)

  _MaybeWriteStampAndDepFile(options, inputs)
def main():
  """Entry point for the Android lint wrapper.

  Optionally delegates the whole invocation to the build server, expands the
  Java/resource sources lists, runs lint, then touches the stamp file and
  writes a depfile when requested.
  """
  build_utils.InitLogging('LINT_DEBUG')
  args = _ParseArgs(sys.argv[1:])

  # TODO(wnwen): Consider removing lint cache now that there are only two lint
  # invocations.
  # Avoid parallelizing cache creation since lint runs without the cache defeat
  # the purpose of creating the cache in the first place.
  if (not args.create_cache and not args.skip_build_server
      and server_utils.MaybeRunCommand(name=args.target_name,
                                       argv=sys.argv,
                                       stamp_file=args.stamp,
                                       force=args.use_build_server)):
    # The build server accepted the command and will run lint itself.
    return

  # Expand each "sources list" file into the source paths it names.
  sources = []
  for java_sources_file in args.java_sources:
    sources.extend(build_utils.ReadSourcesList(java_sources_file))
  resource_sources = []
  for resource_sources_file in args.resource_sources:
    resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))

  # Optional flags may be None; keep only real paths in the depfile deps.
  possible_depfile_deps = (args.srcjars + args.resource_zips + sources +
                           resource_sources + [
                               args.baseline,
                               args.manifest_path,
                           ])
  depfile_deps = [p for p in possible_depfile_deps if p]

  _RunLint(args.create_cache,
           args.lint_binary_path,
           args.backported_methods,
           args.config_path,
           args.manifest_path,
           args.extra_manifest_paths,
           sources,
           args.classpath,
           args.cache_dir,
           args.android_sdk_version,
           args.aars,
           args.srcjars,
           args.min_sdk_version,
           resource_sources,
           args.resource_zips,
           args.android_sdk_root,
           args.lint_gen_dir,
           args.baseline,
           testonly_target=args.testonly,
           warnings_as_errors=args.warnings_as_errors)
  logging.info('Creating stamp file')
  build_utils.Touch(args.stamp)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
def main():
  """Runs Android lint over the given sources, then writes stamp + depfile."""
  build_utils.InitLogging('LINT_DEBUG')
  args = _ParseArgs(sys.argv[1:])

  # Expand each "sources list" file into the paths it names.
  sources = []
  for listing in args.java_sources:
    sources += build_utils.ReadSourcesList(listing)

  resource_sources = []
  for listing in args.resource_sources:
    resource_sources += build_utils.ReadSourcesList(listing)

  # Some of these entries may be None/empty; only real paths belong in the
  # depfile.
  candidate_deps = (args.srcjars + args.resource_zips + sources +
                    resource_sources + [args.manifest_path])
  depfile_deps = list(filter(None, candidate_deps))

  _RunLint(args.lint_path,
           args.config_path,
           args.manifest_path,
           args.result_path,
           args.product_dir,
           sources,
           args.cache_dir,
           args.android_sdk_version,
           args.srcjars,
           args.min_sdk_version,
           args.manifest_package,
           resource_sources,
           args.resource_zips,
           args.android_sdk_root,
           testonly_target=args.testonly,
           can_fail_build=args.can_fail_build,
           include_unexpected=args.include_unexpected_failures,
           silent=args.silent)

  logging.info('Creating stamp file')
  build_utils.Touch(args.stamp)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps,
                             add_pydeps=False)  # pydeps listed in GN.
def main(args):
  """Converts input .jar/.dex files into a final .dex via R8's d8 tool.

  Uses md5_check to skip the work entirely when nothing changed and, in
  incremental mode, to re-dex only the class files that changed.
  """
  build_utils.InitLogging('DEX_DEBUG')
  options = _ParseArgs(args)

  # Inputs passed via response files are only known after expansion.
  options.class_inputs += options.class_inputs_filearg
  options.dex_inputs += options.dex_inputs_filearg

  input_paths = options.class_inputs + options.dex_inputs
  if options.multi_dex and options.main_dex_list_path:
    input_paths.append(options.main_dex_list_path)
  input_paths.append(options.r8_jar_path)

  output_paths = [options.output]

  if options.incremental_dir:
    # Incremental mode: each input jar maps to an intermediate .dex file and
    # per-subpath changes are tracked so only stale jars are re-dexed.
    final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
        options.class_inputs, options.incremental_dir)
    output_paths += final_dex_inputs
    track_subpaths_allowlist = options.class_inputs
  else:
    final_dex_inputs = list(options.class_inputs)
    track_subpaths_allowlist = None
  final_dex_inputs += options.dex_inputs

  dex_cmd = [
      build_utils.JAVA_PATH, '-jar', options.r8_jar_path, 'd8',
      '--no-desugaring'
  ]
  if options.release:
    dex_cmd += ['--release']
  if options.min_api:
    dex_cmd += ['--min-api', options.min_api]
  if options.force_enable_assertions:
    dex_cmd += ['--force-enable-assertions']

  md5_check.CallAndWriteDepfileIfStale(
      lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
      options,
      depfile_deps=options.class_inputs_filearg + options.dex_inputs_filearg,
      output_paths=output_paths,
      input_paths=input_paths,
      # Include the incremental flag so toggling it invalidates the cache.
      input_strings=dex_cmd + [bool(options.incremental_dir)],
      pass_changes=True,
      track_subpaths_allowlist=track_subpaths_allowlist)
def main():
  """Builds the compact unwind-info blob for an unstripped binary.

  Streams CFI records out of dump_syms, encodes them into the unwind tables,
  and serializes the result to --output_path.
  """
  build_utils.InitLogging('CREATE_UNWIND_TABLE_DEBUG')

  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('--input_path',
                      help='Path to the unstripped binary.',
                      required=True,
                      metavar='FILE')
  parser.add_argument('--output_path',
                      help='Path to unwind info binary output.',
                      required=True,
                      metavar='FILE')
  parser.add_argument('--dump_syms_path',
                      required=True,
                      help='The path of the dump_syms binary.',
                      metavar='FILE')
  parser.add_argument('--readobj_path',
                      required=True,
                      help='The path of the llvm-readobj binary.',
                      metavar='FILE')
  args = parser.parse_args()

  # Stream dump_syms output through a pipe instead of buffering all of it.
  dump_syms = subprocess.Popen(
      ['./' + args.dump_syms_path, args.input_path, '-v'],
      stdout=subprocess.PIPE,
      encoding='ascii')

  cfi_records = ReadFunctionCfi(dump_syms.stdout)
  unwinds = GenerateUnwinds(cfi_records, parsers=ALL_PARSERS)
  encoded_unwinds = EncodeFunctionUnwinds(
      unwinds, ReadTextSectionStartAddress(args.readobj_path, args.input_path))
  (page_table, function_table, function_offset_table,
   unwind_instruction_table) = GenerateUnwindTables(encoded_unwinds)
  unwind_info: bytes = EncodeUnwindInfo(page_table, function_table,
                                        function_offset_table,
                                        unwind_instruction_table)

  # A non-zero exit from dump_syms means the tables were built from truncated
  # input; propagate the failure rather than writing bad output.
  if dump_syms.wait():
    logging.critical('dump_syms exited with return code %d',
                     dump_syms.returncode)
    sys.exit(dump_syms.returncode)

  with open(args.output_path, 'wb') as out:
    out.write(unwind_info)
  return 0
def main():
  """Runs Android lint and records a stamp plus depfile when requested."""
  build_utils.InitLogging('LINT_DEBUG')
  args = _ParseArgs(sys.argv[1:])

  # Turn each "sources list" file into concrete source paths.
  sources = []
  for listing in args.java_sources:
    sources += build_utils.ReadSourcesList(listing)

  resource_sources = []
  for listing in args.resource_sources:
    resource_sources += build_utils.ReadSourcesList(listing)

  # Optional flags may be None; only real paths belong in the depfile.
  candidate_deps = (args.srcjars + args.resource_zips + sources +
                    resource_sources + [args.baseline, args.manifest_path])
  depfile_deps = list(filter(None, candidate_deps))

  _RunLint(args.lint_binary_path,
           args.backported_methods,
           args.config_path,
           args.manifest_path,
           args.extra_manifest_paths,
           sources,
           args.classpath,
           args.cache_dir,
           args.android_sdk_version,
           args.aars,
           args.srcjars,
           args.min_sdk_version,
           resource_sources,
           args.resource_zips,
           args.android_sdk_root,
           args.lint_gen_dir,
           args.baseline,
           args.expected_warnings,
           testonly_target=args.testonly,
           warnings_as_errors=args.warnings_as_errors)

  logging.info('Creating stamp file')
  build_utils.Touch(args.stamp)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
def main():
  """Measures APK size metrics and reports them.

  Prints results, or writes them in chartjson/histogram format, and also
  supports the isolated-script test harness interface (always writing a
  test_results.json verdict, even on failure).
  """
  build_utils.InitLogging('RESOURCE_SIZES_DEBUG')
  argparser = argparse.ArgumentParser(description='Print APK size metrics.')
  argparser.add_argument('--min-pak-resource-size',
                         type=int,
                         default=20 * 1024,
                         help='Minimum byte size of displayed pak resources.')
  argparser.add_argument('--chromium-output-directory',
                         dest='out_dir',
                         type=os.path.realpath,
                         help='Location of the build artifacts.')
  argparser.add_argument('--chartjson',
                         action='store_true',
                         help='DEPRECATED. Use --output-format=chartjson '
                         'instead.')
  argparser.add_argument('--output-format',
                         choices=['chartjson', 'histograms'],
                         help='Output the results to a file in the given '
                         'format instead of printing the results.')
  argparser.add_argument('--loadable_module', help='Obsolete (ignored).')

  # Accepted to conform to the isolated script interface, but ignored.
  argparser.add_argument('--isolated-script-test-filter',
                         help=argparse.SUPPRESS)
  argparser.add_argument('--isolated-script-test-perf-output',
                         type=os.path.realpath,
                         help=argparse.SUPPRESS)

  output_group = argparser.add_mutually_exclusive_group()
  output_group.add_argument('--output-dir',
                            default='.',
                            help='Directory to save chartjson to.')
  output_group.add_argument(
      '--output-file',
      help='Path to output .json (replaces --output-dir). Works only for '
      '--output-format=chartjson')
  output_group.add_argument(
      '--isolated-script-test-output',
      type=os.path.realpath,
      help='File to which results will be written in the '
      'simplified JSON output format.')

  argparser.add_argument('input', help='Path to .apk or .apks file to measure.')
  trichrome_group = argparser.add_argument_group(
      'Trichrome inputs',
      description='When specified, |input| is used only as Test suite name.')
  trichrome_group.add_argument('--trichrome-chrome',
                               help='Path to Trichrome Chrome .apks')
  trichrome_group.add_argument('--trichrome-webview',
                               help='Path to Trichrome WebView .apk(s)')
  trichrome_group.add_argument('--trichrome-library',
                               help='Path to Trichrome Library .apk')
  args = argparser.parse_args()

  devil_chromium.Initialize(output_directory=args.out_dir)

  # TODO(bsheedy): Remove this once uses of --chartjson have been removed.
  if args.chartjson:
    args.output_format = 'chartjson'

  # Assume failure until _ResourceSizes() completes without raising.
  isolated_script_output = {'valid': False, 'failures': []}

  test_name = 'resource_sizes (%s)' % os.path.basename(args.input)

  if args.isolated_script_test_output:
    # Redirect chartjson output next to the isolated-script results file.
    args.output_dir = os.path.join(
        os.path.dirname(args.isolated_script_test_output), test_name)
    if not os.path.exists(args.output_dir):
      os.makedirs(args.output_dir)

  try:
    _ResourceSizes(args)
    isolated_script_output = {
        'valid': True,
        'failures': [],
    }
  finally:
    # Always emit a verdict so the test harness sees pass/fail either way.
    if args.isolated_script_test_output:
      results_path = os.path.join(args.output_dir, 'test_results.json')
      with open(results_path, 'w') as output_file:
        json.dump(isolated_script_output, output_file)
      with open(args.isolated_script_test_output, 'w') as output_file:
        json.dump(isolated_script_output, output_file)
def main(args):
  """Compiles Android resources with aapt2 and generates the R.java srcjar.

  Optionally verifies the normalized manifest against an expectations file,
  packages resources in a deterministic temp directory, creates R.java files,
  sanity-checks the resulting package ID, copies outputs to their final
  locations, and writes a depfile.
  """
  build_utils.InitLogging('RESOURCE_DEBUG')
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  if options.expected_file:
    actual_data = _CreateNormalizedManifestForVerification(options)
    diff_utils.CheckExpectations(actual_data, options)
    if options.only_verify_expectations:
      # Verification-only mode: no resource compilation is needed.
      return

  path = options.arsc_path or options.proto_path
  debug_temp_resources_dir = os.environ.get('TEMP_RESOURCES_DIR')
  if debug_temp_resources_dir:
    path = os.path.join(debug_temp_resources_dir, os.path.basename(path))
  else:
    # Use a deterministic temp directory since .pb files embed the absolute
    # path of resources: crbug.com/939984
    path = path + '.tmpdir'
  build_utils.DeleteDirectory(path)

  with resource_utils.BuildContext(
      temp_dir=path, keep_files=bool(debug_temp_resources_dir)) as build:
    manifest_package_name = _PackageApk(options, build)

    # If --shared-resources-allowlist is used, all the resources listed in the
    # corresponding R.txt file will be non-final, and an onResourcesLoaded()
    # will be generated to adjust them at runtime.
    #
    # Otherwise, if --shared-resources is used, all resources will be
    # non-final, and an onResourcesLoaded() method will be generated too.
    #
    # Otherwise, all resources will be final, and no method will be generated.
    #
    rjava_build_options = resource_utils.RJavaBuildOptions()
    if options.shared_resources_allowlist:
      rjava_build_options.ExportSomeResources(
          options.shared_resources_allowlist)
      rjava_build_options.GenerateOnResourcesLoaded()
      if options.shared_resources:
        # The final resources will only be used in WebLayer, so hardcode the
        # package ID to be what WebLayer expects.
        rjava_build_options.SetFinalPackageId(
            protoresources.SHARED_LIBRARY_HARDCODED_ID)
    elif options.shared_resources or options.app_as_shared_lib:
      rjava_build_options.ExportAllResources()
      rjava_build_options.GenerateOnResourcesLoaded()

    custom_root_package_name = options.r_java_root_package_name
    grandparent_custom_package_name = None

    # Always generate an R.java file for the package listed in
    # AndroidManifest.xml because this is where Android framework looks to find
    # onResourcesLoaded() for shared library apks. While not actually necessary
    # for application apks, it also doesn't hurt.
    apk_package_name = manifest_package_name

    if options.package_name and not options.arsc_package_name:
      # Feature modules have their own custom root package name and should
      # inherit from the appropriate base module package. This behaviour should
      # not be present for test apks with an apk under test. Thus,
      # arsc_package_name is used as it is only defined for test apks with an
      # apk under test.
      custom_root_package_name = options.package_name
      grandparent_custom_package_name = options.r_java_root_package_name
      # Feature modules have the same manifest package as the base module but
      # they should not create an R.java for said manifest package because it
      # will be created in the base module.
      apk_package_name = None

    logging.debug('Creating R.srcjar')
    resource_utils.CreateRJavaFiles(
        build.srcjar_dir, apk_package_name, build.r_txt_path,
        options.extra_res_packages, rjava_build_options, options.srcjar_out,
        custom_root_package_name, grandparent_custom_package_name,
        options.extra_main_r_text_files)
    build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)

    # Sanity check that the created resources have the expected package ID.
    logging.debug('Performing sanity check')
    if options.package_id:
      expected_id = options.package_id
    elif options.shared_resources:
      expected_id = 0
    else:
      expected_id = 127  # == '0x7f'.
    _, package_id = resource_utils.ExtractArscPackage(
        options.aapt2_path,
        build.arsc_path if options.arsc_path else build.proto_path)
    if package_id != expected_id:
      raise Exception(
          'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))

    logging.debug('Copying outputs')
    _WriteOutputs(options, build)

  if options.depfile:
    depfile_deps = (options.dependencies_res_zips +
                    options.dependencies_res_zip_overlays +
                    options.extra_main_r_text_files +
                    options.include_resources)
    build_utils.WriteDepfile(options.depfile, options.srcjar_out, depfile_deps)
def main(args):
  """Compiles Android resources with aapt2 and generates R.java files.

  Packages resources into an .arsc or proto output inside a deterministic
  temp directory, generates the R.java srcjar, sanity-checks the resulting
  package ID, copies outputs to their final locations, and writes a depfile.
  """
  build_utils.InitLogging('RESOURCE_DEBUG')
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  path = options.arsc_path or options.proto_path
  debug_temp_resources_dir = os.environ.get('TEMP_RESOURCES_DIR')
  if debug_temp_resources_dir:
    path = os.path.join(debug_temp_resources_dir, os.path.basename(path))
  else:
    # Use a deterministic temp directory since .pb files embed the absolute
    # path of resources: crbug.com/939984
    path = path + '.tmpdir'
  build_utils.DeleteDirectory(path)
  build_utils.MakeDirectory(path)

  with resource_utils.BuildContext(
      temp_dir=path, keep_files=bool(debug_temp_resources_dir)) as build:
    manifest_package_name = _PackageApk(options, build)

    # If --shared-resources-whitelist is used, all the resources listed in
    # the corresponding R.txt file will be non-final, and an
    # onResourcesLoaded() will be generated to adjust them at runtime.
    #
    # Otherwise, if --shared-resources is used, all resources will be
    # non-final, and an onResourcesLoaded() method will be generated too.
    #
    # Otherwise, all resources will be final, and no method will be generated.
    #
    rjava_build_options = resource_utils.RJavaBuildOptions()
    if options.shared_resources_whitelist:
      rjava_build_options.ExportSomeResources(
          options.shared_resources_whitelist)
      rjava_build_options.GenerateOnResourcesLoaded()
    elif options.shared_resources or options.app_as_shared_lib:
      rjava_build_options.ExportAllResources()
      rjava_build_options.GenerateOnResourcesLoaded()

    custom_root_package_name = options.r_java_root_package_name
    grandparent_custom_package_name = None

    if options.package_name and not options.arsc_package_name:
      # Feature modules have their own custom root package name and should
      # inherit from the appropriate base module package. This behaviour should
      # not be present for test apks with an apk under test. Thus,
      # arsc_package_name is used as it is only defined for test apks with an
      # apk under test.
      custom_root_package_name = options.package_name
      grandparent_custom_package_name = options.r_java_root_package_name

    # Only shared-resource builds generate an R.java for the manifest package
    # (presumably so onResourcesLoaded() is reachable — confirm with
    # CreateRJavaFiles).
    if options.shared_resources or options.app_as_shared_lib:
      package_for_library = manifest_package_name
    else:
      package_for_library = None

    logging.debug('Creating R.srcjar')
    resource_utils.CreateRJavaFiles(
        build.srcjar_dir, package_for_library, build.r_txt_path,
        options.extra_res_packages, options.extra_r_text_files,
        rjava_build_options, options.srcjar_out, custom_root_package_name,
        grandparent_custom_package_name, options.extra_main_r_text_files)
    build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)

    # Sanity check that the created resources have the expected package ID.
    logging.debug('Performing sanity check')
    if options.package_id:
      expected_id = options.package_id
    elif options.shared_resources:
      expected_id = 0
    else:
      expected_id = 127  # == '0x7f'.
    _, package_id = resource_utils.ExtractArscPackage(
        options.aapt2_path,
        build.arsc_path if options.arsc_path else build.proto_path)
    if package_id != expected_id:
      raise Exception(
          'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))

    logging.debug('Copying outputs')
    _WriteOutputs(options, build)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             options.srcjar_out,
                             inputs=options.dependencies_res_zips +
                             options.extra_r_text_files,
                             add_pydeps=False)
def main(argv):
  """Compiles Java sources, assembling the javac command line from flags.

  Builds javac args (encoding, errorprone, bootclasspath, annotation
  processors, etc.), then defers the actual compile to _OnStaleMd5 via
  md5_check so unchanged inputs skip compilation entirely.
  """
  build_utils.InitLogging('JAVAC_DEBUG')
  colorama.init()

  argv = build_utils.ExpandFileArgs(argv)
  options, java_files = _ParseOptions(argv)

  javac_cmd = []
  if options.gomacc_path:
    # Prefix with the goma wrapper to distribute compilation when available.
    javac_cmd.append(options.gomacc_path)
  javac_cmd.append(build_utils.JAVAC_PATH)

  javac_args = [
      '-g',
      # Chromium only allows UTF8 source files. Being explicit avoids
      # javac pulling a default encoding from the user's environment.
      '-encoding',
      'UTF-8',
      # Prevent compiler from compiling .java files not listed as inputs.
      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
      '-sourcepath',
      ':',
  ]

  if options.enable_errorprone:
    # All errorprone args are passed space-separated in a single arg.
    errorprone_flags = ['-Xplugin:ErrorProne']
    for warning in ERRORPRONE_WARNINGS_TO_TURN_OFF:
      errorprone_flags.append('-Xep:{}:OFF'.format(warning))
    for warning in ERRORPRONE_WARNINGS_TO_ERROR:
      errorprone_flags.append('-Xep:{}:ERROR'.format(warning))
    if not options.warnings_as_errors:
      errorprone_flags.append('-XepAllErrorsAsWarnings')
    javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]
    # This flag quits errorprone after checks and before code generation, since
    # we do not need errorprone outputs, this speeds up errorprone by 4 seconds
    # for chrome_java.
    javac_args += ['-XDshould-stop.ifNoError=FLOW']

  if options.java_version:
    javac_args.extend([
        '-source',
        options.java_version,
        '-target',
        options.java_version,
    ])
    if options.java_version == '1.8':
      # Android's boot jar doesn't contain all java 8 classes.
      options.bootclasspath.append(build_utils.RT_JAR_PATH)

  if options.warnings_as_errors:
    javac_args.extend(['-Werror'])
  else:
    # XDignore.symbol.file makes javac compile against rt.jar instead of
    # ct.sym. This means that using a java internal package/class will not
    # trigger a compile warning or error.
    javac_args.extend(['-XDignore.symbol.file'])

  if options.processors:
    javac_args.extend(['-processor', ','.join(options.processors)])
  else:
    # This effectively disables all annotation processors, even including
    # annotation processors in service provider configuration files named
    # META-INF/. See the following link for reference:
    # https://docs.oracle.com/en/java/javase/11/tools/javac.html
    javac_args.extend(['-proc:none'])

  if options.bootclasspath:
    javac_args.extend(['-bootclasspath', ':'.join(options.bootclasspath)])

  if options.processorpath:
    javac_args.extend(['-processorpath', ':'.join(options.processorpath)])
  if options.processor_args:
    for arg in options.processor_args:
      javac_args.extend(['-A%s' % arg])

  javac_args.extend(options.javac_arg)

  classpath_inputs = (
      options.bootclasspath + options.classpath + options.processorpath)

  depfile_deps = classpath_inputs
  # Files that are already inputs in GN should go in input_paths.
  input_paths = depfile_deps + options.java_srcjars + java_files
  if options.header_jar:
    input_paths.append(options.header_jar)
  input_paths += [x[0] for x in options.additional_jar_files]

  output_paths = [
      options.jar_path,
      options.jar_path + '.info',
  ]

  input_strings = javac_cmd + javac_args + options.classpath + java_files
  if options.jar_info_exclude_globs:
    input_strings.append(options.jar_info_exclude_globs)

  md5_check.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(options, javac_cmd, javac_args, java_files),
      options,
      depfile_deps=depfile_deps,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths)
def main(args):
  """Dexes class/dex inputs into a final .dex using the CustomD8 wrapper.

  Builds the D8 command line (release/min-api/desugaring flags plus
  classpath/bootclasspath for interface desugaring) and runs it through
  md5_check so only stale inputs are re-dexed.
  """
  build_utils.InitLogging('DEX_DEBUG')
  options = _ParseArgs(args)

  # Inputs passed via response files are only known after expansion.
  options.class_inputs += options.class_inputs_filearg
  options.dex_inputs += options.dex_inputs_filearg

  input_paths = options.class_inputs + options.dex_inputs
  if options.multi_dex and options.main_dex_list_path:
    input_paths.append(options.main_dex_list_path)
  input_paths.append(options.r8_jar_path)
  input_paths.append(options.custom_d8_jar_path)
  depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg

  output_paths = [options.output]

  track_subpaths_allowlist = []
  if options.incremental_dir:
    # Incremental mode: each input jar maps to an intermediate .dex file, and
    # per-subpath changes are tracked so only stale jars are re-dexed.
    final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
        options.class_inputs, options.incremental_dir)
    output_paths += final_dex_inputs
    track_subpaths_allowlist += options.class_inputs
  else:
    final_dex_inputs = list(options.class_inputs)
  final_dex_inputs += options.dex_inputs

  dex_cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
      '-cp',
      '{}:{}'.format(options.r8_jar_path, options.custom_d8_jar_path),
      'org.chromium.build.CustomD8',
  ]
  if options.release:
    dex_cmd += ['--release']
  if options.min_api:
    dex_cmd += ['--min-api', options.min_api]

  if not options.desugar:
    dex_cmd += ['--no-desugaring']
  elif options.classpath:
    # The classpath is used by D8 to for interface desugaring.
    if options.desugar_dependencies:
      dex_cmd += ['--desugar-dependencies', options.desugar_dependencies]
      if track_subpaths_allowlist:
        track_subpaths_allowlist += options.classpath
    depfile_deps += options.classpath
    input_paths += options.classpath
    dex_cmd += ['--lib', build_utils.JAVA_HOME]
    for path in options.bootclasspath:
      dex_cmd += ['--lib', path]
    # Still pass the entire classpath in case a new dependency is needed by
    # desugar, so that desugar_dependencies will be updated for the next build.
    for path in options.classpath:
      dex_cmd += ['--classpath', path]
    depfile_deps += options.bootclasspath
    input_paths += options.bootclasspath

  if options.desugar_jdk_libs_json:
    dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
  if options.force_enable_assertions:
    dex_cmd += ['--force-enable-assertions']

  # The changes feature from md5_check allows us to only re-dex the class files
  # that have changed and the class files that need to be re-desugared by D8.
  md5_check.CallAndWriteDepfileIfStale(
      lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
      options,
      input_paths=input_paths,
      input_strings=dex_cmd + [bool(options.incremental_dir)],
      output_paths=output_paths,
      pass_changes=True,
      track_subpaths_allowlist=track_subpaths_allowlist,
      depfile_deps=depfile_deps)
def main(args):
  """Compiles Android resources, delegating the real work to _OnStaleMd5.

  Gathers input paths/strings/output paths for md5_check so that resource
  compilation is skipped entirely when nothing relevant has changed.
  """
  build_utils.InitLogging('RESOURCE_DEBUG')
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  if options.expected_file:
    actual_data = _CreateNormalizedManifest(options)
    diff_utils.CheckExpectations(actual_data, options)
    if options.only_verify_expectations:
      # Verification-only mode: no resource compilation is needed.
      return

  depfile_deps = (options.dependencies_res_zips +
                  options.dependencies_res_zip_overlays +
                  options.extra_main_r_text_files +
                  options.include_resources)

  # Many flags are optional (None); filter those out of the stale-check paths.
  possible_input_paths = depfile_deps + options.resources_config_paths + [
      options.aapt2_path,
      options.android_manifest,
      options.expected_file,
      options.expected_file_base,
      options.shared_resources_allowlist,
      options.use_resource_ids_path,
      options.webp_binary,
  ]
  input_paths = [p for p in possible_input_paths if p]
  # Non-path option values that affect the output; any change re-triggers.
  input_strings = [
      options.app_as_shared_lib,
      options.arsc_package_name,
      options.debuggable,
      options.extra_res_packages,
      options.failure_file,
      options.include_resources,
      options.locale_allowlist,
      options.manifest_package,
      options.max_sdk_version,
      options.min_sdk_version,
      options.no_xml_namespaces,
      options.package_id,
      options.package_name,
      options.png_to_webp,
      options.rename_manifest_package,
      options.resource_exclusion_exceptions,
      options.resource_exclusion_regex,
      options.r_java_root_package_name,
      options.shared_resources,
      options.shared_resources_allowlist_locales,
      options.short_resource_paths,
      options.strip_resource_names,
      options.support_zh_hk,
      options.target_sdk_version,
      options.values_filter_rules,
      options.version_code,
      options.version_name,
      options.webp_cache_dir,
  ]

  output_paths = [options.srcjar_out]
  possible_output_paths = [
      options.actual_file,
      options.arsc_path,
      options.emit_ids_out,
      options.info_path,
      options.optimized_arsc_path,
      options.optimized_proto_path,
      options.proguard_file,
      options.proguard_file_main_dex,
      options.proto_path,
      options.resources_path_map_out_path,
      options.r_text_out,
  ]
  output_paths += [p for p in possible_output_paths if p]

  # Since we overspecify deps, this target depends on java deps that are not
  # going to change its output. This target is also slow (6-12 seconds) and
  # blocking the critical path. We want changes to java_library targets to not
  # trigger re-compilation of resources, thus we need to use md5_check.
  md5_check.CallAndWriteDepfileIfStale(lambda: _OnStaleMd5(options),
                                       options,
                                       input_paths=input_paths,
                                       input_strings=input_strings,
                                       output_paths=output_paths,
                                       depfile_deps=depfile_deps)
def main(argv):
  """Compiles Java header jars with Google's turbine compiler.

  Builds the turbine command line from flags, runs it with AtomicOutput so
  output timestamps only change when contents change, and writes a depfile of
  classpath/srcjar deps (GN already knows the individual .java files).
  """
  build_utils.InitLogging('TURBINE_DEBUG')
  argv = build_utils.ExpandFileArgs(argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--turbine-jar-path',
                      required=True,
                      help='Path to the turbine jar file.')
  parser.add_argument('--java-srcjars',
                      action='append',
                      default=[],
                      help='List of srcjars to include in compilation.')
  parser.add_argument(
      '--bootclasspath',
      action='append',
      default=[],
      help='Boot classpath for javac. If this is specified multiple times, '
      'they will all be appended to construct the classpath.')
  parser.add_argument(
      '--java-version',
      help='Java language version to use in -source and -target args to javac.')
  parser.add_argument('--classpath', action='append', help='Classpath to use.')
  parser.add_argument('--processors',
                      action='append',
                      help='GN list of annotation processor main classes.')
  parser.add_argument(
      '--processorpath',
      action='append',
      help='GN list of jars that comprise the classpath used for Annotation '
      'Processors.')
  parser.add_argument(
      '--processor-args',
      action='append',
      help='key=value arguments for the annotation processors.')
  parser.add_argument('--jar-path', help='Jar output path.', required=True)
  parser.add_argument('--generated-jar-path',
                      required=True,
                      help='Output path for generated source files.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  options, unknown_args = parser.parse_known_args(argv)

  # These flags arrive as GN-style lists; flatten them into Python lists.
  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)
  options.processorpath = build_utils.ParseGnList(options.processorpath)
  options.processors = build_utils.ParseGnList(options.processors)
  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)

  files = []
  for arg in unknown_args:
    # Interpret a path prefixed with @ as a file containing a list of sources.
    if arg.startswith('@'):
      files.extend(build_utils.ReadSourcesList(arg[1:]))

  cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
      '-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main'
  ]
  javac_cmd = []

  # Turbine reads lists from command line args by consuming args until one
  # starts with double dash (--). Thus command line args should be grouped
  # together and passed in together.
  if options.processors:
    cmd += ['--processors']
    cmd += options.processors

  if options.java_version:
    javac_cmd.extend([
        '-source',
        options.java_version,
        '-target',
        options.java_version,
    ])
    if options.java_version == '1.8':
      # Android's boot jar doesn't contain all java 8 classes.
      options.bootclasspath.append(build_utils.RT_JAR_PATH)

  if options.bootclasspath:
    cmd += ['--bootclasspath']
    for bootclasspath in options.bootclasspath:
      cmd += bootclasspath.split(':')

  if options.processorpath:
    cmd += ['--processorpath']
    cmd += options.processorpath

  if options.processor_args:
    for arg in options.processor_args:
      javac_cmd.extend(['-A%s' % arg])

  if options.classpath:
    cmd += ['--classpath']
    cmd += options.classpath

  if options.java_srcjars:
    cmd += ['--source_jars']
    cmd += options.java_srcjars

  if files:
    # Use jar_path to ensure paths are relative (needed for goma).
    files_rsp_path = options.jar_path + '.files_list.txt'
    with open(files_rsp_path, 'w') as f:
      f.write(' '.join(files))
    # Pass source paths as response files to avoid extremely long command lines
    # that are tedius to debug.
    cmd += ['--sources']
    cmd += ['@' + files_rsp_path]

  if javac_cmd:
    cmd.append('--javacopts')
    cmd += javac_cmd
    cmd.append('--')  # Terminate javacopts

  # Use AtomicOutput so that output timestamps are not updated when outputs
  # are not changed.
  with build_utils.AtomicOutput(options.jar_path) as output_jar, \
      build_utils.AtomicOutput(options.generated_jar_path) as generated_jar:
    cmd += ['--output', output_jar.name, '--gensrc_output', generated_jar.name]
    logging.debug('Command: %s', cmd)
    start = time.time()
    build_utils.CheckOutput(cmd,
                            print_stdout=True,
                            fail_on_output=options.warnings_as_errors)
    end = time.time() - start
    logging.info('Header compilation took %ss', end)

  if options.depfile:
    # GN already knows of the java files, so avoid listing individual java
    # files in the depfile.
    depfile_deps = (options.bootclasspath + options.classpath +
                    options.processorpath + options.java_srcjars)
    build_utils.WriteDepfile(options.depfile, options.jar_path, depfile_deps)
def main():
  """Entry point: merges ProGuard configs, runs R8, and writes outputs.

  Flow: parse options -> merge static + flag-derived configs -> (optionally)
  verify config expectations -> collect library jars from the classpath ->
  either emit keep rules or run R8 -> (optionally) check the produced dex
  files for missing symbols -> copy mapping files and write the depfile.
  """
  build_utils.InitLogging('PROGUARD_DEBUG')
  options = _ParseOptions()

  logging.debug('Preparing configs')
  # Config files passed explicitly on the command line.
  proguard_configs = options.proguard_configs

  # ProGuard configs that are derived from flags.
  dynamic_config_data = _CreateDynamicConfig(options)

  # Merge file-based and flag-derived configs into one blob (used for
  # expectation diffing and for deciding whether to print R8's stdout).
  merged_configs = _CombineConfigs(
      proguard_configs, dynamic_config_data, exclude_generated=True)
  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose

  if options.expected_file:
    diff_utils.CheckExpectations(merged_configs, options)
    if options.only_verify_expectations:
      # Expectation-only mode: record deps and stop before running R8.
      build_utils.WriteDepfile(options.depfile,
                               options.actual_file,
                               inputs=options.proguard_configs)
      return

  logging.debug('Looking for embedded configs')
  # Classpath entries that are not program inputs are treated as library jars.
  libraries = []
  for p in options.classpath:
    # TODO(bjoyce): Remove filter once old android support libraries are gone.
    # Fix for having Library class extend program class dependency problem.
    if 'com_android_support' in p or 'android_support_test' in p:
      continue
    # If a jar is part of input no need to include it as library jar.
    if p not in libraries and p not in options.input_paths:
      libraries.append(p)
  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)
  if options.keep_rules_output_path:
    # Keep-rule generation mode: emit rules and stop before running R8.
    _OutputKeepRules(options.r8_path, options.input_paths, options.classpath,
                     options.keep_rules_targets_regex,
                     options.keep_rules_output_path)
    return

  base_context = _OptimizeWithR8(options, proguard_configs, libraries,
                                 dynamic_config_data, print_stdout)

  if not options.disable_checks:
    logging.debug('Running tracereferences')
    all_dex_files = []
    if options.output_path:
      all_dex_files.append(options.output_path)
    if options.dex_dests:
      all_dex_files.extend(options.dex_dests)
    error_title = 'DEX contains references to non-existent symbols after R8.'
    _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath,
                            options.warnings_as_errors, error_title)
    # Also ensure that base module doesn't have any references to child dex
    # symbols.
    # TODO(agrieve): Remove this check once r8 desugaring is fixed to not put
    # synthesized classes in the base module.
    error_title = 'Base module DEX contains references symbols within DFMs.'
    _CheckForMissingSymbols(options.r8_path, [base_context.final_output_path],
                            options.classpath, options.warnings_as_errors,
                            error_title)

  # Fan the mapping file out to any extra requested locations.
  for output in options.extra_mapping_output_paths:
    shutil.copy(options.mapping_output, output)

  inputs = options.proguard_configs + options.input_paths + libraries
  if options.apply_mapping:
    inputs.append(options.apply_mapping)

  _MaybeWriteStampAndDepFile(options, inputs)
def main(args):
  """Entry point: builds the d8 (CustomD8) command line and dexes inputs.

  Assembles the full d8 invocation plus the input/output/depfile bookkeeping,
  then delegates to md5_check so the (expensive) dexing step only runs when
  inputs actually changed.
  """
  build_utils.InitLogging('DEX_DEBUG')
  options = _ParseArgs(args)

  # File-arg inputs come from .build_config files rather than GN, so they
  # are folded in here and also recorded as depfile deps below.
  options.class_inputs += options.class_inputs_filearg
  options.dex_inputs += options.dex_inputs_filearg

  input_paths = options.class_inputs + options.dex_inputs
  if options.multi_dex and options.main_dex_list_path:
    input_paths.append(options.main_dex_list_path)
  # The tool jars themselves are inputs: a tool update must trigger a rebuild.
  input_paths.append(options.r8_jar_path)
  input_paths.append(options.custom_d8_jar_path)
  depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg

  output_paths = [options.output]

  if options.incremental_dir:
    # Incremental mode: per-jar intermediate .dex files are additional outputs
    # and changed-subpath tracking is enabled for the class inputs.
    final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
        options.class_inputs, options.incremental_dir)
    output_paths += final_dex_inputs
    track_subpaths_allowlist = options.class_inputs
  else:
    final_dex_inputs = list(options.class_inputs)
    track_subpaths_allowlist = None
  final_dex_inputs += options.dex_inputs

  dex_cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
      '-cp',
      '{}:{}'.format(options.r8_jar_path, options.custom_d8_jar_path),
      'org.chromium.build.CustomD8',
  ]
  if options.release:
    dex_cmd += ['--release']
  if options.min_api:
    dex_cmd += ['--min-api', options.min_api]

  if not options.desugar:
    dex_cmd += ['--no-desugaring']
  elif options.classpath:
    # The classpath is used by D8 to for interface desugaring.
    classpath_paths = options.classpath
    if options.desugar_dependencies:
      dex_cmd += ['--desugar-dependencies', options.desugar_dependencies]
      if os.path.exists(options.desugar_dependencies):
        with open(options.desugar_dependencies, 'r') as f:
          lines = [line.strip() for line in f.readlines()]
        # Use a set to deduplicate entries.
        desugar_dependencies = set(dep for dep in lines if dep)
        # Desugar dependencies are a subset of classpath.
        classpath_paths = list(desugar_dependencies)

    depfile_deps += classpath_paths
    input_paths += classpath_paths

    dex_cmd += ['--lib', build_utils.JAVA_HOME]
    for path in options.bootclasspath:
      dex_cmd += ['--lib', path]
    # Still pass the entire classpath in case a new dependency is needed by
    # desugar, so that desugar_dependencies will be updated for the next build.
    for path in options.classpath:
      dex_cmd += ['--classpath', path]
    depfile_deps += options.bootclasspath
    input_paths += options.bootclasspath

  if options.desugar_jdk_libs_json:
    dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
  if options.force_enable_assertions:
    dex_cmd += ['--force-enable-assertions']

  # bool(options.incremental_dir) is part of input_strings so that toggling
  # incremental mode invalidates the previous build's md5 stamp.
  md5_check.CallAndWriteDepfileIfStale(
      lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
      options,
      input_paths=input_paths,
      input_strings=dex_cmd + [bool(options.incremental_dir)],
      output_paths=output_paths,
      pass_changes=True,
      track_subpaths_allowlist=track_subpaths_allowlist,
      depfile_deps=depfile_deps)
def main(argv):
  """Entry point: runs turbine header compilation when inputs are stale.

  Parses command-line options, builds the turbine invocation (plus javac
  pass-through flags), and delegates to md5_check to skip the run when
  nothing changed.
  """
  build_utils.InitLogging('TURBINE_DEBUG')
  argv = build_utils.ExpandFileArgs(argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--turbine-jar-path',
                      required=True,
                      help='Path to the turbine jar file.')
  parser.add_argument('--java-srcjars',
                      action='append',
                      default=[],
                      help='List of srcjars to include in compilation.')
  parser.add_argument(
      '--bootclasspath',
      action='append',
      default=[],
      help='Boot classpath for javac. If this is specified multiple times, '
      'they will all be appended to construct the classpath.')
  parser.add_argument(
      '--java-version',
      help=
      'Java language version to use in -source and -target args to javac.')
  parser.add_argument('--classpath', action='append', help='Classpath to use.')
  parser.add_argument('--processors',
                      action='append',
                      help='GN list of annotation processor main classes.')
  parser.add_argument(
      '--processorpath',
      action='append',
      help='GN list of jars that comprise the classpath used for Annotation '
      'Processors.')
  parser.add_argument(
      '--processor-args',
      action='append',
      help='key=value arguments for the annotation processors.')
  parser.add_argument('--jar-path', help='Jar output path.', required=True)
  parser.add_argument('--generated-jar-path',
                      required=True,
                      help='Output path for generated source files.')
  options, unknown_args = parser.parse_known_args(argv)

  # These are GN lists (may arrive as serialized list strings); normalize
  # them into plain Python lists.
  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)
  options.processorpath = build_utils.ParseGnList(options.processorpath)
  options.processors = build_utils.ParseGnList(options.processors)
  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)

  files = []
  for arg in unknown_args:
    # Interpret a path prefixed with @ as a file containing a list of sources.
    if arg.startswith('@'):
      files.extend(build_utils.ReadSourcesList(arg[1:]))

  cmd = [
      build_utils.JAVA_PATH, '-classpath', options.turbine_jar_path,
      'com.google.turbine.main.Main'
  ]
  javac_cmd = []

  # Turbine reads lists from command line args by consuming args until one
  # starts with double dash (--). Thus command line args should be grouped
  # together and passed in together.
  if options.processors:
    cmd += ['--processors']
    cmd += options.processors

  if options.java_version:
    javac_cmd.extend([
        '-source',
        options.java_version,
        '-target',
        options.java_version,
    ])
  if options.java_version == '1.8':
    # Android's boot jar doesn't contain all java 8 classes.
    options.bootclasspath.append(build_utils.RT_JAR_PATH)

  if options.bootclasspath:
    cmd += ['--bootclasspath']
    for bootclasspath in options.bootclasspath:
      # Entries may themselves be ':'-joined paths; flatten them.
      cmd += bootclasspath.split(':')

  if options.processorpath:
    cmd += ['--processorpath']
    cmd += options.processorpath

  if options.processor_args:
    for arg in options.processor_args:
      javac_cmd.extend(['-A%s' % arg])

  classpath_inputs = (
      options.bootclasspath + options.classpath + options.processorpath)

  # GN already knows of the java files, so avoid listing individual java files
  # in the depfile.
  depfile_deps = classpath_inputs + options.java_srcjars
  input_paths = depfile_deps + files
  output_paths = [
      options.jar_path,
      options.generated_jar_path,
  ]
  input_strings = cmd + options.classpath + files

  md5_check.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(options, cmd, javac_cmd, files, options.classpath),
      options,
      depfile_deps=depfile_deps,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths)
def main(argv):
  """Entry point: builds the javac (optionally errorprone) command line.

  Constructs javac arguments from options, then delegates to md5_check so
  compilation only runs when the inputs or command line changed. For
  errorprone runs it may hand the whole command off to the build server.
  """
  build_utils.InitLogging('JAVAC_DEBUG')
  argv = build_utils.ExpandFileArgs(argv)
  options, java_files = _ParseOptions(argv)

  # Only use the build server for errorprone runs.
  if (options.enable_errorprone and not options.skip_build_server
      and server_utils.MaybeRunCommand(name=options.target_name,
                                       argv=sys.argv,
                                       stamp_file=options.jar_path)):
    return

  javac_cmd = []
  if options.gomacc_path:
    javac_cmd.append(options.gomacc_path)
  javac_cmd.append(build_utils.JAVAC_PATH)

  javac_args = [
      '-g',
      # Chromium only allows UTF8 source files. Being explicit avoids
      # javac pulling a default encoding from the user's environment.
      '-encoding',
      'UTF-8',
      # Prevent compiler from compiling .java files not listed as inputs.
      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
      '-sourcepath',
      ':',
  ]

  if options.enable_errorprone:
    # All errorprone args are passed space-separated in a single arg.
    errorprone_flags = ['-Xplugin:ErrorProne']
    # Make everything a warning so that when treat_warnings_as_errors is false,
    # they do not fail the build.
    errorprone_flags += ['-XepAllErrorsAsWarnings']
    # Don't check generated files.
    errorprone_flags += ['-XepDisableWarningsInGeneratedCode']
    errorprone_flags.extend('-Xep:{}:OFF'.format(x)
                            for x in ERRORPRONE_WARNINGS_TO_DISABLE)
    errorprone_flags.extend('-Xep:{}:WARN'.format(x)
                            for x in ERRORPRONE_WARNINGS_TO_ENABLE)

    if ERRORPRONE_CHECKS_TO_APPLY:
      errorprone_flags += [
          '-XepPatchLocation:IN_PLACE',
          '-XepPatchChecks:,' + ','.join(ERRORPRONE_CHECKS_TO_APPLY)
      ]

    javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]

    # This flag quits errorprone after checks and before code generation, since
    # we do not need errorprone outputs, this speeds up errorprone by 4 seconds
    # for chrome_java.
    if not ERRORPRONE_CHECKS_TO_APPLY:
      javac_args += ['-XDshould-stop.ifNoError=FLOW']

  if options.java_version:
    javac_args.extend([
        '-source',
        options.java_version,
        '-target',
        options.java_version,
    ])
  if options.java_version == '1.8':
    # Android's boot jar doesn't contain all java 8 classes.
    options.bootclasspath.append(build_utils.RT_JAR_PATH)

  # This effectively disables all annotation processors, even including
  # annotation processors in service provider configuration files named
  # META-INF/. See the following link for reference:
  # https://docs.oracle.com/en/java/javase/11/tools/javac.html
  javac_args.extend(['-proc:none'])

  if options.bootclasspath:
    javac_args.extend(['-bootclasspath', ':'.join(options.bootclasspath)])

  if options.processorpath:
    javac_args.extend(['-processorpath', ':'.join(options.processorpath)])
  if options.processor_args:
    for arg in options.processor_args:
      javac_args.extend(['-A%s' % arg])

  javac_args.extend(options.javac_arg)

  classpath_inputs = (
      options.bootclasspath + options.classpath + options.processorpath)

  depfile_deps = classpath_inputs
  # Files that are already inputs in GN should go in input_paths.
  input_paths = depfile_deps + options.java_srcjars + java_files
  if options.header_jar:
    input_paths.append(options.header_jar)
  input_paths += [x[0] for x in options.additional_jar_files]

  output_paths = [options.jar_path]
  # The .info file is only produced by normal (non-errorprone) compiles.
  if not options.enable_errorprone:
    output_paths += [options.jar_path + '.info']

  input_strings = javac_cmd + javac_args + options.classpath + java_files + [
      options.warnings_as_errors, options.jar_info_exclude_globs
  ]

  # Use md5_check for |pass_changes| feature.
  md5_check.CallAndWriteDepfileIfStale(lambda changes: _OnStaleMd5(
      changes, options, javac_cmd, javac_args, java_files),
                                       options,
                                       depfile_deps=depfile_deps,
                                       input_paths=input_paths,
                                       input_strings=input_strings,
                                       output_paths=output_paths,
                                       pass_changes=True)
def main(argv):
  """Entry point: builds a javac command line and compiles when stale.

  Older javac driver variant: errorprone flags are folded directly into the
  javac command, and annotation processors run in-process (no -proc:none).
  """
  build_utils.InitLogging('JAVAC_DEBUG')
  colorama.init()

  argv = build_utils.ExpandFileArgs(argv)
  options, java_files = _ParseOptions(argv)

  javac_path = build_utils.JAVAC_PATH
  javac_cmd = [
      javac_path,
      '-g',
      # Chromium only allows UTF8 source files. Being explicit avoids
      # javac pulling a default encoding from the user's environment.
      '-encoding',
      'UTF-8',
      # Prevent compiler from compiling .java files not listed as inputs.
      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
      '-sourcepath',
      ':',
  ]

  if options.enable_errorprone:
    # All errorprone args are passed space-separated in a single arg.
    errorprone_flags = ['-Xplugin:ErrorProne']
    for warning in ERRORPRONE_WARNINGS_TO_TURN_OFF:
      errorprone_flags.append('-Xep:{}:OFF'.format(warning))
    for warning in ERRORPRONE_WARNINGS_TO_ERROR:
      errorprone_flags.append('-Xep:{}:ERROR'.format(warning))
    if not options.warnings_as_errors:
      errorprone_flags.append('-XepAllErrorsAsWarnings')
    javac_cmd += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]

  if options.java_version:
    javac_cmd.extend([
        '-source',
        options.java_version,
        '-target',
        options.java_version,
    ])
  if options.java_version == '1.8':
    # Android's boot jar doesn't contain all java 8 classes.
    options.bootclasspath.append(build_utils.RT_JAR_PATH)

  if options.warnings_as_errors:
    javac_cmd.extend(['-Werror'])
  else:
    # XDignore.symbol.file makes javac compile against rt.jar instead of
    # ct.sym. This means that using a java internal package/class will not
    # trigger a compile warning or error.
    javac_cmd.extend(['-XDignore.symbol.file'])

  if options.processors:
    javac_cmd.extend(['-processor', ','.join(options.processors)])

  if options.bootclasspath:
    javac_cmd.extend(['-bootclasspath', ':'.join(options.bootclasspath)])

  if options.processorpath:
    javac_cmd.extend(['-processorpath', ':'.join(options.processorpath)])
  if options.processor_args:
    for arg in options.processor_args:
      javac_cmd.extend(['-A%s' % arg])

  javac_cmd.extend(options.javac_arg)

  classpath_inputs = (
      options.bootclasspath + options.classpath + options.processorpath)
  # GN already knows of java_files, so listing them just make things worse when
  # they change.
  depfile_deps = classpath_inputs + options.java_srcjars

  input_paths = depfile_deps + java_files
  input_paths += [x[0] for x in options.additional_jar_files]

  output_paths = [
      options.jar_path,
      options.jar_path + '.info',
  ]

  input_strings = javac_cmd + options.classpath + java_files
  if options.jar_info_exclude_globs:
    input_strings.append(options.jar_info_exclude_globs)
  md5_check.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(options, javac_cmd, java_files, options.classpath),
      options,
      depfile_deps=depfile_deps,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths)
def main(args):
  """Entry point: assembles an APK (or bundle module) zip from build outputs.

  Gathers native libs, assets, dex files and resources, optionally verifies
  expectation files, then writes the archive in a fixed entry order
  (manifest, assets, dex, native libs, resources, java resources) and
  finalizes (zipalign + sign) when producing an actual APK.
  """
  build_utils.InitLogging('APKBUILDER_DEBUG')
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  # Until Python 3.7, there's no better way to set compression level.
  # The default is 6.
  if options.best_compression:
    # Compresses about twice as slow as the default.
    zlib.Z_DEFAULT_COMPRESSION = 9
  else:
    # Compresses about twice as fast as the default.
    zlib.Z_DEFAULT_COMPRESSION = 1

  # Manually align only when alignment is necessary.
  # Python's zip implementation duplicates file comments in the central
  # directory, whereas zipalign does not, so use zipalign for official builds.
  fast_align = options.format == 'apk' and not options.best_compression

  native_libs = sorted(options.native_libs)

  # Include native libs in the depfile_deps since GN doesn't know about the
  # dependencies when is_component_build=true.
  depfile_deps = list(native_libs)

  # For targets that depend on static library APKs, dex paths are created by
  # the static library's dexsplitter target and GN doesn't know about these
  # paths.
  if options.dex_file:
    depfile_deps.append(options.dex_file)

  secondary_native_libs = []
  if options.secondary_native_libs:
    secondary_native_libs = sorted(options.secondary_native_libs)
    depfile_deps += secondary_native_libs

  if options.java_resources:
    # Included via .build_config.json, so need to write it to depfile.
    depfile_deps.extend(options.java_resources)

  assets = _ExpandPaths(options.assets)
  uncompressed_assets = _ExpandPaths(options.uncompressed_assets)

  # Included via .build_config.json, so need to write it to depfile.
  depfile_deps.extend(x[0] for x in assets)
  depfile_deps.extend(x[0] for x in uncompressed_assets)
  depfile_deps.append(options.resource_apk)

  # Bundle modules have a structure similar to APKs, except that resources
  # are compiled in protobuf format (instead of binary xml), and that some
  # files are located into different top-level directories, e.g.:
  #   AndroidManifest.xml -> manifest/AndroidManifest.xml
  #   classes.dex -> dex/classes.dex
  #   res/ -> res/  (unchanged)
  #   assets/ -> assets/  (unchanged)
  #   <other-file> -> root/<other-file>
  #
  # Hence, the following variables are used to control the location of files in
  # the final archive.
  if options.format == 'bundle-module':
    apk_manifest_dir = 'manifest/'
    apk_root_dir = 'root/'
    apk_dex_dir = 'dex/'
  else:
    apk_manifest_dir = ''
    apk_root_dir = ''
    apk_dex_dir = ''

  def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads):
    # Combine compressed and uncompressed assets into a single entry list.
    ret = _GetAssetsToAdd(assets,
                          fast_align,
                          disable_compression=False,
                          allow_reads=allow_reads)
    ret.extend(
        _GetAssetsToAdd(uncompressed_assets,
                        fast_align,
                        disable_compression=True,
                        allow_reads=allow_reads))
    return ret

  libs_to_add = _GetNativeLibrariesToAdd(
      native_libs, options.android_abi, options.uncompress_shared_libraries,
      fast_align, options.library_always_compress, options.library_renames)
  if options.secondary_android_abi:
    libs_to_add.extend(
        _GetNativeLibrariesToAdd(
            secondary_native_libs, options.secondary_android_abi,
            options.uncompress_shared_libraries, fast_align,
            options.library_always_compress, options.library_renames))

  if options.expected_file:
    # We compute expectations without reading the files. This allows us to check
    # expectations for different targets by just generating their build_configs
    # and not have to first generate all the actual files and all their
    # dependencies (for example by just passing --only-verify-expectations).
    asset_details = _GetAssetDetails(assets,
                                     uncompressed_assets,
                                     fast_align,
                                     allow_reads=False)

    actual_data = _CreateExpectationsData(libs_to_add, asset_details)
    diff_utils.CheckExpectations(actual_data, options)

    if options.only_verify_expectations:
      if options.depfile:
        build_utils.WriteDepfile(options.depfile,
                                 options.actual_file,
                                 inputs=depfile_deps)
      return

  # If we are past this point, we are going to actually create the final apk so
  # we should recompute asset details again but maybe perform some optimizations
  # based on the size of the files on disk.
  assets_to_add = _GetAssetDetails(
      assets, uncompressed_assets, fast_align, allow_reads=True)

  # Targets generally do not depend on apks, so no need for only_if_changed.
  with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
    with zipfile.ZipFile(options.resource_apk) as resource_apk, \
         zipfile.ZipFile(f, 'w') as out_apk:

      def add_to_zip(zip_path, data, compress=True, alignment=4):
        # Hermetic (deterministic-timestamp) zip writes; alignment is only
        # applied when fast_align is doing in-process alignment.
        zipalign.AddToZipHermetic(
            out_apk,
            zip_path,
            data=data,
            compress=compress,
            alignment=0 if compress and not fast_align else alignment)

      def copy_resource(zipinfo, out_dir=''):
        # Copy an entry out of the resource apk, preserving its
        # compressed/stored state.
        add_to_zip(
            out_dir + zipinfo.filename,
            resource_apk.read(zipinfo.filename),
            compress=zipinfo.compress_type != zipfile.ZIP_STORED)

      # Make assets come before resources in order to maintain the same file
      # ordering as GYP / aapt. http://crbug.com/561862
      resource_infos = resource_apk.infolist()

      # 1. AndroidManifest.xml
      logging.debug('Adding AndroidManifest.xml')
      copy_resource(
          resource_apk.getinfo('AndroidManifest.xml'),
          out_dir=apk_manifest_dir)

      # 2. Assets
      logging.debug('Adding assets/')
      _AddFiles(out_apk, assets_to_add)

      # 3. Dex files
      logging.debug('Adding classes.dex')
      if options.dex_file:
        with open(options.dex_file, 'rb') as dex_file_obj:
          if options.dex_file.endswith('.dex'):
            max_dex_number = 1
            # This is the case for incremental_install=true.
            add_to_zip(
                apk_dex_dir + 'classes.dex',
                dex_file_obj.read(),
                compress=not options.uncompress_dex)
          else:
            # A zip of dex files: copy each *.dex entry across.
            max_dex_number = 0
            with zipfile.ZipFile(dex_file_obj) as dex_zip:
              for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
                max_dex_number += 1
                add_to_zip(
                    apk_dex_dir + dex,
                    dex_zip.read(dex),
                    compress=not options.uncompress_dex)

      if options.jdk_libs_dex_file:
        with open(options.jdk_libs_dex_file, 'rb') as dex_file_obj:
          # Appended after the last classes<N>.dex written above.
          add_to_zip(
              apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1),
              dex_file_obj.read(),
              compress=not options.uncompress_dex)

      # 4. Native libraries.
      logging.debug('Adding lib/')
      _AddFiles(out_apk, libs_to_add)

      # Add a placeholder lib if the APK should be multi ABI but is missing libs
      # for one of the ABIs.
      native_lib_placeholders = options.native_lib_placeholders
      secondary_native_lib_placeholders = (
          options.secondary_native_lib_placeholders)
      if options.is_multi_abi:
        if ((secondary_native_libs or secondary_native_lib_placeholders)
            and not native_libs and not native_lib_placeholders):
          native_lib_placeholders += ['libplaceholder.so']

        if ((native_libs or native_lib_placeholders)
            and not secondary_native_libs
            and not secondary_native_lib_placeholders):
          secondary_native_lib_placeholders += ['libplaceholder.so']

      # Add placeholder libs.
      for name in sorted(native_lib_placeholders):
        # Note: Empty libs files are ignored by md5check (can cause issues
        # with stale builds when the only change is adding/removing
        # placeholders).
        apk_path = 'lib/%s/%s' % (options.android_abi, name)
        add_to_zip(apk_path, '', alignment=0x1000)

      for name in sorted(secondary_native_lib_placeholders):
        # Note: Empty libs files are ignored by md5check (can cause issues
        # with stale builds when the only change is adding/removing
        # placeholders).
        apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
        add_to_zip(apk_path, '', alignment=0x1000)

      # 5. Resources
      logging.debug('Adding res/')
      for info in sorted(resource_infos, key=lambda i: i.filename):
        if info.filename != 'AndroidManifest.xml':
          copy_resource(info)

      # 6. Java resources that should be accessible via
      # Class.getResourceAsStream(), in particular parts of Emma jar.
      # Prebuilt jars may contain class files which we shouldn't include.
      logging.debug('Adding Java resources')
      for java_resource in options.java_resources:
        with zipfile.ZipFile(java_resource, 'r') as java_resource_jar:
          for apk_path in sorted(java_resource_jar.namelist()):
            apk_path_lower = apk_path.lower()

            if apk_path_lower.startswith('meta-inf/'):
              continue
            if apk_path_lower.endswith('/'):
              continue
            if apk_path_lower.endswith('.class'):
              continue

            add_to_zip(apk_root_dir + apk_path,
                       java_resource_jar.read(apk_path))

      if options.format == 'apk':
        # zipalign is only needed when fast (in-process) alignment was off.
        zipalign_path = None if fast_align else options.zipalign_path
        finalize_apk.FinalizeApk(options.apksigner_jar,
                                 zipalign_path,
                                 f.name,
                                 f.name,
                                 options.key_path,
                                 options.key_passwd,
                                 options.key_name,
                                 int(options.min_sdk_version),
                                 warnings_as_errors=options.warnings_as_errors)
    logging.debug('Moving file into place')

    if options.depfile:
      build_utils.WriteDepfile(options.depfile,
                               options.output_apk,
                               inputs=depfile_deps)
def main(args):
  """Entry point: builds the D8 command line and dexes inputs when stale.

  Older variant that invokes com.android.tools.r8.D8 directly (no CustomD8
  wrapper and no desugar-dependencies tracking).
  """
  build_utils.InitLogging('DEX_DEBUG')
  options = _ParseArgs(args)

  # Fold .build_config-provided (file-arg) inputs into the main input lists;
  # they are also recorded as depfile deps below since GN doesn't know them.
  options.class_inputs += options.class_inputs_filearg
  options.dex_inputs += options.dex_inputs_filearg

  input_paths = options.class_inputs + options.dex_inputs
  if options.multi_dex and options.main_dex_list_path:
    input_paths.append(options.main_dex_list_path)
  # The r8 jar is itself an input: a tool update must trigger a rebuild.
  input_paths.append(options.r8_jar_path)
  depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg

  output_paths = [options.output]

  if options.incremental_dir:
    # Incremental mode: per-jar intermediate .dex files are additional outputs
    # and changed-subpath tracking is enabled for the class inputs.
    final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
        options.class_inputs, options.incremental_dir)
    output_paths += final_dex_inputs
    track_subpaths_allowlist = options.class_inputs
  else:
    final_dex_inputs = list(options.class_inputs)
    track_subpaths_allowlist = None
  final_dex_inputs += options.dex_inputs

  dex_cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
      '-cp',
      options.r8_jar_path,
      'com.android.tools.r8.D8',
  ]
  if options.release:
    dex_cmd += ['--release']
  if options.min_api:
    dex_cmd += ['--min-api', options.min_api]

  if not options.desugar:
    dex_cmd += ['--no-desugaring']
  elif options.classpath:
    # Don't pass classpath when Desugar.jar is doing interface desugaring.
    dex_cmd += ['--lib', build_utils.JAVA_HOME]
    for path in options.bootclasspath:
      dex_cmd += ['--lib', path]
    for path in options.classpath:
      dex_cmd += ['--classpath', path]
    depfile_deps += options.classpath
    depfile_deps += options.bootclasspath
    input_paths += options.classpath
    input_paths += options.bootclasspath

  if options.desugar_jdk_libs_json:
    dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
  if options.force_enable_assertions:
    dex_cmd += ['--force-enable-assertions']

  # bool(options.incremental_dir) is part of input_strings so that toggling
  # incremental mode invalidates the previous build's md5 stamp.
  md5_check.CallAndWriteDepfileIfStale(
      lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
      options,
      input_paths=input_paths,
      input_strings=dex_cmd + [bool(options.incremental_dir)],
      output_paths=output_paths,
      pass_changes=True,
      track_subpaths_allowlist=track_subpaths_allowlist,
      depfile_deps=depfile_deps)