def _ExpandPaths(paths):
  """Converts src:dst into tuples and enumerates files within directories.

  Args:
    paths: Paths in the form "src_path:dest_path"

  Returns:
    A list of (src_path, dest_path) tuples sorted by dest_path (for stable
    ordering within output .apk).
  """
  expanded = []
  for path in paths:
    src_path, dest_path = _SplitAssetPath(path)
    if not os.path.isdir(src_path):
      expanded.append((src_path, dest_path))
      continue
    # Enumerate every file under the directory; the destination keeps the
    # path relative to |src_path| (hence stripping the prefix plus separator).
    prefix_len = len(src_path) + 1
    for src_file in build_utils.FindInDirectory(src_path, '*'):
      expanded.append(
          (src_file, os.path.join(dest_path, src_file[prefix_len:])))
  return sorted(expanded, key=lambda pair: pair[1])
def CreateExtraRJavaFiles(r_dir, extra_packages, extra_r_text_files,
                          shared_resources, include_all):
  """Creates R.java files for each extra package.

  Fix: the original opened files via bare codecs.open(...).read()/.write(),
  leaking file handles (and risking unflushed writes on non-CPython). All
  file access now uses context managers.

  Args:
    r_dir: Directory containing the generated R.java (and R.txt).
    extra_packages: Java package names that need their own R.java copies.
    extra_r_text_files: One R.txt path per extra package (non-include_all mode).
    shared_resources: Forwarded to CreateExtraRJavaFile.
    include_all: If True, duplicate the single existing R.java into each
        extra package (only the package declaration is rewritten).

  Raises:
    Exception: If package/R.txt counts mismatch or an R.txt line is malformed.
  """
  if include_all:
    java_files = build_utils.FindInDirectory(r_dir, "R.java")
    if len(java_files) != 1:
      return
    r_java_file = java_files[0]
    with codecs.open(r_java_file, encoding='utf-8') as f:
      r_java_contents = f.read()

    for package in extra_packages:
      package_r_java_dir = os.path.join(r_dir, *package.split('.'))
      build_utils.MakeDirectory(package_r_java_dir)
      package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
      # Only the package declaration differs between copies.
      new_r_java = re.sub(r'package [.\w]*;', u'package %s;' % package,
                          r_java_contents)
      with codecs.open(package_r_java_path, 'w', encoding='utf-8') as f:
        f.write(new_r_java)
  else:
    if len(extra_packages) != len(extra_r_text_files):
      raise Exception('Need one R.txt file per extra package')

    all_resources = {}
    r_txt_file = os.path.join(r_dir, 'R.txt')
    if not os.path.exists(r_txt_file):
      return
    with open(r_txt_file) as f:
      for line in f:
        # Each R.txt line looks like: "int[] styleable ActionBar { ... }".
        m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
        if not m:
          raise Exception('Unexpected line in R.txt: %s' % line)
        java_type, resource_type, name, value = m.groups()
        all_resources[(resource_type, name)] = (java_type, value)

    for package, r_text_file in zip(extra_packages, extra_r_text_files):
      if os.path.exists(r_text_file):
        package_r_java_dir = os.path.join(r_dir, *package.split('.'))
        build_utils.MakeDirectory(package_r_java_dir)
        package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
        CreateExtraRJavaFile(package, package_r_java_path, r_text_file,
                             all_resources, shared_resources)
def _ExtractClassFiles(jar_path, dest_dir, java_files):
  """Extracts all .class files not corresponding to |java_files|."""
  # Two challenges exist here:
  # 1. |java_files| have prefixes that are not represented in the jar paths.
  # 2. A single .java file results in multiple .class files when it contains
  #    nested classes.
  # Example:
  #   source path: ../../base/android/java/src/org/chromium/Foo.java
  #   jar paths: org/chromium/Foo.class, org/chromium/Foo$Inner.class
  # We strip off ".class" / "$*.class", append ".java", and check whether any
  # source path ends with that suffix; only unrelated classes are extracted.
  suffix_re = re.compile(r'(?:\$|\.)[^/]*class$')

  def _ShouldExtract(path):
    if not path.endswith('.class'):
      return False
    candidate_java = suffix_re.sub('', path) + '.java'
    for java_file in java_files:
      if java_file.endswith(candidate_java):
        return False
    return True

  build_utils.ExtractAll(jar_path, path=dest_dir, predicate=_ShouldExtract)
  # Give extracted classes the jar's timestamps so staleness checks behave.
  for extracted in build_utils.FindInDirectory(dest_dir, '*.class'):
    shutil.copystat(jar_path, extracted)
def CreateOutput(self):
  """Moves dex output from the staging directory to its final location.

  A single *.dex target is moved directly; anything else is packed into an
  uncompressed .jar first.
  """
  found_files = build_utils.FindInDirectory(self.staging_dir)
  if not found_files:
    raise Exception('Missing dex outputs in {}'.format(
        self.staging_dir))

  if self._final_output_path.endswith('.dex'):
    if len(found_files) != 1:
      raise Exception(
          'Expected exactly 1 dex file output, found: {}'.format(
              '\t'.join(found_files)))
    shutil.move(found_files[0], self._final_output_path)
  else:
    # Add to .jar using Python rather than having R8 output to a .zip directly
    # in order to disable compression of the .jar, saving ~500ms.
    tmp_jar_output = self.staging_dir + '.jar'
    build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
    shutil.move(tmp_jar_output, self._final_output_path)
def main(argv):
  """Runs aidl over each input file and bundles the results into a srcjar."""
  option_parser = optparse.OptionParser()
  build_utils.AddDepfileOption(option_parser)
  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
  option_parser.add_option('--imports', help='Files to import.')
  option_parser.add_option('--includes',
                           help='Directories to add as import search paths.')
  option_parser.add_option('--srcjar', help='Path for srcjar output.')
  options, args = option_parser.parse_args(argv[1:])

  with build_utils.TempDir() as temp_dir:
    # Generate one .java file per .aidl input.
    for aidl_file in args:
      classname = os.path.splitext(os.path.basename(aidl_file))[0]
      output = os.path.join(temp_dir, classname + '.java')
      aidl_cmd = [options.aidl_path]
      aidl_cmd += ['-p' + s for s in build_utils.ParseGypList(options.imports)]
      if options.includes is not None:
        aidl_cmd += [
            '-I' + s for s in build_utils.ParseGypList(options.includes)
        ]
      aidl_cmd += [aidl_file, output]
      build_utils.CheckOutput(aidl_cmd)

    # Archive the generated sources, deriving each entry's path from the
    # declared java package.
    with zipfile.ZipFile(options.srcjar, 'w') as srcjar:
      for java_path in build_utils.FindInDirectory(temp_dir, '*.java'):
        with open(java_path) as source_file:
          contents = source_file.read()
        package = re.search(r'^\s*package\s+(.*?)\s*;', contents,
                            re.M).group(1)
        arcname = '%s/%s' % (package.replace('.', '/'),
                             os.path.basename(java_path))
        srcjar.writestr(arcname, contents)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())
def _HardcodeSharedLibraryDynamicAttributes(zip_path):
  """Hardcodes the package IDs of dynamic attributes to 0x02.

  This is a workaround for b/147674078, which affects Android versions pre-N.

  Fix: protobuf payloads are binary; the files are now opened with 'rb'/'wb'.
  The original text-mode open hands ParseFromString a str on Python 3 (a
  TypeError) and corrupts newline bytes on Windows.

  Args:
    zip_path: Path to proto APK file.
  """
  with build_utils.TempDir() as tmp_dir:
    build_utils.ExtractAll(zip_path, path=tmp_dir)

    resources_pb_path = os.path.join(tmp_dir, 'resources.pb')

    # First process the resources file.
    table = Resources_pb2.ResourceTable()
    with open(resources_pb_path, 'rb') as f:
      table.ParseFromString(f.read())

    for package in table.package:
      for _type in package.type:
        for entry in _type.entry:
          for config_value in entry.config_value:
            _ProcessProtoValue(config_value.value)

    with open(resources_pb_path, 'wb') as f:
      f.write(table.SerializeToString())

    # Next process all the XML files.
    for xml_file in build_utils.FindInDirectory(tmp_dir, '*.xml'):
      xml_node = Resources_pb2.XmlNode()
      with open(xml_file, 'rb') as f:
        xml_node.ParseFromString(f.read())

      _ProcessProtoXmlNode(xml_node)

      with open(xml_file, 'wb') as f:
        f.write(xml_node.SerializeToString())

    # Overwrite the original zip file.
    build_utils.ZipDir(zip_path, tmp_dir)
def _CreateJarFile(jar_path, service_provider_configuration_dir,
                   additional_jar_files, classes_dir):
  """Packs compiled classes, service-provider configs and extra files into
  |jar_path|, written atomically."""
  logging.info('Start creating jar file: %s', jar_path)
  with build_utils.AtomicOutput(jar_path) as f:
    with zipfile.ZipFile(f.name, 'w') as z:
      build_utils.ZipDir(z, classes_dir)
      if service_provider_configuration_dir:
        # META-INF/services/* entries keep paths relative to the config dir.
        for config_file in build_utils.FindInDirectory(
            service_provider_configuration_dir):
          arc_path = os.path.relpath(config_file,
                                     service_provider_configuration_dir)
          build_utils.AddToZipHermetic(z, arc_path, src_path=config_file)
      # (src, dest-in-zip) pairs supplied by the caller, if any.
      for src_path, zip_path in (additional_jar_files or []):
        build_utils.AddToZipHermetic(z, zip_path, src_path=src_path)
  logging.info('Completed jar file: %s', jar_path)
def main(args):
  """Generates R.txt and/or a resources zip for a resources GN target."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  # Resource files aren't explicitly listed in GN. Listing them in the depfile
  # ensures the target will be marked stale when resource files are removed.
  depfile_deps = []
  # The empty/.keep placeholder never lands in the .zip, so listing it in the
  # depfile would cause -w 'dupbuild=err' ninja errors if it ever moved.
  keep_suffix = os.path.join('empty', '.keep')
  for resource_dir in options.resource_dirs:
    depfile_deps.extend(
        resource_file
        for resource_file in build_utils.FindInDirectory(resource_dir, '*')
        if not resource_file.endswith(keep_suffix))

  with resource_utils.BuildContext() as build:
    if options.sources:
      _CheckAllFilesListed(options.sources, options.resource_dirs)

    if options.r_text_in:
      r_txt_path = options.r_text_in
    else:
      _GenerateRTxt(options, build.r_txt_path)
      r_txt_path = build.r_txt_path

    if options.r_text_out:
      shutil.copyfile(r_txt_path, options.r_text_out)

    if options.resource_zip_out:
      ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN
      if options.strip_drawables:
        ignore_pattern += ':*drawable*'
      _ZipResources(options.resource_dirs, options.resource_zip_out,
                    ignore_pattern)

    if options.depfile:
      # Order of output must match order specified in GN so that the correct
      # one appears first in the depfile.
      build_utils.WriteDepfile(options.depfile,
                               options.resource_zip_out or options.r_text_out,
                               depfile_deps)
def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
  """Moves dex output from the staging directory to its final location.

  Args:
    has_imported_lib: Whether JDK library desugaring produced an extra dex
        file (incompatible with a single-.dex output).
    keep_rule_output: Path referenced in the error message when
        has_imported_lib conflicts with a .dex output.
  """
  found_files = build_utils.FindInDirectory(self.staging_dir)
  if not found_files:
    raise Exception('Missing dex outputs in {}'.format(self.staging_dir))

  if self._final_output_path.endswith('.dex'):
    if has_imported_lib:
      raise Exception(
          'Trying to create a single .dex file, but a dependency requires '
          'JDK Library Desugaring (which necessitates a second file).'
          'Refer to %s to see what desugaring was required' %
          keep_rule_output)
    if len(found_files) != 1:
      raise Exception('Expected exactly 1 dex file output, found: {}'.format(
          '\t'.join(found_files)))
    shutil.move(found_files[0], self._final_output_path)
  else:
    # Add to .jar using Python rather than having R8 output to a .zip directly
    # in order to disable compression of the .jar, saving ~500ms.
    tmp_jar_output = self.staging_dir + '.jar'
    build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
    shutil.move(tmp_jar_output, self._final_output_path)
def DoJar(options):
  """Creates an uncompressed jar from the .class files in options.classes_dir.

  Fix: the exclusion step used filter(), which on Python 3 returns a lazy
  iterator; |class_files| is iterated twice below (for class_files_rel and
  for input_paths), so the second pass would see an empty sequence. A list
  comprehension keeps it a concrete list.
  """
  class_files = build_utils.FindInDirectory(options.classes_dir, '*.class')
  for exclude in build_utils.ParseGypList(options.excluded_classes):
    class_files = [f for f in class_files if not fnmatch.fnmatch(f, exclude)]

  jar_path = os.path.abspath(options.jar_path)

  # The paths of the files in the jar will be the same as they are passed in to
  # the command. Because of this, the command should be run in
  # options.classes_dir so the .class file paths in the jar are correct.
  jar_cwd = options.classes_dir
  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
  jar_cmd = ['jar', 'cf0', jar_path] + class_files_rel

  record_path = '%s.md5.stamp' % options.jar_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckCallDie(jar_cmd, cwd=jar_cwd),
      record_path=record_path,
      input_paths=class_files,
      input_strings=jar_cmd)

  build_utils.Touch(options.jar_path)
def DoJar(options):
  """Creates an uncompressed jar from the .class files in options.classes_dir.

  Fix: the exclusion step used filter(), which on Python 3 returns a lazy
  iterator; |class_files| is iterated twice below (for class_files_rel and
  as the md5 checker's inputs), so the second pass would see an empty
  sequence. A list comprehension keeps it a concrete list.
  """
  class_files = build_utils.FindInDirectory(options.classes_dir, '*.class')
  for exclude in build_utils.ParseGypList(options.excluded_classes):
    class_files = [f for f in class_files if not fnmatch.fnmatch(f, exclude)]

  jar_path = os.path.abspath(options.jar_path)

  # The paths of the files in the jar will be the same as they are passed in to
  # the command. Because of this, the command should be run in
  # options.classes_dir so the .class file paths in the jar are correct.
  jar_cwd = options.classes_dir
  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
  jar_cmd = ['jar', 'cf0', jar_path] + class_files_rel

  md5_stamp = '%s.md5' % options.jar_path
  md5_checker = md5_check.Md5Checker(
      stamp=md5_stamp, inputs=class_files, command=jar_cmd)
  if md5_checker.IsStale():
    build_utils.CheckCallDie(jar_cmd, cwd=jar_cwd)
  else:
    # Inputs unchanged: just refresh the output's timestamp.
    build_utils.Touch(options.jar_path)
  md5_checker.Write()
def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
  """Convert style resources to API 14 compatible resources in input_dir."""
  for xml_path in build_utils.FindInDirectory(input_dir, '*.xml'):
    # Mirror the directory layout of |input_dir| under |output_v14_dir|.
    relative = os.path.relpath(xml_path, input_dir)
    GenerateV14StyleResource(xml_path, os.path.join(output_v14_dir, relative))
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
  """Runs R8 to optimize/dex the given inputs, then collects the outputs.

  Args:
    options: Parsed command-line options (r8 path, inputs, feature splits,
        desugaring settings, etc.).
    config_paths: Proguard config files to pass via --pg-conf.
    libraries: Library jars passed via --lib.
    dynamic_config_data: Extra proguard config generated at build time; written
        to a temp file and appended to |config_paths|.
    print_stdout: Whether to echo R8's stdout.
  """
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      with open(tmp_config_path, 'w') as f:
        f.write(dynamic_config_data)
      config_paths = config_paths + [tmp_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    # Build one _DexPathContext per feature split; the 'base' split (explicit
    # or implicit) is tracked separately.
    feature_contexts = []
    if options.feature_names:
      for name, dest_dex, input_paths in zip(options.feature_names,
                                             options.dex_dests,
                                             options.feature_jars):
        feature_context = _DexPathContext(name, dest_dex, input_paths,
                                          tmp_output)
        if name == 'base':
          base_dex_context = feature_context
        else:
          feature_contexts.append(feature_context)
    else:
      base_dex_context = _DexPathContext('base', options.output_path,
                                         options.input_paths, tmp_output)

    cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
        '-Dcom.android.tools.r8.allowTestProguardOptions=1',
        '-Dcom.android.tools.r8.verticalClassMerging=1',
    ]
    if options.disable_outlining:
      cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
    cmd += [
        '-cp',
        options.r8_path,
        'com.android.tools.r8.R8',
        '--no-data-resources',
        '--output',
        base_dex_context.staging_dir,
        '--pg-map-output',
        tmp_mapping_path,
    ]

    if options.disable_checkdiscard:
      # Info level priority logs are not printed by default.
      cmd += [
          '--map-diagnostics:'
          'com.android.tools.r8.errors.CheckDiscardDiagnostic', 'error',
          'info',
      ]

    if options.desugar_jdk_libs_json:
      cmd += [
          '--desugared-lib',
          options.desugar_jdk_libs_json,
          '--desugared-lib-pg-conf-output',
          options.desugared_library_keep_rule_output,
      ]

    if options.min_api:
      cmd += ['--min-api', options.min_api]

    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']

    for lib in libraries:
      cmd += ['--lib', lib]

    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]

    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]

    base_jars = set(base_dex_context.input_paths)
    # If a jar is present in multiple features, it should be moved to the base
    # module.
    all_feature_jars = set()
    for feature in feature_contexts:
      base_jars.update(all_feature_jars.intersection(feature.input_paths))
      all_feature_jars.update(feature.input_paths)

    # Each jar ends up in exactly one --feature (first feature that lists it)
    # or in the base module.
    module_input_jars = base_jars.copy()
    for feature in feature_contexts:
      feature_input_jars = [
          p for p in feature.input_paths if p not in module_input_jars
      ]
      module_input_jars.update(feature_input_jars)
      for in_jar in feature_input_jars:
        cmd += ['--feature', in_jar, feature.staging_dir]

    cmd += sorted(base_jars)
    # Add any extra input jars to the base module (e.g. desugar runtime).
    extra_jars = set(options.input_paths) - module_input_jars
    cmd += sorted(extra_jars)

    try:
      stderr_filter = dex.CreateStderrFilter(
          options.show_desugar_default_interface_warnings)
      logging.debug('Running R8')
      build_utils.CheckOutput(cmd,
                              print_stdout=print_stdout,
                              stderr_filter=stderr_filter,
                              fail_on_output=options.warnings_as_errors)
    except build_utils.CalledProcessError as err:
      # Re-raise with a pointer to the debugging docs appended to the output.
      debugging_link = ('\n\nR8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      raise build_utils.CalledProcessError(err.cwd, err.args,
                                           err.output + debugging_link)

    base_has_imported_lib = False
    if options.desugar_jdk_libs_json:
      # L8 emits the desugared JDK library as one extra classesN.dex appended
      # after the files R8 already produced.
      logging.debug('Running L8')
      existing_files = build_utils.FindInDirectory(
          base_dex_context.staging_dir)
      jdk_dex_output = os.path.join(
          base_dex_context.staging_dir,
          'classes%d.dex' % (len(existing_files) + 1))
      base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
          options.r8_path, options.min_api, options.desugar_jdk_libs_json,
          options.desugar_jdk_libs_jar,
          options.desugar_jdk_libs_configuration_jar,
          options.desugared_library_keep_rule_output, jdk_dex_output,
          options.warnings_as_errors)

    logging.debug('Collecting ouputs')
    base_dex_context.CreateOutput(base_has_imported_lib,
                                  options.desugared_library_keep_rule_output)
    for feature in feature_contexts:
      feature.CreateOutput()

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))
def main():
  """Entry point for the Android lint wrapper (Python 2 era).

  Parses options, and when --enable is set, gathers source/resource inputs
  and runs lint via an md5-staleness check; otherwise the stamp handling in
  _OnStaleMd5 is effectively a no-op touch.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--lint-path', help='Path to lint executable.')
  parser.add_option('--config-path', help='Path to lint suppressions file.')
  parser.add_option('--processed-config-path',
                    help='Path to processed lint suppressions file.')
  parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
  parser.add_option('--result-path', help='Path to XML lint result file.')
  parser.add_option('--product-dir', help='Path to product dir.')
  parser.add_option('--src-dirs', help='Directories containing java files.')
  parser.add_option('--java-files', help='Paths to java files.')
  parser.add_option('--jar-path', help='Jar file containing class files.')
  parser.add_option('--resource-dir', help='Path to resource dir.')
  parser.add_option('--can-fail-build', action='store_true',
                    help='If set, script will exit with nonzero exit status'
                    ' if lint errors are present')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--enable', action='store_true',
                    help='Run lint instead of just touching stamp.')

  options, _ = parser.parse_args()

  build_utils.CheckOptions(options, parser, required=[
      'lint_path', 'config_path', 'processed_config_path', 'manifest_path',
      'result_path', 'product_dir', 'jar_path'])

  if options.enable:
    # Sources come either from directories (globbed) or an explicit list;
    # exactly one of the two must be provided.
    sources = []
    if options.src_dirs:
      src_dirs = build_utils.ParseGypList(options.src_dirs)
      sources = build_utils.FindInDirectories(src_dirs, '*.java')
    elif options.java_files:
      sources = build_utils.ParseGypList(options.java_files)
    else:
      print 'One of --src-dirs or --java-files must be specified.'
      return 1

    # Everything lint reads participates in the staleness check.
    input_paths = [
        options.lint_path,
        options.config_path,
        options.manifest_path,
        options.jar_path,
    ]
    input_paths.extend(sources)
    if options.resource_dir:
      input_paths.extend(
          build_utils.FindInDirectory(options.resource_dir, '*'))

    input_strings = [options.processed_config_path]
    output_paths = [options.result_path]

    build_utils.CallAndWriteDepfileIfStale(
        lambda changes: _OnStaleMd5(changes, options.lint_path,
                                    options.config_path,
                                    options.processed_config_path,
                                    options.manifest_path,
                                    options.result_path,
                                    options.product_dir, sources,
                                    options.jar_path,
                                    resource_dir=options.resource_dir,
                                    can_fail_build=options.can_fail_build),
        options,
        input_paths=input_paths,
        input_strings=input_strings,
        output_paths=output_paths,
        pass_changes=True)
def _RunCompiler(options,
                 javac_cmd,
                 java_files,
                 classpath,
                 jar_path,
                 save_outputs=True):
  """Runs javac (or Error Prone) over |java_files| and packages the result.

  Args:
    options: Parsed options (srcjars, processors, errorprone flags, etc.).
    javac_cmd: Base compiler command; output-dir args are appended here.
    java_files: .java paths to compile; extended in place with srcjar sources.
    classpath: Entries joined with ':' for -classpath.
    jar_path: Final output jar; also used to derive the staging dir.
    save_outputs: NOTE(review): immediately overwritten below from
        options.enable_errorprone — the parameter default appears unused;
        confirm against callers.
  """
  logging.info('Starting _RunCompiler')

  # Compiles with Error Prone take twice as long to run as pure javac. Thus GN
  # rules run both in parallel, with Error Prone only used for checks.
  save_outputs = not options.enable_errorprone

  # Use jar_path's directory to ensure paths are relative (needed for goma).
  temp_dir = jar_path + '.staging'
  shutil.rmtree(temp_dir, True)
  os.makedirs(temp_dir)
  try:
    classes_dir = os.path.join(temp_dir, 'classes')
    service_provider_configuration = os.path.join(
        temp_dir, 'service_provider_configuration')

    if save_outputs:
      # Outputs are kept under options.generated_dir so codesearch can see
      # generated sources.
      input_srcjars_dir = os.path.join(options.generated_dir, 'input_srcjars')
      annotation_processor_outputs_dir = os.path.join(
          options.generated_dir, 'annotation_processor_outputs')
      # Delete any stale files in the generated directory. The purpose of
      # options.generated_dir is for codesearch.
      shutil.rmtree(options.generated_dir, True)
      info_file_context = _InfoFileContext(options.chromium_code,
                                           options.jar_info_exclude_globs)
    else:
      input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars')
      annotation_processor_outputs_dir = os.path.join(
          temp_dir, 'annotation_processor_outputs')

    if options.java_srcjars:
      logging.info('Extracting srcjars to %s', input_srcjars_dir)
      build_utils.MakeDirectory(input_srcjars_dir)
      for srcjar in options.java_srcjars:
        extracted_files = build_utils.ExtractAll(
            srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java')
        java_files.extend(extracted_files)
        if save_outputs:
          info_file_context.AddSrcJarSources(srcjar, extracted_files,
                                             input_srcjars_dir)
      logging.info('Done extracting srcjars')

    if options.header_jar:
      logging.info('Extracting service provider configs')
      # Extract META-INF/services/* so that it can be copied into the output
      # .jar
      build_utils.ExtractAll(options.header_jar,
                             no_clobber=True,
                             path=service_provider_configuration,
                             pattern='META-INF/services/*')
      logging.info('Done extracting service provider configs')

    if save_outputs and java_files:
      info_file_context.SubmitFiles(java_files)

    if java_files:
      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = list(javac_cmd)
      os.makedirs(classes_dir)
      cmd += ['-d', classes_dir]

      if options.processors:
        os.makedirs(annotation_processor_outputs_dir)
        cmd += ['-s', annotation_processor_outputs_dir]

      if classpath:
        cmd += ['-classpath', ':'.join(classpath)]

      # Pass source paths as response files to avoid extremely long command
      # lines that are tedius to debug.
      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
      with open(java_files_rsp_path, 'w') as f:
        f.write(' '.join(java_files))
      cmd += ['@' + java_files_rsp_path]

      # |errorprone_expected_warning_regex| is used in tests for errorprone
      # warnings. Fail compile if expected warning is not present.
      stderr_filter = ProcessJavacOutput
      if (options.enable_errorprone
          and options.errorprone_expected_warning_regex):
        stderr_filter = functools.partial(
            CheckErrorproneStderrWarning, options.jar_path,
            options.errorprone_expected_warning_regex)

      logging.debug('Build command %s', cmd)
      start = time.time()
      build_utils.CheckOutput(cmd,
                              print_stdout=options.chromium_code,
                              stdout_filter=ProcessJavacOutput,
                              stderr_filter=stderr_filter,
                              fail_on_output=options.warnings_as_errors)
      end = time.time() - start
      logging.info('Java compilation took %ss', end)

    if save_outputs:
      if options.processors:
        # Annotation processors may have generated extra sources; record them
        # in the .info file alongside the hand-written ones.
        annotation_processor_java_files = build_utils.FindInDirectory(
            annotation_processor_outputs_dir)
        if annotation_processor_java_files:
          info_file_context.SubmitFiles(annotation_processor_java_files)

      _CreateJarFile(jar_path, service_provider_configuration,
                     options.additional_jar_files, classes_dir)

      info_file_context.Commit(jar_path + '.info')
    else:
      # Error Prone runs are check-only: just stamp the output.
      build_utils.Touch(jar_path)

    logging.info('Completed all steps in _RunCompiler')
  finally:
    shutil.rmtree(temp_dir)
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                runtime_classpath):
  """Compiles java (optionally incrementally via jmake) and jars the result.

  Args:
    changes: Change information from the md5 staleness check.
    options: Parsed options (jar_path, incremental, srcjars, etc.).
    javac_cmd: Base compiler command.
    java_files: Explicit .java inputs; extended in place with srcjar sources.
    classpath_inputs: Paths whose modification forces a full recompile.
    runtime_classpath: Used when writing the jar manifest.
  """
  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars
    # The .excluded.jar contains .class files excluded from the main jar.
    # It is used for incremental compiles.
    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    changed_paths = None
    # jmake can handle deleted files, but it's a rare case and it would
    # complicate this script's logic.
    if options.incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      # jmake doesn't seem to do this on its own... Might be that ijars mess
      # up its change-detection logic.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None

    if options.incremental:
      # jmake is a compiler wrapper that figures out the minimal set of .java
      # files that need to be rebuilt given a set of .java files that have
      # changed.
      # jmake determines what files are stale based on timestamps between
      # .java and .class files. Since we use .jars, .srcjars, and md5 checks,
      # timestamp info isn't accurate for this purpose. Rather than use
      # jmake's programatic interface (like we eventually should), we ensure
      # that all .class files are newer than their .java files, and convey to
      # jmake which sources are stale by having their .class files be missing
      # entirely (by not extracting them).
      pdb_path = options.jar_path + '.pdb'
      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
      if srcjars:
        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        if changed_paths:
          # Track which extracted sources correspond to changed srcjar
          # entries so they are treated as stale below.
          changed_paths.update(os.path.join(java_dir, f)
                               for f in changes.IterChangedSubpaths(srcjar))
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
      java_files.extend(jar_srcs)
      if changed_paths:
        # Set the mtime of all sources to 0 since we use the absense of
        # .class files to tell jmake which files are stale.
        for path in jar_srcs:
          os.utime(path, (0, 0))

    if java_files:
      if changed_paths:
        changed_java_files = [p for p in java_files if p in changed_paths]
        # Re-populate classes_dir with the .class files of UNCHANGED sources;
        # stale sources are left without .class files so jmake recompiles
        # them.
        if os.path.exists(options.jar_path):
          _ExtractClassFiles(options.jar_path, classes_dir,
                             changed_java_files)
        if os.path.exists(excluded_jar_path):
          _ExtractClassFiles(excluded_jar_path, classes_dir,
                             changed_java_files)
        # Add the extracted files to the classpath. This is required because
        # when compiling only a subset of files, classes that haven't changed
        # need to be findable.
        classpath_idx = javac_cmd.index('-classpath')
        javac_cmd[classpath_idx + 1] += ':' + classes_dir

      # Don't include the output directory in the initial set of args since
      # it being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir] + java_files

      # JMake prints out some diagnostic logs that we want to ignore.
      # This assumes that all compiler output goes through stderr.
      stdout_filter = lambda s: ''
      if md5_check.PRINT_EXPLANATIONS:
        stdout_filter = None

      build_utils.CheckOutput(
          cmd,
          print_stdout=options.chromium_code,
          stdout_filter=stdout_filter,
          stderr_filter=ColorJavacOutput)

    if options.main_class or options.manifest_entry:
      entries = []
      if options.manifest_entry:
        # Each entry is "Key:Value".
        entries = [e.split(':') for e in options.manifest_entry]
      manifest_file = os.path.join(temp_dir, 'manifest')
      _CreateManifest(manifest_file, runtime_classpath, options.main_class,
                      entries)
    else:
      manifest_file = None

    # Split outputs: classes matching jar_excluded_classes go into the
    # .excluded.jar, everything else into the main jar.
    glob = options.jar_excluded_classes
    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
    exclusion_predicate = lambda f: not inclusion_predicate(f)

    jar.JarDirectory(classes_dir,
                     options.jar_path,
                     manifest_file=manifest_file,
                     predicate=inclusion_predicate)
    jar.JarDirectory(classes_dir,
                     excluded_jar_path,
                     predicate=exclusion_predicate)
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
  """Runs R8 to optimize/dex the given inputs, then collects the outputs.

  Older variant: R8 options are passed through the _JAVA_OPTIONS environment
  variable rather than as JVM -D flags on the command line.

  Args:
    options: Parsed command-line options (r8 path, inputs, feature splits,
        desugaring settings, etc.).
    config_paths: Proguard config files to pass via --pg-conf.
    libraries: Library jars passed via --lib.
    dynamic_config_data: Extra proguard config generated at build time; written
        to a temp file and appended to |config_paths|.
    print_stdout: Whether to echo R8's stdout.
  """
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      with open(tmp_config_path, 'w') as f:
        f.write(dynamic_config_data)
      config_paths = config_paths + [tmp_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    # Build one _DexPathContext per feature split; the 'base' split (explicit
    # or implicit) is tracked separately.
    feature_contexts = []
    if options.feature_names:
      for name, dest_dex, input_paths in zip(
          options.feature_names, options.dex_dests, options.feature_jars):
        feature_context = _DexPathContext(name, dest_dex, input_paths,
                                          tmp_output)
        if name == 'base':
          base_dex_context = feature_context
        else:
          feature_contexts.append(feature_context)
    else:
      base_dex_context = _DexPathContext('base', options.output_path,
                                         options.input_paths, tmp_output)

    cmd = [
        build_utils.JAVA_PATH,
        '-cp',
        options.r8_path,
        'com.android.tools.r8.R8',
        '--no-data-resources',
        '--output',
        base_dex_context.staging_dir,
        '--pg-map-output',
        tmp_mapping_path,
    ]

    if options.desugar_jdk_libs_json:
      cmd += [
          '--desugared-lib',
          options.desugar_jdk_libs_json,
          '--desugared-lib-pg-conf-output',
          options.desugared_library_keep_rule_output,
      ]

    if options.min_api:
      cmd += ['--min-api', options.min_api]

    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']

    for lib in libraries:
      cmd += ['--lib', lib]

    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]

    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]

    # Each jar ends up in exactly one --feature (first feature that lists it)
    # or in the base module.
    module_input_jars = set(base_dex_context.input_paths)
    for feature in feature_contexts:
      feature_input_jars = [
          p for p in feature.input_paths if p not in module_input_jars
      ]
      module_input_jars.update(feature_input_jars)
      for in_jar in feature_input_jars:
        cmd += ['--feature', in_jar, feature.staging_dir]

    cmd += base_dex_context.input_paths
    # Add any extra input jars to the base module (e.g. desugar runtime).
    extra_jars = set(options.input_paths) - module_input_jars
    cmd += sorted(extra_jars)

    # R8 flags are injected via _JAVA_OPTIONS; the JVM echoes that variable to
    # stderr, so filter those lines back out.
    env = os.environ.copy()
    stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
    env['_JAVA_OPTIONS'] = '-Dcom.android.tools.r8.allowTestProguardOptions=1'
    if options.disable_outlining:
      env['_JAVA_OPTIONS'] += ' -Dcom.android.tools.r8.disableOutlining=1'

    try:
      build_utils.CheckOutput(
          cmd, env=env, print_stdout=print_stdout,
          stderr_filter=stderr_filter)
    except build_utils.CalledProcessError as err:
      # Re-raise with a pointer to the debugging docs.
      debugging_link = ('R8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      raise ProguardProcessError(err, debugging_link)

    base_has_imported_lib = False
    if options.desugar_jdk_libs_json:
      # L8 emits the desugared JDK library as one extra classesN.dex appended
      # after the files R8 already produced.
      existing_files = build_utils.FindInDirectory(
          base_dex_context.staging_dir)
      base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
          options.r8_path, options.min_api, options.desugar_jdk_libs_json,
          options.desugar_jdk_libs_jar,
          options.desugared_library_keep_rule_output,
          os.path.join(base_dex_context.staging_dir,
                       'classes%d.dex' % (len(existing_files) + 1)))

    base_dex_context.CreateOutput(base_has_imported_lib,
                                  options.desugared_library_keep_rule_output)
    for feature in feature_contexts:
      feature.CreateOutput()

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))
def ZipDir(input_dir, output):
  """Zips everything under |input_dir| into |output|, using paths relative to
  |input_dir| as archive names."""
  with zipfile.ZipFile(output, 'w') as zip_file:
    for src in build_utils.FindInDirectory(input_dir, '*'):
      zip_file.write(src, os.path.relpath(src, input_dir))
def _OptimizeWithR8(options, config_paths, libraries, dynamic_config_data, print_stdout=False):
  """Runs R8 over the inputs, handling feature splits and desugared JDK libs.

  Assembles the full R8 command (JVM flags, diagnostics mapping, desugaring,
  per-split --feature args), runs it, optionally runs L8 for the desugared
  JDK library, then collects staged outputs and writes a comment-stripped
  mapping file.

  Args:
    options: Parsed command-line options.
    config_paths: ProGuard/R8 config file paths passed via --pg-conf.
    libraries: Library jars passed via --lib.
    dynamic_config_data: Build-time-generated config text, written to a temp
        file and appended to |config_paths|.
    print_stdout: Whether to echo R8's stdout.

  Returns:
    The base module's _SplitContext.
  """
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      dynamic_config_path = os.path.join(tmp_dir, 'dynamic_config.flags')
      with open(dynamic_config_path, 'w') as f:
        f.write(dynamic_config_data)
      # Copy rather than mutate the caller's list.
      config_paths = config_paths + [dynamic_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    # Build a context per split; splits other than 'base' default their
    # parent to 'base' unless --uses-split says otherwise.
    split_contexts_by_name = {}
    if options.feature_names:
      for name, dest_dex, input_jars in zip(options.feature_names,
                                            options.dex_dests,
                                            options.feature_jars):
        parent_name = options.uses_split.get(name)
        if parent_name is None and name != 'base':
          parent_name = 'base'
        split_context = _SplitContext(name, dest_dex, input_jars, tmp_output,
                                      parent_name=parent_name)
        split_contexts_by_name[name] = split_context
    else:
      # Base context will get populated via "extra_jars" below.
      split_contexts_by_name['base'] = _SplitContext('base',
                                                     options.output_path, [],
                                                     tmp_output)
    base_context = split_contexts_by_name['base']

    # R8 OOMs with the default xmx=1G.
    cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [
        '-Dcom.android.tools.r8.allowTestProguardOptions=1',
        '-Dcom.android.tools.r8.disableHorizontalClassMerging=1',
    ]
    if options.disable_outlining:
      cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
    if options.dump_inputs:
      cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
    cmd += [
        '-cp',
        options.r8_path,
        'com.android.tools.r8.R8',
        '--no-data-resources',
        '--output',
        base_context.staging_dir,
        '--pg-map-output',
        tmp_mapping_path,
    ]

    if options.disable_checks:
      # Info level priority logs are not printed by default.
      cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info']

    if options.desugar_jdk_libs_json:
      cmd += [
          '--desugared-lib',
          options.desugar_jdk_libs_json,
          '--desugared-lib-pg-conf-output',
          options.desugared_library_keep_rule_output,
      ]

    if options.min_api:
      cmd += ['--min-api', options.min_api]

    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']

    for lib in libraries:
      cmd += ['--lib', lib]

    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]

    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]

    _DeDupeInputJars(split_contexts_by_name)

    # Add any extra inputs to the base context (e.g. desugar runtime).
    extra_jars = set(options.input_paths)
    for split_context in split_contexts_by_name.values():
      extra_jars -= split_context.input_jars
    base_context.input_jars.update(extra_jars)

    # Non-base splits are passed via --feature; base jars are positional.
    for split_context in split_contexts_by_name.values():
      if split_context is base_context:
        continue
      for in_jar in sorted(split_context.input_jars):
        cmd += ['--feature', in_jar, split_context.staging_dir]

    cmd += sorted(base_context.input_jars)

    try:
      stderr_filter = dex.CreateStderrFilter(
          options.show_desugar_default_interface_warnings)
      logging.debug('Running R8')
      build_utils.CheckOutput(cmd,
                              print_stdout=print_stdout,
                              stderr_filter=stderr_filter,
                              fail_on_output=options.warnings_as_errors)
    except build_utils.CalledProcessError as err:
      debugging_link = ('\n\nR8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      # Re-raise with the docs link appended to the captured output.
      raise build_utils.CalledProcessError(err.cwd, err.args,
                                           err.output + debugging_link)

    base_has_imported_lib = False
    if options.desugar_jdk_libs_json:
      logging.debug('Running L8')
      # Dex the desugared JDK library into the next free classesN.dex slot.
      existing_files = build_utils.FindInDirectory(base_context.staging_dir)
      jdk_dex_output = os.path.join(base_context.staging_dir,
                                    'classes%d.dex' % (len(existing_files) + 1))
      # Use -applymapping to avoid name collisions.
      l8_dynamic_config_path = os.path.join(tmp_dir, 'l8_dynamic_config.flags')
      with open(l8_dynamic_config_path, 'w') as f:
        f.write("-applymapping '{}'\n".format(tmp_mapping_path))
      # Pass the dynamic config so that obfuscation options are picked up.
      l8_config_paths = [dynamic_config_path, l8_dynamic_config_path]
      if os.path.exists(options.desugared_library_keep_rule_output):
        l8_config_paths.append(options.desugared_library_keep_rule_output)

      base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
          options.r8_path, options.min_api, options.desugar_jdk_libs_json,
          options.desugar_jdk_libs_jar,
          options.desugar_jdk_libs_configuration_jar, jdk_dex_output,
          options.warnings_as_errors, l8_config_paths)
      if int(options.min_api) >= 24 and base_has_imported_lib:
        # Dump offending desugared signatures before failing the build.
        with open(jdk_dex_output, 'rb') as f:
          dexfile = dex_parser.DexFile(bytearray(f.read()))
          for m in dexfile.IterMethodSignatureParts():
            print('{}#{}'.format(m[0], m[2]))
        assert False, (
            'Desugared JDK libs are disabled on Monochrome and newer - see '
            'crbug.com/1159984 for details, and see above list for desugared '
            'classes and methods.')

    logging.debug('Collecting ouputs')
    base_context.CreateOutput(base_has_imported_lib,
                              options.desugared_library_keep_rule_output)
    for split_context in split_contexts_by_name.values():
      if split_context is not base_context:
        split_context.CreateOutput()

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))
  return base_context
def main():
  """Entry point for the lint wrapper.

  Parses arguments and, when --enable is set, gathers lint inputs (sources,
  manifest, resources, config files) and runs lint via
  CallAndWriteDepfileIfStale so it only re-runs when inputs change.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--lint-path', required=True,
                      help='Path to lint executable.')
  parser.add_argument('--product-dir', required=True,
                      help='Path to product dir.')
  parser.add_argument('--result-path', required=True,
                      help='Path to XML lint result file.')
  parser.add_argument(
      '--cache-dir', required=True,
      help='Path to the directory in which the android cache '
      'directory tree should be stored.')
  parser.add_argument('--platform-xml-path', required=True,
                      help='Path to api-platforms.xml')
  parser.add_argument(
      '--create-cache', action='store_true',
      help='Mark the lint cache file as an output rather than '
      'an input.')
  parser.add_argument(
      '--can-fail-build', action='store_true',
      help='If set, script will exit with nonzero exit status'
      ' if lint errors are present')
  parser.add_argument('--config-path',
                      help='Path to lint suppressions file.')
  parser.add_argument('--enable', action='store_true',
                      help='Run lint instead of just touching stamp.')
  parser.add_argument('--jar-path',
                      help='Jar file containing class files.')
  parser.add_argument('--java-files',
                      help='Paths to java files.')
  parser.add_argument('--manifest-path',
                      help='Path to AndroidManifest.xml')
  parser.add_argument('--processed-config-path',
                      help='Path to processed lint suppressions file.')
  parser.add_argument('--resource-dir',
                      help='Path to resource dir.')
  parser.add_argument('--silent', action='store_true',
                      help='If set, script will not log anything.')
  parser.add_argument('--src-dirs',
                      help='Directories containing java files.')
  parser.add_argument('--stamp',
                      help='Path to touch on success.')

  args = parser.parse_args()

  if args.enable:
    # Collect java sources either from directories or an explicit list.
    sources = []
    if args.src_dirs:
      src_dirs = build_utils.ParseGypList(args.src_dirs)
      sources = build_utils.FindInDirectories(src_dirs, '*.java')
    elif args.java_files:
      sources = build_utils.ParseGypList(args.java_files)

    # --config-path and --processed-config-path must be given together.
    if args.config_path and not args.processed_config_path:
      parser.error('--config-path specified without --processed-config-path')
    elif args.processed_config_path and not args.config_path:
      parser.error('--processed-config-path specified without --config-path')

    input_paths = [
        args.lint_path,
        args.platform_xml_path,
    ]
    if args.config_path:
      input_paths.append(args.config_path)
    if args.jar_path:
      input_paths.append(args.jar_path)
    if args.manifest_path:
      input_paths.append(args.manifest_path)
    if args.resource_dir:
      input_paths.extend(build_utils.FindInDirectory(args.resource_dir, '*'))
    if sources:
      input_paths.extend(sources)

    input_strings = []
    if args.processed_config_path:
      input_strings.append(args.processed_config_path)

    output_paths = [args.result_path]

    build_utils.CallAndWriteDepfileIfStale(
        lambda changes: _OnStaleMd5(changes, args.lint_path,
                                    args.config_path,
                                    args.processed_config_path,
                                    args.manifest_path, args.result_path,
                                    args.product_dir, sources,
                                    args.jar_path,
                                    args.cache_dir,
                                    resource_dir=args.resource_dir,
                                    can_fail_build=args.can_fail_build,
                                    silent=args.silent),
        args,
        input_paths=input_paths,
        input_strings=input_strings,
        output_paths=output_paths,
        pass_changes=True)
def _OptimizeWithR8(options, config_paths, libraries, dynamic_config_data, print_stdout=False):
  """Runs R8 (via -jar) over the input jars and collects its outputs.

  Staged output is either repacked into an uncompressed .jar or, when
  --output-path ends in .dex, moved directly as a single dex file. Also
  writes a comment-stripped copy of the generated mapping file.

  Args:
    options: Parsed command-line options.
    config_paths: ProGuard/R8 config file paths passed via --pg-conf.
    libraries: Library jars passed via --lib.
    dynamic_config_data: Build-time-generated config text, written to a temp
        file and appended to |config_paths|.
    print_stdout: Whether to echo R8's stdout.

  Raises:
    ProguardProcessError: If the R8 invocation fails (with a docs link).
  """
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      with open(tmp_config_path, 'w') as f:
        f.write(dynamic_config_data)
      # Copy rather than mutate the caller's list.
      config_paths = config_paths + [tmp_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    cmd = [
        build_utils.JAVA_PATH,
        '-jar',
        options.r8_path,
        '--no-data-resources',
        '--output',
        tmp_output,
        '--pg-map-output',
        tmp_mapping_path,
    ]

    for lib in libraries:
      cmd += ['--lib', lib]

    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]

    if options.min_api:
      cmd += ['--min-api', options.min_api]

    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']

    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]

    cmd += options.input_paths

    env = os.environ.copy()
    # Drop the JVM's "Picked up _JAVA_OPTIONS" noise from stderr.
    stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
    env['_JAVA_OPTIONS'] = '-Dcom.android.tools.r8.allowTestProguardOptions=1'
    if options.disable_outlining:
      env['_JAVA_OPTIONS'] += ' -Dcom.android.tools.r8.disableOutlining=1'

    try:
      build_utils.CheckOutput(cmd, env=env, print_stdout=print_stdout,
                              stderr_filter=stderr_filter)
    except build_utils.CalledProcessError as err:
      debugging_link = ('R8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      raise ProguardProcessError(err, debugging_link)

    found_files = build_utils.FindInDirectory(tmp_output)
    if not options.output_path.endswith('.dex'):
      # Add to .jar using Python rather than having R8 output to a .zip directly
      # in order to disable compression of the .jar, saving ~500ms.
      tmp_jar_output = tmp_output + '.jar'
      build_utils.DoZip(found_files, tmp_jar_output, base_dir=tmp_output)
      shutil.move(tmp_jar_output, options.output_path)
    else:
      # A .dex output must be exactly one file.
      if len(found_files) > 1:
        raise Exception('Too many files created: {}'.format(found_files))
      shutil.move(found_files[0], options.output_path)

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))
def main(args):
  """Entry point for resource processing.

  Expands file args, computes the full set of input/output paths and input
  strings (so the md5-staleness check catches every relevant change), then
  runs _OnStaleMd5 via CallAndWriteDepfileIfStale.

  Args:
    args: Raw command-line arguments (before file-arg expansion).
  """
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  # Order of these must match order specified in GN so that the correct one
  # appears first in the depfile.
  possible_output_paths = [
      options.apk_path,
      options.resource_zip_out,
      options.r_text_out,
      options.srcjar_out,
      options.proguard_file,
      options.proguard_file_main_dex,
  ]
  output_paths = [x for x in possible_output_paths if x]

  if options.apk_path and options.create_density_splits:
    for _, dst_path in _GenerateDensitySplitPaths(options.apk_path):
      output_paths.append(dst_path)
  if options.apk_path and options.language_splits:
    output_paths.extend(
        _GenerateLanguageSplitOutputPaths(options.apk_path,
                                          options.language_splits))

  # List python deps in input_strings rather than input_paths since the contents
  # of them does not change what gets written to the depsfile.
  input_strings = options.extra_res_packages + [
      options.app_as_shared_lib,
      options.custom_package,
      options.non_constant_id,
      options.shared_resources,
      options.v14_skip,
      options.exclude_xxxhdpi,
      options.xxxhdpi_whitelist,
      str(options.debuggable),
      str(options.png_to_webp),
      str(options.support_zh_hk),
      str(options.no_xml_namespaces),
  ]

  if options.apk_path:
    input_strings.extend(_CreateLinkApkArgs(options))

  possible_input_paths = [
      options.aapt_path,
      options.android_manifest,
      options.android_sdk_jar,
      options.shared_resources_whitelist,
  ]
  input_paths = [x for x in possible_input_paths if x]
  input_paths.extend(options.dependencies_res_zips)
  input_paths.extend(options.extra_r_text_files)

  # Resource files aren't explicitly listed in GN. Listing them in the depfile
  # ensures the target will be marked stale when resource files are removed.
  depfile_deps = []
  resource_names = []
  for resource_dir in options.resource_dirs:
    for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
      # Don't list the empty .keep file in depfile. Since it doesn't end up
      # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
      # if ever moved.
      if not resource_file.endswith(os.path.join('empty', '.keep')):
        input_paths.append(resource_file)
        depfile_deps.append(resource_file)
      resource_names.append(os.path.relpath(resource_file, resource_dir))

  # Resource filenames matter to the output, so add them to strings as well.
  # This matters if a file is renamed but not changed (http://crbug.com/597126).
  input_strings.extend(sorted(resource_names))

  build_utils.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(options),
      options,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      depfile_deps=depfile_deps)
def _OnStaleMd5(options, javac_cmd, java_files, classpath):
  """Compiles java sources and packages the result into a .jar.

  Extracts srcjars, runs javac (via a response file) into a staging
  directory next to the output jar, then either builds the .jar and .info
  outputs (normal builds) or just touches the jar (Error Prone check-only
  builds).

  Args:
    options: Parsed command-line options.
    javac_cmd: Base javac command (list) to extend with -d/-s/-classpath.
    java_files: List of .java paths; extended in place with srcjar sources.
    classpath: Compile classpath entries.
  """
  logging.info('Starting _OnStaleMd5')

  # Compiles with Error Prone take twice as long to run as pure javac. Thus GN
  # rules run both in parallel, with Error Prone only used for checks.
  save_outputs = not options.enable_errorprone

  # Use jar_path's directory to ensure paths are relative (needed for goma).
  temp_dir = options.jar_path + '.staging'
  shutil.rmtree(temp_dir, True)
  os.makedirs(temp_dir)
  try:
    classes_dir = os.path.join(temp_dir, 'classes')

    if save_outputs:
      input_srcjars_dir = os.path.join(options.generated_dir, 'input_srcjars')
      annotation_processor_outputs_dir = os.path.join(
          options.generated_dir, 'annotation_processor_outputs')
      # Delete any stale files in the generated directory. The purpose of
      # options.generated_dir is for codesearch.
      shutil.rmtree(options.generated_dir, True)
      info_file_context = _InfoFileContext(options.chromium_code,
                                           options.jar_info_exclude_globs)
    else:
      input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars')
      annotation_processor_outputs_dir = os.path.join(
          temp_dir, 'annotation_processor_outputs')

    if options.java_srcjars:
      logging.info('Extracting srcjars to %s', input_srcjars_dir)
      build_utils.MakeDirectory(input_srcjars_dir)
      for srcjar in options.java_srcjars:
        extracted_files = build_utils.ExtractAll(
            srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java')
        java_files.extend(extracted_files)
        if save_outputs:
          info_file_context.AddSrcJarSources(srcjar, extracted_files,
                                             input_srcjars_dir)
      logging.info('Done extracting srcjars')

    if save_outputs and java_files:
      info_file_context.SubmitFiles(java_files)

    if java_files:
      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = list(javac_cmd)
      cmd += ['-d', classes_dir]
      cmd += ['-s', annotation_processor_outputs_dir]

      # Pass classpath and source paths as response files to avoid extremely
      # long command lines that are tedious to debug.
      if classpath:
        cmd += ['-classpath', ':'.join(classpath)]

      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
      with open(java_files_rsp_path, 'w') as f:
        f.write(' '.join(java_files))
      cmd += ['@' + java_files_rsp_path]

      logging.debug('Build command %s', cmd)
      os.makedirs(classes_dir)
      os.makedirs(annotation_processor_outputs_dir)
      build_utils.CheckOutput(cmd,
                              print_stdout=options.chromium_code,
                              stderr_filter=ProcessJavacOutput)
      logging.info('Finished build command')

    if save_outputs:
      annotation_processor_java_files = build_utils.FindInDirectory(
          annotation_processor_outputs_dir)
      if annotation_processor_java_files:
        info_file_context.SubmitFiles(annotation_processor_java_files)

      _CreateJarFile(options.jar_path, options.provider_configurations,
                     options.additional_jar_files, classes_dir)

      info_file_context.Commit(options.jar_path + '.info')
    else:
      # Error Prone check-only builds produce no jar; just stamp it.
      build_utils.Touch(options.jar_path)

    logging.info('Completed all steps in _OnStaleMd5')
  finally:
    shutil.rmtree(temp_dir)
def main(argv):
  """Entry point for the javac wrapper (optparse/GYP-era variant).

  Parses options, gathers .java sources (args, gendirs, srcjars), compiles
  them, optionally jars the result with a manifest, copies classes out, and
  writes the depfile/stamp.

  Args:
    argv: Raw command-line arguments (before file-arg expansion).
  """
  colorama.init()

  argv = build_utils.ExpandFileArgs(argv)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option(
      '--src-gendirs',
      help='Directories containing generated java files.')
  parser.add_option(
      '--java-srcjars',
      action='append',
      default=[],
      help='List of srcjars to include in compilation.')
  parser.add_option(
      '--bootclasspath',
      action='append',
      default=[],
      help='Boot classpath for javac. If this is specified multiple times, '
      'they will all be appended to construct the classpath.')
  parser.add_option(
      '--classpath',
      action='append',
      help='Classpath for javac. If this is specified multiple times, they '
      'will all be appended to construct the classpath.')
  parser.add_option(
      '--javac-includes',
      help='A list of file patterns. If provided, only java files that match'
      'one of the patterns will be compiled.')
  parser.add_option(
      '--jar-excluded-classes',
      default='',
      help='List of .class file patterns to exclude from the jar.')
  parser.add_option(
      '--chromium-code',
      type='int',
      help='Whether code being compiled should be built with stricter '
      'warnings for chromium code.')
  parser.add_option(
      '--use-errorprone-path',
      help='Use the Errorprone compiler at this path.')
  parser.add_option(
      '--classes-dir',
      help='Directory for compiled .class files.')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option(
      '--main-class',
      help='The class containing the main method.')
  parser.add_option(
      '--manifest-entry',
      action='append',
      help='Key:value pairs to add to the .jar manifest.')
  parser.add_option('--stamp', help='Path to touch on success.')

  options, args = parser.parse_args(argv)

  if options.main_class and not options.jar_path:
    parser.error('--main-class requires --jar-path')

  # Flatten repeated GYP-list options into plain lists.
  bootclasspath = []
  for arg in options.bootclasspath:
    bootclasspath += build_utils.ParseGypList(arg)

  classpath = []
  for arg in options.classpath:
    classpath += build_utils.ParseGypList(arg)

  java_srcjars = []
  for arg in options.java_srcjars:
    java_srcjars += build_utils.ParseGypList(arg)

  java_files = args
  if options.src_gendirs:
    src_gendirs = build_utils.ParseGypList(options.src_gendirs)
    java_files += build_utils.FindInDirectories(src_gendirs, '*.java')

  input_files = bootclasspath + classpath + java_srcjars + java_files
  with build_utils.TempDir() as temp_dir:
    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)
    if java_srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in java_srcjars:
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      java_files += build_utils.FindInDirectory(java_dir, '*.java')

    if options.javac_includes:
      # Keep only sources matching at least one include pattern.
      javac_includes = build_utils.ParseGypList(options.javac_includes)
      filtered_java_files = []
      for f in java_files:
        for include in javac_includes:
          if fnmatch.fnmatch(f, include):
            filtered_java_files.append(f)
            break
      java_files = filtered_java_files

    if len(java_files) != 0:
      DoJavac(bootclasspath, classpath, classes_dir, options.chromium_code,
              options.use_errorprone_path, java_files)

    if options.jar_path:
      if options.main_class or options.manifest_entry:
        if options.manifest_entry:
          entries = map(lambda e: e.split(":"), options.manifest_entry)
        else:
          entries = []
        manifest_file = os.path.join(temp_dir, 'manifest')
        CreateManifest(manifest_file, classpath, options.main_class, entries)
      else:
        manifest_file = None
      jar.JarDirectory(classes_dir,
                       build_utils.ParseGypList(options.jar_excluded_classes),
                       options.jar_path, manifest_file=manifest_file)

    if options.classes_dir:
      # Delete the old classes directory. This ensures that all .class files in
      # the output are actually from the input .java files. For example, if a
      # .java file is deleted or an inner class is removed, the classes
      # directory should not contain the corresponding old .class file after
      # running this action.
      build_utils.DeleteDirectory(options.classes_dir)
      shutil.copytree(classes_dir, options.classes_dir)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        input_files + build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)
def _RunCompiler(options, javac_cmd, java_files, classpath, jar_path, save_outputs=True): logging.info('Starting _RunCompiler') # Compiles with Error Prone take twice as long to run as pure javac. Thus GN # rules run both in parallel, with Error Prone only used for checks. save_outputs = not options.enable_errorprone # Use jar_path's directory to ensure paths are relative (needed for goma). temp_dir = jar_path + '.staging' shutil.rmtree(temp_dir, True) os.makedirs(temp_dir) try: classes_dir = os.path.join(temp_dir, 'classes') transitive_classes = os.path.join(temp_dir, 'transitive_classes') if save_outputs: input_srcjars_dir = os.path.join(options.generated_dir, 'input_srcjars') annotation_processor_outputs_dir = os.path.join( options.generated_dir, 'annotation_processor_outputs') # Delete any stale files in the generated directory. The purpose of # options.generated_dir is for codesearch. shutil.rmtree(options.generated_dir, True) info_file_context = _InfoFileContext(options.chromium_code, options.jar_info_exclude_globs) else: input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars') annotation_processor_outputs_dir = os.path.join( temp_dir, 'annotation_processor_outputs') if options.java_srcjars: logging.info('Extracting srcjars to %s', input_srcjars_dir) build_utils.MakeDirectory(input_srcjars_dir) for srcjar in options.java_srcjars: extracted_files = build_utils.ExtractAll( srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java') java_files.extend(extracted_files) if save_outputs: info_file_context.AddSrcJarSources(srcjar, extracted_files, input_srcjars_dir) logging.info('Done extracting srcjars') if options.header_jar: logging.info('Extracting transitive classes to %s', transitive_classes) # Without the META-INF pattern prefix, it takes more than 4 seconds to # extract all the .class files from chrome_java's header jar. With the # prefix it takes 0.6 seconds. 
build_utils.ExtractAll(options.header_jar, no_clobber=True, path=transitive_classes, pattern='META-INF*.class') # Specifying the root directory is required, see: # https://docs.oracle.com/javase/8/docs/technotes/tools/findingclasses.html#userclass classpath.append( os.path.join(transitive_classes, 'META-INF', 'TRANSITIVE')) logging.info('Done extracting transitive classes') if save_outputs and java_files: info_file_context.SubmitFiles(java_files) if java_files: # Don't include the output directory in the initial set of args since it # being in a temp dir makes it unstable (breaks md5 stamping). cmd = list(javac_cmd) os.makedirs(classes_dir) cmd += ['-d', classes_dir] if options.processors: os.makedirs(annotation_processor_outputs_dir) cmd += ['-s', annotation_processor_outputs_dir] if classpath: cmd += ['-classpath', ':'.join(classpath)] # Pass source paths as response files to avoid extremely long command # lines that are tedius to debug. java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt') with open(java_files_rsp_path, 'w') as f: f.write(' '.join(java_files)) cmd += ['@' + java_files_rsp_path] logging.debug('Build command %s', cmd) start = time.time() build_utils.CheckOutput( cmd, print_stdout=options.chromium_code, stdout_filter=ProcessJavacOutput, stderr_filter=ProcessJavacOutput) end = time.time() - start logging.info('Java compilation took %ss', end) if save_outputs: if options.processors: annotation_processor_java_files = build_utils.FindInDirectory( annotation_processor_outputs_dir) if annotation_processor_java_files: info_file_context.SubmitFiles(annotation_processor_java_files) _CreateJarFile(jar_path, options.provider_configurations, options.additional_jar_files, classes_dir) info_file_context.Commit(jar_path + '.info') else: build_utils.Touch(jar_path) logging.info('Completed all steps in _RunCompiler') finally: shutil.rmtree(temp_dir)
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs):
  """Compiles java sources, optionally incrementally via jmake.

  Produces the main jar plus an '.excluded.jar' of classes filtered out by
  --jar-excluded-classes (the excluded jar also feeds later incremental
  compiles).

  Args:
    changes: Change information from the md5-staleness machinery.
    options: Parsed command-line options.
    javac_cmd: Base javac command (list); rebound/mutated for jmake mode.
    java_files: List of .java paths; extended in place with srcjar sources.
    classpath_inputs: Classpath files whose change forces a full compile.
  """
  incremental = options.incremental
  # Don't bother enabling incremental compilation for third_party code, since
  # _CheckPathMatchesClassName() fails on some of it, and it's not really much
  # benefit.
  for java_file in java_files:
    if 'third_party' in java_file:
      incremental = False
    else:
      _CheckPathMatchesClassName(java_file)

  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars
    # The .excluded.jar contains .class files excluded from the main jar.
    # It is used for incremental compiles.
    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    changed_paths = None
    # jmake can handle deleted files, but it's a rare case and it would
    # complicate this script's logic.
    if incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      # jmake doesn't seem to do this on its own... Might be that ijars mess up
      # its change-detection logic.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None

    if incremental:
      # jmake is a compiler wrapper that figures out the minimal set of .java
      # files that need to be rebuilt given a set of .java files that have
      # changed.
      # jmake determines what files are stale based on timestamps between .java
      # and .class files. Since we use .jars, .srcjars, and md5 checks,
      # timestamp info isn't accurate for this purpose. Rather than use jmake's
      # programmatic interface (like we eventually should), we ensure that all
      # .class files are newer than their .java files, and convey to jmake which
      # sources are stale by having their .class files be missing entirely
      # (by not extracting them).
      pdb_path = options.jar_path + '.pdb'
      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
      if srcjars:
        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        if changed_paths:
          changed_paths.update(
              os.path.join(java_dir, f)
              for f in changes.IterChangedSubpaths(srcjar))
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
      java_files.extend(jar_srcs)
      if changed_paths:
        # Set the mtime of all sources to 0 since we use the absence of .class
        # files to tell jmake which files are stale.
        for path in jar_srcs:
          os.utime(path, (0, 0))

    if java_files:
      if changed_paths:
        changed_java_files = [p for p in java_files if p in changed_paths]
        if os.path.exists(options.jar_path):
          _ExtractClassFiles(options.jar_path, classes_dir,
                             changed_java_files)
        if os.path.exists(excluded_jar_path):
          _ExtractClassFiles(excluded_jar_path, classes_dir,
                             changed_java_files)
        # Add the extracted files to the classpath. This is required because
        # when compiling only a subset of files, classes that haven't changed
        # need to be findable.
        classpath_idx = javac_cmd.index('-classpath')
        javac_cmd[classpath_idx + 1] += ':' + classes_dir

      # Can happen when a target goes from having no sources, to having sources.
      # It's created by the call to build_utils.Touch() below.
      if incremental:
        if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
          os.unlink(pdb_path)

      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir] + java_files

      # JMake prints out some diagnostic logs that we want to ignore.
      # This assumes that all compiler output goes through stderr.
      stdout_filter = lambda s: ''
      if md5_check.PRINT_EXPLANATIONS:
        stdout_filter = None

      attempt_build = lambda: build_utils.CheckOutput(
          cmd,
          print_stdout=options.chromium_code,
          stdout_filter=stdout_filter,
          stderr_filter=ColorJavacOutput)
      try:
        attempt_build()
      except build_utils.CalledProcessError as e:
        # Work-around for a bug in jmake (http://crbug.com/551449).
        if 'project database corrupted' not in e.output:
          raise
        print('Applying work-around for jmake project database corrupted '
              '(http://crbug.com/551449).')
        os.unlink(pdb_path)
        attempt_build()
    elif incremental:
      # Make sure output exists.
      build_utils.Touch(pdb_path)

    # Split classes between the main jar and the .excluded.jar using
    # complementary predicates.
    glob = options.jar_excluded_classes
    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
    exclusion_predicate = lambda f: not inclusion_predicate(f)

    jar.JarDirectory(classes_dir,
                     options.jar_path,
                     predicate=inclusion_predicate,
                     provider_configurations=options.provider_configurations,
                     additional_files=options.additional_jar_files)
    jar.JarDirectory(classes_dir,
                     excluded_jar_path,
                     predicate=exclusion_predicate,
                     provider_configurations=options.provider_configurations,
                     additional_files=options.additional_jar_files)
def JarDirectory(classes_dir, jar_path, manifest_file=None, predicate=None): class_files = build_utils.FindInDirectory(classes_dir, '*.class') if predicate: class_files = [f for f in class_files if predicate(f)] Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file)
def main():
  """Entry point for the lint wrapper (GN-era variant).

  Parses arguments, gathers lint inputs (sources, classpath, manifest,
  resource sources, suppressions), and runs lint via
  CallAndWriteDepfileIfStale so it only re-runs when inputs change.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)

  parser.add_argument('--lint-path', required=True,
                      help='Path to lint executable.')
  parser.add_argument('--product-dir', required=True,
                      help='Path to product dir.')
  parser.add_argument('--result-path', required=True,
                      help='Path to XML lint result file.')
  parser.add_argument('--cache-dir', required=True,
                      help='Path to the directory in which the android cache '
                      'directory tree should be stored.')
  parser.add_argument('--platform-xml-path', required=True,
                      help='Path to api-platforms.xml')
  parser.add_argument('--android-sdk-version',
                      help='Version (API level) of the Android SDK used for '
                      'building.')
  parser.add_argument('--can-fail-build', action='store_true',
                      help='If set, script will exit with nonzero exit status'
                      ' if lint errors are present')
  parser.add_argument('--include-unexpected-failures', action='store_true',
                      help='If set, script will exit with nonzero exit status'
                      ' if lint itself crashes with unexpected failures.')
  parser.add_argument('--config-path',
                      help='Path to lint suppressions file.')
  parser.add_argument('--disable',
                      help='List of checks to disable.')
  parser.add_argument('--jar-path',
                      help='Jar file containing class files.')
  parser.add_argument('--java-sources-file',
                      help='File containing a list of java files.')
  parser.add_argument('--manifest-path',
                      help='Path to AndroidManifest.xml')
  parser.add_argument('--classpath', default=[], action='append',
                      help='GYP-list of classpath .jar files')
  parser.add_argument('--processed-config-path',
                      help='Path to processed lint suppressions file.')
  parser.add_argument('--resource-dir',
                      help='Path to resource dir.')
  parser.add_argument('--resource-sources', default=[], action='append',
                      help='GYP-list of resource sources (directories with '
                      'resources or archives created by resource-generating '
                      'tasks.')
  parser.add_argument('--silent', action='store_true',
                      help='If set, script will not log anything.')
  parser.add_argument('--src-dirs',
                      help='Directories containing java files.')
  parser.add_argument('--srcjars',
                      help='GN list of included srcjars.')
  parser.add_argument(
      '--min-sdk-version',
      required=True,
      help='Minimal SDK version to lint against.')
  parser.add_argument(
      '--manifest-package',
      help='Package name of the AndroidManifest.xml.')

  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))

  # Collect java sources either from directories or an explicit list file.
  sources = []
  if args.src_dirs:
    src_dirs = build_utils.ParseGnList(args.src_dirs)
    sources = _FindInDirectories(src_dirs, '*.java')
  elif args.java_sources_file:
    sources.extend(build_utils.ReadSourcesList(args.java_sources_file))

  # --config-path and --processed-config-path must be given together.
  if args.config_path and not args.processed_config_path:
    parser.error('--config-path specified without --processed-config-path')
  elif args.processed_config_path and not args.config_path:
    parser.error('--processed-config-path specified without --config-path')

  input_paths = [
      args.lint_path,
      args.platform_xml_path,
  ]
  if args.config_path:
    input_paths.append(args.config_path)
  if args.jar_path:
    input_paths.append(args.jar_path)
  if args.manifest_path:
    input_paths.append(args.manifest_path)
  if sources:
    input_paths.extend(sources)
  classpath = []
  for gyp_list in args.classpath:
    classpath.extend(build_utils.ParseGnList(gyp_list))
  input_paths.extend(classpath)

  resource_sources = []
  if args.resource_dir:
    # Backward compatibility with GYP
    resource_sources += [args.resource_dir]

  for gyp_list in args.resource_sources:
    resource_sources += build_utils.ParseGnList(gyp_list)

  for resource_source in resource_sources:
    if os.path.isdir(resource_source):
      input_paths.extend(build_utils.FindInDirectory(resource_source, '*'))
    else:
      input_paths.append(resource_source)

  input_strings = [
      args.can_fail_build,
      args.include_unexpected_failures,
      args.silent,
  ]
  if args.android_sdk_version:
    input_strings.append(args.android_sdk_version)
  if args.processed_config_path:
    input_strings.append(args.processed_config_path)

  disable = []
  if args.disable:
    disable = build_utils.ParseGnList(args.disable)
    input_strings.extend(disable)

  output_paths = [args.result_path, args.processed_config_path]

  build_utils.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(args.lint_path,
                          args.config_path,
                          args.processed_config_path,
                          args.manifest_path,
                          args.result_path,
                          args.product_dir,
                          sources,
                          args.jar_path,
                          args.cache_dir,
                          args.android_sdk_version,
                          args.srcjars,
                          args.min_sdk_version,
                          args.manifest_package,
                          resource_sources,
                          disable=disable,
                          classpath=classpath,
                          can_fail_build=args.can_fail_build,
                          include_unexpected=args.include_unexpected_failures,
                          silent=args.silent),
      args,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      depfile_deps=classpath)
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                classpath):
  """Compiles |java_files| (plus sources extracted from srcjars) into a .jar.

  Runs javac (optionally wrapped by jmake for incremental builds), writing
  .class files into a temp dir and then jarring them into options.jar_path.

  Args:
    changes: Change information from the md5-stamping layer; used to find
        which source paths were added/modified since the last build.
    options: Parsed command-line options (jar_path, java_srcjars, incremental,
        chromium_code, provider_configurations, additional_jar_files, ...).
    javac_cmd: Base javac command line (list of args, without -d/-classpath).
    java_files: List of .java source paths. NOTE: mutated in place — sources
        extracted from srcjars are appended.
    classpath_inputs: Paths whose modification forces a full (non-incremental)
        compile.
    classpath: Compile classpath entries. NOTE: mutated in place — the
        extracted-classes dir is appended for partial recompiles.
  """
  # Don't bother enabling incremental compilation for non-chromium code.
  incremental = options.incremental and options.chromium_code

  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    # None means "full compile"; a set means "only these paths changed".
    changed_paths = None
    # jmake can handle deleted files, but it's a rare case and it would
    # complicate this script's logic.
    if incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      # jmake doesn't seem to do this on its own... Might be that ijars mess up
      # its change-detection logic.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None

    # pdb_path is jmake's project database; only defined when incremental
    # compilation is requested (all later uses are behind the same guard).
    if options.incremental:
      pdb_path = options.jar_path + '.pdb'

    if incremental:
      # jmake is a compiler wrapper that figures out the minimal set of .java
      # files that need to be rebuilt given a set of .java files that have
      # changed.
      # jmake determines what files are stale based on timestamps between .java
      # and .class files. Since we use .jars, .srcjars, and md5 checks,
      # timestamp info isn't accurate for this purpose. Rather than use jmake's
      # programatic interface (like we eventually should), we ensure that all
      # .class files are newer than their .java files, and convey to jmake which
      # sources are stale by having their .class files be missing entirely
      # (by not extracting them).
      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
      if srcjars:
        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

    # Maps extracted .java path -> srcjar it came from (for the info file).
    srcjar_files = dict()
    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        if changed_paths:
          # Translate srcjar-relative changed subpaths into extracted paths.
          changed_paths.update(os.path.join(java_dir, f)
                               for f in changes.IterChangedSubpaths(srcjar))
        extracted_files = build_utils.ExtractAll(
            srcjar, path=java_dir, pattern='*.java')
        for path in extracted_files:
          srcjar_files[path] = srcjar
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      java_files.extend(jar_srcs)
      if changed_paths:
        # Set the mtime of all sources to 0 since we use the absence of .class
        # files to tell jmake which files are stale.
        for path in jar_srcs:
          os.utime(path, (0, 0))

    _CreateInfoFile(java_files, options, srcjar_files)

    if java_files:
      if changed_paths:
        # Partial recompile: seed classes_dir with the still-valid .class
        # files from the previous jar (everything except changed sources).
        changed_java_files = [p for p in java_files if p in changed_paths]
        if os.path.exists(options.jar_path):
          _ExtractClassFiles(options.jar_path, classes_dir, changed_java_files)
        # Add the extracted files to the classpath. This is required because
        # when compiling only a subset of files, classes that haven't changed
        # need to be findable.
        classpath.append(classes_dir)

      # Can happen when a target goes from having no sources, to having sources.
      # It's created by the call to build_utils.Touch() below.
      if incremental:
        if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
          os.unlink(pdb_path)

      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir]

      # Pass classpath and source paths as response files to avoid extremely
      # long command lines that are tedius to debug.
      if classpath:
        cmd += ['-classpath', ':'.join(classpath)]

      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
      with open(java_files_rsp_path, 'w') as f:
        f.write(' '.join(java_files))
      cmd += ['@' + java_files_rsp_path]

      # JMake prints out some diagnostic logs that we want to ignore.
      # This assumes that all compiler output goes through stderr.
      stdout_filter = lambda s: ''
      if md5_check.PRINT_EXPLANATIONS:
        stdout_filter = None

      attempt_build = lambda: build_utils.CheckOutput(
          cmd,
          print_stdout=options.chromium_code,
          stdout_filter=stdout_filter,
          stderr_filter=ProcessJavacOutput)
      try:
        attempt_build()
      except build_utils.CalledProcessError as e:
        # Work-around for a bug in jmake (http://crbug.com/551449).
        # The corrupted-database message is only emitted by jmake, so pdb_path
        # is defined whenever this branch is taken.
        if 'project database corrupted' not in e.output:
          raise
        print ('Applying work-around for jmake project database corrupted '
               '(http://crbug.com/551449).')
        os.unlink(pdb_path)
        attempt_build()

    if options.incremental and (not java_files or not incremental):
      # Make sure output exists.
      build_utils.Touch(pdb_path)

    jar.JarDirectory(classes_dir,
                     options.jar_path,
                     provider_configurations=options.provider_configurations,
                     additional_files=options.additional_jar_files)
def _FindInDirectories(directories, filename_filter):
  """Returns all files matching |filename_filter| under each of |directories|.

  Args:
    directories: Iterable of directory paths to search.
    filename_filter: Filename glob forwarded to build_utils.FindInDirectory.

  Returns:
    A flat list of matching file paths, in directory order.
  """
  return [found
          for search_dir in directories
          for found in build_utils.FindInDirectory(search_dir, filename_filter)]