def DoPush(options):
  """Pushes each library listed in options.libraries_json to the build device.

  Per-library md5 stamp files keep already-up-to-date libraries from being
  re-pushed. Returns early (a no-op) when no build device is configured.
  """
  libraries = build_utils.ReadJson(options.libraries_json)

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  if not device:
    return

  serial_number = device.GetSerialNumber()
  # A list so that it is modifiable in Push below.
  needs_directory = [True]

  for lib in libraries:
    device_path = os.path.join(options.device_dir, lib)
    host_path = os.path.join(options.libraries_dir, lib)

    def Push():
      # Lazily create the destination directory on first actual push.
      if needs_directory:
        device.RunShellCommand('mkdir -p ' + options.device_dir)
        needs_directory[:] = []  # = False
      device.PushIfNeeded(host_path, device_path)

    stamp_path = '%s.%s.push.md5.stamp' % (host_path, serial_number)
    md5_check.CallAndRecordIfStale(
        Push,
        record_path=stamp_path,
        input_paths=[host_path],
        input_strings=[device_path])
def main(args):
  """Merges per-target .info files into the APK's .jar/.pak/.res info files.

  Also writes a depfile listing every .info input so incremental builds
  re-run when any of them changes.
  """
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser(description=__doc__)
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--jar-info-path', required=True,
                      help='Output .jar.info file')
  parser.add_argument('--pak-info-path', required=True,
                      help='Output .pak.info file')
  parser.add_argument('--res-info-path', required=True,
                      help='Output .res.info file')
  parser.add_argument('--jar-files', required=True, action='append',
                      help='GN-list of .jar file paths')
  parser.add_argument(
      '--assets', required=True, action='append',
      help='GN-list of files to add as assets in the form '
      '"srcPath:zipPath", where ":zipPath" is optional.')
  parser.add_argument('--uncompressed-assets', required=True, action='append',
                      help='Same as --assets, except disables compression.')
  parser.add_argument('--in-res-info-path', required=True, action='append',
                      help='Paths to .ap_.info files')
  options = parser.parse_args(args)

  options.jar_files = build_utils.ParseGnList(options.jar_files)
  options.assets = build_utils.ParseGnList(options.assets)
  options.uncompressed_assets = build_utils.ParseGnList(
      options.uncompressed_assets)

  jar_inputs = _FindJarInputs(set(options.jar_files))
  pak_inputs = _PakInfoPathsForAssets(
      options.assets + options.uncompressed_assets)
  res_inputs = options.in_res_info_path

  # Don't bother re-running if no .info files have changed (saves ~250ms).
  md5_check.CallAndRecordIfStale(
      lambda: _MergeJarInfoFiles(options.jar_info_path, jar_inputs),
      input_paths=jar_inputs,
      output_paths=[options.jar_info_path])

  # Always recreate these (just as fast as md5 checking them).
  _MergePakInfoFiles(options.pak_info_path, pak_inputs)
  _MergeResInfoFiles(options.res_info_path, res_inputs)

  all_inputs = jar_inputs + pak_inputs + res_inputs
  build_utils.WriteDepfile(options.depfile, options.jar_info_path,
                           inputs=all_inputs, add_pydeps=False)
def Jar(class_files, classes_dir, jar_path, manifest_file=None,
        additional_jar_files=None):
  """Creates |jar_path| from |class_files| (plus optional manifest/extras).

  Skips the work when the md5 stamp shows inputs are unchanged, but forces a
  rebuild if the jar is missing on disk.
  """
  jar_path = os.path.abspath(jar_path)

  # The paths of the files in the jar will be the same as they are passed in
  # to the command. Because of this, the command should be run in
  # options.classes_dir so the .class file paths in the jar are correct.
  jar_cwd = classes_dir
  rel_class_files = [os.path.relpath(f, jar_cwd) for f in class_files]

  jar_cmd = ['jar', 'cf0', jar_path]
  if manifest_file:
    # 'm' flag tells jar to read the manifest from the next argument.
    jar_cmd[1] += 'm'
    jar_cmd.append(os.path.abspath(manifest_file))
  jar_cmd.extend(rel_class_files)
  if additional_jar_files:
    jar_cmd.extend(additional_jar_files)

  with build_utils.TempDir() as temp_dir:
    # Include a guaranteed-to-exist empty file so the jar command never gets
    # an empty input list.
    empty_file = os.path.join(temp_dir, '.empty')
    build_utils.Touch(empty_file)
    jar_cmd.append(os.path.relpath(empty_file, jar_cwd))
    stamp_path = '%s.md5.stamp' % jar_path
    md5_check.CallAndRecordIfStale(
        lambda: build_utils.CheckOutput(jar_cmd, cwd=jar_cwd),
        record_path=stamp_path,
        input_paths=class_files,
        input_strings=jar_cmd,
        force=not os.path.exists(jar_path),
        )

  build_utils.Touch(jar_path, fail_if_missing=True)
def main():
  """Installs an APK (and optional splits) onto the configured build device.

  The install is skipped when the APK's md5 stamp is current, unless the
  on-device install metadata shows the device holds a different APK.
  """
  parser = optparse.OptionParser()
  parser.add_option('--apk-path', help='Path to .apk to install.')
  # Fixed: help text previously had an unbalanced '(' — "...to be used."
  parser.add_option(
      '--split-apk-path',
      help='Path to .apk splits (can specify multiple times, causes '
      '--install-multiple to be used).',
      action='append')
  parser.add_option('--android-sdk-tools',
                    help='Path to the Android SDK build tools folder. ' +
                    'Required when using --split-apk-path.')
  parser.add_option(
      '--install-record',
      help='Path to install record (touched only when APK is installed).')
  parser.add_option('--build-device-configuration',
                    help='Path to build device configuration.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--configuration-name',
                    help='The build CONFIGURATION_NAME')
  parser.add_option('--output-directory', help='The output directory.')
  options, _ = parser.parse_args()

  constants.SetBuildType(options.configuration_name)

  devil_chromium.Initialize(
      output_directory=os.path.abspath(options.output_directory))

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  if not device:
    return

  serial_number = device.GetSerialNumber()
  apk_package = apk_helper.GetPackageName(options.apk_path)

  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)

  # If the APK on the device does not match the one that was last installed by
  # the build, then the APK has to be installed (regardless of the md5 record).
  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)

  def Install():
    if options.split_apk_path:
      device.InstallSplitApk(options.apk_path, options.split_apk_path)
    else:
      device.Install(options.apk_path, reinstall=True)

    RecordInstallMetadata(device, apk_package, metadata_path)
    build_utils.Touch(options.install_record)

  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
  md5_check.CallAndRecordIfStale(
      Install,
      record_path=record_path,
      input_paths=[options.apk_path],
      force=force_install)

  if options.stamp:
    build_utils.Touch(options.stamp)
def DoPush(options):
  """Pushes each library in the GN list options.libraries to the device.

  Uses per-library md5 stamps to skip unchanged libraries; no-op when no
  build device is configured.
  """
  libraries = build_utils.ParseGnList(options.libraries)

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  if not device:
    return

  serial_number = device.GetSerialNumber()
  # A list so that it is modifiable in Push below.
  needs_directory = [True]

  for lib in libraries:
    device_path = os.path.join(options.device_dir, lib)
    host_path = os.path.join(options.libraries_dir, lib)

    def Push():
      # Create the destination directory only once, on the first real push.
      if needs_directory:
        device.RunShellCommand(['mkdir', '-p', options.device_dir],
                               check_return=True)
        needs_directory[:] = []  # = False
      device.PushChangedFiles([(os.path.abspath(host_path), device_path)])

    stamp_path = '%s.%s.push.md5.stamp' % (host_path, serial_number)
    md5_check.CallAndRecordIfStale(
        Push,
        record_path=stamp_path,
        input_paths=[host_path],
        input_strings=[device_path])
def DoDex(options, paths, dex_args=None):
  """Runs dx over |paths| to produce options.dex_path.

  Also writes the output-directory-relative input list to
  <dex_path>.inputs. Forces a rerun when the dex file is missing.
  """
  dx_path = os.path.join(options.android_sdk_tools, 'dx')
  # See http://crbug.com/272064 for context on --force-jumbo.
  # See https://github.com/android/platform_dalvik/commit/dd140a22d for
  # --num-threads.
  dex_cmd = [dx_path, '--num-threads=8', '--dex', '--force-jumbo',
             '--output', options.dex_path]
  if options.no_locals != '0':
    dex_cmd.append('--no-locals')
  if dex_args:
    dex_cmd += dex_args
  dex_cmd += paths

  stamp_path = '%s.md5.stamp' % options.dex_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False),
      record_path=stamp_path,
      input_paths=paths,
      input_strings=dex_cmd,
      force=not os.path.exists(options.dex_path))
  build_utils.WriteJson(
      [os.path.relpath(p, options.output_directory) for p in paths],
      options.dex_path + '.inputs')
def DoJavac(options):
  """Compiles the target's .java sources into options.output_dir with javac.

  Re-runs only when the sources, classpath jars (via their .TOC files when
  present), or the command line change.
  """
  output_dir = options.output_dir

  src_dirs = build_utils.ParseGypList(options.src_dirs)
  java_files = build_utils.FindInDirectories(src_dirs, '*.java')
  if options.javac_includes:
    javac_includes = build_utils.ParseGypList(options.javac_includes)
    java_files = [
        f for f in java_files
        if any(fnmatch.fnmatch(f, include) for include in javac_includes)
    ]

  # Compiling guava with certain orderings of input files causes a compiler
  # crash... Sorted order works, so use that.
  # See https://code.google.com/p/guava-libraries/issues/detail?id=950
  java_files.sort()

  classpath = build_utils.ParseGypList(options.classpath)
  # Prefer a jar's .TOC (interface hash) as the input when it exists, so that
  # implementation-only changes in dependencies don't trigger a recompile.
  jar_inputs = [p + '.TOC' if os.path.exists(p + '.TOC') else p
                for p in classpath]

  javac_cmd = [
      'javac',
      '-g',
      '-source', '1.5',
      '-target', '1.5',
      '-classpath', ':'.join(classpath),
      '-d', output_dir,
      '-Xlint:unchecked',
      '-Xlint:deprecation',
      ] + java_files

  def Compile():
    # Delete the classes directory. This ensures that all .class files in the
    # output are actually from the input .java files. For example, if a .java
    # file is deleted or an inner class is removed, the classes directory
    # should not contain the corresponding old .class file after running this
    # action.
    build_utils.DeleteDirectory(output_dir)
    build_utils.MakeDirectory(output_dir)
    suppress_output = not options.chromium_code
    build_utils.CheckCallDie(javac_cmd, suppress_output=suppress_output)

  record_path = '%s/javac.md5.stamp' % options.output_dir
  md5_check.CallAndRecordIfStale(
      Compile,
      record_path=record_path,
      input_paths=java_files + jar_inputs,
      input_strings=javac_cmd)
def DoJavac(options, args):
  """Compiles |args| plus generated sources from options.src_gendirs.

  Chromium code gets extra lint flags; third-party code compiles against
  rt.jar via -XDignore.symbol.file. Re-runs only when inputs or the command
  line change.
  """
  output_dir = options.output_dir

  src_gendirs = build_utils.ParseGypList(options.src_gendirs)
  java_files = args + build_utils.FindInDirectories(src_gendirs, '*.java')
  if options.javac_includes:
    javac_includes = build_utils.ParseGypList(options.javac_includes)
    java_files = [
        f for f in java_files
        if any(fnmatch.fnmatch(f, include) for include in javac_includes)
    ]

  # Compiling guava with certain orderings of input files causes a compiler
  # crash... Sorted order works, so use that.
  # See https://code.google.com/p/guava-libraries/issues/detail?id=950
  java_files.sort()

  classpath = build_utils.ParseGypList(options.classpath)
  # Prefer a jar's .TOC (interface hash) when present so implementation-only
  # dependency changes don't trigger a recompile.
  jar_inputs = [p + '.TOC' if os.path.exists(p + '.TOC') else p
                for p in classpath]

  javac_args = [
      '-g',
      '-source', '1.5',
      '-target', '1.5',
      '-classpath', ':'.join(classpath),
      '-d', output_dir]
  if options.chromium_code:
    javac_args.extend(['-Xlint:unchecked', '-Xlint:deprecation'])
  else:
    # XDignore.symbol.file makes javac compile against rt.jar instead of
    # ct.sym. This means that using a java internal package/class will not
    # trigger a compile warning or error.
    javac_args.extend(['-XDignore.symbol.file'])

  javac_cmd = ['javac'] + javac_args + java_files

  def Compile():
    # Delete the classes directory. This ensures that all .class files in the
    # output are actually from the input .java files. For example, if a .java
    # file is deleted or an inner class is removed, the classes directory
    # should not contain the corresponding old .class file after running this
    # action.
    build_utils.DeleteDirectory(output_dir)
    build_utils.MakeDirectory(output_dir)
    build_utils.CheckOutput(
        javac_cmd,
        print_stdout=options.chromium_code,
        stderr_filter=ColorJavacOutput)

  record_path = '%s/javac.md5.stamp' % options.output_dir
  md5_check.CallAndRecordIfStale(
      Compile,
      record_path=record_path,
      input_paths=java_files + jar_inputs,
      input_strings=javac_cmd)
def CheckCallAndRecord(should_call, message, force=False,
                       outputs_specified=False, outputs_missing=False,
                       expected_changes=None, added_or_modified_only=None,
                       track_subentries=False,
                       output_newer_than_record=False):
  """Drives md5_check.CallAndRecordIfStale() and asserts whether it fired.

  Test helper (closure): record_path, input_files, input_strings, zip_paths
  and self come from the enclosing test scope. Optionally checks the change
  description reported to the callback.
  """
  output_paths = None
  if outputs_specified:
    output_file1 = tempfile.NamedTemporaryFile()
    if outputs_missing:
      output_file1.close()  # Gets deleted on close().
    output_paths = [output_file1.name]
  if output_newer_than_record:
    # Backdate the record so the output looks newer than it.
    output_mtime = os.path.getmtime(output_file1.name)
    os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
  else:
    # touch the record file so it doesn't look like it's older that
    # the output we've just created
    os.utime(record_path.name, None)

  self.called = False
  self.changes = None
  if expected_changes or added_or_modified_only is not None:
    def MarkCalled(changes):
      self.called = True
      self.changes = changes
  else:
    def MarkCalled():
      self.called = True

  md5_check.CallAndRecordIfStale(
      MarkCalled,
      record_path=record_path.name,
      input_paths=input_files,
      input_strings=input_strings,
      output_paths=output_paths,
      force=force,
      pass_changes=(expected_changes or added_or_modified_only) is not None,
      track_subpaths_allowlist=zip_paths if track_subentries else None)

  self.assertEqual(should_call, self.called, message)
  if expected_changes:
    description = self.changes.DescribeDifference()
    self.assertTrue(
        fnmatch.fnmatch(description, expected_changes),
        'Expected %s to match %s' % (repr(description),
                                     repr(expected_changes)))
  if should_call and added_or_modified_only is not None:
    self.assertEqual(added_or_modified_only,
                     self.changes.AddedOrModifiedOnly())
def GenerateBundleApks(bundle_path, bundle_apks_path, aapt2_path,
                       keystore_path, keystore_password, keystore_alias,
                       universal):
  """Generate an .apks archive from an app bundle if needed.

  Args:
    bundle_path: Input bundle file path.
    bundle_apks_path: Output bundle .apks archive path. Name must end with
      '.apks' or this operation will fail.
    aapt2_path: Path to aapt2 build tool.
    keystore_path: Path to keystore.
    keystore_password: Keystore password, as a string.
    keystore_alias: Keystore signing key alias.
    universal: Whether to create a single APK that contains the contents of
      all modules.
  """
  # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than
  # input_paths, to speed up MD5 computations by about 400ms (the .jar file
  # contains thousands of class files which are checked independently,
  # resulting in an .md5.stamp of more than 60000 lines!).
  input_paths = [bundle_path, aapt2_path, keystore_path]
  input_strings = [
      keystore_password,
      keystore_alias,
      bundletool.BUNDLETOOL_JAR_PATH,
      # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but
      # it's simpler to assume that this may not be the case in the future.
      bundletool.BUNDLETOOL_VERSION,
  ]
  output_paths = [bundle_apks_path]

  def rebuild():
    logging.info('Building %s', os.path.basename(bundle_apks_path))
    # AtomicOutput ensures a partially-written .apks never replaces a good
    # one if bundletool fails.
    with build_utils.AtomicOutput(bundle_apks_path) as tmp_apks:
      cmd_args = [
          'java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-apks',
          '--aapt2=%s' % aapt2_path,
          '--output=%s' % tmp_apks.name,
          '--bundle=%s' % bundle_path,
          '--ks=%s' % keystore_path,
          '--ks-pass=pass:%s' % keystore_password,
          '--ks-key-alias=%s' % keystore_alias,
          '--overwrite',
      ]
      if universal:
        cmd_args += ['--mode=universal']
      build_utils.CheckOutput(cmd_args)

  md5_check.CallAndRecordIfStale(
      rebuild,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths)
def main():
  """Installs an APK (with optional splits via adb install-multiple) onto the
  configured build device, skipping the install when the APK is unchanged.
  """
  parser = optparse.OptionParser()
  parser.add_option('--apk-path', help='Path to .apk to install.')
  # Fixed: help text previously had an unbalanced '(' — "...to be used."
  parser.add_option(
      '--split-apk-path',
      help='Path to .apk splits (can specify multiple times, causes '
      '--install-multiple to be used).',
      action='append')
  parser.add_option(
      '--install-record',
      help='Path to install record (touched only when APK is installed).')
  parser.add_option('--build-device-configuration',
                    help='Path to build device configuration.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--configuration-name',
                    help='The build CONFIGURATION_NAME')
  options, _ = parser.parse_args()

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  if not device:
    return

  constants.SetBuildType(options.configuration_name)

  serial_number = device.GetSerialNumber()
  apk_package = apk_helper.GetPackageName(options.apk_path)

  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)

  # If the APK on the device does not match the one that was last installed by
  # the build, then the APK has to be installed (regardless of the md5 record).
  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)

  def Install():
    # TODO: Filter splits using split-select.
    active_splits = options.split_apk_path
    if active_splits:
      device.adb.InstallMultiple([options.apk_path] + active_splits,
                                 reinstall=True)
    else:
      device.Install(options.apk_path, reinstall=True)

    RecordInstallMetadata(device, apk_package, metadata_path)
    build_utils.Touch(options.install_record)

  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
  md5_check.CallAndRecordIfStale(
      Install,
      record_path=record_path,
      input_paths=[options.apk_path],
      force=force_install)

  if options.stamp:
    build_utils.Touch(options.stamp)
def DoJarToc(options):
  """Regenerates the .TOC for options.jar_path when the jar has changed,
  then touches the TOC so its timestamp always advances."""
  jar_path = options.jar_path
  toc_path = options.toc_path
  stamp_path = '%s.md5.stamp' % toc_path
  md5_check.CallAndRecordIfStale(
      lambda: UpdateToc(jar_path, toc_path),
      record_path=stamp_path,
      input_paths=[jar_path],
      )
  build_utils.Touch(toc_path)
def DoJavac(
    bootclasspath, classpath, classes_dir, chromium_code,
    javac_bin, java_version, java_files):
  """Runs javac.

  Builds |java_files| with the provided |classpath| and puts the generated
  .class files into |classes_dir|. If |chromium_code| is true, extra lint
  checking will be enabled.
  """
  # Prefer a jar's .TOC (interface hash) as the input when present so that
  # implementation-only dependency changes don't trigger a recompile.
  jar_inputs = [p + '.TOC' if os.path.exists(p + '.TOC') else p
                for p in classpath]

  javac_args = [
      '-g',
      # Chromium only allows UTF8 source files.  Being explicit avoids
      # javac pulling a default encoding from the user's environment.
      '-encoding', 'UTF-8',
      '-classpath', ':'.join(classpath),
      '-d', classes_dir]
  if bootclasspath:
    javac_args.extend([
        '-bootclasspath', ':'.join(bootclasspath),
        '-source', java_version,
        '-target', java_version,
        ])
  if chromium_code:
    # TODO(aurimas): re-enable '-Xlint:deprecation' checks once they are
    # fixed.
    javac_args.extend(['-Xlint:unchecked'])
  else:
    # XDignore.symbol.file makes javac compile against rt.jar instead of
    # ct.sym. This means that using a java internal package/class will not
    # trigger a compile warning or error.
    javac_args.extend(['-XDignore.symbol.file'])

  javac_cmd = [javac_bin] + javac_args + java_files

  def Compile():
    build_utils.CheckOutput(
        javac_cmd,
        print_stdout=chromium_code,
        stderr_filter=ColorJavacOutput)

  record_path = os.path.join(classes_dir, 'javac.md5.stamp')
  md5_check.CallAndRecordIfStale(
      Compile,
      record_path=record_path,
      input_paths=java_files + jar_inputs,
      input_strings=javac_cmd)
def DoJarToc(options):
  """Regenerates the .TOC for options.jar_path when the jar has changed.

  Forces regeneration when the TOC is missing on disk, and fails loudly if
  it is still absent afterwards.
  """
  jar_path = options.jar_path
  toc_path = options.toc_path
  stamp_path = '%s.md5.stamp' % toc_path
  md5_check.CallAndRecordIfStale(
      lambda: UpdateToc(jar_path, toc_path),
      record_path=stamp_path,
      input_paths=[jar_path],
      force=not os.path.exists(toc_path),
      )
  build_utils.Touch(toc_path, fail_if_missing=True)
def DoDex(options, paths):
  """Runs dx (from the SDK's platform-tools) over |paths| to produce
  options.dex_path, skipping the work when inputs are unchanged."""
  dx_path = os.path.join(options.android_sdk_root, 'platform-tools', 'dx')

  dex_cmd = [dx_path, '--dex', '--output', options.dex_path] + paths

  stamp_path = '%s.md5.stamp' % options.dex_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckCallDie(dex_cmd, suppress_output=True),
      record_path=stamp_path,
      input_paths=paths,
      input_strings=dex_cmd)

  build_utils.Touch(options.dex_path)
def CallAndWriteDepfileIfStale(function, options, record_path=None,
                               input_paths=None, input_strings=None,
                               output_paths=None, force=False,
                               pass_changes=False, depfile_deps=None,
                               add_pydeps=True):
  """Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable.

  Depfiles are automatically added to output_paths when present in the
  |options| argument. They are then created after |function| is called.

  By default, only python dependencies are added to the depfile. If there are
  other input paths that are not captured by GN deps, then they should be
  listed in depfile_deps. It's important to write paths to the depfile that
  are already captured by GN deps since GN args can cause GN deps to change,
  and such changes are not immediately reflected in depfiles
  (http://crbug.com/589311).
  """
  if not output_paths:
    raise Exception('At least one output_path must be specified.')
  input_paths = list(input_paths or [])
  input_strings = list(input_strings or [])
  output_paths = list(output_paths or [])

  python_deps = None
  if hasattr(options, 'depfile') and options.depfile:
    python_deps = _ComputePythonDependencies()
    input_paths += python_deps
    output_paths += [options.depfile]

  def on_stale_md5(changes):
    # Forward the change description only when the caller asked for it.
    args = (changes,) if pass_changes else ()
    function(*args)
    if python_deps is not None:
      all_depfile_deps = list(python_deps) if add_pydeps else []
      if depfile_deps:
        all_depfile_deps.extend(depfile_deps)
      WriteDepfile(options.depfile, output_paths[0], all_depfile_deps,
                   add_pydeps=False)

  # Always request changes from md5_check; on_stale_md5 decides whether to
  # forward them to |function|.
  md5_check.CallAndRecordIfStale(
      on_stale_md5,
      record_path=record_path,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      force=force,
      pass_changes=True)
def DoJavac( classpath, classes_dir, chromium_code, java_files): """Runs javac. Builds |java_files| with the provided |classpath| and puts the generated .class files into |classes_dir|. If |chromium_code| is true, extra lint checking will be enabled. """ # Compiling guava with certain orderings of input files causes a compiler # crash... Sorted order works, so use that. # See https://code.google.com/p/guava-libraries/issues/detail?id=950 # TODO(cjhopman): Remove this when we have update guava or the compiler to a # version without this problem. java_files.sort() jar_inputs = [] for path in classpath: if os.path.exists(path + '.TOC'): jar_inputs.append(path + '.TOC') else: jar_inputs.append(path) javac_args = [ '-g', '-source', '1.5', '-target', '1.5', '-classpath', ':'.join(classpath), '-d', classes_dir] if chromium_code: javac_args.extend(['-Xlint:unchecked', '-Xlint:deprecation']) else: # XDignore.symbol.file makes javac compile against rt.jar instead of # ct.sym. This means that using a java internal package/class will not # trigger a compile warning or error. javac_args.extend(['-XDignore.symbol.file']) javac_cmd = ['javac'] + javac_args + java_files def Compile(): build_utils.CheckOutput( javac_cmd, print_stdout=chromium_code, stderr_filter=ColorJavacOutput) record_path = os.path.join(classes_dir, 'javac.md5.stamp') md5_check.CallAndRecordIfStale( Compile, record_path=record_path, input_paths=java_files + jar_inputs, input_strings=javac_cmd)
def CreateStandaloneApk(options):
  """Produces a standalone APK by zipping native libraries into a copy of the
  input APK, skipping the work when inputs are unchanged."""
  def DoZip():
    # Work on a temp copy so the input APK is never modified, then publish
    # the finished file to the output path in one step.
    with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file:
      intermediate_path = intermediate_file.name
      shutil.copy(options.input_apk_path, intermediate_path)
      apk_path_abs = os.path.abspath(intermediate_path)
      build_utils.CheckOutput(
          ['zip', '-r', '-1', apk_path_abs, 'lib'],
          cwd=options.libraries_top_dir)
      shutil.copy(intermediate_path, options.output_apk_path)

  input_paths = [options.input_apk_path, options.libraries_top_dir]
  stamp_path = '%s.standalone.stamp' % options.input_apk_path
  md5_check.CallAndRecordIfStale(
      DoZip,
      record_path=stamp_path,
      input_paths=input_paths)
def DoDex(options, paths):
  """Runs d8 over |paths| to produce options.dex_path, and writes the
  output-directory-relative input list to <dex_path>.inputs."""
  d8_path = os.path.join(options.android_sdk_tools, 'd8')

  dex_cmd = [d8_path, '--output', options.dex_path]
  dex_cmd += paths

  stamp_path = '%s.md5.stamp' % options.dex_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False),
      record_path=stamp_path,
      input_paths=paths,
      input_strings=dex_cmd,
      force=not os.path.exists(options.dex_path))
  build_utils.WriteJson(
      [os.path.relpath(p, options.output_directory) for p in paths],
      options.dex_path + '.inputs')
def CheckCallAndRecord(should_call, message, force=False,
                       outputs_specified=False, outputs_missing=False,
                       expected_changes=None, added_or_modified_only=None,
                       track_subentries=False):
  """Drives md5_check.CallAndRecordIfStale() and asserts whether it fired.

  Test helper (closure): record_path, input_files, input_strings, zip_paths
  and self come from the enclosing test scope. Optionally checks the change
  description reported to the callback.
  """
  output_paths = None
  if outputs_specified:
    output_file1 = tempfile.NamedTemporaryFile()
    if outputs_missing:
      output_file1.close()  # Gets deleted on close().
    output_paths = [output_file1.name]

  self.called = False
  self.changes = None
  if expected_changes or added_or_modified_only is not None:
    def MarkCalled(changes):
      self.called = True
      self.changes = changes
  else:
    def MarkCalled():
      self.called = True

  md5_check.CallAndRecordIfStale(
      MarkCalled,
      record_path=record_path.name,
      input_paths=input_files,
      input_strings=input_strings,
      output_paths=output_paths,
      force=force,
      pass_changes=(expected_changes or added_or_modified_only) is not None,
      track_subpaths_whitelist=zip_paths if track_subentries else None)

  self.assertEqual(should_call, self.called, message)
  if expected_changes:
    description = self.changes.DescribeDifference()
    self.assertTrue(
        fnmatch.fnmatch(description, expected_changes),
        'Expected %s to match %s' % (repr(description),
                                     repr(expected_changes)))
  if should_call and added_or_modified_only is not None:
    self.assertEqual(added_or_modified_only,
                     self.changes.AddedOrModifiedOnly())
def DoJavac( classpath, classes_dir, chromium_code, java_files): """Runs javac. Builds |java_files| with the provided |classpath| and puts the generated .class files into |classes_dir|. If |chromium_code| is true, extra lint checking will be enabled. """ jar_inputs = [] for path in classpath: if os.path.exists(path + '.TOC'): jar_inputs.append(path + '.TOC') else: jar_inputs.append(path) javac_args = [ '-g', '-source', '1.7', '-target', '1.7', '-classpath', ':'.join(classpath), '-d', classes_dir] if chromium_code: javac_args.extend(['-Xlint:unchecked']) # TODO(aurimas): re-enable this after the L SDK is launched and make # everyone fix new deprecation warnings correctly. # http://crbug.com/405174,398669,411361,411366,411367,411376,416041 # '-Xlint:deprecation' else: # XDignore.symbol.file makes javac compile against rt.jar instead of # ct.sym. This means that using a java internal package/class will not # trigger a compile warning or error. javac_args.extend(['-XDignore.symbol.file']) javac_cmd = ['javac'] + javac_args + java_files def Compile(): build_utils.CheckOutput( javac_cmd, print_stdout=chromium_code, stderr_filter=ColorJavacOutput) record_path = os.path.join(classes_dir, 'javac.md5.stamp') md5_check.CallAndRecordIfStale( Compile, record_path=record_path, input_paths=java_files + jar_inputs, input_strings=javac_cmd)
def DoDex(options, paths):
  """Runs dx over |paths| to produce options.dex_path, skipping the work
  when the md5 stamp shows inputs are unchanged."""
  dx_path = os.path.join(options.android_sdk_tools, 'dx')

  # See http://crbug.com/272064 for context on --force-jumbo.
  dex_cmd = [dx_path, '--dex', '--force-jumbo', '--output', options.dex_path]
  if options.no_locals != '0':
    dex_cmd.append('--no-locals')
  dex_cmd += paths

  stamp_path = '%s.md5.stamp' % options.dex_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False),
      record_path=stamp_path,
      input_paths=paths,
      input_strings=dex_cmd)

  build_utils.Touch(options.dex_path)
def DoDex(options, paths):
  """Locates dx under options.android_sdk_root and runs it over |paths| to
  produce options.dex_path, skipping the work when inputs are unchanged."""
  # Search for the first matching dx executable name (platform-dependent
  # extensions) under the SDK root.
  dx_binary = ''
  for candidate in AddExeExtensions('dx'):
    dx_binary = Find(candidate, options.android_sdk_root)
    if dx_binary:
      break

  dex_cmd = [dx_binary, '--dex', '--output', options.dex_path] + paths

  stamp_path = '%s.md5.stamp' % options.dex_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckCallDie(dex_cmd, suppress_output=True),
      record_path=stamp_path,
      input_paths=paths,
      input_strings=dex_cmd)

  build_utils.Touch(options.dex_path)
def CallAndWriteDepfileIfStale(on_stale_md5, options, record_path=None,
                               input_paths=None, input_strings=None,
                               output_paths=None, force=False,
                               pass_changes=False,
                               track_subpaths_whitelist=None,
                               depfile_deps=None):
  """Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable.

  Depfiles are automatically added to output_paths when present in the
  |options| argument. They are then created after |on_stale_md5| is called.

  By default, only python dependencies are added to the depfile. If there are
  other input paths that are not captured by GN deps, then they should be
  listed in depfile_deps. It's important to write paths to the depfile that
  are already captured by GN deps since GN args can cause GN deps to change,
  and such changes are not immediately reflected in depfiles
  (http://crbug.com/589311).
  """
  if not output_paths:
    raise Exception('At least one output_path must be specified.')
  input_paths = list(input_paths or [])
  input_strings = list(input_strings or [])
  output_paths = list(output_paths or [])
  # The calling script itself (and its python deps) are always inputs.
  input_paths += _ComputePythonDependencies()

  md5_check.CallAndRecordIfStale(
      on_stale_md5,
      record_path=record_path,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      force=force,
      pass_changes=pass_changes,
      track_subpaths_whitelist=track_subpaths_whitelist)

  # Write depfile even when inputs have not changed to ensure build
  # correctness on bots that build with & without patch, and the patch changes
  # the depfile location.
  if hasattr(options, 'depfile') and options.depfile:
    WriteDepfile(options.depfile, output_paths[0], depfile_deps,
                 add_pydeps=False)
def main(argv):
  """Installs an APK via adb onto the single attached device, skipping the
  install when the APK is unchanged since the last recorded install."""
  if not build_utils.IsDeviceReady():
    build_utils.PrintBigWarning(
        'Zero (or multiple) devices attached. Skipping APK install.')
    return

  parser = optparse.OptionParser()
  parser.add_option('--android-sdk-tools',
                    help='Path to Android SDK tools.')
  parser.add_option('--apk-path', help='Path to .apk to install.')
  parser.add_option(
      '--install-record',
      help='Path to install record (touched only when APK is installed).')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args()

  # TODO(cjhopman): Should this install to all devices/be configurable?
  install_cmd = [
      os.path.join(options.android_sdk_tools, 'adb'),
      'install', '-r',
      options.apk_path]

  serial_number = android_commands.AndroidCommands().Adb().GetSerialNumber()
  apk_package = apk_helper.GetPackageName(options.apk_path)

  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)

  # If the APK on the device does not match the one that was last installed by
  # the build, then the APK has to be installed (regardless of the md5 record).
  force_install = HasInstallMetadataChanged(apk_package, metadata_path)

  def Install():
    build_utils.CheckCallDie(install_cmd)
    RecordInstallMetadata(apk_package, metadata_path)
    build_utils.Touch(options.install_record)

  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
  md5_check.CallAndRecordIfStale(
      Install,
      record_path=record_path,
      input_paths=[options.apk_path],
      input_strings=install_cmd,
      force=force_install)

  if options.stamp:
    build_utils.Touch(options.stamp)
def Jar(class_files, classes_dir, jar_path):
  """Creates |jar_path| from |class_files|, skipping the work when the md5
  stamp is current, but forcing a rebuild if the jar is missing on disk."""
  jar_path = os.path.abspath(jar_path)

  # The paths of the files in the jar will be the same as they are passed in
  # to the command. Because of this, the command should be run in
  # options.classes_dir so the .class file paths in the jar are correct.
  jar_cwd = classes_dir
  rel_class_files = [os.path.relpath(f, jar_cwd) for f in class_files]
  jar_cmd = ['jar', 'cf0', jar_path] + rel_class_files

  stamp_path = '%s.md5.stamp' % jar_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckOutput(jar_cmd, cwd=jar_cwd),
      record_path=stamp_path,
      input_paths=class_files,
      input_strings=jar_cmd,
      force=not os.path.exists(jar_path),
      )

  build_utils.Touch(jar_path, fail_if_missing=True)
def main(argv):
  """Installs an APK onto the configured build device, skipping the install
  when the APK's md5 stamp and on-device metadata are both current."""
  parser = optparse.OptionParser()
  parser.add_option('--android-sdk-tools',
                    help='Path to Android SDK tools.')
  parser.add_option('--apk-path', help='Path to .apk to install.')
  parser.add_option(
      '--install-record',
      help='Path to install record (touched only when APK is installed).')
  parser.add_option('--build-device-configuration',
                    help='Path to build device configuration.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args()

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  if not device:
    return

  serial_number = device.GetSerialNumber()
  apk_package = apk_helper.GetPackageName(options.apk_path)

  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)

  # If the APK on the device does not match the one that was last installed by
  # the build, then the APK has to be installed (regardless of the md5 record).
  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)

  def Install():
    device.Install(options.apk_path, reinstall=True)
    RecordInstallMetadata(device, apk_package, metadata_path)
    build_utils.Touch(options.install_record)

  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
  md5_check.CallAndRecordIfStale(
      Install,
      record_path=record_path,
      input_paths=[options.apk_path],
      force=force_install)

  if options.stamp:
    build_utils.Touch(options.stamp)
def DoJar(options):
  """Jars all .class files under options.classes_dir, minus excluded classes.

  Args:
    options: Parsed options providing classes_dir, excluded_classes (GYP list
        of fnmatch patterns) and jar_path (output jar).
  """
  class_files = build_utils.FindInDirectory(options.classes_dir, '*.class')
  for exclude in build_utils.ParseGypList(options.excluded_classes):
    # Use a list comprehension rather than filter(): under Python 3 filter()
    # returns a one-shot iterator, which the relpath comprehension below would
    # exhaust, leaving input_paths empty for the md5 staleness check.
    class_files = [f for f in class_files
                   if not fnmatch.fnmatch(f, exclude)]

  jar_path = os.path.abspath(options.jar_path)

  # The paths of the files in the jar will be the same as they are passed in to
  # the command. Because of this, the command should be run in
  # options.classes_dir so the .class file paths in the jar are correct.
  jar_cwd = options.classes_dir
  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
  jar_cmd = ['jar', 'cf0', jar_path] + class_files_rel

  # Skip re-jarring when neither the inputs nor the command line changed.
  record_path = '%s.md5.stamp' % options.jar_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckCallDie(jar_cmd, cwd=jar_cwd),
      record_path=record_path,
      input_paths=class_files,
      input_strings=jar_cmd)

  build_utils.Touch(options.jar_path)
def main(argv):
  # Computes merged dependency information for one build target and writes it
  # as a .build_config JSON file (see build_utils.WriteJson at the bottom).
  # The set of meaningful options depends on options.type.
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--build-config', help='Path to build_config output.')
  parser.add_option('--type',
                    help='Type of this target (e.g. android_library).')
  parser.add_option(
      '--possible-deps-configs',
      help='List of paths for dependency\'s build_config files. Some '
      'dependencies may not write build_config files. Missing build_config '
      'files are handled differently based on the type of this target.')

  # android_resources options
  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
  parser.add_option('--r-text', help='Path to target\'s R.txt file.')
  parser.add_option('--package-name',
                    help='Java package name for these resources.')
  parser.add_option('--android-manifest', help='Path to android manifest.')

  # android_assets options
  parser.add_option('--asset-sources', help='List of asset sources.')
  parser.add_option('--asset-renaming-sources',
                    help='List of asset sources with custom destinations.')
  parser.add_option('--asset-renaming-destinations',
                    help='List of asset custom destinations.')
  parser.add_option('--disable-asset-compression', action='store_true',
                    help='Whether to disable asset compression.')

  # java library options
  parser.add_option('--jar-path', help='Path to target\'s jar output.')
  parser.add_option(
      '--supports-android', action='store_true',
      help='Whether this library supports running on the Android platform.')
  parser.add_option(
      '--requires-android', action='store_true',
      help='Whether this library requires running on the Android platform.')
  parser.add_option(
      '--bypass-platform-checks', action='store_true',
      help='Bypass checks for support/require Android platform.')

  # android library options
  parser.add_option('--dex-path', help='Path to target\'s dex output.')

  # native library options
  parser.add_option('--native-libs', help='List of top-level native libs.')
  parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')

  # apk options
  parser.add_option('--apk-path', help='Path to the target\'s apk output.')
  parser.add_option(
      '--tested-apk-config',
      help='Path to the build config of the tested apk (for an instrumentation '
      'test apk).')
  parser.add_option('--proguard-enabled', action='store_true',
                    help='Whether proguard is enabled for this apk.')
  parser.add_option('--proguard-info',
                    help='Path to the proguard .info output for this apk.')

  options, args = parser.parse_args(argv)

  if args:
    parser.error('No positional arguments should be given.')

  # Options that must be present for each target type; unknown types fail.
  required_options_map = {
      'java_binary': ['build_config', 'jar_path'],
      'java_library': ['build_config', 'jar_path'],
      'android_assets': ['build_config'],
      'android_resources': ['build_config', 'resources_zip'],
      'android_apk': ['build_config', 'jar_path', 'dex_path', 'resources_zip'],
      'deps_dex': ['build_config', 'dex_path'],
      'resource_rewriter': ['build_config']
  }
  required_options = required_options_map.get(options.type)
  if not required_options:
    raise Exception('Unknown type: <%s>' % options.type)

  # Native libraries are post-processed with readelf (see below), so the
  # toolchain's readelf becomes required as soon as any are listed.
  if options.native_libs:
    required_options.append('readelf_path')

  build_utils.CheckOptions(options, parser, required_options)

  if options.type == 'java_library':
    if options.supports_android and not options.dex_path:
      raise Exception(
          'java_library that supports Android requires a dex path.')
    if options.requires_android and not options.supports_android:
      raise Exception(
          '--supports-android is required when using --requires-android')

  possible_deps_config_paths = build_utils.ParseGypList(
      options.possible_deps_configs)

  # Some dependency types never write a build_config; for these target types a
  # missing config file is tolerated rather than treated as an error.
  allow_unknown_deps = (options.type in
                        ('android_apk', 'android_assets', 'android_resources'))
  unknown_deps = [
      c for c in possible_deps_config_paths if not os.path.exists(c)]
  if unknown_deps and not allow_unknown_deps:
    raise Exception('Unknown deps: ' + str(unknown_deps))
  direct_deps_config_paths = [
      c for c in possible_deps_config_paths if not c in unknown_deps]
  direct_deps_config_paths = _FilterUnwantedDepsPaths(direct_deps_config_paths,
                                                      options.type)

  deps = Deps(direct_deps_config_paths)
  direct_library_deps = deps.Direct('java_library')
  all_library_deps = deps.All('java_library')

  direct_resources_deps = deps.Direct('android_resources')
  all_resources_deps = deps.All('android_resources')
  # Resources should be ordered with the highest-level dependency first so that
  # overrides are done correctly.
  all_resources_deps.reverse()

  # An instrumentation test apk shares resources with the apk under test, so
  # drop the tested apk's resource deps from this target's transitive set.
  if options.type == 'android_apk' and options.tested_apk_config:
    tested_apk_deps = Deps([options.tested_apk_config])
    tested_apk_resources_deps = tested_apk_deps.All('android_resources')
    all_resources_deps = [
        d for d in all_resources_deps if not d in tested_apk_resources_deps]

  # Initialize some common config.
  config = {
      'deps_info': {
          'name': os.path.basename(options.build_config),
          'path': options.build_config,
          'type': options.type,
          'deps_configs': direct_deps_config_paths
      }
  }
  deps_info = config['deps_info']

  if (options.type in ('java_binary', 'java_library') and
      not options.bypass_platform_checks):
    deps_info['requires_android'] = options.requires_android
    deps_info['supports_android'] = options.supports_android

    # Validate platform compatibility against all transitive deps: a target
    # must require Android if any dep does, and every dep must support Android
    # if this target claims to.
    deps_require_android = (
        all_resources_deps +
        [d['name'] for d in all_library_deps if d['requires_android']])
    deps_not_support_android = ([
        d['name'] for d in all_library_deps if not d['supports_android']])

    if deps_require_android and not options.requires_android:
      raise Exception(
          'Some deps require building for the Android platform: ' +
          str(deps_require_android))

    if deps_not_support_android and options.supports_android:
      raise Exception('Not all deps support the Android platform: ' +
                      str(deps_not_support_android))

  if options.type in ('java_binary', 'java_library', 'android_apk'):
    # Direct deps form the compile-time classpath; transitive deps form the
    # full runtime classpath.
    javac_classpath = [c['jar_path'] for c in direct_library_deps]
    java_full_classpath = [c['jar_path'] for c in all_library_deps]
    deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
    deps_info['jar_path'] = options.jar_path
    if options.type == 'android_apk' or options.supports_android:
      deps_info['dex_path'] = options.dex_path
    if options.type == 'android_apk':
      deps_info['apk_path'] = options.apk_path
    config['javac'] = {
        'classpath': javac_classpath,
    }
    config['java'] = {'full_classpath': java_full_classpath}

  if options.type in ('java_binary', 'java_library'):
    # Only resources might have srcjars (normal srcjar targets are listed in
    # srcjar_deps). A resource's srcjar contains the R.java file for those
    # resources, and (like Android's default build system) we allow a library to
    # refer to the resources in any of its dependents.
    config['javac']['srcjars'] = [
        c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]

  if options.type == 'android_apk':
    # Apks will get their resources srcjar explicitly passed to the java step.
    config['javac']['srcjars'] = []

  if options.type == 'android_assets':
    # Assets come from two option lists: plain sources and sources that are
    # renamed on the device (sources + parallel destinations lists).
    all_asset_sources = []
    if options.asset_renaming_sources:
      all_asset_sources.extend(
          build_utils.ParseGypList(options.asset_renaming_sources))
    if options.asset_sources:
      all_asset_sources.extend(
          build_utils.ParseGypList(options.asset_sources))

    deps_info['assets'] = {'sources': all_asset_sources}
    if options.asset_renaming_destinations:
      deps_info['assets']['outputs'] = (build_utils.ParseGypList(
          options.asset_renaming_destinations))
    if options.disable_asset_compression:
      deps_info['assets']['disable_compression'] = True

  if options.type == 'android_resources':
    deps_info['resources_zip'] = options.resources_zip
    if options.srcjar:
      deps_info['srcjar'] = options.srcjar
    if options.android_manifest:
      manifest = AndroidManifest(options.android_manifest)
      deps_info['package_name'] = manifest.GetPackageName()
    # --package-name takes precedence over the manifest's package name.
    if options.package_name:
      deps_info['package_name'] = options.package_name
    if options.r_text:
      deps_info['r_text'] = options.r_text

  if options.type in ('android_resources', 'android_apk',
                      'resource_rewriter'):
    config['resources'] = {}
    config['resources']['dependency_zips'] = [
        c['resources_zip'] for c in all_resources_deps]
    config['resources']['extra_package_names'] = []
    config['resources']['extra_r_text_files'] = []

  if options.type == 'android_apk' or options.type == 'resource_rewriter':
    # Apks and resource rewriters additionally pull in package names and
    # R.txt files from every transitive resource dep that provides them.
    config['resources']['extra_package_names'] = [
        c['package_name'] for c in all_resources_deps if 'package_name' in c]
    config['resources']['extra_r_text_files'] = [
        c['r_text'] for c in all_resources_deps if 'r_text' in c]

  if options.type in ['android_apk', 'deps_dex']:
    deps_dex_files = [c['dex_path'] for c in all_library_deps]

    proguard_enabled = options.proguard_enabled
    if options.type == 'android_apk':
      deps_info['proguard_enabled'] = proguard_enabled

    if proguard_enabled:
      deps_info['proguard_info'] = options.proguard_info
      config['proguard'] = {}
      proguard_config = config['proguard']
      proguard_config['input_paths'] = [options.jar_path] + java_full_classpath
      proguard_config['tested_apk_info'] = ''

  # An instrumentation test apk should exclude the dex files that are in the apk
  # under test.
  if options.type == 'android_apk' and options.tested_apk_config:
    tested_apk_deps = Deps([options.tested_apk_config])
    tested_apk_library_deps = tested_apk_deps.All('java_library')
    tested_apk_deps_dex_files = [
        c['dex_path'] for c in tested_apk_library_deps]
    deps_dex_files = [
        p for p in deps_dex_files if not p in tested_apk_deps_dex_files]

    tested_apk_config = GetDepConfig(options.tested_apk_config)
    expected_tested_package = tested_apk_config['package_name']
    AndroidManifest(options.android_manifest).CheckInstrumentation(
        expected_tested_package)
    if tested_apk_config['proguard_enabled']:
      assert proguard_enabled, ('proguard must be enabled for instrumentation'
                                ' apks if it\'s enabled for the tested apk')
      proguard_config['tested_apk_info'] = tested_apk_config['proguard_info']

    deps_info['tested_apk_path'] = tested_apk_config['apk_path']

  # Dependencies for the final dex file of an apk or a 'deps_dex'.
  if options.type in ['android_apk', 'deps_dex']:
    config['final_dex'] = {}
    dex_config = config['final_dex']
    dex_config['dependency_dex_files'] = deps_dex_files

  if options.type == 'android_apk':
    config['dist_jar'] = {
        'dependency_jars': [c['jar_path'] for c in all_library_deps]
    }
    manifest = AndroidManifest(options.android_manifest)
    deps_info['package_name'] = manifest.GetPackageName()
    if not options.tested_apk_config and manifest.GetInstrumentation():
      # This must then have instrumentation only for itself.
      manifest.CheckInstrumentation(manifest.GetPackageName())

    # Native library ordering: library_paths and the java literal list are
    # filled in by recompute_ordered_libraries, which mutates these holders so
    # that the (possibly skipped) stale-check callback can report results.
    library_paths = []
    java_libraries_list_holder = [None]
    libraries = build_utils.ParseGypList(options.native_libs or '[]')
    if libraries:
      def recompute_ordered_libraries():
        libraries_dir = os.path.dirname(libraries[0])
        write_ordered_libraries.SetReadelfPath(options.readelf_path)
        write_ordered_libraries.SetLibraryDirs([libraries_dir])
        all_deps = (
            write_ordered_libraries.
            GetSortedTransitiveDependenciesForBinaries(libraries))
        # Create a java literal array with the "base" library names:
        # e.g. libfoo.so -> foo
        java_libraries_list_holder[0] = (
            '{%s}' % ','.join(['"%s"' % s[3:-3] for s in all_deps]))
        library_paths.extend(
            write_ordered_libraries.FullLibraryPath(x) for x in all_deps)

      # This step takes about 600ms on a z620 for chrome_apk, so it's worth
      # caching.
      md5_check.CallAndRecordIfStale(
          recompute_ordered_libraries,
          record_path=options.build_config + '.nativelibs.md5.stamp',
          input_paths=libraries,
          output_paths=[options.build_config])

      # When the callback was skipped (inputs unchanged), recover the values
      # from the previously written build_config.
      if not library_paths:
        prev_config = build_utils.ReadJson(options.build_config)
        java_libraries_list_holder[0] = (
            prev_config['native']['java_libraries_list'])
        library_paths.extend(prev_config['native']['libraries'])

    config['native'] = {
        'libraries': library_paths,
        'java_libraries_list': java_libraries_list_holder[0],
    }
    config['assets'], config['uncompressed_assets'] = (_MergeAssets(
        deps.All('android_assets')))

  build_utils.WriteJson(config, options.build_config, only_if_changed=True)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        deps.AllConfigPaths() + build_utils.GetPythonDependencies())
def GenerateBundleApks(bundle_path,
                       bundle_apks_path,
                       aapt2_path,
                       keystore_path,
                       keystore_password,
                       keystore_alias,
                       mode=None,
                       local_testing=False,
                       minimal=False,
                       minimal_sdk_version=None,
                       check_for_noop=True,
                       system_image_locales=None,
                       optimize_for=None):
  """Generate an .apks archive from an app bundle if needed.

  Args:
    bundle_path: Input bundle file path.
    bundle_apks_path: Output bundle .apks archive path. Name must end with
      '.apks' or this operation will fail.
    aapt2_path: Path to aapt2 build tool.
    keystore_path: Path to keystore.
    keystore_password: Keystore password, as a string.
    keystore_alias: Keystore signing key alias.
    mode: Build mode, which must be either None or one of BUILD_APKS_MODES.
    local_testing: Whether to pass bundletool's --local-testing flag.
    minimal: Create the minimal set of apks possible (english-only).
    minimal_sdk_version: Use this sdkVersion when |minimal| or
      |system_image_locales| args are present.
    check_for_noop: Use md5_check to short-circuit when inputs have not
      changed.
    system_image_locales: Locales to package in the APK when mode is "system"
      or "system_compressed".
    optimize_for: Overrides split configuration, which must be None or one of
      OPTIMIZE_FOR_OPTIONS.
  """
  device_spec = None
  if minimal_sdk_version:
    assert minimal or system_image_locales, (
        'minimal_sdk_version is only used when minimal or system_image_locales '
        'is specified')

  if minimal:
    # Measure with one language split installed. Use Hindi because it is
    # popular. resource_size.py looks for splits/base-hi.apk.
    # Note: English is always included since it's in base-master.apk.
    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, ['hi'])
  elif mode in _SYSTEM_MODES:
    if not system_image_locales:
      raise Exception('system modes require system_image_locales')
    # Bundletool doesn't seem to understand device specs with locales in the
    # form of "<lang>-r<region>", so just provide the language code instead.
    locales = [
        resource_utils.ToAndroidLocaleName(l).split('-')[0]
        for l in system_image_locales
    ]
    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, locales)

  def rebuild():
    # Build into a temp dir and move into place atomically at the end, so a
    # failed build never leaves a partial .apks at bundle_apks_path.
    logging.info('Building %s', bundle_apks_path)
    with build_utils.TempDir() as tmp_dir:
      tmp_apks_file = os.path.join(tmp_dir, 'output.apks')
      cmd_args = [
          'build-apks',
          '--aapt2=%s' % aapt2_path,
          '--output=%s' % tmp_apks_file,
          '--ks=%s' % keystore_path,
          '--ks-pass=pass:%s' % keystore_password,
          '--ks-key-alias=%s' % keystore_alias,
          '--overwrite',
      ]

      input_bundle_path = bundle_path
      # Work around bundletool not respecting uncompressDexFiles setting.
      # b/176198991
      if mode not in _SYSTEM_MODES and _BundleMinSdkVersion(bundle_path) >= 27:
        input_bundle_path = os.path.join(tmp_dir, 'system.aab')
        _FixBundleDexCompressionGlob(bundle_path, input_bundle_path)

      cmd_args += ['--bundle=%s' % input_bundle_path]

      if local_testing:
        cmd_args += ['--local-testing']

      if mode is not None:
        if mode not in BUILD_APKS_MODES:
          raise Exception('Invalid mode parameter %s (should be in %s)' %
                          (mode, BUILD_APKS_MODES))
        cmd_args += ['--mode=' + mode]

      if optimize_for:
        if optimize_for not in OPTIMIZE_FOR_OPTIONS:
          # Fixed: previously formatted |mode| into this message, which made
          # the error report the wrong value.
          raise Exception('Invalid optimize_for parameter %s '
                          '(should be in %s)' %
                          (optimize_for, OPTIMIZE_FOR_OPTIONS))
        cmd_args += ['--optimize-for=' + optimize_for]

      if device_spec:
        spec_file = os.path.join(tmp_dir, 'device.json')
        with open(spec_file, 'w') as f:
          json.dump(device_spec, f)
        cmd_args += ['--device-spec=' + spec_file]

      bundletool.RunBundleTool(cmd_args)

      shutil.move(tmp_apks_file, bundle_apks_path)

  if check_for_noop:
    input_paths = [
        bundle_path,
        bundletool.BUNDLETOOL_JAR_PATH,
        aapt2_path,
        keystore_path,
    ]
    input_strings = [
        keystore_password,
        keystore_alias,
        device_spec,
    ]
    if mode is not None:
      input_strings.append(mode)

    # Avoid rebuilding (saves ~20s) when the input files have not changed. This
    # is essential when calling the apk_operations.py script multiple times with
    # the same bundle (e.g. out/Debug/bin/monochrome_public_bundle run).
    md5_check.CallAndRecordIfStale(rebuild,
                                   input_paths=input_paths,
                                   input_strings=input_strings,
                                   output_paths=[bundle_apks_path])
  else:
    rebuild()