def predicate(f):
  """Return True iff class file |f| should be kept in the output jar."""
  # Exclude filters take precedence over include filters.
  if build_utils.MatchesGlob(f, excluded_classes):
    return False
  if not included_classes:
    return True
  return bool(build_utils.MatchesGlob(f, included_classes))
def path_transform(path):
  """Return |path| unchanged when it should be kept, or None to drop it."""
  # Exclude filters take precedence over include filters.
  excluded = build_utils.MatchesGlob(path, exclude_globs)
  not_included = (include_globs
                  and not build_utils.MatchesGlob(path, include_globs))
  return None if excluded or not_included else path
def _CreateKeepPredicate(resource_dirs, resource_blacklist_regex, resource_blacklist_exceptions, android_locale_whitelist): """Return a predicate lambda to determine which resource files to keep. Args: resource_dirs: list of top-level resource directories. resource_blacklist_regex: A regular expression describing all resources to exclude, except if they are mip-maps, or if they are listed in |resource_blacklist_exceptions|. resource_blacklist_exceptions: A list of glob patterns corresponding to exceptions to the |resource_blacklist_regex|. android_locale_whitelist: An optional whitelist of Android locale names. If set, any localized string resources that is not in this whitelist will be removed. Returns: A lambda that takes a path, and returns true if the corresponding file must be kept. """ if resource_blacklist_regex == '' and not android_locale_whitelist: # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. return lambda path: os.path.basename(path)[0] != '.' if resource_blacklist_regex != '': # A simple predicate that only removes (returns False for) paths covered by # the blacklist regex, except if they are mipmaps, or listed as exceptions. naive_predicate = lambda path: (not re.search( resource_blacklist_regex, path) or re.search( r'[/-]mipmap[/-]', path) or build_utils.MatchesGlob( path, resource_blacklist_exceptions)) # Build a set of all names from drawables kept by naive_predicate(). # Used later to ensure that we never exclude drawables from densities # that are filtered-out by naive_predicate(). non_filtered_drawables = set() for resource_dir in resource_dirs: for path in _IterFiles(resource_dir): if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path): non_filtered_drawables.add(_ResourceNameFromPath(path)) # NOTE: Defined as a function because when using a lambda definition, # 'git cl format' will expand everything on a very long line that is # much larger than the column limit. 
def drawable_predicate(path): return (naive_predicate(path) or _ResourceNameFromPath(path) not in non_filtered_drawables) if not android_locale_whitelist: return drawable_predicate # A simple predicate that removes localized strings .xml files that are # not part of |android_locale_whitelist|. android_locale_whitelist = set(android_locale_whitelist) def is_bad_locale(path): """Return true iff |path| is a resource for a non-whitelisted locale.""" locale = resource_utils.FindLocaleInStringResourceFilePath(path) return locale and locale not in android_locale_whitelist return lambda path: drawable_predicate(path) and not is_bad_locale(path)
def _ZipResources(resource_dirs, zip_path, ignore_pattern):
  """Zips files from |resource_dirs| into |zip_path| plus a .info file.

  Args:
    resource_dirs: Resource directories to walk. Later directories override
      earlier ones when their archive paths collide.
    zip_path: Output zip path; a resource .info file is also created for it.
    ignore_pattern: ignore_pattern is a string of ':' delimited list of globs
      used to ignore files that should not be part of the final resource zip.
  """
  # Python zipfile does not provide a way to replace a file (it just writes
  # another file with the same name). So, first collect all the files to put
  # in the zip (with proper overriding), and then zip them.
  files_to_zip = dict()
  files_to_zip_without_generated = dict()
  globs = _GenerateGlobs(ignore_pattern)
  for d in resource_dirs:
    for root, _, files in os.walk(d):
      for f in files:
        # Archive path = path relative to the resource directory root.
        archive_path = f
        parent_dir = os.path.relpath(root, d)
        if parent_dir != '.':
          archive_path = os.path.join(parent_dir, f)
        path = os.path.join(root, f)
        if build_utils.MatchesGlob(archive_path, globs):
          continue
        # We want the original resource dirs in the .info file rather than the
        # generated overridden path.
        # NOTE(review): generated files appear to be detected purely by a
        # '/tmp' path prefix — confirm the overriding resources are always
        # staged under /tmp by the caller.
        if not path.startswith('/tmp'):
          files_to_zip_without_generated[archive_path] = path
        files_to_zip[archive_path] = path
  resource_utils.CreateResourceInfoFile(files_to_zip_without_generated,
                                        zip_path)
  # NOTE: dict.iteritems() — this file targets Python 2.
  build_utils.DoZip(files_to_zip.iteritems(), zip_path)
def _CollectResourcesListFromDirectory(self, res_dir):
  """Returns a set of R.txt symbol entries for all resources under |res_dir|."""
  ret = set()
  globs = resource_utils._GenerateGlobs(self.ignore_pattern)
  for root, _, files in os.walk(res_dir):
    # The resource type is the directory name, minus any config qualifier
    # suffix (e.g. 'values-v17' -> 'values').
    resource_type = os.path.basename(root)
    if '-' in resource_type:
      resource_type = resource_type[:resource_type.index('-')]
    for f in files:
      if build_utils.MatchesGlob(f, globs):
        continue
      if resource_type == 'values':
        # values/ files declare resources inside the XML, not by file name.
        ret.update(self._ParseValuesXml(os.path.join(root, f)))
      else:
        # For non-values resources, the name is the file name up to the
        # first dot (e.g. 'icon.9.png' -> 'icon').
        if '.' in f:
          resource_name = f[:f.index('.')]
        else:
          resource_name = f
        ret.add(
            _TextSymbolEntry('int', resource_type, resource_name,
                             _DUMMY_RTXT_ID))
        # Other types not just layouts can contain new ids (eg: Menus and
        # Drawables). Just in case, look for new ids in all files.
        if f.endswith('.xml'):
          ret.update(self._ExtractNewIdsFromXml(os.path.join(root, f)))
  return ret
def _CreateInfo(aar_file, resource_exclusion_globs):
  """Extracts and return .info data from an .aar file.

  Args:
    aar_file: Path to an input .aar file.
    resource_exclusion_globs: List of globs that exclude res/ files.

  Returns:
    A dict containing .info data.
  """
  data = {
      'aidl': [],
      'assets': [],
      'resources': [],
      'subjars': [],
      'subjar_tuples': [],
      'has_classes_jar': False,
      'has_proguard_flags': False,
      'has_native_libraries': False,
      'has_r_text_file': False,
  }
  with zipfile.ZipFile(aar_file) as z:
    manifest_xml = ElementTree.fromstring(z.read('AndroidManifest.xml'))
    data['is_manifest_empty'] = _IsManifestEmpty(manifest_xml)
    manifest_package = _GetManifestPackage(manifest_xml)
    if manifest_package:
      data['manifest_package'] = manifest_package

    for name in z.namelist():
      # Skip directory entries.
      if name.endswith('/'):
        continue
      if name.startswith('aidl/'):
        data['aidl'].append(name)
      elif name.startswith('res/'):
        if not build_utils.MatchesGlob(name, resource_exclusion_globs):
          data['resources'].append(name)
      elif name.startswith('libs/') and name.endswith('.jar'):
        # Derive a GN-safe label from the jar's base name.
        label = posixpath.basename(name)[:-4]
        label = re.sub(r'[^a-zA-Z0-9._]', '_', label)
        data['subjars'].append(name)
        data['subjar_tuples'].append([label, name])
      elif name.startswith('assets/'):
        data['assets'].append(name)
      elif name.startswith('jni/'):
        data['has_native_libraries'] = True
        data.setdefault('native_libraries', []).append(name)
      elif name == 'classes.jar':
        data['has_classes_jar'] = True
      elif name == _PROGUARD_TXT:
        data['has_proguard_flags'] = True
      elif name == 'R.txt':
        # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
        # have no resources as well. We treat empty R.txt as having no R.txt.
        data['has_r_text_file'] = bool(z.read('R.txt').strip())
  return data
def MergeApk(args, tmp_apk, tmp_dir_32, tmp_dir_64):
  """Builds |tmp_apk| from the 64-bit APK plus 32-bit-specific files.

  Args:
    args: Parsed command-line arguments (apk_32bit, apk_64bit, bundle, ...).
    tmp_apk: Path of the merged output APK.
    tmp_dir_32: Scratch directory the 32-bit APK is unpacked into.
    tmp_dir_64: Scratch directory the 64-bit APK is unpacked into.
  """
  # expected_files is the set of 32-bit related files that we expect to differ
  # between a 32- and 64-bit build. Hence, they will be skipped when seeding
  # the generated APK with the original 64-bit version, and explicitly copied
  # in from the 32-bit version.
  expected_files = []
  assets_path = 'base/assets' if args.bundle else 'assets'
  expected_files.append('%s/snapshot_blob_32.bin' % assets_path)
  if args.has_unwind_cfi:
    expected_files.append('%s/unwind_cfi_32' % assets_path)

  # All native libraries are assumed to differ, and will be merged.
  with zipfile.ZipFile(args.apk_32bit) as z:
    expected_files.extend([p for p in z.namelist() if p.endswith('.so')])

  UnpackApk(args.apk_32bit, tmp_dir_32)
  UnpackApk(args.apk_64bit, tmp_dir_64, expected_files)

  # These are files that we know will be different, and we will hence ignore
  # in the file comparison.
  ignores = ['META-INF', 'AndroidManifest.xml']
  if args.ignore_classes_dex:
    ignores += ['classes.dex', 'classes2.dex', 'classes3.dex']
  if args.debug:
    # see http://crbug.com/648720
    ignores += ['webview_licenses.notice']
  if args.bundle:
    # if merging a bundle we must ignore the bundle specific
    # proto files as they will always be different.
    ignores += ['BundleConfig.pb', 'native.pb']

  dcmp = filecmp.dircmp(
      tmp_dir_64,
      tmp_dir_32,
      ignore=ignores)

  diff_files = GetDiffFiles(dcmp, tmp_dir_32)

  # Check that diff_files match exactly those files we want to insert into
  # the 64-bit APK.
  CheckFilesExpected(diff_files, expected_files)

  with zipfile.ZipFile(tmp_apk, 'w') as out_zip:
    exclude_patterns = ['META-INF/*'] + expected_files

    # Build the initial merged APK from the 64-bit APK, excluding all files we
    # will pull from the 32-bit APK.
    path_transform = (
        lambda p: None if build_utils.MatchesGlob(p, exclude_patterns) else p)
    build_utils.MergeZips(out_zip, [args.apk_64bit],
                          path_transform=path_transform)

    # Add the files from the 32-bit APK.
    AddDiffFiles(diff_files, tmp_dir_32, out_zip,
                 args.uncompress_shared_libraries)
def _MergeRTxt(r_paths, include_globs): """Merging the given R.txt files and returns them as a string.""" all_lines = set() for r_path in r_paths: if include_globs and not build_utils.MatchesGlob(r_path, include_globs): continue with open(r_path) as f: all_lines.update(f.readlines()) return ''.join(sorted(all_lines))
def MergeApk(args, tmp_apk, tmp_dir_32, tmp_dir_64):
  """Builds |tmp_apk| from the 64-bit APK plus expected 32-bit files.

  Args:
    args: Parsed command-line arguments.
    tmp_apk: Path of the merged output APK.
    tmp_dir_32: Scratch directory the 32-bit APK is unpacked into.
    tmp_dir_64: Scratch directory the 64-bit APK is unpacked into.
  """
  # Expected files to copy from 32- to 64-bit APK together with whether to
  # compress within the .apk.
  expected_files = {'snapshot_blob_32.bin': False}
  if args.shared_library:
    expected_files[args.shared_library] = not args.uncompress_shared_libraries
  if args.has_unwind_cfi:
    expected_files['unwind_cfi_32'] = False

  # TODO(crbug.com/839191): we should pass this in via script arguments.
  if not args.loadable_module_32:
    args.loadable_module_32.append('libarcore_sdk_c_minimal.so')

  for f in args.loadable_module_32:
    expected_files[f] = not args.uncompress_shared_libraries

  for f in args.loadable_module_64:
    expected_files[f] = not args.uncompress_shared_libraries

  # need to unpack APKs to compare their contents
  UnpackApk(args.apk_64bit, tmp_dir_64)
  UnpackApk(args.apk_32bit, tmp_dir_32)

  # Files that are expected to differ between the two APKs and are hence
  # ignored by the directory comparison below.
  ignores = ['META-INF', 'AndroidManifest.xml']
  if args.ignore_classes_dex:
    ignores += ['classes.dex', 'classes2.dex']
  if args.debug:
    # see http://crbug.com/648720
    ignores += ['webview_licenses.notice']

  dcmp = filecmp.dircmp(tmp_dir_64, tmp_dir_32, ignore=ignores)

  diff_files = GetDiffFiles(dcmp, tmp_dir_32)

  # Check that diff_files match exactly those files we want to insert into
  # the 64-bit APK.
  CheckFilesExpected(diff_files, expected_files, args.component_build)

  with zipfile.ZipFile(tmp_apk, 'w') as out_zip:
    exclude_patterns = ['META-INF/*']

    # If there are libraries for which we don't want the 32 bit versions, we
    # should remove them here.
    if args.loadable_module_32:
      exclude_patterns.extend([
          '*' + f for f in args.loadable_module_32
          if f not in args.loadable_module_64
      ])

    path_transform = (
        lambda p: None if build_utils.MatchesGlob(p, exclude_patterns) else p)
    build_utils.MergeZips(out_zip, [args.apk_64bit],
                          path_transform=path_transform)
    AddDiffFiles(diff_files, tmp_dir_32, out_zip, expected_files,
                 args.component_build, args.uncompress_shared_libraries)
def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir): patterns = [ x[1] for x in exclusion_rules if build_utils.MatchesGlob(dep_subdir, [x[0]]) ] if not patterns: return None regexes = [re.compile(p) for p in patterns] return lambda x: not any(r.search(x) for r in regexes)
def main(args):
  """Creates a zip archive from --input-files and/or merged --input-zips.

  Args:
    args: Command-line argument list (may contain GN file-arg references).
  """
  args = build_utils.ExpandFileArgs(args)
  # BUG FIX: |args| was previously passed as the first positional parameter
  # of ArgumentParser(), which is |prog| (the program name used in help
  # output) — not the arguments. They are parsed via parse_args(args) below.
  parser = argparse.ArgumentParser()
  parser.add_argument('--input-files', help='GN-list of files to zip.')
  parser.add_argument(
      '--input-files-base-dir',
      help='Paths in the archive will be relative to this directory')
  parser.add_argument('--input-zips', help='GN-list of zips to merge.')
  parser.add_argument(
      '--input-zips-excluded-globs',
      help='GN-list of globs for paths to exclude.')
  parser.add_argument('--output', required=True, help='Path to output archive.')
  compress_group = parser.add_mutually_exclusive_group()
  compress_group.add_argument(
      '--compress', action='store_true', help='Compress entries')
  compress_group.add_argument(
      '--no-compress',
      action='store_false',
      dest='compress',
      help='Do not compress entries')
  build_utils.AddDepfileOption(parser)
  options = parser.parse_args(args)

  with build_utils.AtomicOutput(options.output) as f:
    with zipfile.ZipFile(f.name, 'w') as out_zip:
      depfile_deps = None
      if options.input_files:
        files = build_utils.ParseGnList(options.input_files)
        build_utils.DoZip(
            files,
            out_zip,
            base_dir=options.input_files_base_dir,
            compress_fn=lambda _: options.compress)

      if options.input_zips:
        files = build_utils.ParseGnList(options.input_zips)
        depfile_deps = files
        path_transform = None
        if options.input_zips_excluded_globs:
          globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
          # Drop (map to None) any merged entry matching the excluded globs.
          path_transform = (
              lambda p: None if build_utils.MatchesGlob(p, globs) else p)
        build_utils.MergeZips(
            out_zip,
            files,
            path_transform=path_transform,
            compress=options.compress)

  # Depfile used only by dist_jar().
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile, options.output, inputs=depfile_deps, add_pydeps=False)
def IterResourceFilesInDirectories(directories,
                                   ignore_pattern=AAPT_IGNORE_PATTERN):
  """Yields (file_path, archive_path) pairs for resources in |directories|."""
  globs = _GenerateGlobs(ignore_pattern)
  for directory in directories:
    for root, _, filenames in os.walk(directory):
      # Path of |root| relative to its resource directory; invariant for
      # every file in this directory.
      rel_dir = os.path.relpath(root, directory)
      for filename in filenames:
        if rel_dir == '.':
          archive_path = filename
        else:
          archive_path = os.path.join(rel_dir, filename)
        if build_utils.MatchesGlob(archive_path, globs):
          continue
        yield os.path.join(root, filename), archive_path
def _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs): # Check if ProGuard kept any unwanted classes. found_unwanted_classes = sorted( p for p in kept_classes if build_utils.MatchesGlob(p, negative_main_dex_globs)) if found_unwanted_classes: first_class = found_unwanted_classes[0].replace('.class', '').replace('/', '.') proguard_cmd += ['-whyareyoukeeping', 'class', first_class, '{}'] output = build_utils.CheckOutput( proguard_cmd, print_stderr=False, stdout_filter=proguard_util.ProguardOutputFilter()) raise Exception( ('Found classes that should not be in the main dex:\n {}\n\n' 'Here is the -whyareyoukeeping output for {}: \n{}').format( '\n '.join(found_unwanted_classes), first_class, output))
def _AddResources(aar_zip, resource_zips, include_globs): """Adds all resource zips to the given aar_zip. Ensures all res/values/* files have unique names by prefixing them. """ for i, path in enumerate(resource_zips): if include_globs and not build_utils.MatchesGlob(path, include_globs): continue with zipfile.ZipFile(path) as res_zip: for info in res_zip.infolist(): data = res_zip.read(info) dirname, basename = posixpath.split(info.filename) if 'values' in dirname: root, ext = os.path.splitext(basename) basename = '{}_{}{}'.format(root, i, ext) info.filename = posixpath.join(dirname, basename) info.filename = posixpath.join('res', info.filename) aar_zip.writestr(info, data)
def _CreateKeepPredicate(resource_dirs, resource_blacklist_regex, resource_blacklist_exceptions): """Return a predicate lambda to determine which resource files to keep. Args: resource_dirs: list of top-level resource directories. resource_blacklist_regex: A regular expression describing all resources to exclude, except if they are mip-maps, or if they are listed in |resource_blacklist_exceptions|. resource_blacklist_exceptions: A list of glob patterns corresponding to exceptions to the |resource_blacklist_regex|. Returns: A lambda that takes a path, and returns true if the corresponding file must be kept. """ naive_predicate = lambda path: os.path.basename(path)[0] != '.' if resource_blacklist_regex == '': # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. return naive_predicate if resource_blacklist_regex != '': # A simple predicate that only removes (returns False for) paths covered by # the blacklist regex, except if they are mipmaps, or listed as exceptions. naive_predicate = lambda path: (not re.search( resource_blacklist_regex, path) or re.search( r'[/-]mipmap[/-]', path) or build_utils.MatchesGlob( path, resource_blacklist_exceptions)) # Build a set of all names from drawables kept by naive_predicate(). # Used later to ensure that we never exclude drawables from densities # that are filtered-out by naive_predicate(). non_filtered_drawables = set() for resource_dir in resource_dirs: for path in _IterFiles(resource_dir): if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path): non_filtered_drawables.add(_ResourceNameFromPath(path)) # NOTE: Defined as a function, instead of a lambda to avoid the # auto-formatter to put this on a very long line that overflows. def drawable_predicate(path): return (naive_predicate(path) or _ResourceNameFromPath(path) not in non_filtered_drawables) return drawable_predicate
def ExtractResourceDirsFromFileList(resource_files,
                                    ignore_pattern=AAPT_IGNORE_PATTERN):
  """Return a list of resource directories from a list of resource files."""
  # Directory list order is important, cannot use set or other data structures
  # that change order. This is because resource files of the same name in
  # multiple res/ directories ellide one another (the last one passed is
  # used). Thus the order must be maintained to prevent non-deterministic and
  # possibly flakey builds.
  globs = _GenerateGlobs(ignore_pattern)
  seen_dirs = set()
  ordered_dirs = []
  for resource_path in resource_files:
    if build_utils.MatchesGlob(os.path.basename(resource_path), globs):
      # Ignore non-resource files like OWNERS and the like.
      continue
    # Resources are always 1 directory deep under res/.
    res_dir = os.path.dirname(os.path.dirname(resource_path))
    if res_dir not in seen_dirs:
      seen_dirs.add(res_dir)
      ordered_dirs.append(res_dir)
  return ordered_dirs
def main():
  """Command-line entry point: packages .class files into an output jar."""
  parser = optparse.OptionParser()
  parser.add_option('--classes-dir', help='Directory containing .class files.')
  parser.add_option('--input-jar', help='Jar to include .class files from')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option(
      '--excluded-classes',
      help='GN list of .class file patterns to exclude from the jar.')
  parser.add_option(
      '--strip-resource-classes-for',
      help='GN list of java package names exclude R.class files in.')
  parser.add_option('--stamp', help='Path to touch on success.')

  args = build_utils.ExpandFileArgs(sys.argv[1:])
  options, _ = parser.parse_args(args)

  # Current implementation supports just one or the other of these:
  assert not options.classes_dir or not options.input_jar

  excluded_classes = []
  if options.excluded_classes:
    excluded_classes = build_utils.ParseGnList(options.excluded_classes)

  if options.strip_resource_classes_for:
    # Exclude the generated R-style classes of the listed packages.
    packages = build_utils.ParseGnList(options.strip_resource_classes_for)
    excluded_classes.extend(p.replace('.', '/') + '/' + f
                            for p in packages for f in _RESOURCE_CLASSES)

  predicate = None
  if excluded_classes:
    # Keep only classes that do not match any exclusion glob.
    predicate = lambda f: not build_utils.MatchesGlob(f, excluded_classes)

  with build_utils.TempDir() as temp_dir:
    classes_dir = options.classes_dir
    if options.input_jar:
      # Jar the extracted contents of --input-jar instead of --classes-dir.
      build_utils.ExtractAll(options.input_jar, temp_dir)
      classes_dir = temp_dir
    JarDirectory(classes_dir, options.jar_path, predicate=predicate)

  if options.stamp:
    build_utils.Touch(options.stamp)
def main():
  """Command-line entry point: packages a directory of .class files as a jar."""
  parser = optparse.OptionParser()
  for flag, description in (
      ('--classes-dir', 'Directory containing .class files.'),
      ('--jar-path', 'Jar output path.'),
      ('--excluded-classes',
       'List of .class file patterns to exclude from the jar.'),
      ('--stamp', 'Path to touch on success.')):
    parser.add_option(flag, help=description)
  options, _ = parser.parse_args()

  predicate = None
  if options.excluded_classes:
    # Keep only classes that do not match any exclusion glob.
    exclusions = build_utils.ParseGypList(options.excluded_classes)
    predicate = lambda name: not build_utils.MatchesGlob(name, exclusions)

  JarDirectory(options.classes_dir, options.jar_path, predicate=predicate)

  if options.stamp:
    build_utils.Touch(options.stamp)
def _CreateKeepPredicate(resource_dirs, exclude_xxxhdpi, xxxhdpi_whitelist): if not exclude_xxxhdpi: # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. return lambda path: os.path.basename(path)[0] != '.' # Returns False only for xxxhdpi non-mipmap, non-whitelisted drawables. naive_predicate = lambda path: ( not re.search(r'[/-]xxxhdpi[/-]', path) or re.search(r'[/-]mipmap[/-]', path) or build_utils.MatchesGlob(path, xxxhdpi_whitelist)) # Build a set of all non-xxxhdpi drawables to ensure that we never exclude any # xxxhdpi drawable that does not exist in other densities. non_xxxhdpi_drawables = set() for resource_dir in resource_dirs: for path in build_utils.IterFiles(resource_dir): if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path): non_xxxhdpi_drawables.add(_ResourceNameFromPath(path)) return lambda path: (naive_predicate(path) or _ResourceNameFromPath(path) not in non_xxxhdpi_drawables)
def _CreateKeepPredicate(resource_dirs, resource_blacklist_regex, resource_blacklist_exceptions): """Return a predicate lambda to determine which resource files to keep.""" if resource_blacklist_regex == '': # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. return lambda path: os.path.basename(path)[0] != '.' # Returns False only for non-filtered, non-mipmap, non-whitelisted drawables. naive_predicate = lambda path: ( not re.search(resource_blacklist_regex, path) or re.search( r'[/-]mipmap[/-]', path) or build_utils.MatchesGlob( path, resource_blacklist_exceptions)) # Build a set of all non-filtered drawables to ensure that we never exclude # any drawable that does not exist in non-filtered densities. non_filtered_drawables = set() for resource_dir in resource_dirs: for path in _IterFiles(resource_dir): if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path): non_filtered_drawables.add(_ResourceNameFromPath(path)) return lambda path: (naive_predicate(path) or _ResourceNameFromPath(path) not in non_filtered_drawables)
def _CreateKeepPredicate(resource_blacklist_regex, resource_blacklist_exceptions): """Return a predicate lambda to determine which resource files to keep. Args: resource_blacklist_regex: A regular expression describing all resources to exclude, except if they are mip-maps, or if they are listed in |resource_blacklist_exceptions|. resource_blacklist_exceptions: A list of glob patterns corresponding to exceptions to the |resource_blacklist_regex|. Returns: A lambda that takes a path, and returns true if the corresponding file must be kept. """ predicate = lambda path: os.path.basename(path)[0] != '.' if resource_blacklist_regex == '': # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. return predicate # A simple predicate that only removes (returns False for) paths covered by # the blacklist regex or listed as exceptions. return lambda path: (not re.search(resource_blacklist_regex, path) or build_utils.MatchesGlob( path, resource_blacklist_exceptions))
def MergeApk(args, tmp_apk, tmp_dir_32, tmp_dir_64):
  """Builds |tmp_apk| from the 64-bit APK plus expected 32-bit files.

  Args:
    args: Parsed command-line arguments.
    tmp_apk: Path of the merged output APK.
    tmp_dir_32: Scratch directory the 32-bit APK is unpacked into.
    tmp_dir_64: Scratch directory the 64-bit APK is unpacked into.
  """
  # Expected files to copy from 32- to 64-bit APK together with whether to
  # compress within the .apk.
  expected_files = {'snapshot_blob_32.bin': False}
  if args.shared_library:
    expected_files[args.shared_library] = not args.uncompress_shared_libraries
  if args.has_unwind_cfi:
    expected_files['unwind_cfi_32'] = False

  # TODO(crbug.com/839191): we should pass this in via script arguments.
  if not args.loadable_module_32:
    args.loadable_module_32.append('libarcore_sdk_c.so')

  for f in args.loadable_module_32:
    expected_files[f] = not args.uncompress_shared_libraries

  for f in args.loadable_module_64:
    expected_files[f] = not args.uncompress_shared_libraries

  # need to unpack APKs to compare their contents
  assets_path = 'base/assets' if args.bundle else 'assets'
  # Files to skip when seeding from the 64-bit APK (they are replaced by
  # their 32-bit counterparts below).
  exclude_files_64 = [
      '%s/snapshot_blob_32.bin' % assets_path,
      GetTargetAbiPath(args.apk_32bit, args.shared_library)
  ]
  if 'libcrashpad_handler.so' in expected_files:
    exclude_files_64.append(
        GetTargetAbiPath(args.apk_32bit, 'libcrashpad_handler.so'))
  if 'libcrashpad_handler_trampoline.so' in expected_files:
    exclude_files_64.append(
        GetTargetAbiPath(args.apk_32bit, 'libcrashpad_handler_trampoline.so'))
  if args.has_unwind_cfi:
    exclude_files_64.append('%s/unwind_cfi_32' % assets_path)
  UnpackApk(args.apk_64bit, tmp_dir_64, exclude_files_64)
  UnpackApk(args.apk_32bit, tmp_dir_32)

  # Files that are expected to differ between the two APKs and are hence
  # ignored by the directory comparison below.
  ignores = ['META-INF', 'AndroidManifest.xml']
  if args.ignore_classes_dex:
    ignores += ['classes.dex', 'classes2.dex']
  if args.debug:
    # see http://crbug.com/648720
    ignores += ['webview_licenses.notice']
  if args.bundle:
    # if merging a bundle we must ignore the bundle specific
    # proto files as they will always be different.
    ignores += ['BundleConfig.pb', 'native.pb', 'resources.pb']

  dcmp = filecmp.dircmp(tmp_dir_64, tmp_dir_32, ignore=ignores)

  diff_files = GetDiffFiles(dcmp, tmp_dir_32)

  # Check that diff_files match exactly those files we want to insert into
  # the 64-bit APK.
  CheckFilesExpected(diff_files, expected_files, args.component_build)

  with zipfile.ZipFile(tmp_apk, 'w') as out_zip:
    exclude_patterns = ['META-INF/*'] + exclude_files_64

    # If there are libraries for which we don't want the 32 bit versions, we
    # should remove them here.
    if args.loadable_module_32:
      exclude_patterns.extend([
          '*' + f for f in args.loadable_module_32
          if f not in args.loadable_module_64
      ])

    path_transform = (
        lambda p: None if build_utils.MatchesGlob(p, exclude_patterns) else p)
    build_utils.MergeZips(out_zip, [args.apk_64bit],
                          path_transform=path_transform)
    AddDiffFiles(diff_files, tmp_dir_32, out_zip, expected_files,
                 args.component_build, args.uncompress_shared_libraries)
def main():
  """Command-line entry point: builds a source jar from --src-* inputs."""
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument(
      '--excluded-classes',
      help='A list of .class file patterns to exclude from the jar.')
  parser.add_argument(
      '--src-search-dirs',
      action='append',
      help='A list of directories that should be searched'
      ' for the source files.')
  parser.add_argument(
      '--src-files', action='append', help='A list of source files to jar.')
  parser.add_argument(
      '--src-jars',
      action='append',
      help='A list of source jars to include in addition to source files.')
  parser.add_argument(
      '--src-list-files',
      action='append',
      help='A list of files that contain a list of sources,'
      ' e.g. a list of \'.sources\' files generated by GN.')
  parser.add_argument('--jar-path', help='Jar output path.', required=True)
  options = parser.parse_args()

  # NOTE(review): with action='append' these default to None when a flag is
  # absent; the loops below assume --src-jars, --src-search-dirs and
  # --src-files are always passed — confirm against the GN rules.
  src_jars = []
  for gn_list in options.src_jars:
    src_jars.extend(build_utils.ParseGnList(gn_list))

  src_search_dirs = []
  for gn_src_search_dirs in options.src_search_dirs:
    src_search_dirs.extend(build_utils.ParseGnList(gn_src_search_dirs))

  src_list_files = []
  if options.src_list_files:
    for gn_src_list_file in options.src_list_files:
      src_list_files.extend(build_utils.ParseGnList(gn_src_list_file))

  src_files = []
  for gn_src_files in options.src_files:
    src_files.extend(build_utils.ParseGnList(gn_src_files))

  # Add files from --source_list_files
  for src_list_file in src_list_files:
    with open(src_list_file, 'r') as f:
      src_files.extend(f.read().splitlines())

  # Preprocess source files by removing any prefix that comes before
  # the Java package name.
  for i, s in enumerate(src_files):
    prefix_position = s.find(JAVA_PACKAGE_PREFIX)
    if prefix_position != -1:
      src_files[i] = s[prefix_position:]

  # Exclusion globs arrive as .class patterns; convert to .java patterns.
  excluded_classes = []
  if options.excluded_classes:
    classes = build_utils.ParseGnList(options.excluded_classes)
    excluded_classes.extend(f.replace('.class', '.java') for f in classes)

  predicate = None
  if excluded_classes:
    predicate = lambda f: not build_utils.MatchesGlob(f, excluded_classes)

  # Create a dictionary that maps every source directory
  # to source files that it contains.
  dir_to_files_map = {}
  # Initialize the map.
  for src_search_dir in src_search_dirs:
    dir_to_files_map[src_search_dir] = []
  # Fill the map.
  for src_file in src_files:
    number_of_file_instances = 0
    for src_search_dir in src_search_dirs:
      target_path = os.path.join(src_search_dir, src_file)
      if os.path.isfile(target_path):
        number_of_file_instances += 1
        if not predicate or predicate(src_file):
          dir_to_files_map[src_search_dir].append(target_path)
    # Each source file must resolve in exactly one search directory.
    if (number_of_file_instances > 1):
      raise Exception(
          'There is more than one instance of file %s in %s'
          % (src_file, src_search_dirs))
    if (number_of_file_instances < 1):
      raise Exception('Unable to find file %s in %s'
                      % (src_file, src_search_dirs))

  # Jar the sources from every source search directory.
  with build_utils.AtomicOutput(options.jar_path) as o, \
      zipfile.ZipFile(o, 'w', zipfile.ZIP_DEFLATED) as z:
    for src_search_dir in src_search_dirs:
      subpaths = dir_to_files_map[src_search_dir]
      if subpaths:
        build_utils.DoZip(subpaths, z, base_dir=src_search_dir)
      else:
        raise Exception('Directory %s does not contain any files and can be'
                        ' removed from the list of directories to search' %
                        src_search_dir)

    # Jar additional src jars
    if src_jars:
      build_utils.MergeZips(z, src_jars, compress=True)

  if options.depfile:
    deps = []
    # NOTE: dict.itervalues() — this file targets Python 2.
    for sources in dir_to_files_map.itervalues():
      deps.extend(sources)
    # Srcjar deps already captured in GN rules (no need to list them here).
    build_utils.WriteDepfile(options.depfile, options.jar_path, deps)
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                runtime_classpath):
  """Compiles Java sources (optionally incrementally via jmake) and jars them.

  Args:
    changes: Change information from the md5_check framework.
    options: Parsed command-line options.
    javac_cmd: Base javac command line (list), extended below.
    java_files: List of .java files to compile; srcjar sources are appended.
    classpath_inputs: Files whose change forces a full (non-incremental)
      compile.
    runtime_classpath: Classpath recorded in the generated jar manifest.
  """
  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars
    # The .excluded.jar contains .class files excluded from the main jar.
    # It is used for incremental compiles.
    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    changed_paths = None
    # jmake can handle deleted files, but it's a rare case and it would
    # complicate this script's logic.
    if options.incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      # jmake doesn't seem to do this on its own... Might be that ijars mess
      # up its change-detection logic.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None

    if options.incremental:
      # jmake is a compiler wrapper that figures out the minimal set of .java
      # files that need to be rebuilt given a set of .java files that have
      # changed.
      # jmake determines what files are stale based on timestamps between
      # .java and .class files. Since we use .jars, .srcjars, and md5 checks,
      # timestamp info isn't accurate for this purpose. Rather than use
      # jmake's programatic interface (like we eventually should), we ensure
      # that all .class files are newer than their .java files, and convey to
      # jmake which sources are stale by having their .class files be missing
      # entirely (by not extracting them).
      pdb_path = options.jar_path + '.pdb'
      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
      if srcjars:
        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        if changed_paths:
          # Track srcjar-extracted sources by their extracted location.
          changed_paths.update(os.path.join(java_dir, f)
                               for f in changes.IterChangedSubpaths(srcjar))
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
      java_files.extend(jar_srcs)
      if changed_paths:
        # Set the mtime of all sources to 0 since we use the absense of
        # .class files to tell jmake which files are stale.
        for path in jar_srcs:
          os.utime(path, (0, 0))

    if java_files:
      if changed_paths:
        changed_java_files = [p for p in java_files if p in changed_paths]
        if os.path.exists(options.jar_path):
          _ExtractClassFiles(options.jar_path, classes_dir,
                             changed_java_files)
        if os.path.exists(excluded_jar_path):
          _ExtractClassFiles(excluded_jar_path, classes_dir,
                             changed_java_files)
        # Add the extracted files to the classpath. This is required because
        # when compiling only a subset of files, classes that haven't changed
        # need to be findable.
        classpath_idx = javac_cmd.index('-classpath')
        javac_cmd[classpath_idx + 1] += ':' + classes_dir

      # Don't include the output directory in the initial set of args since
      # it being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir] + java_files

      # JMake prints out some diagnostic logs that we want to ignore.
      # This assumes that all compiler output goes through stderr.
      stdout_filter = lambda s: ''
      if md5_check.PRINT_EXPLANATIONS:
        stdout_filter = None

      build_utils.CheckOutput(
          cmd,
          print_stdout=options.chromium_code,
          stdout_filter=stdout_filter,
          stderr_filter=ColorJavacOutput)

    if options.main_class or options.manifest_entry:
      entries = []
      if options.manifest_entry:
        # Each entry is a 'key:value' string.
        entries = [e.split(':') for e in options.manifest_entry]
      manifest_file = os.path.join(temp_dir, 'manifest')
      _CreateManifest(manifest_file, runtime_classpath, options.main_class,
                      entries)
    else:
      manifest_file = None

    # Split outputs: classes matching the exclusion globs go to the
    # .excluded.jar, everything else to the main jar.
    glob = options.jar_excluded_classes
    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
    exclusion_predicate = lambda f: not inclusion_predicate(f)

    jar.JarDirectory(classes_dir,
                     options.jar_path,
                     manifest_file=manifest_file,
                     predicate=inclusion_predicate)
    jar.JarDirectory(classes_dir,
                     excluded_jar_path,
                     predicate=exclusion_predicate)
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs,
                runtime_classpath):
  """Compiles |java_files| (plus sources extracted from srcjars) into two jars.

  Produces options.jar_path (classes kept by options.jar_excluded_classes'
  inverse) and a sibling .excluded.jar (the excluded classes, used for
  incremental compiles).

  Args:
    changes: md5_check change object; used to detect incremental-compile
        opportunities (added/modified-only changes).
    options: parsed command-line options (jar_path, java_srcjars,
        javac_includes, incremental, main_class, manifest_entry, ...).
    javac_cmd: base javac command line; mutated in place to extend
        '-classpath' when reusing extracted .class files.
    java_files: list of .java source paths; extended in place with sources
        extracted from srcjars.
    classpath_inputs: files whose change forces a full (non-incremental)
        compile.
    runtime_classpath: classpath written into the generated manifest.
  """
  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars
    # The .excluded.jar contains .class files excluded from the main jar.
    # It is used for incremental compiles.
    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    # changed_paths is non-None only when an incremental compile is possible.
    changed_paths = None
    if options.incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None
      else:
        # Restrict compilation to only the changed sources/srcjars.
        java_files = [p for p in java_files if p in changed_paths]
        srcjars = [p for p in srcjars if p in changed_paths]

    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        extract_predicate = None
        if changed_paths:
          # Only extract the subpaths that actually changed within the srcjar.
          changed_subpaths = set(changes.IterChangedSubpaths(srcjar))
          extract_predicate = lambda p: p in changed_subpaths
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java',
                               predicate=extract_predicate)
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      java_files.extend(_FilterJavaFiles(jar_srcs, options.javac_includes))

    if java_files:
      if changed_paths:
        # When no files have been removed and the output jar already
        # exists, reuse .class files from the existing jar.
        _ExtractClassFiles(options.jar_path, classes_dir, java_files)
        _ExtractClassFiles(excluded_jar_path, classes_dir, java_files)
        # Add the extracted files to the classpath.
        classpath_idx = javac_cmd.index('-classpath')
        javac_cmd[classpath_idx + 1] += ':' + classes_dir

      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir] + java_files

      build_utils.CheckOutput(cmd,
                              print_stdout=options.chromium_code,
                              stderr_filter=ColorJavacOutput)

    if options.main_class or options.manifest_entry:
      entries = []
      if options.manifest_entry:
        # Each entry is a 'Key:Value' pair on the command line.
        entries = [e.split(':') for e in options.manifest_entry]
      manifest_file = os.path.join(temp_dir, 'manifest')
      _CreateManifest(manifest_file, runtime_classpath, options.main_class,
                      entries)
    else:
      manifest_file = None

    # Split classes into the main jar (not matching the exclusion globs) and
    # the .excluded.jar (the complement).
    glob = options.jar_excluded_classes
    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
    exclusion_predicate = lambda f: not inclusion_predicate(f)

    jar.JarDirectory(classes_dir,
                     options.jar_path,
                     manifest_file=manifest_file,
                     predicate=inclusion_predicate)
    jar.JarDirectory(classes_dir,
                     excluded_jar_path,
                     predicate=exclusion_predicate)
def _ShouldIncludeInJarInfo(self, fully_qualified_name):
  """Returns True unless |fully_qualified_name| matches an excluded glob.

  The dotted class name is converted to its path-like .class form before
  being tested against self._excluded_globs.
  """
  class_file_glob = '%s.class' % fully_qualified_name.replace('.', '/')
  is_excluded = build_utils.MatchesGlob(class_file_glob, self._excluded_globs)
  return not is_excluded
def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs):
  """Compiles |java_files| (plus srcjar sources), optionally incrementally
  via jmake, into options.jar_path and a sibling .excluded.jar.

  Args:
    changes: md5_check change object used to decide whether an incremental
        (jmake) compile is possible and which paths changed.
    options: parsed command-line options (jar_path, java_srcjars,
        javac_includes, incremental, jar_excluded_classes,
        provider_configurations, additional_jar_files, ...).
    javac_cmd: base javac command line; rebound to a jmake command when
        incremental, and '-classpath' is extended in place when reusing
        extracted .class files.
    java_files: list of .java source paths; extended in place with sources
        extracted from srcjars.
    classpath_inputs: files whose change forces a full compile.
  """
  incremental = options.incremental
  # Don't bother enabling incremental compilation for third_party code, since
  # _CheckPathMatchesClassName() fails on some of it, and it's not really much
  # benefit.
  for java_file in java_files:
    if 'third_party' in java_file:
      incremental = False
    else:
      _CheckPathMatchesClassName(java_file)

  with build_utils.TempDir() as temp_dir:
    srcjars = options.java_srcjars
    # The .excluded.jar contains .class files excluded from the main jar.
    # It is used for incremental compiles.
    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')

    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)

    # changed_paths is non-None only when an incremental compile is possible.
    changed_paths = None
    # jmake can handle deleted files, but it's a rare case and it would
    # complicate this script's logic.
    if incremental and changes.AddedOrModifiedOnly():
      changed_paths = set(changes.IterChangedPaths())
      # Do a full compile if classpath has changed.
      # jmake doesn't seem to do this on its own... Might be that ijars mess up
      # its change-detection logic.
      if any(p in changed_paths for p in classpath_inputs):
        changed_paths = None

    # pdb_path is jmake's project database; only meaningful when the target
    # opted into incremental compilation (incremental implies
    # options.incremental, so pdb_path is always bound before use).
    if options.incremental:
      pdb_path = options.jar_path + '.pdb'

    if incremental:
      # jmake is a compiler wrapper that figures out the minimal set of .java
      # files that need to be rebuilt given a set of .java files that have
      # changed.
      # jmake determines what files are stale based on timestamps between .java
      # and .class files. Since we use .jars, .srcjars, and md5 checks,
      # timestamp info isn't accurate for this purpose. Rather than use jmake's
      # programatic interface (like we eventually should), we ensure that all
      # .class files are newer than their .java files, and convey to jmake which
      # sources are stale by having their .class files be missing entirely
      # (by not extracting them).
      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
      if srcjars:
        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)

    if srcjars:
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in options.java_srcjars:
        if changed_paths:
          # Map changed subpaths inside the srcjar to their extracted
          # locations so they count as changed sources below.
          changed_paths.update(os.path.join(java_dir, f)
                               for f in changes.IterChangedSubpaths(srcjar))
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
      jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
      java_files.extend(jar_srcs)
      if changed_paths:
        # Set the mtime of all sources to 0 since we use the absense of .class
        # files to tell jmake which files are stale.
        for path in jar_srcs:
          os.utime(path, (0, 0))

    if java_files:
      if changed_paths:
        changed_java_files = [p for p in java_files if p in changed_paths]
        if os.path.exists(options.jar_path):
          _ExtractClassFiles(options.jar_path, classes_dir, changed_java_files)
        if os.path.exists(excluded_jar_path):
          _ExtractClassFiles(excluded_jar_path, classes_dir,
                             changed_java_files)
        # Add the extracted files to the classpath. This is required because
        # when compiling only a subset of files, classes that haven't changed
        # need to be findable.
        classpath_idx = javac_cmd.index('-classpath')
        javac_cmd[classpath_idx + 1] += ':' + classes_dir

      # Can happen when a target goes from having no sources, to having sources.
      # It's created by the call to build_utils.Touch() below.
      if incremental:
        if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
          os.unlink(pdb_path)

      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = javac_cmd + ['-d', classes_dir] + java_files

      # JMake prints out some diagnostic logs that we want to ignore.
      # This assumes that all compiler output goes through stderr.
      stdout_filter = lambda s: ''
      if md5_check.PRINT_EXPLANATIONS:
        stdout_filter = None

      attempt_build = lambda: build_utils.CheckOutput(
          cmd,
          print_stdout=options.chromium_code,
          stdout_filter=stdout_filter,
          stderr_filter=ColorJavacOutput)
      try:
        attempt_build()
      except build_utils.CalledProcessError as e:
        # Work-around for a bug in jmake (http://crbug.com/551449).
        if 'project database corrupted' not in e.output:
          raise
        print('Applying work-around for jmake project database corrupted '
              '(http://crbug.com/551449).')
        # Delete the corrupted database and retry from scratch.
        os.unlink(pdb_path)
        attempt_build()

    if options.incremental and (not java_files or not incremental):
      # Make sure output exists.
      build_utils.Touch(pdb_path)

    # Split classes into the main jar (not matching the exclusion globs) and
    # the .excluded.jar (the complement).
    glob = options.jar_excluded_classes
    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
    exclusion_predicate = lambda f: not inclusion_predicate(f)

    jar.JarDirectory(classes_dir,
                     options.jar_path,
                     predicate=inclusion_predicate,
                     provider_configurations=options.provider_configurations,
                     additional_files=options.additional_jar_files)
    jar.JarDirectory(classes_dir,
                     excluded_jar_path,
                     predicate=exclusion_predicate,
                     provider_configurations=options.provider_configurations,
                     additional_files=options.additional_jar_files)
def _FilterJavaFiles(paths, filters): return [ f for f in paths if not filters or build_utils.MatchesGlob(f, filters) ]