def CopyResources(output_dir, resources, resource_strings):
  """Merges multiple resource zips into a single res/ tree under |output_dir|.

  Args:
    output_dir: Directory in which the merged 'res' directory is created.
    resources: Iterable of objects with a |filename| attribute (path to a
        partly-processed resource zip) and a |src| attribute (the original
        resource source directory, used to fetch uncrunched 9-patch images).
    resource_strings: Iterable of zip files containing .grd-generated .xml
        string files.
  """
  res_dir = os.path.join(output_dir, 'res')
  build_utils.MakeDirectory(res_dir)

  def _resource_predicate(name):
    """Predicate for the ExtractAll() call below.

    Makes sure only the files we want are extracted.
    """
    if name == 'OWNERS':
      return False
    _, ext = os.path.splitext(name)
    if ext not in ('.png', '.wav', '.xml'):
      # We raise an exception here because if there is a new file type being
      # packaged we need to check what changed compared to what was going on
      # before.
      raise ValueError("Unexpected file type: %s" % name)
    return True

  # Part 1: extract the partly-processed resource zip files (which do not
  # include the .grd string zips), making sure we replace crunched 9-patch
  # images with the original ones and avoiding file name collisions.
  for index, resource in enumerate(resources):
    with build_utils.TempDir() as temp_dir:
      temp_res_dir = os.path.join(temp_dir, 'res')
      build_utils.ExtractAll(resource.filename, path=temp_res_dir,
                             predicate=_resource_predicate)
      for dirpath, _, filenames in os.walk(temp_res_dir):
        if dirpath == temp_res_dir:
          # Do not create res/res/.
          continue
        res_dir_subpath = os.path.join(res_dir, os.path.basename(dirpath))
        build_utils.MakeDirectory(res_dir_subpath)
        for filename in filenames:
          if filename.endswith('.9.png'):
            # 9-patch files need to be handled specially. We need the original,
            # uncrunched versions to avoid crunching them twice and failing
            # (once when building the resources, and then when the user is
            # building their project with Crosswalk).
            original_9p = os.path.join(resource.src,
                                       os.path.basename(dirpath), filename)
            if not os.path.isfile(original_9p):
              raise IOError("Expected to find %s." % original_9p)
            shutil.copy2(original_9p, os.path.join(dirpath, filename))
          # Avoid overwriting existing files: names that already exist in the
          # destination get a zero-padded suffix with this zip's index.
          root, ext = os.path.splitext(filename)
          if os.path.isfile(os.path.join(res_dir_subpath, filename)):
            destname = '%s_%02d%s' % (root, index, ext)
          else:
            destname = filename
          shutil.copy2(os.path.join(dirpath, filename),
                       os.path.join(res_dir_subpath, destname))
  package_resources.MoveImagesToNonMdpiFolders(res_dir)

  # Part 2: extract .xml strings files (made from .grd files).
  for zip_file in resource_strings:
    # Exclude anything that doesn't end in .xml (such as .stamp files generated
    # with GN).
    build_utils.ExtractAll(zip_file, path=res_dir, pattern='*.xml')
def _ImportFromAars(config, tmp_paths, repo):
  """Extracts each configured client's .aar (and its classes.jar) from |repo|."""
  version = config['lib_version']
  for client in config['clients']:
    archive = os.path.join(repo, M2_PKG_PATH, client, version,
                           '%s-%s.aar' % (client, version))
    extracted_aar_dir = os.path.join(tmp_paths['imported_clients'], client)
    build_utils.ExtractAll(archive, extracted_aar_dir)
    # Pull the compiled classes out of the AAR's classes.jar as well.
    build_utils.ExtractAll(os.path.join(extracted_aar_dir, 'classes.jar'),
                           tmp_paths['extracted_jars'],
                           no_clobber=False)
def _ExtractClassFiles(jar_path, dest_dir, java_files):
  """Extracts all .class files not corresponding to |java_files|.

  Two complications:
    1. |java_files| carry source-root prefixes that never appear in jar paths.
    2. One .java file can produce several .class files (nested classes), e.g.
       ../../base/android/java/src/org/chromium/Foo.java yields both
       org/chromium/Foo.class and org/chromium/Foo$Inner.class.
  So for each jar entry we strip ".class" / "$*.class", append ".java", and
  keep the entry only when no entry of |java_files| ends with that suffix.
  """
  def _keep(entry):
    if not entry.endswith('.class'):
      return False
    stem = re.sub(r'(?:\$|\.)[^/]*class$', '', entry)
    java_suffix = stem + '.java'
    return not any(f.endswith(java_suffix) for f in java_files)

  logging.info('Extracting class files from %s', jar_path)
  build_utils.ExtractAll(jar_path, path=dest_dir, predicate=_keep)
  # Give extracted files the jar's timestamps so staleness checks stay stable.
  for extracted in build_utils.FindInDirectory(dest_dir, '*.class'):
    shutil.copystat(jar_path, extracted)
def ExtractDeps(dep_zips, deps_dir):
  """Extract a list of resource dependency zip files.

  Args:
    dep_zips: A list of zip file paths; each is extracted to a subdirectory of
      |deps_dir| named after the zip file's path with separators flattened
      (e.g. '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/').
    deps_dir: Top-level extraction directory.

  Returns:
    The list of all sub-directory paths, relative to |deps_dir|.

  Raises:
    Exception: If a sub-directory already exists with the same name before
      extraction.
  """
  extracted_dirs = []
  for zip_path in dep_zips:
    flat_name = zip_path.replace(os.path.sep, '_')
    target_dir = os.path.join(deps_dir, flat_name)
    if os.path.exists(target_dir):
      raise Exception('Resource zip name conflict: ' + flat_name)
    build_utils.ExtractAll(zip_path, path=target_dir)
    if _HasMultipleResDirs(zip_path):
      # basename of the directory is used to create a zip during resource
      # compilation; include the path in the basename to help blame errors on
      # the correct target. For example directory 0_res may be renamed
      # chrome_android_chrome_app_java_resources_0_res, pointing to the name
      # and path of the android_resources target from whence it came.
      extracted_dirs.extend(_RenameSubdirsWithPrefix(target_dir, flat_name))
    else:
      extracted_dirs.append(target_dir)
  return extracted_dirs
def _OnStaleMd5(package_command, options):
  """Extracts resource zips, optionally converts PNGs to WebP, and runs aapt.

  Args:
    package_command: Base aapt packaging command (a list); per-zip packaging
        args are appended to it before it is run.
    options: Parsed command-line options (resource_zips, png_to_webp,
        webp_binary, apk_path, split flags, ...).
  """
  with build_utils.TempDir() as temp_dir:
    if options.resource_zips:
      dep_zips = options.resource_zips
      extract_predicate = _CreateExtractPredicate(
          dep_zips, options.exclude_xxxhdpi, options.xxxhdpi_whitelist)
      png_paths = []
      package_subdirs = []
      for z in dep_zips:
        # Each zip extracts into a subdir named after its basename; two zips
        # with the same basename would silently merge, so fail loudly instead.
        subdir = os.path.join(temp_dir, os.path.basename(z))
        if os.path.exists(subdir):
          raise Exception('Resource zip name conflict: ' + os.path.basename(z))
        extracted_files = build_utils.ExtractAll(
            z, path=subdir, predicate=extract_predicate)
        # Zips whose entries were all filtered out contribute no packaging
        # args.
        if extracted_files:
          package_subdirs.append(subdir)
          png_paths.extend(f for f in extracted_files if f.endswith('.png'))
      if png_paths and options.png_to_webp:
        _ConvertToWebP(options.webp_binary, png_paths)
      for subdir in package_subdirs:
        package_command += PackageArgsForExtractedZip(subdir)
    # Run aapt while temp_dir still exists: the command references extracted
    # paths inside it.
    build_utils.CheckOutput(
        package_command, print_stdout=False, print_stderr=False)
    if options.create_density_splits or options.language_splits:
      CheckForMissedConfigs(options.apk_path, options.create_density_splits,
                            options.language_splits)
    if options.create_density_splits:
      RenameDensitySplits(options.apk_path)
def _DuplicateZhResources(zip_files, temp_dir):
  """Clones zh-TW resources as zh-HK in each resource zip that has them.

  Args:
    zip_files: List of resource zip paths.
    temp_dir: Scratch directory for the rewritten zips.

  Returns:
    A new list of zip paths: rewritten copies (under |temp_dir|) for zips that
    contained zh-TW/zh-HK resources, the original paths otherwise.
  """
  new_zip_files = []
  for i, zip_path in enumerate(zip_files):
    # We use zh-TW resources for zh-HK (if we have zh-TW resources). If no
    # zh-TW resources exist (e.g. API-specific resources), then just use the
    # original zip.
    if not _ZipContains(zip_path, r'zh-r(HK|TW)'):
      new_zip_files.append(zip_path)
      continue

    resource_dir = os.path.join(temp_dir, str(i))
    new_zip_path = os.path.join(temp_dir, str(i) + '.zip')

    # Exclude existing zh-HK resources so that we don't mess up any resource
    # IDs. This can happen if the type IDs in the existing resources don't
    # align with ours (since they've already been generated at this point).
    # NOTE: `'zh-rHK' not in x` is the idiomatic form of `not 'zh-rHK' in x`.
    build_utils.ExtractAll(
        zip_path, path=resource_dir, predicate=lambda x: 'zh-rHK' not in x)
    for path in build_utils.IterFiles(resource_dir):
      if 'zh-rTW' in path:
        hk_path = path.replace('zh-rTW', 'zh-rHK')
        # Touch first (creates the file and any missing parents), then copy
        # the zh-TW content over it.
        build_utils.Touch(hk_path)
        shutil.copyfile(path, hk_path)

    build_utils.ZipDir(new_zip_path, resource_dir)
    new_zip_files.append(new_zip_path)
  return new_zip_files
def rebuild():
  """Builds the .apks archive with bundletool, then rewrites it hermetically.

  NOTE(review): this is a closure — bundle_apks_path, aapt2_path, bundle_path,
  keystore_path, keystore_password, keystore_alias, mode, BUILD_APKS_MODES and
  device_spec are all captured from the enclosing scope.

  Raises:
    Exception: If |mode| is set but is not one of BUILD_APKS_MODES.
  """
  logging.info('Building %s', bundle_apks_path)
  with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_apks_file:
    cmd_args = [
        'build-apks',
        '--aapt2=%s' % aapt2_path,
        '--output=%s' % tmp_apks_file.name,
        '--bundle=%s' % bundle_path,
        '--ks=%s' % keystore_path,
        '--ks-pass=pass:%s' % keystore_password,
        '--ks-key-alias=%s' % keystore_alias,
        '--overwrite',
    ]

    if mode is not None:
      if mode not in BUILD_APKS_MODES:
        raise Exception('Invalid mode parameter %s (should be in %s)' %
                        (mode, BUILD_APKS_MODES))
      cmd_args += ['--mode=' + mode]

    # Open in text mode ('w+'): json.dump() writes str, and the
    # NamedTemporaryFile default of 'w+b' would raise TypeError on Python 3.
    with tempfile.NamedTemporaryFile(mode='w+', suffix='.json') as spec_file:
      if device_spec:
        json.dump(device_spec, spec_file)
        spec_file.flush()
        cmd_args += ['--device-spec=' + spec_file.name]
      bundletool.RunBundleTool(cmd_args)

    # Make the resulting .apks file hermetic: re-zip its contents
    # deterministically.
    with build_utils.TempDir() as temp_dir, \
        build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
      files = build_utils.ExtractAll(tmp_apks_file.name, temp_dir)
      build_utils.DoZip(files, f, base_dir=temp_dir)
def _RunDx(changes, options, dex_cmd, paths):
  """Runs dx over |paths|, using --incremental when only .class files changed.

  Args:
    changes: Change-tracking object (IterChangedPaths / IterChangedSubpaths /
        AddedOrModifiedOnly).
    options: Parsed options (multi_dex, main_dex_list_path, incremental,
        dex_path).
    dex_cmd: Base dx command line (a list); extended in place.
    paths: Input paths (zip files of .class files) to dex.
  """
  with build_utils.TempDir() as classes_temp_dir:
    # --multi-dex is incompatible with --incremental.
    if options.multi_dex:
      dex_cmd.append('--main-dex-list=%s' % options.main_dex_list_path)
    else:
      # Use --incremental when .class files are added or modified (never when
      # removed).
      # --incremental tells dx to merge all newly dex'ed .class files with
      # what that already exist in the output dex file (existing classes are
      # replaced).
      if options.incremental and changes.AddedOrModifiedOnly():
        changed_inputs = set(changes.IterChangedPaths())
        changed_paths = [p for p in paths if p in changed_inputs]
        if not changed_paths:
          # Nothing relevant changed; the output is already up to date.
          return
        # When merging in other dex files, there's no easy way to know if
        # classes were removed from them.
        if _AllSubpathsAreClassFiles(changed_paths, changes):
          dex_cmd.append('--incremental')
          for path in changed_paths:
            changed_subpaths = set(changes.IterChangedSubpaths(path))
            # Not a fundamental restriction, but it's the case right now and it
            # simplifies the logic to assume so.
            assert changed_subpaths, 'All inputs should be zip files.'
            # The lambda is evaluated by ExtractAll immediately, so capturing
            # the loop-local changed_subpaths here is safe.
            build_utils.ExtractAll(path, path=classes_temp_dir,
                                   predicate=lambda p: p in changed_subpaths)
          paths = [classes_temp_dir]

    dex_cmd += paths
    build_utils.CheckOutput(dex_cmd, print_stderr=False)

    if options.dex_path.endswith('.zip'):
      _RemoveUnwantedFilesFromZip(options.dex_path)
def _OnStaleMd5(package_command, options):
  """Extracts resource zips and runs the aapt packaging command."""
  with build_utils.TempDir() as work_dir:
    if options.resource_zips:
      resource_zips = options.resource_zips
      predicate = _CreateExtractPredicate(
          resource_zips, options.exclude_xxxhdpi, options.xxxhdpi_whitelist)
      for zip_path in resource_zips:
        zip_name = os.path.basename(zip_path)
        extract_dir = os.path.join(work_dir, zip_name)
        if os.path.exists(extract_dir):
          raise Exception('Resource zip name conflict: ' + zip_name)
        # Only zips that actually yield files contribute packaging args.
        if build_utils.ExtractAll(zip_path, path=extract_dir,
                                  predicate=predicate):
          package_command += PackageArgsForExtractedZip(extract_dir)
    build_utils.CheckOutput(package_command,
                            print_stdout=False,
                            print_stderr=False)
    if options.create_density_splits or options.language_splits:
      CheckForMissedConfigs(options.apk_path, options.create_density_splits,
                            options.language_splits)
    if options.create_density_splits:
      RenameDensitySplits(options.apk_path)
def _RunDexsplitter(options, output_dir):
  """Runs r8 dexsplitter to assign dex files to their feature modules."""
  cmd = [
      build_utils.JAVA_PATH,
      '-jar',
      options.r8_path,
      'dexsplitter',
      '--output',
      output_dir,
      '--proguard-map',
      options.proguard_mapping_file,
  ]

  for base_jar in options.features['base']:
    cmd += ['--base-jar', base_jar]

  # Jars already in the base module must not also be tagged as feature jars.
  base_jars_lookup = set(options.features['base'])
  for feature, feature_jars in options.features.items():
    if feature == 'base':
      continue
    for feature_jar in feature_jars:
      if feature_jar not in base_jars_lookup:
        cmd += ['--feature-jar', feature_jar + ':' + feature]

  with build_utils.TempDir() as temp_dir:
    for dex_file in build_utils.ExtractAll(options.input_dex_zip, temp_dir):
      cmd += ['--input', dex_file]
    build_utils.CheckOutput(cmd)
def Unzip(zip_file, filename=None):
  """Utility for temporary use of a single file in a zip archive.

  Yields the extracted path of the (single) entry matching |filename|; the
  backing temp directory is removed once the caller is done with it.

  Raises:
    Exception: If nothing in |zip_file| matches |filename|.
  """
  with build_utils.TempDir() as unzipped_dir:
    # Third positional argument kept as in the original call — presumably
    # ExtractAll's no_clobber flag; TODO confirm against its signature.
    unzipped_files = build_utils.ExtractAll(
        zip_file, unzipped_dir, True, pattern=filename)
    # Idiomatic emptiness check (was: len(unzipped_files) == 0).
    if not unzipped_files:
      raise Exception(
          '%s not found in %s' % (filename, zip_file))
    yield unzipped_files[0]
def _ExtractDeps(dep_zips, deps_dir):
  """Extracts each dependency zip into its own subdirectory of |deps_dir|.

  Returns the list of created subdirectories; raises if two zips share a
  basename (their extraction dirs would collide).
  """
  subdirs = []
  for zip_path in dep_zips:
    zip_name = os.path.basename(zip_path)
    dest = os.path.join(deps_dir, zip_name)
    if os.path.exists(dest):
      raise Exception('Resource zip name conflict: ' + zip_name)
    build_utils.ExtractAll(zip_path, path=dest)
    subdirs.append(dest)
  return subdirs
def _PerformExtract(aar_file, output_dir, name_whitelist):
  """Replaces |output_dir| with the whitelisted entries of |aar_file|."""
  with build_utils.TempDir() as tmp_dir:
    # Extract into a staging subdir so the final step is one atomic move.
    staging_dir = os.path.join(tmp_dir, 'staging')
    os.mkdir(staging_dir)
    build_utils.ExtractAll(aar_file, path=staging_dir,
                           predicate=lambda name: name in name_whitelist)
    shutil.rmtree(output_dir, ignore_errors=True)
    shutil.move(staging_dir, output_dir)
def main():
  """Entry point: 'list' an .aar's contents as a GN scope, or 'extract' it."""
  parser = argparse.ArgumentParser(description=__doc__)
  command_parsers = parser.add_subparsers(dest='command')
  subp = command_parsers.add_parser(
      'list', help='Output a GN scope describing the contents of the .aar.')
  _AddCommonArgs(subp)
  # '-' means "write to stdout-like behavior handled below", not a real path.
  subp.add_argument('--output', help='Output file.', default='-')

  subp = command_parsers.add_parser('extract', help='Extracts the .aar')
  _AddCommonArgs(subp)
  subp.add_argument('--output-dir',
                    help='Output directory for the extracted files.',
                    required=True,
                    type=os.path.normpath)
  subp.add_argument('--assert-info-file',
                    help='Path to .info file. Asserts that it matches what '
                         '"list" would output.',
                    type=argparse.FileType('r'))

  args = parser.parse_args()

  if args.command == 'extract':
    if args.assert_info_file:
      # Guard against a stale cached .info: it must match a fresh listing.
      expected = _CreateInfo(args.aar_file)
      actual = args.assert_info_file.read()
      if actual != expected:
        raise Exception('android_aar_prebuilt() cached .info file is '
                        'out-of-date. Run gn gen with '
                        'update_android_aar_prebuilts=true to update it.')
    # Clear previously extracted versions of the AAR.
    shutil.rmtree(args.output_dir, True)
    build_utils.ExtractAll(args.aar_file, path=args.output_dir)
  elif args.command == 'list':
    aar_info = _CreateInfo(args.aar_file)
    aar_output_present = args.output != '-' and os.path.isfile(args.output)
    if aar_output_present:
      # Some .info files are read-only, for example the cipd-controlled ones
      # under third_party/android_deps/repository. To deal with these, first
      # check that the existing content is correct, and if it is, exit without
      # touching the file system.
      file_info = open(args.output, 'r').read()
      if file_info == aar_info:
        return

    # Try to write the file. This may fail for read-only ones that were
    # not updated.
    try:
      with open(args.output, 'w') as f:
        f.write(aar_info)
    except IOError as e:
      if not aar_output_present:
        raise e
      raise Exception('Could not update output file: %s\n%s\n' %
                      (args.output, e))
def main():
  """Entry point: packages Android resources into an APK with aapt.

  NOTE: uses dict.itervalues(), so this file targets Python 2.
  """
  options = ParseArgs()
  android_jar = os.path.join(options.android_sdk, 'android.jar')
  aapt = options.aapt_path

  with build_utils.TempDir() as temp_dir:
    package_command = [aapt,
                       'package',
                       '--version-code', options.version_code,
                       '--version-name', options.version_name,
                       '-M', options.android_manifest,
                       '--no-crunch',
                       '-f',
                       '--auto-add-overlay',
                       '-I', android_jar,
                       '-F', options.apk_path,
                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN,
                       ]

    if options.no_compress:
      for ext in options.no_compress.split(','):
        package_command += ['-0', ext]
    if options.shared_resources:
      package_command.append('--shared-lib')

    if options.asset_dir and os.path.exists(options.asset_dir):
      package_command += ['-A', options.asset_dir]

    if options.resource_zips:
      dep_zips = build_utils.ParseGypList(options.resource_zips)
      for z in dep_zips:
        # Each zip extracts into a subdir named after its basename; duplicate
        # basenames would silently merge, so fail loudly instead.
        subdir = os.path.join(temp_dir, os.path.basename(z))
        if os.path.exists(subdir):
          raise Exception('Resource zip name conflict: ' + os.path.basename(z))
        build_utils.ExtractAll(z, path=subdir)
        package_command += PackageArgsForExtractedZip(subdir)

    if options.create_density_splits:
      for config in DENSITY_SPLITS.itervalues():
        package_command.extend(('--split', ','.join(config)))

    if 'Debug' in options.configuration_name:
      package_command += ['--debug-mode']

    build_utils.CheckOutput(
        package_command, print_stdout=False, print_stderr=False)

    if options.create_density_splits:
      CheckDensityMissedConfigs(options.apk_path)
      RenameDensitySplits(options.apk_path)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())
def main():
  """Merges the .class files of the given jars into one output jar."""
  parser = optparse.OptionParser()
  parser.add_option('--jars', help='The jars to merge.')
  parser.add_option('--jar-path', help='The output merged jar file.')
  options, _ = parser.parse_args()

  with build_utils.TempDir() as staging_dir:
    for input_jar in build_utils.ParseGypList(options.jars):
      build_utils.ExtractAll(input_jar, path=staging_dir, pattern='*.class')
    jar.JarDirectory(staging_dir, [], options.jar_path)
def _PerformExtract(aar_file, output_dir, name_allowlist):
  """Replaces |output_dir| with the allowlisted entries of |aar_file|."""
  with build_utils.TempDir() as tmp_dir:
    # Extract into a staging subdir so the final step is one atomic move.
    staging_dir = os.path.join(tmp_dir, 'staging')
    os.mkdir(staging_dir)
    build_utils.ExtractAll(aar_file, path=staging_dir,
                           predicate=lambda name: name in name_allowlist)
    # Write a breadcrumb so that SuperSize can attribute files back to the .aar.
    with open(os.path.join(staging_dir, 'source.info'), 'w') as f:
      f.write('source={}\n'.format(aar_file))
    shutil.rmtree(output_dir, ignore_errors=True)
    shutil.move(staging_dir, output_dir)
def _ExtractClassFiles(changes, tmp_dir, class_inputs):
  """Extracts .class files from |class_inputs| jars into |tmp_dir|.

  When |changes| is provided, only the changed subpaths of each jar are
  extracted; otherwise every .class file is. Returns the extracted paths.
  """
  extracted = []
  for jar in class_inputs:
    if changes:
      changed = set(changes.IterChangedSubpaths(jar))
      # Bind the per-jar set as a default arg so each jar filters on its own
      # changed entries.
      def _keep(name, _changed=changed):
        return name in _changed and name.endswith('.class')
      predicate = _keep
    else:
      def _keep(name):
        return name.endswith('.class')
      predicate = _keep
    extracted.extend(
        build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
  return extracted
def _ExtractClassFiles(changes, tmp_dir, class_inputs, required_classes_set):
  """Extracts class files from |class_inputs| jars into |tmp_dir|.

  When |changes| is provided, only changed subpaths plus
  |required_classes_set| are extracted; otherwise every class file is.
  Returns the extracted paths.
  """
  extracted = []
  for jar in class_inputs:
    if changes:
      wanted = set(changes.IterChangedSubpaths(jar)) | required_classes_set
      # Bind the per-jar set as a default arg so each jar filters on its own
      # wanted entries.
      def _keep(name, _wanted=wanted):
        return name in _wanted and _IsClassFile(name)
      predicate = _keep
    else:
      predicate = _IsClassFile
    extracted.extend(
        build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
  return extracted
def main():
  """Entry point: packages Android resources into an APK with aapt."""
  options = ParseArgs()
  android_jar = os.path.join(options.android_sdk, 'android.jar')
  aapt = os.path.join(options.android_sdk_tools, 'aapt')

  with build_utils.TempDir() as temp_dir:
    package_command = [aapt,
                       'package',
                       '--version-code', options.version_code,
                       '--version-name', options.version_name,
                       '-M', options.android_manifest,
                       '--no-crunch',
                       '-f',
                       '--auto-add-overlay',
                       '-I', android_jar,
                       '-F', options.apk_path,
                       ]

    if options.no_compress:
      for ext in options.no_compress.split(','):
        package_command += ['-0', ext]

    if os.path.exists(options.asset_dir):
      package_command += ['-A', options.asset_dir]

    dep_zips = build_utils.ParseGypList(options.resource_zips)
    for z in dep_zips:
      # Each zip extracts into a subdir named after its basename; duplicate
      # basenames would silently merge, so fail loudly instead.
      subdir = os.path.join(temp_dir, os.path.basename(z))
      if os.path.exists(subdir):
        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
      build_utils.ExtractAll(z, path=subdir)
      MoveImagesToNonMdpiFolders(subdir)
      package_command += ['-S', subdir]

    if 'Debug' in options.configuration_name:
      package_command += ['--debug-mode']

    build_utils.CheckOutput(package_command, print_stdout=False,
                            print_stderr=False)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())
def _ImportFromExtractedRepo(config, tmp_paths, repo):
  """Copies each client's tree from |repo| and unpacks its classes.jar."""
  try:
    for client in config['clients']:
      dest_dir = os.path.join(tmp_paths['imported_clients'], client)
      shutil.copytree(os.path.join(repo, client), dest_dir)
      # Unpack the client's compiled classes into the shared jar directory.
      build_utils.ExtractAll(os.path.join(dest_dir, 'classes.jar'),
                             tmp_paths['extracted_jars'],
                             no_clobber=False)
  finally:
    # Copied trees may carry read-only bits; make sure cleanup can delete them.
    _MakeWritable(tmp_paths['imported_clients'])
def main():
  """Entry point: merges mergeable jars, optionally validating the skip list.

  NOTE: uses Python 2 print statements. Returns 1 on validation failure.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('jars', nargs=argparse.REMAINDER,
                      help='The jars to merge.')
  parser.add_argument('--output-jar', help='Name of the merged JAR file.')
  parser.add_argument(
      '--validate-skipped-jars-list', action='store_true',
      help='Whether to validate KNOWN_SKIPPED_JARS by making '
      'sure it matches all the jars passed in --jars that are '
      'being skipped.')
  options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))

  # Flatten the GN-list arguments into a plain list of jar paths.
  jars = []
  for j in options.jars:
    jars.extend(build_utils.ParseGypList(j))
  options.jars = jars

  if options.validate_skipped_jars_list:
    extra, missing = ValidateKnownSkippedJars(options.jars)
    # It is fine for |extra| not to be empty: different build options may
    # include fewer JARs. |missing| being non-empty is fatal, though, as it
    # means there will be problems for users since we are not skipping files
    # that we should.
    if extra:
      print
      print 'merge_jars.py: The following JARs in KNOWN_SKIPPED_JARS were ' \
          'not used:'
      print ' %s' % ', '.join(sorted(extra))
      print
    if missing:
      print
      print 'merge_jars.py: The following JARs are not mergeable but are ' \
          'not part of KNOWN_SKIPPED_JARS:'
      print ' %s' % ', '.join(sorted(missing))
      print
      return 1

  with build_utils.TempDir() as temp_dir:
    for jar_file in options.jars:
      # If a JAR has classes outside our allowed namespaces (mostly
      # org.chromium and org.xwalk), we need to skip it otherwise there can be
      # build issues when a user builds an app with Crosswalk as well as
      # another package with one of these non-allowed namespaces (see
      # XWALK-5092, XWALK-6597).
      if not IsMergeableJar(jar_file):
        continue
      build_utils.ExtractAll(jar_file, path=temp_dir, pattern='*.class')
    jar.JarDirectory(temp_dir, options.output_jar)
def _OnStaleMd5(input_jar, output_jar, classpath, android_sdk_jar):
  """Runs Retrolambda over |input_jar| and writes the result to |output_jar|."""
  with build_utils.TempDir() as work_dir:
    build_utils.ExtractAll(input_jar, path=work_dir)
    full_classpath = ':'.join([work_dir] + classpath + [android_sdk_jar])
    retrolambda_cmd = [
        'java',
        '-Dretrolambda.inputDir=' + work_dir,
        '-Dretrolambda.classpath=' + full_classpath,
        '-javaagent:' + _RETROLAMBDA_JAR_PATH,
        '-jar',
        _RETROLAMBDA_JAR_PATH,
    ]
    build_utils.CheckOutput(retrolambda_cmd, print_stdout=False)
    # Zip to a temp name first so an interrupted run never leaves a truncated
    # output jar behind.
    build_utils.ZipDir(output_jar + '.tmp', work_dir)
    shutil.move(output_jar + '.tmp', output_jar)
def main():
  """Merges the .class files of jars under --build-dir into one output jar."""
  parser = optparse.OptionParser()
  parser.add_option('--build-dir',
                    help='Base build directory, such as out/Release. JARs '
                    'outside this directory will be skipped.')
  parser.add_option('--jars', help='The jars to merge.')
  parser.add_option('--output-jar', help='Name of the merged JAR file.')
  options, _ = parser.parse_args()

  build_dir = os.path.abspath(options.build_dir)
  with build_utils.TempDir() as staging_dir:
    for jar_path in build_utils.ParseGypList(options.jars):
      # Skip jars that live outside the build directory.
      if not os.path.abspath(jar_path).startswith(build_dir):
        continue
      build_utils.ExtractAll(jar_path, path=staging_dir, pattern='*.class')
    jar.JarDirectory(staging_dir, [], options.output_jar)
def Dump(apk_path):
  """Dumps class and method information from an APK via dexdump.

  Args:
    apk_path: An absolute path to an APK file to dump.

  Returns:
    A list with one parsed entry per classes*.dex file in the APK, each in the
    following format:
      {
        <package_name>: {
          'classes': {
            <class_name>: {
              'methods': [<method_1>, <method_2>]
            }
          }
        }
      }
  """
  # Create the temp dir BEFORE entering the try block: if mkdtemp() itself
  # failed inside the try, the finally clause would hit a NameError on
  # |dexfile_dir| instead of reporting the real error.
  dexfile_dir = tempfile.mkdtemp()
  try:
    parsed_dex_files = []
    for dex_file in build_utils.ExtractAll(apk_path, dexfile_dir,
                                           pattern='*classes*.dex'):
      output_xml = cmd_helper.GetCmdOutput(
          [DEXDUMP_PATH, '-j', '-l', 'xml', dex_file])
      # Dexdump doesn't escape its XML output very well; decode it as utf-8
      # with invalid sequences replaced, then remove forbidden characters and
      # re-encode it (as etree expects a byte string as input so it can figure
      # out the encoding itself from the XML declaration).
      BAD_XML_CHARS = re.compile(
          u'[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' +
          u'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]')
      if sys.version_info[0] < 3:
        decoded_xml = output_xml.decode('utf-8', 'replace')
        clean_xml = BAD_XML_CHARS.sub(u'\ufffd', decoded_xml)
      else:
        # Line duplicated to avoid pylint redefined-variable-type error.
        clean_xml = BAD_XML_CHARS.sub(u'\ufffd', output_xml)
      parsed_dex_files.append(
          _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8'))))
    return parsed_dex_files
  finally:
    shutil.rmtree(dexfile_dir)
def main():
  """Builds a jar from .class files, optionally excluding some classes."""
  parser = optparse.OptionParser()
  parser.add_option('--classes-dir', help='Directory containing .class files.')
  parser.add_option('--input-jar', help='Jar to include .class files from')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option(
      '--excluded-classes',
      help='GN list of .class file patterns to exclude from the jar.')
  parser.add_option(
      '--strip-resource-classes-for',
      help='GN list of java package names exclude R.class files in.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))

  # Current implementation supports just one or the other of these:
  assert not options.classes_dir or not options.input_jar

  exclude_patterns = []
  if options.excluded_classes:
    exclude_patterns = build_utils.ParseGnList(options.excluded_classes)
  if options.strip_resource_classes_for:
    # Also exclude the generated R* classes of the listed packages.
    for package in build_utils.ParseGnList(options.strip_resource_classes_for):
      package_path = package.replace('.', '/')
      exclude_patterns.extend(
          package_path + '/' + res_class for res_class in _RESOURCE_CLASSES)

  predicate = None
  if exclude_patterns:
    predicate = lambda f: not build_utils.MatchesGlob(f, exclude_patterns)

  with build_utils.TempDir() as temp_dir:
    class_files_dir = options.classes_dir
    if options.input_jar:
      build_utils.ExtractAll(options.input_jar, temp_dir)
      class_files_dir = temp_dir
    JarDirectory(class_files_dir, options.jar_path, predicate=predicate)

  if options.stamp:
    build_utils.Touch(options.stamp)
def main():
  """Entry point: extracts an .aar and/or lists its resources and jars.

  NOTE: uses a Python 2 print statement for the GN output.
  """
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('--input-file',
                      help='Path to the AAR file.',
                      required=True,
                      metavar='FILE')
  parser.add_argument('--extract',
                      help='Extract the files to output directory.',
                      action='store_true')
  parser.add_argument('--list',
                      help='List all the resource and jar files.',
                      action='store_true')
  parser.add_argument('--output-dir',
                      help='Output directory for the extracted files. Must '
                           'be set if --extract is set.',
                      metavar='DIR')
  args = parser.parse_args()

  if not args.extract and not args.list:
    parser.error('Either --extract or --list has to be specified.')

  aar_file = args.input_file
  output_dir = args.output_dir

  if args.extract:
    # Clear previously extracted versions of the AAR.
    shutil.rmtree(output_dir, True)
    build_utils.ExtractAll(aar_file, path=output_dir)

  if args.list:
    data = {}
    data['resources'] = []
    data['jars'] = []
    with zipfile.ZipFile(aar_file) as z:
      for name in z.namelist():
        # Resources are files under res/ (skip directory entries).
        if name.startswith('res/') and not name.endswith('/'):
          data['resources'].append(name)
        if name.endswith('.jar'):
          data['jars'].append(name)
    # Emit a GN-parsable scope describing the .aar contents.
    print gn_helpers.ToGNString(data)
def ExtractDeps(dep_zips, deps_dir):
  """Extract a list of resource dependency zip files.

  Args:
    dep_zips: A list of zip file paths; each is extracted to a subdirectory of
      |deps_dir| named after the zip file (e.g. '/some/path/foo.zip' ->
      '{deps_dir}/foo/').
    deps_dir: Top-level extraction directory.

  Returns:
    The list of all sub-directory paths, relative to |deps_dir|.

  Raises:
    Exception: If a sub-directory already exists with the same name before
      extraction.
  """
  subdirs = []
  for zip_path in dep_zips:
    zip_name = os.path.basename(zip_path)
    dest = os.path.join(deps_dir, zip_name)
    if os.path.exists(dest):
      raise Exception('Resource zip name conflict: ' + zip_name)
    build_utils.ExtractAll(zip_path, path=dest)
    subdirs.append(dest)
  return subdirs
def _HardcodeSharedLibraryDynamicAttributes(zip_path):
  """Hardcodes the package IDs of dynamic attributes to 0x02.

  This is a workaround for b/147674078, which affects Android versions pre-N.

  Args:
    zip_path: Path to proto APK file; rewritten in place.
  """
  with build_utils.TempDir() as tmp_dir:
    build_utils.ExtractAll(zip_path, path=tmp_dir)

    # Protobuf wire data is binary: open with 'rb'/'wb'. Text mode would
    # corrupt the payload on platforms that translate newlines and raises a
    # TypeError under Python 3.
    resources_pb_path = os.path.join(tmp_dir, 'resources.pb')

    # First process the resources file.
    table = Resources_pb2.ResourceTable()
    with open(resources_pb_path, 'rb') as f:
      table.ParseFromString(f.read())

    for package in table.package:
      for _type in package.type:
        for entry in _type.entry:
          for config_value in entry.config_value:
            _ProcessProtoValue(config_value.value)

    with open(resources_pb_path, 'wb') as f:
      f.write(table.SerializeToString())

    # Next process all the XML files.
    xml_files = build_utils.FindInDirectory(tmp_dir, '*.xml')
    for xml_file in xml_files:
      xml_node = Resources_pb2.XmlNode()
      with open(xml_file, 'rb') as f:
        xml_node.ParseFromString(f.read())
      _ProcessProtoXmlNode(xml_node)
      with open(xml_file, 'wb') as f:
        f.write(xml_node.SerializeToString())

    # Overwrite the original zip file.
    build_utils.ZipDir(zip_path, tmp_dir)
def main():
  """Entry point: 'list' an .aar's contents as a GN scope, or 'extract' it."""
  parser = argparse.ArgumentParser(description=__doc__)
  command_parsers = parser.add_subparsers(dest='command')
  subp = command_parsers.add_parser(
      'list', help='Output a GN scope describing the contents of the .aar.')
  _AddCommonArgs(subp)
  # FileType('w') with default '-' makes args.output a writable file object
  # (stdout for '-').
  subp.add_argument('--output', help='Output file.',
                    type=argparse.FileType('w'), default='-')

  subp = command_parsers.add_parser('extract', help='Extracts the .aar')
  _AddCommonArgs(subp)
  subp.add_argument('--output-dir',
                    help='Output directory for the extracted files.',
                    required=True,
                    type=os.path.normpath)
  subp.add_argument('--assert-info-file',
                    help='Path to .info file. Asserts that it matches what '
                         '"list" would output.',
                    type=argparse.FileType('r'))

  args = parser.parse_args()

  if args.command == 'extract':
    if args.assert_info_file:
      # Guard against a stale cached .info: it must match a fresh listing.
      expected = _CreateInfo(args.aar_file)
      actual = args.assert_info_file.read()
      if actual != expected:
        raise Exception('android_aar_prebuilt() cached .info file is '
                        'out-of-date. Run gn gen with '
                        'update_android_aar_prebuilts=true to update it.')
    # Clear previously extracted versions of the AAR.
    shutil.rmtree(args.output_dir, True)
    build_utils.ExtractAll(args.aar_file, path=args.output_dir)
  elif args.command == 'list':
    args.output.write(_CreateInfo(args.aar_file))