def main(argv):
  """Pack a list of files into a zip archive.

  Command-line options:
    --output: The file path of the zip archive.
    --base_dir: Base path of input files.
    --languages: Comma-separated list of languages, e.g. en-US,de.
    --add: List of files to include in the archive. The language placeholder
        ${lang} is expanded into one file for each language.
  """
  parser = optparse.OptionParser()
  parser.add_option("--output", dest="output")
  parser.add_option("--base_dir", dest="base_dir")
  parser.add_option("--languages", dest="languages")
  parser.add_option("--add", action="append", dest="files", default=[])
  options, args = parser.parse_args(argv[1:])

  # Process file list, possibly expanding language placeholders.
  _LANG_PLACEHOLDER = "${lang}"
  # Materialize as a list: under Python 3, filter() returns a one-shot
  # iterator, which would be exhausted after the first ${lang} file and make
  # every later placeholder expand to nothing.
  languages = list(filter(bool, options.languages.split(',')))
  file_list = []
  for file_to_add in options.files:
    if _LANG_PLACEHOLDER in file_to_add:
      for lang in languages:
        file_list.append(file_to_add.replace(_LANG_PLACEHOLDER, lang))
    else:
      file_list.append(file_to_add)

  with build_utils.AtomicOutput(options.output) as f:
    build_utils.DoZip(file_list, f, options.base_dir)
def rebuild():
  """Runs bundletool build-apks and rewrites the result hermetically.

  NOTE(review): this is a closure -- it reads bundle_apks_path, aapt2_path,
  bundle_path, keystore_path, keystore_password, keystore_alias, mode,
  device_spec and BUILD_APKS_MODES from the enclosing scope; confirm against
  the enclosing function.
  """
  logging.info('Building %s', bundle_apks_path)
  # Build into a temporary .apks first; the hermetic re-zip happens below.
  with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_apks_file:
    cmd_args = [
        'build-apks',
        '--aapt2=%s' % aapt2_path,
        '--output=%s' % tmp_apks_file.name,
        '--bundle=%s' % bundle_path,
        '--ks=%s' % keystore_path,
        '--ks-pass=pass:%s' % keystore_password,
        '--ks-key-alias=%s' % keystore_alias,
        '--overwrite',
    ]
    if mode is not None:
      # Reject unknown build-apks modes up front with a clear error.
      if mode not in BUILD_APKS_MODES:
        raise Exception('Invalid mode parameter %s (should be in %s)' %
                        (mode, BUILD_APKS_MODES))
      cmd_args += ['--mode=' + mode]
    # The device spec (if any) is serialized to a temp .json that must stay
    # alive while bundletool runs, hence RunBundleTool is inside this `with`.
    with tempfile.NamedTemporaryFile(suffix='.json') as spec_file:
      if device_spec:
        json.dump(device_spec, spec_file)
        spec_file.flush()
        cmd_args += ['--device-spec=' + spec_file.name]
      bundletool.RunBundleTool(cmd_args)
    # Make the resulting .apks file hermetic.
    with build_utils.TempDir() as temp_dir, \
        build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
      files = build_utils.ExtractAll(tmp_apks_file.name, temp_dir)
      build_utils.DoZip(files, f, base_dir=temp_dir)
def _ZipResources(resource_dirs, zip_path, ignore_pattern):
  """Zips resource files from |resource_dirs| into |zip_path|.

  Python zipfile does not provide a way to replace a file (it just writes
  another file with the same name). So, first collect all the files to put
  in the zip (with proper overriding), and then zip them.

  Args:
    resource_dirs: List of resource directories to archive.
    zip_path: Path of the zip to create (an .info file is written alongside).
    ignore_pattern: A string of ':' delimited list of globs used to ignore
      files that should not be part of the final resource zip.
  """
  files_to_zip = dict()
  files_to_zip_without_generated = dict()
  for index, resource_dir in enumerate(resource_dirs):
    for path, archive_path in resource_utils.IterResourceFilesInDirectories(
        [resource_dir], ignore_pattern):
      resource_dir_name = os.path.basename(resource_dir)
      # Prefix each dir's entries so several res/ dirs can coexist in one zip.
      archive_path = '{}_{}/{}'.format(index, resource_dir_name, archive_path)
      # We want the original resource dirs in the .info file rather than the
      # generated overridden path.
      if not path.startswith('/tmp'):
        files_to_zip_without_generated[archive_path] = path
      files_to_zip[archive_path] = path
  resource_utils.CreateResourceInfoFile(files_to_zip_without_generated,
                                        zip_path)
  with zipfile.ZipFile(zip_path, 'w') as z:
    # This magic comment signals to resource_utils.ExtractDeps that this zip is
    # not just the contents of a single res dir, without the encapsulating res/
    # (like the outputs of android_generated_resources targets), but instead has
    # the contents of possibly multiple res/ dirs each within an encapsulating
    # directory within the zip.
    z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING
    # items() rather than the Python-2-only iteritems() so this runs on py3.
    build_utils.DoZip(files_to_zip.items(), z)
def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
  """Moves the staged dex output(s) into the final output path.

  Produces either a single .dex file or an uncompressed .jar of dex files,
  depending on the extension of the final output path.
  """
  dex_files = build_utils.FindInDirectory(self.staging_dir)
  if not dex_files:
    raise Exception('Missing dex outputs in {}'.format(self.staging_dir))

  if not self._final_output_path.endswith('.dex'):
    # Add to .jar using Python rather than having R8 output to a .zip directly
    # in order to disable compression of the .jar, saving ~500ms.
    intermediate_jar = self.staging_dir + '.jar'
    build_utils.DoZip(dex_files, intermediate_jar, base_dir=self.staging_dir)
    shutil.move(intermediate_jar, self._final_output_path)
    return

  # A lone .dex file was requested.
  if has_imported_lib:
    raise Exception(
        'Trying to create a single .dex file, but a dependency requires '
        'JDK Library Desugaring (which necessitates a second file).'
        'Refer to %s to see what desugaring was required' % keep_rule_output)
  if len(dex_files) != 1:
    raise Exception('Expected exactly 1 dex file output, found: {}'.format(
        '\t'.join(dex_files)))
  shutil.move(dex_files[0], self._final_output_path)
def CreateResourceRewriterSrcjar(package, res_packages, srcjar_path):
  """Generates the ResourceRewriter source and packs it into a srcjar."""
  with build_utils.TempDir() as scratch_dir:
    # Mirror the Java package structure under the scratch directory.
    package_dir = os.path.join(scratch_dir, *package.split('.'))
    CreateResourceRewriter(package, res_packages, package_dir)
    rewriter_java = os.path.join(package_dir, RESOURCE_REWRITER_JAVA)
    build_utils.DoZip([rewriter_java], srcjar_path, scratch_dir)
def _ZipResources(resource_dirs, zip_path, ignore_pattern):
  """Zips resource files from |resource_dirs| into |zip_path|.

  Python zipfile does not provide a way to replace a file (it just writes
  another file with the same name). So, first collect all the files to put
  in the zip (with proper overriding), and then zip them.

  Args:
    resource_dirs: List of resource directories to archive.
    zip_path: Path of the zip to create (an .info file is written alongside).
    ignore_pattern: A string of ':' delimited list of globs used to ignore
      files that should not be part of the final resource zip.
  """
  files_to_zip = dict()
  files_to_zip_without_generated = dict()
  globs = _GenerateGlobs(ignore_pattern)
  for d in resource_dirs:
    for root, _, files in os.walk(d):
      for f in files:
        archive_path = f
        parent_dir = os.path.relpath(root, d)
        if parent_dir != '.':
          archive_path = os.path.join(parent_dir, f)
        path = os.path.join(root, f)
        if build_utils.MatchesGlob(archive_path, globs):
          continue
        # We want the original resource dirs in the .info file rather than the
        # generated overridden path.
        if not path.startswith('/tmp'):
          files_to_zip_without_generated[archive_path] = path
        files_to_zip[archive_path] = path
  resource_utils.CreateResourceInfoFile(files_to_zip_without_generated,
                                        zip_path)
  # items() rather than the Python-2-only iteritems() so this runs on py3.
  build_utils.DoZip(files_to_zip.items(), zip_path)
def _ZipSingleDex(dex_file, zip_name):
  """Writes a zip archive containing exactly one dex file.

  Args:
    dex_file: A dexfile whose name is ignored; it is stored as classes.dex.
    zip_name: The output file in which to write the zip.
  """
  entries = [('classes.dex', dex_file)]
  build_utils.DoZip(entries, zip_name)
def main():
  """Archives every file found under --input-dir into --output."""
  parser = optparse.OptionParser()
  parser.add_option('--input-dir', help='Directory of files to archive.')
  parser.add_option('--output', help='Path to output archive.')
  options, _ = parser.parse_args()

  archive_members = build_utils.FindInDirectory(options.input_dir, '*')
  build_utils.DoZip(archive_members, options.output, options.input_dir)
def main(args):
  """Zips --input-files and/or merges --input-zips into --output.

  Optionally writes a depfile listing the merged input zips.
  """
  args = build_utils.ExpandFileArgs(args)
  # Fix: |args| used to be passed to the ArgumentParser constructor, where it
  # was interpreted as the |prog| name rather than as arguments to parse.
  parser = argparse.ArgumentParser()
  parser.add_argument('--input-files', help='GN-list of files to zip.')
  parser.add_argument(
      '--input-files-base-dir',
      help='Paths in the archive will be relative to this directory')
  parser.add_argument('--input-zips', help='GN-list of zips to merge.')
  parser.add_argument('--input-zips-excluded-globs',
                      help='GN-list of globs for paths to exclude.')
  parser.add_argument('--output', required=True, help='Path to output archive.')
  compress_group = parser.add_mutually_exclusive_group()
  compress_group.add_argument('--compress', action='store_true',
                              help='Compress entries')
  compress_group.add_argument('--no-compress', action='store_false',
                              dest='compress', help='Do not compress entries')
  build_utils.AddDepfileOption(parser)
  options = parser.parse_args(args)

  with build_utils.AtomicOutput(options.output) as f:
    with zipfile.ZipFile(f.name, 'w') as out_zip:
      depfile_deps = None
      if options.input_files:
        files = build_utils.ParseGnList(options.input_files)
        build_utils.DoZip(files,
                          out_zip,
                          base_dir=options.input_files_base_dir,
                          compress_fn=lambda _: options.compress)

      if options.input_zips:
        files = build_utils.ParseGnList(options.input_zips)
        depfile_deps = files
        path_transform = None
        if options.input_zips_excluded_globs:
          globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
          # Drop (return None for) any entry matching an excluded glob.
          path_transform = (
              lambda p: None if build_utils.MatchesGlob(p, globs) else p)
        build_utils.MergeZips(out_zip,
                              files,
                              path_transform=path_transform,
                              compress=options.compress)

  # Depfile used only by dist_jar().
  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             options.output,
                             inputs=depfile_deps,
                             add_pydeps=False)
def ZipResources(resource_dirs, zip_path):
  """Zips the files found under each of |resource_dirs| into |zip_path|.

  Python zipfile does not provide a way to replace a file (it just writes
  another file with the same name). So, first collect all the files to put
  in the zip (with proper overriding), and then zip them.
  """
  files_to_zip = dict()
  for d in resource_dirs:
    for root, _, files in os.walk(d):
      for f in files:
        archive_path = f
        parent_dir = os.path.relpath(root, d)
        if parent_dir != '.':
          archive_path = os.path.join(parent_dir, f)
        path = os.path.join(root, f)
        # Later resource dirs override earlier ones for the same archive path.
        files_to_zip[archive_path] = path
  # items() rather than the Python-2-only iteritems() so this runs on py3.
  build_utils.DoZip(files_to_zip.items(), zip_path)
def CreateOutput(self):
  """Moves the staged dex output(s) into the final output path.

  Produces either a single .dex file or an uncompressed .jar of dex files,
  depending on the extension of the final output path.
  """
  staged = build_utils.FindInDirectory(self.staging_dir)
  if not staged:
    raise Exception('Missing dex outputs in {}'.format(self.staging_dir))

  if not self._final_output_path.endswith('.dex'):
    # Add to .jar using Python rather than having R8 output to a .zip directly
    # in order to disable compression of the .jar, saving ~500ms.
    jar_tmp = self.staging_dir + '.jar'
    build_utils.DoZip(staged, jar_tmp, base_dir=self.staging_dir)
    shutil.move(jar_tmp, self._final_output_path)
    return

  # A lone .dex file was requested.
  if len(staged) != 1:
    raise Exception('Expected exactly 1 dex file output, found: {}'.format(
        '\t'.join(staged)))
  shutil.move(staged[0], self._final_output_path)
def _ZipResources(resource_dirs, zip_path, ignore_pattern):
  """Zips resource files from |resource_dirs| into |zip_path|.

  Python zipfile does not provide a way to replace a file (it just writes
  another file with the same name). So, first collect all the files to put
  in the zip (with proper overriding), and then zip them.

  Args:
    resource_dirs: List of resource directories to archive.
    zip_path: Path of the zip to create (an .info file is written alongside).
    ignore_pattern: A string of ':' delimited list of globs used to ignore
      files that should not be part of the final resource zip.
  """
  files_to_zip = dict()
  files_to_zip_without_generated = dict()
  for path, archive_path in resource_utils.IterResourceFilesInDirectories(
      resource_dirs, ignore_pattern):
    # We want the original resource dirs in the .info file rather than the
    # generated overridden path.
    if not path.startswith('/tmp'):
      files_to_zip_without_generated[archive_path] = path
    files_to_zip[archive_path] = path
  resource_utils.CreateResourceInfoFile(files_to_zip_without_generated,
                                        zip_path)
  # items() rather than the Python-2-only iteritems() so this runs on py3.
  build_utils.DoZip(files_to_zip.items(), zip_path)
def Jar(class_files, classes_dir, jar_path, provider_configurations=None,
        additional_files=None):
  """Writes |class_files| (plus extras) into a jar at |jar_path|.

  Class files are stored relative to |classes_dir|; provider configurations
  land under META-INF/services/.
  """
  entries = []
  for class_file in class_files:
    entries.append((os.path.relpath(class_file, classes_dir), class_file))
  for filepath, jar_filepath in additional_files or ():
    entries.append((jar_filepath, filepath))
  for config in provider_configurations or ():
    entries.append(('META-INF/services/' + os.path.basename(config), config))

  # Zeros out timestamps so that builds are hermetic.
  with build_utils.AtomicOutput(jar_path) as f:
    build_utils.DoZip(entries, f)
def main():
  """Zips --inputs into --output, optionally writing a depfile."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--inputs', help='List of files to archive.')
  parser.add_option('--output', help='Path to output archive.')
  parser.add_option('--base-dir',
                    help='If provided, the paths in the archive will be '
                    'relative to this directory',
                    default='.')
  options, _ = parser.parse_args()

  # --inputs is a Python-literal list of paths.
  archive_inputs = ast.literal_eval(options.inputs)
  build_utils.DoZip(archive_inputs, options.output, options.base_dir)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile, options.output)
def _ZipResources(resource_dirs, zip_path, ignore_pattern):
  """Zips resource files from |resource_dirs| into |zip_path|.

  Also writes a |zip_path|.info file mapping each archive entry to the
  original (AAR-attributed where possible) source path.

  Args:
    resource_dirs: List of resource directories to archive.
    zip_path: Path of the zip to create.
    ignore_pattern: A string of ':' delimited list of globs used to ignore
      files that should not be part of the final resource zip.
  """
  files_to_zip = []
  path_info = resource_utils.ResourceInfoFile()
  for index, resource_dir in enumerate(resource_dirs):
    # NOTE(review): dirs starting with '..' are presumably outside the output
    # dir and get no AAR attribution -- confirm against callers.
    attributed_aar = None
    if not resource_dir.startswith('..'):
      aar_source_info_path = os.path.join(
          os.path.dirname(resource_dir), 'source.info')
      if os.path.exists(aar_source_info_path):
        attributed_aar = jar_info_utils.ReadAarSourceInfo(aar_source_info_path)

    for path, archive_path in resource_utils.IterResourceFilesInDirectories(
        [resource_dir], ignore_pattern):
      attributed_path = path
      if attributed_aar:
        # Rewrite the source path to point inside the originating AAR's res/.
        attributed_path = os.path.join(attributed_aar, 'res',
                                       path[len(resource_dir) + 1:])
      # Use the non-prefixed archive_path in the .info file.
      path_info.AddMapping(archive_path, attributed_path)

      resource_dir_name = os.path.basename(resource_dir)
      # Prefix each dir's entries so several res/ dirs can coexist in the zip.
      archive_path = '{}_{}/{}'.format(index, resource_dir_name, archive_path)
      files_to_zip.append((archive_path, path))

  path_info.Write(zip_path + '.info')

  with zipfile.ZipFile(zip_path, 'w') as z:
    # This magic comment signals to resource_utils.ExtractDeps that this zip is
    # not just the contents of a single res dir, without the encapsulating res/
    # (like the outputs of android_generated_resources targets), but instead has
    # the contents of possibly multiple res/ dirs each within an encapsulating
    # directory within the zip.
    z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING
    build_utils.DoZip(files_to_zip, z)
def main():
  """Pack a list of files into a zip archive.

  Command-line options:
    --output: The file path of the zip archive.
    --timestamp: Unix timestamp applied to archive members.
    --base_dir: Base path of input files.
    --languages: Comma-separated list of languages, e.g. en-US,de.
    --add: List of files to include in the archive. The language placeholder
        ${lang} is expanded into one file for each language.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument("--output", dest="output")
  parser.add_argument("--timestamp", type=int, metavar="TIME",
                      help="Unix timestamp to use for files in the archive")
  parser.add_argument("--base_dir", dest="base_dir")
  parser.add_argument("--languages", dest="languages")
  parser.add_argument("--add", action="append", dest="files", default=[])
  args = parser.parse_args()

  # Expand ${lang} placeholders into one entry per (non-empty) language.
  _LANG_PLACEHOLDER = "${lang}"
  languages = [lang for lang in args.languages.split(',') if lang]
  file_list = []
  for entry in args.files:
    if _LANG_PLACEHOLDER in entry:
      file_list.extend(
          entry.replace(_LANG_PLACEHOLDER, lang) for lang in languages)
    else:
      file_list.append(entry)

  with build_utils.AtomicOutput(args.output) as f:
    build_utils.DoZip(file_list, f, args.base_dir, timestamp=args.timestamp)
def _ZipMultidex(file_dir, dex_files): """Zip dex files into a multidex. Args: file_dir: The directory into which to write the output. dex_files: The dexfiles forming the multizip. Their names must end with classes.dex, classes2.dex, ... Returns: The name of the multidex file, which will live in file_dir. """ ordered_files = [] # List of (archive name, file name) for f in dex_files: if f.endswith('dex.jar'): ordered_files.append(('classes.dex', f)) break if not ordered_files: raise Exception('Could not find classes.dex multidex file in %s', dex_files) for dex_idx in xrange(2, len(dex_files) + 1): archive_name = 'classes%d.dex' % dex_idx for f in dex_files: if f.endswith(archive_name): ordered_files.append((archive_name, f)) break else: raise Exception('Could not find classes%d.dex multidex file in %s', dex_files) if len(set(f[1] for f in ordered_files)) != len(ordered_files): raise Exception('Unexpected clashing filenames for multidex in %s', dex_files) zip_name = os.path.join(file_dir, 'multidex_classes.zip') build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name)) for archive_name, file_name in ordered_files), zip_name) return zip_name
def main():
  """Creates a source jar from --src-files/--src-jars via search directories.

  Flattens the GN-list arguments, resolves every source file against the
  given search directories (requiring exactly one match per file), then
  writes all sources plus any extra source jars to --jar-path.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument(
      '--excluded-classes',
      help='A list of .class file patterns to exclude from the jar.')
  parser.add_argument('--src-search-dirs',
                      action='append',
                      help='A list of directories that should be searched'
                      ' for the source files.')
  parser.add_argument('--src-files', action='append',
                      help='A list of source files to jar.')
  parser.add_argument(
      '--src-jars',
      action='append',
      help='A list of source jars to include in addition to source files.')
  parser.add_argument('--src-list-files',
                      action='append',
                      help='A list of files that contain a list of sources,'
                      ' e.g. a list of \'.sources\' files generated by GN.')
  parser.add_argument('--jar-path', help='Jar output path.', required=True)
  options = parser.parse_args()

  # Each repeated flag value is itself a GN list; flatten them all.
  src_jars = []
  for gn_list in options.src_jars:
    src_jars.extend(build_utils.ParseGnList(gn_list))

  src_search_dirs = []
  for gn_src_search_dirs in options.src_search_dirs:
    src_search_dirs.extend(build_utils.ParseGnList(gn_src_search_dirs))

  src_list_files = []
  if options.src_list_files:
    for gn_src_list_file in options.src_list_files:
      src_list_files.extend(build_utils.ParseGnList(gn_src_list_file))

  src_files = []
  for gn_src_files in options.src_files:
    src_files.extend(build_utils.ParseGnList(gn_src_files))

  # Add files from --source_list_files
  for src_list_file in src_list_files:
    with open(src_list_file, 'r') as f:
      src_files.extend(f.read().splitlines())

  # Preprocess source files by removing any prefix that comes before
  # the Java package name.
  for i, s in enumerate(src_files):
    prefix_position = s.find(JAVA_PACKAGE_PREFIX)
    if prefix_position != -1:
      src_files[i] = s[prefix_position:]

  # Exclusion patterns are given as .class globs; match against .java paths.
  excluded_classes = []
  if options.excluded_classes:
    classes = build_utils.ParseGnList(options.excluded_classes)
    excluded_classes.extend(f.replace('.class', '.java') for f in classes)

  predicate = None
  if excluded_classes:
    predicate = lambda f: not build_utils.MatchesGlob(f, excluded_classes)

  # Create a dictionary that maps every source directory
  # to source files that it contains.
  dir_to_files_map = {}
  # Initialize the map.
  for src_search_dir in src_search_dirs:
    dir_to_files_map[src_search_dir] = []
  # Fill the map.
  for src_file in src_files:
    number_of_file_instances = 0
    for src_search_dir in src_search_dirs:
      target_path = os.path.join(src_search_dir, src_file)
      if os.path.isfile(target_path):
        number_of_file_instances += 1
        if not predicate or predicate(src_file):
          dir_to_files_map[src_search_dir].append(target_path)
    # Every source file must resolve to exactly one search directory.
    if (number_of_file_instances > 1):
      raise Exception(
          'There is more than one instance of file %s in %s'
          % (src_file, src_search_dirs))
    if (number_of_file_instances < 1):
      raise Exception(
          'Unable to find file %s in %s' % (src_file, src_search_dirs))

  # Jar the sources from every source search directory.
  with build_utils.AtomicOutput(options.jar_path) as o, \
      zipfile.ZipFile(o, 'w', zipfile.ZIP_DEFLATED) as z:
    for src_search_dir in src_search_dirs:
      subpaths = dir_to_files_map[src_search_dir]
      if subpaths:
        build_utils.DoZip(subpaths, z, base_dir=src_search_dir)
      else:
        raise Exception(
            'Directory %s does not contain any files and can be'
            ' removed from the list of directories to search' % src_search_dir)

    # Jar additional src jars
    if src_jars:
      build_utils.MergeZips(z, src_jars, compress=True)

  if options.depfile:
    deps = []
    # NOTE(review): itervalues() is Python-2-only; a Python 3 port would need
    # values() here -- left unchanged.
    for sources in dir_to_files_map.itervalues():
      deps.extend(sources)
    # Srcjar deps already captured in GN rules (no need to list them here).
    build_utils.WriteDepfile(options.depfile, options.jar_path, deps)
def _OptimizeWithR8(options,
                    config_paths,
                    libraries,
                    dynamic_config_data,
                    print_stdout=False):
  """Runs the R8 optimizer and moves its outputs into place.

  Invokes R8 with the given proguard configs and libraries, writes the
  optimized output to options.output_path (either a single .dex or an
  uncompressed .jar of dex files) and a cleaned mapping file to
  options.mapping_output.

  Raises:
    ProguardProcessError: If the R8 invocation fails.
    Exception: If a single .dex was requested but R8 produced several files.
  """
  with build_utils.TempDir() as tmp_dir:
    if dynamic_config_data:
      # Materialize the in-memory config so R8 can read it like the others.
      tmp_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
      with open(tmp_config_path, 'w') as f:
        f.write(dynamic_config_data)
      # Copy rather than append so the caller's list is not mutated.
      config_paths = config_paths + [tmp_config_path]

    tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
    # If there is no output (no classes are kept), this prevents this script
    # from failing.
    build_utils.Touch(tmp_mapping_path)

    tmp_output = os.path.join(tmp_dir, 'r8out')
    os.mkdir(tmp_output)

    cmd = [
        build_utils.JAVA_PATH,
        '-jar',
        options.r8_path,
        '--no-data-resources',
        '--output',
        tmp_output,
        '--pg-map-output',
        tmp_mapping_path,
    ]
    for lib in libraries:
      cmd += ['--lib', lib]
    for config_file in config_paths:
      cmd += ['--pg-conf', config_file]
    if options.min_api:
      cmd += ['--min-api', options.min_api]
    if options.force_enable_assertions:
      cmd += ['--force-enable-assertions']
    if options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        cmd += ['--main-dex-rules', main_dex_rule]
    cmd += options.input_paths

    env = os.environ.copy()
    # R8 echoes _JAVA_OPTIONS to stderr; strip that noise from the output.
    stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
    env['_JAVA_OPTIONS'] = '-Dcom.android.tools.r8.allowTestProguardOptions=1'
    if options.disable_outlining:
      env['_JAVA_OPTIONS'] += ' -Dcom.android.tools.r8.disableOutlining=1'

    try:
      build_utils.CheckOutput(cmd,
                              env=env,
                              print_stdout=print_stdout,
                              stderr_filter=stderr_filter)
    except build_utils.CalledProcessError as err:
      # Point developers at the debugging docs instead of a raw stack trace.
      debugging_link = ('R8 failed. Please see {}.'.format(
          'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
          'android/docs/java_optimization.md#Debugging-common-failures\n'))
      raise ProguardProcessError(err, debugging_link)

    found_files = build_utils.FindInDirectory(tmp_output)
    if not options.output_path.endswith('.dex'):
      # Add to .jar using Python rather than having R8 output to a .zip directly
      # in order to disable compression of the .jar, saving ~500ms.
      tmp_jar_output = tmp_output + '.jar'
      build_utils.DoZip(found_files, tmp_jar_output, base_dir=tmp_output)
      shutil.move(tmp_jar_output, options.output_path)
    else:
      if len(found_files) > 1:
        raise Exception('Too many files created: {}'.format(found_files))
      shutil.move(found_files[0], options.output_path)

    with open(options.mapping_output, 'w') as out_file, \
        open(tmp_mapping_path) as in_file:
      # Mapping files generated by R8 include comments that may break
      # some of our tooling so remove those (specifically: apkanalyzer).
      out_file.writelines(l for l in in_file if not l.startswith('#'))