def main(argv): argv = build_utils.ExpandFileArgs(argv) parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) parser.add_option('--output', help='Output path for executable script.') parser.add_option('--jar-path', help='Path to the main jar.') parser.add_option( '--main-class', help='Name of the java class with the "main" entry point.') parser.add_option('--classpath', action='append', help='Classpath for running the jar.') options, _ = parser.parse_args(argv) classpath = [options.jar_path] for cp_arg in options.classpath: classpath += build_utils.ParseGypList(cp_arg) run_dir = os.path.dirname(options.output) classpath = [os.path.relpath(p, run_dir) for p in classpath] with open(options.output, 'w') as script: script.write( script_template.format(classpath=('"%s"' % '", "'.join(classpath)), main_class=options.main_class)) os.chmod(options.output, 0750) if options.depfile: build_utils.WriteDepfile(options.depfile, build_utils.GetPythonDependencies())
def _MaybeWriteStampAndDepFile(options, inputs):
  """Touches the stamp (if configured) and writes the depfile (if requested).

  The depfile's primary output is the stamp file when one exists, otherwise
  the main output path.
  """
  if options.stamp:
    build_utils.Touch(options.stamp)
  depfile_output = options.stamp if options.stamp else options.output_path
  if options.depfile:
    build_utils.WriteDepfile(options.depfile, depfile_output, inputs=inputs)
def main(args):
  """Compiles Android resources into an .ap_ file and generates R.java files.

  Optionally keeps intermediate files in a debug directory, validates the
  resulting package ID, and writes a srcjar plus a depfile.
  """
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  # When the debug env var is set, intermediates are kept in a per-apk
  # subdirectory instead of a temp dir, to ease inspection.
  debug_temp_resources_dir = os.environ.get(_ENV_DEBUG_VARIABLE)
  if debug_temp_resources_dir:
    debug_temp_resources_dir = os.path.join(debug_temp_resources_dir,
                                            os.path.basename(options.apk_path))
    build_utils.DeleteDirectory(debug_temp_resources_dir)
    build_utils.MakeDirectory(debug_temp_resources_dir)

  with resource_utils.BuildContext(debug_temp_resources_dir) as build:
    dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
                                             build.deps_dir)

    _PackageApk(options, dep_subdirs, build.temp_dir, build.gen_dir,
                build.r_txt_path)

    r_txt_path = _WriteFinalRTxtFile(options, build.r_txt_path)

    # If --shared-resources-whitelist is used, all resources listed in the
    # corresponding R.txt file will be non-final, and an onResourcesLoaded()
    # method will be generated to adjust them at runtime.
    #
    # Otherwise, if --shared-resources is used, all resources will be
    # non-final, and an onResourcesLoaded() method will be generated too.
    #
    # Otherwise, all resources will be final, and no method will be generated.
    #
    rjava_build_options = resource_utils.RJavaBuildOptions()
    if options.shared_resources_whitelist:
      rjava_build_options.ExportSomeResources(
          options.shared_resources_whitelist)
      rjava_build_options.GenerateOnResourcesLoaded()
    elif options.shared_resources or options.app_as_shared_lib:
      rjava_build_options.ExportAllResources()
      rjava_build_options.GenerateOnResourcesLoaded()

    resource_utils.CreateRJavaFiles(
        build.srcjar_dir, None, r_txt_path,
        options.extra_res_packages,
        options.extra_r_text_files,
        rjava_build_options)

    if options.srcjar_out:
      build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)

    # Sanity-check the package ID baked into the output apk when requested.
    if options.check_resources_pkg_id is not None:
      expected_id = options.check_resources_pkg_id
      package_id = _ExtractPackageIdFromApk(options.apk_path,
                                            options.aapt_path)
      if package_id != expected_id:
        raise Exception('Invalid package ID 0x%x (expected 0x%x)' %
                        (package_id, expected_id))

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        options.apk_path,
        inputs=options.dependencies_res_zips + options.extra_r_text_files,
        add_pydeps=False)
def main(args):
  """Merges per-target .info files into .jar.info/.pak.info/.res.info files."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser(description=__doc__)
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--jar-info-path', required=True,
                      help='Output .jar.info file')
  parser.add_argument('--pak-info-path', required=True,
                      help='Output .pak.info file')
  parser.add_argument('--res-info-path', required=True,
                      help='Output .res.info file')
  parser.add_argument('--jar-files', required=True, action='append',
                      help='GN-list of .jar file paths')
  parser.add_argument('--assets', required=True, action='append',
                      help='GN-list of files to add as assets in the form '
                      '"srcPath:zipPath", where ":zipPath" is optional.')
  parser.add_argument('--uncompressed-assets', required=True, action='append',
                      help='Same as --assets, except disables compression.')
  parser.add_argument('--resource-apk', dest='resource_apks', required=True,
                      action='append',
                      help='An .ap_ file built using aapt')
  options = parser.parse_args(args)

  options.jar_files = build_utils.ParseGnList(options.jar_files)
  options.assets = build_utils.ParseGnList(options.assets)
  options.uncompressed_assets = build_utils.ParseGnList(
      options.uncompressed_assets)

  jar_inputs = _FindJarInputs(set(options.jar_files))
  pak_inputs = _PakInfoPathsForAssets(options.assets +
                                      options.uncompressed_assets)
  res_inputs = [p + '.info' for p in options.resource_apks]

  # Don't bother re-running if no .info files have changed (saves ~250ms).
  md5_check.CallAndRecordIfStale(
      lambda: _MergeJarInfoFiles(options.jar_info_path, jar_inputs),
      input_paths=jar_inputs,
      output_paths=[options.jar_info_path])

  # Always recreate these (just as fast as md5 checking them).
  _MergePakInfoFiles(options.pak_info_path, pak_inputs)
  _MergeResInfoFiles(options.res_info_path, res_inputs)

  all_inputs = jar_inputs + pak_inputs + res_inputs
  # NOTE(review): the depfile is written unconditionally; this assumes
  # --depfile is always passed by the build rules — confirm against callers.
  build_utils.WriteDepfile(options.depfile, options.jar_info_path,
                           inputs=all_inputs, add_pydeps=False)
def main(argv):
  """Generates Java proto bindings (nano or javalite) via protoc.

  Output goes either to --java-out-dir or into a --srcjar archive.
  Returns a nonzero exit code on invalid flag combinations.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option("--protoc", help="Path to protoc binary.")
  parser.add_option("--proto-path", help="Path to proto directory.")
  parser.add_option("--java-out-dir",
                    help="Path to output directory for java files.")
  parser.add_option("--srcjar", help="Path to output srcjar.")
  parser.add_option("--stamp", help="File to touch on success.")
  parser.add_option("--nano",
                    help="Use to generate nano protos.", action='store_true')
  parser.add_option("--protoc-javalite-plugin-dir",
                    help="Path to protoc java lite plugin directory.")
  options, args = parser.parse_args(argv)

  build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
  if not options.java_out_dir and not options.srcjar:
    print 'One of --java-out-dir or --srcjar must be specified.'
    return 1
  if not options.nano and not options.protoc_javalite_plugin_dir:
    print 'One of --nano or --protoc-javalite-plugin-dir must be specified.'
    return 1

  with build_utils.TempDir() as temp_dir:
    if options.nano:
      # Specify arguments to the generator.
      generator_args = ['optional_field_style=reftypes',
                        'store_unknown_fields=true']
      out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir
    else:
      out_arg = '--javalite_out=' + temp_dir

    custom_env = os.environ.copy()
    if options.protoc_javalite_plugin_dir:
      # If we are generating lite protos, then the lite plugin needs to be in
      # the path when protoc is called.
      # See https://github.com/protocolbuffers/protobuf/blob/master/java/lite.md
      custom_env['PATH'] = '{}:{}'.format(
          os.path.abspath(options.protoc_javalite_plugin_dir),
          custom_env['PATH'])

    # Generate Java files using protoc.
    build_utils.CheckOutput(
        [options.protoc, '--proto_path', options.proto_path, out_arg] + args,
        env=custom_env)

    if options.java_out_dir:
      build_utils.DeleteDirectory(options.java_out_dir)
      shutil.copytree(temp_dir, options.java_out_dir)
    else:
      build_utils.ZipDir(options.srcjar, temp_dir)

  if options.depfile:
    # The depfile's primary output must be the srcjar, not the stamp.
    assert options.srcjar
    deps = args + [options.protoc]
    build_utils.WriteDepfile(options.depfile, options.srcjar, deps,
                             add_pydeps=False)

  if options.stamp:
    build_utils.Touch(options.stamp)
def main():
  """Runs ProGuard or R8 over the configured inputs and writes a depfile."""
  options = _ParseOptions()

  # Classpath entries that are not program inputs become library jars.
  libraries = []
  for p in options.classpath:
    # TODO(bjoyce): Remove filter once old android support libraries are gone.
    # Fix for having Library class extend program class dependency problem.
    if 'com_android_support' in p or 'android_support_test' in p:
      continue
    # If a jar is part of input no need to include it as library jar.
    if p not in libraries and p not in options.input_paths:
      libraries.append(p)
  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)

  proguard_configs = options.proguard_configs
  if options.disable_checkdiscard:
    proguard_configs = _ValidateAndFilterCheckDiscards(proguard_configs)

  # ProGuard configs that are derived from flags.
  dynamic_config_data = _CreateDynamicConfig(options)

  # Merge the file-based configs with the flag-derived config.
  merged_configs = _CombineConfigs(
      proguard_configs, dynamic_config_data, exclude_generated=True)
  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose

  # Writing the config output before we know ProGuard is going to succeed
  # isn't great, since then a failure will result in one of the outputs being
  # updated. We do it anyways though because the error message prints out the
  # path to the config. Ninja will still know to re-run the command because of
  # the other stale outputs.
  if options.output_config:
    with open(options.output_config, 'w') as f:
      f.write(merged_configs)

  if options.expected_configs_file:
    _VerifyExpectedConfigs(options.expected_configs_file,
                           options.output_config,
                           options.proguard_expectations_failure_file)

  if options.r8_path:
    _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
                    print_stdout)
  else:
    _OptimizeWithProguard(options, proguard_configs, libraries,
                          dynamic_config_data, print_stdout)

  # After ProGuard / R8 has run:
  for output in options.extra_mapping_output_paths:
    shutil.copy(options.mapping_output, output)

  inputs = options.proguard_configs + options.input_paths + libraries
  if options.apply_mapping:
    inputs.append(options.apply_mapping)

  build_utils.WriteDepfile(
      options.depfile, options.output_path, inputs=inputs, add_pydeps=False)
def main():
  """Runs Android lint over the given sources, or just touches the stamp.

  Returns the lint exit code when --can-fail-build is set, otherwise 0.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--lint-path', help='Path to lint executable.')
  parser.add_option('--config-path', help='Path to lint suppressions file.')
  parser.add_option('--processed-config-path',
                    help='Path to processed lint suppressions file.')
  parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
  parser.add_option('--result-path', help='Path to XML lint result file.')
  parser.add_option('--product-dir', help='Path to product dir.')
  parser.add_option('--src-dirs', help='Directories containing java files.')
  parser.add_option('--java-files', help='Paths to java files.')
  parser.add_option('--jar-path', help='Jar file containing class files.')
  parser.add_option('--resource-dir', help='Path to resource dir.')
  parser.add_option('--can-fail-build', action='store_true',
                    help='If set, script will exit with nonzero exit status'
                    ' if lint errors are present')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--enable', action='store_true',
                    help='Run lint instead of just touching stamp.')
  options, _ = parser.parse_args()

  build_utils.CheckOptions(options, parser, required=[
      'lint_path', 'config_path', 'processed_config_path', 'manifest_path',
      'result_path', 'product_dir', 'jar_path'])

  rc = 0

  if options.enable:
    # Sources come either from scanning --src-dirs or from an explicit
    # --java-files list; exactly one of the two must be provided.
    sources = []
    if options.src_dirs:
      src_dirs = build_utils.ParseGypList(options.src_dirs)
      sources = build_utils.FindInDirectories(src_dirs, '*.java')
    elif options.java_files:
      sources = build_utils.ParseGypList(options.java_files)
    else:
      print 'One of --src-dirs or --java-files must be specified.'
      return 1
    rc = _RunLint(options.lint_path, options.config_path,
                  options.processed_config_path, options.manifest_path,
                  options.result_path, options.product_dir, sources,
                  options.jar_path, options.resource_dir)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())

  # Only stamp on lint success so the step re-runs until it passes.
  if options.stamp and not rc:
    build_utils.Touch(options.stamp)

  return rc if options.can_fail_build else 0
def _MaybeWriteDepAndStampFiles(options, depfile_deps):
  """Touches the stamp and writes the depfile when configured.

  In verify-only mode the stamp is the depfile's primary output; otherwise
  the output apk is.
  """
  if options.stamp:
    build_utils.Touch(options.stamp)
  if not options.depfile:
    return
  output = (options.stamp if options.only_verify_expectations
            else options.output_apk)
  build_utils.WriteDepfile(options.depfile, output, inputs=depfile_deps)
def main(args):
  """Runs the unused-resources detector and emits an aapt2 keep config."""
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--script', required=True,
                      help='Path to the unused resources detector script.')
  parser.add_argument(
      '--dependencies-res-zips', required=True,
      help='Resources zip archives to investigate for unused resources.')
  parser.add_argument('--dex', required=True,
                      help='Path to dex file, or zip with dex files.')
  parser.add_argument(
      '--proguard-mapping', required=True,
      help='Path to proguard mapping file for the optimized dex.')
  parser.add_argument('--r-text', required=True, help='Path to R.txt')
  parser.add_argument('--android-manifest', required=True,
                      help='Path to AndroidManifest')
  parser.add_argument('--output-config', required=True,
                      help='Path to output the aapt2 config to.')
  args = build_utils.ExpandFileArgs(args)
  options = parser.parse_args(args)
  options.dependencies_res_zips = (build_utils.ParseGnList(
      options.dependencies_res_zips))

  # In case of no resources, short circuit early (still produce the output).
  if not options.dependencies_res_zips:
    build_utils.Touch(options.output_config)
    return

  with build_utils.TempDir() as temp_dir:
    # Unpack every dependency resource zip into its own subdirectory.
    dep_subdirs = []
    for dependency_res_zip in options.dependencies_res_zips:
      dep_subdirs += resource_utils.ExtractDeps([dependency_res_zip],
                                                temp_dir)

    build_utils.CheckOutput([
        options.script, '--rtxts', options.r_text, '--manifests',
        options.android_manifest, '--resourceDirs', ':'.join(dep_subdirs),
        '--dex', options.dex, '--mapping', options.proguard_mapping,
        '--outputConfig', options.output_config
    ])

  if options.depfile:
    depfile_deps = options.dependencies_res_zips + [
        options.r_text,
        options.android_manifest,
        options.dex,
        options.proguard_mapping,
    ]
    build_utils.WriteDepfile(options.depfile, options.output_config,
                             depfile_deps)
def main(argv):
  """Generates Java proto bindings (lite or nano) via protoc.

  Verifies that lite protos opt into LITE_RUNTIME, then writes the output
  either to --java-out-dir or into a --srcjar archive.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option("--protoc", help="Path to protoc binary.")
  parser.add_option("--proto-path", help="Path to proto directory.")
  parser.add_option("--java-out-dir",
                    help="Path to output directory for java files.")
  parser.add_option("--srcjar", help="Path to output srcjar.")
  parser.add_option("--stamp", help="File to touch on success.")
  parser.add_option("--lite",
                    help="Use to generate lite protos.", action='store_true')
  options, args = parser.parse_args(argv)

  build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
  if not options.java_out_dir and not options.srcjar:
    print 'One of --java-out-dir or --srcjar must be specified.'
    return 1

  with build_utils.TempDir() as temp_dir:
    if options.lite:
      out_arg = '--java_out=' + temp_dir
      # Check if all proto files (which are listed in the args) are opting to
      # use the lite runtime, otherwise we'd have to include the much heavier
      # regular proto runtime in Chrome.
      for proto_file in args:
        if not 'LITE_RUNTIME' in open(proto_file).read():
          raise Exception(
              'Chrome only supports lite protos. Please add "optimize_for = '
              'LITE_RUNTIME" to your proto file to enable the lite runtime.')
    else:
      # Specify arguments to the generator.
      generator_args = [
          'optional_field_style=reftypes', 'store_unknown_fields=true'
      ]
      out_arg = '--javanano_out=' + ','.join(
          generator_args) + ':' + temp_dir

    # Generate Java files using protoc.
    build_utils.CheckOutput(
        [options.protoc, '--proto_path', options.proto_path, out_arg] + args)

    if options.java_out_dir:
      build_utils.DeleteDirectory(options.java_out_dir)
      shutil.copytree(temp_dir, options.java_out_dir)
    else:
      build_utils.ZipDir(options.srcjar, temp_dir)

  if options.depfile:
    # The depfile's primary output must be the srcjar, not the stamp.
    assert options.srcjar
    deps = args + [options.protoc]
    build_utils.WriteDepfile(options.depfile, options.srcjar, deps)

  if options.stamp:
    build_utils.Touch(options.stamp)
def main():
  """Runs R8 over the configured inputs, verifying configs and symbols."""
  build_utils.InitLogging('PROGUARD_DEBUG')
  options = _ParseOptions()

  logging.debug('Preparing configs')
  proguard_configs = options.proguard_configs

  # ProGuard configs that are derived from flags.
  dynamic_config_data = _CreateDynamicConfig(options)

  # Merge the file-based configs with the flag-derived config.
  merged_configs = _CombineConfigs(proguard_configs,
                                   dynamic_config_data,
                                   exclude_generated=True)
  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose

  if options.expected_file:
    diff_utils.CheckExpectations(merged_configs, options)
    # In verify-only mode there is nothing to optimize: record the depfile
    # and stop.
    if options.only_verify_expectations:
      build_utils.WriteDepfile(options.depfile,
                               options.actual_file,
                               inputs=options.proguard_configs)
      return

  logging.debug('Looking for embedded configs')
  # Classpath entries that are not program inputs become library jars.
  libraries = []
  for p in options.classpath:
    # TODO(bjoyce): Remove filter once old android support libraries are gone.
    # Fix for having Library class extend program class dependency problem.
    if 'com_android_support' in p or 'android_support_test' in p:
      continue
    # If a jar is part of input no need to include it as library jar.
    if p not in libraries and p not in options.input_paths:
      libraries.append(p)
  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)

  _OptimizeWithR8(options, proguard_configs, libraries, dynamic_config_data,
                  print_stdout)

  if not options.disable_checks:
    logging.debug('Running tracereferences')
    all_dex_files = []
    if options.output_path:
      all_dex_files.append(options.output_path)
    if options.dex_dests:
      all_dex_files.extend(options.dex_dests)
    _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath,
                            options.warnings_as_errors)

  for output in options.extra_mapping_output_paths:
    shutil.copy(options.mapping_output, output)

  inputs = options.proguard_configs + options.input_paths + libraries
  if options.apply_mapping:
    inputs.append(options.apply_mapping)

  _MaybeWriteStampAndDepFile(options, inputs)
def main(args):
  """Splits an input dex zip into per-feature-module dex outputs.

  With a single feature module the input zip is passed through unchanged;
  otherwise the Dexsplitter tool is run and its per-module outputs are moved
  (or re-zipped / extracted) to the requested destinations.
  """
  args = build_utils.ExpandFileArgs(args)
  options = _ParseOptions(args)

  # All feature jars are depfile inputs.
  input_paths = []
  for feature_jars in options.features.itervalues():
    for feature_jar in feature_jars:
      input_paths.append(feature_jar)

  with build_utils.TempDir() as dexsplitter_output_dir:
    curr_location_to_dest = []
    if len(options.features) == 1:
      # Don't run dexsplitter since it needs at least 1 feature module.
      curr_location_to_dest.append((options.input_dex_zip,
                                    options.dex_dests[0]))
    else:
      _RunDexsplitter(options, dexsplitter_output_dir)

      for i, dest in enumerate(options.dex_dests):
        module_dex_file = os.path.join(dexsplitter_output_dir,
                                       options.feature_names[i],
                                       'classes.dex')
        if os.path.exists(module_dex_file):
          curr_location_to_dest.append((module_dex_file, dest))
        else:
          module_dex_file += '.zip'
          assert os.path.exists(
              module_dex_file), 'Dexsplitter tool output not found.'
          # BUG FIX: this previously appended module_dex_file + '.zip', i.e.
          # a '.zip.zip' path that was never produced by the tool. Use the
          # already-suffixed path that the assert above just validated.
          curr_location_to_dest.append((module_dex_file, dest))

    for curr_location, dest in curr_location_to_dest:
      with build_utils.AtomicOutput(dest) as f:
        if curr_location.endswith('.zip'):
          if dest.endswith('.zip'):
            shutil.move(curr_location, f.name)
          else:
            # Destination wants a bare dex file: extract the single entry.
            with zipfile.ZipFile(curr_location, 'r') as z:
              namelist = z.namelist()
              assert len(namelist) == 1, (
                  'Unzipping to single dex file, but not single dex file in '
                  + options.input_dex_zip)
              z.extract(namelist[0], f.name)
        else:
          if dest.endswith('.zip'):
            build_utils.ZipDir(
                f.name,
                os.path.abspath(os.path.join(curr_location, os.pardir)))
          else:
            shutil.move(curr_location, f.name)

  build_utils.Touch(options.stamp)
  build_utils.WriteDepfile(options.depfile, options.stamp,
                           inputs=input_paths)
def main():
  """Runs ProGuard or R8 over the configured inputs and writes a depfile."""
  options = _ParseOptions()

  # Classpath entries that are not program inputs become library jars.
  libraries = []
  for p in options.classpath:
    # If a jar is part of input no need to include it as library jar.
    if p not in libraries and p not in options.input_paths:
      libraries.append(p)
  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)

  # Apply config exclusion filter.
  config_paths = [
      p for p in options.proguard_configs
      if p not in options.proguard_config_exclusions
  ]

  # ProGuard configs that are derived from flags.
  dynamic_config_data = _CreateDynamicConfig(options)

  # Merge the file-based configs with the flag-derived config.
  merged_configs = _CombineConfigs(
      config_paths, dynamic_config_data, exclude_generated=True)
  print_stdout = '-whyareyoukeeping' in merged_configs or options.verbose

  # Writing the config output before we know ProGuard is going to succeed
  # isn't great, since then a failure will result in one of the outputs being
  # updated. We do it anyways though because the error message prints out the
  # path to the config. Ninja will still know to re-run the command because of
  # the other stale outputs.
  if options.output_config:
    with open(options.output_config, 'w') as f:
      f.write(merged_configs)

  if options.expected_configs_file:
    _VerifyExpectedConfigs(options.expected_configs_file,
                           options.output_config,
                           options.verify_expected_configs)

  if options.r8_path:
    _OptimizeWithR8(options, config_paths, libraries, dynamic_config_data,
                    print_stdout)
  else:
    _OptimizeWithProguard(options, config_paths, libraries,
                          dynamic_config_data, print_stdout)

  # After ProGuard / R8 has run:
  for output in options.extra_mapping_output_paths:
    shutil.copy(options.mapping_output, output)

  inputs = options.proguard_configs + options.input_paths + libraries
  if options.apply_mapping:
    # BUG FIX: this was `inputs += options.apply_mapping`, which extends the
    # list with the individual *characters* of the mapping path string.
    # Append the path itself (matching the sibling proguard scripts).
    inputs.append(options.apply_mapping)

  build_utils.WriteDepfile(
      options.depfile, options.output_path, inputs=inputs, add_pydeps=False)
def main():
  """Expands Jinja2 templates into a single output file or a zip of outputs."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--inputs', help='The template files to process.')
  parser.add_option('--output', help='The output file to generate. Valid '
                    'only if there is a single input.')
  parser.add_option('--outputs-zip', help='A zip file containing the processed '
                    'templates. Required if there are multiple inputs.')
  parser.add_option('--inputs-base-dir', help='A common ancestor directory of '
                    'the inputs. Each output\'s path in the output zip will '
                    'match the relative path from INPUTS_BASE_DIR to the '
                    'input. Required if --output-zip is given.')
  parser.add_option('--variables', help='Variables to be made available in the '
                    'template processing environment, as a GYP list (e.g. '
                    '--variables "channel=beta mstone=39")', default='')
  options, args = parser.parse_args()

  build_utils.CheckOptions(options, parser, required=['inputs'])
  inputs = build_utils.ParseGypList(options.inputs)

  # Exactly one output mode must be selected, and it must match the number
  # of inputs.
  if (options.output is None) == (options.outputs_zip is None):
    parser.error('Exactly one of --output and --output-zip must be given')
  if options.output and len(inputs) != 1:
    parser.error('--output cannot be used with multiple inputs')
  if options.outputs_zip and not options.inputs_base_dir:
    parser.error('--inputs-base-dir must be given when --output-zip is used')
  if args:
    parser.error('No positional arguments should be given.')

  # Parse "name=value" pairs into the template variable dict.
  variables = {}
  for v in build_utils.ParseGypList(options.variables):
    if '=' not in v:
      parser.error('--variables argument must contain "=": ' + v)
    name, _, value = v.partition('=')
    variables[name] = value

  # The recording loader tracks which templates were opened so they can be
  # listed in the depfile below.
  loader = RecordingFileSystemLoader(build_utils.CHROMIUM_SRC)
  env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined,
                           line_comment_prefix='##')
  if options.output:
    ProcessFile(env, inputs[0], options.output, variables)
  else:
    ProcessFiles(env, inputs, options.inputs_base_dir, options.outputs_zip,
                 variables)

  if options.depfile:
    deps = loader.get_loaded_templates() + build_utils.GetPythonDependencies()
    build_utils.WriteDepfile(options.depfile, deps)
def main(argv):
  """Writes an executable wrapper script for running a jar's main class.

  Supports an optional bootclasspath, extra program args, and the JVM
  -noverify flag.
  """
  argv = build_utils.ExpandFileArgs(argv)
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--output', help='Output path for executable script.')
  parser.add_option('--jar-path', help='Path to the main jar.')
  parser.add_option(
      '--main-class',
      help='Name of the java class with the "main" entry point.')
  parser.add_option('--classpath', action='append', default=[],
                    help='Classpath for running the jar.')
  parser.add_option(
      '--bootclasspath', action='append', default=[],
      help='zip/jar files to add to bootclasspath for java cmd.')
  parser.add_option('--noverify', action='store_true',
                    help='JVM flag: noverify.')
  options, extra_program_args = parser.parse_args(argv)

  # The flag is baked into the generated script as a line of python.
  if (options.noverify):
    noverify_flag = 'java_cmd.append("-noverify")'
  else:
    noverify_flag = ''

  classpath = [options.jar_path]
  for cp_arg in options.classpath:
    classpath += build_utils.ParseGypList(cp_arg)

  bootclasspath = []
  for bootcp_arg in options.bootclasspath:
    bootclasspath += build_utils.ParseGypList(bootcp_arg)

  # Paths are stored relative to the generated script's directory.
  run_dir = os.path.dirname(options.output)
  bootclasspath = [os.path.relpath(p, run_dir) for p in bootclasspath]
  classpath = [os.path.relpath(p, run_dir) for p in classpath]

  with open(options.output, 'w') as script:
    script.write(script_template.format(
        classpath=('"%s"' % '", "'.join(classpath)),
        bootclasspath=('"%s"' % '", "'.join(bootclasspath)
                       if bootclasspath else ''),
        main_class=options.main_class,
        extra_program_args=repr(extra_program_args),
        noverify_flag=noverify_flag))

  # rwxr-x---: the wrapper must be directly executable.
  os.chmod(options.output, 0750)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())
def main():
  """Packages Android resources/assets into an apk using aapt.

  Optionally creates density-based splits and writes a depfile.
  """
  options = ParseArgs()
  android_jar = os.path.join(options.android_sdk, 'android.jar')
  aapt = options.aapt_path

  with build_utils.TempDir() as temp_dir:
    package_command = [aapt,
                       'package',
                       '--version-code', options.version_code,
                       '--version-name', options.version_name,
                       '-M', options.android_manifest,
                       '--no-crunch',
                       '-f',
                       '--auto-add-overlay',
                       '-I', android_jar,
                       '-F', options.apk_path,
                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN,
                       ]

    # '-0 ext' disables compression for files with the given extension.
    if options.no_compress:
      for ext in options.no_compress.split(','):
        package_command += ['-0', ext]
    if options.shared_resources:
      package_command.append('--shared-lib')

    if options.asset_dir and os.path.exists(options.asset_dir):
      package_command += ['-A', options.asset_dir]

    if options.resource_zips:
      # Each dependency zip is extracted into a uniquely-named subdir; name
      # collisions are treated as fatal.
      dep_zips = build_utils.ParseGypList(options.resource_zips)
      for z in dep_zips:
        subdir = os.path.join(temp_dir, os.path.basename(z))
        if os.path.exists(subdir):
          raise Exception('Resource zip name conflict: ' +
                          os.path.basename(z))
        build_utils.ExtractAll(z, path=subdir)
        package_command += PackageArgsForExtractedZip(subdir)

    if options.create_density_splits:
      for config in DENSITY_SPLITS.itervalues():
        package_command.extend(('--split', ','.join(config)))

    if 'Debug' in options.configuration_name:
      package_command += ['--debug-mode']

    build_utils.CheckOutput(
        package_command, print_stdout=False, print_stderr=False)

    if options.create_density_splits:
      CheckDensityMissedConfigs(options.apk_path)
      RenameDensitySplits(options.apk_path)

    if options.depfile:
      build_utils.WriteDepfile(
          options.depfile,
          build_utils.GetPythonDependencies())
def main(args):
  """Creates a zip from loose files and/or by merging other zips."""
  args = build_utils.ExpandFileArgs(args)
  # NOTE(review): `args` is passed as ArgumentParser's first positional
  # parameter (prog), not as the args to parse — parse_args(args) below does
  # the real parsing. Looks unintended but harmless; confirm before changing.
  parser = argparse.ArgumentParser(args)
  parser.add_argument('--input-files', help='GN-list of files to zip.')
  parser.add_argument(
      '--input-files-base-dir',
      help='Paths in the archive will be relative to this directory')
  parser.add_argument('--input-zips', help='GN-list of zips to merge.')
  parser.add_argument('--input-zips-excluded-globs',
                      help='GN-list of globs for paths to exclude.')
  parser.add_argument('--output', required=True,
                      help='Path to output archive.')
  compress_group = parser.add_mutually_exclusive_group()
  compress_group.add_argument('--compress', action='store_true',
                              help='Compress entries')
  compress_group.add_argument('--no-compress', action='store_false',
                              dest='compress',
                              help='Do not compress entries')
  build_utils.AddDepfileOption(parser)
  options = parser.parse_args(args)

  with build_utils.AtomicOutput(options.output) as f:
    with zipfile.ZipFile(f.name, 'w') as out_zip:
      depfile_deps = None
      if options.input_files:
        files = build_utils.ParseGnList(options.input_files)
        build_utils.DoZip(files, out_zip,
                          base_dir=options.input_files_base_dir,
                          compress_fn=lambda _: options.compress)

      if options.input_zips:
        files = build_utils.ParseGnList(options.input_zips)
        depfile_deps = files
        path_transform = None
        if options.input_zips_excluded_globs:
          globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
          # Map excluded paths to None so MergeZips drops them.
          path_transform = (
              lambda p: None if build_utils.MatchesGlob(p, globs) else p)
        build_utils.MergeZips(out_zip, files,
                              path_transform=path_transform,
                              compress=options.compress)

  # Depfile used only by dist_jar().
  if options.depfile:
    build_utils.WriteDepfile(options.depfile, options.output,
                             inputs=depfile_deps, add_pydeps=False)
def _RunInstrumentCommand(_command, options, _, option_parser):
  """Instruments jar files using EMMA.

  Args:
    _command: String indicating the command that was received to trigger
        this function (unused).
    options: optparse options dictionary.
    _: List of extra args from optparse (unused).
    option_parser: optparse.OptionParser object, used to report errors.

  Returns:
    An exit code (0 on success, 1 on failure).
  """
  if not (options.input_path and options.output_path and
          options.coverage_file and options.sources_list_file and
          (options.source_files or options.source_dirs) and
          options.src_root and options.emma_jar):
    option_parser.error('All arguments are required.')

  # Remove a stale coverage file so EMMA starts from a clean slate.
  if os.path.exists(options.coverage_file):
    os.remove(options.coverage_file)
  temp_dir = tempfile.mkdtemp()
  try:
    cmd = ['java', '-cp', options.emma_jar,
           'emma', 'instr',
           '-ip', options.input_path,
           '-ix', options.filter_string,
           '-d', temp_dir,
           '-out', options.coverage_file,
           '-m', 'fullcopy']
    build_utils.CheckOutput(cmd)

    # 'fullcopy' mode writes the instrumented jar under <temp_dir>/lib;
    # exactly one jar is expected there.
    temp_jar_dir = os.path.join(temp_dir, 'lib')
    jars = os.listdir(temp_jar_dir)
    if len(jars) != 1:
      print('Error: multiple output files in: %s' % (temp_jar_dir))
      return 1

    shutil.copy(os.path.join(temp_jar_dir, jars[0]), options.output_path)
  finally:
    shutil.rmtree(temp_dir)

  if options.source_dirs:
    source_dirs = build_utils.ParseGypList(options.source_dirs)
  else:
    source_dirs = _GetSourceDirsFromSourceFiles(options.source_files)
  _CreateSourcesListFile(source_dirs, options.sources_list_file,
                         options.src_root)

  if options.stamp:
    build_utils.Touch(options.stamp)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())

  return 0
def main(argv):
  """Generates Java lite proto bindings via protoc.

  Output goes either to --java-out-dir or into a --srcjar archive.
  Raises on invalid flag combinations.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--protoc', required=True,
                      help='Path to protoc binary.')
  parser.add_argument('--proto-path', required=True,
                      help='Path to proto directory.')
  parser.add_argument('--java-out-dir',
                      help='Path to output directory for java files.')
  parser.add_argument('--srcjar', help='Path to output srcjar.')
  parser.add_argument('--stamp', help='File to touch on success.')
  parser.add_argument(
      '--import-dir', action='append', default=[],
      help='Extra import directory for protos, can be repeated.')
  parser.add_argument('protos', nargs='+', help='proto source files')
  options = parser.parse_args(argv)

  if not options.java_out_dir and not options.srcjar:
    raise Exception('One of --java-out-dir or --srcjar must be specified.')

  _EnforceJavaPackage(options.protos)

  with build_utils.TempDir() as temp_dir:
    out_arg = '--java_out=lite:' + temp_dir

    proto_path_args = ['--proto_path', options.proto_path]
    for path in options.import_dir:
      proto_path_args += ["--proto_path", path]

    # Generate Java files using protoc.
    build_utils.CheckOutput(
        [options.protoc] + proto_path_args + [out_arg] + options.protos,
        # protoc generates superfluous warnings about LITE_RUNTIME deprecation
        # even though we are using the new non-deprecated method.
        stderr_filter=lambda output: build_utils.FilterLines(
            output, '|'.join(
                [r'optimize_for = LITE_RUNTIME', r'java/lite\.md'])))

    if options.java_out_dir:
      build_utils.DeleteDirectory(options.java_out_dir)
      shutil.copytree(temp_dir, options.java_out_dir)
    else:
      build_utils.ZipDir(options.srcjar, temp_dir)

  if options.depfile:
    # The depfile's primary output must be the srcjar, not the stamp.
    assert options.srcjar
    deps = options.protos + [options.protoc]
    build_utils.WriteDepfile(options.depfile, options.srcjar, deps)

  if options.stamp:
    build_utils.Touch(options.stamp)
def main():
  """Runs Android lint over the configured sources and stamps on success."""
  build_utils.InitLogging('LINT_DEBUG')
  args = _ParseArgs(sys.argv[1:])

  # TODO(wnwen): Consider removing lint cache now that there are only two lint
  # invocations.
  # Avoid parallelizing cache creation since lint runs without the cache
  # defeat the purpose of creating the cache in the first place.
  if (not args.create_cache and not args.skip_build_server
      and server_utils.MaybeRunCommand(name=args.target_name,
                                       argv=sys.argv,
                                       stamp_file=args.stamp,
                                       force=args.use_build_server)):
    # The build server took ownership of this invocation.
    return

  # Flatten the sources-list files into actual source paths.
  sources = []
  for java_sources_file in args.java_sources:
    sources.extend(build_utils.ReadSourcesList(java_sources_file))
  resource_sources = []
  for resource_sources_file in args.resource_sources:
    resource_sources.extend(
        build_utils.ReadSourcesList(resource_sources_file))

  # Optional entries (baseline, manifest) may be None; filter them out.
  possible_depfile_deps = (args.srcjars + args.resource_zips + sources +
                           resource_sources + [
                               args.baseline,
                               args.manifest_path,
                           ])
  depfile_deps = [p for p in possible_depfile_deps if p]

  _RunLint(args.create_cache, args.lint_binary_path, args.backported_methods,
           args.config_path, args.manifest_path, args.extra_manifest_paths,
           sources, args.classpath, args.cache_dir, args.android_sdk_version,
           args.aars, args.srcjars, args.min_sdk_version, resource_sources,
           args.resource_zips, args.android_sdk_root, args.lint_gen_dir,
           args.baseline,
           testonly_target=args.testonly,
           warnings_as_errors=args.warnings_as_errors)
  logging.info('Creating stamp file')
  build_utils.Touch(args.stamp)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)
def main():
  """Generates a srcjar containing a ModuleDescriptor java class."""
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--module', required=True, help='The module name.')
  parser.add_argument('--libraries-file',
                      required=True,
                      help='Path to file with GN list of library paths')
  parser.add_argument('--paks', help='GN list of PAK file paths')
  parser.add_argument('--output',
                      required=True,
                      help='Path to the generated srcjar file.')
  parser.add_argument(
      '--load-native-on-get-impl',
      action='store_true',
      default=False,
      help='Load module automatically on calling Module.getImpl().')
  options = parser.parse_args()
  options.paks = build_utils.ParseGnList(options.paks)

  with open(options.libraries_file) as f:
    library_paths = build_utils.ParseGnList(f.read())

  def _BaseLibraryName(path):
    # Library files are named lib<name>.so; strip the prefix and suffix.
    filename = os.path.split(path.strip())[1]
    assert filename.startswith('lib')
    assert filename.endswith('.so')
    return filename[3:-3]

  libraries = [_BaseLibraryName(p) for p in library_paths]
  paks = options.paks if options.paks else []

  format_dict = {
      'MODULE': options.module,
      'LIBRARIES': ','.join('"%s"' % name for name in libraries),
      'PAKS': ','.join('"%s"' % os.path.basename(p) for p in paks),
      'LOAD_NATIVE_ON_GET_IMPL':
      ('true' if options.load_native_on_get_impl else 'false'),
  }
  with build_utils.AtomicOutput(options.output) as f:
    with zipfile.ZipFile(f.name, 'w') as srcjar_file:
      build_utils.AddToZipHermetic(
          srcjar_file,
          'org/chromium/components/module_installer/builder/'
          'ModuleDescriptor_%s.java' % options.module,
          data=_TEMPLATE.format(**format_dict))

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             options.output,
                             inputs=[options.libraries_file])
def main(args):
  """Performs dexing and records the inputs in a depfile."""
  options = _ParseArgs(args)

  # The main-dex list file is an extra input when multidex is in use.
  extra_inputs = []
  if options.multi_dex and options.main_dex_list_path:
    extra_inputs.append(options.main_dex_list_path)
  input_paths = list(options.inputs) + extra_inputs

  _PerformDexing(options)

  build_utils.WriteDepfile(options.depfile,
                           options.output,
                           input_paths,
                           add_pydeps=False)
def main():
  """Builds a split AndroidManifest.xml from the main manifest.

  Reads --main-manifest, derives the split manifest via Build(), writes it
  to --out-manifest, and optionally writes a depfile.
  """
  options = ParseArgs()
  # open() (rather than the py2-only file() builtin) behaves identically
  # here, and the with-block closes the read handle promptly.
  with open(options.main_manifest) as f:
    main_manifest = f.read()
  split_manifest = Build(main_manifest, options.split, options.has_code)
  with open(options.out_manifest, 'w') as f:
    f.write(split_manifest)
  if options.depfile:
    # Bug fix: depend on the manifest *path*, not its contents. The original
    # passed `main_manifest` (the file's text) into the deps list, producing
    # a garbage depfile entry instead of a dependency on the input file.
    build_utils.WriteDepfile(
        options.depfile,
        [options.main_manifest] + build_utils.GetPythonDependencies())
def main():
  """Generates a split manifest from the main manifest, plus a depfile."""
  options = ParseArgs()
  manifest_data = file(options.main_manifest).read()
  generated = Build(manifest_data, options.split, options.has_code)
  with file(options.out_manifest, 'w') as out_file:
    out_file.write(generated)
  if options.depfile:
    # The only non-script input is the main manifest itself.
    build_utils.WriteDepfile(options.depfile, options.out_manifest,
                             [options.main_manifest])
def main():
  """Runs Bazel's Desugar.jar over an input jar to produce an output jar."""
  args = build_utils.ExpandFileArgs(sys.argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--desugar-jar',
                      required=True,
                      help='Path to Desugar.jar.')
  parser.add_argument('--input-jar',
                      required=True,
                      help='Jar input path to include .class files from.')
  parser.add_argument('--output-jar', required=True, help='Jar output path.')
  parser.add_argument('--classpath',
                      action='append',
                      required=True,
                      help='Classpath.')
  parser.add_argument('--bootclasspath',
                      required=True,
                      help='Path to javac bootclasspath interface jar.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  options = parser.parse_args(args)

  # Expand the GN lists into plain python lists of paths.
  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)

  desugar_cmd = build_utils.JavaCmd(options.warnings_as_errors)
  desugar_cmd += [
      '-jar',
      options.desugar_jar,
      '--input',
      options.input_jar,
      '--output',
      options.output_jar,
      '--generate_base_classes_for_default_methods',
      # Don't include try-with-resources files in every .jar. Instead, they
      # are included via //third_party/bazel/desugar:desugar_runtime_java.
      '--desugar_try_with_resources_omit_runtime_classes',
  ]
  for entry in options.bootclasspath:
    desugar_cmd.extend(['--bootclasspath_entry', entry])
  for entry in options.classpath:
    desugar_cmd.extend(['--classpath_entry', entry])
  build_utils.CheckOutput(
      desugar_cmd,
      print_stdout=False,
      stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
      fail_on_output=options.warnings_as_errors)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             options.output_jar,
                             inputs=options.bootclasspath + options.classpath)
def main():
  """Rewrites a manifest, optionally disabling isolated processes."""
  options = _ParseArgs()
  with open(options.src_manifest) as src:
    original_data = src.read()
  rewritten = _ProcessManifest(original_data,
                               options.disable_isolated_processes)
  with open(options.out_manifest, 'w') as out:
    out.write(rewritten)
  if options.depfile:
    # The source manifest is the only input that varies.
    build_utils.WriteDepfile(options.depfile, options.out_manifest,
                             [options.src_manifest])
def main(args):
  """Assembles a distributable .aar from jars, resources and configs."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--output', required=True, help='Path to output aar.')
  parser.add_argument('--jars', required=True, help='GN list of jar inputs.')
  parser.add_argument('--dependencies-res-zips',
                      required=True,
                      help='GN list of resource zips')
  parser.add_argument('--r-text-files',
                      required=True,
                      help='GN list of R.txt files to merge')
  parser.add_argument('--proguard-configs',
                      required=True,
                      help='GN list of ProGuard flag files to merge.')
  parser.add_argument('--android-manifest',
                      help='Path to AndroidManifest.xml to include.',
                      default=os.path.join(_ANDROID_BUILD_DIR,
                                           'AndroidManifest.xml'))
  options = parser.parse_args(args)

  # Expand each GN-list argument into a plain python list.
  options.jars = build_utils.ParseGnList(options.jars)
  options.dependencies_res_zips = build_utils.ParseGnList(
      options.dependencies_res_zips)
  options.r_text_files = build_utils.ParseGnList(options.r_text_files)
  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)

  # Build the aar in a staging file, then move it into place so the output
  # is never observed half-written.
  with tempfile.NamedTemporaryFile(delete=False) as staging_file:
    try:
      with zipfile.ZipFile(staging_file.name, 'w') as z:
        build_utils.AddToZipHermetic(z,
                                     'AndroidManifest.xml',
                                     src_path=options.android_manifest)

        with tempfile.NamedTemporaryFile() as jar_file:
          build_utils.MergeZips(jar_file.name, options.jars)
          build_utils.AddToZipHermetic(z,
                                       'classes.jar',
                                       src_path=jar_file.name)

        build_utils.AddToZipHermetic(z,
                                     'R.txt',
                                     data=_MergeRTxt(options.r_text_files))
        build_utils.AddToZipHermetic(z, 'public.txt', data='')

        if options.proguard_configs:
          build_utils.AddToZipHermetic(
              z,
              'proguard.txt',
              data=_MergeProguardConfigs(options.proguard_configs))

        _AddResources(z, options.dependencies_res_zips)
    except:
      # Don't leave a stale staging file behind on failure.
      os.unlink(staging_file.name)
      raise
    shutil.move(staging_file.name, options.output)

  if options.depfile:
    all_inputs = (options.jars + options.dependencies_res_zips +
                  options.r_text_files + options.proguard_configs)
    build_utils.WriteDepfile(options.depfile, options.output, all_inputs)
def main(args):
  """Merges .jar.info, .pak.info and .res.info files and writes a depfile."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser(description=__doc__)
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--jar-info-path',
                      required=True,
                      help='Output .jar.info file')
  parser.add_argument('--pak-info-path',
                      required=True,
                      help='Output .pak.info file')
  parser.add_argument('--res-info-path',
                      required=True,
                      help='Output .res.info file')
  parser.add_argument('--jar-files',
                      required=True,
                      action='append',
                      help='GN-list of .jar file paths')
  parser.add_argument('--assets',
                      required=True,
                      action='append',
                      help='GN-list of files to add as assets in the form '
                      '"srcPath:zipPath", where ":zipPath" is optional.')
  parser.add_argument('--uncompressed-assets',
                      required=True,
                      action='append',
                      help='Same as --assets, except disables compression.')
  parser.add_argument('--in-res-info-path',
                      required=True,
                      action='append',
                      help='Paths to .ap_.info files')
  options = parser.parse_args(args)

  options.jar_files = build_utils.ParseGnList(options.jar_files)
  options.assets = build_utils.ParseGnList(options.assets)
  options.uncompressed_assets = build_utils.ParseGnList(
      options.uncompressed_assets)

  jar_inputs = _FindJarInputs(set(options.jar_files))
  pak_inputs = _PakInfoPathsForAssets(options.assets +
                                      options.uncompressed_assets)
  res_inputs = options.in_res_info_path

  # Just create the info files every time. See https://crbug.com/1045024
  _MergeJarInfoFiles(options.jar_info_path, jar_inputs)
  _MergePakInfoFiles(options.pak_info_path, pak_inputs)
  _MergeResInfoFiles(options.res_info_path, res_inputs)

  all_inputs = jar_inputs + pak_inputs + res_inputs
  build_utils.WriteDepfile(options.depfile,
                           options.jar_info_path,
                           inputs=all_inputs,
                           add_pydeps=False)
def main():
  """Processes Jinja templates into a single output file or an output zip."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--inputs',
                      required=True,
                      help='The template files to process.')
  parser.add_argument('--output',
                      help='The output file to generate. Valid '
                      'only if there is a single input.')
  parser.add_argument('--outputs-zip',
                      help='A zip file for the processed '
                      'templates. Required if there are multiple inputs.')
  parser.add_argument('--inputs-base-dir',
                      help='A common ancestor directory '
                      'of the inputs. Each output\'s path in the output zip '
                      'will match the relative path from INPUTS_BASE_DIR to '
                      'the input. Required if --output-zip is given.')
  parser.add_argument('--loader-base-dir',
                      help='Base path used by the '
                      'template loader. Must be a common ancestor directory of '
                      'the inputs. Defaults to DIR_SOURCE_ROOT.',
                      default=host_paths.DIR_SOURCE_ROOT)
  parser.add_argument('--variables',
                      help='Variables to be made available in '
                      'the template processing environment, as a GYP list '
                      '(e.g. --variables "channel=beta mstone=39")',
                      default='')
  build_utils.AddDepfileOption(parser)
  options = parser.parse_args()

  inputs = build_utils.ParseGypList(options.inputs)

  # Validate the output-mode flags: exactly one of --output/--outputs-zip.
  if (options.output is None) == (options.outputs_zip is None):
    parser.error('Exactly one of --output and --output-zip must be given')
  if options.output and len(inputs) != 1:
    parser.error('--output cannot be used with multiple inputs')
  if options.outputs_zip and not options.inputs_base_dir:
    parser.error('--inputs-base-dir must be given when --output-zip is used')

  variables = _ParseVariables(options.variables, parser.error)
  processor = JinjaProcessor(options.loader_base_dir, variables=variables)

  if options.output:
    _ProcessFile(processor, inputs[0], options.output)
  else:
    _ProcessFiles(processor, inputs, options.inputs_base_dir,
                  options.outputs_zip)

  if options.depfile:
    # Depend on every template the processor actually loaded, plus the
    # python sources involved.
    deps = (processor.GetLoadedTemplates() +
            build_utils.GetPythonDependencies())
    build_utils.WriteDepfile(options.depfile, deps)
def main(argv):
  """Generates JNI registration sources (GEN_JNI srcjar, optional header)."""
  arg_parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(arg_parser)
  arg_parser.add_argument('--sources-files',
                          required=True,
                          help='A list of .sources files which contain Java '
                          'file paths.')
  arg_parser.add_argument('--header-path',
                          help='Path to output header file (optional).')
  arg_parser.add_argument('--srcjar-path',
                          required=True,
                          help='Path to output srcjar for GEN_JNI.java.')
  arg_parser.add_argument('--sources-blacklist',
                          default=[],
                          help='A list of Java files which should be ignored '
                          'by the parser.')
  arg_parser.add_argument('--namespace',
                          default='',
                          help='Namespace to wrap the registration functions '
                          'into.')
  arg_parser.add_argument('--use_proxy_hash',
                          action='store_true',
                          help='Enables hashing of the native declaration '
                          'for methods in an @JniNatives interface')
  args = arg_parser.parse_args(build_utils.ExpandFileArgs(argv[1:]))

  args.sources_files = build_utils.ParseGnList(args.sources_files)

  # Flatten every .sources file into a list of Java file path strings,
  # skipping anything on the blacklist.
  java_file_paths = [
      path for sources_file in args.sources_files
      for path in build_utils.ReadSourcesList(sources_file)
      if path not in args.sources_blacklist
  ]
  _Generate(java_file_paths,
            args.srcjar_path,
            use_proxy_hash=args.use_proxy_hash,
            header_path=args.header_path,
            namespace=args.namespace)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile,
                             args.srcjar_path,
                             args.sources_files + java_file_paths,
                             add_pydeps=False)